summaryrefslogtreecommitdiffstats
path: root/meta/lib/oeqa
diff options
context:
space:
mode:
Diffstat (limited to 'meta/lib/oeqa')
-rw-r--r--meta/lib/oeqa/buildperf/base.py2
-rw-r--r--meta/lib/oeqa/buildtools-docs/cases/README2
-rw-r--r--meta/lib/oeqa/buildtools-docs/cases/build.py19
-rw-r--r--meta/lib/oeqa/buildtools/cases/README2
-rw-r--r--meta/lib/oeqa/buildtools/cases/build.py32
-rw-r--r--meta/lib/oeqa/buildtools/cases/gcc.py31
-rw-r--r--meta/lib/oeqa/buildtools/cases/https.py22
-rw-r--r--meta/lib/oeqa/buildtools/cases/sanity.py24
-rw-r--r--meta/lib/oeqa/controllers/__init__.py2
-rw-r--r--meta/lib/oeqa/controllers/controllerimage.py (renamed from meta/lib/oeqa/controllers/masterimage.py)44
-rw-r--r--meta/lib/oeqa/controllers/testtargetloader.py2
-rw-r--r--meta/lib/oeqa/core/case.py17
-rw-r--r--meta/lib/oeqa/core/context.py2
-rw-r--r--meta/lib/oeqa/core/decorator/__init__.py11
-rw-r--r--meta/lib/oeqa/core/decorator/data.py86
-rw-r--r--meta/lib/oeqa/core/decorator/oetimeout.py5
-rw-r--r--meta/lib/oeqa/core/loader.py12
-rw-r--r--meta/lib/oeqa/core/runner.py14
-rw-r--r--meta/lib/oeqa/core/target/__init__.py1
-rw-r--r--meta/lib/oeqa/core/target/qemu.py40
-rw-r--r--meta/lib/oeqa/core/target/serial.py315
-rw-r--r--meta/lib/oeqa/core/target/ssh.py115
-rw-r--r--meta/lib/oeqa/core/tests/cases/timeout.py13
-rw-r--r--meta/lib/oeqa/core/tests/common.py1
-rwxr-xr-xmeta/lib/oeqa/core/tests/test_data.py2
-rwxr-xr-xmeta/lib/oeqa/core/tests/test_decorators.py6
-rw-r--r--meta/lib/oeqa/core/utils/concurrencytest.py68
-rw-r--r--meta/lib/oeqa/core/utils/misc.py47
-rw-r--r--meta/lib/oeqa/files/maturin/guessing-game/Cargo.toml20
-rw-r--r--meta/lib/oeqa/files/maturin/guessing-game/LICENSE-APACHE201
-rw-r--r--meta/lib/oeqa/files/maturin/guessing-game/LICENSE-MIT25
-rw-r--r--meta/lib/oeqa/files/maturin/guessing-game/pyproject.toml8
-rw-r--r--meta/lib/oeqa/files/maturin/guessing-game/src/lib.rs48
-rw-r--r--meta/lib/oeqa/files/test.rs2
-rw-r--r--meta/lib/oeqa/files/testresults/testresults.json2
-rw-r--r--meta/lib/oeqa/manual/bsp-hw.json280
-rw-r--r--meta/lib/oeqa/manual/build-appliance.json2
-rw-r--r--meta/lib/oeqa/manual/crops.json294
-rw-r--r--meta/lib/oeqa/manual/eclipse-plugin.json322
-rw-r--r--meta/lib/oeqa/manual/sdk.json2
-rw-r--r--meta/lib/oeqa/manual/toaster-managed-mode.json16
-rw-r--r--meta/lib/oeqa/oetest.py24
-rw-r--r--meta/lib/oeqa/runtime/case.py18
-rw-r--r--meta/lib/oeqa/runtime/cases/_qemutiny.py13
-rw-r--r--meta/lib/oeqa/runtime/cases/apt.py40
-rw-r--r--meta/lib/oeqa/runtime/cases/boot.py4
-rw-r--r--meta/lib/oeqa/runtime/cases/buildcpio.py9
-rw-r--r--meta/lib/oeqa/runtime/cases/buildgalculator.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/buildlzip.py4
-rw-r--r--meta/lib/oeqa/runtime/cases/connman.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/date.py15
-rw-r--r--meta/lib/oeqa/runtime/cases/df.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/dnf.py88
-rw-r--r--meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py27
-rw-r--r--meta/lib/oeqa/runtime/cases/gcc.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/gi.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/go.py21
-rw-r--r--meta/lib/oeqa/runtime/cases/gstreamer.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/kernelmodule.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/ksample.py4
-rw-r--r--meta/lib/oeqa/runtime/cases/ldd.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/login.py116
-rw-r--r--meta/lib/oeqa/runtime/cases/logrotate.py16
-rw-r--r--meta/lib/oeqa/runtime/cases/ltp.py28
-rw-r--r--meta/lib/oeqa/runtime/cases/ltp_stress.py3
-rw-r--r--meta/lib/oeqa/runtime/cases/maturin.py58
-rw-r--r--meta/lib/oeqa/runtime/cases/multilib.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/oe_syslog.py15
-rw-r--r--meta/lib/oeqa/runtime/cases/opkg.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/pam.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-common.txt62
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt19
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt35
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarm64.txt6
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarmv5.txt19
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc.txt6
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc64.txt4
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-qemux86.txt2
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-x86.txt10
l---------meta/lib/oeqa/runtime/cases/parselogs-ignores-x86_64.txt1
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs.py382
-rw-r--r--meta/lib/oeqa/runtime/cases/perl.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/ping.py35
-rw-r--r--meta/lib/oeqa/runtime/cases/ptest.py14
-rw-r--r--meta/lib/oeqa/runtime/cases/python.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/rpm.py58
-rw-r--r--meta/lib/oeqa/runtime/cases/rt.py19
-rw-r--r--meta/lib/oeqa/runtime/cases/rtc.py17
-rw-r--r--meta/lib/oeqa/runtime/cases/runlevel.py5
-rw-r--r--meta/lib/oeqa/runtime/cases/rust.py64
-rw-r--r--meta/lib/oeqa/runtime/cases/scons.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/scp.py4
-rw-r--r--meta/lib/oeqa/runtime/cases/skeletoninit.py9
-rw-r--r--meta/lib/oeqa/runtime/cases/ssh.py33
-rw-r--r--meta/lib/oeqa/runtime/cases/stap.py42
-rw-r--r--meta/lib/oeqa/runtime/cases/storage.py18
-rw-r--r--meta/lib/oeqa/runtime/cases/suspend.py7
-rw-r--r--meta/lib/oeqa/runtime/cases/systemd.py41
-rw-r--r--meta/lib/oeqa/runtime/cases/terminal.py5
-rw-r--r--meta/lib/oeqa/runtime/cases/uki.py16
-rw-r--r--meta/lib/oeqa/runtime/cases/usb_hid.py7
-rw-r--r--meta/lib/oeqa/runtime/cases/weston.py22
-rw-r--r--meta/lib/oeqa/runtime/cases/x32lib.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/xorg.py2
-rw-r--r--meta/lib/oeqa/runtime/context.py60
-rw-r--r--meta/lib/oeqa/runtime/decorator/package.py18
-rw-r--r--meta/lib/oeqa/runtime/files/hello.stp1
-rw-r--r--meta/lib/oeqa/sdk/case.py67
-rw-r--r--meta/lib/oeqa/sdk/cases/autotools.py (renamed from meta/lib/oeqa/sdk/cases/buildcpio.py)24
-rw-r--r--meta/lib/oeqa/sdk/cases/buildepoxy.py41
-rw-r--r--meta/lib/oeqa/sdk/cases/buildgalculator.py43
-rw-r--r--meta/lib/oeqa/sdk/cases/cmake.py (renamed from meta/lib/oeqa/sdk/cases/assimp.py)23
-rw-r--r--meta/lib/oeqa/sdk/cases/gcc.py6
-rw-r--r--meta/lib/oeqa/sdk/cases/gtk3.py40
-rw-r--r--meta/lib/oeqa/sdk/cases/kmod.py39
-rw-r--r--meta/lib/oeqa/sdk/cases/makefile.py (renamed from meta/lib/oeqa/sdk/cases/buildlzip.py)12
-rw-r--r--meta/lib/oeqa/sdk/cases/manifest.py26
-rw-r--r--meta/lib/oeqa/sdk/cases/maturin.py66
-rw-r--r--meta/lib/oeqa/sdk/cases/meson.py72
-rw-r--r--meta/lib/oeqa/sdk/cases/perl.py7
-rw-r--r--meta/lib/oeqa/sdk/cases/python.py18
-rw-r--r--meta/lib/oeqa/sdk/cases/rust.py58
-rw-r--r--meta/lib/oeqa/sdk/context.py15
-rw-r--r--meta/lib/oeqa/sdk/files/rust/hello/Cargo.toml6
-rw-r--r--meta/lib/oeqa/sdk/files/rust/hello/build.rs3
-rw-r--r--meta/lib/oeqa/sdk/files/rust/hello/src/main.rs3
-rw-r--r--meta/lib/oeqa/sdk/testmetaidesupport.py45
-rw-r--r--meta/lib/oeqa/sdk/testsdk.py39
-rw-r--r--meta/lib/oeqa/sdkext/cases/devtool.py9
-rw-r--r--meta/lib/oeqa/sdkext/context.py4
-rw-r--r--meta/lib/oeqa/sdkext/testsdk.py10
-rw-r--r--meta/lib/oeqa/selftest/case.py19
-rw-r--r--meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py10
-rw-r--r--meta/lib/oeqa/selftest/cases/archiver.py68
-rw-r--r--meta/lib/oeqa/selftest/cases/barebox.py44
-rw-r--r--meta/lib/oeqa/selftest/cases/baremetal.py14
-rw-r--r--meta/lib/oeqa/selftest/cases/bbclasses.py106
-rw-r--r--meta/lib/oeqa/selftest/cases/bblayers.py161
-rw-r--r--meta/lib/oeqa/selftest/cases/bblock.py203
-rw-r--r--meta/lib/oeqa/selftest/cases/bblogging.py182
-rw-r--r--meta/lib/oeqa/selftest/cases/bbtests.py148
-rw-r--r--meta/lib/oeqa/selftest/cases/binutils.py18
-rw-r--r--meta/lib/oeqa/selftest/cases/buildhistory.py63
-rw-r--r--meta/lib/oeqa/selftest/cases/buildoptions.py61
-rw-r--r--meta/lib/oeqa/selftest/cases/c_cpp.py60
-rw-r--r--meta/lib/oeqa/selftest/cases/containerimage.py11
-rw-r--r--meta/lib/oeqa/selftest/cases/cve_check.py462
-rw-r--r--meta/lib/oeqa/selftest/cases/debuginfod.py160
-rw-r--r--meta/lib/oeqa/selftest/cases/devtool.py1412
-rw-r--r--meta/lib/oeqa/selftest/cases/distrodata.py35
-rw-r--r--meta/lib/oeqa/selftest/cases/efibootpartition.py46
-rw-r--r--meta/lib/oeqa/selftest/cases/esdk.py (renamed from meta/lib/oeqa/selftest/cases/eSDK.py)16
-rw-r--r--meta/lib/oeqa/selftest/cases/externalsrc.py44
-rw-r--r--meta/lib/oeqa/selftest/cases/fetch.py69
-rw-r--r--meta/lib/oeqa/selftest/cases/fitimage.py1864
-rw-r--r--meta/lib/oeqa/selftest/cases/gcc.py25
-rw-r--r--meta/lib/oeqa/selftest/cases/gdbserver.py67
-rw-r--r--meta/lib/oeqa/selftest/cases/gitarchivetests.py136
-rw-r--r--meta/lib/oeqa/selftest/cases/glibc.py24
-rw-r--r--meta/lib/oeqa/selftest/cases/gotoolchain.py5
-rw-r--r--meta/lib/oeqa/selftest/cases/image_typedep.py6
-rw-r--r--meta/lib/oeqa/selftest/cases/imagefeatures.py179
-rw-r--r--meta/lib/oeqa/selftest/cases/incompatible_lic.py142
-rw-r--r--meta/lib/oeqa/selftest/cases/intercept.py21
-rw-r--r--meta/lib/oeqa/selftest/cases/kerneldevelopment.py9
-rw-r--r--meta/lib/oeqa/selftest/cases/layerappend.py16
-rw-r--r--meta/lib/oeqa/selftest/cases/liboe.py41
-rw-r--r--meta/lib/oeqa/selftest/cases/lic_checksum.py25
-rw-r--r--meta/lib/oeqa/selftest/cases/locales.py54
-rw-r--r--meta/lib/oeqa/selftest/cases/manifest.py4
-rw-r--r--meta/lib/oeqa/selftest/cases/meta_ide.py25
-rw-r--r--meta/lib/oeqa/selftest/cases/minidebuginfo.py60
-rw-r--r--meta/lib/oeqa/selftest/cases/multiconfig.py21
-rw-r--r--meta/lib/oeqa/selftest/cases/newlib.py13
-rw-r--r--meta/lib/oeqa/selftest/cases/oelib/buildhistory.py26
-rw-r--r--meta/lib/oeqa/selftest/cases/oelib/elf.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/oelib/license.py24
-rw-r--r--meta/lib/oeqa/selftest/cases/oelib/path.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/oelib/types.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/oelib/utils.py5
-rw-r--r--meta/lib/oeqa/selftest/cases/oescripts.py69
-rw-r--r--meta/lib/oeqa/selftest/cases/overlayfs.py541
-rw-r--r--meta/lib/oeqa/selftest/cases/package.py72
-rw-r--r--meta/lib/oeqa/selftest/cases/picolibc.py18
-rw-r--r--meta/lib/oeqa/selftest/cases/pkgdata.py11
-rw-r--r--meta/lib/oeqa/selftest/cases/prservice.py35
-rw-r--r--meta/lib/oeqa/selftest/cases/pseudo.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/recipetool.py713
-rw-r--r--meta/lib/oeqa/selftest/cases/recipeutils.py20
-rw-r--r--meta/lib/oeqa/selftest/cases/reproducible.py180
-rw-r--r--meta/lib/oeqa/selftest/cases/resulttooltests.py279
-rw-r--r--meta/lib/oeqa/selftest/cases/retain.py241
-rw-r--r--meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py97
-rw-r--r--meta/lib/oeqa/selftest/cases/rpmtests.py14
-rw-r--r--meta/lib/oeqa/selftest/cases/runcmd.py10
-rw-r--r--meta/lib/oeqa/selftest/cases/runqemu.py83
-rw-r--r--meta/lib/oeqa/selftest/cases/runtime_test.py169
-rw-r--r--meta/lib/oeqa/selftest/cases/rust.py135
-rw-r--r--meta/lib/oeqa/selftest/cases/sdk.py39
-rw-r--r--meta/lib/oeqa/selftest/cases/selftest.py3
-rw-r--r--meta/lib/oeqa/selftest/cases/signing.py18
-rw-r--r--meta/lib/oeqa/selftest/cases/spdx.py288
-rw-r--r--meta/lib/oeqa/selftest/cases/sstate.py67
-rw-r--r--meta/lib/oeqa/selftest/cases/sstatetests.py761
-rw-r--r--meta/lib/oeqa/selftest/cases/sysroot.py59
-rw-r--r--meta/lib/oeqa/selftest/cases/tinfoil.py54
-rw-r--r--meta/lib/oeqa/selftest/cases/toolchain.py71
-rw-r--r--meta/lib/oeqa/selftest/cases/uboot.py98
-rw-r--r--meta/lib/oeqa/selftest/cases/uki.py141
-rw-r--r--meta/lib/oeqa/selftest/cases/usergrouptests.py57
-rw-r--r--meta/lib/oeqa/selftest/cases/wic.py1098
-rw-r--r--meta/lib/oeqa/selftest/cases/wrapper.py16
-rw-r--r--meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py39
-rw-r--r--meta/lib/oeqa/selftest/context.py161
-rw-r--r--meta/lib/oeqa/targetcontrol.py26
-rw-r--r--meta/lib/oeqa/utils/__init__.py15
-rw-r--r--meta/lib/oeqa/utils/buildproject.py3
-rw-r--r--meta/lib/oeqa/utils/commands.py81
-rw-r--r--meta/lib/oeqa/utils/decorators.py85
-rw-r--r--meta/lib/oeqa/utils/dump.py89
-rw-r--r--meta/lib/oeqa/utils/ftools.py2
-rw-r--r--meta/lib/oeqa/utils/gitarchive.py62
-rw-r--r--meta/lib/oeqa/utils/httpserver.py29
-rw-r--r--meta/lib/oeqa/utils/logparser.py98
-rw-r--r--meta/lib/oeqa/utils/metadata.py11
-rw-r--r--meta/lib/oeqa/utils/network.py2
-rw-r--r--meta/lib/oeqa/utils/nfs.py10
-rw-r--r--meta/lib/oeqa/utils/package_manager.py2
-rw-r--r--meta/lib/oeqa/utils/postactions.py102
-rw-r--r--meta/lib/oeqa/utils/qemurunner.py469
-rw-r--r--meta/lib/oeqa/utils/qemutinyrunner.py6
-rw-r--r--meta/lib/oeqa/utils/sshcontrol.py6
-rw-r--r--meta/lib/oeqa/utils/subprocesstweak.py15
-rw-r--r--meta/lib/oeqa/utils/targetbuild.py4
-rw-r--r--meta/lib/oeqa/utils/testexport.py10
235 files changed, 13531 insertions, 3876 deletions
diff --git a/meta/lib/oeqa/buildperf/base.py b/meta/lib/oeqa/buildperf/base.py
index 5f1805d86c..5d656c781a 100644
--- a/meta/lib/oeqa/buildperf/base.py
+++ b/meta/lib/oeqa/buildperf/base.py
@@ -444,7 +444,7 @@ class BuildPerfTestCase(unittest.TestCase):
444 buildstats = [] 444 buildstats = []
445 for fname in os.listdir(bs_dir): 445 for fname in os.listdir(bs_dir):
446 recipe_dir = os.path.join(bs_dir, fname) 446 recipe_dir = os.path.join(bs_dir, fname)
447 if not os.path.isdir(recipe_dir): 447 if not os.path.isdir(recipe_dir) or fname == "reduced_proc_pressure":
448 continue 448 continue
449 name, epoch, version, revision = split_nevr(fname) 449 name, epoch, version, revision = split_nevr(fname)
450 recipe_bs = OrderedDict((('name', name), 450 recipe_bs = OrderedDict((('name', name),
diff --git a/meta/lib/oeqa/buildtools-docs/cases/README b/meta/lib/oeqa/buildtools-docs/cases/README
new file mode 100644
index 0000000000..f8edbc7dad
--- /dev/null
+++ b/meta/lib/oeqa/buildtools-docs/cases/README
@@ -0,0 +1,2 @@
1These test cases are used by build-docs-tarball, and are not used by the testsdk
2class.
diff --git a/meta/lib/oeqa/buildtools-docs/cases/build.py b/meta/lib/oeqa/buildtools-docs/cases/build.py
new file mode 100644
index 0000000000..6e3ee94292
--- /dev/null
+++ b/meta/lib/oeqa/buildtools-docs/cases/build.py
@@ -0,0 +1,19 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import tempfile
8from oeqa.sdk.case import OESDKTestCase
9from oeqa.utils.subprocesstweak import errors_have_output
10errors_have_output()
11
12class BuildTests(OESDKTestCase):
13 """
14 Verify that our docs can build using our docs tools tarball.
15 """
16 def test_docs_build(self):
17 with tempfile.TemporaryDirectory(prefix='docs-tarball-build-', dir=self.tc.sdk_dir) as testdir:
18 self._run('git clone git://git.yoctoproject.org/yocto-docs %s' % testdir)
19 self._run('cd %s/documentation && make html' % testdir)
diff --git a/meta/lib/oeqa/buildtools/cases/README b/meta/lib/oeqa/buildtools/cases/README
new file mode 100644
index 0000000000..d4f20faa9f
--- /dev/null
+++ b/meta/lib/oeqa/buildtools/cases/README
@@ -0,0 +1,2 @@
1These test cases are used by buildtools-tarball, and are not used by the testsdk
2class.
diff --git a/meta/lib/oeqa/buildtools/cases/build.py b/meta/lib/oeqa/buildtools/cases/build.py
new file mode 100644
index 0000000000..c85c32496b
--- /dev/null
+++ b/meta/lib/oeqa/buildtools/cases/build.py
@@ -0,0 +1,32 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os, tempfile
8import time
9from oeqa.sdk.case import OESDKTestCase
10from oeqa.utils.subprocesstweak import errors_have_output
11errors_have_output()
12
13class BuildTests(OESDKTestCase):
14 """
15 Verify that bitbake can build virtual/libc inside the buildtools.
16 """
17 def test_libc(self):
18 with tempfile.TemporaryDirectory(prefix='bitbake-build-', dir=self.tc.sdk_dir) as testdir:
19 corebase = self.td['COREBASE']
20
21 self._run('. %s/oe-init-build-env %s' % (corebase, testdir))
22 with open(os.path.join(testdir, 'conf', 'local.conf'), 'ta') as conf:
23 conf.write('\n')
24 conf.write('DL_DIR = "%s"\n' % self.td['DL_DIR'])
25
26 try:
27 self._run('. %s/oe-init-build-env %s && bitbake virtual/libc' % (corebase, testdir))
28 finally:
29 delay = 10
30 while delay and (os.path.exists(testdir + "/bitbake.lock") or os.path.exists(testdir + "/cache/hashserv.db-wal")):
31 time.sleep(1)
32 delay = delay - 1
diff --git a/meta/lib/oeqa/buildtools/cases/gcc.py b/meta/lib/oeqa/buildtools/cases/gcc.py
new file mode 100644
index 0000000000..a62c4d0bc4
--- /dev/null
+++ b/meta/lib/oeqa/buildtools/cases/gcc.py
@@ -0,0 +1,31 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os.path
8from oeqa.sdk.case import OESDKTestCase
9
10class GccTests(OESDKTestCase):
11 def test_verify_specs(self):
12 """
13 Verify that the compiler has been relocated successfully and isn't
14 looking in the hard-coded prefix.
15 """
16 # Canonicalise the SDK root
17 sdk_base = os.path.realpath(self.tc.sdk_dir)
18 # Canonicalise the location of GCC
19 gcc_path = os.path.realpath(self._run("command -v gcc").strip())
20 # Skip the test if the GCC didn't come from the buildtools, as it only
21 # comes with buildtools-extended-tarball.
22 if os.path.commonprefix((sdk_base, gcc_path)) != sdk_base:
23 self.skipTest("Buildtools does not provide GCC")
24
25 # This is the prefix that GCC is build with, and should be replaced at
26 # installation time.
27 sdkpath = self.td.get("SDKPATH")
28 self.assertTrue(sdkpath)
29
30 for line in self._run('gcc -dumpspecs').splitlines():
31 self.assertNotIn(sdkpath, line)
diff --git a/meta/lib/oeqa/buildtools/cases/https.py b/meta/lib/oeqa/buildtools/cases/https.py
new file mode 100644
index 0000000000..4525e3d758
--- /dev/null
+++ b/meta/lib/oeqa/buildtools/cases/https.py
@@ -0,0 +1,22 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.sdk.case import OESDKTestCase
8from oeqa.utils.subprocesstweak import errors_have_output
9errors_have_output()
10
11class HTTPTests(OESDKTestCase):
12 """
13 Verify that HTTPS certificates are working correctly, as this depends on
14 environment variables being set correctly.
15 """
16
17 def test_wget(self):
18 self._run('env -i wget --debug --output-document /dev/null https://yoctoproject.org/connectivity.html')
19
20 def test_python(self):
21 # urlopen() returns a file-like object on success and throws an exception otherwise
22 self._run('python3 -c \'import urllib.request; urllib.request.urlopen("https://yoctoproject.org/connectivity.html")\'')
diff --git a/meta/lib/oeqa/buildtools/cases/sanity.py b/meta/lib/oeqa/buildtools/cases/sanity.py
new file mode 100644
index 0000000000..a55d456656
--- /dev/null
+++ b/meta/lib/oeqa/buildtools/cases/sanity.py
@@ -0,0 +1,24 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import shutil
8import os.path
9from oeqa.sdk.case import OESDKTestCase
10
11class SanityTests(OESDKTestCase):
12 def test_tools(self):
13 """
14 Test that wget and tar come from the buildtools, not the host. This
15 verifies that the buildtools have installed correctly. We can't check
16 for gcc as that is only installed by buildtools-extended.
17 """
18 for command in ("tar", "wget"):
19 # Canonicalise the SDK root
20 sdk_base = os.path.realpath(self.tc.sdk_dir)
21 # Canonicalise the location of this command
22 tool_path = os.path.realpath(self._run("command -v %s" % command).strip())
23 # Assert that the tool was found inside the SDK root
24 self.assertEqual(os.path.commonprefix((sdk_base, tool_path)), sdk_base)
diff --git a/meta/lib/oeqa/controllers/__init__.py b/meta/lib/oeqa/controllers/__init__.py
index cc3836c4bf..0fc905be9a 100644
--- a/meta/lib/oeqa/controllers/__init__.py
+++ b/meta/lib/oeqa/controllers/__init__.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4# Enable other layers to have modules in the same named directory 6# Enable other layers to have modules in the same named directory
diff --git a/meta/lib/oeqa/controllers/masterimage.py b/meta/lib/oeqa/controllers/controllerimage.py
index 0bf5917e48..78a4aaff87 100644
--- a/meta/lib/oeqa/controllers/masterimage.py
+++ b/meta/lib/oeqa/controllers/controllerimage.py
@@ -3,13 +3,13 @@
3# SPDX-License-Identifier: MIT 3# SPDX-License-Identifier: MIT
4# 4#
5# This module adds support to testimage.bbclass to deploy images and run 5# This module adds support to testimage.bbclass to deploy images and run
6# tests using a "master image" - this is a "known good" image that is 6# tests using a "controller image" - this is a "known good" image that is
7# installed onto the device as part of initial setup and will be booted into 7# installed onto the device as part of initial setup and will be booted into
8# with no interaction; we can then use it to deploy the image to be tested 8# with no interaction; we can then use it to deploy the image to be tested
9# to a second partition before running the tests. 9# to a second partition before running the tests.
10# 10#
11# For an example master image, see core-image-testmaster 11# For an example controller image, see core-image-testcontroller
12# (meta/recipes-extended/images/core-image-testmaster.bb) 12# (meta/recipes-extended/images/core-image-testcontroller.bb)
13 13
14import os 14import os
15import bb 15import bb
@@ -24,12 +24,12 @@ from oeqa.utils import CommandError
24 24
25from abc import ABCMeta, abstractmethod 25from abc import ABCMeta, abstractmethod
26 26
27class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta): 27class ControllerImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta):
28 28
29 supported_image_fstypes = ['tar.gz', 'tar.bz2'] 29 supported_image_fstypes = ['tar.gz', 'tar.bz2']
30 30
31 def __init__(self, d): 31 def __init__(self, d):
32 super(MasterImageHardwareTarget, self).__init__(d) 32 super(ControllerImageHardwareTarget, self).__init__(d)
33 33
34 # target ip 34 # target ip
35 addr = d.getVar("TEST_TARGET_IP") or bb.fatal('Please set TEST_TARGET_IP with the IP address of the machine you want to run the tests on.') 35 addr = d.getVar("TEST_TARGET_IP") or bb.fatal('Please set TEST_TARGET_IP with the IP address of the machine you want to run the tests on.')
@@ -61,8 +61,8 @@ class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta
61 if not os.path.isfile(self.kernel): 61 if not os.path.isfile(self.kernel):
62 bb.fatal("No kernel found. Expected path: %s" % self.kernel) 62 bb.fatal("No kernel found. Expected path: %s" % self.kernel)
63 63
64 # master ssh connection 64 # controller ssh connection
65 self.master = None 65 self.controller = None
66 # if the user knows what they are doing, then by all means... 66 # if the user knows what they are doing, then by all means...
67 self.user_cmds = d.getVar("TEST_DEPLOY_CMDS") 67 self.user_cmds = d.getVar("TEST_DEPLOY_CMDS")
68 self.deploy_cmds = None 68 self.deploy_cmds = None
@@ -119,19 +119,19 @@ class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta
119 119
120 def deploy(self): 120 def deploy(self):
121 # base class just sets the ssh log file for us 121 # base class just sets the ssh log file for us
122 super(MasterImageHardwareTarget, self).deploy() 122 super(ControllerImageHardwareTarget, self).deploy()
123 self.master = sshcontrol.SSHControl(ip=self.ip, logfile=self.sshlog, timeout=600, port=self.port) 123 self.controller = sshcontrol.SSHControl(ip=self.ip, logfile=self.sshlog, timeout=600, port=self.port)
124 status, output = self.master.run("cat /etc/masterimage") 124 status, output = self.controller.run("cat /etc/controllerimage")
125 if status != 0: 125 if status != 0:
126 # We're not booted into the master image, so try rebooting 126 # We're not booted into the controller image, so try rebooting
127 bb.plain("%s - booting into the master image" % self.pn) 127 bb.plain("%s - booting into the controller image" % self.pn)
128 self.power_ctl("cycle") 128 self.power_ctl("cycle")
129 self._wait_until_booted() 129 self._wait_until_booted()
130 130
131 bb.plain("%s - deploying image on target" % self.pn) 131 bb.plain("%s - deploying image on target" % self.pn)
132 status, output = self.master.run("cat /etc/masterimage") 132 status, output = self.controller.run("cat /etc/controllerimage")
133 if status != 0: 133 if status != 0:
134 bb.fatal("No ssh connectivity or target isn't running a master image.\n%s" % output) 134 bb.fatal("No ssh connectivity or target isn't running a controller image.\n%s" % output)
135 if self.user_cmds: 135 if self.user_cmds:
136 self.deploy_cmds = self.user_cmds.split("\n") 136 self.deploy_cmds = self.user_cmds.split("\n")
137 try: 137 try:
@@ -156,10 +156,10 @@ class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta
156 156
157 def stop(self): 157 def stop(self):
158 bb.plain("%s - reboot/powercycle target" % self.pn) 158 bb.plain("%s - reboot/powercycle target" % self.pn)
159 self.power_cycle(self.master) 159 self.power_cycle(self.controller)
160 160
161 161
162class SystemdbootTarget(MasterImageHardwareTarget): 162class SystemdbootTarget(ControllerImageHardwareTarget):
163 163
164 def __init__(self, d): 164 def __init__(self, d):
165 super(SystemdbootTarget, self).__init__(d) 165 super(SystemdbootTarget, self).__init__(d)
@@ -184,16 +184,16 @@ class SystemdbootTarget(MasterImageHardwareTarget):
184 184
185 def _deploy(self): 185 def _deploy(self):
186 # make sure these aren't mounted 186 # make sure these aren't mounted
187 self.master.run("umount /boot; umount /mnt/testrootfs; umount /sys/firmware/efi/efivars;") 187 self.controller.run("umount /boot; umount /mnt/testrootfs; umount /sys/firmware/efi/efivars;")
188 # from now on, every deploy cmd should return 0 188 # from now on, every deploy cmd should return 0
189 # else an exception will be thrown by sshcontrol 189 # else an exception will be thrown by sshcontrol
190 self.master.ignore_status = False 190 self.controller.ignore_status = False
191 self.master.copy_to(self.rootfs, "~/test-rootfs." + self.image_fstype) 191 self.controller.copy_to(self.rootfs, "~/test-rootfs." + self.image_fstype)
192 self.master.copy_to(self.kernel, "~/test-kernel") 192 self.controller.copy_to(self.kernel, "~/test-kernel")
193 for cmd in self.deploy_cmds: 193 for cmd in self.deploy_cmds:
194 self.master.run(cmd) 194 self.controller.run(cmd)
195 195
196 def _start(self, params=None): 196 def _start(self, params=None):
197 self.power_cycle(self.master) 197 self.power_cycle(self.controller)
198 # there are better ways than a timeout but this should work for now 198 # there are better ways than a timeout but this should work for now
199 time.sleep(120) 199 time.sleep(120)
diff --git a/meta/lib/oeqa/controllers/testtargetloader.py b/meta/lib/oeqa/controllers/testtargetloader.py
index 23101c7371..209ff7061a 100644
--- a/meta/lib/oeqa/controllers/testtargetloader.py
+++ b/meta/lib/oeqa/controllers/testtargetloader.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
diff --git a/meta/lib/oeqa/core/case.py b/meta/lib/oeqa/core/case.py
index aae451fef2..ad5524a714 100644
--- a/meta/lib/oeqa/core/case.py
+++ b/meta/lib/oeqa/core/case.py
@@ -5,6 +5,7 @@
5# 5#
6 6
7import base64 7import base64
8import os
8import zlib 9import zlib
9import unittest 10import unittest
10 11
@@ -43,8 +44,13 @@ class OETestCase(unittest.TestCase):
43 clss.tearDownClassMethod() 44 clss.tearDownClassMethod()
44 45
45 def _oeSetUp(self): 46 def _oeSetUp(self):
46 for d in self.decorators: 47 try:
47 d.setUpDecorator() 48 for d in self.decorators:
49 d.setUpDecorator()
50 except:
51 for d in self.decorators:
52 d.tearDownDecorator()
53 raise
48 self.setUpMethod() 54 self.setUpMethod()
49 55
50 def _oeTearDown(self): 56 def _oeTearDown(self):
@@ -52,6 +58,13 @@ class OETestCase(unittest.TestCase):
52 d.tearDownDecorator() 58 d.tearDownDecorator()
53 self.tearDownMethod() 59 self.tearDownMethod()
54 60
61 def assertFileExists(self, filename, msg=None):
62 """
63 Test that filename exists. If it does not, the test will fail.
64 """
65 if not os.path.exists(filename):
66 self.fail(msg or "%s does not exist" % filename)
67
55class OEPTestResultTestCase: 68class OEPTestResultTestCase:
56 """ 69 """
57 Mix-in class to provide functions to make interacting with extraresults for 70 Mix-in class to provide functions to make interacting with extraresults for
diff --git a/meta/lib/oeqa/core/context.py b/meta/lib/oeqa/core/context.py
index 2abe353d27..9313271f58 100644
--- a/meta/lib/oeqa/core/context.py
+++ b/meta/lib/oeqa/core/context.py
@@ -81,7 +81,7 @@ class OETestContext(object):
81 def runTests(self, processes=None, skips=[]): 81 def runTests(self, processes=None, skips=[]):
82 self.runner = self.runnerClass(self, descriptions=False, verbosity=2) 82 self.runner = self.runnerClass(self, descriptions=False, verbosity=2)
83 83
84 # Dinamically skip those tests specified though arguments 84 # Dynamically skip those tests specified though arguments
85 self.skipTests(skips) 85 self.skipTests(skips)
86 86
87 self._run_start_time = time.time() 87 self._run_start_time = time.time()
diff --git a/meta/lib/oeqa/core/decorator/__init__.py b/meta/lib/oeqa/core/decorator/__init__.py
index 1a82518ab6..93efd30e1d 100644
--- a/meta/lib/oeqa/core/decorator/__init__.py
+++ b/meta/lib/oeqa/core/decorator/__init__.py
@@ -5,8 +5,7 @@
5# 5#
6 6
7from functools import wraps 7from functools import wraps
8from abc import abstractmethod, ABCMeta 8from abc import ABCMeta
9from oeqa.core.utils.misc import strToList
10 9
11decoratorClasses = set() 10decoratorClasses = set()
12 11
@@ -65,15 +64,11 @@ class OETestDiscover(OETestDecorator):
65 return registry['cases'] 64 return registry['cases']
66 65
67def OETestTag(*tags): 66def OETestTag(*tags):
68 expandedtags = []
69 for tag in tags:
70 expandedtags += strToList(tag)
71 def decorator(item): 67 def decorator(item):
72 if hasattr(item, "__oeqa_testtags"): 68 if hasattr(item, "__oeqa_testtags"):
73 # do not append, create a new list (to handle classes with inheritance) 69 # do not append, create a new list (to handle classes with inheritance)
74 item.__oeqa_testtags = list(item.__oeqa_testtags) + expandedtags 70 item.__oeqa_testtags = list(item.__oeqa_testtags) + list(tags)
75 else: 71 else:
76 item.__oeqa_testtags = expandedtags 72 item.__oeqa_testtags = tags
77 return item 73 return item
78 return decorator 74 return decorator
79
diff --git a/meta/lib/oeqa/core/decorator/data.py b/meta/lib/oeqa/core/decorator/data.py
index bc4939e87c..0daf46334f 100644
--- a/meta/lib/oeqa/core/decorator/data.py
+++ b/meta/lib/oeqa/core/decorator/data.py
@@ -13,8 +13,8 @@ def has_feature(td, feature):
13 Checks for feature in DISTRO_FEATURES or IMAGE_FEATURES. 13 Checks for feature in DISTRO_FEATURES or IMAGE_FEATURES.
14 """ 14 """
15 15
16 if (feature in td.get('DISTRO_FEATURES', '') or 16 if (feature in td.get('DISTRO_FEATURES', '').split() or
17 feature in td.get('IMAGE_FEATURES', '')): 17 feature in td.get('IMAGE_FEATURES', '').split()):
18 return True 18 return True
19 return False 19 return False
20 20
@@ -23,18 +23,7 @@ def has_machine(td, machine):
23 Checks for MACHINE. 23 Checks for MACHINE.
24 """ 24 """
25 25
26 if (machine in td.get('MACHINE', '')): 26 if (machine == td.get('MACHINE', '')):
27 return True
28 return False
29
30def is_qemu(td, qemu):
31 """
32 Checks if MACHINE is qemu.
33 """
34
35 machine = td.get('MACHINE', '')
36 if (qemu in td.get('MACHINE', '') or
37 machine.startswith('qemu')):
38 return True 27 return True
39 return False 28 return False
40 29
@@ -189,34 +178,65 @@ class skipIfMachine(OETestDecorator):
189@registerDecorator 178@registerDecorator
190class skipIfNotQemu(OETestDecorator): 179class skipIfNotQemu(OETestDecorator):
191 """ 180 """
192 Skip test based on MACHINE. 181 Skip test if MACHINE is not qemu*
193
194 value must be a qemu MACHINE or it will skip the test
195 with msg as the reason.
196 """ 182 """
183 def setUpDecorator(self):
184 self.logger.debug("Checking if not qemu MACHINE")
185 if not self.case.td.get('MACHINE', '').startswith('qemu'):
186 self.case.skipTest('Test only runs on qemu machines')
197 187
198 attrs = ('value', 'msg') 188@registerDecorator
199 189class skipIfNotQemuUsermode(OETestDecorator):
190 """
191 Skip test if MACHINE_FEATURES does not contain qemu-usermode
192 """
200 def setUpDecorator(self): 193 def setUpDecorator(self):
201 msg = ('Checking if %s is not this MACHINE' % self.value) 194 self.logger.debug("Checking if MACHINE_FEATURES does not contain qemu-usermode")
202 self.logger.debug(msg) 195 if 'qemu-usermode' not in self.case.td.get('MACHINE_FEATURES', '').split():
203 if not is_qemu(self.case.td, self.value): 196 self.case.skipTest('Test requires qemu-usermode in MACHINE_FEATURES')
204 self.case.skipTest(self.msg)
205 197
206@registerDecorator 198@registerDecorator
207class skipIfQemu(OETestDecorator): 199class skipIfQemu(OETestDecorator):
208 """ 200 """
209 Skip test based on Qemu Machine. 201 Skip test if MACHINE is qemu*
202 """
203 def setUpDecorator(self):
204 self.logger.debug("Checking if qemu MACHINE")
205 if self.case.td.get('MACHINE', '').startswith('qemu'):
206 self.case.skipTest('Test only runs on real hardware')
210 207
211 value must not be a qemu machine or it will skip the test 208@registerDecorator
212 with msg as the reason. 209class skipIfArch(OETestDecorator):
213 """ 210 """
211 Skip test if HOST_ARCH is present in the tuple specified.
212 """
214 213
215 attrs = ('value', 'msg') 214 attrs = ('archs',)
215 def setUpDecorator(self):
216 arch = self.case.td['HOST_ARCH']
217 if arch in self.archs:
218 self.case.skipTest('Test skipped on %s' % arch)
219
220@registerDecorator
221class skipIfNotArch(OETestDecorator):
222 """
223 Skip test if HOST_ARCH is not present in the tuple specified.
224 """
216 225
226 attrs = ('archs',)
217 def setUpDecorator(self): 227 def setUpDecorator(self):
218 msg = ('Checking if %s is this MACHINE' % self.value) 228 arch = self.case.td['HOST_ARCH']
219 self.logger.debug(msg) 229 if arch not in self.archs:
220 if is_qemu(self.case.td, self.value): 230 self.case.skipTest('Test skipped on %s' % arch)
221 self.case.skipTest(self.msg)
222 231
232@registerDecorator
233class skipIfNotBuildArch(OETestDecorator):
234 """
235 Skip test if BUILD_ARCH is not present in the tuple specified.
236 """
237
238 attrs = ('archs',)
239 def setUpDecorator(self):
240 arch = self.case.td['BUILD_ARCH']
241 if arch not in self.archs:
242 self.case.skipTest('Test skipped on %s' % arch)
diff --git a/meta/lib/oeqa/core/decorator/oetimeout.py b/meta/lib/oeqa/core/decorator/oetimeout.py
index df90d1c798..5e6873ad48 100644
--- a/meta/lib/oeqa/core/decorator/oetimeout.py
+++ b/meta/lib/oeqa/core/decorator/oetimeout.py
@@ -24,5 +24,6 @@ class OETimeout(OETestDecorator):
24 24
25 def tearDownDecorator(self): 25 def tearDownDecorator(self):
26 signal.alarm(0) 26 signal.alarm(0)
27 signal.signal(signal.SIGALRM, self.alarmSignal) 27 if hasattr(self, 'alarmSignal'):
28 self.logger.debug("Removed SIGALRM handler") 28 signal.signal(signal.SIGALRM, self.alarmSignal)
29 self.logger.debug("Removed SIGALRM handler")
diff --git a/meta/lib/oeqa/core/loader.py b/meta/lib/oeqa/core/loader.py
index 11978213b8..d12d5a055c 100644
--- a/meta/lib/oeqa/core/loader.py
+++ b/meta/lib/oeqa/core/loader.py
@@ -37,7 +37,7 @@ def _find_duplicated_modules(suite, directory):
37 if path: 37 if path:
38 raise ImportError("Duplicated %s module found in %s" % (module, path)) 38 raise ImportError("Duplicated %s module found in %s" % (module, path))
39 39
40def _built_modules_dict(modules): 40def _built_modules_dict(modules, logger):
41 modules_dict = {} 41 modules_dict = {}
42 42
43 if modules == None: 43 if modules == None:
@@ -48,6 +48,9 @@ def _built_modules_dict(modules):
48 # characters, whereas class names do 48 # characters, whereas class names do
49 m = re.match(r'^([0-9a-z_.]+)(?:\.(\w[^.]*)(?:\.([^.]+))?)?$', module, flags=re.ASCII) 49 m = re.match(r'^([0-9a-z_.]+)(?:\.(\w[^.]*)(?:\.([^.]+))?)?$', module, flags=re.ASCII)
50 if not m: 50 if not m:
51 logger.warn("module '%s' was skipped from selected modules, "\
52 "because it doesn't match with module name assumptions: "\
53 "package and module names do not contain upper case characters, whereas class names do" % module)
51 continue 54 continue
52 55
53 module_name, class_name, test_name = m.groups() 56 module_name, class_name, test_name = m.groups()
@@ -58,6 +61,8 @@ def _built_modules_dict(modules):
58 modules_dict[module_name][class_name] = [] 61 modules_dict[module_name][class_name] = []
59 if test_name and test_name not in modules_dict[module_name][class_name]: 62 if test_name and test_name not in modules_dict[module_name][class_name]:
60 modules_dict[module_name][class_name].append(test_name) 63 modules_dict[module_name][class_name].append(test_name)
64 if modules and not modules_dict:
65 raise OEQATestNotFound("All selected modules were skipped, this would trigger selftest with all tests and -r ignored.")
61 66
62 return modules_dict 67 return modules_dict
63 68
@@ -71,7 +76,7 @@ class OETestLoader(unittest.TestLoader):
71 *args, **kwargs): 76 *args, **kwargs):
72 self.tc = tc 77 self.tc = tc
73 78
74 self.modules = _built_modules_dict(modules) 79 self.modules = _built_modules_dict(modules, tc.logger)
75 80
76 self.tests = tests 81 self.tests = tests
77 self.modules_required = modules_required 82 self.modules_required = modules_required
@@ -311,6 +316,9 @@ class OETestLoader(unittest.TestLoader):
311 module_name_small in self.modules) \ 316 module_name_small in self.modules) \
312 else False 317 else False
313 318
319 if any(c.isupper() for c in module.__name__):
320 raise SystemExit("Module '%s' contains uppercase characters and this isn't supported. Please fix the module name." % module.__name__)
321
314 return (load_module, load_underscore) 322 return (load_module, load_underscore)
315 323
316 324
diff --git a/meta/lib/oeqa/core/runner.py b/meta/lib/oeqa/core/runner.py
index d50690ab37..b683d9b80a 100644
--- a/meta/lib/oeqa/core/runner.py
+++ b/meta/lib/oeqa/core/runner.py
@@ -44,6 +44,7 @@ class OETestResult(_TestResult):
44 self.endtime = {} 44 self.endtime = {}
45 self.progressinfo = {} 45 self.progressinfo = {}
46 self.extraresults = {} 46 self.extraresults = {}
47 self.shownmsg = []
47 48
48 # Inject into tc so that TestDepends decorator can see results 49 # Inject into tc so that TestDepends decorator can see results
49 tc.results = self 50 tc.results = self
@@ -74,6 +75,7 @@ class OETestResult(_TestResult):
74 for (scase, msg) in getattr(self, t): 75 for (scase, msg) in getattr(self, t):
75 if test.id() == scase.id(): 76 if test.id() == scase.id():
76 self.tc.logger.info(str(msg)) 77 self.tc.logger.info(str(msg))
78 self.shownmsg.append(test.id())
77 break 79 break
78 80
79 def logSummary(self, component, context_msg=''): 81 def logSummary(self, component, context_msg=''):
@@ -169,7 +171,6 @@ class OETestResult(_TestResult):
169 171
170 def logDetails(self, json_file_dir=None, configuration=None, result_id=None, 172 def logDetails(self, json_file_dir=None, configuration=None, result_id=None,
171 dump_streams=False): 173 dump_streams=False):
172 self.tc.logger.info("RESULTS:")
173 174
174 result = self.extraresults 175 result = self.extraresults
175 logs = {} 176 logs = {}
@@ -193,6 +194,10 @@ class OETestResult(_TestResult):
193 report = {'status': status} 194 report = {'status': status}
194 if log: 195 if log:
195 report['log'] = log 196 report['log'] = log
197 # Class setup failures wouldn't enter stopTest so would never display
198 if case.id() not in self.shownmsg:
199 self.tc.logger.info("Failure (%s) for %s:\n" % (status, case.id()) + log)
200
196 if duration: 201 if duration:
197 report['duration'] = duration 202 report['duration'] = duration
198 203
@@ -215,6 +220,7 @@ class OETestResult(_TestResult):
215 report['stderr'] = stderr 220 report['stderr'] = stderr
216 result[case.id()] = report 221 result[case.id()] = report
217 222
223 self.tc.logger.info("RESULTS:")
218 for i in ['PASSED', 'SKIPPED', 'EXPECTEDFAIL', 'ERROR', 'FAILED', 'UNKNOWN']: 224 for i in ['PASSED', 'SKIPPED', 'EXPECTEDFAIL', 'ERROR', 'FAILED', 'UNKNOWN']:
219 if i not in logs: 225 if i not in logs:
220 continue 226 continue
@@ -229,6 +235,10 @@ class OETestResult(_TestResult):
229 # Override as we unexpected successes aren't failures for us 235 # Override as we unexpected successes aren't failures for us
230 return (len(self.failures) == len(self.errors) == 0) 236 return (len(self.failures) == len(self.errors) == 0)
231 237
238 def hasAnyFailingTest(self):
239 # Account for expected failures
240 return not self.wasSuccessful() or len(self.expectedFailures)
241
232class OEListTestsResult(object): 242class OEListTestsResult(object):
233 def wasSuccessful(self): 243 def wasSuccessful(self):
234 return True 244 return True
@@ -347,7 +357,7 @@ class OETestResultJSONHelper(object):
347 os.makedirs(write_dir, exist_ok=True) 357 os.makedirs(write_dir, exist_ok=True)
348 test_results = self._get_existing_testresults_if_available(write_dir) 358 test_results = self._get_existing_testresults_if_available(write_dir)
349 test_results[result_id] = {'configuration': configuration, 'result': test_result} 359 test_results[result_id] = {'configuration': configuration, 'result': test_result}
350 json_testresults = json.dumps(test_results, sort_keys=True, indent=4) 360 json_testresults = json.dumps(test_results, sort_keys=True, indent=1)
351 self._write_file(write_dir, self.testresult_filename, json_testresults) 361 self._write_file(write_dir, self.testresult_filename, json_testresults)
352 if has_bb: 362 if has_bb:
353 bb.utils.unlockfile(lf) 363 bb.utils.unlockfile(lf)
diff --git a/meta/lib/oeqa/core/target/__init__.py b/meta/lib/oeqa/core/target/__init__.py
index 1382aa9b52..177f648fe3 100644
--- a/meta/lib/oeqa/core/target/__init__.py
+++ b/meta/lib/oeqa/core/target/__init__.py
@@ -10,6 +10,7 @@ class OETarget(object):
10 10
11 def __init__(self, logger, *args, **kwargs): 11 def __init__(self, logger, *args, **kwargs):
12 self.logger = logger 12 self.logger = logger
13 self.runner = None
13 14
14 @abstractmethod 15 @abstractmethod
15 def start(self): 16 def start(self):
diff --git a/meta/lib/oeqa/core/target/qemu.py b/meta/lib/oeqa/core/target/qemu.py
index 0f29414df5..d93b3ac94a 100644
--- a/meta/lib/oeqa/core/target/qemu.py
+++ b/meta/lib/oeqa/core/target/qemu.py
@@ -8,20 +8,21 @@ import os
8import sys 8import sys
9import signal 9import signal
10import time 10import time
11import glob
12import subprocess
11from collections import defaultdict 13from collections import defaultdict
12 14
13from .ssh import OESSHTarget 15from .ssh import OESSHTarget
14from oeqa.utils.qemurunner import QemuRunner 16from oeqa.utils.qemurunner import QemuRunner
15from oeqa.utils.dump import TargetDumper
16 17
17supported_fstypes = ['ext3', 'ext4', 'cpio.gz', 'wic'] 18supported_fstypes = ['ext3', 'ext4', 'cpio.gz', 'wic']
18 19
19class OEQemuTarget(OESSHTarget): 20class OEQemuTarget(OESSHTarget):
20 def __init__(self, logger, server_ip, timeout=300, user='root', 21 def __init__(self, logger, server_ip, timeout=300, user='root',
21 port=None, machine='', rootfs='', kernel='', kvm=False, slirp=False, 22 port=None, machine='', rootfs='', kernel='', kvm=False, slirp=False,
22 dump_dir='', dump_host_cmds='', display='', bootlog='', 23 dump_dir='', display='', bootlog='',
23 tmpdir='', dir_image='', boottime=60, serial_ports=2, 24 tmpdir='', dir_image='', boottime=60, serial_ports=2,
24 boot_patterns = defaultdict(str), ovmf=False, **kwargs): 25 boot_patterns = defaultdict(str), ovmf=False, tmpfsdir=None, **kwargs):
25 26
26 super(OEQemuTarget, self).__init__(logger, None, server_ip, timeout, 27 super(OEQemuTarget, self).__init__(logger, None, server_ip, timeout,
27 user, port) 28 user, port)
@@ -35,17 +36,15 @@ class OEQemuTarget(OESSHTarget):
35 self.ovmf = ovmf 36 self.ovmf = ovmf
36 self.use_slirp = slirp 37 self.use_slirp = slirp
37 self.boot_patterns = boot_patterns 38 self.boot_patterns = boot_patterns
39 self.dump_dir = dump_dir
40 self.bootlog = bootlog
38 41
39 self.runner = QemuRunner(machine=machine, rootfs=rootfs, tmpdir=tmpdir, 42 self.runner = QemuRunner(machine=machine, rootfs=rootfs, tmpdir=tmpdir,
40 deploy_dir_image=dir_image, display=display, 43 deploy_dir_image=dir_image, display=display,
41 logfile=bootlog, boottime=boottime, 44 logfile=bootlog, boottime=boottime,
42 use_kvm=kvm, use_slirp=slirp, dump_dir=dump_dir, 45 use_kvm=kvm, use_slirp=slirp, dump_dir=dump_dir, logger=logger,
43 dump_host_cmds=dump_host_cmds, logger=logger,
44 serial_ports=serial_ports, boot_patterns = boot_patterns, 46 serial_ports=serial_ports, boot_patterns = boot_patterns,
45 use_ovmf=ovmf) 47 use_ovmf=ovmf, tmpfsdir=tmpfsdir)
46 dump_target_cmds = kwargs.get("testimage_dump_target")
47 self.target_dumper = TargetDumper(dump_target_cmds, dump_dir, self.runner)
48 self.target_dumper.create_dir("qemu")
49 48
50 def start(self, params=None, extra_bootparams=None, runqemuparams=''): 49 def start(self, params=None, extra_bootparams=None, runqemuparams=''):
51 if self.use_slirp and not self.server_ip: 50 if self.use_slirp and not self.server_ip:
@@ -68,7 +67,28 @@ class OEQemuTarget(OESSHTarget):
68 self.server_ip = self.runner.server_ip 67 self.server_ip = self.runner.server_ip
69 else: 68 else:
70 self.stop() 69 self.stop()
71 raise RuntimeError("FAILED to start qemu - check the task log and the boot log") 70 # Display the first 20 lines of top and
71 # last 20 lines of the bootlog when the
72 # target is not being booted up.
73 topfile = glob.glob(self.dump_dir + "/*_qemu/host_*_top")
74 msg = "\n\n===== start: snippet =====\n\n"
75 for f in topfile:
76 msg += "file: %s\n\n" % f
77 with open(f) as tf:
78 for x in range(20):
79 msg += next(tf)
80 msg += "\n\n===== end: snippet =====\n\n"
81 blcmd = ["tail", "-20", self.bootlog]
82 msg += "===== start: snippet =====\n\n"
83 try:
84 out = subprocess.check_output(blcmd, stderr=subprocess.STDOUT, timeout=1).decode('utf-8')
85 msg += "file: %s\n\n" % self.bootlog
86 msg += out
87 except (subprocess.CalledProcessError, subprocess.TimeoutExpired, FileNotFoundError) as err:
88 msg += "Error running command: %s\n%s\n" % (blcmd, err)
89 msg += "\n\n===== end: snippet =====\n"
90
91 raise RuntimeError("FAILED to start qemu - check the task log and the boot log %s" % (msg))
72 92
73 def stop(self): 93 def stop(self):
74 self.runner.stop() 94 self.runner.stop()
diff --git a/meta/lib/oeqa/core/target/serial.py b/meta/lib/oeqa/core/target/serial.py
new file mode 100644
index 0000000000..7c2cd8b248
--- /dev/null
+++ b/meta/lib/oeqa/core/target/serial.py
@@ -0,0 +1,315 @@
1#
2# SPDX-License-Identifier: MIT
3#
4
5import base64
6import logging
7import os
8from threading import Lock
9from . import OETarget
10
class OESerialTarget(OETarget):
    """Target controlled over a serial console instead of SSH.

    An external command (serialcontrol_cmd) opens the console; commands
    are sequenced by waiting for the target's shell prompt (PS1).  File
    transfer is implemented by streaming base32-encoded text across the
    console, so only a shell and the base32 tool are needed on the DUT.
    """

    def __init__(self, logger, target_ip, server_ip, server_port=0,
                 timeout=300, serialcontrol_cmd=None, serialcontrol_extra_args=None,
                 serialcontrol_ps1=None, serialcontrol_connect_timeout=None,
                 machine=None, **kwargs):
        """
        logger: logger to use; when None a file logger writing to
            ./remoteTarget.log is created.
        timeout: default per-command timeout in seconds.
        serialcontrol_cmd: command that opens the serial console (required).
        serialcontrol_extra_args: extra arguments appended to that command.
        serialcontrol_ps1: regex matching the target prompt; when absent,
            falls back to 'root@<machine>:.*# ' if machine is given.
        serialcontrol_connect_timeout: seconds to wait for the initial
            prompt (defaults to 10).

        Raises ValueError when neither a prompt nor a machine name is
        available, or when no serial control command is given.
        """
        if not logger:
            # No logger supplied: create one that logs to ./remoteTarget.log
            logger = logging.getLogger('target')
            logger.setLevel(logging.INFO)
            filePath = os.path.join(os.getcwd(), 'remoteTarget.log')
            fileHandler = logging.FileHandler(filePath, 'w', 'utf-8')
            formatter = logging.Formatter(
                        '%(asctime)s.%(msecs)03d %(levelname)s: %(message)s',
                        '%H:%M:%S')
            fileHandler.setFormatter(formatter)
            logger.addHandler(fileHandler)

        super(OESerialTarget, self).__init__(logger)

        if serialcontrol_ps1:
            self.target_ps1 = serialcontrol_ps1
        elif machine:
            # fallback to a default value which assumes root@machine
            self.target_ps1 = f'root@{machine}:.*# '
        else:
            raise ValueError("Unable to determine shell command prompt (PS1) format.")

        if not serialcontrol_cmd:
            raise ValueError("Unable to determine serial control command.")

        if serialcontrol_extra_args:
            self.connection_script = f'{serialcontrol_cmd} {serialcontrol_extra_args}'
        else:
            self.connection_script = serialcontrol_cmd

        if serialcontrol_connect_timeout:
            self.connect_timeout = serialcontrol_connect_timeout
        else:
            self.connect_timeout = 10  # default to 10s connection timeout

        self.default_command_timeout = timeout
        self.ip = target_ip
        self.server_ip = server_ip
        self.server_port = server_port
        self.conn = None
        # A serial console is a single shared channel: serialize access.
        self.mutex = Lock()

    def start(self, **kwargs):
        # Nothing to do: the connection is opened lazily on first use.
        pass

    def stop(self, **kwargs):
        pass

    def get_connection(self):
        """Return the shared SerialConnection, creating it on first use."""
        if self.conn is None:
            self.conn = SerialConnection(self.connection_script,
                                         self.target_ps1,
                                         self.connect_timeout,
                                         self.default_command_timeout)

        return self.conn

    def run(self, cmd, timeout=None):
        """
        Runs command on target over the provided serial connection.
        The first call will open the connection, and subsequent
        calls will re-use the same connection to send new commands.

        command: Command to run on target.
        timeout: <value>: Kill command after <val> seconds.
                 None: Kill command default value seconds.
                 0: No timeout, runs until return.

        Returns (status, output); status is 255 on timeout.
        """
        # Lock needed to avoid multiple threads running commands concurrently
        # A serial connection can only be used by one caller at a time
        with self.mutex:
            conn = self.get_connection()

            self.logger.debug(f"[Running]$ {cmd}")
            # Run the command, then echo $? to get the command's return code
            try:
                output = conn.run_command(cmd, timeout)
                status = conn.run_command("echo $?")
                self.logger.debug(f" [stdout]: {output}")
                self.logger.debug(f" [ret code]: {status}\n\n")
            except SerialTimeoutException as e:
                self.logger.debug(e)
                output = ""
                status = 255

            # Return to $HOME after each command to simulate a stateless SSH connection
            conn.run_command('cd "$HOME"')

            return (int(status), output)

    def copyTo(self, localSrc, remoteDst):
        """
        Copies files by converting them to base 32, then transferring
        the ASCII text to the target, and decoding it in place on the
        target.

        On a 115k baud serial connection, this method transfers at
        roughly 30kbps.
        """
        with open(localSrc, 'rb') as file:
            data = file.read()

        b32 = base64.b32encode(data).decode('utf-8')

        # To avoid shell line limits, send a chunk at a time
        SPLIT_LEN = 512
        lines = [b32[i:i+SPLIT_LEN] for i in range(0, len(b32), SPLIT_LEN)]

        with self.mutex:
            conn = self.get_connection()

            filename = os.path.basename(localSrc)
            # Stage the encoded data on the target in a temp file named
            # after the source file (fixed: the name template previously
            # contained a literal "(unknown)" placeholder, which would not
            # even parse as a shell word because of the parentheses).
            TEMP = f'/tmp/{filename}.b32'

            # Create or empty out the temp file
            conn.run_command(f'echo -n "" > {TEMP}')

            for line in lines:
                conn.run_command(f'echo -n {line} >> {TEMP}')

            # Check to see whether the remoteDst is a directory
            is_directory = conn.run_command(f'[[ -d {remoteDst} ]]; echo $?')
            if int(is_directory) == 0:
                # append the localSrc filename to the end of remoteDst
                remoteDst = os.path.join(remoteDst, filename)

            conn.run_command(f'base32 -d {TEMP} > {remoteDst}')
            conn.run_command(f'rm {TEMP}')

        return 0, 'Success'

    def copyFrom(self, remoteSrc, localDst):
        """
        Copies files by converting them to base 32 on the target, then
        transferring the ASCII text to the host. That text is then
        decoded here and written out to the destination.

        On a 115k baud serial connection, this method transfers at
        roughly 30kbps.
        """
        with self.mutex:
            b32 = self.get_connection().run_command(f'base32 {remoteSrc}')

        data = base64.b32decode(b32.replace('\r\n', ''))

        # If the local path is a directory, get the filename from
        # the remoteSrc path and append it to localDst
        if os.path.isdir(localDst):
            filename = os.path.basename(remoteSrc)
            localDst = os.path.join(localDst, filename)

        with open(localDst, 'wb') as file:
            file.write(data)

        return 0, 'Success'

    def copyDirTo(self, localSrc, remoteDst):
        """
        Copy recursively localSrc directory to remoteDst in target.
        """

        for root, dirs, files in os.walk(localSrc):
            # Create directories in the target as needed
            for d in dirs:
                tmpDir = os.path.join(root, d).replace(localSrc, "")
                newDir = os.path.join(remoteDst, tmpDir.lstrip("/"))
                cmd = "mkdir -p %s" % newDir
                self.run(cmd)

            # Copy files into the target
            for f in files:
                tmpFile = os.path.join(root, f).replace(localSrc, "")
                dstFile = os.path.join(remoteDst, tmpFile.lstrip("/"))
                srcFile = os.path.join(root, f)
                self.copyTo(srcFile, dstFile)

    def deleteFiles(self, remotePath, files):
        """
        Deletes files in target's remotePath.
        """

        cmd = "rm"
        if not isinstance(files, list):
            files = [files]

        for f in files:
            cmd = "%s %s" % (cmd, os.path.join(remotePath, f))

        self.run(cmd)

    def deleteDir(self, remotePath):
        """
        Deletes target's remotePath directory.
        """

        cmd = "rmdir %s" % remotePath
        self.run(cmd)

    def deleteDirStructure(self, localPath, remotePath):
        """
        Delete recursively localPath structure directory in target's remotePath.

        This function is useful to delete a package that is installed in the
        device under test (DUT) and the host running the test has such package
        extracted in tmp directory.

        For example, with localPath = "/home/user/tmp" containing
        work/dir1/file1 and work/dir2, and remotePath = "/home/user",
        this deletes on the DUT, in order:
            /home/user/work/dir1/file1
            /home/user/work/dir1 (if dir is empty)
            /home/user/work/dir2 (if dir is empty)
            /home/user/work (if dir is empty)
        """

        # Walk bottom-up so files go first and dirs can then be rmdir'ed
        for root, dirs, files in os.walk(localPath, topdown=False):
            # Delete files first
            tmpDir = os.path.join(root).replace(localPath, "")
            remoteDir = os.path.join(remotePath, tmpDir.lstrip("/"))
            self.deleteFiles(remoteDir, files)

            # Remove dirs if empty
            for d in dirs:
                tmpDir = os.path.join(root, d).replace(localPath, "")
                remoteDir = os.path.join(remotePath, tmpDir.lstrip("/"))
                self.deleteDir(remoteDir)
251
class SerialTimeoutException(Exception):
    """Raised when a command over the serial connection exceeds its timeout."""

    def __init__(self, msg):
        # Forward to Exception so args/repr/pickling behave correctly
        # (the original skipped this, leaving e.args empty).
        super().__init__(msg)
        self.msg = msg

    def __str__(self):
        return self.msg
257
class SerialConnection:
    """Drives a shell over a serial console using pexpect.

    The connection is opened by running the given script under bash;
    every command is delimited by waiting for the target prompt, and
    echo is disabled up front so command output can be captured cleanly.
    """

    def __init__(self, script, target_prompt, connect_timeout, default_command_timeout):
        import pexpect  # limiting scope to avoid build dependency
        self.prompt = target_prompt
        self.connect_timeout = connect_timeout
        self.default_command_timeout = default_command_timeout
        self.conn = pexpect.spawn('/bin/bash', ['-c', script], encoding='utf8')
        self._seek_to_clean_shell()
        # Disable echo to avoid the need to parse the outgoing command
        self.run_command('stty -echo')

    def _seek_to_clean_shell(self):
        """
        Attempts to find a clean shell, meaning it is clear and
        ready to accept a new command. This is necessary to ensure
        the correct output is captured from each command.
        """
        import pexpect  # limiting scope to avoid build dependency
        # Give the connection a short window to present a prompt
        matched = self.conn.expect([self.prompt, pexpect.TIMEOUT],
                                   timeout=self.connect_timeout)
        if matched != 1:
            return
        # Timed out: nudge the shell with an empty line, then wait for
        # the prompt without a timeout fallback.
        self.conn.sendline("")
        self.conn.expect(self.prompt)

    def run_command(self, cmd, timeout=None):
        """
        Runs command on target over the provided serial connection.
        Returns any output on the shell while the command was run.

        command: Command to run on target.
        timeout: <value>: Kill command after <val> seconds.
                 None: Kill command default value seconds.
                 0: No timeout, runs until return.
        """
        import pexpect  # limiting scope to avoid build dependency
        # Map the OETarget timeout convention onto pexpect's:
        # None -> the configured default, 0 -> wait forever.
        effective_timeout = {None: self.default_command_timeout, 0: None}.get(timeout, timeout)

        self.conn.sendline(cmd)
        matched = self.conn.expect([self.prompt, pexpect.TIMEOUT],
                                   timeout=effective_timeout)

        if matched == 1:
            # Timed out: interrupt the command and restore a usable shell
            self.conn.send('\003')  # send Ctrl+C
            self._seek_to_clean_shell()
            raise SerialTimeoutException(f'Timeout executing: {cmd} after {effective_timeout}s')

        return self.conn.before.removesuffix('\r\n')
315
diff --git a/meta/lib/oeqa/core/target/ssh.py b/meta/lib/oeqa/core/target/ssh.py
index 461448dbc5..8b5c450a05 100644
--- a/meta/lib/oeqa/core/target/ssh.py
+++ b/meta/lib/oeqa/core/target/ssh.py
@@ -34,12 +34,17 @@ class OESSHTarget(OETarget):
34 self.timeout = timeout 34 self.timeout = timeout
35 self.user = user 35 self.user = user
36 ssh_options = [ 36 ssh_options = [
37 '-o', 'ServerAliveCountMax=2',
38 '-o', 'ServerAliveInterval=30',
37 '-o', 'UserKnownHostsFile=/dev/null', 39 '-o', 'UserKnownHostsFile=/dev/null',
38 '-o', 'StrictHostKeyChecking=no', 40 '-o', 'StrictHostKeyChecking=no',
39 '-o', 'LogLevel=ERROR' 41 '-o', 'LogLevel=ERROR'
40 ] 42 ]
43 scp_options = [
44 '-r'
45 ]
41 self.ssh = ['ssh', '-l', self.user ] + ssh_options 46 self.ssh = ['ssh', '-l', self.user ] + ssh_options
42 self.scp = ['scp'] + ssh_options 47 self.scp = ['scp'] + ssh_options + scp_options
43 if port: 48 if port:
44 self.ssh = self.ssh + [ '-p', port ] 49 self.ssh = self.ssh + [ '-p', port ]
45 self.scp = self.scp + [ '-P', port ] 50 self.scp = self.scp + [ '-P', port ]
@@ -50,14 +55,14 @@ class OESSHTarget(OETarget):
50 def stop(self, **kwargs): 55 def stop(self, **kwargs):
51 pass 56 pass
52 57
53 def _run(self, command, timeout=None, ignore_status=True): 58 def _run(self, command, timeout=None, ignore_status=True, raw=False):
54 """ 59 """
55 Runs command in target using SSHProcess. 60 Runs command in target using SSHProcess.
56 """ 61 """
57 self.logger.debug("[Running]$ %s" % " ".join(command)) 62 self.logger.debug("[Running]$ %s" % " ".join(command))
58 63
59 starttime = time.time() 64 starttime = time.time()
60 status, output = SSHCall(command, self.logger, timeout) 65 status, output = SSHCall(command, self.logger, timeout, raw)
61 self.logger.debug("[Command returned '%d' after %.2f seconds]" 66 self.logger.debug("[Command returned '%d' after %.2f seconds]"
62 "" % (status, time.time() - starttime)) 67 "" % (status, time.time() - starttime))
63 68
@@ -67,7 +72,7 @@ class OESSHTarget(OETarget):
67 72
68 return (status, output) 73 return (status, output)
69 74
70 def run(self, command, timeout=None): 75 def run(self, command, timeout=None, ignore_status=True, raw=False):
71 """ 76 """
72 Runs command in target. 77 Runs command in target.
73 78
@@ -86,10 +91,12 @@ class OESSHTarget(OETarget):
86 else: 91 else:
87 processTimeout = self.timeout 92 processTimeout = self.timeout
88 93
89 status, output = self._run(sshCmd, processTimeout, True) 94 status, output = self._run(sshCmd, processTimeout, ignore_status, raw)
90 self.logger.debug('Command: %s\nOutput: %s\n' % (command, output)) 95 if len(output) > (64 * 1024):
91 if (status == 255) and (('No route to host') in output): 96 self.logger.debug('Command: %s\nStatus: %d Output length: %s\n' % (command, status, len(output)))
92 self.target_dumper.dump_target() 97 else:
98 self.logger.debug('Command: %s\nStatus: %d Output: %s\n' % (command, status, output))
99
93 return (status, output) 100 return (status, output)
94 101
95 def copyTo(self, localSrc, remoteDst): 102 def copyTo(self, localSrc, remoteDst):
@@ -202,32 +209,51 @@ class OESSHTarget(OETarget):
202 remoteDir = os.path.join(remotePath, tmpDir.lstrip("/")) 209 remoteDir = os.path.join(remotePath, tmpDir.lstrip("/"))
203 self.deleteDir(remoteDir) 210 self.deleteDir(remoteDir)
204 211
205def SSHCall(command, logger, timeout=None, **opts): 212def SSHCall(command, logger, timeout=None, raw=False, **opts):
206 213
207 def run(): 214 def run():
208 nonlocal output 215 nonlocal output
209 nonlocal process 216 nonlocal process
217 output_raw = bytearray()
210 starttime = time.time() 218 starttime = time.time()
219 progress = time.time()
211 process = subprocess.Popen(command, **options) 220 process = subprocess.Popen(command, **options)
221 has_timeout = False
222 appendline = None
212 if timeout: 223 if timeout:
213 endtime = starttime + timeout 224 endtime = starttime + timeout
214 eof = False 225 eof = False
215 while time.time() < endtime and not eof: 226 os.set_blocking(process.stdout.fileno(), False)
216 logger.debug('time: %s, endtime: %s' % (time.time(), endtime)) 227 while not has_timeout and not eof:
217 try: 228 try:
218 if select.select([process.stdout], [], [], 5)[0] != []: 229 if select.select([process.stdout], [], [], 5)[0] != []:
219 reader = codecs.getreader('utf-8')(process.stdout, 'ignore') 230 # wait a bit for more data, tries to avoid reading single characters
220 data = reader.read(1024, 4096) 231 time.sleep(0.2)
232 data = process.stdout.read()
221 if not data: 233 if not data:
222 process.stdout.close()
223 eof = True 234 eof = True
224 else: 235 else:
225 output += data 236 output_raw.extend(data)
226 logger.debug('Partial data from SSH call: %s' % data) 237 # ignore errors to capture as much as possible
238 #logger.debug('Partial data from SSH call:\n%s' % data.decode('utf-8', errors='ignore'))
227 endtime = time.time() + timeout 239 endtime = time.time() + timeout
228 except InterruptedError: 240 except InterruptedError:
241 logger.debug('InterruptedError')
242 continue
243 except BlockingIOError:
244 logger.debug('BlockingIOError')
229 continue 245 continue
230 246
247 if time.time() >= endtime:
248 logger.debug('SSHCall has timeout! Time: %s, endtime: %s' % (time.time(), endtime))
249 has_timeout = True
250
251 if time.time() >= (progress + 60):
252 logger.debug('Waiting for process output at time: %s with datasize: %s' % (time.time(), len(output_raw)))
253 progress = time.time()
254
255 process.stdout.close()
256
231 # process hasn't returned yet 257 # process hasn't returned yet
232 if not eof: 258 if not eof:
233 process.terminate() 259 process.terminate()
@@ -235,20 +261,58 @@ def SSHCall(command, logger, timeout=None, **opts):
235 try: 261 try:
236 process.kill() 262 process.kill()
237 except OSError: 263 except OSError:
264 logger.debug('OSError when killing process')
238 pass 265 pass
239 endtime = time.time() - starttime 266 endtime = time.time() - starttime
240 lastline = ("\nProcess killed - no output for %d seconds. Total" 267 appendline = ("\nProcess killed - no output for %d seconds. Total"
241 " running time: %d seconds." % (timeout, endtime)) 268 " running time: %d seconds." % (timeout, endtime))
242 logger.debug('Received data from SSH call %s ' % lastline) 269 logger.debug('Received data from SSH call:\n%s ' % appendline)
243 output += lastline 270 process.wait()
244 271
272 if raw:
273 output = bytes(output_raw)
274 if appendline:
275 output += bytes(appendline, "utf-8")
276 else:
277 output = output_raw.decode('utf-8', errors='ignore')
278 if appendline:
279 output += appendline
245 else: 280 else:
246 output = process.communicate()[0].decode('utf-8', errors='ignore') 281 output = output_raw = process.communicate()[0]
247 logger.debug('Data from SSH call: %s' % output.rstrip()) 282 if not raw:
283 output = output_raw.decode('utf-8', errors='ignore')
284
285 if len(output) < (64 * 1024):
286 if output.rstrip():
287 logger.debug('Data from SSH call:\n%s' % output.rstrip())
288 else:
289 logger.debug('No output from SSH call')
290
291 # timout or not, make sure process exits and is not hanging
292 if process.returncode == None:
293 try:
294 process.wait(timeout=5)
295 except TimeoutExpired:
296 try:
297 process.kill()
298 except OSError:
299 logger.debug('OSError')
300 pass
301 process.wait()
302
303 if has_timeout:
304 # Version of openssh before 8.6_p1 returns error code 0 when killed
305 # by a signal, when the timeout occurs we will receive a 0 error
306 # code because the process is been terminated and it's wrong because
307 # that value means success, but the process timed out.
308 # Afterwards, from version 8.6_p1 onwards, the returned code is 255.
309 # Fix this behaviour by checking the return code
310 if process.returncode == 0:
311 process.returncode = 255
248 312
249 options = { 313 options = {
250 "stdout": subprocess.PIPE, 314 "stdout": subprocess.PIPE,
251 "stderr": subprocess.STDOUT, 315 "stderr": subprocess.STDOUT if not raw else None,
252 "stdin": None, 316 "stdin": None,
253 "shell": False, 317 "shell": False,
254 "bufsize": -1, 318 "bufsize": -1,
@@ -271,6 +335,9 @@ def SSHCall(command, logger, timeout=None, **opts):
271 # whilst running and ensure we don't leave a process behind. 335 # whilst running and ensure we don't leave a process behind.
272 if process.poll() is None: 336 if process.poll() is None:
273 process.kill() 337 process.kill()
338 if process.returncode == None:
339 process.wait()
274 logger.debug('Something went wrong, killing SSH process') 340 logger.debug('Something went wrong, killing SSH process')
275 raise 341 raise
276 return (process.wait(), output.rstrip()) 342
343 return (process.returncode, output if raw else output.rstrip())
diff --git a/meta/lib/oeqa/core/tests/cases/timeout.py b/meta/lib/oeqa/core/tests/cases/timeout.py
index 5dfecc7b7c..69cf969a67 100644
--- a/meta/lib/oeqa/core/tests/cases/timeout.py
+++ b/meta/lib/oeqa/core/tests/cases/timeout.py
@@ -8,6 +8,7 @@ from time import sleep
8 8
9from oeqa.core.case import OETestCase 9from oeqa.core.case import OETestCase
10from oeqa.core.decorator.oetimeout import OETimeout 10from oeqa.core.decorator.oetimeout import OETimeout
11from oeqa.core.decorator.depends import OETestDepends
11 12
12class TimeoutTest(OETestCase): 13class TimeoutTest(OETestCase):
13 14
@@ -19,3 +20,15 @@ class TimeoutTest(OETestCase):
19 def testTimeoutFail(self): 20 def testTimeoutFail(self):
20 sleep(2) 21 sleep(2)
21 self.assertTrue(True, msg='How is this possible?') 22 self.assertTrue(True, msg='How is this possible?')
23
24
25 def testTimeoutSkip(self):
26 self.skipTest("This test needs to be skipped, so that testTimeoutDepends()'s OETestDepends kicks in")
27
28 @OETestDepends(["timeout.TimeoutTest.testTimeoutSkip"])
29 @OETimeout(3)
30 def testTimeoutDepends(self):
31 self.assertTrue(False, msg='How is this possible?')
32
33 def testTimeoutUnrelated(self):
34 sleep(6)
diff --git a/meta/lib/oeqa/core/tests/common.py b/meta/lib/oeqa/core/tests/common.py
index 88cc758ad3..bcc4fde632 100644
--- a/meta/lib/oeqa/core/tests/common.py
+++ b/meta/lib/oeqa/core/tests/common.py
@@ -9,7 +9,6 @@ import os
9 9
10import unittest 10import unittest
11import logging 11import logging
12import os
13 12
14logger = logging.getLogger("oeqa") 13logger = logging.getLogger("oeqa")
15logger.setLevel(logging.INFO) 14logger.setLevel(logging.INFO)
diff --git a/meta/lib/oeqa/core/tests/test_data.py b/meta/lib/oeqa/core/tests/test_data.py
index ac74098b78..acd726f3a0 100755
--- a/meta/lib/oeqa/core/tests/test_data.py
+++ b/meta/lib/oeqa/core/tests/test_data.py
@@ -33,7 +33,7 @@ class TestData(TestBase):
33 33
34 def test_data_fail_wrong_variable(self): 34 def test_data_fail_wrong_variable(self):
35 expectedError = 'AssertionError' 35 expectedError = 'AssertionError'
36 d = {'IMAGE' : 'core-image-sato', 'ARCH' : 'arm'} 36 d = {'IMAGE' : 'core-image-weston', 'ARCH' : 'arm'}
37 37
38 tc = self._testLoader(d=d, modules=self.modules) 38 tc = self._testLoader(d=d, modules=self.modules)
39 results = tc.runTests() 39 results = tc.runTests()
diff --git a/meta/lib/oeqa/core/tests/test_decorators.py b/meta/lib/oeqa/core/tests/test_decorators.py
index b798bf7d33..5095f39948 100755
--- a/meta/lib/oeqa/core/tests/test_decorators.py
+++ b/meta/lib/oeqa/core/tests/test_decorators.py
@@ -133,5 +133,11 @@ class TestTimeoutDecorator(TestBase):
133 msg = "OETestTimeout didn't restore SIGALRM" 133 msg = "OETestTimeout didn't restore SIGALRM"
134 self.assertIs(alarm_signal, signal.getsignal(signal.SIGALRM), msg=msg) 134 self.assertIs(alarm_signal, signal.getsignal(signal.SIGALRM), msg=msg)
135 135
136 def test_timeout_cancel(self):
137 tests = ['timeout.TimeoutTest.testTimeoutSkip', 'timeout.TimeoutTest.testTimeoutDepends', 'timeout.TimeoutTest.testTimeoutUnrelated']
138 msg = 'Unrelated test failed to complete'
139 tc = self._testLoader(modules=self.modules, tests=tests)
140 self.assertTrue(tc.runTests().wasSuccessful(), msg=msg)
141
136if __name__ == '__main__': 142if __name__ == '__main__':
137 unittest.main() 143 unittest.main()
diff --git a/meta/lib/oeqa/core/utils/concurrencytest.py b/meta/lib/oeqa/core/utils/concurrencytest.py
index b2eb68fb02..d10f8f7f04 100644
--- a/meta/lib/oeqa/core/utils/concurrencytest.py
+++ b/meta/lib/oeqa/core/utils/concurrencytest.py
@@ -1,5 +1,7 @@
1#!/usr/bin/env python3 1#!/usr/bin/env python3
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-or-later 5# SPDX-License-Identifier: GPL-2.0-or-later
4# 6#
5# Modified for use in OE by Richard Purdie, 2018 7# Modified for use in OE by Richard Purdie, 2018
@@ -48,11 +50,16 @@ _all__ = [
48# 50#
49class BBThreadsafeForwardingResult(ThreadsafeForwardingResult): 51class BBThreadsafeForwardingResult(ThreadsafeForwardingResult):
50 52
51 def __init__(self, target, semaphore, threadnum, totalinprocess, totaltests): 53 def __init__(self, target, semaphore, threadnum, totalinprocess, totaltests, output, finalresult):
52 super(BBThreadsafeForwardingResult, self).__init__(target, semaphore) 54 super(BBThreadsafeForwardingResult, self).__init__(target, semaphore)
53 self.threadnum = threadnum 55 self.threadnum = threadnum
54 self.totalinprocess = totalinprocess 56 self.totalinprocess = totalinprocess
55 self.totaltests = totaltests 57 self.totaltests = totaltests
58 self.buffer = True
59 self.outputbuf = output
60 self.finalresult = finalresult
61 self.finalresult.buffer = True
62 self.target = target
56 63
57 def _add_result_with_semaphore(self, method, test, *args, **kwargs): 64 def _add_result_with_semaphore(self, method, test, *args, **kwargs):
58 self.semaphore.acquire() 65 self.semaphore.acquire()
@@ -61,16 +68,19 @@ class BBThreadsafeForwardingResult(ThreadsafeForwardingResult):
61 self.result.starttime[test.id()] = self._test_start.timestamp() 68 self.result.starttime[test.id()] = self._test_start.timestamp()
62 self.result.threadprogress[self.threadnum].append(test.id()) 69 self.result.threadprogress[self.threadnum].append(test.id())
63 totalprogress = sum(len(x) for x in self.result.threadprogress.values()) 70 totalprogress = sum(len(x) for x in self.result.threadprogress.values())
64 self.result.progressinfo[test.id()] = "%s: %s/%s %s/%s (%ss) (%s)" % ( 71 self.result.progressinfo[test.id()] = "%s: %s/%s %s/%s (%ss) (%s failed) (%s)" % (
65 self.threadnum, 72 self.threadnum,
66 len(self.result.threadprogress[self.threadnum]), 73 len(self.result.threadprogress[self.threadnum]),
67 self.totalinprocess, 74 self.totalinprocess,
68 totalprogress, 75 totalprogress,
69 self.totaltests, 76 self.totaltests,
70 "{0:.2f}".format(time.time()-self._test_start.timestamp()), 77 "{0:.2f}".format(time.time()-self._test_start.timestamp()),
78 self.target.failed_tests,
71 test.id()) 79 test.id())
72 finally: 80 finally:
73 self.semaphore.release() 81 self.semaphore.release()
82 self.finalresult._stderr_buffer = io.StringIO(initial_value=self.outputbuf.getvalue().decode("utf-8"))
83 self.finalresult._stdout_buffer = io.StringIO()
74 super(BBThreadsafeForwardingResult, self)._add_result_with_semaphore(method, test, *args, **kwargs) 84 super(BBThreadsafeForwardingResult, self)._add_result_with_semaphore(method, test, *args, **kwargs)
75 85
76class ProxyTestResult: 86class ProxyTestResult:
@@ -183,35 +193,28 @@ class dummybuf(object):
183# 193#
184class ConcurrentTestSuite(unittest.TestSuite): 194class ConcurrentTestSuite(unittest.TestSuite):
185 195
186 def __init__(self, suite, processes, setupfunc, removefunc): 196 def __init__(self, suite, processes, setupfunc, removefunc, bb_vars):
187 super(ConcurrentTestSuite, self).__init__([suite]) 197 super(ConcurrentTestSuite, self).__init__([suite])
188 self.processes = processes 198 self.processes = processes
189 self.setupfunc = setupfunc 199 self.setupfunc = setupfunc
190 self.removefunc = removefunc 200 self.removefunc = removefunc
201 self.bb_vars = bb_vars
191 202
192 def run(self, result): 203 def run(self, result):
193 tests, totaltests = fork_for_tests(self.processes, self) 204 testservers, totaltests = fork_for_tests(self.processes, self)
194 try: 205 try:
195 threads = {} 206 threads = {}
196 queue = Queue() 207 queue = Queue()
197 semaphore = threading.Semaphore(1) 208 semaphore = threading.Semaphore(1)
198 result.threadprogress = {} 209 result.threadprogress = {}
199 for i, (test, testnum) in enumerate(tests): 210 for i, (testserver, testnum, output) in enumerate(testservers):
200 result.threadprogress[i] = [] 211 result.threadprogress[i] = []
201 process_result = BBThreadsafeForwardingResult( 212 process_result = BBThreadsafeForwardingResult(
202 ExtraResultsDecoderTestResult(result), 213 ExtraResultsDecoderTestResult(result),
203 semaphore, i, testnum, totaltests) 214 semaphore, i, testnum, totaltests, output, result)
204 # Force buffering of stdout/stderr so the console doesn't get corrupted by test output
205 # as per default in parent code
206 process_result.buffer = True
207 # We have to add a buffer object to stdout to keep subunit happy
208 process_result._stderr_buffer = io.StringIO()
209 process_result._stderr_buffer.buffer = dummybuf(process_result._stderr_buffer)
210 process_result._stdout_buffer = io.StringIO()
211 process_result._stdout_buffer.buffer = dummybuf(process_result._stdout_buffer)
212 reader_thread = threading.Thread( 215 reader_thread = threading.Thread(
213 target=self._run_test, args=(test, process_result, queue)) 216 target=self._run_test, args=(testserver, process_result, queue))
214 threads[test] = reader_thread, process_result 217 threads[testserver] = reader_thread, process_result
215 reader_thread.start() 218 reader_thread.start()
216 while threads: 219 while threads:
217 finished_test = queue.get() 220 finished_test = queue.get()
@@ -222,13 +225,13 @@ class ConcurrentTestSuite(unittest.TestSuite):
222 process_result.stop() 225 process_result.stop()
223 raise 226 raise
224 finally: 227 finally:
225 for test in tests: 228 for testserver in testservers:
226 test[0]._stream.close() 229 testserver[0]._stream.close()
227 230
228 def _run_test(self, test, process_result, queue): 231 def _run_test(self, testserver, process_result, queue):
229 try: 232 try:
230 try: 233 try:
231 test.run(process_result) 234 testserver.run(process_result)
232 except Exception: 235 except Exception:
233 # The run logic itself failed 236 # The run logic itself failed
234 case = testtools.ErrorHolder( 237 case = testtools.ErrorHolder(
@@ -236,12 +239,12 @@ class ConcurrentTestSuite(unittest.TestSuite):
236 error=sys.exc_info()) 239 error=sys.exc_info())
237 case.run(process_result) 240 case.run(process_result)
238 finally: 241 finally:
239 queue.put(test) 242 queue.put(testserver)
240 243
241def fork_for_tests(concurrency_num, suite): 244def fork_for_tests(concurrency_num, suite):
242 result = [] 245 testservers = []
243 if 'BUILDDIR' in os.environ: 246 if 'BUILDDIR' in os.environ:
244 selftestdir = get_test_layer() 247 selftestdir = get_test_layer(suite.bb_vars['BBLAYERS'])
245 248
246 test_blocks = partition_tests(suite, concurrency_num) 249 test_blocks = partition_tests(suite, concurrency_num)
247 # Clear the tests from the original suite so it doesn't keep them alive 250 # Clear the tests from the original suite so it doesn't keep them alive
@@ -261,7 +264,7 @@ def fork_for_tests(concurrency_num, suite):
261 ourpid = os.getpid() 264 ourpid = os.getpid()
262 try: 265 try:
263 newbuilddir = None 266 newbuilddir = None
264 stream = os.fdopen(c2pwrite, 'wb', 1) 267 stream = os.fdopen(c2pwrite, 'wb')
265 os.close(c2pread) 268 os.close(c2pread)
266 269
267 (builddir, newbuilddir) = suite.setupfunc("-st-" + str(ourpid), selftestdir, process_suite) 270 (builddir, newbuilddir) = suite.setupfunc("-st-" + str(ourpid), selftestdir, process_suite)
@@ -273,10 +276,11 @@ def fork_for_tests(concurrency_num, suite):
273 newsi = os.open(os.devnull, os.O_RDWR) 276 newsi = os.open(os.devnull, os.O_RDWR)
274 os.dup2(newsi, sys.stdin.fileno()) 277 os.dup2(newsi, sys.stdin.fileno())
275 278
279 # Send stdout/stderr over the stream
280 os.dup2(c2pwrite, sys.stdout.fileno())
281 os.dup2(c2pwrite, sys.stderr.fileno())
282
276 subunit_client = TestProtocolClient(stream) 283 subunit_client = TestProtocolClient(stream)
277 # Force buffering of stdout/stderr so the console doesn't get corrupted by test output
278 # as per default in parent code
279 subunit_client.buffer = True
280 subunit_result = AutoTimingTestResultDecorator(subunit_client) 284 subunit_result = AutoTimingTestResultDecorator(subunit_client)
281 unittest_result = process_suite.run(ExtraResultsEncoderTestResult(subunit_result)) 285 unittest_result = process_suite.run(ExtraResultsEncoderTestResult(subunit_result))
282 if ourpid != os.getpid(): 286 if ourpid != os.getpid():
@@ -305,10 +309,12 @@ def fork_for_tests(concurrency_num, suite):
305 os._exit(0) 309 os._exit(0)
306 else: 310 else:
307 os.close(c2pwrite) 311 os.close(c2pwrite)
308 stream = os.fdopen(c2pread, 'rb', 1) 312 stream = os.fdopen(c2pread, 'rb')
309 test = ProtocolTestCase(stream) 313 # Collect stdout/stderr into an io buffer
310 result.append((test, numtests)) 314 output = io.BytesIO()
311 return result, totaltests 315 testserver = ProtocolTestCase(stream, passthrough=output)
316 testservers.append((testserver, numtests, output))
317 return testservers, totaltests
312 318
313def partition_tests(suite, count): 319def partition_tests(suite, count):
314 # Keep tests from the same class together but allow tests from modules 320 # Keep tests from the same class together but allow tests from modules
diff --git a/meta/lib/oeqa/core/utils/misc.py b/meta/lib/oeqa/core/utils/misc.py
deleted file mode 100644
index e1a59588eb..0000000000
--- a/meta/lib/oeqa/core/utils/misc.py
+++ /dev/null
@@ -1,47 +0,0 @@
1#
2# Copyright (C) 2016 Intel Corporation
3#
4# SPDX-License-Identifier: MIT
5#
6
7def toList(obj, obj_type, obj_name="Object"):
8 if isinstance(obj, obj_type):
9 return [obj]
10 elif isinstance(obj, list):
11 return obj
12 else:
13 raise TypeError("%s must be %s or list" % (obj_name, obj_type))
14
15def toSet(obj, obj_type, obj_name="Object"):
16 if isinstance(obj, obj_type):
17 return {obj}
18 elif isinstance(obj, list):
19 return set(obj)
20 elif isinstance(obj, set):
21 return obj
22 else:
23 raise TypeError("%s must be %s or set" % (obj_name, obj_type))
24
25def strToList(obj, obj_name="Object"):
26 return toList(obj, str, obj_name)
27
28def strToSet(obj, obj_name="Object"):
29 return toSet(obj, str, obj_name)
30
31def intToList(obj, obj_name="Object"):
32 return toList(obj, int, obj_name)
33
34def dataStoteToDict(d, variables):
35 data = {}
36
37 for v in variables:
38 data[v] = d.getVar(v)
39
40 return data
41
42def updateTestData(d, td, variables):
43 """
44 Updates variables with values of data store to test data.
45 """
46 for var in variables:
47 td[var] = d.getVar(var)
diff --git a/meta/lib/oeqa/files/maturin/guessing-game/Cargo.toml b/meta/lib/oeqa/files/maturin/guessing-game/Cargo.toml
new file mode 100644
index 0000000000..a78ada2593
--- /dev/null
+++ b/meta/lib/oeqa/files/maturin/guessing-game/Cargo.toml
@@ -0,0 +1,20 @@
1[package]
2name = "guessing-game"
3version = "0.1.0"
4edition = "2021"
5
6# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
7
8[lib]
9name = "guessing_game"
10# "cdylib" is necessary to produce a shared library for Python to import from.
11crate-type = ["cdylib"]
12
13[dependencies]
14rand = "0.8.4"
15
16[dependencies.pyo3]
17version = "0.24.1"
18# "abi3-py38" tells pyo3 (and maturin) to build using the stable ABI with minimum Python version 3.8
19features = ["abi3-py38"]
20
diff --git a/meta/lib/oeqa/files/maturin/guessing-game/LICENSE-APACHE b/meta/lib/oeqa/files/maturin/guessing-game/LICENSE-APACHE
new file mode 100644
index 0000000000..16fe87b06e
--- /dev/null
+++ b/meta/lib/oeqa/files/maturin/guessing-game/LICENSE-APACHE
@@ -0,0 +1,201 @@
1 Apache License
2 Version 2.0, January 2004
3 http://www.apache.org/licenses/
4
5TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6
71. Definitions.
8
9 "License" shall mean the terms and conditions for use, reproduction,
10 and distribution as defined by Sections 1 through 9 of this document.
11
12 "Licensor" shall mean the copyright owner or entity authorized by
13 the copyright owner that is granting the License.
14
15 "Legal Entity" shall mean the union of the acting entity and all
16 other entities that control, are controlled by, or are under common
17 control with that entity. For the purposes of this definition,
18 "control" means (i) the power, direct or indirect, to cause the
19 direction or management of such entity, whether by contract or
20 otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 outstanding shares, or (iii) beneficial ownership of such entity.
22
23 "You" (or "Your") shall mean an individual or Legal Entity
24 exercising permissions granted by this License.
25
26 "Source" form shall mean the preferred form for making modifications,
27 including but not limited to software source code, documentation
28 source, and configuration files.
29
30 "Object" form shall mean any form resulting from mechanical
31 transformation or translation of a Source form, including but
32 not limited to compiled object code, generated documentation,
33 and conversions to other media types.
34
35 "Work" shall mean the work of authorship, whether in Source or
36 Object form, made available under the License, as indicated by a
37 copyright notice that is included in or attached to the work
38 (an example is provided in the Appendix below).
39
40 "Derivative Works" shall mean any work, whether in Source or Object
41 form, that is based on (or derived from) the Work and for which the
42 editorial revisions, annotations, elaborations, or other modifications
43 represent, as a whole, an original work of authorship. For the purposes
44 of this License, Derivative Works shall not include works that remain
45 separable from, or merely link (or bind by name) to the interfaces of,
46 the Work and Derivative Works thereof.
47
48 "Contribution" shall mean any work of authorship, including
49 the original version of the Work and any modifications or additions
50 to that Work or Derivative Works thereof, that is intentionally
51 submitted to Licensor for inclusion in the Work by the copyright owner
52 or by an individual or Legal Entity authorized to submit on behalf of
53 the copyright owner. For the purposes of this definition, "submitted"
54 means any form of electronic, verbal, or written communication sent
55 to the Licensor or its representatives, including but not limited to
56 communication on electronic mailing lists, source code control systems,
57 and issue tracking systems that are managed by, or on behalf of, the
58 Licensor for the purpose of discussing and improving the Work, but
59 excluding communication that is conspicuously marked or otherwise
60 designated in writing by the copyright owner as "Not a Contribution."
61
62 "Contributor" shall mean Licensor and any individual or Legal Entity
63 on behalf of whom a Contribution has been received by Licensor and
64 subsequently incorporated within the Work.
65
662. Grant of Copyright License. Subject to the terms and conditions of
67 this License, each Contributor hereby grants to You a perpetual,
68 worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 copyright license to reproduce, prepare Derivative Works of,
70 publicly display, publicly perform, sublicense, and distribute the
71 Work and such Derivative Works in Source or Object form.
72
733. Grant of Patent License. Subject to the terms and conditions of
74 this License, each Contributor hereby grants to You a perpetual,
75 worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 (except as stated in this section) patent license to make, have made,
77 use, offer to sell, sell, import, and otherwise transfer the Work,
78 where such license applies only to those patent claims licensable
79 by such Contributor that are necessarily infringed by their
80 Contribution(s) alone or by combination of their Contribution(s)
81 with the Work to which such Contribution(s) was submitted. If You
82 institute patent litigation against any entity (including a
83 cross-claim or counterclaim in a lawsuit) alleging that the Work
84 or a Contribution incorporated within the Work constitutes direct
85 or contributory patent infringement, then any patent licenses
86 granted to You under this License for that Work shall terminate
87 as of the date such litigation is filed.
88
894. Redistribution. You may reproduce and distribute copies of the
90 Work or Derivative Works thereof in any medium, with or without
91 modifications, and in Source or Object form, provided that You
92 meet the following conditions:
93
94 (a) You must give any other recipients of the Work or
95 Derivative Works a copy of this License; and
96
97 (b) You must cause any modified files to carry prominent notices
98 stating that You changed the files; and
99
100 (c) You must retain, in the Source form of any Derivative Works
101 that You distribute, all copyright, patent, trademark, and
102 attribution notices from the Source form of the Work,
103 excluding those notices that do not pertain to any part of
104 the Derivative Works; and
105
106 (d) If the Work includes a "NOTICE" text file as part of its
107 distribution, then any Derivative Works that You distribute must
108 include a readable copy of the attribution notices contained
109 within such NOTICE file, excluding those notices that do not
110 pertain to any part of the Derivative Works, in at least one
111 of the following places: within a NOTICE text file distributed
112 as part of the Derivative Works; within the Source form or
113 documentation, if provided along with the Derivative Works; or,
114 within a display generated by the Derivative Works, if and
115 wherever such third-party notices normally appear. The contents
116 of the NOTICE file are for informational purposes only and
117 do not modify the License. You may add Your own attribution
118 notices within Derivative Works that You distribute, alongside
119 or as an addendum to the NOTICE text from the Work, provided
120 that such additional attribution notices cannot be construed
121 as modifying the License.
122
123 You may add Your own copyright statement to Your modifications and
124 may provide additional or different license terms and conditions
125 for use, reproduction, or distribution of Your modifications, or
126 for any such Derivative Works as a whole, provided Your use,
127 reproduction, and distribution of the Work otherwise complies with
128 the conditions stated in this License.
129
1305. Submission of Contributions. Unless You explicitly state otherwise,
131 any Contribution intentionally submitted for inclusion in the Work
132 by You to the Licensor shall be under the terms and conditions of
133 this License, without any additional terms or conditions.
134 Notwithstanding the above, nothing herein shall supersede or modify
135 the terms of any separate license agreement you may have executed
136 with Licensor regarding such Contributions.
137
1386. Trademarks. This License does not grant permission to use the trade
139 names, trademarks, service marks, or product names of the Licensor,
140 except as required for reasonable and customary use in describing the
141 origin of the Work and reproducing the content of the NOTICE file.
142
1437. Disclaimer of Warranty. Unless required by applicable law or
144 agreed to in writing, Licensor provides the Work (and each
145 Contributor provides its Contributions) on an "AS IS" BASIS,
146 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 implied, including, without limitation, any warranties or conditions
148 of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 PARTICULAR PURPOSE. You are solely responsible for determining the
150 appropriateness of using or redistributing the Work and assume any
151 risks associated with Your exercise of permissions under this License.
152
1538. Limitation of Liability. In no event and under no legal theory,
154 whether in tort (including negligence), contract, or otherwise,
155 unless required by applicable law (such as deliberate and grossly
156 negligent acts) or agreed to in writing, shall any Contributor be
157 liable to You for damages, including any direct, indirect, special,
158 incidental, or consequential damages of any character arising as a
159 result of this License or out of the use or inability to use the
160 Work (including but not limited to damages for loss of goodwill,
161 work stoppage, computer failure or malfunction, or any and all
162 other commercial damages or losses), even if such Contributor
163 has been advised of the possibility of such damages.
164
1659. Accepting Warranty or Additional Liability. While redistributing
166 the Work or Derivative Works thereof, You may choose to offer,
167 and charge a fee for, acceptance of support, warranty, indemnity,
168 or other liability obligations and/or rights consistent with this
169 License. However, in accepting such obligations, You may act only
170 on Your own behalf and on Your sole responsibility, not on behalf
171 of any other Contributor, and only if You agree to indemnify,
172 defend, and hold each Contributor harmless for any liability
173 incurred by, or claims asserted against, such Contributor by reason
174 of your accepting any such warranty or additional liability.
175
176END OF TERMS AND CONDITIONS
177
178APPENDIX: How to apply the Apache License to your work.
179
180 To apply the Apache License to your work, attach the following
181 boilerplate notice, with the fields enclosed by brackets "[]"
182 replaced with your own identifying information. (Don't include
183 the brackets!) The text should be enclosed in the appropriate
184 comment syntax for the file format. We also recommend that a
185 file or class name and description of purpose be included on the
186 same "printed page" as the copyright notice for easier
187 identification within third-party archives.
188
189Copyright [yyyy] [name of copyright owner]
190
191Licensed under the Apache License, Version 2.0 (the "License");
192you may not use this file except in compliance with the License.
193You may obtain a copy of the License at
194
195 http://www.apache.org/licenses/LICENSE-2.0
196
197Unless required by applicable law or agreed to in writing, software
198distributed under the License is distributed on an "AS IS" BASIS,
199WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200See the License for the specific language governing permissions and
201limitations under the License.
diff --git a/meta/lib/oeqa/files/maturin/guessing-game/LICENSE-MIT b/meta/lib/oeqa/files/maturin/guessing-game/LICENSE-MIT
new file mode 100644
index 0000000000..c4a9a58791
--- /dev/null
+++ b/meta/lib/oeqa/files/maturin/guessing-game/LICENSE-MIT
@@ -0,0 +1,25 @@
1Copyright (c) 2018 konstin
2
3Permission is hereby granted, free of charge, to any
4person obtaining a copy of this software and associated
5documentation files (the "Software"), to deal in the
6Software without restriction, including without
7limitation the rights to use, copy, modify, merge,
8publish, distribute, sublicense, and/or sell copies of
9the Software, and to permit persons to whom the Software
10is furnished to do so, subject to the following
11conditions:
12
13The above copyright notice and this permission notice
14shall be included in all copies or substantial portions
15of the Software.
16
17THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
18ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
19TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
20PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
21SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
22CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
23OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
24IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
25DEALINGS IN THE SOFTWARE.
diff --git a/meta/lib/oeqa/files/maturin/guessing-game/pyproject.toml b/meta/lib/oeqa/files/maturin/guessing-game/pyproject.toml
new file mode 100644
index 0000000000..ff35abc472
--- /dev/null
+++ b/meta/lib/oeqa/files/maturin/guessing-game/pyproject.toml
@@ -0,0 +1,8 @@
1[build-system]
2requires = ["maturin>=1.0,<2.0"]
3build-backend = "maturin"
4
5[tool.maturin]
6# "extension-module" tells pyo3 we want to build an extension module (skips linking against libpython.so)
7features = ["pyo3/extension-module"]
8
diff --git a/meta/lib/oeqa/files/maturin/guessing-game/src/lib.rs b/meta/lib/oeqa/files/maturin/guessing-game/src/lib.rs
new file mode 100644
index 0000000000..6828466ed1
--- /dev/null
+++ b/meta/lib/oeqa/files/maturin/guessing-game/src/lib.rs
@@ -0,0 +1,48 @@
1use pyo3::prelude::*;
2use rand::Rng;
3use std::cmp::Ordering;
4use std::io;
5
6#[pyfunction]
7fn guess_the_number() {
8 println!("Guess the number!");
9
10 let secret_number = rand::thread_rng().gen_range(1..101);
11
12 loop {
13 println!("Please input your guess.");
14
15 let mut guess = String::new();
16
17 io::stdin()
18 .read_line(&mut guess)
19 .expect("Failed to read line");
20
21 let guess: u32 = match guess.trim().parse() {
22 Ok(num) => num,
23 Err(_) => continue,
24 };
25
26 println!("You guessed: {}", guess);
27
28 match guess.cmp(&secret_number) {
29 Ordering::Less => println!("Too small!"),
30 Ordering::Greater => println!("Too big!"),
31 Ordering::Equal => {
32 println!("You win!");
33 break;
34 }
35 }
36 }
37}
38
39/// A Python module implemented in Rust. The name of this function must match
40/// the `lib.name` setting in the `Cargo.toml`, else Python will not be able to
41/// import the module.
42#[pymodule]
43fn guessing_game(_py: Python, m: &PyModule) -> PyResult<()> {
44 m.add_function(wrap_pyfunction!(guess_the_number, m)?)?;
45
46 Ok(())
47}
48
diff --git a/meta/lib/oeqa/files/test.rs b/meta/lib/oeqa/files/test.rs
new file mode 100644
index 0000000000..f79c691f08
--- /dev/null
+++ b/meta/lib/oeqa/files/test.rs
@@ -0,0 +1,2 @@
1fn main() {
2}
diff --git a/meta/lib/oeqa/files/testresults/testresults.json b/meta/lib/oeqa/files/testresults/testresults.json
index 1a62155618..86e5e412af 100644
--- a/meta/lib/oeqa/files/testresults/testresults.json
+++ b/meta/lib/oeqa/files/testresults/testresults.json
@@ -1,5 +1,5 @@
1{ 1{
2 "runtime_core-image-minimal_qemuarm_20181225195701": { 2 "runtime_core-image-minimal:qemuarm_20181225195701": {
3 "configuration": { 3 "configuration": {
4 "DISTRO": "poky", 4 "DISTRO": "poky",
5 "HOST_DISTRO": "ubuntu-16.04", 5 "HOST_DISTRO": "ubuntu-16.04",
diff --git a/meta/lib/oeqa/manual/bsp-hw.json b/meta/lib/oeqa/manual/bsp-hw.json
index 75b89758cb..308a0807f3 100644
--- a/meta/lib/oeqa/manual/bsp-hw.json
+++ b/meta/lib/oeqa/manual/bsp-hw.json
@@ -26,7 +26,7 @@
26 "expected_results": "" 26 "expected_results": ""
27 }, 27 },
28 "5": { 28 "5": {
29 "action": "Remove USB, and reboot into new installed system. \nNote: If installation was successfully completed and received this message \"\"(sdx): Volume was not properly unmounted...Please run fsck.\"\" ignore it because this was whitelisted according to bug 9652.", 29 "action": "Remove USB, and reboot into new installed system. \nNote: If installation was successfully completed and received this message \"\"(sdx): Volume was not properly unmounted...Please run fsck.\"\" ignore it because this was allowed according to bug 9652.",
30 "expected_results": "" 30 "expected_results": ""
31 } 31 }
32 }, 32 },
@@ -61,70 +61,6 @@
61 }, 61 },
62 { 62 {
63 "test": { 63 "test": {
64 "@alias": "bsps-hw.bsps-hw.boot_from_runlevel_3",
65 "author": [
66 {
67 "email": "alexandru.c.georgescu@intel.com",
68 "name": "alexandru.c.georgescu@intel.com"
69 }
70 ],
71 "execution": {
72 "1": {
73 "action": "Boot into system and edit /etc/inittab to make sure that system enter at the run level 3 by default, this is done by changing the line \n\n\nid:5:initdefault \n\nto \n\nid:3:initdefault \n\n",
74 "expected_results": ""
75 },
76 "2": {
77 "action": "Reboot system, and press \"Tab\" to enter \"grub\"",
78 "expected_results": ""
79 },
80 "3": {
81 "action": "Get into the \"kernel\" line with the edit option \"e\" and add \"psplash=false text\" at the end line.",
82 "expected_results": ""
83 },
84 "4": {
85 "action": "Press \"F10\" or \"ctrl+x\" to boot system",
86 "expected_results": ""
87 },
88 "5": {
89 "action": "If system ask you for a login type \"root\"",
90 "expected_results": "System should boot to run level 3, showing the command prompt."
91 }
92 },
93 "summary": "boot_from_runlevel_3"
94 }
95 },
96 {
97 "test": {
98 "@alias": "bsps-hw.bsps-hw.boot_from_runlevel_5",
99 "author": [
100 {
101 "email": "alexandru.c.georgescu@intel.com",
102 "name": "alexandru.c.georgescu@intel.com"
103 }
104 ],
105 "execution": {
106 "1": {
107 "action": "Boot into system and edit /etc/inittab to make sure that system enter at the run level 5 by default, this is done by changing the line \n\nid:3:initdefault \n\nto \n\nid:5:initdefault \n\n",
108 "expected_results": ""
109 },
110 "2": {
111 "action": "Reboot system, and press \"Tab\" to enter \"grub\"",
112 "expected_results": ""
113 },
114 "3": {
115 "action": "Get into the \"kernel\" line with the edit option \"e\" and add \"psplash=false text\" at the end line.",
116 "expected_results": ""
117 },
118 "4": {
119 "action": "Press \"F10\" or \"ctrl+x\" to boot system \nNote: The test is only for sato image.",
120 "expected_results": "System should boot to runlevel 5 ."
121 }
122 },
123 "summary": "boot_from_runlevel_5"
124 }
125 },
126 {
127 "test": {
128 "@alias": "bsps-hw.bsps-hw.switch_among_multi_applications_and_desktop", 64 "@alias": "bsps-hw.bsps-hw.switch_among_multi_applications_and_desktop",
129 "author": [ 65 "author": [
130 { 66 {
@@ -155,70 +91,6 @@
155 }, 91 },
156 { 92 {
157 "test": { 93 "test": {
158 "@alias": "bsps-hw.bsps-hw.ethernet_static_ip_set_in_connman",
159 "author": [
160 {
161 "email": "alexandru.c.georgescu@intel.com",
162 "name": "alexandru.c.georgescu@intel.com"
163 }
164 ],
165 "execution": {
166 "1": {
167 "action": "Boot the system and check internet connection is on . ",
168 "expected_results": ""
169 },
170 "2": {
171 "action": "Launch connmand-properties (up-right corner on desktop)",
172 "expected_results": ""
173 },
174 "3": {
175 "action": "Choose Ethernet device and set a valid static ip address for it. \nFor example, in our internal network, we can set as following: \nip address: 10.239.48.xxx \nMask: 255.255.255.0 \nGateway (Broadcast): 10.239.48.255",
176 "expected_results": ""
177 },
178 "4": {
179 "action": "Check the Network configuration with \"ifconfig\"",
180 "expected_results": "Static IP was set successfully \n"
181 },
182 "5": {
 183					"action": "ping to another IP address",
 184					"expected_results": "Ping works correctly\n"
185 }
186 },
187 "summary": "ethernet_static_ip_set_in_connman"
188 }
189 },
190 {
191 "test": {
192 "@alias": "bsps-hw.bsps-hw.ethernet_get_IP_in_connman_via_DHCP",
193 "author": [
194 {
195 "email": "alexandru.c.georgescu@intel.com",
196 "name": "alexandru.c.georgescu@intel.com"
197 }
198 ],
199 "execution": {
200 "1": {
201 "action": "Launch connmand-properties (up-right corner on your desktop). ",
202 "expected_results": ""
203 },
204 "2": {
205 "action": "Check if Ethernet device can work properly with static IP, doing \"ping XXX.XXX.XXX.XXX\", once this is set.",
206 "expected_results": "Ping executed successfully . \n\n"
207 },
208 "3": {
209 "action": "Then choose DHCP method for Ethernet device in connmand-properties.",
210 "expected_results": ""
211 },
212 "4": {
213 "action": "Check with 'ifconfig\" and \"ping\" if Ethernet device get IP address via DHCP.",
214 "expected_results": "Ethernet device can get dynamic IP address via DHCP in connmand ."
215 }
216 },
217 "summary": "ethernet_get_IP_in_connman_via_DHCP"
218 }
219 },
220 {
221 "test": {
222 "@alias": "bsps-hw.bsps-hw.connman_offline_mode_in_connman-gnome", 94 "@alias": "bsps-hw.bsps-hw.connman_offline_mode_in_connman-gnome",
223 "author": [ 95 "author": [
224 { 96 {
@@ -241,40 +113,6 @@
241 }, 113 },
242 { 114 {
243 "test": { 115 "test": {
244 "@alias": "bsps-hw.bsps-hw.standby",
245 "author": [
246 {
247 "email": "alexandru.c.georgescu@intel.com",
248 "name": "alexandru.c.georgescu@intel.com"
249 }
250 ],
251 "execution": {
252 "1": {
253 "action": "boot system and launch terminal; check output of \"date\" and launch script \"continue.sh\"",
254 "expected_results": ""
255 },
256 "2": {
257 "action": "echo \"mem\" > /sys/power/state",
258 "expected_results": ""
259 },
260 "3": {
261 "action": "After system go into S3 mode, move mouse or press any key to make it resume (on NUC press power button)",
262 "expected_results": ""
263 },
264 "4": {
265 "action": "Check \"date\" and script \"continue.sh\"",
266 "expected_results": ""
267 },
268 "5": {
269 "action": "Check if application can work as normal \ncontinue.sh as below: \n \n#!/bin/sh \n \ni=1 \nwhile [ 0 ] \ndo \n echo $i \n sleep 1 \n i=$((i+1)) \ndone ",
270 "expected_results": "Screen should resume back and script can run continuously incrementing the i's value from where it was before going to standby state. Date should be the same with the corresponding time increment."
271 }
272 },
273 "summary": "standby"
274 }
275 },
276 {
277 "test": {
278 "@alias": "bsps-hw.bsps-hw.check_CPU_utilization_after_standby", 116 "@alias": "bsps-hw.bsps-hw.check_CPU_utilization_after_standby",
279 "author": [ 117 "author": [
280 { 118 {
@@ -305,88 +143,6 @@
305 }, 143 },
306 { 144 {
307 "test": { 145 "test": {
308 "@alias": "bsps-hw.bsps-hw.Test_if_LAN_device_works_well_after_resume_from_suspend_state",
309 "author": [
310 {
311 "email": "alexandru.c.georgescu@intel.com",
312 "name": "alexandru.c.georgescu@intel.com"
313 }
314 ],
315 "execution": {
316 "1": {
317 "action": "boot system and launch terminal",
318 "expected_results": ""
319 },
320 "2": {
321 "action": "echo \"mem\" > /sys/power/state",
322 "expected_results": ""
323 },
324 "3": {
325 "action": "After system go into S3 mode, move mouse or press any key to make it resume",
326 "expected_results": ""
327 },
328 "4": {
329 "action": "check ping status \n\nNote: This TC apply only for core-image-full-cmd.",
330 "expected_results": "ping should always work before/after standby"
331 }
332 },
333 "summary": "Test_if_LAN_device_works_well_after_resume_from_suspend_state"
334 }
335 },
336 {
337 "test": {
338 "@alias": "bsps-hw.bsps-hw.Test_if_usb_hid_device_works_well_after_resume_from_suspend_state",
339 "author": [
340 {
341 "email": "alexandru.c.georgescu@intel.com",
342 "name": "alexandru.c.georgescu@intel.com"
343 }
344 ],
345 "execution": {
346 "1": {
347 "action": "boot system and launch terminal",
348 "expected_results": ""
349 },
350 "2": {
351 "action": "echo \"mem\" > /sys/power/state",
352 "expected_results": ""
353 },
354 "3": {
355 "action": "After system go into S3 mode, resume the device by pressing the power button or using HID devices",
356 "expected_results": "Devices resumes "
357 },
358 "4": {
359 "action": "check usb mouse and keyboard",
360 "expected_results": "Usb mouse and keyboard should work"
361 }
362 },
363 "summary": "Test_if_usb_hid_device_works_well_after_resume_from_suspend_state"
364 }
365 },
366 {
367 "test": {
368 "@alias": "bsps-hw.bsps-hw.click_terminal_icon_on_X_desktop",
369 "author": [
370 {
371 "email": "alexandru.c.georgescu@intel.com",
372 "name": "alexandru.c.georgescu@intel.com"
373 }
374 ],
375 "execution": {
376 "1": {
377 "action": "After system launch and X start up, click terminal icon on desktop",
378 "expected_results": ""
379 },
380 "2": {
381 "action": "Check if only one terminal window launched and no other problem met",
382 "expected_results": "There should be no problem after launching terminal . "
383 }
384 },
385 "summary": "click_terminal_icon_on_X_desktop"
386 }
387 },
388 {
389 "test": {
390 "@alias": "bsps-hw.bsps-hw.Add_multiple_files_in_media_player", 146 "@alias": "bsps-hw.bsps-hw.Add_multiple_files_in_media_player",
391 "author": [ 147 "author": [
392 { 148 {
@@ -839,40 +595,6 @@
839 }, 595 },
840 { 596 {
841 "test": { 597 "test": {
842 "@alias": "bsps-hw.bsps-hw.Check_if_RTC_(Real_Time_Clock)_can_work_correctly",
843 "author": [
844 {
845 "email": "yi.zhao@windriver.com",
846 "name": "yi.zhao@windriver.com"
847 }
848 ],
849 "execution": {
850 "1": {
851 "action": "Read time from RTC registers. root@localhost:/root> hwclock -r Sun Mar 22 04:05:47 1970 -0.001948 seconds ",
852 "expected_results": "Can read and set the time from RTC.\n"
853 },
854 "2": {
855 "action": "Set system current time root@localhost:/root> date 062309452008 ",
856 "expected_results": ""
857 },
858 "3": {
859 "action": "Synchronize the system current time to RTC registers root@localhost:/root> hwclock -w ",
860 "expected_results": ""
861 },
862 "4": {
863 "action": "Read time from RTC registers root@localhost:/root> hwclock -r ",
864 "expected_results": ""
865 },
866 "5": {
867 "action": "Reboot target and read time from RTC again\n",
868 "expected_results": ""
869 }
870 },
871 "summary": "Check_if_RTC_(Real_Time_Clock)_can_work_correctly"
872 }
873 },
874 {
875 "test": {
876 "@alias": "bsps-hw.bsps-hw.System_can_boot_up_via_NFS", 598 "@alias": "bsps-hw.bsps-hw.System_can_boot_up_via_NFS",
877 "author": [ 599 "author": [
878 { 600 {
diff --git a/meta/lib/oeqa/manual/build-appliance.json b/meta/lib/oeqa/manual/build-appliance.json
index 70f8c72c9b..82a556e93e 100644
--- a/meta/lib/oeqa/manual/build-appliance.json
+++ b/meta/lib/oeqa/manual/build-appliance.json
@@ -48,7 +48,7 @@
48 "expected_results": "" 48 "expected_results": ""
49 }, 49 },
50 "3": { 50 "3": {
51 "action": "Install a new package to the image, for example, acpid. Set the following line in conf/local.conf: IMAGE_INSTALL_append = \" acpid\"", 51 "action": "Install a new package to the image, for example, acpid. Set the following line in conf/local.conf: IMAGE_INSTALL:append = \" acpid\"",
52 "expected_results": "" 52 "expected_results": ""
53 }, 53 },
54 "4": { 54 "4": {
diff --git a/meta/lib/oeqa/manual/crops.json b/meta/lib/oeqa/manual/crops.json
deleted file mode 100644
index 5cfa653843..0000000000
--- a/meta/lib/oeqa/manual/crops.json
+++ /dev/null
@@ -1,294 +0,0 @@
1[
2 {
3 "test": {
4 "@alias": "crops-default.crops-default.sdkext_eSDK_devtool_build_make",
5 "author": [
6 {
7 "email": "francisco.j.pedraza.gonzalez@intel.com",
8 "name": "francisco.j.pedraza.gonzalez@intel.com"
9 }
10 ],
11 "execution": {
12 "1": {
13 "action": "IMPORTANT NOTE: The firsts 5 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n\n",
14 "expected_results": ""
15 },
16 "2": {
17 "action": " Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n",
18 "expected_results": ""
19 },
20 "3": {
21 "action": "Create the following tree of files <crops-esdk-workdir-workspace>/sdkext/files/myapp <crops-esdk-workdir-workspace>/sdkext/files/myapp_cmake \n\n\n",
22 "expected_results": ""
23 },
24 "4": {
 25					"action": " Create the following files within the myapp directory myapp.c and the Makefile. Write the following inside of each file: \n---------------------------------------- \nMakefile should contain \n\nall: myapp \n\nmyapp: myapp.o \n\t$(CC) $(LDFLAGS) $< -o $@ \n\nmyapp.o: myapp.c \n\t$(CC) $(CFLAGS) -c $< -o $@ \n\nclean: \n\trm -rf myapp.o myapp \n\n----------------------------- \nmyapp.c should contain \n\n\n#include <stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n \n\treturn 0; \n} \n------------------------------------ \n\n",
26 "expected_results": "be sure that the indentations on the makefile are tabs not spaces. \n\n"
27 },
28 "5": {
29 "action": " Create the following files within the myapp_cmake directory CMakeLists.txt and myapp.c. Write the following inside each file: \n\n------------------------------------ \nCMakeLists.txt should contain: \n\ncmake_minimum_required (VERSION 2.6) \nproject (myapp) \n# The version number. \nset (myapp_VERSION_MAJOR 1) \nset (myapp_VERSION_MINOR 0) \n\n# add the executable \nadd_executable (myapp myapp.c) \n\ninstall(TARGETS myapp \nRUNTIME DESTINATION bin) \n\n------------------------------------------ \nmyapp.c should contain: \n\n#include <stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n\n\treturn 0; \n} \n------------------------------------------------- \n\n",
30 "expected_results": "Be sure that the indentations on CMakeLists.txt is tabs not spaces."
31 },
32 "6": {
33 "action": " source environment-setup-i586-poky-linux \n\n",
34 "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n"
35 },
36 "7": {
37 "action": " run command which devtool \n\n",
38 "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n "
39 },
40 "8": {
41 "action": "devtool add myapp <directory>(this is myapp dir) \n\n\n",
42 "expected_results": "The directory you should input is the myapp directory. This should automatically create the recipe myapp.bb under <crops-esdk-workdir-workspace>/recipes/myapp/myapp.bb"
43 },
44 "9": {
45 "action": " devtool build myapp \n\n",
46 "expected_results": "This should compile an image"
47 },
48 "10": {
49 "action": " devtool reset myapp ",
50 "expected_results": "This cleans sysroot of the myapp recipe, but it leaves the source tree intact. meaning it does not erase."
51 }
52 },
53 "summary": "sdkext_eSDK_devtool_build_make"
54 }
55 },
56 {
57 "test": {
58 "@alias": "crops-default.crops-default.sdkext_devtool_build_esdk_package",
59 "author": [
60 {
61 "email": "francisco.j.pedraza.gonzalez@intel.com",
62 "name": "francisco.j.pedraza.gonzalez@intel.com"
63 }
64 ],
65 "execution": {
66 "1": {
67 "action": "IMPORTANT NOTE: The firsts 5 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n\n",
68 "expected_results": ""
69 },
70 "2": {
71 "action": " Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n",
72 "expected_results": ""
73 },
74 "3": {
75 "action": " Create the following tree of files <crops-esdk-workdir-workspace>/sdkext/files/myapp/ \n <crops-esdk-workdir-workspace>/sdkext/files/myapp_cmake \n\n",
76 "expected_results": ""
77 },
78 "4": {
 79					"action": " Create the following files within the myapp directory myapp.c and the Makefile. Write the following inside of each file: \n---------------------------------------- \nMakefile should contain \n\nall: myapp \n\nmyapp: myapp.o \n\t$(CC) $(LDFLAGS) $< -o $@ \n\nmyapp.o: myapp.c \n\t$(CC) $(CFLAGS) -c $< -o $@ \n\nclean: \n\trm -rf myapp.o myapp \n\n----------------------------- \nmyapp.c should contain \n\n#include <stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n \n\treturn 0; \n} \n------------------------------------ \n\n",
80 "expected_results": "be sure that the indentations on the makefile are tabs not spaces. \n\n"
81 },
82 "5": {
83 "action": " Create the following files within the myapp_cmake directory CMakeLists.txt and myapp.c. Write the following inside each file: \n\n------------------------------------ \nCMakeLists.txt should contain: \n\ncmake_minimum_required (VERSION 2.6) \nproject (myapp) \n# The version number. \nset (myapp_VERSION_MAJOR 1) \nset (myapp_VERSION_MINOR 0) \n\n# add the executable \nadd_executable (myapp myapp.c) \n\ninstall(TARGETS myapp \nRUNTIME DESTINATION bin) \n\n------------------------------------------ \nmyapp.c should contain: \n\n#include<stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n\n\treturn 0; \n} \n------------------------------------------------- \n\n",
84 "expected_results": "Be sure that the indentations on CMakeLists.txt is tabs not spaces. \n\n"
85 },
86 "6": {
87 "action": " source environment-setup-i586-poky-linux \n\n",
88 "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n"
89 },
90 "7": {
91 "action": " run command which devtool \n\n",
92 "expected_results": " this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n"
93 },
94 "8": {
95 "action": " devtool add myapp <directory> (this is myapp dir) \n\n",
96 "expected_results": " The directory you should input is the myapp directory. This should automatically create the recipe myapp.bb under <crops-esdk-workdir-workspace>/recipes/myapp/myapp.bb \n\n"
97 },
98 "9": {
99 "action": " devtool package myapp \n\n",
100 "expected_results": " you should expect a package creation of myapp and it should be under the /tmp/deploy/ \n\n"
101 },
102 "10": {
103 "action": " devtool reset myapp ",
 104					"expected_results": "This cleans sysroot of the myapp recipe, but it leaves the source tree intact. meaning it does not erase."
105 }
106 },
107 "summary": "sdkext_devtool_build_esdk_package"
108 }
109 },
110 {
111 "test": {
112 "@alias": "crops-default.crops-default.sdkext_devtool_build_cmake",
113 "author": [
114 {
115 "email": "francisco.j.pedraza.gonzalez@intel.com",
116 "name": "francisco.j.pedraza.gonzalez@intel.com"
117 }
118 ],
119 "execution": {
120 "1": {
121 "action": "IMPORTANT NOTE: The firsts 5 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n\n",
122 "expected_results": ""
123 },
124 "2": {
125 "action": " Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n",
126 "expected_results": ""
127 },
128 "3": {
129 "action": " Create the following tree of files <crops-esdk-workdir-workspace>/sdkext/files/myapp \n <crops-esdk-workdir-workspace>/sdkext/files/myapp_cmake \n\n",
130 "expected_results": ""
131 },
132 "4": {
 133					"action": " Create the following files within the myapp directory myapp.c and the Makefile. Write the following inside of each file: \n---------------------------------------- \nMakefile should contain \n\nall: myapp \n\nmyapp: myapp.o \n\t$(CC) $(LDFLAGS) $< -o $@ \n\nmyapp.o: myapp.c \n\t$(CC) $(CFLAGS) -c $< -o $@ \n\nclean: \n\trm -rf myapp.o myapp \n\n----------------------------- \nmyapp.c should contain \n\n#include <stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n \n\treturn 0; \n} \n------------------------------------ \n\n",
134 "expected_results": "be sure that the indentations on the makefile are tabs not spaces. \n\n"
135 },
136 "5": {
137 "action": " Create the following files within the myapp_cmake directory CMakeLists.txt and myapp.c. Write the following inside each file: \n\n------------------------------------ \nCMakeLists.txt should contain: \n\ncmake_minimum_required (VERSION 2.6) \nproject (myapp) \n# The version number. \nset (myapp_VERSION_MAJOR 1) \nset (myapp_VERSION_MINOR 0) \n\n# add the executable \nadd_executable (myapp myapp.c) \n\ninstall(TARGETS myapp \nRUNTIME DESTINATION bin) \n\n------------------------------------------ \nmyapp.c should contain: \n\n#include \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n\n\treturn 0; \n} \n------------------------------------------------- \n\n",
138 "expected_results": "Be sure that the indentations on CMakeLists.txt is tabs not spaces. \n\n"
139 },
140 "6": {
141 "action": " source environment-setup-i586-poky-linux \n\n",
142 "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n"
143 },
144 "7": {
145 "action": " run command which devtool \n\n",
146 "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n"
147 },
148 "8": {
149 "action": " devtool add myapp <directory> (this is myapp_cmake dir) \n\n",
150 "expected_results": "The directory you should input is the myapp_cmake directory. This should automatically create the recipe myapp.bb under <crops-esdk-workdir-workspace>/recipes/myapp/myapp.bb \n\n"
151 },
152 "9": {
153 "action": " devtool build myapp \n\n",
154 "expected_results": "This should compile an image \n\n"
155 },
156 "10": {
157 "action": " devtool reset myapp ",
158 "expected_results": "This cleans sysroot of the myapp recipe, but it leaves the source tree intact. meaning it does not erase. "
159 }
160 },
161 "summary": "sdkext_devtool_build_cmake"
162 }
163 },
164 {
165 "test": {
166 "@alias": "crops-default.crops-default.sdkext_extend_autotools_recipe_creation",
167 "author": [
168 {
169 "email": "francisco.j.pedraza.gonzalez@intel.com",
170 "name": "francisco.j.pedraza.gonzalez@intel.com"
171 }
172 ],
173 "execution": {
174 "1": {
175 "action": "IMPORTANT NOTE: The firsts 2 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n\n",
176 "expected_results": ""
177 },
178 "2": {
179 "action": "Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n",
180 "expected_results": ""
181 },
182 "3": {
183 "action": " source environment-setup-i586-poky-linux \n\n",
184 "expected_results": " This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n"
185 },
186 "4": {
187 "action": "run command which devtool \n\n",
188 "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n"
189 },
190 "5": {
191 "action": "devtool sdk-install -s libxml2 \n\n",
192 "expected_results": "this should install libxml2 \n\n"
193 },
194 "6": {
195 "action": "devtool add librdfa https://github.com/rdfa/librdfa \n\n",
196 "expected_results": "This should automatically create the recipe librdfa.bb under /recipes/librdfa/librdfa.bb \n\n"
197 },
198 "7": {
199 "action": "devtool build librdfa \n\n",
200 "expected_results": "This should compile \n\n"
201 },
202 "8": {
203 "action": "devtool reset librdfa ",
204 "expected_results": "This cleans sysroot of the librdfa recipe, but it leaves the source tree intact. meaning it does not erase."
205 }
206 },
207 "summary": "sdkext_extend_autotools_recipe_creation"
208 }
209 },
210 {
211 "test": {
212 "@alias": "crops-default.crops-default.sdkext_devtool_kernelmodule",
213 "author": [
214 {
215 "email": "francisco.j.pedraza.gonzalez@intel.com",
216 "name": "francisco.j.pedraza.gonzalez@intel.com"
217 }
218 ],
219 "execution": {
220 "1": {
221 "action": "IMPORTANT NOTE: The firsts 2 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n",
222 "expected_results": ""
223 },
224 "2": {
225 "action": " Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n",
226 "expected_results": ""
227 },
228 "3": {
229 "action": "source environment-setup-i586-poky-linux \n\n",
230 "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n \n"
231 },
232 "4": {
233 "action": "run command which devtool \n\n",
234 "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n"
235 },
236 "5": {
237 "action": "devtool add kernel-module-hello-world https://git.yoctoproject.org/git/kernel-module-hello-world \n\n",
238 "expected_results": "This should automatically create the recipe kernel-module-hello-world.bb under <crops-esdk-workdir-workspace>/recipes/kernel-module-hello-world/kernel-module-hello-world.bb "
239 },
240 "6": {
241 "action": "devtool build kernel-module-hello-world \n\n",
242 "expected_results": "This should compile an image \n\n"
243 },
244 "7": {
245 "action": "devtool reset kernel-module-hello-world ",
246 "expected_results": "This cleans sysroot of the kernel-module-hello-world recipe, but it leaves the source tree intact. meaning it does not erase."
247 }
248 },
249 "summary": "sdkext_devtool_kernelmodule"
250 }
251 },
252 {
253 "test": {
254 "@alias": "crops-default.crops-default.sdkext_recipes_for_nodejs",
255 "author": [
256 {
257 "email": "francisco.j.pedraza.gonzalez@intel.com",
258 "name": "francisco.j.pedraza.gonzalez@intel.com"
259 }
260 ],
261 "execution": {
262 "1": {
263 "action": "IMPORTANT NOTE: The firsts 2 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\nlets say variable npm = npm://registry.npmjs.org;name=winston;version=2.2.0 \n\n",
264 "expected_results": ""
265 },
266 "2": {
267 "action": "Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n",
268 "expected_results": ""
269 },
270 "3": {
271 "action": "source environment-setup-i586-poky-linux \n\n",
272 "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n"
273 },
274 "4": {
275 "action": "run command which devtool \n\n",
276 "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n"
277 },
278 "5": {
279 "action": " 4a) git clone git://git.openembedded.org/meta-openembedded in layers/build directory \n \n4b) Add meta-openembedded/meta-oe in bblayer.conf as mentioned below: ${SDKBASEMETAPATH}/layers/build/meta-openembedded/meta-oe \\ \n\n4c) devtool add \"npm://registry.npmjs.org;name=npm;version=2.2.0\" \n\n",
280 "expected_results": " This should automatically create the recipe npm.bb under /recipes/npm/npm.bb \n\n"
281 },
282 "6": {
283 "action": "devtool build npm \n\n",
284 "expected_results": "This should compile an image \n\n"
285 },
286 "7": {
287 "action": " devtool reset npm",
288 "expected_results": "This cleans sysroot of the npm recipe, but it leaves the source tree intact. meaning it does not erase."
289 }
290 },
291 "summary": "sdkext_recipes_for_nodejs"
292 }
293 }
294]
diff --git a/meta/lib/oeqa/manual/eclipse-plugin.json b/meta/lib/oeqa/manual/eclipse-plugin.json
deleted file mode 100644
index d77d0e673b..0000000000
--- a/meta/lib/oeqa/manual/eclipse-plugin.json
+++ /dev/null
@@ -1,322 +0,0 @@
1[
2 {
3 "test": {
4 "@alias": "eclipse-plugin.eclipse-plugin.support_SSH_connection_to_Target",
5 "author": [
6 {
7 "email": "ee.peng.yeoh@intel.com",
8 "name": "ee.peng.yeoh@intel.com"
9 }
10 ],
11 "execution": {
12 "1": {
13 "action": "In Eclipse, swich to Remote System Explorer to create a connention baseed on SSH, input the remote target IP address as the Host name, make sure disable the proxy in Window->Preferences->General->Network Connection, set Direct as Active Provider field. ",
14 "expected_results": "the connection based on SSH could be set up."
15 },
16 "2": {
17 "action": "Configure connection from Eclipse: Run->Run Configurations->C/C++ Remote Application\\ ->New Connection->General->SSH Only ",
18 "expected_results": ""
19 },
20 "3": {
21 "action": "Then right click to connect, input the user ID and password. ",
22 "expected_results": ""
23 },
24 "4": {
25 "action": "expand the connection, it will show the Sftp Files etc. \nNOTE. Might need to change dropbear to openssh and add the packagegroup-core-eclipse-debug recipe",
26 "expected_results": ""
27 }
28 },
29 "summary": "support_SSH_connection_to_Target"
30 }
31 },
32 {
33 "test": {
34 "@alias": "eclipse-plugin.eclipse-plugin.Launch_QEMU_from_Eclipse",
35 "author": [
36 {
37 "email": "ee.peng.yeoh@intel.com",
38 "name": "ee.peng.yeoh@intel.com"
39 }
40 ],
41 "execution": {
42 "1": {
43 "action": "Set the Yocto ADT's toolchain root location, sysroot location and kernel, in the menu Window -> Preferences -> Yocto ADT. \n \n",
44 "expected_results": ""
45 },
46 "2": {
47 "action": "wget autobuilder.yoctoproject.org/pub/releases//machines/qemu/qemux86/qemu (ex:core-image-sato-sdk-qemux86-date-rootfs-tar-bz2) \nsource /opt/poky/version/environment-setup-i585-poky-linux \n\nExtract qemu with runqemu-extract-sdk /home/user/file(ex.core-image-sato-sdk-qemux86.bz2) \n/home/user/qemux86-sato-sdk \n\n",
48 "expected_results": " Qemu can be lauched normally."
49 },
50 "3": {
51 "action": "(a)Point to the Toolchain: \n \nIf you are using a stand-alone pre-built toolchain, you should be pointing to the /opt/poky/{test-version} directory as Toolchain Root Location. This is the default location for toolchains installed by the ADT Installer or by hand. If ADT is installed in other location, use that location as Toolchain location.\nIf you are using a system-derived toolchain, the path you provide for the Toolchain Root Location field is the Yocto Project's build directory. \n \n E.g:/home/user/yocto/poky/build \n",
52 "expected_results": ""
53 },
54 "4": {
55 "action": "(b)Specify the Sysroot Location: \nSysroot Location is the location where the root filesystem for the target hardware is created on the development system by the ADT Installer (SYSROOT in step 2 of the case ADT installer Installation). \n \n Local : e.g: /home/user/qemux86-sato-sdk \nUsing ADT : e.g :/home/user/test-yocto/qemux86 \n\n",
56 "expected_results": ""
57 },
58 "5": {
59 "action": "(c)Select the Target Architecture: \n \nThe target architecture is the type of hardware you are going to use or emulate. Use the pull-down Target Architecture menu to make your selection. \n \n\n",
60 "expected_results": ""
61 },
62 "6": {
63 "action": "(d) QEMU: \nSelect this option if you will be using the QEMU emulator. Specify the Kernel matching the QEMU architecture you are using. \n wget autobuilder.yoctoproject.org/pub/releases//machines/qemu/qemux86/bzImage-qemux86.bin \n e.g: /home/$USER/yocto/adt-installer/download_image/bzImage-qemux86.bin \n\n",
64 "expected_results": ""
65 },
66 "7": {
67 "action": "(e) select OK to save the settings. \n\n\n1: In the Eclipse toolbar, expose the Run -> External Tools menu. Your image should appear as a selectable menu item. \n2: Select your image in the navigation pane to launch the emulator in a new window. \n3: If needed, enter your host root password in the shell window at the prompt. This sets up a Tap 0 connection needed for running in user-space NFS mode. \n",
68 "expected_results": ""
69 }
70 },
71 "summary": "Launch_QEMU_from_Eclipse"
72 }
73 },
74 {
75 "test": {
76 "@alias": "eclipse-plugin.eclipse-plugin.Relocatable_SDK_-_C_-_Build_Hello_World_ANSI_C_Autotools_Project",
77 "author": [
78 {
79 "email": "ee.peng.yeoh@intel.com",
80 "name": "ee.peng.yeoh@intel.com"
81 }
82 ],
83 "execution": {
84 "1": {
85 "action": "Launch a QEMU of target environment.(Reference to case \"ADT - Launch qemu by eclipse\") ",
86 "expected_results": ""
87 },
88 "2": {
89 "action": "Select File -> New -> Project.",
90 "expected_results": ""
91 },
92 "3": {
93 "action": "Double click C/C++.",
94 "expected_results": ""
95 },
96 "4": {
97 "action": "Click C or C++ Project to create the project.",
98 "expected_results": ""
99 },
100 "5": {
101 "action": "Expand Yocto ADT Project.",
102 "expected_results": ""
103 },
104 "6": {
105 "action": "Select Hello World ANSI C Autotools Project.",
106 "expected_results": ""
107 },
108 "7": {
109 "action": "Put a name in the Project name. Do not use hyphens as part of the name. \n \n",
110 "expected_results": ""
111 },
112 "8": {
113 "action": "Click Next.",
114 "expected_results": ""
115 },
116 "9": {
117 "action": "Add information in the Author and Copyright notice fields. \n1",
118 "expected_results": ""
119 },
120 "10": {
121 "action": "Click Finish. \n1",
122 "expected_results": ""
123 },
124 "11": {
125 "action": "If the \"open perspective\" prompt appears, click \"Yes\" so that you open the C/C++ perspective. \n1",
126 "expected_results": ""
127 },
128 "12": {
129 "action": "In the Project Explorer window, right click the project -> Reconfigure project. \n1",
130 "expected_results": ""
131 },
132 "13": {
133 "action": "In the Project Explorer window, right click the project -> Build project. \n1",
134 "expected_results": "Under the Project files, a new folder appears called Binaries. This indicates that the compilation have been successful and the project binary have been created. \n"
135 },
136 "14": {
137 "action": "Right click it again and Run as -> Run Configurations. \n\t\t\tUnder Run Configurations expand \"C/C++ Remote Application\". A configuration for the current project should appear. Clicking it will display the configuration settings. \n\t\t\tin \"C/C++ Application\" field input Remote Absolute File path for C/C++ Application. e.g.: /home/root/myapplication \n\t\t\tIn \"Connection\" drop-down list make sure a TCF connection is set up for your target. If not, create a new one by clicking the New button. \n1",
138 "expected_results": "step 14 to step 16 -> Build succeed and the console outputs Hello world, you can also check the output on target."
139 },
140 "15": {
141 "action": "After all settings are done, select the Run button on the bottom right corner \n\n1",
142 "expected_results": ""
143 },
144 "16": {
145 "action": "Repeat the steps 14-15, but instead of using Run Configurations use Debug Configurations: \nRight click it again and Debug as -> Debug Configurations \nUnder Debug Configurations expand \"C/C++ Remote Application\". A configuration for the current project should appear. Clicking it will display the configuration settings. \nin \"C/C++ Application\" field input Remote Absolute File path for C/C++ Application.\ne.g.: /home/root/myapplication \nIn \"Connection\" drop-down list make sure a TCF connection is set up for your target. If not, create a new one by clicking the New button \n1",
146 "expected_results": ""
147 },
148 "17": {
149 "action": "After all settings are done, select the Debug button on the bottom right corner",
150 "expected_results": ""
151 }
152 },
153 "summary": "Relocatable_SDK_-_C_-_Build_Hello_World_ANSI_C_Autotools_Project"
154 }
155 },
156 {
157 "test": {
158 "@alias": "eclipse-plugin.eclipse-plugin.Relocatable_SDK_-_C++_-_Build_Hello_World_C++_Autotools_project",
159 "author": [
160 {
161 "email": "ee.peng.yeoh@intel.com",
162 "name": "ee.peng.yeoh@intel.com"
163 }
164 ],
165 "execution": {
166 "1": {
167 "action": "Launch a QEMU of target environment.(Reference to case \"ADT - Launch qemu by eclipse\") ",
168 "expected_results": ""
169 },
170 "2": {
171 "action": "Select File -> New -> Project. ",
172 "expected_results": ""
173 },
174 "3": {
175 "action": "Double click C/C++. ",
176 "expected_results": ""
177 },
178 "4": {
179 "action": "Click C or C++ Project to create the project. ",
180 "expected_results": ""
181 },
182 "5": {
183 "action": "Expand Yocto ADT Project. ",
184 "expected_results": ""
185 },
186 "6": {
187 "action": "Select Hello World ANSI C++ Autotools Project. ",
188 "expected_results": ""
189 },
190 "7": {
191 "action": "Put a name in the Project name. Do not use hyphens as part of the name. \n \n",
192 "expected_results": ""
193 },
194 "8": {
195 "action": "Click Next.",
196 "expected_results": ""
197 },
198 "9": {
199 "action": "Add information in the Author and Copyright notice fields.",
200 "expected_results": ""
201 },
202 "10": {
203 "action": "Click Finish. \n1",
204 "expected_results": ""
205 },
206 "11": {
207 "action": "If the \"open perspective\" prompt appears, click \"Yes\" so that you open the C/C++ perspective. \n1",
208 "expected_results": ""
209 },
210 "12": {
211 "action": "In the Project Explorer window, right click the project -> Reconfigure project. \n1",
212 "expected_results": ""
213 },
214 "13": {
215 "action": "In the Project Explorer window, right click the project -> Build project. \n\n1",
216 "expected_results": "under the Project files, a new folder appears called Binaries. This indicates that the compilation have been successful and the project binary have been created. \n"
217 },
218 "14": {
219 "action": "Right click it again and Run as -> Run Configurations. \n\t\t\tUnder Run Configurations expand \"C/C++ Remote Application\". A configuration for the current project should appear. Clicking it will display the configuration settings. \n\t\t\tin \"C/C++ Application\" field input Remote Absolute File path for C/C++ Application. e.g.: /home/root/myapplication \n\t\t\tIn \"Connection\" drop-down list make sure a TCF connection is set up for your target. If not, create a new one by clicking the New button. \n1",
220 "expected_results": "step 14 to step 16 -> Build succeed and the console outputs Hello world, you can also check the output on target."
221 },
222 "15": {
223 "action": "After all settings are done, select the Run button on the bottom right corner \n\n1",
224 "expected_results": ""
225 },
226 "16": {
227 "action": "Repeat the steps 14-15, but instead of using Run Configurations use Debug Configurations: \n\t\tRight click it again and Debug as -> Debug Configurations \n\t\tUnder Debug Configurations expand \"C/C++ Remote Application\". A configuration for the current project should appear. Clicking it will display the configuration settings. \n\t\tin \"C/C++ Application\" field input Remote Absolute File path for C/C++ Application. \n\t\te.g.: /home/root/myapplication \n\t\tIn \"Connection\" drop-down list make sure a TCF connection is set up for your target. If not, create a new one by clicking the New button \n1",
228 "expected_results": ""
229 },
230 "17": {
231 "action": "After all settings are done, select the Debug button on the bottom right corner",
232 "expected_results": ""
233 }
234 },
235 "summary": "Relocatable_SDK_-_C++_-_Build_Hello_World_C++_Autotools_project"
236 }
237 },
238 {
239 "test": {
240 "@alias": "eclipse-plugin.eclipse-plugin.Build_Eclipse_Plugin_from_source",
241 "author": [
242 {
243 "email": "laurentiu.serban@intel.com",
244 "name": "laurentiu.serban@intel.com"
245 }
246 ],
247 "execution": {
248 "1": {
249 "action": "Clone eclipse-poky source. \n \n - git clone git://git.yoctoproject.org/eclipse-poky \n\n",
250 "expected_results": "Eclipse plugin is successfully installed \n\nDocumentation is there. For example if you have release yocto-2.0.1 you will found on http://autobuilder.yoctoproject.org/pub/releases/yocto-2.0.1/eclipse-plugin/mars/ archive with documentation like org.yocto.doc-development-$date.zip \n \n"
251 },
252 "2": {
253 "action": "Checkout correct tag. \n\n - git checkout <eclipse-version>/<yocto-version> \n\n",
254 "expected_results": "After plugin is build you must have 4 archive in foder scripts from eclipse-poky: \n - org.yocto.bc - mars-master-$date.zip \n - org.yocto.doc - mars-master-$date.zip --> documentation \n - org.yocto.sdk - mars-master-$date.zip \n - org.yocto.sdk - mars-master-$date.-archive.zip --> plugin "
255 },
256 "3": {
257 "action": "Move to scripts/ folder. \n\n",
258 "expected_results": ""
259 },
260 "4": {
261 "action": "Run ./setup.sh \n\n",
262 "expected_results": ""
263 },
264 "5": {
265 "action": "When the script finishes, it prompts a command to issue to build the plugin. It should look similar to the following: \n\n$ ECLIPSE_HOME=/eclipse-poky/scripts/eclipse ./build.sh /&1 | tee -a build.log \n\nHere, the three arguments to the build script are tag name, branch for documentation and release name. \n\n",
266 "expected_results": ""
267 },
268 "6": {
269 "action": "On an eclipse without the Yocto Plugin, select \"Install New Software\" from Help pull-down menu \n\n",
270 "expected_results": ""
271 },
272 "7": {
273 "action": "Select Add and from the dialog choose Archive... Look for the *archive.zip file that was built previously with the build.sh script. Click OK. \n\n",
274 "expected_results": ""
275 },
276 "8": {
277 "action": "Select all components and proceed with Installation of plugin. Restarting eclipse might be required.\n",
278 "expected_results": ""
279 }
280 },
281 "summary": "Build_Eclipse_Plugin_from_source"
282 }
283 },
284 {
285 "test": {
286 "@alias": "eclipse-plugin.eclipse-plugin.Eclipse_Poky_installation_and_setup",
287 "author": [
288 {
289 "email": "ee.peng.yeoh@intel.com",
290 "name": "ee.peng.yeoh@intel.com"
291 }
292 ],
293 "execution": {
294 "1": {
295 "action": "Install SDK \n\ta)Download https://autobuilder.yocto.io/pub/releases//toolchain/x86_64/poky-glibc-x86_64-core-\timage-sato-i586-toolchain-.sh \n\tb)Run the SDK installer and accept the default installation directory ",
296 "expected_results": ""
297 },
298 "2": {
299 "action": "Install \"Eclipse IDE for C/C++ Developers\" Oxygen release (4.7.0) \n\ta) Go to https://www.eclipse.org/downloads/packages/all, click \"Oxygen R\" \n\tb) Click to download the build for your OS \n\tc) Click \"Download\" button to download from a mirror \n\td) Run \"tar xf\" to extract the downloaded archive ",
300 "expected_result": ""
301 },
302 "3": {
303 "action": "Install \"Eclipse IDE for C/C++ Developers\" Oxygen release (4.7.0) (Continue) \n\te) Run \"eclipse/eclipse\" to start Eclipse \n\tf) Optional step for host machine within Intel network: In Eclipse workbench window, go to \"Window\" menu -> \"Preferences...\". \n\tg) In \"Preferences\" dialog, go to \"General\" -> \"Network Connections\", set \"Active Provider\" to \"Manual\". In \"Proxy \tentries\" table, select HTTP and click \"Edit\" and enter host \"proxy-chain.intel.com\" port 911, click OK. Repeat for HTTPS with port 912 \nClick OK to close \"Preferences\" dialog. \n\th) Go to \"File\" menu -> \"Restart\" to restart Eclipse for proxy settings to take effect. ",
304 "expected_result": ""
305 },
306 "4": {
307 "action": "Install Eclipse Poky plugins \n\ta) Download https://autobuilder.yocto.io/pub/releases/<yocto-version>/eclipse-plugin/<eclipse-version>/org.yocto.sdk-development-<date>-archive.zip \n\tb) In Eclipse workbench window, go to \"Help\" menu -> \"Install New Software...\" \n\tc) In \"Install\" dialog, click \"Add...\" button \n\td) In \"Add Repository\" dialog, enter \"Eclipse Poky\" for (repository) Name, click \"Archive...\" ",
308 "expected_results": ""
309 },
310 "5": {
311 "action": "Install Eclipse Poky plugins (continue) \n\te) In \"Repository archive\" browse dialog, select the downloaded Eclipse Poky repository archive \n\tf) Back in \"Add Repository\" dialog, click \"OK\" \n\tg) Back in \"Install\" dialog, make sure \"Work with:\" is set to \"Eclipse Poky\" repository, tick \"Yocto Project \tDocumentation Plug-in\" and \"Yocto Project SDK Plug-in\", click \"Next >\" and verify plugins/features name/version, \tclick \"Next >\" and accept license agreement, click \"Finish\" \n\th) If \"Security Warning\" dialog appears, click \"OK\" to install unsigned content. \n\ti) In \"Software Updates\" dialog, click \"Yes\" to restart Eclipse to complete Eclipse Poky plugins installation. ",
312 "expected_results": ""
313 },
314 "6": {
315 "action": "Setup Eclipse Poky to use SDK \n\ta) In Eclipse workbench window, go to \"Window\" menu -> \"Preferences\". \n\tb) In \"Preferences\" window, go to \"Yocto Project SDK\", in \"Cross Compiler Options\" frame, select \"Standalone pre-\tbuilt toolchain\". ",
316 "expected_results": "Eclipse Poky plugins installed and running successfully, e.g. observe that \"Yocto Project Tools\" menu is available on Eclipse workbench window."
317 }
318 },
319 "summary": "Eclipse_Poky_installation_and_setup"
320 }
321 }
322]
diff --git a/meta/lib/oeqa/manual/sdk.json b/meta/lib/oeqa/manual/sdk.json
index 434982f7f5..21d892d26d 100644
--- a/meta/lib/oeqa/manual/sdk.json
+++ b/meta/lib/oeqa/manual/sdk.json
@@ -26,7 +26,7 @@
26 "expected_results": "Expect both qemu to boot up successfully." 26 "expected_results": "Expect both qemu to boot up successfully."
27 } 27 }
28 }, 28 },
29 "summary": "test_install_cross_toolchain_can_run_multiple_qemu_for_x86" 29 "summary": "test_install_cross_toolchain_can_run_multiple_qemu_for:x86"
30 } 30 }
31 } 31 }
32] \ No newline at end of file 32] \ No newline at end of file
diff --git a/meta/lib/oeqa/manual/toaster-managed-mode.json b/meta/lib/oeqa/manual/toaster-managed-mode.json
index 12374c7c64..1a71985c3c 100644
--- a/meta/lib/oeqa/manual/toaster-managed-mode.json
+++ b/meta/lib/oeqa/manual/toaster-managed-mode.json
@@ -136,7 +136,7 @@
136 "expected_results": "" 136 "expected_results": ""
137 }, 137 },
138 "3": { 138 "3": {
139 "action": "Check that default values are as follows: \n\tDISTRO - poky \n\tIMAGE_FSTYPES - ext3 jffs2 tar.bz2 \n\tIMAGE_INSTALL_append - \"Not set\" \n\tPACKAGE_CLASES - package_rpm \n SSTATE_DIR - /homeDirectory/poky/sstate-cache \n\n", 139 "action": "Check that default values are as follows: \n\tDISTRO - poky \n\tIMAGE_FSTYPES - ext3 jffs2 tar.bz2 \n\tIMAGE_INSTALL:append - \"Not set\" \n\tPACKAGE_CLASSES - package_rpm \n SSTATE_DIR - /homeDirectory/poky/sstate-cache \n\n",
140 "expected_results": "" 140 "expected_results": ""
141 }, 141 },
142 "4": { 142 "4": {
@@ -186,7 +186,7 @@
186 "expected_results": "" 186 "expected_results": ""
187 }, 187 },
188 "7": { 188 "7": {
189 "action": "IMAGE_INSTALL_append: \n\t- check that the \"change\" icon is present (represented by a pen icon) \n\t- click on the \"change\" icon and check that the variable becomes a text field, populated with the current value of the variable. \n\n\t- check that the save button is disabled when the text field is empty \n\t- insert test in the text field (for example \"package1\") and hit save; be aware that there is no input validation for this variable \n\t- check that a new \"delete\" icon(a trashcan) has appeared next to the pen icon \n\t- check that clicking on the trashcan icon resets the value to \"Not set\" and makes the trashcan icon dissapear \n\n", 189 "action": "IMAGE_INSTALL:append: \n\t- check that the \"change\" icon is present (represented by a pen icon) \n\t- click on the \"change\" icon and check that the variable becomes a text field, populated with the current value of the variable. \n\n\t- check that the save button is disabled when the text field is empty \n\t- insert test in the text field (for example \"package1\") and hit save; be aware that there is no input validation for this variable \n\t- check that a new \"delete\" icon(a trashcan) has appeared next to the pen icon \n\t- check that clicking on the trashcan icon resets the value to \"Not set\" and makes the trashcan icon dissapear \n\n",
190 "expected_results": "" 190 "expected_results": ""
191 }, 191 },
192 "8": { 192 "8": {
@@ -1574,7 +1574,7 @@
1574 "expected_results": "Open bitbake variables page. \n\n\t" 1574 "expected_results": "Open bitbake variables page. \n\n\t"
1575 }, 1575 },
1576 "5": { 1576 "5": {
1577 "action": "Click on change button for IMAGE_INSTALL_append and add a variable (ex: acpid). \n\n", 1577 "action": "Click on change button for IMAGE_INSTALL:append and add a variable (ex: acpid). \n\n",
1578 "expected_results": "Variable added. \n\n\t" 1578 "expected_results": "Variable added. \n\n\t"
1579 }, 1579 },
1580 "6": { 1580 "6": {
@@ -1590,7 +1590,7 @@
1590 "expected_results": "You should get results for ssh packages." 1590 "expected_results": "You should get results for ssh packages."
1591 } 1591 }
1592 }, 1592 },
1593 "summary": "Test_IMAGE_INSTALL_append_variable" 1593 "summary": "Test_IMAGE_INSTALL:append_variable"
1594 } 1594 }
1595 }, 1595 },
1596 { 1596 {
@@ -2348,7 +2348,7 @@
2348 "expected_results": "" 2348 "expected_results": ""
2349 }, 2349 },
2350 "3": { 2350 "3": {
2351 "action": "Build 6 recipes example (core-image-sato, core-image-minimal, core-image-base, core-image-clutter) to name a few. ", 2351 "action": "Build 6 recipes example (core-image-sato, core-image-minimal, core-image-base) to name a few. ",
2352 "expected_results": " All recipes are built correctly \n\n" 2352 "expected_results": " All recipes are built correctly \n\n"
2353 }, 2353 },
2354 "4": { 2354 "4": {
@@ -2382,7 +2382,7 @@
2382 "expected_results": "" 2382 "expected_results": ""
2383 }, 2383 },
2384 "3": { 2384 "3": {
2385 "action": "Build 6 recipes example (core-image-sato, core-image-minimal, core-image-base, core-image-clutter) to name a few. \n\n", 2385 "action": "Build 6 recipes example (core-image-sato, core-image-minimal, core-image-base) to name a few. \n\n",
2386 "expected_results": "All recipes are built correctly \n\n" 2386 "expected_results": "All recipes are built correctly \n\n"
2387 }, 2387 },
2388 "4": { 2388 "4": {
@@ -2420,7 +2420,7 @@
2420 "expected_results": "" 2420 "expected_results": ""
2421 }, 2421 },
2422 "3": { 2422 "3": {
2423 "action": "Build 4 recipes example (core-image-sato, core-image-minimal, core-image-base, core-image-clutter) to name a few. \n\n", 2423 "action": "Build 4 recipes example (core-image-sato, core-image-minimal, core-image-base) to name a few. \n\n",
2424 "expected_results": " All recipes are built correctly \n\n" 2424 "expected_results": " All recipes are built correctly \n\n"
2425 }, 2425 },
2426 "4": { 2426 "4": {
@@ -2569,4 +2569,4 @@
2569 "summary": "Download_task_log" 2569 "summary": "Download_task_log"
2570 } 2570 }
2571 } 2571 }
2572] \ No newline at end of file 2572]
diff --git a/meta/lib/oeqa/oetest.py b/meta/lib/oeqa/oetest.py
index 9c84466dd0..bcb6a878c7 100644
--- a/meta/lib/oeqa/oetest.py
+++ b/meta/lib/oeqa/oetest.py
@@ -28,7 +28,7 @@ try:
28 import oeqa.sdkext 28 import oeqa.sdkext
29except ImportError: 29except ImportError:
30 pass 30 pass
31from oeqa.utils.decorators import LogResults, gettag, getResults 31from oeqa.utils.decorators import LogResults, gettag
32 32
33logger = logging.getLogger("BitBake") 33logger = logging.getLogger("BitBake")
34 34
@@ -57,7 +57,6 @@ def filterByTagExp(testsuite, tagexp):
57@LogResults 57@LogResults
58class oeTest(unittest.TestCase): 58class oeTest(unittest.TestCase):
59 59
60 pscmd = "ps"
61 longMessage = True 60 longMessage = True
62 61
63 @classmethod 62 @classmethod
@@ -110,20 +109,6 @@ class oeRuntimeTest(oeTest):
110 def tearDown(self): 109 def tearDown(self):
111 # Uninstall packages in the DUT 110 # Uninstall packages in the DUT
112 self.tc.install_uninstall_packages(self.id(), False) 111 self.tc.install_uninstall_packages(self.id(), False)
113
114 res = getResults()
115 # If a test fails or there is an exception dump
116 # for QemuTarget only
117 if (type(self.target).__name__ == "QemuTarget" and
118 (self.id() in res.getErrorList() or
119 self.id() in res.getFailList())):
120 self.tc.host_dumper.create_dir(self._testMethodName)
121 self.tc.host_dumper.dump_host()
122 self.target.target_dumper.dump_target(
123 self.tc.host_dumper.dump_dir)
124 print ("%s dump data stored in %s" % (self._testMethodName,
125 self.tc.host_dumper.dump_dir))
126
127 self.tearDownLocal() 112 self.tearDownLocal()
128 113
129 # Method to be run after tearDown and implemented by child classes 114 # Method to be run after tearDown and implemented by child classes
@@ -256,7 +241,7 @@ class TestContext(object):
256 241
257 modules = [] 242 modules = []
258 for test in self.testslist: 243 for test in self.testslist:
259 if re.search("\w+\.\w+\.test_\S+", test): 244 if re.search(r"\w+\.\w+\.test_\S+", test):
260 test = '.'.join(t.split('.')[:3]) 245 test = '.'.join(t.split('.')[:3])
261 module = pkgutil.get_loader(test) 246 module = pkgutil.get_loader(test)
262 modules.append(module) 247 modules.append(module)
@@ -398,11 +383,6 @@ class RuntimeTestContext(TestContext):
398 def _get_test_suites_required(self): 383 def _get_test_suites_required(self):
399 return [t for t in self.d.getVar("TEST_SUITES").split() if t != "auto"] 384 return [t for t in self.d.getVar("TEST_SUITES").split() if t != "auto"]
400 385
401 def loadTests(self):
402 super(RuntimeTestContext, self).loadTests()
403 if oeTest.hasPackage("procps"):
404 oeRuntimeTest.pscmd = "ps -ef"
405
406 def extract_packages(self): 386 def extract_packages(self):
407 """ 387 """
408 Find packages that will be needed during runtime. 388 Find packages that will be needed during runtime.
diff --git a/meta/lib/oeqa/runtime/case.py b/meta/lib/oeqa/runtime/case.py
index f036982e1f..2a47771a3d 100644
--- a/meta/lib/oeqa/runtime/case.py
+++ b/meta/lib/oeqa/runtime/case.py
@@ -4,6 +4,9 @@
4# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
5# 5#
6 6
7import os
8import subprocess
9import time
7from oeqa.core.case import OETestCase 10from oeqa.core.case import OETestCase
8from oeqa.utils.package_manager import install_package, uninstall_package 11from oeqa.utils.package_manager import install_package, uninstall_package
9 12
@@ -18,3 +21,18 @@ class OERuntimeTestCase(OETestCase):
18 def tearDown(self): 21 def tearDown(self):
19 super(OERuntimeTestCase, self).tearDown() 22 super(OERuntimeTestCase, self).tearDown()
20 uninstall_package(self) 23 uninstall_package(self)
24
25def run_network_serialdebug(runner):
26 if not runner:
27 return
28 status, output = runner.run_serial("ip addr")
29 print("ip addr on target: %s %s" % (output, status))
30 status, output = runner.run_serial("ping -c 1 %s" % self.target.server_ip)
31 print("ping on target for %s: %s %s" % (self.target.server_ip, output, status))
32 status, output = runner.run_serial("ping -c 1 %s" % self.target.ip)
33 print("ping on target for %s: %s %s" % (self.target.ip, output, status))
34 # Have to use a full path for netstat which isn't in HOSTTOOLS
35 subprocess.call(["/usr/bin/netstat", "-tunape"])
36 subprocess.call(["/usr/bin/netstat", "-ei"])
37 subprocess.call(["ps", "-awx"], shell=True)
38 print("PID: %s %s" % (str(os.getpid()), time.time()))
diff --git a/meta/lib/oeqa/runtime/cases/_qemutiny.py b/meta/lib/oeqa/runtime/cases/_qemutiny.py
index 6886e36502..816fd4a7cb 100644
--- a/meta/lib/oeqa/runtime/cases/_qemutiny.py
+++ b/meta/lib/oeqa/runtime/cases/_qemutiny.py
@@ -1,12 +1,19 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5from oeqa.runtime.case import OERuntimeTestCase 7from oeqa.runtime.case import OERuntimeTestCase
8from oeqa.core.target.qemu import OEQemuTarget
6 9
7class QemuTinyTest(OERuntimeTestCase): 10class QemuTinyTest(OERuntimeTestCase):
8 11
9 def test_boot_tiny(self): 12 def test_boot_tiny(self):
10 status, output = self.target.run_serial('uname -a') 13 # Until the target has explicit run_serial support, check that the
11 msg = "Cannot detect poky tiny boot!" 14 # target is the qemu runner
12 self.assertTrue("yocto-tiny" in output, msg) 15 if isinstance(self.target, OEQemuTarget):
16 status, output = self.target.runner.run_serial('uname -a')
17 self.assertIn("Linux", output)
18 else:
19 self.skipTest("Target %s is not OEQemuTarget" % self.target)
diff --git a/meta/lib/oeqa/runtime/cases/apt.py b/meta/lib/oeqa/runtime/cases/apt.py
index 53745df93f..8000645843 100644
--- a/meta/lib/oeqa/runtime/cases/apt.py
+++ b/meta/lib/oeqa/runtime/cases/apt.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -21,7 +23,7 @@ class AptRepoTest(AptTest):
21 23
22 @classmethod 24 @classmethod
23 def setUpClass(cls): 25 def setUpClass(cls):
24 service_repo = os.path.join(cls.tc.td['DEPLOY_DIR_DEB'], 'all') 26 service_repo = os.path.join(cls.tc.td['DEPLOY_DIR_DEB'], '')
25 cls.repo_server = HTTPService(service_repo, 27 cls.repo_server = HTTPService(service_repo,
26 '0.0.0.0', port=cls.tc.target.server_port, 28 '0.0.0.0', port=cls.tc.target.server_port,
27 logger=cls.tc.logger) 29 logger=cls.tc.logger)
@@ -34,20 +36,44 @@ class AptRepoTest(AptTest):
34 def setup_source_config_for_package_install(self): 36 def setup_source_config_for_package_install(self):
35 apt_get_source_server = 'http://%s:%s/' % (self.tc.target.server_ip, self.repo_server.port) 37 apt_get_source_server = 'http://%s:%s/' % (self.tc.target.server_ip, self.repo_server.port)
36 apt_get_sourceslist_dir = '/etc/apt/' 38 apt_get_sourceslist_dir = '/etc/apt/'
37 self.target.run('cd %s; echo deb [ allow-insecure=yes ] %s ./ > sources.list' % (apt_get_sourceslist_dir, apt_get_source_server)) 39 self.target.run('cd %s; echo deb [ allow-insecure=yes ] %s/all ./ > sources.list' % (apt_get_sourceslist_dir, apt_get_source_server))
40
41 def setup_source_config_for_package_install_signed(self):
42 apt_get_source_server = 'http://%s:%s' % (self.tc.target.server_ip, self.repo_server.port)
43 apt_get_sourceslist_dir = '/etc/apt/'
44 self.target.run("cd %s; cp sources.list sources.list.bak; sed -i 's|\[trusted=yes\] http://bogus_ip:bogus_port|%s|g' sources.list" % (apt_get_sourceslist_dir, apt_get_source_server))
38 45
39 def cleanup_source_config_for_package_install(self): 46 def cleanup_source_config_for_package_install(self):
40 apt_get_sourceslist_dir = '/etc/apt/' 47 apt_get_sourceslist_dir = '/etc/apt/'
41 self.target.run('cd %s; rm sources.list' % (apt_get_sourceslist_dir)) 48 self.target.run('cd %s; rm sources.list' % (apt_get_sourceslist_dir))
42 49
50 def cleanup_source_config_for_package_install_signed(self):
51 apt_get_sourceslist_dir = '/etc/apt/'
52 self.target.run('cd %s; mv sources.list.bak sources.list' % (apt_get_sourceslist_dir))
53
54 def setup_key(self):
55 # the key is found on the target /etc/pki/packagefeed-gpg/
56 # named PACKAGEFEED-GPG-KEY-poky-branch
57 self.target.run('cd %s; apt-key add P*' % ('/etc/pki/packagefeed-gpg'))
58
43 @skipIfNotFeature('package-management', 59 @skipIfNotFeature('package-management',
44 'Test requires package-management to be in IMAGE_FEATURES') 60 'Test requires package-management to be in IMAGE_FEATURES')
45 @skipIfNotDataVar('IMAGE_PKGTYPE', 'deb', 61 @skipIfNotDataVar('IMAGE_PKGTYPE', 'deb',
46 'DEB is not the primary package manager') 62 'DEB is not the primary package manager')
47 @OEHasPackage(['apt']) 63 @OEHasPackage(['apt'])
48 def test_apt_install_from_repo(self): 64 def test_apt_install_from_repo(self):
49 self.setup_source_config_for_package_install() 65 if not self.tc.td.get('PACKAGE_FEED_GPG_NAME'):
50 self.pkg('update') 66 self.setup_source_config_for_package_install()
51 self.pkg('remove --yes run-postinsts-dev') 67 self.pkg('update')
52 self.pkg('install --yes --allow-unauthenticated run-postinsts-dev') 68 self.pkg('remove --yes run-postinsts-dev')
53 self.cleanup_source_config_for_package_install() 69 self.pkg('install --yes --allow-unauthenticated run-postinsts-dev')
70 self.cleanup_source_config_for_package_install()
71 else:
72 # when we are here a key has been set to sign the package feed and
73 # public key and gnupg installed on the image by test_testimage_apt
74 self.setup_source_config_for_package_install_signed()
75 self.setup_key()
76 self.pkg('update')
77 self.pkg('install --yes run-postinsts-dev')
78 self.pkg('remove --yes run-postinsts-dev')
79 self.cleanup_source_config_for_package_install_signed()
diff --git a/meta/lib/oeqa/runtime/cases/boot.py b/meta/lib/oeqa/runtime/cases/boot.py
index 2142f400a0..dcee3311f7 100644
--- a/meta/lib/oeqa/runtime/cases/boot.py
+++ b/meta/lib/oeqa/runtime/cases/boot.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -13,7 +15,7 @@ from oeqa.core.decorator.data import skipIfQemu
13class BootTest(OERuntimeTestCase): 15class BootTest(OERuntimeTestCase):
14 16
15 @OETimeout(120) 17 @OETimeout(120)
16 @skipIfQemu('qemuall', 'Test only runs on real hardware') 18 @skipIfQemu()
17 @OETestDepends(['ssh.SSHTest.test_ssh']) 19 @OETestDepends(['ssh.SSHTest.test_ssh'])
18 def test_reboot(self): 20 def test_reboot(self):
19 output = '' 21 output = ''
diff --git a/meta/lib/oeqa/runtime/cases/buildcpio.py b/meta/lib/oeqa/runtime/cases/buildcpio.py
index e29bf16ccb..0c9c57a3cb 100644
--- a/meta/lib/oeqa/runtime/cases/buildcpio.py
+++ b/meta/lib/oeqa/runtime/cases/buildcpio.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -12,7 +14,7 @@ class BuildCpioTest(OERuntimeTestCase):
12 14
13 @classmethod 15 @classmethod
14 def setUpClass(cls): 16 def setUpClass(cls):
15 uri = 'https://downloads.yoctoproject.org/mirror/sources/cpio-2.13.tar.gz' 17 uri = 'https://downloads.yoctoproject.org/mirror/sources/cpio-2.15.tar.gz'
16 cls.project = TargetBuildProject(cls.tc.target, 18 cls.project = TargetBuildProject(cls.tc.target,
17 uri, 19 uri,
18 dl_dir = cls.tc.td['DL_DIR']) 20 dl_dir = cls.tc.td['DL_DIR'])
@@ -27,7 +29,6 @@ class BuildCpioTest(OERuntimeTestCase):
27 @OEHasPackage(['autoconf']) 29 @OEHasPackage(['autoconf'])
28 def test_cpio(self): 30 def test_cpio(self):
29 self.project.download_archive() 31 self.project.download_archive()
30 self.project.run_configure('--disable-maintainer-mode', 32 self.project.run_configure(configure_args="CFLAGS='-std=gnu17 -Dbool=int -Dtrue=1 -Dfalse=0 -Wno-error=implicit-function-declaration'")
31 'sed -i -e "/char \*program_name/d" src/global.c;') 33 self.project.run_make(make_args="CFLAGS='-std=gnu17 -Dbool=int -Dtrue=1 -Dfalse=0 -Wno-error=implicit-function-declaration'")
32 self.project.run_make()
33 self.project.run_install() 34 self.project.run_install()
diff --git a/meta/lib/oeqa/runtime/cases/buildgalculator.py b/meta/lib/oeqa/runtime/cases/buildgalculator.py
index e5cc3e2888..2cfb3243dc 100644
--- a/meta/lib/oeqa/runtime/cases/buildgalculator.py
+++ b/meta/lib/oeqa/runtime/cases/buildgalculator.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/buildlzip.py b/meta/lib/oeqa/runtime/cases/buildlzip.py
index bc70b41461..921a0bca61 100644
--- a/meta/lib/oeqa/runtime/cases/buildlzip.py
+++ b/meta/lib/oeqa/runtime/cases/buildlzip.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -13,7 +15,7 @@ class BuildLzipTest(OERuntimeTestCase):
13 @classmethod 15 @classmethod
14 def setUpClass(cls): 16 def setUpClass(cls):
15 uri = 'http://downloads.yoctoproject.org/mirror/sources' 17 uri = 'http://downloads.yoctoproject.org/mirror/sources'
16 uri = '%s/lzip-1.19.tar.gz' % uri 18 uri = '%s/lzip-1.25.tar.gz' % uri
17 cls.project = TargetBuildProject(cls.tc.target, 19 cls.project = TargetBuildProject(cls.tc.target,
18 uri, 20 uri,
19 dl_dir = cls.tc.td['DL_DIR']) 21 dl_dir = cls.tc.td['DL_DIR'])
diff --git a/meta/lib/oeqa/runtime/cases/connman.py b/meta/lib/oeqa/runtime/cases/connman.py
index f0d15fac9b..a488752e3f 100644
--- a/meta/lib/oeqa/runtime/cases/connman.py
+++ b/meta/lib/oeqa/runtime/cases/connman.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/date.py b/meta/lib/oeqa/runtime/cases/date.py
index fdd2a6ae58..a2523de67a 100644
--- a/meta/lib/oeqa/runtime/cases/date.py
+++ b/meta/lib/oeqa/runtime/cases/date.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -13,12 +15,12 @@ class DateTest(OERuntimeTestCase):
13 def setUp(self): 15 def setUp(self):
14 if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd': 16 if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd':
15 self.logger.debug('Stopping systemd-timesyncd daemon') 17 self.logger.debug('Stopping systemd-timesyncd daemon')
16 self.target.run('systemctl disable --now systemd-timesyncd') 18 self.target.run('systemctl disable --now --runtime systemd-timesyncd')
17 19
18 def tearDown(self): 20 def tearDown(self):
19 if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd': 21 if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd':
20 self.logger.debug('Starting systemd-timesyncd daemon') 22 self.logger.debug('Starting systemd-timesyncd daemon')
21 self.target.run('systemctl enable --now systemd-timesyncd') 23 self.target.run('systemctl enable --now --runtime systemd-timesyncd')
22 24
23 @OETestDepends(['ssh.SSHTest.test_ssh']) 25 @OETestDepends(['ssh.SSHTest.test_ssh'])
24 @OEHasPackage(['coreutils', 'busybox']) 26 @OEHasPackage(['coreutils', 'busybox'])
@@ -28,14 +30,13 @@ class DateTest(OERuntimeTestCase):
28 self.assertEqual(status, 0, msg=msg) 30 self.assertEqual(status, 0, msg=msg)
29 oldDate = output 31 oldDate = output
30 32
31 sampleDate = '"2016-08-09 10:00:00"' 33 sampleTimestamp = 1488800000
32 (status, output) = self.target.run("date -s %s" % sampleDate) 34 (status, output) = self.target.run("date -s @%d" % sampleTimestamp)
33 self.assertEqual(status, 0, msg='Date set failed, output: %s' % output) 35 self.assertEqual(status, 0, msg='Date set failed, output: %s' % output)
34 36
35 (status, output) = self.target.run("date -R") 37 (status, output) = self.target.run('date +"%s"')
36 p = re.match('Tue, 09 Aug 2016 10:00:.. \+0000', output)
37 msg = 'The date was not set correctly, output: %s' % output 38 msg = 'The date was not set correctly, output: %s' % output
38 self.assertTrue(p, msg=msg) 39 self.assertTrue(int(output) - sampleTimestamp < 300, msg=msg)
39 40
40 (status, output) = self.target.run('date -s "%s"' % oldDate) 41 (status, output) = self.target.run('date -s "%s"' % oldDate)
41 msg = 'Failed to reset date, output: %s' % output 42 msg = 'Failed to reset date, output: %s' % output
diff --git a/meta/lib/oeqa/runtime/cases/df.py b/meta/lib/oeqa/runtime/cases/df.py
index bb155c9cf9..43e0ebf9ea 100644
--- a/meta/lib/oeqa/runtime/cases/df.py
+++ b/meta/lib/oeqa/runtime/cases/df.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/dnf.py b/meta/lib/oeqa/runtime/cases/dnf.py
index f40c63026e..3ccb18ce83 100644
--- a/meta/lib/oeqa/runtime/cases/dnf.py
+++ b/meta/lib/oeqa/runtime/cases/dnf.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -73,48 +75,43 @@ class DnfRepoTest(DnfTest):
73 def test_dnf_makecache(self): 75 def test_dnf_makecache(self):
74 self.dnf_with_repo('makecache') 76 self.dnf_with_repo('makecache')
75 77
76
77# Does not work when repo is specified on the command line
78# @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache'])
79# def test_dnf_repolist(self):
80# self.dnf_with_repo('repolist')
81
82 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache']) 78 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache'])
83 def test_dnf_repoinfo(self): 79 def test_dnf_repoinfo(self):
84 self.dnf_with_repo('repoinfo') 80 self.dnf_with_repo('repoinfo')
85 81
86 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache']) 82 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache'])
87 def test_dnf_install(self): 83 def test_dnf_install(self):
88 output = self.dnf_with_repo('list run-postinsts-dev') 84 self.dnf_with_repo('remove -y dnf-test-*')
89 if 'Installed Packages' in output: 85 self.dnf_with_repo('install -y dnf-test-dep')
90 self.dnf_with_repo('remove -y run-postinsts-dev')
91 self.dnf_with_repo('install -y run-postinsts-dev')
92 86
93 @OETestDepends(['dnf.DnfRepoTest.test_dnf_install']) 87 @OETestDepends(['dnf.DnfRepoTest.test_dnf_install'])
94 def test_dnf_install_dependency(self): 88 def test_dnf_install_dependency(self):
95 self.dnf_with_repo('remove -y run-postinsts') 89 self.dnf_with_repo('remove -y dnf-test-*')
96 self.dnf_with_repo('install -y run-postinsts-dev') 90 self.dnf_with_repo('install -y dnf-test-main')
91 output = self.dnf('list --installed dnf-test-*')
92 self.assertIn("dnf-test-main.", output)
93 self.assertIn("dnf-test-dep.", output)
97 94
98 @OETestDepends(['dnf.DnfRepoTest.test_dnf_install_dependency']) 95 @OETestDepends(['dnf.DnfRepoTest.test_dnf_install_dependency'])
99 def test_dnf_install_from_disk(self): 96 def test_dnf_install_from_disk(self):
100 self.dnf_with_repo('remove -y run-postinsts-dev') 97 self.dnf_with_repo('remove -y dnf-test-dep')
101 self.dnf_with_repo('install -y --downloadonly run-postinsts-dev') 98 self.dnf_with_repo('install -y --downloadonly dnf-test-dep')
102 status, output = self.target.run('find /var/cache/dnf -name run-postinsts-dev*rpm', 1500) 99 status, output = self.target.run('find /var/cache/dnf -name dnf-test-dep*rpm')
103 self.assertEqual(status, 0, output) 100 self.assertEqual(status, 0, output)
104 self.dnf_with_repo('install -y %s' % output) 101 self.dnf_with_repo('install -y %s' % output)
105 102
106 @OETestDepends(['dnf.DnfRepoTest.test_dnf_install_from_disk']) 103 @OETestDepends(['dnf.DnfRepoTest.test_dnf_install_from_disk'])
107 def test_dnf_install_from_http(self): 104 def test_dnf_install_from_http(self):
108 output = subprocess.check_output('%s %s -name run-postinsts-dev*' % (bb.utils.which(os.getenv('PATH'), "find"), 105 output = subprocess.check_output('%s %s -name dnf-test-dep*' % (bb.utils.which(os.getenv('PATH'), "find"),
109 os.path.join(self.tc.td['WORKDIR'], 'oe-testimage-repo')), shell=True).decode("utf-8") 106 os.path.join(self.tc.td['WORKDIR'], 'oe-testimage-repo')), shell=True).decode("utf-8")
110 rpm_path = output.split("/")[-2] + "/" + output.split("/")[-1] 107 rpm_path = output.split("/")[-2] + "/" + output.split("/")[-1]
111 url = 'http://%s:%s/%s' %(self.target.server_ip, self.repo_server.port, rpm_path) 108 url = 'http://%s:%s/%s' %(self.target.server_ip, self.repo_server.port, rpm_path)
112 self.dnf_with_repo('remove -y run-postinsts-dev') 109 self.dnf_with_repo('remove -y dnf-test-dep')
113 self.dnf_with_repo('install -y %s' % url) 110 self.dnf_with_repo('install -y %s' % url)
114 111
115 @OETestDepends(['dnf.DnfRepoTest.test_dnf_install']) 112 @OETestDepends(['dnf.DnfRepoTest.test_dnf_install'])
116 def test_dnf_reinstall(self): 113 def test_dnf_reinstall(self):
117 self.dnf_with_repo('reinstall -y run-postinsts-dev') 114 self.dnf_with_repo('reinstall -y dnf-test-main')
118 115
119 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache']) 116 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache'])
120 @skipIfInDataVar('DISTRO_FEATURES', 'usrmerge', 'Test run when not enable usrmerge') 117 @skipIfInDataVar('DISTRO_FEATURES', 'usrmerge', 'Test run when not enable usrmerge')
@@ -137,55 +134,40 @@ class DnfRepoTest(DnfTest):
137 self.target.run('cp -r /etc/dnf %s/etc' % rootpath, 1500) 134 self.target.run('cp -r /etc/dnf %s/etc' % rootpath, 1500)
138 self.target.run('cp /bin/sh %s/bin' % rootpath, 1500) 135 self.target.run('cp /bin/sh %s/bin' % rootpath, 1500)
139 self.target.run('mount -o bind /dev %s/dev/' % rootpath, 1500) 136 self.target.run('mount -o bind /dev %s/dev/' % rootpath, 1500)
140 self.dnf_with_repo('install --installroot=%s -v -y --rpmverbosity=debug busybox run-postinsts' % rootpath) 137 self.dnf_with_repo('install --installroot=%s -v -y --rpmverbosity=debug busybox' % rootpath)
141 status, output = self.target.run('test -e %s/var/cache/dnf' % rootpath, 1500) 138 status, output = self.target.run('test -e %s/var/cache/dnf' % rootpath, 1500)
142 self.assertEqual(0, status, output) 139 self.assertEqual(0, status, output)
143 status, output = self.target.run('test -e %s/bin/busybox' % rootpath, 1500) 140 status, output = self.target.run('test -e %s/bin/busybox' % rootpath, 1500)
144 self.assertEqual(0, status, output) 141 self.assertEqual(0, status, output)
145 142
146 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache']) 143 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache'])
147 @skipIfNotInDataVar('DISTRO_FEATURES', 'usrmerge', 'Test run when enable usrmege') 144 @skipIfNotInDataVar('DISTRO_FEATURES', 'usrmerge', 'Test run when enable usrmerge')
148 @OEHasPackage('busybox') 145 @OEHasPackage('busybox')
149 def test_dnf_installroot_usrmerge(self): 146 def test_dnf_installroot_usrmerge(self):
150 rootpath = '/home/root/chroot/test' 147 rootpath = '/home/root/chroot/test'
151 #Copy necessary files to avoid errors with not yet installed tools on 148 #Copy necessary files to avoid errors with not yet installed tools on
152 #installroot directory. 149 #installroot directory.
153 self.target.run('mkdir -p %s/etc' % rootpath, 1500) 150 self.target.run('mkdir -p %s/etc' % rootpath)
154 self.target.run('mkdir -p %s/usr/bin %s/usr/sbin' % (rootpath, rootpath), 1500) 151 self.target.run('mkdir -p %s/usr/bin %s/usr/sbin' % (rootpath, rootpath))
155 self.target.run('ln -sf -r %s/usr/bin %s/bin' % (rootpath, rootpath), 1500) 152 self.target.run('ln -sf usr/bin %s/bin' % (rootpath))
156 self.target.run('ln -sf -r %s/usr/sbin %s/sbin' % (rootpath, rootpath), 1500) 153 self.target.run('ln -sf usr/sbin %s/sbin' % (rootpath))
157 self.target.run('mkdir -p %s/dev' % rootpath, 1500) 154 self.target.run('mkdir -p %s/dev' % rootpath)
158 #Handle different architectures lib dirs 155 #Handle different architectures lib dirs
159 self.target.run('mkdir -p %s/usr/lib' % rootpath, 1500) 156 self.target.run("for l in /lib*; do mkdir -p %s/usr/$l; ln -s usr/$l %s/$l; done" % (rootpath, rootpath))
160 self.target.run('mkdir -p %s/usr/libx32' % rootpath, 1500) 157 self.target.run('cp -r /etc/rpm %s/etc' % rootpath)
161 self.target.run('mkdir -p %s/usr/lib64' % rootpath, 1500) 158 self.target.run('cp -r /etc/dnf %s/etc' % rootpath)
162 self.target.run('cp /lib/libtinfo.so.5 %s/usr/lib' % rootpath, 1500) 159 self.target.run('cp /bin/busybox %s/bin/sh' % rootpath)
163 self.target.run('cp /libx32/libtinfo.so.5 %s/usr/libx32' % rootpath, 1500) 160 self.target.run('mount -o bind /dev %s/dev/' % rootpath)
164 self.target.run('cp /lib64/libtinfo.so.5 %s/usr/lib64' % rootpath, 1500) 161 self.dnf_with_repo('install --installroot=%s -v -y --rpmverbosity=debug busybox' % rootpath)
165 self.target.run('ln -sf -r %s/lib %s/usr/lib' % (rootpath,rootpath), 1500) 162 status, output = self.target.run('test -e %s/var/cache/dnf' % rootpath)
166 self.target.run('ln -sf -r %s/libx32 %s/usr/libx32' % (rootpath,rootpath), 1500)
167 self.target.run('ln -sf -r %s/lib64 %s/usr/lib64' % (rootpath,rootpath), 1500)
168 self.target.run('cp -r /etc/rpm %s/etc' % rootpath, 1500)
169 self.target.run('cp -r /etc/dnf %s/etc' % rootpath, 1500)
170 self.target.run('cp /bin/sh %s/bin' % rootpath, 1500)
171 self.target.run('mount -o bind /dev %s/dev/' % rootpath, 1500)
172 self.dnf_with_repo('install --installroot=%s -v -y --rpmverbosity=debug busybox run-postinsts' % rootpath)
173 status, output = self.target.run('test -e %s/var/cache/dnf' % rootpath, 1500)
174 self.assertEqual(0, status, output) 163 self.assertEqual(0, status, output)
175 status, output = self.target.run('test -e %s/bin/busybox' % rootpath, 1500) 164 status, output = self.target.run('test -e %s/bin/busybox' % rootpath)
176 self.assertEqual(0, status, output) 165 self.assertEqual(0, status, output)
177 166
178 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache']) 167 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache'])
179 def test_dnf_exclude(self): 168 def test_dnf_exclude(self):
180 excludepkg = 'curl-dev' 169 self.dnf_with_repo('remove -y dnf-test-*')
181 self.dnf_with_repo('install -y curl*') 170 self.dnf_with_repo('install -y --exclude=dnf-test-dep dnf-test-*')
182 self.dnf('list %s' % excludepkg, 0) 171 output = self.dnf('list --installed dnf-test-*')
183 #Avoid remove dependencies to skip some errors on different archs and images 172 self.assertIn("dnf-test-main.", output)
184 self.dnf_with_repo('remove --setopt=clean_requirements_on_remove=0 -y curl*') 173 self.assertNotIn("dnf-test-dev.", output)
185 #check curl-dev is not installed adter removing all curl occurrences
186 status, output = self.target.run('dnf list --installed | grep %s'% excludepkg, 1500)
187 self.assertEqual(1, status, "%s was not removed, is listed as installed"%excludepkg)
188 self.dnf_with_repo('install -y --exclude=%s --exclude=curl-staticdev curl*' % excludepkg)
189 #check curl-dev is not installed after being excluded
190 status, output = self.target.run('dnf list --installed | grep %s'% excludepkg , 1500)
191 self.assertEqual(1, status, "%s was not excluded, is listed as installed"%excludepkg)
diff --git a/meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py b/meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py
index e010612838..c3be60f006 100644
--- a/meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py
+++ b/meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py
@@ -1,28 +1,15 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
1from oeqa.runtime.case import OERuntimeTestCase 6from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.core.decorator.depends import OETestDepends 7from oeqa.core.decorator.depends import OETestDepends
3from oeqa.core.decorator.data import skipIfQemu 8from oeqa.core.decorator.data import skipIfQemu
4 9
5class Ethernet_Test(OERuntimeTestCase): 10class Ethernet_Test(OERuntimeTestCase):
6 11
7 def set_ip(self, x): 12 @skipIfQemu()
8 x = x.split(".")
9 sample_host_address = '150'
10 x[3] = sample_host_address
11 x = '.'.join(x)
12 return x
13
14 @skipIfQemu('qemuall', 'Test only runs on real hardware')
15 @OETestDepends(['ssh.SSHTest.test_ssh'])
16 def test_set_virtual_ip(self):
17 (status, output) = self.target.run("ifconfig eth0 | grep 'inet ' | awk '{print $2}'")
18 self.assertEqual(status, 0, msg='Failed to get ip address. Make sure you have an ethernet connection on your device, output: %s' % output)
19 original_ip = output
20 virtual_ip = self.set_ip(original_ip)
21
22 (status, output) = self.target.run("ifconfig eth0:1 %s netmask 255.255.255.0 && sleep 2 && ping -c 5 %s && ifconfig eth0:1 down" % (virtual_ip,virtual_ip))
23 self.assertEqual(status, 0, msg='Failed to create virtual ip address, output: %s' % output)
24
25 @OETestDepends(['ethernet_ip_connman.Ethernet_Test.test_set_virtual_ip'])
26 def test_get_ip_from_dhcp(self): 13 def test_get_ip_from_dhcp(self):
27 (status, output) = self.target.run("connmanctl services | grep -E '*AO Wired|*AR Wired' | awk '{print $3}'") 14 (status, output) = self.target.run("connmanctl services | grep -E '*AO Wired|*AR Wired' | awk '{print $3}'")
28 self.assertEqual(status, 0, msg='No wired interfaces are detected, output: %s' % output) 15 self.assertEqual(status, 0, msg='No wired interfaces are detected, output: %s' % output)
@@ -33,4 +20,4 @@ class Ethernet_Test(OERuntimeTestCase):
33 default_gateway = output 20 default_gateway = output
34 21
35 (status, output) = self.target.run("connmanctl config %s --ipv4 dhcp && sleep 2 && ping -c 5 %s" % (wired_interfaces,default_gateway)) 22 (status, output) = self.target.run("connmanctl config %s --ipv4 dhcp && sleep 2 && ping -c 5 %s" % (wired_interfaces,default_gateway))
36 self.assertEqual(status, 0, msg='Failed to get dynamic IP address via DHCP in connmand, output: %s' % output) \ No newline at end of file 23 self.assertEqual(status, 0, msg='Failed to get dynamic IP address via DHCP in connmand, output: %s' % output)
diff --git a/meta/lib/oeqa/runtime/cases/gcc.py b/meta/lib/oeqa/runtime/cases/gcc.py
index 1b6e431bf4..17b1483e8d 100644
--- a/meta/lib/oeqa/runtime/cases/gcc.py
+++ b/meta/lib/oeqa/runtime/cases/gcc.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/gi.py b/meta/lib/oeqa/runtime/cases/gi.py
index 42bd100a31..78c7ddda2c 100644
--- a/meta/lib/oeqa/runtime/cases/gi.py
+++ b/meta/lib/oeqa/runtime/cases/gi.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/go.py b/meta/lib/oeqa/runtime/cases/go.py
new file mode 100644
index 0000000000..39a80f4dca
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/go.py
@@ -0,0 +1,21 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.runtime.case import OERuntimeTestCase
8from oeqa.core.decorator.depends import OETestDepends
9from oeqa.runtime.decorator.package import OEHasPackage
10
11class GoHelloworldTest(OERuntimeTestCase):
12 @OETestDepends(['ssh.SSHTest.test_ssh'])
13 @OEHasPackage(['go-helloworld'])
14 def test_gohelloworld(self):
15 cmd = "go-helloworld"
16 status, output = self.target.run(cmd)
17 msg = 'Exit status was not 0. Output: %s' % output
18 self.assertEqual(status, 0, msg=msg)
19
20 msg = 'Incorrect output: %s' % output
21 self.assertEqual(output, "Hello, world!", msg=msg)
diff --git a/meta/lib/oeqa/runtime/cases/gstreamer.py b/meta/lib/oeqa/runtime/cases/gstreamer.py
index f735f82e3b..2295769cfd 100644
--- a/meta/lib/oeqa/runtime/cases/gstreamer.py
+++ b/meta/lib/oeqa/runtime/cases/gstreamer.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/kernelmodule.py b/meta/lib/oeqa/runtime/cases/kernelmodule.py
index 47fd2f850c..9c42fcc586 100644
--- a/meta/lib/oeqa/runtime/cases/kernelmodule.py
+++ b/meta/lib/oeqa/runtime/cases/kernelmodule.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/ksample.py b/meta/lib/oeqa/runtime/cases/ksample.py
index a9a1620ebd..b6848762e3 100644
--- a/meta/lib/oeqa/runtime/cases/ksample.py
+++ b/meta/lib/oeqa/runtime/cases/ksample.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -10,7 +12,7 @@ from oeqa.core.decorator.depends import OETestDepends
10from oeqa.core.decorator.data import skipIfNotFeature 12from oeqa.core.decorator.data import skipIfNotFeature
11 13
12# need some kernel fragments 14# need some kernel fragments
13# echo "KERNEL_FEATURES_append += \" features\/kernel\-sample\/kernel\-sample.scc\"" >> local.conf 15# echo "KERNEL_FEATURES:append = \" features\/kernel\-sample\/kernel\-sample.scc\"" >> local.conf
14class KSample(OERuntimeTestCase): 16class KSample(OERuntimeTestCase):
15 def cmd_and_check(self, cmd='', match_string=''): 17 def cmd_and_check(self, cmd='', match_string=''):
16 status, output = self.target.run(cmd) 18 status, output = self.target.run(cmd)
diff --git a/meta/lib/oeqa/runtime/cases/ldd.py b/meta/lib/oeqa/runtime/cases/ldd.py
index 9c2caa8f65..f6841c6675 100644
--- a/meta/lib/oeqa/runtime/cases/ldd.py
+++ b/meta/lib/oeqa/runtime/cases/ldd.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/login.py b/meta/lib/oeqa/runtime/cases/login.py
new file mode 100644
index 0000000000..e1bc60d49b
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/login.py
@@ -0,0 +1,116 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import shutil
8import subprocess
9import tempfile
10import time
11import os
12from datetime import datetime
13from oeqa.runtime.case import OERuntimeTestCase
14from oeqa.runtime.decorator.package import OEHasPackage
15
16### Status of qemu images.
17# - runqemu qemuppc64 comes up blank. (skip)
 18# - qemuarmv5 comes up with multiple heads, but sending "head" to screendump
 19#   seems to create a png with a bad header? (skip for now, but come back to fix)
20# - qemuriscv32 and qemuloongarch64 doesn't work with testimage apparently? (skip)
21# - qemumips64 is missing mouse icon.
22# - qemumips takes forever to render and is missing mouse icon.
23# - qemuarm and qemuppc are odd as they don't resize so we need to just set width.
24# - All images have home and screen flipper icons not always rendered fully at first.
25# the sleep seems to help this out some, depending on machine load.
26###
27
28class LoginTest(OERuntimeTestCase):
29 @OEHasPackage(['matchbox-desktop', 'dbus-wait'])
30 def test_screenshot(self):
31 if self.td.get('MACHINE') in ("qemuppc64", "qemuarmv5", "qemuriscv32", "qemuriscv64", "qemuloongarch64"):
32 self.skipTest("{0} is not currently supported.".format(self.td.get('MACHINE')))
33
34 pn = self.td.get('PN')
35
36 ourenv = os.environ.copy()
37 origpath = self.td.get("ORIGPATH")
38 if origpath:
39 ourenv['PATH'] = ourenv['PATH'] + ":" + origpath
40
41 for cmd in ["identify.im7", "convert.im7", "compare.im7"]:
42 try:
43 subprocess.check_output(["which", cmd], env=ourenv)
44 except subprocess.CalledProcessError:
45 self.skipTest("%s (from imagemagick) not available" % cmd)
46
47
48 # Store images so we can debug them if needed
49 saved_screenshots_dir = self.td.get('T') + "/saved-screenshots/"
50
51 ###
52 # This is a really horrible way of doing this but I've not found the
53 # right event to determine "The system is loaded and screen is rendered"
54 #
55 # Using dbus-wait for matchbox is the wrong answer because while it
56 # ensures the system is up, it doesn't mean the screen is rendered.
57 #
58 # Checking the qmp socket doesn't work afaik either.
59 #
60 # One way to do this is to do compares of known good screendumps until
61 # we either get expected or close to expected or we time out. Part of the
62 # issue here with that is that there is a very fine difference in the
63 # diff between a screendump where the icons haven't loaded yet and
64 # one where they won't load. I'll look at that next, but, for now, this.
65 #
66 # Which is ugly and I hate it but it 'works' for various definitions of
67 # 'works'.
68 ###
69 # RP: if the signal is sent before we run this, it will never be seen and we'd timeout
70 #status, output = self.target.run('dbus-wait org.matchbox_project.desktop Loaded')
71 #if status != 0 or "Timeout" in output:
72 # self.fail('dbus-wait failed (%s, %s). This could mean that the image never loaded the matchbox desktop.' % (status, output))
73
74 # Start taking screenshots every 2 seconds until diff=0 or timeout is 60 seconds
75 timeout = time.time() + 60
76 diff = True
77 with tempfile.NamedTemporaryFile(prefix="oeqa-screenshot-login", suffix=".png") as t:
78 while diff != 0 and time.time() < timeout:
79 time.sleep(2)
80 ret = self.target.runner.run_monitor("screendump", args={"filename": t.name, "format":"png"})
81
82 # Find out size of image so we can determine where to blank out clock.
83 # qemuarm and qemuppc are odd as it doesn't resize the window and returns
84 # incorrect widths
85 if self.td.get('MACHINE') == "qemuarm" or self.td.get('MACHINE') == "qemuppc":
86 width = "640"
87 else:
88 cmd = "identify.im7 -ping -format '%w' {0}".format(t.name)
89 width = subprocess.check_output(cmd, shell=True, env=ourenv).decode()
90
91 rblank = int(float(width))
92 lblank = rblank-80
93
 94 # Use the meta-oe version of convert, along with its suffix. This blanks out the clock.
95 cmd = "convert.im7 {0} -fill white -draw 'rectangle {1},4 {2},28' {3}".format(t.name, str(rblank), str(lblank), t.name)
96 convert_out=subprocess.check_output(cmd, shell=True, env=ourenv).decode()
97
98 bb.utils.mkdirhier(saved_screenshots_dir)
99 savedfile = "{0}/saved-{1}-{2}-{3}.png".format(saved_screenshots_dir, \
100 datetime.timestamp(datetime.now()), \
101 pn, \
102 self.td.get('MACHINE'))
103 shutil.copy2(t.name, savedfile)
104
105 refimage = self.td.get('COREBASE') + "/meta/files/screenshot-tests/" + pn + "-" + self.td.get('MACHINE') +".png"
106 if not os.path.exists(refimage):
107 self.skipTest("No reference image for comparision (%s)" % refimage)
108
109 cmd = "compare.im7 -metric MSE {0} {1} /dev/null".format(t.name, refimage)
110 compare_out = subprocess.run(cmd, shell=True, capture_output=True, text=True, env=ourenv)
111 diff=float(compare_out.stderr.replace("(", "").replace(")","").split()[1])
112 if diff > 0:
113 # Keep a copy of the failed screenshot so we can see what happened.
114 self.fail("Screenshot diff is {0}. Failed image stored in {1}".format(str(diff), savedfile))
115 else:
116 self.assertEqual(0, diff, "Screenshot diff is {0}.".format(str(diff)))
diff --git a/meta/lib/oeqa/runtime/cases/logrotate.py b/meta/lib/oeqa/runtime/cases/logrotate.py
index a4efcd07c0..6ad980cb6a 100644
--- a/meta/lib/oeqa/runtime/cases/logrotate.py
+++ b/meta/lib/oeqa/runtime/cases/logrotate.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -17,7 +19,7 @@ class LogrotateTest(OERuntimeTestCase):
17 19
18 @classmethod 20 @classmethod
19 def tearDownClass(cls): 21 def tearDownClass(cls):
20 cls.tc.target.run('mv -f $HOME/wtmp.oeqabak /etc/logrotate.d/wtmp && rm -rf $HOME/logrotate_dir') 22 cls.tc.target.run('mv -f $HOME/wtmp.oeqabak /etc/logrotate.d/wtmp && rm -rf /var/log//logrotate_dir')
21 cls.tc.target.run('rm -rf /var/log/logrotate_testfile && rm -rf /etc/logrotate.d/logrotate_testfile') 23 cls.tc.target.run('rm -rf /var/log/logrotate_testfile && rm -rf /etc/logrotate.d/logrotate_testfile')
22 24
23 @OETestDepends(['ssh.SSHTest.test_ssh']) 25 @OETestDepends(['ssh.SSHTest.test_ssh'])
@@ -29,17 +31,17 @@ class LogrotateTest(OERuntimeTestCase):
29 msg = ('Could not create/update /var/log/wtmp with touch') 31 msg = ('Could not create/update /var/log/wtmp with touch')
30 self.assertEqual(status, 0, msg = msg) 32 self.assertEqual(status, 0, msg = msg)
31 33
32 status, output = self.target.run('mkdir $HOME/logrotate_dir') 34 status, output = self.target.run('mkdir /var/log//logrotate_dir')
33 msg = ('Could not create logrotate_dir. Output: %s' % output) 35 msg = ('Could not create logrotate_dir. Output: %s' % output)
34 self.assertEqual(status, 0, msg = msg) 36 self.assertEqual(status, 0, msg = msg)
35 37
36 status, output = self.target.run('echo "create \n olddir $HOME/logrotate_dir \n include /etc/logrotate.d/wtmp" > /tmp/logrotate-test.conf') 38 status, output = self.target.run('echo "create \n olddir /var/log//logrotate_dir \n include /etc/logrotate.d/wtmp" > /tmp/logrotate-test.conf')
37 msg = ('Could not write to /tmp/logrotate-test.conf') 39 msg = ('Could not write to /tmp/logrotate-test.conf')
38 self.assertEqual(status, 0, msg = msg) 40 self.assertEqual(status, 0, msg = msg)
39 41
40 # If logrotate fails to rotate the log, view the verbose output of logrotate to see what prevented it 42 # If logrotate fails to rotate the log, view the verbose output of logrotate to see what prevented it
41 _, logrotate_output = self.target.run('logrotate -vf /tmp/logrotate-test.conf') 43 _, logrotate_output = self.target.run('logrotate -vf /tmp/logrotate-test.conf')
42 status, _ = self.target.run('find $HOME/logrotate_dir -type f | grep wtmp.1') 44 status, _ = self.target.run('find /var/log//logrotate_dir -type f | grep wtmp.1')
43 msg = ("logrotate did not successfully rotate the wtmp log. Output from logrotate -vf: \n%s" % (logrotate_output)) 45 msg = ("logrotate did not successfully rotate the wtmp log. Output from logrotate -vf: \n%s" % (logrotate_output))
44 self.assertEqual(status, 0, msg = msg) 46 self.assertEqual(status, 0, msg = msg)
45 47
@@ -54,17 +56,17 @@ class LogrotateTest(OERuntimeTestCase):
54 msg = ('Could not write to /etc/logrotate.d/logrotate_testfile') 56 msg = ('Could not write to /etc/logrotate.d/logrotate_testfile')
55 self.assertEqual(status, 0, msg = msg) 57 self.assertEqual(status, 0, msg = msg)
56 58
57 status, output = self.target.run('echo "create \n olddir $HOME/logrotate_dir \n include /etc/logrotate.d/logrotate_testfile" > /tmp/logrotate-test2.conf') 59 status, output = self.target.run('echo "create \n olddir /var/log//logrotate_dir \n include /etc/logrotate.d/logrotate_testfile" > /tmp/logrotate-test2.conf')
58 msg = ('Could not write to /tmp/logrotate_test2.conf') 60 msg = ('Could not write to /tmp/logrotate_test2.conf')
59 self.assertEqual(status, 0, msg = msg) 61 self.assertEqual(status, 0, msg = msg)
60 62
61 status, output = self.target.run('find $HOME/logrotate_dir -type f | grep logrotate_testfile.1') 63 status, output = self.target.run('find /var/log//logrotate_dir -type f | grep logrotate_testfile.1')
62 msg = ('A rotated log for logrotate_testfile is already present in logrotate_dir') 64 msg = ('A rotated log for logrotate_testfile is already present in logrotate_dir')
63 self.assertEqual(status, 1, msg = msg) 65 self.assertEqual(status, 1, msg = msg)
64 66
65 # If logrotate fails to rotate the log, view the verbose output of logrotate instead of just listing the files in olddir 67 # If logrotate fails to rotate the log, view the verbose output of logrotate instead of just listing the files in olddir
66 _, logrotate_output = self.target.run('logrotate -vf /tmp/logrotate-test2.conf') 68 _, logrotate_output = self.target.run('logrotate -vf /tmp/logrotate-test2.conf')
67 status, _ = self.target.run('find $HOME/logrotate_dir -type f | grep logrotate_testfile.1') 69 status, _ = self.target.run('find /var/log//logrotate_dir -type f | grep logrotate_testfile.1')
68 msg = ('logrotate did not successfully rotate the logrotate_test log. Output from logrotate -vf: \n%s' % (logrotate_output)) 70 msg = ('logrotate did not successfully rotate the logrotate_test log. Output from logrotate -vf: \n%s' % (logrotate_output))
69 self.assertEqual(status, 0, msg = msg) 71 self.assertEqual(status, 0, msg = msg)
70 72
diff --git a/meta/lib/oeqa/runtime/cases/ltp.py b/meta/lib/oeqa/runtime/cases/ltp.py
index a66d5d13d7..e81360670c 100644
--- a/meta/lib/oeqa/runtime/cases/ltp.py
+++ b/meta/lib/oeqa/runtime/cases/ltp.py
@@ -57,37 +57,47 @@ class LtpTestBase(OERuntimeTestCase):
57 57
58class LtpTest(LtpTestBase): 58class LtpTest(LtpTestBase):
59 59
60 ltp_groups = ["math", "syscalls", "dio", "io", "mm", "ipc", "sched", "nptl", "pty", "containers", "controllers", "filecaps", "cap_bounds", "fcntl-locktests", "connectors", "commands", "net.ipv6_lib", "input","fs_perms_simple"] 60 ltp_groups = ["math", "syscalls", "dio", "mm", "ipc", "sched", "nptl", "pty", "containers", "controllers", "fcntl-locktests", "commands", "net.ipv6_lib", "input","fs_perms_simple", "cve", "crypto", "ima", "net.nfs", "net_stress.ipsec_icmp", "net.ipv6", "numa", "uevent", "ltp-aiodio.part1", "ltp-aiodio.part2", "ltp-aiodio.part3", "ltp-aiodio.part4"]
61 61
62 ltp_fs = ["fs", "fsx", "fs_bind"] 62 ltp_fs = ["fs", "fs_bind"]
63 # skip kernel cpuhotplug 63 # skip kernel cpuhotplug
64 ltp_kernel = ["power_management_tests", "hyperthreading ", "kernel_misc", "hugetlb"] 64 ltp_kernel = ["power_management_tests", "hyperthreading ", "kernel_misc", "hugetlb"]
65 ltp_groups += ltp_fs 65 ltp_groups += ltp_fs
66 66
67 def runltp(self, ltp_group): 67 def runltp(self, ltp_group):
68 cmd = '/opt/ltp/runltp -f %s -p -q -r /opt/ltp -l /opt/ltp/results/%s -I 1 -d /opt/ltp' % (ltp_group, ltp_group) 68 # LTP appends to log files, so ensure we start with a clean log
69 self.target.deleteFiles("/opt/ltp/results/", ltp_group)
70
71 cmd = '/opt/ltp/runltp -f %s -q -r /opt/ltp -l /opt/ltp/results/%s -I 1 -d /opt/ltp' % (ltp_group, ltp_group)
72
69 starttime = time.time() 73 starttime = time.time()
70 (status, output) = self.target.run(cmd) 74 (status, output) = self.target.run(cmd, timeout=1200)
71 endtime = time.time() 75 endtime = time.time()
72 76
77 # status of 1 is 'just' tests failing. 255 likely was a command output timeout
78 if status and status != 1:
79 msg = 'Command %s returned exit code %s' % (cmd, status)
80 self.target.logger.warning(msg)
81
82 # Write the console log to disk for convenience
73 with open(os.path.join(self.ltptest_log_dir, "%s-raw.log" % ltp_group), 'w') as f: 83 with open(os.path.join(self.ltptest_log_dir, "%s-raw.log" % ltp_group), 'w') as f:
74 f.write(output) 84 f.write(output)
75 85
86 # Also put the console log into the test result JSON
76 self.extras['ltpresult.rawlogs']['log'] = self.extras['ltpresult.rawlogs']['log'] + output 87 self.extras['ltpresult.rawlogs']['log'] = self.extras['ltpresult.rawlogs']['log'] + output
77 88
78 # copy nice log from DUT 89 # Copy the machine-readable test results locally so we can parse it
79 dst = os.path.join(self.ltptest_log_dir, "%s" % ltp_group ) 90 dst = os.path.join(self.ltptest_log_dir, ltp_group)
80 remote_src = "/opt/ltp/results/%s" % ltp_group 91 remote_src = "/opt/ltp/results/%s" % ltp_group
81 (status, output) = self.target.copyFrom(remote_src, dst, True) 92 (status, output) = self.target.copyFrom(remote_src, dst, True)
82 msg = 'File could not be copied. Output: %s' % output
83 if status: 93 if status:
94 msg = 'File could not be copied. Output: %s' % output
84 self.target.logger.warning(msg) 95 self.target.logger.warning(msg)
85 96
86 parser = LtpParser() 97 parser = LtpParser()
87 results, sections = parser.parse(dst) 98 results, sections = parser.parse(dst)
88 99
89 runtime = int(endtime-starttime) 100 sections['duration'] = int(endtime-starttime)
90 sections['duration'] = runtime
91 self.sections[ltp_group] = sections 101 self.sections[ltp_group] = sections
92 102
93 failed_tests = {} 103 failed_tests = {}
diff --git a/meta/lib/oeqa/runtime/cases/ltp_stress.py b/meta/lib/oeqa/runtime/cases/ltp_stress.py
index 2445ffbc93..ce6f4bf59d 100644
--- a/meta/lib/oeqa/runtime/cases/ltp_stress.py
+++ b/meta/lib/oeqa/runtime/cases/ltp_stress.py
@@ -89,8 +89,7 @@ class LtpStressTest(LtpStressBase):
89 89
90 # LTP stress runtime tests 90 # LTP stress runtime tests
91 # 91 #
92 @skipIfQemu('qemuall', 'Test only runs on real hardware') 92 @skipIfQemu()
93
94 @OETestDepends(['ssh.SSHTest.test_ssh']) 93 @OETestDepends(['ssh.SSHTest.test_ssh'])
95 @OEHasPackage(["ltp"]) 94 @OEHasPackage(["ltp"])
96 def test_ltp_stress(self): 95 def test_ltp_stress(self):
diff --git a/meta/lib/oeqa/runtime/cases/maturin.py b/meta/lib/oeqa/runtime/cases/maturin.py
new file mode 100644
index 0000000000..4e6384fe5e
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/maturin.py
@@ -0,0 +1,58 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8
9from oeqa.runtime.case import OERuntimeTestCase
10from oeqa.core.decorator.depends import OETestDepends
11from oeqa.runtime.decorator.package import OEHasPackage
12
13
14class MaturinTest(OERuntimeTestCase):
15 @OETestDepends(['ssh.SSHTest.test_ssh', 'python.PythonTest.test_python3'])
16 @OEHasPackage(['python3-maturin'])
17 def test_maturin_list_python(self):
18 status, output = self.target.run("maturin list-python")
19 self.assertEqual(status, 0)
20 _, py_major = self.target.run("python3 -c 'import sys; print(sys.version_info.major)'")
21 _, py_minor = self.target.run("python3 -c 'import sys; print(sys.version_info.minor)'")
22 python_version = "%s.%s" % (py_major, py_minor)
23 self.assertEqual(output, "🐍 1 python interpreter found:\n"
24 " - CPython %s at /usr/bin/python%s" % (python_version, python_version))
25
26
27class MaturinDevelopTest(OERuntimeTestCase):
28 @classmethod
29 def setUp(cls):
30 dst = '/tmp'
31 src = os.path.join(cls.tc.files_dir, "maturin/guessing-game")
32 cls.tc.target.copyTo(src, dst)
33
34 @classmethod
35 def tearDown(cls):
36 cls.tc.target.run('rm -rf %s' % '/tmp/guessing-game/target')
37
38 @OETestDepends(['ssh.SSHTest.test_ssh', 'python.PythonTest.test_python3'])
39 @OEHasPackage(['python3-maturin'])
40 def test_maturin_develop(self):
41 """
42 This test case requires:
43 (1) that a .venv can been created.
44 (2) DNS nameserver to resolve crate URIs for fetching
45 (3) a functional 'rustc' and 'cargo'
46 """
47 targetdir = os.path.join("/tmp", "guessing-game")
48 self.target.run("cd %s; python3 -m venv .venv" % targetdir)
49 self.target.run("echo 'nameserver 8.8.8.8' > /etc/resolv.conf")
50 cmd = "cd %s; maturin develop" % targetdir
51 status, output = self.target.run(cmd)
52 self.assertRegex(output, r"🔗 Found pyo3 bindings with abi3 support for Python ≥ 3.8")
53 self.assertRegex(output, r"🐍 Not using a specific python interpreter")
54 self.assertRegex(output, r"📡 Using build options features from pyproject.toml")
55 self.assertRegex(output, r"Compiling guessing-game v0.1.0")
56 self.assertRegex(output, r"📦 Built wheel for abi3 Python ≥ 3.8")
57 self.assertRegex(output, r"🛠 Installed guessing-game-0.1.0")
58 self.assertEqual(status, 0)
diff --git a/meta/lib/oeqa/runtime/cases/multilib.py b/meta/lib/oeqa/runtime/cases/multilib.py
index 0d1b9ae2c9..68556e45c5 100644
--- a/meta/lib/oeqa/runtime/cases/multilib.py
+++ b/meta/lib/oeqa/runtime/cases/multilib.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/oe_syslog.py b/meta/lib/oeqa/runtime/cases/oe_syslog.py
index f3c2bedbaf..adb876160d 100644
--- a/meta/lib/oeqa/runtime/cases/oe_syslog.py
+++ b/meta/lib/oeqa/runtime/cases/oe_syslog.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -114,18 +116,23 @@ class SyslogTestConfig(OERuntimeTestCase):
114 @OETestDepends(['oe_syslog.SyslogTestConfig.test_syslog_logger']) 116 @OETestDepends(['oe_syslog.SyslogTestConfig.test_syslog_logger'])
115 @OEHasPackage(["busybox-syslog"]) 117 @OEHasPackage(["busybox-syslog"])
116 @skipIfDataVar('VIRTUAL-RUNTIME_init_manager', 'systemd', 118 @skipIfDataVar('VIRTUAL-RUNTIME_init_manager', 'systemd',
117 'Not appropiate for systemd image') 119 'Not appropriate for systemd image')
118 def test_syslog_startup_config(self): 120 def test_syslog_startup_config(self):
119 cmd = 'echo "LOGFILE=/var/log/test" >> /etc/syslog-startup.conf' 121 cmd = 'echo "LOGFILE=/var/log/test" >> /etc/syslog-startup.conf'
120 self.target.run(cmd) 122 self.target.run(cmd)
121 123
122 self.test_syslog_restart() 124 self.test_syslog_restart()
123 125
124 cmd = 'logger foobar && grep foobar /var/log/test' 126 cmd = 'logger foobar'
125 status,output = self.target.run(cmd) 127 status, output = self.target.run(cmd)
126 msg = 'Test log string not found. Output: %s ' % output 128 msg = 'Logger command failed, %s. Output: %s ' % (status, output)
127 self.assertEqual(status, 0, msg=msg) 129 self.assertEqual(status, 0, msg=msg)
128 130
131 cmd = 'cat /var/log/test'
132 status, output = self.target.run(cmd)
133 if "foobar" not in output or status:
134 self.fail("'foobar' not found in logfile, status %s, contents %s" % (status, output))
135
129 cmd = "sed -i 's#LOGFILE=/var/log/test##' /etc/syslog-startup.conf" 136 cmd = "sed -i 's#LOGFILE=/var/log/test##' /etc/syslog-startup.conf"
130 self.target.run(cmd) 137 self.target.run(cmd)
131 self.test_syslog_restart() 138 self.test_syslog_restart()
diff --git a/meta/lib/oeqa/runtime/cases/opkg.py b/meta/lib/oeqa/runtime/cases/opkg.py
index 9cfee1cd88..a29c93e59a 100644
--- a/meta/lib/oeqa/runtime/cases/opkg.py
+++ b/meta/lib/oeqa/runtime/cases/opkg.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/pam.py b/meta/lib/oeqa/runtime/cases/pam.py
index a482ded945..b3e8b56c3c 100644
--- a/meta/lib/oeqa/runtime/cases/pam.py
+++ b/meta/lib/oeqa/runtime/cases/pam.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-common.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-common.txt
new file mode 100644
index 0000000000..f91abbc941
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-common.txt
@@ -0,0 +1,62 @@
1# Xserver explains what the short codes mean
2(WW) warning, (EE) error, (NI) not implemented, (??) unknown.
3
4# Xserver warns if compiled with ACPI but no acpid running
5Open ACPI failed (/var/run/acpid.socket) (No such file or directory)
6
7# Some machines (eg qemux86) don't enable PAE (they probably should though)
8NX (Execute Disable) protection cannot be enabled: non-PAE kernel!
9
10# Connman's pacrunner warns if external connectivity isn't available
11Failed to find URL:http://ipv4.connman.net/online/status.html
12Failed to find URL:http://ipv6.connman.net/online/status.html
13
14# x86 on 6.6+ outputs this message, it is informational, not an error
15ACPI: _OSC evaluation for CPUs failed, trying _PDC
16
17# These should be reviewed to see if they are still needed
18dma timeout
19can\'t add hid device:
20usbhid: probe of
21_OSC failed (AE_ERROR)
22_OSC failed (AE_SUPPORT)
23AE_ALREADY_EXISTS
24ACPI _OSC request failed (AE_SUPPORT)
25can\'t disable ASPM
26Failed to load module "vesa"
27Failed to load module "modesetting"
28Failed to load module "glx"
29Failed to load module "fbdev"
30Failed to load module "ati"
31[drm] Cannot find any crtc or sizes
32_OSC failed (AE_NOT_FOUND); disabling ASPM
33hd.: possibly failed opcode
34NETLINK INITIALIZATION FAILED
35kernel: Cannot find map file
36omap_hwmod: debugss: _wait_target_disable failed
37VGA arbiter: cannot open kernel arbiter, no multi-card support
38Online check failed for
39netlink init failed
40Fast TSC calibration
41controller can't do DEVSLP, turning off
42stmmac_dvr_probe: warning: cannot get CSR clock
43error: couldn\'t mount because of unsupported optional features
44GPT: Use GNU Parted to correct GPT errors
45Cannot set xattr user.Librepo.DownloadInProgress
46Failed to read /var/lib/nfs/statd/state: Success
47error retry time-out =
48logind: cannot setup systemd-logind helper (-61), using legacy fallback
49Failed to rename network interface
50Failed to process device, ignoring: Device or resource busy
51Cannot find a map file
52[rdrand]: Initialization Failed
53[rndr ]: Initialization Failed
54[pulseaudio] authkey.c: Failed to open cookie file
55[pulseaudio] authkey.c: Failed to load authentication key
56was skipped because of a failed condition check
57was skipped because all trigger condition checks failed
58xf86OpenConsole: Switching VT failed
59Failed to read LoaderConfigTimeoutOneShot variable, ignoring: Operation not supported
60Failed to read LoaderEntryOneShot variable, ignoring: Operation not supported
61Direct firmware load for regulatory.db
62failed to load regulatory.db
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt
new file mode 100644
index 0000000000..156b0f9c10
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt
@@ -0,0 +1,19 @@
1# These should be reviewed to see if they are still needed
2cacheinfo: Failed to find cpu0 device node
3
4# 6.10 restructures sysctl registration such that mips
5# registers an empty table and generates harmless warnings:
6# failed when register_sysctl_sz sched_fair_sysctls to kernel
7# failed when register_sysctl_sz sched_core_sysctls to kernel
8failed when register_sysctl_sz sched
9
10# With qemu 9.1.0
11# pci 0000:00:00.0: BAR 2: can't handle BAR above 4GB (bus address 0x1f00000010)
12# pci 0000:00:00.0: BAR 5: error updating (0x1105d034 != 0x0100d034)
13BAR 0: error updating
14BAR 1: error updating
15BAR 2: error updating
16BAR 3: error updating
17BAR 4: error updating
18BAR 5: error updating
19: can't handle BAR above 4GB
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt
new file mode 100644
index 0000000000..143db40d63
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt
@@ -0,0 +1,35 @@
1# psplash
2FBIOPUT_VSCREENINFO failed, double buffering disabled
3
4# PCI host bridge to bus 0000:00
5# pci_bus 0000:00: root bus resource [mem 0x10000000-0x17ffffff]
6# pci_bus 0000:00: root bus resource [io 0x1000-0x1fffff]
7# pci_bus 0000:00: No busn resource found for root bus, will use [bus 00-ff]
8# pci 0000:00:00.0: [2046:ab11] type 00 class 0x100000
9# pci 0000:00:00.0: [Firmware Bug]: reg 0x10: invalid BAR (can't size)
10# pci 0000:00:00.0: [Firmware Bug]: reg 0x14: invalid BAR (can't size)
11# pci 0000:00:00.0: [Firmware Bug]: reg 0x18: invalid BAR (can't size)
12# pci 0000:00:00.0: [Firmware Bug]: reg 0x1c: invalid BAR (can't size)
13# pci 0000:00:00.0: [Firmware Bug]: reg 0x20: invalid BAR (can't size)
14# pci 0000:00:00.0: [Firmware Bug]: reg 0x24: invalid BAR (can't size)
15invalid BAR (can't size)
16# 6.10+ the invalid BAR warnings are of this format:
17# pci 0000:00:00.0: [Firmware Bug]: BAR 0: invalid; can't size
18# pci 0000:00:00.0: [Firmware Bug]: BAR 1: invalid; can't size
19# pci 0000:00:00.0: [Firmware Bug]: BAR 2: invalid; can't size
20# pci 0000:00:00.0: [Firmware Bug]: BAR 3: invalid; can't size
21# pci 0000:00:00.0: [Firmware Bug]: BAR 4: invalid; can't size
22# pci 0000:00:00.0: [Firmware Bug]: BAR 5: invalid; can't size
23invalid; can't size
24
25# These should be reviewed to see if they are still needed
26wrong ELF class
27fail to add MMCONFIG information, can't access extended PCI configuration space under this bridge
28can't claim BAR
29amd_nb: Cannot enumerate AMD northbridges
30tsc: HPET/PMTIMER calibration failed
31modeset(0): Failed to initialize the DRI2 extension
32glamor initialization failed
33blk_update_request: I/O error, dev fd0, sector 0 op 0x0:(READ)
34floppy: error
35failed to IDENTIFY (I/O error, err_mask=0x4)
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarm64.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarm64.txt
new file mode 100644
index 0000000000..260cdde620
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarm64.txt
@@ -0,0 +1,6 @@
1# These should be reviewed to see if they are still needed
2Fatal server error:
3(EE) Server terminated with error (1). Closing log file.
4dmi: Firmware registration failed.
5irq: type mismatch, failed to map hwirq-27 for /intc
6logind: failed to get session seat \ No newline at end of file
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarmv5.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarmv5.txt
new file mode 100644
index 0000000000..ed91107b7d
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarmv5.txt
@@ -0,0 +1,19 @@
1# Code is 2 JENT_ECOARSETIME: Timer too coarse for RNG.
2jitterentropy: Initialization failed with host not compliant with requirements: 2
3
4# These should be reviewed to see if they are still needed
5mmci-pl18x: probe of fpga:05 failed with error -22
6mmci-pl18x: probe of fpga:0b failed with error -22
7
8OF: amba_device_add() failed (-19) for /amba/smc@10100000
9OF: amba_device_add() failed (-19) for /amba/mpmc@10110000
10OF: amba_device_add() failed (-19) for /amba/sctl@101e0000
11OF: amba_device_add() failed (-19) for /amba/watchdog@101e1000
12OF: amba_device_add() failed (-19) for /amba/sci@101f0000
13OF: amba_device_add() failed (-19) for /amba/spi@101f4000
14OF: amba_device_add() failed (-19) for /amba/ssp@101f4000
15OF: amba_device_add() failed (-19) for /amba/fpga/sci@a000
16Failed to initialize '/amba/timer@101e3000': -22
17
18clcd-pl11x: probe of 10120000.display failed with error -2
19arm-charlcd 10008000.lcd: error -ENXIO: IRQ index 0 not found
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc.txt
new file mode 100644
index 0000000000..d9b58b58f1
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc.txt
@@ -0,0 +1,6 @@
1# These should be reviewed to see if they are still needed
2PCI 0000:00 Cannot reserve Legacy IO [io 0x0000-0x0fff]
3host side 80-wire cable detection failed, limiting max speed
4mode "640x480" test failed
5can't handle BAR above 4GB
6Cannot reserve Legacy IO \ No newline at end of file
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc64.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc64.txt
new file mode 100644
index 0000000000..b736a2aeb7
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc64.txt
@@ -0,0 +1,4 @@
1# These should be reviewed to see if they are still needed
2vio vio: uevent: failed to send synthetic uevent
3synth uevent: /devices/vio: failed to send uevent
4PCI 0000:00 Cannot reserve Legacy IO [io 0x10000-0x10fff] \ No newline at end of file
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemux86.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemux86.txt
new file mode 100644
index 0000000000..ebb76f1221
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemux86.txt
@@ -0,0 +1,2 @@
1# These should be reviewed to see if they are still needed
2Failed to access perfctr msr (MSR
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-x86.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-x86.txt
new file mode 100644
index 0000000000..5985247daf
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-x86.txt
@@ -0,0 +1,10 @@
1# These should be reviewed to see if they are still needed
2[drm:psb_do_init] *ERROR* Debug is
3wrong ELF class
4Could not enable PowerButton event
5probe of LNXPWRBN:00 failed with error -22
6pmd_set_huge: Cannot satisfy
7failed to setup card detect gpio
8amd_nb: Cannot enumerate AMD northbridges
9failed to retrieve link info, disabling eDP
10Direct firmware load for iwlwifi
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-x86_64.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-x86_64.txt
new file mode 120000
index 0000000000..404e384c32
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-x86_64.txt
@@ -0,0 +1 @@
parselogs-ignores-x86.txt \ No newline at end of file
diff --git a/meta/lib/oeqa/runtime/cases/parselogs.py b/meta/lib/oeqa/runtime/cases/parselogs.py
index a1791b5cca..47c77fccd5 100644
--- a/meta/lib/oeqa/runtime/cases/parselogs.py
+++ b/meta/lib/oeqa/runtime/cases/parselogs.py
@@ -1,204 +1,49 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
7import collections
5import os 8import os
9import sys
6 10
7from subprocess import check_output
8from shutil import rmtree 11from shutil import rmtree
9from oeqa.runtime.case import OERuntimeTestCase 12from oeqa.runtime.case import OERuntimeTestCase
10from oeqa.core.decorator.depends import OETestDepends 13from oeqa.core.decorator.depends import OETestDepends
11from oeqa.core.decorator.data import skipIfDataVar
12from oeqa.runtime.decorator.package import OEHasPackage
13
14#in the future these lists could be moved outside of module
15errors = ["error", "cannot", "can\'t", "failed"]
16
17common_errors = [
18 "(WW) warning, (EE) error, (NI) not implemented, (??) unknown.",
19 "dma timeout",
20 "can\'t add hid device:",
21 "usbhid: probe of ",
22 "_OSC failed (AE_ERROR)",
23 "_OSC failed (AE_SUPPORT)",
24 "AE_ALREADY_EXISTS",
25 "ACPI _OSC request failed (AE_SUPPORT)",
26 "can\'t disable ASPM",
27 "Failed to load module \"vesa\"",
28 "Failed to load module vesa",
29 "Failed to load module \"modesetting\"",
30 "Failed to load module modesetting",
31 "Failed to load module \"glx\"",
32 "Failed to load module \"fbdev\"",
33 "Failed to load module fbdev",
34 "Failed to load module glx",
35 "[drm] Cannot find any crtc or sizes - going 1024x768",
36 "_OSC failed (AE_NOT_FOUND); disabling ASPM",
37 "Open ACPI failed (/var/run/acpid.socket) (No such file or directory)",
38 "NX (Execute Disable) protection cannot be enabled: non-PAE kernel!",
39 "hd.: possibly failed opcode",
40 'NETLINK INITIALIZATION FAILED',
41 'kernel: Cannot find map file',
42 'omap_hwmod: debugss: _wait_target_disable failed',
43 'VGA arbiter: cannot open kernel arbiter, no multi-card support',
44 'Failed to find URL:http://ipv4.connman.net/online/status.html',
45 'Online check failed for',
46 'netlink init failed',
47 'Fast TSC calibration',
48 "BAR 0-9",
49 "Failed to load module \"ati\"",
50 "controller can't do DEVSLP, turning off",
51 "stmmac_dvr_probe: warning: cannot get CSR clock",
52 "error: couldn\'t mount because of unsupported optional features",
53 "GPT: Use GNU Parted to correct GPT errors",
54 "Cannot set xattr user.Librepo.DownloadInProgress",
55 "Failed to read /var/lib/nfs/statd/state: Success",
56 "error retry time-out =",
57 "logind: cannot setup systemd-logind helper (-61), using legacy fallback",
58 "Failed to rename network interface",
59 "Failed to process device, ignoring: Device or resource busy",
60 "Cannot find a map file",
61 "[rdrand]: Initialization Failed",
62 "[pulseaudio] authkey.c: Failed to open cookie file",
63 "[pulseaudio] authkey.c: Failed to load authentication key",
64 ]
65 14
66video_related = [ 15# importlib.resources.open_text in Python <3.10 doesn't search all directories
67] 16# when a package is split across multiple directories. Until we can rely on
17# 3.10+, reimplement the searching logic.
18if sys.version_info < (3, 10):
19 def _open_text(package, resource):
20 import importlib, pathlib
21 module = importlib.import_module(package)
22 for path in module.__path__:
23 candidate = pathlib.Path(path) / resource
24 if candidate.exists():
25 return candidate.open(encoding='utf-8')
26 raise FileNotFoundError
27else:
28 from importlib.resources import open_text as _open_text
68 29
69x86_common = [
70 '[drm:psb_do_init] *ERROR* Debug is',
71 'wrong ELF class',
72 'Could not enable PowerButton event',
73 'probe of LNXPWRBN:00 failed with error -22',
74 'pmd_set_huge: Cannot satisfy',
75 'failed to setup card detect gpio',
76 'amd_nb: Cannot enumerate AMD northbridges',
77 'failed to retrieve link info, disabling eDP',
78 'Direct firmware load for iwlwifi',
79 'Direct firmware load for regulatory.db',
80 'failed to load regulatory.db',
81] + common_errors
82 30
83qemux86_common = [ 31class ParseLogsTest(OERuntimeTestCase):
84 'wrong ELF class',
85 "fail to add MMCONFIG information, can't access extended PCI configuration space under this bridge.",
86 "can't claim BAR ",
87 'amd_nb: Cannot enumerate AMD northbridges',
88 'tsc: HPET/PMTIMER calibration failed',
89 "modeset(0): Failed to initialize the DRI2 extension",
90 "glamor initialization failed",
91] + common_errors
92 32
93ignore_errors = { 33 # Which log files should be collected
94 'default' : common_errors, 34 log_locations = ["/var/log/", "/var/log/dmesg", "/tmp/dmesg_output.log"]
95 'qemux86' : [
96 'Failed to access perfctr msr (MSR',
97 'pci 0000:00:00.0: [Firmware Bug]: reg 0x..: invalid BAR (can\'t size)',
98 ] + qemux86_common,
99 'qemux86-64' : qemux86_common,
100 'qemumips' : [
101 'Failed to load module "glx"',
102 'pci 0000:00:00.0: [Firmware Bug]: reg 0x..: invalid BAR (can\'t size)',
103 'cacheinfo: Failed to find cpu0 device node',
104 ] + common_errors,
105 'qemumips64' : [
106 'pci 0000:00:00.0: [Firmware Bug]: reg 0x..: invalid BAR (can\'t size)',
107 'cacheinfo: Failed to find cpu0 device node',
108 ] + common_errors,
109 'qemuppc' : [
110 'PCI 0000:00 Cannot reserve Legacy IO [io 0x0000-0x0fff]',
111 'host side 80-wire cable detection failed, limiting max speed',
112 'mode "640x480" test failed',
113 'Failed to load module "glx"',
114 'can\'t handle BAR above 4GB',
115 'Cannot reserve Legacy IO',
116 ] + common_errors,
117 'qemuarm' : [
118 'mmci-pl18x: probe of fpga:05 failed with error -22',
119 'mmci-pl18x: probe of fpga:0b failed with error -22',
120 'Failed to load module "glx"',
121 'OF: amba_device_add() failed (-19) for /amba/smc@10100000',
122 'OF: amba_device_add() failed (-19) for /amba/mpmc@10110000',
123 'OF: amba_device_add() failed (-19) for /amba/sctl@101e0000',
124 'OF: amba_device_add() failed (-19) for /amba/watchdog@101e1000',
125 'OF: amba_device_add() failed (-19) for /amba/sci@101f0000',
126 'OF: amba_device_add() failed (-19) for /amba/spi@101f4000',
127 'OF: amba_device_add() failed (-19) for /amba/ssp@101f4000',
128 'OF: amba_device_add() failed (-19) for /amba/fpga/sci@a000',
129 'Failed to initialize \'/amba/timer@101e3000\': -22',
130 'jitterentropy: Initialization failed with host not compliant with requirements: 2',
131 ] + common_errors,
132 'qemuarm64' : [
133 'Fatal server error:',
134 '(EE) Server terminated with error (1). Closing log file.',
135 'dmi: Firmware registration failed.',
136 'irq: type mismatch, failed to map hwirq-27 for /intc',
137 'logind: failed to get session seat',
138 ] + common_errors,
139 'intel-core2-32' : [
140 'ACPI: No _BQC method, cannot determine initial brightness',
141 '[Firmware Bug]: ACPI: No _BQC method, cannot determine initial brightness',
142 '(EE) Failed to load module "psb"',
143 '(EE) Failed to load module psb',
144 '(EE) Failed to load module "psbdrv"',
145 '(EE) Failed to load module psbdrv',
146 '(EE) open /dev/fb0: No such file or directory',
147 '(EE) AIGLX: reverting to software rendering',
148 'dmi: Firmware registration failed.',
149 'ioremap error for 0x78',
150 ] + x86_common,
151 'intel-corei7-64' : [
152 'can\'t set Max Payload Size to 256',
153 'intel_punit_ipc: can\'t request region for resource',
154 '[drm] parse error at position 4 in video mode \'efifb\'',
155 'ACPI Error: Could not enable RealTimeClock event',
156 'ACPI Warning: Could not enable fixed event - RealTimeClock',
157 'hci_intel INT33E1:00: Unable to retrieve gpio',
158 'hci_intel: probe of INT33E1:00 failed',
159 'can\'t derive routing for PCI INT A',
160 'failed to read out thermal zone',
161 'Bluetooth: hci0: Setting Intel event mask failed',
162 'ttyS2 - failed to request DMA',
163 'Bluetooth: hci0: Failed to send firmware data (-38)',
164 'atkbd serio0: Failed to enable keyboard on isa0060/serio0',
165 ] + x86_common,
166 'genericx86' : x86_common,
167 'genericx86-64' : [
168 'Direct firmware load for i915',
169 'Failed to load firmware i915',
170 'Failed to fetch GuC',
171 'Failed to initialize GuC',
172 'Failed to load DMC firmware',
173 'The driver is built-in, so to load the firmware you need to',
174 ] + x86_common,
175 'edgerouter' : [
176 'not creating \'/sys/firmware/fdt\'',
177 'Failed to find cpu0 device node',
178 'Fatal server error:',
179 'Server terminated with error',
180 ] + common_errors,
181 'beaglebone-yocto' : [
182 'Direct firmware load for regulatory.db',
183 'failed to load regulatory.db',
184 'l4_wkup_cm',
185 'Failed to load module "glx"',
186 'Failed to make EGL context current',
187 'glamor initialization failed',
188 ] + common_errors,
189}
190 35
191log_locations = ["/var/log/","/var/log/dmesg", "/tmp/dmesg_output.log"] 36 # The keywords that identify error messages in the log files
37 errors = ["error", "cannot", "can't", "failed", "---[ cut here ]---", "No irq handler for vector"]
192 38
193class ParseLogsTest(OERuntimeTestCase): 39 # A list of error messages that should be ignored
40 ignore_errors = []
194 41
195 @classmethod 42 @classmethod
196 def setUpClass(cls): 43 def setUpClass(cls):
197 cls.errors = errors
198
199 # When systemd is enabled we need to notice errors on 44 # When systemd is enabled we need to notice errors on
200 # circular dependencies in units. 45 # circular dependencies in units.
201 if 'systemd' in cls.td.get('DISTRO_FEATURES', ''): 46 if 'systemd' in cls.td.get('DISTRO_FEATURES'):
202 cls.errors.extend([ 47 cls.errors.extend([
203 'Found ordering cycle on', 48 'Found ordering cycle on',
204 'Breaking ordering cycle by deleting job', 49 'Breaking ordering cycle by deleting job',
@@ -206,48 +51,22 @@ class ParseLogsTest(OERuntimeTestCase):
206 'Ordering cycle found, skipping', 51 'Ordering cycle found, skipping',
207 ]) 52 ])
208 53
209 cls.ignore_errors = ignore_errors 54 cls.errors = [s.casefold() for s in cls.errors]
210 cls.log_locations = log_locations
211 cls.msg = ''
212 is_lsb, _ = cls.tc.target.run("which LSB_Test.sh")
213 if is_lsb == 0:
214 for machine in cls.ignore_errors:
215 cls.ignore_errors[machine] = cls.ignore_errors[machine] \
216 + video_related
217
218 def getMachine(self):
219 return self.td.get('MACHINE', '')
220
221 def getWorkdir(self):
222 return self.td.get('WORKDIR', '')
223
224 # Get some information on the CPU of the machine to display at the
225 # beginning of the output. This info might be useful in some cases.
226 def getHardwareInfo(self):
227 hwi = ""
228 cmd = ('cat /proc/cpuinfo | grep "model name" | head -n1 | '
229 " awk 'BEGIN{FS=\":\"}{print $2}'")
230 _, cpu_name = self.target.run(cmd)
231
232 cmd = ('cat /proc/cpuinfo | grep "cpu cores" | head -n1 | '
233 "awk {'print $4'}")
234 _, cpu_physical_cores = self.target.run(cmd)
235
236 cmd = 'cat /proc/cpuinfo | grep "processor" | wc -l'
237 _, cpu_logical_cores = self.target.run(cmd)
238
239 _, cpu_arch = self.target.run('uname -m')
240 55
241 hwi += 'Machine information: \n' 56 cls.load_machine_ignores()
242 hwi += '*******************************\n'
243 hwi += 'Machine name: ' + self.getMachine() + '\n'
244 hwi += 'CPU: ' + str(cpu_name) + '\n'
245 hwi += 'Arch: ' + str(cpu_arch)+ '\n'
246 hwi += 'Physical cores: ' + str(cpu_physical_cores) + '\n'
247 hwi += 'Logical cores: ' + str(cpu_logical_cores) + '\n'
248 hwi += '*******************************\n'
249 57
250 return hwi 58 @classmethod
59 def load_machine_ignores(cls):
60 # Add TARGET_ARCH explicitly as not every machine has that in MACHINEOVERRDES (eg qemux86-64)
61 for candidate in ["common", cls.td.get("TARGET_ARCH")] + cls.td.get("MACHINEOVERRIDES").split(":"):
62 try:
63 name = f"parselogs-ignores-{candidate}.txt"
64 for line in _open_text("oeqa.runtime.cases", name):
65 line = line.strip()
66 if line and not line.startswith("#"):
67 cls.ignore_errors.append(line.casefold())
68 except FileNotFoundError:
69 pass
251 70
252 # Go through the log locations provided and if it's a folder 71 # Go through the log locations provided and if it's a folder
253 # create a list with all the .log files in it, if it's a file 72 # create a list with all the .log files in it, if it's a file
@@ -255,23 +74,23 @@ class ParseLogsTest(OERuntimeTestCase):
255 def getLogList(self, log_locations): 74 def getLogList(self, log_locations):
256 logs = [] 75 logs = []
257 for location in log_locations: 76 for location in log_locations:
258 status, _ = self.target.run('test -f ' + str(location)) 77 status, _ = self.target.run('test -f %s' % location)
259 if status == 0: 78 if status == 0:
260 logs.append(str(location)) 79 logs.append(location)
261 else: 80 else:
262 status, _ = self.target.run('test -d ' + str(location)) 81 status, _ = self.target.run('test -d %s' % location)
263 if status == 0: 82 if status == 0:
264 cmd = 'find ' + str(location) + '/*.log -maxdepth 1 -type f' 83 cmd = 'find %s -name \\*.log -maxdepth 1 -type f' % location
265 status, output = self.target.run(cmd) 84 status, output = self.target.run(cmd)
266 if status == 0: 85 if status == 0:
267 output = output.splitlines() 86 output = output.splitlines()
268 for logfile in output: 87 for logfile in output:
269 logs.append(os.path.join(location, str(logfile))) 88 logs.append(os.path.join(location, logfile))
270 return logs 89 return logs
271 90
272 # Copy the log files to be parsed locally 91 # Copy the log files to be parsed locally
273 def transfer_logs(self, log_list): 92 def transfer_logs(self, log_list):
274 workdir = self.getWorkdir() 93 workdir = self.td.get('WORKDIR')
275 self.target_logs = workdir + '/' + 'target_logs' 94 self.target_logs = workdir + '/' + 'target_logs'
276 target_logs = self.target_logs 95 target_logs = self.target_logs
277 if os.path.exists(target_logs): 96 if os.path.exists(target_logs):
@@ -288,65 +107,55 @@ class ParseLogsTest(OERuntimeTestCase):
288 logs = [f for f in dir_files if os.path.isfile(f)] 107 logs = [f for f in dir_files if os.path.isfile(f)]
289 return logs 108 return logs
290 109
291 # Build the grep command to be used with filters and exclusions 110 def get_context(self, lines, index, before=6, after=3):
292 def build_grepcmd(self, errors, ignore_errors, log): 111 """
293 grepcmd = 'grep ' 112 Given a set of lines and the index of the line that is important, return
294 grepcmd += '-Ei "' 113 a number of lines surrounding that line.
295 for error in errors: 114 """
296 grepcmd += '\<' + error + '\>' + '|' 115 last = len(lines)
297 grepcmd = grepcmd[:-1] 116
298 grepcmd += '" ' + str(log) + " | grep -Eiv \'" 117 start = index - before
299 118 end = index + after + 1
300 try: 119
301 errorlist = ignore_errors[self.getMachine()] 120 if start < 0:
302 except KeyError: 121 end -= start
303 self.msg += 'No ignore list found for this machine, using default\n' 122 start = 0
304 errorlist = ignore_errors['default'] 123 if end > last:
305 124 start -= end - last
306 for ignore_error in errorlist: 125 end = last
307 ignore_error = ignore_error.replace('(', '\(') 126
308 ignore_error = ignore_error.replace(')', '\)') 127 return lines[start:end]
309 ignore_error = ignore_error.replace("'", '.') 128
310 ignore_error = ignore_error.replace('?', '\?') 129 def test_get_context(self):
311 ignore_error = ignore_error.replace('[', '\[') 130 """
312 ignore_error = ignore_error.replace(']', '\]') 131 A test case for the test case.
313 ignore_error = ignore_error.replace('*', '\*') 132 """
314 ignore_error = ignore_error.replace('0-9', '[0-9]') 133 lines = list(range(0,10))
315 grepcmd += ignore_error + '|' 134 self.assertEqual(self.get_context(lines, 0, 2, 1), [0, 1, 2, 3])
316 grepcmd = grepcmd[:-1] 135 self.assertEqual(self.get_context(lines, 5, 2, 1), [3, 4, 5, 6])
317 grepcmd += "\'" 136 self.assertEqual(self.get_context(lines, 9, 2, 1), [6, 7, 8, 9])
318 137
319 return grepcmd 138 def parse_logs(self, logs, lines_before=10, lines_after=10):
320 139 """
321 # Grep only the errors so that their context could be collected. 140 Search the log files @logs looking for error lines (marked by
322 # Default context is 10 lines before and after the error itself 141 @self.errors), ignoring anything listed in @self.ignore_errors.
323 def parse_logs(self, errors, ignore_errors, logs, 142
324 lines_before = 10, lines_after = 10): 143 Returns a dictionary of log filenames to a dictionary of error lines to
325 results = {} 144 the error context (controlled by @lines_before and @lines_after).
326 rez = [] 145 """
327 grep_output = '' 146 results = collections.defaultdict(dict)
328 147
329 for log in logs: 148 for log in logs:
330 result = None 149 with open(log) as f:
331 thegrep = self.build_grepcmd(errors, ignore_errors, log) 150 lines = f.readlines()
332 151
333 try: 152 for i, line in enumerate(lines):
334 result = check_output(thegrep, shell=True).decode('utf-8') 153 line = line.strip()
335 except: 154 line_lower = line.casefold()
336 pass
337 155
338 if result is not None: 156 if any(keyword in line_lower for keyword in self.errors):
339 results[log] = {} 157 if not any(ignore in line_lower for ignore in self.ignore_errors):
340 rez = result.splitlines() 158 results[log][line] = "".join(self.get_context(lines, i, lines_before, lines_after))
341
342 for xrez in rez:
343 try:
344 cmd = ['grep', '-F', xrez, '-B', str(lines_before)]
345 cmd += ['-A', str(lines_after), log]
346 grep_output = check_output(cmd).decode('utf-8')
347 except:
348 pass
349 results[log][xrez]=grep_output
350 159
351 return results 160 return results
352 161
@@ -359,17 +168,18 @@ class ParseLogsTest(OERuntimeTestCase):
359 def test_parselogs(self): 168 def test_parselogs(self):
360 self.write_dmesg() 169 self.write_dmesg()
361 log_list = self.get_local_log_list(self.log_locations) 170 log_list = self.get_local_log_list(self.log_locations)
362 result = self.parse_logs(self.errors, self.ignore_errors, log_list) 171 result = self.parse_logs(log_list)
363 print(self.getHardwareInfo()) 172
364 errcount = 0 173 errcount = 0
174 self.msg = ""
365 for log in result: 175 for log in result:
366 self.msg += 'Log: ' + log + '\n' 176 self.msg += 'Log: ' + log + '\n'
367 self.msg += '-----------------------\n' 177 self.msg += '-----------------------\n'
368 for error in result[log]: 178 for error in result[log]:
369 errcount += 1 179 errcount += 1
370 self.msg += 'Central error: ' + str(error) + '\n' 180 self.msg += 'Central error: ' + error + '\n'
371 self.msg += '***********************\n' 181 self.msg += '***********************\n'
372 self.msg += result[str(log)][str(error)] + '\n' 182 self.msg += result[log][error] + '\n'
373 self.msg += '***********************\n' 183 self.msg += '***********************\n'
374 self.msg += '%s errors found in logs.' % errcount 184 self.msg += '%s errors found in logs.' % errcount
375 self.assertEqual(errcount, 0, msg=self.msg) 185 self.assertEqual(errcount, 0, msg=self.msg)
diff --git a/meta/lib/oeqa/runtime/cases/perl.py b/meta/lib/oeqa/runtime/cases/perl.py
index 2c6b3b7846..f11b300836 100644
--- a/meta/lib/oeqa/runtime/cases/perl.py
+++ b/meta/lib/oeqa/runtime/cases/perl.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/ping.py b/meta/lib/oeqa/runtime/cases/ping.py
index f6603f75ec..efb91d4cc9 100644
--- a/meta/lib/oeqa/runtime/cases/ping.py
+++ b/meta/lib/oeqa/runtime/cases/ping.py
@@ -1,11 +1,15 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5from subprocess import Popen, PIPE 7from subprocess import Popen, PIPE
8from time import sleep
6 9
7from oeqa.runtime.case import OERuntimeTestCase 10from oeqa.runtime.case import OERuntimeTestCase, run_network_serialdebug
8from oeqa.core.decorator.oetimeout import OETimeout 11from oeqa.core.decorator.oetimeout import OETimeout
12from oeqa.core.exception import OEQATimeoutError
9 13
10class PingTest(OERuntimeTestCase): 14class PingTest(OERuntimeTestCase):
11 15
@@ -13,14 +17,27 @@ class PingTest(OERuntimeTestCase):
13 def test_ping(self): 17 def test_ping(self):
14 output = '' 18 output = ''
15 count = 0 19 count = 0
16 while count < 5: 20 self.assertNotEqual(len(self.target.ip), 0, msg="No target IP address set")
17 cmd = 'ping -c 1 %s' % self.target.ip 21
18 proc = Popen(cmd, shell=True, stdout=PIPE) 22 # If the target IP is localhost (because user-space networking is being used),
19 output += proc.communicate()[0].decode('utf-8') 23 # then there's no point in pinging it.
20 if proc.poll() == 0: 24 if self.target.ip.startswith("127.0.0.") or self.target.ip in ("localhost", "::1"):
21 count += 1 25 print("runtime/ping: localhost detected, not pinging")
22 else: 26 return
23 count = 0 27
28 try:
29 while count < 5:
30 cmd = 'ping -c 1 %s' % self.target.ip
31 proc = Popen(cmd, shell=True, stdout=PIPE)
32 output += proc.communicate()[0].decode('utf-8')
33 if proc.poll() == 0:
34 count += 1
35 else:
36 count = 0
37 sleep(1)
38 except OEQATimeoutError:
39 run_network_serialdebug(self.target.runner)
40 self.fail("Ping timeout error for address %s, count %s, output: %s" % (self.target.ip, count, output))
24 msg = ('Expected 5 consecutive, got %d.\n' 41 msg = ('Expected 5 consecutive, got %d.\n'
25 'ping output is:\n%s' % (count,output)) 42 'ping output is:\n%s' % (count,output))
26 self.assertEqual(count, 5, msg = msg) 43 self.assertEqual(count, 5, msg = msg)
diff --git a/meta/lib/oeqa/runtime/cases/ptest.py b/meta/lib/oeqa/runtime/cases/ptest.py
index 0800f3c27f..fbaeb84d00 100644
--- a/meta/lib/oeqa/runtime/cases/ptest.py
+++ b/meta/lib/oeqa/runtime/cases/ptest.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -57,7 +59,7 @@ class PtestRunnerTest(OERuntimeTestCase):
57 ptest_dirs = [ '/usr/lib' ] 59 ptest_dirs = [ '/usr/lib' ]
58 if not libdir in ptest_dirs: 60 if not libdir in ptest_dirs:
59 ptest_dirs.append(libdir) 61 ptest_dirs.append(libdir)
60 status, output = self.target.run('ptest-runner -d \"{}\"'.format(' '.join(ptest_dirs)), 0) 62 status, output = self.target.run('ptest-runner -t 450 -d \"{}\"'.format(' '.join(ptest_dirs)), 0)
61 os.makedirs(ptest_log_dir) 63 os.makedirs(ptest_log_dir)
62 with open(ptest_runner_log, 'w') as f: 64 with open(ptest_runner_log, 'w') as f:
63 f.write(output) 65 f.write(output)
@@ -81,17 +83,20 @@ class PtestRunnerTest(OERuntimeTestCase):
81 83
82 extras['ptestresult.sections'] = sections 84 extras['ptestresult.sections'] = sections
83 85
86 zerolength = []
84 trans = str.maketrans("()", "__") 87 trans = str.maketrans("()", "__")
85 for section in results: 88 for section in results:
86 for test in results[section]: 89 for test in results[section]:
87 result = results[section][test] 90 result = results[section][test]
88 testname = "ptestresult." + (section or "No-section") + "." + "_".join(test.translate(trans).split()) 91 testname = "ptestresult." + (section or "No-section") + "." + "_".join(test.translate(trans).split())
89 extras[testname] = {'status': result} 92 extras[testname] = {'status': result}
93 if not results[section]:
94 zerolength.append(section)
90 95
91 failed_tests = {} 96 failed_tests = {}
92 97
93 for section in sections: 98 for section in sections:
94 if 'exitcode' in sections[section].keys(): 99 if 'exitcode' in sections[section].keys() or 'timeout' in sections[section].keys():
95 failed_tests[section] = sections[section]["log"] 100 failed_tests[section] = sections[section]["log"]
96 101
97 for section in results: 102 for section in results:
@@ -105,7 +110,10 @@ class PtestRunnerTest(OERuntimeTestCase):
105 failmsg = "ERROR: Processes were killed by the OOM Killer:\n%s\n" % output 110 failmsg = "ERROR: Processes were killed by the OOM Killer:\n%s\n" % output
106 111
107 if failed_tests: 112 if failed_tests:
108 failmsg = failmsg + "Failed ptests:\n%s" % pprint.pformat(failed_tests) 113 failmsg = failmsg + "\nFailed ptests:\n%s\n" % pprint.pformat(failed_tests)
114
115 if zerolength:
116 failmsg = failmsg + "\nptests which had no test results:\n%s" % pprint.pformat(zerolength)
109 117
110 if failmsg: 118 if failmsg:
111 self.logger.warning("There were failing ptests.") 119 self.logger.warning("There were failing ptests.")
diff --git a/meta/lib/oeqa/runtime/cases/python.py b/meta/lib/oeqa/runtime/cases/python.py
index ec54f1e1db..5d6d133480 100644
--- a/meta/lib/oeqa/runtime/cases/python.py
+++ b/meta/lib/oeqa/runtime/cases/python.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/rpm.py b/meta/lib/oeqa/runtime/cases/rpm.py
index 8e18b426f8..ea5619ffea 100644
--- a/meta/lib/oeqa/runtime/cases/rpm.py
+++ b/meta/lib/oeqa/runtime/cases/rpm.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -49,21 +51,20 @@ class RpmBasicTest(OERuntimeTestCase):
49 msg = 'status: %s. Cannot run rpm -qa: %s' % (status, output) 51 msg = 'status: %s. Cannot run rpm -qa: %s' % (status, output)
50 self.assertEqual(status, 0, msg=msg) 52 self.assertEqual(status, 0, msg=msg)
51 53
52 def check_no_process_for_user(u): 54 def wait_for_no_process_for_user(u, timeout = 120):
53 _, output = self.target.run(self.tc.target_cmds['ps']) 55 timeout_at = time.time() + timeout
54 if u + ' ' in output: 56 while time.time() < timeout_at:
55 return False 57 _, output = self.target.run(self.tc.target_cmds['ps'])
56 else: 58 if u + ' ' not in output:
57 return True 59 return
60 time.sleep(1)
61 user_pss = [ps for ps in output.split("\n") if u + ' ' in ps]
62 msg = "User %s has processes still running: %s" % (u, "\n".join(user_pss))
63 self.fail(msg=msg)
58 64
59 def unset_up_test_user(u): 65 def unset_up_test_user(u):
60 # ensure no test1 process in running 66 # ensure no test1 process in running
61 timeout = time.time() + 30 67 wait_for_no_process_for_user(u)
62 while time.time() < timeout:
63 if check_no_process_for_user(u):
64 break
65 else:
66 time.sleep(1)
67 status, output = self.target.run('userdel -r %s' % u) 68 status, output = self.target.run('userdel -r %s' % u)
68 msg = 'Failed to erase user: %s' % output 69 msg = 'Failed to erase user: %s' % output
69 self.assertTrue(status == 0, msg=msg) 70 self.assertTrue(status == 0, msg=msg)
@@ -79,21 +80,24 @@ class RpmBasicTest(OERuntimeTestCase):
79 80
80class RpmInstallRemoveTest(OERuntimeTestCase): 81class RpmInstallRemoveTest(OERuntimeTestCase):
81 82
82 @classmethod 83 def _find_test_file(self):
83 def setUpClass(cls): 84 pkgarch = self.td['TUNE_PKGARCH'].replace('-', '_')
84 pkgarch = cls.td['TUNE_PKGARCH'].replace('-', '_') 85 rpmdir = os.path.join(self.tc.td['DEPLOY_DIR'], 'rpm', pkgarch)
85 rpmdir = os.path.join(cls.tc.td['DEPLOY_DIR'], 'rpm', pkgarch)
86 # Pick base-passwd-doc as a test file to get installed, because it's small 86 # Pick base-passwd-doc as a test file to get installed, because it's small
87 # and it will always be built for standard targets 87 # and it will always be built for standard targets
88 rpm_doc = 'base-passwd-doc-*.%s.rpm' % pkgarch 88 rpm_doc = 'base-passwd-doc-*.%s.rpm' % pkgarch
89 if not os.path.exists(rpmdir): 89 if not os.path.exists(rpmdir):
90 return 90 self.fail("Rpm directory {} does not exist".format(rpmdir))
91 for f in fnmatch.filter(os.listdir(rpmdir), rpm_doc): 91 for f in fnmatch.filter(os.listdir(rpmdir), rpm_doc):
92 cls.test_file = os.path.join(rpmdir, f) 92 self.test_file = os.path.join(rpmdir, f)
93 cls.dst = '/tmp/base-passwd-doc.rpm' 93 break
94 else:
95 self.fail("Couldn't find the test rpm file {} in {}".format(rpm_doc, rpmdir))
96 self.dst = '/tmp/base-passwd-doc.rpm'
94 97
95 @OETestDepends(['rpm.RpmBasicTest.test_rpm_query']) 98 @OETestDepends(['rpm.RpmBasicTest.test_rpm_query'])
96 def test_rpm_install(self): 99 def test_rpm_install(self):
100 self._find_test_file()
97 self.tc.target.copyTo(self.test_file, self.dst) 101 self.tc.target.copyTo(self.test_file, self.dst)
98 status, output = self.target.run('rpm -ivh /tmp/base-passwd-doc.rpm') 102 status, output = self.target.run('rpm -ivh /tmp/base-passwd-doc.rpm')
99 msg = 'Failed to install base-passwd-doc package: %s' % output 103 msg = 'Failed to install base-passwd-doc package: %s' % output
@@ -116,12 +120,13 @@ class RpmInstallRemoveTest(OERuntimeTestCase):
116 Author: Alexander Kanavin <alex.kanavin@gmail.com> 120 Author: Alexander Kanavin <alex.kanavin@gmail.com>
117 AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com> 121 AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
118 """ 122 """
119 db_files_cmd = 'ls /var/lib/rpm/__db.*' 123 self._find_test_file()
124 db_files_cmd = 'ls /var/lib/rpm/rpmdb.sqlite*'
120 check_log_cmd = "grep RPM /var/log/messages | wc -l" 125 check_log_cmd = "grep RPM /var/log/messages | wc -l"
121 126
122 # Make sure that some database files are under /var/lib/rpm as '__db.xxx' 127 # Make sure that some database files are under /var/lib/rpm as 'rpmdb.sqlite'
123 status, output = self.target.run(db_files_cmd) 128 status, output = self.target.run(db_files_cmd)
124 msg = 'Failed to find database files under /var/lib/rpm/ as __db.xxx' 129 msg = 'Failed to find database files under /var/lib/rpm/ as rpmdb.sqlite'
125 self.assertEqual(0, status, msg=msg) 130 self.assertEqual(0, status, msg=msg)
126 131
127 self.tc.target.copyTo(self.test_file, self.dst) 132 self.tc.target.copyTo(self.test_file, self.dst)
@@ -141,13 +146,4 @@ class RpmInstallRemoveTest(OERuntimeTestCase):
141 146
142 self.tc.target.run('rm -f %s' % self.dst) 147 self.tc.target.run('rm -f %s' % self.dst)
143 148
144 # if using systemd this should ensure all entries are flushed to /var
145 status, output = self.target.run("journalctl --sync")
146 # Get the amount of entries in the log file
147 status, output = self.target.run(check_log_cmd)
148 msg = 'Failed to get the final size of the log file.'
149 self.assertEqual(0, status, msg=msg)
150 149
151 # Check that there's enough of them
152 self.assertGreaterEqual(int(output), 80,
153 'Cound not find sufficient amount of rpm entries in /var/log/messages, found {} entries'.format(output))
diff --git a/meta/lib/oeqa/runtime/cases/rt.py b/meta/lib/oeqa/runtime/cases/rt.py
new file mode 100644
index 0000000000..15ab4dbbbb
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/rt.py
@@ -0,0 +1,19 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.runtime.case import OERuntimeTestCase
8from oeqa.core.decorator.depends import OETestDepends
9
10class RtTest(OERuntimeTestCase):
11 @OETestDepends(['ssh.SSHTest.test_ssh'])
12 def test_is_rt(self):
13 """
14 Check that the kernel has CONFIG_PREEMPT_RT enabled.
15 """
16 status, output = self.target.run("uname -a")
17 self.assertEqual(status, 0, msg=output)
18 # Split so we don't get a substring false-positive
19 self.assertIn("PREEMPT_RT", output.split())
diff --git a/meta/lib/oeqa/runtime/cases/rtc.py b/meta/lib/oeqa/runtime/cases/rtc.py
index a34c101a9d..6e45c5db4f 100644
--- a/meta/lib/oeqa/runtime/cases/rtc.py
+++ b/meta/lib/oeqa/runtime/cases/rtc.py
@@ -1,5 +1,11 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
1from oeqa.runtime.case import OERuntimeTestCase 6from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.core.decorator.depends import OETestDepends 7from oeqa.core.decorator.depends import OETestDepends
8from oeqa.core.decorator.data import skipIfFeature
3from oeqa.runtime.decorator.package import OEHasPackage 9from oeqa.runtime.decorator.package import OEHasPackage
4 10
5import re 11import re
@@ -9,19 +15,21 @@ class RTCTest(OERuntimeTestCase):
9 def setUp(self): 15 def setUp(self):
10 if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd': 16 if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd':
11 self.logger.debug('Stopping systemd-timesyncd daemon') 17 self.logger.debug('Stopping systemd-timesyncd daemon')
12 self.target.run('systemctl disable --now systemd-timesyncd') 18 self.target.run('systemctl disable --now --runtime systemd-timesyncd')
13 19
14 def tearDown(self): 20 def tearDown(self):
15 if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd': 21 if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd':
16 self.logger.debug('Starting systemd-timesyncd daemon') 22 self.logger.debug('Starting systemd-timesyncd daemon')
17 self.target.run('systemctl enable --now systemd-timesyncd') 23 self.target.run('systemctl enable --now --runtime systemd-timesyncd')
18 24
25 @skipIfFeature('read-only-rootfs',
26 'Test does not work with read-only-rootfs in IMAGE_FEATURES')
19 @OETestDepends(['ssh.SSHTest.test_ssh']) 27 @OETestDepends(['ssh.SSHTest.test_ssh'])
20 @OEHasPackage(['coreutils', 'busybox']) 28 @OEHasPackage(['coreutils', 'busybox'])
21 def test_rtc(self): 29 def test_rtc(self):
22 (status, output) = self.target.run('hwclock -r') 30 (status, output) = self.target.run('hwclock -r')
23 self.assertEqual(status, 0, msg='Failed to get RTC time, output: %s' % output) 31 self.assertEqual(status, 0, msg='Failed to get RTC time, output: %s' % output)
24 32
25 (status, current_datetime) = self.target.run('date +"%m%d%H%M%Y"') 33 (status, current_datetime) = self.target.run('date +"%m%d%H%M%Y"')
26 self.assertEqual(status, 0, msg='Failed to get system current date & time, output: %s' % current_datetime) 34 self.assertEqual(status, 0, msg='Failed to get system current date & time, output: %s' % current_datetime)
27 35
@@ -32,7 +40,6 @@ class RTCTest(OERuntimeTestCase):
32 40
33 (status, output) = self.target.run('date %s' % current_datetime) 41 (status, output) = self.target.run('date %s' % current_datetime)
34 self.assertEqual(status, 0, msg='Failed to reset system date & time, output: %s' % output) 42 self.assertEqual(status, 0, msg='Failed to reset system date & time, output: %s' % output)
35 43
36 (status, output) = self.target.run('hwclock -w') 44 (status, output) = self.target.run('hwclock -w')
37 self.assertEqual(status, 0, msg='Failed to reset RTC time, output: %s' % output) 45 self.assertEqual(status, 0, msg='Failed to reset RTC time, output: %s' % output)
38
diff --git a/meta/lib/oeqa/runtime/cases/runlevel.py b/meta/lib/oeqa/runtime/cases/runlevel.py
index 3a4df8ace1..6734b0f5ed 100644
--- a/meta/lib/oeqa/runtime/cases/runlevel.py
+++ b/meta/lib/oeqa/runtime/cases/runlevel.py
@@ -1,3 +1,8 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
1from oeqa.runtime.case import OERuntimeTestCase 6from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.core.decorator.depends import OETestDepends 7from oeqa.core.decorator.depends import OETestDepends
3 8
diff --git a/meta/lib/oeqa/runtime/cases/rust.py b/meta/lib/oeqa/runtime/cases/rust.py
new file mode 100644
index 0000000000..123c942012
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/rust.py
@@ -0,0 +1,64 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.runtime.case import OERuntimeTestCase
8from oeqa.core.decorator.depends import OETestDepends
9from oeqa.runtime.decorator.package import OEHasPackage
10
11class RustCompileTest(OERuntimeTestCase):
12
13 @classmethod
14 def setUp(cls):
15 dst = '/tmp/'
16 src = os.path.join(cls.tc.files_dir, 'test.rs')
17 cls.tc.target.copyTo(src, dst)
18
19 @classmethod
20 def tearDown(cls):
21 files = '/tmp/test.rs /tmp/test'
22 cls.tc.target.run('rm %s' % files)
23 dirs = '/tmp/hello'
24 cls.tc.target.run('rm -r %s' % dirs)
25
26 @OETestDepends(['ssh.SSHTest.test_ssh'])
27 @OEHasPackage('rust')
28 @OEHasPackage('openssh-scp')
29 def test_rust_compile(self):
30 status, output = self.target.run('rustc /tmp/test.rs -o /tmp/test')
31 msg = 'rust compile failed, output: %s' % output
32 self.assertEqual(status, 0, msg=msg)
33
34 status, output = self.target.run('/tmp/test')
35 msg = 'running compiled file failed, output: %s' % output
36 self.assertEqual(status, 0, msg=msg)
37
38 @OETestDepends(['ssh.SSHTest.test_ssh'])
39 @OEHasPackage('cargo')
40 @OEHasPackage('openssh-scp')
41 def test_cargo_compile(self):
42 status, output = self.target.run('cargo new /tmp/hello')
43 msg = 'cargo new failed, output: %s' % output
44 self.assertEqual(status, 0, msg=msg)
45
46 status, output = self.target.run('cargo build --manifest-path=/tmp/hello/Cargo.toml')
47 msg = 'cargo build failed, output: %s' % output
48 self.assertEqual(status, 0, msg=msg)
49
50 status, output = self.target.run('cargo run --manifest-path=/tmp/hello/Cargo.toml')
51 msg = 'running compiled file failed, output: %s' % output
52 self.assertEqual(status, 0, msg=msg)
53
54class RustCLibExampleTest(OERuntimeTestCase):
55 @OETestDepends(['ssh.SSHTest.test_ssh'])
56 @OEHasPackage('rust-c-lib-example-bin')
57 def test_rust_c_lib_example(self):
58 cmd = "rust-c-lib-example-bin test"
59 status, output = self.target.run(cmd)
60 msg = 'Exit status was not 0. Output: %s' % output
61 self.assertEqual(status, 0, msg=msg)
62
63 msg = 'Incorrect output: %s' % output
64 self.assertEqual(output, "Hello world in rust from C!", msg=msg)
diff --git a/meta/lib/oeqa/runtime/cases/scons.py b/meta/lib/oeqa/runtime/cases/scons.py
index 3c7c7f7270..4a8d4d40ba 100644
--- a/meta/lib/oeqa/runtime/cases/scons.py
+++ b/meta/lib/oeqa/runtime/cases/scons.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/scp.py b/meta/lib/oeqa/runtime/cases/scp.py
index 3a5f292152..364264369a 100644
--- a/meta/lib/oeqa/runtime/cases/scp.py
+++ b/meta/lib/oeqa/runtime/cases/scp.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -23,7 +25,7 @@ class ScpTest(OERuntimeTestCase):
23 os.remove(cls.tmp_path) 25 os.remove(cls.tmp_path)
24 26
25 @OETestDepends(['ssh.SSHTest.test_ssh']) 27 @OETestDepends(['ssh.SSHTest.test_ssh'])
26 @OEHasPackage(['openssh-scp', 'dropbear']) 28 @OEHasPackage({'openssh-scp', 'openssh-sftp-server'})
27 def test_scp_file(self): 29 def test_scp_file(self):
28 dst = '/tmp/test_scp_file' 30 dst = '/tmp/test_scp_file'
29 31
diff --git a/meta/lib/oeqa/runtime/cases/skeletoninit.py b/meta/lib/oeqa/runtime/cases/skeletoninit.py
index 4779cd6bb4..be7b39a9a3 100644
--- a/meta/lib/oeqa/runtime/cases/skeletoninit.py
+++ b/meta/lib/oeqa/runtime/cases/skeletoninit.py
@@ -1,10 +1,11 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5# This test should cover https://bugzilla.yoctoproject.org/tr_show_case.cgi?case_id=284 7# Image under test must have meta-skeleton layer in bblayers and
6# testcase. Image under test must have meta-skeleton layer in bblayers and 8# IMAGE_INSTALL:append = " service" in local.conf
7# IMAGE_INSTALL_append = " service" in local.conf
8from oeqa.runtime.case import OERuntimeTestCase 9from oeqa.runtime.case import OERuntimeTestCase
9from oeqa.core.decorator.depends import OETestDepends 10from oeqa.core.decorator.depends import OETestDepends
10from oeqa.core.decorator.data import skipIfDataVar 11from oeqa.core.decorator.data import skipIfDataVar
@@ -15,7 +16,7 @@ class SkeletonBasicTest(OERuntimeTestCase):
15 @OETestDepends(['ssh.SSHTest.test_ssh']) 16 @OETestDepends(['ssh.SSHTest.test_ssh'])
16 @OEHasPackage(['service']) 17 @OEHasPackage(['service'])
17 @skipIfDataVar('VIRTUAL-RUNTIME_init_manager', 'systemd', 18 @skipIfDataVar('VIRTUAL-RUNTIME_init_manager', 'systemd',
18 'Not appropiate for systemd image') 19 'Not appropriate for systemd image')
19 def test_skeleton_availability(self): 20 def test_skeleton_availability(self):
20 status, output = self.target.run('ls /etc/init.d/skeleton') 21 status, output = self.target.run('ls /etc/init.d/skeleton')
21 msg = 'skeleton init script not found. Output:\n%s' % output 22 msg = 'skeleton init script not found. Output:\n%s' % output
diff --git a/meta/lib/oeqa/runtime/cases/ssh.py b/meta/lib/oeqa/runtime/cases/ssh.py
index 60a5fbbfbf..b632a29a01 100644
--- a/meta/lib/oeqa/runtime/cases/ssh.py
+++ b/meta/lib/oeqa/runtime/cases/ssh.py
@@ -1,8 +1,13 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5from oeqa.runtime.case import OERuntimeTestCase 7import time
8import signal
9
10from oeqa.runtime.case import OERuntimeTestCase, run_network_serialdebug
6from oeqa.core.decorator.depends import OETestDepends 11from oeqa.core.decorator.depends import OETestDepends
7from oeqa.runtime.decorator.package import OEHasPackage 12from oeqa.runtime.decorator.package import OEHasPackage
8 13
@@ -11,9 +16,23 @@ class SSHTest(OERuntimeTestCase):
11 @OETestDepends(['ping.PingTest.test_ping']) 16 @OETestDepends(['ping.PingTest.test_ping'])
12 @OEHasPackage(['dropbear', 'openssh-sshd']) 17 @OEHasPackage(['dropbear', 'openssh-sshd'])
13 def test_ssh(self): 18 def test_ssh(self):
14 (status, output) = self.target.run('uname -a') 19 for i in range(5):
15 self.assertEqual(status, 0, msg='SSH Test failed: %s' % output) 20 status, output = self.target.run("uname -a", timeout=30)
16 (status, output) = self.target.run('cat /etc/masterimage') 21 if status == 0:
17 msg = "This isn't the right image - /etc/masterimage " \ 22 break
18 "shouldn't be here %s" % output 23 elif status == 255 or status == -signal.SIGTERM:
19 self.assertEqual(status, 1, msg=msg) 24 # ssh returns 255 only if a ssh error occurs. This could
25 # be an issue with "Connection refused" because the port
26 # isn't open yet, and this could check explicitly for that
27 # here. However, let's keep it simple and just retry for
28 # all errors a limited amount of times with a sleep to
29 # give it time for the port to open.
30 # We sometimes see -15 (SIGTERM) on slow emulation machines too, likely
31 # from boot/init not being 100% complete, retry for these too.
32 time.sleep(5)
33 continue
34 else:
35 run_network_serialdebug(self.target.runner)
36 self.fail("uname failed with \"%s\" (exit code %s)" % (output, status))
37 if status != 0:
38 self.fail("ssh failed with \"%s\" (exit code %s)" % (output, status))
diff --git a/meta/lib/oeqa/runtime/cases/stap.py b/meta/lib/oeqa/runtime/cases/stap.py
index 5342f6ac34..6b55e7de50 100644
--- a/meta/lib/oeqa/runtime/cases/stap.py
+++ b/meta/lib/oeqa/runtime/cases/stap.py
@@ -1,37 +1,35 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import os 7import os
6 8
7from oeqa.runtime.case import OERuntimeTestCase 9from oeqa.runtime.case import OERuntimeTestCase
8from oeqa.core.decorator.depends import OETestDepends
9from oeqa.core.decorator.data import skipIfNotFeature 10from oeqa.core.decorator.data import skipIfNotFeature
10from oeqa.runtime.decorator.package import OEHasPackage 11from oeqa.runtime.decorator.package import OEHasPackage
11 12
12class StapTest(OERuntimeTestCase): 13class StapTest(OERuntimeTestCase):
13 14 @skipIfNotFeature('tools-profile', 'Test requires tools-profile to be in IMAGE_FEATURES')
14 @classmethod
15 def setUp(cls):
16 src = os.path.join(cls.tc.runtime_files_dir, 'hello.stp')
17 dst = '/tmp/hello.stp'
18 cls.tc.target.copyTo(src, dst)
19
20 @classmethod
21 def tearDown(cls):
22 files = '/tmp/hello.stp'
23 cls.tc.target.run('rm %s' % files)
24
25 @skipIfNotFeature('tools-profile',
26 'Test requires tools-profile to be in IMAGE_FEATURES')
27 @OETestDepends(['kernelmodule.KernelModuleTest.test_kernel_module'])
28 @OEHasPackage(['systemtap']) 15 @OEHasPackage(['systemtap'])
16 @OEHasPackage(['gcc-symlinks'])
17 @OEHasPackage(['kernel-devsrc'])
29 def test_stap(self): 18 def test_stap(self):
30 cmds = [ 19 try:
31 'cd /usr/src/kernel && make scripts prepare', 20 cmd = 'make -j -C /usr/src/kernel scripts prepare'
32 'cd /lib/modules/`uname -r` && (if [ ! -e build ]; then ln -s /usr/src/kernel build; fi)',
33 'stap --disable-cache -DSTP_NO_VERREL_CHECK /tmp/hello.stp'
34 ]
35 for cmd in cmds:
36 status, output = self.target.run(cmd, 900) 21 status, output = self.target.run(cmd, 900)
37 self.assertEqual(status, 0, msg='\n'.join([cmd, output])) 22 self.assertEqual(status, 0, msg='\n'.join([cmd, output]))
23
24 cmd = 'stap -v -p4 -m stap_hello --disable-cache -DSTP_NO_VERREL_CHECK -e \'probe oneshot { print("Hello, "); println("SystemTap!") }\''
25 status, output = self.target.run(cmd, 900)
26 self.assertEqual(status, 0, msg='\n'.join([cmd, output]))
27
28 cmd = 'staprun -v -R -b1 stap_hello.ko'
29 status, output = self.target.run(cmd, 60)
30 self.assertEqual(status, 0, msg='\n'.join([cmd, output]))
31 self.assertIn('Hello, SystemTap!', output, msg='\n'.join([cmd, output]))
32 except:
33 status, dmesg = self.target.run('dmesg')
34 if status == 0:
35 print(dmesg)
diff --git a/meta/lib/oeqa/runtime/cases/storage.py b/meta/lib/oeqa/runtime/cases/storage.py
index 166d26b252..b05622fea8 100644
--- a/meta/lib/oeqa/runtime/cases/storage.py
+++ b/meta/lib/oeqa/runtime/cases/storage.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -91,24 +93,24 @@ class UsbTest(StorageBase):
91 self.test_file = "usb.tst" 93 self.test_file = "usb.tst"
92 self.test_dir = os.path.join(self.mount_point, "oeqa") 94 self.test_dir = os.path.join(self.mount_point, "oeqa")
93 95
94 @skipIfQemu('qemuall', 'Test only runs on real hardware') 96 @skipIfQemu()
95 @OETestDepends(['ssh.SSHTest.test_ssh']) 97 @OETestDepends(['ssh.SSHTest.test_ssh'])
96 def test_usb_mount(self): 98 def test_usb_mount(self):
97 self.storage_umount(2) 99 self.storage_umount(2)
98 self.storage_mount(5) 100 self.storage_mount(5)
99 101
100 @skipIfQemu('qemuall', 'Test only runs on real hardware') 102 @skipIfQemu()
101 @OETestDepends(['storage.UsbTest.test_usb_mount']) 103 @OETestDepends(['storage.UsbTest.test_usb_mount'])
102 def test_usb_basic_operations(self): 104 def test_usb_basic_operations(self):
103 self.storage_basic() 105 self.storage_basic()
104 106
105 @skipIfQemu('qemuall', 'Test only runs on real hardware') 107 @skipIfQemu()
106 @OETestDepends(['storage.UsbTest.test_usb_basic_operations']) 108 @OETestDepends(['storage.UsbTest.test_usb_basic_operations'])
107 def test_usb_basic_rw(self): 109 def test_usb_basic_rw(self):
108 self.storage_write() 110 self.storage_write()
109 self.storage_read() 111 self.storage_read()
110 112
111 @skipIfQemu('qemuall', 'Test only runs on real hardware') 113 @skipIfQemu()
112 @OETestDepends(['storage.UsbTest.test_usb_mount']) 114 @OETestDepends(['storage.UsbTest.test_usb_mount'])
113 def test_usb_umount(self): 115 def test_usb_umount(self):
114 self.storage_umount(2) 116 self.storage_umount(2)
@@ -126,24 +128,24 @@ class MMCTest(StorageBase):
126 self.test_file = "mmc.tst" 128 self.test_file = "mmc.tst"
127 self.test_dir = os.path.join(self.mount_point, "oeqa") 129 self.test_dir = os.path.join(self.mount_point, "oeqa")
128 130
129 @skipIfQemu('qemuall', 'Test only runs on real hardware') 131 @skipIfQemu()
130 @OETestDepends(['ssh.SSHTest.test_ssh']) 132 @OETestDepends(['ssh.SSHTest.test_ssh'])
131 def test_mmc_mount(self): 133 def test_mmc_mount(self):
132 self.storage_umount(2) 134 self.storage_umount(2)
133 self.storage_mount() 135 self.storage_mount()
134 136
135 @skipIfQemu('qemuall', 'Test only runs on real hardware') 137 @skipIfQemu()
136 @OETestDepends(['storage.MMCTest.test_mmc_mount']) 138 @OETestDepends(['storage.MMCTest.test_mmc_mount'])
137 def test_mmc_basic_operations(self): 139 def test_mmc_basic_operations(self):
138 self.storage_basic() 140 self.storage_basic()
139 141
140 @skipIfQemu('qemuall', 'Test only runs on real hardware') 142 @skipIfQemu()
141 @OETestDepends(['storage.MMCTest.test_mmc_basic_operations']) 143 @OETestDepends(['storage.MMCTest.test_mmc_basic_operations'])
142 def test_mmc_basic_rw(self): 144 def test_mmc_basic_rw(self):
143 self.storage_write() 145 self.storage_write()
144 self.storage_read() 146 self.storage_read()
145 147
146 @skipIfQemu('qemuall', 'Test only runs on real hardware') 148 @skipIfQemu()
147 @OETestDepends(['storage.MMCTest.test_mmc_mount']) 149 @OETestDepends(['storage.MMCTest.test_mmc_mount'])
148 def test_mmc_umount(self): 150 def test_mmc_umount(self):
149 self.storage_umount(2) 151 self.storage_umount(2)
diff --git a/meta/lib/oeqa/runtime/cases/suspend.py b/meta/lib/oeqa/runtime/cases/suspend.py
index 67b6f7e56f..a625cc5901 100644
--- a/meta/lib/oeqa/runtime/cases/suspend.py
+++ b/meta/lib/oeqa/runtime/cases/suspend.py
@@ -1,3 +1,8 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
1from oeqa.runtime.case import OERuntimeTestCase 6from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.core.decorator.depends import OETestDepends 7from oeqa.core.decorator.depends import OETestDepends
3from oeqa.core.decorator.data import skipIfQemu 8from oeqa.core.decorator.data import skipIfQemu
@@ -23,7 +28,7 @@ class Suspend_Test(OERuntimeTestCase):
23 (status, output) = self.target.run('sudo rtcwake -m mem -s 10') 28 (status, output) = self.target.run('sudo rtcwake -m mem -s 10')
24 self.assertEqual(status, 0, msg = 'Failed to suspends your system to RAM, output : %s' % output) 29 self.assertEqual(status, 0, msg = 'Failed to suspends your system to RAM, output : %s' % output)
25 30
26 @skipIfQemu('qemuall', 'Test only runs on real hardware') 31 @skipIfQemu()
27 @OETestDepends(['ssh.SSHTest.test_ssh']) 32 @OETestDepends(['ssh.SSHTest.test_ssh'])
28 def test_suspend(self): 33 def test_suspend(self):
29 self.test_date() 34 self.test_date()
diff --git a/meta/lib/oeqa/runtime/cases/systemd.py b/meta/lib/oeqa/runtime/cases/systemd.py
index 7c44abe8ed..640f28abe9 100644
--- a/meta/lib/oeqa/runtime/cases/systemd.py
+++ b/meta/lib/oeqa/runtime/cases/systemd.py
@@ -1,8 +1,11 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import re 7import re
8import threading
6import time 9import time
7 10
8from oeqa.runtime.case import OERuntimeTestCase 11from oeqa.runtime.case import OERuntimeTestCase
@@ -66,8 +69,8 @@ class SystemdBasicTests(SystemdTest):
66 """ 69 """
67 endtime = time.time() + (60 * 2) 70 endtime = time.time() + (60 * 2)
68 while True: 71 while True:
69 status, output = self.target.run('SYSTEMD_BUS_TIMEOUT=240s systemctl --state=activating') 72 status, output = self.target.run('SYSTEMD_BUS_TIMEOUT=240s systemctl is-system-running')
70 if "0 loaded units listed" in output: 73 if "running" in output or "degraded" in output:
71 return (True, '') 74 return (True, '')
72 if time.time() >= endtime: 75 if time.time() >= endtime:
73 return (False, output) 76 return (False, output)
@@ -134,6 +137,38 @@ class SystemdServiceTests(SystemdTest):
134 status = self.target.run('mount -oro,remount /')[0] 137 status = self.target.run('mount -oro,remount /')[0]
135 self.assertTrue(status == 0, msg='Remounting / as r/o failed') 138 self.assertTrue(status == 0, msg='Remounting / as r/o failed')
136 139
140 @OETestDepends(['systemd.SystemdBasicTests.test_systemd_basic'])
141 @skipIfNotFeature('minidebuginfo', 'Test requires minidebuginfo to be in DISTRO_FEATURES')
142 @OEHasPackage(['busybox'])
143 def test_systemd_coredump_minidebuginfo(self):
144 """
145 Verify that call-stacks generated by systemd-coredump contain symbolicated call-stacks,
146 extracted from the minidebuginfo metadata (.gnu_debugdata elf section).
147 """
148 # use "env sleep" instead of "sleep" to avoid calling the shell builtin function
149 t_thread = threading.Thread(target=self.target.run, args=("ulimit -c unlimited && env sleep 1000",))
150 t_thread.start()
151 time.sleep(1)
152
153 status, sleep_pid = self.target.run('pidof sleep')
154 # cause segfault on purpose
155 self.target.run('kill -SEGV %s' % sleep_pid)
156 self.assertEqual(status, 0, msg = 'Not able to find process that runs sleep, output : %s' % sleep_pid)
157
158 # Give some time to systemd-coredump@.service to process the coredump
159 for x in range(20):
160 status, output = self.target.run('coredumpctl list %s' % sleep_pid)
161 if status == 0:
162 break
163 time.sleep(1)
164 else:
165 self.fail("Timed out waiting for coredump creation")
166
167 (status, output) = self.target.run('coredumpctl info %s' % sleep_pid)
168 self.assertEqual(status, 0, msg='MiniDebugInfo Test failed: %s' % output)
169 self.assertEqual('sleep_for_duration (busybox.nosuid' in output or 'xnanosleep (sleep.coreutils' in output,
170 True, msg='Call stack is missing minidebuginfo symbols (functions shown as "n/a"): %s' % output)
171
137class SystemdJournalTests(SystemdTest): 172class SystemdJournalTests(SystemdTest):
138 173
139 @OETestDepends(['systemd.SystemdBasicTests.test_systemd_basic']) 174 @OETestDepends(['systemd.SystemdBasicTests.test_systemd_basic'])
@@ -152,7 +187,7 @@ class SystemdJournalTests(SystemdTest):
152 """ 187 """
153 188
154 # The expression chain that uniquely identifies the time boot message. 189 # The expression chain that uniquely identifies the time boot message.
155 expr_items=['Startup finished', 'kernel', 'userspace','\.$'] 190 expr_items=['Startup finished', 'kernel', 'userspace', r'\.$']
156 try: 191 try:
157 output = self.journalctl(args='-o cat --reverse') 192 output = self.journalctl(args='-o cat --reverse')
158 except AssertionError: 193 except AssertionError:
diff --git a/meta/lib/oeqa/runtime/cases/terminal.py b/meta/lib/oeqa/runtime/cases/terminal.py
index 8fcca99f47..96ba3c3195 100644
--- a/meta/lib/oeqa/runtime/cases/terminal.py
+++ b/meta/lib/oeqa/runtime/cases/terminal.py
@@ -1,3 +1,8 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
1from oeqa.runtime.case import OERuntimeTestCase 6from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.core.decorator.depends import OETestDepends 7from oeqa.core.decorator.depends import OETestDepends
3from oeqa.runtime.decorator.package import OEHasPackage 8from oeqa.runtime.decorator.package import OEHasPackage
diff --git a/meta/lib/oeqa/runtime/cases/uki.py b/meta/lib/oeqa/runtime/cases/uki.py
new file mode 100644
index 0000000000..77bc5b9791
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/uki.py
@@ -0,0 +1,16 @@
1# SPDX-License-Identifier: MIT
2#
3
4from oeqa.runtime.case import OERuntimeTestCase
5from oeqa.core.decorator.data import skipIfNotInDataVar
6
7class UkiTest(OERuntimeTestCase):
8
9 @skipIfNotInDataVar('IMAGE_CLASSES', 'uki', 'Test case uki is for images which use uki.bbclass')
10 def test_uki(self):
11 uki_filename = self.td.get('UKI_FILENAME')
12 status, output = self.target.run('ls /boot/EFI/Linux/%s' % uki_filename)
13 self.assertEqual(status, 0, output)
14
15 status, output = self.target.run('echo $( cat /sys/firmware/efi/efivars/LoaderEntrySelected-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep %s' % uki_filename)
16 self.assertEqual(status, 0, output)
diff --git a/meta/lib/oeqa/runtime/cases/usb_hid.py b/meta/lib/oeqa/runtime/cases/usb_hid.py
index 3c292cf661..6f23d2ff51 100644
--- a/meta/lib/oeqa/runtime/cases/usb_hid.py
+++ b/meta/lib/oeqa/runtime/cases/usb_hid.py
@@ -1,3 +1,8 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
1from oeqa.runtime.case import OERuntimeTestCase 6from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.core.decorator.depends import OETestDepends 7from oeqa.core.decorator.depends import OETestDepends
3from oeqa.core.decorator.data import skipIfQemu 8from oeqa.core.decorator.data import skipIfQemu
@@ -14,7 +19,7 @@ class USB_HID_Test(OERuntimeTestCase):
14 return self.assertEqual(status, 0, msg = 'Failed to suspends your system to RAM, output : %s' % output) 19 return self.assertEqual(status, 0, msg = 'Failed to suspends your system to RAM, output : %s' % output)
15 20
16 @OEHasPackage(['xdotool']) 21 @OEHasPackage(['xdotool'])
17 @skipIfQemu('qemuall', 'Test only runs on real hardware') 22 @skipIfQemu()
18 @OETestDepends(['ssh.SSHTest.test_ssh']) 23 @OETestDepends(['ssh.SSHTest.test_ssh'])
19 def test_USB_Hid_input(self): 24 def test_USB_Hid_input(self):
20 self.keyboard_mouse_simulation() 25 self.keyboard_mouse_simulation()
diff --git a/meta/lib/oeqa/runtime/cases/weston.py b/meta/lib/oeqa/runtime/cases/weston.py
index a1c7183213..ee4d336482 100644
--- a/meta/lib/oeqa/runtime/cases/weston.py
+++ b/meta/lib/oeqa/runtime/cases/weston.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -10,7 +12,7 @@ import threading
10import time 12import time
11 13
12class WestonTest(OERuntimeTestCase): 14class WestonTest(OERuntimeTestCase):
13 weston_log_file = '/tmp/weston.log' 15 weston_log_file = '/tmp/weston-2.log'
14 16
15 @classmethod 17 @classmethod
16 def tearDownClass(cls): 18 def tearDownClass(cls):
@@ -31,13 +33,13 @@ class WestonTest(OERuntimeTestCase):
31 return output.split(" ") 33 return output.split(" ")
32 34
33 def get_weston_command(self, cmd): 35 def get_weston_command(self, cmd):
34 return 'export XDG_RUNTIME_DIR=/run/user/0; export WAYLAND_DISPLAY=wayland-0; %s' % cmd 36 return 'export XDG_RUNTIME_DIR=/run/user/`id -u weston`; export WAYLAND_DISPLAY=wayland-1; %s' % cmd
35 37
36 def run_weston_init(self): 38 def run_weston_init(self):
37 if 'systemd' in self.tc.td['VIRTUAL-RUNTIME_init_manager']: 39 if 'systemd' in self.tc.td['VIRTUAL-RUNTIME_init_manager']:
38 self.target.run('systemd-run --collect --unit=weston-ptest.service --uid=0 -p PAMName=login -p TTYPath=/dev/tty6 -E XDG_RUNTIME_DIR=/tmp -E WAYLAND_DISPLAY=wayland-0 /usr/bin/weston --socket=wayland-1 --log=%s' % self.weston_log_file) 40 self.target.run('systemd-run --collect --unit=weston-ptest.service --uid=0 -p PAMName=login -p TTYPath=/dev/tty6 -E XDG_RUNTIME_DIR=/tmp -E WAYLAND_DISPLAY=wayland-0 /usr/bin/weston --socket=wayland-1 --log=%s' % self.weston_log_file)
39 else: 41 else:
40 self.target.run(self.get_weston_command('openvt -- weston --socket=wayland-1 --log=%s' % self.weston_log_file)) 42 self.target.run(self.get_weston_command('openvt -- weston --socket=wayland-2 --log=%s' % self.weston_log_file))
41 43
42 def get_new_wayland_processes(self, existing_wl_processes): 44 def get_new_wayland_processes(self, existing_wl_processes):
43 try_cnt = 0 45 try_cnt = 0
@@ -53,7 +55,11 @@ class WestonTest(OERuntimeTestCase):
53 55
54 @OEHasPackage(['wayland-utils']) 56 @OEHasPackage(['wayland-utils'])
55 def test_wayland_info(self): 57 def test_wayland_info(self):
56 status, output = self.target.run(self.get_weston_command('wayland-info')) 58 if 'systemd' in self.tc.td['VIRTUAL-RUNTIME_init_manager']:
59 command = 'XDG_RUNTIME_DIR=/run wayland-info'
60 else:
61 command = self.get_weston_command('wayland-info')
62 status, output = self.target.run(command)
57 self.assertEqual(status, 0, msg='wayland-info error: %s' % output) 63 self.assertEqual(status, 0, msg='wayland-info error: %s' % output)
58 64
59 @OEHasPackage(['weston']) 65 @OEHasPackage(['weston'])
@@ -73,3 +79,11 @@ class WestonTest(OERuntimeTestCase):
73 self.target.run('kill -9 %s' % w) 79 self.target.run('kill -9 %s' % w)
74 __, weston_log = self.target.run('cat %s' % self.weston_log_file) 80 __, weston_log = self.target.run('cat %s' % self.weston_log_file)
75 self.assertTrue(new_wl_processes, msg='Could not get new weston-desktop-shell processes (%s, try_cnt:%s) weston log: %s' % (new_wl_processes, try_cnt, weston_log)) 81 self.assertTrue(new_wl_processes, msg='Could not get new weston-desktop-shell processes (%s, try_cnt:%s) weston log: %s' % (new_wl_processes, try_cnt, weston_log))
82
83 @skipIfNotFeature('x11', 'Test requires x11 to be in DISTRO_FEATURES')
84 @OEHasPackage(['weston'])
85 def test_weston_supports_xwayland(self):
86 cmd ='cat %s | grep "xserver listening on display"' % self.weston_log_file
87 status, output = self.target.run(cmd)
88 msg = ('xwayland does not appear to be running')
89 self.assertEqual(status, 0, msg=msg)
diff --git a/meta/lib/oeqa/runtime/cases/x32lib.py b/meta/lib/oeqa/runtime/cases/x32lib.py
index f419c8f181..014da4b386 100644
--- a/meta/lib/oeqa/runtime/cases/x32lib.py
+++ b/meta/lib/oeqa/runtime/cases/x32lib.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/xorg.py b/meta/lib/oeqa/runtime/cases/xorg.py
index d6845587c2..09afb1e3d1 100644
--- a/meta/lib/oeqa/runtime/cases/xorg.py
+++ b/meta/lib/oeqa/runtime/cases/xorg.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/context.py b/meta/lib/oeqa/runtime/context.py
index 3826f27642..daabc44910 100644
--- a/meta/lib/oeqa/runtime/context.py
+++ b/meta/lib/oeqa/runtime/context.py
@@ -5,11 +5,12 @@
5# 5#
6 6
7import os 7import os
8import sys
8 9
9from oeqa.core.context import OETestContext, OETestContextExecutor 10from oeqa.core.context import OETestContext, OETestContextExecutor
11from oeqa.core.target.serial import OESerialTarget
10from oeqa.core.target.ssh import OESSHTarget 12from oeqa.core.target.ssh import OESSHTarget
11from oeqa.core.target.qemu import OEQemuTarget 13from oeqa.core.target.qemu import OEQemuTarget
12from oeqa.utils.dump import HostDumper
13 14
14from oeqa.runtime.loader import OERuntimeTestLoader 15from oeqa.runtime.loader import OERuntimeTestLoader
15 16
@@ -19,12 +20,11 @@ class OERuntimeTestContext(OETestContext):
19 os.path.dirname(os.path.abspath(__file__)), "files") 20 os.path.dirname(os.path.abspath(__file__)), "files")
20 21
21 def __init__(self, td, logger, target, 22 def __init__(self, td, logger, target,
22 host_dumper, image_packages, extract_dir): 23 image_packages, extract_dir):
23 super(OERuntimeTestContext, self).__init__(td, logger) 24 super(OERuntimeTestContext, self).__init__(td, logger)
24 25
25 self.target = target 26 self.target = target
26 self.image_packages = image_packages 27 self.image_packages = image_packages
27 self.host_dumper = host_dumper
28 self.extract_dir = extract_dir 28 self.extract_dir = extract_dir
29 self._set_target_cmds() 29 self._set_target_cmds()
30 30
@@ -61,16 +61,16 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
61 runtime_group = self.parser.add_argument_group('runtime options') 61 runtime_group = self.parser.add_argument_group('runtime options')
62 62
63 runtime_group.add_argument('--target-type', action='store', 63 runtime_group.add_argument('--target-type', action='store',
64 default=self.default_target_type, choices=['simpleremote', 'qemu'], 64 default=self.default_target_type, choices=['simpleremote', 'qemu', 'serial'],
65 help="Target type of device under test, default: %s" \ 65 help="Target type of device under test, default: %s" \
66 % self.default_target_type) 66 % self.default_target_type)
67 runtime_group.add_argument('--target-ip', action='store', 67 runtime_group.add_argument('--target-ip', action='store',
68 default=self.default_target_ip, 68 default=self.default_target_ip,
69 help="IP address of device under test, default: %s" \ 69 help="IP address and optionally ssh port (default 22) of device under test, for example '192.168.0.7:22'. Default: %s" \
70 % self.default_target_ip) 70 % self.default_target_ip)
71 runtime_group.add_argument('--server-ip', action='store', 71 runtime_group.add_argument('--server-ip', action='store',
72 default=self.default_target_ip, 72 default=self.default_target_ip,
73 help="IP address of device under test, default: %s" \ 73 help="IP address of the test host from test target machine, default: %s" \
74 % self.default_server_ip) 74 % self.default_server_ip)
75 75
76 runtime_group.add_argument('--host-dumper-dir', action='store', 76 runtime_group.add_argument('--host-dumper-dir', action='store',
@@ -109,6 +109,8 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
109 target = OESSHTarget(logger, target_ip, server_ip, **kwargs) 109 target = OESSHTarget(logger, target_ip, server_ip, **kwargs)
110 elif target_type == 'qemu': 110 elif target_type == 'qemu':
111 target = OEQemuTarget(logger, server_ip, **kwargs) 111 target = OEQemuTarget(logger, server_ip, **kwargs)
112 elif target_type == 'serial':
113 target = OESerialTarget(logger, target_ip, server_ip, **kwargs)
112 else: 114 else:
113 # XXX: This code uses the old naming convention for controllers and 115 # XXX: This code uses the old naming convention for controllers and
114 # targets, the idea it is to leave just targets as the controller 116 # targets, the idea it is to leave just targets as the controller
@@ -119,8 +121,7 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
119 # XXX: Don't base your targets on this code it will be refactored 121 # XXX: Don't base your targets on this code it will be refactored
120 # in the near future. 122 # in the near future.
121 # Custom target module loading 123 # Custom target module loading
122 target_modules_path = kwargs.get('target_modules_path', '') 124 controller = OERuntimeTestContextExecutor.getControllerModule(target_type)
123 controller = OERuntimeTestContextExecutor.getControllerModule(target_type, target_modules_path)
124 target = controller(logger, target_ip, server_ip, **kwargs) 125 target = controller(logger, target_ip, server_ip, **kwargs)
125 126
126 return target 127 return target
@@ -130,15 +131,15 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
130 # AttributeError raised if not found. 131 # AttributeError raised if not found.
131 # ImportError raised if a provided module can not be imported. 132 # ImportError raised if a provided module can not be imported.
132 @staticmethod 133 @staticmethod
133 def getControllerModule(target, target_modules_path): 134 def getControllerModule(target):
134 controllerslist = OERuntimeTestContextExecutor._getControllerModulenames(target_modules_path) 135 controllerslist = OERuntimeTestContextExecutor._getControllerModulenames()
135 controller = OERuntimeTestContextExecutor._loadControllerFromName(target, controllerslist) 136 controller = OERuntimeTestContextExecutor._loadControllerFromName(target, controllerslist)
136 return controller 137 return controller
137 138
138 # Return a list of all python modules in lib/oeqa/controllers for each 139 # Return a list of all python modules in lib/oeqa/controllers for each
139 # layer in bbpath 140 # layer in bbpath
140 @staticmethod 141 @staticmethod
141 def _getControllerModulenames(target_modules_path): 142 def _getControllerModulenames():
142 143
143 controllerslist = [] 144 controllerslist = []
144 145
@@ -153,9 +154,12 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
153 else: 154 else:
154 raise RuntimeError("Duplicate controller module found for %s. Layers should create unique controller module names" % module) 155 raise RuntimeError("Duplicate controller module found for %s. Layers should create unique controller module names" % module)
155 156
156 extpath = target_modules_path.split(':') 157 # sys.path can contain duplicate paths, but because of the login in
157 for p in extpath: 158 # add_controller_list this doesn't work and causes testimage to abort.
158 controllerpath = os.path.join(p, 'lib', 'oeqa', 'controllers') 159 # Remove duplicates using an intermediate dictionary to ensure this
160 # doesn't happen.
161 for p in list(dict.fromkeys(sys.path)):
162 controllerpath = os.path.join(p, 'oeqa', 'controllers')
159 if os.path.exists(controllerpath): 163 if os.path.exists(controllerpath):
160 add_controller_list(controllerpath) 164 add_controller_list(controllerpath)
161 return controllerslist 165 return controllerslist
@@ -175,16 +179,12 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
175 # Search for and return a controller or None from given module name 179 # Search for and return a controller or None from given module name
176 @staticmethod 180 @staticmethod
177 def _loadControllerFromModule(target, modulename): 181 def _loadControllerFromModule(target, modulename):
178 obj = None
179 # import module, allowing it to raise import exception
180 module = __import__(modulename, globals(), locals(), [target])
181 # look for target class in the module, catching any exceptions as it
182 # is valid that a module may not have the target class.
183 try: 182 try:
184 obj = getattr(module, target) 183 import importlib
185 except: 184 module = importlib.import_module(modulename)
186 obj = None 185 return getattr(module, target)
187 return obj 186 except AttributeError:
187 return None
188 188
189 @staticmethod 189 @staticmethod
190 def readPackagesManifest(manifest): 190 def readPackagesManifest(manifest):
@@ -200,25 +200,25 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
200 200
201 return image_packages 201 return image_packages
202 202
203 @staticmethod
204 def getHostDumper(cmds, directory):
205 return HostDumper(cmds, directory)
206
207 def _process_args(self, logger, args): 203 def _process_args(self, logger, args):
208 if not args.packages_manifest: 204 if not args.packages_manifest:
209 raise TypeError('Manifest file not provided') 205 raise TypeError('Manifest file not provided')
210 206
211 super(OERuntimeTestContextExecutor, self)._process_args(logger, args) 207 super(OERuntimeTestContextExecutor, self)._process_args(logger, args)
212 208
209 td = self.tc_kwargs['init']['td']
210
213 target_kwargs = {} 211 target_kwargs = {}
212 target_kwargs['machine'] = td.get("MACHINE") or None
214 target_kwargs['qemuboot'] = args.qemu_boot 213 target_kwargs['qemuboot'] = args.qemu_boot
214 target_kwargs['serialcontrol_cmd'] = td.get("TEST_SERIALCONTROL_CMD") or None
215 target_kwargs['serialcontrol_extra_args'] = td.get("TEST_SERIALCONTROL_EXTRA_ARGS") or ""
216 target_kwargs['serialcontrol_ps1'] = td.get("TEST_SERIALCONTROL_PS1") or None
217 target_kwargs['serialcontrol_connect_timeout'] = td.get("TEST_SERIALCONTROL_CONNECT_TIMEOUT") or None
215 218
216 self.tc_kwargs['init']['target'] = \ 219 self.tc_kwargs['init']['target'] = \
217 OERuntimeTestContextExecutor.getTarget(args.target_type, 220 OERuntimeTestContextExecutor.getTarget(args.target_type,
218 None, args.target_ip, args.server_ip, **target_kwargs) 221 None, args.target_ip, args.server_ip, **target_kwargs)
219 self.tc_kwargs['init']['host_dumper'] = \
220 OERuntimeTestContextExecutor.getHostDumper(None,
221 args.host_dumper_dir)
222 self.tc_kwargs['init']['image_packages'] = \ 222 self.tc_kwargs['init']['image_packages'] = \
223 OERuntimeTestContextExecutor.readPackagesManifest( 223 OERuntimeTestContextExecutor.readPackagesManifest(
224 args.packages_manifest) 224 args.packages_manifest)
diff --git a/meta/lib/oeqa/runtime/decorator/package.py b/meta/lib/oeqa/runtime/decorator/package.py
index 57178655cc..b78ac9fc38 100644
--- a/meta/lib/oeqa/runtime/decorator/package.py
+++ b/meta/lib/oeqa/runtime/decorator/package.py
@@ -5,7 +5,6 @@
5# 5#
6 6
7from oeqa.core.decorator import OETestDecorator, registerDecorator 7from oeqa.core.decorator import OETestDecorator, registerDecorator
8from oeqa.core.utils.misc import strToSet
9 8
10@registerDecorator 9@registerDecorator
11class OEHasPackage(OETestDecorator): 10class OEHasPackage(OETestDecorator):
@@ -34,25 +33,30 @@ class OEHasPackage(OETestDecorator):
34 def setUpDecorator(self): 33 def setUpDecorator(self):
35 need_pkgs = set() 34 need_pkgs = set()
36 unneed_pkgs = set() 35 unneed_pkgs = set()
37 pkgs = strToSet(self.need_pkgs) 36
38 for pkg in pkgs: 37 # Turn literal strings into a list so we can just iterate over it
38 if isinstance(self.need_pkgs, str):
39 self.need_pkgs = [self.need_pkgs,]
40
41 mlprefix = self.case.td.get("MLPREFIX")
42 for pkg in self.need_pkgs:
39 if pkg.startswith('!'): 43 if pkg.startswith('!'):
40 unneed_pkgs.add(pkg[1:]) 44 unneed_pkgs.add(mlprefix + pkg[1:])
41 else: 45 else:
42 need_pkgs.add(pkg) 46 need_pkgs.add(mlprefix + pkg)
43 47
44 if unneed_pkgs: 48 if unneed_pkgs:
45 msg = 'Checking if %s is not installed' % ', '.join(unneed_pkgs) 49 msg = 'Checking if %s is not installed' % ', '.join(unneed_pkgs)
46 self.logger.debug(msg) 50 self.logger.debug(msg)
47 if not self.case.tc.image_packages.isdisjoint(unneed_pkgs): 51 if not self.case.tc.image_packages.isdisjoint(unneed_pkgs):
48 msg = "Test can't run with %s installed" % ', or'.join(unneed_pkgs) 52 msg = "Test can't run with %s installed" % ', or '.join(unneed_pkgs)
49 self._decorator_fail(msg) 53 self._decorator_fail(msg)
50 54
51 if need_pkgs: 55 if need_pkgs:
52 msg = 'Checking if at least one of %s is installed' % ', '.join(need_pkgs) 56 msg = 'Checking if at least one of %s is installed' % ', '.join(need_pkgs)
53 self.logger.debug(msg) 57 self.logger.debug(msg)
54 if self.case.tc.image_packages.isdisjoint(need_pkgs): 58 if self.case.tc.image_packages.isdisjoint(need_pkgs):
55 msg = "Test requires %s to be installed" % ', or'.join(need_pkgs) 59 msg = "Test requires %s to be installed" % ', or '.join(need_pkgs)
56 self._decorator_fail(msg) 60 self._decorator_fail(msg)
57 61
58 def _decorator_fail(self, msg): 62 def _decorator_fail(self, msg):
diff --git a/meta/lib/oeqa/runtime/files/hello.stp b/meta/lib/oeqa/runtime/files/hello.stp
deleted file mode 100644
index 3677147162..0000000000
--- a/meta/lib/oeqa/runtime/files/hello.stp
+++ /dev/null
@@ -1 +0,0 @@
1probe oneshot { println("hello world") }
diff --git a/meta/lib/oeqa/sdk/case.py b/meta/lib/oeqa/sdk/case.py
index c45882689c..1fd3b3b569 100644
--- a/meta/lib/oeqa/sdk/case.py
+++ b/meta/lib/oeqa/sdk/case.py
@@ -6,8 +6,11 @@
6 6
7import os 7import os
8import subprocess 8import subprocess
9import shutil
10import unittest
9 11
10from oeqa.core.case import OETestCase 12from oeqa.core.case import OETestCase
13from oeqa.sdkext.context import OESDKExtTestContext
11 14
12class OESDKTestCase(OETestCase): 15class OESDKTestCase(OETestCase):
13 def _run(self, cmd): 16 def _run(self, cmd):
@@ -15,18 +18,76 @@ class OESDKTestCase(OETestCase):
15 (self.tc.sdk_env, cmd), shell=True, executable="/bin/bash", 18 (self.tc.sdk_env, cmd), shell=True, executable="/bin/bash",
16 stderr=subprocess.STDOUT, universal_newlines=True) 19 stderr=subprocess.STDOUT, universal_newlines=True)
17 20
21 def ensure_host_package(self, *packages, recipe=None):
22 """
23 Check that the host variation of one of the packages listed is available
24 in the SDK (nativesdk-foo for SDK, foo-native for eSDK). The package is
 25 a list for the case where debian-renaming may have occurred, and the
26 manifest could contain 'foo' or 'libfoo'.
27
28 If testing an eSDK and the package is not found, then try to install the
29 specified recipe to install it from sstate.
30 """
31
32 # In a SDK the manifest is correct. In an eSDK the manifest may be
33 # correct (type=full) or not include packages that exist in sstate but
34 # not installed yet (minimal) so we should try to install the recipe.
35 for package in packages:
36 if isinstance(self.tc, OESDKExtTestContext):
37 package = package + "-native"
38 else:
39 package = "nativesdk-" + package
40
41 if self.tc.hasHostPackage(package):
42 break
43 else:
44 if isinstance(self.tc, OESDKExtTestContext):
45 recipe = (recipe or packages[0]) + "-native"
46 print("Trying to install %s..." % recipe)
47 self._run('devtool sdk-install %s' % recipe)
48 else:
49 raise unittest.SkipTest("Test %s needs one of %s" % (self.id(), ", ".join(packages)))
50
51 def ensure_target_package(self, *packages, multilib=False, recipe=None):
52 """
53 Check that at least one of the packages listed is available in the SDK,
54 adding the multilib prefix if required. The target package is a list for
 55 the case where debian-renaming may have occurred, and the manifest could
56 contain 'foo' or 'libfoo'.
57
58 If testing an eSDK and the package is not found, then try to install the
59 specified recipe to install it from sstate.
60 """
61
62 # In a SDK the manifest is correct. In an eSDK the manifest may be
63 # correct (type=full) or not include packages that exist in sstate but
64 # not installed yet (minimal) so we should try to install the recipe.
65 for package in packages:
66 if self.tc.hasTargetPackage(package, multilib=multilib):
67 break
68 else:
69 if isinstance(self.tc, OESDKExtTestContext):
70 recipe = recipe or packages[0]
71 print("Trying to install %s..." % recipe)
72 self._run('devtool sdk-install %s' % recipe)
73 else:
74 raise unittest.SkipTest("Test %s needs one of %s" % (self.id(), ", ".join(packages)))
75
76
18 def fetch(self, workdir, dl_dir, url, archive=None): 77 def fetch(self, workdir, dl_dir, url, archive=None):
19 if not archive: 78 if not archive:
20 from urllib.parse import urlparse 79 from urllib.parse import urlparse
21 archive = os.path.basename(urlparse(url).path) 80 archive = os.path.basename(urlparse(url).path)
22 81
23 if dl_dir: 82 if dl_dir:
24 tarball = os.path.join(dl_dir, archive) 83 archive_tarball = os.path.join(dl_dir, archive)
25 if os.path.exists(tarball): 84 if os.path.exists(archive_tarball):
26 return tarball 85 return archive_tarball
27 86
28 tarball = os.path.join(workdir, archive) 87 tarball = os.path.join(workdir, archive)
29 subprocess.check_output(["wget", "-O", tarball, url], stderr=subprocess.STDOUT) 88 subprocess.check_output(["wget", "-O", tarball, url], stderr=subprocess.STDOUT)
89 if dl_dir and not os.path.exists(archive_tarball):
90 shutil.copyfile(tarball, archive_tarball)
30 return tarball 91 return tarball
31 92
32 def check_elf(self, path, target_os=None, target_arch=None): 93 def check_elf(self, path, target_os=None, target_arch=None):
diff --git a/meta/lib/oeqa/sdk/cases/buildcpio.py b/meta/lib/oeqa/sdk/cases/autotools.py
index e7fc211a47..ee6c522551 100644
--- a/meta/lib/oeqa/sdk/cases/buildcpio.py
+++ b/meta/lib/oeqa/sdk/cases/autotools.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -11,16 +13,21 @@ from oeqa.sdk.case import OESDKTestCase
11from oeqa.utils.subprocesstweak import errors_have_output 13from oeqa.utils.subprocesstweak import errors_have_output
12errors_have_output() 14errors_have_output()
13 15
14class BuildCpioTest(OESDKTestCase): 16class AutotoolsTest(OESDKTestCase):
15 """ 17 """
16 Check that autotools will cross-compile correctly. 18 Check that autotools will cross-compile correctly.
17 """ 19 """
20 def setUp(self):
21 libc = self.td.get("TCLIBC")
22 if libc in [ 'newlib' ]:
23 raise unittest.SkipTest("AutotoolsTest class: SDK doesn't contain a supported C library")
24
18 def test_cpio(self): 25 def test_cpio(self):
19 with tempfile.TemporaryDirectory(prefix="cpio-", dir=self.tc.sdk_dir) as testdir: 26 with tempfile.TemporaryDirectory(prefix="cpio-", dir=self.tc.sdk_dir) as testdir:
20 tarball = self.fetch(testdir, self.td["DL_DIR"], "https://ftp.gnu.org/gnu/cpio/cpio-2.13.tar.gz") 27 tarball = self.fetch(testdir, self.td["DL_DIR"], "https://ftp.gnu.org/gnu/cpio/cpio-2.15.tar.gz")
21 28
22 dirs = {} 29 dirs = {}
23 dirs["source"] = os.path.join(testdir, "cpio-2.13") 30 dirs["source"] = os.path.join(testdir, "cpio-2.15")
24 dirs["build"] = os.path.join(testdir, "build") 31 dirs["build"] = os.path.join(testdir, "build")
25 dirs["install"] = os.path.join(testdir, "install") 32 dirs["install"] = os.path.join(testdir, "install")
26 33
@@ -28,9 +35,14 @@ class BuildCpioTest(OESDKTestCase):
28 self.assertTrue(os.path.isdir(dirs["source"])) 35 self.assertTrue(os.path.isdir(dirs["source"]))
29 os.makedirs(dirs["build"]) 36 os.makedirs(dirs["build"])
30 37
31 self._run("sed -i -e '/char.*program_name/d' {source}/src/global.c".format(**dirs)) 38 self._run("cd {build} && {source}/configure CFLAGS='-std=gnu17 -Dbool=int -Dtrue=1 -Dfalse=0 -Wno-error=implicit-function-declaration' $CONFIGURE_FLAGS".format(**dirs))
32 self._run("cd {build} && {source}/configure --disable-maintainer-mode $CONFIGURE_FLAGS".format(**dirs)) 39
33 self._run("cd {build} && make -j".format(**dirs)) 40 # Check that configure detected the target correctly
41 with open(os.path.join(dirs["build"], "config.log")) as f:
42 host_sys = self.td["HOST_SYS"]
43 self.assertIn(f"host_alias='{host_sys}'\n", f.readlines())
44
45 self._run("cd {build} && make CFLAGS='-std=gnu17 -Dbool=int -Dtrue=1 -Dfalse=0 -Wno-error=implicit-function-declaration' -j".format(**dirs))
34 self._run("cd {build} && make install DESTDIR={install}".format(**dirs)) 46 self._run("cd {build} && make install DESTDIR={install}".format(**dirs))
35 47
36 self.check_elf(os.path.join(dirs["install"], "usr", "local", "bin", "cpio")) 48 self.check_elf(os.path.join(dirs["install"], "usr", "local", "bin", "cpio"))
diff --git a/meta/lib/oeqa/sdk/cases/buildepoxy.py b/meta/lib/oeqa/sdk/cases/buildepoxy.py
deleted file mode 100644
index 385f8ccca8..0000000000
--- a/meta/lib/oeqa/sdk/cases/buildepoxy.py
+++ /dev/null
@@ -1,41 +0,0 @@
1#
2# SPDX-License-Identifier: MIT
3#
4
5import os
6import subprocess
7import tempfile
8import unittest
9
10from oeqa.sdk.case import OESDKTestCase
11from oeqa.utils.subprocesstweak import errors_have_output
12errors_have_output()
13
14class EpoxyTest(OESDKTestCase):
15 """
16 Test that Meson builds correctly.
17 """
18 def setUp(self):
19 if not (self.tc.hasHostPackage("nativesdk-meson")):
20 raise unittest.SkipTest("GalculatorTest class: SDK doesn't contain Meson")
21
22 def test_epoxy(self):
23 with tempfile.TemporaryDirectory(prefix="epoxy", dir=self.tc.sdk_dir) as testdir:
24 tarball = self.fetch(testdir, self.td["DL_DIR"], "https://github.com/anholt/libepoxy/releases/download/1.5.3/libepoxy-1.5.3.tar.xz")
25
26 dirs = {}
27 dirs["source"] = os.path.join(testdir, "libepoxy-1.5.3")
28 dirs["build"] = os.path.join(testdir, "build")
29 dirs["install"] = os.path.join(testdir, "install")
30
31 subprocess.check_output(["tar", "xf", tarball, "-C", testdir], stderr=subprocess.STDOUT)
32 self.assertTrue(os.path.isdir(dirs["source"]))
33 os.makedirs(dirs["build"])
34
35 log = self._run("meson -Degl=no -Dglx=no -Dx11=false {build} {source}".format(**dirs))
36 # Check that Meson thinks we're doing a cross build and not a native
37 self.assertIn("Build type: cross build", log)
38 self._run("ninja -C {build} -v".format(**dirs))
39 self._run("DESTDIR={install} ninja -C {build} -v install".format(**dirs))
40
41 self.check_elf(os.path.join(dirs["install"], "usr", "local", "lib", "libepoxy.so"))
diff --git a/meta/lib/oeqa/sdk/cases/buildgalculator.py b/meta/lib/oeqa/sdk/cases/buildgalculator.py
deleted file mode 100644
index eb3c8ddf39..0000000000
--- a/meta/lib/oeqa/sdk/cases/buildgalculator.py
+++ /dev/null
@@ -1,43 +0,0 @@
1#
2# SPDX-License-Identifier: MIT
3#
4
5import os
6import subprocess
7import tempfile
8import unittest
9
10from oeqa.sdk.case import OESDKTestCase
11from oeqa.utils.subprocesstweak import errors_have_output
12errors_have_output()
13
14class GalculatorTest(OESDKTestCase):
15 """
16 Test that autotools and GTK+ 3 compiles correctly.
17 """
18 def setUp(self):
19 if not (self.tc.hasTargetPackage("gtk+3", multilib=True) or \
20 self.tc.hasTargetPackage("libgtk-3.0", multilib=True)):
21 raise unittest.SkipTest("GalculatorTest class: SDK don't support gtk+3")
22 if not (self.tc.hasHostPackage("nativesdk-gettext-dev")):
23 raise unittest.SkipTest("GalculatorTest class: SDK doesn't contain gettext")
24
25 def test_galculator(self):
26 with tempfile.TemporaryDirectory(prefix="galculator", dir=self.tc.sdk_dir) as testdir:
27 tarball = self.fetch(testdir, self.td["DL_DIR"], "http://galculator.mnim.org/downloads/galculator-2.1.4.tar.bz2")
28
29 dirs = {}
30 dirs["source"] = os.path.join(testdir, "galculator-2.1.4")
31 dirs["build"] = os.path.join(testdir, "build")
32 dirs["install"] = os.path.join(testdir, "install")
33
34 subprocess.check_output(["tar", "xf", tarball, "-C", testdir], stderr=subprocess.STDOUT)
35 self.assertTrue(os.path.isdir(dirs["source"]))
36 os.makedirs(dirs["build"])
37
38 self._run("cd {source} && sed -i -e '/s_preferences.*prefs;/d' src/main.c && autoreconf -i -f -I $OECORE_TARGET_SYSROOT/usr/share/aclocal -I m4".format(**dirs))
39 self._run("cd {build} && {source}/configure $CONFIGURE_FLAGS".format(**dirs))
40 self._run("cd {build} && make -j".format(**dirs))
41 self._run("cd {build} && make install DESTDIR={install}".format(**dirs))
42
43 self.check_elf(os.path.join(dirs["install"], "usr", "local", "bin", "galculator"))
diff --git a/meta/lib/oeqa/sdk/cases/assimp.py b/meta/lib/oeqa/sdk/cases/cmake.py
index f166758e49..070682ef08 100644
--- a/meta/lib/oeqa/sdk/cases/assimp.py
+++ b/meta/lib/oeqa/sdk/cases/cmake.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -11,30 +13,35 @@ from oeqa.sdk.case import OESDKTestCase
11from oeqa.utils.subprocesstweak import errors_have_output 13from oeqa.utils.subprocesstweak import errors_have_output
12errors_have_output() 14errors_have_output()
13 15
14class BuildAssimp(OESDKTestCase): 16class CMakeTest(OESDKTestCase):
15 """ 17 """
16 Test case to build a project using cmake. 18 Test case to build a project using cmake.
17 """ 19 """
18 20
19 def setUp(self): 21 def setUp(self):
20 if not (self.tc.hasHostPackage("nativesdk-cmake") or 22 libc = self.td.get("TCLIBC")
21 self.tc.hasHostPackage("cmake-native")): 23 if libc in [ 'newlib' ]:
22 raise unittest.SkipTest("Needs cmake") 24 raise unittest.SkipTest("CMakeTest class: SDK doesn't contain a supported C library")
25
26 self.ensure_host_package("cmake")
23 27
24 def test_assimp(self): 28 def test_assimp(self):
25 with tempfile.TemporaryDirectory(prefix="assimp", dir=self.tc.sdk_dir) as testdir: 29 with tempfile.TemporaryDirectory(prefix="assimp", dir=self.tc.sdk_dir) as testdir:
26 tarball = self.fetch(testdir, self.td["DL_DIR"], "https://github.com/assimp/assimp/archive/v4.1.0.tar.gz") 30 tarball = self.fetch(testdir, self.td["DL_DIR"], "https://github.com/assimp/assimp/archive/v5.4.1.tar.gz")
27 31
28 dirs = {} 32 dirs = {}
29 dirs["source"] = os.path.join(testdir, "assimp-4.1.0") 33 dirs["source"] = os.path.join(testdir, "assimp-5.4.1")
30 dirs["build"] = os.path.join(testdir, "build") 34 dirs["build"] = os.path.join(testdir, "build")
31 dirs["install"] = os.path.join(testdir, "install") 35 dirs["install"] = os.path.join(testdir, "install")
32 36
33 subprocess.check_output(["tar", "xf", tarball, "-C", testdir], stderr=subprocess.STDOUT) 37 subprocess.check_output(["tar", "xf", tarball, "-C", testdir], stderr=subprocess.STDOUT)
34 self.assertTrue(os.path.isdir(dirs["source"])) 38 self.assertTrue(os.path.isdir(dirs["source"]))
39 # Apply the zlib patch https://github.com/madler/zlib/commit/a566e156b3fa07b566ddbf6801b517a9dba04fa3
 40 # this sed won't be needed once assimp moves its zlib copy to v1.3.1+
41 self._run("sed -i '/# ifdef _FILE_OFFSET_BITS/I,+2 d' {source}/contrib/zlib/gzguts.h".format(**dirs))
35 os.makedirs(dirs["build"]) 42 os.makedirs(dirs["build"])
36 43
37 self._run("cd {build} && cmake -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON {source}".format(**dirs)) 44 self._run("cd {build} && cmake -DASSIMP_WARNINGS_AS_ERRORS=OFF -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON -DASSIMP_BUILD_ZLIB=ON {source}".format(**dirs))
38 self._run("cmake --build {build} -- -j".format(**dirs)) 45 self._run("cmake --build {build} -- -j".format(**dirs))
39 self._run("cmake --build {build} --target install -- DESTDIR={install}".format(**dirs)) 46 self._run("cmake --build {build} --target install -- DESTDIR={install}".format(**dirs))
40 self.check_elf(os.path.join(dirs["install"], "usr", "local", "lib", "libassimp.so.4.1.0")) 47 self.check_elf(os.path.join(dirs["install"], "usr", "local", "lib", "libassimp.so.5.4.1"))
diff --git a/meta/lib/oeqa/sdk/cases/gcc.py b/meta/lib/oeqa/sdk/cases/gcc.py
index eb08eadd28..e810d2c42b 100644
--- a/meta/lib/oeqa/sdk/cases/gcc.py
+++ b/meta/lib/oeqa/sdk/cases/gcc.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -24,6 +26,10 @@ class GccCompileTest(OESDKTestCase):
24 os.path.join(self.tc.sdk_dir, f)) 26 os.path.join(self.tc.sdk_dir, f))
25 27
26 def setUp(self): 28 def setUp(self):
29 libc = self.td.get("TCLIBC")
30 if libc in [ 'newlib' ]:
31 raise unittest.SkipTest("GccCompileTest class: SDK doesn't contain a supported C library")
32
27 machine = self.td.get("MACHINE") 33 machine = self.td.get("MACHINE")
28 if not (self.tc.hasHostPackage("packagegroup-cross-canadian-%s" % machine) or 34 if not (self.tc.hasHostPackage("packagegroup-cross-canadian-%s" % machine) or
29 self.tc.hasHostPackage("^gcc-", regex=True)): 35 self.tc.hasHostPackage("^gcc-", regex=True)):
diff --git a/meta/lib/oeqa/sdk/cases/gtk3.py b/meta/lib/oeqa/sdk/cases/gtk3.py
new file mode 100644
index 0000000000..cdaf50ed38
--- /dev/null
+++ b/meta/lib/oeqa/sdk/cases/gtk3.py
@@ -0,0 +1,40 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8import subprocess
9import tempfile
10
11from oeqa.sdk.cases.meson import MesonTestBase
12
13from oeqa.utils.subprocesstweak import errors_have_output
14errors_have_output()
15
16class GTK3Test(MesonTestBase):
17
18 def setUp(self):
19 super().setUp()
20 self.ensure_target_package("gtk+3", "libgtk-3.0", recipe="gtk+3")
21 self.ensure_host_package("glib-2.0-utils", "libglib-2.0-utils", recipe="glib-2.0")
22
23 """
24 Test that autotools and GTK+ 3 compiles correctly.
25 """
26 def test_libhandy(self):
27 with tempfile.TemporaryDirectory(prefix="libhandy", dir=self.tc.sdk_dir) as testdir:
28 tarball = self.fetch(testdir, self.td["DL_DIR"], "https://download.gnome.org/sources/libhandy/1.8/libhandy-1.8.3.tar.xz")
29
30 sourcedir = os.path.join(testdir, "libhandy-1.8.3")
31 builddir = os.path.join(testdir, "build")
32 installdir = os.path.join(testdir, "install")
33
34 subprocess.check_output(["tar", "xf", tarball, "-C", testdir], stderr=subprocess.STDOUT)
35 self.assertTrue(os.path.isdir(sourcedir))
36 os.makedirs(builddir)
37
38 self.build_meson(sourcedir, builddir, installdir, "-Dglade_catalog=disabled -Dintrospection=disabled -Dvapi=false")
39 self.assertTrue(os.path.isdir(installdir))
40 self.check_elf(os.path.join(installdir, "usr", "local", "lib", "libhandy-1.so"))
diff --git a/meta/lib/oeqa/sdk/cases/kmod.py b/meta/lib/oeqa/sdk/cases/kmod.py
new file mode 100644
index 0000000000..0aa6f702e4
--- /dev/null
+++ b/meta/lib/oeqa/sdk/cases/kmod.py
@@ -0,0 +1,39 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8import subprocess
9import tempfile
10
11from oeqa.sdk.case import OESDKTestCase
12from oeqa.sdkext.context import OESDKExtTestContext
13from oeqa.utils.subprocesstweak import errors_have_output
14errors_have_output()
15
16class KernelModuleTest(OESDKTestCase):
17 """
18 Test that out-of-tree kernel modules build.
19 """
20 def test_cryptodev(self):
21 if isinstance(self.tc, OESDKExtTestContext):
22 self.skipTest(f"{self.id()} does not support eSDK (https://bugzilla.yoctoproject.org/show_bug.cgi?id=15850)")
23
24 self.ensure_target_package("kernel-devsrc")
25 # These targets need to be built before kernel modules can be built.
26 self._run("make -j -C $OECORE_TARGET_SYSROOT/usr/src/kernel prepare scripts")
27
28 with tempfile.TemporaryDirectory(prefix="cryptodev", dir=self.tc.sdk_dir) as testdir:
29 git_url = "https://github.com/cryptodev-linux/cryptodev-linux"
 30 # This is a known-good commit post-1.13 that builds with kernel 6.7+
31 git_sha = "bb8bc7cf60d2c0b097c8b3b0e807f805b577a53f"
32
33 sourcedir = os.path.join(testdir, "cryptodev-linux")
34 subprocess.check_output(["git", "clone", git_url, sourcedir], stderr=subprocess.STDOUT)
35 self.assertTrue(os.path.isdir(sourcedir))
36 subprocess.check_output(["git", "-C", sourcedir, "checkout", git_sha], stderr=subprocess.STDOUT)
37
38 self._run("make -C %s V=1 KERNEL_DIR=$OECORE_TARGET_SYSROOT/usr/src/kernel" % sourcedir)
39 self.check_elf(os.path.join(sourcedir, "cryptodev.ko"))
diff --git a/meta/lib/oeqa/sdk/cases/buildlzip.py b/meta/lib/oeqa/sdk/cases/makefile.py
index 49ae756bf3..e1e2484820 100644
--- a/meta/lib/oeqa/sdk/cases/buildlzip.py
+++ b/meta/lib/oeqa/sdk/cases/makefile.py
@@ -1,16 +1,24 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import os, tempfile, subprocess, unittest 7import os, tempfile, subprocess
8import unittest
6from oeqa.sdk.case import OESDKTestCase 9from oeqa.sdk.case import OESDKTestCase
7from oeqa.utils.subprocesstweak import errors_have_output 10from oeqa.utils.subprocesstweak import errors_have_output
8errors_have_output() 11errors_have_output()
9 12
10class BuildLzipTest(OESDKTestCase): 13class MakefileTest(OESDKTestCase):
11 """ 14 """
12 Test that "plain" compilation works, using just $CC $CFLAGS etc. 15 Test that "plain" compilation works, using just $CC $CFLAGS etc.
13 """ 16 """
17 def setUp(self):
18 libc = self.td.get("TCLIBC")
19 if libc in [ 'newlib' ]:
20 raise unittest.SkipTest("MakefileTest class: SDK doesn't contain a supported C library")
21
14 def test_lzip(self): 22 def test_lzip(self):
15 with tempfile.TemporaryDirectory(prefix="lzip", dir=self.tc.sdk_dir) as testdir: 23 with tempfile.TemporaryDirectory(prefix="lzip", dir=self.tc.sdk_dir) as testdir:
16 tarball = self.fetch(testdir, self.td["DL_DIR"], "http://downloads.yoctoproject.org/mirror/sources/lzip-1.19.tar.gz") 24 tarball = self.fetch(testdir, self.td["DL_DIR"], "http://downloads.yoctoproject.org/mirror/sources/lzip-1.19.tar.gz")
diff --git a/meta/lib/oeqa/sdk/cases/manifest.py b/meta/lib/oeqa/sdk/cases/manifest.py
new file mode 100644
index 0000000000..ee59a5f338
--- /dev/null
+++ b/meta/lib/oeqa/sdk/cases/manifest.py
@@ -0,0 +1,26 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.sdk.case import OESDKTestCase
8from oeqa.sdkext.context import OESDKExtTestContext
9
10
11class ManifestTest(OESDKTestCase):
12 def test_manifests(self):
13 """
14 Verify that the host and target manifests are not empty, unless this is
15 a minimal eSDK without toolchain in which case they should be empty.
16 """
17 if (
18 isinstance(self.tc, OESDKExtTestContext)
19 and self.td.get("SDK_EXT_TYPE") == "minimal"
20 and self.td.get("SDK_INCLUDE_TOOLCHAIN") == "0"
21 ):
22 self.assertEqual(self.tc.target_pkg_manifest, {})
23 self.assertEqual(self.tc.host_pkg_manifest, {})
24 else:
25 self.assertNotEqual(self.tc.target_pkg_manifest, {})
26 self.assertNotEqual(self.tc.host_pkg_manifest, {})
diff --git a/meta/lib/oeqa/sdk/cases/maturin.py b/meta/lib/oeqa/sdk/cases/maturin.py
new file mode 100644
index 0000000000..e3e8edc781
--- /dev/null
+++ b/meta/lib/oeqa/sdk/cases/maturin.py
@@ -0,0 +1,66 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8import shutil
9import unittest
10
11from oeqa.sdk.case import OESDKTestCase
12from oeqa.utils.subprocesstweak import errors_have_output
13
14errors_have_output()
15
16
17class MaturinTest(OESDKTestCase):
18 def setUp(self):
19 self.ensure_host_package("python3-maturin")
20
21 def test_maturin_list_python(self):
22 out = self._run(r"""python3 -c 'import sys; print(f"{sys.executable}\n{sys.version_info.major}.{sys.version_info.minor}")'""")
23 executable, version = out.splitlines()
24
25 output = self._run("maturin list-python")
26 # The output looks like this:
27 # - CPython 3.13 at /usr/bin/python3
28 # We don't want to assume CPython so just check for the version and path.
29 expected = f"{version} at {executable}"
30 self.assertIn(expected, output)
31
32class MaturinDevelopTest(OESDKTestCase):
33 def setUp(self):
34 machine = self.td.get("MACHINE")
35 self.ensure_host_package("python3-maturin")
36
37 if not (
38 self.tc.hasHostPackage("packagegroup-rust-cross-canadian-%s" % machine)
39 ):
40 raise unittest.SkipTest(
41 "Testing 'maturin develop' requires Rust cross-canadian in the SDK"
42 )
43
44 def test_maturin_develop(self):
45 """
46 This test case requires:
 47 (1) that a .venv can be created.
48 (2) a functional 'rustc' and 'cargo'
49 """
50 targetdir = os.path.join(self.tc.sdk_dir, "guessing-game")
51 try:
52 shutil.rmtree(targetdir)
53 except FileNotFoundError:
54 pass
55 shutil.copytree(
56 os.path.join(self.tc.files_dir, "maturin/guessing-game"), targetdir
57 )
58
59 self._run("cd %s; python3 -m venv .venv" % targetdir)
60 output = self._run("cd %s; maturin develop" % targetdir)
61 self.assertRegex(output, r"🔗 Found pyo3 bindings with abi3 support for Python ≥ 3.8")
62 self.assertRegex(output, r"🐍 Not using a specific python interpreter")
63 self.assertRegex(output, r"📡 Using build options features from pyproject.toml")
64 self.assertRegex(output, r"Compiling guessing-game v0.1.0")
65 self.assertRegex(output, r"📦 Built wheel for abi3 Python ≥ 3.8")
66 self.assertRegex(output, r"🛠 Installed guessing-game-0.1.0")
diff --git a/meta/lib/oeqa/sdk/cases/meson.py b/meta/lib/oeqa/sdk/cases/meson.py
new file mode 100644
index 0000000000..a809ca3a53
--- /dev/null
+++ b/meta/lib/oeqa/sdk/cases/meson.py
@@ -0,0 +1,72 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import json
8import os
9import subprocess
10import tempfile
11import unittest
12
13from oeqa.sdk.case import OESDKTestCase
14from oeqa.sdkext.context import OESDKExtTestContext
15from oeqa.utils.subprocesstweak import errors_have_output
16errors_have_output()
17
18class MesonTestBase(OESDKTestCase):
19 def setUp(self):
20 libc = self.td.get("TCLIBC")
21 if libc in [ 'newlib' ]:
22 raise unittest.SkipTest("MesonTest class: SDK doesn't contain a supported C library")
23
24 if isinstance(self.tc, OESDKExtTestContext):
25 self.skipTest(f"{self.id()} does not support eSDK (https://bugzilla.yoctoproject.org/show_bug.cgi?id=15854)")
26
27 self.ensure_host_package("meson")
28 self.ensure_host_package("pkgconfig")
29
30 def build_meson(self, sourcedir, builddir, installdir=None, options=""):
31 """
32 Given a source tree in sourcedir, configure it to build in builddir with
33 the specified options, and if installdir is set also install.
34 """
35 log = self._run(f"meson setup --warnlevel 1 {builddir} {sourcedir} {options}")
36
37 # Check that Meson thinks we're doing a cross build and not a native
38 self.assertIn("Build type: cross build", log)
39
40 # Check that the cross-compiler used is the one we set.
41 data = json.loads(self._run(f"meson introspect --compilers {builddir}"))
42 self.assertIn(self.td.get("CC").split()[0], data["host"]["c"]["exelist"])
43
44 # Check that the target architectures was set correctly.
45 data = json.loads(self._run(f"meson introspect --machines {builddir}"))
46 self.assertEqual(data["host"]["cpu"], self.td["HOST_ARCH"])
47
48 self._run(f"meson compile -C {builddir} -v")
49
50 if installdir:
51 self._run(f"meson install -C {builddir} --destdir {installdir}")
52
53class MesonTest(MesonTestBase):
54 """
55 Test that Meson builds correctly.
56 """
57
58 def test_epoxy(self):
59 with tempfile.TemporaryDirectory(prefix="epoxy", dir=self.tc.sdk_dir) as testdir:
60 tarball = self.fetch(testdir, self.td["DL_DIR"], "https://github.com/anholt/libepoxy/releases/download/1.5.3/libepoxy-1.5.3.tar.xz")
61
62 sourcedir = os.path.join(testdir, "libepoxy-1.5.3")
63 builddir = os.path.join(testdir, "build")
64 installdir = os.path.join(testdir, "install")
65
66 subprocess.check_output(["tar", "xf", tarball, "-C", testdir], stderr=subprocess.STDOUT)
67 self.assertTrue(os.path.isdir(sourcedir))
68
69 os.makedirs(builddir)
70 self.build_meson(sourcedir, builddir, installdir, "-Degl=no -Dglx=no -Dx11=false")
71 self.assertTrue(os.path.isdir(installdir))
72 self.check_elf(os.path.join(installdir, "usr", "local", "lib", "libepoxy.so"))
diff --git a/meta/lib/oeqa/sdk/cases/perl.py b/meta/lib/oeqa/sdk/cases/perl.py
index 14d76d820f..a72bd2461a 100644
--- a/meta/lib/oeqa/sdk/cases/perl.py
+++ b/meta/lib/oeqa/sdk/cases/perl.py
@@ -1,8 +1,9 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import unittest
6from oeqa.sdk.case import OESDKTestCase 7from oeqa.sdk.case import OESDKTestCase
7 8
8from oeqa.utils.subprocesstweak import errors_have_output 9from oeqa.utils.subprocesstweak import errors_have_output
@@ -10,9 +11,7 @@ errors_have_output()
10 11
11class PerlTest(OESDKTestCase): 12class PerlTest(OESDKTestCase):
12 def setUp(self): 13 def setUp(self):
13 if not (self.tc.hasHostPackage("nativesdk-perl") or 14 self.ensure_host_package("perl")
14 self.tc.hasHostPackage("perl-native")):
15 raise unittest.SkipTest("No perl package in the SDK")
16 15
17 def test_perl(self): 16 def test_perl(self):
18 cmd = "perl -e '$_=\"Uryyb, jbeyq\"; tr/a-zA-Z/n-za-mN-ZA-M/;print'" 17 cmd = "perl -e '$_=\"Uryyb, jbeyq\"; tr/a-zA-Z/n-za-mN-ZA-M/;print'"
diff --git a/meta/lib/oeqa/sdk/cases/python.py b/meta/lib/oeqa/sdk/cases/python.py
index a334abce5f..b990cd889a 100644
--- a/meta/lib/oeqa/sdk/cases/python.py
+++ b/meta/lib/oeqa/sdk/cases/python.py
@@ -1,29 +1,17 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import subprocess, unittest
6from oeqa.sdk.case import OESDKTestCase 7from oeqa.sdk.case import OESDKTestCase
7 8
8from oeqa.utils.subprocesstweak import errors_have_output 9from oeqa.utils.subprocesstweak import errors_have_output
9errors_have_output() 10errors_have_output()
10 11
11class Python2Test(OESDKTestCase):
12 def setUp(self):
13 if not (self.tc.hasHostPackage("nativesdk-python-core") or
14 self.tc.hasHostPackage("python-core-native")):
15 raise unittest.SkipTest("No python package in the SDK")
16
17 def test_python2(self):
18 cmd = "python -c \"import codecs; print(codecs.encode('Uryyb, jbeyq', 'rot13'))\""
19 output = self._run(cmd)
20 self.assertEqual(output, "Hello, world\n")
21
22class Python3Test(OESDKTestCase): 12class Python3Test(OESDKTestCase):
23 def setUp(self): 13 def setUp(self):
24 if not (self.tc.hasHostPackage("nativesdk-python3-core") or 14 self.ensure_host_package("python3-core", recipe="python3")
25 self.tc.hasHostPackage("python3-core-native")):
26 raise unittest.SkipTest("No python3 package in the SDK")
27 15
28 def test_python3(self): 16 def test_python3(self):
29 cmd = "python3 -c \"import codecs; print(codecs.encode('Uryyb, jbeyq', 'rot13'))\"" 17 cmd = "python3 -c \"import codecs; print(codecs.encode('Uryyb, jbeyq', 'rot13'))\""
diff --git a/meta/lib/oeqa/sdk/cases/rust.py b/meta/lib/oeqa/sdk/cases/rust.py
new file mode 100644
index 0000000000..4b115bebf5
--- /dev/null
+++ b/meta/lib/oeqa/sdk/cases/rust.py
@@ -0,0 +1,58 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8import shutil
9import unittest
10
11from oeqa.sdk.case import OESDKTestCase
12
13from oeqa.utils.subprocesstweak import errors_have_output
14errors_have_output()
15
16class RustCompileTest(OESDKTestCase):
17 td_vars = ['MACHINE']
18
19 @classmethod
20 def setUpClass(self):
21 targetdir = os.path.join(self.tc.sdk_dir, "hello")
22 try:
23 shutil.rmtree(targetdir)
24 except FileNotFoundError:
25 pass
26 shutil.copytree(os.path.join(self.tc.sdk_files_dir, "rust/hello"), targetdir)
27
28 def setUp(self):
29 machine = self.td.get("MACHINE")
30 if not self.tc.hasHostPackage("packagegroup-rust-cross-canadian-%s" % machine):
31 raise unittest.SkipTest("RustCompileTest class: SDK doesn't contain a Rust cross-canadian toolchain")
32
33 def test_cargo_build(self):
34 self._run('cd %s/hello; cargo add zstd' % (self.tc.sdk_dir))
35 self._run('cd %s/hello; cargo build' % self.tc.sdk_dir)
36
37class RustHostCompileTest(OESDKTestCase):
38 td_vars = ['MACHINE', 'SDK_SYS']
39
40 @classmethod
41 def setUpClass(self):
42 targetdir = os.path.join(self.tc.sdk_dir, "hello")
43 try:
44 shutil.rmtree(targetdir)
45 except FileNotFoundError:
46 pass
47 shutil.copytree(os.path.join(self.tc.sdk_files_dir, "rust/hello"), targetdir)
48
49 def setUp(self):
50 machine = self.td.get("MACHINE")
51 if not self.tc.hasHostPackage("packagegroup-rust-cross-canadian-%s" % machine):
52 raise unittest.SkipTest("RustCompileTest class: SDK doesn't contain a Rust cross-canadian toolchain")
53
54 def test_cargo_build(self):
55 sdksys = self.td.get("SDK_SYS")
56 self._run('cd %s/hello; cargo add zstd' % (self.tc.sdk_dir))
57 self._run('cd %s/hello; cargo build --target %s-gnu' % (self.tc.sdk_dir, sdksys))
58 self._run('cd %s/hello; cargo run --target %s-gnu' % (self.tc.sdk_dir, sdksys))
diff --git a/meta/lib/oeqa/sdk/context.py b/meta/lib/oeqa/sdk/context.py
index 01c38c24e6..d4fdd83207 100644
--- a/meta/lib/oeqa/sdk/context.py
+++ b/meta/lib/oeqa/sdk/context.py
@@ -23,6 +23,13 @@ class OESDKTestContext(OETestContext):
23 self.target_pkg_manifest = target_pkg_manifest 23 self.target_pkg_manifest = target_pkg_manifest
24 self.host_pkg_manifest = host_pkg_manifest 24 self.host_pkg_manifest = host_pkg_manifest
25 25
26 # match multilib according to sdk_env
27 self.multilib = ""
28 multilibs = self.td.get('MULTILIB_VARIANTS', '').split()
29 for ml in multilibs:
30 if ml in os.path.basename(self.sdk_env):
31 self.multilib = ml
32
26 def _hasPackage(self, manifest, pkg, regex=False): 33 def _hasPackage(self, manifest, pkg, regex=False):
27 if regex: 34 if regex:
28 # do regex match 35 # do regex match
@@ -40,12 +47,8 @@ class OESDKTestContext(OETestContext):
40 return self._hasPackage(self.host_pkg_manifest, pkg, regex=regex) 47 return self._hasPackage(self.host_pkg_manifest, pkg, regex=regex)
41 48
42 def hasTargetPackage(self, pkg, multilib=False, regex=False): 49 def hasTargetPackage(self, pkg, multilib=False, regex=False):
43 if multilib: 50 if multilib and self.multilib:
44 # match multilib according to sdk_env 51 pkg = self.multilib + '-' + pkg
45 mls = self.td.get('MULTILIB_VARIANTS', '').split()
46 for ml in mls:
47 if ('ml'+ml) in self.sdk_env:
48 pkg = ml + '-' + pkg
49 return self._hasPackage(self.target_pkg_manifest, pkg, regex=regex) 52 return self._hasPackage(self.target_pkg_manifest, pkg, regex=regex)
50 53
51class OESDKTestContextExecutor(OETestContextExecutor): 54class OESDKTestContextExecutor(OETestContextExecutor):
diff --git a/meta/lib/oeqa/sdk/files/rust/hello/Cargo.toml b/meta/lib/oeqa/sdk/files/rust/hello/Cargo.toml
new file mode 100644
index 0000000000..fe619478a6
--- /dev/null
+++ b/meta/lib/oeqa/sdk/files/rust/hello/Cargo.toml
@@ -0,0 +1,6 @@
1[package]
2name = "hello"
3version = "0.1.0"
4edition = "2021"
5
6[dependencies]
diff --git a/meta/lib/oeqa/sdk/files/rust/hello/build.rs b/meta/lib/oeqa/sdk/files/rust/hello/build.rs
new file mode 100644
index 0000000000..b1a533d5df
--- /dev/null
+++ b/meta/lib/oeqa/sdk/files/rust/hello/build.rs
@@ -0,0 +1,3 @@
1/* This is the simplest build script just to invoke host compiler
2 in the build process. */
3fn main() {}
diff --git a/meta/lib/oeqa/sdk/files/rust/hello/src/main.rs b/meta/lib/oeqa/sdk/files/rust/hello/src/main.rs
new file mode 100644
index 0000000000..a06c03f82a
--- /dev/null
+++ b/meta/lib/oeqa/sdk/files/rust/hello/src/main.rs
@@ -0,0 +1,3 @@
1fn main() {
2 println!("Hello, OpenEmbedded world!");
3}
diff --git a/meta/lib/oeqa/sdk/testmetaidesupport.py b/meta/lib/oeqa/sdk/testmetaidesupport.py
new file mode 100644
index 0000000000..00ef30e82e
--- /dev/null
+++ b/meta/lib/oeqa/sdk/testmetaidesupport.py
@@ -0,0 +1,45 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7class TestSDK(object):
8 def run(self, d):
9 import json
10 import logging
11 from oeqa.sdk.context import OESDKTestContext, OESDKTestContextExecutor
12 from oeqa.utils import make_logger_bitbake_compatible
13
14 pn = d.getVar("PN")
15
16 logger = make_logger_bitbake_compatible(logging.getLogger("BitBake"))
17
18 sdk_dir = d.expand("${WORKDIR}/testsdk/")
19 bb.utils.remove(sdk_dir, True)
20 bb.utils.mkdirhier(sdk_dir)
21
22 sdk_envs = OESDKTestContextExecutor._get_sdk_environs(d.getVar("DEPLOY_DIR_IMAGE"))
23 tdname = d.expand("${DEPLOY_DIR_IMAGE}/${PN}.testdata.json")
24 test_data = json.load(open(tdname, "r"))
25
26 host_pkg_manifest = {"cmake-native":"", "gcc-cross":"", "gettext-native":"", "meson-native":"", "perl-native":"", "python3-core-native":"", }
27 target_pkg_manifest = {"gtk+3":""}
28
29 for s in sdk_envs:
30 bb.plain("meta-ide-support based SDK testing environment: %s" % s)
31
32 sdk_env = sdk_envs[s]
33
34 tc = OESDKTestContext(td=test_data, logger=logger, sdk_dir=sdk_dir,
35 sdk_env=sdk_env, target_pkg_manifest=target_pkg_manifest,
36 host_pkg_manifest=host_pkg_manifest)
37
38 tc.loadTests(OESDKTestContextExecutor.default_cases)
39
40 results = tc.runTests()
41 if results:
42 results.logSummary(pn)
43
44 if (not results) or (not results.wasSuccessful()):
45 bb.fatal('%s - FAILED' % (pn,), forcelog=True)
diff --git a/meta/lib/oeqa/sdk/testsdk.py b/meta/lib/oeqa/sdk/testsdk.py
index 35e40187bc..cffcf9f49a 100644
--- a/meta/lib/oeqa/sdk/testsdk.py
+++ b/meta/lib/oeqa/sdk/testsdk.py
@@ -23,14 +23,6 @@ class TestSDKBase(object):
23 return configuration 23 return configuration
24 24
25 @staticmethod 25 @staticmethod
26 def get_sdk_json_result_dir(d):
27 json_result_dir = os.path.join(d.getVar("LOG_DIR"), 'oeqa')
28 custom_json_result_dir = d.getVar("OEQA_JSON_RESULT_DIR")
29 if custom_json_result_dir:
30 json_result_dir = custom_json_result_dir
31 return json_result_dir
32
33 @staticmethod
34 def get_sdk_result_id(configuration): 26 def get_sdk_result_id(configuration):
35 return '%s_%s_%s_%s_%s' % (configuration['TEST_TYPE'], configuration['IMAGE_BASENAME'], configuration['SDKMACHINE'], configuration['MACHINE'], configuration['STARTTIME']) 27 return '%s_%s_%s_%s_%s' % (configuration['TEST_TYPE'], configuration['IMAGE_BASENAME'], configuration['SDKMACHINE'], configuration['MACHINE'], configuration['STARTTIME'])
36 28
@@ -39,6 +31,28 @@ class TestSDK(TestSDKBase):
39 context_class = OESDKTestContext 31 context_class = OESDKTestContext
40 test_type = 'sdk' 32 test_type = 'sdk'
41 33
34 def sdk_dir_names(self, d):
35 """Return list from TESTSDK_CASE_DIRS."""
36 testdirs = d.getVar("TESTSDK_CASE_DIRS")
37 if testdirs:
38 return testdirs.split()
39
40 bb.fatal("TESTSDK_CASE_DIRS unset, can't find SDK test directories.")
41
42 def get_sdk_paths(self, d):
43 """
44 Return a list of paths where SDK test cases reside.
45
46 SDK tests are expected in <LAYER_DIR>/lib/oeqa/<dirname>/cases
47 """
48 paths = []
49 for layer in d.getVar("BBLAYERS").split():
50 for dirname in self.sdk_dir_names(d):
51 case_path = os.path.join(layer, "lib", "oeqa", dirname, "cases")
52 if os.path.isdir(case_path):
53 paths.append(case_path)
54 return paths
55
42 def get_tcname(self, d): 56 def get_tcname(self, d):
43 """ 57 """
44 Get the name of the SDK file 58 Get the name of the SDK file
@@ -72,6 +86,7 @@ class TestSDK(TestSDKBase):
72 86
73 from bb.utils import export_proxies 87 from bb.utils import export_proxies
74 from oeqa.utils import make_logger_bitbake_compatible 88 from oeqa.utils import make_logger_bitbake_compatible
89 from oeqa.utils import get_json_result_dir
75 90
76 pn = d.getVar("PN") 91 pn = d.getVar("PN")
77 logger = make_logger_bitbake_compatible(logging.getLogger("BitBake")) 92 logger = make_logger_bitbake_compatible(logging.getLogger("BitBake"))
@@ -79,6 +94,9 @@ class TestSDK(TestSDKBase):
79 # sdk use network for download projects for build 94 # sdk use network for download projects for build
80 export_proxies(d) 95 export_proxies(d)
81 96
97 # We need the original PATH for testing the eSDK, not with our manipulations
98 os.environ['PATH'] = d.getVar("BB_ORIGENV", False).getVar("PATH")
99
82 tcname = self.get_tcname(d) 100 tcname = self.get_tcname(d)
83 101
84 if not os.path.exists(tcname): 102 if not os.path.exists(tcname):
@@ -118,7 +136,8 @@ class TestSDK(TestSDKBase):
118 host_pkg_manifest=host_pkg_manifest, **context_args) 136 host_pkg_manifest=host_pkg_manifest, **context_args)
119 137
120 try: 138 try:
121 tc.loadTests(self.context_executor_class.default_cases) 139 modules = (d.getVar("TESTSDK_SUITES") or "").split()
140 tc.loadTests(self.get_sdk_paths(d), modules)
122 except Exception as e: 141 except Exception as e:
123 import traceback 142 import traceback
124 bb.fatal("Loading tests failed:\n%s" % traceback.format_exc()) 143 bb.fatal("Loading tests failed:\n%s" % traceback.format_exc())
@@ -131,7 +150,7 @@ class TestSDK(TestSDKBase):
131 component = "%s %s" % (pn, self.context_executor_class.name) 150 component = "%s %s" % (pn, self.context_executor_class.name)
132 context_msg = "%s:%s" % (os.path.basename(tcname), os.path.basename(sdk_env)) 151 context_msg = "%s:%s" % (os.path.basename(tcname), os.path.basename(sdk_env))
133 configuration = self.get_sdk_configuration(d, self.test_type) 152 configuration = self.get_sdk_configuration(d, self.test_type)
134 result.logDetails(self.get_sdk_json_result_dir(d), 153 result.logDetails(get_json_result_dir(d),
135 configuration, 154 configuration,
136 self.get_sdk_result_id(configuration)) 155 self.get_sdk_result_id(configuration))
137 result.logSummary(component, context_msg) 156 result.logSummary(component, context_msg)
diff --git a/meta/lib/oeqa/sdkext/cases/devtool.py b/meta/lib/oeqa/sdkext/cases/devtool.py
index a5c6a76e02..d0746e68eb 100644
--- a/meta/lib/oeqa/sdkext/cases/devtool.py
+++ b/meta/lib/oeqa/sdkext/cases/devtool.py
@@ -69,10 +69,9 @@ class DevtoolTest(OESDKExtTestCase):
69 self._test_devtool_build(self.myapp_cmake_dst) 69 self._test_devtool_build(self.myapp_cmake_dst)
70 70
71 def test_extend_autotools_recipe_creation(self): 71 def test_extend_autotools_recipe_creation(self):
72 req = 'https://github.com/rdfa/librdfa' 72 recipe = "test-dbus-wait"
73 recipe = "librdfa" 73 self._run('devtool sdk-install dbus')
74 self._run('devtool sdk-install libxml2') 74 self._run('devtool add %s https://git.yoctoproject.org/git/dbus-wait' % (recipe) )
75 self._run('devtool add %s %s' % (recipe, req) )
76 try: 75 try:
77 self._run('devtool build %s' % recipe) 76 self._run('devtool build %s' % recipe)
78 finally: 77 finally:
@@ -112,7 +111,7 @@ class SdkUpdateTest(OESDKExtTestCase):
112 cmd = 'oe-publish-sdk %s %s' % (tcname_new, self.publish_dir) 111 cmd = 'oe-publish-sdk %s %s' % (tcname_new, self.publish_dir)
113 subprocess.check_output(cmd, shell=True) 112 subprocess.check_output(cmd, shell=True)
114 113
115 self.http_service = HTTPService(self.publish_dir) 114 self.http_service = HTTPService(self.publish_dir, logger=self.logger)
116 self.http_service.start() 115 self.http_service.start()
117 116
118 self.http_url = "http://127.0.0.1:%d" % self.http_service.port 117 self.http_url = "http://127.0.0.1:%d" % self.http_service.port
diff --git a/meta/lib/oeqa/sdkext/context.py b/meta/lib/oeqa/sdkext/context.py
index 2ac2bf6ff7..2da57e2ccf 100644
--- a/meta/lib/oeqa/sdkext/context.py
+++ b/meta/lib/oeqa/sdkext/context.py
@@ -12,11 +12,11 @@ class OESDKExtTestContext(OESDKTestContext):
12 12
13 # FIXME - We really need to do better mapping of names here, this at 13 # FIXME - We really need to do better mapping of names here, this at
14 # least allows some tests to run 14 # least allows some tests to run
15 def hasHostPackage(self, pkg): 15 def hasHostPackage(self, pkg, regex=False):
16 # We force a toolchain to be installed into the eSDK even if its minimal 16 # We force a toolchain to be installed into the eSDK even if its minimal
17 if pkg.startswith("packagegroup-cross-canadian-"): 17 if pkg.startswith("packagegroup-cross-canadian-"):
18 return True 18 return True
19 return self._hasPackage(self.host_pkg_manifest, pkg) 19 return self._hasPackage(self.host_pkg_manifest, pkg, regex)
20 20
21class OESDKExtTestContextExecutor(OESDKTestContextExecutor): 21class OESDKExtTestContextExecutor(OESDKTestContextExecutor):
22 _context_class = OESDKExtTestContext 22 _context_class = OESDKExtTestContext
diff --git a/meta/lib/oeqa/sdkext/testsdk.py b/meta/lib/oeqa/sdkext/testsdk.py
index ffd185ec55..6dc23065a4 100644
--- a/meta/lib/oeqa/sdkext/testsdk.py
+++ b/meta/lib/oeqa/sdkext/testsdk.py
@@ -16,6 +16,7 @@ class TestSDKExt(TestSDKBase):
16 from bb.utils import export_proxies 16 from bb.utils import export_proxies
17 from oeqa.utils import avoid_paths_in_environ, make_logger_bitbake_compatible, subprocesstweak 17 from oeqa.utils import avoid_paths_in_environ, make_logger_bitbake_compatible, subprocesstweak
18 from oeqa.sdkext.context import OESDKExtTestContext, OESDKExtTestContextExecutor 18 from oeqa.sdkext.context import OESDKExtTestContext, OESDKExtTestContextExecutor
19 from oeqa.utils import get_json_result_dir
19 20
20 pn = d.getVar("PN") 21 pn = d.getVar("PN")
21 logger = make_logger_bitbake_compatible(logging.getLogger("BitBake")) 22 logger = make_logger_bitbake_compatible(logging.getLogger("BitBake"))
@@ -67,10 +68,10 @@ class TestSDKExt(TestSDKBase):
67 # and we don't spend hours downloading kernels for the kernel module test 68 # and we don't spend hours downloading kernels for the kernel module test
68 # Abuse auto.conf since local.conf would be overwritten by the SDK 69 # Abuse auto.conf since local.conf would be overwritten by the SDK
69 with open(os.path.join(sdk_dir, 'conf', 'auto.conf'), 'a+') as f: 70 with open(os.path.join(sdk_dir, 'conf', 'auto.conf'), 'a+') as f:
70 f.write('SSTATE_MIRRORS += " \\n file://.* file://%s/PATH"\n' % test_data.get('SSTATE_DIR')) 71 f.write('SSTATE_MIRRORS += "file://.* file://%s/PATH"\n' % test_data.get('SSTATE_DIR'))
71 f.write('SOURCE_MIRROR_URL = "file://%s"\n' % test_data.get('DL_DIR')) 72 f.write('SOURCE_MIRROR_URL = "file://%s"\n' % test_data.get('DL_DIR'))
72 f.write('INHERIT += "own-mirrors"\n') 73 f.write('INHERIT += "own-mirrors"\n')
73 f.write('PREMIRRORS_prepend = " git://git.yoctoproject.org/.* git://%s/git2/git.yoctoproject.org.BASENAME \\n "\n' % test_data.get('DL_DIR')) 74 f.write('PREMIRRORS:prepend = "git://git.yoctoproject.org/.* git://%s/git2/git.yoctoproject.org.BASENAME "\n' % test_data.get('DL_DIR'))
74 75
75 # We need to do this in case we have a minimal SDK 76 # We need to do this in case we have a minimal SDK
76 subprocess.check_output(". %s > /dev/null; devtool sdk-install meta-extsdk-toolchain" % \ 77 subprocess.check_output(". %s > /dev/null; devtool sdk-install meta-extsdk-toolchain" % \
@@ -81,7 +82,8 @@ class TestSDKExt(TestSDKBase):
81 host_pkg_manifest=host_pkg_manifest) 82 host_pkg_manifest=host_pkg_manifest)
82 83
83 try: 84 try:
84 tc.loadTests(OESDKExtTestContextExecutor.default_cases) 85 modules = (d.getVar("TESTSDK_SUITES") or "").split()
86 tc.loadTests(OESDKExtTestContextExecutor.default_cases, modules)
85 except Exception as e: 87 except Exception as e:
86 import traceback 88 import traceback
87 bb.fatal("Loading tests failed:\n%s" % traceback.format_exc()) 89 bb.fatal("Loading tests failed:\n%s" % traceback.format_exc())
@@ -91,7 +93,7 @@ class TestSDKExt(TestSDKBase):
91 component = "%s %s" % (pn, OESDKExtTestContextExecutor.name) 93 component = "%s %s" % (pn, OESDKExtTestContextExecutor.name)
92 context_msg = "%s:%s" % (os.path.basename(tcname), os.path.basename(sdk_env)) 94 context_msg = "%s:%s" % (os.path.basename(tcname), os.path.basename(sdk_env))
93 configuration = self.get_sdk_configuration(d, 'sdkext') 95 configuration = self.get_sdk_configuration(d, 'sdkext')
94 result.logDetails(self.get_sdk_json_result_dir(d), 96 result.logDetails(get_json_result_dir(d),
95 configuration, 97 configuration,
96 self.get_sdk_result_id(configuration)) 98 self.get_sdk_result_id(configuration))
97 result.logSummary(component, context_msg) 99 result.logSummary(component, context_msg)
diff --git a/meta/lib/oeqa/selftest/case.py b/meta/lib/oeqa/selftest/case.py
index dcad4f76ec..da35b25f68 100644
--- a/meta/lib/oeqa/selftest/case.py
+++ b/meta/lib/oeqa/selftest/case.py
@@ -117,10 +117,6 @@ class OESelftestTestCase(OETestCase):
117 if e.errno != errno.ENOENT: 117 if e.errno != errno.ENOENT:
118 raise 118 raise
119 119
120 if self.tc.custommachine:
121 machine_conf = 'MACHINE ??= "%s"\n' % self.tc.custommachine
122 self.set_machine_config(machine_conf)
123
124 # tests might need their own setup 120 # tests might need their own setup
125 # but if they overwrite this one they have to call 121 # but if they overwrite this one they have to call
126 # super each time, so let's give them an alternative 122 # super each time, so let's give them an alternative
@@ -178,19 +174,11 @@ class OESelftestTestCase(OETestCase):
178 self.logger.debug("Writing to: %s\n%s\n" % (dest_path, data)) 174 self.logger.debug("Writing to: %s\n%s\n" % (dest_path, data))
179 ftools.write_file(dest_path, data) 175 ftools.write_file(dest_path, data)
180 176
181 if not multiconfig and self.tc.custommachine and 'MACHINE' in data:
182 machine = get_bb_var('MACHINE')
183 self.logger.warning('MACHINE overridden: %s' % machine)
184
185 def append_config(self, data): 177 def append_config(self, data):
186 """Append to <builddir>/conf/selftest.inc""" 178 """Append to <builddir>/conf/selftest.inc"""
187 self.logger.debug("Appending to: %s\n%s\n" % (self.testinc_path, data)) 179 self.logger.debug("Appending to: %s\n%s\n" % (self.testinc_path, data))
188 ftools.append_file(self.testinc_path, data) 180 ftools.append_file(self.testinc_path, data)
189 181
190 if self.tc.custommachine and 'MACHINE' in data:
191 machine = get_bb_var('MACHINE')
192 self.logger.warning('MACHINE overridden: %s' % machine)
193
194 def remove_config(self, data): 182 def remove_config(self, data):
195 """Remove data from <builddir>/conf/selftest.inc""" 183 """Remove data from <builddir>/conf/selftest.inc"""
196 self.logger.debug("Removing from: %s\n%s\n" % (self.testinc_path, data)) 184 self.logger.debug("Removing from: %s\n%s\n" % (self.testinc_path, data))
@@ -249,6 +237,13 @@ class OESelftestTestCase(OETestCase):
249 self.logger.debug("Writing to: %s\n%s\n" % (self.machineinc_path, data)) 237 self.logger.debug("Writing to: %s\n%s\n" % (self.machineinc_path, data))
250 ftools.write_file(self.machineinc_path, data) 238 ftools.write_file(self.machineinc_path, data)
251 239
240 def disable_class(self, classname):
241 destfile = "%s/classes/%s.bbclass" % (self.builddir, classname)
242 os.makedirs(os.path.dirname(destfile), exist_ok=True)
243 self.track_for_cleanup(destfile)
244 self.logger.debug("Creating empty class: %s\n" % (destfile))
245 ftools.write_file(destfile, "")
246
252 # check does path exist 247 # check does path exist
253 def assertExists(self, expr, msg=None): 248 def assertExists(self, expr, msg=None):
254 if not os.path.exists(expr): 249 if not os.path.exists(expr):
diff --git a/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py b/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py
index f7c356ad09..2c9584d329 100644
--- a/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py
+++ b/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -6,8 +8,8 @@ import os
6import shutil 8import shutil
7 9
8import oeqa.utils.ftools as ftools 10import oeqa.utils.ftools as ftools
9from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_test_layer 11from oeqa.utils.commands import runCmd, bitbake, get_bb_var
10from oeqa.selftest.cases.sstate import SStateBase 12from oeqa.selftest.cases.sstatetests import SStateBase
11 13
12 14
13class RebuildFromSState(SStateBase): 15class RebuildFromSState(SStateBase):
@@ -90,7 +92,7 @@ class RebuildFromSState(SStateBase):
90 self.assertFalse(failed_cleansstate, msg="The following recipes have failed cleansstate(all others have passed both cleansstate and rebuild from sstate tests): %s" % ' '.join(map(str, failed_cleansstate))) 92 self.assertFalse(failed_cleansstate, msg="The following recipes have failed cleansstate(all others have passed both cleansstate and rebuild from sstate tests): %s" % ' '.join(map(str, failed_cleansstate)))
91 93
92 def test_sstate_relocation(self): 94 def test_sstate_relocation(self):
93 self.run_test_sstate_rebuild(['core-image-sato-sdk'], relocate=True, rebuild_dependencies=True) 95 self.run_test_sstate_rebuild(['core-image-weston-sdk'], relocate=True, rebuild_dependencies=True)
94 96
95 def test_sstate_rebuild(self): 97 def test_sstate_rebuild(self):
96 self.run_test_sstate_rebuild(['core-image-sato-sdk'], relocate=False, rebuild_dependencies=True) 98 self.run_test_sstate_rebuild(['core-image-weston-sdk'], relocate=False, rebuild_dependencies=True)
diff --git a/meta/lib/oeqa/selftest/cases/archiver.py b/meta/lib/oeqa/selftest/cases/archiver.py
index ddd08ecf84..612ec675a7 100644
--- a/meta/lib/oeqa/selftest/cases/archiver.py
+++ b/meta/lib/oeqa/selftest/cases/archiver.py
@@ -1,9 +1,12 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import os 7import os
6import glob 8import glob
9import re
7from oeqa.utils.commands import bitbake, get_bb_vars 10from oeqa.utils.commands import bitbake, get_bb_vars
8from oeqa.selftest.case import OESelftestTestCase 11from oeqa.selftest.case import OESelftestTestCase
9 12
@@ -35,11 +38,11 @@ class Archiver(OESelftestTestCase):
35 src_path = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['TARGET_SYS']) 38 src_path = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['TARGET_SYS'])
36 39
37 # Check that include_recipe was included 40 # Check that include_recipe was included
38 included_present = len(glob.glob(src_path + '/%s-*' % include_recipe)) 41 included_present = len(glob.glob(src_path + '/%s-*/*' % include_recipe))
39 self.assertTrue(included_present, 'Recipe %s was not included.' % include_recipe) 42 self.assertTrue(included_present, 'Recipe %s was not included.' % include_recipe)
40 43
41 # Check that exclude_recipe was excluded 44 # Check that exclude_recipe was excluded
42 excluded_present = len(glob.glob(src_path + '/%s-*' % exclude_recipe)) 45 excluded_present = len(glob.glob(src_path + '/%s-*/*' % exclude_recipe))
43 self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % exclude_recipe) 46 self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % exclude_recipe)
44 47
45 def test_archiver_filters_by_type(self): 48 def test_archiver_filters_by_type(self):
@@ -67,11 +70,11 @@ class Archiver(OESelftestTestCase):
67 src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS']) 70 src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS'])
68 71
69 # Check that target_recipe was included 72 # Check that target_recipe was included
70 included_present = len(glob.glob(src_path_target + '/%s-*' % target_recipe)) 73 included_present = len(glob.glob(src_path_target + '/%s-*/*' % target_recipe))
71 self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipe) 74 self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipe)
72 75
73 # Check that native_recipe was excluded 76 # Check that native_recipe was excluded
74 excluded_present = len(glob.glob(src_path_native + '/%s-*' % native_recipe)) 77 excluded_present = len(glob.glob(src_path_native + '/%s-*/*' % native_recipe))
75 self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipe) 78 self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipe)
76 79
77 def test_archiver_filters_by_type_and_name(self): 80 def test_archiver_filters_by_type_and_name(self):
@@ -104,20 +107,51 @@ class Archiver(OESelftestTestCase):
104 src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS']) 107 src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS'])
105 108
106 # Check that target_recipe[0] and native_recipes[1] were included 109 # Check that target_recipe[0] and native_recipes[1] were included
107 included_present = len(glob.glob(src_path_target + '/%s-*' % target_recipes[0])) 110 included_present = len(glob.glob(src_path_target + '/%s-*/*' % target_recipes[0]))
108 self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipes[0]) 111 self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipes[0])
109 112
110 included_present = len(glob.glob(src_path_native + '/%s-*' % native_recipes[1])) 113 included_present = len(glob.glob(src_path_native + '/%s-*/*' % native_recipes[1]))
111 self.assertTrue(included_present, 'Recipe %s was not included.' % native_recipes[1]) 114 self.assertTrue(included_present, 'Recipe %s was not included.' % native_recipes[1])
112 115
113 # Check that native_recipes[0] and target_recipes[1] were excluded 116 # Check that native_recipes[0] and target_recipes[1] were excluded
114 excluded_present = len(glob.glob(src_path_native + '/%s-*' % native_recipes[0])) 117 excluded_present = len(glob.glob(src_path_native + '/%s-*/*' % native_recipes[0]))
115 self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipes[0]) 118 self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipes[0])
116 119
117 excluded_present = len(glob.glob(src_path_target + '/%s-*' % target_recipes[1])) 120 excluded_present = len(glob.glob(src_path_target + '/%s-*/*' % target_recipes[1]))
118 self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % target_recipes[1]) 121 self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % target_recipes[1])
119 122
123 def test_archiver_multiconfig_shared_unpack_and_patch(self):
124 """
125 Test that shared recipes in original mode with diff enabled works in multiconfig,
126 otherwise it will not build when using the same TMP dir.
127 """
128
129 features = 'BBMULTICONFIG = "mc1 mc2"\n'
130 features += 'INHERIT += "archiver"\n'
131 features += 'ARCHIVER_MODE[src] = "original"\n'
132 features += 'ARCHIVER_MODE[diff] = "1"\n'
133 self.write_config(features)
134
135 # We can use any machine in multiconfig as long as they are different
136 self.write_config('MACHINE = "qemuarm"\n', 'mc1')
137 self.write_config('MACHINE = "qemux86"\n', 'mc2')
138
139 task = 'do_unpack_and_patch'
140 # Use gcc-source as it is a shared recipe (appends the pv to the pn)
141 pn = 'gcc-source-%s' % get_bb_vars(['PV'], 'gcc')['PV']
142
143 # Generate the tasks signatures
144 bitbake('mc:mc1:%s mc:mc2:%s -c %s -S lockedsigs' % (pn, pn, task))
120 145
146 # Check the tasks signatures
147 # To be machine agnostic the tasks needs to generate the same signature for each machine
148 locked_sigs_inc = "%s/locked-sigs.inc" % self.builddir
149 locked_sigs = open(locked_sigs_inc).read()
150 task_sigs = re.findall(r"%s:%s:.*" % (pn, task), locked_sigs)
151 uniq_sigs = set(task_sigs)
152 self.assertFalse(len(uniq_sigs) - 1, \
153 'The task "%s" of the recipe "%s" has different signatures in "%s" for each machine in multiconfig' \
154 % (task, pn, locked_sigs_inc))
121 155
122 def test_archiver_srpm_mode(self): 156 def test_archiver_srpm_mode(self):
123 """ 157 """
@@ -156,28 +190,28 @@ class Archiver(OESelftestTestCase):
156 Test that the archiver works with `ARCHIVER_MODE[src] = "original"`. 190 Test that the archiver works with `ARCHIVER_MODE[src] = "original"`.
157 """ 191 """
158 192
159 self._test_archiver_mode('original', 'ed-1.14.1.tar.lz') 193 self._test_archiver_mode('original', 'ed-1.21.1.tar.lz')
160 194
161 def test_archiver_mode_patched(self): 195 def test_archiver_mode_patched(self):
162 """ 196 """
163 Test that the archiver works with `ARCHIVER_MODE[src] = "patched"`. 197 Test that the archiver works with `ARCHIVER_MODE[src] = "patched"`.
164 """ 198 """
165 199
166 self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-patched.tar.gz') 200 self._test_archiver_mode('patched', 'selftest-ed-native-1.21.1-r0-patched.tar.xz')
167 201
168 def test_archiver_mode_configured(self): 202 def test_archiver_mode_configured(self):
169 """ 203 """
170 Test that the archiver works with `ARCHIVER_MODE[src] = "configured"`. 204 Test that the archiver works with `ARCHIVER_MODE[src] = "configured"`.
171 """ 205 """
172 206
173 self._test_archiver_mode('configured', 'selftest-ed-native-1.14.1-r0-configured.tar.gz') 207 self._test_archiver_mode('configured', 'selftest-ed-native-1.21.1-r0-configured.tar.xz')
174 208
175 def test_archiver_mode_recipe(self): 209 def test_archiver_mode_recipe(self):
176 """ 210 """
177 Test that the archiver works with `ARCHIVER_MODE[recipe] = "1"`. 211 Test that the archiver works with `ARCHIVER_MODE[recipe] = "1"`.
178 """ 212 """
179 213
180 self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-recipe.tar.gz', 214 self._test_archiver_mode('patched', 'selftest-ed-native-1.21.1-r0-recipe.tar.xz',
181 'ARCHIVER_MODE[recipe] = "1"\n') 215 'ARCHIVER_MODE[recipe] = "1"\n')
182 216
183 def test_archiver_mode_diff(self): 217 def test_archiver_mode_diff(self):
@@ -186,7 +220,7 @@ class Archiver(OESelftestTestCase):
186 Exclusions controlled by `ARCHIVER_MODE[diff-exclude]` are not yet tested. 220 Exclusions controlled by `ARCHIVER_MODE[diff-exclude]` are not yet tested.
187 """ 221 """
188 222
189 self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-diff.gz', 223 self._test_archiver_mode('patched', 'selftest-ed-native-1.21.1-r0-diff.gz',
190 'ARCHIVER_MODE[diff] = "1"\n') 224 'ARCHIVER_MODE[diff] = "1"\n')
191 225
192 def test_archiver_mode_dumpdata(self): 226 def test_archiver_mode_dumpdata(self):
@@ -194,7 +228,7 @@ class Archiver(OESelftestTestCase):
194 Test that the archiver works with `ARCHIVER_MODE[dumpdata] = "1"`. 228 Test that the archiver works with `ARCHIVER_MODE[dumpdata] = "1"`.
195 """ 229 """
196 230
197 self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-showdata.dump', 231 self._test_archiver_mode('patched', 'selftest-ed-native-1.21.1-r0-showdata.dump',
198 'ARCHIVER_MODE[dumpdata] = "1"\n') 232 'ARCHIVER_MODE[dumpdata] = "1"\n')
199 233
200 def test_archiver_mode_mirror(self): 234 def test_archiver_mode_mirror(self):
@@ -202,7 +236,7 @@ class Archiver(OESelftestTestCase):
202 Test that the archiver works with `ARCHIVER_MODE[src] = "mirror"`. 236 Test that the archiver works with `ARCHIVER_MODE[src] = "mirror"`.
203 """ 237 """
204 238
205 self._test_archiver_mode('mirror', 'ed-1.14.1.tar.lz', 239 self._test_archiver_mode('mirror', 'ed-1.21.1.tar.lz',
206 'BB_GENERATE_MIRROR_TARBALLS = "1"\n') 240 'BB_GENERATE_MIRROR_TARBALLS = "1"\n')
207 241
208 def test_archiver_mode_mirror_excludes(self): 242 def test_archiver_mode_mirror_excludes(self):
@@ -213,7 +247,7 @@ class Archiver(OESelftestTestCase):
213 """ 247 """
214 248
215 target='selftest-ed' 249 target='selftest-ed'
216 target_file_name = 'ed-1.14.1.tar.lz' 250 target_file_name = 'ed-1.21.1.tar.lz'
217 251
218 features = 'INHERIT += "archiver"\n' 252 features = 'INHERIT += "archiver"\n'
219 features += 'ARCHIVER_MODE[src] = "mirror"\n' 253 features += 'ARCHIVER_MODE[src] = "mirror"\n'
@@ -251,7 +285,7 @@ class Archiver(OESelftestTestCase):
251 bitbake('-c deploy_archives %s' % (target)) 285 bitbake('-c deploy_archives %s' % (target))
252 286
253 bb_vars = get_bb_vars(['DEPLOY_DIR_SRC']) 287 bb_vars = get_bb_vars(['DEPLOY_DIR_SRC'])
254 for target_file_name in ['ed-1.14.1.tar.lz', 'hello.c']: 288 for target_file_name in ['ed-1.21.1.tar.lz', 'hello.c']:
255 glob_str = os.path.join(bb_vars['DEPLOY_DIR_SRC'], 'mirror', target_file_name) 289 glob_str = os.path.join(bb_vars['DEPLOY_DIR_SRC'], 'mirror', target_file_name)
256 glob_result = glob.glob(glob_str) 290 glob_result = glob.glob(glob_str)
257 self.assertTrue(glob_result, 'Missing archive file %s' % (target_file_name)) 291 self.assertTrue(glob_result, 'Missing archive file %s' % (target_file_name))
diff --git a/meta/lib/oeqa/selftest/cases/barebox.py b/meta/lib/oeqa/selftest/cases/barebox.py
new file mode 100644
index 0000000000..3f8f232432
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/barebox.py
@@ -0,0 +1,44 @@
1# Qemu-based barebox bootloader integration testing
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: MIT
6#
7
8from oeqa.selftest.case import OESelftestTestCase
9from oeqa.utils.commands import bitbake, runqemu
10from oeqa.core.decorator.data import skipIfNotArch
11from oeqa.core.decorator import OETestTag
12
13barebox_boot_patterns = {
14 'search_reached_prompt': r"stop autoboot",
15 'search_login_succeeded': r"barebox@[^:]+:[^ ]+ ",
16 'search_cmd_finished': r"barebox@[a-zA-Z0-9\-\s]+:/"
17 }
18
19
20class BareboxTest(OESelftestTestCase):
21
22 @skipIfNotArch(['arm', 'aarch64'])
23 @OETestTag("runqemu")
24 def test_boot_barebox(self):
25 """
26 Tests building barebox and booting it with QEMU
27 """
28
29 self.write_config("""
30QB_DEFAULT_KERNEL = "barebox-dt-2nd.img"
31PREFERRED_PROVIDER_virtual/bootloader = "barebox"
32QEMU_USE_KVM = "False"
33""")
34
35 bitbake("virtual/bootloader core-image-minimal")
36
37 with runqemu('core-image-minimal', ssh=False, runqemuparams='nographic',
38 boot_patterns=barebox_boot_patterns) as qemu:
39
40 # test if barebox console works
41 cmd = "version"
42 status, output = qemu.run_serial(cmd)
43 self.assertEqual(status, 1, msg=output)
44 self.assertTrue("barebox" in output, msg=output)
diff --git a/meta/lib/oeqa/selftest/cases/baremetal.py b/meta/lib/oeqa/selftest/cases/baremetal.py
new file mode 100644
index 0000000000..cadaea2f1a
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/baremetal.py
@@ -0,0 +1,14 @@
1
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: MIT
6#
7
8from oeqa.selftest.case import OESelftestTestCase
9from oeqa.utils.commands import bitbake
10
11class BaremetalTest(OESelftestTestCase):
12 def test_baremetal(self):
13 self.write_config('TCLIBC = "baremetal"')
14 bitbake('baremetal-helloworld')
diff --git a/meta/lib/oeqa/selftest/cases/bbclasses.py b/meta/lib/oeqa/selftest/cases/bbclasses.py
new file mode 100644
index 0000000000..10545ebe65
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/bbclasses.py
@@ -0,0 +1,106 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import get_bb_vars, bitbake
9
10class Systemd(OESelftestTestCase):
11 """
12 Tests related to the systemd bbclass.
13 """
14
15 def getVars(self, recipe):
16 self.bb_vars = get_bb_vars(
17 [
18 'BPN',
19 'D',
20 'INIT_D_DIR',
21 'prefix',
22 'systemd_system_unitdir',
23 'sysconfdir',
24 ],
25 recipe,
26 )
27
28 def fileExists(self, filename):
29 self.assertExists(filename.format(**self.bb_vars))
30
31 def fileNotExists(self, filename):
32 self.assertNotExists(filename.format(**self.bb_vars))
33
34 def test_systemd_in_distro(self):
35 """
36 Summary: Verify that no sysvinit files are installed when the
37 systemd distro feature is enabled, but sysvinit is not.
38 Expected: Systemd service file exists, but /etc does not.
39 Product: OE-Core
40 Author: Peter Kjellerstedt <peter.kjellerstedt@axis.com>
41 """
42
43 self.write_config("""
44DISTRO_FEATURES:append = " systemd usrmerge"
45DISTRO_FEATURES:remove = "sysvinit"
46VIRTUAL-RUNTIME_init_manager = "systemd"
47""")
48 bitbake("systemd-only systemd-and-sysvinit -c install")
49
50 self.getVars("systemd-only")
51 self.fileExists("{D}{systemd_system_unitdir}/{BPN}.service")
52
53 self.getVars("systemd-and-sysvinit")
54 self.fileExists("{D}{systemd_system_unitdir}/{BPN}.service")
55 self.fileNotExists("{D}{sysconfdir}")
56
57 def test_systemd_and_sysvinit_in_distro(self):
58 """
59 Summary: Verify that both systemd and sysvinit files are installed
60 when both the systemd and sysvinit distro features are
61 enabled.
62 Expected: Systemd service file and sysvinit initscript exist.
63 Product: OE-Core
64 Author: Peter Kjellerstedt <peter.kjellerstedt@axis.com>
65 """
66
67 self.write_config("""
68DISTRO_FEATURES:append = " systemd sysvinit usrmerge"
69VIRTUAL-RUNTIME_init_manager = "systemd"
70""")
71 bitbake("systemd-only systemd-and-sysvinit -c install")
72
73 self.getVars("systemd-only")
74 self.fileExists("{D}{systemd_system_unitdir}/{BPN}.service")
75
76 self.getVars("systemd-and-sysvinit")
77 self.fileExists("{D}{systemd_system_unitdir}/{BPN}.service")
78 self.fileExists("{D}{INIT_D_DIR}/{BPN}")
79
80 def test_sysvinit_in_distro(self):
81 """
82 Summary: Verify that no systemd service files are installed when the
83 sysvinit distro feature is enabled, but systemd is not.
84 Expected: The systemd service file does not exist, nor does /usr.
85 The sysvinit initscript exists.
86 Product: OE-Core
87 Author: Peter Kjellerstedt <peter.kjellerstedt@axis.com>
88 """
89
90 self.write_config("""
91DISTRO_FEATURES:remove = "systemd"
92DISTRO_FEATURES:append = " sysvinit usrmerge"
93VIRTUAL-RUNTIME_init_manager = "sysvinit"
94""")
95 bitbake("systemd-only systemd-and-sysvinit -c install")
96
97 self.getVars("systemd-only")
98 self.fileNotExists("{D}{systemd_system_unitdir}/{BPN}.service")
99 self.fileNotExists("{D}{prefix}")
100 self.fileNotExists("{D}{sysconfdir}")
101 self.fileExists("{D}")
102
103 self.getVars("systemd-and-sysvinit")
104 self.fileNotExists("{D}{systemd_system_unitdir}/{BPN}.service")
105 self.fileNotExists("{D}{prefix}")
106 self.fileExists("{D}{INIT_D_DIR}/{BPN}")
diff --git a/meta/lib/oeqa/selftest/cases/bblayers.py b/meta/lib/oeqa/selftest/cases/bblayers.py
index f131d9856c..68b0377720 100644
--- a/meta/lib/oeqa/selftest/cases/bblayers.py
+++ b/meta/lib/oeqa/selftest/cases/bblayers.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -6,12 +8,23 @@ import os
6import re 8import re
7 9
8import oeqa.utils.ftools as ftools 10import oeqa.utils.ftools as ftools
9from oeqa.utils.commands import runCmd, get_bb_var, get_bb_vars 11from oeqa.utils.commands import runCmd, get_bb_var, get_bb_vars, bitbake
10 12
11from oeqa.selftest.case import OESelftestTestCase 13from oeqa.selftest.case import OESelftestTestCase
12 14
13class BitbakeLayers(OESelftestTestCase): 15class BitbakeLayers(OESelftestTestCase):
14 16
17 @classmethod
18 def setUpClass(cls):
19 super(BitbakeLayers, cls).setUpClass()
20 bitbake("python3-jsonschema-native")
21 bitbake("-c addto_recipe_sysroot python3-jsonschema-native")
22
23 def test_bitbakelayers_layerindexshowdepends(self):
24 result = runCmd('bitbake-layers layerindex-show-depends meta-poky')
25 find_in_contents = re.search("openembedded-core", result.output)
26 self.assertTrue(find_in_contents, msg = "openembedded-core should have been listed at this step. bitbake-layers layerindex-show-depends meta-poky output: %s" % result.output)
27
15 def test_bitbakelayers_showcrossdepends(self): 28 def test_bitbakelayers_showcrossdepends(self):
16 result = runCmd('bitbake-layers show-cross-depends') 29 result = runCmd('bitbake-layers show-cross-depends')
17 self.assertIn('aspell', result.output) 30 self.assertIn('aspell', result.output)
@@ -41,7 +54,7 @@ class BitbakeLayers(OESelftestTestCase):
41 bb_file = os.path.join(testoutdir, recipe_path, recipe_file) 54 bb_file = os.path.join(testoutdir, recipe_path, recipe_file)
42 self.assertTrue(os.path.isfile(bb_file), msg = "Cannot find xcursor-transparent-theme_0.1.1.bb in the test_bitbakelayers_flatten local dir.") 55 self.assertTrue(os.path.isfile(bb_file), msg = "Cannot find xcursor-transparent-theme_0.1.1.bb in the test_bitbakelayers_flatten local dir.")
43 contents = ftools.read_file(bb_file) 56 contents = ftools.read_file(bb_file)
44 find_in_contents = re.search("##### bbappended from meta-selftest #####\n(.*\n)*include test_recipe.inc", contents) 57 find_in_contents = re.search(r"##### bbappended from meta-selftest #####\n(.*\n)*include test_recipe.inc", contents)
45 self.assertTrue(find_in_contents, msg = "Flattening layers did not work. bitbake-layers flatten output: %s" % result.output) 58 self.assertTrue(find_in_contents, msg = "Flattening layers did not work. bitbake-layers flatten output: %s" % result.output)
46 59
47 def test_bitbakelayers_add_remove(self): 60 def test_bitbakelayers_add_remove(self):
@@ -72,8 +85,9 @@ class BitbakeLayers(OESelftestTestCase):
72 result = runCmd('bitbake-layers show-recipes -i image') 85 result = runCmd('bitbake-layers show-recipes -i image')
73 self.assertIn('core-image-minimal', result.output) 86 self.assertIn('core-image-minimal', result.output)
74 self.assertNotIn('mtd-utils:', result.output) 87 self.assertNotIn('mtd-utils:', result.output)
75 result = runCmd('bitbake-layers show-recipes -i cmake,pkgconfig') 88 result = runCmd('bitbake-layers show-recipes -i meson,pkgconfig')
76 self.assertIn('libproxy:', result.output) 89 self.assertIn('libproxy:', result.output)
90 result = runCmd('bitbake-layers show-recipes -i cmake,pkgconfig')
77 self.assertNotIn('mtd-utils:', result.output) # doesn't inherit either 91 self.assertNotIn('mtd-utils:', result.output) # doesn't inherit either
78 self.assertNotIn('wget:', result.output) # doesn't inherit cmake 92 self.assertNotIn('wget:', result.output) # doesn't inherit cmake
79 self.assertNotIn('waffle:', result.output) # doesn't inherit pkgconfig 93 self.assertNotIn('waffle:', result.output) # doesn't inherit pkgconfig
@@ -106,6 +120,11 @@ class BitbakeLayers(OESelftestTestCase):
106 120
107 self.assertEqual(bb_vars['BBFILE_PRIORITY_%s' % layername], str(priority), 'BBFILE_PRIORITY_%s != %d' % (layername, priority)) 121 self.assertEqual(bb_vars['BBFILE_PRIORITY_%s' % layername], str(priority), 'BBFILE_PRIORITY_%s != %d' % (layername, priority))
108 122
123 result = runCmd('bitbake-layers save-build-conf {} {}'.format(layerpath, "buildconf-1"))
124 for f in ('local.conf.sample', 'bblayers.conf.sample', 'conf-summary.txt', 'conf-notes.txt'):
125 fullpath = os.path.join(layerpath, "conf", "templates", "buildconf-1", f)
126 self.assertTrue(os.path.exists(fullpath), "Template configuration file {} not found".format(fullpath))
127
109 def get_recipe_basename(self, recipe): 128 def get_recipe_basename(self, recipe):
110 recipe_file = "" 129 recipe_file = ""
111 result = runCmd("bitbake-layers show-recipes -f %s" % recipe) 130 result = runCmd("bitbake-layers show-recipes -f %s" % recipe)
@@ -116,3 +135,139 @@ class BitbakeLayers(OESelftestTestCase):
116 135
117 self.assertTrue(os.path.isfile(recipe_file), msg = "Can't find recipe file for %s" % recipe) 136 self.assertTrue(os.path.isfile(recipe_file), msg = "Can't find recipe file for %s" % recipe)
118 return os.path.basename(recipe_file) 137 return os.path.basename(recipe_file)
138
139 def validate_layersjson(self, json):
140 python = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-jsonschema-native'), 'nativepython3')
141 jsonvalidator = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-jsonschema-native'), 'jsonschema')
142 jsonschema = os.path.join(get_bb_var('COREBASE'), 'meta/files/layers.schema.json')
143 result = runCmd("{} {} -i {} {}".format(python, jsonvalidator, json, jsonschema))
144
145 def test_validate_examplelayersjson(self):
146 json = os.path.join(get_bb_var('COREBASE'), "meta/files/layers.example.json")
147 self.validate_layersjson(json)
148
149 def test_bitbakelayers_setup(self):
150 result = runCmd('bitbake-layers create-layers-setup {}'.format(self.testlayer_path))
151 jsonfile = os.path.join(self.testlayer_path, "setup-layers.json")
152 self.validate_layersjson(jsonfile)
153
154 # The revision-under-test may not necessarily be available on the remote server,
155 # so replace it with a revision that has a yocto-4.1 tag.
156 import json
157 with open(jsonfile) as f:
158 data = json.load(f)
159 for s in data['sources']:
160 data['sources'][s]['git-remote']['rev'] = '5200799866b92259e855051112520006e1aaaac0'
161 with open(jsonfile, 'w') as f:
162 json.dump(data, f)
163
164 testcheckoutdir = os.path.join(self.builddir, 'test-layer-checkout')
165 result = runCmd('{}/setup-layers --destdir {}'.format(self.testlayer_path, testcheckoutdir))
166 layers_json = os.path.join(testcheckoutdir, ".oe-layers.json")
167 self.assertTrue(os.path.exists(layers_json), "File {} not found in test layer checkout".format(layers_json))
168
169 # As setup-layers checked out an old revision of poky, there is no setup-build symlink,
170 # and we need to run oe-setup-build directly from the current poky tree under test
171 oe_setup_build = os.path.join(get_bb_var('COREBASE'), 'scripts/oe-setup-build')
172 oe_setup_build_l = os.path.join(testcheckoutdir, 'setup-build')
173 os.symlink(oe_setup_build,oe_setup_build_l)
174
175 cmd = '{} --layerlist {} list -v'.format(oe_setup_build_l, layers_json)
176 result = runCmd(cmd)
177 cond = "conf/templates/default" in result.output
178 self.assertTrue(cond, "Incorrect output from {}: {}".format(cmd, result.output))
179
180 # rather than hardcode the build setup cmdline here, let's actually run what the tool suggests to the user
181 conf = None
182 if 'poky-default' in result.output:
183 conf = 'poky-default'
184 elif 'meta-default' in result.output:
185 conf = 'meta-default'
186 self.assertIsNotNone(conf, "Could not find the configuration to set up a build in the output: {}".format(result.output))
187
188 cmd = '{} --layerlist {} setup -c {} --no-shell'.format(oe_setup_build_l, layers_json, conf)
189 result = runCmd(cmd)
190
191 def test_bitbakelayers_updatelayer(self):
192 result = runCmd('bitbake-layers create-layers-setup {}'.format(self.testlayer_path))
193 jsonfile = os.path.join(self.testlayer_path, "setup-layers.json")
194 self.validate_layersjson(jsonfile)
195
196 import json
197 with open(jsonfile) as f:
198 data = json.load(f)
199 repos = []
200 for s in data['sources']:
201 repos.append(s)
202
203 self.assertTrue(len(repos) > 1, "Not enough repositories available")
204 self.validate_layersjson(jsonfile)
205
206 test_ref_1 = 'ref_1'
207 test_ref_2 = 'ref_2'
208
209 # Create a new layers setup using custom references
210 result = runCmd('bitbake-layers create-layers-setup --use-custom-reference {first_repo}:{test_ref} --use-custom-reference {second_repo}:{test_ref} {path}'
211 .format(first_repo=repos[0], second_repo=repos[1], test_ref=test_ref_1, path=self.testlayer_path))
212 self.validate_layersjson(jsonfile)
213
214 with open(jsonfile) as f:
215 data = json.load(f)
216 first_rev_1 = data['sources'][repos[0]]['git-remote']['rev']
217 first_desc_1 = data['sources'][repos[0]]['git-remote']['describe']
218 second_rev_1 = data['sources'][repos[1]]['git-remote']['rev']
219 second_desc_1 = data['sources'][repos[1]]['git-remote']['describe']
220
221 self.assertEqual(first_rev_1, test_ref_1, "Revision not set correctly: '{}'".format(first_rev_1))
222 self.assertEqual(first_desc_1, '', "Describe not cleared: '{}'".format(first_desc_1))
223 self.assertEqual(second_rev_1, test_ref_1, "Revision not set correctly: '{}'".format(second_rev_1))
224 self.assertEqual(second_desc_1, '', "Describe not cleared: '{}'".format(second_desc_1))
225
226 # Update one of the repositories in the layers setup using a different custom reference
227 # This should only update the selected repository, everything else should remain as is
228 result = runCmd('bitbake-layers create-layers-setup --update --use-custom-reference {first_repo}:{test_ref} {path}'
229 .format(first_repo=repos[0], test_ref=test_ref_2, path=self.testlayer_path))
230 self.validate_layersjson(jsonfile)
231
232 with open(jsonfile) as f:
233 data = json.load(f)
234 first_rev_2 = data['sources'][repos[0]]['git-remote']['rev']
235 first_desc_2 = data['sources'][repos[0]]['git-remote']['describe']
236 second_rev_2 = data['sources'][repos[1]]['git-remote']['rev']
237 second_desc_2 = data['sources'][repos[1]]['git-remote']['describe']
238
239 self.assertEqual(first_rev_2, test_ref_2, "Revision not set correctly: '{}'".format(first_rev_2))
240 self.assertEqual(first_desc_2, '', "Describe not cleared: '{}'".format(first_desc_2))
241 self.assertEqual(second_rev_2, second_rev_1, "Revision should not be updated: '{}'".format(second_rev_2))
242 self.assertEqual(second_desc_2, second_desc_1, "Describe should not be updated: '{}'".format(second_desc_2))
243
244class BitbakeConfigBuild(OESelftestTestCase):
245 def test_enable_disable_fragments(self):
246 self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_VARIABLE'), None)
247 self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_ANOTHER_VARIABLE'), None)
248
249 runCmd('bitbake-config-build enable-fragment selftest/test-fragment')
250 self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_VARIABLE'), 'somevalue')
251 self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_ANOTHER_VARIABLE'), None)
252
253 runCmd('bitbake-config-build enable-fragment selftest/more-fragments-here/test-another-fragment')
254 self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_VARIABLE'), 'somevalue')
255 self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_ANOTHER_VARIABLE'), 'someothervalue')
256
257 fragment_metadata_command = "bitbake-getvar -f {} --value {}"
258 result = runCmd(fragment_metadata_command.format("selftest/test-fragment", "BB_CONF_FRAGMENT_SUMMARY"))
259 self.assertIn("This is a configuration fragment intended for testing in oe-selftest context", result.output)
260 result = runCmd(fragment_metadata_command.format("selftest/test-fragment", "BB_CONF_FRAGMENT_DESCRIPTION"))
261 self.assertIn("It defines a variable that can be checked inside the test.", result.output)
262 result = runCmd(fragment_metadata_command.format("selftest/more-fragments-here/test-another-fragment", "BB_CONF_FRAGMENT_SUMMARY"))
263 self.assertIn("This is a second configuration fragment intended for testing in oe-selftest context", result.output)
264 result = runCmd(fragment_metadata_command.format("selftest/more-fragments-here/test-another-fragment", "BB_CONF_FRAGMENT_DESCRIPTION"))
265 self.assertIn("It defines another variable that can be checked inside the test.", result.output)
266
267 runCmd('bitbake-config-build disable-fragment selftest/test-fragment')
268 self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_VARIABLE'), None)
269 self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_ANOTHER_VARIABLE'), 'someothervalue')
270
271 runCmd('bitbake-config-build disable-fragment selftest/more-fragments-here/test-another-fragment')
272 self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_VARIABLE'), None)
273 self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_ANOTHER_VARIABLE'), None)
diff --git a/meta/lib/oeqa/selftest/cases/bblock.py b/meta/lib/oeqa/selftest/cases/bblock.py
new file mode 100644
index 0000000000..2b62d2a0aa
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/bblock.py
@@ -0,0 +1,203 @@
1#
2# Copyright (c) 2023 BayLibre, SAS
3# Author: Julien Stephan <jstephan@baylibre.com>
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import os
9import re
10import bb.tinfoil
11
12import oeqa.utils.ftools as ftools
13from oeqa.utils.commands import runCmd, get_bb_var, get_bb_vars, bitbake
14
15from oeqa.selftest.case import OESelftestTestCase
16
17
18class BBLock(OESelftestTestCase):
19 @classmethod
20 def setUpClass(cls):
21 super(BBLock, cls).setUpClass()
22 cls.lockfile = cls.builddir + "/conf/bblock.conf"
23
24 def unlock_recipes(self, recipes=None, tasks=None):
25 cmd = "bblock -r "
26 if recipes:
27 cmd += " ".join(recipes)
28 if tasks:
29 cmd += " -t " + ",".join(tasks)
30 result = runCmd(cmd)
31
32 if recipes:
33 # ensure all signatures are removed from lockfile
34 contents = ftools.read_file(self.lockfile)
35 for recipe in recipes:
36 for task in tasks:
37 find_in_contents = re.search(
38 'SIGGEN_LOCKEDSIGS_.+\s\+=\s"%s:%s:.*"' % (recipe, task),
39 contents,
40 )
41 self.assertFalse(
42 find_in_contents,
43 msg="%s:%s should not be present into bblock.conf anymore"
44 % (recipe, task),
45 )
46 self.assertExists(self.lockfile)
47 else:
48 self.assertNotExists(self.lockfile)
49
50 def lock_recipes(self, recipes, tasks=None):
51 cmd = "bblock " + " ".join(recipes)
52 if tasks:
53 cmd += " -t " + ",".join(tasks)
54
55 result = runCmd(cmd)
56
57 self.assertExists(self.lockfile)
58
59 # ensure all signatures are added to lockfile
60 contents = ftools.read_file(self.lockfile)
61 for recipe in recipes:
62 if tasks:
63 for task in tasks:
64 find_in_contents = re.search(
65 'SIGGEN_LOCKEDSIGS_.+\s\+=\s"%s:%s:.*"' % (recipe, task),
66 contents,
67 )
68 self.assertTrue(
69 find_in_contents,
70 msg="%s:%s was not added into bblock.conf. bblock output: %s"
71 % (recipe, task, result.output),
72 )
73
74 def modify_tasks(self, recipes, tasks):
75 task_append = ""
76 for recipe in recipes:
77 bb_vars = get_bb_vars(["PV"], recipe)
78 recipe_pv = bb_vars["PV"]
79 recipe_append_file = recipe + "_" + recipe_pv + ".bbappend"
80
81 os.mkdir(os.path.join(self.testlayer_path, "recipes-test", recipe))
82 recipe_append_path = os.path.join(
83 self.testlayer_path, "recipes-test", recipe, recipe_append_file
84 )
85
86 for task in tasks:
87 task_append += "%s:append() {\n#modify task hash \n}\n" % task
88 ftools.write_file(recipe_append_path, task_append)
89 self.add_command_to_tearDown(
90 "rm -rf %s" % os.path.join(self.testlayer_path, "recipes-test", recipe)
91 )
92
93 def test_lock_single_recipe_single_task(self):
94 recipes = ["quilt"]
95 tasks = ["do_compile"]
96 self._run_test(recipes, tasks)
97
98 def test_lock_single_recipe_multiple_tasks(self):
99 recipes = ["quilt"]
100 tasks = ["do_compile", "do_install"]
101 self._run_test(recipes, tasks)
102
103 def test_lock_single_recipe_all_tasks(self):
104 recipes = ["quilt"]
105 self._run_test(recipes, None)
106
107 def test_lock_multiple_recipe_single_task(self):
108 recipes = ["quilt", "bc"]
109 tasks = ["do_compile"]
110 self._run_test(recipes, tasks)
111
112 def test_lock_architecture_specific(self):
113 # unlock all recipes and ensure no bblock.conf file exist
114 self.unlock_recipes()
115
116 recipes = ["quilt"]
117 tasks = ["do_compile"]
118
119 # lock quilt's do_compile task for another machine
120 if self.td["MACHINE"] == "qemux86-64":
121 machine = "qemuarm"
122 else:
123 machine = "qemux86-64"
124
125 self.write_config('MACHINE = "%s"\n' % machine)
126
127 self.lock_recipes(recipes, tasks)
128
129 self.write_config('MACHINE = "%s"\n' % self.td["MACHINE"])
130 # modify quilt's do_compile task
131 self.modify_tasks(recipes, tasks)
132
133 # build quilt using the default machine
134 # No Note/Warning should be emitted since sig is locked for another machine
135 # (quilt package is architecture dependent)
136 info_message = "NOTE: The following recipes have locked tasks: " + recipes[0]
137 warn_message = "The %s:%s sig is computed to be" % (recipes[0], tasks[0])
138 result = bitbake(recipes[0] + " -n")
139 self.assertNotIn(info_message, result.output)
140 self.assertNotIn(warn_message, result.output)
141
142 # unlock all recipes
143 self.unlock_recipes()
144
145 def _run_test(self, recipes, tasks=None):
146 # unlock all recipes and ensure no bblock.conf file exist
147 self.unlock_recipes()
148
149 self.write_config('BB_SIGNATURE_HANDLER = "OEBasicHash"')
150
151 # lock tasks for recipes
152 result = self.lock_recipes(recipes, tasks)
153
154 if not tasks:
155 tasks = []
156 result = bitbake("-c listtasks " + recipes[0])
157 with bb.tinfoil.Tinfoil() as tinfoil:
158 tinfoil.prepare(config_only=False, quiet=2)
159 d = tinfoil.parse_recipe(recipes[0])
160
161 for line in result.output.splitlines():
162 if line.startswith("do_"):
163 task = line.split()[0]
164 if "setscene" in task:
165 continue
166 if d.getVarFlag(task, "nostamp"):
167 continue
168 tasks.append(task)
169
170 # build recipes. At this stage we should have a Note about recipes
171 # having locked task's sig, but no warning since sig still match
172 info_message = "NOTE: The following recipes have locked tasks: " + " ".join(
173 recipes
174 )
175 for recipe in recipes:
176 result = bitbake(recipe + " -n")
177 self.assertIn(info_message, result.output)
178 for task in tasks:
179 warn_message = "The %s:%s sig is computed to be" % (recipe, task)
180 self.assertNotIn(warn_message, result.output)
181
182 # modify all tasks that are locked to trigger a sig change then build the recipes
183 # at this stage we should have a Note as before, but also a Warning for all
184 # locked tasks indicating the sig mismatch
185 self.modify_tasks(recipes, tasks)
186 for recipe in recipes:
187 result = bitbake(recipe + " -n")
188 self.assertIn(info_message, result.output)
189 for task in tasks:
190 warn_message = "The %s:%s sig is computed to be" % (recipe, task)
191 self.assertIn(warn_message, result.output)
192
193 # unlock all tasks and rebuild, no more Note/Warning should remain
194 self.unlock_recipes(recipes, tasks)
195 for recipe in recipes:
196 result = bitbake(recipe + " -n")
197 self.assertNotIn(info_message, result.output)
198 for task in tasks:
199 warn_message = "The %s:%s sig is computed to be" % (recipe, task)
200 self.assertNotIn(warn_message, result.output)
201
202 # unlock all recipes
203 self.unlock_recipes()
diff --git a/meta/lib/oeqa/selftest/cases/bblogging.py b/meta/lib/oeqa/selftest/cases/bblogging.py
new file mode 100644
index 0000000000..040c6db089
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/bblogging.py
@@ -0,0 +1,182 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7
8from oeqa.selftest.case import OESelftestTestCase
9from oeqa.utils.commands import bitbake
10
11class BitBakeLogging(OESelftestTestCase):
12
13 def assertCount(self, item, entry, count):
14 self.assertEqual(item.count(entry), count, msg="Output:\n'''\n%s\n'''\ndoesn't contain %d copies of:\n'''\n%s\n'''\n" % (item, count, entry))
15
16 def test_shell_loggingA(self):
17 # no logs, no verbose
18 self.write_config('BBINCLUDELOGS = ""')
19 result = bitbake("logging-test -c shelltest -f", ignore_status = True)
20 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
21 self.assertNotIn("This is shell stdout", result.output)
22 self.assertNotIn("This is shell stderr", result.output)
23
24 def test_shell_loggingB(self):
25 # logs, no verbose
26 self.write_config('BBINCLUDELOGS = "yes"')
27 result = bitbake("logging-test -c shelltest -f", ignore_status = True)
28 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
29 self.assertCount(result.output, "This is shell stdout", 1)
30 self.assertCount(result.output, "This is shell stderr", 1)
31
32 def test_shell_loggingC(self):
33 # no logs, verbose
34 self.write_config('BBINCLUDELOGS = ""')
35 result = bitbake("logging-test -c shelltest -f -v", ignore_status = True)
36 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
37 # two copies due to set +x
38 self.assertCount(result.output, "This is shell stdout", 2)
39 self.assertCount(result.output, "This is shell stderr", 2)
40
41 def test_shell_loggingD(self):
42 # logs, verbose
43 self.write_config('BBINCLUDELOGS = "yes"')
44 result = bitbake("logging-test -c shelltest -f -v", ignore_status = True)
45 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
46 # two copies due to set +x
47 self.assertCount(result.output, "This is shell stdout", 2)
48 self.assertCount(result.output, "This is shell stderr", 2)
49
50 def test_python_exec_func_shell_loggingA(self):
51 # no logs, no verbose
52 self.write_config('BBINCLUDELOGS = ""')
53 result = bitbake("logging-test -c pythontest_exec_func_shell -f",
54 ignore_status = True)
55 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
56 self.assertNotIn("This is shell stdout", result.output)
57 self.assertNotIn("This is shell stderr", result.output)
58
59 def test_python_exec_func_shell_loggingB(self):
60 # logs, no verbose
61 self.write_config('BBINCLUDELOGS = "yes"')
62 result = bitbake("logging-test -c pythontest_exec_func_shell -f",
63 ignore_status = True)
64 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
65 self.assertCount(result.output, "This is shell stdout", 1)
66 self.assertCount(result.output, "This is shell stderr", 1)
67
68 def test_python_exec_func_shell_loggingC(self):
69 # no logs, verbose
70 self.write_config('BBINCLUDELOGS = ""')
71 result = bitbake("logging-test -c pythontest_exec_func_shell -f -v",
72 ignore_status = True)
73 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
74 # two copies due to set +x
75 self.assertCount(result.output, "This is shell stdout", 2)
76 self.assertCount(result.output, "This is shell stderr", 2)
77
78 def test_python_exec_func_shell_loggingD(self):
79 # logs, verbose
80 self.write_config('BBINCLUDELOGS = "yes"')
81 result = bitbake("logging-test -c pythontest_exec_func_shell -f -v",
82 ignore_status = True)
83 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
84 # two copies due to set +x
85 self.assertCount(result.output, "This is shell stdout", 2)
86 self.assertCount(result.output, "This is shell stderr", 2)
87
88 def test_python_exit_loggingA(self):
89 # no logs, no verbose
90 self.write_config('BBINCLUDELOGS = ""')
91 result = bitbake("logging-test -c pythontest_exit -f", ignore_status = True)
92 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
93 self.assertNotIn("This is python stdout", result.output)
94
95 def test_python_exit_loggingB(self):
96 # logs, no verbose
97 self.write_config('BBINCLUDELOGS = "yes"')
98 result = bitbake("logging-test -c pythontest_exit -f", ignore_status = True)
99 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
100 # A sys.exit() should include the output
101 self.assertCount(result.output, "This is python stdout", 1)
102
103 def test_python_exit_loggingC(self):
104 # no logs, verbose
105 self.write_config('BBINCLUDELOGS = ""')
106 result = bitbake("logging-test -c pythontest_exit -f -v", ignore_status = True)
107 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
108 self.assertCount(result.output, "This is python stdout", 1)
109
110 def test_python_exit_loggingD(self):
111 # logs, verbose
112 self.write_config('BBINCLUDELOGS = "yes"')
113 result = bitbake("logging-test -c pythontest_exit -f -v", ignore_status = True)
114 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
115 self.assertCount(result.output, "This is python stdout", 1)
116
117 def test_python_exec_func_python_loggingA(self):
118 # no logs, no verbose
119 self.write_config('BBINCLUDELOGS = ""')
120 result = bitbake("logging-test -c pythontest_exec_func_python -f",
121 ignore_status = True)
122 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
123 self.assertNotIn("This is python stdout", result.output)
124
125 def test_python_exec_func_python_loggingB(self):
126 # logs, no verbose
127 self.write_config('BBINCLUDELOGS = "yes"')
128 result = bitbake("logging-test -c pythontest_exec_func_python -f",
129 ignore_status = True)
130 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
131 # A sys.exit() should include the output
132 self.assertCount(result.output, "This is python stdout", 1)
133
134 def test_python_exec_func_python_loggingC(self):
135 # no logs, verbose
136 self.write_config('BBINCLUDELOGS = ""')
137 result = bitbake("logging-test -c pythontest_exec_func_python -f -v",
138 ignore_status = True)
139 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
140 self.assertCount(result.output, "This is python stdout", 1)
141
142 def test_python_exec_func_python_loggingD(self):
143 # logs, verbose
144 self.write_config('BBINCLUDELOGS = "yes"')
145 result = bitbake("logging-test -c pythontest_exec_func_python -f -v",
146 ignore_status = True)
147 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
148 self.assertCount(result.output, "This is python stdout", 1)
149
150 def test_python_fatal_loggingA(self):
151 # no logs, no verbose
152 self.write_config('BBINCLUDELOGS = ""')
153 result = bitbake("logging-test -c pythontest_fatal -f", ignore_status = True)
154 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
155 self.assertNotIn("This is python fatal test stdout", result.output)
156 self.assertCount(result.output, "This is a fatal error", 1)
157
158 def test_python_fatal_loggingB(self):
159 # logs, no verbose
160 self.write_config('BBINCLUDELOGS = "yes"')
161 result = bitbake("logging-test -c pythontest_fatal -f", ignore_status = True)
162 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
163 # A bb.fatal() should not include the output
164 self.assertNotIn("This is python fatal test stdout", result.output)
165 self.assertCount(result.output, "This is a fatal error", 1)
166
167 def test_python_fatal_loggingC(self):
168 # no logs, verbose
169 self.write_config('BBINCLUDELOGS = ""')
170 result = bitbake("logging-test -c pythontest_fatal -f -v", ignore_status = True)
171 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
172 self.assertCount(result.output, "This is python fatal test stdout", 1)
173 self.assertCount(result.output, "This is a fatal error", 1)
174
175 def test_python_fatal_loggingD(self):
176 # logs, verbose
177 self.write_config('BBINCLUDELOGS = "yes"')
178 result = bitbake("logging-test -c pythontest_fatal -f -v", ignore_status = True)
179 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
180 self.assertCount(result.output, "This is python fatal test stdout", 1)
181 self.assertCount(result.output, "This is a fatal error", 1)
182
diff --git a/meta/lib/oeqa/selftest/cases/bbtests.py b/meta/lib/oeqa/selftest/cases/bbtests.py
index 79390acc0d..51934ef70d 100644
--- a/meta/lib/oeqa/selftest/cases/bbtests.py
+++ b/meta/lib/oeqa/selftest/cases/bbtests.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -39,7 +41,7 @@ class BitbakeTests(OESelftestTestCase):
39 41
40 def test_event_handler(self): 42 def test_event_handler(self):
41 self.write_config("INHERIT += \"test_events\"") 43 self.write_config("INHERIT += \"test_events\"")
42 result = bitbake('m4-native') 44 result = bitbake('selftest-hello-native')
43 find_build_started = re.search(r"NOTE: Test for bb\.event\.BuildStarted(\n.*)*NOTE: Executing.*Tasks", result.output) 45 find_build_started = re.search(r"NOTE: Test for bb\.event\.BuildStarted(\n.*)*NOTE: Executing.*Tasks", result.output)
44 find_build_completed = re.search(r"Tasks Summary:.*(\n.*)*NOTE: Test for bb\.event\.BuildCompleted", result.output) 46 find_build_completed = re.search(r"Tasks Summary:.*(\n.*)*NOTE: Test for bb\.event\.BuildCompleted", result.output)
45 self.assertTrue(find_build_started, msg = "Match failed in:\n%s" % result.output) 47 self.assertTrue(find_build_started, msg = "Match failed in:\n%s" % result.output)
@@ -47,11 +49,11 @@ class BitbakeTests(OESelftestTestCase):
47 self.assertNotIn('Test for bb.event.InvalidEvent', result.output) 49 self.assertNotIn('Test for bb.event.InvalidEvent', result.output)
48 50
49 def test_local_sstate(self): 51 def test_local_sstate(self):
50 bitbake('m4-native') 52 bitbake('selftest-hello-native')
51 bitbake('m4-native -cclean') 53 bitbake('selftest-hello-native -cclean')
52 result = bitbake('m4-native') 54 result = bitbake('selftest-hello-native')
53 find_setscene = re.search("m4-native.*do_.*_setscene", result.output) 55 find_setscene = re.search("selftest-hello-native.*do_.*_setscene", result.output)
54 self.assertTrue(find_setscene, msg = "No \"m4-native.*do_.*_setscene\" message found during bitbake m4-native. bitbake output: %s" % result.output ) 56 self.assertTrue(find_setscene, msg = "No \"selftest-hello-native.*do_.*_setscene\" message found during bitbake selftest-hello-native. bitbake output: %s" % result.output )
55 57
56 def test_bitbake_invalid_recipe(self): 58 def test_bitbake_invalid_recipe(self):
57 result = bitbake('-b asdf', ignore_status=True) 59 result = bitbake('-b asdf', ignore_status=True)
@@ -63,15 +65,15 @@ class BitbakeTests(OESelftestTestCase):
63 65
64 def test_warnings_errors(self): 66 def test_warnings_errors(self):
65 result = bitbake('-b asdf', ignore_status=True) 67 result = bitbake('-b asdf', ignore_status=True)
66 find_warnings = re.search("Summary: There w.{2,3}? [1-9][0-9]* WARNING messages* shown", result.output) 68 find_warnings = re.search("Summary: There w.{2,3}? [1-9][0-9]* WARNING messages*", result.output)
67 find_errors = re.search("Summary: There w.{2,3}? [1-9][0-9]* ERROR messages* shown", result.output) 69 find_errors = re.search("Summary: There w.{2,3}? [1-9][0-9]* ERROR messages*", result.output)
68 self.assertTrue(find_warnings, msg="Did not find the mumber of warnings at the end of the build:\n" + result.output) 70 self.assertTrue(find_warnings, msg="Did not find the mumber of warnings at the end of the build:\n" + result.output)
69 self.assertTrue(find_errors, msg="Did not find the mumber of errors at the end of the build:\n" + result.output) 71 self.assertTrue(find_errors, msg="Did not find the mumber of errors at the end of the build:\n" + result.output)
70 72
71 def test_invalid_patch(self): 73 def test_invalid_patch(self):
72 # This patch should fail to apply. 74 # This patch should fail to apply.
73 self.write_recipeinc('man-db', 'FILESEXTRAPATHS_prepend := "${THISDIR}/files:"\nSRC_URI += "file://0001-Test-patch-here.patch"') 75 self.write_recipeinc('man-db', 'FILESEXTRAPATHS:prepend := "${THISDIR}/files:"\nSRC_URI += "file://0001-Test-patch-here.patch"')
74 self.write_config("INHERIT_remove = \"report-error\"") 76 self.write_config("INHERIT:remove = \"report-error\"")
75 result = bitbake('man-db -c patch', ignore_status=True) 77 result = bitbake('man-db -c patch', ignore_status=True)
76 self.delete_recipeinc('man-db') 78 self.delete_recipeinc('man-db')
77 bitbake('-cclean man-db') 79 bitbake('-cclean man-db')
@@ -83,8 +85,10 @@ class BitbakeTests(OESelftestTestCase):
83 85
84 def test_force_task_1(self): 86 def test_force_task_1(self):
85 # test 1 from bug 5875 87 # test 1 from bug 5875
88 import uuid
86 test_recipe = 'zlib' 89 test_recipe = 'zlib'
87 test_data = "Microsoft Made No Profit From Anyone's Zunes Yo" 90 # Need to use uuid otherwise hash equivlance would change the workflow
91 test_data = "Microsoft Made No Profit From Anyone's Zunes Yo %s" % uuid.uuid1()
88 bb_vars = get_bb_vars(['D', 'PKGDEST', 'mandir'], test_recipe) 92 bb_vars = get_bb_vars(['D', 'PKGDEST', 'mandir'], test_recipe)
89 image_dir = bb_vars['D'] 93 image_dir = bb_vars['D']
90 pkgsplit_dir = bb_vars['PKGDEST'] 94 pkgsplit_dir = bb_vars['PKGDEST']
@@ -139,19 +143,14 @@ class BitbakeTests(OESelftestTestCase):
139 self.write_recipeinc('man-db', data) 143 self.write_recipeinc('man-db', data)
140 self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\" 144 self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\"
141SSTATE_DIR = \"${TOPDIR}/download-selftest\" 145SSTATE_DIR = \"${TOPDIR}/download-selftest\"
142INHERIT_remove = \"report-error\" 146INHERIT:remove = \"report-error\"
143""") 147""")
144 self.track_for_cleanup(os.path.join(self.builddir, "download-selftest")) 148 self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
145 149
146 bitbake('-ccleanall man-db')
147 result = bitbake('-c fetch man-db', ignore_status=True) 150 result = bitbake('-c fetch man-db', ignore_status=True)
148 bitbake('-ccleanall man-db')
149 self.delete_recipeinc('man-db') 151 self.delete_recipeinc('man-db')
150 self.assertEqual(result.status, 1, msg="Command succeded when it should have failed. bitbake output: %s" % result.output) 152 self.assertEqual(result.status, 1, msg="Command succeded when it should have failed. bitbake output: %s" % result.output)
151 self.assertIn('Fetcher failure: Unable to find file file://invalid anywhere. The paths that were searched were:', result.output) 153 self.assertIn('Unable to get checksum for man-db SRC_URI entry invalid: file could not be found', result.output)
152 line = self.getline(result, 'Fetcher failure for URL: \'file://invalid\'. Unable to fetch URL from any source.')
153 self.assertTrue(line and line.startswith("ERROR:"), msg = "\"invalid\" file \
154doesn't exist, yet fetcher didn't report any error. bitbake output: %s" % result.output)
155 154
156 def test_rename_downloaded_file(self): 155 def test_rename_downloaded_file(self):
157 # TODO unique dldir instead of using cleanall 156 # TODO unique dldir instead of using cleanall
@@ -161,7 +160,7 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\"
161""") 160""")
162 self.track_for_cleanup(os.path.join(self.builddir, "download-selftest")) 161 self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
163 162
164 data = 'SRC_URI = "${GNU_MIRROR}/aspell/aspell-${PV}.tar.gz;downloadfilename=test-aspell.tar.gz"' 163 data = 'SRC_URI = "https://downloads.yoctoproject.org/mirror/sources/aspell-${PV}.tar.gz;downloadfilename=test-aspell.tar.gz"'
165 self.write_recipeinc('aspell', data) 164 self.write_recipeinc('aspell', data)
166 result = bitbake('-f -c fetch aspell', ignore_status=True) 165 result = bitbake('-f -c fetch aspell', ignore_status=True)
167 self.delete_recipeinc('aspell') 166 self.delete_recipeinc('aspell')
@@ -176,7 +175,7 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\"
176 self.assertIn('localconf', result.output) 175 self.assertIn('localconf', result.output)
177 176
178 def test_dry_run(self): 177 def test_dry_run(self):
179 result = runCmd('bitbake -n m4-native') 178 result = runCmd('bitbake -n selftest-hello-native')
180 self.assertEqual(0, result.status, "bitbake dry run didn't run as expected. %s" % result.output) 179 self.assertEqual(0, result.status, "bitbake dry run didn't run as expected. %s" % result.output)
181 180
182 def test_just_parse(self): 181 def test_just_parse(self):
@@ -189,6 +188,10 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\"
189 self.assertTrue(find, "No version returned for searched recipe. bitbake output: %s" % result.output) 188 self.assertTrue(find, "No version returned for searched recipe. bitbake output: %s" % result.output)
190 189
191 def test_prefile(self): 190 def test_prefile(self):
191 # Test when the prefile does not exist
192 result = runCmd('bitbake -r conf/prefile.conf', ignore_status=True)
193 self.assertEqual(1, result.status, "bitbake didn't error and should have when a specified prefile didn't exist: %s" % result.output)
194 # Test when the prefile exists
192 preconf = os.path.join(self.builddir, 'conf/prefile.conf') 195 preconf = os.path.join(self.builddir, 'conf/prefile.conf')
193 self.track_for_cleanup(preconf) 196 self.track_for_cleanup(preconf)
194 ftools.write_file(preconf ,"TEST_PREFILE=\"prefile\"") 197 ftools.write_file(preconf ,"TEST_PREFILE=\"prefile\"")
@@ -199,6 +202,10 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\"
199 self.assertIn('localconf', result.output) 202 self.assertIn('localconf', result.output)
200 203
201 def test_postfile(self): 204 def test_postfile(self):
205 # Test when the postfile does not exist
206 result = runCmd('bitbake -R conf/postfile.conf', ignore_status=True)
207 self.assertEqual(1, result.status, "bitbake didn't error and should have when a specified postfile didn't exist: %s" % result.output)
208 # Test when the postfile exists
202 postconf = os.path.join(self.builddir, 'conf/postfile.conf') 209 postconf = os.path.join(self.builddir, 'conf/postfile.conf')
203 self.track_for_cleanup(postconf) 210 self.track_for_cleanup(postconf)
204 ftools.write_file(postconf , "TEST_POSTFILE=\"postfile\"") 211 ftools.write_file(postconf , "TEST_POSTFILE=\"postfile\"")
@@ -213,7 +220,7 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\"
213 def test_continue(self): 220 def test_continue(self):
214 self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\" 221 self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\"
215SSTATE_DIR = \"${TOPDIR}/download-selftest\" 222SSTATE_DIR = \"${TOPDIR}/download-selftest\"
216INHERIT_remove = \"report-error\" 223INHERIT:remove = \"report-error\"
217""") 224""")
218 self.track_for_cleanup(os.path.join(self.builddir, "download-selftest")) 225 self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
219 self.write_recipeinc('man-db',"\ndo_fail_task () {\nexit 1 \n}\n\naddtask do_fail_task before do_fetch\n" ) 226 self.write_recipeinc('man-db',"\ndo_fail_task () {\nexit 1 \n}\n\naddtask do_fail_task before do_fetch\n" )
@@ -225,16 +232,22 @@ INHERIT_remove = \"report-error\"
225 self.assertLess(errorpos,continuepos, msg = "bitbake didn't pass do_fail_task. bitbake output: %s" % result.output) 232 self.assertLess(errorpos,continuepos, msg = "bitbake didn't pass do_fail_task. bitbake output: %s" % result.output)
226 233
227 def test_non_gplv3(self): 234 def test_non_gplv3(self):
228 self.write_config('INCOMPATIBLE_LICENSE = "GPLv3"') 235 self.write_config('''INCOMPATIBLE_LICENSE = "GPL-3.0-or-later"
236OVERRIDES .= ":gplv3test"
237require conf/distro/include/no-gplv3.inc
238''')
229 result = bitbake('selftest-ed', ignore_status=True) 239 result = bitbake('selftest-ed', ignore_status=True)
230 self.assertEqual(result.status, 0, "Bitbake failed, exit code %s, output %s" % (result.status, result.output)) 240 self.assertEqual(result.status, 0, "Bitbake failed, exit code %s, output %s" % (result.status, result.output))
231 lic_dir = get_bb_var('LICENSE_DIRECTORY') 241 lic_dir = get_bb_var('LICENSE_DIRECTORY')
232 self.assertFalse(os.path.isfile(os.path.join(lic_dir, 'selftest-ed/generic_GPLv3'))) 242 arch = get_bb_var('SSTATE_PKGARCH')
233 self.assertTrue(os.path.isfile(os.path.join(lic_dir, 'selftest-ed/generic_GPLv2'))) 243 filename = os.path.join(lic_dir, arch, 'selftest-ed', 'generic_GPL-3.0-or-later')
244 self.assertFalse(os.path.isfile(filename), msg="License file %s exists and shouldn't" % filename)
245 filename = os.path.join(lic_dir, arch, 'selftest-ed', 'generic_GPL-2.0-only')
246 self.assertTrue(os.path.isfile(filename), msg="License file %s doesn't exist" % filename)
234 247
235 def test_setscene_only(self): 248 def test_setscene_only(self):
236 """ Bitbake option to restore from sstate only within a build (i.e. execute no real tasks, only setscene)""" 249 """ Bitbake option to restore from sstate only within a build (i.e. execute no real tasks, only setscene)"""
237 test_recipe = 'ed' 250 test_recipe = 'selftest-hello-native'
238 251
239 bitbake(test_recipe) 252 bitbake(test_recipe)
240 bitbake('-c clean %s' % test_recipe) 253 bitbake('-c clean %s' % test_recipe)
@@ -247,7 +260,7 @@ INHERIT_remove = \"report-error\"
247 'Executed tasks were: %s' % (task, str(tasks))) 260 'Executed tasks were: %s' % (task, str(tasks)))
248 261
249 def test_skip_setscene(self): 262 def test_skip_setscene(self):
250 test_recipe = 'ed' 263 test_recipe = 'selftest-hello-native'
251 264
252 bitbake(test_recipe) 265 bitbake(test_recipe)
253 bitbake('-c clean %s' % test_recipe) 266 bitbake('-c clean %s' % test_recipe)
@@ -298,3 +311,86 @@ INHERIT_remove = \"report-error\"
298 311
299 test_recipe_summary_after = get_bb_var('SUMMARY', test_recipe) 312 test_recipe_summary_after = get_bb_var('SUMMARY', test_recipe)
300 self.assertEqual(expected_recipe_summary, test_recipe_summary_after) 313 self.assertEqual(expected_recipe_summary, test_recipe_summary_after)
314
315 def test_git_patchtool(self):
316 """ PATCHTOOL=git should work with non-git sources like tarballs
317 test recipe for the test must NOT containt git:// repository in SRC_URI
318 """
319 test_recipe = "man-db"
320 self.write_recipeinc(test_recipe, 'PATCHTOOL=\"git\"')
321 src = get_bb_var("SRC_URI",test_recipe)
322 gitscm = re.search("git://", src)
323 self.assertFalse(gitscm, "test_git_patchtool pre-condition failed: {} test recipe contains git repo!".format(test_recipe))
324 result = bitbake('{} -c patch'.format(test_recipe), ignore_status=False)
325 fatal = re.search("fatal: not a git repository (or any of the parent directories)", result.output)
326 self.assertFalse(fatal, "Failed to patch using PATCHTOOL=\"git\"")
327 self.delete_recipeinc(test_recipe)
328 bitbake('-cclean {}'.format(test_recipe))
329
330 def test_git_patchtool2(self):
331 """ Test if PATCHTOOL=git works with git repo and doesn't reinitialize it
332 """
333 test_recipe = "gitrepotest"
334 src = get_bb_var("SRC_URI",test_recipe)
335 gitscm = re.search("git://", src)
336 self.assertTrue(gitscm, "test_git_patchtool pre-condition failed: {} test recipe doesn't contains git repo!".format(test_recipe))
337 result = bitbake('{} -c patch'.format(test_recipe), ignore_status=False)
338 srcdir = get_bb_var('S', test_recipe)
339 result = runCmd("git log", cwd = srcdir)
340 self.assertFalse("bitbake_patching_started" in result.output, msg = "Repository has been reinitialized. {}".format(srcdir))
341 self.delete_recipeinc(test_recipe)
342 bitbake('-cclean {}'.format(test_recipe))
343
344
345 def test_git_unpack_nonetwork(self):
346 """
347 Test that a recipe with a floating tag that needs to be resolved upstream doesn't
348 access the network in a patch task run in a separate builld invocation
349 """
350
351 # Enable the recipe to float using a distro override
352 self.write_config("DISTROOVERRIDES .= \":gitunpack-enable-recipe\"")
353
354 bitbake('gitunpackoffline -c fetch')
355 bitbake('gitunpackoffline -c patch')
356
357 def test_git_unpack_nonetwork_fail(self):
358 """
359 Test that a recipe with a floating tag which doesn't call get_srcrev() in the fetcher
360 raises an error when the fetcher is called.
361 """
362
363 # Enable the recipe to float using a distro override
364 self.write_config("DISTROOVERRIDES .= \":gitunpack-enable-recipe\"")
365
366 result = bitbake('gitunpackoffline-fail -c fetch', ignore_status=True)
367 self.assertTrue(re.search("Recipe uses a floating tag/branch .* for repo .* without a fixed SRCREV yet doesn't call bb.fetch2.get_srcrev()", result.output), msg = "Recipe without PV set to SRCPV should have failed: %s" % result.output)
368
369 def test_unexpanded_variable_in_path(self):
370 """
371 Test that bitbake fails if directory contains unexpanded bitbake variable in the name
372 """
373 recipe_name = "gitunpackoffline"
374 self.write_config('PV:pn-gitunpackoffline:append = "+${UNDEFVAL}"')
375 result = bitbake('{}'.format(recipe_name), ignore_status=True)
376 self.assertGreater(result.status, 0, "Build should have failed if ${ is in the path")
377 self.assertTrue(re.search("ERROR: Directory name /.* contains unexpanded bitbake variable. This may cause build failures and WORKDIR polution",
378 result.output), msg = "mkdirhier with unexpanded variable should have failed: %s" % result.output)
379
380 def test_bb_env_bb_getvar_equality(self):
381 """ Test if "bitbake -e" output is identical to "bitbake-getvar" output for a variable set from an anonymous function
382 """
383 self.write_config('''INHERIT += "test_anon_func"
384TEST_SET_FROM_ANON_FUNC ?= ""''')
385
386 result_bb_e = runCmd('bitbake -e')
387 bb_e_var_match = re.search('^TEST_SET_FROM_ANON_FUNC="(?P<value>.*)"$', result_bb_e.output, re.MULTILINE)
388 self.assertTrue(bb_e_var_match, msg = "Can't find TEST_SET_FROM_ANON_FUNC value in \"bitbake -e\" output")
389 bb_e_var_value = bb_e_var_match.group("value")
390
391 result_bb_getvar = runCmd('bitbake-getvar TEST_SET_FROM_ANON_FUNC --value')
392 bb_getvar_var_value = result_bb_getvar.output.strip()
393 self.assertEqual(bb_e_var_value, bb_getvar_var_value,
394 msg='''"bitbake -e" output differs from bitbake-getvar output for TEST_SET_FROM_ANON_FUNC (set from anonymous function)
395bitbake -e: "%s"
396bitbake-getvar: "%s"''' % (bb_e_var_value, bb_getvar_var_value))
diff --git a/meta/lib/oeqa/selftest/cases/binutils.py b/meta/lib/oeqa/selftest/cases/binutils.py
index 821f52f5a8..5ff263d342 100644
--- a/meta/lib/oeqa/selftest/cases/binutils.py
+++ b/meta/lib/oeqa/selftest/cases/binutils.py
@@ -1,12 +1,14 @@
1#
2# Copyright OpenEmbedded Contributors
3#
1# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
5#
2import os 6import os
3import sys 7import time
4import re
5import logging
6from oeqa.core.decorator import OETestTag 8from oeqa.core.decorator import OETestTag
7from oeqa.core.case import OEPTestResultTestCase 9from oeqa.core.case import OEPTestResultTestCase
8from oeqa.selftest.case import OESelftestTestCase 10from oeqa.selftest.case import OESelftestTestCase
9from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars 11from oeqa.utils.commands import bitbake, get_bb_vars
10 12
11def parse_values(content): 13def parse_values(content):
12 for i in content: 14 for i in content:
@@ -31,19 +33,23 @@ class BinutilsCrossSelfTest(OESelftestTestCase, OEPTestResultTestCase):
31 features.append('CHECK_TARGETS = "{0}"'.format(suite)) 33 features.append('CHECK_TARGETS = "{0}"'.format(suite))
32 self.write_config("\n".join(features)) 34 self.write_config("\n".join(features))
33 35
34 recipe = "binutils-cross-testsuite" 36 recipe = "binutils-testsuite"
35 bb_vars = get_bb_vars(["B", "TARGET_SYS", "T"], recipe) 37 bb_vars = get_bb_vars(["B", "TARGET_SYS", "T"], recipe)
36 builddir, target_sys, tdir = bb_vars["B"], bb_vars["TARGET_SYS"], bb_vars["T"] 38 builddir, target_sys, tdir = bb_vars["B"], bb_vars["TARGET_SYS"], bb_vars["T"]
37 39
40 start_time = time.time()
41
38 bitbake("{0} -c check".format(recipe)) 42 bitbake("{0} -c check".format(recipe))
39 43
44 end_time = time.time()
45
40 sumspath = os.path.join(builddir, suite, "{0}.sum".format(suite)) 46 sumspath = os.path.join(builddir, suite, "{0}.sum".format(suite))
41 if not os.path.exists(sumspath): 47 if not os.path.exists(sumspath):
42 sumspath = os.path.join(builddir, suite, "testsuite", "{0}.sum".format(suite)) 48 sumspath = os.path.join(builddir, suite, "testsuite", "{0}.sum".format(suite))
43 logpath = os.path.splitext(sumspath)[0] + ".log" 49 logpath = os.path.splitext(sumspath)[0] + ".log"
44 50
45 ptestsuite = "binutils-{}".format(suite) if suite != "binutils" else suite 51 ptestsuite = "binutils-{}".format(suite) if suite != "binutils" else suite
46 self.ptest_section(ptestsuite, logfile = logpath) 52 self.ptest_section(ptestsuite, duration = int(end_time - start_time), logfile = logpath)
47 with open(sumspath, "r") as f: 53 with open(sumspath, "r") as f:
48 for test, result in parse_values(f): 54 for test, result in parse_values(f):
49 self.ptest_result(ptestsuite, test, result) 55 self.ptest_result(ptestsuite, test, result)
diff --git a/meta/lib/oeqa/selftest/cases/buildhistory.py b/meta/lib/oeqa/selftest/cases/buildhistory.py
index d865da6252..511c666554 100644
--- a/meta/lib/oeqa/selftest/cases/buildhistory.py
+++ b/meta/lib/oeqa/selftest/cases/buildhistory.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -7,10 +9,10 @@ import re
7import datetime 9import datetime
8 10
9from oeqa.selftest.case import OESelftestTestCase 11from oeqa.selftest.case import OESelftestTestCase
10from oeqa.utils.commands import bitbake, get_bb_vars 12from oeqa.utils.commands import bitbake, get_bb_vars, get_bb_var, runCmd
11 13
12 14
13class BuildhistoryBase(OESelftestTestCase): 15class BuildhistoryTests(OESelftestTestCase):
14 16
15 def config_buildhistory(self, tmp_bh_location=False): 17 def config_buildhistory(self, tmp_bh_location=False):
16 bb_vars = get_bb_vars(['USER_CLASSES', 'INHERIT']) 18 bb_vars = get_bb_vars(['USER_CLASSES', 'INHERIT'])
@@ -46,5 +48,58 @@ class BuildhistoryBase(OESelftestTestCase):
46 else: 48 else:
47 self.assertEqual(result.status, 0, msg="Command 'bitbake %s' has failed unexpectedly: %s" % (target, result.output)) 49 self.assertEqual(result.status, 0, msg="Command 'bitbake %s' has failed unexpectedly: %s" % (target, result.output))
48 50
49 # No tests should be added to the base class. 51
50 # Please create a new class that inherit this one, or use one of those already available for adding tests. 52 def test_buildhistory_basic(self):
53 self.run_buildhistory_operation('xcursor-transparent-theme')
54 self.assertTrue(os.path.isdir(get_bb_var('BUILDHISTORY_DIR')), "buildhistory dir was not created.")
55
56 def test_buildhistory_buildtime_pr_backwards(self):
57 target = 'xcursor-transparent-theme'
58 error = "ERROR:.*QA Issue: Package version for package %s went backwards which would break package feeds \(from .*-r1.* to .*-r0.*\)" % target
59 self.run_buildhistory_operation(target, target_config="PR = \"r1\"", change_bh_location=True)
60 self.run_buildhistory_operation(target, target_config="PR = \"r0\"", change_bh_location=False, expect_error=True, error_regex=error)
61
62 def test_fileinfo(self):
63 self.config_buildhistory()
64 bitbake('hicolor-icon-theme')
65 history_dir = get_bb_var('BUILDHISTORY_DIR_PACKAGE', 'hicolor-icon-theme')
66 self.assertTrue(os.path.isdir(history_dir), 'buildhistory dir was not created.')
67
68 def load_bh(f):
69 d = {}
70 for line in open(f):
71 split = [s.strip() for s in line.split('=', 1)]
72 if len(split) > 1:
73 d[split[0]] = split[1]
74 return d
75
76 data = load_bh(os.path.join(history_dir, 'hicolor-icon-theme', 'latest'))
77 self.assertIn('FILELIST', data)
78 self.assertEqual(data['FILELIST'], '/usr/share/icons/hicolor/index.theme')
79 self.assertGreater(int(data['PKGSIZE']), 0)
80
81 data = load_bh(os.path.join(history_dir, 'hicolor-icon-theme-dev', 'latest'))
82 if 'FILELIST' in data:
83 self.assertEqual(data['FILELIST'], '/usr/share/pkgconfig/default-icon-theme.pc')
84 self.assertGreater(int(data['PKGSIZE']), 0)
85
86 def test_buildhistory_diff(self):
87 target = 'xcursor-transparent-theme'
88 self.run_buildhistory_operation(target, target_config="PR = \"r1\"", change_bh_location=True)
89 self.run_buildhistory_operation(target, target_config="PR = \"r0\"", change_bh_location=False, expect_error=True)
90 result = runCmd("oe-pkgdata-util read-value PKGV %s" % target)
91 pkgv = result.output.rstrip()
92 result = runCmd("buildhistory-diff -p %s" % get_bb_var('BUILDHISTORY_DIR'))
93 expected_endlines = [
94 "xcursor-transparent-theme-dev: RRECOMMENDS: removed \"xcursor-transparent-theme (['= %s-r1'])\", added \"xcursor-transparent-theme (['= %s-r0'])\"" % (pkgv, pkgv),
95 "xcursor-transparent-theme-staticdev: RDEPENDS: removed \"xcursor-transparent-theme-dev (['= %s-r1'])\", added \"xcursor-transparent-theme-dev (['= %s-r0'])\"" % (pkgv, pkgv)
96 ]
97 for line in result.output.splitlines():
98 for el in expected_endlines:
99 if line.endswith(el):
100 expected_endlines.remove(el)
101 break
102 else:
103 self.fail('Unexpected line:\n%s\nExpected line endings:\n %s' % (line, '\n '.join(expected_endlines)))
104 if expected_endlines:
105 self.fail('Missing expected line endings:\n %s' % '\n '.join(expected_endlines)) \ No newline at end of file
diff --git a/meta/lib/oeqa/selftest/cases/buildoptions.py b/meta/lib/oeqa/selftest/cases/buildoptions.py
index 3495bee986..767e19bd88 100644
--- a/meta/lib/oeqa/selftest/cases/buildoptions.py
+++ b/meta/lib/oeqa/selftest/cases/buildoptions.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -8,9 +10,10 @@ import glob as g
8import shutil 10import shutil
9import tempfile 11import tempfile
10from oeqa.selftest.case import OESelftestTestCase 12from oeqa.selftest.case import OESelftestTestCase
11from oeqa.selftest.cases.buildhistory import BuildhistoryBase 13from oeqa.core.decorator.data import skipIfMachine
12from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars 14from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars
13import oeqa.utils.ftools as ftools 15import oeqa.utils.ftools as ftools
16from oeqa.core.decorator import OETestTag
14 17
15class ImageOptionsTests(OESelftestTestCase): 18class ImageOptionsTests(OESelftestTestCase):
16 19
@@ -50,23 +53,23 @@ class ImageOptionsTests(OESelftestTestCase):
50 def test_read_only_image(self): 53 def test_read_only_image(self):
51 distro_features = get_bb_var('DISTRO_FEATURES') 54 distro_features = get_bb_var('DISTRO_FEATURES')
52 if not ('x11' in distro_features and 'opengl' in distro_features): 55 if not ('x11' in distro_features and 'opengl' in distro_features):
53 self.skipTest('core-image-sato requires x11 and opengl in distro features') 56 self.skipTest('core-image-sato/weston requires x11 and opengl in distro features')
54 self.write_config('IMAGE_FEATURES += "read-only-rootfs"') 57 self.write_config('IMAGE_FEATURES += "read-only-rootfs"')
55 bitbake("core-image-sato") 58 bitbake("core-image-sato core-image-weston")
56 # do_image will fail if there are any pending postinsts 59 # do_image will fail if there are any pending postinsts
57 60
58class DiskMonTest(OESelftestTestCase): 61class DiskMonTest(OESelftestTestCase):
59 62
60 def test_stoptask_behavior(self): 63 def test_stoptask_behavior(self):
61 self.write_config('BB_DISKMON_DIRS = "STOPTASKS,${TMPDIR},100000G,100K"') 64 self.write_config('BB_DISKMON_DIRS = "STOPTASKS,${TMPDIR},100000G,100K"\nBB_HEARTBEAT_EVENT = "1"')
62 res = bitbake("delay -c delay", ignore_status = True) 65 res = bitbake("delay -c delay", ignore_status = True)
63 self.assertTrue('ERROR: No new tasks can be executed since the disk space monitor action is "STOPTASKS"!' in res.output, msg = "Tasks should have stopped. Disk monitor is set to STOPTASK: %s" % res.output) 66 self.assertTrue('ERROR: No new tasks can be executed since the disk space monitor action is "STOPTASKS"!' in res.output, msg = "Tasks should have stopped. Disk monitor is set to STOPTASK: %s" % res.output)
64 self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output)) 67 self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output))
65 self.write_config('BB_DISKMON_DIRS = "ABORT,${TMPDIR},100000G,100K"') 68 self.write_config('BB_DISKMON_DIRS = "HALT,${TMPDIR},100000G,100K"\nBB_HEARTBEAT_EVENT = "1"')
66 res = bitbake("delay -c delay", ignore_status = True) 69 res = bitbake("delay -c delay", ignore_status = True)
67 self.assertTrue('ERROR: Immediately abort since the disk space monitor action is "ABORT"!' in res.output, "Tasks should have been aborted immediatelly. Disk monitor is set to ABORT: %s" % res.output) 70 self.assertTrue('ERROR: Immediately halt since the disk space monitor action is "HALT"!' in res.output, "Tasks should have been halted immediately. Disk monitor is set to HALT: %s" % res.output)
68 self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output)) 71 self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output))
69 self.write_config('BB_DISKMON_DIRS = "WARN,${TMPDIR},100000G,100K"') 72 self.write_config('BB_DISKMON_DIRS = "WARN,${TMPDIR},100000G,100K"\nBB_HEARTBEAT_EVENT = "1"')
70 res = bitbake("delay -c delay") 73 res = bitbake("delay -c delay")
71 self.assertTrue('WARNING: The free space' in res.output, msg = "A warning should have been displayed for disk monitor is set to WARN: %s" %res.output) 74 self.assertTrue('WARNING: The free space' in res.output, msg = "A warning should have been displayed for disk monitor is set to WARN: %s" %res.output)
72 75
@@ -78,9 +81,9 @@ class SanityOptionsTest(OESelftestTestCase):
78 81
79 def test_options_warnqa_errorqa_switch(self): 82 def test_options_warnqa_errorqa_switch(self):
80 83
81 self.write_config("INHERIT_remove = \"report-error\"") 84 self.write_config("INHERIT:remove = \"report-error\"")
82 if "packages-list" not in get_bb_var("ERROR_QA"): 85 if "packages-list" not in get_bb_var("ERROR_QA"):
83 self.append_config("ERROR_QA_append = \" packages-list\"") 86 self.append_config("ERROR_QA:append:pn-xcursor-transparent-theme = \" packages-list\"")
84 87
85 self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"') 88 self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"')
86 self.add_command_to_tearDown('bitbake -c clean xcursor-transparent-theme') 89 self.add_command_to_tearDown('bitbake -c clean xcursor-transparent-theme')
@@ -90,8 +93,8 @@ class SanityOptionsTest(OESelftestTestCase):
90 self.assertTrue(line and line.startswith("ERROR:"), msg=res.output) 93 self.assertTrue(line and line.startswith("ERROR:"), msg=res.output)
91 self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output)) 94 self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output))
92 self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"') 95 self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"')
93 self.append_config('ERROR_QA_remove = "packages-list"') 96 self.append_config('ERROR_QA:remove:pn-xcursor-transparent-theme = "packages-list"')
94 self.append_config('WARN_QA_append = " packages-list"') 97 self.append_config('WARN_QA:append:pn-xcursor-transparent-theme = " packages-list"')
95 res = bitbake("xcursor-transparent-theme -f -c package") 98 res = bitbake("xcursor-transparent-theme -f -c package")
96 self.delete_recipeinc('xcursor-transparent-theme') 99 self.delete_recipeinc('xcursor-transparent-theme')
97 line = self.getline(res, "QA Issue: xcursor-transparent-theme-dbg is listed in PACKAGES multiple times, this leads to packaging errors.") 100 line = self.getline(res, "QA Issue: xcursor-transparent-theme-dbg is listed in PACKAGES multiple times, this leads to packaging errors.")
@@ -135,32 +138,24 @@ class SanityOptionsTest(OESelftestTestCase):
135 138
136 self.assertNotIn(err, ret.output) 139 self.assertNotIn(err, ret.output)
137 140
138
139class BuildhistoryTests(BuildhistoryBase):
140
141 def test_buildhistory_basic(self):
142 self.run_buildhistory_operation('xcursor-transparent-theme')
143 self.assertTrue(os.path.isdir(get_bb_var('BUILDHISTORY_DIR')), "buildhistory dir was not created.")
144
145 def test_buildhistory_buildtime_pr_backwards(self):
146 target = 'xcursor-transparent-theme'
147 error = "ERROR:.*QA Issue: Package version for package %s went backwards which would break package feeds \(from .*-r1.* to .*-r0.*\)" % target
148 self.run_buildhistory_operation(target, target_config="PR = \"r1\"", change_bh_location=True)
149 self.run_buildhistory_operation(target, target_config="PR = \"r0\"", change_bh_location=False, expect_error=True, error_regex=error)
150
151class ArchiverTest(OESelftestTestCase): 141class ArchiverTest(OESelftestTestCase):
152 def test_arch_work_dir_and_export_source(self): 142 def test_arch_work_dir_and_export_source(self):
153 """ 143 """
154 Test for archiving the work directory and exporting the source files. 144 Test for archiving the work directory and exporting the source files.
155 """ 145 """
156 self.write_config("INHERIT += \"archiver\"\nARCHIVER_MODE[src] = \"original\"\nARCHIVER_MODE[srpm] = \"1\"") 146 self.write_config("""
147INHERIT += "archiver"
148PACKAGE_CLASSES = "package_rpm"
149ARCHIVER_MODE[src] = "original"
150ARCHIVER_MODE[srpm] = "1"
151""")
157 res = bitbake("xcursor-transparent-theme", ignore_status=True) 152 res = bitbake("xcursor-transparent-theme", ignore_status=True)
158 self.assertEqual(res.status, 0, "\nCouldn't build xcursortransparenttheme.\nbitbake output %s" % res.output) 153 self.assertEqual(res.status, 0, "\nCouldn't build xcursortransparenttheme.\nbitbake output %s" % res.output)
159 deploy_dir_src = get_bb_var('DEPLOY_DIR_SRC') 154 deploy_dir_src = get_bb_var('DEPLOY_DIR_SRC')
160 pkgs_path = g.glob(str(deploy_dir_src) + "/allarch*/xcurs*") 155 pkgs_path = g.glob(str(deploy_dir_src) + "/allarch*/xcurs*")
161 src_file_glob = str(pkgs_path[0]) + "/xcursor*.src.rpm" 156 src_file_glob = str(pkgs_path[0]) + "/xcursor*.src.rpm"
162 tar_file_glob = str(pkgs_path[0]) + "/xcursor*.tar.gz" 157 tar_file_glob = str(pkgs_path[0]) + "/xcursor*.tar.xz"
163 self.assertTrue((g.glob(src_file_glob) and g.glob(tar_file_glob)), "Couldn't find .src.rpm and .tar.gz files under %s/allarch*/xcursor*" % deploy_dir_src) 158 self.assertTrue((g.glob(src_file_glob) and g.glob(tar_file_glob)), "Couldn't find .src.rpm and .tar.xz files under %s/allarch*/xcursor*" % deploy_dir_src)
164 159
165class ToolchainOptions(OESelftestTestCase): 160class ToolchainOptions(OESelftestTestCase):
166 def test_toolchain_fortran(self): 161 def test_toolchain_fortran(self):
@@ -168,10 +163,11 @@ class ToolchainOptions(OESelftestTestCase):
168 Test that Fortran works by building a Hello, World binary. 163 Test that Fortran works by building a Hello, World binary.
169 """ 164 """
170 165
171 features = 'FORTRAN_forcevariable = ",fortran"\n' 166 features = 'FORTRAN:forcevariable = ",fortran"\n'
172 self.write_config(features) 167 self.write_config(features)
173 bitbake('fortran-helloworld') 168 bitbake('fortran-helloworld')
174 169
170@OETestTag("yocto-mirrors")
175class SourceMirroring(OESelftestTestCase): 171class SourceMirroring(OESelftestTestCase):
176 # Can we download everything from the Yocto Sources Mirror over http only 172 # Can we download everything from the Yocto Sources Mirror over http only
177 def test_yocto_source_mirror(self): 173 def test_yocto_source_mirror(self):
@@ -195,5 +191,10 @@ PREMIRRORS = "\\
195 https://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n" 191 https://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n"
196""") 192""")
197 193
198 bitbake("world --runall fetch") 194 bitbake("world --runall fetch --continue")
195
199 196
197class Poisoning(OESelftestTestCase):
198 def test_poisoning(self):
199 # The poison recipe fails if the poisoning didn't work
200 bitbake("poison")
diff --git a/meta/lib/oeqa/selftest/cases/c_cpp.py b/meta/lib/oeqa/selftest/cases/c_cpp.py
new file mode 100644
index 0000000000..9a70ce29f5
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/c_cpp.py
@@ -0,0 +1,60 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.selftest.case import OESelftestTestCase
8from oeqa.core.decorator.data import skipIfNotQemuUsermode
9from oeqa.utils.commands import bitbake
10
11
12class CCppTests(OESelftestTestCase):
13
14 @skipIfNotQemuUsermode()
15 def _qemu_usermode(self, recipe_name):
16 self.add_command_to_tearDown("bitbake -c clean %s" % recipe_name)
17 bitbake("%s -c run_tests" % recipe_name)
18
19 @skipIfNotQemuUsermode()
20 def _qemu_usermode_failing(self, recipe_name):
21 config = 'PACKAGECONFIG:pn-%s = "failing_test"' % recipe_name
22 self.write_config(config)
23 self.add_command_to_tearDown("bitbake -c clean %s" % recipe_name)
24 result = bitbake("%s -c run_tests" % recipe_name, ignore_status=True)
25 self.assertNotEqual(0, result.status, "command: %s is expected to fail but passed, status: %s, output: %s, error: %s" % (
26 result.command, result.status, result.output, result.error))
27
28
29class CMakeTests(CCppTests):
30 def test_cmake_qemu(self):
31 """Test for cmake-qemu.bbclass good case
32
33 compile the cmake-example and verify the CTests pass in qemu-user.
34 qemu-user is configured by CMAKE_CROSSCOMPILING_EMULATOR.
35 """
36 self._qemu_usermode("cmake-example")
37
38 def test_cmake_qemu_failing(self):
39 """Test for cmake-qemu.bbclass bad case
40
41 Break the comparison in the test code and verify the CTests do not pass.
42 """
43 self._qemu_usermode_failing("cmake-example")
44
45
46class MesonTests(CCppTests):
47 def test_meson_qemu(self):
48 """Test the qemu-user feature of the meson.bbclass good case
49
50 compile the meson-example and verify the Unit Test pass in qemu-user.
51 qemu-user is configured by meson's exe_wrapper option.
52 """
53 self._qemu_usermode("meson-example")
54
55 def test_meson_qemu_failing(self):
56 """Test the qemu-user feature of the meson.bbclass bad case
57
58 Break the comparison in the test code and verify the Unit Test does not pass in qemu-user.
59 """
60 self._qemu_usermode_failing("meson-example")
diff --git a/meta/lib/oeqa/selftest/cases/containerimage.py b/meta/lib/oeqa/selftest/cases/containerimage.py
index 79cc8a0f2e..d1ac305a84 100644
--- a/meta/lib/oeqa/selftest/cases/containerimage.py
+++ b/meta/lib/oeqa/selftest/cases/containerimage.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -13,7 +15,7 @@ from oeqa.utils.commands import bitbake, get_bb_vars, runCmd
13# The only package added to the image is container_image_testpkg, which 15# The only package added to the image is container_image_testpkg, which
14# contains one file. However, due to some other things not cleaning up during 16# contains one file. However, due to some other things not cleaning up during
15# rootfs creation, there is some cruft. Ideally bugs will be filed and the 17# rootfs creation, there is some cruft. Ideally bugs will be filed and the
16# cruft removed, but for now we whitelist some known set. 18# cruft removed, but for now we ignore some known set.
17# 19#
18# Also for performance reasons we're only checking the cruft when using ipk. 20# Also for performance reasons we're only checking the cruft when using ipk.
19# When using deb, and rpm it is a bit different and we could test all 21# When using deb, and rpm it is a bit different and we could test all
@@ -22,7 +24,7 @@ from oeqa.utils.commands import bitbake, get_bb_vars, runCmd
22# 24#
23class ContainerImageTests(OESelftestTestCase): 25class ContainerImageTests(OESelftestTestCase):
24 26
25 # Verify that when specifying a IMAGE_TYPEDEP_ of the form "foo.bar" that 27 # Verify that when specifying a IMAGE_TYPEDEP: of the form "foo.bar" that
26 # the conversion type bar gets added as a dep as well 28 # the conversion type bar gets added as a dep as well
27 def test_expected_files(self): 29 def test_expected_files(self):
28 30
@@ -40,10 +42,9 @@ class ContainerImageTests(OESelftestTestCase):
40 self.write_config("""PREFERRED_PROVIDER_virtual/kernel = "linux-dummy" 42 self.write_config("""PREFERRED_PROVIDER_virtual/kernel = "linux-dummy"
41IMAGE_FSTYPES = "container" 43IMAGE_FSTYPES = "container"
42PACKAGE_CLASSES = "package_ipk" 44PACKAGE_CLASSES = "package_ipk"
43IMAGE_FEATURES = ""
44IMAGE_BUILDINFO_FILE = "" 45IMAGE_BUILDINFO_FILE = ""
45INIT_MANAGER = "sysvinit" 46INIT_MANAGER = "sysvinit"
46IMAGE_INSTALL_remove = "ssh-pregen-hostkeys" 47IMAGE_INSTALL:remove = "ssh-pregen-hostkeys"
47 48
48""") 49""")
49 50
@@ -53,8 +54,6 @@ IMAGE_INSTALL_remove = "ssh-pregen-hostkeys"
53 expected_files = [ 54 expected_files = [
54 './', 55 './',
55 '.{bindir}/theapp', 56 '.{bindir}/theapp',
56 '.{sysconfdir}/default/',
57 '.{sysconfdir}/default/postinst',
58 '.{sysconfdir}/ld.so.cache', 57 '.{sysconfdir}/ld.so.cache',
59 '.{sysconfdir}/timestamp', 58 '.{sysconfdir}/timestamp',
60 '.{sysconfdir}/version', 59 '.{sysconfdir}/version',
diff --git a/meta/lib/oeqa/selftest/cases/cve_check.py b/meta/lib/oeqa/selftest/cases/cve_check.py
index 3f343a2841..511e4b81b4 100644
--- a/meta/lib/oeqa/selftest/cases/cve_check.py
+++ b/meta/lib/oeqa/selftest/cases/cve_check.py
@@ -1,9 +1,19 @@
1from oe.cve_check import Version 1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import json
8import os
2from oeqa.selftest.case import OESelftestTestCase 9from oeqa.selftest.case import OESelftestTestCase
10from oeqa.utils.commands import bitbake, get_bb_vars
3 11
4class CVECheck(OESelftestTestCase): 12class CVECheck(OESelftestTestCase):
5 13
6 def test_version_compare(self): 14 def test_version_compare(self):
15 from oe.cve_check import Version
16
7 result = Version("100") > Version("99") 17 result = Version("100") > Version("99")
8 self.assertTrue( result, msg="Failed to compare version '100' > '99'") 18 self.assertTrue( result, msg="Failed to compare version '100' > '99'")
9 result = Version("2.3.1") > Version("2.2.3") 19 result = Version("2.3.1") > Version("2.2.3")
@@ -34,3 +44,453 @@ class CVECheck(OESelftestTestCase):
34 self.assertTrue( result ,msg="Failed to compare version with suffix '1.0b' < '1.0r'") 44 self.assertTrue( result ,msg="Failed to compare version with suffix '1.0b' < '1.0r'")
35 result = Version("1.0b","alphabetical") > Version("1.0","alphabetical") 45 result = Version("1.0b","alphabetical") > Version("1.0","alphabetical")
36 self.assertTrue( result ,msg="Failed to compare version with suffix '1.0b' > '1.0'") 46 self.assertTrue( result ,msg="Failed to compare version with suffix '1.0b' > '1.0'")
47
48 # consider the trailing "p" and "patch" as patched released when comparing
49 result = Version("1.0","patch") < Version("1.0p1","patch")
50 self.assertTrue( result ,msg="Failed to compare version with suffix '1.0' < '1.0p1'")
51 result = Version("1.0p2","patch") > Version("1.0p1","patch")
52 self.assertTrue( result ,msg="Failed to compare version with suffix '1.0p2' > '1.0p1'")
53 result = Version("1.0_patch2","patch") < Version("1.0_patch3","patch")
54 self.assertTrue( result ,msg="Failed to compare version with suffix '1.0_patch2' < '1.0_patch3'")
55
56
57 def test_convert_cve_version(self):
58 from oe.cve_check import convert_cve_version
59
60 # Default format
61 self.assertEqual(convert_cve_version("8.3"), "8.3")
62 self.assertEqual(convert_cve_version(""), "")
63
64 # OpenSSL format version
65 self.assertEqual(convert_cve_version("1.1.1t"), "1.1.1t")
66
67 # OpenSSH format
68 self.assertEqual(convert_cve_version("8.3_p1"), "8.3p1")
69 self.assertEqual(convert_cve_version("8.3_p22"), "8.3p22")
70
71 # Linux kernel format
72 self.assertEqual(convert_cve_version("6.2_rc8"), "6.2-rc8")
73 self.assertEqual(convert_cve_version("6.2_rc31"), "6.2-rc31")
74
75 def test_product_match(self):
76 from oe.cve_check import has_cve_product_match
77
78 status = {}
79 status["detail"] = "ignored"
80 status["vendor"] = "*"
81 status["product"] = "*"
82 status["description"] = ""
83 status["mapping"] = ""
84
85 self.assertEqual(has_cve_product_match(status, "some_vendor:some_product"), True)
86 self.assertEqual(has_cve_product_match(status, "*:*"), True)
87 self.assertEqual(has_cve_product_match(status, "some_product"), True)
88 self.assertEqual(has_cve_product_match(status, "glibc"), True)
89 self.assertEqual(has_cve_product_match(status, "glibca"), True)
90 self.assertEqual(has_cve_product_match(status, "aglibc"), True)
91 self.assertEqual(has_cve_product_match(status, "*"), True)
92 self.assertEqual(has_cve_product_match(status, "aglibc glibc test:test"), True)
93
94 status["product"] = "glibc"
95 self.assertEqual(has_cve_product_match(status, "some_vendor:some_product"), False)
96 # The CPE in the recipe must be defined, no * accepted
97 self.assertEqual(has_cve_product_match(status, "*:*"), False)
98 self.assertEqual(has_cve_product_match(status, "*"), False)
99 self.assertEqual(has_cve_product_match(status, "some_product"), False)
100 self.assertEqual(has_cve_product_match(status, "glibc"), True)
101 self.assertEqual(has_cve_product_match(status, "glibca"), False)
102 self.assertEqual(has_cve_product_match(status, "aglibc"), False)
103 self.assertEqual(has_cve_product_match(status, "some_vendor:glibc"), True)
104 self.assertEqual(has_cve_product_match(status, "some_vendor:glibc test"), True)
105 self.assertEqual(has_cve_product_match(status, "test some_vendor:glibc"), True)
106
107 status["vendor"] = "glibca"
108 status["product"] = "glibc"
109 self.assertEqual(has_cve_product_match(status, "some_vendor:some_product"), False)
110 # The CPE in the recipe must be defined, no * accepted
111 self.assertEqual(has_cve_product_match(status, "*:*"), False)
112 self.assertEqual(has_cve_product_match(status, "*"), False)
113 self.assertEqual(has_cve_product_match(status, "some_product"), False)
114 self.assertEqual(has_cve_product_match(status, "glibc"), False)
115 self.assertEqual(has_cve_product_match(status, "glibca"), False)
116 self.assertEqual(has_cve_product_match(status, "aglibc"), False)
117 self.assertEqual(has_cve_product_match(status, "some_vendor:glibc"), False)
118 self.assertEqual(has_cve_product_match(status, "glibca:glibc"), True)
119 self.assertEqual(has_cve_product_match(status, "test:test glibca:glibc"), True)
120 self.assertEqual(has_cve_product_match(status, "test glibca:glibc"), True)
121 self.assertEqual(has_cve_product_match(status, "glibca:glibc test"), True)
122
123 def test_parse_cve_from_patch_filename(self):
124 from oe.cve_check import parse_cve_from_filename
125
126 # Patch filename without CVE ID
127 self.assertEqual(parse_cve_from_filename("0001-test.patch"), "")
128
129 # Patch with single CVE ID
130 self.assertEqual(
131 parse_cve_from_filename("CVE-2022-12345.patch"), "CVE-2022-12345"
132 )
133
134 # Patch with multiple CVE IDs
135 self.assertEqual(
136 parse_cve_from_filename("CVE-2022-41741-CVE-2022-41742.patch"),
137 "CVE-2022-41742",
138 )
139
140 # Patches with CVE ID and appended text
141 self.assertEqual(
142 parse_cve_from_filename("CVE-2023-3019-0001.patch"), "CVE-2023-3019"
143 )
144 self.assertEqual(
145 parse_cve_from_filename("CVE-2024-21886-1.patch"), "CVE-2024-21886"
146 )
147
148 # Patch with CVE ID and prepended text
149 self.assertEqual(
150 parse_cve_from_filename("grep-CVE-2012-5667.patch"), "CVE-2012-5667"
151 )
152 self.assertEqual(
153 parse_cve_from_filename("0001-CVE-2012-5667.patch"), "CVE-2012-5667"
154 )
155
156 # Patch with CVE ID and both prepended and appended text
157 self.assertEqual(
158 parse_cve_from_filename(
159 "0001-tpm2_import-fix-fixed-AES-key-CVE-2021-3565-0001.patch"
160 ),
161 "CVE-2021-3565",
162 )
163
164 # Only grab the last CVE ID in the filename
165 self.assertEqual(
166 parse_cve_from_filename("CVE-2012-5667-CVE-2012-5668.patch"),
167 "CVE-2012-5668",
168 )
169
170 # Test invalid CVE ID with incorrect length (must be at least 4 digits)
171 self.assertEqual(
172 parse_cve_from_filename("CVE-2024-001.patch"),
173 "",
174 )
175
176 # Test valid CVE ID with very long length
177 self.assertEqual(
178 parse_cve_from_filename("CVE-2024-0000000000000000000000001.patch"),
179 "CVE-2024-0000000000000000000000001",
180 )
181
182 def test_parse_cve_from_patch_contents(self):
183 import textwrap
184 from oe.cve_check import parse_cves_from_patch_contents
185
186 # Standard patch file excerpt without any patches
187 self.assertEqual(
188 parse_cves_from_patch_contents(
189 textwrap.dedent("""\
190 remove "*" for root since we don't have a /etc/shadow so far.
191
192 Upstream-Status: Inappropriate [configuration]
193
194 Signed-off-by: Scott Garman <scott.a.garman@intel.com>
195
196 --- base-passwd/passwd.master~nobash
197 +++ base-passwd/passwd.master
198 @@ -1,4 +1,4 @@
199 -root:*:0:0:root:/root:/bin/sh
200 +root::0:0:root:/root:/bin/sh
201 daemon:*:1:1:daemon:/usr/sbin:/bin/sh
202 bin:*:2:2:bin:/bin:/bin/sh
203 sys:*:3:3:sys:/dev:/bin/sh
204 """)
205 ),
206 set(),
207 )
208
209 # Patch file with multiple CVE IDs (space-separated)
210 self.assertEqual(
211 parse_cves_from_patch_contents(
212 textwrap.dedent("""\
213 There is an assertion in function _cairo_arc_in_direction().
214
215 CVE: CVE-2019-6461 CVE-2019-6462
216 Upstream-Status: Pending
217 Signed-off-by: Ross Burton <ross.burton@intel.com>
218
219 diff --git a/src/cairo-arc.c b/src/cairo-arc.c
220 index 390397bae..1bde774a4 100644
221 --- a/src/cairo-arc.c
222 +++ b/src/cairo-arc.c
223 @@ -186,7 +186,8 @@ _cairo_arc_in_direction (cairo_t *cr,
224 if (cairo_status (cr))
225 return;
226
227 - assert (angle_max >= angle_min);
228 + if (angle_max < angle_min)
229 + return;
230
231 if (angle_max - angle_min > 2 * M_PI * MAX_FULL_CIRCLES) {
232 angle_max = fmod (angle_max - angle_min, 2 * M_PI);
233 """),
234 ),
235 {"CVE-2019-6461", "CVE-2019-6462"},
236 )
237
238 # Patch file with multiple CVE IDs (comma-separated w/ both space and no space)
239 self.assertEqual(
240 parse_cves_from_patch_contents(
241 textwrap.dedent("""\
242 There is an assertion in function _cairo_arc_in_direction().
243
244 CVE: CVE-2019-6461,CVE-2019-6462, CVE-2019-6463
245 Upstream-Status: Pending
246 Signed-off-by: Ross Burton <ross.burton@intel.com>
247
248 diff --git a/src/cairo-arc.c b/src/cairo-arc.c
249 index 390397bae..1bde774a4 100644
250 --- a/src/cairo-arc.c
251 +++ b/src/cairo-arc.c
252 @@ -186,7 +186,8 @@ _cairo_arc_in_direction (cairo_t *cr,
253 if (cairo_status (cr))
254 return;
255
256 - assert (angle_max >= angle_min);
257 + if (angle_max < angle_min)
258 + return;
259
260 if (angle_max - angle_min > 2 * M_PI * MAX_FULL_CIRCLES) {
261 angle_max = fmod (angle_max - angle_min, 2 * M_PI);
262
263 """),
264 ),
265 {"CVE-2019-6461", "CVE-2019-6462", "CVE-2019-6463"},
266 )
267
268 # Patch file with multiple CVE IDs (&-separated)
269 self.assertEqual(
270 parse_cves_from_patch_contents(
271 textwrap.dedent("""\
272 There is an assertion in function _cairo_arc_in_direction().
273
274 CVE: CVE-2019-6461 & CVE-2019-6462
275 Upstream-Status: Pending
276 Signed-off-by: Ross Burton <ross.burton@intel.com>
277
278 diff --git a/src/cairo-arc.c b/src/cairo-arc.c
279 index 390397bae..1bde774a4 100644
280 --- a/src/cairo-arc.c
281 +++ b/src/cairo-arc.c
282 @@ -186,7 +186,8 @@ _cairo_arc_in_direction (cairo_t *cr,
283 if (cairo_status (cr))
284 return;
285
286 - assert (angle_max >= angle_min);
287 + if (angle_max < angle_min)
288 + return;
289
290 if (angle_max - angle_min > 2 * M_PI * MAX_FULL_CIRCLES) {
291 angle_max = fmod (angle_max - angle_min, 2 * M_PI);
292 """),
293 ),
294 {"CVE-2019-6461", "CVE-2019-6462"},
295 )
296
297 # Patch file with multiple lines with CVE IDs
298 self.assertEqual(
299 parse_cves_from_patch_contents(
300 textwrap.dedent("""\
301 There is an assertion in function _cairo_arc_in_direction().
302
303 CVE: CVE-2019-6461 & CVE-2019-6462
304
305 CVE: CVE-2019-6463 & CVE-2019-6464
306 Upstream-Status: Pending
307 Signed-off-by: Ross Burton <ross.burton@intel.com>
308
309 diff --git a/src/cairo-arc.c b/src/cairo-arc.c
310 index 390397bae..1bde774a4 100644
311 --- a/src/cairo-arc.c
312 +++ b/src/cairo-arc.c
313 @@ -186,7 +186,8 @@ _cairo_arc_in_direction (cairo_t *cr,
314 if (cairo_status (cr))
315 return;
316
317 - assert (angle_max >= angle_min);
318 + if (angle_max < angle_min)
319 + return;
320
321 if (angle_max - angle_min > 2 * M_PI * MAX_FULL_CIRCLES) {
322 angle_max = fmod (angle_max - angle_min, 2 * M_PI);
323
324 """),
325 ),
326 {"CVE-2019-6461", "CVE-2019-6462", "CVE-2019-6463", "CVE-2019-6464"},
327 )
328
329 def test_recipe_report_json(self):
330 config = """
331INHERIT += "cve-check"
332CVE_CHECK_FORMAT_JSON = "1"
333"""
334 self.write_config(config)
335
336 vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
337 summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
338 recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "m4-native_cve.json")
339
340 try:
341 os.remove(summary_json)
342 os.remove(recipe_json)
343 except FileNotFoundError:
344 pass
345
346 bitbake("m4-native -c cve_check")
347
348 def check_m4_json(filename):
349 with open(filename) as f:
350 report = json.load(f)
351 self.assertEqual(report["version"], "1")
352 self.assertEqual(len(report["package"]), 1)
353 package = report["package"][0]
354 self.assertEqual(package["name"], "m4-native")
355 found_cves = { issue["id"]: issue["status"] for issue in package["issue"]}
356 self.assertIn("CVE-2008-1687", found_cves)
357 self.assertEqual(found_cves["CVE-2008-1687"], "Patched")
358
359 self.assertExists(summary_json)
360 check_m4_json(summary_json)
361 self.assertExists(recipe_json)
362 check_m4_json(recipe_json)
363
364
365 def test_image_json(self):
366 config = """
367INHERIT += "cve-check"
368CVE_CHECK_FORMAT_JSON = "1"
369"""
370 self.write_config(config)
371
372 vars = get_bb_vars(["CVE_CHECK_DIR", "CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
373 report_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
374 print(report_json)
375 try:
376 os.remove(report_json)
377 except FileNotFoundError:
378 pass
379
380 bitbake("core-image-minimal-initramfs")
381 self.assertExists(report_json)
382
383 # Check that the summary report lists at least one package
384 with open(report_json) as f:
385 report = json.load(f)
386 self.assertEqual(report["version"], "1")
387 self.assertGreater(len(report["package"]), 1)
388
389 # Check that a random recipe wrote a recipe report to deploy/cve/
390 recipename = report["package"][0]["name"]
391 recipe_report = os.path.join(vars["CVE_CHECK_DIR"], recipename + "_cve.json")
392 self.assertExists(recipe_report)
393 with open(recipe_report) as f:
394 report = json.load(f)
395 self.assertEqual(report["version"], "1")
396 self.assertEqual(len(report["package"]), 1)
397 self.assertEqual(report["package"][0]["name"], recipename)
398
399
400 def test_recipe_report_json_unpatched(self):
401 config = """
402INHERIT += "cve-check"
403CVE_CHECK_FORMAT_JSON = "1"
404CVE_CHECK_REPORT_PATCHED = "0"
405"""
406 self.write_config(config)
407
408 vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
409 summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
410 recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "m4-native_cve.json")
411
412 try:
413 os.remove(summary_json)
414 os.remove(recipe_json)
415 except FileNotFoundError:
416 pass
417
418 bitbake("m4-native -c cve_check")
419
420 def check_m4_json(filename):
421 with open(filename) as f:
422 report = json.load(f)
423 self.assertEqual(report["version"], "1")
424 self.assertEqual(len(report["package"]), 1)
425 package = report["package"][0]
426 self.assertEqual(package["name"], "m4-native")
427 #m4 had only Patched CVEs, so the issues array will be empty
428 self.assertEqual(package["issue"], [])
429
430 self.assertExists(summary_json)
431 check_m4_json(summary_json)
432 self.assertExists(recipe_json)
433 check_m4_json(recipe_json)
434
435
436 def test_recipe_report_json_ignored(self):
437 config = """
438INHERIT += "cve-check"
439CVE_CHECK_FORMAT_JSON = "1"
440CVE_CHECK_REPORT_PATCHED = "1"
441"""
442 self.write_config(config)
443
444 vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
445 summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
446 recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "logrotate_cve.json")
447
448 try:
449 os.remove(summary_json)
450 os.remove(recipe_json)
451 except FileNotFoundError:
452 pass
453
454 bitbake("logrotate -c cve_check")
455
456 def check_m4_json(filename):
457 with open(filename) as f:
458 report = json.load(f)
459 self.assertEqual(report["version"], "1")
460 self.assertEqual(len(report["package"]), 1)
461 package = report["package"][0]
462 self.assertEqual(package["name"], "logrotate")
463 found_cves = {}
464 for issue in package["issue"]:
465 found_cves[issue["id"]] = {
466 "status" : issue["status"],
467 "detail" : issue["detail"] if "detail" in issue else "",
468 "description" : issue["description"] if "description" in issue else ""
469 }
470 # m4 CVE should not be in logrotate
471 self.assertNotIn("CVE-2008-1687", found_cves)
472 # logrotate has both Patched and Ignored CVEs
473 detail = "version-not-in-range"
474 self.assertIn("CVE-2011-1098", found_cves)
475 self.assertEqual(found_cves["CVE-2011-1098"]["status"], "Patched")
476 self.assertEqual(found_cves["CVE-2011-1098"]["detail"], detail)
477 self.assertEqual(len(found_cves["CVE-2011-1098"]["description"]), 0)
478 detail = "not-applicable-platform"
479 description = "CVE is debian, gentoo or SUSE specific on the way logrotate was installed/used"
480 self.assertIn("CVE-2011-1548", found_cves)
481 self.assertEqual(found_cves["CVE-2011-1548"]["status"], "Ignored")
482 self.assertEqual(found_cves["CVE-2011-1548"]["detail"], detail)
483 self.assertEqual(found_cves["CVE-2011-1548"]["description"], description)
484 self.assertIn("CVE-2011-1549", found_cves)
485 self.assertEqual(found_cves["CVE-2011-1549"]["status"], "Ignored")
486 self.assertEqual(found_cves["CVE-2011-1549"]["detail"], detail)
487 self.assertEqual(found_cves["CVE-2011-1549"]["description"], description)
488 self.assertIn("CVE-2011-1550", found_cves)
489 self.assertEqual(found_cves["CVE-2011-1550"]["status"], "Ignored")
490 self.assertEqual(found_cves["CVE-2011-1550"]["detail"], detail)
491 self.assertEqual(found_cves["CVE-2011-1550"]["description"], description)
492
493 self.assertExists(summary_json)
494 check_m4_json(summary_json)
495 self.assertExists(recipe_json)
496 check_m4_json(recipe_json)
diff --git a/meta/lib/oeqa/selftest/cases/debuginfod.py b/meta/lib/oeqa/selftest/cases/debuginfod.py
new file mode 100644
index 0000000000..46c0cd87bb
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/debuginfod.py
@@ -0,0 +1,160 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6import os
7import socketserver
8import subprocess
9import time
10import urllib
11import pathlib
12
13from oeqa.core.decorator import OETestTag
14from oeqa.selftest.case import OESelftestTestCase
15from oeqa.utils.commands import bitbake, get_bb_var, runqemu
16
17
18class Debuginfod(OESelftestTestCase):
19
20 def wait_for_debuginfod(self, port):
21 """
22 debuginfod takes time to scan the packages and requesting too early may
23 result in a test failure if the right packages haven't been scanned yet.
24
25 Request the metrics endpoint periodically and wait for there to be no
26 busy scanning threads.
27
28 Returns if debuginfod is ready, raises an exception if not within the
29 timeout.
30 """
31
32 # Wait two minutes
33 countdown = 24
34 delay = 5
35 latest = None
36
37 while countdown:
38 self.logger.info("waiting...")
39 time.sleep(delay)
40
41 self.logger.info("polling server")
42 if self.debuginfod.poll():
43 self.logger.info("server dead")
44 self.debuginfod.communicate()
45 self.fail("debuginfod terminated unexpectedly")
46 self.logger.info("server alive")
47
48 try:
49 with urllib.request.urlopen("http://localhost:%d/metrics" % port, timeout=10) as f:
50 for line in f.read().decode("ascii").splitlines():
51 key, value = line.rsplit(" ", 1)
52 if key == "thread_busy{role=\"scan\"}":
53 latest = int(value)
54 self.logger.info("Waiting for %d scan jobs to finish" % latest)
55 if latest == 0:
56 return
57 except urllib.error.URLError as e:
58 # TODO: how to catch just timeouts?
59 self.logger.error(e)
60
61 countdown -= 1
62
63 raise TimeoutError("Cannot connect debuginfod, still %d scan jobs running" % latest)
64
65 def start_debuginfod(self, feed_dir):
66 # We assume that the caller has already bitbake'd elfutils-native:do_addto_recipe_sysroot
67
68 # Save some useful paths for later
69 native_sysroot = pathlib.Path(get_bb_var("RECIPE_SYSROOT_NATIVE", "elfutils-native"))
70 native_bindir = native_sysroot / "usr" / "bin"
71 self.debuginfod = native_bindir / "debuginfod"
72 self.debuginfod_find = native_bindir / "debuginfod-find"
73
74 cmd = [
75 self.debuginfod,
76 "--verbose",
77 # In-memory database, this is a one-shot test
78 "--database=:memory:",
79 # Don't use all the host cores
80 "--concurrency=8",
81 "--connection-pool=8",
82 # Disable rescanning, this is a one-shot test
83 "--rescan-time=0",
84 "--groom-time=0",
85 feed_dir,
86 ]
87
88 format = get_bb_var("PACKAGE_CLASSES").split()[0]
89 if format == "package_deb":
90 cmd.append("--scan-deb-dir")
91 elif format == "package_ipk":
92 cmd.append("--scan-deb-dir")
93 elif format == "package_rpm":
94 cmd.append("--scan-rpm-dir")
95 else:
96 self.fail("Unknown package class %s" % format)
97
98 # Find a free port. Racey but the window is small.
99 with socketserver.TCPServer(("localhost", 0), None) as s:
100 self.port = s.server_address[1]
101 cmd.append("--port=%d" % self.port)
102
103 self.logger.info(f"Starting server {cmd}")
104 self.debuginfod = subprocess.Popen(cmd, env={})
105 self.wait_for_debuginfod(self.port)
106
107
108 def test_debuginfod_native(self):
109 """
110 Test debuginfod outside of qemu, by building a package and looking up a
111 binary's debuginfo using elfutils-native.
112 """
113
114 self.write_config("""
115TMPDIR = "${TOPDIR}/tmp-debuginfod"
116DISTRO_FEATURES:append = " debuginfod"
117INHERIT += "localpkgfeed"
118""")
119 bitbake("elfutils-native:do_addto_recipe_sysroot xz xz:do_package xz:do_localpkgfeed")
120
121 try:
122 self.start_debuginfod(get_bb_var("LOCALPKGFEED_DIR", "xz"))
123
124 env = os.environ.copy()
125 env["DEBUGINFOD_URLS"] = "http://localhost:%d/" % self.port
126
127 pkgs = pathlib.Path(get_bb_var("PKGDEST", "xz"))
128 cmd = (self.debuginfod_find, "debuginfo", pkgs / "xz" / "usr" / "bin" / "xz.xz")
129 self.logger.info(f"Starting client {cmd}")
130 output = subprocess.check_output(cmd, env=env, text=True)
131 # This should be more comprehensive
132 self.assertIn("/.cache/debuginfod_client/", output)
133 finally:
134 self.debuginfod.kill()
135
136 @OETestTag("runqemu")
137 def test_debuginfod_qemu(self):
138 """
139 Test debuginfod-find inside a qemu, talking to a debuginfod on the host.
140 """
141
142 self.write_config("""
143TMPDIR = "${TOPDIR}/tmp-debuginfod"
144DISTRO_FEATURES:append = " debuginfod"
145INHERIT += "localpkgfeed"
146CORE_IMAGE_EXTRA_INSTALL += "elfutils xz"
147 """)
148 bitbake("core-image-minimal elfutils-native:do_addto_recipe_sysroot xz:do_localpkgfeed")
149
150 try:
151 self.start_debuginfod(get_bb_var("LOCALPKGFEED_DIR", "xz"))
152
153 with runqemu("core-image-minimal", runqemuparams="nographic") as qemu:
154 cmd = "DEBUGINFOD_URLS=http://%s:%d/ debuginfod-find debuginfo /usr/bin/xz" % (qemu.server_ip, self.port)
155 self.logger.info(f"Starting client {cmd}")
156 status, output = qemu.run_serial(cmd)
157 # This should be more comprehensive
158 self.assertIn("/.cache/debuginfod_client/", output)
159 finally:
160 self.debuginfod.kill()
diff --git a/meta/lib/oeqa/selftest/cases/devtool.py b/meta/lib/oeqa/selftest/cases/devtool.py
index 3385546e8e..05f228f03e 100644
--- a/meta/lib/oeqa/selftest/cases/devtool.py
+++ b/meta/lib/oeqa/selftest/cases/devtool.py
@@ -1,18 +1,23 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
7import errno
5import os 8import os
6import re 9import re
7import shutil 10import shutil
8import tempfile 11import tempfile
9import glob 12import glob
10import fnmatch 13import fnmatch
14import unittest
15import json
11 16
12import oeqa.utils.ftools as ftools
13from oeqa.selftest.case import OESelftestTestCase 17from oeqa.selftest.case import OESelftestTestCase
14from oeqa.utils.commands import runCmd, bitbake, get_bb_var, create_temp_layer 18from oeqa.utils.commands import runCmd, bitbake, get_bb_var, create_temp_layer
15from oeqa.utils.commands import get_bb_vars, runqemu, get_test_layer 19from oeqa.utils.commands import get_bb_vars, runqemu, get_test_layer
20from oeqa.core.decorator import OETestTag
16 21
17oldmetapath = None 22oldmetapath = None
18 23
@@ -24,6 +29,9 @@ def setUpModule():
24 corecopydir = os.path.join(templayerdir, 'core-copy') 29 corecopydir = os.path.join(templayerdir, 'core-copy')
25 bblayers_conf = os.path.join(os.environ['BUILDDIR'], 'conf', 'bblayers.conf') 30 bblayers_conf = os.path.join(os.environ['BUILDDIR'], 'conf', 'bblayers.conf')
26 edited_layers = [] 31 edited_layers = []
32 # make sure user doesn't have a local workspace
33 result = runCmd('bitbake-layers show-layers')
34 assert "workspacelayer" not in result.output, "Devtool test suite cannot be run with a local workspace directory"
27 35
28 # We need to take a copy of the meta layer so we can modify it and not 36 # We need to take a copy of the meta layer so we can modify it and not
29 # have any races against other tests that might be running in parallel 37 # have any races against other tests that might be running in parallel
@@ -38,10 +46,17 @@ def setUpModule():
38 canonical_layerpath = os.path.realpath(canonical_layerpath) + '/' 46 canonical_layerpath = os.path.realpath(canonical_layerpath) + '/'
39 edited_layers.append(layerpath) 47 edited_layers.append(layerpath)
40 oldmetapath = os.path.realpath(layerpath) 48 oldmetapath = os.path.realpath(layerpath)
49
50 # when downloading poky from tar.gz some tests will be skipped (BUG 12389)
51 try:
52 runCmd('git rev-parse --is-inside-work-tree', cwd=canonical_layerpath)
53 except:
54 raise unittest.SkipTest("devtool tests require folder to be a git repo")
55
41 result = runCmd('git rev-parse --show-toplevel', cwd=canonical_layerpath) 56 result = runCmd('git rev-parse --show-toplevel', cwd=canonical_layerpath)
42 oldreporoot = result.output.rstrip() 57 oldreporoot = result.output.rstrip()
43 newmetapath = os.path.join(corecopydir, os.path.relpath(oldmetapath, oldreporoot)) 58 newmetapath = os.path.join(corecopydir, os.path.relpath(oldmetapath, oldreporoot))
44 runCmd('git clone %s %s' % (oldreporoot, corecopydir), cwd=templayerdir) 59 runCmd('git clone file://%s %s' % (oldreporoot, corecopydir), cwd=templayerdir)
45 # Now we need to copy any modified files 60 # Now we need to copy any modified files
46 # You might ask "why not just copy the entire tree instead of 61 # You might ask "why not just copy the entire tree instead of
47 # cloning and doing this?" - well, the problem with that is 62 # cloning and doing this?" - well, the problem with that is
@@ -49,11 +64,15 @@ def setUpModule():
49 # under COREBASE and we don't want to copy that, so we have 64 # under COREBASE and we don't want to copy that, so we have
50 # to be selective. 65 # to be selective.
51 result = runCmd('git status --porcelain', cwd=oldreporoot) 66 result = runCmd('git status --porcelain', cwd=oldreporoot)
67
68 # Also copy modifications to the 'scripts/' directory
69 canonical_layerpath_scripts = os.path.normpath(canonical_layerpath + "../scripts")
70
52 for line in result.output.splitlines(): 71 for line in result.output.splitlines():
53 if line.startswith(' M ') or line.startswith('?? '): 72 if line.startswith(' M ') or line.startswith('?? '):
54 relpth = line.split()[1] 73 relpth = line.split()[1]
55 pth = os.path.join(oldreporoot, relpth) 74 pth = os.path.join(oldreporoot, relpth)
56 if pth.startswith(canonical_layerpath): 75 if pth.startswith(canonical_layerpath) or pth.startswith(canonical_layerpath_scripts):
57 if relpth.endswith('/'): 76 if relpth.endswith('/'):
58 destdir = os.path.join(corecopydir, relpth) 77 destdir = os.path.join(corecopydir, relpth)
59 # avoid race condition by not copying .pyc files YPBZ#13421,13803 78 # avoid race condition by not copying .pyc files YPBZ#13421,13803
@@ -80,32 +99,15 @@ def tearDownModule():
80 bb.utils.edit_bblayers_conf(bblayers_conf, None, None, bblayers_edit_cb) 99 bb.utils.edit_bblayers_conf(bblayers_conf, None, None, bblayers_edit_cb)
81 shutil.rmtree(templayerdir) 100 shutil.rmtree(templayerdir)
82 101
83class DevtoolBase(OESelftestTestCase): 102class DevtoolTestCase(OESelftestTestCase):
84
85 @classmethod
86 def setUpClass(cls):
87 super(DevtoolBase, cls).setUpClass()
88 bb_vars = get_bb_vars(['TOPDIR', 'SSTATE_DIR'])
89 cls.original_sstate = bb_vars['SSTATE_DIR']
90 cls.devtool_sstate = os.path.join(bb_vars['TOPDIR'], 'sstate_devtool')
91 cls.sstate_conf = 'SSTATE_DIR = "%s"\n' % cls.devtool_sstate
92 cls.sstate_conf += ('SSTATE_MIRRORS += "file://.* file:///%s/PATH"\n'
93 % cls.original_sstate)
94
95 @classmethod
96 def tearDownClass(cls):
97 cls.logger.debug('Deleting devtool sstate cache on %s' % cls.devtool_sstate)
98 runCmd('rm -rf %s' % cls.devtool_sstate)
99 super(DevtoolBase, cls).tearDownClass()
100 103
101 def setUp(self): 104 def setUp(self):
102 """Test case setup function""" 105 """Test case setup function"""
103 super(DevtoolBase, self).setUp() 106 super(DevtoolTestCase, self).setUp()
104 self.workspacedir = os.path.join(self.builddir, 'workspace') 107 self.workspacedir = os.path.join(self.builddir, 'workspace')
105 self.assertTrue(not os.path.exists(self.workspacedir), 108 self.assertTrue(not os.path.exists(self.workspacedir),
106 'This test cannot be run with a workspace directory ' 109 'This test cannot be run with a workspace directory '
107 'under the build directory') 110 'under the build directory')
108 self.append_config(self.sstate_conf)
109 111
110 def _check_src_repo(self, repo_dir): 112 def _check_src_repo(self, repo_dir):
111 """Check srctree git repository""" 113 """Check srctree git repository"""
@@ -152,7 +154,7 @@ class DevtoolBase(OESelftestTestCase):
152 value = invalue 154 value = invalue
153 invar = None 155 invar = None
154 elif '=' in line: 156 elif '=' in line:
155 splitline = line.split('=', 1) 157 splitline = re.split(r"[?+:]*=[+]?", line, 1)
156 var = splitline[0].rstrip() 158 var = splitline[0].rstrip()
157 value = splitline[1].strip().strip('"') 159 value = splitline[1].strip().strip('"')
158 if value.endswith('\\'): 160 if value.endswith('\\'):
@@ -235,6 +237,103 @@ class DevtoolBase(OESelftestTestCase):
235 filelist.append(' '.join(splitline)) 237 filelist.append(' '.join(splitline))
236 return filelist 238 return filelist
237 239
240 def _check_diff(self, diffoutput, addlines, removelines):
241 """Check output from 'git diff' matches expectation"""
242 remaining_addlines = addlines[:]
243 remaining_removelines = removelines[:]
244 for line in diffoutput.splitlines():
245 if line.startswith('+++') or line.startswith('---'):
246 continue
247 elif line.startswith('+'):
248 matched = False
249 for item in addlines:
250 if re.match(item, line[1:].strip()):
251 matched = True
252 remaining_addlines.remove(item)
253 break
254 self.assertTrue(matched, 'Unexpected diff add line: %s' % line)
255 elif line.startswith('-'):
256 matched = False
257 for item in removelines:
258 if re.match(item, line[1:].strip()):
259 matched = True
260 remaining_removelines.remove(item)
261 break
262 self.assertTrue(matched, 'Unexpected diff remove line: %s' % line)
263 if remaining_addlines:
264 self.fail('Expected added lines not found: %s' % remaining_addlines)
265 if remaining_removelines:
266 self.fail('Expected removed lines not found: %s' % remaining_removelines)
267
268 def _check_runqemu_prerequisites(self):
269 """Check runqemu is available
270
271 Whilst some tests would seemingly be better placed as a runtime test,
272 unfortunately the runtime tests run under bitbake and you can't run
273 devtool within bitbake (since devtool needs to run bitbake itself).
274 Additionally we are testing build-time functionality as well, so
275 really this has to be done as an oe-selftest test.
276 """
277 machine = get_bb_var('MACHINE')
278 if not machine.startswith('qemu'):
279 self.skipTest('This test only works with qemu machines')
280 if not os.path.exists('/etc/runqemu-nosudo'):
281 self.skipTest('You must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
282 result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ip tuntap show', ignore_status=True)
283 if result.status != 0:
284 result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ifconfig -a', ignore_status=True)
285 if result.status != 0:
286 self.skipTest('Failed to determine if tap devices exist with ifconfig or ip: %s' % result.output)
287 for line in result.output.splitlines():
288 if line.startswith('tap'):
289 break
290 else:
291 self.skipTest('No tap devices found - you must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
292
293 def _test_devtool_add_git_url(self, git_url, version, pn, resulting_src_uri, srcrev=None):
294 self.track_for_cleanup(self.workspacedir)
295 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
296 command = 'devtool add --version %s %s %s' % (version, pn, git_url)
297 if srcrev :
298 command += ' --srcrev %s' %srcrev
299 result = runCmd(command)
300 self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created')
301 # Check the recipe name is correct
302 recipefile = get_bb_var('FILE', pn)
303 self.assertIn('%s_git.bb' % pn, recipefile, 'Recipe file incorrectly named')
304 self.assertIn(recipefile, result.output)
305 # Test devtool status
306 result = runCmd('devtool status')
307 self.assertIn(pn, result.output)
308 self.assertIn(recipefile, result.output)
309 checkvars = {}
310 checkvars['SRC_URI'] = resulting_src_uri
311 self._test_recipe_contents(recipefile, checkvars, [])
312
313class DevtoolBase(DevtoolTestCase):
314
315 @classmethod
316 def setUpClass(cls):
317 super(DevtoolBase, cls).setUpClass()
318 bb_vars = get_bb_vars(['TOPDIR', 'SSTATE_DIR'])
319 cls.original_sstate = bb_vars['SSTATE_DIR']
320 cls.devtool_sstate = os.path.join(bb_vars['TOPDIR'], 'sstate_devtool')
321 cls.sstate_conf = 'SSTATE_DIR = "%s"\n' % cls.devtool_sstate
322 cls.sstate_conf += ('SSTATE_MIRRORS += "file://.* file:///%s/PATH"\n'
323 % cls.original_sstate)
324 cls.sstate_conf += ('BB_HASHSERVE_UPSTREAM = "hashserv.yoctoproject.org:8686"\n')
325
326 @classmethod
327 def tearDownClass(cls):
328 cls.logger.debug('Deleting devtool sstate cache on %s' % cls.devtool_sstate)
329 runCmd('rm -rf %s' % cls.devtool_sstate)
330 super(DevtoolBase, cls).tearDownClass()
331
332 def setUp(self):
333 """Test case setup function"""
334 super(DevtoolBase, self).setUp()
335 self.append_config(self.sstate_conf)
336
238 337
239class DevtoolTests(DevtoolBase): 338class DevtoolTests(DevtoolBase):
240 339
@@ -304,6 +403,38 @@ class DevtoolAddTests(DevtoolBase):
304 bindir = bindir[1:] 403 bindir = bindir[1:]
305 self.assertTrue(os.path.isfile(os.path.join(installdir, bindir, 'pv')), 'pv binary not found in D') 404 self.assertTrue(os.path.isfile(os.path.join(installdir, bindir, 'pv')), 'pv binary not found in D')
306 405
406 def test_devtool_add_binary(self):
407 # Create a binary package containing a known test file
408 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
409 self.track_for_cleanup(tempdir)
410 pn = 'tst-bin'
411 pv = '1.0'
412 test_file_dir = "var/lib/%s/" % pn
413 test_file_name = "test_file"
414 test_file_content = "TEST CONTENT"
415 test_file_package_root = os.path.join(tempdir, pn)
416 test_file_dir_full = os.path.join(test_file_package_root, test_file_dir)
417 bb.utils.mkdirhier(test_file_dir_full)
418 with open(os.path.join(test_file_dir_full, test_file_name), "w") as f:
419 f.write(test_file_content)
420 bin_package_path = os.path.join(tempdir, "%s.tar.gz" % pn)
421 runCmd("tar czf %s -C %s ." % (bin_package_path, test_file_package_root))
422
423 # Test devtool add -b on the binary package
424 self.track_for_cleanup(self.workspacedir)
425 self.add_command_to_tearDown('bitbake -c cleansstate %s' % pn)
426 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
427 result = runCmd('devtool add -b %s %s' % (pn, bin_package_path))
428 self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created')
429
430 # Build the resulting recipe
431 result = runCmd('devtool build %s' % pn)
432 installdir = get_bb_var('D', pn)
433 self.assertTrue(installdir, 'Could not query installdir variable')
434
435 # Check that a known file from the binary package has indeed been installed
436 self.assertTrue(os.path.isfile(os.path.join(installdir, test_file_dir, test_file_name)), '%s not found in D' % test_file_name)
437
307 def test_devtool_add_git_local(self): 438 def test_devtool_add_git_local(self):
308 # We need dbus built so that DEPENDS recognition works 439 # We need dbus built so that DEPENDS recognition works
309 bitbake('dbus') 440 bitbake('dbus')
@@ -336,15 +467,32 @@ class DevtoolAddTests(DevtoolBase):
336 self.assertIn(srcdir, result.output) 467 self.assertIn(srcdir, result.output)
337 self.assertIn(recipefile, result.output) 468 self.assertIn(recipefile, result.output)
338 checkvars = {} 469 checkvars = {}
339 checkvars['LICENSE'] = 'GPLv2' 470 checkvars['LICENSE'] = 'GPL-2.0-only'
340 checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263' 471 checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263'
341 checkvars['S'] = '${WORKDIR}/git' 472 checkvars['S'] = None
342 checkvars['PV'] = '0.1+git${SRCPV}' 473 checkvars['PV'] = '0.1+git'
343 checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/dbus-wait;protocol=https' 474 checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/dbus-wait;protocol=https;branch=master'
344 checkvars['SRCREV'] = srcrev 475 checkvars['SRCREV'] = srcrev
345 checkvars['DEPENDS'] = set(['dbus']) 476 checkvars['DEPENDS'] = set(['dbus'])
346 self._test_recipe_contents(recipefile, checkvars, []) 477 self._test_recipe_contents(recipefile, checkvars, [])
347 478
479 def test_devtool_add_git_style1(self):
480 version = 'v3.1.0'
481 pn = 'mbedtls'
482 # this will trigger reformat_git_uri with branch parameter in url
483 git_url = "'git://git@github.com/ARMmbed/mbedtls.git;branch=mbedtls-2.28;protocol=https'"
484 resulting_src_uri = "git://git@github.com/ARMmbed/mbedtls.git;branch=mbedtls-2.28;protocol=https"
485 self._test_devtool_add_git_url(git_url, version, pn, resulting_src_uri)
486
487 def test_devtool_add_git_style2(self):
488 version = 'v3.1.0'
489 srcrev = 'v3.1.0'
490 pn = 'mbedtls'
491 # this will trigger reformat_git_uri with branch parameter in url
492 git_url = "'git://git@github.com/ARMmbed/mbedtls.git;protocol=https'"
493 resulting_src_uri = "git://git@github.com/ARMmbed/mbedtls.git;protocol=https;branch=master"
494 self._test_devtool_add_git_url(git_url, version, pn, resulting_src_uri, srcrev)
495
348 def test_devtool_add_library(self): 496 def test_devtool_add_library(self):
349 # Fetch source 497 # Fetch source
350 tempdir = tempfile.mkdtemp(prefix='devtoolqa') 498 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
@@ -373,7 +521,7 @@ class DevtoolAddTests(DevtoolBase):
373 recipefile = '%s/recipes/libftdi/libftdi_%s.bb' % (self.workspacedir, version) 521 recipefile = '%s/recipes/libftdi/libftdi_%s.bb' % (self.workspacedir, version)
374 result = runCmd('recipetool setvar %s EXTRA_OECMAKE -- \'-DPYTHON_BINDINGS=OFF -DLIBFTDI_CMAKE_CONFIG_DIR=${datadir}/cmake/Modules\'' % recipefile) 522 result = runCmd('recipetool setvar %s EXTRA_OECMAKE -- \'-DPYTHON_BINDINGS=OFF -DLIBFTDI_CMAKE_CONFIG_DIR=${datadir}/cmake/Modules\'' % recipefile)
375 with open(recipefile, 'a') as f: 523 with open(recipefile, 'a') as f:
376 f.write('\nFILES_${PN}-dev += "${datadir}/cmake/Modules"\n') 524 f.write('\nFILES:${PN}-dev += "${datadir}/cmake/Modules"\n')
377 # We don't have the ability to pick up this dependency automatically yet... 525 # We don't have the ability to pick up this dependency automatically yet...
378 f.write('\nDEPENDS += "libusb1"\n') 526 f.write('\nDEPENDS += "libusb1"\n')
379 f.write('\nTESTLIBOUTPUT = "${COMPONENTS_DIR}/${TUNE_PKGARCH}/${PN}/${libdir}"\n') 527 f.write('\nTESTLIBOUTPUT = "${COMPONENTS_DIR}/${TUNE_PKGARCH}/${PN}/${libdir}"\n')
@@ -405,7 +553,7 @@ class DevtoolAddTests(DevtoolBase):
405 self.track_for_cleanup(self.workspacedir) 553 self.track_for_cleanup(self.workspacedir)
406 self.add_command_to_tearDown('bitbake -c cleansstate %s' % testrecipe) 554 self.add_command_to_tearDown('bitbake -c cleansstate %s' % testrecipe)
407 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') 555 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
408 result = runCmd('devtool add %s %s -f %s' % (testrecipe, srcdir, url)) 556 result = runCmd('devtool add --no-pypi %s %s -f %s' % (testrecipe, srcdir, url))
409 self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. %s' % result.output) 557 self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. %s' % result.output)
410 self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory') 558 self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory')
411 self.assertTrue(os.path.isdir(os.path.join(srcdir, '.git')), 'git repository for external source tree was not created') 559 self.assertTrue(os.path.isdir(os.path.join(srcdir, '.git')), 'git repository for external source tree was not created')
@@ -417,14 +565,14 @@ class DevtoolAddTests(DevtoolBase):
417 recipefile = get_bb_var('FILE', testrecipe) 565 recipefile = get_bb_var('FILE', testrecipe)
418 self.assertIn('%s_%s.bb' % (testrecipe, testver), recipefile, 'Recipe file incorrectly named') 566 self.assertIn('%s_%s.bb' % (testrecipe, testver), recipefile, 'Recipe file incorrectly named')
419 checkvars = {} 567 checkvars = {}
420 checkvars['S'] = '${WORKDIR}/MarkupSafe-${PV}' 568 checkvars['S'] = '${UNPACKDIR}/MarkupSafe-${PV}'
421 checkvars['SRC_URI'] = url.replace(testver, '${PV}') 569 checkvars['SRC_URI'] = url.replace(testver, '${PV}')
422 self._test_recipe_contents(recipefile, checkvars, []) 570 self._test_recipe_contents(recipefile, checkvars, [])
423 # Try with version specified 571 # Try with version specified
424 result = runCmd('devtool reset -n %s' % testrecipe) 572 result = runCmd('devtool reset -n %s' % testrecipe)
425 shutil.rmtree(srcdir) 573 shutil.rmtree(srcdir)
426 fakever = '1.9' 574 fakever = '1.9'
427 result = runCmd('devtool add %s %s -f %s -V %s' % (testrecipe, srcdir, url, fakever)) 575 result = runCmd('devtool add --no-pypi %s %s -f %s -V %s' % (testrecipe, srcdir, url, fakever))
428 self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory') 576 self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory')
429 # Test devtool status 577 # Test devtool status
430 result = runCmd('devtool status') 578 result = runCmd('devtool status')
@@ -434,7 +582,7 @@ class DevtoolAddTests(DevtoolBase):
434 recipefile = get_bb_var('FILE', testrecipe) 582 recipefile = get_bb_var('FILE', testrecipe)
435 self.assertIn('%s_%s.bb' % (testrecipe, fakever), recipefile, 'Recipe file incorrectly named') 583 self.assertIn('%s_%s.bb' % (testrecipe, fakever), recipefile, 'Recipe file incorrectly named')
436 checkvars = {} 584 checkvars = {}
437 checkvars['S'] = '${WORKDIR}/MarkupSafe-%s' % testver 585 checkvars['S'] = '${UNPACKDIR}/MarkupSafe-%s' % testver
438 checkvars['SRC_URI'] = url 586 checkvars['SRC_URI'] = url
439 self._test_recipe_contents(recipefile, checkvars, []) 587 self._test_recipe_contents(recipefile, checkvars, [])
440 588
@@ -442,6 +590,7 @@ class DevtoolAddTests(DevtoolBase):
442 tempdir = tempfile.mkdtemp(prefix='devtoolqa') 590 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
443 self.track_for_cleanup(tempdir) 591 self.track_for_cleanup(tempdir)
444 url = 'gitsm://git.yoctoproject.org/mraa' 592 url = 'gitsm://git.yoctoproject.org/mraa'
593 url_branch = '%s;branch=master' % url
445 checkrev = 'ae127b19a50aa54255e4330ccfdd9a5d058e581d' 594 checkrev = 'ae127b19a50aa54255e4330ccfdd9a5d058e581d'
446 testrecipe = 'mraa' 595 testrecipe = 'mraa'
447 srcdir = os.path.join(tempdir, testrecipe) 596 srcdir = os.path.join(tempdir, testrecipe)
@@ -460,9 +609,9 @@ class DevtoolAddTests(DevtoolBase):
460 recipefile = get_bb_var('FILE', testrecipe) 609 recipefile = get_bb_var('FILE', testrecipe)
461 self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named') 610 self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named')
462 checkvars = {} 611 checkvars = {}
463 checkvars['S'] = '${WORKDIR}/git' 612 checkvars['S'] = None
464 checkvars['PV'] = '1.0+git${SRCPV}' 613 checkvars['PV'] = '1.0+git'
465 checkvars['SRC_URI'] = url 614 checkvars['SRC_URI'] = url_branch
466 checkvars['SRCREV'] = '${AUTOREV}' 615 checkvars['SRCREV'] = '${AUTOREV}'
467 self._test_recipe_contents(recipefile, checkvars, []) 616 self._test_recipe_contents(recipefile, checkvars, [])
468 # Try with revision and version specified 617 # Try with revision and version specified
@@ -479,9 +628,9 @@ class DevtoolAddTests(DevtoolBase):
479 recipefile = get_bb_var('FILE', testrecipe) 628 recipefile = get_bb_var('FILE', testrecipe)
480 self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named') 629 self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named')
481 checkvars = {} 630 checkvars = {}
482 checkvars['S'] = '${WORKDIR}/git' 631 checkvars['S'] = None
483 checkvars['PV'] = '1.5+git${SRCPV}' 632 checkvars['PV'] = '1.5+git'
484 checkvars['SRC_URI'] = url 633 checkvars['SRC_URI'] = url_branch
485 checkvars['SRCREV'] = checkrev 634 checkvars['SRCREV'] = checkrev
486 self._test_recipe_contents(recipefile, checkvars, []) 635 self._test_recipe_contents(recipefile, checkvars, [])
487 636
@@ -504,7 +653,7 @@ class DevtoolAddTests(DevtoolBase):
504 result = runCmd('devtool status') 653 result = runCmd('devtool status')
505 self.assertIn(testrecipe, result.output) 654 self.assertIn(testrecipe, result.output)
506 self.assertIn(srcdir, result.output) 655 self.assertIn(srcdir, result.output)
 507 # Check recipe 656 # Check recipe
508 recipefile = get_bb_var('FILE', testrecipe) 657 recipefile = get_bb_var('FILE', testrecipe)
509 self.assertIn('%s_%s.bb' % (testrecipe, testver), recipefile, 'Recipe file incorrectly named') 658 self.assertIn('%s_%s.bb' % (testrecipe, testver), recipefile, 'Recipe file incorrectly named')
510 checkvars = {} 659 checkvars = {}
@@ -536,6 +685,19 @@ class DevtoolAddTests(DevtoolBase):
536 # Test devtool build 685 # Test devtool build
537 result = runCmd('devtool build %s' % pn) 686 result = runCmd('devtool build %s' % pn)
538 687
688 def test_devtool_add_python_egg_requires(self):
689 # Fetch source
690 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
691 self.track_for_cleanup(tempdir)
692 testver = '0.14.0'
693 url = 'https://files.pythonhosted.org/packages/e9/9e/25d59f5043cf763833b2581c8027fa92342c4cf8ee523b498ecdf460c16d/uvicorn-%s.tar.gz' % testver
694 testrecipe = 'python3-uvicorn'
695 srcdir = os.path.join(tempdir, testrecipe)
696 # Test devtool add
697 self.track_for_cleanup(self.workspacedir)
698 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
699 result = runCmd('devtool add %s %s -f %s' % (testrecipe, srcdir, url))
700
539class DevtoolModifyTests(DevtoolBase): 701class DevtoolModifyTests(DevtoolBase):
540 702
541 def test_devtool_modify(self): 703 def test_devtool_modify(self):
@@ -595,6 +757,25 @@ class DevtoolModifyTests(DevtoolBase):
595 result = runCmd('devtool status') 757 result = runCmd('devtool status')
596 self.assertNotIn('mdadm', result.output) 758 self.assertNotIn('mdadm', result.output)
597 759
760 def test_devtool_modify_go(self):
761 import oe.path
762 from tempfile import TemporaryDirectory
763 with TemporaryDirectory(prefix='devtoolqa') as tempdir:
764 self.track_for_cleanup(self.workspacedir)
765 self.add_command_to_tearDown('bitbake -c clean go-helloworld')
766 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
767 result = runCmd('devtool modify go-helloworld -x %s' % tempdir)
768 self.assertExists(
769 oe.path.join(tempdir, 'src', 'golang.org', 'x', 'example', 'go.mod'),
770 'Extracted source could not be found'
771 )
772 self.assertExists(
773 oe.path.join(self.workspacedir, 'conf', 'layer.conf'),
774 'Workspace directory not created'
775 )
776 matches = glob.glob(oe.path.join(self.workspacedir, 'appends', 'go-helloworld_*.bbappend'))
777 self.assertTrue(matches, 'bbappend not created %s' % result.output)
778
598 def test_devtool_buildclean(self): 779 def test_devtool_buildclean(self):
599 def assertFile(path, *paths): 780 def assertFile(path, *paths):
600 f = os.path.join(path, *paths) 781 f = os.path.join(path, *paths)
@@ -649,7 +830,7 @@ class DevtoolModifyTests(DevtoolBase):
649 self.track_for_cleanup(self.workspacedir) 830 self.track_for_cleanup(self.workspacedir)
650 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') 831 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
651 832
652 testrecipes = 'perf kernel-devsrc package-index core-image-minimal meta-toolchain packagegroup-core-sdk meta-ide-support'.split() 833 testrecipes = 'perf kernel-devsrc package-index core-image-minimal meta-toolchain packagegroup-core-sdk'.split()
653 # Find actual name of gcc-source since it now includes the version - crude, but good enough for this purpose 834 # Find actual name of gcc-source since it now includes the version - crude, but good enough for this purpose
654 result = runCmd('bitbake-layers show-recipes gcc-source*') 835 result = runCmd('bitbake-layers show-recipes gcc-source*')
655 for line in result.output.splitlines(): 836 for line in result.output.splitlines():
@@ -697,6 +878,7 @@ class DevtoolModifyTests(DevtoolBase):
697 878
698 self.assertTrue(bbclassextended, 'None of these recipes are BBCLASSEXTENDed to native - need to adjust testrecipes list: %s' % ', '.join(testrecipes)) 879 self.assertTrue(bbclassextended, 'None of these recipes are BBCLASSEXTENDed to native - need to adjust testrecipes list: %s' % ', '.join(testrecipes))
699 self.assertTrue(inheritnative, 'None of these recipes do "inherit native" - need to adjust testrecipes list: %s' % ', '.join(testrecipes)) 880 self.assertTrue(inheritnative, 'None of these recipes do "inherit native" - need to adjust testrecipes list: %s' % ', '.join(testrecipes))
881
700 def test_devtool_modify_localfiles_only(self): 882 def test_devtool_modify_localfiles_only(self):
701 # Check preconditions 883 # Check preconditions
702 testrecipe = 'base-files' 884 testrecipe = 'base-files'
@@ -720,13 +902,8 @@ class DevtoolModifyTests(DevtoolBase):
720 self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe) 902 self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
721 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') 903 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
722 result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir)) 904 result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
723 srcfile = os.path.join(tempdir, 'oe-local-files/share/dot.bashrc') 905 srcfile = os.path.join(tempdir, 'share/dot.bashrc')
724 srclink = os.path.join(tempdir, 'share/dot.bashrc')
725 self.assertExists(srcfile, 'Extracted source could not be found') 906 self.assertExists(srcfile, 'Extracted source could not be found')
726 if os.path.islink(srclink) and os.path.exists(srclink) and os.path.samefile(srcfile, srclink):
727 correct_symlink = True
728 self.assertTrue(correct_symlink, 'Source symlink to oe-local-files is broken')
729
730 matches = glob.glob(os.path.join(self.workspacedir, 'appends', '%s_*.bbappend' % testrecipe)) 907 matches = glob.glob(os.path.join(self.workspacedir, 'appends', '%s_*.bbappend' % testrecipe))
731 self.assertTrue(matches, 'bbappend not created') 908 self.assertTrue(matches, 'bbappend not created')
732 # Test devtool status 909 # Test devtool status
@@ -763,6 +940,122 @@ class DevtoolModifyTests(DevtoolBase):
763 # Try building 940 # Try building
764 bitbake(testrecipe) 941 bitbake(testrecipe)
765 942
943 def test_devtool_modify_git_no_extract(self):
944 # Check preconditions
945 testrecipe = 'psplash'
946 src_uri = get_bb_var('SRC_URI', testrecipe)
947 self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe)
948 # Clean up anything in the workdir/sysroot/sstate cache
949 bitbake('%s -c cleansstate' % testrecipe)
950 # Try modifying a recipe
951 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
952 self.track_for_cleanup(tempdir)
953 self.track_for_cleanup(self.workspacedir)
954 self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
955 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
956 result = runCmd('git clone https://git.yoctoproject.org/psplash %s && devtool modify -n %s %s' % (tempdir, testrecipe, tempdir))
957 self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. devtool output: %s' % result.output)
958 matches = glob.glob(os.path.join(self.workspacedir, 'appends', 'psplash_*.bbappend'))
959 self.assertTrue(matches, 'bbappend not created')
960 # Test devtool status
961 result = runCmd('devtool status')
962 self.assertIn(testrecipe, result.output)
963 self.assertIn(tempdir, result.output)
964
965 def test_devtool_modify_git_crates_subpath(self):
966 # This tests two things in devtool context:
967 # - that we support local git dependencies for cargo based recipe
968 # - that we support patches in SRC_URI when git url contains subpath parameter
969
970 # Check preconditions:
971 # recipe inherits cargo
972 # git:// uri with a subpath as the main package
973 # some crate:// in SRC_URI
974 # others git:// in SRC_URI
975 # cointains a patch
976 testrecipe = 'hello-rs'
977 bb_vars = get_bb_vars(['SRC_URI', 'FILE', 'UNPACKDIR', 'CARGO_HOME'], testrecipe)
978 recipefile = bb_vars['FILE']
979 unpackdir = bb_vars['UNPACKDIR']
980 cargo_home = bb_vars['CARGO_HOME']
981 src_uri = bb_vars['SRC_URI'].split()
982 self.assertTrue(src_uri[0].startswith('git://'),
983 'This test expects the %s recipe to have a git repo has its main uri' % testrecipe)
984 self.assertIn(';subpath=', src_uri[0],
985 'This test expects the %s recipe to have a git uri with subpath' % testrecipe)
986 self.assertTrue(any([uri.startswith('crate://') for uri in src_uri]),
987 'This test expects the %s recipe to have some crates in its src uris' % testrecipe)
988 self.assertGreaterEqual(sum(map(lambda x:x.startswith('git://'), src_uri)), 2,
989 'This test expects the %s recipe to have several git:// uris' % testrecipe)
990 self.assertTrue(any([uri.startswith('file://') and '.patch' in uri for uri in src_uri]),
991 'This test expects the %s recipe to have a patch in its src uris' % testrecipe)
992
993 self._test_recipe_contents(recipefile, {}, ['ptest-cargo'])
994
995 # Clean up anything in the workdir/sysroot/sstate cache
996 bitbake('%s -c cleansstate' % testrecipe)
997 # Try modifying a recipe
998 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
999 self.track_for_cleanup(tempdir)
1000 self.track_for_cleanup(self.workspacedir)
1001 self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
1002 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
1003 result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
1004 self.assertExists(os.path.join(tempdir, 'Cargo.toml'), 'Extracted source could not be found')
1005 self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. devtool output: %s' % result.output)
1006 matches = glob.glob(os.path.join(self.workspacedir, 'appends', '%s_*.bbappend' % testrecipe))
1007 self.assertTrue(matches, 'bbappend not created')
1008 # Test devtool status
1009 result = runCmd('devtool status')
1010 self.assertIn(testrecipe, result.output)
1011 self.assertIn(tempdir, result.output)
1012 # Check git repo
1013 self._check_src_repo(tempdir)
1014 # Check that the patch is correctly applied.
1015 # The last commit message in the tree must contain the following note:
1016 # Notes (devtool):
1017 # original patch: <patchname>
1018 # ..
1019 patchname = None
1020 for uri in src_uri:
1021 if uri.startswith('file://') and '.patch' in uri:
1022 patchname = uri.replace("file://", "").partition('.patch')[0] + '.patch'
1023 self.assertIsNotNone(patchname)
1024 result = runCmd('git -C %s log -1' % tempdir)
1025 self.assertIn("Notes (devtool):\n original patch: %s" % patchname, result.output)
1026
1027 # Configure the recipe to check that the git dependencies are correctly patched in cargo config
1028 bitbake('-c configure %s' % testrecipe)
1029
1030 cargo_config_path = os.path.join(cargo_home, 'config.toml')
1031 with open(cargo_config_path, "r") as f:
1032 cargo_config_contents = [line.strip('\n') for line in f.readlines()]
1033
1034 # Get back git dependencies of the recipe (ignoring the main one)
1035 # and check that they are all correctly patched to be fetched locally
1036 git_deps = [uri for uri in src_uri if uri.startswith("git://")][1:]
1037 for git_dep in git_deps:
1038 raw_url, _, raw_parms = git_dep.partition(";")
1039 parms = {}
1040 for parm in raw_parms.split(";"):
1041 name_parm, _, value_parm = parm.partition('=')
1042 parms[name_parm]=value_parm
1043 self.assertIn('protocol', parms, 'git dependencies uri should contain the "protocol" parameter')
1044 self.assertIn('name', parms, 'git dependencies uri should contain the "name" parameter')
1045 self.assertIn('destsuffix', parms, 'git dependencies uri should contain the "destsuffix" parameter')
1046 self.assertIn('type', parms, 'git dependencies uri should contain the "type" parameter')
1047 self.assertEqual(parms['type'], 'git-dependency', 'git dependencies uri should have "type=git-dependency"')
1048 raw_url = raw_url.replace("git://", '%s://' % parms['protocol'])
1049 patch_line = '[patch."%s"]' % raw_url
1050 path_patched = os.path.join(unpackdir, parms['destsuffix'])
1051 path_override_line = '%s = { path = "%s" }' % (parms['name'], path_patched)
1052 # Would have been better to use tomllib to read this file :/
1053 self.assertIn(patch_line, cargo_config_contents)
1054 self.assertIn(path_override_line, cargo_config_contents)
1055
1056 # Try to package the recipe
1057 bitbake('-c package_qa %s' % testrecipe)
1058
766 def test_devtool_modify_localfiles(self): 1059 def test_devtool_modify_localfiles(self):
767 # Check preconditions 1060 # Check preconditions
768 testrecipe = 'lighttpd' 1061 testrecipe = 'lighttpd'
@@ -828,12 +1121,43 @@ class DevtoolModifyTests(DevtoolBase):
828 runCmd('git -C %s checkout %s' % (tempdir, branch)) 1121 runCmd('git -C %s checkout %s' % (tempdir, branch))
829 with open(source, "rt") as f: 1122 with open(source, "rt") as f:
830 content = f.read() 1123 content = f.read()
831 self.assertEquals(content, expected) 1124 self.assertEqual(content, expected)
832 check('devtool', 'This is a test for something\n') 1125 if self.td["MACHINE"] == "qemux86":
1126 check('devtool', 'This is a test for qemux86\n')
1127 elif self.td["MACHINE"] == "qemuarm":
1128 check('devtool', 'This is a test for qemuarm\n')
1129 else:
1130 check('devtool', 'This is a test for something\n')
833 check('devtool-no-overrides', 'This is a test for something\n') 1131 check('devtool-no-overrides', 'This is a test for something\n')
834 check('devtool-override-qemuarm', 'This is a test for qemuarm\n') 1132 check('devtool-override-qemuarm', 'This is a test for qemuarm\n')
835 check('devtool-override-qemux86', 'This is a test for qemux86\n') 1133 check('devtool-override-qemux86', 'This is a test for qemux86\n')
836 1134
1135 def test_devtool_modify_multiple_sources(self):
1136 # This test check that recipes fetching several sources can be used with devtool modify/build
1137 # Check preconditions
1138 testrecipe = 'bzip2'
1139 src_uri = get_bb_var('SRC_URI', testrecipe)
1140 src1 = 'https://' in src_uri
1141 src2 = 'git://' in src_uri
1142 self.assertTrue(src1 and src2, 'This test expects the %s recipe to fetch both a git source and a tarball and it seems that it no longer does' % testrecipe)
1143 # Clean up anything in the workdir/sysroot/sstate cache
1144 bitbake('%s -c cleansstate' % testrecipe)
1145 # Try modifying a recipe
1146 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
1147 self.track_for_cleanup(tempdir)
1148 self.track_for_cleanup(self.workspacedir)
1149 self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
1150 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
1151 result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
1152 self.assertEqual(result.status, 0, "Could not modify recipe %s. Output: %s" % (testrecipe, result.output))
1153 # Test devtool status
1154 result = runCmd('devtool status')
1155 self.assertIn(testrecipe, result.output)
1156 self.assertIn(tempdir, result.output)
1157 # Try building
1158 result = bitbake(testrecipe)
1159 self.assertEqual(result.status, 0, "Bitbake failed, exit code %s, output %s" % (result.status, result.output))
1160
837class DevtoolUpdateTests(DevtoolBase): 1161class DevtoolUpdateTests(DevtoolBase):
838 1162
839 def test_devtool_update_recipe(self): 1163 def test_devtool_update_recipe(self):
@@ -861,16 +1185,20 @@ class DevtoolUpdateTests(DevtoolBase):
861 result = runCmd('echo "A new file" > devtool-new-file', cwd=tempdir) 1185 result = runCmd('echo "A new file" > devtool-new-file', cwd=tempdir)
862 result = runCmd('git add devtool-new-file', cwd=tempdir) 1186 result = runCmd('git add devtool-new-file', cwd=tempdir)
863 result = runCmd('git commit -m "Add a new file"', cwd=tempdir) 1187 result = runCmd('git commit -m "Add a new file"', cwd=tempdir)
864 self.add_command_to_tearDown('cd %s; rm %s/*.patch; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile))) 1188 cleanup_cmd = 'cd %s; rm %s/*.patch; git add %s; git checkout %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile))
1189 self.add_command_to_tearDown(cleanup_cmd)
865 result = runCmd('devtool update-recipe %s' % testrecipe) 1190 result = runCmd('devtool update-recipe %s' % testrecipe)
1191 result = runCmd('git add minicom', cwd=os.path.dirname(recipefile))
866 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)), 1192 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
867 ('??', '.*/0001-Change-the-README.patch$'), 1193 ('A ', '.*/0001-Change-the-README.patch$'),
868 ('??', '.*/0002-Add-a-new-file.patch$')] 1194 ('A ', '.*/0002-Add-a-new-file.patch$')]
869 self._check_repo_status(os.path.dirname(recipefile), expected_status) 1195 self._check_repo_status(os.path.dirname(recipefile), expected_status)
1196 result = runCmd(cleanup_cmd)
1197 self._check_repo_status(os.path.dirname(recipefile), [])
870 1198
871 def test_devtool_update_recipe_git(self): 1199 def test_devtool_update_recipe_git(self):
872 # Check preconditions 1200 # Check preconditions
873 testrecipe = 'mtd-utils' 1201 testrecipe = 'mtd-utils-selftest'
874 bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe) 1202 bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe)
875 recipefile = bb_vars['FILE'] 1203 recipefile = bb_vars['FILE']
876 src_uri = bb_vars['SRC_URI'] 1204 src_uri = bb_vars['SRC_URI']
@@ -904,28 +1232,12 @@ class DevtoolUpdateTests(DevtoolBase):
904 self._check_repo_status(os.path.dirname(recipefile), expected_status) 1232 self._check_repo_status(os.path.dirname(recipefile), expected_status)
905 1233
906 result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile)) 1234 result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile))
907 addlines = ['SRCREV = ".*"', 'SRC_URI = "git://git.infradead.org/mtd-utils.git"'] 1235 addlines = ['SRCREV = ".*"', 'SRC_URI = "git://git.infradead.org/mtd-utils.git;branch=master"']
908 srcurilines = src_uri.split() 1236 srcurilines = src_uri.split()
909 srcurilines[0] = 'SRC_URI = "' + srcurilines[0] 1237 srcurilines[0] = 'SRC_URI = "' + srcurilines[0]
910 srcurilines.append('"') 1238 srcurilines.append('"')
911 removelines = ['SRCREV = ".*"'] + srcurilines 1239 removelines = ['SRCREV = ".*"'] + srcurilines
912 for line in result.output.splitlines(): 1240 self._check_diff(result.output, addlines, removelines)
913 if line.startswith('+++') or line.startswith('---'):
914 continue
915 elif line.startswith('+'):
916 matched = False
917 for item in addlines:
918 if re.match(item, line[1:].strip()):
919 matched = True
920 break
921 self.assertTrue(matched, 'Unexpected diff add line: %s' % line)
922 elif line.startswith('-'):
923 matched = False
924 for item in removelines:
925 if re.match(item, line[1:].strip()):
926 matched = True
927 break
928 self.assertTrue(matched, 'Unexpected diff remove line: %s' % line)
929 # Now try with auto mode 1241 # Now try with auto mode
930 runCmd('cd %s; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, os.path.basename(recipefile))) 1242 runCmd('cd %s; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, os.path.basename(recipefile)))
931 result = runCmd('devtool update-recipe %s' % testrecipe) 1243 result = runCmd('devtool update-recipe %s' % testrecipe)
@@ -939,7 +1251,7 @@ class DevtoolUpdateTests(DevtoolBase):
939 1251
940 def test_devtool_update_recipe_append(self): 1252 def test_devtool_update_recipe_append(self):
941 # Check preconditions 1253 # Check preconditions
942 testrecipe = 'mdadm' 1254 testrecipe = 'minicom'
943 bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe) 1255 bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe)
944 recipefile = bb_vars['FILE'] 1256 recipefile = bb_vars['FILE']
945 src_uri = bb_vars['SRC_URI'] 1257 src_uri = bb_vars['SRC_URI']
@@ -957,7 +1269,7 @@ class DevtoolUpdateTests(DevtoolBase):
957 # Check git repo 1269 # Check git repo
958 self._check_src_repo(tempsrcdir) 1270 self._check_src_repo(tempsrcdir)
959 # Add a commit 1271 # Add a commit
960 result = runCmd("sed 's!\\(#define VERSION\\W*\"[^\"]*\\)\"!\\1-custom\"!' -i ReadMe.c", cwd=tempsrcdir) 1272 result = runCmd('echo "Additional line" >> README', cwd=tempsrcdir)
961 result = runCmd('git commit -a -m "Add our custom version"', cwd=tempsrcdir) 1273 result = runCmd('git commit -a -m "Add our custom version"', cwd=tempsrcdir)
962 self.add_command_to_tearDown('cd %s; rm -f %s/*.patch; git checkout .' % (os.path.dirname(recipefile), testrecipe)) 1274 self.add_command_to_tearDown('cd %s; rm -f %s/*.patch; git checkout .' % (os.path.dirname(recipefile), testrecipe))
963 # Create a temporary layer and add it to bblayers.conf 1275 # Create a temporary layer and add it to bblayers.conf
@@ -975,7 +1287,7 @@ class DevtoolUpdateTests(DevtoolBase):
975 self.assertExists(patchfile, 'Patch file not created') 1287 self.assertExists(patchfile, 'Patch file not created')
976 1288
977 # Check bbappend contents 1289 # Check bbappend contents
978 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 1290 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
979 '\n', 1291 '\n',
980 'SRC_URI += "file://0001-Add-our-custom-version.patch"\n', 1292 'SRC_URI += "file://0001-Add-our-custom-version.patch"\n',
981 '\n'] 1293 '\n']
@@ -987,15 +1299,16 @@ class DevtoolUpdateTests(DevtoolBase):
987 with open(bbappendfile, 'r') as f: 1299 with open(bbappendfile, 'r') as f:
988 self.assertEqual(expectedlines, f.readlines()) 1300 self.assertEqual(expectedlines, f.readlines())
989 # Drop new commit and check patch gets deleted 1301 # Drop new commit and check patch gets deleted
990 result = runCmd('git reset HEAD^', cwd=tempsrcdir) 1302 result = runCmd('git reset HEAD^ --hard', cwd=tempsrcdir)
991 result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir)) 1303 result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir))
992 self.assertNotExists(patchfile, 'Patch file not deleted') 1304 self.assertNotExists(patchfile, 'Patch file not deleted')
993 expectedlines2 = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 1305 expectedlines2 = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
994 '\n'] 1306 '\n']
995 with open(bbappendfile, 'r') as f: 1307 with open(bbappendfile, 'r') as f:
996 self.assertEqual(expectedlines2, f.readlines()) 1308 self.assertEqual(expectedlines2, f.readlines())
997 # Put commit back and check we can run it if layer isn't in bblayers.conf 1309 # Put commit back and check we can run it if layer isn't in bblayers.conf
998 os.remove(bbappendfile) 1310 os.remove(bbappendfile)
1311 result = runCmd('echo "Additional line" >> README', cwd=tempsrcdir)
999 result = runCmd('git commit -a -m "Add our custom version"', cwd=tempsrcdir) 1312 result = runCmd('git commit -a -m "Add our custom version"', cwd=tempsrcdir)
1000 result = runCmd('bitbake-layers remove-layer %s' % templayerdir, cwd=self.builddir) 1313 result = runCmd('bitbake-layers remove-layer %s' % templayerdir, cwd=self.builddir)
1001 result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir)) 1314 result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir))
@@ -1007,10 +1320,11 @@ class DevtoolUpdateTests(DevtoolBase):
1007 1320
1008 def test_devtool_update_recipe_append_git(self): 1321 def test_devtool_update_recipe_append_git(self):
1009 # Check preconditions 1322 # Check preconditions
1010 testrecipe = 'mtd-utils' 1323 testrecipe = 'mtd-utils-selftest'
1011 bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe) 1324 bb_vars = get_bb_vars(['FILE', 'SRC_URI', 'LAYERSERIES_CORENAMES'], testrecipe)
1012 recipefile = bb_vars['FILE'] 1325 recipefile = bb_vars['FILE']
1013 src_uri = bb_vars['SRC_URI'] 1326 src_uri = bb_vars['SRC_URI']
1327 corenames = bb_vars['LAYERSERIES_CORENAMES']
1014 self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe) 1328 self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe)
1015 for entry in src_uri.split(): 1329 for entry in src_uri.split():
1016 if entry.startswith('git://'): 1330 if entry.startswith('git://'):
@@ -1041,7 +1355,7 @@ class DevtoolUpdateTests(DevtoolBase):
1041 f.write('BBFILE_PATTERN_oeselftesttemplayer = "^${LAYERDIR}/"\n') 1355 f.write('BBFILE_PATTERN_oeselftesttemplayer = "^${LAYERDIR}/"\n')
1042 f.write('BBFILE_PRIORITY_oeselftesttemplayer = "999"\n') 1356 f.write('BBFILE_PRIORITY_oeselftesttemplayer = "999"\n')
1043 f.write('BBFILE_PATTERN_IGNORE_EMPTY_oeselftesttemplayer = "1"\n') 1357 f.write('BBFILE_PATTERN_IGNORE_EMPTY_oeselftesttemplayer = "1"\n')
1044 f.write('LAYERSERIES_COMPAT_oeselftesttemplayer = "${LAYERSERIES_COMPAT_core}"\n') 1358 f.write('LAYERSERIES_COMPAT_oeselftesttemplayer = "%s"\n' % corenames)
1045 self.add_command_to_tearDown('bitbake-layers remove-layer %s || true' % templayerdir) 1359 self.add_command_to_tearDown('bitbake-layers remove-layer %s || true' % templayerdir)
1046 result = runCmd('bitbake-layers add-layer %s' % templayerdir, cwd=self.builddir) 1360 result = runCmd('bitbake-layers add-layer %s' % templayerdir, cwd=self.builddir)
1047 # Create the bbappend 1361 # Create the bbappend
@@ -1069,7 +1383,7 @@ class DevtoolUpdateTests(DevtoolBase):
1069 with open(bbappendfile, 'r') as f: 1383 with open(bbappendfile, 'r') as f:
1070 self.assertEqual(expectedlines, set(f.readlines())) 1384 self.assertEqual(expectedlines, set(f.readlines()))
1071 # Drop new commit and check SRCREV changes 1385 # Drop new commit and check SRCREV changes
1072 result = runCmd('git reset HEAD^', cwd=tempsrcdir) 1386 result = runCmd('git reset HEAD^ --hard', cwd=tempsrcdir)
1073 result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir)) 1387 result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir))
1074 self.assertNotExists(os.path.join(appenddir, testrecipe), 'Patch directory should not be created') 1388 self.assertNotExists(os.path.join(appenddir, testrecipe), 'Patch directory should not be created')
1075 result = runCmd('git rev-parse HEAD', cwd=tempsrcdir) 1389 result = runCmd('git rev-parse HEAD', cwd=tempsrcdir)
@@ -1081,6 +1395,7 @@ class DevtoolUpdateTests(DevtoolBase):
1081 self.assertEqual(expectedlines, set(f.readlines())) 1395 self.assertEqual(expectedlines, set(f.readlines()))
1082 # Put commit back and check we can run it if layer isn't in bblayers.conf 1396 # Put commit back and check we can run it if layer isn't in bblayers.conf
1083 os.remove(bbappendfile) 1397 os.remove(bbappendfile)
1398 result = runCmd('echo "# Additional line" >> Makefile.am', cwd=tempsrcdir)
1084 result = runCmd('git commit -a -m "Change the Makefile"', cwd=tempsrcdir) 1399 result = runCmd('git commit -a -m "Change the Makefile"', cwd=tempsrcdir)
1085 result = runCmd('bitbake-layers remove-layer %s' % templayerdir, cwd=self.builddir) 1400 result = runCmd('bitbake-layers remove-layer %s' % templayerdir, cwd=self.builddir)
1086 result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir)) 1401 result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir))
@@ -1112,22 +1427,39 @@ class DevtoolUpdateTests(DevtoolBase):
1112 # Try building just to ensure we haven't broken that 1427 # Try building just to ensure we haven't broken that
1113 bitbake("%s" % testrecipe) 1428 bitbake("%s" % testrecipe)
1114 # Edit / commit local source 1429 # Edit / commit local source
1115 runCmd('echo "/* Foobar */" >> oe-local-files/makedevs.c', cwd=tempdir) 1430 runCmd('echo "/* Foobar */" >> makedevs.c', cwd=tempdir)
1116 runCmd('echo "Foo" > oe-local-files/new-local', cwd=tempdir) 1431 runCmd('echo "Foo" > new-local', cwd=tempdir)
1117 runCmd('echo "Bar" > new-file', cwd=tempdir) 1432 runCmd('echo "Bar" > new-file', cwd=tempdir)
1118 runCmd('git add new-file', cwd=tempdir) 1433 runCmd('git add new-file', cwd=tempdir)
1119 runCmd('git commit -m "Add new file"', cwd=tempdir) 1434 runCmd('git commit -m "Add new file"', cwd=tempdir)
1120 self.add_command_to_tearDown('cd %s; git clean -fd .; git checkout .' % 1435 runCmd('git add new-local', cwd=tempdir)
1121 os.path.dirname(recipefile))
1122 runCmd('devtool update-recipe %s' % testrecipe) 1436 runCmd('devtool update-recipe %s' % testrecipe)
1123 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)), 1437 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
1124 (' M', '.*/makedevs/makedevs.c$'), 1438 (' M', '.*/makedevs/makedevs.c$'),
1125 ('??', '.*/makedevs/new-local$'), 1439 ('??', '.*/makedevs/new-local$'),
1126 ('??', '.*/makedevs/0001-Add-new-file.patch$')] 1440 ('??', '.*/makedevs/0001-Add-new-file.patch$')]
1127 self._check_repo_status(os.path.dirname(recipefile), expected_status) 1441 self._check_repo_status(os.path.dirname(recipefile), expected_status)
1128 1442 # Now try to update recipe in another layer, so first, clean it
1129 def test_devtool_update_recipe_local_files_2(self): 1443 runCmd('cd %s; git clean -fd .; git checkout .' % os.path.dirname(recipefile))
1130 """Check local source files support when oe-local-files is in Git""" 1444 # Create a temporary layer and add it to bblayers.conf
1445 self._create_temp_layer(templayerdir, True, 'templayer')
1446 # Update recipe in templayer
1447 result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir))
1448 self.assertNotIn('WARNING:', result.output)
1449 # Check recipe is still clean
1450 self._check_repo_status(os.path.dirname(recipefile), [])
1451 splitpath = os.path.dirname(recipefile).split(os.sep)
1452 appenddir = os.path.join(templayerdir, splitpath[-2], splitpath[-1])
1453 bbappendfile = self._check_bbappend(testrecipe, recipefile, appenddir)
1454 patchfile = os.path.join(appenddir, testrecipe, '0001-Add-new-file.patch')
1455 new_local_file = os.path.join(appenddir, testrecipe, 'new_local')
1456 local_file = os.path.join(appenddir, testrecipe, 'makedevs.c')
1457 self.assertExists(patchfile, 'Patch file 0001-Add-new-file.patch not created')
1458 self.assertExists(local_file, 'File makedevs.c not created')
1459 self.assertExists(patchfile, 'File new_local not created')
1460
1461 def _test_devtool_update_recipe_local_files_2(self):
1462 """Check local source files support when editing local files in Git"""
1131 testrecipe = 'devtool-test-local' 1463 testrecipe = 'devtool-test-local'
1132 recipefile = get_bb_var('FILE', testrecipe) 1464 recipefile = get_bb_var('FILE', testrecipe)
1133 recipedir = os.path.dirname(recipefile) 1465 recipedir = os.path.dirname(recipefile)
@@ -1142,17 +1474,13 @@ class DevtoolUpdateTests(DevtoolBase):
1142 result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir)) 1474 result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
1143 # Check git repo 1475 # Check git repo
1144 self._check_src_repo(tempdir) 1476 self._check_src_repo(tempdir)
1145 # Add oe-local-files to Git
1146 runCmd('rm oe-local-files/.gitignore', cwd=tempdir)
1147 runCmd('git add oe-local-files', cwd=tempdir)
1148 runCmd('git commit -m "Add local sources"', cwd=tempdir)
1149 # Edit / commit local sources 1477 # Edit / commit local sources
1150 runCmd('echo "# Foobar" >> oe-local-files/file1', cwd=tempdir) 1478 runCmd('echo "# Foobar" >> file1', cwd=tempdir)
1151 runCmd('git commit -am "Edit existing file"', cwd=tempdir) 1479 runCmd('git commit -am "Edit existing file"', cwd=tempdir)
1152 runCmd('git rm oe-local-files/file2', cwd=tempdir) 1480 runCmd('git rm file2', cwd=tempdir)
1153 runCmd('git commit -m"Remove file"', cwd=tempdir) 1481 runCmd('git commit -m"Remove file"', cwd=tempdir)
1154 runCmd('echo "Foo" > oe-local-files/new-local', cwd=tempdir) 1482 runCmd('echo "Foo" > new-local', cwd=tempdir)
1155 runCmd('git add oe-local-files/new-local', cwd=tempdir) 1483 runCmd('git add new-local', cwd=tempdir)
1156 runCmd('git commit -m "Add new local file"', cwd=tempdir) 1484 runCmd('git commit -m "Add new local file"', cwd=tempdir)
1157 runCmd('echo "Gar" > new-file', cwd=tempdir) 1485 runCmd('echo "Gar" > new-file', cwd=tempdir)
1158 runCmd('git add new-file', cwd=tempdir) 1486 runCmd('git add new-file', cwd=tempdir)
@@ -1161,7 +1489,7 @@ class DevtoolUpdateTests(DevtoolBase):
1161 os.path.dirname(recipefile)) 1489 os.path.dirname(recipefile))
1162 # Checkout unmodified file to working copy -> devtool should still pick 1490 # Checkout unmodified file to working copy -> devtool should still pick
1163 # the modified version from HEAD 1491 # the modified version from HEAD
1164 runCmd('git checkout HEAD^ -- oe-local-files/file1', cwd=tempdir) 1492 runCmd('git checkout HEAD^ -- file1', cwd=tempdir)
1165 runCmd('devtool update-recipe %s' % testrecipe) 1493 runCmd('devtool update-recipe %s' % testrecipe)
1166 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)), 1494 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
1167 (' M', '.*/file1$'), 1495 (' M', '.*/file1$'),
@@ -1236,7 +1564,7 @@ class DevtoolUpdateTests(DevtoolBase):
1236 # (don't bother with cleaning the recipe on teardown, we won't be building it) 1564 # (don't bother with cleaning the recipe on teardown, we won't be building it)
1237 result = runCmd('devtool modify %s' % testrecipe) 1565 result = runCmd('devtool modify %s' % testrecipe)
1238 # Modify one file 1566 # Modify one file
1239 runCmd('echo "Another line" >> file2', cwd=os.path.join(self.workspacedir, 'sources', testrecipe, 'oe-local-files')) 1567 runCmd('echo "Another line" >> file2', cwd=os.path.join(self.workspacedir, 'sources', testrecipe))
1240 self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile))) 1568 self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)))
1241 result = runCmd('devtool update-recipe %s' % testrecipe) 1569 result = runCmd('devtool update-recipe %s' % testrecipe)
1242 expected_status = [(' M', '.*/%s/file2$' % testrecipe)] 1570 expected_status = [(' M', '.*/%s/file2$' % testrecipe)]
@@ -1259,7 +1587,7 @@ class DevtoolUpdateTests(DevtoolBase):
1259 # Modify one file 1587 # Modify one file
1260 srctree = os.path.join(self.workspacedir, 'sources', testrecipe) 1588 srctree = os.path.join(self.workspacedir, 'sources', testrecipe)
1261 runCmd('echo "Another line" >> README', cwd=srctree) 1589 runCmd('echo "Another line" >> README', cwd=srctree)
1262 runCmd('git commit -a --amend --no-edit', cwd=srctree) 1590 runCmd('git commit -a --amend --no-edit --no-verify', cwd=srctree)
1263 self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile))) 1591 self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)))
1264 result = runCmd('devtool update-recipe %s' % testrecipe) 1592 result = runCmd('devtool update-recipe %s' % testrecipe)
1265 expected_status = [(' M', '.*/%s/readme.patch.gz$' % testrecipe)] 1593 expected_status = [(' M', '.*/%s/readme.patch.gz$' % testrecipe)]
@@ -1295,6 +1623,121 @@ class DevtoolUpdateTests(DevtoolBase):
1295 expected_status = [] 1623 expected_status = []
1296 self._check_repo_status(os.path.dirname(recipefile), expected_status) 1624 self._check_repo_status(os.path.dirname(recipefile), expected_status)
1297 1625
1626 def test_devtool_finish_modify_git_subdir(self):
1627 # Check preconditions
1628 testrecipe = 'dos2unix'
1629 self.append_config('ERROR_QA:remove:pn-dos2unix = "patch-status"\n')
1630 bb_vars = get_bb_vars(['SRC_URI', 'S', 'UNPACKDIR', 'FILE', 'BB_GIT_DEFAULT_DESTSUFFIX'], testrecipe)
1631 self.assertIn('git://', bb_vars['SRC_URI'], 'This test expects the %s recipe to be a git recipe' % testrecipe)
1632 unpackdir_git = '%s/%s/' % (bb_vars['UNPACKDIR'], bb_vars['BB_GIT_DEFAULT_DESTSUFFIX'])
1633 if not bb_vars['S'].startswith(unpackdir_git):
1634 self.fail('This test expects the %s recipe to be building from a subdirectory of the git repo' % testrecipe)
1635 subdir = bb_vars['S'].split(unpackdir_git, 1)[1]
1636 # Clean up anything in the workdir/sysroot/sstate cache
1637 bitbake('%s -c cleansstate' % testrecipe)
1638 # Try modifying a recipe
1639 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
1640 self.track_for_cleanup(tempdir)
1641 self.track_for_cleanup(self.workspacedir)
1642 self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
1643 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
1644 result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
1645 testsrcfile = os.path.join(tempdir, subdir, 'dos2unix.c')
1646 self.assertExists(testsrcfile, 'Extracted source could not be found')
1647 self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. devtool output: %s' % result.output)
1648 self.assertNotExists(os.path.join(tempdir, subdir, '.git'), 'Subdirectory has been initialised as a git repo')
1649 # Check git repo
1650 self._check_src_repo(tempdir)
1651 # Modify file
1652 runCmd("sed -i '1s:^:/* Add a comment */\\n:' %s" % testsrcfile)
1653 result = runCmd('git commit -a -m "Add a comment"', cwd=tempdir)
1654 # Now try updating original recipe
1655 recipefile = bb_vars['FILE']
1656 recipedir = os.path.dirname(recipefile)
1657 self.add_command_to_tearDown('cd %s; rm -f %s/*.patch; git checkout .' % (recipedir, testrecipe))
1658 result = runCmd('devtool update-recipe %s' % testrecipe)
1659 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
1660 ('??', '.*/%s/%s/$' % (testrecipe, testrecipe))]
1661 self._check_repo_status(os.path.dirname(recipefile), expected_status)
1662 result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile))
1663 removelines = ['SRC_URI = "git://.*"']
1664 addlines = [
1665 'SRC_URI = "git://.* \\\\',
1666 'file://0001-Add-a-comment.patch;patchdir=.. \\\\',
1667 '"'
1668 ]
1669 self._check_diff(result.output, addlines, removelines)
1670 # Put things back so we can run devtool finish on a different layer
1671 runCmd('cd %s; rm -f %s/*.patch; git checkout .' % (recipedir, testrecipe))
1672 # Run devtool finish
1673 res = re.search('recipes-.*', recipedir)
1674 self.assertTrue(res, 'Unable to find recipe subdirectory')
1675 recipesubdir = res[0]
1676 self.add_command_to_tearDown('rm -rf %s' % os.path.join(self.testlayer_path, recipesubdir))
1677 result = runCmd('devtool finish %s meta-selftest' % testrecipe)
1678 # Check bbappend file contents
1679 appendfn = os.path.join(self.testlayer_path, recipesubdir, '%s_%%.bbappend' % testrecipe)
1680 with open(appendfn, 'r') as f:
1681 appendlines = f.readlines()
1682 expected_appendlines = [
1683 'FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
1684 '\n',
1685 'SRC_URI += "file://0001-Add-a-comment.patch;patchdir=.."\n',
1686 '\n'
1687 ]
1688 self.assertEqual(appendlines, expected_appendlines)
1689 self.assertExists(os.path.join(os.path.dirname(appendfn), testrecipe, '0001-Add-a-comment.patch'))
1690 # Try building
1691 bitbake('%s -c patch' % testrecipe)
1692
1693 def test_devtool_git_submodules(self):
1694 # This tests if we can add a patch in a git submodule and extract it properly using devtool finish
1695 # Check preconditions
1696 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
1697 self.track_for_cleanup(self.workspacedir)
1698 recipe = 'vulkan-samples'
1699 src_uri = get_bb_var('SRC_URI', recipe)
1700 self.assertIn('gitsm://', src_uri, 'This test expects the %s recipe to be a git recipe with submodules' % recipe)
1701 oldrecipefile = get_bb_var('FILE', recipe)
1702 recipedir = os.path.dirname(oldrecipefile)
1703 result = runCmd('git status --porcelain .', cwd=recipedir)
1704 if result.output.strip():
1705 self.fail('Recipe directory for %s contains uncommitted changes' % recipe)
1706 self.assertIn('/meta/', recipedir)
1707 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
1708 self.track_for_cleanup(tempdir)
1709 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
1710 result = runCmd('devtool modify %s %s' % (recipe, tempdir))
1711 self.assertExists(os.path.join(tempdir, 'CMakeLists.txt'), 'Extracted source could not be found')
1712 # Test devtool status
1713 result = runCmd('devtool status')
1714 self.assertIn(recipe, result.output)
1715 self.assertIn(tempdir, result.output)
1716 # Modify a source file in a submodule, (grab the first one)
1717 result = runCmd('git submodule --quiet foreach \'echo $sm_path\'', cwd=tempdir)
1718 submodule = result.output.splitlines()[0]
1719 submodule_path = os.path.join(tempdir, submodule)
1720 runCmd('echo "#This is a first comment" >> testfile', cwd=submodule_path)
1721 result = runCmd('git status --porcelain . ', cwd=submodule_path)
1722 self.assertIn("testfile", result.output)
1723 runCmd('git add testfile; git commit -m "Adding a new file"', cwd=submodule_path)
1724
1725 # Try finish to the original layer
1726 self.add_command_to_tearDown('rm -rf %s ; cd %s ; git checkout %s' % (recipedir, os.path.dirname(recipedir), recipedir))
1727 runCmd('devtool finish -f %s meta' % recipe)
1728 result = runCmd('devtool status')
1729 self.assertNotIn(recipe, result.output, 'Recipe should have been reset by finish but wasn\'t')
1730 self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after finish')
1731 expected_status = [(' M', '.*/%s$' % os.path.basename(oldrecipefile)),
1732 ('??', '.*/.*-Adding-a-new-file.patch$')]
1733 self._check_repo_status(recipedir, expected_status)
1734 # Make sure the patch is added to the recipe with the correct "patchdir" option
1735 result = runCmd('git diff .', cwd=recipedir)
1736 addlines = [
1737 'file://0001-Adding-a-new-file.patch;patchdir=%s \\\\' % submodule
1738 ]
1739 self._check_diff(result.output, addlines, [])
1740
1298class DevtoolExtractTests(DevtoolBase): 1741class DevtoolExtractTests(DevtoolBase):
1299 1742
1300 def test_devtool_extract(self): 1743 def test_devtool_extract(self):
@@ -1317,6 +1760,8 @@ class DevtoolExtractTests(DevtoolBase):
1317 self.assertExists(os.path.join(tempdir, 'Makefile.am'), 'Extracted source could not be found') 1760 self.assertExists(os.path.join(tempdir, 'Makefile.am'), 'Extracted source could not be found')
1318 self._check_src_repo(tempdir) 1761 self._check_src_repo(tempdir)
1319 1762
1763class DevtoolResetTests(DevtoolBase):
1764
1320 def test_devtool_reset_all(self): 1765 def test_devtool_reset_all(self):
1321 tempdir = tempfile.mkdtemp(prefix='devtoolqa') 1766 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
1322 self.track_for_cleanup(tempdir) 1767 self.track_for_cleanup(tempdir)
@@ -1343,33 +1788,30 @@ class DevtoolExtractTests(DevtoolBase):
1343 matches2 = glob.glob(stampprefix2 + '*') 1788 matches2 = glob.glob(stampprefix2 + '*')
1344 self.assertFalse(matches2, 'Stamp files exist for recipe %s that should have been cleaned' % testrecipe2) 1789 self.assertFalse(matches2, 'Stamp files exist for recipe %s that should have been cleaned' % testrecipe2)
1345 1790
1791 def test_devtool_reset_re_plus_plus(self):
1792 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
1793 self.track_for_cleanup(tempdir)
1794 self.track_for_cleanup(self.workspacedir)
1795 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
1796 testrecipe = 'devtool-test-reset-re++'
1797 result = runCmd('devtool modify %s' % testrecipe)
1798 result = runCmd('devtool reset -n %s' % testrecipe)
1799 self.assertIn(testrecipe, result.output)
1800 result = runCmd('devtool status')
1801 self.assertNotIn(testrecipe, result.output)
1802 self.assertNotExists(os.path.join(self.workspacedir, 'recipes', testrecipe), 'Recipe directory should not exist after resetting')
1803
1804class DevtoolDeployTargetTests(DevtoolBase):
1805
1806 @OETestTag("runqemu")
1346 def test_devtool_deploy_target(self): 1807 def test_devtool_deploy_target(self):
1347 # NOTE: Whilst this test would seemingly be better placed as a runtime test, 1808 self._check_runqemu_prerequisites()
1348 # unfortunately the runtime tests run under bitbake and you can't run
1349 # devtool within bitbake (since devtool needs to run bitbake itself).
1350 # Additionally we are testing build-time functionality as well, so
1351 # really this has to be done as an oe-selftest test.
1352 #
1353 # Check preconditions
1354 machine = get_bb_var('MACHINE')
1355 if not machine.startswith('qemu'):
1356 self.skipTest('This test only works with qemu machines')
1357 if not os.path.exists('/etc/runqemu-nosudo'):
1358 self.skipTest('You must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
1359 result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ip tuntap show', ignore_status=True)
1360 if result.status != 0:
1361 result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ifconfig -a', ignore_status=True)
1362 if result.status != 0:
1363 self.skipTest('Failed to determine if tap devices exist with ifconfig or ip: %s' % result.output)
1364 for line in result.output.splitlines():
1365 if line.startswith('tap'):
1366 break
1367 else:
1368 self.skipTest('No tap devices found - you must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
1369 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory') 1809 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
1370 # Definitions 1810 # Definitions
1371 testrecipe = 'mdadm' 1811 testrecipe = 'mdadm'
1372 testfile = '/sbin/mdadm' 1812 testfile = '/sbin/mdadm'
1813 if "usrmerge" in get_bb_var('DISTRO_FEATURES'):
1814 testfile = '/usr/sbin/mdadm'
1373 testimage = 'oe-selftest-image' 1815 testimage = 'oe-selftest-image'
1374 testcommand = '/sbin/mdadm --help' 1816 testcommand = '/sbin/mdadm --help'
1375 # Build an image to run 1817 # Build an image to run
@@ -1428,6 +1870,8 @@ class DevtoolExtractTests(DevtoolBase):
1428 result = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, testcommand), ignore_status=True) 1870 result = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, testcommand), ignore_status=True)
1429 self.assertNotEqual(result, 0, 'undeploy-target did not remove command as it should have') 1871 self.assertNotEqual(result, 0, 'undeploy-target did not remove command as it should have')
1430 1872
1873class DevtoolBuildImageTests(DevtoolBase):
1874
1431 def test_devtool_build_image(self): 1875 def test_devtool_build_image(self):
1432 """Test devtool build-image plugin""" 1876 """Test devtool build-image plugin"""
1433 # Check preconditions 1877 # Check preconditions
@@ -1463,6 +1907,14 @@ class DevtoolExtractTests(DevtoolBase):
1463 1907
1464class DevtoolUpgradeTests(DevtoolBase): 1908class DevtoolUpgradeTests(DevtoolBase):
1465 1909
1910 def setUp(self):
1911 super().setUp()
1912 try:
1913 runCmd("git config --global user.name")
1914 runCmd("git config --global user.email")
1915 except:
1916 self.skip("Git user.name and user.email must be set")
1917
1466 def test_devtool_upgrade(self): 1918 def test_devtool_upgrade(self):
1467 # Check preconditions 1919 # Check preconditions
1468 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory') 1920 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
@@ -1543,6 +1995,100 @@ class DevtoolUpgradeTests(DevtoolBase):
1543 self.assertNotIn(recipe, result.output) 1995 self.assertNotIn(recipe, result.output)
1544 self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after resetting') 1996 self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after resetting')
1545 1997
1998 def test_devtool_upgrade_drop_md5sum(self):
1999 # Check preconditions
2000 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
2001 self.track_for_cleanup(self.workspacedir)
2002 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
2003 # For the moment, we are using a real recipe.
2004 recipe = 'devtool-upgrade-test3'
2005 version = '1.6.0'
2006 oldrecipefile = get_bb_var('FILE', recipe)
2007 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
2008 self.track_for_cleanup(tempdir)
2009 # Check upgrade. Code does not check if new PV is older or newer that current PV, so, it may be that
2010 # we are downgrading instead of upgrading.
2011 result = runCmd('devtool upgrade %s %s -V %s' % (recipe, tempdir, version))
2012 # Check new recipe file is present
2013 newrecipefile = os.path.join(self.workspacedir, 'recipes', recipe, '%s_%s.bb' % (recipe, version))
2014 self.assertExists(newrecipefile, 'Recipe file should exist after upgrade')
2015 # Check recipe got changed as expected
2016 with open(oldrecipefile + '.upgraded', 'r') as f:
2017 desiredlines = f.readlines()
2018 with open(newrecipefile, 'r') as f:
2019 newlines = f.readlines()
2020 self.assertEqual(desiredlines, newlines)
2021
2022 def test_devtool_upgrade_all_checksums(self):
2023 # Check preconditions
2024 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
2025 self.track_for_cleanup(self.workspacedir)
2026 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
2027 # For the moment, we are using a real recipe.
2028 recipe = 'devtool-upgrade-test4'
2029 version = '1.6.0'
2030 oldrecipefile = get_bb_var('FILE', recipe)
2031 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
2032 self.track_for_cleanup(tempdir)
2033 # Check upgrade. Code does not check if new PV is older or newer that current PV, so, it may be that
2034 # we are downgrading instead of upgrading.
2035 result = runCmd('devtool upgrade %s %s -V %s' % (recipe, tempdir, version))
2036 # Check new recipe file is present
2037 newrecipefile = os.path.join(self.workspacedir, 'recipes', recipe, '%s_%s.bb' % (recipe, version))
2038 self.assertExists(newrecipefile, 'Recipe file should exist after upgrade')
2039 # Check recipe got changed as expected
2040 with open(oldrecipefile + '.upgraded', 'r') as f:
2041 desiredlines = f.readlines()
2042 with open(newrecipefile, 'r') as f:
2043 newlines = f.readlines()
2044 self.assertEqual(desiredlines, newlines)
2045
2046 def test_devtool_upgrade_recipe_upgrade_extra_tasks(self):
2047 # Check preconditions
2048 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
2049 self.track_for_cleanup(self.workspacedir)
2050 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
2051 recipe = 'python3-guessing-game'
2052 version = '0.2.0'
2053 commit = '40cf004c2772ffa20ea803fa3be1528a75be3e98'
2054 oldrecipefile = get_bb_var('FILE', recipe)
2055 oldcratesincfile = os.path.join(os.path.dirname(oldrecipefile), os.path.basename(oldrecipefile).strip('_git.bb') + '-crates.inc')
2056 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
2057 self.track_for_cleanup(tempdir)
2058 # Check that recipe is not already under devtool control
2059 result = runCmd('devtool status')
2060 self.assertNotIn(recipe, result.output)
2061 # Check upgrade
2062 result = runCmd('devtool upgrade %s %s --version %s --srcrev %s' % (recipe, tempdir, version, commit))
2063 # Check if srctree at least is populated
2064 self.assertTrue(len(os.listdir(tempdir)) > 0, 'srctree (%s) should be populated with new (%s) source code' % (tempdir, commit))
2065 # Check new recipe file and new -crates.inc files are present
2066 newrecipefile = os.path.join(self.workspacedir, 'recipes', recipe, os.path.basename(oldrecipefile))
2067 newcratesincfile = os.path.join(self.workspacedir, 'recipes', recipe, os.path.basename(oldcratesincfile))
2068 self.assertExists(newrecipefile, 'Recipe file should exist after upgrade')
2069 self.assertExists(newcratesincfile, 'Recipe crates.inc file should exist after upgrade')
2070 # Check devtool status and make sure recipe is present
2071 result = runCmd('devtool status')
2072 self.assertIn(recipe, result.output)
2073 self.assertIn(tempdir, result.output)
2074 # Check recipe got changed as expected
2075 with open(oldrecipefile + '.upgraded', 'r') as f:
2076 desiredlines = f.readlines()
2077 with open(newrecipefile, 'r') as f:
2078 newlines = f.readlines()
2079 self.assertEqual(desiredlines, newlines)
2080 # Check crates.inc got changed as expected
2081 with open(oldcratesincfile + '.upgraded', 'r') as f:
2082 desiredlines = f.readlines()
2083 with open(newcratesincfile, 'r') as f:
2084 newlines = f.readlines()
2085 self.assertEqual(desiredlines, newlines)
2086 # Check devtool reset recipe
2087 result = runCmd('devtool reset %s -n' % recipe)
2088 result = runCmd('devtool status')
2089 self.assertNotIn(recipe, result.output)
2090 self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after resetting')
2091
1546 def test_devtool_layer_plugins(self): 2092 def test_devtool_layer_plugins(self):
1547 """Test that devtool can use plugins from other layers. 2093 """Test that devtool can use plugins from other layers.
1548 2094
@@ -1561,7 +2107,15 @@ class DevtoolUpgradeTests(DevtoolBase):
1561 for p in paths: 2107 for p in paths:
1562 dstdir = os.path.join(dstdir, p) 2108 dstdir = os.path.join(dstdir, p)
1563 if not os.path.exists(dstdir): 2109 if not os.path.exists(dstdir):
1564 os.makedirs(dstdir) 2110 try:
2111 os.makedirs(dstdir)
2112 except PermissionError:
2113 return False
2114 except OSError as e:
2115 if e.errno == errno.EROFS:
2116 return False
2117 else:
2118 raise e
1565 if p == "lib": 2119 if p == "lib":
1566 # Can race with other tests 2120 # Can race with other tests
1567 self.add_command_to_tearDown('rmdir --ignore-fail-on-non-empty %s' % dstdir) 2121 self.add_command_to_tearDown('rmdir --ignore-fail-on-non-empty %s' % dstdir)
@@ -1569,8 +2123,12 @@ class DevtoolUpgradeTests(DevtoolBase):
1569 self.track_for_cleanup(dstdir) 2123 self.track_for_cleanup(dstdir)
1570 dstfile = os.path.join(dstdir, os.path.basename(srcfile)) 2124 dstfile = os.path.join(dstdir, os.path.basename(srcfile))
1571 if srcfile != dstfile: 2125 if srcfile != dstfile:
1572 shutil.copy(srcfile, dstfile) 2126 try:
2127 shutil.copy(srcfile, dstfile)
2128 except PermissionError:
2129 return False
1573 self.track_for_cleanup(dstfile) 2130 self.track_for_cleanup(dstfile)
2131 return True
1574 2132
1575 def test_devtool_load_plugin(self): 2133 def test_devtool_load_plugin(self):
1576 """Test that devtool loads only the first found plugin in BBPATH.""" 2134 """Test that devtool loads only the first found plugin in BBPATH."""
@@ -1588,15 +2146,17 @@ class DevtoolUpgradeTests(DevtoolBase):
1588 plugincontent = fh.readlines() 2146 plugincontent = fh.readlines()
1589 try: 2147 try:
1590 self.assertIn('meta-selftest', srcfile, 'wrong bbpath plugin found') 2148 self.assertIn('meta-selftest', srcfile, 'wrong bbpath plugin found')
1591 for path in searchpath: 2149 searchpath = [
1592 self._copy_file_with_cleanup(srcfile, path, 'lib', 'devtool') 2150 path for path in searchpath
2151 if self._copy_file_with_cleanup(srcfile, path, 'lib', 'devtool')
2152 ]
1593 result = runCmd("devtool --quiet count") 2153 result = runCmd("devtool --quiet count")
1594 self.assertEqual(result.output, '1') 2154 self.assertEqual(result.output, '1')
1595 result = runCmd("devtool --quiet multiloaded") 2155 result = runCmd("devtool --quiet multiloaded")
1596 self.assertEqual(result.output, "no") 2156 self.assertEqual(result.output, "no")
1597 for path in searchpath: 2157 for path in searchpath:
1598 result = runCmd("devtool --quiet bbdir") 2158 result = runCmd("devtool --quiet bbdir")
1599 self.assertEqual(result.output, path) 2159 self.assertEqual(os.path.realpath(result.output), os.path.realpath(path))
1600 os.unlink(os.path.join(result.output, 'lib', 'devtool', 'bbpath.py')) 2160 os.unlink(os.path.join(result.output, 'lib', 'devtool', 'bbpath.py'))
1601 finally: 2161 finally:
1602 with open(srcfile, 'w') as fh: 2162 with open(srcfile, 'w') as fh:
@@ -1777,6 +2337,52 @@ class DevtoolUpgradeTests(DevtoolBase):
1777 if files: 2337 if files:
1778 self.fail('Unexpected file(s) copied next to bbappend: %s' % ', '.join(files)) 2338 self.fail('Unexpected file(s) copied next to bbappend: %s' % ', '.join(files))
1779 2339
2340 def test_devtool_finish_update_patch(self):
2341 # This test uses a modified version of the sysdig recipe from meta-oe.
2342 # - The patches have been renamed.
2343 # - The dependencies are commented out since the recipe is not being
2344 # built.
2345 #
2346 # The sysdig recipe is interesting in that it fetches two different Git
2347 # repositories, and there are patches for both. This leads to that
2348 # devtool will create ignore commits as it uses Git submodules to keep
2349 # track of the second repository.
2350 #
2351 # This test will verify that the ignored commits actually are ignored
2352 # when a commit in between is modified. It will also verify that the
2353 # updated patch keeps its original name.
2354
2355 # Check preconditions
2356 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
2357 # Try modifying a recipe
2358 self.track_for_cleanup(self.workspacedir)
2359 recipe = 'sysdig-selftest'
2360 recipefile = get_bb_var('FILE', recipe)
2361 recipedir = os.path.dirname(recipefile)
2362 result = runCmd('git status --porcelain .', cwd=recipedir)
2363 if result.output.strip():
2364 self.fail('Recipe directory for %s contains uncommitted changes' % recipe)
2365 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
2366 self.track_for_cleanup(tempdir)
2367 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
2368 result = runCmd('devtool modify %s %s' % (recipe, tempdir))
2369 self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (recipedir, recipe, recipe, os.path.basename(recipefile)))
2370 self.assertExists(os.path.join(tempdir, 'CMakeLists.txt'), 'Extracted source could not be found')
2371 # Make a change to one of the existing commits
2372 result = runCmd('echo "# A comment " >> CMakeLists.txt', cwd=tempdir)
2373 result = runCmd('git status --porcelain', cwd=tempdir)
2374 self.assertIn('M CMakeLists.txt', result.output)
2375 result = runCmd('git commit --fixup HEAD^ CMakeLists.txt', cwd=tempdir)
2376 result = runCmd('git show -s --format=%s', cwd=tempdir)
2377 self.assertIn('fixup! cmake: Pass PROBE_NAME via CFLAGS', result.output)
2378 result = runCmd('GIT_SEQUENCE_EDITOR=true git rebase -i --autosquash devtool-base', cwd=tempdir)
2379 result = runCmd('devtool finish %s meta-selftest' % recipe)
2380 result = runCmd('devtool status')
2381 self.assertNotIn(recipe, result.output, 'Recipe should have been reset by finish but wasn\'t')
2382 self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after finish')
2383 expected_status = [(' M', '.*/0099-cmake-Pass-PROBE_NAME-via-CFLAGS.patch$')]
2384 self._check_repo_status(recipedir, expected_status)
2385
1780 def test_devtool_rename(self): 2386 def test_devtool_rename(self):
1781 # Check preconditions 2387 # Check preconditions
1782 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory') 2388 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
@@ -1808,12 +2414,11 @@ class DevtoolUpgradeTests(DevtoolBase):
1808 newsrctree = os.path.join(self.workspacedir, 'sources', newrecipename) 2414 newsrctree = os.path.join(self.workspacedir, 'sources', newrecipename)
1809 self.assertExists(newsrctree, 'Source directory not renamed') 2415 self.assertExists(newsrctree, 'Source directory not renamed')
1810 checkvars = {} 2416 checkvars = {}
1811 checkvars['S'] = '${WORKDIR}/%s-%s' % (recipename, recipever) 2417 checkvars['S'] = '${UNPACKDIR}/%s-%s' % (recipename, recipever)
1812 checkvars['SRC_URI'] = url 2418 checkvars['SRC_URI'] = url
1813 self._test_recipe_contents(newrecipefile, checkvars, []) 2419 self._test_recipe_contents(newrecipefile, checkvars, [])
1814 # Try again - change just name this time 2420 # Try again - change just name this time
1815 result = runCmd('devtool reset -n %s' % newrecipename) 2421 result = runCmd('devtool reset -n %s' % newrecipename)
1816 shutil.rmtree(newsrctree)
1817 add_recipe() 2422 add_recipe()
1818 newrecipefile = os.path.join(self.workspacedir, 'recipes', newrecipename, '%s_%s.bb' % (newrecipename, recipever)) 2423 newrecipefile = os.path.join(self.workspacedir, 'recipes', newrecipename, '%s_%s.bb' % (newrecipename, recipever))
1819 result = runCmd('devtool rename %s %s' % (recipename, newrecipename)) 2424 result = runCmd('devtool rename %s %s' % (recipename, newrecipename))
@@ -1821,19 +2426,18 @@ class DevtoolUpgradeTests(DevtoolBase):
1821 self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipename), 'Old recipe directory still exists') 2426 self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipename), 'Old recipe directory still exists')
1822 self.assertExists(os.path.join(self.workspacedir, 'sources', newrecipename), 'Source directory not renamed') 2427 self.assertExists(os.path.join(self.workspacedir, 'sources', newrecipename), 'Source directory not renamed')
1823 checkvars = {} 2428 checkvars = {}
1824 checkvars['S'] = '${WORKDIR}/%s-${PV}' % recipename 2429 checkvars['S'] = '${UNPACKDIR}/%s-${PV}' % recipename
1825 checkvars['SRC_URI'] = url.replace(recipever, '${PV}') 2430 checkvars['SRC_URI'] = url.replace(recipever, '${PV}')
1826 self._test_recipe_contents(newrecipefile, checkvars, []) 2431 self._test_recipe_contents(newrecipefile, checkvars, [])
1827 # Try again - change just version this time 2432 # Try again - change just version this time
1828 result = runCmd('devtool reset -n %s' % newrecipename) 2433 result = runCmd('devtool reset -n %s' % newrecipename)
1829 shutil.rmtree(newsrctree)
1830 add_recipe() 2434 add_recipe()
1831 newrecipefile = os.path.join(self.workspacedir, 'recipes', recipename, '%s_%s.bb' % (recipename, newrecipever)) 2435 newrecipefile = os.path.join(self.workspacedir, 'recipes', recipename, '%s_%s.bb' % (recipename, newrecipever))
1832 result = runCmd('devtool rename %s -V %s' % (recipename, newrecipever)) 2436 result = runCmd('devtool rename %s -V %s' % (recipename, newrecipever))
1833 self.assertExists(newrecipefile, 'Recipe file not renamed') 2437 self.assertExists(newrecipefile, 'Recipe file not renamed')
1834 self.assertExists(os.path.join(self.workspacedir, 'sources', recipename), 'Source directory no longer exists') 2438 self.assertExists(os.path.join(self.workspacedir, 'sources', recipename), 'Source directory no longer exists')
1835 checkvars = {} 2439 checkvars = {}
1836 checkvars['S'] = '${WORKDIR}/${BPN}-%s' % recipever 2440 checkvars['S'] = '${UNPACKDIR}/${BPN}-%s' % recipever
1837 checkvars['SRC_URI'] = url 2441 checkvars['SRC_URI'] = url
1838 self._test_recipe_contents(newrecipefile, checkvars, []) 2442 self._test_recipe_contents(newrecipefile, checkvars, [])
1839 2443
@@ -1858,8 +2462,9 @@ class DevtoolUpgradeTests(DevtoolBase):
1858 Expected: devtool modify is able to checkout the source of the kernel 2462 Expected: devtool modify is able to checkout the source of the kernel
1859 and modification to the source and configurations are reflected 2463 and modification to the source and configurations are reflected
1860 when building the kernel. 2464 when building the kernel.
1861 """ 2465 """
1862 kernel_provider = get_bb_var('PREFERRED_PROVIDER_virtual/kernel') 2466 kernel_provider = self.td['PREFERRED_PROVIDER_virtual/kernel']
2467
1863 # Clean up the environment 2468 # Clean up the environment
1864 bitbake('%s -c clean' % kernel_provider) 2469 bitbake('%s -c clean' % kernel_provider)
1865 tempdir = tempfile.mkdtemp(prefix='devtoolqa') 2470 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
@@ -1886,33 +2491,540 @@ class DevtoolUpgradeTests(DevtoolBase):
1886 self.assertExists(os.path.join(tempdir, 'Makefile'), 'Extracted source could not be found') 2491 self.assertExists(os.path.join(tempdir, 'Makefile'), 'Extracted source could not be found')
1887 #Step 4.2 2492 #Step 4.2
1888 configfile = os.path.join(tempdir,'.config') 2493 configfile = os.path.join(tempdir,'.config')
1889 diff = runCmd('diff %s %s' % (tmpconfig, configfile)) 2494 runCmd('diff %s %s' % (tmpconfig, configfile))
1890 self.assertEqual(0,diff.status,'Kernel .config file is not the same using bitbake and devtool') 2495
1891 #Step 4.3 2496 #Step 4.3
1892 #NOTE: virtual/kernel is mapped to kernel_provider 2497 #NOTE: virtual/kernel is mapped to kernel_provider
1893 result = runCmd('devtool build %s' % kernel_provider) 2498 runCmd('devtool build %s' % kernel_provider)
1894 self.assertEqual(0,result.status,'Cannot build kernel using `devtool build`')
1895 kernelfile = os.path.join(get_bb_var('KBUILD_OUTPUT', kernel_provider), 'vmlinux') 2499 kernelfile = os.path.join(get_bb_var('KBUILD_OUTPUT', kernel_provider), 'vmlinux')
1896 self.assertExists(kernelfile, 'Kernel was not build correctly') 2500 self.assertExists(kernelfile, 'Kernel was not build correctly')
1897 2501
1898 #Modify the kernel source 2502 #Modify the kernel source
1899 modfile = os.path.join(tempdir,'arch/x86/boot/header.S') 2503 modfile = os.path.join(tempdir, 'init/version.c')
1900 modstring = "Use a boot loader. Devtool testing." 2504 # Moved to uts.h in 6.1 onwards
1901 modapplied = runCmd("sed -i 's/Use a boot loader./%s/' %s" % (modstring, modfile)) 2505 modfile2 = os.path.join(tempdir, 'include/linux/uts.h')
1902 self.assertEqual(0,modapplied.status,'Modification to %s on kernel source failed' % modfile) 2506 runCmd("sed -i 's/Linux/LiNuX/g' %s %s" % (modfile, modfile2))
2507
1903 #Modify the configuration 2508 #Modify the configuration
1904 codeconfigfile = os.path.join(tempdir,'.config.new') 2509 codeconfigfile = os.path.join(tempdir, '.config.new')
1905 modconfopt = "CONFIG_SG_POOL=n" 2510 modconfopt = "CONFIG_SG_POOL=n"
1906 modconf = runCmd("sed -i 's/CONFIG_SG_POOL=y/%s/' %s" % (modconfopt, codeconfigfile)) 2511 runCmd("sed -i 's/CONFIG_SG_POOL=y/%s/' %s" % (modconfopt, codeconfigfile))
1907 self.assertEqual(0,modconf.status,'Modification to %s failed' % codeconfigfile) 2512
1908 #Build again kernel with devtool 2513 #Build again kernel with devtool
1909 rebuild = runCmd('devtool build %s' % kernel_provider) 2514 runCmd('devtool build %s' % kernel_provider)
1910 self.assertEqual(0,rebuild.status,'Fail to build kernel after modification of source and config') 2515
1911 #Step 4.4 2516 #Step 4.4
1912 bzimagename = 'bzImage-' + get_bb_var('KERNEL_VERSION_NAME', kernel_provider) 2517 runCmd("grep '%s' %s" % ('LiNuX', kernelfile))
1913 bzimagefile = os.path.join(get_bb_var('D', kernel_provider),'boot', bzimagename) 2518
1914 checkmodcode = runCmd("grep '%s' %s" % (modstring, bzimagefile))
1915 self.assertEqual(0,checkmodcode.status,'Modification on kernel source failed')
1916 #Step 4.5 2519 #Step 4.5
1917 checkmodconfg = runCmd("grep %s %s" % (modconfopt, codeconfigfile)) 2520 runCmd("grep %s %s" % (modconfopt, codeconfigfile))
1918 self.assertEqual(0,checkmodconfg.status,'Modification to configuration file failed') 2521
2522
class DevtoolIdeSdkTests(DevtoolBase):
    """Tests for `devtool ide-sdk`.

    The tests run `devtool ide-sdk` against the example recipes
    (cmake-example, meson-example) from meta-selftest and verify the
    generated IDE configuration files (VSCode tasks/settings, CMake
    presets), the install-and-deploy scripts, cross gdb debugging and
    the shared-sysroot SDK mode.  Tests tagged "runqemu" additionally
    boot a Qemu system image and deploy/debug over ssh.
    """

    def _write_bb_config(self, recipe_names):
        """Helper to write the bitbake local.conf file"""
        # image-combined-dbg + IMAGE_GEN_DEBUGFS provide the debug symbols
        # needed for remote debugging; gdbserver and the -ptest packages are
        # installed into the image for the on-target checks.
        conf_lines = [
            'IMAGE_CLASSES += "image-combined-dbg"',
            'IMAGE_GEN_DEBUGFS = "1"',
            'IMAGE_INSTALL:append = " gdbserver %s"' % ' '.join(
                [r + '-ptest' for r in recipe_names])
        ]
        self.write_config("\n".join(conf_lines))

    def _check_workspace(self):
        """Check if a workspace directory is available and setup the cleanup"""
        self.assertTrue(not os.path.exists(self.workspacedir),
                        'This test cannot be run with a workspace directory under the build directory')
        self.track_for_cleanup(self.workspacedir)
        self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')

    def _workspace_scripts_dir(self, recipe_name):
        """Return the per-recipe scripts directory inside the workspace layer."""
        return os.path.realpath(os.path.join(self.builddir, 'workspace', 'ide-sdk', recipe_name, 'scripts'))

    def _sources_scripts_dir(self, src_dir):
        """Return the oe-scripts sym-link location inside the extracted sources."""
        return os.path.realpath(os.path.join(src_dir, 'oe-scripts'))

    def _workspace_gdbinit_dir(self, recipe_name):
        """Return the gdbinit directory inside the workspace scripts directory."""
        return os.path.realpath(os.path.join(self.builddir, 'workspace', 'ide-sdk', recipe_name, 'scripts', 'gdbinit'))

    def _sources_gdbinit_dir(self, src_dir):
        """Return the oe-gdbinit sym-link location inside the extracted sources."""
        return os.path.realpath(os.path.join(src_dir, 'oe-gdbinit'))

    def _devtool_ide_sdk_recipe(self, recipe_name, build_file, testimage):
        """Setup a recipe for working with devtool ide-sdk

        Basically devtool modify -x followed by some tests

        :param recipe_name: recipe to put into the workspace
        :param build_file: file expected in the extracted sources
                           (e.g. CMakeLists.txt), used as a sanity check
        :param testimage: image recipe to pre-build for Qemu-based tests,
                          or None/falsy to skip the image build
        :return: the temporary directory holding the extracted sources
        """
        tempdir = tempfile.mkdtemp(prefix='devtoolqa')
        self.track_for_cleanup(tempdir)
        self.add_command_to_tearDown('bitbake -c clean %s' % recipe_name)

        result = runCmd('devtool modify %s -x %s --debug-build' % (recipe_name, tempdir))
        self.assertExists(os.path.join(tempdir, build_file),
                          'Extracted source could not be found')
        self.assertExists(os.path.join(self.workspacedir, 'conf',
                          'layer.conf'), 'Workspace directory not created')
        matches = glob.glob(os.path.join(self.workspacedir,
                            'appends', recipe_name + '.bbappend'))
        self.assertTrue(matches, 'bbappend not created %s' % result.output)

        # Test devtool status
        result = runCmd('devtool status')
        self.assertIn(recipe_name, result.output)
        self.assertIn(tempdir, result.output)
        self._check_src_repo(tempdir)

        # Usually devtool ide-sdk would initiate the build of the SDK.
        # But there is a circular dependency with starting Qemu and passing the IP of runqemu to devtool ide-sdk.
        if testimage:
            bitbake("%s qemu-native qemu-helper-native" % testimage)
            deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
            self.add_command_to_tearDown('bitbake -c clean %s' % testimage)
            self.add_command_to_tearDown(
                'rm -f %s/%s*' % (deploy_dir_image, testimage))

        return tempdir

    def _get_recipe_ids(self, recipe_name):
        """IDs needed to write recipe specific config entries into IDE config files

        Returns a tuple (recipe_id, recipe_id_pretty), e.g.
        ("cmake-example-core2-64", "cmake-example: core2-64").
        """
        package_arch = get_bb_var('PACKAGE_ARCH', recipe_name)
        recipe_id = recipe_name + "-" + package_arch
        recipe_id_pretty = recipe_name + ": " + package_arch
        return (recipe_id, recipe_id_pretty)

    def _verify_install_script_code(self, tempdir, recipe_name):
        """Verify the scripts referred by the tasks.json file are fine.

        This function does not depend on Qemu. Therefore it verifies the scripts
        exists and the delete step works as expected. But it does not try to
        deploy to Qemu.
        """
        recipe_id, recipe_id_pretty = self._get_recipe_ids(recipe_name)
        with open(os.path.join(tempdir, '.vscode', 'tasks.json')) as tasks_j:
            tasks_d = json.load(tasks_j)
        tasks = tasks_d["tasks"]
        # The task label generated by the code plugin embeds the pretty id.
        task_install = next(
            (task for task in tasks if task["label"] == "install && deploy-target %s" % recipe_id_pretty), None)
        self.assertIsNot(task_install, None)
        # execute only the bb_run_do_install script since the deploy would require e.g. Qemu running.
        i_and_d_script = "install_and_deploy_" + recipe_id
        i_and_d_script_path = os.path.join(
            self._workspace_scripts_dir(recipe_name), i_and_d_script)
        self.assertExists(i_and_d_script_path)

    def _devtool_ide_sdk_qemu(self, tempdir, qemu, recipe_name, example_exe):
        """Verify deployment and execution in Qemu system work for one recipe.

        This function checks the entire SDK workflow: changing the code, recompiling
        it and deploying it back to Qemu, and checking that the changes have been
        incorporated into the provided binaries. It also runs the tests of the recipe.
        """
        recipe_id, _ = self._get_recipe_ids(recipe_name)
        i_and_d_script = "install_and_deploy_" + recipe_id
        install_deploy_cmd = os.path.join(
            self._workspace_scripts_dir(recipe_name), i_and_d_script)
        self.assertExists(install_deploy_cmd,
                          '%s script not found' % install_deploy_cmd)
        runCmd(install_deploy_cmd)

        # Magic strings baked into the cpp-example-lib sources of the
        # example recipes (see meta-selftest); used to prove the deployed
        # binary matches the current source tree.
        MAGIC_STRING_ORIG = "Magic: 123456789"
        MAGIC_STRING_NEW = "Magic: 987654321"
        ptest_cmd = "ptest-runner " + recipe_name

        # validate that SSH is working
        status, _ = qemu.run("uname")
        self.assertEqual(
            status, 0, msg="Failed to connect to the SSH server on Qemu")

        # Verify the unmodified example prints the magic string
        status, output = qemu.run(example_exe)
        self.assertEqual(status, 0, msg="%s failed: %s" %
                         (example_exe, output))
        self.assertIn(MAGIC_STRING_ORIG, output)

        # Verify the unmodified ptests work
        status, output = qemu.run(ptest_cmd)
        self.assertEqual(status, 0, msg="%s failed: %s" % (ptest_cmd, output))
        self.assertIn("PASS: cpp-example-lib", output)

        # Verify remote debugging works
        self._gdb_cross_debugging(
            qemu, recipe_name, example_exe, MAGIC_STRING_ORIG)

        # Replace the Magic String in the code, compile and deploy to Qemu
        cpp_example_lib_hpp = os.path.join(tempdir, 'cpp-example-lib.hpp')
        with open(cpp_example_lib_hpp, 'r') as file:
            cpp_code = file.read()
            cpp_code = cpp_code.replace(MAGIC_STRING_ORIG, MAGIC_STRING_NEW)
        with open(cpp_example_lib_hpp, 'w') as file:
            file.write(cpp_code)
        runCmd(install_deploy_cmd, cwd=tempdir)

        # Verify the modified example prints the modified magic string
        status, output = qemu.run(example_exe)
        self.assertEqual(status, 0, msg="%s failed: %s" %
                         (example_exe, output))
        self.assertNotIn(MAGIC_STRING_ORIG, output)
        self.assertIn(MAGIC_STRING_NEW, output)

        # Verify the modified example ptests work
        status, output = qemu.run(ptest_cmd)
        self.assertEqual(status, 0, msg="%s failed: %s" % (ptest_cmd, output))
        self.assertIn("PASS: cpp-example-lib", output)

        # Verify remote debugging works with the modified magic string
        self._gdb_cross_debugging(
            qemu, recipe_name, example_exe, MAGIC_STRING_NEW)

    def _gdb_cross(self):
        """Verify gdb-cross is provided by devtool ide-sdk"""
        target_arch = self.td["TARGET_ARCH"]
        target_sys = self.td["TARGET_SYS"]
        gdb_recipe = "gdb-cross-" + target_arch
        gdb_binary = target_sys + "-gdb"

        native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", gdb_recipe)
        r = runCmd("%s --version" % gdb_binary,
                   native_sysroot=native_sysroot, target_sys=target_sys)
        self.assertEqual(r.status, 0)
        self.assertIn("GNU gdb", r.output)

    def _gdb_cross_debugging(self, qemu, recipe_name, example_exe, magic_string):
        """Verify gdb-cross is working

        Test remote debugging:
        break main
        run
        continue
        break CppExample::print_json()
        continue
        print CppExample::test_string.compare("cpp-example-lib Magic: 123456789")
        $1 = 0
        print CppExample::test_string.compare("cpp-example-lib Magic: 123456789aaa")
        $2 = -3
        list cpp-example-lib.hpp:13,13
        13      inline static const std::string test_string = "cpp-example-lib Magic: 123456789";
        continue
        """
        sshargs = '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
        # Script names generated by devtool ide-sdk encode the gdbserver port
        # (1234) and the binary path (presumably /usr/bin/<exe> with '/'
        # mapped to '-' — confirm against the ide-sdk plugin if this changes).
        gdbserver_script = os.path.join(self._workspace_scripts_dir(
            recipe_name), 'gdbserver_1234_usr-bin-' + example_exe + '_m')
        gdb_script = os.path.join(self._workspace_scripts_dir(
            recipe_name), 'gdb_1234_usr-bin-' + example_exe)

        # Start a gdbserver
        r = runCmd(gdbserver_script)
        self.assertEqual(r.status, 0)

        # Check there is a gdbserver running
        r = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, 'ps'))
        self.assertEqual(r.status, 0)
        self.assertIn("gdbserver ", r.output)

        # Check the pid file is correct
        test_cmd = "cat /proc/$(cat /tmp/gdbserver_1234_usr-bin-" + \
            example_exe + "/pid)/cmdline"
        r = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, test_cmd))
        self.assertEqual(r.status, 0)
        self.assertIn("gdbserver", r.output)

        # Test remote debugging works
        gdb_batch_cmd = " --batch -ex 'break main' -ex 'run'"
        gdb_batch_cmd += " -ex 'break CppExample::print_json()' -ex 'continue'"
        gdb_batch_cmd += " -ex 'print CppExample::test_string.compare(\"cpp-example-lib %s\")'" % magic_string
        gdb_batch_cmd += " -ex 'print CppExample::test_string.compare(\"cpp-example-lib %saaa\")'" % magic_string
        gdb_batch_cmd += " -ex 'list cpp-example-lib.hpp:13,13'"
        gdb_batch_cmd += " -ex 'continue'"
        r = runCmd(gdb_script + gdb_batch_cmd)
        self.logger.debug("%s %s returned: %s", gdb_script,
                          gdb_batch_cmd, r.output)
        self.assertEqual(r.status, 0)
        self.assertIn("Breakpoint 1, main", r.output)
        self.assertIn("$1 = 0", r.output)  # test.string.compare equal
        self.assertIn("$2 = -3", r.output)  # test.string.compare longer
        self.assertIn(
            'inline static const std::string test_string = "cpp-example-lib %s";' % magic_string, r.output)
        self.assertIn("exited normally", r.output)

        # Stop the gdbserver
        r = runCmd(gdbserver_script + ' stop')
        self.assertEqual(r.status, 0)

        # Check there is no gdbserver running
        r = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, 'ps'))
        self.assertEqual(r.status, 0)
        self.assertNotIn("gdbserver ", r.output)

    def _verify_cmake_preset(self, tempdir):
        """Verify the generated cmake preset works as expected

        Check if compiling works
        Check if unit tests can be executed in qemu (not qemu-system)
        """
        with open(os.path.join(tempdir, 'CMakeUserPresets.json')) as cmake_preset_j:
            cmake_preset_d = json.load(cmake_preset_j)
        config_presets = cmake_preset_d["configurePresets"]
        self.assertEqual(len(config_presets), 1)
        cmake_exe = config_presets[0]["cmakeExecutable"]
        preset_name = config_presets[0]["name"]

        # Verify the wrapper for cmake native is available
        self.assertExists(cmake_exe)

        # Verify the cmake preset generated by devtool ide-sdk is available
        result = runCmd('%s --list-presets' % cmake_exe, cwd=tempdir)
        self.assertIn(preset_name, result.output)

        # Verify cmake re-uses the o files compiled by bitbake
        result = runCmd('%s --build --preset %s' %
                        (cmake_exe, preset_name), cwd=tempdir)
        self.assertIn("ninja: no work to do.", result.output)

        # Verify the unit tests work (in Qemu user mode)
        result = runCmd('%s --build --preset %s --target test' %
                        (cmake_exe, preset_name), cwd=tempdir)
        self.assertIn("100% tests passed", result.output)

        # Verify re-building and testing works again
        result = runCmd('%s --build --preset %s --target clean' %
                        (cmake_exe, preset_name), cwd=tempdir)
        self.assertIn("Cleaning", result.output)
        result = runCmd('%s --build --preset %s' %
                        (cmake_exe, preset_name), cwd=tempdir)
        self.assertIn("Building", result.output)
        self.assertIn("Linking", result.output)
        result = runCmd('%s --build --preset %s --target test' %
                        (cmake_exe, preset_name), cwd=tempdir)
        self.assertIn("Running tests...", result.output)
        self.assertIn("100% tests passed", result.output)

    @OETestTag("runqemu")
    def test_devtool_ide_sdk_none_qemu(self):
        """Start qemu-system and run tests for multiple recipes. ide=none is used."""
        recipe_names = ["cmake-example", "meson-example"]
        testimage = "oe-selftest-image"

        self._check_workspace()
        self._write_bb_config(recipe_names)
        self._check_runqemu_prerequisites()

        # Verify deployment to Qemu (system mode) works
        bitbake(testimage)
        with runqemu(testimage, runqemuparams="nographic") as qemu:
            # cmake-example recipe
            recipe_name = "cmake-example"
            example_exe = "cmake-example"
            build_file = "CMakeLists.txt"
            tempdir = self._devtool_ide_sdk_recipe(
                recipe_name, build_file, testimage)
            bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@%s -c --ide=none' % (
                recipe_name, testimage, qemu.ip)
            runCmd(bitbake_sdk_cmd)
            self._gdb_cross()
            self._verify_cmake_preset(tempdir)
            self._devtool_ide_sdk_qemu(tempdir, qemu, recipe_name, example_exe)
            # Verify the oe-scripts sym-link is valid
            self.assertEqual(self._workspace_scripts_dir(
                recipe_name), self._sources_scripts_dir(tempdir))

            # meson-example recipe
            recipe_name = "meson-example"
            example_exe = "mesonex"
            build_file = "meson.build"
            tempdir = self._devtool_ide_sdk_recipe(
                recipe_name, build_file, testimage)
            bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@%s -c --ide=none' % (
                recipe_name, testimage, qemu.ip)
            runCmd(bitbake_sdk_cmd)
            self._gdb_cross()
            self._devtool_ide_sdk_qemu(tempdir, qemu, recipe_name, example_exe)
            # Verify the oe-scripts sym-link is valid
            self.assertEqual(self._workspace_scripts_dir(
                recipe_name), self._sources_scripts_dir(tempdir))

    def test_devtool_ide_sdk_code_cmake(self):
        """Verify a cmake recipe works with ide=code mode"""
        recipe_name = "cmake-example"
        build_file = "CMakeLists.txt"
        testimage = "oe-selftest-image"

        self._check_workspace()
        self._write_bb_config([recipe_name])
        tempdir = self._devtool_ide_sdk_recipe(
            recipe_name, build_file, testimage)
        # 192.168.17.17 is a dummy target address: the scripts are only
        # generated and checked locally, no deployment happens here.
        bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@192.168.17.17 -c --ide=code' % (
            recipe_name, testimage)
        runCmd(bitbake_sdk_cmd)
        self._verify_cmake_preset(tempdir)
        self._verify_install_script_code(tempdir, recipe_name)
        self._gdb_cross()

    def test_devtool_ide_sdk_code_meson(self):
        """Verify a meson recipe works with ide=code mode"""
        recipe_name = "meson-example"
        build_file = "meson.build"
        testimage = "oe-selftest-image"

        self._check_workspace()
        self._write_bb_config([recipe_name])
        tempdir = self._devtool_ide_sdk_recipe(
            recipe_name, build_file, testimage)
        # Dummy target address, see test_devtool_ide_sdk_code_cmake.
        bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@192.168.17.17 -c --ide=code' % (
            recipe_name, testimage)
        runCmd(bitbake_sdk_cmd)

        with open(os.path.join(tempdir, '.vscode', 'settings.json')) as settings_j:
            settings_d = json.load(settings_j)
        meson_exe = settings_d["mesonbuild.mesonPath"]
        meson_build_folder = settings_d["mesonbuild.buildFolder"]

        # Verify the wrapper for meson native is available
        self.assertExists(meson_exe)

        # Verify meson re-uses the o files compiled by bitbake
        result = runCmd('%s compile -C %s' %
                        (meson_exe, meson_build_folder), cwd=tempdir)
        self.assertIn("ninja: no work to do.", result.output)

        # Verify the unit tests work (in Qemu)
        runCmd('%s test -C %s' % (meson_exe, meson_build_folder), cwd=tempdir)

        # Verify re-building and testing works again
        result = runCmd('%s compile -C %s --clean' %
                        (meson_exe, meson_build_folder), cwd=tempdir)
        self.assertIn("Cleaning...", result.output)
        result = runCmd('%s compile -C %s' %
                        (meson_exe, meson_build_folder), cwd=tempdir)
        self.assertIn("Linking target", result.output)
        runCmd('%s test -C %s' % (meson_exe, meson_build_folder), cwd=tempdir)

        self._verify_install_script_code(tempdir, recipe_name)
        self._gdb_cross()

    def test_devtool_ide_sdk_shared_sysroots(self):
        """Verify the shared sysroot SDK"""

        # Handle the workspace (which is not needed by this test case)
        self._check_workspace()

        result_init = runCmd(
            'devtool ide-sdk -m shared oe-selftest-image cmake-example meson-example --ide=code')
        bb_vars = get_bb_vars(
            ['REAL_MULTIMACH_TARGET_SYS', 'DEPLOY_DIR_IMAGE', 'COREBASE'], "meta-ide-support")
        environment_script = 'environment-setup-%s' % bb_vars['REAL_MULTIMACH_TARGET_SYS']
        deploydir = bb_vars['DEPLOY_DIR_IMAGE']
        environment_script_path = os.path.join(deploydir, environment_script)
        cpp_example_src = os.path.join(
            bb_vars['COREBASE'], 'meta-selftest', 'recipes-test', 'cpp', 'files')

        # Verify the cross environment script is available
        self.assertExists(environment_script_path)

        def runCmdEnv(cmd, cwd):
            # Source the SDK environment script before running cmd so the
            # cross toolchain is found on PATH.
            cmd = '/bin/sh -c ". %s > /dev/null && %s"' % (
                environment_script_path, cmd)
            return runCmd(cmd, cwd)

        # Verify building the C++ example works with CMake
        tempdir_cmake = tempfile.mkdtemp(prefix='devtoolqa')
        self.track_for_cleanup(tempdir_cmake)

        result_cmake = runCmdEnv("which cmake", cwd=tempdir_cmake)
        cmake_native = os.path.normpath(result_cmake.output.strip())
        self.assertExists(cmake_native)

        runCmdEnv('cmake %s' % cpp_example_src, cwd=tempdir_cmake)
        runCmdEnv('cmake --build %s' % tempdir_cmake, cwd=tempdir_cmake)

        # Verify the printed note really refers to a cmake executable
        cmake_native_code = ""
        for line in result_init.output.splitlines():
            m = re.search(r'"cmake.cmakePath": "(.*)"', line)
            if m:
                cmake_native_code = m.group(1)
                break
        self.assertExists(cmake_native_code)
        self.assertEqual(cmake_native, cmake_native_code)

        # Verify building the C++ example works with Meson
        tempdir_meson = tempfile.mkdtemp(prefix='devtoolqa')
        self.track_for_cleanup(tempdir_meson)

        result_cmake = runCmdEnv("which meson", cwd=tempdir_meson)
        meson_native = os.path.normpath(result_cmake.output.strip())
        self.assertExists(meson_native)

        runCmdEnv('meson setup %s' % tempdir_meson, cwd=cpp_example_src)
        runCmdEnv('meson compile', cwd=tempdir_meson)

    def test_devtool_ide_sdk_plugins(self):
        """Test that devtool ide-sdk can use plugins from other layers."""

        # We need a workspace layer and a modified recipe (but no image)
        modified_recipe_name = "meson-example"
        modified_build_file = "meson.build"
        testimage = "oe-selftest-image"
        shared_recipe_name = "cmake-example"

        self._check_workspace()
        self._write_bb_config([modified_recipe_name])
        tempdir = self._devtool_ide_sdk_recipe(
            modified_recipe_name, modified_build_file, None)

        # Matches the "--ide {a,b,...}" choices list in the argparse help.
        IDE_RE = re.compile(r'.*--ide \{(.*)\}.*')

        def get_ides_from_help(help_str):
            m = IDE_RE.search(help_str)
            return m.group(1).split(',')

        # verify the default plugins are available but the foo plugin is not
        result = runCmd('devtool ide-sdk -h')
        found_ides = get_ides_from_help(result.output)
        self.assertIn('code', found_ides)
        self.assertIn('none', found_ides)
        self.assertNotIn('foo', found_ides)

        shared_config_file = os.path.join(tempdir, 'shared-config.txt')
        shared_config_str = 'Dummy shared IDE config'
        modified_config_file = os.path.join(tempdir, 'modified-config.txt')
        modified_config_str = 'Dummy modified IDE config'

        # Generate a foo plugin in the workspace layer
        plugin_dir = os.path.join(
            self.workspacedir, 'lib', 'devtool', 'ide_plugins')
        os.makedirs(plugin_dir)
        plugin_code = 'from devtool.ide_plugins import IdeBase\n\n'
        plugin_code += 'class IdeFoo(IdeBase):\n'
        plugin_code += '    def setup_shared_sysroots(self, shared_env):\n'
        plugin_code += '        with open("%s", "w") as config_file:\n' % shared_config_file
        plugin_code += '            config_file.write("%s")\n\n' % shared_config_str
        plugin_code += '    def setup_modified_recipe(self, args, image_recipe, modified_recipe):\n'
        plugin_code += '        with open("%s", "w") as config_file:\n' % modified_config_file
        plugin_code += '            config_file.write("%s")\n\n' % modified_config_str
        plugin_code += 'def register_ide_plugin(ide_plugins):\n'
        plugin_code += '    ide_plugins["foo"] = IdeFoo\n'

        plugin_py = os.path.join(plugin_dir, 'ide_foo.py')
        with open(plugin_py, 'w') as plugin_file:
            plugin_file.write(plugin_code)

        # Verify the foo plugin is available as well
        result = runCmd('devtool ide-sdk -h')
        found_ides = get_ides_from_help(result.output)
        self.assertIn('code', found_ides)
        self.assertIn('none', found_ides)
        self.assertIn('foo', found_ides)

        # Verify the foo plugin generates a shared config
        result = runCmd(
            'devtool ide-sdk -m shared --skip-bitbake --ide foo %s' % shared_recipe_name)
        with open(shared_config_file) as shared_config:
            shared_config_new = shared_config.read()
        self.assertEqual(shared_config_str, shared_config_new)

        # Verify the foo plugin generates a modified config
        result = runCmd('devtool ide-sdk --skip-bitbake --ide foo %s %s' %
                        (modified_recipe_name, testimage))
        with open(modified_config_file) as modified_config:
            modified_config_new = modified_config.read()
        self.assertEqual(modified_config_str, modified_config_new)
diff --git a/meta/lib/oeqa/selftest/cases/distrodata.py b/meta/lib/oeqa/selftest/cases/distrodata.py
index e1cfc3b621..f2c6124d70 100644
--- a/meta/lib/oeqa/selftest/cases/distrodata.py
+++ b/meta/lib/oeqa/selftest/cases/distrodata.py
@@ -1,11 +1,10 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5from oeqa.selftest.case import OESelftestTestCase 7from oeqa.selftest.case import OESelftestTestCase
6from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
7from oeqa.utils.decorators import testcase
8from oeqa.utils.ftools import write_file
9 8
10import oe.recipeutils 9import oe.recipeutils
11 10
@@ -18,13 +17,13 @@ class Distrodata(OESelftestTestCase):
18 Product: oe-core 17 Product: oe-core
19 Author: Alexander Kanavin <alex.kanavin@gmail.com> 18 Author: Alexander Kanavin <alex.kanavin@gmail.com>
20 """ 19 """
21 feature = 'LICENSE_FLAGS_WHITELIST += " commercial"\n' 20 feature = 'LICENSE_FLAGS_ACCEPTED += " commercial"\n'
22 self.write_config(feature) 21 self.write_config(feature)
23 22
24 pkgs = oe.recipeutils.get_recipe_upgrade_status() 23 pkggroups = oe.recipeutils.get_recipe_upgrade_status()
25 24
26 regressed_failures = [pkg[0] for pkg in pkgs if pkg[1] == 'UNKNOWN_BROKEN'] 25 regressed_failures = [pkg['pn'] for pkgs in pkggroups for pkg in pkgs if pkg['status'] == 'UNKNOWN_BROKEN']
27 regressed_successes = [pkg[0] for pkg in pkgs if pkg[1] == 'KNOWN_BROKEN'] 26 regressed_successes = [pkg['pn'] for pkgs in pkggroups for pkg in pkgs if pkg['status'] == 'KNOWN_BROKEN']
28 msg = "" 27 msg = ""
29 if len(regressed_failures) > 0: 28 if len(regressed_failures) > 0:
30 msg = msg + """ 29 msg = msg + """
@@ -49,21 +48,21 @@ but their recipes claim otherwise by setting UPSTREAM_VERSION_UNKNOWN. Please re
49 Author: Alexander Kanavin <alex.kanavin@gmail.com> 48 Author: Alexander Kanavin <alex.kanavin@gmail.com>
50 """ 49 """
51 def is_exception(pkg): 50 def is_exception(pkg):
52 exceptions = ["packagegroup-", "initramfs-", "systemd-machine-units", "target-sdk-provides-dummy"] 51 exceptions = ["packagegroup-",]
53 for i in exceptions: 52 for i in exceptions:
54 if i in pkg: 53 if i in pkg:
55 return True 54 return True
56 return False 55 return False
57 56
58 def is_maintainer_exception(entry): 57 def is_maintainer_exception(entry):
59 exceptions = ["musl", "newlib", "linux-yocto", "linux-dummy", "mesa-gl", "libgfortran", 58 exceptions = ["musl", "newlib", "picolibc", "linux-yocto", "linux-dummy", "mesa-gl", "libgfortran", "libx11-compose-data",
60 "cve-update-db-native"] 59 "cve-update-nvd2-native", "barebox", "libglvnd"]
61 for i in exceptions: 60 for i in exceptions:
62 if i in entry: 61 if i in entry:
63 return True 62 return True
64 return False 63 return False
65 64
66 feature = 'require conf/distro/include/maintainers.inc\nLICENSE_FLAGS_WHITELIST += " commercial"\nPARSE_ALL_RECIPES = "1"\n' 65 feature = 'require conf/distro/include/maintainers.inc\nLICENSE_FLAGS_ACCEPTED += " commercial"\nPARSE_ALL_RECIPES = "1"\nPACKAGE_CLASSES = "package_ipk package_deb package_rpm"\n'
67 self.write_config(feature) 66 self.write_config(feature)
68 67
69 with bb.tinfoil.Tinfoil() as tinfoil: 68 with bb.tinfoil.Tinfoil() as tinfoil:
@@ -74,7 +73,7 @@ but their recipes claim otherwise by setting UPSTREAM_VERSION_UNKNOWN. Please re
74 73
75 missing_recipes = [] 74 missing_recipes = []
76 recipes = [] 75 recipes = []
77 prefix = "RECIPE_MAINTAINER_pn-" 76 prefix = "RECIPE_MAINTAINER:pn-"
78 77
79 # We could have used all_recipes() here, but this method will find 78 # We could have used all_recipes() here, but this method will find
80 # every recipe if we ever move to setting RECIPE_MAINTAINER in recipe files 79 # every recipe if we ever move to setting RECIPE_MAINTAINER in recipe files
@@ -116,3 +115,15 @@ The list of oe-core recipes with maintainers is empty. This may indicate that th
116 self.fail(""" 115 self.fail("""
117Unable to find recipes for the following entries in maintainers.inc: 116Unable to find recipes for the following entries in maintainers.inc:
118""" + "\n".join(['%s' % i for i in missing_recipes])) 117""" + "\n".join(['%s' % i for i in missing_recipes]))
118
119 def test_common_include_recipes(self):
120 """
121 Summary: Test that obtaining recipes that share includes between them returns a sane result
122 Expected: At least cmake and qemu entries are present in the output
123 Product: oe-core
124 Author: Alexander Kanavin <alex.kanavin@gmail.com>
125 """
126 recipes = oe.recipeutils.get_common_include_recipes()
127
128 self.assertIn({'qemu-system-native', 'qemu', 'qemu-native'}, recipes)
129 self.assertIn({'cmake-native', 'cmake'}, recipes)
diff --git a/meta/lib/oeqa/selftest/cases/efibootpartition.py b/meta/lib/oeqa/selftest/cases/efibootpartition.py
index a61cf9bcb3..fcfcdaf7e4 100644
--- a/meta/lib/oeqa/selftest/cases/efibootpartition.py
+++ b/meta/lib/oeqa/selftest/cases/efibootpartition.py
@@ -5,42 +5,30 @@
5# SPDX-License-Identifier: MIT 5# SPDX-License-Identifier: MIT
6# 6#
7 7
8import re
9
10from oeqa.selftest.case import OESelftestTestCase 8from oeqa.selftest.case import OESelftestTestCase
11from oeqa.utils.commands import bitbake, runqemu, get_bb_var 9from oeqa.utils.commands import bitbake, runqemu, get_bb_var
10from oeqa.core.decorator.data import skipIfNotMachine
11import oe.types
12 12
13class GenericEFITest(OESelftestTestCase): 13class GenericEFITest(OESelftestTestCase):
14 """EFI booting test class""" 14 """EFI booting test class"""
15 @skipIfNotMachine("qemux86-64", "test is qemux86-64 specific currently")
16 def test_boot_efi(self):
17 image = "core-image-minimal"
18 runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', image) or ""
19 cmd = "runqemu %s nographic serial wic ovmf" % (runqemu_params)
20 if oe.types.qemu_use_kvm(self.td.get('QEMU_USE_KVM', 0), self.td["TARGET_ARCH"]):
21 cmd += " kvm"
15 22
16 cmd_common = "runqemu nographic serial wic ovmf" 23 self.write_config("""
17 efi_provider = "systemd-boot" 24EFI_PROVIDER = "grub-efi"
18 image = "core-image-minimal" 25IMAGE_FSTYPES:pn-%s:append = " wic"
19 machine = "qemux86-64" 26MACHINE_FEATURES:append = " efi"
20 recipes_built = False
21
22 @classmethod
23 def setUpLocal(self):
24 super(GenericEFITest, self).setUpLocal(self)
25
26 self.write_config(self,
27"""
28EFI_PROVIDER = "%s"
29IMAGE_FSTYPES_pn-%s_append = " wic"
30MACHINE = "%s"
31MACHINE_FEATURES_append = " efi"
32WKS_FILE = "efi-bootdisk.wks.in" 27WKS_FILE = "efi-bootdisk.wks.in"
33IMAGE_INSTALL_append = " grub-efi systemd-boot kernel-image-bzimage" 28IMAGE_INSTALL:append = " grub-efi kernel-image-bzimage"
34""" 29"""
35% (self.efi_provider, self.image, self.machine)) 30% (image))
36 if not self.recipes_built:
37 bitbake("ovmf")
38 bitbake(self.image)
39 self.recipes_built = True
40 31
41 @classmethod 32 bitbake(image + " ovmf")
42 def test_boot_efi(self): 33 with runqemu(image, ssh=False, launch_cmd=cmd) as qemu:
43 """Test generic boot partition with qemu"""
44 cmd = "%s %s" % (self.cmd_common, self.machine)
45 with runqemu(self.image, ssh=False, launch_cmd=cmd) as qemu:
46 self.assertTrue(qemu.runner.logged, "Failed: %s" % cmd) 34 self.assertTrue(qemu.runner.logged, "Failed: %s" % cmd)
diff --git a/meta/lib/oeqa/selftest/cases/eSDK.py b/meta/lib/oeqa/selftest/cases/esdk.py
index 862849af35..7a5fe00a08 100644
--- a/meta/lib/oeqa/selftest/cases/eSDK.py
+++ b/meta/lib/oeqa/selftest/cases/esdk.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -8,7 +10,7 @@ import os
8import glob 10import glob
9import time 11import time
10from oeqa.selftest.case import OESelftestTestCase 12from oeqa.selftest.case import OESelftestTestCase
11from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars 13from oeqa.utils.commands import runCmd, bitbake, get_bb_vars
12 14
13class oeSDKExtSelfTest(OESelftestTestCase): 15class oeSDKExtSelfTest(OESelftestTestCase):
14 """ 16 """
@@ -25,11 +27,7 @@ class oeSDKExtSelfTest(OESelftestTestCase):
25 return glob.glob(pattern)[0] 27 return glob.glob(pattern)[0]
26 28
27 @staticmethod 29 @staticmethod
28 def run_esdk_cmd(env_eSDK, tmpdir_eSDKQA, cmd, postconfig=None, **options): 30 def run_esdk_cmd(env_eSDK, tmpdir_eSDKQA, cmd, **options):
29 if postconfig:
30 esdk_conf_file = os.path.join(tmpdir_eSDKQA, 'conf', 'local.conf')
31 with open(esdk_conf_file, 'a+') as f:
32 f.write(postconfig)
33 if not options: 31 if not options:
34 options = {} 32 options = {}
35 if not 'shell' in options: 33 if not 'shell' in options:
@@ -63,7 +61,7 @@ class oeSDKExtSelfTest(OESelftestTestCase):
63 cls.env_eSDK = oeSDKExtSelfTest.get_esdk_environment('', cls.tmpdir_eSDKQA) 61 cls.env_eSDK = oeSDKExtSelfTest.get_esdk_environment('', cls.tmpdir_eSDKQA)
64 62
65 sstate_config=""" 63 sstate_config="""
66SDK_LOCAL_CONF_WHITELIST = "SSTATE_MIRRORS" 64ESDK_LOCALCONF_ALLOW = "SSTATE_MIRRORS"
67SSTATE_MIRRORS = "file://.* file://%s/PATH" 65SSTATE_MIRRORS = "file://.* file://%s/PATH"
68CORE_IMAGE_EXTRA_INSTALL = "perl" 66CORE_IMAGE_EXTRA_INSTALL = "perl"
69 """ % sstate_dir 67 """ % sstate_dir
@@ -91,7 +89,7 @@ CORE_IMAGE_EXTRA_INSTALL = "perl"
91 89
92 # Configure eSDK to use sstate mirror from poky 90 # Configure eSDK to use sstate mirror from poky
93 sstate_config=""" 91 sstate_config="""
94SDK_LOCAL_CONF_WHITELIST = "SSTATE_MIRRORS" 92ESDK_LOCALCONF_ALLOW = "SSTATE_MIRRORS"
95SSTATE_MIRRORS = "file://.* file://%s/PATH" 93SSTATE_MIRRORS = "file://.* file://%s/PATH"
96 """ % bb_vars["SSTATE_DIR"] 94 """ % bb_vars["SSTATE_DIR"]
97 with open(os.path.join(cls.tmpdir_eSDKQA, 'conf', 'local.conf'), 'a+') as f: 95 with open(os.path.join(cls.tmpdir_eSDKQA, 'conf', 'local.conf'), 'a+') as f:
@@ -100,7 +98,7 @@ SSTATE_MIRRORS = "file://.* file://%s/PATH"
100 @classmethod 98 @classmethod
101 def tearDownClass(cls): 99 def tearDownClass(cls):
102 for i in range(0, 10): 100 for i in range(0, 10):
103 if os.path.exists(os.path.join(cls.tmpdir_eSDKQA, 'bitbake.lock')): 101 if os.path.exists(os.path.join(cls.tmpdir_eSDKQA, 'bitbake.lock')) or os.path.exists(os.path.join(cls.tmpdir_eSDKQA, 'cache/hashserv.db-wal')):
104 time.sleep(1) 102 time.sleep(1)
105 else: 103 else:
106 break 104 break
diff --git a/meta/lib/oeqa/selftest/cases/externalsrc.py b/meta/lib/oeqa/selftest/cases/externalsrc.py
new file mode 100644
index 0000000000..1d800dc82c
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/externalsrc.py
@@ -0,0 +1,44 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8import shutil
9import tempfile
10
11from oeqa.selftest.case import OESelftestTestCase
12from oeqa.utils.commands import get_bb_var, runCmd
13
14class ExternalSrc(OESelftestTestCase):
15 # test that srctree_hash_files does not crash
16 # we should be actually checking do_compile[file-checksums] but oeqa currently does not support it
17 # so we check only that a recipe with externalsrc can be parsed
18 def test_externalsrc_srctree_hash_files(self):
19 test_recipe = "git-submodule-test"
20 git_url = "git://git.yoctoproject.org/git-submodule-test"
21 externalsrc_dir = tempfile.TemporaryDirectory(prefix="externalsrc").name
22
23 self.write_config(
24 """
25INHERIT += "externalsrc"
26EXTERNALSRC:pn-%s = "%s"
27""" % (test_recipe, externalsrc_dir)
28 )
29
30 # test with git without submodules
31 runCmd('git clone %s %s' % (git_url, externalsrc_dir))
32 os.unlink(externalsrc_dir + "/.gitmodules")
33 open(".gitmodules", 'w').close() # local file .gitmodules in cwd should not affect externalsrc parsing
34 self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S does not equal to EXTERNALSRC")
35 os.unlink(".gitmodules")
36
37 # test with git with submodules
38 runCmd('git checkout .gitmodules', cwd=externalsrc_dir)
39 runCmd('git submodule update --init --recursive', cwd=externalsrc_dir)
40 self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S does not equal to EXTERNALSRC")
41
42 # test without git
43 shutil.rmtree(os.path.join(externalsrc_dir, ".git"))
44 self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S does not equal to EXTERNALSRC")
diff --git a/meta/lib/oeqa/selftest/cases/fetch.py b/meta/lib/oeqa/selftest/cases/fetch.py
index 76cbadf2ff..1beef5cfed 100644
--- a/meta/lib/oeqa/selftest/cases/fetch.py
+++ b/meta/lib/oeqa/selftest/cases/fetch.py
@@ -1,7 +1,12 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
7import tempfile
8import textwrap
9import bb.tinfoil
5import oe.path 10import oe.path
6from oeqa.selftest.case import OESelftestTestCase 11from oeqa.selftest.case import OESelftestTestCase
7from oeqa.utils.commands import bitbake 12from oeqa.utils.commands import bitbake
@@ -21,8 +26,8 @@ class Fetch(OESelftestTestCase):
21 # No mirrors, should use git to fetch successfully 26 # No mirrors, should use git to fetch successfully
22 features = """ 27 features = """
23DL_DIR = "%s" 28DL_DIR = "%s"
24MIRRORS_forcevariable = "" 29MIRRORS:forcevariable = ""
25PREMIRRORS_forcevariable = "" 30PREMIRRORS:forcevariable = ""
26""" % dldir 31""" % dldir
27 self.write_config(features) 32 self.write_config(features)
28 oe.path.remove(dldir, recurse=True) 33 oe.path.remove(dldir, recurse=True)
@@ -31,9 +36,10 @@ PREMIRRORS_forcevariable = ""
31 # No mirrors and broken git, should fail 36 # No mirrors and broken git, should fail
32 features = """ 37 features = """
33DL_DIR = "%s" 38DL_DIR = "%s"
39SRC_URI:pn-dbus-wait = "git://git.yoctoproject.org/dbus-wait;branch=master;protocol=git"
34GIT_PROXY_COMMAND = "false" 40GIT_PROXY_COMMAND = "false"
35MIRRORS_forcevariable = "" 41MIRRORS:forcevariable = ""
36PREMIRRORS_forcevariable = "" 42PREMIRRORS:forcevariable = ""
37""" % dldir 43""" % dldir
38 self.write_config(features) 44 self.write_config(features)
39 oe.path.remove(dldir, recurse=True) 45 oe.path.remove(dldir, recurse=True)
@@ -43,9 +49,62 @@ PREMIRRORS_forcevariable = ""
43 # Broken git but a specific mirror 49 # Broken git but a specific mirror
44 features = """ 50 features = """
45DL_DIR = "%s" 51DL_DIR = "%s"
52SRC_URI:pn-dbus-wait = "git://git.yoctoproject.org/dbus-wait;branch=master;protocol=git"
46GIT_PROXY_COMMAND = "false" 53GIT_PROXY_COMMAND = "false"
47MIRRORS_forcevariable = "git://.*/.* http://downloads.yoctoproject.org/mirror/sources/" 54MIRRORS:forcevariable = "git://.*/.* http://downloads.yoctoproject.org/mirror/sources/"
48""" % dldir 55""" % dldir
49 self.write_config(features) 56 self.write_config(features)
50 oe.path.remove(dldir, recurse=True) 57 oe.path.remove(dldir, recurse=True)
51 bitbake("dbus-wait -c fetch -f") 58 bitbake("dbus-wait -c fetch -f")
59
60
61class Dependencies(OESelftestTestCase):
62 def write_recipe(self, content, tempdir):
63 f = os.path.join(tempdir, "test.bb")
64 with open(f, "w") as fd:
65 fd.write(content)
66 return f
67
68 def test_dependencies(self):
69 """
70 Verify that the correct dependencies are generated for specific SRC_URI entries.
71 """
72
73 with bb.tinfoil.Tinfoil() as tinfoil, tempfile.TemporaryDirectory(prefix="selftest-fetch") as tempdir:
74 tinfoil.prepare(config_only=False, quiet=2)
75
76 r = """
77 LICENSE = "CLOSED"
78 SRC_URI = "http://example.com/tarball.zip"
79 """
80 f = self.write_recipe(textwrap.dedent(r), tempdir)
81 d = tinfoil.parse_recipe_file(f)
82 self.assertIn("wget-native", d.getVarFlag("do_fetch", "depends"))
83 self.assertIn("unzip-native", d.getVarFlag("do_unpack", "depends"))
84
85 # Verify that the downloadfilename overrides the URI
86 r = """
87 LICENSE = "CLOSED"
88 SRC_URI = "https://example.com/tarball;downloadfilename=something.zip"
89 """
90 f = self.write_recipe(textwrap.dedent(r), tempdir)
91 d = tinfoil.parse_recipe_file(f)
92 self.assertIn("wget-native", d.getVarFlag("do_fetch", "depends"))
93 self.assertIn("unzip-native", d.getVarFlag("do_unpack", "depends") or "")
94
95 r = """
96 LICENSE = "CLOSED"
97 SRC_URI = "ftp://example.com/tarball.lz"
98 """
99 f = self.write_recipe(textwrap.dedent(r), tempdir)
100 d = tinfoil.parse_recipe_file(f)
101 self.assertIn("wget-native", d.getVarFlag("do_fetch", "depends"))
102 self.assertIn("lzip-native", d.getVarFlag("do_unpack", "depends"))
103
104 r = """
105 LICENSE = "CLOSED"
106 SRC_URI = "git://example.com/repo;branch=master;rev=ffffffffffffffffffffffffffffffffffffffff"
107 """
108 f = self.write_recipe(textwrap.dedent(r), tempdir)
109 d = tinfoil.parse_recipe_file(f)
110 self.assertIn("git-native", d.getVarFlag("do_fetch", "depends"))
diff --git a/meta/lib/oeqa/selftest/cases/fitimage.py b/meta/lib/oeqa/selftest/cases/fitimage.py
index 02692de822..3c40857747 100644
--- a/meta/lib/oeqa/selftest/cases/fitimage.py
+++ b/meta/lib/oeqa/selftest/cases/fitimage.py
@@ -1,14 +1,743 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5from oeqa.selftest.case import OESelftestTestCase
6from oeqa.utils.commands import runCmd, bitbake, get_bb_var, runqemu
7import os 7import os
8import json
9import re 8import re
9import shlex
10import logging
11import pprint
12import tempfile
13
14import oe.fitimage
15
16from oeqa.selftest.case import OESelftestTestCase
17from oeqa.utils.commands import runCmd, bitbake, get_bb_vars, get_bb_var
18
19
20class BbVarsMockGenKeys:
21 def __init__(self, keydir, gen_keys="0", sign_enabled="0", keyname="", sign_ind="0", img_keyname=""):
22 self.bb_vars = {
23 'FIT_GENERATE_KEYS': gen_keys,
24 'FIT_KEY_GENRSA_ARGS': "-F4",
25 'FIT_KEY_REQ_ARGS': "-batch -new",
26 'FIT_KEY_SIGN_PKCS': "-x509",
27 'FIT_SIGN_INDIVIDUAL': sign_ind,
28 'FIT_SIGN_NUMBITS': "2048",
29 'UBOOT_SIGN_ENABLE': sign_enabled,
30 'UBOOT_SIGN_IMG_KEYNAME': img_keyname,
31 'UBOOT_SIGN_KEYDIR': keydir,
32 'UBOOT_SIGN_KEYNAME': keyname,
33 }
34
35 def getVar(self, var):
36 return self.bb_vars[var]
37
38class FitImageTestCase(OESelftestTestCase):
39 """Test functions usable for testing kernel-fitimage.bbclass and uboot-sign.bbclass
40
41 A brief summary showing the structure of a test case:
42
43 self._test_fitimage()
44 # Generate a local.conf file and bitbake the bootloader or the kernel
45 self._bitbake_fit_image()
46
47 # Check if the its file contains the expected paths and attributes.
48 # The _get_req_* functions are implemented by more specific chield classes.
49 self._check_its_file()
50 req_its_paths = self._get_req_its_paths()
51 req_sigvalues_config = self._get_req_sigvalues_config()
52 req_sigvalues_image = self._get_req_sigvalues_image()
53 # Compare the its file against req_its_paths, req_sigvalues_config, req_sigvalues_image
54
55 # Call the dumpimage utiliy and check that it prints all the expected paths and attributes
56 # The _get_req_* functions are implemented by more specific chield classes.
57 self._check_fitimage()
58 self._get_req_sections()
59 # Compare the output of the dumpimage utility against
60 """
61
62 MKIMAGE_HASH_LENGTHS = { 'sha256': 64, 'sha384': 96, 'sha512': 128 }
63 MKIMAGE_SIGNATURE_LENGTHS = { 'rsa2048': 512 }
64
65 def _gen_signing_key(self, bb_vars):
66 """Generate a key pair and a singing certificate
67
68 Generate a UBOOT_SIGN_KEYNAME in the UBOOT_SIGN_KEYDIR similar to what
69 the FIT_GENERATE_KEYS feature does. However, having a static key is
70 probably a more realistic use case than generating a random key with
71 each clean build. So this needs to be tested as well.
72 The FIT_GENERATE_KEYS generates 2 keys: The UBOOT_SIGN_KEYNAME and the
73 UBOOT_SIGN_IMG_KEYNAME. The UBOOT_SIGN_IMG_KEYNAME is used by the
74 FIT_SIGN_INDIVIDUAL feature only. Testing if everything is working if
75 there is only one key available is important as well. Therefore this
76 function generates only the keys which are really needed, not just two.
77 """
78
79 # Define some variables which are usually defined by the kernel-fitimage.bbclass.
80 # But for testing purpose check if the uboot-sign.bbclass is independent from
81 # the kernel-fitimage.bbclass
82 fit_sign_numbits = bb_vars.get('FIT_SIGN_NUMBITS', "2048")
83 fit_key_genrsa_args = bb_vars.get('FIT_KEY_GENRSA_ARGS', "-F4")
84 fit_key_req_args = bb_vars.get('FIT_KEY_REQ_ARGS', "-batch -new")
85 fit_key_sign_pkcs = bb_vars.get('FIT_KEY_SIGN_PKCS', "-x509")
86
87 uboot_sign_keydir = bb_vars['UBOOT_SIGN_KEYDIR']
88 sign_keys = [bb_vars['UBOOT_SIGN_KEYNAME']]
89 if bb_vars['FIT_SIGN_INDIVIDUAL'] == "1":
90 sign_keys.append(bb_vars['UBOOT_SIGN_IMG_KEYNAME'])
91 for sign_key in sign_keys:
92 sing_key_path = os.path.join(uboot_sign_keydir, sign_key)
93 if not os.path.isdir(uboot_sign_keydir):
94 os.makedirs(uboot_sign_keydir)
95 openssl_bindir = FitImageTestCase._setup_native('openssl-native')
96 openssl_path = os.path.join(openssl_bindir, 'openssl')
97 runCmd("%s genrsa %s -out %s.key %s" % (
98 openssl_path,
99 fit_key_genrsa_args,
100 sing_key_path,
101 fit_sign_numbits
102 ))
103 runCmd("%s req %s %s -key %s.key -out %s.crt" % (
104 openssl_path,
105 fit_key_req_args,
106 fit_key_sign_pkcs,
107 sing_key_path,
108 sing_key_path
109 ))
110
111 @staticmethod
112 def _gen_random_file(file_path, num_bytes=65536):
113 with open(file_path, 'wb') as file_out:
114 file_out.write(os.urandom(num_bytes))
115
116 @staticmethod
117 def _setup_native(native_recipe):
118 """Build a native recipe and return the path to its bindir in RECIPE_SYSROOT_NATIVE"""
119 bitbake(native_recipe + " -c addto_recipe_sysroot")
120 vars = get_bb_vars(['RECIPE_SYSROOT_NATIVE', 'bindir'], native_recipe)
121 return os.path.join(vars['RECIPE_SYSROOT_NATIVE'], vars['bindir'])
122
123 def _verify_fit_image_signature(self, uboot_tools_bindir, fitimage_path, dtb_path, conf_name=None):
124 """Verify the signature of a fit configuration
125
126 The fit_check_sign utility from u-boot-tools-native is called.
127 uboot-fit_check_sign -f fitImage -k $dtb_path -c conf-$dtb_name
128 dtb_path refers to a binary device tree containing the public key.
129 """
130 fit_check_sign_path = os.path.join(uboot_tools_bindir, 'uboot-fit_check_sign')
131 cmd = '%s -f %s -k %s' % (fit_check_sign_path, fitimage_path, dtb_path)
132 if conf_name:
133 cmd += ' -c %s' % conf_name
134 result = runCmd(cmd)
135 self.logger.debug("%s\nreturned: %s\n%s", cmd, str(result.status), result.output)
136 self.assertIn("Signature check OK", result.output)
137
138 def _verify_dtb_property(self, dtc_bindir, dtb_path, node_path, property_name, req_property, absent=False):
139 """Verify device tree properties
140
141 The fdtget utility from dtc-native is called and the property is compared.
142 """
143 fdtget_path = os.path.join(dtc_bindir, 'fdtget')
144 cmd = '%s %s %s %s' % (fdtget_path, dtb_path, node_path, property_name)
145 if absent:
146 result = runCmd(cmd, ignore_status=True)
147 self.logger.debug("%s\nreturned: %s\n%s", cmd, str(result.status), result.output)
148 self.assertIn("FDT_ERR_NOTFOUND", result.output)
149 else:
150 result = runCmd(cmd)
151 self.logger.debug("%s\nreturned: %s\n%s", cmd, str(result.status), result.output)
152 self.assertEqual(req_property, result.output.strip())
153
154 @staticmethod
155 def _find_string_in_bin_file(file_path, search_string):
156 """find strings in a binary file
157
158 Shell equivalent: strings "$1" | grep "$2" | wc -l
159 return number of matches
160 """
161 found_positions = 0
162 with open(file_path, 'rb') as file:
163 content = file.read().decode('ascii', errors='ignore')
164 found_positions = content.count(search_string)
165 return found_positions
166
167 @staticmethod
168 def _get_uboot_mkimage_sign_args(uboot_mkimage_sign_args):
169 """Retrive the string passed via -c to the mkimage command
170
171 Example: If a build configutation defines
172 UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'"
173 this function returns "a smart comment"
174 """
175 a_comment = None
176 if uboot_mkimage_sign_args:
177 mkimage_args = shlex.split(uboot_mkimage_sign_args)
178 try:
179 c_index = mkimage_args.index('-c')
180 a_comment = mkimage_args[c_index+1]
181 except ValueError:
182 pass
183 return a_comment
184
185 @staticmethod
186 def _get_dtb_files(bb_vars):
187 """Return a list of devicetree names
188
189 The list should be used to check the dtb and conf nodes in the FIT image or its file.
190 In addition to the entries from KERNEL_DEVICETREE, the external devicetree and the
191 external devicetree overlay added by the test recipe bbb-dtbs-as-ext are handled as well.
192 """
193 kernel_devicetree = bb_vars.get('KERNEL_DEVICETREE')
194 all_dtbs = []
195 dtb_symlinks = []
196 if kernel_devicetree:
197 all_dtbs += [os.path.basename(dtb) for dtb in kernel_devicetree.split()]
198 # Support only the test recipe which provides 1 devicetree and 1 devicetree overlay
199 pref_prov_dtb = bb_vars.get('PREFERRED_PROVIDER_virtual/dtb')
200 if pref_prov_dtb == "bbb-dtbs-as-ext":
201 all_dtbs += ["am335x-bonegreen-ext.dtb", "BBORG_RELAY-00A2.dtbo"]
202 dtb_symlinks.append("am335x-bonegreen-ext-alias.dtb")
203 return (all_dtbs, dtb_symlinks)
204
205 def _is_req_dict_in_dict(self, found_dict, req_dict):
206 """
207 Check if all key-value pairs in the required dictionary are present in the found dictionary.
208
209 This function recursively checks if the required dictionary (`req_dict`) is a subset of the found dictionary (`found_dict`).
210 It supports nested dictionaries, strings, lists, and sets as values.
211
212 Args:
213 found_dict (dict): The dictionary to search within.
214 req_dict (dict): The dictionary containing the required key-value pairs.
215 """
216 for key, value in req_dict.items():
217 self.assertIn(key, found_dict)
218 if isinstance(value, dict):
219 self._is_req_dict_in_dict(found_dict[key], value)
220 elif isinstance(value, str):
221 self.assertIn(value, found_dict[key])
222 elif isinstance(value, list):
223 self.assertLessEqual(set(value), set(found_dict[key]))
224 elif isinstance(value, set):
225 self.assertLessEqual(value, found_dict[key])
226 else:
227 self.assertEqual(value, found_dict[key])
228
229 def _check_its_file(self, bb_vars, its_file_path):
230 """Check if the its file contains the expected sections and fields"""
231 # print the its file for debugging
232 if logging.DEBUG >= self.logger.level:
233 with open(its_file_path) as its_file:
234 self.logger.debug("its file: %s" % its_file.read())
235
236 # Generate a list of expected paths in the its file
237 req_its_paths = self._get_req_its_paths(bb_vars)
238 self.logger.debug("req_its_paths:\n%s\n" % pprint.pformat(req_its_paths, indent=4))
239
240 # Generate a dict of expected configuration signature nodes
241 req_sigvalues_config = self._get_req_sigvalues_config(bb_vars)
242 self.logger.debug("req_sigvalues_config:\n%s\n" % pprint.pformat(req_sigvalues_config, indent=4))
243
244 # Generate a dict of expected image signature nodes
245 req_sigvalues_image = self._get_req_sigvalues_image(bb_vars)
246 self.logger.debug("req_sigvalues_image:\n%s\n" % pprint.pformat(req_sigvalues_image, indent=4))
247
248 # Parse the its file for paths and signatures
249 its_path = []
250 its_paths = []
251 linect = 0
252 sigs = {}
253 with open(its_file_path) as its_file:
254 for line in its_file:
255 linect += 1
256 line = line.strip()
257 if line.endswith('};'):
258 its_path.pop()
259 elif line.endswith('{'):
260 its_path.append(line[:-1].strip())
261 its_paths.append(its_path[:])
262 # kernel-fitimage uses signature-1, uboot-sign uses signature
263 elif its_path and (its_path[-1] == 'signature-1' or its_path[-1] == 'signature'):
264 itsdotpath = '.'.join(its_path)
265 if not itsdotpath in sigs:
266 sigs[itsdotpath] = {}
267 if not '=' in line or not line.endswith(';'):
268 self.fail('Unexpected formatting in %s sigs section line %d:%s' % (its_file_path, linect, line))
269 key, value = line.split('=', 1)
270 sigs[itsdotpath][key.rstrip()] = value.lstrip().rstrip(';')
271
272 # Check if all expected paths are found in the its file
273 self.logger.debug("itspaths:\n%s\n" % pprint.pformat(its_paths, indent=4))
274 for req_path in req_its_paths:
275 if not req_path in its_paths:
276 self.fail('Missing path in its file: %s (%s)' % (req_path, its_file_path))
277
278 # Check if all the expected singnature nodes (images and configurations) are found
279 self.logger.debug("sigs:\n%s\n" % pprint.pformat(sigs, indent=4))
280 if req_sigvalues_config or req_sigvalues_image:
281 for its_path, values in sigs.items():
282 if bb_vars.get('FIT_CONF_PREFIX', "conf-") in its_path:
283 reqsigvalues = req_sigvalues_config
284 else:
285 reqsigvalues = req_sigvalues_image
286 for reqkey, reqvalue in reqsigvalues.items():
287 value = values.get(reqkey, None)
288 if value is None:
289 self.fail('Missing key "%s" in its file signature section %s (%s)' % (reqkey, its_path, its_file_path))
290 self.assertEqual(value, reqvalue)
291
292 # Generate a list of expected fields in the its file
293 req_its_fields = self._get_req_its_fields(bb_vars)
294 self.logger.debug("req_its_fields:\n%s\n" % pprint.pformat(req_its_fields, indent=4))
295
296 # Check if all expected fields are in the its file
297 if req_its_fields:
298 field_index = 0
299 field_index_last = len(req_its_fields) - 1
300 with open(its_file_path) as its_file:
301 for line in its_file:
302 if req_its_fields[field_index] in line:
303 if field_index < field_index_last:
304 field_index +=1
305 else:
306 break
307 self.assertEqual(field_index, field_index_last,
308 "Fields in Image Tree Source File %s did not match, error in finding %s"
309 % (its_file_path, req_its_fields[field_index]))
310
311 def _check_fitimage(self, bb_vars, fitimage_path, uboot_tools_bindir):
312 """Run dumpimage on the final FIT image and parse the output into a dict"""
313 dumpimage_path = os.path.join(uboot_tools_bindir, 'dumpimage')
314 cmd = '%s -l %s' % (dumpimage_path, fitimage_path)
315 self.logger.debug("Analyzing output from dumpimage: %s" % cmd)
316 dumpimage_result = runCmd(cmd)
317 in_section = None
318 sections = {}
319 self.logger.debug("dumpimage output: %s" % dumpimage_result.output)
320 for line in dumpimage_result.output.splitlines():
321 # Find potentially hashed and signed sections
322 if line.startswith((' Configuration', ' Image')):
323 in_section = re.search(r'\((.*)\)', line).groups()[0]
324 # Key value lines start with two spaces otherwise the section ended
325 elif not line.startswith(" "):
326 in_section = None
327 # Handle key value lines of this section
328 elif in_section:
329 if not in_section in sections:
330 sections[in_section] = {}
331 try:
332 key, value = line.split(':', 1)
333 key = key.strip()
334 value = value.strip()
335 except ValueError as val_err:
336 # Handle multiple entries as e.g. for Loadables as a list
337 if key and line.startswith(" "):
338 value = sections[in_section][key] + "," + line.strip()
339 else:
340 raise ValueError(f"Error processing line: '{line}'. Original error: {val_err}")
341 sections[in_section][key] = value
342
343 # Check if the requested dictionary is a subset of the parsed dictionary
344 req_sections, num_signatures = self._get_req_sections(bb_vars)
345 self.logger.debug("req_sections: \n%s\n" % pprint.pformat(req_sections, indent=4))
346 self.logger.debug("dumpimage sections: \n%s\n" % pprint.pformat(sections, indent=4))
347 self._is_req_dict_in_dict(sections, req_sections)
348
349 # Call the signing related checks if the function is provided by a inherited class
350 self._check_signing(bb_vars, sections, num_signatures, uboot_tools_bindir, fitimage_path)
351
352 def _get_req_its_paths(self, bb_vars):
353 self.logger.error("This function needs to be implemented")
354 return []
355
356 def _get_req_its_fields(self, bb_vars):
357 self.logger.error("This function needs to be implemented")
358 return []
359
360 def _get_req_sigvalues_config(self, bb_vars):
361 self.logger.error("This function needs to be implemented")
362 return {}
363
364 def _get_req_sigvalues_image(self, bb_vars):
365 self.logger.error("This function needs to be implemented")
366 return {}
367
368 def _get_req_sections(self, bb_vars):
369 self.logger.error("This function needs to be implemented")
370 return ({}, 0)
371
372 def _check_signing(self, bb_vars, sections, num_signatures, uboot_tools_bindir, fitimage_path):
373 """Verify the signatures in the FIT image."""
374 self.fail("Function needs to be implemented by inheriting classes")
375
376 def _bitbake_fit_image(self, bb_vars):
377 """Bitbake the FIT image and return the paths to the its file and the FIT image"""
378 self.fail("Function needs to be implemented by inheriting classes")
379
380 def _test_fitimage(self, bb_vars):
381 """Check if the its file and the FIT image are created and signed correctly"""
382 fitimage_its_path, fitimage_path = self._bitbake_fit_image(bb_vars)
383 self.assertExists(fitimage_its_path, "%s image tree source doesn't exist" % (fitimage_its_path))
384 self.assertExists(fitimage_path, "%s FIT image doesn't exist" % (fitimage_path))
385
386 self.logger.debug("Checking its: %s" % fitimage_its_path)
387 self._check_its_file(bb_vars, fitimage_its_path)
388
389 # Setup u-boot-tools-native
390 uboot_tools_bindir = FitImageTestCase._setup_native('u-boot-tools-native')
391
392 # Verify the FIT image
393 self._check_fitimage(bb_vars, fitimage_path, uboot_tools_bindir)
394
395class KernelFitImageBase(FitImageTestCase):
396 """Test cases for the linux-yocto-fitimage recipe"""
397
398 def _fit_get_bb_vars(self, additional_vars=[]):
399 """Retrieve BitBake variables specific to the test case.
400
401 Call the get_bb_vars function once and get all variables needed by the test case.
402 """
403 internal_used = {
404 'DEPLOY_DIR_IMAGE',
405 'FIT_CONF_DEFAULT_DTB',
406 'FIT_CONF_PREFIX',
407 'FIT_DESC',
408 'FIT_HASH_ALG',
409 'FIT_KERNEL_COMP_ALG',
410 'FIT_SIGN_ALG',
411 'FIT_SIGN_INDIVIDUAL',
412 'FIT_UBOOT_ENV',
413 'INITRAMFS_IMAGE_BUNDLE',
414 'INITRAMFS_IMAGE_NAME',
415 'INITRAMFS_IMAGE',
416 'KERNEL_DEPLOYSUBDIR',
417 'KERNEL_DEVICETREE',
418 'KERNEL_FIT_LINK_NAME',
419 'MACHINE',
420 'PREFERRED_PROVIDER_virtual/dtb',
421 'UBOOT_ARCH',
422 'UBOOT_ENTRYPOINT',
423 'UBOOT_LOADADDRESS',
424 'UBOOT_MKIMAGE_KERNEL_TYPE',
425 'UBOOT_MKIMAGE_SIGN_ARGS',
426 'UBOOT_RD_ENTRYPOINT',
427 'UBOOT_RD_LOADADDRESS',
428 'UBOOT_SIGN_ENABLE',
429 'UBOOT_SIGN_IMG_KEYNAME',
430 'UBOOT_SIGN_KEYDIR',
431 'UBOOT_SIGN_KEYNAME',
432 }
433 bb_vars = get_bb_vars(list(internal_used | set(additional_vars)), self.kernel_recipe)
434 self.logger.debug("bb_vars: %s" % pprint.pformat(bb_vars, indent=4))
435 return bb_vars
436
437 def _config_add_kernel_classes(self, config):
438 config += '# Use kernel-fit-extra-artifacts.bbclass for the creation of the vmlinux artifact' + os.linesep
439 config += 'KERNEL_CLASSES = "kernel-fit-extra-artifacts"' + os.linesep
440 return config
441
442 @property
443 def kernel_recipe(self):
444 return "linux-yocto-fitimage"
445
446 def _config_add_uboot_env(self, config):
447 """Generate an u-boot environment
448
449 Create a boot.cmd file that is packed into the FIT image as a source-able text file.
450 Updates the configuration to include the boot.cmd file.
451 """
452 fit_uenv_file = "boot.cmd"
453 test_files_dir = "test-files"
454 fit_uenv_path = os.path.join(self.builddir, test_files_dir, fit_uenv_file)
455
456 config += '# Add an u-boot script to the fitImage' + os.linesep
457 config += 'FIT_UBOOT_ENV = "%s"' % fit_uenv_file + os.linesep
458 config += 'FILESEXTRAPATHS:prepend := "${TOPDIR}/%s:"' % test_files_dir + os.linesep
459 config += 'SRC_URI:append:pn-%s = " file://${FIT_UBOOT_ENV}"' % self.kernel_recipe + os.linesep
460
461 if not os.path.isdir(test_files_dir):
462 os.makedirs(test_files_dir)
463 self.logger.debug("Writing to: %s" % fit_uenv_path)
464 with open(fit_uenv_path, "w") as f:
465 f.write('echo "hello world"')
10 466
11class FitImageTests(OESelftestTestCase): 467 return config
468
469 def _bitbake_fit_image(self, bb_vars):
470 """Bitbake the kernel and return the paths to the its file and the FIT image"""
471 bitbake(self.kernel_recipe)
472
473 # Find the right its file and the final fitImage and check if both files are available
474 deploy_dir_image = bb_vars['DEPLOY_DIR_IMAGE']
475 initramfs_image = bb_vars['INITRAMFS_IMAGE']
476 initramfs_image_bundle = bb_vars['INITRAMFS_IMAGE_BUNDLE']
477 initramfs_image_name = bb_vars['INITRAMFS_IMAGE_NAME']
478 kernel_fit_link_name = bb_vars['KERNEL_FIT_LINK_NAME']
479 if not initramfs_image and initramfs_image_bundle != "1":
480 fitimage_its_name = "fitImage-its-%s" % kernel_fit_link_name
481 fitimage_name = "fitImage"
482 elif initramfs_image and initramfs_image_bundle != "1":
483 fitimage_its_name = "fitImage-its-%s-%s" % (initramfs_image_name, kernel_fit_link_name)
484 fitimage_name = "fitImage-%s-%s" % (initramfs_image_name, kernel_fit_link_name)
485 elif initramfs_image and initramfs_image_bundle == "1":
486 fitimage_its_name = "fitImage-its-%s-%s" % (initramfs_image_name, kernel_fit_link_name)
487 fitimage_name = "fitImage" # or fitImage-${KERNEL_IMAGE_LINK_NAME}${KERNEL_IMAGE_BIN_EXT}
488 else:
489 self.fail('Invalid configuration: INITRAMFS_IMAGE_BUNDLE = "1" and not INITRAMFS_IMAGE')
490 kernel_deploysubdir = bb_vars['KERNEL_DEPLOYSUBDIR']
491 if kernel_deploysubdir:
492 fitimage_its_path = os.path.realpath(os.path.join(deploy_dir_image, kernel_deploysubdir, fitimage_its_name))
493 fitimage_path = os.path.realpath(os.path.join(deploy_dir_image, kernel_deploysubdir, fitimage_name))
494 else:
495 fitimage_its_path = os.path.realpath(os.path.join(deploy_dir_image, fitimage_its_name))
496 fitimage_path = os.path.realpath(os.path.join(deploy_dir_image, fitimage_name))
497 return (fitimage_its_path, fitimage_path)
498
499 def _get_req_its_paths(self, bb_vars):
500 """Generate a list of expected paths in the its file
501
502 Example:
503 [
504 ['/', 'images', 'kernel-1', 'hash-1'],
505 ['/', 'images', 'kernel-1', 'signature-1'],
506 ]
507 """
508 dtb_files, dtb_symlinks = FitImageTestCase._get_dtb_files(bb_vars)
509 fit_sign_individual = bb_vars['FIT_SIGN_INDIVIDUAL']
510 fit_uboot_env = bb_vars['FIT_UBOOT_ENV']
511 initramfs_image = bb_vars['INITRAMFS_IMAGE']
512 initramfs_image_bundle = bb_vars['INITRAMFS_IMAGE_BUNDLE']
513 uboot_sign_enable = bb_vars.get('UBOOT_SIGN_ENABLE')
514
515 # image nodes
516 images = [ 'kernel-1' ]
517 if dtb_files:
518 images += [ 'fdt-' + dtb for dtb in dtb_files ]
519 if fit_uboot_env:
520 images.append('bootscr-' + fit_uboot_env)
521 if bb_vars['MACHINE'] == "qemux86-64": # Not really the right if
522 images.append('setup-1')
523 if initramfs_image and initramfs_image_bundle != "1":
524 images.append('ramdisk-1')
525
526 # configuration nodes (one per DTB and also one per symlink)
527 if dtb_files:
528 configurations = [bb_vars['FIT_CONF_PREFIX'] + conf for conf in dtb_files + dtb_symlinks]
529 else:
530 configurations = [bb_vars['FIT_CONF_PREFIX'] + '1']
531
532 # Create a list of paths for all image and configuration nodes
533 req_its_paths = []
534 for image in images:
535 req_its_paths.append(['/', 'images', image, 'hash-1'])
536 if uboot_sign_enable == "1" and fit_sign_individual == "1":
537 req_its_paths.append(['/', 'images', image, 'signature-1'])
538 for configuration in configurations:
539 req_its_paths.append(['/', 'configurations', configuration, 'hash-1'])
540 if uboot_sign_enable == "1":
541 req_its_paths.append(['/', 'configurations', configuration, 'signature-1'])
542 return req_its_paths
543
544 def _get_req_its_fields(self, bb_vars):
545 initramfs_image = bb_vars['INITRAMFS_IMAGE']
546 initramfs_image_bundle = bb_vars['INITRAMFS_IMAGE_BUNDLE']
547 uboot_rd_loadaddress = bb_vars.get('UBOOT_RD_LOADADDRESS')
548 uboot_rd_entrypoint = bb_vars.get('UBOOT_RD_ENTRYPOINT')
549
550 its_field_check = [
551 'description = "%s";' % bb_vars['FIT_DESC'],
552 'description = "Linux kernel";',
553 'type = "' + str(bb_vars['UBOOT_MKIMAGE_KERNEL_TYPE']) + '";',
554 # 'compression = "' + str(bb_vars['FIT_KERNEL_COMP_ALG']) + '";', defined based on files in TMPDIR, not ideal...
555 'data = /incbin/("linux.bin");',
556 'arch = "' + str(bb_vars['UBOOT_ARCH']) + '";',
557 'os = "linux";',
558 'load = <' + str(bb_vars['UBOOT_LOADADDRESS']) + '>;',
559 'entry = <' + str(bb_vars['UBOOT_ENTRYPOINT']) + '>;',
560 ]
561 if initramfs_image and initramfs_image_bundle != "1":
562 its_field_check.append('type = "ramdisk";')
563 if uboot_rd_loadaddress:
564 its_field_check.append("load = <%s>;" % uboot_rd_loadaddress)
565 if uboot_rd_entrypoint:
566 its_field_check.append("entry = <%s>;" % uboot_rd_entrypoint)
567
568 fit_conf_default_dtb = bb_vars.get('FIT_CONF_DEFAULT_DTB')
569 if fit_conf_default_dtb:
570 fit_conf_prefix = bb_vars.get('FIT_CONF_PREFIX', "conf-")
571 its_field_check.append('default = "' + fit_conf_prefix + fit_conf_default_dtb + '";')
572
573 its_field_check.append('kernel = "kernel-1";')
574
575 if initramfs_image and initramfs_image_bundle != "1":
576 its_field_check.append('ramdisk = "ramdisk-1";')
577
578 return its_field_check
579
580 def _get_req_sigvalues_config(self, bb_vars):
581 """Generate a dictionary of expected configuration signature nodes"""
582 if bb_vars.get('UBOOT_SIGN_ENABLE') != "1":
583 return {}
584 sign_images = '"kernel", "fdt"'
585 if bb_vars['INITRAMFS_IMAGE'] and bb_vars['INITRAMFS_IMAGE_BUNDLE'] != "1":
586 sign_images += ', "ramdisk"'
587 if bb_vars['FIT_UBOOT_ENV']:
588 sign_images += ', "bootscr"'
589 req_sigvalues_config = {
590 'algo': '"%s,%s"' % (bb_vars['FIT_HASH_ALG'], bb_vars['FIT_SIGN_ALG']),
591 'key-name-hint': '"%s"' % bb_vars['UBOOT_SIGN_KEYNAME'],
592 'sign-images': sign_images,
593 }
594 return req_sigvalues_config
595
596 def _get_req_sigvalues_image(self, bb_vars):
597 """Generate a dictionary of expected image signature nodes"""
598 if bb_vars['FIT_SIGN_INDIVIDUAL'] != "1":
599 return {}
600 req_sigvalues_image = {
601 'algo': '"%s,%s"' % (bb_vars['FIT_HASH_ALG'], bb_vars['FIT_SIGN_ALG']),
602 'key-name-hint': '"%s"' % bb_vars['UBOOT_SIGN_IMG_KEYNAME'],
603 }
604 return req_sigvalues_image
605
    def _get_req_sections(self, bb_vars):
        """Generate a dictionary of expected sections in the output of dumpimage

        Returns a tuple (req_sections, num_signatures):
        - req_sections maps a section name (image or configuration node) to
          the key/value pairs dumpimage is expected to report for it.
        - num_signatures is the number of sections expected to carry their
          own signature (used later to count the mkimage sign comments).
        """
        dtb_files, dtb_symlinks = FitImageTestCase._get_dtb_files(bb_vars)
        fit_hash_alg = bb_vars['FIT_HASH_ALG']
        fit_sign_alg = bb_vars['FIT_SIGN_ALG']
        fit_sign_individual = bb_vars['FIT_SIGN_INDIVIDUAL']
        fit_uboot_env = bb_vars['FIT_UBOOT_ENV']
        initramfs_image = bb_vars['INITRAMFS_IMAGE']
        initramfs_image_bundle = bb_vars['INITRAMFS_IMAGE_BUNDLE']
        uboot_sign_enable = bb_vars['UBOOT_SIGN_ENABLE']
        uboot_sign_img_keyname = bb_vars['UBOOT_SIGN_IMG_KEYNAME']
        uboot_sign_keyname = bb_vars['UBOOT_SIGN_KEYNAME']
        num_signatures = 0
        # The kernel image node is always present
        req_sections = {
            "kernel-1": {
                "Type": "Kernel Image",
                "OS": "Linux",
                "Load Address": bb_vars['UBOOT_LOADADDRESS'],
                "Entry Point": bb_vars['UBOOT_ENTRYPOINT'],
            }
        }
        # Create one section per DTB
        for dtb in dtb_files:
            req_sections['fdt-' + dtb] = {
                "Type": "Flat Device Tree",
            }
        # Add a script section if there is a script
        if fit_uboot_env:
            req_sections['bootscr-' + fit_uboot_env] = { "Type": "Script" }
        # Add the initramfs
        # (only when it is a separate node, i.e. not bundled into the kernel)
        if initramfs_image and initramfs_image_bundle != "1":
            req_sections['ramdisk-1'] = {
                "Type": "RAMDisk Image",
                "Load Address": bb_vars['UBOOT_RD_LOADADDRESS'],
                "Entry Point": bb_vars['UBOOT_RD_ENTRYPOINT']
            }
        # Create a configuration section for each DTB
        if dtb_files:
            for dtb in dtb_files + dtb_symlinks:
                conf_name = bb_vars['FIT_CONF_PREFIX'] + dtb
                # Assume that DTBs with an "-alias" in its name are symlink DTBs created e.g. by the
                # bbb-dtbs-as-ext test recipe. Make the configuration node pointing to the real DTB.
                real_dtb = dtb.replace("-alias", "")
                # dtb overlays do not refer to a kernel (yet?)
                if dtb.endswith('.dtbo'):
                    req_sections[conf_name] = {
                        "FDT": 'fdt-' + real_dtb,
                    }
                else:
                    req_sections[conf_name] = {
                        "Kernel": "kernel-1",
                        "FDT": 'fdt-' + real_dtb,
                    }
                    if initramfs_image and initramfs_image_bundle != "1":
                        req_sections[conf_name]['Init Ramdisk'] = "ramdisk-1"
        else:
            # No DTBs: a single default configuration node referring to the kernel
            conf_name = bb_vars['FIT_CONF_PREFIX'] + '1'
            req_sections[conf_name] = {
                "Kernel": "kernel-1"
            }
            if initramfs_image and initramfs_image_bundle != "1":
                req_sections[conf_name]['Init Ramdisk'] = "ramdisk-1"

        # Add signing related properties if needed
        # Configuration nodes are signed with UBOOT_SIGN_KEYNAME; image nodes
        # only get their own signature (UBOOT_SIGN_IMG_KEYNAME) when
        # FIT_SIGN_INDIVIDUAL is enabled. All nodes always get a hash.
        if uboot_sign_enable == "1":
            for section in req_sections:
                req_sections[section]['Hash algo'] = fit_hash_alg
                if section.startswith(bb_vars['FIT_CONF_PREFIX']):
                    # dumpimage reports "unavailable" for configuration hashes
                    req_sections[section]['Hash value'] = "unavailable"
                    req_sections[section]['Sign algo'] = "%s,%s:%s" % (fit_hash_alg, fit_sign_alg, uboot_sign_keyname)
                    num_signatures += 1
                elif fit_sign_individual == "1":
                    req_sections[section]['Sign algo'] = "%s,%s:%s" % (fit_hash_alg, fit_sign_alg, uboot_sign_img_keyname)
                    num_signatures += 1
        return (req_sections, num_signatures)
681
    def _check_signing(self, bb_vars, sections, num_signatures, uboot_tools_bindir, fitimage_path):
        """Verify the signature nodes in the FIT image

        For each configuration node: check the signature algorithm and length
        and verify the signature against the corresponding DTB via
        self._verify_fit_image_signature. For each image node: check the hash
        length, plus the signature when FIT_SIGN_INDIVIDUAL is enabled.
        Finally check that the comment passed via UBOOT_MKIMAGE_SIGN_ARGS
        appears num_signatures times in the FIT image binary.

        Does nothing (beyond a debug log) when UBOOT_SIGN_ENABLE != "1".
        """
        if bb_vars['UBOOT_SIGN_ENABLE'] == "1":
            self.logger.debug("Verifying signatures in the FIT image")
        else:
            self.logger.debug("FIT image is not signed. Signature verification is not needed.")
            return

        fit_hash_alg = bb_vars['FIT_HASH_ALG']
        fit_sign_alg = bb_vars['FIT_SIGN_ALG']
        uboot_sign_keyname = bb_vars['UBOOT_SIGN_KEYNAME']
        uboot_sign_img_keyname = bb_vars['UBOOT_SIGN_IMG_KEYNAME']
        deploy_dir_image = bb_vars['DEPLOY_DIR_IMAGE']
        kernel_deploysubdir = bb_vars['KERNEL_DEPLOYSUBDIR']
        fit_sign_individual = bb_vars['FIT_SIGN_INDIVIDUAL']
        # Expected hash/signature lengths for the configured algorithms
        # (class-level lookup tables, keyed by algorithm name)
        fit_hash_alg_len = FitImageTestCase.MKIMAGE_HASH_LENGTHS[fit_hash_alg]
        fit_sign_alg_len = FitImageTestCase.MKIMAGE_SIGNATURE_LENGTHS[fit_sign_alg]
        for section, values in sections.items():
            # Configuration nodes are always signed with UBOOT_SIGN_KEYNAME (if UBOOT_SIGN_ENABLE = "1")
            if section.startswith(bb_vars['FIT_CONF_PREFIX']):
                sign_algo = values.get('Sign algo', None)
                req_sign_algo = "%s,%s:%s" % (fit_hash_alg, fit_sign_alg, uboot_sign_keyname)
                self.assertEqual(sign_algo, req_sign_algo, 'Signature algorithm for %s not expected value' % section)
                sign_value = values.get('Sign value', None)
                self.assertEqual(len(sign_value), fit_sign_alg_len, 'Signature value for section %s not expected length' % section)
                # The configuration node is named after its DTB; locate that
                # DTB in the deploy directory to verify the signature against it
                dtb_file_name = section.replace(bb_vars['FIT_CONF_PREFIX'], '')
                dtb_path = os.path.join(deploy_dir_image, dtb_file_name)
                if kernel_deploysubdir:
                    dtb_path = os.path.join(deploy_dir_image, kernel_deploysubdir, dtb_file_name)
                # External devicetrees created by devicetree.bbclass are in a subfolder and have priority
                dtb_path_ext = os.path.join(deploy_dir_image, "devicetree", dtb_file_name)
                if os.path.exists(dtb_path_ext):
                    dtb_path = dtb_path_ext
                self._verify_fit_image_signature(uboot_tools_bindir, fitimage_path, dtb_path, section)
            else:
                # Image nodes always need a hash which gets indirectly signed by the config signature
                hash_algo = values.get('Hash algo', None)
                self.assertEqual(hash_algo, fit_hash_alg)
                hash_value = values.get('Hash value', None)
                self.assertEqual(len(hash_value), fit_hash_alg_len, 'Hash value for section %s not expected length' % section)
                # Optionally, if FIT_SIGN_INDIVIDUAL = 1 also the image nodes have a signature (which is redundant but possible)
                if fit_sign_individual == "1":
                    sign_algo = values.get('Sign algo', None)
                    req_sign_algo = "%s,%s:%s" % (fit_hash_alg, fit_sign_alg, uboot_sign_img_keyname)
                    self.assertEqual(sign_algo, req_sign_algo, 'Signature algorithm for %s not expected value' % section)
                    sign_value = values.get('Sign value', None)
                    self.assertEqual(len(sign_value), fit_sign_alg_len, 'Signature value for section %s not expected length' % section)

        # Search for the string passed to mkimage in each signed section of the FIT image.
        # Looks like mkimage supports to add a comment but does not support to read it back.
        a_comment = FitImageTestCase._get_uboot_mkimage_sign_args(bb_vars['UBOOT_MKIMAGE_SIGN_ARGS'])
        self.logger.debug("a_comment: %s" % a_comment)
        if a_comment:
            found_comments = FitImageTestCase._find_string_in_bin_file(fitimage_path, a_comment)
            self.assertEqual(found_comments, num_signatures, "Expected %d signed and commented (%s) sections in the fitImage." %
                             (num_signatures, a_comment))
738
739class KernelFitImageRecipeTests(KernelFitImageBase):
740 """Test cases for the kernel-fitimage bbclass"""
12 741
13 def test_fit_image(self): 742 def test_fit_image(self):
14 """ 743 """
@@ -24,214 +753,230 @@ class FitImageTests(OESelftestTestCase):
24 Author: Usama Arif <usama.arif@arm.com> 753 Author: Usama Arif <usama.arif@arm.com>
25 """ 754 """
26 config = """ 755 config = """
27# Enable creation of fitImage
28KERNEL_IMAGETYPE = "Image" 756KERNEL_IMAGETYPE = "Image"
29KERNEL_IMAGETYPES += " fitImage "
30KERNEL_CLASSES = " kernel-fitimage "
31 757
32# RAM disk variables including load address and entrypoint for kernel and RAM disk 758# RAM disk variables including load address and entrypoint for kernel and RAM disk
33IMAGE_FSTYPES += "cpio.gz" 759IMAGE_FSTYPES += "cpio.gz"
34INITRAMFS_IMAGE = "core-image-minimal" 760INITRAMFS_IMAGE = "core-image-minimal"
761# core-image-minimal is used as initramfs here, drop the rootfs suffix
762IMAGE_NAME_SUFFIX:pn-core-image-minimal = ""
35UBOOT_RD_LOADADDRESS = "0x88000000" 763UBOOT_RD_LOADADDRESS = "0x88000000"
36UBOOT_RD_ENTRYPOINT = "0x88000000" 764UBOOT_RD_ENTRYPOINT = "0x88000000"
37UBOOT_LOADADDRESS = "0x80080000" 765UBOOT_LOADADDRESS = "0x80080000"
38UBOOT_ENTRYPOINT = "0x80080000" 766UBOOT_ENTRYPOINT = "0x80080000"
39FIT_DESC = "A model description" 767FIT_DESC = "A model description"
768FIT_CONF_PREFIX = "foo-"
40""" 769"""
770 config = self._config_add_kernel_classes(config)
41 self.write_config(config) 771 self.write_config(config)
772 bb_vars = self._fit_get_bb_vars()
773 self._test_fitimage(bb_vars)
42 774
43 # fitImage is created as part of linux recipe 775 def test_get_compatible_from_dtb(self):
44 bitbake("virtual/kernel") 776 """Test the oe.fitimage.get_compatible_from_dtb function
45
46 image_type = "core-image-minimal"
47 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
48 machine = get_bb_var('MACHINE')
49 fitimage_its_path = os.path.join(deploy_dir_image,
50 "fitImage-its-%s-%s-%s" % (image_type, machine, machine))
51 fitimage_path = os.path.join(deploy_dir_image,
52 "fitImage-%s-%s-%s" % (image_type, machine, machine))
53
54 self.assertTrue(os.path.exists(fitimage_its_path),
55 "%s image tree source doesn't exist" % (fitimage_its_path))
56 self.assertTrue(os.path.exists(fitimage_path),
57 "%s FIT image doesn't exist" % (fitimage_path))
58
59 # Check that the type, load address, entrypoint address and default
60 # values for kernel and ramdisk in Image Tree Source are as expected.
61 # The order of fields in the below array is important. Not all the
62 # fields are tested, only the key fields that wont vary between
63 # different architectures.
64 its_field_check = [
65 'description = "A model description";',
66 'type = "kernel";',
67 'load = <0x80080000>;',
68 'entry = <0x80080000>;',
69 'type = "ramdisk";',
70 'load = <0x88000000>;',
71 'entry = <0x88000000>;',
72 'default = "conf-1";',
73 'kernel = "kernel-1";',
74 'ramdisk = "ramdisk-1";'
75 ]
76 777
77 with open(fitimage_its_path) as its_file: 778 1. bitbake bbb-dtbs-as-ext
78 field_index = 0 779 2. Check if symlink_points_below returns the path to the DTB
79 for line in its_file: 780 3. Check if the expected compatible string is found by get_compatible_from_dtb()
80 if field_index == len(its_field_check): 781 """
81 break 782 DTB_RECIPE = "bbb-dtbs-as-ext"
82 if its_field_check[field_index] in line: 783 DTB_FILE = "am335x-bonegreen-ext.dtb"
83 field_index +=1 784 DTB_SYMLINK = "am335x-bonegreen-ext-alias.dtb"
785 DTBO_FILE = "BBORG_RELAY-00A2.dtbo"
786 EXPECTED_COMP = ["ti,am335x-bone-green", "ti,am335x-bone-black", "ti,am335x-bone", "ti,am33xx"]
84 787
85 if field_index != len(its_field_check): # if its equal, the test passed 788 config = """
86 self.assertTrue(field_index == len(its_field_check), 789DISTRO = "poky"
87 "Fields in Image Tree Source File %s did not match, error in finding %s" 790MACHINE = "beaglebone-yocto"
88 % (fitimage_its_path, its_field_check[field_index])) 791"""
792 self.write_config(config)
793
794 # Provide the fdtget command called by get_compatible_from_dtb
795 dtc_bindir = FitImageTestCase._setup_native('dtc-native')
796 fdtget_path = os.path.join(dtc_bindir, "fdtget")
797 self.assertExists(fdtget_path)
798
799 # bitbake an external DTB with a symlink to it and a DTB overlay
800 bitbake(DTB_RECIPE)
801 deploy_dir_image = get_bb_var("DEPLOY_DIR_IMAGE", DTB_RECIPE)
802 devicetree_dir = os.path.join(deploy_dir_image, "devicetree")
803 dtb_path = os.path.join(devicetree_dir, DTB_FILE)
804 dtb_alias_path = os.path.join(devicetree_dir, DTB_SYMLINK)
805 dtbo_file = os.path.join(devicetree_dir, DTBO_FILE)
806 self.assertExists(dtb_path)
807 self.assertExists(dtb_alias_path)
808 self.assertExists(dtbo_file)
809
810 # Test symlink_points_below
811 linked_dtb = oe.fitimage.symlink_points_below(dtb_alias_path, devicetree_dir)
812 self.assertEqual(linked_dtb, DTB_FILE)
813
814 # Check if get_compatible_from_dtb finds the expected compatible string in the DTBs
815 comp = oe.fitimage.get_compatible_from_dtb(dtb_path, fdtget_path)
816 self.assertEqual(comp, EXPECTED_COMP)
817 comp_alias = oe.fitimage.get_compatible_from_dtb(dtb_alias_path, fdtget_path)
818 self.assertEqual(comp_alias, EXPECTED_COMP)
819 # The alias is a symlink, therefore the compatible string is equal
820 self.assertEqual(comp_alias, comp)
89 821
822 def test_fit_image_ext_dtb_dtbo(self):
823 """
824 Summary: Check if FIT image and Image Tree Source (its) are created correctly.
825 Expected: 1) its and FIT image are built successfully
826 2) The its file contains also the external devicetree overlay
827 3) Dumping the FIT image indicates the devicetree overlay
828 """
829 config = """
830# Enable creation of fitImage
831MACHINE = "beaglebone-yocto"
832# Add a devicetree overlay which does not need kernel sources
833PREFERRED_PROVIDER_virtual/dtb = "bbb-dtbs-as-ext"
834"""
835 config = self._config_add_kernel_classes(config)
836 config = self._config_add_uboot_env(config)
837 self.write_config(config)
838 bb_vars = self._fit_get_bb_vars()
839 self._test_fitimage(bb_vars)
840
841
842 def test_sign_fit_image_configurations(self):
843 """
844 Summary: Check if FIT image and Image Tree Source (its) are created
845 and the configuration nodes are signed correctly.
846 Expected: 1) its and FIT image are built successfully
847 2) Scanning the its file indicates signing is enabled
848 as requested by UBOOT_SIGN_ENABLE
849 3) Dumping the FIT image indicates signature values
850 are present (only for the configuration nodes as
851 FIT_SIGN_INDIVIDUAL is disabled)
852 4) Verify the FIT image contains the comments passed via
853 UBOOT_MKIMAGE_SIGN_ARGS once per configuration node.
854 """
855 # Generate a configuration section which gets included into the local.conf file
856 config = """
857# Enable creation of fitImage
858MACHINE = "beaglebone-yocto"
859UBOOT_SIGN_ENABLE = "1"
860UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
861UBOOT_SIGN_KEYNAME = "dev"
862UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'"
863FIT_CONF_DEFAULT_DTB = "am335x-bonegreen.dtb"
864"""
865 config = self._config_add_kernel_classes(config)
866 config = self._config_add_uboot_env(config)
867 self.write_config(config)
868
869 # Retrieve some variables from bitbake
870 bb_vars = self._fit_get_bb_vars([
871 'FIT_KEY_GENRSA_ARGS',
872 'FIT_KEY_REQ_ARGS',
873 'FIT_KEY_SIGN_PKCS',
874 'FIT_SIGN_NUMBITS',
875 'UBOOT_SIGN_KEYDIR',
876 ])
877
878 self._gen_signing_key(bb_vars)
879 self._test_fitimage(bb_vars)
90 880
91 def test_sign_fit_image(self): 881 def test_sign_fit_image_individual(self):
92 """ 882 """
93 Summary: Check if FIT image and Image Tree Source (its) are created 883 Summary: Check if FIT image and Image Tree Source (its) are created
94 and signed correctly. 884 and all nodes are signed correctly.
95 Expected: 1) its and FIT image are built successfully 885 Expected: 1) its and FIT image are built successfully
96 2) Scanning the its file indicates signing is enabled 886 2) Scanning the its file indicates signing is enabled
97 as requested by UBOOT_SIGN_ENABLE (using keys generated 887 as requested by UBOOT_SIGN_ENABLE
98 via FIT_GENERATE_KEYS)
99 3) Dumping the FIT image indicates signature values 888 3) Dumping the FIT image indicates signature values
100 are present (including for images as enabled via 889 are present (including for images as enabled via
101 FIT_SIGN_INDIVIDUAL) 890 FIT_SIGN_INDIVIDUAL)
102 4) Examination of the do_assemble_fitimage runfile/logfile 891 This also implies that FIT_GENERATE_KEYS = "1" works.
103 indicate that UBOOT_MKIMAGE, UBOOT_MKIMAGE_SIGN and 892 4) Verify the FIT image contains the comments passed via
104 UBOOT_MKIMAGE_SIGN_ARGS are working as expected. 893 UBOOT_MKIMAGE_SIGN_ARGS once per image and per
894 configuration node.
895 Note: This test is mostly for backward compatibility.
896 The recommended approach is to sign the configuration nodes
897 which include also the hashes of all the images. Signing
898 all the images individually is therefore redundant.
105 Product: oe-core 899 Product: oe-core
106 Author: Paul Eggleton <paul.eggleton@microsoft.com> based upon 900 Author: Paul Eggleton <paul.eggleton@microsoft.com> based upon
107 work by Usama Arif <usama.arif@arm.com> 901 work by Usama Arif <usama.arif@arm.com>
108 """ 902 """
903 # Generate a configuration section which gets included into the local.conf file
109 config = """ 904 config = """
110# Enable creation of fitImage 905# Enable creation of fitImage
111MACHINE = "beaglebone-yocto" 906MACHINE = "beaglebone-yocto"
112KERNEL_IMAGETYPES += " fitImage "
113KERNEL_CLASSES = " kernel-fitimage test-mkimage-wrapper "
114UBOOT_SIGN_ENABLE = "1" 907UBOOT_SIGN_ENABLE = "1"
115FIT_GENERATE_KEYS = "1" 908FIT_GENERATE_KEYS = "1"
116UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys" 909UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
117UBOOT_SIGN_KEYNAME = "oe-selftest" 910UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest"
911UBOOT_SIGN_KEYNAME = "cfg-oe-selftest"
118FIT_SIGN_INDIVIDUAL = "1" 912FIT_SIGN_INDIVIDUAL = "1"
119UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'" 913UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'"
120""" 914"""
915 config = self._config_add_kernel_classes(config)
916 config = self._config_add_uboot_env(config)
121 self.write_config(config) 917 self.write_config(config)
918 bb_vars = self._fit_get_bb_vars()
122 919
123 # fitImage is created as part of linux recipe 920 # Ensure new keys are generated and FIT_GENERATE_KEYS = "1" is tested
124 bitbake("virtual/kernel") 921 bitbake("kernel-signing-keys-native -c compile -f")
125
126 image_type = "core-image-minimal"
127 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
128 machine = get_bb_var('MACHINE')
129 fitimage_its_path = os.path.join(deploy_dir_image,
130 "fitImage-its-%s" % (machine,))
131 fitimage_path = os.path.join(deploy_dir_image,
132 "fitImage-%s.bin" % (machine,))
133
134 self.assertTrue(os.path.exists(fitimage_its_path),
135 "%s image tree source doesn't exist" % (fitimage_its_path))
136 self.assertTrue(os.path.exists(fitimage_path),
137 "%s FIT image doesn't exist" % (fitimage_path))
138
139 req_itspaths = [
140 ['/', 'images', 'kernel-1'],
141 ['/', 'images', 'kernel-1', 'signature-1'],
142 ['/', 'images', 'fdt-am335x-boneblack.dtb'],
143 ['/', 'images', 'fdt-am335x-boneblack.dtb', 'signature-1'],
144 ['/', 'configurations', 'conf-am335x-boneblack.dtb'],
145 ['/', 'configurations', 'conf-am335x-boneblack.dtb', 'signature-1'],
146 ]
147 922
148 itspath = [] 923 self._test_fitimage(bb_vars)
149 itspaths = []
150 linect = 0
151 sigs = {}
152 with open(fitimage_its_path) as its_file:
153 linect += 1
154 for line in its_file:
155 line = line.strip()
156 if line.endswith('};'):
157 itspath.pop()
158 elif line.endswith('{'):
159 itspath.append(line[:-1].strip())
160 itspaths.append(itspath[:])
161 elif itspath and itspath[-1] == 'signature-1':
162 itsdotpath = '.'.join(itspath)
163 if not itsdotpath in sigs:
164 sigs[itsdotpath] = {}
165 if not '=' in line or not line.endswith(';'):
166 self.fail('Unexpected formatting in %s sigs section line %d:%s' % (fitimage_its_path, linect, line))
167 key, value = line.split('=', 1)
168 sigs[itsdotpath][key.rstrip()] = value.lstrip().rstrip(';')
169 924
170 for reqpath in req_itspaths: 925 def test_fit_image_sign_initramfs(self):
171 if not reqpath in itspaths: 926 """
172 self.fail('Missing section in its file: %s' % reqpath) 927 Summary: Verifies the content of the initramfs node in the FIT Image Tree Source (its)
928 The FIT settings are set by the test case.
929 The machine used is beaglebone-yocto.
930 Expected: 1. The ITS is generated with initramfs support
931 2. All the fields in the kernel node are as expected (matching the
932 conf settings)
933 3. The kernel is included in all the available configurations and
934 its hash is included in the configuration signature
173 935
174 reqsigvalues_image = { 936 Product: oe-core
175 'algo': '"sha256,rsa2048"', 937 Author: Abdellatif El Khlifi <abdellatif.elkhlifi@arm.com>
176 'key-name-hint': '"oe-selftest"', 938 """
177 }
178 reqsigvalues_config = {
179 'algo': '"sha256,rsa2048"',
180 'key-name-hint': '"oe-selftest"',
181 'sign-images': '"kernel", "fdt"',
182 }
183 939
184 for itspath, values in sigs.items(): 940 config = """
185 if 'conf-' in itspath: 941DISTRO = "poky"
186 reqsigvalues = reqsigvalues_config 942MACHINE = "beaglebone-yocto"
187 else: 943INITRAMFS_IMAGE = "core-image-minimal-initramfs"
188 reqsigvalues = reqsigvalues_image 944INITRAMFS_SCRIPTS = ""
189 for reqkey, reqvalue in reqsigvalues.items(): 945UBOOT_MACHINE = "am335x_evm_defconfig"
190 value = values.get(reqkey, None) 946UBOOT_SIGN_ENABLE = "1"
191 if value is None: 947UBOOT_SIGN_KEYNAME = "beaglebonekey"
192 self.fail('Missing key "%s" in its file signature section %s' % (reqkey, itspath)) 948UBOOT_SIGN_KEYDIR ?= "${DEPLOY_DIR_IMAGE}"
193 self.assertEqual(value, reqvalue) 949UBOOT_DTB_BINARY = "u-boot.dtb"
194 950UBOOT_ENTRYPOINT = "0x80000000"
195 # Dump the image to see if it really got signed 951UBOOT_LOADADDRESS = "0x80000000"
196 bitbake("u-boot-tools-native -c addto_recipe_sysroot") 952UBOOT_RD_LOADADDRESS = "0x88000000"
197 result = runCmd('bitbake -e u-boot-tools-native | grep ^RECIPE_SYSROOT_NATIVE=') 953UBOOT_RD_ENTRYPOINT = "0x88000000"
198 recipe_sysroot_native = result.output.split('=')[1].strip('"') 954UBOOT_DTB_LOADADDRESS = "0x82000000"
199 dumpimage_path = os.path.join(recipe_sysroot_native, 'usr', 'bin', 'dumpimage') 955UBOOT_ARCH = "arm"
200 result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path)) 956UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
201 in_signed = None 957UBOOT_MKIMAGE_KERNEL_TYPE = "kernel"
202 signed_sections = {} 958UBOOT_EXTLINUX = "0"
203 for line in result.output.splitlines(): 959KERNEL_IMAGETYPE_REPLACEMENT = "zImage"
204 if line.startswith((' Configuration', ' Image')): 960FIT_KERNEL_COMP_ALG = "none"
205 in_signed = re.search('\((.*)\)', line).groups()[0] 961FIT_HASH_ALG = "sha256"
206 elif re.match('^ *', line) in (' ', ''): 962"""
207 in_signed = None 963 config = self._config_add_kernel_classes(config)
208 elif in_signed: 964 config = self._config_add_uboot_env(config)
209 if not in_signed in signed_sections: 965 self.write_config(config)
210 signed_sections[in_signed] = {} 966
211 key, value = line.split(':', 1) 967 # Retrieve some variables from bitbake
212 signed_sections[in_signed][key.strip()] = value.strip() 968 bb_vars = self._fit_get_bb_vars([
213 self.assertIn('kernel-1', signed_sections) 969 'FIT_KEY_GENRSA_ARGS',
214 self.assertIn('fdt-am335x-boneblack.dtb', signed_sections) 970 'FIT_KEY_REQ_ARGS',
215 self.assertIn('conf-am335x-boneblack.dtb', signed_sections) 971 'FIT_KEY_SIGN_PKCS',
216 for signed_section, values in signed_sections.items(): 972 'FIT_SIGN_NUMBITS',
217 value = values.get('Sign algo', None) 973 'UBOOT_SIGN_KEYDIR',
218 self.assertEqual(value, 'sha256,rsa2048:oe-selftest', 'Signature algorithm for %s not expected value' % signed_section) 974 ])
219 value = values.get('Sign value', None) 975
220 self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section) 976 self._gen_signing_key(bb_vars)
221 977 self._test_fitimage(bb_vars)
222 # Check for UBOOT_MKIMAGE_SIGN_ARGS 978
223 result = runCmd('bitbake -e virtual/kernel | grep ^T=') 979 def test_fit_image_sign_initramfs_bundle(self):
224 tempdir = result.output.split('=', 1)[1].strip().strip('')
225 result = runCmd('grep "a smart comment" %s/run.do_assemble_fitimage' % tempdir, ignore_status=True)
226 self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN_ARGS value did not get used')
227
228 # Check for evidence of test-mkimage-wrapper class
229 result = runCmd('grep "### uboot-mkimage wrapper message" %s/log.do_assemble_fitimage' % tempdir, ignore_status=True)
230 self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE did not work')
231 result = runCmd('grep "### uboot-mkimage signing wrapper message" %s/log.do_assemble_fitimage' % tempdir, ignore_status=True)
232 self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN did not work')
233
234 def test_initramfs_bundle(self):
235 """ 980 """
236 Summary: Verifies the content of the initramfs bundle node in the FIT Image Tree Source (its) 981 Summary: Verifies the content of the initramfs bundle node in the FIT Image Tree Source (its)
237 The FIT settings are set by the test case. 982 The FIT settings are set by the test case.
@@ -247,14 +992,12 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'"
247 """ 992 """
248 993
249 config = """ 994 config = """
250DISTRO="poky" 995DISTRO = "poky"
251MACHINE = "beaglebone-yocto" 996MACHINE = "beaglebone-yocto"
252INITRAMFS_IMAGE_BUNDLE = "1" 997INITRAMFS_IMAGE_BUNDLE = "1"
253INITRAMFS_IMAGE = "core-image-minimal-initramfs" 998INITRAMFS_IMAGE = "core-image-minimal-initramfs"
254INITRAMFS_SCRIPTS = "" 999INITRAMFS_SCRIPTS = ""
255UBOOT_MACHINE = "am335x_evm_defconfig" 1000UBOOT_MACHINE = "am335x_evm_defconfig"
256KERNEL_CLASSES = " kernel-fitimage "
257KERNEL_IMAGETYPES = "fitImage"
258UBOOT_SIGN_ENABLE = "1" 1001UBOOT_SIGN_ENABLE = "1"
259UBOOT_SIGN_KEYNAME = "beaglebonekey" 1002UBOOT_SIGN_KEYNAME = "beaglebonekey"
260UBOOT_SIGN_KEYDIR ?= "${DEPLOY_DIR_IMAGE}" 1003UBOOT_SIGN_KEYDIR ?= "${DEPLOY_DIR_IMAGE}"
@@ -264,102 +1007,725 @@ UBOOT_LOADADDRESS = "0x80000000"
264UBOOT_DTB_LOADADDRESS = "0x82000000" 1007UBOOT_DTB_LOADADDRESS = "0x82000000"
265UBOOT_ARCH = "arm" 1008UBOOT_ARCH = "arm"
266UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000" 1009UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
1010UBOOT_MKIMAGE_KERNEL_TYPE = "kernel"
267UBOOT_EXTLINUX = "0" 1011UBOOT_EXTLINUX = "0"
268FIT_GENERATE_KEYS = "1"
269KERNEL_IMAGETYPE_REPLACEMENT = "zImage" 1012KERNEL_IMAGETYPE_REPLACEMENT = "zImage"
1013FIT_KERNEL_COMP_ALG = "none"
270FIT_HASH_ALG = "sha256" 1014FIT_HASH_ALG = "sha256"
271""" 1015"""
1016 config = self._config_add_kernel_classes(config)
1017 config = self._config_add_uboot_env(config)
272 self.write_config(config) 1018 self.write_config(config)
1019 bb_vars = self._fit_get_bb_vars()
1020 self._gen_signing_key(bb_vars)
1021 self._test_fitimage(bb_vars)
1022
1023class FitImagePyTests(KernelFitImageBase):
1024 """Test cases for the fitimage.py module without calling bitbake"""
273 1025
274 # fitImage is created as part of linux recipe 1026 def _test_fitimage_py(self, bb_vars_overrides=None):
275 bitbake("virtual/kernel") 1027 topdir = os.path.join(os.environ['BUILDDIR'])
1028 fitimage_its_path = os.path.join(topdir, self._testMethodName + '.its')
276 1029
277 image_type = get_bb_var('INITRAMFS_IMAGE') 1030 # Provide variables without calling bitbake
278 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') 1031 bb_vars = {
279 machine = get_bb_var('MACHINE') 1032 # image-fitimage.conf
280 fitimage_its_path = os.path.join(deploy_dir_image, 1033 'FIT_DESC': "Kernel fitImage for a dummy distro",
281 "fitImage-its-%s-%s-%s" % (image_type, machine, machine)) 1034 'FIT_HASH_ALG': "sha256",
282 fitimage_path = os.path.join(deploy_dir_image,"fitImage") 1035 'FIT_SIGN_ALG': "rsa2048",
1036 'FIT_PAD_ALG': "pkcs-1.5",
1037 'FIT_GENERATE_KEYS': "0",
1038 'FIT_SIGN_NUMBITS': "2048",
1039 'FIT_KEY_GENRSA_ARGS': "-F4",
1040 'FIT_KEY_REQ_ARGS': "-batch -new",
1041 'FIT_KEY_SIGN_PKCS': "-x509",
1042 'FIT_SIGN_INDIVIDUAL': "0",
1043 'FIT_CONF_PREFIX': "conf-",
1044 'FIT_SUPPORTED_INITRAMFS_FSTYPES': "cpio.lz4 cpio.lzo cpio.lzma cpio.xz cpio.zst cpio.gz ext2.gz cpio",
1045 'FIT_CONF_DEFAULT_DTB': "",
1046 'FIT_ADDRESS_CELLS': "1",
1047 'FIT_UBOOT_ENV': "",
1048 # kernel.bbclass
1049 'UBOOT_ENTRYPOINT': "0x20008000",
1050 'UBOOT_LOADADDRESS': "0x20008000",
1051 'INITRAMFS_IMAGE': "",
1052 'INITRAMFS_IMAGE_BUNDLE': "",
1053 # kernel-uboot.bbclass
1054 'FIT_KERNEL_COMP_ALG': "gzip",
1055 'FIT_KERNEL_COMP_ALG_EXTENSION': ".gz",
1056 'UBOOT_MKIMAGE_KERNEL_TYPE': "kernel",
1057 # uboot-config.bbclass
1058 'UBOOT_MKIMAGE_DTCOPTS': "",
1059 'UBOOT_MKIMAGE': "uboot-mkimage",
1060 'UBOOT_MKIMAGE_SIGN': "uboot-mkimage",
1061 'UBOOT_MKIMAGE_SIGN_ARGS': "",
1062 'UBOOT_SIGN_ENABLE': "0",
1063 'UBOOT_SIGN_KEYDIR': None,
1064 'UBOOT_SIGN_KEYNAME': None,
1065 'UBOOT_SIGN_IMG_KEYNAME': None,
1066 # others
1067 'MACHINE': "qemux86-64",
1068 'UBOOT_ARCH': "x86",
1069 'HOST_PREFIX': "x86_64-poky-linux-"
1070 }
1071 if bb_vars_overrides:
1072 bb_vars.update(bb_vars_overrides)
283 1073
284 self.assertTrue(os.path.exists(fitimage_its_path), 1074 root_node = oe.fitimage.ItsNodeRootKernel(
285 "%s image tree source doesn't exist" % (fitimage_its_path)) 1075 bb_vars["FIT_DESC"], bb_vars["FIT_ADDRESS_CELLS"],
286 self.assertTrue(os.path.exists(fitimage_path), 1076 bb_vars['HOST_PREFIX'], bb_vars['UBOOT_ARCH'], bb_vars["FIT_CONF_PREFIX"],
287 "%s FIT image doesn't exist" % (fitimage_path)) 1077 oe.types.boolean(bb_vars['UBOOT_SIGN_ENABLE']), bb_vars["UBOOT_SIGN_KEYDIR"],
1078 bb_vars["UBOOT_MKIMAGE"], bb_vars["UBOOT_MKIMAGE_DTCOPTS"],
1079 bb_vars["UBOOT_MKIMAGE_SIGN"], bb_vars["UBOOT_MKIMAGE_SIGN_ARGS"],
1080 bb_vars['FIT_HASH_ALG'], bb_vars['FIT_SIGN_ALG'], bb_vars['FIT_PAD_ALG'],
1081 bb_vars['UBOOT_SIGN_KEYNAME'],
1082 oe.types.boolean(bb_vars['FIT_SIGN_INDIVIDUAL']), bb_vars['UBOOT_SIGN_IMG_KEYNAME']
1083 )
288 1084
289 kernel_load = str(get_bb_var('UBOOT_LOADADDRESS')) 1085 root_node.fitimage_emit_section_kernel("kernel-1", "linux.bin", "none",
290 kernel_entry = str(get_bb_var('UBOOT_ENTRYPOINT')) 1086 bb_vars.get('UBOOT_LOADADDRESS'), bb_vars.get('UBOOT_ENTRYPOINT'),
291 initramfs_bundle_format = str(get_bb_var('KERNEL_IMAGETYPE_REPLACEMENT')) 1087 bb_vars.get('UBOOT_MKIMAGE_KERNEL_TYPE'), bb_vars.get("UBOOT_ENTRYSYMBOL")
292 uboot_arch = str(get_bb_var('UBOOT_ARCH')) 1088 )
293 initramfs_bundle = "arch/" + uboot_arch + "/boot/" + initramfs_bundle_format + ".initramfs"
294 fit_hash_alg = str(get_bb_var('FIT_HASH_ALG'))
295 1089
296 its_file = open(fitimage_its_path) 1090 dtb_files, _ = FitImageTestCase._get_dtb_files(bb_vars)
1091 for dtb in dtb_files:
1092 root_node.fitimage_emit_section_dtb(dtb, os.path.join("a-dir", dtb),
1093 bb_vars.get("UBOOT_DTB_LOADADDRESS"), bb_vars.get("UBOOT_DTBO_LOADADDRESS"))
297 1094
298 its_lines = [line.strip() for line in its_file.readlines()] 1095 if bb_vars.get('FIT_UBOOT_ENV'):
1096 root_node.fitimage_emit_section_boot_script(
1097 "bootscr-" + bb_vars['FIT_UBOOT_ENV'], bb_vars['FIT_UBOOT_ENV'])
299 1098
300 exp_node_lines = [ 1099 if bb_vars['MACHINE'] == "qemux86-64": # Not really the right if
301 'kernel-1 {', 1100 root_node.fitimage_emit_section_setup("setup-1", "setup1.bin")
302 'description = "Linux kernel";', 1101
303 'data = /incbin/("' + initramfs_bundle + '");', 1102 if bb_vars.get('INITRAMFS_IMAGE') and bb_vars.get("INITRAMFS_IMAGE_BUNDLE") != "1":
304 'type = "kernel";', 1103 root_node.fitimage_emit_section_ramdisk("ramdisk-1", "a-dir/a-initramfs-1",
305 'arch = "' + uboot_arch + '";', 1104 "core-image-minimal-initramfs",
306 'os = "linux";', 1105 bb_vars.get("UBOOT_RD_LOADADDRESS"), bb_vars.get("UBOOT_RD_ENTRYPOINT"))
1106
1107 root_node.fitimage_emit_section_config(bb_vars['FIT_CONF_DEFAULT_DTB'])
1108 root_node.write_its_file(fitimage_its_path)
1109
1110 self.assertExists(fitimage_its_path, "%s image tree source doesn't exist" % (fitimage_its_path))
1111 self.logger.debug("Checking its: %s" % fitimage_its_path)
1112 self._check_its_file(bb_vars, fitimage_its_path)
1113
1114 def test_fitimage_py_default(self):
1115 self._test_fitimage_py()
1116
1117 def test_fitimage_py_default_dtb(self):
1118 bb_vars_overrides = {
1119 'KERNEL_DEVICETREE': "one.dtb two.dtb three.dtb",
1120 'FIT_CONF_DEFAULT_DTB': "two.dtb"
1121 }
1122 self._test_fitimage_py(bb_vars_overrides)
1123
1124
1125class UBootFitImageTests(FitImageTestCase):
1126 """Test cases for the uboot-sign bbclass"""
1127
1128 BOOTLOADER_RECIPE = "virtual/bootloader"
1129
1130 def _fit_get_bb_vars(self, additional_vars=[]):
1131 """Get bb_vars as needed by _test_sign_fit_image
1132
1133 Call the get_bb_vars function once and get all variables needed by the test case.
1134 """
1135 internal_used = {
1136 'DEPLOY_DIR_IMAGE',
1137 'FIT_HASH_ALG',
1138 'FIT_KEY_GENRSA_ARGS',
1139 'FIT_KEY_REQ_ARGS',
1140 'FIT_KEY_SIGN_PKCS',
1141 'FIT_SIGN_ALG',
1142 'FIT_SIGN_INDIVIDUAL',
1143 'FIT_SIGN_NUMBITS',
1144 'MACHINE',
1145 'SPL_MKIMAGE_SIGN_ARGS',
1146 'SPL_SIGN_ENABLE',
1147 'SPL_SIGN_KEYNAME',
1148 'UBOOT_ARCH',
1149 'UBOOT_DTB_BINARY',
1150 'UBOOT_DTB_IMAGE',
1151 'UBOOT_FIT_ARM_TRUSTED_FIRMWARE_ENTRYPOINT',
1152 'UBOOT_FIT_ARM_TRUSTED_FIRMWARE_LOADADDRESS',
1153 'UBOOT_FIT_ARM_TRUSTED_FIRMWARE',
1154 'UBOOT_FIT_CONF_USER_LOADABLES',
1155 'UBOOT_FIT_DESC',
1156 'UBOOT_FIT_HASH_ALG',
1157 'UBOOT_FIT_SIGN_ALG',
1158 'UBOOT_FIT_TEE_ENTRYPOINT',
1159 'UBOOT_FIT_TEE_LOADADDRESS',
1160 'UBOOT_FIT_TEE',
1161 'UBOOT_FIT_UBOOT_ENTRYPOINT',
1162 'UBOOT_FIT_UBOOT_LOADADDRESS',
1163 'UBOOT_FIT_USER_SETTINGS',
1164 'UBOOT_FITIMAGE_ENABLE',
1165 'UBOOT_NODTB_BINARY',
1166 'UBOOT_SIGN_ENABLE',
1167 'UBOOT_SIGN_IMG_KEYNAME',
1168 'UBOOT_SIGN_KEYDIR',
1169 'UBOOT_SIGN_KEYNAME',
1170 }
1171 bb_vars = get_bb_vars(list(internal_used | set(additional_vars)), UBootFitImageTests.BOOTLOADER_RECIPE)
1172 self.logger.debug("bb_vars: %s" % pprint.pformat(bb_vars, indent=4))
1173 return bb_vars
1174
1175 def _bitbake_fit_image(self, bb_vars):
1176 """Bitbake the bootloader and return the paths to the its file and the FIT image"""
1177 bitbake(UBootFitImageTests.BOOTLOADER_RECIPE)
1178
1179 deploy_dir_image = bb_vars['DEPLOY_DIR_IMAGE']
1180 machine = bb_vars['MACHINE']
1181 fitimage_its_path = os.path.join(deploy_dir_image, "u-boot-its-%s" % machine)
1182 fitimage_path = os.path.join(deploy_dir_image, "u-boot-fitImage-%s" % machine)
1183 return (fitimage_its_path, fitimage_path)
1184
1185 def _get_req_its_paths(self, bb_vars):
1186 # image nodes
1187 images = [ 'uboot', 'fdt', ]
1188 if bb_vars['UBOOT_FIT_TEE'] == "1":
1189 images.append('tee')
1190 if bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE'] == "1":
1191 images.append('atf')
1192 # if bb_vars['UBOOT_FIT_USER_SETTINGS']:
1193
1194 # configuration nodes
1195 configurations = [ 'conf']
1196
1197 # Create a list of paths for all image and configuration nodes
1198 req_its_paths = []
1199 for image in images:
1200 req_its_paths.append(['/', 'images', image])
1201 if bb_vars['SPL_SIGN_ENABLE'] == "1":
1202 req_its_paths.append(['/', 'images', image, 'signature'])
1203 for configuration in configurations:
1204 req_its_paths.append(['/', 'configurations', configuration])
1205 return req_its_paths
1206
1207 def _get_req_its_fields(self, bb_vars):
1208 loadables = ["uboot"]
1209 its_field_check = [
1210 'description = "%s";' % bb_vars['UBOOT_FIT_DESC'],
1211 'description = "U-Boot image";',
1212 'data = /incbin/("%s");' % bb_vars['UBOOT_NODTB_BINARY'],
1213 'type = "standalone";',
1214 'os = "u-boot";',
1215 'arch = "%s";' % bb_vars['UBOOT_ARCH'],
1216 'compression = "none";',
1217 'load = <%s>;' % bb_vars['UBOOT_FIT_UBOOT_LOADADDRESS'],
1218 'entry = <%s>;' % bb_vars['UBOOT_FIT_UBOOT_ENTRYPOINT'],
1219 'description = "U-Boot FDT";',
1220 'data = /incbin/("%s");' % bb_vars['UBOOT_DTB_BINARY'],
1221 'type = "flat_dt";',
1222 'arch = "%s";' % bb_vars['UBOOT_ARCH'],
307 'compression = "none";', 1223 'compression = "none";',
308 'load = <' + kernel_load + '>;',
309 'entry = <' + kernel_entry + '>;',
310 'hash-1 {',
311 'algo = "' + fit_hash_alg +'";',
312 '};',
313 '};'
314 ] 1224 ]
1225 if bb_vars['UBOOT_FIT_TEE'] == "1":
1226 its_field_check += [
1227 'description = "Trusted Execution Environment";',
1228 'data = /incbin/("%s");' % bb_vars['UBOOT_FIT_TEE_IMAGE'],
1229 'type = "tee";',
1230 'arch = "%s";' % bb_vars['UBOOT_ARCH'],
1231 'os = "tee";',
1232 'load = <%s>;' % bb_vars['UBOOT_FIT_TEE_LOADADDRESS'],
1233 'entry = <%s>;' % bb_vars['UBOOT_FIT_TEE_ENTRYPOINT'],
1234 'compression = "none";',
1235 ]
1236 loadables.insert(0, "tee")
1237 if bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE'] == "1":
1238 its_field_check += [
1239 'description = "ARM Trusted Firmware";',
1240 'data = /incbin/("%s");' % bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE_IMAGE'],
1241 'type = "firmware";',
1242 'arch = "%s";' % bb_vars['UBOOT_ARCH'],
1243 'os = "arm-trusted-firmware";',
1244 'load = <%s>;' % bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE_LOADADDRESS'],
1245 'entry = <%s>;' % bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE_ENTRYPOINT'],
1246 'compression = "none";',
1247 ]
1248 loadables.insert(0, "atf")
1249 its_field_check += [
1250 'default = "conf";',
1251 'description = "Boot with signed U-Boot FIT";',
1252 'loadables = "%s";' % '", "'.join(loadables),
1253 'fdt = "fdt";',
1254 ]
1255 return its_field_check
315 1256
316 node_str = exp_node_lines[0] 1257 def _get_req_sigvalues_config(self, bb_vars):
1258 # COnfigurations are not signed by uboot-sign
1259 return {}
317 1260
318 test_passed = False 1261 def _get_req_sigvalues_image(self, bb_vars):
1262 if bb_vars['SPL_SIGN_ENABLE'] != "1":
1263 return {}
1264 req_sigvalues_image = {
1265 'algo': '"%s,%s"' % (bb_vars['UBOOT_FIT_HASH_ALG'], bb_vars['UBOOT_FIT_SIGN_ALG']),
1266 'key-name-hint': '"%s"' % bb_vars['SPL_SIGN_KEYNAME'],
1267 }
1268 return req_sigvalues_image
319 1269
320 print ("checking kernel node\n") 1270 def _get_req_sections(self, bb_vars):
1271 """Generate the expected output of dumpimage for beaglebone targets
321 1272
322 if node_str in its_lines: 1273 The dict generated by this function is supposed to be compared against
323 node_start_idx = its_lines.index(node_str) 1274 the dict which is generated by the _dump_fitimage function.
324 node = its_lines[node_start_idx:(node_start_idx + len(exp_node_lines))] 1275 """
325 if node == exp_node_lines: 1276 loadables = ['uboot']
326 print("kernel node verified") 1277 req_sections = {
327 else: 1278 "uboot": {
328 self.assertTrue(test_passed == True,"kernel node does not match expectation") 1279 "Type": "Standalone Program",
329 1280 "Load Address": bb_vars['UBOOT_FIT_UBOOT_LOADADDRESS'],
330 rx_configs = re.compile("^conf-.*") 1281 "Entry Point": bb_vars['UBOOT_FIT_UBOOT_ENTRYPOINT'],
331 its_configs = list(filter(rx_configs.match, its_lines)) 1282 },
332 1283 "fdt": {
333 for cfg_str in its_configs: 1284 "Type": "Flat Device Tree",
334 cfg_start_idx = its_lines.index(cfg_str) 1285 }
335 line_idx = cfg_start_idx + 2 1286 }
336 node_end = False 1287 if bb_vars['UBOOT_FIT_TEE'] == "1":
337 while node_end == False: 1288 loadables.insert(0, "tee")
338 if its_lines[line_idx] == "};" and its_lines[line_idx-1] == "};" : 1289 req_sections['tee'] = {
339 node_end = True 1290 "Type": "Trusted Execution Environment Image",
340 line_idx = line_idx + 1 1291 # "Load Address": bb_vars['UBOOT_FIT_TEE_LOADADDRESS'], not printed by mkimage?
341 1292 # "Entry Point": bb_vars['UBOOT_FIT_TEE_ENTRYPOINT'], not printed by mkimage?
342 node = its_lines[cfg_start_idx:line_idx] 1293 }
343 print("checking configuration " + cfg_str.rstrip(" {")) 1294 if bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE'] == "1":
344 rx_desc_line = re.compile("^description.*1 Linux kernel.*") 1295 loadables.insert(0, "atf")
345 if len(list(filter(rx_desc_line.match, node))) != 1: 1296 req_sections['atf'] = {
346 self.assertTrue(test_passed == True,"kernel keyword not found in the description line") 1297 "Type": "Firmware",
347 break 1298 "Load Address": bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE_LOADADDRESS'],
348 else: 1299 # "Entry Point": bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE_ENTRYPOINT'], not printed by mkimage?
349 print("kernel keyword found in the description line") 1300 }
1301 req_sections["conf"] = {
1302 "Kernel": "unavailable",
1303 "FDT": "fdt",
1304 "Loadables": ','.join(loadables),
1305 }
350 1306
351 if 'kernel = "kernel-1";' not in node: 1307 # Add signing related properties if needed
352 self.assertTrue(test_passed == True,"kernel line not found") 1308 uboot_fit_hash_alg = bb_vars['UBOOT_FIT_HASH_ALG']
353 break 1309 uboot_fit_sign_alg = bb_vars['UBOOT_FIT_SIGN_ALG']
354 else: 1310 spl_sign_enable = bb_vars['SPL_SIGN_ENABLE']
355 print("kernel line found") 1311 spl_sign_keyname = bb_vars['SPL_SIGN_KEYNAME']
1312 num_signatures = 0
1313 if spl_sign_enable == "1":
1314 for section in req_sections:
1315 if not section.startswith('conf'):
1316 req_sections[section]['Sign algo'] = "%s,%s:%s" % \
1317 (uboot_fit_hash_alg, uboot_fit_sign_alg, spl_sign_keyname)
1318 num_signatures += 1
1319 return (req_sections, num_signatures)
1320
1321 def _check_signing(self, bb_vars, sections, num_signatures, uboot_tools_bindir, fitimage_path):
1322 if bb_vars['UBOOT_FITIMAGE_ENABLE'] == '1' and bb_vars['SPL_SIGN_ENABLE'] == "1":
1323 self.logger.debug("Verifying signatures in the FIT image")
1324 else:
1325 self.logger.debug("FIT image is not signed. Signature verification is not needed.")
1326 return
356 1327
357 rx_sign_line = re.compile("^sign-images.*kernel.*") 1328 uboot_fit_hash_alg = bb_vars['UBOOT_FIT_HASH_ALG']
358 if len(list(filter(rx_sign_line.match, node))) != 1: 1329 uboot_fit_sign_alg = bb_vars['UBOOT_FIT_SIGN_ALG']
359 self.assertTrue(test_passed == True,"kernel hash not signed") 1330 spl_sign_keyname = bb_vars['SPL_SIGN_KEYNAME']
360 break 1331 fit_sign_alg_len = FitImageTestCase.MKIMAGE_SIGNATURE_LENGTHS[uboot_fit_sign_alg]
1332 for section, values in sections.items():
1333 # Configuration nodes are always signed with UBOOT_SIGN_KEYNAME (if UBOOT_SIGN_ENABLE = "1")
1334 if section.startswith("conf"):
1335 # uboot-sign does not sign configuration nodes
1336 pass
361 else: 1337 else:
362 print("kernel hash signed") 1338 # uboot-sign does not add hash nodes, only image signatures
1339 sign_algo = values.get('Sign algo', None)
1340 req_sign_algo = "%s,%s:%s" % (uboot_fit_hash_alg, uboot_fit_sign_alg, spl_sign_keyname)
1341 self.assertEqual(sign_algo, req_sign_algo, 'Signature algorithm for %s not expected value' % section)
1342 sign_value = values.get('Sign value', None)
1343 self.assertEqual(len(sign_value), fit_sign_alg_len, 'Signature value for section %s not expected length' % section)
1344
1345 # Search for the string passed to mkimage in each signed section of the FIT image.
1346 # Looks like mkimage supports to add a comment but does not support to read it back.
1347 a_comment = FitImageTestCase._get_uboot_mkimage_sign_args(bb_vars['SPL_MKIMAGE_SIGN_ARGS'])
1348 self.logger.debug("a_comment: %s" % a_comment)
1349 if a_comment:
1350 found_comments = FitImageTestCase._find_string_in_bin_file(fitimage_path, a_comment)
1351 self.assertEqual(found_comments, num_signatures, "Expected %d signed and commented (%s) sections in the fitImage." %
1352 (num_signatures, a_comment))
1353
1354 def _check_kernel_dtb(self, bb_vars):
1355 """
1356 Check if the device-tree from U-Boot has the kernel public key(s).
1357
1358 The concat_dtb function of the uboot-sign.bbclass injects the public keys
1359 which are required for verifying the kernel at run-time into the DTB from
1360 U-Boot. The following example is from a build with FIT_SIGN_INDIVIDUAL
1361 set to "1". If it is set to "0" the key-the-kernel-image-key node is not
1362 present.
1363 / {
1364 ...
1365 signature {
1366 key-the-kernel-image-key {
1367 required = "image";
1368 algo = "sha256,rsa2048";
1369 ...
1370 };
1371 key-the-kernel-config-key {
1372 required = "conf";
1373 algo = "sha256,rsa2048";
1374 ...
1375 };
1376 };
1377 """
1378 # Setup u-boot-tools-native
1379 dtc_bindir = FitImageTestCase._setup_native('dtc-native')
1380
1381 # Check if 1 or 2 signature sections are in the DTB.
1382 uboot_dtb_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], bb_vars['UBOOT_DTB_IMAGE'])
1383 algo = "%s,%s" % (bb_vars['FIT_HASH_ALG'], bb_vars['FIT_SIGN_ALG'])
1384 if bb_vars['FIT_SIGN_INDIVIDUAL'] == "1":
1385 uboot_sign_img_keyname = bb_vars['UBOOT_SIGN_IMG_KEYNAME']
1386 key_dtb_path = "/signature/key-" + uboot_sign_img_keyname
1387 self._verify_dtb_property(dtc_bindir, uboot_dtb_path, key_dtb_path, "required", "image")
1388 self._verify_dtb_property(dtc_bindir, uboot_dtb_path, key_dtb_path, "algo", algo)
1389 self._verify_dtb_property(dtc_bindir, uboot_dtb_path, key_dtb_path, "key-name-hint", uboot_sign_img_keyname)
1390
1391 uboot_sign_keyname = bb_vars['UBOOT_SIGN_KEYNAME']
1392 key_dtb_path = "/signature/key-" + uboot_sign_keyname
1393 self._verify_dtb_property(dtc_bindir, uboot_dtb_path, key_dtb_path, "required", "conf")
1394 self._verify_dtb_property(dtc_bindir, uboot_dtb_path, key_dtb_path, "algo", algo)
1395 self._verify_dtb_property(dtc_bindir, uboot_dtb_path, key_dtb_path, "key-name-hint", uboot_sign_keyname)
1396
1397
1398 def test_uboot_fit_image(self):
1399 """
1400 Summary: Check if Uboot FIT image and Image Tree Source
1401 (its) are built and the Image Tree Source has the
1402 correct fields.
1403 Expected: 1. u-boot-fitImage and u-boot-its can be built
1404 2. The type, load address, entrypoint address and
1405 default values of U-boot image are correct in the
1406 Image Tree Source. Not all the fields are tested,
1407 only the key fields that wont vary between
1408 different architectures.
1409 Product: oe-core
1410 Author: Klaus Heinrich Kiwi <klaus@linux.vnet.ibm.com>
1411 based on work by Usama Arif <usama.arif@arm.com>
1412 """
1413 config = """
1414# We need at least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
1415MACHINE = "qemuarm"
1416UBOOT_MACHINE = "am57xx_evm_defconfig"
1417SPL_BINARY = "MLO"
1418
1419# Enable creation of the U-Boot fitImage
1420UBOOT_FITIMAGE_ENABLE = "1"
1421
1422# (U-boot) fitImage properties
1423UBOOT_LOADADDRESS = "0x80080000"
1424UBOOT_ENTRYPOINT = "0x80080000"
1425UBOOT_FIT_DESC = "A model description"
1426"""
1427 self.write_config(config)
1428 bb_vars = self._fit_get_bb_vars()
1429 self._test_fitimage(bb_vars)
1430
1431
1432 def test_sign_standalone_uboot_fit_image(self):
1433 """
1434 Summary: Check if U-Boot FIT image and Image Tree Source (its) are
1435 created and signed correctly for the scenario where only
1436 the U-Boot proper fitImage is being created and signed.
1437 Expected: 1) U-Boot its and FIT image are built successfully
1438 2) Scanning the its file indicates signing is enabled
1439 as requested by SPL_SIGN_ENABLE (using keys generated
1440 via UBOOT_FIT_GENERATE_KEYS)
1441 3) Dumping the FIT image indicates signature values
1442 are present
1443 4) Examination of the do_uboot_assemble_fitimage
1444 runfile/logfile indicate that UBOOT_MKIMAGE, UBOOT_MKIMAGE_SIGN
1445 and SPL_MKIMAGE_SIGN_ARGS are working as expected.
1446 Product: oe-core
1447 Author: Klaus Heinrich Kiwi <klaus@linux.vnet.ibm.com> based upon
1448 work by Paul Eggleton <paul.eggleton@microsoft.com> and
1449 Usama Arif <usama.arif@arm.com>
1450 """
1451 config = """
1452# There's no U-boot defconfig with CONFIG_FIT_SIGNATURE yet, so we need at
1453# least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
1454MACHINE = "qemuarm"
1455UBOOT_MACHINE = "am57xx_evm_defconfig"
1456SPL_BINARY = "MLO"
1457# Enable creation and signing of the U-Boot fitImage
1458UBOOT_FITIMAGE_ENABLE = "1"
1459SPL_SIGN_ENABLE = "1"
1460SPL_SIGN_KEYNAME = "spl-oe-selftest"
1461SPL_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
1462UBOOT_DTB_BINARY = "u-boot.dtb"
1463UBOOT_ENTRYPOINT = "0x80000000"
1464UBOOT_LOADADDRESS = "0x80000000"
1465UBOOT_DTB_LOADADDRESS = "0x82000000"
1466UBOOT_ARCH = "arm"
1467SPL_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
1468SPL_MKIMAGE_SIGN_ARGS = "-c 'a smart U-Boot comment'"
1469UBOOT_EXTLINUX = "0"
1470UBOOT_FIT_GENERATE_KEYS = "1"
1471UBOOT_FIT_HASH_ALG = "sha256"
1472"""
1473 self.write_config(config)
1474 bb_vars = self._fit_get_bb_vars()
1475 self._test_fitimage(bb_vars)
1476
1477
1478 def test_sign_cascaded_uboot_fit_image(self):
1479 """
1480 Summary: Check if U-Boot FIT image and Image Tree Source (its) are
1481 created and signed correctly for the scenario where both
1482 U-Boot proper and Kernel fitImages are being created and
1483 signed.
1484 Expected: 1) U-Boot its and FIT image are built successfully
1485 2) Scanning the its file indicates signing is enabled
1486 as requested by SPL_SIGN_ENABLE (using keys generated
1487 via UBOOT_FIT_GENERATE_KEYS)
1488 3) Dumping the FIT image indicates signature values
1489 are present
1490 4) Examination of the do_uboot_assemble_fitimage that
1491 UBOOT_MKIMAGE, UBOOT_MKIMAGE_SIGN and SPL_MKIMAGE_SIGN_ARGS
1492 are working as expected.
1493 Product: oe-core
1494 Author: Klaus Heinrich Kiwi <klaus@linux.vnet.ibm.com> based upon
1495 work by Paul Eggleton <paul.eggleton@microsoft.com> and
1496 Usama Arif <usama.arif@arm.com>
1497 """
1498 config = """
1499# There's no U-boot deconfig with CONFIG_FIT_SIGNATURE yet, so we need at
1500# least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
1501MACHINE = "qemuarm"
1502UBOOT_MACHINE = "am57xx_evm_defconfig"
1503SPL_BINARY = "MLO"
1504# Enable creation and signing of the U-Boot fitImage
1505UBOOT_FITIMAGE_ENABLE = "1"
1506SPL_SIGN_ENABLE = "1"
1507SPL_SIGN_KEYNAME = "spl-cascaded-oe-selftest"
1508SPL_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
1509UBOOT_DTB_BINARY = "u-boot.dtb"
1510UBOOT_ENTRYPOINT = "0x80000000"
1511UBOOT_LOADADDRESS = "0x80000000"
1512UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
1513UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart cascaded U-Boot comment'"
1514UBOOT_DTB_LOADADDRESS = "0x82000000"
1515UBOOT_ARCH = "arm"
1516SPL_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
1517SPL_MKIMAGE_SIGN_ARGS = "-c 'a smart cascaded U-Boot comment'"
1518UBOOT_EXTLINUX = "0"
1519UBOOT_FIT_GENERATE_KEYS = "1"
1520UBOOT_FIT_HASH_ALG = "sha256"
1521UBOOT_SIGN_ENABLE = "1"
1522UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
1523UBOOT_SIGN_KEYNAME = "cfg-oe-selftest"
1524"""
1525 self.write_config(config)
1526 bb_vars = self._fit_get_bb_vars()
1527
1528 self._gen_signing_key(bb_vars)
1529 self._test_fitimage(bb_vars)
1530 self._check_kernel_dtb(bb_vars)
1531
1532 def test_uboot_atf_tee_fit_image(self):
1533 """
1534 Summary: Check if U-boot FIT image and Image Tree Source
1535 (its) are built and the Image Tree Source has the
1536 correct fields.
1537 Expected: 1. Create atf and tee dummy images
1538 2. Both u-boot-fitImage and u-boot-its can be built
1539 3. The os, load address, entrypoint address and
1540 default values of U-boot, ATF and TEE images are
1541 correct in the Image Tree Source. Not all the
1542 fields are tested, only the key fields that wont
1543 vary between different architectures.
1544 Product: oe-core
1545 Author: Jamin Lin <jamin_lin@aspeedtech.com>
1546 """
1547 config = """
1548# We need at least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
1549MACHINE = "qemuarm"
1550UBOOT_MACHINE = "am57xx_evm_defconfig"
1551SPL_BINARY = "MLO"
1552
1553# Enable creation of the U-Boot fitImage
1554UBOOT_FITIMAGE_ENABLE = "1"
1555
1556# (U-boot) fitImage properties
1557UBOOT_LOADADDRESS = "0x80080000"
1558UBOOT_ENTRYPOINT = "0x80080000"
1559UBOOT_FIT_DESC = "A model description"
1560
1561# Enable creation of the TEE fitImage
1562UBOOT_FIT_TEE = "1"
1563
1564# TEE fitImage properties
1565UBOOT_FIT_TEE_IMAGE = "${TOPDIR}/tee-dummy.bin"
1566UBOOT_FIT_TEE_LOADADDRESS = "0x80180000"
1567UBOOT_FIT_TEE_ENTRYPOINT = "0x80180000"
1568
1569# Enable creation of the ATF fitImage
1570UBOOT_FIT_ARM_TRUSTED_FIRMWARE = "1"
1571
1572# ATF fitImage properties
1573UBOOT_FIT_ARM_TRUSTED_FIRMWARE_IMAGE = "${TOPDIR}/atf-dummy.bin"
1574UBOOT_FIT_ARM_TRUSTED_FIRMWARE_LOADADDRESS = "0x80280000"
1575UBOOT_FIT_ARM_TRUSTED_FIRMWARE_ENTRYPOINT = "0x80280000"
1576"""
1577 self.write_config(config)
1578
1579 bb_vars = self._fit_get_bb_vars([
1580 'UBOOT_FIT_ARM_TRUSTED_FIRMWARE_IMAGE',
1581 'UBOOT_FIT_TEE_IMAGE',
1582 ])
1583
1584 # Create an ATF dummy image
1585 dummy_atf = os.path.join(self.builddir, bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE_IMAGE'])
1586 FitImageTestCase._gen_random_file(dummy_atf)
1587
1588 # Create a TEE dummy image
1589 dummy_tee = os.path.join(self.builddir, bb_vars['UBOOT_FIT_TEE_IMAGE'])
1590 FitImageTestCase._gen_random_file(dummy_tee)
1591
1592 self._test_fitimage(bb_vars)
1593
1594 def test_sign_standalone_uboot_atf_tee_fit_image(self):
1595 """
1596 Summary: Check if U-Boot FIT image and Image Tree Source (its) are
1597 created and signed correctly for the scenario where only
1598 the U-Boot proper fitImage is being created and signed.
1599 Expected: 1. Create atf and tee dummy images
1600 2. U-Boot its and FIT image are built successfully
1601 3. Scanning the its file indicates signing is enabled
1602 as requested by SPL_SIGN_ENABLE (using keys generated
1603 via UBOOT_FIT_GENERATE_KEYS)
1604 4. Dumping the FIT image indicates signature values
1605 are present
1606 5. Examination of the do_uboot_assemble_fitimage
1607 runfile/logfile indicate that UBOOT_MKIMAGE, UBOOT_MKIMAGE_SIGN
1608 and SPL_MKIMAGE_SIGN_ARGS are working as expected.
1609 Product: oe-core
1610 Author: Jamin Lin <jamin_lin@aspeedtech.com>
1611 """
1612 config = """
1613# There's no U-boot deconfig with CONFIG_FIT_SIGNATURE yet, so we need at
1614# least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
1615MACHINE = "qemuarm"
1616UBOOT_MACHINE = "am57xx_evm_defconfig"
1617SPL_BINARY = "MLO"
1618# Enable creation and signing of the U-Boot fitImage
1619UBOOT_FITIMAGE_ENABLE = "1"
1620SPL_SIGN_ENABLE = "1"
1621SPL_SIGN_KEYNAME = "spl-oe-selftest"
1622SPL_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
1623UBOOT_DTB_BINARY = "u-boot.dtb"
1624UBOOT_ENTRYPOINT = "0x80000000"
1625UBOOT_LOADADDRESS = "0x80000000"
1626UBOOT_ARCH = "arm"
1627SPL_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
1628SPL_MKIMAGE_SIGN_ARGS = "-c 'a smart U-Boot ATF TEE comment'"
1629UBOOT_EXTLINUX = "0"
1630UBOOT_FIT_GENERATE_KEYS = "1"
1631UBOOT_FIT_HASH_ALG = "sha256"
1632
1633# Enable creation of the TEE fitImage
1634UBOOT_FIT_TEE = "1"
1635
1636# TEE fitImage properties
1637UBOOT_FIT_TEE_IMAGE = "${TOPDIR}/tee-dummy.bin"
1638UBOOT_FIT_TEE_LOADADDRESS = "0x80180000"
1639UBOOT_FIT_TEE_ENTRYPOINT = "0x80180000"
1640
1641# Enable creation of the ATF fitImage
1642UBOOT_FIT_ARM_TRUSTED_FIRMWARE = "1"
1643
1644# ATF fitImage properties
1645UBOOT_FIT_ARM_TRUSTED_FIRMWARE_IMAGE = "${TOPDIR}/atf-dummy.bin"
1646UBOOT_FIT_ARM_TRUSTED_FIRMWARE_LOADADDRESS = "0x80280000"
1647UBOOT_FIT_ARM_TRUSTED_FIRMWARE_ENTRYPOINT = "0x80280000"
1648"""
1649 self.write_config(config)
1650
1651 bb_vars = self._fit_get_bb_vars([
1652 'UBOOT_FIT_ARM_TRUSTED_FIRMWARE_IMAGE',
1653 'UBOOT_FIT_TEE_IMAGE',
1654 ])
1655
1656 # Create an ATF dummy image
1657 dummy_atf = os.path.join(self.builddir, bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE_IMAGE'])
1658 FitImageTestCase._gen_random_file(dummy_atf)
1659
1660 # Create a TEE dummy image
1661 dummy_tee = os.path.join(self.builddir, bb_vars['UBOOT_FIT_TEE_IMAGE'])
1662 FitImageTestCase._gen_random_file(dummy_tee)
1663
1664 self._test_fitimage(bb_vars)
1665
1666
1667 def test_sign_uboot_kernel_individual(self):
1668 """
1669 Summary: Check if the device-tree from U-Boot has two public keys
1670 for verifying the kernel FIT image created by the
1671 kernel-fitimage.bbclass included.
1672 This test sets: FIT_SIGN_INDIVIDUAL = "1"
1673 Expected: There must be two signature nodes. One is required for
1674 the individual image nodes, the other is required for the
1675 verification of the configuration section.
1676 """
1677 config = """
1678# Enable creation of fitImage
1679MACHINE = "beaglebone-yocto"
1680UBOOT_SIGN_ENABLE = "1"
1681UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
1682UBOOT_SIGN_KEYNAME = "the-kernel-config-key"
1683UBOOT_SIGN_IMG_KEYNAME = "the-kernel-image-key"
1684UBOOT_MKIMAGE_DTCOPTS="-I dts -O dtb -p 2000"
1685FIT_SIGN_INDIVIDUAL = "1"
1686"""
1687 self.write_config(config)
1688 bb_vars = self._fit_get_bb_vars()
1689 self._gen_signing_key(bb_vars)
1690
1691 bitbake(UBootFitImageTests.BOOTLOADER_RECIPE)
1692
1693 # Just check the DTB of u-boot since there is no u-boot FIT image
1694 self._check_kernel_dtb(bb_vars)
1695
1696
1697 def test_sign_uboot_fit_image_without_spl(self):
1698 """
1699 Summary: Check if U-Boot FIT image and Image Tree Source (its) are
1700 created and signed correctly for the scenario where only
1701 the U-Boot proper fitImage is being created and signed
1702 (no SPL included).
1703 Expected: 1) U-Boot its and FIT image are built successfully
1704 2) Scanning the its file indicates signing is enabled
1705 as requested by SPL_SIGN_ENABLE (using keys generated
1706 via UBOOT_FIT_GENERATE_KEYS)
1707 3) Dumping the FIT image indicates signature values
1708 are present
1709 4) Examination of the do_uboot_assemble_fitimage
1710 runfile/logfile indicate that UBOOT_MKIMAGE and
1711 UBOOT_MKIMAGE_SIGN are working as expected.
1712 Product: oe-core
1713 Author: Jamin Lin <jamin_lin@aspeedtech.com>
1714 """
1715 config = """
1716# There's no U-boot defconfig with CONFIG_FIT_SIGNATURE yet, so we need at
1717# least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
1718MACHINE = "qemuarm"
1719UBOOT_MACHINE = "am57xx_evm_defconfig"
1720# Enable creation and signing of the U-Boot fitImage (no SPL)
1721UBOOT_FITIMAGE_ENABLE = "1"
1722SPL_DTB_BINARY = ""
1723SPL_SIGN_ENABLE = "1"
1724SPL_SIGN_KEYNAME = "spl-oe-selftest"
1725SPL_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
1726UBOOT_FIT_GENERATE_KEYS = "1"
1727"""
1728 self.write_config(config)
1729 bb_vars = self._fit_get_bb_vars()
1730 self._test_fitimage(bb_vars)
363 1731
364 test_passed = True
365 self.assertTrue(test_passed == True,"Initramfs bundle test success")
diff --git a/meta/lib/oeqa/selftest/cases/gcc.py b/meta/lib/oeqa/selftest/cases/gcc.py
index 3efe15228f..1bda29a72b 100644
--- a/meta/lib/oeqa/selftest/cases/gcc.py
+++ b/meta/lib/oeqa/selftest/cases/gcc.py
@@ -1,9 +1,14 @@
1#
2# Copyright OpenEmbedded Contributors
3#
1# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
5#
2import os 6import os
7import time
3from oeqa.core.decorator import OETestTag 8from oeqa.core.decorator import OETestTag
4from oeqa.core.case import OEPTestResultTestCase 9from oeqa.core.case import OEPTestResultTestCase
5from oeqa.selftest.case import OESelftestTestCase 10from oeqa.selftest.case import OESelftestTestCase
6from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runqemu, Command 11from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runqemu
7 12
8def parse_values(content): 13def parse_values(content):
9 for i in content: 14 for i in content:
@@ -32,15 +37,20 @@ class GccSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
32 features = [] 37 features = []
33 features.append('MAKE_CHECK_TARGETS = "{0}"'.format(" ".join(targets))) 38 features.append('MAKE_CHECK_TARGETS = "{0}"'.format(" ".join(targets)))
34 if ssh is not None: 39 if ssh is not None:
35 features.append('TOOLCHAIN_TEST_TARGET = "ssh"') 40 features.append('TOOLCHAIN_TEST_TARGET = "linux-ssh"')
36 features.append('TOOLCHAIN_TEST_HOST = "{0}"'.format(ssh)) 41 features.append('TOOLCHAIN_TEST_HOST = "{0}"'.format(ssh))
37 features.append('TOOLCHAIN_TEST_HOST_USER = "root"') 42 features.append('TOOLCHAIN_TEST_HOST_USER = "root"')
38 features.append('TOOLCHAIN_TEST_HOST_PORT = "22"') 43 features.append('TOOLCHAIN_TEST_HOST_PORT = "22"')
39 self.write_config("\n".join(features)) 44 self.write_config("\n".join(features))
40 45
41 recipe = "gcc-runtime" 46 recipe = "gcc-runtime"
47
48 start_time = time.time()
49
42 bitbake("{} -c check".format(recipe)) 50 bitbake("{} -c check".format(recipe))
43 51
52 end_time = time.time()
53
44 bb_vars = get_bb_vars(["B", "TARGET_SYS"], recipe) 54 bb_vars = get_bb_vars(["B", "TARGET_SYS"], recipe)
45 builddir, target_sys = bb_vars["B"], bb_vars["TARGET_SYS"] 55 builddir, target_sys = bb_vars["B"], bb_vars["TARGET_SYS"]
46 56
@@ -54,7 +64,7 @@ class GccSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
54 64
55 ptestsuite = "gcc-{}".format(suite) if suite != "gcc" else suite 65 ptestsuite = "gcc-{}".format(suite) if suite != "gcc" else suite
56 ptestsuite = ptestsuite + "-user" if ssh is None else ptestsuite 66 ptestsuite = ptestsuite + "-user" if ssh is None else ptestsuite
57 self.ptest_section(ptestsuite, logfile = logpath) 67 self.ptest_section(ptestsuite, duration = int(end_time - start_time), logfile = logpath)
58 with open(sumspath, "r") as f: 68 with open(sumspath, "r") as f:
59 for test, result in parse_values(f): 69 for test, result in parse_values(f):
60 self.ptest_result(ptestsuite, test, result) 70 self.ptest_result(ptestsuite, test, result)
@@ -73,6 +83,8 @@ class GccSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
73 # validate that SSH is working 83 # validate that SSH is working
74 status, _ = qemu.run("uname") 84 status, _ = qemu.run("uname")
75 self.assertEqual(status, 0) 85 self.assertEqual(status, 0)
86 qemu.run('echo "MaxStartups 75:30:100" >> /etc/ssh/sshd_config')
87 qemu.run('service sshd restart')
76 88
77 return self.run_check(*args, ssh=qemu.ip, **kwargs) 89 return self.run_check(*args, ssh=qemu.ip, **kwargs)
78 90
@@ -114,37 +126,44 @@ class GccLibItmSelfTest(GccSelfTestBase):
114 self.run_check("libitm") 126 self.run_check("libitm")
115 127
116@OETestTag("toolchain-system") 128@OETestTag("toolchain-system")
129@OETestTag("runqemu")
117class GccCrossSelfTestSystemEmulated(GccSelfTestBase): 130class GccCrossSelfTestSystemEmulated(GccSelfTestBase):
118 def test_cross_gcc(self): 131 def test_cross_gcc(self):
119 self.run_check_emulated("gcc") 132 self.run_check_emulated("gcc")
120 133
121@OETestTag("toolchain-system") 134@OETestTag("toolchain-system")
135@OETestTag("runqemu")
122class GxxCrossSelfTestSystemEmulated(GccSelfTestBase): 136class GxxCrossSelfTestSystemEmulated(GccSelfTestBase):
123 def test_cross_gxx(self): 137 def test_cross_gxx(self):
124 self.run_check_emulated("g++") 138 self.run_check_emulated("g++")
125 139
126@OETestTag("toolchain-system") 140@OETestTag("toolchain-system")
141@OETestTag("runqemu")
127class GccLibAtomicSelfTestSystemEmulated(GccSelfTestBase): 142class GccLibAtomicSelfTestSystemEmulated(GccSelfTestBase):
128 def test_libatomic(self): 143 def test_libatomic(self):
129 self.run_check_emulated("libatomic") 144 self.run_check_emulated("libatomic")
130 145
131@OETestTag("toolchain-system") 146@OETestTag("toolchain-system")
147@OETestTag("runqemu")
132class GccLibGompSelfTestSystemEmulated(GccSelfTestBase): 148class GccLibGompSelfTestSystemEmulated(GccSelfTestBase):
133 def test_libgomp(self): 149 def test_libgomp(self):
134 self.run_check_emulated("libgomp") 150 self.run_check_emulated("libgomp")
135 151
136@OETestTag("toolchain-system") 152@OETestTag("toolchain-system")
153@OETestTag("runqemu")
137class GccLibStdCxxSelfTestSystemEmulated(GccSelfTestBase): 154class GccLibStdCxxSelfTestSystemEmulated(GccSelfTestBase):
138 def test_libstdcxx(self): 155 def test_libstdcxx(self):
139 self.run_check_emulated("libstdc++-v3") 156 self.run_check_emulated("libstdc++-v3")
140 157
141@OETestTag("toolchain-system") 158@OETestTag("toolchain-system")
159@OETestTag("runqemu")
142class GccLibSspSelfTestSystemEmulated(GccSelfTestBase): 160class GccLibSspSelfTestSystemEmulated(GccSelfTestBase):
143 def test_libssp(self): 161 def test_libssp(self):
144 self.check_skip("libssp") 162 self.check_skip("libssp")
145 self.run_check_emulated("libssp") 163 self.run_check_emulated("libssp")
146 164
147@OETestTag("toolchain-system") 165@OETestTag("toolchain-system")
166@OETestTag("runqemu")
148class GccLibItmSelfTestSystemEmulated(GccSelfTestBase): 167class GccLibItmSelfTestSystemEmulated(GccSelfTestBase):
149 def test_libitm(self): 168 def test_libitm(self):
150 self.check_skip("libitm") 169 self.check_skip("libitm")
diff --git a/meta/lib/oeqa/selftest/cases/gdbserver.py b/meta/lib/oeqa/selftest/cases/gdbserver.py
new file mode 100644
index 0000000000..b6b7c5c473
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/gdbserver.py
@@ -0,0 +1,67 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6import os
7import time
8import tempfile
9import shutil
10import concurrent.futures
11
12from oeqa.selftest.case import OESelftestTestCase
13from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars , runqemu, runCmd
14
15class GdbServerTest(OESelftestTestCase):
16 def test_gdb_server(self):
17 target_arch = self.td["TARGET_ARCH"]
18 target_sys = self.td["TARGET_SYS"]
19
20 features = """
21IMAGE_GEN_DEBUGFS = "1"
22IMAGE_FSTYPES_DEBUGFS = "tar.bz2"
23CORE_IMAGE_EXTRA_INSTALL = "gdbserver"
24 """
25 self.write_config(features)
26
27 gdb_recipe = "gdb-cross-" + target_arch
28 gdb_binary = target_sys + "-gdb"
29
30 bitbake("core-image-minimal %s:do_addto_recipe_sysroot" % gdb_recipe)
31
32 native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", gdb_recipe)
33 r = runCmd("%s --version" % gdb_binary, native_sysroot=native_sysroot, target_sys=target_sys)
34 self.assertEqual(r.status, 0)
35 self.assertIn("GNU gdb", r.output)
36 image = 'core-image-minimal'
37 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
38
39 with tempfile.TemporaryDirectory(prefix="debugfs-") as debugfs:
40 filename = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s-dbg.tar.bz2" % bb_vars['IMAGE_LINK_NAME'])
41 shutil.unpack_archive(filename, debugfs)
42 filename = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.tar.bz2" % bb_vars['IMAGE_LINK_NAME'])
43 shutil.unpack_archive(filename, debugfs)
44
45 with runqemu("core-image-minimal", runqemuparams="nographic") as qemu:
46 status, output = qemu.run_serial("kmod --help")
47 self.assertIn("modprobe", output)
48
49 with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
50 def run_gdb():
51 for _ in range(5):
52 time.sleep(2)
53 cmd = "%s --batch -ex 'set sysroot %s' -ex \"target extended-remote %s:9999\" -ex \"info line kmod_help\"" % (gdb_binary, debugfs, qemu.ip)
54 self.logger.warning("starting gdb %s" % cmd)
55 r = runCmd(cmd, native_sysroot=native_sysroot, target_sys=target_sys)
56 self.assertEqual(0, r.status)
57 line_re = r"Line \d+ of \".*\" starts at address 0x[0-9A-Fa-f]+ <kmod_help>"
58 self.assertRegex(r.output, line_re)
59 break
60 else:
61 self.fail("Timed out connecting to gdb")
62 future = executor.submit(run_gdb)
63
64 status, output = qemu.run_serial("gdbserver --once :9999 kmod --help")
65 self.assertEqual(status, 1)
66 # The future either returns None, or raises an exception
67 future.result()
diff --git a/meta/lib/oeqa/selftest/cases/gitarchivetests.py b/meta/lib/oeqa/selftest/cases/gitarchivetests.py
new file mode 100644
index 0000000000..71382089c1
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/gitarchivetests.py
@@ -0,0 +1,136 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8import sys
9basepath = os.path.abspath(os.path.dirname(__file__) + '/../../../../../')
10lib_path = basepath + '/scripts/lib'
11sys.path = sys.path + [lib_path]
12import oeqa.utils.gitarchive as ga
13from oeqa.utils.git import GitError
14import tempfile
15import shutil
16import scriptutils
17import logging
18from oeqa.selftest.case import OESelftestTestCase
19
20logger = scriptutils.logger_create('resulttool')
21
22def create_fake_repository(commit, tag_list=[], add_remote=True):
23 """ Create a testing git directory
24
25 Initialize a simple git repository with one initial commit, and as many
26 tags on this commit as listed in tag_list
27 Returns both git directory path and gitarchive git object
 28 If commit is true, fake data will be committed, otherwise it will stay in staging area
 29 If commit is true and tag_list is non-empty, all tags in tag_list will be
30 created on the initial commit
31 Fake remote will also be added to make git ls-remote work
32 """
33 fake_data_file = "fake_data.txt"
34 tempdir = tempfile.mkdtemp(prefix='fake_results.')
35 repo = ga.init_git_repo(tempdir, False, False, logger)
36 if add_remote:
37 repo.run_cmd(["remote", "add", "origin", "."])
38 with open(os.path.join(tempdir, fake_data_file), "w") as fake_data:
39 fake_data.write("Fake data")
40 if commit:
41 repo.run_cmd(["add", fake_data_file])
42 repo.run_cmd(["commit", "-m", "\"Add fake data\""])
43 for tag in tag_list:
44 repo.run_cmd(["tag", tag])
45
46 return tempdir, repo
47
48def delete_fake_repository(path):
49 shutil.rmtree(path)
50
51def tag_exists(git_obj, target_tag):
52 for tag in git_obj.run_cmd(["tag"]).splitlines():
53 if target_tag == tag:
54 return True
55 return False
56
57class GitArchiveTests(OESelftestTestCase):
58 TEST_BRANCH="main"
59 TEST_COMMIT="0f7d5df"
60 TEST_COMMIT_COUNT="42"
61
62 @classmethod
63 def setUpClass(cls):
64 super().setUpClass()
65 cls.log = logging.getLogger('gitarchivetests')
66 cls.log.setLevel(logging.DEBUG)
67
68 def test_create_first_test_tag(self):
69 path, git_obj = create_fake_repository(False)
70 keywords = {'commit': self.TEST_COMMIT, 'branch': self.TEST_BRANCH, "commit_count": self.TEST_COMMIT_COUNT}
71 target_tag = f"{self.TEST_BRANCH}/{self.TEST_COMMIT_COUNT}-g{self.TEST_COMMIT}/0"
72
73 ga.gitarchive(path, path, True, False,
74 "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}",
75 False, "{branch}/{commit_count}-g{commit}/{tag_number}",
76 'Test run #{tag_number} of {branch}:{commit}', '',
77 [], [], False, keywords, logger)
78 self.assertTrue(tag_exists(git_obj, target_tag), msg=f"Tag {target_tag} has not been created")
79 delete_fake_repository(path)
80
81 def test_create_second_test_tag(self):
82 first_tag = f"{self.TEST_BRANCH}/{self.TEST_COMMIT_COUNT}-g{self.TEST_COMMIT}/0"
83 second_tag = f"{self.TEST_BRANCH}/{self.TEST_COMMIT_COUNT}-g{self.TEST_COMMIT}/1"
84 keywords = {'commit': self.TEST_COMMIT, 'branch': self.TEST_BRANCH, "commit_count": self.TEST_COMMIT_COUNT}
85
86 path, git_obj = create_fake_repository(True, [first_tag])
87 ga.gitarchive(path, path, True, False,
88 "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}",
89 False, "{branch}/{commit_count}-g{commit}/{tag_number}",
90 'Test run #{tag_number} of {branch}:{commit}', '',
91 [], [], False, keywords, logger)
92 self.assertTrue(tag_exists(git_obj, second_tag), msg=f"Second tag {second_tag} has not been created")
93 delete_fake_repository(path)
94
95 def test_get_revs_on_branch(self):
96 fake_tags_list=["main/10-g0f7d5df/0", "main/10-g0f7d5df/1", "foo/20-g2468f5d/0"]
97 tag_name = "{branch}/{commit_number}-g{commit}/{tag_number}"
98
99 path, git_obj = create_fake_repository(True, fake_tags_list)
100 revs = ga.get_test_revs(logger, git_obj, tag_name, branch="main")
101 self.assertEqual(len(revs), 1)
102 self.assertEqual(revs[0].commit, "0f7d5df")
103 self.assertEqual(len(revs[0].tags), 2)
104 self.assertEqual(revs[0].tags, ['main/10-g0f7d5df/0', 'main/10-g0f7d5df/1'])
105 delete_fake_repository(path)
106
107 def test_get_tags_without_valid_remote(self):
108 url = 'git://git.yoctoproject.org/poky'
109 path, git_obj = create_fake_repository(False, None, False)
110
111 tags = ga.get_tags(git_obj, self.log, pattern="yocto-*", url=url)
112 """Test for some well established tags (released tags)"""
113 self.assertIn("yocto-4.0", tags)
114 self.assertIn("yocto-4.1", tags)
115 self.assertIn("yocto-4.2", tags)
116 delete_fake_repository(path)
117
118 def test_get_tags_with_only_local_tag(self):
119 fake_tags_list=["main/10-g0f7d5df/0", "main/10-g0f7d5df/1", "foo/20-g2468f5d/0"]
120 path, git_obj = create_fake_repository(True, fake_tags_list, False)
121
122 """No remote is configured and no url is passed: get_tags must fall
123 back to local tags
124 """
125 tags = ga.get_tags(git_obj, self.log)
126 self.assertCountEqual(tags, fake_tags_list)
127 delete_fake_repository(path)
128
129 def test_get_tags_without_valid_remote_and_wrong_url(self):
130 url = 'git://git.foo.org/bar'
131 path, git_obj = create_fake_repository(False, None, False)
132
133 """Test for some well established tags (released tags)"""
134 with self.assertRaises(GitError):
135 tags = ga.get_tags(git_obj, self.log, pattern="yocto-*", url=url)
136 delete_fake_repository(path)
diff --git a/meta/lib/oeqa/selftest/cases/glibc.py b/meta/lib/oeqa/selftest/cases/glibc.py
index c687f6ef93..bd56b2f6e7 100644
--- a/meta/lib/oeqa/selftest/cases/glibc.py
+++ b/meta/lib/oeqa/selftest/cases/glibc.py
@@ -1,10 +1,15 @@
1#
2# Copyright OpenEmbedded Contributors
3#
1# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
5#
2import os 6import os
7import time
3import contextlib 8import contextlib
4from oeqa.core.decorator import OETestTag 9from oeqa.core.decorator import OETestTag
5from oeqa.core.case import OEPTestResultTestCase 10from oeqa.core.case import OEPTestResultTestCase
6from oeqa.selftest.case import OESelftestTestCase 11from oeqa.selftest.case import OESelftestTestCase
7from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runqemu, Command 12from oeqa.utils.commands import bitbake, get_bb_var, runqemu
8from oeqa.utils.nfs import unfs_server 13from oeqa.utils.nfs import unfs_server
9 14
10def parse_values(content): 15def parse_values(content):
@@ -24,16 +29,20 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
24 features.append('TOOLCHAIN_TEST_HOST_USER = "root"') 29 features.append('TOOLCHAIN_TEST_HOST_USER = "root"')
25 features.append('TOOLCHAIN_TEST_HOST_PORT = "22"') 30 features.append('TOOLCHAIN_TEST_HOST_PORT = "22"')
26 # force single threaded test execution 31 # force single threaded test execution
27 features.append('EGLIBCPARALLELISM_task-check_pn-glibc-testsuite = "PARALLELMFLAGS="-j1""') 32 features.append('EGLIBCPARALLELISM:task-check:pn-glibc-testsuite = "PARALLELMFLAGS="-j1""')
28 self.write_config("\n".join(features)) 33 self.write_config("\n".join(features))
29 34
35 start_time = time.time()
36
30 bitbake("glibc-testsuite -c check") 37 bitbake("glibc-testsuite -c check")
31 38
39 end_time = time.time()
40
32 builddir = get_bb_var("B", "glibc-testsuite") 41 builddir = get_bb_var("B", "glibc-testsuite")
33 42
34 ptestsuite = "glibc-user" if ssh is None else "glibc" 43 ptestsuite = "glibc-user" if ssh is None else "glibc"
35 self.ptest_section(ptestsuite) 44 self.ptest_section(ptestsuite, duration = int(end_time - start_time))
36 with open(os.path.join(builddir, "tests.sum"), "r") as f: 45 with open(os.path.join(builddir, "tests.sum"), "r", errors='replace') as f:
37 for test, result in parse_values(f): 46 for test, result in parse_values(f):
38 self.ptest_result(ptestsuite, test, result) 47 self.ptest_result(ptestsuite, test, result)
39 48
@@ -41,7 +50,7 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
41 with contextlib.ExitStack() as s: 50 with contextlib.ExitStack() as s:
42 # use the base work dir, as the nfs mount, since the recipe directory may not exist 51 # use the base work dir, as the nfs mount, since the recipe directory may not exist
43 tmpdir = get_bb_var("BASE_WORKDIR") 52 tmpdir = get_bb_var("BASE_WORKDIR")
44 nfsport, mountport = s.enter_context(unfs_server(tmpdir)) 53 nfsport, mountport = s.enter_context(unfs_server(tmpdir, udp = False))
45 54
46 # build core-image-minimal with required packages 55 # build core-image-minimal with required packages
47 default_installed_packages = [ 56 default_installed_packages = [
@@ -61,7 +70,7 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
61 bitbake("core-image-minimal") 70 bitbake("core-image-minimal")
62 71
63 # start runqemu 72 # start runqemu
64 qemu = s.enter_context(runqemu("core-image-minimal", runqemuparams = "nographic")) 73 qemu = s.enter_context(runqemu("core-image-minimal", runqemuparams = "nographic", qemuparams = "-m 1024"))
65 74
66 # validate that SSH is working 75 # validate that SSH is working
67 status, _ = qemu.run("uname") 76 status, _ = qemu.run("uname")
@@ -70,7 +79,7 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
70 # setup nfs mount 79 # setup nfs mount
71 if qemu.run("mkdir -p \"{0}\"".format(tmpdir))[0] != 0: 80 if qemu.run("mkdir -p \"{0}\"".format(tmpdir))[0] != 0:
72 raise Exception("Failed to setup NFS mount directory on target") 81 raise Exception("Failed to setup NFS mount directory on target")
73 mountcmd = "mount -o noac,nfsvers=3,port={0},udp,mountport={1} \"{2}:{3}\" \"{3}\"".format(nfsport, mountport, qemu.server_ip, tmpdir) 82 mountcmd = "mount -o noac,nfsvers=3,port={0},mountport={1} \"{2}:{3}\" \"{3}\"".format(nfsport, mountport, qemu.server_ip, tmpdir)
74 status, output = qemu.run(mountcmd) 83 status, output = qemu.run(mountcmd)
75 if status != 0: 84 if status != 0:
76 raise Exception("Failed to setup NFS mount on target ({})".format(repr(output))) 85 raise Exception("Failed to setup NFS mount on target ({})".format(repr(output)))
@@ -83,6 +92,7 @@ class GlibcSelfTest(GlibcSelfTestBase):
83 self.run_check() 92 self.run_check()
84 93
85@OETestTag("toolchain-system") 94@OETestTag("toolchain-system")
95@OETestTag("runqemu")
86class GlibcSelfTestSystemEmulated(GlibcSelfTestBase): 96class GlibcSelfTestSystemEmulated(GlibcSelfTestBase):
87 def test_glibc(self): 97 def test_glibc(self):
88 self.run_check_emulated() 98 self.run_check_emulated()
diff --git a/meta/lib/oeqa/selftest/cases/gotoolchain.py b/meta/lib/oeqa/selftest/cases/gotoolchain.py
index 4fc3605f42..ee2cf4b09a 100644
--- a/meta/lib/oeqa/selftest/cases/gotoolchain.py
+++ b/meta/lib/oeqa/selftest/cases/gotoolchain.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -50,6 +52,9 @@ class oeGoToolchainSelfTest(OESelftestTestCase):
50 cmd = "cd %s/src/%s/%s; " % (self.go_path, proj, name) 52 cmd = "cd %s/src/%s/%s; " % (self.go_path, proj, name)
51 cmd = cmd + ". %s; " % self.env_SDK 53 cmd = cmd + ". %s; " % self.env_SDK
52 cmd = cmd + "export GOPATH=%s; " % self.go_path 54 cmd = cmd + "export GOPATH=%s; " % self.go_path
55 cmd = cmd + "export GOFLAGS=-modcacherw; "
56 cmd = cmd + "export CGO_ENABLED=1; "
57 cmd = cmd + "export GOPROXY=https://proxy.golang.org,direct; "
53 cmd = cmd + "${CROSS_COMPILE}go %s" % gocmd 58 cmd = cmd + "${CROSS_COMPILE}go %s" % gocmd
54 return runCmd(cmd).status 59 return runCmd(cmd).status
55 60
diff --git a/meta/lib/oeqa/selftest/cases/image_typedep.py b/meta/lib/oeqa/selftest/cases/image_typedep.py
index 52e1080f13..17c98baf14 100644
--- a/meta/lib/oeqa/selftest/cases/image_typedep.py
+++ b/meta/lib/oeqa/selftest/cases/image_typedep.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -9,7 +11,7 @@ from oeqa.utils.commands import bitbake
9 11
10class ImageTypeDepTests(OESelftestTestCase): 12class ImageTypeDepTests(OESelftestTestCase):
11 13
12 # Verify that when specifying a IMAGE_TYPEDEP_ of the form "foo.bar" that 14 # Verify that when specifying a IMAGE_TYPEDEP: of the form "foo.bar" that
13 # the conversion type bar gets added as a dep as well 15 # the conversion type bar gets added as a dep as well
14 def test_conversion_typedep_added(self): 16 def test_conversion_typedep_added(self):
15 17
@@ -22,7 +24,7 @@ LICENSE = "MIT"
22IMAGE_FSTYPES = "testfstype" 24IMAGE_FSTYPES = "testfstype"
23 25
24IMAGE_TYPES_MASKED += "testfstype" 26IMAGE_TYPES_MASKED += "testfstype"
25IMAGE_TYPEDEP_testfstype = "tar.bz2" 27IMAGE_TYPEDEP:testfstype = "tar.bz2"
26 28
27inherit image 29inherit image
28 30
diff --git a/meta/lib/oeqa/selftest/cases/imagefeatures.py b/meta/lib/oeqa/selftest/cases/imagefeatures.py
index 6723a8198f..94d01ba116 100644
--- a/meta/lib/oeqa/selftest/cases/imagefeatures.py
+++ b/meta/lib/oeqa/selftest/cases/imagefeatures.py
@@ -1,9 +1,12 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5from oeqa.selftest.case import OESelftestTestCase 7from oeqa.selftest.case import OESelftestTestCase
6from oeqa.utils.commands import runCmd, bitbake, get_bb_var, runqemu 8from oeqa.core.decorator import OETestTag
9from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu
7from oeqa.utils.sshcontrol import SSHControl 10from oeqa.utils.sshcontrol import SSHControl
8import glob 11import glob
9import os 12import os
@@ -14,6 +17,7 @@ class ImageFeatures(OESelftestTestCase):
14 test_user = 'tester' 17 test_user = 'tester'
15 root_user = 'root' 18 root_user = 'root'
16 19
20 @OETestTag("runqemu")
17 def test_non_root_user_can_connect_via_ssh_without_password(self): 21 def test_non_root_user_can_connect_via_ssh_without_password(self):
18 """ 22 """
19 Summary: Check if non root user can connect via ssh without password 23 Summary: Check if non root user can connect via ssh without password
@@ -39,6 +43,7 @@ class ImageFeatures(OESelftestTestCase):
39 status, output = ssh.run("true") 43 status, output = ssh.run("true")
40 self.assertEqual(status, 0, 'ssh to user %s failed with %s' % (user, output)) 44 self.assertEqual(status, 0, 'ssh to user %s failed with %s' % (user, output))
41 45
46 @OETestTag("runqemu")
42 def test_all_users_can_connect_via_ssh_without_password(self): 47 def test_all_users_can_connect_via_ssh_without_password(self):
43 """ 48 """
44 Summary: Check if all users can connect via ssh without password 49 Summary: Check if all users can connect via ssh without password
@@ -68,18 +73,6 @@ class ImageFeatures(OESelftestTestCase):
68 self.assertEqual(status, 0, 'ssh to user tester failed with %s' % output) 73 self.assertEqual(status, 0, 'ssh to user tester failed with %s' % output)
69 74
70 75
71 def test_clutter_image_can_be_built(self):
72 """
73 Summary: Check if clutter image can be built
74 Expected: 1. core-image-clutter can be built
75 Product: oe-core
76 Author: Ionut Chisanovici <ionutx.chisanovici@intel.com>
77 AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
78 """
79
80 # Build a core-image-clutter
81 bitbake('core-image-clutter')
82
83 def test_wayland_support_in_image(self): 76 def test_wayland_support_in_image(self):
84 """ 77 """
85 Summary: Check Wayland support in image 78 Summary: Check Wayland support in image
@@ -109,12 +102,11 @@ class ImageFeatures(OESelftestTestCase):
109 features = 'IMAGE_FSTYPES += " ext4 ext4.bmap ext4.bmap.gz"' 102 features = 'IMAGE_FSTYPES += " ext4 ext4.bmap ext4.bmap.gz"'
110 self.write_config(features) 103 self.write_config(features)
111 104
112 image_name = 'core-image-minimal' 105 image = 'core-image-minimal'
113 bitbake(image_name) 106 bitbake(image)
107 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
114 108
115 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') 109 image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.ext4" % bb_vars['IMAGE_LINK_NAME'])
116 link_name = get_bb_var('IMAGE_LINK_NAME', image_name)
117 image_path = os.path.join(deploy_dir_image, "%s.ext4" % link_name)
118 bmap_path = "%s.bmap" % image_path 110 bmap_path = "%s.bmap" % image_path
119 gzip_path = "%s.gz" % bmap_path 111 gzip_path = "%s.gz" % bmap_path
120 112
@@ -127,8 +119,8 @@ class ImageFeatures(OESelftestTestCase):
127 image_stat = os.stat(image_path) 119 image_stat = os.stat(image_path)
128 self.assertGreater(image_stat.st_size, image_stat.st_blocks * 512) 120 self.assertGreater(image_stat.st_size, image_stat.st_blocks * 512)
129 121
130 # check if the resulting gzip is valid 122 # check if the resulting gzip is valid, --force is needed in case gzip_path is a symlink
131 self.assertTrue(runCmd('gzip -t %s' % gzip_path)) 123 self.assertTrue(runCmd('gzip --test --force %s' % gzip_path))
132 124
133 def test_hypervisor_fmts(self): 125 def test_hypervisor_fmts(self):
134 """ 126 """
@@ -143,17 +135,16 @@ class ImageFeatures(OESelftestTestCase):
143 img_types = [ 'vmdk', 'vdi', 'qcow2' ] 135 img_types = [ 'vmdk', 'vdi', 'qcow2' ]
144 features = "" 136 features = ""
145 for itype in img_types: 137 for itype in img_types:
146 features += 'IMAGE_FSTYPES += "wic.%s"\n' % itype 138 features += 'IMAGE_FSTYPES += "ext4.%s"\n' % itype
147 self.write_config(features) 139 self.write_config(features)
148 140
149 image_name = 'core-image-minimal' 141 image = 'core-image-minimal'
150 bitbake(image_name) 142 bitbake(image)
143 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
151 144
152 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
153 link_name = get_bb_var('IMAGE_LINK_NAME', image_name)
154 for itype in img_types: 145 for itype in img_types:
155 image_path = os.path.join(deploy_dir_image, "%s.wic.%s" % 146 image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.ext4.%s" %
156 (link_name, itype)) 147 (bb_vars['IMAGE_LINK_NAME'], itype))
157 148
158 # check if result image file is in deploy directory 149 # check if result image file is in deploy directory
159 self.assertTrue(os.path.exists(image_path)) 150 self.assertTrue(os.path.exists(image_path))
@@ -173,24 +164,22 @@ class ImageFeatures(OESelftestTestCase):
173 """ 164 """
174 Summary: Check for chaining many CONVERSION_CMDs together 165 Summary: Check for chaining many CONVERSION_CMDs together
175 Expected: 1. core-image-minimal can be built with 166 Expected: 1. core-image-minimal can be built with
176 ext4.bmap.gz.bz2.lzo.xz.u-boot and also create a 167 ext4.bmap.gz.bz2.zst.xz.u-boot and also create a
177 sha256sum 168 sha256sum
178 2. The above image has a valid sha256sum 169 2. The above image has a valid sha256sum
179 Product: oe-core 170 Product: oe-core
180 Author: Tom Rini <trini@konsulko.com> 171 Author: Tom Rini <trini@konsulko.com>
181 """ 172 """
182 173
183 conv = "ext4.bmap.gz.bz2.lzo.xz.u-boot" 174 conv = "ext4.bmap.gz.bz2.zst.xz.u-boot"
184 features = 'IMAGE_FSTYPES += "%s %s.sha256sum"' % (conv, conv) 175 features = 'IMAGE_FSTYPES += "%s %s.sha256sum"' % (conv, conv)
185 self.write_config(features) 176 self.write_config(features)
186 177
187 image_name = 'core-image-minimal' 178 image = 'core-image-minimal'
188 bitbake(image_name) 179 bitbake(image)
189 180 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
190 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') 181 image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.%s" %
191 link_name = get_bb_var('IMAGE_LINK_NAME', image_name) 182 (bb_vars['IMAGE_LINK_NAME'], conv))
192 image_path = os.path.join(deploy_dir_image, "%s.%s" %
193 (link_name, conv))
194 183
195 # check if resulting image is in the deploy directory 184 # check if resulting image is in the deploy directory
196 self.assertTrue(os.path.exists(image_path)) 185 self.assertTrue(os.path.exists(image_path))
@@ -198,7 +187,7 @@ class ImageFeatures(OESelftestTestCase):
198 187
199 # check if the resulting sha256sum agrees 188 # check if the resulting sha256sum agrees
200 self.assertTrue(runCmd('cd %s;sha256sum -c %s.%s.sha256sum' % 189 self.assertTrue(runCmd('cd %s;sha256sum -c %s.%s.sha256sum' %
201 (deploy_dir_image, link_name, conv))) 190 (bb_vars['DEPLOY_DIR_IMAGE'], bb_vars['IMAGE_LINK_NAME'], conv)))
202 191
203 def test_image_fstypes(self): 192 def test_image_fstypes(self):
204 """ 193 """
@@ -207,26 +196,43 @@ class ImageFeatures(OESelftestTestCase):
207 Product: oe-core 196 Product: oe-core
208 Author: Ed Bartosh <ed.bartosh@linux.intel.com> 197 Author: Ed Bartosh <ed.bartosh@linux.intel.com>
209 """ 198 """
210 image_name = 'core-image-minimal' 199 image = 'core-image-minimal'
211 200
212 all_image_types = set(get_bb_var("IMAGE_TYPES", image_name).split()) 201 all_image_types = set(get_bb_var("IMAGE_TYPES", image).split())
213 blacklist = set(('container', 'elf', 'f2fs', 'multiubi', 'tar.zst', 'wic.zst')) 202 skip_image_types = set(('container', 'elf', 'f2fs', 'tar.zst', 'wic.zst', 'squashfs-lzo', 'vfat'))
214 img_types = all_image_types - blacklist 203 img_types = all_image_types - skip_image_types
215 204
216 config = 'IMAGE_FSTYPES += "%s"\n'\ 205 config = """
217 'MKUBIFS_ARGS ?= "-m 2048 -e 129024 -c 2047"\n'\ 206IMAGE_FSTYPES += "%s"
218 'UBINIZE_ARGS ?= "-m 2048 -p 128KiB -s 512"' % ' '.join(img_types) 207WKS_FILE = "wictestdisk.wks"
208MKUBIFS_ARGS ?= "-m 2048 -e 129024 -c 2047"
209UBINIZE_ARGS ?= "-m 2048 -p 128KiB -s 512"
210MULTIUBI_BUILD += "mtd_2_128"
211MKUBIFS_ARGS_mtd_2_128 ?= "-m 2048 -e 129024 -c 2047"
212UBINIZE_ARGS_mtd_2_128 ?= "-m 2048 -p 128KiB -s 512"
213MULTIUBI_BUILD += "mtd_4_256"
214MKUBIFS_ARGS_mtd_4_256 ?= "-m 4096 -e 253952 -c 4096"
215UBINIZE_ARGS_mtd_4_256 ?= "-m 4096 -p 256KiB"
216""" % ' '.join(img_types)
219 self.write_config(config) 217 self.write_config(config)
220 218
221 bitbake(image_name) 219 bitbake(image)
220 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME', 'MULTIUBI_BUILD'], image)
222 221
223 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
224 link_name = get_bb_var('IMAGE_LINK_NAME', image_name)
225 for itype in img_types: 222 for itype in img_types:
226 image_path = os.path.join(deploy_dir_image, "%s.%s" % (link_name, itype)) 223 if itype == 'multiubi':
227 # check if result image is in deploy directory 224 # For multiubi build we need to manage MULTIUBI_BUILD entry to append
228 self.assertTrue(os.path.exists(image_path), 225 # specific name to IMAGE_LINK_NAME
229 "%s image %s doesn't exist" % (itype, image_path)) 226 for vname in bb_vars['MULTIUBI_BUILD'].split():
227 image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s_%s.ubifs" % (bb_vars['IMAGE_LINK_NAME'], vname))
228 # check if result image is in deploy directory
229 self.assertTrue(os.path.exists(image_path),
230 "%s image %s doesn't exist" % (itype, image_path))
231 else:
232 image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.%s" % (bb_vars['IMAGE_LINK_NAME'], itype))
233 # check if result image is in deploy directory
234 self.assertTrue(os.path.exists(image_path),
235 "%s image %s doesn't exist" % (itype, image_path))
230 236
231 def test_useradd_static(self): 237 def test_useradd_static(self):
232 config = """ 238 config = """
@@ -240,16 +246,11 @@ USERADD_GID_TABLES += "files/static-group"
240 246
241 def test_no_busybox_base_utils(self): 247 def test_no_busybox_base_utils(self):
242 config = """ 248 config = """
243# Enable x11 249# Enable wayland
244DISTRO_FEATURES_append += "x11" 250DISTRO_FEATURES:append = " pam opengl wayland"
245 251
246# Switch to systemd 252# Switch to systemd
247DISTRO_FEATURES += "systemd" 253INIT_MANAGER = "systemd"
248VIRTUAL-RUNTIME_init_manager = "systemd"
249VIRTUAL-RUNTIME_initscripts = ""
250VIRTUAL-RUNTIME_syslog = ""
251VIRTUAL-RUNTIME_login_manager = "shadow-base"
252DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit"
253 254
254# Replace busybox 255# Replace busybox
255PREFERRED_PROVIDER_virtual/base-utils = "packagegroup-core-base-utils" 256PREFERRED_PROVIDER_virtual/base-utils = "packagegroup-core-base-utils"
@@ -257,12 +258,12 @@ VIRTUAL-RUNTIME_base-utils = "packagegroup-core-base-utils"
257VIRTUAL-RUNTIME_base-utils-hwclock = "util-linux-hwclock" 258VIRTUAL-RUNTIME_base-utils-hwclock = "util-linux-hwclock"
258VIRTUAL-RUNTIME_base-utils-syslog = "" 259VIRTUAL-RUNTIME_base-utils-syslog = ""
259 260
260# Blacklist busybox 261# Skip busybox
261PNBLACKLIST[busybox] = "Don't build this" 262SKIP_RECIPE[busybox] = "Don't build this"
262""" 263"""
263 self.write_config(config) 264 self.write_config(config)
264 265
265 bitbake("--graphviz core-image-sato") 266 bitbake("--graphviz core-image-weston")
266 267
267 def test_image_gen_debugfs(self): 268 def test_image_gen_debugfs(self):
268 """ 269 """
@@ -275,20 +276,20 @@ PNBLACKLIST[busybox] = "Don't build this"
275 Yeoh Ee Peng <ee.peng.yeoh@intel.com> 276 Yeoh Ee Peng <ee.peng.yeoh@intel.com>
276 """ 277 """
277 278
278 image_name = 'core-image-minimal' 279 image = 'core-image-minimal'
280 image_fstypes_debugfs = 'tar.bz2'
279 features = 'IMAGE_GEN_DEBUGFS = "1"\n' 281 features = 'IMAGE_GEN_DEBUGFS = "1"\n'
280 features += 'IMAGE_FSTYPES_DEBUGFS = "tar.bz2"\n' 282 features += 'IMAGE_FSTYPES_DEBUGFS = "%s"\n' % image_fstypes_debugfs
281 features += 'MACHINE = "genericx86-64"\n'
282 self.write_config(features) 283 self.write_config(features)
283 284
284 bitbake(image_name) 285 bitbake(image)
285 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') 286 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
286 dbg_tar_file = os.path.join(deploy_dir_image, "*-dbg.rootfs.tar.bz2") 287
287 debug_files = glob.glob(dbg_tar_file) 288 dbg_tar_file = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s-dbg.%s" % (bb_vars['IMAGE_LINK_NAME'], image_fstypes_debugfs))
288 self.assertNotEqual(len(debug_files), 0, 'debug filesystem not generated at %s' % dbg_tar_file) 289 self.assertTrue(os.path.exists(dbg_tar_file), 'debug filesystem not generated at %s' % dbg_tar_file)
289 result = runCmd('cd %s; tar xvf %s' % (deploy_dir_image, dbg_tar_file)) 290 result = runCmd('cd %s; tar xvf %s' % (bb_vars['DEPLOY_DIR_IMAGE'], dbg_tar_file))
290 self.assertEqual(result.status, 0, msg='Failed to extract %s: %s' % (dbg_tar_file, result.output)) 291 self.assertEqual(result.status, 0, msg='Failed to extract %s: %s' % (dbg_tar_file, result.output))
291 result = runCmd('find %s -name %s' % (deploy_dir_image, "udevadm")) 292 result = runCmd('find %s -name %s' % (bb_vars['DEPLOY_DIR_IMAGE'], "udevadm"))
292 self.assertTrue("udevadm" in result.output, msg='Failed to find udevadm: %s' % result.output) 293 self.assertTrue("udevadm" in result.output, msg='Failed to find udevadm: %s' % result.output)
293 dbg_symbols_targets = result.output.splitlines() 294 dbg_symbols_targets = result.output.splitlines()
294 self.assertTrue(dbg_symbols_targets, msg='Failed to split udevadm: %s' % dbg_symbols_targets) 295 self.assertTrue(dbg_symbols_targets, msg='Failed to split udevadm: %s' % dbg_symbols_targets)
@@ -298,9 +299,33 @@ PNBLACKLIST[busybox] = "Don't build this"
298 299
299 def test_empty_image(self): 300 def test_empty_image(self):
300 """Test creation of image with no packages""" 301 """Test creation of image with no packages"""
301 bitbake('test-empty-image') 302 image = 'test-empty-image'
302 res_dir = get_bb_var('DEPLOY_DIR_IMAGE') 303 bitbake(image)
303 images = os.path.join(res_dir, "test-empty-image-*.manifest") 304 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
304 result = glob.glob(images) 305 manifest = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.manifest" % bb_vars['IMAGE_LINK_NAME'])
305 with open(result[1],"r") as f: 306 self.assertTrue(os.path.exists(manifest))
307
308 with open(manifest, "r") as f:
306 self.assertEqual(len(f.read().strip()),0) 309 self.assertEqual(len(f.read().strip()),0)
310
311 def test_mandb(self):
312 """
313 Test that an image containing manpages has working man and apropos commands.
314 """
315 config = """
316DISTRO_FEATURES:append = " api-documentation"
317CORE_IMAGE_EXTRA_INSTALL = "man-pages"
318"""
319 self.write_config(config)
320 bitbake("core-image-minimal")
321
322 with runqemu('core-image-minimal', ssh=False, runqemuparams='nographic') as qemu:
323 # This manpage is provided by man-pages
324 status, output = qemu.run_serial("apropos 8859")
325 self.assertEqual(status, 1, 'Failed to run apropos: %s' % (output))
326 self.assertIn("iso_8859_15", output)
327
328 # This manpage is provided by man-pages
329 status, output = qemu.run_serial("man --pager=cat intro")
330 self.assertEqual(status, 1, 'Failed to run man: %s' % (output))
331 self.assertIn("introduction to user commands", output)
diff --git a/meta/lib/oeqa/selftest/cases/incompatible_lic.py b/meta/lib/oeqa/selftest/cases/incompatible_lic.py
index 152da6332a..93884f5731 100644
--- a/meta/lib/oeqa/selftest/cases/incompatible_lic.py
+++ b/meta/lib/oeqa/selftest/cases/incompatible_lic.py
@@ -1,10 +1,16 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
1from oeqa.selftest.case import OESelftestTestCase 6from oeqa.selftest.case import OESelftestTestCase
2from oeqa.utils.commands import bitbake 7from oeqa.utils.commands import bitbake
3 8
4class IncompatibleLicenseTests(OESelftestTestCase): 9class IncompatibleLicenseTestObsolete(OESelftestTestCase):
5 10
6 def lic_test(self, pn, pn_lic, lic): 11 def lic_test(self, pn, pn_lic, lic, error_msg=None):
7 error_msg = 'ERROR: Nothing PROVIDES \'%s\'\n%s was skipped: it has incompatible license(s): %s' % (pn, pn, pn_lic) 12 if not error_msg:
13 error_msg = 'ERROR: Nothing PROVIDES \'%s\'\n%s was skipped: it has incompatible license(s): %s' % (pn, pn, pn_lic)
8 14
9 self.write_config("INCOMPATIBLE_LICENSE += \"%s\"" % (lic)) 15 self.write_config("INCOMPATIBLE_LICENSE += \"%s\"" % (lic))
10 16
@@ -12,72 +18,81 @@ class IncompatibleLicenseTests(OESelftestTestCase):
12 if error_msg not in result.output: 18 if error_msg not in result.output:
13 raise AssertionError(result.output) 19 raise AssertionError(result.output)
14 20
15 # Verify that a package with an SPDX license (from AVAILABLE_LICENSES) 21 # Verify that a package with an SPDX license cannot be built when
16 # cannot be built when INCOMPATIBLE_LICENSE contains this SPDX license 22 # INCOMPATIBLE_LICENSE contains an alias (in SPDXLICENSEMAP) of this SPDX
17 def test_incompatible_spdx_license(self): 23 # license
18 self.lic_test('incompatible-license', 'GPL-3.0-only', 'GPL-3.0-only')
19
20 # Verify that a package with an SPDX license (from AVAILABLE_LICENSES)
21 # cannot be built when INCOMPATIBLE_LICENSE contains an alias (in
22 # SPDXLICENSEMAP) of this SPDX license
23 def test_incompatible_alias_spdx_license(self): 24 def test_incompatible_alias_spdx_license(self):
24 self.lic_test('incompatible-license', 'GPL-3.0-only', 'GPLv3') 25 self.lic_test('incompatible-license', 'GPL-3.0-only', 'GPLv3', "is an obsolete license, please use an SPDX reference in INCOMPATIBLE_LICENSE")
25
26 # Verify that a package with an SPDX license (from AVAILABLE_LICENSES)
27 # cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded license
28 # matching this SPDX license
29 def test_incompatible_spdx_license_wildcard(self):
30 self.lic_test('incompatible-license', 'GPL-3.0-only', '*GPL-3.0-only')
31 26
32 # Verify that a package with an SPDX license (from AVAILABLE_LICENSES) 27 # Verify that a package with an SPDX license cannot be built when
33 # cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded alias 28 # INCOMPATIBLE_LICENSE contains a wildcarded alias license matching this
34 # license matching this SPDX license 29 # SPDX license
35 def test_incompatible_alias_spdx_license_wildcard(self): 30 def test_incompatible_alias_spdx_license_wildcard(self):
36 self.lic_test('incompatible-license', 'GPL-3.0-only', '*GPLv3') 31 self.lic_test('incompatible-license', 'GPL-3.0-only', '*GPLv3', "*GPLv3 is an invalid license wildcard entry")
37
38 # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX
39 # license cannot be built when INCOMPATIBLE_LICENSE contains this SPDX
40 # license
41 def test_incompatible_spdx_license_alias(self):
42 self.lic_test('incompatible-license-alias', 'GPL-3.0-only', 'GPL-3.0-only')
43 32
44 # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX 33 # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX
45 # license cannot be built when INCOMPATIBLE_LICENSE contains this alias 34 # license cannot be built when INCOMPATIBLE_LICENSE contains this alias
46 def test_incompatible_alias_spdx_license_alias(self): 35 def test_incompatible_alias_spdx_license_alias(self):
47 self.lic_test('incompatible-license-alias', 'GPL-3.0-only', 'GPLv3') 36 self.lic_test('incompatible-license-alias', 'GPL-3.0-only', 'GPLv3', "is an obsolete license, please use an SPDX reference in INCOMPATIBLE_LICENSE")
48 37
49 # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX 38 # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX
50 # license cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded 39 # license cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded
51 # license matching this SPDX license 40 # license matching this SPDX license
52 def test_incompatible_spdx_license_alias_wildcard(self): 41 def test_incompatible_spdx_license_alias_wildcard(self):
53 self.lic_test('incompatible-license-alias', 'GPL-3.0-only', '*GPL-3.0') 42 self.lic_test('incompatible-license-alias', 'GPL-3.0-only', '*GPL-3.0', "*GPL-3.0 is an invalid license wildcard entry")
54 43
55 # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX 44 # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX
56 # license cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded 45 # license cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded
57 # alias license matching the SPDX license 46 # alias license matching the SPDX license
58 def test_incompatible_alias_spdx_license_alias_wildcard(self): 47 def test_incompatible_alias_spdx_license_alias_wildcard(self):
59 self.lic_test('incompatible-license-alias', 'GPL-3.0-only', '*GPLv3') 48 self.lic_test('incompatible-license-alias', 'GPL-3.0-only', '*GPLv3', "*GPLv3 is an invalid license wildcard entry")
60 49
61 # Verify that a package with multiple SPDX licenses (from
62 # AVAILABLE_LICENSES) cannot be built when INCOMPATIBLE_LICENSE contains
63 # some of them
64 def test_incompatible_spdx_licenses(self):
65 self.lic_test('incompatible-licenses', 'GPL-3.0-only LGPL-3.0-only', 'GPL-3.0-only LGPL-3.0-only')
66 50
67 # Verify that a package with multiple SPDX licenses (from 51 # Verify that a package with multiple SPDX licenses cannot be built when
68 # AVAILABLE_LICENSES) cannot be built when INCOMPATIBLE_LICENSE contains a 52 # INCOMPATIBLE_LICENSE contains a wildcard to some of them
69 # wildcard to some of them
70 def test_incompatible_spdx_licenses_wildcard(self): 53 def test_incompatible_spdx_licenses_wildcard(self):
71 self.lic_test('incompatible-licenses', 'GPL-3.0-only LGPL-3.0-only', '*GPL-3.0-only') 54 self.lic_test('incompatible-licenses', 'GPL-3.0-only LGPL-3.0-only', '*GPL-3.0-only', "*GPL-3.0-only is an invalid license wildcard entry")
72 55
73 # Verify that a package with multiple SPDX licenses (from 56
74 # AVAILABLE_LICENSES) cannot be built when INCOMPATIBLE_LICENSE contains a 57 # Verify that a package with multiple SPDX licenses cannot be built when
75 # wildcard matching all licenses 58 # INCOMPATIBLE_LICENSE contains a wildcard matching all licenses
76 def test_incompatible_all_licenses_wildcard(self): 59 def test_incompatible_all_licenses_wildcard(self):
77 self.lic_test('incompatible-licenses', 'GPL-2.0-only GPL-3.0-only LGPL-3.0-only', '*') 60 self.lic_test('incompatible-licenses', 'GPL-2.0-only GPL-3.0-only LGPL-3.0-only', '*', "* is an invalid license wildcard entry")
61
62class IncompatibleLicenseTests(OESelftestTestCase):
63
64 def lic_test(self, pn, pn_lic, lic):
65 error_msg = 'ERROR: Nothing PROVIDES \'%s\'\n%s was skipped: it has incompatible license(s): %s' % (pn, pn, pn_lic)
78 66
79 # Verify that a package with a non-SPDX license (neither in 67 self.write_config("INCOMPATIBLE_LICENSE += \"%s\"" % (lic))
80 # AVAILABLE_LICENSES nor in SPDXLICENSEMAP) cannot be built when 68
69 result = bitbake('%s --dry-run' % (pn), ignore_status=True)
70 if error_msg not in result.output:
71 raise AssertionError(result.output)
72
73 # Verify that a package with an SPDX license cannot be built when
74 # INCOMPATIBLE_LICENSE contains this SPDX license
75 def test_incompatible_spdx_license(self):
76 self.lic_test('incompatible-license', 'GPL-3.0-only', 'GPL-3.0-only')
77
78 # Verify that a package with an SPDX license cannot be built when
79 # INCOMPATIBLE_LICENSE contains a wildcarded license matching this SPDX
80 # license
81 def test_incompatible_spdx_license_wildcard(self):
82 self.lic_test('incompatible-license', 'GPL-3.0-only', 'GPL-3.0*')
83
84 # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX
85 # license cannot be built when INCOMPATIBLE_LICENSE contains this SPDX
86 # license
87 def test_incompatible_spdx_license_alias(self):
88 self.lic_test('incompatible-license-alias', 'GPL-3.0-only', 'GPL-3.0-only')
89
90 # Verify that a package with multiple SPDX licenses cannot be built when
91 # INCOMPATIBLE_LICENSE contains some of them
92 def test_incompatible_spdx_licenses(self):
93 self.lic_test('incompatible-licenses', 'GPL-3.0-only LGPL-3.0-only', 'GPL-3.0-only LGPL-3.0-only')
94
95 # Verify that a package with a non-SPDX license cannot be built when
81 # INCOMPATIBLE_LICENSE contains this license 96 # INCOMPATIBLE_LICENSE contains this license
82 def test_incompatible_nonspdx_license(self): 97 def test_incompatible_nonspdx_license(self):
83 self.lic_test('incompatible-nonspdx-license', 'FooLicense', 'FooLicense') 98 self.lic_test('incompatible-nonspdx-license', 'FooLicense', 'FooLicense')
@@ -85,8 +100,9 @@ class IncompatibleLicenseTests(OESelftestTestCase):
85class IncompatibleLicensePerImageTests(OESelftestTestCase): 100class IncompatibleLicensePerImageTests(OESelftestTestCase):
86 def default_config(self): 101 def default_config(self):
87 return """ 102 return """
88IMAGE_INSTALL_append = " bash" 103IMAGE_INSTALL:append = " bash"
89INCOMPATIBLE_LICENSE_pn-core-image-minimal = "GPL-3.0 LGPL-3.0" 104INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*"
105MACHINE_ESSENTIAL_EXTRA_RDEPENDS:remove = "tar"
90""" 106"""
91 107
92 def test_bash_default(self): 108 def test_bash_default(self):
@@ -98,7 +114,8 @@ INCOMPATIBLE_LICENSE_pn-core-image-minimal = "GPL-3.0 LGPL-3.0"
98 raise AssertionError(result.output) 114 raise AssertionError(result.output)
99 115
100 def test_bash_and_license(self): 116 def test_bash_and_license(self):
101 self.write_config(self.default_config() + '\nLICENSE_append_pn-bash = " & SomeLicense"') 117 self.disable_class("create-spdx")
118 self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " & SomeLicense"\nERROR_QA:remove:pn-bash = "license-exists"')
102 error_msg = "ERROR: core-image-minimal-1.0-r0 do_rootfs: Package bash cannot be installed into the image because it has incompatible license(s): GPL-3.0-or-later" 119 error_msg = "ERROR: core-image-minimal-1.0-r0 do_rootfs: Package bash cannot be installed into the image because it has incompatible license(s): GPL-3.0-or-later"
103 120
104 result = bitbake('core-image-minimal', ignore_status=True) 121 result = bitbake('core-image-minimal', ignore_status=True)
@@ -106,30 +123,33 @@ INCOMPATIBLE_LICENSE_pn-core-image-minimal = "GPL-3.0 LGPL-3.0"
106 raise AssertionError(result.output) 123 raise AssertionError(result.output)
107 124
108 def test_bash_or_license(self): 125 def test_bash_or_license(self):
109 self.write_config(self.default_config() + '\nLICENSE_append_pn-bash = " | SomeLicense"') 126 self.disable_class("create-spdx")
127 self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " | SomeLicense"\nERROR_QA:remove:pn-bash = "license-exists"\nERROR_QA:remove:pn-core-image-minimal = "license-file-missing"')
110 128
111 bitbake('core-image-minimal') 129 bitbake('core-image-minimal')
112 130
113 def test_bash_whitelist(self): 131 def test_bash_license_exceptions(self):
114 self.write_config(self.default_config() + '\nWHITELIST_GPL-3.0_pn-core-image-minimal = "bash"') 132 self.write_config(self.default_config() + '\nINCOMPATIBLE_LICENSE_EXCEPTIONS:pn-core-image-minimal = "bash:GPL-3.0-or-later"\nERROR_QA:remove:pn-core-image-minimal = "license-exception"')
115 133
116 bitbake('core-image-minimal') 134 bitbake('core-image-minimal')
117 135
118class NoGPL3InImagesTests(OESelftestTestCase): 136class NoGPL3InImagesTests(OESelftestTestCase):
119 def test_core_image_minimal(self): 137 def test_core_image_minimal(self):
120 self.write_config(""" 138 self.write_config("""
121INCOMPATIBLE_LICENSE_pn-core-image-minimal = "GPL-3.0 LGPL-3.0" 139INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*"
140
141require conf/distro/include/no-gplv3.inc
122""") 142""")
123 bitbake('core-image-minimal') 143 bitbake('core-image-minimal')
124 144
125 def test_core_image_full_cmdline(self): 145 def test_core_image_full_cmdline_weston(self):
126 self.write_config(""" 146 self.write_config("""
127INHERIT += "testimage"\n 147IMAGE_CLASSES += "testimage"
128INCOMPATIBLE_LICENSE_pn-core-image-full-cmdline = "GPL-3.0 LGPL-3.0"\n 148INCOMPATIBLE_LICENSE:pn-core-image-full-cmdline = "GPL-3.0* LGPL-3.0*"
129RDEPENDS_packagegroup-core-full-cmdline-utils_remove = "bash bc coreutils cpio ed findutils gawk grep mc mc-fish mc-helpers mc-helpers-perl sed tar time"\n 149INCOMPATIBLE_LICENSE:pn-core-image-weston = "GPL-3.0* LGPL-3.0*"
130RDEPENDS_packagegroup-core-full-cmdline-dev-utils_remove = "diffutils m4 make patch"\n 150
131RDEPENDS_packagegroup-core-full-cmdline-multiuser_remove = "gzip"\n 151require conf/distro/include/no-gplv3.inc
132""") 152""")
133 bitbake('core-image-full-cmdline') 153 bitbake('core-image-full-cmdline core-image-weston')
134 bitbake('-c testimage core-image-full-cmdline') 154 bitbake('-c testimage core-image-full-cmdline core-image-weston')
135 155
diff --git a/meta/lib/oeqa/selftest/cases/intercept.py b/meta/lib/oeqa/selftest/cases/intercept.py
new file mode 100644
index 0000000000..12583c3099
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/intercept.py
@@ -0,0 +1,21 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import bitbake
9
10class GitCheck(OESelftestTestCase):
11 def test_git_intercept(self):
12 """
13 Git binaries with CVE-2022-24765 fixed will refuse to operate on a
14 repository which is owned by a different user. This breaks our
15 do_install task as that runs inside pseudo, so the git repository is
16 owned by the build user but git is running as (fake)root.
17
18 We have an intercept which disables pseudo, so verify that it works.
19 """
20 bitbake("git-submodule-test -c test_git_as_user")
21 bitbake("git-submodule-test -c test_git_as_root")
diff --git a/meta/lib/oeqa/selftest/cases/kerneldevelopment.py b/meta/lib/oeqa/selftest/cases/kerneldevelopment.py
index a61876ee61..b1f78a0cd1 100644
--- a/meta/lib/oeqa/selftest/cases/kerneldevelopment.py
+++ b/meta/lib/oeqa/selftest/cases/kerneldevelopment.py
@@ -1,3 +1,9 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
1import os 7import os
2from oeqa.selftest.case import OESelftestTestCase 8from oeqa.selftest.case import OESelftestTestCase
3from oeqa.utils.commands import runCmd, get_bb_var 9from oeqa.utils.commands import runCmd, get_bb_var
@@ -58,7 +64,8 @@ class KernelDev(OESelftestTestCase):
58 recipe_append = os.path.join(self.recipeskernel_dir, 'linux-yocto_%.bbappend') 64 recipe_append = os.path.join(self.recipeskernel_dir, 'linux-yocto_%.bbappend')
59 with open(recipe_append, 'w+') as fh: 65 with open(recipe_append, 'w+') as fh:
60 fh.write('SRC_URI += "file://%s"\n' % patch_name) 66 fh.write('SRC_URI += "file://%s"\n' % patch_name)
61 fh.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"') 67 fh.write('ERROR_QA:remove:pn-linux-yocto = "patch-status"\n')
68 fh.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"')
62 69
63 runCmd('bitbake virtual/kernel -c clean') 70 runCmd('bitbake virtual/kernel -c clean')
64 runCmd('bitbake virtual/kernel -c patch') 71 runCmd('bitbake virtual/kernel -c patch')
diff --git a/meta/lib/oeqa/selftest/cases/layerappend.py b/meta/lib/oeqa/selftest/cases/layerappend.py
index 05e9426fc6..64b17117cc 100644
--- a/meta/lib/oeqa/selftest/cases/layerappend.py
+++ b/meta/lib/oeqa/selftest/cases/layerappend.py
@@ -1,11 +1,13 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import os 7import os
6 8
7from oeqa.selftest.case import OESelftestTestCase 9from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import runCmd, bitbake, get_bb_var 10from oeqa.utils.commands import bitbake, get_bb_var
9import oeqa.utils.ftools as ftools 11import oeqa.utils.ftools as ftools
10 12
11class LayerAppendTests(OESelftestTestCase): 13class LayerAppendTests(OESelftestTestCase):
@@ -30,20 +32,20 @@ python do_build() {
30addtask build 32addtask build
31""" 33"""
32 append = """ 34 append = """
33FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:" 35FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"
34 36
35SRC_URI_append = " file://appendtest.txt" 37SRC_URI:append = " file://appendtest.txt"
36 38
37sysroot_stage_all_append() { 39sysroot_stage_all:append() {
38 install -m 644 ${WORKDIR}/appendtest.txt ${SYSROOT_DESTDIR}/ 40 install -m 644 ${UNPACKDIR}/appendtest.txt ${SYSROOT_DESTDIR}/
39} 41}
40 42
41""" 43"""
42 44
43 append2 = """ 45 append2 = """
44FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:" 46FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"
45 47
46SRC_URI_append = " file://appendtest.txt" 48SRC_URI:append = " file://appendtest.txt"
47""" 49"""
48 layerappend = '' 50 layerappend = ''
49 51
diff --git a/meta/lib/oeqa/selftest/cases/liboe.py b/meta/lib/oeqa/selftest/cases/liboe.py
index afe8f8809f..930354c931 100644
--- a/meta/lib/oeqa/selftest/cases/liboe.py
+++ b/meta/lib/oeqa/selftest/cases/liboe.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -7,11 +9,11 @@ from oeqa.utils.commands import get_bb_var, get_bb_vars, bitbake, runCmd
7import oe.path 9import oe.path
8import os 10import os
9 11
10class LibOE(OESelftestTestCase): 12class CopyTreeTests(OESelftestTestCase):
11 13
12 @classmethod 14 @classmethod
13 def setUpClass(cls): 15 def setUpClass(cls):
14 super(LibOE, cls).setUpClass() 16 super().setUpClass()
15 cls.tmp_dir = get_bb_var('TMPDIR') 17 cls.tmp_dir = get_bb_var('TMPDIR')
16 18
17 def test_copy_tree_special(self): 19 def test_copy_tree_special(self):
@@ -97,6 +99,39 @@ class LibOE(OESelftestTestCase):
97 99
98 dstcnt = len(os.listdir(dst)) 100 dstcnt = len(os.listdir(dst))
99 srccnt = len(os.listdir(src)) 101 srccnt = len(os.listdir(src))
100 self.assertEquals(dstcnt, len(testfiles), "Number of files in dst (%s) differs from number of files in src(%s)." % (dstcnt, srccnt)) 102 self.assertEqual(dstcnt, len(testfiles), "Number of files in dst (%s) differs from number of files in src(%s)." % (dstcnt, srccnt))
101 103
102 oe.path.remove(testloc) 104 oe.path.remove(testloc)
105
106class SubprocessTests(OESelftestTestCase):
107
108 def test_subprocess_tweak(self):
109 """
110 Test that the string representation of
111 oeqa.utils.subprocesstweak.OETestCalledProcessError includes stdout and
112 stderr, as expected.
113 """
114 script = """
115#! /bin/sh
116echo Ivn fgqbhg | tr '[a-zA-Z]' '[n-za-mN-ZA-M]'
117echo Ivn fgqree | tr '[a-zA-Z]' '[n-za-mN-ZA-M]' >&2
118exit 42
119 """
120
121 import subprocess
122 import unittest.mock
123 from oeqa.utils.subprocesstweak import OETestCalledProcessError
124
125 with self.assertRaises(OETestCalledProcessError) as cm:
126 with unittest.mock.patch("subprocess.CalledProcessError", OETestCalledProcessError):
127 subprocess.run(["bash", "-"], input=script, text=True, capture_output=True, check=True)
128
129 e = cm.exception
130 self.assertEqual(e.returncode, 42)
131 self.assertEqual("Via stdout\n", e.stdout)
132 self.assertEqual("Via stderr\n", e.stderr)
133
134 string = str(e)
135 self.assertIn("exit status 42", string)
136 self.assertIn("Standard Output: Via stdout", string)
137 self.assertIn("Standard Error: Via stderr", string)
diff --git a/meta/lib/oeqa/selftest/cases/lic_checksum.py b/meta/lib/oeqa/selftest/cases/lic_checksum.py
index bae935d697..2d0b805b90 100644
--- a/meta/lib/oeqa/selftest/cases/lic_checksum.py
+++ b/meta/lib/oeqa/selftest/cases/lic_checksum.py
@@ -1,16 +1,36 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import os 7import os
6import tempfile 8import tempfile
9import urllib
7 10
8from oeqa.selftest.case import OESelftestTestCase 11from oeqa.selftest.case import OESelftestTestCase
9from oeqa.utils.commands import bitbake 12from oeqa.utils.commands import bitbake
10from oeqa.utils import CommandError
11 13
12class LicenseTests(OESelftestTestCase): 14class LicenseTests(OESelftestTestCase):
13 15
16 def test_checksum_with_space(self):
17 bitbake_cmd = '-c populate_lic emptytest'
18
19 lic_file, lic_path = tempfile.mkstemp(" -afterspace")
20 os.close(lic_file)
21 #self.track_for_cleanup(lic_path)
22
23 self.write_config("INHERIT:remove = \"report-error\"")
24
25 self.write_recipeinc('emptytest', """
26INHIBIT_DEFAULT_DEPS = "1"
27LIC_FILES_CHKSUM = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e"
28SRC_URI = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e"
29""" % (urllib.parse.quote(lic_path), urllib.parse.quote(lic_path)))
30 result = bitbake(bitbake_cmd)
31 self.delete_recipeinc('emptytest')
32
33
14 # Verify that changing a license file that has an absolute path causes 34 # Verify that changing a license file that has an absolute path causes
15 # the license qa to fail due to a mismatched md5sum. 35 # the license qa to fail due to a mismatched md5sum.
16 def test_nonmatching_checksum(self): 36 def test_nonmatching_checksum(self):
@@ -21,7 +41,7 @@ class LicenseTests(OESelftestTestCase):
21 os.close(lic_file) 41 os.close(lic_file)
22 self.track_for_cleanup(lic_path) 42 self.track_for_cleanup(lic_path)
23 43
24 self.write_config("INHERIT_remove = \"report-error\"") 44 self.write_config("INHERIT:remove = \"report-error\"")
25 45
26 self.write_recipeinc('emptytest', """ 46 self.write_recipeinc('emptytest', """
27INHIBIT_DEFAULT_DEPS = "1" 47INHIBIT_DEFAULT_DEPS = "1"
@@ -34,5 +54,6 @@ SRC_URI = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e"
34 f.write("data") 54 f.write("data")
35 55
36 result = bitbake(bitbake_cmd, ignore_status=True) 56 result = bitbake(bitbake_cmd, ignore_status=True)
57 self.delete_recipeinc('emptytest')
37 if error_msg not in result.output: 58 if error_msg not in result.output:
38 raise AssertionError(result.output) 59 raise AssertionError(result.output)
diff --git a/meta/lib/oeqa/selftest/cases/locales.py b/meta/lib/oeqa/selftest/cases/locales.py
new file mode 100644
index 0000000000..ac4888ef66
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/locales.py
@@ -0,0 +1,54 @@
1#
2# SPDX-License-Identifier: MIT
3#
4
5from oeqa.selftest.case import OESelftestTestCase
6from oeqa.core.decorator import OETestTag
7from oeqa.utils.commands import bitbake, runqemu
8
9class LocalesTest(OESelftestTestCase):
10
11 @OETestTag("runqemu")
12
13 def run_locales_test(self, binary_enabled):
14 features = []
15 features.append('EXTRA_IMAGE_FEATURES = "empty-root-password allow-empty-password allow-root-login"')
16 features.append('IMAGE_INSTALL:append = " glibc-utils localedef"')
17 features.append('GLIBC_GENERATE_LOCALES = "en_US.UTF-8 fr_FR.UTF-8 en_US.ISO-8859-1 de_DE.UTF-8 fr_FR.ISO-8859-1 zh_HK.BIG5-HKSCS tr_TR.UTF-8"')
18 features.append('IMAGE_LINGUAS:append = " en-us fr-fr"')
19 if binary_enabled:
20 features.append('ENABLE_BINARY_LOCALE_GENERATION = "1"')
21 else:
22 features.append('ENABLE_BINARY_LOCALE_GENERATION = "0"')
23 self.write_config("\n".join(features))
24
25 # Build a core-image-minimal
26 bitbake('core-image-minimal')
27
28 with runqemu("core-image-minimal", ssh=False, runqemuparams='nographic') as qemu:
29 cmd = "locale -a"
30 status, output = qemu.run_serial(cmd)
31 # output must includes fr_FR or fr_FR.UTF-8
32 self.assertEqual(status, 1, msg='locale test command failed: output: %s' % output)
33 self.assertIn("fr_FR", output, msg='locale -a test failed: output: %s' % output)
34
35 cmd = "localedef --list-archive -v"
36 status, output = qemu.run_serial(cmd)
37 # output must includes fr_FR.utf8
38 self.assertEqual(status, 1, msg='localedef test command failed: output: %s' % output)
39 self.assertIn("fr_FR.utf8", output, msg='localedef test failed: output: %s' % output)
40
41 def test_locales_on(self):
42 """
43 Summary: Test the locales are generated
44 Expected: 1. Check the locale exist in the locale-archive
45 2. Check the locale exist for the glibc
46 3. Check the locale can be generated
47 Product: oe-core
48 Author: Louis Rannou <lrannou@baylibre.com>
49 AutomatedBy: Louis Rannou <lrannou@baylibre.com>
50 """
51 self.run_locales_test(True)
52
53 def test_locales_off(self):
54 self.run_locales_test(False)
diff --git a/meta/lib/oeqa/selftest/cases/manifest.py b/meta/lib/oeqa/selftest/cases/manifest.py
index 5d13f35468..07a6c80489 100644
--- a/meta/lib/oeqa/selftest/cases/manifest.py
+++ b/meta/lib/oeqa/selftest/cases/manifest.py
@@ -1,11 +1,13 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import os 7import os
6 8
7from oeqa.selftest.case import OESelftestTestCase 9from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import get_bb_var, get_bb_vars, bitbake 10from oeqa.utils.commands import get_bb_var, bitbake
9 11
10class ManifestEntry: 12class ManifestEntry:
11 '''A manifest item of a collection able to list missing packages''' 13 '''A manifest item of a collection able to list missing packages'''
diff --git a/meta/lib/oeqa/selftest/cases/meta_ide.py b/meta/lib/oeqa/selftest/cases/meta_ide.py
index 6f10d30dc9..c3a7df4cdf 100644
--- a/meta/lib/oeqa/selftest/cases/meta_ide.py
+++ b/meta/lib/oeqa/selftest/cases/meta_ide.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -16,13 +18,15 @@ class MetaIDE(OESelftestTestCase):
16 def setUpClass(cls): 18 def setUpClass(cls):
17 super(MetaIDE, cls).setUpClass() 19 super(MetaIDE, cls).setUpClass()
18 bitbake('meta-ide-support') 20 bitbake('meta-ide-support')
19 bb_vars = get_bb_vars(['MULTIMACH_TARGET_SYS', 'TMPDIR', 'COREBASE']) 21 bitbake('build-sysroots -c build_native_sysroot')
20 cls.environment_script = 'environment-setup-%s' % bb_vars['MULTIMACH_TARGET_SYS'] 22 bitbake('build-sysroots -c build_target_sysroot')
21 cls.tmpdir = bb_vars['TMPDIR'] 23 bb_vars = get_bb_vars(['MACHINE_ARCH', 'TARGET_VENDOR', 'TARGET_OS', 'DEPLOY_DIR_IMAGE', 'COREBASE'])
22 cls.environment_script_path = '%s/%s' % (cls.tmpdir, cls.environment_script) 24 cls.environment_script = 'environment-setup-%s%s-%s' % (bb_vars['MACHINE_ARCH'], bb_vars['TARGET_VENDOR'], bb_vars['TARGET_OS'])
25 cls.deploydir = bb_vars['DEPLOY_DIR_IMAGE']
26 cls.environment_script_path = '%s/%s' % (cls.deploydir, cls.environment_script)
23 cls.corebasedir = bb_vars['COREBASE'] 27 cls.corebasedir = bb_vars['COREBASE']
24 cls.tmpdir_metaideQA = tempfile.mkdtemp(prefix='metaide') 28 cls.tmpdir_metaideQA = tempfile.mkdtemp(prefix='metaide')
25 29
26 @classmethod 30 @classmethod
27 def tearDownClass(cls): 31 def tearDownClass(cls):
28 shutil.rmtree(cls.tmpdir_metaideQA, ignore_errors=True) 32 shutil.rmtree(cls.tmpdir_metaideQA, ignore_errors=True)
@@ -40,12 +44,17 @@ class MetaIDE(OESelftestTestCase):
40 def test_meta_ide_can_build_cpio_project(self): 44 def test_meta_ide_can_build_cpio_project(self):
41 dl_dir = self.td.get('DL_DIR', None) 45 dl_dir = self.td.get('DL_DIR', None)
42 self.project = SDKBuildProject(self.tmpdir_metaideQA + "/cpio/", self.environment_script_path, 46 self.project = SDKBuildProject(self.tmpdir_metaideQA + "/cpio/", self.environment_script_path,
43 "https://ftp.gnu.org/gnu/cpio/cpio-2.13.tar.gz", 47 "https://ftp.gnu.org/gnu/cpio/cpio-2.15.tar.gz",
44 self.tmpdir_metaideQA, self.td['DATETIME'], dl_dir=dl_dir) 48 self.tmpdir_metaideQA, self.td['DATETIME'], dl_dir=dl_dir)
45 self.project.download_archive() 49 self.project.download_archive()
46 self.assertEqual(self.project.run_configure('$CONFIGURE_FLAGS --disable-maintainer-mode','sed -i -e "/char \*program_name/d" src/global.c;'), 0, 50 self.assertEqual(self.project.run_configure('CFLAGS="-std=gnu17 -Dbool=int -Dtrue=1 -Dfalse=0 -Wno-error=implicit-function-declaration" $CONFIGURE_FLAGS'), 0,
47 msg="Running configure failed") 51 msg="Running configure failed")
48 self.assertEqual(self.project.run_make(), 0, 52 self.assertEqual(self.project.run_make(make_args="CFLAGS='-std=gnu17 -Dbool=int -Dtrue=1 -Dfalse=0 -Wno-error=implicit-function-declaration'"), 0,
49 msg="Running make failed") 53 msg="Running make failed")
50 self.assertEqual(self.project.run_install(), 0, 54 self.assertEqual(self.project.run_install(), 0,
51 msg="Running make install failed") 55 msg="Running make install failed")
56
57 def test_meta_ide_can_run_sdk_tests(self):
58 bitbake('-c populate_sysroot gtk+3')
59 bitbake('build-sysroots -c build_target_sysroot')
60 bitbake('-c testsdk meta-ide-support')
diff --git a/meta/lib/oeqa/selftest/cases/minidebuginfo.py b/meta/lib/oeqa/selftest/cases/minidebuginfo.py
new file mode 100644
index 0000000000..a8923460f9
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/minidebuginfo.py
@@ -0,0 +1,60 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6import os
7import subprocess
8import tempfile
9import shutil
10
11from oeqa.core.decorator import OETestTag
12from oeqa.selftest.case import OESelftestTestCase
13from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runCmd
14
15
16class Minidebuginfo(OESelftestTestCase):
17 def test_minidebuginfo(self):
18 target_sys = get_bb_var("TARGET_SYS")
19 binutils = "binutils-cross-{}".format(get_bb_var("TARGET_ARCH"))
20
21 image = 'core-image-minimal'
22 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME', 'READELF'], image)
23
24 self.write_config("""
25DISTRO_FEATURES:append = " minidebuginfo"
26IMAGE_FSTYPES = "tar.bz2"
27""")
28 bitbake("{} {}:do_addto_recipe_sysroot".format(image, binutils))
29
30 native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", binutils)
31
32 # confirm that executables and shared libraries contain an ELF section
33 # ".gnu_debugdata" which stores minidebuginfo.
34 with tempfile.TemporaryDirectory(prefix = "unpackfs-") as unpackedfs:
35 filename = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "{}.tar.bz2".format(bb_vars['IMAGE_LINK_NAME']))
36 shutil.unpack_archive(filename, unpackedfs)
37
38 r = runCmd([bb_vars['READELF'], "-W", "-S", os.path.join(unpackedfs, "bin", "busybox")],
39 native_sysroot = native_sysroot, target_sys = target_sys)
40 self.assertIn(".gnu_debugdata", r.output)
41
42 r = runCmd([bb_vars['READELF'], "-W", "-S", os.path.join(unpackedfs, "lib", "libc.so.6")],
43 native_sysroot = native_sysroot, target_sys = target_sys)
44 self.assertIn(".gnu_debugdata", r.output)
45
46 @OETestTag("runqemu")
47 def test_minidebuginfo_qemu(self):
48 """
49 Test minidebuginfo inside a qemu.
50 This runs test_systemd_coredump_minidebuginfo and other minidebuginfo runtime tests which may be added in the future.
51 """
52
53 self.write_config("""
54DISTRO_FEATURES:append = " minidebuginfo"
55INIT_MANAGER = "systemd"
56IMAGE_CLASSES += "testimage"
57TEST_SUITES = "ping ssh systemd"
58 """)
59 bitbake('core-image-minimal')
60 bitbake('-c testimage core-image-minimal')
diff --git a/meta/lib/oeqa/selftest/cases/multiconfig.py b/meta/lib/oeqa/selftest/cases/multiconfig.py
index 39b92f2439..f509cbf607 100644
--- a/meta/lib/oeqa/selftest/cases/multiconfig.py
+++ b/meta/lib/oeqa/selftest/cases/multiconfig.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -17,7 +19,7 @@ class MultiConfig(OESelftestTestCase):
17 """ 19 """
18 20
19 config = """ 21 config = """
20IMAGE_INSTALL_append_pn-core-image-full-cmdline = " multiconfig-image-packager-tiny multiconfig-image-packager-musl" 22IMAGE_INSTALL:append:pn-core-image-full-cmdline = " multiconfig-image-packager-tiny multiconfig-image-packager-musl"
21BBMULTICONFIG = "tiny musl" 23BBMULTICONFIG = "tiny musl"
22""" 24"""
23 self.write_config(config) 25 self.write_config(config)
@@ -52,7 +54,7 @@ TMPDIR = "${TOPDIR}/tmp-mc-tiny"
52 self.write_config(config) 54 self.write_config(config)
53 55
54 testconfig = textwrap.dedent('''\ 56 testconfig = textwrap.dedent('''\
55 MCTESTVAR_append = "1" 57 MCTESTVAR:append = "1"
56 ''') 58 ''')
57 self.write_config(testconfig, 'test') 59 self.write_config(testconfig, 'test')
58 60
@@ -64,9 +66,22 @@ TMPDIR = "${TOPDIR}/tmp-mc-tiny"
64 self.assertIn('MCTESTVAR=test1', result.output.splitlines()) 66 self.assertIn('MCTESTVAR=test1', result.output.splitlines())
65 67
66 testconfig = textwrap.dedent('''\ 68 testconfig = textwrap.dedent('''\
67 MCTESTVAR_append = "2" 69 MCTESTVAR:append = "2"
68 ''') 70 ''')
69 self.write_config(testconfig, 'test') 71 self.write_config(testconfig, 'test')
70 72
71 result = bitbake('mc:test:multiconfig-test-parse -c showvar') 73 result = bitbake('mc:test:multiconfig-test-parse -c showvar')
72 self.assertIn('MCTESTVAR=test2', result.output.splitlines()) 74 self.assertIn('MCTESTVAR=test2', result.output.splitlines())
75
76 def test_multiconfig_inlayer(self):
77 """
78 Test that a multiconfig from meta-selftest works.
79 """
80
81 config = """
82BBMULTICONFIG = "muslmc"
83"""
84 self.write_config(config)
85
86 # Build a core-image-minimal, only dry run needed to check config is present
87 bitbake('mc:muslmc:bash -n')
diff --git a/meta/lib/oeqa/selftest/cases/newlib.py b/meta/lib/oeqa/selftest/cases/newlib.py
new file mode 100644
index 0000000000..fe57aa51f2
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/newlib.py
@@ -0,0 +1,13 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import bitbake
9
10class NewlibTest(OESelftestTestCase):
11 def test_newlib(self):
12 self.write_config('TCLIBC = "newlib"')
13 bitbake("newlib libgloss")
diff --git a/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py b/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py
index 802a91a488..042ccdd2b4 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py
@@ -1,8 +1,11 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import os 7import os
8import sys
6from oeqa.selftest.case import OESelftestTestCase 9from oeqa.selftest.case import OESelftestTestCase
7import tempfile 10import tempfile
8import operator 11import operator
@@ -11,15 +14,14 @@ from oeqa.utils.commands import get_bb_var
11class TestBlobParsing(OESelftestTestCase): 14class TestBlobParsing(OESelftestTestCase):
12 15
13 def setUp(self): 16 def setUp(self):
14 import time
15 self.repo_path = tempfile.mkdtemp(prefix='selftest-buildhistory', 17 self.repo_path = tempfile.mkdtemp(prefix='selftest-buildhistory',
16 dir=get_bb_var('TOPDIR')) 18 dir=get_bb_var('TOPDIR'))
17 19
18 try: 20 try:
19 from git import Repo 21 from git import Repo
20 self.repo = Repo.init(self.repo_path) 22 self.repo = Repo.init(self.repo_path)
21 except ImportError: 23 except ImportError as e:
22 self.skipTest('Python module GitPython is not present') 24 self.skipTest('Python module GitPython is not present (%s) (%s)' % (e, sys.path))
23 25
24 self.test_file = "test" 26 self.test_file = "test"
25 self.var_map = {} 27 self.var_map = {}
@@ -28,6 +30,16 @@ class TestBlobParsing(OESelftestTestCase):
28 import shutil 30 import shutil
29 shutil.rmtree(self.repo_path) 31 shutil.rmtree(self.repo_path)
30 32
33 @property
34 def heads_default(self):
35 """
36 Support repos defaulting to master or to main branch
37 """
38 try:
39 return self.repo.heads.main
40 except AttributeError:
41 return self.repo.heads.master
42
31 def commit_vars(self, to_add={}, to_remove = [], msg="A commit message"): 43 def commit_vars(self, to_add={}, to_remove = [], msg="A commit message"):
32 if len(to_add) == 0 and len(to_remove) == 0: 44 if len(to_add) == 0 and len(to_remove) == 0:
33 return 45 return
@@ -65,10 +77,10 @@ class TestBlobParsing(OESelftestTestCase):
65 changesmap = { "foo-2" : ("2", "8"), "bar" : ("","4"), "bar-2" : ("","5")} 77 changesmap = { "foo-2" : ("2", "8"), "bar" : ("","4"), "bar-2" : ("","5")}
66 78
67 self.commit_vars(to_add = { "foo" : "1", "foo-2" : "2", "foo-3" : "3" }) 79 self.commit_vars(to_add = { "foo" : "1", "foo-2" : "2", "foo-3" : "3" })
68 blob1 = self.repo.heads.master.commit.tree.blobs[0] 80 blob1 = self.heads_default.commit.tree.blobs[0]
69 81
70 self.commit_vars(to_add = { "foo-2" : "8", "bar" : "4", "bar-2" : "5" }) 82 self.commit_vars(to_add = { "foo-2" : "8", "bar" : "4", "bar-2" : "5" })
71 blob2 = self.repo.heads.master.commit.tree.blobs[0] 83 blob2 = self.heads_default.commit.tree.blobs[0]
72 84
73 change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file), 85 change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file),
74 blob1, blob2, False, False) 86 blob1, blob2, False, False)
@@ -84,10 +96,10 @@ class TestBlobParsing(OESelftestTestCase):
84 defaultmap = { x : ("default", "1") for x in ["PKG", "PKGE", "PKGV", "PKGR"]} 96 defaultmap = { x : ("default", "1") for x in ["PKG", "PKGE", "PKGV", "PKGR"]}
85 97
86 self.commit_vars(to_add = { "foo" : "1" }) 98 self.commit_vars(to_add = { "foo" : "1" })
87 blob1 = self.repo.heads.master.commit.tree.blobs[0] 99 blob1 = self.heads_default.commit.tree.blobs[0]
88 100
89 self.commit_vars(to_add = { "PKG" : "1", "PKGE" : "1", "PKGV" : "1", "PKGR" : "1" }) 101 self.commit_vars(to_add = { "PKG" : "1", "PKGE" : "1", "PKGV" : "1", "PKGR" : "1" })
90 blob2 = self.repo.heads.master.commit.tree.blobs[0] 102 blob2 = self.heads_default.commit.tree.blobs[0]
91 103
92 change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file), 104 change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file),
93 blob1, blob2, False, False) 105 blob1, blob2, False, False)
diff --git a/meta/lib/oeqa/selftest/cases/oelib/elf.py b/meta/lib/oeqa/selftest/cases/oelib/elf.py
index 5a5f9b4fdf..7bf550b6fd 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/elf.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/elf.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/selftest/cases/oelib/license.py b/meta/lib/oeqa/selftest/cases/oelib/license.py
index 6ebbee589f..5eea12e761 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/license.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/license.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -15,11 +17,11 @@ class SeenVisitor(oe.license.LicenseVisitor):
15 17
16class TestSingleLicense(TestCase): 18class TestSingleLicense(TestCase):
17 licenses = [ 19 licenses = [
18 "GPLv2", 20 "GPL-2.0-only",
19 "LGPL-2.0", 21 "LGPL-2.0-only",
20 "Artistic", 22 "Artistic-1.0",
21 "MIT", 23 "MIT",
22 "GPLv3+", 24 "GPL-3.0-or-later",
23 "FOO_BAR", 25 "FOO_BAR",
24 ] 26 ]
25 invalid_licenses = ["GPL/BSD"] 27 invalid_licenses = ["GPL/BSD"]
@@ -67,9 +69,9 @@ class TestComplexCombinations(TestSimpleCombinations):
67 "FOO & (BAR | BAZ)&MOO": ["FOO", "BAR", "MOO"], 69 "FOO & (BAR | BAZ)&MOO": ["FOO", "BAR", "MOO"],
68 "(ALPHA|(BETA&THETA)|OMEGA)&DELTA": ["OMEGA", "DELTA"], 70 "(ALPHA|(BETA&THETA)|OMEGA)&DELTA": ["OMEGA", "DELTA"],
69 "((ALPHA|BETA)&FOO)|BAZ": ["BETA", "FOO"], 71 "((ALPHA|BETA)&FOO)|BAZ": ["BETA", "FOO"],
70 "(GPL-2.0|Proprietary)&BSD-4-clause&MIT": ["GPL-2.0", "BSD-4-clause", "MIT"], 72 "(GPL-2.0-only|Proprietary)&BSD-4-clause&MIT": ["GPL-2.0-only", "BSD-4-clause", "MIT"],
71 } 73 }
72 preferred = ["BAR", "OMEGA", "BETA", "GPL-2.0"] 74 preferred = ["BAR", "OMEGA", "BETA", "GPL-2.0-only"]
73 75
74class TestIsIncluded(TestCase): 76class TestIsIncluded(TestCase):
75 tests = { 77 tests = {
@@ -87,12 +89,12 @@ class TestIsIncluded(TestCase):
87 [True, ["BAR", "FOOBAR"]], 89 [True, ["BAR", "FOOBAR"]],
88 ("(FOO | BAR) & FOOBAR | BAZ & MOO & BARFOO", None, "FOO"): 90 ("(FOO | BAR) & FOOBAR | BAZ & MOO & BARFOO", None, "FOO"):
89 [True, ["BAZ", "MOO", "BARFOO"]], 91 [True, ["BAZ", "MOO", "BARFOO"]],
90 ("GPL-3.0 & GPL-2.0 & LGPL-2.1 | Proprietary", None, None): 92 ("GPL-3.0-or-later & GPL-2.0-only & LGPL-2.1-only | Proprietary", None, None):
91 [True, ["GPL-3.0", "GPL-2.0", "LGPL-2.1"]], 93 [True, ["GPL-3.0-or-later", "GPL-2.0-only", "LGPL-2.1-only"]],
92 ("GPL-3.0 & GPL-2.0 & LGPL-2.1 | Proprietary", None, "GPL-3.0"): 94 ("GPL-3.0-or-later & GPL-2.0-only & LGPL-2.1-only | Proprietary", None, "GPL-3.0-or-later"):
93 [True, ["Proprietary"]], 95 [True, ["Proprietary"]],
94 ("GPL-3.0 & GPL-2.0 & LGPL-2.1 | Proprietary", None, "GPL-3.0 Proprietary"): 96 ("GPL-3.0-or-later & GPL-2.0-only & LGPL-2.1-only | Proprietary", None, "GPL-3.0-or-later Proprietary"):
95 [False, ["GPL-3.0"]] 97 [False, ["GPL-3.0-or-later"]]
96 } 98 }
97 99
98 def test_tests(self): 100 def test_tests(self):
diff --git a/meta/lib/oeqa/selftest/cases/oelib/path.py b/meta/lib/oeqa/selftest/cases/oelib/path.py
index a1cfa08c09..b963e447e3 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/path.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/path.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/selftest/cases/oelib/types.py b/meta/lib/oeqa/selftest/cases/oelib/types.py
index 7eb49e6f95..58318b18b2 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/types.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/types.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/selftest/cases/oelib/utils.py b/meta/lib/oeqa/selftest/cases/oelib/utils.py
index a7214beb4c..0cb46425a0 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/utils.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/utils.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -64,7 +66,7 @@ class TestMultiprocessLaunch(TestCase):
64 import bb 66 import bb
65 67
66 def testfunction(item, d): 68 def testfunction(item, d):
67 if item == "2" or item == "1": 69 if item == "2":
68 raise KeyError("Invalid number %s" % item) 70 raise KeyError("Invalid number %s" % item)
69 return "Found %s" % item 71 return "Found %s" % item
70 72
@@ -99,5 +101,4 @@ class TestMultiprocessLaunch(TestCase):
99 # Assert the function prints exceptions 101 # Assert the function prints exceptions
100 with captured_output() as (out, err): 102 with captured_output() as (out, err):
101 self.assertRaises(bb.BBHandledException, multiprocess_launch, testfunction, ["1", "2", "3", "4", "5", "6"], d, extraargs=(d,)) 103 self.assertRaises(bb.BBHandledException, multiprocess_launch, testfunction, ["1", "2", "3", "4", "5", "6"], d, extraargs=(d,))
102 self.assertIn("KeyError: 'Invalid number 1'", out.getvalue())
103 self.assertIn("KeyError: 'Invalid number 2'", out.getvalue()) 104 self.assertIn("KeyError: 'Invalid number 2'", out.getvalue())
diff --git a/meta/lib/oeqa/selftest/cases/oescripts.py b/meta/lib/oeqa/selftest/cases/oescripts.py
index 8a10ff357b..3f9899b289 100644
--- a/meta/lib/oeqa/selftest/cases/oescripts.py
+++ b/meta/lib/oeqa/selftest/cases/oescripts.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -7,47 +9,19 @@ import shutil
7import importlib 9import importlib
8import unittest 10import unittest
9from oeqa.selftest.case import OESelftestTestCase 11from oeqa.selftest.case import OESelftestTestCase
10from oeqa.selftest.cases.buildhistory import BuildhistoryBase 12from oeqa.utils.commands import runCmd, bitbake, get_bb_var
11from oeqa.utils.commands import Command, runCmd, bitbake, get_bb_var, get_test_layer
12from oeqa.utils import CommandError 13from oeqa.utils import CommandError
13 14
14class BuildhistoryDiffTests(BuildhistoryBase):
15
16 def test_buildhistory_diff(self):
17 target = 'xcursor-transparent-theme'
18 self.run_buildhistory_operation(target, target_config="PR = \"r1\"", change_bh_location=True)
19 self.run_buildhistory_operation(target, target_config="PR = \"r0\"", change_bh_location=False, expect_error=True)
20 result = runCmd("oe-pkgdata-util read-value PKGV %s" % target)
21 pkgv = result.output.rstrip()
22 result = runCmd("buildhistory-diff -p %s" % get_bb_var('BUILDHISTORY_DIR'))
23 expected_endlines = [
24 "xcursor-transparent-theme-dev: RDEPENDS: removed \"xcursor-transparent-theme (['= %s-r1'])\", added \"xcursor-transparent-theme (['= %s-r0'])\"" % (pkgv, pkgv),
25 "xcursor-transparent-theme-staticdev: RDEPENDS: removed \"xcursor-transparent-theme-dev (['= %s-r1'])\", added \"xcursor-transparent-theme-dev (['= %s-r0'])\"" % (pkgv, pkgv)
26 ]
27 for line in result.output.splitlines():
28 for el in expected_endlines:
29 if line.endswith(el):
30 expected_endlines.remove(el)
31 break
32 else:
33 self.fail('Unexpected line:\n%s\nExpected line endings:\n %s' % (line, '\n '.join(expected_endlines)))
34 if expected_endlines:
35 self.fail('Missing expected line endings:\n %s' % '\n '.join(expected_endlines))
36
37@unittest.skipUnless(importlib.util.find_spec("cairo"), "Python cairo module is not present") 15@unittest.skipUnless(importlib.util.find_spec("cairo"), "Python cairo module is not present")
38class OEScriptTests(OESelftestTestCase): 16class OEPybootchartguyTests(OESelftestTestCase):
39 17
40 @classmethod 18 @classmethod
41 def setUpClass(cls): 19 def setUpClass(cls):
42 super(OEScriptTests, cls).setUpClass() 20 super().setUpClass()
43 import cairo
44 bitbake("core-image-minimal -c rootfs -f") 21 bitbake("core-image-minimal -c rootfs -f")
45 cls.tmpdir = get_bb_var('TMPDIR') 22 cls.tmpdir = get_bb_var('TMPDIR')
46 cls.buildstats = cls.tmpdir + "/buildstats/" + sorted(os.listdir(cls.tmpdir + "/buildstats"))[-1] 23 cls.buildstats = cls.tmpdir + "/buildstats/" + sorted(os.listdir(cls.tmpdir + "/buildstats"))[-1]
47 24 cls.scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')
48 scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')
49
50class OEPybootchartguyTests(OEScriptTests):
51 25
52 def test_pybootchartguy_help(self): 26 def test_pybootchartguy_help(self):
53 runCmd('%s/pybootchartgui/pybootchartgui.py --help' % self.scripts_dir) 27 runCmd('%s/pybootchartgui/pybootchartgui.py --help' % self.scripts_dir)
@@ -67,7 +41,10 @@ class OEPybootchartguyTests(OEScriptTests):
67 41
68class OEGitproxyTests(OESelftestTestCase): 42class OEGitproxyTests(OESelftestTestCase):
69 43
70 scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts') 44 @classmethod
45 def setUpClass(cls):
46 super().setUpClass()
47 cls.scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')
71 48
72 def test_oegitproxy_help(self): 49 def test_oegitproxy_help(self):
73 try: 50 try:
@@ -125,15 +102,22 @@ class OEGitproxyTests(OESelftestTestCase):
125class OeRunNativeTest(OESelftestTestCase): 102class OeRunNativeTest(OESelftestTestCase):
126 def test_oe_run_native(self): 103 def test_oe_run_native(self):
127 bitbake("qemu-helper-native -c addto_recipe_sysroot") 104 bitbake("qemu-helper-native -c addto_recipe_sysroot")
128 result = runCmd("oe-run-native qemu-helper-native tunctl -h") 105 result = runCmd("oe-run-native qemu-helper-native qemu-oe-bridge-helper --help")
129 self.assertIn("Delete: tunctl -d device-name [-f tun-clone-device]", result.output) 106 self.assertIn("Helper function to find and exec qemu-bridge-helper", result.output)
107
108class OEListPackageconfigTests(OESelftestTestCase):
109
110 @classmethod
111 def setUpClass(cls):
112 super().setUpClass()
113 cls.scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')
130 114
131class OEListPackageconfigTests(OEScriptTests):
132 #oe-core.scripts.List_all_the_PACKAGECONFIG's_flags 115 #oe-core.scripts.List_all_the_PACKAGECONFIG's_flags
133 def check_endlines(self, results, expected_endlines): 116 def check_endlines(self, results, expected_endlines):
134 for line in results.output.splitlines(): 117 for line in results.output.splitlines():
135 for el in expected_endlines: 118 for el in expected_endlines:
136 if line.split() == el.split(): 119 if line and line.split()[0] == el.split()[0] and \
120 ' '.join(sorted(el.split())) in ' '.join(sorted(line.split())):
137 expected_endlines.remove(el) 121 expected_endlines.remove(el)
138 break 122 break
139 123
@@ -149,8 +133,8 @@ class OEListPackageconfigTests(OEScriptTests):
149 results = runCmd('%s/contrib/list-packageconfig-flags.py' % self.scripts_dir) 133 results = runCmd('%s/contrib/list-packageconfig-flags.py' % self.scripts_dir)
150 expected_endlines = [] 134 expected_endlines = []
151 expected_endlines.append("RECIPE NAME PACKAGECONFIG FLAGS") 135 expected_endlines.append("RECIPE NAME PACKAGECONFIG FLAGS")
152 expected_endlines.append("pinentry gtk2 libcap ncurses qt secret") 136 expected_endlines.append("pinentry gtk2 ncurses qt secret")
153 expected_endlines.append("tar acl") 137 expected_endlines.append("tar acl selinux")
154 138
155 self.check_endlines(results, expected_endlines) 139 self.check_endlines(results, expected_endlines)
156 140
@@ -167,11 +151,10 @@ class OEListPackageconfigTests(OEScriptTests):
167 def test_packageconfig_flags_option_all(self): 151 def test_packageconfig_flags_option_all(self):
168 results = runCmd('%s/contrib/list-packageconfig-flags.py -a' % self.scripts_dir) 152 results = runCmd('%s/contrib/list-packageconfig-flags.py -a' % self.scripts_dir)
169 expected_endlines = [] 153 expected_endlines = []
170 expected_endlines.append("pinentry-1.1.1") 154 expected_endlines.append("pinentry-1.3.1")
171 expected_endlines.append("PACKAGECONFIG ncurses libcap") 155 expected_endlines.append("PACKAGECONFIG ncurses")
172 expected_endlines.append("PACKAGECONFIG[qt] --enable-pinentry-qt, --disable-pinentry-qt, qtbase-native qtbase") 156 expected_endlines.append("PACKAGECONFIG[qt] --enable-pinentry-qt, --disable-pinentry-qt, qtbase-native qtbase")
173 expected_endlines.append("PACKAGECONFIG[gtk2] --enable-pinentry-gtk2, --disable-pinentry-gtk2, gtk+ glib-2.0") 157 expected_endlines.append("PACKAGECONFIG[gtk2] --enable-pinentry-gtk2, --disable-pinentry-gtk2, gtk+ glib-2.0")
174 expected_endlines.append("PACKAGECONFIG[libcap] --with-libcap, --without-libcap, libcap")
175 expected_endlines.append("PACKAGECONFIG[ncurses] --enable-ncurses --with-ncurses-include-dir=${STAGING_INCDIR}, --disable-ncurses, ncurses") 158 expected_endlines.append("PACKAGECONFIG[ncurses] --enable-ncurses --with-ncurses-include-dir=${STAGING_INCDIR}, --disable-ncurses, ncurses")
176 expected_endlines.append("PACKAGECONFIG[secret] --enable-libsecret, --disable-libsecret, libsecret") 159 expected_endlines.append("PACKAGECONFIG[secret] --enable-libsecret, --disable-libsecret, libsecret")
177 160
@@ -181,7 +164,7 @@ class OEListPackageconfigTests(OEScriptTests):
181 results = runCmd('%s/contrib/list-packageconfig-flags.py -p' % self.scripts_dir) 164 results = runCmd('%s/contrib/list-packageconfig-flags.py -p' % self.scripts_dir)
182 expected_endlines = [] 165 expected_endlines = []
183 expected_endlines.append("RECIPE NAME PACKAGECONFIG FLAGS") 166 expected_endlines.append("RECIPE NAME PACKAGECONFIG FLAGS")
184 expected_endlines.append("pinentry gtk2 libcap ncurses qt secret") 167 expected_endlines.append("pinentry gtk2 ncurses qt secret")
185 168
186 self.check_endlines(results, expected_endlines) 169 self.check_endlines(results, expected_endlines)
187 170
diff --git a/meta/lib/oeqa/selftest/cases/overlayfs.py b/meta/lib/oeqa/selftest/cases/overlayfs.py
new file mode 100644
index 0000000000..580fbdcb9c
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/overlayfs.py
@@ -0,0 +1,541 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import bitbake, runqemu, get_bb_vars
9from oeqa.core.decorator import OETestTag
10from oeqa.core.decorator.data import skipIfNotMachine
11
12def getline_qemu(out, line):
13 for l in out.split('\n'):
14 if line in l:
15 return l
16
17def getline(res, line):
18 return getline_qemu(res.output, line)
19
20class OverlayFSTests(OESelftestTestCase):
21 """Overlayfs class usage tests"""
22
23 def add_overlay_conf_to_machine(self):
24 machine_inc = """
25OVERLAYFS_MOUNT_POINT[mnt-overlay] = "/mnt/overlay"
26"""
27 self.set_machine_config(machine_inc)
28
29 def test_distro_features_missing(self):
30 """
31 Summary: Check that required DISTRO_FEATURES are set
32 Expected: Fail when either systemd or overlayfs are not in DISTRO_FEATURES
33 Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
34 """
35
36 config = """
37IMAGE_INSTALL:append = " overlayfs-user"
38"""
39 overlayfs_recipe_append = """
40inherit overlayfs
41"""
42 self.write_config(config)
43 self.add_overlay_conf_to_machine()
44 self.write_recipeinc('overlayfs-user', overlayfs_recipe_append)
45
46 res = bitbake('core-image-minimal', ignore_status=True)
47 line = getline(res, "overlayfs-user was skipped: missing required distro features")
48 self.assertTrue("overlayfs" in res.output, msg=res.output)
49 self.assertTrue("systemd" in res.output, msg=res.output)
50 self.assertTrue("ERROR: Required build target 'core-image-minimal' has no buildable providers." in res.output, msg=res.output)
51
52 def test_not_all_units_installed(self):
53 """
54 Summary: Test QA check that we have required mount units in the image
55 Expected: Fail because mount unit for overlay partition is not installed
56 Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
57 """
58
59 config = """
60IMAGE_INSTALL:append = " overlayfs-user"
61DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
62"""
63
64 self.write_config(config)
65 self.add_overlay_conf_to_machine()
66
67 res = bitbake('core-image-minimal', ignore_status=True)
68 line = getline(res, " Mount path /mnt/overlay not found in fstab and unit mnt-overlay.mount not found in systemd unit directories")
69 self.assertTrue(line and line.startswith("WARNING:"), msg=res.output)
70 line = getline(res, "Not all mount paths and units are installed in the image")
71 self.assertTrue(line and line.startswith("ERROR:"), msg=res.output)
72
73 def test_not_all_units_installed_but_qa_skipped(self):
74 """
75 Summary: Test skipping the QA check
76 Expected: Image is created successfully
77 Author: Claudius Heine <ch@denx.de>
78 """
79
80 config = """
81IMAGE_INSTALL:append = " overlayfs-user"
82DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
83OVERLAYFS_QA_SKIP[mnt-overlay] = "mount-configured"
84"""
85
86 self.write_config(config)
87 self.add_overlay_conf_to_machine()
88
89 bitbake('core-image-minimal')
90
91 def test_mount_unit_not_set(self):
92 """
93 Summary: Test whether mount unit was set properly
94 Expected: Fail because mount unit was not set
95 Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
96 """
97
98 config = """
99IMAGE_INSTALL:append = " overlayfs-user"
100DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
101"""
102
103 self.write_config(config)
104
105 res = bitbake('core-image-minimal', ignore_status=True)
106 line = getline(res, "A recipe uses overlayfs class but there is no OVERLAYFS_MOUNT_POINT set in your MACHINE configuration")
107 self.assertTrue(line and line.startswith("Parsing recipes...ERROR:"), msg=res.output)
108
109 def test_wrong_mount_unit_set(self):
110 """
111 Summary: Test whether mount unit was set properly
112 Expected: Fail because not the correct flag used for mount unit
113 Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
114 """
115
116 config = """
117IMAGE_INSTALL:append = " overlayfs-user"
118DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
119"""
120
121 wrong_machine_config = """
122OVERLAYFS_MOUNT_POINT[usr-share-overlay] = "/usr/share/overlay"
123"""
124
125 self.write_config(config)
126 self.set_machine_config(wrong_machine_config)
127
128 res = bitbake('core-image-minimal', ignore_status=True)
129 line = getline(res, "Missing required mount point for OVERLAYFS_MOUNT_POINT[mnt-overlay] in your MACHINE configuration")
130 self.assertTrue(line and line.startswith("Parsing recipes...ERROR:"), msg=res.output)
131
132 def _test_correct_image(self, recipe, data):
133 """
134 Summary: Check that we can create an image when all parameters are
135 set correctly
136 Expected: Image is created successfully
137 Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
138 """
139
140 config = """
141IMAGE_INSTALL:append = " overlayfs-user systemd-machine-units"
142DISTRO_FEATURES:append = " overlayfs"
143
144# Use systemd as init manager
145INIT_MANAGER = "systemd"
146
147# enable overlayfs in the kernel
148KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
149"""
150
151 overlayfs_recipe_append = """
152OVERLAYFS_WRITABLE_PATHS[mnt-overlay] += "/usr/share/another-overlay-mount"
153
154SYSTEMD_SERVICE:${PN} += " \
155 my-application.service \
156"
157
158do_install:append() {
159 install -d ${D}${systemd_system_unitdir}
160 cat <<EOT > ${D}${systemd_system_unitdir}/my-application.service
161[Unit]
162Description=Sample application start-up unit
163After=overlayfs-user-overlays.service
164Requires=overlayfs-user-overlays.service
165
166[Service]
167Type=oneshot
168ExecStart=/bin/true
169RemainAfterExit=true
170
171[Install]
172WantedBy=multi-user.target
173EOT
174}
175"""
176
177 self.write_config(config)
178 self.add_overlay_conf_to_machine()
179 self.write_recipeinc(recipe, data)
180 self.write_recipeinc('overlayfs-user', overlayfs_recipe_append)
181
182 bitbake('core-image-minimal')
183
184 with runqemu('core-image-minimal') as qemu:
185 # Check that application service started
186 status, output = qemu.run_serial("systemctl status my-application")
187 self.assertTrue("active (exited)" in output, msg=output)
188
189 # Check that overlay mounts are dependencies of our application unit
190 status, output = qemu.run_serial("systemctl list-dependencies my-application")
191 self.assertTrue("overlayfs-user-overlays.service" in output, msg=output)
192
193 status, output = qemu.run_serial("systemctl list-dependencies overlayfs-user-overlays")
194 self.assertTrue("usr-share-another\\x2doverlay\\x2dmount.mount" in output, msg=output)
195 self.assertTrue("usr-share-my\\x2dapplication.mount" in output, msg=output)
196
197 # Check that we have /mnt/overlay fs mounted as tmpfs and
198 # /usr/share/my-application as an overlay (see overlayfs-user recipe)
199 status, output = qemu.run_serial("/bin/mount -t tmpfs,overlay")
200
201 line = getline_qemu(output, "on /mnt/overlay")
202 self.assertTrue(line and line.startswith("tmpfs"), msg=output)
203
204 line = getline_qemu(output, "upperdir=/mnt/overlay/upper/usr/share/my-application")
205 self.assertTrue(line and line.startswith("overlay"), msg=output)
206
207 line = getline_qemu(output, "upperdir=/mnt/overlay/upper/usr/share/another-overlay-mount")
208 self.assertTrue(line and line.startswith("overlay"), msg=output)
209
210 @OETestTag("runqemu")
211 def test_correct_image_fstab(self):
212 """
213 Summary: Check that we can create an image when all parameters are
214 set correctly via fstab
215 Expected: Image is created successfully
216 Author: Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>
217 """
218
219 base_files_append = """
220do_install:append() {
221 cat <<EOT >> ${D}${sysconfdir}/fstab
222tmpfs /mnt/overlay tmpfs mode=1777,strictatime,nosuid,nodev 0 0
223EOT
224}
225"""
226
227 self._test_correct_image('base-files', base_files_append)
228
229 @OETestTag("runqemu")
230 def test_correct_image_unit(self):
231 """
232 Summary: Check that we can create an image when all parameters are
233 set correctly via mount unit
234 Expected: Image is created successfully
235 Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
236 """
237
238 systemd_machine_unit_append = """
239SYSTEMD_SERVICE:${PN} += " \
240 mnt-overlay.mount \
241"
242
243do_install:append() {
244 install -d ${D}${systemd_system_unitdir}
245 cat <<EOT > ${D}${systemd_system_unitdir}/mnt-overlay.mount
246[Unit]
247Description=Tmpfs directory
248DefaultDependencies=no
249
250[Mount]
251What=tmpfs
252Where=/mnt/overlay
253Type=tmpfs
254Options=mode=1777,strictatime,nosuid,nodev
255
256[Install]
257WantedBy=multi-user.target
258EOT
259}
260
261"""
262
263 self._test_correct_image('systemd-machine-units', systemd_machine_unit_append)
264
265@OETestTag("runqemu")
266class OverlayFSEtcRunTimeTests(OESelftestTestCase):
267 """overlayfs-etc class tests"""
268
269 def test_all_required_variables_set(self):
270 """
271 Summary: Check that required variables are set
272 Expected: Fail when any of required variables is missing
273 Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
274 """
275
276 configBase = """
277# Use systemd as init manager
278INIT_MANAGER = "systemd"
279
280# enable overlayfs in the kernel
281KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
282
283# Image configuration for overlayfs-etc
284EXTRA_IMAGE_FEATURES += "overlayfs-etc"
285IMAGE_FEATURES:remove = "package-management"
286"""
287 configMountPoint = """
288OVERLAYFS_ETC_MOUNT_POINT = "/data"
289"""
290 configDevice = """
291OVERLAYFS_ETC_DEVICE = "/dev/mmcblk0p1"
292"""
293
294 self.write_config(configBase)
295 res = bitbake('core-image-minimal', ignore_status=True)
296 line = getline(res, "OVERLAYFS_ETC_MOUNT_POINT must be set in your MACHINE configuration")
297 self.assertTrue(line, msg=res.output)
298
299 self.append_config(configMountPoint)
300 res = bitbake('core-image-minimal', ignore_status=True)
301 line = getline(res, "OVERLAYFS_ETC_DEVICE must be set in your MACHINE configuration")
302 self.assertTrue(line, msg=res.output)
303
304 self.append_config(configDevice)
305 res = bitbake('core-image-minimal', ignore_status=True)
306 line = getline(res, "OVERLAYFS_ETC_FSTYPE should contain a valid file system type on /dev/mmcblk0p1")
307 self.assertTrue(line, msg=res.output)
308
309 def test_image_feature_conflict(self):
310 """
311 Summary: Overlayfs-etc is not allowed to be used with package-management
312 Expected: Feature conflict
313 Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
314 """
315
316 config = """
317# Use systemd as init manager
318INIT_MANAGER = "systemd"
319
320# enable overlayfs in the kernel
321KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
322EXTRA_IMAGE_FEATURES += "overlayfs-etc"
323EXTRA_IMAGE_FEATURES += "package-management"
324"""
325
326 self.write_config(config)
327
328 res = bitbake('core-image-minimal', ignore_status=True)
329 line = getline(res, "contains conflicting IMAGE_FEATURES")
330 self.assertTrue("overlayfs-etc" in res.output, msg=res.output)
331 self.assertTrue("package-management" in res.output, msg=res.output)
332
333 # https://bugzilla.yoctoproject.org/show_bug.cgi?id=14963
334 @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
335 def test_image_feature_is_missing(self):
336 """
337 Summary: Overlayfs-etc class is not applied when image feature is not set
338 Expected: Image is created successfully but /etc is not an overlay
339 Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
340 """
341
342 config = """
343# Use systemd as init manager
344INIT_MANAGER = "systemd"
345
346# enable overlayfs in the kernel
347KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
348
349IMAGE_FSTYPES += "wic"
350WKS_FILE = "overlayfs_etc.wks.in"
351
352EXTRA_IMAGE_FEATURES += "read-only-rootfs"
353# Image configuration for overlayfs-etc
354OVERLAYFS_ETC_MOUNT_POINT = "/data"
355OVERLAYFS_ETC_DEVICE = "/dev/sda3"
356OVERLAYFS_ROOTFS_TYPE = "ext4"
357"""
358
359 self.write_config(config)
360
361 bitbake('core-image-minimal')
362
363 with runqemu('core-image-minimal', image_fstype='wic') as qemu:
364 status, output = qemu.run_serial("/bin/mount")
365
366 line = getline_qemu(output, "upperdir=/data/overlay-etc/upper")
367 self.assertFalse(line, msg=output)
368
369 @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
370 def test_sbin_init_preinit(self):
371 self.run_sbin_init(False, "ext4")
372
373 @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
374 def test_sbin_init_original(self):
375 self.run_sbin_init(True, "ext4")
376
377 @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
378 def test_sbin_init_read_only(self):
379 self.run_sbin_init(True, "squashfs")
380
381 def run_sbin_init(self, origInit, rootfsType):
382 """
383 Summary: Confirm we can replace original init and mount overlay on top of /etc
384 Expected: Image is created successfully and /etc is mounted as an overlay
385 Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
386 """
387
388 config = self.get_working_config()
389
390 args = {
391 'OVERLAYFS_INIT_OPTION': "" if origInit else "init=/sbin/preinit",
392 'OVERLAYFS_ETC_USE_ORIG_INIT_NAME': int(origInit == True),
393 'OVERLAYFS_ROOTFS_TYPE': rootfsType,
394 'OVERLAYFS_ETC_CREATE_MOUNT_DIRS': int(rootfsType == "ext4")
395 }
396
397 self.write_config(config.format(**args))
398
399 bitbake('core-image-minimal')
400 testFile = "/etc/my-test-data"
401
402 with runqemu('core-image-minimal', image_fstype='wic', discard_writes=False) as qemu:
403 status, output = qemu.run_serial("/bin/mount")
404
405 line = getline_qemu(output, "/dev/sda3")
406 self.assertTrue("/data" in output, msg=output)
407
408 line = getline_qemu(output, "upperdir=/data/overlay-etc/upper")
409 self.assertTrue(line and line.startswith("/data/overlay-etc/upper on /etc type overlay"), msg=output)
410
411 # check that lower layer is not available
412 status, output = qemu.run_serial("ls -1 /data/overlay-etc/lower")
413 line = getline_qemu(output, "No such file or directory")
414 self.assertTrue(line, msg=output)
415
416 status, output = qemu.run_serial("touch " + testFile)
417 status, output = qemu.run_serial("sync")
418 status, output = qemu.run_serial("ls -1 " + testFile)
419 line = getline_qemu(output, testFile)
420 self.assertTrue(line and line.startswith(testFile), msg=output)
421
422 # Check that file exists in /etc after reboot
423 with runqemu('core-image-minimal', image_fstype='wic') as qemu:
424 status, output = qemu.run_serial("ls -1 " + testFile)
425 line = getline_qemu(output, testFile)
426 self.assertTrue(line and line.startswith(testFile), msg=output)
427
428 @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
429 def test_lower_layer_access(self):
430 """
431 Summary: Test that lower layer of /etc is available read-only when configured
432 Expected: Can't write to lower layer. The files on lower and upper different after
433 modification
434 Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
435 """
436
437 config = self.get_working_config()
438
439 configLower = """
440OVERLAYFS_ETC_EXPOSE_LOWER = "1"
441IMAGE_INSTALL:append = " overlayfs-user"
442"""
443 testFile = "lower-layer-test.txt"
444
445 args = {
446 'OVERLAYFS_INIT_OPTION': "",
447 'OVERLAYFS_ETC_USE_ORIG_INIT_NAME': 1,
448 'OVERLAYFS_ROOTFS_TYPE': "ext4",
449 'OVERLAYFS_ETC_CREATE_MOUNT_DIRS': 1
450 }
451
452 self.write_config(config.format(**args))
453
454 self.append_config(configLower)
455 bitbake('core-image-minimal')
456
457 with runqemu('core-image-minimal', image_fstype='wic') as qemu:
458 status, output = qemu.run_serial("echo \"Modified in upper\" > /etc/" + testFile)
459 status, output = qemu.run_serial("diff /etc/" + testFile + " /data/overlay-etc/lower/" + testFile)
460 line = getline_qemu(output, "Modified in upper")
461 self.assertTrue(line, msg=output)
462 line = getline_qemu(output, "Original file")
463 self.assertTrue(line, msg=output)
464
465 status, output = qemu.run_serial("touch /data/overlay-etc/lower/ro-test.txt")
466 line = getline_qemu(output, "Read-only file system")
467 self.assertTrue(line, msg=output)
468
469 @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
470 def test_postinst_on_target_for_read_only_rootfs(self):
471 """
472 Summary: The purpose of this test case is to verify that post-installation
473 on target scripts are executed even if using read-only rootfs when
474 read-only-rootfs-delayed-postinsts is set
475 Expected: The test files are created on first boot
476 """
477
478 import oe.path
479
480 vars = get_bb_vars(("IMAGE_ROOTFS", "sysconfdir"), "core-image-minimal")
481 sysconfdir = vars["sysconfdir"]
482 self.assertIsNotNone(sysconfdir)
483 # Need to use oe.path here as sysconfdir starts with /
484 targettestdir = os.path.join(sysconfdir, "postinst-test")
485
486 config = self.get_working_config()
487
488 args = {
489 'OVERLAYFS_INIT_OPTION': "",
490 'OVERLAYFS_ETC_USE_ORIG_INIT_NAME': 1,
491 'OVERLAYFS_ROOTFS_TYPE': "ext4",
492 'OVERLAYFS_ETC_CREATE_MOUNT_DIRS': 1
493 }
494
495 # read-only-rootfs is already set in get_working_config()
496 config += 'EXTRA_IMAGE_FEATURES += "read-only-rootfs-delayed-postinsts"\n'
497 config += 'CORE_IMAGE_EXTRA_INSTALL = "postinst-delayed-b"\n'
498
499 self.write_config(config.format(**args))
500
501 res = bitbake('core-image-minimal')
502
503 with runqemu('core-image-minimal', image_fstype='wic') as qemu:
504 for filename in ("rootfs", "delayed-a", "delayed-b"):
505 status, output = qemu.run_serial("test -f %s && echo found" % os.path.join(targettestdir, filename))
506 self.assertIn("found", output, "%s was not present on boot" % filename)
507
508 def get_working_config(self):
509 return """
510# Use systemd as init manager
511INIT_MANAGER = "systemd"
512
513# enable overlayfs in the kernel
514KERNEL_EXTRA_FEATURES:append = " \
515 features/overlayfs/overlayfs.scc \
516 cfg/fs/squashfs.scc"
517
518IMAGE_FSTYPES += "wic"
519OVERLAYFS_INIT_OPTION = "{OVERLAYFS_INIT_OPTION}"
520OVERLAYFS_ROOTFS_TYPE = "{OVERLAYFS_ROOTFS_TYPE}"
521OVERLAYFS_ETC_CREATE_MOUNT_DIRS = "{OVERLAYFS_ETC_CREATE_MOUNT_DIRS}"
522WKS_FILE = "overlayfs_etc.wks.in"
523
524EXTRA_IMAGE_FEATURES += "read-only-rootfs"
525# Image configuration for overlayfs-etc
526EXTRA_IMAGE_FEATURES += "overlayfs-etc"
527IMAGE_FEATURES:remove = "package-management"
528OVERLAYFS_ETC_MOUNT_POINT = "/data"
529OVERLAYFS_ETC_FSTYPE = "ext4"
530OVERLAYFS_ETC_DEVICE = "/dev/sda3"
531OVERLAYFS_ETC_USE_ORIG_INIT_NAME = "{OVERLAYFS_ETC_USE_ORIG_INIT_NAME}"
532
533ROOTFS_POSTPROCESS_COMMAND += "{OVERLAYFS_ROOTFS_TYPE}_rootfs"
534
535ext4_rootfs() {{
536}}
537
538squashfs_rootfs() {{
539 mkdir -p ${{IMAGE_ROOTFS}}/data
540}}
541"""
diff --git a/meta/lib/oeqa/selftest/cases/package.py b/meta/lib/oeqa/selftest/cases/package.py
index 7166c3991f..38ed7173fe 100644
--- a/meta/lib/oeqa/selftest/cases/package.py
+++ b/meta/lib/oeqa/selftest/cases/package.py
@@ -1,10 +1,11 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5from oeqa.selftest.case import OESelftestTestCase 7from oeqa.selftest.case import OESelftestTestCase
6from oeqa.utils.commands import bitbake, get_bb_vars, get_bb_var, runqemu 8from oeqa.utils.commands import bitbake, get_bb_vars, get_bb_var, runqemu
7import stat
8import subprocess, os 9import subprocess, os
9import oe.path 10import oe.path
10import re 11import re
@@ -88,6 +89,13 @@ class VersionOrdering(OESelftestTestCase):
88 self.assertEqual(status - 100, sort, "%s %s (%d) failed" % (ver1, ver2, sort)) 89 self.assertEqual(status - 100, sort, "%s %s (%d) failed" % (ver1, ver2, sort))
89 90
90class PackageTests(OESelftestTestCase): 91class PackageTests(OESelftestTestCase):
92 # Verify that a recipe cannot rename a package into an existing one
93 def test_package_name_conflict(self):
94 res = bitbake("packagenameconflict", ignore_status=True)
95 self.assertNotEqual(res.status, 0)
96 err = "package name already exists"
97 self.assertTrue(err in res.output)
98
91 # Verify that a recipe which sets up hardlink files has those preserved into split packages 99 # Verify that a recipe which sets up hardlink files has those preserved into split packages
92 # Also test file sparseness is preserved 100 # Also test file sparseness is preserved
93 def test_preserve_sparse_hardlinks(self): 101 def test_preserve_sparse_hardlinks(self):
@@ -95,11 +103,37 @@ class PackageTests(OESelftestTestCase):
95 103
96 dest = get_bb_var('PKGDEST', 'selftest-hardlink') 104 dest = get_bb_var('PKGDEST', 'selftest-hardlink')
97 bindir = get_bb_var('bindir', 'selftest-hardlink') 105 bindir = get_bb_var('bindir', 'selftest-hardlink')
106 libdir = get_bb_var('libdir', 'selftest-hardlink')
107 libexecdir = get_bb_var('libexecdir', 'selftest-hardlink')
98 108
99 def checkfiles(): 109 def checkfiles():
100 # Recipe creates 4 hardlinked files, there is a copy in package/ and a copy in packages-split/ 110 # Recipe creates 4 hardlinked files, there is a copy in package/ and a copy in packages-split/
101 # so expect 8 in total. 111 # so expect 8 in total.
102 self.assertEqual(os.stat(dest + "/selftest-hardlink" + bindir + "/hello1").st_nlink, 8) 112 self.assertEqual(os.stat(dest + "/selftest-hardlink" + bindir + "/hello1").st_nlink, 8)
113 self.assertEqual(os.stat(dest + "/selftest-hardlink" + libexecdir + "/hello3").st_nlink, 8)
114
115 # Check dbg version
116 # 2 items, a copy in both package/packages-split so 4
117 self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + bindir + "/.debug/hello1").st_nlink, 4)
118 self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libexecdir + "/.debug/hello1").st_nlink, 4)
119
120 # Even though the libexecdir name is 'hello3' or 'hello4', that isn't the debug target name
121 self.assertEqual(os.path.exists(dest + "/selftest-hardlink-dbg" + libexecdir + "/.debug/hello3"), False)
122 self.assertEqual(os.path.exists(dest + "/selftest-hardlink-dbg" + libexecdir + "/.debug/hello4"), False)
123
124 # Check the staticdev libraries
125 # 101 items, a copy in both package/packages-split so 202
126 self.assertEqual(os.stat(dest + "/selftest-hardlink-staticdev" + libdir + "/libhello.a").st_nlink, 202)
127 self.assertEqual(os.stat(dest + "/selftest-hardlink-staticdev" + libdir + "/libhello-25.a").st_nlink, 202)
128 self.assertEqual(os.stat(dest + "/selftest-hardlink-staticdev" + libdir + "/libhello-50.a").st_nlink, 202)
129 self.assertEqual(os.stat(dest + "/selftest-hardlink-staticdev" + libdir + "/libhello-75.a").st_nlink, 202)
130
131 # Check static dbg
132 # 101 items, a copy in both package/packages-split so 202
133 self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libdir + "/.debug-static/libhello.a").st_nlink, 202)
134 self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libdir + "/.debug-static/libhello-25.a").st_nlink, 202)
135 self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libdir + "/.debug-static/libhello-50.a").st_nlink, 202)
136 self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libdir + "/.debug-static/libhello-75.a").st_nlink, 202)
103 137
104 # Test a sparse file remains sparse 138 # Test a sparse file remains sparse
105 sparsestat = os.stat(dest + "/selftest-hardlink" + bindir + "/sparsetest") 139 sparsestat = os.stat(dest + "/selftest-hardlink" + bindir + "/sparsetest")
@@ -116,9 +150,9 @@ class PackageTests(OESelftestTestCase):
116 150
117 # Verify gdb to read symbols from separated debug hardlink file correctly 151 # Verify gdb to read symbols from separated debug hardlink file correctly
118 def test_gdb_hardlink_debug(self): 152 def test_gdb_hardlink_debug(self):
119 features = 'IMAGE_INSTALL_append = " selftest-hardlink"\n' 153 features = 'IMAGE_INSTALL:append = " selftest-hardlink"\n'
120 features += 'IMAGE_INSTALL_append = " selftest-hardlink-dbg"\n' 154 features += 'IMAGE_INSTALL:append = " selftest-hardlink-dbg"\n'
121 features += 'IMAGE_INSTALL_append = " selftest-hardlink-gdb"\n' 155 features += 'IMAGE_INSTALL:append = " selftest-hardlink-gdb"\n'
122 self.write_config(features) 156 self.write_config(features)
123 bitbake("core-image-minimal") 157 bitbake("core-image-minimal")
124 158
@@ -134,8 +168,10 @@ class PackageTests(OESelftestTestCase):
134 self.logger.error("No debugging symbols found. GDB result:\n%s" % output) 168 self.logger.error("No debugging symbols found. GDB result:\n%s" % output)
135 return False 169 return False
136 170
137 # Check debugging symbols works correctly 171 # Check debugging symbols works correctly. Don't look for a
138 elif re.match(r"Breakpoint 1.*hello\.c.*4", l): 172 # source file as optimisation can put the breakpoint inside
173 # stdio.h.
174 elif "Breakpoint 1 at" in l:
139 return True 175 return True
140 176
141 self.logger.error("GDB result:\n%d: %s", status, output) 177 self.logger.error("GDB result:\n%d: %s", status, output)
@@ -150,25 +186,25 @@ class PackageTests(OESelftestTestCase):
150 self.fail('GDB %s failed' % binary) 186 self.fail('GDB %s failed' % binary)
151 187
152 def test_preserve_ownership(self): 188 def test_preserve_ownership(self):
153 import os, stat, oe.cachedpath 189 features = 'IMAGE_INSTALL:append = " selftest-chown"\n'
154 features = 'IMAGE_INSTALL_append = " selftest-chown"\n'
155 self.write_config(features) 190 self.write_config(features)
156 bitbake("core-image-minimal") 191 bitbake("core-image-minimal")
157 192
158 sysconfdir = get_bb_var('sysconfdir', 'selftest-chown') 193 def check_ownership(qemu, expected_gid, expected_uid, path):
159 def check_ownership(qemu, gid, uid, path):
160 self.logger.info("Check ownership of %s", path) 194 self.logger.info("Check ownership of %s", path)
161 status, output = qemu.run_serial(r'/bin/stat -c "%U %G" ' + path, timeout=60) 195 status, output = qemu.run_serial('stat -c "%U %G" ' + path)
162 output = output.split(" ") 196 self.assertEqual(status, 1, "stat failed: " + output)
163 if output[0] != uid or output[1] != gid : 197 try:
164 self.logger.error("Incrrect ownership %s [%s:%s]", path, output[0], output[1]) 198 uid, gid = output.split()
165 return False 199 self.assertEqual(uid, expected_uid)
166 return True 200 self.assertEqual(gid, expected_gid)
201 except ValueError:
202 self.fail("Cannot parse output: " + output)
167 203
204 sysconfdir = get_bb_var('sysconfdir', 'selftest-chown')
168 with runqemu('core-image-minimal') as qemu: 205 with runqemu('core-image-minimal') as qemu:
169 for path in [ sysconfdir + "/selftest-chown/file", 206 for path in [ sysconfdir + "/selftest-chown/file",
170 sysconfdir + "/selftest-chown/dir", 207 sysconfdir + "/selftest-chown/dir",
171 sysconfdir + "/selftest-chown/symlink", 208 sysconfdir + "/selftest-chown/symlink",
172 sysconfdir + "/selftest-chown/fifotest/fifo"]: 209 sysconfdir + "/selftest-chown/fifotest/fifo"]:
173 if not check_ownership(qemu, "test", "test", path): 210 check_ownership(qemu, "test", "test", path)
174 self.fail('Test ownership %s failed' % path)
diff --git a/meta/lib/oeqa/selftest/cases/picolibc.py b/meta/lib/oeqa/selftest/cases/picolibc.py
new file mode 100644
index 0000000000..e40b4fc3d3
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/picolibc.py
@@ -0,0 +1,18 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import bitbake, get_bb_var
9
10class PicolibcTest(OESelftestTestCase):
11
12 def test_picolibc(self):
13 compatible_machines = ['qemuarm', 'qemuarm64', 'qemuriscv32', 'qemuriscv64']
14 machine = get_bb_var('MACHINE')
15 if machine not in compatible_machines:
16 self.skipTest('This test only works with machines : %s' % ' '.join(compatible_machines))
17 self.write_config('TCLIBC = "picolibc"')
18 bitbake("picolibc-helloworld")
diff --git a/meta/lib/oeqa/selftest/cases/pkgdata.py b/meta/lib/oeqa/selftest/cases/pkgdata.py
index 254abc40c6..d786c33018 100644
--- a/meta/lib/oeqa/selftest/cases/pkgdata.py
+++ b/meta/lib/oeqa/selftest/cases/pkgdata.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -47,8 +49,8 @@ class OePkgdataUtilTests(OESelftestTestCase):
47 self.assertGreater(pkgsize, 1, "Size should be greater than 1. %s" % result.output) 49 self.assertGreater(pkgsize, 1, "Size should be greater than 1. %s" % result.output)
48 50
49 def test_find_path(self): 51 def test_find_path(self):
50 result = runCmd('oe-pkgdata-util find-path /lib/libz.so.1') 52 result = runCmd('oe-pkgdata-util find-path /usr/lib/libz.so.1')
51 self.assertEqual(result.output, 'zlib: /lib/libz.so.1') 53 self.assertEqual(result.output, 'zlib: /usr/lib/libz.so.1')
52 result = runCmd('oe-pkgdata-util find-path /usr/bin/m4') 54 result = runCmd('oe-pkgdata-util find-path /usr/bin/m4')
53 self.assertEqual(result.output, 'm4: /usr/bin/m4') 55 self.assertEqual(result.output, 'm4: /usr/bin/m4')
54 result = runCmd('oe-pkgdata-util find-path /not/exist', ignore_status=True) 56 result = runCmd('oe-pkgdata-util find-path /not/exist', ignore_status=True)
@@ -120,8 +122,7 @@ class OePkgdataUtilTests(OESelftestTestCase):
120 curpkg = line.split(':')[0] 122 curpkg = line.split(':')[0]
121 files[curpkg] = [] 123 files[curpkg] = []
122 return files 124 return files
123 bb_vars = get_bb_vars(['base_libdir', 'libdir', 'includedir', 'mandir']) 125 bb_vars = get_bb_vars(['libdir', 'includedir', 'mandir'])
124 base_libdir = bb_vars['base_libdir']
125 libdir = bb_vars['libdir'] 126 libdir = bb_vars['libdir']
126 includedir = bb_vars['includedir'] 127 includedir = bb_vars['includedir']
127 mandir = bb_vars['mandir'] 128 mandir = bb_vars['mandir']
@@ -138,7 +139,7 @@ class OePkgdataUtilTests(OESelftestTestCase):
138 self.assertIn('libz1', list(files.keys()), "listed pkgs. files: %s" %result.output) 139 self.assertIn('libz1', list(files.keys()), "listed pkgs. files: %s" %result.output)
139 self.assertIn('libz-dev', list(files.keys()), "listed pkgs. files: %s" %result.output) 140 self.assertIn('libz-dev', list(files.keys()), "listed pkgs. files: %s" %result.output)
140 self.assertGreater(len(files['libz1']), 1) 141 self.assertGreater(len(files['libz1']), 1)
141 libspec = os.path.join(base_libdir, 'libz.so.1.*') 142 libspec = os.path.join(libdir, 'libz.so.1.*')
142 found = False 143 found = False
143 for fileitem in files['libz1']: 144 for fileitem in files['libz1']:
144 if fnmatch.fnmatchcase(fileitem, libspec): 145 if fnmatch.fnmatchcase(fileitem, libspec):
diff --git a/meta/lib/oeqa/selftest/cases/prservice.py b/meta/lib/oeqa/selftest/cases/prservice.py
index 578b2b4dd9..8da3739c57 100644
--- a/meta/lib/oeqa/selftest/cases/prservice.py
+++ b/meta/lib/oeqa/selftest/cases/prservice.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -12,6 +14,8 @@ from oeqa.selftest.case import OESelftestTestCase
12from oeqa.utils.commands import runCmd, bitbake, get_bb_var 14from oeqa.utils.commands import runCmd, bitbake, get_bb_var
13from oeqa.utils.network import get_free_port 15from oeqa.utils.network import get_free_port
14 16
17import bb.utils
18
15class BitbakePrTests(OESelftestTestCase): 19class BitbakePrTests(OESelftestTestCase):
16 20
17 @classmethod 21 @classmethod
@@ -19,6 +23,16 @@ class BitbakePrTests(OESelftestTestCase):
19 super(BitbakePrTests, cls).setUpClass() 23 super(BitbakePrTests, cls).setUpClass()
20 cls.pkgdata_dir = get_bb_var('PKGDATA_DIR') 24 cls.pkgdata_dir = get_bb_var('PKGDATA_DIR')
21 25
26 cls.exported_db_path = os.path.join(cls.builddir, 'export.inc')
27 cls.current_db_path = os.path.join(get_bb_var('PERSISTENT_DIR'), 'prserv.sqlite3')
28
29 def cleanup(self):
30 # Ensure any memory resident bitbake is stopped
31 bitbake("-m")
32 # Remove any existing export file or prserv database
33 bb.utils.remove(self.exported_db_path)
34 bb.utils.remove(self.current_db_path + "*")
35
22 def get_pr_version(self, package_name): 36 def get_pr_version(self, package_name):
23 package_data_file = os.path.join(self.pkgdata_dir, 'runtime', package_name) 37 package_data_file = os.path.join(self.pkgdata_dir, 'runtime', package_name)
24 package_data = ftools.read_file(package_data_file) 38 package_data = ftools.read_file(package_data_file)
@@ -40,13 +54,14 @@ class BitbakePrTests(OESelftestTestCase):
40 return str(stamps[0]) 54 return str(stamps[0])
41 55
42 def increment_package_pr(self, package_name): 56 def increment_package_pr(self, package_name):
43 inc_data = "do_package_append() {\n bb.build.exec_func('do_test_prserv', d)\n}\ndo_test_prserv() {\necho \"The current date is: %s\" > ${PKGDESTWORK}/${PN}.datestamp\n}" % datetime.datetime.now() 57 inc_data = "do_package:append() {\n bb.build.exec_func('do_test_prserv', d)\n}\ndo_test_prserv() {\necho \"The current date is: %s\" > ${PKGDESTWORK}/${PN}.datestamp\n}" % datetime.datetime.now()
44 self.write_recipeinc(package_name, inc_data) 58 self.write_recipeinc(package_name, inc_data)
45 res = bitbake(package_name, ignore_status=True) 59 res = bitbake(package_name, ignore_status=True)
46 self.delete_recipeinc(package_name) 60 self.delete_recipeinc(package_name)
47 self.assertEqual(res.status, 0, msg=res.output) 61 self.assertEqual(res.status, 0, msg=res.output)
48 62
49 def config_pr_tests(self, package_name, package_type='rpm', pr_socket='localhost:0'): 63 def config_pr_tests(self, package_name, package_type='rpm', pr_socket='localhost:0'):
64 self.cleanup()
50 config_package_data = 'PACKAGE_CLASSES = "package_%s"' % package_type 65 config_package_data = 'PACKAGE_CLASSES = "package_%s"' % package_type
51 self.write_config(config_package_data) 66 self.write_config(config_package_data)
52 config_server_data = 'PRSERV_HOST = "%s"' % pr_socket 67 config_server_data = 'PRSERV_HOST = "%s"' % pr_socket
@@ -66,24 +81,24 @@ class BitbakePrTests(OESelftestTestCase):
66 self.assertTrue(pr_2 - pr_1 == 1, "New PR %s did not increment as expected (from %s), difference should be 1" % (pr_2, pr_1)) 81 self.assertTrue(pr_2 - pr_1 == 1, "New PR %s did not increment as expected (from %s), difference should be 1" % (pr_2, pr_1))
67 self.assertTrue(stamp_1 != stamp_2, "Different pkg rev. but same stamp: %s" % stamp_1) 82 self.assertTrue(stamp_1 != stamp_2, "Different pkg rev. but same stamp: %s" % stamp_1)
68 83
84 self.cleanup()
85
69 def run_test_pr_export_import(self, package_name, replace_current_db=True): 86 def run_test_pr_export_import(self, package_name, replace_current_db=True):
70 self.config_pr_tests(package_name) 87 self.config_pr_tests(package_name)
71 88
72 self.increment_package_pr(package_name) 89 self.increment_package_pr(package_name)
73 pr_1 = self.get_pr_version(package_name) 90 pr_1 = self.get_pr_version(package_name)
74 91
75 exported_db_path = os.path.join(self.builddir, 'export.inc') 92 export_result = runCmd("bitbake-prserv-tool export %s" % self.exported_db_path, ignore_status=True)
76 export_result = runCmd("bitbake-prserv-tool export %s" % exported_db_path, ignore_status=True)
77 self.assertEqual(export_result.status, 0, msg="PR Service database export failed: %s" % export_result.output) 93 self.assertEqual(export_result.status, 0, msg="PR Service database export failed: %s" % export_result.output)
78 self.assertTrue(os.path.exists(exported_db_path)) 94 self.assertTrue(os.path.exists(self.exported_db_path), msg="%s didn't exist, tool output %s" % (self.exported_db_path, export_result.output))
79 95
80 if replace_current_db: 96 if replace_current_db:
81 current_db_path = os.path.join(get_bb_var('PERSISTENT_DIR'), 'prserv.sqlite3') 97 self.assertTrue(os.path.exists(self.current_db_path), msg="Path to current PR Service database is invalid: %s" % self.current_db_path)
82 self.assertTrue(os.path.exists(current_db_path), msg="Path to current PR Service database is invalid: %s" % current_db_path) 98 os.remove(self.current_db_path)
83 os.remove(current_db_path)
84 99
85 import_result = runCmd("bitbake-prserv-tool import %s" % exported_db_path, ignore_status=True) 100 import_result = runCmd("bitbake-prserv-tool import %s" % self.exported_db_path, ignore_status=True)
86 os.remove(exported_db_path) 101 #os.remove(self.exported_db_path)
87 self.assertEqual(import_result.status, 0, msg="PR Service database import failed: %s" % import_result.output) 102 self.assertEqual(import_result.status, 0, msg="PR Service database import failed: %s" % import_result.output)
88 103
89 self.increment_package_pr(package_name) 104 self.increment_package_pr(package_name)
@@ -91,6 +106,8 @@ class BitbakePrTests(OESelftestTestCase):
91 106
92 self.assertTrue(pr_2 - pr_1 == 1, "New PR %s did not increment as expected (from %s), difference should be 1" % (pr_2, pr_1)) 107 self.assertTrue(pr_2 - pr_1 == 1, "New PR %s did not increment as expected (from %s), difference should be 1" % (pr_2, pr_1))
93 108
109 self.cleanup()
110
94 def test_import_export_replace_db(self): 111 def test_import_export_replace_db(self):
95 self.run_test_pr_export_import('m4') 112 self.run_test_pr_export_import('m4')
96 113
diff --git a/meta/lib/oeqa/selftest/cases/pseudo.py b/meta/lib/oeqa/selftest/cases/pseudo.py
index 33593d5ce9..3ef8786022 100644
--- a/meta/lib/oeqa/selftest/cases/pseudo.py
+++ b/meta/lib/oeqa/selftest/cases/pseudo.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/selftest/cases/recipetool.py b/meta/lib/oeqa/selftest/cases/recipetool.py
index 9d56e9e1e3..0bd724c8ee 100644
--- a/meta/lib/oeqa/selftest/cases/recipetool.py
+++ b/meta/lib/oeqa/selftest/cases/recipetool.py
@@ -1,7 +1,10 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
7import errno
5import os 8import os
6import shutil 9import shutil
7import tempfile 10import tempfile
@@ -25,7 +28,17 @@ def tearDownModule():
25 runCmd('rm -rf %s' % templayerdir) 28 runCmd('rm -rf %s' % templayerdir)
26 29
27 30
28class RecipetoolBase(devtool.DevtoolBase): 31def needTomllib(test):
32 # This test require python 3.11 or above for the tomllib module or tomli module to be installed
33 try:
34 import tomllib
35 except ImportError:
36 try:
37 import tomli
38 except ImportError:
39 test.skipTest('Test requires python 3.11 or above for tomllib module or tomli module')
40
41class RecipetoolBase(devtool.DevtoolTestCase):
29 42
30 def setUpLocal(self): 43 def setUpLocal(self):
31 super(RecipetoolBase, self).setUpLocal() 44 super(RecipetoolBase, self).setUpLocal()
@@ -35,6 +48,8 @@ class RecipetoolBase(devtool.DevtoolBase):
35 self.testfile = os.path.join(self.tempdir, 'testfile') 48 self.testfile = os.path.join(self.tempdir, 'testfile')
36 with open(self.testfile, 'w') as f: 49 with open(self.testfile, 'w') as f:
37 f.write('Test file\n') 50 f.write('Test file\n')
51 config = 'BBMASK += "meta-poky/recipes-core/base-files/base-files_%.bbappend"\n'
52 self.append_config(config)
38 53
39 def tearDownLocal(self): 54 def tearDownLocal(self):
40 runCmd('rm -rf %s/recipes-*' % self.templayerdir) 55 runCmd('rm -rf %s/recipes-*' % self.templayerdir)
@@ -68,17 +83,16 @@ class RecipetoolBase(devtool.DevtoolBase):
68 return bbappendfile, result.output 83 return bbappendfile, result.output
69 84
70 85
71class RecipetoolTests(RecipetoolBase): 86class RecipetoolAppendTests(RecipetoolBase):
72 87
73 @classmethod 88 @classmethod
74 def setUpClass(cls): 89 def setUpClass(cls):
75 super(RecipetoolTests, cls).setUpClass() 90 super(RecipetoolAppendTests, cls).setUpClass()
76 # Ensure we have the right data in shlibs/pkgdata 91 # Ensure we have the right data in shlibs/pkgdata
77 cls.logger.info('Running bitbake to generate pkgdata') 92 cls.logger.info('Running bitbake to generate pkgdata')
78 bitbake('-c packagedata base-files coreutils busybox selftest-recipetool-appendfile') 93 bitbake('-c packagedata base-files coreutils busybox selftest-recipetool-appendfile')
79 bb_vars = get_bb_vars(['COREBASE', 'BBPATH']) 94 bb_vars = get_bb_vars(['COREBASE'])
80 cls.corebase = bb_vars['COREBASE'] 95 cls.corebase = bb_vars['COREBASE']
81 cls.bbpath = bb_vars['BBPATH']
82 96
83 def _try_recipetool_appendfile(self, testrecipe, destfile, newfile, options, expectedlines, expectedfiles): 97 def _try_recipetool_appendfile(self, testrecipe, destfile, newfile, options, expectedlines, expectedfiles):
84 cmd = 'recipetool appendfile %s %s %s %s' % (self.templayerdir, destfile, newfile, options) 98 cmd = 'recipetool appendfile %s %s %s %s' % (self.templayerdir, destfile, newfile, options)
@@ -94,7 +108,7 @@ class RecipetoolTests(RecipetoolBase):
94 108
95 def test_recipetool_appendfile_basic(self): 109 def test_recipetool_appendfile_basic(self):
96 # Basic test 110 # Basic test
97 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 111 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
98 '\n'] 112 '\n']
99 _, output = self._try_recipetool_appendfile('base-files', '/etc/motd', self.testfile, '', expectedlines, ['motd']) 113 _, output = self._try_recipetool_appendfile('base-files', '/etc/motd', self.testfile, '', expectedlines, ['motd'])
100 self.assertNotIn('WARNING: ', output) 114 self.assertNotIn('WARNING: ', output)
@@ -106,23 +120,29 @@ class RecipetoolTests(RecipetoolBase):
106 self._try_recipetool_appendfile_fail('/dev/console', self.testfile, ['ERROR: /dev/console cannot be handled by this tool']) 120 self._try_recipetool_appendfile_fail('/dev/console', self.testfile, ['ERROR: /dev/console cannot be handled by this tool'])
107 121
108 def test_recipetool_appendfile_alternatives(self): 122 def test_recipetool_appendfile_alternatives(self):
123 lspath = '/bin/ls'
124 dirname = "base_bindir"
125 if "usrmerge" in get_bb_var('DISTRO_FEATURES'):
126 lspath = '/usr/bin/ls'
127 dirname = "bindir"
128
109 # Now try with a file we know should be an alternative 129 # Now try with a file we know should be an alternative
110 # (this is very much a fake example, but one we know is reliably an alternative) 130 # (this is very much a fake example, but one we know is reliably an alternative)
111 self._try_recipetool_appendfile_fail('/bin/ls', self.testfile, ['ERROR: File /bin/ls is an alternative possibly provided by the following recipes:', 'coreutils', 'busybox']) 131 self._try_recipetool_appendfile_fail(lspath, self.testfile, ['ERROR: File %s is an alternative possibly provided by the following recipes:' % lspath, 'coreutils', 'busybox'])
112 # Need a test file - should be executable 132 # Need a test file - should be executable
113 testfile2 = os.path.join(self.corebase, 'oe-init-build-env') 133 testfile2 = os.path.join(self.corebase, 'oe-init-build-env')
114 testfile2name = os.path.basename(testfile2) 134 testfile2name = os.path.basename(testfile2)
115 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 135 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
116 '\n', 136 '\n',
117 'SRC_URI += "file://%s"\n' % testfile2name, 137 'SRC_URI += "file://%s"\n' % testfile2name,
118 '\n', 138 '\n',
119 'do_install_append() {\n', 139 'do_install:append() {\n',
120 ' install -d ${D}${base_bindir}\n', 140 ' install -d ${D}${%s}\n' % dirname,
121 ' install -m 0755 ${WORKDIR}/%s ${D}${base_bindir}/ls\n' % testfile2name, 141 ' install -m 0755 ${UNPACKDIR}/%s ${D}${%s}/ls\n' % (testfile2name, dirname),
122 '}\n'] 142 '}\n']
123 self._try_recipetool_appendfile('coreutils', '/bin/ls', testfile2, '-r coreutils', expectedlines, [testfile2name]) 143 self._try_recipetool_appendfile('coreutils', lspath, testfile2, '-r coreutils', expectedlines, [testfile2name])
124 # Now try bbappending the same file again, contents should not change 144 # Now try bbappending the same file again, contents should not change
125 bbappendfile, _ = self._try_recipetool_appendfile('coreutils', '/bin/ls', self.testfile, '-r coreutils', expectedlines, [testfile2name]) 145 bbappendfile, _ = self._try_recipetool_appendfile('coreutils', lspath, self.testfile, '-r coreutils', expectedlines, [testfile2name])
126 # But file should have 146 # But file should have
127 copiedfile = os.path.join(os.path.dirname(bbappendfile), 'coreutils', testfile2name) 147 copiedfile = os.path.join(os.path.dirname(bbappendfile), 'coreutils', testfile2name)
128 result = runCmd('diff -q %s %s' % (testfile2, copiedfile), ignore_status=True) 148 result = runCmd('diff -q %s %s' % (testfile2, copiedfile), ignore_status=True)
@@ -138,117 +158,117 @@ class RecipetoolTests(RecipetoolBase):
138 158
139 def test_recipetool_appendfile_add(self): 159 def test_recipetool_appendfile_add(self):
140 # Try arbitrary file add to a recipe 160 # Try arbitrary file add to a recipe
141 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 161 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
142 '\n', 162 '\n',
143 'SRC_URI += "file://testfile"\n', 163 'SRC_URI += "file://testfile"\n',
144 '\n', 164 '\n',
145 'do_install_append() {\n', 165 'do_install:append() {\n',
146 ' install -d ${D}${datadir}\n', 166 ' install -d ${D}${datadir}\n',
147 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n', 167 ' install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/something\n',
148 '}\n'] 168 '}\n']
149 self._try_recipetool_appendfile('netbase', '/usr/share/something', self.testfile, '-r netbase', expectedlines, ['testfile']) 169 self._try_recipetool_appendfile('netbase', '/usr/share/something', self.testfile, '-r netbase', expectedlines, ['testfile'])
150 # Try adding another file, this time where the source file is executable 170 # Try adding another file, this time where the source file is executable
151 # (so we're testing that, plus modifying an existing bbappend) 171 # (so we're testing that, plus modifying an existing bbappend)
152 testfile2 = os.path.join(self.corebase, 'oe-init-build-env') 172 testfile2 = os.path.join(self.corebase, 'oe-init-build-env')
153 testfile2name = os.path.basename(testfile2) 173 testfile2name = os.path.basename(testfile2)
154 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 174 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
155 '\n', 175 '\n',
156 'SRC_URI += "file://testfile \\\n', 176 'SRC_URI += "file://testfile \\\n',
157 ' file://%s \\\n' % testfile2name, 177 ' file://%s \\\n' % testfile2name,
158 ' "\n', 178 ' "\n',
159 '\n', 179 '\n',
160 'do_install_append() {\n', 180 'do_install:append() {\n',
161 ' install -d ${D}${datadir}\n', 181 ' install -d ${D}${datadir}\n',
162 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n', 182 ' install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/something\n',
163 ' install -m 0755 ${WORKDIR}/%s ${D}${datadir}/scriptname\n' % testfile2name, 183 ' install -m 0755 ${UNPACKDIR}/%s ${D}${datadir}/scriptname\n' % testfile2name,
164 '}\n'] 184 '}\n']
165 self._try_recipetool_appendfile('netbase', '/usr/share/scriptname', testfile2, '-r netbase', expectedlines, ['testfile', testfile2name]) 185 self._try_recipetool_appendfile('netbase', '/usr/share/scriptname', testfile2, '-r netbase', expectedlines, ['testfile', testfile2name])
166 186
167 def test_recipetool_appendfile_add_bindir(self): 187 def test_recipetool_appendfile_add_bindir(self):
168 # Try arbitrary file add to a recipe, this time to a location such that should be installed as executable 188 # Try arbitrary file add to a recipe, this time to a location such that should be installed as executable
169 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 189 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
170 '\n', 190 '\n',
171 'SRC_URI += "file://testfile"\n', 191 'SRC_URI += "file://testfile"\n',
172 '\n', 192 '\n',
173 'do_install_append() {\n', 193 'do_install:append() {\n',
174 ' install -d ${D}${bindir}\n', 194 ' install -d ${D}${bindir}\n',
175 ' install -m 0755 ${WORKDIR}/testfile ${D}${bindir}/selftest-recipetool-testbin\n', 195 ' install -m 0755 ${UNPACKDIR}/testfile ${D}${bindir}/selftest-recipetool-testbin\n',
176 '}\n'] 196 '}\n']
177 _, output = self._try_recipetool_appendfile('netbase', '/usr/bin/selftest-recipetool-testbin', self.testfile, '-r netbase', expectedlines, ['testfile']) 197 _, output = self._try_recipetool_appendfile('netbase', '/usr/bin/selftest-recipetool-testbin', self.testfile, '-r netbase', expectedlines, ['testfile'])
178 self.assertNotIn('WARNING: ', output) 198 self.assertNotIn('WARNING: ', output)
179 199
180 def test_recipetool_appendfile_add_machine(self): 200 def test_recipetool_appendfile_add_machine(self):
181 # Try arbitrary file add to a recipe, this time to a location such that should be installed as executable 201 # Try arbitrary file add to a recipe, this time to a location such that should be installed as executable
182 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 202 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
183 '\n', 203 '\n',
184 'PACKAGE_ARCH = "${MACHINE_ARCH}"\n', 204 'PACKAGE_ARCH = "${MACHINE_ARCH}"\n',
185 '\n', 205 '\n',
186 'SRC_URI_append_mymachine = " file://testfile"\n', 206 'SRC_URI:append:mymachine = " file://testfile"\n',
187 '\n', 207 '\n',
188 'do_install_append_mymachine() {\n', 208 'do_install:append:mymachine() {\n',
189 ' install -d ${D}${datadir}\n', 209 ' install -d ${D}${datadir}\n',
190 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n', 210 ' install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/something\n',
191 '}\n'] 211 '}\n']
192 _, output = self._try_recipetool_appendfile('netbase', '/usr/share/something', self.testfile, '-r netbase -m mymachine', expectedlines, ['mymachine/testfile']) 212 _, output = self._try_recipetool_appendfile('netbase', '/usr/share/something', self.testfile, '-r netbase -m mymachine', expectedlines, ['mymachine/testfile'])
193 self.assertNotIn('WARNING: ', output) 213 self.assertNotIn('WARNING: ', output)
194 214
195 def test_recipetool_appendfile_orig(self): 215 def test_recipetool_appendfile_orig(self):
196 # A file that's in SRC_URI and in do_install with the same name 216 # A file that's in SRC_URI and in do_install with the same name
197 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 217 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
198 '\n'] 218 '\n']
199 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-orig', self.testfile, '', expectedlines, ['selftest-replaceme-orig']) 219 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-orig', self.testfile, '', expectedlines, ['selftest-replaceme-orig'])
200 self.assertNotIn('WARNING: ', output) 220 self.assertNotIn('WARNING: ', output)
201 221
202 def test_recipetool_appendfile_todir(self): 222 def test_recipetool_appendfile_todir(self):
203 # A file that's in SRC_URI and in do_install with destination directory rather than file 223 # A file that's in SRC_URI and in do_install with destination directory rather than file
204 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 224 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
205 '\n'] 225 '\n']
206 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-todir', self.testfile, '', expectedlines, ['selftest-replaceme-todir']) 226 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-todir', self.testfile, '', expectedlines, ['selftest-replaceme-todir'])
207 self.assertNotIn('WARNING: ', output) 227 self.assertNotIn('WARNING: ', output)
208 228
209 def test_recipetool_appendfile_renamed(self): 229 def test_recipetool_appendfile_renamed(self):
210 # A file that's in SRC_URI with a different name to the destination file 230 # A file that's in SRC_URI with a different name to the destination file
211 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 231 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
212 '\n'] 232 '\n']
213 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-renamed', self.testfile, '', expectedlines, ['file1']) 233 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-renamed', self.testfile, '', expectedlines, ['file1'])
214 self.assertNotIn('WARNING: ', output) 234 self.assertNotIn('WARNING: ', output)
215 235
216 def test_recipetool_appendfile_subdir(self): 236 def test_recipetool_appendfile_subdir(self):
217 # A file that's in SRC_URI in a subdir 237 # A file that's in SRC_URI in a subdir
218 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 238 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
219 '\n', 239 '\n',
220 'SRC_URI += "file://testfile"\n', 240 'SRC_URI += "file://testfile"\n',
221 '\n', 241 '\n',
222 'do_install_append() {\n', 242 'do_install:append() {\n',
223 ' install -d ${D}${datadir}\n', 243 ' install -d ${D}${datadir}\n',
224 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-subdir\n', 244 ' install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/selftest-replaceme-subdir\n',
225 '}\n'] 245 '}\n']
226 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-subdir', self.testfile, '', expectedlines, ['testfile']) 246 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-subdir', self.testfile, '', expectedlines, ['testfile'])
227 self.assertNotIn('WARNING: ', output) 247 self.assertNotIn('WARNING: ', output)
228 248
229 def test_recipetool_appendfile_inst_glob(self): 249 def test_recipetool_appendfile_inst_glob(self):
230 # A file that's in do_install as a glob 250 # A file that's in do_install as a glob
231 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 251 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
232 '\n'] 252 '\n']
233 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-globfile', self.testfile, '', expectedlines, ['selftest-replaceme-inst-globfile']) 253 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-globfile', self.testfile, '', expectedlines, ['selftest-replaceme-inst-globfile'])
234 self.assertNotIn('WARNING: ', output) 254 self.assertNotIn('WARNING: ', output)
235 255
236 def test_recipetool_appendfile_inst_todir_glob(self): 256 def test_recipetool_appendfile_inst_todir_glob(self):
237 # A file that's in do_install as a glob with destination as a directory 257 # A file that's in do_install as a glob with destination as a directory
238 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 258 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
239 '\n'] 259 '\n']
240 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-todir-globfile', self.testfile, '', expectedlines, ['selftest-replaceme-inst-todir-globfile']) 260 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-todir-globfile', self.testfile, '', expectedlines, ['selftest-replaceme-inst-todir-globfile'])
241 self.assertNotIn('WARNING: ', output) 261 self.assertNotIn('WARNING: ', output)
242 262
243 def test_recipetool_appendfile_patch(self): 263 def test_recipetool_appendfile_patch(self):
244 # A file that's added by a patch in SRC_URI 264 # A file that's added by a patch in SRC_URI
245 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 265 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
246 '\n', 266 '\n',
247 'SRC_URI += "file://testfile"\n', 267 'SRC_URI += "file://testfile"\n',
248 '\n', 268 '\n',
249 'do_install_append() {\n', 269 'do_install:append() {\n',
250 ' install -d ${D}${sysconfdir}\n', 270 ' install -d ${D}${sysconfdir}\n',
251 ' install -m 0644 ${WORKDIR}/testfile ${D}${sysconfdir}/selftest-replaceme-patched\n', 271 ' install -m 0644 ${UNPACKDIR}/testfile ${D}${sysconfdir}/selftest-replaceme-patched\n',
252 '}\n'] 272 '}\n']
253 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/etc/selftest-replaceme-patched', self.testfile, '', expectedlines, ['testfile']) 273 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/etc/selftest-replaceme-patched', self.testfile, '', expectedlines, ['testfile'])
254 for line in output.splitlines(): 274 for line in output.splitlines():
@@ -260,20 +280,20 @@ class RecipetoolTests(RecipetoolBase):
260 280
261 def test_recipetool_appendfile_script(self): 281 def test_recipetool_appendfile_script(self):
262 # Now, a file that's in SRC_URI but installed by a script (so no mention in do_install) 282 # Now, a file that's in SRC_URI but installed by a script (so no mention in do_install)
263 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 283 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
264 '\n', 284 '\n',
265 'SRC_URI += "file://testfile"\n', 285 'SRC_URI += "file://testfile"\n',
266 '\n', 286 '\n',
267 'do_install_append() {\n', 287 'do_install:append() {\n',
268 ' install -d ${D}${datadir}\n', 288 ' install -d ${D}${datadir}\n',
269 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-scripted\n', 289 ' install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/selftest-replaceme-scripted\n',
270 '}\n'] 290 '}\n']
271 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-scripted', self.testfile, '', expectedlines, ['testfile']) 291 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-scripted', self.testfile, '', expectedlines, ['testfile'])
272 self.assertNotIn('WARNING: ', output) 292 self.assertNotIn('WARNING: ', output)
273 293
274 def test_recipetool_appendfile_inst_func(self): 294 def test_recipetool_appendfile_inst_func(self):
275 # A file that's installed from a function called by do_install 295 # A file that's installed from a function called by do_install
276 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 296 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
277 '\n'] 297 '\n']
278 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-func', self.testfile, '', expectedlines, ['selftest-replaceme-inst-func']) 298 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-func', self.testfile, '', expectedlines, ['selftest-replaceme-inst-func'])
279 self.assertNotIn('WARNING: ', output) 299 self.assertNotIn('WARNING: ', output)
@@ -283,13 +303,13 @@ class RecipetoolTests(RecipetoolBase):
283 # First try without specifying recipe 303 # First try without specifying recipe
284 self._try_recipetool_appendfile_fail('/usr/share/selftest-replaceme-postinst', self.testfile, ['File /usr/share/selftest-replaceme-postinst may be written out in a pre/postinstall script of the following recipes:', 'selftest-recipetool-appendfile']) 304 self._try_recipetool_appendfile_fail('/usr/share/selftest-replaceme-postinst', self.testfile, ['File /usr/share/selftest-replaceme-postinst may be written out in a pre/postinstall script of the following recipes:', 'selftest-recipetool-appendfile'])
285 # Now specify recipe 305 # Now specify recipe
286 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 306 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
287 '\n', 307 '\n',
288 'SRC_URI += "file://testfile"\n', 308 'SRC_URI += "file://testfile"\n',
289 '\n', 309 '\n',
290 'do_install_append() {\n', 310 'do_install:append() {\n',
291 ' install -d ${D}${datadir}\n', 311 ' install -d ${D}${datadir}\n',
292 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-postinst\n', 312 ' install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/selftest-replaceme-postinst\n',
293 '}\n'] 313 '}\n']
294 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-postinst', self.testfile, '-r selftest-recipetool-appendfile', expectedlines, ['testfile']) 314 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-postinst', self.testfile, '-r selftest-recipetool-appendfile', expectedlines, ['testfile'])
295 315
@@ -332,6 +352,9 @@ class RecipetoolTests(RecipetoolBase):
332 filename = try_appendfile_wc('-w') 352 filename = try_appendfile_wc('-w')
333 self.assertEqual(filename, recipefn.split('_')[0] + '_%.bbappend') 353 self.assertEqual(filename, recipefn.split('_')[0] + '_%.bbappend')
334 354
355
356class RecipetoolCreateTests(RecipetoolBase):
357
335 def test_recipetool_create(self): 358 def test_recipetool_create(self):
336 # Try adding a recipe 359 # Try adding a recipe
337 tempsrc = os.path.join(self.tempdir, 'srctree') 360 tempsrc = os.path.join(self.tempdir, 'srctree')
@@ -341,14 +364,13 @@ class RecipetoolTests(RecipetoolBase):
341 result = runCmd('recipetool create -o %s %s -x %s' % (recipefile, srcuri, tempsrc)) 364 result = runCmd('recipetool create -o %s %s -x %s' % (recipefile, srcuri, tempsrc))
342 self.assertTrue(os.path.isfile(recipefile)) 365 self.assertTrue(os.path.isfile(recipefile))
343 checkvars = {} 366 checkvars = {}
344 checkvars['LICENSE'] = 'GPLv2' 367 checkvars['LICENSE'] = 'GPL-2.0-only'
345 checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263' 368 checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263'
346 checkvars['SRC_URI'] = 'https://github.com/logrotate/logrotate/releases/download/${PV}/logrotate-${PV}.tar.xz' 369 checkvars['SRC_URI'] = 'https://github.com/logrotate/logrotate/releases/download/${PV}/logrotate-${PV}.tar.xz'
347 checkvars['SRC_URI[md5sum]'] = 'a560c57fac87c45b2fc17406cdf79288'
348 checkvars['SRC_URI[sha256sum]'] = '2e6a401cac9024db2288297e3be1a8ab60e7401ba8e91225218aaf4a27e82a07' 370 checkvars['SRC_URI[sha256sum]'] = '2e6a401cac9024db2288297e3be1a8ab60e7401ba8e91225218aaf4a27e82a07'
349 self._test_recipe_contents(recipefile, checkvars, []) 371 self._test_recipe_contents(recipefile, checkvars, [])
350 372
351 def test_recipetool_create_git(self): 373 def test_recipetool_create_autotools(self):
352 if 'x11' not in get_bb_var('DISTRO_FEATURES'): 374 if 'x11' not in get_bb_var('DISTRO_FEATURES'):
353 self.skipTest('Test requires x11 as distro feature') 375 self.skipTest('Test requires x11 as distro feature')
354 # Ensure we have the right data in shlibs/pkgdata 376 # Ensure we have the right data in shlibs/pkgdata
@@ -357,15 +379,15 @@ class RecipetoolTests(RecipetoolBase):
357 tempsrc = os.path.join(self.tempdir, 'srctree') 379 tempsrc = os.path.join(self.tempdir, 'srctree')
358 os.makedirs(tempsrc) 380 os.makedirs(tempsrc)
359 recipefile = os.path.join(self.tempdir, 'libmatchbox.bb') 381 recipefile = os.path.join(self.tempdir, 'libmatchbox.bb')
360 srcuri = 'git://git.yoctoproject.org/libmatchbox' 382 srcuri = 'git://git.yoctoproject.org/libmatchbox;protocol=https'
361 result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri + ";rev=9f7cf8895ae2d39c465c04cc78e918c157420269", '-x', tempsrc]) 383 result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri + ";rev=9f7cf8895ae2d39c465c04cc78e918c157420269", '-x', tempsrc])
362 self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output) 384 self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output)
363 checkvars = {} 385 checkvars = {}
364 checkvars['LICENSE'] = 'LGPLv2.1' 386 checkvars['LICENSE'] = 'LGPL-2.1-only'
365 checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=7fbc338309ac38fefcd64b04bb903e34' 387 checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=7fbc338309ac38fefcd64b04bb903e34'
366 checkvars['S'] = '${WORKDIR}/git' 388 checkvars['S'] = None
367 checkvars['PV'] = '1.11+git${SRCPV}' 389 checkvars['PV'] = '1.11+git'
368 checkvars['SRC_URI'] = srcuri 390 checkvars['SRC_URI'] = srcuri + ';branch=master'
369 checkvars['DEPENDS'] = set(['libcheck', 'libjpeg-turbo', 'libpng', 'libx11', 'libxext', 'pango']) 391 checkvars['DEPENDS'] = set(['libcheck', 'libjpeg-turbo', 'libpng', 'libx11', 'libxext', 'pango'])
370 inherits = ['autotools', 'pkgconfig'] 392 inherits = ['autotools', 'pkgconfig']
371 self._test_recipe_contents(recipefile, checkvars, inherits) 393 self._test_recipe_contents(recipefile, checkvars, inherits)
@@ -374,8 +396,8 @@ class RecipetoolTests(RecipetoolBase):
374 # Try adding a recipe 396 # Try adding a recipe
375 temprecipe = os.path.join(self.tempdir, 'recipe') 397 temprecipe = os.path.join(self.tempdir, 'recipe')
376 os.makedirs(temprecipe) 398 os.makedirs(temprecipe)
377 pv = '1.7.3.0' 399 pv = '1.7.4.1'
378 srcuri = 'http://www.dest-unreach.org/socat/download/socat-%s.tar.bz2' % pv 400 srcuri = 'http://www.dest-unreach.org/socat/download/Archive/socat-%s.tar.bz2' % pv
379 result = runCmd('recipetool create %s -o %s' % (srcuri, temprecipe)) 401 result = runCmd('recipetool create %s -o %s' % (srcuri, temprecipe))
380 dirlist = os.listdir(temprecipe) 402 dirlist = os.listdir(temprecipe)
381 if len(dirlist) > 1: 403 if len(dirlist) > 1:
@@ -384,7 +406,7 @@ class RecipetoolTests(RecipetoolBase):
384 self.fail('recipetool did not create recipe file; output:\n%s\ndirlist:\n%s' % (result.output, str(dirlist))) 406 self.fail('recipetool did not create recipe file; output:\n%s\ndirlist:\n%s' % (result.output, str(dirlist)))
385 self.assertEqual(dirlist[0], 'socat_%s.bb' % pv, 'Recipe file incorrectly named') 407 self.assertEqual(dirlist[0], 'socat_%s.bb' % pv, 'Recipe file incorrectly named')
386 checkvars = {} 408 checkvars = {}
387 checkvars['LICENSE'] = set(['Unknown', 'GPLv2']) 409 checkvars['LICENSE'] = set(['Unknown', 'GPL-2.0-only'])
388 checkvars['LIC_FILES_CHKSUM'] = set(['file://COPYING.OpenSSL;md5=5c9bccc77f67a8328ef4ebaf468116f4', 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263']) 410 checkvars['LIC_FILES_CHKSUM'] = set(['file://COPYING.OpenSSL;md5=5c9bccc77f67a8328ef4ebaf468116f4', 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263'])
389 # We don't check DEPENDS since they are variable for this recipe depending on what's in the sysroot 411 # We don't check DEPENDS since they are variable for this recipe depending on what's in the sysroot
390 checkvars['S'] = None 412 checkvars['S'] = None
@@ -400,9 +422,8 @@ class RecipetoolTests(RecipetoolBase):
400 result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri)) 422 result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
401 self.assertTrue(os.path.isfile(recipefile)) 423 self.assertTrue(os.path.isfile(recipefile))
402 checkvars = {} 424 checkvars = {}
403 checkvars['LICENSE'] = set(['LGPLv2.1', 'MPL-1.1']) 425 checkvars['LICENSE'] = set(['LGPL-2.1-only', 'MPL-1.1-only'])
404 checkvars['SRC_URI'] = 'http://taglib.github.io/releases/taglib-${PV}.tar.gz' 426 checkvars['SRC_URI'] = 'http://taglib.github.io/releases/taglib-${PV}.tar.gz'
405 checkvars['SRC_URI[md5sum]'] = 'cee7be0ccfc892fa433d6c837df9522a'
406 checkvars['SRC_URI[sha256sum]'] = 'b6d1a5a610aae6ff39d93de5efd0fdc787aa9e9dc1e7026fa4c961b26563526b' 427 checkvars['SRC_URI[sha256sum]'] = 'b6d1a5a610aae6ff39d93de5efd0fdc787aa9e9dc1e7026fa4c961b26563526b'
407 checkvars['DEPENDS'] = set(['boost', 'zlib']) 428 checkvars['DEPENDS'] = set(['boost', 'zlib'])
408 inherits = ['cmake'] 429 inherits = ['cmake']
@@ -424,77 +445,271 @@ class RecipetoolTests(RecipetoolBase):
424 checkvars = {} 445 checkvars = {}
425 checkvars['SUMMARY'] = 'Node Server Example' 446 checkvars['SUMMARY'] = 'Node Server Example'
426 checkvars['HOMEPAGE'] = 'https://github.com/savoirfairelinux/node-server-example#readme' 447 checkvars['HOMEPAGE'] = 'https://github.com/savoirfairelinux/node-server-example#readme'
427 checkvars['LICENSE'] = set(['MIT', 'ISC', 'Unknown']) 448 checkvars['LICENSE'] = 'BSD-3-Clause & ISC & MIT & Unknown'
428 urls = [] 449 urls = []
429 urls.append('npm://registry.npmjs.org/;package=@savoirfairelinux/node-server-example;version=${PV}') 450 urls.append('npm://registry.npmjs.org/;package=@savoirfairelinux/node-server-example;version=${PV}')
430 urls.append('npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json') 451 urls.append('npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json')
431 checkvars['SRC_URI'] = set(urls) 452 checkvars['SRC_URI'] = set(urls)
432 checkvars['S'] = '${WORKDIR}/npm' 453 checkvars['S'] = '${WORKDIR}/npm'
433 checkvars['LICENSE_${PN}'] = 'MIT' 454 checkvars['LICENSE:${PN}'] = 'MIT'
434 checkvars['LICENSE_${PN}-base64'] = 'Unknown' 455 checkvars['LICENSE:${PN}-base64'] = 'Unknown'
435 checkvars['LICENSE_${PN}-accepts'] = 'MIT' 456 checkvars['LICENSE:${PN}-accepts'] = 'MIT'
436 checkvars['LICENSE_${PN}-inherits'] = 'ISC' 457 checkvars['LICENSE:${PN}-inherits'] = 'ISC'
437 inherits = ['npm'] 458 inherits = ['npm']
438 self._test_recipe_contents(recipefile, checkvars, inherits) 459 self._test_recipe_contents(recipefile, checkvars, inherits)
439 460
    def test_recipetool_create_github(self):
        # Basic test to see if github URL mangling works. Deliberately use an
        # older release of Meson at present so we don't need a toml parser.
        temprecipe = os.path.join(self.tempdir, 'recipe')
        os.makedirs(temprecipe)
        recipefile = os.path.join(temprecipe, 'python3-meson_git.bb')
        srcuri = 'https://github.com/mesonbuild/meson;rev=0.52.1'
        cmd = ['recipetool', 'create', '-o', temprecipe, srcuri]
        result = runCmd(cmd)
        # Include the exact command and its output in the failure message to
        # make autobuilder failures diagnosable from the log alone
        self.assertTrue(os.path.isfile(recipefile), msg="recipe %s not created for command %s, output %s" % (recipefile, " ".join(cmd), result.output))
        checkvars = {}
        checkvars['LICENSE'] = set(['Apache-2.0', "Unknown"])
        # The https github URL should be rewritten to a git:// SRC_URI with an
        # explicit protocol and branch
        checkvars['SRC_URI'] = 'git://github.com/mesonbuild/meson;protocol=https;branch=0.52'
        inherits = ['setuptools3']
        self._test_recipe_contents(recipefile, checkvars, inherits)
453 476
    def test_recipetool_create_python3_setuptools(self):
        # Test creating python3 package from tarball (using setuptools3 class)
        # Use the --no-pypi switch to avoid creating a pypi enabled recipe and
        # and check the created recipe as if it was a more general tarball
        temprecipe = os.path.join(self.tempdir, 'recipe')
        os.makedirs(temprecipe)
        pn = 'python-magic'
        pv = '0.4.15'
        recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv))
        srcuri = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-%s.tar.gz' % pv
        result = runCmd('recipetool create --no-pypi -o %s %s' % (temprecipe, srcuri))
        self.assertTrue(os.path.isfile(recipefile))
        checkvars = {}
        checkvars['LICENSE'] = set(['MIT'])
        checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88'
        # The version component of the URL should have been replaced by ${PV}
        checkvars['SRC_URI'] = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-${PV}.tar.gz'
        checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5'
        inherits = ['setuptools3']
        self._test_recipe_contents(recipefile, checkvars, inherits)
472 496
473 def test_recipetool_create_python3_distutils(self): 497 def test_recipetool_create_python3_setuptools_pypi_tarball(self):
474 # Test creating python3 package from tarball (using distutils3 class) 498 # Test creating python3 package from tarball (using setuptools3 and pypi classes)
499 temprecipe = os.path.join(self.tempdir, 'recipe')
500 os.makedirs(temprecipe)
501 pn = 'python-magic'
502 pv = '0.4.15'
503 recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv))
504 srcuri = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-%s.tar.gz' % pv
505 result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
506 self.assertTrue(os.path.isfile(recipefile))
507 checkvars = {}
508 checkvars['LICENSE'] = set(['MIT'])
509 checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88'
510 checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5'
511 checkvars['PYPI_PACKAGE'] = pn
512 inherits = ['setuptools3', 'pypi']
513 self._test_recipe_contents(recipefile, checkvars, inherits)
514
515 def test_recipetool_create_python3_setuptools_pypi(self):
516 # Test creating python3 package from pypi url (using setuptools3 and pypi classes)
517 # Intentionnaly using setuptools3 class here instead of any of the pep517 class
518 # to avoid the toml dependency and allows this test to run on host autobuilders
519 # with older version of python
475 temprecipe = os.path.join(self.tempdir, 'recipe') 520 temprecipe = os.path.join(self.tempdir, 'recipe')
476 os.makedirs(temprecipe) 521 os.makedirs(temprecipe)
477 pn = 'docutils' 522 pn = 'python-magic'
478 pv = '0.14' 523 pv = '0.4.15'
479 recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv)) 524 recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv))
480 srcuri = 'https://files.pythonhosted.org/packages/84/f4/5771e41fdf52aabebbadecc9381d11dea0fa34e4759b4071244fa094804c/docutils-%s.tar.gz' % pv 525 # First specify the required version in the url
526 srcuri = 'https://pypi.org/project/%s/%s' % (pn, pv)
527 runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
528 self.assertTrue(os.path.isfile(recipefile))
529 checkvars = {}
530 checkvars['LICENSE'] = set(['MIT'])
531 checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88'
532 checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5'
533 checkvars['PYPI_PACKAGE'] = pn
534 inherits = ['setuptools3', "pypi"]
535 self._test_recipe_contents(recipefile, checkvars, inherits)
536
537 # Now specify the version as a recipetool parameter
538 runCmd('rm -rf %s' % recipefile)
539 self.assertFalse(os.path.isfile(recipefile))
540 srcuri = 'https://pypi.org/project/%s' % pn
541 runCmd('recipetool create -o %s %s --version %s' % (temprecipe, srcuri, pv))
542 self.assertTrue(os.path.isfile(recipefile))
543 checkvars = {}
544 checkvars['LICENSE'] = set(['MIT'])
545 checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88'
546 checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5'
547 checkvars['PYPI_PACKAGE'] = pn
548 inherits = ['setuptools3', "pypi"]
549 self._test_recipe_contents(recipefile, checkvars, inherits)
550
551 # Now, try to grab latest version of the package, so we cannot guess the name of the recipe,
552 # unless hardcoding the latest version but it means we will need to update the test for each release,
553 # so use a regexp
554 runCmd('rm -rf %s' % recipefile)
555 self.assertFalse(os.path.isfile(recipefile))
556 recipefile_re = r'%s_(.*)\.bb' % pn
557 result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
558 dirlist = os.listdir(temprecipe)
559 if len(dirlist) > 1:
560 self.fail('recipetool created more than just one file; output:\n%s\ndirlist:\n%s' % (result.output, str(dirlist)))
561 if len(dirlist) < 1 or not os.path.isfile(os.path.join(temprecipe, dirlist[0])):
562 self.fail('recipetool did not create recipe file; output:\n%s\ndirlist:\n%s' % (result.output, str(dirlist)))
563 import re
564 match = re.match(recipefile_re, dirlist[0])
565 self.assertTrue(match)
566 latest_pv = match.group(1)
567 self.assertTrue(latest_pv != pv)
568 recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, latest_pv))
569 # Do not check LIC_FILES_CHKSUM and SRC_URI checksum here to avoid having updating the test on each release
570 checkvars = {}
571 checkvars['LICENSE'] = set(['MIT'])
572 checkvars['PYPI_PACKAGE'] = pn
573 inherits = ['setuptools3', "pypi"]
574 self._test_recipe_contents(recipefile, checkvars, inherits)
575
576 def test_recipetool_create_python3_pep517_setuptools_build_meta(self):
577 # This test require python 3.11 or above for the tomllib module or tomli module to be installed
578 needTomllib(self)
579
580 # Test creating python3 package from tarball (using setuptools.build_meta class)
581 temprecipe = os.path.join(self.tempdir, 'recipe')
582 os.makedirs(temprecipe)
583 pn = 'webcolors'
584 pv = '1.13'
585 recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
586 srcuri = 'https://files.pythonhosted.org/packages/a1/fb/f95560c6a5d4469d9c49e24cf1b5d4d21ffab5608251c6020a965fb7791c/%s-%s.tar.gz' % (pn, pv)
587 result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
588 self.assertTrue(os.path.isfile(recipefile))
589 checkvars = {}
590 checkvars['SUMMARY'] = 'A library for working with the color formats defined by HTML and CSS.'
591 checkvars['LICENSE'] = set(['BSD-3-Clause'])
592 checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=702b1ef12cf66832a88f24c8f2ee9c19'
593 checkvars['SRC_URI[sha256sum]'] = 'c225b674c83fa923be93d235330ce0300373d02885cef23238813b0d5668304a'
594 inherits = ['python_setuptools_build_meta', 'pypi']
595
596 self._test_recipe_contents(recipefile, checkvars, inherits)
597
598 def test_recipetool_create_python3_pep517_poetry_core_masonry_api(self):
599 # This test require python 3.11 or above for the tomllib module or tomli module to be installed
600 needTomllib(self)
601
602 # Test creating python3 package from tarball (using poetry.core.masonry.api class)
603 temprecipe = os.path.join(self.tempdir, 'recipe')
604 os.makedirs(temprecipe)
605 pn = 'iso8601'
606 pv = '2.1.0'
607 recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
608 srcuri = 'https://files.pythonhosted.org/packages/b9/f3/ef59cee614d5e0accf6fd0cbba025b93b272e626ca89fb70a3e9187c5d15/%s-%s.tar.gz' % (pn, pv)
609 result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
610 self.assertTrue(os.path.isfile(recipefile))
611 checkvars = {}
612 checkvars['SUMMARY'] = 'Simple module to parse ISO 8601 dates'
613 checkvars['LICENSE'] = set(['MIT'])
614 checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=aab31f2ef7ba214a5a341eaa47a7f367'
615 checkvars['SRC_URI[sha256sum]'] = '6b1d3829ee8921c4301998c909f7829fa9ed3cbdac0d3b16af2d743aed1ba8df'
616 inherits = ['python_poetry_core', 'pypi']
617
618 self._test_recipe_contents(recipefile, checkvars, inherits)
619
620 def test_recipetool_create_python3_pep517_flit_core_buildapi(self):
621 # This test require python 3.11 or above for the tomllib module or tomli module to be installed
622 needTomllib(self)
623
624 # Test creating python3 package from tarball (using flit_core.buildapi class)
625 temprecipe = os.path.join(self.tempdir, 'recipe')
626 os.makedirs(temprecipe)
627 pn = 'typing-extensions'
628 pv = '4.8.0'
629 recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
630 srcuri = 'https://files.pythonhosted.org/packages/1f/7a/8b94bb016069caa12fc9f587b28080ac33b4fbb8ca369b98bc0a4828543e/typing_extensions-%s.tar.gz' % pv
631 result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
632 self.assertTrue(os.path.isfile(recipefile))
633 checkvars = {}
634 checkvars['SUMMARY'] = 'Backported and Experimental Type Hints for Python 3.8+'
635 checkvars['LICENSE'] = set(['PSF-2.0'])
636 checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=fcf6b249c2641540219a727f35d8d2c2'
637 checkvars['SRC_URI[sha256sum]'] = 'df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef'
638 inherits = ['python_flit_core', 'pypi']
639
640 self._test_recipe_contents(recipefile, checkvars, inherits)
641
642 def test_recipetool_create_python3_pep517_hatchling(self):
643 # This test require python 3.11 or above for the tomllib module or tomli module to be installed
644 needTomllib(self)
645
646 # Test creating python3 package from tarball (using hatchling class)
647 temprecipe = os.path.join(self.tempdir, 'recipe')
648 os.makedirs(temprecipe)
649 pn = 'jsonschema'
650 pv = '4.19.1'
651 recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
652 srcuri = 'https://files.pythonhosted.org/packages/e4/43/087b24516db11722c8687e0caf0f66c7785c0b1c51b0ab951dfde924e3f5/jsonschema-%s.tar.gz' % pv
653 result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
654 self.assertTrue(os.path.isfile(recipefile))
655 checkvars = {}
656 checkvars['SUMMARY'] = 'An implementation of JSON Schema validation for Python'
657 checkvars['HOMEPAGE'] = 'https://github.com/python-jsonschema/jsonschema'
658 checkvars['LICENSE'] = set(['MIT'])
659 checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=7a60a81c146ec25599a3e1dabb8610a8 file://json/LICENSE;md5=9d4de43111d33570c8fe49b4cb0e01af'
660 checkvars['SRC_URI[sha256sum]'] = 'ec84cc37cfa703ef7cd4928db24f9cb31428a5d0fa77747b8b51a847458e0bbf'
661 inherits = ['python_hatchling', 'pypi']
662
663 self._test_recipe_contents(recipefile, checkvars, inherits)
664
665 def test_recipetool_create_python3_pep517_maturin(self):
666 # This test require python 3.11 or above for the tomllib module or tomli module to be installed
667 needTomllib(self)
668
669 # Test creating python3 package from tarball (using maturin class)
670 temprecipe = os.path.join(self.tempdir, 'recipe')
671 os.makedirs(temprecipe)
672 pn = 'pydantic-core'
673 pv = '2.14.5'
674 recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
675 srcuri = 'https://files.pythonhosted.org/packages/64/26/cffb93fe9c6b5a91c497f37fae14a4b073ecbc47fc36a9979c7aa888b245/pydantic_core-%s.tar.gz' % pv
676 result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
677 self.assertTrue(os.path.isfile(recipefile))
678 checkvars = {}
679 checkvars['HOMEPAGE'] = 'https://github.com/pydantic/pydantic-core'
680 checkvars['LICENSE'] = set(['MIT'])
681 checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=ab599c188b4a314d2856b3a55030c75c'
682 checkvars['SRC_URI[sha256sum]'] = '6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71'
683 inherits = ['python_maturin', 'pypi']
684
685 self._test_recipe_contents(recipefile, checkvars, inherits)
686
    def test_recipetool_create_python3_pep517_mesonpy(self):
        # This test require python 3.11 or above for the tomllib module or tomli module to be installed
        needTomllib(self)

        # Test creating python3 package from tarball (using mesonpy class)
        temprecipe = os.path.join(self.tempdir, 'recipe')
        os.makedirs(temprecipe)
        pn = 'siphash24'
        pv = '1.4'
        recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
        srcuri = 'https://files.pythonhosted.org/packages/c2/32/b934a70592f314afcfa86c7f7e388804a8061be65b822e2aa07e573b6477/%s-%s.tar.gz' % (pn, pv)
        result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
        self.assertTrue(os.path.isfile(recipefile))
        checkvars = {}
        # Only the checksum is checked here; the mesonpy backend detection is
        # what this test is really about (see inherits below)
        checkvars['SRC_URI[sha256sum]'] = '7fd65e39b2a7c8c4ddc3a168a687f4610751b0ac2ebb518783c0cdfc30bec4a0'
        inherits = ['python_mesonpy', 'pypi']

        self._test_recipe_contents(recipefile, checkvars, inherits)
491 705
492 def test_recipetool_create_github_tarball(self): 706 def test_recipetool_create_github_tarball(self):
493 # Basic test to ensure github URL mangling doesn't apply to release tarballs 707 # Basic test to ensure github URL mangling doesn't apply to release tarballs.
708 # Deliberately use an older release of Meson at present so we don't need a toml parser.
494 temprecipe = os.path.join(self.tempdir, 'recipe') 709 temprecipe = os.path.join(self.tempdir, 'recipe')
495 os.makedirs(temprecipe) 710 os.makedirs(temprecipe)
496 pv = '0.32.0' 711 pv = '0.52.1'
497 recipefile = os.path.join(temprecipe, 'meson_%s.bb' % pv) 712 recipefile = os.path.join(temprecipe, 'python3-meson_%s.bb' % pv)
498 srcuri = 'https://github.com/mesonbuild/meson/releases/download/%s/meson-%s.tar.gz' % (pv, pv) 713 srcuri = 'https://github.com/mesonbuild/meson/releases/download/%s/meson-%s.tar.gz' % (pv, pv)
499 result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri)) 714 result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
500 self.assertTrue(os.path.isfile(recipefile)) 715 self.assertTrue(os.path.isfile(recipefile))
@@ -504,19 +719,93 @@ class RecipetoolTests(RecipetoolBase):
504 inherits = ['setuptools3'] 719 inherits = ['setuptools3']
505 self._test_recipe_contents(recipefile, checkvars, inherits) 720 self._test_recipe_contents(recipefile, checkvars, inherits)
506 721
    def _test_recipetool_create_git(self, srcuri, branch=None):
        # Basic test to check http git URL mangling works
        # branch, if given, is passed to recipetool via the -B option
        temprecipe = os.path.join(self.tempdir, 'recipe')
        os.makedirs(temprecipe)
        # Recipe name is derived from the last path component of the URL
        name = srcuri.split(';')[0].split('/')[-1]
        recipefile = os.path.join(temprecipe, name + '_git.bb')
        options = ' -B %s' % branch if branch else ''
        result = runCmd('recipetool create -o %s%s "%s"' % (temprecipe, options, srcuri))
        self.assertTrue(os.path.isfile(recipefile))
        checkvars = {}
        checkvars['SRC_URI'] = srcuri
        for scheme in ['http', 'https']:
            if srcuri.startswith(scheme + ":"):
                # http(s) git URLs are expected to be rewritten to git:// with
                # an explicit protocol= parameter
                checkvars['SRC_URI'] = 'git%s;protocol=%s' % (srcuri[len(scheme):], scheme)
        if ';branch=' not in srcuri:
            # recipetool should always add an explicit branch (default master)
            checkvars['SRC_URI'] += ';branch=' + (branch or 'master')
        self._test_recipe_contents(recipefile, checkvars, [])
739
    def test_recipetool_create_git_http(self):
        # http URL, no branch: expect git:// rewrite plus branch=master added
        self._test_recipetool_create_git('http://git.yoctoproject.org/git/matchbox-keyboard')
742
    def test_recipetool_create_git_srcuri_master(self):
        # Full git:// SRC_URI with explicit branch=master already present
        self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard;branch=master;protocol=https')
745
    def test_recipetool_create_git_srcuri_branch(self):
        # Full git:// SRC_URI with a non-default branch embedded in the URI
        self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard;branch=matchbox-keyboard-0-1;protocol=https')
748
    def test_recipetool_create_git_srcbranch(self):
        # Branch supplied via recipetool -B rather than in the SRC_URI itself
        self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard;protocol=https', 'matchbox-keyboard-0-1')
751
752 def _go_urifiy(self, url, version, modulepath = None, pathmajor = None, subdir = None):
753 modulepath = ",path='%s'" % modulepath if len(modulepath) else ''
754 pathmajor = ",pathmajor='%s'" % pathmajor if len(pathmajor) else ''
755 subdir = ",subdir='%s'" % subdir if len(subdir) else ''
756 return "${@go_src_uri('%s','%s'%s%s%s)}" % (url, version, modulepath, pathmajor, subdir)
757
    def test_recipetool_create_go(self):
        # Basic test to check go recipe generation
        # Show full diffs on assertion failures - the expected sets below are long
        self.maxDiff = None

        temprecipe = os.path.join(self.tempdir, 'recipe')
        os.makedirs(temprecipe)

        recipefile = os.path.join(temprecipe, 'recipetool-go-test_git.bb')

        srcuri = 'https://git.yoctoproject.org/recipetool-go-test.git'
        srcrev = "c3e213c01b6c1406b430df03ef0d1ae77de5d2f7"
        srcbranch = "main"

        result = runCmd('recipetool create -o %s %s -S %s -B %s' % (temprecipe, srcuri, srcrev, srcbranch))

        inherits = ['go-mod', 'go-mod-update-modules']

        checkvars = {}
        checkvars['GO_IMPORT'] = "git.yoctoproject.org/recipetool-go-test"
        checkvars['SRC_URI'] = {'git://${GO_IMPORT};protocol=https;nobranch=1;destsuffix=${GO_SRCURI_DESTSUFFIX}'}
        checkvars['LIC_FILES_CHKSUM'] = {
            'file://src/${GO_IMPORT}/LICENSE;md5=4e3933dd47afbf115e484d11385fb3bd',
            'file://src/${GO_IMPORT}/is/LICENSE;md5=62beaee5a116dd1e80161667b1df39ab'
        }

        self._test_recipe_contents(recipefile, checkvars, inherits)
        # The tool should not have hit an internal error while generating
        self.assertNotIn('Traceback', result.output)

        # A licenses .inc file should also be generated for the module deps
        lics_require_file = os.path.join(temprecipe, 'recipetool-go-test-licenses.inc')
        self.assertFileExists(lics_require_file)
        checkvars = {}
        checkvars['LIC_FILES_CHKSUM'] = {'file://pkg/mod/github.com/godbus/dbus/v5@v5.1.0/LICENSE;md5=09042bd5c6c96a2b9e45ddf1bc517eed;spdx=BSD-2-Clause'}
        self._test_recipe_contents(lics_require_file, checkvars, [])

        # ...and a go-mods .inc file listing the gomod:// fetcher entries
        deps_require_file = os.path.join(temprecipe, 'recipetool-go-test-go-mods.inc')
        self.assertFileExists(deps_require_file)
        checkvars = {}
        checkvars['SRC_URI'] = {'gomod://github.com/godbus/dbus/v5;version=v5.1.0;sha256sum=03dfa8e71089a6f477310d15c4d3a036d82d028532881b50fee254358e782ad9'}
        self._test_recipe_contents(deps_require_file, checkvars, [])
797
798class RecipetoolTests(RecipetoolBase):
799
800 @classmethod
801 def setUpClass(cls):
802 import sys
803
804 super(RecipetoolTests, cls).setUpClass()
805 bb_vars = get_bb_vars(['BBPATH'])
806 cls.bbpath = bb_vars['BBPATH']
807 libpath = os.path.join(get_bb_var('COREBASE'), 'scripts', 'lib', 'recipetool')
808 sys.path.insert(0, libpath)
520 809
521 def _copy_file_with_cleanup(self, srcfile, basedstdir, *paths): 810 def _copy_file_with_cleanup(self, srcfile, basedstdir, *paths):
522 dstdir = basedstdir 811 dstdir = basedstdir
@@ -524,7 +813,15 @@ class RecipetoolTests(RecipetoolBase):
524 for p in paths: 813 for p in paths:
525 dstdir = os.path.join(dstdir, p) 814 dstdir = os.path.join(dstdir, p)
526 if not os.path.exists(dstdir): 815 if not os.path.exists(dstdir):
527 os.makedirs(dstdir) 816 try:
817 os.makedirs(dstdir)
818 except PermissionError:
819 return False
820 except OSError as e:
821 if e.errno == errno.EROFS:
822 return False
823 else:
824 raise e
528 if p == "lib": 825 if p == "lib":
529 # Can race with other tests 826 # Can race with other tests
530 self.add_command_to_tearDown('rmdir --ignore-fail-on-non-empty %s' % dstdir) 827 self.add_command_to_tearDown('rmdir --ignore-fail-on-non-empty %s' % dstdir)
@@ -532,8 +829,12 @@ class RecipetoolTests(RecipetoolBase):
532 self.track_for_cleanup(dstdir) 829 self.track_for_cleanup(dstdir)
533 dstfile = os.path.join(dstdir, os.path.basename(srcfile)) 830 dstfile = os.path.join(dstdir, os.path.basename(srcfile))
534 if srcfile != dstfile: 831 if srcfile != dstfile:
535 shutil.copy(srcfile, dstfile) 832 try:
833 shutil.copy(srcfile, dstfile)
834 except PermissionError:
835 return False
536 self.track_for_cleanup(dstfile) 836 self.track_for_cleanup(dstfile)
837 return True
537 838
538 def test_recipetool_load_plugin(self): 839 def test_recipetool_load_plugin(self):
539 """Test that recipetool loads only the first found plugin in BBPATH.""" 840 """Test that recipetool loads only the first found plugin in BBPATH."""
@@ -547,20 +848,148 @@ class RecipetoolTests(RecipetoolBase):
547 plugincontent = fh.readlines() 848 plugincontent = fh.readlines()
548 try: 849 try:
549 self.assertIn('meta-selftest', srcfile, 'wrong bbpath plugin found') 850 self.assertIn('meta-selftest', srcfile, 'wrong bbpath plugin found')
550 for path in searchpath: 851 searchpath = [
551 self._copy_file_with_cleanup(srcfile, path, 'lib', 'recipetool') 852 path for path in searchpath
853 if self._copy_file_with_cleanup(srcfile, path, 'lib', 'recipetool')
854 ]
552 result = runCmd("recipetool --quiet count") 855 result = runCmd("recipetool --quiet count")
553 self.assertEqual(result.output, '1') 856 self.assertEqual(result.output, '1')
554 result = runCmd("recipetool --quiet multiloaded") 857 result = runCmd("recipetool --quiet multiloaded")
555 self.assertEqual(result.output, "no") 858 self.assertEqual(result.output, "no")
556 for path in searchpath: 859 for path in searchpath:
557 result = runCmd("recipetool --quiet bbdir") 860 result = runCmd("recipetool --quiet bbdir")
558 self.assertEqual(result.output, path) 861 self.assertEqual(os.path.realpath(result.output), os.path.realpath(path))
559 os.unlink(os.path.join(result.output, 'lib', 'recipetool', 'bbpath.py')) 862 os.unlink(os.path.join(result.output, 'lib', 'recipetool', 'bbpath.py'))
560 finally: 863 finally:
561 with open(srcfile, 'w') as fh: 864 with open(srcfile, 'w') as fh:
562 fh.writelines(plugincontent) 865 fh.writelines(plugincontent)
563 866
    def test_recipetool_handle_license_vars(self):
        # Unit-test handle_license_vars() from scripts/lib/recipetool/create.py
        # directly (importable thanks to the sys.path setup in setUpClass)
        from create import handle_license_vars
        from unittest.mock import Mock

        commonlicdir = get_bb_var('COMMON_LICENSE_DIR')

        # Minimal datastore stand-in: handle_license_vars() only needs
        # getVar() (to find COMMON_LICENSE_DIR) and expand()
        class DataConnectorCopy(bb.tinfoil.TinfoilDataStoreConnector):
            pass

        d = DataConnectorCopy
        d.getVar = Mock(return_value=commonlicdir)
        d.expand = Mock(side_effect=lambda x: x)

        srctree = tempfile.mkdtemp(prefix='recipetoolqa')
        self.track_for_cleanup(srctree)

        # Multiple licenses
        licenses = ['MIT', 'ISC', 'BSD-3-Clause', 'Apache-2.0']
        for licence in licenses:
            shutil.copy(os.path.join(commonlicdir, licence), os.path.join(srctree, 'LICENSE.' + licence))
        # Duplicate license
        shutil.copy(os.path.join(commonlicdir, 'MIT'), os.path.join(srctree, 'LICENSE'))

        extravalues = {
            # Duplicate and missing licenses
            'LICENSE': 'Zlib & BSD-2-Clause & Zlib',
            'LIC_FILES_CHKSUM': [
                'file://README.md;md5=0123456789abcdef0123456789abcd'
            ]
        }
        lines_before = []
        handled = []
        licvalues = handle_license_vars(srctree, lines_before, handled, extravalues, d)
        # The generated recipe lines: warning banner, note about the original
        # metadata, sorted/deduplicated LICENSE and merged LIC_FILES_CHKSUM
        expected_lines_before = [
            '# WARNING: the following LICENSE and LIC_FILES_CHKSUM values are best guesses - it is',
            '# your responsibility to verify that the values are complete and correct.',
            '# NOTE: Original package / source metadata indicates license is: BSD-2-Clause & Zlib',
            '#',
            '# NOTE: multiple licenses have been detected; they have been separated with &',
            '# in the LICENSE value for now since it is a reasonable assumption that all',
            '# of the licenses apply. If instead there is a choice between the multiple',
            '# licenses then you should change the value to separate the licenses with |',
            '# instead of &. If there is any doubt, check the accompanying documentation',
            '# to determine which situation is applicable.',
            'LICENSE = "Apache-2.0 & BSD-2-Clause & BSD-3-Clause & ISC & MIT & Zlib"',
            'LIC_FILES_CHKSUM = "file://LICENSE;md5=0835ade698e0bcf8506ecda2f7b4f302 \\\n'
            '                    file://LICENSE.Apache-2.0;md5=89aea4e17d99a7cacdbeed46a0096b10 \\\n'
            '                    file://LICENSE.BSD-3-Clause;md5=550794465ba0ec5312d6919e203a55f9 \\\n'
            '                    file://LICENSE.ISC;md5=f3b90e78ea0cffb20bf5cca7947a896d \\\n'
            '                    file://LICENSE.MIT;md5=0835ade698e0bcf8506ecda2f7b4f302 \\\n'
            '                    file://README.md;md5=0123456789abcdef0123456789abcd"',
            ''
        ]
        self.assertEqual(lines_before, expected_lines_before)
        # (license, filename, md5) tuples for every detected license file
        expected_licvalues = [
            ('MIT', 'LICENSE', '0835ade698e0bcf8506ecda2f7b4f302'),
            ('Apache-2.0', 'LICENSE.Apache-2.0', '89aea4e17d99a7cacdbeed46a0096b10'),
            ('BSD-3-Clause', 'LICENSE.BSD-3-Clause', '550794465ba0ec5312d6919e203a55f9'),
            ('ISC', 'LICENSE.ISC', 'f3b90e78ea0cffb20bf5cca7947a896d'),
            ('MIT', 'LICENSE.MIT', '0835ade698e0bcf8506ecda2f7b4f302')
        ]
        self.assertEqual(handled, [('license', expected_licvalues)])
        # The consumed extravalues entries should have been removed
        self.assertEqual(extravalues, {})
        self.assertEqual(licvalues, expected_licvalues)
931
932
    def test_recipetool_split_pkg_licenses(self):
        # Unit-test split_pkg_licenses() from scripts/lib/recipetool/create.py:
        # per-package LICENSE values derived from per-file license findings
        from create import split_pkg_licenses
        licvalues = [
            # Duplicate licenses
            ('BSD-2-Clause', 'x/COPYING', None),
            ('BSD-2-Clause', 'x/LICENSE', None),
            # Multiple licenses
            ('MIT', 'x/a/LICENSE.MIT', None),
            ('ISC', 'x/a/LICENSE.ISC', None),
            # Alternative licenses
            ('(MIT | ISC)', 'x/b/LICENSE', None),
            # Alternative licenses without brackets
            ('MIT | BSD-2-Clause', 'x/c/LICENSE', None),
            # Multi licenses with alternatives
            ('MIT', 'x/d/COPYING', None),
            ('MIT | BSD-2-Clause', 'x/d/LICENSE', None),
            # Multi licenses with alternatives and brackets
            ('Apache-2.0 & ((MIT | ISC) & BSD-3-Clause)', 'x/e/LICENSE', None)
        ]
        # Package name -> subdirectory mapping; license files are assigned to
        # the package whose directory contains them
        packages = {
            '${PN}': '',
            'a': 'x/a',
            'b': 'x/b',
            'c': 'x/c',
            'd': 'x/d',
            'e': 'x/e',
            'f': 'x/f',
            'g': 'x/g',
        }
        fallback_licenses = {
            # Ignored
            'a': 'BSD-3-Clause',
            # Used
            'f': 'BSD-3-Clause'
        }
        outlines = []
        outlicenses = split_pkg_licenses(licvalues, packages, outlines, fallback_licenses)
        # Expected: deduplicated, sorted, alternatives bracketed; fallback used
        # only where no license file was found (f), Unknown otherwise (g)
        expected_outlicenses = {
            '${PN}': ['BSD-2-Clause'],
            'a': ['ISC', 'MIT'],
            'b': ['(ISC | MIT)'],
            'c': ['(BSD-2-Clause | MIT)'],
            'd': ['(BSD-2-Clause | MIT)', 'MIT'],
            'e': ['(ISC | MIT)', 'Apache-2.0', 'BSD-3-Clause'],
            'f': ['BSD-3-Clause'],
            'g': ['Unknown']
        }
        self.assertEqual(outlicenses, expected_outlicenses)
        # The LICENSE:<pkg> recipe lines generated for each package
        expected_outlines = [
            'LICENSE:${PN} = "BSD-2-Clause"',
            'LICENSE:a = "ISC & MIT"',
            'LICENSE:b = "(ISC | MIT)"',
            'LICENSE:c = "(BSD-2-Clause | MIT)"',
            'LICENSE:d = "(BSD-2-Clause | MIT) & MIT"',
            'LICENSE:e = "(ISC | MIT) & Apache-2.0 & BSD-3-Clause"',
            'LICENSE:f = "BSD-3-Clause"',
            'LICENSE:g = "Unknown"'
        ]
        self.assertEqual(outlines, expected_outlines)
992
564 993
565class RecipetoolAppendsrcBase(RecipetoolBase): 994class RecipetoolAppendsrcBase(RecipetoolBase):
566 def _try_recipetool_appendsrcfile(self, testrecipe, newfile, destfile, options, expectedlines, expectedfiles): 995 def _try_recipetool_appendsrcfile(self, testrecipe, newfile, destfile, options, expectedlines, expectedfiles):
@@ -593,9 +1022,9 @@ class RecipetoolAppendsrcBase(RecipetoolBase):
593 for uri in src_uri: 1022 for uri in src_uri:
594 p = urllib.parse.urlparse(uri) 1023 p = urllib.parse.urlparse(uri)
595 if p.scheme == 'file': 1024 if p.scheme == 'file':
596 return p.netloc + p.path 1025 return p.netloc + p.path, uri
597 1026
598 def _test_appendsrcfile(self, testrecipe, filename=None, destdir=None, has_src_uri=True, srcdir=None, newfile=None, options=''): 1027 def _test_appendsrcfile(self, testrecipe, filename=None, destdir=None, has_src_uri=True, srcdir=None, newfile=None, remove=None, machine=None , options=''):
599 if newfile is None: 1028 if newfile is None:
600 newfile = self.testfile 1029 newfile = self.testfile
601 1030
@@ -620,14 +1049,42 @@ class RecipetoolAppendsrcBase(RecipetoolBase):
620 else: 1049 else:
621 destpath = '.' + os.sep 1050 destpath = '.' + os.sep
622 1051
623 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 1052 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
624 '\n'] 1053 '\n']
1054
1055 override = ""
1056 if machine:
1057 options += ' -m %s' % machine
1058 override = ':append:%s' % machine
1059 expectedlines.extend(['PACKAGE_ARCH = "${MACHINE_ARCH}"\n',
1060 '\n'])
1061
1062 if remove:
1063 for entry in remove:
1064 if machine:
1065 entry_remove_line = 'SRC_URI:remove:%s = " %s"\n' % (machine, entry)
1066 else:
1067 entry_remove_line = 'SRC_URI:remove = "%s"\n' % entry
1068
1069 expectedlines.extend([entry_remove_line,
1070 '\n'])
1071
625 if has_src_uri: 1072 if has_src_uri:
626 uri = 'file://%s' % filename 1073 uri = 'file://%s' % filename
627 if expected_subdir: 1074 if expected_subdir:
628 uri += ';subdir=%s' % expected_subdir 1075 uri += ';subdir=%s' % expected_subdir
629 expectedlines[0:0] = ['SRC_URI += "%s"\n' % uri, 1076 if machine:
630 '\n'] 1077 src_uri_line = 'SRC_URI%s = " %s"\n' % (override, uri)
1078 else:
1079 src_uri_line = 'SRC_URI += "%s"\n' % uri
1080
1081 expectedlines.extend([src_uri_line, '\n'])
1082
1083 with open("/tmp/tmp.txt", "w") as file:
1084 print(expectedlines, file=file)
1085
1086 if machine:
1087 filename = '%s/%s' % (machine, filename)
631 1088
632 return self._try_recipetool_appendsrcfile(testrecipe, newfile, destpath, options, expectedlines, [filename]) 1089 return self._try_recipetool_appendsrcfile(testrecipe, newfile, destpath, options, expectedlines, [filename])
633 1090
@@ -674,34 +1131,62 @@ class RecipetoolAppendsrcTests(RecipetoolAppendsrcBase):
674 1131
675 def test_recipetool_appendsrcfile_srcdir_basic(self): 1132 def test_recipetool_appendsrcfile_srcdir_basic(self):
676 testrecipe = 'bash' 1133 testrecipe = 'bash'
677 bb_vars = get_bb_vars(['S', 'WORKDIR'], testrecipe) 1134 bb_vars = get_bb_vars(['S', 'UNPACKDIR'], testrecipe)
678 srcdir = bb_vars['S'] 1135 srcdir = bb_vars['S']
679 workdir = bb_vars['WORKDIR'] 1136 unpackdir = bb_vars['UNPACKDIR']
680 subdir = os.path.relpath(srcdir, workdir) 1137 subdir = os.path.relpath(srcdir, unpackdir)
681 self._test_appendsrcfile(testrecipe, 'a-file', srcdir=subdir) 1138 self._test_appendsrcfile(testrecipe, 'a-file', srcdir=subdir)
682 1139
683 def test_recipetool_appendsrcfile_existing_in_src_uri(self): 1140 def test_recipetool_appendsrcfile_existing_in_src_uri(self):
684 testrecipe = 'base-files' 1141 testrecipe = 'base-files'
685 filepath = self._get_first_file_uri(testrecipe) 1142 filepath,_ = self._get_first_file_uri(testrecipe)
686 self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe) 1143 self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe)
687 self._test_appendsrcfile(testrecipe, filepath, has_src_uri=False) 1144 self._test_appendsrcfile(testrecipe, filepath, has_src_uri=False)
688 1145
689 def test_recipetool_appendsrcfile_existing_in_src_uri_diff_params(self): 1146 def test_recipetool_appendsrcfile_existing_in_src_uri_diff_params(self, machine=None):
690 testrecipe = 'base-files' 1147 testrecipe = 'base-files'
691 subdir = 'tmp' 1148 subdir = 'tmp'
692 filepath = self._get_first_file_uri(testrecipe) 1149 filepath, srcuri_entry = self._get_first_file_uri(testrecipe)
693 self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe) 1150 self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe)
694 1151
695 output = self._test_appendsrcfile(testrecipe, filepath, subdir, has_src_uri=False) 1152 self._test_appendsrcfile(testrecipe, filepath, subdir, machine=machine, remove=[srcuri_entry])
696 self.assertTrue(any('with different parameters' in l for l in output)) 1153
1154 def test_recipetool_appendsrcfile_machine(self):
1155 # A very basic test
1156 self._test_appendsrcfile('base-files', 'a-file', machine='mymachine')
1157
1158 # Force cleaning the output of previous test
1159 self.tearDownLocal()
1160
1161 # A more complex test: existing entry in src_uri with different param
1162 self.test_recipetool_appendsrcfile_existing_in_src_uri_diff_params(machine='mymachine')
1163
1164 def test_recipetool_appendsrcfile_update_recipe_basic(self):
1165 testrecipe = "mtd-utils-selftest"
1166 recipefile = get_bb_var('FILE', testrecipe)
1167 self.assertIn('meta-selftest', recipefile, 'This test expect %s recipe to be in meta-selftest')
1168 cmd = 'recipetool appendsrcfile -W -u meta-selftest %s %s' % (testrecipe, self.testfile)
1169 result = runCmd(cmd)
1170 self.assertNotIn('Traceback', result.output)
1171 self.add_command_to_tearDown('cd %s; rm -f %s/%s; git checkout .' % (os.path.dirname(recipefile), testrecipe, os.path.basename(self.testfile)))
1172
1173 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
1174 ('??', '.*/%s/%s$' % (testrecipe, os.path.basename(self.testfile)))]
1175 self._check_repo_status(os.path.dirname(recipefile), expected_status)
1176 result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile))
1177 removelines = []
1178 addlines = [
1179 'file://%s \\\\' % os.path.basename(self.testfile),
1180 ]
1181 self._check_diff(result.output, addlines, removelines)
697 1182
698 def test_recipetool_appendsrcfile_replace_file_srcdir(self): 1183 def test_recipetool_appendsrcfile_replace_file_srcdir(self):
699 testrecipe = 'bash' 1184 testrecipe = 'bash'
700 filepath = 'Makefile.in' 1185 filepath = 'Makefile.in'
701 bb_vars = get_bb_vars(['S', 'WORKDIR'], testrecipe) 1186 bb_vars = get_bb_vars(['S', 'UNPACKDIR'], testrecipe)
702 srcdir = bb_vars['S'] 1187 srcdir = bb_vars['S']
703 workdir = bb_vars['WORKDIR'] 1188 unpackdir = bb_vars['UNPACKDIR']
704 subdir = os.path.relpath(srcdir, workdir) 1189 subdir = os.path.relpath(srcdir, unpackdir)
705 1190
706 self._test_appendsrcfile(testrecipe, filepath, srcdir=subdir) 1191 self._test_appendsrcfile(testrecipe, filepath, srcdir=subdir)
707 bitbake('%s:do_unpack' % testrecipe) 1192 bitbake('%s:do_unpack' % testrecipe)
diff --git a/meta/lib/oeqa/selftest/cases/recipeutils.py b/meta/lib/oeqa/selftest/cases/recipeutils.py
index 747870383b..e697fd2920 100644
--- a/meta/lib/oeqa/selftest/cases/recipeutils.py
+++ b/meta/lib/oeqa/selftest/cases/recipeutils.py
@@ -1,15 +1,13 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import os
6import re
7import time
8import logging
9import bb.tinfoil 7import bb.tinfoil
10 8
11from oeqa.selftest.case import OESelftestTestCase 9from oeqa.selftest.case import OESelftestTestCase
12from oeqa.utils.commands import runCmd, get_test_layer 10from oeqa.utils.commands import get_test_layer
13 11
14 12
15def setUpModule(): 13def setUpModule():
@@ -40,7 +38,7 @@ class RecipeUtilsTests(OESelftestTestCase):
40 SUMMARY = "Python framework to process interdependent tasks in a pool of workers" 38 SUMMARY = "Python framework to process interdependent tasks in a pool of workers"
41 HOMEPAGE = "http://github.com/gitpython-developers/async" 39 HOMEPAGE = "http://github.com/gitpython-developers/async"
42 SECTION = "devel/python" 40 SECTION = "devel/python"
43-LICENSE = "BSD" 41-LICENSE = "BSD-3-Clause"
44+LICENSE = "something" 42+LICENSE = "something"
45 LIC_FILES_CHKSUM = "file://PKG-INFO;beginline=8;endline=8;md5=88df8e78b9edfd744953862179f2d14e" 43 LIC_FILES_CHKSUM = "file://PKG-INFO;beginline=8;endline=8;md5=88df8e78b9edfd744953862179f2d14e"
46 44
@@ -52,7 +50,7 @@ class RecipeUtilsTests(OESelftestTestCase):
52+SRC_URI[md5sum] = "aaaaaa" 50+SRC_URI[md5sum] = "aaaaaa"
53 SRC_URI[sha256sum] = "ac6894d876e45878faae493b0cf61d0e28ec417334448ac0a6ea2229d8343051" 51 SRC_URI[sha256sum] = "ac6894d876e45878faae493b0cf61d0e28ec417334448ac0a6ea2229d8343051"
54 52
55 RDEPENDS_${PN} += "${PYTHON_PN}-threading" 53 RDEPENDS:${PN} += "python3-threading"
56""" 54"""
57 patchlines = [] 55 patchlines = []
58 for f in patches: 56 for f in patches:
@@ -74,13 +72,13 @@ class RecipeUtilsTests(OESelftestTestCase):
74 expected_patch = """ 72 expected_patch = """
75--- a/recipes-test/recipeutils/recipeutils-test_1.2.bb 73--- a/recipes-test/recipeutils/recipeutils-test_1.2.bb
76+++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb 74+++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb
77@@ -8,6 +8,4 @@ 75@@ -10,6 +10,4 @@
78 76
79 BBCLASSEXTEND = "native nativesdk" 77 BBCLASSEXTEND = "native nativesdk"
80 78
81-SRC_URI += "file://somefile" 79-SRC_URI += "file://somefile"
82- 80-
83 SRC_URI_append = " file://anotherfile" 81 SRC_URI:append = " file://anotherfile"
84""" 82"""
85 patchlines = [] 83 patchlines = []
86 for f in patches: 84 for f in patches:
@@ -99,13 +97,13 @@ class RecipeUtilsTests(OESelftestTestCase):
99 expected_patch = """ 97 expected_patch = """
100--- a/recipes-test/recipeutils/recipeutils-test_1.2.bb 98--- a/recipes-test/recipeutils/recipeutils-test_1.2.bb
101+++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb 99+++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb
102@@ -8,6 +8,3 @@ 100@@ -10,6 +10,3 @@
103 101
104 BBCLASSEXTEND = "native nativesdk" 102 BBCLASSEXTEND = "native nativesdk"
105 103
106-SRC_URI += "file://somefile" 104-SRC_URI += "file://somefile"
107- 105-
108-SRC_URI_append = " file://anotherfile" 106-SRC_URI:append = " file://anotherfile"
109""" 107"""
110 patchlines = [] 108 patchlines = []
111 for f in patches: 109 for f in patches:
diff --git a/meta/lib/oeqa/selftest/cases/reproducible.py b/meta/lib/oeqa/selftest/cases/reproducible.py
index 0d0259477e..f06027cb03 100644
--- a/meta/lib/oeqa/selftest/cases/reproducible.py
+++ b/meta/lib/oeqa/selftest/cases/reproducible.py
@@ -9,35 +9,13 @@ import bb.utils
9import functools 9import functools
10import multiprocessing 10import multiprocessing
11import textwrap 11import textwrap
12import json
13import unittest
14import tempfile 12import tempfile
15import shutil 13import shutil
16import stat 14import stat
17import os 15import os
18import datetime 16import datetime
19 17
20# For sample packages, see:
21# https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20201127-0t7wr_oo/
22# https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20201127-4s9ejwyp/
23# https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20201127-haiwdlbr/
24# https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20201127-hwds3mcl/
25# https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20201203-sua0pzvc/
26# (both packages/ and packages-excluded/)
27
28# ruby-ri-docs, meson:
29#https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20210215-0_td9la2/packages/diff-html/
30exclude_packages = [ 18exclude_packages = [
31 'glide',
32 'go-dep',
33 'go-helloworld',
34 'go-runtime',
35 'go_',
36 'go-',
37 'meson',
38 'ovmf-shell-efi',
39 'perf',
40 'ruby-ri-docs'
41 ] 19 ]
42 20
43def is_excluded(package): 21def is_excluded(package):
@@ -65,13 +43,14 @@ class CompareResult(object):
65 return (self.status, self.test) < (other.status, other.test) 43 return (self.status, self.test) < (other.status, other.test)
66 44
67class PackageCompareResults(object): 45class PackageCompareResults(object):
68 def __init__(self): 46 def __init__(self, exclusions):
69 self.total = [] 47 self.total = []
70 self.missing = [] 48 self.missing = []
71 self.different = [] 49 self.different = []
72 self.different_excluded = [] 50 self.different_excluded = []
73 self.same = [] 51 self.same = []
74 self.active_exclusions = set() 52 self.active_exclusions = set()
53 exclude_packages.extend((exclusions or "").split())
75 54
76 def add_result(self, r): 55 def add_result(self, r):
77 self.total.append(r) 56 self.total.append(r)
@@ -118,8 +97,11 @@ def compare_file(reference, test, diffutils_sysroot):
118 result.status = SAME 97 result.status = SAME
119 return result 98 return result
120 99
121def run_diffoscope(a_dir, b_dir, html_dir, **kwargs): 100def run_diffoscope(a_dir, b_dir, html_dir, max_report_size=0, max_diff_block_lines=1024, max_diff_block_lines_saved=0, **kwargs):
122 return runCmd(['diffoscope', '--no-default-limits', '--exclude-directory-metadata', 'yes', '--html-dir', html_dir, a_dir, b_dir], 101 return runCmd(['diffoscope', '--no-default-limits', '--max-report-size', str(max_report_size),
102 '--max-diff-block-lines-saved', str(max_diff_block_lines_saved),
103 '--max-diff-block-lines', str(max_diff_block_lines),
104 '--exclude-directory-metadata', 'yes', '--html-dir', html_dir, a_dir, b_dir],
123 **kwargs) 105 **kwargs)
124 106
125class DiffoscopeTests(OESelftestTestCase): 107class DiffoscopeTests(OESelftestTestCase):
@@ -149,10 +131,21 @@ class ReproducibleTests(OESelftestTestCase):
149 131
150 package_classes = ['deb', 'ipk', 'rpm'] 132 package_classes = ['deb', 'ipk', 'rpm']
151 133
134 # Maximum report size, in bytes
135 max_report_size = 250 * 1024 * 1024
136
137 # Maximum diff blocks size, in lines
138 max_diff_block_lines = 1024
139 # Maximum diff blocks size (saved in memory), in lines
140 max_diff_block_lines_saved = max_diff_block_lines
141
152 # targets are the things we want to test the reproducibility of 142 # targets are the things we want to test the reproducibility of
153 targets = ['core-image-minimal', 'core-image-sato', 'core-image-full-cmdline', 'core-image-weston', 'world'] 143 # Have to add the virtual targets manually for now as builds may or may not include them as they're exclude from world
144 targets = ['core-image-minimal', 'core-image-sato', 'core-image-full-cmdline', 'core-image-weston', 'world', 'virtual/librpc', 'virtual/libsdl2', 'virtual/crypt']
145
154 # sstate targets are things to pull from sstate to potentially cut build/debugging time 146 # sstate targets are things to pull from sstate to potentially cut build/debugging time
155 sstate_targets = [] 147 sstate_targets = []
148
156 save_results = False 149 save_results = False
157 if 'OEQA_DEBUGGING_SAVED_OUTPUT' in os.environ: 150 if 'OEQA_DEBUGGING_SAVED_OUTPUT' in os.environ:
158 save_results = os.environ['OEQA_DEBUGGING_SAVED_OUTPUT'] 151 save_results = os.environ['OEQA_DEBUGGING_SAVED_OUTPUT']
@@ -167,20 +160,40 @@ class ReproducibleTests(OESelftestTestCase):
167 160
168 def setUpLocal(self): 161 def setUpLocal(self):
169 super().setUpLocal() 162 super().setUpLocal()
170 needed_vars = ['TOPDIR', 'TARGET_PREFIX', 'BB_NUMBER_THREADS'] 163 needed_vars = [
164 'TOPDIR',
165 'TARGET_PREFIX',
166 'BB_NUMBER_THREADS',
167 'BB_HASHSERVE',
168 'OEQA_REPRODUCIBLE_TEST_PACKAGE',
169 'OEQA_REPRODUCIBLE_TEST_TARGET',
170 'OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS',
171 'OEQA_REPRODUCIBLE_EXCLUDED_PACKAGES',
172 'OEQA_REPRODUCIBLE_TEST_LEAF_TARGETS',
173 ]
171 bb_vars = get_bb_vars(needed_vars) 174 bb_vars = get_bb_vars(needed_vars)
172 for v in needed_vars: 175 for v in needed_vars:
173 setattr(self, v.lower(), bb_vars[v]) 176 setattr(self, v.lower(), bb_vars[v])
174 177
178 if bb_vars['OEQA_REPRODUCIBLE_TEST_PACKAGE']:
179 self.package_classes = bb_vars['OEQA_REPRODUCIBLE_TEST_PACKAGE'].split()
180
181 if bb_vars['OEQA_REPRODUCIBLE_TEST_TARGET'] or bb_vars['OEQA_REPRODUCIBLE_TEST_LEAF_TARGETS']:
182 self.targets = (bb_vars['OEQA_REPRODUCIBLE_TEST_TARGET'] or "").split() + (bb_vars['OEQA_REPRODUCIBLE_TEST_LEAF_TARGETS'] or "").split()
183
184 if bb_vars['OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS']:
185 self.sstate_targets = bb_vars['OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS'].split()
186
187 if bb_vars['OEQA_REPRODUCIBLE_TEST_LEAF_TARGETS']:
188 # Setup to build every DEPENDS of leaf recipes using sstate
189 for leaf_recipe in bb_vars['OEQA_REPRODUCIBLE_TEST_LEAF_TARGETS'].split():
190 self.sstate_targets.extend(get_bb_var('DEPENDS', leaf_recipe).split())
191
175 self.extraresults = {} 192 self.extraresults = {}
176 self.extraresults.setdefault('reproducible.rawlogs', {})['log'] = ''
177 self.extraresults.setdefault('reproducible', {}).setdefault('files', {}) 193 self.extraresults.setdefault('reproducible', {}).setdefault('files', {})
178 194
179 def append_to_log(self, msg):
180 self.extraresults['reproducible.rawlogs']['log'] += msg
181
182 def compare_packages(self, reference_dir, test_dir, diffutils_sysroot): 195 def compare_packages(self, reference_dir, test_dir, diffutils_sysroot):
183 result = PackageCompareResults() 196 result = PackageCompareResults(self.oeqa_reproducible_excluded_packages)
184 197
185 old_cwd = os.getcwd() 198 old_cwd = os.getcwd()
186 try: 199 try:
@@ -205,7 +218,7 @@ class ReproducibleTests(OESelftestTestCase):
205 218
206 def write_package_list(self, package_class, name, packages): 219 def write_package_list(self, package_class, name, packages):
207 self.extraresults['reproducible']['files'].setdefault(package_class, {})[name] = [ 220 self.extraresults['reproducible']['files'].setdefault(package_class, {})[name] = [
208 {'reference': p.reference, 'test': p.test} for p in packages] 221 p.reference.split("/./")[1] for p in packages]
209 222
210 def copy_file(self, source, dest): 223 def copy_file(self, source, dest):
211 bb.utils.mkdirhier(os.path.dirname(dest)) 224 bb.utils.mkdirhier(os.path.dirname(dest))
@@ -217,14 +230,11 @@ class ReproducibleTests(OESelftestTestCase):
217 tmpdir = os.path.join(self.topdir, name, 'tmp') 230 tmpdir = os.path.join(self.topdir, name, 'tmp')
218 if os.path.exists(tmpdir): 231 if os.path.exists(tmpdir):
219 bb.utils.remove(tmpdir, recurse=True) 232 bb.utils.remove(tmpdir, recurse=True)
220
221 config = textwrap.dedent('''\ 233 config = textwrap.dedent('''\
222 INHERIT += "reproducible_build"
223 PACKAGE_CLASSES = "{package_classes}" 234 PACKAGE_CLASSES = "{package_classes}"
224 INHIBIT_PACKAGE_STRIP = "1"
225 TMPDIR = "{tmpdir}" 235 TMPDIR = "{tmpdir}"
226 LICENSE_FLAGS_WHITELIST = "commercial" 236 LICENSE_FLAGS_ACCEPTED = "commercial"
227 DISTRO_FEATURES_append = ' systemd pam' 237 DISTRO_FEATURES:append = ' pam'
228 USERADDEXTENSION = "useradd-staticids" 238 USERADDEXTENSION = "useradd-staticids"
229 USERADD_ERROR_DYNAMIC = "skip" 239 USERADD_ERROR_DYNAMIC = "skip"
230 USERADD_UID_TABLES += "files/static-passwd" 240 USERADD_UID_TABLES += "files/static-passwd"
@@ -232,25 +242,70 @@ class ReproducibleTests(OESelftestTestCase):
232 ''').format(package_classes=' '.join('package_%s' % c for c in self.package_classes), 242 ''').format(package_classes=' '.join('package_%s' % c for c in self.package_classes),
233 tmpdir=tmpdir) 243 tmpdir=tmpdir)
234 244
245 # Export BB_CONSOLELOG to the calling function and make it constant to
246 # avoid a case where bitbake would get a timestamp-based filename but
247 # oe-selftest would, later, get another.
248 capture_vars.append("BB_CONSOLELOG")
249 config += 'BB_CONSOLELOG = "${LOG_DIR}/cooker/${MACHINE}/console.log"\n'
250
251 # We want different log files for each build, but a persistent bitbake
252 # may reuse the previous log file so restart the bitbake server.
253 bitbake("--kill-server")
254
255 def print_condensed_error_log(logs, context_lines=10, tail_lines=20):
256 """Prints errors with context and the end of the log."""
257
258 logs = logs.split("\n")
259 for i, line in enumerate(logs):
260 if line.startswith("ERROR"):
261 self.logger.info("Found ERROR (line %d):" % (i + 1))
262 for l in logs[i-context_lines:i+context_lines]:
263 self.logger.info(" " + l)
264
265 self.logger.info("End of log:")
266 for l in logs[-tail_lines:]:
267 self.logger.info(" " + l)
268
269 bitbake_failure_count = 0
235 if not use_sstate: 270 if not use_sstate:
236 if self.sstate_targets: 271 if self.sstate_targets:
237 self.logger.info("Building prebuild for %s (sstate allowed)..." % (name)) 272 self.logger.info("Building prebuild for %s (sstate allowed)..." % (name))
238 self.write_config(config) 273 self.write_config(config)
239 bitbake(' '.join(self.sstate_targets)) 274 try:
275 bitbake("--continue "+' '.join(self.sstate_targets))
276 except AssertionError as e:
277 bitbake_failure_count += 1
278 self.logger.error("Bitbake failed! but keep going... Log:")
279 print_condensed_error_log(str(e))
240 280
241 # This config fragment will disable using shared and the sstate 281 # This config fragment will disable using shared and the sstate
242 # mirror, forcing a complete build from scratch 282 # mirror, forcing a complete build from scratch
243 config += textwrap.dedent('''\ 283 config += textwrap.dedent('''\
244 SSTATE_DIR = "${TMPDIR}/sstate" 284 SSTATE_DIR = "${TMPDIR}/sstate"
245 SSTATE_MIRRORS = "" 285 SSTATE_MIRRORS = "file://.*/.*-native.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH file://.*/.*-cross.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH"
246 ''') 286 ''')
247 287
248 self.logger.info("Building %s (sstate%s allowed)..." % (name, '' if use_sstate else ' NOT')) 288 self.logger.info("Building %s (sstate%s allowed)..." % (name, '' if use_sstate else ' NOT'))
249 self.write_config(config) 289 self.write_config(config)
250 d = get_bb_vars(capture_vars) 290 d = get_bb_vars(capture_vars)
251 # targets used to be called images 291 try:
252 bitbake(' '.join(getattr(self, 'images', self.targets))) 292 # targets used to be called images
253 return d 293 bitbake("--continue "+' '.join(getattr(self, 'images', self.targets)))
294 except AssertionError as e:
295 bitbake_failure_count += 1
296 self.logger.error("Bitbake failed! but keep going... Log:")
297 print_condensed_error_log(str(e))
298
299 # The calling function expects the existence of the deploy
300 # directories containing the packages.
301 # If bitbake failed to create them, do it manually
302 for c in self.package_classes:
303 deploy = d['DEPLOY_DIR_' + c.upper()]
304 if not os.path.exists(deploy):
305 self.logger.info("Manually creating %s" % deploy)
306 bb.utils.mkdirhier(deploy)
307
308 return (d, bitbake_failure_count)
254 309
255 def test_reproducible_builds(self): 310 def test_reproducible_builds(self):
256 def strip_topdir(s): 311 def strip_topdir(s):
@@ -272,15 +327,30 @@ class ReproducibleTests(OESelftestTestCase):
272 os.chmod(save_dir, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH) 327 os.chmod(save_dir, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
273 self.logger.info('Non-reproducible packages will be copied to %s', save_dir) 328 self.logger.info('Non-reproducible packages will be copied to %s', save_dir)
274 329
275 vars_A = self.do_test_build('reproducibleA', self.build_from_sstate) 330 # The below bug shows that a few reproducible issues are depends on build dir path length.
331 # https://bugzilla.yoctoproject.org/show_bug.cgi?id=15554
332 # So, the reproducibleA & reproducibleB directories are changed to reproducibleA & reproducibleB-extended to have different size.
276 333
277 vars_B = self.do_test_build('reproducibleB', False) 334 fails = []
335 vars_list = [None, None]
336
337 for i, (name, use_sstate) in enumerate(
338 (('reproducibleA', self.build_from_sstate),
339 ('reproducibleB-extended', False))):
340 (variables, bitbake_failure_count) = self.do_test_build(name, use_sstate)
341 if bitbake_failure_count > 0:
342 self.logger.error('%s build failed. Trying to compute built packages differences but the test will fail.' % name)
343 fails.append("Bitbake %s failure" % name)
344 if self.save_results:
345 failure_log_path = os.path.join(save_dir, "bitbake-%s.log" % name)
346 self.logger.info('Failure log for %s will be copied to %s'% (name, failure_log_path))
347 self.copy_file(variables["BB_CONSOLELOG"], failure_log_path)
348 vars_list[i] = variables
278 349
350 vars_A, vars_B = vars_list
279 # NOTE: The temp directories from the reproducible build are purposely 351 # NOTE: The temp directories from the reproducible build are purposely
280 # kept after the build so it can be diffed for debugging. 352 # kept after the build so it can be diffed for debugging.
281 353
282 fails = []
283
284 for c in self.package_classes: 354 for c in self.package_classes:
285 with self.subTest(package_class=c): 355 with self.subTest(package_class=c):
286 package_class = 'package_' + c 356 package_class = 'package_' + c
@@ -293,8 +363,6 @@ class ReproducibleTests(OESelftestTestCase):
293 363
294 self.logger.info('Reproducibility summary for %s: %s' % (c, result)) 364 self.logger.info('Reproducibility summary for %s: %s' % (c, result))
295 365
296 self.append_to_log('\n'.join("%s: %s" % (r.status, r.test) for r in result.total))
297
298 self.write_package_list(package_class, 'missing', result.missing) 366 self.write_package_list(package_class, 'missing', result.missing)
299 self.write_package_list(package_class, 'different', result.different) 367 self.write_package_list(package_class, 'different', result.different)
300 self.write_package_list(package_class, 'different_excluded', result.different_excluded) 368 self.write_package_list(package_class, 'different_excluded', result.different_excluded)
@@ -309,9 +377,13 @@ class ReproducibleTests(OESelftestTestCase):
309 self.copy_file(d.reference, '/'.join([save_dir, 'packages-excluded', strip_topdir(d.reference)])) 377 self.copy_file(d.reference, '/'.join([save_dir, 'packages-excluded', strip_topdir(d.reference)]))
310 self.copy_file(d.test, '/'.join([save_dir, 'packages-excluded', strip_topdir(d.test)])) 378 self.copy_file(d.test, '/'.join([save_dir, 'packages-excluded', strip_topdir(d.test)]))
311 379
312 if result.missing or result.different: 380 if result.different:
313 fails.append("The following %s packages are missing or different and not in exclusion list: %s" % 381 fails.append("The following %s packages are different and not in exclusion list:\n%s" %
314 (c, '\n'.join(r.test for r in (result.missing + result.different)))) 382 (c, '\n'.join(r.test for r in (result.different))))
383
384 if result.missing and len(self.sstate_targets) == 0:
385 fails.append("The following %s packages are missing and not in exclusion list:\n%s" %
386 (c, '\n'.join(r.test for r in (result.missing))))
315 387
316 # Clean up empty directories 388 # Clean up empty directories
317 if self.save_results: 389 if self.save_results:
@@ -325,7 +397,9 @@ class ReproducibleTests(OESelftestTestCase):
325 # Copy jquery to improve the diffoscope output usability 397 # Copy jquery to improve the diffoscope output usability
326 self.copy_file(os.path.join(jquery_sysroot, 'usr/share/javascript/jquery/jquery.min.js'), os.path.join(package_html_dir, 'jquery.js')) 398 self.copy_file(os.path.join(jquery_sysroot, 'usr/share/javascript/jquery/jquery.min.js'), os.path.join(package_html_dir, 'jquery.js'))
327 399
328 run_diffoscope('reproducibleA', 'reproducibleB', package_html_dir, 400 run_diffoscope('reproducibleA', 'reproducibleB-extended', package_html_dir, max_report_size=self.max_report_size,
401 max_diff_block_lines_saved=self.max_diff_block_lines_saved,
402 max_diff_block_lines=self.max_diff_block_lines,
329 native_sysroot=diffoscope_sysroot, ignore_status=True, cwd=package_dir) 403 native_sysroot=diffoscope_sysroot, ignore_status=True, cwd=package_dir)
330 404
331 if fails: 405 if fails:
diff --git a/meta/lib/oeqa/selftest/cases/resulttooltests.py b/meta/lib/oeqa/selftest/cases/resulttooltests.py
index dac5c46801..c3303f3fbb 100644
--- a/meta/lib/oeqa/selftest/cases/resulttooltests.py
+++ b/meta/lib/oeqa/selftest/cases/resulttooltests.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -69,7 +71,7 @@ class ResultToolTests(OESelftestTestCase):
69 self.assertTrue('target_result1' in results['runtime/mydistro/qemux86/image'], msg="Pair not correct:%s" % results) 71 self.assertTrue('target_result1' in results['runtime/mydistro/qemux86/image'], msg="Pair not correct:%s" % results)
70 self.assertTrue('target_result3' in results['runtime/mydistro/qemux86-64/image'], msg="Pair not correct:%s" % results) 72 self.assertTrue('target_result3' in results['runtime/mydistro/qemux86-64/image'], msg="Pair not correct:%s" % results)
71 73
72 def test_regrresion_can_get_regression_result(self): 74 def test_regression_can_get_regression_result(self):
73 base_result_data = {'result': {'test1': {'status': 'PASSED'}, 75 base_result_data = {'result': {'test1': {'status': 'PASSED'},
74 'test2': {'status': 'PASSED'}, 76 'test2': {'status': 'PASSED'},
75 'test3': {'status': 'FAILED'}, 77 'test3': {'status': 'FAILED'},
@@ -96,3 +98,278 @@ class ResultToolTests(OESelftestTestCase):
96 resultutils.append_resultsdata(results, ResultToolTests.target_results_data, configmap=resultutils.flatten_map) 98 resultutils.append_resultsdata(results, ResultToolTests.target_results_data, configmap=resultutils.flatten_map)
97 self.assertEqual(len(results[''].keys()), 5, msg="Flattened results not correct %s" % str(results)) 99 self.assertEqual(len(results[''].keys()), 5, msg="Flattened results not correct %s" % str(results))
98 100
101 def test_results_without_metadata_can_be_compared(self):
102 base_configuration = {"configuration": {
103 "TEST_TYPE": "oeselftest",
104 "TESTSERIES": "series1",
105 "IMAGE_BASENAME": "image",
106 "IMAGE_PKGTYPE": "ipk",
107 "DISTRO": "mydistro",
108 "MACHINE": "qemux86",
109 "STARTTIME": 1672527600
110 }, "result": {}}
111 target_configuration = {"configuration": {
112 "TEST_TYPE": "oeselftest",
113 "TESTSERIES": "series1",
114 "IMAGE_BASENAME": "image",
115 "IMAGE_PKGTYPE": "ipk",
116 "DISTRO": "mydistro",
117 "MACHINE": "qemux86",
118 "STARTTIME": 1672527600
119 }, "result": {}}
120 self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
121 msg="incorrect metadata filtering, tests without metadata should be compared")
122
123 def test_target_result_with_missing_metadata_can_not_be_compared(self):
124 base_configuration = {"configuration": {
125 "TEST_TYPE": "oeselftest",
126 "TESTSERIES": "series1",
127 "IMAGE_BASENAME": "image",
128 "IMAGE_PKGTYPE": "ipk",
129 "DISTRO": "mydistro",
130 "MACHINE": "qemux86",
131 "OESELFTEST_METADATA": {
132 "run_all_tests": True,
133 "run_tests": None,
134 "skips": None,
135 "machine": None,
136 "select_tags": ["toolchain-user", "toolchain-system"],
137 "exclude_tags": None
138 }}, "result": {}}
139 target_configuration = {"configuration": {"TEST_TYPE": "oeselftest",
140 "TESTSERIES": "series1",
141 "IMAGE_BASENAME": "image",
142 "IMAGE_PKGTYPE": "ipk",
143 "DISTRO": "mydistro",
144 "MACHINE": "qemux86",
145 "STARTTIME": 1672527600
146 }, "result": {}}
147 self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
148 msg="incorrect metadata filtering, tests should not be compared")
149
150 def test_results_with_matching_metadata_can_be_compared(self):
151 base_configuration = {"configuration": {
152 "TEST_TYPE": "oeselftest",
153 "TESTSERIES": "series1",
154 "IMAGE_BASENAME": "image",
155 "IMAGE_PKGTYPE": "ipk",
156 "DISTRO": "mydistro",
157 "MACHINE": "qemux86",
158 "STARTTIME": 1672527600,
159 "OESELFTEST_METADATA": {"run_all_tests": True,
160 "run_tests": None,
161 "skips": None,
162 "machine": None,
163 "select_tags": ["toolchain-user", "toolchain-system"],
164 "exclude_tags": None}
165 }, "result": {}}
166 target_configuration = {"configuration": {
167 "TEST_TYPE": "oeselftest",
168 "TESTSERIES": "series1",
169 "IMAGE_BASENAME": "image",
170 "IMAGE_PKGTYPE": "ipk",
171 "DISTRO": "mydistro",
172 "MACHINE": "qemux86",
173 "STARTTIME": 1672527600,
174 "OESELFTEST_METADATA": {"run_all_tests": True,
175 "run_tests": None,
176 "skips": None,
177 "machine": None,
178 "select_tags": ["toolchain-user", "toolchain-system"],
179 "exclude_tags": None}
180 }, "result": {}}
181 self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
182 msg="incorrect metadata filtering, tests with matching metadata should be compared")
183
184 def test_results_with_mismatching_metadata_can_not_be_compared(self):
185 base_configuration = {"configuration": {
186 "TEST_TYPE": "oeselftest",
187 "TESTSERIES": "series1",
188 "IMAGE_BASENAME": "image",
189 "IMAGE_PKGTYPE": "ipk",
190 "DISTRO": "mydistro",
191 "MACHINE": "qemux86",
192 "STARTTIME": 1672527600,
193 "OESELFTEST_METADATA": {"run_all_tests": True,
194 "run_tests": None,
195 "skips": None,
196 "machine": None,
197 "select_tags": ["toolchain-user", "toolchain-system"],
198 "exclude_tags": None}
199 }, "result": {}}
200 target_configuration = {"configuration": {
201 "TEST_TYPE": "oeselftest",
202 "TESTSERIES": "series1",
203 "IMAGE_BASENAME": "image",
204 "IMAGE_PKGTYPE": "ipk",
205 "DISTRO": "mydistro",
206 "MACHINE": "qemux86",
207 "STARTTIME": 1672527600,
208 "OESELFTEST_METADATA": {"run_all_tests": True,
209 "run_tests": None,
210 "skips": None,
211 "machine": None,
212 "select_tags": ["machine"],
213 "exclude_tags": None}
214 }, "result": {}}
215 self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
216 msg="incorrect metadata filtering, tests with mismatching metadata should not be compared")
217
218 def test_metadata_matching_is_only_checked_for_relevant_test_type(self):
219 base_configuration = {"configuration": {"TEST_TYPE": "runtime",
220 "TESTSERIES": "series1",
221 "IMAGE_BASENAME": "image",
222 "IMAGE_PKGTYPE": "ipk",
223 "DISTRO": "mydistro",
224 "MACHINE": "qemux86",
225 "STARTTIME": 1672527600,
226 "OESELFTEST_METADATA": {"run_all_tests": True,
227 "run_tests": None,
228 "skips": None,
229 "machine": None,
230 "select_tags": ["toolchain-user", "toolchain-system"],
231 "exclude_tags": None}}, "result": {}}
232 target_configuration = {"configuration": {"TEST_TYPE": "runtime",
233 "TESTSERIES": "series1",
234 "IMAGE_BASENAME": "image",
235 "IMAGE_PKGTYPE": "ipk",
236 "DISTRO": "mydistro",
237 "MACHINE": "qemux86",
238 "STARTTIME": 1672527600,
239 "OESELFTEST_METADATA": {"run_all_tests": True,
240 "run_tests": None,
241 "skips": None,
242 "machine": None,
243 "select_tags": ["machine"],
244 "exclude_tags": None}}, "result": {}}
245 self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
246 msg="incorrect metadata filtering, %s tests should be compared" % base_configuration['configuration']['TEST_TYPE'])
247
248 def test_machine_matches(self):
249 base_configuration = {"configuration": {
250 "TEST_TYPE": "runtime",
251 "MACHINE": "qemux86"}, "result": {}}
252 target_configuration = {"configuration": {
253 "TEST_TYPE": "runtime",
254 "MACHINE": "qemux86"
255 }, "result": {}}
256 self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
257 msg="incorrect machine filtering, identical machine tests should be compared")
258
259 def test_machine_mismatches(self):
260 base_configuration = {"configuration": {
261 "TEST_TYPE": "runtime",
262 "MACHINE": "qemux86"
263 }, "result": {}}
264 target_configuration = {"configuration": {
265 "TEST_TYPE": "runtime",
266 "MACHINE": "qemux86_64"
267 }, "result": {}}
268 self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
269 msg="incorrect machine filtering, mismatching machine tests should not be compared")
270
271 def test_can_not_compare_non_ltp_tests(self):
272 base_configuration = {"configuration": {
273 "TEST_TYPE": "runtime",
274 "MACHINE": "qemux86"
275 }, "result": {
276 "ltpresult_foo": {
277 "status": "PASSED"
278 }}}
279 target_configuration = {"configuration": {
280 "TEST_TYPE": "runtime",
281 "MACHINE": "qemux86_64"
282 }, "result": {
283 "bar": {
284 "status": "PASSED"
285 }}}
286 self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
287 msg="incorrect ltpresult filtering, mismatching ltpresult content should not be compared")
288
289 def test_can_compare_ltp_tests(self):
290 base_configuration = {"configuration": {
291 "TEST_TYPE": "runtime",
292 "MACHINE": "qemux86"
293 }, "result": {
294 "ltpresult_foo": {
295 "status": "PASSED"
296 }}}
297 target_configuration = {"configuration": {
298 "TEST_TYPE": "runtime",
299 "MACHINE": "qemux86"
300 }, "result": {
301 "ltpresult_foo": {
302 "status": "PASSED"
303 }}}
304 self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
305 msg="incorrect ltpresult filtering, matching ltpresult content should be compared")
306
307 def test_can_match_non_static_ptest_names(self):
308 base_configuration = {"a": {
309 "conf_X": {
310 "configuration": {
311 "TEST_TYPE": "runtime",
312 "MACHINE": "qemux86"
313 }, "result": {
314 "ptestresult.lttng-tools.foo_-_bar_-_moo": {
315 "status": "PASSED"
316 },
317 "ptestresult.babeltrace.bar_-_moo_-_foo": {
318 "status": "PASSED"
319 },
320 "ptestresult.babeltrace2.moo_-_foo_-_bar": {
321 "status": "PASSED"
322 },
323 "ptestresult.curl.test_0000__foo_out_of_bar": {
324 "status": "PASSED"
325 },
326 "ptestresult.dbus.test_0000__foo_out_of_bar,_remaining:_00:02,_took_0.032s,_duration:_03:32_": {
327 "status": "PASSED"
328 },
329 "ptestresult.binutils-ld.in testcase /foo/build-st-bar/moo/ctf.exp": {
330 "status": "PASSED"
331 },
332 "ptestresult.gcc-libstdc++-v3.Couldn't create remote directory /tmp/runtest.30975 on target": {
333 "status": "PASSED"
334 },
335 "ptestresult.gcc-libgomp.Couldn't create remote directory /tmp/runtest.3657621 on": {
336 "status": "PASSED"
337 }
338 }}}}
339 target_configuration = {"a": {
340 "conf_Y": {
341 "configuration": {
342 "TEST_TYPE": "runtime",
343 "MACHINE": "qemux86"
344 }, "result": {
345 "ptestresult.lttng-tools.foo_-_yyy_-_zzz": {
346 "status": "PASSED"
347 },
348 "ptestresult.babeltrace.bar_-_zzz_-_xxx": {
349 "status": "PASSED"
350 },
351 "ptestresult.babeltrace2.moo_-_xxx_-_yyy": {
352 "status": "PASSED"
353 },
354 "ptestresult.curl.test_0000__xxx_out_of_yyy": {
355 "status": "PASSED"
356 },
357 "ptestresult.dbus.test_0000__yyy_out_of_zzz,_remaining:_00:03,_took_0.034s,_duration:_03:30_": {
358 "status": "PASSED"
359 },
360 "ptestresult.binutils-ld.in testcase /xxx/build-st-yyy/zzz/ctf.exp": {
361 "status": "PASSED"
362 },
363 "ptestresult.gcc-libstdc++-v3.Couldn't create remote directory /tmp/runtest.45678 on target": {
364 "status": "PASSED"
365 },
366 "ptestresult.gcc-libgomp.Couldn't create remote directory /tmp/runtest.3657621 on": {
367 "status": "PASSED"
368 }
369 }}}}
370 regression.fixup_ptest_names(base_configuration, self.logger)
371 regression.fixup_ptest_names(target_configuration, self.logger)
372 result, resultstring = regression.compare_result(
373 self.logger, "A", "B", base_configuration["a"]["conf_X"], target_configuration["a"]["conf_Y"])
374 self.assertDictEqual(
375 result, {}, msg=f"ptests should be compared: {resultstring}")
diff --git a/meta/lib/oeqa/selftest/cases/retain.py b/meta/lib/oeqa/selftest/cases/retain.py
new file mode 100644
index 0000000000..892be45857
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/retain.py
@@ -0,0 +1,241 @@
1# Tests for retain.bbclass
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: MIT
6#
7
8import os
9import glob
10import fnmatch
11import oe.path
12import shutil
13import tarfile
14from oeqa.utils.commands import bitbake, get_bb_vars
15from oeqa.selftest.case import OESelftestTestCase
16
17class Retain(OESelftestTestCase):
18
19 def test_retain_always(self):
20 """
21 Summary: Test retain class with RETAIN_DIRS_ALWAYS
22 Expected: Archive written to RETAIN_OUTDIR when build of test recipe completes
23 Product: oe-core
24 Author: Paul Eggleton <paul.eggleton@microsoft.com>
25 """
26
27 test_recipe = 'quilt-native'
28
29 features = 'INHERIT += "retain"\n'
30 features += 'RETAIN_DIRS_ALWAYS = "${T}"\n'
31 self.write_config(features)
32
33 bitbake('-c clean %s' % test_recipe)
34
35 bb_vars = get_bb_vars(['RETAIN_OUTDIR', 'TMPDIR'])
36 retain_outdir = bb_vars['RETAIN_OUTDIR'] or ''
37 tmpdir = bb_vars['TMPDIR']
38 if len(retain_outdir) < 5:
39 self.fail('RETAIN_OUTDIR value "%s" is invalid' % retain_outdir)
40 if not oe.path.is_path_parent(tmpdir, retain_outdir):
41 self.fail('RETAIN_OUTDIR (%s) is not underneath TMPDIR (%s)' % (retain_outdir, tmpdir))
42 try:
43 shutil.rmtree(retain_outdir)
44 except FileNotFoundError:
45 pass
46
47 bitbake(test_recipe)
48 if not glob.glob(os.path.join(retain_outdir, '%s_temp_*.tar.gz' % test_recipe)):
49 self.fail('No output archive for %s created' % test_recipe)
50
51
52 def test_retain_failure(self):
53 """
54 Summary: Test retain class default behaviour
55 Expected: Archive written to RETAIN_OUTDIR only when build of test
56 recipe fails, and archive contents are as expected
57 Product: oe-core
58 Author: Paul Eggleton <paul.eggleton@microsoft.com>
59 """
60
61 test_recipe_fail = 'error'
62
63 features = 'INHERIT += "retain"\n'
64 self.write_config(features)
65
66 bb_vars = get_bb_vars(['RETAIN_OUTDIR', 'TMPDIR', 'RETAIN_DIRS_ALWAYS', 'RETAIN_DIRS_GLOBAL_ALWAYS'])
67 if bb_vars['RETAIN_DIRS_ALWAYS']:
68 self.fail('RETAIN_DIRS_ALWAYS is set, this interferes with the test')
69 if bb_vars['RETAIN_DIRS_GLOBAL_ALWAYS']:
70 self.fail('RETAIN_DIRS_GLOBAL_ALWAYS is set, this interferes with the test')
71 retain_outdir = bb_vars['RETAIN_OUTDIR'] or ''
72 tmpdir = bb_vars['TMPDIR']
73 if len(retain_outdir) < 5:
74 self.fail('RETAIN_OUTDIR value "%s" is invalid' % retain_outdir)
75 if not oe.path.is_path_parent(tmpdir, retain_outdir):
76 self.fail('RETAIN_OUTDIR (%s) is not underneath TMPDIR (%s)' % (retain_outdir, tmpdir))
77
78 try:
79 shutil.rmtree(retain_outdir)
80 except FileNotFoundError:
81 pass
82
83 bitbake('-c clean %s' % test_recipe_fail)
84
85 if os.path.exists(retain_outdir):
86 retain_dirlist = os.listdir(retain_outdir)
87 if retain_dirlist:
88 self.fail('RETAIN_OUTDIR should be empty without failure, contents:\n%s' % '\n'.join(retain_dirlist))
89
90 result = bitbake('-c compile %s' % test_recipe_fail, ignore_status=True)
91 if result.status == 0:
92 self.fail('Build of %s did not fail as expected' % test_recipe_fail)
93
94 archives = glob.glob(os.path.join(retain_outdir, '%s_*.tar.gz' % test_recipe_fail))
95 if not archives:
96 self.fail('No output archive for %s created' % test_recipe_fail)
97 if len(archives) > 1:
98 self.fail('More than one archive for %s created' % test_recipe_fail)
99 for archive in archives:
100 found = False
101 archive_prefix = os.path.basename(archive).split('.tar')[0]
102 expected_prefix_start = '%s_workdir' % test_recipe_fail
103 if not archive_prefix.startswith(expected_prefix_start):
104 self.fail('Archive %s name does not start with expected prefix "%s"' % (os.path.basename(archive), expected_prefix_start))
105 with tarfile.open(archive) as tf:
106 for ti in tf:
107 if not fnmatch.fnmatch(ti.name, '%s/*' % archive_prefix):
108 self.fail('File without tarball-named subdirectory within tarball %s: %s' % (os.path.basename(archive), ti.name))
109 if ti.name.endswith('/temp/log.do_compile'):
110 found = True
111 if not found:
112 self.fail('Did not find log.do_compile in output archive %s' % os.path.basename(archive))
113
114
115 def test_retain_global(self):
116 """
117 Summary: Test retain class RETAIN_DIRS_GLOBAL_* behaviour
118 Expected: Ensure RETAIN_DIRS_GLOBAL_ALWAYS always causes an
119 archive to be created, and RETAIN_DIRS_GLOBAL_FAILURE
120 only causes an archive to be created on failure.
121 Also test archive naming (with : character) as an
122 added bonus.
123 Product: oe-core
124 Author: Paul Eggleton <paul.eggleton@microsoft.com>
125 """
126
127 test_recipe = 'quilt-native'
128 test_recipe_fail = 'error'
129
130 features = 'INHERIT += "retain"\n'
131 features += 'RETAIN_DIRS_GLOBAL_ALWAYS = "${LOG_DIR};prefix=buildlogs"\n'
132 features += 'RETAIN_DIRS_GLOBAL_FAILURE = "${STAMPS_DIR}"\n'
133 self.write_config(features)
134
135 bitbake('-c clean %s' % test_recipe)
136
137 bb_vars = get_bb_vars(['RETAIN_OUTDIR', 'TMPDIR', 'STAMPS_DIR'])
138 retain_outdir = bb_vars['RETAIN_OUTDIR'] or ''
139 tmpdir = bb_vars['TMPDIR']
140 if len(retain_outdir) < 5:
141 self.fail('RETAIN_OUTDIR value "%s" is invalid' % retain_outdir)
142 if not oe.path.is_path_parent(tmpdir, retain_outdir):
143 self.fail('RETAIN_OUTDIR (%s) is not underneath TMPDIR (%s)' % (retain_outdir, tmpdir))
144 try:
145 shutil.rmtree(retain_outdir)
146 except FileNotFoundError:
147 pass
148
149 # Test success case
150 bitbake(test_recipe)
151 if not glob.glob(os.path.join(retain_outdir, 'buildlogs_*.tar.gz')):
152 self.fail('No output archive for LOG_DIR created')
153 stamps_dir = bb_vars['STAMPS_DIR']
154 if glob.glob(os.path.join(retain_outdir, '%s_*.tar.gz' % os.path.basename(stamps_dir))):
155 self.fail('Output archive for STAMPS_DIR created when it should not have been')
156
157 # Test failure case
158 result = bitbake('-c compile %s' % test_recipe_fail, ignore_status=True)
159 if result.status == 0:
160 self.fail('Build of %s did not fail as expected' % test_recipe_fail)
161 if not glob.glob(os.path.join(retain_outdir, '%s_*.tar.gz' % os.path.basename(stamps_dir))):
162 self.fail('Output archive for STAMPS_DIR not created')
163 if len(glob.glob(os.path.join(retain_outdir, 'buildlogs_*.tar.gz'))) != 2:
164 self.fail('Should be exactly two buildlogs archives in output dir')
165
166
167 def test_retain_misc(self):
168 """
169 Summary: Test retain class with RETAIN_ENABLED and RETAIN_TARBALL_SUFFIX
170 Expected: Archive written to RETAIN_OUTDIR only when RETAIN_ENABLED is set
171 and archive contents are as expected. Also test archive naming
172 (with : character) as an added bonus.
173 Product: oe-core
174 Author: Paul Eggleton <paul.eggleton@microsoft.com>
175 """
176
177 test_recipe_fail = 'error'
178
179 features = 'INHERIT += "retain"\n'
180 features += 'RETAIN_DIRS_ALWAYS = "${T}"\n'
181 features += 'RETAIN_ENABLED = "0"\n'
182 self.write_config(features)
183
184 bb_vars = get_bb_vars(['RETAIN_OUTDIR', 'TMPDIR'])
185 retain_outdir = bb_vars['RETAIN_OUTDIR'] or ''
186 tmpdir = bb_vars['TMPDIR']
187 if len(retain_outdir) < 5:
188 self.fail('RETAIN_OUTDIR value "%s" is invalid' % retain_outdir)
189 if not oe.path.is_path_parent(tmpdir, retain_outdir):
190 self.fail('RETAIN_OUTDIR (%s) is not underneath TMPDIR (%s)' % (retain_outdir, tmpdir))
191
192 try:
193 shutil.rmtree(retain_outdir)
194 except FileNotFoundError:
195 pass
196
197 bitbake('-c clean %s' % test_recipe_fail)
198 result = bitbake('-c compile %s' % test_recipe_fail, ignore_status=True)
199 if result.status == 0:
200 self.fail('Build of %s did not fail as expected' % test_recipe_fail)
201
202 if os.path.exists(retain_outdir) and os.listdir(retain_outdir):
203 self.fail('RETAIN_OUTDIR should be empty with RETAIN_ENABLED = "0"')
204
205 features = 'INHERIT += "retain"\n'
206 features += 'RETAIN_DIRS_ALWAYS = "${T};prefix=recipelogs"\n'
207 features += 'RETAIN_TARBALL_SUFFIX = "${DATETIME}-testsuffix.tar.bz2"\n'
208 features += 'RETAIN_ENABLED = "1"\n'
209 self.write_config(features)
210
211 result = bitbake('-c compile %s' % test_recipe_fail, ignore_status=True)
212 if result.status == 0:
213 self.fail('Build of %s did not fail as expected' % test_recipe_fail)
214
215 archives = glob.glob(os.path.join(retain_outdir, '%s_*-testsuffix.tar.bz2' % test_recipe_fail))
216 if not archives:
217 self.fail('No output archive for %s created' % test_recipe_fail)
218 if len(archives) != 2:
219 self.fail('Two archives for %s expected, but %d exist' % (test_recipe_fail, len(archives)))
220 recipelogs_found = False
221 workdir_found = False
222 for archive in archives:
223 contents_found = False
224 archive_prefix = os.path.basename(archive).split('.tar')[0]
225 if archive_prefix.startswith('%s_recipelogs' % test_recipe_fail):
226 recipelogs_found = True
227 if archive_prefix.startswith('%s_workdir' % test_recipe_fail):
228 workdir_found = True
229 with tarfile.open(archive, 'r:bz2') as tf:
230 for ti in tf:
231 if not fnmatch.fnmatch(ti.name, '%s/*' % (archive_prefix)):
232 self.fail('File without tarball-named subdirectory within tarball %s: %s' % (os.path.basename(archive), ti.name))
233 if ti.name.endswith('/log.do_compile'):
234 contents_found = True
235 if not contents_found:
236 # Both archives should contain this file
237 self.fail('Did not find log.do_compile in output archive %s' % os.path.basename(archive))
238 if not recipelogs_found:
239 self.fail('No archive with expected "recipelogs" prefix found')
240 if not workdir_found:
241 self.fail('No archive with expected "workdir" prefix found')
diff --git a/meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py b/meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py
new file mode 100644
index 0000000000..44e2c09a6f
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py
@@ -0,0 +1,97 @@
1# SPDX-FileCopyrightText: Huawei Inc.
2#
3# SPDX-License-Identifier: MIT
4
5import os
6import oe
7import unittest
8from oeqa.selftest.case import OESelftestTestCase
9from oeqa.utils.commands import bitbake, get_bb_vars
10
11class ShadowUtilsTidyFiles(OESelftestTestCase):
12 """
13 Check if shadow image rootfs files are tidy.
14
15 The tests are focused on testing the functionality provided by the
16 'tidy_shadowutils_files' rootfs postprocess command (via
17 SORT_PASSWD_POSTPROCESS_COMMAND).
18 """
19
20 def sysconf_build(self):
21 """
22 Verify if shadow tidy files tests are to be run and if yes, build a
23 test image and return its sysconf rootfs path.
24 """
25
26 test_image = "core-image-minimal"
27
28 config = 'IMAGE_CLASSES += "extrausers"\n'
29 config += 'EXTRA_USERS_PARAMS = "groupadd -g 1000 oeqatester; "\n'
30 config += 'EXTRA_USERS_PARAMS += "useradd -p \'\' -u 1000 -N -g 1000 oeqatester; "\n'
31 self.write_config(config)
32
33 vars = get_bb_vars(("IMAGE_ROOTFS", "SORT_PASSWD_POSTPROCESS_COMMAND", "sysconfdir"),
34 test_image)
35 passwd_postprocess_cmd = vars["SORT_PASSWD_POSTPROCESS_COMMAND"]
36 self.assertIsNotNone(passwd_postprocess_cmd)
37 if (passwd_postprocess_cmd.strip() != 'tidy_shadowutils_files;'):
38 raise unittest.SkipTest("Testcase skipped as 'tidy_shadowutils_files' "
39 "rootfs post process command is not the set SORT_PASSWD_POSTPROCESS_COMMAND.")
40
41 rootfs = vars["IMAGE_ROOTFS"]
42 self.assertIsNotNone(rootfs)
43 sysconfdir = vars["sysconfdir"]
44 bitbake(test_image)
45 self.assertIsNotNone(sysconfdir)
46
47 return oe.path.join(rootfs, sysconfdir)
48
49 def test_shadowutils_backup_files(self):
50 """
51 Test that the rootfs doesn't include any known shadow backup files.
52 """
53
54 backup_files = (
55 'group-',
56 'gshadow-',
57 'passwd-',
58 'shadow-',
59 'subgid-',
60 'subuid-',
61 )
62
63 rootfs_sysconfdir = self.sysconf_build()
64 found = []
65 for backup_file in backup_files:
66 backup_filepath = oe.path.join(rootfs_sysconfdir, backup_file)
67 if os.path.exists(backup_filepath):
68 found.append(backup_file)
69 if (found):
70 raise Exception('The following shadow backup files were found in '
71 'the rootfs: %s' % found)
72
73 def test_shadowutils_sorted_files(self):
74 """
75 Test that the 'passwd' and the 'group' shadow utils files are ordered
76 by ID.
77 """
78
79 files = (
80 'passwd',
81 'group',
82 )
83
84 rootfs_sysconfdir = self.sysconf_build()
85 unsorted = []
86 for file in files:
87 filepath = oe.path.join(rootfs_sysconfdir, file)
88 with open(filepath, 'rb') as f:
89 ids = []
90 lines = f.readlines()
91 for line in lines:
92 entries = line.split(b':')
93 ids.append(int(entries[2]))
94 if (ids != sorted(ids)):
95 unsorted.append(file)
96 if (unsorted):
97 raise Exception("The following files were not sorted by ID as expected: %s" % unsorted)
diff --git a/meta/lib/oeqa/selftest/cases/rpmtests.py b/meta/lib/oeqa/selftest/cases/rpmtests.py
new file mode 100644
index 0000000000..902d7dca3d
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/rpmtests.py
@@ -0,0 +1,14 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import bitbake
9
10class BitbakeTests(OESelftestTestCase):
11
12 def test_rpm_filenames(self):
13 test_recipe = "testrpm"
14 bitbake(test_recipe)
diff --git a/meta/lib/oeqa/selftest/cases/runcmd.py b/meta/lib/oeqa/selftest/cases/runcmd.py
index fa6113d7fa..70047ca0ca 100644
--- a/meta/lib/oeqa/selftest/cases/runcmd.py
+++ b/meta/lib/oeqa/selftest/cases/runcmd.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -27,8 +29,8 @@ class RunCmdTests(OESelftestTestCase):
27 29
28 # The delta is intentionally smaller than the timeout, to detect cases where 30 # The delta is intentionally smaller than the timeout, to detect cases where
29 # we incorrectly apply the timeout more than once. 31 # we incorrectly apply the timeout more than once.
30 TIMEOUT = 5 32 TIMEOUT = 10
31 DELTA = 3 33 DELTA = 8
32 34
33 def test_result_okay(self): 35 def test_result_okay(self):
34 result = runCmd("true") 36 result = runCmd("true")
@@ -56,11 +58,11 @@ class RunCmdTests(OESelftestTestCase):
56 self.assertEqual(result.status, 0) 58 self.assertEqual(result.status, 0)
57 59
58 def test_result_assertion(self): 60 def test_result_assertion(self):
59 self.assertRaisesRegexp(AssertionError, "Command 'echo .* false' returned non-zero exit status 1:\nfoobar", 61 self.assertRaisesRegex(AssertionError, "Command 'echo .* false' returned non-zero exit status 1:\nfoobar",
60 runCmd, "echo foobar >&2; false", shell=True) 62 runCmd, "echo foobar >&2; false", shell=True)
61 63
62 def test_result_exception(self): 64 def test_result_exception(self):
63 self.assertRaisesRegexp(CommandError, "Command 'echo .* false' returned non-zero exit status 1 with output: foobar", 65 self.assertRaisesRegex(CommandError, "Command 'echo .* false' returned non-zero exit status 1 with output: foobar",
64 runCmd, "echo foobar >&2; false", shell=True, assert_error=False) 66 runCmd, "echo foobar >&2; false", shell=True, assert_error=False)
65 67
66 def test_output(self): 68 def test_output(self):
diff --git a/meta/lib/oeqa/selftest/cases/runqemu.py b/meta/lib/oeqa/selftest/cases/runqemu.py
index 7e676bcb41..f01e1eec66 100644
--- a/meta/lib/oeqa/selftest/cases/runqemu.py
+++ b/meta/lib/oeqa/selftest/cases/runqemu.py
@@ -4,14 +4,17 @@
4# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
5# 5#
6 6
7import os
7import re 8import re
8import tempfile
9import time 9import time
10import oe.types 10import oe.types
11from oeqa.core.decorator import OETestTag 11from oeqa.core.decorator import OETestTag
12from oeqa.core.decorator.data import skipIfNotArch, skipIfNotMachine
12from oeqa.selftest.case import OESelftestTestCase 13from oeqa.selftest.case import OESelftestTestCase
13from oeqa.utils.commands import bitbake, runqemu, get_bb_var, runCmd 14from oeqa.utils.commands import bitbake, runqemu, get_bb_var
14 15
16
17@OETestTag("runqemu")
15class RunqemuTests(OESelftestTestCase): 18class RunqemuTests(OESelftestTestCase):
16 """Runqemu test class""" 19 """Runqemu test class"""
17 20
@@ -21,23 +24,26 @@ class RunqemuTests(OESelftestTestCase):
21 def setUpLocal(self): 24 def setUpLocal(self):
22 super(RunqemuTests, self).setUpLocal() 25 super(RunqemuTests, self).setUpLocal()
23 self.recipe = 'core-image-minimal' 26 self.recipe = 'core-image-minimal'
24 self.machine = 'qemux86-64' 27 self.machine = self.td['MACHINE']
25 self.fstypes = "ext4 iso hddimg wic.vmdk wic.qcow2 wic.vdi" 28 self.image_link_name = get_bb_var('IMAGE_LINK_NAME', self.recipe)
26 self.cmd_common = "runqemu nographic"
27 29
28 kvm = oe.types.qemu_use_kvm(get_bb_var('QEMU_USE_KVM'), 'x86_64') 30 self.fstypes = "ext4"
31 if self.td["HOST_ARCH"] in ('i586', 'i686', 'x86_64'):
32 self.fstypes += " iso hddimg"
33 if self.machine == "qemux86-64":
34 self.fstypes += " wic.vmdk wic.qcow2 wic.vdi"
35
36 self.cmd_common = "runqemu nographic"
37 kvm = oe.types.qemu_use_kvm(get_bb_var('QEMU_USE_KVM'), self.td["TARGET_ARCH"])
29 if kvm: 38 if kvm:
30 self.cmd_common += " kvm" 39 self.cmd_common += " kvm"
31 40
32 self.write_config( 41 self.write_config(
33""" 42"""
34MACHINE = "%s"
35IMAGE_FSTYPES = "%s" 43IMAGE_FSTYPES = "%s"
36# 10 means 1 second 44# 10 means 1 second
37SYSLINUX_TIMEOUT = "10" 45SYSLINUX_TIMEOUT = "10"
38""" 46""" % self.fstypes)
39% (self.machine, self.fstypes)
40 )
41 47
42 if not RunqemuTests.image_is_ready: 48 if not RunqemuTests.image_is_ready:
43 RunqemuTests.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') 49 RunqemuTests.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
@@ -56,14 +62,17 @@ SYSLINUX_TIMEOUT = "10"
56 cmd = "%s %s ext4" % (self.cmd_common, self.machine) 62 cmd = "%s %s ext4" % (self.cmd_common, self.machine)
57 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: 63 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
58 with open(qemu.qemurunnerlog) as f: 64 with open(qemu.qemurunnerlog) as f:
59 self.assertIn('rootfs.ext4', f.read(), "Failed: %s" % cmd) 65 regexp = r'\nROOTFS: .*\.ext4]\n'
66 self.assertRegex(f.read(), regexp, "Failed to find '%s' in '%s' after running '%s'" % (regexp, qemu.qemurunnerlog, cmd))
60 67
68 @skipIfNotArch(['i586', 'i686', 'x86_64'])
61 def test_boot_machine_iso(self): 69 def test_boot_machine_iso(self):
62 """Test runqemu machine iso""" 70 """Test runqemu machine iso"""
63 cmd = "%s %s iso" % (self.cmd_common, self.machine) 71 cmd = "%s %s iso" % (self.cmd_common, self.machine)
64 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: 72 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
65 with open(qemu.qemurunnerlog) as f: 73 with open(qemu.qemurunnerlog) as f:
66 self.assertIn('media=cdrom', f.read(), "Failed: %s" % cmd) 74 text_in = 'media=cdrom'
75 self.assertIn(text_in, f.read(), "Failed to find '%s' in '%s' after running '%s'" % (text_in, qemu.qemurunnerlog, cmd))
67 76
68 def test_boot_recipe_image(self): 77 def test_boot_recipe_image(self):
69 """Test runqemu recipe-image""" 78 """Test runqemu recipe-image"""
@@ -72,20 +81,24 @@ SYSLINUX_TIMEOUT = "10"
72 with open(qemu.qemurunnerlog) as f: 81 with open(qemu.qemurunnerlog) as f:
73 self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read())) 82 self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read()))
74 83
75 84 # https://bugzilla.yoctoproject.org/show_bug.cgi?id=14963
85 @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
76 def test_boot_recipe_image_vmdk(self): 86 def test_boot_recipe_image_vmdk(self):
77 """Test runqemu recipe-image vmdk""" 87 """Test runqemu recipe-image vmdk"""
78 cmd = "%s %s wic.vmdk" % (self.cmd_common, self.recipe) 88 cmd = "%s %s wic.vmdk" % (self.cmd_common, self.recipe)
79 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: 89 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
80 with open(qemu.qemurunnerlog) as f: 90 with open(qemu.qemurunnerlog) as f:
81 self.assertIn('format=vmdk', f.read(), "Failed: %s" % cmd) 91 text_in = 'format=vmdk'
92 self.assertIn(text_in, f.read(), "Failed to find '%s' in '%s' after running '%s'" % (text_in, qemu.qemurunnerlog, cmd))
82 93
94 @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
83 def test_boot_recipe_image_vdi(self): 95 def test_boot_recipe_image_vdi(self):
84 """Test runqemu recipe-image vdi""" 96 """Test runqemu recipe-image vdi"""
85 cmd = "%s %s wic.vdi" % (self.cmd_common, self.recipe) 97 cmd = "%s %s wic.vdi" % (self.cmd_common, self.recipe)
86 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: 98 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
87 with open(qemu.qemurunnerlog) as f: 99 with open(qemu.qemurunnerlog) as f:
88 self.assertIn('format=vdi', f.read(), "Failed: %s" % cmd) 100 text_in = 'format=vdi'
101 self.assertIn(text_in, f.read(), "Failed to find '%s' in '%s' after running '%s'" % (text_in, qemu.qemurunnerlog, cmd))
89 102
90 def test_boot_deploy(self): 103 def test_boot_deploy(self):
91 """Test runqemu deploy_dir_image""" 104 """Test runqemu deploy_dir_image"""
@@ -94,7 +107,7 @@ SYSLINUX_TIMEOUT = "10"
94 with open(qemu.qemurunnerlog) as f: 107 with open(qemu.qemurunnerlog) as f:
95 self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read())) 108 self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read()))
96 109
97 110 @skipIfNotArch(['i586', 'i686', 'x86_64'])
98 def test_boot_deploy_hddimg(self): 111 def test_boot_deploy_hddimg(self):
99 """Test runqemu deploy_dir_image hddimg""" 112 """Test runqemu deploy_dir_image hddimg"""
100 cmd = "%s %s hddimg" % (self.cmd_common, self.deploy_dir_image) 113 cmd = "%s %s hddimg" % (self.cmd_common, self.deploy_dir_image)
@@ -109,6 +122,7 @@ SYSLINUX_TIMEOUT = "10"
109 with open(qemu.qemurunnerlog) as f: 122 with open(qemu.qemurunnerlog) as f:
110 self.assertIn(' -netdev user', f.read(), "Failed: %s" % cmd) 123 self.assertIn(' -netdev user', f.read(), "Failed: %s" % cmd)
111 124
125 @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
112 def test_boot_machine_slirp_qcow2(self): 126 def test_boot_machine_slirp_qcow2(self):
113 """Test runqemu machine slirp qcow2""" 127 """Test runqemu machine slirp qcow2"""
114 cmd = "%s slirp wic.qcow2 %s" % (self.cmd_common, self.machine) 128 cmd = "%s slirp wic.qcow2 %s" % (self.cmd_common, self.machine)
@@ -118,7 +132,7 @@ SYSLINUX_TIMEOUT = "10"
118 132
119 def test_boot_qemu_boot(self): 133 def test_boot_qemu_boot(self):
120 """Test runqemu /path/to/image.qemuboot.conf""" 134 """Test runqemu /path/to/image.qemuboot.conf"""
121 qemuboot_conf = "%s-%s.qemuboot.conf" % (self.recipe, self.machine) 135 qemuboot_conf = "%s.qemuboot.conf" % (self.image_link_name)
122 qemuboot_conf = os.path.join(self.deploy_dir_image, qemuboot_conf) 136 qemuboot_conf = os.path.join(self.deploy_dir_image, qemuboot_conf)
123 if not os.path.exists(qemuboot_conf): 137 if not os.path.exists(qemuboot_conf):
124 self.skipTest("%s not found" % qemuboot_conf) 138 self.skipTest("%s not found" % qemuboot_conf)
@@ -129,7 +143,7 @@ SYSLINUX_TIMEOUT = "10"
129 143
130 def test_boot_rootfs(self): 144 def test_boot_rootfs(self):
131 """Test runqemu /path/to/rootfs.ext4""" 145 """Test runqemu /path/to/rootfs.ext4"""
132 rootfs = "%s-%s.ext4" % (self.recipe, self.machine) 146 rootfs = "%s.ext4" % (self.image_link_name)
133 rootfs = os.path.join(self.deploy_dir_image, rootfs) 147 rootfs = os.path.join(self.deploy_dir_image, rootfs)
134 if not os.path.exists(rootfs): 148 if not os.path.exists(rootfs):
135 self.skipTest("%s not found" % rootfs) 149 self.skipTest("%s not found" % rootfs)
@@ -149,26 +163,27 @@ SYSLINUX_TIMEOUT = "10"
149# boot up various filesystem types, including live images (iso and hddimg), 163# boot up various filesystem types, including live images (iso and hddimg),
150# since live images are not supported on all qemu architectures. 164# since live images are not supported on all qemu architectures.
151@OETestTag("machine") 165@OETestTag("machine")
166@OETestTag("runqemu")
152class QemuTest(OESelftestTestCase): 167class QemuTest(OESelftestTestCase):
153 168
154 @classmethod 169 @classmethod
155 def setUpClass(cls): 170 def setUpClass(cls):
156 super(QemuTest, cls).setUpClass() 171 super(QemuTest, cls).setUpClass()
157 cls.recipe = 'core-image-minimal' 172 cls.recipe = 'core-image-minimal'
158 cls.machine = get_bb_var('MACHINE') 173 cls.machine = get_bb_var('MACHINE')
159 cls.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') 174 cls.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
175 cls.image_link_name = get_bb_var('IMAGE_LINK_NAME', cls.recipe)
160 cls.cmd_common = "runqemu nographic" 176 cls.cmd_common = "runqemu nographic"
161 cls.qemuboot_conf = "%s-%s.qemuboot.conf" % (cls.recipe, cls.machine) 177 cls.qemuboot_conf = "%s.qemuboot.conf" % (cls.image_link_name)
162 cls.qemuboot_conf = os.path.join(cls.deploy_dir_image, cls.qemuboot_conf) 178 cls.qemuboot_conf = os.path.join(cls.deploy_dir_image, cls.qemuboot_conf)
163 bitbake(cls.recipe) 179 bitbake(cls.recipe)
164 180
165 def _start_qemu_shutdown_check_if_shutdown_succeeded(self, qemu, timeout): 181 def _start_qemu_shutdown_check_if_shutdown_succeeded(self, qemu, timeout):
182 # Allow the runner's LoggingThread instance to exit without errors
183 # (such as the exception "Console connection closed unexpectedly")
184 # as qemu will disappear when we shut it down
185 qemu.runner.allowexit()
166 qemu.run_serial("shutdown -h now") 186 qemu.run_serial("shutdown -h now")
167 # Stop thread will stop the LoggingThread instance used for logging
168 # qemu through serial console, stop thread will prevent this code
169 # from facing exception (Console connection closed unexpectedly)
170 # when qemu was shutdown by the above shutdown command
171 qemu.runner.stop_thread()
172 time_track = 0 187 time_track = 0
173 try: 188 try:
174 while True: 189 while True:
@@ -190,22 +205,12 @@ class QemuTest(OESelftestTestCase):
190 qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout) 205 qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout)
191 self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout)) 206 self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout))
192 207
193 # Need to have portmap/rpcbind running to allow this test to work and 208 def test_qemu_can_boot_nfs_and_shutdown(self):
194 # current autobuilder setup does not have this. 209 rootfs_tar = "%s.tar.bz2" % (self.image_link_name)
195 def disabled_test_qemu_can_boot_nfs_and_shutdown(self):
196 self.assertExists(self.qemuboot_conf)
197 bitbake('meta-ide-support')
198 rootfs_tar = "%s-%s.tar.bz2" % (self.recipe, self.machine)
199 rootfs_tar = os.path.join(self.deploy_dir_image, rootfs_tar) 210 rootfs_tar = os.path.join(self.deploy_dir_image, rootfs_tar)
200 self.assertExists(rootfs_tar) 211 self.assertExists(rootfs_tar)
201 tmpdir = tempfile.mkdtemp(prefix='qemu_nfs') 212 cmd = "%s %s" % (self.cmd_common, rootfs_tar)
202 tmpdir_nfs = os.path.join(tmpdir, 'nfs')
203 cmd_extract_nfs = 'runqemu-extract-sdk %s %s' % (rootfs_tar, tmpdir_nfs)
204 result = runCmd(cmd_extract_nfs)
205 self.assertEqual(0, result.status, "runqemu-extract-sdk didn't run as expected. %s" % result.output)
206 cmd = "%s nfs %s %s" % (self.cmd_common, self.qemuboot_conf, tmpdir_nfs)
207 shutdown_timeout = 120 213 shutdown_timeout = 120
208 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: 214 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
209 qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout) 215 qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout)
210 self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout)) 216 self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout))
211 runCmd('rm -rf %s' % tmpdir)
diff --git a/meta/lib/oeqa/selftest/cases/runtime_test.py b/meta/lib/oeqa/selftest/cases/runtime_test.py
index b20c5b427b..d58ffa80f5 100644
--- a/meta/lib/oeqa/selftest/cases/runtime_test.py
+++ b/meta/lib/oeqa/selftest/cases/runtime_test.py
@@ -1,24 +1,20 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5from oeqa.selftest.case import OESelftestTestCase 7from oeqa.selftest.case import OESelftestTestCase
6from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu 8from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu
7from oeqa.utils.sshcontrol import SSHControl 9from oeqa.core.decorator import OETestTag
8import os 10import os
9import re
10import tempfile 11import tempfile
11import shutil
12import oe.lsb 12import oe.lsb
13from oeqa.core.decorator.data import skipIfNotQemu 13from oeqa.core.decorator.data import skipIfNotQemu, skipIfNotMachine
14 14
15class TestExport(OESelftestTestCase): 15class TestExport(OESelftestTestCase):
16 16
17 @classmethod 17 @OETestTag("runqemu")
18 def tearDownClass(cls):
19 runCmd("rm -rf /tmp/sdk")
20 super(TestExport, cls).tearDownClass()
21
22 def test_testexport_basic(self): 18 def test_testexport_basic(self):
23 """ 19 """
24 Summary: Check basic testexport functionality with only ping test enabled. 20 Summary: Check basic testexport functionality with only ping test enabled.
@@ -29,7 +25,7 @@ class TestExport(OESelftestTestCase):
29 Author: Mariano Lopez <mariano.lopez@intel.com> 25 Author: Mariano Lopez <mariano.lopez@intel.com>
30 """ 26 """
31 27
32 features = 'INHERIT += "testexport"\n' 28 features = 'IMAGE_CLASSES += "testexport"\n'
33 # These aren't the actual IP addresses but testexport class needs something defined 29 # These aren't the actual IP addresses but testexport class needs something defined
34 features += 'TEST_SERVER_IP = "192.168.7.1"\n' 30 features += 'TEST_SERVER_IP = "192.168.7.1"\n'
35 features += 'TEST_TARGET_IP = "192.168.7.1"\n' 31 features += 'TEST_TARGET_IP = "192.168.7.1"\n'
@@ -70,7 +66,7 @@ class TestExport(OESelftestTestCase):
70 Author: Mariano Lopez <mariano.lopez@intel.com> 66 Author: Mariano Lopez <mariano.lopez@intel.com>
71 """ 67 """
72 68
73 features = 'INHERIT += "testexport"\n' 69 features = 'IMAGE_CLASSES += "testexport"\n'
74 # These aren't the actual IP addresses but testexport class needs something defined 70 # These aren't the actual IP addresses but testexport class needs something defined
75 features += 'TEST_SERVER_IP = "192.168.7.1"\n' 71 features += 'TEST_SERVER_IP = "192.168.7.1"\n'
76 features += 'TEST_TARGET_IP = "192.168.7.1"\n' 72 features += 'TEST_TARGET_IP = "192.168.7.1"\n'
@@ -95,21 +91,23 @@ class TestExport(OESelftestTestCase):
95 msg = "Couldn't find SDK tarball: %s" % tarball_path 91 msg = "Couldn't find SDK tarball: %s" % tarball_path
96 self.assertEqual(os.path.isfile(tarball_path), True, msg) 92 self.assertEqual(os.path.isfile(tarball_path), True, msg)
97 93
98 # Extract SDK and run tar from SDK 94 with tempfile.TemporaryDirectory() as tmpdirname:
99 result = runCmd("%s -y -d /tmp/sdk" % tarball_path) 95 # Extract SDK and run tar from SDK
100 self.assertEqual(0, result.status, "Couldn't extract SDK") 96 result = runCmd("%s -y -d %s" % (tarball_path, tmpdirname))
97 self.assertEqual(0, result.status, "Couldn't extract SDK")
101 98
102 env_script = result.output.split()[-1] 99 env_script = result.output.split()[-1]
103 result = runCmd(". %s; which tar" % env_script, shell=True) 100 result = runCmd(". %s; which tar" % env_script, shell=True)
104 self.assertEqual(0, result.status, "Couldn't setup SDK environment") 101 self.assertEqual(0, result.status, "Couldn't setup SDK environment")
105 is_sdk_tar = True if "/tmp/sdk" in result.output else False 102 is_sdk_tar = True if tmpdirname in result.output else False
106 self.assertTrue(is_sdk_tar, "Couldn't setup SDK environment") 103 self.assertTrue(is_sdk_tar, "Couldn't setup SDK environment")
107 104
108 tar_sdk = result.output 105 tar_sdk = result.output
109 result = runCmd("%s --version" % tar_sdk) 106 result = runCmd("%s --version" % tar_sdk)
110 self.assertEqual(0, result.status, "Couldn't run tar from SDK") 107 self.assertEqual(0, result.status, "Couldn't run tar from SDK")
111 108
112 109
110@OETestTag("runqemu")
113class TestImage(OESelftestTestCase): 111class TestImage(OESelftestTestCase):
114 112
115 def test_testimage_install(self): 113 def test_testimage_install(self):
@@ -123,15 +121,30 @@ class TestImage(OESelftestTestCase):
123 if get_bb_var('DISTRO') == 'poky-tiny': 121 if get_bb_var('DISTRO') == 'poky-tiny':
124 self.skipTest('core-image-full-cmdline not buildable for poky-tiny') 122 self.skipTest('core-image-full-cmdline not buildable for poky-tiny')
125 123
126 features = 'INHERIT += "testimage"\n' 124 features = 'IMAGE_CLASSES += "testimage"\n'
127 features += 'IMAGE_INSTALL_append = " libssl"\n' 125 features += 'IMAGE_INSTALL:append = " libssl"\n'
128 features += 'TEST_SUITES = "ping ssh selftest"\n' 126 features += 'TEST_SUITES = "ping ssh selftest"\n'
129 self.write_config(features) 127 self.write_config(features)
130 128
131 # Build core-image-sato and testimage
132 bitbake('core-image-full-cmdline socat') 129 bitbake('core-image-full-cmdline socat')
133 bitbake('-c testimage core-image-full-cmdline') 130 bitbake('-c testimage core-image-full-cmdline')
134 131
132 def test_testimage_slirp(self):
133 """
134 Summary: Check basic testimage functionality with qemu and slirp networking.
135 """
136
137 features = '''
138IMAGE_CLASSES:append = " testimage"
139IMAGE_FEATURES:append = " ssh-server-dropbear"
140IMAGE_ROOTFS_EXTRA_SPACE:append = "${@bb.utils.contains("IMAGE_CLASSES", "testimage", " + 5120", "", d)}"
141TEST_RUNQEMUPARAMS += " slirp"
142'''
143 self.write_config(features)
144
145 bitbake('core-image-minimal')
146 bitbake('-c testimage core-image-minimal')
147
135 def test_testimage_dnf(self): 148 def test_testimage_dnf(self):
136 """ 149 """
137 Summary: Check package feeds functionality for dnf 150 Summary: Check package feeds functionality for dnf
@@ -142,7 +155,7 @@ class TestImage(OESelftestTestCase):
142 if get_bb_var('DISTRO') == 'poky-tiny': 155 if get_bb_var('DISTRO') == 'poky-tiny':
143 self.skipTest('core-image-full-cmdline not buildable for poky-tiny') 156 self.skipTest('core-image-full-cmdline not buildable for poky-tiny')
144 157
145 features = 'INHERIT += "testimage"\n' 158 features = 'IMAGE_CLASSES += "testimage"\n'
146 features += 'TEST_SUITES = "ping ssh dnf_runtime dnf.DnfBasicTest.test_dnf_help"\n' 159 features += 'TEST_SUITES = "ping ssh dnf_runtime dnf.DnfBasicTest.test_dnf_help"\n'
147 # We don't yet know what the server ip and port will be - they will be patched 160 # We don't yet know what the server ip and port will be - they will be patched
148 # in at the start of the on-image test 161 # in at the start of the on-image test
@@ -161,13 +174,50 @@ class TestImage(OESelftestTestCase):
161 features += 'PACKAGE_FEED_GPG_NAME = "testuser"\n' 174 features += 'PACKAGE_FEED_GPG_NAME = "testuser"\n'
162 features += 'PACKAGE_FEED_GPG_PASSPHRASE_FILE = "%s"\n' % os.path.join(signing_key_dir, 'key.passphrase') 175 features += 'PACKAGE_FEED_GPG_PASSPHRASE_FILE = "%s"\n' % os.path.join(signing_key_dir, 'key.passphrase')
163 features += 'GPG_PATH = "%s"\n' % self.gpg_home 176 features += 'GPG_PATH = "%s"\n' % self.gpg_home
164 features += 'PSEUDO_IGNORE_PATHS .= ",%s"\n' % self.gpg_home 177 self.write_config(features)
178
179 bitbake('core-image-full-cmdline socat')
180 bitbake('-c testimage core-image-full-cmdline')
181
182 def test_testimage_apt(self):
183 """
184 Summary: Check package feeds functionality for apt
185 Expected: 1. Check that remote package feeds can be accessed
186 Product: oe-core
187 Author: Ferry Toth <fntoth@gmail.com>
188 """
189 if get_bb_var('DISTRO') == 'poky-tiny':
190 self.skipTest('core-image-full-cmdline not buildable for poky-tiny')
191
192 features = 'IMAGE_CLASSES += "testimage"\n'
193 features += 'TEST_SUITES = "ping ssh apt.AptRepoTest.test_apt_install_from_repo"\n'
194 # We don't yet know what the server ip and port will be - they will be patched
195 # in at the start of the on-image test
196 features += 'PACKAGE_FEED_URIS = "http://bogus_ip:bogus_port"\n'
197 features += 'EXTRA_IMAGE_FEATURES += "package-management"\n'
198 features += 'PACKAGE_CLASSES = "package_deb"\n'
199 # We need gnupg on the target to install keys
200 features += 'IMAGE_INSTALL:append:pn-core-image-full-cmdline = " gnupg"\n'
201
202 bitbake('gnupg-native -c addto_recipe_sysroot')
203
204 # Enable package feed signing
205 self.gpg_home = tempfile.mkdtemp(prefix="oeqa-feed-sign-")
206 self.track_for_cleanup(self.gpg_home)
207 signing_key_dir = os.path.join(self.testlayer_path, 'files', 'signing')
208 runCmd('gpgconf --list-dirs --homedir %s; gpg -v --batch --homedir %s --import %s' % (self.gpg_home, self.gpg_home, os.path.join(signing_key_dir, 'key.secret')), native_sysroot=get_bb_var("RECIPE_SYSROOT_NATIVE", "gnupg-native"), shell=True)
209 features += 'INHERIT += "sign_package_feed"\n'
210 features += 'PACKAGE_FEED_GPG_NAME = "testuser"\n'
211 features += 'PACKAGE_FEED_GPG_PASSPHRASE_FILE = "%s"\n' % os.path.join(signing_key_dir, 'key.passphrase')
212 features += 'GPG_PATH = "%s"\n' % self.gpg_home
165 self.write_config(features) 213 self.write_config(features)
166 214
167 # Build core-image-sato and testimage 215 # Build core-image-sato and testimage
168 bitbake('core-image-full-cmdline socat') 216 bitbake('core-image-full-cmdline socat')
169 bitbake('-c testimage core-image-full-cmdline') 217 bitbake('-c testimage core-image-full-cmdline')
170 218
219 # https://bugzilla.yoctoproject.org/show_bug.cgi?id=14966
220 @skipIfNotMachine("qemux86-64", "test needs qemux86-64")
171 def test_testimage_virgl_gtk_sdl(self): 221 def test_testimage_virgl_gtk_sdl(self):
172 """ 222 """
173 Summary: Check host-assisted accelerate OpenGL functionality in qemu with gtk and SDL frontends 223 Summary: Check host-assisted accelerate OpenGL functionality in qemu with gtk and SDL frontends
@@ -190,25 +240,26 @@ class TestImage(OESelftestTestCase):
190 240
191 qemu_packageconfig = get_bb_var('PACKAGECONFIG', 'qemu-system-native') 241 qemu_packageconfig = get_bb_var('PACKAGECONFIG', 'qemu-system-native')
192 qemu_distrofeatures = get_bb_var('DISTRO_FEATURES', 'qemu-system-native') 242 qemu_distrofeatures = get_bb_var('DISTRO_FEATURES', 'qemu-system-native')
193 features = 'INHERIT += "testimage"\n' 243 features = 'IMAGE_CLASSES += "testimage"\n'
194 if 'gtk+' not in qemu_packageconfig: 244 if 'gtk+' not in qemu_packageconfig:
195 features += 'PACKAGECONFIG_append_pn-qemu-system-native = " gtk+"\n' 245 features += 'PACKAGECONFIG:append:pn-qemu-system-native = " gtk+"\n'
196 if 'sdl' not in qemu_packageconfig: 246 if 'sdl' not in qemu_packageconfig:
197 features += 'PACKAGECONFIG_append_pn-qemu-system-native = " sdl"\n' 247 features += 'PACKAGECONFIG:append:pn-qemu-system-native = " sdl"\n'
198 if 'opengl' not in qemu_distrofeatures: 248 if 'opengl' not in qemu_distrofeatures:
199 features += 'DISTRO_FEATURES_append = " opengl"\n' 249 features += 'DISTRO_FEATURES:append = " opengl"\n'
200 features += 'TEST_SUITES = "ping ssh virgl"\n' 250 features += 'TEST_SUITES = "ping ssh virgl"\n'
201 features += 'IMAGE_FEATURES_append = " ssh-server-dropbear"\n' 251 features += 'IMAGE_FEATURES:append = " ssh-server-dropbear"\n'
202 features += 'IMAGE_INSTALL_append = " kmscube"\n' 252 features += 'IMAGE_INSTALL:append = " kmscube"\n'
203 features_gtk = features + 'TEST_RUNQEMUPARAMS = "gtk gl"\n' 253 features_gtk = features + 'TEST_RUNQEMUPARAMS += " gtk gl"\n'
204 self.write_config(features_gtk) 254 self.write_config(features_gtk)
205 bitbake('core-image-minimal') 255 bitbake('core-image-minimal')
206 bitbake('-c testimage core-image-minimal') 256 bitbake('-c testimage core-image-minimal')
207 features_sdl = features + 'TEST_RUNQEMUPARAMS = "sdl gl"\n' 257 features_sdl = features + 'TEST_RUNQEMUPARAMS += " sdl gl"\n'
208 self.write_config(features_sdl) 258 self.write_config(features_sdl)
209 bitbake('core-image-minimal') 259 bitbake('core-image-minimal')
210 bitbake('-c testimage core-image-minimal') 260 bitbake('-c testimage core-image-minimal')
211 261
262 @skipIfNotMachine("qemux86-64", "test needs qemux86-64")
212 def test_testimage_virgl_headless(self): 263 def test_testimage_virgl_headless(self):
213 """ 264 """
214 Summary: Check host-assisted accelerate OpenGL functionality in qemu with egl-headless frontend 265 Summary: Check host-assisted accelerate OpenGL functionality in qemu with egl-headless frontend
@@ -218,28 +269,27 @@ class TestImage(OESelftestTestCase):
218 Author: Alexander Kanavin <alex.kanavin@gmail.com> 269 Author: Alexander Kanavin <alex.kanavin@gmail.com>
219 """ 270 """
220 import subprocess, os 271 import subprocess, os
221 try: 272
222 content = os.listdir("/dev/dri") 273 distro = oe.lsb.distro_identifier()
223 if len([i for i in content if i.startswith('render')]) == 0: 274 # Merge request to address the issue on centos/rhel/derivatives:
224 self.skipTest("No render nodes found in /dev/dri: %s" %(content)) 275 # https://gitlab.com/cki-project/kernel-ark/-/merge_requests/3449
225 except FileNotFoundError: 276 if distro and (distro in ['debian-9', 'debian-10', 'centos-7', 'centos-8', 'centos-9', 'ubuntu-16.04', 'ubuntu-18.04'] or
226 self.skipTest("/dev/dri directory does not exist; no render nodes available on this machine.") 277 distro.startswith('almalinux') or distro.startswith('rocky')):
227 try: 278 self.skipTest('virgl headless cannot be tested with %s' %(distro))
228 dripath = subprocess.check_output("pkg-config --variable=dridriverdir dri", shell=True) 279
229 except subprocess.CalledProcessError as e:
230 self.skipTest("Could not determine the path to dri drivers on the host via pkg-config.\nPlease install Mesa development files (particularly, dri.pc) on the host machine.")
231 qemu_distrofeatures = get_bb_var('DISTRO_FEATURES', 'qemu-system-native') 280 qemu_distrofeatures = get_bb_var('DISTRO_FEATURES', 'qemu-system-native')
232 features = 'INHERIT += "testimage"\n' 281 features = 'IMAGE_CLASSES += "testimage"\n'
233 if 'opengl' not in qemu_distrofeatures: 282 if 'opengl' not in qemu_distrofeatures:
234 features += 'DISTRO_FEATURES_append = " opengl"\n' 283 features += 'DISTRO_FEATURES:append = " opengl"\n'
235 features += 'TEST_SUITES = "ping ssh virgl"\n' 284 features += 'TEST_SUITES = "ping ssh virgl"\n'
236 features += 'IMAGE_FEATURES_append = " ssh-server-dropbear"\n' 285 features += 'IMAGE_FEATURES:append = " ssh-server-dropbear"\n'
237 features += 'IMAGE_INSTALL_append = " kmscube"\n' 286 features += 'IMAGE_INSTALL:append = " kmscube"\n'
238 features += 'TEST_RUNQEMUPARAMS = "egl-headless"\n' 287 features += 'TEST_RUNQEMUPARAMS += " egl-headless"\n'
239 self.write_config(features) 288 self.write_config(features)
240 bitbake('core-image-minimal') 289 bitbake('core-image-minimal')
241 bitbake('-c testimage core-image-minimal') 290 bitbake('-c testimage core-image-minimal')
242 291
292@OETestTag("runqemu")
243class Postinst(OESelftestTestCase): 293class Postinst(OESelftestTestCase):
244 294
245 def init_manager_loop(self, init_manager): 295 def init_manager_loop(self, init_manager):
@@ -260,10 +310,7 @@ class Postinst(OESelftestTestCase):
260 features += 'IMAGE_FEATURES += "package-management empty-root-password"\n' 310 features += 'IMAGE_FEATURES += "package-management empty-root-password"\n'
261 features += 'PACKAGE_CLASSES = "%s"\n' % classes 311 features += 'PACKAGE_CLASSES = "%s"\n' % classes
262 if init_manager == "systemd": 312 if init_manager == "systemd":
263 features += 'DISTRO_FEATURES_append = " systemd"\n' 313 features += 'INIT_MANAGER = "systemd"\n'
264 features += 'VIRTUAL-RUNTIME_init_manager = "systemd"\n'
265 features += 'DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit"\n'
266 features += 'VIRTUAL-RUNTIME_initscripts = ""\n'
267 self.write_config(features) 314 self.write_config(features)
268 315
269 bitbake('core-image-minimal') 316 bitbake('core-image-minimal')
@@ -280,7 +327,7 @@ class Postinst(OESelftestTestCase):
280 327
281 328
282 329
283 @skipIfNotQemu('qemuall', 'Test only runs in qemu') 330 @skipIfNotQemu()
284 def test_postinst_rootfs_and_boot_sysvinit(self): 331 def test_postinst_rootfs_and_boot_sysvinit(self):
285 """ 332 """
286 Summary: The purpose of this test case is to verify Post-installation 333 Summary: The purpose of this test case is to verify Post-installation
@@ -301,7 +348,7 @@ class Postinst(OESelftestTestCase):
301 self.init_manager_loop("sysvinit") 348 self.init_manager_loop("sysvinit")
302 349
303 350
304 @skipIfNotQemu('qemuall', 'Test only runs in qemu') 351 @skipIfNotQemu()
305 def test_postinst_rootfs_and_boot_systemd(self): 352 def test_postinst_rootfs_and_boot_systemd(self):
306 """ 353 """
307 Summary: The purpose of this test case is to verify Post-installation 354 Summary: The purpose of this test case is to verify Post-installation
@@ -357,6 +404,7 @@ class Postinst(OESelftestTestCase):
357 self.assertFalse(os.path.isfile(os.path.join(hosttestdir, "rootfs-after-failure")), 404 self.assertFalse(os.path.isfile(os.path.join(hosttestdir, "rootfs-after-failure")),
358 "rootfs-after-failure file was created") 405 "rootfs-after-failure file was created")
359 406
407@OETestTag("runqemu")
360class SystemTap(OESelftestTestCase): 408class SystemTap(OESelftestTestCase):
361 """ 409 """
362 Summary: The purpose of this test case is to verify native crosstap 410 Summary: The purpose of this test case is to verify native crosstap
@@ -377,14 +425,14 @@ TEST_SERVER_IP = "192.168.7.1"
377TEST_TARGET_IP = "192.168.7.2" 425TEST_TARGET_IP = "192.168.7.2"
378 426
379EXTRA_IMAGE_FEATURES += "tools-profile dbg-pkgs" 427EXTRA_IMAGE_FEATURES += "tools-profile dbg-pkgs"
380IMAGE_FEATURES_append = " ssh-server-dropbear" 428IMAGE_FEATURES:append = " ssh-server-dropbear"
381 429
382# enables kernel debug symbols 430# enables kernel debug symbols
383KERNEL_EXTRA_FEATURES_append = " features/debug/debug-kernel.scc" 431KERNEL_EXTRA_FEATURES:append = " features/debug/debug-kernel.scc"
384KERNEL_EXTRA_FEATURES_append = " features/systemtap/systemtap.scc" 432KERNEL_EXTRA_FEATURES:append = " features/systemtap/systemtap.scc"
385 433
386# add systemtap run-time into target image if it is not there yet 434# add systemtap run-time into target image if it is not there yet
387IMAGE_INSTALL_append = " systemtap-runtime" 435IMAGE_INSTALL:append = " systemtap-runtime"
388""" 436"""
389 437
390 def test_crosstap_helloworld(self): 438 def test_crosstap_helloworld(self):
@@ -433,4 +481,3 @@ IMAGE_INSTALL_append = " systemtap-runtime"
433 cmd = "crosstap -r root@192.168.7.2 -s %s/process/ syscalls_by_pid.stp" % systemtap_examples 481 cmd = "crosstap -r root@192.168.7.2 -s %s/process/ syscalls_by_pid.stp" % systemtap_examples
434 result = runCmd(cmd) 482 result = runCmd(cmd)
435 self.assertEqual(0, result.status, 'crosstap syscalls_by_pid returned a non 0 status:%s' % result.output) 483 self.assertEqual(0, result.status, 'crosstap syscalls_by_pid returned a non 0 status:%s' % result.output)
436
diff --git a/meta/lib/oeqa/selftest/cases/rust.py b/meta/lib/oeqa/selftest/cases/rust.py
new file mode 100644
index 0000000000..d99a58d6b9
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/rust.py
@@ -0,0 +1,135 @@
1# SPDX-License-Identifier: MIT
2import subprocess
3import time
4from oeqa.core.decorator import OETestTag
5from oeqa.core.decorator.data import skipIfArch
6from oeqa.core.case import OEPTestResultTestCase
7from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import runCmd, bitbake, get_bb_var, runqemu
9from oeqa.utils.sshcontrol import SSHControl
10
def parse_results(filename):
    """Parse a rust bootstrap test log into a {test name: result} dict.

    Outcome strings are normalised to PASS/FAIL/SKIPPED; any other
    outcome is stored verbatim.  When a test appears more than once the
    first recorded result wins and the duplicate is reported on stdout.
    """
    tests = {}
    with open(filename, "r") as f:
        for line in f.readlines():
            # Only result lines of the form "test [suite] name ... outcome" matter.
            if "..." not in line or "test [" not in line:
                continue
            name = line.split("test ")[1].split(" ... ")[0]
            if "] " in name:
                # Drop the leading "[suite] " prefix from the test name.
                name = name.split("] ", 1)[1]
            outcome = line.split(" ... ")[1].strip()
            if outcome == "ok":
                outcome = "PASS"
            elif outcome == "failed":
                outcome = "FAIL"
            elif "ignored" in outcome:
                outcome = "SKIPPED"
            if name not in tests:
                tests[name] = outcome
            elif tests[name] != outcome:
                print("Duplicate and mismatching result %s for %s" % (outcome, name))
            else:
                print("Duplicate result %s for %s" % (outcome, name))
    return tests
35
# NOTE: a full run takes roughly 2h 20min with PARALLEL_MAKE set to 40 jobs.
@OETestTag("toolchain-system")
@OETestTag("toolchain-user")
@OETestTag("runqemu")
class RustSelfTestSystemEmulated(OESelftestTestCase, OEPTestResultTestCase):
    """Run the rust compiler's own testsuite against a qemu guest.

    The target half of the harness (remote-test-server) is copied into a
    core-image-minimal qemu image and started over ssh; the host side then
    drives rust's bootstrap.py and stores the parsed results as ptest
    results.
    """

    @skipIfArch(['mips', 'mips64'])
    def test_rust(self, *args, **kwargs):
        # build remote-test-server before image build
        recipe = "rust"
        start_time = time.time()
        bitbake("{} -c test_compile".format(recipe))
        builddir = get_bb_var("RUSTSRC", "rust")
        # build core-image-minimal with required packages
        default_installed_packages = ["libgcc", "libstdc++", "libatomic", "libgomp"]
        features = []
        features.append('IMAGE_FEATURES += "ssh-server-dropbear"')
        features.append('CORE_IMAGE_EXTRA_INSTALL += "{0}"'.format(" ".join(default_installed_packages)))
        self.write_config("\n".join(features))
        bitbake("core-image-minimal")

        # Exclude the test folders that error out while building
        # TODO: Fix the errors and include them for testing
        # no-fail-fast: Run all tests regardless of failure.
        # bless: First runs rustfmt to format the codebase,
        # then runs tidy checks.
        exclude_list = [
            'src/bootstrap',
            'src/doc/rustc',
            'src/doc/rustdoc',
            'src/doc/unstable-book',
            'src/etc/test-float-parse',
            'src/librustdoc',
            'src/rustdoc-json-types',
            'src/tools/jsondoclint',
            'src/tools/lint-docs',
            'src/tools/replace-version-placeholder',
            'src/tools/rust-analyzer',
            'src/tools/rustdoc-themes',
            'src/tools/rust-installer',
            'src/tools/suggest-tests',
            'tests/assembly/asm/aarch64-outline-atomics.rs',
            'tests/codegen/issues/issue-122805.rs',
            'tests/codegen/thread-local.rs',
            'tests/mir-opt/',
            'tests/run-make',
            'tests/run-make-fulldeps',
            'tests/rustdoc',
            'tests/rustdoc-json',
            'tests/rustdoc-js-std',
            'tests/ui/abi/stack-probes-lto.rs',
            'tests/ui/abi/stack-probes.rs',
            'tests/ui/codegen/mismatched-data-layouts.rs',
            'tests/codegen/rust-abi-arch-specific-adjustment.rs',
            'tests/ui/debuginfo/debuginfo-emit-llvm-ir-and-split-debuginfo.rs',
            'tests/ui/feature-gates/version_check.rs',
            'tests/ui-fulldeps/',
            'tests/ui/process/nofile-limit.rs',
            'tidyselftest'
        ]

        exclude_fail_tests = " ".join([" --exclude " + item for item in exclude_list])
        # Add exclude_fail_tests with other test arguments
        testargs = exclude_fail_tests + " --no-fail-fast --bless"

        # wrap the execution with a qemu instance.
        # qemuparams "-m 512" gives the guest 512 MB of RAM (qemu's -m option
        # sets guest memory, not test parallelism).
        with runqemu("core-image-minimal", runqemuparams = "nographic", qemuparams = "-m 512") as qemu:
            # Copy remote-test-server to image through scp
            host_sys = get_bb_var("RUST_BUILD_SYS", "rust")
            ssh = SSHControl(ip=qemu.ip, logfile=qemu.sshlog, user="root")
            ssh.copy_to(builddir + "/build/" + host_sys + "/stage2-tools-bin/remote-test-server","~/")
            # Execute remote-test-server on image through background ssh
            command = '~/remote-test-server --bind 0.0.0.0:12345 -v'
            sshrun=subprocess.Popen(("ssh", '-o', 'UserKnownHostsFile=/dev/null', '-o', 'StrictHostKeyChecking=no', '-f', "root@%s" % qemu.ip, command), shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            # Get the values of variables.
            tcpath = get_bb_var("TARGET_SYS", "rust")
            targetsys = get_bb_var("RUST_TARGET_SYS", "rust")
            rustlibpath = get_bb_var("WORKDIR", "rust")
            tmpdir = get_bb_var("TMPDIR", "rust")

            # Set path for target-poky-linux-gcc, RUST_TARGET_PATH and hosttools.
            cmd = "export TARGET_VENDOR=\"-poky\";"
            cmd = cmd + " export PATH=%s/recipe-sysroot-native/usr/bin/python3-native:%s/recipe-sysroot-native/usr/bin:%s/recipe-sysroot-native/usr/bin/%s:%s/hosttools:$PATH;" % (rustlibpath, rustlibpath, rustlibpath, tcpath, tmpdir)
            cmd = cmd + " export RUST_TARGET_PATH=%s/rust-targets;" % rustlibpath
            # Trigger testing.
            cmd = cmd + " export TEST_DEVICE_ADDR=\"%s:12345\";" % qemu.ip
            cmd = cmd + " cd %s; python3 src/bootstrap/bootstrap.py test %s --target %s" % (builddir, testargs, targetsys)
            retval = runCmd(cmd)
            end_time = time.time()

            # Persist the raw bootstrap output, then report each parsed
            # test outcome through the ptest result store.
            resultlog = rustlibpath + "/results-log.txt"
            with open(resultlog, "w") as f:
                f.write(retval.output)

            ptestsuite = "rust"
            self.ptest_section(ptestsuite, duration = int(end_time - start_time), logfile=resultlog)
            test_results = parse_results(resultlog)
            for test in test_results:
                self.ptest_result(ptestsuite, test, test_results[test])
diff --git a/meta/lib/oeqa/selftest/cases/sdk.py b/meta/lib/oeqa/selftest/cases/sdk.py
new file mode 100644
index 0000000000..3971365029
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/sdk.py
@@ -0,0 +1,39 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os.path
8
9from oeqa.selftest.case import OESelftestTestCase
10from oeqa.utils.commands import bitbake, get_bb_vars
11
12class SDKTests(OESelftestTestCase):
13
14 def load_manifest(self, filename):
15 manifest = {}
16 with open(filename) as f:
17 for line in f:
18 name, arch, version = line.split(maxsplit=3)
19 manifest[name] = (version, arch)
20 return manifest
21
22 def test_sdk_manifests(self):
23 image = "core-image-minimal"
24
25 self.write_config("""
26TOOLCHAIN_HOST_TASK:append = " nativesdk-selftest-hello"
27IMAGE_INSTALL:append = " selftest-hello"
28""")
29
30 bitbake(f"{image} -c populate_sdk")
31 vars = get_bb_vars(['SDK_DEPLOY', 'TOOLCHAIN_OUTPUTNAME'], image)
32
33 path = os.path.join(vars["SDK_DEPLOY"], vars["TOOLCHAIN_OUTPUTNAME"] + ".host.manifest")
34 self.assertNotEqual(os.path.getsize(path), 0, msg="Host manifest is empty")
35 self.assertIn("nativesdk-selftest-hello", self.load_manifest(path))
36
37 path = os.path.join(vars["SDK_DEPLOY"], vars["TOOLCHAIN_OUTPUTNAME"] + ".target.manifest")
38 self.assertNotEqual(os.path.getsize(path), 0, msg="Target manifest is empty")
39 self.assertIn("selftest-hello", self.load_manifest(path))
diff --git a/meta/lib/oeqa/selftest/cases/selftest.py b/meta/lib/oeqa/selftest/cases/selftest.py
index af080dcf03..a80a8651a5 100644
--- a/meta/lib/oeqa/selftest/cases/selftest.py
+++ b/meta/lib/oeqa/selftest/cases/selftest.py
@@ -1,9 +1,10 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import importlib 7import importlib
6from oeqa.utils.commands import runCmd
7import oeqa.selftest 8import oeqa.selftest
8from oeqa.selftest.case import OESelftestTestCase 9from oeqa.selftest.case import OESelftestTestCase
9 10
diff --git a/meta/lib/oeqa/selftest/cases/signing.py b/meta/lib/oeqa/selftest/cases/signing.py
index a28c7eb19a..4df45ba032 100644
--- a/meta/lib/oeqa/selftest/cases/signing.py
+++ b/meta/lib/oeqa/selftest/cases/signing.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -81,6 +83,8 @@ class Signing(OESelftestTestCase):
81 feature += 'RPM_GPG_PASSPHRASE = "test123"\n' 83 feature += 'RPM_GPG_PASSPHRASE = "test123"\n'
82 feature += 'RPM_GPG_NAME = "testuser"\n' 84 feature += 'RPM_GPG_NAME = "testuser"\n'
83 feature += 'GPG_PATH = "%s"\n' % self.gpg_dir 85 feature += 'GPG_PATH = "%s"\n' % self.gpg_dir
86 feature += 'PACKAGECONFIG:append:pn-rpm-native = " sequoia"\n'
87 feature += 'PACKAGECONFIG:append:pn-rpm = " sequoia"\n'
84 88
85 self.write_config(feature) 89 self.write_config(feature)
86 90
@@ -145,7 +149,7 @@ class Signing(OESelftestTestCase):
145 feature += 'GPG_PATH = "%s"\n' % self.gpg_dir 149 feature += 'GPG_PATH = "%s"\n' % self.gpg_dir
146 feature += 'SSTATE_DIR = "%s"\n' % sstatedir 150 feature += 'SSTATE_DIR = "%s"\n' % sstatedir
147 # Any mirror might have partial sstate without .sig files, triggering failures 151 # Any mirror might have partial sstate without .sig files, triggering failures
148 feature += 'SSTATE_MIRRORS_forcevariable = ""\n' 152 feature += 'SSTATE_MIRRORS:forcevariable = ""\n'
149 153
150 self.write_config(feature) 154 self.write_config(feature)
151 155
@@ -159,13 +163,13 @@ class Signing(OESelftestTestCase):
159 bitbake('-c clean %s' % test_recipe) 163 bitbake('-c clean %s' % test_recipe)
160 bitbake('-c populate_lic %s' % test_recipe) 164 bitbake('-c populate_lic %s' % test_recipe)
161 165
162 recipe_sig = glob.glob(sstatedir + '/*/*/*:ed:*_populate_lic.tgz.sig') 166 recipe_sig = glob.glob(sstatedir + '/*/*/*:ed:*_populate_lic.tar.zst.sig')
163 recipe_tgz = glob.glob(sstatedir + '/*/*/*:ed:*_populate_lic.tgz') 167 recipe_archive = glob.glob(sstatedir + '/*/*/*:ed:*_populate_lic.tar.zst')
164 168
165 self.assertEqual(len(recipe_sig), 1, 'Failed to find .sig file.') 169 self.assertEqual(len(recipe_sig), 1, 'Failed to find .sig file.')
166 self.assertEqual(len(recipe_tgz), 1, 'Failed to find .tgz file.') 170 self.assertEqual(len(recipe_archive), 1, 'Failed to find .tar.zst file.')
167 171
168 ret = runCmd('gpg --homedir %s --verify %s %s' % (self.gpg_dir, recipe_sig[0], recipe_tgz[0])) 172 ret = runCmd('gpg --homedir %s --verify %s %s' % (self.gpg_dir, recipe_sig[0], recipe_archive[0]))
169 # gpg: Signature made Thu 22 Oct 2015 01:45:09 PM EEST using RSA key ID 61EEFB30 173 # gpg: Signature made Thu 22 Oct 2015 01:45:09 PM EEST using RSA key ID 61EEFB30
170 # gpg: Good signature from "testuser (nocomment) <testuser@email.com>" 174 # gpg: Good signature from "testuser (nocomment) <testuser@email.com>"
171 self.assertIn('gpg: Good signature from', ret.output, 'Package signed incorrectly.') 175 self.assertIn('gpg: Good signature from', ret.output, 'Package signed incorrectly.')
@@ -189,7 +193,7 @@ class LockedSignatures(OESelftestTestCase):
189 193
190 bitbake(test_recipe) 194 bitbake(test_recipe)
191 # Generate locked sigs include file 195 # Generate locked sigs include file
192 bitbake('-S none %s' % test_recipe) 196 bitbake('-S lockedsigs %s' % test_recipe)
193 197
194 feature = 'require %s\n' % locked_sigs_file 198 feature = 'require %s\n' % locked_sigs_file
195 feature += 'SIGGEN_LOCKEDSIGS_TASKSIG_CHECK = "warn"\n' 199 feature += 'SIGGEN_LOCKEDSIGS_TASKSIG_CHECK = "warn"\n'
@@ -206,7 +210,7 @@ class LockedSignatures(OESelftestTestCase):
206 # Use uuid so hash equivalance server isn't triggered 210 # Use uuid so hash equivalance server isn't triggered
207 recipe_append_file = test_recipe + '_' + get_bb_var('PV', test_recipe) + '.bbappend' 211 recipe_append_file = test_recipe + '_' + get_bb_var('PV', test_recipe) + '.bbappend'
208 recipe_append_path = os.path.join(templayerdir, 'recipes-test', test_recipe, recipe_append_file) 212 recipe_append_path = os.path.join(templayerdir, 'recipes-test', test_recipe, recipe_append_file)
209 feature = 'SUMMARY_${PN} = "test locked signature%s"\n' % uuid.uuid4() 213 feature = 'SUMMARY:${PN} = "test locked signature%s"\n' % uuid.uuid4()
210 214
211 os.mkdir(os.path.join(templayerdir, 'recipes-test')) 215 os.mkdir(os.path.join(templayerdir, 'recipes-test'))
212 os.mkdir(os.path.join(templayerdir, 'recipes-test', test_recipe)) 216 os.mkdir(os.path.join(templayerdir, 'recipes-test', test_recipe))
diff --git a/meta/lib/oeqa/selftest/cases/spdx.py b/meta/lib/oeqa/selftest/cases/spdx.py
new file mode 100644
index 0000000000..8cd4e83ca2
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/spdx.py
@@ -0,0 +1,288 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import json
8import os
9import textwrap
10import hashlib
11from pathlib import Path
12from oeqa.selftest.case import OESelftestTestCase
13from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runCmd
14import oe.spdx30
15
16
17class SPDX22Check(OESelftestTestCase):
18 @classmethod
19 def setUpClass(cls):
20 super().setUpClass()
21 bitbake("python3-spdx-tools-native")
22 bitbake("-c addto_recipe_sysroot python3-spdx-tools-native")
23
24 def check_recipe_spdx(self, high_level_dir, spdx_file, target_name):
25 config = textwrap.dedent(
26 """\
27 INHERIT:remove = "create-spdx"
28 INHERIT += "create-spdx-2.2"
29 """
30 )
31 self.write_config(config)
32
33 deploy_dir = get_bb_var("DEPLOY_DIR")
34 arch_dir = get_bb_var("PACKAGE_ARCH", target_name)
35 spdx_version = get_bb_var("SPDX_VERSION")
36 # qemux86-64 creates the directory qemux86_64
37 #arch_dir = arch_var.replace("-", "_")
38
39 full_file_path = os.path.join(
40 deploy_dir, "spdx", spdx_version, arch_dir, high_level_dir, spdx_file
41 )
42
43 try:
44 os.remove(full_file_path)
45 except FileNotFoundError:
46 pass
47
48 bitbake("%s -c create_spdx" % target_name)
49
50 def check_spdx_json(filename):
51 with open(filename) as f:
52 report = json.load(f)
53 self.assertNotEqual(report, None)
54 self.assertNotEqual(report["SPDXID"], None)
55
56 python = os.path.join(
57 get_bb_var("STAGING_BINDIR", "python3-spdx-tools-native"),
58 "nativepython3",
59 )
60 validator = os.path.join(
61 get_bb_var("STAGING_BINDIR", "python3-spdx-tools-native"), "pyspdxtools"
62 )
63 result = runCmd("{} {} -i {}".format(python, validator, filename))
64
65 self.assertExists(full_file_path)
66 result = check_spdx_json(full_file_path)
67
68 def test_spdx_base_files(self):
69 self.check_recipe_spdx("packages", "base-files.spdx.json", "base-files")
70
71 def test_spdx_tar(self):
72 self.check_recipe_spdx("packages", "tar.spdx.json", "tar")
73
74
75class SPDX3CheckBase(object):
76 """
77 Base class for checking SPDX 3 based tests
78 """
79
80 def check_spdx_file(self, filename):
81 self.assertExists(filename)
82
83 # Read the file
84 objset = oe.spdx30.SHACLObjectSet()
85 with open(filename, "r") as f:
86 d = oe.spdx30.JSONLDDeserializer()
87 d.read(f, objset)
88
89 return objset
90
91 def check_recipe_spdx(self, target_name, spdx_path, *, task=None, extraconf=""):
92 config = (
93 textwrap.dedent(
94 f"""\
95 INHERIT:remove = "create-spdx"
96 INHERIT += "{self.SPDX_CLASS}"
97 """
98 )
99 + textwrap.dedent(extraconf)
100 )
101
102 self.write_config(config)
103
104 if task:
105 bitbake(f"-c {task} {target_name}")
106 else:
107 bitbake(target_name)
108
109 filename = spdx_path.format(
110 **get_bb_vars(
111 [
112 "DEPLOY_DIR_IMAGE",
113 "DEPLOY_DIR_SPDX",
114 "MACHINE",
115 "MACHINE_ARCH",
116 "SDKMACHINE",
117 "SDK_DEPLOY",
118 "SPDX_VERSION",
119 "SSTATE_PKGARCH",
120 "TOOLCHAIN_OUTPUTNAME",
121 ],
122 target_name,
123 )
124 )
125
126 return self.check_spdx_file(filename)
127
128 def check_objset_missing_ids(self, objset):
129 for o in objset.foreach_type(oe.spdx30.SpdxDocument):
130 doc = o
131 break
132 else:
133 self.assertTrue(False, "Unable to find SpdxDocument")
134
135 missing_ids = objset.missing_ids - set(i.externalSpdxId for i in doc.import_)
136 if missing_ids:
137 self.assertTrue(
138 False,
139 "The following SPDXIDs are unresolved:\n " + "\n ".join(missing_ids),
140 )
141
142
143class SPDX30Check(SPDX3CheckBase, OESelftestTestCase):
144 SPDX_CLASS = "create-spdx-3.0"
145
146 def test_base_files(self):
147 self.check_recipe_spdx(
148 "base-files",
149 "{DEPLOY_DIR_SPDX}/{MACHINE_ARCH}/packages/package-base-files.spdx.json",
150 )
151
152 def test_gcc_include_source(self):
153 objset = self.check_recipe_spdx(
154 "gcc",
155 "{DEPLOY_DIR_SPDX}/{SSTATE_PKGARCH}/recipes/recipe-gcc.spdx.json",
156 extraconf="""\
157 SPDX_INCLUDE_SOURCES = "1"
158 """,
159 )
160
161 gcc_pv = get_bb_var("PV", "gcc")
162 filename = f"gcc-{gcc_pv}/README"
163 found = False
164 for software_file in objset.foreach_type(oe.spdx30.software_File):
165 if software_file.name == filename:
166 found = True
167 self.logger.info(
168 f"The spdxId of {filename} in recipe-gcc.spdx.json is {software_file.spdxId}"
169 )
170 break
171
172 self.assertTrue(
173 found, f"Not found source file {filename} in recipe-gcc.spdx.json\n"
174 )
175
176 def test_core_image_minimal(self):
177 objset = self.check_recipe_spdx(
178 "core-image-minimal",
179 "{DEPLOY_DIR_IMAGE}/core-image-minimal-{MACHINE}.rootfs.spdx.json",
180 )
181
182 # Document should be fully linked
183 self.check_objset_missing_ids(objset)
184
185 def test_core_image_minimal_sdk(self):
186 objset = self.check_recipe_spdx(
187 "core-image-minimal",
188 "{SDK_DEPLOY}/{TOOLCHAIN_OUTPUTNAME}.spdx.json",
189 task="populate_sdk",
190 )
191
192 # Document should be fully linked
193 self.check_objset_missing_ids(objset)
194
195 def test_baremetal_helloworld(self):
196 objset = self.check_recipe_spdx(
197 "baremetal-helloworld",
198 "{DEPLOY_DIR_IMAGE}/baremetal-helloworld-image-{MACHINE}.spdx.json",
199 extraconf="""\
200 TCLIBC = "baremetal"
201 """,
202 )
203
204 # Document should be fully linked
205 self.check_objset_missing_ids(objset)
206
207 def test_extra_opts(self):
208 HOST_SPDXID = "http://foo.bar/spdx/bar2"
209
210 EXTRACONF = textwrap.dedent(
211 f"""\
212 SPDX_INVOKED_BY_name = "CI Tool"
213 SPDX_INVOKED_BY_type = "software"
214
215 SPDX_ON_BEHALF_OF_name = "John Doe"
216 SPDX_ON_BEHALF_OF_type = "person"
217 SPDX_ON_BEHALF_OF_id_email = "John.Doe@noreply.com"
218
219 SPDX_PACKAGE_SUPPLIER_name = "ACME Embedded Widgets"
220 SPDX_PACKAGE_SUPPLIER_type = "organization"
221
222 SPDX_AUTHORS += "authorA"
223 SPDX_AUTHORS_authorA_ref = "SPDX_ON_BEHALF_OF"
224
225 SPDX_BUILD_HOST = "host"
226
227 SPDX_IMPORTS += "host"
228 SPDX_IMPORTS_host_spdxid = "{HOST_SPDXID}"
229
230 SPDX_INCLUDE_BUILD_VARIABLES = "1"
231 SPDX_INCLUDE_BITBAKE_PARENT_BUILD = "1"
232 SPDX_INCLUDE_TIMESTAMPS = "1"
233
234 SPDX_PRETTY = "1"
235 """
236 )
237 extraconf_hash = hashlib.sha1(EXTRACONF.encode("utf-8")).hexdigest()
238
239 objset = self.check_recipe_spdx(
240 "core-image-minimal",
241 "{DEPLOY_DIR_IMAGE}/core-image-minimal-{MACHINE}.rootfs.spdx.json",
242 # Many SPDX variables do not trigger a rebuild, since they are
243 # intended to record information at the time of the build. As such,
244 # the extra configuration alone may not trigger a rebuild, and even
245 # if it does, the task hash won't necessarily be unique. In order
246 # to make sure rebuilds happen, but still allow these test objects
247 # to be pulled from sstate (e.g. remain reproducible), change the
248 # namespace prefix to include the hash of the extra configuration
249 extraconf=textwrap.dedent(
250 f"""\
251 SPDX_NAMESPACE_PREFIX = "http://spdx.org/spdxdocs/{extraconf_hash}"
252 """
253 )
254 + EXTRACONF,
255 )
256
257 # Document should be fully linked
258 self.check_objset_missing_ids(objset)
259
260 for o in objset.foreach_type(oe.spdx30.SoftwareAgent):
261 if o.name == "CI Tool":
262 break
263 else:
264 self.assertTrue(False, "Unable to find software tool")
265
266 for o in objset.foreach_type(oe.spdx30.Person):
267 if o.name == "John Doe":
268 break
269 else:
270 self.assertTrue(False, "Unable to find person")
271
272 for o in objset.foreach_type(oe.spdx30.Organization):
273 if o.name == "ACME Embedded Widgets":
274 break
275 else:
276 self.assertTrue(False, "Unable to find organization")
277
278 for o in objset.foreach_type(oe.spdx30.SpdxDocument):
279 doc = o
280 break
281 else:
282 self.assertTrue(False, "Unable to find SpdxDocument")
283
284 for i in doc.import_:
285 if i.externalSpdxId == HOST_SPDXID:
286 break
287 else:
288 self.assertTrue(False, "Unable to find imported Host SpdxID")
diff --git a/meta/lib/oeqa/selftest/cases/sstate.py b/meta/lib/oeqa/selftest/cases/sstate.py
deleted file mode 100644
index 80ce9e353c..0000000000
--- a/meta/lib/oeqa/selftest/cases/sstate.py
+++ /dev/null
@@ -1,67 +0,0 @@
1#
2# SPDX-License-Identifier: MIT
3#
4
5import datetime
6import unittest
7import os
8import re
9import shutil
10
11import oeqa.utils.ftools as ftools
12from oeqa.selftest.case import OESelftestTestCase
13from oeqa.utils.commands import runCmd, bitbake, get_bb_vars, get_test_layer
14
15
16class SStateBase(OESelftestTestCase):
17
18 def setUpLocal(self):
19 super(SStateBase, self).setUpLocal()
20 self.temp_sstate_location = None
21 needed_vars = ['SSTATE_DIR', 'NATIVELSBSTRING', 'TCLIBC', 'TUNE_ARCH',
22 'TOPDIR', 'TARGET_VENDOR', 'TARGET_OS']
23 bb_vars = get_bb_vars(needed_vars)
24 self.sstate_path = bb_vars['SSTATE_DIR']
25 self.hostdistro = bb_vars['NATIVELSBSTRING']
26 self.tclibc = bb_vars['TCLIBC']
27 self.tune_arch = bb_vars['TUNE_ARCH']
28 self.topdir = bb_vars['TOPDIR']
29 self.target_vendor = bb_vars['TARGET_VENDOR']
30 self.target_os = bb_vars['TARGET_OS']
31 self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro)
32
33 # Creates a special sstate configuration with the option to add sstate mirrors
34 def config_sstate(self, temp_sstate_location=False, add_local_mirrors=[]):
35 self.temp_sstate_location = temp_sstate_location
36
37 if self.temp_sstate_location:
38 temp_sstate_path = os.path.join(self.builddir, "temp_sstate_%s" % datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
39 config_temp_sstate = "SSTATE_DIR = \"%s\"" % temp_sstate_path
40 self.append_config(config_temp_sstate)
41 self.track_for_cleanup(temp_sstate_path)
42 bb_vars = get_bb_vars(['SSTATE_DIR', 'NATIVELSBSTRING'])
43 self.sstate_path = bb_vars['SSTATE_DIR']
44 self.hostdistro = bb_vars['NATIVELSBSTRING']
45 self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro)
46
47 if add_local_mirrors:
48 config_set_sstate_if_not_set = 'SSTATE_MIRRORS ?= ""'
49 self.append_config(config_set_sstate_if_not_set)
50 for local_mirror in add_local_mirrors:
51 self.assertFalse(os.path.join(local_mirror) == os.path.join(self.sstate_path), msg='Cannot add the current sstate path as a sstate mirror')
52 config_sstate_mirror = "SSTATE_MIRRORS += \"file://.* file:///%s/PATH\"" % local_mirror
53 self.append_config(config_sstate_mirror)
54
55 # Returns a list containing sstate files
56 def search_sstate(self, filename_regex, distro_specific=True, distro_nonspecific=True):
57 result = []
58 for root, dirs, files in os.walk(self.sstate_path):
59 if distro_specific and re.search(r"%s/%s/[a-z0-9]{2}/[a-z0-9]{2}$" % (self.sstate_path, self.hostdistro), root):
60 for f in files:
61 if re.search(filename_regex, f):
62 result.append(f)
63 if distro_nonspecific and re.search(r"%s/[a-z0-9]{2}/[a-z0-9]{2}$" % self.sstate_path, root):
64 for f in files:
65 if re.search(filename_regex, f):
66 result.append(f)
67 return result
diff --git a/meta/lib/oeqa/selftest/cases/sstatetests.py b/meta/lib/oeqa/selftest/cases/sstatetests.py
index c46e8ba489..08f94b168a 100644
--- a/meta/lib/oeqa/selftest/cases/sstatetests.py
+++ b/meta/lib/oeqa/selftest/cases/sstatetests.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -7,17 +9,205 @@ import shutil
7import glob 9import glob
8import subprocess 10import subprocess
9import tempfile 11import tempfile
12import datetime
13import re
10 14
15from oeqa.utils.commands import runCmd, bitbake, get_bb_var, create_temp_layer, get_bb_vars
11from oeqa.selftest.case import OESelftestTestCase 16from oeqa.selftest.case import OESelftestTestCase
12from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_test_layer, create_temp_layer 17from oeqa.core.decorator import OETestTag
13from oeqa.selftest.cases.sstate import SStateBase
14 18
19import oe
15import bb.siggen 20import bb.siggen
16 21
22# Set to True to preserve stamp files after test execution for debugging failures
23keep_temp_files = False
24
25class SStateBase(OESelftestTestCase):
26
27 def setUpLocal(self):
28 super(SStateBase, self).setUpLocal()
29 self.temp_sstate_location = None
30 needed_vars = ['SSTATE_DIR', 'TCLIBC', 'TUNE_ARCH',
31 'TOPDIR', 'TARGET_VENDOR', 'TARGET_OS']
32 bb_vars = get_bb_vars(needed_vars)
33 self.sstate_path = bb_vars['SSTATE_DIR']
34 self.tclibc = bb_vars['TCLIBC']
35 self.tune_arch = bb_vars['TUNE_ARCH']
36 self.topdir = bb_vars['TOPDIR']
37 self.target_vendor = bb_vars['TARGET_VENDOR']
38 self.target_os = bb_vars['TARGET_OS']
39
40 def track_for_cleanup(self, path):
41 if not keep_temp_files:
42 super().track_for_cleanup(path)
43
44 # Creates a special sstate configuration with the option to add sstate mirrors
45 def config_sstate(self, temp_sstate_location=False, add_local_mirrors=[]):
46 self.temp_sstate_location = temp_sstate_location
47
48 if self.temp_sstate_location:
49 temp_sstate_path = os.path.join(self.builddir, "temp_sstate_%s" % datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
50 config_temp_sstate = "SSTATE_DIR = \"%s\"" % temp_sstate_path
51 self.append_config(config_temp_sstate)
52 self.track_for_cleanup(temp_sstate_path)
53 self.sstate_path = get_bb_var('SSTATE_DIR')
54
55 if add_local_mirrors:
56 config_set_sstate_if_not_set = 'SSTATE_MIRRORS ?= ""'
57 self.append_config(config_set_sstate_if_not_set)
58 for local_mirror in add_local_mirrors:
59 self.assertFalse(os.path.join(local_mirror) == os.path.join(self.sstate_path), msg='Cannot add the current sstate path as a sstate mirror')
60 config_sstate_mirror = "SSTATE_MIRRORS += \"file://.* file:///%s/PATH\"" % local_mirror
61 self.append_config(config_sstate_mirror)
62
63 def set_hostdistro(self):
64 # This needs to be read after a BuildStarted event in case it gets changed by event
65 # handling in uninative.bbclass
66 self.hostdistro = get_bb_var('NATIVELSBSTRING')
67 self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro)
68
69 # Returns a list containing sstate files
70 def search_sstate(self, filename_regex, distro_specific=True, distro_nonspecific=True):
71 self.set_hostdistro()
72
73 result = []
74 for root, dirs, files in os.walk(self.sstate_path):
75 if distro_specific and re.search(r"%s/%s/[a-z0-9]{2}/[a-z0-9]{2}$" % (self.sstate_path, self.hostdistro), root):
76 for f in files:
77 if re.search(filename_regex, f):
78 result.append(f)
79 if distro_nonspecific and re.search(r"%s/[a-z0-9]{2}/[a-z0-9]{2}$" % self.sstate_path, root):
80 for f in files:
81 if re.search(filename_regex, f):
82 result.append(f)
83 return result
84
85 # Test sstate files creation and their location and directory perms
86 def run_test_sstate_creation(self, targets, hostdistro_specific):
87 self.config_sstate(True, [self.sstate_path])
88
89 bitbake(['-cclean'] + targets)
90
91 # Set it to a umask we know will be 'wrong'
92 with bb.utils.umask(0o022):
93 bitbake(targets)
94
95 # Distro specific files
96 distro_specific_files = self.search_sstate('|'.join(map(str, targets)), True, False)
97
98 # Distro non-specific
99 distro_non_specific_files = []
100 results = self.search_sstate('|'.join(map(str, targets)), False, True)
101 for r in results:
102 if r.endswith(("_populate_lic.tar.zst", "_populate_lic.tar.zst.siginfo", "_fetch.tar.zst.siginfo", "_unpack.tar.zst.siginfo", "_patch.tar.zst.siginfo")):
103 continue
104 distro_non_specific_files.append(r)
105
106 if hostdistro_specific:
107 self.assertTrue(distro_specific_files , msg="Could not find sstate files for: %s" % ', '.join(map(str, targets)))
108 self.assertFalse(distro_non_specific_files, msg="Found sstate files in the wrong place for: %s (found %s)" % (', '.join(map(str, targets)), str(distro_non_specific_files)))
109 else:
110 self.assertTrue(distro_non_specific_files , msg="Could not find sstate files for: %s" % ', '.join(map(str, targets)))
111 self.assertFalse(distro_specific_files, msg="Found sstate files in the wrong place for: %s (found %s)" % (', '.join(map(str, targets)), str(distro_specific_files)))
112
113 # Now we'll walk the tree to check the mode and see if things are incorrect.
114 badperms = []
115 for root, dirs, files in os.walk(self.sstate_path):
116 for directory in dirs:
117 mode = os.stat(os.path.join(root, directory)).st_mode & 0o777
118 if mode != 0o775:
119 badperms.append("%s: %s vs %s" % (os.path.join(root, directory), mode, 0o775))
120
121 # Check badperms is empty
122 self.assertFalse(badperms , msg="Found sstate directories with the wrong permissions: %s (found %s)" % (', '.join(map(str, targets)), str(badperms)))
123
124 # Test the sstate files deletion part of the do_cleansstate task
125 def run_test_cleansstate_task(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True):
126 self.config_sstate(temp_sstate_location, [self.sstate_path])
127
128 bitbake(['-ccleansstate'] + targets)
129
130 bitbake(targets)
131 archives_created = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific, distro_nonspecific)
132 self.assertTrue(archives_created, msg="Could not find sstate .tar.zst files for: %s (%s)" % (', '.join(map(str, targets)), str(archives_created)))
133
134 siginfo_created = self.search_sstate('|'.join(map(str, [s + r'.*?\.siginfo$' for s in targets])), distro_specific, distro_nonspecific)
135 self.assertTrue(siginfo_created, msg="Could not find sstate .siginfo files for: %s (%s)" % (', '.join(map(str, targets)), str(siginfo_created)))
136
137 bitbake(['-ccleansstate'] + targets)
138 archives_removed = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific, distro_nonspecific)
139 self.assertTrue(not archives_removed, msg="do_cleansstate didn't remove .tar.zst sstate files for: %s (%s)" % (', '.join(map(str, targets)), str(archives_removed)))
140
141 # Test rebuilding of distro-specific sstate files
142 def run_test_rebuild_distro_specific_sstate(self, targets, temp_sstate_location=True):
143 self.config_sstate(temp_sstate_location, [self.sstate_path])
144
145 bitbake(['-ccleansstate'] + targets)
146
147 self.set_hostdistro()
148
149 bitbake(targets)
150 results = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific=False, distro_nonspecific=True)
151 filtered_results = []
152 for r in results:
153 if r.endswith(("_populate_lic.tar.zst", "_populate_lic.tar.zst.siginfo")):
154 continue
155 filtered_results.append(r)
156 self.assertTrue(filtered_results == [], msg="Found distro non-specific sstate for: %s (%s)" % (', '.join(map(str, targets)), str(filtered_results)))
157 file_tracker_1 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific=True, distro_nonspecific=False)
158 self.assertTrue(len(file_tracker_1) >= len(targets), msg = "Not all sstate files were created for: %s" % ', '.join(map(str, targets)))
159
160 self.track_for_cleanup(self.distro_specific_sstate + "_old")
161 shutil.copytree(self.distro_specific_sstate, self.distro_specific_sstate + "_old")
162 shutil.rmtree(self.distro_specific_sstate)
163
164 bitbake(['-cclean'] + targets)
165 bitbake(targets)
166 file_tracker_2 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific=True, distro_nonspecific=False)
167 self.assertTrue(len(file_tracker_2) >= len(targets), msg = "Not all sstate files were created for: %s" % ', '.join(map(str, targets)))
168
169 not_recreated = [x for x in file_tracker_1 if x not in file_tracker_2]
170 self.assertTrue(not_recreated == [], msg="The following sstate files were not recreated: %s" % ', '.join(map(str, not_recreated)))
171
172 created_once = [x for x in file_tracker_2 if x not in file_tracker_1]
173 self.assertTrue(created_once == [], msg="The following sstate files were created only in the second run: %s" % ', '.join(map(str, created_once)))
174
175 def sstate_common_samesigs(self, configA, configB, allarch=False):
176
177 self.write_config(configA)
178 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
179 bitbake("world meta-toolchain -S none")
180 self.write_config(configB)
181 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
182 bitbake("world meta-toolchain -S none")
183
184 def get_files(d, result):
185 for root, dirs, files in os.walk(d):
186 for name in files:
187 if "meta-environment" in root or "cross-canadian" in root:
188 continue
189 if "do_build" not in name:
190 # 1.4.1+gitAUTOINC+302fca9f4c-r0.do_package_write_ipk.sigdata.f3a2a38697da743f0dbed8b56aafcf79
191 (_, task, _, shash) = name.rsplit(".", 3)
192 result[os.path.join(os.path.basename(root), task)] = shash
193
194 files1 = {}
195 files2 = {}
196 subdirs = sorted(glob.glob(self.topdir + "/tmp-sstatesamehash/stamps/*-nativesdk*-linux"))
197 if allarch:
198 subdirs.extend(sorted(glob.glob(self.topdir + "/tmp-sstatesamehash/stamps/all-*-linux")))
199
200 for subdir in subdirs:
201 nativesdkdir = os.path.basename(subdir)
202 get_files(self.topdir + "/tmp-sstatesamehash/stamps/" + nativesdkdir, files1)
203 get_files(self.topdir + "/tmp-sstatesamehash2/stamps/" + nativesdkdir, files2)
204
205 self.maxDiff = None
206 self.assertEqual(files1, files2)
207
17class SStateTests(SStateBase): 208class SStateTests(SStateBase):
18 def test_autorev_sstate_works(self): 209 def test_autorev_sstate_works(self):
19 # Test that a git repository which changes is correctly handled by SRCREV = ${AUTOREV} 210 # Test that a git repository which changes is correctly handled by SRCREV = ${AUTOREV}
20 # when PV does not contain SRCPV
21 211
22 tempdir = tempfile.mkdtemp(prefix='sstate_autorev') 212 tempdir = tempfile.mkdtemp(prefix='sstate_autorev')
23 tempdldir = tempfile.mkdtemp(prefix='sstate_autorev_dldir') 213 tempdldir = tempfile.mkdtemp(prefix='sstate_autorev_dldir')
@@ -39,7 +229,7 @@ class SStateTests(SStateBase):
39 229
40 recipefile = os.path.join(tempdir, "recipes-test", "dbus-wait-test", 'dbus-wait-test_git.bb') 230 recipefile = os.path.join(tempdir, "recipes-test", "dbus-wait-test", 'dbus-wait-test_git.bb')
41 os.makedirs(os.path.dirname(recipefile)) 231 os.makedirs(os.path.dirname(recipefile))
42 srcuri = 'git://' + srcdir + ';protocol=file' 232 srcuri = 'git://' + srcdir + ';protocol=file;branch=master'
43 result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri]) 233 result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri])
44 self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output) 234 self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output)
45 235
@@ -53,61 +243,14 @@ class SStateTests(SStateBase):
53 result = runCmd('git add bar.txt; git commit -asm "add bar"', cwd=srcdir) 243 result = runCmd('git add bar.txt; git commit -asm "add bar"', cwd=srcdir)
54 bitbake("dbus-wait-test -c unpack") 244 bitbake("dbus-wait-test -c unpack")
55 245
246class SStateCreation(SStateBase):
247 def test_sstate_creation_distro_specific(self):
248 self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], hostdistro_specific=True)
56 249
57 # Test sstate files creation and their location 250 def test_sstate_creation_distro_nonspecific(self):
58 def run_test_sstate_creation(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True, should_pass=True): 251 self.run_test_sstate_creation(['linux-libc-headers'], hostdistro_specific=False)
59 self.config_sstate(temp_sstate_location, [self.sstate_path])
60
61 if self.temp_sstate_location:
62 bitbake(['-cclean'] + targets)
63 else:
64 bitbake(['-ccleansstate'] + targets)
65
66 bitbake(targets)
67 file_tracker = []
68 results = self.search_sstate('|'.join(map(str, targets)), distro_specific, distro_nonspecific)
69 if distro_nonspecific:
70 for r in results:
71 if r.endswith(("_populate_lic.tgz", "_populate_lic.tgz.siginfo", "_fetch.tgz.siginfo", "_unpack.tgz.siginfo", "_patch.tgz.siginfo")):
72 continue
73 file_tracker.append(r)
74 else:
75 file_tracker = results
76
77 if should_pass:
78 self.assertTrue(file_tracker , msg="Could not find sstate files for: %s" % ', '.join(map(str, targets)))
79 else:
80 self.assertTrue(not file_tracker , msg="Found sstate files in the wrong place for: %s (found %s)" % (', '.join(map(str, targets)), str(file_tracker)))
81
82 def test_sstate_creation_distro_specific_pass(self):
83 self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
84
85 def test_sstate_creation_distro_specific_fail(self):
86 self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True, should_pass=False)
87
88 def test_sstate_creation_distro_nonspecific_pass(self):
89 self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
90
91 def test_sstate_creation_distro_nonspecific_fail(self):
92 self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True, should_pass=False)
93
94 # Test the sstate files deletion part of the do_cleansstate task
95 def run_test_cleansstate_task(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True):
96 self.config_sstate(temp_sstate_location, [self.sstate_path])
97
98 bitbake(['-ccleansstate'] + targets)
99
100 bitbake(targets)
101 tgz_created = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific, distro_nonspecific)
102 self.assertTrue(tgz_created, msg="Could not find sstate .tgz files for: %s (%s)" % (', '.join(map(str, targets)), str(tgz_created)))
103
104 siginfo_created = self.search_sstate('|'.join(map(str, [s + r'.*?\.siginfo$' for s in targets])), distro_specific, distro_nonspecific)
105 self.assertTrue(siginfo_created, msg="Could not find sstate .siginfo files for: %s (%s)" % (', '.join(map(str, targets)), str(siginfo_created)))
106
107 bitbake(['-ccleansstate'] + targets)
108 tgz_removed = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific, distro_nonspecific)
109 self.assertTrue(not tgz_removed, msg="do_cleansstate didn't remove .tgz sstate files for: %s (%s)" % (', '.join(map(str, targets)), str(tgz_removed)))
110 252
253class SStateCleanup(SStateBase):
111 def test_cleansstate_task_distro_specific_nonspecific(self): 254 def test_cleansstate_task_distro_specific_nonspecific(self):
112 targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native'] 255 targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native']
113 targets.append('linux-libc-headers') 256 targets.append('linux-libc-headers')
@@ -121,39 +264,7 @@ class SStateTests(SStateBase):
121 targets.append('linux-libc-headers') 264 targets.append('linux-libc-headers')
122 self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=False, temp_sstate_location=True) 265 self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
123 266
124 267class SStateDistroTests(SStateBase):
125 # Test rebuilding of distro-specific sstate files
126 def run_test_rebuild_distro_specific_sstate(self, targets, temp_sstate_location=True):
127 self.config_sstate(temp_sstate_location, [self.sstate_path])
128
129 bitbake(['-ccleansstate'] + targets)
130
131 bitbake(targets)
132 results = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific=False, distro_nonspecific=True)
133 filtered_results = []
134 for r in results:
135 if r.endswith(("_populate_lic.tgz", "_populate_lic.tgz.siginfo")):
136 continue
137 filtered_results.append(r)
138 self.assertTrue(filtered_results == [], msg="Found distro non-specific sstate for: %s (%s)" % (', '.join(map(str, targets)), str(filtered_results)))
139 file_tracker_1 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific=True, distro_nonspecific=False)
140 self.assertTrue(len(file_tracker_1) >= len(targets), msg = "Not all sstate files ware created for: %s" % ', '.join(map(str, targets)))
141
142 self.track_for_cleanup(self.distro_specific_sstate + "_old")
143 shutil.copytree(self.distro_specific_sstate, self.distro_specific_sstate + "_old")
144 shutil.rmtree(self.distro_specific_sstate)
145
146 bitbake(['-cclean'] + targets)
147 bitbake(targets)
148 file_tracker_2 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific=True, distro_nonspecific=False)
149 self.assertTrue(len(file_tracker_2) >= len(targets), msg = "Not all sstate files ware created for: %s" % ', '.join(map(str, targets)))
150
151 not_recreated = [x for x in file_tracker_1 if x not in file_tracker_2]
152 self.assertTrue(not_recreated == [], msg="The following sstate files ware not recreated: %s" % ', '.join(map(str, not_recreated)))
153
154 created_once = [x for x in file_tracker_2 if x not in file_tracker_1]
155 self.assertTrue(created_once == [], msg="The following sstate files ware created only in the second run: %s" % ', '.join(map(str, created_once)))
156
157 def test_rebuild_distro_specific_sstate_cross_native_targets(self): 268 def test_rebuild_distro_specific_sstate_cross_native_targets(self):
158 self.run_test_rebuild_distro_specific_sstate(['binutils-cross-' + self.tune_arch, 'binutils-native'], temp_sstate_location=True) 269 self.run_test_rebuild_distro_specific_sstate(['binutils-cross-' + self.tune_arch, 'binutils-native'], temp_sstate_location=True)
159 270
@@ -163,48 +274,48 @@ class SStateTests(SStateBase):
163 def test_rebuild_distro_specific_sstate_native_target(self): 274 def test_rebuild_distro_specific_sstate_native_target(self):
164 self.run_test_rebuild_distro_specific_sstate(['binutils-native'], temp_sstate_location=True) 275 self.run_test_rebuild_distro_specific_sstate(['binutils-native'], temp_sstate_location=True)
165 276
166 277class SStateCacheManagement(SStateBase):
167 # Test the sstate-cache-management script. Each element in the global_config list is used with the corresponding element in the target_config list 278 # Test the sstate-cache-management script. Each element in the global_config list is used with the corresponding element in the target_config list
168 # global_config elements are expected to not generate any sstate files that would be removed by sstate-cache-management.sh (such as changing the value of MACHINE) 279 # global_config elements are expected to not generate any sstate files that would be removed by sstate-cache-management.py (such as changing the value of MACHINE)
169 def run_test_sstate_cache_management_script(self, target, global_config=[''], target_config=[''], ignore_patterns=[]): 280 def run_test_sstate_cache_management_script(self, target, global_config=[''], target_config=[''], ignore_patterns=[]):
170 self.assertTrue(global_config) 281 self.assertTrue(global_config)
171 self.assertTrue(target_config) 282 self.assertTrue(target_config)
172 self.assertTrue(len(global_config) == len(target_config), msg='Lists global_config and target_config should have the same number of elements') 283 self.assertTrue(len(global_config) == len(target_config), msg='Lists global_config and target_config should have the same number of elements')
173 self.config_sstate(temp_sstate_location=True, add_local_mirrors=[self.sstate_path])
174 284
175 # If buildhistory is enabled, we need to disable version-going-backwards 285 for idx in range(len(target_config)):
176 # QA checks for this test. It may report errors otherwise. 286 self.append_config(global_config[idx])
177 self.append_config('ERROR_QA_remove = "version-going-backwards"') 287 self.append_recipeinc(target, target_config[idx])
288 bitbake(target)
289 self.remove_config(global_config[idx])
290 self.remove_recipeinc(target, target_config[idx])
178 291
179 # For not this only checks if random sstate tasks are handled correctly as a group. 292 self.config_sstate(temp_sstate_location=True, add_local_mirrors=[self.sstate_path])
293
294 # For now this only checks if random sstate tasks are handled correctly as a group.
180 # In the future we should add control over what tasks we check for. 295 # In the future we should add control over what tasks we check for.
181 296
182 sstate_archs_list = []
183 expected_remaining_sstate = [] 297 expected_remaining_sstate = []
184 for idx in range(len(target_config)): 298 for idx in range(len(target_config)):
185 self.append_config(global_config[idx]) 299 self.append_config(global_config[idx])
186 self.append_recipeinc(target, target_config[idx]) 300 self.append_recipeinc(target, target_config[idx])
187 sstate_arch = get_bb_var('SSTATE_PKGARCH', target)
188 if not sstate_arch in sstate_archs_list:
189 sstate_archs_list.append(sstate_arch)
190 if target_config[idx] == target_config[-1]: 301 if target_config[idx] == target_config[-1]:
191 target_sstate_before_build = self.search_sstate(target + r'.*?\.tgz$') 302 target_sstate_before_build = self.search_sstate(target + r'.*?\.tar.zst$')
192 bitbake("-cclean %s" % target) 303 bitbake("-cclean %s" % target)
193 result = bitbake(target, ignore_status=True) 304 result = bitbake(target, ignore_status=True)
194 if target_config[idx] == target_config[-1]: 305 if target_config[idx] == target_config[-1]:
195 target_sstate_after_build = self.search_sstate(target + r'.*?\.tgz$') 306 target_sstate_after_build = self.search_sstate(target + r'.*?\.tar.zst$')
196 expected_remaining_sstate += [x for x in target_sstate_after_build if x not in target_sstate_before_build if not any(pattern in x for pattern in ignore_patterns)] 307 expected_remaining_sstate += [x for x in target_sstate_after_build if x not in target_sstate_before_build if not any(pattern in x for pattern in ignore_patterns)]
197 self.remove_config(global_config[idx]) 308 self.remove_config(global_config[idx])
198 self.remove_recipeinc(target, target_config[idx]) 309 self.remove_recipeinc(target, target_config[idx])
199 self.assertEqual(result.status, 0, msg = "build of %s failed with %s" % (target, result.output)) 310 self.assertEqual(result.status, 0, msg = "build of %s failed with %s" % (target, result.output))
200 311
201 runCmd("sstate-cache-management.sh -y --cache-dir=%s --remove-duplicated --extra-archs=%s" % (self.sstate_path, ','.join(map(str, sstate_archs_list)))) 312 runCmd("sstate-cache-management.py -y --cache-dir=%s --remove-duplicated" % (self.sstate_path))
202 actual_remaining_sstate = [x for x in self.search_sstate(target + r'.*?\.tgz$') if not any(pattern in x for pattern in ignore_patterns)] 313 actual_remaining_sstate = [x for x in self.search_sstate(target + r'.*?\.tar.zst$') if not any(pattern in x for pattern in ignore_patterns)]
203 314
204 actual_not_expected = [x for x in actual_remaining_sstate if x not in expected_remaining_sstate] 315 actual_not_expected = [x for x in actual_remaining_sstate if x not in expected_remaining_sstate]
205 self.assertFalse(actual_not_expected, msg="Files should have been removed but ware not: %s" % ', '.join(map(str, actual_not_expected))) 316 self.assertFalse(actual_not_expected, msg="Files should have been removed but were not: %s" % ', '.join(map(str, actual_not_expected)))
206 expected_not_actual = [x for x in expected_remaining_sstate if x not in actual_remaining_sstate] 317 expected_not_actual = [x for x in expected_remaining_sstate if x not in actual_remaining_sstate]
207 self.assertFalse(expected_not_actual, msg="Extra files ware removed: %s" ', '.join(map(str, expected_not_actual))) 318 self.assertFalse(expected_not_actual, msg="Extra files were removed: %s" ', '.join(map(str, expected_not_actual)))
208 319
209 def test_sstate_cache_management_script_using_pr_1(self): 320 def test_sstate_cache_management_script_using_pr_1(self):
210 global_config = [] 321 global_config = []
@@ -242,18 +353,12 @@ class SStateTests(SStateBase):
242 target_config.append('') 353 target_config.append('')
243 self.run_test_sstate_cache_management_script('m4', global_config, target_config, ignore_patterns=['populate_lic']) 354 self.run_test_sstate_cache_management_script('m4', global_config, target_config, ignore_patterns=['populate_lic'])
244 355
245 def test_sstate_32_64_same_hash(self): 356class SStateHashSameSigs(SStateBase):
246 """ 357 def sstate_hashtest(self, sdkmachine):
247 The sstate checksums for both native and target should not vary whether
248 they're built on a 32 or 64 bit system. Rather than requiring two different
249 build machines and running a builds, override the variables calling uname()
250 manually and check using bitbake -S.
251 """
252 358
253 self.write_config(""" 359 self.write_config("""
254MACHINE = "qemux86" 360MACHINE = "qemux86"
255TMPDIR = "${TOPDIR}/tmp-sstatesamehash" 361TMPDIR = "${TOPDIR}/tmp-sstatesamehash"
256TCLIBCAPPEND = ""
257BUILD_ARCH = "x86_64" 362BUILD_ARCH = "x86_64"
258BUILD_OS = "linux" 363BUILD_OS = "linux"
259SDKMACHINE = "x86_64" 364SDKMACHINE = "x86_64"
@@ -261,24 +366,23 @@ PACKAGE_CLASSES = "package_rpm package_ipk package_deb"
261BB_SIGNATURE_HANDLER = "OEBasicHash" 366BB_SIGNATURE_HANDLER = "OEBasicHash"
262""") 367""")
263 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") 368 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
264 bitbake("core-image-sato -S none") 369 bitbake("core-image-weston -S none")
265 self.write_config(""" 370 self.write_config("""
266MACHINE = "qemux86" 371MACHINE = "qemux86"
267TMPDIR = "${TOPDIR}/tmp-sstatesamehash2" 372TMPDIR = "${TOPDIR}/tmp-sstatesamehash2"
268TCLIBCAPPEND = ""
269BUILD_ARCH = "i686" 373BUILD_ARCH = "i686"
270BUILD_OS = "linux" 374BUILD_OS = "linux"
271SDKMACHINE = "i686" 375SDKMACHINE = "%s"
272PACKAGE_CLASSES = "package_rpm package_ipk package_deb" 376PACKAGE_CLASSES = "package_rpm package_ipk package_deb"
273BB_SIGNATURE_HANDLER = "OEBasicHash" 377BB_SIGNATURE_HANDLER = "OEBasicHash"
274""") 378""" % sdkmachine)
275 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2") 379 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
276 bitbake("core-image-sato -S none") 380 bitbake("core-image-weston -S none")
277 381
278 def get_files(d): 382 def get_files(d):
279 f = [] 383 f = []
280 for root, dirs, files in os.walk(d): 384 for root, dirs, files in os.walk(d):
281 if "core-image-sato" in root: 385 if "core-image-weston" in root:
282 # SDKMACHINE changing will change 386 # SDKMACHINE changing will change
283 # do_rootfs/do_testimage/do_build stamps of images which 387 # do_rootfs/do_testimage/do_build stamps of images which
284 # is safe to ignore. 388 # is safe to ignore.
@@ -291,6 +395,20 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
291 self.maxDiff = None 395 self.maxDiff = None
292 self.assertCountEqual(files1, files2) 396 self.assertCountEqual(files1, files2)
293 397
398 def test_sstate_32_64_same_hash(self):
399 """
400 The sstate checksums for both native and target should not vary whether
401 they're built on a 32 or 64 bit system. Rather than requiring two different
402 build machines and running a builds, override the variables calling uname()
403 manually and check using bitbake -S.
404 """
405 self.sstate_hashtest("i686")
406
407 def test_sstate_sdk_arch_same_hash(self):
408 """
409 Similarly, test an arm SDK has the same hashes
410 """
411 self.sstate_hashtest("aarch64")
294 412
295 def test_sstate_nativelsbstring_same_hash(self): 413 def test_sstate_nativelsbstring_same_hash(self):
296 """ 414 """
@@ -301,20 +419,18 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
301 419
302 self.write_config(""" 420 self.write_config("""
303TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" 421TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
304TCLIBCAPPEND = \"\"
305NATIVELSBSTRING = \"DistroA\" 422NATIVELSBSTRING = \"DistroA\"
306BB_SIGNATURE_HANDLER = "OEBasicHash" 423BB_SIGNATURE_HANDLER = "OEBasicHash"
307""") 424""")
308 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") 425 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
309 bitbake("core-image-sato -S none") 426 bitbake("core-image-weston -S none")
310 self.write_config(""" 427 self.write_config("""
311TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" 428TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
312TCLIBCAPPEND = \"\"
313NATIVELSBSTRING = \"DistroB\" 429NATIVELSBSTRING = \"DistroB\"
314BB_SIGNATURE_HANDLER = "OEBasicHash" 430BB_SIGNATURE_HANDLER = "OEBasicHash"
315""") 431""")
316 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2") 432 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
317 bitbake("core-image-sato -S none") 433 bitbake("core-image-weston -S none")
318 434
319 def get_files(d): 435 def get_files(d):
320 f = [] 436 f = []
@@ -327,6 +443,7 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
327 self.maxDiff = None 443 self.maxDiff = None
328 self.assertCountEqual(files1, files2) 444 self.assertCountEqual(files1, files2)
329 445
446class SStateHashSameSigs2(SStateBase):
330 def test_sstate_allarch_samesigs(self): 447 def test_sstate_allarch_samesigs(self):
331 """ 448 """
332 The sstate checksums of allarch packages should be independent of whichever 449 The sstate checksums of allarch packages should be independent of whichever
@@ -337,17 +454,19 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
337 454
338 configA = """ 455 configA = """
339TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" 456TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
340TCLIBCAPPEND = \"\"
341MACHINE = \"qemux86-64\" 457MACHINE = \"qemux86-64\"
342BB_SIGNATURE_HANDLER = "OEBasicHash" 458BB_SIGNATURE_HANDLER = "OEBasicHash"
343""" 459"""
460 #OLDEST_KERNEL is arch specific so set to a different value here for testing
344 configB = """ 461 configB = """
345TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" 462TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
346TCLIBCAPPEND = \"\"
347MACHINE = \"qemuarm\" 463MACHINE = \"qemuarm\"
464OLDEST_KERNEL = \"3.3.0\"
348BB_SIGNATURE_HANDLER = "OEBasicHash" 465BB_SIGNATURE_HANDLER = "OEBasicHash"
466ERROR_QA:append = " somenewoption"
467WARN_QA:append = " someotheroption"
349""" 468"""
350 self.sstate_allarch_samesigs(configA, configB) 469 self.sstate_common_samesigs(configA, configB, allarch=True)
351 470
352 def test_sstate_nativesdk_samesigs_multilib(self): 471 def test_sstate_nativesdk_samesigs_multilib(self):
353 """ 472 """
@@ -356,51 +475,22 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
356 475
357 configA = """ 476 configA = """
358TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" 477TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
359TCLIBCAPPEND = \"\"
360MACHINE = \"qemux86-64\" 478MACHINE = \"qemux86-64\"
361require conf/multilib.conf 479require conf/multilib.conf
362MULTILIBS = \"multilib:lib32\" 480MULTILIBS = \"multilib:lib32\"
363DEFAULTTUNE_virtclass-multilib-lib32 = \"x86\" 481DEFAULTTUNE:virtclass-multilib-lib32 = \"x86\"
364BB_SIGNATURE_HANDLER = "OEBasicHash" 482BB_SIGNATURE_HANDLER = "OEBasicHash"
365""" 483"""
366 configB = """ 484 configB = """
367TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" 485TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
368TCLIBCAPPEND = \"\"
369MACHINE = \"qemuarm\" 486MACHINE = \"qemuarm\"
370require conf/multilib.conf 487require conf/multilib.conf
371MULTILIBS = \"\" 488MULTILIBS = \"\"
372BB_SIGNATURE_HANDLER = "OEBasicHash" 489BB_SIGNATURE_HANDLER = "OEBasicHash"
373""" 490"""
374 self.sstate_allarch_samesigs(configA, configB) 491 self.sstate_common_samesigs(configA, configB)
375
376 def sstate_allarch_samesigs(self, configA, configB):
377
378 self.write_config(configA)
379 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
380 bitbake("world meta-toolchain -S none")
381 self.write_config(configB)
382 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
383 bitbake("world meta-toolchain -S none")
384
385 def get_files(d):
386 f = {}
387 for root, dirs, files in os.walk(d):
388 for name in files:
389 if "meta-environment" in root or "cross-canadian" in root:
390 continue
391 if "do_build" not in name:
392 # 1.4.1+gitAUTOINC+302fca9f4c-r0.do_package_write_ipk.sigdata.f3a2a38697da743f0dbed8b56aafcf79
393 (_, task, _, shash) = name.rsplit(".", 3)
394 f[os.path.join(os.path.basename(root), task)] = shash
395 return f
396
397 nativesdkdir = os.path.basename(glob.glob(self.topdir + "/tmp-sstatesamehash/stamps/*-nativesdk*-linux")[0])
398
399 files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps/" + nativesdkdir)
400 files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps/" + nativesdkdir)
401 self.maxDiff = None
402 self.assertEqual(files1, files2)
403 492
493class SStateHashSameSigs3(SStateBase):
404 def test_sstate_sametune_samesigs(self): 494 def test_sstate_sametune_samesigs(self):
405 """ 495 """
406 The sstate checksums of two identical machines (using the same tune) should be the 496 The sstate checksums of two identical machines (using the same tune) should be the
@@ -410,22 +500,20 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
410 500
411 self.write_config(""" 501 self.write_config("""
412TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" 502TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
413TCLIBCAPPEND = \"\"
414MACHINE = \"qemux86\" 503MACHINE = \"qemux86\"
415require conf/multilib.conf 504require conf/multilib.conf
416MULTILIBS = "multilib:lib32" 505MULTILIBS = "multilib:lib32"
417DEFAULTTUNE_virtclass-multilib-lib32 = "x86" 506DEFAULTTUNE:virtclass-multilib-lib32 = "x86"
418BB_SIGNATURE_HANDLER = "OEBasicHash" 507BB_SIGNATURE_HANDLER = "OEBasicHash"
419""") 508""")
420 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") 509 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
421 bitbake("world meta-toolchain -S none") 510 bitbake("world meta-toolchain -S none")
422 self.write_config(""" 511 self.write_config("""
423TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" 512TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
424TCLIBCAPPEND = \"\"
425MACHINE = \"qemux86copy\" 513MACHINE = \"qemux86copy\"
426require conf/multilib.conf 514require conf/multilib.conf
427MULTILIBS = "multilib:lib32" 515MULTILIBS = "multilib:lib32"
428DEFAULTTUNE_virtclass-multilib-lib32 = "x86" 516DEFAULTTUNE:virtclass-multilib-lib32 = "x86"
429BB_SIGNATURE_HANDLER = "OEBasicHash" 517BB_SIGNATURE_HANDLER = "OEBasicHash"
430""") 518""")
431 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2") 519 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
@@ -435,7 +523,7 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
435 f = [] 523 f = []
436 for root, dirs, files in os.walk(d): 524 for root, dirs, files in os.walk(d):
437 for name in files: 525 for name in files:
438 if "meta-environment" in root or "cross-canadian" in root: 526 if "meta-environment" in root or "cross-canadian" in root or 'meta-ide-support' in root:
439 continue 527 continue
440 if "qemux86copy-" in root or "qemux86-" in root: 528 if "qemux86copy-" in root or "qemux86-" in root:
441 continue 529 continue
@@ -458,18 +546,16 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
458 546
459 self.write_config(""" 547 self.write_config("""
460TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" 548TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
461TCLIBCAPPEND = \"\"
462MACHINE = \"qemux86\" 549MACHINE = \"qemux86\"
463require conf/multilib.conf 550require conf/multilib.conf
464MULTILIBS = "multilib:lib32" 551MULTILIBS = "multilib:lib32"
465DEFAULTTUNE_virtclass-multilib-lib32 = "x86" 552DEFAULTTUNE:virtclass-multilib-lib32 = "x86"
466BB_SIGNATURE_HANDLER = "OEBasicHash" 553BB_SIGNATURE_HANDLER = "OEBasicHash"
467""") 554""")
468 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") 555 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
469 bitbake("binutils-native -S none") 556 bitbake("binutils-native -S none")
470 self.write_config(""" 557 self.write_config("""
471TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" 558TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
472TCLIBCAPPEND = \"\"
473MACHINE = \"qemux86copy\" 559MACHINE = \"qemux86copy\"
474BB_SIGNATURE_HANDLER = "OEBasicHash" 560BB_SIGNATURE_HANDLER = "OEBasicHash"
475""") 561""")
@@ -488,7 +574,7 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
488 self.maxDiff = None 574 self.maxDiff = None
489 self.assertCountEqual(files1, files2) 575 self.assertCountEqual(files1, files2)
490 576
491 577class SStateHashSameSigs4(SStateBase):
492 def test_sstate_noop_samesigs(self): 578 def test_sstate_noop_samesigs(self):
493 """ 579 """
494 The sstate checksums of two builds with these variables changed or 580 The sstate checksums of two builds with these variables changed or
@@ -497,13 +583,12 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
497 583
498 self.write_config(""" 584 self.write_config("""
499TMPDIR = "${TOPDIR}/tmp-sstatesamehash" 585TMPDIR = "${TOPDIR}/tmp-sstatesamehash"
500TCLIBCAPPEND = ""
501BB_NUMBER_THREADS = "${@oe.utils.cpu_count()}" 586BB_NUMBER_THREADS = "${@oe.utils.cpu_count()}"
502PARALLEL_MAKE = "-j 1" 587PARALLEL_MAKE = "-j 1"
503DL_DIR = "${TOPDIR}/download1" 588DL_DIR = "${TOPDIR}/download1"
504TIME = "111111" 589TIME = "111111"
505DATE = "20161111" 590DATE = "20161111"
506INHERIT_remove = "buildstats-summary buildhistory uninative" 591INHERIT:remove = "buildstats-summary buildhistory uninative"
507http_proxy = "" 592http_proxy = ""
508BB_SIGNATURE_HANDLER = "OEBasicHash" 593BB_SIGNATURE_HANDLER = "OEBasicHash"
509""") 594""")
@@ -512,14 +597,13 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
512 bitbake("world meta-toolchain -S none") 597 bitbake("world meta-toolchain -S none")
513 self.write_config(""" 598 self.write_config("""
514TMPDIR = "${TOPDIR}/tmp-sstatesamehash2" 599TMPDIR = "${TOPDIR}/tmp-sstatesamehash2"
515TCLIBCAPPEND = ""
516BB_NUMBER_THREADS = "${@oe.utils.cpu_count()+1}" 600BB_NUMBER_THREADS = "${@oe.utils.cpu_count()+1}"
517PARALLEL_MAKE = "-j 2" 601PARALLEL_MAKE = "-j 2"
518DL_DIR = "${TOPDIR}/download2" 602DL_DIR = "${TOPDIR}/download2"
519TIME = "222222" 603TIME = "222222"
520DATE = "20161212" 604DATE = "20161212"
521# Always remove uninative as we're changing proxies 605# Always remove uninative as we're changing proxies
522INHERIT_remove = "uninative" 606INHERIT:remove = "uninative"
523INHERIT += "buildstats-summary buildhistory" 607INHERIT += "buildstats-summary buildhistory"
524http_proxy = "http://example.com/" 608http_proxy = "http://example.com/"
525BB_SIGNATURE_HANDLER = "OEBasicHash" 609BB_SIGNATURE_HANDLER = "OEBasicHash"
@@ -573,3 +657,334 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
573 compare_sigfiles(rest, files1, files2, compare=False) 657 compare_sigfiles(rest, files1, files2, compare=False)
574 658
575 self.fail("sstate hashes not identical.") 659 self.fail("sstate hashes not identical.")
660
661 def test_sstate_movelayer_samesigs(self):
662 """
663 The sstate checksums of two builds with the same oe-core layer in two
664 different locations should be the same.
665 """
666 core_layer = os.path.join(
667 self.tc.td["COREBASE"], 'meta')
668 copy_layer_1 = self.topdir + "/meta-copy1/meta"
669 copy_layer_2 = self.topdir + "/meta-copy2/meta"
670
671 oe.path.copytree(core_layer, copy_layer_1)
672 os.symlink(os.path.dirname(core_layer) + "/scripts", self.topdir + "/meta-copy1/scripts")
673 self.write_config("""
674TMPDIR = "${TOPDIR}/tmp-sstatesamehash"
675""")
676 bblayers_conf = 'BBLAYERS += "%s"\nBBLAYERS:remove = "%s"' % (copy_layer_1, core_layer)
677 self.write_bblayers_config(bblayers_conf)
678 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
679 bitbake("bash -S none")
680
681 oe.path.copytree(core_layer, copy_layer_2)
682 os.symlink(os.path.dirname(core_layer) + "/scripts", self.topdir + "/meta-copy2/scripts")
683 self.write_config("""
684TMPDIR = "${TOPDIR}/tmp-sstatesamehash2"
685""")
686 bblayers_conf = 'BBLAYERS += "%s"\nBBLAYERS:remove = "%s"' % (copy_layer_2, core_layer)
687 self.write_bblayers_config(bblayers_conf)
688 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
689 bitbake("bash -S none")
690
691 def get_files(d):
692 f = []
693 for root, dirs, files in os.walk(d):
694 for name in files:
695 f.append(os.path.join(root, name))
696 return f
697 files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps")
698 files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps")
699 files2 = [x.replace("tmp-sstatesamehash2", "tmp-sstatesamehash") for x in files2]
700 self.maxDiff = None
701 self.assertCountEqual(files1, files2)
702
703class SStateFindSiginfo(SStateBase):
704 def test_sstate_compare_sigfiles_and_find_siginfo(self):
705 """
706 Test the functionality of the find_siginfo: basic function and callback in compare_sigfiles
707 """
708 self.write_config("""
709TMPDIR = \"${TOPDIR}/tmp-sstates-findsiginfo\"
710MACHINE = \"qemux86-64\"
711require conf/multilib.conf
712MULTILIBS = "multilib:lib32"
713DEFAULTTUNE:virtclass-multilib-lib32 = "x86"
714BB_SIGNATURE_HANDLER = "OEBasicHash"
715""")
716 self.track_for_cleanup(self.topdir + "/tmp-sstates-findsiginfo")
717
718 pns = ["binutils", "binutils-native", "lib32-binutils"]
719 target_configs = [
720"""
721TMPVAL1 = "tmpval1"
722TMPVAL2 = "tmpval2"
723do_tmptask1() {
724 echo ${TMPVAL1}
725}
726do_tmptask2() {
727 echo ${TMPVAL2}
728}
729addtask do_tmptask1
730addtask tmptask2 before do_tmptask1
731""",
732"""
733TMPVAL3 = "tmpval3"
734TMPVAL4 = "tmpval4"
735do_tmptask1() {
736 echo ${TMPVAL3}
737}
738do_tmptask2() {
739 echo ${TMPVAL4}
740}
741addtask do_tmptask1
742addtask tmptask2 before do_tmptask1
743"""
744 ]
745
746 for target_config in target_configs:
747 self.write_recipeinc("binutils", target_config)
748 for pn in pns:
749 bitbake("%s -c do_tmptask1 -S none" % pn)
750 self.delete_recipeinc("binutils")
751
752 with bb.tinfoil.Tinfoil() as tinfoil:
753 tinfoil.prepare(config_only=True)
754
755 def find_siginfo(pn, taskname, sigs=None):
756 result = None
757 command_complete = False
758 tinfoil.set_event_mask(["bb.event.FindSigInfoResult",
759 "bb.command.CommandCompleted"])
760 ret = tinfoil.run_command("findSigInfo", pn, taskname, sigs)
761 if ret:
762 while result is None or not command_complete:
763 event = tinfoil.wait_event(1)
764 if event:
765 if isinstance(event, bb.command.CommandCompleted):
766 command_complete = True
767 elif isinstance(event, bb.event.FindSigInfoResult):
768 result = event.result
769 return result
770
771 def recursecb(key, hash1, hash2):
772 nonlocal recursecb_count
773 recursecb_count += 1
774 hashes = [hash1, hash2]
775 hashfiles = find_siginfo(key, None, hashes)
776 self.assertCountEqual(hashes, hashfiles)
777 bb.siggen.compare_sigfiles(hashfiles[hash1]['path'], hashfiles[hash2]['path'], recursecb)
778
779 for pn in pns:
780 recursecb_count = 0
781 matches = find_siginfo(pn, "do_tmptask1")
782 self.assertGreaterEqual(len(matches), 2)
783 latesthashes = sorted(matches.keys(), key=lambda h: matches[h]['time'])[-2:]
784 bb.siggen.compare_sigfiles(matches[latesthashes[-2]]['path'], matches[latesthashes[-1]]['path'], recursecb)
785 self.assertEqual(recursecb_count,1)
786
787class SStatePrintdiff(SStateBase):
788 def run_test_printdiff_changerecipe(self, target, change_recipe, change_bbtask, change_content, expected_sametmp_output, expected_difftmp_output):
789 import time
790 self.write_config("""
791TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-sametmp-{}"
792""".format(time.time()))
793 # Use runall do_build to ensure any indirect sstate is created, e.g. tzcode-native on both x86 and
794 # aarch64 hosts since only allarch target recipes depend upon it and it may not be built otherwise.
795 # A bitbake -c cleansstate tzcode-native would cause some of these tests to error for example.
796 bitbake("--runall build --runall deploy_source_date_epoch {}".format(target))
797 bitbake("-S none {}".format(target))
798 bitbake(change_bbtask)
799 self.write_recipeinc(change_recipe, change_content)
800 result_sametmp = bitbake("-S printdiff {}".format(target))
801
802 self.write_config("""
803TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-difftmp-{}"
804""".format(time.time()))
805 result_difftmp = bitbake("-S printdiff {}".format(target))
806
807 self.delete_recipeinc(change_recipe)
808 for item in expected_sametmp_output:
809 self.assertIn(item, result_sametmp.output, msg = "Item {} not found in output:\n{}".format(item, result_sametmp.output))
810 for item in expected_difftmp_output:
811 self.assertIn(item, result_difftmp.output, msg = "Item {} not found in output:\n{}".format(item, result_difftmp.output))
812
813 def run_test_printdiff_changeconfig(self, target, change_bbtasks, change_content, expected_sametmp_output, expected_difftmp_output):
814 import time
815 self.write_config("""
816TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-sametmp-{}"
817""".format(time.time()))
818 bitbake("--runall build --runall deploy_source_date_epoch {}".format(target))
819 bitbake("-S none {}".format(target))
820 bitbake(" ".join(change_bbtasks))
821 self.append_config(change_content)
822 result_sametmp = bitbake("-S printdiff {}".format(target))
823
824 self.write_config("""
825TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-difftmp-{}"
826""".format(time.time()))
827 self.append_config(change_content)
828 result_difftmp = bitbake("-S printdiff {}".format(target))
829
830 for item in expected_sametmp_output:
831 self.assertIn(item, result_sametmp.output, msg = "Item {} not found in output:\n{}".format(item, result_sametmp.output))
832 for item in expected_difftmp_output:
833 self.assertIn(item, result_difftmp.output, msg = "Item {} not found in output:\n{}".format(item, result_difftmp.output))
834
835
836 # Check if printdiff walks the full dependency chain from the image target to where the change is in a specific recipe
837 def test_image_minimal_vs_perlcross(self):
838 expected_output = ("Task perlcross-native:do_install couldn't be used from the cache because:",
839"We need hash",
840"most recent matching task was")
841 expected_sametmp_output = expected_output + (
842"Variable do_install value changed",
843'+ echo "this changes the task signature"')
844 expected_difftmp_output = expected_output
845
846 self.run_test_printdiff_changerecipe("core-image-minimal", "perlcross", "-c do_install perlcross-native",
847"""
848do_install:append() {
849 echo "this changes the task signature"
850}
851""",
852expected_sametmp_output, expected_difftmp_output)
853
854 # Check if changes to gcc-source (which uses tmp/work-shared) are correctly discovered
855 def test_gcc_runtime_vs_gcc_source(self):
856 gcc_source_pn = 'gcc-source-%s' % get_bb_vars(['PV'], 'gcc')['PV']
857
858 expected_output = ("Task {}:do_preconfigure couldn't be used from the cache because:".format(gcc_source_pn),
859"We need hash",
860"most recent matching task was")
861 expected_sametmp_output = expected_output + (
862"Variable do_preconfigure value changed",
863'+ print("this changes the task signature")')
864 expected_difftmp_output = expected_output
865
866 self.run_test_printdiff_changerecipe("gcc-runtime", "gcc-source", "-c do_preconfigure {}".format(gcc_source_pn),
867"""
868python do_preconfigure:append() {
869 print("this changes the task signature")
870}
871""",
872expected_sametmp_output, expected_difftmp_output)
873
874 # Check if changing a really base task definiton is reported against multiple core recipes using it
875 def test_image_minimal_vs_base_do_configure(self):
876 change_bbtasks = ('zstd-native:do_configure',
877'texinfo-dummy-native:do_configure',
878'ldconfig-native:do_configure',
879'gettext-minimal-native:do_configure',
880'tzcode-native:do_configure',
881'makedevs-native:do_configure',
882'pigz-native:do_configure',
883'update-rc.d-native:do_configure',
884'unzip-native:do_configure',
885'gnu-config-native:do_configure')
886
887 expected_output = ["Task {} couldn't be used from the cache because:".format(t) for t in change_bbtasks] + [
888"We need hash",
889"most recent matching task was"]
890
891 expected_sametmp_output = expected_output + [
892"Variable base_do_configure value changed",
893'+ echo "this changes base_do_configure() definiton "']
894 expected_difftmp_output = expected_output
895
896 self.run_test_printdiff_changeconfig("core-image-minimal",change_bbtasks,
897"""
898INHERIT += "base-do-configure-modified"
899""",
900expected_sametmp_output, expected_difftmp_output)
901
class SStateCheckObjectPresence(SStateBase):
    def check_bb_output(self, output, targets, exceptions, check_cdn):
        """
        Verify that every sstate object reported as missing by bitbake is
        covered by an expected exception.

        output: full bitbake console output to parse.
        targets: space-separated target list the build was run for.
        exceptions: list of regex patterns for objects allowed to be missing.
        check_cdn: True to parse CDN fetch-test diagnostics, False for
                   local-cache ("Looked for but didn't find") diagnostics.

        Fails the test if the summary line is absent, or if any missing
        object is not matched by an exception pattern.
        """
        def is_exception(path, patterns):
            # True if any of the exception regexes matches the object path.
            # (named 'path'/'patterns' to avoid shadowing the builtin 'object')
            return any(re.search(e, path) for e in patterns)

        # Work on a copy: callers may reuse their exceptions list across
        # multiple run_test() invocations, so don't mutate it in place.
        exceptions = list(exceptions)

        # sstate is checked for existence of these, but they never get written out to begin with
        exceptions += ["{}.*image_qa".format(t) for t in targets.split()]
        exceptions += ["{}.*deploy_source_date_epoch".format(t) for t in targets.split()]
        exceptions += ["{}.*image_complete".format(t) for t in targets.split()]
        exceptions += ["linux-yocto.*shared_workdir"]
        # these get influenced by IMAGE_FSTYPES tweaks in yocto-autobuilder-helper's config.json (on x86-64)
        # additionally, they depend on noexec (thus, absent stamps) package, install, etc. image tasks,
        # which makes tracing other changes difficult
        exceptions += ["{}.*create_.*spdx".format(t) for t in targets.split()]

        output_l = output.splitlines()
        # Extract the number of missed objects from the "Sstate summary" line,
        # e.g. "Sstate summary: ... Missed 5 ..."
        for l in output_l:
            if l.startswith("Sstate summary"):
                for idx, item in enumerate(l.split()):
                    if item == 'Missed':
                        missing_objects = int(l.split()[idx+1])
                        break
                else:
                    self.fail("Did not find missing objects amount in sstate summary: {}".format(l))
                break
        else:
            self.fail("Did not find 'Sstate summary' line in bitbake output")

        failed_urls = []
        failed_urls_extrainfo = []
        for l in output_l:
            # The diagnostic line format differs between CDN fetch tests and
            # local-cache lookups; pick the object path from the right column.
            if "SState: Unsuccessful fetch test for" in l and check_cdn:
                missing_object = l.split()[6]
            elif "SState: Looked for but didn't find file" in l and not check_cdn:
                missing_object = l.split()[8]
            else:
                missing_object = None
            if missing_object:
                if not is_exception(missing_object, exceptions):
                    failed_urls.append(missing_object)
                else:
                    # Expected-missing objects shouldn't count against the summary total.
                    missing_objects -= 1

            if "urlopen failed for" in l and not is_exception(l, exceptions):
                failed_urls_extrainfo.append(l)

        self.assertEqual(len(failed_urls), missing_objects, "Amount of reported missing objects does not match failed URLs: {}\nFailed URLs:\n{}\nFetcher diagnostics:\n{}".format(missing_objects, "\n".join(failed_urls), "\n".join(failed_urls_extrainfo)))
        self.assertEqual(len(failed_urls), 0, "Missing objects in the cache:\n{}\nFetcher diagnostics:\n{}".format("\n".join(failed_urls), "\n".join(failed_urls_extrainfo)))
953
954@OETestTag("yocto-mirrors")
955class SStateMirrors(SStateCheckObjectPresence):
956 def run_test(self, machine, targets, exceptions, check_cdn = True, ignore_errors = False):
957 if check_cdn:
958 self.config_sstate(True)
959 self.append_config("""
960MACHINE = "{}"
961BB_HASHSERVE_UPSTREAM = "hashserv.yoctoproject.org:8686"
962SSTATE_MIRRORS ?= "file://.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH"
963""".format(machine))
964 else:
965 self.append_config("""
966MACHINE = "{}"
967""".format(machine))
968 result = bitbake("-DD -n {}".format(targets))
969 bitbake("-S none {}".format(targets))
970 if ignore_errors:
971 return
972 self.check_bb_output(result.output, targets, exceptions, check_cdn)
973
974 def test_cdn_mirror_qemux86_64(self):
975 exceptions = []
976 self.run_test("qemux86-64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, ignore_errors = True)
977 self.run_test("qemux86-64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions)
978
979 def test_cdn_mirror_qemuarm64(self):
980 exceptions = []
981 self.run_test("qemuarm64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, ignore_errors = True)
982 self.run_test("qemuarm64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions)
983
984 def test_local_cache_qemux86_64(self):
985 exceptions = []
986 self.run_test("qemux86-64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, check_cdn = False)
987
988 def test_local_cache_qemuarm64(self):
989 exceptions = []
990 self.run_test("qemuarm64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, check_cdn = False)
diff --git a/meta/lib/oeqa/selftest/cases/sysroot.py b/meta/lib/oeqa/selftest/cases/sysroot.py
index 6e34927c90..ef854f6fee 100644
--- a/meta/lib/oeqa/selftest/cases/sysroot.py
+++ b/meta/lib/oeqa/selftest/cases/sysroot.py
@@ -1,11 +1,13 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import uuid 7import uuid
6 8
7from oeqa.selftest.case import OESelftestTestCase 9from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import bitbake 10from oeqa.utils.commands import bitbake
9 11
10class SysrootTests(OESelftestTestCase): 12class SysrootTests(OESelftestTestCase):
11 def test_sysroot_cleanup(self): 13 def test_sysroot_cleanup(self):
@@ -24,14 +26,61 @@ class SysrootTests(OESelftestTestCase):
24 self.write_config(""" 26 self.write_config("""
25PREFERRED_PROVIDER_virtual/sysroot-test = "sysroot-test-arch1" 27PREFERRED_PROVIDER_virtual/sysroot-test = "sysroot-test-arch1"
26MACHINE = "qemux86" 28MACHINE = "qemux86"
27TESTSTRING_pn-sysroot-test-arch1 = "%s" 29TESTSTRING:pn-sysroot-test-arch1 = "%s"
28TESTSTRING_pn-sysroot-test-arch2 = "%s" 30TESTSTRING:pn-sysroot-test-arch2 = "%s"
29""" % (uuid1, uuid2)) 31""" % (uuid1, uuid2))
30 bitbake("sysroot-test") 32 bitbake("sysroot-test")
31 self.write_config(""" 33 self.write_config("""
32PREFERRED_PROVIDER_virtual/sysroot-test = "sysroot-test-arch2" 34PREFERRED_PROVIDER_virtual/sysroot-test = "sysroot-test-arch2"
33MACHINE = "qemux86copy" 35MACHINE = "qemux86copy"
34TESTSTRING_pn-sysroot-test-arch1 = "%s" 36TESTSTRING:pn-sysroot-test-arch1 = "%s"
35TESTSTRING_pn-sysroot-test-arch2 = "%s" 37TESTSTRING:pn-sysroot-test-arch2 = "%s"
36""" % (uuid1, uuid2)) 38""" % (uuid1, uuid2))
37 bitbake("sysroot-test") 39 bitbake("sysroot-test")
40
41 def test_sysroot_max_shebang(self):
42 """
43 Summary: Check max shebang triggers. To confirm [YOCTO #11053] is closed.
44 Expected: Fail when a shebang bigger than the max shebang-size is reached.
45 Author: Paulo Neves <ptsneves@gmail.com>
46 """
47 expected = "maximum shebang size exceeded, the maximum size is 128. [shebang-size]"
48 res = bitbake("sysroot-shebang-test-native -c populate_sysroot", ignore_status=True)
49 self.assertTrue(expected in res.output, msg=res.output)
50 self.assertTrue(res.status != 0)
51
52 def test_sysroot_la(self):
53 """
54 Summary: Check that workdir paths are not contained in .la files.
55 Expected: Fail when a workdir path is found in the file content.
56 Author: Paulo Neves <ptsneves@gmail.com>
57 """
58 expected = "la-test.la failed sanity test (workdir) in path"
59
60 res = bitbake("sysroot-la-test -c populate_sysroot", ignore_status=True)
61 self.assertTrue(expected in res.output, msg=res.output)
62 self.assertTrue('[la]' in res.output, msg=res.output)
63 self.assertTrue(res.status != 0)
64
65 res = bitbake("sysroot-la-test-native -c populate_sysroot", ignore_status=True)
66 self.assertTrue(expected in res.output, msg=res.output)
67 self.assertTrue('[la]' in res.output, msg=res.output)
68 self.assertTrue(res.status != 0)
69
70 def test_sysroot_pkgconfig(self):
71 """
72 Summary: Check that tmpdir paths are not contained in .pc files.
73 Expected: Fail when a tmpdir path is found in the file content.
74 Author: Paulo Neves <ptsneves@gmail.com>
75 """
76 expected = "test.pc failed sanity test (tmpdir) in path"
77
78 res = bitbake("sysroot-pc-test -c populate_sysroot", ignore_status=True)
79 self.assertTrue('[pkgconfig]' in res.output, msg=res.output)
80 self.assertTrue(expected in res.output, msg=res.output)
81 self.assertTrue(res.status != 0)
82
83 res = bitbake("sysroot-pc-test-native -c populate_sysroot", ignore_status=True)
84 self.assertTrue(expected in res.output, msg=res.output)
85 self.assertTrue('[pkgconfig]' in res.output, msg=res.output)
86 self.assertTrue(res.status != 0)
diff --git a/meta/lib/oeqa/selftest/cases/tinfoil.py b/meta/lib/oeqa/selftest/cases/tinfoil.py
index a51c6048d3..21c8686b2a 100644
--- a/meta/lib/oeqa/selftest/cases/tinfoil.py
+++ b/meta/lib/oeqa/selftest/cases/tinfoil.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -9,7 +11,6 @@ import logging
9import bb.tinfoil 11import bb.tinfoil
10 12
11from oeqa.selftest.case import OESelftestTestCase 13from oeqa.selftest.case import OESelftestTestCase
12from oeqa.utils.commands import runCmd
13 14
14class TinfoilTests(OESelftestTestCase): 15class TinfoilTests(OESelftestTestCase):
15 """ Basic tests for the tinfoil API """ 16 """ Basic tests for the tinfoil API """
@@ -47,6 +48,17 @@ class TinfoilTests(OESelftestTestCase):
47 rd = tinfoil.parse_recipe_file(best[3]) 48 rd = tinfoil.parse_recipe_file(best[3])
48 self.assertEqual(testrecipe, rd.getVar('PN')) 49 self.assertEqual(testrecipe, rd.getVar('PN'))
49 50
51 def test_parse_virtual_recipe(self):
52 with bb.tinfoil.Tinfoil() as tinfoil:
53 tinfoil.prepare(config_only=False, quiet=2)
54 testrecipe = 'nativesdk-gcc'
55 best = tinfoil.find_best_provider(testrecipe)
56 if not best:
57 self.fail('Unable to find recipe providing %s' % testrecipe)
58 rd = tinfoil.parse_recipe_file(best[3])
59 self.assertEqual(testrecipe, rd.getVar('PN'))
60 self.assertIsNotNone(rd.getVar('FILE_LAYERNAME'))
61
50 def test_parse_recipe_copy_expand(self): 62 def test_parse_recipe_copy_expand(self):
51 with bb.tinfoil.Tinfoil() as tinfoil: 63 with bb.tinfoil.Tinfoil() as tinfoil:
52 tinfoil.prepare(config_only=False, quiet=2) 64 tinfoil.prepare(config_only=False, quiet=2)
@@ -65,6 +77,32 @@ class TinfoilTests(OESelftestTestCase):
65 localdata.setVar('PN', 'hello') 77 localdata.setVar('PN', 'hello')
66 self.assertEqual('hello', localdata.getVar('BPN')) 78 self.assertEqual('hello', localdata.getVar('BPN'))
67 79
80 # The config_data API to parse_recipe_file is used by:
81 # layerindex-web layerindex/update_layer.py
82 def test_parse_recipe_custom_data(self):
83 with bb.tinfoil.Tinfoil() as tinfoil:
84 tinfoil.prepare(config_only=False, quiet=2)
85 localdata = bb.data.createCopy(tinfoil.config_data)
86 localdata.setVar("TESTVAR", "testval")
87 testrecipe = 'mdadm'
88 best = tinfoil.find_best_provider(testrecipe)
89 if not best:
90 self.fail('Unable to find recipe providing %s' % testrecipe)
91 rd = tinfoil.parse_recipe_file(best[3], config_data=localdata)
92 self.assertEqual("testval", rd.getVar('TESTVAR'))
93
94 def test_parse_virtual_recipe_custom_data(self):
95 with bb.tinfoil.Tinfoil() as tinfoil:
96 tinfoil.prepare(config_only=False, quiet=2)
97 localdata = bb.data.createCopy(tinfoil.config_data)
98 localdata.setVar("TESTVAR", "testval")
99 testrecipe = 'nativesdk-gcc'
100 best = tinfoil.find_best_provider(testrecipe)
101 if not best:
102 self.fail('Unable to find recipe providing %s' % testrecipe)
103 rd = tinfoil.parse_recipe_file(best[3], config_data=localdata)
104 self.assertEqual("testval", rd.getVar('TESTVAR'))
105
68 def test_list_recipes(self): 106 def test_list_recipes(self):
69 with bb.tinfoil.Tinfoil() as tinfoil: 107 with bb.tinfoil.Tinfoil() as tinfoil:
70 tinfoil.prepare(config_only=False, quiet=2) 108 tinfoil.prepare(config_only=False, quiet=2)
@@ -87,21 +125,20 @@ class TinfoilTests(OESelftestTestCase):
87 with bb.tinfoil.Tinfoil() as tinfoil: 125 with bb.tinfoil.Tinfoil() as tinfoil:
88 tinfoil.prepare(config_only=True) 126 tinfoil.prepare(config_only=True)
89 127
90 tinfoil.set_event_mask(['bb.event.FilesMatchingFound', 'bb.command.CommandCompleted']) 128 tinfoil.set_event_mask(['bb.event.FilesMatchingFound', 'bb.command.CommandCompleted', 'bb.command.CommandFailed', 'bb.command.CommandExit'])
91 129
92 # Need to drain events otherwise events that were masked may still be in the queue 130 # Need to drain events otherwise events that were masked may still be in the queue
93 while tinfoil.wait_event(): 131 while tinfoil.wait_event():
94 pass 132 pass
95 133
96 pattern = 'conf' 134 pattern = 'conf'
97 res = tinfoil.run_command('findFilesMatchingInDir', pattern, 'conf/machine') 135 res = tinfoil.run_command('testCookerCommandEvent', pattern, handle_events=False)
98 self.assertTrue(res) 136 self.assertTrue(res)
99 137
100 eventreceived = False 138 eventreceived = False
101 commandcomplete = False 139 commandcomplete = False
102 start = time.time() 140 start = time.time()
103 # Wait for maximum 60s in total so we'd detect spurious heartbeat events for example 141 # Wait for maximum 60s in total so we'd detect spurious heartbeat events for example
104 # The test is IO load sensitive too
105 while (not (eventreceived == True and commandcomplete == True) 142 while (not (eventreceived == True and commandcomplete == True)
106 and (time.time() - start < 60)): 143 and (time.time() - start < 60)):
107 # if we received both events (on let's say a good day), we are done 144 # if we received both events (on let's say a good day), we are done
@@ -111,14 +148,15 @@ class TinfoilTests(OESelftestTestCase):
111 commandcomplete = True 148 commandcomplete = True
112 elif isinstance(event, bb.event.FilesMatchingFound): 149 elif isinstance(event, bb.event.FilesMatchingFound):
113 self.assertEqual(pattern, event._pattern) 150 self.assertEqual(pattern, event._pattern)
114 self.assertIn('qemuarm.conf', event._matches) 151 self.assertIn('A', event._matches)
152 self.assertIn('B', event._matches)
115 eventreceived = True 153 eventreceived = True
116 elif isinstance(event, logging.LogRecord): 154 elif isinstance(event, logging.LogRecord):
117 continue 155 continue
118 else: 156 else:
119 self.fail('Unexpected event: %s' % event) 157 self.fail('Unexpected event: %s' % event)
120 158
121 self.assertTrue(commandcomplete, 'Timed out waiting for CommandCompleted event from bitbake server') 159 self.assertTrue(commandcomplete, 'Timed out waiting for CommandCompleted event from bitbake server (Matching event received: %s)' % str(eventreceived))
122 self.assertTrue(eventreceived, 'Did not receive FilesMatchingFound event from bitbake server') 160 self.assertTrue(eventreceived, 'Did not receive FilesMatchingFound event from bitbake server')
123 161
124 def test_setvariable_clean(self): 162 def test_setvariable_clean(self):
@@ -173,8 +211,8 @@ class TinfoilTests(OESelftestTestCase):
173 self.assertEqual(value, 'origvalue', 'Variable renamed using config_data.renameVar() does not appear with new name') 211 self.assertEqual(value, 'origvalue', 'Variable renamed using config_data.renameVar() does not appear with new name')
174 # Test overrides 212 # Test overrides
175 tinfoil.config_data.setVar('TESTVAR', 'original') 213 tinfoil.config_data.setVar('TESTVAR', 'original')
176 tinfoil.config_data.setVar('TESTVAR_overrideone', 'one') 214 tinfoil.config_data.setVar('TESTVAR:overrideone', 'one')
177 tinfoil.config_data.setVar('TESTVAR_overridetwo', 'two') 215 tinfoil.config_data.setVar('TESTVAR:overridetwo', 'two')
178 tinfoil.config_data.appendVar('OVERRIDES', ':overrideone') 216 tinfoil.config_data.appendVar('OVERRIDES', ':overrideone')
179 value = tinfoil.config_data.getVar('TESTVAR') 217 value = tinfoil.config_data.getVar('TESTVAR')
180 self.assertEqual(value, 'one', 'Variable overrides not functioning correctly') 218 self.assertEqual(value, 'one', 'Variable overrides not functioning correctly')
diff --git a/meta/lib/oeqa/selftest/cases/toolchain.py b/meta/lib/oeqa/selftest/cases/toolchain.py
new file mode 100644
index 0000000000..b4b280d037
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/toolchain.py
@@ -0,0 +1,71 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import shutil
8import subprocess
9import tempfile
10from types import SimpleNamespace
11
12import oe.path
13from oeqa.selftest.case import OESelftestTestCase
14from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars
15
16class ToolchainTests(OESelftestTestCase):
17
18 def test_toolchain_switching(self):
19 """
20 Test that a configuration that uses GCC by default but clang for one
21 specific recipe does infact do that.
22 """
23
24 def extract_comment(objcopy, filename):
25 """
26 Using the specified `objcopy`, return the .comment segment from
27 `filename` as a bytes().
28 """
29 with tempfile.NamedTemporaryFile(prefix="comment-") as f:
30 cmd = [objcopy, "--dump-section", ".comment=" + f.name, filename]
31 subprocess.run(cmd, check=True)
32 # clang's objcopy writes to a temporary file and renames, so we need to re-open.
33 with open(f.name, "rb") as f2:
34 return f2.read()
35
36 def check_recipe(recipe, filename, override, comment_present, comment_absent=None):
37 """
38 Check that `filename` in `recipe`'s bindir contains `comment`, and
39 the overrides contain `override`.
40 """
41 d = SimpleNamespace(**get_bb_vars(("D", "bindir", "OBJCOPY", "OVERRIDES", "PATH"), target=recipe))
42
43 self.assertIn(override, d.OVERRIDES)
44
45 binary = oe.path.join(d.D, d.bindir, filename)
46
47 objcopy = shutil.which(d.OBJCOPY, path=d.PATH)
48 self.assertIsNotNone(objcopy)
49
50 comment = extract_comment(objcopy, binary)
51 self.assertIn(comment_present, comment)
52 if comment_absent:
53 self.assertNotIn(comment_absent, comment)
54
55
56 # GCC by default, clang for selftest-hello.
57 self.write_config("""
58TOOLCHAIN = "gcc"
59TOOLCHAIN:pn-selftest-hello = "clang"
60 """)
61
62 # Force these recipes to re-install so we can extract the .comments from
63 # the install directory, as they're stripped out of the final packages.
64 bitbake("m4 selftest-hello -C install")
65
66 # m4 should be built with GCC and only GCC
67 check_recipe("m4", "m4", "toolchain-gcc", b"GCC: (GNU)", b"clang")
68
69 # helloworld should be built with clang. We can't assert that GCC is not
70 # present as it will be linked against glibc which is built with GCC.
71 check_recipe("selftest-hello", "helloworld", "toolchain-clang", b"clang version")
diff --git a/meta/lib/oeqa/selftest/cases/uboot.py b/meta/lib/oeqa/selftest/cases/uboot.py
new file mode 100644
index 0000000000..980ea327f0
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/uboot.py
@@ -0,0 +1,98 @@
1# Qemu-based u-boot bootloader integration testing
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: MIT
6#
7
8from oeqa.selftest.case import OESelftestTestCase
9from oeqa.utils.commands import bitbake, runqemu, get_bb_var, get_bb_vars, runCmd
10from oeqa.core.decorator.data import skipIfNotArch, skipIfNotBuildArch
11from oeqa.core.decorator import OETestTag
12
13uboot_boot_patterns = {
14 'search_reached_prompt': "stop autoboot",
15 'search_login_succeeded': "=>",
16 'search_cmd_finished': "=>"
17 }
18
19
20class UBootTest(OESelftestTestCase):
21
22 @skipIfNotArch(['arm', 'aarch64'])
23 @OETestTag("runqemu")
24 def test_boot_uboot(self):
25 """
26 Tests building u-boot and booting it with QEMU
27 """
28
29 self.write_config("""
30QB_DEFAULT_BIOS = "u-boot.bin"
31PREFERRED_PROVIDER_virtual/bootloader = "u-boot"
32QEMU_USE_KVM = "False"
33""")
34 bitbake("virtual/bootloader core-image-minimal")
35
36 with runqemu('core-image-minimal', ssh=False, runqemuparams='nographic',
37 boot_patterns=uboot_boot_patterns) as qemu:
38
39 # test if u-boot console works
40 cmd = "version"
41 status, output = qemu.run_serial(cmd)
42 self.assertEqual(status, 1, msg=output)
43 self.assertTrue("U-Boot" in output, msg=output)
44
45 @skipIfNotArch(['aarch64'])
46 @skipIfNotBuildArch(['aarch64'])
47 @OETestTag("runqemu")
48 def test_boot_uboot_kvm_to_full_target(self):
49 """
50 Tests building u-boot and booting it with QEMU and KVM.
51 Requires working KVM on build host. See "kvm-ok" output.
52 """
53
54 runCmd("kvm-ok")
55
56 image = "core-image-minimal"
57 vars = get_bb_vars(['HOST_ARCH', 'BUILD_ARCH'], image)
58 host_arch = vars['HOST_ARCH']
59 build_arch = vars['BUILD_ARCH']
60
61 self.assertEqual(host_arch, build_arch, 'HOST_ARCH %s and BUILD_ARCH %s must match for KVM' % (host_arch, build_arch))
62
63 self.write_config("""
64QEMU_USE_KVM = "1"
65
66# Using u-boot in EFI mode, need ESP partition for grub/systemd-boot/kernel etc
67IMAGE_FSTYPES:pn-core-image-minimal:append = " wic"
68
69# easiest to follow genericarm64 setup with wks file, initrd and EFI loader
70INITRAMFS_IMAGE = "core-image-initramfs-boot"
71EFI_PROVIDER = "${@bb.utils.contains("DISTRO_FEATURES", "systemd", "systemd-boot", "grub-efi", d)}"
72WKS_FILE = "genericarm64.wks.in"
73
74# use wic image with ESP for u-boot, not ext4
75QB_DEFAULT_FSTYPE = "wic"
76
77PREFERRED_PROVIDER_virtual/bootloader = "u-boot"
78QB_DEFAULT_BIOS = "u-boot.bin"
79
80# let u-boot or EFI loader load kernel from ESP
81QB_DEFAULT_KERNEL = "none"
82
83# virt pci, not scsi because support not in u-boot to find ESP
84QB_DRIVE_TYPE = "/dev/vd"
85""")
86 bitbake("virtual/bootloader %s" % image)
87
88 runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', image) or ""
89 with runqemu(image, ssh=False, runqemuparams='nographic kvm %s' % runqemu_params) as qemu:
90
91 # boot to target and login worked, should have been fast with kvm
92 cmd = "dmesg"
93 status, output = qemu.run_serial(cmd)
94 self.assertEqual(status, 1, msg=output)
95 # Machine is qemu
96 self.assertTrue("Machine model: linux,dummy-virt" in output, msg=output)
97 # with KVM enabled
98 self.assertTrue("KVM: hypervisor services detected" in output, msg=output)
diff --git a/meta/lib/oeqa/selftest/cases/uki.py b/meta/lib/oeqa/selftest/cases/uki.py
new file mode 100644
index 0000000000..9a1aa4e269
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/uki.py
@@ -0,0 +1,141 @@
1# Based on runqemu.py test file
2#
3# Copyright (c) 2017 Wind River Systems, Inc.
4#
5# SPDX-License-Identifier: MIT
6#
7
8from oeqa.selftest.case import OESelftestTestCase
9from oeqa.utils.commands import bitbake, runqemu, get_bb_var
10from oeqa.core.decorator.data import skipIfNotArch
11from oeqa.core.decorator import OETestTag
12import oe.types
13
14class UkiTest(OESelftestTestCase):
15 """Boot Unified Kernel Image (UKI) generated with uki.bbclass on UEFI firmware (omvf/edk2)"""
16
17 @skipIfNotArch(['i586', 'i686', 'x86_64'])
18 @OETestTag("runqemu")
19 def test_uki_boot_systemd(self):
20 """Build and boot into UEFI firmware (omvf/edk2), systemd-boot, initrd without systemd, rootfs with systemd"""
21 image = "core-image-minimal"
22 runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', image) or ""
23 cmd = "runqemu %s nographic serial wic ovmf" % (runqemu_params)
24 if oe.types.qemu_use_kvm(self.td.get('QEMU_USE_KVM', 0), self.td["TARGET_ARCH"]):
25 cmd += " kvm"
26
27 self.write_config("""
28# efi firmware must load systemd-boot, not grub
29EFI_PROVIDER = "systemd-boot"
30
31# image format must be wic, needs esp partition for firmware etc
32IMAGE_FSTYPES:pn-%s:append = " wic"
33WKS_FILE = "efi-uki-bootdisk.wks.in"
34
35# efi, uki and systemd features must be enabled
36INIT_MANAGER = "systemd"
37MACHINE_FEATURES:append = " efi"
38IMAGE_CLASSES:append:pn-core-image-minimal = " uki"
39
40# uki embeds also an initrd
41INITRAMFS_IMAGE = "core-image-minimal-initramfs"
42
43# runqemu must not load kernel separately, it's in the uki
44QB_KERNEL_ROOT = ""
45QB_DEFAULT_KERNEL = "none"
46
47# boot command line provided via uki, not via bootloader
48UKI_CMDLINE = "rootwait root=LABEL=root console=${KERNEL_CONSOLE}"
49
50# disable kvm, breaks boot
51QEMU_USE_KVM = ""
52
53IMAGE_CLASSES:remove = 'testimage'
54""" % (image))
55
56 uki_filename = get_bb_var('UKI_FILENAME', image)
57
58 bitbake(image + " ovmf")
59 with runqemu(image, ssh=False, launch_cmd=cmd) as qemu:
60 self.assertTrue(qemu.runner.logged, "Failed: %s" % cmd)
61
62 # Verify from efivars that firmware was:
63 # x86_64, qemux86_64, ovmf = edk2
64 cmd = "echo $( cat /sys/firmware/efi/efivars/LoaderFirmwareInfo-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep 'EDK II'"
65 status, output = qemu.run_serial(cmd)
66 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
67
68 # Check that systemd-boot was the loader
69 cmd = "echo $( cat /sys/firmware/efi/efivars/LoaderInfo-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep systemd-boot"
70 status, output = qemu.run_serial(cmd)
71 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
72
73 # Check that systemd-stub was used
74 cmd = "echo $( cat /sys/firmware/efi/efivars/StubInfo-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep systemd-stub"
75 status, output = qemu.run_serial(cmd)
76 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
77
78 # Check that the compiled uki file was booted into
79 cmd = "echo $( cat /sys/firmware/efi/efivars/LoaderEntrySelected-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep '%s'" % (uki_filename)
80 status, output = qemu.run_serial(cmd)
81 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
82
83 @skipIfNotArch(['i586', 'i686', 'x86_64'])
84 @OETestTag("runqemu")
85 def test_uki_sysvinit(self):
86 """Build and boot into UEFI firmware (omvf/edk2), systemd-boot, initrd with sysvinit, rootfs with sysvinit"""
87 config = """
88# efi firmware must load systemd-boot, not grub
89EFI_PROVIDER = "systemd-boot"
90
91# image format must be wic, needs esp partition for firmware etc
92IMAGE_FSTYPES:pn-core-image-base:append = " wic"
93WKS_FILE = "efi-uki-bootdisk.wks.in"
94
95# efi, uki and systemd features must be enabled
96MACHINE_FEATURES:append = " efi"
97IMAGE_CLASSES:append:pn-core-image-base = " uki"
98
99# uki embeds also an initrd, no systemd or udev
100INITRAMFS_IMAGE = "core-image-initramfs-boot"
101
102# runqemu must not load kernel separately, it's in the uki
103QB_KERNEL_ROOT = ""
104QB_DEFAULT_KERNEL = "none"
105
106# boot command line provided via uki, not via bootloader
107UKI_CMDLINE = "rootwait root=LABEL=root console=${KERNEL_CONSOLE}"
108
109# disable kvm, breaks boot
110QEMU_USE_KVM = ""
111
112IMAGE_CLASSES:remove = 'testimage'
113"""
114 self.append_config(config)
115 bitbake('core-image-base ovmf')
116 runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'core-image-base') or ""
117 uki_filename = get_bb_var('UKI_FILENAME', 'core-image-base')
118 self.remove_config(config)
119
120 with runqemu('core-image-base', ssh=False,
121 runqemuparams='%s slirp nographic ovmf' % (runqemu_params), image_fstype='wic') as qemu:
122 # Verify from efivars that firmware was:
123 # x86_64, qemux86_64, ovmf = edk2
124 cmd = "echo $( cat /sys/firmware/efi/efivars/LoaderFirmwareInfo-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep 'EDK II'"
125 status, output = qemu.run_serial(cmd)
126 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
127
128 # Check that systemd-boot was the loader
129 cmd = "echo $( cat /sys/firmware/efi/efivars/LoaderInfo-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep systemd-boot"
130 status, output = qemu.run_serial(cmd)
131 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
132
133 # Check that systemd-stub was used
134 cmd = "echo $( cat /sys/firmware/efi/efivars/StubInfo-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep systemd-stub"
135 status, output = qemu.run_serial(cmd)
136 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
137
138 # Check that the compiled uki file was booted into
139 cmd = "echo $( cat /sys/firmware/efi/efivars/LoaderEntrySelected-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep '%s'" % (uki_filename)
140 status, output = qemu.run_serial(cmd)
141 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
diff --git a/meta/lib/oeqa/selftest/cases/usergrouptests.py b/meta/lib/oeqa/selftest/cases/usergrouptests.py
new file mode 100644
index 0000000000..3c59b0f290
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/usergrouptests.py
@@ -0,0 +1,57 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8import shutil
9from oeqa.selftest.case import OESelftestTestCase
10from oeqa.utils.commands import bitbake
11from oeqa.utils.commands import bitbake, get_bb_var, get_test_layer
12
13class UserGroupTests(OESelftestTestCase):
14 def test_group_from_dep_package(self):
15 self.logger.info("Building creategroup2")
16 bitbake(' creategroup2 creategroup1')
17 bitbake(' creategroup2 creategroup1 -c clean')
18 self.logger.info("Packaging creategroup2")
19 self.assertTrue(bitbake(' creategroup2 -c package'))
20
21 def test_add_task_between_p_sysroot_and_package(self):
22 # Test for YOCTO #14961
23 self.assertTrue(bitbake('useraddbadtask -C fetch'))
24
25 def test_postinst_order(self):
26 self.logger.info("Building dcreategroup")
27 self.assertTrue(bitbake(' dcreategroup'))
28
29 def test_static_useradd_from_dynamic(self):
30 metaselftestpath = get_test_layer()
31 self.logger.info("Building core-image-minimal to generate passwd/group file")
32 bitbake(' core-image-minimal')
33 self.logger.info("Setting up useradd-staticids")
34 repropassdir = os.path.join(metaselftestpath, "conf/include")
35 os.makedirs(repropassdir)
36 etcdir=os.path.join(os.path.join(os.path.join(get_bb_var("TMPDIR"), "work"), \
37 os.path.join(get_bb_var("MACHINE").replace("-","_")+"-poky-linux", "core-image-minimal/1.0/rootfs/etc")))
38 shutil.copy(os.path.join(etcdir, "passwd"), os.path.join(repropassdir, "reproducable-passwd"))
39 shutil.copy(os.path.join(etcdir, "group"), os.path.join(repropassdir, "reproducable-group"))
40 # Copy the original local.conf
41 shutil.copyfile(os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf'), os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf.orig'))
42
43 self.write_config("USERADDEXTENSION = \"useradd-staticids\"")
44 self.write_config("USERADD_ERROR_DYNAMIC ??= \"error\"")
45 self.write_config("USERADD_UID_TABLES += \"conf/include/reproducible-passwd\"")
46 self.write_config("USERADD_GID_TABLES += \"conf/include/reproducible-group\"")
47 self.logger.info("Rebuild with staticids")
48 bitbake(' core-image-minimal')
49 shutil.copyfile(os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf.orig'), os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf'))
50 self.logger.info("Rebuild without staticids")
51 bitbake(' core-image-minimal')
52 self.write_config("USERADDEXTENSION = \"useradd-staticids\"")
53 self.write_config("USERADD_ERROR_DYNAMIC ??= \"error\"")
54 self.write_config("USERADD_UID_TABLES += \"files/static-passwd\"")
55 self.write_config("USERADD_GID_TABLES += \"files/static-group\"")
56 self.logger.info("Rebuild with other staticids")
57 self.assertTrue(bitbake(' core-image-minimal'))
diff --git a/meta/lib/oeqa/selftest/cases/wic.py b/meta/lib/oeqa/selftest/cases/wic.py
index 2bf5cb9a86..680f99d381 100644
--- a/meta/lib/oeqa/selftest/cases/wic.py
+++ b/meta/lib/oeqa/selftest/cases/wic.py
@@ -11,39 +11,20 @@
11import os 11import os
12import sys 12import sys
13import unittest 13import unittest
14import hashlib
15import subprocess
14 16
15from glob import glob 17from glob import glob
16from shutil import rmtree, copy 18from shutil import rmtree, copy
17from functools import wraps, lru_cache
18from tempfile import NamedTemporaryFile 19from tempfile import NamedTemporaryFile
20from tempfile import TemporaryDirectory
19 21
20from oeqa.selftest.case import OESelftestTestCase 22from oeqa.selftest.case import OESelftestTestCase
23from oeqa.core.decorator import OETestTag
24from oeqa.core.decorator.data import skipIfNotArch
21from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu 25from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu
22 26
23 27
24@lru_cache(maxsize=32)
25def get_host_arch(recipe):
26 """A cached call to get_bb_var('HOST_ARCH', <recipe>)"""
27 return get_bb_var('HOST_ARCH', recipe)
28
29
30def only_for_arch(archs, image='core-image-minimal'):
31 """Decorator for wrapping test cases that can be run only for specific target
32 architectures. A list of compatible architectures is passed in `archs`.
33 Current architecture will be determined by parsing bitbake output for
34 `image` recipe.
35 """
36 def wrapper(func):
37 @wraps(func)
38 def wrapped_f(*args, **kwargs):
39 arch = get_host_arch(image)
40 if archs and arch not in archs:
41 raise unittest.SkipTest("Testcase arch dependency not met: %s" % arch)
42 return func(*args, **kwargs)
43 wrapped_f.__name__ = func.__name__
44 return wrapped_f
45 return wrapper
46
47def extract_files(debugfs_output): 28def extract_files(debugfs_output):
48 """ 29 """
49 extract file names from the output of debugfs -R 'ls -p', 30 extract file names from the output of debugfs -R 'ls -p',
@@ -77,22 +58,18 @@ class WicTestCase(OESelftestTestCase):
77 58
78 def setUpLocal(self): 59 def setUpLocal(self):
79 """This code is executed before each test method.""" 60 """This code is executed before each test method."""
80 self.resultdir = self.builddir + "/wic-tmp/" 61 self.resultdir = os.path.join(self.builddir, "wic-tmp")
81 super(WicTestCase, self).setUpLocal() 62 super(WicTestCase, self).setUpLocal()
82 63
83 # Do this here instead of in setUpClass as the base setUp does some 64 # Do this here instead of in setUpClass as the base setUp does some
84 # clean up which can result in the native tools built earlier in 65 # clean up which can result in the native tools built earlier in
85 # setUpClass being unavailable. 66 # setUpClass being unavailable.
86 if not WicTestCase.image_is_ready: 67 if not WicTestCase.image_is_ready:
87 if get_bb_var('USE_NLS') == 'yes': 68 if self.td['USE_NLS'] != 'yes':
88 bitbake('wic-tools') 69 self.skipTest('wic-tools needs USE_NLS=yes')
89 else:
90 self.skipTest('wic-tools cannot be built due its (intltool|gettext)-native dependency and NLS disable')
91 70
92 bitbake('core-image-minimal') 71 bitbake('wic-tools core-image-minimal core-image-minimal-mtdutils')
93 bitbake('core-image-minimal-mtdutils')
94 WicTestCase.image_is_ready = True 72 WicTestCase.image_is_ready = True
95
96 rmtree(self.resultdir, ignore_errors=True) 73 rmtree(self.resultdir, ignore_errors=True)
97 74
98 def tearDownLocal(self): 75 def tearDownLocal(self):
@@ -103,15 +80,13 @@ class WicTestCase(OESelftestTestCase):
103 def _get_image_env_path(self, image): 80 def _get_image_env_path(self, image):
104 """Generate and obtain the path to <image>.env""" 81 """Generate and obtain the path to <image>.env"""
105 if image not in WicTestCase.wicenv_cache: 82 if image not in WicTestCase.wicenv_cache:
106 self.assertEqual(0, bitbake('%s -c do_rootfs_wicenv' % image).status) 83 bitbake('%s -c do_rootfs_wicenv' % image)
107 bb_vars = get_bb_vars(['STAGING_DIR', 'MACHINE'], image) 84 stdir = get_bb_var('STAGING_DIR', image)
108 stdir = bb_vars['STAGING_DIR'] 85 machine = self.td["MACHINE"]
109 machine = bb_vars['MACHINE']
110 WicTestCase.wicenv_cache[image] = os.path.join(stdir, machine, 'imgdata') 86 WicTestCase.wicenv_cache[image] = os.path.join(stdir, machine, 'imgdata')
111 return WicTestCase.wicenv_cache[image] 87 return WicTestCase.wicenv_cache[image]
112 88
113class Wic(WicTestCase): 89class CLITests(OESelftestTestCase):
114
115 def test_version(self): 90 def test_version(self):
116 """Test wic --version""" 91 """Test wic --version"""
117 runCmd('wic --version') 92 runCmd('wic --version')
@@ -172,68 +147,136 @@ class Wic(WicTestCase):
172 """Test wic without command""" 147 """Test wic without command"""
173 self.assertEqual(1, runCmd('wic', ignore_status=True).status) 148 self.assertEqual(1, runCmd('wic', ignore_status=True).status)
174 149
150class Wic(WicTestCase):
151 def test_skip_kernel_install(self):
152 """Test the functionality of not installing the kernel in the boot directory using the wic plugin"""
153 # create a temporary file for the WKS content
154 with NamedTemporaryFile("w", suffix=".wks") as wks:
155 wks.write(
156 'part --source bootimg_efi '
157 '--sourceparams="loader=grub-efi,install-kernel-into-boot-dir=false" '
158 '--label boot --active\n'
159 )
160 wks.flush()
161 # create a temporary directory to extract the disk image to
162 with TemporaryDirectory() as tmpdir:
163 img = 'core-image-minimal'
164 # build the image using the WKS file
165 cmd = "wic create %s -e %s -o %s" % (
166 wks.name, img, self.resultdir)
167 runCmd(cmd)
168 wksname = os.path.splitext(os.path.basename(wks.name))[0]
169 out = glob(os.path.join(
170 self.resultdir, "%s-*.direct" % wksname))
171 self.assertEqual(1, len(out))
172 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
173 # extract the content of the disk image to the temporary directory
174 cmd = "wic cp %s:1 %s -n %s" % (out[0], tmpdir, sysroot)
175 runCmd(cmd)
176 # check if the kernel is installed or not
177 kimgtype = get_bb_var('KERNEL_IMAGETYPE', img)
178 for file in os.listdir(tmpdir):
179 if file == kimgtype:
180 raise AssertionError(
181 "The kernel image '{}' was found in the partition".format(kimgtype)
182 )
183
184 def test_kernel_install(self):
185 """Test the installation of the kernel to the boot directory in the wic plugin"""
186 # create a temporary file for the WKS content
187 with NamedTemporaryFile("w", suffix=".wks") as wks:
188 wks.write(
189 'part --source bootimg_efi '
190 '--sourceparams="loader=grub-efi,install-kernel-into-boot-dir=true" '
191 '--label boot --active\n'
192 )
193 wks.flush()
194 # create a temporary directory to extract the disk image to
195 with TemporaryDirectory() as tmpdir:
196 img = 'core-image-minimal'
197 # build the image using the WKS file
198 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
199 runCmd(cmd)
200 wksname = os.path.splitext(os.path.basename(wks.name))[0]
201 out = glob(os.path.join(self.resultdir, "%s-*.direct" % wksname))
202 self.assertEqual(1, len(out))
203 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
204 # extract the content of the disk image to the temporary directory
205 cmd = "wic cp %s:1 %s -n %s" % (out[0], tmpdir, sysroot)
206 runCmd(cmd)
207 # check if the kernel is installed or not
208 kimgtype = get_bb_var('KERNEL_IMAGETYPE', img)
209 found = False
210 for file in os.listdir(tmpdir):
211 if file == kimgtype:
212 found = True
213 break
214 self.assertTrue(
215 found, "The kernel image '{}' was not found in the boot partition".format(kimgtype)
216 )
217
175 def test_build_image_name(self): 218 def test_build_image_name(self):
176 """Test wic create wictestdisk --image-name=core-image-minimal""" 219 """Test wic create wictestdisk --image-name=core-image-minimal"""
177 cmd = "wic create wictestdisk --image-name=core-image-minimal -o %s" % self.resultdir 220 cmd = "wic create wictestdisk --image-name=core-image-minimal -o %s" % self.resultdir
178 runCmd(cmd) 221 runCmd(cmd)
179 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) 222 self.assertEqual(1, len(glob(os.path.join (self.resultdir, "wictestdisk-*.direct"))))
180 223
181 @only_for_arch(['i586', 'i686', 'x86_64']) 224 @skipIfNotArch(['i586', 'i686', 'x86_64'])
182 def test_gpt_image(self): 225 def test_gpt_image(self):
183 """Test creation of core-image-minimal with gpt table and UUID boot""" 226 """Test creation of core-image-minimal with gpt table and UUID boot"""
184 cmd = "wic create directdisk-gpt --image-name core-image-minimal -o %s" % self.resultdir 227 cmd = "wic create directdisk-gpt --image-name core-image-minimal -o %s" % self.resultdir
185 runCmd(cmd) 228 runCmd(cmd)
186 self.assertEqual(1, len(glob(self.resultdir + "directdisk-*.direct"))) 229 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "directdisk-*.direct"))))
187 230
188 @only_for_arch(['i586', 'i686', 'x86_64']) 231 @skipIfNotArch(['i586', 'i686', 'x86_64'])
189 def test_iso_image(self): 232 def test_iso_image(self):
190 """Test creation of hybrid iso image with legacy and EFI boot""" 233 """Test creation of hybrid iso image with legacy and EFI boot"""
191 config = 'INITRAMFS_IMAGE = "core-image-minimal-initramfs"\n'\ 234 config = 'INITRAMFS_IMAGE = "core-image-minimal-initramfs"\n'\
192 'MACHINE_FEATURES_append = " efi"\n'\ 235 'MACHINE_FEATURES:append = " efi"\n'\
193 'DEPENDS_pn-core-image-minimal += "syslinux"\n' 236 'DEPENDS:pn-core-image-minimal += "syslinux"\n'
194 self.append_config(config) 237 self.append_config(config)
195 bitbake('core-image-minimal core-image-minimal-initramfs') 238 bitbake('core-image-minimal core-image-minimal-initramfs')
196 self.remove_config(config) 239 self.remove_config(config)
197 cmd = "wic create mkhybridiso --image-name core-image-minimal -o %s" % self.resultdir 240 cmd = "wic create mkhybridiso --image-name core-image-minimal -o %s" % self.resultdir
198 runCmd(cmd) 241 runCmd(cmd)
199 self.assertEqual(1, len(glob(self.resultdir + "HYBRID_ISO_IMG-*.direct"))) 242 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "HYBRID_ISO_IMG-*.direct"))))
200 self.assertEqual(1, len(glob(self.resultdir + "HYBRID_ISO_IMG-*.iso"))) 243 self.assertEqual(1, len(glob(os.path.join (self.resultdir, "HYBRID_ISO_IMG-*.iso"))))
201 244
202 @only_for_arch(['i586', 'i686', 'x86_64']) 245 @skipIfNotArch(['i586', 'i686', 'x86_64'])
203 def test_qemux86_directdisk(self): 246 def test_qemux86_directdisk(self):
204 """Test creation of qemux-86-directdisk image""" 247 """Test creation of qemux-86-directdisk image"""
205 cmd = "wic create qemux86-directdisk -e core-image-minimal -o %s" % self.resultdir 248 cmd = "wic create qemux86-directdisk -e core-image-minimal -o %s" % self.resultdir
206 runCmd(cmd) 249 runCmd(cmd)
207 self.assertEqual(1, len(glob(self.resultdir + "qemux86-directdisk-*direct"))) 250 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "qemux86-directdisk-*direct"))))
208 251
209 @only_for_arch(['i586', 'i686', 'x86_64']) 252 @skipIfNotArch(['i586', 'i686', 'x86_64', 'aarch64'])
210 def test_mkefidisk(self): 253 def test_mkefidisk(self):
211 """Test creation of mkefidisk image""" 254 """Test creation of mkefidisk image"""
212 cmd = "wic create mkefidisk -e core-image-minimal -o %s" % self.resultdir 255 cmd = "wic create mkefidisk -e core-image-minimal -o %s" % self.resultdir
213 runCmd(cmd) 256 runCmd(cmd)
214 self.assertEqual(1, len(glob(self.resultdir + "mkefidisk-*direct"))) 257 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "mkefidisk-*direct"))))
215 258
216 @only_for_arch(['i586', 'i686', 'x86_64']) 259 @skipIfNotArch(['i586', 'i686', 'x86_64'])
217 def test_bootloader_config(self): 260 def test_bootloader_config(self):
218 """Test creation of directdisk-bootloader-config image""" 261 """Test creation of directdisk-bootloader-config image"""
219 config = 'DEPENDS_pn-core-image-minimal += "syslinux"\n' 262 config = 'DEPENDS:pn-core-image-minimal += "syslinux"\n'
220 self.append_config(config) 263 self.append_config(config)
221 bitbake('core-image-minimal') 264 bitbake('core-image-minimal')
222 self.remove_config(config) 265 self.remove_config(config)
223 cmd = "wic create directdisk-bootloader-config -e core-image-minimal -o %s" % self.resultdir 266 cmd = "wic create directdisk-bootloader-config -e core-image-minimal -o %s" % self.resultdir
224 runCmd(cmd) 267 runCmd(cmd)
225 self.assertEqual(1, len(glob(self.resultdir + "directdisk-bootloader-config-*direct"))) 268 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "directdisk-bootloader-config-*direct"))))
226 269
227 @only_for_arch(['i586', 'i686', 'x86_64']) 270 @skipIfNotArch(['i586', 'i686', 'x86_64', 'aarch64'])
228 def test_systemd_bootdisk(self): 271 def test_systemd_bootdisk(self):
229 """Test creation of systemd-bootdisk image""" 272 """Test creation of systemd-bootdisk image"""
230 config = 'MACHINE_FEATURES_append = " efi"\n' 273 config = 'MACHINE_FEATURES:append = " efi"\n'
231 self.append_config(config) 274 self.append_config(config)
232 bitbake('core-image-minimal') 275 bitbake('core-image-minimal')
233 self.remove_config(config) 276 self.remove_config(config)
234 cmd = "wic create systemd-bootdisk -e core-image-minimal -o %s" % self.resultdir 277 cmd = "wic create systemd-bootdisk -e core-image-minimal -o %s" % self.resultdir
235 runCmd(cmd) 278 runCmd(cmd)
236 self.assertEqual(1, len(glob(self.resultdir + "systemd-bootdisk-*direct"))) 279 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "systemd-bootdisk-*direct"))))
237 280
238 def test_efi_bootpart(self): 281 def test_efi_bootpart(self):
239 """Test creation of efi-bootpart image""" 282 """Test creation of efi-bootpart image"""
@@ -242,7 +285,7 @@ class Wic(WicTestCase):
242 self.append_config('IMAGE_EFI_BOOT_FILES = "%s;kernel"\n' % kimgtype) 285 self.append_config('IMAGE_EFI_BOOT_FILES = "%s;kernel"\n' % kimgtype)
243 runCmd(cmd) 286 runCmd(cmd)
244 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') 287 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
245 images = glob(self.resultdir + "mkefidisk-*.direct") 288 images = glob(os.path.join(self.resultdir, "mkefidisk-*.direct"))
246 result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot)) 289 result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot))
247 self.assertIn("kernel",result.output) 290 self.assertIn("kernel",result.output)
248 291
@@ -252,14 +295,15 @@ class Wic(WicTestCase):
252 kimgtype = get_bb_var('KERNEL_IMAGETYPE', 'core-image-minimal') 295 kimgtype = get_bb_var('KERNEL_IMAGETYPE', 'core-image-minimal')
253 self.write_config('IMAGE_BOOT_FILES = "%s"\n' % kimgtype) 296 self.write_config('IMAGE_BOOT_FILES = "%s"\n' % kimgtype)
254 runCmd(cmd) 297 runCmd(cmd)
255 self.assertEqual(1, len(glob(self.resultdir + "sdimage-bootpart-*direct"))) 298 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "sdimage-bootpart-*direct"))))
256 299
257 @only_for_arch(['i586', 'i686', 'x86_64']) 300 # TODO this doesn't have to be x86-specific
301 @skipIfNotArch(['i586', 'i686', 'x86_64'])
258 def test_default_output_dir(self): 302 def test_default_output_dir(self):
259 """Test default output location""" 303 """Test default output location"""
260 for fname in glob("directdisk-*.direct"): 304 for fname in glob("directdisk-*.direct"):
261 os.remove(fname) 305 os.remove(fname)
262 config = 'DEPENDS_pn-core-image-minimal += "syslinux"\n' 306 config = 'DEPENDS:pn-core-image-minimal += "syslinux"\n'
263 self.append_config(config) 307 self.append_config(config)
264 bitbake('core-image-minimal') 308 bitbake('core-image-minimal')
265 self.remove_config(config) 309 self.remove_config(config)
@@ -267,7 +311,7 @@ class Wic(WicTestCase):
267 runCmd(cmd) 311 runCmd(cmd)
268 self.assertEqual(1, len(glob("directdisk-*.direct"))) 312 self.assertEqual(1, len(glob("directdisk-*.direct")))
269 313
270 @only_for_arch(['i586', 'i686', 'x86_64']) 314 @skipIfNotArch(['i586', 'i686', 'x86_64'])
271 def test_build_artifacts(self): 315 def test_build_artifacts(self):
272 """Test wic create directdisk providing all artifacts.""" 316 """Test wic create directdisk providing all artifacts."""
273 bb_vars = get_bb_vars(['STAGING_DATADIR', 'RECIPE_SYSROOT_NATIVE'], 317 bb_vars = get_bb_vars(['STAGING_DATADIR', 'RECIPE_SYSROOT_NATIVE'],
@@ -282,28 +326,28 @@ class Wic(WicTestCase):
282 "-n %(recipe_sysroot_native)s " 326 "-n %(recipe_sysroot_native)s "
283 "-r %(image_rootfs)s " 327 "-r %(image_rootfs)s "
284 "-o %(resultdir)s" % bbvars) 328 "-o %(resultdir)s" % bbvars)
285 self.assertEqual(1, len(glob(self.resultdir + "directdisk-*.direct"))) 329 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "directdisk-*.direct"))))
286 330
287 def test_compress_gzip(self): 331 def test_compress_gzip(self):
288 """Test compressing an image with gzip""" 332 """Test compressing an image with gzip"""
289 runCmd("wic create wictestdisk " 333 runCmd("wic create wictestdisk "
290 "--image-name core-image-minimal " 334 "--image-name core-image-minimal "
291 "-c gzip -o %s" % self.resultdir) 335 "-c gzip -o %s" % self.resultdir)
292 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct.gz"))) 336 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct.gz"))))
293 337
294 def test_compress_bzip2(self): 338 def test_compress_bzip2(self):
295 """Test compressing an image with bzip2""" 339 """Test compressing an image with bzip2"""
296 runCmd("wic create wictestdisk " 340 runCmd("wic create wictestdisk "
297 "--image-name=core-image-minimal " 341 "--image-name=core-image-minimal "
298 "-c bzip2 -o %s" % self.resultdir) 342 "-c bzip2 -o %s" % self.resultdir)
299 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct.bz2"))) 343 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct.bz2"))))
300 344
301 def test_compress_xz(self): 345 def test_compress_xz(self):
302 """Test compressing an image with xz""" 346 """Test compressing an image with xz"""
303 runCmd("wic create wictestdisk " 347 runCmd("wic create wictestdisk "
304 "--image-name=core-image-minimal " 348 "--image-name=core-image-minimal "
305 "--compress-with=xz -o %s" % self.resultdir) 349 "--compress-with=xz -o %s" % self.resultdir)
306 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct.xz"))) 350 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct.xz"))))
307 351
308 def test_wrong_compressor(self): 352 def test_wrong_compressor(self):
309 """Test how wic breaks if wrong compressor is provided""" 353 """Test how wic breaks if wrong compressor is provided"""
@@ -317,23 +361,23 @@ class Wic(WicTestCase):
317 runCmd("wic create wictestdisk " 361 runCmd("wic create wictestdisk "
318 "--image-name=core-image-minimal " 362 "--image-name=core-image-minimal "
319 "-D -o %s" % self.resultdir) 363 "-D -o %s" % self.resultdir)
320 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) 364 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
321 self.assertEqual(1, len(glob(self.resultdir + "tmp.wic*"))) 365 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "tmp.wic*"))))
322 366
323 def test_debug_long(self): 367 def test_debug_long(self):
324 """Test --debug option""" 368 """Test --debug option"""
325 runCmd("wic create wictestdisk " 369 runCmd("wic create wictestdisk "
326 "--image-name=core-image-minimal " 370 "--image-name=core-image-minimal "
327 "--debug -o %s" % self.resultdir) 371 "--debug -o %s" % self.resultdir)
328 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) 372 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
329 self.assertEqual(1, len(glob(self.resultdir + "tmp.wic*"))) 373 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "tmp.wic*"))))
330 374
331 def test_skip_build_check_short(self): 375 def test_skip_build_check_short(self):
332 """Test -s option""" 376 """Test -s option"""
333 runCmd("wic create wictestdisk " 377 runCmd("wic create wictestdisk "
334 "--image-name=core-image-minimal " 378 "--image-name=core-image-minimal "
335 "-s -o %s" % self.resultdir) 379 "-s -o %s" % self.resultdir)
336 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) 380 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
337 381
338 def test_skip_build_check_long(self): 382 def test_skip_build_check_long(self):
339 """Test --skip-build-check option""" 383 """Test --skip-build-check option"""
@@ -341,14 +385,14 @@ class Wic(WicTestCase):
341 "--image-name=core-image-minimal " 385 "--image-name=core-image-minimal "
342 "--skip-build-check " 386 "--skip-build-check "
343 "--outdir %s" % self.resultdir) 387 "--outdir %s" % self.resultdir)
344 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) 388 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
345 389
346 def test_build_rootfs_short(self): 390 def test_build_rootfs_short(self):
347 """Test -f option""" 391 """Test -f option"""
348 runCmd("wic create wictestdisk " 392 runCmd("wic create wictestdisk "
349 "--image-name=core-image-minimal " 393 "--image-name=core-image-minimal "
350 "-f -o %s" % self.resultdir) 394 "-f -o %s" % self.resultdir)
351 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) 395 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
352 396
353 def test_build_rootfs_long(self): 397 def test_build_rootfs_long(self):
354 """Test --build-rootfs option""" 398 """Test --build-rootfs option"""
@@ -356,9 +400,10 @@ class Wic(WicTestCase):
356 "--image-name=core-image-minimal " 400 "--image-name=core-image-minimal "
357 "--build-rootfs " 401 "--build-rootfs "
358 "--outdir %s" % self.resultdir) 402 "--outdir %s" % self.resultdir)
359 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) 403 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
360 404
361 @only_for_arch(['i586', 'i686', 'x86_64']) 405 # TODO this doesn't have to be x86-specific
406 @skipIfNotArch(['i586', 'i686', 'x86_64'])
362 def test_rootfs_indirect_recipes(self): 407 def test_rootfs_indirect_recipes(self):
363 """Test usage of rootfs plugin with rootfs recipes""" 408 """Test usage of rootfs plugin with rootfs recipes"""
364 runCmd("wic create directdisk-multi-rootfs " 409 runCmd("wic create directdisk-multi-rootfs "
@@ -366,9 +411,10 @@ class Wic(WicTestCase):
366 "--rootfs rootfs1=core-image-minimal " 411 "--rootfs rootfs1=core-image-minimal "
367 "--rootfs rootfs2=core-image-minimal " 412 "--rootfs rootfs2=core-image-minimal "
368 "--outdir %s" % self.resultdir) 413 "--outdir %s" % self.resultdir)
369 self.assertEqual(1, len(glob(self.resultdir + "directdisk-multi-rootfs*.direct"))) 414 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "directdisk-multi-rootfs*.direct"))))
370 415
371 @only_for_arch(['i586', 'i686', 'x86_64']) 416 # TODO this doesn't have to be x86-specific
417 @skipIfNotArch(['i586', 'i686', 'x86_64'])
372 def test_rootfs_artifacts(self): 418 def test_rootfs_artifacts(self):
373 """Test usage of rootfs plugin with rootfs paths""" 419 """Test usage of rootfs plugin with rootfs paths"""
374 bb_vars = get_bb_vars(['STAGING_DATADIR', 'RECIPE_SYSROOT_NATIVE'], 420 bb_vars = get_bb_vars(['STAGING_DATADIR', 'RECIPE_SYSROOT_NATIVE'],
@@ -385,7 +431,7 @@ class Wic(WicTestCase):
385 "--rootfs-dir rootfs1=%(image_rootfs)s " 431 "--rootfs-dir rootfs1=%(image_rootfs)s "
386 "--rootfs-dir rootfs2=%(image_rootfs)s " 432 "--rootfs-dir rootfs2=%(image_rootfs)s "
387 "--outdir %(resultdir)s" % bbvars) 433 "--outdir %(resultdir)s" % bbvars)
388 self.assertEqual(1, len(glob(self.resultdir + "%(wks)s-*.direct" % bbvars))) 434 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "%(wks)s-*.direct" % bbvars))))
389 435
390 def test_exclude_path(self): 436 def test_exclude_path(self):
391 """Test --exclude-path wks option.""" 437 """Test --exclude-path wks option."""
@@ -400,19 +446,20 @@ class Wic(WicTestCase):
400 wks.write(""" 446 wks.write("""
401part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path usr 447part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path usr
402part /usr --source rootfs --ondisk mmcblk0 --fstype=ext4 --rootfs-dir %s/usr 448part /usr --source rootfs --ondisk mmcblk0 --fstype=ext4 --rootfs-dir %s/usr
403part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --rootfs-dir %s/usr""" 449part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --rootfs-dir %s/usr
404 % (rootfs_dir, rootfs_dir)) 450part /mnt --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/whoami --rootfs-dir %s/usr"""
451 % (rootfs_dir, rootfs_dir, rootfs_dir))
405 runCmd("wic create %s -e core-image-minimal -o %s" \ 452 runCmd("wic create %s -e core-image-minimal -o %s" \
406 % (wks_file, self.resultdir)) 453 % (wks_file, self.resultdir))
407 454
408 os.remove(wks_file) 455 os.remove(wks_file)
409 wicout = glob(self.resultdir + "%s-*direct" % 'temp') 456 wicout = glob(os.path.join(self.resultdir, "%s-*direct" % 'temp'))
410 self.assertEqual(1, len(wicout)) 457 self.assertEqual(1, len(wicout))
411 458
412 wicimg = wicout[0] 459 wicimg = wicout[0]
413 460
414 # verify partition size with wic 461 # verify partition size with wic
415 res = runCmd("parted -m %s unit b p 2>/dev/null" % wicimg) 462 res = runCmd("parted -m %s unit b p" % wicimg, stderr=subprocess.PIPE)
416 463
417 # parse parted output which looks like this: 464 # parse parted output which looks like this:
418 # BYT;\n 465 # BYT;\n
@@ -420,9 +467,9 @@ part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --r
420 # 1:0.00MiB:200MiB:200MiB:ext4::;\n 467 # 1:0.00MiB:200MiB:200MiB:ext4::;\n
421 partlns = res.output.splitlines()[2:] 468 partlns = res.output.splitlines()[2:]
422 469
423 self.assertEqual(3, len(partlns)) 470 self.assertEqual(4, len(partlns))
424 471
425 for part in [1, 2, 3]: 472 for part in [1, 2, 3, 4]:
426 part_file = os.path.join(self.resultdir, "selftest_img.part%d" % part) 473 part_file = os.path.join(self.resultdir, "selftest_img.part%d" % part)
427 partln = partlns[part-1].split(":") 474 partln = partlns[part-1].split(":")
428 self.assertEqual(7, len(partln)) 475 self.assertEqual(7, len(partln))
@@ -433,16 +480,16 @@ part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --r
433 480
434 # Test partition 1, should contain the normal root directories, except 481 # Test partition 1, should contain the normal root directories, except
435 # /usr. 482 # /usr.
436 res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % \ 483 res = runCmd("debugfs -R 'ls -p' %s" % \
437 os.path.join(self.resultdir, "selftest_img.part1")) 484 os.path.join(self.resultdir, "selftest_img.part1"), stderr=subprocess.PIPE)
438 files = extract_files(res.output) 485 files = extract_files(res.output)
439 self.assertIn("etc", files) 486 self.assertIn("etc", files)
440 self.assertNotIn("usr", files) 487 self.assertNotIn("usr", files)
441 488
442 # Partition 2, should contain common directories for /usr, not root 489 # Partition 2, should contain common directories for /usr, not root
443 # directories. 490 # directories.
444 res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % \ 491 res = runCmd("debugfs -R 'ls -p' %s" % \
445 os.path.join(self.resultdir, "selftest_img.part2")) 492 os.path.join(self.resultdir, "selftest_img.part2"), stderr=subprocess.PIPE)
446 files = extract_files(res.output) 493 files = extract_files(res.output)
447 self.assertNotIn("etc", files) 494 self.assertNotIn("etc", files)
448 self.assertNotIn("usr", files) 495 self.assertNotIn("usr", files)
@@ -450,27 +497,78 @@ part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --r
450 497
451 # Partition 3, should contain the same as partition 2, including the bin 498 # Partition 3, should contain the same as partition 2, including the bin
452 # directory, but not the files inside it. 499 # directory, but not the files inside it.
453 res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % \ 500 res = runCmd("debugfs -R 'ls -p' %s" % \
454 os.path.join(self.resultdir, "selftest_img.part3")) 501 os.path.join(self.resultdir, "selftest_img.part3"), stderr=subprocess.PIPE)
455 files = extract_files(res.output) 502 files = extract_files(res.output)
456 self.assertNotIn("etc", files) 503 self.assertNotIn("etc", files)
457 self.assertNotIn("usr", files) 504 self.assertNotIn("usr", files)
458 self.assertIn("share", files) 505 self.assertIn("share", files)
459 self.assertIn("bin", files) 506 self.assertIn("bin", files)
460 res = runCmd("debugfs -R 'ls -p bin' %s 2>/dev/null" % \ 507 res = runCmd("debugfs -R 'ls -p bin' %s" % \
461 os.path.join(self.resultdir, "selftest_img.part3")) 508 os.path.join(self.resultdir, "selftest_img.part3"), stderr=subprocess.PIPE)
462 files = extract_files(res.output) 509 files = extract_files(res.output)
463 self.assertIn(".", files) 510 self.assertIn(".", files)
464 self.assertIn("..", files) 511 self.assertIn("..", files)
465 self.assertEqual(2, len(files)) 512 self.assertEqual(2, len(files))
466 513
467 for part in [1, 2, 3]: 514 # Partition 4, should contain the same as partition 2, including the bin
515 # directory, but not whoami (a symlink to busybox.nosuid) inside it.
516 res = runCmd("debugfs -R 'ls -p' %s" % \
517 os.path.join(self.resultdir, "selftest_img.part4"), stderr=subprocess.PIPE)
518 files = extract_files(res.output)
519 self.assertNotIn("etc", files)
520 self.assertNotIn("usr", files)
521 self.assertIn("share", files)
522 self.assertIn("bin", files)
523 res = runCmd("debugfs -R 'ls -p bin' %s" % \
524 os.path.join(self.resultdir, "selftest_img.part4"), stderr=subprocess.PIPE)
525 files = extract_files(res.output)
526 self.assertIn(".", files)
527 self.assertIn("..", files)
528 self.assertIn("who", files)
529 self.assertNotIn("whoami", files)
530
531 for part in [1, 2, 3, 4]:
468 part_file = os.path.join(self.resultdir, "selftest_img.part%d" % part) 532 part_file = os.path.join(self.resultdir, "selftest_img.part%d" % part)
469 os.remove(part_file) 533 os.remove(part_file)
470 534
471 finally: 535 finally:
472 os.environ['PATH'] = oldpath 536 os.environ['PATH'] = oldpath
473 537
538 def test_exclude_path_with_extra_space(self):
539 """Test having --exclude-path with IMAGE_ROOTFS_EXTRA_SPACE. [Yocto #15555]"""
540
541 with NamedTemporaryFile("w", suffix=".wks") as wks:
542 wks.writelines(
543 ['bootloader --ptable gpt\n',
544 'part /boot --size=100M --active --fstype=ext4 --label boot\n',
545 'part / --source rootfs --fstype=ext4 --label root --exclude-path boot/\n'])
546 wks.flush()
547 config = 'IMAGE_ROOTFS_EXTRA_SPACE = "500000"\n'\
548 'DEPENDS:pn-core-image-minimal += "wic-tools"\n'\
549 'IMAGE_FSTYPES += "wic ext4"\n'\
550 'WKS_FILE = "%s"\n' % wks.name
551 self.append_config(config)
552 bitbake('core-image-minimal')
553
554 """
555 the output of "wic ls <image>.wic" will look something like:
556 Num Start End Size Fstype
557 1 17408 136332287 136314880 ext4
558 2 136332288 171464703 35132416 ext4
559 we are looking for the size of partition 2
560 i.e. in this case the number 35,132,416
561 without the fix the size will be around 85,403,648
562 with the fix the size should be around 799,960,064
563 """
564 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'MACHINE'], 'core-image-minimal')
565 deploy_dir = bb_vars['DEPLOY_DIR_IMAGE']
566 machine = bb_vars['MACHINE']
567 nativesysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
568 wicout = glob(os.path.join(deploy_dir, "core-image-minimal-%s.rootfs-*.wic" % machine))[0]
569 size_of_root_partition = int(runCmd("wic ls %s --native-sysroot %s" % (wicout, nativesysroot)).output.split('\n')[2].split()[3])
570 self.assertGreater(size_of_root_partition, 500000000)
571
474 def test_include_path(self): 572 def test_include_path(self):
475 """Test --include-path wks option.""" 573 """Test --include-path wks option."""
476 574
@@ -496,13 +594,13 @@ part /part2 --source rootfs --ondisk mmcblk0 --fstype=ext4 --include-path %s"""
496 part2 = glob(os.path.join(self.resultdir, 'temp-*.direct.p2'))[0] 594 part2 = glob(os.path.join(self.resultdir, 'temp-*.direct.p2'))[0]
497 595
498 # Test partition 1, should not contain 'test-file' 596 # Test partition 1, should not contain 'test-file'
499 res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % (part1)) 597 res = runCmd("debugfs -R 'ls -p' %s" % (part1), stderr=subprocess.PIPE)
500 files = extract_files(res.output) 598 files = extract_files(res.output)
501 self.assertNotIn('test-file', files) 599 self.assertNotIn('test-file', files)
502 self.assertEqual(True, files_own_by_root(res.output)) 600 self.assertEqual(True, files_own_by_root(res.output))
503 601
504 # Test partition 2, should contain 'test-file' 602 # Test partition 2, should contain 'test-file'
505 res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % (part2)) 603 res = runCmd("debugfs -R 'ls -p' %s" % (part2), stderr=subprocess.PIPE)
506 files = extract_files(res.output) 604 files = extract_files(res.output)
507 self.assertIn('test-file', files) 605 self.assertIn('test-file', files)
508 self.assertEqual(True, files_own_by_root(res.output)) 606 self.assertEqual(True, files_own_by_root(res.output))
@@ -531,12 +629,12 @@ part / --source rootfs --fstype=ext4 --include-path %s --include-path core-imag
531 629
532 part1 = glob(os.path.join(self.resultdir, 'temp-*.direct.p1'))[0] 630 part1 = glob(os.path.join(self.resultdir, 'temp-*.direct.p1'))[0]
533 631
534 res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % (part1)) 632 res = runCmd("debugfs -R 'ls -p' %s" % (part1), stderr=subprocess.PIPE)
535 files = extract_files(res.output) 633 files = extract_files(res.output)
536 self.assertIn('test-file', files) 634 self.assertIn('test-file', files)
537 self.assertEqual(True, files_own_by_root(res.output)) 635 self.assertEqual(True, files_own_by_root(res.output))
538 636
539 res = runCmd("debugfs -R 'ls -p /export/etc/' %s 2>/dev/null" % (part1)) 637 res = runCmd("debugfs -R 'ls -p /export/etc/' %s" % (part1), stderr=subprocess.PIPE)
540 files = extract_files(res.output) 638 files = extract_files(res.output)
541 self.assertIn('passwd', files) 639 self.assertIn('passwd', files)
542 self.assertEqual(True, files_own_by_root(res.output)) 640 self.assertEqual(True, files_own_by_root(res.output))
@@ -623,7 +721,7 @@ part /etc --source rootfs --fstype=ext4 --change-directory=etc
623 % (wks_file, self.resultdir)) 721 % (wks_file, self.resultdir))
624 722
625 for part in glob(os.path.join(self.resultdir, 'temp-*.direct.p*')): 723 for part in glob(os.path.join(self.resultdir, 'temp-*.direct.p*')):
626 res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % (part)) 724 res = runCmd("debugfs -R 'ls -p' %s" % (part), stderr=subprocess.PIPE)
627 self.assertEqual(True, files_own_by_root(res.output)) 725 self.assertEqual(True, files_own_by_root(res.output))
628 726
629 config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "%s"\n' % wks_file 727 config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "%s"\n' % wks_file
@@ -633,7 +731,7 @@ part /etc --source rootfs --fstype=ext4 --change-directory=etc
633 731
634 # check each partition for permission 732 # check each partition for permission
635 for part in glob(os.path.join(tmpdir, 'temp-*.direct.p*')): 733 for part in glob(os.path.join(tmpdir, 'temp-*.direct.p*')):
636 res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % (part)) 734 res = runCmd("debugfs -R 'ls -p' %s" % (part), stderr=subprocess.PIPE)
637 self.assertTrue(files_own_by_root(res.output) 735 self.assertTrue(files_own_by_root(res.output)
638 ,msg='Files permission incorrect using wks set "%s"' % test) 736 ,msg='Files permission incorrect using wks set "%s"' % test)
639 737
@@ -661,7 +759,7 @@ part /etc --source rootfs --fstype=ext4 --change-directory=etc
661 759
662 part1 = glob(os.path.join(self.resultdir, 'temp-*.direct.p1'))[0] 760 part1 = glob(os.path.join(self.resultdir, 'temp-*.direct.p1'))[0]
663 761
664 res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % (part1)) 762 res = runCmd("debugfs -R 'ls -p' %s" % (part1), stderr=subprocess.PIPE)
665 files = extract_files(res.output) 763 files = extract_files(res.output)
666 self.assertIn('passwd', files) 764 self.assertIn('passwd', files)
667 765
@@ -686,21 +784,185 @@ part /etc --source rootfs --fstype=ext4 --change-directory=etc
686 % (wks_file, self.resultdir), ignore_status=True).status) 784 % (wks_file, self.resultdir), ignore_status=True).status)
687 os.remove(wks_file) 785 os.remove(wks_file)
688 786
787 def test_no_fstab_update(self):
788 """Test --no-fstab-update wks option."""
789
790 oldpath = os.environ['PATH']
791 os.environ['PATH'] = get_bb_var("PATH", "wic-tools")
792
793 # Get stock fstab from base-files recipe
794 bitbake('base-files -c do_install')
795 bf_fstab = os.path.join(get_bb_var('D', 'base-files'), 'etc', 'fstab')
796 self.assertEqual(True, os.path.exists(bf_fstab))
797 bf_fstab_md5sum = runCmd('md5sum %s ' % bf_fstab).output.split(" ")[0]
798
799 try:
800 no_fstab_update_path = os.path.join(self.resultdir, 'test-no-fstab-update')
801 os.makedirs(no_fstab_update_path)
802 wks_file = os.path.join(no_fstab_update_path, 'temp.wks')
803 with open(wks_file, 'w') as wks:
804 wks.writelines(['part / --source rootfs --fstype=ext4 --label rootfs\n',
805 'part /mnt/p2 --source rootfs --rootfs-dir=core-image-minimal ',
806 '--fstype=ext4 --label p2 --no-fstab-update\n'])
807 runCmd("wic create %s -e core-image-minimal -o %s" \
808 % (wks_file, self.resultdir))
809
810 part_fstab_md5sum = []
811 for i in range(1, 3):
812 part = glob(os.path.join(self.resultdir, 'temp-*.direct.p') + str(i))[0]
813 part_fstab = runCmd("debugfs -R 'cat etc/fstab' %s" % (part), stderr=subprocess.PIPE)
814 part_fstab_md5sum.append(hashlib.md5((part_fstab.output + "\n\n").encode('utf-8')).hexdigest())
815
816 # '/etc/fstab' in partition 2 should contain the same stock fstab file
817 # as the one installed by the base-file recipe.
818 self.assertEqual(bf_fstab_md5sum, part_fstab_md5sum[1])
819
820 # '/etc/fstab' in partition 1 should contain an updated fstab file.
821 self.assertNotEqual(bf_fstab_md5sum, part_fstab_md5sum[0])
822
823 finally:
824 os.environ['PATH'] = oldpath
825
826 def test_no_fstab_update_errors(self):
827 """Test --no-fstab-update wks option error handling."""
828 wks_file = 'temp.wks'
829
830 # Absolute argument.
831 with open(wks_file, 'w') as wks:
832 wks.write("part / --source rootfs --fstype=ext4 --no-fstab-update /etc")
833 self.assertNotEqual(0, runCmd("wic create %s -e core-image-minimal -o %s" \
834 % (wks_file, self.resultdir), ignore_status=True).status)
835 os.remove(wks_file)
836
837 # Argument pointing to parent directory.
838 with open(wks_file, 'w') as wks:
839 wks.write("part / --source rootfs --fstype=ext4 --no-fstab-update ././..")
840 self.assertNotEqual(0, runCmd("wic create %s -e core-image-minimal -o %s" \
841 % (wks_file, self.resultdir), ignore_status=True).status)
842 os.remove(wks_file)
843
844 def test_extra_space(self):
845 """Test --extra-space wks option."""
846 extraspace = 1024**3
847 runCmd("wic create wictestdisk "
848 "--image-name core-image-minimal "
849 "--extra-space %i -o %s" % (extraspace ,self.resultdir))
850 wicout = glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))
851 self.assertEqual(1, len(wicout))
852 size = os.path.getsize(wicout[0])
853 self.assertTrue(size > extraspace, msg="Extra space not present (%s vs %s)" % (size, extraspace))
854
855 def test_no_table(self):
856 """Test --no-table wks option."""
857 wks_file = 'temp.wks'
858
859 # Absolute argument.
860 with open(wks_file, 'w') as wks:
861 wks.write("part testspace --no-table --fixed-size 16k --offset 4080k")
862 runCmd("wic create %s --image-name core-image-minimal -o %s" % (wks_file, self.resultdir))
863
864 wicout = glob(os.path.join(self.resultdir, "*.*"))
865
866 self.assertEqual(1, len(wicout))
867 size = os.path.getsize(wicout[0])
868 self.assertEqual(size, 4 * 1024 * 1024)
869
870 os.remove(wks_file)
871
872 def test_partition_hidden_attributes(self):
873 """Test --hidden wks option."""
874 wks_file = 'temp.wks'
875 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
876 try:
877 with open(wks_file, 'w') as wks:
878 wks.write("""
879part / --source rootfs --fstype=ext4
880part / --source rootfs --fstype=ext4 --hidden
881bootloader --ptable gpt""")
882
883 runCmd("wic create %s -e core-image-minimal -o %s" \
884 % (wks_file, self.resultdir))
885 wicout = os.path.join(self.resultdir, "*.direct")
886
887 result = runCmd("%s/usr/sbin/sfdisk --part-attrs %s 1" % (sysroot, wicout))
888 self.assertEqual('', result.output)
889 result = runCmd("%s/usr/sbin/sfdisk --part-attrs %s 2" % (sysroot, wicout))
890 self.assertEqual('RequiredPartition', result.output)
891
892 finally:
893 os.remove(wks_file)
894
895 def test_wic_sector_size(self):
896 """Test generation of image sector size"""
897
898 oldpath = os.environ['PATH']
899 os.environ['PATH'] = get_bb_var("PATH", "wic-tools")
900
901 try:
902 # Add WIC_SECTOR_SIZE into config
903 config = 'WIC_SECTOR_SIZE = "4096"\n'\
904 'WICVARS:append = " WIC_SECTOR_SIZE"\n'
905 self.append_config(config)
906 bitbake('core-image-minimal')
907
908 # Check WIC_SECTOR_SIZE apply to bitbake variable
909 wic_sector_size_str = get_bb_var('WIC_SECTOR_SIZE', 'core-image-minimal')
910 wic_sector_size = int(wic_sector_size_str)
911 self.assertEqual(4096, wic_sector_size)
912
913 self.logger.info("Test wic_sector_size: %d \n" % wic_sector_size)
914
915 with NamedTemporaryFile("w", suffix=".wks") as wks:
916 wks.writelines(
917 ['bootloader --ptable gpt\n',
918 'part --fstype ext4 --source rootfs --label rofs-a --mkfs-extraopts "-b 4096"\n',
919 'part --fstype ext4 --source rootfs --use-uuid --mkfs-extraopts "-b 4096"\n'])
920 wks.flush()
921 cmd = "wic create %s -e core-image-minimal -o %s" % (wks.name, self.resultdir)
922 runCmd(cmd)
923 wksname = os.path.splitext(os.path.basename(wks.name))[0]
924 images = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
925 self.assertEqual(1, len(images))
926
927 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
928 # list partitions
929 result = runCmd("wic ls %s -n %s" % (images[0], sysroot))
930 self.assertEqual(3, len(result.output.split('\n')))
931
932 # verify partition size with wic
933 res = runCmd("export PARTED_SECTOR_SIZE=%d; parted -m %s unit b p" % (wic_sector_size, images[0]),
934 stderr=subprocess.PIPE)
935
936 # parse parted output which looks like this:
937 # BYT;\n
938 # /var/tmp/wic/build/tmpgjzzefdd-202410281021-sda.direct:78569472B:file:4096:4096:gpt::;\n
939 # 1:139264B:39284735B:39145472B:ext4:rofs-a:;\n
940 # 2:39284736B:78430207B:39145472B:ext4:primary:;\n
941 disk_info = res.output.splitlines()[1]
942 # Check sector sizes
943 sector_size_logical = int(disk_info.split(":")[3])
944 sector_size_physical = int(disk_info.split(":")[4])
945 self.assertEqual(wic_sector_size, sector_size_logical, "Logical sector size is not %d." % wic_sector_size)
946 self.assertEqual(wic_sector_size, sector_size_physical, "Physical sector size is not %d." % wic_sector_size)
947
948 finally:
949 os.environ['PATH'] = oldpath
950
689class Wic2(WicTestCase): 951class Wic2(WicTestCase):
690 952
691 def test_bmap_short(self): 953 def test_bmap_short(self):
692 """Test generation of .bmap file -m option""" 954 """Test generation of .bmap file -m option"""
693 cmd = "wic create wictestdisk -e core-image-minimal -m -o %s" % self.resultdir 955 cmd = "wic create wictestdisk -e core-image-minimal -m -o %s" % self.resultdir
694 runCmd(cmd) 956 runCmd(cmd)
695 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct"))) 957 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct"))))
696 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct.bmap"))) 958 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct.bmap"))))
697 959
698 def test_bmap_long(self): 960 def test_bmap_long(self):
699 """Test generation of .bmap file --bmap option""" 961 """Test generation of .bmap file --bmap option"""
700 cmd = "wic create wictestdisk -e core-image-minimal --bmap -o %s" % self.resultdir 962 cmd = "wic create wictestdisk -e core-image-minimal --bmap -o %s" % self.resultdir
701 runCmd(cmd) 963 runCmd(cmd)
702 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct"))) 964 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct"))))
703 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct.bmap"))) 965 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct.bmap"))))
704 966
705 def test_image_env(self): 967 def test_image_env(self):
706 """Test generation of <image>.env files.""" 968 """Test generation of <image>.env files."""
@@ -711,7 +973,7 @@ class Wic2(WicTestCase):
711 basename = bb_vars['IMAGE_BASENAME'] 973 basename = bb_vars['IMAGE_BASENAME']
712 self.assertEqual(basename, image) 974 self.assertEqual(basename, image)
713 path = os.path.join(imgdatadir, basename) + '.env' 975 path = os.path.join(imgdatadir, basename) + '.env'
714 self.assertTrue(os.path.isfile(path)) 976 self.assertTrue(os.path.isfile(path), msg="File %s wasn't generated as expected" % path)
715 977
716 wicvars = set(bb_vars['WICVARS'].split()) 978 wicvars = set(bb_vars['WICVARS'].split())
717 # filter out optional variables 979 # filter out optional variables
@@ -724,7 +986,7 @@ class Wic2(WicTestCase):
724 # test if variables used by wic present in the .env file 986 # test if variables used by wic present in the .env file
725 for var in wicvars: 987 for var in wicvars:
726 self.assertTrue(var in content, "%s is not in .env file" % var) 988 self.assertTrue(var in content, "%s is not in .env file" % var)
727 self.assertTrue(content[var]) 989 self.assertTrue(content[var], "%s doesn't have a value (%s)" % (var, content[var]))
728 990
729 def test_image_vars_dir_short(self): 991 def test_image_vars_dir_short(self):
730 """Test image vars directory selection -v option""" 992 """Test image vars directory selection -v option"""
@@ -736,7 +998,7 @@ class Wic2(WicTestCase):
736 "--image-name=%s -v %s -n %s -o %s" 998 "--image-name=%s -v %s -n %s -o %s"
737 % (image, imgenvdir, native_sysroot, 999 % (image, imgenvdir, native_sysroot,
738 self.resultdir)) 1000 self.resultdir))
739 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct"))) 1001 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct"))))
740 1002
741 def test_image_vars_dir_long(self): 1003 def test_image_vars_dir_long(self):
742 """Test image vars directory selection --vars option""" 1004 """Test image vars directory selection --vars option"""
@@ -751,58 +1013,99 @@ class Wic2(WicTestCase):
751 "--outdir %s" 1013 "--outdir %s"
752 % (image, imgenvdir, native_sysroot, 1014 % (image, imgenvdir, native_sysroot,
753 self.resultdir)) 1015 self.resultdir))
754 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct"))) 1016 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct"))))
755 1017
756 @only_for_arch(['i586', 'i686', 'x86_64']) 1018 # TODO this test could also work on aarch64
1019 @skipIfNotArch(['i586', 'i686', 'x86_64'])
757 def test_wic_image_type(self): 1020 def test_wic_image_type(self):
758 """Test building wic images by bitbake""" 1021 """Test building wic images by bitbake"""
759 config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\ 1022 config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\
760 'MACHINE_FEATURES_append = " efi"\n' 1023 'MACHINE_FEATURES:append = " efi"\n'
1024 image_recipe_append = """
1025do_image_wic[postfuncs] += "run_wic_cmd"
1026run_wic_cmd() {
1027 echo "test" >> ${WORKDIR}/test.wic-cp
1028 wic cp --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" ${WORKDIR}/test.wic-cp ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic:1/
1029 wic ls --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic:1/
1030 wic rm --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic:1/test.wic-cp
1031 wic cp --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" ${WORKDIR}/test.wic-cp ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic:1/
1032}
1033"""
1034 self.write_recipeinc('images', image_recipe_append)
1035
761 self.append_config(config) 1036 self.append_config(config)
762 self.assertEqual(0, bitbake('wic-image-minimal').status) 1037 image = 'wic-image-minimal'
1038 bitbake(image)
763 self.remove_config(config) 1039 self.remove_config(config)
764 1040
765 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'MACHINE']) 1041 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
766 deploy_dir = bb_vars['DEPLOY_DIR_IMAGE'] 1042 prefix = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.' % bb_vars['IMAGE_LINK_NAME'])
767 machine = bb_vars['MACHINE'] 1043
768 prefix = os.path.join(deploy_dir, 'wic-image-minimal-%s.' % machine) 1044 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
1045 # check if file is there
1046 result = runCmd("wic ls %s:1/ -n %s" % (prefix+"wic", sysroot))
1047 self.assertIn("test.wic-cp", result.output)
1048
769 # check if we have result image and manifests symlinks 1049 # check if we have result image and manifests symlinks
770 # pointing to existing files 1050 # pointing to existing files
771 for suffix in ('wic', 'manifest'): 1051 for suffix in ('wic', 'manifest'):
772 path = prefix + suffix 1052 path = prefix + suffix
773 self.assertTrue(os.path.islink(path)) 1053 self.assertTrue(os.path.islink(path), msg="Link %s wasn't generated as expected" % path)
774 self.assertTrue(os.path.isfile(os.path.realpath(path))) 1054 self.assertTrue(os.path.isfile(os.path.realpath(path)), msg="File linked to by %s wasn't generated as expected" % path)
775 1055
776 @only_for_arch(['i586', 'i686', 'x86_64']) 1056 # TODO this should work on aarch64
1057 @skipIfNotArch(['i586', 'i686', 'x86_64'])
1058 @OETestTag("runqemu")
777 def test_qemu(self): 1059 def test_qemu(self):
778 """Test wic-image-minimal under qemu""" 1060 """Test wic-image-minimal under qemu"""
779 config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\ 1061 config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\
780 'MACHINE_FEATURES_append = " efi"\n' 1062 'MACHINE_FEATURES:append = " efi"\n'
781 self.append_config(config) 1063 self.append_config(config)
782 self.assertEqual(0, bitbake('wic-image-minimal').status) 1064 image_recipe_append = """
1065do_image_wic[postfuncs] += "run_wic_cmd"
1066run_wic_cmd() {
1067 echo "test" >> ${WORKDIR}/test.wic-cp
1068 wic cp --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" ${WORKDIR}/test.wic-cp ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic:1/
1069 wic ls --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic:1/
1070 wic rm --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic:1/test.wic-cp
1071 wic cp --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" ${WORKDIR}/test.wic-cp ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic:1/
1072}
1073"""
1074 self.write_recipeinc('images', image_recipe_append)
1075 bitbake('wic-image-minimal')
1076
1077 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
1078 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], "wic-image-minimal")
1079 image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], bb_vars['IMAGE_LINK_NAME'])
1080 # check if file is there
1081 result = runCmd("wic ls %s:1/ -n %s" % (image_path+".wic", sysroot))
1082 self.assertIn("test.wic-cp", result.output)
783 self.remove_config(config) 1083 self.remove_config(config)
784 1084
785 with runqemu('wic-image-minimal', ssh=False) as qemu: 1085 runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'wic-image-minimal') or ""
1086 with runqemu('wic-image-minimal', ssh=False, runqemuparams='%s nographic' % (runqemu_params)) as qemu:
786 cmd = "mount | grep '^/dev/' | cut -f1,3 -d ' ' | egrep -c -e '/dev/sda1 /boot' " \ 1087 cmd = "mount | grep '^/dev/' | cut -f1,3 -d ' ' | egrep -c -e '/dev/sda1 /boot' " \
787 "-e '/dev/root /|/dev/sda2 /' -e '/dev/sda3 /media' -e '/dev/sda4 /mnt'" 1088 "-e '/dev/root /|/dev/sda2 /' -e '/dev/sda3 /media' -e '/dev/sda4 /mnt'"
788 status, output = qemu.run_serial(cmd) 1089 status, output = qemu.run_serial(cmd)
789 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) 1090 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
790 self.assertEqual(output, '4') 1091 self.assertEqual(output, '4')
791 cmd = "grep UUID= /etc/fstab" 1092 cmd = "grep UUID=2c71ef06-a81d-4735-9d3a-379b69c6bdba /etc/fstab"
792 status, output = qemu.run_serial(cmd) 1093 status, output = qemu.run_serial(cmd)
793 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) 1094 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
794 self.assertEqual(output, 'UUID=2c71ef06-a81d-4735-9d3a-379b69c6bdba\t/media\text4\tdefaults\t0\t0') 1095 self.assertEqual(output, 'UUID=2c71ef06-a81d-4735-9d3a-379b69c6bdba\t/media\text4\tdefaults\t0\t0')
795 1096
796 @only_for_arch(['i586', 'i686', 'x86_64']) 1097 @skipIfNotArch(['i586', 'i686', 'x86_64'])
1098 @OETestTag("runqemu")
797 def test_qemu_efi(self): 1099 def test_qemu_efi(self):
798 """Test core-image-minimal efi image under qemu""" 1100 """Test core-image-minimal efi image under qemu"""
799 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "mkefidisk.wks"\n' 1101 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "mkefidisk.wks"\n'
800 self.append_config(config) 1102 self.append_config(config)
801 self.assertEqual(0, bitbake('core-image-minimal ovmf').status) 1103 bitbake('core-image-minimal ovmf')
802 self.remove_config(config) 1104 self.remove_config(config)
803 1105
1106 runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'core-image-minimal') or ""
804 with runqemu('core-image-minimal', ssh=False, 1107 with runqemu('core-image-minimal', ssh=False,
805 runqemuparams='ovmf', image_fstype='wic') as qemu: 1108 runqemuparams='%s nographic ovmf' % (runqemu_params), image_fstype='wic') as qemu:
806 cmd = "grep sda. /proc/partitions |wc -l" 1109 cmd = "grep sda. /proc/partitions |wc -l"
807 status, output = qemu.run_serial(cmd) 1110 status, output = qemu.run_serial(cmd)
808 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) 1111 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
@@ -831,7 +1134,7 @@ class Wic2(WicTestCase):
831 1134
832 wksname = os.path.splitext(os.path.basename(wkspath))[0] 1135 wksname = os.path.splitext(os.path.basename(wkspath))[0]
833 1136
834 wicout = glob(self.resultdir + "%s-*direct" % wksname) 1137 wicout = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
835 1138
836 if not wicout: 1139 if not wicout:
837 return (p, None) 1140 return (p, None)
@@ -842,8 +1145,8 @@ class Wic2(WicTestCase):
842 native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "wic-tools") 1145 native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "wic-tools")
843 1146
844 # verify partition size with wic 1147 # verify partition size with wic
845 res = runCmd("parted -m %s unit kib p 2>/dev/null" % wicimg, 1148 res = runCmd("parted -m %s unit kib p" % wicimg,
846 native_sysroot=native_sysroot) 1149 native_sysroot=native_sysroot, stderr=subprocess.PIPE)
847 1150
848 # parse parted output which looks like this: 1151 # parse parted output which looks like this:
849 # BYT;\n 1152 # BYT;\n
@@ -882,71 +1185,71 @@ class Wic2(WicTestCase):
882 with NamedTemporaryFile("w", suffix=".wks") as tempf: 1185 with NamedTemporaryFile("w", suffix=".wks") as tempf:
883 # Test that partitions are placed at the correct offsets, default KB 1186 # Test that partitions are placed at the correct offsets, default KB
884 tempf.write("bootloader --ptable gpt\n" \ 1187 tempf.write("bootloader --ptable gpt\n" \
885 "part / --source rootfs --ondisk hda --offset 32 --fixed-size 100M --fstype=ext4\n" \ 1188 "part / --source rootfs --ondisk hda --offset 32 --fixed-size 200M --fstype=ext4\n" \
886 "part /bar --ondisk hda --offset 102432 --fixed-size 100M --fstype=ext4\n") 1189 "part /bar --ondisk hda --offset 204832 --fixed-size 100M --fstype=ext4\n")
887 tempf.flush() 1190 tempf.flush()
888 1191
889 _, partlns = self._get_wic_partitions(tempf.name, native_sysroot) 1192 _, partlns = self._get_wic_partitions(tempf.name, native_sysroot)
890 self.assertEqual(partlns, [ 1193 self.assertEqual(partlns, [
891 "1:32.0kiB:102432kiB:102400kiB:ext4:primary:;", 1194 "1:32.0kiB:204832kiB:204800kiB:ext4:primary:;",
892 "2:102432kiB:204832kiB:102400kiB:ext4:primary:;", 1195 "2:204832kiB:307232kiB:102400kiB:ext4:primary:;",
893 ]) 1196 ])
894 1197
895 with NamedTemporaryFile("w", suffix=".wks") as tempf: 1198 with NamedTemporaryFile("w", suffix=".wks") as tempf:
896 # Test that partitions are placed at the correct offsets, same with explicit KB 1199 # Test that partitions are placed at the correct offsets, same with explicit KB
897 tempf.write("bootloader --ptable gpt\n" \ 1200 tempf.write("bootloader --ptable gpt\n" \
898 "part / --source rootfs --ondisk hda --offset 32K --fixed-size 100M --fstype=ext4\n" \ 1201 "part / --source rootfs --ondisk hda --offset 32K --fixed-size 200M --fstype=ext4\n" \
899 "part /bar --ondisk hda --offset 102432K --fixed-size 100M --fstype=ext4\n") 1202 "part /bar --ondisk hda --offset 204832K --fixed-size 100M --fstype=ext4\n")
900 tempf.flush() 1203 tempf.flush()
901 1204
902 _, partlns = self._get_wic_partitions(tempf.name, native_sysroot) 1205 _, partlns = self._get_wic_partitions(tempf.name, native_sysroot)
903 self.assertEqual(partlns, [ 1206 self.assertEqual(partlns, [
904 "1:32.0kiB:102432kiB:102400kiB:ext4:primary:;", 1207 "1:32.0kiB:204832kiB:204800kiB:ext4:primary:;",
905 "2:102432kiB:204832kiB:102400kiB:ext4:primary:;", 1208 "2:204832kiB:307232kiB:102400kiB:ext4:primary:;",
906 ]) 1209 ])
907 1210
908 with NamedTemporaryFile("w", suffix=".wks") as tempf: 1211 with NamedTemporaryFile("w", suffix=".wks") as tempf:
909 # Test that partitions are placed at the correct offsets using MB 1212 # Test that partitions are placed at the correct offsets using MB
910 tempf.write("bootloader --ptable gpt\n" \ 1213 tempf.write("bootloader --ptable gpt\n" \
911 "part / --source rootfs --ondisk hda --offset 32K --fixed-size 100M --fstype=ext4\n" \ 1214 "part / --source rootfs --ondisk hda --offset 32K --fixed-size 200M --fstype=ext4\n" \
912 "part /bar --ondisk hda --offset 101M --fixed-size 100M --fstype=ext4\n") 1215 "part /bar --ondisk hda --offset 201M --fixed-size 100M --fstype=ext4\n")
913 tempf.flush() 1216 tempf.flush()
914 1217
915 _, partlns = self._get_wic_partitions(tempf.name, native_sysroot) 1218 _, partlns = self._get_wic_partitions(tempf.name, native_sysroot)
916 self.assertEqual(partlns, [ 1219 self.assertEqual(partlns, [
917 "1:32.0kiB:102432kiB:102400kiB:ext4:primary:;", 1220 "1:32.0kiB:204832kiB:204800kiB:ext4:primary:;",
918 "2:103424kiB:205824kiB:102400kiB:ext4:primary:;", 1221 "2:205824kiB:308224kiB:102400kiB:ext4:primary:;",
919 ]) 1222 ])
920 1223
921 with NamedTemporaryFile("w", suffix=".wks") as tempf: 1224 with NamedTemporaryFile("w", suffix=".wks") as tempf:
922 # Test that partitions can be placed on a 512 byte sector boundary 1225 # Test that partitions can be placed on a 512 byte sector boundary
923 tempf.write("bootloader --ptable gpt\n" \ 1226 tempf.write("bootloader --ptable gpt\n" \
924 "part / --source rootfs --ondisk hda --offset 65s --fixed-size 99M --fstype=ext4\n" \ 1227 "part / --source rootfs --ondisk hda --offset 65s --fixed-size 199M --fstype=ext4\n" \
925 "part /bar --ondisk hda --offset 102432 --fixed-size 100M --fstype=ext4\n") 1228 "part /bar --ondisk hda --offset 204832 --fixed-size 100M --fstype=ext4\n")
926 tempf.flush() 1229 tempf.flush()
927 1230
928 _, partlns = self._get_wic_partitions(tempf.name, native_sysroot) 1231 _, partlns = self._get_wic_partitions(tempf.name, native_sysroot)
929 self.assertEqual(partlns, [ 1232 self.assertEqual(partlns, [
930 "1:32.5kiB:101408kiB:101376kiB:ext4:primary:;", 1233 "1:32.5kiB:203808kiB:203776kiB:ext4:primary:;",
931 "2:102432kiB:204832kiB:102400kiB:ext4:primary:;", 1234 "2:204832kiB:307232kiB:102400kiB:ext4:primary:;",
932 ]) 1235 ])
933 1236
934 with NamedTemporaryFile("w", suffix=".wks") as tempf: 1237 with NamedTemporaryFile("w", suffix=".wks") as tempf:
935 # Test that a partition can be placed immediately after a MSDOS partition table 1238 # Test that a partition can be placed immediately after a MSDOS partition table
936 tempf.write("bootloader --ptable msdos\n" \ 1239 tempf.write("bootloader --ptable msdos\n" \
937 "part / --source rootfs --ondisk hda --offset 1s --fixed-size 100M --fstype=ext4\n") 1240 "part / --source rootfs --ondisk hda --offset 1s --fixed-size 200M --fstype=ext4\n")
938 tempf.flush() 1241 tempf.flush()
939 1242
940 _, partlns = self._get_wic_partitions(tempf.name, native_sysroot) 1243 _, partlns = self._get_wic_partitions(tempf.name, native_sysroot)
941 self.assertEqual(partlns, [ 1244 self.assertEqual(partlns, [
942 "1:0.50kiB:102400kiB:102400kiB:ext4::;", 1245 "1:0.50kiB:204800kiB:204800kiB:ext4::;",
943 ]) 1246 ])
944 1247
945 with NamedTemporaryFile("w", suffix=".wks") as tempf: 1248 with NamedTemporaryFile("w", suffix=".wks") as tempf:
946 # Test that image creation fails if the partitions would overlap 1249 # Test that image creation fails if the partitions would overlap
947 tempf.write("bootloader --ptable gpt\n" \ 1250 tempf.write("bootloader --ptable gpt\n" \
948 "part / --source rootfs --ondisk hda --offset 32 --fixed-size 100M --fstype=ext4\n" \ 1251 "part / --source rootfs --ondisk hda --offset 32 --fixed-size 200M --fstype=ext4\n" \
949 "part /bar --ondisk hda --offset 102431 --fixed-size 100M --fstype=ext4\n") 1252 "part /bar --ondisk hda --offset 204831 --fixed-size 100M --fstype=ext4\n")
950 tempf.flush() 1253 tempf.flush()
951 1254
952 p, _ = self._get_wic_partitions(tempf.name, ignore_status=True) 1255 p, _ = self._get_wic_partitions(tempf.name, ignore_status=True)
@@ -955,7 +1258,7 @@ class Wic2(WicTestCase):
955 with NamedTemporaryFile("w", suffix=".wks") as tempf: 1258 with NamedTemporaryFile("w", suffix=".wks") as tempf:
956 # Test that partitions are not allowed to overlap with the bootloader 1259 # Test that partitions are not allowed to overlap with the bootloader
957 tempf.write("bootloader --ptable gpt\n" \ 1260 tempf.write("bootloader --ptable gpt\n" \
958 "part / --source rootfs --ondisk hda --offset 8 --fixed-size 100M --fstype=ext4\n") 1261 "part / --source rootfs --ondisk hda --offset 8 --fixed-size 200M --fstype=ext4\n")
959 tempf.flush() 1262 tempf.flush()
960 1263
961 p, _ = self._get_wic_partitions(tempf.name, ignore_status=True) 1264 p, _ = self._get_wic_partitions(tempf.name, ignore_status=True)
@@ -976,50 +1279,74 @@ class Wic2(WicTestCase):
976 size = int(size[:-3]) 1279 size = int(size[:-3])
977 self.assertGreaterEqual(size, 204800) 1280 self.assertGreaterEqual(size, 204800)
978 1281
979 @only_for_arch(['i586', 'i686', 'x86_64']) 1282 # TODO this test could also work on aarch64
1283 @skipIfNotArch(['i586', 'i686', 'x86_64'])
1284 @OETestTag("runqemu")
980 def test_rawcopy_plugin_qemu(self): 1285 def test_rawcopy_plugin_qemu(self):
981 """Test rawcopy plugin in qemu""" 1286 """Test rawcopy plugin in qemu"""
982 # build ext4 and wic images 1287 # build ext4 and then use it for a wic image
983 for fstype in ("ext4", "wic"): 1288 config = 'IMAGE_FSTYPES = "ext4"\n'
984 config = 'IMAGE_FSTYPES = "%s"\nWKS_FILE = "test_rawcopy_plugin.wks.in"\n' % fstype 1289 self.append_config(config)
985 self.append_config(config) 1290 bitbake('core-image-minimal')
986 self.assertEqual(0, bitbake('core-image-minimal').status) 1291 image_link_name = get_bb_var('IMAGE_LINK_NAME', 'core-image-minimal')
987 self.remove_config(config) 1292 self.remove_config(config)
988 1293
989 with runqemu('core-image-minimal', ssh=False, image_fstype='wic') as qemu: 1294 config = 'IMAGE_FSTYPES = "wic"\n' \
1295 'IMAGE_LINK_NAME_CORE_IMAGE_MINIMAL = "%s"\n'\
1296 'WKS_FILE = "test_rawcopy_plugin.wks.in"\n'\
1297 % image_link_name
1298 self.append_config(config)
1299 bitbake('core-image-minimal-mtdutils')
1300 self.remove_config(config)
1301
1302 runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'core-image-minimal-mtdutils') or ""
1303 with runqemu('core-image-minimal-mtdutils', ssh=False,
1304 runqemuparams='%s nographic' % (runqemu_params), image_fstype='wic') as qemu:
990 cmd = "grep sda. /proc/partitions |wc -l" 1305 cmd = "grep sda. /proc/partitions |wc -l"
991 status, output = qemu.run_serial(cmd) 1306 status, output = qemu.run_serial(cmd)
992 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) 1307 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
993 self.assertEqual(output, '2') 1308 self.assertEqual(output, '2')
994 1309
995 def test_rawcopy_plugin(self): 1310 def _rawcopy_plugin(self, fstype):
996 """Test rawcopy plugin""" 1311 """Test rawcopy plugin"""
997 img = 'core-image-minimal' 1312 image = 'core-image-minimal'
998 machine = get_bb_var('MACHINE', img) 1313 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
1314 params = ',unpack' if fstype.endswith('.gz') else ''
999 with NamedTemporaryFile("w", suffix=".wks") as wks: 1315 with NamedTemporaryFile("w", suffix=".wks") as wks:
1000 wks.writelines(['part /boot --active --source bootimg-pcbios\n', 1316 wks.write('part / --source rawcopy --sourceparams="file=%s.%s%s"\n'\
1001 'part / --source rawcopy --sourceparams="file=%s-%s.ext4" --use-uuid\n'\ 1317 % (bb_vars['IMAGE_LINK_NAME'], fstype, params))
1002 % (img, machine),
1003 'bootloader --timeout=0 --append="console=ttyS0,115200n8"\n'])
1004 wks.flush() 1318 wks.flush()
1005 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir) 1319 cmd = "wic create %s -e %s -o %s" % (wks.name, image, self.resultdir)
1006 runCmd(cmd) 1320 runCmd(cmd)
1007 wksname = os.path.splitext(os.path.basename(wks.name))[0] 1321 wksname = os.path.splitext(os.path.basename(wks.name))[0]
1008 out = glob(self.resultdir + "%s-*direct" % wksname) 1322 out = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
1009 self.assertEqual(1, len(out)) 1323 self.assertEqual(1, len(out))
1010 1324
1325 def test_rawcopy_plugin(self):
1326 config = 'IMAGE_FSTYPES = "ext4"\n'
1327 self.append_config(config)
1328 self.assertEqual(0, bitbake('core-image-minimal').status)
1329 self.remove_config(config)
1330 self._rawcopy_plugin('ext4')
1331
1332 def test_rawcopy_plugin_unpack(self):
1333 fstype = 'ext4.gz'
1334 config = 'IMAGE_FSTYPES = "%s"\n' % fstype
1335 self.append_config(config)
1336 self.assertEqual(0, bitbake('core-image-minimal').status)
1337 self.remove_config(config)
1338 self._rawcopy_plugin(fstype)
1339
1011 def test_empty_plugin(self): 1340 def test_empty_plugin(self):
1012 """Test empty plugin""" 1341 """Test empty plugin"""
1013 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_empty_plugin.wks"\n' 1342 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_empty_plugin.wks"\n'
1014 self.append_config(config) 1343 self.append_config(config)
1015 self.assertEqual(0, bitbake('core-image-minimal').status) 1344 image = 'core-image-minimal'
1345 bitbake(image)
1016 self.remove_config(config) 1346 self.remove_config(config)
1017 1347 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
1018 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'MACHINE']) 1348 image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.wic' % bb_vars['IMAGE_LINK_NAME'])
1019 deploy_dir = bb_vars['DEPLOY_DIR_IMAGE'] 1349 self.assertTrue(os.path.exists(image_path), msg="Image file %s wasn't generated as expected" % image_path)
1020 machine = bb_vars['MACHINE']
1021 image_path = os.path.join(deploy_dir, 'core-image-minimal-%s.wic' % machine)
1022 self.assertEqual(True, os.path.exists(image_path))
1023 1350
1024 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') 1351 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
1025 1352
@@ -1028,15 +1355,18 @@ class Wic2(WicTestCase):
1028 result = runCmd("wic ls %s -n %s | awk -F ' ' '{print $1 \" \" $5}' | grep '^2' | wc -w" % (image_path, sysroot)) 1355 result = runCmd("wic ls %s -n %s | awk -F ' ' '{print $1 \" \" $5}' | grep '^2' | wc -w" % (image_path, sysroot))
1029 self.assertEqual('1', result.output) 1356 self.assertEqual('1', result.output)
1030 1357
1031 @only_for_arch(['i586', 'i686', 'x86_64']) 1358 @skipIfNotArch(['i586', 'i686', 'x86_64'])
1359 @OETestTag("runqemu")
1032 def test_biosplusefi_plugin_qemu(self): 1360 def test_biosplusefi_plugin_qemu(self):
1033 """Test biosplusefi plugin in qemu""" 1361 """Test biosplusefi plugin in qemu"""
1034 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_biosplusefi_plugin.wks"\nMACHINE_FEATURES_append = " efi"\n' 1362 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_biosplusefi_plugin.wks"\nMACHINE_FEATURES:append = " efi"\n'
1035 self.append_config(config) 1363 self.append_config(config)
1036 self.assertEqual(0, bitbake('core-image-minimal').status) 1364 bitbake('core-image-minimal')
1037 self.remove_config(config) 1365 self.remove_config(config)
1038 1366
1039 with runqemu('core-image-minimal', ssh=False, image_fstype='wic') as qemu: 1367 runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'core-image-minimal') or ""
1368 with runqemu('core-image-minimal', ssh=False,
1369 runqemuparams='%s nographic' % (runqemu_params), image_fstype='wic') as qemu:
1040 # Check that we have ONLY two /dev/sda* partitions (/boot and /) 1370 # Check that we have ONLY two /dev/sda* partitions (/boot and /)
1041 cmd = "grep sda. /proc/partitions | wc -l" 1371 cmd = "grep sda. /proc/partitions | wc -l"
1042 status, output = qemu.run_serial(cmd) 1372 status, output = qemu.run_serial(cmd)
@@ -1059,32 +1389,178 @@ class Wic2(WicTestCase):
1059 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) 1389 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
1060 self.assertEqual(output, '*') 1390 self.assertEqual(output, '*')
1061 1391
1062 @only_for_arch(['i586', 'i686', 'x86_64']) 1392 @skipIfNotArch(['i586', 'i686', 'x86_64'])
1063 def test_biosplusefi_plugin(self): 1393 def test_biosplusefi_plugin(self):
1064 """Test biosplusefi plugin""" 1394 """Test biosplusefi plugin"""
1065 # Wic generation below may fail depending on the order of the unittests 1395 # Wic generation below may fail depending on the order of the unittests
1066 # This is because bootimg-pcbios (that bootimg-biosplusefi uses) generate its MBR inside STAGING_DATADIR directory 1396 # This is because bootimg_pcbios (that bootimg_biosplusefi uses) generate its MBR inside STAGING_DATADIR directory
1067 # which may or may not exists depending on what was built already 1397 # which may or may not exists depending on what was built already
1068 # If an image hasn't been built yet, directory ${STAGING_DATADIR}/syslinux won't exists and _get_bootimg_dir() 1398 # If an image hasn't been built yet, directory ${STAGING_DATADIR}/syslinux won't exists and _get_bootimg_dir()
1069 # will raise with "Couldn't find correct bootimg_dir" 1399 # will raise with "Couldn't find correct bootimg_dir"
1070 # The easiest way to work-around this issue is to make sure we already built an image here, hence the bitbake call 1400 # The easiest way to work-around this issue is to make sure we already built an image here, hence the bitbake call
1071 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_biosplusefi_plugin.wks"\nMACHINE_FEATURES_append = " efi"\n' 1401 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_biosplusefi_plugin.wks"\nMACHINE_FEATURES:append = " efi"\n'
1072 self.append_config(config) 1402 self.append_config(config)
1073 self.assertEqual(0, bitbake('core-image-minimal').status) 1403 bitbake('core-image-minimal')
1404 self.remove_config(config)
1405
1406 img = 'core-image-minimal'
1407 with NamedTemporaryFile("w", suffix=".wks") as wks:
1408 wks.writelines(['part /boot --active --source bootimg_biosplusefi --sourceparams="loader=grub-efi"\n',
1409 'part / --source rootfs --fstype=ext4 --align 1024 --use-uuid\n'\
1410 'bootloader --timeout=0 --append="console=ttyS0,115200n8"\n'])
1411 wks.flush()
1412 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
1413 runCmd(cmd)
1414 wksname = os.path.splitext(os.path.basename(wks.name))[0]
1415 out = glob(os.path.join(self.resultdir, "%s-*.direct" % wksname))
1416 self.assertEqual(1, len(out))
1417
1418 @skipIfNotArch(['i586', 'i686', 'x86_64', 'aarch64'])
1419 def test_uefi_kernel(self):
1420 """ Test uefi-kernel in wic """
1421 config = 'IMAGE_EFI_BOOT_FILES="/etc/fstab;testfile"\nIMAGE_FSTYPES = "wic"\nWKS_FILE = "test_uefikernel.wks"\nMACHINE_FEATURES:append = " efi"\n'
1422 self.append_config(config)
1423 bitbake('core-image-minimal')
1074 self.remove_config(config) 1424 self.remove_config(config)
1075 1425
1076 img = 'core-image-minimal' 1426 img = 'core-image-minimal'
1077 with NamedTemporaryFile("w", suffix=".wks") as wks: 1427 with NamedTemporaryFile("w", suffix=".wks") as wks:
1078 wks.writelines(['part /boot --active --source bootimg-biosplusefi --sourceparams="loader=grub-efi"\n', 1428 wks.writelines(['part /boot --source bootimg_efi --sourceparams="loader=uefi-kernel"\n'
1079 'part / --source rootfs --fstype=ext4 --align 1024 --use-uuid\n'\ 1429 'part / --source rootfs --fstype=ext4 --align 1024 --use-uuid\n'\
1080 'bootloader --timeout=0 --append="console=ttyS0,115200n8"\n']) 1430 'bootloader --timeout=0 --append="console=ttyS0,115200n8"\n'])
1081 wks.flush() 1431 wks.flush()
1082 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir) 1432 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
1083 runCmd(cmd) 1433 runCmd(cmd)
1084 wksname = os.path.splitext(os.path.basename(wks.name))[0] 1434 wksname = os.path.splitext(os.path.basename(wks.name))[0]
1085 out = glob(self.resultdir + "%s-*.direct" % wksname) 1435 out = glob(os.path.join(self.resultdir, "%s-*.direct" % wksname))
1086 self.assertEqual(1, len(out)) 1436 self.assertEqual(1, len(out))
1087 1437
1438 # TODO this test could also work on aarch64
1439 @skipIfNotArch(['i586', 'i686', 'x86_64'])
1440 @OETestTag("runqemu")
1441 def test_efi_plugin_unified_kernel_image_qemu(self):
1442 """Test Unified Kernel Image feature in qemu without systemd in initramfs or rootfs"""
1443 config = """
1444# efi firmware must load systemd-boot, not grub
1445EFI_PROVIDER = "systemd-boot"
1446
1447# image format must be wic, needs esp partition for firmware etc
1448IMAGE_FSTYPES:pn-core-image-base:append = " wic"
1449WKS_FILE = "test_efi_plugin.wks"
1450
1451# efi, uki and systemd features must be enabled
1452MACHINE_FEATURES:append = " efi"
1453IMAGE_CLASSES:append:pn-core-image-base = " uki"
1454
1455# uki embeds also an initrd, no systemd or udev
1456INITRAMFS_IMAGE = "core-image-initramfs-boot"
1457
1458# runqemu must not load kernel separately, it's in the uki
1459QB_KERNEL_ROOT = ""
1460QB_DEFAULT_KERNEL = "none"
1461
1462# boot command line provided via uki, not via bootloader
1463UKI_CMDLINE = "rootwait root=LABEL=root console=${KERNEL_CONSOLE}"
1464
1465"""
1466 self.append_config(config)
1467 bitbake('core-image-base ovmf')
1468 runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'core-image-base') or ""
1469 uki_filename = get_bb_var('UKI_FILENAME', 'core-image-base')
1470 self.remove_config(config)
1471
1472 with runqemu('core-image-base', ssh=False,
1473 runqemuparams='%s nographic ovmf' % (runqemu_params), image_fstype='wic') as qemu:
1474 # Check that /boot has EFI boot*.efi (required for EFI)
1475 cmd = "ls /boot/EFI/BOOT/boot*.efi | wc -l"
1476 status, output = qemu.run_serial(cmd)
1477 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
1478 self.assertEqual(output, '1')
1479 # Check that /boot has EFI/Linux/${UKI_FILENAME} (required for Unified Kernel Images auto detection)
1480 cmd = "ls /boot/EFI/Linux/%s | wc -l" % (uki_filename)
1481 status, output = qemu.run_serial(cmd)
1482 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
1483 self.assertEqual(output, '1')
1484 # Check that /boot doesn't have loader/entries/boot.conf (Unified Kernel Images are auto detected by the bootloader)
1485 cmd = "ls /boot/loader/entries/boot.conf 2&>/dev/null | wc -l"
1486 status, output = qemu.run_serial(cmd)
1487 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
1488 self.assertEqual(output, '0')
1489
1490 @skipIfNotArch(['aarch64'])
1491 @OETestTag("runqemu")
1492 def test_efi_plugin_plain_systemd_boot_qemu_aarch64(self):
1493 """Test plain systemd-boot in qemu with systemd"""
1494 config = """
1495INIT_MANAGER = "systemd"
1496EFI_PROVIDER = "systemd-boot"
1497
1498# image format must be wic, needs esp partition for firmware etc
1499IMAGE_FSTYPES:pn-core-image-base:append = " wic"
1500WKS_FILE = "test_efi_plugin_plain_systemd-boot.wks"
1501
1502INITRAMFS_IMAGE = "core-image-initramfs-boot"
1503
1504# to configure runqemu
1505IMAGE_CLASSES += "qemuboot"
1506# u-boot efi firmware
1507QB_DEFAULT_BIOS = "u-boot.bin"
1508# need to use virtio, scsi not supported by u-boot by default
1509QB_DRIVE_TYPE = "/dev/vd"
1510
1511# disable kvm, breaks boot
1512QEMU_USE_KVM = ""
1513
1514IMAGE_CLASSES:remove = 'testimage'
1515"""
1516 self.append_config(config)
1517 bitbake('core-image-base u-boot')
1518 runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'core-image-base') or ""
1519
1520 with runqemu('core-image-base', ssh=False,
1521 runqemuparams='%s nographic' % (runqemu_params), image_fstype='wic') as qemu:
1522 # Check that /boot has EFI boot*.efi (required for EFI)
1523 cmd = "ls /boot/EFI/BOOT/boot*.efi | wc -l"
1524 status, output = qemu.run_serial(cmd)
1525 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
1526 self.assertEqual(output, '1')
1527 # Check that boot.conf exists
1528 cmd = "cat /boot/loader/entries/boot.conf"
1529 status, output = qemu.run_serial(cmd)
1530 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
1531 self.remove_config(config)
1532
1533 @skipIfNotArch(['i586', 'i686', 'x86_64'])
1534 @OETestTag("runqemu")
1535 def test_efi_plugin_plain_systemd_boot_qemu_x86(self):
1536 """Test plain systemd-boot to systemd in qemu"""
1537 config = """
1538INIT_MANAGER = "systemd"
1539EFI_PROVIDER = "systemd-boot"
1540
1541# image format must be wic, needs esp partition for firmware etc
1542IMAGE_FSTYPES:pn-core-image-base:append = " wic"
1543WKS_FILE = "test_efi_plugin_plain_systemd-boot.wks"
1544
1545INITRAMFS_IMAGE = "core-image-initramfs-boot"
1546"""
1547 self.append_config(config)
1548 bitbake('core-image-base ovmf')
1549 runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'core-image-base') or ""
1550 self.remove_config(config)
1551
1552 with runqemu('core-image-base', ssh=False,
1553 runqemuparams='%s nographic ovmf' % (runqemu_params), image_fstype='wic') as qemu:
1554 # Check that /boot has EFI boot*.efi (required for EFI)
1555 cmd = "ls /boot/EFI/BOOT/boot*.efi | wc -l"
1556 status, output = qemu.run_serial(cmd)
1557 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
1558 self.assertEqual(output, '1')
1559 # Check that boot.conf exists
1560 cmd = "cat /boot/loader/entries/boot.conf"
1561 status, output = qemu.run_serial(cmd)
1562 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
1563
1088 def test_fs_types(self): 1564 def test_fs_types(self):
1089 """Test filesystem types for empty and not empty partitions""" 1565 """Test filesystem types for empty and not empty partitions"""
1090 img = 'core-image-minimal' 1566 img = 'core-image-minimal'
@@ -1101,7 +1577,7 @@ class Wic2(WicTestCase):
1101 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir) 1577 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
1102 runCmd(cmd) 1578 runCmd(cmd)
1103 wksname = os.path.splitext(os.path.basename(wks.name))[0] 1579 wksname = os.path.splitext(os.path.basename(wks.name))[0]
1104 out = glob(self.resultdir + "%s-*direct" % wksname) 1580 out = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
1105 self.assertEqual(1, len(out)) 1581 self.assertEqual(1, len(out))
1106 1582
1107 def test_kickstart_parser(self): 1583 def test_kickstart_parser(self):
@@ -1113,7 +1589,7 @@ class Wic2(WicTestCase):
1113 cmd = "wic create %s -e core-image-minimal -o %s" % (wks.name, self.resultdir) 1589 cmd = "wic create %s -e core-image-minimal -o %s" % (wks.name, self.resultdir)
1114 runCmd(cmd) 1590 runCmd(cmd)
1115 wksname = os.path.splitext(os.path.basename(wks.name))[0] 1591 wksname = os.path.splitext(os.path.basename(wks.name))[0]
1116 out = glob(self.resultdir + "%s-*direct" % wksname) 1592 out = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
1117 self.assertEqual(1, len(out)) 1593 self.assertEqual(1, len(out))
1118 1594
1119 def test_image_bootpart_globbed(self): 1595 def test_image_bootpart_globbed(self):
@@ -1124,11 +1600,11 @@ class Wic2(WicTestCase):
1124 self.append_config(config) 1600 self.append_config(config)
1125 runCmd(cmd) 1601 runCmd(cmd)
1126 self.remove_config(config) 1602 self.remove_config(config)
1127 self.assertEqual(1, len(glob(self.resultdir + "sdimage-bootpart-*direct"))) 1603 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "sdimage-bootpart-*direct"))))
1128 1604
1129 def test_sparse_copy(self): 1605 def test_sparse_copy(self):
1130 """Test sparse_copy with FIEMAP and SEEK_HOLE filemap APIs""" 1606 """Test sparse_copy with FIEMAP and SEEK_HOLE filemap APIs"""
1131 libpath = os.path.join(get_bb_var('COREBASE'), 'scripts', 'lib', 'wic') 1607 libpath = os.path.join(self.td['COREBASE'], 'scripts', 'lib', 'wic')
1132 sys.path.insert(0, libpath) 1608 sys.path.insert(0, libpath)
1133 from filemap import FilemapFiemap, FilemapSeek, sparse_copy, ErrorNotSupp 1609 from filemap import FilemapFiemap, FilemapSeek, sparse_copy, ErrorNotSupp
1134 with NamedTemporaryFile("w", suffix=".wic-sparse") as sparse: 1610 with NamedTemporaryFile("w", suffix=".wic-sparse") as sparse:
@@ -1154,12 +1630,148 @@ class Wic2(WicTestCase):
1154 self.assertEqual(dest_stat.st_blocks, 8) 1630 self.assertEqual(dest_stat.st_blocks, 8)
1155 os.unlink(dest) 1631 os.unlink(dest)
1156 1632
1633 def test_mkfs_extraopts(self):
1634 """Test wks option --mkfs-extraopts for empty and not empty partitions"""
1635 img = 'core-image-minimal'
1636 with NamedTemporaryFile("w", suffix=".wks") as wks:
1637 wks.writelines(
1638 ['part ext2 --fstype ext2 --source rootfs --mkfs-extraopts "-D -F -i 8192"\n',
1639 "part btrfs --fstype btrfs --source rootfs --size 40M --mkfs-extraopts='--quiet'\n",
1640 'part squash --fstype squashfs --source rootfs --mkfs-extraopts "-no-sparse -b 4096"\n',
1641 'part emptyvfat --fstype vfat --size 1M --mkfs-extraopts "-S 1024 -s 64"\n',
1642 'part emptymsdos --fstype msdos --size 1M --mkfs-extraopts "-S 1024 -s 64"\n',
1643 'part emptyext2 --fstype ext2 --size 1M --mkfs-extraopts "-D -F -i 8192"\n',
1644 'part emptybtrfs --fstype btrfs --size 100M --mkfs-extraopts "--mixed -K"\n'])
1645 wks.flush()
1646 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
1647 runCmd(cmd)
1648 wksname = os.path.splitext(os.path.basename(wks.name))[0]
1649 out = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
1650 self.assertEqual(1, len(out))
1651
1652 @skipIfNotArch(['i586', 'i686', 'x86_64'])
1653 @OETestTag("runqemu")
1654 def test_expand_mbr_image(self):
1655 """Test wic write --expand command for mbr image"""
1656 # build an image
1657 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "directdisk.wks"\n'
1658 self.append_config(config)
1659 image = 'core-image-minimal'
1660 bitbake(image)
1661
1662 # get path to the image
1663 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
1664 image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.wic' % bb_vars['IMAGE_LINK_NAME'])
1665
1666 self.remove_config(config)
1667
1668 try:
1669 # expand image to 1G
1670 new_image_path = None
1671 with NamedTemporaryFile(mode='wb', suffix='.wic.exp',
1672 dir=bb_vars['DEPLOY_DIR_IMAGE'], delete=False) as sparse:
1673 sparse.truncate(1024 ** 3)
1674 new_image_path = sparse.name
1675
1676 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
1677 cmd = "wic write -n %s --expand 1:0 %s %s" % (sysroot, image_path, new_image_path)
1678 runCmd(cmd)
1679
1680 # check if partitions are expanded
1681 orig = runCmd("wic ls %s -n %s" % (image_path, sysroot))
1682 exp = runCmd("wic ls %s -n %s" % (new_image_path, sysroot))
1683 orig_sizes = [int(line.split()[3]) for line in orig.output.split('\n')[1:]]
1684 exp_sizes = [int(line.split()[3]) for line in exp.output.split('\n')[1:]]
1685 self.assertEqual(orig_sizes[0], exp_sizes[0]) # first partition is not resized
1686 self.assertTrue(orig_sizes[1] < exp_sizes[1], msg="Parition size wasn't enlarged (%s vs %s)" % (orig_sizes[1], exp_sizes[1]))
1687
1688 # Check if all free space is partitioned
1689 result = runCmd("%s/usr/sbin/sfdisk -F %s" % (sysroot, new_image_path))
1690 self.assertIn("0 B, 0 bytes, 0 sectors", result.output)
1691
1692 os.rename(image_path, image_path + '.bak')
1693 os.rename(new_image_path, image_path)
1694
1695 runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'core-image-minimal') or ""
1696 with runqemu('core-image-minimal', ssh=False, runqemuparams='%s nographic' % (runqemu_params)) as qemu:
1697 cmd = "ls /etc/"
1698 status, output = qemu.run_serial('true')
1699 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
1700 finally:
1701 if os.path.exists(new_image_path):
1702 os.unlink(new_image_path)
1703 if os.path.exists(image_path + '.bak'):
1704 os.rename(image_path + '.bak', image_path)
1705
1706 def test_gpt_partition_name(self):
1707 """Test --part-name argument to set partition name in GPT table"""
1708 config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "test_gpt_partition_name.wks"\n'
1709 self.append_config(config)
1710 image = 'core-image-minimal'
1711 bitbake(image)
1712 self.remove_config(config)
1713 deploy_dir = get_bb_var('DEPLOY_DIR_IMAGE')
1714 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
1715 image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.wic' % bb_vars['IMAGE_LINK_NAME'])
1716
1717 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
1718
1719 # Image is created
1720 self.assertTrue(os.path.exists(image_path), "image file %s doesn't exist" % image_path)
1721
1722 # Check the names of the three partitions
1723 # as listed in test_gpt_partition_name.wks
1724 result = runCmd("%s/usr/sbin/sfdisk --part-label %s 1" % (sysroot, image_path))
1725 self.assertEqual('boot-A', result.output)
1726 result = runCmd("%s/usr/sbin/sfdisk --part-label %s 2" % (sysroot, image_path))
1727 self.assertEqual('root-A', result.output)
1728 # When the --part-name is not defined, the partition name is equal to the --label
1729 result = runCmd("%s/usr/sbin/sfdisk --part-label %s 3" % (sysroot, image_path))
1730 self.assertEqual('ext-space', result.output)
1731
1732 def test_empty_zeroize_plugin(self):
1733 img = 'core-image-minimal'
1734 expected_size = [ 1024*1024, # 1M
1735 512*1024, # 512K
1736 2*1024*1024] # 2M
1737 # Check combination of sourceparams
1738 with NamedTemporaryFile("w", suffix=".wks") as wks:
1739 wks.writelines(
1740 ['part empty --source empty --sourceparams="fill" --ondisk sda --fixed-size 1M\n',
1741 'part empty --source empty --sourceparams="size=512K" --ondisk sda --size 1M --align 1024\n',
1742 'part empty --source empty --sourceparams="size=2048k,bs=512K" --ondisk sda --size 4M --align 1024\n'
1743 ])
1744 wks.flush()
1745 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
1746 runCmd(cmd)
1747 wksname = os.path.splitext(os.path.basename(wks.name))[0]
1748 wicout = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
1749 # Skip the complete image and just look at the single partitions
1750 for idx, value in enumerate(wicout[1:]):
1751 self.logger.info(wicout[idx])
1752 # Check if partitions are actually zeroized
1753 with open(wicout[idx], mode="rb") as fd:
1754 ba = bytearray(fd.read())
1755 for b in ba:
1756 self.assertEqual(b, 0)
1757 self.assertEqual(expected_size[idx], os.path.getsize(wicout[idx]))
1758
1759 # Check inconsistancy check between "fill" and "--size" parameter
1760 with NamedTemporaryFile("w", suffix=".wks") as wks:
1761 wks.writelines(['part empty --source empty --sourceparams="fill" --ondisk sda --size 1M\n'])
1762 wks.flush()
1763 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
1764 result = runCmd(cmd, ignore_status=True)
1765 self.assertIn("Source parameter 'fill' only works with the '--fixed-size' option, exiting.", result.output)
1766 self.assertNotEqual(0, result.status)
1767
1768class ModifyTests(WicTestCase):
1157 def test_wic_ls(self): 1769 def test_wic_ls(self):
1158 """Test listing image content using 'wic ls'""" 1770 """Test listing image content using 'wic ls'"""
1159 runCmd("wic create wictestdisk " 1771 runCmd("wic create wictestdisk "
1160 "--image-name=core-image-minimal " 1772 "--image-name=core-image-minimal "
1161 "-D -o %s" % self.resultdir) 1773 "-D -o %s" % self.resultdir)
1162 images = glob(self.resultdir + "wictestdisk-*.direct") 1774 images = glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))
1163 self.assertEqual(1, len(images)) 1775 self.assertEqual(1, len(images))
1164 1776
1165 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') 1777 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
@@ -1177,7 +1789,7 @@ class Wic2(WicTestCase):
1177 runCmd("wic create wictestdisk " 1789 runCmd("wic create wictestdisk "
1178 "--image-name=core-image-minimal " 1790 "--image-name=core-image-minimal "
1179 "-D -o %s" % self.resultdir) 1791 "-D -o %s" % self.resultdir)
1180 images = glob(self.resultdir + "wictestdisk-*.direct") 1792 images = glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))
1181 self.assertEqual(1, len(images)) 1793 self.assertEqual(1, len(images))
1182 1794
1183 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') 1795 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
@@ -1195,7 +1807,7 @@ class Wic2(WicTestCase):
1195 # check if file is there 1807 # check if file is there
1196 result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot)) 1808 result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot))
1197 self.assertEqual(7, len(result.output.split('\n'))) 1809 self.assertEqual(7, len(result.output.split('\n')))
1198 self.assertTrue(os.path.basename(testfile.name) in result.output) 1810 self.assertIn(os.path.basename(testfile.name), result.output)
1199 1811
1200 # prepare directory 1812 # prepare directory
1201 testdir = os.path.join(self.resultdir, 'wic-test-cp-dir') 1813 testdir = os.path.join(self.resultdir, 'wic-test-cp-dir')
@@ -1209,13 +1821,13 @@ class Wic2(WicTestCase):
1209 # check if directory is there 1821 # check if directory is there
1210 result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot)) 1822 result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot))
1211 self.assertEqual(8, len(result.output.split('\n'))) 1823 self.assertEqual(8, len(result.output.split('\n')))
1212 self.assertTrue(os.path.basename(testdir) in result.output) 1824 self.assertIn(os.path.basename(testdir), result.output)
1213 1825
1214 # copy the file from the partition and check if it success 1826 # copy the file from the partition and check if it success
1215 dest = '%s-cp' % testfile.name 1827 dest = '%s-cp' % testfile.name
1216 runCmd("wic cp %s:1/%s %s -n %s" % (images[0], 1828 runCmd("wic cp %s:1/%s %s -n %s" % (images[0],
1217 os.path.basename(testfile.name), dest, sysroot)) 1829 os.path.basename(testfile.name), dest, sysroot))
1218 self.assertTrue(os.path.exists(dest)) 1830 self.assertTrue(os.path.exists(dest), msg="File %s wasn't generated as expected" % dest)
1219 1831
1220 1832
1221 def test_wic_rm(self): 1833 def test_wic_rm(self):
@@ -1223,105 +1835,35 @@ class Wic2(WicTestCase):
1223 runCmd("wic create mkefidisk " 1835 runCmd("wic create mkefidisk "
1224 "--image-name=core-image-minimal " 1836 "--image-name=core-image-minimal "
1225 "-D -o %s" % self.resultdir) 1837 "-D -o %s" % self.resultdir)
1226 images = glob(self.resultdir + "mkefidisk-*.direct") 1838 images = glob(os.path.join(self.resultdir, "mkefidisk-*.direct"))
1227 self.assertEqual(1, len(images)) 1839 self.assertEqual(1, len(images))
1228 1840
1229 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') 1841 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
1842 # Not bulletproof but hopefully sufficient
1843 kerneltype = get_bb_var('KERNEL_IMAGETYPE', 'virtual/kernel')
1230 1844
1231 # list directory content of the first partition 1845 # list directory content of the first partition
1232 result = runCmd("wic ls %s:1 -n %s" % (images[0], sysroot)) 1846 result = runCmd("wic ls %s:1 -n %s" % (images[0], sysroot))
1233 self.assertIn('\nBZIMAGE ', result.output) 1847 self.assertIn('\n%s ' % kerneltype.upper(), result.output)
1234 self.assertIn('\nEFI <DIR> ', result.output) 1848 self.assertIn('\nEFI <DIR> ', result.output)
1235 1849
1236 # remove file 1850 # remove file. EFI partitions are case-insensitive so exercise that too
1237 runCmd("wic rm %s:1/bzimage -n %s" % (images[0], sysroot)) 1851 runCmd("wic rm %s:1/%s -n %s" % (images[0], kerneltype.lower(), sysroot))
1238 1852
1239 # remove directory 1853 # remove directory
1240 runCmd("wic rm %s:1/efi -n %s" % (images[0], sysroot)) 1854 runCmd("wic rm %s:1/efi -n %s" % (images[0], sysroot))
1241 1855
1242 # check if they're removed 1856 # check if they're removed
1243 result = runCmd("wic ls %s:1 -n %s" % (images[0], sysroot)) 1857 result = runCmd("wic ls %s:1 -n %s" % (images[0], sysroot))
1244 self.assertNotIn('\nBZIMAGE ', result.output) 1858 self.assertNotIn('\n%s ' % kerneltype.upper(), result.output)
1245 self.assertNotIn('\nEFI <DIR> ', result.output) 1859 self.assertNotIn('\nEFI <DIR> ', result.output)
1246 1860
1247 def test_mkfs_extraopts(self):
1248 """Test wks option --mkfs-extraopts for empty and not empty partitions"""
1249 img = 'core-image-minimal'
1250 with NamedTemporaryFile("w", suffix=".wks") as wks:
1251 wks.writelines(
1252 ['part ext2 --fstype ext2 --source rootfs --mkfs-extraopts "-D -F -i 8192"\n',
1253 "part btrfs --fstype btrfs --source rootfs --size 40M --mkfs-extraopts='--quiet'\n",
1254 'part squash --fstype squashfs --source rootfs --mkfs-extraopts "-no-sparse -b 4096"\n',
1255 'part emptyvfat --fstype vfat --size 1M --mkfs-extraopts "-S 1024 -s 64"\n',
1256 'part emptymsdos --fstype msdos --size 1M --mkfs-extraopts "-S 1024 -s 64"\n',
1257 'part emptyext2 --fstype ext2 --size 1M --mkfs-extraopts "-D -F -i 8192"\n',
1258 'part emptybtrfs --fstype btrfs --size 100M --mkfs-extraopts "--mixed -K"\n'])
1259 wks.flush()
1260 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
1261 runCmd(cmd)
1262 wksname = os.path.splitext(os.path.basename(wks.name))[0]
1263 out = glob(self.resultdir + "%s-*direct" % wksname)
1264 self.assertEqual(1, len(out))
1265
1266 def test_expand_mbr_image(self):
1267 """Test wic write --expand command for mbr image"""
1268 # build an image
1269 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "directdisk.wks"\n'
1270 self.append_config(config)
1271 self.assertEqual(0, bitbake('core-image-minimal').status)
1272
1273 # get path to the image
1274 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'MACHINE'])
1275 deploy_dir = bb_vars['DEPLOY_DIR_IMAGE']
1276 machine = bb_vars['MACHINE']
1277 image_path = os.path.join(deploy_dir, 'core-image-minimal-%s.wic' % machine)
1278
1279 self.remove_config(config)
1280
1281 try:
1282 # expand image to 1G
1283 new_image_path = None
1284 with NamedTemporaryFile(mode='wb', suffix='.wic.exp',
1285 dir=deploy_dir, delete=False) as sparse:
1286 sparse.truncate(1024 ** 3)
1287 new_image_path = sparse.name
1288
1289 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
1290 cmd = "wic write -n %s --expand 1:0 %s %s" % (sysroot, image_path, new_image_path)
1291 runCmd(cmd)
1292
1293 # check if partitions are expanded
1294 orig = runCmd("wic ls %s -n %s" % (image_path, sysroot))
1295 exp = runCmd("wic ls %s -n %s" % (new_image_path, sysroot))
1296 orig_sizes = [int(line.split()[3]) for line in orig.output.split('\n')[1:]]
1297 exp_sizes = [int(line.split()[3]) for line in exp.output.split('\n')[1:]]
1298 self.assertEqual(orig_sizes[0], exp_sizes[0]) # first partition is not resized
1299 self.assertTrue(orig_sizes[1] < exp_sizes[1])
1300
1301 # Check if all free space is partitioned
1302 result = runCmd("%s/usr/sbin/sfdisk -F %s" % (sysroot, new_image_path))
1303 self.assertTrue("0 B, 0 bytes, 0 sectors" in result.output)
1304
1305 os.rename(image_path, image_path + '.bak')
1306 os.rename(new_image_path, image_path)
1307
1308 # Check if it boots in qemu
1309 with runqemu('core-image-minimal', ssh=False) as qemu:
1310 cmd = "ls /etc/"
1311 status, output = qemu.run_serial('true')
1312 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
1313 finally:
1314 if os.path.exists(new_image_path):
1315 os.unlink(new_image_path)
1316 if os.path.exists(image_path + '.bak'):
1317 os.rename(image_path + '.bak', image_path)
1318
1319 def test_wic_ls_ext(self): 1861 def test_wic_ls_ext(self):
1320 """Test listing content of the ext partition using 'wic ls'""" 1862 """Test listing content of the ext partition using 'wic ls'"""
1321 runCmd("wic create wictestdisk " 1863 runCmd("wic create wictestdisk "
1322 "--image-name=core-image-minimal " 1864 "--image-name=core-image-minimal "
1323 "-D -o %s" % self.resultdir) 1865 "-D -o %s" % self.resultdir)
1324 images = glob(self.resultdir + "wictestdisk-*.direct") 1866 images = glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))
1325 self.assertEqual(1, len(images)) 1867 self.assertEqual(1, len(images))
1326 1868
1327 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') 1869 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
@@ -1329,14 +1871,14 @@ class Wic2(WicTestCase):
1329 # list directory content of the second ext4 partition 1871 # list directory content of the second ext4 partition
1330 result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot)) 1872 result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot))
1331 self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset( 1873 self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset(
1332 set(line.split()[-1] for line in result.output.split('\n') if line))) 1874 set(line.split()[-1] for line in result.output.split('\n') if line)), msg="Expected directories not present %s" % result.output)
1333 1875
1334 def test_wic_cp_ext(self): 1876 def test_wic_cp_ext(self):
1335 """Test copy files and directories to the ext partition.""" 1877 """Test copy files and directories to the ext partition."""
1336 runCmd("wic create wictestdisk " 1878 runCmd("wic create wictestdisk "
1337 "--image-name=core-image-minimal " 1879 "--image-name=core-image-minimal "
1338 "-D -o %s" % self.resultdir) 1880 "-D -o %s" % self.resultdir)
1339 images = glob(self.resultdir + "wictestdisk-*.direct") 1881 images = glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))
1340 self.assertEqual(1, len(images)) 1882 self.assertEqual(1, len(images))
1341 1883
1342 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') 1884 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
@@ -1344,7 +1886,7 @@ class Wic2(WicTestCase):
1344 # list directory content of the ext4 partition 1886 # list directory content of the ext4 partition
1345 result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot)) 1887 result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot))
1346 dirs = set(line.split()[-1] for line in result.output.split('\n') if line) 1888 dirs = set(line.split()[-1] for line in result.output.split('\n') if line)
1347 self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset(dirs)) 1889 self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset(dirs), msg="Expected directories not present %s" % dirs)
1348 1890
1349 with NamedTemporaryFile("w", suffix=".wic-cp") as testfile: 1891 with NamedTemporaryFile("w", suffix=".wic-cp") as testfile:
1350 testfile.write("test") 1892 testfile.write("test")
@@ -1359,12 +1901,12 @@ class Wic2(WicTestCase):
1359 1901
1360 # check if the file to copy is in the partition 1902 # check if the file to copy is in the partition
1361 result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot)) 1903 result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot))
1362 self.assertTrue('fstab' in [line.split()[-1] for line in result.output.split('\n') if line]) 1904 self.assertIn('fstab', [line.split()[-1] for line in result.output.split('\n') if line])
1363 1905
1364 # copy file from the partition, replace the temporary file content with it and 1906 # copy file from the partition, replace the temporary file content with it and
1365 # check for the file size to validate the copy 1907 # check for the file size to validate the copy
1366 runCmd("wic cp %s:2/etc/fstab %s -n %s" % (images[0], testfile.name, sysroot)) 1908 runCmd("wic cp %s:2/etc/fstab %s -n %s" % (images[0], testfile.name, sysroot))
1367 self.assertTrue(os.stat(testfile.name).st_size > 0) 1909 self.assertTrue(os.stat(testfile.name).st_size > 0, msg="Filesize not as expected %s" % os.stat(testfile.name).st_size)
1368 1910
1369 1911
1370 def test_wic_rm_ext(self): 1912 def test_wic_rm_ext(self):
@@ -1372,25 +1914,25 @@ class Wic2(WicTestCase):
1372 runCmd("wic create mkefidisk " 1914 runCmd("wic create mkefidisk "
1373 "--image-name=core-image-minimal " 1915 "--image-name=core-image-minimal "
1374 "-D -o %s" % self.resultdir) 1916 "-D -o %s" % self.resultdir)
1375 images = glob(self.resultdir + "mkefidisk-*.direct") 1917 images = glob(os.path.join(self.resultdir, "mkefidisk-*.direct"))
1376 self.assertEqual(1, len(images)) 1918 self.assertEqual(1, len(images))
1377 1919
1378 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') 1920 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
1379 1921
1380 # list directory content of the /etc directory on ext4 partition 1922 # list directory content of the /etc directory on ext4 partition
1381 result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot)) 1923 result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot))
1382 self.assertTrue('fstab' in [line.split()[-1] for line in result.output.split('\n') if line]) 1924 self.assertIn('fstab', [line.split()[-1] for line in result.output.split('\n') if line])
1383 1925
1384 # remove file 1926 # remove file
1385 runCmd("wic rm %s:2/etc/fstab -n %s" % (images[0], sysroot)) 1927 runCmd("wic rm %s:2/etc/fstab -n %s" % (images[0], sysroot))
1386 1928
1387 # check if it's removed 1929 # check if it's removed
1388 result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot)) 1930 result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot))
1389 self.assertTrue('fstab' not in [line.split()[-1] for line in result.output.split('\n') if line]) 1931 self.assertNotIn('fstab', [line.split()[-1] for line in result.output.split('\n') if line])
1390 1932
1391 # remove non-empty directory 1933 # remove non-empty directory
1392 runCmd("wic rm -r %s:2/etc/ -n %s" % (images[0], sysroot)) 1934 runCmd("wic rm -r %s:2/etc/ -n %s" % (images[0], sysroot))
1393 1935
1394 # check if it's removed 1936 # check if it's removed
1395 result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot)) 1937 result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot))
1396 self.assertTrue('etc' not in [line.split()[-1] for line in result.output.split('\n') if line]) 1938 self.assertNotIn('etc', [line.split()[-1] for line in result.output.split('\n') if line])
diff --git a/meta/lib/oeqa/selftest/cases/wrapper.py b/meta/lib/oeqa/selftest/cases/wrapper.py
new file mode 100644
index 0000000000..f2be44262c
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/wrapper.py
@@ -0,0 +1,16 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6from oeqa.selftest.case import OESelftestTestCase
7from oeqa.utils.commands import bitbake
8
9class WrapperTests(OESelftestTestCase):
10 def test_shebang_wrapper(self):
11 """
12 Summary: Build a recipe which will fail if the cmdline_shebang_wrapper function is defective.
13 Expected: Exit status to be 0.
14 Author: Paulo Neves <ptsneves@gmail.com>
15 """
16 res = bitbake("cmdline-shebang-wrapper-test -c install", ignore_status=False)
diff --git a/meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py b/meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py
new file mode 100644
index 0000000000..312edb6431
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py
@@ -0,0 +1,39 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8import sys
9import subprocess
10import shutil
11from oeqa.selftest.case import OESelftestTestCase
12from yocto_testresults_query import get_sha1, create_workdir
13basepath = os.path.abspath(os.path.dirname(__file__) + '/../../../../../')
14lib_path = basepath + '/scripts/lib'
15sys.path = sys.path + [lib_path]
16
17
18class TestResultsQueryTests(OESelftestTestCase):
19 def test_get_sha1(self):
20 test_data_get_sha1 = [
21 {"input": "yocto-4.0", "expected": "00cfdde791a0176c134f31e5a09eff725e75b905"},
22 {"input": "4.1_M1", "expected": "95066dde6861ee08fdb505ab3e0422156cc24fae"},
23 ]
24 for data in test_data_get_sha1:
25 test_name = data["input"]
26 with self.subTest(f"Test SHA1 from {test_name}"):
27 self.assertEqual(
28 get_sha1(basepath, data["input"]), data["expected"])
29
30 def test_create_workdir(self):
31 workdir = create_workdir()
32 try:
33 url = subprocess.check_output(
34 ["git", "-C", workdir, "remote", "get-url", "origin"]).strip().decode("utf-8")
35 except:
36 shutil.rmtree(workdir, ignore_errors=True)
37 self.fail(f"Can not execute git commands in {workdir}")
38 shutil.rmtree(workdir)
39 self.assertEqual(url, "git://git.yoctoproject.org/yocto-testresults")
diff --git a/meta/lib/oeqa/selftest/context.py b/meta/lib/oeqa/selftest/context.py
index 1659926975..16f82c6737 100644
--- a/meta/lib/oeqa/selftest/context.py
+++ b/meta/lib/oeqa/selftest/context.py
@@ -16,19 +16,32 @@ from random import choice
16import oeqa 16import oeqa
17import oe 17import oe
18import bb.utils 18import bb.utils
19import bb.tinfoil
19 20
20from oeqa.core.context import OETestContext, OETestContextExecutor 21from oeqa.core.context import OETestContext, OETestContextExecutor
21from oeqa.core.exception import OEQAPreRun, OEQATestNotFound 22from oeqa.core.exception import OEQAPreRun, OEQATestNotFound
22 23
23from oeqa.utils.commands import runCmd, get_bb_vars, get_test_layer 24from oeqa.utils.commands import runCmd, get_bb_vars, get_test_layer
24 25
26OESELFTEST_METADATA=["run_all_tests", "run_tests", "skips", "machine", "select_tags", "exclude_tags"]
27
28def get_oeselftest_metadata(args):
29 result = {}
30 raw_args = vars(args)
31 for metadata in OESELFTEST_METADATA:
32 if metadata in raw_args:
33 result[metadata] = raw_args[metadata]
34
35 return result
36
25class NonConcurrentTestSuite(unittest.TestSuite): 37class NonConcurrentTestSuite(unittest.TestSuite):
26 def __init__(self, suite, processes, setupfunc, removefunc): 38 def __init__(self, suite, processes, setupfunc, removefunc, bb_vars):
27 super().__init__([suite]) 39 super().__init__([suite])
28 self.processes = processes 40 self.processes = processes
29 self.suite = suite 41 self.suite = suite
30 self.setupfunc = setupfunc 42 self.setupfunc = setupfunc
31 self.removefunc = removefunc 43 self.removefunc = removefunc
44 self.bb_vars = bb_vars
32 45
33 def run(self, result): 46 def run(self, result):
34 (builddir, newbuilddir) = self.setupfunc("-st", None, self.suite) 47 (builddir, newbuilddir) = self.setupfunc("-st", None, self.suite)
@@ -39,7 +52,7 @@ class NonConcurrentTestSuite(unittest.TestSuite):
39 52
40def removebuilddir(d): 53def removebuilddir(d):
41 delay = 5 54 delay = 5
42 while delay and os.path.exists(d + "/bitbake.lock"): 55 while delay and (os.path.exists(d + "/bitbake.lock") or os.path.exists(d + "/cache/hashserv.db-wal")):
43 time.sleep(1) 56 time.sleep(1)
44 delay = delay - 1 57 delay = delay - 1
45 # Deleting these directories takes a lot of time, use autobuilder 58 # Deleting these directories takes a lot of time, use autobuilder
@@ -57,8 +70,6 @@ class OESelftestTestContext(OETestContext):
57 def __init__(self, td=None, logger=None, machines=None, config_paths=None, newbuilddir=None, keep_builddir=None): 70 def __init__(self, td=None, logger=None, machines=None, config_paths=None, newbuilddir=None, keep_builddir=None):
58 super(OESelftestTestContext, self).__init__(td, logger) 71 super(OESelftestTestContext, self).__init__(td, logger)
59 72
60 self.machines = machines
61 self.custommachine = None
62 self.config_paths = config_paths 73 self.config_paths = config_paths
63 self.newbuilddir = newbuilddir 74 self.newbuilddir = newbuilddir
64 75
@@ -67,10 +78,15 @@ class OESelftestTestContext(OETestContext):
67 else: 78 else:
68 self.removebuilddir = removebuilddir 79 self.removebuilddir = removebuilddir
69 80
81 def set_variables(self, vars):
82 self.bb_vars = vars
83
70 def setup_builddir(self, suffix, selftestdir, suite): 84 def setup_builddir(self, suffix, selftestdir, suite):
85 sstatedir = self.bb_vars['SSTATE_DIR']
86
71 builddir = os.environ['BUILDDIR'] 87 builddir = os.environ['BUILDDIR']
72 if not selftestdir: 88 if not selftestdir:
73 selftestdir = get_test_layer() 89 selftestdir = get_test_layer(self.bb_vars['BBLAYERS'])
74 if self.newbuilddir: 90 if self.newbuilddir:
75 newbuilddir = os.path.join(self.newbuilddir, 'build' + suffix) 91 newbuilddir = os.path.join(self.newbuilddir, 'build' + suffix)
76 else: 92 else:
@@ -86,16 +102,40 @@ class OESelftestTestContext(OETestContext):
86 oe.path.copytree(builddir + "/cache", newbuilddir + "/cache") 102 oe.path.copytree(builddir + "/cache", newbuilddir + "/cache")
87 oe.path.copytree(selftestdir, newselftestdir) 103 oe.path.copytree(selftestdir, newselftestdir)
88 104
105 # if the last line of local.conf in newbuilddir is not empty and does not end with newline then add one
106 localconf_path = newbuilddir + "/conf/local.conf"
107 with open(localconf_path, "r+", encoding="utf-8") as f:
108 last_line = f.readlines()[-1]
109 if last_line and not last_line.endswith("\n"):
110 f.write("\n")
111
112 subprocess.check_output("git init && git add * && git commit -a -m 'initial'", cwd=newselftestdir, shell=True)
113
114 # Tried to used bitbake-layers add/remove but it requires recipe parsing and hence is too slow
115 subprocess.check_output("sed %s/conf/bblayers.conf -i -e 's#%s#%s#g'" % (newbuilddir, selftestdir, newselftestdir), cwd=newbuilddir, shell=True)
116
117 # Relative paths in BBLAYERS only works when the new build dir share the same ascending node
118 if self.newbuilddir:
119 bblayers = subprocess.check_output("bitbake-getvar --value BBLAYERS | tail -1", cwd=builddir, shell=True, text=True)
120 if '..' in bblayers:
121 bblayers_abspath = [os.path.abspath(path) for path in bblayers.split()]
122 with open("%s/conf/bblayers.conf" % newbuilddir, "a") as f:
123 newbblayers = "# new bblayers to be used by selftest in the new build dir '%s'\n" % newbuilddir
124 newbblayers += 'unset BBLAYERS\n'
125 newbblayers += 'BBLAYERS = "%s"\n' % ' '.join(bblayers_abspath)
126 f.write(newbblayers)
127
128 # Rewrite builddir paths seen in environment variables
89 for e in os.environ: 129 for e in os.environ:
90 if builddir + "/" in os.environ[e]: 130 # Rewrite paths that absolutely point inside builddir
131 # (e.g $builddir/conf/ would be rewritten but not $builddir/../bitbake/)
132 if builddir + "/" in os.environ[e] and builddir + "/" in os.path.abspath(os.environ[e]):
91 os.environ[e] = os.environ[e].replace(builddir + "/", newbuilddir + "/") 133 os.environ[e] = os.environ[e].replace(builddir + "/", newbuilddir + "/")
92 if os.environ[e].endswith(builddir): 134 if os.environ[e].endswith(builddir):
93 os.environ[e] = os.environ[e].replace(builddir, newbuilddir) 135 os.environ[e] = os.environ[e].replace(builddir, newbuilddir)
94 136
95 subprocess.check_output("git init; git add *; git commit -a -m 'initial'", cwd=newselftestdir, shell=True) 137 # Set SSTATE_DIR to match the parent SSTATE_DIR
96 138 subprocess.check_output("echo 'SSTATE_DIR ?= \"%s\"' >> %s/conf/local.conf" % (sstatedir, newbuilddir), cwd=newbuilddir, shell=True)
97 # Tried to used bitbake-layers add/remove but it requires recipe parsing and hence is too slow
98 subprocess.check_output("sed %s/conf/bblayers.conf -i -e 's#%s#%s#g'" % (newbuilddir, selftestdir, newselftestdir), cwd=newbuilddir, shell=True)
99 139
100 os.chdir(newbuilddir) 140 os.chdir(newbuilddir)
101 141
@@ -124,17 +164,11 @@ class OESelftestTestContext(OETestContext):
124 if processes: 164 if processes:
125 from oeqa.core.utils.concurrencytest import ConcurrentTestSuite 165 from oeqa.core.utils.concurrencytest import ConcurrentTestSuite
126 166
127 return ConcurrentTestSuite(suites, processes, self.setup_builddir, self.removebuilddir) 167 return ConcurrentTestSuite(suites, processes, self.setup_builddir, self.removebuilddir, self.bb_vars)
128 else: 168 else:
129 return NonConcurrentTestSuite(suites, processes, self.setup_builddir, self.removebuilddir) 169 return NonConcurrentTestSuite(suites, processes, self.setup_builddir, self.removebuilddir, self.bb_vars)
130 170
131 def runTests(self, processes=None, machine=None, skips=[]): 171 def runTests(self, processes=None, machine=None, skips=[]):
132 if machine:
133 self.custommachine = machine
134 if machine == 'random':
135 self.custommachine = choice(self.machines)
136 self.logger.info('Run tests with custom MACHINE set to: %s' % \
137 self.custommachine)
138 return super(OESelftestTestContext, self).runTests(processes, skips) 172 return super(OESelftestTestContext, self).runTests(processes, skips)
139 173
140 def listTests(self, display_type, machine=None): 174 def listTests(self, display_type, machine=None):
@@ -154,9 +188,6 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
154 group.add_argument('-a', '--run-all-tests', default=False, 188 group.add_argument('-a', '--run-all-tests', default=False,
155 action="store_true", dest="run_all_tests", 189 action="store_true", dest="run_all_tests",
156 help='Run all (unhidden) tests') 190 help='Run all (unhidden) tests')
157 group.add_argument('-R', '--skip-tests', required=False, action='store',
158 nargs='+', dest="skips", default=None,
159 help='Run all (unhidden) tests except the ones specified. Format should be <module>[.<class>[.<test_method>]]')
160 group.add_argument('-r', '--run-tests', required=False, action='store', 191 group.add_argument('-r', '--run-tests', required=False, action='store',
161 nargs='+', dest="run_tests", default=None, 192 nargs='+', dest="run_tests", default=None,
162 help='Select what tests to run (modules, classes or test methods). Format should be: <module>.<class>.<test_method>') 193 help='Select what tests to run (modules, classes or test methods). Format should be: <module>.<class>.<test_method>')
@@ -171,11 +202,26 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
171 action="store_true", default=False, 202 action="store_true", default=False,
172 help='List all available tests.') 203 help='List all available tests.')
173 204
174 parser.add_argument('-j', '--num-processes', dest='processes', action='store', 205 parser.add_argument('-R', '--skip-tests', required=False, action='store',
175 type=int, help="number of processes to execute in parallel with") 206 nargs='+', dest="skips", default=None,
207 help='Skip the tests specified. Format should be <module>[.<class>[.<test_method>]]')
208
209 def check_parallel_support(parameter):
210 if not parameter.isdigit():
211 import argparse
212 raise argparse.ArgumentTypeError("argument -j/--num-processes: invalid int value: '%s' " % str(parameter))
213
214 processes = int(parameter)
215 if processes:
216 try:
217 import testtools, subunit
218 except ImportError:
219 print("Failed to import testtools or subunit, the testcases will run serially")
220 processes = None
221 return processes
176 222
177 parser.add_argument('--machine', required=False, choices=['random', 'all'], 223 parser.add_argument('-j', '--num-processes', dest='processes', action='store',
178 help='Run tests on different machines (random/all).') 224 type=check_parallel_support, help="number of processes to execute in parallel with")
179 225
180 parser.add_argument('-t', '--select-tag', dest="select_tags", 226 parser.add_argument('-t', '--select-tag', dest="select_tags",
181 action='append', default=None, 227 action='append', default=None,
@@ -191,20 +237,6 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
191 parser.add_argument('-v', '--verbose', action='store_true') 237 parser.add_argument('-v', '--verbose', action='store_true')
192 parser.set_defaults(func=self.run) 238 parser.set_defaults(func=self.run)
193 239
194 def _get_available_machines(self):
195 machines = []
196
197 bbpath = self.tc_kwargs['init']['td']['BBPATH'].split(':')
198
199 for path in bbpath:
200 found_machines = glob.glob(os.path.join(path, 'conf', 'machine', '*.conf'))
201 if found_machines:
202 for i in found_machines:
203 # eg: '/home/<user>/poky/meta-intel/conf/machine/intel-core2-32.conf'
204 machines.append(os.path.splitext(os.path.basename(i))[0])
205
206 return machines
207
208 def _get_cases_paths(self, bbpath): 240 def _get_cases_paths(self, bbpath):
209 cases_paths = [] 241 cases_paths = []
210 for layer in bbpath: 242 for layer in bbpath:
@@ -235,11 +267,10 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
235 args.list_tests = 'name' 267 args.list_tests = 'name'
236 268
237 self.tc_kwargs['init']['td'] = bbvars 269 self.tc_kwargs['init']['td'] = bbvars
238 self.tc_kwargs['init']['machines'] = self._get_available_machines()
239 270
240 builddir = os.environ.get("BUILDDIR") 271 builddir = os.environ.get("BUILDDIR")
241 self.tc_kwargs['init']['config_paths'] = {} 272 self.tc_kwargs['init']['config_paths'] = {}
242 self.tc_kwargs['init']['config_paths']['testlayer_path'] = get_test_layer() 273 self.tc_kwargs['init']['config_paths']['testlayer_path'] = get_test_layer(bbvars["BBLAYERS"])
243 self.tc_kwargs['init']['config_paths']['builddir'] = builddir 274 self.tc_kwargs['init']['config_paths']['builddir'] = builddir
244 self.tc_kwargs['init']['config_paths']['localconf'] = os.path.join(builddir, "conf/local.conf") 275 self.tc_kwargs['init']['config_paths']['localconf'] = os.path.join(builddir, "conf/local.conf")
245 self.tc_kwargs['init']['config_paths']['bblayers'] = os.path.join(builddir, "conf/bblayers.conf") 276 self.tc_kwargs['init']['config_paths']['bblayers'] = os.path.join(builddir, "conf/bblayers.conf")
@@ -275,14 +306,14 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
275 os.chdir(builddir) 306 os.chdir(builddir)
276 307
277 if not "meta-selftest" in self.tc.td["BBLAYERS"]: 308 if not "meta-selftest" in self.tc.td["BBLAYERS"]:
278 self.tc.logger.warning("meta-selftest layer not found in BBLAYERS, adding it") 309 self.tc.logger.info("meta-selftest layer not found in BBLAYERS, adding it")
279 meta_selftestdir = os.path.join( 310 meta_selftestdir = os.path.join(
280 self.tc.td["BBLAYERS_FETCH_DIR"], 'meta-selftest') 311 self.tc.td["BBLAYERS_FETCH_DIR"], 'meta-selftest')
281 if os.path.isdir(meta_selftestdir): 312 if os.path.isdir(meta_selftestdir):
282 runCmd("bitbake-layers add-layer %s" %meta_selftestdir) 313 runCmd("bitbake-layers add-layer %s" % meta_selftestdir)
283 # reload data is needed because a meta-selftest layer was add 314 # reload data is needed because a meta-selftest layer was add
284 self.tc.td = get_bb_vars() 315 self.tc.td = get_bb_vars()
285 self.tc.config_paths['testlayer_path'] = get_test_layer() 316 self.tc.config_paths['testlayer_path'] = get_test_layer(self.tc.td["BBLAYERS"])
286 else: 317 else:
287 self.tc.logger.error("could not locate meta-selftest in:\n%s" % meta_selftestdir) 318 self.tc.logger.error("could not locate meta-selftest in:\n%s" % meta_selftestdir)
288 raise OEQAPreRun 319 raise OEQAPreRun
@@ -320,8 +351,15 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
320 351
321 _add_layer_libs() 352 _add_layer_libs()
322 353
323 self.tc.logger.info("Running bitbake -e to test the configuration is valid/parsable") 354 self.tc.logger.info("Checking base configuration is valid/parsable")
324 runCmd("bitbake -e") 355
356 with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
357 tinfoil.prepare(quiet=2, config_only=True)
358 d = tinfoil.config_data
359 vars = {}
360 vars['SSTATE_DIR'] = str(d.getVar('SSTATE_DIR'))
361 vars['BBLAYERS'] = str(d.getVar('BBLAYERS'))
362 self.tc.set_variables(vars)
325 363
326 def get_json_result_dir(self, args): 364 def get_json_result_dir(self, args):
327 json_result_dir = os.path.join(self.tc.td["LOG_DIR"], 'oeqa') 365 json_result_dir = os.path.join(self.tc.td["LOG_DIR"], 'oeqa')
@@ -334,12 +372,14 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
334 import platform 372 import platform
335 from oeqa.utils.metadata import metadata_from_bb 373 from oeqa.utils.metadata import metadata_from_bb
336 metadata = metadata_from_bb() 374 metadata = metadata_from_bb()
375 oeselftest_metadata = get_oeselftest_metadata(args)
337 configuration = {'TEST_TYPE': 'oeselftest', 376 configuration = {'TEST_TYPE': 'oeselftest',
338 'STARTTIME': args.test_start_time, 377 'STARTTIME': args.test_start_time,
339 'MACHINE': self.tc.td["MACHINE"], 378 'MACHINE': self.tc.td["MACHINE"],
340 'HOST_DISTRO': oe.lsb.distro_identifier().replace(' ', '-'), 379 'HOST_DISTRO': oe.lsb.distro_identifier().replace(' ', '-'),
341 'HOST_NAME': metadata['hostname'], 380 'HOST_NAME': metadata['hostname'],
342 'LAYERS': metadata['layers']} 381 'LAYERS': metadata['layers'],
382 'OESELFTEST_METADATA': oeselftest_metadata}
343 return configuration 383 return configuration
344 384
345 def get_result_id(self, configuration): 385 def get_result_id(self, configuration):
@@ -374,37 +414,14 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
374 414
375 rc = None 415 rc = None
376 try: 416 try:
377 if args.machine: 417 rc = self._internal_run(logger, args)
378 logger.info('Custom machine mode enabled. MACHINE set to %s' %
379 args.machine)
380
381 if args.machine == 'all':
382 results = []
383 for m in self.tc_kwargs['init']['machines']:
384 self.tc_kwargs['run']['machine'] = m
385 results.append(self._internal_run(logger, args))
386
387 # XXX: the oe-selftest script only needs to know if one
388 # machine run fails
389 for r in results:
390 rc = r
391 if not r.wasSuccessful():
392 break
393
394 else:
395 self.tc_kwargs['run']['machine'] = args.machine
396 return self._internal_run(logger, args)
397
398 else:
399 self.tc_kwargs['run']['machine'] = args.machine
400 rc = self._internal_run(logger, args)
401 finally: 418 finally:
402 config_paths = self.tc_kwargs['init']['config_paths'] 419 config_paths = self.tc_kwargs['init']['config_paths']
403 420
404 output_link = os.path.join(os.path.dirname(args.output_log), 421 output_link = os.path.join(os.path.dirname(args.output_log),
405 "%s-results.log" % self.name) 422 "%s-results.log" % self.name)
406 if os.path.lexists(output_link): 423 if os.path.lexists(output_link):
407 os.remove(output_link) 424 os.unlink(output_link)
408 os.symlink(args.output_log, output_link) 425 os.symlink(args.output_log, output_link)
409 426
410 return rc 427 return rc
diff --git a/meta/lib/oeqa/targetcontrol.py b/meta/lib/oeqa/targetcontrol.py
index 19f5a4ea7e..cdf382ee21 100644
--- a/meta/lib/oeqa/targetcontrol.py
+++ b/meta/lib/oeqa/targetcontrol.py
@@ -7,17 +7,14 @@
7# This module is used by testimage.bbclass for setting up and controlling a target machine. 7# This module is used by testimage.bbclass for setting up and controlling a target machine.
8 8
9import os 9import os
10import shutil
11import subprocess 10import subprocess
12import bb 11import bb
13import traceback
14import sys
15import logging 12import logging
16from oeqa.utils.sshcontrol import SSHControl 13from oeqa.utils.sshcontrol import SSHControl
17from oeqa.utils.qemurunner import QemuRunner 14from oeqa.utils.qemurunner import QemuRunner
18from oeqa.utils.qemutinyrunner import QemuTinyRunner 15from oeqa.utils.qemutinyrunner import QemuTinyRunner
19from oeqa.utils.dump import TargetDumper 16from oeqa.utils.dump import TargetDumper
20from oeqa.controllers.testtargetloader import TestTargetLoader 17from oeqa.utils.dump import MonitorDumper
21from abc import ABCMeta, abstractmethod 18from abc import ABCMeta, abstractmethod
22 19
23class BaseTarget(object, metaclass=ABCMeta): 20class BaseTarget(object, metaclass=ABCMeta):
@@ -41,7 +38,7 @@ class BaseTarget(object, metaclass=ABCMeta):
41 if os.path.islink(sshloglink): 38 if os.path.islink(sshloglink):
42 os.unlink(sshloglink) 39 os.unlink(sshloglink)
43 os.symlink(self.sshlog, sshloglink) 40 os.symlink(self.sshlog, sshloglink)
44 self.logger.info("SSH log file: %s" % self.sshlog) 41 self.logger.info("SSH log file: %s" % self.sshlog)
45 42
46 @abstractmethod 43 @abstractmethod
47 def start(self, params=None, ssh=True, extra_bootparams=None): 44 def start(self, params=None, ssh=True, extra_bootparams=None):
@@ -91,7 +88,7 @@ class QemuTarget(BaseTarget):
91 88
92 supported_image_fstypes = ['ext3', 'ext4', 'cpio.gz', 'wic'] 89 supported_image_fstypes = ['ext3', 'ext4', 'cpio.gz', 'wic']
93 90
94 def __init__(self, d, logger, image_fstype=None): 91 def __init__(self, d, logger, image_fstype=None, boot_patterns=None):
95 92
96 import oe.types 93 import oe.types
97 94
@@ -106,8 +103,7 @@ class QemuTarget(BaseTarget):
106 self.rootfs = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), d.getVar("IMAGE_LINK_NAME") + '.' + self.image_fstype) 103 self.rootfs = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), d.getVar("IMAGE_LINK_NAME") + '.' + self.image_fstype)
107 self.kernel = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), d.getVar("KERNEL_IMAGETYPE", False) + '-' + d.getVar('MACHINE', False) + '.bin') 104 self.kernel = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), d.getVar("KERNEL_IMAGETYPE", False) + '-' + d.getVar('MACHINE', False) + '.bin')
108 self.qemulog = os.path.join(self.testdir, "qemu_boot_log.%s" % self.datetime) 105 self.qemulog = os.path.join(self.testdir, "qemu_boot_log.%s" % self.datetime)
109 dump_target_cmds = d.getVar("testimage_dump_target") 106 dump_monitor_cmds = d.getVar("testimage_dump_monitor")
110 dump_host_cmds = d.getVar("testimage_dump_host")
111 dump_dir = d.getVar("TESTIMAGE_DUMP_DIR") 107 dump_dir = d.getVar("TESTIMAGE_DUMP_DIR")
112 if not dump_dir: 108 if not dump_dir:
113 dump_dir = os.path.join(d.getVar('LOG_DIR'), 'runtime-hostdump') 109 dump_dir = os.path.join(d.getVar('LOG_DIR'), 'runtime-hostdump')
@@ -131,6 +127,7 @@ class QemuTarget(BaseTarget):
131 logfile = self.qemulog, 127 logfile = self.qemulog,
132 kernel = self.kernel, 128 kernel = self.kernel,
133 boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT")), 129 boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT")),
130 tmpfsdir = d.getVar("RUNQEMU_TMPFS_DIR"),
134 logger = logger) 131 logger = logger)
135 else: 132 else:
136 self.runner = QemuRunner(machine=d.getVar("MACHINE"), 133 self.runner = QemuRunner(machine=d.getVar("MACHINE"),
@@ -142,11 +139,14 @@ class QemuTarget(BaseTarget):
142 boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT")), 139 boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT")),
143 use_kvm = use_kvm, 140 use_kvm = use_kvm,
144 dump_dir = dump_dir, 141 dump_dir = dump_dir,
145 dump_host_cmds = d.getVar("testimage_dump_host"),
146 logger = logger, 142 logger = logger,
147 serial_ports = len(d.getVar("SERIAL_CONSOLES").split())) 143 tmpfsdir = d.getVar("RUNQEMU_TMPFS_DIR"),
144 serial_ports = len(d.getVar("SERIAL_CONSOLES").split()),
145 boot_patterns = boot_patterns)
148 146
149 self.target_dumper = TargetDumper(dump_target_cmds, dump_dir, self.runner) 147 self.monitor_dumper = MonitorDumper(dump_monitor_cmds, dump_dir, self.runner)
148 if (self.monitor_dumper):
149 self.monitor_dumper.create_dir("qmp")
150 150
151 def deploy(self): 151 def deploy(self):
152 bb.utils.mkdirhier(self.testdir) 152 bb.utils.mkdirhier(self.testdir)
@@ -156,7 +156,7 @@ class QemuTarget(BaseTarget):
156 os.unlink(qemuloglink) 156 os.unlink(qemuloglink)
157 os.symlink(self.qemulog, qemuloglink) 157 os.symlink(self.qemulog, qemuloglink)
158 158
159 self.logger.info("rootfs file: %s" % self.rootfs) 159 self.logger.info("rootfs file: %s" % self.rootfs)
160 self.logger.info("Qemu log file: %s" % self.qemulog) 160 self.logger.info("Qemu log file: %s" % self.qemulog)
161 super(QemuTarget, self).deploy() 161 super(QemuTarget, self).deploy()
162 162
@@ -198,7 +198,7 @@ class QemuTarget(BaseTarget):
198 self.server_ip = self.runner.server_ip 198 self.server_ip = self.runner.server_ip
199 self.connection = SSHControl(ip=self.ip, logfile=self.sshlog) 199 self.connection = SSHControl(ip=self.ip, logfile=self.sshlog)
200 else: 200 else:
201 raise RuntimError("%s - FAILED to re-start qemu - check the task log and the boot log" % self.pn) 201 raise RuntimeError("%s - FAILED to re-start qemu - check the task log and the boot log" % self.pn)
202 202
203 def run_serial(self, command, timeout=60): 203 def run_serial(self, command, timeout=60):
204 return self.runner.run_serial(command, timeout=timeout) 204 return self.runner.run_serial(command, timeout=timeout)
diff --git a/meta/lib/oeqa/utils/__init__.py b/meta/lib/oeqa/utils/__init__.py
index 6d1ec4cb99..e03f7e33bb 100644
--- a/meta/lib/oeqa/utils/__init__.py
+++ b/meta/lib/oeqa/utils/__init__.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4# Enable other layers to have modules in the same named directory 6# Enable other layers to have modules in the same named directory
@@ -88,3 +90,16 @@ def load_test_components(logger, executor):
88 "_executor_class defined." % (comp_name, comp_context)) 90 "_executor_class defined." % (comp_name, comp_context))
89 91
90 return components 92 return components
93
94def get_json_result_dir(d):
95 json_result_dir = os.path.join(d.getVar("LOG_DIR"), 'oeqa')
96 custom_json_result_dir = d.getVar("OEQA_JSON_RESULT_DIR")
97 if custom_json_result_dir:
98 json_result_dir = custom_json_result_dir
99 return json_result_dir
100
101def get_artefact_dir(d):
102 custom_json_result_dir = d.getVar("OEQA_ARTEFACT_DIR")
103 if custom_json_result_dir:
104 return custom_json_result_dir
105 return os.path.join(d.getVar("LOG_DIR"), 'oeqa-artefacts')
diff --git a/meta/lib/oeqa/utils/buildproject.py b/meta/lib/oeqa/utils/buildproject.py
index e6d80cc8dc..dfb9661868 100644
--- a/meta/lib/oeqa/utils/buildproject.py
+++ b/meta/lib/oeqa/utils/buildproject.py
@@ -18,6 +18,7 @@ class BuildProject(metaclass=ABCMeta):
18 def __init__(self, uri, foldername=None, tmpdir=None, dl_dir=None): 18 def __init__(self, uri, foldername=None, tmpdir=None, dl_dir=None):
19 self.uri = uri 19 self.uri = uri
20 self.archive = os.path.basename(uri) 20 self.archive = os.path.basename(uri)
21 self.tempdirobj = None
21 if not tmpdir: 22 if not tmpdir:
22 self.tempdirobj = tempfile.TemporaryDirectory(prefix='buildproject-') 23 self.tempdirobj = tempfile.TemporaryDirectory(prefix='buildproject-')
23 tmpdir = self.tempdirobj.name 24 tmpdir = self.tempdirobj.name
@@ -57,6 +58,8 @@ class BuildProject(metaclass=ABCMeta):
57 return self._run('cd %s; make install %s' % (self.targetdir, install_args)) 58 return self._run('cd %s; make install %s' % (self.targetdir, install_args))
58 59
59 def clean(self): 60 def clean(self):
61 if self.tempdirobj:
62 self.tempdirobj.cleanup()
60 if not self.needclean: 63 if not self.needclean:
61 return 64 return
62 self._run('rm -rf %s' % self.targetdir) 65 self._run('rm -rf %s' % self.targetdir)
diff --git a/meta/lib/oeqa/utils/commands.py b/meta/lib/oeqa/utils/commands.py
index a71c16ab14..b60a6e6c38 100644
--- a/meta/lib/oeqa/utils/commands.py
+++ b/meta/lib/oeqa/utils/commands.py
@@ -8,11 +8,8 @@
8# This module is mainly used by scripts/oe-selftest and modules under meta/oeqa/selftest 8# This module is mainly used by scripts/oe-selftest and modules under meta/oeqa/selftest
9# It provides a class and methods for running commands on the host in a convienent way for tests. 9# It provides a class and methods for running commands on the host in a convienent way for tests.
10 10
11
12
13import os 11import os
14import sys 12import sys
15import signal
16import subprocess 13import subprocess
17import threading 14import threading
18import time 15import time
@@ -21,6 +18,7 @@ from oeqa.utils import CommandError
21from oeqa.utils import ftools 18from oeqa.utils import ftools
22import re 19import re
23import contextlib 20import contextlib
21import errno
24# Export test doesn't require bb 22# Export test doesn't require bb
25try: 23try:
26 import bb 24 import bb
@@ -85,7 +83,7 @@ class Command(object):
85 except OSError as ex: 83 except OSError as ex:
86 # It's not an error when the command does not consume all 84 # It's not an error when the command does not consume all
87 # of our data. subprocess.communicate() also ignores that. 85 # of our data. subprocess.communicate() also ignores that.
88 if ex.errno != EPIPE: 86 if ex.errno != errno.EPIPE:
89 raise 87 raise
90 88
91 # We write in a separate thread because then we can read 89 # We write in a separate thread because then we can read
@@ -117,7 +115,7 @@ class Command(object):
117 else: 115 else:
118 deadline = time.time() + self.timeout 116 deadline = time.time() + self.timeout
119 for thread in self.threads: 117 for thread in self.threads:
120 timeout = deadline - time.time() 118 timeout = deadline - time.time()
121 if timeout < 0: 119 if timeout < 0:
122 timeout = 0 120 timeout = 0
123 thread.join(timeout) 121 thread.join(timeout)
@@ -168,18 +166,22 @@ class Result(object):
168 166
169 167
170def runCmd(command, ignore_status=False, timeout=None, assert_error=True, sync=True, 168def runCmd(command, ignore_status=False, timeout=None, assert_error=True, sync=True,
171 native_sysroot=None, limit_exc_output=0, output_log=None, **options): 169 native_sysroot=None, target_sys=None, limit_exc_output=0, output_log=None, **options):
172 result = Result() 170 result = Result()
173 171
174 if native_sysroot: 172 if native_sysroot:
175 extra_paths = "%s/sbin:%s/usr/sbin:%s/usr/bin" % \ 173 new_env = dict(options.get('env', os.environ))
176 (native_sysroot, native_sysroot, native_sysroot) 174 paths = new_env["PATH"].split(":")
177 extra_libpaths = "%s/lib:%s/usr/lib" % \ 175 paths = [
178 (native_sysroot, native_sysroot) 176 os.path.join(native_sysroot, "bin"),
179 nenv = dict(options.get('env', os.environ)) 177 os.path.join(native_sysroot, "sbin"),
180 nenv['PATH'] = extra_paths + ':' + nenv.get('PATH', '') 178 os.path.join(native_sysroot, "usr", "bin"),
181 nenv['LD_LIBRARY_PATH'] = extra_libpaths + ':' + nenv.get('LD_LIBRARY_PATH', '') 179 os.path.join(native_sysroot, "usr", "sbin"),
182 options['env'] = nenv 180 ] + paths
181 if target_sys:
182 paths = [os.path.join(native_sysroot, "usr", "bin", target_sys)] + paths
183 new_env["PATH"] = ":".join(paths)
184 options['env'] = new_env
183 185
184 cmd = Command(command, timeout=timeout, output_log=output_log, **options) 186 cmd = Command(command, timeout=timeout, output_log=output_log, **options)
185 cmd.run() 187 cmd.run()
@@ -201,6 +203,8 @@ def runCmd(command, ignore_status=False, timeout=None, assert_error=True, sync=T
201 203
202 if result.status and not ignore_status: 204 if result.status and not ignore_status:
203 exc_output = result.output 205 exc_output = result.output
206 if result.error:
207 exc_output = exc_output + result.error
204 if limit_exc_output > 0: 208 if limit_exc_output > 0:
205 split = result.output.splitlines() 209 split = result.output.splitlines()
206 if len(split) > limit_exc_output: 210 if len(split) > limit_exc_output:
@@ -281,10 +285,25 @@ def get_bb_vars(variables=None, target=None, postconfig=None):
281 return values 285 return values
282 286
283def get_bb_var(var, target=None, postconfig=None): 287def get_bb_var(var, target=None, postconfig=None):
284 return get_bb_vars([var], target, postconfig)[var] 288 if postconfig:
285 289 return bitbake("-e %s" % target or "", postconfig=postconfig).output
286def get_test_layer(): 290 else:
287 layers = get_bb_var("BBLAYERS").split() 291 # Fast-path for the non-postconfig case
292 cmd = ["bitbake-getvar", "--quiet", "--value", var]
293 if target:
294 cmd.extend(["--recipe", target])
295 try:
296 return subprocess.run(cmd, check=True, text=True, stdout=subprocess.PIPE).stdout.strip()
297 except subprocess.CalledProcessError as e:
298 # We need to return None not the empty string if the variable hasn't been set.
299 if e.returncode == 1:
300 return None
301 raise
302
303def get_test_layer(bblayers=None):
304 if bblayers is None:
305 bblayers = get_bb_var("BBLAYERS")
306 layers = bblayers.split()
288 testlayer = None 307 testlayer = None
289 for l in layers: 308 for l in layers:
290 if '~' in l: 309 if '~' in l:
@@ -296,6 +315,7 @@ def get_test_layer():
296 315
297def create_temp_layer(templayerdir, templayername, priority=999, recipepathspec='recipes-*/*'): 316def create_temp_layer(templayerdir, templayername, priority=999, recipepathspec='recipes-*/*'):
298 os.makedirs(os.path.join(templayerdir, 'conf')) 317 os.makedirs(os.path.join(templayerdir, 'conf'))
318 corenames = get_bb_var('LAYERSERIES_CORENAMES')
299 with open(os.path.join(templayerdir, 'conf', 'layer.conf'), 'w') as f: 319 with open(os.path.join(templayerdir, 'conf', 'layer.conf'), 'w') as f:
300 f.write('BBPATH .= ":${LAYERDIR}"\n') 320 f.write('BBPATH .= ":${LAYERDIR}"\n')
301 f.write('BBFILES += "${LAYERDIR}/%s/*.bb \\' % recipepathspec) 321 f.write('BBFILES += "${LAYERDIR}/%s/*.bb \\' % recipepathspec)
@@ -304,12 +324,29 @@ def create_temp_layer(templayerdir, templayername, priority=999, recipepathspec=
304 f.write('BBFILE_PATTERN_%s = "^${LAYERDIR}/"\n' % templayername) 324 f.write('BBFILE_PATTERN_%s = "^${LAYERDIR}/"\n' % templayername)
305 f.write('BBFILE_PRIORITY_%s = "%d"\n' % (templayername, priority)) 325 f.write('BBFILE_PRIORITY_%s = "%d"\n' % (templayername, priority))
306 f.write('BBFILE_PATTERN_IGNORE_EMPTY_%s = "1"\n' % templayername) 326 f.write('BBFILE_PATTERN_IGNORE_EMPTY_%s = "1"\n' % templayername)
307 f.write('LAYERSERIES_COMPAT_%s = "${LAYERSERIES_COMPAT_core}"\n' % templayername) 327 f.write('LAYERSERIES_COMPAT_%s = "%s"\n' % (templayername, corenames))
308 328
309@contextlib.contextmanager 329@contextlib.contextmanager
310def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None, launch_cmd=None, qemuparams=None, overrides={}, discard_writes=True): 330def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None, launch_cmd=None, qemuparams=None, overrides={}, boot_patterns = {}, discard_writes=True):
311 """ 331 """
312 launch_cmd means directly run the command, don't need set rootfs or env vars. 332 Starts a context manager for a 'oeqa.targetcontrol.QemuTarget' resource.
333 The underlying Qemu will be booted into a shell when the generator yields
334 and stopped when the 'with' block exits.
335
336 Usage:
337
338 with runqemu('core-image-minimal') as qemu:
339 qemu.run_serial('cat /proc/cpuinfo')
340
341 Args:
342 pn (str): (image) recipe to run on
343 ssh (boolean): whether or not to enable SSH (network access)
344 runqemuparams (str): space-separated list of params to pass to 'runqemu' script (like 'nographics', 'ovmf', etc.)
345 image_fstype (str): IMAGE_FSTYPE to use
346 launch_cmd (str): directly run this command and bypass automatic runqemu parameter generation
347 overrides (dict): dict of "'<bitbake-variable>': value" pairs that allows overriding bitbake variables
348 boot_patterns (dict): dict of "'<pattern-name>': value" pairs to override default boot patterns, e.g. when not booting Linux
349 discard_writes (boolean): enables qemu -snapshot feature to prevent modifying original image
313 """ 350 """
314 351
315 import bb.tinfoil 352 import bb.tinfoil
@@ -340,7 +377,7 @@ def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None, launch_cmd=None,
340 377
341 logdir = recipedata.getVar("TEST_LOG_DIR") 378 logdir = recipedata.getVar("TEST_LOG_DIR")
342 379
343 qemu = oeqa.targetcontrol.QemuTarget(recipedata, targetlogger, image_fstype) 380 qemu = oeqa.targetcontrol.QemuTarget(recipedata, targetlogger, image_fstype, boot_patterns=boot_patterns)
344 finally: 381 finally:
345 # We need to shut down tinfoil early here in case we actually want 382 # We need to shut down tinfoil early here in case we actually want
346 # to run tinfoil-using utilities with the running QEMU instance. 383 # to run tinfoil-using utilities with the running QEMU instance.
diff --git a/meta/lib/oeqa/utils/decorators.py b/meta/lib/oeqa/utils/decorators.py
index aabf4110cb..ea90164e5e 100644
--- a/meta/lib/oeqa/utils/decorators.py
+++ b/meta/lib/oeqa/utils/decorators.py
@@ -16,91 +16,6 @@ import threading
16import signal 16import signal
17from functools import wraps 17from functools import wraps
18 18
19#get the "result" object from one of the upper frames provided that one of these upper frames is a unittest.case frame
20class getResults(object):
21 def __init__(self):
22 #dynamically determine the unittest.case frame and use it to get the name of the test method
23 ident = threading.current_thread().ident
24 upperf = sys._current_frames()[ident]
25 while (upperf.f_globals['__name__'] != 'unittest.case'):
26 upperf = upperf.f_back
27
28 def handleList(items):
29 ret = []
30 # items is a list of tuples, (test, failure) or (_ErrorHandler(), Exception())
31 for i in items:
32 s = i[0].id()
33 #Handle the _ErrorHolder objects from skipModule failures
34 if "setUpModule (" in s:
35 ret.append(s.replace("setUpModule (", "").replace(")",""))
36 else:
37 ret.append(s)
38 # Append also the test without the full path
39 testname = s.split('.')[-1]
40 if testname:
41 ret.append(testname)
42 return ret
43 self.faillist = handleList(upperf.f_locals['result'].failures)
44 self.errorlist = handleList(upperf.f_locals['result'].errors)
45 self.skiplist = handleList(upperf.f_locals['result'].skipped)
46
47 def getFailList(self):
48 return self.faillist
49
50 def getErrorList(self):
51 return self.errorlist
52
53 def getSkipList(self):
54 return self.skiplist
55
56class skipIfFailure(object):
57
58 def __init__(self,testcase):
59 self.testcase = testcase
60
61 def __call__(self,f):
62 @wraps(f)
63 def wrapped_f(*args, **kwargs):
64 res = getResults()
65 if self.testcase in (res.getFailList() or res.getErrorList()):
66 raise unittest.SkipTest("Testcase dependency not met: %s" % self.testcase)
67 return f(*args, **kwargs)
68 wrapped_f.__name__ = f.__name__
69 return wrapped_f
70
71class skipIfSkipped(object):
72
73 def __init__(self,testcase):
74 self.testcase = testcase
75
76 def __call__(self,f):
77 @wraps(f)
78 def wrapped_f(*args, **kwargs):
79 res = getResults()
80 if self.testcase in res.getSkipList():
81 raise unittest.SkipTest("Testcase dependency not met: %s" % self.testcase)
82 return f(*args, **kwargs)
83 wrapped_f.__name__ = f.__name__
84 return wrapped_f
85
86class skipUnlessPassed(object):
87
88 def __init__(self,testcase):
89 self.testcase = testcase
90
91 def __call__(self,f):
92 @wraps(f)
93 def wrapped_f(*args, **kwargs):
94 res = getResults()
95 if self.testcase in res.getSkipList() or \
96 self.testcase in res.getFailList() or \
97 self.testcase in res.getErrorList():
98 raise unittest.SkipTest("Testcase dependency not met: %s" % self.testcase)
99 return f(*args, **kwargs)
100 wrapped_f.__name__ = f.__name__
101 wrapped_f._depends_on = self.testcase
102 return wrapped_f
103
104class testcase(object): 19class testcase(object):
105 def __init__(self, test_case): 20 def __init__(self, test_case):
106 self.test_case = test_case 21 self.test_case = test_case
diff --git a/meta/lib/oeqa/utils/dump.py b/meta/lib/oeqa/utils/dump.py
index 09a44329e0..d4d271369f 100644
--- a/meta/lib/oeqa/utils/dump.py
+++ b/meta/lib/oeqa/utils/dump.py
@@ -1,9 +1,12 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import os 7import os
6import sys 8import sys
9import json
7import errno 10import errno
8import datetime 11import datetime
9import itertools 12import itertools
@@ -17,6 +20,7 @@ class BaseDumper(object):
17 # Some testing doesn't inherit testimage, so it is needed 20 # Some testing doesn't inherit testimage, so it is needed
18 # to set some defaults. 21 # to set some defaults.
19 self.parent_dir = parent_dir 22 self.parent_dir = parent_dir
23 self.dump_dir = parent_dir
20 dft_cmds = """ top -bn1 24 dft_cmds = """ top -bn1
21 iostat -x -z -N -d -p ALL 20 2 25 iostat -x -z -N -d -p ALL 20 2
22 ps -ef 26 ps -ef
@@ -46,11 +50,11 @@ class BaseDumper(object):
46 raise err 50 raise err
47 self.dump_dir = dump_dir 51 self.dump_dir = dump_dir
48 52
49 def _write_dump(self, command, output): 53 def _construct_filename(self, command):
50 if isinstance(self, HostDumper): 54 if isinstance(self, TargetDumper):
51 prefix = "host"
52 elif isinstance(self, TargetDumper):
53 prefix = "target" 55 prefix = "target"
56 elif isinstance(self, MonitorDumper):
57 prefix = "qmp"
54 else: 58 else:
55 prefix = "unknown" 59 prefix = "unknown"
56 for i in itertools.count(): 60 for i in itertools.count():
@@ -58,41 +62,80 @@ class BaseDumper(object):
58 fullname = os.path.join(self.dump_dir, filename) 62 fullname = os.path.join(self.dump_dir, filename)
59 if not os.path.exists(fullname): 63 if not os.path.exists(fullname):
60 break 64 break
61 with open(fullname, 'w') as dump_file: 65 return fullname
62 dump_file.write(output)
63
64
65class HostDumper(BaseDumper):
66 """ Class to get dumps from the host running the tests """
67
68 def __init__(self, cmds, parent_dir):
69 super(HostDumper, self).__init__(cmds, parent_dir)
70 66
71 def dump_host(self, dump_dir=""): 67 def _write_dump(self, command, output):
72 if dump_dir: 68 fullname = self._construct_filename(command)
73 self.dump_dir = dump_dir 69 os.makedirs(os.path.dirname(fullname), exist_ok=True)
74 env = os.environ.copy() 70 if isinstance(self, MonitorDumper):
75 env['PATH'] = '/usr/sbin:/sbin:/usr/bin:/bin' 71 with open(fullname, 'w') as json_file:
76 env['COLUMNS'] = '9999' 72 json.dump(output, json_file, indent=4)
77 for cmd in self.cmds: 73 else:
78 result = runCmd(cmd, ignore_status=True, env=env) 74 with open(fullname, 'w') as dump_file:
79 self._write_dump(cmd.split()[0], result.output) 75 dump_file.write(output)
80 76
81class TargetDumper(BaseDumper): 77class TargetDumper(BaseDumper):
82 """ Class to get dumps from target, it only works with QemuRunner """ 78 """ Class to get dumps from target, it only works with QemuRunner.
79 Will give up permanently after 5 errors from running commands over
80 serial console. This helps to end testing when target is really dead, hanging
81 or unresponsive.
82 """
83 83
84 def __init__(self, cmds, parent_dir, runner): 84 def __init__(self, cmds, parent_dir, runner):
85 super(TargetDumper, self).__init__(cmds, parent_dir) 85 super(TargetDumper, self).__init__(cmds, parent_dir)
86 self.runner = runner 86 self.runner = runner
87 self.errors = 0
87 88
88 def dump_target(self, dump_dir=""): 89 def dump_target(self, dump_dir=""):
90 if self.errors >= 5:
91 print("Too many errors when dumping data from target, assuming it is dead! Will not dump data anymore!")
92 return
89 if dump_dir: 93 if dump_dir:
90 self.dump_dir = dump_dir 94 self.dump_dir = dump_dir
91 for cmd in self.cmds: 95 for cmd in self.cmds:
92 # We can continue with the testing if serial commands fail 96 # We can continue with the testing if serial commands fail
93 try: 97 try:
94 (status, output) = self.runner.run_serial(cmd) 98 (status, output) = self.runner.run_serial(cmd)
99 if status == 0:
100 self.errors = self.errors + 1
95 self._write_dump(cmd.split()[0], output) 101 self._write_dump(cmd.split()[0], output)
96 except: 102 except:
103 self.errors = self.errors + 1
97 print("Tried to dump info from target but " 104 print("Tried to dump info from target but "
98 "serial console failed") 105 "serial console failed")
106 print("Failed CMD: %s" % (cmd))
107
108class MonitorDumper(BaseDumper):
109 """ Class to get dumps via the Qemu Monitor, it only works with QemuRunner
110 Will stop completely if there are more than 5 errors when dumping monitor data.
111 This helps to end testing when target is really dead, hanging or unresponsive.
112 """
113
114 def __init__(self, cmds, parent_dir, runner):
115 super(MonitorDumper, self).__init__(cmds, parent_dir)
116 self.runner = runner
117 self.errors = 0
118
119 def dump_monitor(self, dump_dir=""):
120 if self.runner is None:
121 return
122 if dump_dir:
123 self.dump_dir = dump_dir
124 if self.errors >= 5:
125 print("Too many errors when dumping data from qemu monitor, assuming it is dead! Will not dump data anymore!")
126 return
127 for cmd in self.cmds:
128 cmd_name = cmd.split()[0]
129 try:
130 if len(cmd.split()) > 1:
131 cmd_args = cmd.split()[1]
132 if "%s" in cmd_args:
133 filename = self._construct_filename(cmd_name)
134 cmd_data = json.loads(cmd_args % (filename))
135 output = self.runner.run_monitor(cmd_name, cmd_data)
136 else:
137 output = self.runner.run_monitor(cmd_name)
138 self._write_dump(cmd_name, output)
139 except Exception as e:
140 self.errors = self.errors + 1
141 print("Failed to dump QMP CMD: %s with\nException: %s" % (cmd_name, e))
diff --git a/meta/lib/oeqa/utils/ftools.py b/meta/lib/oeqa/utils/ftools.py
index 3093419cc7..a50aaa84c2 100644
--- a/meta/lib/oeqa/utils/ftools.py
+++ b/meta/lib/oeqa/utils/ftools.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/utils/gitarchive.py b/meta/lib/oeqa/utils/gitarchive.py
index 6e8040eb5c..7e1d505748 100644
--- a/meta/lib/oeqa/utils/gitarchive.py
+++ b/meta/lib/oeqa/utils/gitarchive.py
@@ -67,7 +67,7 @@ def git_commit_data(repo, data_dir, branch, message, exclude, notes, log):
67 67
68 # Remove files that are excluded 68 # Remove files that are excluded
69 if exclude: 69 if exclude:
70 repo.run_cmd(['rm', '--cached'] + [f for f in exclude], env_update) 70 repo.run_cmd(['rm', '--cached', '--ignore-unmatch'] + [f for f in exclude], env_update)
71 71
72 tree = repo.run_cmd('write-tree', env_update) 72 tree = repo.run_cmd('write-tree', env_update)
73 73
@@ -100,9 +100,44 @@ def git_commit_data(repo, data_dir, branch, message, exclude, notes, log):
100 if os.path.exists(tmp_index): 100 if os.path.exists(tmp_index):
101 os.unlink(tmp_index) 101 os.unlink(tmp_index)
102 102
103def get_tags(repo, log, pattern=None, url=None):
104 """ Fetch remote tags from current repository
105
106 A pattern can be provided to filter returned tags list
107 An URL can be provided if local repository has no valid remote configured
108 """
109
110 base_cmd = ['ls-remote', '--refs', '--tags', '-q']
111 cmd = base_cmd.copy()
112
113 # First try to fetch tags from repository configured remote
114 cmd.append('origin')
115 if pattern:
116 cmd.append("refs/tags/"+pattern)
117 try:
118 tags_refs = repo.run_cmd(cmd)
119 tags = ["".join(d.split()[1].split('/', 2)[2:]) for d in tags_refs.splitlines()]
120 except GitError as e:
121 # If it fails, retry with repository url if one is provided
122 if url:
123 log.info("No remote repository configured, use provided url")
124 cmd = base_cmd.copy()
125 cmd.append(url)
126 if pattern:
127 cmd.append(pattern)
128 tags_refs = repo.run_cmd(cmd)
129 tags = ["".join(d.split()[1].split('/', 2)[2:]) for d in tags_refs.splitlines()]
130 else:
131 log.info("Read local tags only, some remote tags may be missed")
132 cmd = ["tag"]
133 if pattern:
134 cmd += ["-l", pattern]
135 tags = repo.run_cmd(cmd).splitlines()
136
137 return tags
103 138
104def expand_tag_strings(repo, name_pattern, msg_subj_pattern, msg_body_pattern, 139def expand_tag_strings(repo, name_pattern, msg_subj_pattern, msg_body_pattern,
105 keywords): 140 url, log, keywords):
106 """Generate tag name and message, with support for running id number""" 141 """Generate tag name and message, with support for running id number"""
107 keyws = keywords.copy() 142 keyws = keywords.copy()
108 # Tag number is handled specially: if not defined, we autoincrement it 143 # Tag number is handled specially: if not defined, we autoincrement it
@@ -111,12 +146,12 @@ def expand_tag_strings(repo, name_pattern, msg_subj_pattern, msg_body_pattern,
111 keyws['tag_number'] = '{tag_number}' 146 keyws['tag_number'] = '{tag_number}'
112 tag_re = format_str(name_pattern, keyws) 147 tag_re = format_str(name_pattern, keyws)
113 # Replace parentheses for proper regex matching 148 # Replace parentheses for proper regex matching
114 tag_re = tag_re.replace('(', '\(').replace(')', '\)') + '$' 149 tag_re = tag_re.replace('(', r'\(').replace(')', r'\)') + '$'
115 # Inject regex group pattern for 'tag_number' 150 # Inject regex group pattern for 'tag_number'
116 tag_re = tag_re.format(tag_number='(?P<tag_number>[0-9]{1,5})') 151 tag_re = tag_re.format(tag_number='(?P<tag_number>[0-9]{1,5})')
117 152
118 keyws['tag_number'] = 0 153 keyws['tag_number'] = 0
119 for existing_tag in repo.run_cmd('tag').splitlines(): 154 for existing_tag in get_tags(repo, log, url=url):
120 match = re.match(tag_re, existing_tag) 155 match = re.match(tag_re, existing_tag)
121 156
122 if match and int(match.group('tag_number')) >= keyws['tag_number']: 157 if match and int(match.group('tag_number')) >= keyws['tag_number']:
@@ -143,7 +178,8 @@ def gitarchive(data_dir, git_dir, no_create, bare, commit_msg_subject, commit_ms
143 if not no_tag and tagname: 178 if not no_tag and tagname:
144 tag_name, tag_msg = expand_tag_strings(data_repo, tagname, 179 tag_name, tag_msg = expand_tag_strings(data_repo, tagname,
145 tag_msg_subject, 180 tag_msg_subject,
146 tag_msg_body, keywords) 181 tag_msg_body,
182 push, log, keywords)
147 183
148 # Commit data 184 # Commit data
149 commit = git_commit_data(data_repo, data_dir, branch_name, 185 commit = git_commit_data(data_repo, data_dir, branch_name,
@@ -166,6 +202,8 @@ def gitarchive(data_dir, git_dir, no_create, bare, commit_msg_subject, commit_ms
166 log.info("Pushing data to remote") 202 log.info("Pushing data to remote")
167 data_repo.run_cmd(cmd) 203 data_repo.run_cmd(cmd)
168 204
205 return tag_name
206
169# Container class for tester revisions 207# Container class for tester revisions
170TestedRev = namedtuple('TestedRev', 'commit commit_number tags') 208TestedRev = namedtuple('TestedRev', 'commit commit_number tags')
171 209
@@ -181,7 +219,7 @@ def get_test_runs(log, repo, tag_name, **kwargs):
181 219
182 # Get a list of all matching tags 220 # Get a list of all matching tags
183 tag_pattern = tag_name.format(**str_fields) 221 tag_pattern = tag_name.format(**str_fields)
184 tags = repo.run_cmd(['tag', '-l', tag_pattern]).splitlines() 222 tags = get_tags(repo, log, pattern=tag_pattern)
185 log.debug("Found %d tags matching pattern '%s'", len(tags), tag_pattern) 223 log.debug("Found %d tags matching pattern '%s'", len(tags), tag_pattern)
186 224
187 # Parse undefined fields from tag names 225 # Parse undefined fields from tag names
@@ -199,6 +237,8 @@ def get_test_runs(log, repo, tag_name, **kwargs):
199 revs = [] 237 revs = []
200 for tag in tags: 238 for tag in tags:
201 m = tag_re.match(tag) 239 m = tag_re.match(tag)
240 if not m:
241 continue
202 groups = m.groupdict() 242 groups = m.groupdict()
203 revs.append([groups[f] for f in undef_fields] + [tag]) 243 revs.append([groups[f] for f in undef_fields] + [tag])
204 244
@@ -219,7 +259,15 @@ def get_test_revs(log, repo, tag_name, **kwargs):
219 if not commit in revs: 259 if not commit in revs:
220 revs[commit] = TestedRev(commit, commit_num, [tag]) 260 revs[commit] = TestedRev(commit, commit_num, [tag])
221 else: 261 else:
222 assert commit_num == revs[commit].commit_number, "Commit numbers do not match" 262 if commit_num != revs[commit].commit_number:
263 # Historically we have incorrect commit counts of '1' in the repo so fix these up
264 if int(revs[commit].commit_number) < 5:
265 tags = revs[commit].tags
266 revs[commit] = TestedRev(commit, commit_num, [tags])
267 elif int(commit_num) < 5:
268 pass
269 else:
270 sys.exit("Commit numbers for commit %s don't match (%s vs %s)" % (commit, commit_num, revs[commit].commit_number))
223 revs[commit].tags.append(tag) 271 revs[commit].tags.append(tag)
224 272
225 # Return in sorted table 273 # Return in sorted table
diff --git a/meta/lib/oeqa/utils/httpserver.py b/meta/lib/oeqa/utils/httpserver.py
index 58d3c3b3f8..80752c1377 100644
--- a/meta/lib/oeqa/utils/httpserver.py
+++ b/meta/lib/oeqa/utils/httpserver.py
@@ -1,11 +1,13 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import http.server 7import http.server
8import logging
6import multiprocessing 9import multiprocessing
7import os 10import os
8import traceback
9import signal 11import signal
10from socketserver import ThreadingMixIn 12from socketserver import ThreadingMixIn
11 13
@@ -13,20 +15,24 @@ class HTTPServer(ThreadingMixIn, http.server.HTTPServer):
13 15
14 def server_start(self, root_dir, logger): 16 def server_start(self, root_dir, logger):
15 os.chdir(root_dir) 17 os.chdir(root_dir)
18 self.logger = logger
16 self.serve_forever() 19 self.serve_forever()
17 20
18class HTTPRequestHandler(http.server.SimpleHTTPRequestHandler): 21class HTTPRequestHandler(http.server.SimpleHTTPRequestHandler):
19 22
20 def log_message(self, format_str, *args): 23 def log_message(self, format_str, *args):
21 pass 24 self.server.logger.info(format_str, *args)
22 25
23class HTTPService(object): 26class HTTPService:
24 27
25 def __init__(self, root_dir, host='', port=0, logger=None): 28 def __init__(self, root_dir, host='', port=0, logger=None):
26 self.root_dir = root_dir 29 self.root_dir = root_dir
27 self.host = host 30 self.host = host
28 self.port = port 31 self.port = port
29 self.logger = logger 32 if logger:
33 self.logger = logger.getChild("HTTPService")
34 else:
35 self.logger = logging.getLogger("HTTPService")
30 36
31 def start(self): 37 def start(self):
32 if not os.path.exists(self.root_dir): 38 if not os.path.exists(self.root_dir):
@@ -38,6 +44,12 @@ class HTTPService(object):
38 self.port = self.server.server_port 44 self.port = self.server.server_port
39 self.process = multiprocessing.Process(target=self.server.server_start, args=[self.root_dir, self.logger]) 45 self.process = multiprocessing.Process(target=self.server.server_start, args=[self.root_dir, self.logger])
40 46
47 def handle_error(self, request, client_address):
48 import traceback
49 exception = traceback.format_exc()
50 self.logger.warn("Exception when handling %s: %s" % (request, exception))
51 self.server.handle_error = handle_error
52
41 # The signal handler from testimage.bbclass can cause deadlocks here 53 # The signal handler from testimage.bbclass can cause deadlocks here
42 # if the HTTPServer is terminated before it can restore the standard 54 # if the HTTPServer is terminated before it can restore the standard
43 #signal behaviour 55 #signal behaviour
@@ -47,7 +59,7 @@ class HTTPService(object):
47 signal.signal(signal.SIGTERM, orig) 59 signal.signal(signal.SIGTERM, orig)
48 60
49 if self.logger: 61 if self.logger:
50 self.logger.info("Started HTTPService on %s:%s" % (self.host, self.port)) 62 self.logger.info("Started HTTPService for %s on %s:%s" % (self.root_dir, self.host, self.port))
51 63
52 64
53 def stop(self): 65 def stop(self):
@@ -59,3 +71,10 @@ class HTTPService(object):
59 if self.logger: 71 if self.logger:
60 self.logger.info("Stopped HTTPService on %s:%s" % (self.host, self.port)) 72 self.logger.info("Stopped HTTPService on %s:%s" % (self.host, self.port))
61 73
74if __name__ == "__main__":
75 import sys, logging
76
77 logger = logging.getLogger(__name__)
78 logging.basicConfig(level=logging.DEBUG)
79 httpd = HTTPService(sys.argv[1], port=8888, logger=logger)
80 httpd.start()
diff --git a/meta/lib/oeqa/utils/logparser.py b/meta/lib/oeqa/utils/logparser.py
index 60e16d500e..496d9e0c90 100644
--- a/meta/lib/oeqa/utils/logparser.py
+++ b/meta/lib/oeqa/utils/logparser.py
@@ -1,8 +1,10 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import sys 7import enum
6import os 8import os
7import re 9import re
8 10
@@ -42,6 +44,8 @@ class PtestParser(object):
42 result = section_regex['begin'].search(line) 44 result = section_regex['begin'].search(line)
43 if result: 45 if result:
44 current_section['name'] = result.group(1) 46 current_section['name'] = result.group(1)
47 if current_section['name'] not in self.results:
48 self.results[current_section['name']] = {}
45 continue 49 continue
46 50
47 result = section_regex['end'].search(line) 51 result = section_regex['end'].search(line)
@@ -73,9 +77,10 @@ class PtestParser(object):
73 for t in test_regex: 77 for t in test_regex:
74 result = test_regex[t].search(line) 78 result = test_regex[t].search(line)
75 if result: 79 if result:
76 if current_section['name'] not in self.results: 80 try:
77 self.results[current_section['name']] = {} 81 self.results[current_section['name']][result.group(1).strip()] = t
78 self.results[current_section['name']][result.group(1).strip()] = t 82 except KeyError:
83 bb.warn("Result with no section: %s - %s" % (t, result.group(1).strip()))
79 84
80 # Python performance for repeatedly joining long strings is poor, do it all at once at the end. 85 # Python performance for repeatedly joining long strings is poor, do it all at once at the end.
81 # For 2.1 million lines in a log this reduces 18 hours to 12s. 86 # For 2.1 million lines in a log this reduces 18 hours to 12s.
@@ -101,30 +106,48 @@ class PtestParser(object):
101 f.write(status + ": " + test_name + "\n") 106 f.write(status + ": " + test_name + "\n")
102 107
103 108
104# ltp log parsing 109class LtpParser:
105class LtpParser(object): 110 """
106 def __init__(self): 111 Parse the machine-readable LTP log output into a ptest-friendly data structure.
107 self.results = {} 112 """
108 self.section = {'duration': "", 'log': ""}
109
110 def parse(self, logfile): 113 def parse(self, logfile):
111 test_regex = {} 114 results = {}
112 test_regex['PASSED'] = re.compile(r"PASS") 115 # Aaccumulate the duration here but as the log rounds quick tests down
113 test_regex['FAILED'] = re.compile(r"FAIL") 116 # to 0 seconds this is very much a lower bound. The caller can replace
114 test_regex['SKIPPED'] = re.compile(r"SKIP") 117 # the value.
115 118 section = {"duration": 0, "log": ""}
116 with open(logfile, errors='replace') as f: 119
120 class LtpExitCode(enum.IntEnum):
121 # Exit codes as defined in ltp/include/tst_res_flags.h
122 TPASS = 0 # Test passed flag
123 TFAIL = 1 # Test failed flag
124 TBROK = 2 # Test broken flag
125 TWARN = 4 # Test warning flag
126 TINFO = 16 # Test information flag
127 TCONF = 32 # Test not appropriate for configuration flag
128
129 with open(logfile, errors="replace") as f:
130 # Lines look like this:
131 # tag=cfs_bandwidth01 stime=1689762564 dur=0 exit=exited stat=32 core=no cu=0 cs=0
117 for line in f: 132 for line in f:
118 for t in test_regex: 133 if not line.startswith("tag="):
119 result = test_regex[t].search(line) 134 continue
120 if result:
121 self.results[line.split()[0].strip()] = t
122 135
123 for test in self.results: 136 values = dict(s.split("=") for s in line.strip().split())
124 result = self.results[test]
125 self.section['log'] = self.section['log'] + ("%s: %s\n" % (result.strip()[:-2], test.strip()))
126 137
127 return self.results, self.section 138 section["duration"] += int(values["dur"])
139 exitcode = int(values["stat"])
140 if values["exit"] == "exited" and exitcode == LtpExitCode.TCONF:
141 # Exited normally with the "invalid configuration" code
142 results[values["tag"]] = "SKIPPED"
143 elif exitcode == LtpExitCode.TPASS:
144 # Successful exit
145 results[values["tag"]] = "PASSED"
146 else:
147 # Other exit
148 results[values["tag"]] = "FAILED"
149
150 return results, section
128 151
129 152
130# ltp Compliance log parsing 153# ltp Compliance log parsing
@@ -135,30 +158,27 @@ class LtpComplianceParser(object):
135 158
136 def parse(self, logfile): 159 def parse(self, logfile):
137 test_regex = {} 160 test_regex = {}
138 test_regex['PASSED'] = re.compile(r"^PASS") 161 test_regex['FAILED'] = re.compile(r"FAIL")
139 test_regex['FAILED'] = re.compile(r"^FAIL")
140 test_regex['SKIPPED'] = re.compile(r"(?:UNTESTED)|(?:UNSUPPORTED)")
141 162
142 section_regex = {} 163 section_regex = {}
143 section_regex['test'] = re.compile(r"^Testing") 164 section_regex['test'] = re.compile(r"^Executing")
144 165
145 with open(logfile, errors='replace') as f: 166 with open(logfile, errors='replace') as f:
167 name = logfile
168 result = "PASSED"
146 for line in f: 169 for line in f:
147 result = section_regex['test'].search(line) 170 regex_result = section_regex['test'].search(line)
148 if result: 171 if regex_result:
149 self.name = "" 172 name = line.split()[1].strip()
150 self.name = line.split()[1].strip()
151 self.results[self.name] = "PASSED"
152 failed = 0
153 173
154 failed_result = test_regex['FAILED'].search(line) 174 regex_result = test_regex['FAILED'].search(line)
155 if failed_result: 175 if regex_result:
156 failed = line.split()[1].strip() 176 result = "FAILED"
157 if int(failed) > 0: 177 self.results[name] = result
158 self.results[self.name] = "FAILED"
159 178
160 for test in self.results: 179 for test in self.results:
161 result = self.results[test] 180 result = self.results[test]
181 print (self.results)
162 self.section['log'] = self.section['log'] + ("%s: %s\n" % (result.strip()[:-2], test.strip())) 182 self.section['log'] = self.section['log'] + ("%s: %s\n" % (result.strip()[:-2], test.strip()))
163 183
164 return self.results, self.section 184 return self.results, self.section
diff --git a/meta/lib/oeqa/utils/metadata.py b/meta/lib/oeqa/utils/metadata.py
index 8013aa684d..b320df67e0 100644
--- a/meta/lib/oeqa/utils/metadata.py
+++ b/meta/lib/oeqa/utils/metadata.py
@@ -27,9 +27,9 @@ def metadata_from_bb():
27 data_dict = get_bb_vars() 27 data_dict = get_bb_vars()
28 28
29 # Distro information 29 # Distro information
30 info_dict['distro'] = {'id': data_dict['DISTRO'], 30 info_dict['distro'] = {'id': data_dict.get('DISTRO', 'NODISTRO'),
31 'version_id': data_dict['DISTRO_VERSION'], 31 'version_id': data_dict.get('DISTRO_VERSION', 'NO_DISTRO_VERSION'),
32 'pretty_name': '%s %s' % (data_dict['DISTRO'], data_dict['DISTRO_VERSION'])} 32 'pretty_name': '%s %s' % (data_dict.get('DISTRO', 'NODISTRO'), data_dict.get('DISTRO_VERSION', 'NO_DISTRO_VERSION'))}
33 33
34 # Host distro information 34 # Host distro information
35 os_release = get_os_release() 35 os_release = get_os_release()
@@ -76,6 +76,10 @@ def git_rev_info(path):
76 info['commit_count'] = int(subprocess.check_output(["git", "rev-list", "--count", "HEAD"], cwd=path).decode('utf-8').strip()) 76 info['commit_count'] = int(subprocess.check_output(["git", "rev-list", "--count", "HEAD"], cwd=path).decode('utf-8').strip())
77 except subprocess.CalledProcessError: 77 except subprocess.CalledProcessError:
78 pass 78 pass
79 try:
80 info['commit_time'] = int(subprocess.check_output(["git", "show", "--no-patch", "--format=%ct", "HEAD"], cwd=path).decode('utf-8').strip())
81 except subprocess.CalledProcessError:
82 pass
79 return info 83 return info
80 try: 84 try:
81 repo = Repo(path, search_parent_directories=True) 85 repo = Repo(path, search_parent_directories=True)
@@ -83,6 +87,7 @@ def git_rev_info(path):
83 return info 87 return info
84 info['commit'] = repo.head.commit.hexsha 88 info['commit'] = repo.head.commit.hexsha
85 info['commit_count'] = repo.head.commit.count() 89 info['commit_count'] = repo.head.commit.count()
90 info['commit_time'] = repo.head.commit.committed_date
86 try: 91 try:
87 info['branch'] = repo.active_branch.name 92 info['branch'] = repo.active_branch.name
88 except TypeError: 93 except TypeError:
diff --git a/meta/lib/oeqa/utils/network.py b/meta/lib/oeqa/utils/network.py
index 59d01723a1..da4ffda9a9 100644
--- a/meta/lib/oeqa/utils/network.py
+++ b/meta/lib/oeqa/utils/network.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/utils/nfs.py b/meta/lib/oeqa/utils/nfs.py
index a37686c914..903469bfee 100644
--- a/meta/lib/oeqa/utils/nfs.py
+++ b/meta/lib/oeqa/utils/nfs.py
@@ -1,4 +1,8 @@
1#
2# Copyright OpenEmbedded Contributors
3#
1# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
5#
2import os 6import os
3import sys 7import sys
4import tempfile 8import tempfile
@@ -8,7 +12,7 @@ from oeqa.utils.commands import bitbake, get_bb_var, Command
8from oeqa.utils.network import get_free_port 12from oeqa.utils.network import get_free_port
9 13
10@contextlib.contextmanager 14@contextlib.contextmanager
11def unfs_server(directory, logger = None): 15def unfs_server(directory, logger = None, udp = True):
12 unfs_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "unfs3-native") 16 unfs_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "unfs3-native")
13 if not os.path.exists(os.path.join(unfs_sysroot, "usr", "bin", "unfsd")): 17 if not os.path.exists(os.path.join(unfs_sysroot, "usr", "bin", "unfsd")):
14 # build native tool 18 # build native tool
@@ -22,11 +26,11 @@ def unfs_server(directory, logger = None):
22 exports.write("{0} (rw,no_root_squash,no_all_squash,insecure)\n".format(directory).encode()) 26 exports.write("{0} (rw,no_root_squash,no_all_squash,insecure)\n".format(directory).encode())
23 27
24 # find some ports for the server 28 # find some ports for the server
25 nfsport, mountport = get_free_port(udp = True), get_free_port(udp = True) 29 nfsport, mountport = get_free_port(udp), get_free_port(udp)
26 30
27 nenv = dict(os.environ) 31 nenv = dict(os.environ)
28 nenv['PATH'] = "{0}/sbin:{0}/usr/sbin:{0}/usr/bin:".format(unfs_sysroot) + nenv.get('PATH', '') 32 nenv['PATH'] = "{0}/sbin:{0}/usr/sbin:{0}/usr/bin:".format(unfs_sysroot) + nenv.get('PATH', '')
29 cmd = Command(["unfsd", "-d", "-p", "-N", "-e", exports.name, "-n", str(nfsport), "-m", str(mountport)], 33 cmd = Command(["unfsd", "-d", "-p", "-e", exports.name, "-n", str(nfsport), "-m", str(mountport)],
30 bg = True, env = nenv, output_log = logger) 34 bg = True, env = nenv, output_log = logger)
31 cmd.run() 35 cmd.run()
32 yield nfsport, mountport 36 yield nfsport, mountport
diff --git a/meta/lib/oeqa/utils/package_manager.py b/meta/lib/oeqa/utils/package_manager.py
index 6b67f22fdd..db799b64d6 100644
--- a/meta/lib/oeqa/utils/package_manager.py
+++ b/meta/lib/oeqa/utils/package_manager.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/utils/postactions.py b/meta/lib/oeqa/utils/postactions.py
new file mode 100644
index 0000000000..c69481db6c
--- /dev/null
+++ b/meta/lib/oeqa/utils/postactions.py
@@ -0,0 +1,102 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7# Run a set of actions after tests. The runner provides internal data
8# dictionary as well as test context to any action to run.
9
10import datetime
11import io
12import os
13import stat
14import subprocess
15import tempfile
16from oeqa.utils import get_artefact_dir
17
18##################################################################
19# Host/target statistics
20##################################################################
21
22def get_target_disk_usage(d, tc, artifacts_list, outputdir):
23 output_file = os.path.join(outputdir, "target_disk_usage.txt")
24 try:
25 (status, output) = tc.target.run('df -h')
26 with open(output_file, 'w') as f:
27 f.write(output)
28 f.write("\n")
29 except Exception as e:
30 bb.warn(f"Can not get target disk usage: {e}")
31
32def get_host_disk_usage(d, tc, artifacts_list, outputdir):
33 import subprocess
34
35 output_file = os.path.join(outputdir, "host_disk_usage.txt")
36 try:
37 with open(output_file, 'w') as f:
38 output = subprocess.run(['df', '-hl'], check=True, text=True, stdout=f, env={})
39 except Exception as e:
40 bb.warn(f"Can not get host disk usage: {e}")
41
42##################################################################
43# Artifacts retrieval
44##################################################################
45
46def get_artifacts_list(target, raw_list):
47 result = []
48 # Passed list may contains patterns in paths, expand them directly on target
49 for raw_path in raw_list.split():
50 cmd = f"for p in {raw_path}; do if [ -e $p ]; then echo $p; fi; done"
51 try:
52 status, output = target.run(cmd)
53 if status != 0 or not output:
54 raise Exception()
55 result += output.split()
56 except:
57 bb.note(f"No file/directory matching path {raw_path}")
58
59 return result
60
61def list_and_fetch_failed_tests_artifacts(d, tc, artifacts_list, outputdir):
62 artifacts_list = get_artifacts_list(tc.target, artifacts_list)
63 if not artifacts_list:
64 bb.warn("Could not load artifacts list, skip artifacts retrieval")
65 return
66 try:
67 # We need gnu tar for sparse files, not busybox
68 cmd = "tar --sparse -zcf - " + " ".join(artifacts_list)
69 (status, output) = tc.target.run(cmd, raw = True)
70 if status != 0 or not output:
71 raise Exception("Error while fetching compressed artifacts")
72 archive_name = os.path.join(outputdir, "tests_artifacts.tar.gz")
73 with open(archive_name, "wb") as f:
74 f.write(output)
75 except Exception as e:
76 bb.warn(f"Can not retrieve artifacts from test target: {e}")
77
78
79##################################################################
80# General post actions runner
81##################################################################
82
83def run_failed_tests_post_actions(d, tc):
84 artifacts = d.getVar("TESTIMAGE_FAILED_QA_ARTIFACTS")
85 # Allow all the code to be disabled by having no artifacts set, e.g. for systems with no ssh support
86 if not artifacts:
87 return
88
89 outputdir = get_artefact_dir(d)
90 os.makedirs(outputdir, exist_ok=True)
91 datestr = datetime.datetime.now().strftime('%Y%m%d')
92 outputdir = tempfile.mkdtemp(prefix='oeqa-target-artefacts-%s-' % datestr, dir=outputdir)
93 os.chmod(outputdir, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
94
95 post_actions=[
96 list_and_fetch_failed_tests_artifacts,
97 get_target_disk_usage,
98 get_host_disk_usage
99 ]
100
101 for action in post_actions:
102 action(d, tc, artifacts, outputdir)
diff --git a/meta/lib/oeqa/utils/qemurunner.py b/meta/lib/oeqa/utils/qemurunner.py
index 77ec939ad7..c4db0cf038 100644
--- a/meta/lib/oeqa/utils/qemurunner.py
+++ b/meta/lib/oeqa/utils/qemurunner.py
@@ -19,20 +19,33 @@ import errno
19import string 19import string
20import threading 20import threading
21import codecs 21import codecs
22import logging 22import tempfile
23from oeqa.utils.dump import HostDumper
24from collections import defaultdict 23from collections import defaultdict
24from contextlib import contextmanager
25import importlib
26import traceback
25 27
26# Get Unicode non printable control chars 28# Get Unicode non printable control chars
27control_range = list(range(0,32))+list(range(127,160)) 29control_range = list(range(0,32))+list(range(127,160))
28control_chars = [chr(x) for x in control_range 30control_chars = [chr(x) for x in control_range
29 if chr(x) not in string.printable] 31 if chr(x) not in string.printable]
30re_control_char = re.compile('[%s]' % re.escape("".join(control_chars))) 32re_control_char = re.compile('[%s]' % re.escape("".join(control_chars)))
33# Regex to remove the ANSI (color) control codes from console strings in order to match the text only
34re_vt100 = re.compile(r'(\x1b\[|\x9b)[^@-_a-z]*[@-_a-z]|\x1b[@-_a-z]')
35
36def getOutput(o):
37 import fcntl
38 fl = fcntl.fcntl(o, fcntl.F_GETFL)
39 fcntl.fcntl(o, fcntl.F_SETFL, fl | os.O_NONBLOCK)
40 try:
41 return os.read(o.fileno(), 1000000).decode("utf-8")
42 except BlockingIOError:
43 return ""
31 44
32class QemuRunner: 45class QemuRunner:
33 46
34 def __init__(self, machine, rootfs, display, tmpdir, deploy_dir_image, logfile, boottime, dump_dir, dump_host_cmds, 47 def __init__(self, machine, rootfs, display, tmpdir, deploy_dir_image, logfile, boottime, dump_dir, use_kvm, logger, use_slirp=False,
35 use_kvm, logger, use_slirp=False, serial_ports=2, boot_patterns = defaultdict(str), use_ovmf=False, workdir=None): 48 serial_ports=2, boot_patterns = defaultdict(str), use_ovmf=False, workdir=None, tmpfsdir=None):
36 49
37 # Popen object for runqemu 50 # Popen object for runqemu
38 self.runqemu = None 51 self.runqemu = None
@@ -55,21 +68,24 @@ class QemuRunner:
55 self.boottime = boottime 68 self.boottime = boottime
56 self.logged = False 69 self.logged = False
57 self.thread = None 70 self.thread = None
71 self.threadsock = None
58 self.use_kvm = use_kvm 72 self.use_kvm = use_kvm
59 self.use_ovmf = use_ovmf 73 self.use_ovmf = use_ovmf
60 self.use_slirp = use_slirp 74 self.use_slirp = use_slirp
61 self.serial_ports = serial_ports 75 self.serial_ports = serial_ports
62 self.msg = '' 76 self.msg = ''
63 self.boot_patterns = boot_patterns 77 self.boot_patterns = boot_patterns
78 self.tmpfsdir = tmpfsdir
64 79
65 self.runqemutime = 120 80 self.runqemutime = 300
66 if not workdir: 81 if not workdir:
67 workdir = os.getcwd() 82 workdir = os.getcwd()
68 self.qemu_pidfile = workdir + '/pidfile_' + str(os.getpid()) 83 self.qemu_pidfile = workdir + '/pidfile_' + str(os.getpid())
69 self.host_dumper = HostDumper(dump_host_cmds, dump_dir)
70 self.monitorpipe = None 84 self.monitorpipe = None
71 85
72 self.logger = logger 86 self.logger = logger
87 # Whether we're expecting an exit and should show related errors
88 self.canexit = False
73 89
74 # Enable testing other OS's 90 # Enable testing other OS's
75 # Set commands for target communication, and default to Linux ALWAYS 91 # Set commands for target communication, and default to Linux ALWAYS
@@ -80,20 +96,21 @@ class QemuRunner:
80 accepted_patterns = ['search_reached_prompt', 'send_login_user', 'search_login_succeeded', 'search_cmd_finished'] 96 accepted_patterns = ['search_reached_prompt', 'send_login_user', 'search_login_succeeded', 'search_cmd_finished']
81 default_boot_patterns = defaultdict(str) 97 default_boot_patterns = defaultdict(str)
82 # Default to the usual paterns used to communicate with the target 98 # Default to the usual paterns used to communicate with the target
83 default_boot_patterns['search_reached_prompt'] = b' login:' 99 default_boot_patterns['search_reached_prompt'] = ' login:'
84 default_boot_patterns['send_login_user'] = 'root\n' 100 default_boot_patterns['send_login_user'] = 'root\n'
85 default_boot_patterns['search_login_succeeded'] = r"root@[a-zA-Z0-9\-]+:~#" 101 default_boot_patterns['search_login_succeeded'] = r"root@[a-zA-Z0-9\-]+:~#"
86 default_boot_patterns['search_cmd_finished'] = r"[a-zA-Z0-9]+@[a-zA-Z0-9\-]+:~#" 102 default_boot_patterns['search_cmd_finished'] = r"[a-zA-Z0-9]+@[a-zA-Z0-9\-]+:~#"
87 103
88 # Only override patterns that were set e.g. login user TESTIMAGE_BOOT_PATTERNS[send_login_user] = "webserver\n" 104 # Only override patterns that were set e.g. login user TESTIMAGE_BOOT_PATTERNS[send_login_user] = "webserver\n"
89 for pattern in accepted_patterns: 105 for pattern in accepted_patterns:
90 if not self.boot_patterns[pattern]: 106 if pattern not in self.boot_patterns or not self.boot_patterns[pattern]:
91 self.boot_patterns[pattern] = default_boot_patterns[pattern] 107 self.boot_patterns[pattern] = default_boot_patterns[pattern]
92 108
93 def create_socket(self): 109 def create_socket(self):
94 try: 110 try:
95 sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 111 sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
96 sock.setblocking(0) 112 sock.setblocking(0)
113 sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
97 sock.bind(("127.0.0.1",0)) 114 sock.bind(("127.0.0.1",0))
98 sock.listen(2) 115 sock.listen(2)
99 port = sock.getsockname()[1] 116 port = sock.getsockname()[1]
@@ -104,30 +121,24 @@ class QemuRunner:
104 sock.close() 121 sock.close()
105 raise 122 raise
106 123
107 def log(self, msg): 124 def decode_qemulog(self, todecode):
108 if self.logfile: 125 # Sanitize the data received from qemu as it may contain control characters
109 # It is needed to sanitize the data received from qemu 126 msg = todecode.decode("utf-8", errors='backslashreplace')
110 # because is possible to have control characters 127 msg = re_control_char.sub('', msg)
111 msg = msg.decode("utf-8", errors='ignore') 128 return msg
112 msg = re_control_char.sub('', msg)
113 self.msg += msg
114 with codecs.open(self.logfile, "a", encoding="utf-8") as f:
115 f.write("%s" % msg)
116
117 def getOutput(self, o):
118 import fcntl
119 fl = fcntl.fcntl(o, fcntl.F_GETFL)
120 fcntl.fcntl(o, fcntl.F_SETFL, fl | os.O_NONBLOCK)
121 return os.read(o.fileno(), 1000000).decode("utf-8")
122 129
130 def log(self, msg, extension=""):
131 if self.logfile:
132 with codecs.open(self.logfile + extension, "ab") as f:
133 f.write(msg)
134 self.msg += self.decode_qemulog(msg)
123 135
124 def handleSIGCHLD(self, signum, frame): 136 def handleSIGCHLD(self, signum, frame):
125 if self.runqemu and self.runqemu.poll(): 137 if self.runqemu and self.runqemu.poll():
126 if self.runqemu.returncode: 138 if self.runqemu.returncode:
127 self.logger.error('runqemu exited with code %d' % self.runqemu.returncode) 139 self.logger.error('runqemu exited with code %d' % self.runqemu.returncode)
128 self.logger.error('Output from runqemu:\n%s' % self.getOutput(self.runqemu.stdout)) 140 self.logger.error('Output from runqemu:\n%s' % getOutput(self.runqemu.stdout))
129 self.stop() 141 self.stop()
130 self._dump_host()
131 142
132 def start(self, qemuparams = None, get_ip = True, extra_bootparams = None, runqemuparams='', launch_cmd=None, discard_writes=True): 143 def start(self, qemuparams = None, get_ip = True, extra_bootparams = None, runqemuparams='', launch_cmd=None, discard_writes=True):
133 env = os.environ.copy() 144 env = os.environ.copy()
@@ -150,6 +161,9 @@ class QemuRunner:
150 else: 161 else:
151 env["DEPLOY_DIR_IMAGE"] = self.deploy_dir_image 162 env["DEPLOY_DIR_IMAGE"] = self.deploy_dir_image
152 163
164 if self.tmpfsdir:
165 env["RUNQEMU_TMPFS_DIR"] = self.tmpfsdir
166
153 if not launch_cmd: 167 if not launch_cmd:
154 launch_cmd = 'runqemu %s' % ('snapshot' if discard_writes else '') 168 launch_cmd = 'runqemu %s' % ('snapshot' if discard_writes else '')
155 if self.use_kvm: 169 if self.use_kvm:
@@ -163,11 +177,38 @@ class QemuRunner:
163 launch_cmd += ' slirp' 177 launch_cmd += ' slirp'
164 if self.use_ovmf: 178 if self.use_ovmf:
165 launch_cmd += ' ovmf' 179 launch_cmd += ' ovmf'
166 launch_cmd += ' %s %s %s' % (runqemuparams, self.machine, self.rootfs) 180 launch_cmd += ' %s %s' % (runqemuparams, self.machine)
181 if self.rootfs.endswith('.vmdk'):
182 self.logger.debug('Bypassing VMDK rootfs for runqemu')
183 else:
184 launch_cmd += ' %s' % (self.rootfs)
167 185
168 return self.launch(launch_cmd, qemuparams=qemuparams, get_ip=get_ip, extra_bootparams=extra_bootparams, env=env) 186 return self.launch(launch_cmd, qemuparams=qemuparams, get_ip=get_ip, extra_bootparams=extra_bootparams, env=env)
169 187
170 def launch(self, launch_cmd, get_ip = True, qemuparams = None, extra_bootparams = None, env = None): 188 def launch(self, launch_cmd, get_ip = True, qemuparams = None, extra_bootparams = None, env = None):
189 # use logfile to determine the recipe-sysroot-native path and
190 # then add in the site-packages path components and add that
191 # to the python sys.path so the qmp module can be found.
192 python_path = os.path.dirname(os.path.dirname(self.logfile))
193 python_path += "/recipe-sysroot-native/usr/lib/qemu-python"
194 sys.path.append(python_path)
195 importlib.invalidate_caches()
196 try:
197 qmp = importlib.import_module("qmp")
198 except Exception as e:
199 self.logger.error("qemurunner: qmp module missing, please ensure it's installed in %s (%s)" % (python_path, str(e)))
200 return False
201 # Path relative to tmpdir used as cwd for qemu below to avoid unix socket path length issues
202 qmp_file = "." + next(tempfile._get_candidate_names())
203 qmp_param = ' -S -qmp unix:./%s,server,wait' % (qmp_file)
204 qmp_port = self.tmpdir + "/" + qmp_file
205 # Create a second socket connection for debugging use,
206 # note this will NOT cause qemu to block waiting for the connection
207 qmp_file2 = "." + next(tempfile._get_candidate_names())
208 qmp_param += ' -qmp unix:./%s,server,nowait' % (qmp_file2)
209 qmp_port2 = self.tmpdir + "/" + qmp_file2
210 self.logger.info("QMP Available for connection at %s" % (qmp_port2))
211
171 try: 212 try:
172 if self.serial_ports >= 2: 213 if self.serial_ports >= 2:
173 self.threadsock, threadport = self.create_socket() 214 self.threadsock, threadport = self.create_socket()
@@ -176,7 +217,7 @@ class QemuRunner:
176 self.logger.error("Failed to create listening socket: %s" % msg[1]) 217 self.logger.error("Failed to create listening socket: %s" % msg[1])
177 return False 218 return False
178 219
179 bootparams = 'console=tty1 console=ttyS0,115200n8 printk.time=1' 220 bootparams = ' printk.time=1'
180 if extra_bootparams: 221 if extra_bootparams:
181 bootparams = bootparams + ' ' + extra_bootparams 222 bootparams = bootparams + ' ' + extra_bootparams
182 223
@@ -184,7 +225,8 @@ class QemuRunner:
184 # and analyze descendents in order to determine it. 225 # and analyze descendents in order to determine it.
185 if os.path.exists(self.qemu_pidfile): 226 if os.path.exists(self.qemu_pidfile):
186 os.remove(self.qemu_pidfile) 227 os.remove(self.qemu_pidfile)
187 self.qemuparams = 'bootparams="{0}" qemuparams="-pidfile {1}"'.format(bootparams, self.qemu_pidfile) 228 self.qemuparams = 'bootparams="{0}" qemuparams="-pidfile {1} {2}"'.format(bootparams, self.qemu_pidfile, qmp_param)
229
188 if qemuparams: 230 if qemuparams:
189 self.qemuparams = self.qemuparams[:-1] + " " + qemuparams + " " + '\"' 231 self.qemuparams = self.qemuparams[:-1] + " " + qemuparams + " " + '\"'
190 232
@@ -196,14 +238,15 @@ class QemuRunner:
196 self.origchldhandler = signal.getsignal(signal.SIGCHLD) 238 self.origchldhandler = signal.getsignal(signal.SIGCHLD)
197 signal.signal(signal.SIGCHLD, self.handleSIGCHLD) 239 signal.signal(signal.SIGCHLD, self.handleSIGCHLD)
198 240
199 self.logger.debug('launchcmd=%s'%(launch_cmd)) 241 self.logger.debug('launchcmd=%s' % (launch_cmd))
200 242
201 # FIXME: We pass in stdin=subprocess.PIPE here to work around stty 243 # FIXME: We pass in stdin=subprocess.PIPE here to work around stty
202 # blocking at the end of the runqemu script when using this within 244 # blocking at the end of the runqemu script when using this within
203 # oe-selftest (this makes stty error out immediately). There ought 245 # oe-selftest (this makes stty error out immediately). There ought
204 # to be a proper fix but this will suffice for now. 246 # to be a proper fix but this will suffice for now.
205 self.runqemu = subprocess.Popen(launch_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, stdin=subprocess.PIPE, preexec_fn=os.setpgrp, env=env) 247 self.runqemu = subprocess.Popen(launch_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, stdin=subprocess.PIPE, preexec_fn=os.setpgrp, env=env, cwd=self.tmpdir)
206 output = self.runqemu.stdout 248 output = self.runqemu.stdout
249 launch_time = time.time()
207 250
208 # 251 #
209 # We need the preexec_fn above so that all runqemu processes can easily be killed 252 # We need the preexec_fn above so that all runqemu processes can easily be killed
@@ -224,35 +267,41 @@ class QemuRunner:
224 self.monitorpipe = os.fdopen(w, "w") 267 self.monitorpipe = os.fdopen(w, "w")
225 else: 268 else:
226 # child process 269 # child process
227 os.setpgrp() 270 try:
228 os.close(w) 271 os.setpgrp()
229 r = os.fdopen(r) 272 os.close(w)
230 x = r.read() 273 r = os.fdopen(r)
231 os.killpg(os.getpgid(self.runqemu.pid), signal.SIGTERM) 274 x = r.read()
232 sys.exit(0) 275 os.killpg(os.getpgid(self.runqemu.pid), signal.SIGTERM)
276 finally:
277 # We must exit under all circumstances
278 os._exit(0)
233 279
234 self.logger.debug("runqemu started, pid is %s" % self.runqemu.pid) 280 self.logger.debug("runqemu started, pid is %s" % self.runqemu.pid)
235 self.logger.debug("waiting at most %s seconds for qemu pid (%s)" % 281 self.logger.debug("waiting at most %d seconds for qemu pid (%s)" %
236 (self.runqemutime, time.strftime("%D %H:%M:%S"))) 282 (self.runqemutime, time.strftime("%D %H:%M:%S")))
237 endtime = time.time() + self.runqemutime 283 endtime = time.time() + self.runqemutime
238 while not self.is_alive() and time.time() < endtime: 284 while not self.is_alive() and time.time() < endtime:
239 if self.runqemu.poll(): 285 if self.runqemu.poll():
240 if self.runqemu_exited: 286 if self.runqemu_exited:
 287 self.logger.warning("runqemu exited during is_alive() test")
241 return False 288 return False
242 if self.runqemu.returncode: 289 if self.runqemu.returncode:
243 # No point waiting any longer 290 # No point waiting any longer
244 self.logger.warning('runqemu exited with code %d' % self.runqemu.returncode) 291 self.logger.warning('runqemu exited with code %d' % self.runqemu.returncode)
245 self._dump_host() 292 self.logger.warning("Output from runqemu:\n%s" % getOutput(output))
246 self.logger.warning("Output from runqemu:\n%s" % self.getOutput(output))
247 self.stop() 293 self.stop()
248 return False 294 return False
249 time.sleep(0.5) 295 time.sleep(0.5)
250 296
251 if self.runqemu_exited: 297 if self.runqemu_exited:
 252 return False 298 self.logger.warning("runqemu exited after timeout")
299
300 if self.runqemu.returncode:
301 self.logger.warning('runqemu exited with code %d' % self.runqemu.returncode)
253 302
254 if not self.is_alive(): 303 if not self.is_alive():
255 self.logger.error("Qemu pid didn't appear in %s seconds (%s)" % 304 self.logger.error("Qemu pid didn't appear in %d seconds (%s)" %
256 (self.runqemutime, time.strftime("%D %H:%M:%S"))) 305 (self.runqemutime, time.strftime("%D %H:%M:%S")))
257 306
258 qemu_pid = None 307 qemu_pid = None
@@ -267,8 +316,7 @@ class QemuRunner:
267 ps = subprocess.Popen(['ps', 'axww', '-o', 'pid,ppid,pri,ni,command '], stdout=subprocess.PIPE).communicate()[0] 316 ps = subprocess.Popen(['ps', 'axww', '-o', 'pid,ppid,pri,ni,command '], stdout=subprocess.PIPE).communicate()[0]
268 processes = ps.decode("utf-8") 317 processes = ps.decode("utf-8")
269 self.logger.debug("Running processes:\n%s" % processes) 318 self.logger.debug("Running processes:\n%s" % processes)
270 self._dump_host() 319 op = getOutput(output)
271 op = self.getOutput(output)
272 self.stop() 320 self.stop()
273 if op: 321 if op:
274 self.logger.error("Output from runqemu:\n%s" % op) 322 self.logger.error("Output from runqemu:\n%s" % op)
@@ -276,10 +324,79 @@ class QemuRunner:
276 self.logger.error("No output from runqemu.\n") 324 self.logger.error("No output from runqemu.\n")
277 return False 325 return False
278 326
327 # Create the client socket for the QEMU Monitor Control Socket
 328 # This will allow us to read status from Qemu if the process
329 # is still alive
330 self.logger.debug("QMP Initializing to %s" % (qmp_port))
331 # chdir dance for path length issues with unix sockets
332 origpath = os.getcwd()
333 try:
334 os.chdir(os.path.dirname(qmp_port))
335 try:
336 from qmp.legacy import QEMUMonitorProtocol
337 self.qmp = QEMUMonitorProtocol(os.path.basename(qmp_port))
338 except OSError as msg:
339 self.logger.warning("Failed to initialize qemu monitor socket: %s File: %s" % (msg, msg.filename))
340 return False
341
342 self.logger.debug("QMP Connecting to %s" % (qmp_port))
343 if not os.path.exists(qmp_port) and self.is_alive():
344 self.logger.debug("QMP Port does not exist waiting for it to be created")
345 endtime = time.time() + self.runqemutime
346 while not os.path.exists(qmp_port) and self.is_alive() and time.time() < endtime:
347 self.logger.info("QMP port does not exist yet!")
348 time.sleep(0.5)
349 if not os.path.exists(qmp_port) and self.is_alive():
350 self.logger.warning("QMP Port still does not exist but QEMU is alive")
351 return False
352
353 try:
354 # set timeout value for all QMP calls
355 self.qmp.settimeout(self.runqemutime)
356 self.qmp.connect()
357 connect_time = time.time()
358 self.logger.info("QMP connected to QEMU at %s and took %.2f seconds" %
359 (time.strftime("%D %H:%M:%S"),
360 time.time() - launch_time))
361 except OSError as msg:
362 self.logger.warning("Failed to connect qemu monitor socket: %s File: %s" % (msg, msg.filename))
363 return False
364 except qmp.legacy.QMPError as msg:
365 self.logger.warning("Failed to communicate with qemu monitor: %s" % (msg))
366 return False
367 finally:
368 os.chdir(origpath)
369
370 # We worry that mmap'd libraries may cause page faults which hang the qemu VM for periods
 371 causing failures. Before we "start" qemu, read through its mapped files to try and
372 # ensure we don't hit page faults later
373 mapdir = "/proc/" + str(self.qemupid) + "/map_files/"
374 try:
375 for f in os.listdir(mapdir):
376 try:
377 linktarget = os.readlink(os.path.join(mapdir, f))
378 if not linktarget.startswith("/") or linktarget.startswith("/dev") or "deleted" in linktarget:
379 continue
380 with open(linktarget, "rb") as readf:
381 data = True
382 while data:
383 data = readf.read(4096)
384 except FileNotFoundError:
385 continue
386 # Centos7 doesn't allow us to read /map_files/
387 except PermissionError:
388 pass
389
390 # Release the qemu process to continue running
391 self.run_monitor('cont')
392 self.logger.info("QMP released QEMU at %s and took %.2f seconds from connect" %
393 (time.strftime("%D %H:%M:%S"),
394 time.time() - connect_time))
395
279 # We are alive: qemu is running 396 # We are alive: qemu is running
280 out = self.getOutput(output) 397 out = getOutput(output)
281 netconf = False # network configuration is not required by default 398 netconf = False # network configuration is not required by default
 282 self.logger.debug("qemu started in %s seconds - qemu process pid is %s (%s)" % 399 self.logger.debug("qemu started in %.2f seconds - qemu process pid is %s (%s)" %
283 (time.time() - (endtime - self.runqemutime), 400 (time.time() - (endtime - self.runqemutime),
284 self.qemupid, time.strftime("%D %H:%M:%S"))) 401 self.qemupid, time.strftime("%D %H:%M:%S")))
285 cmdline = '' 402 cmdline = ''
@@ -291,9 +408,10 @@ class QemuRunner:
291 cmdline = re_control_char.sub(' ', cmdline) 408 cmdline = re_control_char.sub(' ', cmdline)
292 try: 409 try:
293 if self.use_slirp: 410 if self.use_slirp:
294 tcp_ports = cmdline.split("hostfwd=tcp::")[1] 411 tcp_ports = cmdline.split("hostfwd=tcp:")[1]
412 ip, tcp_ports = tcp_ports.split(":")[:2]
295 host_port = tcp_ports[:tcp_ports.find('-')] 413 host_port = tcp_ports[:tcp_ports.find('-')]
296 self.ip = "localhost:%s" % host_port 414 self.ip = "%s:%s" % (ip, host_port)
297 else: 415 else:
298 ips = re.findall(r"((?:[0-9]{1,3}\.){3}[0-9]{1,3})", cmdline.split("ip=")[1]) 416 ips = re.findall(r"((?:[0-9]{1,3}\.){3}[0-9]{1,3})", cmdline.split("ip=")[1])
299 self.ip = ips[0] 417 self.ip = ips[0]
@@ -301,8 +419,8 @@ class QemuRunner:
301 self.logger.debug("qemu cmdline used:\n{}".format(cmdline)) 419 self.logger.debug("qemu cmdline used:\n{}".format(cmdline))
302 except (IndexError, ValueError): 420 except (IndexError, ValueError):
303 # Try to get network configuration from runqemu output 421 # Try to get network configuration from runqemu output
304 match = re.match(r'.*Network configuration: (?:ip=)*([0-9.]+)::([0-9.]+):([0-9.]+)$.*', 422 match = re.match(r'.*Network configuration: (?:ip=)*([0-9.]+)::([0-9.]+):([0-9.]+).*',
305 out, re.MULTILINE|re.DOTALL) 423 out, re.MULTILINE | re.DOTALL)
306 if match: 424 if match:
307 self.ip, self.server_ip, self.netmask = match.groups() 425 self.ip, self.server_ip, self.netmask = match.groups()
308 # network configuration is required as we couldn't get it 426 # network configuration is required as we couldn't get it
@@ -313,16 +431,16 @@ class QemuRunner:
313 self.logger.error("Couldn't get ip from qemu command line and runqemu output! " 431 self.logger.error("Couldn't get ip from qemu command line and runqemu output! "
314 "Here is the qemu command line used:\n%s\n" 432 "Here is the qemu command line used:\n%s\n"
315 "and output from runqemu:\n%s" % (cmdline, out)) 433 "and output from runqemu:\n%s" % (cmdline, out))
316 self._dump_host()
317 self.stop() 434 self.stop()
318 return False 435 return False
319 436
320 self.logger.debug("Target IP: %s" % self.ip) 437 self.logger.debug("Target IP: %s" % self.ip)
321 self.logger.debug("Server IP: %s" % self.server_ip) 438 self.logger.debug("Server IP: %s" % self.server_ip)
322 439
440 self.thread = LoggingThread(self.log, self.threadsock, self.logger, self.runqemu.stdout)
441 self.thread.start()
442
323 if self.serial_ports >= 2: 443 if self.serial_ports >= 2:
324 self.thread = LoggingThread(self.log, self.threadsock, self.logger)
325 self.thread.start()
326 if not self.thread.connection_established.wait(self.boottime): 444 if not self.thread.connection_established.wait(self.boottime):
327 self.logger.error("Didn't receive a console connection from qemu. " 445 self.logger.error("Didn't receive a console connection from qemu. "
328 "Here is the qemu command line used:\n%s\nand " 446 "Here is the qemu command line used:\n%s\nand "
@@ -334,7 +452,7 @@ class QemuRunner:
334 self.logger.debug("Waiting at most %d seconds for login banner (%s)" % 452 self.logger.debug("Waiting at most %d seconds for login banner (%s)" %
335 (self.boottime, time.strftime("%D %H:%M:%S"))) 453 (self.boottime, time.strftime("%D %H:%M:%S")))
336 endtime = time.time() + self.boottime 454 endtime = time.time() + self.boottime
337 socklist = [self.server_socket] 455 filelist = [self.server_socket]
338 reachedlogin = False 456 reachedlogin = False
339 stopread = False 457 stopread = False
340 qemusock = None 458 qemusock = None
@@ -342,64 +460,84 @@ class QemuRunner:
342 data = b'' 460 data = b''
343 while time.time() < endtime and not stopread: 461 while time.time() < endtime and not stopread:
344 try: 462 try:
345 sread, swrite, serror = select.select(socklist, [], [], 5) 463 sread, swrite, serror = select.select(filelist, [], [], 5)
346 except InterruptedError: 464 except InterruptedError:
347 continue 465 continue
348 for sock in sread: 466 for file in sread:
349 if sock is self.server_socket: 467 if file is self.server_socket:
350 qemusock, addr = self.server_socket.accept() 468 qemusock, addr = self.server_socket.accept()
351 qemusock.setblocking(0) 469 qemusock.setblocking(False)
352 socklist.append(qemusock) 470 filelist.append(qemusock)
353 socklist.remove(self.server_socket) 471 filelist.remove(self.server_socket)
354 self.logger.debug("Connection from %s:%s" % addr) 472 self.logger.debug("Connection from %s:%s" % addr)
355 else: 473 else:
356 data = data + sock.recv(1024) 474 # try to avoid reading only a single character at a time
475 time.sleep(0.1)
476 if hasattr(file, 'read'):
477 read = file.read(1024)
478 elif hasattr(file, 'recv'):
479 read = file.recv(1024)
480 else:
 481 self.logger.error('Invalid file type: %s' % (file))
482 read = b''
483
484 self.logger.debug2('Partial boot log:\n%s' % (read.decode('utf-8', errors='backslashreplace')))
485 data = data + read
357 if data: 486 if data:
358 bootlog += data 487 bootlog += data
359 if self.serial_ports < 2: 488 self.log(data, extension = ".2")
360 # this socket has mixed console/kernel data, log it to logfile
361 self.log(data)
362
363 data = b'' 489 data = b''
364 if self.boot_patterns['search_reached_prompt'] in bootlog: 490
491 if bytes(self.boot_patterns['search_reached_prompt'], 'utf-8') in bootlog:
492 self.server_socket.close()
365 self.server_socket = qemusock 493 self.server_socket = qemusock
366 stopread = True 494 stopread = True
367 reachedlogin = True 495 reachedlogin = True
368 self.logger.debug("Reached login banner in %s seconds (%s)" % 496 self.logger.debug("Reached login banner in %.2f seconds (%s)" %
369 (time.time() - (endtime - self.boottime), 497 (time.time() - (endtime - self.boottime),
370 time.strftime("%D %H:%M:%S"))) 498 time.strftime("%D %H:%M:%S")))
371 else: 499 else:
372 # no need to check if reachedlogin unless we support multiple connections 500 # no need to check if reachedlogin unless we support multiple connections
373 self.logger.debug("QEMU socket disconnected before login banner reached. (%s)" % 501 self.logger.debug("QEMU socket disconnected before login banner reached. (%s)" %
374 time.strftime("%D %H:%M:%S")) 502 time.strftime("%D %H:%M:%S"))
375 socklist.remove(sock) 503 filelist.remove(file)
376 sock.close() 504 file.close()
377 stopread = True 505 stopread = True
378 506
379
380 if not reachedlogin: 507 if not reachedlogin:
381 if time.time() >= endtime: 508 if time.time() >= endtime:
382 self.logger.warning("Target didn't reach login banner in %d seconds (%s)" % 509 self.logger.warning("Target didn't reach login banner in %d seconds (%s)" %
383 (self.boottime, time.strftime("%D %H:%M:%S"))) 510 (self.boottime, time.strftime("%D %H:%M:%S")))
384 tail = lambda l: "\n".join(l.splitlines()[-25:]) 511 tail = lambda l: "\n".join(l.splitlines()[-25:])
385 bootlog = bootlog.decode("utf-8") 512 bootlog = self.decode_qemulog(bootlog)
386 # in case bootlog is empty, use tail qemu log store at self.msg 513 self.logger.warning("Last 25 lines of login console (%d):\n%s" % (len(bootlog), tail(bootlog)))
387 lines = tail(bootlog if bootlog else self.msg) 514 self.logger.warning("Last 25 lines of all logging (%d):\n%s" % (len(self.msg), tail(self.msg)))
388 self.logger.warning("Last 25 lines of text:\n%s" % lines)
389 self.logger.warning("Check full boot log: %s" % self.logfile) 515 self.logger.warning("Check full boot log: %s" % self.logfile)
390 self._dump_host()
391 self.stop() 516 self.stop()
517 data = True
518 while data:
519 try:
520 time.sleep(1)
521 data = qemusock.recv(1024)
522 self.log(data, extension = ".2")
523 self.logger.warning('Extra log data read: %s\n' % (data.decode('utf-8', errors='backslashreplace')))
524 except Exception as e:
525 self.logger.warning('Extra log data exception %s' % repr(e))
526 data = None
392 return False 527 return False
393 528
529 with self.thread.serial_lock:
530 self.thread.set_serialsock(self.server_socket)
531
394 # If we are not able to login the tests can continue 532 # If we are not able to login the tests can continue
395 try: 533 try:
396 (status, output) = self.run_serial(self.boot_patterns['send_login_user'], raw=True, timeout=120) 534 (status, output) = self.run_serial(self.boot_patterns['send_login_user'], raw=True, timeout=120)
397 if re.search(self.boot_patterns['search_login_succeeded'], output): 535 if re.search(self.boot_patterns['search_login_succeeded'], output):
398 self.logged = True 536 self.logged = True
399 self.logger.debug("Logged as root in serial console") 537 self.logger.debug("Logged in as %s in serial console" % self.boot_patterns['send_login_user'].replace("\n", ""))
400 if netconf: 538 if netconf:
401 # configure guest networking 539 # configure guest networking
402 cmd = "ifconfig eth0 %s netmask %s up\n" % (self.ip, self.netmask) 540 cmd = "ip addr add %s/%s dev eth0\nip link set dev eth0 up\n" % (self.ip, self.netmask)
403 output = self.run_serial(cmd, raw=True)[1] 541 output = self.run_serial(cmd, raw=True)[1]
404 if re.search(r"root@[a-zA-Z0-9\-]+:~#", output): 542 if re.search(r"root@[a-zA-Z0-9\-]+:~#", output):
405 self.logger.debug("configured ip address %s", self.ip) 543 self.logger.debug("configured ip address %s", self.ip)
@@ -407,7 +545,7 @@ class QemuRunner:
407 self.logger.debug("Couldn't configure guest networking") 545 self.logger.debug("Couldn't configure guest networking")
408 else: 546 else:
409 self.logger.warning("Couldn't login into serial console" 547 self.logger.warning("Couldn't login into serial console"
410 " as root using blank password") 548 " as %s using blank password" % self.boot_patterns['send_login_user'].replace("\n", ""))
411 self.logger.warning("The output:\n%s" % output) 549 self.logger.warning("The output:\n%s" % output)
412 except: 550 except:
413 self.logger.warning("Serial console failed while trying to login") 551 self.logger.warning("Serial console failed while trying to login")
@@ -427,16 +565,24 @@ class QemuRunner:
427 except OSError as e: 565 except OSError as e:
428 if e.errno != errno.ESRCH: 566 if e.errno != errno.ESRCH:
429 raise 567 raise
430 endtime = time.time() + self.runqemutime 568 try:
431 while self.runqemu.poll() is None and time.time() < endtime: 569 outs, errs = self.runqemu.communicate(timeout=self.runqemutime)
432 time.sleep(1) 570 if outs:
433 if self.runqemu.poll() is None: 571 self.logger.info("Output from runqemu:\n%s", outs.decode("utf-8"))
572 if errs:
573 self.logger.info("Stderr from runqemu:\n%s", errs.decode("utf-8"))
574 except subprocess.TimeoutExpired:
434 self.logger.debug("Sending SIGKILL to runqemu") 575 self.logger.debug("Sending SIGKILL to runqemu")
435 os.killpg(os.getpgid(self.runqemu.pid), signal.SIGKILL) 576 os.killpg(os.getpgid(self.runqemu.pid), signal.SIGKILL)
577 if not self.runqemu.stdout.closed:
578 self.logger.info("Output from runqemu:\n%s" % getOutput(self.runqemu.stdout))
436 self.runqemu.stdin.close() 579 self.runqemu.stdin.close()
437 self.runqemu.stdout.close() 580 self.runqemu.stdout.close()
438 self.runqemu_exited = True 581 self.runqemu_exited = True
439 582
583 if hasattr(self, 'qmp') and self.qmp:
584 self.qmp.close()
585 self.qmp = None
440 if hasattr(self, 'server_socket') and self.server_socket: 586 if hasattr(self, 'server_socket') and self.server_socket:
441 self.server_socket.close() 587 self.server_socket.close()
442 self.server_socket = None 588 self.server_socket = None
@@ -467,6 +613,11 @@ class QemuRunner:
467 self.thread.stop() 613 self.thread.stop()
468 self.thread.join() 614 self.thread.join()
469 615
616 def allowexit(self):
617 self.canexit = True
618 if self.thread:
619 self.thread.allowexit()
620
470 def restart(self, qemuparams = None): 621 def restart(self, qemuparams = None):
471 self.logger.warning("Restarting qemu process") 622 self.logger.warning("Restarting qemu process")
472 if self.runqemu.poll() is None: 623 if self.runqemu.poll() is None:
@@ -483,8 +634,12 @@ class QemuRunner:
483 # so it's possible that the file has been created but the content is empty 634 # so it's possible that the file has been created but the content is empty
484 pidfile_timeout = time.time() + 3 635 pidfile_timeout = time.time() + 3
485 while time.time() < pidfile_timeout: 636 while time.time() < pidfile_timeout:
486 with open(self.qemu_pidfile, 'r') as f: 637 try:
487 qemu_pid = f.read().strip() 638 with open(self.qemu_pidfile, 'r') as f:
639 qemu_pid = f.read().strip()
640 except FileNotFoundError:
641 # Can be used to detect shutdown so the pid file can disappear
642 return False
488 # file created but not yet written contents 643 # file created but not yet written contents
489 if not qemu_pid: 644 if not qemu_pid:
490 time.sleep(0.5) 645 time.sleep(0.5)
@@ -495,34 +650,49 @@ class QemuRunner:
495 return True 650 return True
496 return False 651 return False
497 652
653 def run_monitor(self, command, args=None, timeout=60):
654 if hasattr(self, 'qmp') and self.qmp:
655 self.qmp.settimeout(timeout)
656 if args is not None:
657 return self.qmp.cmd_raw(command, args)
658 else:
659 return self.qmp.cmd_raw(command)
660
498 def run_serial(self, command, raw=False, timeout=60): 661 def run_serial(self, command, raw=False, timeout=60):
662 # Returns (status, output) where status is 1 on success and 0 on error
663
499 # We assume target system have echo to get command status 664 # We assume target system have echo to get command status
500 if not raw: 665 if not raw:
501 command = "%s; echo $?\n" % command 666 command = "%s; echo $?\n" % command
502 667
503 data = '' 668 data = ''
504 status = 0 669 status = 0
505 self.server_socket.sendall(command.encode('utf-8')) 670 with self.thread.serial_lock:
506 start = time.time() 671 self.server_socket.sendall(command.encode('utf-8'))
507 end = start + timeout 672 start = time.time()
508 while True: 673 end = start + timeout
509 now = time.time() 674 while True:
510 if now >= end: 675 now = time.time()
511 data += "<<< run_serial(): command timed out after %d seconds without output >>>\r\n\r\n" % timeout 676 if now >= end:
512 break 677 data += "<<< run_serial(): command timed out after %d seconds without output >>>\r\n\r\n" % timeout
513 try: 678 break
514 sread, _, _ = select.select([self.server_socket],[],[], end - now) 679 try:
515 except InterruptedError: 680 sread, _, _ = select.select([self.server_socket],[],[], end - now)
516 continue 681 except InterruptedError:
517 if sread: 682 continue
518 answer = self.server_socket.recv(1024) 683 if sread:
519 if answer: 684 # try to avoid reading single character at a time
520 data += answer.decode('utf-8') 685 time.sleep(0.1)
521 # Search the prompt to stop 686 answer = self.server_socket.recv(1024)
522 if re.search(self.boot_patterns['search_cmd_finished'], data): 687 if answer:
523 break 688 data += re_vt100.sub("", answer.decode('utf-8'))
524 else: 689 # Search the prompt to stop
525 raise Exception("No data on serial console socket") 690 if re.search(self.boot_patterns['search_cmd_finished'], data):
691 break
692 else:
693 if self.canexit:
694 return (1, "")
695 raise Exception("No data on serial console socket, connection closed?")
526 696
527 if data: 697 if data:
528 if raw: 698 if raw:
@@ -541,34 +711,48 @@ class QemuRunner:
541 status = 1 711 status = 1
542 return (status, str(data)) 712 return (status, str(data))
543 713
544 714@contextmanager
545 def _dump_host(self): 715def nonblocking_lock(lock):
546 self.host_dumper.create_dir("qemu") 716 locked = lock.acquire(False)
547 self.logger.warning("Qemu ended unexpectedly, dump data from host" 717 try:
548 " is in %s" % self.host_dumper.dump_dir) 718 yield locked
549 self.host_dumper.dump_host() 719 finally:
720 if locked:
721 lock.release()
550 722
551# This class is for reading data from a socket and passing it to logfunc 723# This class is for reading data from a socket and passing it to logfunc
552# to be processed. It's completely event driven and has a straightforward 724# to be processed. It's completely event driven and has a straightforward
553# event loop. The mechanism for stopping the thread is a simple pipe which 725# event loop. The mechanism for stopping the thread is a simple pipe which
554# will wake up the poll and allow for tearing everything down. 726# will wake up the poll and allow for tearing everything down.
555class LoggingThread(threading.Thread): 727class LoggingThread(threading.Thread):
556 def __init__(self, logfunc, sock, logger): 728 def __init__(self, logfunc, sock, logger, qemuoutput):
557 self.connection_established = threading.Event() 729 self.connection_established = threading.Event()
730 self.serial_lock = threading.Lock()
731
558 self.serversock = sock 732 self.serversock = sock
733 self.serialsock = None
734 self.qemuoutput = qemuoutput
559 self.logfunc = logfunc 735 self.logfunc = logfunc
560 self.logger = logger 736 self.logger = logger
561 self.readsock = None 737 self.readsock = None
562 self.running = False 738 self.running = False
739 self.canexit = False
563 740
564 self.errorevents = select.POLLERR | select.POLLHUP | select.POLLNVAL 741 self.errorevents = select.POLLERR | select.POLLHUP | select.POLLNVAL
565 self.readevents = select.POLLIN | select.POLLPRI 742 self.readevents = select.POLLIN | select.POLLPRI
566 743
567 threading.Thread.__init__(self, target=self.threadtarget) 744 threading.Thread.__init__(self, target=self.threadtarget)
568 745
746 def set_serialsock(self, serialsock):
747 self.serialsock = serialsock
748
569 def threadtarget(self): 749 def threadtarget(self):
570 try: 750 try:
571 self.eventloop() 751 self.eventloop()
752 except Exception:
753 exc_type, exc_value, exc_traceback = sys.exc_info()
754 self.logger.warning("Exception %s in logging thread" %
755 traceback.format_exception(exc_type, exc_value, exc_traceback))
572 finally: 756 finally:
573 self.teardown() 757 self.teardown()
574 758
@@ -584,7 +768,8 @@ class LoggingThread(threading.Thread):
584 768
585 def teardown(self): 769 def teardown(self):
586 self.logger.debug("Tearing down logging thread") 770 self.logger.debug("Tearing down logging thread")
587 self.close_socket(self.serversock) 771 if self.serversock:
772 self.close_socket(self.serversock)
588 773
589 if self.readsock is not None: 774 if self.readsock is not None:
590 self.close_socket(self.readsock) 775 self.close_socket(self.readsock)
@@ -593,30 +778,37 @@ class LoggingThread(threading.Thread):
593 self.close_ignore_error(self.writepipe) 778 self.close_ignore_error(self.writepipe)
594 self.running = False 779 self.running = False
595 780
781 def allowexit(self):
782 self.canexit = True
783
596 def eventloop(self): 784 def eventloop(self):
597 poll = select.poll() 785 poll = select.poll()
598 event_read_mask = self.errorevents | self.readevents 786 event_read_mask = self.errorevents | self.readevents
599 poll.register(self.serversock.fileno()) 787 if self.serversock:
788 poll.register(self.serversock.fileno())
789 serial_registered = False
790 poll.register(self.qemuoutput.fileno())
600 poll.register(self.readpipe, event_read_mask) 791 poll.register(self.readpipe, event_read_mask)
601 792
602 breakout = False 793 breakout = False
603 self.running = True 794 self.running = True
604 self.logger.debug("Starting thread event loop") 795 self.logger.debug("Starting thread event loop")
605 while not breakout: 796 while not breakout:
606 events = poll.poll() 797 events = poll.poll(2)
607 for event in events: 798 for fd, event in events:
799
608 # An error occurred, bail out 800 # An error occurred, bail out
609 if event[1] & self.errorevents: 801 if event & self.errorevents:
610 raise Exception(self.stringify_event(event[1])) 802 raise Exception(self.stringify_event(event))
611 803
612 # Event to stop the thread 804 # Event to stop the thread
613 if self.readpipe == event[0]: 805 if self.readpipe == fd:
614 self.logger.debug("Stop event received") 806 self.logger.debug("Stop event received")
615 breakout = True 807 breakout = True
616 break 808 break
617 809
618 # A connection request was received 810 # A connection request was received
619 elif self.serversock.fileno() == event[0]: 811 elif self.serversock and self.serversock.fileno() == fd:
620 self.logger.debug("Connection request received") 812 self.logger.debug("Connection request received")
621 self.readsock, _ = self.serversock.accept() 813 self.readsock, _ = self.serversock.accept()
622 self.readsock.setblocking(0) 814 self.readsock.setblocking(0)
@@ -627,18 +819,40 @@ class LoggingThread(threading.Thread):
627 self.connection_established.set() 819 self.connection_established.set()
628 820
629 # Actual data to be logged 821 # Actual data to be logged
630 elif self.readsock.fileno() == event[0]: 822 elif self.readsock and self.readsock.fileno() == fd:
631 data = self.recv(1024) 823 data = self.recv(1024, self.readsock)
632 self.logfunc(data) 824 self.logfunc(data)
825 elif self.qemuoutput.fileno() == fd:
826 data = self.qemuoutput.read()
827 self.logger.debug("Data received on qemu stdout %s" % data)
828 self.logfunc(data, ".stdout")
829 elif self.serialsock and self.serialsock.fileno() == fd:
830 if self.serial_lock.acquire(blocking=False):
831 try:
832 data = self.recv(1024, self.serialsock)
833 self.logger.debug("Data received serial thread %s" % data.decode('utf-8', 'replace'))
834 self.logfunc(data, ".2")
835 finally:
836 self.serial_lock.release()
837 else:
838 serial_registered = False
839 poll.unregister(self.serialsock.fileno())
840
841 if not serial_registered and self.serialsock:
842 with nonblocking_lock(self.serial_lock) as l:
843 if l:
844 serial_registered = True
845 poll.register(self.serialsock.fileno(), event_read_mask)
846
633 847
634 # Since the socket is non-blocking make sure to honor EAGAIN 848 # Since the socket is non-blocking make sure to honor EAGAIN
635 # and EWOULDBLOCK. 849 # and EWOULDBLOCK.
636 def recv(self, count): 850 def recv(self, count, sock):
637 try: 851 try:
638 data = self.readsock.recv(count) 852 data = sock.recv(count)
639 except socket.error as e: 853 except socket.error as e:
640 if e.errno == errno.EAGAIN or e.errno == errno.EWOULDBLOCK: 854 if e.errno == errno.EAGAIN or e.errno == errno.EWOULDBLOCK:
641 return '' 855 return b''
642 else: 856 else:
643 raise 857 raise
644 858
@@ -649,7 +863,9 @@ class LoggingThread(threading.Thread):
649 # happened. But for this code it counts as an 863 # happened. But for this code it counts as an
650 # error since the connection shouldn't go away 864 # error since the connection shouldn't go away
651 # until qemu exits. 865 # until qemu exits.
652 raise Exception("Console connection closed unexpectedly") 866 if not self.canexit:
867 raise Exception("Console connection closed unexpectedly")
868 return b''
653 869
654 return data 870 return data
655 871
@@ -661,6 +877,9 @@ class LoggingThread(threading.Thread):
661 val = 'POLLHUP' 877 val = 'POLLHUP'
662 elif select.POLLNVAL == event: 878 elif select.POLLNVAL == event:
663 val = 'POLLNVAL' 879 val = 'POLLNVAL'
880 else:
881 val = "0x%x" % (event)
882
664 return val 883 return val
665 884
666 def close_socket(self, sock): 885 def close_socket(self, sock):
diff --git a/meta/lib/oeqa/utils/qemutinyrunner.py b/meta/lib/oeqa/utils/qemutinyrunner.py
index 5c92941c0a..20009401ca 100644
--- a/meta/lib/oeqa/utils/qemutinyrunner.py
+++ b/meta/lib/oeqa/utils/qemutinyrunner.py
@@ -19,7 +19,7 @@ from .qemurunner import QemuRunner
19 19
20class QemuTinyRunner(QemuRunner): 20class QemuTinyRunner(QemuRunner):
21 21
22 def __init__(self, machine, rootfs, display, tmpdir, deploy_dir_image, logfile, kernel, boottime, logger): 22 def __init__(self, machine, rootfs, display, tmpdir, deploy_dir_image, logfile, kernel, boottime, logger, tmpfsdir=None):
23 23
24 # Popen object for runqemu 24 # Popen object for runqemu
25 self.runqemu = None 25 self.runqemu = None
@@ -37,6 +37,7 @@ class QemuTinyRunner(QemuRunner):
37 self.deploy_dir_image = deploy_dir_image 37 self.deploy_dir_image = deploy_dir_image
38 self.logfile = logfile 38 self.logfile = logfile
39 self.boottime = boottime 39 self.boottime = boottime
40 self.tmpfsdir = tmpfsdir
40 41
41 self.runqemutime = 60 42 self.runqemutime = 60
42 self.socketfile = "console.sock" 43 self.socketfile = "console.sock"
@@ -83,6 +84,9 @@ class QemuTinyRunner(QemuRunner):
83 return False 84 return False
84 else: 85 else:
85 os.environ["DEPLOY_DIR_IMAGE"] = self.deploy_dir_image 86 os.environ["DEPLOY_DIR_IMAGE"] = self.deploy_dir_image
87 if self.tmpfsdir:
88 env["RUNQEMU_TMPFS_DIR"] = self.tmpfsdir
89
86 90
87 # Set this flag so that Qemu doesn't do any grabs as SDL grabs interact 91 # Set this flag so that Qemu doesn't do any grabs as SDL grabs interact
88 # badly with screensavers. 92 # badly with screensavers.
diff --git a/meta/lib/oeqa/utils/sshcontrol.py b/meta/lib/oeqa/utils/sshcontrol.py
index 36c2ecb3db..88a61aff63 100644
--- a/meta/lib/oeqa/utils/sshcontrol.py
+++ b/meta/lib/oeqa/utils/sshcontrol.py
@@ -57,8 +57,10 @@ class SSHProcess(object):
57 if select.select([self.process.stdout], [], [], 5)[0] != []: 57 if select.select([self.process.stdout], [], [], 5)[0] != []:
58 data = os.read(self.process.stdout.fileno(), 1024) 58 data = os.read(self.process.stdout.fileno(), 1024)
59 if not data: 59 if not data:
60 self.process.stdout.close() 60 self.process.poll()
61 eof = True 61 if self.process.returncode is not None:
62 self.process.stdout.close()
63 eof = True
62 else: 64 else:
63 data = data.decode("utf-8") 65 data = data.decode("utf-8")
64 output += data 66 output += data
diff --git a/meta/lib/oeqa/utils/subprocesstweak.py b/meta/lib/oeqa/utils/subprocesstweak.py
index b47975a4bc..1774513023 100644
--- a/meta/lib/oeqa/utils/subprocesstweak.py
+++ b/meta/lib/oeqa/utils/subprocesstweak.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4import subprocess 6import subprocess
@@ -6,16 +8,11 @@ import subprocess
6class OETestCalledProcessError(subprocess.CalledProcessError): 8class OETestCalledProcessError(subprocess.CalledProcessError):
7 def __str__(self): 9 def __str__(self):
8 def strify(o): 10 def strify(o):
9 if isinstance(o, bytes): 11 return o.decode("utf-8", errors="replace") if isinstance(o, bytes) else o
10 return o.decode("utf-8", errors="replace")
11 else:
12 return o
13 12
14 s = "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode) 13 s = super().__str__()
15 if hasattr(self, "output") and self.output: 14 s = s + "\nStandard Output: " + strify(self.output)
16 s = s + "\nStandard Output: " + strify(self.output) 15 s = s + "\nStandard Error: " + strify(self.stderr)
17 if hasattr(self, "stderr") and self.stderr:
18 s = s + "\nStandard Error: " + strify(self.stderr)
19 return s 16 return s
20 17
21def errors_have_output(): 18def errors_have_output():
diff --git a/meta/lib/oeqa/utils/targetbuild.py b/meta/lib/oeqa/utils/targetbuild.py
index 1055810ca3..09738add1d 100644
--- a/meta/lib/oeqa/utils/targetbuild.py
+++ b/meta/lib/oeqa/utils/targetbuild.py
@@ -19,6 +19,7 @@ class BuildProject(metaclass=ABCMeta):
19 self.d = d 19 self.d = d
20 self.uri = uri 20 self.uri = uri
21 self.archive = os.path.basename(uri) 21 self.archive = os.path.basename(uri)
22 self.tempdirobj = None
22 if not tmpdir: 23 if not tmpdir:
23 tmpdir = self.d.getVar('WORKDIR') 24 tmpdir = self.d.getVar('WORKDIR')
24 if not tmpdir: 25 if not tmpdir:
@@ -71,9 +72,10 @@ class BuildProject(metaclass=ABCMeta):
71 return self._run('cd %s; make install %s' % (self.targetdir, install_args)) 72 return self._run('cd %s; make install %s' % (self.targetdir, install_args))
72 73
73 def clean(self): 74 def clean(self):
75 if self.tempdirobj:
76 self.tempdirobj.cleanup()
74 self._run('rm -rf %s' % self.targetdir) 77 self._run('rm -rf %s' % self.targetdir)
75 subprocess.check_call('rm -f %s' % self.localarchive, shell=True) 78 subprocess.check_call('rm -f %s' % self.localarchive, shell=True)
76 pass
77 79
78class TargetBuildProject(BuildProject): 80class TargetBuildProject(BuildProject):
79 81
diff --git a/meta/lib/oeqa/utils/testexport.py b/meta/lib/oeqa/utils/testexport.py
index e89d130a9c..3ab024d9e9 100644
--- a/meta/lib/oeqa/utils/testexport.py
+++ b/meta/lib/oeqa/utils/testexport.py
@@ -60,17 +60,17 @@ def process_binaries(d, params):
60 export_env = d.getVar("TEST_EXPORT_ONLY") 60 export_env = d.getVar("TEST_EXPORT_ONLY")
61 61
62 def extract_binary(pth_to_pkg, dest_pth=None): 62 def extract_binary(pth_to_pkg, dest_pth=None):
63 cpio_command = runCmd("which cpio") 63 tar_command = runCmd("which tar")
64 rpm2cpio_command = runCmd("ls /usr/bin/rpm2cpio") 64 rpm2archive_command = runCmd("ls /usr/bin/rpm2archive")
65 if (cpio_command.status != 0) and (rpm2cpio_command.status != 0): 65 if (tar_command.status != 0) and (rpm2archive_command.status != 0):
66 bb.fatal("Either \"rpm2cpio\" or \"cpio\" tools are not available on your system." 66 bb.fatal("Either \"rpm2archive\" or \"tar\" tools are not available on your system."
67 "All binaries extraction processes will not be available, crashing all related tests." 67 "All binaries extraction processes will not be available, crashing all related tests."
68 "Please install them according to your OS recommendations") # will exit here 68 "Please install them according to your OS recommendations") # will exit here
69 if dest_pth: 69 if dest_pth:
70 os.chdir(dest_pth) 70 os.chdir(dest_pth)
71 else: 71 else:
72 os.chdir("%s" % os.sep)# this is for native package 72 os.chdir("%s" % os.sep)# this is for native package
73 extract_bin_command = runCmd("%s %s | %s -idm" % (rpm2cpio_command.output, pth_to_pkg, cpio_command.output)) # semi-hardcoded because of a bug on poky's rpm2cpio 73 extract_bin_command = runCmd("%s -n %s | %s xv" % (rpm2archive_command.output, pth_to_pkg, tar_command.output)) # semi-hardcoded because of a bug on poky's rpm2cpio
74 return extract_bin_command 74 return extract_bin_command
75 75
76 if determine_if_poky_env(): # machine with poky environment 76 if determine_if_poky_env(): # machine with poky environment