Diffstat (limited to 'meta/lib')
-rw-r--r-- meta/lib/bbconfigbuild/configfragments.py | 185
-rw-r--r-- meta/lib/bblayers/buildconf.py | 84
-rw-r--r-- meta/lib/bblayers/create.py | 21
-rw-r--r-- meta/lib/bblayers/machines.py | 37
-rw-r--r-- meta/lib/bblayers/makesetup.py | 101
-rw-r--r-- meta/lib/bblayers/setupwriters/oe-setup-layers.py | 122
-rw-r--r-- meta/lib/bblayers/templates/layer.conf | 10
-rw-r--r-- meta/lib/oe/__init__.py | 9
-rw-r--r-- meta/lib/oe/bootfiles.py | 57
-rw-r--r-- meta/lib/oe/buildcfg.py | 79
-rw-r--r-- meta/lib/oe/buildhistory_analysis.py | 2
-rw-r--r-- meta/lib/oe/buildstats.py (renamed from meta/lib/buildstats.py) | 131
-rw-r--r-- meta/lib/oe/cachedpath.py | 6
-rw-r--r-- meta/lib/oe/classextend.py | 13
-rw-r--r-- meta/lib/oe/classutils.py | 2
-rw-r--r-- meta/lib/oe/copy_buildsystem.py | 30
-rw-r--r-- meta/lib/oe/cve_check.py | 324
-rw-r--r-- meta/lib/oe/data.py | 2
-rw-r--r-- meta/lib/oe/distro_check.py | 4
-rw-r--r-- meta/lib/oe/elf.py | 257
-rw-r--r-- meta/lib/oe/fitimage.py | 547
-rw-r--r-- meta/lib/oe/go.py | 38
-rw-r--r-- meta/lib/oe/gpg_sign.py | 58
-rw-r--r-- meta/lib/oe/license.py | 265
-rw-r--r-- meta/lib/oe/license_finder.py | 179
-rw-r--r-- meta/lib/oe/lsb.py | 2
-rw-r--r-- meta/lib/oe/maketype.py | 9
-rw-r--r-- meta/lib/oe/manifest.py | 6
-rw-r--r-- meta/lib/oe/npm_registry.py | 175
-rw-r--r-- meta/lib/oe/overlayfs.py | 54
-rw-r--r-- meta/lib/oe/package.py | 1855
-rw-r--r-- meta/lib/oe/package_manager/__init__.py | 109
-rw-r--r-- meta/lib/oe/package_manager/common_deb_ipk.py | 97
-rw-r--r-- meta/lib/oe/package_manager/deb/__init__.py | 122
-rw-r--r-- meta/lib/oe/package_manager/deb/manifest.py | 2
-rw-r--r-- meta/lib/oe/package_manager/deb/rootfs.py | 2
-rw-r--r-- meta/lib/oe/package_manager/deb/sdk.py | 11
-rw-r--r-- meta/lib/oe/package_manager/ipk/__init__.py | 113
-rw-r--r-- meta/lib/oe/package_manager/ipk/manifest.py | 3
-rw-r--r-- meta/lib/oe/package_manager/ipk/rootfs.py | 41
-rw-r--r-- meta/lib/oe/package_manager/ipk/sdk.py | 11
-rw-r--r-- meta/lib/oe/package_manager/rpm/__init__.py | 53
-rw-r--r-- meta/lib/oe/package_manager/rpm/manifest.py | 2
-rw-r--r-- meta/lib/oe/package_manager/rpm/rootfs.py | 4
-rw-r--r-- meta/lib/oe/package_manager/rpm/sdk.py | 10
-rw-r--r-- meta/lib/oe/packagedata.py | 279
-rw-r--r-- meta/lib/oe/packagegroup.py | 2
-rw-r--r-- meta/lib/oe/patch.py | 258
-rw-r--r-- meta/lib/oe/path.py | 11
-rw-r--r-- meta/lib/oe/prservice.py | 33
-rw-r--r-- meta/lib/oe/qa.py | 69
-rw-r--r-- meta/lib/oe/qemu.py | 54
-rw-r--r-- meta/lib/oe/recipeutils.py | 210
-rw-r--r-- meta/lib/oe/reproducible.py | 117
-rw-r--r-- meta/lib/oe/rootfs.py | 117
-rw-r--r-- meta/lib/oe/rootfspostcommands.py (renamed from meta/lib/rootfspostcommands.py) | 64
-rw-r--r-- meta/lib/oe/rust.py | 11
-rw-r--r-- meta/lib/oe/sbom.py | 120
-rw-r--r-- meta/lib/oe/sbom30.py | 1096
-rw-r--r-- meta/lib/oe/sdk.py | 14
-rw-r--r-- meta/lib/oe/spdx.py | 357
-rw-r--r-- meta/lib/oe/spdx30.py | 5593
-rw-r--r-- meta/lib/oe/spdx30_tasks.py | 1368
-rw-r--r-- meta/lib/oe/spdx_common.py | 285
-rw-r--r-- meta/lib/oe/sstatesig.py | 289
-rw-r--r-- meta/lib/oe/terminal.py | 30
-rw-r--r-- meta/lib/oe/tune.py | 81
-rw-r--r-- meta/lib/oe/types.py | 2
-rw-r--r-- meta/lib/oe/useradd.py | 4
-rw-r--r-- meta/lib/oe/utils.py | 204
-rw-r--r-- meta/lib/oeqa/buildperf/base.py | 2
-rw-r--r-- meta/lib/oeqa/buildtools-docs/cases/README | 2
-rw-r--r-- meta/lib/oeqa/buildtools-docs/cases/build.py | 19
-rw-r--r-- meta/lib/oeqa/buildtools/cases/README | 2
-rw-r--r-- meta/lib/oeqa/buildtools/cases/build.py | 32
-rw-r--r-- meta/lib/oeqa/buildtools/cases/gcc.py | 31
-rw-r--r-- meta/lib/oeqa/buildtools/cases/https.py | 22
-rw-r--r-- meta/lib/oeqa/buildtools/cases/sanity.py | 24
-rw-r--r-- meta/lib/oeqa/controllers/__init__.py | 2
-rw-r--r-- meta/lib/oeqa/controllers/controllerimage.py (renamed from meta/lib/oeqa/controllers/masterimage.py) | 44
-rw-r--r-- meta/lib/oeqa/controllers/testtargetloader.py | 2
-rw-r--r-- meta/lib/oeqa/core/case.py | 17
-rw-r--r-- meta/lib/oeqa/core/context.py | 2
-rw-r--r-- meta/lib/oeqa/core/decorator/__init__.py | 11
-rw-r--r-- meta/lib/oeqa/core/decorator/data.py | 86
-rw-r--r-- meta/lib/oeqa/core/decorator/oetimeout.py | 5
-rw-r--r-- meta/lib/oeqa/core/loader.py | 12
-rw-r--r-- meta/lib/oeqa/core/runner.py | 14
-rw-r--r-- meta/lib/oeqa/core/target/qemu.py | 40
-rw-r--r-- meta/lib/oeqa/core/target/serial.py | 315
-rw-r--r-- meta/lib/oeqa/core/target/ssh.py | 115
-rw-r--r-- meta/lib/oeqa/core/tests/cases/timeout.py | 13
-rw-r--r-- meta/lib/oeqa/core/tests/common.py | 1
-rwxr-xr-x meta/lib/oeqa/core/tests/test_data.py | 2
-rwxr-xr-x meta/lib/oeqa/core/tests/test_decorators.py | 6
-rw-r--r-- meta/lib/oeqa/core/utils/concurrencytest.py | 68
-rw-r--r-- meta/lib/oeqa/core/utils/misc.py | 47
-rw-r--r-- meta/lib/oeqa/files/maturin/guessing-game/Cargo.toml | 20
-rw-r--r-- meta/lib/oeqa/files/maturin/guessing-game/LICENSE-APACHE | 201
-rw-r--r-- meta/lib/oeqa/files/maturin/guessing-game/LICENSE-MIT | 25
-rw-r--r-- meta/lib/oeqa/files/maturin/guessing-game/pyproject.toml | 8
-rw-r--r-- meta/lib/oeqa/files/maturin/guessing-game/src/lib.rs | 48
-rw-r--r-- meta/lib/oeqa/files/test.rs | 2
-rw-r--r-- meta/lib/oeqa/files/testresults/testresults.json | 2
-rw-r--r-- meta/lib/oeqa/manual/bsp-hw.json | 280
-rw-r--r-- meta/lib/oeqa/manual/build-appliance.json | 2
-rw-r--r-- meta/lib/oeqa/manual/crops.json | 294
-rw-r--r-- meta/lib/oeqa/manual/eclipse-plugin.json | 322
-rw-r--r-- meta/lib/oeqa/manual/sdk.json | 2
-rw-r--r-- meta/lib/oeqa/manual/toaster-managed-mode.json | 16
-rw-r--r-- meta/lib/oeqa/oetest.py | 24
-rw-r--r-- meta/lib/oeqa/runtime/case.py | 16
-rw-r--r-- meta/lib/oeqa/runtime/cases/_qemutiny.py | 13
-rw-r--r-- meta/lib/oeqa/runtime/cases/apt.py | 40
-rw-r--r-- meta/lib/oeqa/runtime/cases/boot.py | 4
-rw-r--r-- meta/lib/oeqa/runtime/cases/buildcpio.py | 9
-rw-r--r-- meta/lib/oeqa/runtime/cases/buildgalculator.py | 2
-rw-r--r-- meta/lib/oeqa/runtime/cases/buildlzip.py | 4
-rw-r--r-- meta/lib/oeqa/runtime/cases/connman.py | 2
-rw-r--r-- meta/lib/oeqa/runtime/cases/date.py | 15
-rw-r--r-- meta/lib/oeqa/runtime/cases/df.py | 2
-rw-r--r-- meta/lib/oeqa/runtime/cases/dnf.py | 88
-rw-r--r-- meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py | 27
-rw-r--r-- meta/lib/oeqa/runtime/cases/gcc.py | 2
-rw-r--r-- meta/lib/oeqa/runtime/cases/gi.py | 2
-rw-r--r-- meta/lib/oeqa/runtime/cases/go.py | 21
-rw-r--r-- meta/lib/oeqa/runtime/cases/gstreamer.py | 2
-rw-r--r-- meta/lib/oeqa/runtime/cases/kernelmodule.py | 2
-rw-r--r-- meta/lib/oeqa/runtime/cases/ksample.py | 4
-rw-r--r-- meta/lib/oeqa/runtime/cases/ldd.py | 2
-rw-r--r-- meta/lib/oeqa/runtime/cases/login.py | 116
-rw-r--r-- meta/lib/oeqa/runtime/cases/logrotate.py | 16
-rw-r--r-- meta/lib/oeqa/runtime/cases/ltp.py | 28
-rw-r--r-- meta/lib/oeqa/runtime/cases/ltp_stress.py | 3
-rw-r--r-- meta/lib/oeqa/runtime/cases/maturin.py | 58
-rw-r--r-- meta/lib/oeqa/runtime/cases/multilib.py | 2
-rw-r--r-- meta/lib/oeqa/runtime/cases/oe_syslog.py | 15
-rw-r--r-- meta/lib/oeqa/runtime/cases/opkg.py | 2
-rw-r--r-- meta/lib/oeqa/runtime/cases/pam.py | 2
-rw-r--r-- meta/lib/oeqa/runtime/cases/parselogs-ignores-common.txt | 62
-rw-r--r-- meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt | 19
-rw-r--r-- meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt | 35
-rw-r--r-- meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarm64.txt | 6
-rw-r--r-- meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarmv5.txt | 19
-rw-r--r-- meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc.txt | 6
-rw-r--r-- meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc64.txt | 4
-rw-r--r-- meta/lib/oeqa/runtime/cases/parselogs-ignores-qemux86.txt | 2
-rw-r--r-- meta/lib/oeqa/runtime/cases/parselogs-ignores-x86.txt | 10
l--------- meta/lib/oeqa/runtime/cases/parselogs-ignores-x86_64.txt | 1
-rw-r--r-- meta/lib/oeqa/runtime/cases/parselogs.py | 382
-rw-r--r-- meta/lib/oeqa/runtime/cases/perl.py | 2
-rw-r--r-- meta/lib/oeqa/runtime/cases/ping.py | 35
-rw-r--r-- meta/lib/oeqa/runtime/cases/ptest.py | 14
-rw-r--r-- meta/lib/oeqa/runtime/cases/python.py | 2
-rw-r--r-- meta/lib/oeqa/runtime/cases/rpm.py | 58
-rw-r--r-- meta/lib/oeqa/runtime/cases/rt.py | 19
-rw-r--r-- meta/lib/oeqa/runtime/cases/rtc.py | 17
-rw-r--r-- meta/lib/oeqa/runtime/cases/runlevel.py | 5
-rw-r--r-- meta/lib/oeqa/runtime/cases/rust.py | 64
-rw-r--r-- meta/lib/oeqa/runtime/cases/scons.py | 2
-rw-r--r-- meta/lib/oeqa/runtime/cases/scp.py | 4
-rw-r--r-- meta/lib/oeqa/runtime/cases/skeletoninit.py | 9
-rw-r--r-- meta/lib/oeqa/runtime/cases/ssh.py | 33
-rw-r--r-- meta/lib/oeqa/runtime/cases/stap.py | 42
-rw-r--r-- meta/lib/oeqa/runtime/cases/storage.py | 18
-rw-r--r-- meta/lib/oeqa/runtime/cases/suspend.py | 7
-rw-r--r-- meta/lib/oeqa/runtime/cases/systemd.py | 41
-rw-r--r-- meta/lib/oeqa/runtime/cases/terminal.py | 5
-rw-r--r-- meta/lib/oeqa/runtime/cases/uki.py | 16
-rw-r--r-- meta/lib/oeqa/runtime/cases/usb_hid.py | 7
-rw-r--r-- meta/lib/oeqa/runtime/cases/weston.py | 22
-rw-r--r-- meta/lib/oeqa/runtime/cases/x32lib.py | 2
-rw-r--r-- meta/lib/oeqa/runtime/cases/xorg.py | 2
-rw-r--r-- meta/lib/oeqa/runtime/context.py | 60
-rw-r--r-- meta/lib/oeqa/runtime/decorator/package.py | 18
-rw-r--r-- meta/lib/oeqa/runtime/files/hello.stp | 1
-rw-r--r-- meta/lib/oeqa/sdk/case.py | 67
-rw-r--r-- meta/lib/oeqa/sdk/cases/autotools.py (renamed from meta/lib/oeqa/sdk/cases/buildcpio.py) | 24
-rw-r--r-- meta/lib/oeqa/sdk/cases/buildepoxy.py | 41
-rw-r--r-- meta/lib/oeqa/sdk/cases/buildgalculator.py | 43
-rw-r--r-- meta/lib/oeqa/sdk/cases/cmake.py (renamed from meta/lib/oeqa/sdk/cases/assimp.py) | 23
-rw-r--r-- meta/lib/oeqa/sdk/cases/gcc.py | 6
-rw-r--r-- meta/lib/oeqa/sdk/cases/gtk3.py | 40
-rw-r--r-- meta/lib/oeqa/sdk/cases/kmod.py | 39
-rw-r--r-- meta/lib/oeqa/sdk/cases/makefile.py (renamed from meta/lib/oeqa/sdk/cases/buildlzip.py) | 12
-rw-r--r-- meta/lib/oeqa/sdk/cases/manifest.py | 26
-rw-r--r-- meta/lib/oeqa/sdk/cases/maturin.py | 66
-rw-r--r-- meta/lib/oeqa/sdk/cases/meson.py | 72
-rw-r--r-- meta/lib/oeqa/sdk/cases/perl.py | 7
-rw-r--r-- meta/lib/oeqa/sdk/cases/python.py | 18
-rw-r--r-- meta/lib/oeqa/sdk/cases/rust.py | 58
-rw-r--r-- meta/lib/oeqa/sdk/context.py | 15
-rw-r--r-- meta/lib/oeqa/sdk/files/rust/hello/Cargo.toml | 6
-rw-r--r-- meta/lib/oeqa/sdk/files/rust/hello/build.rs | 3
-rw-r--r-- meta/lib/oeqa/sdk/files/rust/hello/src/main.rs | 3
-rw-r--r-- meta/lib/oeqa/sdk/testmetaidesupport.py | 45
-rw-r--r-- meta/lib/oeqa/sdk/testsdk.py | 39
-rw-r--r-- meta/lib/oeqa/sdkext/cases/devtool.py | 9
-rw-r--r-- meta/lib/oeqa/sdkext/context.py | 4
-rw-r--r-- meta/lib/oeqa/sdkext/testsdk.py | 10
-rw-r--r-- meta/lib/oeqa/selftest/case.py | 19
-rw-r--r-- meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py | 10
-rw-r--r-- meta/lib/oeqa/selftest/cases/archiver.py | 68
-rw-r--r-- meta/lib/oeqa/selftest/cases/barebox.py | 44
-rw-r--r-- meta/lib/oeqa/selftest/cases/baremetal.py | 14
-rw-r--r-- meta/lib/oeqa/selftest/cases/bbclasses.py | 106
-rw-r--r-- meta/lib/oeqa/selftest/cases/bblayers.py | 161
-rw-r--r-- meta/lib/oeqa/selftest/cases/bblock.py | 203
-rw-r--r-- meta/lib/oeqa/selftest/cases/bblogging.py | 182
-rw-r--r-- meta/lib/oeqa/selftest/cases/bbtests.py | 148
-rw-r--r-- meta/lib/oeqa/selftest/cases/binutils.py | 18
-rw-r--r-- meta/lib/oeqa/selftest/cases/buildhistory.py | 63
-rw-r--r-- meta/lib/oeqa/selftest/cases/buildoptions.py | 61
-rw-r--r-- meta/lib/oeqa/selftest/cases/c_cpp.py | 60
-rw-r--r-- meta/lib/oeqa/selftest/cases/containerimage.py | 11
-rw-r--r-- meta/lib/oeqa/selftest/cases/cve_check.py | 462
-rw-r--r-- meta/lib/oeqa/selftest/cases/debuginfod.py | 160
-rw-r--r-- meta/lib/oeqa/selftest/cases/devtool.py | 1412
-rw-r--r-- meta/lib/oeqa/selftest/cases/distrodata.py | 35
-rw-r--r-- meta/lib/oeqa/selftest/cases/efibootpartition.py | 46
-rw-r--r-- meta/lib/oeqa/selftest/cases/esdk.py (renamed from meta/lib/oeqa/selftest/cases/eSDK.py) | 16
-rw-r--r-- meta/lib/oeqa/selftest/cases/externalsrc.py | 44
-rw-r--r-- meta/lib/oeqa/selftest/cases/fetch.py | 69
-rw-r--r-- meta/lib/oeqa/selftest/cases/fitimage.py | 1864
-rw-r--r-- meta/lib/oeqa/selftest/cases/gcc.py | 25
-rw-r--r-- meta/lib/oeqa/selftest/cases/gdbserver.py | 67
-rw-r--r-- meta/lib/oeqa/selftest/cases/gitarchivetests.py | 136
-rw-r--r-- meta/lib/oeqa/selftest/cases/glibc.py | 24
-rw-r--r-- meta/lib/oeqa/selftest/cases/gotoolchain.py | 5
-rw-r--r-- meta/lib/oeqa/selftest/cases/image_typedep.py | 6
-rw-r--r-- meta/lib/oeqa/selftest/cases/imagefeatures.py | 179
-rw-r--r-- meta/lib/oeqa/selftest/cases/incompatible_lic.py | 142
-rw-r--r-- meta/lib/oeqa/selftest/cases/intercept.py | 21
-rw-r--r-- meta/lib/oeqa/selftest/cases/kerneldevelopment.py | 9
-rw-r--r-- meta/lib/oeqa/selftest/cases/layerappend.py | 16
-rw-r--r-- meta/lib/oeqa/selftest/cases/liboe.py | 41
-rw-r--r-- meta/lib/oeqa/selftest/cases/lic_checksum.py | 25
-rw-r--r-- meta/lib/oeqa/selftest/cases/locales.py | 54
-rw-r--r-- meta/lib/oeqa/selftest/cases/manifest.py | 4
-rw-r--r-- meta/lib/oeqa/selftest/cases/meta_ide.py | 25
-rw-r--r-- meta/lib/oeqa/selftest/cases/minidebuginfo.py | 60
-rw-r--r-- meta/lib/oeqa/selftest/cases/multiconfig.py | 21
-rw-r--r-- meta/lib/oeqa/selftest/cases/newlib.py | 13
-rw-r--r-- meta/lib/oeqa/selftest/cases/oelib/buildhistory.py | 26
-rw-r--r-- meta/lib/oeqa/selftest/cases/oelib/elf.py | 2
-rw-r--r-- meta/lib/oeqa/selftest/cases/oelib/license.py | 24
-rw-r--r-- meta/lib/oeqa/selftest/cases/oelib/path.py | 2
-rw-r--r-- meta/lib/oeqa/selftest/cases/oelib/types.py | 2
-rw-r--r-- meta/lib/oeqa/selftest/cases/oelib/utils.py | 5
-rw-r--r-- meta/lib/oeqa/selftest/cases/oescripts.py | 69
-rw-r--r-- meta/lib/oeqa/selftest/cases/overlayfs.py | 541
-rw-r--r-- meta/lib/oeqa/selftest/cases/package.py | 72
-rw-r--r-- meta/lib/oeqa/selftest/cases/picolibc.py | 18
-rw-r--r-- meta/lib/oeqa/selftest/cases/pkgdata.py | 11
-rw-r--r-- meta/lib/oeqa/selftest/cases/prservice.py | 35
-rw-r--r-- meta/lib/oeqa/selftest/cases/pseudo.py | 2
-rw-r--r-- meta/lib/oeqa/selftest/cases/recipetool.py | 713
-rw-r--r-- meta/lib/oeqa/selftest/cases/recipeutils.py | 20
-rw-r--r-- meta/lib/oeqa/selftest/cases/reproducible.py | 180
-rw-r--r-- meta/lib/oeqa/selftest/cases/resulttooltests.py | 279
-rw-r--r-- meta/lib/oeqa/selftest/cases/retain.py | 241
-rw-r--r-- meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py | 97
-rw-r--r-- meta/lib/oeqa/selftest/cases/rpmtests.py | 14
-rw-r--r-- meta/lib/oeqa/selftest/cases/runcmd.py | 10
-rw-r--r-- meta/lib/oeqa/selftest/cases/runqemu.py | 83
-rw-r--r-- meta/lib/oeqa/selftest/cases/runtime_test.py | 169
-rw-r--r-- meta/lib/oeqa/selftest/cases/rust.py | 135
-rw-r--r-- meta/lib/oeqa/selftest/cases/sdk.py | 39
-rw-r--r-- meta/lib/oeqa/selftest/cases/selftest.py | 3
-rw-r--r-- meta/lib/oeqa/selftest/cases/signing.py | 18
-rw-r--r-- meta/lib/oeqa/selftest/cases/spdx.py | 288
-rw-r--r-- meta/lib/oeqa/selftest/cases/sstate.py | 67
-rw-r--r-- meta/lib/oeqa/selftest/cases/sstatetests.py | 761
-rw-r--r-- meta/lib/oeqa/selftest/cases/sysroot.py | 59
-rw-r--r-- meta/lib/oeqa/selftest/cases/tinfoil.py | 54
-rw-r--r-- meta/lib/oeqa/selftest/cases/toolchain.py | 71
-rw-r--r-- meta/lib/oeqa/selftest/cases/uboot.py | 98
-rw-r--r-- meta/lib/oeqa/selftest/cases/uki.py | 141
-rw-r--r-- meta/lib/oeqa/selftest/cases/usergrouptests.py | 57
-rw-r--r-- meta/lib/oeqa/selftest/cases/wic.py | 1098
-rw-r--r-- meta/lib/oeqa/selftest/cases/wrapper.py | 16
-rw-r--r-- meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py | 39
-rw-r--r-- meta/lib/oeqa/selftest/context.py | 161
-rw-r--r-- meta/lib/oeqa/targetcontrol.py | 26
-rw-r--r-- meta/lib/oeqa/utils/__init__.py | 15
-rw-r--r-- meta/lib/oeqa/utils/buildproject.py | 3
-rw-r--r-- meta/lib/oeqa/utils/commands.py | 81
-rw-r--r-- meta/lib/oeqa/utils/decorators.py | 85
-rw-r--r-- meta/lib/oeqa/utils/dump.py | 89
-rw-r--r-- meta/lib/oeqa/utils/ftools.py | 2
-rw-r--r-- meta/lib/oeqa/utils/gitarchive.py | 62
-rw-r--r-- meta/lib/oeqa/utils/httpserver.py | 29
-rw-r--r-- meta/lib/oeqa/utils/logparser.py | 98
-rw-r--r-- meta/lib/oeqa/utils/metadata.py | 11
-rw-r--r-- meta/lib/oeqa/utils/network.py | 2
-rw-r--r-- meta/lib/oeqa/utils/nfs.py | 10
-rw-r--r-- meta/lib/oeqa/utils/package_manager.py | 2
-rw-r--r-- meta/lib/oeqa/utils/postactions.py | 102
-rw-r--r-- meta/lib/oeqa/utils/qemurunner.py | 469
-rw-r--r-- meta/lib/oeqa/utils/qemutinyrunner.py | 6
-rw-r--r-- meta/lib/oeqa/utils/sshcontrol.py | 6
-rw-r--r-- meta/lib/oeqa/utils/subprocesstweak.py | 15
-rw-r--r-- meta/lib/oeqa/utils/targetbuild.py | 4
-rw-r--r-- meta/lib/oeqa/utils/testexport.py | 10
-rw-r--r-- meta/lib/patchtest/README.md | 20
-rw-r--r-- meta/lib/patchtest/mbox.py | 108
-rw-r--r-- meta/lib/patchtest/patchtest_parser.py | 78
-rw-r--r-- meta/lib/patchtest/patchtest_patterns.py | 98
-rw-r--r-- meta/lib/patchtest/repo.py | 85
-rw-r--r-- meta/lib/patchtest/requirements.txt | 7
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.fail | 43
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.pass | 43
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.fail | 43
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.pass | 43
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.fail | 67
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.pass | 67
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.fail | 62
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.pass | 66
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_user_tags.fail | 65
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_user_tags.pass | 66
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.fail | 66
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.pass | 66
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMbox.test_series_merge_on_head.1.skip | 35
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMbox.test_series_merge_on_head.2.skip | 41
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.fail | 66
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.pass | 66
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.fail | 66
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.pass | 66
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.1.fail | 65
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.2.fail | 66
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.pass | 66
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.fail | 25
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.pass | 25
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.fail | 28
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.pass | 30
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.fail | 42
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.pass | 43
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.fail | 28
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.pass | 44
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.fail | 42
-rw-r--r-- meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.pass | 43
-rw-r--r-- meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.fail | 66
-rw-r--r-- meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.pass | 66
-rw-r--r-- meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.fail | 65
-rw-r--r-- meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.pass | 66
-rwxr-xr-x meta/lib/patchtest/selftest/selftest | 94
-rw-r--r-- meta/lib/patchtest/tests/__init__.py | 0
-rw-r--r-- meta/lib/patchtest/tests/base.py | 252
-rw-r--r-- meta/lib/patchtest/tests/test_mbox.py | 179
-rw-r--r-- meta/lib/patchtest/tests/test_metadata.py | 212
-rw-r--r-- meta/lib/patchtest/tests/test_patch.py | 131
-rw-r--r-- meta/lib/patchtest/tests/test_python_pylint.py | 65
352 files changed, 31674 insertions, 4818 deletions
diff --git a/meta/lib/bbconfigbuild/configfragments.py b/meta/lib/bbconfigbuild/configfragments.py
new file mode 100644
index 0000000000..61c33ac316
--- /dev/null
+++ b/meta/lib/bbconfigbuild/configfragments.py
@@ -0,0 +1,185 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import logging
+import os
+import sys
+import os.path
+
+import bb.utils
+
+from bblayers.common import LayerPlugin
+
+logger = logging.getLogger('bitbake-config-layers')
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
+
+def plugin_init(plugins):
+    return ConfigFragmentsPlugin()
+
+class ConfigFragmentsPlugin(LayerPlugin):
+    def get_fragment_info(self, path, name):
+        d = bb.data.init()
+        d.setVar('BBPATH', self.tinfoil.config_data.getVar('BBPATH'))
+        bb.parse.handle(path, d, True)
+        summary = d.getVar('BB_CONF_FRAGMENT_SUMMARY')
+        description = d.getVar('BB_CONF_FRAGMENT_DESCRIPTION')
+        if not summary:
+            raise Exception('Please add a one-line summary as BB_CONF_FRAGMENT_SUMMARY = \"...\" variable at the beginning of {}'.format(path))
+
+        if not description:
+            raise Exception('Please add a description as BB_CONF_FRAGMENT_DESCRIPTION = \"...\" variable at the beginning of {}'.format(path))
+
+        return summary, description
+
+    def discover_fragments(self):
+        fragments_path_prefix = self.tinfoil.config_data.getVar('OE_FRAGMENTS_PREFIX')
+        allfragments = {}
+        for layername in self.bbfile_collections:
+            layerdir = self.bbfile_collections[layername]
+            fragments = []
+            for topdir, dirs, files in os.walk(os.path.join(layerdir, fragments_path_prefix)):
+                fragmentdir = os.path.relpath(topdir, os.path.join(layerdir, fragments_path_prefix))
+                for fragmentfile in sorted(files):
+                    if fragmentfile.startswith(".") or not fragmentfile.endswith(".conf"):
+                        continue
+                    fragmentname = os.path.normpath("/".join((layername, fragmentdir, fragmentfile.split('.')[0])))
+                    fragmentpath = os.path.join(topdir, fragmentfile)
+                    fragmentsummary, fragmentdesc = self.get_fragment_info(fragmentpath, fragmentname)
+                    fragments.append({'path':fragmentpath, 'name':fragmentname, 'summary':fragmentsummary, 'description':fragmentdesc})
+            if fragments:
+                allfragments[layername] = {'layerdir':layerdir,'fragments':fragments}
+        return allfragments
+
+    def do_list_fragments(self, args):
+        """ List available configuration fragments """
+        def print_fragment(f, verbose, is_enabled):
+            if not verbose:
+                print('{}\t{}'.format(f['name'], f['summary']))
+            else:
+                print('Name: {}\nPath: {}\nEnabled: {}\nSummary: {}\nDescription:\n{}\n'.format(f['name'], f['path'], 'yes' if is_enabled else 'no', f['summary'],''.join(f['description'])))
+
+        def print_builtin_fragments(builtin, enabled):
+            print('Available built-in fragments:')
+            builtin_dict = {i[0]:i[1] for i in [f.split(':') for f in builtin]}
+            for prefix,var in builtin_dict.items():
+                print('{}/...\tSets {} = ...'.format(prefix, var))
+            print('')
+            enabled_builtin_fragments = [f for f in enabled if self.builtin_fragment_exists(f)]
+            print('Enabled built-in fragments:')
+            for f in enabled_builtin_fragments:
+                prefix, value = f.split('/', 1)
+                print('{}\tSets {} = "{}"'.format(f, builtin_dict[prefix], value))
+            print('')
+
+        all_enabled_fragments = (self.tinfoil.config_data.getVar('OE_FRAGMENTS') or "").split()
+        all_builtin_fragments = (self.tinfoil.config_data.getVar('OE_FRAGMENTS_BUILTIN') or "").split()
+        print_builtin_fragments(all_builtin_fragments, all_enabled_fragments)
+
+        for layername, layerdata in self.discover_fragments().items():
+            layerdir = layerdata['layerdir']
+            fragments = layerdata['fragments']
+            enabled_fragments = [f for f in fragments if f['name'] in all_enabled_fragments]
+            disabled_fragments = [f for f in fragments if f['name'] not in all_enabled_fragments]
+
+            print('Available fragments in {} layer located in {}:\n'.format(layername, layerdir))
+            if enabled_fragments:
+                print('Enabled fragments:')
+                for f in enabled_fragments:
+                    print_fragment(f, args.verbose, is_enabled=True)
+                print('')
+            if disabled_fragments:
+                print('Unused fragments:')
+                for f in disabled_fragments:
+                    print_fragment(f, args.verbose, is_enabled=False)
+                print('')
+
+    def fragment_exists(self, fragmentname):
+        for layername, layerdata in self.discover_fragments().items():
+            for f in layerdata['fragments']:
+                if f['name'] == fragmentname:
+                    return True
+        return False
+
+    def builtin_fragment_exists(self, fragmentname):
+        fragment_prefix = fragmentname.split("/",1)[0]
+        fragment_prefix_defs = set([f.split(':')[0] for f in self.tinfoil.config_data.getVar('OE_FRAGMENTS_BUILTIN').split()])
+        return fragment_prefix in fragment_prefix_defs
+
+    def create_conf(self, confpath):
+        if not os.path.exists(confpath):
+            with open(confpath, 'w') as f:
+                f.write('')
+        with open(confpath, 'r') as f:
+            lines = f.read()
+            if "OE_FRAGMENTS += " not in lines:
+                lines += "\nOE_FRAGMENTS += \"\"\n"
+        with open(confpath, 'w') as f:
+            f.write(lines)
+
+    def do_enable_fragment(self, args):
+        """ Enable a fragment in the local build configuration """
+        def enable_helper(varname, origvalue, op, newlines):
+            enabled_fragments = origvalue.split()
+            for f in args.fragmentname:
+                if f in enabled_fragments:
+                    print("Fragment {} already included in {}".format(f, args.confpath))
+                else:
+                    enabled_fragments.append(f)
+            return " ".join(enabled_fragments), None, 0, True
+
+        for f in args.fragmentname:
+            if not self.fragment_exists(f) and not self.builtin_fragment_exists(f):
+                raise Exception("Fragment {} does not exist; use 'list-fragments' to see the full list.".format(f))
+
+        self.create_conf(args.confpath)
+        modified = bb.utils.edit_metadata_file(args.confpath, ["OE_FRAGMENTS"], enable_helper)
+        if modified:
+            print("Fragment {} added to {}.".format(", ".join(args.fragmentname), args.confpath))
+
+    def do_disable_fragment(self, args):
+        """ Disable a fragment in the local build configuration """
+        def disable_helper(varname, origvalue, op, newlines):
+            enabled_fragments = origvalue.split()
+            for f in args.fragmentname:
+                if f in enabled_fragments:
+                    enabled_fragments.remove(f)
+                else:
+                    print("Fragment {} not currently enabled in {}".format(f, args.confpath))
+            return " ".join(enabled_fragments), None, 0, True
+
+        self.create_conf(args.confpath)
+        modified = bb.utils.edit_metadata_file(args.confpath, ["OE_FRAGMENTS"], disable_helper)
+        if modified:
+            print("Fragment {} removed from {}.".format(", ".join(args.fragmentname), args.confpath))
+
+    def do_disable_all_fragments(self, args):
+        """ Disable all fragments in the local build configuration """
+        def disable_all_helper(varname, origvalue, op, newlines):
+            return "", None, 0, True
+
+        self.create_conf(args.confpath)
+        modified = bb.utils.edit_metadata_file(args.confpath, ["OE_FRAGMENTS"], disable_all_helper)
+        if modified:
+            print("All fragments removed from {}.".format(args.confpath))
+
+    def register_commands(self, sp):
+        default_confpath = os.path.join(os.environ["BBPATH"], "conf/auto.conf")
+
+        parser_list_fragments = self.add_command(sp, 'list-fragments', self.do_list_fragments, parserecipes=False)
+        parser_list_fragments.add_argument("--confpath", default=default_confpath, help='Configuration file which contains a list of enabled fragments (default is {}).'.format(default_confpath))
+        parser_list_fragments.add_argument('--verbose', '-v', action='store_true', help='Print extended descriptions of the fragments')
+
+        parser_enable_fragment = self.add_command(sp, 'enable-fragment', self.do_enable_fragment, parserecipes=False)
+        parser_enable_fragment.add_argument("--confpath", default=default_confpath, help='Configuration file which contains a list of enabled fragments (default is {}).'.format(default_confpath))
+        parser_enable_fragment.add_argument('fragmentname', help='The name of the fragment (use list-fragments to see them)', nargs='+')
+
+        parser_disable_fragment = self.add_command(sp, 'disable-fragment', self.do_disable_fragment, parserecipes=False)
+        parser_disable_fragment.add_argument("--confpath", default=default_confpath, help='Configuration file which contains a list of enabled fragments (default is {}).'.format(default_confpath))
+        parser_disable_fragment.add_argument('fragmentname', help='The name of the fragment', nargs='+')
+
+        parser_disable_all = self.add_command(sp, 'disable-all-fragments', self.do_disable_all_fragments, parserecipes=False)
+        parser_disable_all.add_argument("--confpath", default=default_confpath, help='Configuration file which contains a list of enabled fragments (default is {}).'.format(default_confpath))
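The enable/disable commands above funnel every edit through a callback handed to bb.utils.edit_metadata_file. A minimal, self-contained sketch of that callback logic on plain strings (the helper name and fragment names are illustrative, not part of the commit):

def toggle_fragments(origvalue, names, enable=True):
    # origvalue mirrors the current OE_FRAGMENTS value; names are the
    # fragments given on the command line, as in enable_helper/disable_helper.
    fragments = origvalue.split()
    for name in names:
        if enable and name not in fragments:
            fragments.append(name)
        elif not enable and name in fragments:
            fragments.remove(name)
    return " ".join(fragments)

assert toggle_fragments("", ["core/example"]) == "core/example"
assert toggle_fragments("core/a core/b", ["core/a"], enable=False) == "core/b"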
diff --git a/meta/lib/bblayers/buildconf.py b/meta/lib/bblayers/buildconf.py
new file mode 100644
index 0000000000..722cf0723c
--- /dev/null
+++ b/meta/lib/bblayers/buildconf.py
@@ -0,0 +1,84 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import logging
+import os
+import sys
+
+from bblayers.common import LayerPlugin
+
+logger = logging.getLogger('bitbake-layers')
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
+
+import oe.buildcfg
+
+def plugin_init(plugins):
+    return BuildConfPlugin()
+
+class BuildConfPlugin(LayerPlugin):
+    notes_fixme = """FIXME: Please place here the detailed instructions for using this build configuration.
+They will be shown to the users when they set up their builds via TEMPLATECONF.
+"""
+    summary_fixme = """FIXME: Please place here the short summary of what this build configuration is for.
+It will be shown to the users when they set up their builds via TEMPLATECONF.
+"""
+
+    def _save_conf(self, templatename, templatepath, oecorepath, relpaths_to_oecore):
+        confdir = os.path.join(os.environ["BBPATH"], "conf")
+        destdir = os.path.join(templatepath, "conf", "templates", templatename)
+        os.makedirs(destdir, exist_ok=True)
+
+        with open(os.path.join(confdir, "local.conf")) as src:
+            with open(os.path.join(destdir, "local.conf.sample"), 'w') as dest:
+                dest.write(src.read())
+
+        with open(os.path.join(confdir, "bblayers.conf")) as src:
+            with open(os.path.join(destdir, "bblayers.conf.sample"), 'w') as dest:
+                bblayers_data = src.read()
+
+                for (abspath, relpath) in relpaths_to_oecore:
+                    bblayers_data = bblayers_data.replace(abspath, "##OEROOT##/" + relpath)
+                dest.write(bblayers_data)
+
+        with open(os.path.join(destdir, "conf-summary.txt"), 'w') as dest:
+            dest.write(self.summary_fixme)
+        with open(os.path.join(destdir, "conf-notes.txt"), 'w') as dest:
+            dest.write(self.notes_fixme)
+
+        logger.info("""Configuration template placed into {}
+Please review the files in there, and particularly provide a configuration summary in {}
+and notes in {}
+You can try out the configuration with
+TEMPLATECONF={} . {}/oe-init-build-env build-try-{}"""
+.format(destdir, os.path.join(destdir, "conf-summary.txt"), os.path.join(destdir, "conf-notes.txt"), destdir, oecorepath, templatename))
+
+    def do_save_build_conf(self, args):
+        """ Save the currently active build configuration (conf/local.conf, conf/bblayers.conf) as a template into a layer.\n This template can later be used for setting up builds via TEMPLATECONF. """
+        layers = oe.buildcfg.get_layer_revisions(self.tinfoil.config_data)
+        targetlayer = None
+        oecore = None
+
+        for l in layers:
+            if os.path.abspath(l[0]) == os.path.abspath(args.layerpath):
+                targetlayer = l[0]
+            if l[1] == 'meta':
+                oecore = os.path.dirname(l[0])
+
+        if not targetlayer:
+            logger.error("Layer {} not in one of the currently enabled layers:\n{}".format(args.layerpath, "\n".join([l[0] for l in layers])))
+        elif not oecore:
+            logger.error("Openembedded-core not in one of the currently enabled layers:\n{}".format("\n".join([l[0] for l in layers])))
+        else:
+            relpaths_to_oecore = [(l[0], os.path.relpath(l[0], start=oecore)) for l in layers]
+            self._save_conf(args.templatename, targetlayer, oecore, relpaths_to_oecore)
+
+    def register_commands(self, sp):
+        parser_build_conf = self.add_command(sp, 'save-build-conf', self.do_save_build_conf, parserecipes=False)
+        parser_build_conf.add_argument('layerpath',
+            help='The path to the layer where the configuration template should be saved.')
+        parser_build_conf.add_argument('templatename',
+            help='The name of the configuration template.')
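The core transformation in _save_conf() above is rewriting the absolute layer paths in bblayers.conf to ##OEROOT##-relative ones, so the saved template works from any checkout location. A small self-contained sketch of that substitution (paths are invented examples):

import os

oecore = "/srv/work/poky"                         # invented oe-core parent dir
layers = ["/srv/work/poky/meta", "/srv/work/meta-custom"]
relpaths_to_oecore = [(p, os.path.relpath(p, start=oecore)) for p in layers]

bblayers_data = 'BBLAYERS = "/srv/work/poky/meta /srv/work/meta-custom"'
for abspath, relpath in relpaths_to_oecore:
    bblayers_data = bblayers_data.replace(abspath, "##OEROOT##/" + relpath)
print(bblayers_data)
# BBLAYERS = "##OEROOT##/meta ##OEROOT##/../meta-custom"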
diff --git a/meta/lib/bblayers/create.py b/meta/lib/bblayers/create.py
index 542f31fc81..517554c587 100644
--- a/meta/lib/bblayers/create.py
+++ b/meta/lib/bblayers/create.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
@@ -10,6 +12,7 @@ import shutil
 import bb.utils
 
 from bblayers.common import LayerPlugin
+from bblayers.action import ActionPlugin
 
 logger = logging.getLogger('bitbake-layers')
 
@@ -35,6 +38,7 @@ class CreatePlugin(LayerPlugin):
         bb.utils.mkdirhier(conf)
 
         layername = os.path.basename(os.path.normpath(args.layerdir))
+        layerid = args.layerid if args.layerid is not None else layername
 
         # Create the README from templates/README
         readme_template = read_template('README').format(layername=layername)
@@ -50,11 +54,11 @@ class CreatePlugin(LayerPlugin):
         shutil.copy(license_src, license_dst)
 
         # Get the compat value for core layer.
-        compat = self.tinfoil.config_data.getVar('LAYERSERIES_COMPAT_core') or ""
+        compat = self.tinfoil.config_data.getVar('LAYERSERIES_CORENAMES') or ""
 
         # Create the layer.conf from templates/layer.conf
         layerconf_template = read_template('layer.conf').format(
-            layername=layername, priority=args.priority, compat=compat)
+            layerid=layerid, priority=args.priority, compat=compat)
         layerconf = os.path.join(conf, 'layer.conf')
         with open(layerconf, 'w') as fd:
             fd.write(layerconf_template)
@@ -66,12 +70,21 @@ class CreatePlugin(LayerPlugin):
         with open(os.path.join(example, args.examplerecipe + '_%s.bb') % args.version, 'w') as fd:
             fd.write(example_template)
 
-        logger.plain('Add your new layer with \'bitbake-layers add-layer %s\'' % args.layerdir)
+        if args.add_layer:
+            # Add the layer to bblayers.conf
+            args.layerdir = [layerdir]
+            ActionPlugin.do_add_layer(self, args)
+            logger.plain('Layer added %s' % args.layerdir)
+
+        else:
+            logger.plain('Add your new layer with \'bitbake-layers add-layer %s\'' % args.layerdir)
 
     def register_commands(self, sp):
         parser_create_layer = self.add_command(sp, 'create-layer', self.do_create_layer, parserecipes=False)
         parser_create_layer.add_argument('layerdir', help='Layer directory to create')
-        parser_create_layer.add_argument('--priority', '-p', default=6, help='Layer directory to create')
+        parser_create_layer.add_argument('--add-layer', '-a', action='store_true', help='Add the layer to bblayers.conf after creation')
+        parser_create_layer.add_argument('--layerid', '-i', help='Layer id to use if different from layername')
+        parser_create_layer.add_argument('--priority', '-p', default=6, help='Priority of recipes in layer')
         parser_create_layer.add_argument('--example-recipe-name', '-e', dest='examplerecipe', default='example', help='Filename of the example recipe')
         parser_create_layer.add_argument('--example-recipe-version', '-v', dest='version', default='0.1', help='Version number for the example recipe')
 
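The new --layerid option decouples the BBFILE_COLLECTIONS identifier from the layer directory name, which helps when the directory name is not usable as a BitBake variable-name component. A sketch of the fallback and the template expansion it feeds (the template string is shortened here for illustration):

layername = "meta-custom"          # os.path.basename of the layer directory
layerid_arg = None                 # i.e. --layerid was not passed
layerid = layerid_arg if layerid_arg is not None else layername

template = 'BBFILE_COLLECTIONS += "{layerid}"\nBBFILE_PRIORITY_{layerid} = "{priority}"\n'
print(template.format(layerid=layerid, priority=6))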
diff --git a/meta/lib/bblayers/machines.py b/meta/lib/bblayers/machines.py
new file mode 100644
index 0000000000..5fd970af0e
--- /dev/null
+++ b/meta/lib/bblayers/machines.py
@@ -0,0 +1,37 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import logging
+import pathlib
+
+from bblayers.common import LayerPlugin
+
+logger = logging.getLogger('bitbake-layers')
+
+def plugin_init(plugins):
+    return ShowMachinesPlugin()
+
+class ShowMachinesPlugin(LayerPlugin):
+    def do_show_machines(self, args):
+        """List the machines available in the currently configured layers."""
+
+        for layer_dir in self.bblayers:
+            layer_name = self.get_layer_name(layer_dir)
+
+            if args.layer and args.layer != layer_name:
+                continue
+
+            for p in sorted(pathlib.Path(layer_dir).glob("conf/machine/*.conf")):
+                if args.bare:
+                    logger.plain("%s" % (p.stem))
+                else:
+                    logger.plain("%s (%s)" % (p.stem, layer_name))
+
+
+    def register_commands(self, sp):
+        parser_show_machines = self.add_command(sp, "show-machines", self.do_show_machines)
+        parser_show_machines.add_argument('-b', '--bare', help='output just the machine names, not the source layer', action='store_true')
+        parser_show_machines.add_argument('-l', '--layer', help='Limit to machines in the specified layer')
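show-machines is a pure filesystem scan: each conf/machine/*.conf file in a layer defines one machine, named by the file stem. The same scan outside the plugin, with an invented layer path:

import pathlib

layer_dir = "/srv/work/meta-custom"    # invented layer checkout
for p in sorted(pathlib.Path(layer_dir).glob("conf/machine/*.conf")):
    print(p.stem)                      # e.g. "my-board" for my-board.conf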
diff --git a/meta/lib/bblayers/makesetup.py b/meta/lib/bblayers/makesetup.py
new file mode 100644
index 0000000000..4199b5f069
--- /dev/null
+++ b/meta/lib/bblayers/makesetup.py
@@ -0,0 +1,101 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import logging
+import os
+import sys
+
+import bb.utils
+
+from bblayers.common import LayerPlugin
+
+logger = logging.getLogger('bitbake-layers')
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
+
+import oe.buildcfg
+
+def plugin_init(plugins):
+    return MakeSetupPlugin()
+
+class MakeSetupPlugin(LayerPlugin):
+
+    def _get_remotes_with_url(self, repo_path):
+        remotes = {}
+        for r in oe.buildcfg.get_metadata_git_remotes(repo_path):
+            remotes[r] = {'uri':oe.buildcfg.get_metadata_git_remote_url(repo_path, r)}
+        return remotes
+
+    def _is_submodule(self, repo_path):
+        # This is slightly brittle: git does not offer a way to tell whether
+        # a given repo dir is a submodule checkout, so we need to rely on .git
+        # being a file (rather than a dir like it is in standalone checkouts).
+        # The file typically contains a gitdir pointer to elsewhere.
+        return os.path.isfile(os.path.join(repo_path,".git"))
+
+    def make_repo_config(self, destdir):
+        """ This is a helper function for the writer plugins that discovers currently configured layers.
+        The writers do not have to use it, but it can save a bit of work and avoid duplicated code, hence it is
+        available here. """
+        repos = {}
+        layers = oe.buildcfg.get_layer_revisions(self.tinfoil.config_data)
+        destdir_repo = oe.buildcfg.get_metadata_git_toplevel(destdir)
+
+        for (l_path, l_name, l_branch, l_rev, l_ismodified) in layers:
+            if l_name == 'workspace':
+                continue
+            if l_ismodified:
+                e = "Layer {name} in {path} has uncommitted modifications or is not in a git repository.".format(name=l_name,path=l_path)
+                logger.error(e)
+                raise Exception(e)
+            repo_path = oe.buildcfg.get_metadata_git_toplevel(l_path)
+
+            if self._is_submodule(repo_path):
+                continue
+            if repo_path not in repos.keys():
+                repos[repo_path] = {'path':os.path.basename(repo_path),'git-remote':{
+                    'rev':l_rev,
+                    'branch':l_branch,
+                    'remotes':self._get_remotes_with_url(repo_path),
+                    'describe':oe.buildcfg.get_metadata_git_describe(repo_path)}}
+                if repo_path == destdir_repo:
+                    repos[repo_path]['contains_this_file'] = True
+
+        top_path = os.path.commonpath([os.path.dirname(r) for r in repos.keys()])
+
+        repos_nopaths = {}
+        for r in repos.keys():
+            r_nopath = os.path.basename(r)
+            repos_nopaths[r_nopath] = repos[r]
+            r_relpath = os.path.relpath(r, top_path)
+            repos_nopaths[r_nopath]['path'] = r_relpath
+            repos_nopaths[r_nopath]['originpath'] = r
+        return repos_nopaths
+
+    def do_make_setup(self, args):
+        """ Writes out a configuration file and/or a script that replicate the directory structure and revisions of the layers in a current build. """
+        for p in self.plugins:
+            if str(p) == args.writer:
+                p.do_write(self, args)
+
+    def register_commands(self, sp):
+        parser_setup_layers = self.add_command(sp, 'create-layers-setup', self.do_make_setup, parserecipes=False)
+        parser_setup_layers.add_argument('destdir',
+            help='Directory where to write the output\n(if it is inside one of the layers, the layer becomes a bootstrap repository and thus will be excluded from fetching).')
+        parser_setup_layers.add_argument('--output-prefix', '-o',
+            help='File name prefix for the output files, if the default (setup-layers) is undesirable.')
+
+        self.plugins = []
+
+        for path in (self.tinfoil.config_data.getVar('BBPATH').split(':')):
+            pluginpath = os.path.join(path, 'lib', 'bblayers', 'setupwriters')
+            bb.utils.load_plugins(logger, self.plugins, pluginpath)
+
+        parser_setup_layers.add_argument('--writer', '-w', choices=[str(p) for p in self.plugins], help="Choose the output format (defaults to oe-setup-layers).\n\nCurrently supported options are:\noe-setup-layers - a self-contained python script and a json config for it.\n\n", default="oe-setup-layers")
+
+        for plugin in self.plugins:
+            if hasattr(plugin, 'register_arguments'):
+                plugin.register_arguments(parser_setup_layers)
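make_repo_config() keys repositories by basename and rewrites each path relative to the common parent of all repositories, so the resulting config can be replayed in any directory. The path arithmetic in isolation (repository paths are invented examples):

import os

repos = ["/srv/work/poky", "/srv/work/extra/meta-custom"]
top_path = os.path.commonpath([os.path.dirname(r) for r in repos])
print({os.path.basename(r): os.path.relpath(r, top_path) for r in repos})
# {'poky': 'poky', 'meta-custom': 'extra/meta-custom'}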
diff --git a/meta/lib/bblayers/setupwriters/oe-setup-layers.py b/meta/lib/bblayers/setupwriters/oe-setup-layers.py
new file mode 100644
index 0000000000..8faeabfabc
--- /dev/null
+++ b/meta/lib/bblayers/setupwriters/oe-setup-layers.py
@@ -0,0 +1,122 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import logging
+import os
+import json
+import stat
+
+logger = logging.getLogger('bitbake-layers')
+
+def plugin_init(plugins):
+    return OeSetupLayersWriter()
+
+class OeSetupLayersWriter():
+
+    def __str__(self):
+        return "oe-setup-layers"
+
+    def _write_python(self, input, output):
+        with open(input) as f:
+            script = f.read()
+        with open(output, 'w') as f:
+            f.write(script)
+        st = os.stat(output)
+        os.chmod(output, st.st_mode | stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH)
+
+    def _write_json(self, repos, output):
+        with open(output, 'w') as f:
+            json.dump(repos, f, sort_keys=True, indent=4)
+
+    def _read_repo_config(self, json_path):
+        with open(json_path) as f:
+            json_config = json.load(f)
+
+        supported_versions = ["1.0"]
+        if json_config["version"] not in supported_versions:
+            err = "File {} has version {}, which is not in supported versions: {}".format(json_path, json_config["version"], supported_versions)
+            logger.error(err)
+            raise Exception(err)
+
+        return json_config
+
+    def _modify_repo_config(self, json_config, args):
+        sources = json_config['sources']
+        for pair in args.custom_references:
+            try:
+                repo, rev = pair.split(':', maxsplit=1)
+            except ValueError:
+                err = "Invalid custom reference specified: '{}'. Provide one using 'REPOSITORY:REFERENCE'.".format(pair)
+                logger.error(err)
+                raise Exception(err)
+            if not repo in sources.keys():
+                err = "Repository {} does not exist in setup-layers config".format(repo)
+                logger.error(err)
+                raise Exception(err)
+
+            layer_remote = json_config['sources'][repo]['git-remote']
+            layer_remote['rev'] = rev
+            # Clear describe
+            layer_remote['describe'] = ''
+
+    def do_write(self, parent, args):
+        """ Writes out a python script and a json config that replicate the directory structure and revisions of the layers in a current build. """
+        output = args.output_prefix or "setup-layers"
+        output = os.path.join(os.path.abspath(args.destdir), output)
+
+        if args.update:
+            # Modify existing layers setup
+            if args.custom_references is None:
+                err = "No custom reference specified. Please provide one using '--use-custom-reference REPOSITORY:REFERENCE'."
+                logger.error(err)
+                raise Exception(err)
+
+            json = self._read_repo_config(output + ".json")
+            if not 'sources' in json.keys():
+                err = "File {}.json does not contain valid layer sources.".format(output)
+                logger.error(err)
+                raise Exception(err)
+
+        else:
+            # Create new layers setup
+            if not os.path.exists(args.destdir):
+                os.makedirs(args.destdir)
+            repos = parent.make_repo_config(args.destdir)
+            for r in repos.values():
+                if not r['git-remote']['remotes'] and not r.get('contains_this_file', False):
+                    e = "Layer repository in {path} does not have any remotes configured. Please add at least one with 'git remote add'.".format(path=r['originpath'])
+                    raise Exception(e)
+                del r['originpath']
+            json = {"version":"1.0","sources":repos}
+            if not repos:
+                err = "Could not determine layer sources"
+                logger.error(err)
+                raise Exception(err)
+
+        if args.custom_references is not None:
+            self._modify_repo_config(json, args)
+
+        self._write_json(json, output + ".json")
+        logger.info('Created {}.json'.format(output))
+        if not args.json_only:
+            self._write_python(os.path.join(os.path.dirname(__file__),'../../../../scripts/oe-setup-layers'), output)
+            logger.info('Created {}'.format(output))
+
+    def register_arguments(self, parser):
+        parser.add_argument('--json-only', action='store_true',
+            help='When using the oe-setup-layers writer, write only the layer configuration in json format. Otherwise, also a copy of scripts/oe-setup-layers (from oe-core or poky) is provided, which is a self-contained python script that fetches all the needed layers and sets them to correct revisions using the data from the json.')
+
+        parser.add_argument('--update', '-u',
+            action='store_true',
+            help=("Instead of writing a new json file, update an existing layer setup json file with custom references provided via the '--use-custom-reference' option."
+                  "\nThis will only update repositories for which a custom reference is specified, all other repositories will be left unchanged."))
+        parser.add_argument('--use-custom-reference', '-r',
+            action='append',
+            dest='custom_references',
+            metavar='REPOSITORY:REFERENCE',
+            help=("A pair consisting of a repository and a custom reference to use for it (by default the currently checked out commit id would be written out)."
+                  "\nThis value can be any reference that 'git checkout' would accept, and is not checked for validity."
+                  "\nThis option can be used multiple times."))
diff --git a/meta/lib/bblayers/templates/layer.conf b/meta/lib/bblayers/templates/layer.conf
index e2eaff4346..dddfbf716e 100644
--- a/meta/lib/bblayers/templates/layer.conf
+++ b/meta/lib/bblayers/templates/layer.conf
@@ -5,9 +5,9 @@ BBPATH .= ":${{LAYERDIR}}"
 BBFILES += "${{LAYERDIR}}/recipes-*/*/*.bb \
             ${{LAYERDIR}}/recipes-*/*/*.bbappend"
 
-BBFILE_COLLECTIONS += "{layername}"
-BBFILE_PATTERN_{layername} = "^${{LAYERDIR}}/"
-BBFILE_PRIORITY_{layername} = "{priority}"
+BBFILE_COLLECTIONS += "{layerid}"
+BBFILE_PATTERN_{layerid} = "^${{LAYERDIR}}/"
+BBFILE_PRIORITY_{layerid} = "{priority}"
 
-LAYERDEPENDS_{layername} = "core"
-LAYERSERIES_COMPAT_{layername} = "{compat}"
+LAYERDEPENDS_{layerid} = "core"
+LAYERSERIES_COMPAT_{layerid} = "{compat}"
diff --git a/meta/lib/oe/__init__.py b/meta/lib/oe/__init__.py
index 4e7c09da04..73de774266 100644
--- a/meta/lib/oe/__init__.py
+++ b/meta/lib/oe/__init__.py
@@ -1,6 +1,15 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
 from pkgutil import extend_path
 __path__ = extend_path(__path__, __name__)
+
+# Modules with visitor code need to go first else anything depending on them won't be
+# processed correctly (e.g. qa)
+BBIMPORTS = ["qa", "data", "path", "utils", "types", "package", "packagedata", \
+             "packagegroup", "sstatesig", "lsb", "cachedpath", "license", "qemu", \
+             "reproducible", "rust", "buildcfg", "go", "spdx30_tasks", "spdx_common", \
+             "cve_check", "tune"]
diff --git a/meta/lib/oe/bootfiles.py b/meta/lib/oe/bootfiles.py
new file mode 100644
index 0000000000..7ee148c4e2
--- /dev/null
+++ b/meta/lib/oe/bootfiles.py
@@ -0,0 +1,57 @@
+#
+# SPDX-License-Identifier: MIT
+#
+# Copyright (C) 2024 Marcus Folkesson
+# Author: Marcus Folkesson <marcus.folkesson@gmail.com>
+#
+# Utility functions handling boot files
+#
+# Look into deploy_dir and search for boot_files.
+# Returns a list of tuples with (original filepath relative to
+# deploy_dir, desired filepath renaming)
+#
+# Heavily inspired by bootimg_partition.py
+#
+def get_boot_files(deploy_dir, boot_files):
+    import re
+    import os
+    from glob import glob
+
+    if boot_files is None:
+        return None
+
+    # list of tuples (src_name, dst_name)
+    deploy_files = []
+    for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files):
+        if ';' in src_entry:
+            dst_entry = tuple(src_entry.split(';'))
+            if not dst_entry[0] or not dst_entry[1]:
+                raise ValueError('Malformed boot file entry: %s' % src_entry)
+        else:
+            dst_entry = (src_entry, src_entry)
+
+        deploy_files.append(dst_entry)
+
+    install_files = []
+    for deploy_entry in deploy_files:
+        src, dst = deploy_entry
+        if '*' in src:
+            # by default install files under their basename
+            entry_name_fn = os.path.basename
+            if dst != src:
+                # unless a target name was given, then treat name
+                # as a directory and append a basename
+                entry_name_fn = lambda name: \
+                    os.path.join(dst,
+                                 os.path.basename(name))
+
+            srcs = glob(os.path.join(deploy_dir, src))
+
+            for entry in srcs:
+                src = os.path.relpath(entry, deploy_dir)
+                entry_dst_name = entry_name_fn(entry)
+                install_files.append((src, entry_dst_name))
+        else:
+            install_files.append((src, dst))
+
+    return install_files
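A usage sketch for get_boot_files(): entries without ';' keep their name and 'src;dst' entries are renamed, while entries containing '*' would additionally be expanded with glob() against deploy_dir (the deploy path and file names below are invented):

files = get_boot_files("/srv/deploy", "u-boot.img zImage;kernel.img")
print(files)
# [('u-boot.img', 'u-boot.img'), ('zImage', 'kernel.img')]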
diff --git a/meta/lib/oe/buildcfg.py b/meta/lib/oe/buildcfg.py
new file mode 100644
index 0000000000..85b903fab0
--- /dev/null
+++ b/meta/lib/oe/buildcfg.py
@@ -0,0 +1,79 @@
+
+import os
+import subprocess
+import bb.process
+
+def detect_revision(d):
+    path = get_scmbasepath(d)
+    return get_metadata_git_revision(path)
+
+def detect_branch(d):
+    path = get_scmbasepath(d)
+    return get_metadata_git_branch(path)
+
+def get_scmbasepath(d):
+    return os.path.join(d.getVar('COREBASE'), 'meta')
+
+def get_metadata_git_branch(path):
+    try:
+        rev, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', cwd=path)
+    except (bb.process.ExecutionError, bb.process.NotFoundError):
+        rev = '<unknown>'
+    return rev.strip()
+
+def get_metadata_git_revision(path):
+    try:
+        rev, _ = bb.process.run('git rev-parse HEAD', cwd=path)
+    except (bb.process.ExecutionError, bb.process.NotFoundError):
+        rev = '<unknown>'
+    return rev.strip()
+
+def get_metadata_git_toplevel(path):
+    try:
+        toplevel, _ = bb.process.run('git rev-parse --show-toplevel', cwd=path)
+    except (bb.process.ExecutionError, bb.process.NotFoundError):
+        return ""
+    return toplevel.strip()
+
+def get_metadata_git_remotes(path):
+    try:
+        remotes_list, _ = bb.process.run('git remote', cwd=path)
+        remotes = remotes_list.split()
+    except (bb.process.ExecutionError, bb.process.NotFoundError):
+        remotes = []
+    return remotes
+
+def get_metadata_git_remote_url(path, remote):
+    try:
+        uri, _ = bb.process.run('git remote get-url {remote}'.format(remote=remote), cwd=path)
+    except (bb.process.ExecutionError, bb.process.NotFoundError):
+        return ""
+    return uri.strip()
+
+def get_metadata_git_describe(path):
+    try:
+        describe, _ = bb.process.run('git describe --tags --dirty', cwd=path)
+    except (bb.process.ExecutionError, bb.process.NotFoundError):
+        return ""
+    return describe.strip()
+
+def is_layer_modified(path):
+    try:
+        subprocess.check_output("""cd %s; export PSEUDO_UNLOAD=1; set -e;
+                                git diff --quiet --no-ext-diff
+                                git diff --quiet --no-ext-diff --cached""" % path,
+                                shell=True,
+                                stderr=subprocess.STDOUT)
+        return ""
+    except subprocess.CalledProcessError as ex:
+        # Silently treat errors as "modified", without checking for the
+        # (expected) return code 1 in a modified git repo. For example, we get
+        # output and a 129 return code when a layer isn't a git repo at all.
+        return " -- modified"
+
+def get_layer_revisions(d):
+    layers = (d.getVar("BBLAYERS") or "").split()
+    revisions = []
+    for i in layers:
+        revisions.append((i, os.path.basename(i), get_metadata_git_branch(i).strip(), get_metadata_git_revision(i), is_layer_modified(i)))
+    return revisions
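A sketch of consuming get_layer_revisions(), which returns one (path, name, branch, revision, modified-marker) tuple per configured layer; the wrapper function is illustrative and assumes d is a BitBake datastore with BBLAYERS set:

def print_layer_summary(d):
    # d is expected to be a BitBake datastore, e.g. tinfoil.config_data
    for path, name, branch, rev, modified in get_layer_revisions(d):
        print("%-20s %-12s %s%s" % (name, branch, rev[:10], modified))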
diff --git a/meta/lib/oe/buildhistory_analysis.py b/meta/lib/oe/buildhistory_analysis.py
index b1856846b6..4edad01580 100644
--- a/meta/lib/oe/buildhistory_analysis.py
+++ b/meta/lib/oe/buildhistory_analysis.py
@@ -562,7 +562,7 @@ def compare_siglists(a_blob, b_blob, taskdiff=False):
         elif not hash2 in hashfiles:
             out.append("Unable to find matching sigdata for %s with hash %s" % (desc, hash2))
         else:
-            out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb, collapsed=True)
+            out2 = bb.siggen.compare_sigfiles(hashfiles[hash1]['path'], hashfiles[hash2]['path'], recursecb, collapsed=True)
             for line in out2:
                 m = hashlib.sha256()
                 m.update(line.encode('utf-8'))
diff --git a/meta/lib/buildstats.py b/meta/lib/oe/buildstats.py
index 8627ed3c31..2700245ec6 100644
--- a/meta/lib/buildstats.py
+++ b/meta/lib/oe/buildstats.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 # Implements system state sampling. Called by buildstats.bbclass.
@@ -8,19 +10,35 @@
 import time
 import re
 import bb.event
+from collections import deque
 
 class SystemStats:
     def __init__(self, d):
         bn = d.getVar('BUILDNAME')
         bsdir = os.path.join(d.getVar('BUILDSTATS_BASE'), bn)
         bb.utils.mkdirhier(bsdir)
+        file_handlers = [('diskstats', self._reduce_diskstats),
+                         ('meminfo', self._reduce_meminfo),
+                         ('stat', self._reduce_stat),
+                         ('net/dev', self._reduce_net)]
+
+        # Some hosts like openSUSE have readable /proc/pressure files
+        # but throw errors when these files are opened. Catch these errors
+        # and ensure that the reduced_proc_pressure directory is not created.
+        if os.path.exists("/proc/pressure"):
+            try:
+                with open('/proc/pressure/cpu', 'rb') as source:
+                    source.read()
+                pressuredir = os.path.join(bsdir, 'reduced_proc_pressure')
+                bb.utils.mkdirhier(pressuredir)
+                file_handlers.extend([('pressure/cpu', self._reduce_pressure),
+                                      ('pressure/io', self._reduce_pressure),
+                                      ('pressure/memory', self._reduce_pressure)])
+            except Exception:
+                pass
 
         self.proc_files = []
-        for filename, handler in (
-            ('diskstats', self._reduce_diskstats),
-            ('meminfo', self._reduce_meminfo),
-            ('stat', self._reduce_stat),
-        ):
+        for filename, handler in (file_handlers):
             # The corresponding /proc files might not exist on the host.
             # For example, /proc/diskstats is not available in virtualized
             # environments like Linux-VServer. Silently skip collecting
@@ -31,30 +49,42 @@ class SystemStats:
31 # not strictly necessary, but using it makes the class 49 # not strictly necessary, but using it makes the class
32 # more robust should two processes ever write 50 # more robust should two processes ever write
33 # concurrently. 51 # concurrently.
34 destfile = os.path.join(bsdir, '%sproc_%s.log' % ('reduced_' if handler else '', filename)) 52 if filename == 'net/dev':
53 destfile = os.path.join(bsdir, 'reduced_proc_net.log')
54 else:
55 destfile = os.path.join(bsdir, '%sproc_%s.log' % ('reduced_' if handler else '', filename))
35 self.proc_files.append((filename, open(destfile, 'ab'), handler)) 56 self.proc_files.append((filename, open(destfile, 'ab'), handler))
36 self.monitor_disk = open(os.path.join(bsdir, 'monitor_disk.log'), 'ab') 57 self.monitor_disk = open(os.path.join(bsdir, 'monitor_disk.log'), 'ab')
37 # Last time that we sampled /proc data resp. recorded disk monitoring data. 58 # Last time that we sampled /proc data resp. recorded disk monitoring data.
38 self.last_proc = 0 59 self.last_proc = 0
39 self.last_disk_monitor = 0 60 self.last_disk_monitor = 0
 40 # Minimum number of seconds between recording a sample. This 61 # Minimum number of seconds between recording a sample. This becomes relevant when we get
 41 # becomes relevant when we get called very often while many 62 # called very often while many short tasks get started. Sampling during quiet periods
42 # short tasks get started. Sampling during quiet periods
43 # depends on the heartbeat event, which fires less often. 63 # depends on the heartbeat event, which fires less often.
44 self.min_seconds = 1 64 # By default, the Heartbeat events occur roughly once every second but the actual time
45 65 # between these events deviates by a few milliseconds, in most cases. Hence
 46 self.meminfo_regex = re.compile(b'^(MemTotal|MemFree|Buffers|Cached|SwapTotal|SwapFree):\s*(\d+)') 66 # pick a somewhat arbitrary tolerance such that we sample a large majority
47 self.diskstats_regex = re.compile(b'^([hsv]d.|mtdblock\d|mmcblk\d|cciss/c\d+d\d+.*)$') 67 # of the Heartbeat events. This ignores rare events that fall outside the minimum
 68 # and may lead to an extra sample in a given second every so often. However, it allows for fairly
69 # consistent intervals between samples without missing many events.
70 self.tolerance = 0.01
71 self.min_seconds = 1.0 - self.tolerance
72
73 self.meminfo_regex = re.compile(rb'^(MemTotal|MemFree|Buffers|Cached|SwapTotal|SwapFree):\s*(\d+)')
74 self.diskstats_regex = re.compile(rb'^([hsv]d.|mtdblock\d|mmcblk\d|cciss/c\d+d\d+|nvme\d+n\d+.*)$')
48 self.diskstats_ltime = None 75 self.diskstats_ltime = None
49 self.diskstats_data = None 76 self.diskstats_data = None
50 self.stat_ltimes = None 77 self.stat_ltimes = None
78 # Last time we sampled /proc/pressure. All resources stored in a single dict with the key as filename
79 self.last_pressure = {"pressure/cpu": None, "pressure/io": None, "pressure/memory": None}
80 self.net_stats = {}
51 81
52 def close(self): 82 def close(self):
53 self.monitor_disk.close() 83 self.monitor_disk.close()
54 for _, output, _ in self.proc_files: 84 for _, output, _ in self.proc_files:
55 output.close() 85 output.close()
56 86
57 def _reduce_meminfo(self, time, data): 87 def _reduce_meminfo(self, time, data, filename):
58 """ 88 """
59 Extracts 'MemTotal', 'MemFree', 'Buffers', 'Cached', 'SwapTotal', 'SwapFree' 89 Extracts 'MemTotal', 'MemFree', 'Buffers', 'Cached', 'SwapTotal', 'SwapFree'
60 and writes their values into a single line, in that order. 90 and writes their values into a single line, in that order.
@@ -69,13 +99,46 @@ class SystemStats:
69 b' '.join([values[x] for x in 99 b' '.join([values[x] for x in
70 (b'MemTotal', b'MemFree', b'Buffers', b'Cached', b'SwapTotal', b'SwapFree')]) + b'\n') 100 (b'MemTotal', b'MemFree', b'Buffers', b'Cached', b'SwapTotal', b'SwapFree')]) + b'\n')
71 101
102 def _reduce_net(self, time, data, filename):
103 data = data.split(b'\n')
104 for line in data[2:]:
105 if b":" not in line:
106 continue
107 try:
108 parts = line.split()
109 iface = (parts[0].strip(b':')).decode('ascii')
110 receive_bytes = int(parts[1])
111 transmit_bytes = int(parts[9])
112 except Exception:
113 continue
114
115 if iface not in self.net_stats:
116 self.net_stats[iface] = deque(maxlen=2)
117 self.net_stats[iface].append((receive_bytes, transmit_bytes, 0, 0))
118 prev = self.net_stats[iface][-1] if self.net_stats[iface] else (0, 0, 0, 0)
119 receive_diff = receive_bytes - prev[0]
120 transmit_diff = transmit_bytes - prev[1]
121 self.net_stats[iface].append((
122 receive_bytes,
123 transmit_bytes,
124 receive_diff,
125 transmit_diff
126 ))
127
128 result_str = "\n".join(
129 f"{iface}: {net_data[-1][0]} {net_data[-1][1]} {net_data[-1][2]} {net_data[-1][3]}"
130 for iface, net_data in self.net_stats.items()
131 ) + "\n"
132
133 return time, result_str.encode('ascii')
134
72 def _diskstats_is_relevant_line(self, linetokens): 135 def _diskstats_is_relevant_line(self, linetokens):
73 if len(linetokens) != 14: 136 if len(linetokens) < 14:
74 return False 137 return False
75 disk = linetokens[2] 138 disk = linetokens[2]
76 return self.diskstats_regex.match(disk) 139 return self.diskstats_regex.match(disk)
77 140
78 def _reduce_diskstats(self, time, data): 141 def _reduce_diskstats(self, time, data, filename):
79 relevant_tokens = filter(self._diskstats_is_relevant_line, map(lambda x: x.split(), data.split(b'\n'))) 142 relevant_tokens = filter(self._diskstats_is_relevant_line, map(lambda x: x.split(), data.split(b'\n')))
80 diskdata = [0] * 3 143 diskdata = [0] * 3
81 reduced = None 144 reduced = None
@@ -104,10 +167,10 @@ class SystemStats:
104 return reduced 167 return reduced
105 168
106 169
107 def _reduce_nop(self, time, data): 170 def _reduce_nop(self, time, data, filename):
108 return (time, data) 171 return (time, data)
109 172
110 def _reduce_stat(self, time, data): 173 def _reduce_stat(self, time, data, filename):
111 if not data: 174 if not data:
112 return None 175 return None
113 # CPU times {user, nice, system, idle, io_wait, irq, softirq} from first line 176 # CPU times {user, nice, system, idle, io_wait, irq, softirq} from first line
@@ -126,14 +189,41 @@ class SystemStats:
126 self.stat_ltimes = times 189 self.stat_ltimes = times
127 return reduced 190 return reduced
128 191
192 def _reduce_pressure(self, time, data, filename):
193 """
194 Return reduced pressure: {avg10, avg60, avg300} and delta total compared to the previous sample
195 for the cpu, io and memory resources. A common function is used for all 3 resources since the
196 format of the /proc/pressure file is the same in each case.
197 """
198 if not data:
199 return None
200 tokens = data.split(b'\n', 1)[0].split()
201 avg10 = float(tokens[1].split(b'=')[1])
202 avg60 = float(tokens[2].split(b'=')[1])
203 avg300 = float(tokens[3].split(b'=')[1])
204 total = int(tokens[4].split(b'=')[1])
205
206 reduced = None
207 if self.last_pressure[filename]:
208 delta = total - self.last_pressure[filename]
209 reduced = (time, (avg10, avg60, avg300, delta))
210 self.last_pressure[filename] = total
211 return reduced
212
129 def sample(self, event, force): 213 def sample(self, event, force):
214 """
215 Collect and log proc or disk_monitor stats periodically.
 216 Return True if a new sample was collected and hence last_proc or last_disk_monitor
 217 was updated.
218 """
219 retval = False
130 now = time.time() 220 now = time.time()
131 if (now - self.last_proc > self.min_seconds) or force: 221 if (now - self.last_proc > self.min_seconds) or force:
132 for filename, output, handler in self.proc_files: 222 for filename, output, handler in self.proc_files:
133 with open(os.path.join('/proc', filename), 'rb') as input: 223 with open(os.path.join('/proc', filename), 'rb') as input:
134 data = input.read() 224 data = input.read()
135 if handler: 225 if handler:
136 reduced = handler(now, data) 226 reduced = handler(now, data, filename)
137 else: 227 else:
138 reduced = (now, data) 228 reduced = (now, data)
139 if reduced: 229 if reduced:
@@ -150,6 +240,7 @@ class SystemStats:
150 data + 240 data +
151 b'\n') 241 b'\n')
152 self.last_proc = now 242 self.last_proc = now
243 retval = True
153 244
154 if isinstance(event, bb.event.MonitorDiskEvent) and \ 245 if isinstance(event, bb.event.MonitorDiskEvent) and \
155 ((now - self.last_disk_monitor > self.min_seconds) or force): 246 ((now - self.last_disk_monitor > self.min_seconds) or force):
@@ -159,3 +250,5 @@ class SystemStats:
159 for dev, sample in event.disk_usage.items()]).encode('ascii') + 250 for dev, sample in event.disk_usage.items()]).encode('ascii') +
160 b'\n') 251 b'\n')
161 self.last_disk_monitor = now 252 self.last_disk_monitor = now
253 retval = True
254 return retval \ No newline at end of file
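
The new _reduce_pressure() handler condenses the first line of a /proc/pressure
file into the three averages plus the delta of the running total. A minimal
standalone sketch of that reduction (the sample lines are invented; the real
class keeps per-file state in self.last_pressure):

    def reduce_pressure_line(data, prev_total=None):
        # First line looks like: b"some avg10=0.10 avg60=0.05 avg300=0.01 total=1000"
        tokens = data.split(b'\n', 1)[0].split()
        avg10, avg60, avg300 = (float(t.split(b'=')[1]) for t in tokens[1:4])
        total = int(tokens[4].split(b'=')[1])
        # No delta can be computed for the very first sample
        reduced = None if prev_total is None else (avg10, avg60, avg300, total - prev_total)
        return reduced, total

    _, total = reduce_pressure_line(b"some avg10=0.10 avg60=0.05 avg300=0.01 total=1000")
    reduced, _ = reduce_pressure_line(b"some avg10=0.20 avg60=0.07 avg300=0.02 total=1750", total)
    print(reduced)  # (0.2, 0.07, 0.02, 750)
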
diff --git a/meta/lib/oe/cachedpath.py b/meta/lib/oe/cachedpath.py
index 254257a83f..68c85807d9 100644
--- a/meta/lib/oe/cachedpath.py
+++ b/meta/lib/oe/cachedpath.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4# Based on standard python library functions but avoid 6# Based on standard python library functions but avoid
@@ -109,9 +111,13 @@ class CachedPath(object):
109 return True 111 return True
110 return False 112 return False
111 113
 114 # WARNING - this is not currently a drop-in replacement since it returns False
 115 # rather than raising exceptions.
112 def stat(self, path): 116 def stat(self, path):
113 return self.callstat(path) 117 return self.callstat(path)
114 118
 119 # WARNING - this is not currently a drop-in replacement since it returns False
 120 # rather than raising exceptions.
115 def lstat(self, path): 121 def lstat(self, path):
116 return self.calllstat(path) 122 return self.calllstat(path)
117 123
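
The warnings above flag that these wrappers return False instead of raising. A
small sketch of the difference (callstat_like is a stand-in for the cached
implementation):

    import os

    def callstat_like(path):
        try:
            return os.stat(path)
        except OSError:
            return False  # cached-path behaviour: signal failure, don't raise

    try:
        os.stat("/definitely/missing")
    except FileNotFoundError as e:
        print("os.stat raised:", e.strerror)
    print("cached-style result:", callstat_like("/definitely/missing"))
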
diff --git a/meta/lib/oe/classextend.py b/meta/lib/oe/classextend.py
index d3d8fbe724..8ae5d3b715 100644
--- a/meta/lib/oe/classextend.py
+++ b/meta/lib/oe/classextend.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -30,6 +32,9 @@ class ClassExtender(object):
30 if name.endswith("-" + self.extname): 32 if name.endswith("-" + self.extname):
31 name = name.replace("-" + self.extname, "") 33 name = name.replace("-" + self.extname, "")
32 if name.startswith("virtual/"): 34 if name.startswith("virtual/"):
 35 # Assume a large number of dashes means a triplet is present and we don't need to convert
36 if name.count("-") >= 3 and name.endswith(("-go",)):
37 return name
33 subs = name.split("/", 1)[1] 38 subs = name.split("/", 1)[1]
34 if not subs.startswith(self.extname): 39 if not subs.startswith(self.extname):
35 return "virtual/" + self.extname + "-" + subs 40 return "virtual/" + self.extname + "-" + subs
@@ -87,7 +92,7 @@ class ClassExtender(object):
87 def map_depends_variable(self, varname, suffix = ""): 92 def map_depends_variable(self, varname, suffix = ""):
88 # We need to preserve EXTENDPKGV so it can be expanded correctly later 93 # We need to preserve EXTENDPKGV so it can be expanded correctly later
89 if suffix: 94 if suffix:
90 varname = varname + "_" + suffix 95 varname = varname + ":" + suffix
91 orig = self.d.getVar("EXTENDPKGV", False) 96 orig = self.d.getVar("EXTENDPKGV", False)
92 self.d.setVar("EXTENDPKGV", "EXTENDPKGV") 97 self.d.setVar("EXTENDPKGV", "EXTENDPKGV")
93 deps = self.d.getVar(varname) 98 deps = self.d.getVar(varname)
@@ -142,15 +147,13 @@ class ClassExtender(object):
142 if pkg_mapping[0].startswith("${") and pkg_mapping[0].endswith("}"): 147 if pkg_mapping[0].startswith("${") and pkg_mapping[0].endswith("}"):
143 continue 148 continue
144 for subs in variables: 149 for subs in variables:
145 self.d.renameVar("%s_%s" % (subs, pkg_mapping[0]), "%s_%s" % (subs, pkg_mapping[1])) 150 self.d.renameVar("%s:%s" % (subs, pkg_mapping[0]), "%s:%s" % (subs, pkg_mapping[1]))
146 151
147class NativesdkClassExtender(ClassExtender): 152class NativesdkClassExtender(ClassExtender):
148 def map_depends(self, dep): 153 def map_depends(self, dep):
149 if dep.startswith(self.extname): 154 if dep.startswith(self.extname):
150 return dep 155 return dep
151 if dep.endswith(("-gcc", "-g++")): 156 if dep.endswith(("-native", "-native-runtime")) or ('nativesdk-' in dep) or ('-cross-' in dep) or ('-crosssdk-' in dep):
152 return dep + "-crosssdk"
153 elif dep.endswith(("-native", "-native-runtime")) or ('nativesdk-' in dep) or ('-cross-' in dep) or ('-crosssdk-' in dep):
154 return dep 157 return dep
155 else: 158 else:
156 return self.extend_name(dep) 159 return self.extend_name(dep)
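
The new triplet check in extend_name() short-circuits provider names such as
virtual/<triplet>-go. A standalone sketch of just that mapping rule (the
extname and sample names are illustrative):

    def extend_virtual(name, extname="nativesdk"):
        # Names already carrying a target triplet (many dashes, "-go" suffix)
        # are left alone; everything else gains the extension prefix.
        if name.count("-") >= 3 and name.endswith(("-go",)):
            return name
        subs = name.split("/", 1)[1]
        if not subs.startswith(extname):
            return "virtual/" + extname + "-" + subs
        return name

    print(extend_virtual("virtual/kernel"))                # virtual/nativesdk-kernel
    print(extend_virtual("virtual/x86_64-poky-linux-go"))  # unchanged
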
diff --git a/meta/lib/oe/classutils.py b/meta/lib/oe/classutils.py
index 08bb66b365..ec3f6ad720 100644
--- a/meta/lib/oe/classutils.py
+++ b/meta/lib/oe/classutils.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
diff --git a/meta/lib/oe/copy_buildsystem.py b/meta/lib/oe/copy_buildsystem.py
index 31a84f5b06..ced751b835 100644
--- a/meta/lib/oe/copy_buildsystem.py
+++ b/meta/lib/oe/copy_buildsystem.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4# This class should provide easy access to the different aspects of the 6# This class should provide easy access to the different aspects of the
@@ -20,7 +22,7 @@ def _smart_copy(src, dest):
20 mode = os.stat(src).st_mode 22 mode = os.stat(src).st_mode
21 if stat.S_ISDIR(mode): 23 if stat.S_ISDIR(mode):
22 bb.utils.mkdirhier(dest) 24 bb.utils.mkdirhier(dest)
23 cmd = "tar --exclude='.git' --xattrs --xattrs-include='*' -chf - -C %s -p . \ 25 cmd = "tar --exclude='.git' --exclude='__pycache__' --xattrs --xattrs-include='*' -cf - -C %s -p . \
24 | tar --xattrs --xattrs-include='*' -xf - -C %s" % (src, dest) 26 | tar --xattrs --xattrs-include='*' -xf - -C %s" % (src, dest)
25 subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) 27 subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
26 else: 28 else:
@@ -45,9 +47,6 @@ class BuildSystem(object):
45 47
46 corebase = os.path.abspath(self.d.getVar('COREBASE')) 48 corebase = os.path.abspath(self.d.getVar('COREBASE'))
47 layers.append(corebase) 49 layers.append(corebase)
48 # Get relationship between TOPDIR and COREBASE
49 # Layers should respect it
50 corebase_relative = os.path.dirname(os.path.relpath(os.path.abspath(self.d.getVar('TOPDIR')), corebase))
51 # The bitbake build system uses the meta-skeleton layer as a layout 50 # The bitbake build system uses the meta-skeleton layer as a layout
 52 # for common recipes, e.g. the recipetool script to create kernel recipes 51 # for common recipes, e.g. the recipetool script to create kernel recipes
53 # Add the meta-skeleton layer to be included as part of the eSDK installation 52 # Add the meta-skeleton layer to be included as part of the eSDK installation
@@ -100,11 +99,10 @@ class BuildSystem(object):
100 layerdestpath = destdir 99 layerdestpath = destdir
101 if corebase == os.path.dirname(layer): 100 if corebase == os.path.dirname(layer):
102 layerdestpath += '/' + os.path.basename(corebase) 101 layerdestpath += '/' + os.path.basename(corebase)
103 else: 102 # If the layer is located somewhere under the same parent directory
104 layer_relative = os.path.relpath(layer, corebase) 103 # as corebase we keep the layer structure.
105 if os.path.dirname(layer_relative) == corebase_relative: 104 elif os.path.commonpath([layer, corebase]) == os.path.dirname(corebase):
106 layer_relative = os.path.dirname(corebase_relative) + '/' + layernewname 105 layer_relative = os.path.relpath(layer, os.path.dirname(corebase))
107 layer_relative = os.path.basename(corebase) + '/' + layer_relative
108 if os.path.dirname(layer_relative) != layernewname: 106 if os.path.dirname(layer_relative) != layernewname:
109 layerdestpath += '/' + os.path.dirname(layer_relative) 107 layerdestpath += '/' + os.path.dirname(layer_relative)
110 108
@@ -195,13 +193,17 @@ def prune_lockedsigs(excluded_tasks, excluded_targets, lockedsigs, onlynative, p
195 else: 193 else:
196 f.write(line) 194 f.write(line)
197 invalue = False 195 invalue = False
198 elif line.startswith('SIGGEN_LOCKEDSIGS'): 196 elif line.startswith('SIGGEN_LOCKEDSIGS_t'):
199 invalue = True 197 invalue = True
200 f.write(line) 198 f.write(line)
199 else:
200 invalue = False
201 f.write(line)
201 202
202def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_output, copy_output=None): 203def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_output, copy_output=None):
203 merged = {} 204 merged = {}
204 arch_order = [] 205 arch_order = []
206 otherdata = []
205 with open(lockedsigs_main, 'r') as f: 207 with open(lockedsigs_main, 'r') as f:
206 invalue = None 208 invalue = None
207 for line in f: 209 for line in f:
@@ -214,6 +216,9 @@ def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_outpu
214 invalue = line[18:].split('=', 1)[0].rstrip() 216 invalue = line[18:].split('=', 1)[0].rstrip()
215 merged[invalue] = [] 217 merged[invalue] = []
216 arch_order.append(invalue) 218 arch_order.append(invalue)
219 else:
220 invalue = None
221 otherdata.append(line)
217 222
218 with open(lockedsigs_extra, 'r') as f: 223 with open(lockedsigs_extra, 'r') as f:
219 invalue = None 224 invalue = None
@@ -248,6 +253,7 @@ def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_outpu
248 f.write(' "\n') 253 f.write(' "\n')
249 fulltypes.append(typename) 254 fulltypes.append(typename)
250 f.write('SIGGEN_LOCKEDSIGS_TYPES = "%s"\n' % ' '.join(fulltypes)) 255 f.write('SIGGEN_LOCKEDSIGS_TYPES = "%s"\n' % ' '.join(fulltypes))
256 f.write('\n' + ''.join(otherdata))
251 257
252 if copy_output: 258 if copy_output:
253 write_sigs_file(copy_output, list(tocopy.keys()), tocopy) 259 write_sigs_file(copy_output, list(tocopy.keys()), tocopy)
@@ -259,7 +265,7 @@ def create_locked_sstate_cache(lockedsigs, input_sstate_cache, output_sstate_cac
259 bb.note('Generating sstate-cache...') 265 bb.note('Generating sstate-cache...')
260 266
261 nativelsbstring = d.getVar('NATIVELSBSTRING') 267 nativelsbstring = d.getVar('NATIVELSBSTRING')
262 bb.process.run("gen-lockedsig-cache %s %s %s %s %s" % (lockedsigs, input_sstate_cache, output_sstate_cache, nativelsbstring, filterfile or '')) 268 bb.process.run("PYTHONDONTWRITEBYTECODE=1 gen-lockedsig-cache %s %s %s %s %s" % (lockedsigs, input_sstate_cache, output_sstate_cache, nativelsbstring, filterfile or ''))
263 if fixedlsbstring and nativelsbstring != fixedlsbstring: 269 if fixedlsbstring and nativelsbstring != fixedlsbstring:
264 nativedir = output_sstate_cache + '/' + nativelsbstring 270 nativedir = output_sstate_cache + '/' + nativelsbstring
265 if os.path.isdir(nativedir): 271 if os.path.isdir(nativedir):
@@ -286,7 +292,7 @@ def check_sstate_task_list(d, targets, filteroutfile, cmdprefix='', cwd=None, lo
286 logparam = '-l %s' % logfile 292 logparam = '-l %s' % logfile
287 else: 293 else:
288 logparam = '' 294 logparam = ''
289 cmd = "%sBB_SETSCENE_ENFORCE=1 PSEUDO_DISABLED=1 oe-check-sstate %s -s -o %s %s" % (cmdprefix, targets, filteroutfile, logparam) 295 cmd = "%sPYTHONDONTWRITEBYTECODE=1 BB_SETSCENE_ENFORCE=1 PSEUDO_DISABLED=1 oe-check-sstate %s -s -o %s %s" % (cmdprefix, targets, filteroutfile, logparam)
290 env = dict(d.getVar('BB_ORIGENV', False)) 296 env = dict(d.getVar('BB_ORIGENV', False))
291 env.pop('BUILDDIR', '') 297 env.pop('BUILDDIR', '')
292 env.pop('BBPATH', '') 298 env.pop('BBPATH', '')
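
prune_lockedsigs() and merge_lockedsigs() now key on the narrower
SIGGEN_LOCKEDSIGS_t prefix so that unrelated assignments such as
SIGGEN_LOCKEDSIGS_TYPES pass through as otherdata. A small sketch of that
prefix-based split (the sample file content is invented):

    sample = (
        'SIGGEN_LOCKEDSIGS_t-core2-64 = "\\\n'
        '    gcc:do_compile:abc123 \\\n'
        '    "\n'
        'SIGGEN_LOCKEDSIGS_TYPES = "t-core2-64"\n'
    )

    invalue = False
    sigs, otherdata = [], []
    for line in sample.splitlines(keepends=True):
        if invalue:
            if line.strip().endswith('"'):  # closing quote ends the block
                invalue = False
            else:
                sigs.append(line.strip().rstrip('\\').strip())
        elif line.startswith('SIGGEN_LOCKEDSIGS_t'):
            invalue = True
        else:
            invalue = False
            otherdata.append(line)

    print(sigs)       # ['gcc:do_compile:abc123']
    print(otherdata)  # ['SIGGEN_LOCKEDSIGS_TYPES = "t-core2-64"\n']
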
diff --git a/meta/lib/oe/cve_check.py b/meta/lib/oe/cve_check.py
index ce755f940a..ae194f27cf 100644
--- a/meta/lib/oe/cve_check.py
+++ b/meta/lib/oe/cve_check.py
@@ -1,7 +1,15 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
1import collections 7import collections
2import re
3import itertools
4import functools 8import functools
9import itertools
10import os.path
11import re
12import oe.patch
5 13
6_Version = collections.namedtuple( 14_Version = collections.namedtuple(
7 "_Version", ["release", "patch_l", "pre_l", "pre_v"] 15 "_Version", ["release", "patch_l", "pre_l", "pre_v"]
@@ -11,8 +19,13 @@ _Version = collections.namedtuple(
11class Version(): 19class Version():
12 20
13 def __init__(self, version, suffix=None): 21 def __init__(self, version, suffix=None):
22
23 suffixes = ["alphabetical", "patch"]
24
14 if str(suffix) == "alphabetical": 25 if str(suffix) == "alphabetical":
15 version_pattern = r"""r?v?(?:(?P<release>[0-9]+(?:[-\.][0-9]+)*)(?P<patch>[-_\.]?(?P<patch_l>[a-z]))?(?P<pre>[-_\.]?(?P<pre_l>(rc|alpha|beta|pre|preview|dev))[-_\.]?(?P<pre_v>[0-9]+)?)?)(.*)?""" 26 version_pattern = r"""r?v?(?:(?P<release>[0-9]+(?:[-\.][0-9]+)*)(?P<patch>[-_\.]?(?P<patch_l>[a-z]))?(?P<pre>[-_\.]?(?P<pre_l>(rc|alpha|beta|pre|preview|dev))[-_\.]?(?P<pre_v>[0-9]+)?)?)(.*)?"""
27 elif str(suffix) == "patch":
28 version_pattern = r"""r?v?(?:(?P<release>[0-9]+(?:[-\.][0-9]+)*)(?P<patch>[-_\.]?(p|patch)(?P<patch_l>[0-9]+))?(?P<pre>[-_\.]?(?P<pre_l>(rc|alpha|beta|pre|preview|dev))[-_\.]?(?P<pre_v>[0-9]+)?)?)(.*)?"""
16 else: 29 else:
17 version_pattern = r"""r?v?(?:(?P<release>[0-9]+(?:[-\.][0-9]+)*)(?P<pre>[-_\.]?(?P<pre_l>(rc|alpha|beta|pre|preview|dev))[-_\.]?(?P<pre_v>[0-9]+)?)?)(.*)?""" 30 version_pattern = r"""r?v?(?:(?P<release>[0-9]+(?:[-\.][0-9]+)*)(?P<pre>[-_\.]?(?P<pre_l>(rc|alpha|beta|pre|preview|dev))[-_\.]?(?P<pre_v>[0-9]+)?)?)(.*)?"""
18 regex = re.compile(r"^\s*" + version_pattern + r"\s*$", re.VERBOSE | re.IGNORECASE) 31 regex = re.compile(r"^\s*" + version_pattern + r"\s*$", re.VERBOSE | re.IGNORECASE)
@@ -23,7 +36,7 @@ class Version():
23 36
24 self._version = _Version( 37 self._version = _Version(
25 release=tuple(int(i) for i in match.group("release").replace("-",".").split(".")), 38 release=tuple(int(i) for i in match.group("release").replace("-",".").split(".")),
26 patch_l=match.group("patch_l") if str(suffix) == "alphabetical" and match.group("patch_l") else "", 39 patch_l=match.group("patch_l") if str(suffix) in suffixes and match.group("patch_l") else "",
27 pre_l=match.group("pre_l"), 40 pre_l=match.group("pre_l"),
28 pre_v=match.group("pre_v") 41 pre_v=match.group("pre_v")
29 ) 42 )
@@ -58,3 +71,308 @@ def _cmpkey(release, patch_l, pre_l, pre_v):
58 else: 71 else:
59 _pre = float(pre_v) if pre_v else float('-inf') 72 _pre = float(pre_v) if pre_v else float('-inf')
60 return _release, _patch, _pre 73 return _release, _patch, _pre
74
75
76def parse_cve_from_filename(patch_filename):
77 """
78 Parses CVE ID from the filename
79
 80 Matches the last "CVE-YYYY-ID" in the file name, even if written
 81 in lowercase. A file name may contain multiple CVE IDs, but only
 82 the last one will be detected.
83
 84 Returns the last CVE ID found in the filename. If no CVE ID is found,
 85 an empty string is returned.
86 """
87 cve_file_name_match = re.compile(r".*(CVE-\d{4}-\d{4,})", re.IGNORECASE)
88
89 # Check patch file name for CVE ID
90 fname_match = cve_file_name_match.search(patch_filename)
91 return fname_match.group(1).upper() if fname_match else ""
92
93
94def parse_cves_from_patch_contents(patch_contents):
95 """
96 Parses CVE IDs from patch contents
97
98 Matches all CVE IDs contained on a line that starts with "CVE: ". Any
99 delimiter (',', '&', "and", etc.) can be used without any issues. Multiple
100 "CVE:" lines can also exist.
101
102 Returns a set of all CVE IDs found in the patch contents.
103 """
104 cve_ids = set()
105 cve_match = re.compile(r"CVE-\d{4}-\d{4,}")
106 # Search for one or more "CVE: " lines
107 for line in patch_contents.split("\n"):
108 if not line.startswith("CVE:"):
109 continue
110 cve_ids.update(cve_match.findall(line))
111 return cve_ids
112
113
114def parse_cves_from_patch_file(patch_file):
115 """
116 Parses CVE IDs associated with a particular patch file, using both the filename
117 and patch contents.
118
119 Returns a set of all CVE IDs found in the patch filename and contents.
120 """
121 cve_ids = set()
122 filename_cve = parse_cve_from_filename(patch_file)
123 if filename_cve:
124 bb.debug(2, "Found %s from patch file name %s" % (filename_cve, patch_file))
 125 cve_ids.add(filename_cve)
126
127 # Remote patches won't be present and compressed patches won't be
128 # unpacked, so say we're not scanning them
129 if not os.path.isfile(patch_file):
130 bb.note("%s is remote or compressed, not scanning content" % patch_file)
131 return cve_ids
132
133 with open(patch_file, "r", encoding="utf-8") as f:
134 try:
135 patch_text = f.read()
136 except UnicodeDecodeError:
137 bb.debug(
138 1,
139 "Failed to read patch %s using UTF-8 encoding"
140 " trying with iso8859-1" % patch_file,
141 )
142 f.close()
143 with open(patch_file, "r", encoding="iso8859-1") as f:
144 patch_text = f.read()
145
146 cve_ids.update(parse_cves_from_patch_contents(patch_text))
147
148 if not cve_ids:
149 bb.debug(2, "Patch %s doesn't solve CVEs" % patch_file)
150 else:
151 bb.debug(2, "Patch %s solves %s" % (patch_file, ", ".join(sorted(cve_ids))))
152
153 return cve_ids
154
155
156@bb.parse.vardeps("CVE_STATUS")
157def get_patched_cves(d):
158 """
 159 Determines the CVE IDs that have been solved by either patches included within
160 SRC_URI or by setting CVE_STATUS.
161
 162 Returns a dictionary with the CVE IDs as keys and an associated dictionary of
163 relevant metadata as the value.
164 """
165 patched_cves = {}
166 patches = oe.patch.src_patches(d)
167 bb.debug(2, "Scanning %d patches for CVEs" % len(patches))
168
169 # Check each patch file
170 for url in patches:
171 patch_file = bb.fetch.decodeurl(url)[2]
172 for cve_id in parse_cves_from_patch_file(patch_file):
173 if cve_id not in patched_cves:
174 patched_cves[cve_id] = {
175 "abbrev-status": "Patched",
176 "status": "fix-file-included",
177 "resource": [patch_file],
178 }
179 else:
180 patched_cves[cve_id]["resource"].append(patch_file)
181
182 # Search for additional patched CVEs
183 for cve_id in d.getVarFlags("CVE_STATUS") or {}:
184 decoded_status = decode_cve_status(d, cve_id)
185 products = d.getVar("CVE_PRODUCT")
186 if has_cve_product_match(decoded_status, products):
187 if cve_id in patched_cves:
188 bb.warn(
189 'CVE_STATUS[%s] = "%s" is overwriting previous status of "%s: %s"'
190 % (
191 cve_id,
192 d.getVarFlag("CVE_STATUS", cve_id),
193 patched_cves[cve_id]["abbrev-status"],
194 patched_cves[cve_id]["status"],
195 )
196 )
197 patched_cves[cve_id] = {
198 "abbrev-status": decoded_status["mapping"],
199 "status": decoded_status["detail"],
200 "justification": decoded_status["description"],
201 "affected-vendor": decoded_status["vendor"],
202 "affected-product": decoded_status["product"],
203 }
204
205 return patched_cves
206
207
208def get_cpe_ids(cve_product, version):
209 """
210 Get list of CPE identifiers for the given product and version
211 """
212
213 version = version.split("+git")[0]
214
215 cpe_ids = []
216 for product in cve_product.split():
217 # CVE_PRODUCT in recipes may include vendor information for CPE identifiers. If not,
218 # use wildcard for vendor.
219 if ":" in product:
220 vendor, product = product.split(":", 1)
221 else:
222 vendor = "*"
223
224 cpe_id = 'cpe:2.3:*:{}:{}:{}:*:*:*:*:*:*:*'.format(vendor, product, version)
225 cpe_ids.append(cpe_id)
226
227 return cpe_ids
228
229def cve_check_merge_jsons(output, data):
230 """
231 Merge the data in the "package" property to the main data file
232 output
233 """
234 if output["version"] != data["version"]:
235 bb.error("Version mismatch when merging JSON outputs")
236 return
237
238 for product in output["package"]:
239 if product["name"] == data["package"][0]["name"]:
240 bb.error("Error adding the same package %s twice" % product["name"])
241 return
242
243 output["package"].append(data["package"][0])
244
245def update_symlinks(target_path, link_path):
246 """
247 Update a symbolic link link_path to point to target_path.
 248 Remove the link and recreate it if it exists and is different.
249 """
250 if link_path != target_path and os.path.exists(target_path):
251 if os.path.exists(os.path.realpath(link_path)):
252 os.remove(link_path)
253 os.symlink(os.path.basename(target_path), link_path)
254
255
256def convert_cve_version(version):
257 """
258 This function converts from CVE format to Yocto version format.
259 eg 8.3_p1 -> 8.3p1, 6.2_rc1 -> 6.2-rc1
260
261 Unless it is redefined using CVE_VERSION in the recipe,
262 cve_check uses the version in the name of the recipe (${PV})
263 to check vulnerabilities against a CVE in the database downloaded from NVD.
264
265 When the version has an update, i.e.
266 "p1" in OpenSSH 8.3p1,
267 "-rc1" in linux kernel 6.2-rc1,
268 the database stores the version as version_update (8.3_p1, 6.2_rc1).
269 Therefore, we must transform this version before comparing to the
270 recipe version.
271
272 In this case, the parameter of the function is 8.3_p1.
273 If the version uses the Release Candidate format, "rc",
274 this function replaces the '_' by '-'.
275 If the version uses the Update format, "p",
276 this function removes the '_' completely.
277 """
278 import re
279
280 matches = re.match('^([0-9.]+)_((p|rc)[0-9]+)$', version)
281
282 if not matches:
283 return version
284
285 version = matches.group(1)
286 update = matches.group(2)
287
288 if matches.group(3) == "rc":
289 return version + '-' + update
290
291 return version + update
292
293@bb.parse.vardeps("CVE_STATUS", "CVE_CHECK_STATUSMAP")
294def decode_cve_status(d, cve):
295 """
296 Convert CVE_STATUS into status, vendor, product, detail and description.
297 """
298 status = d.getVarFlag("CVE_STATUS", cve)
299 if not status:
300 return {}
301
302 status_split = status.split(':', 4)
303 status_out = {}
304 status_out["detail"] = status_split[0]
305 product = "*"
306 vendor = "*"
307 description = ""
308 if len(status_split) >= 4 and status_split[1].strip() == "cpe":
 309 # Both vendor and product are mandatory if cpe: is present; the syntax is then:
310 # detail: cpe:vendor:product:description
311 vendor = status_split[2].strip()
312 product = status_split[3].strip()
313 description = status_split[4].strip()
314 elif len(status_split) >= 2 and status_split[1].strip() == "cpe":
315 # Malformed CPE
316 bb.warn(
317 'Invalid CPE information for CVE_STATUS[%s] = "%s", not setting CPE'
318 % (cve, status)
319 )
320 else:
321 # Other case: no CPE, the syntax is then:
322 # detail: description
323 description = status.split(':', 1)[1].strip() if (len(status_split) > 1) else ""
324
325 status_out["vendor"] = vendor
326 status_out["product"] = product
327 status_out["description"] = description
328
329 detail = status_out["detail"]
330 status_mapping = d.getVarFlag("CVE_CHECK_STATUSMAP", detail)
331 if status_mapping is None:
332 bb.warn(
333 'Invalid detail "%s" for CVE_STATUS[%s] = "%s", fallback to Unpatched'
334 % (detail, cve, status)
335 )
336 status_mapping = "Unpatched"
337 status_out["mapping"] = status_mapping
338
339 return status_out
340
341def has_cve_product_match(detailed_status, products):
342 """
343 Check product/vendor match between detailed_status from decode_cve_status and a string of
344 products (like from CVE_PRODUCT)
345 """
346 for product in products.split():
347 vendor = "*"
348 if ":" in product:
349 vendor, product = product.split(":", 1)
350
351 if (vendor == detailed_status["vendor"] or detailed_status["vendor"] == "*") and \
352 (product == detailed_status["product"] or detailed_status["product"] == "*"):
353 return True
354
 355 # If no match, return False
356 return False
357
358def extend_cve_status(d):
359 # do this only once in case multiple classes use this
360 if d.getVar("CVE_STATUS_EXTENDED"):
361 return
362 d.setVar("CVE_STATUS_EXTENDED", "1")
363
364 # Fallback all CVEs from CVE_CHECK_IGNORE to CVE_STATUS
365 cve_check_ignore = d.getVar("CVE_CHECK_IGNORE")
366 if cve_check_ignore:
367 bb.warn("CVE_CHECK_IGNORE is deprecated in favor of CVE_STATUS")
368 for cve in (d.getVar("CVE_CHECK_IGNORE") or "").split():
369 d.setVarFlag("CVE_STATUS", cve, "ignored")
370
371 # Process CVE_STATUS_GROUPS to set multiple statuses and optional detail or description at once
372 for cve_status_group in (d.getVar("CVE_STATUS_GROUPS") or "").split():
373 cve_group = d.getVar(cve_status_group)
374 if cve_group is not None:
375 for cve in cve_group.split():
376 d.setVarFlag("CVE_STATUS", cve, d.getVarFlag(cve_status_group, "status"))
377 else:
378 bb.warn("CVE_STATUS_GROUPS contains undefined variable %s" % cve_status_group)
diff --git a/meta/lib/oe/data.py b/meta/lib/oe/data.py
index 602130a904..37121cfad2 100644
--- a/meta/lib/oe/data.py
+++ b/meta/lib/oe/data.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
diff --git a/meta/lib/oe/distro_check.py b/meta/lib/oe/distro_check.py
index 88e46c354d..3494520f40 100644
--- a/meta/lib/oe/distro_check.py
+++ b/meta/lib/oe/distro_check.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -26,7 +28,7 @@ def find_latest_numeric_release(url, d):
26 maxstr="" 28 maxstr=""
27 for link in get_links_from_url(url, d): 29 for link in get_links_from_url(url, d):
28 try: 30 try:
29 # TODO use LooseVersion 31 # TODO use bb.utils.vercmp_string_op()
30 release = float(link) 32 release = float(link)
31 except: 33 except:
32 release = 0 34 release = 0
diff --git a/meta/lib/oe/elf.py b/meta/lib/oe/elf.py
index df0a4593fa..9794453092 100644
--- a/meta/lib/oe/elf.py
+++ b/meta/lib/oe/elf.py
@@ -1,133 +1,148 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
5def machine_dict(d): 7def machine_dict(d):
6# TARGET_OS TARGET_ARCH MACHINE, OSABI, ABIVERSION, Little Endian, 32bit? 8 # Generating this data is slow, so cache it
7 machdata = { 9 if not hasattr(machine_dict, "machdata"):
8 "darwin9" : { 10 machine_dict.machdata = {
9 "arm" : (40, 0, 0, True, 32), 11 # TARGET_OS TARGET_ARCH MACHINE, OSABI, ABIVERSION, Little Endian, 32bit?
10 }, 12 "darwin9" : {
11 "eabi" : { 13 "arm" : (40, 0, 0, True, 32),
12 "arm" : (40, 0, 0, True, 32), 14 },
13 }, 15 "eabi" : {
14 "elf" : { 16 "arm" : (40, 0, 0, True, 32),
15 "aarch64" : (183, 0, 0, True, 64), 17 },
16 "aarch64_be" :(183, 0, 0, False, 64), 18 "elf" : {
17 "i586" : (3, 0, 0, True, 32), 19 "aarch64" : (183, 0, 0, True, 64),
18 "i686" : (3, 0, 0, True, 32), 20 "aarch64_be" :(183, 0, 0, False, 64),
19 "x86_64": (62, 0, 0, True, 64), 21 "i586" : (3, 0, 0, True, 32),
20 "epiphany": (4643, 0, 0, True, 32), 22 "i686" : (3, 0, 0, True, 32),
21 "lm32": (138, 0, 0, False, 32), 23 "x86_64": (62, 0, 0, True, 64),
22 "mips": ( 8, 0, 0, False, 32), 24 "epiphany": (4643, 0, 0, True, 32),
23 "mipsel": ( 8, 0, 0, True, 32), 25 "lm32": (138, 0, 0, False, 32),
24 "microblaze": (189, 0, 0, False, 32), 26 "loongarch64":(258, 0, 0, True, 64),
25 "microblazeel":(189, 0, 0, True, 32), 27 "mips": ( 8, 0, 0, False, 32),
26 "powerpc": (20, 0, 0, False, 32), 28 "mipsel": ( 8, 0, 0, True, 32),
27 "riscv32": (243, 0, 0, True, 32), 29 "microblaze": (189, 0, 0, False, 32),
28 "riscv64": (243, 0, 0, True, 64), 30 "microblazeel":(189, 0, 0, True, 32),
29 }, 31 "powerpc": (20, 0, 0, False, 32),
30 "linux" : { 32 "riscv32": (243, 0, 0, True, 32),
31 "aarch64" : (183, 0, 0, True, 64), 33 "riscv64": (243, 0, 0, True, 64),
32 "aarch64_be" :(183, 0, 0, False, 64), 34 },
33 "arm" : (40, 97, 0, True, 32), 35 "linux" : {
34 "armeb": (40, 97, 0, False, 32), 36 "aarch64" : (183, 0, 0, True, 64),
35 "powerpc": (20, 0, 0, False, 32), 37 "aarch64_be" :(183, 0, 0, False, 64),
36 "powerpc64": (21, 0, 0, False, 64), 38 "arm" : (40, 97, 0, True, 32),
37 "powerpc64le": (21, 0, 0, True, 64), 39 "armeb": (40, 97, 0, False, 32),
38 "i386": ( 3, 0, 0, True, 32), 40 "powerpc": (20, 0, 0, False, 32),
39 "i486": ( 3, 0, 0, True, 32), 41 "powerpc64": (21, 0, 0, False, 64),
40 "i586": ( 3, 0, 0, True, 32), 42 "powerpc64le": (21, 0, 0, True, 64),
41 "i686": ( 3, 0, 0, True, 32), 43 "i386": ( 3, 0, 0, True, 32),
42 "x86_64": (62, 0, 0, True, 64), 44 "i486": ( 3, 0, 0, True, 32),
43 "ia64": (50, 0, 0, True, 64), 45 "i586": ( 3, 0, 0, True, 32),
44 "alpha": (36902, 0, 0, True, 64), 46 "i686": ( 3, 0, 0, True, 32),
45 "hppa": (15, 3, 0, False, 32), 47 "x86_64": (62, 0, 0, True, 64),
46 "m68k": ( 4, 0, 0, False, 32), 48 "ia64": (50, 0, 0, True, 64),
47 "mips": ( 8, 0, 0, False, 32), 49 "alpha": (36902, 0, 0, True, 64),
48 "mipsel": ( 8, 0, 0, True, 32), 50 "hppa": (15, 3, 0, False, 32),
49 "mips64": ( 8, 0, 0, False, 64), 51 "loongarch64":(258, 0, 0, True, 64),
50 "mips64el": ( 8, 0, 0, True, 64), 52 "m68k": ( 4, 0, 0, False, 32),
51 "mipsisa32r6": ( 8, 0, 0, False, 32), 53 "mips": ( 8, 0, 0, False, 32),
52 "mipsisa32r6el": ( 8, 0, 0, True, 32), 54 "mipsel": ( 8, 0, 0, True, 32),
53 "mipsisa64r6": ( 8, 0, 0, False, 64), 55 "mips64": ( 8, 0, 0, False, 64),
54 "mipsisa64r6el": ( 8, 0, 0, True, 64), 56 "mips64el": ( 8, 0, 0, True, 64),
55 "nios2": (113, 0, 0, True, 32), 57 "mipsisa32r6": ( 8, 0, 0, False, 32),
56 "riscv32": (243, 0, 0, True, 32), 58 "mipsisa32r6el": ( 8, 0, 0, True, 32),
57 "riscv64": (243, 0, 0, True, 64), 59 "mipsisa64r6": ( 8, 0, 0, False, 64),
58 "s390": (22, 0, 0, False, 32), 60 "mipsisa64r6el": ( 8, 0, 0, True, 64),
59 "sh4": (42, 0, 0, True, 32), 61 "nios2": (113, 0, 0, True, 32),
60 "sparc": ( 2, 0, 0, False, 32), 62 "riscv32": (243, 0, 0, True, 32),
61 "microblaze": (189, 0, 0, False, 32), 63 "riscv64": (243, 0, 0, True, 64),
62 "microblazeel":(189, 0, 0, True, 32), 64 "s390": (22, 0, 0, False, 32),
63 }, 65 "sh4": (42, 0, 0, True, 32),
64 "linux-musl" : { 66 "sparc": ( 2, 0, 0, False, 32),
65 "aarch64" : (183, 0, 0, True, 64), 67 "microblaze": (189, 0, 0, False, 32),
66 "aarch64_be" :(183, 0, 0, False, 64), 68 "microblazeel":(189, 0, 0, True, 32),
67 "arm" : ( 40, 97, 0, True, 32), 69 },
68 "armeb": ( 40, 97, 0, False, 32), 70 "linux-android" : {
69 "powerpc": ( 20, 0, 0, False, 32), 71 "aarch64" : (183, 0, 0, True, 64),
70 "powerpc64": ( 21, 0, 0, False, 64), 72 "i686": ( 3, 0, 0, True, 32),
71 "powerpc64le": (21, 0, 0, True, 64), 73 "x86_64": (62, 0, 0, True, 64),
72 "i386": ( 3, 0, 0, True, 32), 74 },
73 "i486": ( 3, 0, 0, True, 32), 75 "linux-androideabi" : {
74 "i586": ( 3, 0, 0, True, 32), 76 "arm" : (40, 97, 0, True, 32),
75 "i686": ( 3, 0, 0, True, 32), 77 },
76 "x86_64": ( 62, 0, 0, True, 64), 78 "linux-musl" : {
77 "mips": ( 8, 0, 0, False, 32), 79 "aarch64" : (183, 0, 0, True, 64),
78 "mipsel": ( 8, 0, 0, True, 32), 80 "aarch64_be" :(183, 0, 0, False, 64),
79 "mips64": ( 8, 0, 0, False, 64), 81 "arm" : ( 40, 97, 0, True, 32),
80 "mips64el": ( 8, 0, 0, True, 64), 82 "armeb": ( 40, 97, 0, False, 32),
81 "microblaze": (189, 0, 0, False, 32), 83 "powerpc": ( 20, 0, 0, False, 32),
82 "microblazeel":(189, 0, 0, True, 32), 84 "powerpc64": ( 21, 0, 0, False, 64),
83 "riscv32": (243, 0, 0, True, 32), 85 "powerpc64le": (21, 0, 0, True, 64),
84 "riscv64": (243, 0, 0, True, 64), 86 "i386": ( 3, 0, 0, True, 32),
85 "sh4": ( 42, 0, 0, True, 32), 87 "i486": ( 3, 0, 0, True, 32),
86 }, 88 "i586": ( 3, 0, 0, True, 32),
87 "uclinux-uclibc" : { 89 "i686": ( 3, 0, 0, True, 32),
88 "bfin": ( 106, 0, 0, True, 32), 90 "x86_64": ( 62, 0, 0, True, 64),
89 }, 91 "loongarch64":( 258, 0, 0, True, 64),
90 "linux-gnueabi" : { 92 "mips": ( 8, 0, 0, False, 32),
91 "arm" : (40, 0, 0, True, 32), 93 "mipsel": ( 8, 0, 0, True, 32),
92 "armeb" : (40, 0, 0, False, 32), 94 "mips64": ( 8, 0, 0, False, 64),
93 }, 95 "mips64el": ( 8, 0, 0, True, 64),
94 "linux-musleabi" : { 96 "microblaze": (189, 0, 0, False, 32),
95 "arm" : (40, 0, 0, True, 32), 97 "microblazeel":(189, 0, 0, True, 32),
96 "armeb" : (40, 0, 0, False, 32), 98 "riscv32": (243, 0, 0, True, 32),
97 }, 99 "riscv64": (243, 0, 0, True, 64),
98 "linux-gnuspe" : { 100 "sh4": ( 42, 0, 0, True, 32),
99 "powerpc": (20, 0, 0, False, 32), 101 },
100 }, 102 "uclinux-uclibc" : {
101 "linux-muslspe" : { 103 "bfin": ( 106, 0, 0, True, 32),
102 "powerpc": (20, 0, 0, False, 32), 104 },
103 }, 105 "linux-gnueabi" : {
104 "linux-gnu" : { 106 "arm" : (40, 0, 0, True, 32),
105 "powerpc": (20, 0, 0, False, 32), 107 "armeb" : (40, 0, 0, False, 32),
106 "sh4": (42, 0, 0, True, 32), 108 },
107 }, 109 "linux-musleabi" : {
108 "linux-gnu_ilp32" : { 110 "arm" : (40, 0, 0, True, 32),
109 "aarch64" : (183, 0, 0, True, 32), 111 "armeb" : (40, 0, 0, False, 32),
110 }, 112 },
111 "linux-gnux32" : { 113 "linux-gnuspe" : {
112 "x86_64": (62, 0, 0, True, 32), 114 "powerpc": (20, 0, 0, False, 32),
113 }, 115 },
114 "linux-muslx32" : { 116 "linux-muslspe" : {
115 "x86_64": (62, 0, 0, True, 32), 117 "powerpc": (20, 0, 0, False, 32),
116 }, 118 },
117 "linux-gnun32" : { 119 "linux-gnu" : {
118 "mips64": ( 8, 0, 0, False, 32), 120 "powerpc": (20, 0, 0, False, 32),
119 "mips64el": ( 8, 0, 0, True, 32), 121 "sh4": (42, 0, 0, True, 32),
120 "mipsisa64r6": ( 8, 0, 0, False, 32), 122 },
121 "mipsisa64r6el":( 8, 0, 0, True, 32), 123 "linux-gnu_ilp32" : {
122 }, 124 "aarch64" : (183, 0, 0, True, 32),
123 } 125 },
126 "linux-gnux32" : {
127 "x86_64": (62, 0, 0, True, 32),
128 },
129 "linux-muslx32" : {
130 "x86_64": (62, 0, 0, True, 32),
131 },
132 "linux-gnun32" : {
133 "mips64": ( 8, 0, 0, False, 32),
134 "mips64el": ( 8, 0, 0, True, 32),
135 "mipsisa64r6": ( 8, 0, 0, False, 32),
136 "mipsisa64r6el":( 8, 0, 0, True, 32),
137 },
138 }
124 139
125 # Add in any extra user supplied data which may come from a BSP layer, removing the 140 # Add in any extra user supplied data which may come from a BSP layer, removing the
126 # need to always change this class directly 141 # need to always change this class directly
127 extra_machdata = (d and d.getVar("PACKAGEQA_EXTRA_MACHDEFFUNCS" or None) or "").split() 142 extra_machdata = (d and d.getVar("PACKAGEQA_EXTRA_MACHDEFFUNCS" or None) or "").split()
128 for m in extra_machdata: 143 for m in extra_machdata:
129 call = m + "(machdata, d)" 144 call = m + "(machdata, d)"
130 locs = { "machdata" : machdata, "d" : d} 145 locs = { "machdata" : machine_dict.machdata, "d" : d}
131 machdata = bb.utils.better_eval(call, locs) 146 machine_dict.machdata = bb.utils.better_eval(call, locs)
132 147
133 return machdata 148 return machine_dict.machdata
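
machine_dict() now memoizes its table on the function object itself, so the
dictionary is built once per process. The same caching pattern in isolation
(machine_table and its contents are illustrative):

    def machine_table():
        if not hasattr(machine_table, "data"):
            print("building table once...")
            machine_table.data = {"x86_64": (62, 0, 0, True, 64)}
        return machine_table.data

    machine_table()  # prints "building table once..."
    machine_table()  # served from the cache, prints nothing
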
diff --git a/meta/lib/oe/fitimage.py b/meta/lib/oe/fitimage.py
new file mode 100644
index 0000000000..f303799155
--- /dev/null
+++ b/meta/lib/oe/fitimage.py
@@ -0,0 +1,547 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6# This file contains common functions for the fitimage generation
7
8import os
9import shlex
10import subprocess
11import bb
12
13from oeqa.utils.commands import runCmd
14
15class ItsNode:
16 INDENT_SIZE = 8
17
18 def __init__(self, name, parent_node, sub_nodes=None, properties=None):
19 self.name = name
20 self.parent_node = parent_node
21
22 self.sub_nodes = []
23 if sub_nodes:
24 self.sub_nodes = sub_nodes
25
26 self.properties = {}
27 if properties:
28 self.properties = properties
29
30 if parent_node:
31 parent_node.add_sub_node(self)
32
33 def add_sub_node(self, sub_node):
34 self.sub_nodes.append(sub_node)
35
36 def add_property(self, key, value):
37 self.properties[key] = value
38
39 def emit(self, f, indent):
40 indent_str_name = " " * indent
41 indent_str_props = " " * (indent + self.INDENT_SIZE)
42 f.write("%s%s {\n" % (indent_str_name, self.name))
43 for key, value in self.properties.items():
44 bb.debug(1, "key: %s, value: %s" % (key, str(value)))
45 # Single integer: <0x12ab>
46 if isinstance(value, int):
47 f.write(indent_str_props + key + ' = <0x%x>;\n' % value)
48 # list of strings: "string1", "string2" or integers: <0x12ab 0x34cd>
49 elif isinstance(value, list):
50 if len(value) == 0:
51 f.write(indent_str_props + key + ' = "";\n')
52 elif isinstance(value[0], int):
53 list_entries = ' '.join('0x%x' % entry for entry in value)
54 f.write(indent_str_props + key + ' = <%s>;\n' % list_entries)
55 else:
56 list_entries = ', '.join('"%s"' % entry for entry in value)
57 f.write(indent_str_props + key + ' = %s;\n' % list_entries)
58 elif isinstance(value, str):
59 # path: /incbin/("path/to/file")
60 if key in ["data"] and value.startswith('/incbin/('):
61 f.write(indent_str_props + key + ' = %s;\n' % value)
62 # Integers which are already string formatted
63 elif value.startswith("<") and value.endswith(">"):
64 f.write(indent_str_props + key + ' = %s;\n' % value)
65 else:
66 f.write(indent_str_props + key + ' = "%s";\n' % value)
67 else:
68 bb.fatal("%s has unexpexted data type." % str(value))
69 for sub_node in self.sub_nodes:
70 sub_node.emit(f, indent + self.INDENT_SIZE)
71 f.write(indent_str_name + '};\n')
72
73class ItsNodeImages(ItsNode):
74 def __init__(self, parent_node):
75 super().__init__("images", parent_node)
76
77class ItsNodeConfigurations(ItsNode):
78 def __init__(self, parent_node):
79 super().__init__("configurations", parent_node)
80
81class ItsNodeHash(ItsNode):
82 def __init__(self, name, parent_node, algo, opt_props=None):
83 properties = {
84 "algo": algo
85 }
86 if opt_props:
87 properties.update(opt_props)
88 super().__init__(name, parent_node, None, properties)
89
90class ItsImageSignature(ItsNode):
91 def __init__(self, name, parent_node, algo, keyname, opt_props=None):
92 properties = {
93 "algo": algo,
94 "key-name-hint": keyname
95 }
96 if opt_props:
97 properties.update(opt_props)
98 super().__init__(name, parent_node, None, properties)
99
100class ItsNodeImage(ItsNode):
101 def __init__(self, name, parent_node, description, type, compression, sub_nodes=None, opt_props=None):
102 properties = {
103 "description": description,
104 "type": type,
105 "compression": compression,
106 }
107 if opt_props:
108 properties.update(opt_props)
109 super().__init__(name, parent_node, sub_nodes, properties)
110
111class ItsNodeDtb(ItsNodeImage):
112 def __init__(self, name, parent_node, description, type, compression,
113 sub_nodes=None, opt_props=None, compatible=None):
114 super().__init__(name, parent_node, description, type, compression, sub_nodes, opt_props)
115 self.compatible = compatible
116
117class ItsNodeDtbAlias(ItsNode):
118 """Additional Configuration Node for a DTB
119
 120 Symlinks pointing to a DTB file are handled by an additional
121 configuration node referring to another DTB image node.
122 """
123 def __init__(self, name, alias_name, compatible=None):
124 super().__init__(name, parent_node=None, sub_nodes=None, properties=None)
125 self.alias_name = alias_name
126 self.compatible = compatible
127
128class ItsNodeConfigurationSignature(ItsNode):
129 def __init__(self, name, parent_node, algo, keyname, opt_props=None):
130 properties = {
131 "algo": algo,
132 "key-name-hint": keyname
133 }
134 if opt_props:
135 properties.update(opt_props)
136 super().__init__(name, parent_node, None, properties)
137
138class ItsNodeConfiguration(ItsNode):
139 def __init__(self, name, parent_node, description, sub_nodes=None, opt_props=None):
140 properties = {
141 "description": description,
142 }
143 if opt_props:
144 properties.update(opt_props)
145 super().__init__(name, parent_node, sub_nodes, properties)
146
147class ItsNodeRootKernel(ItsNode):
148 """Create FIT images for the kernel
149
 150 Currently exactly one kernel (no fewer, no more) can be added to the FIT
151 image along with 0 or more device trees and 0 or 1 ramdisk.
152
 153 If a device tree is included in the FIT image, the default configuration is the
 154 first DTB. If no DTB is present, then the default configuration is the kernel.
155 """
156 def __init__(self, description, address_cells, host_prefix, arch, conf_prefix,
157 sign_enable=False, sign_keydir=None,
158 mkimage=None, mkimage_dtcopts=None,
159 mkimage_sign=None, mkimage_sign_args=None,
160 hash_algo=None, sign_algo=None, pad_algo=None,
161 sign_keyname_conf=None,
162 sign_individual=False, sign_keyname_img=None):
163 props = {
164 "description": description,
165 "#address-cells": f"<{address_cells}>"
166 }
167 super().__init__("/", None, None, props)
168 self.images = ItsNodeImages(self)
169 self.configurations = ItsNodeConfigurations(self)
170
171 self._host_prefix = host_prefix
172 self._arch = arch
173 self._conf_prefix = conf_prefix
174
175 # Signature related properties
176 self._sign_enable = sign_enable
177 self._sign_keydir = sign_keydir
178 self._mkimage = mkimage
179 self._mkimage_dtcopts = mkimage_dtcopts
180 self._mkimage_sign = mkimage_sign
181 self._mkimage_sign_args = mkimage_sign_args
182 self._hash_algo = hash_algo
183 self._sign_algo = sign_algo
184 self._pad_algo = pad_algo
185 self._sign_keyname_conf = sign_keyname_conf
186 self._sign_individual = sign_individual
187 self._sign_keyname_img = sign_keyname_img
188 self._sanitize_sign_config()
189
190 self._dtbs = []
191 self._dtb_alias = []
192 self._kernel = None
193 self._ramdisk = None
194 self._bootscr = None
195 self._setup = None
196
197 def _sanitize_sign_config(self):
198 if self._sign_enable:
199 if not self._hash_algo:
200 bb.fatal("FIT image signing is enabled but no hash algorithm is provided.")
201 if not self._sign_algo:
202 bb.fatal("FIT image signing is enabled but no signature algorithm is provided.")
203 if not self._pad_algo:
204 bb.fatal("FIT image signing is enabled but no padding algorithm is provided.")
205 if not self._sign_keyname_conf:
206 bb.fatal("FIT image signing is enabled but no configuration key name is provided.")
207 if self._sign_individual and not self._sign_keyname_img:
208 bb.fatal("FIT image signing is enabled for individual images but no image key name is provided.")
209
210 def write_its_file(self, itsfile):
211 with open(itsfile, 'w') as f:
212 f.write("/dts-v1/;\n\n")
213 self.emit(f, 0)
214
215 def its_add_node_image(self, image_id, description, image_type, compression, opt_props):
216 image_node = ItsNodeImage(
217 image_id,
218 self.images,
219 description,
220 image_type,
221 compression,
222 opt_props=opt_props
223 )
224 if self._hash_algo:
225 ItsNodeHash(
226 "hash-1",
227 image_node,
228 self._hash_algo
229 )
230 if self._sign_individual:
231 ItsImageSignature(
232 "signature-1",
233 image_node,
234 f"{self._hash_algo},{self._sign_algo}",
235 self._sign_keyname_img
236 )
237 return image_node
238
239 def its_add_node_dtb(self, image_id, description, image_type, compression, opt_props, compatible):
240 dtb_node = ItsNodeDtb(
241 image_id,
242 self.images,
243 description,
244 image_type,
245 compression,
246 opt_props=opt_props,
247 compatible=compatible
248 )
249 if self._hash_algo:
250 ItsNodeHash(
251 "hash-1",
252 dtb_node,
253 self._hash_algo
254 )
255 if self._sign_individual:
256 ItsImageSignature(
257 "signature-1",
258 dtb_node,
259 f"{self._hash_algo},{self._sign_algo}",
260 self._sign_keyname_img
261 )
262 return dtb_node
263
264 def fitimage_emit_section_kernel(self, kernel_id, kernel_path, compression,
265 load, entrypoint, mkimage_kernel_type, entrysymbol=None):
266 """Emit the fitImage ITS kernel section"""
267 if self._kernel:
268 bb.fatal("Kernel section already exists in the ITS file.")
269 if entrysymbol:
270 result = subprocess.run([self._host_prefix + "nm", "vmlinux"], capture_output=True, text=True)
271 for line in result.stdout.splitlines():
272 parts = line.split()
273 if len(parts) == 3 and parts[2] == entrysymbol:
274 entrypoint = "<0x%s>" % parts[0]
275 break
276 kernel_node = self.its_add_node_image(
277 kernel_id,
278 "Linux kernel",
279 mkimage_kernel_type,
280 compression,
281 {
282 "data": '/incbin/("' + kernel_path + '")',
283 "arch": self._arch,
284 "os": "linux",
285 "load": f"<{load}>",
286 "entry": f"<{entrypoint}>"
287 }
288 )
289 self._kernel = kernel_node
290
291 def fitimage_emit_section_dtb(self, dtb_id, dtb_path, dtb_loadaddress=None,
292 dtbo_loadaddress=None, add_compatible=False):
293 """Emit the fitImage ITS DTB section"""
294 load=None
295 dtb_ext = os.path.splitext(dtb_path)[1]
296 if dtb_ext == ".dtbo":
297 if dtbo_loadaddress:
298 load = dtbo_loadaddress
299 elif dtb_loadaddress:
300 load = dtb_loadaddress
301
302 opt_props = {
303 "data": '/incbin/("' + dtb_path + '")',
304 "arch": self._arch
305 }
306 if load:
307 opt_props["load"] = f"<{load}>"
308
309 # Preserve the DTB's compatible string to be added to the configuration node
310 compatible = None
311 if add_compatible:
312 compatible = get_compatible_from_dtb(dtb_path)
313
314 dtb_node = self.its_add_node_dtb(
315 "fdt-" + dtb_id,
316 "Flattened Device Tree blob",
317 "flat_dt",
318 "none",
319 opt_props,
320 compatible
321 )
322 self._dtbs.append(dtb_node)
323
324 def fitimage_emit_section_dtb_alias(self, dtb_alias_id, dtb_path, add_compatible=False):
325 """Add a configuration node referring to another DTB"""
326 # Preserve the DTB's compatible string to be added to the configuration node
327 compatible = None
328 if add_compatible:
329 compatible = get_compatible_from_dtb(dtb_path)
330
331 dtb_id = os.path.basename(dtb_path)
332 dtb_alias_node = ItsNodeDtbAlias("fdt-" + dtb_id, dtb_alias_id, compatible)
333 self._dtb_alias.append(dtb_alias_node)
334 bb.warn(f"compatible: {compatible}, dtb_alias_id: {dtb_alias_id}, dtb_id: {dtb_id}, dtb_path: {dtb_path}")
335
336 def fitimage_emit_section_boot_script(self, bootscr_id, bootscr_path):
337 """Emit the fitImage ITS u-boot script section"""
338 if self._bootscr:
339 bb.fatal("U-boot script section already exists in the ITS file.")
340 bootscr_node = self.its_add_node_image(
341 bootscr_id,
342 "U-boot script",
343 "script",
344 "none",
345 {
346 "data": '/incbin/("' + bootscr_path + '")',
347 "arch": self._arch,
348 "type": "script"
349 }
350 )
351 self._bootscr = bootscr_node
352
353 def fitimage_emit_section_setup(self, setup_id, setup_path):
354 """Emit the fitImage ITS setup section"""
355 if self._setup:
356 bb.fatal("Setup section already exists in the ITS file.")
357 load = "<0x00090000>"
358 entry = "<0x00090000>"
359 setup_node = self.its_add_node_image(
360 setup_id,
361 "Linux setup.bin",
362 "x86_setup",
363 "none",
364 {
365 "data": '/incbin/("' + setup_path + '")',
366 "arch": self._arch,
367 "os": "linux",
368 "load": load,
369 "entry": entry
370 }
371 )
372 self._setup = setup_node
373
374 def fitimage_emit_section_ramdisk(self, ramdisk_id, ramdisk_path, description="ramdisk", load=None, entry=None):
375 """Emit the fitImage ITS ramdisk section"""
376 if self._ramdisk:
377 bb.fatal("Ramdisk section already exists in the ITS file.")
378 opt_props = {
379 "data": '/incbin/("' + ramdisk_path + '")',
380 "type": "ramdisk",
381 "arch": self._arch,
382 "os": "linux"
383 }
384 if load:
385 opt_props["load"] = f"<{load}>"
386 if entry:
387 opt_props["entry"] = f"<{entry}>"
388
389 ramdisk_node = self.its_add_node_image(
390 ramdisk_id,
391 description,
392 "ramdisk",
393 "none",
394 opt_props
395 )
396 self._ramdisk = ramdisk_node
397
398 def _fitimage_emit_one_section_config(self, conf_node_name, dtb=None):
399 """Emit the fitImage ITS configuration section"""
400 opt_props = {}
401 conf_desc = []
402 sign_entries = []
403
404 if self._kernel:
405 conf_desc.append("Linux kernel")
406 opt_props["kernel"] = self._kernel.name
407 if self._sign_enable:
408 sign_entries.append("kernel")
409
410 if dtb:
411 conf_desc.append("FDT blob")
412 opt_props["fdt"] = dtb.name
413 if dtb.compatible:
414 opt_props["compatible"] = dtb.compatible
415 if self._sign_enable:
416 sign_entries.append("fdt")
417
418 if self._ramdisk:
419 conf_desc.append("ramdisk")
420 opt_props["ramdisk"] = self._ramdisk.name
421 if self._sign_enable:
422 sign_entries.append("ramdisk")
423
424 if self._bootscr:
425 conf_desc.append("u-boot script")
426 opt_props["bootscr"] = self._bootscr.name
427 if self._sign_enable:
428 sign_entries.append("bootscr")
429
430 if self._setup:
431 conf_desc.append("setup")
432 opt_props["setup"] = self._setup.name
433 if self._sign_enable:
434 sign_entries.append("setup")
435
436 # First added configuration is the default configuration
437 default_flag = "0"
438 if len(self.configurations.sub_nodes) == 0:
439 default_flag = "1"
440
441 conf_node = ItsNodeConfiguration(
442 conf_node_name,
443 self.configurations,
444 f"{default_flag} {', '.join(conf_desc)}",
445 opt_props=opt_props
446 )
447 if self._hash_algo:
448 ItsNodeHash(
449 "hash-1",
450 conf_node,
451 self._hash_algo
452 )
453 if self._sign_enable:
454 ItsNodeConfigurationSignature(
455 "signature-1",
456 conf_node,
457 f"{self._hash_algo},{self._sign_algo}",
458 self._sign_keyname_conf,
459 opt_props={
460 "padding": self._pad_algo,
461 "sign-images": sign_entries
462 }
463 )
464
465 def fitimage_emit_section_config(self, default_dtb_image=None):
466 if self._dtbs:
467 for dtb in self._dtbs:
468 dtb_name = dtb.name
469 if dtb.name.startswith("fdt-"):
470 dtb_name = dtb.name[len("fdt-"):]
471 self._fitimage_emit_one_section_config(self._conf_prefix + dtb_name, dtb)
472 for dtb in self._dtb_alias:
473 self._fitimage_emit_one_section_config(self._conf_prefix + dtb.alias_name, dtb)
474 else:
475 # Currently exactly one kernel is supported.
476 self._fitimage_emit_one_section_config(self._conf_prefix + "1")
477
478 default_conf = self.configurations.sub_nodes[0].name
479 if default_dtb_image and self._dtbs:
480 default_conf = self._conf_prefix + default_dtb_image
481 self.configurations.add_property('default', default_conf)
482
483 def run_mkimage_assemble(self, itsfile, fitfile):
484 cmd = [
485 self._mkimage,
486 '-f', itsfile,
487 fitfile
488 ]
489 if self._mkimage_dtcopts:
490 cmd.insert(1, '-D')
491 cmd.insert(2, self._mkimage_dtcopts)
492 try:
493 subprocess.run(cmd, check=True, capture_output=True)
494 except subprocess.CalledProcessError as e:
495            bb.fatal(f"Command '{' '.join(cmd)}' failed with return code {e.returncode}\nstdout: {e.stdout.decode()}\nstderr: {e.stderr.decode()}\nitsfile: {os.path.abspath(itsfile)}")
496
497 def run_mkimage_sign(self, fitfile):
498 if not self._sign_enable:
499 bb.debug(1, "FIT image signing is disabled. Skipping signing.")
500 return
501
502        # Sanity checks, because mkimage also exits with 0 when the required keys are missing
503 sign_key_path = os.path.join(self._sign_keydir, self._sign_keyname_conf)
504 if not os.path.exists(sign_key_path + '.key') or not os.path.exists(sign_key_path + '.crt'):
505 bb.fatal("%s.key or .crt does not exist" % sign_key_path)
506 if self._sign_individual:
507 sign_key_img_path = os.path.join(self._sign_keydir, self._sign_keyname_img)
508 if not os.path.exists(sign_key_img_path + '.key') or not os.path.exists(sign_key_img_path + '.crt'):
509 bb.fatal("%s.key or .crt does not exist" % sign_key_img_path)
510
511 cmd = [
512 self._mkimage_sign,
513 '-F',
514 '-k', self._sign_keydir,
515 '-r', fitfile
516 ]
517 if self._mkimage_dtcopts:
518 cmd.extend(['-D', self._mkimage_dtcopts])
519 if self._mkimage_sign_args:
520 cmd.extend(shlex.split(self._mkimage_sign_args))
521 try:
522 subprocess.run(cmd, check=True, capture_output=True)
523 except subprocess.CalledProcessError as e:
524 bb.fatal(f"Command '{' '.join(cmd)}' failed with return code {e.returncode}\nstdout: {e.stdout.decode()}\nstderr: {e.stderr.decode()}")
525
526
527def symlink_points_below(file_or_symlink, expected_parent_dir):
528 """returns symlink destination if it points below directory"""
529 file_path = os.path.join(expected_parent_dir, file_or_symlink)
530 if not os.path.islink(file_path):
531 return None
532
533 realpath = os.path.relpath(os.path.realpath(file_path), expected_parent_dir)
534 if realpath.startswith(".."):
535 return None
536
537 return realpath
538
539def get_compatible_from_dtb(dtb_path, fdtget_path="fdtget"):
540 compatible = None
541 cmd = [fdtget_path, "-t", "s", dtb_path, "/", "compatible"]
542 try:
543 ret = subprocess.run(cmd, check=True, capture_output=True, text=True)
544 compatible = ret.stdout.strip().split()
545 except subprocess.CalledProcessError:
546 compatible = None
547 return compatible
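
A minimal usage sketch for the helper above (the DTB path below is hypothetical; fdtget must be on PATH):

    from oe.fitimage import get_compatible_from_dtb

    # Illustrative only: query the root "compatible" property of a DTB.
    # Returns e.g. ['vendor,board', 'vendor,soc'], or None if fdtget fails.
    compatible = get_compatible_from_dtb("/path/to/machine.dtb")
    if compatible:
        print("Board compatible strings:", " ".join(compatible))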
diff --git a/meta/lib/oe/go.py b/meta/lib/oe/go.py
new file mode 100644
index 0000000000..4559dc63b2
--- /dev/null
+++ b/meta/lib/oe/go.py
@@ -0,0 +1,38 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import re
8
9def map_arch(a):
10 """
11 Map our architecture names to Go's GOARCH names.
12 See https://github.com/golang/go/blob/master/src/internal/syslist/syslist.go for the complete list.
13 """
14 if re.match('i.86', a):
15 return '386'
16 elif a == 'x86_64':
17 return 'amd64'
18 elif re.match('arm.*', a):
19 return 'arm'
20 elif re.match('aarch64.*', a):
21 return 'arm64'
22 elif re.match('mips64el.*', a):
23 return 'mips64le'
24 elif re.match('mips64.*', a):
25 return 'mips64'
26 elif a == 'mips':
27 return 'mips'
28 elif a == 'mipsel':
29 return 'mipsle'
30 elif re.match('p(pc|owerpc)(64le)', a):
31 return 'ppc64le'
32 elif re.match('p(pc|owerpc)(64)', a):
33 return 'ppc64'
34 elif a == 'riscv64':
35 return 'riscv64'
36 elif a == 'loongarch64':
37 return 'loong64'
38 raise KeyError(f"Cannot map architecture {a}")
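
A quick illustration of the mapping above (illustrative TUNE_ARCH-style inputs; unknown values raise KeyError rather than guessing):

    from oe.go import map_arch

    assert map_arch("i686") == "386"
    assert map_arch("x86_64") == "amd64"
    assert map_arch("aarch64") == "arm64"
    assert map_arch("mips64el") == "mips64le"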
diff --git a/meta/lib/oe/gpg_sign.py b/meta/lib/oe/gpg_sign.py
index 7634d7ef1d..ede6186c84 100644
--- a/meta/lib/oe/gpg_sign.py
+++ b/meta/lib/oe/gpg_sign.py
@@ -1,13 +1,16 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
5"""Helper module for GPG signing""" 7"""Helper module for GPG signing"""
6import os
7 8
8import bb 9import bb
9import subprocess 10import os
10import shlex 11import shlex
12import subprocess
13import tempfile
11 14
12class LocalSigner(object): 15class LocalSigner(object):
13 """Class for handling local (on the build host) signing""" 16 """Class for handling local (on the build host) signing"""
@@ -58,7 +61,7 @@ class LocalSigner(object):
58 for i in range(0, len(files), sign_chunk): 61 for i in range(0, len(files), sign_chunk):
59 subprocess.check_output(shlex.split(cmd + ' '.join(files[i:i+sign_chunk])), stderr=subprocess.STDOUT) 62 subprocess.check_output(shlex.split(cmd + ' '.join(files[i:i+sign_chunk])), stderr=subprocess.STDOUT)
60 63
61 def detach_sign(self, input_file, keyid, passphrase_file, passphrase=None, armor=True): 64 def detach_sign(self, input_file, keyid, passphrase_file, passphrase=None, armor=True, output_suffix=None, use_sha256=False):
62 """Create a detached signature of a file""" 65 """Create a detached signature of a file"""
63 66
64 if passphrase_file and passphrase: 67 if passphrase_file and passphrase:
@@ -71,25 +74,35 @@ class LocalSigner(object):
71 cmd += ['--homedir', self.gpg_path] 74 cmd += ['--homedir', self.gpg_path]
72 if armor: 75 if armor:
73 cmd += ['--armor'] 76 cmd += ['--armor']
77 if use_sha256:
78 cmd += ['--digest-algo', "SHA256"]
74 79
75 #gpg > 2.1 supports password pipes only through the loopback interface 80 #gpg > 2.1 supports password pipes only through the loopback interface
76 #gpg < 2.1 errors out if given unknown parameters 81 #gpg < 2.1 errors out if given unknown parameters
77 if self.gpg_version > (2,1,): 82 if self.gpg_version > (2,1,):
78 cmd += ['--pinentry-mode', 'loopback'] 83 cmd += ['--pinentry-mode', 'loopback']
79 84
80 cmd += [input_file]
81
82 try: 85 try:
83 if passphrase_file: 86 if passphrase_file:
84 with open(passphrase_file) as fobj: 87 with open(passphrase_file) as fobj:
85 passphrase = fobj.readline(); 88 passphrase = fobj.readline();
86 89
87 job = subprocess.Popen(cmd, stdin=subprocess.PIPE, stderr=subprocess.PIPE) 90 if not output_suffix:
88 (_, stderr) = job.communicate(passphrase.encode("utf-8")) 91 output_suffix = 'asc' if armor else 'sig'
92 output_file = input_file + "." + output_suffix
93 with tempfile.TemporaryDirectory(dir=os.path.dirname(output_file)) as tmp_dir:
94 tmp_file = os.path.join(tmp_dir, os.path.basename(output_file))
95 cmd += ['-o', tmp_file]
96
97 cmd += [input_file]
98
99 job = subprocess.Popen(cmd, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
100 (_, stderr) = job.communicate(passphrase.encode("utf-8"))
89 101
90 if job.returncode: 102 if job.returncode:
91 bb.fatal("GPG exited with code %d: %s" % (job.returncode, stderr.decode("utf-8"))) 103 bb.fatal("GPG exited with code %d: %s" % (job.returncode, stderr.decode("utf-8")))
92 104
105 os.rename(tmp_file, output_file)
93 except IOError as e: 106 except IOError as e:
94 bb.error("IO error (%s): %s" % (e.errno, e.strerror)) 107 bb.error("IO error (%s): %s" % (e.errno, e.strerror))
95 raise Exception("Failed to sign '%s'" % input_file) 108 raise Exception("Failed to sign '%s'" % input_file)
@@ -109,16 +122,33 @@ class LocalSigner(object):
109 bb.fatal("Could not get gpg version: %s" % e) 122 bb.fatal("Could not get gpg version: %s" % e)
110 123
111 124
112 def verify(self, sig_file): 125 def verify(self, sig_file, valid_sigs = ''):
113 """Verify signature""" 126 """Verify signature"""
114 cmd = self.gpg_cmd + [" --verify", "--no-permission-warning"] 127 cmd = self.gpg_cmd + ["--verify", "--no-permission-warning", "--status-fd", "1"]
115 if self.gpg_path: 128 if self.gpg_path:
116 cmd += ["--homedir", self.gpg_path] 129 cmd += ["--homedir", self.gpg_path]
117 130
118 cmd += [sig_file] 131 cmd += [sig_file]
119 status = subprocess.call(cmd) 132 status = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
120        ret = False if status else True 133        # If valid_sigs is unspecified, any good signature is accepted
121 return ret 134 if not valid_sigs:
135 ret = False if status.returncode else True
136 return ret
137
138 import re
139 goodsigs = []
140 sigre = re.compile(r'^\[GNUPG:\] GOODSIG (\S+)\s(.*)$')
141 for l in status.stdout.decode("utf-8").splitlines():
142 s = sigre.match(l)
143 if s:
144 goodsigs += [s.group(1)]
145
146 for sig in valid_sigs.split():
147 if sig in goodsigs:
148 return True
149 if len(goodsigs):
150 bb.warn('No accepted signatures found. Good signatures found: %s.' % ' '.join(goodsigs))
151 return False
122 152
123 153
124def get_signer(d, backend): 154def get_signer(d, backend):
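
To make the GOODSIG handling above concrete, this is roughly what a gpg --status-fd line looks like and how the regex extracts the key ID (the sample output is illustrative, not captured from a real run):

    import re

    sample = "[GNUPG:] GOODSIG ABCDEF0123456789 Example Signer <signer@example.com>"
    m = re.match(r'^\[GNUPG:\] GOODSIG (\S+)\s(.*)$', sample)
    # group(1) is the key ID that is compared against valid_sigs.
    assert m and m.group(1) == "ABCDEF0123456789"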
diff --git a/meta/lib/oe/license.py b/meta/lib/oe/license.py
index 665d32ecbb..6e55fa1e7f 100644
--- a/meta/lib/oe/license.py
+++ b/meta/lib/oe/license.py
@@ -1,10 +1,13 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4"""Code for parsing OpenEmbedded license strings""" 6"""Code for parsing OpenEmbedded license strings"""
5 7
6import ast 8import ast
7import re 9import re
10import oe.qa
8from fnmatch import fnmatchcase as fnmatch 11from fnmatch import fnmatchcase as fnmatch
9 12
10def license_ok(license, dont_want_licenses): 13def license_ok(license, dont_want_licenses):
@@ -14,6 +17,16 @@ def license_ok(license, dont_want_licenses):
14 return False 17 return False
15 return True 18 return True
16 19
20def obsolete_license_list():
21 return ["AGPL-3", "AGPL-3+", "AGPLv3", "AGPLv3+", "AGPLv3.0", "AGPLv3.0+", "AGPL-3.0", "AGPL-3.0+", "BSD-0-Clause",
22 "GPL-1", "GPL-1+", "GPLv1", "GPLv1+", "GPLv1.0", "GPLv1.0+", "GPL-1.0", "GPL-1.0+", "GPL-2", "GPL-2+", "GPLv2",
23 "GPLv2+", "GPLv2.0", "GPLv2.0+", "GPL-2.0", "GPL-2.0+", "GPL-3", "GPL-3+", "GPLv3", "GPLv3+", "GPLv3.0", "GPLv3.0+",
24 "GPL-3.0", "GPL-3.0+", "LGPLv2", "LGPLv2+", "LGPLv2.0", "LGPLv2.0+", "LGPL-2.0", "LGPL-2.0+", "LGPL2.1", "LGPL2.1+",
25 "LGPLv2.1", "LGPLv2.1+", "LGPL-2.1", "LGPL-2.1+", "LGPLv3", "LGPLv3+", "LGPL-3.0", "LGPL-3.0+", "MPL-1", "MPLv1",
26 "MPLv1.1", "MPLv2", "MIT-X", "MIT-style", "openssl", "PSF", "PSFv2", "Python-2", "Apachev2", "Apache-2", "Artisticv1",
27 "Artistic-1", "AFL-2", "AFL-1", "AFLv2", "AFLv1", "CDDLv1", "CDDL-1", "EPLv1.0", "FreeType", "Nauman",
28 "tcl", "vim", "SGIv1"]
29
17class LicenseError(Exception): 30class LicenseError(Exception):
18 pass 31 pass
19 32
@@ -74,6 +87,9 @@ class FlattenVisitor(LicenseVisitor):
74 def visit_Str(self, node): 87 def visit_Str(self, node):
75 self.licenses.append(node.s) 88 self.licenses.append(node.s)
76 89
90 def visit_Constant(self, node):
91 self.licenses.append(node.value)
92
77 def visit_BinOp(self, node): 93 def visit_BinOp(self, node):
78 if isinstance(node.op, ast.BitOr): 94 if isinstance(node.op, ast.BitOr):
79 left = FlattenVisitor(self.choose_licenses) 95 left = FlattenVisitor(self.choose_licenses)
@@ -96,26 +112,26 @@ def flattened_licenses(licensestr, choose_licenses):
96 raise LicenseSyntaxError(licensestr, exc) 112 raise LicenseSyntaxError(licensestr, exc)
97 return flatten.licenses 113 return flatten.licenses
98 114
99def is_included(licensestr, whitelist=None, blacklist=None): 115def is_included(licensestr, include_licenses=None, exclude_licenses=None):
100 """Given a license string and whitelist and blacklist, determine if the 116 """Given a license string, a list of licenses to include and a list of
101 license string matches the whitelist and does not match the blacklist. 117 licenses to exclude, determine if the license string matches the include
118 list and does not match the exclude list.
102 119
103 Returns a tuple holding the boolean state and a list of the applicable 120 Returns a tuple holding the boolean state and a list of the applicable
104 licenses that were excluded if state is False, or the licenses that were 121 licenses that were excluded if state is False, or the licenses that were
105 included if the state is True. 122 included if the state is True."""
106 """
107 123
108 def include_license(license): 124 def include_license(license):
109 return any(fnmatch(license, pattern) for pattern in whitelist) 125 return any(fnmatch(license, pattern) for pattern in include_licenses)
110 126
111 def exclude_license(license): 127 def exclude_license(license):
112 return any(fnmatch(license, pattern) for pattern in blacklist) 128 return any(fnmatch(license, pattern) for pattern in exclude_licenses)
113 129
114 def choose_licenses(alpha, beta): 130 def choose_licenses(alpha, beta):
115 """Select the option in an OR which is the 'best' (has the most 131 """Select the option in an OR which is the 'best' (has the most
116 included licenses and no excluded licenses).""" 132 included licenses and no excluded licenses)."""
117 # The factor 1000 below is arbitrary, just expected to be much larger 133 # The factor 1000 below is arbitrary, just expected to be much larger
118 # that the number of licenses actually specified. That way the weight 134 # than the number of licenses actually specified. That way the weight
119 # will be negative if the list of licenses contains an excluded license, 135 # will be negative if the list of licenses contains an excluded license,
120 # but still gives a higher weight to the list with the most included 136 # but still gives a higher weight to the list with the most included
121 # licenses. 137 # licenses.
@@ -128,11 +144,11 @@ def is_included(licensestr, whitelist=None, blacklist=None):
128 else: 144 else:
129 return beta 145 return beta
130 146
131 if not whitelist: 147 if not include_licenses:
132 whitelist = ['*'] 148 include_licenses = ['*']
133 149
134 if not blacklist: 150 if not exclude_licenses:
135 blacklist = [] 151 exclude_licenses = []
136 152
137 licenses = flattened_licenses(licensestr, choose_licenses) 153 licenses = flattened_licenses(licensestr, choose_licenses)
138 excluded = [lic for lic in licenses if exclude_license(lic)] 154 excluded = [lic for lic in licenses if exclude_license(lic)]
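
As a concrete example of the include/exclude semantics (the license strings are illustrative):

    from oe.license import is_included

    # With an OR, is_included() picks the branch that best satisfies the
    # include list while avoiding the exclude list.
    ok, lics = is_included("GPL-3.0-only | MIT", exclude_licenses=["GPL-3.0*"])
    # ok is True and lics == ["MIT"]: the MIT branch of the OR is chosen.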
@@ -227,6 +243,9 @@ class ListVisitor(LicenseVisitor):
227 def visit_Str(self, node): 243 def visit_Str(self, node):
228 self.licenses.add(node.s) 244 self.licenses.add(node.s)
229 245
246 def visit_Constant(self, node):
247 self.licenses.add(node.value)
248
230def list_licenses(licensestr): 249def list_licenses(licensestr):
231 """Simply get a list of all licenses mentioned in a license string. 250 """Simply get a list of all licenses mentioned in a license string.
232 Binary operators are not applied or taken into account in any way""" 251 Binary operators are not applied or taken into account in any way"""
@@ -236,3 +255,225 @@ def list_licenses(licensestr):
236 except SyntaxError as exc: 255 except SyntaxError as exc:
237 raise LicenseSyntaxError(licensestr, exc) 256 raise LicenseSyntaxError(licensestr, exc)
238 return visitor.licenses 257 return visitor.licenses
258
259def apply_pkg_license_exception(pkg, bad_licenses, exceptions):
260 """Return remaining bad licenses after removing any package exceptions"""
261
262 return [lic for lic in bad_licenses if pkg + ':' + lic not in exceptions]
263
264def return_spdx(d, license):
265 """
266    Return the SPDX mapping of a license, if one exists.
267 """
268 return d.getVarFlag('SPDXLICENSEMAP', license)
269
270def canonical_license(d, license):
271 """
272 Return the canonical (SPDX) form of the license if available (so GPLv3
273 becomes GPL-3.0-only) or the passed license if there is no canonical form.
274 """
275 return d.getVarFlag('SPDXLICENSEMAP', license) or license
276
277def expand_wildcard_licenses(d, wildcard_licenses):
278 """
279 There are some common wildcard values users may want to use. Support them
280 here.
281 """
282 licenses = set(wildcard_licenses)
283 mapping = {
284 "AGPL-3.0*" : ["AGPL-3.0-only", "AGPL-3.0-or-later"],
285 "GPL-3.0*" : ["GPL-3.0-only", "GPL-3.0-or-later"],
286 "LGPL-3.0*" : ["LGPL-3.0-only", "LGPL-3.0-or-later"],
287 }
288 for k in mapping:
289 if k in wildcard_licenses:
290 licenses.remove(k)
291 for item in mapping[k]:
292 licenses.add(item)
293
294 for l in licenses:
295 if l in obsolete_license_list():
296 bb.fatal("Error, %s is an obsolete license, please use an SPDX reference in INCOMPATIBLE_LICENSE" % l)
297 if "*" in l:
298 bb.fatal("Error, %s is an invalid license wildcard entry" % l)
299
300 return list(licenses)
301
302def incompatible_license_contains(license, truevalue, falsevalue, d):
303 license = canonical_license(d, license)
304 bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()
305 bad_licenses = expand_wildcard_licenses(d, bad_licenses)
306 return truevalue if license in bad_licenses else falsevalue
307
308def incompatible_pkg_license(d, dont_want_licenses, license):
309    # Handles an "or" of two license sets provided by
310    # flattened_licenses(), picking one that works if possible.
311 def choose_lic_set(a, b):
312 return a if all(license_ok(canonical_license(d, lic),
313 dont_want_licenses) for lic in a) else b
314
315 try:
316 licenses = flattened_licenses(license, choose_lic_set)
317 except LicenseError as exc:
318 bb.fatal('%s: %s' % (d.getVar('P'), exc))
319
320 incompatible_lic = []
321 for l in licenses:
322 license = canonical_license(d, l)
323 if not license_ok(license, dont_want_licenses):
324 incompatible_lic.append(license)
325
326 return sorted(incompatible_lic)
327
328def incompatible_license(d, dont_want_licenses, package=None):
329 """
330    This function checks if a recipe has only incompatible licenses. It also
331    takes the 'or' operand into consideration. dont_want_licenses should be
332    passed as canonical (SPDX) names.
333 """
334 license = d.getVar("LICENSE:%s" % package) if package else None
335 if not license:
336 license = d.getVar('LICENSE')
337
338 return incompatible_pkg_license(d, dont_want_licenses, license)
339
340def check_license_flags(d):
341 """
342 This function checks if a recipe has any LICENSE_FLAGS that
343 aren't acceptable.
344
345    If it does, it returns all the LICENSE_FLAGS missing from the list
346 of acceptable license flags, or all of the LICENSE_FLAGS if there
347 is no list of acceptable flags.
348
349    If everything is acceptable, it returns None.
350 """
351
352 def license_flag_matches(flag, acceptlist, pn):
353 """
354 Return True if flag matches something in acceptlist, None if not.
355
356 Before we test a flag against the acceptlist, we append _${PN}
357 to it. We then try to match that string against the
358 acceptlist. This covers the normal case, where we expect
359 LICENSE_FLAGS to be a simple string like 'commercial', which
360 the user typically matches exactly in the acceptlist by
361        explicitly appending the package name, e.g. 'commercial_foo'.
362 If we fail the match however, we then split the flag across
363 '_' and append each fragment and test until we either match or
364 run out of fragments.
365 """
366 flag_pn = ("%s_%s" % (flag, pn))
367 for candidate in acceptlist:
368 if flag_pn == candidate:
369 return True
370
371 flag_cur = ""
372 flagments = flag_pn.split("_")
373 flagments.pop() # we've already tested the full string
374 for flagment in flagments:
375 if flag_cur:
376 flag_cur += "_"
377 flag_cur += flagment
378 for candidate in acceptlist:
379 if flag_cur == candidate:
380 return True
381 return False
382
383 def all_license_flags_match(license_flags, acceptlist):
384 """ Return all unmatched flags, None if all flags match """
385 pn = d.getVar('PN')
386 split_acceptlist = acceptlist.split()
387 flags = []
388 for flag in license_flags.split():
389 if not license_flag_matches(flag, split_acceptlist, pn):
390 flags.append(flag)
391 return flags if flags else None
392
393 license_flags = d.getVar('LICENSE_FLAGS')
394 if license_flags:
395 acceptlist = d.getVar('LICENSE_FLAGS_ACCEPTED')
396 if not acceptlist:
397 return license_flags.split()
398 unmatched_flags = all_license_flags_match(license_flags, acceptlist)
399 if unmatched_flags:
400 return unmatched_flags
401 return None
402
403def check_license_format(d):
404 """
405    This function checks that LICENSE is well formed: it validates the
406    operators between licenses and reports license names that are separated
407    by bare spaces instead of a valid operator.
408 """
409 pn = d.getVar('PN')
410 licenses = d.getVar('LICENSE')
411
412 elements = list(filter(lambda x: x.strip(), license_operator.split(licenses)))
413 for pos, element in enumerate(elements):
414 if license_pattern.match(element):
415 if pos > 0 and license_pattern.match(elements[pos - 1]):
416 oe.qa.handle_error('license-format',
417 '%s: LICENSE value "%s" has an invalid format - license names ' \
418 'must be separated by the following characters to indicate ' \
419 'the license selection: %s' %
420 (pn, licenses, license_operator_chars), d)
421 elif not license_operator.match(element):
422 oe.qa.handle_error('license-format',
423 '%s: LICENSE value "%s" has an invalid separator "%s" that is not ' \
424 'in the valid list of separators (%s)' %
425 (pn, licenses, element, license_operator_chars), d)
426
427def skip_incompatible_package_licenses(d, pkgs):
428 if not pkgs:
429 return {}
430
431 pn = d.getVar("PN")
432
433 check_license = False if pn.startswith("nativesdk-") else True
434 for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
435 "-crosssdk-${SDK_SYS}", "-crosssdk-initial-${SDK_SYS}",
436 "-cross-canadian-${TRANSLATED_TARGET_ARCH}"]:
437 if pn.endswith(d.expand(t)):
438 check_license = False
439 if pn.startswith("gcc-source-"):
440 check_license = False
441
442 bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()
443 if not check_license or not bad_licenses:
444 return {}
445
446 bad_licenses = expand_wildcard_licenses(d, bad_licenses)
447
448 exceptions = (d.getVar("INCOMPATIBLE_LICENSE_EXCEPTIONS") or "").split()
449
450 for lic_exception in exceptions:
451 if ":" in lic_exception:
452 lic_exception = lic_exception.split(":")[1]
453 if lic_exception in obsolete_license_list():
454 bb.fatal("Obsolete license %s used in INCOMPATIBLE_LICENSE_EXCEPTIONS" % lic_exception)
455
456 skipped_pkgs = {}
457 for pkg in pkgs:
458 remaining_bad_licenses = apply_pkg_license_exception(pkg, bad_licenses, exceptions)
459
460 incompatible_lic = incompatible_license(d, remaining_bad_licenses, pkg)
461 if incompatible_lic:
462 skipped_pkgs[pkg] = incompatible_lic
463
464 return skipped_pkgs
465
466def tidy_licenses(value):
467 """
468    Flatten, split and sort licenses.
469 """
470 from oe.license import flattened_licenses
471
472 def _choose(a, b):
473 str_a, str_b = sorted((" & ".join(a), " & ".join(b)), key=str.casefold)
474 return ["(%s | %s)" % (str_a, str_b)]
475
476 if not isinstance(value, str):
477 value = " & ".join(value)
478
479 return sorted(list(set(flattened_licenses(value, _choose))), key=str.casefold)
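
A short illustration of tidy_licenses() normalisation (made-up inputs):

    from oe.license import tidy_licenses

    # Duplicates are collapsed and the result is case-insensitively sorted.
    tidy_licenses("MIT & BSD-3-Clause & MIT")  # -> ['BSD-3-Clause', 'MIT']
    tidy_licenses(["MIT", "Apache-2.0"])       # -> ['Apache-2.0', 'MIT']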
diff --git a/meta/lib/oe/license_finder.py b/meta/lib/oe/license_finder.py
new file mode 100644
index 0000000000..16f5d7c94c
--- /dev/null
+++ b/meta/lib/oe/license_finder.py
@@ -0,0 +1,179 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import fnmatch
8import hashlib
9import logging
10import os
11import re
12
13import bb
14import bb.utils
15
16logger = logging.getLogger("BitBake.OE.LicenseFinder")
17
18def _load_hash_csv(d):
19 """
20 Load a mapping of (checksum: license name) from all files/license-hashes.csv
21 files that can be found in the available layers.
22 """
23 import csv
24 md5sums = {}
25
26 # Read license md5sums from csv file
27 for path in d.getVar('BBPATH').split(':'):
28 csv_path = os.path.join(path, 'files', 'license-hashes.csv')
29 if os.path.isfile(csv_path):
30 with open(csv_path, newline='') as csv_file:
31 reader = csv.DictReader(csv_file, delimiter=',', fieldnames=['md5sum', 'license'])
32 for row in reader:
33 md5sums[row['md5sum']] = row['license']
34
35 return md5sums
36
37
38def _crunch_known_licenses(d):
39 """
40 Calculate the MD5 checksums for the original and "crunched" versions of all
41 known licenses.
42 """
43 md5sums = {}
44
45 lic_dirs = [d.getVar('COMMON_LICENSE_DIR')] + (d.getVar('LICENSE_PATH') or "").split()
46 for lic_dir in lic_dirs:
47 for fn in os.listdir(lic_dir):
48 path = os.path.join(lic_dir, fn)
49 # Hash the exact contents
50 md5value = bb.utils.md5_file(path)
51 md5sums[md5value] = fn
52 # Also hash a "crunched" version
53 md5value = _crunch_license(path)
54 md5sums[md5value] = fn
55
56 return md5sums
57
58
59def _crunch_license(licfile):
60 '''
61 Remove non-material text from a license file and then calculate its
62 md5sum. This works well for licenses that contain a copyright statement,
63 but is also a useful way to handle people's insistence upon reformatting
64 the license text slightly (with no material difference to the text of the
65 license).
66 '''
67
68 import oe.utils
69
70 # Note: these are carefully constructed!
71 license_title_re = re.compile(r'^#*\(? *(This is )?([Tt]he )?.{0,15} ?[Ll]icen[sc]e( \(.{1,10}\))?\)?[:\.]? ?#*$')
72 license_statement_re = re.compile(r'^((This (project|software)|.{1,10}) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$')
73 copyright_re = re.compile(r'^ *[#\*]* *(Modified work |MIT LICENSED )?Copyright ?(\([cC]\))? .*$')
74 disclaimer_re = re.compile(r'^ *\*? ?All [Rr]ights [Rr]eserved\.$')
75 email_re = re.compile(r'^.*<[\w\.-]*@[\w\.\-]*>$')
76 header_re = re.compile(r'^(\/\**!?)? ?[\-=\*]* ?(\*\/)?$')
77 tag_re = re.compile(r'^ *@?\(?([Ll]icense|MIT)\)?$')
78 url_re = re.compile(r'^ *[#\*]* *https?:\/\/[\w\.\/\-]+$')
79
80 lictext = []
81 with open(licfile, 'r', errors='surrogateescape') as f:
82 for line in f:
83 # Drop opening statements
84 if copyright_re.match(line):
85 continue
86 elif disclaimer_re.match(line):
87 continue
88 elif email_re.match(line):
89 continue
90 elif header_re.match(line):
91 continue
92 elif tag_re.match(line):
93 continue
94 elif url_re.match(line):
95 continue
96 elif license_title_re.match(line):
97 continue
98 elif license_statement_re.match(line):
99 continue
100 # Strip comment symbols
101 line = line.replace('*', '') \
102 .replace('#', '')
103 # Unify spelling
104 line = line.replace('sub-license', 'sublicense')
105 # Squash spaces
106 line = oe.utils.squashspaces(line.strip())
107 # Replace smart quotes, double quotes and backticks with single quotes
108 line = line.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u201c","'").replace(u"\u201d", "'").replace('"', '\'').replace('`', '\'')
109 # Unify brackets
110 line = line.replace("{", "[").replace("}", "]")
111 if line:
112 lictext.append(line)
113
114 m = hashlib.md5()
115 try:
116 m.update(' '.join(lictext).encode('utf-8'))
117 md5val = m.hexdigest()
118 except UnicodeEncodeError:
119 md5val = None
120 return md5val
121
122
123def find_license_files(srctree, first_only=False):
124 """
125 Search srctree for files that look like they could be licenses.
126 If first_only is True, only return the first file found.
127 """
128 licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10']
129 skip_extensions = (".html", ".js", ".json", ".svg", ".ts", ".go", ".sh")
130 licfiles = []
131 for root, dirs, files in os.walk(srctree):
132 # Sort files so that LICENSE is before LICENSE.subcomponent, which is
133 # meaningful if first_only is set.
134 for fn in sorted(files):
135 if fn.endswith(skip_extensions):
136 continue
137 for spec in licspecs:
138 if fnmatch.fnmatch(fn, spec):
139 fullpath = os.path.join(root, fn)
140 if not fullpath in licfiles:
141 licfiles.append(fullpath)
142 if first_only:
143 return licfiles
144
145 return licfiles
146
147
148def match_licenses(licfiles, srctree, d, extra_hashes={}):
149 md5sums = {}
150 md5sums.update(_load_hash_csv(d))
151 md5sums.update(_crunch_known_licenses(d))
152 md5sums.update(extra_hashes)
153
154 licenses = []
155 for licfile in sorted(licfiles):
156 resolved_licfile = d.expand(licfile)
157 md5value = bb.utils.md5_file(resolved_licfile)
158 license = md5sums.get(md5value, None)
159 if not license:
160 crunched_md5 = _crunch_license(resolved_licfile)
161 license = md5sums.get(crunched_md5, None)
162 if not license:
163 license = 'Unknown'
164 logger.info("Please add the following line for '%s' to a 'license-hashes.csv' " \
165 "and replace `Unknown` with the license:\n" \
166 "%s,Unknown" % (os.path.relpath(licfile, srctree + "/.."), md5value))
167
168 licenses.append((license, os.path.relpath(licfile, srctree), md5value))
169
170 return licenses
171
172
173def find_licenses(srctree, d, first_only=False, extra_hashes={}):
174 licfiles = find_license_files(srctree, first_only)
175 licenses = match_licenses(licfiles, srctree, d, extra_hashes)
176
177 # FIXME should we grab at least one source file with a license header and add that too?
178
179 return licenses
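
A hedged usage sketch (srctree is a hypothetical unpacked source directory; d is the usual BitBake datastore available inside a task):

    from oe.license_finder import find_licenses

    # Illustrative only: report the best-guess license for each candidate file,
    # e.g. from recipetool-style code.
    for license, relpath, md5value in find_licenses(srctree, d):
        bb.note("%s: %s (md5 %s)" % (relpath, license, md5value))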
diff --git a/meta/lib/oe/lsb.py b/meta/lib/oe/lsb.py
index 43e46380d7..3ec03e5042 100644
--- a/meta/lib/oe/lsb.py
+++ b/meta/lib/oe/lsb.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
diff --git a/meta/lib/oe/maketype.py b/meta/lib/oe/maketype.py
index d929c8b3e5..7a83bdf602 100644
--- a/meta/lib/oe/maketype.py
+++ b/meta/lib/oe/maketype.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4"""OpenEmbedded variable typing support 6"""OpenEmbedded variable typing support
@@ -10,12 +12,7 @@ the arguments of the type's factory for details.
10 12
11import inspect 13import inspect
12import oe.types as types 14import oe.types as types
13try: 15from collections.abc import Callable
14 # Python 3.7+
15 from collections.abc import Callable
16except ImportError:
17 # Python < 3.7
18 from collections import Callable
19 16
20available_types = {} 17available_types = {}
21 18
diff --git a/meta/lib/oe/manifest.py b/meta/lib/oe/manifest.py
index 1a058dcd73..cf7a13c247 100644
--- a/meta/lib/oe/manifest.py
+++ b/meta/lib/oe/manifest.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -198,7 +200,3 @@ def create_manifest(d, final_manifest=False, manifest_dir=None,
198 manifest.create_final() 200 manifest.create_final()
199 else: 201 else:
200 manifest.create_initial() 202 manifest.create_initial()
201
202
203if __name__ == "__main__":
204 pass
diff --git a/meta/lib/oe/npm_registry.py b/meta/lib/oe/npm_registry.py
new file mode 100644
index 0000000000..d97ced7cda
--- /dev/null
+++ b/meta/lib/oe/npm_registry.py
@@ -0,0 +1,175 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import bb
8import json
9import os, subprocess
10
11_ALWAYS_SAFE = frozenset('ABCDEFGHIJKLMNOPQRSTUVWXYZ'
12 'abcdefghijklmnopqrstuvwxyz'
13 '0123456789'
14 '_.-~()')
15
16MISSING_OK = object()
17
18REGISTRY = "https://registry.npmjs.org"
19
20# We cannot use urllib.parse here because npm expects lowercase
21# hex-chars but urllib generates uppercase ones
22def uri_quote(s, safe = '/'):
23 res = ""
24 safe_set = set(safe)
25 for c in s:
26 if c in _ALWAYS_SAFE or c in safe_set:
27 res += c
28 else:
29 res += '%%%02x' % ord(c)
30 return res
31
32class PackageJson:
33 def __init__(self, spec):
34 self.__spec = spec
35
36 @property
37 def name(self):
38 return self.__spec['name']
39
40 @property
41 def version(self):
42 return self.__spec['version']
43
44 @property
45 def empty_manifest(self):
46 return {
47 'name': self.name,
48 'description': self.__spec.get('description', ''),
49 'versions': {},
50 }
51
52 def base_filename(self):
53 return uri_quote(self.name, safe = '@')
54
55 def as_manifest_entry(self, tarball_uri):
56 res = {}
57
58 ## NOTE: 'npm install' requires more than basic meta information;
59 ## e.g. it takes 'bin' from this manifest entry but not the actual
60 ## 'package.json'
61 for (idx,dflt) in [('name', None),
62 ('description', ""),
63 ('version', None),
64 ('bin', MISSING_OK),
65 ('man', MISSING_OK),
66 ('scripts', MISSING_OK),
67 ('directories', MISSING_OK),
68 ('dependencies', MISSING_OK),
69 ('devDependencies', MISSING_OK),
70 ('optionalDependencies', MISSING_OK),
71 ('license', "unknown")]:
72 if idx in self.__spec:
73 res[idx] = self.__spec[idx]
74 elif dflt == MISSING_OK:
75 pass
76 elif dflt != None:
77 res[idx] = dflt
78 else:
79 raise Exception("%s-%s: missing key %s" % (self.name,
80 self.version,
81 idx))
82
83 res['dist'] = {
84 'tarball': tarball_uri,
85 }
86
87 return res
88
89class ManifestImpl:
90 def __init__(self, base_fname, spec):
91 self.__base = base_fname
92 self.__spec = spec
93
94 def load(self):
95 try:
96 with open(self.filename, "r") as f:
97 res = json.load(f)
98 except IOError:
99 res = self.__spec.empty_manifest
100
101 return res
102
103 def save(self, meta):
104 with open(self.filename, "w") as f:
105 json.dump(meta, f, indent = 2)
106
107 @property
108 def filename(self):
109 return self.__base + ".meta"
110
111class Manifest:
112 def __init__(self, base_fname, spec):
113 self.__base = base_fname
114 self.__spec = spec
115 self.__lockf = None
116 self.__impl = None
117
118 def __enter__(self):
119 self.__lockf = bb.utils.lockfile(self.__base + ".lock")
120 self.__impl = ManifestImpl(self.__base, self.__spec)
121 return self.__impl
122
123 def __exit__(self, exc_type, exc_val, exc_tb):
124 bb.utils.unlockfile(self.__lockf)
125
126class NpmCache:
127 def __init__(self, cache):
128 self.__cache = cache
129
130 @property
131 def path(self):
132 return self.__cache
133
134 def run(self, type, key, fname):
135 subprocess.run(['oe-npm-cache', self.__cache, type, key, fname],
136 check = True)
137
138class NpmRegistry:
139 def __init__(self, path, cache):
140 self.__path = path
141 self.__cache = NpmCache(cache + '/_cacache')
142 bb.utils.mkdirhier(self.__path)
143 bb.utils.mkdirhier(self.__cache.path)
144
145 @staticmethod
146 ## This function is critical and must match nodejs expectations
147 def _meta_uri(spec):
148 return REGISTRY + '/' + uri_quote(spec.name, safe = '@')
149
150 @staticmethod
151 ## Exact return value does not matter; just make it look like a
152 ## usual registry url
153 def _tarball_uri(spec):
154 return '%s/%s/-/%s-%s.tgz' % (REGISTRY,
155 uri_quote(spec.name, safe = '@'),
156 uri_quote(spec.name, safe = '@/'),
157 spec.version)
158
159 def add_pkg(self, tarball, pkg_json):
160 pkg_json = PackageJson(pkg_json)
161 base = os.path.join(self.__path, pkg_json.base_filename())
162
163 with Manifest(base, pkg_json) as manifest:
164 meta = manifest.load()
165 tarball_uri = self._tarball_uri(pkg_json)
166
167 meta['versions'][pkg_json.version] = pkg_json.as_manifest_entry(tarball_uri)
168
169 manifest.save(meta)
170
171 ## Cache entries are a little bit dependent on the nodejs
172 ## version; version specific cache implementation must
173 ## mitigate differences
174        self.__cache.run('meta', self._meta_uri(pkg_json), manifest.filename)
175        self.__cache.run('tgz', tarball_uri, tarball)
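
To illustrate why uri_quote() exists (npm compares lowercase percent-escapes, which urllib does not produce), a small example with a scoped package name:

    from oe.npm_registry import uri_quote

    uri_quote("@scope/name", safe = "@")   # -> '@scope%2fname'
    uri_quote("@scope/name", safe = "@/")  # -> '@scope/name'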
diff --git a/meta/lib/oe/overlayfs.py b/meta/lib/oe/overlayfs.py
new file mode 100644
index 0000000000..8b88900f71
--- /dev/null
+++ b/meta/lib/oe/overlayfs.py
@@ -0,0 +1,54 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6# This file contains common functions for overlayfs and its QA check
7
8# this function is based on https://github.com/systemd/systemd/blob/main/src/basic/unit-name.c
9def escapeSystemdUnitName(path):
10 escapeMap = {
11 '/': '-',
12 '-': "\\x2d",
13        '\\': "\\x5c"   # 0x5c is '\', matching systemd's escaping
14 }
15 return "".join([escapeMap.get(c, c) for c in path.strip('/')])
16
17def strForBash(s):
18 return s.replace('\\', '\\\\')
19
20def allOverlaysUnitName(d):
21 return d.getVar('PN') + '-overlays.service'
22
23def mountUnitName(unit):
24 return escapeSystemdUnitName(unit) + '.mount'
25
26def helperUnitName(unit):
27 return escapeSystemdUnitName(unit) + '-create-upper-dir.service'
28
29def unitFileList(d):
30 fileList = []
31 overlayMountPoints = d.getVarFlags("OVERLAYFS_MOUNT_POINT")
32
33 if not overlayMountPoints:
34 bb.fatal("A recipe uses overlayfs class but there is no OVERLAYFS_MOUNT_POINT set in your MACHINE configuration")
35
36 # check that we have required mount points set first
37 requiredMountPoints = d.getVarFlags('OVERLAYFS_WRITABLE_PATHS')
38 for mountPoint in requiredMountPoints:
39 if mountPoint not in overlayMountPoints:
40 bb.fatal("Missing required mount point for OVERLAYFS_MOUNT_POINT[%s] in your MACHINE configuration" % mountPoint)
41
42 for mountPoint in overlayMountPoints:
43 mountPointList = d.getVarFlag('OVERLAYFS_WRITABLE_PATHS', mountPoint)
44 if not mountPointList:
45            bb.debug(1, "No mount points defined for %s flag, don't add to file list" % mountPoint)
46 continue
47 for path in mountPointList.split():
48 fileList.append(mountUnitName(path))
49 fileList.append(helperUnitName(path))
50
51 fileList.append(allOverlaysUnitName(d))
52
53 return fileList
54
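
For clarity, the escaping above mirrors systemd's unit-name encoding; for example (paths are illustrative):

    from oe.overlayfs import escapeSystemdUnitName, mountUnitName

    escapeSystemdUnitName("/mnt/over-lay")  # -> 'mnt-over\x2dlay'
    mountUnitName("/data/app")              # -> 'data-app.mount'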
diff --git a/meta/lib/oe/package.py b/meta/lib/oe/package.py
index dd700cbb0c..ce69151e5d 100644
--- a/meta/lib/oe/package.py
+++ b/meta/lib/oe/package.py
@@ -1,12 +1,25 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
7import errno
8import fnmatch
9import itertools
10import os
11import shlex
12import re
13import glob
5import stat 14import stat
6import mmap 15import mmap
7import subprocess 16import subprocess
17import shutil
18
19import bb.parse
20import oe.cachedpath
8 21
9def runstrip(arg): 22def runstrip(file, elftype, strip, extra_strip_sections=''):
10 # Function to strip a single file, called from split_and_strip_files below 23 # Function to strip a single file, called from split_and_strip_files below
11 # A working 'file' (one which works on the target architecture) 24 # A working 'file' (one which works on the target architecture)
12 # 25 #
@@ -16,8 +29,6 @@ def runstrip(arg):
16 # 8 - shared library 29 # 8 - shared library
17 # 16 - kernel module 30 # 16 - kernel module
18 31
19 (file, elftype, strip) = arg
20
21 newmode = None 32 newmode = None
22 if not os.access(file, os.W_OK) or os.access(file, os.R_OK): 33 if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
23 origmode = os.stat(file)[stat.ST_MODE] 34 origmode = os.stat(file)[stat.ST_MODE]
@@ -26,7 +37,7 @@ def runstrip(arg):
26 37
27 stripcmd = [strip] 38 stripcmd = [strip]
28 skip_strip = False 39 skip_strip = False
29 # kernel module 40 # kernel module
30 if elftype & 16: 41 if elftype & 16:
31 if is_kernel_module_signed(file): 42 if is_kernel_module_signed(file):
32 bb.debug(1, "Skip strip on signed module %s" % file) 43 bb.debug(1, "Skip strip on signed module %s" % file)
@@ -40,6 +51,9 @@ def runstrip(arg):
40 # shared or executable: 51 # shared or executable:
41 elif elftype & 8 or elftype & 4: 52 elif elftype & 8 or elftype & 4:
42 stripcmd.extend(["--remove-section=.comment", "--remove-section=.note"]) 53 stripcmd.extend(["--remove-section=.comment", "--remove-section=.note"])
54 if extra_strip_sections != '':
55 for section in extra_strip_sections.split():
56 stripcmd.extend(["--remove-section=" + section])
43 57
44 stripcmd.append(file) 58 stripcmd.append(file)
45 bb.debug(1, "runstrip: %s" % stripcmd) 59 bb.debug(1, "runstrip: %s" % stripcmd)
@@ -96,7 +110,7 @@ def is_static_lib(path):
96 return start == magic 110 return start == magic
97 return False 111 return False
98 112
99def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, d, qa_already_stripped=False): 113def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, max_process, qa_already_stripped=False):
100 """ 114 """
101 Strip executable code (like executables, shared libraries) _in_place_ 115 Strip executable code (like executables, shared libraries) _in_place_
102 - Based on sysroot_strip in staging.bbclass 116 - Based on sysroot_strip in staging.bbclass
@@ -104,6 +118,7 @@ def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, d, qa_already_stripp
104 :param strip_cmd: Strip command (usually ${STRIP}) 118 :param strip_cmd: Strip command (usually ${STRIP})
105 :param libdir: ${libdir} - strip .so files in this directory 119 :param libdir: ${libdir} - strip .so files in this directory
106 :param base_libdir: ${base_libdir} - strip .so files in this directory 120 :param base_libdir: ${base_libdir} - strip .so files in this directory
121 :param max_process: number of stripping processes started in parallel
107 :param qa_already_stripped: Set to True if already-stripped' in ${INSANE_SKIP} 122 :param qa_already_stripped: Set to True if already-stripped' in ${INSANE_SKIP}
108 This is for proper logging and messages only. 123 This is for proper logging and messages only.
109 """ 124 """
@@ -146,7 +161,7 @@ def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, d, qa_already_stripp
146 # ...but is it ELF, and is it already stripped? 161 # ...but is it ELF, and is it already stripped?
147 checkelf.append(file) 162 checkelf.append(file)
148 inodecache[file] = s.st_ino 163 inodecache[file] = s.st_ino
149 results = oe.utils.multiprocess_launch(is_elf, checkelf, d) 164 results = oe.utils.multiprocess_launch_mp(is_elf, checkelf, max_process)
150 for (file, elf_file) in results: 165 for (file, elf_file) in results:
151 #elf_file = is_elf(file) 166 #elf_file = is_elf(file)
152 if elf_file & 1: 167 if elf_file & 1:
@@ -174,22 +189,35 @@ def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, d, qa_already_stripp
174 elf_file = int(elffiles[file]) 189 elf_file = int(elffiles[file])
175 sfiles.append((file, elf_file, strip_cmd)) 190 sfiles.append((file, elf_file, strip_cmd))
176 191
177 oe.utils.multiprocess_launch(runstrip, sfiles, d) 192 oe.utils.multiprocess_launch_mp(runstrip, sfiles, max_process)
178 193
194TRANSLATE = (
195 ("@", "@at@"),
196 (" ", "@space@"),
197 ("\t", "@tab@"),
198 ("[", "@openbrace@"),
199 ("]", "@closebrace@"),
200 ("_", "@underscore@"),
201 (":", "@colon@"),
202)
179 203
180def file_translate(file): 204def file_translate(file):
181 ft = file.replace("@", "@at@") 205 ft = file
182 ft = ft.replace(" ", "@space@") 206 for s, replace in TRANSLATE:
183 ft = ft.replace("\t", "@tab@") 207 ft = ft.replace(s, replace)
184 ft = ft.replace("[", "@openbrace@") 208
185 ft = ft.replace("]", "@closebrace@") 209 return ft
186 ft = ft.replace("_", "@underscore@") 210
211def file_reverse_translate(file):
212 ft = file
213 for s, replace in reversed(TRANSLATE):
214 ft = ft.replace(replace, s)
215
187 return ft 216 return ft
188 217
189def filedeprunner(arg): 218def filedeprunner(pkg, pkgfiles, rpmdeps, pkgdest):
190 import re, subprocess, shlex 219 import re, subprocess, shlex
191 220
192 (pkg, pkgfiles, rpmdeps, pkgdest) = arg
193 provides = {} 221 provides = {}
194 requires = {} 222 requires = {}
195 223
@@ -283,3 +311,1800 @@ def read_shlib_providers(d):
283 shlib_provider[s[0]] = {} 311 shlib_provider[s[0]] = {}
284 shlib_provider[s[0]][s[1]] = (dep_pkg, s[2]) 312 shlib_provider[s[0]][s[1]] = (dep_pkg, s[2])
285 return shlib_provider 313 return shlib_provider
314
315# We generate a master list of directories to process. We start by
316# seeding this list with reasonable defaults, then load entries from
317# the fs-perms.txt files.
318def fixup_perms(d):
319 import pwd, grp
320
321 cpath = oe.cachedpath.CachedPath()
322 dvar = d.getVar('PKGD')
323
324 # init using a string with the same format as a line as documented in
325 # the fs-perms.txt file
326 # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
327 # <path> link <link target>
328 #
329 # __str__ can be used to print out an entry in the input format
330 #
331 # if fs_perms_entry.path is None:
332 # an error occurred
333 # if fs_perms_entry.link, you can retrieve:
334 # fs_perms_entry.path = path
335 # fs_perms_entry.link = target of link
336 # if not fs_perms_entry.link, you can retrieve:
337 # fs_perms_entry.path = path
338 # fs_perms_entry.mode = expected dir mode or None
339 # fs_perms_entry.uid = expected uid or -1
340 # fs_perms_entry.gid = expected gid or -1
341 # fs_perms_entry.walk = 'true' or something else
342 # fs_perms_entry.fmode = expected file mode or None
343 # fs_perms_entry.fuid = expected file uid or -1
344    #    fs_perms_entry.fgid = expected file gid or -1
345 class fs_perms_entry():
346 def __init__(self, line):
347 lsplit = line.split()
348 if len(lsplit) == 3 and lsplit[1].lower() == "link":
349 self._setlink(lsplit[0], lsplit[2])
350 elif len(lsplit) == 8:
351 self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
352 else:
353 msg = "Fixup Perms: invalid config line %s" % line
354 oe.qa.handle_error("perm-config", msg, d)
355 self.path = None
356 self.link = None
357
358 def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
359 self.path = os.path.normpath(path)
360 self.link = None
361 self.mode = self._procmode(mode)
362 self.uid = self._procuid(uid)
363 self.gid = self._procgid(gid)
364 self.walk = walk.lower()
365 self.fmode = self._procmode(fmode)
366 self.fuid = self._procuid(fuid)
367 self.fgid = self._procgid(fgid)
368
369 def _setlink(self, path, link):
370 self.path = os.path.normpath(path)
371 self.link = link
372
373 def _procmode(self, mode):
374            if not mode or mode == "-":
375 return None
376 else:
377 return int(mode,8)
378
379 # Note uid/gid -1 has special significance in os.lchown
380 def _procuid(self, uid):
381 if uid is None or uid == "-":
382 return -1
383 elif uid.isdigit():
384 return int(uid)
385 else:
386 return pwd.getpwnam(uid).pw_uid
387
388 def _procgid(self, gid):
389 if gid is None or gid == "-":
390 return -1
391 elif gid.isdigit():
392 return int(gid)
393 else:
394 return grp.getgrnam(gid).gr_gid
395
396 # Use for debugging the entries
397 def __str__(self):
398 if self.link:
399 return "%s link %s" % (self.path, self.link)
400 else:
401 mode = "-"
402 if self.mode:
403 mode = "0%o" % self.mode
404 fmode = "-"
405 if self.fmode:
406 fmode = "0%o" % self.fmode
407 uid = self._mapugid(self.uid)
408 gid = self._mapugid(self.gid)
409 fuid = self._mapugid(self.fuid)
410 fgid = self._mapugid(self.fgid)
411 return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)
412
413 def _mapugid(self, id):
414 if id is None or id == -1:
415 return "-"
416 else:
417 return "%d" % id
418
419 # Fix the permission, owner and group of path
420 def fix_perms(path, mode, uid, gid, dir):
421 if mode and not os.path.islink(path):
422 #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
423 os.chmod(path, mode)
424 # -1 is a special value that means don't change the uid/gid
425 # if they are BOTH -1, don't bother to lchown
426 if not (uid == -1 and gid == -1):
427 #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
428 os.lchown(path, uid, gid)
429
430 # Return a list of configuration files based on either the default
431 # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES
432 # paths are resolved via BBPATH
433 def get_fs_perms_list(d):
434 str = ""
435 bbpath = d.getVar('BBPATH')
436 fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES') or ""
437 for conf_file in fs_perms_tables.split():
438 confpath = bb.utils.which(bbpath, conf_file)
439 if confpath:
440 str += " %s" % bb.utils.which(bbpath, conf_file)
441 else:
442 bb.warn("cannot find %s specified in FILESYSTEM_PERMS_TABLES" % conf_file)
443 return str
444
445 fs_perms_table = {}
446 fs_link_table = {}
447
448 # By default all of the standard directories specified in
449 # bitbake.conf will get 0755 root:root.
450 target_path_vars = [ 'base_prefix',
451 'prefix',
452 'exec_prefix',
453 'base_bindir',
454 'base_sbindir',
455 'base_libdir',
456 'datadir',
457 'sysconfdir',
458 'servicedir',
459 'sharedstatedir',
460 'localstatedir',
461 'infodir',
462 'mandir',
463 'docdir',
464 'bindir',
465 'sbindir',
466 'libexecdir',
467 'libdir',
468 'includedir' ]
469
470 for path in target_path_vars:
471 dir = d.getVar(path) or ""
472 if dir == "":
473 continue
474 fs_perms_table[dir] = fs_perms_entry(d.expand("%s 0755 root root false - - -" % (dir)))
475
476 # Now we actually load from the configuration files
477 for conf in get_fs_perms_list(d).split():
478 if not os.path.exists(conf):
479 continue
480 with open(conf) as f:
481 for line in f:
482 if line.startswith('#'):
483 continue
484 lsplit = line.split()
485 if len(lsplit) == 0:
486 continue
487 if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
488 msg = "Fixup perms: %s invalid line: %s" % (conf, line)
489 oe.qa.handle_error("perm-line", msg, d)
490 continue
491 entry = fs_perms_entry(d.expand(line))
492 if entry and entry.path:
493 if entry.link:
494 fs_link_table[entry.path] = entry
495 if entry.path in fs_perms_table:
496 fs_perms_table.pop(entry.path)
497 else:
498 fs_perms_table[entry.path] = entry
499 if entry.path in fs_link_table:
500 fs_link_table.pop(entry.path)
501
502 # Debug -- list out in-memory table
503 #for dir in fs_perms_table:
504 # bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))
505 #for link in fs_link_table:
506 # bb.note("Fixup Perms: %s: %s" % (link, str(fs_link_table[link])))
507
508 # We process links first, so we can go back and fixup directory ownership
509 # for any newly created directories
510 # Process in sorted order so /run gets created before /run/lock, etc.
511 for entry in sorted(fs_link_table.values(), key=lambda x: x.link):
512 link = entry.link
513 dir = entry.path
514 origin = dvar + dir
515 if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)):
516 continue
517
518 if link[0] == "/":
519 target = dvar + link
520 ptarget = link
521 else:
522 target = os.path.join(os.path.dirname(origin), link)
523 ptarget = os.path.join(os.path.dirname(dir), link)
524 if os.path.exists(target):
525 msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)
526 oe.qa.handle_error("perm-link", msg, d)
527 continue
528
529 # Create path to move directory to, move it, and then setup the symlink
530 bb.utils.mkdirhier(os.path.dirname(target))
531 #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
532 bb.utils.rename(origin, target)
533 #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
534 os.symlink(link, origin)
535
536 for dir in fs_perms_table:
537 origin = dvar + dir
538 if not (cpath.exists(origin) and cpath.isdir(origin)):
539 continue
540
541 fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
542
543 if fs_perms_table[dir].walk == 'true':
544 for root, dirs, files in os.walk(origin):
545 for dr in dirs:
546 each_dir = os.path.join(root, dr)
547 fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
548 for f in files:
549 each_file = os.path.join(root, f)
550 fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)
551
552# Get a list of files from file vars by searching files under current working directory
553# The list contains symlinks, directories and normal files.
554def files_from_filevars(filevars):
555 cpath = oe.cachedpath.CachedPath()
556 files = []
557 for f in filevars:
558 if os.path.isabs(f):
559 f = '.' + f
560 if not f.startswith("./"):
561 f = './' + f
562 globbed = glob.glob(f, recursive=True)
563 if globbed:
564 if [ f ] != globbed:
565 files += globbed
566 continue
567 files.append(f)
568
569 symlink_paths = []
570 for ind, f in enumerate(files):
571 # Handle directory symlinks. Truncate path to the lowest level symlink
572 parent = ''
573 for dirname in f.split('/')[:-1]:
574 parent = os.path.join(parent, dirname)
575 if dirname == '.':
576 continue
577 if cpath.islink(parent):
578 bb.warn("FILES contains file '%s' which resides under a "
579 "directory symlink. Please fix the recipe and use the "
580 "real path for the file." % f[1:])
581 symlink_paths.append(f)
582 files[ind] = parent
583 f = parent
584 break
585
586 if not cpath.islink(f):
587 if cpath.isdir(f):
588 newfiles = [ os.path.join(f,x) for x in os.listdir(f) ]
589 if newfiles:
590 files += newfiles
591
592 return files, symlink_paths
593
594# Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
595def get_conffiles(pkg, d):
596 pkgdest = d.getVar('PKGDEST')
597 root = os.path.join(pkgdest, pkg)
598 cwd = os.getcwd()
599 os.chdir(root)
600
601    conffiles = d.getVar('CONFFILES:%s' % pkg)
602    if conffiles is None:
603        conffiles = d.getVar('CONFFILES')
604    if conffiles is None:
605        conffiles = ""
606 conffiles = conffiles.split()
607 conf_orig_list = files_from_filevars(conffiles)[0]
608
609 # Remove links and directories from conf_orig_list to get conf_list which only contains normal files
610 conf_list = []
611 for f in conf_orig_list:
612 if os.path.isdir(f):
613 continue
614 if os.path.islink(f):
615 continue
616 if not os.path.exists(f):
617 continue
618 conf_list.append(f)
619
620 # Remove the leading './'
621 for i in range(0, len(conf_list)):
622 conf_list[i] = conf_list[i][1:]
623
624 os.chdir(cwd)
625 return sorted(conf_list)
626
627def legitimize_package_name(s):
628 """
629 Make sure package names are legitimate strings
630 """
631
632 def fixutf(m):
633 cp = m.group(1)
634 if cp:
635 return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape')
636
637 # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
638 s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', fixutf, s)
639
640 # Remaining package name validity fixes
641 return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')
642
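
For example (made-up locale names):

    from oe.package import legitimize_package_name

    legitimize_package_name("en_GB.ISO-8859-1")  # -> 'en-gb.iso-8859-1'
    legitimize_package_name("aa_DJ@saaho")       # -> 'aa-dj+saaho'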
643def split_locales(d):
644 cpath = oe.cachedpath.CachedPath()
645 if (d.getVar('PACKAGE_NO_LOCALE') == '1'):
646 bb.debug(1, "package requested not splitting locales")
647 return
648
649 packages = (d.getVar('PACKAGES') or "").split()
650
651 dvar = d.getVar('PKGD')
652 pn = d.getVar('LOCALEBASEPN')
653
654 try:
655 locale_index = packages.index(pn + '-locale')
656 packages.pop(locale_index)
657 except ValueError:
658 locale_index = len(packages)
659
660 lic = d.getVar("LICENSE:" + pn + "-locale")
661
662 localepaths = []
663 locales = set()
664 for localepath in (d.getVar('LOCALE_PATHS') or "").split():
665 localedir = dvar + localepath
666 if not cpath.isdir(localedir):
667 bb.debug(1, 'No locale files in %s' % localepath)
668 continue
669
670 localepaths.append(localepath)
671 with os.scandir(localedir) as it:
672 for entry in it:
673 if entry.is_dir():
674 locales.add(entry.name)
675
676 if len(locales) == 0:
677 bb.debug(1, "No locale files in this package")
678 return
679
680 summary = d.getVar('SUMMARY') or pn
681 description = d.getVar('DESCRIPTION') or ""
682 locale_section = d.getVar('LOCALE_SECTION')
683 mlprefix = d.getVar('MLPREFIX') or ""
684 for l in sorted(locales):
685 ln = legitimize_package_name(l)
686 pkg = pn + '-locale-' + ln
687 packages.insert(locale_index, pkg)
688 locale_index += 1
689 files = []
690 for localepath in localepaths:
691 files.append(os.path.join(localepath, l))
692 d.setVar('FILES:' + pkg, " ".join(files))
693 d.setVar('RRECOMMENDS:' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
694 d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
695 d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l))
696 d.setVar('DESCRIPTION:' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l))
697 if lic:
698 d.setVar('LICENSE:' + pkg, lic)
699 if locale_section:
700 d.setVar('SECTION:' + pkg, locale_section)
701
702 d.setVar('PACKAGES', ' '.join(packages))
703
704 # Disabled by RP 18/06/07
705 # Wildcards aren't supported in debian
706 # They break with ipkg since glibc-locale* will mean that
707 # glibc-localedata-translit* won't install as a dependency
708 # for some other package which breaks meta-toolchain
709 # Probably breaks since virtual-locale- isn't provided anywhere
710 #rdep = (d.getVar('RDEPENDS:%s' % pn) or "").split()
711 #rdep.append('%s-locale*' % pn)
712 #d.setVar('RDEPENDS:%s' % pn, ' '.join(rdep))
713
714def package_debug_vars(d):
715 # We default to '.debug' style
716 if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory':
717 # Single debug-file-directory style debug info
718 debug_vars = {
719 "append": ".debug",
720 "staticappend": "",
721 "dir": "",
722 "staticdir": "",
723 "libdir": "/usr/lib/debug",
724 "staticlibdir": "/usr/lib/debug-static",
725 "srcdir": "/usr/src/debug",
726 }
727 elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-without-src':
728 # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
729 debug_vars = {
730 "append": "",
731 "staticappend": "",
732 "dir": "/.debug",
733 "staticdir": "/.debug-static",
734 "libdir": "",
735 "staticlibdir": "",
736 "srcdir": "",
737 }
738 elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg':
739 debug_vars = {
740 "append": "",
741 "staticappend": "",
742 "dir": "/.debug",
743 "staticdir": "/.debug-static",
744 "libdir": "",
745 "staticlibdir": "",
746 "srcdir": "/usr/src/debug",
747 }
748 else:
749 # Original OE-core, a.k.a. ".debug", style debug info
750 debug_vars = {
751 "append": "",
752 "staticappend": "",
753 "dir": "/.debug",
754 "staticdir": "/.debug-static",
755 "libdir": "",
756 "staticlibdir": "",
757 "srcdir": "/usr/src/debug",
758 }
759
760 return debug_vars
761
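# For illustration, the variables above place the debug file for /usr/bin/foo
# (paths relative to ${PKGD}, derived from the splitdebuginfo() logic below) at:
#
#   default ".debug" style:       /usr/bin/.debug/foo
#   debug-file-directory style:   /usr/lib/debug/usr/bin/foo.debug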
762
763def parse_debugsources_from_dwarfsrcfiles_output(dwarfsrcfiles_output):
764 debugfiles = {}
765
766 for line in dwarfsrcfiles_output.splitlines():
767 if line.startswith("\t"):
768 debugfiles[os.path.normpath(line.split()[0])] = ""
769
770 return debugfiles.keys()
771
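# The parser above assumes dwarfsrcfiles output where each referenced source
# file appears on its own tab-indented line, e.g. (illustrative):
#
#   /path/to/foo.o
#   <TAB>/usr/src/debug/foo/1.0/foo.c
#   <TAB>/usr/src/debug/foo/1.0/foo.h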
772def source_info(file, d, fatal=True):
773 cmd = ["dwarfsrcfiles", file]
774 try:
775 output = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.STDOUT)
776 retval = 0
777 except subprocess.CalledProcessError as exc:
778 output = exc.output
779 retval = exc.returncode
780
781 # 255 means a specific file wasn't fully parsed to get the debug file list, which is not a fatal failure
782 if retval != 0 and retval != 255:
783 msg = "dwarfsrcfiles failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else "")
784 if fatal:
785 bb.fatal(msg)
786 bb.note(msg)
787
788 debugsources = parse_debugsources_from_dwarfsrcfiles_output(output)
789
790 return list(debugsources)
791
792def splitdebuginfo(file, dvar, dv, d):
793    # Function to split a single file into two components: one is the stripped
794    # target system binary, the other contains any debugging information. The
795    # two files are linked to reference each other.
796 #
797 # return a mapping of files:debugsources
798
799 src = file[len(dvar):]
800 dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
801 debugfile = dvar + dest
802 sources = []
803
804 if file.endswith(".ko") and file.find("/lib/modules/") != -1:
805 if oe.package.is_kernel_module_signed(file):
806 bb.debug(1, "Skip strip on signed module %s" % file)
807 return (file, sources)
808
809 # Split the file...
810 bb.utils.mkdirhier(os.path.dirname(debugfile))
811 #bb.note("Split %s -> %s" % (file, debugfile))
812 # Only store off the hard link reference if we successfully split!
813
814 dvar = d.getVar('PKGD')
815 objcopy = d.getVar("OBJCOPY")
816
817 newmode = None
818    if not os.access(file, os.W_OK) or not os.access(file, os.R_OK):
819 origmode = os.stat(file)[stat.ST_MODE]
820 newmode = origmode | stat.S_IWRITE | stat.S_IREAD
821 os.chmod(file, newmode)
822
823 # We need to extract the debug src information here...
824 if dv["srcdir"]:
825 sources = source_info(file, d)
826
827 bb.utils.mkdirhier(os.path.dirname(debugfile))
828
829 subprocess.check_output([objcopy, '--only-keep-debug', file, debugfile], stderr=subprocess.STDOUT)
830
831 # Set the debuglink to have the view of the file path on the target
832 subprocess.check_output([objcopy, '--add-gnu-debuglink', debugfile, file], stderr=subprocess.STDOUT)
833
834 if newmode:
835 os.chmod(file, origmode)
836
837 return (file, sources)
838
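# The --only-keep-debug / --add-gnu-debuglink pair above follows the standard
# GDB separate-debug-file workflow; the link embedded in the stripped binary
# can be inspected with, for example:
#
#   readelf --string-dump=.gnu_debuglink <file>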
839def splitstaticdebuginfo(file, dvar, dv, d):
840    # Unlike the function above, there is no way to split a static library into
841    # two components. So to get similar results we will copy the unmodified
842 # static library (containing the debug symbols) into a new directory.
843 # We will then strip (preserving symbols) the static library in the
844 # typical location.
845 #
846 # return a mapping of files:debugsources
847
848 src = file[len(dvar):]
849 dest = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(src) + dv["staticappend"]
850 debugfile = dvar + dest
851 sources = []
852
853 # Copy the file...
854 bb.utils.mkdirhier(os.path.dirname(debugfile))
855 #bb.note("Copy %s -> %s" % (file, debugfile))
856
857 dvar = d.getVar('PKGD')
858
859 newmode = None
860    if not os.access(file, os.W_OK) or not os.access(file, os.R_OK):
861 origmode = os.stat(file)[stat.ST_MODE]
862 newmode = origmode | stat.S_IWRITE | stat.S_IREAD
863 os.chmod(file, newmode)
864
865 # We need to extract the debug src information here...
866 if dv["srcdir"]:
867 sources = source_info(file, d)
868
869 bb.utils.mkdirhier(os.path.dirname(debugfile))
870
871 # Copy the unmodified item to the debug directory
872 shutil.copy2(file, debugfile)
873
874 if newmode:
875 os.chmod(file, origmode)
876
877 return (file, sources)
878
879def inject_minidebuginfo(file, dvar, dv, d):
880 # Extract just the symbols from debuginfo into minidebuginfo,
881 # compress it with xz and inject it back into the binary in a .gnu_debugdata section.
882 # https://sourceware.org/gdb/onlinedocs/gdb/MiniDebugInfo.html
883
884 readelf = d.getVar('READELF')
885 nm = d.getVar('NM')
886 objcopy = d.getVar('OBJCOPY')
887
888 minidebuginfodir = d.expand('${WORKDIR}/minidebuginfo')
889
890 src = file[len(dvar):]
891 dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
892 debugfile = dvar + dest
893 minidebugfile = minidebuginfodir + src + '.minidebug'
894 bb.utils.mkdirhier(os.path.dirname(minidebugfile))
895
896 # If we didn't produce debuginfo for any reason, we can't produce minidebuginfo either
897 # so skip it.
898 if not os.path.exists(debugfile):
899 bb.debug(1, 'ELF file {} has no debuginfo, skipping minidebuginfo injection'.format(file))
900 return
901
902    # minidebuginfo does not make sense to apply to ELF objects other than
903    # executables and shared libraries, so skip applying minidebuginfo
904    # generation to objects like kernel modules.
905 for line in subprocess.check_output([readelf, '-h', debugfile], universal_newlines=True).splitlines():
906 if not line.strip().startswith("Type:"):
907 continue
908 elftype = line.split(":")[1].strip()
909 if not any(elftype.startswith(i) for i in ["EXEC", "DYN"]):
910 bb.debug(1, 'ELF file {} is not executable/shared, skipping minidebuginfo injection'.format(file))
911 return
912 break
913
914 # Find non-allocated PROGBITS, NOTE, and NOBITS sections in the debuginfo.
915 # We will exclude all of these from minidebuginfo to save space.
916 remove_section_names = []
917 for line in subprocess.check_output([readelf, '-W', '-S', debugfile], universal_newlines=True).splitlines():
918 # strip the leading " [ 1]" section index to allow splitting on space
919 if ']' not in line:
920 continue
921 fields = line[line.index(']') + 1:].split()
922 if len(fields) < 7:
923 continue
924 name = fields[0]
925 type = fields[1]
926 flags = fields[6]
927 # .debug_ sections will be removed by objcopy -S so no need to explicitly remove them
928 if name.startswith('.debug_'):
929 continue
930 if 'A' not in flags and type in ['PROGBITS', 'NOTE', 'NOBITS']:
931 remove_section_names.append(name)
932
933 # List dynamic symbols in the binary. We can exclude these from minidebuginfo
934 # because they are always present in the binary.
935 dynsyms = set()
936 for line in subprocess.check_output([nm, '-D', file, '--format=posix', '--defined-only'], universal_newlines=True).splitlines():
937 dynsyms.add(line.split()[0])
938
939 # Find all function symbols from debuginfo which aren't in the dynamic symbols table.
940 # These are the ones we want to keep in minidebuginfo.
941 keep_symbols_file = minidebugfile + '.symlist'
942 found_any_symbols = False
943 with open(keep_symbols_file, 'w') as f:
944 for line in subprocess.check_output([nm, debugfile, '--format=sysv', '--defined-only'], universal_newlines=True).splitlines():
945 fields = line.split('|')
946 if len(fields) < 7:
947 continue
948 name = fields[0].strip()
949 type = fields[3].strip()
950 if type == 'FUNC' and name not in dynsyms:
951 f.write('{}\n'.format(name))
952 found_any_symbols = True
953
954 if not found_any_symbols:
955 bb.debug(1, 'ELF file {} contains no symbols, skipping minidebuginfo injection'.format(file))
956 return
957
958 bb.utils.remove(minidebugfile)
959 bb.utils.remove(minidebugfile + '.xz')
960
961 subprocess.check_call([objcopy, '-S'] +
962 ['--remove-section={}'.format(s) for s in remove_section_names] +
963 ['--keep-symbols={}'.format(keep_symbols_file), debugfile, minidebugfile])
964
965 subprocess.check_call(['xz', '--keep', minidebugfile])
966
967 subprocess.check_call([objcopy, '--add-section', '.gnu_debugdata={}.xz'.format(minidebugfile), file])
968
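# GDB transparently falls back to the xz-compressed .gnu_debugdata section
# when the main symbol table has been stripped. The injected section can be
# checked with, for example:
#
#   readelf -S <binary> | grep gnu_debugdata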
969def copydebugsources(debugsrcdir, sources, d):
970 # The debug src information written out to sourcefile is further processed
971 # and copied to the destination here.
972
973 cpath = oe.cachedpath.CachedPath()
974
975 if debugsrcdir and sources:
976 sourcefile = d.expand("${WORKDIR}/debugsources.list")
977 bb.utils.remove(sourcefile)
978
979 # filenames are null-separated - this is an artefact of the previous use
980 # of rpm's debugedit, which was writing them out that way, and the code elsewhere
981 # is still assuming that.
982 debuglistoutput = '\0'.join(sources) + '\0'
983 with open(sourcefile, 'a') as sf:
984 sf.write(debuglistoutput)
985
986 dvar = d.getVar('PKGD')
987 strip = d.getVar("STRIP")
988 objcopy = d.getVar("OBJCOPY")
989 workdir = d.getVar("WORKDIR")
990 sdir = d.getVar("S")
991 cflags = d.expand("${CFLAGS}")
992
993 prefixmap = {}
994 for flag in cflags.split():
995 if not flag.startswith("-ffile-prefix-map"):
996 continue
997 if "recipe-sysroot" in flag:
998 continue
999 flag = flag.split("=")
1000 prefixmap[flag[1]] = flag[2]
1001
1002 nosuchdir = []
1003 basepath = dvar
1004 for p in debugsrcdir.split("/"):
1005 basepath = basepath + "/" + p
1006 if not cpath.exists(basepath):
1007 nosuchdir.append(basepath)
1008 bb.utils.mkdirhier(basepath)
1009 cpath.updatecache(basepath)
1010
1011 for pmap in prefixmap:
1012 # Ignore files from the recipe sysroots (target and native)
1013            cmd = "export LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | " % sourcefile
1014 # We need to ignore files that are not actually ours
1015 # we do this by only paying attention to items from this package
1016 cmd += "fgrep -zw '%s' | " % prefixmap[pmap]
1017 # Remove prefix in the source paths
1018 cmd += "sed 's#%s/##g' | " % (prefixmap[pmap])
1019 cmd += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)" % (pmap, dvar, prefixmap[pmap])
1020
1021 try:
1022 subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
1023 except subprocess.CalledProcessError:
1024 # Can "fail" if internal headers/transient sources are attempted
1025 pass
1026            # cpio seems to have a bug with -l and -L together: symbolic links are just copied, not dereferenced.
1027 # Work around this by manually finding and copying any symbolic links that made it through.
1028 cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \
1029 (dvar, prefixmap[pmap], dvar, prefixmap[pmap], pmap, dvar, prefixmap[pmap])
1030 subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
1031
1032        # debugsources.list may be polluted from the host if we used externalsrc:
1033        # cpio uses copy-pass and may have just created a directory structure
1034        # matching the one from the host. If that's the case, move those files to
1035        # debugsrcdir to avoid host contamination.
1036        # Any empty directory structure left behind is deleted in the next step.
1037
1038 # Same check as above for externalsrc
1039 if workdir not in sdir:
1040 if os.path.exists(dvar + debugsrcdir + sdir):
1041                cmd = "mv %s%s%s/* %s%s" % (dvar, debugsrcdir, sdir, dvar, debugsrcdir)
1042 subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
1043
1044 # The copy by cpio may have resulted in some empty directories! Remove these
1045 cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
1046 subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
1047
1048        # Also remove debugsrcdir if it's empty
1049 for p in nosuchdir[::-1]:
1050 if os.path.exists(p) and not os.listdir(p):
1051 os.rmdir(p)
1052
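# The cpio copy-pass pipeline in copydebugsources() effectively re-roots each
# mapped source tree. For an illustrative prefix map
# /build/foo-1.0 -> /usr/src/debug/foo/1.0, a listed source
# /usr/src/debug/foo/1.0/foo.c is copied from /build/foo-1.0/foo.c into
# ${PKGD}/usr/src/debug/foo/1.0/foo.c.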
1053@bb.parse.vardepsexclude("BB_NUMBER_THREADS")
1054def save_debugsources_info(debugsrcdir, sources_raw, d):
1055 import json
1056 import bb.compress.zstd
1057 if debugsrcdir and sources_raw:
1058 debugsources_file = d.expand("${PKGDESTWORK}/debugsources/${PN}-debugsources.json.zstd")
1059 debugsources_dir = os.path.dirname(debugsources_file)
1060 if not os.path.isdir(debugsources_dir):
1061 bb.utils.mkdirhier(debugsources_dir)
1062 bb.utils.remove(debugsources_file)
1063
1064 workdir = d.getVar("WORKDIR")
1065 pn = d.getVar('PN')
1066
1067        # Kernel sources are in a different directory and are a special case:
1068        # we format the sources as expected by SPDX by replacing the
1069        # /usr/src/kernel/ prefix with BP/
1070 kernel_src = d.getVar('KERNEL_SRC_PATH')
1071 bp = d.getVar('BP')
1072 sources_dict = {}
1073 for file, src_files in sources_raw:
1074 file_clean = file.replace(f"{workdir}/package/","")
1075 sources_clean = [
1076 src.replace(f"{debugsrcdir}/{pn}/", "")
1077 if not kernel_src else src.replace(f"{kernel_src}/", f"{bp}/")
1078 for src in src_files
1079 if not any(keyword in src for keyword in ("<internal>", "<built-in>")) and not src.endswith("/")
1080 ]
1081 sources_dict[file_clean] = sorted(sources_clean)
1082 num_threads = int(d.getVar("BB_NUMBER_THREADS"))
1083 with bb.compress.zstd.open(debugsources_file, "wt", encoding="utf-8", num_threads=num_threads) as f:
1084 json.dump(sources_dict, f, sort_keys=True)
1085
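# The resulting file maps packaged paths to their debug sources, e.g.
# (illustrative content):
#
#   { "usr/bin/foo": ["1.0/foo.c", "1.0/foo.h"] }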
1086@bb.parse.vardepsexclude("BB_NUMBER_THREADS")
1087def read_debugsources_info(d):
1088 import json
1089 import bb.compress.zstd
1090 try:
1091 fn = d.expand("${PKGDESTWORK}/debugsources/${PN}-debugsources.json.zstd")
1092 num_threads = int(d.getVar("BB_NUMBER_THREADS"))
1093 with bb.compress.zstd.open(fn, "rt", encoding="utf-8", num_threads=num_threads) as f:
1094 return json.load(f)
1095 except FileNotFoundError:
1096 bb.debug(1, f"File not found: {fn}")
1097 return None
1098
1099def process_split_and_strip_files(d):
1100 cpath = oe.cachedpath.CachedPath()
1101
1102 dvar = d.getVar('PKGD')
1103 pn = d.getVar('PN')
1104 hostos = d.getVar('HOST_OS')
1105
1106 oldcwd = os.getcwd()
1107 os.chdir(dvar)
1108
1109 dv = package_debug_vars(d)
1110
1111 #
1112    # First let's figure out all of the files we may have to process ... do this only once!
1113 #
1114 elffiles = {}
1115 symlinks = {}
1116 staticlibs = []
1117 inodes = {}
1118 libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
1119 baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
1120 skipfiles = (d.getVar("INHIBIT_PACKAGE_STRIP_FILES") or "").split()
1121 if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \
1122 d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
1123 checkelf = {}
1124 checkelflinks = {}
1125 checkstatic = {}
1126 for root, dirs, files in cpath.walk(dvar):
1127 for f in files:
1128 file = os.path.join(root, f)
1129
1130 # Skip debug files
1131 if dv["append"] and file.endswith(dv["append"]):
1132 continue
1133 if dv["dir"] and dv["dir"] in os.path.dirname(file[len(dvar):]):
1134 continue
1135
1136 if file in skipfiles:
1137 continue
1138
1139 try:
1140 ltarget = cpath.realpath(file, dvar, False)
1141 s = cpath.lstat(ltarget)
1142 except OSError as e:
1143 (err, strerror) = e.args
1144 if err != errno.ENOENT:
1145 raise
1146 # Skip broken symlinks
1147 continue
1148 if not s:
1149 continue
1150
1151 if oe.package.is_static_lib(file):
1152 # Use a reference of device ID and inode number to identify files
1153 file_reference = "%d_%d" % (s.st_dev, s.st_ino)
1154 checkstatic[file] = (file, file_reference)
1155 continue
1156
1157                # Check it's an executable
1158 if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \
1159 or (s[stat.ST_MODE] & stat.S_IXOTH) \
1160 or ((file.startswith(libdir) or file.startswith(baselibdir)) \
1161 and (".so" in f or ".node" in f)) \
1162 or (f.startswith('vmlinux') or ".ko" in f):
1163
1164 if cpath.islink(file):
1165 checkelflinks[file] = ltarget
1166 continue
1167 # Use a reference of device ID and inode number to identify files
1168 file_reference = "%d_%d" % (s.st_dev, s.st_ino)
1169 checkelf[file] = (file, file_reference)
1170
1171 results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelflinks.values(), d)
1172 results_map = {}
1173 for (ltarget, elf_file) in results:
1174 results_map[ltarget] = elf_file
1175 for file in checkelflinks:
1176 ltarget = checkelflinks[file]
1177 # If it's a symlink, and points to an ELF file, we capture the readlink target
1178 if results_map[ltarget]:
1179 target = os.readlink(file)
1180 #bb.note("Sym: %s (%d)" % (ltarget, results_map[ltarget]))
1181 symlinks[file] = target
1182
1183 results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelf.keys(), d)
1184
1185 # Sort results by file path. This ensures that the files are always
1186 # processed in the same order, which is important to make sure builds
1187 # are reproducible when dealing with hardlinks
1188 results.sort(key=lambda x: x[0])
1189
1190 for (file, elf_file) in results:
1191 # It's a file (or hardlink), not a link
1192 # ...but is it ELF, and is it already stripped?
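                # (elf_file is a bitmask returned by oe.package.is_elf():
                # bit 0x1 marks an ELF binary and bit 0x2 one that is already
                # stripped, as tested below.)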
1193 if elf_file & 1:
1194 if elf_file & 2:
1195 if 'already-stripped' in (d.getVar('INSANE_SKIP:' + pn) or "").split():
1196 bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
1197 else:
1198 msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
1199 oe.qa.handle_error("already-stripped", msg, d)
1200 continue
1201
1202 # At this point we have an unstripped elf file. We need to:
1203 # a) Make sure any file we strip is not hardlinked to anything else outside this tree
1204 # b) Only strip any hardlinked file once (no races)
1205 # c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks
1206
1207 # Use a reference of device ID and inode number to identify files
1208 file_reference = checkelf[file][1]
1209 if file_reference in inodes:
1210 os.unlink(file)
1211 os.link(inodes[file_reference][0], file)
1212 inodes[file_reference].append(file)
1213 else:
1214 inodes[file_reference] = [file]
1215 # break hardlink
1216 bb.utils.break_hardlinks(file)
1217 elffiles[file] = elf_file
1218 # Modified the file so clear the cache
1219 cpath.updatecache(file)
1220
1221 # Do the same hardlink processing as above, but for static libraries
1222 results = list(checkstatic.keys())
1223
1224 # As above, sort the results.
1225 results.sort(key=lambda x: x[0])
1226
1227 for file in results:
1228 # Use a reference of device ID and inode number to identify files
1229 file_reference = checkstatic[file][1]
1230 if file_reference in inodes:
1231 os.unlink(file)
1232 os.link(inodes[file_reference][0], file)
1233 inodes[file_reference].append(file)
1234 else:
1235 inodes[file_reference] = [file]
1236 # break hardlink
1237 bb.utils.break_hardlinks(file)
1238 staticlibs.append(file)
1239 # Modified the file so clear the cache
1240 cpath.updatecache(file)
1241
1242 def strip_pkgd_prefix(f):
1243 nonlocal dvar
1244
1245 if f.startswith(dvar):
1246 return f[len(dvar):]
1247
1248 return f
1249
1250 #
1251    # First let's process debug splitting
1252 #
1253 if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
1254 results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, dv, d))
1255
1256 if dv["srcdir"] and not hostos.startswith("mingw"):
1257 if (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
1258 results = oe.utils.multiprocess_launch(splitstaticdebuginfo, staticlibs, d, extraargs=(dvar, dv, d))
1259 else:
1260 for file in staticlibs:
1261                    results.append((file, source_info(file, d)))
1262
1263 d.setVar("PKGDEBUGSOURCES", {strip_pkgd_prefix(f): sorted(s) for f, s in results})
1264
1265 sources = set()
1266 for r in results:
1267 sources.update(r[1])
1268
1269 # Hardlink our debug symbols to the other hardlink copies
1270 for ref in inodes:
1271 if len(inodes[ref]) == 1:
1272 continue
1273
1274 target = inodes[ref][0][len(dvar):]
1275 for file in inodes[ref][1:]:
1276 src = file[len(dvar):]
1277 dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
1278 fpath = dvar + dest
1279 ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
1280 if os.access(ftarget, os.R_OK):
1281 bb.utils.mkdirhier(os.path.dirname(fpath))
1282 # Only one hardlink of separated debug info file in each directory
1283 if not os.access(fpath, os.R_OK):
1284 #bb.note("Link %s -> %s" % (fpath, ftarget))
1285 os.link(ftarget, fpath)
1286 elif (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
1287 deststatic = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(file) + dv["staticappend"]
1288 fpath = dvar + deststatic
1289 ftarget = dvar + dv["staticlibdir"] + os.path.dirname(target) + dv["staticdir"] + "/" + os.path.basename(target) + dv["staticappend"]
1290 if os.access(ftarget, os.R_OK):
1291 bb.utils.mkdirhier(os.path.dirname(fpath))
1292 # Only one hardlink of separated debug info file in each directory
1293 if not os.access(fpath, os.R_OK):
1294 #bb.note("Link %s -> %s" % (fpath, ftarget))
1295 os.link(ftarget, fpath)
1296 else:
1297 bb.note("Unable to find inode link target %s" % (target))
1298
1299 # Create symlinks for all cases we were able to split symbols
1300 for file in symlinks:
1301 src = file[len(dvar):]
1302 dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
1303 fpath = dvar + dest
1304 # Skip it if the target doesn't exist
1305 try:
1306 s = os.stat(fpath)
1307 except OSError as e:
1308 (err, strerror) = e.args
1309 if err != errno.ENOENT:
1310 raise
1311 continue
1312
1313 ltarget = symlinks[file]
1314 lpath = os.path.dirname(ltarget)
1315 lbase = os.path.basename(ltarget)
1316 ftarget = ""
1317 if lpath and lpath != ".":
1318 ftarget += lpath + dv["dir"] + "/"
1319 ftarget += lbase + dv["append"]
1320 if lpath.startswith(".."):
1321 ftarget = os.path.join("..", ftarget)
1322 bb.utils.mkdirhier(os.path.dirname(fpath))
1323 #bb.note("Symlink %s -> %s" % (fpath, ftarget))
1324 os.symlink(ftarget, fpath)
1325
1326 # Process the dv["srcdir"] if requested...
1327 # This copies and places the referenced sources for later debugging...
1328 copydebugsources(dv["srcdir"], sources, d)
1329
1330 # Save source info to be accessible to other tasks
1331 save_debugsources_info(dv["srcdir"], results, d)
1332 #
1333 # End of debug splitting
1334 #
1335
1336 #
1337    # Now let's go back over things and strip them
1338 #
1339 if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'):
1340 strip = d.getVar("STRIP")
1341 sfiles = []
1342 for file in elffiles:
1343 elf_file = int(elffiles[file])
1344 #bb.note("Strip %s" % file)
1345 sfiles.append((file, elf_file, strip))
1346 if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
1347 for f in staticlibs:
1348 sfiles.append((f, 16, strip))
1349
1350 oe.utils.multiprocess_launch(oe.package.runstrip, sfiles, d)
1351
1352 # Build "minidebuginfo" and reinject it back into the stripped binaries
1353 if bb.utils.contains('DISTRO_FEATURES', 'minidebuginfo', True, False, d):
1354 oe.utils.multiprocess_launch(inject_minidebuginfo, list(elffiles), d,
1355 extraargs=(dvar, dv, d))
1356
1357 #
1358 # End of strip
1359 #
1360 os.chdir(oldcwd)
1361
1362
1363def populate_packages(d):
1364 cpath = oe.cachedpath.CachedPath()
1365
1366 workdir = d.getVar('WORKDIR')
1367 outdir = d.getVar('DEPLOY_DIR')
1368 dvar = d.getVar('PKGD')
1369 packages = d.getVar('PACKAGES').split()
1370 pn = d.getVar('PN')
1371
1372 bb.utils.mkdirhier(outdir)
1373 os.chdir(dvar)
1374
1375 autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False)
1376
1377 split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg')
1378
1379 # If debug-with-srcpkg mode is enabled then add the source package if it
1380 # doesn't exist and add the source file contents to the source package.
1381 if split_source_package:
1382 src_package_name = ('%s-src' % d.getVar('PN'))
1383        if src_package_name not in packages:
1384 packages.append(src_package_name)
1385 d.setVar('FILES:%s' % src_package_name, '/usr/src/debug')
1386
1387 # Sanity check PACKAGES for duplicates
1388 # Sanity should be moved to sanity.bbclass once we have the infrastructure
1389 package_dict = {}
1390
1391 for i, pkg in enumerate(packages):
1392 if pkg in package_dict:
1393 msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
1394 oe.qa.handle_error("packages-list", msg, d)
1395 # Ensure the source package gets the chance to pick up the source files
1396 # before the debug package by ordering it first in PACKAGES. Whether it
1397 # actually picks up any source files is controlled by
1398 # PACKAGE_DEBUG_SPLIT_STYLE.
1399 elif pkg.endswith("-src"):
1400 package_dict[pkg] = (10, i)
1401 elif autodebug and pkg.endswith("-dbg"):
1402 package_dict[pkg] = (30, i)
1403 else:
1404 package_dict[pkg] = (50, i)
1405 packages = sorted(package_dict.keys(), key=package_dict.get)
1406 d.setVar('PACKAGES', ' '.join(packages))
1407 pkgdest = d.getVar('PKGDEST')
1408
1409 seen = []
1410
1411 # os.mkdir masks the permissions with umask so we have to unset it first
1412 oldumask = os.umask(0)
1413
1414 debug = []
1415 for root, dirs, files in cpath.walk(dvar):
1416 dir = root[len(dvar):]
1417 if not dir:
1418 dir = os.sep
1419 for f in (files + dirs):
1420 path = "." + os.path.join(dir, f)
1421 if "/.debug/" in path or "/.debug-static/" in path or path.endswith("/.debug"):
1422 debug.append(path)
1423
1424 for pkg in packages:
1425 root = os.path.join(pkgdest, pkg)
1426 bb.utils.mkdirhier(root)
1427
1428 filesvar = d.getVar('FILES:%s' % pkg) or ""
1429 if "//" in filesvar:
1430 msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
1431 oe.qa.handle_error("files-invalid", msg, d)
1432            filesvar = filesvar.replace("//", "/")
1433
1434 origfiles = filesvar.split()
1435 files, symlink_paths = oe.package.files_from_filevars(origfiles)
1436
1437 if autodebug and pkg.endswith("-dbg"):
1438 files.extend(debug)
1439
1440 for file in files:
1441 if (not cpath.islink(file)) and (not cpath.exists(file)):
1442 continue
1443 if file in seen:
1444 continue
1445 seen.append(file)
1446
1447 def mkdir(src, dest, p):
1448 src = os.path.join(src, p)
1449 dest = os.path.join(dest, p)
1450 fstat = cpath.stat(src)
1451 os.mkdir(dest)
1452 os.chmod(dest, fstat.st_mode)
1453 os.chown(dest, fstat.st_uid, fstat.st_gid)
1454 if p not in seen:
1455 seen.append(p)
1456 cpath.updatecache(dest)
1457
1458 def mkdir_recurse(src, dest, paths):
1459 if cpath.exists(dest + '/' + paths):
1460 return
1461 while paths.startswith("./"):
1462 paths = paths[2:]
1463 p = "."
1464 for c in paths.split("/"):
1465 p = os.path.join(p, c)
1466 if not cpath.exists(os.path.join(dest, p)):
1467 mkdir(src, dest, p)
1468
1469 if cpath.isdir(file) and not cpath.islink(file):
1470 mkdir_recurse(dvar, root, file)
1471 continue
1472
1473 mkdir_recurse(dvar, root, os.path.dirname(file))
1474            fpath = os.path.join(root, file)
1475 if not cpath.islink(file):
1476 os.link(file, fpath)
1477 continue
1478 ret = bb.utils.copyfile(file, fpath)
1479 if ret is False or ret == 0:
1480 bb.fatal("File population failed")
1481
1482 # Check if symlink paths exist
1483 for file in symlink_paths:
1484 if not os.path.exists(os.path.join(root,file)):
1485 bb.fatal("File '%s' cannot be packaged into '%s' because its "
1486 "parent directory structure does not exist. One of "
1487 "its parent directories is a symlink whose target "
1488 "directory is not included in the package." %
1489 (file, pkg))
1490
1491 os.umask(oldumask)
1492 os.chdir(workdir)
1493
1494 # Handle excluding packages with incompatible licenses
1495 package_list = []
1496 skipped_pkgs = oe.license.skip_incompatible_package_licenses(d, packages)
1497 for pkg in packages:
1498 if pkg in skipped_pkgs:
1499 msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, skipped_pkgs[pkg])
1500 oe.qa.handle_error("incompatible-license", msg, d)
1501 else:
1502 package_list.append(pkg)
1503 d.setVar('PACKAGES', ' '.join(package_list))
1504
1505 unshipped = []
1506 for root, dirs, files in cpath.walk(dvar):
1507 dir = root[len(dvar):]
1508 if not dir:
1509 dir = os.sep
1510 for f in (files + dirs):
1511 path = os.path.join(dir, f)
1512 if ('.' + path) not in seen:
1513 unshipped.append(path)
1514
1515    if unshipped:
1516 msg = pn + ": Files/directories were installed but not shipped in any package:"
1517 if "installed-vs-shipped" in (d.getVar('INSANE_SKIP:' + pn) or "").split():
1518 bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
1519 else:
1520 for f in unshipped:
1521 msg = msg + "\n " + f
1522 msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n"
1523 msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped))
1524 oe.qa.handle_error("installed-vs-shipped", msg, d)
1525
1526def process_fixsymlinks(pkgfiles, d):
1527 cpath = oe.cachedpath.CachedPath()
1528 pkgdest = d.getVar('PKGDEST')
1529 packages = d.getVar("PACKAGES", False).split()
1530
1531 dangling_links = {}
1532 pkg_files = {}
1533 for pkg in packages:
1534 dangling_links[pkg] = []
1535 pkg_files[pkg] = []
1536 inst_root = os.path.join(pkgdest, pkg)
1537 for path in pkgfiles[pkg]:
1538 rpath = path[len(inst_root):]
1539 pkg_files[pkg].append(rpath)
1540 rtarget = cpath.realpath(path, inst_root, True, assume_dir = True)
1541 if not cpath.lexists(rtarget):
1542 dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):]))
1543
1544 newrdepends = {}
1545 for pkg in dangling_links:
1546 for l in dangling_links[pkg]:
1547 found = False
1548 bb.debug(1, "%s contains dangling link %s" % (pkg, l))
1549 for p in packages:
1550 if l in pkg_files[p]:
1551 found = True
1552 bb.debug(1, "target found in %s" % p)
1553 if p == pkg:
1554 break
1555 if pkg not in newrdepends:
1556 newrdepends[pkg] = []
1557 newrdepends[pkg].append(p)
1558 break
1559            if not found:
1560 bb.note("%s contains dangling symlink to %s" % (pkg, l))
1561
1562 for pkg in newrdepends:
1563 rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "")
1564 for p in newrdepends[pkg]:
1565 if p not in rdepends:
1566 rdepends[p] = []
1567 d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False))
1568
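# For illustration: if package A ships /usr/lib/libfoo.so pointing at
# libfoo.so.1 and libfoo.so.1 is packaged in B, the loop above appends B to
# RDEPENDS:A so the link target is installed alongside the link.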
1569def process_filedeps(pkgfiles, d):
1570 """
1571 Collect perfile run-time dependency metadata
1572 Output:
1573 FILERPROVIDESFLIST:pkg - list of all files w/ deps
1574 FILERPROVIDES:filepath:pkg - per file dep
1575
1576 FILERDEPENDSFLIST:pkg - list of all files w/ deps
1577 FILERDEPENDS:filepath:pkg - per file dep
1578 """
1579 if d.getVar('SKIP_FILEDEPS') == '1':
1580 return
1581
1582 pkgdest = d.getVar('PKGDEST')
1583 packages = d.getVar('PACKAGES')
1584 rpmdeps = d.getVar('RPMDEPS')
1585
1586 def chunks(files, n):
1587 return [files[i:i+n] for i in range(0, len(files), n)]
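    # e.g. chunks([1, 2, 3, 4, 5], 2) -> [[1, 2], [3, 4], [5]]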
1588
1589 pkglist = []
1590 for pkg in packages.split():
1591 if d.getVar('SKIP_FILEDEPS:' + pkg) == '1':
1592 continue
1593 if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-') or pkg.endswith('-src'):
1594 continue
1595 for files in chunks(pkgfiles[pkg], 100):
1596 pkglist.append((pkg, files, rpmdeps, pkgdest))
1597
1598 processed = oe.utils.multiprocess_launch(oe.package.filedeprunner, pkglist, d)
1599
1600 provides_files = {}
1601 requires_files = {}
1602
1603 for result in processed:
1604 (pkg, provides, requires) = result
1605
1606 if pkg not in provides_files:
1607 provides_files[pkg] = []
1608 if pkg not in requires_files:
1609 requires_files[pkg] = []
1610
1611 for file in sorted(provides):
1612 provides_files[pkg].append(file)
1613 key = "FILERPROVIDES:" + file + ":" + pkg
1614 d.appendVar(key, " " + " ".join(provides[file]))
1615
1616 for file in sorted(requires):
1617 requires_files[pkg].append(file)
1618 key = "FILERDEPENDS:" + file + ":" + pkg
1619 d.appendVar(key, " " + " ".join(requires[file]))
1620
1621 for pkg in requires_files:
1622 d.setVar("FILERDEPENDSFLIST:" + pkg, " ".join(sorted(requires_files[pkg])))
1623 for pkg in provides_files:
1624 d.setVar("FILERPROVIDESFLIST:" + pkg, " ".join(sorted(provides_files[pkg])))
1625
1626def process_shlibs(pkgfiles, d):
1627 cpath = oe.cachedpath.CachedPath()
1628
1629 exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False)
1630 if exclude_shlibs:
1631 bb.note("not generating shlibs")
1632 return
1633
1634 lib_re = re.compile(r"^.*\.so")
1635 libdir_re = re.compile(r".*/%s$" % d.getVar('baselib'))
1636
1637 packages = d.getVar('PACKAGES')
1638
1639 shlib_pkgs = []
1640 exclusion_list = d.getVar("EXCLUDE_PACKAGES_FROM_SHLIBS")
1641 if exclusion_list:
1642 for pkg in packages.split():
1643 if pkg not in exclusion_list.split():
1644 shlib_pkgs.append(pkg)
1645 else:
1646 bb.note("not generating shlibs for %s" % pkg)
1647 else:
1648 shlib_pkgs = packages.split()
1649
1650 hostos = d.getVar('HOST_OS')
1651
1652 workdir = d.getVar('WORKDIR')
1653
1654 ver = d.getVar('PKGV')
1655 if not ver:
1656 msg = "PKGV not defined"
1657 oe.qa.handle_error("pkgv-undefined", msg, d)
1658 return
1659
1660 pkgdest = d.getVar('PKGDEST')
1661
1662 shlibswork_dir = d.getVar('SHLIBSWORKDIR')
1663
1664 def linux_so(file, pkg, pkgver, d):
1665 needs_ldconfig = False
1666 needed = set()
1667 sonames = set()
1668 ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
1669 cmd = d.getVar('OBJDUMP') + " -p " + shlex.quote(file) + " 2>/dev/null"
1670 fd = os.popen(cmd)
1671 lines = fd.readlines()
1672 fd.close()
1673 rpath = tuple()
1674 for l in lines:
1675 m = re.match(r"\s+RPATH\s+([^\s]*)", l)
1676 if m:
1677 rpaths = m.group(1).replace("$ORIGIN", ldir).split(":")
1678 rpath = tuple(map(os.path.normpath, rpaths))
1679 for l in lines:
1680 m = re.match(r"\s+NEEDED\s+([^\s]*)", l)
1681 if m:
1682 dep = m.group(1)
1683                # needed holds (name, file, rpath) tuples; the set dedupes
1684                needed.add((dep, file, rpath))
1685 m = re.match(r"\s+SONAME\s+([^\s]*)", l)
1686 if m:
1687 this_soname = m.group(1)
1688 prov = (this_soname, ldir, pkgver)
1689                if prov not in sonames:
1690 # if library is private (only used by package) then do not build shlib for it
1691 if not private_libs or len([i for i in private_libs if fnmatch.fnmatch(this_soname, i)]) == 0:
1692 sonames.add(prov)
1693 if libdir_re.match(os.path.dirname(file)):
1694 needs_ldconfig = True
1695 return (needs_ldconfig, needed, sonames)
1696
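    # linux_so() above parses objdump -p dynamic-section lines of the form
    # (illustrative):
    #
    #   NEEDED               libc.so.6
    #   SONAME               libfoo.so.1
    #   RPATH                $ORIGIN/../lib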
1697 def darwin_so(file, needed, sonames, pkgver):
1698 if not os.path.exists(file):
1699 return
1700 ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
1701
1702 def get_combinations(base):
1703 #
1704 # Given a base library name, find all combinations of this split by "." and "-"
1705 #
1706 combos = []
1707 options = base.split(".")
1708 for i in range(1, len(options) + 1):
1709 combos.append(".".join(options[0:i]))
1710 options = base.split("-")
1711 for i in range(1, len(options) + 1):
1712 combos.append("-".join(options[0:i]))
1713 return combos
1714
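        # e.g. get_combinations("libfoo.1.2") returns
        # ["libfoo", "libfoo.1", "libfoo.1.2", "libfoo.1.2"]; the duplicate
        # is harmless because results are added to the sonames set.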
1715 if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.endswith('-src'):
1716 # Drop suffix
1717 name = os.path.basename(file).rsplit(".",1)[0]
1718 # Find all combinations
1719 combos = get_combinations(name)
1720 for combo in combos:
1721                if combo not in sonames:
1722 prov = (combo, ldir, pkgver)
1723 sonames.add(prov)
1724 if file.endswith('.dylib') or file.endswith('.so'):
1725 rpath = []
1726 p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
1727 out, err = p.communicate()
1728 # If returned successfully, process stdout for results
1729 if p.returncode == 0:
1730 for l in out.split("\n"):
1731 l = l.strip()
1732 if l.startswith('path '):
1733 rpath.append(l.split()[1])
1734
1735 p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
1736 out, err = p.communicate()
1737 # If returned successfully, process stdout for results
1738 if p.returncode == 0:
1739 for l in out.split("\n"):
1740 l = l.strip()
1741 if not l or l.endswith(":"):
1742 continue
1743 if "is not an object file" in l:
1744 continue
1745 name = os.path.basename(l.split()[0]).rsplit(".", 1)[0]
1746 if name and name not in needed[pkg]:
1747 needed[pkg].add((name, file, tuple()))
1748
1749 def mingw_dll(file, needed, sonames, pkgver):
1750 if not os.path.exists(file):
1751 return
1752
1753 if file.endswith(".dll"):
1754 # assume all dlls are shared objects provided by the package
1755 sonames.add((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver))
1756
1757 if (file.endswith(".dll") or file.endswith(".exe")):
1758 # use objdump to search for "DLL Name: .*\.dll"
1759 p = subprocess.Popen([d.expand("${OBJDUMP}"), "-p", file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
1760 out, err = p.communicate()
1761 # process the output, grabbing all .dll names
1762 if p.returncode == 0:
1763 for m in re.finditer(r"DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE):
1764 dllname = m.group(1)
1765 if dllname:
1766 needed[pkg].add((dllname, file, tuple()))
1767
1768 needed = {}
1769
1770 shlib_provider = oe.package.read_shlib_providers(d)
1771
1772 for pkg in shlib_pkgs:
1773 private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
1774 private_libs = private_libs.split()
1775 needs_ldconfig = False
1776 bb.debug(2, "calculating shlib provides for %s" % pkg)
1777
1778 pkgver = d.getVar('PKGV:' + pkg)
1779 if not pkgver:
1780 pkgver = d.getVar('PV_' + pkg)
1781 if not pkgver:
1782 pkgver = ver
1783
1784 needed[pkg] = set()
1785 sonames = set()
1786 linuxlist = []
1787 for file in pkgfiles[pkg]:
1788 soname = None
1789 if cpath.islink(file):
1790 continue
1791 if hostos.startswith("darwin"):
1792 darwin_so(file, needed, sonames, pkgver)
1793 elif hostos.startswith("mingw"):
1794 mingw_dll(file, needed, sonames, pkgver)
1795 elif os.access(file, os.X_OK) or lib_re.match(file):
1796 linuxlist.append(file)
1797
1798 if linuxlist:
1799 results = oe.utils.multiprocess_launch(linux_so, linuxlist, d, extraargs=(pkg, pkgver, d))
1800 for r in results:
1801 ldconfig = r[0]
1802 needed[pkg] |= r[1]
1803 sonames |= r[2]
1804 needs_ldconfig = needs_ldconfig or ldconfig
1805
1806 shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
1807        if sonames:
1808 with open(shlibs_file, 'w') as fd:
1809 for s in sorted(sonames):
1810 if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]:
1811 (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]]
1812 if old_pkg != pkg:
1813 bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver))
1814 bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0]))
1815 fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n')
1816 if s[0] not in shlib_provider:
1817 shlib_provider[s[0]] = {}
1818 shlib_provider[s[0]][s[1]] = (pkg, pkgver)
1819 if needs_ldconfig:
1820 bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
1821 postinst = d.getVar('pkg_postinst:%s' % pkg)
1822 if not postinst:
1823 postinst = '#!/bin/sh\n'
1824 postinst += d.getVar('ldconfig_postinst_fragment')
1825 d.setVar('pkg_postinst:%s' % pkg, postinst)
1826 bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))
1827
1828 assumed_libs = d.getVar('ASSUME_SHLIBS')
1829 if assumed_libs:
1830 libdir = d.getVar("libdir")
1831 for e in assumed_libs.split():
1832 l, dep_pkg = e.split(":")
1833 lib_ver = None
1834 dep_pkg = dep_pkg.rsplit("_", 1)
1835 if len(dep_pkg) == 2:
1836 lib_ver = dep_pkg[1]
1837 dep_pkg = dep_pkg[0]
1838 if l not in shlib_provider:
1839 shlib_provider[l] = {}
1840 shlib_provider[l][libdir] = (dep_pkg, lib_ver)
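    # ASSUME_SHLIBS entries take the form <library>:<package>[_<version>]; an
    # illustrative ASSUME_SHLIBS = "libEGL.so.1:libegl-implementation_1.0"
    # registers libegl-implementation (version 1.0) as the provider of
    # libEGL.so.1 under ${libdir}.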
1841
1842 libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')]
1843
1844 for pkg in shlib_pkgs:
1845 bb.debug(2, "calculating shlib requirements for %s" % pkg)
1846
1847 private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
1848 private_libs = private_libs.split()
1849
1850 deps = list()
1851 for n in needed[pkg]:
1852 # if n is in private libraries, don't try to search provider for it
1853            # this could cause problems in case some abc.bb provides private
1854 # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on system library libfoo.so.1
1855 # but skipping it is still better alternative than providing own
1856 # version and then adding runtime dependency for the same system library
1857 if private_libs and len([i for i in private_libs if fnmatch.fnmatch(n[0], i)]) > 0:
1858 bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0]))
1859 continue
1860 if n[0] in shlib_provider.keys():
1861 shlib_provider_map = shlib_provider[n[0]]
1862 matches = set()
1863 for p in itertools.chain(list(n[2]), sorted(shlib_provider_map.keys()), libsearchpath):
1864 if p in shlib_provider_map:
1865 matches.add(p)
1866 if len(matches) > 1:
1867 matchpkgs = ', '.join([shlib_provider_map[match][0] for match in matches])
1868 bb.error("%s: Multiple shlib providers for %s: %s (used by files: %s)" % (pkg, n[0], matchpkgs, n[1]))
1869 elif len(matches) == 1:
1870 (dep_pkg, ver_needed) = shlib_provider_map[matches.pop()]
1871
1872 bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1]))
1873
1874 if dep_pkg == pkg:
1875 continue
1876
1877 if ver_needed:
1878 dep = "%s (>= %s)" % (dep_pkg, ver_needed)
1879 else:
1880 dep = dep_pkg
1881                    if dep not in deps:
1882 deps.append(dep)
1883 continue
1884 bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1]))
1885
1886 deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
1887 if os.path.exists(deps_file):
1888 os.remove(deps_file)
1889 if deps:
1890 with open(deps_file, 'w') as fd:
1891 for dep in sorted(deps):
1892 fd.write(dep + '\n')
1893
1894def process_pkgconfig(pkgfiles, d):
1895 packages = d.getVar('PACKAGES')
1896 workdir = d.getVar('WORKDIR')
1897 pkgdest = d.getVar('PKGDEST')
1898
1899 shlibs_dirs = d.getVar('SHLIBSDIRS').split()
1900 shlibswork_dir = d.getVar('SHLIBSWORKDIR')
1901
1902 pc_re = re.compile(r'(.*)\.pc$')
1903 var_re = re.compile(r'(.*)=(.*)')
1904 field_re = re.compile(r'(.*): (.*)')
1905
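    # A .pc file mixes variable assignments and "Field: value" lines; only
    # Requires and Requires.private contribute dependencies here, e.g.
    # (illustrative):
    #
    #   prefix=/usr
    #   Requires: glib-2.0 >= 2.40, gobject-2.0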
1906 pkgconfig_provided = {}
1907 pkgconfig_needed = {}
1908 for pkg in packages.split():
1909 pkgconfig_provided[pkg] = []
1910 pkgconfig_needed[pkg] = []
1911 for file in sorted(pkgfiles[pkg]):
1912 m = pc_re.match(file)
1913 if m:
1914 pd = bb.data.init()
1915 name = m.group(1)
1916 pkgconfig_provided[pkg].append(os.path.basename(name))
1917 if not os.access(file, os.R_OK):
1918 continue
1919 with open(file, 'r') as f:
1920 lines = f.readlines()
1921 for l in lines:
1922 m = field_re.match(l)
1923 if m:
1924 hdr = m.group(1)
1925 exp = pd.expand(m.group(2))
1926 if hdr == 'Requires' or hdr == 'Requires.private':
1927 pkgconfig_needed[pkg] += exp.replace(',', ' ').split()
1928 continue
1929 m = var_re.match(l)
1930 if m:
1931 name = m.group(1)
1932 val = m.group(2)
1933 pd.setVar(name, pd.expand(val))
1934
1935 for pkg in packages.split():
1936 pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
1937 if pkgconfig_provided[pkg] != []:
1938 with open(pkgs_file, 'w') as f:
1939 for p in sorted(pkgconfig_provided[pkg]):
1940 f.write('%s\n' % p)
1941
1942 # Go from least to most specific since the last one found wins
1943 for dir in reversed(shlibs_dirs):
1944 if not os.path.exists(dir):
1945 continue
1946 for file in sorted(os.listdir(dir)):
1947 m = re.match(r'^(.*)\.pclist$', file)
1948 if m:
1949 pkg = m.group(1)
1950 with open(os.path.join(dir, file)) as fd:
1951 lines = fd.readlines()
1952 pkgconfig_provided[pkg] = []
1953 for l in lines:
1954 pkgconfig_provided[pkg].append(l.rstrip())
1955
1956 for pkg in packages.split():
1957 deps = []
1958 for n in pkgconfig_needed[pkg]:
1959 found = False
1960 for k in pkgconfig_provided.keys():
1961 if n in pkgconfig_provided[k]:
1962                    if k != pkg and k not in deps:
1963 deps.append(k)
1964 found = True
1965            if not found:
1966 bb.note("couldn't find pkgconfig module '%s' in any package" % n)
1967 deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
1968        if deps:
1969 with open(deps_file, 'w') as fd:
1970 for dep in deps:
1971 fd.write(dep + '\n')
1972
1973def read_libdep_files(d):
1974 pkglibdeps = {}
1975 packages = d.getVar('PACKAGES').split()
1976 for pkg in packages:
1977 pkglibdeps[pkg] = {}
1978 for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
1979 depsfile = d.expand("${PKGDEST}/" + pkg + extension)
1980 if os.access(depsfile, os.R_OK):
1981 with open(depsfile) as fd:
1982 lines = fd.readlines()
1983 for l in lines:
1984                    l = l.rstrip()
1985 deps = bb.utils.explode_dep_versions2(l)
1986 for dep in deps:
1987                        if dep not in pkglibdeps[pkg]:
1988 pkglibdeps[pkg][dep] = deps[dep]
1989 return pkglibdeps
1990
1991def process_depchains(pkgfiles, d):
1992 """
1993 For a given set of prefix and postfix modifiers, make those packages
1994 RRECOMMENDS on the corresponding packages for its RDEPENDS.
1995
1996 Example: If package A depends upon package B, and A's .bb emits an
1997 A-dev package, this would make A-dev Recommends: B-dev.
1998
1999 If only one of a given suffix is specified, it will take the RRECOMMENDS
2000 based on the RDEPENDS of *all* other packages. If more than one of a given
2001    suffix is specified, it will only use the RDEPENDS of the single parent
2002 package.
2003 """
2004
2005 packages = d.getVar('PACKAGES')
2006 postfixes = (d.getVar('DEPCHAIN_POST') or '').split()
2007 prefixes = (d.getVar('DEPCHAIN_PRE') or '').split()
2008
2009 def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
2010
2011 #bb.note('depends for %s is %s' % (base, depends))
2012 rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")
2013
2014 for depend in sorted(depends):
2015 if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
2016 #bb.note("Skipping %s" % depend)
2017 continue
2018 if depend.endswith('-dev'):
2019 depend = depend[:-4]
2020 if depend.endswith('-dbg'):
2021 depend = depend[:-4]
2022 pkgname = getname(depend, suffix)
2023 #bb.note("Adding %s for %s" % (pkgname, depend))
2024 if pkgname not in rreclist and pkgname != pkg:
2025 rreclist[pkgname] = []
2026
2027 #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
2028 d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
2029
2030 def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
2031
2032 #bb.note('rdepends for %s is %s' % (base, rdepends))
2033 rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")
2034
2035 for depend in sorted(rdepends):
2036 if depend.find('virtual-locale-') != -1:
2037 #bb.note("Skipping %s" % depend)
2038 continue
2039 if depend.endswith('-dev'):
2040 depend = depend[:-4]
2041 if depend.endswith('-dbg'):
2042 depend = depend[:-4]
2043 pkgname = getname(depend, suffix)
2044 #bb.note("Adding %s for %s" % (pkgname, depend))
2045 if pkgname not in rreclist and pkgname != pkg:
2046 rreclist[pkgname] = []
2047
2048 #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
2049 d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
2050
2051 def add_dep(list, dep):
2052 if dep not in list:
2053 list.append(dep)
2054
2055 depends = []
2056 for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""):
2057 add_dep(depends, dep)
2058
2059 rdepends = []
2060 for pkg in packages.split():
2061 for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + pkg) or ""):
2062 add_dep(rdepends, dep)
2063
2064 #bb.note('rdepends is %s' % rdepends)
2065
2066 def post_getname(name, suffix):
2067 return '%s%s' % (name, suffix)
2068 def pre_getname(name, suffix):
2069 return '%s%s' % (suffix, name)
2070
2071 pkgs = {}
2072 for pkg in packages.split():
2073 for postfix in postfixes:
2074 if pkg.endswith(postfix):
2075                if postfix not in pkgs:
2076 pkgs[postfix] = {}
2077 pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)
2078
2079 for prefix in prefixes:
2080 if pkg.startswith(prefix):
2081                if prefix not in pkgs:
2082 pkgs[prefix] = {}
2083 pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)
2084
2085 if "-dbg" in pkgs:
2086 pkglibdeps = read_libdep_files(d)
2087 pkglibdeplist = []
2088 for pkg in pkglibdeps:
2089 for k in pkglibdeps[pkg]:
2090 add_dep(pkglibdeplist, k)
2091 dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d)))
2092
2093 for suffix in pkgs:
2094 for pkg in pkgs[suffix]:
2095 if d.getVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs'):
2096 continue
2097 (base, func) = pkgs[suffix][pkg]
2098 if suffix == "-dev":
2099 pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
2100 elif suffix == "-dbg":
2101 if not dbgdefaultdeps:
2102 pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d)
2103 continue
2104 if len(pkgs[suffix]) == 1:
2105 pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
2106 else:
2107 rdeps = []
2108 for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + base) or ""):
2109 add_dep(rdeps, dep)
2110 pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
diff --git a/meta/lib/oe/package_manager/__init__.py b/meta/lib/oe/package_manager/__init__.py
index 8e7128b195..2100a97c12 100644
--- a/meta/lib/oe/package_manager/__init__.py
+++ b/meta/lib/oe/package_manager/__init__.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -90,7 +92,7 @@ def opkg_query(cmd_output):
90 92
91def failed_postinsts_abort(pkgs, log_path): 93def failed_postinsts_abort(pkgs, log_path):
92 bb.fatal("""Postinstall scriptlets of %s have failed. If the intention is to defer them to first boot, 94 bb.fatal("""Postinstall scriptlets of %s have failed. If the intention is to defer them to first boot,
93then please place them into pkg_postinst_ontarget_${PN} (). 95then please place them into pkg_postinst_ontarget:${PN} ().
94Deferring to first boot via 'exit 1' is no longer supported. 96Deferring to first boot via 'exit 1' is no longer supported.
95Details of the failure are in %s.""" %(pkgs, log_path)) 97Details of the failure are in %s.""" %(pkgs, log_path))
96 98
@@ -120,7 +122,8 @@ def generate_locale_archive(d, rootfs, target_arch, localedir):
120 "riscv32": ["--uint32-align=4", "--little-endian"], 122 "riscv32": ["--uint32-align=4", "--little-endian"],
121 "i586": ["--uint32-align=4", "--little-endian"], 123 "i586": ["--uint32-align=4", "--little-endian"],
122 "i686": ["--uint32-align=4", "--little-endian"], 124 "i686": ["--uint32-align=4", "--little-endian"],
123 "x86_64": ["--uint32-align=4", "--little-endian"] 125 "x86_64": ["--uint32-align=4", "--little-endian"],
126 "loongarch64": ["--uint32-align=4", "--little-endian"]
124 } 127 }
125 if target_arch in locale_arch_options: 128 if target_arch in locale_arch_options:
126 arch_options = locale_arch_options[target_arch] 129 arch_options = locale_arch_options[target_arch]
@@ -189,7 +192,7 @@ class PackageManager(object, metaclass=ABCMeta):
189 bb.utils.remove(self.intercepts_dir, True) 192 bb.utils.remove(self.intercepts_dir, True)
190 bb.utils.mkdirhier(self.intercepts_dir) 193 bb.utils.mkdirhier(self.intercepts_dir)
191 for intercept in postinst_intercepts: 194 for intercept in postinst_intercepts:
192 bb.utils.copyfile(intercept, os.path.join(self.intercepts_dir, os.path.basename(intercept))) 195 shutil.copy(intercept, os.path.join(self.intercepts_dir, os.path.basename(intercept)))
193 196
194 @abstractmethod 197 @abstractmethod
195 def _handle_intercept_failure(self, failed_script): 198 def _handle_intercept_failure(self, failed_script):
@@ -266,7 +269,7 @@ class PackageManager(object, metaclass=ABCMeta):
266 pass 269 pass
267 270
268 @abstractmethod 271 @abstractmethod
269 def install(self, pkgs, attempt_only=False): 272 def install(self, pkgs, attempt_only=False, hard_depends_only=False):
270 """ 273 """
271 Install a list of packages. 'pkgs' is a list object. If 'attempt_only' is 274 Install a list of packages. 'pkgs' is a list object. If 'attempt_only' is
272 True, installation failures are ignored. 275 True, installation failures are ignored.
@@ -321,7 +324,7 @@ class PackageManager(object, metaclass=ABCMeta):
321 # TODO don't have sdk here but have a property on the superclass 324 # TODO don't have sdk here but have a property on the superclass
322 # (and respect in install_complementary) 325 # (and respect in install_complementary)
323 if sdk: 326 if sdk:
324 pkgdatadir = self.d.expand("${TMPDIR}/pkgdata/${SDK_SYS}") 327 pkgdatadir = self.d.getVar("PKGDATA_DIR_SDK")
325 else: 328 else:
326 pkgdatadir = self.d.getVar("PKGDATA_DIR") 329 pkgdatadir = self.d.getVar("PKGDATA_DIR")
327 330
@@ -344,10 +347,8 @@ class PackageManager(object, metaclass=ABCMeta):
344 def install_complementary(self, globs=None): 347 def install_complementary(self, globs=None):
345 """ 348 """
346 Install complementary packages based upon the list of currently installed 349 Install complementary packages based upon the list of currently installed
347 packages e.g. locales, *-dev, *-dbg, etc. This will only attempt to install 350 packages e.g. locales, *-dev, *-dbg, etc. Note: every backend needs to
348 these packages, if they don't exist then no error will occur. Note: every 351 call this function explicitly after the normal package installation.
349 backend needs to call this function explicitly after the normal package
350 installation
351 """ 352 """
352 if globs is None: 353 if globs is None:
353 globs = self.d.getVar('IMAGE_INSTALL_COMPLEMENTARY') 354 globs = self.d.getVar('IMAGE_INSTALL_COMPLEMENTARY')
@@ -364,45 +365,43 @@ class PackageManager(object, metaclass=ABCMeta):
364 for complementary_linguas in (self.d.getVar('IMAGE_LINGUAS_COMPLEMENTARY') or "").split(): 365 for complementary_linguas in (self.d.getVar('IMAGE_LINGUAS_COMPLEMENTARY') or "").split():
365 globs += (" " + complementary_linguas) % lang 366 globs += (" " + complementary_linguas) % lang
366 367
367 if globs is None: 368 if globs:
368 return 369 # we need to write the list of installed packages to a file because the
369 370 # oe-pkgdata-util reads it from a file
370 # we need to write the list of installed packages to a file because the 371 with tempfile.NamedTemporaryFile(mode="w+", prefix="installed-pkgs") as installed_pkgs:
371 # oe-pkgdata-util reads it from a file 372 pkgs = self.list_installed()
372 with tempfile.NamedTemporaryFile(mode="w+", prefix="installed-pkgs") as installed_pkgs: 373
373 pkgs = self.list_installed() 374 provided_pkgs = set()
374 375 for pkg in pkgs.values():
375 provided_pkgs = set() 376 provided_pkgs |= set(pkg.get('provs', []))
376 for pkg in pkgs.values(): 377
377 provided_pkgs |= set(pkg.get('provs', [])) 378 output = oe.utils.format_pkg_list(pkgs, "arch")
378 379 installed_pkgs.write(output)
379 output = oe.utils.format_pkg_list(pkgs, "arch") 380 installed_pkgs.flush()
380 installed_pkgs.write(output) 381
381 installed_pkgs.flush() 382 cmd = ["oe-pkgdata-util",
382 383 "-p", self.d.getVar('PKGDATA_DIR'), "glob", installed_pkgs.name,
383 cmd = ["oe-pkgdata-util", 384 globs]
384 "-p", self.d.getVar('PKGDATA_DIR'), "glob", installed_pkgs.name, 385 exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY')
385 globs] 386 if exclude:
386 exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY') 387 cmd.extend(['--exclude=' + '|'.join(exclude.split())])
387 if exclude: 388 try:
388 cmd.extend(['--exclude=' + '|'.join(exclude.split())]) 389 bb.note('Running %s' % cmd)
389 try: 390 proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
390 bb.note('Running %s' % cmd) 391 stdout, stderr = proc.communicate()
391 proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 392 if stderr: bb.note(stderr.decode("utf-8"))
392 stdout, stderr = proc.communicate() 393 complementary_pkgs = stdout.decode("utf-8")
393 if stderr: bb.note(stderr.decode("utf-8")) 394 complementary_pkgs = set(complementary_pkgs.split())
394 complementary_pkgs = stdout.decode("utf-8") 395 skip_pkgs = sorted(complementary_pkgs & provided_pkgs)
395 complementary_pkgs = set(complementary_pkgs.split()) 396 install_pkgs = sorted(complementary_pkgs - provided_pkgs)
396 skip_pkgs = sorted(complementary_pkgs & provided_pkgs) 397 bb.note("Installing complementary packages ... %s (skipped already provided packages %s)" % (
397 install_pkgs = sorted(complementary_pkgs - provided_pkgs) 398 ' '.join(install_pkgs),
398 bb.note("Installing complementary packages ... %s (skipped already provided packages %s)" % ( 399 ' '.join(skip_pkgs)))
399 ' '.join(install_pkgs), 400 self.install(install_pkgs, hard_depends_only=True)
400 ' '.join(skip_pkgs))) 401 except subprocess.CalledProcessError as e:
401 self.install(install_pkgs, attempt_only=True) 402 bb.fatal("Could not compute complementary packages list. Command "
402 except subprocess.CalledProcessError as e: 403 "'%s' returned %d:\n%s" %
403 bb.fatal("Could not compute complementary packages list. Command " 404 (' '.join(cmd), e.returncode, e.output.decode("utf-8")))
404 "'%s' returned %d:\n%s" %
405 (' '.join(cmd), e.returncode, e.output.decode("utf-8")))
406 405
407 if self.d.getVar('IMAGE_LOCALES_ARCHIVE') == '1': 406 if self.d.getVar('IMAGE_LOCALES_ARCHIVE') == '1':
408 target_arch = self.d.getVar('TARGET_ARCH') 407 target_arch = self.d.getVar('TARGET_ARCH')
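Note: for orientation, a backend drives this API roughly as follows. A minimal usage sketch (the variable values are illustrative, not taken from this diff): the main set is installed first, then install_complementary() resolves globs such as *-dbg via oe-pkgdata-util against what is already present; with this change the complementary set is installed with hard_depends_only=True, so weak recommendations of complementary packages are no longer pulled in.

    # Hypothetical call sequence inside a rootfs task context.
    pm = DpkgPM(d, target_rootfs, archs, base_archs)
    pm.install(d.getVar("IMAGE_INSTALL").split())
    pm.install_complementary()   # globs default to IMAGE_INSTALL_COMPLEMENTARY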
@@ -448,7 +447,7 @@ class PackageManager(object, metaclass=ABCMeta):
448 return res 447 return res
449 return _append(uris, base_paths) 448 return _append(uris, base_paths)
450 449
451def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencies): 450def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencies, include_self=False):
452 """ 451 """
453 Go through our do_package_write_X dependencies and hardlink the packages we depend 452 Go through our do_package_write_X dependencies and hardlink the packages we depend
454 upon into the repo directory. This prevents us seeing other packages that may 453 upon into the repo directory. This prevents us seeing other packages that may
@@ -469,7 +468,10 @@ def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencie
469 # Detect bitbake -b usage 468 # Detect bitbake -b usage
470 nodeps = d.getVar("BB_LIMITEDDEPS") or False 469 nodeps = d.getVar("BB_LIMITEDDEPS") or False
471 if nodeps or not filterbydependencies: 470 if nodeps or not filterbydependencies:
472 oe.path.symlink(deploydir, subrepo_dir, True) 471 for arch in d.getVar("ALL_MULTILIB_PACKAGE_ARCHS").split() + d.getVar("ALL_MULTILIB_PACKAGE_ARCHS").replace("-", "_").split():
472 target = os.path.join(deploydir + "/" + arch)
473 if os.path.exists(target):
474 oe.path.symlink(target, subrepo_dir + "/" + arch, True)
473 return 475 return
474 476
475 start = None 477 start = None
@@ -482,14 +484,17 @@ def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencie
482 bb.fatal("Couldn't find ourself in BB_TASKDEPDATA?") 484 bb.fatal("Couldn't find ourself in BB_TASKDEPDATA?")
483 pkgdeps = set() 485 pkgdeps = set()
484 start = [start] 486 start = [start]
485 seen = set(start) 487 if include_self:
488 seen = set()
489 else:
490 seen = set(start)
486 # Support direct dependencies (do_rootfs -> do_package_write_X) 491 # Support direct dependencies (do_rootfs -> do_package_write_X)
487 # or indirect dependencies within PN (do_populate_sdk_ext -> do_rootfs -> do_package_write_X) 492 # or indirect dependencies within PN (do_populate_sdk_ext -> do_rootfs -> do_package_write_X)
488 while start: 493 while start:
489 next = [] 494 next = []
490 for dep2 in start: 495 for dep2 in start:
491 for dep in taskdepdata[dep2][3]: 496 for dep in taskdepdata[dep2][3]:
492 if taskdepdata[dep][0] != pn: 497 if include_self or taskdepdata[dep][0] != pn:
493 if "do_" + taskname in dep: 498 if "do_" + taskname in dep:
494 pkgdeps.add(dep) 499 pkgdeps.add(dep)
495 elif dep not in seen: 500 elif dep not in seen:
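Note: the dependency walk above is easiest to see on a toy graph. A self-contained sketch, assuming hypothetical task ids and mimicking the BB_TASKDEPDATA tuple layout (field 0 is PN, field 3 lists dependencies):

    taskdepdata = {
        "img:do_rootfs":            ("img", 0, 0, ["img:do_package_write_ipk",
                                                   "dep:do_package_write_ipk"]),
        "img:do_package_write_ipk": ("img", 0, 0, []),
        "dep:do_package_write_ipk": ("dep", 0, 0, []),
    }

    def walk(startid, pn, taskname, include_self=False):
        pkgdeps = set()
        start = [startid]
        seen = set() if include_self else set(start)
        while start:
            next_tasks = []
            for dep2 in start:
                for dep in taskdepdata[dep2][3]:
                    if include_self or taskdepdata[dep][0] != pn:
                        if "do_" + taskname in dep:
                            pkgdeps.add(dep)
                    elif dep not in seen:
                        next_tasks.append(dep)
                        seen.add(dep)
            start = next_tasks
        return pkgdeps

    print(walk("img:do_rootfs", "img", "package_write_ipk"))
    # {'dep:do_package_write_ipk'} - the recipe's own write task is skipped
    print(walk("img:do_rootfs", "img", "package_write_ipk", include_self=True))
    # both write tasks - include_self pulls the recipe's own packages in too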
diff --git a/meta/lib/oe/package_manager/common_deb_ipk.py b/meta/lib/oe/package_manager/common_deb_ipk.py
new file mode 100644
index 0000000000..6a1e28ee6f
--- /dev/null
+++ b/meta/lib/oe/package_manager/common_deb_ipk.py
@@ -0,0 +1,97 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import glob
8import os
9import subprocess
10import tempfile
11
12import bb
13
14from oe.package_manager import opkg_query, PackageManager
15
16class OpkgDpkgPM(PackageManager):
17 def __init__(self, d, target_rootfs):
18 """
19 This is an abstract class. Do not instantiate this directly.
20 """
21 super(OpkgDpkgPM, self).__init__(d, target_rootfs)
22
23 def package_info(self, pkg):
24 """
25 Returns a dictionary with the package info.
26 """
27 raise NotImplementedError
28
29 def _common_package_info(self, cmd):
30 """
31 "Returns a dictionary with the package info.
32
33 This method extracts the common parts for Opkg and Dpkg
34 """
35
36 proc = subprocess.run(cmd, capture_output=True, encoding="utf-8", shell=True)
37 if proc.returncode:
38 bb.fatal("Unable to list available packages. Command '%s' "
39 "returned %d:\n%s" % (cmd, proc.returncode, proc.stderr))
40 elif proc.stderr:
41 bb.note("Command '%s' returned stderr: %s" % (cmd, proc.stderr))
42
43 return opkg_query(proc.stdout)
44
45 def extract(self, pkg):
46 """
47 Returns the path to a tmpdir containing the extracted contents of a package.
48
49 Deleting the tmpdir is the responsibility of the caller.
50 """
51 pkg_info = self.package_info(pkg)
52 if not pkg_info:
53 bb.fatal("Unable to get information for package '%s' while "
54 "trying to extract the package." % pkg)
55
56 ar_cmd = bb.utils.which(os.getenv("PATH"), "ar")
57 tar_cmd = bb.utils.which(os.getenv("PATH"), "tar")
58 pkg_path = pkg_info[pkg]["filepath"]
59
60 if not os.path.isfile(pkg_path):
61 bb.fatal("Unable to extract package for '%s'."
62 "File %s doesn't exists" % (pkg, pkg_path))
63
64 tmp_dir = tempfile.mkdtemp()
65 current_dir = os.getcwd()
66 os.chdir(tmp_dir)
67
68 try:
69 cmd = [ar_cmd, 'x', pkg_path]
70 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
71 data_tar = glob.glob("data.tar.*")
72 if len(data_tar) != 1:
73 bb.fatal("Unable to extract %s package. Failed to identify "
74 "data tarball (found tarballs '%s')." %
75 (pkg_path, data_tar))
76 data_tar = data_tar[0]
77 cmd = [tar_cmd, 'xf', data_tar]
78 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
79 except subprocess.CalledProcessError as e:
80 bb.utils.remove(tmp_dir, recurse=True)
81 bb.fatal("Unable to extract %s package. Command '%s' "
82 "returned %d:\n%s" % (pkg_path, ' '.join(cmd), e.returncode, e.output.decode("utf-8")))
83 except OSError as e:
84 bb.utils.remove(tmp_dir, recurse=True)
85 bb.fatal("Unable to extract %s package. Command '%s' "
86 "returned %d:\n%s at %s" % (pkg_path, ' '.join(cmd), e.errno, e.strerror, e.filename))
87
88 bb.note("Extracted %s to %s" % (pkg_path, tmp_dir))
89 bb.utils.remove(os.path.join(tmp_dir, "debian-binary"))
90 bb.utils.remove(os.path.join(tmp_dir, "control.tar.gz"))
91 bb.utils.remove(os.path.join(tmp_dir, data_tar))
92 os.chdir(current_dir)
93
94 return tmp_dir
95
96 def _handle_intercept_failure(self, registered_pkgs):
97 self.mark_packages("unpacked", registered_pkgs.split())
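Note: a caller of the shared extract() helper owns the returned directory and must remove it. A hedged sketch, within a task context (the package name is illustrative):

    import os
    import shutil

    pm = DpkgPM(d, target_rootfs, archs, base_archs)
    tmp_dir = pm.extract("busybox")       # hypothetical package name
    try:
        print(os.listdir(tmp_dir))        # unpacked payload of the .deb/.ipk
    finally:
        shutil.rmtree(tmp_dir)            # deleting the tmpdir is the caller's job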
diff --git a/meta/lib/oe/package_manager/deb/__init__.py b/meta/lib/oe/package_manager/deb/__init__.py
index 2ee68fefb1..e09e81e490 100644
--- a/meta/lib/oe/package_manager/deb/__init__.py
+++ b/meta/lib/oe/package_manager/deb/__init__.py
@@ -1,10 +1,13 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
5import re 7import re
6import subprocess 8import subprocess
7from oe.package_manager import * 9from oe.package_manager import *
10from oe.package_manager.common_deb_ipk import OpkgDpkgPM
8 11
9class DpkgIndexer(Indexer): 12class DpkgIndexer(Indexer):
10 def _create_configs(self): 13 def _create_configs(self):
@@ -53,6 +56,7 @@ class DpkgIndexer(Indexer):
53 56
54 index_cmds = [] 57 index_cmds = []
55 deb_dirs_found = False 58 deb_dirs_found = False
59 index_sign_files = set()
56 for arch in arch_list: 60 for arch in arch_list:
57 arch_dir = os.path.join(self.deploy_dir, arch) 61 arch_dir = os.path.join(self.deploy_dir, arch)
58 if not os.path.isdir(arch_dir): 62 if not os.path.isdir(arch_dir):
@@ -62,7 +66,10 @@ class DpkgIndexer(Indexer):
62 66
63 cmd += "%s -fcn Packages > Packages.gz;" % gzip 67 cmd += "%s -fcn Packages > Packages.gz;" % gzip
64 68
65 with open(os.path.join(arch_dir, "Release"), "w+") as release: 69 release_file = os.path.join(arch_dir, "Release")
70 index_sign_files.add(release_file)
71
72 with open(release_file, "w+") as release:
66 release.write("Label: %s\n" % arch) 73 release.write("Label: %s\n" % arch)
67 74
68 cmd += "PSEUDO_UNLOAD=1 %s release . >> Release" % apt_ftparchive 75 cmd += "PSEUDO_UNLOAD=1 %s release . >> Release" % apt_ftparchive
@@ -77,7 +84,16 @@ class DpkgIndexer(Indexer):
77 84
78 oe.utils.multiprocess_launch(create_index, index_cmds, self.d) 85 oe.utils.multiprocess_launch(create_index, index_cmds, self.d)
79 if self.d.getVar('PACKAGE_FEED_SIGN') == '1': 86 if self.d.getVar('PACKAGE_FEED_SIGN') == '1':
80 raise NotImplementedError('Package feed signing not implementd for dpkg') 87 signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND'))
88 else:
89 signer = None
90 if signer:
91 for f in index_sign_files:
92 signer.detach_sign(f,
93 self.d.getVar('PACKAGE_FEED_GPG_NAME'),
94 self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE'),
95 output_suffix="gpg",
96 use_sha256=True)
81 97
82class PMPkgsList(PkgsList): 98class PMPkgsList(PkgsList):
83 99
@@ -96,72 +112,6 @@ class PMPkgsList(PkgsList):
96 112
97 return opkg_query(cmd_output) 113 return opkg_query(cmd_output)
98 114
99class OpkgDpkgPM(PackageManager):
100 def __init__(self, d, target_rootfs):
101 """
102 This is an abstract class. Do not instantiate this directly.
103 """
104 super(OpkgDpkgPM, self).__init__(d, target_rootfs)
105
106 def package_info(self, pkg, cmd):
107 """
108 Returns a dictionary with the package info.
109
110 This method extracts the common parts for Opkg and Dpkg
111 """
112
113 try:
114 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8")
115 except subprocess.CalledProcessError as e:
116 bb.fatal("Unable to list available packages. Command '%s' "
117 "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8")))
118 return opkg_query(output)
119
120 def extract(self, pkg, pkg_info):
121 """
122 Returns the path to a tmpdir where resides the contents of a package.
123
124 Deleting the tmpdir is responsability of the caller.
125
126 This method extracts the common parts for Opkg and Dpkg
127 """
128
129 ar_cmd = bb.utils.which(os.getenv("PATH"), "ar")
130 tar_cmd = bb.utils.which(os.getenv("PATH"), "tar")
131 pkg_path = pkg_info[pkg]["filepath"]
132
133 if not os.path.isfile(pkg_path):
134 bb.fatal("Unable to extract package for '%s'."
135 "File %s doesn't exists" % (pkg, pkg_path))
136
137 tmp_dir = tempfile.mkdtemp()
138 current_dir = os.getcwd()
139 os.chdir(tmp_dir)
140 data_tar = 'data.tar.xz'
141
142 try:
143 cmd = [ar_cmd, 'x', pkg_path]
144 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
145 cmd = [tar_cmd, 'xf', data_tar]
146 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
147 except subprocess.CalledProcessError as e:
148 bb.utils.remove(tmp_dir, recurse=True)
149 bb.fatal("Unable to extract %s package. Command '%s' "
150 "returned %d:\n%s" % (pkg_path, ' '.join(cmd), e.returncode, e.output.decode("utf-8")))
151 except OSError as e:
152 bb.utils.remove(tmp_dir, recurse=True)
153 bb.fatal("Unable to extract %s package. Command '%s' "
154 "returned %d:\n%s at %s" % (pkg_path, ' '.join(cmd), e.errno, e.strerror, e.filename))
155
156 bb.note("Extracted %s to %s" % (pkg_path, tmp_dir))
157 bb.utils.remove(os.path.join(tmp_dir, "debian-binary"))
158 bb.utils.remove(os.path.join(tmp_dir, "control.tar.gz"))
159 os.chdir(current_dir)
160
161 return tmp_dir
162
163 def _handle_intercept_failure(self, registered_pkgs):
164 self.mark_packages("unpacked", registered_pkgs.split())
165 115
166class DpkgPM(OpkgDpkgPM): 116class DpkgPM(OpkgDpkgPM):
167 def __init__(self, d, target_rootfs, archs, base_archs, apt_conf_dir=None, deb_repo_workdir="oe-rootfs-repo", filterbydependencies=True): 117 def __init__(self, d, target_rootfs, archs, base_archs, apt_conf_dir=None, deb_repo_workdir="oe-rootfs-repo", filterbydependencies=True):
@@ -214,7 +164,7 @@ class DpkgPM(OpkgDpkgPM):
214 164
215 tmp_sf.write(status) 165 tmp_sf.write(status)
216 166
217 os.rename(status_file + ".tmp", status_file) 167 bb.utils.rename(status_file + ".tmp", status_file)
218 168
219 def run_pre_post_installs(self, package_name=None): 169 def run_pre_post_installs(self, package_name=None):
220 """ 170 """
@@ -276,14 +226,18 @@ class DpkgPM(OpkgDpkgPM):
276 226
277 self.deploy_dir_unlock() 227 self.deploy_dir_unlock()
278 228
279 def install(self, pkgs, attempt_only=False): 229 def install(self, pkgs, attempt_only=False, hard_depends_only=False):
280 if attempt_only and len(pkgs) == 0: 230 if attempt_only and len(pkgs) == 0:
281 return 231 return
282 232
283 os.environ['APT_CONFIG'] = self.apt_conf_file 233 os.environ['APT_CONFIG'] = self.apt_conf_file
284 234
285 cmd = "%s %s install --allow-downgrades --allow-remove-essential --allow-change-held-packages --allow-unauthenticated --no-remove %s" % \ 235 extra_args = ""
286 (self.apt_get_cmd, self.apt_args, ' '.join(pkgs)) 236 if hard_depends_only:
237 extra_args = "--no-install-recommends"
238
239 cmd = "%s %s install --allow-downgrades --allow-remove-essential --allow-change-held-packages --allow-unauthenticated --no-remove %s %s" % \
240 (self.apt_get_cmd, self.apt_args, extra_args, ' '.join(pkgs))
287 241
288 try: 242 try:
289 bb.note("Installing the following packages: %s" % ' '.join(pkgs)) 243 bb.note("Installing the following packages: %s" % ' '.join(pkgs))
@@ -299,13 +253,13 @@ class DpkgPM(OpkgDpkgPM):
299 for dir in dirs: 253 for dir in dirs:
300 new_dir = re.sub(r"\.dpkg-new", "", dir) 254 new_dir = re.sub(r"\.dpkg-new", "", dir)
301 if dir != new_dir: 255 if dir != new_dir:
302 os.rename(os.path.join(root, dir), 256 bb.utils.rename(os.path.join(root, dir),
303 os.path.join(root, new_dir)) 257 os.path.join(root, new_dir))
304 258
305 for file in files: 259 for file in files:
306 new_file = re.sub(r"\.dpkg-new", "", file) 260 new_file = re.sub(r"\.dpkg-new", "", file)
307 if file != new_file: 261 if file != new_file:
308 os.rename(os.path.join(root, file), 262 bb.utils.rename(os.path.join(root, file),
309 os.path.join(root, new_file)) 263 os.path.join(root, new_file))
310 264
311 265
@@ -422,7 +376,7 @@ class DpkgPM(OpkgDpkgPM):
422 multilib_variants = self.d.getVar("MULTILIB_VARIANTS"); 376 multilib_variants = self.d.getVar("MULTILIB_VARIANTS");
423 for variant in multilib_variants.split(): 377 for variant in multilib_variants.split():
424 localdata = bb.data.createCopy(self.d) 378 localdata = bb.data.createCopy(self.d)
425 variant_tune = localdata.getVar("DEFAULTTUNE_virtclass-multilib-" + variant, False) 379 variant_tune = localdata.getVar("DEFAULTTUNE:virtclass-multilib-" + variant, False)
426 orig_arch = localdata.getVar("DPKG_ARCH") 380 orig_arch = localdata.getVar("DPKG_ARCH")
427 localdata.setVar("DEFAULTTUNE", variant_tune) 381 localdata.setVar("DEFAULTTUNE", variant_tune)
428 variant_arch = localdata.getVar("DPKG_ARCH") 382 variant_arch = localdata.getVar("DPKG_ARCH")
@@ -477,7 +431,7 @@ class DpkgPM(OpkgDpkgPM):
477 Returns a dictionary with the package info. 431 Returns a dictionary with the package info.
478 """ 432 """
479 cmd = "%s show %s" % (self.apt_cache_cmd, pkg) 433 cmd = "%s show %s" % (self.apt_cache_cmd, pkg)
480 pkg_info = super(DpkgPM, self).package_info(pkg, cmd) 434 pkg_info = self._common_package_info(cmd)
481 435
482 pkg_arch = pkg_info[pkg]["pkgarch"] 436 pkg_arch = pkg_info[pkg]["pkgarch"]
483 pkg_filename = pkg_info[pkg]["filename"] 437 pkg_filename = pkg_info[pkg]["filename"]
@@ -485,19 +439,3 @@ class DpkgPM(OpkgDpkgPM):
485 os.path.join(self.deploy_dir, pkg_arch, pkg_filename) 439 os.path.join(self.deploy_dir, pkg_arch, pkg_filename)
486 440
487 return pkg_info 441 return pkg_info
488
489 def extract(self, pkg):
490 """
491 Returns the path to a tmpdir where resides the contents of a package.
492
493 Deleting the tmpdir is responsability of the caller.
494 """
495 pkg_info = self.package_info(pkg)
496 if not pkg_info:
497 bb.fatal("Unable to get information for package '%s' while "
498 "trying to extract the package." % pkg)
499
500 tmp_dir = super(DpkgPM, self).extract(pkg, pkg_info)
501 bb.utils.remove(os.path.join(tmp_dir, "data.tar.xz"))
502
503 return tmp_dir
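Note: two behavioural points in the dpkg changes above, sketched with hypothetical call sites. hard_depends_only maps to apt's --no-install-recommends, and signed feeds now get a detached Release.gpg next to each per-arch Release file instead of raising NotImplementedError.

    # Only hard runtime dependencies are installed; recommendations are skipped.
    pm.install(["packagegroup-core-boot"])                  # deps + recommends
    pm.install(complementary_pkgs, hard_depends_only=True)  # hard deps only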
diff --git a/meta/lib/oe/package_manager/deb/manifest.py b/meta/lib/oe/package_manager/deb/manifest.py
index d8eab24a06..72983bae98 100644
--- a/meta/lib/oe/package_manager/deb/manifest.py
+++ b/meta/lib/oe/package_manager/deb/manifest.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
diff --git a/meta/lib/oe/package_manager/deb/rootfs.py b/meta/lib/oe/package_manager/deb/rootfs.py
index 8fbaca11d6..1e25b64ed9 100644
--- a/meta/lib/oe/package_manager/deb/rootfs.py
+++ b/meta/lib/oe/package_manager/deb/rootfs.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
diff --git a/meta/lib/oe/package_manager/deb/sdk.py b/meta/lib/oe/package_manager/deb/sdk.py
index 9859d8f32d..6f3005053e 100644
--- a/meta/lib/oe/package_manager/deb/sdk.py
+++ b/meta/lib/oe/package_manager/deb/sdk.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -65,7 +67,14 @@ class PkgSdk(Sdk):
65 67
66 self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY')) 68 self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY'))
67 69
70 self.target_pm.run_pre_post_installs()
71
72 env_bkp = os.environ.copy()
73 os.environ['PATH'] = self.d.expand("${COREBASE}/scripts/nativesdk-intercept") + \
74 os.pathsep + os.environ["PATH"]
75
68 self.target_pm.run_intercepts(populate_sdk='target') 76 self.target_pm.run_intercepts(populate_sdk='target')
77 os.environ.update(env_bkp)
69 78
70 execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND")) 79 execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND"))
71 80
@@ -78,6 +87,8 @@ class PkgSdk(Sdk):
78 self._populate_sysroot(self.host_pm, self.host_manifest) 87 self._populate_sysroot(self.host_pm, self.host_manifest)
79 self.install_locales(self.host_pm) 88 self.install_locales(self.host_pm)
80 89
90 self.host_pm.run_pre_post_installs()
91
81 self.host_pm.run_intercepts(populate_sdk='host') 92 self.host_pm.run_intercepts(populate_sdk='host')
82 93
83 execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND")) 94 execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND"))
diff --git a/meta/lib/oe/package_manager/ipk/__init__.py b/meta/lib/oe/package_manager/ipk/__init__.py
index da488c1c7f..3d998e52ff 100644
--- a/meta/lib/oe/package_manager/ipk/__init__.py
+++ b/meta/lib/oe/package_manager/ipk/__init__.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -6,6 +8,7 @@ import re
6import shutil 8import shutil
7import subprocess 9import subprocess
8from oe.package_manager import * 10from oe.package_manager import *
11from oe.package_manager.common_deb_ipk import OpkgDpkgPM
9 12
10class OpkgIndexer(Indexer): 13class OpkgIndexer(Indexer):
11 def write_index(self): 14 def write_index(self):
@@ -14,6 +17,7 @@ class OpkgIndexer(Indexer):
14 ] 17 ]
15 18
16 opkg_index_cmd = bb.utils.which(os.getenv('PATH'), "opkg-make-index") 19 opkg_index_cmd = bb.utils.which(os.getenv('PATH'), "opkg-make-index")
20 opkg_index_cmd_extra_params = self.d.getVar('OPKG_MAKE_INDEX_EXTRA_PARAMS') or ""
17 if self.d.getVar('PACKAGE_FEED_SIGN') == '1': 21 if self.d.getVar('PACKAGE_FEED_SIGN') == '1':
18 signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND')) 22 signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND'))
19 else: 23 else:
@@ -39,8 +43,8 @@ class OpkgIndexer(Indexer):
39 if not os.path.exists(pkgs_file): 43 if not os.path.exists(pkgs_file):
40 open(pkgs_file, "w").close() 44 open(pkgs_file, "w").close()
41 45
42 index_cmds.add('%s --checksum md5 --checksum sha256 -r %s -p %s -m %s' % 46 index_cmds.add('%s --checksum md5 --checksum sha256 -r %s -p %s -m %s %s' %
43 (opkg_index_cmd, pkgs_file, pkgs_file, pkgs_dir)) 47 (opkg_index_cmd, pkgs_file, pkgs_file, pkgs_dir, opkg_index_cmd_extra_params))
44 48
45 index_sign_files.add(pkgs_file) 49 index_sign_files.add(pkgs_file)
46 50
@@ -87,74 +91,6 @@ class PMPkgsList(PkgsList):
87 return opkg_query(cmd_output) 91 return opkg_query(cmd_output)
88 92
89 93
90
91class OpkgDpkgPM(PackageManager):
92 def __init__(self, d, target_rootfs):
93 """
94 This is an abstract class. Do not instantiate this directly.
95 """
96 super(OpkgDpkgPM, self).__init__(d, target_rootfs)
97
98 def package_info(self, pkg, cmd):
99 """
100 Returns a dictionary with the package info.
101
102 This method extracts the common parts for Opkg and Dpkg
103 """
104
105 try:
106 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8")
107 except subprocess.CalledProcessError as e:
108 bb.fatal("Unable to list available packages. Command '%s' "
109 "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8")))
110 return opkg_query(output)
111
112 def extract(self, pkg, pkg_info):
113 """
114 Returns the path to a tmpdir where resides the contents of a package.
115
116 Deleting the tmpdir is responsability of the caller.
117
118 This method extracts the common parts for Opkg and Dpkg
119 """
120
121 ar_cmd = bb.utils.which(os.getenv("PATH"), "ar")
122 tar_cmd = bb.utils.which(os.getenv("PATH"), "tar")
123 pkg_path = pkg_info[pkg]["filepath"]
124
125 if not os.path.isfile(pkg_path):
126 bb.fatal("Unable to extract package for '%s'."
127 "File %s doesn't exists" % (pkg, pkg_path))
128
129 tmp_dir = tempfile.mkdtemp()
130 current_dir = os.getcwd()
131 os.chdir(tmp_dir)
132 data_tar = 'data.tar.xz'
133
134 try:
135 cmd = [ar_cmd, 'x', pkg_path]
136 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
137 cmd = [tar_cmd, 'xf', data_tar]
138 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
139 except subprocess.CalledProcessError as e:
140 bb.utils.remove(tmp_dir, recurse=True)
141 bb.fatal("Unable to extract %s package. Command '%s' "
142 "returned %d:\n%s" % (pkg_path, ' '.join(cmd), e.returncode, e.output.decode("utf-8")))
143 except OSError as e:
144 bb.utils.remove(tmp_dir, recurse=True)
145 bb.fatal("Unable to extract %s package. Command '%s' "
146 "returned %d:\n%s at %s" % (pkg_path, ' '.join(cmd), e.errno, e.strerror, e.filename))
147
148 bb.note("Extracted %s to %s" % (pkg_path, tmp_dir))
149 bb.utils.remove(os.path.join(tmp_dir, "debian-binary"))
150 bb.utils.remove(os.path.join(tmp_dir, "control.tar.gz"))
151 os.chdir(current_dir)
152
153 return tmp_dir
154
155 def _handle_intercept_failure(self, registered_pkgs):
156 self.mark_packages("unpacked", registered_pkgs.split())
157
158class OpkgPM(OpkgDpkgPM): 94class OpkgPM(OpkgDpkgPM):
159 def __init__(self, d, target_rootfs, config_file, archs, task_name='target', ipk_repo_workdir="oe-rootfs-repo", filterbydependencies=True, prepare_index=True): 95 def __init__(self, d, target_rootfs, config_file, archs, task_name='target', ipk_repo_workdir="oe-rootfs-repo", filterbydependencies=True, prepare_index=True):
160 super(OpkgPM, self).__init__(d, target_rootfs) 96 super(OpkgPM, self).__init__(d, target_rootfs)
@@ -213,7 +149,7 @@ class OpkgPM(OpkgDpkgPM):
213 149
214 tmp_sf.write(status) 150 tmp_sf.write(status)
215 151
216 os.rename(status_file + ".tmp", status_file) 152 bb.utils.rename(status_file + ".tmp", status_file)
217 153
218 def _create_custom_config(self): 154 def _create_custom_config(self):
219 bb.note("Building from feeds activated!") 155 bb.note("Building from feeds activated!")
@@ -243,7 +179,7 @@ class OpkgPM(OpkgDpkgPM):
243 """ 179 """
244 if (self.d.getVar('FEED_DEPLOYDIR_BASE_URI') or "") != "": 180 if (self.d.getVar('FEED_DEPLOYDIR_BASE_URI') or "") != "":
245 for arch in self.pkg_archs.split(): 181 for arch in self.pkg_archs.split():
246 cfg_file_name = os.path.join(self.target_rootfs, 182 cfg_file_name = oe.path.join(self.target_rootfs,
247 self.d.getVar("sysconfdir"), 183 self.d.getVar("sysconfdir"),
248 "opkg", 184 "opkg",
249 "local-%s-feed.conf" % arch) 185 "local-%s-feed.conf" % arch)
@@ -337,7 +273,7 @@ class OpkgPM(OpkgDpkgPM):
337 273
338 self.deploy_dir_unlock() 274 self.deploy_dir_unlock()
339 275
340 def install(self, pkgs, attempt_only=False): 276 def install(self, pkgs, attempt_only=False, hard_depends_only=False):
341 if not pkgs: 277 if not pkgs:
342 return 278 return
343 279
@@ -346,6 +282,8 @@ class OpkgPM(OpkgDpkgPM):
346 cmd += " --add-exclude %s" % exclude 282 cmd += " --add-exclude %s" % exclude
347 for bad_recommendation in (self.d.getVar("BAD_RECOMMENDATIONS") or "").split(): 283 for bad_recommendation in (self.d.getVar("BAD_RECOMMENDATIONS") or "").split():
348 cmd += " --add-ignore-recommends %s" % bad_recommendation 284 cmd += " --add-ignore-recommends %s" % bad_recommendation
285 if hard_depends_only:
286 cmd += " --no-install-recommends"
349 cmd += " install " 287 cmd += " install "
350 cmd += " ".join(pkgs) 288 cmd += " ".join(pkgs)
351 289
@@ -443,15 +381,16 @@ class OpkgPM(OpkgDpkgPM):
443 cmd = "%s %s --noaction install %s " % (self.opkg_cmd, 381 cmd = "%s %s --noaction install %s " % (self.opkg_cmd,
444 opkg_args, 382 opkg_args,
445 ' '.join(pkgs)) 383 ' '.join(pkgs))
446 try: 384 proc = subprocess.run(cmd, capture_output=True, encoding="utf-8", shell=True)
447 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) 385 if proc.returncode:
448 except subprocess.CalledProcessError as e:
449 bb.fatal("Unable to dummy install packages. Command '%s' " 386 bb.fatal("Unable to dummy install packages. Command '%s' "
450 "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8"))) 387 "returned %d:\n%s" % (cmd, proc.returncode, proc.stderr))
388 elif proc.stderr:
389 bb.note("Command '%s' returned stderr: %s" % (cmd, proc.stderr))
451 390
452 bb.utils.remove(temp_rootfs, True) 391 bb.utils.remove(temp_rootfs, True)
453 392
454 return output 393 return proc.stdout
455 394
456 def backup_packaging_data(self): 395 def backup_packaging_data(self):
457 # Save the opkglib for incremental ipk image generation 396
@@ -477,7 +416,7 @@ class OpkgPM(OpkgDpkgPM):
477 Returns a dictionary with the package info. 416 Returns a dictionary with the package info.
478 """ 417 """
479 cmd = "%s %s info %s" % (self.opkg_cmd, self.opkg_args, pkg) 418 cmd = "%s %s info %s" % (self.opkg_cmd, self.opkg_args, pkg)
480 pkg_info = super(OpkgPM, self).package_info(pkg, cmd) 419 pkg_info = self._common_package_info(cmd)
481 420
482 pkg_arch = pkg_info[pkg]["arch"] 421 pkg_arch = pkg_info[pkg]["arch"]
483 pkg_filename = pkg_info[pkg]["filename"] 422 pkg_filename = pkg_info[pkg]["filename"]
@@ -485,19 +424,3 @@ class OpkgPM(OpkgDpkgPM):
485 os.path.join(self.deploy_dir, pkg_arch, pkg_filename) 424 os.path.join(self.deploy_dir, pkg_arch, pkg_filename)
486 425
487 return pkg_info 426 return pkg_info
488
489 def extract(self, pkg):
490 """
491 Returns the path to a tmpdir where resides the contents of a package.
492
493 Deleting the tmpdir is responsability of the caller.
494 """
495 pkg_info = self.package_info(pkg)
496 if not pkg_info:
497 bb.fatal("Unable to get information for package '%s' while "
498 "trying to extract the package." % pkg)
499
500 tmp_dir = super(OpkgPM, self).extract(pkg, pkg_info)
501 bb.utils.remove(os.path.join(tmp_dir, "data.tar.xz"))
502
503 return tmp_dir
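Note: OPKG_MAKE_INDEX_EXTRA_PARAMS is appended verbatim to every opkg-make-index invocation. An illustrative setting (the option string is a placeholder, not a documented option list):

    # Appended to: opkg-make-index --checksum md5 --checksum sha256 -r ... -m <dir>
    d.setVar("OPKG_MAKE_INDEX_EXTRA_PARAMS", "--some-option")  # placeholder option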
diff --git a/meta/lib/oe/package_manager/ipk/manifest.py b/meta/lib/oe/package_manager/ipk/manifest.py
index ee4b57bcb0..3549d7428d 100644
--- a/meta/lib/oe/package_manager/ipk/manifest.py
+++ b/meta/lib/oe/package_manager/ipk/manifest.py
@@ -1,8 +1,11 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
5from oe.manifest import Manifest 7from oe.manifest import Manifest
8import re
6 9
7class PkgManifest(Manifest): 10class PkgManifest(Manifest):
8 """ 11 """
diff --git a/meta/lib/oe/package_manager/ipk/rootfs.py b/meta/lib/oe/package_manager/ipk/rootfs.py
index 26dbee6f6a..ba93eb62ea 100644
--- a/meta/lib/oe/package_manager/ipk/rootfs.py
+++ b/meta/lib/oe/package_manager/ipk/rootfs.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -145,51 +147,14 @@ class PkgRootfs(DpkgOpkgRootfs):
145 self.pm.recover_packaging_data() 147 self.pm.recover_packaging_data()
146 148
147 bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS'), True) 149 bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS'), True)
148
149 def _prelink_file(self, root_dir, filename):
150 bb.note('prelink %s in %s' % (filename, root_dir))
151 prelink_cfg = oe.path.join(root_dir,
152 self.d.expand('${sysconfdir}/prelink.conf'))
153 if not os.path.exists(prelink_cfg):
154 shutil.copy(self.d.expand('${STAGING_DIR_NATIVE}${sysconfdir_native}/prelink.conf'),
155 prelink_cfg)
156
157 cmd_prelink = self.d.expand('${STAGING_DIR_NATIVE}${sbindir_native}/prelink')
158 self._exec_shell_cmd([cmd_prelink,
159 '--root',
160 root_dir,
161 '-amR',
162 '-N',
163 '-c',
164 self.d.expand('${sysconfdir}/prelink.conf')])
165
166 ''' 150 '''
167 Compare two files with the same key twice to see if they are equal. 151 Compare two files with the same key twice to see if they are equal.
168 If they are not equal, it means they are duplicated and come from 152 If they are not equal, it means they are duplicated and come from
169 different packages. 153 different packages.
170 1st: Comapre them directly;
171 2nd: While incremental image creation is enabled, one of the
172 files could be probaly prelinked in the previous image
173 creation and the file has been changed, so we need to
174 prelink the other one and compare them.
175 ''' 154 '''
176 def _file_equal(self, key, f1, f2): 155 def _file_equal(self, key, f1, f2):
177
178 # Both of them are not prelinked
179 if filecmp.cmp(f1, f2): 156 if filecmp.cmp(f1, f2):
180 return True 157 return True
181
182 if bb.data.inherits_class('image-prelink', self.d):
183 if self.image_rootfs not in f1:
184 self._prelink_file(f1.replace(key, ''), f1)
185
186 if self.image_rootfs not in f2:
187 self._prelink_file(f2.replace(key, ''), f2)
188
189 # Both of them are prelinked
190 if filecmp.cmp(f1, f2):
191 return True
192
193 # Not equal 158 # Not equal
194 return False 159 return False
195 160
@@ -200,7 +165,7 @@ class PkgRootfs(DpkgOpkgRootfs):
200 """ 165 """
201 def _multilib_sanity_test(self, dirs): 166 def _multilib_sanity_test(self, dirs):
202 167
203 allow_replace = self.d.getVar("MULTILIBRE_ALLOW_REP") 168 allow_replace = "|".join((self.d.getVar("MULTILIBRE_ALLOW_REP") or "").split())
204 if allow_replace is None: 169 if allow_replace is None:
205 allow_replace = "" 170 allow_replace = ""
206 171
diff --git a/meta/lib/oe/package_manager/ipk/sdk.py b/meta/lib/oe/package_manager/ipk/sdk.py
index e2ca415c8e..3acd55f548 100644
--- a/meta/lib/oe/package_manager/ipk/sdk.py
+++ b/meta/lib/oe/package_manager/ipk/sdk.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -61,12 +63,19 @@ class PkgSdk(Sdk):
61 63
62 self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY')) 64 self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY'))
63 65
66 env_bkp = os.environ.copy()
67 os.environ['PATH'] = self.d.expand("${COREBASE}/scripts/nativesdk-intercept") + \
68 os.pathsep + os.environ["PATH"]
69
64 self.target_pm.run_intercepts(populate_sdk='target') 70 self.target_pm.run_intercepts(populate_sdk='target')
71 os.environ.update(env_bkp)
65 72
66 execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND")) 73 execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND"))
67 74
68 if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): 75 if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d):
69 self.target_pm.remove_packaging_data() 76 self.target_pm.remove_packaging_data()
77 else:
78 self.target_pm.remove_lists()
70 79
71 bb.note("Installing NATIVESDK packages") 80 bb.note("Installing NATIVESDK packages")
72 self._populate_sysroot(self.host_pm, self.host_manifest) 81 self._populate_sysroot(self.host_pm, self.host_manifest)
@@ -78,6 +87,8 @@ class PkgSdk(Sdk):
78 87
79 if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): 88 if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d):
80 self.host_pm.remove_packaging_data() 89 self.host_pm.remove_packaging_data()
90 else:
91 self.host_pm.remove_lists()
81 92
82 target_sysconfdir = os.path.join(self.sdk_target_sysroot, self.sysconfdir) 93 target_sysconfdir = os.path.join(self.sdk_target_sysroot, self.sysconfdir)
83 host_sysconfdir = os.path.join(self.sdk_host_sysroot, self.sysconfdir) 94 host_sysconfdir = os.path.join(self.sdk_host_sysroot, self.sysconfdir)
diff --git a/meta/lib/oe/package_manager/rpm/__init__.py b/meta/lib/oe/package_manager/rpm/__init__.py
index 6df0092281..323ec5008f 100644
--- a/meta/lib/oe/package_manager/rpm/__init__.py
+++ b/meta/lib/oe/package_manager/rpm/__init__.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -96,11 +98,15 @@ class RpmPM(PackageManager):
96 archs = ["sdk_provides_dummy_target"] + archs 98 archs = ["sdk_provides_dummy_target"] + archs
97 confdir = "%s/%s" %(self.target_rootfs, "etc/dnf/vars/") 99 confdir = "%s/%s" %(self.target_rootfs, "etc/dnf/vars/")
98 bb.utils.mkdirhier(confdir) 100 bb.utils.mkdirhier(confdir)
99 open(confdir + "arch", 'w').write(":".join(archs)) 101 with open(confdir + "arch", 'w') as f:
102 f.write(":".join(archs))
103
100 distro_codename = self.d.getVar('DISTRO_CODENAME') 104 distro_codename = self.d.getVar('DISTRO_CODENAME')
101 open(confdir + "releasever", 'w').write(distro_codename if distro_codename is not None else '') 105 with open(confdir + "releasever", 'w') as f:
106 f.write(distro_codename if distro_codename is not None else '')
102 107
103 open(oe.path.join(self.target_rootfs, "etc/dnf/dnf.conf"), 'w').write("") 108 with open(oe.path.join(self.target_rootfs, "etc/dnf/dnf.conf"), 'w') as f:
109 f.write("")
104 110
105 111
106 def _configure_rpm(self): 112 def _configure_rpm(self):
@@ -110,14 +116,17 @@ class RpmPM(PackageManager):
110 platformconfdir = "%s/%s" %(self.target_rootfs, "etc/rpm/") 116 platformconfdir = "%s/%s" %(self.target_rootfs, "etc/rpm/")
111 rpmrcconfdir = "%s/%s" %(self.target_rootfs, "etc/") 117 rpmrcconfdir = "%s/%s" %(self.target_rootfs, "etc/")
112 bb.utils.mkdirhier(platformconfdir) 118 bb.utils.mkdirhier(platformconfdir)
113 open(platformconfdir + "platform", 'w').write("%s-pc-linux" % self.primary_arch) 119 with open(platformconfdir + "platform", 'w') as f:
120 f.write("%s-pc-linux" % self.primary_arch)
114 with open(rpmrcconfdir + "rpmrc", 'w') as f: 121 with open(rpmrcconfdir + "rpmrc", 'w') as f:
115 f.write("arch_compat: %s: %s\n" % (self.primary_arch, self.archs if len(self.archs) > 0 else self.primary_arch)) 122 f.write("arch_compat: %s: %s\n" % (self.primary_arch, self.archs if len(self.archs) > 0 else self.primary_arch))
116 f.write("buildarch_compat: %s: noarch\n" % self.primary_arch) 123 f.write("buildarch_compat: %s: noarch\n" % self.primary_arch)
117 124
118 open(platformconfdir + "macros", 'w').write("%_transaction_color 7\n") 125 with open(platformconfdir + "macros", 'w') as f:
126 f.write("%_transaction_color 7\n")
119 if self.d.getVar('RPM_PREFER_ELF_ARCH'): 127 if self.d.getVar('RPM_PREFER_ELF_ARCH'):
120 open(platformconfdir + "macros", 'a').write("%%_prefer_color %s" % (self.d.getVar('RPM_PREFER_ELF_ARCH'))) 128 with open(platformconfdir + "macros", 'a') as f:
129 f.write("%%_prefer_color %s" % (self.d.getVar('RPM_PREFER_ELF_ARCH')))
121 130
122 if self.d.getVar('RPM_SIGN_PACKAGES') == '1': 131 if self.d.getVar('RPM_SIGN_PACKAGES') == '1':
123 signer = get_signer(self.d, self.d.getVar('RPM_GPG_BACKEND')) 132 signer = get_signer(self.d, self.d.getVar('RPM_GPG_BACKEND'))
@@ -164,13 +173,13 @@ class RpmPM(PackageManager):
164 repo_uri = uri + "/" + arch 173 repo_uri = uri + "/" + arch
165 repo_id = "oe-remote-repo" + "-".join(urlparse(repo_uri).path.split("/")) 174 repo_id = "oe-remote-repo" + "-".join(urlparse(repo_uri).path.split("/"))
166 repo_name = "OE Remote Repo:" + " ".join(urlparse(repo_uri).path.split("/")) 175 repo_name = "OE Remote Repo:" + " ".join(urlparse(repo_uri).path.split("/"))
167 open(oe.path.join(self.target_rootfs, "etc", "yum.repos.d", repo_base + ".repo"), 'a').write( 176 with open(oe.path.join(self.target_rootfs, "etc", "yum.repos.d", repo_base + ".repo"), 'a') as f:
168 "[%s]\nname=%s\nbaseurl=%s\n%s\n" % (repo_id, repo_name, repo_uri, gpg_opts)) 177 f.write("[%s]\nname=%s\nbaseurl=%s\n%s\n" % (repo_id, repo_name, repo_uri, gpg_opts))
169 else: 178 else:
170 repo_name = "OE Remote Repo:" + " ".join(urlparse(uri).path.split("/")) 179 repo_name = "OE Remote Repo:" + " ".join(urlparse(uri).path.split("/"))
171 repo_uri = uri 180 repo_uri = uri
172 open(oe.path.join(self.target_rootfs, "etc", "yum.repos.d", repo_base + ".repo"), 'w').write( 181 with open(oe.path.join(self.target_rootfs, "etc", "yum.repos.d", repo_base + ".repo"), 'w') as f:
173 "[%s]\nname=%s\nbaseurl=%s\n%s" % (repo_base, repo_name, repo_uri, gpg_opts)) 182 f.write("[%s]\nname=%s\nbaseurl=%s\n%s" % (repo_base, repo_name, repo_uri, gpg_opts))
174 183
175 def _prepare_pkg_transaction(self): 184 def _prepare_pkg_transaction(self):
176 os.environ['D'] = self.target_rootfs 185 os.environ['D'] = self.target_rootfs
@@ -181,7 +190,7 @@ class RpmPM(PackageManager):
181 os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE') 190 os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE')
182 191
183 192
184 def install(self, pkgs, attempt_only = False): 193 def install(self, pkgs, attempt_only=False, hard_depends_only=False):
185 if len(pkgs) == 0: 194 if len(pkgs) == 0:
186 return 195 return
187 self._prepare_pkg_transaction() 196 self._prepare_pkg_transaction()
@@ -192,13 +201,16 @@ class RpmPM(PackageManager):
192 201
193 output = self._invoke_dnf((["--skip-broken"] if attempt_only else []) + 202 output = self._invoke_dnf((["--skip-broken"] if attempt_only else []) +
194 (["-x", ",".join(exclude_pkgs)] if len(exclude_pkgs) > 0 else []) + 203 (["-x", ",".join(exclude_pkgs)] if len(exclude_pkgs) > 0 else []) +
195 (["--setopt=install_weak_deps=False"] if self.d.getVar('NO_RECOMMENDATIONS') == "1" else []) + 204 (["--setopt=install_weak_deps=False"] if (hard_depends_only or self.d.getVar('NO_RECOMMENDATIONS') == "1") else []) +
196 (["--nogpgcheck"] if self.d.getVar('RPM_SIGN_PACKAGES') != '1' else ["--setopt=gpgcheck=True"]) + 205 (["--nogpgcheck"] if self.d.getVar('RPM_SIGN_PACKAGES') != '1' else ["--setopt=gpgcheck=True"]) +
197 ["install"] + 206 ["install"] +
198 pkgs) 207 pkgs)
199 208
200 failed_scriptlets_pkgnames = collections.OrderedDict() 209 failed_scriptlets_pkgnames = collections.OrderedDict()
201 for line in output.splitlines(): 210 for line in output.splitlines():
211 if line.startswith("Error: Systemctl"):
212 bb.error(line)
213
202 if line.startswith("Error in POSTIN scriptlet in rpm package"): 214 if line.startswith("Error in POSTIN scriptlet in rpm package"):
203 failed_scriptlets_pkgnames[line.split()[-1]] = True 215 failed_scriptlets_pkgnames[line.split()[-1]] = True
204 216
@@ -326,7 +338,8 @@ class RpmPM(PackageManager):
326 return e.output.decode("utf-8") 338 return e.output.decode("utf-8")
327 339
328 def dump_install_solution(self, pkgs): 340 def dump_install_solution(self, pkgs):
329 open(self.solution_manifest, 'w').write(" ".join(pkgs)) 341 with open(self.solution_manifest, 'w') as f:
342 f.write(" ".join(pkgs))
330 return pkgs 343 return pkgs
331 344
332 def load_old_install_solution(self): 345 def load_old_install_solution(self):
@@ -360,7 +373,8 @@ class RpmPM(PackageManager):
360 bb.utils.mkdirhier(target_path) 373 bb.utils.mkdirhier(target_path)
361 num = self._script_num_prefix(target_path) 374 num = self._script_num_prefix(target_path)
362 saved_script_name = oe.path.join(target_path, "%d-%s" % (num, pkg)) 375 saved_script_name = oe.path.join(target_path, "%d-%s" % (num, pkg))
363 open(saved_script_name, 'w').write(output) 376 with open(saved_script_name, 'w') as f:
377 f.write(output)
364 os.chmod(saved_script_name, 0o755) 378 os.chmod(saved_script_name, 0o755)
365 379
366 def _handle_intercept_failure(self, registered_pkgs): 380 def _handle_intercept_failure(self, registered_pkgs):
@@ -372,14 +386,15 @@ class RpmPM(PackageManager):
372 self.save_rpmpostinst(pkg) 386 self.save_rpmpostinst(pkg)
373 387
374 def extract(self, pkg): 388 def extract(self, pkg):
375 output = self._invoke_dnf(["repoquery", "--queryformat", "%{location}", pkg]) 389 output = self._invoke_dnf(["repoquery", "--location", pkg])
376 pkg_name = output.splitlines()[-1] 390 pkg_name = output.splitlines()[-1]
377 if not pkg_name.endswith(".rpm"): 391 if not pkg_name.endswith(".rpm"):
378 bb.fatal("dnf could not find package %s in repository: %s" %(pkg, output)) 392 bb.fatal("dnf could not find package %s in repository: %s" %(pkg, output))
379 pkg_path = oe.path.join(self.rpm_repo_dir, pkg_name) 393 # Strip file: prefix
394 pkg_path = pkg_name[5:]
380 395
381 cpio_cmd = bb.utils.which(os.getenv("PATH"), "cpio") 396 tar_cmd = bb.utils.which(os.getenv("PATH"), "tar")
382 rpm2cpio_cmd = bb.utils.which(os.getenv("PATH"), "rpm2cpio") 397 rpm2archive_cmd = bb.utils.which(os.getenv("PATH"), "rpm2archive")
383 398
384 if not os.path.isfile(pkg_path): 399 if not os.path.isfile(pkg_path):
385 bb.fatal("Unable to extract package for '%s'." 400 bb.fatal("Unable to extract package for '%s'."
@@ -390,7 +405,7 @@ class RpmPM(PackageManager):
390 os.chdir(tmp_dir) 405 os.chdir(tmp_dir)
391 406
392 try: 407 try:
393 cmd = "%s %s | %s -idmv" % (rpm2cpio_cmd, pkg_path, cpio_cmd) 408 cmd = "%s -n %s | %s xv" % (rpm2archive_cmd, pkg_path, tar_cmd)
394 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) 409 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
395 except subprocess.CalledProcessError as e: 410 except subprocess.CalledProcessError as e:
396 bb.utils.remove(tmp_dir, recurse=True) 411 bb.utils.remove(tmp_dir, recurse=True)
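Note: extraction switches from rpm2cpio | cpio to rpm2archive | tar. The equivalent shell shape, with an illustrative package path (rpm2archive -n writes a tar stream to stdout, which tar unpacks into the current directory, as extract() does inside its tmpdir):

    import subprocess
    subprocess.check_output(
        "rpm2archive -n ./busybox-1.36.1-r0.core2_64.rpm | tar xv",
        stderr=subprocess.STDOUT, shell=True)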
diff --git a/meta/lib/oe/package_manager/rpm/manifest.py b/meta/lib/oe/package_manager/rpm/manifest.py
index e6604b301f..6ee7c329f0 100644
--- a/meta/lib/oe/package_manager/rpm/manifest.py
+++ b/meta/lib/oe/package_manager/rpm/manifest.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
diff --git a/meta/lib/oe/package_manager/rpm/rootfs.py b/meta/lib/oe/package_manager/rpm/rootfs.py
index 00d07cd9cc..3ba5396320 100644
--- a/meta/lib/oe/package_manager/rpm/rootfs.py
+++ b/meta/lib/oe/package_manager/rpm/rootfs.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -108,7 +110,7 @@ class PkgRootfs(Rootfs):
108 if self.progress_reporter: 110 if self.progress_reporter:
109 self.progress_reporter.next_stage() 111 self.progress_reporter.next_stage()
110 112
111 self._setup_dbg_rootfs(['/etc', '/var/lib/rpm', '/var/cache/dnf', '/var/lib/dnf']) 113 self._setup_dbg_rootfs(['/etc/rpm', '/etc/rpmrc', '/etc/dnf', '/var/lib/rpm', '/var/cache/dnf', '/var/lib/dnf'])
112 114
113 execute_pre_post_process(self.d, rpm_post_process_cmds) 115 execute_pre_post_process(self.d, rpm_post_process_cmds)
114 116
diff --git a/meta/lib/oe/package_manager/rpm/sdk.py b/meta/lib/oe/package_manager/rpm/sdk.py
index c5f232431f..ea79fe050b 100644
--- a/meta/lib/oe/package_manager/rpm/sdk.py
+++ b/meta/lib/oe/package_manager/rpm/sdk.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -65,7 +67,12 @@ class PkgSdk(Sdk):
65 67
66 self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY')) 68 self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY'))
67 69
70 env_bkp = os.environ.copy()
71 os.environ['PATH'] = self.d.expand("${COREBASE}/scripts/nativesdk-intercept") + \
72 os.pathsep + os.environ["PATH"]
73
68 self.target_pm.run_intercepts(populate_sdk='target') 74 self.target_pm.run_intercepts(populate_sdk='target')
75 os.environ.update(env_bkp)
69 76
70 execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND")) 77 execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND"))
71 78
@@ -110,5 +117,6 @@ class PkgSdk(Sdk):
110 for f in glob.glob(os.path.join(self.sdk_output, "etc", "rpm*")): 117 for f in glob.glob(os.path.join(self.sdk_output, "etc", "rpm*")):
111 self.movefile(f, native_sysconf_dir) 118 self.movefile(f, native_sysconf_dir)
112 for f in glob.glob(os.path.join(self.sdk_output, "etc", "dnf", "*")): 119 for f in glob.glob(os.path.join(self.sdk_output, "etc", "dnf", "*")):
113 self.movefile(f, native_sysconf_dir) 120 self.mkdirhier(native_sysconf_dir + "/dnf")
121 self.movefile(f, native_sysconf_dir + "/dnf")
114 self.remove(os.path.join(self.sdk_output, "etc"), True) 122 self.remove(os.path.join(self.sdk_output, "etc"), True)
diff --git a/meta/lib/oe/packagedata.py b/meta/lib/oe/packagedata.py
index a82085a792..b6a10a930a 100644
--- a/meta/lib/oe/packagedata.py
+++ b/meta/lib/oe/packagedata.py
@@ -1,9 +1,17 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
5import codecs 7import codecs
6import os 8import os
9import json
10import bb.parse
11import bb.compress.zstd
12import oe.path
13
14from glob import glob
7 15
8def packaged(pkg, d): 16def packaged(pkg, d):
9 return os.access(get_subpkgedata_fn(pkg, d) + '.packaged', os.R_OK) 17 return os.access(get_subpkgedata_fn(pkg, d) + '.packaged', os.R_OK)
@@ -19,7 +27,7 @@ def read_pkgdatafile(fn):
19 import re 27 import re
20 with open(fn, 'r') as f: 28 with open(fn, 'r') as f:
21 lines = f.readlines() 29 lines = f.readlines()
22 r = re.compile("([^:]+):\s*(.*)") 30 r = re.compile(r"(^.+?):\s+(.*)")
23 for l in lines: 31 for l in lines:
24 m = r.match(l) 32 m = r.match(l)
25 if m: 33 if m:
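Note: the tightened expression matters because pkgdata keys themselves contain colons after the overrides-syntax switch. A quick illustration of why the non-greedy key plus mandatory whitespace splits correctly where the old "([^:]+):\s*" would cut at the first colon:

    import re
    r = re.compile(r"(^.+?):\s+(.*)")
    m = r.match('FILES_INFO:busybox: {"/bin/busybox": 123}')
    print(m.group(1))   # FILES_INFO:busybox   (the key keeps its colon)
    print(m.group(2))   # {"/bin/busybox": 123}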
@@ -45,18 +53,31 @@ def read_pkgdata(pn, d):
45 return read_pkgdatafile(fn) 53 return read_pkgdatafile(fn)
46 54
47# 55#
48# Collapse FOO_pkg variables into FOO 56# Collapse FOO:pkg variables into FOO
49# 57#
50def read_subpkgdata_dict(pkg, d): 58def read_subpkgdata_dict(pkg, d):
51 ret = {} 59 ret = {}
52 subd = read_pkgdatafile(get_subpkgedata_fn(pkg, d)) 60 subd = read_pkgdatafile(get_subpkgedata_fn(pkg, d))
53 for var in subd: 61 for var in subd:
54 newvar = var.replace("_" + pkg, "") 62 newvar = var.replace(":" + pkg, "")
55 if newvar == var and var + "_" + pkg in subd: 63 if newvar == var and var + ":" + pkg in subd:
56 continue 64 continue
57 ret[newvar] = subd[var] 65 ret[newvar] = subd[var]
58 return ret 66 return ret
59 67
68@bb.parse.vardepsexclude("BB_NUMBER_THREADS")
69def read_subpkgdata_extended(pkg, d):
70 import json
71 import bb.compress.zstd
72
73 fn = d.expand("${PKGDATA_DIR}/extended/%s.json.zstd" % pkg)
74 try:
75 num_threads = int(d.getVar("BB_NUMBER_THREADS"))
76 with bb.compress.zstd.open(fn, "rt", encoding="utf-8", num_threads=num_threads) as f:
77 return json.load(f)
78 except FileNotFoundError:
79 return None
80
60def _pkgmap(d): 81def _pkgmap(d):
61 """Return a dictionary mapping package to recipe name.""" 82 """Return a dictionary mapping package to recipe name."""
62 83
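Note: read_subpkgdata_extended() reads back the zstd-compressed per-package JSON that emit_pkgdata() (further below) writes under ${PKGDATA_DIR}/extended/. A hedged lookup sketch (the package name is illustrative):

    data = read_subpkgdata_extended("busybox", d)
    if data is not None:
        for path, info in data["files_info"].items():
            print(path, info["size"], info.get("debugsrc"))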
@@ -96,3 +117,253 @@ def recipename(pkg, d):
96 """Return the recipe name for the given binary package name.""" 117 """Return the recipe name for the given binary package name."""
97 118
98 return pkgmap(d).get(pkg) 119 return pkgmap(d).get(pkg)
120
121def foreach_runtime_provider_pkgdata(d, rdep, include_rdep=False):
122 pkgdata_dir = d.getVar("PKGDATA_DIR")
123 possibles = set()
124 try:
125 possibles |= set(os.listdir("%s/runtime-rprovides/%s/" % (pkgdata_dir, rdep)))
126 except OSError:
127 pass
128
129 if include_rdep:
130 possibles.add(rdep)
131
132 for p in sorted(list(possibles)):
133 rdep_data = read_subpkgdata(p, d)
134 yield p, rdep_data
135
136def get_package_mapping(pkg, basepkg, d, depversions=None):
137 import oe.packagedata
138
139 data = oe.packagedata.read_subpkgdata(pkg, d)
140 key = "PKG:%s" % pkg
141
142 if key in data:
143 if bb.data.inherits_class('allarch', d) and bb.data.inherits_class('packagegroup', d) and pkg != data[key]:
144 bb.error("An allarch packagegroup shouldn't depend on packages which are dynamically renamed (%s to %s)" % (pkg, data[key]))
145 # Have to avoid undoing the write_extra_pkgs(global_variants...)
146 if bb.data.inherits_class('allarch', d) and not d.getVar('MULTILIB_VARIANTS') \
147 and data[key] == basepkg:
148 return pkg
149 if depversions == []:
150 # Avoid returning a mapping if the renamed package rprovides its original name
151 rprovkey = "RPROVIDES:%s" % pkg
152 if rprovkey in data:
153 if pkg in bb.utils.explode_dep_versions2(data[rprovkey]):
154 bb.note("%s rprovides %s, not replacing the latter" % (data[key], pkg))
155 return pkg
156 # Do map to rewritten package name
157 return data[key]
158
159 return pkg
160
161def get_package_additional_metadata(pkg_type, d):
162 base_key = "PACKAGE_ADD_METADATA"
163 for key in ("%s_%s" % (base_key, pkg_type.upper()), base_key):
164 if d.getVar(key, False) is None:
165 continue
166 d.setVarFlag(key, "type", "list")
167 if d.getVarFlag(key, "separator") is None:
168 d.setVarFlag(key, "separator", "\\n")
169 metadata_fields = [field.strip() for field in oe.data.typed_value(key, d)]
170 return "\n".join(metadata_fields).strip()
171
172def runtime_mapping_rename(varname, pkg, d):
173 #bb.note("%s before: %s" % (varname, d.getVar(varname)))
174
175 new_depends = {}
176 deps = bb.utils.explode_dep_versions2(d.getVar(varname) or "")
177 for depend, depversions in deps.items():
178 new_depend = get_package_mapping(depend, pkg, d, depversions)
179 if depend != new_depend:
180 bb.note("package name mapping done: %s -> %s" % (depend, new_depend))
181 new_depends[new_depend] = deps[depend]
182
183 d.setVar(varname, bb.utils.join_deps(new_depends, commasep=False))
184
185 #bb.note("%s after: %s" % (varname, d.getVar(varname)))
186
187@bb.parse.vardepsexclude("BB_NUMBER_THREADS")
188def emit_pkgdata(pkgfiles, d):
189 def process_postinst_on_target(pkg, mlprefix):
190 pkgval = d.getVar('PKG:%s' % pkg)
191 if pkgval is None:
192 pkgval = pkg
193
194 defer_fragment = """
195if [ -n "$D" ]; then
196 $INTERCEPT_DIR/postinst_intercept delay_to_first_boot %s mlprefix=%s
197 exit 0
198fi
199""" % (pkgval, mlprefix)
200
201 postinst = d.getVar('pkg_postinst:%s' % pkg)
202 postinst_ontarget = d.getVar('pkg_postinst_ontarget:%s' % pkg)
203
204 if postinst_ontarget:
205 bb.debug(1, 'adding deferred pkg_postinst_ontarget() to pkg_postinst() for %s' % pkg)
206 if not postinst:
207 postinst = '#!/bin/sh\n'
208 postinst += defer_fragment
209 postinst += postinst_ontarget
210 d.setVar('pkg_postinst:%s' % pkg, postinst)
211
212 def add_set_e_to_scriptlets(pkg):
213 for scriptlet_name in ('pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm'):
214 scriptlet = d.getVar('%s:%s' % (scriptlet_name, pkg))
215 if scriptlet:
216 scriptlet_split = scriptlet.split('\n')
217 if scriptlet_split[0].startswith("#!"):
218 scriptlet = scriptlet_split[0] + "\nset -e\n" + "\n".join(scriptlet_split[1:])
219 else:
220 scriptlet = "set -e\n" + "\n".join(scriptlet_split[0:])
221 d.setVar('%s:%s' % (scriptlet_name, pkg), scriptlet)
222
223 def write_if_exists(f, pkg, var):
224 def encode(str):
225 import codecs
226 c = codecs.getencoder("unicode_escape")
227 return c(str)[0].decode("latin1")
228
229 val = d.getVar('%s:%s' % (var, pkg))
230 if val:
231 f.write('%s:%s: %s\n' % (var, pkg, encode(val)))
232 return val
233 val = d.getVar('%s' % (var))
234 if val:
235 f.write('%s: %s\n' % (var, encode(val)))
236 return val
237
238 def write_extra_pkgs(variants, pn, packages, pkgdatadir):
239 for variant in variants:
240 with open("%s/%s-%s" % (pkgdatadir, variant, pn), 'w') as fd:
241 fd.write("PACKAGES: %s\n" % ' '.join(
242 map(lambda pkg: '%s-%s' % (variant, pkg), packages.split())))
243
244 def write_extra_runtime_pkgs(variants, packages, pkgdatadir):
245 for variant in variants:
246 for pkg in packages.split():
247 ml_pkg = "%s-%s" % (variant, pkg)
248 subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg)
249 with open(subdata_file, 'w') as fd:
250 fd.write("PKG:%s: %s" % (ml_pkg, pkg))
251
252 packages = d.getVar('PACKAGES')
253 pkgdest = d.getVar('PKGDEST')
254 pkgdatadir = d.getVar('PKGDESTWORK')
255
256 data_file = pkgdatadir + d.expand("/${PN}")
257 with open(data_file, 'w') as fd:
258 fd.write("PACKAGES: %s\n" % packages)
259
260 pkgdebugsource = d.getVar("PKGDEBUGSOURCES") or []
261
262 pn = d.getVar('PN')
263 global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS') or "").split()
264 variants = (d.getVar('MULTILIB_VARIANTS') or "").split()
265
266 if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
267 write_extra_pkgs(variants, pn, packages, pkgdatadir)
268
269 if bb.data.inherits_class('allarch', d) and not variants \
270 and not bb.data.inherits_class('packagegroup', d):
271 write_extra_pkgs(global_variants, pn, packages, pkgdatadir)
272
273 workdir = d.getVar('WORKDIR')
274
275 for pkg in packages.split():
276 pkgval = d.getVar('PKG:%s' % pkg)
277 if pkgval is None:
278 pkgval = pkg
279 d.setVar('PKG:%s' % pkg, pkg)
280
281 extended_data = {
282 "files_info": {}
283 }
284
285 pkgdestpkg = os.path.join(pkgdest, pkg)
286 files = {}
287 files_extra = {}
288 total_size = 0
289 seen = set()
290 for f in pkgfiles[pkg]:
291 fpath = os.sep + os.path.relpath(f, pkgdestpkg)
292
293 fstat = os.lstat(f)
294 files[fpath] = fstat.st_size
295
296 extended_data["files_info"].setdefault(fpath, {})
297 extended_data["files_info"][fpath]['size'] = fstat.st_size
298
299 if fstat.st_ino not in seen:
300 seen.add(fstat.st_ino)
301 total_size += fstat.st_size
302
303 if fpath in pkgdebugsource:
304 extended_data["files_info"][fpath]['debugsrc'] = pkgdebugsource[fpath]
305 del pkgdebugsource[fpath]
306
307 d.setVar('FILES_INFO:' + pkg , json.dumps(files, sort_keys=True))
308
309 process_postinst_on_target(pkg, d.getVar("MLPREFIX"))
310 add_set_e_to_scriptlets(pkg)
311
312 subdata_file = pkgdatadir + "/runtime/%s" % pkg
313 with open(subdata_file, 'w') as sf:
314 for var in (d.getVar('PKGDATA_VARS') or "").split():
315 val = write_if_exists(sf, pkg, var)
316
317 write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
318 for dfile in sorted((d.getVar('FILERPROVIDESFLIST:' + pkg) or "").split()):
319 write_if_exists(sf, pkg, 'FILERPROVIDES:' + dfile)
320
321 write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
322 for dfile in sorted((d.getVar('FILERDEPENDSFLIST:' + pkg) or "").split()):
323 write_if_exists(sf, pkg, 'FILERDEPENDS:' + dfile)
324
325 sf.write('%s:%s: %d\n' % ('PKGSIZE', pkg, total_size))
326
327 subdata_extended_file = pkgdatadir + "/extended/%s.json.zstd" % pkg
328 num_threads = int(d.getVar("BB_NUMBER_THREADS"))
329 with bb.compress.zstd.open(subdata_extended_file, "wt", encoding="utf-8", num_threads=num_threads) as f:
330 json.dump(extended_data, f, sort_keys=True, separators=(",", ":"))
331
332 # Symlinks needed for rprovides lookup
333 rprov = d.getVar('RPROVIDES:%s' % pkg) or d.getVar('RPROVIDES')
334 if rprov:
335 for p in bb.utils.explode_deps(rprov):
336 subdata_sym = pkgdatadir + "/runtime-rprovides/%s/%s" % (p, pkg)
337 bb.utils.mkdirhier(os.path.dirname(subdata_sym))
338 oe.path.relsymlink(subdata_file, subdata_sym, True)
339
340 allow_empty = d.getVar('ALLOW_EMPTY:%s' % pkg)
341 if not allow_empty:
342 allow_empty = d.getVar('ALLOW_EMPTY')
343 root = "%s/%s" % (pkgdest, pkg)
344 os.chdir(root)
345 g = glob('*')
346 if g or allow_empty == "1":
347 # Symlinks needed for reverse lookups (from the final package name)
348 subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval
349 oe.path.relsymlink(subdata_file, subdata_sym, True)
350
351 packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
352 open(packagedfile, 'w').close()
353
354 if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
355 write_extra_runtime_pkgs(variants, packages, pkgdatadir)
356
357 if bb.data.inherits_class('allarch', d) and not variants \
358 and not bb.data.inherits_class('packagegroup', d):
359 write_extra_runtime_pkgs(global_variants, packages, pkgdatadir)
360
361def mapping_rename_hook(d):
362 """
363 Rewrite variables to account for package renaming in things
364 like debian.bbclass or manual PKG variable name changes
365 """
366 pkg = d.getVar("PKG")
367 oe.packagedata.runtime_mapping_rename("RDEPENDS", pkg, d)
368 oe.packagedata.runtime_mapping_rename("RRECOMMENDS", pkg, d)
369 oe.packagedata.runtime_mapping_rename("RSUGGESTS", pkg, d)
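
For orientation, the runtime pkgdata files emitted above are plain "VAR: value" text with a final PKGSIZE line. Below is a minimal sketch of reading one back; the function name is hypothetical, and it assumes values were escaped with the unicode_escape codec, which is what the encode() helper used by write_if_exists appears to do (that helper is outside this excerpt).

    import codecs
    import os

    def read_runtime_pkgdata(pkgdatadir, pkg):
        # Parse "VAR: value" lines back into a dict, undoing the
        # (assumed) unicode_escape encoding applied when writing.
        data = {}
        with open(os.path.join(pkgdatadir, "runtime", pkg)) as f:
            for line in f:
                key, sep, value = line.rstrip("\n").partition(": ")
                if sep:
                    data[key] = codecs.decode(value, "unicode_escape")
        return data
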
diff --git a/meta/lib/oe/packagegroup.py b/meta/lib/oe/packagegroup.py
index 8fcaecde82..7b7594751a 100644
--- a/meta/lib/oe/packagegroup.py
+++ b/meta/lib/oe/packagegroup.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
diff --git a/meta/lib/oe/patch.py b/meta/lib/oe/patch.py
index fccbedb519..edd77196ee 100644
--- a/meta/lib/oe/patch.py
+++ b/meta/lib/oe/patch.py
@@ -1,7 +1,12 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
7import os
8import shlex
9import subprocess
5import oe.path 10import oe.path
6import oe.types 11import oe.types
7 12
@@ -24,9 +29,6 @@ class CmdError(bb.BBHandledException):
24 29
25 30
26def runcmd(args, dir = None): 31def runcmd(args, dir = None):
27 import pipes
28 import subprocess
29
30 if dir: 32 if dir:
31 olddir = os.path.abspath(os.curdir) 33 olddir = os.path.abspath(os.curdir)
32 if not os.path.exists(dir): 34 if not os.path.exists(dir):
@@ -35,7 +37,7 @@ def runcmd(args, dir = None):
35 # print("cwd: %s -> %s" % (olddir, dir)) 37 # print("cwd: %s -> %s" % (olddir, dir))
36 38
37 try: 39 try:
38 args = [ pipes.quote(str(arg)) for arg in args ] 40 args = [ shlex.quote(str(arg)) for arg in args ]
39 cmd = " ".join(args) 41 cmd = " ".join(args)
40 # print("cmd: %s" % cmd) 42 # print("cmd: %s" % cmd)
41 proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) 43 proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
@@ -56,6 +58,7 @@ def runcmd(args, dir = None):
56 if dir: 58 if dir:
57 os.chdir(olddir) 59 os.chdir(olddir)
58 60
61
59class PatchError(Exception): 62class PatchError(Exception):
60 def __init__(self, msg): 63 def __init__(self, msg):
61 self.msg = msg 64 self.msg = msg
@@ -214,7 +217,7 @@ class PatchTree(PatchSet):
214 with open(self.seriespath, 'w') as f: 217 with open(self.seriespath, 'w') as f:
215 for p in patches: 218 for p in patches:
216 f.write(p) 219 f.write(p)
217 220
218 def Import(self, patch, force = None): 221 def Import(self, patch, force = None):
219 """""" 222 """"""
220 PatchSet.Import(self, patch, force) 223 PatchSet.Import(self, patch, force)
@@ -291,13 +294,32 @@ class PatchTree(PatchSet):
291 self.Pop(all=True) 294 self.Pop(all=True)
292 295
293class GitApplyTree(PatchTree): 296class GitApplyTree(PatchTree):
294 patch_line_prefix = '%% original patch' 297 notes_ref = "refs/notes/devtool"
295 ignore_commit_prefix = '%% ignore' 298 original_patch = 'original patch'
299 ignore_commit = 'ignore'
296 300
297 def __init__(self, dir, d): 301 def __init__(self, dir, d):
298 PatchTree.__init__(self, dir, d) 302 PatchTree.__init__(self, dir, d)
299 self.commituser = d.getVar('PATCH_GIT_USER_NAME') 303 self.commituser = d.getVar('PATCH_GIT_USER_NAME')
300 self.commitemail = d.getVar('PATCH_GIT_USER_EMAIL') 304 self.commitemail = d.getVar('PATCH_GIT_USER_EMAIL')
305 if not self._isInitialized(d):
306 self._initRepo()
307
308 def _isInitialized(self, d):
309 cmd = "git rev-parse --show-toplevel"
310 try:
311 output = runcmd(cmd.split(), self.dir).strip()
312 except CmdError as err:
313             ## runcmd returned non-zero, which most likely means exit code 128:
314             ## not a git directory
315 return False
316         ## Make sure the repo root is the build dir itself, or is under WORKDIR, so we don't break top-level git repos
317 return os.path.samefile(output, self.dir) or oe.path.is_path_parent(d.getVar('WORKDIR'), output)
318
319 def _initRepo(self):
320 runcmd("git init".split(), self.dir)
321 runcmd("git add .".split(), self.dir)
322 runcmd("git commit -a --allow-empty -m bitbake_patching_started".split(), self.dir)
301 323
302 @staticmethod 324 @staticmethod
303 def extractPatchHeader(patchfile): 325 def extractPatchHeader(patchfile):
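
For reference, the toplevel check that _isInitialized() performs can be sketched standalone. This is a minimal approximation that uses subprocess directly instead of runcmd() and omits the WORKDIR parent-directory case:

    import os
    import subprocess

    def is_git_toplevel(path):
        # True only if 'path' is itself the top of a git work tree,
        # mirroring the os.path.samefile() comparison above.
        try:
            out = subprocess.check_output(
                ["git", "rev-parse", "--show-toplevel"],
                cwd=path, stderr=subprocess.DEVNULL, text=True).strip()
        except subprocess.CalledProcessError:
            # Exit code 128: not inside a git repository
            return False
        return os.path.samefile(out, path)
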
@@ -431,7 +453,7 @@ class GitApplyTree(PatchTree):
431 # Prepare git command 453 # Prepare git command
432 cmd = ["git"] 454 cmd = ["git"]
433 GitApplyTree.gitCommandUserOptions(cmd, commituser, commitemail) 455 GitApplyTree.gitCommandUserOptions(cmd, commituser, commitemail)
434 cmd += ["commit", "-F", tmpfile] 456 cmd += ["commit", "-F", tmpfile, "--no-verify"]
435 # git doesn't like plain email addresses as authors 457 # git doesn't like plain email addresses as authors
436 if author and '<' in author: 458 if author and '<' in author:
437 cmd.append('--author="%s"' % author) 459 cmd.append('--author="%s"' % author)
@@ -440,44 +462,133 @@ class GitApplyTree(PatchTree):
440 return (tmpfile, cmd) 462 return (tmpfile, cmd)
441 463
442 @staticmethod 464 @staticmethod
443 def extractPatches(tree, startcommit, outdir, paths=None): 465 def addNote(repo, ref, key, value=None, commituser=None, commitemail=None):
466 note = key + (": %s" % value if value else "")
467 notes_ref = GitApplyTree.notes_ref
468 runcmd(["git", "config", "notes.rewriteMode", "ignore"], repo)
469 runcmd(["git", "config", "notes.displayRef", notes_ref, notes_ref], repo)
470 runcmd(["git", "config", "notes.rewriteRef", notes_ref, notes_ref], repo)
471 cmd = ["git"]
472 GitApplyTree.gitCommandUserOptions(cmd, commituser, commitemail)
473 runcmd(cmd + ["notes", "--ref", notes_ref, "append", "-m", note, ref], repo)
474
475 @staticmethod
476 def removeNote(repo, ref, key, commituser=None, commitemail=None):
477 notes = GitApplyTree.getNotes(repo, ref)
478 notes = {k: v for k, v in notes.items() if k != key and not k.startswith(key + ":")}
479 runcmd(["git", "notes", "--ref", GitApplyTree.notes_ref, "remove", "--ignore-missing", ref], repo)
480 for note, value in notes.items():
481 GitApplyTree.addNote(repo, ref, note, value, commituser, commitemail)
482
483 @staticmethod
484 def getNotes(repo, ref):
485 import re
486
487 note = None
488 try:
489 note = runcmd(["git", "notes", "--ref", GitApplyTree.notes_ref, "show", ref], repo)
490 prefix = ""
491 except CmdError:
492 note = runcmd(['git', 'show', '-s', '--format=%B', ref], repo)
493 prefix = "%% "
494
495 note_re = re.compile(r'^%s(.*?)(?::\s*(.*))?$' % prefix)
496 notes = dict()
497 for line in note.splitlines():
498 m = note_re.match(line)
499 if m:
500 notes[m.group(1)] = m.group(2)
501
502 return notes
503
504 @staticmethod
505 def commitIgnored(subject, dir=None, files=None, d=None):
506 if files:
507 runcmd(['git', 'add'] + files, dir)
508 cmd = ["git"]
509 GitApplyTree.gitCommandUserOptions(cmd, d=d)
510 cmd += ["commit", "-m", subject, "--no-verify"]
511 runcmd(cmd, dir)
512         GitApplyTree.addNote(dir, "HEAD", GitApplyTree.ignore_commit, commituser=d.getVar('PATCH_GIT_USER_NAME'), commitemail=d.getVar('PATCH_GIT_USER_EMAIL'))
513
514 @staticmethod
515 def extractPatches(tree, startcommits, outdir, paths=None):
444 import tempfile 516 import tempfile
445 import shutil 517 import shutil
446 tempdir = tempfile.mkdtemp(prefix='oepatch') 518 tempdir = tempfile.mkdtemp(prefix='oepatch')
447 try: 519 try:
448 shellcmd = ["git", "format-patch", "--no-signature", "--no-numbered", startcommit, "-o", tempdir] 520 for name, rev in startcommits.items():
449 if paths: 521 shellcmd = ["git", "format-patch", "--no-signature", "--no-numbered", rev, "-o", tempdir]
450 shellcmd.append('--') 522 if paths:
451 shellcmd.extend(paths) 523 shellcmd.append('--')
452 out = runcmd(["sh", "-c", " ".join(shellcmd)], tree) 524 shellcmd.extend(paths)
453 if out: 525 out = runcmd(["sh", "-c", " ".join(shellcmd)], os.path.join(tree, name))
454 for srcfile in out.split(): 526 if out:
455 for encoding in ['utf-8', 'latin-1']: 527 for srcfile in out.split():
456 patchlines = [] 528 # This loop, which is used to remove any line that
457 outfile = None 529 # starts with "%% original patch", is kept for backwards
458 try: 530 # compatibility. If/when that compatibility is dropped,
459 with open(srcfile, 'r', encoding=encoding) as f: 531 # it can be replaced with code to just read the first
460 for line in f: 532 # line of the patch file to get the SHA-1, and the code
461 if line.startswith(GitApplyTree.patch_line_prefix): 533 # below that writes the modified patch file can be
462 outfile = line.split()[-1].strip() 534 # replaced with a simple file move.
463 continue 535 for encoding in ['utf-8', 'latin-1']:
464 if line.startswith(GitApplyTree.ignore_commit_prefix): 536 patchlines = []
465 continue 537 try:
466 patchlines.append(line) 538 with open(srcfile, 'r', encoding=encoding, newline='') as f:
467 except UnicodeDecodeError: 539 for line in f:
540 if line.startswith("%% " + GitApplyTree.original_patch):
541 continue
542 patchlines.append(line)
543 except UnicodeDecodeError:
544 continue
545 break
546 else:
547 raise PatchError('Unable to find a character encoding to decode %s' % srcfile)
548
549 sha1 = patchlines[0].split()[1]
550 notes = GitApplyTree.getNotes(os.path.join(tree, name), sha1)
551 if GitApplyTree.ignore_commit in notes:
468 continue 552 continue
469 break 553 outfile = notes.get(GitApplyTree.original_patch, os.path.basename(srcfile))
470 else: 554
471 raise PatchError('Unable to find a character encoding to decode %s' % srcfile) 555 bb.utils.mkdirhier(os.path.join(outdir, name))
472 556 with open(os.path.join(outdir, name, outfile), 'w') as of:
473 if not outfile: 557 for line in patchlines:
474 outfile = os.path.basename(srcfile) 558 of.write(line)
475 with open(os.path.join(outdir, outfile), 'w') as of:
476 for line in patchlines:
477 of.write(line)
478 finally: 559 finally:
479 shutil.rmtree(tempdir) 560 shutil.rmtree(tempdir)
480 561
562 def _need_dirty_check(self):
563 fetch = bb.fetch2.Fetch([], self.d)
564 check_dirtyness = False
565 for url in fetch.urls:
566 url_data = fetch.ud[url]
567 parm = url_data.parm
568             # a git url with a subpath param will surely be dirty,
569             # since the git tree we clone from is stripped of all
570             # files that are not under the subpath
571 if url_data.type == 'git' and parm.get('subpath'):
572 check_dirtyness = True
573 return check_dirtyness
574
575 def _commitpatch(self, patch, patchfilevar):
576 output = ""
577 # Add all files
578 shellcmd = ["git", "add", "-f", "-A", "."]
579 output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
580 # Exclude the patches directory
581 shellcmd = ["git", "reset", "HEAD", self.patchdir]
582 output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
583 # Commit the result
584 (tmpfile, shellcmd) = self.prepareCommit(patch['file'], self.commituser, self.commitemail)
585 try:
586 shellcmd.insert(0, patchfilevar)
587 output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
588 finally:
589 os.remove(tmpfile)
590 return output
591
481 def _applypatch(self, patch, force = False, reverse = False, run = True): 592 def _applypatch(self, patch, force = False, reverse = False, run = True):
482 import shutil 593 import shutil
483 594
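
The git-notes bookkeeping introduced here (addNote/getNotes) boils down to appending "key: value" lines under refs/notes/devtool and parsing them back. A hedged round-trip sketch, with a hypothetical repository path and patch name:

    import subprocess

    def note_roundtrip(repo, rev="HEAD"):
        # Append a note recording the original patch name, then read
        # the notes back as a dict; user/email options are omitted.
        ref = "refs/notes/devtool"
        subprocess.run(["git", "notes", "--ref", ref, "append",
                        "-m", "original patch: example.patch", rev],
                       cwd=repo, check=True)
        out = subprocess.check_output(
            ["git", "notes", "--ref", ref, "show", rev],
            cwd=repo, text=True)
        return dict(line.split(": ", 1)
                    for line in out.splitlines() if ": " in line)
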
@@ -492,27 +603,26 @@ class GitApplyTree(PatchTree):
492 603
493 return runcmd(["sh", "-c", " ".join(shellcmd)], self.dir) 604 return runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
494 605
495 # Add hooks which add a pointer to the original patch file name in the commit message
496 reporoot = (runcmd("git rev-parse --show-toplevel".split(), self.dir) or '').strip() 606 reporoot = (runcmd("git rev-parse --show-toplevel".split(), self.dir) or '').strip()
497 if not reporoot: 607 if not reporoot:
498 raise Exception("Cannot get repository root for directory %s" % self.dir) 608 raise Exception("Cannot get repository root for directory %s" % self.dir)
499 hooks_dir = os.path.join(reporoot, '.git', 'hooks') 609
500 hooks_dir_backup = hooks_dir + '.devtool-orig' 610 patch_applied = True
501 if os.path.lexists(hooks_dir_backup):
502 raise Exception("Git hooks backup directory already exists: %s" % hooks_dir_backup)
503 if os.path.lexists(hooks_dir):
504 shutil.move(hooks_dir, hooks_dir_backup)
505 os.mkdir(hooks_dir)
506 commithook = os.path.join(hooks_dir, 'commit-msg')
507 applyhook = os.path.join(hooks_dir, 'applypatch-msg')
508 with open(commithook, 'w') as f:
509 # NOTE: the formatting here is significant; if you change it you'll also need to
510 # change other places which read it back
511 f.write('echo "\n%s: $PATCHFILE" >> $1' % GitApplyTree.patch_line_prefix)
512 os.chmod(commithook, 0o755)
513 shutil.copy2(commithook, applyhook)
514 try: 611 try:
515 patchfilevar = 'PATCHFILE="%s"' % os.path.basename(patch['file']) 612 patchfilevar = 'PATCHFILE="%s"' % os.path.basename(patch['file'])
613 if self._need_dirty_check():
614 # Check dirtyness of the tree
615 try:
616 output = runcmd(["git", "--work-tree=%s" % reporoot, "status", "--short"])
617 except CmdError:
618 pass
619 else:
620 if output:
621                     # The tree is dirty, so there is no point trying to apply patches with git anymore;
622                     # they will fail, so fall back directly to patch
623 output = PatchTree._applypatch(self, patch, force, reverse, run)
624 output += self._commitpatch(patch, patchfilevar)
625 return output
516 try: 626 try:
517 shellcmd = [patchfilevar, "git", "--work-tree=%s" % reporoot] 627 shellcmd = [patchfilevar, "git", "--work-tree=%s" % reporoot]
518 self.gitCommandUserOptions(shellcmd, self.commituser, self.commitemail) 628 self.gitCommandUserOptions(shellcmd, self.commituser, self.commitemail)
@@ -539,24 +649,14 @@ class GitApplyTree(PatchTree):
539 except CmdError: 649 except CmdError:
540 # Fall back to patch 650 # Fall back to patch
541 output = PatchTree._applypatch(self, patch, force, reverse, run) 651 output = PatchTree._applypatch(self, patch, force, reverse, run)
542 # Add all files 652 output += self._commitpatch(patch, patchfilevar)
543 shellcmd = ["git", "add", "-f", "-A", "."]
544 output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
545 # Exclude the patches directory
546 shellcmd = ["git", "reset", "HEAD", self.patchdir]
547 output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
548 # Commit the result
549 (tmpfile, shellcmd) = self.prepareCommit(patch['file'], self.commituser, self.commitemail)
550 try:
551 shellcmd.insert(0, patchfilevar)
552 output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
553 finally:
554 os.remove(tmpfile)
555 return output 653 return output
654 except:
655 patch_applied = False
656 raise
556 finally: 657 finally:
557 shutil.rmtree(hooks_dir) 658 if patch_applied:
558 if os.path.lexists(hooks_dir_backup): 659 GitApplyTree.addNote(self.dir, "HEAD", GitApplyTree.original_patch, os.path.basename(patch['file']), self.commituser, self.commitemail)
559 shutil.move(hooks_dir_backup, hooks_dir)
560 660
561 661
562class QuiltTree(PatchSet): 662class QuiltTree(PatchSet):
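
The dirty-tree fast path added above amounts to: if any fetched git URL used a subpath parameter, run "git status --short", and if anything is reported, skip "git am" entirely and go straight to patch(1) followed by a commit. A compact sketch of that decision, where url_params stands in for the fetcher's per-URL parameter dicts:

    import subprocess

    def choose_apply_strategy(repo, url_params):
        # Mirror of the decision flow in _applypatch()/_need_dirty_check().
        if any(p.get("subpath") for p in url_params):
            out = subprocess.check_output(["git", "status", "--short"],
                                          cwd=repo, text=True)
            if out:
                # Tree is dirty; "git am" would fail anyway
                return "patch-then-commit"
        return "git-am"
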
@@ -579,6 +679,8 @@ class QuiltTree(PatchSet):
579 679
580 def Clean(self): 680 def Clean(self):
581 try: 681 try:
682 # make sure that patches/series file exists before quilt pop to keep quilt-0.67 happy
683 open(os.path.join(self.dir, "patches","series"), 'a').close()
582 self._runcmd(["pop", "-a", "-f"]) 684 self._runcmd(["pop", "-a", "-f"])
583 oe.path.remove(os.path.join(self.dir, "patches","series")) 685 oe.path.remove(os.path.join(self.dir, "patches","series"))
584 except Exception: 686 except Exception:
@@ -715,8 +817,9 @@ class NOOPResolver(Resolver):
715 self.patchset.Push() 817 self.patchset.Push()
716 except Exception: 818 except Exception:
717 import sys 819 import sys
718 os.chdir(olddir)
719 raise 820 raise
821 finally:
822 os.chdir(olddir)
720 823
721# Patch resolver which relies on the user doing all the work involved in the 824# Patch resolver which relies on the user doing all the work involved in the
722# resolution, with the exception of refreshing the remote copy of the patch 825# resolution, with the exception of refreshing the remote copy of the patch
@@ -776,12 +879,12 @@ class UserResolver(Resolver):
776 # User did not fix the problem. Abort. 879 # User did not fix the problem. Abort.
777 raise PatchError("Patch application failed, and user did not fix and refresh the patch.") 880 raise PatchError("Patch application failed, and user did not fix and refresh the patch.")
778 except Exception: 881 except Exception:
779 os.chdir(olddir)
780 raise 882 raise
781 os.chdir(olddir) 883 finally:
884 os.chdir(olddir)
782 885
783 886
784def patch_path(url, fetch, workdir, expand=True): 887def patch_path(url, fetch, unpackdir, expand=True):
785 """Return the local path of a patch, or return nothing if this isn't a patch""" 888 """Return the local path of a patch, or return nothing if this isn't a patch"""
786 889
787 local = fetch.localpath(url) 890 local = fetch.localpath(url)
@@ -790,7 +893,7 @@ def patch_path(url, fetch, workdir, expand=True):
790 base, ext = os.path.splitext(os.path.basename(local)) 893 base, ext = os.path.splitext(os.path.basename(local))
791 if ext in ('.gz', '.bz2', '.xz', '.Z'): 894 if ext in ('.gz', '.bz2', '.xz', '.Z'):
792 if expand: 895 if expand:
793 local = os.path.join(workdir, base) 896 local = os.path.join(unpackdir, base)
794 ext = os.path.splitext(base)[1] 897 ext = os.path.splitext(base)[1]
795 898
796 urldata = fetch.ud[url] 899 urldata = fetch.ud[url]
@@ -804,12 +907,12 @@ def patch_path(url, fetch, workdir, expand=True):
804 return local 907 return local
805 908
806def src_patches(d, all=False, expand=True): 909def src_patches(d, all=False, expand=True):
807 workdir = d.getVar('WORKDIR') 910 unpackdir = d.getVar('UNPACKDIR')
808 fetch = bb.fetch2.Fetch([], d) 911 fetch = bb.fetch2.Fetch([], d)
809 patches = [] 912 patches = []
810 sources = [] 913 sources = []
811 for url in fetch.urls: 914 for url in fetch.urls:
812 local = patch_path(url, fetch, workdir, expand) 915 local = patch_path(url, fetch, unpackdir, expand)
813 if not local: 916 if not local:
814 if all: 917 if all:
815 local = fetch.localpath(url) 918 local = fetch.localpath(url)
@@ -898,4 +1001,3 @@ def should_apply(parm, d):
898 return False, "applies to later version" 1001 return False, "applies to later version"
899 1002
900 return True, None 1003 return True, None
901
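
As context for the WORKDIR-to-UNPACKDIR change, patch_path() points compressed patches at the decompressed copy the fetcher leaves under UNPACKDIR. A minimal sketch of just that extension handling, assuming the fetcher drops the final compression suffix:

    import os

    def expanded_patch_path(local, unpackdir, expand=True):
        # A fix.patch.gz fetched as /dl/fix.patch.gz unpacks to
        # <unpackdir>/fix.patch, which is what should be applied.
        base, ext = os.path.splitext(os.path.basename(local))
        if ext in ('.gz', '.bz2', '.xz', '.Z') and expand:
            return os.path.join(unpackdir, base)
        return local
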
diff --git a/meta/lib/oe/path.py b/meta/lib/oe/path.py
index c8d8ad05b9..a1efe97d88 100644
--- a/meta/lib/oe/path.py
+++ b/meta/lib/oe/path.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -8,6 +10,8 @@ import shutil
8import subprocess 10import subprocess
9import os.path 11import os.path
10 12
13import bb.parse
14
11def join(*paths): 15def join(*paths):
12 """Like os.path.join but doesn't treat absolute RHS specially""" 16 """Like os.path.join but doesn't treat absolute RHS specially"""
13 return os.path.normpath("/".join(paths)) 17 return os.path.normpath("/".join(paths))
@@ -75,6 +79,7 @@ def replace_absolute_symlinks(basedir, d):
75 os.remove(path) 79 os.remove(path)
76 os.symlink(base, path) 80 os.symlink(base, path)
77 81
82@bb.parse.vardepsexclude("TOPDIR")
78def format_display(path, metadata): 83def format_display(path, metadata):
79 """ Prepare a path for display to the user. """ 84 """ Prepare a path for display to the user. """
80 rel = relative(metadata.getVar("TOPDIR"), path) 85 rel = relative(metadata.getVar("TOPDIR"), path)
@@ -123,7 +128,8 @@ def copyhardlinktree(src, dst):
123 if os.path.isdir(src): 128 if os.path.isdir(src):
124 if len(glob.glob('%s/.??*' % src)) > 0: 129 if len(glob.glob('%s/.??*' % src)) > 0:
125 source = './.??* ' 130 source = './.??* '
126 source += './*' 131 if len(glob.glob('%s/**' % src)) > 0:
132 source += './*'
127 s_dir = src 133 s_dir = src
128 else: 134 else:
129 source = src 135 source = src
@@ -169,6 +175,9 @@ def symlink(source, destination, force=False):
169 if e.errno != errno.EEXIST or os.readlink(destination) != source: 175 if e.errno != errno.EEXIST or os.readlink(destination) != source:
170 raise 176 raise
171 177
178def relsymlink(target, name, force=False):
179 symlink(os.path.relpath(target, os.path.dirname(name)), name, force=force)
180
172def find(dir, **walkoptions): 181def find(dir, **walkoptions):
173 """ Given a directory, recurses into that directory, 182 """ Given a directory, recurses into that directory,
174 returning all files as absolute paths. """ 183 returning all files as absolute paths. """
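
The new relsymlink() helper simply rewrites an absolute target relative to the link's own directory before delegating to symlink(); for example (paths hypothetical):

    import os

    target = "/pkgdata/runtime/foo"
    name = "/pkgdata/runtime-reverse/foo"
    rel = os.path.relpath(target, os.path.dirname(name))
    print(rel)  # "../runtime/foo", then os.symlink(rel, name)
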
diff --git a/meta/lib/oe/prservice.py b/meta/lib/oe/prservice.py
index fcdbe66c19..c41242c878 100644
--- a/meta/lib/oe/prservice.py
+++ b/meta/lib/oe/prservice.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -7,11 +9,10 @@ def prserv_make_conn(d, check = False):
7 host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f]) 9 host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f])
8 try: 10 try:
9 conn = None 11 conn = None
10 conn = prserv.serv.PRServerConnection(host_params[0], int(host_params[1])) 12 conn = prserv.serv.connect(host_params[0], int(host_params[1]))
11 if check: 13 if check:
12 if not conn.ping(): 14 if not conn.ping():
13 raise Exception('service not available') 15 raise Exception('service not available')
14 d.setVar("__PRSERV_CONN",conn)
15 except Exception as exc: 16 except Exception as exc:
16 bb.fatal("Connecting to PR service %s:%s failed: %s" % (host_params[0], host_params[1], str(exc))) 17 bb.fatal("Connecting to PR service %s:%s failed: %s" % (host_params[0], host_params[1], str(exc)))
17 18
@@ -22,31 +23,29 @@ def prserv_dump_db(d):
22 bb.error("Not using network based PR service") 23 bb.error("Not using network based PR service")
23 return None 24 return None
24 25
25 conn = d.getVar("__PRSERV_CONN") 26 conn = prserv_make_conn(d)
26 if conn is None: 27 if conn is None:
27 conn = prserv_make_conn(d) 28 bb.error("Failed to connect to the remote PR service")
28 if conn is None: 29 return None
29 bb.error("Making connection failed to remote PR service")
30 return None
31 30
32 #dump db 31 #dump db
33 opt_version = d.getVar('PRSERV_DUMPOPT_VERSION') 32 opt_version = d.getVar('PRSERV_DUMPOPT_VERSION')
34 opt_pkgarch = d.getVar('PRSERV_DUMPOPT_PKGARCH') 33 opt_pkgarch = d.getVar('PRSERV_DUMPOPT_PKGARCH')
35 opt_checksum = d.getVar('PRSERV_DUMPOPT_CHECKSUM') 34 opt_checksum = d.getVar('PRSERV_DUMPOPT_CHECKSUM')
36 opt_col = ("1" == d.getVar('PRSERV_DUMPOPT_COL')) 35 opt_col = ("1" == d.getVar('PRSERV_DUMPOPT_COL'))
37 return conn.export(opt_version, opt_pkgarch, opt_checksum, opt_col) 36 data = conn.export(opt_version, opt_pkgarch, opt_checksum, opt_col)
37 conn.close()
38 return data
38 39
39def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksum=None): 40def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksum=None):
40 if not d.getVar('PRSERV_HOST'): 41 if not d.getVar('PRSERV_HOST'):
41 bb.error("Not using network based PR service") 42 bb.error("Not using network based PR service")
42 return None 43 return None
43 44
44 conn = d.getVar("__PRSERV_CONN") 45 conn = prserv_make_conn(d)
45 if conn is None: 46 if conn is None:
46 conn = prserv_make_conn(d) 47 bb.error("Making connection failed to remote PR service")
47 if conn is None: 48 return None
48 bb.error("Making connection failed to remote PR service")
49 return None
50 #get the entry values 49 #get the entry values
51 imported = [] 50 imported = []
52 prefix = "PRAUTO$" 51 prefix = "PRAUTO$"
@@ -70,6 +69,7 @@ def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksu
70 bb.error("importing(%s,%s,%s,%d) failed. DB may have larger value %d" % (version,pkgarch,checksum,value,ret)) 69 bb.error("importing(%s,%s,%s,%d) failed. DB may have larger value %d" % (version,pkgarch,checksum,value,ret))
71 else: 70 else:
72 imported.append((version,pkgarch,checksum,value)) 71 imported.append((version,pkgarch,checksum,value))
72 conn.close()
73 return imported 73 return imported
74 74
75def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False): 75def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False):
@@ -78,8 +78,7 @@ def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False):
78 bb.utils.mkdirhier(d.getVar('PRSERV_DUMPDIR')) 78 bb.utils.mkdirhier(d.getVar('PRSERV_DUMPDIR'))
79 df = d.getVar('PRSERV_DUMPFILE') 79 df = d.getVar('PRSERV_DUMPFILE')
80 #write data 80 #write data
81 lf = bb.utils.lockfile("%s.lock" % df) 81 with open(df, "a") as f, bb.utils.fileslocked(["%s.lock" % df]) as locks:
82 with open(df, "a") as f:
83 if metainfo: 82 if metainfo:
84 #dump column info 83 #dump column info
85 f.write("#PR_core_ver = \"%s\"\n\n" % metainfo['core_ver']); 84 f.write("#PR_core_ver = \"%s\"\n\n" % metainfo['core_ver']);
@@ -113,7 +112,6 @@ def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False):
113 if not nomax: 112 if not nomax:
114 for i in idx: 113 for i in idx:
115 f.write("PRAUTO_%s_%s = \"%s\"\n" % (str(datainfo[idx[i]]['version']),str(datainfo[idx[i]]['pkgarch']),str(datainfo[idx[i]]['value']))) 114 f.write("PRAUTO_%s_%s = \"%s\"\n" % (str(datainfo[idx[i]]['version']),str(datainfo[idx[i]]['pkgarch']),str(datainfo[idx[i]]['value'])))
116 bb.utils.unlockfile(lf)
117 115
118def prserv_check_avail(d): 116def prserv_check_avail(d):
119 host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f]) 117 host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f])
@@ -125,4 +123,5 @@ def prserv_check_avail(d):
125 except TypeError: 123 except TypeError:
126 bb.fatal('Undefined/incorrect PRSERV_HOST value. Format: "host:port"') 124 bb.fatal('Undefined/incorrect PRSERV_HOST value. Format: "host:port"')
127 else: 125 else:
128 prserv_make_conn(d, True) 126 conn = prserv_make_conn(d, True)
127 conn.close()
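
The locking change above replaces a manual lockfile()/unlockfile() pair with a context manager, so the lock is released even if writing raises. The same pattern in plain Python, using fcntl as a stand-in for bb.utils.fileslocked():

    import fcntl
    from contextlib import contextmanager

    @contextmanager
    def fileslocked(paths):
        # Hold an exclusive flock on each path for the duration of
        # the with-block, releasing on any exit, normal or not.
        handles = [open(p, "a") for p in paths]
        try:
            for h in handles:
                fcntl.flock(h, fcntl.LOCK_EX)
            yield handles
        finally:
            for h in handles:
                fcntl.flock(h, fcntl.LOCK_UN)
                h.close()
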
diff --git a/meta/lib/oe/qa.py b/meta/lib/oe/qa.py
index e8a854a302..cd36cb5070 100644
--- a/meta/lib/oe/qa.py
+++ b/meta/lib/oe/qa.py
@@ -1,7 +1,10 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
7import ast
5import os, struct, mmap 8import os, struct, mmap
6 9
7class NotELFFileError(Exception): 10class NotELFFileError(Exception):
@@ -48,6 +51,9 @@ class ELFFile:
48 return self 51 return self
49 52
50 def __exit__(self, exc_type, exc_value, traceback): 53 def __exit__(self, exc_type, exc_value, traceback):
54 self.close()
55
56 def close(self):
51 if self.data: 57 if self.data:
52 self.data.close() 58 self.data.close()
53 59
@@ -128,6 +134,9 @@ class ELFFile:
128 """ 134 """
129 return self.getShort(ELFFile.E_MACHINE) 135 return self.getShort(ELFFile.E_MACHINE)
130 136
137 def set_objdump(self, cmd, output):
138 self.objdump_output[cmd] = output
139
131 def run_objdump(self, cmd, d): 140 def run_objdump(self, cmd, d):
132 import bb.process 141 import bb.process
133 import sys 142 import sys
@@ -171,6 +180,66 @@ def elf_machine_to_string(machine):
171 except: 180 except:
172 return "Unknown (%s)" % repr(machine) 181 return "Unknown (%s)" % repr(machine)
173 182
183def write_error(type, error, d):
184 logfile = d.getVar('QA_LOGFILE')
185 if logfile:
186 p = d.getVar('P')
187 with open(logfile, "a+") as f:
188 f.write("%s: %s [%s]\n" % (p, error, type))
189
190def handle_error_visitorcode(name, args):
191 execs = set()
192 contains = {}
193 warn = None
194 if isinstance(args[0], ast.Constant) and isinstance(args[0].value, str):
195 for i in ["ERROR_QA", "WARN_QA"]:
196 if i not in contains:
197 contains[i] = set()
198 contains[i].add(args[0].value)
199 else:
200 warn = args[0]
201 execs.add(name)
202 return contains, execs, warn
203
204def handle_error(error_class, error_msg, d):
205 if error_class in (d.getVar("ERROR_QA") or "").split():
206 write_error(error_class, error_msg, d)
207 bb.error("QA Issue: %s [%s]" % (error_msg, error_class))
208 d.setVar("QA_ERRORS_FOUND", "True")
209 return False
210 elif error_class in (d.getVar("WARN_QA") or "").split():
211 write_error(error_class, error_msg, d)
212 bb.warn("QA Issue: %s [%s]" % (error_msg, error_class))
213 else:
214 bb.note("QA Issue: %s [%s]" % (error_msg, error_class))
215 return True
216handle_error.visitorcode = handle_error_visitorcode
217
218def exit_with_message_if_errors(message, d):
219 qa_fatal_errors = bb.utils.to_boolean(d.getVar("QA_ERRORS_FOUND"), False)
220 if qa_fatal_errors:
221 bb.fatal(message)
222
223def exit_if_errors(d):
224 exit_with_message_if_errors("Fatal QA errors were found, failing task.", d)
225
226def check_upstream_status(fullpath):
227 import re
228 kinda_status_re = re.compile(r"^.*upstream.*status.*$", re.IGNORECASE | re.MULTILINE)
229 strict_status_re = re.compile(r"^Upstream-Status: (Pending|Submitted|Denied|Inappropriate|Backport|Inactive-Upstream)( .+)?$", re.MULTILINE)
230 guidelines = "https://docs.yoctoproject.org/contributor-guide/recipe-style-guide.html#patch-upstream-status"
231
232 with open(fullpath, encoding='utf-8', errors='ignore') as f:
233 file_content = f.read()
234 match_kinda = kinda_status_re.search(file_content)
235 match_strict = strict_status_re.search(file_content)
236
237 if not match_strict:
238 if match_kinda:
239 return "Malformed Upstream-Status in patch\n%s\nPlease correct according to %s :\n%s" % (fullpath, guidelines, match_kinda.group(0))
240 else:
241 return "Missing Upstream-Status in patch\n%s\nPlease add according to %s ." % (fullpath, guidelines)
242
174if __name__ == "__main__": 243if __name__ == "__main__":
175 import sys 244 import sys
176 245
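
The two regexes in check_upstream_status() separate "mentions Upstream-Status at all" from "carries a well-formed tag"; a quick sketch exercising both on in-memory patch headers:

    import re

    strict = re.compile(r"^Upstream-Status: (Pending|Submitted|Denied"
                        r"|Inappropriate|Backport|Inactive-Upstream)( .+)?$",
                        re.MULTILINE)
    kinda = re.compile(r"^.*upstream.*status.*$",
                       re.IGNORECASE | re.MULTILINE)

    header = "Upstream status: pending\n"          # malformed tag
    assert kinda.search(header) and not strict.search(header)

    header = "Upstream-Status: Backport [commit abc123]\n"
    assert strict.search(header)                   # well-formed tag
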
diff --git a/meta/lib/oe/qemu.py b/meta/lib/oe/qemu.py
new file mode 100644
index 0000000000..769865036c
--- /dev/null
+++ b/meta/lib/oe/qemu.py
@@ -0,0 +1,54 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7def qemu_target_binary(d):
8 package_arch = d.getVar("PACKAGE_ARCH")
9 qemu_target_binary = (d.getVar("QEMU_TARGET_BINARY_%s" % package_arch) or "")
10 if qemu_target_binary:
11 return qemu_target_binary
12
13 target_arch = d.getVar("TARGET_ARCH")
14 if target_arch in ("i486", "i586", "i686"):
15 target_arch = "i386"
16 elif target_arch == "powerpc":
17 target_arch = "ppc"
18 elif target_arch == "powerpc64":
19 target_arch = "ppc64"
20 elif target_arch == "powerpc64le":
21 target_arch = "ppc64le"
22
23 return "qemu-" + target_arch
24
25def qemu_wrapper_cmdline(d, rootfs_path, library_paths, qemu_options=None):
26 import string
27
28 package_arch = d.getVar("PACKAGE_ARCH")
29 if package_arch == "all":
30 return "false"
31
32 qemu_binary = qemu_target_binary(d)
33 if qemu_binary == "qemu-allarch":
34 qemu_binary = "qemuwrapper"
35
36     if qemu_options is None:
37 qemu_options = d.getVar("QEMU_OPTIONS") or ""
38
39 return "PSEUDO_UNLOAD=1 " + qemu_binary + " " + qemu_options + " -L " + rootfs_path\
40 + " -E LD_LIBRARY_PATH=" + ":".join(library_paths) + " "
41
42# The following function returns a string containing the command needed to
43# run a certain binary through qemu. For example, in order to run an
44# architecture-dependent postinstall scriptlet at do_rootfs time, we can
45# run it through qemu. In the postinstall scriptlet, we could use the
46# following:
47#
48# ${@qemu_run_binary(d, '$D', '/usr/bin/test_app')} [test_app arguments]
49#
50def qemu_run_binary(d, rootfs_path, binary):
51 libdir = rootfs_path + d.getVar("libdir", False)
52 base_libdir = rootfs_path + d.getVar("base_libdir", False)
53
54 return qemu_wrapper_cmdline(d, rootfs_path, [libdir, base_libdir]) + rootfs_path + binary
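
Putting the helpers in this new file together, qemu_run_binary() yields a user-mode-emulation command along these lines; the sketch below is purely illustrative and all values in it are hypothetical:

    def qemu_cmdline_sketch():
        # Rough shape of qemu_run_binary(d, '$D', '/usr/bin/test_app')
        # for a 32-bit ARM target rootfs staged at /rootfs.
        rootfs = "/rootfs"
        libpaths = [rootfs + "/usr/lib", rootfs + "/lib"]
        return ("PSEUDO_UNLOAD=1 qemu-arm -L %s -E LD_LIBRARY_PATH=%s %s%s"
                % (rootfs, ":".join(libpaths), rootfs, "/usr/bin/test_app"))
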
diff --git a/meta/lib/oe/recipeutils.py b/meta/lib/oe/recipeutils.py
index 407d168894..044f1bfa61 100644
--- a/meta/lib/oe/recipeutils.py
+++ b/meta/lib/oe/recipeutils.py
@@ -24,9 +24,9 @@ from collections import OrderedDict, defaultdict
24from bb.utils import vercmp_string 24from bb.utils import vercmp_string
25 25
26# Help us to find places to insert values 26# Help us to find places to insert values
27recipe_progression = ['SUMMARY', 'DESCRIPTION', 'AUTHOR', 'HOMEPAGE', 'BUGTRACKER', 'SECTION', 'LICENSE', 'LICENSE_FLAGS', 'LIC_FILES_CHKSUM', 'PROVIDES', 'DEPENDS', 'PR', 'PV', 'SRCREV', 'SRCPV', 'SRC_URI', 'S', 'do_fetch()', 'do_unpack()', 'do_patch()', 'EXTRA_OECONF', 'EXTRA_OECMAKE', 'EXTRA_OESCONS', 'do_configure()', 'EXTRA_OEMAKE', 'do_compile()', 'do_install()', 'do_populate_sysroot()', 'INITSCRIPT', 'USERADD', 'GROUPADD', 'PACKAGES', 'FILES', 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RPROVIDES', 'RREPLACES', 'RCONFLICTS', 'ALLOW_EMPTY', 'populate_packages()', 'do_package()', 'do_deploy()', 'BBCLASSEXTEND'] 27recipe_progression = ['SUMMARY', 'DESCRIPTION', 'HOMEPAGE', 'BUGTRACKER', 'SECTION', 'LICENSE', 'LICENSE_FLAGS', 'LIC_FILES_CHKSUM', 'PROVIDES', 'DEPENDS', 'PR', 'PV', 'SRCREV', 'SRC_URI', 'S', 'do_fetch()', 'do_unpack()', 'do_patch()', 'EXTRA_OECONF', 'EXTRA_OECMAKE', 'EXTRA_OESCONS', 'do_configure()', 'EXTRA_OEMAKE', 'do_compile()', 'do_install()', 'do_populate_sysroot()', 'INITSCRIPT', 'USERADD', 'GROUPADD', 'PACKAGES', 'FILES', 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RPROVIDES', 'RREPLACES', 'RCONFLICTS', 'ALLOW_EMPTY', 'populate_packages()', 'do_package()', 'do_deploy()', 'BBCLASSEXTEND']
28# Variables that sometimes are a bit long but shouldn't be wrapped 28# Variables that sometimes are a bit long but shouldn't be wrapped
29nowrap_vars = ['SUMMARY', 'HOMEPAGE', 'BUGTRACKER', r'SRC_URI\[(.+\.)?md5sum\]', r'SRC_URI\[(.+\.)?sha256sum\]'] 29nowrap_vars = ['SUMMARY', 'HOMEPAGE', 'BUGTRACKER', r'SRC_URI\[(.+\.)?md5sum\]', r'SRC_URI\[(.+\.)?sha[0-9]+sum\]']
30list_vars = ['SRC_URI', 'LIC_FILES_CHKSUM'] 30list_vars = ['SRC_URI', 'LIC_FILES_CHKSUM']
31meta_vars = ['SUMMARY', 'DESCRIPTION', 'HOMEPAGE', 'BUGTRACKER', 'SECTION'] 31meta_vars = ['SUMMARY', 'DESCRIPTION', 'HOMEPAGE', 'BUGTRACKER', 'SECTION']
32 32
@@ -47,7 +47,7 @@ def simplify_history(history, d):
47 continue 47 continue
48 has_set = True 48 has_set = True
49 elif event['op'] in ('append', 'prepend', 'postdot', 'predot'): 49 elif event['op'] in ('append', 'prepend', 'postdot', 'predot'):
50 # Reminder: "append" and "prepend" mean += and =+ respectively, NOT _append / _prepend 50 # Reminder: "append" and "prepend" mean += and =+ respectively, NOT :append / :prepend
51 if has_set: 51 if has_set:
52 continue 52 continue
53 ret_history.insert(0, event) 53 ret_history.insert(0, event)
@@ -342,7 +342,7 @@ def patch_recipe(d, fn, varvalues, patch=False, relpath='', redirect_output=None
342 def override_applicable(hevent): 342 def override_applicable(hevent):
343 op = hevent['op'] 343 op = hevent['op']
344 if '[' in op: 344 if '[' in op:
345 opoverrides = op.split('[')[1].split(']')[0].split('_') 345 opoverrides = op.split('[')[1].split(']')[0].split(':')
346 for opoverride in opoverrides: 346 for opoverride in opoverrides:
347 if not opoverride in overrides: 347 if not opoverride in overrides:
348 return False 348 return False
@@ -368,13 +368,13 @@ def patch_recipe(d, fn, varvalues, patch=False, relpath='', redirect_output=None
368 recipe_set = True 368 recipe_set = True
369 if not recipe_set: 369 if not recipe_set:
370 for event in history: 370 for event in history:
371 if event['op'].startswith('_remove'): 371 if event['op'].startswith(':remove'):
372 continue 372 continue
373 if not override_applicable(event): 373 if not override_applicable(event):
374 continue 374 continue
375 newvalue = value.replace(event['detail'], '') 375 newvalue = value.replace(event['detail'], '')
376 if newvalue == value and os.path.abspath(event['file']) == fn and event['op'].startswith('_'): 376 if newvalue == value and os.path.abspath(event['file']) == fn and event['op'].startswith(':'):
377 op = event['op'].replace('[', '_').replace(']', '') 377 op = event['op'].replace('[', ':').replace(']', '')
378 extravals[var + op] = None 378 extravals[var + op] = None
379 value = newvalue 379 value = newvalue
380 vals[var] = ('+=', value) 380 vals[var] = ('+=', value)
@@ -414,15 +414,13 @@ def copy_recipe_files(d, tgt_dir, whole_dir=False, download=True, all_variants=F
414 414
415 fetch_urls(d) 415 fetch_urls(d)
416 if all_variants: 416 if all_variants:
417 # Get files for other variants e.g. in the case of a SRC_URI_append 417 # Get files for other variants e.g. in the case of a SRC_URI:append
418 localdata = bb.data.createCopy(d) 418 localdata = bb.data.createCopy(d)
419 variants = (localdata.getVar('BBCLASSEXTEND') or '').split() 419 variants = (localdata.getVar('BBCLASSEXTEND') or '').split()
420 if variants: 420 if variants:
421 # Ensure we handle class-target if we're dealing with one of the variants 421 # Ensure we handle class-target if we're dealing with one of the variants
422 variants.append('target') 422 variants.append('target')
423 for variant in variants: 423 for variant in variants:
424 if variant.startswith("devupstream"):
425 localdata.setVar('SRCPV', 'git')
426 localdata.setVar('CLASSOVERRIDE', 'class-%s' % variant) 424 localdata.setVar('CLASSOVERRIDE', 'class-%s' % variant)
427 fetch_urls(localdata) 425 fetch_urls(localdata)
428 426
@@ -666,19 +664,23 @@ def get_bbappend_path(d, destlayerdir, wildcardver=False):
666 return (appendpath, pathok) 664 return (appendpath, pathok)
667 665
668 666
669def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, machine=None, extralines=None, removevalues=None, redirect_output=None): 667def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, machine=None, extralines=None, removevalues=None, redirect_output=None, params=None, update_original_recipe=False):
670 """ 668 """
671 Writes a bbappend file for a recipe 669 Writes a bbappend file for a recipe
672 Parameters: 670 Parameters:
673 rd: data dictionary for the recipe 671 rd: data dictionary for the recipe
674 destlayerdir: base directory of the layer to place the bbappend in 672 destlayerdir: base directory of the layer to place the bbappend in
675 (subdirectory path from there will be determined automatically) 673 (subdirectory path from there will be determined automatically)
676 srcfiles: dict of source files to add to SRC_URI, where the value 674 srcfiles: dict of source files to add to SRC_URI, where the key
677 is the full path to the file to be added, and the value is the 675 is the full path to the file to be added, and the value is a
678 original filename as it would appear in SRC_URI or None if it 676 dict with the following optional keys:
679 isn't already present. You may pass None for this parameter if 677 path: the original filename as it would appear in SRC_URI
680 you simply want to specify your own content via the extralines 678 or None if it isn't already present.
681 parameter. 679 patchdir: the patchdir parameter
680 newname: the name to give to the new added file. None to use
681 the default value: basename(path)
682 You may pass None for this parameter if you simply want to specify
683 your own content via the extralines parameter.
682 install: dict mapping entries in srcfiles to a tuple of two elements: 684 install: dict mapping entries in srcfiles to a tuple of two elements:
683 install path (*without* ${D} prefix) and permission value (as a 685 install path (*without* ${D} prefix) and permission value (as a
684 string, e.g. '0644'). 686 string, e.g. '0644').
@@ -696,18 +698,32 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
696 redirect_output: 698 redirect_output:
697 If specified, redirects writing the output file to the 699 If specified, redirects writing the output file to the
698 specified directory (for dry-run purposes) 700 specified directory (for dry-run purposes)
701 params:
702 Parameters to use when adding entries to SRC_URI. If specified,
703 should be a list of dicts with the same length as srcfiles.
704 update_original_recipe:
705 Force updating the original recipe instead of creating/updating
706 a bbappend. destlayerdir must contain the original recipe
699 """ 707 """
700 708
701 if not removevalues: 709 if not removevalues:
702 removevalues = {} 710 removevalues = {}
703 711
704 # Determine how the bbappend should be named 712 recipefile = rd.getVar('FILE')
705 appendpath, pathok = get_bbappend_path(rd, destlayerdir, wildcardver) 713 if update_original_recipe:
706 if not appendpath: 714 if destlayerdir not in recipefile:
707 bb.error('Unable to determine layer directory containing %s' % recipefile) 715 bb.error("destlayerdir %s doesn't contain the original recipe (%s), cannot update it" % (destlayerdir, recipefile))
708 return (None, None) 716 return (None, None)
709 if not pathok: 717
710 bb.warn('Unable to determine correct subdirectory path for bbappend file - check that what %s adds to BBFILES also matches .bbappend files. Using %s for now, but until you fix this the bbappend will not be applied.' % (os.path.join(destlayerdir, 'conf', 'layer.conf'), os.path.dirname(appendpath))) 718 appendpath = recipefile
719 else:
720 # Determine how the bbappend should be named
721 appendpath, pathok = get_bbappend_path(rd, destlayerdir, wildcardver)
722 if not appendpath:
723 bb.error('Unable to determine layer directory containing %s' % recipefile)
724 return (None, None)
725 if not pathok:
726 bb.warn('Unable to determine correct subdirectory path for bbappend file - check that what %s adds to BBFILES also matches .bbappend files. Using %s for now, but until you fix this the bbappend will not be applied.' % (os.path.join(destlayerdir, 'conf', 'layer.conf'), os.path.dirname(appendpath)))
711 727
712 appenddir = os.path.dirname(appendpath) 728 appenddir = os.path.dirname(appendpath)
713 if not redirect_output: 729 if not redirect_output:
@@ -752,30 +768,48 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
752 bbappendlines.append((varname, op, value)) 768 bbappendlines.append((varname, op, value))
753 769
754 destsubdir = rd.getVar('PN') 770 destsubdir = rd.getVar('PN')
755 if srcfiles: 771 if not update_original_recipe and srcfiles:
756 bbappendlines.append(('FILESEXTRAPATHS_prepend', ':=', '${THISDIR}/${PN}:')) 772 bbappendlines.append(('FILESEXTRAPATHS:prepend', ':=', '${THISDIR}/${PN}:'))
757 773
758 appendoverride = '' 774 appendoverride = ''
759 if machine: 775 if machine:
760 bbappendlines.append(('PACKAGE_ARCH', '=', '${MACHINE_ARCH}')) 776 bbappendlines.append(('PACKAGE_ARCH', '=', '${MACHINE_ARCH}'))
761 appendoverride = '_%s' % machine 777 appendoverride = ':%s' % machine
762 copyfiles = {} 778 copyfiles = {}
763 if srcfiles: 779 if srcfiles:
764 instfunclines = [] 780 instfunclines = []
765 for newfile, origsrcfile in srcfiles.items(): 781 for i, (newfile, param) in enumerate(srcfiles.items()):
766 srcfile = origsrcfile
767 srcurientry = None 782 srcurientry = None
768 if not srcfile: 783 if not 'path' in param or not param['path']:
769 srcfile = os.path.basename(newfile) 784 if 'newname' in param and param['newname']:
785 srcfile = param['newname']
786 else:
787 srcfile = os.path.basename(newfile)
770 srcurientry = 'file://%s' % srcfile 788 srcurientry = 'file://%s' % srcfile
789 oldentry = None
790 for uri in rd.getVar('SRC_URI').split():
791 if srcurientry in uri:
792 oldentry = uri
793 if params and params[i]:
794 srcurientry = '%s;%s' % (srcurientry, ';'.join('%s=%s' % (k,v) for k,v in params[i].items()))
771 # Double-check it's not there already 795 # Double-check it's not there already
772 # FIXME do we care if the entry is added by another bbappend that might go away? 796 # FIXME do we care if the entry is added by another bbappend that might go away?
773 if not srcurientry in rd.getVar('SRC_URI').split(): 797 if not srcurientry in rd.getVar('SRC_URI').split():
774 if machine: 798 if machine:
775 appendline('SRC_URI_append%s' % appendoverride, '=', ' ' + srcurientry) 799 if oldentry:
800 appendline('SRC_URI:remove%s' % appendoverride, '=', ' ' + oldentry)
801 appendline('SRC_URI:append%s' % appendoverride, '=', ' ' + srcurientry)
776 else: 802 else:
803 if oldentry:
804 if update_original_recipe:
805 removevalues['SRC_URI'] = oldentry
806 else:
807 appendline('SRC_URI:remove', '=', oldentry)
777 appendline('SRC_URI', '+=', srcurientry) 808 appendline('SRC_URI', '+=', srcurientry)
778 copyfiles[newfile] = srcfile 809 param['path'] = srcfile
810 else:
811 srcfile = param['path']
812 copyfiles[newfile] = param
779 if install: 813 if install:
780 institem = install.pop(newfile, None) 814 institem = install.pop(newfile, None)
781 if institem: 815 if institem:
@@ -784,9 +818,9 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
784 instdirline = 'install -d ${D}%s' % os.path.dirname(instdestpath) 818 instdirline = 'install -d ${D}%s' % os.path.dirname(instdestpath)
785 if not instdirline in instfunclines: 819 if not instdirline in instfunclines:
786 instfunclines.append(instdirline) 820 instfunclines.append(instdirline)
787 instfunclines.append('install -m %s ${WORKDIR}/%s ${D}%s' % (perms, os.path.basename(srcfile), instdestpath)) 821 instfunclines.append('install -m %s ${UNPACKDIR}/%s ${D}%s' % (perms, os.path.basename(srcfile), instdestpath))
788 if instfunclines: 822 if instfunclines:
789 bbappendlines.append(('do_install_append%s()' % appendoverride, '', instfunclines)) 823 bbappendlines.append(('do_install:append%s()' % appendoverride, '', instfunclines))
790 824
791 if redirect_output: 825 if redirect_output:
792 bb.note('Writing append file %s (dry-run)' % appendpath) 826 bb.note('Writing append file %s (dry-run)' % appendpath)
@@ -795,6 +829,8 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
795 # multiple times per operation when we're handling overrides) 829 # multiple times per operation when we're handling overrides)
796 if os.path.exists(appendpath) and not os.path.exists(outfile): 830 if os.path.exists(appendpath) and not os.path.exists(outfile):
797 shutil.copy2(appendpath, outfile) 831 shutil.copy2(appendpath, outfile)
832 elif update_original_recipe:
833 outfile = recipefile
798 else: 834 else:
799 bb.note('Writing append file %s' % appendpath) 835 bb.note('Writing append file %s' % appendpath)
800 outfile = appendpath 836 outfile = appendpath
@@ -804,15 +840,15 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
804 extvars = {'destsubdir': destsubdir} 840 extvars = {'destsubdir': destsubdir}
805 841
806 def appendfile_varfunc(varname, origvalue, op, newlines): 842 def appendfile_varfunc(varname, origvalue, op, newlines):
807 if varname == 'FILESEXTRAPATHS_prepend': 843 if varname == 'FILESEXTRAPATHS:prepend':
808 if origvalue.startswith('${THISDIR}/'): 844 if origvalue.startswith('${THISDIR}/'):
809 popline('FILESEXTRAPATHS_prepend') 845 popline('FILESEXTRAPATHS:prepend')
810 extvars['destsubdir'] = rd.expand(origvalue.split('${THISDIR}/', 1)[1].rstrip(':')) 846 extvars['destsubdir'] = rd.expand(origvalue.split('${THISDIR}/', 1)[1].rstrip(':'))
811 elif varname == 'PACKAGE_ARCH': 847 elif varname == 'PACKAGE_ARCH':
812 if machine: 848 if machine:
813 popline('PACKAGE_ARCH') 849 popline('PACKAGE_ARCH')
814 return (machine, None, 4, False) 850 return (machine, None, 4, False)
815 elif varname.startswith('do_install_append'): 851 elif varname.startswith('do_install:append'):
816 func = popline(varname) 852 func = popline(varname)
817 if func: 853 if func:
818 instfunclines = [line.strip() for line in origvalue.strip('\n').splitlines()] 854 instfunclines = [line.strip() for line in origvalue.strip('\n').splitlines()]
@@ -824,7 +860,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
824 splitval = split_var_value(origvalue, assignment=False) 860 splitval = split_var_value(origvalue, assignment=False)
825 changed = False 861 changed = False
826 removevar = varname 862 removevar = varname
827 if varname in ['SRC_URI', 'SRC_URI_append%s' % appendoverride]: 863 if varname in ['SRC_URI', 'SRC_URI:append%s' % appendoverride]:
828 removevar = 'SRC_URI' 864 removevar = 'SRC_URI'
829 line = popline(varname) 865 line = popline(varname)
830 if line: 866 if line:
@@ -853,11 +889,11 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
853 newvalue = splitval 889 newvalue = splitval
854 if len(newvalue) == 1: 890 if len(newvalue) == 1:
855 # Ensure it's written out as one line 891 # Ensure it's written out as one line
856 if '_append' in varname: 892 if ':append' in varname:
857 newvalue = ' ' + newvalue[0] 893 newvalue = ' ' + newvalue[0]
858 else: 894 else:
859 newvalue = newvalue[0] 895 newvalue = newvalue[0]
860 if not newvalue and (op in ['+=', '.='] or '_append' in varname): 896 if not newvalue and (op in ['+=', '.='] or ':append' in varname):
861 # There's no point appending nothing 897 # There's no point appending nothing
862 newvalue = None 898 newvalue = None
863 if varname.endswith('()'): 899 if varname.endswith('()'):
@@ -898,7 +934,12 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
898 outdir = redirect_output 934 outdir = redirect_output
899 else: 935 else:
900 outdir = appenddir 936 outdir = appenddir
901 for newfile, srcfile in copyfiles.items(): 937 for newfile, param in copyfiles.items():
938 srcfile = param['path']
939 patchdir = param.get('patchdir', ".")
940
941 if patchdir != ".":
942 newfile = os.path.join(os.path.split(newfile)[0], patchdir, os.path.split(newfile)[1])
902 filedest = os.path.join(outdir, destsubdir, os.path.basename(srcfile)) 943 filedest = os.path.join(outdir, destsubdir, os.path.basename(srcfile))
903 if os.path.abspath(newfile) != os.path.abspath(filedest): 944 if os.path.abspath(newfile) != os.path.abspath(filedest):
904 if newfile.startswith(tempfile.gettempdir()): 945 if newfile.startswith(tempfile.gettempdir()):
@@ -942,10 +983,9 @@ def replace_dir_vars(path, d):
942 path = path.replace(dirpath, '${%s}' % dirvars[dirpath]) 983 path = path.replace(dirpath, '${%s}' % dirvars[dirpath])
943 return path 984 return path
944 985
945def get_recipe_pv_without_srcpv(pv, uri_type): 986def get_recipe_pv_with_pfx_sfx(pv, uri_type):
946 """ 987 """
947 Get PV without SRCPV common in SCM's for now only 988 Get PV separating prefix and suffix components.
948 support git.
949 989
950 Returns tuple with pv, prefix and suffix. 990 Returns tuple with pv, prefix and suffix.
951 """ 991 """
@@ -953,7 +993,7 @@ def get_recipe_pv_without_srcpv(pv, uri_type):
953 sfx = '' 993 sfx = ''
954 994
955 if uri_type == 'git': 995 if uri_type == 'git':
956 git_regex = re.compile(r"(?P<pfx>v?)(?P<ver>.*?)(?P<sfx>\+[^\+]*(git)?r?(AUTOINC\+))(?P<rev>.*)") 996 git_regex = re.compile(r"(?P<pfx>v?)(?P<ver>.*?)(?P<sfx>\+[^\+]*(git)?r?(AUTOINC\+)?)(?P<rev>.*)")
957 m = git_regex.match(pv) 997 m = git_regex.match(pv)
958 998
959 if m: 999 if m:
@@ -1005,7 +1045,7 @@ def get_recipe_upstream_version(rd):
1005 src_uri = src_uris.split()[0] 1045 src_uri = src_uris.split()[0]
1006 uri_type, _, _, _, _, _ = decodeurl(src_uri) 1046 uri_type, _, _, _, _, _ = decodeurl(src_uri)
1007 1047
1008 (pv, pfx, sfx) = get_recipe_pv_without_srcpv(rd.getVar('PV'), uri_type) 1048 (pv, pfx, sfx) = get_recipe_pv_with_pfx_sfx(rd.getVar('PV'), uri_type)
1009 ru['current_version'] = pv 1049 ru['current_version'] = pv
1010 1050
1011 manual_upstream_version = rd.getVar("RECIPE_UPSTREAM_VERSION") 1051 manual_upstream_version = rd.getVar("RECIPE_UPSTREAM_VERSION")
@@ -1029,10 +1069,16 @@ def get_recipe_upstream_version(rd):
1029 else: 1069 else:
1030 ud = bb.fetch2.FetchData(src_uri, rd) 1070 ud = bb.fetch2.FetchData(src_uri, rd)
1031 if rd.getVar("UPSTREAM_CHECK_COMMITS") == "1": 1071 if rd.getVar("UPSTREAM_CHECK_COMMITS") == "1":
1032 revision = ud.method.latest_revision(ud, rd, 'default') 1072 bb.fetch2.get_srcrev(rd)
1033 upversion = pv 1073 upversion = None
1034 if revision != rd.getVar("SRCREV"): 1074 revision = None
1035 upversion = upversion + "-new-commits-available" 1075 try:
1076 revision = ud.method.latest_revision(ud, rd, 'default')
1077 upversion = pv
1078 if revision != rd.getVar("SRCREV"):
1079 upversion = upversion + "-new-commits-available"
1080 except bb.fetch2.FetchError as e:
1081 bb.warn("Unable to obtain latest revision: {}".format(e))
1036 else: 1082 else:
1037 pupver = ud.method.latest_versionstring(ud, rd) 1083 pupver = ud.method.latest_versionstring(ud, rd)
1038 (upversion, revision) = pupver 1084 (upversion, revision) = pupver
@@ -1071,7 +1117,7 @@ def _get_recipe_upgrade_status(data):
1071 maintainer = data.getVar('RECIPE_MAINTAINER') 1117 maintainer = data.getVar('RECIPE_MAINTAINER')
1072 no_upgrade_reason = data.getVar('RECIPE_NO_UPDATE_REASON') 1118 no_upgrade_reason = data.getVar('RECIPE_NO_UPDATE_REASON')
1073 1119
1074 return (pn, status, cur_ver, next_ver, maintainer, revision, no_upgrade_reason) 1120 return {'pn':pn, 'status':status, 'cur_ver':cur_ver, 'next_ver':next_ver, 'maintainer':maintainer, 'revision':revision, 'no_upgrade_reason':no_upgrade_reason}
1075 1121
1076def get_recipe_upgrade_status(recipes=None): 1122def get_recipe_upgrade_status(recipes=None):
1077 pkgs_list = [] 1123 pkgs_list = []
@@ -1113,6 +1159,7 @@ def get_recipe_upgrade_status(recipes=None):
1113 if not recipes: 1159 if not recipes:
1114 recipes = tinfoil.all_recipe_files(variants=False) 1160 recipes = tinfoil.all_recipe_files(variants=False)
1115 1161
1162 recipeincludes = {}
1116 for fn in recipes: 1163 for fn in recipes:
1117 try: 1164 try:
1118 if fn.startswith("/"): 1165 if fn.startswith("/"):
@@ -1137,8 +1184,65 @@ def get_recipe_upgrade_status(recipes=None):
1137 1184
1138 data_copy_list.append(data_copy) 1185 data_copy_list.append(data_copy)
1139 1186
1187 recipeincludes[data.getVar('FILE')] = {'bbincluded':data.getVar('BBINCLUDED').split(),'pn':data.getVar('PN')}
1188
1140 from concurrent.futures import ProcessPoolExecutor 1189 from concurrent.futures import ProcessPoolExecutor
1141 with ProcessPoolExecutor(max_workers=utils.cpu_count()) as executor: 1190 with ProcessPoolExecutor(max_workers=utils.cpu_count()) as executor:
1142 pkgs_list = executor.map(_get_recipe_upgrade_status, data_copy_list) 1191 pkgs_list = executor.map(_get_recipe_upgrade_status, data_copy_list)
1143 1192
1144 return pkgs_list 1193 return _group_recipes(pkgs_list, _get_common_include_recipes(recipeincludes))
1194
1195def get_common_include_recipes():
1196 with bb.tinfoil.Tinfoil() as tinfoil:
1197 tinfoil.prepare(config_only=False)
1198
1199 recipes = tinfoil.all_recipe_files(variants=False)
1200
1201 recipeincludes = {}
1202 for fn in recipes:
1203 data = tinfoil.parse_recipe_file(fn)
1204 recipeincludes[fn] = {'bbincluded':data.getVar('BBINCLUDED').split(),'pn':data.getVar('PN')}
1205 return _get_common_include_recipes(recipeincludes)
1206
1207def _get_common_include_recipes(recipeincludes_all):
1208 recipeincludes = {}
1209 for fn,data in recipeincludes_all.items():
1210 bbincluded_filtered = [i for i in data['bbincluded'] if os.path.dirname(i) == os.path.dirname(fn) and i != fn]
1211 if bbincluded_filtered:
1212 recipeincludes[data['pn']] = bbincluded_filtered
1213
1214 recipeincludes_inverted = {}
1215 for k,v in recipeincludes.items():
1216 for i in v:
1217 recipeincludes_inverted.setdefault(i,set()).add(k)
1218
1219 recipeincludes_inverted_filtered = {k:v for k,v in recipeincludes_inverted.items() if len(v) > 1}
1220
1221 recipes_with_shared_includes = list()
1222 for v in recipeincludes_inverted_filtered.values():
1223 recipeset = v
1224 for v1 in recipeincludes_inverted_filtered.values():
1225 if recipeset.intersection(v1):
1226 recipeset.update(v1)
1227 if recipeset not in recipes_with_shared_includes:
1228 recipes_with_shared_includes.append(recipeset)
1229
1230 return recipes_with_shared_includes
1231
1232def _group_recipes(recipes, groups):
1233 recipedict = {}
1234 for r in recipes:
1235 recipedict[r['pn']] = r
1236
1237 recipegroups = []
1238 for g in groups:
1239 recipeset = []
1240 for r in g:
1241 if r in recipedict.keys():
1242 recipeset.append(recipedict[r])
1243 del recipedict[r]
1244 recipegroups.append(recipeset)
1245
1246 for r in recipedict.values():
1247 recipegroups.append([r])
1248 return recipegroups
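
To make the include-grouping logic above concrete, here is a minimal standalone sketch of the core idea behind _get_common_include_recipes; the recipe paths and names are hypothetical and this is an illustration, not the patch's exact code:

    # Recipes that pull in a sibling .inc file end up in the same group.
    import os

    recipeincludes_all = {
        "/meta/recipes/foo/foo_1.0.bb": {
            "bbincluded": ["/meta/recipes/foo/foo.inc"], "pn": "foo"},
        "/meta/recipes/foo/foo-native_1.0.bb": {
            "bbincluded": ["/meta/recipes/foo/foo.inc"], "pn": "foo-native"},
        "/meta/recipes/bar/bar_2.0.bb": {
            "bbincluded": [], "pn": "bar"},
    }

    # Keep only includes living next to the recipe itself, then invert the
    # mapping to include-file -> set of PNs, as the patch does.
    inverted = {}
    for fn, data in recipeincludes_all.items():
        for inc in data["bbincluded"]:
            if os.path.dirname(inc) == os.path.dirname(fn) and inc != fn:
                inverted.setdefault(inc, set()).add(data["pn"])

    groups = [pns for pns in inverted.values() if len(pns) > 1]
    print(groups)  # [{'foo', 'foo-native'}]; 'bar' later becomes its own group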
diff --git a/meta/lib/oe/reproducible.py b/meta/lib/oe/reproducible.py
index 204b9bd734..0270024a83 100644
--- a/meta/lib/oe/reproducible.py
+++ b/meta/lib/oe/reproducible.py
@@ -1,10 +1,63 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4import os 6import os
5import subprocess 7import subprocess
6import bb 8import bb
7 9
10# For reproducible builds, this code sets the default SOURCE_DATE_EPOCH in each
 11# component's build environment. The format is the number of seconds since
 12# the Unix epoch.
13#
14# Upstream components (generally) respect this environment variable,
15# using it in place of the "current" date and time.
16# See https://reproducible-builds.org/specs/source-date-epoch/
17#
18# The default value of SOURCE_DATE_EPOCH comes from the function
19# get_source_date_epoch_value which reads from the SDE_FILE, or if the file
20# is not available will use the fallback of SOURCE_DATE_EPOCH_FALLBACK.
21#
22# The SDE_FILE is normally constructed from the function
 23# create_source_date_epoch_stamp which is typically added as a postfunc to
24# the do_unpack task. If a recipe does NOT have do_unpack, it should be added
25# to a task that runs after the source is available and before the
26# do_deploy_source_date_epoch task is executed.
27#
 28# If a recipe wishes to override the default behavior, it should set its own
29# SOURCE_DATE_EPOCH or override the do_deploy_source_date_epoch_stamp task
30# with recipe-specific functionality to write the appropriate
31# SOURCE_DATE_EPOCH into the SDE_FILE.
32#
33# SOURCE_DATE_EPOCH is intended to be a reproducible value. This value should
34# be reproducible for anyone who builds the same revision from the same
35# sources.
36#
 37# There are 5 ways the create_source_date_epoch_stamp function determines what
38# becomes SOURCE_DATE_EPOCH:
39#
40# 1. Use the value from __source_date_epoch.txt file if this file exists.
41# This file was most likely created in the previous build by one of the
42# following methods 2,3,4.
43# Alternatively, it can be provided by a recipe via SRC_URI.
44#
45# If the file does not exist:
46#
47# 2. If there is a git checkout, use the last git commit timestamp.
48# Git does not preserve file timestamps on checkout.
49#
50# 3. Use the mtime of "known" files such as NEWS, CHANGELOG, ...
51# This works for well-kept repositories distributed via tarball.
52#
53# 4. Use the modification time of the youngest file in the source tree, if
54# there is one.
55# This will be the newest file from the distribution tarball, if any.
56#
57# 5. Fall back to a fixed timestamp (SOURCE_DATE_EPOCH_FALLBACK).
58#
59# Once the value is determined, it is stored in the recipe's SDE_FILE.
60
8def get_source_date_epoch_from_known_files(d, sourcedir): 61def get_source_date_epoch_from_known_files(d, sourcedir):
9 source_date_epoch = None 62 source_date_epoch = None
10 newest_file = None 63 newest_file = None
@@ -22,10 +75,11 @@ def get_source_date_epoch_from_known_files(d, sourcedir):
22 return source_date_epoch 75 return source_date_epoch
23 76
24def find_git_folder(d, sourcedir): 77def find_git_folder(d, sourcedir):
25 # First guess: WORKDIR/git 78 # First guess: UNPACKDIR/BB_GIT_DEFAULT_DESTSUFFIX
26 # This is the default git fetcher unpack path 79 # This is the default git fetcher unpack path
27 workdir = d.getVar('WORKDIR') 80 unpackdir = d.getVar('UNPACKDIR')
28 gitpath = os.path.join(workdir, "git/.git") 81 default_destsuffix = d.getVar('BB_GIT_DEFAULT_DESTSUFFIX')
82 gitpath = os.path.join(unpackdir, default_destsuffix, ".git")
29 if os.path.isdir(gitpath): 83 if os.path.isdir(gitpath):
30 return gitpath 84 return gitpath
31 85
@@ -35,15 +89,16 @@ def find_git_folder(d, sourcedir):
35 return gitpath 89 return gitpath
36 90
37 # Perhaps there was a subpath or destsuffix specified. 91 # Perhaps there was a subpath or destsuffix specified.
38 # Go looking in the WORKDIR 92 # Go looking in the UNPACKDIR
39 exclude = set(["build", "image", "license-destdir", "patches", "pseudo", 93 for root, dirs, files in os.walk(unpackdir, topdown=True):
40 "recipe-sysroot", "recipe-sysroot-native", "sysroot-destdir", "temp"])
41 for root, dirs, files in os.walk(workdir, topdown=True):
42 dirs[:] = [d for d in dirs if d not in exclude]
43 if '.git' in dirs: 94 if '.git' in dirs:
44 return root 95 return os.path.join(root, ".git")
45 96
46 bb.warn("Failed to find a git repository in WORKDIR: %s" % workdir) 97 for root, dirs, files in os.walk(sourcedir, topdown=True):
98 if '.git' in dirs:
99 return os.path.join(root, ".git")
100
101 bb.warn("Failed to find a git repository in UNPACKDIR: %s" % unpackdir)
47 return None 102 return None
48 103
49def get_source_date_epoch_from_git(d, sourcedir): 104def get_source_date_epoch_from_git(d, sourcedir):
@@ -62,11 +117,12 @@ def get_source_date_epoch_from_git(d, sourcedir):
62 return None 117 return None
63 118
64 bb.debug(1, "git repository: %s" % gitpath) 119 bb.debug(1, "git repository: %s" % gitpath)
65 p = subprocess.run(['git', '--git-dir', gitpath, 'log', '-1', '--pretty=%ct'], check=True, stdout=subprocess.PIPE) 120 p = subprocess.run(['git', '-c', 'log.showSignature=false', '--git-dir', gitpath, 'log', '-1', '--pretty=%ct'],
121 check=True, stdout=subprocess.PIPE)
66 return int(p.stdout.decode('utf-8')) 122 return int(p.stdout.decode('utf-8'))
67 123
68def get_source_date_epoch_from_youngest_file(d, sourcedir): 124def get_source_date_epoch_from_youngest_file(d, sourcedir):
69 if sourcedir == d.getVar('WORKDIR'): 125 if sourcedir == d.getVar('UNPACKDIR'):
70 # These sources are almost certainly not from a tarball 126 # These sources are almost certainly not from a tarball
71 return None 127 return None
72 128
@@ -77,6 +133,9 @@ def get_source_date_epoch_from_youngest_file(d, sourcedir):
77 files = [f for f in files if not f[0] == '.'] 133 files = [f for f in files if not f[0] == '.']
78 134
79 for fname in files: 135 for fname in files:
136 if fname == "singletask.lock":
137 # Ignore externalsrc/devtool lockfile [YOCTO #14921]
138 continue
80 filename = os.path.join(root, fname) 139 filename = os.path.join(root, fname)
81 try: 140 try:
82 mtime = int(os.lstat(filename).st_mtime) 141 mtime = int(os.lstat(filename).st_mtime)
@@ -101,8 +160,40 @@ def fixed_source_date_epoch(d):
101def get_source_date_epoch(d, sourcedir): 160def get_source_date_epoch(d, sourcedir):
102 return ( 161 return (
103 get_source_date_epoch_from_git(d, sourcedir) or 162 get_source_date_epoch_from_git(d, sourcedir) or
104 get_source_date_epoch_from_known_files(d, sourcedir) or
105 get_source_date_epoch_from_youngest_file(d, sourcedir) or 163 get_source_date_epoch_from_youngest_file(d, sourcedir) or
106 fixed_source_date_epoch(d) # Last resort 164 fixed_source_date_epoch(d) # Last resort
107 ) 165 )
108 166
167def epochfile_read(epochfile, d):
168 cached, efile = d.getVar('__CACHED_SOURCE_DATE_EPOCH') or (None, None)
169 if cached and efile == epochfile:
170 return cached
171
172 if cached and epochfile != efile:
173 bb.debug(1, "Epoch file changed from %s to %s" % (efile, epochfile))
174
175 source_date_epoch = int(d.getVar('SOURCE_DATE_EPOCH_FALLBACK'))
176 try:
177 with open(epochfile, 'r') as f:
178 s = f.read()
179 try:
180 source_date_epoch = int(s)
181 except ValueError:
182 bb.warn("SOURCE_DATE_EPOCH value '%s' is invalid. Reverting to SOURCE_DATE_EPOCH_FALLBACK" % s)
183 source_date_epoch = int(d.getVar('SOURCE_DATE_EPOCH_FALLBACK'))
184 bb.debug(1, "SOURCE_DATE_EPOCH: %d" % source_date_epoch)
185 except FileNotFoundError:
186 bb.debug(1, "Cannot find %s. SOURCE_DATE_EPOCH will default to %d" % (epochfile, source_date_epoch))
187
188 d.setVar('__CACHED_SOURCE_DATE_EPOCH', (str(source_date_epoch), epochfile))
189 return str(source_date_epoch)
190
191def epochfile_write(source_date_epoch, epochfile, d):
192
193 bb.debug(1, "SOURCE_DATE_EPOCH: %d" % source_date_epoch)
194 bb.utils.mkdirhier(os.path.dirname(epochfile))
195
196 tmp_file = "%s.new" % epochfile
197 with open(tmp_file, 'w') as f:
198 f.write(str(source_date_epoch))
199 os.rename(tmp_file, epochfile)
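
A rough, self-contained illustration of the fallback chain documented at the top of this file (git commit, then youngest file, then the fixed fallback), together with the write-then-rename pattern used by epochfile_write; the paths and fallback value below are made up:

    import os

    def choose_sde(git_sde, youngest_sde, fallback=1672531200):
        # First non-None source wins, mirroring get_source_date_epoch()
        return git_sde or youngest_sde or fallback

    def write_sde(value, epochfile):
        # Write to a temp file, then rename, so readers never see a torn file
        os.makedirs(os.path.dirname(epochfile), exist_ok=True)
        tmp = epochfile + ".new"
        with open(tmp, "w") as f:
            f.write(str(value))
        os.rename(tmp, epochfile)

    write_sde(choose_sde(None, 1700000000),
              "/tmp/sde/__source_date_epoch.txt")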
diff --git a/meta/lib/oe/rootfs.py b/meta/lib/oe/rootfs.py
index 249c685dcf..14befac8fa 100644
--- a/meta/lib/oe/rootfs.py
+++ b/meta/lib/oe/rootfs.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4from abc import ABCMeta, abstractmethod 6from abc import ABCMeta, abstractmethod
@@ -104,7 +106,7 @@ class Rootfs(object, metaclass=ABCMeta):
104 def _cleanup(self): 106 def _cleanup(self):
105 pass 107 pass
106 108
107 def _setup_dbg_rootfs(self, dirs): 109 def _setup_dbg_rootfs(self, package_paths):
108 gen_debugfs = self.d.getVar('IMAGE_GEN_DEBUGFS') or '0' 110 gen_debugfs = self.d.getVar('IMAGE_GEN_DEBUGFS') or '0'
109 if gen_debugfs != '1': 111 if gen_debugfs != '1':
110 return 112 return
@@ -114,17 +116,18 @@ class Rootfs(object, metaclass=ABCMeta):
114 shutil.rmtree(self.image_rootfs + '-orig') 116 shutil.rmtree(self.image_rootfs + '-orig')
115 except: 117 except:
116 pass 118 pass
117 os.rename(self.image_rootfs, self.image_rootfs + '-orig') 119 bb.utils.rename(self.image_rootfs, self.image_rootfs + '-orig')
118 120
119 bb.note(" Creating debug rootfs...") 121 bb.note(" Creating debug rootfs...")
120 bb.utils.mkdirhier(self.image_rootfs) 122 bb.utils.mkdirhier(self.image_rootfs)
121 123
122 bb.note(" Copying back package database...") 124 bb.note(" Copying back package database...")
123 for dir in dirs: 125 for path in package_paths:
124 if not os.path.isdir(self.image_rootfs + '-orig' + dir): 126 bb.utils.mkdirhier(self.image_rootfs + os.path.dirname(path))
125 continue 127 if os.path.isdir(self.image_rootfs + '-orig' + path):
126 bb.utils.mkdirhier(self.image_rootfs + os.path.dirname(dir)) 128 shutil.copytree(self.image_rootfs + '-orig' + path, self.image_rootfs + path, symlinks=True)
127 shutil.copytree(self.image_rootfs + '-orig' + dir, self.image_rootfs + dir, symlinks=True) 129 elif os.path.isfile(self.image_rootfs + '-orig' + path):
130 shutil.copyfile(self.image_rootfs + '-orig' + path, self.image_rootfs + path)
128 131
129 # Copy files located in /usr/lib/debug or /usr/src/debug 132 # Copy files located in /usr/lib/debug or /usr/src/debug
130 for dir in ["/usr/lib/debug", "/usr/src/debug"]: 133 for dir in ["/usr/lib/debug", "/usr/src/debug"]:
@@ -160,25 +163,26 @@ class Rootfs(object, metaclass=ABCMeta):
160 bb.note(" Install extra debug packages...") 163 bb.note(" Install extra debug packages...")
161 self.pm.install(extra_debug_pkgs.split(), True) 164 self.pm.install(extra_debug_pkgs.split(), True)
162 165
166 bb.note(" Removing package database...")
167 for path in package_paths:
168 if os.path.isdir(self.image_rootfs + path):
169 shutil.rmtree(self.image_rootfs + path)
170 elif os.path.isfile(self.image_rootfs + path):
171 os.remove(self.image_rootfs + path)
172
163 bb.note(" Rename debug rootfs...") 173 bb.note(" Rename debug rootfs...")
164 try: 174 try:
165 shutil.rmtree(self.image_rootfs + '-dbg') 175 shutil.rmtree(self.image_rootfs + '-dbg')
166 except: 176 except:
167 pass 177 pass
168 os.rename(self.image_rootfs, self.image_rootfs + '-dbg') 178 bb.utils.rename(self.image_rootfs, self.image_rootfs + '-dbg')
169 179
170 bb.note(" Restoreing original rootfs...") 180 bb.note(" Restoring original rootfs...")
171 os.rename(self.image_rootfs + '-orig', self.image_rootfs) 181 bb.utils.rename(self.image_rootfs + '-orig', self.image_rootfs)
172 182
173 def _exec_shell_cmd(self, cmd): 183 def _exec_shell_cmd(self, cmd):
174 fakerootcmd = self.d.getVar('FAKEROOT')
175 if fakerootcmd is not None:
176 exec_cmd = [fakerootcmd, cmd]
177 else:
178 exec_cmd = cmd
179
180 try: 184 try:
181 subprocess.check_output(exec_cmd, stderr=subprocess.STDOUT) 185 subprocess.check_output(cmd, stderr=subprocess.STDOUT)
182 except subprocess.CalledProcessError as e: 186 except subprocess.CalledProcessError as e:
183 return("Command '%s' returned %d:\n%s" % (e.cmd, e.returncode, e.output)) 187 return("Command '%s' returned %d:\n%s" % (e.cmd, e.returncode, e.output))
184 188
@@ -190,9 +194,17 @@ class Rootfs(object, metaclass=ABCMeta):
190 post_process_cmds = self.d.getVar("ROOTFS_POSTPROCESS_COMMAND") 194 post_process_cmds = self.d.getVar("ROOTFS_POSTPROCESS_COMMAND")
191 rootfs_post_install_cmds = self.d.getVar('ROOTFS_POSTINSTALL_COMMAND') 195 rootfs_post_install_cmds = self.d.getVar('ROOTFS_POSTINSTALL_COMMAND')
192 196
193 bb.utils.mkdirhier(self.image_rootfs) 197 def make_last(command, commands):
198 commands = commands.split()
199 if command in commands:
200 commands.remove(command)
201 commands.append(command)
202 return " ".join(commands)
194 203
195 bb.utils.mkdirhier(self.deploydir) 204 # We want this to run as late as possible, in particular after
205 # systemd_sysusers_create and set_user_group. Using :append is not enough
206 post_process_cmds = make_last("tidy_shadowutils_files", post_process_cmds)
207 post_process_cmds = make_last("rootfs_reproducible", post_process_cmds)
196 208
197 execute_pre_post_process(self.d, pre_process_cmds) 209 execute_pre_post_process(self.d, pre_process_cmds)
198 210
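
The make_last helper in the hunk above is easiest to see with a worked example; this standalone snippet uses the same two command names, with the helper's indentation reconstructed from the flattened hunk:

    def make_last(command, commands):
        # Move `command` to the end of a space-separated list, if present
        commands = commands.split()
        if command in commands:
            commands.remove(command)
            commands.append(command)
        return " ".join(commands)

    cmds = "rootfs_reproducible set_user_group tidy_shadowutils_files"
    cmds = make_last("tidy_shadowutils_files", cmds)
    cmds = make_last("rootfs_reproducible", cmds)
    print(cmds)  # set_user_group tidy_shadowutils_files rootfs_reproducible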
@@ -250,16 +262,18 @@ class Rootfs(object, metaclass=ABCMeta):
250 262
251 263
252 def _uninstall_unneeded(self): 264 def _uninstall_unneeded(self):
253 # Remove unneeded init script symlinks 265 # Remove the run-postinsts package if no delayed postinsts are found
254 delayed_postinsts = self._get_delayed_postinsts() 266 delayed_postinsts = self._get_delayed_postinsts()
255 if delayed_postinsts is None: 267 if delayed_postinsts is None:
256 if os.path.exists(self.d.expand("${IMAGE_ROOTFS}${sysconfdir}/init.d/run-postinsts")): 268 if os.path.exists(self.d.expand("${IMAGE_ROOTFS}${sysconfdir}/init.d/run-postinsts")) or os.path.exists(self.d.expand("${IMAGE_ROOTFS}${systemd_system_unitdir}/run-postinsts.service")):
257 self._exec_shell_cmd(["update-rc.d", "-f", "-r", 269 self.pm.remove(["run-postinsts"])
258 self.d.getVar('IMAGE_ROOTFS'),
259 "run-postinsts", "remove"])
260 270
261 image_rorfs = bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs", 271 image_rorfs = bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs",
272 True, False, self.d) and \
273 not bb.utils.contains("IMAGE_FEATURES",
274 "read-only-rootfs-delayed-postinsts",
262 True, False, self.d) 275 True, False, self.d)
276
263 image_rorfs_force = self.d.getVar('FORCE_RO_REMOVE') 277 image_rorfs_force = self.d.getVar('FORCE_RO_REMOVE')
264 278
265 if image_rorfs or image_rorfs_force == "1": 279 if image_rorfs or image_rorfs_force == "1":
@@ -304,10 +318,20 @@ class Rootfs(object, metaclass=ABCMeta):
304 self._exec_shell_cmd(['ldconfig', '-r', self.image_rootfs, '-c', 318 self._exec_shell_cmd(['ldconfig', '-r', self.image_rootfs, '-c',
305 'new', '-v', '-X']) 319 'new', '-v', '-X'])
306 320
321 image_rorfs = bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs",
322 True, False, self.d)
323 ldconfig_in_features = bb.utils.contains("DISTRO_FEATURES", "ldconfig",
324 True, False, self.d)
325 if image_rorfs or not ldconfig_in_features:
326 ldconfig_cache_dir = os.path.join(self.image_rootfs, "var/cache/ldconfig")
327 if os.path.exists(ldconfig_cache_dir):
328 bb.note("Removing ldconfig auxiliary cache...")
329 shutil.rmtree(ldconfig_cache_dir)
330
307 def _check_for_kernel_modules(self, modules_dir): 331 def _check_for_kernel_modules(self, modules_dir):
308 for root, dirs, files in os.walk(modules_dir, topdown=True): 332 for root, dirs, files in os.walk(modules_dir, topdown=True):
309 for name in files: 333 for name in files:
310 found_ko = name.endswith(".ko") 334 found_ko = name.endswith((".ko", ".ko.gz", ".ko.xz", ".ko.zst"))
311 if found_ko: 335 if found_ko:
312 return found_ko 336 return found_ko
313 return False 337 return False
@@ -319,17 +343,31 @@ class Rootfs(object, metaclass=ABCMeta):
319 bb.note("No Kernel Modules found, not running depmod") 343 bb.note("No Kernel Modules found, not running depmod")
320 return 344 return
321 345
322 kernel_abi_ver_file = oe.path.join(self.d.getVar('PKGDATA_DIR'), "kernel-depmod", 346 pkgdatadir = self.d.getVar('PKGDATA_DIR')
323 'kernel-abiversion')
324 if not os.path.exists(kernel_abi_ver_file):
325 bb.fatal("No kernel-abiversion file found (%s), cannot run depmod, aborting" % kernel_abi_ver_file)
326 347
327 kernel_ver = open(kernel_abi_ver_file).read().strip(' \n') 348 # PKGDATA_DIR can include multiple kernels so we run depmod for each
328 versioned_modules_dir = os.path.join(self.image_rootfs, modules_dir, kernel_ver) 349 # one of them.
350 for direntry in os.listdir(pkgdatadir):
351 match = re.match('(.*)-depmod', direntry)
352 if not match:
353 continue
354 kernel_package_name = match.group(1)
355
356 kernel_abi_ver_file = oe.path.join(pkgdatadir, direntry, kernel_package_name + '-abiversion')
357 if not os.path.exists(kernel_abi_ver_file):
358 bb.fatal("No kernel-abiversion file found (%s), cannot run depmod, aborting" % kernel_abi_ver_file)
329 359
330 bb.utils.mkdirhier(versioned_modules_dir) 360 with open(kernel_abi_ver_file) as f:
361 kernel_ver = f.read().strip(' \n')
331 362
332 self._exec_shell_cmd(['depmodwrapper', '-a', '-b', self.image_rootfs, kernel_ver]) 363 versioned_modules_dir = os.path.join(self.image_rootfs, modules_dir, kernel_ver)
364
365 if os.path.exists(versioned_modules_dir):
366 bb.note("Running depmodwrapper for %s ..." % versioned_modules_dir)
367 if self._exec_shell_cmd(['depmodwrapper', '-a', '-b', self.image_rootfs, kernel_ver, kernel_package_name]):
368 bb.fatal("Kernel modules dependency generation failed")
369 else:
370 bb.note("Not running depmodwrapper for %s since directory does not exist" % versioned_modules_dir)
333 371
334 """ 372 """
335 Create devfs: 373 Create devfs:
@@ -378,6 +416,10 @@ def create_rootfs(d, manifest_dir=None, progress_reporter=None, logcatcher=None)
378 416
379 417
380def image_list_installed_packages(d, rootfs_dir=None): 418def image_list_installed_packages(d, rootfs_dir=None):
 419 # There's no rootfs for baremetal images
420 if bb.data.inherits_class('baremetal-image', d):
421 return ""
422
381 if not rootfs_dir: 423 if not rootfs_dir:
382 rootfs_dir = d.getVar('IMAGE_ROOTFS') 424 rootfs_dir = d.getVar('IMAGE_ROOTFS')
383 425
@@ -386,12 +428,3 @@ def image_list_installed_packages(d, rootfs_dir=None):
386 import importlib 428 import importlib
387 cls = importlib.import_module('oe.package_manager.' + img_type) 429 cls = importlib.import_module('oe.package_manager.' + img_type)
388 return cls.PMPkgsList(d, rootfs_dir).list_pkgs() 430 return cls.PMPkgsList(d, rootfs_dir).list_pkgs()
389
390if __name__ == "__main__":
391 """
392 We should be able to run this as a standalone script, from outside bitbake
393 environment.
394 """
395 """
396 TBD
397 """
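
A hedged sketch of the per-kernel depmod discovery added above: it scans a pkgdata directory for *-depmod entries and derives the kernel package name and abiversion path the same way the hunk does (the directory layout here is assumed for illustration):

    import os
    import re

    def find_depmod_kernels(pkgdatadir):
        # Yields (kernel package name, kernel ABI version) pairs
        for direntry in os.listdir(pkgdatadir):
            m = re.match(r"(.*)-depmod", direntry)
            if not m:
                continue
            kernel_package_name = m.group(1)
            abiver = os.path.join(pkgdatadir, direntry,
                                  kernel_package_name + "-abiversion")
            if os.path.exists(abiver):
                with open(abiver) as f:
                    yield kernel_package_name, f.read().strip(" \n")

    # e.g. a "kernel-depmod" entry would yield something like ("kernel", "6.6.21")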
diff --git a/meta/lib/rootfspostcommands.py b/meta/lib/oe/rootfspostcommands.py
index fdb9f5b850..5386eea409 100644
--- a/meta/lib/rootfspostcommands.py
+++ b/meta/lib/oe/rootfspostcommands.py
@@ -1,16 +1,19 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
5import os 7import os
6 8
7def sort_file(filename, mapping): 9def sort_shadowutils_file(filename, mapping):
8 """ 10 """
9 Sorts a passwd or group file based on the numeric ID in the third column. 11 Sorts a passwd or group file based on the numeric ID in the third column.
10 If a mapping is given, the name from the first column is mapped via that 12 If a mapping is given, the name from the first column is mapped via that
11 dictionary instead (necessary for /etc/shadow and /etc/gshadow). If not, 13 dictionary instead (necessary for /etc/shadow and /etc/gshadow). If not,
12 a new mapping is created on the fly and returned. 14 a new mapping is created on the fly and returned.
13 """ 15 """
16
14 new_mapping = {} 17 new_mapping = {}
15 with open(filename, 'rb+') as f: 18 with open(filename, 'rb+') as f:
16 lines = f.readlines() 19 lines = f.readlines()
@@ -31,30 +34,57 @@ def sort_file(filename, mapping):
31 # We overwrite the entire file, i.e. no truncate() necessary. 34 # We overwrite the entire file, i.e. no truncate() necessary.
32 f.seek(0) 35 f.seek(0)
33 f.write(b''.join(lines)) 36 f.write(b''.join(lines))
37
34 return new_mapping 38 return new_mapping
35 39
36def remove_backup(filename): 40def sort_shadowutils_files(sysconfdir):
37 """ 41 """
38 Removes the backup file for files like /etc/passwd. 42 Sorts shadow-utils 'passwd' and 'group' files in a rootfs' /etc directory
43 by ID.
39 """ 44 """
40 backup_filename = filename + '-'
41 if os.path.exists(backup_filename):
42 os.unlink(backup_filename)
43 45
44def sort_passwd(sysconfdir):
45 """
46 Sorts passwd and group files in a rootfs /etc directory by ID.
47 Backup files are sometimes are inconsistent and then cannot be
48 sorted (YOCTO #11043), and more importantly, are not needed in
49 the initial rootfs, so they get deleted.
50 """
51 for main, shadow in (('passwd', 'shadow'), 46 for main, shadow in (('passwd', 'shadow'),
52 ('group', 'gshadow')): 47 ('group', 'gshadow')):
53 filename = os.path.join(sysconfdir, main) 48 filename = os.path.join(sysconfdir, main)
54 remove_backup(filename)
55 if os.path.exists(filename): 49 if os.path.exists(filename):
56 mapping = sort_file(filename, None) 50 mapping = sort_shadowutils_file(filename, None)
57 filename = os.path.join(sysconfdir, shadow) 51 filename = os.path.join(sysconfdir, shadow)
58 remove_backup(filename)
59 if os.path.exists(filename): 52 if os.path.exists(filename):
60 sort_file(filename, mapping) 53 sort_shadowutils_file(filename, mapping)
54
55def remove_shadowutils_backup_file(filename):
56 """
57 Remove shadow-utils backup file for files like /etc/passwd.
58 """
59
60 backup_filename = filename + '-'
61 if os.path.exists(backup_filename):
62 os.unlink(backup_filename)
63
64def remove_shadowutils_backup_files(sysconfdir):
65 """
66 Remove shadow-utils backup files in a rootfs /etc directory. They are not
67 needed in the initial root filesystem and sorting them can be inconsistent
68 (YOCTO #11043).
69 """
70
71 for filename in (
72 'group',
73 'gshadow',
74 'passwd',
75 'shadow',
76 'subgid',
77 'subuid',
78 ):
79 filepath = os.path.join(sysconfdir, filename)
80 remove_shadowutils_backup_file(filepath)
81
82def tidy_shadowutils_files(sysconfdir):
83 """
84 Tidy up shadow-utils files.
85 """
86
87 remove_shadowutils_backup_files(sysconfdir)
88 sort_shadowutils_files(sysconfdir)
89
90 return True
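
To illustrate the sorting contract of sort_shadowutils_file, here is a self-contained sketch on in-memory lines (the entries are made up): passwd/group sort by the numeric ID in the third field, and the matching shadow file reuses the resulting name order:

    passwd = [b"root:x:0:0::/root:/bin/sh\n",
              b"myuser:x:1000:1000::/home/myuser:/bin/sh\n",
              b"daemon:x:1:1::/:/sbin/nologin\n"]
    passwd.sort(key=lambda line: int(line.split(b":")[2]))

    # Name -> position mapping, applied to the corresponding shadow file
    mapping = {line.split(b":")[0]: i for i, line in enumerate(passwd)}
    shadow = [b"myuser:*:19000:0:99999:7:::\n", b"root:*:19000:0:99999:7:::\n"]
    shadow.sort(key=lambda line: mapping[line.split(b":")[0]])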
diff --git a/meta/lib/oe/rust.py b/meta/lib/oe/rust.py
new file mode 100644
index 0000000000..1dc9cf150d
--- /dev/null
+++ b/meta/lib/oe/rust.py
@@ -0,0 +1,11 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7# Handle mismatches between `uname -m`-style output and Rust's arch names
8def arch_to_rust_arch(arch):
9 if arch == "ppc64le":
10 return "powerpc64le"
11 return arch
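
Usage is a one-liner; a quick self-check (the helper is restated so the snippet runs on its own):

    def arch_to_rust_arch(arch):
        # ppc64le is the only mismatch handled; other arches pass through
        return "powerpc64le" if arch == "ppc64le" else arch

    assert arch_to_rust_arch("ppc64le") == "powerpc64le"
    assert arch_to_rust_arch("aarch64") == "aarch64"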
diff --git a/meta/lib/oe/sbom.py b/meta/lib/oe/sbom.py
new file mode 100644
index 0000000000..fd4b6895d8
--- /dev/null
+++ b/meta/lib/oe/sbom.py
@@ -0,0 +1,120 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
 7import collections
 8import os
9DepRecipe = collections.namedtuple("DepRecipe", ("doc", "doc_sha1", "recipe"))
10DepSource = collections.namedtuple("DepSource", ("doc", "doc_sha1", "recipe", "file"))
11
12
13def get_recipe_spdxid(d):
14 return "SPDXRef-%s-%s" % ("Recipe", d.getVar("PN"))
15
16
17def get_download_spdxid(d, idx):
18 return "SPDXRef-Download-%s-%d" % (d.getVar("PN"), idx)
19
20
21def get_package_spdxid(pkg):
22 return "SPDXRef-Package-%s" % pkg
23
24
25def get_source_file_spdxid(d, idx):
26 return "SPDXRef-SourceFile-%s-%d" % (d.getVar("PN"), idx)
27
28
29def get_packaged_file_spdxid(pkg, idx):
30 return "SPDXRef-PackagedFile-%s-%d" % (pkg, idx)
31
32
33def get_image_spdxid(img):
34 return "SPDXRef-Image-%s" % img
35
36
37def get_sdk_spdxid(sdk):
38 return "SPDXRef-SDK-%s" % sdk
39
40
41def _doc_path_by_namespace(spdx_deploy, arch, doc_namespace):
42 return spdx_deploy / "by-namespace" / arch / doc_namespace.replace("/", "_")
43
44
45def doc_find_by_namespace(spdx_deploy, search_arches, doc_namespace):
46 for pkgarch in search_arches:
47 p = _doc_path_by_namespace(spdx_deploy, pkgarch, doc_namespace)
48 if os.path.exists(p):
49 return p
50 return None
51
52
53def _doc_path_by_hashfn(spdx_deploy, arch, doc_name, hashfn):
54 return (
55 spdx_deploy / "by-hash" / arch / hashfn.split()[1] / (doc_name + ".spdx.json")
56 )
57
58
59def doc_find_by_hashfn(spdx_deploy, search_arches, doc_name, hashfn):
60 for pkgarch in search_arches:
61 p = _doc_path_by_hashfn(spdx_deploy, pkgarch, doc_name, hashfn)
62 if os.path.exists(p):
63 return p
64 return None
65
66
67def doc_path(spdx_deploy, doc_name, arch, subdir):
68 return spdx_deploy / arch / subdir / (doc_name + ".spdx.json")
69
70
71def write_doc(d, spdx_doc, arch, subdir, spdx_deploy=None, indent=None):
72 from pathlib import Path
73
74 if spdx_deploy is None:
75 spdx_deploy = Path(d.getVar("SPDXDEPLOY"))
76
77 dest = doc_path(spdx_deploy, spdx_doc.name, arch, subdir)
78 dest.parent.mkdir(exist_ok=True, parents=True)
79 with dest.open("wb") as f:
80 doc_sha1 = spdx_doc.to_json(f, sort_keys=True, indent=indent)
81
82 l = _doc_path_by_namespace(spdx_deploy, arch, spdx_doc.documentNamespace)
83 l.parent.mkdir(exist_ok=True, parents=True)
84 l.symlink_to(os.path.relpath(dest, l.parent))
85
86 l = _doc_path_by_hashfn(
87 spdx_deploy, arch, spdx_doc.name, d.getVar("BB_HASHFILENAME")
88 )
89 l.parent.mkdir(exist_ok=True, parents=True)
90 l.symlink_to(os.path.relpath(dest, l.parent))
91
92 return doc_sha1
93
94
95def read_doc(fn):
96 import hashlib
97 import oe.spdx
98 import io
99 import contextlib
100
101 @contextlib.contextmanager
102 def get_file():
103 if isinstance(fn, io.IOBase):
104 yield fn
105 else:
106 with fn.open("rb") as f:
107 yield f
108
109 with get_file() as f:
110 sha1 = hashlib.sha1()
111 while True:
112 chunk = f.read(4096)
113 if not chunk:
114 break
115 sha1.update(chunk)
116
117 f.seek(0)
118 doc = oe.spdx.SPDXDocument.from_json(f)
119
120 return (doc, sha1.hexdigest())
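
The deploy layout produced by write_doc and consumed by the doc_find_* helpers can be sketched with plain path arithmetic; the deploy root, document name, namespace and hash below are hypothetical:

    from pathlib import Path

    spdx_deploy = Path("/deploy/spdx")
    arch, subdir, name = "core2-64", "recipes", "recipe-foo"
    namespace = "https://example.com/spdxdocs/foo-1234"
    hashfn = "deadbeef cafef00d"   # BB_HASHFILENAME-style; second field is used

    dest = spdx_deploy / arch / subdir / (name + ".spdx.json")
    by_ns = spdx_deploy / "by-namespace" / arch / namespace.replace("/", "_")
    by_hash = (spdx_deploy / "by-hash" / arch / hashfn.split()[1]
               / (name + ".spdx.json"))
    # write_doc() writes dest, then points by_ns and by_hash at it via
    # relative symlinks so later lookups need only the namespace or hash.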
diff --git a/meta/lib/oe/sbom30.py b/meta/lib/oe/sbom30.py
new file mode 100644
index 0000000000..227ac51877
--- /dev/null
+++ b/meta/lib/oe/sbom30.py
@@ -0,0 +1,1096 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7from pathlib import Path
8
9import oe.spdx30
10import bb
11import re
12import hashlib
13import uuid
14import os
15import oe.spdx_common
16from datetime import datetime, timezone
17
18OE_SPDX_BASE = "https://rdf.openembedded.org/spdx/3.0/"
19
20VEX_VERSION = "1.0.0"
21
22SPDX_BUILD_TYPE = "http://openembedded.org/bitbake"
23
24OE_ALIAS_PREFIX = "http://spdxdocs.org/openembedded-alias/by-doc-hash/"
25OE_DOC_ALIAS_PREFIX = "http://spdxdocs.org/openembedded-alias/doc/"
26
27
28@oe.spdx30.register(OE_SPDX_BASE + "id-alias")
29class OEIdAliasExtension(oe.spdx30.extension_Extension):
30 """
31 This extension allows an Element to provide an internal alias for the SPDX
32 ID. Since SPDX requires unique URIs for each SPDX ID, most of the objects
33 created have a unique UUID namespace and the unihash of the task encoded in
34 their SPDX ID. However, this causes a problem for referencing documents
35 across recipes, since the taskhash of a dependency may not factor into the
36 taskhash of the current task and thus the current task won't rebuild and
37 see the new SPDX ID when the dependency changes (e.g. ABI safe recipes and
38 tasks).
39
40 To help work around this, this extension provides a non-unique alias for an
41 Element by which it can be referenced from other tasks/recipes. When a
42 final SBoM is created, references to these aliases will be replaced with
43 the actual unique SPDX ID.
44
45 Most Elements will automatically get an alias created when they are written
46 out if they do not already have one. To suppress the creation of an alias,
47 add an extension with a blank `alias` property.
48
49
 50 It is an internal extension that should be removed when writing out a final
 51 SBoM.
52 """
53
54 CLOSED = True
55 INTERNAL = True
56
57 @classmethod
58 def _register_props(cls):
59 super()._register_props()
60 cls._add_property(
61 "alias",
62 oe.spdx30.StringProp(),
63 OE_SPDX_BASE + "alias",
64 max_count=1,
65 )
66
67 cls._add_property(
68 "link_name",
69 oe.spdx30.StringProp(),
70 OE_SPDX_BASE + "link-name",
71 max_count=1,
72 )
73
74
75@oe.spdx30.register(OE_SPDX_BASE + "file-name-alias")
76class OEFileNameAliasExtension(oe.spdx30.extension_Extension):
77 CLOSED = True
78 INTERNAL = True
79
80 @classmethod
81 def _register_props(cls):
82 super()._register_props()
83 cls._add_property(
84 "aliases",
85 oe.spdx30.ListProp(oe.spdx30.StringProp()),
86 OE_SPDX_BASE + "filename-alias",
87 )
88
89
90@oe.spdx30.register(OE_SPDX_BASE + "license-scanned")
91class OELicenseScannedExtension(oe.spdx30.extension_Extension):
92 """
93 The presence of this extension means the file has already been scanned for
94 license information
95 """
96
97 CLOSED = True
98 INTERNAL = True
99
100
101@oe.spdx30.register(OE_SPDX_BASE + "document-extension")
102class OEDocumentExtension(oe.spdx30.extension_Extension):
103 """
104 This extension is added to a SpdxDocument to indicate various useful bits
105 of information about its contents
106 """
107
108 CLOSED = True
109
110 @classmethod
111 def _register_props(cls):
112 super()._register_props()
113 cls._add_property(
114 "is_native",
115 oe.spdx30.BooleanProp(),
116 OE_SPDX_BASE + "is-native",
117 max_count=1,
118 )
119
120
121def spdxid_hash(*items):
122 h = hashlib.md5()
123 for i in items:
124 if isinstance(i, oe.spdx30.Element):
125 h.update(i._id.encode("utf-8"))
126 else:
127 h.update(i.encode("utf-8"))
128 return h.hexdigest()
129
130
131def spdx_sde(d):
132 sde = d.getVar("SOURCE_DATE_EPOCH")
133 if not sde:
134 return datetime.now(timezone.utc)
135
136 return datetime.fromtimestamp(int(sde), timezone.utc)
137
138
139def get_element_link_id(e):
140 """
141 Get the string ID which should be used to link to an Element. If the
142 element has an alias, that will be preferred, otherwise its SPDX ID will be
143 used.
144 """
145 ext = get_alias(e)
146 if ext is not None and ext.alias:
147 return ext.alias
148 return e._id
149
150
151def get_alias(obj):
152 for ext in obj.extension:
153 if not isinstance(ext, OEIdAliasExtension):
154 continue
155 return ext
156
157 return None
158
159
160def hash_id(_id):
161 return hashlib.sha256(_id.encode("utf-8")).hexdigest()
162
163
164def to_list(l):
165 if isinstance(l, set):
166 l = sorted(list(l))
167
168 if not isinstance(l, (list, tuple)):
169 raise TypeError("Must be a list or tuple. Got %s" % type(l))
170
171 return l
172
173
174class ObjectSet(oe.spdx30.SHACLObjectSet):
175 def __init__(self, d):
176 super().__init__()
177 self.d = d
178 self.alias_prefix = None
179
180 def create_index(self):
181 self.by_sha256_hash = {}
182 super().create_index()
183
184 def add_index(self, obj):
185 # Check that all elements are given an ID before being inserted
186 if isinstance(obj, oe.spdx30.Element):
187 if not obj._id:
188 raise ValueError("Element missing ID")
189
190 alias_ext = get_alias(obj)
191 if alias_ext is not None and alias_ext.alias:
192 self.obj_by_id[alias_ext.alias] = obj
193
194 for v in obj.verifiedUsing:
195 if not isinstance(v, oe.spdx30.Hash):
196 continue
197
198 if v.algorithm != oe.spdx30.HashAlgorithm.sha256:
199 continue
200
201 self.by_sha256_hash.setdefault(v.hashValue, set()).add(obj)
202
203 super().add_index(obj)
204 if isinstance(obj, oe.spdx30.SpdxDocument):
205 self.doc = obj
206 alias_ext = get_alias(obj)
207 if alias_ext is not None and alias_ext.alias:
208 self.alias_prefix = OE_ALIAS_PREFIX + hash_id(alias_ext.alias) + "/"
209
210 def __filter_obj(self, obj, attr_filter):
211 return all(getattr(obj, k) == v for k, v in attr_filter.items())
212
213 def foreach_filter(self, typ, *, match_subclass=True, **attr_filter):
214 for obj in self.foreach_type(typ, match_subclass=match_subclass):
215 if self.__filter_obj(obj, attr_filter):
216 yield obj
217
218 def find_filter(self, typ, *, match_subclass=True, **attr_filter):
219 for obj in self.foreach_filter(
220 typ, match_subclass=match_subclass, **attr_filter
221 ):
222 return obj
223 return None
224
225 def foreach_root(self, typ, **attr_filter):
226 for obj in self.doc.rootElement:
227 if not isinstance(obj, typ):
228 continue
229
230 if self.__filter_obj(obj, attr_filter):
231 yield obj
232
233 def find_root(self, typ, **attr_filter):
234 for obj in self.foreach_root(typ, **attr_filter):
235 return obj
236 return None
237
238 def add_root(self, obj):
239 self.add(obj)
240 self.doc.rootElement.append(obj)
241 return obj
242
243 def is_native(self):
244 for e in self.doc.extension:
245 if not isinstance(e, oe.sbom30.OEDocumentExtension):
246 continue
247
248 if e.is_native is not None:
249 return e.is_native
250
251 return False
252
253 def set_is_native(self, is_native):
254 for e in self.doc.extension:
255 if not isinstance(e, oe.sbom30.OEDocumentExtension):
256 continue
257
258 e.is_native = is_native
259 return
260
261 if is_native:
262 self.doc.extension.append(oe.sbom30.OEDocumentExtension(is_native=True))
263
264 def add_aliases(self):
265 for o in self.foreach_type(oe.spdx30.Element):
266 self.set_element_alias(o)
267
268 def new_alias_id(self, obj, replace):
269 unihash = self.d.getVar("BB_UNIHASH")
270 namespace = self.get_namespace()
271 if unihash not in obj._id:
272 bb.warn(f"Unihash {unihash} not found in {obj._id}")
273 return None
274
275 if namespace not in obj._id:
276 bb.warn(f"Namespace {namespace} not found in {obj._id}")
277 return None
278
279 return obj._id.replace(unihash, "UNIHASH").replace(
280 namespace, replace + self.d.getVar("PN")
281 )
282
283 def remove_internal_extensions(self):
284 def remove(o):
285 o.extension = [e for e in o.extension if not getattr(e, "INTERNAL", False)]
286
287 for o in self.foreach_type(oe.spdx30.Element):
288 remove(o)
289
290 if self.doc:
291 remove(self.doc)
292
293 def get_namespace(self):
294 namespace_uuid = uuid.uuid5(
295 uuid.NAMESPACE_DNS, self.d.getVar("SPDX_UUID_NAMESPACE")
296 )
297 pn = self.d.getVar("PN")
298 return "%s/%s-%s" % (
299 self.d.getVar("SPDX_NAMESPACE_PREFIX"),
300 pn,
301 str(uuid.uuid5(namespace_uuid, pn)),
302 )
303
304 def set_element_alias(self, e):
305 if not e._id or e._id.startswith("_:"):
306 return
307
308 alias_ext = get_alias(e)
309 if alias_ext is None:
310 alias_id = self.new_alias_id(e, self.alias_prefix)
311 if alias_id is not None:
312 e.extension.append(OEIdAliasExtension(alias=alias_id))
313 elif (
314 alias_ext.alias
315 and not isinstance(e, oe.spdx30.SpdxDocument)
316 and not alias_ext.alias.startswith(self.alias_prefix)
317 ):
318 bb.warn(
319 f"Element {e._id} has alias {alias_ext.alias}, but it should have prefix {self.alias_prefix}"
320 )
321
322 def new_spdxid(self, *suffix, include_unihash=True):
323 items = [self.get_namespace()]
324 if include_unihash:
325 unihash = self.d.getVar("BB_UNIHASH")
326 items.append(unihash)
327 items.extend(re.sub(r"[^a-zA-Z0-9_-]", "_", s) for s in suffix)
328 return "/".join(items)
329
330 def new_import(self, key):
331 base = f"SPDX_IMPORTS_{key}"
332 spdxid = self.d.getVar(f"{base}_spdxid")
333 if not spdxid:
334 bb.fatal(f"{key} is not a valid SPDX_IMPORTS key")
335
336 for i in self.doc.import_:
337 if i.externalSpdxId == spdxid:
338 # Already imported
339 return spdxid
340
341 m = oe.spdx30.ExternalMap(externalSpdxId=spdxid)
342
343 uri = self.d.getVar(f"{base}_uri")
344 if uri:
345 m.locationHint = uri
346
347 for pyname, algorithm in oe.spdx30.HashAlgorithm.NAMED_INDIVIDUALS.items():
348 value = self.d.getVar(f"{base}_hash_{pyname}")
349 if value:
350 m.verifiedUsing.append(
351 oe.spdx30.Hash(
352 algorithm=algorithm,
353 hashValue=value,
354 )
355 )
356
357 self.doc.import_.append(m)
358 return spdxid
359
360 def new_agent(self, varname, *, creation_info=None, add=True):
361 ref_varname = self.d.getVar(f"{varname}_ref")
362 if ref_varname:
363 if ref_varname == varname:
364 bb.fatal(f"{varname} cannot reference itself")
365 return self.new_agent(ref_varname, creation_info=creation_info)
366
367 import_key = self.d.getVar(f"{varname}_import")
368 if import_key:
369 return self.new_import(import_key)
370
371 name = self.d.getVar(f"{varname}_name")
372 if not name:
373 return None
374
375 spdxid = self.new_spdxid("agent", name)
376 agent = self.find_by_id(spdxid)
377 if agent is not None:
378 return agent
379
380 agent_type = self.d.getVar("%s_type" % varname)
381 if agent_type == "person":
382 agent = oe.spdx30.Person()
383 elif agent_type == "software":
384 agent = oe.spdx30.SoftwareAgent()
385 elif agent_type == "organization":
386 agent = oe.spdx30.Organization()
387 elif not agent_type or agent_type == "agent":
388 agent = oe.spdx30.Agent()
389 else:
390 bb.fatal("Unknown agent type '%s' in %s_type" % (agent_type, varname))
391
392 agent._id = spdxid
393 agent.creationInfo = creation_info or self.doc.creationInfo
394 agent.name = name
395
396 comment = self.d.getVar("%s_comment" % varname)
397 if comment:
398 agent.comment = comment
399
400 for (
401 pyname,
402 idtype,
403 ) in oe.spdx30.ExternalIdentifierType.NAMED_INDIVIDUALS.items():
404 value = self.d.getVar("%s_id_%s" % (varname, pyname))
405 if value:
406 agent.externalIdentifier.append(
407 oe.spdx30.ExternalIdentifier(
408 externalIdentifierType=idtype,
409 identifier=value,
410 )
411 )
412
413 if add:
414 self.add(agent)
415
416 return agent
417
418 def new_creation_info(self):
419 creation_info = oe.spdx30.CreationInfo()
420
421 name = "%s %s" % (
422 self.d.getVar("SPDX_TOOL_NAME"),
423 self.d.getVar("SPDX_TOOL_VERSION"),
424 )
425 tool = self.add(
426 oe.spdx30.Tool(
427 _id=self.new_spdxid("tool", name),
428 creationInfo=creation_info,
429 name=name,
430 )
431 )
432
433 authors = []
434 for a in self.d.getVar("SPDX_AUTHORS").split():
435 varname = "SPDX_AUTHORS_%s" % a
436 author = self.new_agent(varname, creation_info=creation_info)
437
438 if not author:
439 bb.fatal("Unable to find or create author %s" % a)
440
441 authors.append(author)
442
443 creation_info.created = spdx_sde(self.d)
444 creation_info.specVersion = self.d.getVar("SPDX_VERSION")
445 creation_info.createdBy = authors
446 creation_info.createdUsing = [tool]
447
448 return creation_info
449
450 def copy_creation_info(self, copy):
451 c = oe.spdx30.CreationInfo(
452 created=spdx_sde(self.d),
453 specVersion=self.d.getVar("SPDX_VERSION"),
454 )
455
456 for author in copy.createdBy:
457 if isinstance(author, str):
458 c.createdBy.append(author)
459 else:
460 c.createdBy.append(author._id)
461
462 for tool in copy.createdUsing:
463 if isinstance(tool, str):
464 c.createdUsing.append(tool)
465 else:
466 c.createdUsing.append(tool._id)
467
468 return c
469
470 def new_annotation(self, subject, comment, typ):
471 return self.add(
472 oe.spdx30.Annotation(
473 _id=self.new_spdxid("annotation", spdxid_hash(comment, typ)),
474 creationInfo=self.doc.creationInfo,
475 annotationType=typ,
476 subject=subject,
477 statement=comment,
478 )
479 )
480
481 def _new_relationship(
482 self,
483 cls,
484 from_,
485 typ,
486 to,
487 *,
488 spdxid_name="relationship",
489 **props,
490 ):
491 from_ = to_list(from_)
492 to = to_list(to)
493
494 if not from_:
495 return []
496
497 if not to:
498 to = [oe.spdx30.IndividualElement.NoneElement]
499
500 ret = []
501
502 for f in from_:
503 hash_args = [typ, f]
504 for k in sorted(props.keys()):
505 hash_args.append(props[k])
506 hash_args.extend(to)
507
508 relationship = self.add(
509 cls(
510 _id=self.new_spdxid(spdxid_name, spdxid_hash(*hash_args)),
511 creationInfo=self.doc.creationInfo,
512 from_=f,
513 relationshipType=typ,
514 to=to,
515 **props,
516 )
517 )
518 ret.append(relationship)
519
520 return ret
521
522 def new_relationship(self, from_, typ, to):
523 return self._new_relationship(oe.spdx30.Relationship, from_, typ, to)
524
525 def new_scoped_relationship(self, from_, typ, scope, to):
526 return self._new_relationship(
527 oe.spdx30.LifecycleScopedRelationship,
528 from_,
529 typ,
530 to,
531 scope=scope,
532 )
533
534 def new_license_expression(
535 self, license_expression, license_data, license_text_map={}
536 ):
537 license_list_version = license_data["licenseListVersion"]
538 # SPDX 3 requires that the license list version be a semver
 539 # MAJOR.MINOR.MICRO, but the actual license list version might be
 540 # MAJOR.MINOR on some older versions. As such, manually append a .0
 541 # micro version if it's missing to keep SPDX happy
542 if license_list_version.count(".") < 2:
543 license_list_version += ".0"
544
545 spdxid = [
546 "license",
547 license_list_version,
548 re.sub(r"[^a-zA-Z0-9_-]", "_", license_expression),
549 ]
550
551 license_text = [
552 (k, license_text_map[k]) for k in sorted(license_text_map.keys())
553 ]
554
555 if not license_text:
556 lic = self.find_filter(
557 oe.spdx30.simplelicensing_LicenseExpression,
558 simplelicensing_licenseExpression=license_expression,
559 simplelicensing_licenseListVersion=license_list_version,
560 )
561 if lic is not None:
562 return lic
563 else:
564 spdxid.append(spdxid_hash(*(v for _, v in license_text)))
565 lic = self.find_by_id(self.new_spdxid(*spdxid))
566 if lic is not None:
567 return lic
568
569 lic = self.add(
570 oe.spdx30.simplelicensing_LicenseExpression(
571 _id=self.new_spdxid(*spdxid),
572 creationInfo=self.doc.creationInfo,
573 simplelicensing_licenseExpression=license_expression,
574 simplelicensing_licenseListVersion=license_list_version,
575 )
576 )
577
578 for key, value in license_text:
579 lic.simplelicensing_customIdToUri.append(
580 oe.spdx30.DictionaryEntry(key=key, value=value)
581 )
582
583 return lic
584
585 def scan_declared_licenses(self, spdx_file, filepath, license_data):
586 for e in spdx_file.extension:
587 if isinstance(e, OELicenseScannedExtension):
588 return
589
590 file_licenses = set()
591 for extracted_lic in oe.spdx_common.extract_licenses(filepath):
592 lic = self.new_license_expression(extracted_lic, license_data)
593 self.set_element_alias(lic)
594 file_licenses.add(lic)
595
596 self.new_relationship(
597 [spdx_file],
598 oe.spdx30.RelationshipType.hasDeclaredLicense,
599 [oe.sbom30.get_element_link_id(lic_alias) for lic_alias in file_licenses],
600 )
601 spdx_file.extension.append(OELicenseScannedExtension())
602
603 def new_file(self, _id, name, path, *, purposes=[]):
604 sha256_hash = bb.utils.sha256_file(path)
605
606 for f in self.by_sha256_hash.get(sha256_hash, []):
607 if not isinstance(f, oe.spdx30.software_File):
608 continue
609
610 if purposes:
611 new_primary = purposes[0]
612 new_additional = []
613
614 if f.software_primaryPurpose:
615 new_additional.append(f.software_primaryPurpose)
616 new_additional.extend(f.software_additionalPurpose)
617
618 new_additional = sorted(
619 list(set(p for p in new_additional if p != new_primary))
620 )
621
622 f.software_primaryPurpose = new_primary
623 f.software_additionalPurpose = new_additional
624
625 if f.name != name:
626 for e in f.extension:
627 if isinstance(e, OEFileNameAliasExtension):
628 e.aliases.append(name)
629 break
630 else:
631 f.extension.append(OEFileNameAliasExtension(aliases=[name]))
632
633 return f
634
635 spdx_file = oe.spdx30.software_File(
636 _id=_id,
637 creationInfo=self.doc.creationInfo,
638 name=name,
639 )
640 if purposes:
641 spdx_file.software_primaryPurpose = purposes[0]
642 spdx_file.software_additionalPurpose = purposes[1:]
643
644 spdx_file.verifiedUsing.append(
645 oe.spdx30.Hash(
646 algorithm=oe.spdx30.HashAlgorithm.sha256,
647 hashValue=sha256_hash,
648 )
649 )
650
651 return self.add(spdx_file)
652
653 def new_cve_vuln(self, cve):
654 v = oe.spdx30.security_Vulnerability()
655 v._id = self.new_spdxid("vulnerability", cve)
656 v.creationInfo = self.doc.creationInfo
657
658 v.externalIdentifier.append(
659 oe.spdx30.ExternalIdentifier(
660 externalIdentifierType=oe.spdx30.ExternalIdentifierType.cve,
661 identifier=cve,
662 identifierLocator=[
663 f"https://cveawg.mitre.org/api/cve/{cve}",
664 f"https://www.cve.org/CVERecord?id={cve}",
665 ],
666 )
667 )
668 return self.add(v)
669
670 def new_vex_patched_relationship(self, from_, to):
671 return self._new_relationship(
672 oe.spdx30.security_VexFixedVulnAssessmentRelationship,
673 from_,
674 oe.spdx30.RelationshipType.fixedIn,
675 to,
676 spdxid_name="vex-fixed",
677 security_vexVersion=VEX_VERSION,
678 )
679
680 def new_vex_unpatched_relationship(self, from_, to):
681 return self._new_relationship(
682 oe.spdx30.security_VexAffectedVulnAssessmentRelationship,
683 from_,
684 oe.spdx30.RelationshipType.affects,
685 to,
686 spdxid_name="vex-affected",
687 security_vexVersion=VEX_VERSION,
688 security_actionStatement="Mitigation action unknown",
689 )
690
691 def new_vex_ignored_relationship(self, from_, to, *, impact_statement):
692 return self._new_relationship(
693 oe.spdx30.security_VexNotAffectedVulnAssessmentRelationship,
694 from_,
695 oe.spdx30.RelationshipType.doesNotAffect,
696 to,
697 spdxid_name="vex-not-affected",
698 security_vexVersion=VEX_VERSION,
699 security_impactStatement=impact_statement,
700 )
701
702 def import_bitbake_build_objset(self):
703 deploy_dir_spdx = Path(self.d.getVar("DEPLOY_DIR_SPDX"))
704 bb_objset = load_jsonld(
705 self.d, deploy_dir_spdx / "bitbake.spdx.json", required=True
706 )
707 self.doc.import_.extend(bb_objset.doc.import_)
708 self.update(bb_objset.objects)
709
710 return bb_objset
711
712 def import_bitbake_build(self):
713 def find_bitbake_build(objset):
714 return objset.find_filter(
715 oe.spdx30.build_Build,
716 build_buildType=SPDX_BUILD_TYPE,
717 )
718
719 build = find_bitbake_build(self)
720 if build:
721 return build
722
723 bb_objset = self.import_bitbake_build_objset()
724 build = find_bitbake_build(bb_objset)
725 if build is None:
 726            bb.fatal("No build found in bitbake.spdx.json")
727
728 return build
729
730 def new_task_build(self, name, typ):
731 current_task = self.d.getVar("BB_CURRENTTASK")
732 pn = self.d.getVar("PN")
733
734 build = self.add(
735 oe.spdx30.build_Build(
736 _id=self.new_spdxid("build", name),
737 creationInfo=self.doc.creationInfo,
738 name=f"{pn}:do_{current_task}:{name}",
739 build_buildType=f"{SPDX_BUILD_TYPE}/do_{current_task}/{typ}",
740 )
741 )
742
743 if self.d.getVar("SPDX_INCLUDE_BITBAKE_PARENT_BUILD") == "1":
744 bitbake_build = self.import_bitbake_build()
745
746 self.new_relationship(
747 [bitbake_build],
748 oe.spdx30.RelationshipType.ancestorOf,
749 [build],
750 )
751
752 if self.d.getVar("SPDX_INCLUDE_BUILD_VARIABLES") == "1":
753 for varname in sorted(self.d.keys()):
754 if varname.startswith("__"):
755 continue
756
757 value = self.d.getVar(varname, expand=False)
758
759 # TODO: Deal with non-string values
760 if not isinstance(value, str):
761 continue
762
763 build.build_parameter.append(
764 oe.spdx30.DictionaryEntry(key=varname, value=value)
765 )
766
767 return build
768
769 def new_archive(self, archive_name):
770 return self.add(
771 oe.spdx30.software_File(
772 _id=self.new_spdxid("archive", str(archive_name)),
773 creationInfo=self.doc.creationInfo,
774 name=str(archive_name),
775 software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.archive,
776 )
777 )
778
779 @classmethod
780 def new_objset(cls, d, name, copy_from_bitbake_doc=True):
781 objset = cls(d)
782
783 document = oe.spdx30.SpdxDocument(
784 _id=objset.new_spdxid("document", name),
785 name=name,
786 )
787
788 document.extension.append(
789 OEIdAliasExtension(
790 alias=objset.new_alias_id(
791 document,
792 OE_DOC_ALIAS_PREFIX + d.getVar("PN") + "/" + name + "/",
793 ),
794 )
795 )
796 objset.doc = document
797 objset.add_index(document)
798
799 if copy_from_bitbake_doc:
800 bb_objset = objset.import_bitbake_build_objset()
801 document.creationInfo = objset.copy_creation_info(
802 bb_objset.doc.creationInfo
803 )
804 else:
805 document.creationInfo = objset.new_creation_info()
806
807 return objset
808
809 def expand_collection(self, *, add_objectsets=[]):
810 """
811 Expands a collection to pull in all missing elements
812
813 Returns the set of ids that could not be found to link into the document
814 """
815 missing_spdxids = set()
816 imports = {e.externalSpdxId: e for e in self.doc.import_}
817
818 def merge_doc(other):
819 nonlocal imports
820
821 for e in other.doc.import_:
822 if not e.externalSpdxId in imports:
823 imports[e.externalSpdxId] = e
824
825 self.objects |= other.objects
826
827 for o in add_objectsets:
828 merge_doc(o)
829
830 needed_spdxids = self.link()
831 provided_spdxids = set(self.obj_by_id.keys())
832
833 while True:
834 import_spdxids = set(imports.keys())
835 searching_spdxids = (
836 needed_spdxids - provided_spdxids - missing_spdxids - import_spdxids
837 )
838 if not searching_spdxids:
839 break
840
841 spdxid = searching_spdxids.pop()
842 bb.debug(
843 1,
844 f"Searching for {spdxid}. Remaining: {len(searching_spdxids)}, Total: {len(provided_spdxids)}, Missing: {len(missing_spdxids)}, Imports: {len(import_spdxids)}",
845 )
846 dep_objset, dep_path = find_by_spdxid(self.d, spdxid)
847
848 if dep_objset:
849 dep_provided = set(dep_objset.obj_by_id.keys())
850 if spdxid not in dep_provided:
851 bb.fatal(f"{spdxid} not found in {dep_path}")
852 provided_spdxids |= dep_provided
853 needed_spdxids |= dep_objset.missing_ids
854 merge_doc(dep_objset)
855 else:
856 missing_spdxids.add(spdxid)
857
858 self.doc.import_ = sorted(imports.values(), key=lambda e: e.externalSpdxId)
859 bb.debug(1, "Linking...")
860 self.link()
861
862 # Manually go through all of the simplelicensing_customIdToUri DictionaryEntry
863 # items and resolve any aliases to actual objects.
864 for lic in self.foreach_type(oe.spdx30.simplelicensing_LicenseExpression):
865 for d in lic.simplelicensing_customIdToUri:
866 if d.value.startswith(OE_ALIAS_PREFIX):
867 obj = self.find_by_id(d.value)
868 if obj is not None:
869 d.value = obj._id
870 else:
871 self.missing_ids.add(d.value)
872
873 self.missing_ids -= set(imports.keys())
874 return self.missing_ids
875
876
877def load_jsonld(d, path, required=False):
878 deserializer = oe.spdx30.JSONLDDeserializer()
879 objset = ObjectSet(d)
880 try:
881 with path.open("rb") as f:
882 deserializer.read(f, objset)
883 except FileNotFoundError:
884 if required:
885 bb.fatal("No SPDX document named %s found" % path)
886 return None
887
888 if not objset.doc:
889 bb.fatal("SPDX Document %s has no SPDXDocument element" % path)
890 return None
891
892 objset.objects.remove(objset.doc)
893 return objset
894
895
896def jsonld_arch_path(d, arch, subdir, name, deploydir=None):
897 if deploydir is None:
898 deploydir = Path(d.getVar("DEPLOY_DIR_SPDX"))
899 return deploydir / arch / subdir / (name + ".spdx.json")
900
901
902def jsonld_hash_path(h):
903 return Path("by-spdxid-hash") / h[:2], h
904
905
906def load_jsonld_by_arch(d, arch, subdir, name, *, required=False):
907 path = jsonld_arch_path(d, arch, subdir, name)
908 objset = load_jsonld(d, path, required=required)
909 if objset is not None:
910 return (objset, path)
911 return (None, None)
912
913
914def find_jsonld(d, subdir, name, *, required=False):
915 package_archs = d.getVar("SPDX_MULTILIB_SSTATE_ARCHS").split()
916 package_archs.reverse()
917
918 for arch in package_archs:
919 objset, path = load_jsonld_by_arch(d, arch, subdir, name)
920 if objset is not None:
921 return (objset, path)
922
923 if required:
924 bb.fatal("Could not find a %s SPDX document named %s" % (subdir, name))
925
926 return (None, None)
927
928
929def write_jsonld_doc(d, objset, dest):
930 if not isinstance(objset, ObjectSet):
 931        bb.fatal("Only an ObjectSet can be serialized")
932 return
933
934 if not objset.doc:
935 bb.fatal("ObjectSet is missing a SpdxDocument")
936 return
937
938 objset.doc.rootElement = sorted(list(set(objset.doc.rootElement)))
939 objset.doc.profileConformance = sorted(
940 list(
941 getattr(oe.spdx30.ProfileIdentifierType, p)
942 for p in d.getVar("SPDX_PROFILES").split()
943 )
944 )
945
946 dest.parent.mkdir(exist_ok=True, parents=True)
947
948 if d.getVar("SPDX_PRETTY") == "1":
949 serializer = oe.spdx30.JSONLDSerializer(
950 indent=2,
951 )
952 else:
953 serializer = oe.spdx30.JSONLDInlineSerializer()
954
955 objset.objects.add(objset.doc)
956 with dest.open("wb") as f:
957 serializer.write(objset, f, force_at_graph=True)
958 objset.objects.remove(objset.doc)
959
960
961def write_recipe_jsonld_doc(
962 d,
963 objset,
964 subdir,
965 deploydir,
966 *,
967 create_spdx_id_links=True,
968):
969 pkg_arch = d.getVar("SSTATE_PKGARCH")
970
971 dest = jsonld_arch_path(d, pkg_arch, subdir, objset.doc.name, deploydir=deploydir)
972
973 def link_id(_id):
974 hash_path = jsonld_hash_path(hash_id(_id))
975
976 link_name = jsonld_arch_path(
977 d,
978 pkg_arch,
979 *hash_path,
980 deploydir=deploydir,
981 )
982 try:
983 link_name.parent.mkdir(exist_ok=True, parents=True)
984 link_name.symlink_to(os.path.relpath(dest, link_name.parent))
985 except:
986 target = link_name.readlink()
987 bb.warn(
988 f"Unable to link {_id} in {dest} as {link_name}. Already points to {target}"
989 )
990 raise
991
992 return hash_path[-1]
993
994 objset.add_aliases()
995
996 try:
997 if create_spdx_id_links:
998 alias_ext = get_alias(objset.doc)
999 if alias_ext is not None and alias_ext.alias:
1000 alias_ext.link_name = link_id(alias_ext.alias)
1001
1002 finally:
1003 # It is really helpful for debugging if the JSON document is written
1004 # out, so always do that even if there is an error making the links
1005 write_jsonld_doc(d, objset, dest)
1006
1007
1008def find_root_obj_in_jsonld(d, subdir, fn_name, obj_type, **attr_filter):
1009 objset, fn = find_jsonld(d, subdir, fn_name, required=True)
1010
1011 spdx_obj = objset.find_root(obj_type, **attr_filter)
1012 if not spdx_obj:
1013 bb.fatal("No root %s found in %s" % (obj_type.__name__, fn))
1014
1015 return spdx_obj, objset
1016
1017
1018def load_obj_in_jsonld(d, arch, subdir, fn_name, obj_type, **attr_filter):
1019 objset, fn = load_jsonld_by_arch(d, arch, subdir, fn_name, required=True)
1020
1021 spdx_obj = objset.find_filter(obj_type, **attr_filter)
1022 if not spdx_obj:
1023 bb.fatal("No %s found in %s" % (obj_type.__name__, fn))
1024
1025 return spdx_obj, objset
1026
1027
1028def find_by_spdxid(d, spdxid, *, required=False):
1029 if spdxid.startswith(OE_ALIAS_PREFIX):
1030 h = spdxid[len(OE_ALIAS_PREFIX) :].split("/", 1)[0]
1031 return find_jsonld(d, *jsonld_hash_path(h), required=required)
1032 return find_jsonld(d, *jsonld_hash_path(hash_id(spdxid)), required=required)
1033
1034
1035def create_sbom(d, name, root_elements, add_objectsets=[]):
1036 objset = ObjectSet.new_objset(d, name)
1037
1038 sbom = objset.add(
1039 oe.spdx30.software_Sbom(
1040 _id=objset.new_spdxid("sbom", name),
1041 name=name,
1042 creationInfo=objset.doc.creationInfo,
1043 software_sbomType=[oe.spdx30.software_SbomType.build],
1044 rootElement=root_elements,
1045 )
1046 )
1047
1048 missing_spdxids = objset.expand_collection(add_objectsets=add_objectsets)
1049 if missing_spdxids:
1050 bb.warn(
1051 "The following SPDX IDs were unable to be resolved:\n "
1052 + "\n ".join(sorted(list(missing_spdxids)))
1053 )
1054
1055 # Filter out internal extensions from final SBoMs
1056 objset.remove_internal_extensions()
1057
1058 # SBoM should be the only root element of the document
1059 objset.doc.rootElement = [sbom]
1060
1061 # De-duplicate licenses
1062 unique = set()
1063 dedup = {}
1064 for lic in objset.foreach_type(oe.spdx30.simplelicensing_LicenseExpression):
1065 for u in unique:
1066 if (
1067 u.simplelicensing_licenseExpression
1068 == lic.simplelicensing_licenseExpression
1069 and u.simplelicensing_licenseListVersion
1070 == lic.simplelicensing_licenseListVersion
1071 ):
1072 dedup[lic] = u
1073 break
1074 else:
1075 unique.add(lic)
1076
1077 if dedup:
1078 for rel in objset.foreach_filter(
1079 oe.spdx30.Relationship,
1080 relationshipType=oe.spdx30.RelationshipType.hasDeclaredLicense,
1081 ):
1082 rel.to = [dedup.get(to, to) for to in rel.to]
1083
1084 for rel in objset.foreach_filter(
1085 oe.spdx30.Relationship,
1086 relationshipType=oe.spdx30.RelationshipType.hasConcludedLicense,
1087 ):
1088 rel.to = [dedup.get(to, to) for to in rel.to]
1089
1090 for k, v in dedup.items():
1091 bb.debug(1, f"Removing duplicate License {k._id} -> {v._id}")
1092 objset.objects.remove(k)
1093
1094 objset.create_index()
1095
1096 return objset, sbom
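
The de-duplication pass above keys licenses on the (expression, license-list version) pair and remaps relationship targets before dropping the duplicates. A standalone sketch of the same pattern, with a hypothetical Lic class standing in for simplelicensing_LicenseExpression:

# Minimal sketch of the de-duplication pattern in create_sbom() above;
# Lic is a hypothetical stand-in for simplelicensing_LicenseExpression.
class Lic:
    def __init__(self, expr, version):
        self.expr = expr
        self.version = version

def dedup_licenses(lics):
    unique = []
    dedup = {}
    for lic in lics:
        for u in unique:
            if u.expr == lic.expr and u.version == lic.version:
                dedup[lic] = u  # lic duplicates u; references get remapped to u
                break
        else:
            unique.append(lic)
    return unique, dedup

a, b, c = Lic("MIT", "3.21"), Lic("MIT", "3.21"), Lic("GPL-2.0-only", "3.21")
unique, dedup = dedup_licenses([a, b, c])
assert unique == [a, c] and dedup == {b: a}
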
diff --git a/meta/lib/oe/sdk.py b/meta/lib/oe/sdk.py
index 37b59afd1a..9fe0fbb752 100644
--- a/meta/lib/oe/sdk.py
+++ b/meta/lib/oe/sdk.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -68,7 +70,7 @@ class Sdk(object, metaclass=ABCMeta):
68 #FIXME: using umbrella exc catching because bb.utils method raises it 70 #FIXME: using umbrella exc catching because bb.utils method raises it
69 except Exception as e: 71 except Exception as e:
70 bb.debug(1, "printing the stack trace\n %s" %traceback.format_exc()) 72 bb.debug(1, "printing the stack trace\n %s" %traceback.format_exc())
71 bb.error("unable to place %s in final SDK location" % sourcefile) 73 bb.fatal("unable to place %s in final SDK location" % sourcefile)
72 74
73 def mkdirhier(self, dirpath): 75 def mkdirhier(self, dirpath):
74 try: 76 try:
@@ -115,6 +117,10 @@ def sdk_list_installed_packages(d, target, rootfs_dir=None):
115 117
116 rootfs_dir = [sdk_output, os.path.join(sdk_output, target_path)][target is True] 118 rootfs_dir = [sdk_output, os.path.join(sdk_output, target_path)][target is True]
117 119
120 if target is False:
121 ipkgconf_sdk_target = d.getVar("IPKGCONF_SDK")
122 d.setVar("IPKGCONF_TARGET", ipkgconf_sdk_target)
123
118 img_type = d.getVar('IMAGE_PKGTYPE') 124 img_type = d.getVar('IMAGE_PKGTYPE')
119 import importlib 125 import importlib
120 cls = importlib.import_module('oe.package_manager.' + img_type) 126 cls = importlib.import_module('oe.package_manager.' + img_type)
@@ -142,13 +148,11 @@ def get_extra_sdkinfo(sstate_dir):
142 extra_info['filesizes'] = {} 148 extra_info['filesizes'] = {}
143 for root, _, files in os.walk(sstate_dir): 149 for root, _, files in os.walk(sstate_dir):
144 for fn in files: 150 for fn in files:
145 if fn.endswith('.tgz'): 151 # Note that this makes an assumption about the sstate filenames
152 if '.tar.' in fn and not fn.endswith('.siginfo'):
146 fsize = int(math.ceil(float(os.path.getsize(os.path.join(root, fn))) / 1024)) 153 fsize = int(math.ceil(float(os.path.getsize(os.path.join(root, fn))) / 1024))
147 task = fn.rsplit(':',1)[1].split('_',1)[1].split(',')[0] 154 task = fn.rsplit(':',1)[1].split('_',1)[1].split(',')[0]
148 origtotal = extra_info['tasksizes'].get(task, 0) 155 origtotal = extra_info['tasksizes'].get(task, 0)
149 extra_info['tasksizes'][task] = origtotal + fsize 156 extra_info['tasksizes'][task] = origtotal + fsize
150 extra_info['filesizes'][fn] = fsize 157 extra_info['filesizes'][fn] = fsize
151 return extra_info 158 return extra_info
152
153if __name__ == "__main__":
154 pass
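
The new filename check above deliberately hedges ("makes an assumption about the sstate filenames"): everything after the last colon is expected to be <hash>_<task-plus-extension>. A hedged illustration with an invented archive name:

# Illustration of the parsing in get_extra_sdkinfo(); the sample filename is
# invented and only mimics the assumed layout
# "<colon-separated fields>:<hash>_<task>.tar.<compressor>".
fn = "sstate:zlib:core2-64:1.3:r0:3:0123abcd_populate_sysroot.tar.zst"
tail = fn.rsplit(':', 1)[1]                 # "0123abcd_populate_sysroot.tar.zst"
task = tail.split('_', 1)[1].split(',')[0]  # "populate_sysroot.tar.zst"
assert task == "populate_sysroot.tar.zst"

Note that the task key keeps the archive extension, so per-task totals stay consistent only while a single compressor is in use.
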
diff --git a/meta/lib/oe/spdx.py b/meta/lib/oe/spdx.py
new file mode 100644
index 0000000000..7aaf2af5ed
--- /dev/null
+++ b/meta/lib/oe/spdx.py
@@ -0,0 +1,357 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7#
8# This library is intended to capture the JSON SPDX specification in a
9# type-safe manner. It is not intended to encode any particular OE-specific
10# behaviors; see sbom.py for that.
11#
12# The SPDX specification document doesn't cover the JSON syntax for
13# particular configurations, which can make it hard to determine what the JSON
14# syntax should be. I've found it is actually much simpler to read the official
15# SPDX JSON schema, which can be found at https://github.com/spdx/spdx-spec
16# in schemas/spdx-schema.json
17#
18
19import hashlib
20import itertools
21import json
22
23SPDX_VERSION = "2.2"
24
25
26#
27# The following are the support classes that are used to implement SPDX objects
28#
29
30class _Property(object):
31 """
32 A generic SPDX object property. The different types will derive from this
33 class
34 """
35
36 def __init__(self, *, default=None):
37 self.default = default
38
39 def setdefault(self, dest, name):
40 if self.default is not None:
41 dest.setdefault(name, self.default)
42
43
44class _String(_Property):
45 """
46 A scalar string property for an SPDX object
47 """
48
49 def __init__(self, **kwargs):
50 super().__init__(**kwargs)
51
52 def set_property(self, attrs, name):
53 def get_helper(obj):
54 return obj._spdx[name]
55
56 def set_helper(obj, value):
57 obj._spdx[name] = value
58
59 def del_helper(obj):
60 del obj._spdx[name]
61
62 attrs[name] = property(get_helper, set_helper, del_helper)
63
64 def init(self, source):
65 return source
66
67
68class _Object(_Property):
69 """
70 A scalar SPDX object property of a SPDX object
71 """
72
73 def __init__(self, cls, **kwargs):
74 super().__init__(**kwargs)
75 self.cls = cls
76
77 def set_property(self, attrs, name):
78 def get_helper(obj):
79 if name not in obj._spdx:
80 obj._spdx[name] = self.cls()
81 return obj._spdx[name]
82
83 def set_helper(obj, value):
84 obj._spdx[name] = value
85
86 def del_helper(obj):
87 del obj._spdx[name]
88
89 attrs[name] = property(get_helper, set_helper, del_helper)
90
91 def init(self, source):
92 return self.cls(**source)
93
94
95class _ListProperty(_Property):
96 """
97 A list of SPDX properties
98 """
99
100 def __init__(self, prop, **kwargs):
101 super().__init__(**kwargs)
102 self.prop = prop
103
104 def set_property(self, attrs, name):
105 def get_helper(obj):
106 if name not in obj._spdx:
107 obj._spdx[name] = []
108 return obj._spdx[name]
109
110 def set_helper(obj, value):
111 obj._spdx[name] = list(value)
112
113 def del_helper(obj):
114 del obj._spdx[name]
115
116 attrs[name] = property(get_helper, set_helper, del_helper)
117
118 def init(self, source):
119 return [self.prop.init(o) for o in source]
120
121
122class _StringList(_ListProperty):
123 """
124 A list of strings as a property for an SPDX object
125 """
126
127 def __init__(self, **kwargs):
128 super().__init__(_String(), **kwargs)
129
130
131class _ObjectList(_ListProperty):
132 """
133 A list of SPDX objects as a property for an SPDX object
134 """
135
136 def __init__(self, cls, **kwargs):
137 super().__init__(_Object(cls), **kwargs)
138
139
140class MetaSPDXObject(type):
141 """
142 A metaclass that allows properties (anything derived from a _Property
143 class) to be defined for a SPDX object
144 """
145 def __new__(mcls, name, bases, attrs):
146 attrs["_properties"] = {}
147
148 for key in attrs.keys():
149 if isinstance(attrs[key], _Property):
150 prop = attrs[key]
151 attrs["_properties"][key] = prop
152 prop.set_property(attrs, key)
153
154 return super().__new__(mcls, name, bases, attrs)
155
156
157class SPDXObject(metaclass=MetaSPDXObject):
158 """
159 The base SPDX object; all SPDX spec classes must derive from this class
160 """
161 def __init__(self, **d):
162 self._spdx = {}
163
164 for name, prop in self._properties.items():
165 prop.setdefault(self._spdx, name)
166 if name in d:
167 self._spdx[name] = prop.init(d[name])
168
169 def serializer(self):
170 return self._spdx
171
172 def __setattr__(self, name, value):
173 if name in self._properties or name == "_spdx":
174 super().__setattr__(name, value)
175 return
176 raise KeyError("%r is not a valid SPDX property" % name)
177
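
Together, MetaSPDXObject and SPDXObject mean that declaring _Property attributes on a subclass gives ordinary attribute access backed by the _spdx dict, while undeclared attributes are rejected. A small sketch (Tool is a made-up class, not part of the SPDX spec):

# Sketch only: Tool is a made-up class exercising the machinery above.
class Tool(SPDXObject):
    name = _String()
    version = _String(default="unknown")

t = Tool(name="bitbake")
assert t.serializer() == {"version": "unknown", "name": "bitbake"}
t.version = "2.8"      # routed through the generated property
del t.version          # removes the key from the backing dict
try:
    t.vendor = "oe"    # not a declared property -> rejected
except KeyError:
    pass
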
178#
179# These are the SPDX objects implemented from the spec. The *only* properties
180# that can be added to these objects are ones directly specified in the SPDX
181# spec, however you may add helper functions to make operations easier.
182#
183# Defaults should *only* be specified if the SPDX spec says there is a certain
184# required value for a field (e.g. dataLicense), or if the field is mandatory
185# and has some sane "this field is unknown" (e.g. "NOASSERTION")
186#
187
188class SPDXAnnotation(SPDXObject):
189 annotationDate = _String()
190 annotationType = _String()
191 annotator = _String()
192 comment = _String()
193
194class SPDXChecksum(SPDXObject):
195 algorithm = _String()
196 checksumValue = _String()
197
198
199class SPDXRelationship(SPDXObject):
200 spdxElementId = _String()
201 relatedSpdxElement = _String()
202 relationshipType = _String()
203 comment = _String()
204 annotations = _ObjectList(SPDXAnnotation)
205
206
207class SPDXExternalReference(SPDXObject):
208 referenceCategory = _String()
209 referenceType = _String()
210 referenceLocator = _String()
211
212
213class SPDXPackageVerificationCode(SPDXObject):
214 packageVerificationCodeValue = _String()
215 packageVerificationCodeExcludedFiles = _StringList()
216
217
218class SPDXPackage(SPDXObject):
219 ALLOWED_CHECKSUMS = [
220 "SHA1",
221 "SHA224",
222 "SHA256",
223 "SHA384",
224 "SHA512",
225 "MD2",
226 "MD4",
227 "MD5",
228 "MD6",
229 ]
230
231 name = _String()
232 SPDXID = _String()
233 versionInfo = _String()
234 downloadLocation = _String(default="NOASSERTION")
235 supplier = _String(default="NOASSERTION")
236 homepage = _String()
237 licenseConcluded = _String(default="NOASSERTION")
238 licenseDeclared = _String(default="NOASSERTION")
239 summary = _String()
240 description = _String()
241 sourceInfo = _String()
242 copyrightText = _String(default="NOASSERTION")
243 licenseInfoFromFiles = _StringList(default=["NOASSERTION"])
244 externalRefs = _ObjectList(SPDXExternalReference)
245 packageVerificationCode = _Object(SPDXPackageVerificationCode)
246 hasFiles = _StringList()
247 packageFileName = _String()
248 annotations = _ObjectList(SPDXAnnotation)
249 checksums = _ObjectList(SPDXChecksum)
250
251
252class SPDXFile(SPDXObject):
253 SPDXID = _String()
254 fileName = _String()
255 licenseConcluded = _String(default="NOASSERTION")
256 copyrightText = _String(default="NOASSERTION")
257 licenseInfoInFiles = _StringList(default=["NOASSERTION"])
258 checksums = _ObjectList(SPDXChecksum)
259 fileTypes = _StringList()
260
261
262class SPDXCreationInfo(SPDXObject):
263 created = _String()
264 licenseListVersion = _String()
265 comment = _String()
266 creators = _StringList()
267
268
269class SPDXExternalDocumentRef(SPDXObject):
270 externalDocumentId = _String()
271 spdxDocument = _String()
272 checksum = _Object(SPDXChecksum)
273
274
275class SPDXExtractedLicensingInfo(SPDXObject):
276 name = _String()
277 comment = _String()
278 licenseId = _String()
279 extractedText = _String()
280
281
282class SPDXDocument(SPDXObject):
283 spdxVersion = _String(default="SPDX-" + SPDX_VERSION)
284 dataLicense = _String(default="CC0-1.0")
285 SPDXID = _String(default="SPDXRef-DOCUMENT")
286 name = _String()
287 documentNamespace = _String()
288 creationInfo = _Object(SPDXCreationInfo)
289 packages = _ObjectList(SPDXPackage)
290 files = _ObjectList(SPDXFile)
291 relationships = _ObjectList(SPDXRelationship)
292 externalDocumentRefs = _ObjectList(SPDXExternalDocumentRef)
293 hasExtractedLicensingInfos = _ObjectList(SPDXExtractedLicensingInfo)
294
295 def __init__(self, **d):
296 super().__init__(**d)
297
298 def to_json(self, f, *, sort_keys=False, indent=None, separators=None):
299 class Encoder(json.JSONEncoder):
300 def default(self, o):
301 if isinstance(o, SPDXObject):
302 return o.serializer()
303
304 return super().default(o)
305
306 sha1 = hashlib.sha1()
307 for chunk in Encoder(
308 sort_keys=sort_keys,
309 indent=indent,
310 separators=separators,
311 ).iterencode(self):
312 chunk = chunk.encode("utf-8")
313 f.write(chunk)
314 sha1.update(chunk)
315
316 return sha1.hexdigest()
317
318 @classmethod
319 def from_json(cls, f):
320 return cls(**json.load(f))
321
322 def add_relationship(self, _from, relationship, _to, *, comment=None, annotation=None):
323 if isinstance(_from, SPDXObject):
324 from_spdxid = _from.SPDXID
325 else:
326 from_spdxid = _from
327
328 if isinstance(_to, SPDXObject):
329 to_spdxid = _to.SPDXID
330 else:
331 to_spdxid = _to
332
333 r = SPDXRelationship(
334 spdxElementId=from_spdxid,
335 relatedSpdxElement=to_spdxid,
336 relationshipType=relationship,
337 )
338
339 if comment is not None:
340 r.comment = comment
341
342 if annotation is not None:
343 r.annotations.append(annotation)
344
345 self.relationships.append(r)
346
347 def find_by_spdxid(self, spdxid):
348 for o in itertools.chain(self.packages, self.files):
349 if o.SPDXID == spdxid:
350 return o
351 return None
352
353 def find_external_document_ref(self, namespace):
354 for r in self.externalDocumentRefs:
355 if r.spdxDocument == namespace:
356 return r
357 return None
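
A short usage sketch of SPDXDocument; all values are invented, and since to_json() writes UTF-8 encoded chunks, the target stream must be binary:

# Usage sketch with invented values. to_json() returns the SHA-1 of the
# serialized output.
import io

doc = SPDXDocument(name="example-doc",
                   documentNamespace="http://spdx.org/spdxdocs/example")
pkg = SPDXPackage(name="zlib", SPDXID="SPDXRef-zlib", versionInfo="1.3")
doc.packages.append(pkg)
doc.add_relationship(doc, "DESCRIBES", pkg)

buf = io.BytesIO()
digest = doc.to_json(buf, indent=2)
buf.seek(0)
round_trip = SPDXDocument.from_json(buf)
assert round_trip.find_by_spdxid("SPDXRef-zlib").name == "zlib"
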
diff --git a/meta/lib/oe/spdx30.py b/meta/lib/oe/spdx30.py
new file mode 100644
index 0000000000..cd97eebd18
--- /dev/null
+++ b/meta/lib/oe/spdx30.py
@@ -0,0 +1,5593 @@
1#! /usr/bin/env python3
2#
3# Generated Python bindings from a SHACL model
4#
5# This file was automatically generated by shacl2code. DO NOT MANUALLY MODIFY IT
6#
7# SPDX-License-Identifier: MIT
8
9import functools
10import hashlib
11import json
12import re
13import sys
14import threading
15import time
16from contextlib import contextmanager
17from datetime import datetime, timezone, timedelta
18from enum import Enum
19from abc import ABC, abstractmethod
20
21
22def check_type(obj, types):
23 if not isinstance(obj, types):
24 if isinstance(types, (list, tuple)):
25 raise TypeError(
26 f"Value must be one of type: {', '.join(t.__name__ for t in types)}. Got {type(obj)}"
27 )
28 raise TypeError(f"Value must be of type {types.__name__}. Got {type(obj)}")
29
30
31class Property(ABC):
32 """
33 A generic SHACL object property. The different types will derive from this
34 class
35 """
36
37 def __init__(self, *, pattern=None):
38 self.pattern = pattern
39
40 def init(self):
41 return None
42
43 def validate(self, value):
44 check_type(value, self.VALID_TYPES)
45 if self.pattern is not None and not re.search(
46 self.pattern, self.to_string(value)
47 ):
48 raise ValueError(
49 f"Value is not correctly formatted. Got '{self.to_string(value)}'"
50 )
51
52 def set(self, value):
53 return value
54
55 def check_min_count(self, value, min_count):
56 return min_count == 1
57
58 def check_max_count(self, value, max_count):
59 return max_count == 1
60
61 def elide(self, value):
62 return value is None
63
64 def walk(self, value, callback, path):
65 callback(value, path)
66
67 def iter_objects(self, value, recursive, visited):
68 return []
69
70 def link_prop(self, value, objectset, missing, visited):
71 return value
72
73 def to_string(self, value):
74 return str(value)
75
76 @abstractmethod
77 def encode(self, encoder, value, state):
78 pass
79
80 @abstractmethod
81 def decode(self, decoder, *, objectset=None):
82 pass
83
84
85class StringProp(Property):
86 """
87 A scalar string property for an SHACL object
88 """
89
90 VALID_TYPES = str
91
92 def set(self, value):
93 return str(value)
94
95 def encode(self, encoder, value, state):
96 encoder.write_string(value)
97
98 def decode(self, decoder, *, objectset=None):
99 return decoder.read_string()
100
101
102class AnyURIProp(StringProp):
103 def encode(self, encoder, value, state):
104 encoder.write_iri(value)
105
106 def decode(self, decoder, *, objectset=None):
107 return decoder.read_iri()
108
109
110class DateTimeProp(Property):
111 """
112 A Date/Time Object with optional timezone
113 """
114
115 VALID_TYPES = datetime
116 UTC_FORMAT_STR = "%Y-%m-%dT%H:%M:%SZ"
117 REGEX = r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(Z|[+-]\d{2}:\d{2})?$"
118
119 def set(self, value):
120 return self._normalize(value)
121
122 def encode(self, encoder, value, state):
123 encoder.write_datetime(self.to_string(value))
124
125 def decode(self, decoder, *, objectset=None):
126 s = decoder.read_datetime()
127 if s is None:
128 return None
129 v = self.from_string(s)
130 return self._normalize(v)
131
132 def _normalize(self, value):
133 if value.utcoffset() is None:
134 value = value.astimezone()
135 offset = value.utcoffset()
136 seconds = offset % timedelta(minutes=-1 if offset.total_seconds() < 0 else 1)
137 if seconds:
138 offset = offset - seconds
139 value = value.replace(tzinfo=timezone(offset))
140 value = value.replace(microsecond=0)
141 return value
142
143 def to_string(self, value):
144 value = self._normalize(value)
145 if value.tzinfo == timezone.utc:
146 return value.strftime(self.UTC_FORMAT_STR)
147 return value.isoformat()
148
149 def from_string(self, value):
150 if not re.match(self.REGEX, value):
151 raise ValueError(f"'{value}' is not a correctly formatted datetime")
152 if "Z" in value:
153 d = datetime(
154 *(time.strptime(value, self.UTC_FORMAT_STR)[0:6]),
155 tzinfo=timezone.utc,
156 )
157 else:
158 d = datetime.fromisoformat(value)
159
160 return self._normalize(d)
161
162
163class DateTimeStampProp(DateTimeProp):
164 """
165 A Date/Time Object with required timestamp
166 """
167
168 REGEX = r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(Z|[+-]\d{2}:\d{2})$"
169
170
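
The normalization above truncates microseconds and rounds UTC offsets to whole minutes; UTC values render with the Z suffix while any other offset falls back to isoformat(). A worked example with arbitrary datetimes:

# Worked example of DateTimeProp normalization; the datetimes are arbitrary.
from datetime import datetime, timezone, timedelta

p = DateTimeProp()
utc = datetime(2024, 5, 1, 12, 30, 15, 999999, tzinfo=timezone.utc)
assert p.to_string(utc) == "2024-05-01T12:30:15Z"       # microseconds dropped

odd = datetime(2024, 5, 1, 12, 0, 0,
               tzinfo=timezone(timedelta(hours=5, seconds=30)))
assert p.to_string(odd) == "2024-05-01T12:00:00+05:00"  # offset rounded to minutes
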
171class IntegerProp(Property):
172 VALID_TYPES = int
173
174 def set(self, value):
175 return int(value)
176
177 def encode(self, encoder, value, state):
178 encoder.write_integer(value)
179
180 def decode(self, decoder, *, objectset=None):
181 return decoder.read_integer()
182
183
184class PositiveIntegerProp(IntegerProp):
185 def validate(self, value):
186 super().validate(value)
187 if value < 1:
188 raise ValueError(f"Value must be >=1. Got {value}")
189
190
191class NonNegativeIntegerProp(IntegerProp):
192 def validate(self, value):
193 super().validate(value)
194 if value < 0:
195 raise ValueError(f"Value must be >= 0. Got {value}")
196
197
198class BooleanProp(Property):
199 VALID_TYPES = bool
200
201 def set(self, value):
202 return bool(value)
203
204 def encode(self, encoder, value, state):
205 encoder.write_bool(value)
206
207 def decode(self, decoder, *, objectset=None):
208 return decoder.read_bool()
209
210
211class FloatProp(Property):
212 VALID_TYPES = (float, int)
213
214 def set(self, value):
215 return float(value)
216
217 def encode(self, encoder, value, state):
218 encoder.write_float(value)
219
220 def decode(self, decoder, *, objectset=None):
221 return decoder.read_float()
222
223
224class IRIProp(Property):
225 def __init__(self, context=[], *, pattern=None):
226 super().__init__(pattern=pattern)
227 self.context = context
228
229 def compact(self, value):
230 for iri, compact in self.context:
231 if value == iri:
232 return compact
233 return None
234
235 def expand(self, value):
236 for iri, compact in self.context:
237 if value == compact:
238 return iri
239 return None
240
241 def iri_values(self):
242 return (iri for iri, _ in self.context)
243
244
245class ObjectProp(IRIProp):
246 """
247 A scalar SHACL object property of a SHACL object
248 """
249
250 def __init__(self, cls, required, context=[]):
251 super().__init__(context)
252 self.cls = cls
253 self.required = required
254
255 def init(self):
256 if self.required and not self.cls.IS_ABSTRACT:
257 return self.cls()
258 return None
259
260 def validate(self, value):
261 check_type(value, (self.cls, str))
262
263 def walk(self, value, callback, path):
264 if value is None:
265 return
266
267 if not isinstance(value, str):
268 value.walk(callback, path)
269 else:
270 callback(value, path)
271
272 def iter_objects(self, value, recursive, visited):
273 if value is None or isinstance(value, str):
274 return
275
276 if value not in visited:
277 visited.add(value)
278 yield value
279
280 if recursive:
281 for c in value.iter_objects(recursive=True, visited=visited):
282 yield c
283
284 def encode(self, encoder, value, state):
285 if value is None:
286 raise ValueError("Object cannot be None")
287
288 if isinstance(value, str):
289 encoder.write_iri(value, self.compact(value))
290 return
291
292 return value.encode(encoder, state)
293
294 def decode(self, decoder, *, objectset=None):
295 iri = decoder.read_iri()
296 if iri is None:
297 return self.cls.decode(decoder, objectset=objectset)
298
299 iri = self.expand(iri) or iri
300
301 if objectset is None:
302 return iri
303
304 obj = objectset.find_by_id(iri)
305 if obj is None:
306 return iri
307
308 self.validate(obj)
309 return obj
310
311 def link_prop(self, value, objectset, missing, visited):
312 if value is None:
313 return value
314
315 if isinstance(value, str):
316 o = objectset.find_by_id(value)
317 if o is not None:
318 self.validate(o)
319 return o
320
321 if missing is not None:
322 missing.add(value)
323
324 return value
325
326 # De-duplicate IDs
327 if value._id:
328 value = objectset.find_by_id(value._id, value)
329 self.validate(value)
330
331 value.link_helper(objectset, missing, visited)
332 return value
333
334
335class ListProxy(object):
336 def __init__(self, prop, data=None):
337 if data is None:
338 self.__data = []
339 else:
340 self.__data = data
341 self.__prop = prop
342
343 def append(self, value):
344 self.__prop.validate(value)
345 self.__data.append(self.__prop.set(value))
346
347 def insert(self, idx, value):
348 self.__prop.validate(value)
349 self.__data.insert(idx, self.__prop.set(value))
350
351 def extend(self, items):
352 for i in items:
353 self.append(i)
354
355 def sort(self, *args, **kwargs):
356 self.__data.sort(*args, **kwargs)
357
358 def __getitem__(self, key):
359 return self.__data[key]
360
361 def __setitem__(self, key, value):
362 if isinstance(key, slice):
363 for v in value:
364 self.__prop.validate(v)
365 self.__data[key] = [self.__prop.set(v) for v in value]
366 else:
367 self.__prop.validate(value)
368 self.__data[key] = self.__prop.set(value)
369
370 def __delitem__(self, key):
371 del self.__data[key]
372
373 def __contains__(self, item):
374 return item in self.__data
375
376 def __iter__(self):
377 return iter(self.__data)
378
379 def __len__(self):
380 return len(self.__data)
381
382 def __str__(self):
383 return str(self.__data)
384
385 def __repr__(self):
386 return repr(self.__data)
387
388 def __eq__(self, other):
389 if isinstance(other, ListProxy):
390 return self.__data == other.__data
391
392 return self.__data == other
393
394
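
ListProxy validates every mutation against the element property, so type errors surface at insertion time rather than at serialization time. A small sketch using StringProp:

# Sketch: every mutation is validated against the element property.
lp = ListProxy(StringProp())
lp.append("a")
lp.extend(["b", "c"])
lp[1] = "B"
assert list(lp) == ["a", "B", "c"] and "c" in lp
try:
    lp.append(7)    # not a str -> TypeError from validate()
except TypeError:
    pass
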
395class ListProp(Property):
396 """
397 A list of SHACL properties
398 """
399
400 VALID_TYPES = (list, ListProxy)
401
402 def __init__(self, prop):
403 super().__init__()
404 self.prop = prop
405
406 def init(self):
407 return ListProxy(self.prop)
408
409 def validate(self, value):
410 super().validate(value)
411
412 for i in value:
413 self.prop.validate(i)
414
415 def set(self, value):
416 if isinstance(value, ListProxy):
417 return value
418
419 return ListProxy(self.prop, [self.prop.set(d) for d in value])
420
421 def check_min_count(self, value, min_count):
422 check_type(value, ListProxy)
423 return len(value) >= min_count
424
425 def check_max_count(self, value, max_count):
426 check_type(value, ListProxy)
427 return len(value) <= max_count
428
429 def elide(self, value):
430 check_type(value, ListProxy)
431 return len(value) == 0
432
433 def walk(self, value, callback, path):
434 callback(value, path)
435 for idx, v in enumerate(value):
436 self.prop.walk(v, callback, path + [f"[{idx}]"])
437
438 def iter_objects(self, value, recursive, visited):
439 for v in value:
440 for c in self.prop.iter_objects(v, recursive, visited):
441 yield c
442
443 def link_prop(self, value, objectset, missing, visited):
444 data = [self.prop.link_prop(v, objectset, missing, visited) for v in value]
448
449 return ListProxy(self.prop, data=data)
450
451 def encode(self, encoder, value, state):
452 check_type(value, ListProxy)
453
454 with encoder.write_list() as list_s:
455 for v in value:
456 with list_s.write_list_item() as item_s:
457 self.prop.encode(item_s, v, state)
458
459 def decode(self, decoder, *, objectset=None):
460 data = []
461 for val_d in decoder.read_list():
462 v = self.prop.decode(val_d, objectset=objectset)
463 self.prop.validate(v)
464 data.append(v)
465
466 return ListProxy(self.prop, data=data)
467
468
469class EnumProp(IRIProp):
470 VALID_TYPES = str
471
472 def __init__(self, values, *, pattern=None):
473 super().__init__(values, pattern=pattern)
474
475 def validate(self, value):
476 super().validate(value)
477
478 valid_values = list(self.iri_values())
479 if value not in valid_values:
480 raise ValueError(
481 f"'{value}' is not a valid value. Choose one of {' '.join(valid_values)}"
482 )
483
484 def encode(self, encoder, value, state):
485 encoder.write_enum(value, self, self.compact(value))
486
487 def decode(self, decoder, *, objectset=None):
488 v = decoder.read_enum(self)
489 return self.expand(v) or v
490
491
492class NodeKind(Enum):
493 BlankNode = 1
494 IRI = 2
495 BlankNodeOrIRI = 3
496
497
498def is_IRI(s):
499 if not isinstance(s, str):
500 return False
501 if s.startswith("_:"):
502 return False
503 if ":" not in s:
504 return False
505 return True
506
507
508def is_blank_node(s):
509 if not isinstance(s, str):
510 return False
511 if not s.startswith("_:"):
512 return False
513 return True
514
515
516def register(type_iri, *, compact_type=None, abstract=False):
517 def add_class(key, c):
518 assert (
519 key not in SHACLObject.CLASSES
520 ), f"{key} already registered to {SHACLObject.CLASSES[key].__name__}"
521 SHACLObject.CLASSES[key] = c
522
523 def decorator(c):
524 global NAMED_INDIVIDUALS
525
526 assert issubclass(
527 c, SHACLObject
528 ), f"{c.__name__} is not derived from SHACLObject"
529
530 c._OBJ_TYPE = type_iri
531 c.IS_ABSTRACT = abstract
532 add_class(type_iri, c)
533
534 c._OBJ_COMPACT_TYPE = compact_type
535 if compact_type:
536 add_class(compact_type, c)
537
538 NAMED_INDIVIDUALS |= set(c.NAMED_INDIVIDUALS.values())
539
540 # Registration is deferred until the first instance of the class is created
541 # so that it has access to any other defined class
542 c._NEEDS_REG = True
543 return c
544
545 return decorator
546
547
548register_lock = threading.Lock()
549NAMED_INDIVIDUALS = set()
550
551
552@functools.total_ordering
553class SHACLObject(object):
554 CLASSES = {}
555 NODE_KIND = NodeKind.BlankNodeOrIRI
556 ID_ALIAS = None
557 IS_ABSTRACT = True
558
559 def __init__(self, **kwargs):
560 if self._is_abstract():
561 raise NotImplementedError(
562 f"{self.__class__.__name__} is abstract and cannot be implemented"
563 )
564
565 with register_lock:
566 cls = self.__class__
567 if cls._NEEDS_REG:
568 cls._OBJ_PROPERTIES = {}
569 cls._OBJ_IRIS = {}
570 cls._register_props()
571 cls._NEEDS_REG = False
572
573 self.__dict__["_obj_data"] = {}
574 self.__dict__["_obj_metadata"] = {}
575
576 for iri, prop, _, _, _, _ in self.__iter_props():
577 self.__dict__["_obj_data"][iri] = prop.init()
578
579 for k, v in kwargs.items():
580 setattr(self, k, v)
581
582 def _is_abstract(self):
583 return self.__class__.IS_ABSTRACT
584
585 @classmethod
586 def _register_props(cls):
587 cls._add_property("_id", StringProp(), iri="@id")
588
589 @classmethod
590 def _add_property(
591 cls,
592 pyname,
593 prop,
594 iri,
595 min_count=None,
596 max_count=None,
597 compact=None,
598 ):
599 if pyname in cls._OBJ_IRIS:
600 raise KeyError(f"'{pyname}' is already defined for '{cls.__name__}'")
601 if iri in cls._OBJ_PROPERTIES:
602 raise KeyError(f"'{iri}' is already defined for '{cls.__name__}'")
603
604 while hasattr(cls, pyname):
605 pyname = pyname + "_"
606
607 pyname = sys.intern(pyname)
608 iri = sys.intern(iri)
609
610 cls._OBJ_IRIS[pyname] = iri
611 cls._OBJ_PROPERTIES[iri] = (prop, min_count, max_count, pyname, compact)
612
613 def __setattr__(self, name, value):
614 if name == self.ID_ALIAS:
615 self["@id"] = value
616 return
617
618 try:
619 iri = self._OBJ_IRIS[name]
620 self[iri] = value
621 except KeyError:
622 raise AttributeError(
623 f"'{name}' is not a valid property of {self.__class__.__name__}"
624 )
625
626 def __getattr__(self, name):
627 if name in self._OBJ_IRIS:
628 return self.__dict__["_obj_data"][self._OBJ_IRIS[name]]
629
630 if name == self.ID_ALIAS:
631 return self.__dict__["_obj_data"]["@id"]
632
633 if name == "_metadata":
634 return self.__dict__["_obj_metadata"]
635
636 if name == "_IRI":
637 return self._OBJ_IRIS
638
639 if name == "TYPE":
640 return self.__class__._OBJ_TYPE
641
642 if name == "COMPACT_TYPE":
643 return self.__class__._OBJ_COMPACT_TYPE
644
645 raise AttributeError(
646 f"'{name}' is not a valid property of {self.__class__.__name__}"
647 )
648
649 def __delattr__(self, name):
650 if name == self.ID_ALIAS:
651 del self["@id"]
652 return
653
654 try:
655 iri = self._OBJ_IRIS[name]
656 del self[iri]
657 except KeyError:
658 raise AttributeError(
659 f"'{name}' is not a valid property of {self.__class__.__name__}"
660 )
661
662 def __get_prop(self, iri):
663 if iri not in self._OBJ_PROPERTIES:
664 raise KeyError(
665 f"'{iri}' is not a valid property of {self.__class__.__name__}"
666 )
667
668 return self._OBJ_PROPERTIES[iri]
669
670 def __iter_props(self):
671 for iri, v in self._OBJ_PROPERTIES.items():
672 yield iri, *v
673
674 def __getitem__(self, iri):
675 return self.__dict__["_obj_data"][iri]
676
677 def __setitem__(self, iri, value):
678 if iri == "@id":
679 if self.NODE_KIND == NodeKind.BlankNode:
680 if not is_blank_node(value):
681 raise ValueError(
682 f"{self.__class__.__name__} ({id(self)}) can only have local reference. Property '{iri}' cannot be set to '{value}' and must start with '_:'"
683 )
684 elif self.NODE_KIND == NodeKind.IRI:
685 if not is_IRI(value):
686 raise ValueError(
687 f"{self.__class__.__name__} ({id(self)}) can only have an IRI value. Property '{iri}' cannot be set to '{value}'"
688 )
689 else:
690 if not is_blank_node(value) and not is_IRI(value):
691 raise ValueError(
692 f"{self.__class__.__name__} ({id(self)}) Has invalid Property '{iri}' '{value}'. Must be a blank node or IRI"
693 )
694
695 prop, _, _, _, _ = self.__get_prop(iri)
696 prop.validate(value)
697 self.__dict__["_obj_data"][iri] = prop.set(value)
698
699 def __delitem__(self, iri):
700 prop, _, _, _, _ = self.__get_prop(iri)
701 self.__dict__["_obj_data"][iri] = prop.init()
702
703 def __iter__(self):
704 return iter(self._OBJ_PROPERTIES.keys())
705
706 def walk(self, callback, path=None):
707 """
708 Walk object tree, invoking the callback for each item
709
710 Callback has the form:
711
712 def callback(object, path):
713 """
714 if path is None:
715 path = ["."]
716
717 if callback(self, path):
718 for iri, prop, _, _, _, _ in self.__iter_props():
719 prop.walk(self.__dict__["_obj_data"][iri], callback, path + [f".{iri}"])
720
721 def property_keys(self):
722 for iri, _, _, _, pyname, compact in self.__iter_props():
723 if iri == "@id":
724 compact = self.ID_ALIAS
725 yield pyname, iri, compact
726
727 def iter_objects(self, *, recursive=False, visited=None):
728 """
729 Iterate over all objects that are children of this one
730 """
731 if visited is None:
732 visited = set()
733
734 for iri, prop, _, _, _, _ in self.__iter_props():
735 for c in prop.iter_objects(
736 self.__dict__["_obj_data"][iri], recursive=recursive, visited=visited
737 ):
738 yield c
739
740 def encode(self, encoder, state):
741 idname = self.ID_ALIAS or self._OBJ_IRIS["_id"]
742 if not self._id and self.NODE_KIND == NodeKind.IRI:
743 raise ValueError(
744 f"{self.__class__.__name__} ({id(self)}) must have an IRI for property '{idname}'"
745 )
746
747 if state.is_written(self):
748 encoder.write_iri(state.get_object_id(self))
749 return
750
751 state.add_written(self)
752
753 with encoder.write_object(
754 self,
755 state.get_object_id(self),
756 bool(self._id) or state.is_refed(self),
757 ) as obj_s:
758 self._encode_properties(obj_s, state)
759
760 def _encode_properties(self, encoder, state):
761 for iri, prop, min_count, max_count, pyname, compact in self.__iter_props():
762 value = self.__dict__["_obj_data"][iri]
763 if prop.elide(value):
764 if min_count:
765 raise ValueError(
766 f"Property '{pyname}' in {self.__class__.__name__} ({id(self)}) is required (currently {value!r})"
767 )
768 continue
769
770 if min_count is not None:
771 if not prop.check_min_count(value, min_count):
772 raise ValueError(
773 f"Property '{pyname}' in {self.__class__.__name__} ({id(self)}) requires a minimum of {min_count} elements"
774 )
775
776 if max_count is not None:
777 if not prop.check_max_count(value, max_count):
778 raise ValueError(
779 f"Property '{pyname}' in {self.__class__.__name__} ({id(self)}) requires a maximum of {max_count} elements"
780 )
781
782 if iri == self._OBJ_IRIS["_id"]:
783 continue
784
785 with encoder.write_property(iri, compact) as prop_s:
786 prop.encode(prop_s, value, state)
787
788 @classmethod
789 def _make_object(cls, typ):
790 if typ not in cls.CLASSES:
791 raise TypeError(f"Unknown type {typ}")
792
793 return cls.CLASSES[typ]()
794
795 @classmethod
796 def decode(cls, decoder, *, objectset=None):
797 typ, obj_d = decoder.read_object()
798 if typ is None:
799 raise TypeError("Unable to determine type for object")
800
801 obj = cls._make_object(typ)
802 for key in (obj.ID_ALIAS, obj._OBJ_IRIS["_id"]):
803 with obj_d.read_property(key) as prop_d:
804 if prop_d is None:
805 continue
806
807 _id = prop_d.read_iri()
808 if _id is None:
809 raise TypeError(f"Object key '{key}' is the wrong type")
810
811 obj._id = _id
812 break
813
814 if obj.NODE_KIND == NodeKind.IRI and not obj._id:
815 raise ValueError("Object is missing required IRI")
816
817 if objectset is not None:
818 if obj._id:
819 v = objectset.find_by_id(_id)
820 if v is not None:
821 return v
822
823 obj._decode_properties(obj_d, objectset=objectset)
824
825 if objectset is not None:
826 objectset.add_index(obj)
827 return obj
828
829 def _decode_properties(self, decoder, objectset=None):
830 for key in decoder.object_keys():
831 if not self._decode_prop(decoder, key, objectset=objectset):
832 raise KeyError(f"Unknown property '{key}'")
833
834 def _decode_prop(self, decoder, key, objectset=None):
835 if key in (self._OBJ_IRIS["_id"], self.ID_ALIAS):
836 return True
837
838 for iri, prop, _, _, _, compact in self.__iter_props():
839 if compact == key:
840 read_key = compact
841 elif iri == key:
842 read_key = iri
843 else:
844 continue
845
846 with decoder.read_property(read_key) as prop_d:
847 v = prop.decode(prop_d, objectset=objectset)
848 prop.validate(v)
849 self.__dict__["_obj_data"][iri] = v
850 return True
851
852 return False
853
854 def link_helper(self, objectset, missing, visited):
855 if self in visited:
856 return
857
858 visited.add(self)
859
860 for iri, prop, _, _, _, _ in self.__iter_props():
861 self.__dict__["_obj_data"][iri] = prop.link_prop(
862 self.__dict__["_obj_data"][iri],
863 objectset,
864 missing,
865 visited,
866 )
867
868 def __str__(self):
869 parts = [
870 f"{self.__class__.__name__}(",
871 ]
872 if self._id:
873 parts.append(f"@id='{self._id}'")
874 parts.append(")")
875 return "".join(parts)
876
877 def __hash__(self):
878 return super().__hash__()
879
880 def __eq__(self, other):
881 return super().__eq__(other)
882
883 def __lt__(self, other):
884 def sort_key(obj):
885 if isinstance(obj, str):
886 return (obj, "", "", "")
887 return (
888 obj._id or "",
889 obj.TYPE,
890 getattr(obj, "name", None) or "",
891 id(obj),
892 )
893
894 return sort_key(self) < sort_key(other)
895
896
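
The register() decorator and SHACLObject drive everything else in this file. The real bindings are generated from the SHACL model, but a hypothetical hand-registered class would look like this sketch:

# Hedged sketch: ex:Widget is hypothetical, not part of the SPDX 3 model.
@register("http://example.com/ns#Widget", compact_type="ex:Widget")
class Widget(SHACLObject):
    NAMED_INDIVIDUALS = {}

    @classmethod
    def _register_props(cls):
        super()._register_props()
        cls._add_property("name", StringProp(),
                          iri="http://example.com/ns#name")

w = Widget(name="demo")
assert w.TYPE == "http://example.com/ns#Widget"
assert w.COMPACT_TYPE == "ex:Widget"
assert w.name == "demo"
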
897class SHACLExtensibleObject(object):
898 CLOSED = False
899
900 def __init__(self, typ=None, **kwargs):
901 if typ:
902 self.__dict__["_obj_TYPE"] = (typ, None)
903 else:
904 self.__dict__["_obj_TYPE"] = (self._OBJ_TYPE, self._OBJ_COMPACT_TYPE)
905 super().__init__(**kwargs)
906
907 def _is_abstract(self):
908 # Unknown classes are assumed to not be abstract so that they can be
909 # deserialized
910 typ = self.__dict__["_obj_TYPE"][0]
911 if typ in self.__class__.CLASSES:
912 return self.__class__.CLASSES[typ].IS_ABSTRACT
913
914 return False
915
916 @classmethod
917 def _make_object(cls, typ):
918 # Check for a known type, and if so, deserialize as that instead
919 if typ in cls.CLASSES:
920 return cls.CLASSES[typ]()
921
922 obj = cls(typ)
923 return obj
924
925 def _decode_properties(self, decoder, objectset=None):
926 def decode_value(d):
927 if not d.is_list():
928 return d.read_value()
929
930 return [decode_value(val_d) for val_d in d.read_list()]
931
932 if self.CLOSED:
933 super()._decode_properties(decoder, objectset=objectset)
934 return
935
936 for key in decoder.object_keys():
937 if self._decode_prop(decoder, key, objectset=objectset):
938 continue
939
940 if not is_IRI(key):
941 raise KeyError(
942 f"Extensible object properties must be IRIs. Got '{key}'"
943 )
944
945 with decoder.read_property(key) as prop_d:
946 self.__dict__["_obj_data"][key] = decode_value(prop_d)
947
948 def _encode_properties(self, encoder, state):
949 def encode_value(encoder, v):
950 if isinstance(v, bool):
951 encoder.write_bool(v)
952 elif isinstance(v, str):
953 encoder.write_string(v)
954 elif isinstance(v, int):
955 encoder.write_integer(v)
956 elif isinstance(v, float):
957 encoder.write_float(v)
958 elif isinstance(v, list):
959 with encoder.write_list() as list_s:
960 for i in v:
961 with list_s.write_list_item() as item_s:
962 encode_value(item_s, i)
963 else:
964 raise TypeError(
965 f"Unsupported serialized type {type(v)} with value '{v}'"
966 )
967
968 super()._encode_properties(encoder, state)
969 if self.CLOSED:
970 return
971
972 for iri, value in self.__dict__["_obj_data"].items():
973 if iri in self._OBJ_PROPERTIES:
974 continue
975
976 with encoder.write_property(iri) as prop_s:
977 encode_value(prop_s, value)
978
979 def __setitem__(self, iri, value):
980 try:
981 super().__setitem__(iri, value)
982 except KeyError:
983 if self.CLOSED:
984 raise
985
986 if not is_IRI(iri):
987 raise KeyError(f"Key '{iri}' must be an IRI")
988 self.__dict__["_obj_data"][iri] = value
989
990 def __delitem__(self, iri):
991 try:
992 super().__delitem__(iri)
993 except KeyError:
994 if self.CLOSED:
995 raise
996
997 if not is_IRI(iri):
998 raise KeyError(f"Key '{iri}' must be an IRI")
999 del self.__dict__["_obj_data"][iri]
1000
1001 def __getattr__(self, name):
1002 if name == "TYPE":
1003 return self.__dict__["_obj_TYPE"][0]
1004 if name == "COMPACT_TYPE":
1005 return self.__dict__["_obj_TYPE"][1]
1006 return super().__getattr__(name)
1007
1008 def property_keys(self):
1009 iris = set()
1010 for pyname, iri, compact in super().property_keys():
1011 iris.add(iri)
1012 yield pyname, iri, compact
1013
1014 if self.CLOSED:
1015 return
1016
1017 for iri in self.__dict__["_obj_data"].keys():
1018 if iri not in iris:
1019 yield None, iri, None
1020
1021
1022class SHACLObjectSet(object):
1023 def __init__(self, objects=[], *, link=False):
1024 self.objects = set()
1025 self.missing_ids = set()
1026 for o in objects:
1027 self.objects.add(o)
1028 self.create_index()
1029 if link:
1030 self._link()
1031
1032 def create_index(self):
1033 """
1034 (re)Create object index
1035
1036 Creates or recreates the indices for the object set to enable fast
1037 lookup. All objects and their children are walked and indexed
1038 """
1039 self.obj_by_id = {}
1040 self.obj_by_type = {}
1041 for o in self.foreach():
1042 self.add_index(o)
1043
1044 def add_index(self, obj):
1045 """
1046 Add object to index
1047
1048 Adds the object to all appropriate indices
1049 """
1050
1051 def reg_type(typ, compact, o, exact):
1052 self.obj_by_type.setdefault(typ, set()).add((exact, o))
1053 if compact:
1054 self.obj_by_type.setdefault(compact, set()).add((exact, o))
1055
1056 if not isinstance(obj, SHACLObject):
1057 raise TypeError("Object is not of type SHACLObject")
1058
1059 for typ in SHACLObject.CLASSES.values():
1060 if isinstance(obj, typ):
1061 reg_type(
1062 typ._OBJ_TYPE, typ._OBJ_COMPACT_TYPE, obj, obj.__class__ is typ
1063 )
1064
1065 # This covers custom extensions
1066 reg_type(obj.TYPE, obj.COMPACT_TYPE, obj, True)
1067
1068 if not obj._id:
1069 return
1070
1071 self.missing_ids.discard(obj._id)
1072
1073 if obj._id in self.obj_by_id:
1074 return
1075
1076 self.obj_by_id[obj._id] = obj
1077
1078 def add(self, obj):
1079 """
1080 Add object to object set
1081
1082 Adds a SHACLObject to the object set and indexes it.
1083
1084 NOTE: Child objects of the added object are not indexed
1085 """
1086 if not isinstance(obj, SHACLObject):
1087 raise TypeError("Object is not of type SHACLObject")
1088
1089 if obj not in self.objects:
1090 self.objects.add(obj)
1091 self.add_index(obj)
1092 return obj
1093
1094 def update(self, *others):
1095 """
1096 Update object set adding all objects in each other iterable
1097 """
1098 for o in others:
1099 for obj in o:
1100 self.add(obj)
1101
1102 def __contains__(self, item):
1103 """
1104 Returns True if the item is in the object set
1105 """
1106 return item in self.objects
1107
1108 def link(self):
1109 """
1110 Link object set
1111
1112 Links the objects in the object set by replacing string object
1113 references with references to the objects themselves. e.g.
1114 a property that references object "https://foo/bar" by a string
1115 reference will be replaced with an actual reference to the object in
1116 the object set with the same ID if it exists in the object set
1117
1118 If multiple objects with the same ID are found, the duplicates are
1119 eliminated
1120 """
1121 self.create_index()
1122 return self._link()
1123
1124 def _link(self):
1125 global NAMED_INDIVIDUALS
1126
1127 self.missing_ids = set()
1128 visited = set()
1129
1130 new_objects = set()
1131
1132 for o in self.objects:
1133 if o._id:
1134 o = self.find_by_id(o._id, o)
1135 o.link_helper(self, self.missing_ids, visited)
1136 new_objects.add(o)
1137
1138 self.objects = new_objects
1139
1140 # Remove blank nodes
1141 obj_by_id = {}
1142 for _id, obj in self.obj_by_id.items():
1143 if _id.startswith("_:"):
1144 del obj._id
1145 else:
1146 obj_by_id[_id] = obj
1147 self.obj_by_id = obj_by_id
1148
1149 # Named individuals aren't considered missing
1150 self.missing_ids -= NAMED_INDIVIDUALS
1151
1152 return self.missing_ids
1153
1154 def find_by_id(self, _id, default=None):
1155 """
1156 Find object by ID
1157
1158 Returns objects that match the specified ID, or default if there is no
1159 object with the specified ID
1160 """
1161 if _id not in self.obj_by_id:
1162 return default
1163 return self.obj_by_id[_id]
1164
1165 def foreach(self):
1166 """
1167 Iterate over every object in the object set, and all child objects
1168 """
1169 visited = set()
1170 for o in self.objects:
1171 if o not in visited:
1172 yield o
1173 visited.add(o)
1174
1175 for child in o.iter_objects(recursive=True, visited=visited):
1176 yield child
1177
1178 def foreach_type(self, typ, *, match_subclass=True):
1179 """
1180 Iterate over each object of a specified type (or subclasses thereof)
1181
1182 If match_subclass is True, any class derived from typ will also match
1183 (similar to isinstance()). If False, only exact matches will be
1184 returned
1185 """
1186 if not isinstance(typ, str):
1187 if not issubclass(typ, SHACLObject):
1188 raise TypeError(f"Type must be derived from SHACLObject, got {typ}")
1189 typ = typ._OBJ_TYPE
1190
1191 if typ not in self.obj_by_type:
1192 return
1193
1194 for exact, o in self.obj_by_type[typ]:
1195 if match_subclass or exact:
1196 yield o
1197
1198 def merge(self, *objectsets):
1199 """
1200 Merge object sets
1201
1202 Returns a new object set that is the combination of this object set and
1203 all provided arguments
1204 """
1205 new_objects = set()
1206 new_objects |= self.objects
1207 for d in objectsets:
1208 new_objects |= d.objects
1209
1210 return SHACLObjectSet(new_objects, link=True)
1211
1212 def encode(self, encoder, force_list=False, *, key=None):
1213 """
1214 Serialize a list of objects to a serialization encoder
1215
1216 If force_list is true, a list will always be written using the encoder.
1217 """
1218 ref_counts = {}
1219 state = EncodeState()
1220
1221 def walk_callback(value, path):
1222 nonlocal state
1223 nonlocal ref_counts
1224
1225 if not isinstance(value, SHACLObject):
1226 return True
1227
1228 # Remove blank node ID for re-assignment
1229 if value._id and value._id.startswith("_:"):
1230 del value._id
1231
1232 if value._id:
1233 state.add_refed(value)
1234
1235 # If the object is referenced more than once, add it to the set of
1236 # referenced objects
1237 ref_counts.setdefault(value, 0)
1238 ref_counts[value] += 1
1239 if ref_counts[value] > 1:
1240 state.add_refed(value)
1241 return False
1242
1243 return True
1244
1245 for o in self.objects:
1246 if o._id:
1247 state.add_refed(o)
1248 o.walk(walk_callback)
1249
1250 use_list = force_list or len(self.objects) > 1
1251
1252 if use_list:
1253 # If we are making a list add all the objects referred to by reference
1254 # to the list
1255 objects = list(self.objects | state.ref_objects)
1256 else:
1257 objects = list(self.objects)
1258
1259 objects.sort(key=key)
1260
1261 if use_list:
1262 # Ensure top level objects are only written in the top level graph
1263 # node, and referenced by ID everywhere else. This is done by setting
1264 # the flag that indicates this object has been written for all the top
1265 # level objects, then clearing it right before serializing the object.
1266 #
1267 # In this way, if an object is referenced before it is supposed to be
1268 # serialized into the @graph, it will serialize as a string instead of
1269 # the actual object
1270 for o in objects:
1271 state.written_objects.add(o)
1272
1273 with encoder.write_list() as list_s:
1274 for o in objects:
1275 # Allow this specific object to be written now
1276 state.written_objects.remove(o)
1277 with list_s.write_list_item() as item_s:
1278 o.encode(item_s, state)
1279
1280 elif objects:
1281 objects[0].encode(encoder, state)
1282
1283 def decode(self, decoder):
1284 self.create_index()
1285
1286 for obj_d in decoder.read_list():
1287 o = SHACLObject.decode(obj_d, objectset=self)
1288 self.objects.add(o)
1289
1290 self._link()
1291
1292
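
String references are resolved when the set is linked. A sketch continuing the hypothetical ex: namespace from the earlier example, this time with an object-valued property:

# Hedged sketch: ex:Node is hypothetical. A string reference to another
# object's IRI is replaced by the object itself when the set is linked.
@register("http://example.com/ns#Node", compact_type="ex:Node")
class Node(SHACLObject):
    NAMED_INDIVIDUALS = {}

    @classmethod
    def _register_props(cls):
        super()._register_props()
        cls._add_property("next", ObjectProp(SHACLObject, False),
                          iri="http://example.com/ns#next")

a = Node(_id="http://example.com/a")
b = Node(_id="http://example.com/b")
a.next = "http://example.com/b"            # reference by IRI string
objset = SHACLObjectSet([a, b], link=True)
assert a.next is b                         # resolved during linking
assert not objset.missing_ids
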
1293class EncodeState(object):
1294 def __init__(self):
1295 self.ref_objects = set()
1296 self.written_objects = set()
1297 self.blank_objects = {}
1298
1299 def get_object_id(self, o):
1300 if o._id:
1301 return o._id
1302
1303 if o not in self.blank_objects:
1304 _id = f"_:{o.__class__.__name__}{len(self.blank_objects)}"
1305 self.blank_objects[o] = _id
1306
1307 return self.blank_objects[o]
1308
1309 def is_refed(self, o):
1310 return o in self.ref_objects
1311
1312 def add_refed(self, o):
1313 self.ref_objects.add(o)
1314
1315 def is_written(self, o):
1316 return o in self.written_objects
1317
1318 def add_written(self, o):
1319 self.written_objects.add(o)
1320
1321
1322class Decoder(ABC):
1323 @abstractmethod
1324 def read_value(self):
1325 """
1326 Consume next item
1327
1328 Consumes the next item of any type
1329 """
1330 pass
1331
1332 @abstractmethod
1333 def read_string(self):
1334 """
1335 Consume the next item as a string.
1336
1337 Returns the string value of the next item, or `None` if the next item
1338 is not a string
1339 """
1340 pass
1341
1342 @abstractmethod
1343 def read_datetime(self):
1344 """
1345 Consumes the next item as a date & time string
1346
1347 Returns the string value of the next item, if it is an ISO datetime, or
1348 `None` if the next item is not an ISO datetime string.
1349
1350 Note that validation of the string is done by the caller, so a minimal
1351 implementation can just check if the next item is a string without
1352 worrying about the format
1353 """
1354 pass
1355
1356 @abstractmethod
1357 def read_integer(self):
1358 """
1359 Consumes the next item as an integer
1360
1361 Returns the integer value of the next item, or `None` if the next item
1362 is not an integer
1363 """
1364 pass
1365
1366 @abstractmethod
1367 def read_iri(self):
1368 """
1369 Consumes the next item as an IRI string
1370
1371 Returns the string value of the next item an IRI, or `None` if the next
1372 Returns the string value of the next item as an IRI, or `None` if the next
1373
1374 The returned string should be either a fully-qualified IRI, or a blank
1375 node ID
1376 """
1377 pass
1378
1379 @abstractmethod
1380 def read_enum(self, e):
1381 """
1382 Consumes the next item as an Enum value string
1383
1384 Returns the fully qualified IRI of the next enum item, or `None` if the
1385 next item is not an enum value.
1386
1387 The caller is responsible for validating that the returned IRI is
1388 actually a member of the specified Enum, so the `Decoder` does not need
1389 to check that, but can if it wishes
1390 """
1391 pass
1392
1393 @abstractmethod
1394 def read_bool(self):
1395 """
1396 Consume the next item as a boolean value
1397
1398 Returns the boolean value of the next item, or `None` if the next item
1399 is not a boolean
1400 """
1401 pass
1402
1403 @abstractmethod
1404 def read_float(self):
1405 """
1406 Consume the next item as a float value
1407
1408 Returns the float value of the next item, or `None` if the next item is
1409 not a float
1410 """
1411 pass
1412
1413 @abstractmethod
1414 def read_list(self):
1415 """
1416 Consume the next item as a list generator
1417
1418 This should generate a `Decoder` object for each item in the list. The
1419 generated `Decoder` can be used to read the corresponding item from the
1420 list
1421 """
1422 pass
1423
1424 @abstractmethod
1425 def is_list(self):
1426 """
1427 Checks if the next item is a list
1428
1429 Returns True if the next item is a list, or False if it is a scalar
1430 """
1431 pass
1432
1433 @abstractmethod
1434 def read_object(self):
1435 """
1436 Consume next item as an object
1437
1438 Returns a (type, decoder) tuple for the next item, where the decoder
1439 can be used to read the object's properties. If the type of the next
1440 item cannot be determined, the type will be `None`
1441
1442 Properties will be read out of the object using `read_property` and
1443 `read_object_id`
1444 """
1445 pass
1446
1447 @abstractmethod
1448 @contextmanager
1449 def read_property(self, key):
1450 """
1451 Read property from object
1452
1453 A context manager that yields a `Decoder` that can be used to read the
1454 value of the property with the given key in the current object, or `None`
1455 if the property does not exist in the current object.
1456 """
1457 pass
1458
1459 @abstractmethod
1460 def object_keys(self):
1461 """
1462 Read property keys from an object
1463
1464 Iterates over all the serialized keys for the current object
1465 """
1466 pass
1467
1468 @abstractmethod
1469 def read_object_id(self, alias=None):
1470 """
1471 Read current object ID property
1472
1473 Returns the ID of the current object if one is defined, or `None` if
1474 the current object has no ID.
1475
1476 The ID must be a fully qualified IRI or a blank node
1477
1478 If `alias` is provided, it is a hint as to another name by which the ID
1479 might be found, if the `Decoder` supports aliases for an ID
1480 """
1481 pass
1482
1483
1484class JSONLDDecoder(Decoder):
1485 def __init__(self, data, root=False):
1486 self.data = data
1487 self.root = root
1488
1489 def read_value(self):
1490 if isinstance(self.data, str):
1491 try:
1492 return float(self.data)
1493 except ValueError:
1494 pass
1495 return self.data
1496
1497 def read_string(self):
1498 if isinstance(self.data, str):
1499 return self.data
1500 return None
1501
1502 def read_datetime(self):
1503 return self.read_string()
1504
1505 def read_integer(self):
1506 if isinstance(self.data, int):
1507 return self.data
1508 return None
1509
1510 def read_bool(self):
1511 if isinstance(self.data, bool):
1512 return self.data
1513 return None
1514
1515 def read_float(self):
1516 if isinstance(self.data, (int, float, str)):
1517 return float(self.data)
1518 return None
1519
1520 def read_iri(self):
1521 if isinstance(self.data, str):
1522 return self.data
1523 return None
1524
1525 def read_enum(self, e):
1526 if isinstance(self.data, str):
1527 return self.data
1528 return None
1529
1530 def read_list(self):
1531 if self.is_list():
1532 for v in self.data:
1533 yield self.__class__(v)
1534 else:
1535 yield self
1536
1537 def is_list(self):
1538 return isinstance(self.data, (list, tuple, set))
1539
1540 def __get_value(self, *keys):
1541 for k in keys:
1542 if k and k in self.data:
1543 return self.data[k]
1544 return None
1545
1546 @contextmanager
1547 def read_property(self, key):
1548 v = self.__get_value(key)
1549 if v is not None:
1550 yield self.__class__(v)
1551 else:
1552 yield None
1553
1554 def object_keys(self):
1555 for key in self.data.keys():
1556 if key in ("@type", "type"):
1557 continue
1558 if self.root and key == "@context":
1559 continue
1560 yield key
1561
1562 def read_object(self):
1563 typ = self.__get_value("@type", "type")
1564 if typ is not None:
1565 return typ, self
1566
1567 return None, self
1568
1569 def read_object_id(self, alias=None):
1570 return self.__get_value(alias, "@id")
1571
1572
1573class JSONLDDeserializer(object):
1574 def deserialize_data(self, data, objectset: SHACLObjectSet):
1575 if "@graph" in data:
1576 h = JSONLDDecoder(data["@graph"], True)
1577 else:
1578 h = JSONLDDecoder(data, True)
1579
1580 objectset.decode(h)
1581
1582 def read(self, f, objectset: SHACLObjectSet):
1583 data = json.load(f)
1584 self.deserialize_data(data, objectset)
1585
1586
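
Deserialization ties the decoder, the object set, and linking together. A sketch reusing the hypothetical ex:Node class registered in the previous example:

# Hedged sketch reusing the hypothetical ex:Node class from above.
import io
import json

doc = {
    "@graph": [
        {"type": "ex:Node", "@id": "http://example.com/a",
         "http://example.com/ns#next": "http://example.com/b"},
        {"type": "ex:Node", "@id": "http://example.com/b"},
    ]
}
objset = SHACLObjectSet()
JSONLDDeserializer().read(io.StringIO(json.dumps(doc)), objset)
a = objset.find_by_id("http://example.com/a")
b = objset.find_by_id("http://example.com/b")
assert a.next is b    # the string reference was linked during decode
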
1587class Encoder(ABC):
1588 @abstractmethod
1589 def write_string(self, v):
1590 """
1591 Write a string value
1592
1593 Encodes the value as a string in the output
1594 """
1595 pass
1596
1597 @abstractmethod
1598 def write_datetime(self, v):
1599 """
1600 Write a date & time string
1601
1602 Encodes the value as an ISO datetime string
1603
1604 Note: The provided string is already correctly encoded as an ISO datetime
1605 """
1606 pass
1607
1608 @abstractmethod
1609 def write_integer(self, v):
1610 """
1611 Write an integer value
1612
1613 Encodes the value as an integer in the output
1614 """
1615 pass
1616
1617 @abstractmethod
1618 def write_iri(self, v, compact=None):
1619 """
1620 Write IRI
1621
1622 Encodes the string as an IRI. Note that the string will be either a
1623 fully qualified IRI or a blank node ID. If `compact` is provided and
1624 the serialization supports compacted IRIs, it should be preferred to
1625 the full IRI
1626 """
1627 pass
1628
1629 @abstractmethod
1630 def write_enum(self, v, e, compact=None):
1631 """
1632 Write enum value IRI
1633
1634 Encodes the string enum value IRI. Note that the string will be a fully
1635 qualified IRI. If `compact` is provided and the serialization supports
1636 compacted IRIs, it should be preferred to the full IRI.
1637 """
1638 pass
1639
1640 @abstractmethod
1641 def write_bool(self, v):
1642 """
1643 Write boolean
1644
1645 Encodes the value as a boolean in the output
1646 """
1647 pass
1648
1649 @abstractmethod
1650 def write_float(self, v):
1651 """
1652 Write float
1653
1654 Encodes the value as a floating point number in the output
1655 """
1656 pass
1657
1658 @abstractmethod
1659 @contextmanager
1660 def write_object(self, o, _id, needs_id):
1661 """
1662 Write object
1663
1664 A context manager that yields an `Encoder` that can be used to encode
1665 the given object properties.
1666
1667 The provided ID will always be a valid ID (even if o._id is `None`), in
1668 case the `Encoder` _must_ have an ID. `needs_id` is a hint to indicate
1669 to the `Encoder` if an ID must be written or not (if that is even an
1670 option). If it is `True`, the `Encoder` must encode an ID for the
1671 object. If `False`, the encoder is not required to encode an ID and may
1672 omit it.
1673
1674 The ID will be either a fully qualified IRI, or a blank node IRI.
1675
1676 Properties will be written to the object using `write_property`
1677 """
1678 pass
1679
1680 @abstractmethod
1681 @contextmanager
1682 def write_property(self, iri, compact=None):
1683 """
1684 Write object property
1685
1686 A context manager that yields an `Encoder` that can be used to encode
1687 the value for the property with the given IRI in the current object
1688
1689 Note that the IRI will be fully qualified. If `compact` is provided and
1690 the serialization supports compacted IRIs, it should be preferred to
1691 the full IRI.
1692 """
1693 pass
1694
1695 @abstractmethod
1696 @contextmanager
1697 def write_list(self):
1698 """
1699 Write list
1700
1701 A context manager that yields an `Encoder` that can be used to encode a
1702 list.
1703
1704 Each item of the list will be added using `write_list_item`
1705 """
1706 pass
1707
1708 @abstractmethod
1709 @contextmanager
1710 def write_list_item(self):
1711 """
1712 Write list item
1713
1714 A context manager that yields an `Encoder` that can be used to encode
1715 the value for a list item
1716 """
1717 pass
1718
1719
1720class JSONLDEncoder(Encoder):
1721 def __init__(self, data=None):
1722 self.data = data
1723
1724 def write_string(self, v):
1725 self.data = v
1726
1727 def write_datetime(self, v):
1728 self.data = v
1729
1730 def write_integer(self, v):
1731 self.data = v
1732
1733 def write_iri(self, v, compact=None):
1734 self.write_string(compact or v)
1735
1736 def write_enum(self, v, e, compact=None):
1737 self.write_string(compact or v)
1738
1739 def write_bool(self, v):
1740 self.data = v
1741
1742 def write_float(self, v):
1743 self.data = str(v)
1744
1745 @contextmanager
1746 def write_property(self, iri, compact=None):
1747 s = self.__class__(None)
1748 yield s
1749 if s.data is not None:
1750 self.data[compact or iri] = s.data
1751
1752 @contextmanager
1753 def write_object(self, o, _id, needs_id):
1754 self.data = {
1755 "type": o.COMPACT_TYPE or o.TYPE,
1756 }
1757 if needs_id:
1758 self.data[o.ID_ALIAS or "@id"] = _id
1759 yield self
1760
1761 @contextmanager
1762 def write_list(self):
1763 self.data = []
1764 yield self
1765 if not self.data:
1766 self.data = None
1767
1768 @contextmanager
1769 def write_list_item(self):
1770 s = self.__class__(None)
1771 yield s
1772 if s.data is not None:
1773 self.data.append(s.data)
1774
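# A hedged sketch (not part of the generated API) of how the Encoder
# interface is driven: JSONLDEncoder accumulates a plain Python structure in
# `self.data`, which JSONLDSerializer below hands to the standard json
# module. Assuming `objectset` is an already-populated SHACLObjectSet:
#
#   h = JSONLDEncoder()
#   objectset.encode(h, False)
#   h.data  # now a dict (single root object) or a list (multiple roots)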
1775
1776class JSONLDSerializer(object):
1777 def __init__(self, **args):
1778 self.args = args
1779
1780 def serialize_data(
1781 self,
1782 objectset: SHACLObjectSet,
1783 force_at_graph=False,
1784 ):
1785 h = JSONLDEncoder()
1786 objectset.encode(h, force_at_graph)
1787 data = {}
1788 if len(CONTEXT_URLS) == 1:
1789 data["@context"] = CONTEXT_URLS[0]
1790 elif CONTEXT_URLS:
1791 data["@context"] = CONTEXT_URLS
1792
1793 if isinstance(h.data, list):
1794 data["@graph"] = h.data
1795 else:
1796 for k, v in h.data.items():
1797 data[k] = v
1798
1799 return data
1800
1801 def write(
1802 self,
1803 objectset: SHACLObjectSet,
1804 f,
1805 force_at_graph=False,
1806 **kwargs,
1807 ):
1808 """
1809 Write a SHACLObjectSet to a JSON-LD file
1810
1811 If force_at_graph is True, a @graph node will always be written
1812 """
1813 data = self.serialize_data(objectset, force_at_graph)
1814
1815 args = {**self.args, **kwargs}
1816
1817 sha1 = hashlib.sha1()
1818 for chunk in json.JSONEncoder(**args).iterencode(data):
1819 chunk = chunk.encode("utf-8")
1820 f.write(chunk)
1821 sha1.update(chunk)
1822
1823 return sha1.hexdigest()
1824
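# A hedged usage sketch: `write` emits UTF-8 encoded chunks, so the target
# stream must be opened in binary mode. Keyword arguments are forwarded to
# json.JSONEncoder (e.g. indent, sort_keys), and the SHA-1 of the serialized
# output is returned. The filename is illustrative:
#
#   s = JSONLDSerializer(indent=2)
#   with open("objects.spdx.json", "wb") as f:
#       digest = s.write(objectset, f)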
1825
1826class JSONLDInlineEncoder(Encoder):
1827 def __init__(self, f, sha1):
1828 self.f = f
1829 self.comma = False
1830 self.sha1 = sha1
1831
1832 def write(self, s):
1833 s = s.encode("utf-8")
1834 self.f.write(s)
1835 self.sha1.update(s)
1836
1837 def _write_comma(self):
1838 if self.comma:
1839 self.write(",")
1840 self.comma = False
1841
1842 def write_string(self, v):
1843 self.write(json.dumps(v))
1844
1845 def write_datetime(self, v):
1846 self.write_string(v)
1847
1848 def write_integer(self, v):
1849 self.write(f"{v}")
1850
1851 def write_iri(self, v, compact=None):
1852 self.write_string(compact or v)
1853
1854 def write_enum(self, v, e, compact=None):
1855 self.write_iri(v, compact)
1856
1857 def write_bool(self, v):
1858 if v:
1859 self.write("true")
1860 else:
1861 self.write("false")
1862
1863 def write_float(self, v):
1864 self.write(json.dumps(str(v)))
1865
1866 @contextmanager
1867 def write_property(self, iri, compact=None):
1868 self._write_comma()
1869 self.write_string(compact or iri)
1870 self.write(":")
1871 yield self
1872 self.comma = True
1873
1874 @contextmanager
1875 def write_object(self, o, _id, needs_id):
1876 self._write_comma()
1877
1878 self.write("{")
1879 self.write_string("type")
1880 self.write(":")
1881 self.write_string(o.COMPACT_TYPE or o.TYPE)
1882 self.comma = True
1883
1884 if needs_id:
1885 self._write_comma()
1886 self.write_string(o.ID_ALIAS or "@id")
1887 self.write(":")
1888 self.write_string(_id)
1889 self.comma = True
1890
1892 yield self
1893
1894 self.write("}")
1895 self.comma = True
1896
1897 @contextmanager
1898 def write_list(self):
1899 self._write_comma()
1900 self.write("[")
1901 yield self.__class__(self.f, self.sha1)
1902 self.write("]")
1903 self.comma = True
1904
1905 @contextmanager
1906 def write_list_item(self):
1907 self._write_comma()
1908 yield self.__class__(self.f, self.sha1)
1909 self.comma = True
1910
1911
1912class JSONLDInlineSerializer(object):
1913 def write(
1914 self,
1915 objectset: SHACLObjectSet,
1916 f,
1917 force_at_graph=False,
1918 ):
1919 """
1920 Write a SHACLObjectSet to a JSON-LD file
1921
1922 Note: force_at_graph is included for compatibility, but ignored. This
1923 serializer always writes out a graph
1924 """
1925 sha1 = hashlib.sha1()
1926 h = JSONLDInlineEncoder(f, sha1)
1927 h.write('{"@context":')
1928 if len(CONTEXT_URLS) == 1:
1929 h.write(f'"{CONTEXT_URLS[0]}"')
1930 elif CONTEXT_URLS:
1931 h.write('["')
1932 h.write('","'.join(CONTEXT_URLS))
1933 h.write('"]')
1934 h.write(",")
1935
1936 h.write('"@graph":')
1937
1938 objectset.encode(h, True)
1939 h.write("}")
1940 return sha1.hexdigest()
1941
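# The inline serializer streams JSON text straight to the file object instead
# of building an intermediate structure, which keeps memory use flat for
# large object sets at the cost of unformatted output. A hedged sketch,
# mirroring the JSONLDSerializer example above:
#
#   with open("objects.spdx.json", "wb") as f:
#       digest = JSONLDInlineSerializer().write(objectset, f)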
1942
1943def print_tree(objects, all_fields=False):
1944 """
1945 Print object tree
1946 """
1947 seen = set()
1948
1949 def callback(value, path):
1950 nonlocal seen
1951
1952 s = (" " * (len(path) - 1)) + f"{path[-1]}"
1953 if isinstance(value, SHACLObject):
1954 s += f" {value} ({id(value)})"
1955 is_empty = False
1956 elif isinstance(value, ListProxy):
1957 is_empty = len(value) == 0
1958 if is_empty:
1959 s += " []"
1960 else:
1961 s += f" {value!r}"
1962 is_empty = value is None
1963
1964 if all_fields or not is_empty:
1965 print(s)
1966
1967 if isinstance(value, SHACLObject):
1968 if value in seen:
1969 return False
1970 seen.add(value)
1971 return True
1972
1973 return True
1974
1975 for o in objects:
1976 o.walk(callback)
1977
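# A hedged sketch: print_tree takes any iterable of SHACLObjects and walks
# each one, printing every non-empty property (or all properties when
# all_fields=True). `root` here is a hypothetical, already-built object:
#
#   print_tree([root], all_fields=True)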
1978
1979# fmt: off
1980"""Format Guard"""
1981
1982
1983CONTEXT_URLS = [
1984 "https://spdx.org/rdf/3.0.1/spdx-context.jsonld",
1985]
1986
1987
1988# CLASSES
1989# A class for describing the energy consumption incurred by an AI model in
1990# different stages of its lifecycle.
1991@register("https://spdx.org/rdf/3.0.1/terms/AI/EnergyConsumption", compact_type="ai_EnergyConsumption", abstract=False)
1992class ai_EnergyConsumption(SHACLObject):
1993 NODE_KIND = NodeKind.BlankNodeOrIRI
1994 NAMED_INDIVIDUALS = {
1995 }
1996
1997 @classmethod
1998 def _register_props(cls):
1999 super()._register_props()
2000 # Specifies the amount of energy consumed when finetuning the AI model that is
2001 # being used in the AI system.
2002 cls._add_property(
2003 "ai_finetuningEnergyConsumption",
2004 ListProp(ObjectProp(ai_EnergyConsumptionDescription, False)),
2005 iri="https://spdx.org/rdf/3.0.1/terms/AI/finetuningEnergyConsumption",
2006 compact="ai_finetuningEnergyConsumption",
2007 )
2008 # Specifies the amount of energy consumed during inference time by an AI model
2009 # that is being used in the AI system.
2010 cls._add_property(
2011 "ai_inferenceEnergyConsumption",
2012 ListProp(ObjectProp(ai_EnergyConsumptionDescription, False)),
2013 iri="https://spdx.org/rdf/3.0.1/terms/AI/inferenceEnergyConsumption",
2014 compact="ai_inferenceEnergyConsumption",
2015 )
2016 # Specifies the amount of energy consumed when training the AI model that is
2017 # being used in the AI system.
2018 cls._add_property(
2019 "ai_trainingEnergyConsumption",
2020 ListProp(ObjectProp(ai_EnergyConsumptionDescription, False)),
2021 iri="https://spdx.org/rdf/3.0.1/terms/AI/trainingEnergyConsumption",
2022 compact="ai_trainingEnergyConsumption",
2023 )
2024
2025
2026# The class that helps note down the quantity of energy consumption and the unit
2027# used for measurement.
2028@register("https://spdx.org/rdf/3.0.1/terms/AI/EnergyConsumptionDescription", compact_type="ai_EnergyConsumptionDescription", abstract=False)
2029class ai_EnergyConsumptionDescription(SHACLObject):
2030 NODE_KIND = NodeKind.BlankNodeOrIRI
2031 NAMED_INDIVIDUALS = {
2032 }
2033
2034 @classmethod
2035 def _register_props(cls):
2036 super()._register_props()
2037 # Represents the energy quantity.
2038 cls._add_property(
2039 "ai_energyQuantity",
2040 FloatProp(),
2041 iri="https://spdx.org/rdf/3.0.1/terms/AI/energyQuantity",
2042 min_count=1,
2043 compact="ai_energyQuantity",
2044 )
2045 # Specifies the unit in which energy is measured.
2046 cls._add_property(
2047 "ai_energyUnit",
2048 EnumProp([
2049 ("https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/kilowattHour", "kilowattHour"),
2050 ("https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/megajoule", "megajoule"),
2051 ("https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/other", "other"),
2052 ]),
2053 iri="https://spdx.org/rdf/3.0.1/terms/AI/energyUnit",
2054 min_count=1,
2055 compact="ai_energyUnit",
2056 )
2057
2058
2059# Specifies the unit of energy consumption.
2060@register("https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType", compact_type="ai_EnergyUnitType", abstract=False)
2061class ai_EnergyUnitType(SHACLObject):
2062 NODE_KIND = NodeKind.BlankNodeOrIRI
2063 NAMED_INDIVIDUALS = {
2064 "kilowattHour": "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/kilowattHour",
2065 "megajoule": "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/megajoule",
2066 "other": "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/other",
2067 }
2068 # Kilowatt-hour.
2069 kilowattHour = "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/kilowattHour"
2070 # Megajoule.
2071 megajoule = "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/megajoule"
2072 # Any other units of energy measurement.
2073 other = "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/other"
2074
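# A hedged sketch of how enum-valued properties are set: EnumProp expects the
# fully qualified individual IRI, which the *Type classes expose as class
# attributes, so the two classes above combine like this:
#
#   desc = ai_EnergyConsumptionDescription()
#   desc.ai_energyQuantity = 24.5
#   desc.ai_energyUnit = ai_EnergyUnitType.kilowattHour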
2075
2076# Specifies the safety risk level.
2077@register("https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType", compact_type="ai_SafetyRiskAssessmentType", abstract=False)
2078class ai_SafetyRiskAssessmentType(SHACLObject):
2079 NODE_KIND = NodeKind.BlankNodeOrIRI
2080 NAMED_INDIVIDUALS = {
2081 "high": "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/high",
2082 "low": "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/low",
2083 "medium": "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/medium",
2084 "serious": "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/serious",
2085 }
2086 # The second-highest level of risk posed by an AI system.
2087 high = "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/high"
2088 # Low/no risk is posed by an AI system.
2089 low = "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/low"
2090 # The third-highest level of risk posed by an AI system.
2091 medium = "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/medium"
2092 # The highest level of risk posed by an AI system.
2093 serious = "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/serious"
2094
2095
2096# Specifies the type of an annotation.
2097@register("https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType", compact_type="AnnotationType", abstract=False)
2098class AnnotationType(SHACLObject):
2099 NODE_KIND = NodeKind.BlankNodeOrIRI
2100 NAMED_INDIVIDUALS = {
2101 "other": "https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/other",
2102 "review": "https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/review",
2103 }
2104 # Used to store extra information about an Element which is not part of a review (e.g. extra information provided during the creation of the Element).
2105 other = "https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/other"
2106 # Used when someone reviews the Element.
2107 review = "https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/review"
2108
2109
2110# Provides information about the creation of the Element.
2111@register("https://spdx.org/rdf/3.0.1/terms/Core/CreationInfo", compact_type="CreationInfo", abstract=False)
2112class CreationInfo(SHACLObject):
2113 NODE_KIND = NodeKind.BlankNodeOrIRI
2114 NAMED_INDIVIDUALS = {
2115 }
2116
2117 @classmethod
2118 def _register_props(cls):
2119 super()._register_props()
2120 # Provide consumers with comments by the creator of the Element about the
2121 # Element.
2122 cls._add_property(
2123 "comment",
2124 StringProp(),
2125 iri="https://spdx.org/rdf/3.0.1/terms/Core/comment",
2126 compact="comment",
2127 )
2128 # Identifies when the Element was originally created.
2129 cls._add_property(
2130 "created",
2131 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
2132 iri="https://spdx.org/rdf/3.0.1/terms/Core/created",
2133 min_count=1,
2134 compact="created",
2135 )
2136 # Identifies who or what created the Element.
2137 cls._add_property(
2138 "createdBy",
2139 ListProp(ObjectProp(Agent, False, context=[
2140 ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"),
2141 ],)),
2142 iri="https://spdx.org/rdf/3.0.1/terms/Core/createdBy",
2143 min_count=1,
2144 compact="createdBy",
2145 )
2146 # Identifies the tooling that was used during the creation of the Element.
2147 cls._add_property(
2148 "createdUsing",
2149 ListProp(ObjectProp(Tool, False)),
2150 iri="https://spdx.org/rdf/3.0.1/terms/Core/createdUsing",
2151 compact="createdUsing",
2152 )
2153 # Provides a reference number that can be used to understand how to parse and
2154 # interpret an Element.
2155 cls._add_property(
2156 "specVersion",
2157 StringProp(pattern=r"^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$",),
2158 iri="https://spdx.org/rdf/3.0.1/terms/Core/specVersion",
2159 min_count=1,
2160 compact="specVersion",
2161 )
2162
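# A hedged sketch of populating a generated class: properties are ordinary
# attributes, list-valued properties take Python lists, and the pattern and
# min_count constraints declared above are checked by the property machinery.
# `agent` is a hypothetical, separately created Agent:
#
#   ci = CreationInfo()
#   ci.specVersion = "3.0.1"
#   ci.created = "2024-01-01T00:00:00Z"
#   ci.createdBy = [agent]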
2163
2164# A key with an associated value.
2165@register("https://spdx.org/rdf/3.0.1/terms/Core/DictionaryEntry", compact_type="DictionaryEntry", abstract=False)
2166class DictionaryEntry(SHACLObject):
2167 NODE_KIND = NodeKind.BlankNodeOrIRI
2168 NAMED_INDIVIDUALS = {
2169 }
2170
2171 @classmethod
2172 def _register_props(cls):
2173 super()._register_props()
2174 # A key used in a generic key-value pair.
2175 cls._add_property(
2176 "key",
2177 StringProp(),
2178 iri="https://spdx.org/rdf/3.0.1/terms/Core/key",
2179 min_count=1,
2180 compact="key",
2181 )
2182 # A value used in a generic key-value pair.
2183 cls._add_property(
2184 "value",
2185 StringProp(),
2186 iri="https://spdx.org/rdf/3.0.1/terms/Core/value",
2187 compact="value",
2188 )
2189
2190
2191# Base domain class from which all other SPDX-3.0 domain classes derive.
2192@register("https://spdx.org/rdf/3.0.1/terms/Core/Element", compact_type="Element", abstract=True)
2193class Element(SHACLObject):
2194 NODE_KIND = NodeKind.IRI
2195 ID_ALIAS = "spdxId"
2196 NAMED_INDIVIDUALS = {
2197 }
2198
2199 @classmethod
2200 def _register_props(cls):
2201 super()._register_props()
2202 # Provide consumers with comments by the creator of the Element about the
2203 # Element.
2204 cls._add_property(
2205 "comment",
2206 StringProp(),
2207 iri="https://spdx.org/rdf/3.0.1/terms/Core/comment",
2208 compact="comment",
2209 )
2210 # Provides information about the creation of the Element.
2211 cls._add_property(
2212 "creationInfo",
2213 ObjectProp(CreationInfo, True),
2214 iri="https://spdx.org/rdf/3.0.1/terms/Core/creationInfo",
2215 min_count=1,
2216 compact="creationInfo",
2217 )
2218 # Provides a detailed description of the Element.
2219 cls._add_property(
2220 "description",
2221 StringProp(),
2222 iri="https://spdx.org/rdf/3.0.1/terms/Core/description",
2223 compact="description",
2224 )
2225 # Specifies an Extension characterization of some aspect of an Element.
2226 cls._add_property(
2227 "extension",
2228 ListProp(ObjectProp(extension_Extension, False)),
2229 iri="https://spdx.org/rdf/3.0.1/terms/Core/extension",
2230 compact="extension",
2231 )
2232 # Provides a reference to a resource outside the scope of SPDX-3.0 content
2233 # that uniquely identifies an Element.
2234 cls._add_property(
2235 "externalIdentifier",
2236 ListProp(ObjectProp(ExternalIdentifier, False)),
2237 iri="https://spdx.org/rdf/3.0.1/terms/Core/externalIdentifier",
2238 compact="externalIdentifier",
2239 )
2240 # Points to a resource outside the scope of the SPDX-3.0 content
2241 # that provides additional characteristics of an Element.
2242 cls._add_property(
2243 "externalRef",
2244 ListProp(ObjectProp(ExternalRef, False)),
2245 iri="https://spdx.org/rdf/3.0.1/terms/Core/externalRef",
2246 compact="externalRef",
2247 )
2248 # Identifies the name of an Element as designated by the creator.
2249 cls._add_property(
2250 "name",
2251 StringProp(),
2252 iri="https://spdx.org/rdf/3.0.1/terms/Core/name",
2253 compact="name",
2254 )
2255 # A short description of an Element.
2256 cls._add_property(
2257 "summary",
2258 StringProp(),
2259 iri="https://spdx.org/rdf/3.0.1/terms/Core/summary",
2260 compact="summary",
2261 )
2262 # Provides an IntegrityMethod with which the integrity of an Element can be
2263 # asserted.
2264 cls._add_property(
2265 "verifiedUsing",
2266 ListProp(ObjectProp(IntegrityMethod, False)),
2267 iri="https://spdx.org/rdf/3.0.1/terms/Core/verifiedUsing",
2268 compact="verifiedUsing",
2269 )
2270
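# A hedged note: Element is abstract, so only its concrete subclasses are
# instantiated, and every Element requires creationInfo (min_count=1). It is
# common to share a single CreationInfo across all Elements in a document;
# `ci` below is the CreationInfo sketched earlier and Person is a
# hypothetical concrete subclass defined elsewhere in this module:
#
#   person = Person()
#   person.creationInfo = ci
#   person.name = "Example Author"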
2271
2272# A collection of Elements, not necessarily with unifying context.
2273@register("https://spdx.org/rdf/3.0.1/terms/Core/ElementCollection", compact_type="ElementCollection", abstract=True)
2274class ElementCollection(Element):
2275 NODE_KIND = NodeKind.IRI
2276 ID_ALIAS = "spdxId"
2277 NAMED_INDIVIDUALS = {
2278 }
2279
2280 @classmethod
2281 def _register_props(cls):
2282 super()._register_props()
2283 # Refers to one or more Elements that are part of an ElementCollection.
2284 cls._add_property(
2285 "element",
2286 ListProp(ObjectProp(Element, False, context=[
2287 ("https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", "NoneElement"),
2288 ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"),
2289 ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"),
2290 ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"),
2291 ("https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", "NoAssertionElement"),
2292 ],)),
2293 iri="https://spdx.org/rdf/3.0.1/terms/Core/element",
2294 compact="element",
2295 )
2296 # Describes a profile which the creator of this ElementCollection intends to
2297 # conform to.
2298 cls._add_property(
2299 "profileConformance",
2300 ListProp(EnumProp([
2301 ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/ai", "ai"),
2302 ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/build", "build"),
2303 ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/core", "core"),
2304 ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/dataset", "dataset"),
2305 ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/expandedLicensing", "expandedLicensing"),
2306 ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/extension", "extension"),
2307 ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/lite", "lite"),
2308 ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/security", "security"),
2309 ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/simpleLicensing", "simpleLicensing"),
2310 ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/software", "software"),
2311 ])),
2312 iri="https://spdx.org/rdf/3.0.1/terms/Core/profileConformance",
2313 compact="profileConformance",
2314 )
2315 # This property is used to denote the root Element(s) of a tree of elements contained in a BOM.
2316 cls._add_property(
2317 "rootElement",
2318 ListProp(ObjectProp(Element, False, context=[
2319 ("https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", "NoneElement"),
2320 ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"),
2321 ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"),
2322 ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"),
2323 ("https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", "NoAssertionElement"),
2324 ],)),
2325 iri="https://spdx.org/rdf/3.0.1/terms/Core/rootElement",
2326 compact="rootElement",
2327 )
2328
2329
2330# A reference to a resource identifier defined outside the scope of SPDX-3.0 content that uniquely identifies an Element.
2331@register("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifier", compact_type="ExternalIdentifier", abstract=False)
2332class ExternalIdentifier(SHACLObject):
2333 NODE_KIND = NodeKind.BlankNodeOrIRI
2334 NAMED_INDIVIDUALS = {
2335 }
2336
2337 @classmethod
2338 def _register_props(cls):
2339 super()._register_props()
2340 # Provide consumers with comments by the creator of the Element about the
2341 # Element.
2342 cls._add_property(
2343 "comment",
2344 StringProp(),
2345 iri="https://spdx.org/rdf/3.0.1/terms/Core/comment",
2346 compact="comment",
2347 )
2348 # Specifies the type of the external identifier.
2349 cls._add_property(
2350 "externalIdentifierType",
2351 EnumProp([
2352 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe22", "cpe22"),
2353 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe23", "cpe23"),
2354 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cve", "cve"),
2355 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/email", "email"),
2356 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/gitoid", "gitoid"),
2357 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/other", "other"),
2358 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/packageUrl", "packageUrl"),
2359 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/securityOther", "securityOther"),
2360 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swhid", "swhid"),
2361 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swid", "swid"),
2362 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/urlScheme", "urlScheme"),
2363 ]),
2364 iri="https://spdx.org/rdf/3.0.1/terms/Core/externalIdentifierType",
2365 min_count=1,
2366 compact="externalIdentifierType",
2367 )
2368 # Uniquely identifies an external element.
2369 cls._add_property(
2370 "identifier",
2371 StringProp(),
2372 iri="https://spdx.org/rdf/3.0.1/terms/Core/identifier",
2373 min_count=1,
2374 compact="identifier",
2375 )
2376 # Provides the location for more information regarding an external identifier.
2377 cls._add_property(
2378 "identifierLocator",
2379 ListProp(AnyURIProp()),
2380 iri="https://spdx.org/rdf/3.0.1/terms/Core/identifierLocator",
2381 compact="identifierLocator",
2382 )
2383 # An entity that is authorized to issue identification credentials.
2384 cls._add_property(
2385 "issuingAuthority",
2386 StringProp(),
2387 iri="https://spdx.org/rdf/3.0.1/terms/Core/issuingAuthority",
2388 compact="issuingAuthority",
2389 )
2390
2391
2392# Specifies the type of an external identifier.
2393@register("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType", compact_type="ExternalIdentifierType", abstract=False)
2394class ExternalIdentifierType(SHACLObject):
2395 NODE_KIND = NodeKind.BlankNodeOrIRI
2396 NAMED_INDIVIDUALS = {
2397 "cpe22": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe22",
2398 "cpe23": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe23",
2399 "cve": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cve",
2400 "email": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/email",
2401 "gitoid": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/gitoid",
2402 "other": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/other",
2403 "packageUrl": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/packageUrl",
2404 "securityOther": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/securityOther",
2405 "swhid": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swhid",
2406 "swid": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swid",
2407 "urlScheme": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/urlScheme",
2408 }
2409 # [Common Platform Enumeration Specification 2.2](https://cpe.mitre.org/files/cpe-specification_2.2.pdf)
2410 cpe22 = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe22"
2411 # [Common Platform Enumeration: Naming Specification Version 2.3](https://csrc.nist.gov/publications/detail/nistir/7695/final)
2412 cpe23 = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe23"
2413 # Common Vulnerabilities and Exposures identifiers, an identifier for a specific software flaw defined within the official CVE Dictionary and that conforms to the [CVE specification](https://csrc.nist.gov/glossary/term/cve_id).
2414 cve = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cve"
2415 # Email address, as defined in [RFC 3696](https://datatracker.ietf.org/doc/rfc3696/) Section 3.
2416 email = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/email"
2417 # [Gitoid](https://www.iana.org/assignments/uri-schemes/prov/gitoid), stands for [Git Object ID](https://git-scm.com/book/en/v2/Git-Internals-Git-Objects). A gitoid of type blob is a unique hash of a binary artifact. A gitoid may represent either an [Artifact Identifier](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-identifier-types) for the software artifact or an [Input Manifest Identifier](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#input-manifest-identifier) for the software artifact's associated [Artifact Input Manifest](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-input-manifest); this ambiguity exists because the Artifact Input Manifest is itself an artifact, and the gitoid of that artifact is its valid identifier. Gitoids calculated on software artifacts (Snippet, File, or Package Elements) should be recorded in the SPDX 3.0 SoftwareArtifact's contentIdentifier property. Gitoids calculated on the Artifact Input Manifest (Input Manifest Identifier) should be recorded in the SPDX 3.0 Element's externalIdentifier property. See [OmniBOR Specification](https://github.com/omnibor/spec/), a minimalistic specification for describing software [Artifact Dependency Graphs](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-dependency-graph-adg).
2418 gitoid = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/gitoid"
2419 # Used when the type does not match any of the other options.
2420 other = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/other"
2421 # Package URL, as defined in the corresponding [Annex](../../../annexes/pkg-url-specification.md) of this specification.
2422 packageUrl = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/packageUrl"
2423 # Used when there is a security related identifier of unspecified type.
2424 securityOther = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/securityOther"
2425 # SoftWare Hash IDentifier, a persistent intrinsic identifier for digital artifacts, such as files, trees (also known as directories or folders), commits, and other objects typically found in version control systems. The format of the identifiers is defined in the [SWHID specification](https://www.swhid.org/specification/v1.1/4.Syntax) (ISO/IEC DIS 18670). They typically look like `swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2`.
2426 swhid = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swhid"
2427 # Concise Software Identification (CoSWID) tag, as defined in [RFC 9393](https://datatracker.ietf.org/doc/rfc9393/) Section 2.3.
2428 swid = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swid"
2429 # [Uniform Resource Identifier (URI) Schemes](https://www.iana.org/assignments/uri-schemes/uri-schemes.xhtml). The scheme used in order to locate a resource.
2430 urlScheme = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/urlScheme"
2431
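# A hedged sketch tying ExternalIdentifier to its enum: the type property
# takes the IRI exposed by ExternalIdentifierType, and `identifier` carries
# the identifier string itself (a made-up package URL here):
#
#   ei = ExternalIdentifier()
#   ei.externalIdentifierType = ExternalIdentifierType.packageUrl
#   ei.identifier = "pkg:generic/example@1.0"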
2432
2433# A map of Element identifiers that are used within an SpdxDocument but defined
2434# external to that SpdxDocument.
2435@register("https://spdx.org/rdf/3.0.1/terms/Core/ExternalMap", compact_type="ExternalMap", abstract=False)
2436class ExternalMap(SHACLObject):
2437 NODE_KIND = NodeKind.BlankNodeOrIRI
2438 NAMED_INDIVIDUALS = {
2439 }
2440
2441 @classmethod
2442 def _register_props(cls):
2443 super()._register_props()
2444 # Artifact representing a serialization instance of SPDX data containing the
2445 # definition of a particular Element.
2446 cls._add_property(
2447 "definingArtifact",
2448 ObjectProp(Artifact, False),
2449 iri="https://spdx.org/rdf/3.0.1/terms/Core/definingArtifact",
2450 compact="definingArtifact",
2451 )
2452 # Identifies an external Element used within an SpdxDocument but defined
2453 # external to that SpdxDocument.
2454 cls._add_property(
2455 "externalSpdxId",
2456 AnyURIProp(),
2457 iri="https://spdx.org/rdf/3.0.1/terms/Core/externalSpdxId",
2458 min_count=1,
2459 compact="externalSpdxId",
2460 )
2461 # Provides an indication of where to retrieve an external Element.
2462 cls._add_property(
2463 "locationHint",
2464 AnyURIProp(),
2465 iri="https://spdx.org/rdf/3.0.1/terms/Core/locationHint",
2466 compact="locationHint",
2467 )
2468 # Provides an IntegrityMethod with which the integrity of an Element can be
2469 # asserted.
2470 cls._add_property(
2471 "verifiedUsing",
2472 ListProp(ObjectProp(IntegrityMethod, False)),
2473 iri="https://spdx.org/rdf/3.0.1/terms/Core/verifiedUsing",
2474 compact="verifiedUsing",
2475 )
2476
2477
2478# A reference to a resource outside the scope of SPDX-3.0 content related to an Element.
2479@register("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRef", compact_type="ExternalRef", abstract=False)
2480class ExternalRef(SHACLObject):
2481 NODE_KIND = NodeKind.BlankNodeOrIRI
2482 NAMED_INDIVIDUALS = {
2483 }
2484
2485 @classmethod
2486 def _register_props(cls):
2487 super()._register_props()
2488 # Provide consumers with comments by the creator of the Element about the
2489 # Element.
2490 cls._add_property(
2491 "comment",
2492 StringProp(),
2493 iri="https://spdx.org/rdf/3.0.1/terms/Core/comment",
2494 compact="comment",
2495 )
2496 # Provides information about the content type of an Element or a Property.
2497 cls._add_property(
2498 "contentType",
2499 StringProp(pattern=r"^[^\/]+\/[^\/]+$",),
2500 iri="https://spdx.org/rdf/3.0.1/terms/Core/contentType",
2501 compact="contentType",
2502 )
2503 # Specifies the type of the external reference.
2504 cls._add_property(
2505 "externalRefType",
2506 EnumProp([
2507 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altDownloadLocation", "altDownloadLocation"),
2508 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altWebPage", "altWebPage"),
2509 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/binaryArtifact", "binaryArtifact"),
2510 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/bower", "bower"),
2511 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildMeta", "buildMeta"),
2512 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildSystem", "buildSystem"),
2513 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/certificationReport", "certificationReport"),
2514 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/chat", "chat"),
2515 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/componentAnalysisReport", "componentAnalysisReport"),
2516 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/cwe", "cwe"),
2517 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/documentation", "documentation"),
2518 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/dynamicAnalysisReport", "dynamicAnalysisReport"),
2519 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/eolNotice", "eolNotice"),
2520 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/exportControlAssessment", "exportControlAssessment"),
2521 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/funding", "funding"),
2522 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/issueTracker", "issueTracker"),
2523 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/license", "license"),
2524 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mailingList", "mailingList"),
2525 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mavenCentral", "mavenCentral"),
2526 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/metrics", "metrics"),
2527 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/npm", "npm"),
2528 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/nuget", "nuget"),
2529 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/other", "other"),
2530 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/privacyAssessment", "privacyAssessment"),
2531 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/productMetadata", "productMetadata"),
2532 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/purchaseOrder", "purchaseOrder"),
2533 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/qualityAssessmentReport", "qualityAssessmentReport"),
2534 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseHistory", "releaseHistory"),
2535 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseNotes", "releaseNotes"),
2536 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/riskAssessment", "riskAssessment"),
2537 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/runtimeAnalysisReport", "runtimeAnalysisReport"),
2538 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/secureSoftwareAttestation", "secureSoftwareAttestation"),
2539 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdversaryModel", "securityAdversaryModel"),
2540 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdvisory", "securityAdvisory"),
2541 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityFix", "securityFix"),
2542 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityOther", "securityOther"),
2543 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPenTestReport", "securityPenTestReport"),
2544 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPolicy", "securityPolicy"),
2545 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityThreatModel", "securityThreatModel"),
2546 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/socialMedia", "socialMedia"),
2547 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/sourceArtifact", "sourceArtifact"),
2548 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/staticAnalysisReport", "staticAnalysisReport"),
2549 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/support", "support"),
2550 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vcs", "vcs"),
2551 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityDisclosureReport", "vulnerabilityDisclosureReport"),
2552 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityExploitabilityAssessment", "vulnerabilityExploitabilityAssessment"),
2553 ]),
2554 iri="https://spdx.org/rdf/3.0.1/terms/Core/externalRefType",
2555 compact="externalRefType",
2556 )
2557 # Provides the location of an external reference.
2558 cls._add_property(
2559 "locator",
2560 ListProp(StringProp()),
2561 iri="https://spdx.org/rdf/3.0.1/terms/Core/locator",
2562 compact="locator",
2563 )
2564
2565
2566# Specifies the type of an external reference.
2567@register("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType", compact_type="ExternalRefType", abstract=False)
2568class ExternalRefType(SHACLObject):
2569 NODE_KIND = NodeKind.BlankNodeOrIRI
2570 NAMED_INDIVIDUALS = {
2571 "altDownloadLocation": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altDownloadLocation",
2572 "altWebPage": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altWebPage",
2573 "binaryArtifact": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/binaryArtifact",
2574 "bower": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/bower",
2575 "buildMeta": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildMeta",
2576 "buildSystem": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildSystem",
2577 "certificationReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/certificationReport",
2578 "chat": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/chat",
2579 "componentAnalysisReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/componentAnalysisReport",
2580 "cwe": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/cwe",
2581 "documentation": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/documentation",
2582 "dynamicAnalysisReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/dynamicAnalysisReport",
2583 "eolNotice": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/eolNotice",
2584 "exportControlAssessment": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/exportControlAssessment",
2585 "funding": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/funding",
2586 "issueTracker": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/issueTracker",
2587 "license": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/license",
2588 "mailingList": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mailingList",
2589 "mavenCentral": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mavenCentral",
2590 "metrics": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/metrics",
2591 "npm": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/npm",
2592 "nuget": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/nuget",
2593 "other": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/other",
2594 "privacyAssessment": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/privacyAssessment",
2595 "productMetadata": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/productMetadata",
2596 "purchaseOrder": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/purchaseOrder",
2597 "qualityAssessmentReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/qualityAssessmentReport",
2598 "releaseHistory": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseHistory",
2599 "releaseNotes": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseNotes",
2600 "riskAssessment": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/riskAssessment",
2601 "runtimeAnalysisReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/runtimeAnalysisReport",
2602 "secureSoftwareAttestation": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/secureSoftwareAttestation",
2603 "securityAdversaryModel": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdversaryModel",
2604 "securityAdvisory": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdvisory",
2605 "securityFix": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityFix",
2606 "securityOther": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityOther",
2607 "securityPenTestReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPenTestReport",
2608 "securityPolicy": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPolicy",
2609 "securityThreatModel": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityThreatModel",
2610 "socialMedia": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/socialMedia",
2611 "sourceArtifact": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/sourceArtifact",
2612 "staticAnalysisReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/staticAnalysisReport",
2613 "support": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/support",
2614 "vcs": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vcs",
2615 "vulnerabilityDisclosureReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityDisclosureReport",
2616 "vulnerabilityExploitabilityAssessment": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityExploitabilityAssessment",
2617 }
2618 # A reference to an alternative download location.
2619 altDownloadLocation = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altDownloadLocation"
2620 # A reference to an alternative web page.
2621 altWebPage = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altWebPage"
2622 # A reference to binary artifacts related to a package.
2623 binaryArtifact = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/binaryArtifact"
2624 # A reference to a Bower package. The package locator format, which looks like `package#version`, is defined in the "install" section of [Bower API documentation](https://bower.io/docs/api/#install).
2625 bower = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/bower"
2626 # A reference to build metadata related to a published package.
2627 buildMeta = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildMeta"
2628 # A reference to the build system used to create or publish the package.
2629 buildSystem = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildSystem"
2630 # A reference to a certification report for a package from an accredited/independent body.
2631 certificationReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/certificationReport"
2632 # A reference to the instant messaging system used by the maintainer for a package.
2633 chat = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/chat"
2634 # A reference to a Software Composition Analysis (SCA) report.
2635 componentAnalysisReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/componentAnalysisReport"
2636 # [Common Weakness Enumeration](https://csrc.nist.gov/glossary/term/common_weakness_enumeration). A reference to a source of software flaw defined within the official [CWE List](https://cwe.mitre.org/data/) that conforms to the [CWE specification](https://cwe.mitre.org/).
2637 cwe = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/cwe"
2638 # A reference to the documentation for a package.
2639 documentation = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/documentation"
2640 # A reference to a dynamic analysis report for a package.
2641 dynamicAnalysisReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/dynamicAnalysisReport"
2642 # A reference to the End Of Sale (EOS) and/or End Of Life (EOL) information related to a package.
2643 eolNotice = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/eolNotice"
2644 # A reference to an export control assessment for a package.
2645 exportControlAssessment = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/exportControlAssessment"
2646 # A reference to funding information related to a package.
2647 funding = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/funding"
2648 # A reference to the issue tracker for a package.
2649 issueTracker = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/issueTracker"
2650 # A reference to additional license information related to an artifact.
2651 license = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/license"
2652 # A reference to the mailing list used by the maintainer for a package.
2653 mailingList = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mailingList"
2654 # A reference to a Maven repository artifact. The artifact locator format is defined in the [Maven documentation](https://maven.apache.org/guides/mini/guide-naming-conventions.html) and looks like `groupId:artifactId[:version]`.
2655 mavenCentral = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mavenCentral"
2656 # A reference to metrics related to a package, such as OpenSSF scorecards.
2657 metrics = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/metrics"
2658 # A reference to an npm package. The package locator format is defined in the [npm documentation](https://docs.npmjs.com/cli/v10/configuring-npm/package-json) and looks like `package@version`.
2659 npm = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/npm"
2660 # A reference to a NuGet package. The package locator format is defined in the [NuGet documentation](https://docs.nuget.org) and looks like `package/version`.
2661 nuget = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/nuget"
2662 # Used when the type does not match any of the other options.
2663 other = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/other"
2664 # A reference to a privacy assessment for a package.
2665 privacyAssessment = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/privacyAssessment"
2666 # A reference to additional product metadata, such as a reference within an organization's product catalog.
2667 productMetadata = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/productMetadata"
2668 # A reference to a purchase order for a package.
2669 purchaseOrder = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/purchaseOrder"
2670 # A reference to a quality assessment for a package.
2671 qualityAssessmentReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/qualityAssessmentReport"
2672 # A reference to a published list of releases for a package.
2673 releaseHistory = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseHistory"
2674 # A reference to the release notes for a package.
2675 releaseNotes = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseNotes"
2676 # A reference to a risk assessment for a package.
2677 riskAssessment = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/riskAssessment"
2678 # A reference to a runtime analysis report for a package.
2679 runtimeAnalysisReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/runtimeAnalysisReport"
2680 # A reference to information assuring that the software is developed using security practices as defined by [NIST SP 800-218 Secure Software Development Framework (SSDF) Version 1.1](https://csrc.nist.gov/pubs/sp/800/218/final) or [CISA Secure Software Development Attestation Form](https://www.cisa.gov/resources-tools/resources/secure-software-development-attestation-form).
2681 secureSoftwareAttestation = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/secureSoftwareAttestation"
2682 # A reference to the security adversary model for a package.
2683 securityAdversaryModel = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdversaryModel"
2684 # A reference to a published security advisory (where an advisory is as defined in [ISO 29147:2018](https://www.iso.org/standard/72311.html)) that may affect one or more elements, e.g., vendor advisories or specific NVD entries.
2685 securityAdvisory = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdvisory"
2686 # A reference to the patch or source code that fixes a vulnerability.
2687 securityFix = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityFix"
2688 # A reference to related security information of unspecified type.
2689 securityOther = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityOther"
2690 # A reference to a [penetration test](https://en.wikipedia.org/wiki/Penetration_test) report for a package.
2691 securityPenTestReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPenTestReport"
2692 # A reference to instructions for reporting newly discovered security vulnerabilities for a package.
2693 securityPolicy = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPolicy"
2694 # A reference to the [security threat model](https://en.wikipedia.org/wiki/Threat_model) for a package.
2695 securityThreatModel = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityThreatModel"
2696 # A reference to a social media channel for a package.
2697 socialMedia = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/socialMedia"
2698 # A reference to an artifact containing the sources for a package.
2699 sourceArtifact = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/sourceArtifact"
2700 # A reference to a static analysis report for a package.
2701 staticAnalysisReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/staticAnalysisReport"
2702 # A reference to the software support channel or other support information for a package.
2703 support = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/support"
2704 # A reference to a version control system related to a software artifact.
2705 vcs = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vcs"
2706 # A reference to a Vulnerability Disclosure Report (VDR) which provides the software supplier's analysis and findings describing the impact (or lack of impact) that reported vulnerabilities have on packages or products in the supplier's SBOM as defined in [NIST SP 800-161 Cybersecurity Supply Chain Risk Management Practices for Systems and Organizations](https://csrc.nist.gov/pubs/sp/800/161/r1/final).
2707 vulnerabilityDisclosureReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityDisclosureReport"
2708 # A reference to a Vulnerability Exploitability eXchange (VEX) statement which provides information on whether a product is impacted by a specific vulnerability in an included package and, if affected, whether there are actions recommended to remediate. See also [NTIA VEX one-page summary](https://ntia.gov/files/ntia/publications/vex_one-page_summary.pdf).
2709 vulnerabilityExploitabilityAssessment = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityExploitabilityAssessment"
2710
2711
2712# A mathematical algorithm that maps data of arbitrary size to a bit string.
2713@register("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm", compact_type="HashAlgorithm", abstract=False)
2714class HashAlgorithm(SHACLObject):
2715 NODE_KIND = NodeKind.BlankNodeOrIRI
2716 NAMED_INDIVIDUALS = {
2717 "adler32": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/adler32",
2718 "blake2b256": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b256",
2719 "blake2b384": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b384",
2720 "blake2b512": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b512",
2721 "blake3": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake3",
2722 "crystalsDilithium": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsDilithium",
2723 "crystalsKyber": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsKyber",
2724 "falcon": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/falcon",
2725 "md2": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md2",
2726 "md4": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md4",
2727 "md5": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md5",
2728 "md6": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md6",
2729 "other": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/other",
2730 "sha1": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha1",
2731 "sha224": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha224",
2732 "sha256": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha256",
2733 "sha384": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha384",
2734 "sha3_224": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_224",
2735 "sha3_256": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_256",
2736 "sha3_384": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_384",
2737 "sha3_512": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_512",
2738 "sha512": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha512",
2739 }
2740 # Adler-32 checksum is part of the widely used zlib compression library as defined in [RFC 1950](https://datatracker.ietf.org/doc/rfc1950/) Section 2.3.
2741 adler32 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/adler32"
2742 # BLAKE2b algorithm with a digest size of 256, as defined in [RFC 7693](https://datatracker.ietf.org/doc/rfc7693/) Section 4.
2743 blake2b256 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b256"
2744 # BLAKE2b algorithm with a digest size of 384, as defined in [RFC 7693](https://datatracker.ietf.org/doc/rfc7693/) Section 4.
2745 blake2b384 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b384"
2746 # BLAKE2b algorithm with a digest size of 512, as defined in [RFC 7693](https://datatracker.ietf.org/doc/rfc7693/) Section 4.
2747 blake2b512 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b512"
2748 # [BLAKE3](https://github.com/BLAKE3-team/BLAKE3-specs/blob/master/blake3.pdf)
2749 blake3 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake3"
2750 # [Dilithium](https://pq-crystals.org/dilithium/)
2751 crystalsDilithium = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsDilithium"
2752 # [Kyber](https://pq-crystals.org/kyber/)
2753 crystalsKyber = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsKyber"
2754 # [FALCON](https://falcon-sign.info/falcon.pdf)
2755 falcon = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/falcon"
2756 # MD2 message-digest algorithm, as defined in [RFC 1319](https://datatracker.ietf.org/doc/rfc1319/).
2757 md2 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md2"
2758 # MD4 message-digest algorithm, as defined in [RFC 1186](https://datatracker.ietf.org/doc/rfc1186/).
2759 md4 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md4"
2760 # MD5 message-digest algorithm, as defined in [RFC 1321](https://datatracker.ietf.org/doc/rfc1321/).
2761 md5 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md5"
2762 # [MD6 hash function](https://people.csail.mit.edu/rivest/pubs/RABCx08.pdf)
2763 md6 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md6"
2764 # Any hashing algorithm that does not exist in this list of entries.
2765 other = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/other"
2766 # SHA-1, a secure hashing algorithm, as defined in [RFC 3174](https://datatracker.ietf.org/doc/rfc3174/).
2767 sha1 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha1"
2768 # SHA-2 with a digest length of 224, as defined in [RFC 3874](https://datatracker.ietf.org/doc/rfc3874/).
2769 sha224 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha224"
2770 # SHA-2 with a digest length of 256, as defined in [RFC 6234](https://datatracker.ietf.org/doc/rfc6234/).
2771 sha256 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha256"
2772 # SHA-2 with a digest length of 384, as defined in [RFC 6234](https://datatracker.ietf.org/doc/rfc6234/).
2773 sha384 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha384"
2774 # SHA-3 with a digest length of 224, as defined in [FIPS 202](https://csrc.nist.gov/pubs/fips/202/final).
2775 sha3_224 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_224"
2776 # SHA-3 with a digest length of 256, as defined in [FIPS 202](https://csrc.nist.gov/pubs/fips/202/final).
2777 sha3_256 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_256"
2778 # SHA-3 with a digest length of 384, as defined in [FIPS 202](https://csrc.nist.gov/pubs/fips/202/final).
2779 sha3_384 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_384"
2780 # SHA-3 with a digest length of 512, as defined in [FIPS 202](https://csrc.nist.gov/pubs/fips/202/final).
2781 sha3_512 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_512"
2782 # SHA-2 with a digest length of 512, as defined in [RFC 6234](https://datatracker.ietf.org/doc/rfc6234/).
2783 sha512 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha512"
2784
2785
2786# A concrete subclass of Element used by Individuals in the
2787# Core profile.
2788@register("https://spdx.org/rdf/3.0.1/terms/Core/IndividualElement", compact_type="IndividualElement", abstract=False)
2789class IndividualElement(Element):
2790 NODE_KIND = NodeKind.IRI
2791 ID_ALIAS = "spdxId"
2792 NAMED_INDIVIDUALS = {
2793 "NoAssertionElement": "https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement",
2794 "NoneElement": "https://spdx.org/rdf/3.0.1/terms/Core/NoneElement",
2795 }
2796 # An Individual Value for Element representing a set of Elements of unknown
2797 # identity or cardinality (number).
2798 NoAssertionElement = "https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement"
2799 # An Individual Value for Element representing a set of Elements with
2800 # cardinality (number/count) of zero.
2801 NoneElement = "https://spdx.org/rdf/3.0.1/terms/Core/NoneElement"
2802
2803
2804# Provides an independently reproducible mechanism that permits verification of a specific Element.
2805@register("https://spdx.org/rdf/3.0.1/terms/Core/IntegrityMethod", compact_type="IntegrityMethod", abstract=True)
2806class IntegrityMethod(SHACLObject):
2807 NODE_KIND = NodeKind.BlankNodeOrIRI
2808 NAMED_INDIVIDUALS = {
2809 }
2810
2811 @classmethod
2812 def _register_props(cls):
2813 super()._register_props()
2814 # Provide consumers with comments by the creator of the Element about the
2815 # Element.
2816 cls._add_property(
2817 "comment",
2818 StringProp(),
2819 iri="https://spdx.org/rdf/3.0.1/terms/Core/comment",
2820 compact="comment",
2821 )
2822
2823
2824# Provide an enumerated set of lifecycle phases that can provide context to relationships.
2825@register("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType", compact_type="LifecycleScopeType", abstract=False)
2826class LifecycleScopeType(SHACLObject):
2827 NODE_KIND = NodeKind.BlankNodeOrIRI
2828 NAMED_INDIVIDUALS = {
2829 "build": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/build",
2830 "design": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/design",
2831 "development": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/development",
2832 "other": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/other",
2833 "runtime": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/runtime",
2834 "test": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/test",
2835 }
2836 # A relationship has specific context implications during an element's build phase, during development.
2837 build = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/build"
2838 # A relationship has specific context implications during an element's design.
2839 design = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/design"
2840 # A relationship has specific context implications during development phase of an element.
2841 development = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/development"
2842 # A relationship has other specific context information necessary to capture that the above set of enumerations does not handle.
2843 other = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/other"
2844 # A relationship has specific context implications during the execution phase of an element.
2845 runtime = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/runtime"
2846 # A relationship has specific context implications during an element's testing phase, during development.
2847 test = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/test"
2848
2849
2850# A mapping between prefixes and namespace partial URIs.
2851@register("https://spdx.org/rdf/3.0.1/terms/Core/NamespaceMap", compact_type="NamespaceMap", abstract=False)
2852class NamespaceMap(SHACLObject):
2853 NODE_KIND = NodeKind.BlankNodeOrIRI
2854 NAMED_INDIVIDUALS = {
2855 }
2856
2857 @classmethod
2858 def _register_props(cls):
2859 super()._register_props()
2860 # Provides an unambiguous mechanism for conveying a URI fragment portion of an
2861 # Element ID.
2862 cls._add_property(
2863 "namespace",
2864 AnyURIProp(),
2865 iri="https://spdx.org/rdf/3.0.1/terms/Core/namespace",
2866 min_count=1,
2867 compact="namespace",
2868 )
2869 # A substitute for a URI.
2870 cls._add_property(
2871 "prefix",
2872 StringProp(),
2873 iri="https://spdx.org/rdf/3.0.1/terms/Core/prefix",
2874 min_count=1,
2875 compact="prefix",
2876 )
2877
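# Illustrative usage sketch, not part of the generated bindings: it
# assumes the SHACLObject base accepts registered property names as
# constructor keyword arguments. The prefix and namespace values below
# are hypothetical examples.
def _example_namespace_map():
    # Map the short prefix "ex" to a full namespace so that compacted IDs
    # such as "ex:pkg-1" expand to "https://example.com/spdx/pkg-1".
    return NamespaceMap(
        prefix="ex",
        namespace="https://example.com/spdx/",
    )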

# An SPDX version 2.X compatible verification method for software packages.
@register("https://spdx.org/rdf/3.0.1/terms/Core/PackageVerificationCode", compact_type="PackageVerificationCode", abstract=False)
class PackageVerificationCode(IntegrityMethod):
    NODE_KIND = NodeKind.BlankNodeOrIRI
    NAMED_INDIVIDUALS = {
    }

    @classmethod
    def _register_props(cls):
        super()._register_props()
        # Specifies the algorithm used for calculating the hash value.
        cls._add_property(
            "algorithm",
            EnumProp([
                ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/adler32", "adler32"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b256", "blake2b256"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b384", "blake2b384"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b512", "blake2b512"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake3", "blake3"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsDilithium", "crystalsDilithium"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsKyber", "crystalsKyber"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/falcon", "falcon"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md2", "md2"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md4", "md4"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md5", "md5"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md6", "md6"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/other", "other"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha1", "sha1"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha224", "sha224"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha256", "sha256"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha384", "sha384"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_224", "sha3_224"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_256", "sha3_256"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_384", "sha3_384"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_512", "sha3_512"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha512", "sha512"),
            ]),
            iri="https://spdx.org/rdf/3.0.1/terms/Core/algorithm",
            min_count=1,
            compact="algorithm",
        )
        # The result of applying a hash algorithm to an Element.
        cls._add_property(
            "hashValue",
            StringProp(),
            iri="https://spdx.org/rdf/3.0.1/terms/Core/hashValue",
            min_count=1,
            compact="hashValue",
        )
        # The relative file name of a file to be excluded from the
        # `PackageVerificationCode`.
        cls._add_property(
            "packageVerificationCodeExcludedFile",
            ListProp(StringProp()),
            iri="https://spdx.org/rdf/3.0.1/terms/Core/packageVerificationCodeExcludedFile",
            compact="packageVerificationCodeExcludedFile",
        )

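# Illustrative usage sketch, not part of the generated bindings: it
# assumes keyword-argument construction and uses the HashAlgorithm
# constants defined earlier in this module. The digest string is a
# made-up placeholder, not a real verification code.
def _example_package_verification_code():
    return PackageVerificationCode(
        algorithm=HashAlgorithm.sha1,
        hashValue="d6a770ba38583ed4bb4525bd96e50461655d2758",
        packageVerificationCodeExcludedFile=["./package.spdx"],
    )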

# A tuple of two positive integers that define a range.
@register("https://spdx.org/rdf/3.0.1/terms/Core/PositiveIntegerRange", compact_type="PositiveIntegerRange", abstract=False)
class PositiveIntegerRange(SHACLObject):
    NODE_KIND = NodeKind.BlankNodeOrIRI
    NAMED_INDIVIDUALS = {
    }

    @classmethod
    def _register_props(cls):
        super()._register_props()
        # Defines the beginning of a range.
        cls._add_property(
            "beginIntegerRange",
            PositiveIntegerProp(),
            iri="https://spdx.org/rdf/3.0.1/terms/Core/beginIntegerRange",
            min_count=1,
            compact="beginIntegerRange",
        )
        # Defines the end of a range.
        cls._add_property(
            "endIntegerRange",
            PositiveIntegerProp(),
            iri="https://spdx.org/rdf/3.0.1/terms/Core/endIntegerRange",
            min_count=1,
            compact="endIntegerRange",
        )

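# Illustrative usage sketch, not part of the generated bindings,
# assuming keyword-argument construction: a range such as lines 10-20 of
# a file (e.g. a snippet's line range) could be expressed as follows.
def _example_positive_integer_range():
    return PositiveIntegerRange(
        beginIntegerRange=10,
        endIntegerRange=20,
    )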

# Categories of presence or absence.
@register("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType", compact_type="PresenceType", abstract=False)
class PresenceType(SHACLObject):
    NODE_KIND = NodeKind.BlankNodeOrIRI
    NAMED_INDIVIDUALS = {
        "no": "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/no",
        "noAssertion": "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/noAssertion",
        "yes": "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/yes",
    }
    # Indicates absence of the field.
    no = "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/no"
    # Makes no assertion about the field.
    noAssertion = "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/noAssertion"
    # Indicates presence of the field.
    yes = "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/yes"


# Enumeration of the valid profiles.
@register("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType", compact_type="ProfileIdentifierType", abstract=False)
class ProfileIdentifierType(SHACLObject):
    NODE_KIND = NodeKind.BlankNodeOrIRI
    NAMED_INDIVIDUALS = {
        "ai": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/ai",
        "build": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/build",
        "core": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/core",
        "dataset": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/dataset",
        "expandedLicensing": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/expandedLicensing",
        "extension": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/extension",
        "lite": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/lite",
        "security": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/security",
        "simpleLicensing": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/simpleLicensing",
        "software": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/software",
    }
    # the element follows the AI profile specification
    ai = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/ai"
    # the element follows the Build profile specification
    build = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/build"
    # the element follows the Core profile specification
    core = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/core"
    # the element follows the Dataset profile specification
    dataset = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/dataset"
    # the element follows the ExpandedLicensing profile specification
    expandedLicensing = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/expandedLicensing"
    # the element follows the Extension profile specification
    extension = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/extension"
    # the element follows the Lite profile specification
    lite = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/lite"
    # the element follows the Security profile specification
    security = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/security"
    # the element follows the SimpleLicensing profile specification
    simpleLicensing = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/simpleLicensing"
    # the element follows the Software profile specification
    software = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/software"


# Describes a relationship between one or more elements.
@register("https://spdx.org/rdf/3.0.1/terms/Core/Relationship", compact_type="Relationship", abstract=False)
class Relationship(Element):
    NODE_KIND = NodeKind.IRI
    ID_ALIAS = "spdxId"
    NAMED_INDIVIDUALS = {
    }

    @classmethod
    def _register_props(cls):
        super()._register_props()
        # Provides information about the completeness of relationships.
        cls._add_property(
            "completeness",
            EnumProp([
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/complete", "complete"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/incomplete", "incomplete"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/noAssertion", "noAssertion"),
            ]),
            iri="https://spdx.org/rdf/3.0.1/terms/Core/completeness",
            compact="completeness",
        )
        # Specifies the time from which an element is no longer applicable / valid.
        cls._add_property(
            "endTime",
            DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
            iri="https://spdx.org/rdf/3.0.1/terms/Core/endTime",
            compact="endTime",
        )
        # References the Element on the left-hand side of a relationship.
        cls._add_property(
            "from_",
            ObjectProp(Element, True, context=[
                ("https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", "NoneElement"),
                ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"),
                ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", "NoAssertionElement"),
            ],),
            iri="https://spdx.org/rdf/3.0.1/terms/Core/from",
            min_count=1,
            compact="from",
        )
        # Information about the relationship between two Elements.
        cls._add_property(
            "relationshipType",
            EnumProp([
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/affects", "affects"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/amendedBy", "amendedBy"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/ancestorOf", "ancestorOf"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/availableFrom", "availableFrom"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/configures", "configures"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/contains", "contains"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/coordinatedBy", "coordinatedBy"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/copiedTo", "copiedTo"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/delegatedTo", "delegatedTo"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/dependsOn", "dependsOn"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/descendantOf", "descendantOf"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/describes", "describes"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/doesNotAffect", "doesNotAffect"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/expandsTo", "expandsTo"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/exploitCreatedBy", "exploitCreatedBy"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedBy", "fixedBy"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedIn", "fixedIn"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/foundBy", "foundBy"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/generates", "generates"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAddedFile", "hasAddedFile"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssessmentFor", "hasAssessmentFor"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssociatedVulnerability", "hasAssociatedVulnerability"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasConcludedLicense", "hasConcludedLicense"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDataFile", "hasDataFile"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeclaredLicense", "hasDeclaredLicense"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeletedFile", "hasDeletedFile"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDependencyManifest", "hasDependencyManifest"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDistributionArtifact", "hasDistributionArtifact"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDocumentation", "hasDocumentation"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDynamicLink", "hasDynamicLink"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasEvidence", "hasEvidence"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasExample", "hasExample"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasHost", "hasHost"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasInput", "hasInput"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasMetadata", "hasMetadata"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalComponent", "hasOptionalComponent"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalDependency", "hasOptionalDependency"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOutput", "hasOutput"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasPrerequisite", "hasPrerequisite"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasProvidedDependency", "hasProvidedDependency"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasRequirement", "hasRequirement"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasSpecification", "hasSpecification"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasStaticLink", "hasStaticLink"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTest", "hasTest"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTestCase", "hasTestCase"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasVariant", "hasVariant"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/invokedBy", "invokedBy"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/modifiedBy", "modifiedBy"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/other", "other"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/packagedBy", "packagedBy"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/patchedBy", "patchedBy"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/publishedBy", "publishedBy"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/reportedBy", "reportedBy"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/republishedBy", "republishedBy"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/serializedInArtifact", "serializedInArtifact"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/testedOn", "testedOn"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/trainedOn", "trainedOn"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/underInvestigationFor", "underInvestigationFor"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/usesTool", "usesTool"),
            ]),
            iri="https://spdx.org/rdf/3.0.1/terms/Core/relationshipType",
            min_count=1,
            compact="relationshipType",
        )
        # Specifies the time from which an element is applicable / valid.
        cls._add_property(
            "startTime",
            DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
            iri="https://spdx.org/rdf/3.0.1/terms/Core/startTime",
            compact="startTime",
        )
        # References an Element on the right-hand side of a relationship.
        cls._add_property(
            "to",
            ListProp(ObjectProp(Element, False, context=[
                ("https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", "NoneElement"),
                ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"),
                ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"),
                ("https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", "NoAssertionElement"),
            ],)),
            iri="https://spdx.org/rdf/3.0.1/terms/Core/to",
            min_count=1,
            compact="to",
        )

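# Illustrative usage sketch, not part of the generated bindings: it
# assumes keyword-argument construction; the _id value is a hypothetical
# example, and mandatory inherited Element properties (e.g. creationInfo)
# are omitted for brevity although a valid document would require them.
# RelationshipType and RelationshipCompleteness, used below, are the
# vocabulary classes defined later in this module and resolve when the
# function is called.
def _example_relationship(pkg, files):
    # State that a package contains a known-complete set of files.
    return Relationship(
        _id="https://example.com/relationship/pkg-contains-files",
        from_=pkg,
        relationshipType=RelationshipType.contains,
        to=list(files),
        completeness=RelationshipCompleteness.complete,
    )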

# Indicates whether a relationship is known to be complete, incomplete, or if no assertion is made with respect to relationship completeness.
@register("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness", compact_type="RelationshipCompleteness", abstract=False)
class RelationshipCompleteness(SHACLObject):
    NODE_KIND = NodeKind.BlankNodeOrIRI
    NAMED_INDIVIDUALS = {
        "complete": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/complete",
        "incomplete": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/incomplete",
        "noAssertion": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/noAssertion",
    }
    # The relationship is known to be exhaustive.
    complete = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/complete"
    # The relationship is known not to be exhaustive.
    incomplete = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/incomplete"
    # No assertion can be made about the completeness of the relationship.
    noAssertion = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/noAssertion"


# Information about the relationship between two Elements.
@register("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType", compact_type="RelationshipType", abstract=False)
class RelationshipType(SHACLObject):
    NODE_KIND = NodeKind.BlankNodeOrIRI
    NAMED_INDIVIDUALS = {
        "affects": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/affects",
        "amendedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/amendedBy",
        "ancestorOf": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/ancestorOf",
        "availableFrom": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/availableFrom",
        "configures": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/configures",
        "contains": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/contains",
        "coordinatedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/coordinatedBy",
        "copiedTo": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/copiedTo",
        "delegatedTo": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/delegatedTo",
        "dependsOn": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/dependsOn",
        "descendantOf": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/descendantOf",
        "describes": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/describes",
        "doesNotAffect": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/doesNotAffect",
        "expandsTo": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/expandsTo",
        "exploitCreatedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/exploitCreatedBy",
        "fixedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedBy",
        "fixedIn": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedIn",
        "foundBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/foundBy",
        "generates": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/generates",
        "hasAddedFile": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAddedFile",
        "hasAssessmentFor": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssessmentFor",
        "hasAssociatedVulnerability": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssociatedVulnerability",
        "hasConcludedLicense": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasConcludedLicense",
        "hasDataFile": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDataFile",
        "hasDeclaredLicense": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeclaredLicense",
        "hasDeletedFile": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeletedFile",
        "hasDependencyManifest": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDependencyManifest",
        "hasDistributionArtifact": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDistributionArtifact",
        "hasDocumentation": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDocumentation",
        "hasDynamicLink": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDynamicLink",
        "hasEvidence": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasEvidence",
        "hasExample": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasExample",
        "hasHost": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasHost",
        "hasInput": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasInput",
        "hasMetadata": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasMetadata",
        "hasOptionalComponent": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalComponent",
        "hasOptionalDependency": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalDependency",
        "hasOutput": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOutput",
        "hasPrerequisite": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasPrerequisite",
        "hasProvidedDependency": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasProvidedDependency",
        "hasRequirement": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasRequirement",
        "hasSpecification": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasSpecification",
        "hasStaticLink": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasStaticLink",
        "hasTest": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTest",
        "hasTestCase": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTestCase",
        "hasVariant": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasVariant",
        "invokedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/invokedBy",
        "modifiedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/modifiedBy",
        "other": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/other",
        "packagedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/packagedBy",
        "patchedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/patchedBy",
        "publishedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/publishedBy",
        "reportedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/reportedBy",
        "republishedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/republishedBy",
        "serializedInArtifact": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/serializedInArtifact",
        "testedOn": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/testedOn",
        "trainedOn": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/trainedOn",
        "underInvestigationFor": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/underInvestigationFor",
        "usesTool": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/usesTool",
    }
    # The `from` Vulnerability affects each `to` Element. The use of the `affects` type is constrained to `VexAffectedVulnAssessmentRelationship` classed relationships.
    affects = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/affects"
    # The `from` Element is amended by each `to` Element.
    amendedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/amendedBy"
    # The `from` Element is an ancestor of each `to` Element.
    ancestorOf = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/ancestorOf"
    # The `from` Element is available from the additional supplier described by each `to` Element.
    availableFrom = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/availableFrom"
    # The `from` Element is a configuration applied to each `to` Element, during a LifecycleScopeType period.
    configures = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/configures"
    # The `from` Element contains each `to` Element.
    contains = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/contains"
    # The `from` Vulnerability is coordinatedBy the `to` Agent(s) (vendor, researcher, or consumer agent).
    coordinatedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/coordinatedBy"
    # The `from` Element has been copied to each `to` Element.
    copiedTo = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/copiedTo"
    # The `from` Agent is delegating an action to the Agent of the `to` Relationship (which must be of type invokedBy), during a LifecycleScopeType (e.g. the `to` invokedBy Relationship is being done on behalf of `from`).
    delegatedTo = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/delegatedTo"
    # The `from` Element depends on each `to` Element, during a LifecycleScopeType period.
    dependsOn = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/dependsOn"
    # The `from` Element is a descendant of each `to` Element.
    descendantOf = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/descendantOf"
    # The `from` Element describes each `to` Element. To denote the root(s) of a tree of elements in a collection, the rootElement property should be used.
    describes = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/describes"
    # The `from` Vulnerability has no impact on each `to` Element. The use of the `doesNotAffect` type is constrained to `VexNotAffectedVulnAssessmentRelationship` classed relationships.
    doesNotAffect = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/doesNotAffect"
    # The `from` archive expands out as an artifact described by each `to` Element.
    expandsTo = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/expandsTo"
    # The `from` Vulnerability has had an exploit created against it by each `to` Agent.
    exploitCreatedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/exploitCreatedBy"
    # Designates a `from` Vulnerability has been fixed by the `to` Agent(s).
    fixedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedBy"
    # A `from` Vulnerability has been fixed in each `to` Element. The use of the `fixedIn` type is constrained to `VexFixedVulnAssessmentRelationship` classed relationships.
    fixedIn = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedIn"
    # Designates a `from` Vulnerability was originally discovered by the `to` Agent(s).
    foundBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/foundBy"
    # The `from` Element generates each `to` Element.
    generates = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/generates"
    # Every `to` Element is a file added to the `from` Element (`from` hasAddedFile `to`).
    hasAddedFile = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAddedFile"
    # Relates a `from` Vulnerability and each `to` Element with a security assessment. To be used with `VulnAssessmentRelationship` types.
    hasAssessmentFor = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssessmentFor"
    # Used to associate a `from` Artifact with each `to` Vulnerability.
    hasAssociatedVulnerability = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssociatedVulnerability"
    # The `from` SoftwareArtifact is concluded by the SPDX data creator to be governed by each `to` license.
    hasConcludedLicense = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasConcludedLicense"
    # The `from` Element treats each `to` Element as a data file. A data file is an artifact that stores data required or optional for the `from` Element's functionality. A data file can be a database file, an index file, a log file, an AI model file, a calibration data file, a temporary file, a backup file, and more. For AI training dataset, test dataset, test artifact, configuration data, build input data, and build output data, please consider using the more specific relationship types: `trainedOn`, `testedOn`, `hasTest`, `configures`, `hasInput`, and `hasOutput`, respectively. This relationship does not imply dependency.
    hasDataFile = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDataFile"
    # The `from` SoftwareArtifact was discovered to actually contain each `to` license, for example as detected by use of automated tooling.
    hasDeclaredLicense = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeclaredLicense"
    # Every `to` Element is a file deleted from the `from` Element (`from` hasDeletedFile `to`).
    hasDeletedFile = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeletedFile"
    # The `from` Element has manifest files that contain dependency information in each `to` Element.
    hasDependencyManifest = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDependencyManifest"
    # The `from` Element is distributed as an artifact in each `to` Element (e.g. an RPM or archive file).
    hasDistributionArtifact = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDistributionArtifact"
    # The `from` Element is documented by each `to` Element.
    hasDocumentation = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDocumentation"
    # The `from` Element dynamically links in each `to` Element, during a LifecycleScopeType period.
    hasDynamicLink = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDynamicLink"
    # Every `to` Element is considered as evidence for the `from` Element (`from` hasEvidence `to`).
    hasEvidence = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasEvidence"
    # Every `to` Element is an example for the `from` Element (`from` hasExample `to`).
    hasExample = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasExample"
    # The `from` Build was run on the `to` Element during a LifecycleScopeType period (e.g. the host that the build runs on).
    hasHost = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasHost"
    # The `from` Build has each `to` Element as an input, during a LifecycleScopeType period.
    hasInput = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasInput"
    # Every `to` Element is metadata about the `from` Element (`from` hasMetadata `to`).
    hasMetadata = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasMetadata"
    # Every `to` Element is an optional component of the `from` Element (`from` hasOptionalComponent `to`).
    hasOptionalComponent = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalComponent"
    # The `from` Element optionally depends on each `to` Element, during a LifecycleScopeType period.
    hasOptionalDependency = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalDependency"
    # The `from` Build element generates each `to` Element as an output, during a LifecycleScopeType period.
    hasOutput = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOutput"
    # The `from` Element has a prerequisite on each `to` Element, during a LifecycleScopeType period.
    hasPrerequisite = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasPrerequisite"
    # The `from` Element has a dependency on each `to` Element, where the dependency is not in the distributed artifact but is assumed to be provided, during a LifecycleScopeType period.
    hasProvidedDependency = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasProvidedDependency"
    # The `from` Element has a requirement on each `to` Element, during a LifecycleScopeType period.
    hasRequirement = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasRequirement"
    # Every `to` Element is a specification for the `from` Element (`from` hasSpecification `to`), during a LifecycleScopeType period.
    hasSpecification = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasSpecification"
    # The `from` Element statically links in each `to` Element, during a LifecycleScopeType period.
    hasStaticLink = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasStaticLink"
    # Every `to` Element is a test artifact for the `from` Element (`from` hasTest `to`), during a LifecycleScopeType period.
    hasTest = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTest"
    # Every `to` Element is a test case for the `from` Element (`from` hasTestCase `to`).
    hasTestCase = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTestCase"
    # Every `to` Element is a variant of the `from` Element (`from` hasVariant `to`).
    hasVariant = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasVariant"
    # The `from` Element was invoked by the `to` Agent, during a LifecycleScopeType period (for example, a Build element that describes a build step).
    invokedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/invokedBy"
    # The `from` Element is modified by each `to` Element.
    modifiedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/modifiedBy"
    # Every `to` Element is related to the `from` Element where the relationship type is not described by any of the SPDX relationship types (this relationship is directionless).
    other = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/other"
    # Every `to` Element is a packaged instance of the `from` Element (`from` packagedBy `to`).
    packagedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/packagedBy"
    # Every `to` Element is a patch for the `from` Element (`from` patchedBy `to`).
    patchedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/patchedBy"
    # Designates a `from` Vulnerability was made available for public use or reference by each `to` Agent.
    publishedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/publishedBy"
    # Designates a `from` Vulnerability was first reported to a project, vendor, or tracking database for formal identification by each `to` Agent.
    reportedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/reportedBy"
    # Designates a `from` Vulnerability's details were tracked, aggregated, and/or enriched to improve context (i.e. NVD) by each `to` Agent.
    republishedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/republishedBy"
    # The `from` SpdxDocument can be found in a serialized form in each `to` Artifact.
    serializedInArtifact = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/serializedInArtifact"
    # The `from` Element has been tested on the `to` Element(s).
    testedOn = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/testedOn"
    # The `from` Element has been trained on the `to` Element(s).
    trainedOn = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/trainedOn"
    # The `from` Vulnerability impact is being investigated for each `to` Element. The use of the `underInvestigationFor` type is constrained to `VexUnderInvestigationVulnAssessmentRelationship` classed relationships.
    underInvestigationFor = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/underInvestigationFor"
    # The `from` Element uses each `to` Element as a tool, during a LifecycleScopeType period.
    usesTool = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/usesTool"


# A collection of SPDX Elements that could potentially be serialized.
@register("https://spdx.org/rdf/3.0.1/terms/Core/SpdxDocument", compact_type="SpdxDocument", abstract=False)
class SpdxDocument(ElementCollection):
    NODE_KIND = NodeKind.IRI
    ID_ALIAS = "spdxId"
    NAMED_INDIVIDUALS = {
    }

    @classmethod
    def _register_props(cls):
        super()._register_props()
        # Provides the license under which the SPDX documentation of the Element can be
        # used.
        cls._add_property(
            "dataLicense",
            ObjectProp(simplelicensing_AnyLicenseInfo, False, context=[
                ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"),
                ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"),
            ],),
            iri="https://spdx.org/rdf/3.0.1/terms/Core/dataLicense",
            compact="dataLicense",
        )
        # Provides an ExternalMap of Element identifiers.
        cls._add_property(
            "import_",
            ListProp(ObjectProp(ExternalMap, False)),
            iri="https://spdx.org/rdf/3.0.1/terms/Core/import",
            compact="import",
        )
        # Provides a NamespaceMap of prefixes and associated namespace partial URIs applicable to an SpdxDocument and independent of any specific serialization format or instance.
        cls._add_property(
            "namespaceMap",
            ListProp(ObjectProp(NamespaceMap, False)),
            iri="https://spdx.org/rdf/3.0.1/terms/Core/namespaceMap",
            compact="namespaceMap",
        )

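# Illustrative usage sketch, not part of the generated bindings: it
# assumes keyword-argument construction with hypothetical IDs. The
# rootElement and element properties are inherited from ElementCollection
# (defined earlier in this module), and creationInfo is again omitted for
# brevity.
def _example_spdx_document(root, elements):
    return SpdxDocument(
        _id="https://example.com/spdx-document",
        rootElement=[root],
        element=list(elements),
        namespaceMap=[
            NamespaceMap(prefix="ex", namespace="https://example.com/"),
        ],
    )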

# Indicates the type of support that is associated with an artifact.
@register("https://spdx.org/rdf/3.0.1/terms/Core/SupportType", compact_type="SupportType", abstract=False)
class SupportType(SHACLObject):
    NODE_KIND = NodeKind.BlankNodeOrIRI
    NAMED_INDIVIDUALS = {
        "deployed": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/deployed",
        "development": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/development",
        "endOfSupport": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/endOfSupport",
        "limitedSupport": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/limitedSupport",
        "noAssertion": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noAssertion",
        "noSupport": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noSupport",
        "support": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/support",
    }
    # in addition to being supported by the supplier, the software is known to have been deployed and is in use. For a software as a service provider, this implies the software is now available as a service.
    deployed = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/deployed"
    # the artifact is in active development and is not considered ready for formal support from the supplier.
    development = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/development"
    # there is a defined end of support for the artifact from the supplier. This may also be referred to as end of life. There is a validUntilDate that can be used to signal when support ends for the artifact.
    endOfSupport = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/endOfSupport"
    # the artifact has been released, and there is limited support available from the supplier. There is a validUntilDate that can provide additional information about the duration of support.
    limitedSupport = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/limitedSupport"
    # no assertion about the type of support is made. This is considered the default if no other support type is used.
    noAssertion = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noAssertion"
    # there is no support for the artifact from the supplier; the consumer assumes any support obligations.
    noSupport = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noSupport"
    # the artifact has been released, and is supported from the supplier. There is a validUntilDate that can provide additional information about the duration of support.
    support = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/support"


# An element of hardware and/or software utilized to carry out a particular function.
@register("https://spdx.org/rdf/3.0.1/terms/Core/Tool", compact_type="Tool", abstract=False)
class Tool(Element):
    NODE_KIND = NodeKind.IRI
    ID_ALIAS = "spdxId"
    NAMED_INDIVIDUALS = {
    }

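# Illustrative usage sketch, not part of the generated bindings: Tool
# adds no properties of its own, so everything used below (e.g. name) is
# inherited from Element; the _id and name values are hypothetical.
def _example_tool():
    return Tool(
        _id="https://example.com/tool/bitbake",
        name="bitbake",
    )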

# Categories of confidentiality level.
@register("https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType", compact_type="dataset_ConfidentialityLevelType", abstract=False)
class dataset_ConfidentialityLevelType(SHACLObject):
    NODE_KIND = NodeKind.BlankNodeOrIRI
    NAMED_INDIVIDUALS = {
        "amber": "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/amber",
        "clear": "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/clear",
        "green": "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/green",
        "red": "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/red",
    }
    # Data points in the dataset can be shared only with specific organizations and their clients on a need to know basis.
    amber = "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/amber"
    # Dataset may be distributed freely, without restriction.
    clear = "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/clear"
    # Dataset can be shared within a community of peers and partners.
    green = "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/green"
    # Data points in the dataset are highly confidential and can only be shared with named recipients.
    red = "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/red"


# Availability of dataset.
@register("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType", compact_type="dataset_DatasetAvailabilityType", abstract=False)
class dataset_DatasetAvailabilityType(SHACLObject):
    NODE_KIND = NodeKind.BlankNodeOrIRI
    NAMED_INDIVIDUALS = {
        "clickthrough": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/clickthrough",
        "directDownload": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/directDownload",
        "query": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/query",
        "registration": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/registration",
        "scrapingScript": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/scrapingScript",
    }
    # the dataset is not publicly available and can only be accessed after affirmatively accepting terms on a clickthrough webpage.
    clickthrough = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/clickthrough"
    # the dataset is publicly available and can be downloaded directly.
    directDownload = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/directDownload"
    # the dataset is publicly available, but not all at once, and can only be accessed through queries which return parts of the dataset.
    query = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/query"
    # the dataset is not publicly available and an email registration is required before accessing the dataset, although without an affirmative acceptance of terms.
    registration = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/registration"
    # the dataset provider is not making available the underlying data and the dataset must be reassembled, typically using the provided script for scraping the data.
    scrapingScript = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/scrapingScript"


# Enumeration of dataset types.
@register("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType", compact_type="dataset_DatasetType", abstract=False)
class dataset_DatasetType(SHACLObject):
    NODE_KIND = NodeKind.BlankNodeOrIRI
    NAMED_INDIVIDUALS = {
        "audio": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/audio",
        "categorical": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/categorical",
        "graph": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/graph",
        "image": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/image",
        "noAssertion": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/noAssertion",
        "numeric": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/numeric",
        "other": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/other",
        "sensor": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/sensor",
        "structured": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/structured",
        "syntactic": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/syntactic",
        "text": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/text",
        "timeseries": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timeseries",
        "timestamp": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timestamp",
        "video": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/video",
    }
    # data is audio based, such as a collection of music from the 80s.
    audio = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/audio"
    # data that is classified into a discrete number of categories, such as the eye color of a population of people.
    categorical = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/categorical"
    # data is in the form of a graph where entries are somehow related to each other through edges, such as a social network of friends.
    graph = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/graph"
    # data is a collection of images such as pictures of animals.
    image = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/image"
    # data type is not known.
    noAssertion = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/noAssertion"
    # data consists only of numeric entries.
    numeric = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/numeric"
    # data is of a type not included in this list.
    other = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/other"
    # data is recorded from a physical sensor, such as a thermometer reading or biometric device.
    sensor = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/sensor"
    # data is stored in tabular format or retrieved from a relational database.
    structured = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/structured"
    # data describes the syntax or semantics of a language or text, such as a parse tree used for natural language processing.
    syntactic = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/syntactic"
    # data consists of unstructured text, such as a book, Wikipedia article (without images), or transcript.
    text = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/text"
    # data is recorded in an ordered sequence of timestamped entries, such as the price of a stock over the course of a day.
    timeseries = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timeseries"
    # data is recorded with a timestamp for each entry, but not necessarily ordered or at specific intervals, such as when a taxi ride starts and ends.
    timestamp = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timestamp"
    # data is video based, such as a collection of movie clips featuring Tom Hanks.
    video = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/video"


# Abstract class for additional text intended to be added to a License, but
# which is not itself a standalone License.
@register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/LicenseAddition", compact_type="expandedlicensing_LicenseAddition", abstract=True)
class expandedlicensing_LicenseAddition(Element):
    NODE_KIND = NodeKind.IRI
    ID_ALIAS = "spdxId"
    NAMED_INDIVIDUALS = {
    }

    @classmethod
    def _register_props(cls):
        super()._register_props()
        # Identifies the full text of a LicenseAddition.
        cls._add_property(
            "expandedlicensing_additionText",
            StringProp(),
            iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/additionText",
            min_count=1,
            compact="expandedlicensing_additionText",
        )
        # Specifies whether an additional text identifier has been marked as deprecated.
        cls._add_property(
            "expandedlicensing_isDeprecatedAdditionId",
            BooleanProp(),
            iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/isDeprecatedAdditionId",
            compact="expandedlicensing_isDeprecatedAdditionId",
        )
        # Identifies all the text and metadata associated with a license in the license
        # XML format.
        cls._add_property(
            "expandedlicensing_licenseXml",
            StringProp(),
            iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/licenseXml",
            compact="expandedlicensing_licenseXml",
        )
        # Specifies the licenseId that is preferred to be used in place of a deprecated
        # License or LicenseAddition.
        cls._add_property(
            "expandedlicensing_obsoletedBy",
            StringProp(),
            iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/obsoletedBy",
            compact="expandedlicensing_obsoletedBy",
        )
        # Contains a URL where the License or LicenseAddition can be found in use.
        cls._add_property(
            "expandedlicensing_seeAlso",
            ListProp(AnyURIProp()),
            iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/seeAlso",
            compact="expandedlicensing_seeAlso",
        )
        # Identifies the full text of a LicenseAddition, in SPDX templating format.
        cls._add_property(
            "expandedlicensing_standardAdditionTemplate",
            StringProp(),
            iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/standardAdditionTemplate",
            compact="expandedlicensing_standardAdditionTemplate",
        )


# A license exception that is listed on the SPDX Exceptions list.
@register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/ListedLicenseException", compact_type="expandedlicensing_ListedLicenseException", abstract=False)
class expandedlicensing_ListedLicenseException(expandedlicensing_LicenseAddition):
    NODE_KIND = NodeKind.IRI
    ID_ALIAS = "spdxId"
    NAMED_INDIVIDUALS = {
    }

    @classmethod
    def _register_props(cls):
        super()._register_props()
        # Specifies the SPDX License List version in which this license or exception
        # identifier was deprecated.
        cls._add_property(
            "expandedlicensing_deprecatedVersion",
            StringProp(),
            iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/deprecatedVersion",
            compact="expandedlicensing_deprecatedVersion",
        )
        # Specifies the SPDX License List version in which this ListedLicense or
        # ListedLicenseException identifier was first added.
        cls._add_property(
            "expandedlicensing_listVersionAdded",
            StringProp(),
            iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/listVersionAdded",
            compact="expandedlicensing_listVersionAdded",
        )

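# Illustrative usage sketch, not part of the generated bindings: it
# assumes keyword-argument construction. The _id, exception text, and
# list version below are hypothetical placeholders, not real SPDX
# Exceptions list data; additionText is inherited from LicenseAddition.
def _example_listed_license_exception():
    return expandedlicensing_ListedLicenseException(
        _id="https://example.com/licenses/Example-exception",
        expandedlicensing_additionText="Example exception text ...",
        expandedlicensing_listVersionAdded="3.20",
    )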

# A property name with an associated value.
@register("https://spdx.org/rdf/3.0.1/terms/Extension/CdxPropertyEntry", compact_type="extension_CdxPropertyEntry", abstract=False)
class extension_CdxPropertyEntry(SHACLObject):
    NODE_KIND = NodeKind.BlankNodeOrIRI
    NAMED_INDIVIDUALS = {
    }

    @classmethod
    def _register_props(cls):
        super()._register_props()
        # A name used in a CdxPropertyEntry name-value pair.
        cls._add_property(
            "extension_cdxPropName",
            StringProp(),
            iri="https://spdx.org/rdf/3.0.1/terms/Extension/cdxPropName",
            min_count=1,
            compact="extension_cdxPropName",
        )
        # A value used in a CdxPropertyEntry name-value pair.
        cls._add_property(
            "extension_cdxPropValue",
            StringProp(),
            iri="https://spdx.org/rdf/3.0.1/terms/Extension/cdxPropValue",
            compact="extension_cdxPropValue",
        )

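# Illustrative usage sketch, not part of the generated bindings: a
# CycloneDX-style name/value pair, assuming keyword-argument
# construction; both values are hypothetical examples.
def _example_cdx_property_entry():
    return extension_CdxPropertyEntry(
        extension_cdxPropName="example:build:tool",
        extension_cdxPropValue="example-value",
    )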

# A characterization of some aspect of an Element that is associated with the Element in a generalized fashion.
@register("https://spdx.org/rdf/3.0.1/terms/Extension/Extension", compact_type="extension_Extension", abstract=True)
class extension_Extension(SHACLExtensibleObject, SHACLObject):
    NODE_KIND = NodeKind.BlankNodeOrIRI
    NAMED_INDIVIDUALS = {
    }


# Specifies the CVSS base, temporal, threat, or environmental severity type.
@register("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType", compact_type="security_CvssSeverityType", abstract=False)
class security_CvssSeverityType(SHACLObject):
    NODE_KIND = NodeKind.BlankNodeOrIRI
    NAMED_INDIVIDUALS = {
        "critical": "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/critical",
        "high": "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/high",
        "low": "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/low",
        "medium": "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/medium",
        "none": "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/none",
    }
    # When a CVSS score is between 9.0 - 10.0
    critical = "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/critical"
    # When a CVSS score is between 7.0 - 8.9
    high = "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/high"
    # When a CVSS score is between 0.1 - 3.9
    low = "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/low"
    # When a CVSS score is between 4.0 - 6.9
    medium = "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/medium"
    # When a CVSS score is 0.0
    none = "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/none"

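# Illustrative helper, not part of the generated bindings: it maps a
# CVSS score to the severity constants above, following the ranges
# documented in the comments (0.0 none, 0.1-3.9 low, 4.0-6.9 medium,
# 7.0-8.9 high, 9.0-10.0 critical).
def _example_cvss_severity(score):
    if score == 0.0:
        return security_CvssSeverityType.none
    if score <= 3.9:
        return security_CvssSeverityType.low
    if score <= 6.9:
        return security_CvssSeverityType.medium
    if score <= 8.9:
        return security_CvssSeverityType.high
    return security_CvssSeverityType.critical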
3671
3672# Specifies the exploit catalog type.
3673@register("https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType", compact_type="security_ExploitCatalogType", abstract=False)
3674class security_ExploitCatalogType(SHACLObject):
3675 NODE_KIND = NodeKind.BlankNodeOrIRI
3676 NAMED_INDIVIDUALS = {
3677 "kev": "https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/kev",
3678 "other": "https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/other",
3679 }
3680 # CISA's Known Exploited Vulnerability (KEV) Catalog
3681 kev = "https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/kev"
3682 # Other exploit catalogs
3683 other = "https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/other"
3684
3685
3686# Specifies the SSVC decision type.
3687@register("https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType", compact_type="security_SsvcDecisionType", abstract=False)
3688class security_SsvcDecisionType(SHACLObject):
3689 NODE_KIND = NodeKind.BlankNodeOrIRI
3690 NAMED_INDIVIDUALS = {
3691 "act": "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/act",
3692 "attend": "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/attend",
3693 "track": "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/track",
3694 "trackStar": "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/trackStar",
3695 }
3696 # The vulnerability requires attention from the organization's internal, supervisory-level and leadership-level individuals. Necessary actions include requesting assistance or information about the vulnerability, as well as publishing a notification internally and/or externally. Typically, internal groups would meet to determine the overall response and then execute agreed-upon actions. CISA recommends remediating Act vulnerabilities as soon as possible.
3697 act = "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/act"
3698 # The vulnerability requires attention from the organization's internal, supervisory-level individuals. Necessary actions include requesting assistance or information about the vulnerability, and may involve publishing a notification internally and/or externally. CISA recommends remediating Attend vulnerabilities sooner than standard update timelines.
3699 attend = "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/attend"
3700 # The vulnerability does not require action at this time. The organization would continue to track the vulnerability and reassess it if new information becomes available. CISA recommends remediating Track vulnerabilities within standard update timelines.
3701 track = "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/track"
3702 # ("Track*" in the SSVC spec) The vulnerability contains specific characteristics that may require closer monitoring for changes. CISA recommends remediating Track* vulnerabilities within standard update timelines.
3703 trackStar = "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/trackStar"
3704
3705
3706# Specifies the VEX justification type.
3707@register("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType", compact_type="security_VexJustificationType", abstract=False)
3708class security_VexJustificationType(SHACLObject):
3709 NODE_KIND = NodeKind.BlankNodeOrIRI
3710 NAMED_INDIVIDUALS = {
3711 "componentNotPresent": "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/componentNotPresent",
3712 "inlineMitigationsAlreadyExist": "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/inlineMitigationsAlreadyExist",
3713 "vulnerableCodeCannotBeControlledByAdversary": "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeCannotBeControlledByAdversary",
3714 "vulnerableCodeNotInExecutePath": "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotInExecutePath",
3715 "vulnerableCodeNotPresent": "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotPresent",
3716 }
3717 # The software is not affected because the vulnerable component is not in the product.
3718 componentNotPresent = "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/componentNotPresent"
3719 # Built-in inline controls or mitigations prevent an adversary from leveraging the vulnerability.
3720 inlineMitigationsAlreadyExist = "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/inlineMitigationsAlreadyExist"
3721 # The vulnerable component is present, and the component contains the vulnerable code. However, vulnerable code is used in such a way that an attacker cannot mount any anticipated attack.
3722 vulnerableCodeCannotBeControlledByAdversary = "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeCannotBeControlledByAdversary"
3723 # The affected code is not reachable through the execution of the code, including non-anticipated states of the product.
3724 vulnerableCodeNotInExecutePath = "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotInExecutePath"
3725 # The product is not affected because the code underlying the vulnerability is not present in the product.
3726 vulnerableCodeNotPresent = "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotPresent"
3727
3728
3729# Abstract ancestor class for all vulnerability assessments
3730@register("https://spdx.org/rdf/3.0.1/terms/Security/VulnAssessmentRelationship", compact_type="security_VulnAssessmentRelationship", abstract=True)
3731class security_VulnAssessmentRelationship(Relationship):
3732 NODE_KIND = NodeKind.IRI
3733 ID_ALIAS = "spdxId"
3734 NAMED_INDIVIDUALS = {
3735 }
3736
3737 @classmethod
3738 def _register_props(cls):
3739 super()._register_props()
3740 # Identifies who or what supplied the artifact or VulnAssessmentRelationship
3741 # referenced by the Element.
3742 cls._add_property(
3743 "suppliedBy",
3744 ObjectProp(Agent, False, context=[
3745 ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"),
3746 ],),
3747 iri="https://spdx.org/rdf/3.0.1/terms/Core/suppliedBy",
3748 compact="suppliedBy",
3749 )
3750 # Specifies an Element contained in a piece of software where a vulnerability was
3751 # found.
3752 cls._add_property(
3753 "security_assessedElement",
3754 ObjectProp(software_SoftwareArtifact, False),
3755 iri="https://spdx.org/rdf/3.0.1/terms/Security/assessedElement",
3756 compact="security_assessedElement",
3757 )
3758 # Specifies a time when a vulnerability assessment was modified
3759 cls._add_property(
3760 "security_modifiedTime",
3761 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
3762 iri="https://spdx.org/rdf/3.0.1/terms/Security/modifiedTime",
3763 compact="security_modifiedTime",
3764 )
3765 # Specifies the time when a vulnerability was published.
3766 cls._add_property(
3767 "security_publishedTime",
3768 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
3769 iri="https://spdx.org/rdf/3.0.1/terms/Security/publishedTime",
3770 compact="security_publishedTime",
3771 )
3772 # Specifies the time and date when a vulnerability was withdrawn.
3773 cls._add_property(
3774 "security_withdrawnTime",
3775 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
3776 iri="https://spdx.org/rdf/3.0.1/terms/Security/withdrawnTime",
3777 compact="security_withdrawnTime",
3778 )
3779
3780
3781# Abstract class representing a license combination consisting of one or more licenses.
3782@register("https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/AnyLicenseInfo", compact_type="simplelicensing_AnyLicenseInfo", abstract=True)
3783class simplelicensing_AnyLicenseInfo(Element):
3784 NODE_KIND = NodeKind.IRI
3785 ID_ALIAS = "spdxId"
3786 NAMED_INDIVIDUALS = {
3787 }
3788
3789
3790# An SPDX Element containing an SPDX license expression string.
3791@register("https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/LicenseExpression", compact_type="simplelicensing_LicenseExpression", abstract=False)
3792class simplelicensing_LicenseExpression(simplelicensing_AnyLicenseInfo):
3793 NODE_KIND = NodeKind.IRI
3794 ID_ALIAS = "spdxId"
3795 NAMED_INDIVIDUALS = {
3796 }
3797
3798 @classmethod
3799 def _register_props(cls):
3800 super()._register_props()
3801 # Maps a LicenseRef or AdditionRef string for a Custom License or a Custom
3802 # License Addition to its URI ID.
3803 cls._add_property(
3804 "simplelicensing_customIdToUri",
3805 ListProp(ObjectProp(DictionaryEntry, False)),
3806 iri="https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/customIdToUri",
3807 compact="simplelicensing_customIdToUri",
3808 )
3809 # A string in the license expression format.
3810 cls._add_property(
3811 "simplelicensing_licenseExpression",
3812 StringProp(),
3813 iri="https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/licenseExpression",
3814 min_count=1,
3815 compact="simplelicensing_licenseExpression",
3816 )
3817 # The version of the SPDX License List used in the license expression.
3818 cls._add_property(
3819 "simplelicensing_licenseListVersion",
3820 StringProp(pattern=r"^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$",),
3821 iri="https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/licenseListVersion",
3822 compact="simplelicensing_licenseListVersion",
3823 )
3824
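# Editorial sketch, not part of the generated bindings: building a minimal
# LicenseExpression. Assumes registered properties are settable as plain
# attributes and that the spdxId ID alias is assignable, as other code in
# meta/lib/oe (e.g. sbom30.py) uses these objects.
def _example_license_expression():
    lic = simplelicensing_LicenseExpression()
    lic.spdxId = "http://example.org/spdx/license-expression-1"  # hypothetical IRI
    lic.simplelicensing_licenseExpression = "MIT OR Apache-2.0"
    # Optional; must match the semantic-version pattern enforced above.
    lic.simplelicensing_licenseListVersion = "3.24.0"
    return lic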
3825
3826# A license or addition that is not listed on the SPDX License List.
3827@register("https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/SimpleLicensingText", compact_type="simplelicensing_SimpleLicensingText", abstract=False)
3828class simplelicensing_SimpleLicensingText(Element):
3829 NODE_KIND = NodeKind.IRI
3830 ID_ALIAS = "spdxId"
3831 NAMED_INDIVIDUALS = {
3832 }
3833
3834 @classmethod
3835 def _register_props(cls):
3836 super()._register_props()
3837 # Identifies the full text of a License or Addition.
3838 cls._add_property(
3839 "simplelicensing_licenseText",
3840 StringProp(),
3841 iri="https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/licenseText",
3842 min_count=1,
3843 compact="simplelicensing_licenseText",
3844 )
3845
3846
3847# A canonical, unique, immutable identifier
3848@register("https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifier", compact_type="software_ContentIdentifier", abstract=False)
3849class software_ContentIdentifier(IntegrityMethod):
3850 NODE_KIND = NodeKind.BlankNodeOrIRI
3851 NAMED_INDIVIDUALS = {
3852 }
3853
3854 @classmethod
3855 def _register_props(cls):
3856 super()._register_props()
3857 # Specifies the type of the content identifier.
3858 cls._add_property(
3859 "software_contentIdentifierType",
3860 EnumProp([
3861 ("https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/gitoid", "gitoid"),
3862 ("https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/swhid", "swhid"),
3863 ]),
3864 iri="https://spdx.org/rdf/3.0.1/terms/Software/contentIdentifierType",
3865 min_count=1,
3866 compact="software_contentIdentifierType",
3867 )
3868 # Specifies the value of the content identifier.
3869 cls._add_property(
3870 "software_contentIdentifierValue",
3871 AnyURIProp(),
3872 iri="https://spdx.org/rdf/3.0.1/terms/Software/contentIdentifierValue",
3873 min_count=1,
3874 compact="software_contentIdentifierValue",
3875 )
3876
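# Editorial sketch, not part of the generated bindings: recording a SWHID as a
# ContentIdentifier. ContentIdentifier is a blank-node-capable IntegrityMethod,
# so no spdxId is required. The SWHID value below is a made-up example.
def _example_content_identifier():
    cid = software_ContentIdentifier()
    cid.software_contentIdentifierType = software_ContentIdentifierType.swhid
    cid.software_contentIdentifierValue = (
        "swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2"
    )
    return cid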
3877
3878# Specifies the type of a content identifier.
3879@register("https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType", compact_type="software_ContentIdentifierType", abstract=False)
3880class software_ContentIdentifierType(SHACLObject):
3881 NODE_KIND = NodeKind.BlankNodeOrIRI
3882 NAMED_INDIVIDUALS = {
3883 "gitoid": "https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/gitoid",
3884 "swhid": "https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/swhid",
3885 }
3886 # [Gitoid](https://www.iana.org/assignments/uri-schemes/prov/gitoid) stands for [Git Object ID](https://git-scm.com/book/en/v2/Git-Internals-Git-Objects). A gitoid of type blob is a unique hash of a binary artifact. A gitoid may represent either an [Artifact Identifier](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-identifier-types) for the software artifact or an [Input Manifest Identifier](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#input-manifest-identifier) for the software artifact's associated [Artifact Input Manifest](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-input-manifest); this ambiguity exists because the Artifact Input Manifest is itself an artifact, and the gitoid of that artifact is its valid identifier. Gitoids calculated on software artifacts (Snippet, File, or Package Elements) should be recorded in the SPDX 3.0 SoftwareArtifact's contentIdentifier property. Gitoids calculated on the Artifact Input Manifest (Input Manifest Identifier) should be recorded in the SPDX 3.0 Element's externalIdentifier property. See [OmniBOR Specification](https://github.com/omnibor/spec/), a minimalistic specification for describing software [Artifact Dependency Graphs](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-dependency-graph-adg).
3887 gitoid = "https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/gitoid"
3888 # SoftWare Hash IDentifier, a persistent intrinsic identifier for digital artifacts, such as files, trees (also known as directories or folders), commits, and other objects typically found in version control systems. The format of the identifiers is defined in the [SWHID specification](https://www.swhid.org/specification/v1.1/4.Syntax) (ISO/IEC DIS 18670). They typically look like `swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2`.
3889 swhid = "https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/swhid"
3890
3891
3892# Enumeration of the different kinds of SPDX file.
3893@register("https://spdx.org/rdf/3.0.1/terms/Software/FileKindType", compact_type="software_FileKindType", abstract=False)
3894class software_FileKindType(SHACLObject):
3895 NODE_KIND = NodeKind.BlankNodeOrIRI
3896 NAMED_INDIVIDUALS = {
3897 "directory": "https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/directory",
3898 "file": "https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/file",
3899 }
3900 # The file represents a directory and all content stored in that directory.
3901 directory = "https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/directory"
3902 # The file represents a single file (default).
3903 file = "https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/file"
3904
3905
3906# Provides a set of values to be used to describe the common types of SBOMs that
3907# tools may create.
3908@register("https://spdx.org/rdf/3.0.1/terms/Software/SbomType", compact_type="software_SbomType", abstract=False)
3909class software_SbomType(SHACLObject):
3910 NODE_KIND = NodeKind.BlankNodeOrIRI
3911 NAMED_INDIVIDUALS = {
3912 "analyzed": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/analyzed",
3913 "build": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/build",
3914 "deployed": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/deployed",
3915 "design": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/design",
3916 "runtime": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/runtime",
3917 "source": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/source",
3918 }
3919 # SBOM generated through analysis of artifacts (e.g., executables, packages, containers, and virtual machine images) after they have been built. Such analysis generally requires a variety of heuristics. In some contexts, this may also be referred to as a "3rd party" SBOM.
3920 analyzed = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/analyzed"
3921 # SBOM generated as part of the process of building the software to create a releasable artifact (e.g., executable or package) from data such as source files, dependencies, built components, build process ephemeral data, and other SBOMs.
3922 build = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/build"
3923 # SBOM providing an inventory of software that is present on a system. This may be an assembly of other SBOMs that combines analysis of configuration options and examination of execution behavior in a (potentially simulated) deployment environment.
3924 deployed = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/deployed"
3925 # SBOM of an intended, planned software project or product with included components (some of which may not yet exist) for a new software artifact.
3926 design = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/design"
3927 # SBOM generated through instrumenting the system running the software, to capture only components present in the system, as well as external call-outs or dynamically loaded components. In some contexts, this may also be referred to as an "Instrumented" or "Dynamic" SBOM.
3928 runtime = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/runtime"
3929 # SBOM created directly from the development environment, source files, and included dependencies used to build a product artifact.
3930 source = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/source"
3931
3932
3933# Provides information about the primary purpose of an Element.
3934@register("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose", compact_type="software_SoftwarePurpose", abstract=False)
3935class software_SoftwarePurpose(SHACLObject):
3936 NODE_KIND = NodeKind.BlankNodeOrIRI
3937 NAMED_INDIVIDUALS = {
3938 "application": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/application",
3939 "archive": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/archive",
3940 "bom": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/bom",
3941 "configuration": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/configuration",
3942 "container": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/container",
3943 "data": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/data",
3944 "device": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/device",
3945 "deviceDriver": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/deviceDriver",
3946 "diskImage": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/diskImage",
3947 "documentation": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/documentation",
3948 "evidence": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/evidence",
3949 "executable": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/executable",
3950 "file": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/file",
3951 "filesystemImage": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/filesystemImage",
3952 "firmware": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/firmware",
3953 "framework": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/framework",
3954 "install": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/install",
3955 "library": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/library",
3956 "manifest": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/manifest",
3957 "model": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/model",
3958 "module": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/module",
3959 "operatingSystem": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/operatingSystem",
3960 "other": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/other",
3961 "patch": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/patch",
3962 "platform": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/platform",
3963 "requirement": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/requirement",
3964 "source": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/source",
3965 "specification": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/specification",
3966 "test": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/test",
3967 }
3968 # The Element is a software application.
3969 application = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/application"
3970 # The Element is an archived collection of one or more files (.tar, .zip, etc.).
3971 archive = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/archive"
3972 # The Element is a bill of materials.
3973 bom = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/bom"
3974 # The Element is configuration data.
3975 configuration = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/configuration"
3976 # The Element is a container image which can be used by a container runtime application.
3977 container = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/container"
3978 # The Element is data.
3979 data = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/data"
3980 # The Element refers to a chipset, processor, or electronic board.
3981 device = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/device"
3982 # The Element represents software that controls hardware devices.
3983 deviceDriver = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/deviceDriver"
3984 # The Element refers to a disk image that can be written to a disk, booted in a VM, etc. A disk image typically contains most or all of the components necessary to boot, such as bootloaders, kernels, firmware, userspace, etc.
3985 diskImage = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/diskImage"
3986 # The Element is documentation.
3987 documentation = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/documentation"
3988 # The Element is the evidence that a specification or requirement has been fulfilled.
3989 evidence = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/evidence"
3990 # The Element is an Artifact that can be run on a computer.
3991 executable = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/executable"
3992 # The Element is a single file which can be independently distributed (configuration file, statically linked binary, Kubernetes deployment, etc.).
3993 file = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/file"
3994 # The Element is a file system image that can be written to a physical or virtual disk partition.
3995 filesystemImage = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/filesystemImage"
3996 # The Element provides low level control over a device's hardware.
3997 firmware = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/firmware"
3998 # The Element is a software framework.
3999 framework = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/framework"
4000 # The Element is used to install software on disk.
4001 install = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/install"
4002 # The Element is a software library.
4003 library = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/library"
4004 # The Element is a software manifest.
4005 manifest = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/manifest"
4006 # The Element is a machine learning or artificial intelligence model.
4007 model = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/model"
4008 # The Element is a module of a piece of software.
4009 module = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/module"
4010 # The Element is an operating system.
4011 operatingSystem = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/operatingSystem"
4012 # The Element doesn't fit into any of the other categories.
4013 other = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/other"
4014 # The Element contains a set of changes to update, fix, or improve another Element.
4015 patch = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/patch"
4016 # The Element represents a runtime environment.
4017 platform = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/platform"
4018 # The Element provides a requirement needed as input for another Element.
4019 requirement = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/requirement"
4020 # The Element is a single or a collection of source files.
4021 source = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/source"
4022 # The Element is a plan, guideline, or strategy for how to create, perform, or analyze an application.
4023 specification = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/specification"
4024 # The Element is a test used to verify functionality of a software element.
4025 test = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/test"
4026
4027
4028# Class that describes a build instance of software/artifacts.
4029@register("https://spdx.org/rdf/3.0.1/terms/Build/Build", compact_type="build_Build", abstract=False)
4030class build_Build(Element):
4031 NODE_KIND = NodeKind.IRI
4032 ID_ALIAS = "spdxId"
4033 NAMED_INDIVIDUALS = {
4034 }
4035
4036 @classmethod
4037 def _register_props(cls):
4038 super()._register_props()
4039 # Property that describes the time at which a build stops.
4040 cls._add_property(
4041 "build_buildEndTime",
4042 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
4043 iri="https://spdx.org/rdf/3.0.1/terms/Build/buildEndTime",
4044 compact="build_buildEndTime",
4045 )
4046 # A buildId is a locally unique identifier used by a builder to identify a unique
4047 # instance of a build produced by it.
4048 cls._add_property(
4049 "build_buildId",
4050 StringProp(),
4051 iri="https://spdx.org/rdf/3.0.1/terms/Build/buildId",
4052 compact="build_buildId",
4053 )
4054 # Property describing the start time of a build.
4055 cls._add_property(
4056 "build_buildStartTime",
4057 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
4058 iri="https://spdx.org/rdf/3.0.1/terms/Build/buildStartTime",
4059 compact="build_buildStartTime",
4060 )
4061 # A buildType is a hint that is used to indicate the toolchain, platform, or
4062 # infrastructure that the build was invoked on.
4063 cls._add_property(
4064 "build_buildType",
4065 AnyURIProp(),
4066 iri="https://spdx.org/rdf/3.0.1/terms/Build/buildType",
4067 min_count=1,
4068 compact="build_buildType",
4069 )
4070 # Property that describes the digest of the build configuration file used to
4071 # invoke a build.
4072 cls._add_property(
4073 "build_configSourceDigest",
4074 ListProp(ObjectProp(Hash, False)),
4075 iri="https://spdx.org/rdf/3.0.1/terms/Build/configSourceDigest",
4076 compact="build_configSourceDigest",
4077 )
4078 # Property that describes the invocation entrypoint of a build.
4079 cls._add_property(
4080 "build_configSourceEntrypoint",
4081 ListProp(StringProp()),
4082 iri="https://spdx.org/rdf/3.0.1/terms/Build/configSourceEntrypoint",
4083 compact="build_configSourceEntrypoint",
4084 )
4085 # Property that describes the URI of the build configuration source file.
4086 cls._add_property(
4087 "build_configSourceUri",
4088 ListProp(AnyURIProp()),
4089 iri="https://spdx.org/rdf/3.0.1/terms/Build/configSourceUri",
4090 compact="build_configSourceUri",
4091 )
4092 # Property describing the session in which a build is invoked.
4093 cls._add_property(
4094 "build_environment",
4095 ListProp(ObjectProp(DictionaryEntry, False)),
4096 iri="https://spdx.org/rdf/3.0.1/terms/Build/environment",
4097 compact="build_environment",
4098 )
4099 # Property describing a parameter used in an instance of a build.
4100 cls._add_property(
4101 "build_parameter",
4102 ListProp(ObjectProp(DictionaryEntry, False)),
4103 iri="https://spdx.org/rdf/3.0.1/terms/Build/parameter",
4104 compact="build_parameter",
4105 )
4106
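# Editorial sketch, not part of the generated bindings: a minimal build_Build.
# buildType is the only mandatory property (min_count=1); the IRIs and the
# buildId below are hypothetical examples, not registered build types.
def _example_build():
    build = build_Build()
    build.spdxId = "http://example.org/spdx/build-1"  # hypothetical IRI
    build.build_buildType = "http://example.org/build-types/bitbake"
    build.build_buildId = "20240101T000000-run-42"
    return build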
4107
4108# Agent represents anything with the potential to act on a system.
4109@register("https://spdx.org/rdf/3.0.1/terms/Core/Agent", compact_type="Agent", abstract=False)
4110class Agent(Element):
4111 NODE_KIND = NodeKind.IRI
4112 ID_ALIAS = "spdxId"
4113 NAMED_INDIVIDUALS = {
4114 }
4115
4116
4117# An assertion made in relation to one or more elements.
4118@register("https://spdx.org/rdf/3.0.1/terms/Core/Annotation", compact_type="Annotation", abstract=False)
4119class Annotation(Element):
4120 NODE_KIND = NodeKind.IRI
4121 ID_ALIAS = "spdxId"
4122 NAMED_INDIVIDUALS = {
4123 }
4124
4125 @classmethod
4126 def _register_props(cls):
4127 super()._register_props()
4128 # Describes the type of annotation.
4129 cls._add_property(
4130 "annotationType",
4131 EnumProp([
4132 ("https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/other", "other"),
4133 ("https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/review", "review"),
4134 ]),
4135 iri="https://spdx.org/rdf/3.0.1/terms/Core/annotationType",
4136 min_count=1,
4137 compact="annotationType",
4138 )
4139 # Provides information about the content type of an Element or a Property.
4140 cls._add_property(
4141 "contentType",
4142 StringProp(pattern=r"^[^\/]+\/[^\/]+$",),
4143 iri="https://spdx.org/rdf/3.0.1/terms/Core/contentType",
4144 compact="contentType",
4145 )
4146 # Commentary on an assertion that an annotator has made.
4147 cls._add_property(
4148 "statement",
4149 StringProp(),
4150 iri="https://spdx.org/rdf/3.0.1/terms/Core/statement",
4151 compact="statement",
4152 )
4153 # An Element an annotator has made an assertion about.
4154 cls._add_property(
4155 "subject",
4156 ObjectProp(Element, True, context=[
4157 ("https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", "NoneElement"),
4158 ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"),
4159 ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"),
4160 ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"),
4161 ("https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", "NoAssertionElement"),
4162 ],),
4163 iri="https://spdx.org/rdf/3.0.1/terms/Core/subject",
4164 min_count=1,
4165 compact="subject",
4166 )
4167
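# Editorial sketch, not part of the generated bindings: annotating an Element.
# annotationType takes one of the enum IRIs listed above, and subject must be
# set (min_count=1). The Agent and the IRIs are made-up examples.
def _example_annotation():
    subj = Agent()
    subj.spdxId = "http://example.org/spdx/agent-1"  # hypothetical IRI
    ann = Annotation()
    ann.spdxId = "http://example.org/spdx/annotation-1"  # hypothetical IRI
    ann.annotationType = "https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/review"
    ann.statement = "Reviewed and found consistent with the source tree."
    ann.subject = subj
    return ann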
4168
4169# A distinct article or unit within the digital domain.
4170@register("https://spdx.org/rdf/3.0.1/terms/Core/Artifact", compact_type="Artifact", abstract=True)
4171class Artifact(Element):
4172 NODE_KIND = NodeKind.IRI
4173 ID_ALIAS = "spdxId"
4174 NAMED_INDIVIDUALS = {
4175 }
4176
4177 @classmethod
4178 def _register_props(cls):
4179 super()._register_props()
4180 # Specifies the time an artifact was built.
4181 cls._add_property(
4182 "builtTime",
4183 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
4184 iri="https://spdx.org/rdf/3.0.1/terms/Core/builtTime",
4185 compact="builtTime",
4186 )
4187 # Identifies from where or whom the Element originally came.
4188 cls._add_property(
4189 "originatedBy",
4190 ListProp(ObjectProp(Agent, False, context=[
4191 ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"),
4192 ],)),
4193 iri="https://spdx.org/rdf/3.0.1/terms/Core/originatedBy",
4194 compact="originatedBy",
4195 )
4196 # Specifies the time an artifact was released.
4197 cls._add_property(
4198 "releaseTime",
4199 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
4200 iri="https://spdx.org/rdf/3.0.1/terms/Core/releaseTime",
4201 compact="releaseTime",
4202 )
4203 # The name of a relevant standard that may apply to an artifact.
4204 cls._add_property(
4205 "standardName",
4206 ListProp(StringProp()),
4207 iri="https://spdx.org/rdf/3.0.1/terms/Core/standardName",
4208 compact="standardName",
4209 )
4210 # Identifies who or what supplied the artifact or VulnAssessmentRelationship
4211 # referenced by the Element.
4212 cls._add_property(
4213 "suppliedBy",
4214 ObjectProp(Agent, False, context=[
4215 ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"),
4216 ],),
4217 iri="https://spdx.org/rdf/3.0.1/terms/Core/suppliedBy",
4218 compact="suppliedBy",
4219 )
4220 # Specifies the level of support associated with an artifact.
4221 cls._add_property(
4222 "supportLevel",
4223 ListProp(EnumProp([
4224 ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/deployed", "deployed"),
4225 ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/development", "development"),
4226 ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/endOfSupport", "endOfSupport"),
4227 ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/limitedSupport", "limitedSupport"),
4228 ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noAssertion", "noAssertion"),
4229 ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noSupport", "noSupport"),
4230 ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/support", "support"),
4231 ])),
4232 iri="https://spdx.org/rdf/3.0.1/terms/Core/supportLevel",
4233 compact="supportLevel",
4234 )
4235 # Specifies until when the artifact can be used before its usage needs to be
4236 # reassessed.
4237 cls._add_property(
4238 "validUntilTime",
4239 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
4240 iri="https://spdx.org/rdf/3.0.1/terms/Core/validUntilTime",
4241 compact="validUntilTime",
4242 )
4243
4244
4245# A collection of Elements that have a shared context.
4246@register("https://spdx.org/rdf/3.0.1/terms/Core/Bundle", compact_type="Bundle", abstract=False)
4247class Bundle(ElementCollection):
4248 NODE_KIND = NodeKind.IRI
4249 ID_ALIAS = "spdxId"
4250 NAMED_INDIVIDUALS = {
4251 }
4252
4253 @classmethod
4254 def _register_props(cls):
4255 super()._register_props()
4256 # Gives information about the circumstances or unifying properties
4257 # that Elements of the bundle have been assembled under.
4258 cls._add_property(
4259 "context",
4260 StringProp(),
4261 iri="https://spdx.org/rdf/3.0.1/terms/Core/context",
4262 compact="context",
4263 )
4264
4265
4266# A mathematically calculated representation of a grouping of data.
4267@register("https://spdx.org/rdf/3.0.1/terms/Core/Hash", compact_type="Hash", abstract=False)
4268class Hash(IntegrityMethod):
4269 NODE_KIND = NodeKind.BlankNodeOrIRI
4270 NAMED_INDIVIDUALS = {
4271 }
4272
4273 @classmethod
4274 def _register_props(cls):
4275 super()._register_props()
4276 # Specifies the algorithm used for calculating the hash value.
4277 cls._add_property(
4278 "algorithm",
4279 EnumProp([
4280 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/adler32", "adler32"),
4281 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b256", "blake2b256"),
4282 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b384", "blake2b384"),
4283 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b512", "blake2b512"),
4284 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake3", "blake3"),
4285 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsDilithium", "crystalsDilithium"),
4286 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsKyber", "crystalsKyber"),
4287 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/falcon", "falcon"),
4288 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md2", "md2"),
4289 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md4", "md4"),
4290 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md5", "md5"),
4291 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md6", "md6"),
4292 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/other", "other"),
4293 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha1", "sha1"),
4294 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha224", "sha224"),
4295 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha256", "sha256"),
4296 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha384", "sha384"),
4297 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_224", "sha3_224"),
4298 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_256", "sha3_256"),
4299 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_384", "sha3_384"),
4300 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_512", "sha3_512"),
4301 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha512", "sha512"),
4302 ]),
4303 iri="https://spdx.org/rdf/3.0.1/terms/Core/algorithm",
4304 min_count=1,
4305 compact="algorithm",
4306 )
4307 # The result of applying a hash algorithm to an Element.
4308 cls._add_property(
4309 "hashValue",
4310 StringProp(),
4311 iri="https://spdx.org/rdf/3.0.1/terms/Core/hashValue",
4312 min_count=1,
4313 compact="hashValue",
4314 )
4315
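# Editorial sketch, not part of the generated bindings: a sha256 Hash. The
# enum value is assigned as its IRI string; the HashAlgorithm named-individual
# class defined earlier in this file could equally supply it. Digest is fake.
def _example_hash():
    h = Hash()
    h.algorithm = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha256"
    h.hashValue = "ed7002b439e9ac845f22357d822bac1444730fbdb6016d3ec9432297b9ec9f73"
    return h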
4316
4317 # Provides context for a relationship that occurs in the lifecycle.
4318@register("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopedRelationship", compact_type="LifecycleScopedRelationship", abstract=False)
4319class LifecycleScopedRelationship(Relationship):
4320 NODE_KIND = NodeKind.IRI
4321 ID_ALIAS = "spdxId"
4322 NAMED_INDIVIDUALS = {
4323 }
4324
4325 @classmethod
4326 def _register_props(cls):
4327 super()._register_props()
4328 # Captures the scope of information about a specific relationship between elements.
4329 cls._add_property(
4330 "scope",
4331 EnumProp([
4332 ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/build", "build"),
4333 ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/design", "design"),
4334 ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/development", "development"),
4335 ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/other", "other"),
4336 ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/runtime", "runtime"),
4337 ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/test", "test"),
4338 ]),
4339 iri="https://spdx.org/rdf/3.0.1/terms/Core/scope",
4340 compact="scope",
4341 )
4342
4343
4344# A group of people who work together in an organized way for a shared purpose.
4345@register("https://spdx.org/rdf/3.0.1/terms/Core/Organization", compact_type="Organization", abstract=False)
4346class Organization(Agent):
4347 NODE_KIND = NodeKind.IRI
4348 ID_ALIAS = "spdxId"
4349 NAMED_INDIVIDUALS = {
4350 "SpdxOrganization": "https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization",
4351 }
4352 # An Organization representing the SPDX Project.
4353 SpdxOrganization = "https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization"
4354
4355
4356# An individual human being.
4357@register("https://spdx.org/rdf/3.0.1/terms/Core/Person", compact_type="Person", abstract=False)
4358class Person(Agent):
4359 NODE_KIND = NodeKind.IRI
4360 ID_ALIAS = "spdxId"
4361 NAMED_INDIVIDUALS = {
4362 }
4363
4364
4365# A software agent.
4366@register("https://spdx.org/rdf/3.0.1/terms/Core/SoftwareAgent", compact_type="SoftwareAgent", abstract=False)
4367class SoftwareAgent(Agent):
4368 NODE_KIND = NodeKind.IRI
4369 ID_ALIAS = "spdxId"
4370 NAMED_INDIVIDUALS = {
4371 }
4372
4373
4374# Portion of an AnyLicenseInfo representing a set of licensing information
4375# where all elements apply.
4376@register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/ConjunctiveLicenseSet", compact_type="expandedlicensing_ConjunctiveLicenseSet", abstract=False)
4377class expandedlicensing_ConjunctiveLicenseSet(simplelicensing_AnyLicenseInfo):
4378 NODE_KIND = NodeKind.IRI
4379 ID_ALIAS = "spdxId"
4380 NAMED_INDIVIDUALS = {
4381 }
4382
4383 @classmethod
4384 def _register_props(cls):
4385 super()._register_props()
4386 # A license expression participating in a license set.
4387 cls._add_property(
4388 "expandedlicensing_member",
4389 ListProp(ObjectProp(simplelicensing_AnyLicenseInfo, False, context=[
4390 ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"),
4391 ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"),
4392 ],)),
4393 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/member",
4394 min_count=2,
4395 compact="expandedlicensing_member",
4396 )
4397
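# Editorial sketch, not part of the generated bindings: an AND combination of
# two license expressions. expandedlicensing_member is a list property with
# min_count=2, so at least two AnyLicenseInfo members must be appended.
def _example_conjunctive_set():
    a = simplelicensing_LicenseExpression()
    a.spdxId = "http://example.org/spdx/lic-a"  # hypothetical IRI
    a.simplelicensing_licenseExpression = "MIT"
    b = simplelicensing_LicenseExpression()
    b.spdxId = "http://example.org/spdx/lic-b"  # hypothetical IRI
    b.simplelicensing_licenseExpression = "Apache-2.0"
    s = expandedlicensing_ConjunctiveLicenseSet()
    s.spdxId = "http://example.org/spdx/lic-set"  # hypothetical IRI
    s.expandedlicensing_member.append(a)
    s.expandedlicensing_member.append(b)
    return s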
4398
4399# A license addition that is not listed on the SPDX Exceptions List.
4400@register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/CustomLicenseAddition", compact_type="expandedlicensing_CustomLicenseAddition", abstract=False)
4401class expandedlicensing_CustomLicenseAddition(expandedlicensing_LicenseAddition):
4402 NODE_KIND = NodeKind.IRI
4403 ID_ALIAS = "spdxId"
4404 NAMED_INDIVIDUALS = {
4405 }
4406
4407
4408# Portion of an AnyLicenseInfo representing a set of licensing information where
4409# only one of the elements applies.
4410@register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/DisjunctiveLicenseSet", compact_type="expandedlicensing_DisjunctiveLicenseSet", abstract=False)
4411class expandedlicensing_DisjunctiveLicenseSet(simplelicensing_AnyLicenseInfo):
4412 NODE_KIND = NodeKind.IRI
4413 ID_ALIAS = "spdxId"
4414 NAMED_INDIVIDUALS = {
4415 }
4416
4417 @classmethod
4418 def _register_props(cls):
4419 super()._register_props()
4420 # A license expression participating in a license set.
4421 cls._add_property(
4422 "expandedlicensing_member",
4423 ListProp(ObjectProp(simplelicensing_AnyLicenseInfo, False, context=[
4424 ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"),
4425 ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"),
4426 ],)),
4427 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/member",
4428 min_count=2,
4429 compact="expandedlicensing_member",
4430 )
4431
4432
4433# Abstract class representing a License or an OrLaterOperator.
4434@register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/ExtendableLicense", compact_type="expandedlicensing_ExtendableLicense", abstract=True)
4435class expandedlicensing_ExtendableLicense(simplelicensing_AnyLicenseInfo):
4436 NODE_KIND = NodeKind.IRI
4437 ID_ALIAS = "spdxId"
4438 NAMED_INDIVIDUALS = {
4439 }
4440
4441
4442# A concrete subclass of AnyLicenseInfo used by Individuals in the
4443# ExpandedLicensing profile.
4444@register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/IndividualLicensingInfo", compact_type="expandedlicensing_IndividualLicensingInfo", abstract=False)
4445class expandedlicensing_IndividualLicensingInfo(simplelicensing_AnyLicenseInfo):
4446 NODE_KIND = NodeKind.IRI
4447 ID_ALIAS = "spdxId"
4448 NAMED_INDIVIDUALS = {
4449 "NoAssertionLicense": "https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense",
4450 "NoneLicense": "https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense",
4451 }
4452 # An Individual Value for License when no assertion can be made about its actual
4453 # value.
4454 NoAssertionLicense = "https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense"
4455 # An Individual Value for License where the SPDX data creator determines that no
4456 # license is present.
4457 NoneLicense = "https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense"
4458
4459
4460# Abstract class for the portion of an AnyLicenseInfo representing a license.
4461@register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/License", compact_type="expandedlicensing_License", abstract=True)
4462class expandedlicensing_License(expandedlicensing_ExtendableLicense):
4463 NODE_KIND = NodeKind.IRI
4464 ID_ALIAS = "spdxId"
4465 NAMED_INDIVIDUALS = {
4466 }
4467
4468 @classmethod
4469 def _register_props(cls):
4470 super()._register_props()
4471 # Specifies whether a license or additional text identifier has been marked as
4472 # deprecated.
4473 cls._add_property(
4474 "expandedlicensing_isDeprecatedLicenseId",
4475 BooleanProp(),
4476 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/isDeprecatedLicenseId",
4477 compact="expandedlicensing_isDeprecatedLicenseId",
4478 )
4479 # Specifies whether the License is listed as free by the
4480 # Free Software Foundation (FSF).
4481 cls._add_property(
4482 "expandedlicensing_isFsfLibre",
4483 BooleanProp(),
4484 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/isFsfLibre",
4485 compact="expandedlicensing_isFsfLibre",
4486 )
4487 # Specifies whether the License is listed as approved by the
4488 # Open Source Initiative (OSI).
4489 cls._add_property(
4490 "expandedlicensing_isOsiApproved",
4491 BooleanProp(),
4492 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/isOsiApproved",
4493 compact="expandedlicensing_isOsiApproved",
4494 )
4495 # Identifies all the text and metadata associated with a license in the license
4496 # XML format.
4497 cls._add_property(
4498 "expandedlicensing_licenseXml",
4499 StringProp(),
4500 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/licenseXml",
4501 compact="expandedlicensing_licenseXml",
4502 )
4503 # Specifies the licenseId that is preferred to be used in place of a deprecated
4504 # License or LicenseAddition.
4505 cls._add_property(
4506 "expandedlicensing_obsoletedBy",
4507 StringProp(),
4508 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/obsoletedBy",
4509 compact="expandedlicensing_obsoletedBy",
4510 )
4511 # Contains a URL where the License or LicenseAddition can be found in use.
4512 cls._add_property(
4513 "expandedlicensing_seeAlso",
4514 ListProp(AnyURIProp()),
4515 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/seeAlso",
4516 compact="expandedlicensing_seeAlso",
4517 )
4518 # Provides a License author's preferred text to indicate that a file is covered
4519 # by the License.
4520 cls._add_property(
4521 "expandedlicensing_standardLicenseHeader",
4522 StringProp(),
4523 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/standardLicenseHeader",
4524 compact="expandedlicensing_standardLicenseHeader",
4525 )
4526 # Identifies the full text of a License, in SPDX templating format.
4527 cls._add_property(
4528 "expandedlicensing_standardLicenseTemplate",
4529 StringProp(),
4530 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/standardLicenseTemplate",
4531 compact="expandedlicensing_standardLicenseTemplate",
4532 )
4533 # Identifies the full text of a License or Addition.
4534 cls._add_property(
4535 "simplelicensing_licenseText",
4536 StringProp(),
4537 iri="https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/licenseText",
4538 min_count=1,
4539 compact="simplelicensing_licenseText",
4540 )
4541
4542
4543# A license that is listed on the SPDX License List.
4544@register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/ListedLicense", compact_type="expandedlicensing_ListedLicense", abstract=False)
4545class expandedlicensing_ListedLicense(expandedlicensing_License):
4546 NODE_KIND = NodeKind.IRI
4547 ID_ALIAS = "spdxId"
4548 NAMED_INDIVIDUALS = {
4549 }
4550
4551 @classmethod
4552 def _register_props(cls):
4553 super()._register_props()
4554 # Specifies the SPDX License List version in which this license or exception
4555 # identifier was deprecated.
4556 cls._add_property(
4557 "expandedlicensing_deprecatedVersion",
4558 StringProp(),
4559 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/deprecatedVersion",
4560 compact="expandedlicensing_deprecatedVersion",
4561 )
4562 # Specifies the SPDX License List version in which this ListedLicense or
4563 # ListedLicenseException identifier was first added.
4564 cls._add_property(
4565 "expandedlicensing_listVersionAdded",
4566 StringProp(),
4567 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/listVersionAdded",
4568 compact="expandedlicensing_listVersionAdded",
4569 )
4570
4571
4572# Portion of an AnyLicenseInfo representing this version, or any later version,
4573# of the indicated License.
4574@register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/OrLaterOperator", compact_type="expandedlicensing_OrLaterOperator", abstract=False)
4575class expandedlicensing_OrLaterOperator(expandedlicensing_ExtendableLicense):
4576 NODE_KIND = NodeKind.IRI
4577 ID_ALIAS = "spdxId"
4578 NAMED_INDIVIDUALS = {
4579 }
4580
4581 @classmethod
4582 def _register_props(cls):
4583 super()._register_props()
4584 # A License participating in an 'or later' model.
4585 cls._add_property(
4586 "expandedlicensing_subjectLicense",
4587 ObjectProp(expandedlicensing_License, True),
4588 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/subjectLicense",
4589 min_count=1,
4590 compact="expandedlicensing_subjectLicense",
4591 )
4592
4593
4594# Portion of an AnyLicenseInfo representing a License which has additional
4595# text applied to it.
4596@register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/WithAdditionOperator", compact_type="expandedlicensing_WithAdditionOperator", abstract=False)
4597class expandedlicensing_WithAdditionOperator(simplelicensing_AnyLicenseInfo):
4598 NODE_KIND = NodeKind.IRI
4599 ID_ALIAS = "spdxId"
4600 NAMED_INDIVIDUALS = {
4601 }
4602
4603 @classmethod
4604 def _register_props(cls):
4605 super()._register_props()
4606 # A LicenseAddition participating in a 'with addition' model.
4607 cls._add_property(
4608 "expandedlicensing_subjectAddition",
4609 ObjectProp(expandedlicensing_LicenseAddition, True),
4610 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/subjectAddition",
4611 min_count=1,
4612 compact="expandedlicensing_subjectAddition",
4613 )
4614 # A License participating in a 'with addition' model.
4615 cls._add_property(
4616 "expandedlicensing_subjectExtendableLicense",
4617 ObjectProp(expandedlicensing_ExtendableLicense, True),
4618 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/subjectExtendableLicense",
4619 min_count=1,
4620 compact="expandedlicensing_subjectExtendableLicense",
4621 )
4622
4623
4624# A type of extension consisting of a list of name value pairs.
4625@register("https://spdx.org/rdf/3.0.1/terms/Extension/CdxPropertiesExtension", compact_type="extension_CdxPropertiesExtension", abstract=False)
4626class extension_CdxPropertiesExtension(extension_Extension):
4627 NODE_KIND = NodeKind.BlankNodeOrIRI
4628 NAMED_INDIVIDUALS = {
4629 }
4630
4631 @classmethod
4632 def _register_props(cls):
4633 super()._register_props()
4634 # Provides a map of property names to values.
4635 cls._add_property(
4636 "extension_cdxProperty",
4637 ListProp(ObjectProp(extension_CdxPropertyEntry, False)),
4638 iri="https://spdx.org/rdf/3.0.1/terms/Extension/cdxProperty",
4639 min_count=1,
4640 compact="extension_cdxProperty",
4641 )
4642
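# Editorial sketch, not part of the generated bindings: carrying CycloneDX
# name/value properties through the extension classes above. Both classes are
# blank-node-capable, so no spdxId is needed; the property name is made up.
def _example_cdx_properties():
    entry = extension_CdxPropertyEntry()
    entry.extension_cdxPropName = "cdx:component:group"  # hypothetical name
    entry.extension_cdxPropValue = "example-group"
    ext = extension_CdxPropertiesExtension()
    ext.extension_cdxProperty.append(entry)
    return ext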
4643
4644# Provides a CVSS version 2.0 assessment for a vulnerability.
4645@register("https://spdx.org/rdf/3.0.1/terms/Security/CvssV2VulnAssessmentRelationship", compact_type="security_CvssV2VulnAssessmentRelationship", abstract=False)
4646class security_CvssV2VulnAssessmentRelationship(security_VulnAssessmentRelationship):
4647 NODE_KIND = NodeKind.IRI
4648 ID_ALIAS = "spdxId"
4649 NAMED_INDIVIDUALS = {
4650 }
4651
4652 @classmethod
4653 def _register_props(cls):
4654 super()._register_props()
4655 # Provides a numerical (0-10) representation of the severity of a vulnerability.
4656 cls._add_property(
4657 "security_score",
4658 FloatProp(),
4659 iri="https://spdx.org/rdf/3.0.1/terms/Security/score",
4660 min_count=1,
4661 compact="security_score",
4662 )
4663 # Specifies the CVSS vector string for a vulnerability.
4664 cls._add_property(
4665 "security_vectorString",
4666 StringProp(),
4667 iri="https://spdx.org/rdf/3.0.1/terms/Security/vectorString",
4668 min_count=1,
4669 compact="security_vectorString",
4670 )
4671
4672
4673# Provides a CVSS version 3 assessment for a vulnerability.
4674@register("https://spdx.org/rdf/3.0.1/terms/Security/CvssV3VulnAssessmentRelationship", compact_type="security_CvssV3VulnAssessmentRelationship", abstract=False)
4675class security_CvssV3VulnAssessmentRelationship(security_VulnAssessmentRelationship):
4676 NODE_KIND = NodeKind.IRI
4677 ID_ALIAS = "spdxId"
4678 NAMED_INDIVIDUALS = {
4679 }
4680
4681 @classmethod
4682 def _register_props(cls):
4683 super()._register_props()
4684 # Provides a numerical (0-10) representation of the severity of a vulnerability.
4685 cls._add_property(
4686 "security_score",
4687 FloatProp(),
4688 iri="https://spdx.org/rdf/3.0.1/terms/Security/score",
4689 min_count=1,
4690 compact="security_score",
4691 )
4692 # Specifies the CVSS qualitative severity rating of a vulnerability in relation to a piece of software.
4693 cls._add_property(
4694 "security_severity",
4695 EnumProp([
4696 ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/critical", "critical"),
4697 ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/high", "high"),
4698 ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/low", "low"),
4699 ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/medium", "medium"),
4700 ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/none", "none"),
4701 ]),
4702 iri="https://spdx.org/rdf/3.0.1/terms/Security/severity",
4703 min_count=1,
4704 compact="security_severity",
4705 )
4706 # Specifies the CVSS vector string for a vulnerability.
4707 cls._add_property(
4708 "security_vectorString",
4709 StringProp(),
4710 iri="https://spdx.org/rdf/3.0.1/terms/Security/vectorString",
4711 min_count=1,
4712 compact="security_vectorString",
4713 )
4714
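# Editorial sketch, not part of the generated bindings: a CVSS v3 assessment.
# score, severity, and vectorString are all mandatory (min_count=1). The
# inherited Relationship fields (the vulnerability and the assessed element)
# are omitted here for brevity.
def _example_cvss_v3():
    rel = security_CvssV3VulnAssessmentRelationship()
    rel.spdxId = "http://example.org/spdx/cvss-v3-1"  # hypothetical IRI
    rel.security_score = 7.5
    rel.security_severity = security_CvssSeverityType.high
    rel.security_vectorString = "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N"
    return rel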
4715
4716# Provides a CVSS version 4 assessment for a vulnerability.
4717@register("https://spdx.org/rdf/3.0.1/terms/Security/CvssV4VulnAssessmentRelationship", compact_type="security_CvssV4VulnAssessmentRelationship", abstract=False)
4718class security_CvssV4VulnAssessmentRelationship(security_VulnAssessmentRelationship):
4719 NODE_KIND = NodeKind.IRI
4720 ID_ALIAS = "spdxId"
4721 NAMED_INDIVIDUALS = {
4722 }
4723
4724 @classmethod
4725 def _register_props(cls):
4726 super()._register_props()
4727 # Provides a numerical (0-10) representation of the severity of a vulnerability.
4728 cls._add_property(
4729 "security_score",
4730 FloatProp(),
4731 iri="https://spdx.org/rdf/3.0.1/terms/Security/score",
4732 min_count=1,
4733 compact="security_score",
4734 )
4735 # Specifies the CVSS qualitative severity rating of a vulnerability in relation to a piece of software.
4736 cls._add_property(
4737 "security_severity",
4738 EnumProp([
4739 ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/critical", "critical"),
4740 ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/high", "high"),
4741 ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/low", "low"),
4742 ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/medium", "medium"),
4743 ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/none", "none"),
4744 ]),
4745 iri="https://spdx.org/rdf/3.0.1/terms/Security/severity",
4746 min_count=1,
4747 compact="security_severity",
4748 )
4749 # Specifies the CVSS vector string for a vulnerability.
4750 cls._add_property(
4751 "security_vectorString",
4752 StringProp(),
4753 iri="https://spdx.org/rdf/3.0.1/terms/Security/vectorString",
4754 min_count=1,
4755 compact="security_vectorString",
4756 )
4757
4758
4759# Provides an EPSS assessment for a vulnerability.
4760@register("https://spdx.org/rdf/3.0.1/terms/Security/EpssVulnAssessmentRelationship", compact_type="security_EpssVulnAssessmentRelationship", abstract=False)
4761class security_EpssVulnAssessmentRelationship(security_VulnAssessmentRelationship):
4762 NODE_KIND = NodeKind.IRI
4763 ID_ALIAS = "spdxId"
4764 NAMED_INDIVIDUALS = {
4765 }
4766
4767 @classmethod
4768 def _register_props(cls):
4769 super()._register_props()
4770 # The percentile of the current probability score.
4771 cls._add_property(
4772 "security_percentile",
4773 FloatProp(),
4774 iri="https://spdx.org/rdf/3.0.1/terms/Security/percentile",
4775 min_count=1,
4776 compact="security_percentile",
4777 )
4778 # A probability score between 0 and 1 of a vulnerability being exploited.
4779 cls._add_property(
4780 "security_probability",
4781 FloatProp(),
4782 iri="https://spdx.org/rdf/3.0.1/terms/Security/probability",
4783 min_count=1,
4784 compact="security_probability",
4785 )
4786
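# Editorial sketch, not part of the generated bindings: an EPSS assessment.
# Both the probability (0..1) and its percentile are mandatory floats.
def _example_epss():
    rel = security_EpssVulnAssessmentRelationship()
    rel.spdxId = "http://example.org/spdx/epss-1"  # hypothetical IRI
    rel.security_probability = 0.42
    rel.security_percentile = 0.97
    return rel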
4787
4788# Provides an exploit assessment of a vulnerability.
4789@register("https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogVulnAssessmentRelationship", compact_type="security_ExploitCatalogVulnAssessmentRelationship", abstract=False)
4790class security_ExploitCatalogVulnAssessmentRelationship(security_VulnAssessmentRelationship):
4791 NODE_KIND = NodeKind.IRI
4792 ID_ALIAS = "spdxId"
4793 NAMED_INDIVIDUALS = {
4794 }
4795
4796 @classmethod
4797 def _register_props(cls):
4798 super()._register_props()
4799 # Specifies the exploit catalog type.
4800 cls._add_property(
4801 "security_catalogType",
4802 EnumProp([
4803 ("https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/kev", "kev"),
4804 ("https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/other", "other"),
4805 ]),
4806 iri="https://spdx.org/rdf/3.0.1/terms/Security/catalogType",
4807 min_count=1,
4808 compact="security_catalogType",
4809 )
4810 # Describes that a CVE is known to have an exploit because it has been listed in an exploit catalog.
4811 cls._add_property(
4812 "security_exploited",
4813 BooleanProp(),
4814 iri="https://spdx.org/rdf/3.0.1/terms/Security/exploited",
4815 min_count=1,
4816 compact="security_exploited",
4817 )
4818 # Provides the location of an exploit catalog.
4819 cls._add_property(
4820 "security_locator",
4821 AnyURIProp(),
4822 iri="https://spdx.org/rdf/3.0.1/terms/Security/locator",
4823 min_count=1,
4824 compact="security_locator",
4825 )
4826
4827
4828# Provides an SSVC assessment for a vulnerability.
4829@register("https://spdx.org/rdf/3.0.1/terms/Security/SsvcVulnAssessmentRelationship", compact_type="security_SsvcVulnAssessmentRelationship", abstract=False)
4830class security_SsvcVulnAssessmentRelationship(security_VulnAssessmentRelationship):
4831 NODE_KIND = NodeKind.IRI
4832 ID_ALIAS = "spdxId"
4833 NAMED_INDIVIDUALS = {
4834 }
4835
4836 @classmethod
4837 def _register_props(cls):
4838 super()._register_props()
4839 # Provides the enumeration of possible decisions in the
4840 # [Stakeholder-Specific Vulnerability Categorization (SSVC) decision tree](https://www.cisa.gov/stakeholder-specific-vulnerability-categorization-ssvc).
4841 cls._add_property(
4842 "security_decisionType",
4843 EnumProp([
4844 ("https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/act", "act"),
4845 ("https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/attend", "attend"),
4846 ("https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/track", "track"),
4847 ("https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/trackStar", "trackStar"),
4848 ]),
4849 iri="https://spdx.org/rdf/3.0.1/terms/Security/decisionType",
4850 min_count=1,
4851 compact="security_decisionType",
4852 )
4853
4854
4855# Abstract ancestor class for all VEX relationships
4856@register("https://spdx.org/rdf/3.0.1/terms/Security/VexVulnAssessmentRelationship", compact_type="security_VexVulnAssessmentRelationship", abstract=True)
4857class security_VexVulnAssessmentRelationship(security_VulnAssessmentRelationship):
4858 NODE_KIND = NodeKind.IRI
4859 ID_ALIAS = "spdxId"
4860 NAMED_INDIVIDUALS = {
4861 }
4862
4863 @classmethod
4864 def _register_props(cls):
4865 super()._register_props()
4866 # Conveys information about how VEX status was determined.
4867 cls._add_property(
4868 "security_statusNotes",
4869 StringProp(),
4870 iri="https://spdx.org/rdf/3.0.1/terms/Security/statusNotes",
4871 compact="security_statusNotes",
4872 )
4873 # Specifies the version of a VEX statement.
4874 cls._add_property(
4875 "security_vexVersion",
4876 StringProp(),
4877 iri="https://spdx.org/rdf/3.0.1/terms/Security/vexVersion",
4878 compact="security_vexVersion",
4879 )
4880
4881
4882# Specifies a vulnerability and its associated information.
4883@register("https://spdx.org/rdf/3.0.1/terms/Security/Vulnerability", compact_type="security_Vulnerability", abstract=False)
4884class security_Vulnerability(Artifact):
4885 NODE_KIND = NodeKind.IRI
4886 ID_ALIAS = "spdxId"
4887 NAMED_INDIVIDUALS = {
4888 }
4889
4890 @classmethod
4891 def _register_props(cls):
4892 super()._register_props()
4893 # Specifies a time when a vulnerability assessment was modified.
4894 cls._add_property(
4895 "security_modifiedTime",
4896 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
4897 iri="https://spdx.org/rdf/3.0.1/terms/Security/modifiedTime",
4898 compact="security_modifiedTime",
4899 )
4900 # Specifies the time when a vulnerability was published.
4901 cls._add_property(
4902 "security_publishedTime",
4903 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
4904 iri="https://spdx.org/rdf/3.0.1/terms/Security/publishedTime",
4905 compact="security_publishedTime",
4906 )
4907 # Specifies the time and date when a vulnerability was withdrawn.
4908 cls._add_property(
4909 "security_withdrawnTime",
4910 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
4911 iri="https://spdx.org/rdf/3.0.1/terms/Security/withdrawnTime",
4912 compact="security_withdrawnTime",
4913 )
4914
4915
4916# A distinct article or unit related to Software.
4917@register("https://spdx.org/rdf/3.0.1/terms/Software/SoftwareArtifact", compact_type="software_SoftwareArtifact", abstract=True)
4918class software_SoftwareArtifact(Artifact):
4919 NODE_KIND = NodeKind.IRI
4920 ID_ALIAS = "spdxId"
4921 NAMED_INDIVIDUALS = {
4922 }
4923
4924 @classmethod
4925 def _register_props(cls):
4926 super()._register_props()
4927 # Provides additional purpose information of the software artifact.
4928 cls._add_property(
4929 "software_additionalPurpose",
4930 ListProp(EnumProp([
4931 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/application", "application"),
4932 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/archive", "archive"),
4933 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/bom", "bom"),
4934 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/configuration", "configuration"),
4935 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/container", "container"),
4936 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/data", "data"),
4937 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/device", "device"),
4938 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/deviceDriver", "deviceDriver"),
4939 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/diskImage", "diskImage"),
4940 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/documentation", "documentation"),
4941 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/evidence", "evidence"),
4942 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/executable", "executable"),
4943 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/file", "file"),
4944 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/filesystemImage", "filesystemImage"),
4945 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/firmware", "firmware"),
4946 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/framework", "framework"),
4947 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/install", "install"),
4948 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/library", "library"),
4949 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/manifest", "manifest"),
4950 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/model", "model"),
4951 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/module", "module"),
4952 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/operatingSystem", "operatingSystem"),
4953 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/other", "other"),
4954 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/patch", "patch"),
4955 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/platform", "platform"),
4956 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/requirement", "requirement"),
4957 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/source", "source"),
4958 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/specification", "specification"),
4959 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/test", "test"),
4960 ])),
4961 iri="https://spdx.org/rdf/3.0.1/terms/Software/additionalPurpose",
4962 compact="software_additionalPurpose",
4963 )
4964 # Provides a place for the SPDX data creator to record acknowledgement text for
4965 # a software Package, File or Snippet.
4966 cls._add_property(
4967 "software_attributionText",
4968 ListProp(StringProp()),
4969 iri="https://spdx.org/rdf/3.0.1/terms/Software/attributionText",
4970 compact="software_attributionText",
4971 )
4972 # A canonical, unique, immutable identifier of the artifact content, that may be
4973 # used for verifying its identity and/or integrity.
4974 cls._add_property(
4975 "software_contentIdentifier",
4976 ListProp(ObjectProp(software_ContentIdentifier, False)),
4977 iri="https://spdx.org/rdf/3.0.1/terms/Software/contentIdentifier",
4978 compact="software_contentIdentifier",
4979 )
4980 # Identifies the text of one or more copyright notices for a software Package,
4981 # File or Snippet, if any.
4982 cls._add_property(
4983 "software_copyrightText",
4984 StringProp(),
4985 iri="https://spdx.org/rdf/3.0.1/terms/Software/copyrightText",
4986 compact="software_copyrightText",
4987 )
4988 # Provides information about the primary purpose of the software artifact.
4989 cls._add_property(
4990 "software_primaryPurpose",
4991 EnumProp([
4992 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/application", "application"),
4993 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/archive", "archive"),
4994 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/bom", "bom"),
4995 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/configuration", "configuration"),
4996 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/container", "container"),
4997 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/data", "data"),
4998 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/device", "device"),
4999 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/deviceDriver", "deviceDriver"),
5000 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/diskImage", "diskImage"),
5001 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/documentation", "documentation"),
5002 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/evidence", "evidence"),
5003 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/executable", "executable"),
5004 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/file", "file"),
5005 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/filesystemImage", "filesystemImage"),
5006 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/firmware", "firmware"),
5007 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/framework", "framework"),
5008 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/install", "install"),
5009 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/library", "library"),
5010 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/manifest", "manifest"),
5011 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/model", "model"),
5012 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/module", "module"),
5013 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/operatingSystem", "operatingSystem"),
5014 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/other", "other"),
5015 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/patch", "patch"),
5016 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/platform", "platform"),
5017 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/requirement", "requirement"),
5018 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/source", "source"),
5019 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/specification", "specification"),
5020 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/test", "test"),
5021 ]),
5022 iri="https://spdx.org/rdf/3.0.1/terms/Software/primaryPurpose",
5023 compact="software_primaryPurpose",
5024 )
5025
5026
5027# A container for a grouping of SPDX-3.0 content characterizing details
5028# (provenance, composition, licensing, etc.) about a product.
5029@register("https://spdx.org/rdf/3.0.1/terms/Core/Bom", compact_type="Bom", abstract=False)
5030class Bom(Bundle):
5031 NODE_KIND = NodeKind.IRI
5032 ID_ALIAS = "spdxId"
5033 NAMED_INDIVIDUALS = {
5034 }
5035
5036
5037# A license that is not listed on the SPDX License List.
5038@register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/CustomLicense", compact_type="expandedlicensing_CustomLicense", abstract=False)
5039class expandedlicensing_CustomLicense(expandedlicensing_License):
5040 NODE_KIND = NodeKind.IRI
5041 ID_ALIAS = "spdxId"
5042 NAMED_INDIVIDUALS = {
5043 }
5044
5045
5046# Connects a vulnerability and an element designating the element as a product
5047# affected by the vulnerability.
5048@register("https://spdx.org/rdf/3.0.1/terms/Security/VexAffectedVulnAssessmentRelationship", compact_type="security_VexAffectedVulnAssessmentRelationship", abstract=False)
5049class security_VexAffectedVulnAssessmentRelationship(security_VexVulnAssessmentRelationship):
5050 NODE_KIND = NodeKind.IRI
5051 ID_ALIAS = "spdxId"
5052 NAMED_INDIVIDUALS = {
5053 }
5054
5055 @classmethod
5056 def _register_props(cls):
5057 super()._register_props()
5058 # Provides advice on how to mitigate or remediate a vulnerability when a VEX product
5059 # is affected by it.
5060 cls._add_property(
5061 "security_actionStatement",
5062 StringProp(),
5063 iri="https://spdx.org/rdf/3.0.1/terms/Security/actionStatement",
5064 min_count=1,
5065 compact="security_actionStatement",
5066 )
5067 # Records the time when a recommended action was communicated in a VEX statement
5068 # to mitigate a vulnerability.
5069 cls._add_property(
5070 "security_actionStatementTime",
5071 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
5072 iri="https://spdx.org/rdf/3.0.1/terms/Security/actionStatementTime",
5073 compact="security_actionStatementTime",
5074 )
5075
5076
5077# Links a vulnerability and elements representing products (in the VEX sense) where
5078# a fix has been applied and that are no longer affected.
5079@register("https://spdx.org/rdf/3.0.1/terms/Security/VexFixedVulnAssessmentRelationship", compact_type="security_VexFixedVulnAssessmentRelationship", abstract=False)
5080class security_VexFixedVulnAssessmentRelationship(security_VexVulnAssessmentRelationship):
5081 NODE_KIND = NodeKind.IRI
5082 ID_ALIAS = "spdxId"
5083 NAMED_INDIVIDUALS = {
5084 }
5085
5086
5087# Links a vulnerability and one or more elements designating the latter as products
5088# not affected by the vulnerability.
5089@register("https://spdx.org/rdf/3.0.1/terms/Security/VexNotAffectedVulnAssessmentRelationship", compact_type="security_VexNotAffectedVulnAssessmentRelationship", abstract=False)
5090class security_VexNotAffectedVulnAssessmentRelationship(security_VexVulnAssessmentRelationship):
5091 NODE_KIND = NodeKind.IRI
5092 ID_ALIAS = "spdxId"
5093 NAMED_INDIVIDUALS = {
5094 }
5095
5096 @classmethod
5097 def _register_props(cls):
5098 super()._register_props()
5099 # Explains why a VEX product is not affected by a vulnerability. It is an
5100 # alternative in VexNotAffectedVulnAssessmentRelationship to the machine-readable
5101 # justification label.
5102 cls._add_property(
5103 "security_impactStatement",
5104 StringProp(),
5105 iri="https://spdx.org/rdf/3.0.1/terms/Security/impactStatement",
5106 compact="security_impactStatement",
5107 )
5108 # Timestamp of impact statement.
5109 cls._add_property(
5110 "security_impactStatementTime",
5111 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
5112 iri="https://spdx.org/rdf/3.0.1/terms/Security/impactStatementTime",
5113 compact="security_impactStatementTime",
5114 )
5115 # Impact justification label to be used when linking a vulnerability to an element
5116 # representing a VEX product with a VexNotAffectedVulnAssessmentRelationship
5117 # relationship.
5118 cls._add_property(
5119 "security_justificationType",
5120 EnumProp([
5121 ("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/componentNotPresent", "componentNotPresent"),
5122 ("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/inlineMitigationsAlreadyExist", "inlineMitigationsAlreadyExist"),
5123 ("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeCannotBeControlledByAdversary", "vulnerableCodeCannotBeControlledByAdversary"),
5124 ("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotInExecutePath", "vulnerableCodeNotInExecutePath"),
5125 ("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotPresent", "vulnerableCodeNotPresent"),
5126 ]),
5127 iri="https://spdx.org/rdf/3.0.1/terms/Security/justificationType",
5128 compact="security_justificationType",
5129 )
5130
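
As a rough illustration of how these generated relationship classes are used (mirroring the VEX handling in meta/lib/oe/spdx30_tasks.py below), enum-valued properties such as the justification label are set through attributes of the generated enum class. A minimal sketch, assuming a hypothetical spdxId and omitting the inherited relationship endpoints:

    import oe.spdx30

    rel = oe.spdx30.security_VexNotAffectedVulnAssessmentRelationship(
        _id="https://example.com/vex-not-affected-1",  # hypothetical spdxId
    )
    # Enum values are exposed as attributes named after the compact IRI.
    rel.security_justificationType = (
        oe.spdx30.security_VexJustificationType.vulnerableCodeNotPresent
    )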
5131
5132# Designates elements as products where the impact of a vulnerability is being
5133# investigated.
5134@register("https://spdx.org/rdf/3.0.1/terms/Security/VexUnderInvestigationVulnAssessmentRelationship", compact_type="security_VexUnderInvestigationVulnAssessmentRelationship", abstract=False)
5135class security_VexUnderInvestigationVulnAssessmentRelationship(security_VexVulnAssessmentRelationship):
5136 NODE_KIND = NodeKind.IRI
5137 ID_ALIAS = "spdxId"
5138 NAMED_INDIVIDUALS = {
5139 }
5140
5141
5142# Refers to any object that stores content on a computer.
5143@register("https://spdx.org/rdf/3.0.1/terms/Software/File", compact_type="software_File", abstract=False)
5144class software_File(software_SoftwareArtifact):
5145 NODE_KIND = NodeKind.IRI
5146 ID_ALIAS = "spdxId"
5147 NAMED_INDIVIDUALS = {
5148 }
5149
5150 @classmethod
5151 def _register_props(cls):
5152 super()._register_props()
5153 # Provides information about the content type of an Element or a Property.
5154 cls._add_property(
5155 "contentType",
5156 StringProp(pattern=r"^[^\/]+\/[^\/]+$",),
5157 iri="https://spdx.org/rdf/3.0.1/terms/Core/contentType",
5158 compact="contentType",
5159 )
5160 # Describes if a given file is a directory or non-directory kind of file.
5161 cls._add_property(
5162 "software_fileKind",
5163 EnumProp([
5164 ("https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/directory", "directory"),
5165 ("https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/file", "file"),
5166 ]),
5167 iri="https://spdx.org/rdf/3.0.1/terms/Software/fileKind",
5168 compact="software_fileKind",
5169 )
5170
5171
5172# Refers to any unit of content that can be associated with a distribution of
5173# software.
5174@register("https://spdx.org/rdf/3.0.1/terms/Software/Package", compact_type="software_Package", abstract=False)
5175class software_Package(software_SoftwareArtifact):
5176 NODE_KIND = NodeKind.IRI
5177 ID_ALIAS = "spdxId"
5178 NAMED_INDIVIDUALS = {
5179 }
5180
5181 @classmethod
5182 def _register_props(cls):
5183 super()._register_props()
5184 # Identifies the download Uniform Resource Identifier for the package at the time
5185 # that the document was created.
5186 cls._add_property(
5187 "software_downloadLocation",
5188 AnyURIProp(),
5189 iri="https://spdx.org/rdf/3.0.1/terms/Software/downloadLocation",
5190 compact="software_downloadLocation",
5191 )
5192 # A place for the SPDX document creator to record a website that serves as the
5193 # package's home page.
5194 cls._add_property(
5195 "software_homePage",
5196 AnyURIProp(),
5197 iri="https://spdx.org/rdf/3.0.1/terms/Software/homePage",
5198 compact="software_homePage",
5199 )
5200 # Provides a place for the SPDX data creator to record the package URL string
5201 # (in accordance with the Package URL specification) for a software Package.
5202 cls._add_property(
5203 "software_packageUrl",
5204 AnyURIProp(),
5205 iri="https://spdx.org/rdf/3.0.1/terms/Software/packageUrl",
5206 compact="software_packageUrl",
5207 )
5208 # Identifies the version of a package.
5209 cls._add_property(
5210 "software_packageVersion",
5211 StringProp(),
5212 iri="https://spdx.org/rdf/3.0.1/terms/Software/packageVersion",
5213 compact="software_packageVersion",
5214 )
5215 # Records any relevant background information or additional comments
5216 # about the origin of the package.
5217 cls._add_property(
5218 "software_sourceInfo",
5219 StringProp(),
5220 iri="https://spdx.org/rdf/3.0.1/terms/Software/sourceInfo",
5221 compact="software_sourceInfo",
5222 )
5223
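
For a concrete use of this class, create_spdx() in meta/lib/oe/spdx30_tasks.py (added below) builds one software_Package per binary package. A condensed sketch with hypothetical values; the real call also passes the object set's creationInfo and a generated spdxId:

    import oe.spdx30

    spdx_package = oe.spdx30.software_Package(
        _id="http://spdx.org/spdxdocs/example#package-foo",  # hypothetical
        name="foo",
        software_packageVersion="1.0",
        software_downloadLocation="https://example.com/foo-1.0.tar.gz",
    )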
5224
5225# A collection of SPDX Elements describing a single package.
5226@register("https://spdx.org/rdf/3.0.1/terms/Software/Sbom", compact_type="software_Sbom", abstract=False)
5227class software_Sbom(Bom):
5228 NODE_KIND = NodeKind.IRI
5229 ID_ALIAS = "spdxId"
5230 NAMED_INDIVIDUALS = {
5231 }
5232
5233 @classmethod
5234 def _register_props(cls):
5235 super()._register_props()
5236 # Provides information about the type of an SBOM.
5237 cls._add_property(
5238 "software_sbomType",
5239 ListProp(EnumProp([
5240 ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/analyzed", "analyzed"),
5241 ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/build", "build"),
5242 ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/deployed", "deployed"),
5243 ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/design", "design"),
5244 ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/runtime", "runtime"),
5245 ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/source", "source"),
5246 ])),
5247 iri="https://spdx.org/rdf/3.0.1/terms/Software/sbomType",
5248 compact="software_sbomType",
5249 )
5250
5251
5252# Describes a certain part of a file.
5253@register("https://spdx.org/rdf/3.0.1/terms/Software/Snippet", compact_type="software_Snippet", abstract=False)
5254class software_Snippet(software_SoftwareArtifact):
5255 NODE_KIND = NodeKind.IRI
5256 ID_ALIAS = "spdxId"
5257 NAMED_INDIVIDUALS = {
5258 }
5259
5260 @classmethod
5261 def _register_props(cls):
5262 super()._register_props()
5263 # Defines the byte range in the original host file that the snippet information
5264 # applies to.
5265 cls._add_property(
5266 "software_byteRange",
5267 ObjectProp(PositiveIntegerRange, False),
5268 iri="https://spdx.org/rdf/3.0.1/terms/Software/byteRange",
5269 compact="software_byteRange",
5270 )
5271 # Defines the line range in the original host file that the snippet information
5272 # applies to.
5273 cls._add_property(
5274 "software_lineRange",
5275 ObjectProp(PositiveIntegerRange, False),
5276 iri="https://spdx.org/rdf/3.0.1/terms/Software/lineRange",
5277 compact="software_lineRange",
5278 )
5279 # Defines the original host file that the snippet information applies to.
5280 cls._add_property(
5281 "software_snippetFromFile",
5282 ObjectProp(software_File, True),
5283 iri="https://spdx.org/rdf/3.0.1/terms/Software/snippetFromFile",
5284 min_count=1,
5285 compact="software_snippetFromFile",
5286 )
5287
5288
5289# Specifies an AI package and its associated information.
5290@register("https://spdx.org/rdf/3.0.1/terms/AI/AIPackage", compact_type="ai_AIPackage", abstract=False)
5291class ai_AIPackage(software_Package):
5292 NODE_KIND = NodeKind.IRI
5293 ID_ALIAS = "spdxId"
5294 NAMED_INDIVIDUALS = {
5295 }
5296
5297 @classmethod
5298 def _register_props(cls):
5299 super()._register_props()
5300 # Indicates whether the system can perform a decision or action without human
5301 # involvement or guidance.
5302 cls._add_property(
5303 "ai_autonomyType",
5304 EnumProp([
5305 ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/no", "no"),
5306 ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/noAssertion", "noAssertion"),
5307 ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/yes", "yes"),
5308 ]),
5309 iri="https://spdx.org/rdf/3.0.1/terms/AI/autonomyType",
5310 compact="ai_autonomyType",
5311 )
5312 # Captures the domain in which the AI package can be used.
5313 cls._add_property(
5314 "ai_domain",
5315 ListProp(StringProp()),
5316 iri="https://spdx.org/rdf/3.0.1/terms/AI/domain",
5317 compact="ai_domain",
5318 )
5319 # Indicates the amount of energy consumption incurred by an AI model.
5320 cls._add_property(
5321 "ai_energyConsumption",
5322 ObjectProp(ai_EnergyConsumption, False),
5323 iri="https://spdx.org/rdf/3.0.1/terms/AI/energyConsumption",
5324 compact="ai_energyConsumption",
5325 )
5326 # Records a hyperparameter used to build the AI model contained in the AI
5327 # package.
5328 cls._add_property(
5329 "ai_hyperparameter",
5330 ListProp(ObjectProp(DictionaryEntry, False)),
5331 iri="https://spdx.org/rdf/3.0.1/terms/AI/hyperparameter",
5332 compact="ai_hyperparameter",
5333 )
5334 # Provides relevant information about the AI software, not including the model
5335 # description.
5336 cls._add_property(
5337 "ai_informationAboutApplication",
5338 StringProp(),
5339 iri="https://spdx.org/rdf/3.0.1/terms/AI/informationAboutApplication",
5340 compact="ai_informationAboutApplication",
5341 )
5342 # Describes relevant information about different steps of the training process.
5343 cls._add_property(
5344 "ai_informationAboutTraining",
5345 StringProp(),
5346 iri="https://spdx.org/rdf/3.0.1/terms/AI/informationAboutTraining",
5347 compact="ai_informationAboutTraining",
5348 )
5349 # Captures a limitation of the AI software.
5350 cls._add_property(
5351 "ai_limitation",
5352 StringProp(),
5353 iri="https://spdx.org/rdf/3.0.1/terms/AI/limitation",
5354 compact="ai_limitation",
5355 )
5356 # Records the measurement of prediction quality of the AI model.
5357 cls._add_property(
5358 "ai_metric",
5359 ListProp(ObjectProp(DictionaryEntry, False)),
5360 iri="https://spdx.org/rdf/3.0.1/terms/AI/metric",
5361 compact="ai_metric",
5362 )
5363 # Captures the threshold that was used for computation of a metric described in
5364 # the metric field.
5365 cls._add_property(
5366 "ai_metricDecisionThreshold",
5367 ListProp(ObjectProp(DictionaryEntry, False)),
5368 iri="https://spdx.org/rdf/3.0.1/terms/AI/metricDecisionThreshold",
5369 compact="ai_metricDecisionThreshold",
5370 )
5371 # Describes all the preprocessing steps applied to the training data before the
5372 # model training.
5373 cls._add_property(
5374 "ai_modelDataPreprocessing",
5375 ListProp(StringProp()),
5376 iri="https://spdx.org/rdf/3.0.1/terms/AI/modelDataPreprocessing",
5377 compact="ai_modelDataPreprocessing",
5378 )
5379 # Describes methods that can be used to explain the results from the AI model.
5380 cls._add_property(
5381 "ai_modelExplainability",
5382 ListProp(StringProp()),
5383 iri="https://spdx.org/rdf/3.0.1/terms/AI/modelExplainability",
5384 compact="ai_modelExplainability",
5385 )
5386 # Records the results of general safety risk assessment of the AI system.
5387 cls._add_property(
5388 "ai_safetyRiskAssessment",
5389 EnumProp([
5390 ("https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/high", "high"),
5391 ("https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/low", "low"),
5392 ("https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/medium", "medium"),
5393 ("https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/serious", "serious"),
5394 ]),
5395 iri="https://spdx.org/rdf/3.0.1/terms/AI/safetyRiskAssessment",
5396 compact="ai_safetyRiskAssessment",
5397 )
5398 # Captures a standard that is being complied with.
5399 cls._add_property(
5400 "ai_standardCompliance",
5401 ListProp(StringProp()),
5402 iri="https://spdx.org/rdf/3.0.1/terms/AI/standardCompliance",
5403 compact="ai_standardCompliance",
5404 )
5405 # Records the type of the model used in the AI software.
5406 cls._add_property(
5407 "ai_typeOfModel",
5408 ListProp(StringProp()),
5409 iri="https://spdx.org/rdf/3.0.1/terms/AI/typeOfModel",
5410 compact="ai_typeOfModel",
5411 )
5412 # Records if sensitive personal information is used during model training or
5413 # could be used during inference.
5414 cls._add_property(
5415 "ai_useSensitivePersonalInformation",
5416 EnumProp([
5417 ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/no", "no"),
5418 ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/noAssertion", "noAssertion"),
5419 ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/yes", "yes"),
5420 ]),
5421 iri="https://spdx.org/rdf/3.0.1/terms/AI/useSensitivePersonalInformation",
5422 compact="ai_useSensitivePersonalInformation",
5423 )
5424
5425
5426# Specifies a data package and its associated information.
5427@register("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetPackage", compact_type="dataset_DatasetPackage", abstract=False)
5428class dataset_DatasetPackage(software_Package):
5429 NODE_KIND = NodeKind.IRI
5430 ID_ALIAS = "spdxId"
5431 NAMED_INDIVIDUALS = {
5432 }
5433
5434 @classmethod
5435 def _register_props(cls):
5436 super()._register_props()
5437 # Describes the anonymization methods used.
5438 cls._add_property(
5439 "dataset_anonymizationMethodUsed",
5440 ListProp(StringProp()),
5441 iri="https://spdx.org/rdf/3.0.1/terms/Dataset/anonymizationMethodUsed",
5442 compact="dataset_anonymizationMethodUsed",
5443 )
5444 # Describes the confidentiality level of the data points contained in the dataset.
5445 cls._add_property(
5446 "dataset_confidentialityLevel",
5447 EnumProp([
5448 ("https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/amber", "amber"),
5449 ("https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/clear", "clear"),
5450 ("https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/green", "green"),
5451 ("https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/red", "red"),
5452 ]),
5453 iri="https://spdx.org/rdf/3.0.1/terms/Dataset/confidentialityLevel",
5454 compact="dataset_confidentialityLevel",
5455 )
5456 # Describes how the dataset was collected.
5457 cls._add_property(
5458 "dataset_dataCollectionProcess",
5459 StringProp(),
5460 iri="https://spdx.org/rdf/3.0.1/terms/Dataset/dataCollectionProcess",
5461 compact="dataset_dataCollectionProcess",
5462 )
5463 # Describes the preprocessing steps that were applied to the raw data to create the given dataset.
5464 cls._add_property(
5465 "dataset_dataPreprocessing",
5466 ListProp(StringProp()),
5467 iri="https://spdx.org/rdf/3.0.1/terms/Dataset/dataPreprocessing",
5468 compact="dataset_dataPreprocessing",
5469 )
5470 # The field describes the availability of a dataset.
5471 cls._add_property(
5472 "dataset_datasetAvailability",
5473 EnumProp([
5474 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/clickthrough", "clickthrough"),
5475 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/directDownload", "directDownload"),
5476 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/query", "query"),
5477 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/registration", "registration"),
5478 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/scrapingScript", "scrapingScript"),
5479 ]),
5480 iri="https://spdx.org/rdf/3.0.1/terms/Dataset/datasetAvailability",
5481 compact="dataset_datasetAvailability",
5482 )
5483 # Describes potentially noisy elements of the dataset.
5484 cls._add_property(
5485 "dataset_datasetNoise",
5486 StringProp(),
5487 iri="https://spdx.org/rdf/3.0.1/terms/Dataset/datasetNoise",
5488 compact="dataset_datasetNoise",
5489 )
5490 # Captures the size of the dataset.
5491 cls._add_property(
5492 "dataset_datasetSize",
5493 NonNegativeIntegerProp(),
5494 iri="https://spdx.org/rdf/3.0.1/terms/Dataset/datasetSize",
5495 compact="dataset_datasetSize",
5496 )
5497 # Describes the type of the given dataset.
5498 cls._add_property(
5499 "dataset_datasetType",
5500 ListProp(EnumProp([
5501 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/audio", "audio"),
5502 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/categorical", "categorical"),
5503 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/graph", "graph"),
5504 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/image", "image"),
5505 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/noAssertion", "noAssertion"),
5506 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/numeric", "numeric"),
5507 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/other", "other"),
5508 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/sensor", "sensor"),
5509 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/structured", "structured"),
5510 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/syntactic", "syntactic"),
5511 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/text", "text"),
5512 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timeseries", "timeseries"),
5513 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timestamp", "timestamp"),
5514 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/video", "video"),
5515 ])),
5516 iri="https://spdx.org/rdf/3.0.1/terms/Dataset/datasetType",
5517 min_count=1,
5518 compact="dataset_datasetType",
5519 )
5520 # Describes a mechanism to update the dataset.
5521 cls._add_property(
5522 "dataset_datasetUpdateMechanism",
5523 StringProp(),
5524 iri="https://spdx.org/rdf/3.0.1/terms/Dataset/datasetUpdateMechanism",
5525 compact="dataset_datasetUpdateMechanism",
5526 )
5527 # Describes if any sensitive personal information is present in the dataset.
5528 cls._add_property(
5529 "dataset_hasSensitivePersonalInformation",
5530 EnumProp([
5531 ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/no", "no"),
5532 ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/noAssertion", "noAssertion"),
5533 ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/yes", "yes"),
5534 ]),
5535 iri="https://spdx.org/rdf/3.0.1/terms/Dataset/hasSensitivePersonalInformation",
5536 compact="dataset_hasSensitivePersonalInformation",
5537 )
5538 # Describes what the given dataset should be used for.
5539 cls._add_property(
5540 "dataset_intendedUse",
5541 StringProp(),
5542 iri="https://spdx.org/rdf/3.0.1/terms/Dataset/intendedUse",
5543 compact="dataset_intendedUse",
5544 )
5545 # Records the biases that the dataset is known to encompass.
5546 cls._add_property(
5547 "dataset_knownBias",
5548 ListProp(StringProp()),
5549 iri="https://spdx.org/rdf/3.0.1/terms/Dataset/knownBias",
5550 compact="dataset_knownBias",
5551 )
5552 # Describes a sensor used for collecting the data.
5553 cls._add_property(
5554 "dataset_sensor",
5555 ListProp(ObjectProp(DictionaryEntry, False)),
5556 iri="https://spdx.org/rdf/3.0.1/terms/Dataset/sensor",
5557 compact="dataset_sensor",
5558 )
5559
5560
5561"""Format Guard"""
5562# fmt: on
5563
5564
5565def main():
5566 import argparse
5567 from pathlib import Path
5568
5569 parser = argparse.ArgumentParser(description="Python SHACL model test")
5570 parser.add_argument("infile", type=Path, help="Input file")
5571 parser.add_argument("--print", action="store_true", help="Print object tree")
5572 parser.add_argument("--outfile", type=Path, help="Output file")
5573
5574 args = parser.parse_args()
5575
5576 objectset = SHACLObjectSet()
5577 with args.infile.open("r") as f:
5578 d = JSONLDDeserializer()
5579 d.read(f, objectset)
5580
5581 if args.print:
5582 print_tree(objectset.objects)
5583
5584 if args.outfile:
5585 with args.outfile.open("wb") as f:
5586 s = JSONLDSerializer()
5587 s.write(objectset, f)
5588
5589 return 0
5590
5591
5592if __name__ == "__main__":
5593 sys.exit(main())
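
main() above doubles as a small round-trip test harness. The same round trip can be done programmatically with the classes defined in this module; a minimal sketch with hypothetical file names:

    from pathlib import Path

    objectset = SHACLObjectSet()
    with Path("input.spdx.json").open("r") as f:
        JSONLDDeserializer().read(f, objectset)

    # JSONLDSerializer writes bytes, so the output file is opened in "wb" mode.
    with Path("output.spdx.json").open("wb") as f:
        JSONLDSerializer().write(objectset, f)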
diff --git a/meta/lib/oe/spdx30_tasks.py b/meta/lib/oe/spdx30_tasks.py
new file mode 100644
index 0000000000..5d9f3168d9
--- /dev/null
+++ b/meta/lib/oe/spdx30_tasks.py
@@ -0,0 +1,1368 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import json
8import oe.cve_check
9import oe.packagedata
10import oe.patch
11import oe.sbom30
12import oe.spdx30
13import oe.spdx_common
14import oe.sdk
15import os
16
17from contextlib import contextmanager
18from datetime import datetime, timezone
19from pathlib import Path
20
21
22def walk_error(err):
23 bb.error(f"ERROR walking {err.filename}: {err}")
24
25
26def set_timestamp_now(d, o, prop):
27 if d.getVar("SPDX_INCLUDE_TIMESTAMPS") == "1":
28 setattr(o, prop, datetime.now(timezone.utc))
29 else:
30 # Doing this helps to validate that the property actually exists, and
31 # also that it is not mandatory
32 delattr(o, prop)
33
34
35def add_license_expression(d, objset, license_expression, license_data):
36 simple_license_text = {}
37 license_text_map = {}
38 license_ref_idx = 0
39
40 def add_license_text(name):
41 nonlocal objset
42 nonlocal simple_license_text
43
44 if name in simple_license_text:
45 return simple_license_text[name]
46
47 lic = objset.find_filter(
48 oe.spdx30.simplelicensing_SimpleLicensingText,
49 name=name,
50 )
51
52 if lic is not None:
53 simple_license_text[name] = lic
54 return lic
55
56 lic = objset.add(
57 oe.spdx30.simplelicensing_SimpleLicensingText(
58 _id=objset.new_spdxid("license-text", name),
59 creationInfo=objset.doc.creationInfo,
60 name=name,
61 )
62 )
63 objset.set_element_alias(lic)
64 simple_license_text[name] = lic
65
66 if name == "PD":
67 lic.simplelicensing_licenseText = "Software released to the public domain"
68 return lic
69
70 # Search for the license in COMMON_LICENSE_DIR and LICENSE_PATH
71 for directory in [d.getVar("COMMON_LICENSE_DIR")] + (
72 d.getVar("LICENSE_PATH") or ""
73 ).split():
74 try:
75 with (Path(directory) / name).open(errors="replace") as f:
76 lic.simplelicensing_licenseText = f.read()
77 return lic
78
79 except FileNotFoundError:
80 pass
81
82 # If it's not SPDX or PD, then NO_GENERIC_LICENSE must be set
83 filename = d.getVarFlag("NO_GENERIC_LICENSE", name)
84 if filename:
85 filename = d.expand("${S}/" + filename)
86 with open(filename, errors="replace") as f:
87 lic.simplelicensing_licenseText = f.read()
88 return lic
89 else:
90 bb.fatal("Cannot find any text for license %s" % name)
91
92 def convert(l):
93 nonlocal license_text_map
94 nonlocal license_ref_idx
95
96 if l == "(" or l == ")":
97 return l
98
99 if l == "&":
100 return "AND"
101
102 if l == "|":
103 return "OR"
104
105 if l == "CLOSED":
106 return "NONE"
107
108 spdx_license = d.getVarFlag("SPDXLICENSEMAP", l) or l
109 if spdx_license in license_data["licenses"]:
110 return spdx_license
111
112 spdx_license = "LicenseRef-" + l
113 if spdx_license not in license_text_map:
114 license_text_map[spdx_license] = oe.sbom30.get_element_link_id(
115 add_license_text(l)
116 )
117
118 return spdx_license
119
120 lic_split = (
121 license_expression.replace("(", " ( ")
122 .replace(")", " ) ")
123 .replace("|", " | ")
124 .replace("&", " & ")
125 .split()
126 )
127 spdx_license_expression = " ".join(convert(l) for l in lic_split)
128
129 o = objset.new_license_expression(
130 spdx_license_expression, license_data, license_text_map
131 )
132 objset.set_element_alias(o)
133 return o
134
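
The convert() helper above maps BitBake license operators onto SPDX ones after a whitespace-padding tokenization pass. A standalone sketch of just that tokenization and operator mapping, using a hypothetical expression (the real code additionally consults SPDXLICENSEMAP and emits LicenseRef- entries for unknown licenses):

    expr = "GPL-2.0-only & (MIT | Apache-2.0)"
    tokens = (
        expr.replace("(", " ( ")
        .replace(")", " ) ")
        .replace("|", " | ")
        .replace("&", " & ")
        .split()
    )
    operators = {"&": "AND", "|": "OR", "CLOSED": "NONE"}
    print(" ".join(operators.get(t, t) for t in tokens))
    # GPL-2.0-only AND ( MIT OR Apache-2.0 )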
135
136def add_package_files(
137 d,
138 objset,
139 topdir,
140 get_spdxid,
141 get_purposes,
142 license_data=None,
143 *,
144 archive=None,
145 ignore_dirs=[],
146 ignore_top_level_dirs=[],
147):
148 source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")
149 if source_date_epoch:
150 source_date_epoch = int(source_date_epoch)
151
152 spdx_files = set()
153
154 file_counter = 1
155 if not os.path.exists(topdir):
156 bb.note(f"Skip {topdir}")
157 return spdx_files
158
159 check_compiled_sources = d.getVar("SPDX_INCLUDE_COMPILED_SOURCES") == "1"
160 if check_compiled_sources:
161 compiled_sources, types = oe.spdx_common.get_compiled_sources(d)
162 bb.debug(1, f"Total compiled files: {len(compiled_sources)}")
163
164 for subdir, dirs, files in os.walk(topdir, onerror=walk_error):
165 dirs[:] = [d for d in dirs if d not in ignore_dirs]
166 if subdir == str(topdir):
167 dirs[:] = [d for d in dirs if d not in ignore_top_level_dirs]
168
169 dirs.sort()
170 files.sort()
171 for file in files:
172 filepath = Path(subdir) / file
173 if filepath.is_symlink() or not filepath.is_file():
174 continue
175
176 filename = str(filepath.relative_to(topdir))
177 file_purposes = get_purposes(filepath)
178
179 # Check if file is compiled
180 if check_compiled_sources:
181 if not oe.spdx_common.is_compiled_source(filename, compiled_sources, types):
182 continue
183
184 spdx_file = objset.new_file(
185 get_spdxid(file_counter),
186 filename,
187 filepath,
188 purposes=file_purposes,
189 )
190 spdx_files.add(spdx_file)
191
192 if (
193 oe.spdx30.software_SoftwarePurpose.source in file_purposes
194 and license_data is not None
195 ):
196 objset.scan_declared_licenses(spdx_file, filepath, license_data)
197
198 if archive is not None:
199 with filepath.open("rb") as f:
200 info = archive.gettarinfo(fileobj=f)
201 info.name = filename
202 info.uid = 0
203 info.gid = 0
204 info.uname = "root"
205 info.gname = "root"
206
207 if source_date_epoch is not None and info.mtime > source_date_epoch:
208 info.mtime = source_date_epoch
209
210 archive.addfile(info, f)
211
212 file_counter += 1
213
214 bb.debug(1, "Added %d files to %s" % (len(spdx_files), objset.doc._id))
215
216 return spdx_files
217
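
When an archive is passed in, add_package_files() normalizes the tar metadata so the resulting archive is reproducible: ownership is forced to root and mtimes newer than SOURCE_DATE_EPOCH are clamped. A minimal standalone sketch of the same normalization, with hypothetical paths and epoch:

    import tarfile

    source_date_epoch = 1700000000  # hypothetical SOURCE_DATE_EPOCH value

    with tarfile.open("sources.tar.gz", "w:gz") as archive:
        with open("hello.c", "rb") as f:
            info = archive.gettarinfo(fileobj=f)
            info.uid = 0
            info.gid = 0
            info.uname = "root"
            info.gname = "root"
            # Clamp the mtime for reproducibility.
            if source_date_epoch is not None and info.mtime > source_date_epoch:
                info.mtime = source_date_epoch
            archive.addfile(info, f)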
218
219def get_package_sources_from_debug(
220 d, package, package_files, sources, source_hash_cache
221):
222 def file_path_match(file_path, pkg_file):
223 if file_path.lstrip("/") == pkg_file.name.lstrip("/"):
224 return True
225
226 for e in pkg_file.extension:
227 if isinstance(e, oe.sbom30.OEFileNameAliasExtension):
228 for a in e.aliases:
229 if file_path.lstrip("/") == a.lstrip("/"):
230 return True
231
232 return False
233
234 debug_search_paths = [
235 Path(d.getVar("SPDXWORK")),
236 Path(d.getVar("PKGD")),
237 Path(d.getVar("STAGING_DIR_TARGET")),
238 Path(d.getVar("STAGING_DIR_NATIVE")),
239 Path(d.getVar("STAGING_KERNEL_DIR")),
240 ]
241
242 pkg_data = oe.packagedata.read_subpkgdata_extended(package, d)
243
244 if pkg_data is None:
245 return
246
247 dep_source_files = set()
248
249 for file_path, file_data in pkg_data["files_info"].items():
250 if not "debugsrc" in file_data:
251 continue
252
253 if not any(file_path_match(file_path, pkg_file) for pkg_file in package_files):
254 bb.fatal(
255 "No package file found for %s in %s; SPDX found: %s"
256 % (str(file_path), package, " ".join(p.name for p in package_files))
257 )
258 continue
259
260 for debugsrc in file_data["debugsrc"]:
261 for search in debug_search_paths:
262 if debugsrc.startswith("/usr/src/kernel"):
263 debugsrc_path = search / debugsrc.replace("/usr/src/kernel/", "")
264 else:
265 debugsrc_path = search / debugsrc.lstrip("/")
266
267 if debugsrc_path in source_hash_cache:
268 file_sha256 = source_hash_cache[debugsrc_path]
269 if file_sha256 is None:
270 continue
271 else:
272 # We can only hash files below, skip directories, links, etc.
273 if not debugsrc_path.is_file():
274 source_hash_cache[debugsrc_path] = None
275 continue
276
277 file_sha256 = bb.utils.sha256_file(debugsrc_path)
278 source_hash_cache[debugsrc_path] = file_sha256
279
280 if file_sha256 in sources:
281 source_file = sources[file_sha256]
282 dep_source_files.add(source_file)
283 else:
284 bb.debug(
285 1,
286 "Debug source %s with SHA256 %s not found in any dependency"
287 % (str(debugsrc_path), file_sha256),
288 )
289 break
290 else:
291 bb.debug(1, "Debug source %s not found" % debugsrc)
292
293 return dep_source_files
294
295
296def collect_dep_objsets(d, build):
297 deps = oe.spdx_common.get_spdx_deps(d)
298
299 dep_objsets = []
300 dep_builds = set()
301
302 dep_build_spdxids = set()
303 for dep in deps:
304 bb.debug(1, "Fetching SPDX for dependency %s" % (dep.pn))
305 dep_build, dep_objset = oe.sbom30.find_root_obj_in_jsonld(
306 d, "recipes", "recipe-" + dep.pn, oe.spdx30.build_Build
307 )
308 # If the dependency is part of the taskhash, return it to be linked
309 # against. Otherwise, it cannot be linked against because this recipe
310 # will not be rebuilt if the dependency changes
311 if dep.in_taskhash:
312 dep_objsets.append(dep_objset)
313
314 # The build _can_ be linked against (by alias)
315 dep_builds.add(dep_build)
316
317 return dep_objsets, dep_builds
318
319
320def index_sources_by_hash(sources, dest):
321 for s in sources:
322 if not isinstance(s, oe.spdx30.software_File):
323 continue
324
325 if s.software_primaryPurpose != oe.spdx30.software_SoftwarePurpose.source:
326 continue
327
328 for v in s.verifiedUsing:
329 if v.algorithm == oe.spdx30.HashAlgorithm.sha256:
330 if not v.hashValue in dest:
331 dest[v.hashValue] = s
332 break
333 else:
334 bb.fatal(f"No SHA256 found for {s.name}")
335
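
Note the for/else in index_sources_by_hash(): the else branch runs only when the loop finishes without hitting break, i.e. when none of the verifiedUsing hashes was a SHA-256. A tiny standalone illustration of the idiom:

    for algorithm in ["md5", "sha1"]:
        if algorithm == "sha256":
            break
    else:
        print("No SHA256 found")  # reached because the loop never hit break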
336
337def collect_dep_sources(dep_objsets, dest):
338 for objset in dep_objsets:
339 # Don't collect sources from native recipes, as they
340 # also match non-native sources.
341 if objset.is_native():
342 continue
343
344 bb.debug(1, "Fetching Sources for dependency %s" % (objset.doc.name))
345
346 dep_build = objset.find_root(oe.spdx30.build_Build)
347 if not dep_build:
348 bb.fatal("Unable to find a build")
349
350 for e in objset.foreach_type(oe.spdx30.Relationship):
351 if dep_build is not e.from_:
352 continue
353
354 if e.relationshipType != oe.spdx30.RelationshipType.hasInput:
355 continue
356
357 index_sources_by_hash(e.to, dest)
358
359
360def add_download_files(d, objset):
361 inputs = set()
362
363 urls = d.getVar("SRC_URI").split()
364 fetch = bb.fetch2.Fetch(urls, d)
365
366 for download_idx, src_uri in enumerate(urls):
367 fd = fetch.ud[src_uri]
368
369 file_name = os.path.basename(fetch.localpath(src_uri))
370 if oe.patch.patch_path(src_uri, fetch, "", expand=False):
371 primary_purpose = oe.spdx30.software_SoftwarePurpose.patch
372 else:
373 primary_purpose = oe.spdx30.software_SoftwarePurpose.source
374
375 if fd.type == "file":
376 if os.path.isdir(fd.localpath):
377 walk_idx = 1
378 for root, dirs, files in os.walk(fd.localpath, onerror=walk_error):
379 dirs.sort()
380 files.sort()
381 for f in files:
382 f_path = os.path.join(root, f)
383 if os.path.islink(f_path):
384 # TODO: SPDX doesn't support symlinks yet
385 continue
386
387 file = objset.new_file(
388 objset.new_spdxid(
389 "source", str(download_idx + 1), str(walk_idx)
390 ),
391 os.path.join(
392 file_name, os.path.relpath(f_path, fd.localpath)
393 ),
394 f_path,
395 purposes=[primary_purpose],
396 )
397
398 inputs.add(file)
399 walk_idx += 1
400
401 else:
402 file = objset.new_file(
403 objset.new_spdxid("source", str(download_idx + 1)),
404 file_name,
405 fd.localpath,
406 purposes=[primary_purpose],
407 )
408 inputs.add(file)
409
410 else:
411 dl = objset.add(
412 oe.spdx30.software_Package(
413 _id=objset.new_spdxid("source", str(download_idx + 1)),
414 creationInfo=objset.doc.creationInfo,
415 name=file_name,
416 software_primaryPurpose=primary_purpose,
417 software_downloadLocation=oe.spdx_common.fetch_data_to_uri(
418 fd, fd.name
419 ),
420 )
421 )
422
423 if fd.method.supports_checksum(fd):
424 # TODO Need something better than hard coding this
425 for checksum_id in ["sha256", "sha1"]:
426 expected_checksum = getattr(
427 fd, "%s_expected" % checksum_id, None
428 )
429 if expected_checksum is None:
430 continue
431
432 dl.verifiedUsing.append(
433 oe.spdx30.Hash(
434 algorithm=getattr(oe.spdx30.HashAlgorithm, checksum_id),
435 hashValue=expected_checksum,
436 )
437 )
438
439 inputs.add(dl)
440
441 return inputs
442
443
444def set_purposes(d, element, *var_names, force_purposes=[]):
445 purposes = force_purposes[:]
446
447 for var_name in var_names:
448 val = d.getVar(var_name)
449 if val:
450 purposes.extend(val.split())
451 break
452
453 if not purposes:
454 bb.warn("No SPDX purposes found in %s" % " ".join(var_names))
455 return
456
457 element.software_primaryPurpose = getattr(
458 oe.spdx30.software_SoftwarePurpose, purposes[0]
459 )
460 element.software_additionalPurpose = [
461 getattr(oe.spdx30.software_SoftwarePurpose, p) for p in purposes[1:]
462 ]
463
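
set_purposes() starts from any forced purposes, then appends the purposes from the first variable that is set; the first entry becomes the primary purpose and the remainder become additional purposes. A dependency-free sketch of that splitting rule, with hypothetical values:

    purposes = ["install"] + "library archive".split()  # forced + variable value
    primary, additional = purposes[0], purposes[1:]
    print(primary)     # install
    print(additional)  # ['library', 'archive']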
464
465def create_spdx(d):
466 def set_var_field(var, obj, name, package=None):
467 val = None
468 if package:
469 val = d.getVar("%s:%s" % (var, package))
470
471 if not val:
472 val = d.getVar(var)
473
474 if val:
475 setattr(obj, name, val)
476
477 license_data = oe.spdx_common.load_spdx_license_data(d)
478
479 deploydir = Path(d.getVar("SPDXDEPLOY"))
480 deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
481 spdx_workdir = Path(d.getVar("SPDXWORK"))
482 include_sources = d.getVar("SPDX_INCLUDE_SOURCES") == "1"
483 pkg_arch = d.getVar("SSTATE_PKGARCH")
484 is_native = bb.data.inherits_class("native", d) or bb.data.inherits_class(
485 "cross", d
486 )
487 include_vex = d.getVar("SPDX_INCLUDE_VEX")
488 if not include_vex in ("none", "current", "all"):
489 bb.fatal("SPDX_INCLUDE_VEX must be one of 'none', 'current', 'all'")
490
491 build_objset = oe.sbom30.ObjectSet.new_objset(d, "recipe-" + d.getVar("PN"))
492
493 build = build_objset.new_task_build("recipe", "recipe")
494 build_objset.set_element_alias(build)
495
496 build_objset.doc.rootElement.append(build)
497
498 build_objset.set_is_native(is_native)
499
500 for var in (d.getVar("SPDX_CUSTOM_ANNOTATION_VARS") or "").split():
501 new_annotation(
502 d,
503 build_objset,
504 build,
505 "%s=%s" % (var, d.getVar(var)),
506 oe.spdx30.AnnotationType.other,
507 )
508
509 build_inputs = set()
510
511 # Add CVEs
512 cve_by_status = {}
513 if include_vex != "none":
514 patched_cves = oe.cve_check.get_patched_cves(d)
515 for cve, patched_cve in patched_cves.items():
516 decoded_status = {
517 "mapping": patched_cve["abbrev-status"],
518 "detail": patched_cve["status"],
519 "description": patched_cve.get("justification", None)
520 }
521
522 # If this CVE is fixed upstream, skip it unless all CVEs are
523 # specified.
524 if (
525 include_vex != "all"
526 and "detail" in decoded_status
527 and decoded_status["detail"]
528 in (
529 "fixed-version",
530 "cpe-stable-backport",
531 )
532 ):
533 bb.debug(1, "Skipping %s since it is already fixed upstream" % cve)
534 continue
535
536 spdx_cve = build_objset.new_cve_vuln(cve)
537 build_objset.set_element_alias(spdx_cve)
538
539 cve_by_status.setdefault(decoded_status["mapping"], {})[cve] = (
540 spdx_cve,
541 decoded_status["detail"],
542 decoded_status["description"],
543 )
544
545 cpe_ids = oe.cve_check.get_cpe_ids(d.getVar("CVE_PRODUCT"), d.getVar("CVE_VERSION"))
546
547 source_files = add_download_files(d, build_objset)
548 build_inputs |= source_files
549
550 recipe_spdx_license = add_license_expression(
551 d, build_objset, d.getVar("LICENSE"), license_data
552 )
553 build_objset.new_relationship(
554 source_files,
555 oe.spdx30.RelationshipType.hasDeclaredLicense,
556 [oe.sbom30.get_element_link_id(recipe_spdx_license)],
557 )
558
559 dep_sources = {}
560 if oe.spdx_common.process_sources(d) and include_sources:
561 bb.debug(1, "Adding source files to SPDX")
562 oe.spdx_common.get_patched_src(d)
563
564 files = add_package_files(
565 d,
566 build_objset,
567 spdx_workdir,
568 lambda file_counter: build_objset.new_spdxid(
569 "sourcefile", str(file_counter)
570 ),
571 lambda filepath: [oe.spdx30.software_SoftwarePurpose.source],
572 license_data,
573 ignore_dirs=[".git"],
574 ignore_top_level_dirs=["temp"],
575 archive=None,
576 )
577 build_inputs |= files
578 index_sources_by_hash(files, dep_sources)
579
580 dep_objsets, dep_builds = collect_dep_objsets(d, build)
581 if dep_builds:
582 build_objset.new_scoped_relationship(
583 [build],
584 oe.spdx30.RelationshipType.dependsOn,
585 oe.spdx30.LifecycleScopeType.build,
586 sorted(oe.sbom30.get_element_link_id(b) for b in dep_builds),
587 )
588
589 debug_source_ids = set()
590 source_hash_cache = {}
591
592 # Write out the package SPDX data now. It is not complete, as we cannot
593 # write the runtime data yet, so write it to a staging area; a later task
594 # will write out the final collection
595
596 # TODO: Handle native recipe output
597 if not is_native:
598 bb.debug(1, "Collecting Dependency sources files")
599 collect_dep_sources(dep_objsets, dep_sources)
600
601 bb.build.exec_func("read_subpackage_metadata", d)
602
603 pkgdest = Path(d.getVar("PKGDEST"))
604 for package in d.getVar("PACKAGES").split():
605 if not oe.packagedata.packaged(package, d):
606 continue
607
608 pkg_name = d.getVar("PKG:%s" % package) or package
609
610 bb.debug(1, "Creating SPDX for package %s" % pkg_name)
611
612 pkg_objset = oe.sbom30.ObjectSet.new_objset(d, "package-" + pkg_name)
613
614 spdx_package = pkg_objset.add_root(
615 oe.spdx30.software_Package(
616 _id=pkg_objset.new_spdxid("package", pkg_name),
617 creationInfo=pkg_objset.doc.creationInfo,
618 name=pkg_name,
619 software_packageVersion=d.getVar("SPDX_PACKAGE_VERSION"),
620 )
621 )
622 set_timestamp_now(d, spdx_package, "builtTime")
623
624 set_purposes(
625 d,
626 spdx_package,
627 "SPDX_PACKAGE_ADDITIONAL_PURPOSE:%s" % package,
628 "SPDX_PACKAGE_ADDITIONAL_PURPOSE",
629 force_purposes=["install"],
630 )
631
632 supplier = build_objset.new_agent("SPDX_PACKAGE_SUPPLIER")
633 if supplier is not None:
634 spdx_package.suppliedBy = (
635 supplier if isinstance(supplier, str) else supplier._id
636 )
637
638 set_var_field(
639 "HOMEPAGE", spdx_package, "software_homePage", package=package
640 )
641 set_var_field("SUMMARY", spdx_package, "summary", package=package)
642 set_var_field("DESCRIPTION", spdx_package, "description", package=package)
643
644 if d.getVar("SPDX_PACKAGE_URL:%s" % package) or d.getVar("SPDX_PACKAGE_URL"):
645 set_var_field(
646 "SPDX_PACKAGE_URL",
647 spdx_package,
648 "software_packageUrl",
649 package=package
650 )
651
652 pkg_objset.new_scoped_relationship(
653 [oe.sbom30.get_element_link_id(build)],
654 oe.spdx30.RelationshipType.hasOutput,
655 oe.spdx30.LifecycleScopeType.build,
656 [spdx_package],
657 )
658
659 for cpe_id in cpe_ids:
660 spdx_package.externalIdentifier.append(
661 oe.spdx30.ExternalIdentifier(
662 externalIdentifierType=oe.spdx30.ExternalIdentifierType.cpe23,
663 identifier=cpe_id,
664 )
665 )
666
667 # TODO: Generate a file for each actual IPK/DEB/RPM/TGZ file
668 # generated and link it to the package
669 # spdx_package_file = pkg_objset.add(oe.spdx30.software_File(
670 # _id=pkg_objset.new_spdxid("distribution", pkg_name),
671 # creationInfo=pkg_objset.doc.creationInfo,
672 # name=pkg_name,
673 # software_primaryPurpose=spdx_package.software_primaryPurpose,
674 # software_additionalPurpose=spdx_package.software_additionalPurpose,
675 # ))
676 # set_timestamp_now(d, spdx_package_file, "builtTime")
677
678 ## TODO add hashes
679 # pkg_objset.new_relationship(
680 # [spdx_package],
681 # oe.spdx30.RelationshipType.hasDistributionArtifact,
682 # [spdx_package_file],
683 # )
684
685 # NOTE: licenses live in the recipe collection and are referenced
686 # by ID in the package collection(s). This helps reduce duplication
687 # (since a lot of packages will have the same license), and also
688 # prevents duplicate license SPDX IDs in the packages
689 package_license = d.getVar("LICENSE:%s" % package)
690 if package_license and package_license != d.getVar("LICENSE"):
691 package_spdx_license = add_license_expression(
692 d, build_objset, package_license, license_data
693 )
694 else:
695 package_spdx_license = recipe_spdx_license
696
697 pkg_objset.new_relationship(
698 [spdx_package],
699 oe.spdx30.RelationshipType.hasConcludedLicense,
700 [oe.sbom30.get_element_link_id(package_spdx_license)],
701 )
702
703 # NOTE: CVE Elements live in the recipe collection
704 all_cves = set()
705 for status, cves in cve_by_status.items():
706 for cve, items in cves.items():
707 spdx_cve, detail, description = items
708 spdx_cve_id = oe.sbom30.get_element_link_id(spdx_cve)
709
710 all_cves.add(spdx_cve_id)
711
712 if status == "Patched":
713 pkg_objset.new_vex_patched_relationship(
714 [spdx_cve_id], [spdx_package]
715 )
716 elif status == "Unpatched":
717 pkg_objset.new_vex_unpatched_relationship(
718 [spdx_cve_id], [spdx_package]
719 )
720 elif status == "Ignored":
721 spdx_vex = pkg_objset.new_vex_ignored_relationship(
722 [spdx_cve_id],
723 [spdx_package],
724 impact_statement=description,
725 )
726
727 if detail in (
728 "ignored",
729 "cpe-incorrect",
730 "disputed",
731 "upstream-wontfix",
732 ):
733 # VEX doesn't have justifications for this
734 pass
735 elif detail in (
736 "not-applicable-config",
737 "not-applicable-platform",
738 ):
739 for v in spdx_vex:
740 v.security_justificationType = (
741 oe.spdx30.security_VexJustificationType.vulnerableCodeNotPresent
742 )
743 else:
744 bb.fatal(f"Unknown detail '{detail}' for ignored {cve}")
745 elif status == "Unknown":
746 bb.note(f"Skipping {cve} with status 'Unknown'")
747 else:
748 bb.fatal(f"Unknown {cve} status '{status}'")
749
750 if all_cves:
751 pkg_objset.new_relationship(
752 [spdx_package],
753 oe.spdx30.RelationshipType.hasAssociatedVulnerability,
754 sorted(list(all_cves)),
755 )
756
757 bb.debug(1, "Adding package files to SPDX for package %s" % pkg_name)
758 package_files = add_package_files(
759 d,
760 pkg_objset,
761 pkgdest / package,
762 lambda file_counter: pkg_objset.new_spdxid(
763 "package", pkg_name, "file", str(file_counter)
764 ),
765 # TODO: Can we know the purpose here?
766 lambda filepath: [],
767 license_data,
768 ignore_top_level_dirs=["CONTROL", "DEBIAN"],
769 archive=None,
770 )
771
772 if package_files:
773 pkg_objset.new_relationship(
774 [spdx_package],
775 oe.spdx30.RelationshipType.contains,
776 sorted(list(package_files)),
777 )
778
779 if include_sources:
780 debug_sources = get_package_sources_from_debug(
781 d, package, package_files, dep_sources, source_hash_cache
782 )
783 debug_source_ids |= set(
784 oe.sbom30.get_element_link_id(d) for d in debug_sources
785 )
786
787 oe.sbom30.write_recipe_jsonld_doc(
788 d, pkg_objset, "packages-staging", deploydir, create_spdx_id_links=False
789 )
790
791 if include_sources:
792 bb.debug(1, "Adding sysroot files to SPDX")
793 sysroot_files = add_package_files(
794 d,
795 build_objset,
796 d.expand("${COMPONENTS_DIR}/${PACKAGE_ARCH}/${PN}"),
797 lambda file_counter: build_objset.new_spdxid("sysroot", str(file_counter)),
798 lambda filepath: [],
799 license_data,
800 archive=None,
801 )
802
803 if sysroot_files:
804 build_objset.new_scoped_relationship(
805 [build],
806 oe.spdx30.RelationshipType.hasOutput,
807 oe.spdx30.LifecycleScopeType.build,
808 sorted(list(sysroot_files)),
809 )
810
811 if build_inputs or debug_source_ids:
812 build_objset.new_scoped_relationship(
813 [build],
814 oe.spdx30.RelationshipType.hasInput,
815 oe.spdx30.LifecycleScopeType.build,
816 sorted(list(build_inputs)) + sorted(list(debug_source_ids)),
817 )
818
819 oe.sbom30.write_recipe_jsonld_doc(d, build_objset, "recipes", deploydir)
820
821
822def create_package_spdx(d):
823 deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
824 deploydir = Path(d.getVar("SPDXRUNTIMEDEPLOY"))
825 is_native = bb.data.inherits_class("native", d) or bb.data.inherits_class(
826 "cross", d
827 )
828
829 providers = oe.spdx_common.collect_package_providers(d)
830 pkg_arch = d.getVar("SSTATE_PKGARCH")
831
832 if is_native:
833 return
834
835 bb.build.exec_func("read_subpackage_metadata", d)
836
837 dep_package_cache = {}
838
 839 # Any element that is common to all packages and needs to be referenced
 840 # by ID should be written into this objset
841 common_objset = oe.sbom30.ObjectSet.new_objset(
842 d, "%s-package-common" % d.getVar("PN")
843 )
844
845 pkgdest = Path(d.getVar("PKGDEST"))
846 for package in d.getVar("PACKAGES").split():
847 localdata = bb.data.createCopy(d)
848 pkg_name = d.getVar("PKG:%s" % package) or package
849 localdata.setVar("PKG", pkg_name)
850 localdata.setVar("OVERRIDES", d.getVar("OVERRIDES", False) + ":" + package)
851
852 if not oe.packagedata.packaged(package, localdata):
853 continue
854
855 spdx_package, pkg_objset = oe.sbom30.load_obj_in_jsonld(
856 d,
857 pkg_arch,
858 "packages-staging",
859 "package-" + pkg_name,
860 oe.spdx30.software_Package,
861 software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.install,
862 )
863
864 # We will write out a new collection, so link it to the new
865 # creation info in the common package data. The old creation info
866 # should still exist and be referenced by all the existing elements
867 # in the package
868 pkg_objset.creationInfo = pkg_objset.copy_creation_info(
869 common_objset.doc.creationInfo
870 )
871
872 runtime_spdx_deps = set()
873
874 deps = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS") or "")
875 seen_deps = set()
876 for dep, _ in deps.items():
877 if dep in seen_deps:
878 continue
879
880 if dep not in providers:
881 continue
882
883 (dep, _) = providers[dep]
884
885 if not oe.packagedata.packaged(dep, localdata):
886 continue
887
888 dep_pkg_data = oe.packagedata.read_subpkgdata_dict(dep, d)
889 dep_pkg = dep_pkg_data["PKG"]
890
891 if dep in dep_package_cache:
892 dep_spdx_package = dep_package_cache[dep]
893 else:
894 bb.debug(1, "Searching for %s" % dep_pkg)
895 dep_spdx_package, _ = oe.sbom30.find_root_obj_in_jsonld(
896 d,
897 "packages-staging",
898 "package-" + dep_pkg,
899 oe.spdx30.software_Package,
900 software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.install,
901 )
902 dep_package_cache[dep] = dep_spdx_package
903
904 runtime_spdx_deps.add(dep_spdx_package)
905 seen_deps.add(dep)
906
907 if runtime_spdx_deps:
908 pkg_objset.new_scoped_relationship(
909 [spdx_package],
910 oe.spdx30.RelationshipType.dependsOn,
911 oe.spdx30.LifecycleScopeType.runtime,
912 [oe.sbom30.get_element_link_id(dep) for dep in runtime_spdx_deps],
913 )
914
915 oe.sbom30.write_recipe_jsonld_doc(d, pkg_objset, "packages", deploydir)
916
917 oe.sbom30.write_recipe_jsonld_doc(d, common_objset, "common-package", deploydir)
918
919
920def write_bitbake_spdx(d):
921 # Set PN to "bitbake" so that SPDX IDs can be generated
922 d.setVar("PN", "bitbake")
923 d.setVar("BB_TASKHASH", "bitbake")
924 oe.spdx_common.load_spdx_license_data(d)
925
926 deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
927
928 objset = oe.sbom30.ObjectSet.new_objset(d, "bitbake", False)
929
930 host_import_key = d.getVar("SPDX_BUILD_HOST")
931 invoked_by = objset.new_agent("SPDX_INVOKED_BY", add=False)
932 on_behalf_of = objset.new_agent("SPDX_ON_BEHALF_OF", add=False)
933
934 if d.getVar("SPDX_INCLUDE_BITBAKE_PARENT_BUILD") == "1":
935 # Since the Build objects are unique, we may as well set the creation
936 # time to the current time instead of the fallback SDE
937 objset.doc.creationInfo.created = datetime.now(timezone.utc)
938
939 # Each invocation of bitbake should have a unique ID since it is a
940 # unique build
941 nonce = os.urandom(16).hex()
942
943 build = objset.add_root(
944 oe.spdx30.build_Build(
945 _id=objset.new_spdxid(nonce, include_unihash=False),
946 creationInfo=objset.doc.creationInfo,
947 build_buildType=oe.sbom30.SPDX_BUILD_TYPE,
948 )
949 )
950 set_timestamp_now(d, build, "build_buildStartTime")
951
952 if host_import_key:
953 objset.new_scoped_relationship(
954 [build],
955 oe.spdx30.RelationshipType.hasHost,
956 oe.spdx30.LifecycleScopeType.build,
957 [objset.new_import(host_import_key)],
958 )
959
960 if invoked_by:
961 objset.add(invoked_by)
962 invoked_by_spdx = objset.new_scoped_relationship(
963 [build],
964 oe.spdx30.RelationshipType.invokedBy,
965 oe.spdx30.LifecycleScopeType.build,
966 [invoked_by],
967 )
968
969 if on_behalf_of:
970 objset.add(on_behalf_of)
971 objset.new_scoped_relationship(
972 [on_behalf_of],
973 oe.spdx30.RelationshipType.delegatedTo,
974 oe.spdx30.LifecycleScopeType.build,
975 invoked_by_spdx,
976 )
977
978 elif on_behalf_of:
979 bb.warn("SPDX_ON_BEHALF_OF has no effect if SPDX_INVOKED_BY is not set")
980
981 else:
982 if host_import_key:
983 bb.warn(
984 "SPDX_BUILD_HOST has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set"
985 )
986
987 if invoked_by:
988 bb.warn(
989 "SPDX_INVOKED_BY has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set"
990 )
991
992 if on_behalf_of:
993 bb.warn(
994 "SPDX_ON_BEHALF_OF has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set"
995 )
996
997 for obj in objset.foreach_type(oe.spdx30.Element):
998 obj.extension.append(oe.sbom30.OEIdAliasExtension())
999
1000 oe.sbom30.write_jsonld_doc(d, objset, deploy_dir_spdx / "bitbake.spdx.json")
1001
1002
1003def collect_build_package_inputs(d, objset, build, packages, files_by_hash=None):
1004 import oe.sbom30
1005
1006 providers = oe.spdx_common.collect_package_providers(d)
1007
1008 build_deps = set()
1009 missing_providers = set()
1010
1011 for name in sorted(packages.keys()):
1012 if name not in providers:
1013 missing_providers.add(name)
1014 continue
1015
1016 pkg_name, pkg_hashfn = providers[name]
1017
1018 # Copy all of the package SPDX files into the Sbom elements
1019 pkg_spdx, pkg_objset = oe.sbom30.find_root_obj_in_jsonld(
1020 d,
1021 "packages",
1022 "package-" + pkg_name,
1023 oe.spdx30.software_Package,
1024 software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.install,
1025 )
1026 build_deps.add(oe.sbom30.get_element_link_id(pkg_spdx))
1027
1028 if files_by_hash is not None:
1029 for h, f in pkg_objset.by_sha256_hash.items():
1030 files_by_hash.setdefault(h, set()).update(f)
1031
1032 if missing_providers:
1033 bb.fatal(
1034 f"Unable to find SPDX provider(s) for: {', '.join(sorted(missing_providers))}"
1035 )
1036
1037 if build_deps:
1038 objset.new_scoped_relationship(
1039 [build],
1040 oe.spdx30.RelationshipType.hasInput,
1041 oe.spdx30.LifecycleScopeType.build,
1042 sorted(list(build_deps)),
1043 )
1044
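Only the keys of the packages mapping are consulted above, so any mapping of runtime package names will do; a small sketch (names, versions and surrounding variables illustrative):

    packages = {"base-files": "3.0.14-r89", "busybox": "1.36.1-r0"}  # name -> version; values unused here
    collect_build_package_inputs(d, objset, rootfs_build, packages)
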
1045
1046def create_rootfs_spdx(d):
1047 deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
1048 deploydir = Path(d.getVar("SPDXROOTFSDEPLOY"))
1049 root_packages_file = Path(d.getVar("SPDX_ROOTFS_PACKAGES"))
1050 image_basename = d.getVar("IMAGE_BASENAME")
1051 image_rootfs = d.getVar("IMAGE_ROOTFS")
1052 machine = d.getVar("MACHINE")
1053
1054 with root_packages_file.open("r") as f:
1055 packages = json.load(f)
1056
1057 objset = oe.sbom30.ObjectSet.new_objset(
1058 d, "%s-%s-rootfs" % (image_basename, machine)
1059 )
1060
1061 rootfs = objset.add_root(
1062 oe.spdx30.software_Package(
1063 _id=objset.new_spdxid("rootfs", image_basename),
1064 creationInfo=objset.doc.creationInfo,
1065 name=image_basename,
1066 software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.archive,
1067 )
1068 )
1069 set_timestamp_now(d, rootfs, "builtTime")
1070
1071 rootfs_build = objset.add_root(objset.new_task_build("rootfs", "rootfs"))
1072 set_timestamp_now(d, rootfs_build, "build_buildEndTime")
1073
1074 objset.new_scoped_relationship(
1075 [rootfs_build],
1076 oe.spdx30.RelationshipType.hasOutput,
1077 oe.spdx30.LifecycleScopeType.build,
1078 [rootfs],
1079 )
1080
1081 files_by_hash = {}
1082 collect_build_package_inputs(d, objset, rootfs_build, packages, files_by_hash)
1083
1084 files = set()
1085 for dirpath, dirnames, filenames in os.walk(image_rootfs, onerror=walk_error):
1086 dirnames.sort()
1087 filenames.sort()
1088 for fn in filenames:
1089 fpath = Path(dirpath) / fn
1090 if fpath.is_symlink() or not fpath.is_file():
1091 continue
1092
1093 relpath = str(fpath.relative_to(image_rootfs))
1094 h = bb.utils.sha256_file(fpath)
1095
1096 found = False
1097 if h in files_by_hash:
1098 for f in files_by_hash[h]:
1099 if isinstance(f, oe.spdx30.software_File) and f.name == relpath:
1100 files.add(oe.sbom30.get_element_link_id(f))
1101 found = True
1102 break
1103
1104 if not found:
1105 files.add(
1106 objset.new_file(
1107 objset.new_spdxid("rootfs-file", relpath),
1108 relpath,
1109 fpath,
1110 )
1111 )
1112
1113 if files:
1114 objset.new_relationship(
1115 [rootfs],
1116 oe.spdx30.RelationshipType.contains,
1117 sorted(list(files)),
1118 )
1119
1120 oe.sbom30.write_recipe_jsonld_doc(d, objset, "rootfs", deploydir)
1121
1122
1123def create_image_spdx(d):
1124 import oe.sbom30
1125
1126 image_deploy_dir = Path(d.getVar("IMGDEPLOYDIR"))
1127 manifest_path = Path(d.getVar("IMAGE_OUTPUT_MANIFEST"))
1128 spdx_work_dir = Path(d.getVar("SPDXIMAGEWORK"))
1129
1130 image_basename = d.getVar("IMAGE_BASENAME")
1131 machine = d.getVar("MACHINE")
1132
1133 objset = oe.sbom30.ObjectSet.new_objset(
1134 d, "%s-%s-image" % (image_basename, machine)
1135 )
1136
1137 with manifest_path.open("r") as f:
1138 manifest = json.load(f)
1139
1140 builds = []
1141 for task in manifest:
1142 imagetype = task["imagetype"]
1143 taskname = task["taskname"]
1144
1145 image_build = objset.add_root(
1146 objset.new_task_build(taskname, "image/%s" % imagetype)
1147 )
1148 set_timestamp_now(d, image_build, "build_buildEndTime")
1149 builds.append(image_build)
1150
1151 artifacts = []
1152
1153 for image in task["images"]:
1154 image_filename = image["filename"]
1155 image_path = image_deploy_dir / image_filename
1156 if os.path.isdir(image_path):
1157 a = add_package_files(
1158 d,
1159 objset,
1160 image_path,
1161 lambda file_counter: objset.new_spdxid(
1162 "imagefile", str(file_counter)
1163 ),
1164 lambda filepath: [],
1165 license_data=None,
1166 ignore_dirs=[],
1167 ignore_top_level_dirs=[],
1168 archive=None,
1169 )
1170 artifacts.extend(a)
1171 else:
1172 a = objset.add_root(
1173 oe.spdx30.software_File(
1174 _id=objset.new_spdxid("image", image_filename),
1175 creationInfo=objset.doc.creationInfo,
1176 name=image_filename,
1177 verifiedUsing=[
1178 oe.spdx30.Hash(
1179 algorithm=oe.spdx30.HashAlgorithm.sha256,
1180 hashValue=bb.utils.sha256_file(image_path),
1181 )
1182 ],
1183 )
1184 )
1185
1186 artifacts.append(a)
1187
1188 for a in artifacts:
1189 set_purposes(
1190 d, a, "SPDX_IMAGE_PURPOSE:%s" % imagetype, "SPDX_IMAGE_PURPOSE"
1191 )
1192
1193 set_timestamp_now(d, a, "builtTime")
1194
1195
1196 if artifacts:
1197 objset.new_scoped_relationship(
1198 [image_build],
1199 oe.spdx30.RelationshipType.hasOutput,
1200 oe.spdx30.LifecycleScopeType.build,
1201 artifacts,
1202 )
1203
1204 if builds:
1205 rootfs_image, _ = oe.sbom30.find_root_obj_in_jsonld(
1206 d,
1207 "rootfs",
1208 "%s-%s-rootfs" % (image_basename, machine),
1209 oe.spdx30.software_Package,
1210 # TODO: Should use a purpose to filter here?
1211 )
1212 objset.new_scoped_relationship(
1213 builds,
1214 oe.spdx30.RelationshipType.hasInput,
1215 oe.spdx30.LifecycleScopeType.build,
1216 [oe.sbom30.get_element_link_id(rootfs_image)],
1217 )
1218
1219 objset.add_aliases()
1220 objset.link()
1221 oe.sbom30.write_recipe_jsonld_doc(d, objset, "image", spdx_work_dir)
1222
1223
1224def create_image_sbom_spdx(d):
1225 import oe.sbom30
1226
1227 image_name = d.getVar("IMAGE_NAME")
1228 image_basename = d.getVar("IMAGE_BASENAME")
1229 image_link_name = d.getVar("IMAGE_LINK_NAME")
1230 imgdeploydir = Path(d.getVar("SPDXIMAGEDEPLOYDIR"))
1231 machine = d.getVar("MACHINE")
1232
1233 spdx_path = imgdeploydir / (image_name + ".spdx.json")
1234
1235 root_elements = []
1236
1237 # TODO: Do we need to add the rootfs or are the image files sufficient?
1238 rootfs_image, _ = oe.sbom30.find_root_obj_in_jsonld(
1239 d,
1240 "rootfs",
1241 "%s-%s-rootfs" % (image_basename, machine),
1242 oe.spdx30.software_Package,
1243 # TODO: Should use a purpose here?
1244 )
1245 root_elements.append(oe.sbom30.get_element_link_id(rootfs_image))
1246
1247 image_objset, _ = oe.sbom30.find_jsonld(
1248 d, "image", "%s-%s-image" % (image_basename, machine), required=True
1249 )
1250 for o in image_objset.foreach_root(oe.spdx30.software_File):
1251 root_elements.append(oe.sbom30.get_element_link_id(o))
1252
1253 objset, sbom = oe.sbom30.create_sbom(d, image_name, root_elements)
1254
1255 oe.sbom30.write_jsonld_doc(d, objset, spdx_path)
1256
1257 def make_image_link(target_path, suffix):
1258 if image_link_name:
1259 link = imgdeploydir / (image_link_name + suffix)
1260 if link != target_path:
1261 link.symlink_to(os.path.relpath(target_path, link.parent))
1262
1263 make_image_link(spdx_path, ".spdx.json")
1264
1265
1266def sdk_create_spdx(d, sdk_type, spdx_work_dir, toolchain_outputname):
1267 sdk_name = toolchain_outputname + "-" + sdk_type
1268 sdk_packages = oe.sdk.sdk_list_installed_packages(d, sdk_type == "target")
1269
1270 objset = oe.sbom30.ObjectSet.new_objset(d, sdk_name)
1271
1272 sdk_rootfs = objset.add_root(
1273 oe.spdx30.software_Package(
1274 _id=objset.new_spdxid("sdk-rootfs", sdk_name),
1275 creationInfo=objset.doc.creationInfo,
1276 name=sdk_name,
1277 software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.archive,
1278 )
1279 )
1280 set_timestamp_now(d, sdk_rootfs, "builtTime")
1281
1282 sdk_build = objset.add_root(objset.new_task_build("sdk-rootfs", "sdk-rootfs"))
1283 set_timestamp_now(d, sdk_build, "build_buildEndTime")
1284
1285 objset.new_scoped_relationship(
1286 [sdk_build],
1287 oe.spdx30.RelationshipType.hasOutput,
1288 oe.spdx30.LifecycleScopeType.build,
1289 [sdk_rootfs],
1290 )
1291
1292 collect_build_package_inputs(d, objset, sdk_build, sdk_packages)
1293
1294 objset.add_aliases()
1295 oe.sbom30.write_jsonld_doc(d, objset, spdx_work_dir / "sdk-rootfs.spdx.json")
1296
1297
1298def create_sdk_sbom(d, sdk_deploydir, spdx_work_dir, toolchain_outputname):
1299 # Load the document written earlier
1300 rootfs_objset = oe.sbom30.load_jsonld(
1301 d, spdx_work_dir / "sdk-rootfs.spdx.json", required=True
1302 )
1303
1304 # Create a new build for the SDK installer
1305 sdk_build = rootfs_objset.new_task_build("sdk-populate", "sdk-populate")
1306 set_timestamp_now(d, sdk_build, "build_buildEndTime")
1307
1308 rootfs = rootfs_objset.find_root(oe.spdx30.software_Package)
1309 if rootfs is None:
1310 bb.fatal("Unable to find rootfs artifact")
1311
1312 rootfs_objset.new_scoped_relationship(
1313 [sdk_build],
1314 oe.spdx30.RelationshipType.hasInput,
1315 oe.spdx30.LifecycleScopeType.build,
1316 [rootfs],
1317 )
1318
1319 files = set()
1320 root_files = []
1321
1322 # NOTE: os.walk() doesn't return symlinks
1323 for dirpath, dirnames, filenames in os.walk(sdk_deploydir, onerror=walk_error):
1324 dirnames.sort()
1325 filenames.sort()
1326 for fn in filenames:
1327 fpath = Path(dirpath) / fn
1328 if not fpath.is_file() or fpath.is_symlink():
1329 continue
1330
1331 relpath = str(fpath.relative_to(sdk_deploydir))
1332
1333 f = rootfs_objset.new_file(
1334 rootfs_objset.new_spdxid("sdk-installer", relpath),
1335 relpath,
1336 fpath,
1337 )
1338 set_timestamp_now(d, f, "builtTime")
1339
1340 if fn.endswith(".manifest"):
1341 f.software_primaryPurpose = oe.spdx30.software_SoftwarePurpose.manifest
1342 elif fn.endswith(".testdata.json"):
1343 f.software_primaryPurpose = (
1344 oe.spdx30.software_SoftwarePurpose.configuration
1345 )
1346 else:
1347 set_purposes(d, f, "SPDX_SDK_PURPOSE")
1348 root_files.append(f)
1349
1350 files.add(f)
1351
1352 if files:
1353 rootfs_objset.new_scoped_relationship(
1354 [sdk_build],
1355 oe.spdx30.RelationshipType.hasOutput,
1356 oe.spdx30.LifecycleScopeType.build,
1357 files,
1358 )
1359 else:
1360 bb.warn(f"No SDK output files found in {sdk_deploydir}")
1361
1362 objset, sbom = oe.sbom30.create_sbom(
1363 d, toolchain_outputname, sorted(list(files)), [rootfs_objset]
1364 )
1365
1366 oe.sbom30.write_jsonld_doc(
1367 d, objset, sdk_deploydir / (toolchain_outputname + ".spdx.json")
1368 )
diff --git a/meta/lib/oe/spdx_common.py b/meta/lib/oe/spdx_common.py
new file mode 100644
index 0000000000..c2dec65563
--- /dev/null
+++ b/meta/lib/oe/spdx_common.py
@@ -0,0 +1,285 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import bb
8import collections
9import json
10import oe.packagedata
11import os
12import re
13import shutil
14from pathlib import Path
15from dataclasses import dataclass
16
17LIC_REGEX = re.compile(
18 rb"^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$",
19 re.MULTILINE,
20)
21
22
23def extract_licenses(filename):
24 """
25 Extract SPDX License identifiers from a file
26 """
27 try:
28 with open(filename, "rb") as f:
29 size = min(15000, os.stat(filename).st_size)
30 txt = f.read(size)
31 licenses = re.findall(LIC_REGEX, txt)
32 if licenses:
33 ascii_licenses = [lic.decode("ascii") for lic in licenses]
34 return ascii_licenses
35 except Exception as e:
36 bb.warn(f"Exception reading {filename}: {e}")
37 return []
38
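A minimal usage sketch for extract_licenses; the path and file contents are illustrative:

    # /tmp/example.c contains a line such as:
    #   /* SPDX-License-Identifier: MIT */
    licenses = extract_licenses("/tmp/example.c")
    # -> ["MIT"]
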
39
40def is_work_shared_spdx(d):
41 return '/work-shared/' in d.getVar('S')
42
43
44def load_spdx_license_data(d):
45 with open(d.getVar("SPDX_LICENSES"), "r") as f:
46 data = json.load(f)
47 # Transform the license array to a dictionary
48 data["licenses"] = {l["licenseId"]: l for l in data["licenses"]}
49
50 return data
51
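The transformation above converts the SPDX license-list array into a dictionary keyed by licenseId; a sketch of a lookup against the result (entry fields abbreviated):

    license_data = load_spdx_license_data(d)
    mit = license_data["licenses"]["MIT"]
    # mit is the raw license-list entry, roughly:
    # {"licenseId": "MIT", "name": "MIT License", ...}
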
52
53def process_sources(d):
54 """
55 Returns True if the sources for this recipe should be included in the SPDX
56 or False if not
57 """
58 pn = d.getVar("PN")
59 assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
60 if pn in assume_provided:
61 for p in d.getVar("PROVIDES").split():
62 if p != pn:
63 pn = p
64 break
65
66 # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
67 # so avoid archiving source here.
68 if pn.startswith("glibc-locale"):
69 return False
70 if d.getVar("PN") == "libtool-cross":
71 return False
72 if d.getVar("PN") == "libgcc-initial":
73 return False
74 if d.getVar("PN") == "shadow-sysroot":
75 return False
76
77 return True
78
79
80@dataclass(frozen=True)
81class Dep(object):
82 pn: str
83 hashfn: str
84 in_taskhash: bool
85
86
87def collect_direct_deps(d, dep_task):
88 """
89 Find direct dependencies of current task
90
 91 Returns a sorted list of (pn, hashfn, in_taskhash) tuples for the recipes
 92 whose dep_task the current task depends on
93 """
94 current_task = "do_" + d.getVar("BB_CURRENTTASK")
95 pn = d.getVar("PN")
96
97 taskdepdata = d.getVar("BB_TASKDEPDATA", False)
98
99 for this_dep in taskdepdata.values():
100 if this_dep[0] == pn and this_dep[1] == current_task:
101 break
102 else:
103 bb.fatal(f"Unable to find this {pn}:{current_task} in taskdepdata")
104
105 deps = set()
106
107 for dep_name in this_dep.deps:
108 dep_data = taskdepdata[dep_name]
109 if dep_data.taskname == dep_task and dep_data.pn != pn:
110 deps.add((dep_data.pn, dep_data.hashfn, dep_name in this_dep.taskhash_deps))
111
112 return sorted(deps)
113
114
115def get_spdx_deps(d):
116 """
117 Reads the SPDX dependencies JSON file and returns the data
118 """
119 spdx_deps_file = Path(d.getVar("SPDXDEPS"))
120
121 deps = []
122 with spdx_deps_file.open("r") as f:
123 for d in json.load(f):
124 deps.append(Dep(*d))
125 return deps
126
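get_spdx_deps assumes each entry in the SPDXDEPS JSON file is a plain array matching the Dep fields; a sketch of a compatible file and the resulting objects (values illustrative):

    # ${SPDXDEPS} contents: [["zlib", "<hashfn>", true], ["openssl", "<hashfn>", false]]
    deps = get_spdx_deps(d)
    # -> [Dep(pn="zlib", hashfn="<hashfn>", in_taskhash=True), ...]
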
127
128def collect_package_providers(d):
129 """
130 Returns a dictionary where each RPROVIDES is mapped to the package that
131 provides it
132 """
133 deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
134
135 providers = {}
136
137 deps = collect_direct_deps(d, "do_create_spdx")
138 deps.append((d.getVar("PN"), d.getVar("BB_HASHFILENAME"), True))
139
140 for dep_pn, dep_hashfn, _ in deps:
141 localdata = d
142 recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
143 if not recipe_data:
144 localdata = bb.data.createCopy(d)
145 localdata.setVar("PKGDATA_DIR", "${PKGDATA_DIR_SDK}")
146 recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
147
148 for pkg in recipe_data.get("PACKAGES", "").split():
149 pkg_data = oe.packagedata.read_subpkgdata_dict(pkg, localdata)
150 rprovides = set(
151 n
152 for n, _ in bb.utils.explode_dep_versions2(
153 pkg_data.get("RPROVIDES", "")
154 ).items()
155 )
156 rprovides.add(pkg)
157
158 if "PKG" in pkg_data:
159 pkg = pkg_data["PKG"]
160 rprovides.add(pkg)
161
162 for r in rprovides:
163 providers[r] = (pkg, dep_hashfn)
164
165 return providers
166
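A sketch of the mapping collect_package_providers builds: every name in RPROVIDES (plus the package itself and its renamed PKG) points at the providing package and its hash filename (names illustrative):

    providers = collect_package_providers(d)
    # e.g. {"libssl": ("libssl3", "<hashfn>"), "libssl3": ("libssl3", "<hashfn>"), ...}
    pkg_name, pkg_hashfn = providers["libssl"]
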
167
168def get_patched_src(d):
169 """
170 Save patched source of the recipe in SPDX_WORKDIR.
171 """
172 spdx_workdir = d.getVar("SPDXWORK")
173 spdx_sysroot_native = d.getVar("STAGING_DIR_NATIVE")
174 pn = d.getVar("PN")
175
176 workdir = d.getVar("WORKDIR")
177
178 try:
 179 # The kernel class functions require it to be on work-shared, so we don't change WORKDIR
180 if not is_work_shared_spdx(d):
 181 # Change the WORKDIR to make do_unpack and do_patch run in another dir.
182 d.setVar("WORKDIR", spdx_workdir)
183 # Restore the original path to recipe's native sysroot (it's relative to WORKDIR).
184 d.setVar("STAGING_DIR_NATIVE", spdx_sysroot_native)
185
 186 # Changing 'WORKDIR' also changes 'B'; create the 'B' directory for
 187 # any following tasks that may require it (for example, some recipes'
 188 # do_patch requires 'B' to exist).
189 bb.utils.mkdirhier(d.getVar("B"))
190
191 bb.build.exec_func("do_unpack", d)
192
193 if d.getVar("SRC_URI") != "":
194 if bb.data.inherits_class('dos2unix', d):
195 bb.build.exec_func('do_convert_crlf_to_lf', d)
196 bb.build.exec_func("do_patch", d)
197
198 # Copy source from work-share to spdx_workdir
199 if is_work_shared_spdx(d):
200 share_src = d.getVar('S')
201 d.setVar("WORKDIR", spdx_workdir)
202 d.setVar("STAGING_DIR_NATIVE", spdx_sysroot_native)
 203 # Copy source to ${SPDXWORK}, using the same basename dir as ${S};
204 src_dir = (
205 spdx_workdir
206 + "/"
207 + os.path.basename(share_src)
208 )
 209 # For kernel source, rename the suffix dir 'kernel-source'
210 # to ${BP} (${BPN}-${PV})
211 if bb.data.inherits_class("kernel", d):
212 src_dir = spdx_workdir + "/" + d.getVar('BP')
213
214 bb.note(f"copyhardlinktree {share_src} to {src_dir}")
215 oe.path.copyhardlinktree(share_src, src_dir)
216
 217 # Some userland recipes have no source.
218 if not os.path.exists(spdx_workdir):
219 bb.utils.mkdirhier(spdx_workdir)
220 finally:
221 d.setVar("WORKDIR", workdir)
222
223
224def has_task(d, task):
225 return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False))
226
227
228def fetch_data_to_uri(fd, name):
229 """
230 Translates a bitbake FetchData to a string URI
231 """
232 uri = fd.type
233 # Map gitsm to git, since gitsm:// is not a valid URI protocol
234 if uri == "gitsm":
235 uri = "git"
236 proto = getattr(fd, "proto", None)
237 if proto is not None:
238 uri = uri + "+" + proto
239 uri = uri + "://" + fd.host + fd.path
240
241 if fd.method.supports_srcrev():
242 uri = uri + "@" + fd.revision
243
244 return uri
245
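A usage sketch for fetch_data_to_uri; the URL is illustrative and fd comes from bitbake's fetcher:

    import bb.fetch2
    src_uri = "gitsm://git.example.com/repo.git;protocol=https;branch=main"
    fetch = bb.fetch2.Fetch([src_uri], d)
    fd = fetch.ud[src_uri]
    fetch_data_to_uri(fd, "repo")
    # -> "git+https://git.example.com/repo.git@<revision>" (gitsm mapped to git)
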
246def is_compiled_source(filename, compiled_sources, types):
247 """
248 Check if the file is a compiled file
249 """
250 import os
251 # If we don't have compiled source, we assume all are compiled.
252 if not compiled_sources:
253 return True
254
 255 # We always return True if the file type is not in the list of compiled
 256 # file types. Some files in the source directory are not compiled, for
 257 # example Makefiles, but also Python .py files. We need to include them in the SPDX.
258 basename = os.path.basename(filename)
259 ext = basename.partition(".")[2]
260 if ext not in types:
261 return True
262 # Check that the file is in the list
263 return filename in compiled_sources
264
265def get_compiled_sources(d):
266 """
267 Get list of compiled sources from debug information and normalize the paths
268 """
269 import itertools
270 source_info = oe.package.read_debugsources_info(d)
271 if not source_info:
 272 bb.debug(1, "No debugsources.list found. Skipping")
273 return [], []
274
275 # Sources are not split now in SPDX, so we aggregate them
276 sources = set(itertools.chain.from_iterable(source_info.values()))
277 # Check extensions of files
278 types = set()
279 for src in sources:
280 basename = os.path.basename(src)
281 ext = basename.partition(".")[2]
282 if ext not in types and ext:
283 types.add(ext)
284 bb.debug(1, f"Num of sources: {len(sources)} and types: {len(types)} {str(types)}")
285 return sources, types
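Taken together, the two helpers above let callers keep only files that were actually compiled or that fall outside the compiled-file extensions; a sketch (candidate_files and add_source_to_spdx are hypothetical):

    compiled_sources, types = get_compiled_sources(d)
    for filepath in candidate_files:  # hypothetical iterable of source paths
        if is_compiled_source(filepath, compiled_sources, types):
            add_source_to_spdx(filepath)  # hypothetical consumer
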
diff --git a/meta/lib/oe/sstatesig.py b/meta/lib/oe/sstatesig.py
index 6cd6e11acc..ef687f5d41 100644
--- a/meta/lib/oe/sstatesig.py
+++ b/meta/lib/oe/sstatesig.py
@@ -1,9 +1,13 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
6import bb.parse
4import bb.siggen 7import bb.siggen
5import bb.runqueue 8import bb.runqueue
6import oe 9import oe
10import netrc
7 11
8def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCaches): 12def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCaches):
9 # Return True if we should keep the dependency, False to drop it 13 # Return True if we should keep the dependency, False to drop it
@@ -28,6 +32,12 @@ def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCaches):
28 depmc, _, deptaskname, depmcfn = bb.runqueue.split_tid_mcfn(dep) 32 depmc, _, deptaskname, depmcfn = bb.runqueue.split_tid_mcfn(dep)
29 mc, _ = bb.runqueue.split_mc(fn) 33 mc, _ = bb.runqueue.split_mc(fn)
30 34
35 # We can skip the rm_work task signature to avoid running the task
 36 # when we remove some tasks from the dependency chain,
 37 # e.g. INHERIT:remove = "create-spdx" will trigger do_rm_work
38 if task == "do_rm_work":
39 return False
40
31 # (Almost) always include our own inter-task dependencies (unless it comes 41 # (Almost) always include our own inter-task dependencies (unless it comes
32 # from a mcdepends). The exception is the special 42 # from a mcdepends). The exception is the special
33 # do_kernel_configme->do_unpack_and_patch dependency from archiver.bbclass. 43 # do_kernel_configme->do_unpack_and_patch dependency from archiver.bbclass.
@@ -59,7 +69,7 @@ def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCaches):
59 return False 69 return False
60 70
61 # Kernel modules are well namespaced. We don't want to depend on the kernel's checksum 71 # Kernel modules are well namespaced. We don't want to depend on the kernel's checksum
62 # if we're just doing an RRECOMMENDS_xxx = "kernel-module-*", not least because the checksum 72 # if we're just doing an RRECOMMENDS:xxx = "kernel-module-*", not least because the checksum
63 # is machine specific. 73 # is machine specific.
64 # Therefore if we're not a kernel or a module recipe (inheriting the kernel classes) 74 # Therefore if we're not a kernel or a module recipe (inheriting the kernel classes)
 65 # and we recommend a kernel-module, we exclude the dependency. 75
@@ -84,14 +94,13 @@ def sstate_lockedsigs(d):
84 sigs[pn][task] = [h, siggen_lockedsigs_var] 94 sigs[pn][task] = [h, siggen_lockedsigs_var]
85 return sigs 95 return sigs
86 96
87class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic): 97def lockedsigs_unihashmap(d):
88 name = "OEBasic" 98 unihashmap = {}
89 def init_rundepcheck(self, data): 99 data = (d.getVar("SIGGEN_UNIHASHMAP") or "").split()
90 self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split() 100 for entry in data:
91 self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split() 101 pn, task, taskhash, unihash = entry.split(":")
92 pass 102 unihashmap[(pn, task)] = (taskhash, unihash)
93 def rundep_check(self, fn, recipename, task, dep, depname, dataCaches = None): 103 return unihashmap
94 return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCaches)
95 104
96class SignatureGeneratorOEBasicHashMixIn(object): 105class SignatureGeneratorOEBasicHashMixIn(object):
97 supports_multiconfig_datacaches = True 106 supports_multiconfig_datacaches = True
@@ -100,15 +109,17 @@ class SignatureGeneratorOEBasicHashMixIn(object):
100 self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split() 109 self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
101 self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split() 110 self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
102 self.lockedsigs = sstate_lockedsigs(data) 111 self.lockedsigs = sstate_lockedsigs(data)
112 self.unihashmap = lockedsigs_unihashmap(data)
103 self.lockedhashes = {} 113 self.lockedhashes = {}
104 self.lockedpnmap = {} 114 self.lockedpnmap = {}
105 self.lockedhashfn = {} 115 self.lockedhashfn = {}
106 self.machine = data.getVar("MACHINE") 116 self.machine = data.getVar("MACHINE")
107 self.mismatch_msgs = [] 117 self.mismatch_msgs = []
118 self.mismatch_number = 0
119 self.lockedsigs_msgs = ""
108 self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or 120 self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or
109 "").split() 121 "").split()
110 self.unlockedrecipes = { k: "" for k in self.unlockedrecipes } 122 self.unlockedrecipes = { k: "" for k in self.unlockedrecipes }
111 self.buildarch = data.getVar('BUILD_ARCH')
112 self._internal = False 123 self._internal = False
113 pass 124 pass
114 125
@@ -142,18 +153,12 @@ class SignatureGeneratorOEBasicHashMixIn(object):
142 super().set_taskdata(data[3:]) 153 super().set_taskdata(data[3:])
143 154
144 def dump_sigs(self, dataCache, options): 155 def dump_sigs(self, dataCache, options):
145 sigfile = os.getcwd() + "/locked-sigs.inc" 156 if 'lockedsigs' in options:
146 bb.plain("Writing locked sigs to %s" % sigfile) 157 sigfile = os.getcwd() + "/locked-sigs.inc"
147 self.dump_lockedsigs(sigfile) 158 bb.plain("Writing locked sigs to %s" % sigfile)
159 self.dump_lockedsigs(sigfile)
148 return super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigs(dataCache, options) 160 return super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigs(dataCache, options)
149 161
150 def prep_taskhash(self, tid, deps, dataCaches):
151 super().prep_taskhash(tid, deps, dataCaches)
152 if hasattr(self, "extramethod"):
153 (mc, _, _, fn) = bb.runqueue.split_tid_mcfn(tid)
154 inherits = " ".join(dataCaches[mc].inherits[fn])
155 if inherits.find("/native.bbclass") != -1 or inherits.find("/cross.bbclass") != -1:
156 self.extramethod[tid] = ":" + self.buildarch
157 162
158 def get_taskhash(self, tid, deps, dataCaches): 163 def get_taskhash(self, tid, deps, dataCaches):
159 if tid in self.lockedhashes: 164 if tid in self.lockedhashes:
@@ -196,6 +201,7 @@ class SignatureGeneratorOEBasicHashMixIn(object):
196 #bb.warn("Using %s %s %s" % (recipename, task, h)) 201 #bb.warn("Using %s %s %s" % (recipename, task, h))
197 202
198 if h != h_locked and h_locked != unihash: 203 if h != h_locked and h_locked != unihash:
204 self.mismatch_number += 1
199 self.mismatch_msgs.append('The %s:%s sig is computed to be %s, but the sig is locked to %s in %s' 205 self.mismatch_msgs.append('The %s:%s sig is computed to be %s, but the sig is locked to %s in %s'
200 % (recipename, task, h, h_locked, var)) 206 % (recipename, task, h, h_locked, var))
201 207
@@ -210,10 +216,19 @@ class SignatureGeneratorOEBasicHashMixIn(object):
210 return self.lockedhashes[tid] 216 return self.lockedhashes[tid]
211 return super().get_stampfile_hash(tid) 217 return super().get_stampfile_hash(tid)
212 218
213 def get_unihash(self, tid): 219 def get_cached_unihash(self, tid):
214 if tid in self.lockedhashes and self.lockedhashes[tid] and not self._internal: 220 if tid in self.lockedhashes and self.lockedhashes[tid] and not self._internal:
215 return self.lockedhashes[tid] 221 return self.lockedhashes[tid]
216 return super().get_unihash(tid) 222
223 (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
224 recipename = self.lockedpnmap[fn]
225
226 if (recipename, task) in self.unihashmap:
227 taskhash, unihash = self.unihashmap[(recipename, task)]
228 if taskhash == self.taskhash[tid]:
229 return unihash
230
231 return super().get_cached_unihash(tid)
217 232
218 def dump_sigtask(self, fn, task, stampbase, runtime): 233 def dump_sigtask(self, fn, task, stampbase, runtime):
219 tid = fn + ":" + task 234 tid = fn + ":" + task
@@ -223,17 +238,26 @@ class SignatureGeneratorOEBasicHashMixIn(object):
223 238
224 def dump_lockedsigs(self, sigfile, taskfilter=None): 239 def dump_lockedsigs(self, sigfile, taskfilter=None):
225 types = {} 240 types = {}
241 unihashmap = {}
226 for tid in self.runtaskdeps: 242 for tid in self.runtaskdeps:
243 # Bitbake changed this to a tuple in newer versions
244 if isinstance(tid, tuple):
245 tid = tid[1]
227 if taskfilter: 246 if taskfilter:
228 if not tid in taskfilter: 247 if not tid in taskfilter:
229 continue 248 continue
230 fn = bb.runqueue.fn_from_tid(tid) 249 (_, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
231 t = self.lockedhashfn[fn].split(" ")[1].split(":")[5] 250 t = self.lockedhashfn[fn].split(" ")[1].split(":")[5]
232 t = 't-' + t.replace('_', '-') 251 t = 't-' + t.replace('_', '-')
233 if t not in types: 252 if t not in types:
234 types[t] = [] 253 types[t] = []
235 types[t].append(tid) 254 types[t].append(tid)
236 255
256 taskhash = self.taskhash[tid]
257 unihash = self.get_unihash(tid)
258 if taskhash != unihash:
259 unihashmap[tid] = " " + self.lockedpnmap[fn] + ":" + task + ":" + taskhash + ":" + unihash
260
237 with open(sigfile, "w") as f: 261 with open(sigfile, "w") as f:
238 l = sorted(types) 262 l = sorted(types)
239 for t in l: 263 for t in l:
@@ -246,15 +270,31 @@ class SignatureGeneratorOEBasicHashMixIn(object):
246 continue 270 continue
247 f.write(" " + self.lockedpnmap[fn] + ":" + task + ":" + self.get_unihash(tid) + " \\\n") 271 f.write(" " + self.lockedpnmap[fn] + ":" + task + ":" + self.get_unihash(tid) + " \\\n")
248 f.write(' "\n') 272 f.write(' "\n')
249 f.write('SIGGEN_LOCKEDSIGS_TYPES_%s = "%s"' % (self.machine, " ".join(l))) 273 f.write('SIGGEN_LOCKEDSIGS_TYPES:%s = "%s"\n' % (self.machine, " ".join(l)))
274 f.write('SIGGEN_UNIHASHMAP += "\\\n')
275 sortedtid = sorted(unihashmap, key=lambda tid: self.lockedpnmap[bb.runqueue.fn_from_tid(tid)])
276 for tid in sortedtid:
277 f.write(unihashmap[tid] + " \\\n")
278 f.write(' "\n')
279
280 def dump_siglist(self, sigfile, path_prefix_strip=None):
281 def strip_fn(fn):
282 nonlocal path_prefix_strip
283 if not path_prefix_strip:
284 return fn
285
286 fn_exp = fn.split(":")
287 if fn_exp[-1].startswith(path_prefix_strip):
288 fn_exp[-1] = fn_exp[-1][len(path_prefix_strip):]
289
290 return ":".join(fn_exp)
250 291
251 def dump_siglist(self, sigfile):
252 with open(sigfile, "w") as f: 292 with open(sigfile, "w") as f:
253 tasks = [] 293 tasks = []
254 for taskitem in self.taskhash: 294 for taskitem in self.taskhash:
255 (fn, task) = taskitem.rsplit(":", 1) 295 (fn, task) = taskitem.rsplit(":", 1)
256 pn = self.lockedpnmap[fn] 296 pn = self.lockedpnmap[fn]
257 tasks.append((pn, task, fn, self.taskhash[taskitem])) 297 tasks.append((pn, task, strip_fn(fn), self.taskhash[taskitem]))
258 for (pn, task, fn, taskhash) in sorted(tasks): 298 for (pn, task, fn, taskhash) in sorted(tasks):
259 f.write('%s:%s %s %s\n' % (pn, task, fn, taskhash)) 299 f.write('%s:%s %s %s\n' % (pn, task, fn, taskhash))
260 300
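The entries written above are read back by lockedsigs_unihashmap() earlier in this file, which splits each on ":"; a sketch of the resulting variable (hashes illustrative):

    SIGGEN_UNIHASHMAP += "\
        zlib:do_compile:<taskhash>:<unihash> \
        "
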
@@ -262,6 +302,15 @@ class SignatureGeneratorOEBasicHashMixIn(object):
262 warn_msgs = [] 302 warn_msgs = []
263 error_msgs = [] 303 error_msgs = []
264 sstate_missing_msgs = [] 304 sstate_missing_msgs = []
305 info_msgs = None
306
307 if self.lockedsigs:
308 if len(self.lockedsigs) > 10:
 309 self.lockedsigs_msgs = "There are %s recipes with locked tasks (%s task(s) have a non-matching signature)" % (len(self.lockedsigs), self.mismatch_number)
310 else:
311 self.lockedsigs_msgs = "The following recipes have locked tasks:"
312 for pn in self.lockedsigs:
313 self.lockedsigs_msgs += " %s" % (pn)
265 314
266 for tid in sq_data['hash']: 315 for tid in sq_data['hash']:
267 if tid not in found: 316 if tid not in found:
@@ -274,7 +323,9 @@ class SignatureGeneratorOEBasicHashMixIn(object):
274 % (pn, taskname, sq_data['hash'][tid])) 323 % (pn, taskname, sq_data['hash'][tid]))
275 324
276 checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK") 325 checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK")
277 if checklevel == 'warn': 326 if checklevel == 'info':
327 info_msgs = self.lockedsigs_msgs
328 if checklevel == 'warn' or checklevel == 'info':
278 warn_msgs += self.mismatch_msgs 329 warn_msgs += self.mismatch_msgs
279 elif checklevel == 'error': 330 elif checklevel == 'error':
280 error_msgs += self.mismatch_msgs 331 error_msgs += self.mismatch_msgs
@@ -285,6 +336,8 @@ class SignatureGeneratorOEBasicHashMixIn(object):
285 elif checklevel == 'error': 336 elif checklevel == 'error':
286 error_msgs += sstate_missing_msgs 337 error_msgs += sstate_missing_msgs
287 338
339 if info_msgs:
340 bb.note(info_msgs)
288 if warn_msgs: 341 if warn_msgs:
289 bb.warn("\n".join(warn_msgs)) 342 bb.warn("\n".join(warn_msgs))
290 if error_msgs: 343 if error_msgs:
@@ -304,9 +357,20 @@ class SignatureGeneratorOEEquivHash(SignatureGeneratorOEBasicHashMixIn, bb.sigge
304 self.method = data.getVar('SSTATE_HASHEQUIV_METHOD') 357 self.method = data.getVar('SSTATE_HASHEQUIV_METHOD')
305 if not self.method: 358 if not self.method:
306 bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_METHOD to be set") 359 bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_METHOD to be set")
360 self.username = data.getVar("BB_HASHSERVE_USERNAME")
361 self.password = data.getVar("BB_HASHSERVE_PASSWORD")
362 if not self.username or not self.password:
363 try:
364 n = netrc.netrc()
365 auth = n.authenticators(self.server)
366 if auth is not None:
367 self.username, _, self.password = auth
368 except FileNotFoundError:
369 pass
370 except netrc.NetrcParseError as e:
371 bb.warn("Error parsing %s:%s: %s" % (e.filename, str(e.lineno), e.msg))
307 372
308# Insert these classes into siggen's namespace so it can see and select them 373# Insert these classes into siggen's namespace so it can see and select them
309bb.siggen.SignatureGeneratorOEBasic = SignatureGeneratorOEBasic
310bb.siggen.SignatureGeneratorOEBasicHash = SignatureGeneratorOEBasicHash 374bb.siggen.SignatureGeneratorOEBasicHash = SignatureGeneratorOEBasicHash
311bb.siggen.SignatureGeneratorOEEquivHash = SignatureGeneratorOEEquivHash 375bb.siggen.SignatureGeneratorOEEquivHash = SignatureGeneratorOEEquivHash
312 376
@@ -320,14 +384,14 @@ def find_siginfo(pn, taskname, taskhashlist, d):
320 if not taskname: 384 if not taskname:
321 # We have to derive pn and taskname 385 # We have to derive pn and taskname
322 key = pn 386 key = pn
323 splitit = key.split('.bb:') 387 if key.startswith("mc:"):
324 taskname = splitit[1] 388 # mc:<mc>:<pn>:<task>
325 pn = os.path.basename(splitit[0]).split('_')[0] 389 _, _, pn, taskname = key.split(':', 3)
326 if key.startswith('virtual:native:'): 390 else:
327 pn = pn + '-native' 391 # <pn>:<task>
392 pn, taskname = key.split(':', 1)
328 393
329 hashfiles = {} 394 hashfiles = {}
330 filedates = {}
331 395
332 def get_hashval(siginfo): 396 def get_hashval(siginfo):
333 if siginfo.endswith('.siginfo'): 397 if siginfo.endswith('.siginfo'):
@@ -335,6 +399,15 @@ def find_siginfo(pn, taskname, taskhashlist, d):
335 else: 399 else:
336 return siginfo.rpartition('.')[2] 400 return siginfo.rpartition('.')[2]
337 401
402 def get_time(fullpath):
403 # NFS can end up in a weird state where the file exists but has no stat info.
 404 # If that happens, we assume it doesn't actually exist and show a warning
405 try:
406 return os.stat(fullpath).st_mtime
407 except FileNotFoundError:
408 bb.warn("Could not obtain mtime for {}".format(fullpath))
409 return None
410
338 # First search in stamps dir 411 # First search in stamps dir
339 localdata = d.createCopy() 412 localdata = d.createCopy()
340 localdata.setVar('MULTIMACH_TARGET_SYS', '*') 413 localdata.setVar('MULTIMACH_TARGET_SYS', '*')
@@ -346,28 +419,32 @@ def find_siginfo(pn, taskname, taskhashlist, d):
346 if pn.startswith("gcc-source"): 419 if pn.startswith("gcc-source"):
347 # gcc-source shared workdir is a special case :( 420 # gcc-source shared workdir is a special case :(
348 stamp = localdata.expand("${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}") 421 stamp = localdata.expand("${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}")
422 elif pn.startswith("llvm-project-source"):
423 # llvm-project-source shared workdir is also a special case :*(
424 stamp = localdata.expand("${STAMPS_DIR}/work-shared/llvm-project-source-${PV}-${PR}")
349 425
350 filespec = '%s.%s.sigdata.*' % (stamp, taskname) 426 filespec = '%s.%s.sigdata.*' % (stamp, taskname)
351 foundall = False 427 foundall = False
352 import glob 428 import glob
429 bb.debug(1, "Calling glob.glob on {}".format(filespec))
353 for fullpath in glob.glob(filespec): 430 for fullpath in glob.glob(filespec):
354 match = False 431 match = False
355 if taskhashlist: 432 if taskhashlist:
356 for taskhash in taskhashlist: 433 for taskhash in taskhashlist:
357 if fullpath.endswith('.%s' % taskhash): 434 if fullpath.endswith('.%s' % taskhash):
358 hashfiles[taskhash] = fullpath 435 mtime = get_time(fullpath)
436 if mtime:
437 hashfiles[taskhash] = {'path':fullpath, 'sstate':False, 'time':mtime}
359 if len(hashfiles) == len(taskhashlist): 438 if len(hashfiles) == len(taskhashlist):
360 foundall = True 439 foundall = True
361 break 440 break
362 else: 441 else:
363 try:
364 filedates[fullpath] = os.stat(fullpath).st_mtime
365 except OSError:
366 continue
367 hashval = get_hashval(fullpath) 442 hashval = get_hashval(fullpath)
368 hashfiles[hashval] = fullpath 443 mtime = get_time(fullpath)
444 if mtime:
445 hashfiles[hashval] = {'path':fullpath, 'sstate':False, 'time':mtime}
369 446
370 if not taskhashlist or (len(filedates) < 2 and not foundall): 447 if not taskhashlist or (len(hashfiles) < 2 and not foundall):
371 # That didn't work, look in sstate-cache 448 # That didn't work, look in sstate-cache
372 hashes = taskhashlist or ['?' * 64] 449 hashes = taskhashlist or ['?' * 64]
373 localdata = bb.data.createCopy(d) 450 localdata = bb.data.createCopy(d)
@@ -376,35 +453,34 @@ def find_siginfo(pn, taskname, taskhashlist, d):
376 localdata.setVar('TARGET_VENDOR', '*') 453 localdata.setVar('TARGET_VENDOR', '*')
377 localdata.setVar('TARGET_OS', '*') 454 localdata.setVar('TARGET_OS', '*')
378 localdata.setVar('PN', pn) 455 localdata.setVar('PN', pn)
456 # gcc-source is a special case, same as with local stamps above
457 if pn.startswith("gcc-source"):
458 localdata.setVar('PN', "gcc")
379 localdata.setVar('PV', '*') 459 localdata.setVar('PV', '*')
380 localdata.setVar('PR', '*') 460 localdata.setVar('PR', '*')
381 localdata.setVar('BB_TASKHASH', hashval) 461 localdata.setVar('BB_TASKHASH', hashval)
462 localdata.setVar('SSTATE_CURRTASK', taskname[3:])
382 swspec = localdata.getVar('SSTATE_SWSPEC') 463 swspec = localdata.getVar('SSTATE_SWSPEC')
383 if taskname in ['do_fetch', 'do_unpack', 'do_patch', 'do_populate_lic', 'do_preconfigure'] and swspec: 464 if taskname in ['do_fetch', 'do_unpack', 'do_patch', 'do_populate_lic', 'do_preconfigure'] and swspec:
384 localdata.setVar('SSTATE_PKGSPEC', '${SSTATE_SWSPEC}') 465 localdata.setVar('SSTATE_PKGSPEC', '${SSTATE_SWSPEC}')
385 elif pn.endswith('-native') or "-cross-" in pn or "-crosssdk-" in pn: 466 elif pn.endswith('-native') or "-cross-" in pn or "-crosssdk-" in pn:
386 localdata.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/") 467 localdata.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/")
387 sstatename = taskname[3:] 468 filespec = '%s.siginfo' % localdata.getVar('SSTATE_PKG')
388 filespec = '%s_%s.*.siginfo' % (localdata.getVar('SSTATE_PKG'), sstatename)
389 469
470 bb.debug(1, "Calling glob.glob on {}".format(filespec))
390 matchedfiles = glob.glob(filespec) 471 matchedfiles = glob.glob(filespec)
391 for fullpath in matchedfiles: 472 for fullpath in matchedfiles:
392 actual_hashval = get_hashval(fullpath) 473 actual_hashval = get_hashval(fullpath)
393 if actual_hashval in hashfiles: 474 if actual_hashval in hashfiles:
394 continue 475 continue
395 hashfiles[hashval] = fullpath 476 mtime = get_time(fullpath)
396 if not taskhashlist: 477 if mtime:
397 try: 478 hashfiles[actual_hashval] = {'path':fullpath, 'sstate':True, 'time':mtime}
398 filedates[fullpath] = os.stat(fullpath).st_mtime
399 except:
400 continue
401 479
402 if taskhashlist: 480 return hashfiles
403 return hashfiles
404 else:
405 return filedates
406 481
407bb.siggen.find_siginfo = find_siginfo 482bb.siggen.find_siginfo = find_siginfo
483bb.siggen.find_siginfo_version = 2
408 484
409 485
410def sstate_get_manifest_filename(task, d): 486def sstate_get_manifest_filename(task, d):
@@ -418,6 +494,7 @@ def sstate_get_manifest_filename(task, d):
418 d2.setVar("SSTATE_MANMACH", extrainf) 494 d2.setVar("SSTATE_MANMACH", extrainf)
419 return (d2.expand("${SSTATE_MANFILEPREFIX}.%s" % task), d2) 495 return (d2.expand("${SSTATE_MANFILEPREFIX}.%s" % task), d2)
420 496
497@bb.parse.vardepsexclude("BBEXTENDCURR", "BBEXTENDVARIANT", "OVERRIDES", "PACKAGE_EXTRA_ARCHS")
421def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache): 498def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
422 d2 = d 499 d2 = d
423 variant = '' 500 variant = ''
@@ -440,7 +517,7 @@ def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
440 elif "-cross-canadian" in taskdata: 517 elif "-cross-canadian" in taskdata:
441 pkgarchs = ["${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}"] 518 pkgarchs = ["${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}"]
442 elif "-cross-" in taskdata: 519 elif "-cross-" in taskdata:
443 pkgarchs = ["${BUILD_ARCH}_${TARGET_ARCH}"] 520 pkgarchs = ["${BUILD_ARCH}"]
444 elif "-crosssdk" in taskdata: 521 elif "-crosssdk" in taskdata:
445 pkgarchs = ["${BUILD_ARCH}_${SDK_ARCH}_${SDK_OS}"] 522 pkgarchs = ["${BUILD_ARCH}_${SDK_ARCH}_${SDK_OS}"]
446 else: 523 else:
@@ -449,11 +526,15 @@ def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
449 pkgarchs.append('allarch') 526 pkgarchs.append('allarch')
450 pkgarchs.append('${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}') 527 pkgarchs.append('${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}')
451 528
529 searched_manifests = []
530
452 for pkgarch in pkgarchs: 531 for pkgarch in pkgarchs:
453 manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-%s-%s.%s" % (pkgarch, taskdata, taskname)) 532 manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-%s-%s.%s" % (pkgarch, taskdata, taskname))
454 if os.path.exists(manifest): 533 if os.path.exists(manifest):
455 return manifest, d2 534 return manifest, d2
456 bb.error("Manifest %s not found in %s (variant '%s')?" % (manifest, d2.expand(" ".join(pkgarchs)), variant)) 535 searched_manifests.append(manifest)
536 bb.fatal("The sstate manifest for task '%s:%s' (multilib variant '%s') could not be found.\nThe pkgarchs considered were: %s.\nBut none of these manifests exists:\n %s"
537 % (taskdata, taskname, variant, d2.expand(", ".join(pkgarchs)),"\n ".join(searched_manifests)))
457 return None, d2 538 return None, d2
458 539
459def OEOuthashBasic(path, sigfile, task, d): 540def OEOuthashBasic(path, sigfile, task, d):
@@ -467,6 +548,8 @@ def OEOuthashBasic(path, sigfile, task, d):
467 import stat 548 import stat
468 import pwd 549 import pwd
469 import grp 550 import grp
551 import re
552 import fnmatch
470 553
471 def update_hash(s): 554 def update_hash(s):
472 s = s.encode('utf-8') 555 s = s.encode('utf-8')
@@ -476,20 +559,38 @@ def OEOuthashBasic(path, sigfile, task, d):
476 559
477 h = hashlib.sha256() 560 h = hashlib.sha256()
478 prev_dir = os.getcwd() 561 prev_dir = os.getcwd()
562 corebase = d.getVar("COREBASE")
563 tmpdir = d.getVar("TMPDIR")
479 include_owners = os.environ.get('PSEUDO_DISABLED') == '0' 564 include_owners = os.environ.get('PSEUDO_DISABLED') == '0'
480 if "package_write_" in task or task == "package_qa": 565 if "package_write_" in task or task == "package_qa":
481 include_owners = False 566 include_owners = False
482 include_timestamps = False 567 include_timestamps = False
568 include_root = True
483 if task == "package": 569 if task == "package":
484 include_timestamps = d.getVar('BUILD_REPRODUCIBLE_BINARIES') == '1' 570 include_timestamps = True
485 extra_content = d.getVar('HASHEQUIV_HASH_VERSION') 571 include_root = False
572 source_date_epoch = float(d.getVar("SOURCE_DATE_EPOCH"))
573 hash_version = d.getVar('HASHEQUIV_HASH_VERSION')
574 extra_sigdata = d.getVar("HASHEQUIV_EXTRA_SIGDATA")
575
576 filemaps = {}
577 for m in (d.getVar('SSTATE_HASHEQUIV_FILEMAP') or '').split():
578 entry = m.split(":")
579 if len(entry) != 3 or entry[0] != task:
580 continue
581 filemaps.setdefault(entry[1], [])
582 filemaps[entry[1]].append(entry[2])
486 583
487 try: 584 try:
488 os.chdir(path) 585 os.chdir(path)
586 basepath = os.path.normpath(path)
489 587
490 update_hash("OEOuthashBasic\n") 588 update_hash("OEOuthashBasic\n")
491 if extra_content: 589 if hash_version:
492 update_hash(extra_content + "\n") 590 update_hash(hash_version + "\n")
591
592 if extra_sigdata:
593 update_hash(extra_sigdata + "\n")
493 594
494 # It is only currently useful to get equivalent hashes for things that 595 # It is only currently useful to get equivalent hashes for things that
495 # can be restored from sstate. Since the sstate object is named using 596 # can be restored from sstate. Since the sstate object is named using
@@ -534,32 +635,37 @@ def OEOuthashBasic(path, sigfile, task, d):
534 else: 635 else:
535 add_perm(stat.S_IXUSR, 'x') 636 add_perm(stat.S_IXUSR, 'x')
536 637
537 add_perm(stat.S_IRGRP, 'r') 638 if include_owners:
538 add_perm(stat.S_IWGRP, 'w') 639 # Group/other permissions are only relevant in pseudo context
539 if stat.S_ISGID & s.st_mode: 640 add_perm(stat.S_IRGRP, 'r')
540 add_perm(stat.S_IXGRP, 's', 'S') 641 add_perm(stat.S_IWGRP, 'w')
541 else: 642 if stat.S_ISGID & s.st_mode:
542 add_perm(stat.S_IXGRP, 'x') 643 add_perm(stat.S_IXGRP, 's', 'S')
644 else:
645 add_perm(stat.S_IXGRP, 'x')
543 646
544 add_perm(stat.S_IROTH, 'r') 647 add_perm(stat.S_IROTH, 'r')
545 add_perm(stat.S_IWOTH, 'w') 648 add_perm(stat.S_IWOTH, 'w')
546 if stat.S_ISVTX & s.st_mode: 649 if stat.S_ISVTX & s.st_mode:
547 update_hash('t') 650 update_hash('t')
548 else: 651 else:
549 add_perm(stat.S_IXOTH, 'x') 652 add_perm(stat.S_IXOTH, 'x')
550 653
551 if include_owners:
552 try: 654 try:
553 update_hash(" %10s" % pwd.getpwuid(s.st_uid).pw_name) 655 update_hash(" %10s" % pwd.getpwuid(s.st_uid).pw_name)
554 update_hash(" %10s" % grp.getgrgid(s.st_gid).gr_name) 656 update_hash(" %10s" % grp.getgrgid(s.st_gid).gr_name)
555 except KeyError as e: 657 except KeyError as e:
556 bb.warn("KeyError in %s" % path)
557 msg = ("KeyError: %s\nPath %s is owned by uid %d, gid %d, which doesn't match " 658 msg = ("KeyError: %s\nPath %s is owned by uid %d, gid %d, which doesn't match "
558 "any user/group on target. This may be due to host contamination." % (e, path, s.st_uid, s.st_gid)) 659 "any user/group on target. This may be due to host contamination." %
660 (e, os.path.abspath(path), s.st_uid, s.st_gid))
559 raise Exception(msg).with_traceback(e.__traceback__) 661 raise Exception(msg).with_traceback(e.__traceback__)
560 662
561 if include_timestamps: 663 if include_timestamps:
562 update_hash(" %10d" % s.st_mtime) 664 # Need to clamp to SOURCE_DATE_EPOCH
665 if s.st_mtime > source_date_epoch:
666 update_hash(" %10d" % source_date_epoch)
667 else:
668 update_hash(" %10d" % s.st_mtime)
563 669
564 update_hash(" ") 670 update_hash(" ")
565 if stat.S_ISBLK(s.st_mode) or stat.S_ISCHR(s.st_mode): 671 if stat.S_ISBLK(s.st_mode) or stat.S_ISCHR(s.st_mode):
@@ -567,8 +673,13 @@ def OEOuthashBasic(path, sigfile, task, d):
567 else: 673 else:
568 update_hash(" " * 9) 674 update_hash(" " * 9)
569 675
676 filterfile = False
677 for entry in filemaps:
678 if fnmatch.fnmatch(path, entry):
679 filterfile = True
680
570 update_hash(" ") 681 update_hash(" ")
571 if stat.S_ISREG(s.st_mode): 682 if stat.S_ISREG(s.st_mode) and not filterfile:
572 update_hash("%10d" % s.st_size) 683 update_hash("%10d" % s.st_size)
573 else: 684 else:
574 update_hash(" " * 10) 685 update_hash(" " * 10)
@@ -577,9 +688,24 @@ def OEOuthashBasic(path, sigfile, task, d):
577 fh = hashlib.sha256() 688 fh = hashlib.sha256()
578 if stat.S_ISREG(s.st_mode): 689 if stat.S_ISREG(s.st_mode):
579 # Hash file contents 690 # Hash file contents
580 with open(path, 'rb') as d: 691 if filterfile:
581 for chunk in iter(lambda: d.read(4096), b""): 692 # Need to ignore paths in crossscripts and postinst-useradd files.
693 with open(path, 'rb') as d:
694 chunk = d.read()
695 chunk = chunk.replace(bytes(basepath, encoding='utf8'), b'')
696 for entry in filemaps:
697 if not fnmatch.fnmatch(path, entry):
698 continue
699 for r in filemaps[entry]:
700 if r.startswith("regex-"):
701 chunk = re.sub(bytes(r[6:], encoding='utf8'), b'', chunk)
702 else:
703 chunk = chunk.replace(bytes(r, encoding='utf8'), b'')
582 fh.update(chunk) 704 fh.update(chunk)
705 else:
706 with open(path, 'rb') as d:
707 for chunk in iter(lambda: d.read(4096), b""):
708 fh.update(chunk)
583 update_hash(fh.hexdigest()) 709 update_hash(fh.hexdigest())
584 else: 710 else:
585 update_hash(" " * len(fh.hexdigest())) 711 update_hash(" " * len(fh.hexdigest()))
@@ -592,11 +718,16 @@ def OEOuthashBasic(path, sigfile, task, d):
592 update_hash("\n") 718 update_hash("\n")
593 719
594 # Process this directory and all its child files 720 # Process this directory and all its child files
595 process(root) 721 if include_root or root != ".":
722 process(root)
596 for f in files: 723 for f in files:
597 if f == 'fixmepath': 724 if f == 'fixmepath':
598 continue 725 continue
599 process(os.path.join(root, f)) 726 process(os.path.join(root, f))
727
728 for dir in dirs:
729 if os.path.islink(os.path.join(root, dir)):
730 process(os.path.join(root, dir))
600 finally: 731 finally:
601 os.chdir(prev_dir) 732 os.chdir(prev_dir)
602 733
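The filemaps parsed in OEOuthashBasic come from SSTATE_HASHEQUIV_FILEMAP, where each entry has the form task:filename-glob:string-to-strip (a "regex-" prefix switches to regex substitution, as handled above); a sketch of compatible entries (illustrative; the shipped defaults live in sstate.bbclass):

    SSTATE_HASHEQUIV_FILEMAP ?= " \
        populate_sysroot:*/postinst-useradd-*:${TMPDIR} \
        populate_sysroot:*/crossscripts/*:regex-/usr/bin/env\\s+python3 \
        "
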
diff --git a/meta/lib/oe/terminal.py b/meta/lib/oe/terminal.py
index 61c2687ef4..4412bc14c1 100644
--- a/meta/lib/oe/terminal.py
+++ b/meta/lib/oe/terminal.py
@@ -1,11 +1,12 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4import logging 6import logging
5import oe.classutils 7import oe.classutils
6import shlex 8import shlex
7from bb.process import Popen, ExecutionError 9from bb.process import Popen, ExecutionError
8from distutils.version import LooseVersion
9 10
10logger = logging.getLogger('BitBake.OE.Terminal') 11logger = logging.getLogger('BitBake.OE.Terminal')
11 12
@@ -31,9 +32,10 @@ class Registry(oe.classutils.ClassRegistry):
31 32
32class Terminal(Popen, metaclass=Registry): 33class Terminal(Popen, metaclass=Registry):
33 def __init__(self, sh_cmd, title=None, env=None, d=None): 34 def __init__(self, sh_cmd, title=None, env=None, d=None):
35 from subprocess import STDOUT
34 fmt_sh_cmd = self.format_command(sh_cmd, title) 36 fmt_sh_cmd = self.format_command(sh_cmd, title)
35 try: 37 try:
36 Popen.__init__(self, fmt_sh_cmd, env=env) 38 Popen.__init__(self, fmt_sh_cmd, env=env, stderr=STDOUT)
37 except OSError as exc: 39 except OSError as exc:
38 import errno 40 import errno
39 if exc.errno == errno.ENOENT: 41 if exc.errno == errno.ENOENT:
@@ -86,10 +88,10 @@ class Konsole(XTerminal):
86 def __init__(self, sh_cmd, title=None, env=None, d=None): 88 def __init__(self, sh_cmd, title=None, env=None, d=None):
87 # Check version 89 # Check version
88 vernum = check_terminal_version("konsole") 90 vernum = check_terminal_version("konsole")
89 if vernum and LooseVersion(vernum) < '2.0.0': 91 if vernum and bb.utils.vercmp_string_op(vernum, "2.0.0", "<"):
90 # Konsole from KDE 3.x 92 # Konsole from KDE 3.x
91 self.command = 'konsole -T "{title}" -e {command}' 93 self.command = 'konsole -T "{title}" -e {command}'
92 elif vernum and LooseVersion(vernum) < '16.08.1': 94 elif vernum and bb.utils.vercmp_string_op(vernum, "16.08.1", "<"):
93 # Konsole pre 16.08.01 Has nofork 95 # Konsole pre 16.08.01 Has nofork
94 self.command = 'konsole --nofork --workdir . -p tabtitle="{title}" -e {command}' 96 self.command = 'konsole --nofork --workdir . -p tabtitle="{title}" -e {command}'
95 XTerminal.__init__(self, sh_cmd, title, env, d) 97 XTerminal.__init__(self, sh_cmd, title, env, d)
@@ -102,6 +104,10 @@ class Rxvt(XTerminal):
102 command = 'rxvt -T "{title}" -e {command}' 104 command = 'rxvt -T "{title}" -e {command}'
103 priority = 1 105 priority = 1
104 106
107class URxvt(XTerminal):
108 command = 'urxvt -T "{title}" -e {command}'
109 priority = 1
110
105class Screen(Terminal): 111class Screen(Terminal):
106 command = 'screen -D -m -t "{title}" -S devshell {command}' 112 command = 'screen -D -m -t "{title}" -S devshell {command}'
107 113
@@ -163,7 +169,12 @@ class Tmux(Terminal):
163 # devshells, if it's already there, add a new window to it. 169 # devshells, if it's already there, add a new window to it.
164 window_name = 'devshell-%i' % os.getpid() 170 window_name = 'devshell-%i' % os.getpid()
165 171
166 self.command = 'tmux new -c "{{cwd}}" -d -s {0} -n {0} "{{command}}"'.format(window_name) 172 self.command = 'tmux new -c "{{cwd}}" -d -s {0} -n {0} "{{command}}"'
173 if not check_tmux_version('1.9'):
174 # `tmux new-session -c` was added in 1.9;
175 # older versions fail with that flag
176 self.command = 'tmux new -d -s {0} -n {0} "{{command}}"'
177 self.command = self.command.format(window_name)
167 Terminal.__init__(self, sh_cmd, title, env, d) 178 Terminal.__init__(self, sh_cmd, title, env, d)
168 179
169 attach_cmd = 'tmux att -t {0}'.format(window_name) 180 attach_cmd = 'tmux att -t {0}'.format(window_name)
@@ -253,13 +264,18 @@ def spawn(name, sh_cmd, title=None, env=None, d=None):
253 except OSError: 264 except OSError:
254 return 265 return
255 266
267def check_tmux_version(desired):
268 vernum = check_terminal_version("tmux")
269 if vernum and bb.utils.vercmp_string_op(vernum, desired, "<"):
270 return False
271 return vernum
272
256def check_tmux_pane_size(tmux): 273def check_tmux_pane_size(tmux):
257 import subprocess as sub 274 import subprocess as sub
258 # On older tmux versions (<1.9), return false. The reason 275 # On older tmux versions (<1.9), return false. The reason
259 # is that there is no easy way to get the height of the active panel 276 # is that there is no easy way to get the height of the active panel
260 # on current window without nested formats (available from version 1.9) 277 # on current window without nested formats (available from version 1.9)
261 vernum = check_terminal_version("tmux") 278 if not check_tmux_version('1.9'):
262 if vernum and LooseVersion(vernum) < '1.9':
263 return False 279 return False
264 try: 280 try:
265 p = sub.Popen('%s list-panes -F "#{?pane_active,#{pane_height},}"' % tmux, 281 p = sub.Popen('%s list-panes -F "#{?pane_active,#{pane_height},}"' % tmux,
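
As a sketch of the comparison semantics the patch now relies on via bb.utils.vercmp_string_op() (my example, assuming plain dotted-numeric versions; the real BitBake helper handles richer version formats):

    def version_lt(a, b):
        # Compare dotted-numeric versions component-wise, the way
        # bb.utils.vercmp_string_op(a, b, "<") is used above.
        parts = lambda v: [int(x) for x in v.split(".")]
        return parts(a) < parts(b)

    assert version_lt("1.8", "1.9")
    # Numeric comparison avoids the lexical pitfall where "16.08.1" < "2.0.0":
    assert not version_lt("16.08.1", "2.0.0")
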
diff --git a/meta/lib/oe/tune.py b/meta/lib/oe/tune.py
new file mode 100644
index 0000000000..7fda19430d
--- /dev/null
+++ b/meta/lib/oe/tune.py
@@ -0,0 +1,81 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7# riscv_isa_to_tune(isa)
8#
9# Automatically translate a RISC-V ISA string to TUNE_FEATURES
10#
11# Abbreviations, such as rv32g -> rv32imafd_zicsr_zifencei, are supported.
12#
13# Profiles, such as rva22u64, are NOT supported; you must use ISA strings.
14#
15def riscv_isa_to_tune(isa):
16 _isa = isa.lower()
17
18 feature = []
19 iter = 0
20
21 # rv or riscv
22 if _isa[iter:].startswith('rv'):
23 feature.append('rv')
24 iter = iter + 2
25 elif _isa[iter:].startswith('riscv'):
26 feature.append('rv')
27 iter = iter + 5
28 else:
29 # Not a risc-v ISA!
30 return _isa
31
32 while (_isa[iter:]):
33 # Skip _ and whitespace
34 if _isa[iter] == '_' or _isa[iter].isspace():
35 iter = iter + 1
36 continue
37
38 # Length, just capture numbers here
39 if _isa[iter].isdigit():
40 iter_end = iter
41 while iter_end < len(_isa) and _isa[iter_end].isdigit():
42 iter_end = iter_end + 1
43
44 feature.append(_isa[iter:iter_end])
45 iter = iter_end
46 continue
47
48 # Typically i, e or g is next, followed by extensions.
49 # Extensions are single character, except for Z, Ss, Sh, Sm, Sv, and X
50
51 # If the extension starts with 'Z', 'S' or 'X' use the name until the next _, whitespace or end
52 if _isa[iter] in ['z', 's', 'x']:
53 ext_type = _isa[iter]
54 iter_end = iter + 1
55
56 # Multi-character extension; these are supposed to have a _ before the next multi-character extension
57 # See 37.4 and 37.5:
58 # 37.4: Underscores "_" may be used to separate ISA extensions...
59 # 37.5: All multi-letter extensions ... must be separated from other multi-letter extensions by an underscore...
60 # Some extensions permit only alphabetic characters, while others allow alphanumeric characters
61 while iter_end < len(_isa) and _isa[iter_end] != "_" and not _isa[iter_end].isspace():
62 iter_end = iter_end + 1
63
64 feature.append(_isa[iter:iter_end])
65 iter = iter_end
66 continue
67
68 # 'g' is special, it's an abbreviation for imafd_zicsr_zifencei
69 # When expanding the abbreviation, any additional letters must appear before the _z* extensions
70 if _isa[iter] == 'g':
71 _isa = 'imafd' + _isa[iter+1:] + '_zicsr_zifencei'
72 iter = 0
73 continue
74
75 feature.append(_isa[iter])
76 iter = iter + 1
77 continue
78
79 # Eliminate duplicates, but preserve the order
80 feature = list(dict.fromkeys(feature))
81 return ' '.join(feature)
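
A usage sketch for the new helper (my example; it assumes meta/lib is on PYTHONPATH so that oe.tune is importable):

    from oe.tune import riscv_isa_to_tune

    # 'g' expands to imafd plus _zicsr_zifencei, single-letter extensions are
    # split into individual features, multi-letter z* extensions are kept
    # whole, and duplicates are removed while preserving order.
    assert riscv_isa_to_tune("rv64gc") == "rv 64 i m a f d c zicsr zifencei"
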
diff --git a/meta/lib/oe/types.py b/meta/lib/oe/types.py
index bbbabafbf6..b929afb1f3 100644
--- a/meta/lib/oe/types.py
+++ b/meta/lib/oe/types.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
diff --git a/meta/lib/oe/useradd.py b/meta/lib/oe/useradd.py
index 8fc77568ff..54aa86feb5 100644
--- a/meta/lib/oe/useradd.py
+++ b/meta/lib/oe/useradd.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4import argparse 6import argparse
@@ -45,7 +47,6 @@ def build_useradd_parser():
45 parser.add_argument("-N", "--no-user-group", dest="user_group", help="do not create a group with the same name as the user", action="store_const", const=False) 47 parser.add_argument("-N", "--no-user-group", dest="user_group", help="do not create a group with the same name as the user", action="store_const", const=False)
46 parser.add_argument("-o", "--non-unique", help="allow to create users with duplicate (non-unique UID)", action="store_true") 48 parser.add_argument("-o", "--non-unique", help="allow to create users with duplicate (non-unique UID)", action="store_true")
47 parser.add_argument("-p", "--password", metavar="PASSWORD", help="encrypted password of the new account") 49 parser.add_argument("-p", "--password", metavar="PASSWORD", help="encrypted password of the new account")
48 parser.add_argument("-P", "--clear-password", metavar="CLEAR_PASSWORD", help="use this clear password for the new account")
49 parser.add_argument("-R", "--root", metavar="CHROOT_DIR", help="directory to chroot into") 50 parser.add_argument("-R", "--root", metavar="CHROOT_DIR", help="directory to chroot into")
50 parser.add_argument("-r", "--system", help="create a system account", action="store_true") 51 parser.add_argument("-r", "--system", help="create a system account", action="store_true")
51 parser.add_argument("-s", "--shell", metavar="SHELL", help="login shell of the new account") 52 parser.add_argument("-s", "--shell", metavar="SHELL", help="login shell of the new account")
@@ -63,7 +64,6 @@ def build_groupadd_parser():
63 parser.add_argument("-K", "--key", metavar="KEY=VALUE", help="override /etc/login.defs defaults") 64 parser.add_argument("-K", "--key", metavar="KEY=VALUE", help="override /etc/login.defs defaults")
64 parser.add_argument("-o", "--non-unique", help="allow to create groups with duplicate (non-unique) GID", action="store_true") 65 parser.add_argument("-o", "--non-unique", help="allow to create groups with duplicate (non-unique) GID", action="store_true")
65 parser.add_argument("-p", "--password", metavar="PASSWORD", help="use this encrypted password for the new group") 66 parser.add_argument("-p", "--password", metavar="PASSWORD", help="use this encrypted password for the new group")
66 parser.add_argument("-P", "--clear-password", metavar="CLEAR_PASSWORD", help="use this clear password for the new group")
67 parser.add_argument("-R", "--root", metavar="CHROOT_DIR", help="directory to chroot into") 67 parser.add_argument("-R", "--root", metavar="CHROOT_DIR", help="directory to chroot into")
68 parser.add_argument("-r", "--system", help="create a system account", action="store_true") 68 parser.add_argument("-r", "--system", help="create a system account", action="store_true")
69 parser.add_argument("GROUP", help="Group name of the new group") 69 parser.add_argument("GROUP", help="Group name of the new group")
diff --git a/meta/lib/oe/utils.py b/meta/lib/oe/utils.py
index 9a2187e36f..a11db5f3cd 100644
--- a/meta/lib/oe/utils.py
+++ b/meta/lib/oe/utils.py
@@ -1,10 +1,15 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
5import subprocess 7import subprocess
6import multiprocessing 8import multiprocessing
7import traceback 9import traceback
10import errno
11
12import bb.parse
8 13
9def read_file(filename): 14def read_file(filename):
10 try: 15 try:
@@ -221,12 +226,12 @@ def packages_filter_out_system(d):
221 PN-dbg PN-doc PN-locale-eb-gb removed. 226 PN-dbg PN-doc PN-locale-eb-gb removed.
222 """ 227 """
223 pn = d.getVar('PN') 228 pn = d.getVar('PN')
224 blacklist = [pn + suffix for suffix in ('', '-dbg', '-dev', '-doc', '-locale', '-staticdev', '-src')] 229 pkgfilter = [pn + suffix for suffix in ('', '-dbg', '-dev', '-doc', '-locale', '-staticdev', '-src')]
225 localepkg = pn + "-locale-" 230 localepkg = pn + "-locale-"
226 pkgs = [] 231 pkgs = []
227 232
228 for pkg in d.getVar('PACKAGES').split(): 233 for pkg in d.getVar('PACKAGES').split():
229 if pkg not in blacklist and localepkg not in pkg: 234 if pkg not in pkgfilter and localepkg not in pkg:
230 pkgs.append(pkg) 235 pkgs.append(pkg)
231 return pkgs 236 return pkgs
232 237
@@ -248,24 +253,32 @@ def trim_version(version, num_parts=2):
248 trimmed = ".".join(parts[:num_parts]) 253 trimmed = ".".join(parts[:num_parts])
249 return trimmed 254 return trimmed
250 255
251def cpu_count(at_least=1): 256def cpu_count(at_least=1, at_most=64):
252 cpus = len(os.sched_getaffinity(0)) 257 cpus = len(os.sched_getaffinity(0))
253 return max(cpus, at_least) 258 return max(min(cpus, at_most), at_least)
254 259
255def execute_pre_post_process(d, cmds): 260def execute_pre_post_process(d, cmds):
256 if cmds is None: 261 if cmds is None:
257 return 262 return
258 263
259 for cmd in cmds.strip().split(';'): 264 cmds = cmds.replace(";", " ")
260 cmd = cmd.strip() 265
261 if cmd != '': 266 for cmd in cmds.split():
262 bb.note("Executing %s ..." % cmd) 267 bb.note("Executing %s ..." % cmd)
263 bb.build.exec_func(cmd, d) 268 bb.build.exec_func(cmd, d)
269
270@bb.parse.vardepsexclude("BB_NUMBER_THREADS")
271def get_bb_number_threads(d):
272 return int(d.getVar("BB_NUMBER_THREADS") or os.cpu_count() or 1)
264 273
265# For each item in items, call the function 'target' with item as the first 274def multiprocess_launch(target, items, d, extraargs=None):
275 max_process = get_bb_number_threads(d)
276 return multiprocess_launch_mp(target, items, max_process, extraargs)
277
278# For each item in items, call the function 'target' with item as the first
266# argument, extraargs as the other arguments and handle any exceptions in the 279# argument, extraargs as the other arguments and handle any exceptions in the
267# parent thread 280# parent thread
268def multiprocess_launch(target, items, d, extraargs=None): 281def multiprocess_launch_mp(target, items, max_process, extraargs=None):
269 282
270 class ProcessLaunch(multiprocessing.Process): 283 class ProcessLaunch(multiprocessing.Process):
271 def __init__(self, *args, **kwargs): 284 def __init__(self, *args, **kwargs):
@@ -300,14 +313,15 @@ def multiprocess_launch(target, items, d, extraargs=None):
300 self.update() 313 self.update()
301 return self._result 314 return self._result
302 315
303 max_process = int(d.getVar("BB_NUMBER_THREADS") or os.cpu_count() or 1)
304 launched = [] 316 launched = []
305 errors = [] 317 errors = []
306 results = [] 318 results = []
307 items = list(items) 319 items = list(items)
308 while (items and not errors) or launched: 320 while (items and not errors) or launched:
309 if not errors and items and len(launched) < max_process: 321 if not errors and items and len(launched) < max_process:
310 args = (items.pop(),) 322 args = items.pop()
323 if not type(args) is tuple:
324 args = (args,)
311 if extraargs is not None: 325 if extraargs is not None:
312 args = args + extraargs 326 args = args + extraargs
313 p = ProcessLaunch(target=target, args=args) 327 p = ProcessLaunch(target=target, args=args)
@@ -344,7 +358,29 @@ def squashspaces(string):
344 import re 358 import re
345 return re.sub(r"\s+", " ", string).strip() 359 return re.sub(r"\s+", " ", string).strip()
346 360
347def format_pkg_list(pkg_dict, ret_format=None): 361def rprovides_map(pkgdata_dir, pkg_dict):
362 # Map file -> pkg provider
363 rprov_map = {}
364
365 for pkg in pkg_dict:
366 path_to_pkgfile = os.path.join(pkgdata_dir, 'runtime-reverse', pkg)
367 if not os.path.isfile(path_to_pkgfile):
368 continue
369 with open(path_to_pkgfile) as f:
370 for line in f:
371 if line.startswith('RPROVIDES') or line.startswith('FILERPROVIDES'):
372 # List all components provided by pkg.
373 # Exclude version strings, i.e. those starting with (
374 provides = [x for x in line.split()[1:] if not x.startswith('(')]
375 for prov in provides:
376 if prov in rprov_map:
377 rprov_map[prov].append(pkg)
378 else:
379 rprov_map[prov] = [pkg]
380
381 return rprov_map
382
383def format_pkg_list(pkg_dict, ret_format=None, pkgdata_dir=None):
348 output = [] 384 output = []
349 385
350 if ret_format == "arch": 386 if ret_format == "arch":
@@ -357,9 +393,15 @@ def format_pkg_list(pkg_dict, ret_format=None):
357 for pkg in sorted(pkg_dict): 393 for pkg in sorted(pkg_dict):
358 output.append("%s %s %s" % (pkg, pkg_dict[pkg]["arch"], pkg_dict[pkg]["ver"])) 394 output.append("%s %s %s" % (pkg, pkg_dict[pkg]["arch"], pkg_dict[pkg]["ver"]))
359 elif ret_format == "deps": 395 elif ret_format == "deps":
396 rprov_map = rprovides_map(pkgdata_dir, pkg_dict)
360 for pkg in sorted(pkg_dict): 397 for pkg in sorted(pkg_dict):
361 for dep in pkg_dict[pkg]["deps"]: 398 for dep in pkg_dict[pkg]["deps"]:
362 output.append("%s|%s" % (pkg, dep)) 399 if dep in rprov_map:
400 # There could be multiple providers within the image
401 for pkg_provider in rprov_map[dep]:
402 output.append("%s|%s * %s [RPROVIDES]" % (pkg, pkg_provider, dep))
403 else:
404 output.append("%s|%s" % (pkg, dep))
363 else: 405 else:
364 for pkg in sorted(pkg_dict): 406 for pkg in sorted(pkg_dict):
365 output.append(pkg) 407 output.append(pkg)
@@ -428,7 +470,7 @@ def host_gcc_version(d, taskcontextonly=False):
428 version = match.group(1) 470 version = match.group(1)
429 return "-%s" % version if version in ("4.8", "4.9") else "" 471 return "-%s" % version if version in ("4.8", "4.9") else ""
430 472
431 473@bb.parse.vardepsexclude("DEFAULTTUNE_MULTILIB_ORIGINAL", "OVERRIDES")
432def get_multilib_datastore(variant, d): 474def get_multilib_datastore(variant, d):
433 localdata = bb.data.createCopy(d) 475 localdata = bb.data.createCopy(d)
434 if variant: 476 if variant:
@@ -445,94 +487,48 @@ def get_multilib_datastore(variant, d):
445 localdata.setVar("MLPREFIX", "") 487 localdata.setVar("MLPREFIX", "")
446 return localdata 488 return localdata
447 489
448#
449# Python 2.7 doesn't have threaded pools (just multiprocessing)
450# so implement a version here
451#
452
453from queue import Queue
454from threading import Thread
455
456class ThreadedWorker(Thread):
457 """Thread executing tasks from a given tasks queue"""
458 def __init__(self, tasks, worker_init, worker_end):
459 Thread.__init__(self)
460 self.tasks = tasks
461 self.daemon = True
462
463 self.worker_init = worker_init
464 self.worker_end = worker_end
465
466 def run(self):
467 from queue import Empty
468
469 if self.worker_init is not None:
470 self.worker_init(self)
471
472 while True:
473 try:
474 func, args, kargs = self.tasks.get(block=False)
475 except Empty:
476 if self.worker_end is not None:
477 self.worker_end(self)
478 break
479
480 try:
481 func(self, *args, **kargs)
482 except Exception as e:
483 print(e)
484 finally:
485 self.tasks.task_done()
486
487class ThreadedPool:
488 """Pool of threads consuming tasks from a queue"""
489 def __init__(self, num_workers, num_tasks, worker_init=None,
490 worker_end=None):
491 self.tasks = Queue(num_tasks)
492 self.workers = []
493
494 for _ in range(num_workers):
495 worker = ThreadedWorker(self.tasks, worker_init, worker_end)
496 self.workers.append(worker)
497
498 def start(self):
499 for worker in self.workers:
500 worker.start()
501
502 def add_task(self, func, *args, **kargs):
503 """Add a task to the queue"""
504 self.tasks.put((func, args, kargs))
505
506 def wait_completion(self):
507 """Wait for completion of all the tasks in the queue"""
508 self.tasks.join()
509 for worker in self.workers:
510 worker.join()
511
512def write_ld_so_conf(d):
513 # Some utils like prelink may not have the correct target library paths
514 # so write an ld.so.conf to help them
515 ldsoconf = d.expand("${STAGING_DIR_TARGET}${sysconfdir}/ld.so.conf")
516 if os.path.exists(ldsoconf):
517 bb.utils.remove(ldsoconf)
518 bb.utils.mkdirhier(os.path.dirname(ldsoconf))
519 with open(ldsoconf, "w") as f:
520 f.write(d.getVar("base_libdir") + '\n')
521 f.write(d.getVar("libdir") + '\n')
522
523class ImageQAFailed(Exception):
524 def __init__(self, description, name=None, logfile=None):
525 self.description = description
526 self.name = name
527 self.logfile=logfile
528
529 def __str__(self):
530 msg = 'Function failed: %s' % self.name
531 if self.description:
532 msg = msg + ' (%s)' % self.description
533
534 return msg
535
536def sh_quote(string): 490def sh_quote(string):
537 import shlex 491 import shlex
538 return shlex.quote(string) 492 return shlex.quote(string)
493
494def directory_size(root, blocksize=4096):
495 """
496 Calculate the size of the directory, taking into account hard links,
497 rounding up every size to multiples of the blocksize.
498 """
499 def roundup(size):
500 """
501 Round the size up to the nearest multiple of the block size.
502 """
503 import math
504 return math.ceil(size / blocksize) * blocksize
505
506 def getsize(filename):
507 """
508 Get the size of the filename, not following symlinks, taking into
509 account hard links.
510 """
511 stat = os.lstat(filename)
512 if stat.st_ino not in inodes:
513 inodes.add(stat.st_ino)
514 return stat.st_size
515 else:
516 return 0
517
518 inodes = set()
519 total = 0
520 for root, dirs, files in os.walk(root):
521 total += sum(roundup(getsize(os.path.join(root, name))) for name in files)
522 total += roundup(getsize(root))
523 return total
524
525# Update the mtime of a file, skipping permission/read-only issues
526def touch(filename):
527 try:
528 os.utime(filename, None)
529 except PermissionError:
530 pass
531 except OSError as e:
532 # Handle read-only file systems gracefully
533 if e.errno != errno.EROFS:
534 raise e
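
The new directory_size() rounds every file up to whole filesystem blocks and counts each inode only once, so hard links are not double-counted. A minimal sketch of the rounding rule (my example, using the default 4096-byte block size):

    import math

    def roundup(size, blocksize=4096):
        # A file occupies whole blocks on disk, so 1 byte still costs one
        # full block; exact multiples stay unchanged.
        return math.ceil(size / blocksize) * blocksize

    assert roundup(1) == 4096
    assert roundup(4096) == 4096
    assert roundup(4097) == 8192
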
diff --git a/meta/lib/oeqa/buildperf/base.py b/meta/lib/oeqa/buildperf/base.py
index 5f1805d86c..5d656c781a 100644
--- a/meta/lib/oeqa/buildperf/base.py
+++ b/meta/lib/oeqa/buildperf/base.py
@@ -444,7 +444,7 @@ class BuildPerfTestCase(unittest.TestCase):
444 buildstats = [] 444 buildstats = []
445 for fname in os.listdir(bs_dir): 445 for fname in os.listdir(bs_dir):
446 recipe_dir = os.path.join(bs_dir, fname) 446 recipe_dir = os.path.join(bs_dir, fname)
447 if not os.path.isdir(recipe_dir): 447 if not os.path.isdir(recipe_dir) or fname == "reduced_proc_pressure":
448 continue 448 continue
449 name, epoch, version, revision = split_nevr(fname) 449 name, epoch, version, revision = split_nevr(fname)
450 recipe_bs = OrderedDict((('name', name), 450 recipe_bs = OrderedDict((('name', name),
diff --git a/meta/lib/oeqa/buildtools-docs/cases/README b/meta/lib/oeqa/buildtools-docs/cases/README
new file mode 100644
index 0000000000..f8edbc7dad
--- /dev/null
+++ b/meta/lib/oeqa/buildtools-docs/cases/README
@@ -0,0 +1,2 @@
1These test cases are used by build-docs-tarball, and are not used by the testsdk
2class.
diff --git a/meta/lib/oeqa/buildtools-docs/cases/build.py b/meta/lib/oeqa/buildtools-docs/cases/build.py
new file mode 100644
index 0000000000..6e3ee94292
--- /dev/null
+++ b/meta/lib/oeqa/buildtools-docs/cases/build.py
@@ -0,0 +1,19 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import tempfile
8from oeqa.sdk.case import OESDKTestCase
9from oeqa.utils.subprocesstweak import errors_have_output
10errors_have_output()
11
12class BuildTests(OESDKTestCase):
13 """
14 Verify that our docs can build using our docs tools tarball.
15 """
16 def test_docs_build(self):
17 with tempfile.TemporaryDirectory(prefix='docs-tarball-build-', dir=self.tc.sdk_dir) as testdir:
18 self._run('git clone git://git.yoctoproject.org/yocto-docs %s' % testdir)
19 self._run('cd %s/documentation && make html' % testdir)
diff --git a/meta/lib/oeqa/buildtools/cases/README b/meta/lib/oeqa/buildtools/cases/README
new file mode 100644
index 0000000000..d4f20faa9f
--- /dev/null
+++ b/meta/lib/oeqa/buildtools/cases/README
@@ -0,0 +1,2 @@
1These test cases are used by buildtools-tarball, and are not used by the testsdk
2class.
diff --git a/meta/lib/oeqa/buildtools/cases/build.py b/meta/lib/oeqa/buildtools/cases/build.py
new file mode 100644
index 0000000000..c85c32496b
--- /dev/null
+++ b/meta/lib/oeqa/buildtools/cases/build.py
@@ -0,0 +1,32 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os, tempfile
8import time
9from oeqa.sdk.case import OESDKTestCase
10from oeqa.utils.subprocesstweak import errors_have_output
11errors_have_output()
12
13class BuildTests(OESDKTestCase):
14 """
15 Verify that bitbake can build virtual/libc inside the buildtools.
16 """
17 def test_libc(self):
18 with tempfile.TemporaryDirectory(prefix='bitbake-build-', dir=self.tc.sdk_dir) as testdir:
19 corebase = self.td['COREBASE']
20
21 self._run('. %s/oe-init-build-env %s' % (corebase, testdir))
22 with open(os.path.join(testdir, 'conf', 'local.conf'), 'ta') as conf:
23 conf.write('\n')
24 conf.write('DL_DIR = "%s"\n' % self.td['DL_DIR'])
25
26 try:
27 self._run('. %s/oe-init-build-env %s && bitbake virtual/libc' % (corebase, testdir))
28 finally:
29 delay = 10
30 while delay and (os.path.exists(testdir + "/bitbake.lock") or os.path.exists(testdir + "/cache/hashserv.db-wal")):
31 time.sleep(1)
32 delay = delay - 1
diff --git a/meta/lib/oeqa/buildtools/cases/gcc.py b/meta/lib/oeqa/buildtools/cases/gcc.py
new file mode 100644
index 0000000000..a62c4d0bc4
--- /dev/null
+++ b/meta/lib/oeqa/buildtools/cases/gcc.py
@@ -0,0 +1,31 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os.path
8from oeqa.sdk.case import OESDKTestCase
9
10class GccTests(OESDKTestCase):
11 def test_verify_specs(self):
12 """
13 Verify that the compiler has been relocated successfully and isn't
14 looking in the hard-coded prefix.
15 """
16 # Canonicalise the SDK root
17 sdk_base = os.path.realpath(self.tc.sdk_dir)
18 # Canonicalise the location of GCC
19 gcc_path = os.path.realpath(self._run("command -v gcc").strip())
20 # Skip the test if the GCC didn't come from the buildtools, as it only
21 # comes with buildtools-extended-tarball.
22 if os.path.commonprefix((sdk_base, gcc_path)) != sdk_base:
23 self.skipTest("Buildtools does not provide GCC")
24
25 # This is the prefix that GCC is built with, and should be replaced at
26 # installation time.
27 sdkpath = self.td.get("SDKPATH")
28 self.assertTrue(sdkpath)
29
30 for line in self._run('gcc -dumpspecs').splitlines():
31 self.assertNotIn(sdkpath, line)
diff --git a/meta/lib/oeqa/buildtools/cases/https.py b/meta/lib/oeqa/buildtools/cases/https.py
new file mode 100644
index 0000000000..4525e3d758
--- /dev/null
+++ b/meta/lib/oeqa/buildtools/cases/https.py
@@ -0,0 +1,22 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.sdk.case import OESDKTestCase
8from oeqa.utils.subprocesstweak import errors_have_output
9errors_have_output()
10
11class HTTPTests(OESDKTestCase):
12 """
13 Verify that HTTPS certificates are working correctly, as this depends on
14 environment variables being set correctly.
15 """
16
17 def test_wget(self):
18 self._run('env -i wget --debug --output-document /dev/null https://yoctoproject.org/connectivity.html')
19
20 def test_python(self):
21 # urlopen() returns a file-like object on success and throws an exception otherwise
22 self._run('python3 -c \'import urllib.request; urllib.request.urlopen("https://yoctoproject.org/connectivity.html")\'')
diff --git a/meta/lib/oeqa/buildtools/cases/sanity.py b/meta/lib/oeqa/buildtools/cases/sanity.py
new file mode 100644
index 0000000000..a55d456656
--- /dev/null
+++ b/meta/lib/oeqa/buildtools/cases/sanity.py
@@ -0,0 +1,24 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import shutil
8import os.path
9from oeqa.sdk.case import OESDKTestCase
10
11class SanityTests(OESDKTestCase):
12 def test_tools(self):
13 """
14 Test that wget and tar come from the buildtools, not the host. This
15 verifies that the buildtools have installed correctly. We can't check
16 for gcc as that is only installed by buildtools-extended.
17 """
18 for command in ("tar", "wget"):
19 # Canonicalise the SDK root
20 sdk_base = os.path.realpath(self.tc.sdk_dir)
21 # Canonicalise the location of this command
22 tool_path = os.path.realpath(self._run("command -v %s" % command).strip())
23 # Assert that the tool was found inside the SDK root
24 self.assertEqual(os.path.commonprefix((sdk_base, tool_path)), sdk_base)
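
Both gcc.py and sanity.py locate a tool, canonicalise paths with realpath(), and then test containment via os.path.commonprefix(). A standalone sketch of that check (my example; commonprefix() is purely textual, which is acceptable here because both inputs are canonicalised absolute paths):

    import os.path

    def under_root(root, path):
        # True if 'path' resolves to somewhere beneath 'root'.
        root = os.path.realpath(root)
        path = os.path.realpath(path)
        return os.path.commonprefix((root, path)) == root

    assert under_root("/usr", "/usr/bin/tar")
    assert not under_root("/usr/bin", "/etc/hosts")
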
diff --git a/meta/lib/oeqa/controllers/__init__.py b/meta/lib/oeqa/controllers/__init__.py
index cc3836c4bf..0fc905be9a 100644
--- a/meta/lib/oeqa/controllers/__init__.py
+++ b/meta/lib/oeqa/controllers/__init__.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4# Enable other layers to have modules in the same named directory 6# Enable other layers to have modules in the same named directory
diff --git a/meta/lib/oeqa/controllers/masterimage.py b/meta/lib/oeqa/controllers/controllerimage.py
index 0bf5917e48..78a4aaff87 100644
--- a/meta/lib/oeqa/controllers/masterimage.py
+++ b/meta/lib/oeqa/controllers/controllerimage.py
@@ -3,13 +3,13 @@
3# SPDX-License-Identifier: MIT 3# SPDX-License-Identifier: MIT
4# 4#
5# This module adds support to testimage.bbclass to deploy images and run 5# This module adds support to testimage.bbclass to deploy images and run
6# tests using a "master image" - this is a "known good" image that is 6# tests using a "controller image" - this is a "known good" image that is
7# installed onto the device as part of initial setup and will be booted into 7# installed onto the device as part of initial setup and will be booted into
8# with no interaction; we can then use it to deploy the image to be tested 8# with no interaction; we can then use it to deploy the image to be tested
9# to a second partition before running the tests. 9# to a second partition before running the tests.
10# 10#
11# For an example master image, see core-image-testmaster 11# For an example controller image, see core-image-testcontroller
12# (meta/recipes-extended/images/core-image-testmaster.bb) 12# (meta/recipes-extended/images/core-image-testcontroller.bb)
13 13
14import os 14import os
15import bb 15import bb
@@ -24,12 +24,12 @@ from oeqa.utils import CommandError
24 24
25from abc import ABCMeta, abstractmethod 25from abc import ABCMeta, abstractmethod
26 26
27class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta): 27class ControllerImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta):
28 28
29 supported_image_fstypes = ['tar.gz', 'tar.bz2'] 29 supported_image_fstypes = ['tar.gz', 'tar.bz2']
30 30
31 def __init__(self, d): 31 def __init__(self, d):
32 super(MasterImageHardwareTarget, self).__init__(d) 32 super(ControllerImageHardwareTarget, self).__init__(d)
33 33
34 # target ip 34 # target ip
35 addr = d.getVar("TEST_TARGET_IP") or bb.fatal('Please set TEST_TARGET_IP with the IP address of the machine you want to run the tests on.') 35 addr = d.getVar("TEST_TARGET_IP") or bb.fatal('Please set TEST_TARGET_IP with the IP address of the machine you want to run the tests on.')
@@ -61,8 +61,8 @@ class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta
61 if not os.path.isfile(self.kernel): 61 if not os.path.isfile(self.kernel):
62 bb.fatal("No kernel found. Expected path: %s" % self.kernel) 62 bb.fatal("No kernel found. Expected path: %s" % self.kernel)
63 63
64 # master ssh connection 64 # controller ssh connection
65 self.master = None 65 self.controller = None
66 # if the user knows what they are doing, then by all means... 66 # if the user knows what they are doing, then by all means...
67 self.user_cmds = d.getVar("TEST_DEPLOY_CMDS") 67 self.user_cmds = d.getVar("TEST_DEPLOY_CMDS")
68 self.deploy_cmds = None 68 self.deploy_cmds = None
@@ -119,19 +119,19 @@ class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta
119 119
120 def deploy(self): 120 def deploy(self):
121 # base class just sets the ssh log file for us 121 # base class just sets the ssh log file for us
122 super(MasterImageHardwareTarget, self).deploy() 122 super(ControllerImageHardwareTarget, self).deploy()
123 self.master = sshcontrol.SSHControl(ip=self.ip, logfile=self.sshlog, timeout=600, port=self.port) 123 self.controller = sshcontrol.SSHControl(ip=self.ip, logfile=self.sshlog, timeout=600, port=self.port)
124 status, output = self.master.run("cat /etc/masterimage") 124 status, output = self.controller.run("cat /etc/controllerimage")
125 if status != 0: 125 if status != 0:
126 # We're not booted into the master image, so try rebooting 126 # We're not booted into the controller image, so try rebooting
127 bb.plain("%s - booting into the master image" % self.pn) 127 bb.plain("%s - booting into the controller image" % self.pn)
128 self.power_ctl("cycle") 128 self.power_ctl("cycle")
129 self._wait_until_booted() 129 self._wait_until_booted()
130 130
131 bb.plain("%s - deploying image on target" % self.pn) 131 bb.plain("%s - deploying image on target" % self.pn)
132 status, output = self.master.run("cat /etc/masterimage") 132 status, output = self.controller.run("cat /etc/controllerimage")
133 if status != 0: 133 if status != 0:
134 bb.fatal("No ssh connectivity or target isn't running a master image.\n%s" % output) 134 bb.fatal("No ssh connectivity or target isn't running a controller image.\n%s" % output)
135 if self.user_cmds: 135 if self.user_cmds:
136 self.deploy_cmds = self.user_cmds.split("\n") 136 self.deploy_cmds = self.user_cmds.split("\n")
137 try: 137 try:
@@ -156,10 +156,10 @@ class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta
156 156
157 def stop(self): 157 def stop(self):
158 bb.plain("%s - reboot/powercycle target" % self.pn) 158 bb.plain("%s - reboot/powercycle target" % self.pn)
159 self.power_cycle(self.master) 159 self.power_cycle(self.controller)
160 160
161 161
162class SystemdbootTarget(MasterImageHardwareTarget): 162class SystemdbootTarget(ControllerImageHardwareTarget):
163 163
164 def __init__(self, d): 164 def __init__(self, d):
165 super(SystemdbootTarget, self).__init__(d) 165 super(SystemdbootTarget, self).__init__(d)
@@ -184,16 +184,16 @@ class SystemdbootTarget(MasterImageHardwareTarget):
184 184
185 def _deploy(self): 185 def _deploy(self):
186 # make sure these aren't mounted 186 # make sure these aren't mounted
187 self.master.run("umount /boot; umount /mnt/testrootfs; umount /sys/firmware/efi/efivars;") 187 self.controller.run("umount /boot; umount /mnt/testrootfs; umount /sys/firmware/efi/efivars;")
188 # from now on, every deploy cmd should return 0 188 # from now on, every deploy cmd should return 0
189 # else an exception will be thrown by sshcontrol 189 # else an exception will be thrown by sshcontrol
190 self.master.ignore_status = False 190 self.controller.ignore_status = False
191 self.master.copy_to(self.rootfs, "~/test-rootfs." + self.image_fstype) 191 self.controller.copy_to(self.rootfs, "~/test-rootfs." + self.image_fstype)
192 self.master.copy_to(self.kernel, "~/test-kernel") 192 self.controller.copy_to(self.kernel, "~/test-kernel")
193 for cmd in self.deploy_cmds: 193 for cmd in self.deploy_cmds:
194 self.master.run(cmd) 194 self.controller.run(cmd)
195 195
196 def _start(self, params=None): 196 def _start(self, params=None):
197 self.power_cycle(self.master) 197 self.power_cycle(self.controller)
198 # there are better ways than a timeout but this should work for now 198 # there are better ways than a timeout but this should work for now
199 time.sleep(120) 199 time.sleep(120)
diff --git a/meta/lib/oeqa/controllers/testtargetloader.py b/meta/lib/oeqa/controllers/testtargetloader.py
index 23101c7371..209ff7061a 100644
--- a/meta/lib/oeqa/controllers/testtargetloader.py
+++ b/meta/lib/oeqa/controllers/testtargetloader.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
diff --git a/meta/lib/oeqa/core/case.py b/meta/lib/oeqa/core/case.py
index aae451fef2..ad5524a714 100644
--- a/meta/lib/oeqa/core/case.py
+++ b/meta/lib/oeqa/core/case.py
@@ -5,6 +5,7 @@
5# 5#
6 6
7import base64 7import base64
8import os
8import zlib 9import zlib
9import unittest 10import unittest
10 11
@@ -43,8 +44,13 @@ class OETestCase(unittest.TestCase):
43 clss.tearDownClassMethod() 44 clss.tearDownClassMethod()
44 45
45 def _oeSetUp(self): 46 def _oeSetUp(self):
46 for d in self.decorators: 47 try:
47 d.setUpDecorator() 48 for d in self.decorators:
49 d.setUpDecorator()
50 except:
51 for d in self.decorators:
52 d.tearDownDecorator()
53 raise
48 self.setUpMethod() 54 self.setUpMethod()
49 55
50 def _oeTearDown(self): 56 def _oeTearDown(self):
@@ -52,6 +58,13 @@ class OETestCase(unittest.TestCase):
52 d.tearDownDecorator() 58 d.tearDownDecorator()
53 self.tearDownMethod() 59 self.tearDownMethod()
54 60
61 def assertFileExists(self, filename, msg=None):
62 """
63 Test that filename exists. If it does not, the test will fail.
64 """
65 if not os.path.exists(filename):
66 self.fail(msg or "%s does not exist" % filename)
67
55class OEPTestResultTestCase: 68class OEPTestResultTestCase:
56 """ 69 """
57 Mix-in class to provide functions to make interacting with extraresults for 70 Mix-in class to provide functions to make interacting with extraresults for
diff --git a/meta/lib/oeqa/core/context.py b/meta/lib/oeqa/core/context.py
index 2abe353d27..9313271f58 100644
--- a/meta/lib/oeqa/core/context.py
+++ b/meta/lib/oeqa/core/context.py
@@ -81,7 +81,7 @@ class OETestContext(object):
81 def runTests(self, processes=None, skips=[]): 81 def runTests(self, processes=None, skips=[]):
82 self.runner = self.runnerClass(self, descriptions=False, verbosity=2) 82 self.runner = self.runnerClass(self, descriptions=False, verbosity=2)
83 83
84 # Dinamically skip those tests specified though arguments 84 # Dynamically skip those tests specified through arguments
85 self.skipTests(skips) 85 self.skipTests(skips)
86 86
87 self._run_start_time = time.time() 87 self._run_start_time = time.time()
diff --git a/meta/lib/oeqa/core/decorator/__init__.py b/meta/lib/oeqa/core/decorator/__init__.py
index 1a82518ab6..93efd30e1d 100644
--- a/meta/lib/oeqa/core/decorator/__init__.py
+++ b/meta/lib/oeqa/core/decorator/__init__.py
@@ -5,8 +5,7 @@
5# 5#
6 6
7from functools import wraps 7from functools import wraps
8from abc import abstractmethod, ABCMeta 8from abc import ABCMeta
9from oeqa.core.utils.misc import strToList
10 9
11decoratorClasses = set() 10decoratorClasses = set()
12 11
@@ -65,15 +64,11 @@ class OETestDiscover(OETestDecorator):
65 return registry['cases'] 64 return registry['cases']
66 65
67def OETestTag(*tags): 66def OETestTag(*tags):
68 expandedtags = []
69 for tag in tags:
70 expandedtags += strToList(tag)
71 def decorator(item): 67 def decorator(item):
72 if hasattr(item, "__oeqa_testtags"): 68 if hasattr(item, "__oeqa_testtags"):
73 # do not append, create a new list (to handle classes with inheritance) 69 # do not append, create a new list (to handle classes with inheritance)
74 item.__oeqa_testtags = list(item.__oeqa_testtags) + expandedtags 70 item.__oeqa_testtags = list(item.__oeqa_testtags) + list(tags)
75 else: 71 else:
76 item.__oeqa_testtags = expandedtags 72 item.__oeqa_testtags = tags
77 return item 73 return item
78 return decorator 74 return decorator
79
diff --git a/meta/lib/oeqa/core/decorator/data.py b/meta/lib/oeqa/core/decorator/data.py
index bc4939e87c..0daf46334f 100644
--- a/meta/lib/oeqa/core/decorator/data.py
+++ b/meta/lib/oeqa/core/decorator/data.py
@@ -13,8 +13,8 @@ def has_feature(td, feature):
13 Checks for feature in DISTRO_FEATURES or IMAGE_FEATURES. 13 Checks for feature in DISTRO_FEATURES or IMAGE_FEATURES.
14 """ 14 """
15 15
16 if (feature in td.get('DISTRO_FEATURES', '') or 16 if (feature in td.get('DISTRO_FEATURES', '').split() or
17 feature in td.get('IMAGE_FEATURES', '')): 17 feature in td.get('IMAGE_FEATURES', '').split()):
18 return True 18 return True
19 return False 19 return False
20 20
@@ -23,18 +23,7 @@ def has_machine(td, machine):
23 Checks for MACHINE. 23 Checks for MACHINE.
24 """ 24 """
25 25
26 if (machine in td.get('MACHINE', '')): 26 if (machine == td.get('MACHINE', '')):
27 return True
28 return False
29
30def is_qemu(td, qemu):
31 """
32 Checks if MACHINE is qemu.
33 """
34
35 machine = td.get('MACHINE', '')
36 if (qemu in td.get('MACHINE', '') or
37 machine.startswith('qemu')):
38 return True 27 return True
39 return False 28 return False
40 29
@@ -189,34 +178,65 @@ class skipIfMachine(OETestDecorator):
189@registerDecorator 178@registerDecorator
190class skipIfNotQemu(OETestDecorator): 179class skipIfNotQemu(OETestDecorator):
191 """ 180 """
192 Skip test based on MACHINE. 181 Skip test if MACHINE is not qemu*
193
194 value must be a qemu MACHINE or it will skip the test
195 with msg as the reason.
196 """ 182 """
183 def setUpDecorator(self):
184 self.logger.debug("Checking if not qemu MACHINE")
185 if not self.case.td.get('MACHINE', '').startswith('qemu'):
186 self.case.skipTest('Test only runs on qemu machines')
197 187
198 attrs = ('value', 'msg') 188@registerDecorator
199 189class skipIfNotQemuUsermode(OETestDecorator):
190 """
191 Skip test if MACHINE_FEATURES does not contain qemu-usermode
192 """
200 def setUpDecorator(self): 193 def setUpDecorator(self):
201 msg = ('Checking if %s is not this MACHINE' % self.value) 194 self.logger.debug("Checking if MACHINE_FEATURES does not contain qemu-usermode")
202 self.logger.debug(msg) 195 if 'qemu-usermode' not in self.case.td.get('MACHINE_FEATURES', '').split():
203 if not is_qemu(self.case.td, self.value): 196 self.case.skipTest('Test requires qemu-usermode in MACHINE_FEATURES')
204 self.case.skipTest(self.msg)
205 197
206@registerDecorator 198@registerDecorator
207class skipIfQemu(OETestDecorator): 199class skipIfQemu(OETestDecorator):
208 """ 200 """
209 Skip test based on Qemu Machine. 201 Skip test if MACHINE is qemu*
202 """
203 def setUpDecorator(self):
204 self.logger.debug("Checking if qemu MACHINE")
205 if self.case.td.get('MACHINE', '').startswith('qemu'):
206 self.case.skipTest('Test only runs on real hardware')
210 207
211 value must not be a qemu machine or it will skip the test 208@registerDecorator
212 with msg as the reason. 209class skipIfArch(OETestDecorator):
213 """ 210 """
211 Skip test if HOST_ARCH is present in the tuple specified.
212 """
214 213
215 attrs = ('value', 'msg') 214 attrs = ('archs',)
215 def setUpDecorator(self):
216 arch = self.case.td['HOST_ARCH']
217 if arch in self.archs:
218 self.case.skipTest('Test skipped on %s' % arch)
219
220@registerDecorator
221class skipIfNotArch(OETestDecorator):
222 """
223 Skip test if HOST_ARCH is not present in the tuple specified.
224 """
216 225
226 attrs = ('archs',)
217 def setUpDecorator(self): 227 def setUpDecorator(self):
218 msg = ('Checking if %s is this MACHINE' % self.value) 228 arch = self.case.td['HOST_ARCH']
219 self.logger.debug(msg) 229 if arch not in self.archs:
220 if is_qemu(self.case.td, self.value): 230 self.case.skipTest('Test skipped on %s' % arch)
221 self.case.skipTest(self.msg)
222 231
232@registerDecorator
233class skipIfNotBuildArch(OETestDecorator):
234 """
235 Skip test if BUILD_ARCH is not present in the tuple specified.
236 """
237
238 attrs = ('archs',)
239 def setUpDecorator(self):
240 arch = self.case.td['BUILD_ARCH']
241 if arch not in self.archs:
242 self.case.skipTest('Test skipped on %s' % arch)
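
The has_feature() change switches from substring matching to whole-word matching by splitting the space-separated feature lists first. A standalone sketch of why that matters (my example, with a made-up td dictionary):

    def has_feature(td, feature):
        # Membership is now tested against whole words, not substrings.
        return (feature in td.get('DISTRO_FEATURES', '').split() or
                feature in td.get('IMAGE_FEATURES', '').split())

    td = {'DISTRO_FEATURES': 'systemd usrmerge', 'IMAGE_FEATURES': ''}
    assert has_feature(td, 'systemd')
    # Before the fix, 'system' in 'systemd usrmerge' was a false positive:
    assert not has_feature(td, 'system')
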
diff --git a/meta/lib/oeqa/core/decorator/oetimeout.py b/meta/lib/oeqa/core/decorator/oetimeout.py
index df90d1c798..5e6873ad48 100644
--- a/meta/lib/oeqa/core/decorator/oetimeout.py
+++ b/meta/lib/oeqa/core/decorator/oetimeout.py
@@ -24,5 +24,6 @@ class OETimeout(OETestDecorator):
24 24
25 def tearDownDecorator(self): 25 def tearDownDecorator(self):
26 signal.alarm(0) 26 signal.alarm(0)
27 signal.signal(signal.SIGALRM, self.alarmSignal) 27 if hasattr(self, 'alarmSignal'):
28 self.logger.debug("Removed SIGALRM handler") 28 signal.signal(signal.SIGALRM, self.alarmSignal)
29 self.logger.debug("Removed SIGALRM handler")
diff --git a/meta/lib/oeqa/core/loader.py b/meta/lib/oeqa/core/loader.py
index 11978213b8..d12d5a055c 100644
--- a/meta/lib/oeqa/core/loader.py
+++ b/meta/lib/oeqa/core/loader.py
@@ -37,7 +37,7 @@ def _find_duplicated_modules(suite, directory):
37 if path: 37 if path:
38 raise ImportError("Duplicated %s module found in %s" % (module, path)) 38 raise ImportError("Duplicated %s module found in %s" % (module, path))
39 39
40def _built_modules_dict(modules): 40def _built_modules_dict(modules, logger):
41 modules_dict = {} 41 modules_dict = {}
42 42
43 if modules == None: 43 if modules == None:
@@ -48,6 +48,9 @@ def _built_modules_dict(modules):
48 # characters, whereas class names do 48 # characters, whereas class names do
49 m = re.match(r'^([0-9a-z_.]+)(?:\.(\w[^.]*)(?:\.([^.]+))?)?$', module, flags=re.ASCII) 49 m = re.match(r'^([0-9a-z_.]+)(?:\.(\w[^.]*)(?:\.([^.]+))?)?$', module, flags=re.ASCII)
50 if not m: 50 if not m:
51 logger.warn("module '%s' was skipped from selected modules, "\
52 "because it doesn't match with module name assumptions: "\
53 "package and module names do not contain upper case characters, whereas class names do" % module)
51 continue 54 continue
52 55
53 module_name, class_name, test_name = m.groups() 56 module_name, class_name, test_name = m.groups()
@@ -58,6 +61,8 @@ def _built_modules_dict(modules):
58 modules_dict[module_name][class_name] = [] 61 modules_dict[module_name][class_name] = []
59 if test_name and test_name not in modules_dict[module_name][class_name]: 62 if test_name and test_name not in modules_dict[module_name][class_name]:
60 modules_dict[module_name][class_name].append(test_name) 63 modules_dict[module_name][class_name].append(test_name)
64 if modules and not modules_dict:
65 raise OEQATestNotFound("All selected modules were skipped, this would trigger selftest with all tests and -r ignored.")
61 66
62 return modules_dict 67 return modules_dict
63 68
@@ -71,7 +76,7 @@ class OETestLoader(unittest.TestLoader):
71 *args, **kwargs): 76 *args, **kwargs):
72 self.tc = tc 77 self.tc = tc
73 78
74 self.modules = _built_modules_dict(modules) 79 self.modules = _built_modules_dict(modules, tc.logger)
75 80
76 self.tests = tests 81 self.tests = tests
77 self.modules_required = modules_required 82 self.modules_required = modules_required
@@ -311,6 +316,9 @@ class OETestLoader(unittest.TestLoader):
311 module_name_small in self.modules) \ 316 module_name_small in self.modules) \
312 else False 317 else False
313 318
319 if any(c.isupper() for c in module.__name__):
320 raise SystemExit("Module '%s' contains uppercase characters and this isn't supported. Please fix the module name." % module.__name__)
321
314 return (load_module, load_underscore) 322 return (load_module, load_underscore)
315 323
316 324
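
The new warning and SystemExit enforce the naming convention that the regex in _built_modules_dict() already encoded: package and module segments are lower-case, while class names may contain upper case. A standalone check using the same pattern (the module and test names below are hypothetical):

    import re

    pattern = r'^([0-9a-z_.]+)(?:\.(\w[^.]*)(?:\.([^.]+))?)?$'

    m = re.match(pattern, 'oelib.types.TypeTests.test_booleans', flags=re.ASCII)
    assert m.groups() == ('oelib.types', 'TypeTests', 'test_booleans')
    # An upper-case module segment fails the match and would be skipped:
    assert re.match(pattern, 'OELib.types', flags=re.ASCII) is None
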
diff --git a/meta/lib/oeqa/core/runner.py b/meta/lib/oeqa/core/runner.py
index d50690ab37..b683d9b80a 100644
--- a/meta/lib/oeqa/core/runner.py
+++ b/meta/lib/oeqa/core/runner.py
@@ -44,6 +44,7 @@ class OETestResult(_TestResult):
44 self.endtime = {} 44 self.endtime = {}
45 self.progressinfo = {} 45 self.progressinfo = {}
46 self.extraresults = {} 46 self.extraresults = {}
47 self.shownmsg = []
47 48
48 # Inject into tc so that TestDepends decorator can see results 49 # Inject into tc so that TestDepends decorator can see results
49 tc.results = self 50 tc.results = self
@@ -74,6 +75,7 @@ class OETestResult(_TestResult):
74 for (scase, msg) in getattr(self, t): 75 for (scase, msg) in getattr(self, t):
75 if test.id() == scase.id(): 76 if test.id() == scase.id():
76 self.tc.logger.info(str(msg)) 77 self.tc.logger.info(str(msg))
78 self.shownmsg.append(test.id())
77 break 79 break
78 80
79 def logSummary(self, component, context_msg=''): 81 def logSummary(self, component, context_msg=''):
@@ -169,7 +171,6 @@ class OETestResult(_TestResult):
169 171
170 def logDetails(self, json_file_dir=None, configuration=None, result_id=None, 172 def logDetails(self, json_file_dir=None, configuration=None, result_id=None,
171 dump_streams=False): 173 dump_streams=False):
172 self.tc.logger.info("RESULTS:")
173 174
174 result = self.extraresults 175 result = self.extraresults
175 logs = {} 176 logs = {}
@@ -193,6 +194,10 @@ class OETestResult(_TestResult):
193 report = {'status': status} 194 report = {'status': status}
194 if log: 195 if log:
195 report['log'] = log 196 report['log'] = log
197 # Class setup failures wouldn't enter stopTest so would never display
198 if case.id() not in self.shownmsg:
199 self.tc.logger.info("Failure (%s) for %s:\n" % (status, case.id()) + log)
200
196 if duration: 201 if duration:
197 report['duration'] = duration 202 report['duration'] = duration
198 203
@@ -215,6 +220,7 @@ class OETestResult(_TestResult):
215 report['stderr'] = stderr 220 report['stderr'] = stderr
216 result[case.id()] = report 221 result[case.id()] = report
217 222
223 self.tc.logger.info("RESULTS:")
218 for i in ['PASSED', 'SKIPPED', 'EXPECTEDFAIL', 'ERROR', 'FAILED', 'UNKNOWN']: 224 for i in ['PASSED', 'SKIPPED', 'EXPECTEDFAIL', 'ERROR', 'FAILED', 'UNKNOWN']:
219 if i not in logs: 225 if i not in logs:
220 continue 226 continue
@@ -229,6 +235,10 @@ class OETestResult(_TestResult):
229 # Override as we unexpected successes aren't failures for us 235 # Override as we unexpected successes aren't failures for us
230 return (len(self.failures) == len(self.errors) == 0) 236 return (len(self.failures) == len(self.errors) == 0)
231 237
238 def hasAnyFailingTest(self):
239 # Account for expected failures
240 return not self.wasSuccessful() or len(self.expectedFailures)
241
232class OEListTestsResult(object): 242class OEListTestsResult(object):
233 def wasSuccessful(self): 243 def wasSuccessful(self):
234 return True 244 return True
@@ -347,7 +357,7 @@ class OETestResultJSONHelper(object):
347 os.makedirs(write_dir, exist_ok=True) 357 os.makedirs(write_dir, exist_ok=True)
348 test_results = self._get_existing_testresults_if_available(write_dir) 358 test_results = self._get_existing_testresults_if_available(write_dir)
349 test_results[result_id] = {'configuration': configuration, 'result': test_result} 359 test_results[result_id] = {'configuration': configuration, 'result': test_result}
350 json_testresults = json.dumps(test_results, sort_keys=True, indent=4) 360 json_testresults = json.dumps(test_results, sort_keys=True, indent=1)
351 self._write_file(write_dir, self.testresult_filename, json_testresults) 361 self._write_file(write_dir, self.testresult_filename, json_testresults)
352 if has_bb: 362 if has_bb:
353 bb.utils.unlockfile(lf) 363 bb.utils.unlockfile(lf)
diff --git a/meta/lib/oeqa/core/target/qemu.py b/meta/lib/oeqa/core/target/qemu.py
index 0f29414df5..d93b3ac94a 100644
--- a/meta/lib/oeqa/core/target/qemu.py
+++ b/meta/lib/oeqa/core/target/qemu.py
@@ -8,20 +8,21 @@ import os
8import sys 8import sys
9import signal 9import signal
10import time 10import time
11import glob
12import subprocess
11from collections import defaultdict 13from collections import defaultdict
12 14
13from .ssh import OESSHTarget 15from .ssh import OESSHTarget
14from oeqa.utils.qemurunner import QemuRunner 16from oeqa.utils.qemurunner import QemuRunner
15from oeqa.utils.dump import TargetDumper
16 17
17supported_fstypes = ['ext3', 'ext4', 'cpio.gz', 'wic'] 18supported_fstypes = ['ext3', 'ext4', 'cpio.gz', 'wic']
18 19
19class OEQemuTarget(OESSHTarget): 20class OEQemuTarget(OESSHTarget):
20 def __init__(self, logger, server_ip, timeout=300, user='root', 21 def __init__(self, logger, server_ip, timeout=300, user='root',
21 port=None, machine='', rootfs='', kernel='', kvm=False, slirp=False, 22 port=None, machine='', rootfs='', kernel='', kvm=False, slirp=False,
22 dump_dir='', dump_host_cmds='', display='', bootlog='', 23 dump_dir='', display='', bootlog='',
23 tmpdir='', dir_image='', boottime=60, serial_ports=2, 24 tmpdir='', dir_image='', boottime=60, serial_ports=2,
24 boot_patterns = defaultdict(str), ovmf=False, **kwargs): 25 boot_patterns = defaultdict(str), ovmf=False, tmpfsdir=None, **kwargs):
25 26
26 super(OEQemuTarget, self).__init__(logger, None, server_ip, timeout, 27 super(OEQemuTarget, self).__init__(logger, None, server_ip, timeout,
27 user, port) 28 user, port)
@@ -35,17 +36,15 @@ class OEQemuTarget(OESSHTarget):
35 self.ovmf = ovmf 36 self.ovmf = ovmf
36 self.use_slirp = slirp 37 self.use_slirp = slirp
37 self.boot_patterns = boot_patterns 38 self.boot_patterns = boot_patterns
39 self.dump_dir = dump_dir
40 self.bootlog = bootlog
38 41
39 self.runner = QemuRunner(machine=machine, rootfs=rootfs, tmpdir=tmpdir, 42 self.runner = QemuRunner(machine=machine, rootfs=rootfs, tmpdir=tmpdir,
40 deploy_dir_image=dir_image, display=display, 43 deploy_dir_image=dir_image, display=display,
41 logfile=bootlog, boottime=boottime, 44 logfile=bootlog, boottime=boottime,
42 use_kvm=kvm, use_slirp=slirp, dump_dir=dump_dir, 45 use_kvm=kvm, use_slirp=slirp, dump_dir=dump_dir, logger=logger,
43 dump_host_cmds=dump_host_cmds, logger=logger,
44 serial_ports=serial_ports, boot_patterns = boot_patterns, 46 serial_ports=serial_ports, boot_patterns = boot_patterns,
45 use_ovmf=ovmf) 47 use_ovmf=ovmf, tmpfsdir=tmpfsdir)
46 dump_target_cmds = kwargs.get("testimage_dump_target")
47 self.target_dumper = TargetDumper(dump_target_cmds, dump_dir, self.runner)
48 self.target_dumper.create_dir("qemu")
49 48
50 def start(self, params=None, extra_bootparams=None, runqemuparams=''): 49 def start(self, params=None, extra_bootparams=None, runqemuparams=''):
51 if self.use_slirp and not self.server_ip: 50 if self.use_slirp and not self.server_ip:
@@ -68,7 +67,28 @@ class OEQemuTarget(OESSHTarget):
68 self.server_ip = self.runner.server_ip 67 self.server_ip = self.runner.server_ip
69 else: 68 else:
70 self.stop() 69 self.stop()
71 raise RuntimeError("FAILED to start qemu - check the task log and the boot log") 70 # Display the first 20 lines of top and
71 # last 20 lines of the bootlog when the
72 # target fails to boot.
73 topfile = glob.glob(self.dump_dir + "/*_qemu/host_*_top")
74 msg = "\n\n===== start: snippet =====\n\n"
75 for f in topfile:
76 msg += "file: %s\n\n" % f
77 with open(f) as tf:
78 for x in range(20):
79 msg += next(tf)
80 msg += "\n\n===== end: snippet =====\n\n"
81 blcmd = ["tail", "-20", self.bootlog]
82 msg += "===== start: snippet =====\n\n"
83 try:
84 out = subprocess.check_output(blcmd, stderr=subprocess.STDOUT, timeout=1).decode('utf-8')
85 msg += "file: %s\n\n" % self.bootlog
86 msg += out
87 except (subprocess.CalledProcessError, subprocess.TimeoutExpired, FileNotFoundError) as err:
88 msg += "Error running command: %s\n%s\n" % (blcmd, err)
89 msg += "\n\n===== end: snippet =====\n"
90
91 raise RuntimeError("FAILED to start qemu - check the task log and the boot log %s" % (msg))
72 92
73 def stop(self): 93 def stop(self):
74 self.runner.stop() 94 self.runner.stop()
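
On boot failure the qemu target now embeds log snippets in the RuntimeError instead of only pointing at files. A self-contained sketch of the bootlog tail it collects (my example; tail(1) is assumed to be on PATH, as in the patch):

    import subprocess

    def tail_snippet(path, lines=20):
        # Capture the last N lines of a log, degrading gracefully if the
        # file or the tail binary is unavailable.
        cmd = ["tail", "-%d" % lines, path]
        try:
            return subprocess.check_output(cmd, stderr=subprocess.STDOUT,
                                           timeout=1).decode("utf-8")
        except (subprocess.CalledProcessError, subprocess.TimeoutExpired,
                FileNotFoundError) as err:
            return "Error running command: %s\n%s\n" % (cmd, err)
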
diff --git a/meta/lib/oeqa/core/target/serial.py b/meta/lib/oeqa/core/target/serial.py
new file mode 100644
index 0000000000..7c2cd8b248
--- /dev/null
+++ b/meta/lib/oeqa/core/target/serial.py
@@ -0,0 +1,315 @@
1#
2# SPDX-License-Identifier: MIT
3#
4
5import base64
6import logging
7import os
8from threading import Lock
9from . import OETarget
10
11class OESerialTarget(OETarget):
12
13 def __init__(self, logger, target_ip, server_ip, server_port=0,
14 timeout=300, serialcontrol_cmd=None, serialcontrol_extra_args=None,
15 serialcontrol_ps1=None, serialcontrol_connect_timeout=None,
16 machine=None, **kwargs):
17 if not logger:
18 logger = logging.getLogger('target')
19 logger.setLevel(logging.INFO)
20 filePath = os.path.join(os.getcwd(), 'remoteTarget.log')
21 fileHandler = logging.FileHandler(filePath, 'w', 'utf-8')
22 formatter = logging.Formatter(
23 '%(asctime)s.%(msecs)03d %(levelname)s: %(message)s',
24 '%H:%M:%S')
25 fileHandler.setFormatter(formatter)
26 logger.addHandler(fileHandler)
27
28 super(OESerialTarget, self).__init__(logger)
29
30 if serialcontrol_ps1:
31 self.target_ps1 = serialcontrol_ps1
32 elif machine:
33 # fallback to a default value which assumes root@machine
34 self.target_ps1 = f'root@{machine}:.*# '
35 else:
36 raise ValueError("Unable to determine shell command prompt (PS1) format.")
37
38 if not serialcontrol_cmd:
39 raise ValueError("Unable to determine serial control command.")
40
41 if serialcontrol_extra_args:
42 self.connection_script = f'{serialcontrol_cmd} {serialcontrol_extra_args}'
43 else:
44 self.connection_script = serialcontrol_cmd
45
46 if serialcontrol_connect_timeout:
47 self.connect_timeout = serialcontrol_connect_timeout
48 else:
49 self.connect_timeout = 10 # default to 10s connection timeout
50
51 self.default_command_timeout = timeout
52 self.ip = target_ip
53 self.server_ip = server_ip
54 self.server_port = server_port
55 self.conn = None
56 self.mutex = Lock()
57
58 def start(self, **kwargs):
59 pass
60
61 def stop(self, **kwargs):
62 pass
63
64 def get_connection(self):
65 if self.conn is None:
66 self.conn = SerialConnection(self.connection_script,
67 self.target_ps1,
68 self.connect_timeout,
69 self.default_command_timeout)
70
71 return self.conn
72
73 def run(self, cmd, timeout=None):
74 """
75 Runs command on target over the provided serial connection.
76 The first call will open the connection, and subsequent
77 calls will re-use the same connection to send new commands.
78
79 command: Command to run on target.
80 timeout: <value>: Kill command after <val> seconds.
81 None: Kill command after the default timeout (seconds).
82 0: No timeout, runs until return.
83 """
84 # Lock needed to avoid multiple threads running commands concurrently
85 # A serial connection can only be used by one caller at a time
86 with self.mutex:
87 conn = self.get_connection()
88
89 self.logger.debug(f"[Running]$ {cmd}")
90 # Run the command, then echo $? to get the command's return code
91 try:
92 output = conn.run_command(cmd, timeout)
93 status = conn.run_command("echo $?")
94 self.logger.debug(f" [stdout]: {output}")
95 self.logger.debug(f" [ret code]: {status}\n\n")
96 except SerialTimeoutException as e:
97 self.logger.debug(e)
98 output = ""
99 status = 255
100
101 # Return to $HOME after each command to simulate a stateless SSH connection
102 conn.run_command('cd "$HOME"')
103
104 return (int(status), output)
105
106 def copyTo(self, localSrc, remoteDst):
107 """
108 Copies files by converting them to base 32, then transferring
109 the ASCII text to the target, and decoding it in place on the
110 target.
111
112 On a 115k baud serial connection, this method transfers at
113 roughly 30kbps.
114 """
115 with open(localSrc, 'rb') as file:
116 data = file.read()
117
118 b32 = base64.b32encode(data).decode('utf-8')
119
120 # To avoid shell line limits, send a chunk at a time
121 SPLIT_LEN = 512
122 lines = [b32[i:i+SPLIT_LEN] for i in range(0, len(b32), SPLIT_LEN)]
123
124 with self.mutex:
125 conn = self.get_connection()
126
127 filename = os.path.basename(localSrc)
128 TEMP = f'/tmp/{filename}.b32'
129
130 # Create or empty out the temp file
131 conn.run_command(f'echo -n "" > {TEMP}')
132
133 for line in lines:
134 conn.run_command(f'echo -n {line} >> {TEMP}')
135
136 # Check to see whether the remoteDst is a directory
137 is_directory = conn.run_command(f'[[ -d {remoteDst} ]]; echo $?')
138 if int(is_directory) == 0:
139 # append the localSrc filename to the end of remoteDst
140 remoteDst = os.path.join(remoteDst, filename)
141
142 conn.run_command(f'base32 -d {TEMP} > {remoteDst}')
143 conn.run_command(f'rm {TEMP}')
144
145 return 0, 'Success'
146
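# Sketch of the Base32 round trip performed above (illustrative; mirrors the
# chunking in copyTo, with payload.bin as an assumed file name):
#
#   import base64
#   data = open('payload.bin', 'rb').read()
#   b32 = base64.b32encode(data).decode('utf-8')
#   chunks = [b32[i:i+512] for i in range(0, len(b32), 512)]
#   # each chunk is appended to /tmp/payload.bin.b32 on the target via echo,
#   # then reassembled with: base32 -d /tmp/payload.bin.b32 > payload.bin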
147 def copyFrom(self, remoteSrc, localDst):
148 """
149 Copies files by converting them to base 32 on the target, then
150 transferring the ASCII text to the host. That text is then
151 decoded here and written out to the destination.
152
153 On a 115k baud serial connection, this method transfers at
154 roughly 30kbps.
155 """
156 with self.mutex:
157 b32 = self.get_connection().run_command(f'base32 {remoteSrc}')
158
159 data = base64.b32decode(b32.replace('\r\n', ''))
160
161 # If the local path is a directory, get the filename from
162 # the remoteSrc path and append it to localDst
163 if os.path.isdir(localDst):
164 filename = os.path.basename(remoteSrc)
165 localDst = os.path.join(localDst, filename)
166
167 with open(localDst, 'wb') as file:
168 file.write(data)
169
170 return 0, 'Success'
171
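# Decode-side sketch (illustrative): the target emits Base32 text with serial
# line endings that must be stripped before decoding on the host:
#
#   b32 = conn.run_command('base32 /etc/hostname')
#   data = base64.b32decode(b32.replace('\r\n', ''))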
172 def copyDirTo(self, localSrc, remoteDst):
173 """
174 Copy recursively localSrc directory to remoteDst in target.
175 """
176
177 for root, dirs, files in os.walk(localSrc):
178 # Create directories in the target as needed
179 for d in dirs:
180 tmpDir = os.path.join(root, d).replace(localSrc, "")
181 newDir = os.path.join(remoteDst, tmpDir.lstrip("/"))
182 cmd = "mkdir -p %s" % newDir
183 self.run(cmd)
184
185 # Copy files into the target
186 for f in files:
187 tmpFile = os.path.join(root, f).replace(localSrc, "")
188 dstFile = os.path.join(remoteDst, tmpFile.lstrip("/"))
189 srcFile = os.path.join(root, f)
190 self.copyTo(srcFile, dstFile)
191
192 def deleteFiles(self, remotePath, files):
193 """
194 Deletes files in target's remotePath.
195 """
196
197 cmd = "rm"
198 if not isinstance(files, list):
199 files = [files]
200
201 for f in files:
202 cmd = "%s %s" % (cmd, os.path.join(remotePath, f))
203
204 self.run(cmd)
205
206 def deleteDir(self, remotePath):
207 """
208 Deletes target's remotePath directory.
209 """
210
211 cmd = "rmdir %s" % remotePath
212 self.run(cmd)
213
214 def deleteDirStructure(self, localPath, remotePath):
215 """
216 Delete recursively localPath structure directory in target's remotePath.
217
218 This function is useful to delete a package that is installed in the
219 device under test (DUT) and the host running the test has such package
220 extracted in tmp directory.
221
222 Example:
223 pwd: /home/user/tmp
224 tree: .
225 └── work
226 ├── dir1
227 │   └── file1
228 └── dir2
229
230 localpath = "/home/user/tmp" and remotepath = "/home/user"
231
232 With the above variables this function will try to delete the
233 directory in the DUT in this order:
234 /home/user/work/dir1/file1
235 /home/user/work/dir1 (if dir is empty)
236 /home/user/work/dir2 (if dir is empty)
237 /home/user/work (if dir is empty)
238 """
239
240 for root, dirs, files in os.walk(localPath, topdown=False):
241 # Delete files first
242 tmpDir = os.path.join(root).replace(localPath, "")
243 remoteDir = os.path.join(remotePath, tmpDir.lstrip("/"))
244 self.deleteFiles(remoteDir, files)
245
246 # Remove dirs if empty
247 for d in dirs:
248 tmpDir = os.path.join(root, d).replace(localPath, "")
249 remoteDir = os.path.join(remotePath, tmpDir.lstrip("/"))
250 self.deleteDir(remoteDir)
251
252class SerialTimeoutException(Exception):
253 def __init__(self, msg):
254 self.msg = msg
255 def __str__(self):
256 return self.msg
257
258class SerialConnection:
259
260 def __init__(self, script, target_prompt, connect_timeout, default_command_timeout):
261 import pexpect # limiting scope to avoid build dependency
262 self.prompt = target_prompt
263 self.connect_timeout = connect_timeout
264 self.default_command_timeout = default_command_timeout
265 self.conn = pexpect.spawn('/bin/bash', ['-c', script], encoding='utf8')
266 self._seek_to_clean_shell()
267 # Disable echo to avoid the need to parse the outgoing command
268 self.run_command('stty -echo')
269
270 def _seek_to_clean_shell(self):
271 """
272 Attempts to find a clean shell, meaning it is clear and
273 ready to accept a new command. This is necessary to ensure
274 the correct output is captured from each command.
275 """
276 import pexpect # limiting scope to avoid build dependency
277 # Look for a clean shell
278 # Wait a short amount of time for the connection to finish
279 pexpect_code = self.conn.expect([self.prompt, pexpect.TIMEOUT],
280 timeout=self.connect_timeout)
281
282 # if a timeout occurred, send an empty line and wait for a clean shell
283 if pexpect_code == 1:
284 # send a newline to clear and present the shell
285 self.conn.sendline("")
286 pexpect_code = self.conn.expect(self.prompt)
287
288 def run_command(self, cmd, timeout=None):
289 """
290 Runs command on target over the provided serial connection.
291 Returns any output on the shell while the command was run.
292
293 command: Command to run on target.
294 timeout: <value>: Kill command after <value> seconds.
295 None: Kill command after the default timeout, in seconds.
296 0: No timeout, runs until return.
297 """
298 import pexpect # limiting scope to avoid build dependency
299 # Convert from the OETarget defaults to pexpect timeout values
300 if timeout is None:
301 timeout = self.default_command_timeout
302 elif timeout == 0:
303 timeout = None # passing None to pexpect is infinite timeout
304
305 self.conn.sendline(cmd)
306 pexpect_code = self.conn.expect([self.prompt, pexpect.TIMEOUT], timeout=timeout)
307
308 # check for timeout
309 if pexpect_code == 1:
310 self.conn.send('\003') # send Ctrl+C
311 self._seek_to_clean_shell()
312 raise SerialTimeoutException(f'Timeout executing: {cmd} after {timeout}s')
313
314 return self.conn.before.removesuffix('\r\n')
315
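The SerialConnection class above boils down to a small pexpect pattern: spawn the connection command under a shell, expect the prompt (nudging with a newline on timeout), disable echo, and treat everything before the next prompt as command output. A minimal standalone sketch of that pattern, assuming a picocom connection command and a root@qemux86-64 prompt (both illustrative):

    import pexpect

    PROMPT = r'root@qemux86-64:.*# '     # assumed PS1 pattern
    # assumed connection command; any script attaching to the serial port works
    conn = pexpect.spawn('/bin/bash', ['-c', 'picocom -b 115200 /dev/ttyUSB0'],
                         encoding='utf8')

    # Seek a clean shell: on timeout, send a newline and wait for the prompt.
    if conn.expect([PROMPT, pexpect.TIMEOUT], timeout=10) == 1:
        conn.sendline('')
        conn.expect(PROMPT)

    conn.sendline('stty -echo')          # avoid parsing our own command back
    conn.expect(PROMPT)

    conn.sendline('uname -r')
    conn.expect(PROMPT)                  # output accumulates in conn.before
    print(conn.before.removesuffix('\r\n'))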
diff --git a/meta/lib/oeqa/core/target/ssh.py b/meta/lib/oeqa/core/target/ssh.py
index 461448dbc5..8b5c450a05 100644
--- a/meta/lib/oeqa/core/target/ssh.py
+++ b/meta/lib/oeqa/core/target/ssh.py
@@ -34,12 +34,17 @@ class OESSHTarget(OETarget):
34 self.timeout = timeout 34 self.timeout = timeout
35 self.user = user 35 self.user = user
36 ssh_options = [ 36 ssh_options = [
37 '-o', 'ServerAliveCountMax=2',
38 '-o', 'ServerAliveInterval=30',
37 '-o', 'UserKnownHostsFile=/dev/null', 39 '-o', 'UserKnownHostsFile=/dev/null',
38 '-o', 'StrictHostKeyChecking=no', 40 '-o', 'StrictHostKeyChecking=no',
39 '-o', 'LogLevel=ERROR' 41 '-o', 'LogLevel=ERROR'
40 ] 42 ]
43 scp_options = [
44 '-r'
45 ]
41 self.ssh = ['ssh', '-l', self.user ] + ssh_options 46 self.ssh = ['ssh', '-l', self.user ] + ssh_options
42 self.scp = ['scp'] + ssh_options 47 self.scp = ['scp'] + ssh_options + scp_options
43 if port: 48 if port:
44 self.ssh = self.ssh + [ '-p', port ] 49 self.ssh = self.ssh + [ '-p', port ]
45 self.scp = self.scp + [ '-P', port ] 50 self.scp = self.scp + [ '-P', port ]
@@ -50,14 +55,14 @@ class OESSHTarget(OETarget):
50 def stop(self, **kwargs): 55 def stop(self, **kwargs):
51 pass 56 pass
52 57
53 def _run(self, command, timeout=None, ignore_status=True): 58 def _run(self, command, timeout=None, ignore_status=True, raw=False):
54 """ 59 """
55 Runs command in target using SSHProcess. 60 Runs command in target using SSHProcess.
56 """ 61 """
57 self.logger.debug("[Running]$ %s" % " ".join(command)) 62 self.logger.debug("[Running]$ %s" % " ".join(command))
58 63
59 starttime = time.time() 64 starttime = time.time()
60 status, output = SSHCall(command, self.logger, timeout) 65 status, output = SSHCall(command, self.logger, timeout, raw)
61 self.logger.debug("[Command returned '%d' after %.2f seconds]" 66 self.logger.debug("[Command returned '%d' after %.2f seconds]"
62 "" % (status, time.time() - starttime)) 67 "" % (status, time.time() - starttime))
63 68
@@ -67,7 +72,7 @@ class OESSHTarget(OETarget):
67 72
68 return (status, output) 73 return (status, output)
69 74
70 def run(self, command, timeout=None): 75 def run(self, command, timeout=None, ignore_status=True, raw=False):
71 """ 76 """
72 Runs command in target. 77 Runs command in target.
73 78
@@ -86,10 +91,12 @@ class OESSHTarget(OETarget):
86 else: 91 else:
87 processTimeout = self.timeout 92 processTimeout = self.timeout
88 93
89 status, output = self._run(sshCmd, processTimeout, True) 94 status, output = self._run(sshCmd, processTimeout, ignore_status, raw)
90 self.logger.debug('Command: %s\nOutput: %s\n' % (command, output)) 95 if len(output) > (64 * 1024):
91 if (status == 255) and (('No route to host') in output): 96 self.logger.debug('Command: %s\nStatus: %d Output length: %s\n' % (command, status, len(output)))
92 self.target_dumper.dump_target() 97 else:
98 self.logger.debug('Command: %s\nStatus: %d Output: %s\n' % (command, status, output))
99
93 return (status, output) 100 return (status, output)
94 101
95 def copyTo(self, localSrc, remoteDst): 102 def copyTo(self, localSrc, remoteDst):
@@ -202,32 +209,51 @@ class OESSHTarget(OETarget):
202 remoteDir = os.path.join(remotePath, tmpDir.lstrip("/")) 209 remoteDir = os.path.join(remotePath, tmpDir.lstrip("/"))
203 self.deleteDir(remoteDir) 210 self.deleteDir(remoteDir)
204 211
205def SSHCall(command, logger, timeout=None, **opts): 212def SSHCall(command, logger, timeout=None, raw=False, **opts):
206 213
207 def run(): 214 def run():
208 nonlocal output 215 nonlocal output
209 nonlocal process 216 nonlocal process
217 output_raw = bytearray()
210 starttime = time.time() 218 starttime = time.time()
219 progress = time.time()
211 process = subprocess.Popen(command, **options) 220 process = subprocess.Popen(command, **options)
221 has_timeout = False
222 appendline = None
212 if timeout: 223 if timeout:
213 endtime = starttime + timeout 224 endtime = starttime + timeout
214 eof = False 225 eof = False
215 while time.time() < endtime and not eof: 226 os.set_blocking(process.stdout.fileno(), False)
216 logger.debug('time: %s, endtime: %s' % (time.time(), endtime)) 227 while not has_timeout and not eof:
217 try: 228 try:
218 if select.select([process.stdout], [], [], 5)[0] != []: 229 if select.select([process.stdout], [], [], 5)[0] != []:
219 reader = codecs.getreader('utf-8')(process.stdout, 'ignore') 230 # wait a bit for more data, tries to avoid reading single characters
220 data = reader.read(1024, 4096) 231 time.sleep(0.2)
232 data = process.stdout.read()
221 if not data: 233 if not data:
222 process.stdout.close()
223 eof = True 234 eof = True
224 else: 235 else:
225 output += data 236 output_raw.extend(data)
226 logger.debug('Partial data from SSH call: %s' % data) 237 # ignore errors to capture as much as possible
238 #logger.debug('Partial data from SSH call:\n%s' % data.decode('utf-8', errors='ignore'))
227 endtime = time.time() + timeout 239 endtime = time.time() + timeout
228 except InterruptedError: 240 except InterruptedError:
241 logger.debug('InterruptedError')
242 continue
243 except BlockingIOError:
244 logger.debug('BlockingIOError')
229 continue 245 continue
230 246
247 if time.time() >= endtime:
248 logger.debug('SSHCall has timeout! Time: %s, endtime: %s' % (time.time(), endtime))
249 has_timeout = True
250
251 if time.time() >= (progress + 60):
252 logger.debug('Waiting for process output at time: %s with datasize: %s' % (time.time(), len(output_raw)))
253 progress = time.time()
254
255 process.stdout.close()
256
231 # process hasn't returned yet 257 # process hasn't returned yet
232 if not eof: 258 if not eof:
233 process.terminate() 259 process.terminate()
@@ -235,20 +261,58 @@ def SSHCall(command, logger, timeout=None, **opts):
235 try: 261 try:
236 process.kill() 262 process.kill()
237 except OSError: 263 except OSError:
264 logger.debug('OSError when killing process')
238 pass 265 pass
239 endtime = time.time() - starttime 266 endtime = time.time() - starttime
240 lastline = ("\nProcess killed - no output for %d seconds. Total" 267 appendline = ("\nProcess killed - no output for %d seconds. Total"
241 " running time: %d seconds." % (timeout, endtime)) 268 " running time: %d seconds." % (timeout, endtime))
242 logger.debug('Received data from SSH call %s ' % lastline) 269 logger.debug('Received data from SSH call:\n%s ' % appendline)
243 output += lastline 270 process.wait()
244 271
272 if raw:
273 output = bytes(output_raw)
274 if appendline:
275 output += bytes(appendline, "utf-8")
276 else:
277 output = output_raw.decode('utf-8', errors='ignore')
278 if appendline:
279 output += appendline
245 else: 280 else:
246 output = process.communicate()[0].decode('utf-8', errors='ignore') 281 output = output_raw = process.communicate()[0]
247 logger.debug('Data from SSH call: %s' % output.rstrip()) 282 if not raw:
283 output = output_raw.decode('utf-8', errors='ignore')
284
285 if len(output) < (64 * 1024):
286 if output.rstrip():
287 logger.debug('Data from SSH call:\n%s' % output.rstrip())
288 else:
289 logger.debug('No output from SSH call')
290
291 # timeout or not, make sure the process exits and is not hanging
292 if process.returncode is None:
293 try:
294 process.wait(timeout=5)
295 except subprocess.TimeoutExpired:
296 try:
297 process.kill()
298 except OSError:
299 logger.debug('OSError')
300 pass
301 process.wait()
302
303 if has_timeout:
304 # Versions of openssh before 8.6_p1 return error code 0 when killed
305 # by a signal, so when the timeout occurs we receive a 0 error
306 # code because the process has been terminated; that is wrong, since
307 # 0 means success even though the process actually timed out.
308 # From version 8.6_p1 onwards, the returned code is 255.
309 # Fix this behaviour by checking the return code
310 if process.returncode == 0:
311 process.returncode = 255
248 312
249 options = { 313 options = {
250 "stdout": subprocess.PIPE, 314 "stdout": subprocess.PIPE,
251 "stderr": subprocess.STDOUT, 315 "stderr": subprocess.STDOUT if not raw else None,
252 "stdin": None, 316 "stdin": None,
253 "shell": False, 317 "shell": False,
254 "bufsize": -1, 318 "bufsize": -1,
@@ -271,6 +335,9 @@ def SSHCall(command, logger, timeout=None, **opts):
271 # whilst running and ensure we don't leave a process behind. 335 # whilst running and ensure we don't leave a process behind.
272 if process.poll() is None: 336 if process.poll() is None:
273 process.kill() 337 process.kill()
338 if process.returncode is None:
339 process.wait()
274 logger.debug('Something went wrong, killing SSH process') 340 logger.debug('Something went wrong, killing SSH process')
275 raise 341 raise
276 return (process.wait(), output.rstrip()) 342
343 return (process.returncode, output if raw else output.rstrip())
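The reworked SSHCall loop above amounts to a reusable pattern: make the child's stdout non-blocking, poll it with select(), coalesce reads briefly, and push the deadline forward whenever output arrives so only true idle time counts against the timeout. A reduced sketch of that pattern (illustrative; error handling and logging trimmed):

    import os, select, subprocess, time

    def run_with_idle_timeout(cmd, timeout):
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        os.set_blocking(proc.stdout.fileno(), False)
        buf = bytearray()
        endtime = time.time() + timeout
        while time.time() < endtime:
            if select.select([proc.stdout], [], [], 5)[0]:
                time.sleep(0.2)                  # coalesce small reads
                try:
                    data = proc.stdout.read()
                except BlockingIOError:
                    continue
                if not data:                     # EOF: command finished
                    break
                buf.extend(data)
                endtime = time.time() + timeout  # output resets the deadline
        else:
            proc.kill()                          # idle timeout expired
        proc.stdout.close()
        proc.wait()
        return proc.returncode, bytes(buf)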
diff --git a/meta/lib/oeqa/core/tests/cases/timeout.py b/meta/lib/oeqa/core/tests/cases/timeout.py
index 5dfecc7b7c..69cf969a67 100644
--- a/meta/lib/oeqa/core/tests/cases/timeout.py
+++ b/meta/lib/oeqa/core/tests/cases/timeout.py
@@ -8,6 +8,7 @@ from time import sleep
8 8
9from oeqa.core.case import OETestCase 9from oeqa.core.case import OETestCase
10from oeqa.core.decorator.oetimeout import OETimeout 10from oeqa.core.decorator.oetimeout import OETimeout
11from oeqa.core.decorator.depends import OETestDepends
11 12
12class TimeoutTest(OETestCase): 13class TimeoutTest(OETestCase):
13 14
@@ -19,3 +20,15 @@ class TimeoutTest(OETestCase):
19 def testTimeoutFail(self): 20 def testTimeoutFail(self):
20 sleep(2) 21 sleep(2)
21 self.assertTrue(True, msg='How is this possible?') 22 self.assertTrue(True, msg='How is this possible?')
23
24
25 def testTimeoutSkip(self):
26 self.skipTest("This test needs to be skipped, so that testTimeoutDepends()'s OETestDepends kicks in")
27
28 @OETestDepends(["timeout.TimeoutTest.testTimeoutSkip"])
29 @OETimeout(3)
30 def testTimeoutDepends(self):
31 self.assertTrue(False, msg='How is this possible?')
32
33 def testTimeoutUnrelated(self):
34 sleep(6)
diff --git a/meta/lib/oeqa/core/tests/common.py b/meta/lib/oeqa/core/tests/common.py
index 88cc758ad3..bcc4fde632 100644
--- a/meta/lib/oeqa/core/tests/common.py
+++ b/meta/lib/oeqa/core/tests/common.py
@@ -9,7 +9,6 @@ import os
9 9
10import unittest 10import unittest
11import logging 11import logging
12import os
13 12
14logger = logging.getLogger("oeqa") 13logger = logging.getLogger("oeqa")
15logger.setLevel(logging.INFO) 14logger.setLevel(logging.INFO)
diff --git a/meta/lib/oeqa/core/tests/test_data.py b/meta/lib/oeqa/core/tests/test_data.py
index ac74098b78..acd726f3a0 100755
--- a/meta/lib/oeqa/core/tests/test_data.py
+++ b/meta/lib/oeqa/core/tests/test_data.py
@@ -33,7 +33,7 @@ class TestData(TestBase):
33 33
34 def test_data_fail_wrong_variable(self): 34 def test_data_fail_wrong_variable(self):
35 expectedError = 'AssertionError' 35 expectedError = 'AssertionError'
36 d = {'IMAGE' : 'core-image-sato', 'ARCH' : 'arm'} 36 d = {'IMAGE' : 'core-image-weston', 'ARCH' : 'arm'}
37 37
38 tc = self._testLoader(d=d, modules=self.modules) 38 tc = self._testLoader(d=d, modules=self.modules)
39 results = tc.runTests() 39 results = tc.runTests()
diff --git a/meta/lib/oeqa/core/tests/test_decorators.py b/meta/lib/oeqa/core/tests/test_decorators.py
index b798bf7d33..5095f39948 100755
--- a/meta/lib/oeqa/core/tests/test_decorators.py
+++ b/meta/lib/oeqa/core/tests/test_decorators.py
@@ -133,5 +133,11 @@ class TestTimeoutDecorator(TestBase):
133 msg = "OETestTimeout didn't restore SIGALRM" 133 msg = "OETestTimeout didn't restore SIGALRM"
134 self.assertIs(alarm_signal, signal.getsignal(signal.SIGALRM), msg=msg) 134 self.assertIs(alarm_signal, signal.getsignal(signal.SIGALRM), msg=msg)
135 135
136 def test_timeout_cancel(self):
137 tests = ['timeout.TimeoutTest.testTimeoutSkip', 'timeout.TimeoutTest.testTimeoutDepends', 'timeout.TimeoutTest.testTimeoutUnrelated']
138 msg = 'Unrelated test failed to complete'
139 tc = self._testLoader(modules=self.modules, tests=tests)
140 self.assertTrue(tc.runTests().wasSuccessful(), msg=msg)
141
136if __name__ == '__main__': 142if __name__ == '__main__':
137 unittest.main() 143 unittest.main()
diff --git a/meta/lib/oeqa/core/utils/concurrencytest.py b/meta/lib/oeqa/core/utils/concurrencytest.py
index b2eb68fb02..d10f8f7f04 100644
--- a/meta/lib/oeqa/core/utils/concurrencytest.py
+++ b/meta/lib/oeqa/core/utils/concurrencytest.py
@@ -1,5 +1,7 @@
1#!/usr/bin/env python3 1#!/usr/bin/env python3
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-or-later 5# SPDX-License-Identifier: GPL-2.0-or-later
4# 6#
5# Modified for use in OE by Richard Purdie, 2018 7# Modified for use in OE by Richard Purdie, 2018
@@ -48,11 +50,16 @@ _all__ = [
48# 50#
49class BBThreadsafeForwardingResult(ThreadsafeForwardingResult): 51class BBThreadsafeForwardingResult(ThreadsafeForwardingResult):
50 52
51 def __init__(self, target, semaphore, threadnum, totalinprocess, totaltests): 53 def __init__(self, target, semaphore, threadnum, totalinprocess, totaltests, output, finalresult):
52 super(BBThreadsafeForwardingResult, self).__init__(target, semaphore) 54 super(BBThreadsafeForwardingResult, self).__init__(target, semaphore)
53 self.threadnum = threadnum 55 self.threadnum = threadnum
54 self.totalinprocess = totalinprocess 56 self.totalinprocess = totalinprocess
55 self.totaltests = totaltests 57 self.totaltests = totaltests
58 self.buffer = True
59 self.outputbuf = output
60 self.finalresult = finalresult
61 self.finalresult.buffer = True
62 self.target = target
56 63
57 def _add_result_with_semaphore(self, method, test, *args, **kwargs): 64 def _add_result_with_semaphore(self, method, test, *args, **kwargs):
58 self.semaphore.acquire() 65 self.semaphore.acquire()
@@ -61,16 +68,19 @@ class BBThreadsafeForwardingResult(ThreadsafeForwardingResult):
61 self.result.starttime[test.id()] = self._test_start.timestamp() 68 self.result.starttime[test.id()] = self._test_start.timestamp()
62 self.result.threadprogress[self.threadnum].append(test.id()) 69 self.result.threadprogress[self.threadnum].append(test.id())
63 totalprogress = sum(len(x) for x in self.result.threadprogress.values()) 70 totalprogress = sum(len(x) for x in self.result.threadprogress.values())
64 self.result.progressinfo[test.id()] = "%s: %s/%s %s/%s (%ss) (%s)" % ( 71 self.result.progressinfo[test.id()] = "%s: %s/%s %s/%s (%ss) (%s failed) (%s)" % (
65 self.threadnum, 72 self.threadnum,
66 len(self.result.threadprogress[self.threadnum]), 73 len(self.result.threadprogress[self.threadnum]),
67 self.totalinprocess, 74 self.totalinprocess,
68 totalprogress, 75 totalprogress,
69 self.totaltests, 76 self.totaltests,
70 "{0:.2f}".format(time.time()-self._test_start.timestamp()), 77 "{0:.2f}".format(time.time()-self._test_start.timestamp()),
78 self.target.failed_tests,
71 test.id()) 79 test.id())
72 finally: 80 finally:
73 self.semaphore.release() 81 self.semaphore.release()
82 self.finalresult._stderr_buffer = io.StringIO(initial_value=self.outputbuf.getvalue().decode("utf-8"))
83 self.finalresult._stdout_buffer = io.StringIO()
74 super(BBThreadsafeForwardingResult, self)._add_result_with_semaphore(method, test, *args, **kwargs) 84 super(BBThreadsafeForwardingResult, self)._add_result_with_semaphore(method, test, *args, **kwargs)
75 85
76class ProxyTestResult: 86class ProxyTestResult:
@@ -183,35 +193,28 @@ class dummybuf(object):
183# 193#
184class ConcurrentTestSuite(unittest.TestSuite): 194class ConcurrentTestSuite(unittest.TestSuite):
185 195
186 def __init__(self, suite, processes, setupfunc, removefunc): 196 def __init__(self, suite, processes, setupfunc, removefunc, bb_vars):
187 super(ConcurrentTestSuite, self).__init__([suite]) 197 super(ConcurrentTestSuite, self).__init__([suite])
188 self.processes = processes 198 self.processes = processes
189 self.setupfunc = setupfunc 199 self.setupfunc = setupfunc
190 self.removefunc = removefunc 200 self.removefunc = removefunc
201 self.bb_vars = bb_vars
191 202
192 def run(self, result): 203 def run(self, result):
193 tests, totaltests = fork_for_tests(self.processes, self) 204 testservers, totaltests = fork_for_tests(self.processes, self)
194 try: 205 try:
195 threads = {} 206 threads = {}
196 queue = Queue() 207 queue = Queue()
197 semaphore = threading.Semaphore(1) 208 semaphore = threading.Semaphore(1)
198 result.threadprogress = {} 209 result.threadprogress = {}
199 for i, (test, testnum) in enumerate(tests): 210 for i, (testserver, testnum, output) in enumerate(testservers):
200 result.threadprogress[i] = [] 211 result.threadprogress[i] = []
201 process_result = BBThreadsafeForwardingResult( 212 process_result = BBThreadsafeForwardingResult(
202 ExtraResultsDecoderTestResult(result), 213 ExtraResultsDecoderTestResult(result),
203 semaphore, i, testnum, totaltests) 214 semaphore, i, testnum, totaltests, output, result)
204 # Force buffering of stdout/stderr so the console doesn't get corrupted by test output
205 # as per default in parent code
206 process_result.buffer = True
207 # We have to add a buffer object to stdout to keep subunit happy
208 process_result._stderr_buffer = io.StringIO()
209 process_result._stderr_buffer.buffer = dummybuf(process_result._stderr_buffer)
210 process_result._stdout_buffer = io.StringIO()
211 process_result._stdout_buffer.buffer = dummybuf(process_result._stdout_buffer)
212 reader_thread = threading.Thread( 215 reader_thread = threading.Thread(
213 target=self._run_test, args=(test, process_result, queue)) 216 target=self._run_test, args=(testserver, process_result, queue))
214 threads[test] = reader_thread, process_result 217 threads[testserver] = reader_thread, process_result
215 reader_thread.start() 218 reader_thread.start()
216 while threads: 219 while threads:
217 finished_test = queue.get() 220 finished_test = queue.get()
@@ -222,13 +225,13 @@ class ConcurrentTestSuite(unittest.TestSuite):
222 process_result.stop() 225 process_result.stop()
223 raise 226 raise
224 finally: 227 finally:
225 for test in tests: 228 for testserver in testservers:
226 test[0]._stream.close() 229 testserver[0]._stream.close()
227 230
228 def _run_test(self, test, process_result, queue): 231 def _run_test(self, testserver, process_result, queue):
229 try: 232 try:
230 try: 233 try:
231 test.run(process_result) 234 testserver.run(process_result)
232 except Exception: 235 except Exception:
233 # The run logic itself failed 236 # The run logic itself failed
234 case = testtools.ErrorHolder( 237 case = testtools.ErrorHolder(
@@ -236,12 +239,12 @@ class ConcurrentTestSuite(unittest.TestSuite):
236 error=sys.exc_info()) 239 error=sys.exc_info())
237 case.run(process_result) 240 case.run(process_result)
238 finally: 241 finally:
239 queue.put(test) 242 queue.put(testserver)
240 243
241def fork_for_tests(concurrency_num, suite): 244def fork_for_tests(concurrency_num, suite):
242 result = [] 245 testservers = []
243 if 'BUILDDIR' in os.environ: 246 if 'BUILDDIR' in os.environ:
244 selftestdir = get_test_layer() 247 selftestdir = get_test_layer(suite.bb_vars['BBLAYERS'])
245 248
246 test_blocks = partition_tests(suite, concurrency_num) 249 test_blocks = partition_tests(suite, concurrency_num)
247 # Clear the tests from the original suite so it doesn't keep them alive 250 # Clear the tests from the original suite so it doesn't keep them alive
@@ -261,7 +264,7 @@ def fork_for_tests(concurrency_num, suite):
261 ourpid = os.getpid() 264 ourpid = os.getpid()
262 try: 265 try:
263 newbuilddir = None 266 newbuilddir = None
264 stream = os.fdopen(c2pwrite, 'wb', 1) 267 stream = os.fdopen(c2pwrite, 'wb')
265 os.close(c2pread) 268 os.close(c2pread)
266 269
267 (builddir, newbuilddir) = suite.setupfunc("-st-" + str(ourpid), selftestdir, process_suite) 270 (builddir, newbuilddir) = suite.setupfunc("-st-" + str(ourpid), selftestdir, process_suite)
@@ -273,10 +276,11 @@ def fork_for_tests(concurrency_num, suite):
273 newsi = os.open(os.devnull, os.O_RDWR) 276 newsi = os.open(os.devnull, os.O_RDWR)
274 os.dup2(newsi, sys.stdin.fileno()) 277 os.dup2(newsi, sys.stdin.fileno())
275 278
279 # Send stdout/stderr over the stream
280 os.dup2(c2pwrite, sys.stdout.fileno())
281 os.dup2(c2pwrite, sys.stderr.fileno())
282
276 subunit_client = TestProtocolClient(stream) 283 subunit_client = TestProtocolClient(stream)
277 # Force buffering of stdout/stderr so the console doesn't get corrupted by test output
278 # as per default in parent code
279 subunit_client.buffer = True
280 subunit_result = AutoTimingTestResultDecorator(subunit_client) 284 subunit_result = AutoTimingTestResultDecorator(subunit_client)
281 unittest_result = process_suite.run(ExtraResultsEncoderTestResult(subunit_result)) 285 unittest_result = process_suite.run(ExtraResultsEncoderTestResult(subunit_result))
282 if ourpid != os.getpid(): 286 if ourpid != os.getpid():
@@ -305,10 +309,12 @@ def fork_for_tests(concurrency_num, suite):
305 os._exit(0) 309 os._exit(0)
306 else: 310 else:
307 os.close(c2pwrite) 311 os.close(c2pwrite)
308 stream = os.fdopen(c2pread, 'rb', 1) 312 stream = os.fdopen(c2pread, 'rb')
309 test = ProtocolTestCase(stream) 313 # Collect stdout/stderr into an io buffer
310 result.append((test, numtests)) 314 output = io.BytesIO()
311 return result, totaltests 315 testserver = ProtocolTestCase(stream, passthrough=output)
316 testservers.append((testserver, numtests, output))
317 return testservers, totaltests
312 318
313def partition_tests(suite, count): 319def partition_tests(suite, count):
314 # Keep tests from the same class together but allow tests from modules 320 # Keep tests from the same class together but allow tests from modules
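The fork_for_tests() changes above pair a subunit stream with dup2()'d stdout/stderr so test output travels over the same pipe and lands in a per-process buffer. A reduced sketch of that plumbing (illustrative; the real code also sets up per-process build directories and cleanup):

    import io, os, sys
    from subunit import TestProtocolClient, ProtocolTestCase

    def fork_and_run(suite):
        c2pread, c2pwrite = os.pipe()
        if os.fork() == 0:                          # child: run, stream subunit
            os.close(c2pread)
            stream = os.fdopen(c2pwrite, 'wb')
            os.dup2(c2pwrite, sys.stdout.fileno())  # test output rides the pipe
            os.dup2(c2pwrite, sys.stderr.fileno())
            suite.run(TestProtocolClient(stream))
            stream.flush()
            os._exit(0)
        os.close(c2pwrite)                          # parent: replay results later
        stream = os.fdopen(c2pread, 'rb')
        output = io.BytesIO()                       # captures non-subunit bytes
        return ProtocolTestCase(stream, passthrough=output), output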
diff --git a/meta/lib/oeqa/core/utils/misc.py b/meta/lib/oeqa/core/utils/misc.py
deleted file mode 100644
index e1a59588eb..0000000000
--- a/meta/lib/oeqa/core/utils/misc.py
+++ /dev/null
@@ -1,47 +0,0 @@
1#
2# Copyright (C) 2016 Intel Corporation
3#
4# SPDX-License-Identifier: MIT
5#
6
7def toList(obj, obj_type, obj_name="Object"):
8 if isinstance(obj, obj_type):
9 return [obj]
10 elif isinstance(obj, list):
11 return obj
12 else:
13 raise TypeError("%s must be %s or list" % (obj_name, obj_type))
14
15def toSet(obj, obj_type, obj_name="Object"):
16 if isinstance(obj, obj_type):
17 return {obj}
18 elif isinstance(obj, list):
19 return set(obj)
20 elif isinstance(obj, set):
21 return obj
22 else:
23 raise TypeError("%s must be %s or set" % (obj_name, obj_type))
24
25def strToList(obj, obj_name="Object"):
26 return toList(obj, str, obj_name)
27
28def strToSet(obj, obj_name="Object"):
29 return toSet(obj, str, obj_name)
30
31def intToList(obj, obj_name="Object"):
32 return toList(obj, int, obj_name)
33
34def dataStoteToDict(d, variables):
35 data = {}
36
37 for v in variables:
38 data[v] = d.getVar(v)
39
40 return data
41
42def updateTestData(d, td, variables):
43 """
44 Updates variables with values of data store to test data.
45 """
46 for var in variables:
47 td[var] = d.getVar(var)
diff --git a/meta/lib/oeqa/files/maturin/guessing-game/Cargo.toml b/meta/lib/oeqa/files/maturin/guessing-game/Cargo.toml
new file mode 100644
index 0000000000..a78ada2593
--- /dev/null
+++ b/meta/lib/oeqa/files/maturin/guessing-game/Cargo.toml
@@ -0,0 +1,20 @@
1[package]
2name = "guessing-game"
3version = "0.1.0"
4edition = "2021"
5
6# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
7
8[lib]
9name = "guessing_game"
10# "cdylib" is necessary to produce a shared library for Python to import from.
11crate-type = ["cdylib"]
12
13[dependencies]
14rand = "0.8.4"
15
16[dependencies.pyo3]
17version = "0.24.1"
18# "abi3-py38" tells pyo3 (and maturin) to build using the stable ABI with minimum Python version 3.8
19features = ["abi3-py38"]
20
diff --git a/meta/lib/oeqa/files/maturin/guessing-game/LICENSE-APACHE b/meta/lib/oeqa/files/maturin/guessing-game/LICENSE-APACHE
new file mode 100644
index 0000000000..16fe87b06e
--- /dev/null
+++ b/meta/lib/oeqa/files/maturin/guessing-game/LICENSE-APACHE
@@ -0,0 +1,201 @@
1 Apache License
2 Version 2.0, January 2004
3 http://www.apache.org/licenses/
4
5TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6
71. Definitions.
8
9 "License" shall mean the terms and conditions for use, reproduction,
10 and distribution as defined by Sections 1 through 9 of this document.
11
12 "Licensor" shall mean the copyright owner or entity authorized by
13 the copyright owner that is granting the License.
14
15 "Legal Entity" shall mean the union of the acting entity and all
16 other entities that control, are controlled by, or are under common
17 control with that entity. For the purposes of this definition,
18 "control" means (i) the power, direct or indirect, to cause the
19 direction or management of such entity, whether by contract or
20 otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 outstanding shares, or (iii) beneficial ownership of such entity.
22
23 "You" (or "Your") shall mean an individual or Legal Entity
24 exercising permissions granted by this License.
25
26 "Source" form shall mean the preferred form for making modifications,
27 including but not limited to software source code, documentation
28 source, and configuration files.
29
30 "Object" form shall mean any form resulting from mechanical
31 transformation or translation of a Source form, including but
32 not limited to compiled object code, generated documentation,
33 and conversions to other media types.
34
35 "Work" shall mean the work of authorship, whether in Source or
36 Object form, made available under the License, as indicated by a
37 copyright notice that is included in or attached to the work
38 (an example is provided in the Appendix below).
39
40 "Derivative Works" shall mean any work, whether in Source or Object
41 form, that is based on (or derived from) the Work and for which the
42 editorial revisions, annotations, elaborations, or other modifications
43 represent, as a whole, an original work of authorship. For the purposes
44 of this License, Derivative Works shall not include works that remain
45 separable from, or merely link (or bind by name) to the interfaces of,
46 the Work and Derivative Works thereof.
47
48 "Contribution" shall mean any work of authorship, including
49 the original version of the Work and any modifications or additions
50 to that Work or Derivative Works thereof, that is intentionally
51 submitted to Licensor for inclusion in the Work by the copyright owner
52 or by an individual or Legal Entity authorized to submit on behalf of
53 the copyright owner. For the purposes of this definition, "submitted"
54 means any form of electronic, verbal, or written communication sent
55 to the Licensor or its representatives, including but not limited to
56 communication on electronic mailing lists, source code control systems,
57 and issue tracking systems that are managed by, or on behalf of, the
58 Licensor for the purpose of discussing and improving the Work, but
59 excluding communication that is conspicuously marked or otherwise
60 designated in writing by the copyright owner as "Not a Contribution."
61
62 "Contributor" shall mean Licensor and any individual or Legal Entity
63 on behalf of whom a Contribution has been received by Licensor and
64 subsequently incorporated within the Work.
65
662. Grant of Copyright License. Subject to the terms and conditions of
67 this License, each Contributor hereby grants to You a perpetual,
68 worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 copyright license to reproduce, prepare Derivative Works of,
70 publicly display, publicly perform, sublicense, and distribute the
71 Work and such Derivative Works in Source or Object form.
72
733. Grant of Patent License. Subject to the terms and conditions of
74 this License, each Contributor hereby grants to You a perpetual,
75 worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 (except as stated in this section) patent license to make, have made,
77 use, offer to sell, sell, import, and otherwise transfer the Work,
78 where such license applies only to those patent claims licensable
79 by such Contributor that are necessarily infringed by their
80 Contribution(s) alone or by combination of their Contribution(s)
81 with the Work to which such Contribution(s) was submitted. If You
82 institute patent litigation against any entity (including a
83 cross-claim or counterclaim in a lawsuit) alleging that the Work
84 or a Contribution incorporated within the Work constitutes direct
85 or contributory patent infringement, then any patent licenses
86 granted to You under this License for that Work shall terminate
87 as of the date such litigation is filed.
88
894. Redistribution. You may reproduce and distribute copies of the
90 Work or Derivative Works thereof in any medium, with or without
91 modifications, and in Source or Object form, provided that You
92 meet the following conditions:
93
94 (a) You must give any other recipients of the Work or
95 Derivative Works a copy of this License; and
96
97 (b) You must cause any modified files to carry prominent notices
98 stating that You changed the files; and
99
100 (c) You must retain, in the Source form of any Derivative Works
101 that You distribute, all copyright, patent, trademark, and
102 attribution notices from the Source form of the Work,
103 excluding those notices that do not pertain to any part of
104 the Derivative Works; and
105
106 (d) If the Work includes a "NOTICE" text file as part of its
107 distribution, then any Derivative Works that You distribute must
108 include a readable copy of the attribution notices contained
109 within such NOTICE file, excluding those notices that do not
110 pertain to any part of the Derivative Works, in at least one
111 of the following places: within a NOTICE text file distributed
112 as part of the Derivative Works; within the Source form or
113 documentation, if provided along with the Derivative Works; or,
114 within a display generated by the Derivative Works, if and
115 wherever such third-party notices normally appear. The contents
116 of the NOTICE file are for informational purposes only and
117 do not modify the License. You may add Your own attribution
118 notices within Derivative Works that You distribute, alongside
119 or as an addendum to the NOTICE text from the Work, provided
120 that such additional attribution notices cannot be construed
121 as modifying the License.
122
123 You may add Your own copyright statement to Your modifications and
124 may provide additional or different license terms and conditions
125 for use, reproduction, or distribution of Your modifications, or
126 for any such Derivative Works as a whole, provided Your use,
127 reproduction, and distribution of the Work otherwise complies with
128 the conditions stated in this License.
129
1305. Submission of Contributions. Unless You explicitly state otherwise,
131 any Contribution intentionally submitted for inclusion in the Work
132 by You to the Licensor shall be under the terms and conditions of
133 this License, without any additional terms or conditions.
134 Notwithstanding the above, nothing herein shall supersede or modify
135 the terms of any separate license agreement you may have executed
136 with Licensor regarding such Contributions.
137
1386. Trademarks. This License does not grant permission to use the trade
139 names, trademarks, service marks, or product names of the Licensor,
140 except as required for reasonable and customary use in describing the
141 origin of the Work and reproducing the content of the NOTICE file.
142
1437. Disclaimer of Warranty. Unless required by applicable law or
144 agreed to in writing, Licensor provides the Work (and each
145 Contributor provides its Contributions) on an "AS IS" BASIS,
146 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 implied, including, without limitation, any warranties or conditions
148 of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 PARTICULAR PURPOSE. You are solely responsible for determining the
150 appropriateness of using or redistributing the Work and assume any
151 risks associated with Your exercise of permissions under this License.
152
1538. Limitation of Liability. In no event and under no legal theory,
154 whether in tort (including negligence), contract, or otherwise,
155 unless required by applicable law (such as deliberate and grossly
156 negligent acts) or agreed to in writing, shall any Contributor be
157 liable to You for damages, including any direct, indirect, special,
158 incidental, or consequential damages of any character arising as a
159 result of this License or out of the use or inability to use the
160 Work (including but not limited to damages for loss of goodwill,
161 work stoppage, computer failure or malfunction, or any and all
162 other commercial damages or losses), even if such Contributor
163 has been advised of the possibility of such damages.
164
1659. Accepting Warranty or Additional Liability. While redistributing
166 the Work or Derivative Works thereof, You may choose to offer,
167 and charge a fee for, acceptance of support, warranty, indemnity,
168 or other liability obligations and/or rights consistent with this
169 License. However, in accepting such obligations, You may act only
170 on Your own behalf and on Your sole responsibility, not on behalf
171 of any other Contributor, and only if You agree to indemnify,
172 defend, and hold each Contributor harmless for any liability
173 incurred by, or claims asserted against, such Contributor by reason
174 of your accepting any such warranty or additional liability.
175
176END OF TERMS AND CONDITIONS
177
178APPENDIX: How to apply the Apache License to your work.
179
180 To apply the Apache License to your work, attach the following
181 boilerplate notice, with the fields enclosed by brackets "[]"
182 replaced with your own identifying information. (Don't include
183 the brackets!) The text should be enclosed in the appropriate
184 comment syntax for the file format. We also recommend that a
185 file or class name and description of purpose be included on the
186 same "printed page" as the copyright notice for easier
187 identification within third-party archives.
188
189Copyright [yyyy] [name of copyright owner]
190
191Licensed under the Apache License, Version 2.0 (the "License");
192you may not use this file except in compliance with the License.
193You may obtain a copy of the License at
194
195 http://www.apache.org/licenses/LICENSE-2.0
196
197Unless required by applicable law or agreed to in writing, software
198distributed under the License is distributed on an "AS IS" BASIS,
199WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200See the License for the specific language governing permissions and
201limitations under the License.
diff --git a/meta/lib/oeqa/files/maturin/guessing-game/LICENSE-MIT b/meta/lib/oeqa/files/maturin/guessing-game/LICENSE-MIT
new file mode 100644
index 0000000000..c4a9a58791
--- /dev/null
+++ b/meta/lib/oeqa/files/maturin/guessing-game/LICENSE-MIT
@@ -0,0 +1,25 @@
1Copyright (c) 2018 konstin
2
3Permission is hereby granted, free of charge, to any
4person obtaining a copy of this software and associated
5documentation files (the "Software"), to deal in the
6Software without restriction, including without
7limitation the rights to use, copy, modify, merge,
8publish, distribute, sublicense, and/or sell copies of
9the Software, and to permit persons to whom the Software
10is furnished to do so, subject to the following
11conditions:
12
13The above copyright notice and this permission notice
14shall be included in all copies or substantial portions
15of the Software.
16
17THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
18ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
19TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
20PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
21SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
22CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
23OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
24IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
25DEALINGS IN THE SOFTWARE.
diff --git a/meta/lib/oeqa/files/maturin/guessing-game/pyproject.toml b/meta/lib/oeqa/files/maturin/guessing-game/pyproject.toml
new file mode 100644
index 0000000000..ff35abc472
--- /dev/null
+++ b/meta/lib/oeqa/files/maturin/guessing-game/pyproject.toml
@@ -0,0 +1,8 @@
1[build-system]
2requires = ["maturin>=1.0,<2.0"]
3build-backend = "maturin"
4
5[tool.maturin]
6# "extension-module" tells pyo3 we want to build an extension module (skips linking against libpython.so)
7features = ["pyo3/extension-module"]
8
diff --git a/meta/lib/oeqa/files/maturin/guessing-game/src/lib.rs b/meta/lib/oeqa/files/maturin/guessing-game/src/lib.rs
new file mode 100644
index 0000000000..6828466ed1
--- /dev/null
+++ b/meta/lib/oeqa/files/maturin/guessing-game/src/lib.rs
@@ -0,0 +1,48 @@
1use pyo3::prelude::*;
2use rand::Rng;
3use std::cmp::Ordering;
4use std::io;
5
6#[pyfunction]
7fn guess_the_number() {
8 println!("Guess the number!");
9
10 let secret_number = rand::thread_rng().gen_range(1..101);
11
12 loop {
13 println!("Please input your guess.");
14
15 let mut guess = String::new();
16
17 io::stdin()
18 .read_line(&mut guess)
19 .expect("Failed to read line");
20
21 let guess: u32 = match guess.trim().parse() {
22 Ok(num) => num,
23 Err(_) => continue,
24 };
25
26 println!("You guessed: {}", guess);
27
28 match guess.cmp(&secret_number) {
29 Ordering::Less => println!("Too small!"),
30 Ordering::Greater => println!("Too big!"),
31 Ordering::Equal => {
32 println!("You win!");
33 break;
34 }
35 }
36 }
37}
38
39/// A Python module implemented in Rust. The name of this function must match
40/// the `lib.name` setting in the `Cargo.toml`, else Python will not be able to
41/// import the module.
42#[pymodule]
43fn guessing_game(_py: Python, m: &PyModule) -> PyResult<()> {
44 m.add_function(wrap_pyfunction!(guess_the_number, m)?)?;
45
46 Ok(())
47}
48
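Once built with maturin, the resulting extension is imported from Python under the [lib] name above (a usage sketch):

    import guessing_game   # module name set by [lib] name / #[pymodule]
    guessing_game.guess_the_number()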
diff --git a/meta/lib/oeqa/files/test.rs b/meta/lib/oeqa/files/test.rs
new file mode 100644
index 0000000000..f79c691f08
--- /dev/null
+++ b/meta/lib/oeqa/files/test.rs
@@ -0,0 +1,2 @@
1fn main() {
2}
diff --git a/meta/lib/oeqa/files/testresults/testresults.json b/meta/lib/oeqa/files/testresults/testresults.json
index 1a62155618..86e5e412af 100644
--- a/meta/lib/oeqa/files/testresults/testresults.json
+++ b/meta/lib/oeqa/files/testresults/testresults.json
@@ -1,5 +1,5 @@
1{ 1{
2 "runtime_core-image-minimal_qemuarm_20181225195701": { 2 "runtime_core-image-minimal:qemuarm_20181225195701": {
3 "configuration": { 3 "configuration": {
4 "DISTRO": "poky", 4 "DISTRO": "poky",
5 "HOST_DISTRO": "ubuntu-16.04", 5 "HOST_DISTRO": "ubuntu-16.04",
diff --git a/meta/lib/oeqa/manual/bsp-hw.json b/meta/lib/oeqa/manual/bsp-hw.json
index 75b89758cb..308a0807f3 100644
--- a/meta/lib/oeqa/manual/bsp-hw.json
+++ b/meta/lib/oeqa/manual/bsp-hw.json
@@ -26,7 +26,7 @@
26 "expected_results": "" 26 "expected_results": ""
27 }, 27 },
28 "5": { 28 "5": {
29 "action": "Remove USB, and reboot into new installed system. \nNote: If installation was successfully completed and received this message \"\"(sdx): Volume was not properly unmounted...Please run fsck.\"\" ignore it because this was whitelisted according to bug 9652.", 29 "action": "Remove USB, and reboot into new installed system. \nNote: If installation was successfully completed and received this message \"\"(sdx): Volume was not properly unmounted...Please run fsck.\"\" ignore it because this was allowed according to bug 9652.",
30 "expected_results": "" 30 "expected_results": ""
31 } 31 }
32 }, 32 },
@@ -61,70 +61,6 @@
61 }, 61 },
62 { 62 {
63 "test": { 63 "test": {
64 "@alias": "bsps-hw.bsps-hw.boot_from_runlevel_3",
65 "author": [
66 {
67 "email": "alexandru.c.georgescu@intel.com",
68 "name": "alexandru.c.georgescu@intel.com"
69 }
70 ],
71 "execution": {
72 "1": {
73 "action": "Boot into system and edit /etc/inittab to make sure that system enter at the run level 3 by default, this is done by changing the line \n\n\nid:5:initdefault \n\nto \n\nid:3:initdefault \n\n",
74 "expected_results": ""
75 },
76 "2": {
77 "action": "Reboot system, and press \"Tab\" to enter \"grub\"",
78 "expected_results": ""
79 },
80 "3": {
81 "action": "Get into the \"kernel\" line with the edit option \"e\" and add \"psplash=false text\" at the end line.",
82 "expected_results": ""
83 },
84 "4": {
85 "action": "Press \"F10\" or \"ctrl+x\" to boot system",
86 "expected_results": ""
87 },
88 "5": {
89 "action": "If system ask you for a login type \"root\"",
90 "expected_results": "System should boot to run level 3, showing the command prompt."
91 }
92 },
93 "summary": "boot_from_runlevel_3"
94 }
95 },
96 {
97 "test": {
98 "@alias": "bsps-hw.bsps-hw.boot_from_runlevel_5",
99 "author": [
100 {
101 "email": "alexandru.c.georgescu@intel.com",
102 "name": "alexandru.c.georgescu@intel.com"
103 }
104 ],
105 "execution": {
106 "1": {
107 "action": "Boot into system and edit /etc/inittab to make sure that system enter at the run level 5 by default, this is done by changing the line \n\nid:3:initdefault \n\nto \n\nid:5:initdefault \n\n",
108 "expected_results": ""
109 },
110 "2": {
111 "action": "Reboot system, and press \"Tab\" to enter \"grub\"",
112 "expected_results": ""
113 },
114 "3": {
115 "action": "Get into the \"kernel\" line with the edit option \"e\" and add \"psplash=false text\" at the end line.",
116 "expected_results": ""
117 },
118 "4": {
119 "action": "Press \"F10\" or \"ctrl+x\" to boot system \nNote: The test is only for sato image.",
120 "expected_results": "System should boot to runlevel 5 ."
121 }
122 },
123 "summary": "boot_from_runlevel_5"
124 }
125 },
126 {
127 "test": {
128 "@alias": "bsps-hw.bsps-hw.switch_among_multi_applications_and_desktop", 64 "@alias": "bsps-hw.bsps-hw.switch_among_multi_applications_and_desktop",
129 "author": [ 65 "author": [
130 { 66 {
@@ -155,70 +91,6 @@
155 }, 91 },
156 { 92 {
157 "test": { 93 "test": {
158 "@alias": "bsps-hw.bsps-hw.ethernet_static_ip_set_in_connman",
159 "author": [
160 {
161 "email": "alexandru.c.georgescu@intel.com",
162 "name": "alexandru.c.georgescu@intel.com"
163 }
164 ],
165 "execution": {
166 "1": {
167 "action": "Boot the system and check internet connection is on . ",
168 "expected_results": ""
169 },
170 "2": {
171 "action": "Launch connmand-properties (up-right corner on desktop)",
172 "expected_results": ""
173 },
174 "3": {
175 "action": "Choose Ethernet device and set a valid static ip address for it. \nFor example, in our internal network, we can set as following: \nip address: 10.239.48.xxx \nMask: 255.255.255.0 \nGateway (Broadcast): 10.239.48.255",
176 "expected_results": ""
177 },
178 "4": {
179 "action": "Check the Network configuration with \"ifconfig\"",
180 "expected_results": "Static IP was set successfully \n"
181 },
182 "5": {
183 "action": "ping to another IP adress",
184 "expected_results": "Ping works correclty\n"
185 }
186 },
187 "summary": "ethernet_static_ip_set_in_connman"
188 }
189 },
190 {
191 "test": {
192 "@alias": "bsps-hw.bsps-hw.ethernet_get_IP_in_connman_via_DHCP",
193 "author": [
194 {
195 "email": "alexandru.c.georgescu@intel.com",
196 "name": "alexandru.c.georgescu@intel.com"
197 }
198 ],
199 "execution": {
200 "1": {
201 "action": "Launch connmand-properties (up-right corner on your desktop). ",
202 "expected_results": ""
203 },
204 "2": {
205 "action": "Check if Ethernet device can work properly with static IP, doing \"ping XXX.XXX.XXX.XXX\", once this is set.",
206 "expected_results": "Ping executed successfully . \n\n"
207 },
208 "3": {
209 "action": "Then choose DHCP method for Ethernet device in connmand-properties.",
210 "expected_results": ""
211 },
212 "4": {
213 "action": "Check with 'ifconfig\" and \"ping\" if Ethernet device get IP address via DHCP.",
214 "expected_results": "Ethernet device can get dynamic IP address via DHCP in connmand ."
215 }
216 },
217 "summary": "ethernet_get_IP_in_connman_via_DHCP"
218 }
219 },
220 {
221 "test": {
222 "@alias": "bsps-hw.bsps-hw.connman_offline_mode_in_connman-gnome", 94 "@alias": "bsps-hw.bsps-hw.connman_offline_mode_in_connman-gnome",
223 "author": [ 95 "author": [
224 { 96 {
@@ -241,40 +113,6 @@
241 }, 113 },
242 { 114 {
243 "test": { 115 "test": {
244 "@alias": "bsps-hw.bsps-hw.standby",
245 "author": [
246 {
247 "email": "alexandru.c.georgescu@intel.com",
248 "name": "alexandru.c.georgescu@intel.com"
249 }
250 ],
251 "execution": {
252 "1": {
253 "action": "boot system and launch terminal; check output of \"date\" and launch script \"continue.sh\"",
254 "expected_results": ""
255 },
256 "2": {
257 "action": "echo \"mem\" > /sys/power/state",
258 "expected_results": ""
259 },
260 "3": {
261 "action": "After system go into S3 mode, move mouse or press any key to make it resume (on NUC press power button)",
262 "expected_results": ""
263 },
264 "4": {
265 "action": "Check \"date\" and script \"continue.sh\"",
266 "expected_results": ""
267 },
268 "5": {
269 "action": "Check if application can work as normal \ncontinue.sh as below: \n \n#!/bin/sh \n \ni=1 \nwhile [ 0 ] \ndo \n echo $i \n sleep 1 \n i=$((i+1)) \ndone ",
270 "expected_results": "Screen should resume back and script can run continuously incrementing the i's value from where it was before going to standby state. Date should be the same with the corresponding time increment."
271 }
272 },
273 "summary": "standby"
274 }
275 },
276 {
277 "test": {
278 "@alias": "bsps-hw.bsps-hw.check_CPU_utilization_after_standby", 116 "@alias": "bsps-hw.bsps-hw.check_CPU_utilization_after_standby",
279 "author": [ 117 "author": [
280 { 118 {
@@ -305,88 +143,6 @@
305 }, 143 },
306 { 144 {
307 "test": { 145 "test": {
308 "@alias": "bsps-hw.bsps-hw.Test_if_LAN_device_works_well_after_resume_from_suspend_state",
309 "author": [
310 {
311 "email": "alexandru.c.georgescu@intel.com",
312 "name": "alexandru.c.georgescu@intel.com"
313 }
314 ],
315 "execution": {
316 "1": {
317 "action": "boot system and launch terminal",
318 "expected_results": ""
319 },
320 "2": {
321 "action": "echo \"mem\" > /sys/power/state",
322 "expected_results": ""
323 },
324 "3": {
325 "action": "After system go into S3 mode, move mouse or press any key to make it resume",
326 "expected_results": ""
327 },
328 "4": {
329 "action": "check ping status \n\nNote: This TC apply only for core-image-full-cmd.",
330 "expected_results": "ping should always work before/after standby"
331 }
332 },
333 "summary": "Test_if_LAN_device_works_well_after_resume_from_suspend_state"
334 }
335 },
336 {
337 "test": {
338 "@alias": "bsps-hw.bsps-hw.Test_if_usb_hid_device_works_well_after_resume_from_suspend_state",
339 "author": [
340 {
341 "email": "alexandru.c.georgescu@intel.com",
342 "name": "alexandru.c.georgescu@intel.com"
343 }
344 ],
345 "execution": {
346 "1": {
347 "action": "boot system and launch terminal",
348 "expected_results": ""
349 },
350 "2": {
351 "action": "echo \"mem\" > /sys/power/state",
352 "expected_results": ""
353 },
354 "3": {
355 "action": "After system go into S3 mode, resume the device by pressing the power button or using HID devices",
356 "expected_results": "Devices resumes "
357 },
358 "4": {
359 "action": "check usb mouse and keyboard",
360 "expected_results": "Usb mouse and keyboard should work"
361 }
362 },
363 "summary": "Test_if_usb_hid_device_works_well_after_resume_from_suspend_state"
364 }
365 },
366 {
367 "test": {
368 "@alias": "bsps-hw.bsps-hw.click_terminal_icon_on_X_desktop",
369 "author": [
370 {
371 "email": "alexandru.c.georgescu@intel.com",
372 "name": "alexandru.c.georgescu@intel.com"
373 }
374 ],
375 "execution": {
376 "1": {
377 "action": "After system launch and X start up, click terminal icon on desktop",
378 "expected_results": ""
379 },
380 "2": {
381 "action": "Check if only one terminal window launched and no other problem met",
382 "expected_results": "There should be no problem after launching terminal . "
383 }
384 },
385 "summary": "click_terminal_icon_on_X_desktop"
386 }
387 },
388 {
389 "test": {
390 "@alias": "bsps-hw.bsps-hw.Add_multiple_files_in_media_player", 146 "@alias": "bsps-hw.bsps-hw.Add_multiple_files_in_media_player",
391 "author": [ 147 "author": [
392 { 148 {
@@ -839,40 +595,6 @@
839 }, 595 },
840 { 596 {
841 "test": { 597 "test": {
842 "@alias": "bsps-hw.bsps-hw.Check_if_RTC_(Real_Time_Clock)_can_work_correctly",
843 "author": [
844 {
845 "email": "yi.zhao@windriver.com",
846 "name": "yi.zhao@windriver.com"
847 }
848 ],
849 "execution": {
850 "1": {
851 "action": "Read time from RTC registers. root@localhost:/root> hwclock -r Sun Mar 22 04:05:47 1970 -0.001948 seconds ",
852 "expected_results": "Can read and set the time from RTC.\n"
853 },
854 "2": {
855 "action": "Set system current time root@localhost:/root> date 062309452008 ",
856 "expected_results": ""
857 },
858 "3": {
859 "action": "Synchronize the system current time to RTC registers root@localhost:/root> hwclock -w ",
860 "expected_results": ""
861 },
862 "4": {
863 "action": "Read time from RTC registers root@localhost:/root> hwclock -r ",
864 "expected_results": ""
865 },
866 "5": {
867 "action": "Reboot target and read time from RTC again\n",
868 "expected_results": ""
869 }
870 },
871 "summary": "Check_if_RTC_(Real_Time_Clock)_can_work_correctly"
872 }
873 },
874 {
875 "test": {
876 "@alias": "bsps-hw.bsps-hw.System_can_boot_up_via_NFS", 598 "@alias": "bsps-hw.bsps-hw.System_can_boot_up_via_NFS",
877 "author": [ 599 "author": [
878 { 600 {
diff --git a/meta/lib/oeqa/manual/build-appliance.json b/meta/lib/oeqa/manual/build-appliance.json
index 70f8c72c9b..82a556e93e 100644
--- a/meta/lib/oeqa/manual/build-appliance.json
+++ b/meta/lib/oeqa/manual/build-appliance.json
@@ -48,7 +48,7 @@
48 "expected_results": "" 48 "expected_results": ""
49 }, 49 },
50 "3": { 50 "3": {
51 "action": "Install a new package to the image, for example, acpid. Set the following line in conf/local.conf: IMAGE_INSTALL_append = \" acpid\"", 51 "action": "Install a new package to the image, for example, acpid. Set the following line in conf/local.conf: IMAGE_INSTALL:append = \" acpid\"",
52 "expected_results": "" 52 "expected_results": ""
53 }, 53 },
54 "4": { 54 "4": {
diff --git a/meta/lib/oeqa/manual/crops.json b/meta/lib/oeqa/manual/crops.json
deleted file mode 100644
index 5cfa653843..0000000000
--- a/meta/lib/oeqa/manual/crops.json
+++ /dev/null
@@ -1,294 +0,0 @@
1[
2 {
3 "test": {
4 "@alias": "crops-default.crops-default.sdkext_eSDK_devtool_build_make",
5 "author": [
6 {
7 "email": "francisco.j.pedraza.gonzalez@intel.com",
8 "name": "francisco.j.pedraza.gonzalez@intel.com"
9 }
10 ],
11 "execution": {
12 "1": {
13 "action": "IMPORTANT NOTE: The firsts 5 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n\n",
14 "expected_results": ""
15 },
16 "2": {
17 "action": " Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n",
18 "expected_results": ""
19 },
20 "3": {
21 "action": "Create the following tree of files <crops-esdk-workdir-workspace>/sdkext/files/myapp <crops-esdk-workdir-workspace>/sdkext/files/myapp_cmake \n\n\n",
22 "expected_results": ""
23 },
24 "4": {
25 "action": " Create the following files withing the myapp directory myapp.c and the Makefile. Write the following inside of each file: \n---------------------------------------- \nMakefile should contain \n\nall: myapp \n\nmyapp: myapp.o \n\t$(CC) $(LDFLAGS) $< -o $@ \n\nmyapp.o: myapp.c \n\t$(CC) $(CFLAGS) -c $< -o $@ \n\nclean: \n\trm -rf myapp.o myapp \n\n----------------------------- \nmyapp.c shold contain \n\n\n#include <stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n \n\treturn 0; \n} \n------------------------------------ \n\n",
26 "expected_results": "be sure that the indentations on the makefile are tabs not spaces. \n\n"
27 },
28 "5": {
29 "action": " Create the following files within the myapp_cmake directory CMakeLists.txt and myapp.c. Write the following inside each file: \n\n------------------------------------ \nCMakeLists.txt should contain: \n\ncmake_minimum_required (VERSION 2.6) \nproject (myapp) \n# The version number. \nset (myapp_VERSION_MAJOR 1) \nset (myapp_VERSION_MINOR 0) \n\n# add the executable \nadd_executable (myapp myapp.c) \n\ninstall(TARGETS myapp \nRUNTIME DESTINATION bin) \n\n------------------------------------------ \nmyapp.c should contain: \n\n#include <stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n\n\treturn 0; \n} \n------------------------------------------------- \n\n",
30 "expected_results": "Be sure that the indentations on CMakeLists.txt is tabs not spaces."
31 },
32 "6": {
33 "action": " source environment-setup-i586-poky-linux \n\n",
34 "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n"
35 },
36 "7": {
37 "action": " run command which devtool \n\n",
38 "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n "
39 },
40 "8": {
41 "action": "devtool add myapp <directory>(this is myapp dir) \n\n\n",
42 "expected_results": "The directory you should input is the myapp directory. This should automatically create the recipe myapp.bb under <crops-esdk-workdir-workspace>/recipes/myapp/myapp.bb"
43 },
44 "9": {
45 "action": " devtool build myapp \n\n",
46 "expected_results": "This should compile an image"
47 },
48 "10": {
49 "action": " devtool reset myapp ",
50 "expected_results": "This cleans sysroot of the myapp recipe, but it leaves the source tree intact. meaning it does not erase."
51 }
52 },
53 "summary": "sdkext_eSDK_devtool_build_make"
54 }
55 },
56 {
57 "test": {
58 "@alias": "crops-default.crops-default.sdkext_devtool_build_esdk_package",
59 "author": [
60 {
61 "email": "francisco.j.pedraza.gonzalez@intel.com",
62 "name": "francisco.j.pedraza.gonzalez@intel.com"
63 }
64 ],
65 "execution": {
66 "1": {
67 "action": "IMPORTANT NOTE: The firsts 5 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n\n",
68 "expected_results": ""
69 },
70 "2": {
71 "action": " Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n",
72 "expected_results": ""
73 },
74 "3": {
75 "action": " Create the following tree of files <crops-esdk-workdir-workspace>/sdkext/files/myapp/ \n <crops-esdk-workdir-workspace>/sdkext/files/myapp_cmake \n\n",
76 "expected_results": ""
77 },
78 "4": {
79 "action": " Create the following files withing the myapp directory myapp.c and the Makefile. Write the following inside of each file: \n---------------------------------------- \nMakefile should contain \n\nall: myapp \n\nmyapp: myapp.o \n\t$(CC) $(LDFLAGS) $< -o $@ \n\nmyapp.o: myapp.c \n\t$(CC) $(CFLAGS) -c $< -o $@ \n\nclean: \n\trm -rf myapp.o myapp \n\n----------------------------- \nmyapp.c shold contain \n\n#include <stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n \n\treturn 0; \n} \n------------------------------------ \n\n",
80 "expected_results": "be sure that the indentations on the makefile are tabs not spaces. \n\n"
81 },
82 "5": {
83 "action": " Create the following files within the myapp_cmake directory CMakeLists.txt and myapp.c. Write the following inside each file: \n\n------------------------------------ \nCMakeLists.txt should contain: \n\ncmake_minimum_required (VERSION 2.6) \nproject (myapp) \n# The version number. \nset (myapp_VERSION_MAJOR 1) \nset (myapp_VERSION_MINOR 0) \n\n# add the executable \nadd_executable (myapp myapp.c) \n\ninstall(TARGETS myapp \nRUNTIME DESTINATION bin) \n\n------------------------------------------ \nmyapp.c should contain: \n\n#include<stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n\n\treturn 0; \n} \n------------------------------------------------- \n\n",
84 "expected_results": "Be sure that the indentations on CMakeLists.txt is tabs not spaces. \n\n"
85 },
86 "6": {
87 "action": " source environment-setup-i586-poky-linux \n\n",
88 "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n"
89 },
90 "7": {
91 "action": " run command which devtool \n\n",
92 "expected_results": " this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n"
93 },
94 "8": {
95 "action": " devtool add myapp <directory> (this is myapp dir) \n\n",
96 "expected_results": " The directory you should input is the myapp directory. This should automatically create the recipe myapp.bb under <crops-esdk-workdir-workspace>/recipes/myapp/myapp.bb \n\n"
97 },
98 "9": {
99 "action": " devtool package myapp \n\n",
100 "expected_results": " you should expect a package creation of myapp and it should be under the /tmp/deploy/ \n\n"
101 },
102 "10": {
103 "action": " devtool reset myapp ",
104 "expected_results": "This cleans sysroot of the myapp recipe, but it leaves the source tree intact. meaning it does not erase.\n</package_format>"
105 }
106 },
107 "summary": "sdkext_devtool_build_esdk_package"
108 }
109 },
110 {
111 "test": {
112 "@alias": "crops-default.crops-default.sdkext_devtool_build_cmake",
113 "author": [
114 {
115 "email": "francisco.j.pedraza.gonzalez@intel.com",
116 "name": "francisco.j.pedraza.gonzalez@intel.com"
117 }
118 ],
119 "execution": {
120 "1": {
121 "action": "IMPORTANT NOTE: The firsts 5 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n\n",
122 "expected_results": ""
123 },
124 "2": {
125 "action": " Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n",
126 "expected_results": ""
127 },
128 "3": {
129 "action": " Create the following tree of files <crops-esdk-workdir-workspace>/sdkext/files/myapp \n <crops-esdk-workdir-workspace>/sdkext/files/myapp_cmake \n\n",
130 "expected_results": ""
131 },
132 "4": {
133 "action": " Create the following files withing the myapp directory myapp.c and the Makefile. Write the following inside of each file: \n---------------------------------------- \nMakefile should contain \n\nall: myapp \n\nmyapp: myapp.o \n\t$(CC) $(LDFLAGS) $< -o $@ \n\nmyapp.o: myapp.c \n\t$(CC) $(CFLAGS) -c $< -o $@ \n\nclean: \n\trm -rf myapp.o myapp \n\n----------------------------- \nmyapp.c shold contain \n\n#include <stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n \n\treturn 0; \n} \n------------------------------------ \n\n",
134 "expected_results": "be sure that the indentations on the makefile are tabs not spaces. \n\n"
135 },
136 "5": {
137 "action": " Create the following files within the myapp_cmake directory CMakeLists.txt and myapp.c. Write the following inside each file: \n\n------------------------------------ \nCMakeLists.txt should contain: \n\ncmake_minimum_required (VERSION 2.6) \nproject (myapp) \n# The version number. \nset (myapp_VERSION_MAJOR 1) \nset (myapp_VERSION_MINOR 0) \n\n# add the executable \nadd_executable (myapp myapp.c) \n\ninstall(TARGETS myapp \nRUNTIME DESTINATION bin) \n\n------------------------------------------ \nmyapp.c should contain: \n\n#include \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n\n\treturn 0; \n} \n------------------------------------------------- \n\n",
138 "expected_results": "Be sure that the indentations on CMakeLists.txt is tabs not spaces. \n\n"
139 },
140 "6": {
141 "action": " source environment-setup-i586-poky-linux \n\n",
142 "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n"
143 },
144 "7": {
145 "action": " run command which devtool \n\n",
146 "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n"
147 },
148 "8": {
149 "action": " devtool add myapp <directory> (this is myapp_cmake dir) \n\n",
150 "expected_results": "The directory you should input is the myapp_cmake directory. This should automatically create the recipe myapp.bb under <crops-esdk-workdir-workspace>/recipes/myapp/myapp.bb \n\n"
151 },
152 "9": {
153 "action": " devtool build myapp \n\n",
154 "expected_results": "This should compile an image \n\n"
155 },
156 "10": {
157 "action": " devtool reset myapp ",
158 "expected_results": "This cleans sysroot of the myapp recipe, but it leaves the source tree intact. meaning it does not erase. "
159 }
160 },
161 "summary": "sdkext_devtool_build_cmake"
162 }
163 },
164 {
165 "test": {
166 "@alias": "crops-default.crops-default.sdkext_extend_autotools_recipe_creation",
167 "author": [
168 {
169 "email": "francisco.j.pedraza.gonzalez@intel.com",
170 "name": "francisco.j.pedraza.gonzalez@intel.com"
171 }
172 ],
173 "execution": {
174 "1": {
175 "action": "IMPORTANT NOTE: The firsts 2 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n\n",
176 "expected_results": ""
177 },
178 "2": {
179 "action": "Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n",
180 "expected_results": ""
181 },
182 "3": {
183 "action": " source environment-setup-i586-poky-linux \n\n",
184 "expected_results": " This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n"
185 },
186 "4": {
187 "action": "run command which devtool \n\n",
188 "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n"
189 },
190 "5": {
191 "action": "devtool sdk-install -s libxml2 \n\n",
192 "expected_results": "this should install libxml2 \n\n"
193 },
194 "6": {
195 "action": "devtool add librdfa https://github.com/rdfa/librdfa \n\n",
196 "expected_results": "This should automatically create the recipe librdfa.bb under /recipes/librdfa/librdfa.bb \n\n"
197 },
198 "7": {
199 "action": "devtool build librdfa \n\n",
200 "expected_results": "This should compile \n\n"
201 },
202 "8": {
203 "action": "devtool reset librdfa ",
204 "expected_results": "This cleans sysroot of the librdfa recipe, but it leaves the source tree intact. meaning it does not erase."
205 }
206 },
207 "summary": "sdkext_extend_autotools_recipe_creation"
208 }
209 },
210 {
211 "test": {
212 "@alias": "crops-default.crops-default.sdkext_devtool_kernelmodule",
213 "author": [
214 {
215 "email": "francisco.j.pedraza.gonzalez@intel.com",
216 "name": "francisco.j.pedraza.gonzalez@intel.com"
217 }
218 ],
219 "execution": {
220 "1": {
221 "action": "IMPORTANT NOTE: The firsts 2 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n",
222 "expected_results": ""
223 },
224 "2": {
225 "action": " Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n",
226 "expected_results": ""
227 },
228 "3": {
229 "action": "source environment-setup-i586-poky-linux \n\n",
230 "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n \n"
231 },
232 "4": {
233 "action": "run command which devtool \n\n",
234 "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n"
235 },
236 "5": {
237 "action": "devtool add kernel-module-hello-world https://git.yoctoproject.org/git/kernel-module-hello-world \n\n",
238 "expected_results": "This should automatically create the recipe kernel-module-hello-world.bb under <crops-esdk-workdir-workspace>/recipes/kernel-module-hello-world/kernel-module-hello-world.bb "
239 },
240 "6": {
241 "action": "devtool build kernel-module-hello-world \n\n",
242 "expected_results": "This should compile an image \n\n"
243 },
244 "7": {
245 "action": "devtool reset kernel-module-hello-world ",
246 "expected_results": "This cleans sysroot of the kernel-module-hello-world recipe, but it leaves the source tree intact. meaning it does not erase."
247 }
248 },
249 "summary": "sdkext_devtool_kernelmodule"
250 }
251 },
252 {
253 "test": {
254 "@alias": "crops-default.crops-default.sdkext_recipes_for_nodejs",
255 "author": [
256 {
257 "email": "francisco.j.pedraza.gonzalez@intel.com",
258 "name": "francisco.j.pedraza.gonzalez@intel.com"
259 }
260 ],
261 "execution": {
262 "1": {
263 "action": "IMPORTANT NOTE: The firsts 2 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\nlets say variable npm = npm://registry.npmjs.org;name=winston;version=2.2.0 \n\n",
264 "expected_results": ""
265 },
266 "2": {
267 "action": "Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n",
268 "expected_results": ""
269 },
270 "3": {
271 "action": "source environment-setup-i586-poky-linux \n\n",
272 "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n"
273 },
274 "4": {
275 "action": "run command which devtool \n\n",
276 "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n"
277 },
278 "5": {
279 "action": " 4a) git clone git://git.openembedded.org/meta-openembedded in layers/build directory \n \n4b) Add meta-openembedded/meta-oe in bblayer.conf as mentioned below: ${SDKBASEMETAPATH}/layers/build/meta-openembedded/meta-oe \\ \n\n4c) devtool add \"npm://registry.npmjs.org;name=npm;version=2.2.0\" \n\n",
280 "expected_results": " This should automatically create the recipe npm.bb under /recipes/npm/npm.bb \n\n"
281 },
282 "6": {
283 "action": "devtool build npm \n\n",
284 "expected_results": "This should compile an image \n\n"
285 },
286 "7": {
287 "action": " devtool reset npm",
288 "expected_results": "This cleans sysroot of the npm recipe, but it leaves the source tree intact. meaning it does not erase."
289 }
290 },
291 "summary": "sdkext_recipes_for_nodejs"
292 }
293 }
294]
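The suite removed above (and eclipse-plugin.json below) follows the same layout as the manual JSON files that remain in the tree: a list of {"test": {...}} objects carrying @alias, author, execution and summary, with execution steps keyed by stringified integers. A minimal standalone checker for that layout could look like the following sketch; it is not part of oeqa, and the script and the REQUIRED key set are illustrative assumptions:

    import json
    import sys

    # Hypothetical standalone checker for the oeqa manual test JSON layout:
    # a list of {"test": {...}} entries, each carrying these four keys.
    REQUIRED = {"@alias", "author", "execution", "summary"}

    def check_manual_json(path):
        with open(path) as f:
            cases = json.load(f)
        for entry in cases:
            test = entry["test"]
            missing = REQUIRED - test.keys()
            if missing:
                print("%s: missing keys %s" % (test.get("summary", "?"), sorted(missing)))
            # steps are keyed "1", "2", ... and each needs an action plus
            # an (optionally empty) expected_results string
            for step in sorted(test["execution"], key=int):
                body = test["execution"][step]
                for key in ("action", "expected_results"):
                    if key not in body:
                        print("%s step %s: missing %s" % (test["summary"], step, key))

    if __name__ == "__main__":
        for path in sys.argv[1:]:
            check_manual_json(path)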
diff --git a/meta/lib/oeqa/manual/eclipse-plugin.json b/meta/lib/oeqa/manual/eclipse-plugin.json
deleted file mode 100644
index d77d0e673b..0000000000
--- a/meta/lib/oeqa/manual/eclipse-plugin.json
+++ /dev/null
@@ -1,322 +0,0 @@
1[
2 {
3 "test": {
4 "@alias": "eclipse-plugin.eclipse-plugin.support_SSH_connection_to_Target",
5 "author": [
6 {
7 "email": "ee.peng.yeoh@intel.com",
8 "name": "ee.peng.yeoh@intel.com"
9 }
10 ],
11 "execution": {
12 "1": {
13 "action": "In Eclipse, swich to Remote System Explorer to create a connention baseed on SSH, input the remote target IP address as the Host name, make sure disable the proxy in Window->Preferences->General->Network Connection, set Direct as Active Provider field. ",
14 "expected_results": "the connection based on SSH could be set up."
15 },
16 "2": {
17 "action": "Configure connection from Eclipse: Run->Run Configurations->C/C++ Remote Application\\ ->New Connection->General->SSH Only ",
18 "expected_results": ""
19 },
20 "3": {
21 "action": "Then right click to connect, input the user ID and password. ",
22 "expected_results": ""
23 },
24 "4": {
25 "action": "expand the connection, it will show the Sftp Files etc. \nNOTE. Might need to change dropbear to openssh and add the packagegroup-core-eclipse-debug recipe",
26 "expected_results": ""
27 }
28 },
29 "summary": "support_SSH_connection_to_Target"
30 }
31 },
32 {
33 "test": {
34 "@alias": "eclipse-plugin.eclipse-plugin.Launch_QEMU_from_Eclipse",
35 "author": [
36 {
37 "email": "ee.peng.yeoh@intel.com",
38 "name": "ee.peng.yeoh@intel.com"
39 }
40 ],
41 "execution": {
42 "1": {
43 "action": "Set the Yocto ADT's toolchain root location, sysroot location and kernel, in the menu Window -> Preferences -> Yocto ADT. \n \n",
44 "expected_results": ""
45 },
46 "2": {
47 "action": "wget autobuilder.yoctoproject.org/pub/releases//machines/qemu/qemux86/qemu (ex:core-image-sato-sdk-qemux86-date-rootfs-tar-bz2) \nsource /opt/poky/version/environment-setup-i585-poky-linux \n\nExtract qemu with runqemu-extract-sdk /home/user/file(ex.core-image-sato-sdk-qemux86.bz2) \n/home/user/qemux86-sato-sdk \n\n",
48 "expected_results": " Qemu can be lauched normally."
49 },
50 "3": {
51 "action": "(a)Point to the Toolchain: \n \nIf you are using a stand-alone pre-built toolchain, you should be pointing to the /opt/poky/{test-version} directory as Toolchain Root Location. This is the default location for toolchains installed by the ADT Installer or by hand. If ADT is installed in other location, use that location as Toolchain location.\nIf you are using a system-derived toolchain, the path you provide for the Toolchain Root Location field is the Yocto Project's build directory. \n \n E.g:/home/user/yocto/poky/build \n",
52 "expected_results": ""
53 },
54 "4": {
55 "action": "(b)Specify the Sysroot Location: \nSysroot Location is the location where the root filesystem for the target hardware is created on the development system by the ADT Installer (SYSROOT in step 2 of the case ADT installer Installation). \n \n Local : e.g: /home/user/qemux86-sato-sdk \nUsing ADT : e.g :/home/user/test-yocto/qemux86 \n\n",
56 "expected_results": ""
57 },
58 "5": {
59 "action": "(c)Select the Target Architecture: \n \nThe target architecture is the type of hardware you are going to use or emulate. Use the pull-down Target Architecture menu to make your selection. \n \n\n",
60 "expected_results": ""
61 },
62 "6": {
63 "action": "(d) QEMU: \nSelect this option if you will be using the QEMU emulator. Specify the Kernel matching the QEMU architecture you are using. \n wget autobuilder.yoctoproject.org/pub/releases//machines/qemu/qemux86/bzImage-qemux86.bin \n e.g: /home/$USER/yocto/adt-installer/download_image/bzImage-qemux86.bin \n\n",
64 "expected_results": ""
65 },
66 "7": {
67 "action": "(e) select OK to save the settings. \n\n\n1: In the Eclipse toolbar, expose the Run -> External Tools menu. Your image should appear as a selectable menu item. \n2: Select your image in the navigation pane to launch the emulator in a new window. \n3: If needed, enter your host root password in the shell window at the prompt. This sets up a Tap 0 connection needed for running in user-space NFS mode. \n",
68 "expected_results": ""
69 }
70 },
71 "summary": "Launch_QEMU_from_Eclipse"
72 }
73 },
74 {
75 "test": {
76 "@alias": "eclipse-plugin.eclipse-plugin.Relocatable_SDK_-_C_-_Build_Hello_World_ANSI_C_Autotools_Project",
77 "author": [
78 {
79 "email": "ee.peng.yeoh@intel.com",
80 "name": "ee.peng.yeoh@intel.com"
81 }
82 ],
83 "execution": {
84 "1": {
85 "action": "Launch a QEMU of target environment.(Reference to case \"ADT - Launch qemu by eclipse\") ",
86 "expected_results": ""
87 },
88 "2": {
89 "action": "Select File -> New -> Project.",
90 "expected_results": ""
91 },
92 "3": {
93 "action": "Double click C/C++.",
94 "expected_results": ""
95 },
96 "4": {
97 "action": "Click C or C++ Project to create the project.",
98 "expected_results": ""
99 },
100 "5": {
101 "action": "Expand Yocto ADT Project.",
102 "expected_results": ""
103 },
104 "6": {
105 "action": "Select Hello World ANSI C Autotools Project.",
106 "expected_results": ""
107 },
108 "7": {
109 "action": "Put a name in the Project name. Do not use hyphens as part of the name. \n \n",
110 "expected_results": ""
111 },
112 "8": {
113 "action": "Click Next.",
114 "expected_results": ""
115 },
116 "9": {
117 "action": "Add information in the Author and Copyright notice fields. \n1",
118 "expected_results": ""
119 },
120 "10": {
121 "action": "Click Finish. \n1",
122 "expected_results": ""
123 },
124 "11": {
125 "action": "If the \"open perspective\" prompt appears, click \"Yes\" so that you open the C/C++ perspective. \n1",
126 "expected_results": ""
127 },
128 "12": {
129 "action": "In the Project Explorer window, right click the project -> Reconfigure project. \n1",
130 "expected_results": ""
131 },
132 "13": {
133 "action": "In the Project Explorer window, right click the project -> Build project. \n1",
134 "expected_results": "Under the Project files, a new folder appears called Binaries. This indicates that the compilation have been successful and the project binary have been created. \n"
135 },
136 "14": {
137 "action": "Right click it again and Run as -> Run Configurations. \n\t\t\tUnder Run Configurations expand \"C/C++ Remote Application\". A configuration for the current project should appear. Clicking it will display the configuration settings. \n\t\t\tin \"C/C++ Application\" field input Remote Absolute File path for C/C++ Application. e.g.: /home/root/myapplication \n\t\t\tIn \"Connection\" drop-down list make sure a TCF connection is set up for your target. If not, create a new one by clicking the New button. \n1",
138 "expected_results": "step 14 to step 16 -> Build succeed and the console outputs Hello world, you can also check the output on target."
139 },
140 "15": {
141 "action": "After all settings are done, select the Run button on the bottom right corner \n\n1",
142 "expected_results": ""
143 },
144 "16": {
145 "action": "Repeat the steps 14-15, but instead of using Run Configurations use Debug Configurations: \nRight click it again and Debug as -> Debug Configurations \nUnder Debug Configurations expand \"C/C++ Remote Application\". A configuration for the current project should appear. Clicking it will display the configuration settings. \nin \"C/C++ Application\" field input Remote Absolute File path for C/C++ Application.\ne.g.: /home/root/myapplication \nIn \"Connection\" drop-down list make sure a TCF connection is set up for your target. If not, create a new one by clicking the New button \n1",
146 "expected_results": ""
147 },
148 "17": {
149 "action": "After all settings are done, select the Debug button on the bottom right corner",
150 "expected_results": ""
151 }
152 },
153 "summary": "Relocatable_SDK_-_C_-_Build_Hello_World_ANSI_C_Autotools_Project"
154 }
155 },
156 {
157 "test": {
158 "@alias": "eclipse-plugin.eclipse-plugin.Relocatable_SDK_-_C++_-_Build_Hello_World_C++_Autotools_project",
159 "author": [
160 {
161 "email": "ee.peng.yeoh@intel.com",
162 "name": "ee.peng.yeoh@intel.com"
163 }
164 ],
165 "execution": {
166 "1": {
167 "action": "Launch a QEMU of target environment.(Reference to case \"ADT - Launch qemu by eclipse\") ",
168 "expected_results": ""
169 },
170 "2": {
171 "action": "Select File -> New -> Project. ",
172 "expected_results": ""
173 },
174 "3": {
175 "action": "Double click C/C++. ",
176 "expected_results": ""
177 },
178 "4": {
179 "action": "Click C or C++ Project to create the project. ",
180 "expected_results": ""
181 },
182 "5": {
183 "action": "Expand Yocto ADT Project. ",
184 "expected_results": ""
185 },
186 "6": {
187 "action": "Select Hello World ANSI C++ Autotools Project. ",
188 "expected_results": ""
189 },
190 "7": {
191 "action": "Put a name in the Project name. Do not use hyphens as part of the name. \n \n",
192 "expected_results": ""
193 },
194 "8": {
195 "action": "Click Next.",
196 "expected_results": ""
197 },
198 "9": {
199 "action": "Add information in the Author and Copyright notice fields.",
200 "expected_results": ""
201 },
202 "10": {
203 "action": "Click Finish. \n1",
204 "expected_results": ""
205 },
206 "11": {
207 "action": "If the \"open perspective\" prompt appears, click \"Yes\" so that you open the C/C++ perspective. \n1",
208 "expected_results": ""
209 },
210 "12": {
211 "action": "In the Project Explorer window, right click the project -> Reconfigure project. \n1",
212 "expected_results": ""
213 },
214 "13": {
215 "action": "In the Project Explorer window, right click the project -> Build project. \n\n1",
216 "expected_results": "under the Project files, a new folder appears called Binaries. This indicates that the compilation have been successful and the project binary have been created. \n"
217 },
218 "14": {
219 "action": "Right click it again and Run as -> Run Configurations. \n\t\t\tUnder Run Configurations expand \"C/C++ Remote Application\". A configuration for the current project should appear. Clicking it will display the configuration settings. \n\t\t\tin \"C/C++ Application\" field input Remote Absolute File path for C/C++ Application. e.g.: /home/root/myapplication \n\t\t\tIn \"Connection\" drop-down list make sure a TCF connection is set up for your target. If not, create a new one by clicking the New button. \n1",
220 "expected_results": "step 14 to step 16 -> Build succeed and the console outputs Hello world, you can also check the output on target."
221 },
222 "15": {
223 "action": "After all settings are done, select the Run button on the bottom right corner \n\n1",
224 "expected_results": ""
225 },
226 "16": {
227 "action": "Repeat the steps 14-15, but instead of using Run Configurations use Debug Configurations: \n\t\tRight click it again and Debug as -> Debug Configurations \n\t\tUnder Debug Configurations expand \"C/C++ Remote Application\". A configuration for the current project should appear. Clicking it will display the configuration settings. \n\t\tin \"C/C++ Application\" field input Remote Absolute File path for C/C++ Application. \n\t\te.g.: /home/root/myapplication \n\t\tIn \"Connection\" drop-down list make sure a TCF connection is set up for your target. If not, create a new one by clicking the New button \n1",
228 "expected_results": ""
229 },
230 "17": {
231 "action": "After all settings are done, select the Debug button on the bottom right corner",
232 "expected_results": ""
233 }
234 },
235 "summary": "Relocatable_SDK_-_C++_-_Build_Hello_World_C++_Autotools_project"
236 }
237 },
238 {
239 "test": {
240 "@alias": "eclipse-plugin.eclipse-plugin.Build_Eclipse_Plugin_from_source",
241 "author": [
242 {
243 "email": "laurentiu.serban@intel.com",
244 "name": "laurentiu.serban@intel.com"
245 }
246 ],
247 "execution": {
248 "1": {
249 "action": "Clone eclipse-poky source. \n \n - git clone git://git.yoctoproject.org/eclipse-poky \n\n",
250 "expected_results": "Eclipse plugin is successfully installed \n\nDocumentation is there. For example if you have release yocto-2.0.1 you will found on http://autobuilder.yoctoproject.org/pub/releases/yocto-2.0.1/eclipse-plugin/mars/ archive with documentation like org.yocto.doc-development-$date.zip \n \n"
251 },
252 "2": {
253 "action": "Checkout correct tag. \n\n - git checkout <eclipse-version>/<yocto-version> \n\n",
254 "expected_results": "After plugin is build you must have 4 archive in foder scripts from eclipse-poky: \n - org.yocto.bc - mars-master-$date.zip \n - org.yocto.doc - mars-master-$date.zip --> documentation \n - org.yocto.sdk - mars-master-$date.zip \n - org.yocto.sdk - mars-master-$date.-archive.zip --> plugin "
255 },
256 "3": {
257 "action": "Move to scripts/ folder. \n\n",
258 "expected_results": ""
259 },
260 "4": {
261 "action": "Run ./setup.sh \n\n",
262 "expected_results": ""
263 },
264 "5": {
265 "action": "When the script finishes, it prompts a command to issue to build the plugin. It should look similar to the following: \n\n$ ECLIPSE_HOME=/eclipse-poky/scripts/eclipse ./build.sh /&1 | tee -a build.log \n\nHere, the three arguments to the build script are tag name, branch for documentation and release name. \n\n",
266 "expected_results": ""
267 },
268 "6": {
269 "action": "On an eclipse without the Yocto Plugin, select \"Install New Software\" from Help pull-down menu \n\n",
270 "expected_results": ""
271 },
272 "7": {
273 "action": "Select Add and from the dialog choose Archive... Look for the *archive.zip file that was built previously with the build.sh script. Click OK. \n\n",
274 "expected_results": ""
275 },
276 "8": {
277 "action": "Select all components and proceed with Installation of plugin. Restarting eclipse might be required.\n",
278 "expected_results": ""
279 }
280 },
281 "summary": "Build_Eclipse_Plugin_from_source"
282 }
283 },
284 {
285 "test": {
286 "@alias": "eclipse-plugin.eclipse-plugin.Eclipse_Poky_installation_and_setup",
287 "author": [
288 {
289 "email": "ee.peng.yeoh@intel.com",
290 "name": "ee.peng.yeoh@intel.com"
291 }
292 ],
293 "execution": {
294 "1": {
295 "action": "Install SDK \n\ta)Download https://autobuilder.yocto.io/pub/releases//toolchain/x86_64/poky-glibc-x86_64-core-\timage-sato-i586-toolchain-.sh \n\tb)Run the SDK installer and accept the default installation directory ",
296 "expected_results": ""
297 },
298 "2": {
299 "action": "Install \"Eclipse IDE for C/C++ Developers\" Oxygen release (4.7.0) \n\ta) Go to https://www.eclipse.org/downloads/packages/all, click \"Oxygen R\" \n\tb) Click to download the build for your OS \n\tc) Click \"Download\" button to download from a mirror \n\td) Run \"tar xf\" to extract the downloaded archive ",
300 "expected_result": ""
301 },
302 "3": {
303 "action": "Install \"Eclipse IDE for C/C++ Developers\" Oxygen release (4.7.0) (Continue) \n\te) Run \"eclipse/eclipse\" to start Eclipse \n\tf) Optional step for host machine within Intel network: In Eclipse workbench window, go to \"Window\" menu -> \"Preferences...\". \n\tg) In \"Preferences\" dialog, go to \"General\" -> \"Network Connections\", set \"Active Provider\" to \"Manual\". In \"Proxy \tentries\" table, select HTTP and click \"Edit\" and enter host \"proxy-chain.intel.com\" port 911, click OK. Repeat for HTTPS with port 912 \nClick OK to close \"Preferences\" dialog. \n\th) Go to \"File\" menu -> \"Restart\" to restart Eclipse for proxy settings to take effect. ",
304 "expected_result": ""
305 },
306 "4": {
307 "action": "Install Eclipse Poky plugins \n\ta) Download https://autobuilder.yocto.io/pub/releases/<yocto-version>/eclipse-plugin/<eclipse-version>/org.yocto.sdk-development-<date>-archive.zip \n\tb) In Eclipse workbench window, go to \"Help\" menu -> \"Install New Software...\" \n\tc) In \"Install\" dialog, click \"Add...\" button \n\td) In \"Add Repository\" dialog, enter \"Eclipse Poky\" for (repository) Name, click \"Archive...\" ",
308 "expected_results": ""
309 },
310 "5": {
311 "action": "Install Eclipse Poky plugins (continue) \n\te) In \"Repository archive\" browse dialog, select the downloaded Eclipse Poky repository archive \n\tf) Back in \"Add Repository\" dialog, click \"OK\" \n\tg) Back in \"Install\" dialog, make sure \"Work with:\" is set to \"Eclipse Poky\" repository, tick \"Yocto Project \tDocumentation Plug-in\" and \"Yocto Project SDK Plug-in\", click \"Next >\" and verify plugins/features name/version, \tclick \"Next >\" and accept license agreement, click \"Finish\" \n\th) If \"Security Warning\" dialog appears, click \"OK\" to install unsigned content. \n\ti) In \"Software Updates\" dialog, click \"Yes\" to restart Eclipse to complete Eclipse Poky plugins installation. ",
312 "expected_results": ""
313 },
314 "6": {
315 "action": "Setup Eclipse Poky to use SDK \n\ta) In Eclipse workbench window, go to \"Window\" menu -> \"Preferences\". \n\tb) In \"Preferences\" window, go to \"Yocto Project SDK\", in \"Cross Compiler Options\" frame, select \"Standalone pre-\tbuilt toolchain\". ",
316 "expected_results": "Eclipse Poky plugins installed and running successfully, e.g. observe that \"Yocto Project Tools\" menu is available on Eclipse workbench window."
317 }
318 },
319 "summary": "Eclipse_Poky_installation_and_setup"
320 }
321 }
322]
diff --git a/meta/lib/oeqa/manual/sdk.json b/meta/lib/oeqa/manual/sdk.json
index 434982f7f5..21d892d26d 100644
--- a/meta/lib/oeqa/manual/sdk.json
+++ b/meta/lib/oeqa/manual/sdk.json
@@ -26,7 +26,7 @@
26 "expected_results": "Expect both qemu to boot up successfully." 26 "expected_results": "Expect both qemu to boot up successfully."
27 } 27 }
28 }, 28 },
29 "summary": "test_install_cross_toolchain_can_run_multiple_qemu_for_x86" 29 "summary": "test_install_cross_toolchain_can_run_multiple_qemu_for:x86"
30 } 30 }
31 } 31 }
32] \ No newline at end of file 32] \ No newline at end of file
diff --git a/meta/lib/oeqa/manual/toaster-managed-mode.json b/meta/lib/oeqa/manual/toaster-managed-mode.json
index 12374c7c64..1a71985c3c 100644
--- a/meta/lib/oeqa/manual/toaster-managed-mode.json
+++ b/meta/lib/oeqa/manual/toaster-managed-mode.json
@@ -136,7 +136,7 @@
136 "expected_results": "" 136 "expected_results": ""
137 }, 137 },
138 "3": { 138 "3": {
139 "action": "Check that default values are as follows: \n\tDISTRO - poky \n\tIMAGE_FSTYPES - ext3 jffs2 tar.bz2 \n\tIMAGE_INSTALL_append - \"Not set\" \n\tPACKAGE_CLASES - package_rpm \n SSTATE_DIR - /homeDirectory/poky/sstate-cache \n\n", 139 "action": "Check that default values are as follows: \n\tDISTRO - poky \n\tIMAGE_FSTYPES - ext3 jffs2 tar.bz2 \n\tIMAGE_INSTALL:append - \"Not set\" \n\tPACKAGE_CLASSES - package_rpm \n SSTATE_DIR - /homeDirectory/poky/sstate-cache \n\n",
140 "expected_results": "" 140 "expected_results": ""
141 }, 141 },
142 "4": { 142 "4": {
@@ -186,7 +186,7 @@
186 "expected_results": "" 186 "expected_results": ""
187 }, 187 },
188 "7": { 188 "7": {
189 "action": "IMAGE_INSTALL_append: \n\t- check that the \"change\" icon is present (represented by a pen icon) \n\t- click on the \"change\" icon and check that the variable becomes a text field, populated with the current value of the variable. \n\n\t- check that the save button is disabled when the text field is empty \n\t- insert test in the text field (for example \"package1\") and hit save; be aware that there is no input validation for this variable \n\t- check that a new \"delete\" icon(a trashcan) has appeared next to the pen icon \n\t- check that clicking on the trashcan icon resets the value to \"Not set\" and makes the trashcan icon dissapear \n\n", 189 "action": "IMAGE_INSTALL:append: \n\t- check that the \"change\" icon is present (represented by a pen icon) \n\t- click on the \"change\" icon and check that the variable becomes a text field, populated with the current value of the variable. \n\n\t- check that the save button is disabled when the text field is empty \n\t- insert test in the text field (for example \"package1\") and hit save; be aware that there is no input validation for this variable \n\t- check that a new \"delete\" icon(a trashcan) has appeared next to the pen icon \n\t- check that clicking on the trashcan icon resets the value to \"Not set\" and makes the trashcan icon dissapear \n\n",
190 "expected_results": "" 190 "expected_results": ""
191 }, 191 },
192 "8": { 192 "8": {
@@ -1574,7 +1574,7 @@
1574 "expected_results": "Open bitbake variables page. \n\n\t" 1574 "expected_results": "Open bitbake variables page. \n\n\t"
1575 }, 1575 },
1576 "5": { 1576 "5": {
1577 "action": "Click on change button for IMAGE_INSTALL_append and add a variable (ex: acpid). \n\n", 1577 "action": "Click on change button for IMAGE_INSTALL:append and add a variable (ex: acpid). \n\n",
1578 "expected_results": "Variable added. \n\n\t" 1578 "expected_results": "Variable added. \n\n\t"
1579 }, 1579 },
1580 "6": { 1580 "6": {
@@ -1590,7 +1590,7 @@
1590 "expected_results": "You should get results for ssh packages." 1590 "expected_results": "You should get results for ssh packages."
1591 } 1591 }
1592 }, 1592 },
1593 "summary": "Test_IMAGE_INSTALL_append_variable" 1593 "summary": "Test_IMAGE_INSTALL:append_variable"
1594 } 1594 }
1595 }, 1595 },
1596 { 1596 {
@@ -2348,7 +2348,7 @@
2348 "expected_results": "" 2348 "expected_results": ""
2349 }, 2349 },
2350 "3": { 2350 "3": {
2351 "action": "Build 6 recipes example (core-image-sato, core-image-minimal, core-image-base, core-image-clutter) to name a few. ", 2351 "action": "Build 6 recipes example (core-image-sato, core-image-minimal, core-image-base) to name a few. ",
2352 "expected_results": " All recipes are built correctly \n\n" 2352 "expected_results": " All recipes are built correctly \n\n"
2353 }, 2353 },
2354 "4": { 2354 "4": {
@@ -2382,7 +2382,7 @@
2382 "expected_results": "" 2382 "expected_results": ""
2383 }, 2383 },
2384 "3": { 2384 "3": {
2385 "action": "Build 6 recipes example (core-image-sato, core-image-minimal, core-image-base, core-image-clutter) to name a few. \n\n", 2385 "action": "Build 6 recipes example (core-image-sato, core-image-minimal, core-image-base) to name a few. \n\n",
2386 "expected_results": "All recipes are built correctly \n\n" 2386 "expected_results": "All recipes are built correctly \n\n"
2387 }, 2387 },
2388 "4": { 2388 "4": {
@@ -2420,7 +2420,7 @@
2420 "expected_results": "" 2420 "expected_results": ""
2421 }, 2421 },
2422 "3": { 2422 "3": {
2423 "action": "Build 4 recipes example (core-image-sato, core-image-minimal, core-image-base, core-image-clutter) to name a few. \n\n", 2423 "action": "Build 4 recipes example (core-image-sato, core-image-minimal, core-image-base) to name a few. \n\n",
2424 "expected_results": " All recipes are built correctly \n\n" 2424 "expected_results": " All recipes are built correctly \n\n"
2425 }, 2425 },
2426 "4": { 2426 "4": {
@@ -2569,4 +2569,4 @@
2569 "summary": "Download_task_log" 2569 "summary": "Download_task_log"
2570 } 2570 }
2571 } 2571 }
2572] \ No newline at end of file 2572]
diff --git a/meta/lib/oeqa/oetest.py b/meta/lib/oeqa/oetest.py
index 9c84466dd0..bcb6a878c7 100644
--- a/meta/lib/oeqa/oetest.py
+++ b/meta/lib/oeqa/oetest.py
@@ -28,7 +28,7 @@ try:
28 import oeqa.sdkext 28 import oeqa.sdkext
29except ImportError: 29except ImportError:
30 pass 30 pass
31from oeqa.utils.decorators import LogResults, gettag, getResults 31from oeqa.utils.decorators import LogResults, gettag
32 32
33logger = logging.getLogger("BitBake") 33logger = logging.getLogger("BitBake")
34 34
@@ -57,7 +57,6 @@ def filterByTagExp(testsuite, tagexp):
57@LogResults 57@LogResults
58class oeTest(unittest.TestCase): 58class oeTest(unittest.TestCase):
59 59
60 pscmd = "ps"
61 longMessage = True 60 longMessage = True
62 61
63 @classmethod 62 @classmethod
@@ -110,20 +109,6 @@ class oeRuntimeTest(oeTest):
110 def tearDown(self): 109 def tearDown(self):
111 # Uninstall packages in the DUT 110 # Uninstall packages in the DUT
112 self.tc.install_uninstall_packages(self.id(), False) 111 self.tc.install_uninstall_packages(self.id(), False)
113
114 res = getResults()
115 # If a test fails or there is an exception dump
116 # for QemuTarget only
117 if (type(self.target).__name__ == "QemuTarget" and
118 (self.id() in res.getErrorList() or
119 self.id() in res.getFailList())):
120 self.tc.host_dumper.create_dir(self._testMethodName)
121 self.tc.host_dumper.dump_host()
122 self.target.target_dumper.dump_target(
123 self.tc.host_dumper.dump_dir)
124 print ("%s dump data stored in %s" % (self._testMethodName,
125 self.tc.host_dumper.dump_dir))
126
127 self.tearDownLocal() 112 self.tearDownLocal()
128 113
129 # Method to be run after tearDown and implemented by child classes 114 # Method to be run after tearDown and implemented by child classes
@@ -256,7 +241,7 @@ class TestContext(object):
256 241
257 modules = [] 242 modules = []
258 for test in self.testslist: 243 for test in self.testslist:
259 if re.search("\w+\.\w+\.test_\S+", test): 244 if re.search(r"\w+\.\w+\.test_\S+", test):
260 test = '.'.join(t.split('.')[:3]) 245 test = '.'.join(t.split('.')[:3])
261 module = pkgutil.get_loader(test) 246 module = pkgutil.get_loader(test)
262 modules.append(module) 247 modules.append(module)
@@ -398,11 +383,6 @@ class RuntimeTestContext(TestContext):
398 def _get_test_suites_required(self): 383 def _get_test_suites_required(self):
399 return [t for t in self.d.getVar("TEST_SUITES").split() if t != "auto"] 384 return [t for t in self.d.getVar("TEST_SUITES").split() if t != "auto"]
400 385
401 def loadTests(self):
402 super(RuntimeTestContext, self).loadTests()
403 if oeTest.hasPackage("procps"):
404 oeRuntimeTest.pscmd = "ps -ef"
405
406 def extract_packages(self): 386 def extract_packages(self):
407 """ 387 """
408 Find packages that will be needed during runtime. 388 Find packages that will be needed during runtime.
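The raw-string change in the oetest.py hunk above is behaviour-preserving: the compiled pattern is identical, and the raw literal merely avoids the invalid-escape-sequence warning that newer CPython releases emit for "\w" in a plain string (a DeprecationWarning, upgraded to SyntaxWarning in Python 3.12). A quick sketch of the equivalence:

    import re

    # r"..." and the escaped plain literal denote the same pattern; only the
    # raw form is warning-free when the module is compiled.
    pattern = r"\w+\.\w+\.test_\S+"
    assert pattern == "\\w+\\.\\w+\\.test_\\S+"
    assert re.search(pattern, "oeqa.runtime.test_ssh_connection")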
diff --git a/meta/lib/oeqa/runtime/case.py b/meta/lib/oeqa/runtime/case.py
index f036982e1f..9515ca2f3d 100644
--- a/meta/lib/oeqa/runtime/case.py
+++ b/meta/lib/oeqa/runtime/case.py
@@ -4,6 +4,9 @@
4# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
5# 5#
6 6
7import os
8import subprocess
9import time
7from oeqa.core.case import OETestCase 10from oeqa.core.case import OETestCase
8from oeqa.utils.package_manager import install_package, uninstall_package 11from oeqa.utils.package_manager import install_package, uninstall_package
9 12
@@ -18,3 +21,16 @@ class OERuntimeTestCase(OETestCase):
18 def tearDown(self): 21 def tearDown(self):
19 super(OERuntimeTestCase, self).tearDown() 22 super(OERuntimeTestCase, self).tearDown()
20 uninstall_package(self) 23 uninstall_package(self)
24
25def run_network_serialdebug(runner):
26 status, output = runner.run_serial("ip addr")
27 print("ip addr on target: %s %s" % (output, status))
28 status, output = runner.run_serial("ping -c 1 %s" % self.target.server_ip)
29 print("ping on target for %s: %s %s" % (self.target.server_ip, output, status))
30 status, output = runner.run_serial("ping -c 1 %s" % self.target.ip)
31 print("ping on target for %s: %s %s" % (self.target.ip, output, status))
32 # Have to use a full path for netstat which isn't in HOSTTOOLS
33 subprocess.call(["/usr/bin/netstat", "-tunape"])
34 subprocess.call(["/usr/bin/netstat", "-ei"])
35 subprocess.call(["ps", "-awx"], shell=True)
36 print("PID: %s %s" % (str(os.getpid()), time.time()))
diff --git a/meta/lib/oeqa/runtime/cases/_qemutiny.py b/meta/lib/oeqa/runtime/cases/_qemutiny.py
index 6886e36502..816fd4a7cb 100644
--- a/meta/lib/oeqa/runtime/cases/_qemutiny.py
+++ b/meta/lib/oeqa/runtime/cases/_qemutiny.py
@@ -1,12 +1,19 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5from oeqa.runtime.case import OERuntimeTestCase 7from oeqa.runtime.case import OERuntimeTestCase
8from oeqa.core.target.qemu import OEQemuTarget
6 9
7class QemuTinyTest(OERuntimeTestCase): 10class QemuTinyTest(OERuntimeTestCase):
8 11
9 def test_boot_tiny(self): 12 def test_boot_tiny(self):
10 status, output = self.target.run_serial('uname -a') 13 # Until the target has explicit run_serial support, check that the
11 msg = "Cannot detect poky tiny boot!" 14 # target is the qemu runner
12 self.assertTrue("yocto-tiny" in output, msg) 15 if isinstance(self.target, OEQemuTarget):
16 status, output = self.target.runner.run_serial('uname -a')
17 self.assertIn("Linux", output)
18 else:
19 self.skipTest("Target %s is not OEQemuTarget" % self.target)
diff --git a/meta/lib/oeqa/runtime/cases/apt.py b/meta/lib/oeqa/runtime/cases/apt.py
index 53745df93f..8000645843 100644
--- a/meta/lib/oeqa/runtime/cases/apt.py
+++ b/meta/lib/oeqa/runtime/cases/apt.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -21,7 +23,7 @@ class AptRepoTest(AptTest):
21 23
22 @classmethod 24 @classmethod
23 def setUpClass(cls): 25 def setUpClass(cls):
24 service_repo = os.path.join(cls.tc.td['DEPLOY_DIR_DEB'], 'all') 26 service_repo = os.path.join(cls.tc.td['DEPLOY_DIR_DEB'], '')
25 cls.repo_server = HTTPService(service_repo, 27 cls.repo_server = HTTPService(service_repo,
26 '0.0.0.0', port=cls.tc.target.server_port, 28 '0.0.0.0', port=cls.tc.target.server_port,
27 logger=cls.tc.logger) 29 logger=cls.tc.logger)
@@ -34,20 +36,44 @@ class AptRepoTest(AptTest):
34 def setup_source_config_for_package_install(self): 36 def setup_source_config_for_package_install(self):
35 apt_get_source_server = 'http://%s:%s/' % (self.tc.target.server_ip, self.repo_server.port) 37 apt_get_source_server = 'http://%s:%s/' % (self.tc.target.server_ip, self.repo_server.port)
36 apt_get_sourceslist_dir = '/etc/apt/' 38 apt_get_sourceslist_dir = '/etc/apt/'
37 self.target.run('cd %s; echo deb [ allow-insecure=yes ] %s ./ > sources.list' % (apt_get_sourceslist_dir, apt_get_source_server)) 39 self.target.run('cd %s; echo deb [ allow-insecure=yes ] %s/all ./ > sources.list' % (apt_get_sourceslist_dir, apt_get_source_server))
40
41 def setup_source_config_for_package_install_signed(self):
42 apt_get_source_server = 'http://%s:%s' % (self.tc.target.server_ip, self.repo_server.port)
43 apt_get_sourceslist_dir = '/etc/apt/'
44 self.target.run("cd %s; cp sources.list sources.list.bak; sed -i 's|\[trusted=yes\] http://bogus_ip:bogus_port|%s|g' sources.list" % (apt_get_sourceslist_dir, apt_get_source_server))
38 45
39 def cleanup_source_config_for_package_install(self): 46 def cleanup_source_config_for_package_install(self):
40 apt_get_sourceslist_dir = '/etc/apt/' 47 apt_get_sourceslist_dir = '/etc/apt/'
41 self.target.run('cd %s; rm sources.list' % (apt_get_sourceslist_dir)) 48 self.target.run('cd %s; rm sources.list' % (apt_get_sourceslist_dir))
42 49
50 def cleanup_source_config_for_package_install_signed(self):
51 apt_get_sourceslist_dir = '/etc/apt/'
52 self.target.run('cd %s; mv sources.list.bak sources.list' % (apt_get_sourceslist_dir))
53
54 def setup_key(self):
55 # the key is found on the target /etc/pki/packagefeed-gpg/
56 # named PACKAGEFEED-GPG-KEY-poky-branch
57 self.target.run('cd %s; apt-key add P*' % ('/etc/pki/packagefeed-gpg'))
58
43 @skipIfNotFeature('package-management', 59 @skipIfNotFeature('package-management',
44 'Test requires package-management to be in IMAGE_FEATURES') 60 'Test requires package-management to be in IMAGE_FEATURES')
45 @skipIfNotDataVar('IMAGE_PKGTYPE', 'deb', 61 @skipIfNotDataVar('IMAGE_PKGTYPE', 'deb',
46 'DEB is not the primary package manager') 62 'DEB is not the primary package manager')
47 @OEHasPackage(['apt']) 63 @OEHasPackage(['apt'])
48 def test_apt_install_from_repo(self): 64 def test_apt_install_from_repo(self):
49 self.setup_source_config_for_package_install() 65 if not self.tc.td.get('PACKAGE_FEED_GPG_NAME'):
50 self.pkg('update') 66 self.setup_source_config_for_package_install()
51 self.pkg('remove --yes run-postinsts-dev') 67 self.pkg('update')
52 self.pkg('install --yes --allow-unauthenticated run-postinsts-dev') 68 self.pkg('remove --yes run-postinsts-dev')
53 self.cleanup_source_config_for_package_install() 69 self.pkg('install --yes --allow-unauthenticated run-postinsts-dev')
70 self.cleanup_source_config_for_package_install()
71 else:
72 # when we are here a key has been set to sign the package feed and
73 # public key and gnupg installed on the image by test_testimage_apt
74 self.setup_source_config_for_package_install_signed()
75 self.setup_key()
76 self.pkg('update')
77 self.pkg('install --yes run-postinsts-dev')
78 self.pkg('remove --yes run-postinsts-dev')
79 self.cleanup_source_config_for_package_install_signed()
diff --git a/meta/lib/oeqa/runtime/cases/boot.py b/meta/lib/oeqa/runtime/cases/boot.py
index 2142f400a0..dcee3311f7 100644
--- a/meta/lib/oeqa/runtime/cases/boot.py
+++ b/meta/lib/oeqa/runtime/cases/boot.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -13,7 +15,7 @@ from oeqa.core.decorator.data import skipIfQemu
13class BootTest(OERuntimeTestCase): 15class BootTest(OERuntimeTestCase):
14 16
15 @OETimeout(120) 17 @OETimeout(120)
16 @skipIfQemu('qemuall', 'Test only runs on real hardware') 18 @skipIfQemu()
17 @OETestDepends(['ssh.SSHTest.test_ssh']) 19 @OETestDepends(['ssh.SSHTest.test_ssh'])
18 def test_reboot(self): 20 def test_reboot(self):
19 output = '' 21 output = ''
diff --git a/meta/lib/oeqa/runtime/cases/buildcpio.py b/meta/lib/oeqa/runtime/cases/buildcpio.py
index e29bf16ccb..0c9c57a3cb 100644
--- a/meta/lib/oeqa/runtime/cases/buildcpio.py
+++ b/meta/lib/oeqa/runtime/cases/buildcpio.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -12,7 +14,7 @@ class BuildCpioTest(OERuntimeTestCase):
12 14
13 @classmethod 15 @classmethod
14 def setUpClass(cls): 16 def setUpClass(cls):
15 uri = 'https://downloads.yoctoproject.org/mirror/sources/cpio-2.13.tar.gz' 17 uri = 'https://downloads.yoctoproject.org/mirror/sources/cpio-2.15.tar.gz'
16 cls.project = TargetBuildProject(cls.tc.target, 18 cls.project = TargetBuildProject(cls.tc.target,
17 uri, 19 uri,
18 dl_dir = cls.tc.td['DL_DIR']) 20 dl_dir = cls.tc.td['DL_DIR'])
@@ -27,7 +29,6 @@ class BuildCpioTest(OERuntimeTestCase):
27 @OEHasPackage(['autoconf']) 29 @OEHasPackage(['autoconf'])
28 def test_cpio(self): 30 def test_cpio(self):
29 self.project.download_archive() 31 self.project.download_archive()
30 self.project.run_configure('--disable-maintainer-mode', 32 self.project.run_configure(configure_args="CFLAGS='-std=gnu17 -Dbool=int -Dtrue=1 -Dfalse=0 -Wno-error=implicit-function-declaration'")
31 'sed -i -e "/char \*program_name/d" src/global.c;') 33 self.project.run_make(make_args="CFLAGS='-std=gnu17 -Dbool=int -Dtrue=1 -Dfalse=0 -Wno-error=implicit-function-declaration'")
32 self.project.run_make()
33 self.project.run_install() 34 self.project.run_install()
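The new configure and make arguments in the buildcpio.py hunk exist because newer compilers break old autotools sources: GCC 14 turns implicit function declarations into errors, and C23 (the default dialect in later releases) makes bool, true and false keywords that clash with the source's own definitions. Forcing -std=gnu17 plus the -D fallbacks restores the older dialect. Condensed into a helper that uses only the TargetBuildProject calls visible in this diff (the helper name itself is illustrative):

    def build_legacy_autotools(project):
        # 'project' is a TargetBuildProject as constructed in setUpClass();
        # the CFLAGS pin a pre-C23 dialect for legacy autotools sources.
        legacy_cflags = ("CFLAGS='-std=gnu17 -Dbool=int -Dtrue=1 -Dfalse=0 "
                         "-Wno-error=implicit-function-declaration'")
        project.download_archive()
        project.run_configure(configure_args=legacy_cflags)
        project.run_make(make_args=legacy_cflags)
        project.run_install()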
diff --git a/meta/lib/oeqa/runtime/cases/buildgalculator.py b/meta/lib/oeqa/runtime/cases/buildgalculator.py
index e5cc3e2888..2cfb3243dc 100644
--- a/meta/lib/oeqa/runtime/cases/buildgalculator.py
+++ b/meta/lib/oeqa/runtime/cases/buildgalculator.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/buildlzip.py b/meta/lib/oeqa/runtime/cases/buildlzip.py
index bc70b41461..921a0bca61 100644
--- a/meta/lib/oeqa/runtime/cases/buildlzip.py
+++ b/meta/lib/oeqa/runtime/cases/buildlzip.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -13,7 +15,7 @@ class BuildLzipTest(OERuntimeTestCase):
13 @classmethod 15 @classmethod
14 def setUpClass(cls): 16 def setUpClass(cls):
15 uri = 'http://downloads.yoctoproject.org/mirror/sources' 17 uri = 'http://downloads.yoctoproject.org/mirror/sources'
16 uri = '%s/lzip-1.19.tar.gz' % uri 18 uri = '%s/lzip-1.25.tar.gz' % uri
17 cls.project = TargetBuildProject(cls.tc.target, 19 cls.project = TargetBuildProject(cls.tc.target,
18 uri, 20 uri,
19 dl_dir = cls.tc.td['DL_DIR']) 21 dl_dir = cls.tc.td['DL_DIR'])
diff --git a/meta/lib/oeqa/runtime/cases/connman.py b/meta/lib/oeqa/runtime/cases/connman.py
index f0d15fac9b..a488752e3f 100644
--- a/meta/lib/oeqa/runtime/cases/connman.py
+++ b/meta/lib/oeqa/runtime/cases/connman.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/date.py b/meta/lib/oeqa/runtime/cases/date.py
index fdd2a6ae58..a2523de67a 100644
--- a/meta/lib/oeqa/runtime/cases/date.py
+++ b/meta/lib/oeqa/runtime/cases/date.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -13,12 +15,12 @@ class DateTest(OERuntimeTestCase):
13 def setUp(self): 15 def setUp(self):
14 if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd': 16 if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd':
15 self.logger.debug('Stopping systemd-timesyncd daemon') 17 self.logger.debug('Stopping systemd-timesyncd daemon')
16 self.target.run('systemctl disable --now systemd-timesyncd') 18 self.target.run('systemctl disable --now --runtime systemd-timesyncd')
17 19
18 def tearDown(self): 20 def tearDown(self):
19 if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd': 21 if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd':
20 self.logger.debug('Starting systemd-timesyncd daemon') 22 self.logger.debug('Starting systemd-timesyncd daemon')
21 self.target.run('systemctl enable --now systemd-timesyncd') 23 self.target.run('systemctl enable --now --runtime systemd-timesyncd')
22 24
23 @OETestDepends(['ssh.SSHTest.test_ssh']) 25 @OETestDepends(['ssh.SSHTest.test_ssh'])
24 @OEHasPackage(['coreutils', 'busybox']) 26 @OEHasPackage(['coreutils', 'busybox'])
@@ -28,14 +30,13 @@ class DateTest(OERuntimeTestCase):
28 self.assertEqual(status, 0, msg=msg) 30 self.assertEqual(status, 0, msg=msg)
29 oldDate = output 31 oldDate = output
30 32
31 sampleDate = '"2016-08-09 10:00:00"' 33 sampleTimestamp = 1488800000
32 (status, output) = self.target.run("date -s %s" % sampleDate) 34 (status, output) = self.target.run("date -s @%d" % sampleTimestamp)
33 self.assertEqual(status, 0, msg='Date set failed, output: %s' % output) 35 self.assertEqual(status, 0, msg='Date set failed, output: %s' % output)
34 36
35 (status, output) = self.target.run("date -R") 37 (status, output) = self.target.run('date +"%s"')
36 p = re.match('Tue, 09 Aug 2016 10:00:.. \+0000', output)
37 msg = 'The date was not set correctly, output: %s' % output 38 msg = 'The date was not set correctly, output: %s' % output
38 self.assertTrue(p, msg=msg) 39 self.assertTrue(int(output) - sampleTimestamp < 300, msg=msg)
39 40
40 (status, output) = self.target.run('date -s "%s"' % oldDate) 41 (status, output) = self.target.run('date -s "%s"' % oldDate)
41 msg = 'Failed to reset date, output: %s' % output 42 msg = 'Failed to reset date, output: %s' % output
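The rewritten date test avoids parsing a locale- and format-sensitive date -R string: it sets the clock from a plain epoch timestamp and reads it back with date +"%s", allowing 300 seconds of slack for the round-trip. Note that the check as written only bounds positive drift; a symmetric tolerance is slightly stricter, as in this standalone sketch:

    # Standalone sketch of the epoch comparison; abs() also catches the
    # (unlikely) case of the read-back clock being behind the sample time.
    def clock_was_set(read_back_epoch, sample_epoch=1488800000, slack=300):
        return abs(int(read_back_epoch) - sample_epoch) < slack

    assert clock_was_set("1488800042")
    assert not clock_was_set("1489000000")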
diff --git a/meta/lib/oeqa/runtime/cases/df.py b/meta/lib/oeqa/runtime/cases/df.py
index bb155c9cf9..43e0ebf9ea 100644
--- a/meta/lib/oeqa/runtime/cases/df.py
+++ b/meta/lib/oeqa/runtime/cases/df.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/dnf.py b/meta/lib/oeqa/runtime/cases/dnf.py
index f40c63026e..3ccb18ce83 100644
--- a/meta/lib/oeqa/runtime/cases/dnf.py
+++ b/meta/lib/oeqa/runtime/cases/dnf.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -73,48 +75,43 @@ class DnfRepoTest(DnfTest):
73 def test_dnf_makecache(self): 75 def test_dnf_makecache(self):
74 self.dnf_with_repo('makecache') 76 self.dnf_with_repo('makecache')
75 77
76
77# Does not work when repo is specified on the command line
78# @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache'])
79# def test_dnf_repolist(self):
80# self.dnf_with_repo('repolist')
81
82 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache']) 78 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache'])
83 def test_dnf_repoinfo(self): 79 def test_dnf_repoinfo(self):
84 self.dnf_with_repo('repoinfo') 80 self.dnf_with_repo('repoinfo')
85 81
86 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache']) 82 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache'])
87 def test_dnf_install(self): 83 def test_dnf_install(self):
88 output = self.dnf_with_repo('list run-postinsts-dev') 84 self.dnf_with_repo('remove -y dnf-test-*')
89 if 'Installed Packages' in output: 85 self.dnf_with_repo('install -y dnf-test-dep')
90 self.dnf_with_repo('remove -y run-postinsts-dev')
91 self.dnf_with_repo('install -y run-postinsts-dev')
92 86
93 @OETestDepends(['dnf.DnfRepoTest.test_dnf_install']) 87 @OETestDepends(['dnf.DnfRepoTest.test_dnf_install'])
94 def test_dnf_install_dependency(self): 88 def test_dnf_install_dependency(self):
95 self.dnf_with_repo('remove -y run-postinsts') 89 self.dnf_with_repo('remove -y dnf-test-*')
96 self.dnf_with_repo('install -y run-postinsts-dev') 90 self.dnf_with_repo('install -y dnf-test-main')
91 output = self.dnf('list --installed dnf-test-*')
92 self.assertIn("dnf-test-main.", output)
93 self.assertIn("dnf-test-dep.", output)
97 94
98 @OETestDepends(['dnf.DnfRepoTest.test_dnf_install_dependency']) 95 @OETestDepends(['dnf.DnfRepoTest.test_dnf_install_dependency'])
99 def test_dnf_install_from_disk(self): 96 def test_dnf_install_from_disk(self):
100 self.dnf_with_repo('remove -y run-postinsts-dev') 97 self.dnf_with_repo('remove -y dnf-test-dep')
101 self.dnf_with_repo('install -y --downloadonly run-postinsts-dev') 98 self.dnf_with_repo('install -y --downloadonly dnf-test-dep')
102 status, output = self.target.run('find /var/cache/dnf -name run-postinsts-dev*rpm', 1500) 99 status, output = self.target.run('find /var/cache/dnf -name dnf-test-dep*rpm')
103 self.assertEqual(status, 0, output) 100 self.assertEqual(status, 0, output)
104 self.dnf_with_repo('install -y %s' % output) 101 self.dnf_with_repo('install -y %s' % output)
105 102
106 @OETestDepends(['dnf.DnfRepoTest.test_dnf_install_from_disk']) 103 @OETestDepends(['dnf.DnfRepoTest.test_dnf_install_from_disk'])
107 def test_dnf_install_from_http(self): 104 def test_dnf_install_from_http(self):
108 output = subprocess.check_output('%s %s -name run-postinsts-dev*' % (bb.utils.which(os.getenv('PATH'), "find"), 105 output = subprocess.check_output('%s %s -name dnf-test-dep*' % (bb.utils.which(os.getenv('PATH'), "find"),
109 os.path.join(self.tc.td['WORKDIR'], 'oe-testimage-repo')), shell=True).decode("utf-8") 106 os.path.join(self.tc.td['WORKDIR'], 'oe-testimage-repo')), shell=True).decode("utf-8")
110 rpm_path = output.split("/")[-2] + "/" + output.split("/")[-1] 107 rpm_path = output.split("/")[-2] + "/" + output.split("/")[-1]
111 url = 'http://%s:%s/%s' %(self.target.server_ip, self.repo_server.port, rpm_path) 108 url = 'http://%s:%s/%s' %(self.target.server_ip, self.repo_server.port, rpm_path)
112 self.dnf_with_repo('remove -y run-postinsts-dev') 109 self.dnf_with_repo('remove -y dnf-test-dep')
113 self.dnf_with_repo('install -y %s' % url) 110 self.dnf_with_repo('install -y %s' % url)
114 111
115 @OETestDepends(['dnf.DnfRepoTest.test_dnf_install']) 112 @OETestDepends(['dnf.DnfRepoTest.test_dnf_install'])
116 def test_dnf_reinstall(self): 113 def test_dnf_reinstall(self):
117 self.dnf_with_repo('reinstall -y run-postinsts-dev') 114 self.dnf_with_repo('reinstall -y dnf-test-main')
118 115
119 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache']) 116 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache'])
120 @skipIfInDataVar('DISTRO_FEATURES', 'usrmerge', 'Test run when not enable usrmerge') 117 @skipIfInDataVar('DISTRO_FEATURES', 'usrmerge', 'Test run when not enable usrmerge')
@@ -137,55 +134,40 @@ class DnfRepoTest(DnfTest):
137 self.target.run('cp -r /etc/dnf %s/etc' % rootpath, 1500) 134 self.target.run('cp -r /etc/dnf %s/etc' % rootpath, 1500)
138 self.target.run('cp /bin/sh %s/bin' % rootpath, 1500) 135 self.target.run('cp /bin/sh %s/bin' % rootpath, 1500)
139 self.target.run('mount -o bind /dev %s/dev/' % rootpath, 1500) 136 self.target.run('mount -o bind /dev %s/dev/' % rootpath, 1500)
140 self.dnf_with_repo('install --installroot=%s -v -y --rpmverbosity=debug busybox run-postinsts' % rootpath) 137 self.dnf_with_repo('install --installroot=%s -v -y --rpmverbosity=debug busybox' % rootpath)
141 status, output = self.target.run('test -e %s/var/cache/dnf' % rootpath, 1500) 138 status, output = self.target.run('test -e %s/var/cache/dnf' % rootpath, 1500)
142 self.assertEqual(0, status, output) 139 self.assertEqual(0, status, output)
143 status, output = self.target.run('test -e %s/bin/busybox' % rootpath, 1500) 140 status, output = self.target.run('test -e %s/bin/busybox' % rootpath, 1500)
144 self.assertEqual(0, status, output) 141 self.assertEqual(0, status, output)
145 142
146 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache']) 143 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache'])
147 @skipIfNotInDataVar('DISTRO_FEATURES', 'usrmerge', 'Test run when enable usrmege') 144 @skipIfNotInDataVar('DISTRO_FEATURES', 'usrmerge', 'Test run when enable usrmerge')
148 @OEHasPackage('busybox') 145 @OEHasPackage('busybox')
149 def test_dnf_installroot_usrmerge(self): 146 def test_dnf_installroot_usrmerge(self):
150 rootpath = '/home/root/chroot/test' 147 rootpath = '/home/root/chroot/test'
151 #Copy necessary files to avoid errors with not yet installed tools on 148 #Copy necessary files to avoid errors with not yet installed tools on
152 #installroot directory. 149 #installroot directory.
153 self.target.run('mkdir -p %s/etc' % rootpath, 1500) 150 self.target.run('mkdir -p %s/etc' % rootpath)
154 self.target.run('mkdir -p %s/usr/bin %s/usr/sbin' % (rootpath, rootpath), 1500) 151 self.target.run('mkdir -p %s/usr/bin %s/usr/sbin' % (rootpath, rootpath))
155 self.target.run('ln -sf -r %s/usr/bin %s/bin' % (rootpath, rootpath), 1500) 152 self.target.run('ln -sf usr/bin %s/bin' % (rootpath))
156 self.target.run('ln -sf -r %s/usr/sbin %s/sbin' % (rootpath, rootpath), 1500) 153 self.target.run('ln -sf usr/sbin %s/sbin' % (rootpath))
157 self.target.run('mkdir -p %s/dev' % rootpath, 1500) 154 self.target.run('mkdir -p %s/dev' % rootpath)
158 #Handle different architectures lib dirs 155 #Handle different architectures lib dirs
159 self.target.run('mkdir -p %s/usr/lib' % rootpath, 1500) 156 self.target.run("for l in /lib*; do mkdir -p %s/usr/$l; ln -s usr/$l %s/$l; done" % (rootpath, rootpath))
160 self.target.run('mkdir -p %s/usr/libx32' % rootpath, 1500) 157 self.target.run('cp -r /etc/rpm %s/etc' % rootpath)
161 self.target.run('mkdir -p %s/usr/lib64' % rootpath, 1500) 158 self.target.run('cp -r /etc/dnf %s/etc' % rootpath)
162 self.target.run('cp /lib/libtinfo.so.5 %s/usr/lib' % rootpath, 1500) 159 self.target.run('cp /bin/busybox %s/bin/sh' % rootpath)
163 self.target.run('cp /libx32/libtinfo.so.5 %s/usr/libx32' % rootpath, 1500) 160 self.target.run('mount -o bind /dev %s/dev/' % rootpath)
164 self.target.run('cp /lib64/libtinfo.so.5 %s/usr/lib64' % rootpath, 1500) 161 self.dnf_with_repo('install --installroot=%s -v -y --rpmverbosity=debug busybox' % rootpath)
165 self.target.run('ln -sf -r %s/lib %s/usr/lib' % (rootpath,rootpath), 1500) 162 status, output = self.target.run('test -e %s/var/cache/dnf' % rootpath)
166 self.target.run('ln -sf -r %s/libx32 %s/usr/libx32' % (rootpath,rootpath), 1500)
167 self.target.run('ln -sf -r %s/lib64 %s/usr/lib64' % (rootpath,rootpath), 1500)
168 self.target.run('cp -r /etc/rpm %s/etc' % rootpath, 1500)
169 self.target.run('cp -r /etc/dnf %s/etc' % rootpath, 1500)
170 self.target.run('cp /bin/sh %s/bin' % rootpath, 1500)
171 self.target.run('mount -o bind /dev %s/dev/' % rootpath, 1500)
172 self.dnf_with_repo('install --installroot=%s -v -y --rpmverbosity=debug busybox run-postinsts' % rootpath)
173 status, output = self.target.run('test -e %s/var/cache/dnf' % rootpath, 1500)
174 self.assertEqual(0, status, output) 163 self.assertEqual(0, status, output)
175 status, output = self.target.run('test -e %s/bin/busybox' % rootpath, 1500) 164 status, output = self.target.run('test -e %s/bin/busybox' % rootpath)
176 self.assertEqual(0, status, output) 165 self.assertEqual(0, status, output)
177 166
178 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache']) 167 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache'])
179 def test_dnf_exclude(self): 168 def test_dnf_exclude(self):
180 excludepkg = 'curl-dev' 169 self.dnf_with_repo('remove -y dnf-test-*')
181 self.dnf_with_repo('install -y curl*') 170 self.dnf_with_repo('install -y --exclude=dnf-test-dep dnf-test-*')
182 self.dnf('list %s' % excludepkg, 0) 171 output = self.dnf('list --installed dnf-test-*')
183 #Avoid remove dependencies to skip some errors on different archs and images 172 self.assertIn("dnf-test-main.", output)
184 self.dnf_with_repo('remove --setopt=clean_requirements_on_remove=0 -y curl*') 173 self.assertNotIn("dnf-test-dep.", output)
185 #check curl-dev is not installed adter removing all curl occurrences
186 status, output = self.target.run('dnf list --installed | grep %s'% excludepkg, 1500)
187 self.assertEqual(1, status, "%s was not removed, is listed as installed"%excludepkg)
188 self.dnf_with_repo('install -y --exclude=%s --exclude=curl-staticdev curl*' % excludepkg)
189 #check curl-dev is not installed after being excluded
190 status, output = self.target.run('dnf list --installed | grep %s'% excludepkg , 1500)
191 self.assertEqual(1, status, "%s was not excluded, is listed as installed"%excludepkg)
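The rewritten exclude test proves both halves of the behaviour: the wildcard install pulls in dnf-test-main while --exclude keeps dnf-test-dep out. A minimal sketch of that install/verify step (not part of the patch; dnf() is a hypothetical helper standing in for the test's dnf_with_repo()/dnf() calls):

    import subprocess

    def dnf(args):
        # Hypothetical stand-in for the test's dnf helpers.
        result = subprocess.run("dnf %s" % args, shell=True,
                                capture_output=True, text=True, check=True)
        return result.stdout

    dnf("remove -y dnf-test-*")
    dnf("install -y --exclude=dnf-test-dep dnf-test-*")
    installed = dnf("list --installed dnf-test-*")
    # dnf prints installed packages as "name.arch", so matching on "name."
    # avoids false positives from longer package names.
    assert "dnf-test-main." in installed
    assert "dnf-test-dep." not in installed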
diff --git a/meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py b/meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py
index e010612838..c3be60f006 100644
--- a/meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py
+++ b/meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py
@@ -1,28 +1,15 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
1from oeqa.runtime.case import OERuntimeTestCase 6from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.core.decorator.depends import OETestDepends 7from oeqa.core.decorator.depends import OETestDepends
3from oeqa.core.decorator.data import skipIfQemu 8from oeqa.core.decorator.data import skipIfQemu
4 9
5class Ethernet_Test(OERuntimeTestCase): 10class Ethernet_Test(OERuntimeTestCase):
6 11
7 def set_ip(self, x): 12 @skipIfQemu()
8 x = x.split(".")
9 sample_host_address = '150'
10 x[3] = sample_host_address
11 x = '.'.join(x)
12 return x
13
14 @skipIfQemu('qemuall', 'Test only runs on real hardware')
15 @OETestDepends(['ssh.SSHTest.test_ssh'])
16 def test_set_virtual_ip(self):
17 (status, output) = self.target.run("ifconfig eth0 | grep 'inet ' | awk '{print $2}'")
18 self.assertEqual(status, 0, msg='Failed to get ip address. Make sure you have an ethernet connection on your device, output: %s' % output)
19 original_ip = output
20 virtual_ip = self.set_ip(original_ip)
21
22 (status, output) = self.target.run("ifconfig eth0:1 %s netmask 255.255.255.0 && sleep 2 && ping -c 5 %s && ifconfig eth0:1 down" % (virtual_ip,virtual_ip))
23 self.assertEqual(status, 0, msg='Failed to create virtual ip address, output: %s' % output)
24
25 @OETestDepends(['ethernet_ip_connman.Ethernet_Test.test_set_virtual_ip'])
26 def test_get_ip_from_dhcp(self): 13 def test_get_ip_from_dhcp(self):
27 (status, output) = self.target.run("connmanctl services | grep -E '*AO Wired|*AR Wired' | awk '{print $3}'") 14 (status, output) = self.target.run("connmanctl services | grep -E '*AO Wired|*AR Wired' | awk '{print $3}'")
28 self.assertEqual(status, 0, msg='No wired interfaces are detected, output: %s' % output) 15 self.assertEqual(status, 0, msg='No wired interfaces are detected, output: %s' % output)
@@ -33,4 +20,4 @@ class Ethernet_Test(OERuntimeTestCase):
33 default_gateway = output 20 default_gateway = output
34 21
35 (status, output) = self.target.run("connmanctl config %s --ipv4 dhcp && sleep 2 && ping -c 5 %s" % (wired_interfaces,default_gateway)) 22 (status, output) = self.target.run("connmanctl config %s --ipv4 dhcp && sleep 2 && ping -c 5 %s" % (wired_interfaces,default_gateway))
36 self.assertEqual(status, 0, msg='Failed to get dynamic IP address via DHCP in connmand, output: %s' % output) \ No newline at end of file 23 self.assertEqual(status, 0, msg='Failed to get dynamic IP address via DHCP in connmand, output: %s' % output)
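A minimal sketch of the DHCP check the surviving test performs (not part of the patch; run() stands in for self.target.run(), and the default-gateway lookup is an assumption, as that part of the test is not shown in the hunk above):

    def verify_dhcp(run):
        # "AO"/"AR" mark an online/ready wired service in connmanctl output;
        # field 3 is the service identifier.
        status, wired = run(r"connmanctl services | grep -E '\*AO Wired|\*AR Wired' | awk '{print $3}'")
        assert status == 0, 'No wired interfaces are detected, output: %s' % wired

        # Assumed lookup: first hop of the default route.
        status, gateway = run("ip route list | grep default | awk '{print $3}'")
        assert status == 0, 'Failed to determine the default gateway: %s' % gateway

        # Switch the service to DHCP, then prove connectivity by pinging the
        # gateway.
        status, output = run("connmanctl config %s --ipv4 dhcp && sleep 2 && ping -c 5 %s" % (wired, gateway))
        assert status == 0, 'Failed to get dynamic IP address via DHCP, output: %s' % output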
diff --git a/meta/lib/oeqa/runtime/cases/gcc.py b/meta/lib/oeqa/runtime/cases/gcc.py
index 1b6e431bf4..17b1483e8d 100644
--- a/meta/lib/oeqa/runtime/cases/gcc.py
+++ b/meta/lib/oeqa/runtime/cases/gcc.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/gi.py b/meta/lib/oeqa/runtime/cases/gi.py
index 42bd100a31..78c7ddda2c 100644
--- a/meta/lib/oeqa/runtime/cases/gi.py
+++ b/meta/lib/oeqa/runtime/cases/gi.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/go.py b/meta/lib/oeqa/runtime/cases/go.py
new file mode 100644
index 0000000000..39a80f4dca
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/go.py
@@ -0,0 +1,21 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.runtime.case import OERuntimeTestCase
8from oeqa.core.decorator.depends import OETestDepends
9from oeqa.runtime.decorator.package import OEHasPackage
10
11class GoHelloworldTest(OERuntimeTestCase):
12 @OETestDepends(['ssh.SSHTest.test_ssh'])
13 @OEHasPackage(['go-helloworld'])
14 def test_gohelloworld(self):
15 cmd = "go-helloworld"
16 status, output = self.target.run(cmd)
17 msg = 'Exit status was not 0. Output: %s' % output
18 self.assertEqual(status, 0, msg=msg)
19
20 msg = 'Incorrect output: %s' % output
21 self.assertEqual(output, "Hello, world!", msg=msg)
diff --git a/meta/lib/oeqa/runtime/cases/gstreamer.py b/meta/lib/oeqa/runtime/cases/gstreamer.py
index f735f82e3b..2295769cfd 100644
--- a/meta/lib/oeqa/runtime/cases/gstreamer.py
+++ b/meta/lib/oeqa/runtime/cases/gstreamer.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/kernelmodule.py b/meta/lib/oeqa/runtime/cases/kernelmodule.py
index 47fd2f850c..9c42fcc586 100644
--- a/meta/lib/oeqa/runtime/cases/kernelmodule.py
+++ b/meta/lib/oeqa/runtime/cases/kernelmodule.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/ksample.py b/meta/lib/oeqa/runtime/cases/ksample.py
index a9a1620ebd..b6848762e3 100644
--- a/meta/lib/oeqa/runtime/cases/ksample.py
+++ b/meta/lib/oeqa/runtime/cases/ksample.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -10,7 +12,7 @@ from oeqa.core.decorator.depends import OETestDepends
10from oeqa.core.decorator.data import skipIfNotFeature 12from oeqa.core.decorator.data import skipIfNotFeature
11 13
12# need some kernel fragments 14# need some kernel fragments
13# echo "KERNEL_FEATURES_append += \" features\/kernel\-sample\/kernel\-sample.scc\"" >> local.conf 15# echo "KERNEL_FEATURES:append = \" features\/kernel\-sample\/kernel\-sample.scc\"" >> local.conf
14class KSample(OERuntimeTestCase): 16class KSample(OERuntimeTestCase):
15 def cmd_and_check(self, cmd='', match_string=''): 17 def cmd_and_check(self, cmd='', match_string=''):
16 status, output = self.target.run(cmd) 18 status, output = self.target.run(cmd)
diff --git a/meta/lib/oeqa/runtime/cases/ldd.py b/meta/lib/oeqa/runtime/cases/ldd.py
index 9c2caa8f65..f6841c6675 100644
--- a/meta/lib/oeqa/runtime/cases/ldd.py
+++ b/meta/lib/oeqa/runtime/cases/ldd.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/login.py b/meta/lib/oeqa/runtime/cases/login.py
new file mode 100644
index 0000000000..e1bc60d49b
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/login.py
@@ -0,0 +1,116 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import shutil
8import subprocess
9import tempfile
10import time
11import os
12from datetime import datetime
13from oeqa.runtime.case import OERuntimeTestCase
14from oeqa.runtime.decorator.package import OEHasPackage
15
16### Status of qemu images.
17# - runqemu qemuppc64 comes up blank. (skip)
 18# - qemuarmv5 comes up with multiple heads but sending "head" to screendump
 19# seems to create a png with a bad header? (skip for now, but come back to fix)
 20# - qemuriscv32 and qemuloongarch64 don't work with testimage apparently? (skip)
21# - qemumips64 is missing mouse icon.
22# - qemumips takes forever to render and is missing mouse icon.
23# - qemuarm and qemuppc are odd as they don't resize so we need to just set width.
24# - All images have home and screen flipper icons not always rendered fully at first.
25# the sleep seems to help this out some, depending on machine load.
26###
27
28class LoginTest(OERuntimeTestCase):
29 @OEHasPackage(['matchbox-desktop', 'dbus-wait'])
30 def test_screenshot(self):
31 if self.td.get('MACHINE') in ("qemuppc64", "qemuarmv5", "qemuriscv32", "qemuriscv64", "qemuloongarch64"):
32 self.skipTest("{0} is not currently supported.".format(self.td.get('MACHINE')))
33
34 pn = self.td.get('PN')
35
36 ourenv = os.environ.copy()
37 origpath = self.td.get("ORIGPATH")
38 if origpath:
39 ourenv['PATH'] = ourenv['PATH'] + ":" + origpath
40
41 for cmd in ["identify.im7", "convert.im7", "compare.im7"]:
42 try:
43 subprocess.check_output(["which", cmd], env=ourenv)
44 except subprocess.CalledProcessError:
45 self.skipTest("%s (from imagemagick) not available" % cmd)
46
47
48 # Store images so we can debug them if needed
49 saved_screenshots_dir = self.td.get('T') + "/saved-screenshots/"
50
51 ###
52 # This is a really horrible way of doing this but I've not found the
53 # right event to determine "The system is loaded and screen is rendered"
54 #
55 # Using dbus-wait for matchbox is the wrong answer because while it
56 # ensures the system is up, it doesn't mean the screen is rendered.
57 #
58 # Checking the qmp socket doesn't work afaik either.
59 #
60 # One way to do this is to do compares of known good screendumps until
61 # we either get expected or close to expected or we time out. Part of the
62 # issue here with that is that there is a very fine difference in the
63 # diff between a screendump where the icons haven't loaded yet and
64 # one where they won't load. I'll look at that next, but, for now, this.
65 #
66 # Which is ugly and I hate it but it 'works' for various definitions of
67 # 'works'.
68 ###
69 # RP: if the signal is sent before we run this, it will never be seen and we'd timeout
70 #status, output = self.target.run('dbus-wait org.matchbox_project.desktop Loaded')
71 #if status != 0 or "Timeout" in output:
72 # self.fail('dbus-wait failed (%s, %s). This could mean that the image never loaded the matchbox desktop.' % (status, output))
73
74 # Start taking screenshots every 2 seconds until diff=0 or timeout is 60 seconds
75 timeout = time.time() + 60
76 diff = True
77 with tempfile.NamedTemporaryFile(prefix="oeqa-screenshot-login", suffix=".png") as t:
78 while diff != 0 and time.time() < timeout:
79 time.sleep(2)
80 ret = self.target.runner.run_monitor("screendump", args={"filename": t.name, "format":"png"})
81
82 # Find out size of image so we can determine where to blank out clock.
83 # qemuarm and qemuppc are odd as it doesn't resize the window and returns
84 # incorrect widths
85 if self.td.get('MACHINE') == "qemuarm" or self.td.get('MACHINE') == "qemuppc":
86 width = "640"
87 else:
88 cmd = "identify.im7 -ping -format '%w' {0}".format(t.name)
89 width = subprocess.check_output(cmd, shell=True, env=ourenv).decode()
90
91 rblank = int(float(width))
92 lblank = rblank-80
93
 94 # Use the meta-oe version of convert, along with its suffix. This blanks out the clock.
95 cmd = "convert.im7 {0} -fill white -draw 'rectangle {1},4 {2},28' {3}".format(t.name, str(rblank), str(lblank), t.name)
96 convert_out=subprocess.check_output(cmd, shell=True, env=ourenv).decode()
97
 98 os.makedirs(saved_screenshots_dir, exist_ok=True)
99 savedfile = "{0}/saved-{1}-{2}-{3}.png".format(saved_screenshots_dir, \
100 datetime.timestamp(datetime.now()), \
101 pn, \
102 self.td.get('MACHINE'))
103 shutil.copy2(t.name, savedfile)
104
105 refimage = self.td.get('COREBASE') + "/meta/files/screenshot-tests/" + pn + "-" + self.td.get('MACHINE') +".png"
106 if not os.path.exists(refimage):
 107 self.skipTest("No reference image for comparison (%s)" % refimage)
108
109 cmd = "compare.im7 -metric MSE {0} {1} /dev/null".format(t.name, refimage)
110 compare_out = subprocess.run(cmd, shell=True, capture_output=True, text=True, env=ourenv)
111 diff=float(compare_out.stderr.replace("(", "").replace(")","").split()[1])
112 if diff > 0:
113 # Keep a copy of the failed screenshot so we can see what happened.
114 self.fail("Screenshot diff is {0}. Failed image stored in {1}".format(str(diff), savedfile))
115 else:
116 self.assertEqual(0, diff, "Screenshot diff is {0}.".format(str(diff)))
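The comparison step relies on ImageMagick's compare writing its metric to stderr in the form "absolute (normalized)". A minimal sketch of how the normalized MSE is recovered (not part of the patch; it assumes the suffixed .im7 binaries the test already requires):

    import subprocess

    def screenshot_mse(candidate, reference, env=None):
        cmd = "compare.im7 -metric MSE {0} {1} /dev/null".format(candidate, reference)
        result = subprocess.run(cmd, shell=True, capture_output=True,
                                text=True, env=env)
        # stderr looks like "1706.91 (0.0260468)"; keep the normalized value
        # in parentheses. 0.0 means a pixel-exact match (after the clock area
        # has been blanked out).
        return float(result.stderr.replace("(", "").replace(")", "").split()[1])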
diff --git a/meta/lib/oeqa/runtime/cases/logrotate.py b/meta/lib/oeqa/runtime/cases/logrotate.py
index a4efcd07c0..6ad980cb6a 100644
--- a/meta/lib/oeqa/runtime/cases/logrotate.py
+++ b/meta/lib/oeqa/runtime/cases/logrotate.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -17,7 +19,7 @@ class LogrotateTest(OERuntimeTestCase):
17 19
18 @classmethod 20 @classmethod
19 def tearDownClass(cls): 21 def tearDownClass(cls):
20 cls.tc.target.run('mv -f $HOME/wtmp.oeqabak /etc/logrotate.d/wtmp && rm -rf $HOME/logrotate_dir') 22 cls.tc.target.run('mv -f $HOME/wtmp.oeqabak /etc/logrotate.d/wtmp && rm -rf /var/log//logrotate_dir')
21 cls.tc.target.run('rm -rf /var/log/logrotate_testfile && rm -rf /etc/logrotate.d/logrotate_testfile') 23 cls.tc.target.run('rm -rf /var/log/logrotate_testfile && rm -rf /etc/logrotate.d/logrotate_testfile')
22 24
23 @OETestDepends(['ssh.SSHTest.test_ssh']) 25 @OETestDepends(['ssh.SSHTest.test_ssh'])
@@ -29,17 +31,17 @@ class LogrotateTest(OERuntimeTestCase):
29 msg = ('Could not create/update /var/log/wtmp with touch') 31 msg = ('Could not create/update /var/log/wtmp with touch')
30 self.assertEqual(status, 0, msg = msg) 32 self.assertEqual(status, 0, msg = msg)
31 33
32 status, output = self.target.run('mkdir $HOME/logrotate_dir') 34 status, output = self.target.run('mkdir /var/log//logrotate_dir')
33 msg = ('Could not create logrotate_dir. Output: %s' % output) 35 msg = ('Could not create logrotate_dir. Output: %s' % output)
34 self.assertEqual(status, 0, msg = msg) 36 self.assertEqual(status, 0, msg = msg)
35 37
36 status, output = self.target.run('echo "create \n olddir $HOME/logrotate_dir \n include /etc/logrotate.d/wtmp" > /tmp/logrotate-test.conf') 38 status, output = self.target.run('echo "create \n olddir /var/log//logrotate_dir \n include /etc/logrotate.d/wtmp" > /tmp/logrotate-test.conf')
37 msg = ('Could not write to /tmp/logrotate-test.conf') 39 msg = ('Could not write to /tmp/logrotate-test.conf')
38 self.assertEqual(status, 0, msg = msg) 40 self.assertEqual(status, 0, msg = msg)
39 41
40 # If logrotate fails to rotate the log, view the verbose output of logrotate to see what prevented it 42 # If logrotate fails to rotate the log, view the verbose output of logrotate to see what prevented it
41 _, logrotate_output = self.target.run('logrotate -vf /tmp/logrotate-test.conf') 43 _, logrotate_output = self.target.run('logrotate -vf /tmp/logrotate-test.conf')
42 status, _ = self.target.run('find $HOME/logrotate_dir -type f | grep wtmp.1') 44 status, _ = self.target.run('find /var/log//logrotate_dir -type f | grep wtmp.1')
43 msg = ("logrotate did not successfully rotate the wtmp log. Output from logrotate -vf: \n%s" % (logrotate_output)) 45 msg = ("logrotate did not successfully rotate the wtmp log. Output from logrotate -vf: \n%s" % (logrotate_output))
44 self.assertEqual(status, 0, msg = msg) 46 self.assertEqual(status, 0, msg = msg)
45 47
@@ -54,17 +56,17 @@ class LogrotateTest(OERuntimeTestCase):
54 msg = ('Could not write to /etc/logrotate.d/logrotate_testfile') 56 msg = ('Could not write to /etc/logrotate.d/logrotate_testfile')
55 self.assertEqual(status, 0, msg = msg) 57 self.assertEqual(status, 0, msg = msg)
56 58
57 status, output = self.target.run('echo "create \n olddir $HOME/logrotate_dir \n include /etc/logrotate.d/logrotate_testfile" > /tmp/logrotate-test2.conf') 59 status, output = self.target.run('echo "create \n olddir /var/log//logrotate_dir \n include /etc/logrotate.d/logrotate_testfile" > /tmp/logrotate-test2.conf')
58 msg = ('Could not write to /tmp/logrotate_test2.conf') 60 msg = ('Could not write to /tmp/logrotate_test2.conf')
59 self.assertEqual(status, 0, msg = msg) 61 self.assertEqual(status, 0, msg = msg)
60 62
61 status, output = self.target.run('find $HOME/logrotate_dir -type f | grep logrotate_testfile.1') 63 status, output = self.target.run('find /var/log//logrotate_dir -type f | grep logrotate_testfile.1')
62 msg = ('A rotated log for logrotate_testfile is already present in logrotate_dir') 64 msg = ('A rotated log for logrotate_testfile is already present in logrotate_dir')
63 self.assertEqual(status, 1, msg = msg) 65 self.assertEqual(status, 1, msg = msg)
64 66
65 # If logrotate fails to rotate the log, view the verbose output of logrotate instead of just listing the files in olddir 67 # If logrotate fails to rotate the log, view the verbose output of logrotate instead of just listing the files in olddir
66 _, logrotate_output = self.target.run('logrotate -vf /tmp/logrotate-test2.conf') 68 _, logrotate_output = self.target.run('logrotate -vf /tmp/logrotate-test2.conf')
67 status, _ = self.target.run('find $HOME/logrotate_dir -type f | grep logrotate_testfile.1') 69 status, _ = self.target.run('find /var/log//logrotate_dir -type f | grep logrotate_testfile.1')
68 msg = ('logrotate did not successfully rotate the logrotate_test log. Output from logrotate -vf: \n%s' % (logrotate_output)) 70 msg = ('logrotate did not successfully rotate the logrotate_test log. Output from logrotate -vf: \n%s' % (logrotate_output))
69 self.assertEqual(status, 0, msg = msg) 71 self.assertEqual(status, 0, msg = msg)
70 72
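For reference, the configuration the test assembles on the target amounts to the following; olddir is what redirects the rotated copy (wtmp.1) into /var/log/logrotate_dir, which the later find|grep asserts on. A sketch written from Python for readability (not part of the patch; the test itself builds the file with echo on the target):

    config = (
        "create\n"
        "olddir /var/log/logrotate_dir\n"
        "include /etc/logrotate.d/wtmp\n"
    )
    with open("/tmp/logrotate-test.conf", "w") as f:
        f.write(config)
    # Then, on the target: logrotate -vf /tmp/logrotate-test.conf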
diff --git a/meta/lib/oeqa/runtime/cases/ltp.py b/meta/lib/oeqa/runtime/cases/ltp.py
index a66d5d13d7..e81360670c 100644
--- a/meta/lib/oeqa/runtime/cases/ltp.py
+++ b/meta/lib/oeqa/runtime/cases/ltp.py
@@ -57,37 +57,47 @@ class LtpTestBase(OERuntimeTestCase):
57 57
58class LtpTest(LtpTestBase): 58class LtpTest(LtpTestBase):
59 59
60 ltp_groups = ["math", "syscalls", "dio", "io", "mm", "ipc", "sched", "nptl", "pty", "containers", "controllers", "filecaps", "cap_bounds", "fcntl-locktests", "connectors", "commands", "net.ipv6_lib", "input","fs_perms_simple"] 60 ltp_groups = ["math", "syscalls", "dio", "mm", "ipc", "sched", "nptl", "pty", "containers", "controllers", "fcntl-locktests", "commands", "net.ipv6_lib", "input","fs_perms_simple", "cve", "crypto", "ima", "net.nfs", "net_stress.ipsec_icmp", "net.ipv6", "numa", "uevent", "ltp-aiodio.part1", "ltp-aiodio.part2", "ltp-aiodio.part3", "ltp-aiodio.part4"]
61 61
62 ltp_fs = ["fs", "fsx", "fs_bind"] 62 ltp_fs = ["fs", "fs_bind"]
63 # skip kernel cpuhotplug 63 # skip kernel cpuhotplug
64 ltp_kernel = ["power_management_tests", "hyperthreading ", "kernel_misc", "hugetlb"] 64 ltp_kernel = ["power_management_tests", "hyperthreading ", "kernel_misc", "hugetlb"]
65 ltp_groups += ltp_fs 65 ltp_groups += ltp_fs
66 66
67 def runltp(self, ltp_group): 67 def runltp(self, ltp_group):
68 cmd = '/opt/ltp/runltp -f %s -p -q -r /opt/ltp -l /opt/ltp/results/%s -I 1 -d /opt/ltp' % (ltp_group, ltp_group) 68 # LTP appends to log files, so ensure we start with a clean log
69 self.target.deleteFiles("/opt/ltp/results/", ltp_group)
70
71 cmd = '/opt/ltp/runltp -f %s -q -r /opt/ltp -l /opt/ltp/results/%s -I 1 -d /opt/ltp' % (ltp_group, ltp_group)
72
69 starttime = time.time() 73 starttime = time.time()
70 (status, output) = self.target.run(cmd) 74 (status, output) = self.target.run(cmd, timeout=1200)
71 endtime = time.time() 75 endtime = time.time()
72 76
77 # status of 1 is 'just' tests failing. 255 likely was a command output timeout
78 if status and status != 1:
79 msg = 'Command %s returned exit code %s' % (cmd, status)
80 self.target.logger.warning(msg)
81
82 # Write the console log to disk for convenience
73 with open(os.path.join(self.ltptest_log_dir, "%s-raw.log" % ltp_group), 'w') as f: 83 with open(os.path.join(self.ltptest_log_dir, "%s-raw.log" % ltp_group), 'w') as f:
74 f.write(output) 84 f.write(output)
75 85
86 # Also put the console log into the test result JSON
76 self.extras['ltpresult.rawlogs']['log'] = self.extras['ltpresult.rawlogs']['log'] + output 87 self.extras['ltpresult.rawlogs']['log'] = self.extras['ltpresult.rawlogs']['log'] + output
77 88
78 # copy nice log from DUT 89 # Copy the machine-readable test results locally so we can parse it
79 dst = os.path.join(self.ltptest_log_dir, "%s" % ltp_group ) 90 dst = os.path.join(self.ltptest_log_dir, ltp_group)
80 remote_src = "/opt/ltp/results/%s" % ltp_group 91 remote_src = "/opt/ltp/results/%s" % ltp_group
81 (status, output) = self.target.copyFrom(remote_src, dst, True) 92 (status, output) = self.target.copyFrom(remote_src, dst, True)
82 msg = 'File could not be copied. Output: %s' % output
83 if status: 93 if status:
94 msg = 'File could not be copied. Output: %s' % output
84 self.target.logger.warning(msg) 95 self.target.logger.warning(msg)
85 96
86 parser = LtpParser() 97 parser = LtpParser()
87 results, sections = parser.parse(dst) 98 results, sections = parser.parse(dst)
88 99
89 runtime = int(endtime-starttime) 100 sections['duration'] = int(endtime-starttime)
90 sections['duration'] = runtime
91 self.sections[ltp_group] = sections 101 self.sections[ltp_group] = sections
92 102
93 failed_tests = {} 103 failed_tests = {}
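A minimal sketch of the run-and-flag pattern the updated runltp() uses (not part of the patch; run() and logger are hypothetical stand-ins for self.target.run() and self.target.logger):

    def run_ltp_group(run, logger, ltp_group):
        # LTP appends to its result files, so stale logs must be removed first
        # or the parser would also see results from a previous run.
        cmd = ('/opt/ltp/runltp -f %s -q -r /opt/ltp -l /opt/ltp/results/%s '
               '-I 1 -d /opt/ltp') % (ltp_group, ltp_group)
        status, output = run(cmd, timeout=1200)
        # Exit code 1 just means some individual tests failed; anything else
        # (255 is typically a command output timeout) suggests the run itself
        # broke, so flag it in the log.
        if status and status != 1:
            logger.warning('Command %s returned exit code %s' % (cmd, status))
        return output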
diff --git a/meta/lib/oeqa/runtime/cases/ltp_stress.py b/meta/lib/oeqa/runtime/cases/ltp_stress.py
index 2445ffbc93..ce6f4bf59d 100644
--- a/meta/lib/oeqa/runtime/cases/ltp_stress.py
+++ b/meta/lib/oeqa/runtime/cases/ltp_stress.py
@@ -89,8 +89,7 @@ class LtpStressTest(LtpStressBase):
89 89
90 # LTP stress runtime tests 90 # LTP stress runtime tests
91 # 91 #
92 @skipIfQemu('qemuall', 'Test only runs on real hardware') 92 @skipIfQemu()
93
94 @OETestDepends(['ssh.SSHTest.test_ssh']) 93 @OETestDepends(['ssh.SSHTest.test_ssh'])
95 @OEHasPackage(["ltp"]) 94 @OEHasPackage(["ltp"])
96 def test_ltp_stress(self): 95 def test_ltp_stress(self):
diff --git a/meta/lib/oeqa/runtime/cases/maturin.py b/meta/lib/oeqa/runtime/cases/maturin.py
new file mode 100644
index 0000000000..4e6384fe5e
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/maturin.py
@@ -0,0 +1,58 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8
9from oeqa.runtime.case import OERuntimeTestCase
10from oeqa.core.decorator.depends import OETestDepends
11from oeqa.runtime.decorator.package import OEHasPackage
12
13
14class MaturinTest(OERuntimeTestCase):
15 @OETestDepends(['ssh.SSHTest.test_ssh', 'python.PythonTest.test_python3'])
16 @OEHasPackage(['python3-maturin'])
17 def test_maturin_list_python(self):
18 status, output = self.target.run("maturin list-python")
19 self.assertEqual(status, 0)
20 _, py_major = self.target.run("python3 -c 'import sys; print(sys.version_info.major)'")
21 _, py_minor = self.target.run("python3 -c 'import sys; print(sys.version_info.minor)'")
22 python_version = "%s.%s" % (py_major, py_minor)
23 self.assertEqual(output, "🐍 1 python interpreter found:\n"
24 " - CPython %s at /usr/bin/python%s" % (python_version, python_version))
25
26
27class MaturinDevelopTest(OERuntimeTestCase):
28 @classmethod
29 def setUp(cls):
30 dst = '/tmp'
31 src = os.path.join(cls.tc.files_dir, "maturin/guessing-game")
32 cls.tc.target.copyTo(src, dst)
33
34 @classmethod
35 def tearDown(cls):
36 cls.tc.target.run('rm -rf %s' % '/tmp/guessing-game/target')
37
38 @OETestDepends(['ssh.SSHTest.test_ssh', 'python.PythonTest.test_python3'])
39 @OEHasPackage(['python3-maturin'])
40 def test_maturin_develop(self):
41 """
42 This test case requires:
 43 (1) that a .venv can be created.
44 (2) DNS nameserver to resolve crate URIs for fetching
45 (3) a functional 'rustc' and 'cargo'
46 """
47 targetdir = os.path.join("/tmp", "guessing-game")
48 self.target.run("cd %s; python3 -m venv .venv" % targetdir)
49 self.target.run("echo 'nameserver 8.8.8.8' > /etc/resolv.conf")
50 cmd = "cd %s; maturin develop" % targetdir
51 status, output = self.target.run(cmd)
52 self.assertRegex(output, r"🔗 Found pyo3 bindings with abi3 support for Python ≥ 3.8")
53 self.assertRegex(output, r"🐍 Not using a specific python interpreter")
54 self.assertRegex(output, r"📡 Using build options features from pyproject.toml")
55 self.assertRegex(output, r"Compiling guessing-game v0.1.0")
56 self.assertRegex(output, r"📦 Built wheel for abi3 Python ≥ 3.8")
57 self.assertRegex(output, r"🛠 Installed guessing-game-0.1.0")
58 self.assertEqual(status, 0)
diff --git a/meta/lib/oeqa/runtime/cases/multilib.py b/meta/lib/oeqa/runtime/cases/multilib.py
index 0d1b9ae2c9..68556e45c5 100644
--- a/meta/lib/oeqa/runtime/cases/multilib.py
+++ b/meta/lib/oeqa/runtime/cases/multilib.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/oe_syslog.py b/meta/lib/oeqa/runtime/cases/oe_syslog.py
index f3c2bedbaf..adb876160d 100644
--- a/meta/lib/oeqa/runtime/cases/oe_syslog.py
+++ b/meta/lib/oeqa/runtime/cases/oe_syslog.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -114,18 +116,23 @@ class SyslogTestConfig(OERuntimeTestCase):
114 @OETestDepends(['oe_syslog.SyslogTestConfig.test_syslog_logger']) 116 @OETestDepends(['oe_syslog.SyslogTestConfig.test_syslog_logger'])
115 @OEHasPackage(["busybox-syslog"]) 117 @OEHasPackage(["busybox-syslog"])
116 @skipIfDataVar('VIRTUAL-RUNTIME_init_manager', 'systemd', 118 @skipIfDataVar('VIRTUAL-RUNTIME_init_manager', 'systemd',
117 'Not appropiate for systemd image') 119 'Not appropriate for systemd image')
118 def test_syslog_startup_config(self): 120 def test_syslog_startup_config(self):
119 cmd = 'echo "LOGFILE=/var/log/test" >> /etc/syslog-startup.conf' 121 cmd = 'echo "LOGFILE=/var/log/test" >> /etc/syslog-startup.conf'
120 self.target.run(cmd) 122 self.target.run(cmd)
121 123
122 self.test_syslog_restart() 124 self.test_syslog_restart()
123 125
124 cmd = 'logger foobar && grep foobar /var/log/test' 126 cmd = 'logger foobar'
125 status,output = self.target.run(cmd) 127 status, output = self.target.run(cmd)
126 msg = 'Test log string not found. Output: %s ' % output 128 msg = 'Logger command failed, %s. Output: %s ' % (status, output)
127 self.assertEqual(status, 0, msg=msg) 129 self.assertEqual(status, 0, msg=msg)
128 130
131 cmd = 'cat /var/log/test'
132 status, output = self.target.run(cmd)
133 if "foobar" not in output or status:
134 self.fail("'foobar' not found in logfile, status %s, contents %s" % (status, output))
135
129 cmd = "sed -i 's#LOGFILE=/var/log/test##' /etc/syslog-startup.conf" 136 cmd = "sed -i 's#LOGFILE=/var/log/test##' /etc/syslog-startup.conf"
130 self.target.run(cmd) 137 self.target.run(cmd)
131 self.test_syslog_restart() 138 self.test_syslog_restart()
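Splitting the old compound 'logger foobar && grep foobar /var/log/test' into separate steps is what lets the test report which half broke. A minimal sketch of the pattern (not part of the patch; run() stands in for self.target.run()):

    def check_syslog_delivery(run):
        # Step 1: does logging itself work?
        status, output = run('logger foobar')
        assert status == 0, 'Logger command failed, %s. Output: %s' % (status, output)

        # Step 2: did syslog deliver the message to the configured LOGFILE?
        status, output = run('cat /var/log/test')
        assert status == 0 and 'foobar' in output, \
            "'foobar' not found in logfile, status %s, contents %s" % (status, output)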
diff --git a/meta/lib/oeqa/runtime/cases/opkg.py b/meta/lib/oeqa/runtime/cases/opkg.py
index 9cfee1cd88..a29c93e59a 100644
--- a/meta/lib/oeqa/runtime/cases/opkg.py
+++ b/meta/lib/oeqa/runtime/cases/opkg.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/pam.py b/meta/lib/oeqa/runtime/cases/pam.py
index a482ded945..b3e8b56c3c 100644
--- a/meta/lib/oeqa/runtime/cases/pam.py
+++ b/meta/lib/oeqa/runtime/cases/pam.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-common.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-common.txt
new file mode 100644
index 0000000000..f91abbc941
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-common.txt
@@ -0,0 +1,62 @@
1# Xserver explains what the short codes mean
2(WW) warning, (EE) error, (NI) not implemented, (??) unknown.
3
4# Xserver warns if compiled with ACPI but no acpid running
5Open ACPI failed (/var/run/acpid.socket) (No such file or directory)
6
7# Some machines (eg qemux86) don't enable PAE (they probably should though)
8NX (Execute Disable) protection cannot be enabled: non-PAE kernel!
9
10# Connman's pacrunner warns if external connectivity isn't available
11Failed to find URL:http://ipv4.connman.net/online/status.html
12Failed to find URL:http://ipv6.connman.net/online/status.html
13
14# x86 on 6.6+ outputs this message, it is informational, not an error
15ACPI: _OSC evaluation for CPUs failed, trying _PDC
16
17# These should be reviewed to see if they are still needed
18dma timeout
19can't add hid device:
20usbhid: probe of
21_OSC failed (AE_ERROR)
22_OSC failed (AE_SUPPORT)
23AE_ALREADY_EXISTS
24ACPI _OSC request failed (AE_SUPPORT)
25can't disable ASPM
26Failed to load module "vesa"
27Failed to load module "modesetting"
28Failed to load module "glx"
29Failed to load module "fbdev"
30Failed to load module "ati"
31[drm] Cannot find any crtc or sizes
32_OSC failed (AE_NOT_FOUND); disabling ASPM
33hd.: possibly failed opcode
34NETLINK INITIALIZATION FAILED
35kernel: Cannot find map file
36omap_hwmod: debugss: _wait_target_disable failed
37VGA arbiter: cannot open kernel arbiter, no multi-card support
38Online check failed for
39netlink init failed
40Fast TSC calibration
41controller can't do DEVSLP, turning off
42stmmac_dvr_probe: warning: cannot get CSR clock
43error: couldn't mount because of unsupported optional features
44GPT: Use GNU Parted to correct GPT errors
45Cannot set xattr user.Librepo.DownloadInProgress
46Failed to read /var/lib/nfs/statd/state: Success
47error retry time-out =
48logind: cannot setup systemd-logind helper (-61), using legacy fallback
49Failed to rename network interface
50Failed to process device, ignoring: Device or resource busy
51Cannot find a map file
52[rdrand]: Initialization Failed
53[rndr ]: Initialization Failed
54[pulseaudio] authkey.c: Failed to open cookie file
55[pulseaudio] authkey.c: Failed to load authentication key
56was skipped because of a failed condition check
57was skipped because all trigger condition checks failed
58xf86OpenConsole: Switching VT failed
59Failed to read LoaderConfigTimeoutOneShot variable, ignoring: Operation not supported
60Failed to read LoaderEntryOneShot variable, ignoring: Operation not supported
61Direct firmware load for regulatory.db
62failed to load regulatory.db
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt
new file mode 100644
index 0000000000..156b0f9c10
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt
@@ -0,0 +1,19 @@
1# These should be reviewed to see if they are still needed
2cacheinfo: Failed to find cpu0 device node
3
4# 6.10 restructures sysctl registration such that mips
5# registers an empty table and generates harmless warnings:
6# failed when register_sysctl_sz sched_fair_sysctls to kernel
7# failed when register_sysctl_sz sched_core_sysctls to kernel
8failed when register_sysctl_sz sched
9
10# With qemu 9.1.0
11# pci 0000:00:00.0: BAR 2: can't handle BAR above 4GB (bus address 0x1f00000010)
12# pci 0000:00:00.0: BAR 5: error updating (0x1105d034 != 0x0100d034)
13BAR 0: error updating
14BAR 1: error updating
15BAR 2: error updating
16BAR 3: error updating
17BAR 4: error updating
18BAR 5: error updating
19: can't handle BAR above 4GB
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt
new file mode 100644
index 0000000000..143db40d63
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt
@@ -0,0 +1,35 @@
1# psplash
2FBIOPUT_VSCREENINFO failed, double buffering disabled
3
4# PCI host bridge to bus 0000:00
5# pci_bus 0000:00: root bus resource [mem 0x10000000-0x17ffffff]
6# pci_bus 0000:00: root bus resource [io 0x1000-0x1fffff]
7# pci_bus 0000:00: No busn resource found for root bus, will use [bus 00-ff]
8# pci 0000:00:00.0: [2046:ab11] type 00 class 0x100000
9# pci 0000:00:00.0: [Firmware Bug]: reg 0x10: invalid BAR (can't size)
10# pci 0000:00:00.0: [Firmware Bug]: reg 0x14: invalid BAR (can't size)
11# pci 0000:00:00.0: [Firmware Bug]: reg 0x18: invalid BAR (can't size)
12# pci 0000:00:00.0: [Firmware Bug]: reg 0x1c: invalid BAR (can't size)
13# pci 0000:00:00.0: [Firmware Bug]: reg 0x20: invalid BAR (can't size)
14# pci 0000:00:00.0: [Firmware Bug]: reg 0x24: invalid BAR (can't size)
15invalid BAR (can't size)
16# 6.10+ the invalid BAR warnings are of this format:
17# pci 0000:00:00.0: [Firmware Bug]: BAR 0: invalid; can't size
18# pci 0000:00:00.0: [Firmware Bug]: BAR 1: invalid; can't size
19# pci 0000:00:00.0: [Firmware Bug]: BAR 2: invalid; can't size
20# pci 0000:00:00.0: [Firmware Bug]: BAR 3: invalid; can't size
21# pci 0000:00:00.0: [Firmware Bug]: BAR 4: invalid; can't size
22# pci 0000:00:00.0: [Firmware Bug]: BAR 5: invalid; can't size
23invalid; can't size
24
25# These should be reviewed to see if they are still needed
26wrong ELF class
27fail to add MMCONFIG information, can't access extended PCI configuration space under this bridge
28can't claim BAR
29amd_nb: Cannot enumerate AMD northbridges
30tsc: HPET/PMTIMER calibration failed
31modeset(0): Failed to initialize the DRI2 extension
32glamor initialization failed
33blk_update_request: I/O error, dev fd0, sector 0 op 0x0:(READ)
34floppy: error
35failed to IDENTIFY (I/O error, err_mask=0x4)
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarm64.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarm64.txt
new file mode 100644
index 0000000000..260cdde620
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarm64.txt
@@ -0,0 +1,6 @@
1# These should be reviewed to see if they are still needed
2Fatal server error:
3(EE) Server terminated with error (1). Closing log file.
4dmi: Firmware registration failed.
5irq: type mismatch, failed to map hwirq-27 for /intc
6logind: failed to get session seat \ No newline at end of file
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarmv5.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarmv5.txt
new file mode 100644
index 0000000000..ed91107b7d
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarmv5.txt
@@ -0,0 +1,19 @@
1# Code is 2 JENT_ECOARSETIME: Timer too coarse for RNG.
2jitterentropy: Initialization failed with host not compliant with requirements: 2
3
4# These should be reviewed to see if they are still needed
5mmci-pl18x: probe of fpga:05 failed with error -22
6mmci-pl18x: probe of fpga:0b failed with error -22
7
8OF: amba_device_add() failed (-19) for /amba/smc@10100000
9OF: amba_device_add() failed (-19) for /amba/mpmc@10110000
10OF: amba_device_add() failed (-19) for /amba/sctl@101e0000
11OF: amba_device_add() failed (-19) for /amba/watchdog@101e1000
12OF: amba_device_add() failed (-19) for /amba/sci@101f0000
13OF: amba_device_add() failed (-19) for /amba/spi@101f4000
14OF: amba_device_add() failed (-19) for /amba/ssp@101f4000
15OF: amba_device_add() failed (-19) for /amba/fpga/sci@a000
16Failed to initialize '/amba/timer@101e3000': -22
17
18clcd-pl11x: probe of 10120000.display failed with error -2
19arm-charlcd 10008000.lcd: error -ENXIO: IRQ index 0 not found
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc.txt
new file mode 100644
index 0000000000..d9b58b58f1
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc.txt
@@ -0,0 +1,6 @@
1# These should be reviewed to see if they are still needed
2PCI 0000:00 Cannot reserve Legacy IO [io 0x0000-0x0fff]
3host side 80-wire cable detection failed, limiting max speed
4mode "640x480" test failed
5can't handle BAR above 4GB
6Cannot reserve Legacy IO \ No newline at end of file
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc64.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc64.txt
new file mode 100644
index 0000000000..b736a2aeb7
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc64.txt
@@ -0,0 +1,4 @@
1# These should be reviewed to see if they are still needed
2vio vio: uevent: failed to send synthetic uevent
3synth uevent: /devices/vio: failed to send uevent
4PCI 0000:00 Cannot reserve Legacy IO [io 0x10000-0x10fff] \ No newline at end of file
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemux86.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemux86.txt
new file mode 100644
index 0000000000..ebb76f1221
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemux86.txt
@@ -0,0 +1,2 @@
1# These should be reviewed to see if they are still needed
2Failed to access perfctr msr (MSR
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-x86.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-x86.txt
new file mode 100644
index 0000000000..5985247daf
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-x86.txt
@@ -0,0 +1,10 @@
1# These should be reviewed to see if they are still needed
2[drm:psb_do_init] *ERROR* Debug is
3wrong ELF class
4Could not enable PowerButton event
5probe of LNXPWRBN:00 failed with error -22
6pmd_set_huge: Cannot satisfy
7failed to setup card detect gpio
8amd_nb: Cannot enumerate AMD northbridges
9failed to retrieve link info, disabling eDP
10Direct firmware load for iwlwifi
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-x86_64.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-x86_64.txt
new file mode 120000
index 0000000000..404e384c32
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-x86_64.txt
@@ -0,0 +1 @@
parselogs-ignores-x86.txt \ No newline at end of file
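These ignore files share one convention: one case-insensitive error substring per line, with blank lines and '#' comments skipped, as the parselogs.py rewrite below implements in load_machine_ignores(). A minimal sketch of that loading rule in isolation (not part of the patch):

    def load_ignores(path):
        ignores = []
        with open(path, encoding='utf-8') as f:
            for line in f:
                line = line.strip()
                if line and not line.startswith('#'):
                    # Stored casefolded so matching against log lines can be
                    # case-insensitive.
                    ignores.append(line.casefold())
        return ignores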
diff --git a/meta/lib/oeqa/runtime/cases/parselogs.py b/meta/lib/oeqa/runtime/cases/parselogs.py
index a1791b5cca..47c77fccd5 100644
--- a/meta/lib/oeqa/runtime/cases/parselogs.py
+++ b/meta/lib/oeqa/runtime/cases/parselogs.py
@@ -1,204 +1,49 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
7import collections
5import os 8import os
9import sys
6 10
7from subprocess import check_output
8from shutil import rmtree 11from shutil import rmtree
9from oeqa.runtime.case import OERuntimeTestCase 12from oeqa.runtime.case import OERuntimeTestCase
10from oeqa.core.decorator.depends import OETestDepends 13from oeqa.core.decorator.depends import OETestDepends
11from oeqa.core.decorator.data import skipIfDataVar
12from oeqa.runtime.decorator.package import OEHasPackage
13
14#in the future these lists could be moved outside of module
15errors = ["error", "cannot", "can\'t", "failed"]
16
17common_errors = [
18 "(WW) warning, (EE) error, (NI) not implemented, (??) unknown.",
19 "dma timeout",
20 "can\'t add hid device:",
21 "usbhid: probe of ",
22 "_OSC failed (AE_ERROR)",
23 "_OSC failed (AE_SUPPORT)",
24 "AE_ALREADY_EXISTS",
25 "ACPI _OSC request failed (AE_SUPPORT)",
26 "can\'t disable ASPM",
27 "Failed to load module \"vesa\"",
28 "Failed to load module vesa",
29 "Failed to load module \"modesetting\"",
30 "Failed to load module modesetting",
31 "Failed to load module \"glx\"",
32 "Failed to load module \"fbdev\"",
33 "Failed to load module fbdev",
34 "Failed to load module glx",
35 "[drm] Cannot find any crtc or sizes - going 1024x768",
36 "_OSC failed (AE_NOT_FOUND); disabling ASPM",
37 "Open ACPI failed (/var/run/acpid.socket) (No such file or directory)",
38 "NX (Execute Disable) protection cannot be enabled: non-PAE kernel!",
39 "hd.: possibly failed opcode",
40 'NETLINK INITIALIZATION FAILED',
41 'kernel: Cannot find map file',
42 'omap_hwmod: debugss: _wait_target_disable failed',
43 'VGA arbiter: cannot open kernel arbiter, no multi-card support',
44 'Failed to find URL:http://ipv4.connman.net/online/status.html',
45 'Online check failed for',
46 'netlink init failed',
47 'Fast TSC calibration',
48 "BAR 0-9",
49 "Failed to load module \"ati\"",
50 "controller can't do DEVSLP, turning off",
51 "stmmac_dvr_probe: warning: cannot get CSR clock",
52 "error: couldn\'t mount because of unsupported optional features",
53 "GPT: Use GNU Parted to correct GPT errors",
54 "Cannot set xattr user.Librepo.DownloadInProgress",
55 "Failed to read /var/lib/nfs/statd/state: Success",
56 "error retry time-out =",
57 "logind: cannot setup systemd-logind helper (-61), using legacy fallback",
58 "Failed to rename network interface",
59 "Failed to process device, ignoring: Device or resource busy",
60 "Cannot find a map file",
61 "[rdrand]: Initialization Failed",
62 "[pulseaudio] authkey.c: Failed to open cookie file",
63 "[pulseaudio] authkey.c: Failed to load authentication key",
64 ]
65 14
66video_related = [ 15# importlib.resources.open_text in Python <3.10 doesn't search all directories
67] 16# when a package is split across multiple directories. Until we can rely on
17# 3.10+, reimplement the searching logic.
18if sys.version_info < (3, 10):
19 def _open_text(package, resource):
20 import importlib, pathlib
21 module = importlib.import_module(package)
22 for path in module.__path__:
23 candidate = pathlib.Path(path) / resource
24 if candidate.exists():
25 return candidate.open(encoding='utf-8')
26 raise FileNotFoundError
27else:
28 from importlib.resources import open_text as _open_text
68 29
69x86_common = [
70 '[drm:psb_do_init] *ERROR* Debug is',
71 'wrong ELF class',
72 'Could not enable PowerButton event',
73 'probe of LNXPWRBN:00 failed with error -22',
74 'pmd_set_huge: Cannot satisfy',
75 'failed to setup card detect gpio',
76 'amd_nb: Cannot enumerate AMD northbridges',
77 'failed to retrieve link info, disabling eDP',
78 'Direct firmware load for iwlwifi',
79 'Direct firmware load for regulatory.db',
80 'failed to load regulatory.db',
81] + common_errors
82 30
83qemux86_common = [ 31class ParseLogsTest(OERuntimeTestCase):
84 'wrong ELF class',
85 "fail to add MMCONFIG information, can't access extended PCI configuration space under this bridge.",
86 "can't claim BAR ",
87 'amd_nb: Cannot enumerate AMD northbridges',
88 'tsc: HPET/PMTIMER calibration failed',
89 "modeset(0): Failed to initialize the DRI2 extension",
90 "glamor initialization failed",
91] + common_errors
92 32
93ignore_errors = { 33 # Which log files should be collected
94 'default' : common_errors, 34 log_locations = ["/var/log/", "/var/log/dmesg", "/tmp/dmesg_output.log"]
95 'qemux86' : [
96 'Failed to access perfctr msr (MSR',
97 'pci 0000:00:00.0: [Firmware Bug]: reg 0x..: invalid BAR (can\'t size)',
98 ] + qemux86_common,
99 'qemux86-64' : qemux86_common,
100 'qemumips' : [
101 'Failed to load module "glx"',
102 'pci 0000:00:00.0: [Firmware Bug]: reg 0x..: invalid BAR (can\'t size)',
103 'cacheinfo: Failed to find cpu0 device node',
104 ] + common_errors,
105 'qemumips64' : [
106 'pci 0000:00:00.0: [Firmware Bug]: reg 0x..: invalid BAR (can\'t size)',
107 'cacheinfo: Failed to find cpu0 device node',
108 ] + common_errors,
109 'qemuppc' : [
110 'PCI 0000:00 Cannot reserve Legacy IO [io 0x0000-0x0fff]',
111 'host side 80-wire cable detection failed, limiting max speed',
112 'mode "640x480" test failed',
113 'Failed to load module "glx"',
114 'can\'t handle BAR above 4GB',
115 'Cannot reserve Legacy IO',
116 ] + common_errors,
117 'qemuarm' : [
118 'mmci-pl18x: probe of fpga:05 failed with error -22',
119 'mmci-pl18x: probe of fpga:0b failed with error -22',
120 'Failed to load module "glx"',
121 'OF: amba_device_add() failed (-19) for /amba/smc@10100000',
122 'OF: amba_device_add() failed (-19) for /amba/mpmc@10110000',
123 'OF: amba_device_add() failed (-19) for /amba/sctl@101e0000',
124 'OF: amba_device_add() failed (-19) for /amba/watchdog@101e1000',
125 'OF: amba_device_add() failed (-19) for /amba/sci@101f0000',
126 'OF: amba_device_add() failed (-19) for /amba/spi@101f4000',
127 'OF: amba_device_add() failed (-19) for /amba/ssp@101f4000',
128 'OF: amba_device_add() failed (-19) for /amba/fpga/sci@a000',
129 'Failed to initialize \'/amba/timer@101e3000\': -22',
130 'jitterentropy: Initialization failed with host not compliant with requirements: 2',
131 ] + common_errors,
132 'qemuarm64' : [
133 'Fatal server error:',
134 '(EE) Server terminated with error (1). Closing log file.',
135 'dmi: Firmware registration failed.',
136 'irq: type mismatch, failed to map hwirq-27 for /intc',
137 'logind: failed to get session seat',
138 ] + common_errors,
139 'intel-core2-32' : [
140 'ACPI: No _BQC method, cannot determine initial brightness',
141 '[Firmware Bug]: ACPI: No _BQC method, cannot determine initial brightness',
142 '(EE) Failed to load module "psb"',
143 '(EE) Failed to load module psb',
144 '(EE) Failed to load module "psbdrv"',
145 '(EE) Failed to load module psbdrv',
146 '(EE) open /dev/fb0: No such file or directory',
147 '(EE) AIGLX: reverting to software rendering',
148 'dmi: Firmware registration failed.',
149 'ioremap error for 0x78',
150 ] + x86_common,
151 'intel-corei7-64' : [
152 'can\'t set Max Payload Size to 256',
153 'intel_punit_ipc: can\'t request region for resource',
154 '[drm] parse error at position 4 in video mode \'efifb\'',
155 'ACPI Error: Could not enable RealTimeClock event',
156 'ACPI Warning: Could not enable fixed event - RealTimeClock',
157 'hci_intel INT33E1:00: Unable to retrieve gpio',
158 'hci_intel: probe of INT33E1:00 failed',
159 'can\'t derive routing for PCI INT A',
160 'failed to read out thermal zone',
161 'Bluetooth: hci0: Setting Intel event mask failed',
162 'ttyS2 - failed to request DMA',
163 'Bluetooth: hci0: Failed to send firmware data (-38)',
164 'atkbd serio0: Failed to enable keyboard on isa0060/serio0',
165 ] + x86_common,
166 'genericx86' : x86_common,
167 'genericx86-64' : [
168 'Direct firmware load for i915',
169 'Failed to load firmware i915',
170 'Failed to fetch GuC',
171 'Failed to initialize GuC',
172 'Failed to load DMC firmware',
173 'The driver is built-in, so to load the firmware you need to',
174 ] + x86_common,
175 'edgerouter' : [
176 'not creating \'/sys/firmware/fdt\'',
177 'Failed to find cpu0 device node',
178 'Fatal server error:',
179 'Server terminated with error',
180 ] + common_errors,
181 'beaglebone-yocto' : [
182 'Direct firmware load for regulatory.db',
183 'failed to load regulatory.db',
184 'l4_wkup_cm',
185 'Failed to load module "glx"',
186 'Failed to make EGL context current',
187 'glamor initialization failed',
188 ] + common_errors,
189}
190 35
191log_locations = ["/var/log/","/var/log/dmesg", "/tmp/dmesg_output.log"] 36 # The keywords that identify error messages in the log files
37 errors = ["error", "cannot", "can't", "failed", "---[ cut here ]---", "No irq handler for vector"]
192 38
193class ParseLogsTest(OERuntimeTestCase): 39 # A list of error messages that should be ignored
40 ignore_errors = []
194 41
195 @classmethod 42 @classmethod
196 def setUpClass(cls): 43 def setUpClass(cls):
197 cls.errors = errors
198
199 # When systemd is enabled we need to notice errors on 44 # When systemd is enabled we need to notice errors on
200 # circular dependencies in units. 45 # circular dependencies in units.
201 if 'systemd' in cls.td.get('DISTRO_FEATURES', ''): 46 if 'systemd' in cls.td.get('DISTRO_FEATURES'):
202 cls.errors.extend([ 47 cls.errors.extend([
203 'Found ordering cycle on', 48 'Found ordering cycle on',
204 'Breaking ordering cycle by deleting job', 49 'Breaking ordering cycle by deleting job',
@@ -206,48 +51,22 @@ class ParseLogsTest(OERuntimeTestCase):
206 'Ordering cycle found, skipping', 51 'Ordering cycle found, skipping',
207 ]) 52 ])
208 53
209 cls.ignore_errors = ignore_errors 54 cls.errors = [s.casefold() for s in cls.errors]
210 cls.log_locations = log_locations
211 cls.msg = ''
212 is_lsb, _ = cls.tc.target.run("which LSB_Test.sh")
213 if is_lsb == 0:
214 for machine in cls.ignore_errors:
215 cls.ignore_errors[machine] = cls.ignore_errors[machine] \
216 + video_related
217
218 def getMachine(self):
219 return self.td.get('MACHINE', '')
220
221 def getWorkdir(self):
222 return self.td.get('WORKDIR', '')
223
224 # Get some information on the CPU of the machine to display at the
225 # beginning of the output. This info might be useful in some cases.
226 def getHardwareInfo(self):
227 hwi = ""
228 cmd = ('cat /proc/cpuinfo | grep "model name" | head -n1 | '
229 " awk 'BEGIN{FS=\":\"}{print $2}'")
230 _, cpu_name = self.target.run(cmd)
231
232 cmd = ('cat /proc/cpuinfo | grep "cpu cores" | head -n1 | '
233 "awk {'print $4'}")
234 _, cpu_physical_cores = self.target.run(cmd)
235
236 cmd = 'cat /proc/cpuinfo | grep "processor" | wc -l'
237 _, cpu_logical_cores = self.target.run(cmd)
238
239 _, cpu_arch = self.target.run('uname -m')
240 55
241 hwi += 'Machine information: \n' 56 cls.load_machine_ignores()
242 hwi += '*******************************\n'
243 hwi += 'Machine name: ' + self.getMachine() + '\n'
244 hwi += 'CPU: ' + str(cpu_name) + '\n'
245 hwi += 'Arch: ' + str(cpu_arch)+ '\n'
246 hwi += 'Physical cores: ' + str(cpu_physical_cores) + '\n'
247 hwi += 'Logical cores: ' + str(cpu_logical_cores) + '\n'
248 hwi += '*******************************\n'
249 57
250 return hwi 58 @classmethod
59 def load_machine_ignores(cls):
 60 # Add TARGET_ARCH explicitly as not every machine has that in MACHINEOVERRIDES (e.g. qemux86-64)
61 for candidate in ["common", cls.td.get("TARGET_ARCH")] + cls.td.get("MACHINEOVERRIDES").split(":"):
62 try:
63 name = f"parselogs-ignores-{candidate}.txt"
64 for line in _open_text("oeqa.runtime.cases", name):
65 line = line.strip()
66 if line and not line.startswith("#"):
67 cls.ignore_errors.append(line.casefold())
68 except FileNotFoundError:
69 pass
251 70
252 # Go through the log locations provided and if it's a folder 71 # Go through the log locations provided and if it's a folder
253 # create a list with all the .log files in it, if it's a file 72 # create a list with all the .log files in it, if it's a file
@@ -255,23 +74,23 @@ class ParseLogsTest(OERuntimeTestCase):
255 def getLogList(self, log_locations): 74 def getLogList(self, log_locations):
256 logs = [] 75 logs = []
257 for location in log_locations: 76 for location in log_locations:
258 status, _ = self.target.run('test -f ' + str(location)) 77 status, _ = self.target.run('test -f %s' % location)
259 if status == 0: 78 if status == 0:
260 logs.append(str(location)) 79 logs.append(location)
261 else: 80 else:
262 status, _ = self.target.run('test -d ' + str(location)) 81 status, _ = self.target.run('test -d %s' % location)
263 if status == 0: 82 if status == 0:
264 cmd = 'find ' + str(location) + '/*.log -maxdepth 1 -type f' 83 cmd = 'find %s -name \\*.log -maxdepth 1 -type f' % location
265 status, output = self.target.run(cmd) 84 status, output = self.target.run(cmd)
266 if status == 0: 85 if status == 0:
267 output = output.splitlines() 86 output = output.splitlines()
268 for logfile in output: 87 for logfile in output:
269 logs.append(os.path.join(location, str(logfile))) 88 logs.append(os.path.join(location, logfile))
270 return logs 89 return logs
271 90
272 # Copy the log files to be parsed locally 91 # Copy the log files to be parsed locally
273 def transfer_logs(self, log_list): 92 def transfer_logs(self, log_list):
274 workdir = self.getWorkdir() 93 workdir = self.td.get('WORKDIR')
275 self.target_logs = workdir + '/' + 'target_logs' 94 self.target_logs = workdir + '/' + 'target_logs'
276 target_logs = self.target_logs 95 target_logs = self.target_logs
277 if os.path.exists(target_logs): 96 if os.path.exists(target_logs):
@@ -288,65 +107,55 @@ class ParseLogsTest(OERuntimeTestCase):
288 logs = [f for f in dir_files if os.path.isfile(f)] 107 logs = [f for f in dir_files if os.path.isfile(f)]
289 return logs 108 return logs
290 109
291 # Build the grep command to be used with filters and exclusions 110 def get_context(self, lines, index, before=6, after=3):
292 def build_grepcmd(self, errors, ignore_errors, log): 111 """
293 grepcmd = 'grep ' 112 Given a set of lines and the index of the line that is important, return
294 grepcmd += '-Ei "' 113 a number of lines surrounding that line.
295 for error in errors: 114 """
296 grepcmd += '\<' + error + '\>' + '|' 115 last = len(lines)
297 grepcmd = grepcmd[:-1] 116
298 grepcmd += '" ' + str(log) + " | grep -Eiv \'" 117 start = index - before
299 118 end = index + after + 1
300 try: 119
301 errorlist = ignore_errors[self.getMachine()] 120 if start < 0:
302 except KeyError: 121 end -= start
303 self.msg += 'No ignore list found for this machine, using default\n' 122 start = 0
304 errorlist = ignore_errors['default'] 123 if end > last:
305 124 start -= end - last
306 for ignore_error in errorlist: 125 end = last
307 ignore_error = ignore_error.replace('(', '\(') 126
308 ignore_error = ignore_error.replace(')', '\)') 127 return lines[start:end]
309 ignore_error = ignore_error.replace("'", '.') 128
310 ignore_error = ignore_error.replace('?', '\?') 129 def test_get_context(self):
311 ignore_error = ignore_error.replace('[', '\[') 130 """
312 ignore_error = ignore_error.replace(']', '\]') 131 A test case for the test case.
313 ignore_error = ignore_error.replace('*', '\*') 132 """
314 ignore_error = ignore_error.replace('0-9', '[0-9]') 133 lines = list(range(0,10))
315 grepcmd += ignore_error + '|' 134 self.assertEqual(self.get_context(lines, 0, 2, 1), [0, 1, 2, 3])
316 grepcmd = grepcmd[:-1] 135 self.assertEqual(self.get_context(lines, 5, 2, 1), [3, 4, 5, 6])
317 grepcmd += "\'" 136 self.assertEqual(self.get_context(lines, 9, 2, 1), [6, 7, 8, 9])
318 137
319 return grepcmd 138 def parse_logs(self, logs, lines_before=10, lines_after=10):
320 139 """
321 # Grep only the errors so that their context could be collected. 140 Search the log files @logs looking for error lines (marked by
322 # Default context is 10 lines before and after the error itself 141 @self.errors), ignoring anything listed in @self.ignore_errors.
323 def parse_logs(self, errors, ignore_errors, logs, 142
324 lines_before = 10, lines_after = 10): 143 Returns a dictionary of log filenames to a dictionary of error lines to
325 results = {} 144 the error context (controlled by @lines_before and @lines_after).
326 rez = [] 145 """
327 grep_output = '' 146 results = collections.defaultdict(dict)
328 147
329 for log in logs: 148 for log in logs:
330 result = None 149 with open(log) as f:
331 thegrep = self.build_grepcmd(errors, ignore_errors, log) 150 lines = f.readlines()
332 151
333 try: 152 for i, line in enumerate(lines):
334 result = check_output(thegrep, shell=True).decode('utf-8') 153 line = line.strip()
335 except: 154 line_lower = line.casefold()
336 pass
337 155
338 if result is not None: 156 if any(keyword in line_lower for keyword in self.errors):
339 results[log] = {} 157 if not any(ignore in line_lower for ignore in self.ignore_errors):
340 rez = result.splitlines() 158 results[log][line] = "".join(self.get_context(lines, i, lines_before, lines_after))
341
342 for xrez in rez:
343 try:
344 cmd = ['grep', '-F', xrez, '-B', str(lines_before)]
345 cmd += ['-A', str(lines_after), log]
346 grep_output = check_output(cmd).decode('utf-8')
347 except:
348 pass
349 results[log][xrez]=grep_output
350 159
351 return results 160 return results
352 161
@@ -359,17 +168,18 @@ class ParseLogsTest(OERuntimeTestCase):
359 def test_parselogs(self): 168 def test_parselogs(self):
360 self.write_dmesg() 169 self.write_dmesg()
361 log_list = self.get_local_log_list(self.log_locations) 170 log_list = self.get_local_log_list(self.log_locations)
362 result = self.parse_logs(self.errors, self.ignore_errors, log_list) 171 result = self.parse_logs(log_list)
363 print(self.getHardwareInfo()) 172
364 errcount = 0 173 errcount = 0
174 self.msg = ""
365 for log in result: 175 for log in result:
366 self.msg += 'Log: ' + log + '\n' 176 self.msg += 'Log: ' + log + '\n'
367 self.msg += '-----------------------\n' 177 self.msg += '-----------------------\n'
368 for error in result[log]: 178 for error in result[log]:
369 errcount += 1 179 errcount += 1
370 self.msg += 'Central error: ' + str(error) + '\n' 180 self.msg += 'Central error: ' + error + '\n'
371 self.msg += '***********************\n' 181 self.msg += '***********************\n'
372 self.msg += result[str(log)][str(error)] + '\n' 182 self.msg += result[log][error] + '\n'
373 self.msg += '***********************\n' 183 self.msg += '***********************\n'
374 self.msg += '%s errors found in logs.' % errcount 184 self.msg += '%s errors found in logs.' % errcount
375 self.assertEqual(errcount, 0, msg=self.msg) 185 self.assertEqual(errcount, 0, msg=self.msg)
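
The new get_context() keeps the context window a constant size by shifting it back inside the list at either edge, rather than truncating it. A minimal standalone sketch of that clamping, using the same names as the patch and assuming the window is never larger than the list itself:

    def get_context(lines, index, before=6, after=3):
        # Window of `before` lines above and `after` lines below lines[index].
        last = len(lines)
        start = index - before
        end = index + after + 1
        if start < 0:
            end -= start      # start is negative: grow end by the overshoot
            start = 0
        if end > last:
            start -= end - last
            end = last
        return lines[start:end]

    # Mirrors the patch's own test_get_context assertions:
    lines = list(range(10))
    assert get_context(lines, 0, 2, 1) == [0, 1, 2, 3]
    assert get_context(lines, 5, 2, 1) == [3, 4, 5, 6]
    assert get_context(lines, 9, 2, 1) == [6, 7, 8, 9]
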
diff --git a/meta/lib/oeqa/runtime/cases/perl.py b/meta/lib/oeqa/runtime/cases/perl.py
index 2c6b3b7846..f11b300836 100644
--- a/meta/lib/oeqa/runtime/cases/perl.py
+++ b/meta/lib/oeqa/runtime/cases/perl.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/ping.py b/meta/lib/oeqa/runtime/cases/ping.py
index f6603f75ec..efb91d4cc9 100644
--- a/meta/lib/oeqa/runtime/cases/ping.py
+++ b/meta/lib/oeqa/runtime/cases/ping.py
@@ -1,11 +1,15 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5from subprocess import Popen, PIPE 7from subprocess import Popen, PIPE
8from time import sleep
6 9
7from oeqa.runtime.case import OERuntimeTestCase 10from oeqa.runtime.case import OERuntimeTestCase, run_network_serialdebug
8from oeqa.core.decorator.oetimeout import OETimeout 11from oeqa.core.decorator.oetimeout import OETimeout
12from oeqa.core.exception import OEQATimeoutError
9 13
10class PingTest(OERuntimeTestCase): 14class PingTest(OERuntimeTestCase):
11 15
@@ -13,14 +17,27 @@ class PingTest(OERuntimeTestCase):
13 def test_ping(self): 17 def test_ping(self):
14 output = '' 18 output = ''
15 count = 0 19 count = 0
16 while count < 5: 20 self.assertNotEqual(len(self.target.ip), 0, msg="No target IP address set")
17 cmd = 'ping -c 1 %s' % self.target.ip 21
18 proc = Popen(cmd, shell=True, stdout=PIPE) 22 # If the target IP is localhost (because user-space networking is being used),
19 output += proc.communicate()[0].decode('utf-8') 23 # then there's no point in pinging it.
20 if proc.poll() == 0: 24 if self.target.ip.startswith("127.0.0.") or self.target.ip in ("localhost", "::1"):
21 count += 1 25 print("runtime/ping: localhost detected, not pinging")
22 else: 26 return
23 count = 0 27
28 try:
29 while count < 5:
30 cmd = 'ping -c 1 %s' % self.target.ip
31 proc = Popen(cmd, shell=True, stdout=PIPE)
32 output += proc.communicate()[0].decode('utf-8')
33 if proc.poll() == 0:
34 count += 1
35 else:
36 count = 0
37 sleep(1)
38 except OEQATimeoutError:
39 run_network_serialdebug(self.target.runner)
40 self.fail("Ping timeout error for address %s, count %s, output: %s" % (self.target.ip, count, output))
24 msg = ('Expected 5 consecutive, got %d.\n' 41 msg = ('Expected 5 consecutive, got %d.\n'
25 'ping output is:\n%s' % (count,output)) 42 'ping output is:\n%s' % (count,output))
26 self.assertEqual(count, 5, msg = msg) 43 self.assertEqual(count, 5, msg = msg)
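
The reworked ping test counts *consecutive* successes and resets the streak on any failure, so five replies in a row are required rather than five in total. A hedged sketch of that counter outside the test harness; ping_once is a hypothetical helper standing in for the Popen call:

    import subprocess

    def ping_once(ip):
        # One echo request; exit status 0 means a reply came back.
        return subprocess.run(["ping", "-c", "1", ip],
                              stdout=subprocess.DEVNULL).returncode == 0

    def five_in_a_row(ip, max_attempts=60):
        count = 0
        for _ in range(max_attempts):
            count = count + 1 if ping_once(ip) else 0  # a miss resets the streak
            if count == 5:
                return True
        return False
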
diff --git a/meta/lib/oeqa/runtime/cases/ptest.py b/meta/lib/oeqa/runtime/cases/ptest.py
index 0800f3c27f..fbaeb84d00 100644
--- a/meta/lib/oeqa/runtime/cases/ptest.py
+++ b/meta/lib/oeqa/runtime/cases/ptest.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -57,7 +59,7 @@ class PtestRunnerTest(OERuntimeTestCase):
57 ptest_dirs = [ '/usr/lib' ] 59 ptest_dirs = [ '/usr/lib' ]
58 if not libdir in ptest_dirs: 60 if not libdir in ptest_dirs:
59 ptest_dirs.append(libdir) 61 ptest_dirs.append(libdir)
60 status, output = self.target.run('ptest-runner -d \"{}\"'.format(' '.join(ptest_dirs)), 0) 62 status, output = self.target.run('ptest-runner -t 450 -d \"{}\"'.format(' '.join(ptest_dirs)), 0)
61 os.makedirs(ptest_log_dir) 63 os.makedirs(ptest_log_dir)
62 with open(ptest_runner_log, 'w') as f: 64 with open(ptest_runner_log, 'w') as f:
63 f.write(output) 65 f.write(output)
@@ -81,17 +83,20 @@ class PtestRunnerTest(OERuntimeTestCase):
81 83
82 extras['ptestresult.sections'] = sections 84 extras['ptestresult.sections'] = sections
83 85
86 zerolength = []
84 trans = str.maketrans("()", "__") 87 trans = str.maketrans("()", "__")
85 for section in results: 88 for section in results:
86 for test in results[section]: 89 for test in results[section]:
87 result = results[section][test] 90 result = results[section][test]
88 testname = "ptestresult." + (section or "No-section") + "." + "_".join(test.translate(trans).split()) 91 testname = "ptestresult." + (section or "No-section") + "." + "_".join(test.translate(trans).split())
89 extras[testname] = {'status': result} 92 extras[testname] = {'status': result}
93 if not results[section]:
94 zerolength.append(section)
90 95
91 failed_tests = {} 96 failed_tests = {}
92 97
93 for section in sections: 98 for section in sections:
94 if 'exitcode' in sections[section].keys(): 99 if 'exitcode' in sections[section].keys() or 'timeout' in sections[section].keys():
95 failed_tests[section] = sections[section]["log"] 100 failed_tests[section] = sections[section]["log"]
96 101
97 for section in results: 102 for section in results:
@@ -105,7 +110,10 @@ class PtestRunnerTest(OERuntimeTestCase):
105 failmsg = "ERROR: Processes were killed by the OOM Killer:\n%s\n" % output 110 failmsg = "ERROR: Processes were killed by the OOM Killer:\n%s\n" % output
106 111
107 if failed_tests: 112 if failed_tests:
108 failmsg = failmsg + "Failed ptests:\n%s" % pprint.pformat(failed_tests) 113 failmsg = failmsg + "\nFailed ptests:\n%s\n" % pprint.pformat(failed_tests)
114
115 if zerolength:
116 failmsg = failmsg + "\nptests which had no test results:\n%s" % pprint.pformat(zerolength)
109 117
110 if failmsg: 118 if failmsg:
111 self.logger.warning("There were failing ptests.") 119 self.logger.warning("There were failing ptests.")
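
Sections that produced no test results at all are now collected into zerolength and reported alongside the failures. The same bookkeeping can be written as a comprehension; a sketch assuming results maps section names to dicts of per-test results, with illustrative section names:

    results = {"glibc": {"test_a": "PASS"}, "busybox": {}, "zlib": {}}

    # Equivalent to the patch's loop-based collection:
    zerolength = [section for section in results if not results[section]]
    assert zerolength == ["busybox", "zlib"]
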
diff --git a/meta/lib/oeqa/runtime/cases/python.py b/meta/lib/oeqa/runtime/cases/python.py
index ec54f1e1db..5d6d133480 100644
--- a/meta/lib/oeqa/runtime/cases/python.py
+++ b/meta/lib/oeqa/runtime/cases/python.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/rpm.py b/meta/lib/oeqa/runtime/cases/rpm.py
index 8e18b426f8..ea5619ffea 100644
--- a/meta/lib/oeqa/runtime/cases/rpm.py
+++ b/meta/lib/oeqa/runtime/cases/rpm.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -49,21 +51,20 @@ class RpmBasicTest(OERuntimeTestCase):
49 msg = 'status: %s. Cannot run rpm -qa: %s' % (status, output) 51 msg = 'status: %s. Cannot run rpm -qa: %s' % (status, output)
50 self.assertEqual(status, 0, msg=msg) 52 self.assertEqual(status, 0, msg=msg)
51 53
52 def check_no_process_for_user(u): 54 def wait_for_no_process_for_user(u, timeout = 120):
53 _, output = self.target.run(self.tc.target_cmds['ps']) 55 timeout_at = time.time() + timeout
54 if u + ' ' in output: 56 while time.time() < timeout_at:
55 return False 57 _, output = self.target.run(self.tc.target_cmds['ps'])
56 else: 58 if u + ' ' not in output:
57 return True 59 return
60 time.sleep(1)
61 user_pss = [ps for ps in output.split("\n") if u + ' ' in ps]
62 msg = "User %s has processes still running: %s" % (u, "\n".join(user_pss))
63 self.fail(msg=msg)
58 64
59 def unset_up_test_user(u): 65 def unset_up_test_user(u):
60 # ensure no test1 process in running 66 # ensure no test1 process in running
61 timeout = time.time() + 30 67 wait_for_no_process_for_user(u)
62 while time.time() < timeout:
63 if check_no_process_for_user(u):
64 break
65 else:
66 time.sleep(1)
67 status, output = self.target.run('userdel -r %s' % u) 68 status, output = self.target.run('userdel -r %s' % u)
68 msg = 'Failed to erase user: %s' % output 69 msg = 'Failed to erase user: %s' % output
69 self.assertTrue(status == 0, msg=msg) 70 self.assertTrue(status == 0, msg=msg)
@@ -79,21 +80,24 @@ class RpmBasicTest(OERuntimeTestCase):
79 80
80class RpmInstallRemoveTest(OERuntimeTestCase): 81class RpmInstallRemoveTest(OERuntimeTestCase):
81 82
82 @classmethod 83 def _find_test_file(self):
83 def setUpClass(cls): 84 pkgarch = self.td['TUNE_PKGARCH'].replace('-', '_')
84 pkgarch = cls.td['TUNE_PKGARCH'].replace('-', '_') 85 rpmdir = os.path.join(self.tc.td['DEPLOY_DIR'], 'rpm', pkgarch)
85 rpmdir = os.path.join(cls.tc.td['DEPLOY_DIR'], 'rpm', pkgarch)
86 # Pick base-passwd-doc as a test file to get installed, because it's small 86 # Pick base-passwd-doc as a test file to get installed, because it's small
87 # and it will always be built for standard targets 87 # and it will always be built for standard targets
88 rpm_doc = 'base-passwd-doc-*.%s.rpm' % pkgarch 88 rpm_doc = 'base-passwd-doc-*.%s.rpm' % pkgarch
89 if not os.path.exists(rpmdir): 89 if not os.path.exists(rpmdir):
90 return 90 self.fail("Rpm directory {} does not exist".format(rpmdir))
91 for f in fnmatch.filter(os.listdir(rpmdir), rpm_doc): 91 for f in fnmatch.filter(os.listdir(rpmdir), rpm_doc):
92 cls.test_file = os.path.join(rpmdir, f) 92 self.test_file = os.path.join(rpmdir, f)
93 cls.dst = '/tmp/base-passwd-doc.rpm' 93 break
94 else:
95 self.fail("Couldn't find the test rpm file {} in {}".format(rpm_doc, rpmdir))
96 self.dst = '/tmp/base-passwd-doc.rpm'
94 97
95 @OETestDepends(['rpm.RpmBasicTest.test_rpm_query']) 98 @OETestDepends(['rpm.RpmBasicTest.test_rpm_query'])
96 def test_rpm_install(self): 99 def test_rpm_install(self):
100 self._find_test_file()
97 self.tc.target.copyTo(self.test_file, self.dst) 101 self.tc.target.copyTo(self.test_file, self.dst)
98 status, output = self.target.run('rpm -ivh /tmp/base-passwd-doc.rpm') 102 status, output = self.target.run('rpm -ivh /tmp/base-passwd-doc.rpm')
99 msg = 'Failed to install base-passwd-doc package: %s' % output 103 msg = 'Failed to install base-passwd-doc package: %s' % output
@@ -116,12 +120,13 @@ class RpmInstallRemoveTest(OERuntimeTestCase):
116 Author: Alexander Kanavin <alex.kanavin@gmail.com> 120 Author: Alexander Kanavin <alex.kanavin@gmail.com>
117 AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com> 121 AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
118 """ 122 """
119 db_files_cmd = 'ls /var/lib/rpm/__db.*' 123 self._find_test_file()
124 db_files_cmd = 'ls /var/lib/rpm/rpmdb.sqlite*'
120 check_log_cmd = "grep RPM /var/log/messages | wc -l" 125 check_log_cmd = "grep RPM /var/log/messages | wc -l"
121 126
122 # Make sure that some database files are under /var/lib/rpm as '__db.xxx' 127 # Make sure that some database files are under /var/lib/rpm as 'rpmdb.sqlite'
123 status, output = self.target.run(db_files_cmd) 128 status, output = self.target.run(db_files_cmd)
124 msg = 'Failed to find database files under /var/lib/rpm/ as __db.xxx' 129 msg = 'Failed to find database files under /var/lib/rpm/ as rpmdb.sqlite'
125 self.assertEqual(0, status, msg=msg) 130 self.assertEqual(0, status, msg=msg)
126 131
127 self.tc.target.copyTo(self.test_file, self.dst) 132 self.tc.target.copyTo(self.test_file, self.dst)
@@ -141,13 +146,4 @@ class RpmInstallRemoveTest(OERuntimeTestCase):
141 146
142 self.tc.target.run('rm -f %s' % self.dst) 147 self.tc.target.run('rm -f %s' % self.dst)
143 148
144 # if using systemd this should ensure all entries are flushed to /var
145 status, output = self.target.run("journalctl --sync")
146 # Get the amount of entries in the log file
147 status, output = self.target.run(check_log_cmd)
148 msg = 'Failed to get the final size of the log file.'
149 self.assertEqual(0, status, msg=msg)
150 149
151 # Check that there's enough of them
152 self.assertGreaterEqual(int(output), 80,
153 'Cound not find sufficient amount of rpm entries in /var/log/messages, found {} entries'.format(output))
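
wait_for_no_process_for_user() replaces the old silent 30-second loop with a deadline-based poll that fails loudly, quoting the surviving ps lines. The general shape of that pattern, as a sketch in which condition and describe are placeholder callables:

    import time

    def wait_until(condition, describe, timeout=120, interval=1):
        # Poll until condition() holds; on timeout, raise with a diagnosis.
        deadline = time.time() + timeout
        while time.time() < deadline:
            if condition():
                return
            time.sleep(interval)
        raise TimeoutError(describe())

    # e.g.:
    # wait_until(lambda: "test1 " not in ps_output(),
    #            lambda: "test1 still has processes:\n" + ps_output())
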
diff --git a/meta/lib/oeqa/runtime/cases/rt.py b/meta/lib/oeqa/runtime/cases/rt.py
new file mode 100644
index 0000000000..15ab4dbbbb
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/rt.py
@@ -0,0 +1,19 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.runtime.case import OERuntimeTestCase
8from oeqa.core.decorator.depends import OETestDepends
9
10class RtTest(OERuntimeTestCase):
11 @OETestDepends(['ssh.SSHTest.test_ssh'])
12 def test_is_rt(self):
13 """
14 Check that the kernel has CONFIG_PREEMPT_RT enabled.
15 """
16 status, output = self.target.run("uname -a")
17 self.assertEqual(status, 0, msg=output)
18 # Split so we don't get a substring false-positive
19 self.assertIn("PREEMPT_RT", output.split())
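
Checking the token list from split() instead of the raw string avoids matching a larger word that merely contains "PREEMPT_RT". A quick illustration; the PREEMPT_RT_FULL flavour is a hypothetical example of such a larger token:

    rt     = "Linux qemu 6.6.0-rt #1 SMP PREEMPT_RT x86_64 GNU/Linux"
    not_rt = "Linux qemu 4.19.0 #1 SMP PREEMPT_RT_FULL x86_64 GNU/Linux"

    assert "PREEMPT_RT" in rt.split()          # exact token: matches
    assert "PREEMPT_RT" in not_rt              # substring: false positive
    assert "PREEMPT_RT" not in not_rt.split()  # token check rejects it
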
diff --git a/meta/lib/oeqa/runtime/cases/rtc.py b/meta/lib/oeqa/runtime/cases/rtc.py
index a34c101a9d..6e45c5db4f 100644
--- a/meta/lib/oeqa/runtime/cases/rtc.py
+++ b/meta/lib/oeqa/runtime/cases/rtc.py
@@ -1,5 +1,11 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
1from oeqa.runtime.case import OERuntimeTestCase 6from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.core.decorator.depends import OETestDepends 7from oeqa.core.decorator.depends import OETestDepends
8from oeqa.core.decorator.data import skipIfFeature
3from oeqa.runtime.decorator.package import OEHasPackage 9from oeqa.runtime.decorator.package import OEHasPackage
4 10
5import re 11import re
@@ -9,19 +15,21 @@ class RTCTest(OERuntimeTestCase):
9 def setUp(self): 15 def setUp(self):
10 if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd': 16 if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd':
11 self.logger.debug('Stopping systemd-timesyncd daemon') 17 self.logger.debug('Stopping systemd-timesyncd daemon')
12 self.target.run('systemctl disable --now systemd-timesyncd') 18 self.target.run('systemctl disable --now --runtime systemd-timesyncd')
13 19
14 def tearDown(self): 20 def tearDown(self):
15 if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd': 21 if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd':
16 self.logger.debug('Starting systemd-timesyncd daemon') 22 self.logger.debug('Starting systemd-timesyncd daemon')
17 self.target.run('systemctl enable --now systemd-timesyncd') 23 self.target.run('systemctl enable --now --runtime systemd-timesyncd')
18 24
25 @skipIfFeature('read-only-rootfs',
26 'Test does not work with read-only-rootfs in IMAGE_FEATURES')
19 @OETestDepends(['ssh.SSHTest.test_ssh']) 27 @OETestDepends(['ssh.SSHTest.test_ssh'])
20 @OEHasPackage(['coreutils', 'busybox']) 28 @OEHasPackage(['coreutils', 'busybox'])
21 def test_rtc(self): 29 def test_rtc(self):
22 (status, output) = self.target.run('hwclock -r') 30 (status, output) = self.target.run('hwclock -r')
23 self.assertEqual(status, 0, msg='Failed to get RTC time, output: %s' % output) 31 self.assertEqual(status, 0, msg='Failed to get RTC time, output: %s' % output)
24 32
25 (status, current_datetime) = self.target.run('date +"%m%d%H%M%Y"') 33 (status, current_datetime) = self.target.run('date +"%m%d%H%M%Y"')
26 self.assertEqual(status, 0, msg='Failed to get system current date & time, output: %s' % current_datetime) 34 self.assertEqual(status, 0, msg='Failed to get system current date & time, output: %s' % current_datetime)
27 35
@@ -32,7 +40,6 @@ class RTCTest(OERuntimeTestCase):
32 40
33 (status, output) = self.target.run('date %s' % current_datetime) 41 (status, output) = self.target.run('date %s' % current_datetime)
34 self.assertEqual(status, 0, msg='Failed to reset system date & time, output: %s' % output) 42 self.assertEqual(status, 0, msg='Failed to reset system date & time, output: %s' % output)
35 43
36 (status, output) = self.target.run('hwclock -w') 44 (status, output) = self.target.run('hwclock -w')
37 self.assertEqual(status, 0, msg='Failed to reset RTC time, output: %s' % output) 45 self.assertEqual(status, 0, msg='Failed to reset RTC time, output: %s' % output)
38
diff --git a/meta/lib/oeqa/runtime/cases/runlevel.py b/meta/lib/oeqa/runtime/cases/runlevel.py
index 3a4df8ace1..6734b0f5ed 100644
--- a/meta/lib/oeqa/runtime/cases/runlevel.py
+++ b/meta/lib/oeqa/runtime/cases/runlevel.py
@@ -1,3 +1,8 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
1from oeqa.runtime.case import OERuntimeTestCase 6from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.core.decorator.depends import OETestDepends 7from oeqa.core.decorator.depends import OETestDepends
3 8
diff --git a/meta/lib/oeqa/runtime/cases/rust.py b/meta/lib/oeqa/runtime/cases/rust.py
new file mode 100644
index 0000000000..123c942012
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/rust.py
@@ -0,0 +1,64 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.runtime.case import OERuntimeTestCase
8from oeqa.core.decorator.depends import OETestDepends
9from oeqa.runtime.decorator.package import OEHasPackage
10
11class RustCompileTest(OERuntimeTestCase):
12
13 @classmethod
14 def setUp(cls):
15 dst = '/tmp/'
16 src = os.path.join(cls.tc.files_dir, 'test.rs')
17 cls.tc.target.copyTo(src, dst)
18
19 @classmethod
20 def tearDown(cls):
21 files = '/tmp/test.rs /tmp/test'
22 cls.tc.target.run('rm %s' % files)
23 dirs = '/tmp/hello'
24 cls.tc.target.run('rm -r %s' % dirs)
25
26 @OETestDepends(['ssh.SSHTest.test_ssh'])
27 @OEHasPackage('rust')
28 @OEHasPackage('openssh-scp')
29 def test_rust_compile(self):
30 status, output = self.target.run('rustc /tmp/test.rs -o /tmp/test')
31 msg = 'rust compile failed, output: %s' % output
32 self.assertEqual(status, 0, msg=msg)
33
34 status, output = self.target.run('/tmp/test')
35 msg = 'running compiled file failed, output: %s' % output
36 self.assertEqual(status, 0, msg=msg)
37
38 @OETestDepends(['ssh.SSHTest.test_ssh'])
39 @OEHasPackage('cargo')
40 @OEHasPackage('openssh-scp')
41 def test_cargo_compile(self):
42 status, output = self.target.run('cargo new /tmp/hello')
43 msg = 'cargo new failed, output: %s' % output
44 self.assertEqual(status, 0, msg=msg)
45
46 status, output = self.target.run('cargo build --manifest-path=/tmp/hello/Cargo.toml')
47 msg = 'cargo build failed, output: %s' % output
48 self.assertEqual(status, 0, msg=msg)
49
50 status, output = self.target.run('cargo run --manifest-path=/tmp/hello/Cargo.toml')
51 msg = 'running compiled file failed, output: %s' % output
52 self.assertEqual(status, 0, msg=msg)
53
54class RustCLibExampleTest(OERuntimeTestCase):
55 @OETestDepends(['ssh.SSHTest.test_ssh'])
56 @OEHasPackage('rust-c-lib-example-bin')
57 def test_rust_c_lib_example(self):
58 cmd = "rust-c-lib-example-bin test"
59 status, output = self.target.run(cmd)
60 msg = 'Exit status was not 0. Output: %s' % output
61 self.assertEqual(status, 0, msg=msg)
62
63 msg = 'Incorrect output: %s' % output
64 self.assertEqual(output, "Hello world in rust from C!", msg=msg)
diff --git a/meta/lib/oeqa/runtime/cases/scons.py b/meta/lib/oeqa/runtime/cases/scons.py
index 3c7c7f7270..4a8d4d40ba 100644
--- a/meta/lib/oeqa/runtime/cases/scons.py
+++ b/meta/lib/oeqa/runtime/cases/scons.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/scp.py b/meta/lib/oeqa/runtime/cases/scp.py
index 3a5f292152..364264369a 100644
--- a/meta/lib/oeqa/runtime/cases/scp.py
+++ b/meta/lib/oeqa/runtime/cases/scp.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -23,7 +25,7 @@ class ScpTest(OERuntimeTestCase):
23 os.remove(cls.tmp_path) 25 os.remove(cls.tmp_path)
24 26
25 @OETestDepends(['ssh.SSHTest.test_ssh']) 27 @OETestDepends(['ssh.SSHTest.test_ssh'])
26 @OEHasPackage(['openssh-scp', 'dropbear']) 28 @OEHasPackage({'openssh-scp', 'openssh-sftp-server'})
27 def test_scp_file(self): 29 def test_scp_file(self):
28 dst = '/tmp/test_scp_file' 30 dst = '/tmp/test_scp_file'
29 31
diff --git a/meta/lib/oeqa/runtime/cases/skeletoninit.py b/meta/lib/oeqa/runtime/cases/skeletoninit.py
index 4779cd6bb4..be7b39a9a3 100644
--- a/meta/lib/oeqa/runtime/cases/skeletoninit.py
+++ b/meta/lib/oeqa/runtime/cases/skeletoninit.py
@@ -1,10 +1,11 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5# This test should cover https://bugzilla.yoctoproject.org/tr_show_case.cgi?case_id=284 7# Image under test must have meta-skeleton layer in bblayers and
6# testcase. Image under test must have meta-skeleton layer in bblayers and 8# IMAGE_INSTALL:append = " service" in local.conf
7# IMAGE_INSTALL_append = " service" in local.conf
8from oeqa.runtime.case import OERuntimeTestCase 9from oeqa.runtime.case import OERuntimeTestCase
9from oeqa.core.decorator.depends import OETestDepends 10from oeqa.core.decorator.depends import OETestDepends
10from oeqa.core.decorator.data import skipIfDataVar 11from oeqa.core.decorator.data import skipIfDataVar
@@ -15,7 +16,7 @@ class SkeletonBasicTest(OERuntimeTestCase):
15 @OETestDepends(['ssh.SSHTest.test_ssh']) 16 @OETestDepends(['ssh.SSHTest.test_ssh'])
16 @OEHasPackage(['service']) 17 @OEHasPackage(['service'])
17 @skipIfDataVar('VIRTUAL-RUNTIME_init_manager', 'systemd', 18 @skipIfDataVar('VIRTUAL-RUNTIME_init_manager', 'systemd',
18 'Not appropiate for systemd image') 19 'Not appropriate for systemd image')
19 def test_skeleton_availability(self): 20 def test_skeleton_availability(self):
20 status, output = self.target.run('ls /etc/init.d/skeleton') 21 status, output = self.target.run('ls /etc/init.d/skeleton')
21 msg = 'skeleton init script not found. Output:\n%s' % output 22 msg = 'skeleton init script not found. Output:\n%s' % output
diff --git a/meta/lib/oeqa/runtime/cases/ssh.py b/meta/lib/oeqa/runtime/cases/ssh.py
index 60a5fbbfbf..b632a29a01 100644
--- a/meta/lib/oeqa/runtime/cases/ssh.py
+++ b/meta/lib/oeqa/runtime/cases/ssh.py
@@ -1,8 +1,13 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5from oeqa.runtime.case import OERuntimeTestCase 7import time
8import signal
9
10from oeqa.runtime.case import OERuntimeTestCase, run_network_serialdebug
6from oeqa.core.decorator.depends import OETestDepends 11from oeqa.core.decorator.depends import OETestDepends
7from oeqa.runtime.decorator.package import OEHasPackage 12from oeqa.runtime.decorator.package import OEHasPackage
8 13
@@ -11,9 +16,23 @@ class SSHTest(OERuntimeTestCase):
11 @OETestDepends(['ping.PingTest.test_ping']) 16 @OETestDepends(['ping.PingTest.test_ping'])
12 @OEHasPackage(['dropbear', 'openssh-sshd']) 17 @OEHasPackage(['dropbear', 'openssh-sshd'])
13 def test_ssh(self): 18 def test_ssh(self):
14 (status, output) = self.target.run('uname -a') 19 for i in range(5):
15 self.assertEqual(status, 0, msg='SSH Test failed: %s' % output) 20 status, output = self.target.run("uname -a", timeout=30)
16 (status, output) = self.target.run('cat /etc/masterimage') 21 if status == 0:
17 msg = "This isn't the right image - /etc/masterimage " \ 22 break
18 "shouldn't be here %s" % output 23 elif status == 255 or status == -signal.SIGTERM:
 19 self.assertEqual(status, 1, msg=msg) 24 # ssh returns 255 only if an ssh error occurs. This could
25 # be an issue with "Connection refused" because the port
26 # isn't open yet, and this could check explicitly for that
27 # here. However, let's keep it simple and just retry for
28 # all errors a limited amount of times with a sleep to
29 # give it time for the port to open.
30 # We sometimes see -15 (SIGTERM) on slow emulation machines too, likely
31 # from boot/init not being 100% complete, retry for these too.
32 time.sleep(5)
33 continue
34 else:
35 run_network_serialdebug(self.target.runner)
36 self.fail("uname failed with \"%s\" (exit code %s)" % (output, status))
37 if status != 0:
38 self.fail("ssh failed with \"%s\" (exit code %s)" % (output, status))
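
The ssh test now distinguishes transient failures (exit 255 from ssh itself, or -SIGTERM seen on slow emulation during boot) from genuine ones, retrying the former up to five times with a 5-second pause. The retry skeleton, sketched apart from the harness; run stands in for self.target.run:

    import signal
    import time

    TRANSIENT = (255, -signal.SIGTERM)  # ssh-level error, or killed mid-boot

    def run_with_retries(run, cmd, tries=5, delay=5):
        status, output = None, ""
        for _ in range(tries):
            status, output = run(cmd, timeout=30)
            if status == 0:
                return output
            if status in TRANSIENT:     # e.g. sshd port not open yet
                time.sleep(delay)
                continue
            break                       # a genuine failure: stop retrying
        raise RuntimeError("%s failed with %r (exit %s)" % (cmd, output, status))
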
diff --git a/meta/lib/oeqa/runtime/cases/stap.py b/meta/lib/oeqa/runtime/cases/stap.py
index 5342f6ac34..6b55e7de50 100644
--- a/meta/lib/oeqa/runtime/cases/stap.py
+++ b/meta/lib/oeqa/runtime/cases/stap.py
@@ -1,37 +1,35 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import os 7import os
6 8
7from oeqa.runtime.case import OERuntimeTestCase 9from oeqa.runtime.case import OERuntimeTestCase
8from oeqa.core.decorator.depends import OETestDepends
9from oeqa.core.decorator.data import skipIfNotFeature 10from oeqa.core.decorator.data import skipIfNotFeature
10from oeqa.runtime.decorator.package import OEHasPackage 11from oeqa.runtime.decorator.package import OEHasPackage
11 12
12class StapTest(OERuntimeTestCase): 13class StapTest(OERuntimeTestCase):
13 14 @skipIfNotFeature('tools-profile', 'Test requires tools-profile to be in IMAGE_FEATURES')
14 @classmethod
15 def setUp(cls):
16 src = os.path.join(cls.tc.runtime_files_dir, 'hello.stp')
17 dst = '/tmp/hello.stp'
18 cls.tc.target.copyTo(src, dst)
19
20 @classmethod
21 def tearDown(cls):
22 files = '/tmp/hello.stp'
23 cls.tc.target.run('rm %s' % files)
24
25 @skipIfNotFeature('tools-profile',
26 'Test requires tools-profile to be in IMAGE_FEATURES')
27 @OETestDepends(['kernelmodule.KernelModuleTest.test_kernel_module'])
28 @OEHasPackage(['systemtap']) 15 @OEHasPackage(['systemtap'])
16 @OEHasPackage(['gcc-symlinks'])
17 @OEHasPackage(['kernel-devsrc'])
29 def test_stap(self): 18 def test_stap(self):
30 cmds = [ 19 try:
31 'cd /usr/src/kernel && make scripts prepare', 20 cmd = 'make -j -C /usr/src/kernel scripts prepare'
32 'cd /lib/modules/`uname -r` && (if [ ! -e build ]; then ln -s /usr/src/kernel build; fi)',
33 'stap --disable-cache -DSTP_NO_VERREL_CHECK /tmp/hello.stp'
34 ]
35 for cmd in cmds:
36 status, output = self.target.run(cmd, 900) 21 status, output = self.target.run(cmd, 900)
37 self.assertEqual(status, 0, msg='\n'.join([cmd, output])) 22 self.assertEqual(status, 0, msg='\n'.join([cmd, output]))
23
24 cmd = 'stap -v -p4 -m stap_hello --disable-cache -DSTP_NO_VERREL_CHECK -e \'probe oneshot { print("Hello, "); println("SystemTap!") }\''
25 status, output = self.target.run(cmd, 900)
26 self.assertEqual(status, 0, msg='\n'.join([cmd, output]))
27
28 cmd = 'staprun -v -R -b1 stap_hello.ko'
29 status, output = self.target.run(cmd, 60)
30 self.assertEqual(status, 0, msg='\n'.join([cmd, output]))
31 self.assertIn('Hello, SystemTap!', output, msg='\n'.join([cmd, output]))
32 except:
33 status, dmesg = self.target.run('dmesg')
34 if status == 0:
35 print(dmesg)
diff --git a/meta/lib/oeqa/runtime/cases/storage.py b/meta/lib/oeqa/runtime/cases/storage.py
index 166d26b252..b05622fea8 100644
--- a/meta/lib/oeqa/runtime/cases/storage.py
+++ b/meta/lib/oeqa/runtime/cases/storage.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -91,24 +93,24 @@ class UsbTest(StorageBase):
91 self.test_file = "usb.tst" 93 self.test_file = "usb.tst"
92 self.test_dir = os.path.join(self.mount_point, "oeqa") 94 self.test_dir = os.path.join(self.mount_point, "oeqa")
93 95
94 @skipIfQemu('qemuall', 'Test only runs on real hardware') 96 @skipIfQemu()
95 @OETestDepends(['ssh.SSHTest.test_ssh']) 97 @OETestDepends(['ssh.SSHTest.test_ssh'])
96 def test_usb_mount(self): 98 def test_usb_mount(self):
97 self.storage_umount(2) 99 self.storage_umount(2)
98 self.storage_mount(5) 100 self.storage_mount(5)
99 101
100 @skipIfQemu('qemuall', 'Test only runs on real hardware') 102 @skipIfQemu()
101 @OETestDepends(['storage.UsbTest.test_usb_mount']) 103 @OETestDepends(['storage.UsbTest.test_usb_mount'])
102 def test_usb_basic_operations(self): 104 def test_usb_basic_operations(self):
103 self.storage_basic() 105 self.storage_basic()
104 106
105 @skipIfQemu('qemuall', 'Test only runs on real hardware') 107 @skipIfQemu()
106 @OETestDepends(['storage.UsbTest.test_usb_basic_operations']) 108 @OETestDepends(['storage.UsbTest.test_usb_basic_operations'])
107 def test_usb_basic_rw(self): 109 def test_usb_basic_rw(self):
108 self.storage_write() 110 self.storage_write()
109 self.storage_read() 111 self.storage_read()
110 112
111 @skipIfQemu('qemuall', 'Test only runs on real hardware') 113 @skipIfQemu()
112 @OETestDepends(['storage.UsbTest.test_usb_mount']) 114 @OETestDepends(['storage.UsbTest.test_usb_mount'])
113 def test_usb_umount(self): 115 def test_usb_umount(self):
114 self.storage_umount(2) 116 self.storage_umount(2)
@@ -126,24 +128,24 @@ class MMCTest(StorageBase):
126 self.test_file = "mmc.tst" 128 self.test_file = "mmc.tst"
127 self.test_dir = os.path.join(self.mount_point, "oeqa") 129 self.test_dir = os.path.join(self.mount_point, "oeqa")
128 130
129 @skipIfQemu('qemuall', 'Test only runs on real hardware') 131 @skipIfQemu()
130 @OETestDepends(['ssh.SSHTest.test_ssh']) 132 @OETestDepends(['ssh.SSHTest.test_ssh'])
131 def test_mmc_mount(self): 133 def test_mmc_mount(self):
132 self.storage_umount(2) 134 self.storage_umount(2)
133 self.storage_mount() 135 self.storage_mount()
134 136
135 @skipIfQemu('qemuall', 'Test only runs on real hardware') 137 @skipIfQemu()
136 @OETestDepends(['storage.MMCTest.test_mmc_mount']) 138 @OETestDepends(['storage.MMCTest.test_mmc_mount'])
137 def test_mmc_basic_operations(self): 139 def test_mmc_basic_operations(self):
138 self.storage_basic() 140 self.storage_basic()
139 141
140 @skipIfQemu('qemuall', 'Test only runs on real hardware') 142 @skipIfQemu()
141 @OETestDepends(['storage.MMCTest.test_mmc_basic_operations']) 143 @OETestDepends(['storage.MMCTest.test_mmc_basic_operations'])
142 def test_mmc_basic_rw(self): 144 def test_mmc_basic_rw(self):
143 self.storage_write() 145 self.storage_write()
144 self.storage_read() 146 self.storage_read()
145 147
146 @skipIfQemu('qemuall', 'Test only runs on real hardware') 148 @skipIfQemu()
147 @OETestDepends(['storage.MMCTest.test_mmc_mount']) 149 @OETestDepends(['storage.MMCTest.test_mmc_mount'])
148 def test_mmc_umount(self): 150 def test_mmc_umount(self):
149 self.storage_umount(2) 151 self.storage_umount(2)
diff --git a/meta/lib/oeqa/runtime/cases/suspend.py b/meta/lib/oeqa/runtime/cases/suspend.py
index 67b6f7e56f..a625cc5901 100644
--- a/meta/lib/oeqa/runtime/cases/suspend.py
+++ b/meta/lib/oeqa/runtime/cases/suspend.py
@@ -1,3 +1,8 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
1from oeqa.runtime.case import OERuntimeTestCase 6from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.core.decorator.depends import OETestDepends 7from oeqa.core.decorator.depends import OETestDepends
3from oeqa.core.decorator.data import skipIfQemu 8from oeqa.core.decorator.data import skipIfQemu
@@ -23,7 +28,7 @@ class Suspend_Test(OERuntimeTestCase):
23 (status, output) = self.target.run('sudo rtcwake -m mem -s 10') 28 (status, output) = self.target.run('sudo rtcwake -m mem -s 10')
24 self.assertEqual(status, 0, msg = 'Failed to suspends your system to RAM, output : %s' % output) 29 self.assertEqual(status, 0, msg = 'Failed to suspends your system to RAM, output : %s' % output)
25 30
26 @skipIfQemu('qemuall', 'Test only runs on real hardware') 31 @skipIfQemu()
27 @OETestDepends(['ssh.SSHTest.test_ssh']) 32 @OETestDepends(['ssh.SSHTest.test_ssh'])
28 def test_suspend(self): 33 def test_suspend(self):
29 self.test_date() 34 self.test_date()
diff --git a/meta/lib/oeqa/runtime/cases/systemd.py b/meta/lib/oeqa/runtime/cases/systemd.py
index 7c44abe8ed..640f28abe9 100644
--- a/meta/lib/oeqa/runtime/cases/systemd.py
+++ b/meta/lib/oeqa/runtime/cases/systemd.py
@@ -1,8 +1,11 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import re 7import re
8import threading
6import time 9import time
7 10
8from oeqa.runtime.case import OERuntimeTestCase 11from oeqa.runtime.case import OERuntimeTestCase
@@ -66,8 +69,8 @@ class SystemdBasicTests(SystemdTest):
66 """ 69 """
67 endtime = time.time() + (60 * 2) 70 endtime = time.time() + (60 * 2)
68 while True: 71 while True:
69 status, output = self.target.run('SYSTEMD_BUS_TIMEOUT=240s systemctl --state=activating') 72 status, output = self.target.run('SYSTEMD_BUS_TIMEOUT=240s systemctl is-system-running')
70 if "0 loaded units listed" in output: 73 if "running" in output or "degraded" in output:
71 return (True, '') 74 return (True, '')
72 if time.time() >= endtime: 75 if time.time() >= endtime:
73 return (False, output) 76 return (False, output)
@@ -134,6 +137,38 @@ class SystemdServiceTests(SystemdTest):
134 status = self.target.run('mount -oro,remount /')[0] 137 status = self.target.run('mount -oro,remount /')[0]
135 self.assertTrue(status == 0, msg='Remounting / as r/o failed') 138 self.assertTrue(status == 0, msg='Remounting / as r/o failed')
136 139
140 @OETestDepends(['systemd.SystemdBasicTests.test_systemd_basic'])
141 @skipIfNotFeature('minidebuginfo', 'Test requires minidebuginfo to be in DISTRO_FEATURES')
142 @OEHasPackage(['busybox'])
143 def test_systemd_coredump_minidebuginfo(self):
144 """
145 Verify that call-stacks generated by systemd-coredump contain symbolicated call-stacks,
146 extracted from the minidebuginfo metadata (.gnu_debugdata elf section).
147 """
148 # use "env sleep" instead of "sleep" to avoid calling the shell builtin function
149 t_thread = threading.Thread(target=self.target.run, args=("ulimit -c unlimited && env sleep 1000",))
150 t_thread.start()
151 time.sleep(1)
152
153 status, sleep_pid = self.target.run('pidof sleep')
154 # cause segfault on purpose
155 self.target.run('kill -SEGV %s' % sleep_pid)
156 self.assertEqual(status, 0, msg = 'Not able to find process that runs sleep, output : %s' % sleep_pid)
157
158 # Give some time to systemd-coredump@.service to process the coredump
159 for x in range(20):
160 status, output = self.target.run('coredumpctl list %s' % sleep_pid)
161 if status == 0:
162 break
163 time.sleep(1)
164 else:
165 self.fail("Timed out waiting for coredump creation")
166
167 (status, output) = self.target.run('coredumpctl info %s' % sleep_pid)
168 self.assertEqual(status, 0, msg='MiniDebugInfo Test failed: %s' % output)
169 self.assertEqual('sleep_for_duration (busybox.nosuid' in output or 'xnanosleep (sleep.coreutils' in output,
170 True, msg='Call stack is missing minidebuginfo symbols (functions shown as "n/a"): %s' % output)
171
137class SystemdJournalTests(SystemdTest): 172class SystemdJournalTests(SystemdTest):
138 173
139 @OETestDepends(['systemd.SystemdBasicTests.test_systemd_basic']) 174 @OETestDepends(['systemd.SystemdBasicTests.test_systemd_basic'])
@@ -152,7 +187,7 @@ class SystemdJournalTests(SystemdTest):
152 """ 187 """
153 188
154 # The expression chain that uniquely identifies the time boot message. 189 # The expression chain that uniquely identifies the time boot message.
155 expr_items=['Startup finished', 'kernel', 'userspace','\.$'] 190 expr_items=['Startup finished', 'kernel', 'userspace', r'\.$']
156 try: 191 try:
157 output = self.journalctl(args='-o cat --reverse') 192 output = self.journalctl(args='-o cat --reverse')
158 except AssertionError: 193 except AssertionError:
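
Both new systemd checks poll with an upper bound: the boot check loops on systemctl is-system-running until it reports "running" or "degraded", and the coredump check uses Python's for/else, where the else body runs only when the loop finished without a break, i.e. on timeout. A sketch of that idiom with check as a placeholder callable:

    import time

    def wait_for(check, tries=20, delay=1):
        for _ in range(tries):
            if check():
                break
            time.sleep(delay)
        else:
            # Runs only if the loop never hit `break`.
            raise TimeoutError("condition not met after %d tries" % tries)

    # e.g. wait_for(lambda: run("coredumpctl list %s" % pid)[0] == 0)
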
diff --git a/meta/lib/oeqa/runtime/cases/terminal.py b/meta/lib/oeqa/runtime/cases/terminal.py
index 8fcca99f47..96ba3c3195 100644
--- a/meta/lib/oeqa/runtime/cases/terminal.py
+++ b/meta/lib/oeqa/runtime/cases/terminal.py
@@ -1,3 +1,8 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
1from oeqa.runtime.case import OERuntimeTestCase 6from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.core.decorator.depends import OETestDepends 7from oeqa.core.decorator.depends import OETestDepends
3from oeqa.runtime.decorator.package import OEHasPackage 8from oeqa.runtime.decorator.package import OEHasPackage
diff --git a/meta/lib/oeqa/runtime/cases/uki.py b/meta/lib/oeqa/runtime/cases/uki.py
new file mode 100644
index 0000000000..77bc5b9791
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/uki.py
@@ -0,0 +1,16 @@
1# SPDX-License-Identifier: MIT
2#
3
4from oeqa.runtime.case import OERuntimeTestCase
5from oeqa.core.decorator.data import skipIfNotInDataVar
6
7class UkiTest(OERuntimeTestCase):
8
9 @skipIfNotInDataVar('IMAGE_CLASSES', 'uki', 'Test case uki is for images which use uki.bbclass')
10 def test_uki(self):
11 uki_filename = self.td.get('UKI_FILENAME')
12 status, output = self.target.run('ls /boot/EFI/Linux/%s' % uki_filename)
13 self.assertEqual(status, 0, output)
14
15 status, output = self.target.run('echo $( cat /sys/firmware/efi/efivars/LoaderEntrySelected-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep %s' % uki_filename)
16 self.assertEqual(status, 0, output)
diff --git a/meta/lib/oeqa/runtime/cases/usb_hid.py b/meta/lib/oeqa/runtime/cases/usb_hid.py
index 3c292cf661..6f23d2ff51 100644
--- a/meta/lib/oeqa/runtime/cases/usb_hid.py
+++ b/meta/lib/oeqa/runtime/cases/usb_hid.py
@@ -1,3 +1,8 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
1from oeqa.runtime.case import OERuntimeTestCase 6from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.core.decorator.depends import OETestDepends 7from oeqa.core.decorator.depends import OETestDepends
3from oeqa.core.decorator.data import skipIfQemu 8from oeqa.core.decorator.data import skipIfQemu
@@ -14,7 +19,7 @@ class USB_HID_Test(OERuntimeTestCase):
14 return self.assertEqual(status, 0, msg = 'Failed to suspends your system to RAM, output : %s' % output) 19 return self.assertEqual(status, 0, msg = 'Failed to suspends your system to RAM, output : %s' % output)
15 20
16 @OEHasPackage(['xdotool']) 21 @OEHasPackage(['xdotool'])
17 @skipIfQemu('qemuall', 'Test only runs on real hardware') 22 @skipIfQemu()
18 @OETestDepends(['ssh.SSHTest.test_ssh']) 23 @OETestDepends(['ssh.SSHTest.test_ssh'])
19 def test_USB_Hid_input(self): 24 def test_USB_Hid_input(self):
20 self.keyboard_mouse_simulation() 25 self.keyboard_mouse_simulation()
diff --git a/meta/lib/oeqa/runtime/cases/weston.py b/meta/lib/oeqa/runtime/cases/weston.py
index a1c7183213..ee4d336482 100644
--- a/meta/lib/oeqa/runtime/cases/weston.py
+++ b/meta/lib/oeqa/runtime/cases/weston.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -10,7 +12,7 @@ import threading
10import time 12import time
11 13
12class WestonTest(OERuntimeTestCase): 14class WestonTest(OERuntimeTestCase):
13 weston_log_file = '/tmp/weston.log' 15 weston_log_file = '/tmp/weston-2.log'
14 16
15 @classmethod 17 @classmethod
16 def tearDownClass(cls): 18 def tearDownClass(cls):
@@ -31,13 +33,13 @@ class WestonTest(OERuntimeTestCase):
31 return output.split(" ") 33 return output.split(" ")
32 34
33 def get_weston_command(self, cmd): 35 def get_weston_command(self, cmd):
34 return 'export XDG_RUNTIME_DIR=/run/user/0; export WAYLAND_DISPLAY=wayland-0; %s' % cmd 36 return 'export XDG_RUNTIME_DIR=/run/user/`id -u weston`; export WAYLAND_DISPLAY=wayland-1; %s' % cmd
35 37
36 def run_weston_init(self): 38 def run_weston_init(self):
37 if 'systemd' in self.tc.td['VIRTUAL-RUNTIME_init_manager']: 39 if 'systemd' in self.tc.td['VIRTUAL-RUNTIME_init_manager']:
38 self.target.run('systemd-run --collect --unit=weston-ptest.service --uid=0 -p PAMName=login -p TTYPath=/dev/tty6 -E XDG_RUNTIME_DIR=/tmp -E WAYLAND_DISPLAY=wayland-0 /usr/bin/weston --socket=wayland-1 --log=%s' % self.weston_log_file) 40 self.target.run('systemd-run --collect --unit=weston-ptest.service --uid=0 -p PAMName=login -p TTYPath=/dev/tty6 -E XDG_RUNTIME_DIR=/tmp -E WAYLAND_DISPLAY=wayland-0 /usr/bin/weston --socket=wayland-1 --log=%s' % self.weston_log_file)
39 else: 41 else:
40 self.target.run(self.get_weston_command('openvt -- weston --socket=wayland-1 --log=%s' % self.weston_log_file)) 42 self.target.run(self.get_weston_command('openvt -- weston --socket=wayland-2 --log=%s' % self.weston_log_file))
41 43
42 def get_new_wayland_processes(self, existing_wl_processes): 44 def get_new_wayland_processes(self, existing_wl_processes):
43 try_cnt = 0 45 try_cnt = 0
@@ -53,7 +55,11 @@ class WestonTest(OERuntimeTestCase):
53 55
54 @OEHasPackage(['wayland-utils']) 56 @OEHasPackage(['wayland-utils'])
55 def test_wayland_info(self): 57 def test_wayland_info(self):
56 status, output = self.target.run(self.get_weston_command('wayland-info')) 58 if 'systemd' in self.tc.td['VIRTUAL-RUNTIME_init_manager']:
59 command = 'XDG_RUNTIME_DIR=/run wayland-info'
60 else:
61 command = self.get_weston_command('wayland-info')
62 status, output = self.target.run(command)
57 self.assertEqual(status, 0, msg='wayland-info error: %s' % output) 63 self.assertEqual(status, 0, msg='wayland-info error: %s' % output)
58 64
59 @OEHasPackage(['weston']) 65 @OEHasPackage(['weston'])
@@ -73,3 +79,11 @@ class WestonTest(OERuntimeTestCase):
73 self.target.run('kill -9 %s' % w) 79 self.target.run('kill -9 %s' % w)
74 __, weston_log = self.target.run('cat %s' % self.weston_log_file) 80 __, weston_log = self.target.run('cat %s' % self.weston_log_file)
75 self.assertTrue(new_wl_processes, msg='Could not get new weston-desktop-shell processes (%s, try_cnt:%s) weston log: %s' % (new_wl_processes, try_cnt, weston_log)) 81 self.assertTrue(new_wl_processes, msg='Could not get new weston-desktop-shell processes (%s, try_cnt:%s) weston log: %s' % (new_wl_processes, try_cnt, weston_log))
82
83 @skipIfNotFeature('x11', 'Test requires x11 to be in DISTRO_FEATURES')
84 @OEHasPackage(['weston'])
85 def test_weston_supports_xwayland(self):
 86 cmd = 'cat %s | grep "xserver listening on display"' % self.weston_log_file
87 status, output = self.target.run(cmd)
88 msg = ('xwayland does not appear to be running')
89 self.assertEqual(status, 0, msg=msg)
diff --git a/meta/lib/oeqa/runtime/cases/x32lib.py b/meta/lib/oeqa/runtime/cases/x32lib.py
index f419c8f181..014da4b386 100644
--- a/meta/lib/oeqa/runtime/cases/x32lib.py
+++ b/meta/lib/oeqa/runtime/cases/x32lib.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/xorg.py b/meta/lib/oeqa/runtime/cases/xorg.py
index d6845587c2..09afb1e3d1 100644
--- a/meta/lib/oeqa/runtime/cases/xorg.py
+++ b/meta/lib/oeqa/runtime/cases/xorg.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/context.py b/meta/lib/oeqa/runtime/context.py
index 3826f27642..daabc44910 100644
--- a/meta/lib/oeqa/runtime/context.py
+++ b/meta/lib/oeqa/runtime/context.py
@@ -5,11 +5,12 @@
5# 5#
6 6
7import os 7import os
8import sys
8 9
9from oeqa.core.context import OETestContext, OETestContextExecutor 10from oeqa.core.context import OETestContext, OETestContextExecutor
11from oeqa.core.target.serial import OESerialTarget
10from oeqa.core.target.ssh import OESSHTarget 12from oeqa.core.target.ssh import OESSHTarget
11from oeqa.core.target.qemu import OEQemuTarget 13from oeqa.core.target.qemu import OEQemuTarget
12from oeqa.utils.dump import HostDumper
13 14
14from oeqa.runtime.loader import OERuntimeTestLoader 15from oeqa.runtime.loader import OERuntimeTestLoader
15 16
@@ -19,12 +20,11 @@ class OERuntimeTestContext(OETestContext):
19 os.path.dirname(os.path.abspath(__file__)), "files") 20 os.path.dirname(os.path.abspath(__file__)), "files")
20 21
21 def __init__(self, td, logger, target, 22 def __init__(self, td, logger, target,
22 host_dumper, image_packages, extract_dir): 23 image_packages, extract_dir):
23 super(OERuntimeTestContext, self).__init__(td, logger) 24 super(OERuntimeTestContext, self).__init__(td, logger)
24 25
25 self.target = target 26 self.target = target
26 self.image_packages = image_packages 27 self.image_packages = image_packages
27 self.host_dumper = host_dumper
28 self.extract_dir = extract_dir 28 self.extract_dir = extract_dir
29 self._set_target_cmds() 29 self._set_target_cmds()
30 30
@@ -61,16 +61,16 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
61 runtime_group = self.parser.add_argument_group('runtime options') 61 runtime_group = self.parser.add_argument_group('runtime options')
62 62
63 runtime_group.add_argument('--target-type', action='store', 63 runtime_group.add_argument('--target-type', action='store',
64 default=self.default_target_type, choices=['simpleremote', 'qemu'], 64 default=self.default_target_type, choices=['simpleremote', 'qemu', 'serial'],
65 help="Target type of device under test, default: %s" \ 65 help="Target type of device under test, default: %s" \
66 % self.default_target_type) 66 % self.default_target_type)
67 runtime_group.add_argument('--target-ip', action='store', 67 runtime_group.add_argument('--target-ip', action='store',
68 default=self.default_target_ip, 68 default=self.default_target_ip,
69 help="IP address of device under test, default: %s" \ 69 help="IP address and optionally ssh port (default 22) of device under test, for example '192.168.0.7:22'. Default: %s" \
70 % self.default_target_ip) 70 % self.default_target_ip)
71 runtime_group.add_argument('--server-ip', action='store', 71 runtime_group.add_argument('--server-ip', action='store',
72 default=self.default_target_ip, 72 default=self.default_target_ip,
73 help="IP address of device under test, default: %s" \ 73 help="IP address of the test host from test target machine, default: %s" \
74 % self.default_server_ip) 74 % self.default_server_ip)
75 75
76 runtime_group.add_argument('--host-dumper-dir', action='store', 76 runtime_group.add_argument('--host-dumper-dir', action='store',
@@ -109,6 +109,8 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
109 target = OESSHTarget(logger, target_ip, server_ip, **kwargs) 109 target = OESSHTarget(logger, target_ip, server_ip, **kwargs)
110 elif target_type == 'qemu': 110 elif target_type == 'qemu':
111 target = OEQemuTarget(logger, server_ip, **kwargs) 111 target = OEQemuTarget(logger, server_ip, **kwargs)
112 elif target_type == 'serial':
113 target = OESerialTarget(logger, target_ip, server_ip, **kwargs)
112 else: 114 else:
113 # XXX: This code uses the old naming convention for controllers and 115 # XXX: This code uses the old naming convention for controllers and
114 # targets, the idea it is to leave just targets as the controller 116 # targets, the idea it is to leave just targets as the controller
@@ -119,8 +121,7 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
119 # XXX: Don't base your targets on this code it will be refactored 121 # XXX: Don't base your targets on this code it will be refactored
120 # in the near future. 122 # in the near future.
121 # Custom target module loading 123 # Custom target module loading
122 target_modules_path = kwargs.get('target_modules_path', '') 124 controller = OERuntimeTestContextExecutor.getControllerModule(target_type)
123 controller = OERuntimeTestContextExecutor.getControllerModule(target_type, target_modules_path)
124 target = controller(logger, target_ip, server_ip, **kwargs) 125 target = controller(logger, target_ip, server_ip, **kwargs)
125 126
126 return target 127 return target
@@ -130,15 +131,15 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
130 # AttributeError raised if not found. 131 # AttributeError raised if not found.
131 # ImportError raised if a provided module can not be imported. 132 # ImportError raised if a provided module can not be imported.
132 @staticmethod 133 @staticmethod
133 def getControllerModule(target, target_modules_path): 134 def getControllerModule(target):
134 controllerslist = OERuntimeTestContextExecutor._getControllerModulenames(target_modules_path) 135 controllerslist = OERuntimeTestContextExecutor._getControllerModulenames()
135 controller = OERuntimeTestContextExecutor._loadControllerFromName(target, controllerslist) 136 controller = OERuntimeTestContextExecutor._loadControllerFromName(target, controllerslist)
136 return controller 137 return controller
137 138
138 # Return a list of all python modules in lib/oeqa/controllers for each 139 # Return a list of all python modules in lib/oeqa/controllers for each
139 # layer in bbpath 140 # layer in bbpath
140 @staticmethod 141 @staticmethod
141 def _getControllerModulenames(target_modules_path): 142 def _getControllerModulenames():
142 143
143 controllerslist = [] 144 controllerslist = []
144 145
@@ -153,9 +154,12 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
153 else: 154 else:
154 raise RuntimeError("Duplicate controller module found for %s. Layers should create unique controller module names" % module) 155 raise RuntimeError("Duplicate controller module found for %s. Layers should create unique controller module names" % module)
155 156
156 extpath = target_modules_path.split(':') 157 # sys.path can contain duplicate paths, but because of the logic in
157 for p in extpath: 158 # add_controller_list this doesn't work and causes testimage to abort.
158 controllerpath = os.path.join(p, 'lib', 'oeqa', 'controllers') 159 # Remove duplicates using an intermediate dictionary to ensure this
160 # doesn't happen.
161 for p in list(dict.fromkeys(sys.path)):
162 controllerpath = os.path.join(p, 'oeqa', 'controllers')
159 if os.path.exists(controllerpath): 163 if os.path.exists(controllerpath):
160 add_controller_list(controllerpath) 164 add_controller_list(controllerpath)
161 return controllerslist 165 return controllerslist
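
The de-duplication relies on dicts preserving insertion order (guaranteed since Python 3.7), so the scan order of sys.path is kept; a minimal standalone illustration:

# Order-preserving de-duplication, as used above:
paths = ['/usr/lib/py', '/opt/layer/lib', '/usr/lib/py']
assert list(dict.fromkeys(paths)) == ['/usr/lib/py', '/opt/layer/lib']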
@@ -175,16 +179,12 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
175 # Search for and return a controller or None from given module name 179 # Search for and return a controller or None from given module name
176 @staticmethod 180 @staticmethod
177 def _loadControllerFromModule(target, modulename): 181 def _loadControllerFromModule(target, modulename):
178 obj = None
179 # import module, allowing it to raise import exception
180 module = __import__(modulename, globals(), locals(), [target])
181 # look for target class in the module, catching any exceptions as it
182 # is valid that a module may not have the target class.
183 try: 182 try:
184 obj = getattr(module, target) 183 import importlib
185 except: 184 module = importlib.import_module(modulename)
186 obj = None 185 return getattr(module, target)
187 return obj 186 except AttributeError:
187 return None
188 188
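
The rewritten loader is the standard importlib pattern: let ImportError propagate (a bad module name is a real error) but treat a missing attribute as "no controller here". A standalone sketch under those assumptions:

# Import a module by dotted name and return the named class, or None
# if the module exists but doesn't define it.
import importlib

def load_controller(modulename, classname):
    module = importlib.import_module(modulename)  # ImportError propagates
    try:
        return getattr(module, classname)
    except AttributeError:
        return None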
189 @staticmethod 189 @staticmethod
190 def readPackagesManifest(manifest): 190 def readPackagesManifest(manifest):
@@ -200,25 +200,25 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
200 200
201 return image_packages 201 return image_packages
202 202
203 @staticmethod
204 def getHostDumper(cmds, directory):
205 return HostDumper(cmds, directory)
206
207 def _process_args(self, logger, args): 203 def _process_args(self, logger, args):
208 if not args.packages_manifest: 204 if not args.packages_manifest:
209 raise TypeError('Manifest file not provided') 205 raise TypeError('Manifest file not provided')
210 206
211 super(OERuntimeTestContextExecutor, self)._process_args(logger, args) 207 super(OERuntimeTestContextExecutor, self)._process_args(logger, args)
212 208
209 td = self.tc_kwargs['init']['td']
210
213 target_kwargs = {} 211 target_kwargs = {}
212 target_kwargs['machine'] = td.get("MACHINE") or None
214 target_kwargs['qemuboot'] = args.qemu_boot 213 target_kwargs['qemuboot'] = args.qemu_boot
214 target_kwargs['serialcontrol_cmd'] = td.get("TEST_SERIALCONTROL_CMD") or None
215 target_kwargs['serialcontrol_extra_args'] = td.get("TEST_SERIALCONTROL_EXTRA_ARGS") or ""
216 target_kwargs['serialcontrol_ps1'] = td.get("TEST_SERIALCONTROL_PS1") or None
217 target_kwargs['serialcontrol_connect_timeout'] = td.get("TEST_SERIALCONTROL_CONNECT_TIMEOUT") or None
215 218
216 self.tc_kwargs['init']['target'] = \ 219 self.tc_kwargs['init']['target'] = \
217 OERuntimeTestContextExecutor.getTarget(args.target_type, 220 OERuntimeTestContextExecutor.getTarget(args.target_type,
218 None, args.target_ip, args.server_ip, **target_kwargs) 221 None, args.target_ip, args.server_ip, **target_kwargs)
219 self.tc_kwargs['init']['host_dumper'] = \
220 OERuntimeTestContextExecutor.getHostDumper(None,
221 args.host_dumper_dir)
222 self.tc_kwargs['init']['image_packages'] = \ 222 self.tc_kwargs['init']['image_packages'] = \
223 OERuntimeTestContextExecutor.readPackagesManifest( 223 OERuntimeTestContextExecutor.readPackagesManifest(
224 args.packages_manifest) 224 args.packages_manifest)
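
The new serialcontrol kwargs are read from the test data, so a build that wants the 'serial' target type would set the corresponding variables; a hypothetical local.conf fragment (values invented for illustration):

TEST_TARGET = "serial"
TEST_SERIALCONTROL_CMD = "picocom -b 115200 /dev/ttyUSB0"
TEST_SERIALCONTROL_PS1 = "root@target:.*# "
TEST_SERIALCONTROL_CONNECT_TIMEOUT = "60"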
diff --git a/meta/lib/oeqa/runtime/decorator/package.py b/meta/lib/oeqa/runtime/decorator/package.py
index 57178655cc..b78ac9fc38 100644
--- a/meta/lib/oeqa/runtime/decorator/package.py
+++ b/meta/lib/oeqa/runtime/decorator/package.py
@@ -5,7 +5,6 @@
5# 5#
6 6
7from oeqa.core.decorator import OETestDecorator, registerDecorator 7from oeqa.core.decorator import OETestDecorator, registerDecorator
8from oeqa.core.utils.misc import strToSet
9 8
10@registerDecorator 9@registerDecorator
11class OEHasPackage(OETestDecorator): 10class OEHasPackage(OETestDecorator):
@@ -34,25 +33,30 @@ class OEHasPackage(OETestDecorator):
34 def setUpDecorator(self): 33 def setUpDecorator(self):
35 need_pkgs = set() 34 need_pkgs = set()
36 unneed_pkgs = set() 35 unneed_pkgs = set()
37 pkgs = strToSet(self.need_pkgs) 36
38 for pkg in pkgs: 37 # Turn literal strings into a list so we can just iterate over it
38 if isinstance(self.need_pkgs, str):
39 self.need_pkgs = [self.need_pkgs,]
40
41 mlprefix = self.case.td.get("MLPREFIX")
42 for pkg in self.need_pkgs:
39 if pkg.startswith('!'): 43 if pkg.startswith('!'):
40 unneed_pkgs.add(pkg[1:]) 44 unneed_pkgs.add(mlprefix + pkg[1:])
41 else: 45 else:
42 need_pkgs.add(pkg) 46 need_pkgs.add(mlprefix + pkg)
43 47
44 if unneed_pkgs: 48 if unneed_pkgs:
45 msg = 'Checking if %s is not installed' % ', '.join(unneed_pkgs) 49 msg = 'Checking if %s is not installed' % ', '.join(unneed_pkgs)
46 self.logger.debug(msg) 50 self.logger.debug(msg)
47 if not self.case.tc.image_packages.isdisjoint(unneed_pkgs): 51 if not self.case.tc.image_packages.isdisjoint(unneed_pkgs):
48 msg = "Test can't run with %s installed" % ', or'.join(unneed_pkgs) 52 msg = "Test can't run with %s installed" % ', or '.join(unneed_pkgs)
49 self._decorator_fail(msg) 53 self._decorator_fail(msg)
50 54
51 if need_pkgs: 55 if need_pkgs:
52 msg = 'Checking if at least one of %s is installed' % ', '.join(need_pkgs) 56 msg = 'Checking if at least one of %s is installed' % ', '.join(need_pkgs)
53 self.logger.debug(msg) 57 self.logger.debug(msg)
54 if self.case.tc.image_packages.isdisjoint(need_pkgs): 58 if self.case.tc.image_packages.isdisjoint(need_pkgs):
55 msg = "Test requires %s to be installed" % ', or'.join(need_pkgs) 59 msg = "Test requires %s to be installed" % ', or '.join(need_pkgs)
56 self._decorator_fail(msg) 60 self._decorator_fail(msg)
57 61
58 def _decorator_fail(self, msg): 62 def _decorator_fail(self, msg):
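
The decorator's normalization now applies MLPREFIX to both the wanted and the negated ('!'-prefixed) names; as a standalone sketch of that logic:

# Split a package spec into needed/unneeded sets, multilib-aware.
def split_pkgs(pkgs, mlprefix=""):
    if isinstance(pkgs, str):
        pkgs = [pkgs]
    need, unneed = set(), set()
    for pkg in pkgs:
        if pkg.startswith('!'):
            unneed.add(mlprefix + pkg[1:])   # '!foo' -> must not be installed
        else:
            need.add(mlprefix + pkg)
    return need, unneed

# e.g. split_pkgs(['bash', '!dropbear'], 'lib32-')
# -> ({'lib32-bash'}, {'lib32-dropbear'})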
diff --git a/meta/lib/oeqa/runtime/files/hello.stp b/meta/lib/oeqa/runtime/files/hello.stp
deleted file mode 100644
index 3677147162..0000000000
--- a/meta/lib/oeqa/runtime/files/hello.stp
+++ /dev/null
@@ -1 +0,0 @@
1probe oneshot { println("hello world") }
diff --git a/meta/lib/oeqa/sdk/case.py b/meta/lib/oeqa/sdk/case.py
index c45882689c..1fd3b3b569 100644
--- a/meta/lib/oeqa/sdk/case.py
+++ b/meta/lib/oeqa/sdk/case.py
@@ -6,8 +6,11 @@
6 6
7import os 7import os
8import subprocess 8import subprocess
9import shutil
10import unittest
9 11
10from oeqa.core.case import OETestCase 12from oeqa.core.case import OETestCase
13from oeqa.sdkext.context import OESDKExtTestContext
11 14
12class OESDKTestCase(OETestCase): 15class OESDKTestCase(OETestCase):
13 def _run(self, cmd): 16 def _run(self, cmd):
@@ -15,18 +18,76 @@ class OESDKTestCase(OETestCase):
15 (self.tc.sdk_env, cmd), shell=True, executable="/bin/bash", 18 (self.tc.sdk_env, cmd), shell=True, executable="/bin/bash",
16 stderr=subprocess.STDOUT, universal_newlines=True) 19 stderr=subprocess.STDOUT, universal_newlines=True)
17 20
21 def ensure_host_package(self, *packages, recipe=None):
22 """
23 Check that the host variant of one of the packages listed is available
24 in the SDK (nativesdk-foo for SDK, foo-native for eSDK). The packages are
25 given as a list because debian-renaming may have occurred, and the
26 manifest could contain 'foo' or 'libfoo'.
27
28 If testing an eSDK and the package is not found, then try to install the
29 specified recipe from sstate.
30 """
31
32 # In an SDK the manifest is correct. In an eSDK the manifest may be
33 # correct (type=full) or may omit packages that exist in sstate but are
34 # not installed yet (minimal), so we should try to install the recipe.
35 for package in packages:
36 if isinstance(self.tc, OESDKExtTestContext):
37 package = package + "-native"
38 else:
39 package = "nativesdk-" + package
40
41 if self.tc.hasHostPackage(package):
42 break
43 else:
44 if isinstance(self.tc, OESDKExtTestContext):
45 recipe = (recipe or packages[0]) + "-native"
46 print("Trying to install %s..." % recipe)
47 self._run('devtool sdk-install %s' % recipe)
48 else:
49 raise unittest.SkipTest("Test %s needs one of %s" % (self.id(), ", ".join(packages)))
50
51 def ensure_target_package(self, *packages, multilib=False, recipe=None):
52 """
53 Check that at least one of the packages listed is available in the SDK,
54 adding the multilib prefix if required. The packages are given as a list
55 because debian-renaming may have occurred, and the manifest could
56 contain 'foo' or 'libfoo'.
57
58 If testing an eSDK and the package is not found, then try to install the
59 specified recipe from sstate.
60 """
61
62 # In an SDK the manifest is correct. In an eSDK the manifest may be
63 # correct (type=full) or may omit packages that exist in sstate but are
64 # not installed yet (minimal), so we should try to install the recipe.
65 for package in packages:
66 if self.tc.hasTargetPackage(package, multilib=multilib):
67 break
68 else:
69 if isinstance(self.tc, OESDKExtTestContext):
70 recipe = recipe or packages[0]
71 print("Trying to install %s..." % recipe)
72 self._run('devtool sdk-install %s' % recipe)
73 else:
74 raise unittest.SkipTest("Test %s needs one of %s" % (self.id(), ", ".join(packages)))
75
76
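With these helpers, a test case states its dependencies in setUp() and lets the eSDK path fall back to devtool sdk-install; a hypothetical case following the same pattern:

class ExampleTest(OESDKTestCase):
    def setUp(self):
        # Either debian-renamed name is acceptable; on an eSDK the
        # named recipe is installed from sstate if neither is present.
        self.ensure_host_package("cmake")
        self.ensure_target_package("gtk+3", "libgtk-3.0", recipe="gtk+3")
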
18 def fetch(self, workdir, dl_dir, url, archive=None): 77 def fetch(self, workdir, dl_dir, url, archive=None):
19 if not archive: 78 if not archive:
20 from urllib.parse import urlparse 79 from urllib.parse import urlparse
21 archive = os.path.basename(urlparse(url).path) 80 archive = os.path.basename(urlparse(url).path)
22 81
23 if dl_dir: 82 if dl_dir:
24 tarball = os.path.join(dl_dir, archive) 83 archive_tarball = os.path.join(dl_dir, archive)
25 if os.path.exists(tarball): 84 if os.path.exists(archive_tarball):
26 return tarball 85 return archive_tarball
27 86
28 tarball = os.path.join(workdir, archive) 87 tarball = os.path.join(workdir, archive)
29 subprocess.check_output(["wget", "-O", tarball, url], stderr=subprocess.STDOUT) 88 subprocess.check_output(["wget", "-O", tarball, url], stderr=subprocess.STDOUT)
89 if dl_dir and not os.path.exists(archive_tarball):
90 shutil.copyfile(tarball, archive_tarball)
30 return tarball 91 return tarball
31 92
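The amended fetch() now also backfills the download cache, so the flow is: return the cached tarball if present, otherwise download into the work directory and copy the result into dl_dir. A standalone sketch of that flow:

import os, shutil, subprocess

def fetch(workdir, dl_dir, url, archive):
    cached = os.path.join(dl_dir, archive) if dl_dir else None
    if cached and os.path.exists(cached):
        return cached
    tarball = os.path.join(workdir, archive)
    subprocess.check_output(["wget", "-O", tarball, url],
                            stderr=subprocess.STDOUT)
    if cached:
        shutil.copyfile(tarball, cached)  # populate the cache for next time
    return tarball
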
32 def check_elf(self, path, target_os=None, target_arch=None): 93 def check_elf(self, path, target_os=None, target_arch=None):
diff --git a/meta/lib/oeqa/sdk/cases/buildcpio.py b/meta/lib/oeqa/sdk/cases/autotools.py
index e7fc211a47..ee6c522551 100644
--- a/meta/lib/oeqa/sdk/cases/buildcpio.py
+++ b/meta/lib/oeqa/sdk/cases/autotools.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -11,16 +13,21 @@ from oeqa.sdk.case import OESDKTestCase
11from oeqa.utils.subprocesstweak import errors_have_output 13from oeqa.utils.subprocesstweak import errors_have_output
12errors_have_output() 14errors_have_output()
13 15
14class BuildCpioTest(OESDKTestCase): 16class AutotoolsTest(OESDKTestCase):
15 """ 17 """
16 Check that autotools will cross-compile correctly. 18 Check that autotools will cross-compile correctly.
17 """ 19 """
20 def setUp(self):
21 libc = self.td.get("TCLIBC")
22 if libc in [ 'newlib' ]:
23 raise unittest.SkipTest("AutotoolsTest class: SDK doesn't contain a supported C library")
24
18 def test_cpio(self): 25 def test_cpio(self):
19 with tempfile.TemporaryDirectory(prefix="cpio-", dir=self.tc.sdk_dir) as testdir: 26 with tempfile.TemporaryDirectory(prefix="cpio-", dir=self.tc.sdk_dir) as testdir:
20 tarball = self.fetch(testdir, self.td["DL_DIR"], "https://ftp.gnu.org/gnu/cpio/cpio-2.13.tar.gz") 27 tarball = self.fetch(testdir, self.td["DL_DIR"], "https://ftp.gnu.org/gnu/cpio/cpio-2.15.tar.gz")
21 28
22 dirs = {} 29 dirs = {}
23 dirs["source"] = os.path.join(testdir, "cpio-2.13") 30 dirs["source"] = os.path.join(testdir, "cpio-2.15")
24 dirs["build"] = os.path.join(testdir, "build") 31 dirs["build"] = os.path.join(testdir, "build")
25 dirs["install"] = os.path.join(testdir, "install") 32 dirs["install"] = os.path.join(testdir, "install")
26 33
@@ -28,9 +35,14 @@ class BuildCpioTest(OESDKTestCase):
28 self.assertTrue(os.path.isdir(dirs["source"])) 35 self.assertTrue(os.path.isdir(dirs["source"]))
29 os.makedirs(dirs["build"]) 36 os.makedirs(dirs["build"])
30 37
31 self._run("sed -i -e '/char.*program_name/d' {source}/src/global.c".format(**dirs)) 38 self._run("cd {build} && {source}/configure CFLAGS='-std=gnu17 -Dbool=int -Dtrue=1 -Dfalse=0 -Wno-error=implicit-function-declaration' $CONFIGURE_FLAGS".format(**dirs))
32 self._run("cd {build} && {source}/configure --disable-maintainer-mode $CONFIGURE_FLAGS".format(**dirs)) 39
33 self._run("cd {build} && make -j".format(**dirs)) 40 # Check that configure detected the target correctly
41 with open(os.path.join(dirs["build"], "config.log")) as f:
42 host_sys = self.td["HOST_SYS"]
43 self.assertIn(f"host_alias='{host_sys}'\n", f.readlines())
44
45 self._run("cd {build} && make CFLAGS='-std=gnu17 -Dbool=int -Dtrue=1 -Dfalse=0 -Wno-error=implicit-function-declaration' -j".format(**dirs))
34 self._run("cd {build} && make install DESTDIR={install}".format(**dirs)) 46 self._run("cd {build} && make install DESTDIR={install}".format(**dirs))
35 47
36 self.check_elf(os.path.join(dirs["install"], "usr", "local", "bin", "cpio")) 48 self.check_elf(os.path.join(dirs["install"], "usr", "local", "bin", "cpio"))
diff --git a/meta/lib/oeqa/sdk/cases/buildepoxy.py b/meta/lib/oeqa/sdk/cases/buildepoxy.py
deleted file mode 100644
index 385f8ccca8..0000000000
--- a/meta/lib/oeqa/sdk/cases/buildepoxy.py
+++ /dev/null
@@ -1,41 +0,0 @@
1#
2# SPDX-License-Identifier: MIT
3#
4
5import os
6import subprocess
7import tempfile
8import unittest
9
10from oeqa.sdk.case import OESDKTestCase
11from oeqa.utils.subprocesstweak import errors_have_output
12errors_have_output()
13
14class EpoxyTest(OESDKTestCase):
15 """
16 Test that Meson builds correctly.
17 """
18 def setUp(self):
19 if not (self.tc.hasHostPackage("nativesdk-meson")):
20 raise unittest.SkipTest("GalculatorTest class: SDK doesn't contain Meson")
21
22 def test_epoxy(self):
23 with tempfile.TemporaryDirectory(prefix="epoxy", dir=self.tc.sdk_dir) as testdir:
24 tarball = self.fetch(testdir, self.td["DL_DIR"], "https://github.com/anholt/libepoxy/releases/download/1.5.3/libepoxy-1.5.3.tar.xz")
25
26 dirs = {}
27 dirs["source"] = os.path.join(testdir, "libepoxy-1.5.3")
28 dirs["build"] = os.path.join(testdir, "build")
29 dirs["install"] = os.path.join(testdir, "install")
30
31 subprocess.check_output(["tar", "xf", tarball, "-C", testdir], stderr=subprocess.STDOUT)
32 self.assertTrue(os.path.isdir(dirs["source"]))
33 os.makedirs(dirs["build"])
34
35 log = self._run("meson -Degl=no -Dglx=no -Dx11=false {build} {source}".format(**dirs))
36 # Check that Meson thinks we're doing a cross build and not a native
37 self.assertIn("Build type: cross build", log)
38 self._run("ninja -C {build} -v".format(**dirs))
39 self._run("DESTDIR={install} ninja -C {build} -v install".format(**dirs))
40
41 self.check_elf(os.path.join(dirs["install"], "usr", "local", "lib", "libepoxy.so"))
diff --git a/meta/lib/oeqa/sdk/cases/buildgalculator.py b/meta/lib/oeqa/sdk/cases/buildgalculator.py
deleted file mode 100644
index eb3c8ddf39..0000000000
--- a/meta/lib/oeqa/sdk/cases/buildgalculator.py
+++ /dev/null
@@ -1,43 +0,0 @@
1#
2# SPDX-License-Identifier: MIT
3#
4
5import os
6import subprocess
7import tempfile
8import unittest
9
10from oeqa.sdk.case import OESDKTestCase
11from oeqa.utils.subprocesstweak import errors_have_output
12errors_have_output()
13
14class GalculatorTest(OESDKTestCase):
15 """
16 Test that autotools and GTK+ 3 compiles correctly.
17 """
18 def setUp(self):
19 if not (self.tc.hasTargetPackage("gtk+3", multilib=True) or \
20 self.tc.hasTargetPackage("libgtk-3.0", multilib=True)):
21 raise unittest.SkipTest("GalculatorTest class: SDK don't support gtk+3")
22 if not (self.tc.hasHostPackage("nativesdk-gettext-dev")):
23 raise unittest.SkipTest("GalculatorTest class: SDK doesn't contain gettext")
24
25 def test_galculator(self):
26 with tempfile.TemporaryDirectory(prefix="galculator", dir=self.tc.sdk_dir) as testdir:
27 tarball = self.fetch(testdir, self.td["DL_DIR"], "http://galculator.mnim.org/downloads/galculator-2.1.4.tar.bz2")
28
29 dirs = {}
30 dirs["source"] = os.path.join(testdir, "galculator-2.1.4")
31 dirs["build"] = os.path.join(testdir, "build")
32 dirs["install"] = os.path.join(testdir, "install")
33
34 subprocess.check_output(["tar", "xf", tarball, "-C", testdir], stderr=subprocess.STDOUT)
35 self.assertTrue(os.path.isdir(dirs["source"]))
36 os.makedirs(dirs["build"])
37
38 self._run("cd {source} && sed -i -e '/s_preferences.*prefs;/d' src/main.c && autoreconf -i -f -I $OECORE_TARGET_SYSROOT/usr/share/aclocal -I m4".format(**dirs))
39 self._run("cd {build} && {source}/configure $CONFIGURE_FLAGS".format(**dirs))
40 self._run("cd {build} && make -j".format(**dirs))
41 self._run("cd {build} && make install DESTDIR={install}".format(**dirs))
42
43 self.check_elf(os.path.join(dirs["install"], "usr", "local", "bin", "galculator"))
diff --git a/meta/lib/oeqa/sdk/cases/assimp.py b/meta/lib/oeqa/sdk/cases/cmake.py
index f166758e49..070682ef08 100644
--- a/meta/lib/oeqa/sdk/cases/assimp.py
+++ b/meta/lib/oeqa/sdk/cases/cmake.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -11,30 +13,35 @@ from oeqa.sdk.case import OESDKTestCase
11from oeqa.utils.subprocesstweak import errors_have_output 13from oeqa.utils.subprocesstweak import errors_have_output
12errors_have_output() 14errors_have_output()
13 15
14class BuildAssimp(OESDKTestCase): 16class CMakeTest(OESDKTestCase):
15 """ 17 """
16 Test case to build a project using cmake. 18 Test case to build a project using cmake.
17 """ 19 """
18 20
19 def setUp(self): 21 def setUp(self):
20 if not (self.tc.hasHostPackage("nativesdk-cmake") or 22 libc = self.td.get("TCLIBC")
21 self.tc.hasHostPackage("cmake-native")): 23 if libc in [ 'newlib' ]:
22 raise unittest.SkipTest("Needs cmake") 24 raise unittest.SkipTest("CMakeTest class: SDK doesn't contain a supported C library")
25
26 self.ensure_host_package("cmake")
23 27
24 def test_assimp(self): 28 def test_assimp(self):
25 with tempfile.TemporaryDirectory(prefix="assimp", dir=self.tc.sdk_dir) as testdir: 29 with tempfile.TemporaryDirectory(prefix="assimp", dir=self.tc.sdk_dir) as testdir:
26 tarball = self.fetch(testdir, self.td["DL_DIR"], "https://github.com/assimp/assimp/archive/v4.1.0.tar.gz") 30 tarball = self.fetch(testdir, self.td["DL_DIR"], "https://github.com/assimp/assimp/archive/v5.4.1.tar.gz")
27 31
28 dirs = {} 32 dirs = {}
29 dirs["source"] = os.path.join(testdir, "assimp-4.1.0") 33 dirs["source"] = os.path.join(testdir, "assimp-5.4.1")
30 dirs["build"] = os.path.join(testdir, "build") 34 dirs["build"] = os.path.join(testdir, "build")
31 dirs["install"] = os.path.join(testdir, "install") 35 dirs["install"] = os.path.join(testdir, "install")
32 36
33 subprocess.check_output(["tar", "xf", tarball, "-C", testdir], stderr=subprocess.STDOUT) 37 subprocess.check_output(["tar", "xf", tarball, "-C", testdir], stderr=subprocess.STDOUT)
34 self.assertTrue(os.path.isdir(dirs["source"])) 38 self.assertTrue(os.path.isdir(dirs["source"]))
39 # Apply the zlib patch https://github.com/madler/zlib/commit/a566e156b3fa07b566ddbf6801b517a9dba04fa3
40 # this sed won't be needed once assimp moves its zlib copy to v1.3.1+
41 self._run("sed -i '/# ifdef _FILE_OFFSET_BITS/I,+2 d' {source}/contrib/zlib/gzguts.h".format(**dirs))
35 os.makedirs(dirs["build"]) 42 os.makedirs(dirs["build"])
36 43
37 self._run("cd {build} && cmake -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON {source}".format(**dirs)) 44 self._run("cd {build} && cmake -DASSIMP_WARNINGS_AS_ERRORS=OFF -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON -DASSIMP_BUILD_ZLIB=ON {source}".format(**dirs))
38 self._run("cmake --build {build} -- -j".format(**dirs)) 45 self._run("cmake --build {build} -- -j".format(**dirs))
39 self._run("cmake --build {build} --target install -- DESTDIR={install}".format(**dirs)) 46 self._run("cmake --build {build} --target install -- DESTDIR={install}".format(**dirs))
40 self.check_elf(os.path.join(dirs["install"], "usr", "local", "lib", "libassimp.so.4.1.0")) 47 self.check_elf(os.path.join(dirs["install"], "usr", "local", "lib", "libassimp.so.5.4.1"))
diff --git a/meta/lib/oeqa/sdk/cases/gcc.py b/meta/lib/oeqa/sdk/cases/gcc.py
index eb08eadd28..e810d2c42b 100644
--- a/meta/lib/oeqa/sdk/cases/gcc.py
+++ b/meta/lib/oeqa/sdk/cases/gcc.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -24,6 +26,10 @@ class GccCompileTest(OESDKTestCase):
24 os.path.join(self.tc.sdk_dir, f)) 26 os.path.join(self.tc.sdk_dir, f))
25 27
26 def setUp(self): 28 def setUp(self):
29 libc = self.td.get("TCLIBC")
30 if libc in [ 'newlib' ]:
31 raise unittest.SkipTest("GccCompileTest class: SDK doesn't contain a supported C library")
32
27 machine = self.td.get("MACHINE") 33 machine = self.td.get("MACHINE")
28 if not (self.tc.hasHostPackage("packagegroup-cross-canadian-%s" % machine) or 34 if not (self.tc.hasHostPackage("packagegroup-cross-canadian-%s" % machine) or
29 self.tc.hasHostPackage("^gcc-", regex=True)): 35 self.tc.hasHostPackage("^gcc-", regex=True)):
diff --git a/meta/lib/oeqa/sdk/cases/gtk3.py b/meta/lib/oeqa/sdk/cases/gtk3.py
new file mode 100644
index 0000000000..cdaf50ed38
--- /dev/null
+++ b/meta/lib/oeqa/sdk/cases/gtk3.py
@@ -0,0 +1,40 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8import subprocess
9import tempfile
10
11from oeqa.sdk.cases.meson import MesonTestBase
12
13from oeqa.utils.subprocesstweak import errors_have_output
14errors_have_output()
15
16class GTK3Test(MesonTestBase):
17
18 def setUp(self):
19 super().setUp()
20 self.ensure_target_package("gtk+3", "libgtk-3.0", recipe="gtk+3")
21 self.ensure_host_package("glib-2.0-utils", "libglib-2.0-utils", recipe="glib-2.0")
22
23 """
24 Test that Meson and GTK+ 3 compile correctly.
25 """
26 def test_libhandy(self):
27 with tempfile.TemporaryDirectory(prefix="libhandy", dir=self.tc.sdk_dir) as testdir:
28 tarball = self.fetch(testdir, self.td["DL_DIR"], "https://download.gnome.org/sources/libhandy/1.8/libhandy-1.8.3.tar.xz")
29
30 sourcedir = os.path.join(testdir, "libhandy-1.8.3")
31 builddir = os.path.join(testdir, "build")
32 installdir = os.path.join(testdir, "install")
33
34 subprocess.check_output(["tar", "xf", tarball, "-C", testdir], stderr=subprocess.STDOUT)
35 self.assertTrue(os.path.isdir(sourcedir))
36 os.makedirs(builddir)
37
38 self.build_meson(sourcedir, builddir, installdir, "-Dglade_catalog=disabled -Dintrospection=disabled -Dvapi=false")
39 self.assertTrue(os.path.isdir(installdir))
40 self.check_elf(os.path.join(installdir, "usr", "local", "lib", "libhandy-1.so"))
diff --git a/meta/lib/oeqa/sdk/cases/kmod.py b/meta/lib/oeqa/sdk/cases/kmod.py
new file mode 100644
index 0000000000..0aa6f702e4
--- /dev/null
+++ b/meta/lib/oeqa/sdk/cases/kmod.py
@@ -0,0 +1,39 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8import subprocess
9import tempfile
10
11from oeqa.sdk.case import OESDKTestCase
12from oeqa.sdkext.context import OESDKExtTestContext
13from oeqa.utils.subprocesstweak import errors_have_output
14errors_have_output()
15
16class KernelModuleTest(OESDKTestCase):
17 """
18 Test that out-of-tree kernel modules build.
19 """
20 def test_cryptodev(self):
21 if isinstance(self.tc, OESDKExtTestContext):
22 self.skipTest(f"{self.id()} does not support eSDK (https://bugzilla.yoctoproject.org/show_bug.cgi?id=15850)")
23
24 self.ensure_target_package("kernel-devsrc")
25 # These targets need to be built before kernel modules can be built.
26 self._run("make -j -C $OECORE_TARGET_SYSROOT/usr/src/kernel prepare scripts")
27
28 with tempfile.TemporaryDirectory(prefix="cryptodev", dir=self.tc.sdk_dir) as testdir:
29 git_url = "https://github.com/cryptodev-linux/cryptodev-linux"
30 # This is a known-good commit post-1.13 that builds with kernel 6.7+
31 git_sha = "bb8bc7cf60d2c0b097c8b3b0e807f805b577a53f"
32
33 sourcedir = os.path.join(testdir, "cryptodev-linux")
34 subprocess.check_output(["git", "clone", git_url, sourcedir], stderr=subprocess.STDOUT)
35 self.assertTrue(os.path.isdir(sourcedir))
36 subprocess.check_output(["git", "-C", sourcedir, "checkout", git_sha], stderr=subprocess.STDOUT)
37
38 self._run("make -C %s V=1 KERNEL_DIR=$OECORE_TARGET_SYSROOT/usr/src/kernel" % sourcedir)
39 self.check_elf(os.path.join(sourcedir, "cryptodev.ko"))
diff --git a/meta/lib/oeqa/sdk/cases/buildlzip.py b/meta/lib/oeqa/sdk/cases/makefile.py
index 49ae756bf3..e1e2484820 100644
--- a/meta/lib/oeqa/sdk/cases/buildlzip.py
+++ b/meta/lib/oeqa/sdk/cases/makefile.py
@@ -1,16 +1,24 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import os, tempfile, subprocess, unittest 7import os, tempfile, subprocess
8import unittest
6from oeqa.sdk.case import OESDKTestCase 9from oeqa.sdk.case import OESDKTestCase
7from oeqa.utils.subprocesstweak import errors_have_output 10from oeqa.utils.subprocesstweak import errors_have_output
8errors_have_output() 11errors_have_output()
9 12
10class BuildLzipTest(OESDKTestCase): 13class MakefileTest(OESDKTestCase):
11 """ 14 """
12 Test that "plain" compilation works, using just $CC $CFLAGS etc. 15 Test that "plain" compilation works, using just $CC $CFLAGS etc.
13 """ 16 """
17 def setUp(self):
18 libc = self.td.get("TCLIBC")
19 if libc in [ 'newlib' ]:
20 raise unittest.SkipTest("MakefileTest class: SDK doesn't contain a supported C library")
21
14 def test_lzip(self): 22 def test_lzip(self):
15 with tempfile.TemporaryDirectory(prefix="lzip", dir=self.tc.sdk_dir) as testdir: 23 with tempfile.TemporaryDirectory(prefix="lzip", dir=self.tc.sdk_dir) as testdir:
16 tarball = self.fetch(testdir, self.td["DL_DIR"], "http://downloads.yoctoproject.org/mirror/sources/lzip-1.19.tar.gz") 24 tarball = self.fetch(testdir, self.td["DL_DIR"], "http://downloads.yoctoproject.org/mirror/sources/lzip-1.19.tar.gz")
diff --git a/meta/lib/oeqa/sdk/cases/manifest.py b/meta/lib/oeqa/sdk/cases/manifest.py
new file mode 100644
index 0000000000..ee59a5f338
--- /dev/null
+++ b/meta/lib/oeqa/sdk/cases/manifest.py
@@ -0,0 +1,26 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.sdk.case import OESDKTestCase
8from oeqa.sdkext.context import OESDKExtTestContext
9
10
11class ManifestTest(OESDKTestCase):
12 def test_manifests(self):
13 """
14 Verify that the host and target manifests are not empty, unless this is
15 a minimal eSDK without toolchain in which case they should be empty.
16 """
17 if (
18 isinstance(self.tc, OESDKExtTestContext)
19 and self.td.get("SDK_EXT_TYPE") == "minimal"
20 and self.td.get("SDK_INCLUDE_TOOLCHAIN") == "0"
21 ):
22 self.assertEqual(self.tc.target_pkg_manifest, {})
23 self.assertEqual(self.tc.host_pkg_manifest, {})
24 else:
25 self.assertNotEqual(self.tc.target_pkg_manifest, {})
26 self.assertNotEqual(self.tc.host_pkg_manifest, {})
diff --git a/meta/lib/oeqa/sdk/cases/maturin.py b/meta/lib/oeqa/sdk/cases/maturin.py
new file mode 100644
index 0000000000..e3e8edc781
--- /dev/null
+++ b/meta/lib/oeqa/sdk/cases/maturin.py
@@ -0,0 +1,66 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8import shutil
9import unittest
10
11from oeqa.sdk.case import OESDKTestCase
12from oeqa.utils.subprocesstweak import errors_have_output
13
14errors_have_output()
15
16
17class MaturinTest(OESDKTestCase):
18 def setUp(self):
19 self.ensure_host_package("python3-maturin")
20
21 def test_maturin_list_python(self):
22 out = self._run(r"""python3 -c 'import sys; print(f"{sys.executable}\n{sys.version_info.major}.{sys.version_info.minor}")'""")
23 executable, version = out.splitlines()
24
25 output = self._run("maturin list-python")
26 # The output looks like this:
27 # - CPython 3.13 at /usr/bin/python3
28 # We don't want to assume CPython so just check for the version and path.
29 expected = f"{version} at {executable}"
30 self.assertIn(expected, output)
31
32class MaturinDevelopTest(OESDKTestCase):
33 def setUp(self):
34 machine = self.td.get("MACHINE")
35 self.ensure_host_package("python3-maturin")
36
37 if not (
38 self.tc.hasHostPackage("packagegroup-rust-cross-canadian-%s" % machine)
39 ):
40 raise unittest.SkipTest(
41 "Testing 'maturin develop' requires Rust cross-canadian in the SDK"
42 )
43
44 def test_maturin_develop(self):
45 """
46 This test case requires:
47 (1) that a .venv can be created.
48 (2) a functional 'rustc' and 'cargo'
49 """
50 targetdir = os.path.join(self.tc.sdk_dir, "guessing-game")
51 try:
52 shutil.rmtree(targetdir)
53 except FileNotFoundError:
54 pass
55 shutil.copytree(
56 os.path.join(self.tc.files_dir, "maturin/guessing-game"), targetdir
57 )
58
59 self._run("cd %s; python3 -m venv .venv" % targetdir)
60 output = self._run("cd %s; maturin develop" % targetdir)
61 self.assertRegex(output, r"🔗 Found pyo3 bindings with abi3 support for Python ≥ 3.8")
62 self.assertRegex(output, r"🐍 Not using a specific python interpreter")
63 self.assertRegex(output, r"📡 Using build options features from pyproject.toml")
64 self.assertRegex(output, r"Compiling guessing-game v0.1.0")
65 self.assertRegex(output, r"📦 Built wheel for abi3 Python ≥ 3.8")
66 self.assertRegex(output, r"🛠 Installed guessing-game-0.1.0")
diff --git a/meta/lib/oeqa/sdk/cases/meson.py b/meta/lib/oeqa/sdk/cases/meson.py
new file mode 100644
index 0000000000..a809ca3a53
--- /dev/null
+++ b/meta/lib/oeqa/sdk/cases/meson.py
@@ -0,0 +1,72 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import json
8import os
9import subprocess
10import tempfile
11import unittest
12
13from oeqa.sdk.case import OESDKTestCase
14from oeqa.sdkext.context import OESDKExtTestContext
15from oeqa.utils.subprocesstweak import errors_have_output
16errors_have_output()
17
18class MesonTestBase(OESDKTestCase):
19 def setUp(self):
20 libc = self.td.get("TCLIBC")
21 if libc in [ 'newlib' ]:
22 raise unittest.SkipTest("MesonTest class: SDK doesn't contain a supported C library")
23
24 if isinstance(self.tc, OESDKExtTestContext):
25 self.skipTest(f"{self.id()} does not support eSDK (https://bugzilla.yoctoproject.org/show_bug.cgi?id=15854)")
26
27 self.ensure_host_package("meson")
28 self.ensure_host_package("pkgconfig")
29
30 def build_meson(self, sourcedir, builddir, installdir=None, options=""):
31 """
32 Given a source tree in sourcedir, configure it to build in builddir with
33 the specified options, and if installdir is set also install.
34 """
35 log = self._run(f"meson setup --warnlevel 1 {builddir} {sourcedir} {options}")
36
37 # Check that Meson thinks we're doing a cross build and not a native
38 self.assertIn("Build type: cross build", log)
39
40 # Check that the cross-compiler used is the one we set.
41 data = json.loads(self._run(f"meson introspect --compilers {builddir}"))
42 self.assertIn(self.td.get("CC").split()[0], data["host"]["c"]["exelist"])
43
44 # Check that the target architecture was set correctly.
45 data = json.loads(self._run(f"meson introspect --machines {builddir}"))
46 self.assertEqual(data["host"]["cpu"], self.td["HOST_ARCH"])
47
48 self._run(f"meson compile -C {builddir} -v")
49
50 if installdir:
51 self._run(f"meson install -C {builddir} --destdir {installdir}")
52
53class MesonTest(MesonTestBase):
54 """
55 Test that Meson builds correctly.
56 """
57
58 def test_epoxy(self):
59 with tempfile.TemporaryDirectory(prefix="epoxy", dir=self.tc.sdk_dir) as testdir:
60 tarball = self.fetch(testdir, self.td["DL_DIR"], "https://github.com/anholt/libepoxy/releases/download/1.5.3/libepoxy-1.5.3.tar.xz")
61
62 sourcedir = os.path.join(testdir, "libepoxy-1.5.3")
63 builddir = os.path.join(testdir, "build")
64 installdir = os.path.join(testdir, "install")
65
66 subprocess.check_output(["tar", "xf", tarball, "-C", testdir], stderr=subprocess.STDOUT)
67 self.assertTrue(os.path.isdir(sourcedir))
68
69 os.makedirs(builddir)
70 self.build_meson(sourcedir, builddir, installdir, "-Degl=no -Dglx=no -Dx11=false")
71 self.assertTrue(os.path.isdir(installdir))
72 self.check_elf(os.path.join(installdir, "usr", "local", "lib", "libepoxy.so"))
diff --git a/meta/lib/oeqa/sdk/cases/perl.py b/meta/lib/oeqa/sdk/cases/perl.py
index 14d76d820f..a72bd2461a 100644
--- a/meta/lib/oeqa/sdk/cases/perl.py
+++ b/meta/lib/oeqa/sdk/cases/perl.py
@@ -1,8 +1,9 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import unittest
6from oeqa.sdk.case import OESDKTestCase 7from oeqa.sdk.case import OESDKTestCase
7 8
8from oeqa.utils.subprocesstweak import errors_have_output 9from oeqa.utils.subprocesstweak import errors_have_output
@@ -10,9 +11,7 @@ errors_have_output()
10 11
11class PerlTest(OESDKTestCase): 12class PerlTest(OESDKTestCase):
12 def setUp(self): 13 def setUp(self):
13 if not (self.tc.hasHostPackage("nativesdk-perl") or 14 self.ensure_host_package("perl")
14 self.tc.hasHostPackage("perl-native")):
15 raise unittest.SkipTest("No perl package in the SDK")
16 15
17 def test_perl(self): 16 def test_perl(self):
18 cmd = "perl -e '$_=\"Uryyb, jbeyq\"; tr/a-zA-Z/n-za-mN-ZA-M/;print'" 17 cmd = "perl -e '$_=\"Uryyb, jbeyq\"; tr/a-zA-Z/n-za-mN-ZA-M/;print'"
diff --git a/meta/lib/oeqa/sdk/cases/python.py b/meta/lib/oeqa/sdk/cases/python.py
index a334abce5f..b990cd889a 100644
--- a/meta/lib/oeqa/sdk/cases/python.py
+++ b/meta/lib/oeqa/sdk/cases/python.py
@@ -1,29 +1,17 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import subprocess, unittest
6from oeqa.sdk.case import OESDKTestCase 7from oeqa.sdk.case import OESDKTestCase
7 8
8from oeqa.utils.subprocesstweak import errors_have_output 9from oeqa.utils.subprocesstweak import errors_have_output
9errors_have_output() 10errors_have_output()
10 11
11class Python2Test(OESDKTestCase):
12 def setUp(self):
13 if not (self.tc.hasHostPackage("nativesdk-python-core") or
14 self.tc.hasHostPackage("python-core-native")):
15 raise unittest.SkipTest("No python package in the SDK")
16
17 def test_python2(self):
18 cmd = "python -c \"import codecs; print(codecs.encode('Uryyb, jbeyq', 'rot13'))\""
19 output = self._run(cmd)
20 self.assertEqual(output, "Hello, world\n")
21
22class Python3Test(OESDKTestCase): 12class Python3Test(OESDKTestCase):
23 def setUp(self): 13 def setUp(self):
24 if not (self.tc.hasHostPackage("nativesdk-python3-core") or 14 self.ensure_host_package("python3-core", recipe="python3")
25 self.tc.hasHostPackage("python3-core-native")):
26 raise unittest.SkipTest("No python3 package in the SDK")
27 15
28 def test_python3(self): 16 def test_python3(self):
29 cmd = "python3 -c \"import codecs; print(codecs.encode('Uryyb, jbeyq', 'rot13'))\"" 17 cmd = "python3 -c \"import codecs; print(codecs.encode('Uryyb, jbeyq', 'rot13'))\""
diff --git a/meta/lib/oeqa/sdk/cases/rust.py b/meta/lib/oeqa/sdk/cases/rust.py
new file mode 100644
index 0000000000..4b115bebf5
--- /dev/null
+++ b/meta/lib/oeqa/sdk/cases/rust.py
@@ -0,0 +1,58 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8import shutil
9import unittest
10
11from oeqa.sdk.case import OESDKTestCase
12
13from oeqa.utils.subprocesstweak import errors_have_output
14errors_have_output()
15
16class RustCompileTest(OESDKTestCase):
17 td_vars = ['MACHINE']
18
19 @classmethod
20 def setUpClass(self):
21 targetdir = os.path.join(self.tc.sdk_dir, "hello")
22 try:
23 shutil.rmtree(targetdir)
24 except FileNotFoundError:
25 pass
26 shutil.copytree(os.path.join(self.tc.sdk_files_dir, "rust/hello"), targetdir)
27
28 def setUp(self):
29 machine = self.td.get("MACHINE")
30 if not self.tc.hasHostPackage("packagegroup-rust-cross-canadian-%s" % machine):
31 raise unittest.SkipTest("RustCompileTest class: SDK doesn't contain a Rust cross-canadian toolchain")
32
33 def test_cargo_build(self):
34 self._run('cd %s/hello; cargo add zstd' % (self.tc.sdk_dir))
35 self._run('cd %s/hello; cargo build' % self.tc.sdk_dir)
36
37class RustHostCompileTest(OESDKTestCase):
38 td_vars = ['MACHINE', 'SDK_SYS']
39
40 @classmethod
41 def setUpClass(self):
42 targetdir = os.path.join(self.tc.sdk_dir, "hello")
43 try:
44 shutil.rmtree(targetdir)
45 except FileNotFoundError:
46 pass
47 shutil.copytree(os.path.join(self.tc.sdk_files_dir, "rust/hello"), targetdir)
48
49 def setUp(self):
50 machine = self.td.get("MACHINE")
51 if not self.tc.hasHostPackage("packagegroup-rust-cross-canadian-%s" % machine):
52 raise unittest.SkipTest("RustCompileTest class: SDK doesn't contain a Rust cross-canadian toolchain")
53
54 def test_cargo_build(self):
55 sdksys = self.td.get("SDK_SYS")
56 self._run('cd %s/hello; cargo add zstd' % (self.tc.sdk_dir))
57 self._run('cd %s/hello; cargo build --target %s-gnu' % (self.tc.sdk_dir, sdksys))
58 self._run('cd %s/hello; cargo run --target %s-gnu' % (self.tc.sdk_dir, sdksys))
diff --git a/meta/lib/oeqa/sdk/context.py b/meta/lib/oeqa/sdk/context.py
index 01c38c24e6..d4fdd83207 100644
--- a/meta/lib/oeqa/sdk/context.py
+++ b/meta/lib/oeqa/sdk/context.py
@@ -23,6 +23,13 @@ class OESDKTestContext(OETestContext):
23 self.target_pkg_manifest = target_pkg_manifest 23 self.target_pkg_manifest = target_pkg_manifest
24 self.host_pkg_manifest = host_pkg_manifest 24 self.host_pkg_manifest = host_pkg_manifest
25 25
26 # match multilib according to sdk_env
27 self.multilib = ""
28 multilibs = self.td.get('MULTILIB_VARIANTS', '').split()
29 for ml in multilibs:
30 if ml in os.path.basename(self.sdk_env):
31 self.multilib = ml
32
26 def _hasPackage(self, manifest, pkg, regex=False): 33 def _hasPackage(self, manifest, pkg, regex=False):
27 if regex: 34 if regex:
28 # do regex match 35 # do regex match
@@ -40,12 +47,8 @@ class OESDKTestContext(OETestContext):
40 return self._hasPackage(self.host_pkg_manifest, pkg, regex=regex) 47 return self._hasPackage(self.host_pkg_manifest, pkg, regex=regex)
41 48
42 def hasTargetPackage(self, pkg, multilib=False, regex=False): 49 def hasTargetPackage(self, pkg, multilib=False, regex=False):
43 if multilib: 50 if multilib and self.multilib:
44 # match multilib according to sdk_env 51 pkg = self.multilib + '-' + pkg
45 mls = self.td.get('MULTILIB_VARIANTS', '').split()
46 for ml in mls:
47 if ('ml'+ml) in self.sdk_env:
48 pkg = ml + '-' + pkg
49 return self._hasPackage(self.target_pkg_manifest, pkg, regex=regex) 52 return self._hasPackage(self.target_pkg_manifest, pkg, regex=regex)
50 53
51class OESDKTestContextExecutor(OETestContextExecutor): 54class OESDKTestContextExecutor(OETestContextExecutor):
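
The multilib match now happens once in the constructor, against the basename of the environment script rather than an 'ml'+variant substring; a hypothetical example of the effect:

import os
# Assume MULTILIB_VARIANTS = "lib32" and this environment script name:
sdk_env = "environment-setup-lib32-x86-pokymllib32-linux"
multilib = ""
for ml in "lib32".split():
    if ml in os.path.basename(sdk_env):
        multilib = ml
pkg = "gtk+3"
if multilib:
    pkg = multilib + '-' + pkg  # hasTargetPackage() then looks up "lib32-gtk+3"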
diff --git a/meta/lib/oeqa/sdk/files/rust/hello/Cargo.toml b/meta/lib/oeqa/sdk/files/rust/hello/Cargo.toml
new file mode 100644
index 0000000000..fe619478a6
--- /dev/null
+++ b/meta/lib/oeqa/sdk/files/rust/hello/Cargo.toml
@@ -0,0 +1,6 @@
1[package]
2name = "hello"
3version = "0.1.0"
4edition = "2021"
5
6[dependencies]
diff --git a/meta/lib/oeqa/sdk/files/rust/hello/build.rs b/meta/lib/oeqa/sdk/files/rust/hello/build.rs
new file mode 100644
index 0000000000..b1a533d5df
--- /dev/null
+++ b/meta/lib/oeqa/sdk/files/rust/hello/build.rs
@@ -0,0 +1,3 @@
1/* This is the simplest possible build script, present just to invoke the
2 host compiler during the build. */
3fn main() {}
diff --git a/meta/lib/oeqa/sdk/files/rust/hello/src/main.rs b/meta/lib/oeqa/sdk/files/rust/hello/src/main.rs
new file mode 100644
index 0000000000..a06c03f82a
--- /dev/null
+++ b/meta/lib/oeqa/sdk/files/rust/hello/src/main.rs
@@ -0,0 +1,3 @@
1fn main() {
2 println!("Hello, OpenEmbedded world!");
3}
diff --git a/meta/lib/oeqa/sdk/testmetaidesupport.py b/meta/lib/oeqa/sdk/testmetaidesupport.py
new file mode 100644
index 0000000000..00ef30e82e
--- /dev/null
+++ b/meta/lib/oeqa/sdk/testmetaidesupport.py
@@ -0,0 +1,45 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7class TestSDK(object):
8 def run(self, d):
9 import json
10 import logging
11 from oeqa.sdk.context import OESDKTestContext, OESDKTestContextExecutor
12 from oeqa.utils import make_logger_bitbake_compatible
13
14 pn = d.getVar("PN")
15
16 logger = make_logger_bitbake_compatible(logging.getLogger("BitBake"))
17
18 sdk_dir = d.expand("${WORKDIR}/testsdk/")
19 bb.utils.remove(sdk_dir, True)
20 bb.utils.mkdirhier(sdk_dir)
21
22 sdk_envs = OESDKTestContextExecutor._get_sdk_environs(d.getVar("DEPLOY_DIR_IMAGE"))
23 tdname = d.expand("${DEPLOY_DIR_IMAGE}/${PN}.testdata.json")
24 test_data = json.load(open(tdname, "r"))
25
26 host_pkg_manifest = {"cmake-native":"", "gcc-cross":"", "gettext-native":"", "meson-native":"", "perl-native":"", "python3-core-native":"", }
27 target_pkg_manifest = {"gtk+3":""}
28
29 for s in sdk_envs:
30 bb.plain("meta-ide-support based SDK testing environment: %s" % s)
31
32 sdk_env = sdk_envs[s]
33
34 tc = OESDKTestContext(td=test_data, logger=logger, sdk_dir=sdk_dir,
35 sdk_env=sdk_env, target_pkg_manifest=target_pkg_manifest,
36 host_pkg_manifest=host_pkg_manifest)
37
38 tc.loadTests(OESDKTestContextExecutor.default_cases)
39
40 results = tc.runTests()
41 if results:
42 results.logSummary(pn)
43
44 if (not results) or (not results.wasSuccessful()):
45 bb.fatal('%s - FAILED' % (pn,), forcelog=True)
diff --git a/meta/lib/oeqa/sdk/testsdk.py b/meta/lib/oeqa/sdk/testsdk.py
index 35e40187bc..cffcf9f49a 100644
--- a/meta/lib/oeqa/sdk/testsdk.py
+++ b/meta/lib/oeqa/sdk/testsdk.py
@@ -23,14 +23,6 @@ class TestSDKBase(object):
23 return configuration 23 return configuration
24 24
25 @staticmethod 25 @staticmethod
26 def get_sdk_json_result_dir(d):
27 json_result_dir = os.path.join(d.getVar("LOG_DIR"), 'oeqa')
28 custom_json_result_dir = d.getVar("OEQA_JSON_RESULT_DIR")
29 if custom_json_result_dir:
30 json_result_dir = custom_json_result_dir
31 return json_result_dir
32
33 @staticmethod
34 def get_sdk_result_id(configuration): 26 def get_sdk_result_id(configuration):
35 return '%s_%s_%s_%s_%s' % (configuration['TEST_TYPE'], configuration['IMAGE_BASENAME'], configuration['SDKMACHINE'], configuration['MACHINE'], configuration['STARTTIME']) 27 return '%s_%s_%s_%s_%s' % (configuration['TEST_TYPE'], configuration['IMAGE_BASENAME'], configuration['SDKMACHINE'], configuration['MACHINE'], configuration['STARTTIME'])
36 28
@@ -39,6 +31,28 @@ class TestSDK(TestSDKBase):
39 context_class = OESDKTestContext 31 context_class = OESDKTestContext
40 test_type = 'sdk' 32 test_type = 'sdk'
41 33
34 def sdk_dir_names(self, d):
35 """Return list from TESTSDK_CASE_DIRS."""
36 testdirs = d.getVar("TESTSDK_CASE_DIRS")
37 if testdirs:
38 return testdirs.split()
39
40 bb.fatal("TESTSDK_CASE_DIRS unset, can't find SDK test directories.")
41
42 def get_sdk_paths(self, d):
43 """
44 Return a list of paths where SDK test cases reside.
45
46 SDK tests are expected in <LAYER_DIR>/lib/oeqa/<dirname>/cases
47 """
48 paths = []
49 for layer in d.getVar("BBLAYERS").split():
50 for dirname in self.sdk_dir_names(d):
51 case_path = os.path.join(layer, "lib", "oeqa", dirname, "cases")
52 if os.path.isdir(case_path):
53 paths.append(case_path)
54 return paths
55
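Case discovery is thus driven by TESTSDK_CASE_DIRS; with a hypothetical setting the resolution looks like:

TESTSDK_CASE_DIRS ?= "sdk"
# => for every layer in BBLAYERS, probe <layer>/lib/oeqa/sdk/cases
#    e.g. /path/to/poky/meta/lib/oeqa/sdk/cases
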
42 def get_tcname(self, d): 56 def get_tcname(self, d):
43 """ 57 """
44 Get the name of the SDK file 58 Get the name of the SDK file
@@ -72,6 +86,7 @@ class TestSDK(TestSDKBase):
72 86
73 from bb.utils import export_proxies 87 from bb.utils import export_proxies
74 from oeqa.utils import make_logger_bitbake_compatible 88 from oeqa.utils import make_logger_bitbake_compatible
89 from oeqa.utils import get_json_result_dir
75 90
76 pn = d.getVar("PN") 91 pn = d.getVar("PN")
77 logger = make_logger_bitbake_compatible(logging.getLogger("BitBake")) 92 logger = make_logger_bitbake_compatible(logging.getLogger("BitBake"))
@@ -79,6 +94,9 @@ class TestSDK(TestSDKBase):
79 # the SDK tests use the network to download projects to build 94
80 export_proxies(d) 95 export_proxies(d)
81 96
97 # We need the original PATH for testing the eSDK, not with our manipulations
98 os.environ['PATH'] = d.getVar("BB_ORIGENV", False).getVar("PATH")
99
82 tcname = self.get_tcname(d) 100 tcname = self.get_tcname(d)
83 101
84 if not os.path.exists(tcname): 102 if not os.path.exists(tcname):
@@ -118,7 +136,8 @@ class TestSDK(TestSDKBase):
118 host_pkg_manifest=host_pkg_manifest, **context_args) 136 host_pkg_manifest=host_pkg_manifest, **context_args)
119 137
120 try: 138 try:
121 tc.loadTests(self.context_executor_class.default_cases) 139 modules = (d.getVar("TESTSDK_SUITES") or "").split()
140 tc.loadTests(self.get_sdk_paths(d), modules)
122 except Exception as e: 141 except Exception as e:
123 import traceback 142 import traceback
124 bb.fatal("Loading tests failed:\n%s" % traceback.format_exc()) 143 bb.fatal("Loading tests failed:\n%s" % traceback.format_exc())
@@ -131,7 +150,7 @@ class TestSDK(TestSDKBase):
131 component = "%s %s" % (pn, self.context_executor_class.name) 150 component = "%s %s" % (pn, self.context_executor_class.name)
132 context_msg = "%s:%s" % (os.path.basename(tcname), os.path.basename(sdk_env)) 151 context_msg = "%s:%s" % (os.path.basename(tcname), os.path.basename(sdk_env))
133 configuration = self.get_sdk_configuration(d, self.test_type) 152 configuration = self.get_sdk_configuration(d, self.test_type)
134 result.logDetails(self.get_sdk_json_result_dir(d), 153 result.logDetails(get_json_result_dir(d),
135 configuration, 154 configuration,
136 self.get_sdk_result_id(configuration)) 155 self.get_sdk_result_id(configuration))
137 result.logSummary(component, context_msg) 156 result.logSummary(component, context_msg)
diff --git a/meta/lib/oeqa/sdkext/cases/devtool.py b/meta/lib/oeqa/sdkext/cases/devtool.py
index a5c6a76e02..d0746e68eb 100644
--- a/meta/lib/oeqa/sdkext/cases/devtool.py
+++ b/meta/lib/oeqa/sdkext/cases/devtool.py
@@ -69,10 +69,9 @@ class DevtoolTest(OESDKExtTestCase):
69 self._test_devtool_build(self.myapp_cmake_dst) 69 self._test_devtool_build(self.myapp_cmake_dst)
70 70
71 def test_extend_autotools_recipe_creation(self): 71 def test_extend_autotools_recipe_creation(self):
72 req = 'https://github.com/rdfa/librdfa' 72 recipe = "test-dbus-wait"
73 recipe = "librdfa" 73 self._run('devtool sdk-install dbus')
74 self._run('devtool sdk-install libxml2') 74 self._run('devtool add %s https://git.yoctoproject.org/git/dbus-wait' % (recipe) )
75 self._run('devtool add %s %s' % (recipe, req) )
76 try: 75 try:
77 self._run('devtool build %s' % recipe) 76 self._run('devtool build %s' % recipe)
78 finally: 77 finally:
@@ -112,7 +111,7 @@ class SdkUpdateTest(OESDKExtTestCase):
112 cmd = 'oe-publish-sdk %s %s' % (tcname_new, self.publish_dir) 111 cmd = 'oe-publish-sdk %s %s' % (tcname_new, self.publish_dir)
113 subprocess.check_output(cmd, shell=True) 112 subprocess.check_output(cmd, shell=True)
114 113
115 self.http_service = HTTPService(self.publish_dir) 114 self.http_service = HTTPService(self.publish_dir, logger=self.logger)
116 self.http_service.start() 115 self.http_service.start()
117 116
118 self.http_url = "http://127.0.0.1:%d" % self.http_service.port 117 self.http_url = "http://127.0.0.1:%d" % self.http_service.port
diff --git a/meta/lib/oeqa/sdkext/context.py b/meta/lib/oeqa/sdkext/context.py
index 2ac2bf6ff7..2da57e2ccf 100644
--- a/meta/lib/oeqa/sdkext/context.py
+++ b/meta/lib/oeqa/sdkext/context.py
@@ -12,11 +12,11 @@ class OESDKExtTestContext(OESDKTestContext):
12 12
13 # FIXME - We really need to do better mapping of names here, this at 13 # FIXME - We really need to do better mapping of names here, this at
14 # least allows some tests to run 14 # least allows some tests to run
15 def hasHostPackage(self, pkg): 15 def hasHostPackage(self, pkg, regex=False):
16 # We force a toolchain to be installed into the eSDK even if its minimal 16 # We force a toolchain to be installed into the eSDK even if its minimal
17 if pkg.startswith("packagegroup-cross-canadian-"): 17 if pkg.startswith("packagegroup-cross-canadian-"):
18 return True 18 return True
19 return self._hasPackage(self.host_pkg_manifest, pkg) 19 return self._hasPackage(self.host_pkg_manifest, pkg, regex)
20 20
21class OESDKExtTestContextExecutor(OESDKTestContextExecutor): 21class OESDKExtTestContextExecutor(OESDKTestContextExecutor):
22 _context_class = OESDKExtTestContext 22 _context_class = OESDKExtTestContext
diff --git a/meta/lib/oeqa/sdkext/testsdk.py b/meta/lib/oeqa/sdkext/testsdk.py
index ffd185ec55..6dc23065a4 100644
--- a/meta/lib/oeqa/sdkext/testsdk.py
+++ b/meta/lib/oeqa/sdkext/testsdk.py
@@ -16,6 +16,7 @@ class TestSDKExt(TestSDKBase):
16 from bb.utils import export_proxies 16 from bb.utils import export_proxies
17 from oeqa.utils import avoid_paths_in_environ, make_logger_bitbake_compatible, subprocesstweak 17 from oeqa.utils import avoid_paths_in_environ, make_logger_bitbake_compatible, subprocesstweak
18 from oeqa.sdkext.context import OESDKExtTestContext, OESDKExtTestContextExecutor 18 from oeqa.sdkext.context import OESDKExtTestContext, OESDKExtTestContextExecutor
19 from oeqa.utils import get_json_result_dir
19 20
20 pn = d.getVar("PN") 21 pn = d.getVar("PN")
21 logger = make_logger_bitbake_compatible(logging.getLogger("BitBake")) 22 logger = make_logger_bitbake_compatible(logging.getLogger("BitBake"))
@@ -67,10 +68,10 @@ class TestSDKExt(TestSDKBase):
67 # and we don't spend hours downloading kernels for the kernel module test 68 # and we don't spend hours downloading kernels for the kernel module test
68 # Abuse auto.conf since local.conf would be overwritten by the SDK 69 # Abuse auto.conf since local.conf would be overwritten by the SDK
69 with open(os.path.join(sdk_dir, 'conf', 'auto.conf'), 'a+') as f: 70 with open(os.path.join(sdk_dir, 'conf', 'auto.conf'), 'a+') as f:
70 f.write('SSTATE_MIRRORS += " \\n file://.* file://%s/PATH"\n' % test_data.get('SSTATE_DIR')) 71 f.write('SSTATE_MIRRORS += "file://.* file://%s/PATH"\n' % test_data.get('SSTATE_DIR'))
71 f.write('SOURCE_MIRROR_URL = "file://%s"\n' % test_data.get('DL_DIR')) 72 f.write('SOURCE_MIRROR_URL = "file://%s"\n' % test_data.get('DL_DIR'))
72 f.write('INHERIT += "own-mirrors"\n') 73 f.write('INHERIT += "own-mirrors"\n')
73 f.write('PREMIRRORS_prepend = " git://git.yoctoproject.org/.* git://%s/git2/git.yoctoproject.org.BASENAME \\n "\n' % test_data.get('DL_DIR')) 74 f.write('PREMIRRORS:prepend = "git://git.yoctoproject.org/.* git://%s/git2/git.yoctoproject.org.BASENAME "\n' % test_data.get('DL_DIR'))
74 75
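With the override-syntax fixes, the appended auto.conf comes out with lines of this shape (paths hypothetical):

SSTATE_MIRRORS += "file://.* file:///srv/sstate-cache/PATH"
SOURCE_MIRROR_URL = "file:///srv/downloads"
INHERIT += "own-mirrors"
PREMIRRORS:prepend = "git://git.yoctoproject.org/.* git:///srv/downloads/git2/git.yoctoproject.org.BASENAME "
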
75 # We need to do this in case we have a minimal SDK 76 # We need to do this in case we have a minimal SDK
76 subprocess.check_output(". %s > /dev/null; devtool sdk-install meta-extsdk-toolchain" % \ 77 subprocess.check_output(". %s > /dev/null; devtool sdk-install meta-extsdk-toolchain" % \
@@ -81,7 +82,8 @@ class TestSDKExt(TestSDKBase):
81 host_pkg_manifest=host_pkg_manifest) 82 host_pkg_manifest=host_pkg_manifest)
82 83
83 try: 84 try:
84 tc.loadTests(OESDKExtTestContextExecutor.default_cases) 85 modules = (d.getVar("TESTSDK_SUITES") or "").split()
86 tc.loadTests(OESDKExtTestContextExecutor.default_cases, modules)
85 except Exception as e: 87 except Exception as e:
86 import traceback 88 import traceback
87 bb.fatal("Loading tests failed:\n%s" % traceback.format_exc()) 89 bb.fatal("Loading tests failed:\n%s" % traceback.format_exc())
@@ -91,7 +93,7 @@ class TestSDKExt(TestSDKBase):
91 component = "%s %s" % (pn, OESDKExtTestContextExecutor.name) 93 component = "%s %s" % (pn, OESDKExtTestContextExecutor.name)
92 context_msg = "%s:%s" % (os.path.basename(tcname), os.path.basename(sdk_env)) 94 context_msg = "%s:%s" % (os.path.basename(tcname), os.path.basename(sdk_env))
93 configuration = self.get_sdk_configuration(d, 'sdkext') 95 configuration = self.get_sdk_configuration(d, 'sdkext')
94 result.logDetails(self.get_sdk_json_result_dir(d), 96 result.logDetails(get_json_result_dir(d),
95 configuration, 97 configuration,
96 self.get_sdk_result_id(configuration)) 98 self.get_sdk_result_id(configuration))
97 result.logSummary(component, context_msg) 99 result.logSummary(component, context_msg)
diff --git a/meta/lib/oeqa/selftest/case.py b/meta/lib/oeqa/selftest/case.py
index dcad4f76ec..da35b25f68 100644
--- a/meta/lib/oeqa/selftest/case.py
+++ b/meta/lib/oeqa/selftest/case.py
@@ -117,10 +117,6 @@ class OESelftestTestCase(OETestCase):
117 if e.errno != errno.ENOENT: 117 if e.errno != errno.ENOENT:
118 raise 118 raise
119 119
120 if self.tc.custommachine:
121 machine_conf = 'MACHINE ??= "%s"\n' % self.tc.custommachine
122 self.set_machine_config(machine_conf)
123
124 # tests might need their own setup 120 # tests might need their own setup
125 # but if they override this one they have to call 121
126 # super each time, so let's give them an alternative 122 # super each time, so let's give them an alternative
@@ -178,19 +174,11 @@ class OESelftestTestCase(OETestCase):
178 self.logger.debug("Writing to: %s\n%s\n" % (dest_path, data)) 174 self.logger.debug("Writing to: %s\n%s\n" % (dest_path, data))
179 ftools.write_file(dest_path, data) 175 ftools.write_file(dest_path, data)
180 176
181 if not multiconfig and self.tc.custommachine and 'MACHINE' in data:
182 machine = get_bb_var('MACHINE')
183 self.logger.warning('MACHINE overridden: %s' % machine)
184
185 def append_config(self, data): 177 def append_config(self, data):
186 """Append to <builddir>/conf/selftest.inc""" 178 """Append to <builddir>/conf/selftest.inc"""
187 self.logger.debug("Appending to: %s\n%s\n" % (self.testinc_path, data)) 179 self.logger.debug("Appending to: %s\n%s\n" % (self.testinc_path, data))
188 ftools.append_file(self.testinc_path, data) 180 ftools.append_file(self.testinc_path, data)
189 181
190 if self.tc.custommachine and 'MACHINE' in data:
191 machine = get_bb_var('MACHINE')
192 self.logger.warning('MACHINE overridden: %s' % machine)
193
194 def remove_config(self, data): 182 def remove_config(self, data):
195 """Remove data from <builddir>/conf/selftest.inc""" 183 """Remove data from <builddir>/conf/selftest.inc"""
196 self.logger.debug("Removing from: %s\n%s\n" % (self.testinc_path, data)) 184 self.logger.debug("Removing from: %s\n%s\n" % (self.testinc_path, data))
@@ -249,6 +237,13 @@ class OESelftestTestCase(OETestCase):
249 self.logger.debug("Writing to: %s\n%s\n" % (self.machineinc_path, data)) 237 self.logger.debug("Writing to: %s\n%s\n" % (self.machineinc_path, data))
250 ftools.write_file(self.machineinc_path, data) 238 ftools.write_file(self.machineinc_path, data)
251 239
240 def disable_class(self, classname):
241 destfile = "%s/classes/%s.bbclass" % (self.builddir, classname)
242 os.makedirs(os.path.dirname(destfile), exist_ok=True)
243 self.track_for_cleanup(destfile)
244 self.logger.debug("Creating empty class: %s\n" % (destfile))
245 ftools.write_file(destfile, "")
246
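A hypothetical use of the new disable_class() helper in a selftest, assuming the usual bitbake() helper from oeqa.utils.commands: shadow a class with an empty bbclass for the duration of one test (the file is tracked for cleanup):

def test_build_without_testimage(self):
    self.disable_class("testimage")
    bitbake("core-image-minimal")
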
252 # check whether the path exists 247
253 def assertExists(self, expr, msg=None): 248 def assertExists(self, expr, msg=None):
254 if not os.path.exists(expr): 249 if not os.path.exists(expr):
diff --git a/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py b/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py
index f7c356ad09..2c9584d329 100644
--- a/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py
+++ b/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -6,8 +8,8 @@ import os
6import shutil 8import shutil
7 9
8import oeqa.utils.ftools as ftools 10import oeqa.utils.ftools as ftools
9from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_test_layer 11from oeqa.utils.commands import runCmd, bitbake, get_bb_var
10from oeqa.selftest.cases.sstate import SStateBase 12from oeqa.selftest.cases.sstatetests import SStateBase
11 13
12 14
13class RebuildFromSState(SStateBase): 15class RebuildFromSState(SStateBase):
@@ -90,7 +92,7 @@ class RebuildFromSState(SStateBase):
90 self.assertFalse(failed_cleansstate, msg="The following recipes have failed cleansstate(all others have passed both cleansstate and rebuild from sstate tests): %s" % ' '.join(map(str, failed_cleansstate))) 92 self.assertFalse(failed_cleansstate, msg="The following recipes have failed cleansstate(all others have passed both cleansstate and rebuild from sstate tests): %s" % ' '.join(map(str, failed_cleansstate)))
91 93
92 def test_sstate_relocation(self): 94 def test_sstate_relocation(self):
93 self.run_test_sstate_rebuild(['core-image-sato-sdk'], relocate=True, rebuild_dependencies=True) 95 self.run_test_sstate_rebuild(['core-image-weston-sdk'], relocate=True, rebuild_dependencies=True)
94 96
95 def test_sstate_rebuild(self): 97 def test_sstate_rebuild(self):
96 self.run_test_sstate_rebuild(['core-image-sato-sdk'], relocate=False, rebuild_dependencies=True) 98 self.run_test_sstate_rebuild(['core-image-weston-sdk'], relocate=False, rebuild_dependencies=True)
diff --git a/meta/lib/oeqa/selftest/cases/archiver.py b/meta/lib/oeqa/selftest/cases/archiver.py
index ddd08ecf84..612ec675a7 100644
--- a/meta/lib/oeqa/selftest/cases/archiver.py
+++ b/meta/lib/oeqa/selftest/cases/archiver.py
@@ -1,9 +1,12 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import os 7import os
6import glob 8import glob
9import re
7from oeqa.utils.commands import bitbake, get_bb_vars 10from oeqa.utils.commands import bitbake, get_bb_vars
8from oeqa.selftest.case import OESelftestTestCase 11from oeqa.selftest.case import OESelftestTestCase
9 12
@@ -35,11 +38,11 @@ class Archiver(OESelftestTestCase):
35 src_path = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['TARGET_SYS']) 38 src_path = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['TARGET_SYS'])
36 39
37 # Check that include_recipe was included 40 # Check that include_recipe was included
38 included_present = len(glob.glob(src_path + '/%s-*' % include_recipe)) 41 included_present = len(glob.glob(src_path + '/%s-*/*' % include_recipe))
39 self.assertTrue(included_present, 'Recipe %s was not included.' % include_recipe) 42 self.assertTrue(included_present, 'Recipe %s was not included.' % include_recipe)
40 43
41 # Check that exclude_recipe was excluded 44 # Check that exclude_recipe was excluded
42 excluded_present = len(glob.glob(src_path + '/%s-*' % exclude_recipe)) 45 excluded_present = len(glob.glob(src_path + '/%s-*/*' % exclude_recipe))
43 self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % exclude_recipe) 46 self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % exclude_recipe)
44 47
45 def test_archiver_filters_by_type(self): 48 def test_archiver_filters_by_type(self):
@@ -67,11 +70,11 @@ class Archiver(OESelftestTestCase):
67 src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS']) 70 src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS'])
68 71
69 # Check that target_recipe was included 72 # Check that target_recipe was included
70 included_present = len(glob.glob(src_path_target + '/%s-*' % target_recipe)) 73 included_present = len(glob.glob(src_path_target + '/%s-*/*' % target_recipe))
71 self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipe) 74 self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipe)
72 75
73 # Check that native_recipe was excluded 76 # Check that native_recipe was excluded
74 excluded_present = len(glob.glob(src_path_native + '/%s-*' % native_recipe)) 77 excluded_present = len(glob.glob(src_path_native + '/%s-*/*' % native_recipe))
75 self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipe) 78 self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipe)
76 79
77 def test_archiver_filters_by_type_and_name(self): 80 def test_archiver_filters_by_type_and_name(self):
@@ -104,20 +107,51 @@ class Archiver(OESelftestTestCase):
104 src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS']) 107 src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS'])
105 108
106 # Check that target_recipes[0] and native_recipes[1] were included 109
107 included_present = len(glob.glob(src_path_target + '/%s-*' % target_recipes[0])) 110 included_present = len(glob.glob(src_path_target + '/%s-*/*' % target_recipes[0]))
108 self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipes[0]) 111 self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipes[0])
109 112
110 included_present = len(glob.glob(src_path_native + '/%s-*' % native_recipes[1])) 113 included_present = len(glob.glob(src_path_native + '/%s-*/*' % native_recipes[1]))
111 self.assertTrue(included_present, 'Recipe %s was not included.' % native_recipes[1]) 114 self.assertTrue(included_present, 'Recipe %s was not included.' % native_recipes[1])
112 115
113 # Check that native_recipes[0] and target_recipes[1] were excluded 116 # Check that native_recipes[0] and target_recipes[1] were excluded
114 excluded_present = len(glob.glob(src_path_native + '/%s-*' % native_recipes[0])) 117 excluded_present = len(glob.glob(src_path_native + '/%s-*/*' % native_recipes[0]))
115 self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipes[0]) 118 self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipes[0])
116 119
117 excluded_present = len(glob.glob(src_path_target + '/%s-*' % target_recipes[1])) 120 excluded_present = len(glob.glob(src_path_target + '/%s-*/*' % target_recipes[1]))
118 self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % target_recipes[1]) 121 self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % target_recipes[1])
119 122
123 def test_archiver_multiconfig_shared_unpack_and_patch(self):
124 """
125 Test that shared recipes in original mode with diff enabled work in multiconfig;
126 otherwise the build fails when the same TMPDIR is used.
127 """
128
129 features = 'BBMULTICONFIG = "mc1 mc2"\n'
130 features += 'INHERIT += "archiver"\n'
131 features += 'ARCHIVER_MODE[src] = "original"\n'
132 features += 'ARCHIVER_MODE[diff] = "1"\n'
133 self.write_config(features)
134
135 # We can use any machines in multiconfig as long as they are different
136 self.write_config('MACHINE = "qemuarm"\n', 'mc1')
137 self.write_config('MACHINE = "qemux86"\n', 'mc2')
138
139 task = 'do_unpack_and_patch'
140 # Use gcc-source as it is a shared recipe (PV is appended to PN)
141 pn = 'gcc-source-%s' % get_bb_vars(['PV'], 'gcc')['PV']
142
143 # Generate the tasks signatures
144 bitbake('mc:mc1:%s mc:mc2:%s -c %s -S lockedsigs' % (pn, pn, task))
120 145
146 # Check the task signatures
147 # To be machine-agnostic, the task needs to generate the same signature for each machine
148 locked_sigs_inc = "%s/locked-sigs.inc" % self.builddir
149 locked_sigs = open(locked_sigs_inc).read()
150 task_sigs = re.findall(r"%s:%s:.*" % (pn, task), locked_sigs)
151 uniq_sigs = set(task_sigs)
152 self.assertFalse(len(uniq_sigs) - 1, \
153 'The task "%s" of the recipe "%s" has different signatures in "%s" for each machine in multiconfig' \
154 % (task, pn, locked_sigs_inc))
121 155
122 def test_archiver_srpm_mode(self): 156 def test_archiver_srpm_mode(self):
123 """ 157 """
@@ -156,28 +190,28 @@ class Archiver(OESelftestTestCase):
156 Test that the archiver works with `ARCHIVER_MODE[src] = "original"`. 190 Test that the archiver works with `ARCHIVER_MODE[src] = "original"`.
157 """ 191 """
158 192
159 self._test_archiver_mode('original', 'ed-1.14.1.tar.lz') 193 self._test_archiver_mode('original', 'ed-1.21.1.tar.lz')
160 194
161 def test_archiver_mode_patched(self): 195 def test_archiver_mode_patched(self):
162 """ 196 """
163 Test that the archiver works with `ARCHIVER_MODE[src] = "patched"`. 197 Test that the archiver works with `ARCHIVER_MODE[src] = "patched"`.
164 """ 198 """
165 199
166 self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-patched.tar.gz') 200 self._test_archiver_mode('patched', 'selftest-ed-native-1.21.1-r0-patched.tar.xz')
167 201
168 def test_archiver_mode_configured(self): 202 def test_archiver_mode_configured(self):
169 """ 203 """
170 Test that the archiver works with `ARCHIVER_MODE[src] = "configured"`. 204 Test that the archiver works with `ARCHIVER_MODE[src] = "configured"`.
171 """ 205 """
172 206
173 self._test_archiver_mode('configured', 'selftest-ed-native-1.14.1-r0-configured.tar.gz') 207 self._test_archiver_mode('configured', 'selftest-ed-native-1.21.1-r0-configured.tar.xz')
174 208
175 def test_archiver_mode_recipe(self): 209 def test_archiver_mode_recipe(self):
176 """ 210 """
177 Test that the archiver works with `ARCHIVER_MODE[recipe] = "1"`. 211 Test that the archiver works with `ARCHIVER_MODE[recipe] = "1"`.
178 """ 212 """
179 213
180 self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-recipe.tar.gz', 214 self._test_archiver_mode('patched', 'selftest-ed-native-1.21.1-r0-recipe.tar.xz',
181 'ARCHIVER_MODE[recipe] = "1"\n') 215 'ARCHIVER_MODE[recipe] = "1"\n')
182 216
183 def test_archiver_mode_diff(self): 217 def test_archiver_mode_diff(self):
@@ -186,7 +220,7 @@ class Archiver(OESelftestTestCase):
186 Exclusions controlled by `ARCHIVER_MODE[diff-exclude]` are not yet tested. 220 Exclusions controlled by `ARCHIVER_MODE[diff-exclude]` are not yet tested.
187 """ 221 """
188 222
189 self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-diff.gz', 223 self._test_archiver_mode('patched', 'selftest-ed-native-1.21.1-r0-diff.gz',
190 'ARCHIVER_MODE[diff] = "1"\n') 224 'ARCHIVER_MODE[diff] = "1"\n')
191 225
192 def test_archiver_mode_dumpdata(self): 226 def test_archiver_mode_dumpdata(self):
@@ -194,7 +228,7 @@ class Archiver(OESelftestTestCase):
194 Test that the archiver works with `ARCHIVER_MODE[dumpdata] = "1"`. 228 Test that the archiver works with `ARCHIVER_MODE[dumpdata] = "1"`.
195 """ 229 """
196 230
197 self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-showdata.dump', 231 self._test_archiver_mode('patched', 'selftest-ed-native-1.21.1-r0-showdata.dump',
198 'ARCHIVER_MODE[dumpdata] = "1"\n') 232 'ARCHIVER_MODE[dumpdata] = "1"\n')
199 233
200 def test_archiver_mode_mirror(self): 234 def test_archiver_mode_mirror(self):
@@ -202,7 +236,7 @@ class Archiver(OESelftestTestCase):
202 Test that the archiver works with `ARCHIVER_MODE[src] = "mirror"`. 236 Test that the archiver works with `ARCHIVER_MODE[src] = "mirror"`.
203 """ 237 """
204 238
205 self._test_archiver_mode('mirror', 'ed-1.14.1.tar.lz', 239 self._test_archiver_mode('mirror', 'ed-1.21.1.tar.lz',
206 'BB_GENERATE_MIRROR_TARBALLS = "1"\n') 240 'BB_GENERATE_MIRROR_TARBALLS = "1"\n')
207 241
208 def test_archiver_mode_mirror_excludes(self): 242 def test_archiver_mode_mirror_excludes(self):
@@ -213,7 +247,7 @@ class Archiver(OESelftestTestCase):
213 """ 247 """
214 248
215 target='selftest-ed' 249 target='selftest-ed'
216 target_file_name = 'ed-1.14.1.tar.lz' 250 target_file_name = 'ed-1.21.1.tar.lz'
217 251
218 features = 'INHERIT += "archiver"\n' 252 features = 'INHERIT += "archiver"\n'
219 features += 'ARCHIVER_MODE[src] = "mirror"\n' 253 features += 'ARCHIVER_MODE[src] = "mirror"\n'
@@ -251,7 +285,7 @@ class Archiver(OESelftestTestCase):
251 bitbake('-c deploy_archives %s' % (target)) 285 bitbake('-c deploy_archives %s' % (target))
252 286
253 bb_vars = get_bb_vars(['DEPLOY_DIR_SRC']) 287 bb_vars = get_bb_vars(['DEPLOY_DIR_SRC'])
254 for target_file_name in ['ed-1.14.1.tar.lz', 'hello.c']: 288 for target_file_name in ['ed-1.21.1.tar.lz', 'hello.c']:
255 glob_str = os.path.join(bb_vars['DEPLOY_DIR_SRC'], 'mirror', target_file_name) 289 glob_str = os.path.join(bb_vars['DEPLOY_DIR_SRC'], 'mirror', target_file_name)
256 glob_result = glob.glob(glob_str) 290 glob_result = glob.glob(glob_str)
257 self.assertTrue(glob_result, 'Missing archive file %s' % (target_file_name)) 291 self.assertTrue(glob_result, 'Missing archive file %s' % (target_file_name))
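Note on the multiconfig signature check above: -S lockedsigs writes one SIGGEN_LOCKEDSIGS line per task, and the test only passes when every multiconfig yields the same hash for the shared task. A minimal sketch of that comparison (the recipe version, hash value and architecture suffix below are illustrative, not taken from a real build):

    import re

    def uniq_task_sigs(locked_sigs_text, pn, task):
        # Collect the hash from every "<pn>:<task>:<hash>" entry and
        # reduce to the set of distinct signatures; a set of size one
        # means the task is machine-agnostic.
        pattern = r'%s:%s:([^"\s]+)' % (re.escape(pn), re.escape(task))
        return set(re.findall(pattern, locked_sigs_text))

    sample = '''
    SIGGEN_LOCKEDSIGS_t-allarch += "gcc-source-13.2.0:do_unpack_and_patch:1111"
    SIGGEN_LOCKEDSIGS_t-allarch += "gcc-source-13.2.0:do_unpack_and_patch:1111"
    '''
    assert len(uniq_task_sigs(sample, "gcc-source-13.2.0", "do_unpack_and_patch")) == 1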
diff --git a/meta/lib/oeqa/selftest/cases/barebox.py b/meta/lib/oeqa/selftest/cases/barebox.py
new file mode 100644
index 0000000000..3f8f232432
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/barebox.py
@@ -0,0 +1,44 @@
1# QEMU-based barebox bootloader integration testing
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: MIT
6#
7
8from oeqa.selftest.case import OESelftestTestCase
9from oeqa.utils.commands import bitbake, runqemu
10from oeqa.core.decorator.data import skipIfNotArch
11from oeqa.core.decorator import OETestTag
12
13barebox_boot_patterns = {
14 'search_reached_prompt': r"stop autoboot",
15 'search_login_succeeded': r"barebox@[^:]+:[^ ]+ ",
16 'search_cmd_finished': r"barebox@[a-zA-Z0-9\-\s]+:/"
17 }
18
19
20class BareboxTest(OESelftestTestCase):
21
22 @skipIfNotArch(['arm', 'aarch64'])
23 @OETestTag("runqemu")
24 def test_boot_barebox(self):
25 """
26 Tests building barebox and booting it with QEMU
27 """
28
29 self.write_config("""
30QB_DEFAULT_KERNEL = "barebox-dt-2nd.img"
31PREFERRED_PROVIDER_virtual/bootloader = "barebox"
32QEMU_USE_KVM = "False"
33""")
34
35 bitbake("virtual/bootloader core-image-minimal")
36
37 with runqemu('core-image-minimal', ssh=False, runqemuparams='nographic',
38 boot_patterns=barebox_boot_patterns) as qemu:
39
40 # test if barebox console works
41 cmd = "version"
42 status, output = qemu.run_serial(cmd)
43 self.assertEqual(status, 1, msg=output)
44 self.assertTrue("barebox" in output, msg=output)
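The boot_patterns dict above overrides runqemu's default console matching so the test can drive a barebox shell instead of a Linux login. A quick sanity sketch of what those regexes are written against (the console lines are hypothetical, mimicking a barebox boot on QEMU):

    import re

    console = [
        "Hit any key to stop autoboot:  3",   # autoboot countdown
        "barebox@ARM QEMU virt64:/ ",         # interactive prompt
    ]
    assert re.search(r"stop autoboot", console[0])
    assert re.search(r"barebox@[^:]+:[^ ]+ ", console[1])        # login succeeded
    assert re.search(r"barebox@[a-zA-Z0-9\-\s]+:/", console[1])  # command finished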
diff --git a/meta/lib/oeqa/selftest/cases/baremetal.py b/meta/lib/oeqa/selftest/cases/baremetal.py
new file mode 100644
index 0000000000..cadaea2f1a
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/baremetal.py
@@ -0,0 +1,14 @@
1
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: MIT
6#
7
8from oeqa.selftest.case import OESelftestTestCase
9from oeqa.utils.commands import bitbake
10
11class BaremetalTest(OESelftestTestCase):
12 def test_baremetal(self):
13 self.write_config('TCLIBC = "baremetal"')
14 bitbake('baremetal-helloworld')
diff --git a/meta/lib/oeqa/selftest/cases/bbclasses.py b/meta/lib/oeqa/selftest/cases/bbclasses.py
new file mode 100644
index 0000000000..10545ebe65
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/bbclasses.py
@@ -0,0 +1,106 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import get_bb_vars, bitbake
9
10class Systemd(OESelftestTestCase):
11 """
12 Tests related to the systemd bbclass.
13 """
14
15 def getVars(self, recipe):
16 self.bb_vars = get_bb_vars(
17 [
18 'BPN',
19 'D',
20 'INIT_D_DIR',
21 'prefix',
22 'systemd_system_unitdir',
23 'sysconfdir',
24 ],
25 recipe,
26 )
27
28 def fileExists(self, filename):
29 self.assertExists(filename.format(**self.bb_vars))
30
31 def fileNotExists(self, filename):
32 self.assertNotExists(filename.format(**self.bb_vars))
33
34 def test_systemd_in_distro(self):
35 """
36 Summary: Verify that no sysvinit files are installed when the
37 systemd distro feature is enabled, but sysvinit is not.
38 Expected: Systemd service file exists, but /etc does not.
39 Product: OE-Core
40 Author: Peter Kjellerstedt <peter.kjellerstedt@axis.com>
41 """
42
43 self.write_config("""
44DISTRO_FEATURES:append = " systemd usrmerge"
45DISTRO_FEATURES:remove = "sysvinit"
46VIRTUAL-RUNTIME_init_manager = "systemd"
47""")
48 bitbake("systemd-only systemd-and-sysvinit -c install")
49
50 self.getVars("systemd-only")
51 self.fileExists("{D}{systemd_system_unitdir}/{BPN}.service")
52
53 self.getVars("systemd-and-sysvinit")
54 self.fileExists("{D}{systemd_system_unitdir}/{BPN}.service")
55 self.fileNotExists("{D}{sysconfdir}")
56
57 def test_systemd_and_sysvinit_in_distro(self):
58 """
59 Summary: Verify that both systemd and sysvinit files are installed
60 when both the systemd and sysvinit distro features are
61 enabled.
62 Expected: Systemd service file and sysvinit initscript exist.
63 Product: OE-Core
64 Author: Peter Kjellerstedt <peter.kjellerstedt@axis.com>
65 """
66
67 self.write_config("""
68DISTRO_FEATURES:append = " systemd sysvinit usrmerge"
69VIRTUAL-RUNTIME_init_manager = "systemd"
70""")
71 bitbake("systemd-only systemd-and-sysvinit -c install")
72
73 self.getVars("systemd-only")
74 self.fileExists("{D}{systemd_system_unitdir}/{BPN}.service")
75
76 self.getVars("systemd-and-sysvinit")
77 self.fileExists("{D}{systemd_system_unitdir}/{BPN}.service")
78 self.fileExists("{D}{INIT_D_DIR}/{BPN}")
79
80 def test_sysvinit_in_distro(self):
81 """
82 Summary: Verify that no systemd service files are installed when the
83 sysvinit distro feature is enabled, but systemd is not.
84 Expected: The systemd service file does not exist, nor does /usr.
85 The sysvinit initscript exists.
86 Product: OE-Core
87 Author: Peter Kjellerstedt <peter.kjellerstedt@axis.com>
88 """
89
90 self.write_config("""
91DISTRO_FEATURES:remove = "systemd"
92DISTRO_FEATURES:append = " sysvinit usrmerge"
93VIRTUAL-RUNTIME_init_manager = "sysvinit"
94""")
95 bitbake("systemd-only systemd-and-sysvinit -c install")
96
97 self.getVars("systemd-only")
98 self.fileNotExists("{D}{systemd_system_unitdir}/{BPN}.service")
99 self.fileNotExists("{D}{prefix}")
100 self.fileNotExists("{D}{sysconfdir}")
101 self.fileExists("{D}")
102
103 self.getVars("systemd-and-sysvinit")
104 self.fileNotExists("{D}{systemd_system_unitdir}/{BPN}.service")
105 self.fileNotExists("{D}{prefix}")
106 self.fileExists("{D}{INIT_D_DIR}/{BPN}")
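The fileExists/fileNotExists helpers above expand their placeholders from the variables fetched by getVars, using plain str.format. A minimal sketch of that expansion (the variable values are hypothetical):

    bb_vars = {
        "D": "/work/systemd-only/1.0/image",  # hypothetical image dir
        "BPN": "systemd-only",
        "systemd_system_unitdir": "/usr/lib/systemd/system",
    }
    path = "{D}{systemd_system_unitdir}/{BPN}.service".format(**bb_vars)
    # -> /work/systemd-only/1.0/image/usr/lib/systemd/system/systemd-only.service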
diff --git a/meta/lib/oeqa/selftest/cases/bblayers.py b/meta/lib/oeqa/selftest/cases/bblayers.py
index f131d9856c..68b0377720 100644
--- a/meta/lib/oeqa/selftest/cases/bblayers.py
+++ b/meta/lib/oeqa/selftest/cases/bblayers.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -6,12 +8,23 @@ import os
6import re 8import re
7 9
8import oeqa.utils.ftools as ftools 10import oeqa.utils.ftools as ftools
9from oeqa.utils.commands import runCmd, get_bb_var, get_bb_vars 11from oeqa.utils.commands import runCmd, get_bb_var, get_bb_vars, bitbake
10 12
11from oeqa.selftest.case import OESelftestTestCase 13from oeqa.selftest.case import OESelftestTestCase
12 14
13class BitbakeLayers(OESelftestTestCase): 15class BitbakeLayers(OESelftestTestCase):
14 16
17 @classmethod
18 def setUpClass(cls):
19 super(BitbakeLayers, cls).setUpClass()
20 bitbake("python3-jsonschema-native")
21 bitbake("-c addto_recipe_sysroot python3-jsonschema-native")
22
23 def test_bitbakelayers_layerindexshowdepends(self):
24 result = runCmd('bitbake-layers layerindex-show-depends meta-poky')
25 find_in_contents = re.search("openembedded-core", result.output)
26 self.assertTrue(find_in_contents, msg = "openembedded-core should have been listed at this step. bitbake-layers layerindex-show-depends meta-poky output: %s" % result.output)
27
15 def test_bitbakelayers_showcrossdepends(self): 28 def test_bitbakelayers_showcrossdepends(self):
16 result = runCmd('bitbake-layers show-cross-depends') 29 result = runCmd('bitbake-layers show-cross-depends')
17 self.assertIn('aspell', result.output) 30 self.assertIn('aspell', result.output)
@@ -41,7 +54,7 @@ class BitbakeLayers(OESelftestTestCase):
41 bb_file = os.path.join(testoutdir, recipe_path, recipe_file) 54 bb_file = os.path.join(testoutdir, recipe_path, recipe_file)
42 self.assertTrue(os.path.isfile(bb_file), msg = "Cannot find xcursor-transparent-theme_0.1.1.bb in the test_bitbakelayers_flatten local dir.") 55 self.assertTrue(os.path.isfile(bb_file), msg = "Cannot find xcursor-transparent-theme_0.1.1.bb in the test_bitbakelayers_flatten local dir.")
43 contents = ftools.read_file(bb_file) 56 contents = ftools.read_file(bb_file)
44 find_in_contents = re.search("##### bbappended from meta-selftest #####\n(.*\n)*include test_recipe.inc", contents) 57 find_in_contents = re.search(r"##### bbappended from meta-selftest #####\n(.*\n)*include test_recipe.inc", contents)
45 self.assertTrue(find_in_contents, msg = "Flattening layers did not work. bitbake-layers flatten output: %s" % result.output) 58 self.assertTrue(find_in_contents, msg = "Flattening layers did not work. bitbake-layers flatten output: %s" % result.output)
46 59
47 def test_bitbakelayers_add_remove(self): 60 def test_bitbakelayers_add_remove(self):
@@ -72,8 +85,9 @@ class BitbakeLayers(OESelftestTestCase):
72 result = runCmd('bitbake-layers show-recipes -i image') 85 result = runCmd('bitbake-layers show-recipes -i image')
73 self.assertIn('core-image-minimal', result.output) 86 self.assertIn('core-image-minimal', result.output)
74 self.assertNotIn('mtd-utils:', result.output) 87 self.assertNotIn('mtd-utils:', result.output)
75 result = runCmd('bitbake-layers show-recipes -i cmake,pkgconfig') 88 result = runCmd('bitbake-layers show-recipes -i meson,pkgconfig')
76 self.assertIn('libproxy:', result.output) 89 self.assertIn('libproxy:', result.output)
90 result = runCmd('bitbake-layers show-recipes -i cmake,pkgconfig')
77 self.assertNotIn('mtd-utils:', result.output) # doesn't inherit either 91 self.assertNotIn('mtd-utils:', result.output) # doesn't inherit either
78 self.assertNotIn('wget:', result.output) # doesn't inherit cmake 92 self.assertNotIn('wget:', result.output) # doesn't inherit cmake
79 self.assertNotIn('waffle:', result.output) # doesn't inherit pkgconfig 93 self.assertNotIn('waffle:', result.output) # doesn't inherit pkgconfig
@@ -106,6 +120,11 @@ class BitbakeLayers(OESelftestTestCase):
106 120
107 self.assertEqual(bb_vars['BBFILE_PRIORITY_%s' % layername], str(priority), 'BBFILE_PRIORITY_%s != %d' % (layername, priority)) 121 self.assertEqual(bb_vars['BBFILE_PRIORITY_%s' % layername], str(priority), 'BBFILE_PRIORITY_%s != %d' % (layername, priority))
108 122
123 result = runCmd('bitbake-layers save-build-conf {} {}'.format(layerpath, "buildconf-1"))
124 for f in ('local.conf.sample', 'bblayers.conf.sample', 'conf-summary.txt', 'conf-notes.txt'):
125 fullpath = os.path.join(layerpath, "conf", "templates", "buildconf-1", f)
126 self.assertTrue(os.path.exists(fullpath), "Template configuration file {} not found".format(fullpath))
127
109 def get_recipe_basename(self, recipe): 128 def get_recipe_basename(self, recipe):
110 recipe_file = "" 129 recipe_file = ""
111 result = runCmd("bitbake-layers show-recipes -f %s" % recipe) 130 result = runCmd("bitbake-layers show-recipes -f %s" % recipe)
@@ -116,3 +135,139 @@ class BitbakeLayers(OESelftestTestCase):
116 135
117 self.assertTrue(os.path.isfile(recipe_file), msg = "Can't find recipe file for %s" % recipe) 136 self.assertTrue(os.path.isfile(recipe_file), msg = "Can't find recipe file for %s" % recipe)
118 return os.path.basename(recipe_file) 137 return os.path.basename(recipe_file)
138
139 def validate_layersjson(self, json):
140 python = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-jsonschema-native'), 'nativepython3')
141 jsonvalidator = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-jsonschema-native'), 'jsonschema')
142 jsonschema = os.path.join(get_bb_var('COREBASE'), 'meta/files/layers.schema.json')
143 result = runCmd("{} {} -i {} {}".format(python, jsonvalidator, json, jsonschema))
144
145 def test_validate_examplelayersjson(self):
146 json = os.path.join(get_bb_var('COREBASE'), "meta/files/layers.example.json")
147 self.validate_layersjson(json)
148
149 def test_bitbakelayers_setup(self):
150 result = runCmd('bitbake-layers create-layers-setup {}'.format(self.testlayer_path))
151 jsonfile = os.path.join(self.testlayer_path, "setup-layers.json")
152 self.validate_layersjson(jsonfile)
153
154 # The revision-under-test may not necessarily be available on the remote server,
155 # so replace it with a revision that has a yocto-4.1 tag.
156 import json
157 with open(jsonfile) as f:
158 data = json.load(f)
159 for s in data['sources']:
160 data['sources'][s]['git-remote']['rev'] = '5200799866b92259e855051112520006e1aaaac0'
161 with open(jsonfile, 'w') as f:
162 json.dump(data, f)
163
164 testcheckoutdir = os.path.join(self.builddir, 'test-layer-checkout')
165 result = runCmd('{}/setup-layers --destdir {}'.format(self.testlayer_path, testcheckoutdir))
166 layers_json = os.path.join(testcheckoutdir, ".oe-layers.json")
167 self.assertTrue(os.path.exists(layers_json), "File {} not found in test layer checkout".format(layers_json))
168
169 # As setup-layers checks out an old revision of poky, there is no setup-build symlink,
170 # and we need to run oe-setup-build directly from the current poky tree under test
171 oe_setup_build = os.path.join(get_bb_var('COREBASE'), 'scripts/oe-setup-build')
172 oe_setup_build_l = os.path.join(testcheckoutdir, 'setup-build')
173 os.symlink(oe_setup_build, oe_setup_build_l)
174
175 cmd = '{} --layerlist {} list -v'.format(oe_setup_build_l, layers_json)
176 result = runCmd(cmd)
177 cond = "conf/templates/default" in result.output
178 self.assertTrue(cond, "Incorrect output from {}: {}".format(cmd, result.output))
179
180 # Rather than hardcode the build setup cmdline here, let's actually run what the tool suggests to the user
181 conf = None
182 if 'poky-default' in result.output:
183 conf = 'poky-default'
184 elif 'meta-default' in result.output:
185 conf = 'meta-default'
186 self.assertIsNotNone(conf, "Could not find the configuration to set up a build in the output: {}".format(result.output))
187
188 cmd = '{} --layerlist {} setup -c {} --no-shell'.format(oe_setup_build_l, layers_json, conf)
189 result = runCmd(cmd)
190
191 def test_bitbakelayers_updatelayer(self):
192 result = runCmd('bitbake-layers create-layers-setup {}'.format(self.testlayer_path))
193 jsonfile = os.path.join(self.testlayer_path, "setup-layers.json")
194 self.validate_layersjson(jsonfile)
195
196 import json
197 with open(jsonfile) as f:
198 data = json.load(f)
199 repos = []
200 for s in data['sources']:
201 repos.append(s)
202
203 self.assertTrue(len(repos) > 1, "Not enough repositories available")
204 self.validate_layersjson(jsonfile)
205
206 test_ref_1 = 'ref_1'
207 test_ref_2 = 'ref_2'
208
209 # Create a new layers setup using custom references
210 result = runCmd('bitbake-layers create-layers-setup --use-custom-reference {first_repo}:{test_ref} --use-custom-reference {second_repo}:{test_ref} {path}'
211 .format(first_repo=repos[0], second_repo=repos[1], test_ref=test_ref_1, path=self.testlayer_path))
212 self.validate_layersjson(jsonfile)
213
214 with open(jsonfile) as f:
215 data = json.load(f)
216 first_rev_1 = data['sources'][repos[0]]['git-remote']['rev']
217 first_desc_1 = data['sources'][repos[0]]['git-remote']['describe']
218 second_rev_1 = data['sources'][repos[1]]['git-remote']['rev']
219 second_desc_1 = data['sources'][repos[1]]['git-remote']['describe']
220
221 self.assertEqual(first_rev_1, test_ref_1, "Revision not set correctly: '{}'".format(first_rev_1))
222 self.assertEqual(first_desc_1, '', "Describe not cleared: '{}'".format(first_desc_1))
223 self.assertEqual(second_rev_1, test_ref_1, "Revision not set correctly: '{}'".format(second_rev_1))
224 self.assertEqual(second_desc_1, '', "Describe not cleared: '{}'".format(second_desc_1))
225
226 # Update one of the repositories in the layers setup using a different custom reference
227 # This should only update the selected repository, everything else should remain as is
228 result = runCmd('bitbake-layers create-layers-setup --update --use-custom-reference {first_repo}:{test_ref} {path}'
229 .format(first_repo=repos[0], test_ref=test_ref_2, path=self.testlayer_path))
230 self.validate_layersjson(jsonfile)
231
232 with open(jsonfile) as f:
233 data = json.load(f)
234 first_rev_2 = data['sources'][repos[0]]['git-remote']['rev']
235 first_desc_2 = data['sources'][repos[0]]['git-remote']['describe']
236 second_rev_2 = data['sources'][repos[1]]['git-remote']['rev']
237 second_desc_2 = data['sources'][repos[1]]['git-remote']['describe']
238
239 self.assertEqual(first_rev_2, test_ref_2, "Revision not set correctly: '{}'".format(first_rev_2))
240 self.assertEqual(first_desc_2, '', "Describe not cleared: '{}'".format(first_desc_2))
241 self.assertEqual(second_rev_2, second_rev_1, "Revision should not be updated: '{}'".format(second_rev_2))
242 self.assertEqual(second_desc_2, second_desc_1, "Describe should not be updated: '{}'".format(second_desc_2))
243
244class BitbakeConfigBuild(OESelftestTestCase):
245 def test_enable_disable_fragments(self):
246 self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_VARIABLE'), None)
247 self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_ANOTHER_VARIABLE'), None)
248
249 runCmd('bitbake-config-build enable-fragment selftest/test-fragment')
250 self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_VARIABLE'), 'somevalue')
251 self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_ANOTHER_VARIABLE'), None)
252
253 runCmd('bitbake-config-build enable-fragment selftest/more-fragments-here/test-another-fragment')
254 self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_VARIABLE'), 'somevalue')
255 self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_ANOTHER_VARIABLE'), 'someothervalue')
256
257 fragment_metadata_command = "bitbake-getvar -f {} --value {}"
258 result = runCmd(fragment_metadata_command.format("selftest/test-fragment", "BB_CONF_FRAGMENT_SUMMARY"))
259 self.assertIn("This is a configuration fragment intended for testing in oe-selftest context", result.output)
260 result = runCmd(fragment_metadata_command.format("selftest/test-fragment", "BB_CONF_FRAGMENT_DESCRIPTION"))
261 self.assertIn("It defines a variable that can be checked inside the test.", result.output)
262 result = runCmd(fragment_metadata_command.format("selftest/more-fragments-here/test-another-fragment", "BB_CONF_FRAGMENT_SUMMARY"))
263 self.assertIn("This is a second configuration fragment intended for testing in oe-selftest context", result.output)
264 result = runCmd(fragment_metadata_command.format("selftest/more-fragments-here/test-another-fragment", "BB_CONF_FRAGMENT_DESCRIPTION"))
265 self.assertIn("It defines another variable that can be checked inside the test.", result.output)
266
267 runCmd('bitbake-config-build disable-fragment selftest/test-fragment')
268 self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_VARIABLE'), None)
269 self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_ANOTHER_VARIABLE'), 'someothervalue')
270
271 runCmd('bitbake-config-build disable-fragment selftest/more-fragments-here/test-another-fragment')
272 self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_VARIABLE'), None)
273 self.assertEqual(get_bb_var('SELFTEST_FRAGMENT_ANOTHER_VARIABLE'), None)
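Both setup tests above treat setup-layers.json as the single source of truth for pinned layer revisions: rev holds the commit or ref to check out, and describe caches its human-readable form. A sketch of the structure they rewrite (repository name and refs are illustrative):

    import json

    data = {
        "sources": {
            "poky": {
                "git-remote": {"rev": "ref_1", "describe": ""},
            },
        },
    }
    # --update with --use-custom-reference repins only the named source,
    # leaving every other entry untouched:
    data["sources"]["poky"]["git-remote"]["rev"] = "ref_2"
    print(json.dumps(data, indent=2))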
diff --git a/meta/lib/oeqa/selftest/cases/bblock.py b/meta/lib/oeqa/selftest/cases/bblock.py
new file mode 100644
index 0000000000..2b62d2a0aa
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/bblock.py
@@ -0,0 +1,203 @@
1#
2# Copyright (c) 2023 BayLibre, SAS
3# Author: Julien Stepahn <jstephan@baylibre.com>
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import os
9import re
10import bb.tinfoil
11
12import oeqa.utils.ftools as ftools
13from oeqa.utils.commands import runCmd, get_bb_var, get_bb_vars, bitbake
14
15from oeqa.selftest.case import OESelftestTestCase
16
17
18class BBLock(OESelftestTestCase):
19 @classmethod
20 def setUpClass(cls):
21 super(BBLock, cls).setUpClass()
22 cls.lockfile = cls.builddir + "/conf/bblock.conf"
23
24 def unlock_recipes(self, recipes=None, tasks=None):
25 cmd = "bblock -r "
26 if recipes:
27 cmd += " ".join(recipes)
28 if tasks:
29 cmd += " -t " + ",".join(tasks)
30 result = runCmd(cmd)
31
32 if recipes:
33 # ensure all signatures are removed from lockfile
34 contents = ftools.read_file(self.lockfile)
35 for recipe in recipes:
36 for task in tasks:
37 find_in_contents = re.search(
38 r'SIGGEN_LOCKEDSIGS_.+\s\+=\s"%s:%s:.*"' % (recipe, task),
39 contents,
40 )
41 self.assertFalse(
42 find_in_contents,
43 msg="%s:%s should not be present in bblock.conf anymore"
44 % (recipe, task),
45 )
46 self.assertExists(self.lockfile)
47 else:
48 self.assertNotExists(self.lockfile)
49
50 def lock_recipes(self, recipes, tasks=None):
51 cmd = "bblock " + " ".join(recipes)
52 if tasks:
53 cmd += " -t " + ",".join(tasks)
54
55 result = runCmd(cmd)
56
57 self.assertExists(self.lockfile)
58
59 # ensure all signatures are added to lockfile
60 contents = ftools.read_file(self.lockfile)
61 for recipe in recipes:
62 if tasks:
63 for task in tasks:
64 find_in_contents = re.search(
65 r'SIGGEN_LOCKEDSIGS_.+\s\+=\s"%s:%s:.*"' % (recipe, task),
66 contents,
67 )
68 self.assertTrue(
69 find_in_contents,
70 msg="%s:%s was not added into bblock.conf. bblock output: %s"
71 % (recipe, task, result.output),
72 )
73
74 def modify_tasks(self, recipes, tasks):
75 task_append = ""
76 for recipe in recipes:
77 bb_vars = get_bb_vars(["PV"], recipe)
78 recipe_pv = bb_vars["PV"]
79 recipe_append_file = recipe + "_" + recipe_pv + ".bbappend"
80
81 os.mkdir(os.path.join(self.testlayer_path, "recipes-test", recipe))
82 recipe_append_path = os.path.join(
83 self.testlayer_path, "recipes-test", recipe, recipe_append_file
84 )
85
86 for task in tasks:
87 task_append += "%s:append() {\n#modify task hash \n}\n" % task
88 ftools.write_file(recipe_append_path, task_append)
89 self.add_command_to_tearDown(
90 "rm -rf %s" % os.path.join(self.testlayer_path, "recipes-test", recipe)
91 )
92
93 def test_lock_single_recipe_single_task(self):
94 recipes = ["quilt"]
95 tasks = ["do_compile"]
96 self._run_test(recipes, tasks)
97
98 def test_lock_single_recipe_multiple_tasks(self):
99 recipes = ["quilt"]
100 tasks = ["do_compile", "do_install"]
101 self._run_test(recipes, tasks)
102
103 def test_lock_single_recipe_all_tasks(self):
104 recipes = ["quilt"]
105 self._run_test(recipes, None)
106
107 def test_lock_multiple_recipe_single_task(self):
108 recipes = ["quilt", "bc"]
109 tasks = ["do_compile"]
110 self._run_test(recipes, tasks)
111
112 def test_lock_architecture_specific(self):
113 # unlock all recipes and ensure no bblock.conf file exist
114 self.unlock_recipes()
115
116 recipes = ["quilt"]
117 tasks = ["do_compile"]
118
119 # lock quilt's do_compile task for another machine
120 if self.td["MACHINE"] == "qemux86-64":
121 machine = "qemuarm"
122 else:
123 machine = "qemux86-64"
124
125 self.write_config('MACHINE = "%s"\n' % machine)
126
127 self.lock_recipes(recipes, tasks)
128
129 self.write_config('MACHINE = "%s"\n' % self.td["MACHINE"])
130 # modify quilt's do_compile task
131 self.modify_tasks(recipes, tasks)
132
133 # build quilt using the default machine
134 # No Note/Warning should be emitted since sig is locked for another machine
135 # (the quilt package is architecture-dependent)
136 info_message = "NOTE: The following recipes have locked tasks: " + recipes[0]
137 warn_message = "The %s:%s sig is computed to be" % (recipes[0], tasks[0])
138 result = bitbake(recipes[0] + " -n")
139 self.assertNotIn(info_message, result.output)
140 self.assertNotIn(warn_message, result.output)
141
142 # unlock all recipes
143 self.unlock_recipes()
144
145 def _run_test(self, recipes, tasks=None):
146 # unlock all recipes and ensure no bblock.conf file exist
147 self.unlock_recipes()
148
149 self.write_config('BB_SIGNATURE_HANDLER = "OEBasicHash"')
150
151 # lock tasks for recipes
152 result = self.lock_recipes(recipes, tasks)
153
154 if not tasks:
155 tasks = []
156 result = bitbake("-c listtasks " + recipes[0])
157 with bb.tinfoil.Tinfoil() as tinfoil:
158 tinfoil.prepare(config_only=False, quiet=2)
159 d = tinfoil.parse_recipe(recipes[0])
160
161 for line in result.output.splitlines():
162 if line.startswith("do_"):
163 task = line.split()[0]
164 if "setscene" in task:
165 continue
166 if d.getVarFlag(task, "nostamp"):
167 continue
168 tasks.append(task)
169
170 # Build recipes. At this stage we should have a Note about recipes
171 # having locked task sigs, but no warning since the sigs still match
172 info_message = "NOTE: The following recipes have locked tasks: " + " ".join(
173 recipes
174 )
175 for recipe in recipes:
176 result = bitbake(recipe + " -n")
177 self.assertIn(info_message, result.output)
178 for task in tasks:
179 warn_message = "The %s:%s sig is computed to be" % (recipe, task)
180 self.assertNotIn(warn_message, result.output)
181
182 # Modify all tasks that are locked to trigger a sig change, then build the recipes
183 # at this stage we should have a Note as before, but also a Warning for all
184 # locked tasks indicating the sig mismatch
185 self.modify_tasks(recipes, tasks)
186 for recipe in recipes:
187 result = bitbake(recipe + " -n")
188 self.assertIn(info_message, result.output)
189 for task in tasks:
190 warn_message = "The %s:%s sig is computed to be" % (recipe, task)
191 self.assertIn(warn_message, result.output)
192
193 # unlock all tasks and rebuild, no more Note/Warning should remain
194 self.unlock_recipes(recipes, tasks)
195 for recipe in recipes:
196 result = bitbake(recipe + " -n")
197 self.assertNotIn(info_message, result.output)
198 for task in tasks:
199 warn_message = "The %s:%s sig is computed to be" % (recipe, task)
200 self.assertNotIn(warn_message, result.output)
201
202 # unlock all recipes
203 self.unlock_recipes()
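For reference, bblock records one SIGGEN_LOCKEDSIGS line per locked task in conf/bblock.conf, which is what lock_recipes/unlock_recipes grep for above. A small sketch against a hypothetical entry (the architecture suffix and hash are made up):

    import re

    entry = 'SIGGEN_LOCKEDSIGS_t-core2-64 += "quilt:do_compile:0123abcd"'
    pattern = r'SIGGEN_LOCKEDSIGS_.+\s\+=\s"%s:%s:.*"' % ("quilt", "do_compile")
    assert re.search(pattern, entry)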
diff --git a/meta/lib/oeqa/selftest/cases/bblogging.py b/meta/lib/oeqa/selftest/cases/bblogging.py
new file mode 100644
index 0000000000..040c6db089
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/bblogging.py
@@ -0,0 +1,182 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7
8from oeqa.selftest.case import OESelftestTestCase
9from oeqa.utils.commands import bitbake
10
11class BitBakeLogging(OESelftestTestCase):
12
13 def assertCount(self, item, entry, count):
14 self.assertEqual(item.count(entry), count, msg="Output:\n'''\n%s\n'''\ndoesn't contain %d copies of:\n'''\n%s\n'''\n" % (item, count, entry))
15
16 def test_shell_loggingA(self):
17 # no logs, no verbose
18 self.write_config('BBINCLUDELOGS = ""')
19 result = bitbake("logging-test -c shelltest -f", ignore_status = True)
20 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
21 self.assertNotIn("This is shell stdout", result.output)
22 self.assertNotIn("This is shell stderr", result.output)
23
24 def test_shell_loggingB(self):
25 # logs, no verbose
26 self.write_config('BBINCLUDELOGS = "yes"')
27 result = bitbake("logging-test -c shelltest -f", ignore_status = True)
28 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
29 self.assertCount(result.output, "This is shell stdout", 1)
30 self.assertCount(result.output, "This is shell stderr", 1)
31
32 def test_shell_loggingC(self):
33 # no logs, verbose
34 self.write_config('BBINCLUDELOGS = ""')
35 result = bitbake("logging-test -c shelltest -f -v", ignore_status = True)
36 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
37 # two copies due to set +x
38 self.assertCount(result.output, "This is shell stdout", 2)
39 self.assertCount(result.output, "This is shell stderr", 2)
40
41 def test_shell_loggingD(self):
42 # logs, verbose
43 self.write_config('BBINCLUDELOGS = "yes"')
44 result = bitbake("logging-test -c shelltest -f -v", ignore_status = True)
45 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
46 # two copies due to set +x
47 self.assertCount(result.output, "This is shell stdout", 2)
48 self.assertCount(result.output, "This is shell stderr", 2)
49
50 def test_python_exec_func_shell_loggingA(self):
51 # no logs, no verbose
52 self.write_config('BBINCLUDELOGS = ""')
53 result = bitbake("logging-test -c pythontest_exec_func_shell -f",
54 ignore_status = True)
55 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
56 self.assertNotIn("This is shell stdout", result.output)
57 self.assertNotIn("This is shell stderr", result.output)
58
59 def test_python_exec_func_shell_loggingB(self):
60 # logs, no verbose
61 self.write_config('BBINCLUDELOGS = "yes"')
62 result = bitbake("logging-test -c pythontest_exec_func_shell -f",
63 ignore_status = True)
64 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
65 self.assertCount(result.output, "This is shell stdout", 1)
66 self.assertCount(result.output, "This is shell stderr", 1)
67
68 def test_python_exec_func_shell_loggingC(self):
69 # no logs, verbose
70 self.write_config('BBINCLUDELOGS = ""')
71 result = bitbake("logging-test -c pythontest_exec_func_shell -f -v",
72 ignore_status = True)
73 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
74 # two copies due to set +x
75 self.assertCount(result.output, "This is shell stdout", 2)
76 self.assertCount(result.output, "This is shell stderr", 2)
77
78 def test_python_exec_func_shell_loggingD(self):
79 # logs, verbose
80 self.write_config('BBINCLUDELOGS = "yes"')
81 result = bitbake("logging-test -c pythontest_exec_func_shell -f -v",
82 ignore_status = True)
83 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
84 # two copies due to set +x
85 self.assertCount(result.output, "This is shell stdout", 2)
86 self.assertCount(result.output, "This is shell stderr", 2)
87
88 def test_python_exit_loggingA(self):
89 # no logs, no verbose
90 self.write_config('BBINCLUDELOGS = ""')
91 result = bitbake("logging-test -c pythontest_exit -f", ignore_status = True)
92 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
93 self.assertNotIn("This is python stdout", result.output)
94
95 def test_python_exit_loggingB(self):
96 # logs, no verbose
97 self.write_config('BBINCLUDELOGS = "yes"')
98 result = bitbake("logging-test -c pythontest_exit -f", ignore_status = True)
99 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
100 # A sys.exit() should include the output
101 self.assertCount(result.output, "This is python stdout", 1)
102
103 def test_python_exit_loggingC(self):
104 # no logs, verbose
105 self.write_config('BBINCLUDELOGS = ""')
106 result = bitbake("logging-test -c pythontest_exit -f -v", ignore_status = True)
107 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
108 self.assertCount(result.output, "This is python stdout", 1)
109
110 def test_python_exit_loggingD(self):
111 # logs, verbose
112 self.write_config('BBINCLUDELOGS = "yes"')
113 result = bitbake("logging-test -c pythontest_exit -f -v", ignore_status = True)
114 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
115 self.assertCount(result.output, "This is python stdout", 1)
116
117 def test_python_exec_func_python_loggingA(self):
118 # no logs, no verbose
119 self.write_config('BBINCLUDELOGS = ""')
120 result = bitbake("logging-test -c pythontest_exec_func_python -f",
121 ignore_status = True)
122 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
123 self.assertNotIn("This is python stdout", result.output)
124
125 def test_python_exec_func_python_loggingB(self):
126 # logs, no verbose
127 self.write_config('BBINCLUDELOGS = "yes"')
128 result = bitbake("logging-test -c pythontest_exec_func_python -f",
129 ignore_status = True)
130 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
131 # A sys.exit() should include the output
132 self.assertCount(result.output, "This is python stdout", 1)
133
134 def test_python_exec_func_python_loggingC(self):
135 # no logs, verbose
136 self.write_config('BBINCLUDELOGS = ""')
137 result = bitbake("logging-test -c pythontest_exec_func_python -f -v",
138 ignore_status = True)
139 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
140 self.assertCount(result.output, "This is python stdout", 1)
141
142 def test_python_exec_func_python_loggingD(self):
143 # logs, verbose
144 self.write_config('BBINCLUDELOGS = "yes"')
145 result = bitbake("logging-test -c pythontest_exec_func_python -f -v",
146 ignore_status = True)
147 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
148 self.assertCount(result.output, "This is python stdout", 1)
149
150 def test_python_fatal_loggingA(self):
151 # no logs, no verbose
152 self.write_config('BBINCLUDELOGS = ""')
153 result = bitbake("logging-test -c pythontest_fatal -f", ignore_status = True)
154 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
155 self.assertNotIn("This is python fatal test stdout", result.output)
156 self.assertCount(result.output, "This is a fatal error", 1)
157
158 def test_python_fatal_loggingB(self):
159 # logs, no verbose
160 self.write_config('BBINCLUDELOGS = "yes"')
161 result = bitbake("logging-test -c pythontest_fatal -f", ignore_status = True)
162 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
163 # A bb.fatal() should not include the output
164 self.assertNotIn("This is python fatal test stdout", result.output)
165 self.assertCount(result.output, "This is a fatal error", 1)
166
167 def test_python_fatal_loggingC(self):
168 # no logs, verbose
169 self.write_config('BBINCLUDELOGS = ""')
170 result = bitbake("logging-test -c pythontest_fatal -f -v", ignore_status = True)
171 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
172 self.assertCount(result.output, "This is python fatal test stdout", 1)
173 self.assertCount(result.output, "This is a fatal error", 1)
174
175 def test_python_fatal_loggingD(self):
176 # logs, verbose
177 self.write_config('BBINCLUDELOGS = "yes"')
178 result = bitbake("logging-test -c pythontest_fatal -f -v", ignore_status = True)
179 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
180 self.assertCount(result.output, "This is python fatal test stdout", 1)
181 self.assertCount(result.output, "This is a fatal error", 1)
182
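Taken together, the shell logging cases form a small matrix; the number of copies of the failing task's stdout/stderr in the bitbake output is as follows (the doubling under -v being the shell trace noted in the "two copies due to set +x" comments):

    BBINCLUDELOGS   -v    copies of shell stdout/stderr
    ""              no    0
    "yes"           no    1
    ""              yes   2
    "yes"           yes   2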
diff --git a/meta/lib/oeqa/selftest/cases/bbtests.py b/meta/lib/oeqa/selftest/cases/bbtests.py
index 79390acc0d..51934ef70d 100644
--- a/meta/lib/oeqa/selftest/cases/bbtests.py
+++ b/meta/lib/oeqa/selftest/cases/bbtests.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -39,7 +41,7 @@ class BitbakeTests(OESelftestTestCase):
39 41
40 def test_event_handler(self): 42 def test_event_handler(self):
41 self.write_config("INHERIT += \"test_events\"") 43 self.write_config("INHERIT += \"test_events\"")
42 result = bitbake('m4-native') 44 result = bitbake('selftest-hello-native')
43 find_build_started = re.search(r"NOTE: Test for bb\.event\.BuildStarted(\n.*)*NOTE: Executing.*Tasks", result.output) 45 find_build_started = re.search(r"NOTE: Test for bb\.event\.BuildStarted(\n.*)*NOTE: Executing.*Tasks", result.output)
44 find_build_completed = re.search(r"Tasks Summary:.*(\n.*)*NOTE: Test for bb\.event\.BuildCompleted", result.output) 46 find_build_completed = re.search(r"Tasks Summary:.*(\n.*)*NOTE: Test for bb\.event\.BuildCompleted", result.output)
45 self.assertTrue(find_build_started, msg = "Match failed in:\n%s" % result.output) 47 self.assertTrue(find_build_started, msg = "Match failed in:\n%s" % result.output)
@@ -47,11 +49,11 @@ class BitbakeTests(OESelftestTestCase):
47 self.assertNotIn('Test for bb.event.InvalidEvent', result.output) 49 self.assertNotIn('Test for bb.event.InvalidEvent', result.output)
48 50
49 def test_local_sstate(self): 51 def test_local_sstate(self):
50 bitbake('m4-native') 52 bitbake('selftest-hello-native')
51 bitbake('m4-native -cclean') 53 bitbake('selftest-hello-native -cclean')
52 result = bitbake('m4-native') 54 result = bitbake('selftest-hello-native')
53 find_setscene = re.search("m4-native.*do_.*_setscene", result.output) 55 find_setscene = re.search("selftest-hello-native.*do_.*_setscene", result.output)
54 self.assertTrue(find_setscene, msg = "No \"m4-native.*do_.*_setscene\" message found during bitbake m4-native. bitbake output: %s" % result.output ) 56 self.assertTrue(find_setscene, msg = "No \"selftest-hello-native.*do_.*_setscene\" message found during bitbake selftest-hello-native. bitbake output: %s" % result.output )
55 57
56 def test_bitbake_invalid_recipe(self): 58 def test_bitbake_invalid_recipe(self):
57 result = bitbake('-b asdf', ignore_status=True) 59 result = bitbake('-b asdf', ignore_status=True)
@@ -63,15 +65,15 @@ class BitbakeTests(OESelftestTestCase):
63 65
64 def test_warnings_errors(self): 66 def test_warnings_errors(self):
65 result = bitbake('-b asdf', ignore_status=True) 67 result = bitbake('-b asdf', ignore_status=True)
66 find_warnings = re.search("Summary: There w.{2,3}? [1-9][0-9]* WARNING messages* shown", result.output) 68 find_warnings = re.search("Summary: There w.{2,3}? [1-9][0-9]* WARNING messages*", result.output)
67 find_errors = re.search("Summary: There w.{2,3}? [1-9][0-9]* ERROR messages* shown", result.output) 69 find_errors = re.search("Summary: There w.{2,3}? [1-9][0-9]* ERROR messages*", result.output)
68 self.assertTrue(find_warnings, msg="Did not find the number of warnings at the end of the build:\n" + result.output) 70
69 self.assertTrue(find_errors, msg="Did not find the number of errors at the end of the build:\n" + result.output) 71
70 72
71 def test_invalid_patch(self): 73 def test_invalid_patch(self):
72 # This patch should fail to apply. 74 # This patch should fail to apply.
73 self.write_recipeinc('man-db', 'FILESEXTRAPATHS_prepend := "${THISDIR}/files:"\nSRC_URI += "file://0001-Test-patch-here.patch"') 75 self.write_recipeinc('man-db', 'FILESEXTRAPATHS:prepend := "${THISDIR}/files:"\nSRC_URI += "file://0001-Test-patch-here.patch"')
74 self.write_config("INHERIT_remove = \"report-error\"") 76 self.write_config("INHERIT:remove = \"report-error\"")
75 result = bitbake('man-db -c patch', ignore_status=True) 77 result = bitbake('man-db -c patch', ignore_status=True)
76 self.delete_recipeinc('man-db') 78 self.delete_recipeinc('man-db')
77 bitbake('-cclean man-db') 79 bitbake('-cclean man-db')
@@ -83,8 +85,10 @@ class BitbakeTests(OESelftestTestCase):
83 85
84 def test_force_task_1(self): 86 def test_force_task_1(self):
85 # test 1 from bug 5875 87 # test 1 from bug 5875
88 import uuid
86 test_recipe = 'zlib' 89 test_recipe = 'zlib'
87 test_data = "Microsoft Made No Profit From Anyone's Zunes Yo" 90 # Need to use uuid, otherwise hash equivalence would change the workflow
91 test_data = "Microsoft Made No Profit From Anyone's Zunes Yo %s" % uuid.uuid1()
88 bb_vars = get_bb_vars(['D', 'PKGDEST', 'mandir'], test_recipe) 92 bb_vars = get_bb_vars(['D', 'PKGDEST', 'mandir'], test_recipe)
89 image_dir = bb_vars['D'] 93 image_dir = bb_vars['D']
90 pkgsplit_dir = bb_vars['PKGDEST'] 94 pkgsplit_dir = bb_vars['PKGDEST']
@@ -139,19 +143,14 @@ class BitbakeTests(OESelftestTestCase):
139 self.write_recipeinc('man-db', data) 143 self.write_recipeinc('man-db', data)
140 self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\" 144 self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\"
141SSTATE_DIR = \"${TOPDIR}/download-selftest\" 145SSTATE_DIR = \"${TOPDIR}/download-selftest\"
142INHERIT_remove = \"report-error\" 146INHERIT:remove = \"report-error\"
143""") 147""")
144 self.track_for_cleanup(os.path.join(self.builddir, "download-selftest")) 148 self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
145 149
146 bitbake('-ccleanall man-db')
147 result = bitbake('-c fetch man-db', ignore_status=True) 150 result = bitbake('-c fetch man-db', ignore_status=True)
148 bitbake('-ccleanall man-db')
149 self.delete_recipeinc('man-db') 151 self.delete_recipeinc('man-db')
150 self.assertEqual(result.status, 1, msg="Command succeeded when it should have failed. bitbake output: %s" % result.output) 152
151 self.assertIn('Fetcher failure: Unable to find file file://invalid anywhere. The paths that were searched were:', result.output) 153 self.assertIn('Unable to get checksum for man-db SRC_URI entry invalid: file could not be found', result.output)
152 line = self.getline(result, 'Fetcher failure for URL: \'file://invalid\'. Unable to fetch URL from any source.')
153 self.assertTrue(line and line.startswith("ERROR:"), msg = "\"invalid\" file \
154doesn't exist, yet fetcher didn't report any error. bitbake output: %s" % result.output)
155 154
156 def test_rename_downloaded_file(self): 155 def test_rename_downloaded_file(self):
157 # TODO unique dldir instead of using cleanall 156 # TODO unique dldir instead of using cleanall
@@ -161,7 +160,7 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\"
161""") 160""")
162 self.track_for_cleanup(os.path.join(self.builddir, "download-selftest")) 161 self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
163 162
164 data = 'SRC_URI = "${GNU_MIRROR}/aspell/aspell-${PV}.tar.gz;downloadfilename=test-aspell.tar.gz"' 163 data = 'SRC_URI = "https://downloads.yoctoproject.org/mirror/sources/aspell-${PV}.tar.gz;downloadfilename=test-aspell.tar.gz"'
165 self.write_recipeinc('aspell', data) 164 self.write_recipeinc('aspell', data)
166 result = bitbake('-f -c fetch aspell', ignore_status=True) 165 result = bitbake('-f -c fetch aspell', ignore_status=True)
167 self.delete_recipeinc('aspell') 166 self.delete_recipeinc('aspell')
@@ -176,7 +175,7 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\"
176 self.assertIn('localconf', result.output) 175 self.assertIn('localconf', result.output)
177 176
178 def test_dry_run(self): 177 def test_dry_run(self):
179 result = runCmd('bitbake -n m4-native') 178 result = runCmd('bitbake -n selftest-hello-native')
180 self.assertEqual(0, result.status, "bitbake dry run didn't run as expected. %s" % result.output) 179 self.assertEqual(0, result.status, "bitbake dry run didn't run as expected. %s" % result.output)
181 180
182 def test_just_parse(self): 181 def test_just_parse(self):
@@ -189,6 +188,10 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\"
189 self.assertTrue(find, "No version returned for searched recipe. bitbake output: %s" % result.output) 188 self.assertTrue(find, "No version returned for searched recipe. bitbake output: %s" % result.output)
190 189
191 def test_prefile(self): 190 def test_prefile(self):
191 # Test when the prefile does not exist
192 result = runCmd('bitbake -r conf/prefile.conf', ignore_status=True)
193 self.assertEqual(1, result.status, "bitbake didn't error and should have when a specified prefile didn't exist: %s" % result.output)
194 # Test when the prefile exists
192 preconf = os.path.join(self.builddir, 'conf/prefile.conf') 195 preconf = os.path.join(self.builddir, 'conf/prefile.conf')
193 self.track_for_cleanup(preconf) 196 self.track_for_cleanup(preconf)
194 ftools.write_file(preconf ,"TEST_PREFILE=\"prefile\"") 197 ftools.write_file(preconf ,"TEST_PREFILE=\"prefile\"")
@@ -199,6 +202,10 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\"
199 self.assertIn('localconf', result.output) 202 self.assertIn('localconf', result.output)
200 203
201 def test_postfile(self): 204 def test_postfile(self):
205 # Test when the postfile does not exist
206 result = runCmd('bitbake -R conf/postfile.conf', ignore_status=True)
207 self.assertEqual(1, result.status, "bitbake didn't error and should have when a specified postfile didn't exist: %s" % result.output)
208 # Test when the postfile exists
202 postconf = os.path.join(self.builddir, 'conf/postfile.conf') 209 postconf = os.path.join(self.builddir, 'conf/postfile.conf')
203 self.track_for_cleanup(postconf) 210 self.track_for_cleanup(postconf)
204 ftools.write_file(postconf , "TEST_POSTFILE=\"postfile\"") 211 ftools.write_file(postconf , "TEST_POSTFILE=\"postfile\"")
@@ -213,7 +220,7 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\"
213 def test_continue(self): 220 def test_continue(self):
214 self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\" 221 self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\"
215SSTATE_DIR = \"${TOPDIR}/download-selftest\" 222SSTATE_DIR = \"${TOPDIR}/download-selftest\"
216INHERIT_remove = \"report-error\" 223INHERIT:remove = \"report-error\"
217""") 224""")
218 self.track_for_cleanup(os.path.join(self.builddir, "download-selftest")) 225 self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
219 self.write_recipeinc('man-db',"\ndo_fail_task () {\nexit 1 \n}\n\naddtask do_fail_task before do_fetch\n" ) 226 self.write_recipeinc('man-db',"\ndo_fail_task () {\nexit 1 \n}\n\naddtask do_fail_task before do_fetch\n" )
@@ -225,16 +232,22 @@ INHERIT_remove = \"report-error\"
225 self.assertLess(errorpos,continuepos, msg = "bitbake didn't pass do_fail_task. bitbake output: %s" % result.output) 232 self.assertLess(errorpos,continuepos, msg = "bitbake didn't pass do_fail_task. bitbake output: %s" % result.output)
226 233
227 def test_non_gplv3(self): 234 def test_non_gplv3(self):
228 self.write_config('INCOMPATIBLE_LICENSE = "GPLv3"') 235 self.write_config('''INCOMPATIBLE_LICENSE = "GPL-3.0-or-later"
236OVERRIDES .= ":gplv3test"
237require conf/distro/include/no-gplv3.inc
238''')
229 result = bitbake('selftest-ed', ignore_status=True) 239 result = bitbake('selftest-ed', ignore_status=True)
230 self.assertEqual(result.status, 0, "Bitbake failed, exit code %s, output %s" % (result.status, result.output)) 240 self.assertEqual(result.status, 0, "Bitbake failed, exit code %s, output %s" % (result.status, result.output))
231 lic_dir = get_bb_var('LICENSE_DIRECTORY') 241 lic_dir = get_bb_var('LICENSE_DIRECTORY')
232 self.assertFalse(os.path.isfile(os.path.join(lic_dir, 'selftest-ed/generic_GPLv3'))) 242 arch = get_bb_var('SSTATE_PKGARCH')
233 self.assertTrue(os.path.isfile(os.path.join(lic_dir, 'selftest-ed/generic_GPLv2'))) 243 filename = os.path.join(lic_dir, arch, 'selftest-ed', 'generic_GPL-3.0-or-later')
244 self.assertFalse(os.path.isfile(filename), msg="License file %s exists and shouldn't" % filename)
245 filename = os.path.join(lic_dir, arch, 'selftest-ed', 'generic_GPL-2.0-only')
246 self.assertTrue(os.path.isfile(filename), msg="License file %s doesn't exist" % filename)
234 247
235 def test_setscene_only(self): 248 def test_setscene_only(self):
236 """ Bitbake option to restore from sstate only within a build (i.e. execute no real tasks, only setscene)""" 249 """ Bitbake option to restore from sstate only within a build (i.e. execute no real tasks, only setscene)"""
237 test_recipe = 'ed' 250 test_recipe = 'selftest-hello-native'
238 251
239 bitbake(test_recipe) 252 bitbake(test_recipe)
240 bitbake('-c clean %s' % test_recipe) 253 bitbake('-c clean %s' % test_recipe)
@@ -247,7 +260,7 @@ INHERIT_remove = \"report-error\"
247 'Executed tasks were: %s' % (task, str(tasks))) 260 'Executed tasks were: %s' % (task, str(tasks)))
248 261
249 def test_skip_setscene(self): 262 def test_skip_setscene(self):
250 test_recipe = 'ed' 263 test_recipe = 'selftest-hello-native'
251 264
252 bitbake(test_recipe) 265 bitbake(test_recipe)
253 bitbake('-c clean %s' % test_recipe) 266 bitbake('-c clean %s' % test_recipe)
@@ -298,3 +311,86 @@ INHERIT_remove = \"report-error\"
298 311
299 test_recipe_summary_after = get_bb_var('SUMMARY', test_recipe) 312 test_recipe_summary_after = get_bb_var('SUMMARY', test_recipe)
300 self.assertEqual(expected_recipe_summary, test_recipe_summary_after) 313 self.assertEqual(expected_recipe_summary, test_recipe_summary_after)
314
315 def test_git_patchtool(self):
316 """ PATCHTOOL=git should work with non-git sources like tarballs
317 the test recipe must NOT contain a git:// repository in SRC_URI
318 """
319 test_recipe = "man-db"
320 self.write_recipeinc(test_recipe, 'PATCHTOOL=\"git\"')
321 src = get_bb_var("SRC_URI",test_recipe)
322 gitscm = re.search("git://", src)
323 self.assertFalse(gitscm, "test_git_patchtool pre-condition failed: {} test recipe contains git repo!".format(test_recipe))
324 result = bitbake('{} -c patch'.format(test_recipe), ignore_status=False)
325 fatal = re.search(r"fatal: not a git repository \(or any of the parent directories\)", result.output)
326 self.assertFalse(fatal, "Failed to patch using PATCHTOOL=\"git\"")
327 self.delete_recipeinc(test_recipe)
328 bitbake('-cclean {}'.format(test_recipe))
329
330 def test_git_patchtool2(self):
331 """ Test if PATCHTOOL=git works with git repo and doesn't reinitialize it
332 """
333 test_recipe = "gitrepotest"
334 src = get_bb_var("SRC_URI",test_recipe)
335 gitscm = re.search("git://", src)
336 self.assertTrue(gitscm, "test_git_patchtool2 pre-condition failed: {} test recipe doesn't contain a git repo!".format(test_recipe))
337 result = bitbake('{} -c patch'.format(test_recipe), ignore_status=False)
338 srcdir = get_bb_var('S', test_recipe)
339 result = runCmd("git log", cwd = srcdir)
340 self.assertFalse("bitbake_patching_started" in result.output, msg = "Repository has been reinitialized. {}".format(srcdir))
341 self.delete_recipeinc(test_recipe)
342 bitbake('-cclean {}'.format(test_recipe))
343
344
345 def test_git_unpack_nonetwork(self):
346 """
347 Test that a recipe with a floating tag that needs to be resolved upstream doesn't
348 access the network in a patch task run in a separate build invocation
349 """
350
351 # Enable the recipe to float using a distro override
352 self.write_config("DISTROOVERRIDES .= \":gitunpack-enable-recipe\"")
353
354 bitbake('gitunpackoffline -c fetch')
355 bitbake('gitunpackoffline -c patch')
356
357 def test_git_unpack_nonetwork_fail(self):
358 """
359 Test that a recipe with a floating tag which doesn't call get_srcrev() in the fetcher
360 raises an error when the fetcher is called.
361 """
362
363 # Enable the recipe to float using a distro override
364 self.write_config("DISTROOVERRIDES .= \":gitunpack-enable-recipe\"")
365
366 result = bitbake('gitunpackoffline-fail -c fetch', ignore_status=True)
367 self.assertTrue(re.search(r"Recipe uses a floating tag/branch .* for repo .* without a fixed SRCREV yet doesn't call bb.fetch2.get_srcrev\(\)", result.output), msg = "Recipe without PV set to SRCPV should have failed: %s" % result.output)
368
369 def test_unexpanded_variable_in_path(self):
370 """
371 Test that bitbake fails if a directory name contains an unexpanded bitbake variable
372 """
373 recipe_name = "gitunpackoffline"
374 self.write_config('PV:pn-gitunpackoffline:append = "+${UNDEFVAL}"')
375 result = bitbake('{}'.format(recipe_name), ignore_status=True)
376 self.assertGreater(result.status, 0, "Build should have failed if ${ is in the path")
377 self.assertTrue(re.search("ERROR: Directory name /.* contains unexpanded bitbake variable. This may cause build failures and WORKDIR polution",
378 result.output), msg = "mkdirhier with unexpanded variable should have failed: %s" % result.output)
379
380 def test_bb_env_bb_getvar_equality(self):
381 """ Test if "bitbake -e" output is identical to "bitbake-getvar" output for a variable set from an anonymous function
382 """
383 self.write_config('''INHERIT += "test_anon_func"
384TEST_SET_FROM_ANON_FUNC ?= ""''')
385
386 result_bb_e = runCmd('bitbake -e')
387 bb_e_var_match = re.search('^TEST_SET_FROM_ANON_FUNC="(?P<value>.*)"$', result_bb_e.output, re.MULTILINE)
388 self.assertTrue(bb_e_var_match, msg = "Can't find TEST_SET_FROM_ANON_FUNC value in \"bitbake -e\" output")
389 bb_e_var_value = bb_e_var_match.group("value")
390
391 result_bb_getvar = runCmd('bitbake-getvar TEST_SET_FROM_ANON_FUNC --value')
392 bb_getvar_var_value = result_bb_getvar.output.strip()
393 self.assertEqual(bb_e_var_value, bb_getvar_var_value,
394 msg='''"bitbake -e" output differs from bitbake-getvar output for TEST_SET_FROM_ANON_FUNC (set from anonymous function)
395bitbake -e: "%s"
396bitbake-getvar: "%s"''' % (bb_e_var_value, bb_getvar_var_value))
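The new test_bb_env_bb_getvar_equality check hinges on pulling a single variable out of "bitbake -e" output with an anchored multiline regex and comparing it to the stripped output of bitbake-getvar. A minimal standalone sketch of that extraction, using an invented sample string rather than real bitbake output:

import re

# Invented stand-in for one line of "bitbake -e" output
sample_output = 'TEST_SET_FROM_ANON_FUNC="value-from-anon-func"'

# Same anchored pattern the test uses; re.MULTILINE makes ^ and $ match
# per input line rather than only at the ends of the whole string
match = re.search(r'^TEST_SET_FROM_ANON_FUNC="(?P<value>.*)"$',
                  sample_output, re.MULTILINE)
print(match.group("value"))  # -> value-from-anon-func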
diff --git a/meta/lib/oeqa/selftest/cases/binutils.py b/meta/lib/oeqa/selftest/cases/binutils.py
index 821f52f5a8..5ff263d342 100644
--- a/meta/lib/oeqa/selftest/cases/binutils.py
+++ b/meta/lib/oeqa/selftest/cases/binutils.py
@@ -1,12 +1,14 @@
1#
2# Copyright OpenEmbedded Contributors
3#
1# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
5#
2import os 6import os
3import sys 7import time
4import re
5import logging
6from oeqa.core.decorator import OETestTag 8from oeqa.core.decorator import OETestTag
7from oeqa.core.case import OEPTestResultTestCase 9from oeqa.core.case import OEPTestResultTestCase
8from oeqa.selftest.case import OESelftestTestCase 10from oeqa.selftest.case import OESelftestTestCase
9from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars 11from oeqa.utils.commands import bitbake, get_bb_vars
10 12
11def parse_values(content): 13def parse_values(content):
12 for i in content: 14 for i in content:
@@ -31,19 +33,23 @@ class BinutilsCrossSelfTest(OESelftestTestCase, OEPTestResultTestCase):
31 features.append('CHECK_TARGETS = "{0}"'.format(suite)) 33 features.append('CHECK_TARGETS = "{0}"'.format(suite))
32 self.write_config("\n".join(features)) 34 self.write_config("\n".join(features))
33 35
34 recipe = "binutils-cross-testsuite" 36 recipe = "binutils-testsuite"
35 bb_vars = get_bb_vars(["B", "TARGET_SYS", "T"], recipe) 37 bb_vars = get_bb_vars(["B", "TARGET_SYS", "T"], recipe)
36 builddir, target_sys, tdir = bb_vars["B"], bb_vars["TARGET_SYS"], bb_vars["T"] 38 builddir, target_sys, tdir = bb_vars["B"], bb_vars["TARGET_SYS"], bb_vars["T"]
37 39
40 start_time = time.time()
41
38 bitbake("{0} -c check".format(recipe)) 42 bitbake("{0} -c check".format(recipe))
39 43
44 end_time = time.time()
45
40 sumspath = os.path.join(builddir, suite, "{0}.sum".format(suite)) 46 sumspath = os.path.join(builddir, suite, "{0}.sum".format(suite))
41 if not os.path.exists(sumspath): 47 if not os.path.exists(sumspath):
42 sumspath = os.path.join(builddir, suite, "testsuite", "{0}.sum".format(suite)) 48 sumspath = os.path.join(builddir, suite, "testsuite", "{0}.sum".format(suite))
43 logpath = os.path.splitext(sumspath)[0] + ".log" 49 logpath = os.path.splitext(sumspath)[0] + ".log"
44 50
45 ptestsuite = "binutils-{}".format(suite) if suite != "binutils" else suite 51 ptestsuite = "binutils-{}".format(suite) if suite != "binutils" else suite
46 self.ptest_section(ptestsuite, logfile = logpath) 52 self.ptest_section(ptestsuite, duration = int(end_time - start_time), logfile = logpath)
47 with open(sumspath, "r") as f: 53 with open(sumspath, "r") as f:
48 for test, result in parse_values(f): 54 for test, result in parse_values(f):
49 self.ptest_result(ptestsuite, test, result) 55 self.ptest_result(ptestsuite, test, result)
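For context on what parse_values() consumes here: the binutils check targets emit DejaGnu-style ".sum" files in which each result line has the form "STATUS: test name". A rough sketch of that mapping (the status keyword list below is abbreviated; the real helper recognises a longer set):

# Sketch only: map DejaGnu ".sum" result lines to (test, result) pairs the
# way parse_values() feeds ptest_result(); the status list is abbreviated
def parse_sum_lines(lines):
    statuses = ("PASS", "FAIL", "XPASS", "XFAIL", "UNTESTED", "UNSUPPORTED")
    for line in lines:
        for status in statuses:
            prefix = status + ": "
            if line.startswith(prefix):
                yield line[len(prefix):].strip(), status
                break

sample = ["Running target unix", "PASS: ld bootstrap", "FAIL: odd case"]
print(list(parse_sum_lines(sample)))
# [('ld bootstrap', 'PASS'), ('odd case', 'FAIL')]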
diff --git a/meta/lib/oeqa/selftest/cases/buildhistory.py b/meta/lib/oeqa/selftest/cases/buildhistory.py
index d865da6252..511c666554 100644
--- a/meta/lib/oeqa/selftest/cases/buildhistory.py
+++ b/meta/lib/oeqa/selftest/cases/buildhistory.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -7,10 +9,10 @@ import re
7import datetime 9import datetime
8 10
9from oeqa.selftest.case import OESelftestTestCase 11from oeqa.selftest.case import OESelftestTestCase
10from oeqa.utils.commands import bitbake, get_bb_vars 12from oeqa.utils.commands import bitbake, get_bb_vars, get_bb_var, runCmd
11 13
12 14
13class BuildhistoryBase(OESelftestTestCase): 15class BuildhistoryTests(OESelftestTestCase):
14 16
15 def config_buildhistory(self, tmp_bh_location=False): 17 def config_buildhistory(self, tmp_bh_location=False):
16 bb_vars = get_bb_vars(['USER_CLASSES', 'INHERIT']) 18 bb_vars = get_bb_vars(['USER_CLASSES', 'INHERIT'])
@@ -46,5 +48,58 @@ class BuildhistoryBase(OESelftestTestCase):
46 else: 48 else:
47 self.assertEqual(result.status, 0, msg="Command 'bitbake %s' has failed unexpectedly: %s" % (target, result.output)) 49 self.assertEqual(result.status, 0, msg="Command 'bitbake %s' has failed unexpectedly: %s" % (target, result.output))
48 50
49 # No tests should be added to the base class. 51
50 # Please create a new class that inherit this one, or use one of those already available for adding tests. 52 def test_buildhistory_basic(self):
53 self.run_buildhistory_operation('xcursor-transparent-theme')
54 self.assertTrue(os.path.isdir(get_bb_var('BUILDHISTORY_DIR')), "buildhistory dir was not created.")
55
56 def test_buildhistory_buildtime_pr_backwards(self):
57 target = 'xcursor-transparent-theme'
58 error = "ERROR:.*QA Issue: Package version for package %s went backwards which would break package feeds \(from .*-r1.* to .*-r0.*\)" % target
59 self.run_buildhistory_operation(target, target_config="PR = \"r1\"", change_bh_location=True)
60 self.run_buildhistory_operation(target, target_config="PR = \"r0\"", change_bh_location=False, expect_error=True, error_regex=error)
61
62 def test_fileinfo(self):
63 self.config_buildhistory()
64 bitbake('hicolor-icon-theme')
65 history_dir = get_bb_var('BUILDHISTORY_DIR_PACKAGE', 'hicolor-icon-theme')
66 self.assertTrue(os.path.isdir(history_dir), 'buildhistory dir was not created.')
67
68 def load_bh(f):
69 d = {}
70 for line in open(f):
71 split = [s.strip() for s in line.split('=', 1)]
72 if len(split) > 1:
73 d[split[0]] = split[1]
74 return d
75
76 data = load_bh(os.path.join(history_dir, 'hicolor-icon-theme', 'latest'))
77 self.assertIn('FILELIST', data)
78 self.assertEqual(data['FILELIST'], '/usr/share/icons/hicolor/index.theme')
79 self.assertGreater(int(data['PKGSIZE']), 0)
80
81 data = load_bh(os.path.join(history_dir, 'hicolor-icon-theme-dev', 'latest'))
82 if 'FILELIST' in data:
83 self.assertEqual(data['FILELIST'], '/usr/share/pkgconfig/default-icon-theme.pc')
84 self.assertGreater(int(data['PKGSIZE']), 0)
85
86 def test_buildhistory_diff(self):
87 target = 'xcursor-transparent-theme'
88 self.run_buildhistory_operation(target, target_config="PR = \"r1\"", change_bh_location=True)
89 self.run_buildhistory_operation(target, target_config="PR = \"r0\"", change_bh_location=False, expect_error=True)
90 result = runCmd("oe-pkgdata-util read-value PKGV %s" % target)
91 pkgv = result.output.rstrip()
92 result = runCmd("buildhistory-diff -p %s" % get_bb_var('BUILDHISTORY_DIR'))
93 expected_endlines = [
94 "xcursor-transparent-theme-dev: RRECOMMENDS: removed \"xcursor-transparent-theme (['= %s-r1'])\", added \"xcursor-transparent-theme (['= %s-r0'])\"" % (pkgv, pkgv),
95 "xcursor-transparent-theme-staticdev: RDEPENDS: removed \"xcursor-transparent-theme-dev (['= %s-r1'])\", added \"xcursor-transparent-theme-dev (['= %s-r0'])\"" % (pkgv, pkgv)
96 ]
97 for line in result.output.splitlines():
98 for el in expected_endlines:
99 if line.endswith(el):
100 expected_endlines.remove(el)
101 break
102 else:
103 self.fail('Unexpected line:\n%s\nExpected line endings:\n %s' % (line, '\n '.join(expected_endlines)))
104 if expected_endlines:
105 self.fail('Missing expected line endings:\n %s' % '\n '.join(expected_endlines)) \ No newline at end of file
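The load_bh() helper above relies on buildhistory "latest" files being flat "NAME = value" lines. The same split-on-first-"=" parsing, shown standalone over invented sample data:

# Same parsing idea as load_bh(), over invented sample lines
def load_bh_lines(lines):
    d = {}
    for line in lines:
        split = [s.strip() for s in line.split('=', 1)]
        if len(split) > 1:
            d[split[0]] = split[1]
    return d

sample = [
    "PV = 0.1.1",
    "PKGSIZE = 5632",
    "FILELIST = /usr/share/icons/hicolor/index.theme",
]
data = load_bh_lines(sample)
print(data["FILELIST"], int(data["PKGSIZE"]) > 0)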
diff --git a/meta/lib/oeqa/selftest/cases/buildoptions.py b/meta/lib/oeqa/selftest/cases/buildoptions.py
index 3495bee986..767e19bd88 100644
--- a/meta/lib/oeqa/selftest/cases/buildoptions.py
+++ b/meta/lib/oeqa/selftest/cases/buildoptions.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -8,9 +10,10 @@ import glob as g
8import shutil 10import shutil
9import tempfile 11import tempfile
10from oeqa.selftest.case import OESelftestTestCase 12from oeqa.selftest.case import OESelftestTestCase
11from oeqa.selftest.cases.buildhistory import BuildhistoryBase 13from oeqa.core.decorator.data import skipIfMachine
12from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars 14from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars
13import oeqa.utils.ftools as ftools 15import oeqa.utils.ftools as ftools
16from oeqa.core.decorator import OETestTag
14 17
15class ImageOptionsTests(OESelftestTestCase): 18class ImageOptionsTests(OESelftestTestCase):
16 19
@@ -50,23 +53,23 @@ class ImageOptionsTests(OESelftestTestCase):
50 def test_read_only_image(self): 53 def test_read_only_image(self):
51 distro_features = get_bb_var('DISTRO_FEATURES') 54 distro_features = get_bb_var('DISTRO_FEATURES')
52 if not ('x11' in distro_features and 'opengl' in distro_features): 55 if not ('x11' in distro_features and 'opengl' in distro_features):
53 self.skipTest('core-image-sato requires x11 and opengl in distro features') 56 self.skipTest('core-image-sato/weston requires x11 and opengl in distro features')
54 self.write_config('IMAGE_FEATURES += "read-only-rootfs"') 57 self.write_config('IMAGE_FEATURES += "read-only-rootfs"')
55 bitbake("core-image-sato") 58 bitbake("core-image-sato core-image-weston")
56 # do_image will fail if there are any pending postinsts 59 # do_image will fail if there are any pending postinsts
57 60
58class DiskMonTest(OESelftestTestCase): 61class DiskMonTest(OESelftestTestCase):
59 62
60 def test_stoptask_behavior(self): 63 def test_stoptask_behavior(self):
61 self.write_config('BB_DISKMON_DIRS = "STOPTASKS,${TMPDIR},100000G,100K"') 64 self.write_config('BB_DISKMON_DIRS = "STOPTASKS,${TMPDIR},100000G,100K"\nBB_HEARTBEAT_EVENT = "1"')
62 res = bitbake("delay -c delay", ignore_status = True) 65 res = bitbake("delay -c delay", ignore_status = True)
63 self.assertTrue('ERROR: No new tasks can be executed since the disk space monitor action is "STOPTASKS"!' in res.output, msg = "Tasks should have stopped. Disk monitor is set to STOPTASK: %s" % res.output) 66 self.assertTrue('ERROR: No new tasks can be executed since the disk space monitor action is "STOPTASKS"!' in res.output, msg = "Tasks should have stopped. Disk monitor is set to STOPTASK: %s" % res.output)
64 self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output)) 67 self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output))
65 self.write_config('BB_DISKMON_DIRS = "ABORT,${TMPDIR},100000G,100K"') 68 self.write_config('BB_DISKMON_DIRS = "HALT,${TMPDIR},100000G,100K"\nBB_HEARTBEAT_EVENT = "1"')
66 res = bitbake("delay -c delay", ignore_status = True) 69 res = bitbake("delay -c delay", ignore_status = True)
67 self.assertTrue('ERROR: Immediately abort since the disk space monitor action is "ABORT"!' in res.output, "Tasks should have been aborted immediatelly. Disk monitor is set to ABORT: %s" % res.output) 70 self.assertTrue('ERROR: Immediately halt since the disk space monitor action is "HALT"!' in res.output, "Tasks should have been halted immediately. Disk monitor is set to HALT: %s" % res.output)
68 self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output)) 71 self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output))
69 self.write_config('BB_DISKMON_DIRS = "WARN,${TMPDIR},100000G,100K"') 72 self.write_config('BB_DISKMON_DIRS = "WARN,${TMPDIR},100000G,100K"\nBB_HEARTBEAT_EVENT = "1"')
70 res = bitbake("delay -c delay") 73 res = bitbake("delay -c delay")
71 self.assertTrue('WARNING: The free space' in res.output, msg = "A warning should have been displayed for disk monitor is set to WARN: %s" %res.output) 74 self.assertTrue('WARNING: The free space' in res.output, msg = "A warning should have been displayed for disk monitor is set to WARN: %s" %res.output)
72 75
@@ -78,9 +81,9 @@ class SanityOptionsTest(OESelftestTestCase):
78 81
79 def test_options_warnqa_errorqa_switch(self): 82 def test_options_warnqa_errorqa_switch(self):
80 83
81 self.write_config("INHERIT_remove = \"report-error\"") 84 self.write_config("INHERIT:remove = \"report-error\"")
82 if "packages-list" not in get_bb_var("ERROR_QA"): 85 if "packages-list" not in get_bb_var("ERROR_QA"):
83 self.append_config("ERROR_QA_append = \" packages-list\"") 86 self.append_config("ERROR_QA:append:pn-xcursor-transparent-theme = \" packages-list\"")
84 87
85 self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"') 88 self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"')
86 self.add_command_to_tearDown('bitbake -c clean xcursor-transparent-theme') 89 self.add_command_to_tearDown('bitbake -c clean xcursor-transparent-theme')
@@ -90,8 +93,8 @@ class SanityOptionsTest(OESelftestTestCase):
90 self.assertTrue(line and line.startswith("ERROR:"), msg=res.output) 93 self.assertTrue(line and line.startswith("ERROR:"), msg=res.output)
91 self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output)) 94 self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output))
92 self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"') 95 self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"')
93 self.append_config('ERROR_QA_remove = "packages-list"') 96 self.append_config('ERROR_QA:remove:pn-xcursor-transparent-theme = "packages-list"')
94 self.append_config('WARN_QA_append = " packages-list"') 97 self.append_config('WARN_QA:append:pn-xcursor-transparent-theme = " packages-list"')
95 res = bitbake("xcursor-transparent-theme -f -c package") 98 res = bitbake("xcursor-transparent-theme -f -c package")
96 self.delete_recipeinc('xcursor-transparent-theme') 99 self.delete_recipeinc('xcursor-transparent-theme')
97 line = self.getline(res, "QA Issue: xcursor-transparent-theme-dbg is listed in PACKAGES multiple times, this leads to packaging errors.") 100 line = self.getline(res, "QA Issue: xcursor-transparent-theme-dbg is listed in PACKAGES multiple times, this leads to packaging errors.")
@@ -135,32 +138,24 @@ class SanityOptionsTest(OESelftestTestCase):
135 138
136 self.assertNotIn(err, ret.output) 139 self.assertNotIn(err, ret.output)
137 140
138
139class BuildhistoryTests(BuildhistoryBase):
140
141 def test_buildhistory_basic(self):
142 self.run_buildhistory_operation('xcursor-transparent-theme')
143 self.assertTrue(os.path.isdir(get_bb_var('BUILDHISTORY_DIR')), "buildhistory dir was not created.")
144
145 def test_buildhistory_buildtime_pr_backwards(self):
146 target = 'xcursor-transparent-theme'
147 error = "ERROR:.*QA Issue: Package version for package %s went backwards which would break package feeds \(from .*-r1.* to .*-r0.*\)" % target
148 self.run_buildhistory_operation(target, target_config="PR = \"r1\"", change_bh_location=True)
149 self.run_buildhistory_operation(target, target_config="PR = \"r0\"", change_bh_location=False, expect_error=True, error_regex=error)
150
151class ArchiverTest(OESelftestTestCase): 141class ArchiverTest(OESelftestTestCase):
152 def test_arch_work_dir_and_export_source(self): 142 def test_arch_work_dir_and_export_source(self):
153 """ 143 """
154 Test for archiving the work directory and exporting the source files. 144 Test for archiving the work directory and exporting the source files.
155 """ 145 """
156 self.write_config("INHERIT += \"archiver\"\nARCHIVER_MODE[src] = \"original\"\nARCHIVER_MODE[srpm] = \"1\"") 146 self.write_config("""
147INHERIT += "archiver"
148PACKAGE_CLASSES = "package_rpm"
149ARCHIVER_MODE[src] = "original"
150ARCHIVER_MODE[srpm] = "1"
151""")
157 res = bitbake("xcursor-transparent-theme", ignore_status=True) 152 res = bitbake("xcursor-transparent-theme", ignore_status=True)
158 self.assertEqual(res.status, 0, "\nCouldn't build xcursortransparenttheme.\nbitbake output %s" % res.output) 153 self.assertEqual(res.status, 0, "\nCouldn't build xcursortransparenttheme.\nbitbake output %s" % res.output)
159 deploy_dir_src = get_bb_var('DEPLOY_DIR_SRC') 154 deploy_dir_src = get_bb_var('DEPLOY_DIR_SRC')
160 pkgs_path = g.glob(str(deploy_dir_src) + "/allarch*/xcurs*") 155 pkgs_path = g.glob(str(deploy_dir_src) + "/allarch*/xcurs*")
161 src_file_glob = str(pkgs_path[0]) + "/xcursor*.src.rpm" 156 src_file_glob = str(pkgs_path[0]) + "/xcursor*.src.rpm"
162 tar_file_glob = str(pkgs_path[0]) + "/xcursor*.tar.gz" 157 tar_file_glob = str(pkgs_path[0]) + "/xcursor*.tar.xz"
163 self.assertTrue((g.glob(src_file_glob) and g.glob(tar_file_glob)), "Couldn't find .src.rpm and .tar.gz files under %s/allarch*/xcursor*" % deploy_dir_src) 158 self.assertTrue((g.glob(src_file_glob) and g.glob(tar_file_glob)), "Couldn't find .src.rpm and .tar.xz files under %s/allarch*/xcursor*" % deploy_dir_src)
164 159
165class ToolchainOptions(OESelftestTestCase): 160class ToolchainOptions(OESelftestTestCase):
166 def test_toolchain_fortran(self): 161 def test_toolchain_fortran(self):
@@ -168,10 +163,11 @@ class ToolchainOptions(OESelftestTestCase):
168 Test that Fortran works by building a Hello, World binary. 163 Test that Fortran works by building a Hello, World binary.
169 """ 164 """
170 165
171 features = 'FORTRAN_forcevariable = ",fortran"\n' 166 features = 'FORTRAN:forcevariable = ",fortran"\n'
172 self.write_config(features) 167 self.write_config(features)
173 bitbake('fortran-helloworld') 168 bitbake('fortran-helloworld')
174 169
170@OETestTag("yocto-mirrors")
175class SourceMirroring(OESelftestTestCase): 171class SourceMirroring(OESelftestTestCase):
176 # Can we download everything from the Yocto Sources Mirror over http only 172 # Can we download everything from the Yocto Sources Mirror over http only
177 def test_yocto_source_mirror(self): 173 def test_yocto_source_mirror(self):
@@ -195,5 +191,10 @@ PREMIRRORS = "\\
195 https://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n" 191 https://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n"
196""") 192""")
197 193
198 bitbake("world --runall fetch") 194 bitbake("world --runall fetch --continue")
195
199 196
197class Poisoning(OESelftestTestCase):
198 def test_poisoning(self):
199 # The poison recipe fails if the poisoning didn't work
200 bitbake("poison")
diff --git a/meta/lib/oeqa/selftest/cases/c_cpp.py b/meta/lib/oeqa/selftest/cases/c_cpp.py
new file mode 100644
index 0000000000..9a70ce29f5
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/c_cpp.py
@@ -0,0 +1,60 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.selftest.case import OESelftestTestCase
8from oeqa.core.decorator.data import skipIfNotQemuUsermode
9from oeqa.utils.commands import bitbake
10
11
12class CCppTests(OESelftestTestCase):
13
14 @skipIfNotQemuUsermode()
15 def _qemu_usermode(self, recipe_name):
16 self.add_command_to_tearDown("bitbake -c clean %s" % recipe_name)
17 bitbake("%s -c run_tests" % recipe_name)
18
19 @skipIfNotQemuUsermode()
20 def _qemu_usermode_failing(self, recipe_name):
21 config = 'PACKAGECONFIG:pn-%s = "failing_test"' % recipe_name
22 self.write_config(config)
23 self.add_command_to_tearDown("bitbake -c clean %s" % recipe_name)
24 result = bitbake("%s -c run_tests" % recipe_name, ignore_status=True)
25 self.assertNotEqual(0, result.status, "command: %s is expected to fail but passed, status: %s, output: %s, error: %s" % (
26 result.command, result.status, result.output, result.error))
27
28
29class CMakeTests(CCppTests):
30 def test_cmake_qemu(self):
31 """Test for cmake-qemu.bbclass good case
32
33 compile the cmake-example and verify the CTests pass in qemu-user.
34 qemu-user is configured by CMAKE_CROSSCOMPILING_EMULATOR.
35 """
36 self._qemu_usermode("cmake-example")
37
38 def test_cmake_qemu_failing(self):
39 """Test for cmake-qemu.bbclass bad case
40
41 Break the comparison in the test code and verify the CTests do not pass.
42 """
43 self._qemu_usermode_failing("cmake-example")
44
45
46class MesonTests(CCppTests):
47 def test_meson_qemu(self):
48 """Test the qemu-user feature of the meson.bbclass good case
49
50 compile the meson-example and verify the Unit Test pass in qemu-user.
51 qemu-user is configured by meson's exe_wrapper option.
52 """
53 self._qemu_usermode("meson-example")
54
55 def test_meson_qemu_failing(self):
56 """Test the qemu-user feature of the meson.bbclass bad case
57
58 Break the comparison in the test code and verify the Unit Test does not pass in qemu-user.
59 """
60 self._qemu_usermode_failing("meson-example")
diff --git a/meta/lib/oeqa/selftest/cases/containerimage.py b/meta/lib/oeqa/selftest/cases/containerimage.py
index 79cc8a0f2e..d1ac305a84 100644
--- a/meta/lib/oeqa/selftest/cases/containerimage.py
+++ b/meta/lib/oeqa/selftest/cases/containerimage.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -13,7 +15,7 @@ from oeqa.utils.commands import bitbake, get_bb_vars, runCmd
13# The only package added to the image is container_image_testpkg, which 15# The only package added to the image is container_image_testpkg, which
14# contains one file. However, due to some other things not cleaning up during 16# contains one file. However, due to some other things not cleaning up during
15# rootfs creation, there is some cruft. Ideally bugs will be filed and the 17# rootfs creation, there is some cruft. Ideally bugs will be filed and the
16# cruft removed, but for now we whitelist some known set. 18# cruft removed, but for now we ignore some known set.
17# 19#
18# Also for performance reasons we're only checking the cruft when using ipk. 20# Also for performance reasons we're only checking the cruft when using ipk.
19# When using deb, and rpm it is a bit different and we could test all 21# When using deb, and rpm it is a bit different and we could test all
@@ -22,7 +24,7 @@ from oeqa.utils.commands import bitbake, get_bb_vars, runCmd
22# 24#
23class ContainerImageTests(OESelftestTestCase): 25class ContainerImageTests(OESelftestTestCase):
24 26
25 # Verify that when specifying a IMAGE_TYPEDEP_ of the form "foo.bar" that 27 # Verify that when an IMAGE_TYPEDEP: of the form "foo.bar" is specified,
26 # the conversion type bar gets added as a dep as well 28 # the conversion type bar gets added as a dep as well
27 def test_expected_files(self): 29 def test_expected_files(self):
28 30
@@ -40,10 +42,9 @@ class ContainerImageTests(OESelftestTestCase):
40 self.write_config("""PREFERRED_PROVIDER_virtual/kernel = "linux-dummy" 42 self.write_config("""PREFERRED_PROVIDER_virtual/kernel = "linux-dummy"
41IMAGE_FSTYPES = "container" 43IMAGE_FSTYPES = "container"
42PACKAGE_CLASSES = "package_ipk" 44PACKAGE_CLASSES = "package_ipk"
43IMAGE_FEATURES = ""
44IMAGE_BUILDINFO_FILE = "" 45IMAGE_BUILDINFO_FILE = ""
45INIT_MANAGER = "sysvinit" 46INIT_MANAGER = "sysvinit"
46IMAGE_INSTALL_remove = "ssh-pregen-hostkeys" 47IMAGE_INSTALL:remove = "ssh-pregen-hostkeys"
47 48
48""") 49""")
49 50
@@ -53,8 +54,6 @@ IMAGE_INSTALL_remove = "ssh-pregen-hostkeys"
53 expected_files = [ 54 expected_files = [
54 './', 55 './',
55 '.{bindir}/theapp', 56 '.{bindir}/theapp',
56 '.{sysconfdir}/default/',
57 '.{sysconfdir}/default/postinst',
58 '.{sysconfdir}/ld.so.cache', 57 '.{sysconfdir}/ld.so.cache',
59 '.{sysconfdir}/timestamp', 58 '.{sysconfdir}/timestamp',
60 '.{sysconfdir}/version', 59 '.{sysconfdir}/version',
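The expected_files entries in this test are templates; as I read the test, it expands placeholders such as {bindir} and {sysconfdir} with the image's real directory paths before comparing against the container tarball listing. Roughly (the directory values below are invented stand-ins):

# Rough illustration of the template expansion; the real directory values
# come from bitbake variables, the ones below are invented stand-ins
expected_files = ['./', '.{bindir}/theapp', '.{sysconfdir}/ld.so.cache']
bindir, sysconfdir = '/usr/bin', '/etc'
print([p.format(bindir=bindir, sysconfdir=sysconfdir) for p in expected_files])
# ['./', './usr/bin/theapp', './etc/ld.so.cache']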
diff --git a/meta/lib/oeqa/selftest/cases/cve_check.py b/meta/lib/oeqa/selftest/cases/cve_check.py
index 3f343a2841..511e4b81b4 100644
--- a/meta/lib/oeqa/selftest/cases/cve_check.py
+++ b/meta/lib/oeqa/selftest/cases/cve_check.py
@@ -1,9 +1,19 @@
1from oe.cve_check import Version 1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import json
8import os
2from oeqa.selftest.case import OESelftestTestCase 9from oeqa.selftest.case import OESelftestTestCase
10from oeqa.utils.commands import bitbake, get_bb_vars
3 11
4class CVECheck(OESelftestTestCase): 12class CVECheck(OESelftestTestCase):
5 13
6 def test_version_compare(self): 14 def test_version_compare(self):
15 from oe.cve_check import Version
16
7 result = Version("100") > Version("99") 17 result = Version("100") > Version("99")
8 self.assertTrue( result, msg="Failed to compare version '100' > '99'") 18 self.assertTrue( result, msg="Failed to compare version '100' > '99'")
9 result = Version("2.3.1") > Version("2.2.3") 19 result = Version("2.3.1") > Version("2.2.3")
@@ -34,3 +44,453 @@ class CVECheck(OESelftestTestCase):
34 self.assertTrue( result ,msg="Failed to compare version with suffix '1.0b' < '1.0r'") 44 self.assertTrue( result ,msg="Failed to compare version with suffix '1.0b' < '1.0r'")
35 result = Version("1.0b","alphabetical") > Version("1.0","alphabetical") 45 result = Version("1.0b","alphabetical") > Version("1.0","alphabetical")
36 self.assertTrue( result ,msg="Failed to compare version with suffix '1.0b' > '1.0'") 46 self.assertTrue( result ,msg="Failed to compare version with suffix '1.0b' > '1.0'")
47
48 # consider the trailing "p" and "patch" as patch releases when comparing
49 result = Version("1.0","patch") < Version("1.0p1","patch")
50 self.assertTrue( result ,msg="Failed to compare version with suffix '1.0' < '1.0p1'")
51 result = Version("1.0p2","patch") > Version("1.0p1","patch")
52 self.assertTrue( result ,msg="Failed to compare version with suffix '1.0p2' > '1.0p1'")
53 result = Version("1.0_patch2","patch") < Version("1.0_patch3","patch")
54 self.assertTrue( result ,msg="Failed to compare version with suffix '1.0_patch2' < '1.0_patch3'")
55
56
57 def test_convert_cve_version(self):
58 from oe.cve_check import convert_cve_version
59
60 # Default format
61 self.assertEqual(convert_cve_version("8.3"), "8.3")
62 self.assertEqual(convert_cve_version(""), "")
63
64 # OpenSSL format version
65 self.assertEqual(convert_cve_version("1.1.1t"), "1.1.1t")
66
67 # OpenSSH format
68 self.assertEqual(convert_cve_version("8.3_p1"), "8.3p1")
69 self.assertEqual(convert_cve_version("8.3_p22"), "8.3p22")
70
71 # Linux kernel format
72 self.assertEqual(convert_cve_version("6.2_rc8"), "6.2-rc8")
73 self.assertEqual(convert_cve_version("6.2_rc31"), "6.2-rc31")
74
75 def test_product_match(self):
76 from oe.cve_check import has_cve_product_match
77
78 status = {}
79 status["detail"] = "ignored"
80 status["vendor"] = "*"
81 status["product"] = "*"
82 status["description"] = ""
83 status["mapping"] = ""
84
85 self.assertEqual(has_cve_product_match(status, "some_vendor:some_product"), True)
86 self.assertEqual(has_cve_product_match(status, "*:*"), True)
87 self.assertEqual(has_cve_product_match(status, "some_product"), True)
88 self.assertEqual(has_cve_product_match(status, "glibc"), True)
89 self.assertEqual(has_cve_product_match(status, "glibca"), True)
90 self.assertEqual(has_cve_product_match(status, "aglibc"), True)
91 self.assertEqual(has_cve_product_match(status, "*"), True)
92 self.assertEqual(has_cve_product_match(status, "aglibc glibc test:test"), True)
93
94 status["product"] = "glibc"
95 self.assertEqual(has_cve_product_match(status, "some_vendor:some_product"), False)
96 # The CPE in the recipe must be defined, no * accepted
97 self.assertEqual(has_cve_product_match(status, "*:*"), False)
98 self.assertEqual(has_cve_product_match(status, "*"), False)
99 self.assertEqual(has_cve_product_match(status, "some_product"), False)
100 self.assertEqual(has_cve_product_match(status, "glibc"), True)
101 self.assertEqual(has_cve_product_match(status, "glibca"), False)
102 self.assertEqual(has_cve_product_match(status, "aglibc"), False)
103 self.assertEqual(has_cve_product_match(status, "some_vendor:glibc"), True)
104 self.assertEqual(has_cve_product_match(status, "some_vendor:glibc test"), True)
105 self.assertEqual(has_cve_product_match(status, "test some_vendor:glibc"), True)
106
107 status["vendor"] = "glibca"
108 status["product"] = "glibc"
109 self.assertEqual(has_cve_product_match(status, "some_vendor:some_product"), False)
110 # The CPE in the recipe must be defined, no * accepted
111 self.assertEqual(has_cve_product_match(status, "*:*"), False)
112 self.assertEqual(has_cve_product_match(status, "*"), False)
113 self.assertEqual(has_cve_product_match(status, "some_product"), False)
114 self.assertEqual(has_cve_product_match(status, "glibc"), False)
115 self.assertEqual(has_cve_product_match(status, "glibca"), False)
116 self.assertEqual(has_cve_product_match(status, "aglibc"), False)
117 self.assertEqual(has_cve_product_match(status, "some_vendor:glibc"), False)
118 self.assertEqual(has_cve_product_match(status, "glibca:glibc"), True)
119 self.assertEqual(has_cve_product_match(status, "test:test glibca:glibc"), True)
120 self.assertEqual(has_cve_product_match(status, "test glibca:glibc"), True)
121 self.assertEqual(has_cve_product_match(status, "glibca:glibc test"), True)
122
123 def test_parse_cve_from_patch_filename(self):
124 from oe.cve_check import parse_cve_from_filename
125
126 # Patch filename without CVE ID
127 self.assertEqual(parse_cve_from_filename("0001-test.patch"), "")
128
129 # Patch with single CVE ID
130 self.assertEqual(
131 parse_cve_from_filename("CVE-2022-12345.patch"), "CVE-2022-12345"
132 )
133
134 # Patch with multiple CVE IDs
135 self.assertEqual(
136 parse_cve_from_filename("CVE-2022-41741-CVE-2022-41742.patch"),
137 "CVE-2022-41742",
138 )
139
140 # Patches with CVE ID and appended text
141 self.assertEqual(
142 parse_cve_from_filename("CVE-2023-3019-0001.patch"), "CVE-2023-3019"
143 )
144 self.assertEqual(
145 parse_cve_from_filename("CVE-2024-21886-1.patch"), "CVE-2024-21886"
146 )
147
148 # Patch with CVE ID and prepended text
149 self.assertEqual(
150 parse_cve_from_filename("grep-CVE-2012-5667.patch"), "CVE-2012-5667"
151 )
152 self.assertEqual(
153 parse_cve_from_filename("0001-CVE-2012-5667.patch"), "CVE-2012-5667"
154 )
155
156 # Patch with CVE ID and both prepended and appended text
157 self.assertEqual(
158 parse_cve_from_filename(
159 "0001-tpm2_import-fix-fixed-AES-key-CVE-2021-3565-0001.patch"
160 ),
161 "CVE-2021-3565",
162 )
163
164 # Only grab the last CVE ID in the filename
165 self.assertEqual(
166 parse_cve_from_filename("CVE-2012-5667-CVE-2012-5668.patch"),
167 "CVE-2012-5668",
168 )
169
170 # Test invalid CVE ID with incorrect length (must be at least 4 digits)
171 self.assertEqual(
172 parse_cve_from_filename("CVE-2024-001.patch"),
173 "",
174 )
175
176 # Test valid CVE ID with very long length
177 self.assertEqual(
178 parse_cve_from_filename("CVE-2024-0000000000000000000000001.patch"),
179 "CVE-2024-0000000000000000000000001",
180 )
181
182 def test_parse_cve_from_patch_contents(self):
183 import textwrap
184 from oe.cve_check import parse_cves_from_patch_contents
185
186 # Standard patch file excerpt without any patches
187 self.assertEqual(
188 parse_cves_from_patch_contents(
189 textwrap.dedent("""\
190 remove "*" for root since we don't have a /etc/shadow so far.
191
192 Upstream-Status: Inappropriate [configuration]
193
194 Signed-off-by: Scott Garman <scott.a.garman@intel.com>
195
196 --- base-passwd/passwd.master~nobash
197 +++ base-passwd/passwd.master
198 @@ -1,4 +1,4 @@
199 -root:*:0:0:root:/root:/bin/sh
200 +root::0:0:root:/root:/bin/sh
201 daemon:*:1:1:daemon:/usr/sbin:/bin/sh
202 bin:*:2:2:bin:/bin:/bin/sh
203 sys:*:3:3:sys:/dev:/bin/sh
204 """)
205 ),
206 set(),
207 )
208
209 # Patch file with multiple CVE IDs (space-separated)
210 self.assertEqual(
211 parse_cves_from_patch_contents(
212 textwrap.dedent("""\
213 There is an assertion in function _cairo_arc_in_direction().
214
215 CVE: CVE-2019-6461 CVE-2019-6462
216 Upstream-Status: Pending
217 Signed-off-by: Ross Burton <ross.burton@intel.com>
218
219 diff --git a/src/cairo-arc.c b/src/cairo-arc.c
220 index 390397bae..1bde774a4 100644
221 --- a/src/cairo-arc.c
222 +++ b/src/cairo-arc.c
223 @@ -186,7 +186,8 @@ _cairo_arc_in_direction (cairo_t *cr,
224 if (cairo_status (cr))
225 return;
226
227 - assert (angle_max >= angle_min);
228 + if (angle_max < angle_min)
229 + return;
230
231 if (angle_max - angle_min > 2 * M_PI * MAX_FULL_CIRCLES) {
232 angle_max = fmod (angle_max - angle_min, 2 * M_PI);
233 """),
234 ),
235 {"CVE-2019-6461", "CVE-2019-6462"},
236 )
237
238 # Patch file with multiple CVE IDs (comma-separated w/ both space and no space)
239 self.assertEqual(
240 parse_cves_from_patch_contents(
241 textwrap.dedent("""\
242 There is an assertion in function _cairo_arc_in_direction().
243
244 CVE: CVE-2019-6461,CVE-2019-6462, CVE-2019-6463
245 Upstream-Status: Pending
246 Signed-off-by: Ross Burton <ross.burton@intel.com>
247
248 diff --git a/src/cairo-arc.c b/src/cairo-arc.c
249 index 390397bae..1bde774a4 100644
250 --- a/src/cairo-arc.c
251 +++ b/src/cairo-arc.c
252 @@ -186,7 +186,8 @@ _cairo_arc_in_direction (cairo_t *cr,
253 if (cairo_status (cr))
254 return;
255
256 - assert (angle_max >= angle_min);
257 + if (angle_max < angle_min)
258 + return;
259
260 if (angle_max - angle_min > 2 * M_PI * MAX_FULL_CIRCLES) {
261 angle_max = fmod (angle_max - angle_min, 2 * M_PI);
262
263 """),
264 ),
265 {"CVE-2019-6461", "CVE-2019-6462", "CVE-2019-6463"},
266 )
267
268 # Patch file with multiple CVE IDs (&-separated)
269 self.assertEqual(
270 parse_cves_from_patch_contents(
271 textwrap.dedent("""\
272 There is an assertion in function _cairo_arc_in_direction().
273
274 CVE: CVE-2019-6461 & CVE-2019-6462
275 Upstream-Status: Pending
276 Signed-off-by: Ross Burton <ross.burton@intel.com>
277
278 diff --git a/src/cairo-arc.c b/src/cairo-arc.c
279 index 390397bae..1bde774a4 100644
280 --- a/src/cairo-arc.c
281 +++ b/src/cairo-arc.c
282 @@ -186,7 +186,8 @@ _cairo_arc_in_direction (cairo_t *cr,
283 if (cairo_status (cr))
284 return;
285
286 - assert (angle_max >= angle_min);
287 + if (angle_max < angle_min)
288 + return;
289
290 if (angle_max - angle_min > 2 * M_PI * MAX_FULL_CIRCLES) {
291 angle_max = fmod (angle_max - angle_min, 2 * M_PI);
292 """),
293 ),
294 {"CVE-2019-6461", "CVE-2019-6462"},
295 )
296
297 # Patch file with multiple lines with CVE IDs
298 self.assertEqual(
299 parse_cves_from_patch_contents(
300 textwrap.dedent("""\
301 There is an assertion in function _cairo_arc_in_direction().
302
303 CVE: CVE-2019-6461 & CVE-2019-6462
304
305 CVE: CVE-2019-6463 & CVE-2019-6464
306 Upstream-Status: Pending
307 Signed-off-by: Ross Burton <ross.burton@intel.com>
308
309 diff --git a/src/cairo-arc.c b/src/cairo-arc.c
310 index 390397bae..1bde774a4 100644
311 --- a/src/cairo-arc.c
312 +++ b/src/cairo-arc.c
313 @@ -186,7 +186,8 @@ _cairo_arc_in_direction (cairo_t *cr,
314 if (cairo_status (cr))
315 return;
316
317 - assert (angle_max >= angle_min);
318 + if (angle_max < angle_min)
319 + return;
320
321 if (angle_max - angle_min > 2 * M_PI * MAX_FULL_CIRCLES) {
322 angle_max = fmod (angle_max - angle_min, 2 * M_PI);
323
324 """),
325 ),
326 {"CVE-2019-6461", "CVE-2019-6462", "CVE-2019-6463", "CVE-2019-6464"},
327 )
328
329 def test_recipe_report_json(self):
330 config = """
331INHERIT += "cve-check"
332CVE_CHECK_FORMAT_JSON = "1"
333"""
334 self.write_config(config)
335
336 vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
337 summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
338 recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "m4-native_cve.json")
339
340 try:
341 os.remove(summary_json)
342 os.remove(recipe_json)
343 except FileNotFoundError:
344 pass
345
346 bitbake("m4-native -c cve_check")
347
348 def check_m4_json(filename):
349 with open(filename) as f:
350 report = json.load(f)
351 self.assertEqual(report["version"], "1")
352 self.assertEqual(len(report["package"]), 1)
353 package = report["package"][0]
354 self.assertEqual(package["name"], "m4-native")
355 found_cves = { issue["id"]: issue["status"] for issue in package["issue"]}
356 self.assertIn("CVE-2008-1687", found_cves)
357 self.assertEqual(found_cves["CVE-2008-1687"], "Patched")
358
359 self.assertExists(summary_json)
360 check_m4_json(summary_json)
361 self.assertExists(recipe_json)
362 check_m4_json(recipe_json)
363
364
365 def test_image_json(self):
366 config = """
367INHERIT += "cve-check"
368CVE_CHECK_FORMAT_JSON = "1"
369"""
370 self.write_config(config)
371
372 vars = get_bb_vars(["CVE_CHECK_DIR", "CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
373 report_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
374 print(report_json)
375 try:
376 os.remove(report_json)
377 except FileNotFoundError:
378 pass
379
380 bitbake("core-image-minimal-initramfs")
381 self.assertExists(report_json)
382
383 # Check that the summary report lists more than one package
384 with open(report_json) as f:
385 report = json.load(f)
386 self.assertEqual(report["version"], "1")
387 self.assertGreater(len(report["package"]), 1)
388
389 # Check that a random recipe wrote a recipe report to deploy/cve/
390 recipename = report["package"][0]["name"]
391 recipe_report = os.path.join(vars["CVE_CHECK_DIR"], recipename + "_cve.json")
392 self.assertExists(recipe_report)
393 with open(recipe_report) as f:
394 report = json.load(f)
395 self.assertEqual(report["version"], "1")
396 self.assertEqual(len(report["package"]), 1)
397 self.assertEqual(report["package"][0]["name"], recipename)
398
399
400 def test_recipe_report_json_unpatched(self):
401 config = """
402INHERIT += "cve-check"
403CVE_CHECK_FORMAT_JSON = "1"
404CVE_CHECK_REPORT_PATCHED = "0"
405"""
406 self.write_config(config)
407
408 vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
409 summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
410 recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "m4-native_cve.json")
411
412 try:
413 os.remove(summary_json)
414 os.remove(recipe_json)
415 except FileNotFoundError:
416 pass
417
418 bitbake("m4-native -c cve_check")
419
420 def check_m4_json(filename):
421 with open(filename) as f:
422 report = json.load(f)
423 self.assertEqual(report["version"], "1")
424 self.assertEqual(len(report["package"]), 1)
425 package = report["package"][0]
426 self.assertEqual(package["name"], "m4-native")
427 # m4 has only Patched CVEs, so the "issue" array will be empty
428 self.assertEqual(package["issue"], [])
429
430 self.assertExists(summary_json)
431 check_m4_json(summary_json)
432 self.assertExists(recipe_json)
433 check_m4_json(recipe_json)
434
435
436 def test_recipe_report_json_ignored(self):
437 config = """
438INHERIT += "cve-check"
439CVE_CHECK_FORMAT_JSON = "1"
440CVE_CHECK_REPORT_PATCHED = "1"
441"""
442 self.write_config(config)
443
444 vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
445 summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
446 recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "logrotate_cve.json")
447
448 try:
449 os.remove(summary_json)
450 os.remove(recipe_json)
451 except FileNotFoundError:
452 pass
453
454 bitbake("logrotate -c cve_check")
455
456 def check_logrotate_json(filename):
457 with open(filename) as f:
458 report = json.load(f)
459 self.assertEqual(report["version"], "1")
460 self.assertEqual(len(report["package"]), 1)
461 package = report["package"][0]
462 self.assertEqual(package["name"], "logrotate")
463 found_cves = {}
464 for issue in package["issue"]:
465 found_cves[issue["id"]] = {
466 "status" : issue["status"],
467 "detail" : issue["detail"] if "detail" in issue else "",
468 "description" : issue["description"] if "description" in issue else ""
469 }
470 # m4 CVE should not be in logrotate
471 self.assertNotIn("CVE-2008-1687", found_cves)
472 # logrotate has both Patched and Ignored CVEs
473 detail = "version-not-in-range"
474 self.assertIn("CVE-2011-1098", found_cves)
475 self.assertEqual(found_cves["CVE-2011-1098"]["status"], "Patched")
476 self.assertEqual(found_cves["CVE-2011-1098"]["detail"], detail)
477 self.assertEqual(len(found_cves["CVE-2011-1098"]["description"]), 0)
478 detail = "not-applicable-platform"
479 description = "CVE is debian, gentoo or SUSE specific on the way logrotate was installed/used"
480 self.assertIn("CVE-2011-1548", found_cves)
481 self.assertEqual(found_cves["CVE-2011-1548"]["status"], "Ignored")
482 self.assertEqual(found_cves["CVE-2011-1548"]["detail"], detail)
483 self.assertEqual(found_cves["CVE-2011-1548"]["description"], description)
484 self.assertIn("CVE-2011-1549", found_cves)
485 self.assertEqual(found_cves["CVE-2011-1549"]["status"], "Ignored")
486 self.assertEqual(found_cves["CVE-2011-1549"]["detail"], detail)
487 self.assertEqual(found_cves["CVE-2011-1549"]["description"], description)
488 self.assertIn("CVE-2011-1550", found_cves)
489 self.assertEqual(found_cves["CVE-2011-1550"]["status"], "Ignored")
490 self.assertEqual(found_cves["CVE-2011-1550"]["detail"], detail)
491 self.assertEqual(found_cves["CVE-2011-1550"]["description"], description)
492
493 self.assertExists(summary_json)
494 check_logrotate_json(summary_json)
495 self.assertExists(recipe_json)
496 check_logrotate_json(recipe_json)
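A sketch of the JSON layout these cve-check tests consume, with field values invented but the structure inferred from the assertions above: a top-level "version", a "package" list, and per-package "issue" entries carrying "id", "status", "detail" and an optional "description":

import json

# Structure inferred from the assertions in these tests; values invented
report = {
    "version": "1",
    "package": [{
        "name": "logrotate",
        "issue": [
            {"id": "CVE-2011-1098", "status": "Patched",
             "detail": "version-not-in-range"},
            {"id": "CVE-2011-1548", "status": "Ignored",
             "detail": "not-applicable-platform",
             "description": "distro-specific install/usage"},
        ],
    }],
}
found = {i["id"]: i["status"] for i in report["package"][0]["issue"]}
print(json.dumps(found, indent=2))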
diff --git a/meta/lib/oeqa/selftest/cases/debuginfod.py b/meta/lib/oeqa/selftest/cases/debuginfod.py
new file mode 100644
index 0000000000..46c0cd87bb
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/debuginfod.py
@@ -0,0 +1,160 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6import os
7import socketserver
8import subprocess
9import time
10import urllib
11import pathlib
12
13from oeqa.core.decorator import OETestTag
14from oeqa.selftest.case import OESelftestTestCase
15from oeqa.utils.commands import bitbake, get_bb_var, runqemu
16
17
18class Debuginfod(OESelftestTestCase):
19
20 def wait_for_debuginfod(self, port):
21 """
22 debuginfod takes time to scan the packages and requesting too early may
23 result in a test failure if the right packages haven't been scanned yet.
24
25 Request the metrics endpoint periodically and wait for there to be no
26 busy scanning threads.
27
28 Returns once debuginfod is ready; raises an exception if it is not
29 ready within the timeout.
30 """
31
32 # Wait two minutes
33 countdown = 24
34 delay = 5
35 latest = None
36
37 while countdown:
38 self.logger.info("waiting...")
39 time.sleep(delay)
40
41 self.logger.info("polling server")
42 if self.debuginfod.poll():
43 self.logger.info("server dead")
44 self.debuginfod.communicate()
45 self.fail("debuginfod terminated unexpectedly")
46 self.logger.info("server alive")
47
48 try:
49 with urllib.request.urlopen("http://localhost:%d/metrics" % port, timeout=10) as f:
50 for line in f.read().decode("ascii").splitlines():
51 key, value = line.rsplit(" ", 1)
52 if key == "thread_busy{role=\"scan\"}":
53 latest = int(value)
54 self.logger.info("Waiting for %d scan jobs to finish" % latest)
55 if latest == 0:
56 return
57 except urllib.error.URLError as e:
58 # TODO: how to catch just timeouts?
59 self.logger.error(e)
60
61 countdown -= 1
62
63 raise TimeoutError("Cannot connect debuginfod, still %d scan jobs running" % latest)
64
65 def start_debuginfod(self, feed_dir):
66 # We assume that the caller has already bitbake'd elfutils-native:do_addto_recipe_sysroot
67
68 # Save some useful paths for later
69 native_sysroot = pathlib.Path(get_bb_var("RECIPE_SYSROOT_NATIVE", "elfutils-native"))
70 native_bindir = native_sysroot / "usr" / "bin"
71 self.debuginfod = native_bindir / "debuginfod"
72 self.debuginfod_find = native_bindir / "debuginfod-find"
73
74 cmd = [
75 self.debuginfod,
76 "--verbose",
77 # In-memory database, this is a one-shot test
78 "--database=:memory:",
79 # Don't use all the host cores
80 "--concurrency=8",
81 "--connection-pool=8",
82 # Disable rescanning, this is a one-shot test
83 "--rescan-time=0",
84 "--groom-time=0",
85 feed_dir,
86 ]
87
88 format = get_bb_var("PACKAGE_CLASSES").split()[0]
89 if format == "package_deb":
90 cmd.append("--scan-deb-dir")
91 elif format == "package_ipk":
92 cmd.append("--scan-deb-dir")
93 elif format == "package_rpm":
94 cmd.append("--scan-rpm-dir")
95 else:
96 self.fail("Unknown package class %s" % format)
97
98 # Find a free port. Racy, but the window is small.
99 with socketserver.TCPServer(("localhost", 0), None) as s:
100 self.port = s.server_address[1]
101 cmd.append("--port=%d" % self.port)
102
103 self.logger.info(f"Starting server {cmd}")
104 self.debuginfod = subprocess.Popen(cmd, env={})
105 self.wait_for_debuginfod(self.port)
106
107
108 def test_debuginfod_native(self):
109 """
110 Test debuginfod outside of qemu, by building a package and looking up a
111 binary's debuginfo using elfutils-native.
112 """
113
114 self.write_config("""
115TMPDIR = "${TOPDIR}/tmp-debuginfod"
116DISTRO_FEATURES:append = " debuginfod"
117INHERIT += "localpkgfeed"
118""")
119 bitbake("elfutils-native:do_addto_recipe_sysroot xz xz:do_package xz:do_localpkgfeed")
120
121 try:
122 self.start_debuginfod(get_bb_var("LOCALPKGFEED_DIR", "xz"))
123
124 env = os.environ.copy()
125 env["DEBUGINFOD_URLS"] = "http://localhost:%d/" % self.port
126
127 pkgs = pathlib.Path(get_bb_var("PKGDEST", "xz"))
128 cmd = (self.debuginfod_find, "debuginfo", pkgs / "xz" / "usr" / "bin" / "xz.xz")
129 self.logger.info(f"Starting client {cmd}")
130 output = subprocess.check_output(cmd, env=env, text=True)
131 # This should be more comprehensive
132 self.assertIn("/.cache/debuginfod_client/", output)
133 finally:
134 self.debuginfod.kill()
135
136 @OETestTag("runqemu")
137 def test_debuginfod_qemu(self):
138 """
139 Test debuginfod-find inside a qemu, talking to a debuginfod on the host.
140 """
141
142 self.write_config("""
143TMPDIR = "${TOPDIR}/tmp-debuginfod"
144DISTRO_FEATURES:append = " debuginfod"
145INHERIT += "localpkgfeed"
146CORE_IMAGE_EXTRA_INSTALL += "elfutils xz"
147 """)
148 bitbake("core-image-minimal elfutils-native:do_addto_recipe_sysroot xz:do_localpkgfeed")
149
150 try:
151 self.start_debuginfod(get_bb_var("LOCALPKGFEED_DIR", "xz"))
152
153 with runqemu("core-image-minimal", runqemuparams="nographic") as qemu:
154 cmd = "DEBUGINFOD_URLS=http://%s:%d/ debuginfod-find debuginfo /usr/bin/xz" % (qemu.server_ip, self.port)
155 self.logger.info(f"Starting client {cmd}")
156 status, output = qemu.run_serial(cmd)
157 # This should be more comprehensive
158 self.assertIn("/.cache/debuginfod_client/", output)
159 finally:
160 self.debuginfod.kill()
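The wait_for_debuginfod() helper above decides readiness by polling debuginfod's Prometheus-style /metrics endpoint until the busy scan-thread gauge drops to zero. The per-line parsing it applies, shown standalone with invented sample text:

# Invented sample of the metrics text; the real lines come from the
# /metrics endpoint that debuginfod serves on its HTTP port
sample_metrics = 'thread_busy{role="groom"} 0\nthread_busy{role="scan"} 3'

for line in sample_metrics.splitlines():
    key, value = line.rsplit(" ", 1)
    if key == 'thread_busy{role="scan"}':
        print("scan jobs still running:", int(value))  # -> 3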
diff --git a/meta/lib/oeqa/selftest/cases/devtool.py b/meta/lib/oeqa/selftest/cases/devtool.py
index 3385546e8e..05f228f03e 100644
--- a/meta/lib/oeqa/selftest/cases/devtool.py
+++ b/meta/lib/oeqa/selftest/cases/devtool.py
@@ -1,18 +1,23 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
7import errno
5import os 8import os
6import re 9import re
7import shutil 10import shutil
8import tempfile 11import tempfile
9import glob 12import glob
10import fnmatch 13import fnmatch
14import unittest
15import json
11 16
12import oeqa.utils.ftools as ftools
13from oeqa.selftest.case import OESelftestTestCase 17from oeqa.selftest.case import OESelftestTestCase
14from oeqa.utils.commands import runCmd, bitbake, get_bb_var, create_temp_layer 18from oeqa.utils.commands import runCmd, bitbake, get_bb_var, create_temp_layer
15from oeqa.utils.commands import get_bb_vars, runqemu, get_test_layer 19from oeqa.utils.commands import get_bb_vars, runqemu, get_test_layer
20from oeqa.core.decorator import OETestTag
16 21
17oldmetapath = None 22oldmetapath = None
18 23
@@ -24,6 +29,9 @@ def setUpModule():
24 corecopydir = os.path.join(templayerdir, 'core-copy') 29 corecopydir = os.path.join(templayerdir, 'core-copy')
25 bblayers_conf = os.path.join(os.environ['BUILDDIR'], 'conf', 'bblayers.conf') 30 bblayers_conf = os.path.join(os.environ['BUILDDIR'], 'conf', 'bblayers.conf')
26 edited_layers = [] 31 edited_layers = []
32 # make sure user doesn't have a local workspace
33 result = runCmd('bitbake-layers show-layers')
34 assert "workspacelayer" not in result.output, "Devtool test suite cannot be run with a local workspace directory"
27 35
28 # We need to take a copy of the meta layer so we can modify it and not 36 # We need to take a copy of the meta layer so we can modify it and not
29 # have any races against other tests that might be running in parallel 37 # have any races against other tests that might be running in parallel
@@ -38,10 +46,17 @@ def setUpModule():
38 canonical_layerpath = os.path.realpath(canonical_layerpath) + '/' 46 canonical_layerpath = os.path.realpath(canonical_layerpath) + '/'
39 edited_layers.append(layerpath) 47 edited_layers.append(layerpath)
40 oldmetapath = os.path.realpath(layerpath) 48 oldmetapath = os.path.realpath(layerpath)
49
50 # when downloading poky from tar.gz some tests will be skipped (BUG 12389)
51 try:
52 runCmd('git rev-parse --is-inside-work-tree', cwd=canonical_layerpath)
53 except Exception:
54 raise unittest.SkipTest("devtool tests require folder to be a git repo")
55
41 result = runCmd('git rev-parse --show-toplevel', cwd=canonical_layerpath) 56 result = runCmd('git rev-parse --show-toplevel', cwd=canonical_layerpath)
42 oldreporoot = result.output.rstrip() 57 oldreporoot = result.output.rstrip()
43 newmetapath = os.path.join(corecopydir, os.path.relpath(oldmetapath, oldreporoot)) 58 newmetapath = os.path.join(corecopydir, os.path.relpath(oldmetapath, oldreporoot))
44 runCmd('git clone %s %s' % (oldreporoot, corecopydir), cwd=templayerdir) 59 runCmd('git clone file://%s %s' % (oldreporoot, corecopydir), cwd=templayerdir)
45 # Now we need to copy any modified files 60 # Now we need to copy any modified files
46 # You might ask "why not just copy the entire tree instead of 61 # You might ask "why not just copy the entire tree instead of
47 # cloning and doing this?" - well, the problem with that is 62 # cloning and doing this?" - well, the problem with that is
@@ -49,11 +64,15 @@ def setUpModule():
49 # under COREBASE and we don't want to copy that, so we have 64 # under COREBASE and we don't want to copy that, so we have
50 # to be selective. 65 # to be selective.
51 result = runCmd('git status --porcelain', cwd=oldreporoot) 66 result = runCmd('git status --porcelain', cwd=oldreporoot)
67
68 # Also copy modifications to the 'scripts/' directory
69 canonical_layerpath_scripts = os.path.normpath(canonical_layerpath + "../scripts")
70
52 for line in result.output.splitlines(): 71 for line in result.output.splitlines():
53 if line.startswith(' M ') or line.startswith('?? '): 72 if line.startswith(' M ') or line.startswith('?? '):
54 relpth = line.split()[1] 73 relpth = line.split()[1]
55 pth = os.path.join(oldreporoot, relpth) 74 pth = os.path.join(oldreporoot, relpth)
56 if pth.startswith(canonical_layerpath): 75 if pth.startswith(canonical_layerpath) or pth.startswith(canonical_layerpath_scripts):
57 if relpth.endswith('/'): 76 if relpth.endswith('/'):
58 destdir = os.path.join(corecopydir, relpth) 77 destdir = os.path.join(corecopydir, relpth)
59 # avoid race condition by not copying .pyc files YPBZ#13421,13803 78 # avoid race condition by not copying .pyc files YPBZ#13421,13803
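For reference, 'git status --porcelain' emits a two-character status code, a space, then the path, which is what the filter loop above relies on. A minimal sketch with invented paths:

    # Illustrative only; the paths are invented, not from a real tree.
    porcelain = ' M meta/classes/image.bbclass\n?? meta/lib/oe/newmodule.py'
    for line in porcelain.splitlines():
        if line.startswith(' M ') or line.startswith('?? '):
            relpth = line.split()[1]  # the path portion of each entry

' M' marks files modified in the working tree and '??' untracked files, the only two states the selective copy needs to care about.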
@@ -80,32 +99,15 @@ def tearDownModule():
80 bb.utils.edit_bblayers_conf(bblayers_conf, None, None, bblayers_edit_cb) 99 bb.utils.edit_bblayers_conf(bblayers_conf, None, None, bblayers_edit_cb)
81 shutil.rmtree(templayerdir) 100 shutil.rmtree(templayerdir)
82 101
83 class DevtoolBase(OESelftestTestCase): 102 class DevtoolTestCase(OESelftestTestCase):
84
85 @classmethod
86 def setUpClass(cls):
87 super(DevtoolBase, cls).setUpClass()
88 bb_vars = get_bb_vars(['TOPDIR', 'SSTATE_DIR'])
89 cls.original_sstate = bb_vars['SSTATE_DIR']
90 cls.devtool_sstate = os.path.join(bb_vars['TOPDIR'], 'sstate_devtool')
91 cls.sstate_conf = 'SSTATE_DIR = "%s"\n' % cls.devtool_sstate
92 cls.sstate_conf += ('SSTATE_MIRRORS += "file://.* file:///%s/PATH"\n'
93 % cls.original_sstate)
94
95 @classmethod
96 def tearDownClass(cls):
97 cls.logger.debug('Deleting devtool sstate cache on %s' % cls.devtool_sstate)
98 runCmd('rm -rf %s' % cls.devtool_sstate)
99 super(DevtoolBase, cls).tearDownClass()
100 103
101 def setUp(self): 104 def setUp(self):
102 """Test case setup function""" 105 """Test case setup function"""
103 super(DevtoolBase, self).setUp() 106 super(DevtoolTestCase, self).setUp()
104 self.workspacedir = os.path.join(self.builddir, 'workspace') 107 self.workspacedir = os.path.join(self.builddir, 'workspace')
105 self.assertTrue(not os.path.exists(self.workspacedir), 108 self.assertTrue(not os.path.exists(self.workspacedir),
106 'This test cannot be run with a workspace directory ' 109 'This test cannot be run with a workspace directory '
107 'under the build directory') 110 'under the build directory')
108 self.append_config(self.sstate_conf)
109 111
110 def _check_src_repo(self, repo_dir): 112 def _check_src_repo(self, repo_dir):
111 """Check srctree git repository""" 113 """Check srctree git repository"""
@@ -152,7 +154,7 @@ class DevtoolBase(OESelftestTestCase):
152 value = invalue 154 value = invalue
153 invar = None 155 invar = None
154 elif '=' in line: 156 elif '=' in line:
155 splitline = line.split('=', 1) 157 splitline = re.split(r"[?+:]*=[+]?", line, 1)
156 var = splitline[0].rstrip() 158 var = splitline[0].rstrip()
157 value = splitline[1].strip().strip('"') 159 value = splitline[1].strip().strip('"')
158 if value.endswith('\\'): 160 if value.endswith('\\'):
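The re.split() pattern introduced above is what lets the parser handle BitBake's full set of assignment operators (=, ?=, ??=, :=, += and =+) instead of plain '=' only. A minimal sketch of how it behaves, on invented sample lines:

    import re
    for line in ('PV ?= "1.0"', 'SRCREV := "abc"', 'DEPENDS += "dbus"'):
        var, value = re.split(r"[?+:]*=[+]?", line, maxsplit=1)
        print(var.rstrip(), '->', value.strip().strip('"'))
    # prints: PV -> 1.0, then SRCREV -> abc, then DEPENDS -> dbus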
@@ -235,6 +237,103 @@ class DevtoolBase(OESelftestTestCase):
235 filelist.append(' '.join(splitline)) 237 filelist.append(' '.join(splitline))
236 return filelist 238 return filelist
237 239
240 def _check_diff(self, diffoutput, addlines, removelines):
241 """Check output from 'git diff' matches expectation"""
242 remaining_addlines = addlines[:]
243 remaining_removelines = removelines[:]
244 for line in diffoutput.splitlines():
245 if line.startswith('+++') or line.startswith('---'):
246 continue
247 elif line.startswith('+'):
248 matched = False
249 for item in remaining_addlines:
250 if re.match(item, line[1:].strip()):
251 matched = True
252 remaining_addlines.remove(item)
253 break
254 self.assertTrue(matched, 'Unexpected diff add line: %s' % line)
255 elif line.startswith('-'):
256 matched = False
257 for item in remaining_removelines:
258 if re.match(item, line[1:].strip()):
259 matched = True
260 remaining_removelines.remove(item)
261 break
262 self.assertTrue(matched, 'Unexpected diff remove line: %s' % line)
263 if remaining_addlines:
264 self.fail('Expected added lines not found: %s' % remaining_addlines)
265 if remaining_removelines:
266 self.fail('Expected removed lines not found: %s' % remaining_removelines)
267
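A hypothetical call, to show the expected shape of the arguments: both lists hold regular expressions, each matched (via re.match) against an added or removed diff line with its leading '+'/'-' stripped, and every pattern must be consumed exactly once:

    # 'recipefile' stands in for whatever recipe the calling test updates.
    result = runCmd('git diff %s' % os.path.basename(recipefile),
                    cwd=os.path.dirname(recipefile))
    self._check_diff(result.output,
                     addlines=['SRCREV = ".*"'],
                     removelines=['SRCREV = ".*"'])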
268 def _check_runqemu_prerequisites(self):
269 """Check runqemu is available
270
271 Whilst some tests would seemingly be better placed as a runtime test,
272 unfortunately the runtime tests run under bitbake and you can't run
273 devtool within bitbake (since devtool needs to run bitbake itself).
274 Additionally we are testing build-time functionality as well, so
275 really this has to be done as an oe-selftest test.
276 """
277 machine = get_bb_var('MACHINE')
278 if not machine.startswith('qemu'):
279 self.skipTest('This test only works with qemu machines')
280 if not os.path.exists('/etc/runqemu-nosudo'):
281 self.skipTest('You must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
282 result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ip tuntap show', ignore_status=True)
283 if result.status != 0:
284 result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ifconfig -a', ignore_status=True)
285 if result.status != 0:
286 self.skipTest('Failed to determine if tap devices exist with ifconfig or ip: %s' % result.output)
287 for line in result.output.splitlines():
288 if line.startswith('tap'):
289 break
290 else:
291 self.skipTest('No tap devices found - you must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
292
293 def _test_devtool_add_git_url(self, git_url, version, pn, resulting_src_uri, srcrev=None):
294 self.track_for_cleanup(self.workspacedir)
295 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
296 command = 'devtool add --version %s %s %s' % (version, pn, git_url)
297 if srcrev:
298 command += ' --srcrev %s' % srcrev
299 result = runCmd(command)
300 self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created')
301 # Check the recipe name is correct
302 recipefile = get_bb_var('FILE', pn)
303 self.assertIn('%s_git.bb' % pn, recipefile, 'Recipe file incorrectly named')
304 self.assertIn(recipefile, result.output)
305 # Test devtool status
306 result = runCmd('devtool status')
307 self.assertIn(pn, result.output)
308 self.assertIn(recipefile, result.output)
309 checkvars = {}
310 checkvars['SRC_URI'] = resulting_src_uri
311 self._test_recipe_contents(recipefile, checkvars, [])
312
313 class DevtoolBase(DevtoolTestCase):
314
315 @classmethod
316 def setUpClass(cls):
317 super(DevtoolBase, cls).setUpClass()
318 bb_vars = get_bb_vars(['TOPDIR', 'SSTATE_DIR'])
319 cls.original_sstate = bb_vars['SSTATE_DIR']
320 cls.devtool_sstate = os.path.join(bb_vars['TOPDIR'], 'sstate_devtool')
321 cls.sstate_conf = 'SSTATE_DIR = "%s"\n' % cls.devtool_sstate
322 cls.sstate_conf += ('SSTATE_MIRRORS += "file://.* file:///%s/PATH"\n'
323 % cls.original_sstate)
324 cls.sstate_conf += ('BB_HASHSERVE_UPSTREAM = "hashserv.yoctoproject.org:8686"\n')
325
326 @classmethod
327 def tearDownClass(cls):
328 cls.logger.debug('Deleting devtool sstate cache on %s' % cls.devtool_sstate)
329 runCmd('rm -rf %s' % cls.devtool_sstate)
330 super(DevtoolBase, cls).tearDownClass()
331
332 def setUp(self):
333 """Test case setup function"""
334 super(DevtoolBase, self).setUp()
335 self.append_config(self.sstate_conf)
336
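Taken together, the fragment DevtoolBase appends to each test's configuration looks like this (paths invented for illustration):

    SSTATE_DIR = "/path/to/builddir/sstate_devtool"
    SSTATE_MIRRORS += "file://.* file:///path/to/original/sstate-cache/PATH"
    BB_HASHSERVE_UPSTREAM = "hashserv.yoctoproject.org:8686"

The effect is that everything the devtool tests build lands in a throwaway sstate directory (removed again in tearDownClass) while the original cache is still reused read-only through the mirror.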
238 337
239 class DevtoolTests(DevtoolBase): 338 class DevtoolTests(DevtoolBase):
240 339
@@ -304,6 +403,38 @@ class DevtoolAddTests(DevtoolBase):
304 bindir = bindir[1:] 403 bindir = bindir[1:]
305 self.assertTrue(os.path.isfile(os.path.join(installdir, bindir, 'pv')), 'pv binary not found in D') 404 self.assertTrue(os.path.isfile(os.path.join(installdir, bindir, 'pv')), 'pv binary not found in D')
306 405
406 def test_devtool_add_binary(self):
407 # Create a binary package containing a known test file
408 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
409 self.track_for_cleanup(tempdir)
410 pn = 'tst-bin'
411 pv = '1.0'
412 test_file_dir = "var/lib/%s/" % pn
413 test_file_name = "test_file"
414 test_file_content = "TEST CONTENT"
415 test_file_package_root = os.path.join(tempdir, pn)
416 test_file_dir_full = os.path.join(test_file_package_root, test_file_dir)
417 bb.utils.mkdirhier(test_file_dir_full)
418 with open(os.path.join(test_file_dir_full, test_file_name), "w") as f:
419 f.write(test_file_content)
420 bin_package_path = os.path.join(tempdir, "%s.tar.gz" % pn)
421 runCmd("tar czf %s -C %s ." % (bin_package_path, test_file_package_root))
422
423 # Test devtool add -b on the binary package
424 self.track_for_cleanup(self.workspacedir)
425 self.add_command_to_tearDown('bitbake -c cleansstate %s' % pn)
426 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
427 result = runCmd('devtool add -b %s %s' % (pn, bin_package_path))
428 self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created')
429
430 # Build the resulting recipe
431 result = runCmd('devtool build %s' % pn)
432 installdir = get_bb_var('D', pn)
433 self.assertTrue(installdir, 'Could not query installdir variable')
434
435 # Check that a known file from the binary package has indeed been installed
436 self.assertTrue(os.path.isfile(os.path.join(installdir, test_file_dir, test_file_name)), '%s not found in D' % test_file_name)
437
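Unpacked, the tarball assembled above contains just one file (all names taken from the variables in the test):

    ./var/lib/tst-bin/test_file    (contents: "TEST CONTENT")

so the final assertion only has to check that 'devtool build' installed that same relative path under ${D}.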
307 def test_devtool_add_git_local(self): 438 def test_devtool_add_git_local(self):
308 # We need dbus built so that DEPENDS recognition works 439 # We need dbus built so that DEPENDS recognition works
309 bitbake('dbus') 440 bitbake('dbus')
@@ -336,15 +467,32 @@ class DevtoolAddTests(DevtoolBase):
336 self.assertIn(srcdir, result.output) 467 self.assertIn(srcdir, result.output)
337 self.assertIn(recipefile, result.output) 468 self.assertIn(recipefile, result.output)
338 checkvars = {} 469 checkvars = {}
339 checkvars['LICENSE'] = 'GPLv2' 470 checkvars['LICENSE'] = 'GPL-2.0-only'
340 checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263' 471 checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263'
341 checkvars['S'] = '${WORKDIR}/git' 472 checkvars['S'] = None
342 checkvars['PV'] = '0.1+git${SRCPV}' 473 checkvars['PV'] = '0.1+git'
343 checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/dbus-wait;protocol=https' 474 checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/dbus-wait;protocol=https;branch=master'
344 checkvars['SRCREV'] = srcrev 475 checkvars['SRCREV'] = srcrev
345 checkvars['DEPENDS'] = set(['dbus']) 476 checkvars['DEPENDS'] = set(['dbus'])
346 self._test_recipe_contents(recipefile, checkvars, []) 477 self._test_recipe_contents(recipefile, checkvars, [])
347 478
479 def test_devtool_add_git_style1(self):
480 version = 'v3.1.0'
481 pn = 'mbedtls'
482 # this will trigger reformat_git_uri with branch parameter in url
483 git_url = "'git://git@github.com/ARMmbed/mbedtls.git;branch=mbedtls-2.28;protocol=https'"
484 resulting_src_uri = "git://git@github.com/ARMmbed/mbedtls.git;branch=mbedtls-2.28;protocol=https"
485 self._test_devtool_add_git_url(git_url, version, pn, resulting_src_uri)
486
487 def test_devtool_add_git_style2(self):
488 version = 'v3.1.0'
489 srcrev = 'v3.1.0'
490 pn = 'mbedtls'
491 # this will trigger reformat_git_uri without a branch parameter in the url
492 git_url = "'git://git@github.com/ARMmbed/mbedtls.git;protocol=https'"
493 resulting_src_uri = "git://git@github.com/ARMmbed/mbedtls.git;protocol=https;branch=master"
494 self._test_devtool_add_git_url(git_url, version, pn, resulting_src_uri, srcrev)
495
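A simplified sketch (not the real implementation) of the URI normalization the two tests above exercise: when the URL carries no explicit branch, ';branch=master' is appended, which is why only the style2 resulting_src_uri differs from its input:

    def reformat_git_uri_sketch(uri):
        # Assumption for illustration; the real helper also rewrites
        # protocols and handles more URL shapes than this.
        return uri if ';branch=' in uri else uri + ';branch=master'

    assert reformat_git_uri_sketch(
        'git://git@github.com/ARMmbed/mbedtls.git;protocol=https'
    ) == 'git://git@github.com/ARMmbed/mbedtls.git;protocol=https;branch=master'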
348 def test_devtool_add_library(self): 496 def test_devtool_add_library(self):
349 # Fetch source 497 # Fetch source
350 tempdir = tempfile.mkdtemp(prefix='devtoolqa') 498 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
@@ -373,7 +521,7 @@ class DevtoolAddTests(DevtoolBase):
373 recipefile = '%s/recipes/libftdi/libftdi_%s.bb' % (self.workspacedir, version) 521 recipefile = '%s/recipes/libftdi/libftdi_%s.bb' % (self.workspacedir, version)
374 result = runCmd('recipetool setvar %s EXTRA_OECMAKE -- \'-DPYTHON_BINDINGS=OFF -DLIBFTDI_CMAKE_CONFIG_DIR=${datadir}/cmake/Modules\'' % recipefile) 522 result = runCmd('recipetool setvar %s EXTRA_OECMAKE -- \'-DPYTHON_BINDINGS=OFF -DLIBFTDI_CMAKE_CONFIG_DIR=${datadir}/cmake/Modules\'' % recipefile)
375 with open(recipefile, 'a') as f: 523 with open(recipefile, 'a') as f:
376 f.write('\nFILES_${PN}-dev += "${datadir}/cmake/Modules"\n') 524 f.write('\nFILES:${PN}-dev += "${datadir}/cmake/Modules"\n')
377 # We don't have the ability to pick up this dependency automatically yet... 525 # We don't have the ability to pick up this dependency automatically yet...
378 f.write('\nDEPENDS += "libusb1"\n') 526 f.write('\nDEPENDS += "libusb1"\n')
379 f.write('\nTESTLIBOUTPUT = "${COMPONENTS_DIR}/${TUNE_PKGARCH}/${PN}/${libdir}"\n') 527 f.write('\nTESTLIBOUTPUT = "${COMPONENTS_DIR}/${TUNE_PKGARCH}/${PN}/${libdir}"\n')
@@ -405,7 +553,7 @@ class DevtoolAddTests(DevtoolBase):
405 self.track_for_cleanup(self.workspacedir) 553 self.track_for_cleanup(self.workspacedir)
406 self.add_command_to_tearDown('bitbake -c cleansstate %s' % testrecipe) 554 self.add_command_to_tearDown('bitbake -c cleansstate %s' % testrecipe)
407 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') 555 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
408 result = runCmd('devtool add %s %s -f %s' % (testrecipe, srcdir, url)) 556 result = runCmd('devtool add --no-pypi %s %s -f %s' % (testrecipe, srcdir, url))
409 self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. %s' % result.output) 557 self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. %s' % result.output)
410 self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory') 558 self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory')
411 self.assertTrue(os.path.isdir(os.path.join(srcdir, '.git')), 'git repository for external source tree was not created') 559 self.assertTrue(os.path.isdir(os.path.join(srcdir, '.git')), 'git repository for external source tree was not created')
@@ -417,14 +565,14 @@ class DevtoolAddTests(DevtoolBase):
417 recipefile = get_bb_var('FILE', testrecipe) 565 recipefile = get_bb_var('FILE', testrecipe)
418 self.assertIn('%s_%s.bb' % (testrecipe, testver), recipefile, 'Recipe file incorrectly named') 566 self.assertIn('%s_%s.bb' % (testrecipe, testver), recipefile, 'Recipe file incorrectly named')
419 checkvars = {} 567 checkvars = {}
420 checkvars['S'] = '${WORKDIR}/MarkupSafe-${PV}' 568 checkvars['S'] = '${UNPACKDIR}/MarkupSafe-${PV}'
421 checkvars['SRC_URI'] = url.replace(testver, '${PV}') 569 checkvars['SRC_URI'] = url.replace(testver, '${PV}')
422 self._test_recipe_contents(recipefile, checkvars, []) 570 self._test_recipe_contents(recipefile, checkvars, [])
423 # Try with version specified 571 # Try with version specified
424 result = runCmd('devtool reset -n %s' % testrecipe) 572 result = runCmd('devtool reset -n %s' % testrecipe)
425 shutil.rmtree(srcdir) 573 shutil.rmtree(srcdir)
426 fakever = '1.9' 574 fakever = '1.9'
427 result = runCmd('devtool add %s %s -f %s -V %s' % (testrecipe, srcdir, url, fakever)) 575 result = runCmd('devtool add --no-pypi %s %s -f %s -V %s' % (testrecipe, srcdir, url, fakever))
428 self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory') 576 self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory')
429 # Test devtool status 577 # Test devtool status
430 result = runCmd('devtool status') 578 result = runCmd('devtool status')
@@ -434,7 +582,7 @@ class DevtoolAddTests(DevtoolBase):
434 recipefile = get_bb_var('FILE', testrecipe) 582 recipefile = get_bb_var('FILE', testrecipe)
435 self.assertIn('%s_%s.bb' % (testrecipe, fakever), recipefile, 'Recipe file incorrectly named') 583 self.assertIn('%s_%s.bb' % (testrecipe, fakever), recipefile, 'Recipe file incorrectly named')
436 checkvars = {} 584 checkvars = {}
437 checkvars['S'] = '${WORKDIR}/MarkupSafe-%s' % testver 585 checkvars['S'] = '${UNPACKDIR}/MarkupSafe-%s' % testver
438 checkvars['SRC_URI'] = url 586 checkvars['SRC_URI'] = url
439 self._test_recipe_contents(recipefile, checkvars, []) 587 self._test_recipe_contents(recipefile, checkvars, [])
440 588
@@ -442,6 +590,7 @@ class DevtoolAddTests(DevtoolBase):
442 tempdir = tempfile.mkdtemp(prefix='devtoolqa') 590 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
443 self.track_for_cleanup(tempdir) 591 self.track_for_cleanup(tempdir)
444 url = 'gitsm://git.yoctoproject.org/mraa' 592 url = 'gitsm://git.yoctoproject.org/mraa'
593 url_branch = '%s;branch=master' % url
445 checkrev = 'ae127b19a50aa54255e4330ccfdd9a5d058e581d' 594 checkrev = 'ae127b19a50aa54255e4330ccfdd9a5d058e581d'
446 testrecipe = 'mraa' 595 testrecipe = 'mraa'
447 srcdir = os.path.join(tempdir, testrecipe) 596 srcdir = os.path.join(tempdir, testrecipe)
@@ -460,9 +609,9 @@ class DevtoolAddTests(DevtoolBase):
460 recipefile = get_bb_var('FILE', testrecipe) 609 recipefile = get_bb_var('FILE', testrecipe)
461 self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named') 610 self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named')
462 checkvars = {} 611 checkvars = {}
463 checkvars['S'] = '${WORKDIR}/git' 612 checkvars['S'] = None
464 checkvars['PV'] = '1.0+git${SRCPV}' 613 checkvars['PV'] = '1.0+git'
465 checkvars['SRC_URI'] = url 614 checkvars['SRC_URI'] = url_branch
466 checkvars['SRCREV'] = '${AUTOREV}' 615 checkvars['SRCREV'] = '${AUTOREV}'
467 self._test_recipe_contents(recipefile, checkvars, []) 616 self._test_recipe_contents(recipefile, checkvars, [])
468 # Try with revision and version specified 617 # Try with revision and version specified
@@ -479,9 +628,9 @@ class DevtoolAddTests(DevtoolBase):
479 recipefile = get_bb_var('FILE', testrecipe) 628 recipefile = get_bb_var('FILE', testrecipe)
480 self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named') 629 self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named')
481 checkvars = {} 630 checkvars = {}
482 checkvars['S'] = '${WORKDIR}/git' 631 checkvars['S'] = None
483 checkvars['PV'] = '1.5+git${SRCPV}' 632 checkvars['PV'] = '1.5+git'
484 checkvars['SRC_URI'] = url 633 checkvars['SRC_URI'] = url_branch
485 checkvars['SRCREV'] = checkrev 634 checkvars['SRCREV'] = checkrev
486 self._test_recipe_contents(recipefile, checkvars, []) 635 self._test_recipe_contents(recipefile, checkvars, [])
487 636
@@ -504,7 +653,7 @@ class DevtoolAddTests(DevtoolBase):
504 result = runCmd('devtool status') 653 result = runCmd('devtool status')
505 self.assertIn(testrecipe, result.output) 654 self.assertIn(testrecipe, result.output)
506 self.assertIn(srcdir, result.output) 655 self.assertIn(srcdir, result.output)
507 # Check recipe 656 # Check recipe
508 recipefile = get_bb_var('FILE', testrecipe) 657 recipefile = get_bb_var('FILE', testrecipe)
509 self.assertIn('%s_%s.bb' % (testrecipe, testver), recipefile, 'Recipe file incorrectly named') 658 self.assertIn('%s_%s.bb' % (testrecipe, testver), recipefile, 'Recipe file incorrectly named')
510 checkvars = {} 659 checkvars = {}
@@ -536,6 +685,19 @@ class DevtoolAddTests(DevtoolBase):
536 # Test devtool build 685 # Test devtool build
537 result = runCmd('devtool build %s' % pn) 686 result = runCmd('devtool build %s' % pn)
538 687
688 def test_devtool_add_python_egg_requires(self):
689 # Fetch source
690 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
691 self.track_for_cleanup(tempdir)
692 testver = '0.14.0'
693 url = 'https://files.pythonhosted.org/packages/e9/9e/25d59f5043cf763833b2581c8027fa92342c4cf8ee523b498ecdf460c16d/uvicorn-%s.tar.gz' % testver
694 testrecipe = 'python3-uvicorn'
695 srcdir = os.path.join(tempdir, testrecipe)
696 # Test devtool add
697 self.track_for_cleanup(self.workspacedir)
698 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
699 result = runCmd('devtool add %s %s -f %s' % (testrecipe, srcdir, url))
700
539 class DevtoolModifyTests(DevtoolBase): 701 class DevtoolModifyTests(DevtoolBase):
540 702
541 def test_devtool_modify(self): 703 def test_devtool_modify(self):
@@ -595,6 +757,25 @@ class DevtoolModifyTests(DevtoolBase):
595 result = runCmd('devtool status') 757 result = runCmd('devtool status')
596 self.assertNotIn('mdadm', result.output) 758 self.assertNotIn('mdadm', result.output)
597 759
760 def test_devtool_modify_go(self):
761 import oe.path
762 from tempfile import TemporaryDirectory
763 with TemporaryDirectory(prefix='devtoolqa') as tempdir:
764 self.track_for_cleanup(self.workspacedir)
765 self.add_command_to_tearDown('bitbake -c clean go-helloworld')
766 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
767 result = runCmd('devtool modify go-helloworld -x %s' % tempdir)
768 self.assertExists(
769 oe.path.join(tempdir, 'src', 'golang.org', 'x', 'example', 'go.mod'),
770 'Extracted source could not be found'
771 )
772 self.assertExists(
773 oe.path.join(self.workspacedir, 'conf', 'layer.conf'),
774 'Workspace directory not created'
775 )
776 matches = glob.glob(oe.path.join(self.workspacedir, 'appends', 'go-helloworld_*.bbappend'))
777 self.assertTrue(matches, 'bbappend not created %s' % result.output)
778
598 def test_devtool_buildclean(self): 779 def test_devtool_buildclean(self):
599 def assertFile(path, *paths): 780 def assertFile(path, *paths):
600 f = os.path.join(path, *paths) 781 f = os.path.join(path, *paths)
@@ -649,7 +830,7 @@ class DevtoolModifyTests(DevtoolBase):
649 self.track_for_cleanup(self.workspacedir) 830 self.track_for_cleanup(self.workspacedir)
650 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') 831 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
651 832
652 testrecipes = 'perf kernel-devsrc package-index core-image-minimal meta-toolchain packagegroup-core-sdk meta-ide-support'.split() 833 testrecipes = 'perf kernel-devsrc package-index core-image-minimal meta-toolchain packagegroup-core-sdk'.split()
653 # Find actual name of gcc-source since it now includes the version - crude, but good enough for this purpose 834 # Find actual name of gcc-source since it now includes the version - crude, but good enough for this purpose
654 result = runCmd('bitbake-layers show-recipes gcc-source*') 835 result = runCmd('bitbake-layers show-recipes gcc-source*')
655 for line in result.output.splitlines(): 836 for line in result.output.splitlines():
@@ -697,6 +878,7 @@ class DevtoolModifyTests(DevtoolBase):
697 878
698 self.assertTrue(bbclassextended, 'None of these recipes are BBCLASSEXTENDed to native - need to adjust testrecipes list: %s' % ', '.join(testrecipes)) 879 self.assertTrue(bbclassextended, 'None of these recipes are BBCLASSEXTENDed to native - need to adjust testrecipes list: %s' % ', '.join(testrecipes))
699 self.assertTrue(inheritnative, 'None of these recipes do "inherit native" - need to adjust testrecipes list: %s' % ', '.join(testrecipes)) 880 self.assertTrue(inheritnative, 'None of these recipes do "inherit native" - need to adjust testrecipes list: %s' % ', '.join(testrecipes))
881
700 def test_devtool_modify_localfiles_only(self): 882 def test_devtool_modify_localfiles_only(self):
701 # Check preconditions 883 # Check preconditions
702 testrecipe = 'base-files' 884 testrecipe = 'base-files'
@@ -720,13 +902,8 @@ class DevtoolModifyTests(DevtoolBase):
720 self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe) 902 self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
721 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') 903 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
722 result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir)) 904 result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
723 srcfile = os.path.join(tempdir, 'oe-local-files/share/dot.bashrc') 905 srcfile = os.path.join(tempdir, 'share/dot.bashrc')
724 srclink = os.path.join(tempdir, 'share/dot.bashrc')
725 self.assertExists(srcfile, 'Extracted source could not be found') 906 self.assertExists(srcfile, 'Extracted source could not be found')
726 if os.path.islink(srclink) and os.path.exists(srclink) and os.path.samefile(srcfile, srclink):
727 correct_symlink = True
728 self.assertTrue(correct_symlink, 'Source symlink to oe-local-files is broken')
729
730 matches = glob.glob(os.path.join(self.workspacedir, 'appends', '%s_*.bbappend' % testrecipe)) 907 matches = glob.glob(os.path.join(self.workspacedir, 'appends', '%s_*.bbappend' % testrecipe))
731 self.assertTrue(matches, 'bbappend not created') 908 self.assertTrue(matches, 'bbappend not created')
732 # Test devtool status 909 # Test devtool status
@@ -763,6 +940,122 @@ class DevtoolModifyTests(DevtoolBase):
763 # Try building 940 # Try building
764 bitbake(testrecipe) 941 bitbake(testrecipe)
765 942
943 def test_devtool_modify_git_no_extract(self):
944 # Check preconditions
945 testrecipe = 'psplash'
946 src_uri = get_bb_var('SRC_URI', testrecipe)
947 self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe)
948 # Clean up anything in the workdir/sysroot/sstate cache
949 bitbake('%s -c cleansstate' % testrecipe)
950 # Try modifying a recipe
951 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
952 self.track_for_cleanup(tempdir)
953 self.track_for_cleanup(self.workspacedir)
954 self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
955 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
956 result = runCmd('git clone https://git.yoctoproject.org/psplash %s && devtool modify -n %s %s' % (tempdir, testrecipe, tempdir))
957 self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. devtool output: %s' % result.output)
958 matches = glob.glob(os.path.join(self.workspacedir, 'appends', 'psplash_*.bbappend'))
959 self.assertTrue(matches, 'bbappend not created')
960 # Test devtool status
961 result = runCmd('devtool status')
962 self.assertIn(testrecipe, result.output)
963 self.assertIn(tempdir, result.output)
964
965 def test_devtool_modify_git_crates_subpath(self):
966 # This tests two things in devtool context:
967 # - that we support local git dependencies for cargo based recipe
968 # - that we support patches in SRC_URI when git url contains subpath parameter
969
970 # Check preconditions:
971 # recipe inherits cargo
972 # git:// uri with a subpath as the main package
973 # some crate:// in SRC_URI
974 # others git:// in SRC_URI
975 # contains a patch
976 testrecipe = 'hello-rs'
977 bb_vars = get_bb_vars(['SRC_URI', 'FILE', 'UNPACKDIR', 'CARGO_HOME'], testrecipe)
978 recipefile = bb_vars['FILE']
979 unpackdir = bb_vars['UNPACKDIR']
980 cargo_home = bb_vars['CARGO_HOME']
981 src_uri = bb_vars['SRC_URI'].split()
982 self.assertTrue(src_uri[0].startswith('git://'),
983 'This test expects the %s recipe to have a git repo as its main uri' % testrecipe)
984 self.assertIn(';subpath=', src_uri[0],
985 'This test expects the %s recipe to have a git uri with subpath' % testrecipe)
986 self.assertTrue(any([uri.startswith('crate://') for uri in src_uri]),
987 'This test expects the %s recipe to have some crates in its src uris' % testrecipe)
988 self.assertGreaterEqual(sum(map(lambda x:x.startswith('git://'), src_uri)), 2,
989 'This test expects the %s recipe to have several git:// uris' % testrecipe)
990 self.assertTrue(any([uri.startswith('file://') and '.patch' in uri for uri in src_uri]),
991 'This test expects the %s recipe to have a patch in its src uris' % testrecipe)
992
993 self._test_recipe_contents(recipefile, {}, ['ptest-cargo'])
994
995 # Clean up anything in the workdir/sysroot/sstate cache
996 bitbake('%s -c cleansstate' % testrecipe)
997 # Try modifying a recipe
998 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
999 self.track_for_cleanup(tempdir)
1000 self.track_for_cleanup(self.workspacedir)
1001 self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
1002 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
1003 result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
1004 self.assertExists(os.path.join(tempdir, 'Cargo.toml'), 'Extracted source could not be found')
1005 self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. devtool output: %s' % result.output)
1006 matches = glob.glob(os.path.join(self.workspacedir, 'appends', '%s_*.bbappend' % testrecipe))
1007 self.assertTrue(matches, 'bbappend not created')
1008 # Test devtool status
1009 result = runCmd('devtool status')
1010 self.assertIn(testrecipe, result.output)
1011 self.assertIn(tempdir, result.output)
1012 # Check git repo
1013 self._check_src_repo(tempdir)
1014 # Check that the patch is correctly applied.
1015 # The last commit message in the tree must contain the following note:
1016 # Notes (devtool):
1017 # original patch: <patchname>
1018 # ..
1019 patchname = None
1020 for uri in src_uri:
1021 if uri.startswith('file://') and '.patch' in uri:
1022 patchname = uri.replace("file://", "").partition('.patch')[0] + '.patch'
1023 self.assertIsNotNone(patchname)
1024 result = runCmd('git -C %s log -1' % tempdir)
1025 self.assertIn("Notes (devtool):\n original patch: %s" % patchname, result.output)
1026
1027 # Configure the recipe to check that the git dependencies are correctly patched in cargo config
1028 bitbake('-c configure %s' % testrecipe)
1029
1030 cargo_config_path = os.path.join(cargo_home, 'config.toml')
1031 with open(cargo_config_path, "r") as f:
1032 cargo_config_contents = [line.strip('\n') for line in f.readlines()]
1033
1034 # Get back git dependencies of the recipe (ignoring the main one)
1035 # and check that they are all correctly patched to be fetched locally
1036 git_deps = [uri for uri in src_uri if uri.startswith("git://")][1:]
1037 for git_dep in git_deps:
1038 raw_url, _, raw_parms = git_dep.partition(";")
1039 parms = {}
1040 for parm in raw_parms.split(";"):
1041 name_parm, _, value_parm = parm.partition('=')
1042 parms[name_parm] = value_parm
1043 self.assertIn('protocol', parms, 'git dependencies uri should contain the "protocol" parameter')
1044 self.assertIn('name', parms, 'git dependencies uri should contain the "name" parameter')
1045 self.assertIn('destsuffix', parms, 'git dependencies uri should contain the "destsuffix" parameter')
1046 self.assertIn('type', parms, 'git dependencies uri should contain the "type" parameter')
1047 self.assertEqual(parms['type'], 'git-dependency', 'git dependencies uri should have "type=git-dependency"')
1048 raw_url = raw_url.replace("git://", '%s://' % parms['protocol'])
1049 patch_line = '[patch."%s"]' % raw_url
1050 path_patched = os.path.join(unpackdir, parms['destsuffix'])
1051 path_override_line = '%s = { path = "%s" }' % (parms['name'], path_patched)
1052 # Would have been better to use tomllib to read this file :/
1053 self.assertIn(patch_line, cargo_config_contents)
1054 self.assertIn(path_override_line, cargo_config_contents)
1055
1056 # Try to package the recipe
1057 bitbake('-c package_qa %s' % testrecipe)
1058
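For reference, an invented example of the pair of lines the loop above computes and then asserts on, one [patch] section per non-main git dependency:

    patch_line          ->  [patch."https://github.com/example/some-dep"]
    path_override_line  ->  some-dep = { path = "/path/to/unpackdir/some-dep" }

Cargo's [patch] mechanism is what redirects each dependency to the locally fetched checkout instead of the network, which is the behaviour the test verifies.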
766 def test_devtool_modify_localfiles(self): 1059 def test_devtool_modify_localfiles(self):
767 # Check preconditions 1060 # Check preconditions
768 testrecipe = 'lighttpd' 1061 testrecipe = 'lighttpd'
@@ -828,12 +1121,43 @@ class DevtoolModifyTests(DevtoolBase):
828 runCmd('git -C %s checkout %s' % (tempdir, branch)) 1121 runCmd('git -C %s checkout %s' % (tempdir, branch))
829 with open(source, "rt") as f: 1122 with open(source, "rt") as f:
830 content = f.read() 1123 content = f.read()
831 self.assertEquals(content, expected) 1124 self.assertEqual(content, expected)
832 check('devtool', 'This is a test for something\n') 1125 if self.td["MACHINE"] == "qemux86":
1126 check('devtool', 'This is a test for qemux86\n')
1127 elif self.td["MACHINE"] == "qemuarm":
1128 check('devtool', 'This is a test for qemuarm\n')
1129 else:
1130 check('devtool', 'This is a test for something\n')
833 check('devtool-no-overrides', 'This is a test for something\n') 1131 check('devtool-no-overrides', 'This is a test for something\n')
834 check('devtool-override-qemuarm', 'This is a test for qemuarm\n') 1132 check('devtool-override-qemuarm', 'This is a test for qemuarm\n')
835 check('devtool-override-qemux86', 'This is a test for qemux86\n') 1133 check('devtool-override-qemux86', 'This is a test for qemux86\n')
836 1134
1135 def test_devtool_modify_multiple_sources(self):
1136 # This test checks that recipes fetching several sources can be used with devtool modify/build
1137 # Check preconditions
1138 testrecipe = 'bzip2'
1139 src_uri = get_bb_var('SRC_URI', testrecipe)
1140 src1 = 'https://' in src_uri
1141 src2 = 'git://' in src_uri
1142 self.assertTrue(src1 and src2, 'This test expects the %s recipe to fetch both a git source and a tarball and it seems that it no longer does' % testrecipe)
1143 # Clean up anything in the workdir/sysroot/sstate cache
1144 bitbake('%s -c cleansstate' % testrecipe)
1145 # Try modifying a recipe
1146 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
1147 self.track_for_cleanup(tempdir)
1148 self.track_for_cleanup(self.workspacedir)
1149 self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
1150 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
1151 result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
1152 self.assertEqual(result.status, 0, "Could not modify recipe %s. Output: %s" % (testrecipe, result.output))
1153 # Test devtool status
1154 result = runCmd('devtool status')
1155 self.assertIn(testrecipe, result.output)
1156 self.assertIn(tempdir, result.output)
1157 # Try building
1158 result = bitbake(testrecipe)
1159 self.assertEqual(result.status, 0, "Bitbake failed, exit code %s, output %s" % (result.status, result.output))
1160
837 class DevtoolUpdateTests(DevtoolBase): 1161 class DevtoolUpdateTests(DevtoolBase):
838 1162
839 def test_devtool_update_recipe(self): 1163 def test_devtool_update_recipe(self):
@@ -861,16 +1185,20 @@ class DevtoolUpdateTests(DevtoolBase):
861 result = runCmd('echo "A new file" > devtool-new-file', cwd=tempdir) 1185 result = runCmd('echo "A new file" > devtool-new-file', cwd=tempdir)
862 result = runCmd('git add devtool-new-file', cwd=tempdir) 1186 result = runCmd('git add devtool-new-file', cwd=tempdir)
863 result = runCmd('git commit -m "Add a new file"', cwd=tempdir) 1187 result = runCmd('git commit -m "Add a new file"', cwd=tempdir)
864 self.add_command_to_tearDown('cd %s; rm %s/*.patch; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile))) 1188 cleanup_cmd = 'cd %s; rm %s/*.patch; git add %s; git checkout %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile))
1189 self.add_command_to_tearDown(cleanup_cmd)
865 result = runCmd('devtool update-recipe %s' % testrecipe) 1190 result = runCmd('devtool update-recipe %s' % testrecipe)
1191 result = runCmd('git add minicom', cwd=os.path.dirname(recipefile))
866 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)), 1192 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
867 ('??', '.*/0001-Change-the-README.patch$'), 1193 ('A ', '.*/0001-Change-the-README.patch$'),
868 ('??', '.*/0002-Add-a-new-file.patch$')] 1194 ('A ', '.*/0002-Add-a-new-file.patch$')]
869 self._check_repo_status(os.path.dirname(recipefile), expected_status) 1195 self._check_repo_status(os.path.dirname(recipefile), expected_status)
1196 result = runCmd(cleanup_cmd)
1197 self._check_repo_status(os.path.dirname(recipefile), [])
870 1198
871 def test_devtool_update_recipe_git(self): 1199 def test_devtool_update_recipe_git(self):
872 # Check preconditions 1200 # Check preconditions
873 testrecipe = 'mtd-utils' 1201 testrecipe = 'mtd-utils-selftest'
874 bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe) 1202 bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe)
875 recipefile = bb_vars['FILE'] 1203 recipefile = bb_vars['FILE']
876 src_uri = bb_vars['SRC_URI'] 1204 src_uri = bb_vars['SRC_URI']
@@ -904,28 +1232,12 @@ class DevtoolUpdateTests(DevtoolBase):
904 self._check_repo_status(os.path.dirname(recipefile), expected_status) 1232 self._check_repo_status(os.path.dirname(recipefile), expected_status)
905 1233
906 result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile)) 1234 result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile))
907 addlines = ['SRCREV = ".*"', 'SRC_URI = "git://git.infradead.org/mtd-utils.git"'] 1235 addlines = ['SRCREV = ".*"', 'SRC_URI = "git://git.infradead.org/mtd-utils.git;branch=master"']
908 srcurilines = src_uri.split() 1236 srcurilines = src_uri.split()
909 srcurilines[0] = 'SRC_URI = "' + srcurilines[0] 1237 srcurilines[0] = 'SRC_URI = "' + srcurilines[0]
910 srcurilines.append('"') 1238 srcurilines.append('"')
911 removelines = ['SRCREV = ".*"'] + srcurilines 1239 removelines = ['SRCREV = ".*"'] + srcurilines
912 for line in result.output.splitlines(): 1240 self._check_diff(result.output, addlines, removelines)
913 if line.startswith('+++') or line.startswith('---'):
914 continue
915 elif line.startswith('+'):
916 matched = False
917 for item in addlines:
918 if re.match(item, line[1:].strip()):
919 matched = True
920 break
921 self.assertTrue(matched, 'Unexpected diff add line: %s' % line)
922 elif line.startswith('-'):
923 matched = False
924 for item in removelines:
925 if re.match(item, line[1:].strip()):
926 matched = True
927 break
928 self.assertTrue(matched, 'Unexpected diff remove line: %s' % line)
929 # Now try with auto mode 1241 # Now try with auto mode
930 runCmd('cd %s; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, os.path.basename(recipefile))) 1242 runCmd('cd %s; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, os.path.basename(recipefile)))
931 result = runCmd('devtool update-recipe %s' % testrecipe) 1243 result = runCmd('devtool update-recipe %s' % testrecipe)
@@ -939,7 +1251,7 @@ class DevtoolUpdateTests(DevtoolBase):
939 1251
940 def test_devtool_update_recipe_append(self): 1252 def test_devtool_update_recipe_append(self):
941 # Check preconditions 1253 # Check preconditions
942 testrecipe = 'mdadm' 1254 testrecipe = 'minicom'
943 bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe) 1255 bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe)
944 recipefile = bb_vars['FILE'] 1256 recipefile = bb_vars['FILE']
945 src_uri = bb_vars['SRC_URI'] 1257 src_uri = bb_vars['SRC_URI']
@@ -957,7 +1269,7 @@ class DevtoolUpdateTests(DevtoolBase):
957 # Check git repo 1269 # Check git repo
958 self._check_src_repo(tempsrcdir) 1270 self._check_src_repo(tempsrcdir)
959 # Add a commit 1271 # Add a commit
960 result = runCmd("sed 's!\\(#define VERSION\\W*\"[^\"]*\\)\"!\\1-custom\"!' -i ReadMe.c", cwd=tempsrcdir) 1272 result = runCmd('echo "Additional line" >> README', cwd=tempsrcdir)
961 result = runCmd('git commit -a -m "Add our custom version"', cwd=tempsrcdir) 1273 result = runCmd('git commit -a -m "Add our custom version"', cwd=tempsrcdir)
962 self.add_command_to_tearDown('cd %s; rm -f %s/*.patch; git checkout .' % (os.path.dirname(recipefile), testrecipe)) 1274 self.add_command_to_tearDown('cd %s; rm -f %s/*.patch; git checkout .' % (os.path.dirname(recipefile), testrecipe))
963 # Create a temporary layer and add it to bblayers.conf 1275 # Create a temporary layer and add it to bblayers.conf
@@ -975,7 +1287,7 @@ class DevtoolUpdateTests(DevtoolBase):
975 self.assertExists(patchfile, 'Patch file not created') 1287 self.assertExists(patchfile, 'Patch file not created')
976 1288
977 # Check bbappend contents 1289 # Check bbappend contents
978 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 1290 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
979 '\n', 1291 '\n',
980 'SRC_URI += "file://0001-Add-our-custom-version.patch"\n', 1292 'SRC_URI += "file://0001-Add-our-custom-version.patch"\n',
981 '\n'] 1293 '\n']
@@ -987,15 +1299,16 @@ class DevtoolUpdateTests(DevtoolBase):
987 with open(bbappendfile, 'r') as f: 1299 with open(bbappendfile, 'r') as f:
988 self.assertEqual(expectedlines, f.readlines()) 1300 self.assertEqual(expectedlines, f.readlines())
989 # Drop new commit and check patch gets deleted 1301 # Drop new commit and check patch gets deleted
990 result = runCmd('git reset HEAD^', cwd=tempsrcdir) 1302 result = runCmd('git reset HEAD^ --hard', cwd=tempsrcdir)
991 result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir)) 1303 result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir))
992 self.assertNotExists(patchfile, 'Patch file not deleted') 1304 self.assertNotExists(patchfile, 'Patch file not deleted')
993 expectedlines2 = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 1305 expectedlines2 = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
994 '\n'] 1306 '\n']
995 with open(bbappendfile, 'r') as f: 1307 with open(bbappendfile, 'r') as f:
996 self.assertEqual(expectedlines2, f.readlines()) 1308 self.assertEqual(expectedlines2, f.readlines())
997 # Put commit back and check we can run it if layer isn't in bblayers.conf 1309 # Put commit back and check we can run it if layer isn't in bblayers.conf
998 os.remove(bbappendfile) 1310 os.remove(bbappendfile)
1311 result = runCmd('echo "Additional line" >> README', cwd=tempsrcdir)
999 result = runCmd('git commit -a -m "Add our custom version"', cwd=tempsrcdir) 1312 result = runCmd('git commit -a -m "Add our custom version"', cwd=tempsrcdir)
1000 result = runCmd('bitbake-layers remove-layer %s' % templayerdir, cwd=self.builddir) 1313 result = runCmd('bitbake-layers remove-layer %s' % templayerdir, cwd=self.builddir)
1001 result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir)) 1314 result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir))
@@ -1007,10 +1320,11 @@ class DevtoolUpdateTests(DevtoolBase):
1007 1320
1008 def test_devtool_update_recipe_append_git(self): 1321 def test_devtool_update_recipe_append_git(self):
1009 # Check preconditions 1322 # Check preconditions
1010 testrecipe = 'mtd-utils' 1323 testrecipe = 'mtd-utils-selftest'
1011 bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe) 1324 bb_vars = get_bb_vars(['FILE', 'SRC_URI', 'LAYERSERIES_CORENAMES'], testrecipe)
1012 recipefile = bb_vars['FILE'] 1325 recipefile = bb_vars['FILE']
1013 src_uri = bb_vars['SRC_URI'] 1326 src_uri = bb_vars['SRC_URI']
1327 corenames = bb_vars['LAYERSERIES_CORENAMES']
1014 self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe) 1328 self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe)
1015 for entry in src_uri.split(): 1329 for entry in src_uri.split():
1016 if entry.startswith('git://'): 1330 if entry.startswith('git://'):
@@ -1041,7 +1355,7 @@ class DevtoolUpdateTests(DevtoolBase):
1041 f.write('BBFILE_PATTERN_oeselftesttemplayer = "^${LAYERDIR}/"\n') 1355 f.write('BBFILE_PATTERN_oeselftesttemplayer = "^${LAYERDIR}/"\n')
1042 f.write('BBFILE_PRIORITY_oeselftesttemplayer = "999"\n') 1356 f.write('BBFILE_PRIORITY_oeselftesttemplayer = "999"\n')
1043 f.write('BBFILE_PATTERN_IGNORE_EMPTY_oeselftesttemplayer = "1"\n') 1357 f.write('BBFILE_PATTERN_IGNORE_EMPTY_oeselftesttemplayer = "1"\n')
1044 f.write('LAYERSERIES_COMPAT_oeselftesttemplayer = "${LAYERSERIES_COMPAT_core}"\n') 1358 f.write('LAYERSERIES_COMPAT_oeselftesttemplayer = "%s"\n' % corenames)
1045 self.add_command_to_tearDown('bitbake-layers remove-layer %s || true' % templayerdir) 1359 self.add_command_to_tearDown('bitbake-layers remove-layer %s || true' % templayerdir)
1046 result = runCmd('bitbake-layers add-layer %s' % templayerdir, cwd=self.builddir) 1360 result = runCmd('bitbake-layers add-layer %s' % templayerdir, cwd=self.builddir)
1047 # Create the bbappend 1361 # Create the bbappend
@@ -1069,7 +1383,7 @@ class DevtoolUpdateTests(DevtoolBase):
1069 with open(bbappendfile, 'r') as f: 1383 with open(bbappendfile, 'r') as f:
1070 self.assertEqual(expectedlines, set(f.readlines())) 1384 self.assertEqual(expectedlines, set(f.readlines()))
1071 # Drop new commit and check SRCREV changes 1385 # Drop new commit and check SRCREV changes
1072 result = runCmd('git reset HEAD^', cwd=tempsrcdir) 1386 result = runCmd('git reset HEAD^ --hard', cwd=tempsrcdir)
1073 result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir)) 1387 result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir))
1074 self.assertNotExists(os.path.join(appenddir, testrecipe), 'Patch directory should not be created') 1388 self.assertNotExists(os.path.join(appenddir, testrecipe), 'Patch directory should not be created')
1075 result = runCmd('git rev-parse HEAD', cwd=tempsrcdir) 1389 result = runCmd('git rev-parse HEAD', cwd=tempsrcdir)
@@ -1081,6 +1395,7 @@ class DevtoolUpdateTests(DevtoolBase):
1081 self.assertEqual(expectedlines, set(f.readlines())) 1395 self.assertEqual(expectedlines, set(f.readlines()))
1082 # Put commit back and check we can run it if layer isn't in bblayers.conf 1396 # Put commit back and check we can run it if layer isn't in bblayers.conf
1083 os.remove(bbappendfile) 1397 os.remove(bbappendfile)
1398 result = runCmd('echo "# Additional line" >> Makefile.am', cwd=tempsrcdir)
1084 result = runCmd('git commit -a -m "Change the Makefile"', cwd=tempsrcdir) 1399 result = runCmd('git commit -a -m "Change the Makefile"', cwd=tempsrcdir)
1085 result = runCmd('bitbake-layers remove-layer %s' % templayerdir, cwd=self.builddir) 1400 result = runCmd('bitbake-layers remove-layer %s' % templayerdir, cwd=self.builddir)
1086 result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir)) 1401 result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir))
@@ -1112,22 +1427,39 @@ class DevtoolUpdateTests(DevtoolBase):
1112 # Try building just to ensure we haven't broken that 1427 # Try building just to ensure we haven't broken that
1113 bitbake("%s" % testrecipe) 1428 bitbake("%s" % testrecipe)
1114 # Edit / commit local source 1429 # Edit / commit local source
1115 runCmd('echo "/* Foobar */" >> oe-local-files/makedevs.c', cwd=tempdir) 1430 runCmd('echo "/* Foobar */" >> makedevs.c', cwd=tempdir)
1116 runCmd('echo "Foo" > oe-local-files/new-local', cwd=tempdir) 1431 runCmd('echo "Foo" > new-local', cwd=tempdir)
1117 runCmd('echo "Bar" > new-file', cwd=tempdir) 1432 runCmd('echo "Bar" > new-file', cwd=tempdir)
1118 runCmd('git add new-file', cwd=tempdir) 1433 runCmd('git add new-file', cwd=tempdir)
1119 runCmd('git commit -m "Add new file"', cwd=tempdir) 1434 runCmd('git commit -m "Add new file"', cwd=tempdir)
1120 self.add_command_to_tearDown('cd %s; git clean -fd .; git checkout .' % 1435 runCmd('git add new-local', cwd=tempdir)
1121 os.path.dirname(recipefile))
1122 runCmd('devtool update-recipe %s' % testrecipe) 1436 runCmd('devtool update-recipe %s' % testrecipe)
1123 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)), 1437 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
1124 (' M', '.*/makedevs/makedevs.c$'), 1438 (' M', '.*/makedevs/makedevs.c$'),
1125 ('??', '.*/makedevs/new-local$'), 1439 ('??', '.*/makedevs/new-local$'),
1126 ('??', '.*/makedevs/0001-Add-new-file.patch$')] 1440 ('??', '.*/makedevs/0001-Add-new-file.patch$')]
1127 self._check_repo_status(os.path.dirname(recipefile), expected_status) 1441 self._check_repo_status(os.path.dirname(recipefile), expected_status)
1128 1442 # Now try to update recipe in another layer, so first, clean it
1129 def test_devtool_update_recipe_local_files_2(self): 1443 runCmd('cd %s; git clean -fd .; git checkout .' % os.path.dirname(recipefile))
1130 """Check local source files support when oe-local-files is in Git""" 1444 # Create a temporary layer and add it to bblayers.conf
1445 self._create_temp_layer(templayerdir, True, 'templayer')
1446 # Update recipe in templayer
1447 result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir))
1448 self.assertNotIn('WARNING:', result.output)
1449 # Check recipe is still clean
1450 self._check_repo_status(os.path.dirname(recipefile), [])
1451 splitpath = os.path.dirname(recipefile).split(os.sep)
1452 appenddir = os.path.join(templayerdir, splitpath[-2], splitpath[-1])
1453 bbappendfile = self._check_bbappend(testrecipe, recipefile, appenddir)
1454 patchfile = os.path.join(appenddir, testrecipe, '0001-Add-new-file.patch')
1455 new_local_file = os.path.join(appenddir, testrecipe, 'new-local')
1456 local_file = os.path.join(appenddir, testrecipe, 'makedevs.c')
1457 self.assertExists(patchfile, 'Patch file 0001-Add-new-file.patch not created')
1458 self.assertExists(local_file, 'File makedevs.c not created')
1459 self.assertExists(new_local_file, 'File new-local not created')
1460
1461 def _test_devtool_update_recipe_local_files_2(self):
1462 """Check local source files support when editing local files in Git"""
1131 testrecipe = 'devtool-test-local' 1463 testrecipe = 'devtool-test-local'
1132 recipefile = get_bb_var('FILE', testrecipe) 1464 recipefile = get_bb_var('FILE', testrecipe)
1133 recipedir = os.path.dirname(recipefile) 1465 recipedir = os.path.dirname(recipefile)
@@ -1142,17 +1474,13 @@ class DevtoolUpdateTests(DevtoolBase):
1142 result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir)) 1474 result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
1143 # Check git repo 1475 # Check git repo
1144 self._check_src_repo(tempdir) 1476 self._check_src_repo(tempdir)
1145 # Add oe-local-files to Git
1146 runCmd('rm oe-local-files/.gitignore', cwd=tempdir)
1147 runCmd('git add oe-local-files', cwd=tempdir)
1148 runCmd('git commit -m "Add local sources"', cwd=tempdir)
1149 # Edit / commit local sources 1477 # Edit / commit local sources
1150 runCmd('echo "# Foobar" >> oe-local-files/file1', cwd=tempdir) 1478 runCmd('echo "# Foobar" >> file1', cwd=tempdir)
1151 runCmd('git commit -am "Edit existing file"', cwd=tempdir) 1479 runCmd('git commit -am "Edit existing file"', cwd=tempdir)
1152 runCmd('git rm oe-local-files/file2', cwd=tempdir) 1480 runCmd('git rm file2', cwd=tempdir)
1153 runCmd('git commit -m"Remove file"', cwd=tempdir) 1481 runCmd('git commit -m"Remove file"', cwd=tempdir)
1154 runCmd('echo "Foo" > oe-local-files/new-local', cwd=tempdir) 1482 runCmd('echo "Foo" > new-local', cwd=tempdir)
1155 runCmd('git add oe-local-files/new-local', cwd=tempdir) 1483 runCmd('git add new-local', cwd=tempdir)
1156 runCmd('git commit -m "Add new local file"', cwd=tempdir) 1484 runCmd('git commit -m "Add new local file"', cwd=tempdir)
1157 runCmd('echo "Gar" > new-file', cwd=tempdir) 1485 runCmd('echo "Gar" > new-file', cwd=tempdir)
1158 runCmd('git add new-file', cwd=tempdir) 1486 runCmd('git add new-file', cwd=tempdir)
@@ -1161,7 +1489,7 @@ class DevtoolUpdateTests(DevtoolBase):
1161 os.path.dirname(recipefile)) 1489 os.path.dirname(recipefile))
1162 # Checkout unmodified file to working copy -> devtool should still pick 1490 # Checkout unmodified file to working copy -> devtool should still pick
1163 # the modified version from HEAD 1491 # the modified version from HEAD
1164 runCmd('git checkout HEAD^ -- oe-local-files/file1', cwd=tempdir) 1492 runCmd('git checkout HEAD^ -- file1', cwd=tempdir)
1165 runCmd('devtool update-recipe %s' % testrecipe) 1493 runCmd('devtool update-recipe %s' % testrecipe)
1166 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)), 1494 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
1167 (' M', '.*/file1$'), 1495 (' M', '.*/file1$'),
@@ -1236,7 +1564,7 @@ class DevtoolUpdateTests(DevtoolBase):
1236 # (don't bother with cleaning the recipe on teardown, we won't be building it) 1564 # (don't bother with cleaning the recipe on teardown, we won't be building it)
1237 result = runCmd('devtool modify %s' % testrecipe) 1565 result = runCmd('devtool modify %s' % testrecipe)
1238 # Modify one file 1566 # Modify one file
1239 runCmd('echo "Another line" >> file2', cwd=os.path.join(self.workspacedir, 'sources', testrecipe, 'oe-local-files')) 1567 runCmd('echo "Another line" >> file2', cwd=os.path.join(self.workspacedir, 'sources', testrecipe))
1240 self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile))) 1568 self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)))
1241 result = runCmd('devtool update-recipe %s' % testrecipe) 1569 result = runCmd('devtool update-recipe %s' % testrecipe)
1242 expected_status = [(' M', '.*/%s/file2$' % testrecipe)] 1570 expected_status = [(' M', '.*/%s/file2$' % testrecipe)]
@@ -1259,7 +1587,7 @@ class DevtoolUpdateTests(DevtoolBase):
1259 # Modify one file 1587 # Modify one file
1260 srctree = os.path.join(self.workspacedir, 'sources', testrecipe) 1588 srctree = os.path.join(self.workspacedir, 'sources', testrecipe)
1261 runCmd('echo "Another line" >> README', cwd=srctree) 1589 runCmd('echo "Another line" >> README', cwd=srctree)
1262 runCmd('git commit -a --amend --no-edit', cwd=srctree) 1590 runCmd('git commit -a --amend --no-edit --no-verify', cwd=srctree)
1263 self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile))) 1591 self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)))
1264 result = runCmd('devtool update-recipe %s' % testrecipe) 1592 result = runCmd('devtool update-recipe %s' % testrecipe)
1265 expected_status = [(' M', '.*/%s/readme.patch.gz$' % testrecipe)] 1593 expected_status = [(' M', '.*/%s/readme.patch.gz$' % testrecipe)]
@@ -1295,6 +1623,121 @@ class DevtoolUpdateTests(DevtoolBase):
1295 expected_status = [] 1623 expected_status = []
1296 self._check_repo_status(os.path.dirname(recipefile), expected_status) 1624 self._check_repo_status(os.path.dirname(recipefile), expected_status)
1297 1625
1626 def test_devtool_finish_modify_git_subdir(self):
1627 # Check preconditions
1628 testrecipe = 'dos2unix'
1629 self.append_config('ERROR_QA:remove:pn-dos2unix = "patch-status"\n')
1630 bb_vars = get_bb_vars(['SRC_URI', 'S', 'UNPACKDIR', 'FILE', 'BB_GIT_DEFAULT_DESTSUFFIX'], testrecipe)
1631 self.assertIn('git://', bb_vars['SRC_URI'], 'This test expects the %s recipe to be a git recipe' % testrecipe)
1632 unpackdir_git = '%s/%s/' % (bb_vars['UNPACKDIR'], bb_vars['BB_GIT_DEFAULT_DESTSUFFIX'])
1633 if not bb_vars['S'].startswith(unpackdir_git):
1634 self.fail('This test expects the %s recipe to be building from a subdirectory of the git repo' % testrecipe)
1635 subdir = bb_vars['S'].split(unpackdir_git, 1)[1]
1636 # Clean up anything in the workdir/sysroot/sstate cache
1637 bitbake('%s -c cleansstate' % testrecipe)
1638 # Try modifying a recipe
1639 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
1640 self.track_for_cleanup(tempdir)
1641 self.track_for_cleanup(self.workspacedir)
1642 self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
1643 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
1644 result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
1645 testsrcfile = os.path.join(tempdir, subdir, 'dos2unix.c')
1646 self.assertExists(testsrcfile, 'Extracted source could not be found')
1647 self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. devtool output: %s' % result.output)
1648 self.assertNotExists(os.path.join(tempdir, subdir, '.git'), 'Subdirectory has been initialised as a git repo')
1649 # Check git repo
1650 self._check_src_repo(tempdir)
1651 # Modify file
1652 runCmd("sed -i '1s:^:/* Add a comment */\\n:' %s" % testsrcfile)
1653 result = runCmd('git commit -a -m "Add a comment"', cwd=tempdir)
1654 # Now try updating original recipe
1655 recipefile = bb_vars['FILE']
1656 recipedir = os.path.dirname(recipefile)
1657 self.add_command_to_tearDown('cd %s; rm -f %s/*.patch; git checkout .' % (recipedir, testrecipe))
1658 result = runCmd('devtool update-recipe %s' % testrecipe)
1659 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
1660 ('??', '.*/%s/%s/$' % (testrecipe, testrecipe))]
1661 self._check_repo_status(os.path.dirname(recipefile), expected_status)
1662 result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile))
1663 removelines = ['SRC_URI = "git://.*"']
1664 addlines = [
1665 'SRC_URI = "git://.* \\\\',
1666 'file://0001-Add-a-comment.patch;patchdir=.. \\\\',
1667 '"'
1668 ]
1669 self._check_diff(result.output, addlines, removelines)
1670 # Put things back so we can run devtool finish on a different layer
1671 runCmd('cd %s; rm -f %s/*.patch; git checkout .' % (recipedir, testrecipe))
1672 # Run devtool finish
1673 res = re.search('recipes-.*', recipedir)
1674 self.assertTrue(res, 'Unable to find recipe subdirectory')
1675 recipesubdir = res[0]
1676 self.add_command_to_tearDown('rm -rf %s' % os.path.join(self.testlayer_path, recipesubdir))
1677 result = runCmd('devtool finish %s meta-selftest' % testrecipe)
1678 # Check bbappend file contents
1679 appendfn = os.path.join(self.testlayer_path, recipesubdir, '%s_%%.bbappend' % testrecipe)
1680 with open(appendfn, 'r') as f:
1681 appendlines = f.readlines()
1682 expected_appendlines = [
1683 'FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
1684 '\n',
1685 'SRC_URI += "file://0001-Add-a-comment.patch;patchdir=.."\n',
1686 '\n'
1687 ]
1688 self.assertEqual(appendlines, expected_appendlines)
1689 self.assertExists(os.path.join(os.path.dirname(appendfn), testrecipe, '0001-Add-a-comment.patch'))
1690 # Try building
1691 bitbake('%s -c patch' % testrecipe)
1692
1693 def test_devtool_git_submodules(self):
1694         # This tests that a patch added in a git submodule can be extracted properly using devtool finish
1695 # Check preconditions
1696 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
1697 self.track_for_cleanup(self.workspacedir)
1698 recipe = 'vulkan-samples'
1699 src_uri = get_bb_var('SRC_URI', recipe)
1700 self.assertIn('gitsm://', src_uri, 'This test expects the %s recipe to be a git recipe with submodules' % recipe)
1701 oldrecipefile = get_bb_var('FILE', recipe)
1702 recipedir = os.path.dirname(oldrecipefile)
1703 result = runCmd('git status --porcelain .', cwd=recipedir)
1704 if result.output.strip():
1705 self.fail('Recipe directory for %s contains uncommitted changes' % recipe)
1706 self.assertIn('/meta/', recipedir)
1707 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
1708 self.track_for_cleanup(tempdir)
1709 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
1710 result = runCmd('devtool modify %s %s' % (recipe, tempdir))
1711 self.assertExists(os.path.join(tempdir, 'CMakeLists.txt'), 'Extracted source could not be found')
1712 # Test devtool status
1713 result = runCmd('devtool status')
1714 self.assertIn(recipe, result.output)
1715 self.assertIn(tempdir, result.output)
1716             # Modify a source file in a submodule (grab the first one)
1717 result = runCmd('git submodule --quiet foreach \'echo $sm_path\'', cwd=tempdir)
1718 submodule = result.output.splitlines()[0]
1719 submodule_path = os.path.join(tempdir, submodule)
1720 runCmd('echo "#This is a first comment" >> testfile', cwd=submodule_path)
1721             result = runCmd('git status --porcelain .', cwd=submodule_path)
1722 self.assertIn("testfile", result.output)
1723 runCmd('git add testfile; git commit -m "Adding a new file"', cwd=submodule_path)
1724
1725 # Try finish to the original layer
1726 self.add_command_to_tearDown('rm -rf %s ; cd %s ; git checkout %s' % (recipedir, os.path.dirname(recipedir), recipedir))
1727 runCmd('devtool finish -f %s meta' % recipe)
1728 result = runCmd('devtool status')
1729 self.assertNotIn(recipe, result.output, 'Recipe should have been reset by finish but wasn\'t')
1730 self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after finish')
1731 expected_status = [(' M', '.*/%s$' % os.path.basename(oldrecipefile)),
1732 ('??', '.*/.*-Adding-a-new-file.patch$')]
1733 self._check_repo_status(recipedir, expected_status)
1734 # Make sure the patch is added to the recipe with the correct "patchdir" option
1735 result = runCmd('git diff .', cwd=recipedir)
1736 addlines = [
1737 'file://0001-Adding-a-new-file.patch;patchdir=%s \\\\' % submodule
1738 ]
1739 self._check_diff(result.output, addlines, [])
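        # Here patchdir points at the submodule path, so do_patch applies the
        # patch inside the submodule checkout rather than at the top level of
        # the source tree.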
1740
1298class DevtoolExtractTests(DevtoolBase): 1741class DevtoolExtractTests(DevtoolBase):
1299 1742
1300 def test_devtool_extract(self): 1743 def test_devtool_extract(self):
@@ -1317,6 +1760,8 @@ class DevtoolExtractTests(DevtoolBase):
1317 self.assertExists(os.path.join(tempdir, 'Makefile.am'), 'Extracted source could not be found') 1760 self.assertExists(os.path.join(tempdir, 'Makefile.am'), 'Extracted source could not be found')
1318 self._check_src_repo(tempdir) 1761 self._check_src_repo(tempdir)
1319 1762
1763class DevtoolResetTests(DevtoolBase):
1764
1320 def test_devtool_reset_all(self): 1765 def test_devtool_reset_all(self):
1321 tempdir = tempfile.mkdtemp(prefix='devtoolqa') 1766 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
1322 self.track_for_cleanup(tempdir) 1767 self.track_for_cleanup(tempdir)
@@ -1343,33 +1788,30 @@ class DevtoolExtractTests(DevtoolBase):
1343 matches2 = glob.glob(stampprefix2 + '*') 1788 matches2 = glob.glob(stampprefix2 + '*')
1344 self.assertFalse(matches2, 'Stamp files exist for recipe %s that should have been cleaned' % testrecipe2) 1789 self.assertFalse(matches2, 'Stamp files exist for recipe %s that should have been cleaned' % testrecipe2)
1345 1790
1791 def test_devtool_reset_re_plus_plus(self):
1792 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
1793 self.track_for_cleanup(tempdir)
1794 self.track_for_cleanup(self.workspacedir)
1795 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
1796 testrecipe = 'devtool-test-reset-re++'
1797 result = runCmd('devtool modify %s' % testrecipe)
1798 result = runCmd('devtool reset -n %s' % testrecipe)
1799 self.assertIn(testrecipe, result.output)
1800 result = runCmd('devtool status')
1801 self.assertNotIn(testrecipe, result.output)
1802 self.assertNotExists(os.path.join(self.workspacedir, 'recipes', testrecipe), 'Recipe directory should not exist after resetting')
1803
1804class DevtoolDeployTargetTests(DevtoolBase):
1805
1806 @OETestTag("runqemu")
1346 def test_devtool_deploy_target(self): 1807 def test_devtool_deploy_target(self):
1347 # NOTE: Whilst this test would seemingly be better placed as a runtime test, 1808 self._check_runqemu_prerequisites()
1348 # unfortunately the runtime tests run under bitbake and you can't run
1349 # devtool within bitbake (since devtool needs to run bitbake itself).
1350 # Additionally we are testing build-time functionality as well, so
1351 # really this has to be done as an oe-selftest test.
1352 #
1353 # Check preconditions
1354 machine = get_bb_var('MACHINE')
1355 if not machine.startswith('qemu'):
1356 self.skipTest('This test only works with qemu machines')
1357 if not os.path.exists('/etc/runqemu-nosudo'):
1358 self.skipTest('You must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
1359 result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ip tuntap show', ignore_status=True)
1360 if result.status != 0:
1361 result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ifconfig -a', ignore_status=True)
1362 if result.status != 0:
1363 self.skipTest('Failed to determine if tap devices exist with ifconfig or ip: %s' % result.output)
1364 for line in result.output.splitlines():
1365 if line.startswith('tap'):
1366 break
1367 else:
1368 self.skipTest('No tap devices found - you must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
1369 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory') 1809 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
1370 # Definitions 1810 # Definitions
1371 testrecipe = 'mdadm' 1811 testrecipe = 'mdadm'
1372 testfile = '/sbin/mdadm' 1812 testfile = '/sbin/mdadm'
1813 if "usrmerge" in get_bb_var('DISTRO_FEATURES'):
1814 testfile = '/usr/sbin/mdadm'
1373 testimage = 'oe-selftest-image' 1815 testimage = 'oe-selftest-image'
1374 testcommand = '/sbin/mdadm --help' 1816 testcommand = '/sbin/mdadm --help'
1375 # Build an image to run 1817 # Build an image to run
@@ -1428,6 +1870,8 @@ class DevtoolExtractTests(DevtoolBase):
1428 result = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, testcommand), ignore_status=True) 1870 result = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, testcommand), ignore_status=True)
1429 self.assertNotEqual(result, 0, 'undeploy-target did not remove command as it should have') 1871 self.assertNotEqual(result, 0, 'undeploy-target did not remove command as it should have')
1430 1872
1873class DevtoolBuildImageTests(DevtoolBase):
1874
1431 def test_devtool_build_image(self): 1875 def test_devtool_build_image(self):
1432 """Test devtool build-image plugin""" 1876 """Test devtool build-image plugin"""
1433 # Check preconditions 1877 # Check preconditions
@@ -1463,6 +1907,14 @@ class DevtoolExtractTests(DevtoolBase):
1463 1907
1464class DevtoolUpgradeTests(DevtoolBase): 1908class DevtoolUpgradeTests(DevtoolBase):
1465 1909
1910 def setUp(self):
1911 super().setUp()
1912 try:
1913 runCmd("git config --global user.name")
1914 runCmd("git config --global user.email")
1915         except Exception:
1916             self.skipTest("Git user.name and user.email must be set")
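            # These can be set beforehand with, for example:
            #   git config --global user.name "Your Name"
            #   git config --global user.email "you@example.com"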
1917
1466 def test_devtool_upgrade(self): 1918 def test_devtool_upgrade(self):
1467 # Check preconditions 1919 # Check preconditions
1468 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory') 1920 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
@@ -1543,6 +1995,100 @@ class DevtoolUpgradeTests(DevtoolBase):
1543 self.assertNotIn(recipe, result.output) 1995 self.assertNotIn(recipe, result.output)
1544 self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after resetting') 1996 self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after resetting')
1545 1997
1998 def test_devtool_upgrade_drop_md5sum(self):
1999 # Check preconditions
2000 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
2001 self.track_for_cleanup(self.workspacedir)
2002 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
2003 # For the moment, we are using a real recipe.
2004 recipe = 'devtool-upgrade-test3'
2005 version = '1.6.0'
2006 oldrecipefile = get_bb_var('FILE', recipe)
2007 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
2008 self.track_for_cleanup(tempdir)
2009         # Check upgrade. The code does not check whether the new PV is older or newer than the
2010         # current PV, so we may actually be downgrading instead of upgrading.
2011 result = runCmd('devtool upgrade %s %s -V %s' % (recipe, tempdir, version))
2012 # Check new recipe file is present
2013 newrecipefile = os.path.join(self.workspacedir, 'recipes', recipe, '%s_%s.bb' % (recipe, version))
2014 self.assertExists(newrecipefile, 'Recipe file should exist after upgrade')
2015 # Check recipe got changed as expected
2016 with open(oldrecipefile + '.upgraded', 'r') as f:
2017 desiredlines = f.readlines()
2018 with open(newrecipefile, 'r') as f:
2019 newlines = f.readlines()
2020 self.assertEqual(desiredlines, newlines)
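        # The comparison relies on the meta-selftest layer shipping a
        # reference '<recipe>.bb.upgraded' file next to the original recipe;
        # the devtool-generated recipe is simply diffed against it line by
        # line.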
2021
2022 def test_devtool_upgrade_all_checksums(self):
2023 # Check preconditions
2024 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
2025 self.track_for_cleanup(self.workspacedir)
2026 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
2027 # For the moment, we are using a real recipe.
2028 recipe = 'devtool-upgrade-test4'
2029 version = '1.6.0'
2030 oldrecipefile = get_bb_var('FILE', recipe)
2031 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
2032 self.track_for_cleanup(tempdir)
2033         # Check upgrade. The code does not check whether the new PV is older or newer than the
2034         # current PV, so we may actually be downgrading instead of upgrading.
2035 result = runCmd('devtool upgrade %s %s -V %s' % (recipe, tempdir, version))
2036 # Check new recipe file is present
2037 newrecipefile = os.path.join(self.workspacedir, 'recipes', recipe, '%s_%s.bb' % (recipe, version))
2038 self.assertExists(newrecipefile, 'Recipe file should exist after upgrade')
2039 # Check recipe got changed as expected
2040 with open(oldrecipefile + '.upgraded', 'r') as f:
2041 desiredlines = f.readlines()
2042 with open(newrecipefile, 'r') as f:
2043 newlines = f.readlines()
2044 self.assertEqual(desiredlines, newlines)
2045
2046 def test_devtool_upgrade_recipe_upgrade_extra_tasks(self):
2047 # Check preconditions
2048 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
2049 self.track_for_cleanup(self.workspacedir)
2050 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
2051 recipe = 'python3-guessing-game'
2052 version = '0.2.0'
2053 commit = '40cf004c2772ffa20ea803fa3be1528a75be3e98'
2054 oldrecipefile = get_bb_var('FILE', recipe)
2055         oldcratesincfile = os.path.join(os.path.dirname(oldrecipefile), os.path.basename(oldrecipefile).removesuffix('_git.bb') + '-crates.inc')
2056 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
2057 self.track_for_cleanup(tempdir)
2058 # Check that recipe is not already under devtool control
2059 result = runCmd('devtool status')
2060 self.assertNotIn(recipe, result.output)
2061 # Check upgrade
2062 result = runCmd('devtool upgrade %s %s --version %s --srcrev %s' % (recipe, tempdir, version, commit))
2063 # Check if srctree at least is populated
2064 self.assertTrue(len(os.listdir(tempdir)) > 0, 'srctree (%s) should be populated with new (%s) source code' % (tempdir, commit))
2065 # Check new recipe file and new -crates.inc files are present
2066 newrecipefile = os.path.join(self.workspacedir, 'recipes', recipe, os.path.basename(oldrecipefile))
2067 newcratesincfile = os.path.join(self.workspacedir, 'recipes', recipe, os.path.basename(oldcratesincfile))
2068 self.assertExists(newrecipefile, 'Recipe file should exist after upgrade')
2069 self.assertExists(newcratesincfile, 'Recipe crates.inc file should exist after upgrade')
2070 # Check devtool status and make sure recipe is present
2071 result = runCmd('devtool status')
2072 self.assertIn(recipe, result.output)
2073 self.assertIn(tempdir, result.output)
2074 # Check recipe got changed as expected
2075 with open(oldrecipefile + '.upgraded', 'r') as f:
2076 desiredlines = f.readlines()
2077 with open(newrecipefile, 'r') as f:
2078 newlines = f.readlines()
2079 self.assertEqual(desiredlines, newlines)
2080 # Check crates.inc got changed as expected
2081 with open(oldcratesincfile + '.upgraded', 'r') as f:
2082 desiredlines = f.readlines()
2083 with open(newcratesincfile, 'r') as f:
2084 newlines = f.readlines()
2085 self.assertEqual(desiredlines, newlines)
2086 # Check devtool reset recipe
2087 result = runCmd('devtool reset %s -n' % recipe)
2088 result = runCmd('devtool status')
2089 self.assertNotIn(recipe, result.output)
2090 self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after resetting')
2091
1546 def test_devtool_layer_plugins(self): 2092 def test_devtool_layer_plugins(self):
1547 """Test that devtool can use plugins from other layers. 2093 """Test that devtool can use plugins from other layers.
1548 2094
@@ -1561,7 +2107,15 @@ class DevtoolUpgradeTests(DevtoolBase):
1561 for p in paths: 2107 for p in paths:
1562 dstdir = os.path.join(dstdir, p) 2108 dstdir = os.path.join(dstdir, p)
1563 if not os.path.exists(dstdir): 2109 if not os.path.exists(dstdir):
1564 os.makedirs(dstdir) 2110 try:
2111 os.makedirs(dstdir)
2112 except PermissionError:
2113 return False
2114 except OSError as e:
2115 if e.errno == errno.EROFS:
2116 return False
2117 else:
2118                         raise
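            # A PermissionError or a read-only filesystem (EROFS) means this
            # search path cannot be written to, so it is skipped rather than
            # failing the test; this assumes 'errno' is imported at module
            # level.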
1565 if p == "lib": 2119 if p == "lib":
1566 # Can race with other tests 2120 # Can race with other tests
1567 self.add_command_to_tearDown('rmdir --ignore-fail-on-non-empty %s' % dstdir) 2121 self.add_command_to_tearDown('rmdir --ignore-fail-on-non-empty %s' % dstdir)
@@ -1569,8 +2123,12 @@ class DevtoolUpgradeTests(DevtoolBase):
1569 self.track_for_cleanup(dstdir) 2123 self.track_for_cleanup(dstdir)
1570 dstfile = os.path.join(dstdir, os.path.basename(srcfile)) 2124 dstfile = os.path.join(dstdir, os.path.basename(srcfile))
1571 if srcfile != dstfile: 2125 if srcfile != dstfile:
1572 shutil.copy(srcfile, dstfile) 2126 try:
2127 shutil.copy(srcfile, dstfile)
2128 except PermissionError:
2129 return False
1573 self.track_for_cleanup(dstfile) 2130 self.track_for_cleanup(dstfile)
2131 return True
1574 2132
1575 def test_devtool_load_plugin(self): 2133 def test_devtool_load_plugin(self):
1576 """Test that devtool loads only the first found plugin in BBPATH.""" 2134 """Test that devtool loads only the first found plugin in BBPATH."""
@@ -1588,15 +2146,17 @@ class DevtoolUpgradeTests(DevtoolBase):
1588 plugincontent = fh.readlines() 2146 plugincontent = fh.readlines()
1589 try: 2147 try:
1590 self.assertIn('meta-selftest', srcfile, 'wrong bbpath plugin found') 2148 self.assertIn('meta-selftest', srcfile, 'wrong bbpath plugin found')
1591 for path in searchpath: 2149 searchpath = [
1592 self._copy_file_with_cleanup(srcfile, path, 'lib', 'devtool') 2150 path for path in searchpath
2151 if self._copy_file_with_cleanup(srcfile, path, 'lib', 'devtool')
2152 ]
1593 result = runCmd("devtool --quiet count") 2153 result = runCmd("devtool --quiet count")
1594 self.assertEqual(result.output, '1') 2154 self.assertEqual(result.output, '1')
1595 result = runCmd("devtool --quiet multiloaded") 2155 result = runCmd("devtool --quiet multiloaded")
1596 self.assertEqual(result.output, "no") 2156 self.assertEqual(result.output, "no")
1597 for path in searchpath: 2157 for path in searchpath:
1598 result = runCmd("devtool --quiet bbdir") 2158 result = runCmd("devtool --quiet bbdir")
1599 self.assertEqual(result.output, path) 2159 self.assertEqual(os.path.realpath(result.output), os.path.realpath(path))
1600 os.unlink(os.path.join(result.output, 'lib', 'devtool', 'bbpath.py')) 2160 os.unlink(os.path.join(result.output, 'lib', 'devtool', 'bbpath.py'))
1601 finally: 2161 finally:
1602 with open(srcfile, 'w') as fh: 2162 with open(srcfile, 'w') as fh:
@@ -1777,6 +2337,52 @@ class DevtoolUpgradeTests(DevtoolBase):
1777 if files: 2337 if files:
1778 self.fail('Unexpected file(s) copied next to bbappend: %s' % ', '.join(files)) 2338 self.fail('Unexpected file(s) copied next to bbappend: %s' % ', '.join(files))
1779 2339
2340 def test_devtool_finish_update_patch(self):
2341 # This test uses a modified version of the sysdig recipe from meta-oe.
2342 # - The patches have been renamed.
2343 # - The dependencies are commented out since the recipe is not being
2344 # built.
2345 #
2346 # The sysdig recipe is interesting in that it fetches two different Git
2347         # repositories, and there are patches for both. Because devtool uses
2348         # Git submodules to keep track of the second repository, it will
2349         # create commits that are marked to be ignored.
2350 #
2351 # This test will verify that the ignored commits actually are ignored
2352 # when a commit in between is modified. It will also verify that the
2353 # updated patch keeps its original name.
2354
2355 # Check preconditions
2356 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
2357 # Try modifying a recipe
2358 self.track_for_cleanup(self.workspacedir)
2359 recipe = 'sysdig-selftest'
2360 recipefile = get_bb_var('FILE', recipe)
2361 recipedir = os.path.dirname(recipefile)
2362 result = runCmd('git status --porcelain .', cwd=recipedir)
2363 if result.output.strip():
2364 self.fail('Recipe directory for %s contains uncommitted changes' % recipe)
2365 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
2366 self.track_for_cleanup(tempdir)
2367 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
2368 result = runCmd('devtool modify %s %s' % (recipe, tempdir))
2369 self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (recipedir, recipe, recipe, os.path.basename(recipefile)))
2370 self.assertExists(os.path.join(tempdir, 'CMakeLists.txt'), 'Extracted source could not be found')
2371 # Make a change to one of the existing commits
2372 result = runCmd('echo "# A comment " >> CMakeLists.txt', cwd=tempdir)
2373 result = runCmd('git status --porcelain', cwd=tempdir)
2374 self.assertIn('M CMakeLists.txt', result.output)
2375 result = runCmd('git commit --fixup HEAD^ CMakeLists.txt', cwd=tempdir)
2376 result = runCmd('git show -s --format=%s', cwd=tempdir)
2377 self.assertIn('fixup! cmake: Pass PROBE_NAME via CFLAGS', result.output)
2378 result = runCmd('GIT_SEQUENCE_EDITOR=true git rebase -i --autosquash devtool-base', cwd=tempdir)
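        # GIT_SEQUENCE_EDITOR=true accepts the generated rebase todo list
        # unmodified, so --autosquash folds the fixup! commit into the
        # original 'cmake: Pass PROBE_NAME via CFLAGS' commit without an
        # interactive editor.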
2379 result = runCmd('devtool finish %s meta-selftest' % recipe)
2380 result = runCmd('devtool status')
2381 self.assertNotIn(recipe, result.output, 'Recipe should have been reset by finish but wasn\'t')
2382 self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after finish')
2383 expected_status = [(' M', '.*/0099-cmake-Pass-PROBE_NAME-via-CFLAGS.patch$')]
2384 self._check_repo_status(recipedir, expected_status)
2385
1780 def test_devtool_rename(self): 2386 def test_devtool_rename(self):
1781 # Check preconditions 2387 # Check preconditions
1782 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory') 2388 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
@@ -1808,12 +2414,11 @@ class DevtoolUpgradeTests(DevtoolBase):
1808 newsrctree = os.path.join(self.workspacedir, 'sources', newrecipename) 2414 newsrctree = os.path.join(self.workspacedir, 'sources', newrecipename)
1809 self.assertExists(newsrctree, 'Source directory not renamed') 2415 self.assertExists(newsrctree, 'Source directory not renamed')
1810 checkvars = {} 2416 checkvars = {}
1811 checkvars['S'] = '${WORKDIR}/%s-%s' % (recipename, recipever) 2417 checkvars['S'] = '${UNPACKDIR}/%s-%s' % (recipename, recipever)
1812 checkvars['SRC_URI'] = url 2418 checkvars['SRC_URI'] = url
1813 self._test_recipe_contents(newrecipefile, checkvars, []) 2419 self._test_recipe_contents(newrecipefile, checkvars, [])
1814 # Try again - change just name this time 2420 # Try again - change just name this time
1815 result = runCmd('devtool reset -n %s' % newrecipename) 2421 result = runCmd('devtool reset -n %s' % newrecipename)
1816 shutil.rmtree(newsrctree)
1817 add_recipe() 2422 add_recipe()
1818 newrecipefile = os.path.join(self.workspacedir, 'recipes', newrecipename, '%s_%s.bb' % (newrecipename, recipever)) 2423 newrecipefile = os.path.join(self.workspacedir, 'recipes', newrecipename, '%s_%s.bb' % (newrecipename, recipever))
1819 result = runCmd('devtool rename %s %s' % (recipename, newrecipename)) 2424 result = runCmd('devtool rename %s %s' % (recipename, newrecipename))
@@ -1821,19 +2426,18 @@ class DevtoolUpgradeTests(DevtoolBase):
1821 self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipename), 'Old recipe directory still exists') 2426 self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipename), 'Old recipe directory still exists')
1822 self.assertExists(os.path.join(self.workspacedir, 'sources', newrecipename), 'Source directory not renamed') 2427 self.assertExists(os.path.join(self.workspacedir, 'sources', newrecipename), 'Source directory not renamed')
1823 checkvars = {} 2428 checkvars = {}
1824 checkvars['S'] = '${WORKDIR}/%s-${PV}' % recipename 2429 checkvars['S'] = '${UNPACKDIR}/%s-${PV}' % recipename
1825 checkvars['SRC_URI'] = url.replace(recipever, '${PV}') 2430 checkvars['SRC_URI'] = url.replace(recipever, '${PV}')
1826 self._test_recipe_contents(newrecipefile, checkvars, []) 2431 self._test_recipe_contents(newrecipefile, checkvars, [])
1827 # Try again - change just version this time 2432 # Try again - change just version this time
1828 result = runCmd('devtool reset -n %s' % newrecipename) 2433 result = runCmd('devtool reset -n %s' % newrecipename)
1829 shutil.rmtree(newsrctree)
1830 add_recipe() 2434 add_recipe()
1831 newrecipefile = os.path.join(self.workspacedir, 'recipes', recipename, '%s_%s.bb' % (recipename, newrecipever)) 2435 newrecipefile = os.path.join(self.workspacedir, 'recipes', recipename, '%s_%s.bb' % (recipename, newrecipever))
1832 result = runCmd('devtool rename %s -V %s' % (recipename, newrecipever)) 2436 result = runCmd('devtool rename %s -V %s' % (recipename, newrecipever))
1833 self.assertExists(newrecipefile, 'Recipe file not renamed') 2437 self.assertExists(newrecipefile, 'Recipe file not renamed')
1834 self.assertExists(os.path.join(self.workspacedir, 'sources', recipename), 'Source directory no longer exists') 2438 self.assertExists(os.path.join(self.workspacedir, 'sources', recipename), 'Source directory no longer exists')
1835 checkvars = {} 2439 checkvars = {}
1836 checkvars['S'] = '${WORKDIR}/${BPN}-%s' % recipever 2440 checkvars['S'] = '${UNPACKDIR}/${BPN}-%s' % recipever
1837 checkvars['SRC_URI'] = url 2441 checkvars['SRC_URI'] = url
1838 self._test_recipe_contents(newrecipefile, checkvars, []) 2442 self._test_recipe_contents(newrecipefile, checkvars, [])
1839 2443
@@ -1858,8 +2462,9 @@ class DevtoolUpgradeTests(DevtoolBase):
1858 Expected: devtool modify is able to checkout the source of the kernel 2462 Expected: devtool modify is able to checkout the source of the kernel
1859 and modification to the source and configurations are reflected 2463 and modification to the source and configurations are reflected
1860 when building the kernel. 2464 when building the kernel.
1861 """ 2465 """
1862 kernel_provider = get_bb_var('PREFERRED_PROVIDER_virtual/kernel') 2466 kernel_provider = self.td['PREFERRED_PROVIDER_virtual/kernel']
2467
1863 # Clean up the environment 2468 # Clean up the environment
1864 bitbake('%s -c clean' % kernel_provider) 2469 bitbake('%s -c clean' % kernel_provider)
1865 tempdir = tempfile.mkdtemp(prefix='devtoolqa') 2470 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
@@ -1886,33 +2491,540 @@ class DevtoolUpgradeTests(DevtoolBase):
1886 self.assertExists(os.path.join(tempdir, 'Makefile'), 'Extracted source could not be found') 2491 self.assertExists(os.path.join(tempdir, 'Makefile'), 'Extracted source could not be found')
1887 #Step 4.2 2492 #Step 4.2
1888 configfile = os.path.join(tempdir,'.config') 2493 configfile = os.path.join(tempdir,'.config')
1889 diff = runCmd('diff %s %s' % (tmpconfig, configfile)) 2494 runCmd('diff %s %s' % (tmpconfig, configfile))
1890 self.assertEqual(0,diff.status,'Kernel .config file is not the same using bitbake and devtool') 2495
1891 #Step 4.3 2496 #Step 4.3
1892 #NOTE: virtual/kernel is mapped to kernel_provider 2497 #NOTE: virtual/kernel is mapped to kernel_provider
1893 result = runCmd('devtool build %s' % kernel_provider) 2498 runCmd('devtool build %s' % kernel_provider)
1894 self.assertEqual(0,result.status,'Cannot build kernel using `devtool build`')
1895 kernelfile = os.path.join(get_bb_var('KBUILD_OUTPUT', kernel_provider), 'vmlinux') 2499 kernelfile = os.path.join(get_bb_var('KBUILD_OUTPUT', kernel_provider), 'vmlinux')
1896         self.assertExists(kernelfile, 'Kernel was not built correctly') 2500
1897 2501
1898 #Modify the kernel source 2502 #Modify the kernel source
1899 modfile = os.path.join(tempdir,'arch/x86/boot/header.S') 2503 modfile = os.path.join(tempdir, 'init/version.c')
1900 modstring = "Use a boot loader. Devtool testing." 2504 # Moved to uts.h in 6.1 onwards
1901 modapplied = runCmd("sed -i 's/Use a boot loader./%s/' %s" % (modstring, modfile)) 2505 modfile2 = os.path.join(tempdir, 'include/linux/uts.h')
1902 self.assertEqual(0,modapplied.status,'Modification to %s on kernel source failed' % modfile) 2506 runCmd("sed -i 's/Linux/LiNuX/g' %s %s" % (modfile, modfile2))
2507
1903 #Modify the configuration 2508 #Modify the configuration
1904 codeconfigfile = os.path.join(tempdir,'.config.new') 2509 codeconfigfile = os.path.join(tempdir, '.config.new')
1905 modconfopt = "CONFIG_SG_POOL=n" 2510 modconfopt = "CONFIG_SG_POOL=n"
1906 modconf = runCmd("sed -i 's/CONFIG_SG_POOL=y/%s/' %s" % (modconfopt, codeconfigfile)) 2511 runCmd("sed -i 's/CONFIG_SG_POOL=y/%s/' %s" % (modconfopt, codeconfigfile))
1907 self.assertEqual(0,modconf.status,'Modification to %s failed' % codeconfigfile) 2512
1908 #Build again kernel with devtool 2513 #Build again kernel with devtool
1909 rebuild = runCmd('devtool build %s' % kernel_provider) 2514 runCmd('devtool build %s' % kernel_provider)
1910 self.assertEqual(0,rebuild.status,'Fail to build kernel after modification of source and config') 2515
1911 #Step 4.4 2516 #Step 4.4
1912 bzimagename = 'bzImage-' + get_bb_var('KERNEL_VERSION_NAME', kernel_provider) 2517 runCmd("grep '%s' %s" % ('LiNuX', kernelfile))
1913 bzimagefile = os.path.join(get_bb_var('D', kernel_provider),'boot', bzimagename) 2518
1914 checkmodcode = runCmd("grep '%s' %s" % (modstring, bzimagefile))
1915 self.assertEqual(0,checkmodcode.status,'Modification on kernel source failed')
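        #The modified string is embedded in the version banner inside vmlinux,
        #so a plain grep on the binary is enough to confirm the change was
        #built in (runCmd asserts a zero exit status).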
1916 #Step 4.5 2519 #Step 4.5
1917 checkmodconfg = runCmd("grep %s %s" % (modconfopt, codeconfigfile)) 2520 runCmd("grep %s %s" % (modconfopt, codeconfigfile))
1918 self.assertEqual(0,checkmodconfg.status,'Modification to configuration file failed') 2521
2522
2523class DevtoolIdeSdkTests(DevtoolBase):
2524 def _write_bb_config(self, recipe_names):
2525 """Helper to write the bitbake local.conf file"""
2526 conf_lines = [
2527 'IMAGE_CLASSES += "image-combined-dbg"',
2528 'IMAGE_GEN_DEBUGFS = "1"',
2529 'IMAGE_INSTALL:append = " gdbserver %s"' % ' '.join(
2530 [r + '-ptest' for r in recipe_names])
2531 ]
2532 self.write_config("\n".join(conf_lines))
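        # For recipe_names = ["cmake-example"] the generated fragment would
        # read (illustrative):
        #   IMAGE_CLASSES += "image-combined-dbg"
        #   IMAGE_GEN_DEBUGFS = "1"
        #   IMAGE_INSTALL:append = " gdbserver cmake-example-ptest"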
2533
2534 def _check_workspace(self):
2535 """Check if a workspace directory is available and setup the cleanup"""
2536 self.assertTrue(not os.path.exists(self.workspacedir),
2537 'This test cannot be run with a workspace directory under the build directory')
2538 self.track_for_cleanup(self.workspacedir)
2539 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
2540
2541 def _workspace_scripts_dir(self, recipe_name):
2542 return os.path.realpath(os.path.join(self.builddir, 'workspace', 'ide-sdk', recipe_name, 'scripts'))
2543
2544 def _sources_scripts_dir(self, src_dir):
2545 return os.path.realpath(os.path.join(src_dir, 'oe-scripts'))
2546
2547 def _workspace_gdbinit_dir(self, recipe_name):
2548 return os.path.realpath(os.path.join(self.builddir, 'workspace', 'ide-sdk', recipe_name, 'scripts', 'gdbinit'))
2549
2550 def _sources_gdbinit_dir(self, src_dir):
2551 return os.path.realpath(os.path.join(src_dir, 'oe-gdbinit'))
2552
2553 def _devtool_ide_sdk_recipe(self, recipe_name, build_file, testimage):
2554 """Setup a recipe for working with devtool ide-sdk
2555
2556 Basically devtool modify -x followed by some tests
2557 """
2558 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
2559 self.track_for_cleanup(tempdir)
2560 self.add_command_to_tearDown('bitbake -c clean %s' % recipe_name)
2561
2562 result = runCmd('devtool modify %s -x %s --debug-build' % (recipe_name, tempdir))
2563 self.assertExists(os.path.join(tempdir, build_file),
2564 'Extracted source could not be found')
2565 self.assertExists(os.path.join(self.workspacedir, 'conf',
2566 'layer.conf'), 'Workspace directory not created')
2567 matches = glob.glob(os.path.join(self.workspacedir,
2568 'appends', recipe_name + '.bbappend'))
2569 self.assertTrue(matches, 'bbappend not created %s' % result.output)
2570
2571 # Test devtool status
2572 result = runCmd('devtool status')
2573 self.assertIn(recipe_name, result.output)
2574 self.assertIn(tempdir, result.output)
2575 self._check_src_repo(tempdir)
2576
2577 # Usually devtool ide-sdk would initiate the build of the SDK.
2578         # But there is a circular dependency here: Qemu has to be started first so that its IP can be passed to devtool ide-sdk.
2579 if testimage:
2580 bitbake("%s qemu-native qemu-helper-native" % testimage)
2581 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
2582 self.add_command_to_tearDown('bitbake -c clean %s' % testimage)
2583 self.add_command_to_tearDown(
2584 'rm -f %s/%s*' % (deploy_dir_image, testimage))
2585
2586 return tempdir
2587
2588 def _get_recipe_ids(self, recipe_name):
2589 """IDs needed to write recipe specific config entries into IDE config files"""
2590 package_arch = get_bb_var('PACKAGE_ARCH', recipe_name)
2591 recipe_id = recipe_name + "-" + package_arch
2592 recipe_id_pretty = recipe_name + ": " + package_arch
2593 return (recipe_id, recipe_id_pretty)
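        # e.g. for 'cmake-example' built for a core2-64 machine this yields
        # ('cmake-example-core2-64', 'cmake-example: core2-64'); the actual
        # PACKAGE_ARCH depends on the configured MACHINE (illustrative only).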
2594
2595 def _verify_install_script_code(self, tempdir, recipe_name):
2596         """Verify the scripts referenced by the tasks.json file are fine.
2597 
2598         This function does not depend on Qemu. It therefore verifies that the
2599         scripts exist and that the delete step works as expected, but it does
2600         not try to deploy to Qemu.
2601 """
2602 recipe_id, recipe_id_pretty = self._get_recipe_ids(recipe_name)
2603 with open(os.path.join(tempdir, '.vscode', 'tasks.json')) as tasks_j:
2604 tasks_d = json.load(tasks_j)
2605 tasks = tasks_d["tasks"]
2606 task_install = next(
2607 (task for task in tasks if task["label"] == "install && deploy-target %s" % recipe_id_pretty), None)
2608 self.assertIsNot(task_install, None)
2609         # Check only that the install_and_deploy script exists; actually running the deploy would require e.g. Qemu running.
2610 i_and_d_script = "install_and_deploy_" + recipe_id
2611 i_and_d_script_path = os.path.join(
2612 self._workspace_scripts_dir(recipe_name), i_and_d_script)
2613 self.assertExists(i_and_d_script_path)
2614
2615 def _devtool_ide_sdk_qemu(self, tempdir, qemu, recipe_name, example_exe):
2616 """Verify deployment and execution in Qemu system work for one recipe.
2617
2618 This function checks the entire SDK workflow: changing the code, recompiling
2619 it and deploying it back to Qemu, and checking that the changes have been
2620 incorporated into the provided binaries. It also runs the tests of the recipe.
2621 """
2622 recipe_id, _ = self._get_recipe_ids(recipe_name)
2623 i_and_d_script = "install_and_deploy_" + recipe_id
2624 install_deploy_cmd = os.path.join(
2625 self._workspace_scripts_dir(recipe_name), i_and_d_script)
2626 self.assertExists(install_deploy_cmd,
2627 '%s script not found' % install_deploy_cmd)
2628 runCmd(install_deploy_cmd)
2629
2630 MAGIC_STRING_ORIG = "Magic: 123456789"
2631 MAGIC_STRING_NEW = "Magic: 987654321"
2632 ptest_cmd = "ptest-runner " + recipe_name
2633
2634 # validate that SSH is working
2635 status, _ = qemu.run("uname")
2636 self.assertEqual(
2637 status, 0, msg="Failed to connect to the SSH server on Qemu")
2638
2639 # Verify the unmodified example prints the magic string
2640 status, output = qemu.run(example_exe)
2641 self.assertEqual(status, 0, msg="%s failed: %s" %
2642 (example_exe, output))
2643 self.assertIn(MAGIC_STRING_ORIG, output)
2644
2645 # Verify the unmodified ptests work
2646 status, output = qemu.run(ptest_cmd)
2647 self.assertEqual(status, 0, msg="%s failed: %s" % (ptest_cmd, output))
2648 self.assertIn("PASS: cpp-example-lib", output)
2649
2650 # Verify remote debugging works
2651 self._gdb_cross_debugging(
2652 qemu, recipe_name, example_exe, MAGIC_STRING_ORIG)
2653
2654 # Replace the Magic String in the code, compile and deploy to Qemu
2655 cpp_example_lib_hpp = os.path.join(tempdir, 'cpp-example-lib.hpp')
2656 with open(cpp_example_lib_hpp, 'r') as file:
2657 cpp_code = file.read()
2658 cpp_code = cpp_code.replace(MAGIC_STRING_ORIG, MAGIC_STRING_NEW)
2659 with open(cpp_example_lib_hpp, 'w') as file:
2660 file.write(cpp_code)
2661 runCmd(install_deploy_cmd, cwd=tempdir)
2662
2663 # Verify the modified example prints the modified magic string
2664 status, output = qemu.run(example_exe)
2665 self.assertEqual(status, 0, msg="%s failed: %s" %
2666 (example_exe, output))
2667 self.assertNotIn(MAGIC_STRING_ORIG, output)
2668 self.assertIn(MAGIC_STRING_NEW, output)
2669
2670 # Verify the modified example ptests work
2671 status, output = qemu.run(ptest_cmd)
2672 self.assertEqual(status, 0, msg="%s failed: %s" % (ptest_cmd, output))
2673 self.assertIn("PASS: cpp-example-lib", output)
2674
2675         # Verify remote debugging works with the modified magic string
2676 self._gdb_cross_debugging(
2677 qemu, recipe_name, example_exe, MAGIC_STRING_NEW)
2678
2679 def _gdb_cross(self):
2680 """Verify gdb-cross is provided by devtool ide-sdk"""
2681 target_arch = self.td["TARGET_ARCH"]
2682 target_sys = self.td["TARGET_SYS"]
2683 gdb_recipe = "gdb-cross-" + target_arch
2684 gdb_binary = target_sys + "-gdb"
2685
2686 native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", gdb_recipe)
2687 r = runCmd("%s --version" % gdb_binary,
2688 native_sysroot=native_sysroot, target_sys=target_sys)
2689 self.assertEqual(r.status, 0)
2690 self.assertIn("GNU gdb", r.output)
2691
2692 def _gdb_cross_debugging(self, qemu, recipe_name, example_exe, magic_string):
2693 """Verify gdb-cross is working
2694
2695 Test remote debugging:
2696 break main
2697 run
2698 continue
2699 break CppExample::print_json()
2700 continue
2701 print CppExample::test_string.compare("cpp-example-lib Magic: 123456789")
2702 $1 = 0
2703 print CppExample::test_string.compare("cpp-example-lib Magic: 123456789aaa")
2704 $2 = -3
2705 list cpp-example-lib.hpp:13,13
2706 13 inline static const std::string test_string = "cpp-example-lib Magic: 123456789";
2707 continue
2708 """
2709 sshargs = '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
2710 gdbserver_script = os.path.join(self._workspace_scripts_dir(
2711 recipe_name), 'gdbserver_1234_usr-bin-' + example_exe + '_m')
2712 gdb_script = os.path.join(self._workspace_scripts_dir(
2713 recipe_name), 'gdb_1234_usr-bin-' + example_exe)
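        # The script names encode the gdbserver port (1234) and the binary
        # path with '/' flattened to '-' (/usr/bin/<exe> -> usr-bin-<exe>);
        # the trailing '_m' is assumed to select gdbserver's multi mode.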
2714
2715 # Start a gdbserver
2716 r = runCmd(gdbserver_script)
2717 self.assertEqual(r.status, 0)
2718
2719 # Check there is a gdbserver running
2720 r = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, 'ps'))
2721 self.assertEqual(r.status, 0)
2722 self.assertIn("gdbserver ", r.output)
2723
2724 # Check the pid file is correct
2725 test_cmd = "cat /proc/$(cat /tmp/gdbserver_1234_usr-bin-" + \
2726 example_exe + "/pid)/cmdline"
2727 r = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, test_cmd))
2728 self.assertEqual(r.status, 0)
2729 self.assertIn("gdbserver", r.output)
2730
2731 # Test remote debugging works
2732 gdb_batch_cmd = " --batch -ex 'break main' -ex 'run'"
2733 gdb_batch_cmd += " -ex 'break CppExample::print_json()' -ex 'continue'"
2734 gdb_batch_cmd += " -ex 'print CppExample::test_string.compare(\"cpp-example-lib %s\")'" % magic_string
2735 gdb_batch_cmd += " -ex 'print CppExample::test_string.compare(\"cpp-example-lib %saaa\")'" % magic_string
2736 gdb_batch_cmd += " -ex 'list cpp-example-lib.hpp:13,13'"
2737 gdb_batch_cmd += " -ex 'continue'"
2738 r = runCmd(gdb_script + gdb_batch_cmd)
2739 self.logger.debug("%s %s returned: %s", gdb_script,
2740 gdb_batch_cmd, r.output)
2741 self.assertEqual(r.status, 0)
2742 self.assertIn("Breakpoint 1, main", r.output)
2743         self.assertIn("$1 = 0", r.output) # test_string.compare: equal
2744         self.assertIn("$2 = -3", r.output) # test_string.compare: argument is longer
2745 self.assertIn(
2746 'inline static const std::string test_string = "cpp-example-lib %s";' % magic_string, r.output)
2747 self.assertIn("exited normally", r.output)
2748
2749 # Stop the gdbserver
2750 r = runCmd(gdbserver_script + ' stop')
2751 self.assertEqual(r.status, 0)
2752
2753 # Check there is no gdbserver running
2754 r = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, 'ps'))
2755 self.assertEqual(r.status, 0)
2756 self.assertNotIn("gdbserver ", r.output)
2757
2758 def _verify_cmake_preset(self, tempdir):
2759 """Verify the generated cmake preset works as expected
2760
2761 Check if compiling works
2762 Check if unit tests can be executed in qemu (not qemu-system)
2763 """
2764 with open(os.path.join(tempdir, 'CMakeUserPresets.json')) as cmake_preset_j:
2765 cmake_preset_d = json.load(cmake_preset_j)
2766 config_presets = cmake_preset_d["configurePresets"]
2767 self.assertEqual(len(config_presets), 1)
2768 cmake_exe = config_presets[0]["cmakeExecutable"]
2769 preset_name = config_presets[0]["name"]
2770
2771 # Verify the wrapper for cmake native is available
2772 self.assertExists(cmake_exe)
2773
2774 # Verify the cmake preset generated by devtool ide-sdk is available
2775 result = runCmd('%s --list-presets' % cmake_exe, cwd=tempdir)
2776 self.assertIn(preset_name, result.output)
2777
2778         # Verify cmake re-uses the .o files compiled by bitbake
2779 result = runCmd('%s --build --preset %s' %
2780 (cmake_exe, preset_name), cwd=tempdir)
2781 self.assertIn("ninja: no work to do.", result.output)
2782
2783 # Verify the unit tests work (in Qemu user mode)
2784 result = runCmd('%s --build --preset %s --target test' %
2785 (cmake_exe, preset_name), cwd=tempdir)
2786 self.assertIn("100% tests passed", result.output)
2787
2788 # Verify re-building and testing works again
2789 result = runCmd('%s --build --preset %s --target clean' %
2790 (cmake_exe, preset_name), cwd=tempdir)
2791 self.assertIn("Cleaning", result.output)
2792 result = runCmd('%s --build --preset %s' %
2793 (cmake_exe, preset_name), cwd=tempdir)
2794 self.assertIn("Building", result.output)
2795 self.assertIn("Linking", result.output)
2796 result = runCmd('%s --build --preset %s --target test' %
2797 (cmake_exe, preset_name), cwd=tempdir)
2798 self.assertIn("Running tests...", result.output)
2799 self.assertIn("100% tests passed", result.output)
2800
2801 @OETestTag("runqemu")
2802 def test_devtool_ide_sdk_none_qemu(self):
2803 """Start qemu-system and run tests for multiple recipes. ide=none is used."""
2804 recipe_names = ["cmake-example", "meson-example"]
2805 testimage = "oe-selftest-image"
2806
2807 self._check_workspace()
2808 self._write_bb_config(recipe_names)
2809 self._check_runqemu_prerequisites()
2810
2811 # Verify deployment to Qemu (system mode) works
2812 bitbake(testimage)
2813 with runqemu(testimage, runqemuparams="nographic") as qemu:
2814 # cmake-example recipe
2815 recipe_name = "cmake-example"
2816 example_exe = "cmake-example"
2817 build_file = "CMakeLists.txt"
2818 tempdir = self._devtool_ide_sdk_recipe(
2819 recipe_name, build_file, testimage)
2820 bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@%s -c --ide=none' % (
2821 recipe_name, testimage, qemu.ip)
2822 runCmd(bitbake_sdk_cmd)
2823 self._gdb_cross()
2824 self._verify_cmake_preset(tempdir)
2825 self._devtool_ide_sdk_qemu(tempdir, qemu, recipe_name, example_exe)
2826 # Verify the oe-scripts sym-link is valid
2827 self.assertEqual(self._workspace_scripts_dir(
2828 recipe_name), self._sources_scripts_dir(tempdir))
2829
2830 # meson-example recipe
2831 recipe_name = "meson-example"
2832 example_exe = "mesonex"
2833 build_file = "meson.build"
2834 tempdir = self._devtool_ide_sdk_recipe(
2835 recipe_name, build_file, testimage)
2836 bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@%s -c --ide=none' % (
2837 recipe_name, testimage, qemu.ip)
2838 runCmd(bitbake_sdk_cmd)
2839 self._gdb_cross()
2840 self._devtool_ide_sdk_qemu(tempdir, qemu, recipe_name, example_exe)
2841 # Verify the oe-scripts sym-link is valid
2842 self.assertEqual(self._workspace_scripts_dir(
2843 recipe_name), self._sources_scripts_dir(tempdir))
2844
2845 def test_devtool_ide_sdk_code_cmake(self):
2846 """Verify a cmake recipe works with ide=code mode"""
2847 recipe_name = "cmake-example"
2848 build_file = "CMakeLists.txt"
2849 testimage = "oe-selftest-image"
2850
2851 self._check_workspace()
2852 self._write_bb_config([recipe_name])
2853 tempdir = self._devtool_ide_sdk_recipe(
2854 recipe_name, build_file, testimage)
2855 bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@192.168.17.17 -c --ide=code' % (
2856 recipe_name, testimage)
2857 runCmd(bitbake_sdk_cmd)
2858 self._verify_cmake_preset(tempdir)
2859 self._verify_install_script_code(tempdir, recipe_name)
2860 self._gdb_cross()
2861
2862 def test_devtool_ide_sdk_code_meson(self):
2863 """Verify a meson recipe works with ide=code mode"""
2864 recipe_name = "meson-example"
2865 build_file = "meson.build"
2866 testimage = "oe-selftest-image"
2867
2868 self._check_workspace()
2869 self._write_bb_config([recipe_name])
2870 tempdir = self._devtool_ide_sdk_recipe(
2871 recipe_name, build_file, testimage)
2872 bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@192.168.17.17 -c --ide=code' % (
2873 recipe_name, testimage)
2874 runCmd(bitbake_sdk_cmd)
2875
2876 with open(os.path.join(tempdir, '.vscode', 'settings.json')) as settings_j:
2877 settings_d = json.load(settings_j)
2878 meson_exe = settings_d["mesonbuild.mesonPath"]
2879 meson_build_folder = settings_d["mesonbuild.buildFolder"]
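            # settings.json is expected to contain entries along these lines
            # (illustrative):
            #   { "mesonbuild.mesonPath": "<workspace>/scripts/meson",
            #     "mesonbuild.buildFolder": "<srctree>/build" }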
2880
2881 # Verify the wrapper for meson native is available
2882 self.assertExists(meson_exe)
2883
2884         # Verify meson re-uses the .o files compiled by bitbake
2885 result = runCmd('%s compile -C %s' %
2886 (meson_exe, meson_build_folder), cwd=tempdir)
2887 self.assertIn("ninja: no work to do.", result.output)
2888
2889 # Verify the unit tests work (in Qemu)
2890 runCmd('%s test -C %s' % (meson_exe, meson_build_folder), cwd=tempdir)
2891
2892 # Verify re-building and testing works again
2893 result = runCmd('%s compile -C %s --clean' %
2894 (meson_exe, meson_build_folder), cwd=tempdir)
2895 self.assertIn("Cleaning...", result.output)
2896 result = runCmd('%s compile -C %s' %
2897 (meson_exe, meson_build_folder), cwd=tempdir)
2898 self.assertIn("Linking target", result.output)
2899 runCmd('%s test -C %s' % (meson_exe, meson_build_folder), cwd=tempdir)
2900
2901 self._verify_install_script_code(tempdir, recipe_name)
2902 self._gdb_cross()
2903
2904 def test_devtool_ide_sdk_shared_sysroots(self):
2905 """Verify the shared sysroot SDK"""
2906
2907 # Handle the workspace (which is not needed by this test case)
2908 self._check_workspace()
2909
2910 result_init = runCmd(
2911 'devtool ide-sdk -m shared oe-selftest-image cmake-example meson-example --ide=code')
2912 bb_vars = get_bb_vars(
2913 ['REAL_MULTIMACH_TARGET_SYS', 'DEPLOY_DIR_IMAGE', 'COREBASE'], "meta-ide-support")
2914 environment_script = 'environment-setup-%s' % bb_vars['REAL_MULTIMACH_TARGET_SYS']
2915 deploydir = bb_vars['DEPLOY_DIR_IMAGE']
2916 environment_script_path = os.path.join(deploydir, environment_script)
2917 cpp_example_src = os.path.join(
2918 bb_vars['COREBASE'], 'meta-selftest', 'recipes-test', 'cpp', 'files')
2919
2920 # Verify the cross environment script is available
2921 self.assertExists(environment_script_path)
2922
2923 def runCmdEnv(cmd, cwd):
2924 cmd = '/bin/sh -c ". %s > /dev/null && %s"' % (
2925 environment_script_path, cmd)
2926             return runCmd(cmd, cwd=cwd)
2927
2928 # Verify building the C++ example works with CMake
2929 tempdir_cmake = tempfile.mkdtemp(prefix='devtoolqa')
2930 self.track_for_cleanup(tempdir_cmake)
2931
2932 result_cmake = runCmdEnv("which cmake", cwd=tempdir_cmake)
2933 cmake_native = os.path.normpath(result_cmake.output.strip())
2934 self.assertExists(cmake_native)
2935
2936 runCmdEnv('cmake %s' % cpp_example_src, cwd=tempdir_cmake)
2937 runCmdEnv('cmake --build %s' % tempdir_cmake, cwd=tempdir_cmake)
2938
2939         # Verify the printed note really refers to a cmake executable
2940 cmake_native_code = ""
2941 for line in result_init.output.splitlines():
2942 m = re.search(r'"cmake.cmakePath": "(.*)"', line)
2943 if m:
2944 cmake_native_code = m.group(1)
2945 break
2946 self.assertExists(cmake_native_code)
2947 self.assertEqual(cmake_native, cmake_native_code)
2948
2949 # Verify building the C++ example works with Meson
2950 tempdir_meson = tempfile.mkdtemp(prefix='devtoolqa')
2951 self.track_for_cleanup(tempdir_meson)
2952
2953         result_meson = runCmdEnv("which meson", cwd=tempdir_meson)
2954         meson_native = os.path.normpath(result_meson.output.strip())
2955 self.assertExists(meson_native)
2956
2957 runCmdEnv('meson setup %s' % tempdir_meson, cwd=cpp_example_src)
2958 runCmdEnv('meson compile', cwd=tempdir_meson)
2959
2960 def test_devtool_ide_sdk_plugins(self):
2961 """Test that devtool ide-sdk can use plugins from other layers."""
2962
2963 # We need a workspace layer and a modified recipe (but no image)
2964 modified_recipe_name = "meson-example"
2965 modified_build_file = "meson.build"
2966 testimage = "oe-selftest-image"
2967 shared_recipe_name = "cmake-example"
2968
2969 self._check_workspace()
2970 self._write_bb_config([modified_recipe_name])
2971 tempdir = self._devtool_ide_sdk_recipe(
2972 modified_recipe_name, modified_build_file, None)
2973
2974 IDE_RE = re.compile(r'.*--ide \{(.*)\}.*')
2975
2976 def get_ides_from_help(help_str):
2977 m = IDE_RE.search(help_str)
2978 return m.group(1).split(',')
2979
2980 # verify the default plugins are available but the foo plugin is not
2981 result = runCmd('devtool ide-sdk -h')
2982 found_ides = get_ides_from_help(result.output)
2983 self.assertIn('code', found_ides)
2984 self.assertIn('none', found_ides)
2985 self.assertNotIn('foo', found_ides)
2986
2987 shared_config_file = os.path.join(tempdir, 'shared-config.txt')
2988 shared_config_str = 'Dummy shared IDE config'
2989 modified_config_file = os.path.join(tempdir, 'modified-config.txt')
2990 modified_config_str = 'Dummy modified IDE config'
2991
2992 # Generate a foo plugin in the workspace layer
2993 plugin_dir = os.path.join(
2994 self.workspacedir, 'lib', 'devtool', 'ide_plugins')
2995 os.makedirs(plugin_dir)
2996 plugin_code = 'from devtool.ide_plugins import IdeBase\n\n'
2997 plugin_code += 'class IdeFoo(IdeBase):\n'
2998 plugin_code += ' def setup_shared_sysroots(self, shared_env):\n'
2999 plugin_code += ' with open("%s", "w") as config_file:\n' % shared_config_file
3000 plugin_code += ' config_file.write("%s")\n\n' % shared_config_str
3001 plugin_code += ' def setup_modified_recipe(self, args, image_recipe, modified_recipe):\n'
3002 plugin_code += ' with open("%s", "w") as config_file:\n' % modified_config_file
3003 plugin_code += ' config_file.write("%s")\n\n' % modified_config_str
3004 plugin_code += 'def register_ide_plugin(ide_plugins):\n'
3005 plugin_code += ' ide_plugins["foo"] = IdeFoo\n'
3006
3007 plugin_py = os.path.join(plugin_dir, 'ide_foo.py')
3008 with open(plugin_py, 'w') as plugin_file:
3009 plugin_file.write(plugin_code)
3010
3011 # Verify the foo plugin is available as well
3012 result = runCmd('devtool ide-sdk -h')
3013 found_ides = get_ides_from_help(result.output)
3014 self.assertIn('code', found_ides)
3015 self.assertIn('none', found_ides)
3016 self.assertIn('foo', found_ides)
3017
3018 # Verify the foo plugin generates a shared config
3019 result = runCmd(
3020 'devtool ide-sdk -m shared --skip-bitbake --ide foo %s' % shared_recipe_name)
3021 with open(shared_config_file) as shared_config:
3022 shared_config_new = shared_config.read()
3023 self.assertEqual(shared_config_str, shared_config_new)
3024
3025 # Verify the foo plugin generates a modified config
3026 result = runCmd('devtool ide-sdk --skip-bitbake --ide foo %s %s' %
3027 (modified_recipe_name, testimage))
3028 with open(modified_config_file) as modified_config:
3029 modified_config_new = modified_config.read()
3030 self.assertEqual(modified_config_str, modified_config_new)
diff --git a/meta/lib/oeqa/selftest/cases/distrodata.py b/meta/lib/oeqa/selftest/cases/distrodata.py
index e1cfc3b621..f2c6124d70 100644
--- a/meta/lib/oeqa/selftest/cases/distrodata.py
+++ b/meta/lib/oeqa/selftest/cases/distrodata.py
@@ -1,11 +1,10 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5from oeqa.selftest.case import OESelftestTestCase 7from oeqa.selftest.case import OESelftestTestCase
6from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
7from oeqa.utils.decorators import testcase
8from oeqa.utils.ftools import write_file
9 8
10import oe.recipeutils 9import oe.recipeutils
11 10
@@ -18,13 +17,13 @@ class Distrodata(OESelftestTestCase):
18 Product: oe-core 17 Product: oe-core
19 Author: Alexander Kanavin <alex.kanavin@gmail.com> 18 Author: Alexander Kanavin <alex.kanavin@gmail.com>
20 """ 19 """
21 feature = 'LICENSE_FLAGS_WHITELIST += " commercial"\n' 20 feature = 'LICENSE_FLAGS_ACCEPTED += " commercial"\n'
22 self.write_config(feature) 21 self.write_config(feature)
23 22
24 pkgs = oe.recipeutils.get_recipe_upgrade_status() 23 pkggroups = oe.recipeutils.get_recipe_upgrade_status()
25 24
26 regressed_failures = [pkg[0] for pkg in pkgs if pkg[1] == 'UNKNOWN_BROKEN'] 25 regressed_failures = [pkg['pn'] for pkgs in pkggroups for pkg in pkgs if pkg['status'] == 'UNKNOWN_BROKEN']
27 regressed_successes = [pkg[0] for pkg in pkgs if pkg[1] == 'KNOWN_BROKEN'] 26 regressed_successes = [pkg['pn'] for pkgs in pkggroups for pkg in pkgs if pkg['status'] == 'KNOWN_BROKEN']
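        # get_recipe_upgrade_status() now returns groups of recipes (recipes
        # sharing include files are grouped together); each entry is a dict
        # with at least 'pn' and 'status' keys.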
28 msg = "" 27 msg = ""
29 if len(regressed_failures) > 0: 28 if len(regressed_failures) > 0:
30 msg = msg + """ 29 msg = msg + """
@@ -49,21 +48,21 @@ but their recipes claim otherwise by setting UPSTREAM_VERSION_UNKNOWN. Please re
49 Author: Alexander Kanavin <alex.kanavin@gmail.com> 48 Author: Alexander Kanavin <alex.kanavin@gmail.com>
50 """ 49 """
51 def is_exception(pkg): 50 def is_exception(pkg):
52 exceptions = ["packagegroup-", "initramfs-", "systemd-machine-units", "target-sdk-provides-dummy"] 51 exceptions = ["packagegroup-",]
53 for i in exceptions: 52 for i in exceptions:
54 if i in pkg: 53 if i in pkg:
55 return True 54 return True
56 return False 55 return False
57 56
58 def is_maintainer_exception(entry): 57 def is_maintainer_exception(entry):
59 exceptions = ["musl", "newlib", "linux-yocto", "linux-dummy", "mesa-gl", "libgfortran", 58 exceptions = ["musl", "newlib", "picolibc", "linux-yocto", "linux-dummy", "mesa-gl", "libgfortran", "libx11-compose-data",
60 "cve-update-db-native"] 59 "cve-update-nvd2-native", "barebox", "libglvnd"]
61 for i in exceptions: 60 for i in exceptions:
62 if i in entry: 61 if i in entry:
63 return True 62 return True
64 return False 63 return False
65 64
66 feature = 'require conf/distro/include/maintainers.inc\nLICENSE_FLAGS_WHITELIST += " commercial"\nPARSE_ALL_RECIPES = "1"\n' 65 feature = 'require conf/distro/include/maintainers.inc\nLICENSE_FLAGS_ACCEPTED += " commercial"\nPARSE_ALL_RECIPES = "1"\nPACKAGE_CLASSES = "package_ipk package_deb package_rpm"\n'
67 self.write_config(feature) 66 self.write_config(feature)
68 67
69 with bb.tinfoil.Tinfoil() as tinfoil: 68 with bb.tinfoil.Tinfoil() as tinfoil:
@@ -74,7 +73,7 @@ but their recipes claim otherwise by setting UPSTREAM_VERSION_UNKNOWN. Please re
74 73
75 missing_recipes = [] 74 missing_recipes = []
76 recipes = [] 75 recipes = []
77 prefix = "RECIPE_MAINTAINER_pn-" 76 prefix = "RECIPE_MAINTAINER:pn-"
78 77
79 # We could have used all_recipes() here, but this method will find 78 # We could have used all_recipes() here, but this method will find
80 # every recipe if we ever move to setting RECIPE_MAINTAINER in recipe files 79 # every recipe if we ever move to setting RECIPE_MAINTAINER in recipe files
@@ -116,3 +115,15 @@ The list of oe-core recipes with maintainers is empty. This may indicate that th
116 self.fail(""" 115 self.fail("""
117Unable to find recipes for the following entries in maintainers.inc: 116Unable to find recipes for the following entries in maintainers.inc:
118""" + "\n".join(['%s' % i for i in missing_recipes])) 117""" + "\n".join(['%s' % i for i in missing_recipes]))
118
119 def test_common_include_recipes(self):
120 """
121 Summary: Test that obtaining recipes that share includes between them returns a sane result
122 Expected: At least cmake and qemu entries are present in the output
123 Product: oe-core
124 Author: Alexander Kanavin <alex.kanavin@gmail.com>
125 """
126 recipes = oe.recipeutils.get_common_include_recipes()
127
128 self.assertIn({'qemu-system-native', 'qemu', 'qemu-native'}, recipes)
129 self.assertIn({'cmake-native', 'cmake'}, recipes)
diff --git a/meta/lib/oeqa/selftest/cases/efibootpartition.py b/meta/lib/oeqa/selftest/cases/efibootpartition.py
index a61cf9bcb3..fcfcdaf7e4 100644
--- a/meta/lib/oeqa/selftest/cases/efibootpartition.py
+++ b/meta/lib/oeqa/selftest/cases/efibootpartition.py
@@ -5,42 +5,30 @@
5# SPDX-License-Identifier: MIT 5# SPDX-License-Identifier: MIT
6# 6#
7 7
8import re
9
10from oeqa.selftest.case import OESelftestTestCase 8from oeqa.selftest.case import OESelftestTestCase
11from oeqa.utils.commands import bitbake, runqemu, get_bb_var 9from oeqa.utils.commands import bitbake, runqemu, get_bb_var
10from oeqa.core.decorator.data import skipIfNotMachine
11import oe.types
12 12
13class GenericEFITest(OESelftestTestCase): 13class GenericEFITest(OESelftestTestCase):
14 """EFI booting test class""" 14 """EFI booting test class"""
15 @skipIfNotMachine("qemux86-64", "test is qemux86-64 specific currently")
16 def test_boot_efi(self):
17 image = "core-image-minimal"
18 runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', image) or ""
19 cmd = "runqemu %s nographic serial wic ovmf" % (runqemu_params)
20 if oe.types.qemu_use_kvm(self.td.get('QEMU_USE_KVM', 0), self.td["TARGET_ARCH"]):
21 cmd += " kvm"
15 22
16 cmd_common = "runqemu nographic serial wic ovmf" 23 self.write_config("""
17 efi_provider = "systemd-boot" 24EFI_PROVIDER = "grub-efi"
18 image = "core-image-minimal" 25IMAGE_FSTYPES:pn-%s:append = " wic"
19 machine = "qemux86-64" 26MACHINE_FEATURES:append = " efi"
20 recipes_built = False
21
22 @classmethod
23 def setUpLocal(self):
24 super(GenericEFITest, self).setUpLocal(self)
25
26 self.write_config(self,
27"""
28EFI_PROVIDER = "%s"
29IMAGE_FSTYPES_pn-%s_append = " wic"
30MACHINE = "%s"
31MACHINE_FEATURES_append = " efi"
32WKS_FILE = "efi-bootdisk.wks.in" 27WKS_FILE = "efi-bootdisk.wks.in"
33IMAGE_INSTALL_append = " grub-efi systemd-boot kernel-image-bzimage" 28IMAGE_INSTALL:append = " grub-efi kernel-image-bzimage"
34""" 29"""
35% (self.efi_provider, self.image, self.machine)) 30% (image))
36 if not self.recipes_built:
37 bitbake("ovmf")
38 bitbake(self.image)
39 self.recipes_built = True
40 31
41 @classmethod 32 bitbake(image + " ovmf")
42 def test_boot_efi(self): 33 with runqemu(image, ssh=False, launch_cmd=cmd) as qemu:
43 """Test generic boot partition with qemu"""
44 cmd = "%s %s" % (self.cmd_common, self.machine)
45 with runqemu(self.image, ssh=False, launch_cmd=cmd) as qemu:
46 self.assertTrue(qemu.runner.logged, "Failed: %s" % cmd) 34 self.assertTrue(qemu.runner.logged, "Failed: %s" % cmd)
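The kvm switch above is decided by oe.types.qemu_use_kvm(). Roughly, KVM is only requested when QEMU_USE_KVM is enabled and the target architecture can be accelerated on the build host; a hypothetical sketch of that decision, not the actual oe.types implementation:

    import os

    def qemu_use_kvm_sketch(kvm, target_arch):
        # Hypothetical approximation of oe.types.qemu_use_kvm()
        if not kvm or str(kvm) in ("0", "False"):
            return False
        build_arch = os.uname().machine
        x86 = {"i386", "i486", "i586", "i686", "x86_64"}
        # KVM only helps when host and target ISA match (x86 variants count as a match)
        return build_arch == target_arch or (build_arch in x86 and target_arch in x86)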
diff --git a/meta/lib/oeqa/selftest/cases/eSDK.py b/meta/lib/oeqa/selftest/cases/esdk.py
index 862849af35..7a5fe00a08 100644
--- a/meta/lib/oeqa/selftest/cases/eSDK.py
+++ b/meta/lib/oeqa/selftest/cases/esdk.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -8,7 +10,7 @@ import os
8import glob 10import glob
9import time 11import time
10from oeqa.selftest.case import OESelftestTestCase 12from oeqa.selftest.case import OESelftestTestCase
11from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars 13from oeqa.utils.commands import runCmd, bitbake, get_bb_vars
12 14
13class oeSDKExtSelfTest(OESelftestTestCase): 15class oeSDKExtSelfTest(OESelftestTestCase):
14 """ 16 """
@@ -25,11 +27,7 @@ class oeSDKExtSelfTest(OESelftestTestCase):
25 return glob.glob(pattern)[0] 27 return glob.glob(pattern)[0]
26 28
27 @staticmethod 29 @staticmethod
28 def run_esdk_cmd(env_eSDK, tmpdir_eSDKQA, cmd, postconfig=None, **options): 30 def run_esdk_cmd(env_eSDK, tmpdir_eSDKQA, cmd, **options):
29 if postconfig:
30 esdk_conf_file = os.path.join(tmpdir_eSDKQA, 'conf', 'local.conf')
31 with open(esdk_conf_file, 'a+') as f:
32 f.write(postconfig)
33 if not options: 31 if not options:
34 options = {} 32 options = {}
35 if not 'shell' in options: 33 if not 'shell' in options:
@@ -63,7 +61,7 @@ class oeSDKExtSelfTest(OESelftestTestCase):
63 cls.env_eSDK = oeSDKExtSelfTest.get_esdk_environment('', cls.tmpdir_eSDKQA) 61 cls.env_eSDK = oeSDKExtSelfTest.get_esdk_environment('', cls.tmpdir_eSDKQA)
64 62
65 sstate_config=""" 63 sstate_config="""
66SDK_LOCAL_CONF_WHITELIST = "SSTATE_MIRRORS" 64ESDK_LOCALCONF_ALLOW = "SSTATE_MIRRORS"
67SSTATE_MIRRORS = "file://.* file://%s/PATH" 65SSTATE_MIRRORS = "file://.* file://%s/PATH"
68CORE_IMAGE_EXTRA_INSTALL = "perl" 66CORE_IMAGE_EXTRA_INSTALL = "perl"
69 """ % sstate_dir 67 """ % sstate_dir
@@ -91,7 +89,7 @@ CORE_IMAGE_EXTRA_INSTALL = "perl"
91 89
92 # Configure eSDK to use sstate mirror from poky 90 # Configure eSDK to use sstate mirror from poky
93 sstate_config=""" 91 sstate_config="""
94SDK_LOCAL_CONF_WHITELIST = "SSTATE_MIRRORS" 92ESDK_LOCALCONF_ALLOW = "SSTATE_MIRRORS"
95SSTATE_MIRRORS = "file://.* file://%s/PATH" 93SSTATE_MIRRORS = "file://.* file://%s/PATH"
96 """ % bb_vars["SSTATE_DIR"] 94 """ % bb_vars["SSTATE_DIR"]
97 with open(os.path.join(cls.tmpdir_eSDKQA, 'conf', 'local.conf'), 'a+') as f: 95 with open(os.path.join(cls.tmpdir_eSDKQA, 'conf', 'local.conf'), 'a+') as f:
@@ -100,7 +98,7 @@ SSTATE_MIRRORS = "file://.* file://%s/PATH"
100 @classmethod 98 @classmethod
101 def tearDownClass(cls): 99 def tearDownClass(cls):
102 for i in range(0, 10): 100 for i in range(0, 10):
103 if os.path.exists(os.path.join(cls.tmpdir_eSDKQA, 'bitbake.lock')): 101 if os.path.exists(os.path.join(cls.tmpdir_eSDKQA, 'bitbake.lock')) or os.path.exists(os.path.join(cls.tmpdir_eSDKQA, 'cache/hashserv.db-wal')):
104 time.sleep(1) 102 time.sleep(1)
105 else: 103 else:
106 break 104 break
diff --git a/meta/lib/oeqa/selftest/cases/externalsrc.py b/meta/lib/oeqa/selftest/cases/externalsrc.py
new file mode 100644
index 0000000000..1d800dc82c
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/externalsrc.py
@@ -0,0 +1,44 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8import shutil
9import tempfile
10
11from oeqa.selftest.case import OESelftestTestCase
12from oeqa.utils.commands import get_bb_var, runCmd
13
14class ExternalSrc(OESelftestTestCase):
15 # test that srctree_hash_files does not crash
16 # we should actually be checking do_compile[file-checksums] but oeqa currently does not support it
17 # so we check only that a recipe with externalsrc can be parsed
18 def test_externalsrc_srctree_hash_files(self):
19 test_recipe = "git-submodule-test"
20 git_url = "git://git.yoctoproject.org/git-submodule-test"
21 externalsrc_dir = tempfile.TemporaryDirectory(prefix="externalsrc").name
22
23 self.write_config(
24 """
25INHERIT += "externalsrc"
26EXTERNALSRC:pn-%s = "%s"
27""" % (test_recipe, externalsrc_dir)
28 )
29
30 # test with git without submodules
31 runCmd('git clone %s %s' % (git_url, externalsrc_dir))
32 os.unlink(externalsrc_dir + "/.gitmodules")
33 open(".gitmodules", 'w').close() # local file .gitmodules in cwd should not affect externalsrc parsing
34 self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S does not equal EXTERNALSRC")
35 os.unlink(".gitmodules")
36
37 # test with git with submodules
38 runCmd('git checkout .gitmodules', cwd=externalsrc_dir)
39 runCmd('git submodule update --init --recursive', cwd=externalsrc_dir)
40 self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S does not equal EXTERNALSRC")
41
42 # test without git
43 shutil.rmtree(os.path.join(externalsrc_dir, ".git"))
44 self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S does not equal EXTERNALSRC")
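With externalsrc inherited, EXTERNALSRC:pn-<recipe> redirects S to the external tree, which is what all three assertions check. A quick manual spot-check of the same behaviour (a sketch; assumes an initialized build directory):

    from oeqa.utils.commands import get_bb_var

    # Should print the directory set in EXTERNALSRC:pn-git-submodule-test
    print(get_bb_var("S", "git-submodule-test"))
    # CLI equivalent (assumption): bitbake-getvar -r git-submodule-test --value S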
diff --git a/meta/lib/oeqa/selftest/cases/fetch.py b/meta/lib/oeqa/selftest/cases/fetch.py
index 76cbadf2ff..1beef5cfed 100644
--- a/meta/lib/oeqa/selftest/cases/fetch.py
+++ b/meta/lib/oeqa/selftest/cases/fetch.py
@@ -1,7 +1,12 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
7import tempfile
8import textwrap
9import bb.tinfoil
5import oe.path 10import oe.path
6from oeqa.selftest.case import OESelftestTestCase 11from oeqa.selftest.case import OESelftestTestCase
7from oeqa.utils.commands import bitbake 12from oeqa.utils.commands import bitbake
@@ -21,8 +26,8 @@ class Fetch(OESelftestTestCase):
21 # No mirrors, should use git to fetch successfully 26 # No mirrors, should use git to fetch successfully
22 features = """ 27 features = """
23DL_DIR = "%s" 28DL_DIR = "%s"
24MIRRORS_forcevariable = "" 29MIRRORS:forcevariable = ""
25PREMIRRORS_forcevariable = "" 30PREMIRRORS:forcevariable = ""
26""" % dldir 31""" % dldir
27 self.write_config(features) 32 self.write_config(features)
28 oe.path.remove(dldir, recurse=True) 33 oe.path.remove(dldir, recurse=True)
@@ -31,9 +36,10 @@ PREMIRRORS_forcevariable = ""
31 # No mirrors and broken git, should fail 36 # No mirrors and broken git, should fail
32 features = """ 37 features = """
33DL_DIR = "%s" 38DL_DIR = "%s"
39SRC_URI:pn-dbus-wait = "git://git.yoctoproject.org/dbus-wait;branch=master;protocol=git"
34GIT_PROXY_COMMAND = "false" 40GIT_PROXY_COMMAND = "false"
35MIRRORS_forcevariable = "" 41MIRRORS:forcevariable = ""
36PREMIRRORS_forcevariable = "" 42PREMIRRORS:forcevariable = ""
37""" % dldir 43""" % dldir
38 self.write_config(features) 44 self.write_config(features)
39 oe.path.remove(dldir, recurse=True) 45 oe.path.remove(dldir, recurse=True)
@@ -43,9 +49,62 @@ PREMIRRORS_forcevariable = ""
43 # Broken git but a specific mirror 49 # Broken git but a specific mirror
44 features = """ 50 features = """
45DL_DIR = "%s" 51DL_DIR = "%s"
52SRC_URI:pn-dbus-wait = "git://git.yoctoproject.org/dbus-wait;branch=master;protocol=git"
46GIT_PROXY_COMMAND = "false" 53GIT_PROXY_COMMAND = "false"
47MIRRORS_forcevariable = "git://.*/.* http://downloads.yoctoproject.org/mirror/sources/" 54MIRRORS:forcevariable = "git://.*/.* http://downloads.yoctoproject.org/mirror/sources/"
48""" % dldir 55""" % dldir
49 self.write_config(features) 56 self.write_config(features)
50 oe.path.remove(dldir, recurse=True) 57 oe.path.remove(dldir, recurse=True)
51 bitbake("dbus-wait -c fetch -f") 58 bitbake("dbus-wait -c fetch -f")
59
60
61class Dependencies(OESelftestTestCase):
62 def write_recipe(self, content, tempdir):
63 f = os.path.join(tempdir, "test.bb")
64 with open(f, "w") as fd:
65 fd.write(content)
66 return f
67
68 def test_dependencies(self):
69 """
70 Verify that the correct dependencies are generated for specific SRC_URI entries.
71 """
72
73 with bb.tinfoil.Tinfoil() as tinfoil, tempfile.TemporaryDirectory(prefix="selftest-fetch") as tempdir:
74 tinfoil.prepare(config_only=False, quiet=2)
75
76 r = """
77 LICENSE = "CLOSED"
78 SRC_URI = "http://example.com/tarball.zip"
79 """
80 f = self.write_recipe(textwrap.dedent(r), tempdir)
81 d = tinfoil.parse_recipe_file(f)
82 self.assertIn("wget-native", d.getVarFlag("do_fetch", "depends"))
83 self.assertIn("unzip-native", d.getVarFlag("do_unpack", "depends"))
84
85 # Verify that the downloadfilename overrides the URI
86 r = """
87 LICENSE = "CLOSED"
88 SRC_URI = "https://example.com/tarball;downloadfilename=something.zip"
89 """
90 f = self.write_recipe(textwrap.dedent(r), tempdir)
91 d = tinfoil.parse_recipe_file(f)
92 self.assertIn("wget-native", d.getVarFlag("do_fetch", "depends"))
93 self.assertIn("unzip-native", d.getVarFlag("do_unpack", "depends") or "")
94
95 r = """
96 LICENSE = "CLOSED"
97 SRC_URI = "ftp://example.com/tarball.lz"
98 """
99 f = self.write_recipe(textwrap.dedent(r), tempdir)
100 d = tinfoil.parse_recipe_file(f)
101 self.assertIn("wget-native", d.getVarFlag("do_fetch", "depends"))
102 self.assertIn("lzip-native", d.getVarFlag("do_unpack", "depends"))
103
104 r = """
105 LICENSE = "CLOSED"
106 SRC_URI = "git://example.com/repo;branch=master;rev=ffffffffffffffffffffffffffffffffffffffff"
107 """
108 f = self.write_recipe(textwrap.dedent(r), tempdir)
109 d = tinfoil.parse_recipe_file(f)
110 self.assertIn("git-native", d.getVarFlag("do_fetch", "depends"))
diff --git a/meta/lib/oeqa/selftest/cases/fitimage.py b/meta/lib/oeqa/selftest/cases/fitimage.py
index 02692de822..3c40857747 100644
--- a/meta/lib/oeqa/selftest/cases/fitimage.py
+++ b/meta/lib/oeqa/selftest/cases/fitimage.py
@@ -1,14 +1,743 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5from oeqa.selftest.case import OESelftestTestCase
6from oeqa.utils.commands import runCmd, bitbake, get_bb_var, runqemu
7import os 7import os
8import json
9import re 8import re
9import shlex
10import logging
11import pprint
12import tempfile
13
14import oe.fitimage
15
16from oeqa.selftest.case import OESelftestTestCase
17from oeqa.utils.commands import runCmd, bitbake, get_bb_vars, get_bb_var
18
19
20class BbVarsMockGenKeys:
21 def __init__(self, keydir, gen_keys="0", sign_enabled="0", keyname="", sign_ind="0", img_keyname=""):
22 self.bb_vars = {
23 'FIT_GENERATE_KEYS': gen_keys,
24 'FIT_KEY_GENRSA_ARGS': "-F4",
25 'FIT_KEY_REQ_ARGS': "-batch -new",
26 'FIT_KEY_SIGN_PKCS': "-x509",
27 'FIT_SIGN_INDIVIDUAL': sign_ind,
28 'FIT_SIGN_NUMBITS': "2048",
29 'UBOOT_SIGN_ENABLE': sign_enabled,
30 'UBOOT_SIGN_IMG_KEYNAME': img_keyname,
31 'UBOOT_SIGN_KEYDIR': keydir,
32 'UBOOT_SIGN_KEYNAME': keyname,
33 }
34
35 def getVar(self, var):
36 return self.bb_vars[var]
37
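# Illustrative usage (not part of the patch): the mock stands in for the
# BitBake datastore wherever only getVar() is needed, e.g.:
#     bb_vars = BbVarsMockGenKeys("/tmp/keys", gen_keys="1", keyname="dev")
#     assert bb_vars.getVar('UBOOT_SIGN_KEYNAME') == "dev"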
38class FitImageTestCase(OESelftestTestCase):
39 """Test functions usable for testing kernel-fitimage.bbclass and uboot-sign.bbclass
40
41 A brief summary showing the structure of a test case:
42
43 self._test_fitimage()
44 # Generate a local.conf file and bitbake the bootloader or the kernel
45 self._bitbake_fit_image()
46
47 # Check if the its file contains the expected paths and attributes.
48 # The _get_req_* functions are implemented by more specific child classes.
49 self._check_its_file()
50 req_its_paths = self._get_req_its_paths()
51 req_sigvalues_config = self._get_req_sigvalues_config()
52 req_sigvalues_image = self._get_req_sigvalues_image()
53 # Compare the its file against req_its_paths, req_sigvalues_config, req_sigvalues_image
54
55 # Call the dumpimage utility and check that it prints all the expected paths and attributes
56 # The _get_req_* functions are implemented by more specific child classes.
57 self._check_fitimage()
58 self._get_req_sections()
59 # Compare the output of the dumpimage utility against req_sections
60 """
61
62 MKIMAGE_HASH_LENGTHS = { 'sha256': 64, 'sha384': 96, 'sha512': 128 }
63 MKIMAGE_SIGNATURE_LENGTHS = { 'rsa2048': 512 }
64
65 def _gen_signing_key(self, bb_vars):
66 """Generate a key pair and a singing certificate
67
68 Generate a UBOOT_SIGN_KEYNAME in the UBOOT_SIGN_KEYDIR similar to what
69 the FIT_GENERATE_KEYS feature does. However, having a static key is
70 probably a more realistic use case than generating a random key with
71 each clean build. So this needs to be tested as well.
72 FIT_GENERATE_KEYS generates two keys: the UBOOT_SIGN_KEYNAME and the
73 UBOOT_SIGN_IMG_KEYNAME. The UBOOT_SIGN_IMG_KEYNAME is used by the
74 FIT_SIGN_INDIVIDUAL feature only. Testing that everything works when
75 only one key is available is important as well. Therefore this
76 function generates only the keys which are really needed, not always two.
77 """
78
79 # Define some variables which are usually defined by the kernel-fitimage.bbclass.
80 # But for testing purposes, check that the uboot-sign.bbclass is independent of
81 # the kernel-fitimage.bbclass
82 fit_sign_numbits = bb_vars.get('FIT_SIGN_NUMBITS', "2048")
83 fit_key_genrsa_args = bb_vars.get('FIT_KEY_GENRSA_ARGS', "-F4")
84 fit_key_req_args = bb_vars.get('FIT_KEY_REQ_ARGS', "-batch -new")
85 fit_key_sign_pkcs = bb_vars.get('FIT_KEY_SIGN_PKCS', "-x509")
86
87 uboot_sign_keydir = bb_vars['UBOOT_SIGN_KEYDIR']
88 sign_keys = [bb_vars['UBOOT_SIGN_KEYNAME']]
89 if bb_vars['FIT_SIGN_INDIVIDUAL'] == "1":
90 sign_keys.append(bb_vars['UBOOT_SIGN_IMG_KEYNAME'])
91 for sign_key in sign_keys:
92 sign_key_path = os.path.join(uboot_sign_keydir, sign_key)
93 if not os.path.isdir(uboot_sign_keydir):
94 os.makedirs(uboot_sign_keydir)
95 openssl_bindir = FitImageTestCase._setup_native('openssl-native')
96 openssl_path = os.path.join(openssl_bindir, 'openssl')
97 runCmd("%s genrsa %s -out %s.key %s" % (
98 openssl_path,
99 fit_key_genrsa_args,
100 sign_key_path,
101 fit_sign_numbits
102 ))
103 runCmd("%s req %s %s -key %s.key -out %s.crt" % (
104 openssl_path,
105 fit_key_req_args,
106 fit_key_sign_pkcs,
107 sign_key_path,
108 sign_key_path
109 ))
110
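# Shell equivalent of the two runCmd() calls above, with the default FIT_*
# values and UBOOT_SIGN_KEYNAME = "dev" substituted (illustrative only):
#     openssl genrsa -F4 -out dev.key 2048
#     openssl req -batch -new -x509 -key dev.key -out dev.crt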
111 @staticmethod
112 def _gen_random_file(file_path, num_bytes=65536):
113 with open(file_path, 'wb') as file_out:
114 file_out.write(os.urandom(num_bytes))
115
116 @staticmethod
117 def _setup_native(native_recipe):
118 """Build a native recipe and return the path to its bindir in RECIPE_SYSROOT_NATIVE"""
119 bitbake(native_recipe + " -c addto_recipe_sysroot")
120 vars = get_bb_vars(['RECIPE_SYSROOT_NATIVE', 'bindir'], native_recipe)
121 return os.path.join(vars['RECIPE_SYSROOT_NATIVE'], vars['bindir'])
122
123 def _verify_fit_image_signature(self, uboot_tools_bindir, fitimage_path, dtb_path, conf_name=None):
124 """Verify the signature of a fit configuration
125
126 The fit_check_sign utility from u-boot-tools-native is called.
127 uboot-fit_check_sign -f fitImage -k $dtb_path -c conf-$dtb_name
128 dtb_path refers to a binary device tree containing the public key.
129 """
130 fit_check_sign_path = os.path.join(uboot_tools_bindir, 'uboot-fit_check_sign')
131 cmd = '%s -f %s -k %s' % (fit_check_sign_path, fitimage_path, dtb_path)
132 if conf_name:
133 cmd += ' -c %s' % conf_name
134 result = runCmd(cmd)
135 self.logger.debug("%s\nreturned: %s\n%s", cmd, str(result.status), result.output)
136 self.assertIn("Signature check OK", result.output)
137
138 def _verify_dtb_property(self, dtc_bindir, dtb_path, node_path, property_name, req_property, absent=False):
139 """Verify device tree properties
140
141 The fdtget utility from dtc-native is called and the property is compared.
142 """
143 fdtget_path = os.path.join(dtc_bindir, 'fdtget')
144 cmd = '%s %s %s %s' % (fdtget_path, dtb_path, node_path, property_name)
145 if absent:
146 result = runCmd(cmd, ignore_status=True)
147 self.logger.debug("%s\nreturned: %s\n%s", cmd, str(result.status), result.output)
148 self.assertIn("FDT_ERR_NOTFOUND", result.output)
149 else:
150 result = runCmd(cmd)
151 self.logger.debug("%s\nreturned: %s\n%s", cmd, str(result.status), result.output)
152 self.assertEqual(req_property, result.output.strip())
153
154 @staticmethod
155 def _find_string_in_bin_file(file_path, search_string):
156 """find strings in a binary file
157
158 Shell equivalent: strings "$1" | grep "$2" | wc -l
159 return number of matches
160 """
161 found_positions = 0
162 with open(file_path, 'rb') as file:
163 content = file.read().decode('ascii', errors='ignore')
164 found_positions = content.count(search_string)
165 return found_positions
166
167 @staticmethod
168 def _get_uboot_mkimage_sign_args(uboot_mkimage_sign_args):
169 """Retrive the string passed via -c to the mkimage command
170
171 Example: If a build configutation defines
172 UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'"
173 this function returns "a smart comment"
174 """
175 a_comment = None
176 if uboot_mkimage_sign_args:
177 mkimage_args = shlex.split(uboot_mkimage_sign_args)
178 try:
179 c_index = mkimage_args.index('-c')
180 a_comment = mkimage_args[c_index+1]
181 except ValueError:
182 pass
183 return a_comment
184
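# Illustration (not part of the patch): shlex keeps the quoted comment
# together, so
#     shlex.split("-c 'a smart comment'") == ['-c', 'a smart comment']
# and _get_uboot_mkimage_sign_args("-c 'a smart comment'") returns
# 'a smart comment'.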
185 @staticmethod
186 def _get_dtb_files(bb_vars):
187 """Return a list of devicetree names
188
189 The list should be used to check the dtb and conf nodes in the FIT image or its file.
190 In addition to the entries from KERNEL_DEVICETREE, the external devicetree and the
191 external devicetree overlay added by the test recipe bbb-dtbs-as-ext are handled as well.
192 """
193 kernel_devicetree = bb_vars.get('KERNEL_DEVICETREE')
194 all_dtbs = []
195 dtb_symlinks = []
196 if kernel_devicetree:
197 all_dtbs += [os.path.basename(dtb) for dtb in kernel_devicetree.split()]
198 # Support only the test recipe which provides 1 devicetree and 1 devicetree overlay
199 pref_prov_dtb = bb_vars.get('PREFERRED_PROVIDER_virtual/dtb')
200 if pref_prov_dtb == "bbb-dtbs-as-ext":
201 all_dtbs += ["am335x-bonegreen-ext.dtb", "BBORG_RELAY-00A2.dtbo"]
202 dtb_symlinks.append("am335x-bonegreen-ext-alias.dtb")
203 return (all_dtbs, dtb_symlinks)
204
205 def _is_req_dict_in_dict(self, found_dict, req_dict):
206 """
207 Check if all key-value pairs in the required dictionary are present in the found dictionary.
208
209 This function recursively checks if the required dictionary (`req_dict`) is a subset of the found dictionary (`found_dict`).
210 It supports nested dictionaries, strings, lists, and sets as values.
211
212 Args:
213 found_dict (dict): The dictionary to search within.
214 req_dict (dict): The dictionary containing the required key-value pairs.
215 """
216 for key, value in req_dict.items():
217 self.assertIn(key, found_dict)
218 if isinstance(value, dict):
219 self._is_req_dict_in_dict(found_dict[key], value)
220 elif isinstance(value, str):
221 self.assertIn(value, found_dict[key])
222 elif isinstance(value, list):
223 self.assertLessEqual(set(value), set(found_dict[key]))
224 elif isinstance(value, set):
225 self.assertLessEqual(value, found_dict[key])
226 else:
227 self.assertEqual(value, found_dict[key])
228
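# Illustration: this req_dict is accepted as a subset of found_dict
# (string values are compared with assertIn, i.e. substring semantics):
#     found_dict = {'kernel-1': {'Type': 'Kernel Image', 'OS': 'Linux'}}
#     req_dict   = {'kernel-1': {'Type': 'Kernel Image'}}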
229 def _check_its_file(self, bb_vars, its_file_path):
230 """Check if the its file contains the expected sections and fields"""
231 # print the its file for debugging
232 if logging.DEBUG >= self.logger.level:
233 with open(its_file_path) as its_file:
234 self.logger.debug("its file: %s" % its_file.read())
235
236 # Generate a list of expected paths in the its file
237 req_its_paths = self._get_req_its_paths(bb_vars)
238 self.logger.debug("req_its_paths:\n%s\n" % pprint.pformat(req_its_paths, indent=4))
239
240 # Generate a dict of expected configuration signature nodes
241 req_sigvalues_config = self._get_req_sigvalues_config(bb_vars)
242 self.logger.debug("req_sigvalues_config:\n%s\n" % pprint.pformat(req_sigvalues_config, indent=4))
243
244 # Generate a dict of expected image signature nodes
245 req_sigvalues_image = self._get_req_sigvalues_image(bb_vars)
246 self.logger.debug("req_sigvalues_image:\n%s\n" % pprint.pformat(req_sigvalues_image, indent=4))
247
248 # Parse the its file for paths and signatures
249 its_path = []
250 its_paths = []
251 linect = 0
252 sigs = {}
253 with open(its_file_path) as its_file:
254 for line in its_file:
255 linect += 1
256 line = line.strip()
257 if line.endswith('};'):
258 its_path.pop()
259 elif line.endswith('{'):
260 its_path.append(line[:-1].strip())
261 its_paths.append(its_path[:])
262 # kernel-fitimage uses signature-1, uboot-sign uses signature
263 elif its_path and (its_path[-1] == 'signature-1' or its_path[-1] == 'signature'):
264 itsdotpath = '.'.join(its_path)
265 if not itsdotpath in sigs:
266 sigs[itsdotpath] = {}
267 if not '=' in line or not line.endswith(';'):
268 self.fail('Unexpected formatting in %s sigs section line %d:%s' % (its_file_path, linect, line))
269 key, value = line.split('=', 1)
270 sigs[itsdotpath][key.rstrip()] = value.lstrip().rstrip(';')
271
272 # Check if all expected paths are found in the its file
273 self.logger.debug("itspaths:\n%s\n" % pprint.pformat(its_paths, indent=4))
274 for req_path in req_its_paths:
275 if not req_path in its_paths:
276 self.fail('Missing path in its file: %s (%s)' % (req_path, its_file_path))
277
278 # Check if all the expected signature nodes (images and configurations) are found
279 self.logger.debug("sigs:\n%s\n" % pprint.pformat(sigs, indent=4))
280 if req_sigvalues_config or req_sigvalues_image:
281 for its_path, values in sigs.items():
282 if bb_vars.get('FIT_CONF_PREFIX', "conf-") in its_path:
283 reqsigvalues = req_sigvalues_config
284 else:
285 reqsigvalues = req_sigvalues_image
286 for reqkey, reqvalue in reqsigvalues.items():
287 value = values.get(reqkey, None)
288 if value is None:
289 self.fail('Missing key "%s" in its file signature section %s (%s)' % (reqkey, its_path, its_file_path))
290 self.assertEqual(value, reqvalue)
291
292 # Generate a list of expected fields in the its file
293 req_its_fields = self._get_req_its_fields(bb_vars)
294 self.logger.debug("req_its_fields:\n%s\n" % pprint.pformat(req_its_fields, indent=4))
295
296 # Check if all expected fields are in the its file
297 if req_its_fields:
298 field_index = 0
299 field_index_last = len(req_its_fields) - 1
300 with open(its_file_path) as its_file:
301 for line in its_file:
302 if req_its_fields[field_index] in line:
303 if field_index < field_index_last:
304 field_index += 1
305 else:
306 break
307 self.assertEqual(field_index, field_index_last,
308 "Fields in Image Tree Source File %s did not match, error in finding %s"
309 % (its_file_path, req_its_fields[field_index]))
310
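# Illustration of the path-stack parsing above: for an its fragment
#     / { images { kernel-1 { hash-1 { ... }; }; }; };
# its_paths collects ['/'], ['/', 'images'], ['/', 'images', 'kernel-1']
# and ['/', 'images', 'kernel-1', 'hash-1'] as the braces are opened.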
311 def _check_fitimage(self, bb_vars, fitimage_path, uboot_tools_bindir):
312 """Run dumpimage on the final FIT image and parse the output into a dict"""
313 dumpimage_path = os.path.join(uboot_tools_bindir, 'dumpimage')
314 cmd = '%s -l %s' % (dumpimage_path, fitimage_path)
315 self.logger.debug("Analyzing output from dumpimage: %s" % cmd)
316 dumpimage_result = runCmd(cmd)
317 in_section = None
318 sections = {}
319 self.logger.debug("dumpimage output: %s" % dumpimage_result.output)
320 for line in dumpimage_result.output.splitlines():
321 # Find potentially hashed and signed sections
322 if line.startswith((' Configuration', ' Image')):
323 in_section = re.search(r'\((.*)\)', line).groups()[0]
324 # Key-value lines start with two spaces; otherwise the section has ended
325 elif not line.startswith(" "):
326 in_section = None
327 # Handle key value lines of this section
328 elif in_section:
329 if not in_section in sections:
330 sections[in_section] = {}
331 try:
332 key, value = line.split(':', 1)
333 key = key.strip()
334 value = value.strip()
335 except ValueError as val_err:
336 # Handle multiple entries, e.g. for Loadables, as a list
337 if key and line.startswith(" "):
338 value = sections[in_section][key] + "," + line.strip()
339 else:
340 raise ValueError(f"Error processing line: '{line}'. Original error: {val_err}")
341 sections[in_section][key] = value
342
343 # Check if the requested dictionary is a subset of the parsed dictionary
344 req_sections, num_signatures = self._get_req_sections(bb_vars)
345 self.logger.debug("req_sections: \n%s\n" % pprint.pformat(req_sections, indent=4))
346 self.logger.debug("dumpimage sections: \n%s\n" % pprint.pformat(sections, indent=4))
347 self._is_req_dict_in_dict(sections, req_sections)
348
349 # Call the signing-related checks if the function is provided by an inheriting class
350 self._check_signing(bb_vars, sections, num_signatures, uboot_tools_bindir, fitimage_path)
351
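# Typical excerpt of the `dumpimage -l fitImage` output parsed above
# (abridged and illustrative only):
#      Image 0 (kernel-1)
#       Description:  Linux kernel
#       Type:         Kernel Image
#       Hash algo:    sha256
#       Hash value:   <hash, 64 hex digits for sha256>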
352 def _get_req_its_paths(self, bb_vars):
353 self.logger.error("This function needs to be implemented")
354 return []
355
356 def _get_req_its_fields(self, bb_vars):
357 self.logger.error("This function needs to be implemented")
358 return []
359
360 def _get_req_sigvalues_config(self, bb_vars):
361 self.logger.error("This function needs to be implemented")
362 return {}
363
364 def _get_req_sigvalues_image(self, bb_vars):
365 self.logger.error("This function needs to be implemented")
366 return {}
367
368 def _get_req_sections(self, bb_vars):
369 self.logger.error("This function needs to be implemented")
370 return ({}, 0)
371
372 def _check_signing(self, bb_vars, sections, num_signatures, uboot_tools_bindir, fitimage_path):
373 """Verify the signatures in the FIT image."""
374 self.fail("Function needs to be implemented by inheriting classes")
375
376 def _bitbake_fit_image(self, bb_vars):
377 """Bitbake the FIT image and return the paths to the its file and the FIT image"""
378 self.fail("Function needs to be implemented by inheriting classes")
379
380 def _test_fitimage(self, bb_vars):
381 """Check if the its file and the FIT image are created and signed correctly"""
382 fitimage_its_path, fitimage_path = self._bitbake_fit_image(bb_vars)
383 self.assertExists(fitimage_its_path, "%s image tree source doesn't exist" % (fitimage_its_path))
384 self.assertExists(fitimage_path, "%s FIT image doesn't exist" % (fitimage_path))
385
386 self.logger.debug("Checking its: %s" % fitimage_its_path)
387 self._check_its_file(bb_vars, fitimage_its_path)
388
389 # Setup u-boot-tools-native
390 uboot_tools_bindir = FitImageTestCase._setup_native('u-boot-tools-native')
391
392 # Verify the FIT image
393 self._check_fitimage(bb_vars, fitimage_path, uboot_tools_bindir)
394
395class KernelFitImageBase(FitImageTestCase):
396 """Test cases for the linux-yocto-fitimage recipe"""
397
398 def _fit_get_bb_vars(self, additional_vars=[]):
399 """Retrieve BitBake variables specific to the test case.
400
401 Call the get_bb_vars function once and get all variables needed by the test case.
402 """
403 internal_used = {
404 'DEPLOY_DIR_IMAGE',
405 'FIT_CONF_DEFAULT_DTB',
406 'FIT_CONF_PREFIX',
407 'FIT_DESC',
408 'FIT_HASH_ALG',
409 'FIT_KERNEL_COMP_ALG',
410 'FIT_SIGN_ALG',
411 'FIT_SIGN_INDIVIDUAL',
412 'FIT_UBOOT_ENV',
413 'INITRAMFS_IMAGE_BUNDLE',
414 'INITRAMFS_IMAGE_NAME',
415 'INITRAMFS_IMAGE',
416 'KERNEL_DEPLOYSUBDIR',
417 'KERNEL_DEVICETREE',
418 'KERNEL_FIT_LINK_NAME',
419 'MACHINE',
420 'PREFERRED_PROVIDER_virtual/dtb',
421 'UBOOT_ARCH',
422 'UBOOT_ENTRYPOINT',
423 'UBOOT_LOADADDRESS',
424 'UBOOT_MKIMAGE_KERNEL_TYPE',
425 'UBOOT_MKIMAGE_SIGN_ARGS',
426 'UBOOT_RD_ENTRYPOINT',
427 'UBOOT_RD_LOADADDRESS',
428 'UBOOT_SIGN_ENABLE',
429 'UBOOT_SIGN_IMG_KEYNAME',
430 'UBOOT_SIGN_KEYDIR',
431 'UBOOT_SIGN_KEYNAME',
432 }
433 bb_vars = get_bb_vars(list(internal_used | set(additional_vars)), self.kernel_recipe)
434 self.logger.debug("bb_vars: %s" % pprint.pformat(bb_vars, indent=4))
435 return bb_vars
436
437 def _config_add_kernel_classes(self, config):
438 config += '# Use kernel-fit-extra-artifacts.bbclass for the creation of the vmlinux artifact' + os.linesep
439 config += 'KERNEL_CLASSES = "kernel-fit-extra-artifacts"' + os.linesep
440 return config
441
442 @property
443 def kernel_recipe(self):
444 return "linux-yocto-fitimage"
445
446 def _config_add_uboot_env(self, config):
447 """Generate an u-boot environment
448
449 Create a boot.cmd file that is packed into the FIT image as a source-able text file,
450 and update the configuration to include it.
451 """
452 fit_uenv_file = "boot.cmd"
453 test_files_dir = "test-files"
454 fit_uenv_path = os.path.join(self.builddir, test_files_dir, fit_uenv_file)
455
456 config += '# Add a u-boot script to the fitImage' + os.linesep
457 config += 'FIT_UBOOT_ENV = "%s"' % fit_uenv_file + os.linesep
458 config += 'FILESEXTRAPATHS:prepend := "${TOPDIR}/%s:"' % test_files_dir + os.linesep
459 config += 'SRC_URI:append:pn-%s = " file://${FIT_UBOOT_ENV}"' % self.kernel_recipe + os.linesep
460
461 if not os.path.isdir(test_files_dir):
462 os.makedirs(test_files_dir)
463 self.logger.debug("Writing to: %s" % fit_uenv_path)
464 with open(fit_uenv_path, "w") as f:
465 f.write('echo "hello world"')
10 466
11class FitImageTests(OESelftestTestCase): 467 return config
468
469 def _bitbake_fit_image(self, bb_vars):
470 """Bitbake the kernel and return the paths to the its file and the FIT image"""
471 bitbake(self.kernel_recipe)
472
473 # Find the right its file and the final fitImage and check if both files are available
474 deploy_dir_image = bb_vars['DEPLOY_DIR_IMAGE']
475 initramfs_image = bb_vars['INITRAMFS_IMAGE']
476 initramfs_image_bundle = bb_vars['INITRAMFS_IMAGE_BUNDLE']
477 initramfs_image_name = bb_vars['INITRAMFS_IMAGE_NAME']
478 kernel_fit_link_name = bb_vars['KERNEL_FIT_LINK_NAME']
479 if not initramfs_image and initramfs_image_bundle != "1":
480 fitimage_its_name = "fitImage-its-%s" % kernel_fit_link_name
481 fitimage_name = "fitImage"
482 elif initramfs_image and initramfs_image_bundle != "1":
483 fitimage_its_name = "fitImage-its-%s-%s" % (initramfs_image_name, kernel_fit_link_name)
484 fitimage_name = "fitImage-%s-%s" % (initramfs_image_name, kernel_fit_link_name)
485 elif initramfs_image and initramfs_image_bundle == "1":
486 fitimage_its_name = "fitImage-its-%s-%s" % (initramfs_image_name, kernel_fit_link_name)
487 fitimage_name = "fitImage" # or fitImage-${KERNEL_IMAGE_LINK_NAME}${KERNEL_IMAGE_BIN_EXT}
488 else:
489 self.fail('Invalid configuration: INITRAMFS_IMAGE_BUNDLE = "1" and not INITRAMFS_IMAGE')
490 kernel_deploysubdir = bb_vars['KERNEL_DEPLOYSUBDIR']
491 if kernel_deploysubdir:
492 fitimage_its_path = os.path.realpath(os.path.join(deploy_dir_image, kernel_deploysubdir, fitimage_its_name))
493 fitimage_path = os.path.realpath(os.path.join(deploy_dir_image, kernel_deploysubdir, fitimage_name))
494 else:
495 fitimage_its_path = os.path.realpath(os.path.join(deploy_dir_image, fitimage_its_name))
496 fitimage_path = os.path.realpath(os.path.join(deploy_dir_image, fitimage_name))
497 return (fitimage_its_path, fitimage_path)
498
499 def _get_req_its_paths(self, bb_vars):
500 """Generate a list of expected paths in the its file
501
502 Example:
503 [
504 ['/', 'images', 'kernel-1', 'hash-1'],
505 ['/', 'images', 'kernel-1', 'signature-1'],
506 ]
507 """
508 dtb_files, dtb_symlinks = FitImageTestCase._get_dtb_files(bb_vars)
509 fit_sign_individual = bb_vars['FIT_SIGN_INDIVIDUAL']
510 fit_uboot_env = bb_vars['FIT_UBOOT_ENV']
511 initramfs_image = bb_vars['INITRAMFS_IMAGE']
512 initramfs_image_bundle = bb_vars['INITRAMFS_IMAGE_BUNDLE']
513 uboot_sign_enable = bb_vars.get('UBOOT_SIGN_ENABLE')
514
515 # image nodes
516 images = [ 'kernel-1' ]
517 if dtb_files:
518 images += [ 'fdt-' + dtb for dtb in dtb_files ]
519 if fit_uboot_env:
520 images.append('bootscr-' + fit_uboot_env)
521 if bb_vars['MACHINE'] == "qemux86-64": # Not really the right if
522 images.append('setup-1')
523 if initramfs_image and initramfs_image_bundle != "1":
524 images.append('ramdisk-1')
525
526 # configuration nodes (one per DTB and also one per symlink)
527 if dtb_files:
528 configurations = [bb_vars['FIT_CONF_PREFIX'] + conf for conf in dtb_files + dtb_symlinks]
529 else:
530 configurations = [bb_vars['FIT_CONF_PREFIX'] + '1']
531
532 # Create a list of paths for all image and configuration nodes
533 req_its_paths = []
534 for image in images:
535 req_its_paths.append(['/', 'images', image, 'hash-1'])
536 if uboot_sign_enable == "1" and fit_sign_individual == "1":
537 req_its_paths.append(['/', 'images', image, 'signature-1'])
538 for configuration in configurations:
539 req_its_paths.append(['/', 'configurations', configuration, 'hash-1'])
540 if uboot_sign_enable == "1":
541 req_its_paths.append(['/', 'configurations', configuration, 'signature-1'])
542 return req_its_paths
543
544 def _get_req_its_fields(self, bb_vars):
545 initramfs_image = bb_vars['INITRAMFS_IMAGE']
546 initramfs_image_bundle = bb_vars['INITRAMFS_IMAGE_BUNDLE']
547 uboot_rd_loadaddress = bb_vars.get('UBOOT_RD_LOADADDRESS')
548 uboot_rd_entrypoint = bb_vars.get('UBOOT_RD_ENTRYPOINT')
549
550 its_field_check = [
551 'description = "%s";' % bb_vars['FIT_DESC'],
552 'description = "Linux kernel";',
553 'type = "' + str(bb_vars['UBOOT_MKIMAGE_KERNEL_TYPE']) + '";',
554 # 'compression = "' + str(bb_vars['FIT_KERNEL_COMP_ALG']) + '";', defined based on files in TMPDIR, not ideal...
555 'data = /incbin/("linux.bin");',
556 'arch = "' + str(bb_vars['UBOOT_ARCH']) + '";',
557 'os = "linux";',
558 'load = <' + str(bb_vars['UBOOT_LOADADDRESS']) + '>;',
559 'entry = <' + str(bb_vars['UBOOT_ENTRYPOINT']) + '>;',
560 ]
561 if initramfs_image and initramfs_image_bundle != "1":
562 its_field_check.append('type = "ramdisk";')
563 if uboot_rd_loadaddress:
564 its_field_check.append("load = <%s>;" % uboot_rd_loadaddress)
565 if uboot_rd_entrypoint:
566 its_field_check.append("entry = <%s>;" % uboot_rd_entrypoint)
567
568 fit_conf_default_dtb = bb_vars.get('FIT_CONF_DEFAULT_DTB')
569 if fit_conf_default_dtb:
570 fit_conf_prefix = bb_vars.get('FIT_CONF_PREFIX', "conf-")
571 its_field_check.append('default = "' + fit_conf_prefix + fit_conf_default_dtb + '";')
572
573 its_field_check.append('kernel = "kernel-1";')
574
575 if initramfs_image and initramfs_image_bundle != "1":
576 its_field_check.append('ramdisk = "ramdisk-1";')
577
578 return its_field_check
579
580 def _get_req_sigvalues_config(self, bb_vars):
581 """Generate a dictionary of expected configuration signature nodes"""
582 if bb_vars.get('UBOOT_SIGN_ENABLE') != "1":
583 return {}
584 sign_images = '"kernel", "fdt"'
585 if bb_vars['INITRAMFS_IMAGE'] and bb_vars['INITRAMFS_IMAGE_BUNDLE'] != "1":
586 sign_images += ', "ramdisk"'
587 if bb_vars['FIT_UBOOT_ENV']:
588 sign_images += ', "bootscr"'
589 req_sigvalues_config = {
590 'algo': '"%s,%s"' % (bb_vars['FIT_HASH_ALG'], bb_vars['FIT_SIGN_ALG']),
591 'key-name-hint': '"%s"' % bb_vars['UBOOT_SIGN_KEYNAME'],
592 'sign-images': sign_images,
593 }
594 return req_sigvalues_config
595
596 def _get_req_sigvalues_image(self, bb_vars):
597 """Generate a dictionary of expected image signature nodes"""
598 if bb_vars['FIT_SIGN_INDIVIDUAL'] != "1":
599 return {}
600 req_sigvalues_image = {
601 'algo': '"%s,%s"' % (bb_vars['FIT_HASH_ALG'], bb_vars['FIT_SIGN_ALG']),
602 'key-name-hint': '"%s"' % bb_vars['UBOOT_SIGN_IMG_KEYNAME'],
603 }
604 return req_sigvalues_image
605
606 def _get_req_sections(self, bb_vars):
607 """Generate a dictionary of expected sections in the output of dumpimage"""
608 dtb_files, dtb_symlinks = FitImageTestCase._get_dtb_files(bb_vars)
609 fit_hash_alg = bb_vars['FIT_HASH_ALG']
610 fit_sign_alg = bb_vars['FIT_SIGN_ALG']
611 fit_sign_individual = bb_vars['FIT_SIGN_INDIVIDUAL']
612 fit_uboot_env = bb_vars['FIT_UBOOT_ENV']
613 initramfs_image = bb_vars['INITRAMFS_IMAGE']
614 initramfs_image_bundle = bb_vars['INITRAMFS_IMAGE_BUNDLE']
615 uboot_sign_enable = bb_vars['UBOOT_SIGN_ENABLE']
616 uboot_sign_img_keyname = bb_vars['UBOOT_SIGN_IMG_KEYNAME']
617 uboot_sign_keyname = bb_vars['UBOOT_SIGN_KEYNAME']
618 num_signatures = 0
619 req_sections = {
620 "kernel-1": {
621 "Type": "Kernel Image",
622 "OS": "Linux",
623 "Load Address": bb_vars['UBOOT_LOADADDRESS'],
624 "Entry Point": bb_vars['UBOOT_ENTRYPOINT'],
625 }
626 }
627 # Create one section per DTB
628 for dtb in dtb_files:
629 req_sections['fdt-' + dtb] = {
630 "Type": "Flat Device Tree",
631 }
632 # Add a script section if there is a script
633 if fit_uboot_env:
634 req_sections['bootscr-' + fit_uboot_env] = { "Type": "Script" }
635 # Add the initramfs
636 if initramfs_image and initramfs_image_bundle != "1":
637 req_sections['ramdisk-1'] = {
638 "Type": "RAMDisk Image",
639 "Load Address": bb_vars['UBOOT_RD_LOADADDRESS'],
640 "Entry Point": bb_vars['UBOOT_RD_ENTRYPOINT']
641 }
642 # Create a configuration section for each DTB
643 if dtb_files:
644 for dtb in dtb_files + dtb_symlinks:
645 conf_name = bb_vars['FIT_CONF_PREFIX'] + dtb
646 # Assume that DTBs with "-alias" in their name are symlink DTBs created e.g. by the
647 # bbb-dtbs-as-ext test recipe. Make the configuration node point to the real DTB.
648 real_dtb = dtb.replace("-alias", "")
649 # dtb overlays do not refer to a kernel (yet?)
650 if dtb.endswith('.dtbo'):
651 req_sections[conf_name] = {
652 "FDT": 'fdt-' + real_dtb,
653 }
654 else:
655 req_sections[conf_name] = {
656 "Kernel": "kernel-1",
657 "FDT": 'fdt-' + real_dtb,
658 }
659 if initramfs_image and initramfs_image_bundle != "1":
660 req_sections[conf_name]['Init Ramdisk'] = "ramdisk-1"
661 else:
662 conf_name = bb_vars['FIT_CONF_PREFIX'] + '1'
663 req_sections[conf_name] = {
664 "Kernel": "kernel-1"
665 }
666 if initramfs_image and initramfs_image_bundle != "1":
667 req_sections[conf_name]['Init Ramdisk'] = "ramdisk-1"
668
669 # Add signing related properties if needed
670 if uboot_sign_enable == "1":
671 for section in req_sections:
672 req_sections[section]['Hash algo'] = fit_hash_alg
673 if section.startswith(bb_vars['FIT_CONF_PREFIX']):
674 req_sections[section]['Hash value'] = "unavailable"
675 req_sections[section]['Sign algo'] = "%s,%s:%s" % (fit_hash_alg, fit_sign_alg, uboot_sign_keyname)
676 num_signatures += 1
677 elif fit_sign_individual == "1":
678 req_sections[section]['Sign algo'] = "%s,%s:%s" % (fit_hash_alg, fit_sign_alg, uboot_sign_img_keyname)
679 num_signatures += 1
680 return (req_sections, num_signatures)
681
682 def _check_signing(self, bb_vars, sections, num_signatures, uboot_tools_bindir, fitimage_path):
683 """Verify the signature nodes in the FIT image"""
684 if bb_vars['UBOOT_SIGN_ENABLE'] == "1":
685 self.logger.debug("Verifying signatures in the FIT image")
686 else:
687 self.logger.debug("FIT image is not signed. Signature verification is not needed.")
688 return
689
690 fit_hash_alg = bb_vars['FIT_HASH_ALG']
691 fit_sign_alg = bb_vars['FIT_SIGN_ALG']
692 uboot_sign_keyname = bb_vars['UBOOT_SIGN_KEYNAME']
693 uboot_sign_img_keyname = bb_vars['UBOOT_SIGN_IMG_KEYNAME']
694 deploy_dir_image = bb_vars['DEPLOY_DIR_IMAGE']
695 kernel_deploysubdir = bb_vars['KERNEL_DEPLOYSUBDIR']
696 fit_sign_individual = bb_vars['FIT_SIGN_INDIVIDUAL']
697 fit_hash_alg_len = FitImageTestCase.MKIMAGE_HASH_LENGTHS[fit_hash_alg]
698 fit_sign_alg_len = FitImageTestCase.MKIMAGE_SIGNATURE_LENGTHS[fit_sign_alg]
699 for section, values in sections.items():
700 # Configuration nodes are always signed with UBOOT_SIGN_KEYNAME (if UBOOT_SIGN_ENABLE = "1")
701 if section.startswith(bb_vars['FIT_CONF_PREFIX']):
702 sign_algo = values.get('Sign algo', None)
703 req_sign_algo = "%s,%s:%s" % (fit_hash_alg, fit_sign_alg, uboot_sign_keyname)
704 self.assertEqual(sign_algo, req_sign_algo, 'Signature algorithm for %s not expected value' % section)
705 sign_value = values.get('Sign value', None)
706 self.assertEqual(len(sign_value), fit_sign_alg_len, 'Signature value for section %s not expected length' % section)
707 dtb_file_name = section.replace(bb_vars['FIT_CONF_PREFIX'], '')
708 dtb_path = os.path.join(deploy_dir_image, dtb_file_name)
709 if kernel_deploysubdir:
710 dtb_path = os.path.join(deploy_dir_image, kernel_deploysubdir, dtb_file_name)
711 # External devicetrees created by devicetree.bbclass are in a subfolder and have priority
712 dtb_path_ext = os.path.join(deploy_dir_image, "devicetree", dtb_file_name)
713 if os.path.exists(dtb_path_ext):
714 dtb_path = dtb_path_ext
715 self._verify_fit_image_signature(uboot_tools_bindir, fitimage_path, dtb_path, section)
716 else:
717 # Image nodes always need a hash which gets indirectly signed by the config signature
718 hash_algo = values.get('Hash algo', None)
719 self.assertEqual(hash_algo, fit_hash_alg)
720 hash_value = values.get('Hash value', None)
721 self.assertEqual(len(hash_value), fit_hash_alg_len, 'Hash value for section %s not expected length' % section)
722 # Optionally, if FIT_SIGN_INDIVIDUAL = "1", the image nodes also have a signature (which is redundant but possible)
723 if fit_sign_individual == "1":
724 sign_algo = values.get('Sign algo', None)
725 req_sign_algo = "%s,%s:%s" % (fit_hash_alg, fit_sign_alg, uboot_sign_img_keyname)
726 self.assertEqual(sign_algo, req_sign_algo, 'Signature algorithm for %s not expected value' % section)
727 sign_value = values.get('Sign value', None)
728 self.assertEqual(len(sign_value), fit_sign_alg_len, 'Signature value for section %s not expected length' % section)
729
730 # Search for the string passed to mkimage in each signed section of the FIT image.
731 # mkimage supports adding a comment but does not seem to support reading it back.
732 a_comment = FitImageTestCase._get_uboot_mkimage_sign_args(bb_vars['UBOOT_MKIMAGE_SIGN_ARGS'])
733 self.logger.debug("a_comment: %s" % a_comment)
734 if a_comment:
735 found_comments = FitImageTestCase._find_string_in_bin_file(fitimage_path, a_comment)
736 self.assertEqual(found_comments, num_signatures, "Expected %d signed and commented (%s) sections in the fitImage." %
737 (num_signatures, a_comment))
738
739class KernelFitImageRecipeTests(KernelFitImageBase):
740 """Test cases for the kernel-fitimage bbclass"""
12 741
13 def test_fit_image(self): 742 def test_fit_image(self):
14 """ 743 """
@@ -24,214 +753,230 @@ class FitImageTests(OESelftestTestCase):
24 Author: Usama Arif <usama.arif@arm.com> 753 Author: Usama Arif <usama.arif@arm.com>
25 """ 754 """
26 config = """ 755 config = """
27# Enable creation of fitImage
28KERNEL_IMAGETYPE = "Image" 756KERNEL_IMAGETYPE = "Image"
29KERNEL_IMAGETYPES += " fitImage "
30KERNEL_CLASSES = " kernel-fitimage "
31 757
32# RAM disk variables including load address and entrypoint for kernel and RAM disk 758# RAM disk variables including load address and entrypoint for kernel and RAM disk
33IMAGE_FSTYPES += "cpio.gz" 759IMAGE_FSTYPES += "cpio.gz"
34INITRAMFS_IMAGE = "core-image-minimal" 760INITRAMFS_IMAGE = "core-image-minimal"
761# core-image-minimal is used as initramfs here, drop the rootfs suffix
762IMAGE_NAME_SUFFIX:pn-core-image-minimal = ""
35UBOOT_RD_LOADADDRESS = "0x88000000" 763UBOOT_RD_LOADADDRESS = "0x88000000"
36UBOOT_RD_ENTRYPOINT = "0x88000000" 764UBOOT_RD_ENTRYPOINT = "0x88000000"
37UBOOT_LOADADDRESS = "0x80080000" 765UBOOT_LOADADDRESS = "0x80080000"
38UBOOT_ENTRYPOINT = "0x80080000" 766UBOOT_ENTRYPOINT = "0x80080000"
39FIT_DESC = "A model description" 767FIT_DESC = "A model description"
768FIT_CONF_PREFIX = "foo-"
40""" 769"""
770 config = self._config_add_kernel_classes(config)
41 self.write_config(config) 771 self.write_config(config)
772 bb_vars = self._fit_get_bb_vars()
773 self._test_fitimage(bb_vars)
42 774
43 # fitImage is created as part of linux recipe 775 def test_get_compatible_from_dtb(self):
44 bitbake("virtual/kernel") 776 """Test the oe.fitimage.get_compatible_from_dtb function
45
46 image_type = "core-image-minimal"
47 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
48 machine = get_bb_var('MACHINE')
49 fitimage_its_path = os.path.join(deploy_dir_image,
50 "fitImage-its-%s-%s-%s" % (image_type, machine, machine))
51 fitimage_path = os.path.join(deploy_dir_image,
52 "fitImage-%s-%s-%s" % (image_type, machine, machine))
53
54 self.assertTrue(os.path.exists(fitimage_its_path),
55 "%s image tree source doesn't exist" % (fitimage_its_path))
56 self.assertTrue(os.path.exists(fitimage_path),
57 "%s FIT image doesn't exist" % (fitimage_path))
58
59 # Check that the type, load address, entrypoint address and default
60 # values for kernel and ramdisk in Image Tree Source are as expected.
61 # The order of fields in the below array is important. Not all the
62 # fields are tested, only the key fields that wont vary between
63 # different architectures.
64 its_field_check = [
65 'description = "A model description";',
66 'type = "kernel";',
67 'load = <0x80080000>;',
68 'entry = <0x80080000>;',
69 'type = "ramdisk";',
70 'load = <0x88000000>;',
71 'entry = <0x88000000>;',
72 'default = "conf-1";',
73 'kernel = "kernel-1";',
74 'ramdisk = "ramdisk-1";'
75 ]
76 777
77 with open(fitimage_its_path) as its_file: 778 1. bitbake bbb-dtbs-as-ext
78 field_index = 0 779 2. Check if symlink_points_below returns the path to the DTB
79 for line in its_file: 780 3. Check if the expected compatible string is found by get_compatible_from_dtb()
80 if field_index == len(its_field_check): 781 """
81 break 782 DTB_RECIPE = "bbb-dtbs-as-ext"
82 if its_field_check[field_index] in line: 783 DTB_FILE = "am335x-bonegreen-ext.dtb"
83 field_index +=1 784 DTB_SYMLINK = "am335x-bonegreen-ext-alias.dtb"
785 DTBO_FILE = "BBORG_RELAY-00A2.dtbo"
786 EXPECTED_COMP = ["ti,am335x-bone-green", "ti,am335x-bone-black", "ti,am335x-bone", "ti,am33xx"]
84 787
85 if field_index != len(its_field_check): # if its equal, the test passed 788 config = """
86 self.assertTrue(field_index == len(its_field_check), 789DISTRO = "poky"
87 "Fields in Image Tree Source File %s did not match, error in finding %s" 790MACHINE = "beaglebone-yocto"
88 % (fitimage_its_path, its_field_check[field_index])) 791"""
792 self.write_config(config)
793
794 # Provide the fdtget command called by get_compatible_from_dtb
795 dtc_bindir = FitImageTestCase._setup_native('dtc-native')
796 fdtget_path = os.path.join(dtc_bindir, "fdtget")
797 self.assertExists(fdtget_path)
798
799 # bitbake an external DTB with a symlink to it and a DTB overlay
800 bitbake(DTB_RECIPE)
801 deploy_dir_image = get_bb_var("DEPLOY_DIR_IMAGE", DTB_RECIPE)
802 devicetree_dir = os.path.join(deploy_dir_image, "devicetree")
803 dtb_path = os.path.join(devicetree_dir, DTB_FILE)
804 dtb_alias_path = os.path.join(devicetree_dir, DTB_SYMLINK)
805 dtbo_file = os.path.join(devicetree_dir, DTBO_FILE)
806 self.assertExists(dtb_path)
807 self.assertExists(dtb_alias_path)
808 self.assertExists(dtbo_file)
809
810 # Test symlink_points_below
811 linked_dtb = oe.fitimage.symlink_points_below(dtb_alias_path, devicetree_dir)
812 self.assertEqual(linked_dtb, DTB_FILE)
813
814 # Check if get_compatible_from_dtb finds the expected compatible string in the DTBs
815 comp = oe.fitimage.get_compatible_from_dtb(dtb_path, fdtget_path)
816 self.assertEqual(comp, EXPECTED_COMP)
817 comp_alias = oe.fitimage.get_compatible_from_dtb(dtb_alias_path, fdtget_path)
818 self.assertEqual(comp_alias, EXPECTED_COMP)
819 # The alias is a symlink, therefore the compatible strings are equal
820 self.assertEqual(comp_alias, comp)
89 821
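# Illustration (not in the patch): get_compatible_from_dtb() wraps an
# fdtget invocation equivalent to
#     fdtget am335x-bonegreen-ext.dtb / compatible
# which prints the root node's compatible strings, e.g.
#     ti,am335x-bone-green ti,am335x-bone-black ti,am335x-bone ti,am33xx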
822 def test_fit_image_ext_dtb_dtbo(self):
823 """
824 Summary: Check if FIT image and Image Tree Source (its) are created correctly.
825 Expected: 1) its and FIT image are built successfully
826 2) The its file contains also the external devicetree overlay
827 3) Dumping the FIT image indicates the devicetree overlay
828 """
829 config = """
830# Enable creation of fitImage
831MACHINE = "beaglebone-yocto"
832# Add a devicetree overlay which does not need kernel sources
833PREFERRED_PROVIDER_virtual/dtb = "bbb-dtbs-as-ext"
834"""
835 config = self._config_add_kernel_classes(config)
836 config = self._config_add_uboot_env(config)
837 self.write_config(config)
838 bb_vars = self._fit_get_bb_vars()
839 self._test_fitimage(bb_vars)
840
841
842 def test_sign_fit_image_configurations(self):
843 """
844 Summary: Check if FIT image and Image Tree Source (its) are created
845 and the configuration nodes are signed correctly.
846 Expected: 1) its and FIT image are built successfully
847 2) Scanning the its file indicates signing is enabled
848 as requested by UBOOT_SIGN_ENABLE
849 3) Dumping the FIT image indicates signature values
850 are present (only for the configuration nodes as
851 FIT_SIGN_INDIVIDUAL is disabled)
852 4) Verify the FIT image contains the comments passed via
853 UBOOT_MKIMAGE_SIGN_ARGS once per configuration node.
854 """
855 # Generate a configuration section which gets included into the local.conf file
856 config = """
857# Enable creation of fitImage
858MACHINE = "beaglebone-yocto"
859UBOOT_SIGN_ENABLE = "1"
860UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
861UBOOT_SIGN_KEYNAME = "dev"
862UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'"
863FIT_CONF_DEFAULT_DTB = "am335x-bonegreen.dtb"
864"""
865 config = self._config_add_kernel_classes(config)
866 config = self._config_add_uboot_env(config)
867 self.write_config(config)
868
869 # Retrieve some variables from bitbake
870 bb_vars = self._fit_get_bb_vars([
871 'FIT_KEY_GENRSA_ARGS',
872 'FIT_KEY_REQ_ARGS',
873 'FIT_KEY_SIGN_PKCS',
874 'FIT_SIGN_NUMBITS',
875 'UBOOT_SIGN_KEYDIR',
876 ])
877
878 self._gen_signing_key(bb_vars)
879 self._test_fitimage(bb_vars)
90 880
91 def test_sign_fit_image(self): 881 def test_sign_fit_image_individual(self):
92 """ 882 """
93 Summary: Check if FIT image and Image Tree Source (its) are created 883 Summary: Check if FIT image and Image Tree Source (its) are created
94 and signed correctly. 884 and all nodes are signed correctly.
95 Expected: 1) its and FIT image are built successfully 885 Expected: 1) its and FIT image are built successfully
96 2) Scanning the its file indicates signing is enabled 886 2) Scanning the its file indicates signing is enabled
97 as requested by UBOOT_SIGN_ENABLE (using keys generated 887 as requested by UBOOT_SIGN_ENABLE
98 via FIT_GENERATE_KEYS)
99 3) Dumping the FIT image indicates signature values 888 3) Dumping the FIT image indicates signature values
100 are present (including for images as enabled via 889 are present (including for images as enabled via
101 FIT_SIGN_INDIVIDUAL) 890 FIT_SIGN_INDIVIDUAL)
102 4) Examination of the do_assemble_fitimage runfile/logfile 891 This also implies that FIT_GENERATE_KEYS = "1" works.
103 indicate that UBOOT_MKIMAGE, UBOOT_MKIMAGE_SIGN and 892 4) Verify the FIT image contains the comments passed via
104 UBOOT_MKIMAGE_SIGN_ARGS are working as expected. 893 UBOOT_MKIMAGE_SIGN_ARGS once per image and per
894 configuration node.
895 Note: This test is mostly for backward compatibility.
896 The recommended approach is to sign the configuration nodes
897 which also include the hashes of all the images. Signing
898 all the images individually is therefore redundant.
105 Product: oe-core 899 Product: oe-core
106 Author: Paul Eggleton <paul.eggleton@microsoft.com> based upon 900 Author: Paul Eggleton <paul.eggleton@microsoft.com> based upon
107 work by Usama Arif <usama.arif@arm.com> 901 work by Usama Arif <usama.arif@arm.com>
108 """ 902 """
903 # Generate a configuration section which gets included into the local.conf file
109 config = """ 904 config = """
110# Enable creation of fitImage 905# Enable creation of fitImage
111MACHINE = "beaglebone-yocto" 906MACHINE = "beaglebone-yocto"
112KERNEL_IMAGETYPES += " fitImage "
113KERNEL_CLASSES = " kernel-fitimage test-mkimage-wrapper "
114UBOOT_SIGN_ENABLE = "1" 907UBOOT_SIGN_ENABLE = "1"
115FIT_GENERATE_KEYS = "1" 908FIT_GENERATE_KEYS = "1"
116UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys" 909UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
117UBOOT_SIGN_KEYNAME = "oe-selftest" 910UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest"
911UBOOT_SIGN_KEYNAME = "cfg-oe-selftest"
118FIT_SIGN_INDIVIDUAL = "1" 912FIT_SIGN_INDIVIDUAL = "1"
119UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'" 913UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'"
120""" 914"""
915 config = self._config_add_kernel_classes(config)
916 config = self._config_add_uboot_env(config)
121 self.write_config(config) 917 self.write_config(config)
918 bb_vars = self._fit_get_bb_vars()
122 919
123 # fitImage is created as part of linux recipe 920 # Ensure new keys are generated and FIT_GENERATE_KEYS = "1" is tested
124 bitbake("virtual/kernel") 921 bitbake("kernel-signing-keys-native -c compile -f")
125
126 image_type = "core-image-minimal"
127 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
128 machine = get_bb_var('MACHINE')
129 fitimage_its_path = os.path.join(deploy_dir_image,
130 "fitImage-its-%s" % (machine,))
131 fitimage_path = os.path.join(deploy_dir_image,
132 "fitImage-%s.bin" % (machine,))
133
134 self.assertTrue(os.path.exists(fitimage_its_path),
135 "%s image tree source doesn't exist" % (fitimage_its_path))
136 self.assertTrue(os.path.exists(fitimage_path),
137 "%s FIT image doesn't exist" % (fitimage_path))
138
139 req_itspaths = [
140 ['/', 'images', 'kernel-1'],
141 ['/', 'images', 'kernel-1', 'signature-1'],
142 ['/', 'images', 'fdt-am335x-boneblack.dtb'],
143 ['/', 'images', 'fdt-am335x-boneblack.dtb', 'signature-1'],
144 ['/', 'configurations', 'conf-am335x-boneblack.dtb'],
145 ['/', 'configurations', 'conf-am335x-boneblack.dtb', 'signature-1'],
146 ]
147 922
148 itspath = [] 923 self._test_fitimage(bb_vars)
149 itspaths = []
150 linect = 0
151 sigs = {}
152 with open(fitimage_its_path) as its_file:
153 linect += 1
154 for line in its_file:
155 line = line.strip()
156 if line.endswith('};'):
157 itspath.pop()
158 elif line.endswith('{'):
159 itspath.append(line[:-1].strip())
160 itspaths.append(itspath[:])
161 elif itspath and itspath[-1] == 'signature-1':
162 itsdotpath = '.'.join(itspath)
163 if not itsdotpath in sigs:
164 sigs[itsdotpath] = {}
165 if not '=' in line or not line.endswith(';'):
166 self.fail('Unexpected formatting in %s sigs section line %d:%s' % (fitimage_its_path, linect, line))
167 key, value = line.split('=', 1)
168 sigs[itsdotpath][key.rstrip()] = value.lstrip().rstrip(';')
169 924
170 for reqpath in req_itspaths: 925 def test_fit_image_sign_initramfs(self):
171 if not reqpath in itspaths: 926 """
172 self.fail('Missing section in its file: %s' % reqpath) 927 Summary: Verifies the content of the initramfs node in the FIT Image Tree Source (its)
928 The FIT settings are set by the test case.
929 The machine used is beaglebone-yocto.
930 Expected: 1. The ITS is generated with initramfs support
931 2. All the fields in the kernel node are as expected (matching the
932 conf settings)
933 3. The kernel is included in all the available configurations and
934 its hash is included in the configuration signature
173 935
174 reqsigvalues_image = { 936 Product: oe-core
175 'algo': '"sha256,rsa2048"', 937 Author: Abdellatif El Khlifi <abdellatif.elkhlifi@arm.com>
176 'key-name-hint': '"oe-selftest"', 938 """
177 }
178 reqsigvalues_config = {
179 'algo': '"sha256,rsa2048"',
180 'key-name-hint': '"oe-selftest"',
181 'sign-images': '"kernel", "fdt"',
182 }
183 939
184 for itspath, values in sigs.items():
185 if 'conf-' in itspath:
186 reqsigvalues = reqsigvalues_config
187 else:
188 reqsigvalues = reqsigvalues_image
189 for reqkey, reqvalue in reqsigvalues.items():
190 value = values.get(reqkey, None)
191 if value is None:
192 self.fail('Missing key "%s" in its file signature section %s' % (reqkey, itspath))
193 self.assertEqual(value, reqvalue)
194
195 # Dump the image to see if it really got signed
196 bitbake("u-boot-tools-native -c addto_recipe_sysroot")
197 result = runCmd('bitbake -e u-boot-tools-native | grep ^RECIPE_SYSROOT_NATIVE=')
198 recipe_sysroot_native = result.output.split('=')[1].strip('"')
199 dumpimage_path = os.path.join(recipe_sysroot_native, 'usr', 'bin', 'dumpimage')
200 result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path))
201 in_signed = None
202 signed_sections = {}
203 for line in result.output.splitlines():
204 if line.startswith((' Configuration', ' Image')):
205 in_signed = re.search('\((.*)\)', line).groups()[0]
206 elif re.match('^ *', line) in (' ', ''):
207 in_signed = None
208 elif in_signed:
209 if not in_signed in signed_sections:
210 signed_sections[in_signed] = {}
211 key, value = line.split(':', 1)
212 signed_sections[in_signed][key.strip()] = value.strip()
213 self.assertIn('kernel-1', signed_sections)
214 self.assertIn('fdt-am335x-boneblack.dtb', signed_sections)
215 self.assertIn('conf-am335x-boneblack.dtb', signed_sections)
216 for signed_section, values in signed_sections.items():
217 value = values.get('Sign algo', None)
218 self.assertEqual(value, 'sha256,rsa2048:oe-selftest', 'Signature algorithm for %s not expected value' % signed_section)
219 value = values.get('Sign value', None)
220 self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section)
221
222 # Check for UBOOT_MKIMAGE_SIGN_ARGS
223 result = runCmd('bitbake -e virtual/kernel | grep ^T=')
224 tempdir = result.output.split('=', 1)[1].strip().strip('"')
225 result = runCmd('grep "a smart comment" %s/run.do_assemble_fitimage' % tempdir, ignore_status=True)
226 self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN_ARGS value did not get used')
227
228 # Check for evidence of test-mkimage-wrapper class
229 result = runCmd('grep "### uboot-mkimage wrapper message" %s/log.do_assemble_fitimage' % tempdir, ignore_status=True)
230 self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE did not work')
231 result = runCmd('grep "### uboot-mkimage signing wrapper message" %s/log.do_assemble_fitimage' % tempdir, ignore_status=True)
232 self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN did not work')
233
234 def test_initramfs_bundle(self):
940 config = """
941DISTRO = "poky"
942MACHINE = "beaglebone-yocto"
943INITRAMFS_IMAGE = "core-image-minimal-initramfs"
944INITRAMFS_SCRIPTS = ""
945UBOOT_MACHINE = "am335x_evm_defconfig"
946UBOOT_SIGN_ENABLE = "1"
947UBOOT_SIGN_KEYNAME = "beaglebonekey"
948UBOOT_SIGN_KEYDIR ?= "${DEPLOY_DIR_IMAGE}"
949UBOOT_DTB_BINARY = "u-boot.dtb"
950UBOOT_ENTRYPOINT = "0x80000000"
951UBOOT_LOADADDRESS = "0x80000000"
952UBOOT_RD_LOADADDRESS = "0x88000000"
953UBOOT_RD_ENTRYPOINT = "0x88000000"
954UBOOT_DTB_LOADADDRESS = "0x82000000"
955UBOOT_ARCH = "arm"
956UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
957UBOOT_MKIMAGE_KERNEL_TYPE = "kernel"
958UBOOT_EXTLINUX = "0"
959KERNEL_IMAGETYPE_REPLACEMENT = "zImage"
960FIT_KERNEL_COMP_ALG = "none"
961FIT_HASH_ALG = "sha256"
962"""
963 config = self._config_add_kernel_classes(config)
964 config = self._config_add_uboot_env(config)
965 self.write_config(config)
966
967 # Retrieve some variables from bitbake
968 bb_vars = self._fit_get_bb_vars([
969 'FIT_KEY_GENRSA_ARGS',
970 'FIT_KEY_REQ_ARGS',
971 'FIT_KEY_SIGN_PKCS',
972 'FIT_SIGN_NUMBITS',
973 'UBOOT_SIGN_KEYDIR',
974 ])
975
976 self._gen_signing_key(bb_vars)
977 self._test_fitimage(bb_vars)
978
979 def test_fit_image_sign_initramfs_bundle(self):
235 """ 980 """
236 Summary: Verifies the content of the initramfs bundle node in the FIT Image Tree Source (its) 981 Summary: Verifies the content of the initramfs bundle node in the FIT Image Tree Source (its)
237 The FIT settings are set by the test case. 982 The FIT settings are set by the test case.
@@ -247,14 +992,12 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'"
247 """ 992 """
248 993
249 config = """ 994 config = """
250DISTRO="poky"
995DISTRO = "poky"
251MACHINE = "beaglebone-yocto" 996MACHINE = "beaglebone-yocto"
252INITRAMFS_IMAGE_BUNDLE = "1" 997INITRAMFS_IMAGE_BUNDLE = "1"
253INITRAMFS_IMAGE = "core-image-minimal-initramfs" 998INITRAMFS_IMAGE = "core-image-minimal-initramfs"
254INITRAMFS_SCRIPTS = "" 999INITRAMFS_SCRIPTS = ""
255UBOOT_MACHINE = "am335x_evm_defconfig" 1000UBOOT_MACHINE = "am335x_evm_defconfig"
256KERNEL_CLASSES = " kernel-fitimage "
257KERNEL_IMAGETYPES = "fitImage"
258UBOOT_SIGN_ENABLE = "1" 1001UBOOT_SIGN_ENABLE = "1"
259UBOOT_SIGN_KEYNAME = "beaglebonekey" 1002UBOOT_SIGN_KEYNAME = "beaglebonekey"
260UBOOT_SIGN_KEYDIR ?= "${DEPLOY_DIR_IMAGE}" 1003UBOOT_SIGN_KEYDIR ?= "${DEPLOY_DIR_IMAGE}"
@@ -264,102 +1007,725 @@ UBOOT_LOADADDRESS = "0x80000000"
264UBOOT_DTB_LOADADDRESS = "0x82000000" 1007UBOOT_DTB_LOADADDRESS = "0x82000000"
265UBOOT_ARCH = "arm" 1008UBOOT_ARCH = "arm"
266UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000" 1009UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
1010UBOOT_MKIMAGE_KERNEL_TYPE = "kernel"
267UBOOT_EXTLINUX = "0" 1011UBOOT_EXTLINUX = "0"
268FIT_GENERATE_KEYS = "1"
269KERNEL_IMAGETYPE_REPLACEMENT = "zImage" 1012KERNEL_IMAGETYPE_REPLACEMENT = "zImage"
1013FIT_KERNEL_COMP_ALG = "none"
270FIT_HASH_ALG = "sha256" 1014FIT_HASH_ALG = "sha256"
271""" 1015"""
1016 config = self._config_add_kernel_classes(config)
1017 config = self._config_add_uboot_env(config)
272 self.write_config(config) 1018 self.write_config(config)
1019 bb_vars = self._fit_get_bb_vars()
1020 self._gen_signing_key(bb_vars)
1021 self._test_fitimage(bb_vars)
1022
1023class FitImagePyTests(KernelFitImageBase):
1024 """Test cases for the fitimage.py module without calling bitbake"""
273 1025
274 # fitImage is created as part of linux recipe
275 bitbake("virtual/kernel")
276
277 image_type = get_bb_var('INITRAMFS_IMAGE')
278 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
279 machine = get_bb_var('MACHINE')
280 fitimage_its_path = os.path.join(deploy_dir_image,
281 "fitImage-its-%s-%s-%s" % (image_type, machine, machine))
282 fitimage_path = os.path.join(deploy_dir_image,"fitImage")
1026 def _test_fitimage_py(self, bb_vars_overrides=None):
1027 topdir = os.path.join(os.environ['BUILDDIR'])
1028 fitimage_its_path = os.path.join(topdir, self._testMethodName + '.its')
1029
1030 # Provide variables without calling bitbake
1031 bb_vars = {
1032 # image-fitimage.conf
1033 'FIT_DESC': "Kernel fitImage for a dummy distro",
1034 'FIT_HASH_ALG': "sha256",
1035 'FIT_SIGN_ALG': "rsa2048",
1036 'FIT_PAD_ALG': "pkcs-1.5",
1037 'FIT_GENERATE_KEYS': "0",
1038 'FIT_SIGN_NUMBITS': "2048",
1039 'FIT_KEY_GENRSA_ARGS': "-F4",
1040 'FIT_KEY_REQ_ARGS': "-batch -new",
1041 'FIT_KEY_SIGN_PKCS': "-x509",
1042 'FIT_SIGN_INDIVIDUAL': "0",
1043 'FIT_CONF_PREFIX': "conf-",
1044 'FIT_SUPPORTED_INITRAMFS_FSTYPES': "cpio.lz4 cpio.lzo cpio.lzma cpio.xz cpio.zst cpio.gz ext2.gz cpio",
1045 'FIT_CONF_DEFAULT_DTB': "",
1046 'FIT_ADDRESS_CELLS': "1",
1047 'FIT_UBOOT_ENV': "",
1048 # kernel.bbclass
1049 'UBOOT_ENTRYPOINT': "0x20008000",
1050 'UBOOT_LOADADDRESS': "0x20008000",
1051 'INITRAMFS_IMAGE': "",
1052 'INITRAMFS_IMAGE_BUNDLE': "",
1053 # kernel-uboot.bbclass
1054 'FIT_KERNEL_COMP_ALG': "gzip",
1055 'FIT_KERNEL_COMP_ALG_EXTENSION': ".gz",
1056 'UBOOT_MKIMAGE_KERNEL_TYPE': "kernel",
1057 # uboot-config.bbclass
1058 'UBOOT_MKIMAGE_DTCOPTS': "",
1059 'UBOOT_MKIMAGE': "uboot-mkimage",
1060 'UBOOT_MKIMAGE_SIGN': "uboot-mkimage",
1061 'UBOOT_MKIMAGE_SIGN_ARGS': "",
1062 'UBOOT_SIGN_ENABLE': "0",
1063 'UBOOT_SIGN_KEYDIR': None,
1064 'UBOOT_SIGN_KEYNAME': None,
1065 'UBOOT_SIGN_IMG_KEYNAME': None,
1066 # others
1067 'MACHINE': "qemux86-64",
1068 'UBOOT_ARCH': "x86",
1069 'HOST_PREFIX': "x86_64-poky-linux-"
1070 }
1071 if bb_vars_overrides:
1072 bb_vars.update(bb_vars_overrides)
283 1073
284 self.assertTrue(os.path.exists(fitimage_its_path),
285 "%s image tree source doesn't exist" % (fitimage_its_path))
286 self.assertTrue(os.path.exists(fitimage_path),
287 "%s FIT image doesn't exist" % (fitimage_path))
1074 root_node = oe.fitimage.ItsNodeRootKernel(
1075 bb_vars["FIT_DESC"], bb_vars["FIT_ADDRESS_CELLS"],
1076 bb_vars['HOST_PREFIX'], bb_vars['UBOOT_ARCH'], bb_vars["FIT_CONF_PREFIX"],
1077 oe.types.boolean(bb_vars['UBOOT_SIGN_ENABLE']), bb_vars["UBOOT_SIGN_KEYDIR"],
1078 bb_vars["UBOOT_MKIMAGE"], bb_vars["UBOOT_MKIMAGE_DTCOPTS"],
1079 bb_vars["UBOOT_MKIMAGE_SIGN"], bb_vars["UBOOT_MKIMAGE_SIGN_ARGS"],
1080 bb_vars['FIT_HASH_ALG'], bb_vars['FIT_SIGN_ALG'], bb_vars['FIT_PAD_ALG'],
1081 bb_vars['UBOOT_SIGN_KEYNAME'],
1082 oe.types.boolean(bb_vars['FIT_SIGN_INDIVIDUAL']), bb_vars['UBOOT_SIGN_IMG_KEYNAME']
1083 )
288 1084
289 kernel_load = str(get_bb_var('UBOOT_LOADADDRESS'))
290 kernel_entry = str(get_bb_var('UBOOT_ENTRYPOINT'))
291 initramfs_bundle_format = str(get_bb_var('KERNEL_IMAGETYPE_REPLACEMENT'))
292 uboot_arch = str(get_bb_var('UBOOT_ARCH'))
293 initramfs_bundle = "arch/" + uboot_arch + "/boot/" + initramfs_bundle_format + ".initramfs"
294 fit_hash_alg = str(get_bb_var('FIT_HASH_ALG'))
1085 root_node.fitimage_emit_section_kernel("kernel-1", "linux.bin", "none",
1086 bb_vars.get('UBOOT_LOADADDRESS'), bb_vars.get('UBOOT_ENTRYPOINT'),
1087 bb_vars.get('UBOOT_MKIMAGE_KERNEL_TYPE'), bb_vars.get("UBOOT_ENTRYSYMBOL")
1088 )
295 1089
296 its_file = open(fitimage_its_path)
1090 dtb_files, _ = FitImageTestCase._get_dtb_files(bb_vars)
1091 for dtb in dtb_files:
1092 root_node.fitimage_emit_section_dtb(dtb, os.path.join("a-dir", dtb),
1093 bb_vars.get("UBOOT_DTB_LOADADDRESS"), bb_vars.get("UBOOT_DTBO_LOADADDRESS"))
297 1094
298 its_lines = [line.strip() for line in its_file.readlines()]
1095 if bb_vars.get('FIT_UBOOT_ENV'):
1096 root_node.fitimage_emit_section_boot_script(
1097 "bootscr-" + bb_vars['FIT_UBOOT_ENV'], bb_vars['FIT_UBOOT_ENV'])
299 1098
300 exp_node_lines = [
301 'kernel-1 {',
302 'description = "Linux kernel";',
303 'data = /incbin/("' + initramfs_bundle + '");',
304 'type = "kernel";',
305 'arch = "' + uboot_arch + '";',
306 'os = "linux";',
1099 if bb_vars['MACHINE'] == "qemux86-64": # Not really the right if
1100 root_node.fitimage_emit_section_setup("setup-1", "setup1.bin")
1101
1102 if bb_vars.get('INITRAMFS_IMAGE') and bb_vars.get("INITRAMFS_IMAGE_BUNDLE") != "1":
1103 root_node.fitimage_emit_section_ramdisk("ramdisk-1", "a-dir/a-initramfs-1",
1104 "core-image-minimal-initramfs",
1105 bb_vars.get("UBOOT_RD_LOADADDRESS"), bb_vars.get("UBOOT_RD_ENTRYPOINT"))
1106
1107 root_node.fitimage_emit_section_config(bb_vars['FIT_CONF_DEFAULT_DTB'])
1108 root_node.write_its_file(fitimage_its_path)
1109
1110 self.assertExists(fitimage_its_path, "%s image tree source doesn't exist" % (fitimage_its_path))
1111 self.logger.debug("Checking its: %s" % fitimage_its_path)
1112 self._check_its_file(bb_vars, fitimage_its_path)
1113
1114 def test_fitimage_py_default(self):
1115 self._test_fitimage_py()
1116
1117 def test_fitimage_py_default_dtb(self):
1118 bb_vars_overrides = {
1119 'KERNEL_DEVICETREE': "one.dtb two.dtb three.dtb",
1120 'FIT_CONF_DEFAULT_DTB': "two.dtb"
1121 }
1122 self._test_fitimage_py(bb_vars_overrides)
1123
1124
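Several tests in this file call _gen_signing_key(bb_vars) before _test_fitimage. The helper's body is outside this diff; judging by the FIT_KEY_GENRSA_ARGS, FIT_KEY_REQ_ARGS, FIT_KEY_SIGN_PKCS and FIT_SIGN_NUMBITS variables it requests, it plausibly drives openssl along these lines (the file paths, default key name and -subj value here are assumptions, not the helper's actual code):

    import os
    import subprocess

    def gen_signing_key_sketch(bb_vars, key_name="oe-selftest"):
        # Hypothetical re-implementation; the real _gen_signing_key may differ.
        keydir = bb_vars['UBOOT_SIGN_KEYDIR']
        os.makedirs(keydir, exist_ok=True)
        key = os.path.join(keydir, key_name + ".key")
        crt = os.path.join(keydir, key_name + ".crt")
        # Private key, e.g. "openssl genrsa -F4 -out <key> 2048"
        subprocess.run(["openssl", "genrsa", *bb_vars['FIT_KEY_GENRSA_ARGS'].split(),
                        "-out", key, bb_vars['FIT_SIGN_NUMBITS']], check=True)
        # Self-signed certificate, e.g. "openssl req -batch -new -x509 -key <key> -out <crt>"
        subprocess.run(["openssl", "req", *bb_vars['FIT_KEY_REQ_ARGS'].split(),
                        bb_vars['FIT_KEY_SIGN_PKCS'], "-key", key,
                        "-subj", "/CN=" + key_name, "-out", crt], check=True)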
1125class UBootFitImageTests(FitImageTestCase):
1126 """Test cases for the uboot-sign bbclass"""
1127
1128 BOOTLOADER_RECIPE = "virtual/bootloader"
1129
1130 def _fit_get_bb_vars(self, additional_vars=[]):
1131 """Get bb_vars as needed by _test_sign_fit_image
1132
1133 Call the get_bb_vars function once and get all variables needed by the test case.
1134 """
1135 internal_used = {
1136 'DEPLOY_DIR_IMAGE',
1137 'FIT_HASH_ALG',
1138 'FIT_KEY_GENRSA_ARGS',
1139 'FIT_KEY_REQ_ARGS',
1140 'FIT_KEY_SIGN_PKCS',
1141 'FIT_SIGN_ALG',
1142 'FIT_SIGN_INDIVIDUAL',
1143 'FIT_SIGN_NUMBITS',
1144 'MACHINE',
1145 'SPL_MKIMAGE_SIGN_ARGS',
1146 'SPL_SIGN_ENABLE',
1147 'SPL_SIGN_KEYNAME',
1148 'UBOOT_ARCH',
1149 'UBOOT_DTB_BINARY',
1150 'UBOOT_DTB_IMAGE',
1151 'UBOOT_FIT_ARM_TRUSTED_FIRMWARE_ENTRYPOINT',
1152 'UBOOT_FIT_ARM_TRUSTED_FIRMWARE_LOADADDRESS',
1153 'UBOOT_FIT_ARM_TRUSTED_FIRMWARE',
1154 'UBOOT_FIT_CONF_USER_LOADABLES',
1155 'UBOOT_FIT_DESC',
1156 'UBOOT_FIT_HASH_ALG',
1157 'UBOOT_FIT_SIGN_ALG',
1158 'UBOOT_FIT_TEE_ENTRYPOINT',
1159 'UBOOT_FIT_TEE_LOADADDRESS',
1160 'UBOOT_FIT_TEE',
1161 'UBOOT_FIT_UBOOT_ENTRYPOINT',
1162 'UBOOT_FIT_UBOOT_LOADADDRESS',
1163 'UBOOT_FIT_USER_SETTINGS',
1164 'UBOOT_FITIMAGE_ENABLE',
1165 'UBOOT_NODTB_BINARY',
1166 'UBOOT_SIGN_ENABLE',
1167 'UBOOT_SIGN_IMG_KEYNAME',
1168 'UBOOT_SIGN_KEYDIR',
1169 'UBOOT_SIGN_KEYNAME',
1170 }
1171 bb_vars = get_bb_vars(list(internal_used | set(additional_vars)), UBootFitImageTests.BOOTLOADER_RECIPE)
1172 self.logger.debug("bb_vars: %s" % pprint.pformat(bb_vars, indent=4))
1173 return bb_vars
1174
1175 def _bitbake_fit_image(self, bb_vars):
1176 """Bitbake the bootloader and return the paths to the its file and the FIT image"""
1177 bitbake(UBootFitImageTests.BOOTLOADER_RECIPE)
1178
1179 deploy_dir_image = bb_vars['DEPLOY_DIR_IMAGE']
1180 machine = bb_vars['MACHINE']
1181 fitimage_its_path = os.path.join(deploy_dir_image, "u-boot-its-%s" % machine)
1182 fitimage_path = os.path.join(deploy_dir_image, "u-boot-fitImage-%s" % machine)
1183 return (fitimage_its_path, fitimage_path)
1184
1185 def _get_req_its_paths(self, bb_vars):
1186 # image nodes
1187 images = [ 'uboot', 'fdt', ]
1188 if bb_vars['UBOOT_FIT_TEE'] == "1":
1189 images.append('tee')
1190 if bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE'] == "1":
1191 images.append('atf')
1192 # if bb_vars['UBOOT_FIT_USER_SETTINGS']:
1193
1194 # configuration nodes
1195 configurations = [ 'conf']
1196
1197 # Create a list of paths for all image and configuration nodes
1198 req_its_paths = []
1199 for image in images:
1200 req_its_paths.append(['/', 'images', image])
1201 if bb_vars['SPL_SIGN_ENABLE'] == "1":
1202 req_its_paths.append(['/', 'images', image, 'signature'])
1203 for configuration in configurations:
1204 req_its_paths.append(['/', 'configurations', configuration])
1205 return req_its_paths
1206
1207 def _get_req_its_fields(self, bb_vars):
1208 loadables = ["uboot"]
1209 its_field_check = [
1210 'description = "%s";' % bb_vars['UBOOT_FIT_DESC'],
1211 'description = "U-Boot image";',
1212 'data = /incbin/("%s");' % bb_vars['UBOOT_NODTB_BINARY'],
1213 'type = "standalone";',
1214 'os = "u-boot";',
1215 'arch = "%s";' % bb_vars['UBOOT_ARCH'],
1216 'compression = "none";',
1217 'load = <%s>;' % bb_vars['UBOOT_FIT_UBOOT_LOADADDRESS'],
1218 'entry = <%s>;' % bb_vars['UBOOT_FIT_UBOOT_ENTRYPOINT'],
1219 'description = "U-Boot FDT";',
1220 'data = /incbin/("%s");' % bb_vars['UBOOT_DTB_BINARY'],
1221 'type = "flat_dt";',
1222 'arch = "%s";' % bb_vars['UBOOT_ARCH'],
307 'compression = "none";', 1223 'compression = "none";',
308 'load = <' + kernel_load + '>;',
309 'entry = <' + kernel_entry + '>;',
310 'hash-1 {',
311 'algo = "' + fit_hash_alg +'";',
312 '};',
313 '};'
314 ] 1224 ]
1225 if bb_vars['UBOOT_FIT_TEE'] == "1":
1226 its_field_check += [
1227 'description = "Trusted Execution Environment";',
1228 'data = /incbin/("%s");' % bb_vars['UBOOT_FIT_TEE_IMAGE'],
1229 'type = "tee";',
1230 'arch = "%s";' % bb_vars['UBOOT_ARCH'],
1231 'os = "tee";',
1232 'load = <%s>;' % bb_vars['UBOOT_FIT_TEE_LOADADDRESS'],
1233 'entry = <%s>;' % bb_vars['UBOOT_FIT_TEE_ENTRYPOINT'],
1234 'compression = "none";',
1235 ]
1236 loadables.insert(0, "tee")
1237 if bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE'] == "1":
1238 its_field_check += [
1239 'description = "ARM Trusted Firmware";',
1240 'data = /incbin/("%s");' % bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE_IMAGE'],
1241 'type = "firmware";',
1242 'arch = "%s";' % bb_vars['UBOOT_ARCH'],
1243 'os = "arm-trusted-firmware";',
1244 'load = <%s>;' % bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE_LOADADDRESS'],
1245 'entry = <%s>;' % bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE_ENTRYPOINT'],
1246 'compression = "none";',
1247 ]
1248 loadables.insert(0, "atf")
1249 its_field_check += [
1250 'default = "conf";',
1251 'description = "Boot with signed U-Boot FIT";',
1252 'loadables = "%s";' % '", "'.join(loadables),
1253 'fdt = "fdt";',
1254 ]
1255 return its_field_check
315 1256
316 node_str = exp_node_lines[0]
1257 def _get_req_sigvalues_config(self, bb_vars):
1258 # Configurations are not signed by uboot-sign
1259 return {}
317 1260
318 test_passed = False
1261 def _get_req_sigvalues_image(self, bb_vars):
1262 if bb_vars['SPL_SIGN_ENABLE'] != "1":
1263 return {}
1264 req_sigvalues_image = {
1265 'algo': '"%s,%s"' % (bb_vars['UBOOT_FIT_HASH_ALG'], bb_vars['UBOOT_FIT_SIGN_ALG']),
1266 'key-name-hint': '"%s"' % bb_vars['SPL_SIGN_KEYNAME'],
1267 }
1268 return req_sigvalues_image
319 1269
320 print ("checking kernel node\n")
1270 def _get_req_sections(self, bb_vars):
1271 """Generate the expected output of dumpimage for beaglebone targets
321 1272
322 if node_str in its_lines:
323 node_start_idx = its_lines.index(node_str)
324 node = its_lines[node_start_idx:(node_start_idx + len(exp_node_lines))]
325 if node == exp_node_lines:
326 print("kernel node verified")
327 else:
328 self.assertTrue(test_passed == True,"kernel node does not match expectation")
329
330 rx_configs = re.compile("^conf-.*")
331 its_configs = list(filter(rx_configs.match, its_lines))
332
333 for cfg_str in its_configs:
334 cfg_start_idx = its_lines.index(cfg_str)
335 line_idx = cfg_start_idx + 2
336 node_end = False
337 while node_end == False:
338 if its_lines[line_idx] == "};" and its_lines[line_idx-1] == "};" :
339 node_end = True
340 line_idx = line_idx + 1
341
342 node = its_lines[cfg_start_idx:line_idx]
343 print("checking configuration " + cfg_str.rstrip(" {"))
344 rx_desc_line = re.compile("^description.*1 Linux kernel.*")
345 if len(list(filter(rx_desc_line.match, node))) != 1:
346 self.assertTrue(test_passed == True,"kernel keyword not found in the description line")
347 break
348 else:
349 print("kernel keyword found in the description line")
1273 The dict generated by this function is supposed to be compared against
1274 the dict which is generated by the _dump_fitimage function.
1275 """
1276 loadables = ['uboot']
1277 req_sections = {
1278 "uboot": {
1279 "Type": "Standalone Program",
1280 "Load Address": bb_vars['UBOOT_FIT_UBOOT_LOADADDRESS'],
1281 "Entry Point": bb_vars['UBOOT_FIT_UBOOT_ENTRYPOINT'],
1282 },
1283 "fdt": {
1284 "Type": "Flat Device Tree",
1285 }
1286 }
1287 if bb_vars['UBOOT_FIT_TEE'] == "1":
1288 loadables.insert(0, "tee")
1289 req_sections['tee'] = {
1290 "Type": "Trusted Execution Environment Image",
1291 # "Load Address": bb_vars['UBOOT_FIT_TEE_LOADADDRESS'], not printed by mkimage?
1292 # "Entry Point": bb_vars['UBOOT_FIT_TEE_ENTRYPOINT'], not printed by mkimage?
1293 }
1294 if bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE'] == "1":
1295 loadables.insert(0, "atf")
1296 req_sections['atf'] = {
1297 "Type": "Firmware",
1298 "Load Address": bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE_LOADADDRESS'],
1299 # "Entry Point": bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE_ENTRYPOINT'], not printed by mkimage?
1300 }
1301 req_sections["conf"] = {
1302 "Kernel": "unavailable",
1303 "FDT": "fdt",
1304 "Loadables": ','.join(loadables),
1305 }
350 1306
351 if 'kernel = "kernel-1";' not in node:
352 self.assertTrue(test_passed == True,"kernel line not found")
353 break
354 else:
355 print("kernel line found")
1307 # Add signing related properties if needed
1308 uboot_fit_hash_alg = bb_vars['UBOOT_FIT_HASH_ALG']
1309 uboot_fit_sign_alg = bb_vars['UBOOT_FIT_SIGN_ALG']
1310 spl_sign_enable = bb_vars['SPL_SIGN_ENABLE']
1311 spl_sign_keyname = bb_vars['SPL_SIGN_KEYNAME']
1312 num_signatures = 0
1313 if spl_sign_enable == "1":
1314 for section in req_sections:
1315 if not section.startswith('conf'):
1316 req_sections[section]['Sign algo'] = "%s,%s:%s" % \
1317 (uboot_fit_hash_alg, uboot_fit_sign_alg, spl_sign_keyname)
1318 num_signatures += 1
1319 return (req_sections, num_signatures)
1320
1321 def _check_signing(self, bb_vars, sections, num_signatures, uboot_tools_bindir, fitimage_path):
1322 if bb_vars['UBOOT_FITIMAGE_ENABLE'] == '1' and bb_vars['SPL_SIGN_ENABLE'] == "1":
1323 self.logger.debug("Verifying signatures in the FIT image")
1324 else:
1325 self.logger.debug("FIT image is not signed. Signature verification is not needed.")
1326 return
356 1327
357 rx_sign_line = re.compile("^sign-images.*kernel.*")
358 if len(list(filter(rx_sign_line.match, node))) != 1:
359 self.assertTrue(test_passed == True,"kernel hash not signed")
360 break
1328 uboot_fit_hash_alg = bb_vars['UBOOT_FIT_HASH_ALG']
1329 uboot_fit_sign_alg = bb_vars['UBOOT_FIT_SIGN_ALG']
1330 spl_sign_keyname = bb_vars['SPL_SIGN_KEYNAME']
1331 fit_sign_alg_len = FitImageTestCase.MKIMAGE_SIGNATURE_LENGTHS[uboot_fit_sign_alg]
1332 for section, values in sections.items():
1333 # Configuration nodes are always signed with UBOOT_SIGN_KEYNAME (if UBOOT_SIGN_ENABLE = "1")
1334 if section.startswith("conf"):
1335 # uboot-sign does not sign configuration nodes
1336 pass
361 else: 1337 else:
362 print("kernel hash signed")
1338 # uboot-sign does not add hash nodes, only image signatures
1339 sign_algo = values.get('Sign algo', None)
1340 req_sign_algo = "%s,%s:%s" % (uboot_fit_hash_alg, uboot_fit_sign_alg, spl_sign_keyname)
1341 self.assertEqual(sign_algo, req_sign_algo, 'Signature algorithm for %s not expected value' % section)
1342 sign_value = values.get('Sign value', None)
1343 self.assertEqual(len(sign_value), fit_sign_alg_len, 'Signature value for section %s not expected length' % section)
1344
1345 # Search for the string passed to mkimage in each signed section of the FIT image.
1346 # Looks like mkimage supports adding a comment but does not support reading it back.
1347 a_comment = FitImageTestCase._get_uboot_mkimage_sign_args(bb_vars['SPL_MKIMAGE_SIGN_ARGS'])
1348 self.logger.debug("a_comment: %s" % a_comment)
1349 if a_comment:
1350 found_comments = FitImageTestCase._find_string_in_bin_file(fitimage_path, a_comment)
1351 self.assertEqual(found_comments, num_signatures, "Expected %d signed and commented (%s) sections in the fitImage." %
1352 (num_signatures, a_comment))
1353
1354 def _check_kernel_dtb(self, bb_vars):
1355 """
1356 Check if the device-tree from U-Boot has the kernel public key(s).
1357
1358 The concat_dtb function of the uboot-sign.bbclass injects the public keys
1359 which are required for verifying the kernel at run-time into the DTB from
1360 U-Boot. The following example is from a build with FIT_SIGN_INDIVIDUAL
1361 set to "1". If it is set to "0" the key-the-kernel-image-key node is not
1362 present.
1363 / {
1364 ...
1365 signature {
1366 key-the-kernel-image-key {
1367 required = "image";
1368 algo = "sha256,rsa2048";
1369 ...
1370 };
1371 key-the-kernel-config-key {
1372 required = "conf";
1373 algo = "sha256,rsa2048";
1374 ...
1375 };
1376 };
1377 """
1378 # Setup u-boot-tools-native
1379 dtc_bindir = FitImageTestCase._setup_native('dtc-native')
1380
1381 # Check if 1 or 2 signature sections are in the DTB.
1382 uboot_dtb_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], bb_vars['UBOOT_DTB_IMAGE'])
1383 algo = "%s,%s" % (bb_vars['FIT_HASH_ALG'], bb_vars['FIT_SIGN_ALG'])
1384 if bb_vars['FIT_SIGN_INDIVIDUAL'] == "1":
1385 uboot_sign_img_keyname = bb_vars['UBOOT_SIGN_IMG_KEYNAME']
1386 key_dtb_path = "/signature/key-" + uboot_sign_img_keyname
1387 self._verify_dtb_property(dtc_bindir, uboot_dtb_path, key_dtb_path, "required", "image")
1388 self._verify_dtb_property(dtc_bindir, uboot_dtb_path, key_dtb_path, "algo", algo)
1389 self._verify_dtb_property(dtc_bindir, uboot_dtb_path, key_dtb_path, "key-name-hint", uboot_sign_img_keyname)
1390
1391 uboot_sign_keyname = bb_vars['UBOOT_SIGN_KEYNAME']
1392 key_dtb_path = "/signature/key-" + uboot_sign_keyname
1393 self._verify_dtb_property(dtc_bindir, uboot_dtb_path, key_dtb_path, "required", "conf")
1394 self._verify_dtb_property(dtc_bindir, uboot_dtb_path, key_dtb_path, "algo", algo)
1395 self._verify_dtb_property(dtc_bindir, uboot_dtb_path, key_dtb_path, "key-name-hint", uboot_sign_keyname)
1396
1397
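_verify_dtb_property itself is outside this hunk; given that it receives the dtc-native bindir from _setup_native, one plausible shape is a thin wrapper around fdtget, which ships with dtc (the helper name, assertion style and example file name below are assumptions):

    import os
    import subprocess

    def verify_dtb_property_sketch(dtc_bindir, dtb_path, node_path, prop, expected):
        # Hypothetical check using fdtget; the real helper may parse dtc output instead.
        fdtget = os.path.join(dtc_bindir, "fdtget")
        result = subprocess.run([fdtget, dtb_path, node_path, prop],
                                capture_output=True, text=True, check=True)
        actual = result.stdout.strip()
        assert actual == expected, "%s %s: got %r, expected %r" % (node_path, prop, actual, expected)

    # e.g. verify_dtb_property_sketch(bindir, "u-boot.dtb",
    #          "/signature/key-the-kernel-config-key", "required", "conf")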
1398 def test_uboot_fit_image(self):
1399 """
1400 Summary: Check if Uboot FIT image and Image Tree Source
1401 (its) are built and the Image Tree Source has the
1402 correct fields.
1403 Expected: 1. u-boot-fitImage and u-boot-its can be built
1404 2. The type, load address, entrypoint address and
1405 default values of U-boot image are correct in the
1406 Image Tree Source. Not all the fields are tested,
1407 only the key fields that won't vary between
1408 different architectures.
1409 Product: oe-core
1410 Author: Klaus Heinrich Kiwi <klaus@linux.vnet.ibm.com>
1411 based on work by Usama Arif <usama.arif@arm.com>
1412 """
1413 config = """
1414# We need at least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
1415MACHINE = "qemuarm"
1416UBOOT_MACHINE = "am57xx_evm_defconfig"
1417SPL_BINARY = "MLO"
1418
1419# Enable creation of the U-Boot fitImage
1420UBOOT_FITIMAGE_ENABLE = "1"
1421
1422# (U-boot) fitImage properties
1423UBOOT_LOADADDRESS = "0x80080000"
1424UBOOT_ENTRYPOINT = "0x80080000"
1425UBOOT_FIT_DESC = "A model description"
1426"""
1427 self.write_config(config)
1428 bb_vars = self._fit_get_bb_vars()
1429 self._test_fitimage(bb_vars)
1430
1431
1432 def test_sign_standalone_uboot_fit_image(self):
1433 """
1434 Summary: Check if U-Boot FIT image and Image Tree Source (its) are
1435 created and signed correctly for the scenario where only
1436 the U-Boot proper fitImage is being created and signed.
1437 Expected: 1) U-Boot its and FIT image are built successfully
1438 2) Scanning the its file indicates signing is enabled
1439 as requested by SPL_SIGN_ENABLE (using keys generated
1440 via UBOOT_FIT_GENERATE_KEYS)
1441 3) Dumping the FIT image indicates signature values
1442 are present
1443 4) Examination of the do_uboot_assemble_fitimage
1444 runfile/logfile indicate that UBOOT_MKIMAGE, UBOOT_MKIMAGE_SIGN
1445 and SPL_MKIMAGE_SIGN_ARGS are working as expected.
1446 Product: oe-core
1447 Author: Klaus Heinrich Kiwi <klaus@linux.vnet.ibm.com> based upon
1448 work by Paul Eggleton <paul.eggleton@microsoft.com> and
1449 Usama Arif <usama.arif@arm.com>
1450 """
1451 config = """
1452# There's no U-boot defconfig with CONFIG_FIT_SIGNATURE yet, so we need at
1453# least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
1454MACHINE = "qemuarm"
1455UBOOT_MACHINE = "am57xx_evm_defconfig"
1456SPL_BINARY = "MLO"
1457# Enable creation and signing of the U-Boot fitImage
1458UBOOT_FITIMAGE_ENABLE = "1"
1459SPL_SIGN_ENABLE = "1"
1460SPL_SIGN_KEYNAME = "spl-oe-selftest"
1461SPL_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
1462UBOOT_DTB_BINARY = "u-boot.dtb"
1463UBOOT_ENTRYPOINT = "0x80000000"
1464UBOOT_LOADADDRESS = "0x80000000"
1465UBOOT_DTB_LOADADDRESS = "0x82000000"
1466UBOOT_ARCH = "arm"
1467SPL_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
1468SPL_MKIMAGE_SIGN_ARGS = "-c 'a smart U-Boot comment'"
1469UBOOT_EXTLINUX = "0"
1470UBOOT_FIT_GENERATE_KEYS = "1"
1471UBOOT_FIT_HASH_ALG = "sha256"
1472"""
1473 self.write_config(config)
1474 bb_vars = self._fit_get_bb_vars()
1475 self._test_fitimage(bb_vars)
1476
1477
1478 def test_sign_cascaded_uboot_fit_image(self):
1479 """
1480 Summary: Check if U-Boot FIT image and Image Tree Source (its) are
1481 created and signed correctly for the scenario where both
1482 U-Boot proper and Kernel fitImages are being created and
1483 signed.
1484 Expected: 1) U-Boot its and FIT image are built successfully
1485 2) Scanning the its file indicates signing is enabled
1486 as requested by SPL_SIGN_ENABLE (using keys generated
1487 via UBOOT_FIT_GENERATE_KEYS)
1488 3) Dumping the FIT image indicates signature values
1489 are present
1490 4) Examination of the do_uboot_assemble_fitimage that
1491 UBOOT_MKIMAGE, UBOOT_MKIMAGE_SIGN and SPL_MKIMAGE_SIGN_ARGS
1492 are working as expected.
1493 Product: oe-core
1494 Author: Klaus Heinrich Kiwi <klaus@linux.vnet.ibm.com> based upon
1495 work by Paul Eggleton <paul.eggleton@microsoft.com> and
1496 Usama Arif <usama.arif@arm.com>
1497 """
1498 config = """
1499# There's no U-boot defconfig with CONFIG_FIT_SIGNATURE yet, so we need at
1500# least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
1501MACHINE = "qemuarm"
1502UBOOT_MACHINE = "am57xx_evm_defconfig"
1503SPL_BINARY = "MLO"
1504# Enable creation and signing of the U-Boot fitImage
1505UBOOT_FITIMAGE_ENABLE = "1"
1506SPL_SIGN_ENABLE = "1"
1507SPL_SIGN_KEYNAME = "spl-cascaded-oe-selftest"
1508SPL_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
1509UBOOT_DTB_BINARY = "u-boot.dtb"
1510UBOOT_ENTRYPOINT = "0x80000000"
1511UBOOT_LOADADDRESS = "0x80000000"
1512UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
1513UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart cascaded U-Boot comment'"
1514UBOOT_DTB_LOADADDRESS = "0x82000000"
1515UBOOT_ARCH = "arm"
1516SPL_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
1517SPL_MKIMAGE_SIGN_ARGS = "-c 'a smart cascaded U-Boot comment'"
1518UBOOT_EXTLINUX = "0"
1519UBOOT_FIT_GENERATE_KEYS = "1"
1520UBOOT_FIT_HASH_ALG = "sha256"
1521UBOOT_SIGN_ENABLE = "1"
1522UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
1523UBOOT_SIGN_KEYNAME = "cfg-oe-selftest"
1524"""
1525 self.write_config(config)
1526 bb_vars = self._fit_get_bb_vars()
1527
1528 self._gen_signing_key(bb_vars)
1529 self._test_fitimage(bb_vars)
1530 self._check_kernel_dtb(bb_vars)
1531
1532 def test_uboot_atf_tee_fit_image(self):
1533 """
1534 Summary: Check if U-boot FIT image and Image Tree Source
1535 (its) are built and the Image Tree Source has the
1536 correct fields.
1537 Expected: 1. Create atf and tee dummy images
1538 2. Both u-boot-fitImage and u-boot-its can be built
1539 3. The os, load address, entrypoint address and
1540 default values of U-boot, ATF and TEE images are
1541 correct in the Image Tree Source. Not all the
1542 fields are tested, only the key fields that won't
1543 vary between different architectures.
1544 Product: oe-core
1545 Author: Jamin Lin <jamin_lin@aspeedtech.com>
1546 """
1547 config = """
1548# We need at least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
1549MACHINE = "qemuarm"
1550UBOOT_MACHINE = "am57xx_evm_defconfig"
1551SPL_BINARY = "MLO"
1552
1553# Enable creation of the U-Boot fitImage
1554UBOOT_FITIMAGE_ENABLE = "1"
1555
1556# (U-boot) fitImage properties
1557UBOOT_LOADADDRESS = "0x80080000"
1558UBOOT_ENTRYPOINT = "0x80080000"
1559UBOOT_FIT_DESC = "A model description"
1560
1561# Enable creation of the TEE fitImage
1562UBOOT_FIT_TEE = "1"
1563
1564# TEE fitImage properties
1565UBOOT_FIT_TEE_IMAGE = "${TOPDIR}/tee-dummy.bin"
1566UBOOT_FIT_TEE_LOADADDRESS = "0x80180000"
1567UBOOT_FIT_TEE_ENTRYPOINT = "0x80180000"
1568
1569# Enable creation of the ATF fitImage
1570UBOOT_FIT_ARM_TRUSTED_FIRMWARE = "1"
1571
1572# ATF fitImage properties
1573UBOOT_FIT_ARM_TRUSTED_FIRMWARE_IMAGE = "${TOPDIR}/atf-dummy.bin"
1574UBOOT_FIT_ARM_TRUSTED_FIRMWARE_LOADADDRESS = "0x80280000"
1575UBOOT_FIT_ARM_TRUSTED_FIRMWARE_ENTRYPOINT = "0x80280000"
1576"""
1577 self.write_config(config)
1578
1579 bb_vars = self._fit_get_bb_vars([
1580 'UBOOT_FIT_ARM_TRUSTED_FIRMWARE_IMAGE',
1581 'UBOOT_FIT_TEE_IMAGE',
1582 ])
1583
1584 # Create an ATF dummy image
1585 dummy_atf = os.path.join(self.builddir, bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE_IMAGE'])
1586 FitImageTestCase._gen_random_file(dummy_atf)
1587
1588 # Create a TEE dummy image
1589 dummy_tee = os.path.join(self.builddir, bb_vars['UBOOT_FIT_TEE_IMAGE'])
1590 FitImageTestCase._gen_random_file(dummy_tee)
1591
1592 self._test_fitimage(bb_vars)
1593
1594 def test_sign_standalone_uboot_atf_tee_fit_image(self):
1595 """
1596 Summary: Check if U-Boot FIT image and Image Tree Source (its) are
1597 created and signed correctly for the scenario where only
1598 the U-Boot proper fitImage is being created and signed.
1599 Expected: 1. Create atf and tee dummy images
1600 2. U-Boot its and FIT image are built successfully
1601 3. Scanning the its file indicates signing is enabled
1602 as requested by SPL_SIGN_ENABLE (using keys generated
1603 via UBOOT_FIT_GENERATE_KEYS)
1604 4. Dumping the FIT image indicates signature values
1605 are present
1606 5. Examination of the do_uboot_assemble_fitimage
1607 runfile/logfile indicate that UBOOT_MKIMAGE, UBOOT_MKIMAGE_SIGN
1608 and SPL_MKIMAGE_SIGN_ARGS are working as expected.
1609 Product: oe-core
1610 Author: Jamin Lin <jamin_lin@aspeedtech.com>
1611 """
1612 config = """
1613# There's no U-boot defconfig with CONFIG_FIT_SIGNATURE yet, so we need at
1614# least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
1615MACHINE = "qemuarm"
1616UBOOT_MACHINE = "am57xx_evm_defconfig"
1617SPL_BINARY = "MLO"
1618# Enable creation and signing of the U-Boot fitImage
1619UBOOT_FITIMAGE_ENABLE = "1"
1620SPL_SIGN_ENABLE = "1"
1621SPL_SIGN_KEYNAME = "spl-oe-selftest"
1622SPL_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
1623UBOOT_DTB_BINARY = "u-boot.dtb"
1624UBOOT_ENTRYPOINT = "0x80000000"
1625UBOOT_LOADADDRESS = "0x80000000"
1626UBOOT_ARCH = "arm"
1627SPL_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
1628SPL_MKIMAGE_SIGN_ARGS = "-c 'a smart U-Boot ATF TEE comment'"
1629UBOOT_EXTLINUX = "0"
1630UBOOT_FIT_GENERATE_KEYS = "1"
1631UBOOT_FIT_HASH_ALG = "sha256"
1632
1633# Enable creation of the TEE fitImage
1634UBOOT_FIT_TEE = "1"
1635
1636# TEE fitImage properties
1637UBOOT_FIT_TEE_IMAGE = "${TOPDIR}/tee-dummy.bin"
1638UBOOT_FIT_TEE_LOADADDRESS = "0x80180000"
1639UBOOT_FIT_TEE_ENTRYPOINT = "0x80180000"
1640
1641# Enable creation of the ATF fitImage
1642UBOOT_FIT_ARM_TRUSTED_FIRMWARE = "1"
1643
1644# ATF fitImage properties
1645UBOOT_FIT_ARM_TRUSTED_FIRMWARE_IMAGE = "${TOPDIR}/atf-dummy.bin"
1646UBOOT_FIT_ARM_TRUSTED_FIRMWARE_LOADADDRESS = "0x80280000"
1647UBOOT_FIT_ARM_TRUSTED_FIRMWARE_ENTRYPOINT = "0x80280000"
1648"""
1649 self.write_config(config)
1650
1651 bb_vars = self._fit_get_bb_vars([
1652 'UBOOT_FIT_ARM_TRUSTED_FIRMWARE_IMAGE',
1653 'UBOOT_FIT_TEE_IMAGE',
1654 ])
1655
1656 # Create an ATF dummy image
1657 dummy_atf = os.path.join(self.builddir, bb_vars['UBOOT_FIT_ARM_TRUSTED_FIRMWARE_IMAGE'])
1658 FitImageTestCase._gen_random_file(dummy_atf)
1659
1660 # Create a TEE dummy image
1661 dummy_tee = os.path.join(self.builddir, bb_vars['UBOOT_FIT_TEE_IMAGE'])
1662 FitImageTestCase._gen_random_file(dummy_tee)
1663
1664 self._test_fitimage(bb_vars)
1665
1666
1667 def test_sign_uboot_kernel_individual(self):
1668 """
1669 Summary: Check if the device-tree from U-Boot has two public keys
1670 for verifying the kernel FIT image created by the
1671 kernel-fitimage.bbclass included.
1672 This test sets: FIT_SIGN_INDIVIDUAL = "1"
1673 Expected: There must be two signature nodes. One is required for
1674 the individual image nodes, the other is required for the
1675 verification of the configuration section.
1676 """
1677 config = """
1678# Enable creation of fitImage
1679MACHINE = "beaglebone-yocto"
1680UBOOT_SIGN_ENABLE = "1"
1681UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
1682UBOOT_SIGN_KEYNAME = "the-kernel-config-key"
1683UBOOT_SIGN_IMG_KEYNAME = "the-kernel-image-key"
1684UBOOT_MKIMAGE_DTCOPTS="-I dts -O dtb -p 2000"
1685FIT_SIGN_INDIVIDUAL = "1"
1686"""
1687 self.write_config(config)
1688 bb_vars = self._fit_get_bb_vars()
1689 self._gen_signing_key(bb_vars)
1690
1691 bitbake(UBootFitImageTests.BOOTLOADER_RECIPE)
1692
1693 # Just check the DTB of u-boot since there is no u-boot FIT image
1694 self._check_kernel_dtb(bb_vars)
1695
1696
1697 def test_sign_uboot_fit_image_without_spl(self):
1698 """
1699 Summary: Check if U-Boot FIT image and Image Tree Source (its) are
1700 created and signed correctly for the scenario where only
1701 the U-Boot proper fitImage is being created and signed
1702 (no SPL included).
1703 Expected: 1) U-Boot its and FIT image are built successfully
1704 2) Scanning the its file indicates signing is enabled
1705 as requested by SPL_SIGN_ENABLE (using keys generated
1706 via UBOOT_FIT_GENERATE_KEYS)
1707 3) Dumping the FIT image indicates signature values
1708 are present
1709 4) Examination of the do_uboot_assemble_fitimage
1710 runfile/logfile indicate that UBOOT_MKIMAGE and
1711 UBOOT_MKIMAGE_SIGN are working as expected.
1712 Product: oe-core
1713 Author: Jamin Lin <jamin_lin@aspeedtech.com>
1714 """
1715 config = """
1716# There's no U-boot defconfig with CONFIG_FIT_SIGNATURE yet, so we need at
1717# least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
1718MACHINE = "qemuarm"
1719UBOOT_MACHINE = "am57xx_evm_defconfig"
1720# Enable creation and signing of the U-Boot fitImage (no SPL)
1721UBOOT_FITIMAGE_ENABLE = "1"
1722SPL_DTB_BINARY = ""
1723SPL_SIGN_ENABLE = "1"
1724SPL_SIGN_KEYNAME = "spl-oe-selftest"
1725SPL_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
1726UBOOT_FIT_GENERATE_KEYS = "1"
1727"""
1728 self.write_config(config)
1729 bb_vars = self._fit_get_bb_vars()
1730 self._test_fitimage(bb_vars)
363 1731
364 test_passed = True
365 self.assertTrue(test_passed == True,"Initramfs bundle test success")
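Both the removed assertions above and the shared _check_its_file helper that replaces them rely on the same technique for walking an Image Tree Source: track '{' and '};' nesting to maintain the current node path, and collect the 'key = value;' properties of each signature node. A condensed sketch of that parser (the function name and return shape are illustrative, not the helper's actual API):

    def parse_its_signatures(its_path):
        # Maintain the node path from brace nesting; collect the properties
        # of every signature-1 node, keyed by the dotted node path.
        path, sigs = [], {}
        with open(its_path) as its_file:
            for line in its_file:
                line = line.strip()
                if line.endswith('};'):
                    path.pop()
                elif line.endswith('{'):
                    path.append(line[:-1].strip())
                elif path and path[-1] == 'signature-1' and '=' in line:
                    key, value = line.split('=', 1)
                    sigs.setdefault('.'.join(path), {})[key.rstrip()] = value.strip().rstrip(';')
        return sigs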
diff --git a/meta/lib/oeqa/selftest/cases/gcc.py b/meta/lib/oeqa/selftest/cases/gcc.py
index 3efe15228f..1bda29a72b 100644
--- a/meta/lib/oeqa/selftest/cases/gcc.py
+++ b/meta/lib/oeqa/selftest/cases/gcc.py
@@ -1,9 +1,14 @@
1#
2# Copyright OpenEmbedded Contributors
3#
1# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
5#
2import os 6import os
7import time
3from oeqa.core.decorator import OETestTag 8from oeqa.core.decorator import OETestTag
4from oeqa.core.case import OEPTestResultTestCase 9from oeqa.core.case import OEPTestResultTestCase
5from oeqa.selftest.case import OESelftestTestCase 10from oeqa.selftest.case import OESelftestTestCase
6from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runqemu, Command
11from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runqemu
7 12
8def parse_values(content): 13def parse_values(content):
9 for i in content: 14 for i in content:
@@ -32,15 +37,20 @@ class GccSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
32 features = [] 37 features = []
33 features.append('MAKE_CHECK_TARGETS = "{0}"'.format(" ".join(targets))) 38 features.append('MAKE_CHECK_TARGETS = "{0}"'.format(" ".join(targets)))
34 if ssh is not None: 39 if ssh is not None:
35 features.append('TOOLCHAIN_TEST_TARGET = "ssh"')
40 features.append('TOOLCHAIN_TEST_TARGET = "linux-ssh"')
36 features.append('TOOLCHAIN_TEST_HOST = "{0}"'.format(ssh)) 41 features.append('TOOLCHAIN_TEST_HOST = "{0}"'.format(ssh))
37 features.append('TOOLCHAIN_TEST_HOST_USER = "root"') 42 features.append('TOOLCHAIN_TEST_HOST_USER = "root"')
38 features.append('TOOLCHAIN_TEST_HOST_PORT = "22"') 43 features.append('TOOLCHAIN_TEST_HOST_PORT = "22"')
39 self.write_config("\n".join(features)) 44 self.write_config("\n".join(features))
40 45
41 recipe = "gcc-runtime" 46 recipe = "gcc-runtime"
47
48 start_time = time.time()
49
42 bitbake("{} -c check".format(recipe)) 50 bitbake("{} -c check".format(recipe))
43 51
52 end_time = time.time()
53
44 bb_vars = get_bb_vars(["B", "TARGET_SYS"], recipe) 54 bb_vars = get_bb_vars(["B", "TARGET_SYS"], recipe)
45 builddir, target_sys = bb_vars["B"], bb_vars["TARGET_SYS"] 55 builddir, target_sys = bb_vars["B"], bb_vars["TARGET_SYS"]
46 56
@@ -54,7 +64,7 @@ class GccSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
54 64
55 ptestsuite = "gcc-{}".format(suite) if suite != "gcc" else suite 65 ptestsuite = "gcc-{}".format(suite) if suite != "gcc" else suite
56 ptestsuite = ptestsuite + "-user" if ssh is None else ptestsuite 66 ptestsuite = ptestsuite + "-user" if ssh is None else ptestsuite
57 self.ptest_section(ptestsuite, logfile = logpath)
67 self.ptest_section(ptestsuite, duration = int(end_time - start_time), logfile = logpath)
58 with open(sumspath, "r") as f: 68 with open(sumspath, "r") as f:
59 for test, result in parse_values(f): 69 for test, result in parse_values(f):
60 self.ptest_result(ptestsuite, test, result) 70 self.ptest_result(ptestsuite, test, result)
@@ -73,6 +83,8 @@ class GccSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
73 # validate that SSH is working 83 # validate that SSH is working
74 status, _ = qemu.run("uname") 84 status, _ = qemu.run("uname")
75 self.assertEqual(status, 0) 85 self.assertEqual(status, 0)
86 qemu.run('echo "MaxStartups 75:30:100" >> /etc/ssh/sshd_config')
87 qemu.run('service sshd restart')
76 88
77 return self.run_check(*args, ssh=qemu.ip, **kwargs) 89 return self.run_check(*args, ssh=qemu.ip, **kwargs)
78 90
@@ -114,37 +126,44 @@ class GccLibItmSelfTest(GccSelfTestBase):
114 self.run_check("libitm") 126 self.run_check("libitm")
115 127
116@OETestTag("toolchain-system") 128@OETestTag("toolchain-system")
129@OETestTag("runqemu")
117class GccCrossSelfTestSystemEmulated(GccSelfTestBase): 130class GccCrossSelfTestSystemEmulated(GccSelfTestBase):
118 def test_cross_gcc(self): 131 def test_cross_gcc(self):
119 self.run_check_emulated("gcc") 132 self.run_check_emulated("gcc")
120 133
121@OETestTag("toolchain-system") 134@OETestTag("toolchain-system")
135@OETestTag("runqemu")
122class GxxCrossSelfTestSystemEmulated(GccSelfTestBase): 136class GxxCrossSelfTestSystemEmulated(GccSelfTestBase):
123 def test_cross_gxx(self): 137 def test_cross_gxx(self):
124 self.run_check_emulated("g++") 138 self.run_check_emulated("g++")
125 139
126@OETestTag("toolchain-system") 140@OETestTag("toolchain-system")
141@OETestTag("runqemu")
127class GccLibAtomicSelfTestSystemEmulated(GccSelfTestBase): 142class GccLibAtomicSelfTestSystemEmulated(GccSelfTestBase):
128 def test_libatomic(self): 143 def test_libatomic(self):
129 self.run_check_emulated("libatomic") 144 self.run_check_emulated("libatomic")
130 145
131@OETestTag("toolchain-system") 146@OETestTag("toolchain-system")
147@OETestTag("runqemu")
132class GccLibGompSelfTestSystemEmulated(GccSelfTestBase): 148class GccLibGompSelfTestSystemEmulated(GccSelfTestBase):
133 def test_libgomp(self): 149 def test_libgomp(self):
134 self.run_check_emulated("libgomp") 150 self.run_check_emulated("libgomp")
135 151
136@OETestTag("toolchain-system") 152@OETestTag("toolchain-system")
153@OETestTag("runqemu")
137class GccLibStdCxxSelfTestSystemEmulated(GccSelfTestBase): 154class GccLibStdCxxSelfTestSystemEmulated(GccSelfTestBase):
138 def test_libstdcxx(self): 155 def test_libstdcxx(self):
139 self.run_check_emulated("libstdc++-v3") 156 self.run_check_emulated("libstdc++-v3")
140 157
141@OETestTag("toolchain-system") 158@OETestTag("toolchain-system")
159@OETestTag("runqemu")
142class GccLibSspSelfTestSystemEmulated(GccSelfTestBase): 160class GccLibSspSelfTestSystemEmulated(GccSelfTestBase):
143 def test_libssp(self): 161 def test_libssp(self):
144 self.check_skip("libssp") 162 self.check_skip("libssp")
145 self.run_check_emulated("libssp") 163 self.run_check_emulated("libssp")
146 164
147@OETestTag("toolchain-system") 165@OETestTag("toolchain-system")
166@OETestTag("runqemu")
148class GccLibItmSelfTestSystemEmulated(GccSelfTestBase): 167class GccLibItmSelfTestSystemEmulated(GccSelfTestBase):
149 def test_libitm(self): 168 def test_libitm(self):
150 self.check_skip("libitm") 169 self.check_skip("libitm")
diff --git a/meta/lib/oeqa/selftest/cases/gdbserver.py b/meta/lib/oeqa/selftest/cases/gdbserver.py
new file mode 100644
index 0000000000..b6b7c5c473
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/gdbserver.py
@@ -0,0 +1,67 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6import os
7import time
8import tempfile
9import shutil
10import concurrent.futures
11
12from oeqa.selftest.case import OESelftestTestCase
13from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runqemu, runCmd
14
15class GdbServerTest(OESelftestTestCase):
16 def test_gdb_server(self):
17 target_arch = self.td["TARGET_ARCH"]
18 target_sys = self.td["TARGET_SYS"]
19
20 features = """
21IMAGE_GEN_DEBUGFS = "1"
22IMAGE_FSTYPES_DEBUGFS = "tar.bz2"
23CORE_IMAGE_EXTRA_INSTALL = "gdbserver"
24 """
25 self.write_config(features)
26
27 gdb_recipe = "gdb-cross-" + target_arch
28 gdb_binary = target_sys + "-gdb"
29
30 bitbake("core-image-minimal %s:do_addto_recipe_sysroot" % gdb_recipe)
31
32 native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", gdb_recipe)
33 r = runCmd("%s --version" % gdb_binary, native_sysroot=native_sysroot, target_sys=target_sys)
34 self.assertEqual(r.status, 0)
35 self.assertIn("GNU gdb", r.output)
36 image = 'core-image-minimal'
37 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
38
39 with tempfile.TemporaryDirectory(prefix="debugfs-") as debugfs:
40 filename = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s-dbg.tar.bz2" % bb_vars['IMAGE_LINK_NAME'])
41 shutil.unpack_archive(filename, debugfs)
42 filename = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.tar.bz2" % bb_vars['IMAGE_LINK_NAME'])
43 shutil.unpack_archive(filename, debugfs)
44
45 with runqemu("core-image-minimal", runqemuparams="nographic") as qemu:
46 status, output = qemu.run_serial("kmod --help")
47 self.assertIn("modprobe", output)
48
49 with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
50 def run_gdb():
51 for _ in range(5):
52 time.sleep(2)
53 cmd = "%s --batch -ex 'set sysroot %s' -ex \"target extended-remote %s:9999\" -ex \"info line kmod_help\"" % (gdb_binary, debugfs, qemu.ip)
54 self.logger.warning("starting gdb %s" % cmd)
55 r = runCmd(cmd, native_sysroot=native_sysroot, target_sys=target_sys)
56 self.assertEqual(0, r.status)
57 line_re = r"Line \d+ of \".*\" starts at address 0x[0-9A-Fa-f]+ <kmod_help>"
58 self.assertRegex(r.output, line_re)
59 break
60 else:
61 self.fail("Timed out connecting to gdb")
62 future = executor.submit(run_gdb)
63
64 status, output = qemu.run_serial("gdbserver --once :9999 kmod --help")
65 self.assertEqual(status, 1)
66 # The future either returns None, or raises an exception
67 future.result()
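The worker thread above retries for up to ten seconds because gdbserver is started over the serial console and may not be listening yet. Outside the test harness the same handshake can be driven directly (the host, port and symbol values mirror the test's, reused here for illustration):

    import subprocess

    def connect_gdb_sketch(gdb_binary, sysroot, host, port=9999):
        # Drive a cross-gdb in batch mode against a waiting "gdbserver --once :9999".
        cmd = [gdb_binary, "--batch",
               "-ex", "set sysroot %s" % sysroot,
               "-ex", "target extended-remote %s:%d" % (host, port),
               "-ex", "info line kmod_help"]
        return subprocess.run(cmd, capture_output=True, text=True)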
diff --git a/meta/lib/oeqa/selftest/cases/gitarchivetests.py b/meta/lib/oeqa/selftest/cases/gitarchivetests.py
new file mode 100644
index 0000000000..71382089c1
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/gitarchivetests.py
@@ -0,0 +1,136 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8import sys
9basepath = os.path.abspath(os.path.dirname(__file__) + '/../../../../../')
10lib_path = basepath + '/scripts/lib'
11sys.path = sys.path + [lib_path]
12import oeqa.utils.gitarchive as ga
13from oeqa.utils.git import GitError
14import tempfile
15import shutil
16import scriptutils
17import logging
18from oeqa.selftest.case import OESelftestTestCase
19
20logger = scriptutils.logger_create('resulttool')
21
22def create_fake_repository(commit, tag_list=[], add_remote=True):
23 """ Create a testing git directory
24
25 Initialize a simple git repository with one initial commit, and as many
26 tags on this commit as listed in tag_list.
27 Returns both the git directory path and the gitarchive git object.
28 If commit is true, fake data will be committed, otherwise it will stay in the staging area.
29 If commit is true and tag_list is non-empty, all tags in tag_list will be
30 created on the initial commit.
31 A fake remote will also be added to make git ls-remote work.
32 """
33 fake_data_file = "fake_data.txt"
34 tempdir = tempfile.mkdtemp(prefix='fake_results.')
35 repo = ga.init_git_repo(tempdir, False, False, logger)
36 if add_remote:
37 repo.run_cmd(["remote", "add", "origin", "."])
38 with open(os.path.join(tempdir, fake_data_file), "w") as fake_data:
39 fake_data.write("Fake data")
40 if commit:
41 repo.run_cmd(["add", fake_data_file])
42 repo.run_cmd(["commit", "-m", "\"Add fake data\""])
43 for tag in tag_list:
44 repo.run_cmd(["tag", tag])
45
46 return tempdir, repo
47
48def delete_fake_repository(path):
49 shutil.rmtree(path)
50
51def tag_exists(git_obj, target_tag):
52 for tag in git_obj.run_cmd(["tag"]).splitlines():
53 if target_tag == tag:
54 return True
55 return False
56
57class GitArchiveTests(OESelftestTestCase):
58 TEST_BRANCH="main"
59 TEST_COMMIT="0f7d5df"
60 TEST_COMMIT_COUNT="42"
61
62 @classmethod
63 def setUpClass(cls):
64 super().setUpClass()
65 cls.log = logging.getLogger('gitarchivetests')
66 cls.log.setLevel(logging.DEBUG)
67
68 def test_create_first_test_tag(self):
69 path, git_obj = create_fake_repository(False)
70 keywords = {'commit': self.TEST_COMMIT, 'branch': self.TEST_BRANCH, "commit_count": self.TEST_COMMIT_COUNT}
71 target_tag = f"{self.TEST_BRANCH}/{self.TEST_COMMIT_COUNT}-g{self.TEST_COMMIT}/0"
72
73 ga.gitarchive(path, path, True, False,
74 "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}",
75 False, "{branch}/{commit_count}-g{commit}/{tag_number}",
76 'Test run #{tag_number} of {branch}:{commit}', '',
77 [], [], False, keywords, logger)
78 self.assertTrue(tag_exists(git_obj, target_tag), msg=f"Tag {target_tag} has not been created")
79 delete_fake_repository(path)
80
81 def test_create_second_test_tag(self):
82 first_tag = f"{self.TEST_BRANCH}/{self.TEST_COMMIT_COUNT}-g{self.TEST_COMMIT}/0"
83 second_tag = f"{self.TEST_BRANCH}/{self.TEST_COMMIT_COUNT}-g{self.TEST_COMMIT}/1"
84 keywords = {'commit': self.TEST_COMMIT, 'branch': self.TEST_BRANCH, "commit_count": self.TEST_COMMIT_COUNT}
85
86 path, git_obj = create_fake_repository(True, [first_tag])
87 ga.gitarchive(path, path, True, False,
88 "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}",
89 False, "{branch}/{commit_count}-g{commit}/{tag_number}",
90 'Test run #{tag_number} of {branch}:{commit}', '',
91 [], [], False, keywords, logger)
92 self.assertTrue(tag_exists(git_obj, second_tag), msg=f"Second tag {second_tag} has not been created")
93 delete_fake_repository(path)
94
95 def test_get_revs_on_branch(self):
96 fake_tags_list=["main/10-g0f7d5df/0", "main/10-g0f7d5df/1", "foo/20-g2468f5d/0"]
97 tag_name = "{branch}/{commit_number}-g{commit}/{tag_number}"
98
99 path, git_obj = create_fake_repository(True, fake_tags_list)
100 revs = ga.get_test_revs(logger, git_obj, tag_name, branch="main")
101 self.assertEqual(len(revs), 1)
102 self.assertEqual(revs[0].commit, "0f7d5df")
103 self.assertEqual(len(revs[0].tags), 2)
104 self.assertEqual(revs[0].tags, ['main/10-g0f7d5df/0', 'main/10-g0f7d5df/1'])
105 delete_fake_repository(path)
106
107 def test_get_tags_without_valid_remote(self):
108 url = 'git://git.yoctoproject.org/poky'
109 path, git_obj = create_fake_repository(False, None, False)
110
111 tags = ga.get_tags(git_obj, self.log, pattern="yocto-*", url=url)
112 """Test for some well established tags (released tags)"""
113 self.assertIn("yocto-4.0", tags)
114 self.assertIn("yocto-4.1", tags)
115 self.assertIn("yocto-4.2", tags)
116 delete_fake_repository(path)
117
118 def test_get_tags_with_only_local_tag(self):
119 fake_tags_list=["main/10-g0f7d5df/0", "main/10-g0f7d5df/1", "foo/20-g2468f5d/0"]
120 path, git_obj = create_fake_repository(True, fake_tags_list, False)
121
122 """No remote is configured and no url is passed: get_tags must fall
123 back to local tags
124 """
125 tags = ga.get_tags(git_obj, self.log)
126 self.assertCountEqual(tags, fake_tags_list)
127 delete_fake_repository(path)
128
129 def test_get_tags_without_valid_remote_and_wrong_url(self):
130 url = 'git://git.foo.org/bar'
131 path, git_obj = create_fake_repository(False, None, False)
132
133 """Test for some well established tags (released tags)"""
134 with self.assertRaises(GitError):
135 tags = ga.get_tags(git_obj, self.log, pattern="yocto-*", url=url)
136 delete_fake_repository(path)
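The tag layout these tests exercise comes from the template "{branch}/{commit_count}-g{commit}/{tag_number}", with the trailing counter bumped when the same commit is archived again. A quick expansion using the class constants:

    keywords = {"branch": "main", "commit": "0f7d5df", "commit_count": "42"}
    for tag_number in (0, 1):
        tag = "{branch}/{commit_count}-g{commit}/{tag_number}".format(
            tag_number=tag_number, **keywords)
        print(tag)  # main/42-g0f7d5df/0 on the first run, main/42-g0f7d5df/1 on a rerun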
diff --git a/meta/lib/oeqa/selftest/cases/glibc.py b/meta/lib/oeqa/selftest/cases/glibc.py
index c687f6ef93..bd56b2f6e7 100644
--- a/meta/lib/oeqa/selftest/cases/glibc.py
+++ b/meta/lib/oeqa/selftest/cases/glibc.py
@@ -1,10 +1,15 @@
1#
2# Copyright OpenEmbedded Contributors
3#
1# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
5#
2import os 6import os
7import time
3import contextlib 8import contextlib
4from oeqa.core.decorator import OETestTag 9from oeqa.core.decorator import OETestTag
5from oeqa.core.case import OEPTestResultTestCase 10from oeqa.core.case import OEPTestResultTestCase
6from oeqa.selftest.case import OESelftestTestCase 11from oeqa.selftest.case import OESelftestTestCase
7from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runqemu, Command
12from oeqa.utils.commands import bitbake, get_bb_var, runqemu
8from oeqa.utils.nfs import unfs_server 13from oeqa.utils.nfs import unfs_server
9 14
10def parse_values(content): 15def parse_values(content):
@@ -24,16 +29,20 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
24 features.append('TOOLCHAIN_TEST_HOST_USER = "root"') 29 features.append('TOOLCHAIN_TEST_HOST_USER = "root"')
25 features.append('TOOLCHAIN_TEST_HOST_PORT = "22"') 30 features.append('TOOLCHAIN_TEST_HOST_PORT = "22"')
26 # force single threaded test execution 31 # force single threaded test execution
27 features.append('EGLIBCPARALLELISM_task-check_pn-glibc-testsuite = "PARALLELMFLAGS="-j1""')
32 features.append('EGLIBCPARALLELISM:task-check:pn-glibc-testsuite = "PARALLELMFLAGS="-j1""')
28 self.write_config("\n".join(features)) 33 self.write_config("\n".join(features))
29 34
35 start_time = time.time()
36
30 bitbake("glibc-testsuite -c check") 37 bitbake("glibc-testsuite -c check")
31 38
39 end_time = time.time()
40
32 builddir = get_bb_var("B", "glibc-testsuite") 41 builddir = get_bb_var("B", "glibc-testsuite")
33 42
34 ptestsuite = "glibc-user" if ssh is None else "glibc" 43 ptestsuite = "glibc-user" if ssh is None else "glibc"
35 self.ptest_section(ptestsuite)
36 with open(os.path.join(builddir, "tests.sum"), "r") as f:
44 self.ptest_section(ptestsuite, duration = int(end_time - start_time))
45 with open(os.path.join(builddir, "tests.sum"), "r", errors='replace') as f:
37 for test, result in parse_values(f): 46 for test, result in parse_values(f):
38 self.ptest_result(ptestsuite, test, result) 47 self.ptest_result(ptestsuite, test, result)
39 48
@@ -41,7 +50,7 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
41 with contextlib.ExitStack() as s: 50 with contextlib.ExitStack() as s:
42 # use the base work dir, as the nfs mount, since the recipe directory may not exist 51 # use the base work dir, as the nfs mount, since the recipe directory may not exist
43 tmpdir = get_bb_var("BASE_WORKDIR") 52 tmpdir = get_bb_var("BASE_WORKDIR")
44 nfsport, mountport = s.enter_context(unfs_server(tmpdir))
53 nfsport, mountport = s.enter_context(unfs_server(tmpdir, udp = False))
45 54
46 # build core-image-minimal with required packages 55 # build core-image-minimal with required packages
47 default_installed_packages = [ 56 default_installed_packages = [
@@ -61,7 +70,7 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
61 bitbake("core-image-minimal") 70 bitbake("core-image-minimal")
62 71
63 # start runqemu 72 # start runqemu
64 qemu = s.enter_context(runqemu("core-image-minimal", runqemuparams = "nographic"))
73 qemu = s.enter_context(runqemu("core-image-minimal", runqemuparams = "nographic", qemuparams = "-m 1024"))
65 74
66 # validate that SSH is working 75 # validate that SSH is working
67 status, _ = qemu.run("uname") 76 status, _ = qemu.run("uname")
@@ -70,7 +79,7 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
70 # setup nfs mount 79 # setup nfs mount
71 if qemu.run("mkdir -p \"{0}\"".format(tmpdir))[0] != 0: 80 if qemu.run("mkdir -p \"{0}\"".format(tmpdir))[0] != 0:
72 raise Exception("Failed to setup NFS mount directory on target") 81 raise Exception("Failed to setup NFS mount directory on target")
73 mountcmd = "mount -o noac,nfsvers=3,port={0},udp,mountport={1} \"{2}:{3}\" \"{3}\"".format(nfsport, mountport, qemu.server_ip, tmpdir)
82 mountcmd = "mount -o noac,nfsvers=3,port={0},mountport={1} \"{2}:{3}\" \"{3}\"".format(nfsport, mountport, qemu.server_ip, tmpdir)
74 status, output = qemu.run(mountcmd) 83 status, output = qemu.run(mountcmd)
75 if status != 0: 84 if status != 0:
76 raise Exception("Failed to setup NFS mount on target ({})".format(repr(output))) 85 raise Exception("Failed to setup NFS mount on target ({})".format(repr(output)))
@@ -83,6 +92,7 @@ class GlibcSelfTest(GlibcSelfTestBase):
83 self.run_check() 92 self.run_check()
84 93
85@OETestTag("toolchain-system") 94@OETestTag("toolchain-system")
95@OETestTag("runqemu")
86class GlibcSelfTestSystemEmulated(GlibcSelfTestBase): 96class GlibcSelfTestSystemEmulated(GlibcSelfTestBase):
87 def test_glibc(self): 97 def test_glibc(self):
88 self.run_check_emulated() 98 self.run_check_emulated()
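The glibc changes switch the NFS transport from UDP to TCP in lockstep: unfs_server(tmpdir, udp = False) on the host side, and a target mount line with the udp option dropped (NFSv3 over TCP is then the default). The mount command the test assembles, shown standalone:

    def build_nfs_mountcmd(nfsport, mountport, server_ip, tmpdir):
        # Matches the test's mount options: TCP transport, attribute caching off.
        return 'mount -o noac,nfsvers=3,port={0},mountport={1} "{2}:{3}" "{3}"'.format(
            nfsport, mountport, server_ip, tmpdir)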
diff --git a/meta/lib/oeqa/selftest/cases/gotoolchain.py b/meta/lib/oeqa/selftest/cases/gotoolchain.py
index 4fc3605f42..ee2cf4b09a 100644
--- a/meta/lib/oeqa/selftest/cases/gotoolchain.py
+++ b/meta/lib/oeqa/selftest/cases/gotoolchain.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: MIT
 #
 
@@ -50,6 +52,9 @@ class oeGoToolchainSelfTest(OESelftestTestCase):
         cmd = "cd %s/src/%s/%s; " % (self.go_path, proj, name)
         cmd = cmd + ". %s; " % self.env_SDK
         cmd = cmd + "export GOPATH=%s; " % self.go_path
+        cmd = cmd + "export GOFLAGS=-modcacherw; "
+        cmd = cmd + "export CGO_ENABLED=1; "
+        cmd = cmd + "export GOPROXY=https://proxy.golang.org,direct; "
         cmd = cmd + "${CROSS_COMPILE}go %s" % gocmd
         return runCmd(cmd).status
 
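The three new exports serve distinct purposes: GOFLAGS=-modcacherw keeps the module cache user-writable so the test directory can be deleted afterwards, CGO_ENABLED=1 permits cgo builds against the SDK sysroot, and GOPROXY pins an explicit module proxy. A sketch of the command string being built; all paths and the project name here are hypothetical:

    go_path = "/tmp/metaide/go"
    env_sdk = "/opt/poky/environment-setup-core2-64-poky-linux"
    cmd = "cd %s/src/%s/%s; " % (go_path, "golang.org/x", "example")
    cmd += ". %s; " % env_sdk
    cmd += "export GOPATH=%s; " % go_path
    cmd += "export GOFLAGS=-modcacherw; "   # module cache stays user-writable
    cmd += "export CGO_ENABLED=1; "         # permit cgo against the SDK sysroot
    cmd += "export GOPROXY=https://proxy.golang.org,direct; "
    cmd += "${CROSS_COMPILE}go %s" % "build"
    print(cmd)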
diff --git a/meta/lib/oeqa/selftest/cases/image_typedep.py b/meta/lib/oeqa/selftest/cases/image_typedep.py
index 52e1080f13..17c98baf14 100644
--- a/meta/lib/oeqa/selftest/cases/image_typedep.py
+++ b/meta/lib/oeqa/selftest/cases/image_typedep.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: MIT
 #
 
@@ -9,7 +11,7 @@ from oeqa.utils.commands import bitbake
 
 class ImageTypeDepTests(OESelftestTestCase):
 
-    # Verify that when specifying a IMAGE_TYPEDEP_ of the form "foo.bar" that
+    # Verify that when specifying a IMAGE_TYPEDEP: of the form "foo.bar" that
     # the conversion type bar gets added as a dep as well
     def test_conversion_typedep_added(self):
 
@@ -22,7 +24,7 @@ LICENSE = "MIT"
 IMAGE_FSTYPES = "testfstype"
 
 IMAGE_TYPES_MASKED += "testfstype"
-IMAGE_TYPEDEP_testfstype = "tar.bz2"
+IMAGE_TYPEDEP:testfstype = "tar.bz2"
 
 inherit image
 
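The IMAGE_TYPEDEP_testfstype to IMAGE_TYPEDEP:testfstype rename follows BitBake's switch from underscore to colon as the override separator. A rough sketch of the mechanical rewrite, covering only the patterns seen in this series (the real convert-overrides script in oe-core handles many more variables and edge cases):

    import re

    def convert_override_syntax(line):
        # only the patterns appearing in this series; purely illustrative
        line = re.sub(r'\bIMAGE_TYPEDEP_(\w+)', r'IMAGE_TYPEDEP:\1', line)
        line = re.sub(r'_(append|prepend|remove)\b', r':\1', line)
        return line

    print(convert_override_syntax('IMAGE_TYPEDEP_testfstype = "tar.bz2"'))
    print(convert_override_syntax('SRC_URI_append = " file://appendtest.txt"'))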
diff --git a/meta/lib/oeqa/selftest/cases/imagefeatures.py b/meta/lib/oeqa/selftest/cases/imagefeatures.py
index 6723a8198f..94d01ba116 100644
--- a/meta/lib/oeqa/selftest/cases/imagefeatures.py
+++ b/meta/lib/oeqa/selftest/cases/imagefeatures.py
@@ -1,9 +1,12 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: MIT
 #
 
 from oeqa.selftest.case import OESelftestTestCase
-from oeqa.utils.commands import runCmd, bitbake, get_bb_var, runqemu
+from oeqa.core.decorator import OETestTag
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu
 from oeqa.utils.sshcontrol import SSHControl
 import glob
 import os
@@ -14,6 +17,7 @@ class ImageFeatures(OESelftestTestCase):
     test_user = 'tester'
     root_user = 'root'
 
+    @OETestTag("runqemu")
     def test_non_root_user_can_connect_via_ssh_without_password(self):
         """
         Summary: Check if non root user can connect via ssh without password
@@ -39,6 +43,7 @@ class ImageFeatures(OESelftestTestCase):
         status, output = ssh.run("true")
         self.assertEqual(status, 0, 'ssh to user %s failed with %s' % (user, output))
 
+    @OETestTag("runqemu")
     def test_all_users_can_connect_via_ssh_without_password(self):
         """
         Summary: Check if all users can connect via ssh without password
@@ -68,18 +73,6 @@ class ImageFeatures(OESelftestTestCase):
         self.assertEqual(status, 0, 'ssh to user tester failed with %s' % output)
 
 
-    def test_clutter_image_can_be_built(self):
-        """
-        Summary: Check if clutter image can be built
-        Expected: 1. core-image-clutter can be built
-        Product: oe-core
-        Author: Ionut Chisanovici <ionutx.chisanovici@intel.com>
-        AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
-        """
-
-        # Build a core-image-clutter
-        bitbake('core-image-clutter')
-
     def test_wayland_support_in_image(self):
         """
         Summary: Check Wayland support in image
@@ -109,12 +102,11 @@ class ImageFeatures(OESelftestTestCase):
         features = 'IMAGE_FSTYPES += " ext4 ext4.bmap ext4.bmap.gz"'
         self.write_config(features)
 
-        image_name = 'core-image-minimal'
-        bitbake(image_name)
+        image = 'core-image-minimal'
+        bitbake(image)
+        bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
 
-        deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
-        link_name = get_bb_var('IMAGE_LINK_NAME', image_name)
-        image_path = os.path.join(deploy_dir_image, "%s.ext4" % link_name)
+        image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.ext4" % bb_vars['IMAGE_LINK_NAME'])
         bmap_path = "%s.bmap" % image_path
         gzip_path = "%s.gz" % bmap_path
 
@@ -127,8 +119,8 @@ class ImageFeatures(OESelftestTestCase):
         image_stat = os.stat(image_path)
         self.assertGreater(image_stat.st_size, image_stat.st_blocks * 512)
 
-        # check if the resulting gzip is valid
-        self.assertTrue(runCmd('gzip -t %s' % gzip_path))
+        # check if the resulting gzip is valid, --force is needed in case gzip_path is a symlink
+        self.assertTrue(runCmd('gzip --test --force %s' % gzip_path))
 
     def test_hypervisor_fmts(self):
         """
@@ -143,17 +135,16 @@ class ImageFeatures(OESelftestTestCase):
         img_types = [ 'vmdk', 'vdi', 'qcow2' ]
         features = ""
         for itype in img_types:
-            features += 'IMAGE_FSTYPES += "wic.%s"\n' % itype
+            features += 'IMAGE_FSTYPES += "ext4.%s"\n' % itype
         self.write_config(features)
 
-        image_name = 'core-image-minimal'
-        bitbake(image_name)
+        image = 'core-image-minimal'
+        bitbake(image)
+        bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
 
-        deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
-        link_name = get_bb_var('IMAGE_LINK_NAME', image_name)
         for itype in img_types:
-            image_path = os.path.join(deploy_dir_image, "%s.wic.%s" %
-                                      (link_name, itype))
+            image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.ext4.%s" %
+                                      (bb_vars['IMAGE_LINK_NAME'], itype))
 
             # check if result image file is in deploy directory
             self.assertTrue(os.path.exists(image_path))
@@ -173,24 +164,22 @@ class ImageFeatures(OESelftestTestCase):
         """
         Summary: Check for chaining many CONVERSION_CMDs together
         Expected: 1. core-image-minimal can be built with
-                     ext4.bmap.gz.bz2.lzo.xz.u-boot and also create a
+                     ext4.bmap.gz.bz2.zst.xz.u-boot and also create a
                      sha256sum
                   2. The above image has a valid sha256sum
         Product: oe-core
         Author: Tom Rini <trini@konsulko.com>
         """
 
-        conv = "ext4.bmap.gz.bz2.lzo.xz.u-boot"
+        conv = "ext4.bmap.gz.bz2.zst.xz.u-boot"
         features = 'IMAGE_FSTYPES += "%s %s.sha256sum"' % (conv, conv)
         self.write_config(features)
 
-        image_name = 'core-image-minimal'
-        bitbake(image_name)
-
-        deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
-        link_name = get_bb_var('IMAGE_LINK_NAME', image_name)
-        image_path = os.path.join(deploy_dir_image, "%s.%s" %
-                                  (link_name, conv))
+        image = 'core-image-minimal'
+        bitbake(image)
+        bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+        image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.%s" %
+                                  (bb_vars['IMAGE_LINK_NAME'], conv))
 
         # check if resulting image is in the deploy directory
         self.assertTrue(os.path.exists(image_path))
@@ -198,7 +187,7 @@ class ImageFeatures(OESelftestTestCase):
 
         # check if the resulting sha256sum agrees
         self.assertTrue(runCmd('cd %s;sha256sum -c %s.%s.sha256sum' %
-                               (deploy_dir_image, link_name, conv)))
+                               (bb_vars['DEPLOY_DIR_IMAGE'], bb_vars['IMAGE_LINK_NAME'], conv)))
 
     def test_image_fstypes(self):
         """
@@ -207,26 +196,43 @@ class ImageFeatures(OESelftestTestCase):
         Product: oe-core
         Author: Ed Bartosh <ed.bartosh@linux.intel.com>
         """
-        image_name = 'core-image-minimal'
+        image = 'core-image-minimal'
 
-        all_image_types = set(get_bb_var("IMAGE_TYPES", image_name).split())
-        blacklist = set(('container', 'elf', 'f2fs', 'multiubi', 'tar.zst', 'wic.zst'))
-        img_types = all_image_types - blacklist
+        all_image_types = set(get_bb_var("IMAGE_TYPES", image).split())
+        skip_image_types = set(('container', 'elf', 'f2fs', 'tar.zst', 'wic.zst', 'squashfs-lzo', 'vfat'))
+        img_types = all_image_types - skip_image_types
 
-        config = 'IMAGE_FSTYPES += "%s"\n'\
-                 'MKUBIFS_ARGS ?= "-m 2048 -e 129024 -c 2047"\n'\
-                 'UBINIZE_ARGS ?= "-m 2048 -p 128KiB -s 512"' % ' '.join(img_types)
+        config = """
+IMAGE_FSTYPES += "%s"
+WKS_FILE = "wictestdisk.wks"
+MKUBIFS_ARGS ?= "-m 2048 -e 129024 -c 2047"
+UBINIZE_ARGS ?= "-m 2048 -p 128KiB -s 512"
+MULTIUBI_BUILD += "mtd_2_128"
+MKUBIFS_ARGS_mtd_2_128 ?= "-m 2048 -e 129024 -c 2047"
+UBINIZE_ARGS_mtd_2_128 ?= "-m 2048 -p 128KiB -s 512"
+MULTIUBI_BUILD += "mtd_4_256"
+MKUBIFS_ARGS_mtd_4_256 ?= "-m 4096 -e 253952 -c 4096"
+UBINIZE_ARGS_mtd_4_256 ?= "-m 4096 -p 256KiB"
+""" % ' '.join(img_types)
         self.write_config(config)
 
-        bitbake(image_name)
+        bitbake(image)
+        bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME', 'MULTIUBI_BUILD'], image)
 
-        deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
-        link_name = get_bb_var('IMAGE_LINK_NAME', image_name)
         for itype in img_types:
-            image_path = os.path.join(deploy_dir_image, "%s.%s" % (link_name, itype))
-            # check if result image is in deploy directory
-            self.assertTrue(os.path.exists(image_path),
-                            "%s image %s doesn't exist" % (itype, image_path))
+            if itype == 'multiubi':
+                # For multiubi build we need to manage MULTIUBI_BUILD entry to append
+                # specific name to IMAGE_LINK_NAME
+                for vname in bb_vars['MULTIUBI_BUILD'].split():
+                    image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s_%s.ubifs" % (bb_vars['IMAGE_LINK_NAME'], vname))
+                    # check if result image is in deploy directory
+                    self.assertTrue(os.path.exists(image_path),
+                                    "%s image %s doesn't exist" % (itype, image_path))
+            else:
+                image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.%s" % (bb_vars['IMAGE_LINK_NAME'], itype))
+                # check if result image is in deploy directory
+                self.assertTrue(os.path.exists(image_path),
+                                "%s image %s doesn't exist" % (itype, image_path))
 
     def test_useradd_static(self):
         config = """
@@ -240,16 +246,11 @@ USERADD_GID_TABLES += "files/static-group"
 
     def test_no_busybox_base_utils(self):
         config = """
-# Enable x11
-DISTRO_FEATURES_append += "x11"
+# Enable wayland
+DISTRO_FEATURES:append = " pam opengl wayland"
 
 # Switch to systemd
-DISTRO_FEATURES += "systemd"
-VIRTUAL-RUNTIME_init_manager = "systemd"
-VIRTUAL-RUNTIME_initscripts = ""
-VIRTUAL-RUNTIME_syslog = ""
-VIRTUAL-RUNTIME_login_manager = "shadow-base"
-DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit"
+INIT_MANAGER = "systemd"
 
 # Replace busybox
 PREFERRED_PROVIDER_virtual/base-utils = "packagegroup-core-base-utils"
@@ -257,12 +258,12 @@ VIRTUAL-RUNTIME_base-utils = "packagegroup-core-base-utils"
 VIRTUAL-RUNTIME_base-utils-hwclock = "util-linux-hwclock"
 VIRTUAL-RUNTIME_base-utils-syslog = ""
 
-# Blacklist busybox
-PNBLACKLIST[busybox] = "Don't build this"
+# Skip busybox
+SKIP_RECIPE[busybox] = "Don't build this"
 """
         self.write_config(config)
 
-        bitbake("--graphviz core-image-sato")
+        bitbake("--graphviz core-image-weston")
 
     def test_image_gen_debugfs(self):
         """
@@ -275,20 +276,20 @@ PNBLACKLIST[busybox] = "Don't build this"
                      Yeoh Ee Peng <ee.peng.yeoh@intel.com>
         """
 
-        image_name = 'core-image-minimal'
+        image = 'core-image-minimal'
+        image_fstypes_debugfs = 'tar.bz2'
         features = 'IMAGE_GEN_DEBUGFS = "1"\n'
-        features += 'IMAGE_FSTYPES_DEBUGFS = "tar.bz2"\n'
-        features += 'MACHINE = "genericx86-64"\n'
+        features += 'IMAGE_FSTYPES_DEBUGFS = "%s"\n' % image_fstypes_debugfs
         self.write_config(features)
 
-        bitbake(image_name)
-        deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
-        dbg_tar_file = os.path.join(deploy_dir_image, "*-dbg.rootfs.tar.bz2")
-        debug_files = glob.glob(dbg_tar_file)
-        self.assertNotEqual(len(debug_files), 0, 'debug filesystem not generated at %s' % dbg_tar_file)
-        result = runCmd('cd %s; tar xvf %s' % (deploy_dir_image, dbg_tar_file))
+        bitbake(image)
+        bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+
+        dbg_tar_file = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s-dbg.%s" % (bb_vars['IMAGE_LINK_NAME'], image_fstypes_debugfs))
+        self.assertTrue(os.path.exists(dbg_tar_file), 'debug filesystem not generated at %s' % dbg_tar_file)
+        result = runCmd('cd %s; tar xvf %s' % (bb_vars['DEPLOY_DIR_IMAGE'], dbg_tar_file))
         self.assertEqual(result.status, 0, msg='Failed to extract %s: %s' % (dbg_tar_file, result.output))
-        result = runCmd('find %s -name %s' % (deploy_dir_image, "udevadm"))
+        result = runCmd('find %s -name %s' % (bb_vars['DEPLOY_DIR_IMAGE'], "udevadm"))
         self.assertTrue("udevadm" in result.output, msg='Failed to find udevadm: %s' % result.output)
         dbg_symbols_targets = result.output.splitlines()
         self.assertTrue(dbg_symbols_targets, msg='Failed to split udevadm: %s' % dbg_symbols_targets)
@@ -298,9 +299,33 @@ PNBLACKLIST[busybox] = "Don't build this"
 
     def test_empty_image(self):
         """Test creation of image with no packages"""
-        bitbake('test-empty-image')
-        res_dir = get_bb_var('DEPLOY_DIR_IMAGE')
-        images = os.path.join(res_dir, "test-empty-image-*.manifest")
-        result = glob.glob(images)
-        with open(result[1],"r") as f:
+        image = 'test-empty-image'
+        bitbake(image)
+        bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+        manifest = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.manifest" % bb_vars['IMAGE_LINK_NAME'])
+        self.assertTrue(os.path.exists(manifest))
+
+        with open(manifest, "r") as f:
             self.assertEqual(len(f.read().strip()),0)
+
+    def test_mandb(self):
+        """
+        Test that an image containing manpages has working man and apropos commands.
+        """
+        config = """
+DISTRO_FEATURES:append = " api-documentation"
+CORE_IMAGE_EXTRA_INSTALL = "man-pages"
+"""
+        self.write_config(config)
+        bitbake("core-image-minimal")
+
+        with runqemu('core-image-minimal', ssh=False, runqemuparams='nographic') as qemu:
+            # This manpage is provided by man-pages
+            status, output = qemu.run_serial("apropos 8859")
+            self.assertEqual(status, 1, 'Failed to run apropos: %s' % (output))
+            self.assertIn("iso_8859_15", output)
+
+            # This manpage is provided by man-pages
+            status, output = qemu.run_serial("man --pager=cat intro")
+            self.assertEqual(status, 1, 'Failed to run man: %s' % (output))
+            self.assertIn("introduction to user commands", output)
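One convention worth noting in the new test_mandb test: qemu.run_serial() in oeqa returns (status, output) with status 1 on success and 0 on failure, the opposite of a shell exit code, which is why the assertions expect status == 1. A hedged helper showing the convention; FakeQemu is a stand-in so the sketch runs without booting an image:

    def assert_serial_ok(qemu_like, cmd):
        # run_serial() reports 1 on success, 0 on failure (not a shell exit code)
        status, output = qemu_like.run_serial(cmd)
        if status != 1:
            raise AssertionError("'%s' failed on serial console: %s" % (cmd, output))
        return output

    class FakeQemu:
        # stand-in so the sketch runs without an image
        def run_serial(self, cmd):
            return 1, "iso_8859_15 (15) - ISO 8859-15 character set"

    print(assert_serial_ok(FakeQemu(), "apropos 8859"))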
diff --git a/meta/lib/oeqa/selftest/cases/incompatible_lic.py b/meta/lib/oeqa/selftest/cases/incompatible_lic.py
index 152da6332a..93884f5731 100644
--- a/meta/lib/oeqa/selftest/cases/incompatible_lic.py
+++ b/meta/lib/oeqa/selftest/cases/incompatible_lic.py
@@ -1,10 +1,16 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
 from oeqa.selftest.case import OESelftestTestCase
 from oeqa.utils.commands import bitbake
 
-class IncompatibleLicenseTests(OESelftestTestCase):
+class IncompatibleLicenseTestObsolete(OESelftestTestCase):
 
-    def lic_test(self, pn, pn_lic, lic):
-        error_msg = 'ERROR: Nothing PROVIDES \'%s\'\n%s was skipped: it has incompatible license(s): %s' % (pn, pn, pn_lic)
+    def lic_test(self, pn, pn_lic, lic, error_msg=None):
+        if not error_msg:
+            error_msg = 'ERROR: Nothing PROVIDES \'%s\'\n%s was skipped: it has incompatible license(s): %s' % (pn, pn, pn_lic)
 
         self.write_config("INCOMPATIBLE_LICENSE += \"%s\"" % (lic))
 
@@ -12,72 +18,81 @@ class IncompatibleLicenseTests(OESelftestTestCase):
         if error_msg not in result.output:
             raise AssertionError(result.output)
 
-    # Verify that a package with an SPDX license (from AVAILABLE_LICENSES)
-    # cannot be built when INCOMPATIBLE_LICENSE contains this SPDX license
-    def test_incompatible_spdx_license(self):
-        self.lic_test('incompatible-license', 'GPL-3.0-only', 'GPL-3.0-only')
-
-    # Verify that a package with an SPDX license (from AVAILABLE_LICENSES)
-    # cannot be built when INCOMPATIBLE_LICENSE contains an alias (in
-    # SPDXLICENSEMAP) of this SPDX license
+    # Verify that a package with an SPDX license cannot be built when
+    # INCOMPATIBLE_LICENSE contains an alias (in SPDXLICENSEMAP) of this SPDX
+    # license
     def test_incompatible_alias_spdx_license(self):
-        self.lic_test('incompatible-license', 'GPL-3.0-only', 'GPLv3')
-
-    # Verify that a package with an SPDX license (from AVAILABLE_LICENSES)
-    # cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded license
-    # matching this SPDX license
-    def test_incompatible_spdx_license_wildcard(self):
-        self.lic_test('incompatible-license', 'GPL-3.0-only', '*GPL-3.0-only')
+        self.lic_test('incompatible-license', 'GPL-3.0-only', 'GPLv3', "is an obsolete license, please use an SPDX reference in INCOMPATIBLE_LICENSE")
 
-    # Verify that a package with an SPDX license (from AVAILABLE_LICENSES)
-    # cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded alias
-    # license matching this SPDX license
+    # Verify that a package with an SPDX license cannot be built when
+    # INCOMPATIBLE_LICENSE contains a wildcarded alias license matching this
+    # SPDX license
     def test_incompatible_alias_spdx_license_wildcard(self):
-        self.lic_test('incompatible-license', 'GPL-3.0-only', '*GPLv3')
-
-    # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX
-    # license cannot be built when INCOMPATIBLE_LICENSE contains this SPDX
-    # license
-    def test_incompatible_spdx_license_alias(self):
-        self.lic_test('incompatible-license-alias', 'GPL-3.0-only', 'GPL-3.0-only')
+        self.lic_test('incompatible-license', 'GPL-3.0-only', '*GPLv3', "*GPLv3 is an invalid license wildcard entry")
 
     # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX
     # license cannot be built when INCOMPATIBLE_LICENSE contains this alias
     def test_incompatible_alias_spdx_license_alias(self):
-        self.lic_test('incompatible-license-alias', 'GPL-3.0-only', 'GPLv3')
+        self.lic_test('incompatible-license-alias', 'GPL-3.0-only', 'GPLv3', "is an obsolete license, please use an SPDX reference in INCOMPATIBLE_LICENSE")
 
     # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX
    # license cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded
     # license matching this SPDX license
     def test_incompatible_spdx_license_alias_wildcard(self):
-        self.lic_test('incompatible-license-alias', 'GPL-3.0-only', '*GPL-3.0')
+        self.lic_test('incompatible-license-alias', 'GPL-3.0-only', '*GPL-3.0', "*GPL-3.0 is an invalid license wildcard entry")
 
     # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX
     # license cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded
     # alias license matching the SPDX license
     def test_incompatible_alias_spdx_license_alias_wildcard(self):
-        self.lic_test('incompatible-license-alias', 'GPL-3.0-only', '*GPLv3')
-
-    # Verify that a package with multiple SPDX licenses (from
-    # AVAILABLE_LICENSES) cannot be built when INCOMPATIBLE_LICENSE contains
-    # some of them
-    def test_incompatible_spdx_licenses(self):
-        self.lic_test('incompatible-licenses', 'GPL-3.0-only LGPL-3.0-only', 'GPL-3.0-only LGPL-3.0-only')
+        self.lic_test('incompatible-license-alias', 'GPL-3.0-only', '*GPLv3', "*GPLv3 is an invalid license wildcard entry")
 
-    # Verify that a package with multiple SPDX licenses (from
-    # AVAILABLE_LICENSES) cannot be built when INCOMPATIBLE_LICENSE contains a
-    # wildcard to some of them
+    # Verify that a package with multiple SPDX licenses cannot be built when
+    # INCOMPATIBLE_LICENSE contains a wildcard to some of them
     def test_incompatible_spdx_licenses_wildcard(self):
-        self.lic_test('incompatible-licenses', 'GPL-3.0-only LGPL-3.0-only', '*GPL-3.0-only')
+        self.lic_test('incompatible-licenses', 'GPL-3.0-only LGPL-3.0-only', '*GPL-3.0-only', "*GPL-3.0-only is an invalid license wildcard entry")
 
-    # Verify that a package with multiple SPDX licenses (from
-    # AVAILABLE_LICENSES) cannot be built when INCOMPATIBLE_LICENSE contains a
-    # wildcard matching all licenses
+
+    # Verify that a package with multiple SPDX licenses cannot be built when
+    # INCOMPATIBLE_LICENSE contains a wildcard matching all licenses
     def test_incompatible_all_licenses_wildcard(self):
-        self.lic_test('incompatible-licenses', 'GPL-2.0-only GPL-3.0-only LGPL-3.0-only', '*')
+        self.lic_test('incompatible-licenses', 'GPL-2.0-only GPL-3.0-only LGPL-3.0-only', '*', "* is an invalid license wildcard entry")
+
+class IncompatibleLicenseTests(OESelftestTestCase):
+
+    def lic_test(self, pn, pn_lic, lic):
+        error_msg = 'ERROR: Nothing PROVIDES \'%s\'\n%s was skipped: it has incompatible license(s): %s' % (pn, pn, pn_lic)
 
-    # Verify that a package with a non-SPDX license (neither in
-    # AVAILABLE_LICENSES nor in SPDXLICENSEMAP) cannot be built when
+        self.write_config("INCOMPATIBLE_LICENSE += \"%s\"" % (lic))
+
+        result = bitbake('%s --dry-run' % (pn), ignore_status=True)
+        if error_msg not in result.output:
+            raise AssertionError(result.output)
+
+    # Verify that a package with an SPDX license cannot be built when
+    # INCOMPATIBLE_LICENSE contains this SPDX license
+    def test_incompatible_spdx_license(self):
+        self.lic_test('incompatible-license', 'GPL-3.0-only', 'GPL-3.0-only')
+
+    # Verify that a package with an SPDX license cannot be built when
+    # INCOMPATIBLE_LICENSE contains a wildcarded license matching this SPDX
+    # license
+    def test_incompatible_spdx_license_wildcard(self):
+        self.lic_test('incompatible-license', 'GPL-3.0-only', 'GPL-3.0*')
+
+    # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX
+    # license cannot be built when INCOMPATIBLE_LICENSE contains this SPDX
+    # license
+    def test_incompatible_spdx_license_alias(self):
+        self.lic_test('incompatible-license-alias', 'GPL-3.0-only', 'GPL-3.0-only')
+
+    # Verify that a package with multiple SPDX licenses cannot be built when
+    # INCOMPATIBLE_LICENSE contains some of them
+    def test_incompatible_spdx_licenses(self):
+        self.lic_test('incompatible-licenses', 'GPL-3.0-only LGPL-3.0-only', 'GPL-3.0-only LGPL-3.0-only')
+
+    # Verify that a package with a non-SPDX license cannot be built when
     # INCOMPATIBLE_LICENSE contains this license
     def test_incompatible_nonspdx_license(self):
         self.lic_test('incompatible-nonspdx-license', 'FooLicense', 'FooLicense')
@@ -85,8 +100,9 @@ class IncompatibleLicenseTests(OESelftestTestCase):
 class IncompatibleLicensePerImageTests(OESelftestTestCase):
     def default_config(self):
         return """
-IMAGE_INSTALL_append = " bash"
-INCOMPATIBLE_LICENSE_pn-core-image-minimal = "GPL-3.0 LGPL-3.0"
+IMAGE_INSTALL:append = " bash"
+INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*"
+MACHINE_ESSENTIAL_EXTRA_RDEPENDS:remove = "tar"
 """
 
     def test_bash_default(self):
@@ -98,7 +114,8 @@ INCOMPATIBLE_LICENSE_pn-core-image-minimal = "GPL-3.0 LGPL-3.0"
             raise AssertionError(result.output)
 
     def test_bash_and_license(self):
-        self.write_config(self.default_config() + '\nLICENSE_append_pn-bash = " & SomeLicense"')
+        self.disable_class("create-spdx")
+        self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " & SomeLicense"\nERROR_QA:remove:pn-bash = "license-exists"')
         error_msg = "ERROR: core-image-minimal-1.0-r0 do_rootfs: Package bash cannot be installed into the image because it has incompatible license(s): GPL-3.0-or-later"
 
         result = bitbake('core-image-minimal', ignore_status=True)
@@ -106,30 +123,33 @@ INCOMPATIBLE_LICENSE_pn-core-image-minimal = "GPL-3.0 LGPL-3.0"
             raise AssertionError(result.output)
 
     def test_bash_or_license(self):
-        self.write_config(self.default_config() + '\nLICENSE_append_pn-bash = " | SomeLicense"')
+        self.disable_class("create-spdx")
+        self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " | SomeLicense"\nERROR_QA:remove:pn-bash = "license-exists"\nERROR_QA:remove:pn-core-image-minimal = "license-file-missing"')
 
         bitbake('core-image-minimal')
 
-    def test_bash_whitelist(self):
-        self.write_config(self.default_config() + '\nWHITELIST_GPL-3.0_pn-core-image-minimal = "bash"')
+    def test_bash_license_exceptions(self):
+        self.write_config(self.default_config() + '\nINCOMPATIBLE_LICENSE_EXCEPTIONS:pn-core-image-minimal = "bash:GPL-3.0-or-later"\nERROR_QA:remove:pn-core-image-minimal = "license-exception"')
 
         bitbake('core-image-minimal')
 
 class NoGPL3InImagesTests(OESelftestTestCase):
     def test_core_image_minimal(self):
         self.write_config("""
-INCOMPATIBLE_LICENSE_pn-core-image-minimal = "GPL-3.0 LGPL-3.0"
+INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*"
+
+require conf/distro/include/no-gplv3.inc
 """)
         bitbake('core-image-minimal')
 
-    def test_core_image_full_cmdline(self):
+    def test_core_image_full_cmdline_weston(self):
         self.write_config("""
-INHERIT += "testimage"\n
-INCOMPATIBLE_LICENSE_pn-core-image-full-cmdline = "GPL-3.0 LGPL-3.0"\n
-RDEPENDS_packagegroup-core-full-cmdline-utils_remove = "bash bc coreutils cpio ed findutils gawk grep mc mc-fish mc-helpers mc-helpers-perl sed tar time"\n
-RDEPENDS_packagegroup-core-full-cmdline-dev-utils_remove = "diffutils m4 make patch"\n
-RDEPENDS_packagegroup-core-full-cmdline-multiuser_remove = "gzip"\n
+IMAGE_CLASSES += "testimage"
+INCOMPATIBLE_LICENSE:pn-core-image-full-cmdline = "GPL-3.0* LGPL-3.0*"
+INCOMPATIBLE_LICENSE:pn-core-image-weston = "GPL-3.0* LGPL-3.0*"
+
+require conf/distro/include/no-gplv3.inc
 """)
-        bitbake('core-image-full-cmdline')
-        bitbake('-c testimage core-image-full-cmdline')
+        bitbake('core-image-full-cmdline core-image-weston')
+        bitbake('-c testimage core-image-full-cmdline core-image-weston')
 
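After this series INCOMPATIBLE_LICENSE accepts SPDX identifiers plus trailing-glob patterns such as GPL-3.0*, while obsolete aliases and leading wildcards like *GPLv3 are rejected outright. An fnmatch-based sketch of how such patterns expand, assuming the real validation and expansion live in oe.license and the license classes:

    from fnmatch import fnmatchcase

    # a small illustrative subset of license names
    all_licenses = ["GPL-2.0-only", "GPL-3.0-only", "GPL-3.0-or-later",
                    "LGPL-3.0-only", "MIT"]

    def expand(pattern, licenses=all_licenses):
        return [l for l in licenses if fnmatchcase(l, pattern)]

    print(expand("GPL-3.0*"))   # ['GPL-3.0-only', 'GPL-3.0-or-later']
    print(expand("*"))          # matches everything -- why bare '*' is now an error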
diff --git a/meta/lib/oeqa/selftest/cases/intercept.py b/meta/lib/oeqa/selftest/cases/intercept.py
new file mode 100644
index 0000000000..12583c3099
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/intercept.py
@@ -0,0 +1,21 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+
+class GitCheck(OESelftestTestCase):
+    def test_git_intercept(self):
+        """
+        Git binaries with CVE-2022-24765 fixed will refuse to operate on a
+        repository which is owned by a different user. This breaks our
+        do_install task as that runs inside pseudo, so the git repository is
+        owned by the build user but git is running as (fake)root.
+
+        We have an intercept which disables pseudo, so verify that it works.
+        """
+        bitbake("git-submodule-test -c test_git_as_user")
+        bitbake("git-submodule-test -c test_git_as_root")
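For background on the refusal the intercept works around: a git carrying the CVE-2022-24765 fix aborts with a "detected dubious ownership" error when the repository owner differs from the effective user, which is exactly the situation under pseudo. A hedged sketch of observing that refusal from Python; the repository path is hypothetical:

    import subprocess

    # /path/to/other-users-repo is hypothetical; run as a user other than
    # the repository owner to trigger the ownership check
    r = subprocess.run(["git", "-C", "/path/to/other-users-repo", "status"],
                       capture_output=True, text=True)
    if "dubious ownership" in r.stderr:
        # outside pseudo, git's per-repo escape hatch would be:
        #   git config --global --add safe.directory /path/to/other-users-repo
        print("git refused: repository owned by another user")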
diff --git a/meta/lib/oeqa/selftest/cases/kerneldevelopment.py b/meta/lib/oeqa/selftest/cases/kerneldevelopment.py
index a61876ee61..b1f78a0cd1 100644
--- a/meta/lib/oeqa/selftest/cases/kerneldevelopment.py
+++ b/meta/lib/oeqa/selftest/cases/kerneldevelopment.py
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
 import os
 from oeqa.selftest.case import OESelftestTestCase
 from oeqa.utils.commands import runCmd, get_bb_var
@@ -58,7 +64,8 @@ class KernelDev(OESelftestTestCase):
         recipe_append = os.path.join(self.recipeskernel_dir, 'linux-yocto_%.bbappend')
         with open(recipe_append, 'w+') as fh:
             fh.write('SRC_URI += "file://%s"\n' % patch_name)
-            fh.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"')
+            fh.write('ERROR_QA:remove:pn-linux-yocto = "patch-status"\n')
+            fh.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"')
 
         runCmd('bitbake virtual/kernel -c clean')
         runCmd('bitbake virtual/kernel -c patch')
diff --git a/meta/lib/oeqa/selftest/cases/layerappend.py b/meta/lib/oeqa/selftest/cases/layerappend.py
index 05e9426fc6..64b17117cc 100644
--- a/meta/lib/oeqa/selftest/cases/layerappend.py
+++ b/meta/lib/oeqa/selftest/cases/layerappend.py
@@ -1,11 +1,13 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: MIT
 #
 
 import os
 
 from oeqa.selftest.case import OESelftestTestCase
-from oeqa.utils.commands import runCmd, bitbake, get_bb_var
+from oeqa.utils.commands import bitbake, get_bb_var
 import oeqa.utils.ftools as ftools
 
 class LayerAppendTests(OESelftestTestCase):
@@ -30,20 +32,20 @@ python do_build() {
 addtask build
 """
     append = """
-FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"
+FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"
 
-SRC_URI_append = " file://appendtest.txt"
+SRC_URI:append = " file://appendtest.txt"
 
-sysroot_stage_all_append() {
-    install -m 644 ${WORKDIR}/appendtest.txt ${SYSROOT_DESTDIR}/
+sysroot_stage_all:append() {
+    install -m 644 ${UNPACKDIR}/appendtest.txt ${SYSROOT_DESTDIR}/
 }
 
 """
 
     append2 = """
-FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"
+FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"
 
-SRC_URI_append = " file://appendtest.txt"
+SRC_URI:append = " file://appendtest.txt"
 """
     layerappend = ''
 
diff --git a/meta/lib/oeqa/selftest/cases/liboe.py b/meta/lib/oeqa/selftest/cases/liboe.py
index afe8f8809f..930354c931 100644
--- a/meta/lib/oeqa/selftest/cases/liboe.py
+++ b/meta/lib/oeqa/selftest/cases/liboe.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: MIT
 #
 
@@ -7,11 +9,11 @@ from oeqa.utils.commands import get_bb_var, get_bb_vars, bitbake, runCmd
 import oe.path
 import os
 
-class LibOE(OESelftestTestCase):
+class CopyTreeTests(OESelftestTestCase):
 
     @classmethod
     def setUpClass(cls):
-        super(LibOE, cls).setUpClass()
+        super().setUpClass()
         cls.tmp_dir = get_bb_var('TMPDIR')
 
     def test_copy_tree_special(self):
@@ -97,6 +99,39 @@
 
         dstcnt = len(os.listdir(dst))
         srccnt = len(os.listdir(src))
-        self.assertEquals(dstcnt, len(testfiles), "Number of files in dst (%s) differs from number of files in src(%s)." % (dstcnt, srccnt))
+        self.assertEqual(dstcnt, len(testfiles), "Number of files in dst (%s) differs from number of files in src(%s)." % (dstcnt, srccnt))
 
         oe.path.remove(testloc)
+
+class SubprocessTests(OESelftestTestCase):
+
+    def test_subprocess_tweak(self):
+        """
+        Test that the string representation of
+        oeqa.utils.subprocesstweak.OETestCalledProcessError includes stdout and
+        stderr, as expected.
+        """
+        script = """
+#! /bin/sh
+echo Ivn fgqbhg | tr '[a-zA-Z]' '[n-za-mN-ZA-M]'
+echo Ivn fgqree | tr '[a-zA-Z]' '[n-za-mN-ZA-M]' >&2
+exit 42
+        """
+
+        import subprocess
+        import unittest.mock
+        from oeqa.utils.subprocesstweak import OETestCalledProcessError
+
+        with self.assertRaises(OETestCalledProcessError) as cm:
+            with unittest.mock.patch("subprocess.CalledProcessError", OETestCalledProcessError):
+                subprocess.run(["bash", "-"], input=script, text=True, capture_output=True, check=True)
+
+        e = cm.exception
+        self.assertEqual(e.returncode, 42)
+        self.assertEqual("Via stdout\n", e.stdout)
+        self.assertEqual("Via stderr\n", e.stderr)
+
+        string = str(e)
+        self.assertIn("exit status 42", string)
+        self.assertIn("Standard Output: Via stdout", string)
+        self.assertIn("Standard Error: Via stderr", string)
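The pattern exercised by test_subprocess_tweak reduces to a CalledProcessError subclass whose string form appends the captured output. The real class is oeqa.utils.subprocesstweak.OETestCalledProcessError; the sketch below is a minimal standalone imitation:

    import subprocess

    class VerboseCalledProcessError(subprocess.CalledProcessError):
        # append captured output to the default error message
        def __str__(self):
            s = super().__str__()
            if self.stdout:
                s += "\nStandard Output: %s" % self.stdout.strip()
            if self.stderr:
                s += "\nStandard Error: %s" % self.stderr.strip()
            return s

    try:
        subprocess.run(["sh", "-c", "echo out; echo err >&2; exit 42"],
                       capture_output=True, text=True, check=True)
    except subprocess.CalledProcessError as e:
        print(VerboseCalledProcessError(e.returncode, e.cmd, e.output, e.stderr))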
diff --git a/meta/lib/oeqa/selftest/cases/lic_checksum.py b/meta/lib/oeqa/selftest/cases/lic_checksum.py
index bae935d697..2d0b805b90 100644
--- a/meta/lib/oeqa/selftest/cases/lic_checksum.py
+++ b/meta/lib/oeqa/selftest/cases/lic_checksum.py
@@ -1,16 +1,36 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: MIT
 #
 
 import os
 import tempfile
+import urllib
 
 from oeqa.selftest.case import OESelftestTestCase
 from oeqa.utils.commands import bitbake
-from oeqa.utils import CommandError
 
 class LicenseTests(OESelftestTestCase):
 
+    def test_checksum_with_space(self):
+        bitbake_cmd = '-c populate_lic emptytest'
+
+        lic_file, lic_path = tempfile.mkstemp(" -afterspace")
+        os.close(lic_file)
+        #self.track_for_cleanup(lic_path)
+
+        self.write_config("INHERIT:remove = \"report-error\"")
+
+        self.write_recipeinc('emptytest', """
+INHIBIT_DEFAULT_DEPS = "1"
+LIC_FILES_CHKSUM = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e"
+SRC_URI = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e"
+""" % (urllib.parse.quote(lic_path), urllib.parse.quote(lic_path)))
+        result = bitbake(bitbake_cmd)
+        self.delete_recipeinc('emptytest')
+
+
     # Verify that changing a license file that has an absolute path causes
     # the license qa to fail due to a mismatched md5sum.
     def test_nonmatching_checksum(self):
@@ -21,7 +41,7 @@ class LicenseTests(OESelftestTestCase):
         os.close(lic_file)
         self.track_for_cleanup(lic_path)
 
-        self.write_config("INHERIT_remove = \"report-error\"")
+        self.write_config("INHERIT:remove = \"report-error\"")
 
         self.write_recipeinc('emptytest', """
 INHIBIT_DEFAULT_DEPS = "1"
@@ -34,5 +54,6 @@ SRC_URI = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e"
             f.write("data")
 
         result = bitbake(bitbake_cmd, ignore_status=True)
+        self.delete_recipeinc('emptytest')
         if error_msg not in result.output:
             raise AssertionError(result.output)
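The new test percent-encodes the license path because a raw space cannot survive file:// URL parsing in SRC_URI. A quick round trip showing the encoding involved; the temporary path is illustrative:

    import urllib.parse

    lic_path = "/tmp/tmpabc123 -afterspace"
    encoded = urllib.parse.quote(lic_path)
    print(encoded)                        # /tmp/tmpabc123%20-afterspace
    print(urllib.parse.unquote(encoded))  # original path restored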
diff --git a/meta/lib/oeqa/selftest/cases/locales.py b/meta/lib/oeqa/selftest/cases/locales.py
new file mode 100644
index 0000000000..ac4888ef66
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/locales.py
@@ -0,0 +1,54 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.core.decorator import OETestTag
+from oeqa.utils.commands import bitbake, runqemu
+
+class LocalesTest(OESelftestTestCase):
+
+    @OETestTag("runqemu")
+
+    def run_locales_test(self, binary_enabled):
+        features = []
+        features.append('EXTRA_IMAGE_FEATURES = "empty-root-password allow-empty-password allow-root-login"')
+        features.append('IMAGE_INSTALL:append = " glibc-utils localedef"')
+        features.append('GLIBC_GENERATE_LOCALES = "en_US.UTF-8 fr_FR.UTF-8 en_US.ISO-8859-1 de_DE.UTF-8 fr_FR.ISO-8859-1 zh_HK.BIG5-HKSCS tr_TR.UTF-8"')
+        features.append('IMAGE_LINGUAS:append = " en-us fr-fr"')
+        if binary_enabled:
+            features.append('ENABLE_BINARY_LOCALE_GENERATION = "1"')
+        else:
+            features.append('ENABLE_BINARY_LOCALE_GENERATION = "0"')
+        self.write_config("\n".join(features))
+
+        # Build a core-image-minimal
+        bitbake('core-image-minimal')
+
+        with runqemu("core-image-minimal", ssh=False, runqemuparams='nographic') as qemu:
+            cmd = "locale -a"
+            status, output = qemu.run_serial(cmd)
+            # output must includes fr_FR or fr_FR.UTF-8
+            self.assertEqual(status, 1, msg='locale test command failed: output: %s' % output)
+            self.assertIn("fr_FR", output, msg='locale -a test failed: output: %s' % output)
+
+            cmd = "localedef --list-archive -v"
+            status, output = qemu.run_serial(cmd)
+            # output must includes fr_FR.utf8
+            self.assertEqual(status, 1, msg='localedef test command failed: output: %s' % output)
+            self.assertIn("fr_FR.utf8", output, msg='localedef test failed: output: %s' % output)
+
+    def test_locales_on(self):
+        """
+        Summary: Test the locales are generated
+        Expected: 1. Check the locale exist in the locale-archive
+                  2. Check the locale exist for the glibc
+                  3. Check the locale can be generated
+        Product: oe-core
+        Author: Louis Rannou <lrannou@baylibre.com>
+        AutomatedBy: Louis Rannou <lrannou@baylibre.com>
+        """
+        self.run_locales_test(True)
+
+    def test_locales_off(self):
+        self.run_locales_test(False)
diff --git a/meta/lib/oeqa/selftest/cases/manifest.py b/meta/lib/oeqa/selftest/cases/manifest.py
index 5d13f35468..07a6c80489 100644
--- a/meta/lib/oeqa/selftest/cases/manifest.py
+++ b/meta/lib/oeqa/selftest/cases/manifest.py
@@ -1,11 +1,13 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: MIT
 #
 
 import os
 
 from oeqa.selftest.case import OESelftestTestCase
-from oeqa.utils.commands import get_bb_var, get_bb_vars, bitbake
+from oeqa.utils.commands import get_bb_var, bitbake
 
 class ManifestEntry:
     '''A manifest item of a collection able to list missing packages'''
diff --git a/meta/lib/oeqa/selftest/cases/meta_ide.py b/meta/lib/oeqa/selftest/cases/meta_ide.py
index 6f10d30dc9..c3a7df4cdf 100644
--- a/meta/lib/oeqa/selftest/cases/meta_ide.py
+++ b/meta/lib/oeqa/selftest/cases/meta_ide.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: MIT
 #
 
@@ -16,13 +18,15 @@ class MetaIDE(OESelftestTestCase):
     def setUpClass(cls):
         super(MetaIDE, cls).setUpClass()
         bitbake('meta-ide-support')
-        bb_vars = get_bb_vars(['MULTIMACH_TARGET_SYS', 'TMPDIR', 'COREBASE'])
-        cls.environment_script = 'environment-setup-%s' % bb_vars['MULTIMACH_TARGET_SYS']
-        cls.tmpdir = bb_vars['TMPDIR']
-        cls.environment_script_path = '%s/%s' % (cls.tmpdir, cls.environment_script)
+        bitbake('build-sysroots -c build_native_sysroot')
+        bitbake('build-sysroots -c build_target_sysroot')
+        bb_vars = get_bb_vars(['MACHINE_ARCH', 'TARGET_VENDOR', 'TARGET_OS', 'DEPLOY_DIR_IMAGE', 'COREBASE'])
+        cls.environment_script = 'environment-setup-%s%s-%s' % (bb_vars['MACHINE_ARCH'], bb_vars['TARGET_VENDOR'], bb_vars['TARGET_OS'])
+        cls.deploydir = bb_vars['DEPLOY_DIR_IMAGE']
+        cls.environment_script_path = '%s/%s' % (cls.deploydir, cls.environment_script)
         cls.corebasedir = bb_vars['COREBASE']
         cls.tmpdir_metaideQA = tempfile.mkdtemp(prefix='metaide')
 
     @classmethod
     def tearDownClass(cls):
         shutil.rmtree(cls.tmpdir_metaideQA, ignore_errors=True)
@@ -40,12 +44,17 @@ class MetaIDE(OESelftestTestCase):
     def test_meta_ide_can_build_cpio_project(self):
         dl_dir = self.td.get('DL_DIR', None)
         self.project = SDKBuildProject(self.tmpdir_metaideQA + "/cpio/", self.environment_script_path,
-                                       "https://ftp.gnu.org/gnu/cpio/cpio-2.13.tar.gz",
+                                       "https://ftp.gnu.org/gnu/cpio/cpio-2.15.tar.gz",
                                        self.tmpdir_metaideQA, self.td['DATETIME'], dl_dir=dl_dir)
         self.project.download_archive()
-        self.assertEqual(self.project.run_configure('$CONFIGURE_FLAGS --disable-maintainer-mode','sed -i -e "/char \*program_name/d" src/global.c;'), 0,
+        self.assertEqual(self.project.run_configure('CFLAGS="-std=gnu17 -Dbool=int -Dtrue=1 -Dfalse=0 -Wno-error=implicit-function-declaration" $CONFIGURE_FLAGS'), 0,
                          msg="Running configure failed")
-        self.assertEqual(self.project.run_make(), 0,
+        self.assertEqual(self.project.run_make(make_args="CFLAGS='-std=gnu17 -Dbool=int -Dtrue=1 -Dfalse=0 -Wno-error=implicit-function-declaration'"), 0,
                          msg="Running make failed")
         self.assertEqual(self.project.run_install(), 0,
                          msg="Running make install failed")
+
+    def test_meta_ide_can_run_sdk_tests(self):
+        bitbake('-c populate_sysroot gtk+3')
+        bitbake('build-sysroots -c build_target_sysroot')
+        bitbake('-c testsdk meta-ide-support')
diff --git a/meta/lib/oeqa/selftest/cases/minidebuginfo.py b/meta/lib/oeqa/selftest/cases/minidebuginfo.py
new file mode 100644
index 0000000000..a8923460f9
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/minidebuginfo.py
@@ -0,0 +1,60 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+import os
+import subprocess
+import tempfile
+import shutil
+
+from oeqa.core.decorator import OETestTag
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runCmd
+
+
+class Minidebuginfo(OESelftestTestCase):
+    def test_minidebuginfo(self):
+        target_sys = get_bb_var("TARGET_SYS")
+        binutils = "binutils-cross-{}".format(get_bb_var("TARGET_ARCH"))
+
+        image = 'core-image-minimal'
+        bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME', 'READELF'], image)
+
+        self.write_config("""
+DISTRO_FEATURES:append = " minidebuginfo"
+IMAGE_FSTYPES = "tar.bz2"
+""")
+        bitbake("{} {}:do_addto_recipe_sysroot".format(image, binutils))
+
+        native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", binutils)
+
+        # confirm that executables and shared libraries contain an ELF section
+        # ".gnu_debugdata" which stores minidebuginfo.
+        with tempfile.TemporaryDirectory(prefix = "unpackfs-") as unpackedfs:
+            filename = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "{}.tar.bz2".format(bb_vars['IMAGE_LINK_NAME']))
+            shutil.unpack_archive(filename, unpackedfs)
+
+            r = runCmd([bb_vars['READELF'], "-W", "-S", os.path.join(unpackedfs, "bin", "busybox")],
+                       native_sysroot = native_sysroot, target_sys = target_sys)
+            self.assertIn(".gnu_debugdata", r.output)
+
+            r = runCmd([bb_vars['READELF'], "-W", "-S", os.path.join(unpackedfs, "lib", "libc.so.6")],
+                       native_sysroot = native_sysroot, target_sys = target_sys)
+            self.assertIn(".gnu_debugdata", r.output)
+
+    @OETestTag("runqemu")
+    def test_minidebuginfo_qemu(self):
+        """
+        Test minidebuginfo inside a qemu.
+        This runs test_systemd_coredump_minidebuginfo and other minidebuginfo runtime tests which may be added in the future.
+        """
+
+        self.write_config("""
+DISTRO_FEATURES:append = " minidebuginfo"
+INIT_MANAGER = "systemd"
+IMAGE_CLASSES += "testimage"
+TEST_SUITES = "ping ssh systemd"
+        """)
+        bitbake('core-image-minimal')
+        bitbake('-c testimage core-image-minimal')
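Minidebuginfo materialises as a .gnu_debugdata ELF section holding an LZMA-compressed, symbols-only copy of the binary, which is what both readelf checks above look for. A hedged host-side sketch of the same check; it assumes a readelf binary on PATH, and /bin/ls only reports True on distros that ship minidebuginfo:

    import subprocess

    def has_minidebuginfo(path):
        # list the ELF section headers and look for the minidebuginfo section
        out = subprocess.run(["readelf", "-W", "-S", path],
                             capture_output=True, text=True).stdout
        return ".gnu_debugdata" in out

    print(has_minidebuginfo("/bin/ls"))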
diff --git a/meta/lib/oeqa/selftest/cases/multiconfig.py b/meta/lib/oeqa/selftest/cases/multiconfig.py
index 39b92f2439..f509cbf607 100644
--- a/meta/lib/oeqa/selftest/cases/multiconfig.py
+++ b/meta/lib/oeqa/selftest/cases/multiconfig.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: MIT
 #
 
@@ -17,7 +19,7 @@ class MultiConfig(OESelftestTestCase):
         """
 
         config = """
-IMAGE_INSTALL_append_pn-core-image-full-cmdline = " multiconfig-image-packager-tiny multiconfig-image-packager-musl"
+IMAGE_INSTALL:append:pn-core-image-full-cmdline = " multiconfig-image-packager-tiny multiconfig-image-packager-musl"
 BBMULTICONFIG = "tiny musl"
 """
         self.write_config(config)
@@ -52,7 +54,7 @@ TMPDIR = "${TOPDIR}/tmp-mc-tiny"
         self.write_config(config)
 
         testconfig = textwrap.dedent('''\
-            MCTESTVAR_append = "1"
+            MCTESTVAR:append = "1"
             ''')
         self.write_config(testconfig, 'test')
 
@@ -64,9 +66,22 @@ TMPDIR = "${TOPDIR}/tmp-mc-tiny"
         self.assertIn('MCTESTVAR=test1', result.output.splitlines())
 
         testconfig = textwrap.dedent('''\
-            MCTESTVAR_append = "2"
+            MCTESTVAR:append = "2"
             ''')
         self.write_config(testconfig, 'test')
 
         result = bitbake('mc:test:multiconfig-test-parse -c showvar')
         self.assertIn('MCTESTVAR=test2', result.output.splitlines())
+
+    def test_multiconfig_inlayer(self):
+        """
+        Test that a multiconfig from meta-selftest works.
+        """
+
+        config = """
+BBMULTICONFIG = "muslmc"
+"""
+        self.write_config(config)
+
+        # Build a core-image-minimal, only dry run needed to check config is present
+        bitbake('mc:muslmc:bash -n')
diff --git a/meta/lib/oeqa/selftest/cases/newlib.py b/meta/lib/oeqa/selftest/cases/newlib.py
new file mode 100644
index 0000000000..fe57aa51f2
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/newlib.py
@@ -0,0 +1,13 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+
+class NewlibTest(OESelftestTestCase):
+    def test_newlib(self):
+        self.write_config('TCLIBC = "newlib"')
+        bitbake("newlib libgloss")
diff --git a/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py b/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py
index 802a91a488..042ccdd2b4 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py
@@ -1,8 +1,11 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: MIT
 #
 
 import os
+import sys
 from oeqa.selftest.case import OESelftestTestCase
 import tempfile
 import operator
@@ -11,15 +14,14 @@ from oeqa.utils.commands import get_bb_var
 class TestBlobParsing(OESelftestTestCase):
 
     def setUp(self):
-        import time
         self.repo_path = tempfile.mkdtemp(prefix='selftest-buildhistory',
                                           dir=get_bb_var('TOPDIR'))
 
         try:
             from git import Repo
             self.repo = Repo.init(self.repo_path)
-        except ImportError:
-            self.skipTest('Python module GitPython is not present')
+        except ImportError as e:
+            self.skipTest('Python module GitPython is not present (%s) (%s)' % (e, sys.path))
 
         self.test_file = "test"
         self.var_map = {}
@@ -28,6 +30,16 @@ class TestBlobParsing(OESelftestTestCase):
         import shutil
         shutil.rmtree(self.repo_path)
 
+    @property
+    def heads_default(self):
+        """
+        Support repos defaulting to master or to main branch
+        """
+        try:
+            return self.repo.heads.main
+        except AttributeError:
+            return self.repo.heads.master
+
     def commit_vars(self, to_add={}, to_remove = [], msg="A commit message"):
         if len(to_add) == 0 and len(to_remove) == 0:
             return
@@ -65,10 +77,10 @@
         changesmap = { "foo-2" : ("2", "8"), "bar" : ("","4"), "bar-2" : ("","5")}
 
         self.commit_vars(to_add = { "foo" : "1", "foo-2" : "2", "foo-3" : "3" })
-        blob1 = self.repo.heads.master.commit.tree.blobs[0]
+        blob1 = self.heads_default.commit.tree.blobs[0]
 
         self.commit_vars(to_add = { "foo-2" : "8", "bar" : "4", "bar-2" : "5" })
-        blob2 = self.repo.heads.master.commit.tree.blobs[0]
+        blob2 = self.heads_default.commit.tree.blobs[0]
 
         change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file),
                                             blob1, blob2, False, False)
@@ -84,10 +96,10 @@
         defaultmap = { x : ("default", "1") for x in ["PKG", "PKGE", "PKGV", "PKGR"]}
 
         self.commit_vars(to_add = { "foo" : "1" })
-        blob1 = self.repo.heads.master.commit.tree.blobs[0]
+        blob1 = self.heads_default.commit.tree.blobs[0]
 
        self.commit_vars(to_add = { "PKG" : "1", "PKGE" : "1", "PKGV" : "1", "PKGR" : "1" })
-        blob2 = self.repo.heads.master.commit.tree.blobs[0]
+        blob2 = self.heads_default.commit.tree.blobs[0]
 
         change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file),
                                             blob1, blob2, False, False)
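The heads_default property papers over git's configurable default branch name (init.defaultBranch may be main or master). The same fallback can be sketched against the plain git CLI for contexts without GitPython; repo_path here is hypothetical:

    import subprocess

    def default_head(repo_path):
        # probe main first, then master, mirroring the property above
        for branch in ("main", "master"):
            r = subprocess.run(["git", "-C", repo_path, "rev-parse", "--verify", branch],
                               capture_output=True, text=True)
            if r.returncode == 0:
                return branch
        raise ValueError("no main or master branch in %s" % repo_path)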
diff --git a/meta/lib/oeqa/selftest/cases/oelib/elf.py b/meta/lib/oeqa/selftest/cases/oelib/elf.py
index 5a5f9b4fdf..7bf550b6fd 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/elf.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/elf.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: MIT
 #
 
diff --git a/meta/lib/oeqa/selftest/cases/oelib/license.py b/meta/lib/oeqa/selftest/cases/oelib/license.py
index 6ebbee589f..5eea12e761 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/license.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/license.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -15,11 +17,11 @@ class SeenVisitor(oe.license.LicenseVisitor):
15 17
16class TestSingleLicense(TestCase): 18class TestSingleLicense(TestCase):
17 licenses = [ 19 licenses = [
18 "GPLv2", 20 "GPL-2.0-only",
19 "LGPL-2.0", 21 "LGPL-2.0-only",
20 "Artistic", 22 "Artistic-1.0",
21 "MIT", 23 "MIT",
22 "GPLv3+", 24 "GPL-3.0-or-later",
23 "FOO_BAR", 25 "FOO_BAR",
24 ] 26 ]
25 invalid_licenses = ["GPL/BSD"] 27 invalid_licenses = ["GPL/BSD"]
@@ -67,9 +69,9 @@ class TestComplexCombinations(TestSimpleCombinations):
67 "FOO & (BAR | BAZ)&MOO": ["FOO", "BAR", "MOO"], 69 "FOO & (BAR | BAZ)&MOO": ["FOO", "BAR", "MOO"],
68 "(ALPHA|(BETA&THETA)|OMEGA)&DELTA": ["OMEGA", "DELTA"], 70 "(ALPHA|(BETA&THETA)|OMEGA)&DELTA": ["OMEGA", "DELTA"],
69 "((ALPHA|BETA)&FOO)|BAZ": ["BETA", "FOO"], 71 "((ALPHA|BETA)&FOO)|BAZ": ["BETA", "FOO"],
70 "(GPL-2.0|Proprietary)&BSD-4-clause&MIT": ["GPL-2.0", "BSD-4-clause", "MIT"], 72 "(GPL-2.0-only|Proprietary)&BSD-4-clause&MIT": ["GPL-2.0-only", "BSD-4-clause", "MIT"],
71 } 73 }
72 preferred = ["BAR", "OMEGA", "BETA", "GPL-2.0"] 74 preferred = ["BAR", "OMEGA", "BETA", "GPL-2.0-only"]
73 75
74class TestIsIncluded(TestCase): 76class TestIsIncluded(TestCase):
75 tests = { 77 tests = {
@@ -87,12 +89,12 @@ class TestIsIncluded(TestCase):
             [True, ["BAR", "FOOBAR"]],
         ("(FOO | BAR) & FOOBAR | BAZ & MOO & BARFOO", None, "FOO"):
             [True, ["BAZ", "MOO", "BARFOO"]],
-        ("GPL-3.0 & GPL-2.0 & LGPL-2.1 | Proprietary", None, None):
-            [True, ["GPL-3.0", "GPL-2.0", "LGPL-2.1"]],
-        ("GPL-3.0 & GPL-2.0 & LGPL-2.1 | Proprietary", None, "GPL-3.0"):
+        ("GPL-3.0-or-later & GPL-2.0-only & LGPL-2.1-only | Proprietary", None, None):
+            [True, ["GPL-3.0-or-later", "GPL-2.0-only", "LGPL-2.1-only"]],
+        ("GPL-3.0-or-later & GPL-2.0-only & LGPL-2.1-only | Proprietary", None, "GPL-3.0-or-later"):
             [True, ["Proprietary"]],
-        ("GPL-3.0 & GPL-2.0 & LGPL-2.1 | Proprietary", None, "GPL-3.0 Proprietary"):
-            [False, ["GPL-3.0"]]
+        ("GPL-3.0-or-later & GPL-2.0-only & LGPL-2.1-only | Proprietary", None, "GPL-3.0-or-later Proprietary"):
+            [False, ["GPL-3.0-or-later"]]
     }
 
     def test_tests(self):
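The license.py changes above migrate the test data from legacy OpenEmbedded license names to SPDX identifiers. The renames the new expected values encode are, illustratively (mapping reconstructed from the hunks, not shipped in the patch):

OLD_TO_SPDX = {
    "GPLv2": "GPL-2.0-only",        # "-only" makes the absent "+" explicit
    "GPLv3+": "GPL-3.0-or-later",   # a trailing "+" becomes "-or-later"
    "LGPL-2.0": "LGPL-2.0-only",
    "Artistic": "Artistic-1.0",
}

def to_spdx(name):
    return OLD_TO_SPDX.get(name, name)  # unknown names pass through unchanged

assert to_spdx("GPLv2") == "GPL-2.0-only"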
diff --git a/meta/lib/oeqa/selftest/cases/oelib/path.py b/meta/lib/oeqa/selftest/cases/oelib/path.py
index a1cfa08c09..b963e447e3 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/path.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/path.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: MIT
 #
 
diff --git a/meta/lib/oeqa/selftest/cases/oelib/types.py b/meta/lib/oeqa/selftest/cases/oelib/types.py
index 7eb49e6f95..58318b18b2 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/types.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/types.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: MIT
 #
 
diff --git a/meta/lib/oeqa/selftest/cases/oelib/utils.py b/meta/lib/oeqa/selftest/cases/oelib/utils.py
index a7214beb4c..0cb46425a0 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/utils.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/utils.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: MIT
 #
 
@@ -64,7 +66,7 @@ class TestMultiprocessLaunch(TestCase):
         import bb
 
         def testfunction(item, d):
-            if item == "2" or item == "1":
+            if item == "2":
                 raise KeyError("Invalid number %s" % item)
             return "Found %s" % item
 
@@ -99,5 +101,4 @@ class TestMultiprocessLaunch(TestCase):
         # Assert the function prints exceptions
         with captured_output() as (out, err):
             self.assertRaises(bb.BBHandledException, multiprocess_launch, testfunction, ["1", "2", "3", "4", "5", "6"], d, extraargs=(d,))
-        self.assertIn("KeyError: 'Invalid number 1'", out.getvalue())
         self.assertIn("KeyError: 'Invalid number 2'", out.getvalue())
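The utils.py hunks above drop the second failing worker: multiprocess_launch() prints each worker's traceback and then re-raises a single bb.BBHandledException in the parent, so the test now asserts on exactly one reported KeyError. A hedged usage sketch (assumes a BitBake datastore `d` available from the selftest context):

import bb
from oe.utils import multiprocess_launch

def testfunction(item, d):
    if item == "2":
        raise KeyError("Invalid number %s" % item)
    return "Found %s" % item

try:
    # the worker's traceback is printed, then re-raised in the parent
    multiprocess_launch(testfunction, ["1", "2", "3"], d, extraargs=(d,))
except bb.BBHandledException:
    pass  # the KeyError for item "2" already appeared on stdout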
diff --git a/meta/lib/oeqa/selftest/cases/oescripts.py b/meta/lib/oeqa/selftest/cases/oescripts.py
index 8a10ff357b..3f9899b289 100644
--- a/meta/lib/oeqa/selftest/cases/oescripts.py
+++ b/meta/lib/oeqa/selftest/cases/oescripts.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: MIT
 #
 
@@ -7,47 +9,19 @@ import shutil
 import importlib
 import unittest
 from oeqa.selftest.case import OESelftestTestCase
-from oeqa.selftest.cases.buildhistory import BuildhistoryBase
-from oeqa.utils.commands import Command, runCmd, bitbake, get_bb_var, get_test_layer
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var
 from oeqa.utils import CommandError
 
-class BuildhistoryDiffTests(BuildhistoryBase):
-
-    def test_buildhistory_diff(self):
-        target = 'xcursor-transparent-theme'
-        self.run_buildhistory_operation(target, target_config="PR = \"r1\"", change_bh_location=True)
-        self.run_buildhistory_operation(target, target_config="PR = \"r0\"", change_bh_location=False, expect_error=True)
-        result = runCmd("oe-pkgdata-util read-value PKGV %s" % target)
-        pkgv = result.output.rstrip()
-        result = runCmd("buildhistory-diff -p %s" % get_bb_var('BUILDHISTORY_DIR'))
-        expected_endlines = [
-            "xcursor-transparent-theme-dev: RDEPENDS: removed \"xcursor-transparent-theme (['= %s-r1'])\", added \"xcursor-transparent-theme (['= %s-r0'])\"" % (pkgv, pkgv),
-            "xcursor-transparent-theme-staticdev: RDEPENDS: removed \"xcursor-transparent-theme-dev (['= %s-r1'])\", added \"xcursor-transparent-theme-dev (['= %s-r0'])\"" % (pkgv, pkgv)
-        ]
-        for line in result.output.splitlines():
-            for el in expected_endlines:
-                if line.endswith(el):
-                    expected_endlines.remove(el)
-                    break
-            else:
-                self.fail('Unexpected line:\n%s\nExpected line endings:\n %s' % (line, '\n '.join(expected_endlines)))
-        if expected_endlines:
-            self.fail('Missing expected line endings:\n %s' % '\n '.join(expected_endlines))
-
 @unittest.skipUnless(importlib.util.find_spec("cairo"), "Python cairo module is not present")
-class OEScriptTests(OESelftestTestCase):
+class OEPybootchartguyTests(OESelftestTestCase):
 
     @classmethod
     def setUpClass(cls):
-        super(OEScriptTests, cls).setUpClass()
-        import cairo
+        super().setUpClass()
         bitbake("core-image-minimal -c rootfs -f")
         cls.tmpdir = get_bb_var('TMPDIR')
         cls.buildstats = cls.tmpdir + "/buildstats/" + sorted(os.listdir(cls.tmpdir + "/buildstats"))[-1]
-
-    scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')
-
-class OEPybootchartguyTests(OEScriptTests):
+        cls.scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')
 
     def test_pybootchartguy_help(self):
         runCmd('%s/pybootchartgui/pybootchartgui.py --help' % self.scripts_dir)
@@ -67,7 +41,10 @@ class OEPybootchartguyTests(OEScriptTests):
 
 class OEGitproxyTests(OESelftestTestCase):
 
-    scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')
+    @classmethod
+    def setUpClass(cls):
+        super().setUpClass()
+        cls.scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')
 
     def test_oegitproxy_help(self):
         try:
@@ -125,15 +102,22 @@ class OEGitproxyTests(OESelftestTestCase):
 class OeRunNativeTest(OESelftestTestCase):
     def test_oe_run_native(self):
         bitbake("qemu-helper-native -c addto_recipe_sysroot")
-        result = runCmd("oe-run-native qemu-helper-native tunctl -h")
-        self.assertIn("Delete: tunctl -d device-name [-f tun-clone-device]", result.output)
+        result = runCmd("oe-run-native qemu-helper-native qemu-oe-bridge-helper --help")
+        self.assertIn("Helper function to find and exec qemu-bridge-helper", result.output)
+
+class OEListPackageconfigTests(OESelftestTestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        super().setUpClass()
+        cls.scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')
 
-class OEListPackageconfigTests(OEScriptTests):
     #oe-core.scripts.List_all_the_PACKAGECONFIG's_flags
     def check_endlines(self, results, expected_endlines):
         for line in results.output.splitlines():
             for el in expected_endlines:
-                if line.split() == el.split():
+                if line and line.split()[0] == el.split()[0] and \
+                        ' '.join(sorted(el.split())) in ' '.join(sorted(line.split())):
                     expected_endlines.remove(el)
                     break
 
@@ -149,8 +133,8 @@ class OEListPackageconfigTests(OEScriptTests):
         results = runCmd('%s/contrib/list-packageconfig-flags.py' % self.scripts_dir)
         expected_endlines = []
         expected_endlines.append("RECIPE NAME PACKAGECONFIG FLAGS")
-        expected_endlines.append("pinentry gtk2 libcap ncurses qt secret")
-        expected_endlines.append("tar acl")
+        expected_endlines.append("pinentry gtk2 ncurses qt secret")
+        expected_endlines.append("tar acl selinux")
 
         self.check_endlines(results, expected_endlines)
 
@@ -167,11 +151,10 @@ class OEListPackageconfigTests(OEScriptTests):
     def test_packageconfig_flags_option_all(self):
         results = runCmd('%s/contrib/list-packageconfig-flags.py -a' % self.scripts_dir)
         expected_endlines = []
-        expected_endlines.append("pinentry-1.1.1")
-        expected_endlines.append("PACKAGECONFIG ncurses libcap")
+        expected_endlines.append("pinentry-1.3.1")
+        expected_endlines.append("PACKAGECONFIG ncurses")
         expected_endlines.append("PACKAGECONFIG[qt] --enable-pinentry-qt, --disable-pinentry-qt, qtbase-native qtbase")
         expected_endlines.append("PACKAGECONFIG[gtk2] --enable-pinentry-gtk2, --disable-pinentry-gtk2, gtk+ glib-2.0")
-        expected_endlines.append("PACKAGECONFIG[libcap] --with-libcap, --without-libcap, libcap")
         expected_endlines.append("PACKAGECONFIG[ncurses] --enable-ncurses --with-ncurses-include-dir=${STAGING_INCDIR}, --disable-ncurses, ncurses")
         expected_endlines.append("PACKAGECONFIG[secret] --enable-libsecret, --disable-libsecret, libsecret")
 
@@ -181,7 +164,7 @@ class OEListPackageconfigTests(OEScriptTests):
         results = runCmd('%s/contrib/list-packageconfig-flags.py -p' % self.scripts_dir)
         expected_endlines = []
         expected_endlines.append("RECIPE NAME PACKAGECONFIG FLAGS")
-        expected_endlines.append("pinentry gtk2 libcap ncurses qt secret")
+        expected_endlines.append("pinentry gtk2 ncurses qt secret")
 
         self.check_endlines(results, expected_endlines)
 
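check_endlines() above relaxes exact line equality to a keyed match, so extra PACKAGECONFIG flags contributed by other layers no longer break the expectations. The comparison it performs, restated standalone (same logic as the hunk, for illustration only):

def line_matches(line, expected):
    # match on the first column (the recipe name) ...
    if not line or line.split()[0] != expected.split()[0]:
        return False
    # ... then require the sorted expected flags to appear as a run
    # within the sorted flags of the actual output line
    return ' '.join(sorted(expected.split())) in ' '.join(sorted(line.split()))

assert line_matches("pinentry extra-flag gtk2 ncurses qt secret",
                    "pinentry gtk2 ncurses qt secret")
assert not line_matches("tar acl", "tar acl selinux")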
diff --git a/meta/lib/oeqa/selftest/cases/overlayfs.py b/meta/lib/oeqa/selftest/cases/overlayfs.py
new file mode 100644
index 0000000000..580fbdcb9c
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/overlayfs.py
@@ -0,0 +1,541 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, runqemu, get_bb_vars
+from oeqa.core.decorator import OETestTag
+from oeqa.core.decorator.data import skipIfNotMachine
+
+def getline_qemu(out, line):
+    for l in out.split('\n'):
+        if line in l:
+            return l
+
+def getline(res, line):
+    return getline_qemu(res.output, line)
+
+class OverlayFSTests(OESelftestTestCase):
+    """Overlayfs class usage tests"""
+
+    def add_overlay_conf_to_machine(self):
+        machine_inc = """
+OVERLAYFS_MOUNT_POINT[mnt-overlay] = "/mnt/overlay"
+"""
+        self.set_machine_config(machine_inc)
+
+    def test_distro_features_missing(self):
+        """
+        Summary: Check that required DISTRO_FEATURES are set
+        Expected: Fail when either systemd or overlayfs are not in DISTRO_FEATURES
+        Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
+        """
+
+        config = """
+IMAGE_INSTALL:append = " overlayfs-user"
+"""
+        overlayfs_recipe_append = """
+inherit overlayfs
+"""
+        self.write_config(config)
+        self.add_overlay_conf_to_machine()
+        self.write_recipeinc('overlayfs-user', overlayfs_recipe_append)
+
+        res = bitbake('core-image-minimal', ignore_status=True)
+        line = getline(res, "overlayfs-user was skipped: missing required distro features")
+        self.assertTrue("overlayfs" in res.output, msg=res.output)
+        self.assertTrue("systemd" in res.output, msg=res.output)
+        self.assertTrue("ERROR: Required build target 'core-image-minimal' has no buildable providers." in res.output, msg=res.output)
+
+    def test_not_all_units_installed(self):
+        """
+        Summary: Test QA check that we have required mount units in the image
+        Expected: Fail because mount unit for overlay partition is not installed
+        Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
+        """
+
+        config = """
+IMAGE_INSTALL:append = " overlayfs-user"
+DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
+"""
+
+        self.write_config(config)
+        self.add_overlay_conf_to_machine()
+
+        res = bitbake('core-image-minimal', ignore_status=True)
+        line = getline(res, " Mount path /mnt/overlay not found in fstab and unit mnt-overlay.mount not found in systemd unit directories")
+        self.assertTrue(line and line.startswith("WARNING:"), msg=res.output)
+        line = getline(res, "Not all mount paths and units are installed in the image")
+        self.assertTrue(line and line.startswith("ERROR:"), msg=res.output)
+
+    def test_not_all_units_installed_but_qa_skipped(self):
+        """
+        Summary: Test skipping the QA check
+        Expected: Image is created successfully
+        Author: Claudius Heine <ch@denx.de>
+        """
+
+        config = """
+IMAGE_INSTALL:append = " overlayfs-user"
+DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
+OVERLAYFS_QA_SKIP[mnt-overlay] = "mount-configured"
+"""
+
+        self.write_config(config)
+        self.add_overlay_conf_to_machine()
+
+        bitbake('core-image-minimal')
+
+    def test_mount_unit_not_set(self):
+        """
+        Summary: Test whether mount unit was set properly
+        Expected: Fail because mount unit was not set
+        Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
+        """
+
+        config = """
+IMAGE_INSTALL:append = " overlayfs-user"
+DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
+"""
+
+        self.write_config(config)
+
+        res = bitbake('core-image-minimal', ignore_status=True)
+        line = getline(res, "A recipe uses overlayfs class but there is no OVERLAYFS_MOUNT_POINT set in your MACHINE configuration")
+        self.assertTrue(line and line.startswith("Parsing recipes...ERROR:"), msg=res.output)
+
+    def test_wrong_mount_unit_set(self):
+        """
+        Summary: Test whether mount unit was set properly
+        Expected: Fail because not the correct flag used for mount unit
+        Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
+        """
+
+        config = """
+IMAGE_INSTALL:append = " overlayfs-user"
+DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
+"""
+
+        wrong_machine_config = """
+OVERLAYFS_MOUNT_POINT[usr-share-overlay] = "/usr/share/overlay"
+"""
+
+        self.write_config(config)
+        self.set_machine_config(wrong_machine_config)
+
+        res = bitbake('core-image-minimal', ignore_status=True)
+        line = getline(res, "Missing required mount point for OVERLAYFS_MOUNT_POINT[mnt-overlay] in your MACHINE configuration")
+        self.assertTrue(line and line.startswith("Parsing recipes...ERROR:"), msg=res.output)
+
+    def _test_correct_image(self, recipe, data):
+        """
+        Summary: Check that we can create an image when all parameters are
+                 set correctly
+        Expected: Image is created successfully
+        Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
+        """
+
+        config = """
+IMAGE_INSTALL:append = " overlayfs-user systemd-machine-units"
+DISTRO_FEATURES:append = " overlayfs"
+
+# Use systemd as init manager
+INIT_MANAGER = "systemd"
+
+# enable overlayfs in the kernel
+KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
+"""
+
+        overlayfs_recipe_append = """
+OVERLAYFS_WRITABLE_PATHS[mnt-overlay] += "/usr/share/another-overlay-mount"
+
+SYSTEMD_SERVICE:${PN} += " \
+    my-application.service \
+"
+
+do_install:append() {
+    install -d ${D}${systemd_system_unitdir}
+    cat <<EOT > ${D}${systemd_system_unitdir}/my-application.service
+[Unit]
+Description=Sample application start-up unit
+After=overlayfs-user-overlays.service
+Requires=overlayfs-user-overlays.service
+
+[Service]
+Type=oneshot
+ExecStart=/bin/true
+RemainAfterExit=true
+
+[Install]
+WantedBy=multi-user.target
+EOT
+}
+"""
+
+        self.write_config(config)
+        self.add_overlay_conf_to_machine()
+        self.write_recipeinc(recipe, data)
+        self.write_recipeinc('overlayfs-user', overlayfs_recipe_append)
+
+        bitbake('core-image-minimal')
+
+        with runqemu('core-image-minimal') as qemu:
+            # Check that application service started
+            status, output = qemu.run_serial("systemctl status my-application")
+            self.assertTrue("active (exited)" in output, msg=output)
+
+            # Check that overlay mounts are dependencies of our application unit
+            status, output = qemu.run_serial("systemctl list-dependencies my-application")
+            self.assertTrue("overlayfs-user-overlays.service" in output, msg=output)
+
+            status, output = qemu.run_serial("systemctl list-dependencies overlayfs-user-overlays")
+            self.assertTrue("usr-share-another\\x2doverlay\\x2dmount.mount" in output, msg=output)
+            self.assertTrue("usr-share-my\\x2dapplication.mount" in output, msg=output)
+
+            # Check that we have /mnt/overlay fs mounted as tmpfs and
+            # /usr/share/my-application as an overlay (see overlayfs-user recipe)
+            status, output = qemu.run_serial("/bin/mount -t tmpfs,overlay")
+
+            line = getline_qemu(output, "on /mnt/overlay")
+            self.assertTrue(line and line.startswith("tmpfs"), msg=output)
+
+            line = getline_qemu(output, "upperdir=/mnt/overlay/upper/usr/share/my-application")
+            self.assertTrue(line and line.startswith("overlay"), msg=output)
+
+            line = getline_qemu(output, "upperdir=/mnt/overlay/upper/usr/share/another-overlay-mount")
+            self.assertTrue(line and line.startswith("overlay"), msg=output)
+
+    @OETestTag("runqemu")
+    def test_correct_image_fstab(self):
+        """
+        Summary: Check that we can create an image when all parameters are
+                 set correctly via fstab
+        Expected: Image is created successfully
+        Author: Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>
+        """
+
+        base_files_append = """
+do_install:append() {
+    cat <<EOT >> ${D}${sysconfdir}/fstab
+tmpfs /mnt/overlay tmpfs mode=1777,strictatime,nosuid,nodev 0 0
+EOT
+}
+"""
+
+        self._test_correct_image('base-files', base_files_append)
+
+    @OETestTag("runqemu")
+    def test_correct_image_unit(self):
+        """
+        Summary: Check that we can create an image when all parameters are
+                 set correctly via mount unit
+        Expected: Image is created successfully
+        Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
+        """
+
+        systemd_machine_unit_append = """
+SYSTEMD_SERVICE:${PN} += " \
+    mnt-overlay.mount \
+"
+
+do_install:append() {
+    install -d ${D}${systemd_system_unitdir}
+    cat <<EOT > ${D}${systemd_system_unitdir}/mnt-overlay.mount
+[Unit]
+Description=Tmpfs directory
+DefaultDependencies=no
+
+[Mount]
+What=tmpfs
+Where=/mnt/overlay
+Type=tmpfs
+Options=mode=1777,strictatime,nosuid,nodev
+
+[Install]
+WantedBy=multi-user.target
+EOT
+}
+
+"""
+
+        self._test_correct_image('systemd-machine-units', systemd_machine_unit_append)
+
+@OETestTag("runqemu")
+class OverlayFSEtcRunTimeTests(OESelftestTestCase):
+    """overlayfs-etc class tests"""
+
+    def test_all_required_variables_set(self):
+        """
+        Summary: Check that required variables are set
+        Expected: Fail when any of required variables is missing
+        Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
+        """
+
+        configBase = """
+# Use systemd as init manager
+INIT_MANAGER = "systemd"
+
+# enable overlayfs in the kernel
+KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
+
+# Image configuration for overlayfs-etc
+EXTRA_IMAGE_FEATURES += "overlayfs-etc"
+IMAGE_FEATURES:remove = "package-management"
+"""
+        configMountPoint = """
+OVERLAYFS_ETC_MOUNT_POINT = "/data"
+"""
+        configDevice = """
+OVERLAYFS_ETC_DEVICE = "/dev/mmcblk0p1"
+"""
+
+        self.write_config(configBase)
+        res = bitbake('core-image-minimal', ignore_status=True)
+        line = getline(res, "OVERLAYFS_ETC_MOUNT_POINT must be set in your MACHINE configuration")
+        self.assertTrue(line, msg=res.output)
+
+        self.append_config(configMountPoint)
+        res = bitbake('core-image-minimal', ignore_status=True)
+        line = getline(res, "OVERLAYFS_ETC_DEVICE must be set in your MACHINE configuration")
+        self.assertTrue(line, msg=res.output)
+
+        self.append_config(configDevice)
+        res = bitbake('core-image-minimal', ignore_status=True)
+        line = getline(res, "OVERLAYFS_ETC_FSTYPE should contain a valid file system type on /dev/mmcblk0p1")
+        self.assertTrue(line, msg=res.output)
+
+    def test_image_feature_conflict(self):
+        """
+        Summary: Overlayfs-etc is not allowed to be used with package-management
+        Expected: Feature conflict
+        Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
+        """
+
+        config = """
+# Use systemd as init manager
+INIT_MANAGER = "systemd"
+
+# enable overlayfs in the kernel
+KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
+EXTRA_IMAGE_FEATURES += "overlayfs-etc"
+EXTRA_IMAGE_FEATURES += "package-management"
+"""
+
+        self.write_config(config)
+
+        res = bitbake('core-image-minimal', ignore_status=True)
+        line = getline(res, "contains conflicting IMAGE_FEATURES")
+        self.assertTrue("overlayfs-etc" in res.output, msg=res.output)
+        self.assertTrue("package-management" in res.output, msg=res.output)
+
+    # https://bugzilla.yoctoproject.org/show_bug.cgi?id=14963
+    @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
+    def test_image_feature_is_missing(self):
+        """
+        Summary: Overlayfs-etc class is not applied when image feature is not set
+        Expected: Image is created successfully but /etc is not an overlay
+        Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
+        """
+
+        config = """
+# Use systemd as init manager
+INIT_MANAGER = "systemd"
+
+# enable overlayfs in the kernel
+KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
+
+IMAGE_FSTYPES += "wic"
+WKS_FILE = "overlayfs_etc.wks.in"
+
+EXTRA_IMAGE_FEATURES += "read-only-rootfs"
+# Image configuration for overlayfs-etc
+OVERLAYFS_ETC_MOUNT_POINT = "/data"
+OVERLAYFS_ETC_DEVICE = "/dev/sda3"
+OVERLAYFS_ROOTFS_TYPE = "ext4"
+"""
+
+        self.write_config(config)
+
+        bitbake('core-image-minimal')
+
+        with runqemu('core-image-minimal', image_fstype='wic') as qemu:
+            status, output = qemu.run_serial("/bin/mount")
+
+            line = getline_qemu(output, "upperdir=/data/overlay-etc/upper")
+            self.assertFalse(line, msg=output)
+
+    @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
+    def test_sbin_init_preinit(self):
+        self.run_sbin_init(False, "ext4")
+
+    @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
+    def test_sbin_init_original(self):
+        self.run_sbin_init(True, "ext4")
+
+    @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
+    def test_sbin_init_read_only(self):
+        self.run_sbin_init(True, "squashfs")
+
+    def run_sbin_init(self, origInit, rootfsType):
+        """
+        Summary: Confirm we can replace original init and mount overlay on top of /etc
+        Expected: Image is created successfully and /etc is mounted as an overlay
+        Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
+        """
+
+        config = self.get_working_config()
+
+        args = {
+            'OVERLAYFS_INIT_OPTION': "" if origInit else "init=/sbin/preinit",
+            'OVERLAYFS_ETC_USE_ORIG_INIT_NAME': int(origInit == True),
+            'OVERLAYFS_ROOTFS_TYPE': rootfsType,
+            'OVERLAYFS_ETC_CREATE_MOUNT_DIRS': int(rootfsType == "ext4")
+        }
+
+        self.write_config(config.format(**args))
+
+        bitbake('core-image-minimal')
+        testFile = "/etc/my-test-data"
+
+        with runqemu('core-image-minimal', image_fstype='wic', discard_writes=False) as qemu:
+            status, output = qemu.run_serial("/bin/mount")
+
+            line = getline_qemu(output, "/dev/sda3")
+            self.assertTrue("/data" in output, msg=output)
+
+            line = getline_qemu(output, "upperdir=/data/overlay-etc/upper")
+            self.assertTrue(line and line.startswith("/data/overlay-etc/upper on /etc type overlay"), msg=output)
+
+            # check that lower layer is not available
+            status, output = qemu.run_serial("ls -1 /data/overlay-etc/lower")
+            line = getline_qemu(output, "No such file or directory")
+            self.assertTrue(line, msg=output)
+
+            status, output = qemu.run_serial("touch " + testFile)
+            status, output = qemu.run_serial("sync")
+            status, output = qemu.run_serial("ls -1 " + testFile)
+            line = getline_qemu(output, testFile)
+            self.assertTrue(line and line.startswith(testFile), msg=output)
+
+        # Check that file exists in /etc after reboot
+        with runqemu('core-image-minimal', image_fstype='wic') as qemu:
+            status, output = qemu.run_serial("ls -1 " + testFile)
+            line = getline_qemu(output, testFile)
+            self.assertTrue(line and line.startswith(testFile), msg=output)
+
+    @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
+    def test_lower_layer_access(self):
+        """
+        Summary: Test that lower layer of /etc is available read-only when configured
+        Expected: Can't write to lower layer. The files on lower and upper different after
+                  modification
+        Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
+        """
+
+        config = self.get_working_config()
+
+        configLower = """
+OVERLAYFS_ETC_EXPOSE_LOWER = "1"
+IMAGE_INSTALL:append = " overlayfs-user"
+"""
+        testFile = "lower-layer-test.txt"
+
+        args = {
+            'OVERLAYFS_INIT_OPTION': "",
+            'OVERLAYFS_ETC_USE_ORIG_INIT_NAME': 1,
+            'OVERLAYFS_ROOTFS_TYPE': "ext4",
+            'OVERLAYFS_ETC_CREATE_MOUNT_DIRS': 1
+        }
+
+        self.write_config(config.format(**args))
+
+        self.append_config(configLower)
+        bitbake('core-image-minimal')
+
+        with runqemu('core-image-minimal', image_fstype='wic') as qemu:
+            status, output = qemu.run_serial("echo \"Modified in upper\" > /etc/" + testFile)
+            status, output = qemu.run_serial("diff /etc/" + testFile + " /data/overlay-etc/lower/" + testFile)
+            line = getline_qemu(output, "Modified in upper")
+            self.assertTrue(line, msg=output)
+            line = getline_qemu(output, "Original file")
+            self.assertTrue(line, msg=output)
+
+            status, output = qemu.run_serial("touch /data/overlay-etc/lower/ro-test.txt")
+            line = getline_qemu(output, "Read-only file system")
+            self.assertTrue(line, msg=output)
+
+    @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
+    def test_postinst_on_target_for_read_only_rootfs(self):
+        """
+        Summary: The purpose of this test case is to verify that post-installation
+                 on target scripts are executed even if using read-only rootfs when
+                 read-only-rootfs-delayed-postinsts is set
+        Expected: The test files are created on first boot
+        """
+
+        import oe.path
+
+        vars = get_bb_vars(("IMAGE_ROOTFS", "sysconfdir"), "core-image-minimal")
+        sysconfdir = vars["sysconfdir"]
+        self.assertIsNotNone(sysconfdir)
+        # Need to use oe.path here as sysconfdir starts with /
+        targettestdir = os.path.join(sysconfdir, "postinst-test")
+
+        config = self.get_working_config()
+
+        args = {
+            'OVERLAYFS_INIT_OPTION': "",
+            'OVERLAYFS_ETC_USE_ORIG_INIT_NAME': 1,
+            'OVERLAYFS_ROOTFS_TYPE': "ext4",
+            'OVERLAYFS_ETC_CREATE_MOUNT_DIRS': 1
+        }
+
+        # read-only-rootfs is already set in get_working_config()
+        config += 'EXTRA_IMAGE_FEATURES += "read-only-rootfs-delayed-postinsts"\n'
+        config += 'CORE_IMAGE_EXTRA_INSTALL = "postinst-delayed-b"\n'
+
+        self.write_config(config.format(**args))
+
+        res = bitbake('core-image-minimal')
+
+        with runqemu('core-image-minimal', image_fstype='wic') as qemu:
+            for filename in ("rootfs", "delayed-a", "delayed-b"):
+                status, output = qemu.run_serial("test -f %s && echo found" % os.path.join(targettestdir, filename))
+                self.assertIn("found", output, "%s was not present on boot" % filename)
+
+    def get_working_config(self):
+        return """
+# Use systemd as init manager
+INIT_MANAGER = "systemd"
+
+# enable overlayfs in the kernel
+KERNEL_EXTRA_FEATURES:append = " \
+    features/overlayfs/overlayfs.scc \
+    cfg/fs/squashfs.scc"
+
+IMAGE_FSTYPES += "wic"
+OVERLAYFS_INIT_OPTION = "{OVERLAYFS_INIT_OPTION}"
+OVERLAYFS_ROOTFS_TYPE = "{OVERLAYFS_ROOTFS_TYPE}"
+OVERLAYFS_ETC_CREATE_MOUNT_DIRS = "{OVERLAYFS_ETC_CREATE_MOUNT_DIRS}"
+WKS_FILE = "overlayfs_etc.wks.in"
+
+EXTRA_IMAGE_FEATURES += "read-only-rootfs"
+# Image configuration for overlayfs-etc
+EXTRA_IMAGE_FEATURES += "overlayfs-etc"
+IMAGE_FEATURES:remove = "package-management"
+OVERLAYFS_ETC_MOUNT_POINT = "/data"
+OVERLAYFS_ETC_FSTYPE = "ext4"
+OVERLAYFS_ETC_DEVICE = "/dev/sda3"
+OVERLAYFS_ETC_USE_ORIG_INIT_NAME = "{OVERLAYFS_ETC_USE_ORIG_INIT_NAME}"
+
+ROOTFS_POSTPROCESS_COMMAND += "{OVERLAYFS_ROOTFS_TYPE}_rootfs"
+
+ext4_rootfs() {{
+}}
+
+squashfs_rootfs() {{
+    mkdir -p ${{IMAGE_ROOTFS}}/data
+}}
+"""
diff --git a/meta/lib/oeqa/selftest/cases/package.py b/meta/lib/oeqa/selftest/cases/package.py
index 7166c3991f..38ed7173fe 100644
--- a/meta/lib/oeqa/selftest/cases/package.py
+++ b/meta/lib/oeqa/selftest/cases/package.py
@@ -1,10 +1,11 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: MIT
 #
 
 from oeqa.selftest.case import OESelftestTestCase
 from oeqa.utils.commands import bitbake, get_bb_vars, get_bb_var, runqemu
-import stat
 import subprocess, os
 import oe.path
 import re
@@ -88,6 +89,13 @@ class VersionOrdering(OESelftestTestCase):
         self.assertEqual(status - 100, sort, "%s %s (%d) failed" % (ver1, ver2, sort))
 
 class PackageTests(OESelftestTestCase):
+    # Verify that a recipe cannot rename a package into an existing one
+    def test_package_name_conflict(self):
+        res = bitbake("packagenameconflict", ignore_status=True)
+        self.assertNotEqual(res.status, 0)
+        err = "package name already exists"
+        self.assertTrue(err in res.output)
+
     # Verify that a recipe which sets up hardlink files has those preserved into split packages
     # Also test file sparseness is preserved
     def test_preserve_sparse_hardlinks(self):
@@ -95,11 +103,37 @@ class PackageTests(OESelftestTestCase):
 
         dest = get_bb_var('PKGDEST', 'selftest-hardlink')
         bindir = get_bb_var('bindir', 'selftest-hardlink')
+        libdir = get_bb_var('libdir', 'selftest-hardlink')
+        libexecdir = get_bb_var('libexecdir', 'selftest-hardlink')
 
         def checkfiles():
             # Recipe creates 4 hardlinked files, there is a copy in package/ and a copy in packages-split/
             # so expect 8 in total.
             self.assertEqual(os.stat(dest + "/selftest-hardlink" + bindir + "/hello1").st_nlink, 8)
+            self.assertEqual(os.stat(dest + "/selftest-hardlink" + libexecdir + "/hello3").st_nlink, 8)
+
+            # Check dbg version
+            # 2 items, a copy in both package/packages-split so 4
+            self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + bindir + "/.debug/hello1").st_nlink, 4)
+            self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libexecdir + "/.debug/hello1").st_nlink, 4)
+
+            # Even though the libexecdir name is 'hello3' or 'hello4', that isn't the debug target name
+            self.assertEqual(os.path.exists(dest + "/selftest-hardlink-dbg" + libexecdir + "/.debug/hello3"), False)
+            self.assertEqual(os.path.exists(dest + "/selftest-hardlink-dbg" + libexecdir + "/.debug/hello4"), False)
+
+            # Check the staticdev libraries
+            # 101 items, a copy in both package/packages-split so 202
+            self.assertEqual(os.stat(dest + "/selftest-hardlink-staticdev" + libdir + "/libhello.a").st_nlink, 202)
+            self.assertEqual(os.stat(dest + "/selftest-hardlink-staticdev" + libdir + "/libhello-25.a").st_nlink, 202)
+            self.assertEqual(os.stat(dest + "/selftest-hardlink-staticdev" + libdir + "/libhello-50.a").st_nlink, 202)
+            self.assertEqual(os.stat(dest + "/selftest-hardlink-staticdev" + libdir + "/libhello-75.a").st_nlink, 202)
+
+            # Check static dbg
+            # 101 items, a copy in both package/packages-split so 202
+            self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libdir + "/.debug-static/libhello.a").st_nlink, 202)
+            self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libdir + "/.debug-static/libhello-25.a").st_nlink, 202)
+            self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libdir + "/.debug-static/libhello-50.a").st_nlink, 202)
+            self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libdir + "/.debug-static/libhello-75.a").st_nlink, 202)
 
         # Test a sparse file remains sparse
         sparsestat = os.stat(dest + "/selftest-hardlink" + bindir + "/sparsetest")
@@ -116,9 +150,9 @@ class PackageTests(OESelftestTestCase):
 
     # Verify gdb to read symbols from separated debug hardlink file correctly
     def test_gdb_hardlink_debug(self):
-        features = 'IMAGE_INSTALL_append = " selftest-hardlink"\n'
-        features += 'IMAGE_INSTALL_append = " selftest-hardlink-dbg"\n'
-        features += 'IMAGE_INSTALL_append = " selftest-hardlink-gdb"\n'
+        features = 'IMAGE_INSTALL:append = " selftest-hardlink"\n'
+        features += 'IMAGE_INSTALL:append = " selftest-hardlink-dbg"\n'
+        features += 'IMAGE_INSTALL:append = " selftest-hardlink-gdb"\n'
         self.write_config(features)
         bitbake("core-image-minimal")
 
@@ -134,8 +168,10 @@ class PackageTests(OESelftestTestCase):
             self.logger.error("No debugging symbols found. GDB result:\n%s" % output)
             return False
 
-        # Check debugging symbols works correctly
-        elif re.match(r"Breakpoint 1.*hello\.c.*4", l):
+        # Check debugging symbols works correctly. Don't look for a
+        # source file as optimisation can put the breakpoint inside
+        # stdio.h.
+        elif "Breakpoint 1 at" in l:
             return True
 
         self.logger.error("GDB result:\n%d: %s", status, output)
@@ -150,25 +186,25 @@ class PackageTests(OESelftestTestCase):
             self.fail('GDB %s failed' % binary)
 
     def test_preserve_ownership(self):
-        import os, stat, oe.cachedpath
-        features = 'IMAGE_INSTALL_append = " selftest-chown"\n'
+        features = 'IMAGE_INSTALL:append = " selftest-chown"\n'
         self.write_config(features)
         bitbake("core-image-minimal")
 
-        sysconfdir = get_bb_var('sysconfdir', 'selftest-chown')
-        def check_ownership(qemu, gid, uid, path):
+        def check_ownership(qemu, expected_gid, expected_uid, path):
             self.logger.info("Check ownership of %s", path)
-            status, output = qemu.run_serial(r'/bin/stat -c "%U %G" ' + path, timeout=60)
-            output = output.split(" ")
-            if output[0] != uid or output[1] != gid :
-                self.logger.error("Incrrect ownership %s [%s:%s]", path, output[0], output[1])
-                return False
-            return True
+            status, output = qemu.run_serial('stat -c "%U %G" ' + path)
+            self.assertEqual(status, 1, "stat failed: " + output)
+            try:
+                uid, gid = output.split()
+                self.assertEqual(uid, expected_uid)
+                self.assertEqual(gid, expected_gid)
+            except ValueError:
+                self.fail("Cannot parse output: " + output)
 
+        sysconfdir = get_bb_var('sysconfdir', 'selftest-chown')
         with runqemu('core-image-minimal') as qemu:
             for path in [ sysconfdir + "/selftest-chown/file",
                           sysconfdir + "/selftest-chown/dir",
                           sysconfdir + "/selftest-chown/symlink",
                           sysconfdir + "/selftest-chown/fifotest/fifo"]:
-                if not check_ownership(qemu, "test", "test", path):
-                    self.fail('Test ownership %s failed' % path)
+                check_ownership(qemu, "test", "test", path)
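The expanded hardlink checks above follow directly from st_nlink arithmetic: do_package keeps one full copy of the tree in package/ and another in packages-split/, so N hardlinked names for one inode become 2*N links in total (8 for the four binaries, 202 for the 101 static-library names). A runnable illustration of the per-tree count:

import os, tempfile

with tempfile.TemporaryDirectory() as tmp:
    src = os.path.join(tmp, "hello1")
    open(src, "w").close()
    for name in ("hello2", "hello3", "hello4"):
        os.link(src, os.path.join(tmp, name))
    # four names for one inode in a single tree; a second tree copy
    # made with hardlinks would double this to 8
    assert os.stat(src).st_nlink == 4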
diff --git a/meta/lib/oeqa/selftest/cases/picolibc.py b/meta/lib/oeqa/selftest/cases/picolibc.py
new file mode 100644
index 0000000000..e40b4fc3d3
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/picolibc.py
@@ -0,0 +1,18 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_var
+
+class PicolibcTest(OESelftestTestCase):
+
+    def test_picolibc(self):
+        compatible_machines = ['qemuarm', 'qemuarm64', 'qemuriscv32', 'qemuriscv64']
+        machine = get_bb_var('MACHINE')
+        if machine not in compatible_machines:
+            self.skipTest('This test only works with machines : %s' % ' '.join(compatible_machines))
+        self.write_config('TCLIBC = "picolibc"')
+        bitbake("picolibc-helloworld")
diff --git a/meta/lib/oeqa/selftest/cases/pkgdata.py b/meta/lib/oeqa/selftest/cases/pkgdata.py
index 254abc40c6..d786c33018 100644
--- a/meta/lib/oeqa/selftest/cases/pkgdata.py
+++ b/meta/lib/oeqa/selftest/cases/pkgdata.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: MIT
 #
 
@@ -47,8 +49,8 @@ class OePkgdataUtilTests(OESelftestTestCase):
         self.assertGreater(pkgsize, 1, "Size should be greater than 1. %s" % result.output)
 
     def test_find_path(self):
-        result = runCmd('oe-pkgdata-util find-path /lib/libz.so.1')
-        self.assertEqual(result.output, 'zlib: /lib/libz.so.1')
+        result = runCmd('oe-pkgdata-util find-path /usr/lib/libz.so.1')
+        self.assertEqual(result.output, 'zlib: /usr/lib/libz.so.1')
         result = runCmd('oe-pkgdata-util find-path /usr/bin/m4')
         self.assertEqual(result.output, 'm4: /usr/bin/m4')
         result = runCmd('oe-pkgdata-util find-path /not/exist', ignore_status=True)
@@ -120,8 +122,7 @@ class OePkgdataUtilTests(OESelftestTestCase):
                 curpkg = line.split(':')[0]
                 files[curpkg] = []
             return files
-        bb_vars = get_bb_vars(['base_libdir', 'libdir', 'includedir', 'mandir'])
-        base_libdir = bb_vars['base_libdir']
+        bb_vars = get_bb_vars(['libdir', 'includedir', 'mandir'])
         libdir = bb_vars['libdir']
         includedir = bb_vars['includedir']
         mandir = bb_vars['mandir']
@@ -138,7 +139,7 @@ class OePkgdataUtilTests(OESelftestTestCase):
         self.assertIn('libz1', list(files.keys()), "listed pkgs. files: %s" %result.output)
         self.assertIn('libz-dev', list(files.keys()), "listed pkgs. files: %s" %result.output)
         self.assertGreater(len(files['libz1']), 1)
-        libspec = os.path.join(base_libdir, 'libz.so.1.*')
+        libspec = os.path.join(libdir, 'libz.so.1.*')
         found = False
         for fileitem in files['libz1']:
             if fnmatch.fnmatchcase(fileitem, libspec):
diff --git a/meta/lib/oeqa/selftest/cases/prservice.py b/meta/lib/oeqa/selftest/cases/prservice.py
index 578b2b4dd9..8da3739c57 100644
--- a/meta/lib/oeqa/selftest/cases/prservice.py
+++ b/meta/lib/oeqa/selftest/cases/prservice.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: MIT
 #
 
@@ -12,6 +14,8 @@ from oeqa.selftest.case import OESelftestTestCase
 from oeqa.utils.commands import runCmd, bitbake, get_bb_var
 from oeqa.utils.network import get_free_port
 
+import bb.utils
+
 class BitbakePrTests(OESelftestTestCase):
 
     @classmethod
@@ -19,6 +23,16 @@ class BitbakePrTests(OESelftestTestCase):
         super(BitbakePrTests, cls).setUpClass()
         cls.pkgdata_dir = get_bb_var('PKGDATA_DIR')
 
+        cls.exported_db_path = os.path.join(cls.builddir, 'export.inc')
+        cls.current_db_path = os.path.join(get_bb_var('PERSISTENT_DIR'), 'prserv.sqlite3')
+
+    def cleanup(self):
+        # Ensure any memory resident bitbake is stopped
+        bitbake("-m")
+        # Remove any existing export file or prserv database
+        bb.utils.remove(self.exported_db_path)
+        bb.utils.remove(self.current_db_path + "*")
+
     def get_pr_version(self, package_name):
         package_data_file = os.path.join(self.pkgdata_dir, 'runtime', package_name)
         package_data = ftools.read_file(package_data_file)
@@ -40,13 +54,14 @@ class BitbakePrTests(OESelftestTestCase):
         return str(stamps[0])
 
     def increment_package_pr(self, package_name):
-        inc_data = "do_package_append() {\n bb.build.exec_func('do_test_prserv', d)\n}\ndo_test_prserv() {\necho \"The current date is: %s\" > ${PKGDESTWORK}/${PN}.datestamp\n}" % datetime.datetime.now()
+        inc_data = "do_package:append() {\n bb.build.exec_func('do_test_prserv', d)\n}\ndo_test_prserv() {\necho \"The current date is: %s\" > ${PKGDESTWORK}/${PN}.datestamp\n}" % datetime.datetime.now()
         self.write_recipeinc(package_name, inc_data)
         res = bitbake(package_name, ignore_status=True)
         self.delete_recipeinc(package_name)
         self.assertEqual(res.status, 0, msg=res.output)
 
     def config_pr_tests(self, package_name, package_type='rpm', pr_socket='localhost:0'):
+        self.cleanup()
         config_package_data = 'PACKAGE_CLASSES = "package_%s"' % package_type
         self.write_config(config_package_data)
         config_server_data = 'PRSERV_HOST = "%s"' % pr_socket
@@ -66,24 +81,24 @@ class BitbakePrTests(OESelftestTestCase):
         self.assertTrue(pr_2 - pr_1 == 1, "New PR %s did not increment as expected (from %s), difference should be 1" % (pr_2, pr_1))
         self.assertTrue(stamp_1 != stamp_2, "Different pkg rev. but same stamp: %s" % stamp_1)
 
+        self.cleanup()
+
     def run_test_pr_export_import(self, package_name, replace_current_db=True):
         self.config_pr_tests(package_name)
 
         self.increment_package_pr(package_name)
         pr_1 = self.get_pr_version(package_name)
 
-        exported_db_path = os.path.join(self.builddir, 'export.inc')
-        export_result = runCmd("bitbake-prserv-tool export %s" % exported_db_path, ignore_status=True)
+        export_result = runCmd("bitbake-prserv-tool export %s" % self.exported_db_path, ignore_status=True)
         self.assertEqual(export_result.status, 0, msg="PR Service database export failed: %s" % export_result.output)
-        self.assertTrue(os.path.exists(exported_db_path))
+        self.assertTrue(os.path.exists(self.exported_db_path), msg="%s didn't exist, tool output %s" % (self.exported_db_path, export_result.output))
 
         if replace_current_db:
-            current_db_path = os.path.join(get_bb_var('PERSISTENT_DIR'), 'prserv.sqlite3')
-            self.assertTrue(os.path.exists(current_db_path), msg="Path to current PR Service database is invalid: %s" % current_db_path)
-            os.remove(current_db_path)
+            self.assertTrue(os.path.exists(self.current_db_path), msg="Path to current PR Service database is invalid: %s" % self.current_db_path)
+            os.remove(self.current_db_path)
 
-        import_result = runCmd("bitbake-prserv-tool import %s" % exported_db_path, ignore_status=True)
-        os.remove(exported_db_path)
+        import_result = runCmd("bitbake-prserv-tool import %s" % self.exported_db_path, ignore_status=True)
+        #os.remove(self.exported_db_path)
         self.assertEqual(import_result.status, 0, msg="PR Service database import failed: %s" % import_result.output)
 
         self.increment_package_pr(package_name)
@@ -91,6 +106,8 @@ class BitbakePrTests(OESelftestTestCase):
 
         self.assertTrue(pr_2 - pr_1 == 1, "New PR %s did not increment as expected (from %s), difference should be 1" % (pr_2, pr_1))
 
+        self.cleanup()
+
     def test_import_export_replace_db(self):
         self.run_test_pr_export_import('m4')
 
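The new cleanup() above relies on bb.utils.remove() globbing its argument, so the sqlite database plus any -wal/-shm journal files go in one call; stopping the memory-resident bitbake first ("bitbake -m") releases the server's handle on the database. A hedged sketch (the path below is illustrative only):

import bb.utils

# removes prserv.sqlite3 and prserv.sqlite3-wal / -shm if present;
# bb.utils.remove() expands the glob internally
bb.utils.remove("/path/to/build/cache/prserv.sqlite3" + "*")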
diff --git a/meta/lib/oeqa/selftest/cases/pseudo.py b/meta/lib/oeqa/selftest/cases/pseudo.py
index 33593d5ce9..3ef8786022 100644
--- a/meta/lib/oeqa/selftest/cases/pseudo.py
+++ b/meta/lib/oeqa/selftest/cases/pseudo.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: MIT
 #
 
diff --git a/meta/lib/oeqa/selftest/cases/recipetool.py b/meta/lib/oeqa/selftest/cases/recipetool.py
index 9d56e9e1e3..0bd724c8ee 100644
--- a/meta/lib/oeqa/selftest/cases/recipetool.py
+++ b/meta/lib/oeqa/selftest/cases/recipetool.py
@@ -1,7 +1,10 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: MIT
 #
 
+import errno
 import os
 import shutil
 import tempfile
@@ -25,7 +28,17 @@ def tearDownModule():
     runCmd('rm -rf %s' % templayerdir)
 
 
-class RecipetoolBase(devtool.DevtoolBase):
+def needTomllib(test):
+    # This test require python 3.11 or above for the tomllib module or tomli module to be installed
+    try:
+        import tomllib
+    except ImportError:
+        try:
+            import tomli
+        except ImportError:
+            test.skipTest('Test requires python 3.11 or above for tomllib module or tomli module')
+
+class RecipetoolBase(devtool.DevtoolTestCase):
 
     def setUpLocal(self):
         super(RecipetoolBase, self).setUpLocal()
@@ -35,6 +48,8 @@ class RecipetoolBase(devtool.DevtoolBase):
         self.testfile = os.path.join(self.tempdir, 'testfile')
         with open(self.testfile, 'w') as f:
             f.write('Test file\n')
+        config = 'BBMASK += "meta-poky/recipes-core/base-files/base-files_%.bbappend"\n'
+        self.append_config(config)
 
     def tearDownLocal(self):
         runCmd('rm -rf %s/recipes-*' % self.templayerdir)
@@ -68,17 +83,16 @@ class RecipetoolBase(devtool.DevtoolBase):
         return bbappendfile, result.output
 
 
-class RecipetoolTests(RecipetoolBase):
+class RecipetoolAppendTests(RecipetoolBase):
 
     @classmethod
     def setUpClass(cls):
-        super(RecipetoolTests, cls).setUpClass()
+        super(RecipetoolAppendTests, cls).setUpClass()
         # Ensure we have the right data in shlibs/pkgdata
         cls.logger.info('Running bitbake to generate pkgdata')
         bitbake('-c packagedata base-files coreutils busybox selftest-recipetool-appendfile')
-        bb_vars = get_bb_vars(['COREBASE', 'BBPATH'])
+        bb_vars = get_bb_vars(['COREBASE'])
         cls.corebase = bb_vars['COREBASE']
-        cls.bbpath = bb_vars['BBPATH']
 
     def _try_recipetool_appendfile(self, testrecipe, destfile, newfile, options, expectedlines, expectedfiles):
         cmd = 'recipetool appendfile %s %s %s %s' % (self.templayerdir, destfile, newfile, options)
@@ -94,7 +108,7 @@ class RecipetoolTests(RecipetoolBase):
 
     def test_recipetool_appendfile_basic(self):
         # Basic test
-        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+        expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
                          '\n']
         _, output = self._try_recipetool_appendfile('base-files', '/etc/motd', self.testfile, '', expectedlines, ['motd'])
         self.assertNotIn('WARNING: ', output)
@@ -106,23 +120,29 @@ class RecipetoolTests(RecipetoolBase):
         self._try_recipetool_appendfile_fail('/dev/console', self.testfile, ['ERROR: /dev/console cannot be handled by this tool'])
 
     def test_recipetool_appendfile_alternatives(self):
+        lspath = '/bin/ls'
+        dirname = "base_bindir"
+        if "usrmerge" in get_bb_var('DISTRO_FEATURES'):
+            lspath = '/usr/bin/ls'
+            dirname = "bindir"
+
         # Now try with a file we know should be an alternative
         # (this is very much a fake example, but one we know is reliably an alternative)
-        self._try_recipetool_appendfile_fail('/bin/ls', self.testfile, ['ERROR: File /bin/ls is an alternative possibly provided by the following recipes:', 'coreutils', 'busybox'])
+        self._try_recipetool_appendfile_fail(lspath, self.testfile, ['ERROR: File %s is an alternative possibly provided by the following recipes:' % lspath, 'coreutils', 'busybox'])
         # Need a test file - should be executable
         testfile2 = os.path.join(self.corebase, 'oe-init-build-env')
         testfile2name = os.path.basename(testfile2)
-        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+        expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
                          '\n',
                          'SRC_URI += "file://%s"\n' % testfile2name,
                          '\n',
-                         'do_install_append() {\n',
-                         '    install -d ${D}${base_bindir}\n',
-                         '    install -m 0755 ${WORKDIR}/%s ${D}${base_bindir}/ls\n' % testfile2name,
+                         'do_install:append() {\n',
+                         '    install -d ${D}${%s}\n' % dirname,
+                         '    install -m 0755 ${UNPACKDIR}/%s ${D}${%s}/ls\n' % (testfile2name, dirname),
                          '}\n']
-        self._try_recipetool_appendfile('coreutils', '/bin/ls', testfile2, '-r coreutils', expectedlines, [testfile2name])
+        self._try_recipetool_appendfile('coreutils', lspath, testfile2, '-r coreutils', expectedlines, [testfile2name])
         # Now try bbappending the same file again, contents should not change
-        bbappendfile, _ = self._try_recipetool_appendfile('coreutils', '/bin/ls', self.testfile, '-r coreutils', expectedlines, [testfile2name])
+        bbappendfile, _ = self._try_recipetool_appendfile('coreutils', lspath, self.testfile, '-r coreutils', expectedlines, [testfile2name])
         # But file should have
         copiedfile = os.path.join(os.path.dirname(bbappendfile), 'coreutils', testfile2name)
         result = runCmd('diff -q %s %s' % (testfile2, copiedfile), ignore_status=True)
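Two mechanical substitutions run through this hunk and the rest of the updated expectations: the BitBake 3.4 override separator change (VAR_append and do_install_append become VAR:append and do_install:append) and the relocation of unpacked sources from ${WORKDIR} to ${UNPACKDIR}. As a rough sketch of the first substitution (illustrative only; the full converter in oe-core is scripts/contrib/convert-overrides.py, which also handles chained overrides such as SRC_URI_append_mymachine):

# Sketch only: rewrite simple pre-3.4 override suffixes to the colon syntax
# that the updated test expectations use.
import re

def to_colon_syntax(line):
    return re.sub(r'\b(\w+)_(append|prepend|remove)\b', r'\1:\2', line)

assert to_colon_syntax('do_install_append() {') == 'do_install:append() {'
assert to_colon_syntax('FILESEXTRAPATHS_prepend := "x"') == 'FILESEXTRAPATHS:prepend := "x"'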
@@ -138,117 +158,117 @@ class RecipetoolTests(RecipetoolBase):
 
     def test_recipetool_appendfile_add(self):
         # Try arbitrary file add to a recipe
-        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+        expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
                          '\n',
                          'SRC_URI += "file://testfile"\n',
                          '\n',
-                         'do_install_append() {\n',
+                         'do_install:append() {\n',
                          '    install -d ${D}${datadir}\n',
-                         '    install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n',
+                         '    install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/something\n',
                          '}\n']
         self._try_recipetool_appendfile('netbase', '/usr/share/something', self.testfile, '-r netbase', expectedlines, ['testfile'])
         # Try adding another file, this time where the source file is executable
         # (so we're testing that, plus modifying an existing bbappend)
         testfile2 = os.path.join(self.corebase, 'oe-init-build-env')
         testfile2name = os.path.basename(testfile2)
-        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+        expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
                          '\n',
                          'SRC_URI += "file://testfile \\\n',
                          '            file://%s \\\n' % testfile2name,
                          '           "\n',
                          '\n',
-                         'do_install_append() {\n',
+                         'do_install:append() {\n',
                          '    install -d ${D}${datadir}\n',
-                         '    install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n',
-                         '    install -m 0755 ${WORKDIR}/%s ${D}${datadir}/scriptname\n' % testfile2name,
+                         '    install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/something\n',
+                         '    install -m 0755 ${UNPACKDIR}/%s ${D}${datadir}/scriptname\n' % testfile2name,
                          '}\n']
         self._try_recipetool_appendfile('netbase', '/usr/share/scriptname', testfile2, '-r netbase', expectedlines, ['testfile', testfile2name])
 
     def test_recipetool_appendfile_add_bindir(self):
         # Try arbitrary file add to a recipe, this time to a location such that should be installed as executable
-        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+        expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
                          '\n',
                          'SRC_URI += "file://testfile"\n',
                          '\n',
-                         'do_install_append() {\n',
+                         'do_install:append() {\n',
                          '    install -d ${D}${bindir}\n',
-                         '    install -m 0755 ${WORKDIR}/testfile ${D}${bindir}/selftest-recipetool-testbin\n',
+                         '    install -m 0755 ${UNPACKDIR}/testfile ${D}${bindir}/selftest-recipetool-testbin\n',
                          '}\n']
         _, output = self._try_recipetool_appendfile('netbase', '/usr/bin/selftest-recipetool-testbin', self.testfile, '-r netbase', expectedlines, ['testfile'])
         self.assertNotIn('WARNING: ', output)
 
     def test_recipetool_appendfile_add_machine(self):
         # Try arbitrary file add to a recipe, this time to a location such that should be installed as executable
-        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+        expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
                          '\n',
                          'PACKAGE_ARCH = "${MACHINE_ARCH}"\n',
                          '\n',
-                         'SRC_URI_append_mymachine = " file://testfile"\n',
+                         'SRC_URI:append:mymachine = " file://testfile"\n',
                          '\n',
-                         'do_install_append_mymachine() {\n',
+                         'do_install:append:mymachine() {\n',
                          '    install -d ${D}${datadir}\n',
-                         '    install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n',
+                         '    install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/something\n',
                          '}\n']
         _, output = self._try_recipetool_appendfile('netbase', '/usr/share/something', self.testfile, '-r netbase -m mymachine', expectedlines, ['mymachine/testfile'])
         self.assertNotIn('WARNING: ', output)
 
     def test_recipetool_appendfile_orig(self):
         # A file that's in SRC_URI and in do_install with the same name
-        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+        expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
                          '\n']
         _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-orig', self.testfile, '', expectedlines, ['selftest-replaceme-orig'])
         self.assertNotIn('WARNING: ', output)
 
     def test_recipetool_appendfile_todir(self):
         # A file that's in SRC_URI and in do_install with destination directory rather than file
-        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+        expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
                          '\n']
         _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-todir', self.testfile, '', expectedlines, ['selftest-replaceme-todir'])
         self.assertNotIn('WARNING: ', output)
 
     def test_recipetool_appendfile_renamed(self):
         # A file that's in SRC_URI with a different name to the destination file
-        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+        expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
                          '\n']
         _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-renamed', self.testfile, '', expectedlines, ['file1'])
         self.assertNotIn('WARNING: ', output)
 
     def test_recipetool_appendfile_subdir(self):
         # A file that's in SRC_URI in a subdir
-        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+        expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
                          '\n',
                          'SRC_URI += "file://testfile"\n',
                          '\n',
-                         'do_install_append() {\n',
+                         'do_install:append() {\n',
                          '    install -d ${D}${datadir}\n',
-                         '    install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-subdir\n',
+                         '    install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/selftest-replaceme-subdir\n',
                          '}\n']
         _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-subdir', self.testfile, '', expectedlines, ['testfile'])
         self.assertNotIn('WARNING: ', output)
 
     def test_recipetool_appendfile_inst_glob(self):
         # A file that's in do_install as a glob
-        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+        expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
                          '\n']
         _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-globfile', self.testfile, '', expectedlines, ['selftest-replaceme-inst-globfile'])
         self.assertNotIn('WARNING: ', output)
 
     def test_recipetool_appendfile_inst_todir_glob(self):
         # A file that's in do_install as a glob with destination as a directory
-        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+        expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
                          '\n']
         _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-todir-globfile', self.testfile, '', expectedlines, ['selftest-replaceme-inst-todir-globfile'])
         self.assertNotIn('WARNING: ', output)
 
     def test_recipetool_appendfile_patch(self):
         # A file that's added by a patch in SRC_URI
-        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+        expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
                          '\n',
                          'SRC_URI += "file://testfile"\n',
                          '\n',
-                         'do_install_append() {\n',
+                         'do_install:append() {\n',
                          '    install -d ${D}${sysconfdir}\n',
-                         '    install -m 0644 ${WORKDIR}/testfile ${D}${sysconfdir}/selftest-replaceme-patched\n',
+                         '    install -m 0644 ${UNPACKDIR}/testfile ${D}${sysconfdir}/selftest-replaceme-patched\n',
                          '}\n']
         _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/etc/selftest-replaceme-patched', self.testfile, '', expectedlines, ['testfile'])
         for line in output.splitlines():
@@ -260,20 +280,20 @@ class RecipetoolTests(RecipetoolBase):
 
     def test_recipetool_appendfile_script(self):
         # Now, a file that's in SRC_URI but installed by a script (so no mention in do_install)
-        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+        expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
                          '\n',
                          'SRC_URI += "file://testfile"\n',
                          '\n',
-                         'do_install_append() {\n',
+                         'do_install:append() {\n',
                          '    install -d ${D}${datadir}\n',
-                         '    install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-scripted\n',
+                         '    install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/selftest-replaceme-scripted\n',
                          '}\n']
         _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-scripted', self.testfile, '', expectedlines, ['testfile'])
         self.assertNotIn('WARNING: ', output)
 
     def test_recipetool_appendfile_inst_func(self):
         # A file that's installed from a function called by do_install
-        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+        expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
                          '\n']
         _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-func', self.testfile, '', expectedlines, ['selftest-replaceme-inst-func'])
         self.assertNotIn('WARNING: ', output)
@@ -283,13 +303,13 @@ class RecipetoolTests(RecipetoolBase):
         # First try without specifying recipe
         self._try_recipetool_appendfile_fail('/usr/share/selftest-replaceme-postinst', self.testfile, ['File /usr/share/selftest-replaceme-postinst may be written out in a pre/postinstall script of the following recipes:', 'selftest-recipetool-appendfile'])
         # Now specify recipe
-        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+        expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
                          '\n',
                          'SRC_URI += "file://testfile"\n',
                          '\n',
-                         'do_install_append() {\n',
+                         'do_install:append() {\n',
                          '    install -d ${D}${datadir}\n',
-                         '    install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-postinst\n',
+                         '    install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/selftest-replaceme-postinst\n',
                          '}\n']
         _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-postinst', self.testfile, '-r selftest-recipetool-appendfile', expectedlines, ['testfile'])
 
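All of the install lines above now stage files from ${UNPACKDIR}: in current oe-core, do_unpack no longer extracts straight into ${WORKDIR} but into a dedicated unpack directory below it, so generated do_install:append code must copy from there. Since these tests rebuild near-identical expectation lists many times over, a hypothetical shared helper could look like this (sketch only, not part of the patch; all names here are illustrative):

# Sketch only: build the expected bbappend content for a single installed file.
def make_expected_bbappend(destdir_var, destname, srcname='testfile', mode='0644'):
    return ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
            '\n',
            'SRC_URI += "file://%s"\n' % srcname,
            '\n',
            'do_install:append() {\n',
            '    install -d ${D}${%s}\n' % destdir_var,
            '    install -m %s ${UNPACKDIR}/%s ${D}${%s}/%s\n' % (mode, srcname, destdir_var, destname),
            '}\n']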
@@ -332,6 +352,9 @@ class RecipetoolTests(RecipetoolBase):
         filename = try_appendfile_wc('-w')
         self.assertEqual(filename, recipefn.split('_')[0] + '_%.bbappend')
 
+
+class RecipetoolCreateTests(RecipetoolBase):
+
     def test_recipetool_create(self):
         # Try adding a recipe
         tempsrc = os.path.join(self.tempdir, 'srctree')
@@ -341,14 +364,13 @@ class RecipetoolTests(RecipetoolBase):
         result = runCmd('recipetool create -o %s %s -x %s' % (recipefile, srcuri, tempsrc))
         self.assertTrue(os.path.isfile(recipefile))
         checkvars = {}
-        checkvars['LICENSE'] = 'GPLv2'
+        checkvars['LICENSE'] = 'GPL-2.0-only'
         checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263'
         checkvars['SRC_URI'] = 'https://github.com/logrotate/logrotate/releases/download/${PV}/logrotate-${PV}.tar.xz'
-        checkvars['SRC_URI[md5sum]'] = 'a560c57fac87c45b2fc17406cdf79288'
         checkvars['SRC_URI[sha256sum]'] = '2e6a401cac9024db2288297e3be1a8ab60e7401ba8e91225218aaf4a27e82a07'
         self._test_recipe_contents(recipefile, checkvars, [])
 
-    def test_recipetool_create_git(self):
+    def test_recipetool_create_autotools(self):
         if 'x11' not in get_bb_var('DISTRO_FEATURES'):
             self.skipTest('Test requires x11 as distro feature')
         # Ensure we have the right data in shlibs/pkgdata
@@ -357,15 +379,15 @@ class RecipetoolTests(RecipetoolBase):
         tempsrc = os.path.join(self.tempdir, 'srctree')
         os.makedirs(tempsrc)
         recipefile = os.path.join(self.tempdir, 'libmatchbox.bb')
-        srcuri = 'git://git.yoctoproject.org/libmatchbox'
+        srcuri = 'git://git.yoctoproject.org/libmatchbox;protocol=https'
         result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri + ";rev=9f7cf8895ae2d39c465c04cc78e918c157420269", '-x', tempsrc])
         self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output)
         checkvars = {}
-        checkvars['LICENSE'] = 'LGPLv2.1'
+        checkvars['LICENSE'] = 'LGPL-2.1-only'
         checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=7fbc338309ac38fefcd64b04bb903e34'
-        checkvars['S'] = '${WORKDIR}/git'
-        checkvars['PV'] = '1.11+git${SRCPV}'
-        checkvars['SRC_URI'] = srcuri
+        checkvars['S'] = None
+        checkvars['PV'] = '1.11+git'
+        checkvars['SRC_URI'] = srcuri + ';branch=master'
         checkvars['DEPENDS'] = set(['libcheck', 'libjpeg-turbo', 'libpng', 'libx11', 'libxext', 'pango'])
         inherits = ['autotools', 'pkgconfig']
         self._test_recipe_contents(recipefile, checkvars, inherits)
@@ -374,8 +396,8 @@ class RecipetoolTests(RecipetoolBase):
         # Try adding a recipe
         temprecipe = os.path.join(self.tempdir, 'recipe')
         os.makedirs(temprecipe)
-        pv = '1.7.3.0'
-        srcuri = 'http://www.dest-unreach.org/socat/download/socat-%s.tar.bz2' % pv
+        pv = '1.7.4.1'
+        srcuri = 'http://www.dest-unreach.org/socat/download/Archive/socat-%s.tar.bz2' % pv
         result = runCmd('recipetool create %s -o %s' % (srcuri, temprecipe))
         dirlist = os.listdir(temprecipe)
         if len(dirlist) > 1:
@@ -384,7 +406,7 @@ class RecipetoolTests(RecipetoolBase):
             self.fail('recipetool did not create recipe file; output:\n%s\ndirlist:\n%s' % (result.output, str(dirlist)))
         self.assertEqual(dirlist[0], 'socat_%s.bb' % pv, 'Recipe file incorrectly named')
         checkvars = {}
-        checkvars['LICENSE'] = set(['Unknown', 'GPLv2'])
+        checkvars['LICENSE'] = set(['Unknown', 'GPL-2.0-only'])
         checkvars['LIC_FILES_CHKSUM'] = set(['file://COPYING.OpenSSL;md5=5c9bccc77f67a8328ef4ebaf468116f4', 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263'])
         # We don't check DEPENDS since they are variable for this recipe depending on what's in the sysroot
         checkvars['S'] = None
@@ -400,9 +422,8 @@ class RecipetoolTests(RecipetoolBase):
         result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
         self.assertTrue(os.path.isfile(recipefile))
         checkvars = {}
-        checkvars['LICENSE'] = set(['LGPLv2.1', 'MPL-1.1'])
+        checkvars['LICENSE'] = set(['LGPL-2.1-only', 'MPL-1.1-only'])
         checkvars['SRC_URI'] = 'http://taglib.github.io/releases/taglib-${PV}.tar.gz'
-        checkvars['SRC_URI[md5sum]'] = 'cee7be0ccfc892fa433d6c837df9522a'
         checkvars['SRC_URI[sha256sum]'] = 'b6d1a5a610aae6ff39d93de5efd0fdc787aa9e9dc1e7026fa4c961b26563526b'
         checkvars['DEPENDS'] = set(['boost', 'zlib'])
         inherits = ['cmake']
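The other patch-wide substitution is in license metadata: recipetool now emits SPDX license identifiers, so the expectations move from legacy OE names (GPLv2, LGPLv2.1) to GPL-2.0-only, LGPL-2.1-only and so on, and obsolete SRC_URI[md5sum] checks are dropped in favour of sha256 alone. A subset of the mapping as a sketch (the authoritative table is SPDXLICENSEMAP in meta/conf/licenses.conf; this is only the handful of names seen in these tests):

# Sketch only: translate legacy OE license names to SPDX identifiers.
LEGACY_TO_SPDX = {
    'GPLv2': 'GPL-2.0-only',
    'GPLv3': 'GPL-3.0-only',
    'LGPLv2.1': 'LGPL-2.1-only',
    'MPL-1.1': 'MPL-1.1-only',  # as updated in the taglib expectation above
}

def to_spdx(license_string):
    return ' '.join(LEGACY_TO_SPDX.get(tok, tok) for tok in license_string.split())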
@@ -424,77 +445,271 @@ class RecipetoolTests(RecipetoolBase):
         checkvars = {}
         checkvars['SUMMARY'] = 'Node Server Example'
         checkvars['HOMEPAGE'] = 'https://github.com/savoirfairelinux/node-server-example#readme'
-        checkvars['LICENSE'] = set(['MIT', 'ISC', 'Unknown'])
+        checkvars['LICENSE'] = 'BSD-3-Clause & ISC & MIT & Unknown'
         urls = []
         urls.append('npm://registry.npmjs.org/;package=@savoirfairelinux/node-server-example;version=${PV}')
         urls.append('npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json')
         checkvars['SRC_URI'] = set(urls)
         checkvars['S'] = '${WORKDIR}/npm'
-        checkvars['LICENSE_${PN}'] = 'MIT'
-        checkvars['LICENSE_${PN}-base64'] = 'Unknown'
-        checkvars['LICENSE_${PN}-accepts'] = 'MIT'
-        checkvars['LICENSE_${PN}-inherits'] = 'ISC'
+        checkvars['LICENSE:${PN}'] = 'MIT'
+        checkvars['LICENSE:${PN}-base64'] = 'Unknown'
+        checkvars['LICENSE:${PN}-accepts'] = 'MIT'
+        checkvars['LICENSE:${PN}-inherits'] = 'ISC'
         inherits = ['npm']
         self._test_recipe_contents(recipefile, checkvars, inherits)
 
     def test_recipetool_create_github(self):
-        # Basic test to see if github URL mangling works
+        # Basic test to see if github URL mangling works. Deliberately use an
+        # older release of Meson at present so we don't need a toml parser.
         temprecipe = os.path.join(self.tempdir, 'recipe')
         os.makedirs(temprecipe)
-        recipefile = os.path.join(temprecipe, 'meson_git.bb')
-        srcuri = 'https://github.com/mesonbuild/meson;rev=0.32.0'
-        result = runCmd(['recipetool', 'create', '-o', temprecipe, srcuri])
-        self.assertTrue(os.path.isfile(recipefile))
+        recipefile = os.path.join(temprecipe, 'python3-meson_git.bb')
+        srcuri = 'https://github.com/mesonbuild/meson;rev=0.52.1'
+        cmd = ['recipetool', 'create', '-o', temprecipe, srcuri]
+        result = runCmd(cmd)
+        self.assertTrue(os.path.isfile(recipefile), msg="recipe %s not created for command %s, output %s" % (recipefile, " ".join(cmd), result.output))
         checkvars = {}
-        checkvars['LICENSE'] = set(['Apache-2.0'])
-        checkvars['SRC_URI'] = 'git://github.com/mesonbuild/meson;protocol=https'
+        checkvars['LICENSE'] = set(['Apache-2.0', "Unknown"])
+        checkvars['SRC_URI'] = 'git://github.com/mesonbuild/meson;protocol=https;branch=0.52'
         inherits = ['setuptools3']
         self._test_recipe_contents(recipefile, checkvars, inherits)
 
     def test_recipetool_create_python3_setuptools(self):
         # Test creating python3 package from tarball (using setuptools3 class)
+        # Use the --no-pypi switch to avoid creating a pypi enabled recipe and
+        # and check the created recipe as if it was a more general tarball
         temprecipe = os.path.join(self.tempdir, 'recipe')
         os.makedirs(temprecipe)
         pn = 'python-magic'
         pv = '0.4.15'
         recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv))
         srcuri = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-%s.tar.gz' % pv
-        result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+        result = runCmd('recipetool create --no-pypi -o %s %s' % (temprecipe, srcuri))
         self.assertTrue(os.path.isfile(recipefile))
         checkvars = {}
         checkvars['LICENSE'] = set(['MIT'])
         checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88'
         checkvars['SRC_URI'] = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-${PV}.tar.gz'
-        checkvars['SRC_URI[md5sum]'] = 'e384c95a47218f66c6501cd6dd45ff59'
         checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5'
         inherits = ['setuptools3']
         self._test_recipe_contents(recipefile, checkvars, inherits)
 
-    def test_recipetool_create_python3_distutils(self):
-        # Test creating python3 package from tarball (using distutils3 class)
+    def test_recipetool_create_python3_setuptools_pypi_tarball(self):
+        # Test creating python3 package from tarball (using setuptools3 and pypi classes)
+        temprecipe = os.path.join(self.tempdir, 'recipe')
+        os.makedirs(temprecipe)
+        pn = 'python-magic'
+        pv = '0.4.15'
+        recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv))
+        srcuri = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-%s.tar.gz' % pv
+        result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+        self.assertTrue(os.path.isfile(recipefile))
+        checkvars = {}
+        checkvars['LICENSE'] = set(['MIT'])
+        checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88'
+        checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5'
+        checkvars['PYPI_PACKAGE'] = pn
+        inherits = ['setuptools3', 'pypi']
+        self._test_recipe_contents(recipefile, checkvars, inherits)
+
+    def test_recipetool_create_python3_setuptools_pypi(self):
+        # Test creating python3 package from pypi url (using setuptools3 and pypi classes)
+        # Intentionnaly using setuptools3 class here instead of any of the pep517 class
+        # to avoid the toml dependency and allows this test to run on host autobuilders
+        # with older version of python
         temprecipe = os.path.join(self.tempdir, 'recipe')
         os.makedirs(temprecipe)
-        pn = 'docutils'
-        pv = '0.14'
+        pn = 'python-magic'
+        pv = '0.4.15'
         recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv))
-        srcuri = 'https://files.pythonhosted.org/packages/84/f4/5771e41fdf52aabebbadecc9381d11dea0fa34e4759b4071244fa094804c/docutils-%s.tar.gz' % pv
+        # First specify the required version in the url
+        srcuri = 'https://pypi.org/project/%s/%s' % (pn, pv)
+        runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+        self.assertTrue(os.path.isfile(recipefile))
+        checkvars = {}
+        checkvars['LICENSE'] = set(['MIT'])
+        checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88'
+        checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5'
+        checkvars['PYPI_PACKAGE'] = pn
+        inherits = ['setuptools3', "pypi"]
+        self._test_recipe_contents(recipefile, checkvars, inherits)
+
+        # Now specify the version as a recipetool parameter
+        runCmd('rm -rf %s' % recipefile)
+        self.assertFalse(os.path.isfile(recipefile))
+        srcuri = 'https://pypi.org/project/%s' % pn
+        runCmd('recipetool create -o %s %s --version %s' % (temprecipe, srcuri, pv))
+        self.assertTrue(os.path.isfile(recipefile))
+        checkvars = {}
+        checkvars['LICENSE'] = set(['MIT'])
+        checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88'
+        checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5'
+        checkvars['PYPI_PACKAGE'] = pn
+        inherits = ['setuptools3', "pypi"]
+        self._test_recipe_contents(recipefile, checkvars, inherits)
+
+        # Now, try to grab latest version of the package, so we cannot guess the name of the recipe,
+        # unless hardcoding the latest version but it means we will need to update the test for each release,
+        # so use a regexp
+        runCmd('rm -rf %s' % recipefile)
+        self.assertFalse(os.path.isfile(recipefile))
+        recipefile_re = r'%s_(.*)\.bb' % pn
+        result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+        dirlist = os.listdir(temprecipe)
+        if len(dirlist) > 1:
+            self.fail('recipetool created more than just one file; output:\n%s\ndirlist:\n%s' % (result.output, str(dirlist)))
+        if len(dirlist) < 1 or not os.path.isfile(os.path.join(temprecipe, dirlist[0])):
+            self.fail('recipetool did not create recipe file; output:\n%s\ndirlist:\n%s' % (result.output, str(dirlist)))
+        import re
+        match = re.match(recipefile_re, dirlist[0])
+        self.assertTrue(match)
+        latest_pv = match.group(1)
+        self.assertTrue(latest_pv != pv)
+        recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, latest_pv))
+        # Do not check LIC_FILES_CHKSUM and SRC_URI checksum here to avoid having updating the test on each release
+        checkvars = {}
+        checkvars['LICENSE'] = set(['MIT'])
+        checkvars['PYPI_PACKAGE'] = pn
+        inherits = ['setuptools3', "pypi"]
+        self._test_recipe_contents(recipefile, checkvars, inherits)
+
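recipetool now recognises plain pypi.org project URLs, as the new test above exercises, and hands them to the pypi class. A minimal sketch (hypothetical helper, not the actual plugin code) of pulling the package name and optional version out of such a URL:

# Sketch only: parse 'https://pypi.org/project/<name>[/<version>]' into the
# pieces a pypi-aware recipe needs; the real logic lives in the recipetool
# create plugins.
from urllib.parse import urlparse

def parse_pypi_url(url):
    parts = urlparse(url).path.strip('/').split('/')
    if len(parts) < 2 or parts[0] != 'project':
        return None
    name = parts[1]
    version = parts[2] if len(parts) > 2 else None  # None means: latest release
    return name, version

assert parse_pypi_url('https://pypi.org/project/python-magic/0.4.15') == ('python-magic', '0.4.15')
assert parse_pypi_url('https://pypi.org/project/python-magic') == ('python-magic', None)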
+    def test_recipetool_create_python3_pep517_setuptools_build_meta(self):
+        # This test require python 3.11 or above for the tomllib module or tomli module to be installed
+        needTomllib(self)
+
+        # Test creating python3 package from tarball (using setuptools.build_meta class)
+        temprecipe = os.path.join(self.tempdir, 'recipe')
+        os.makedirs(temprecipe)
+        pn = 'webcolors'
+        pv = '1.13'
+        recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
+        srcuri = 'https://files.pythonhosted.org/packages/a1/fb/f95560c6a5d4469d9c49e24cf1b5d4d21ffab5608251c6020a965fb7791c/%s-%s.tar.gz' % (pn, pv)
+        result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+        self.assertTrue(os.path.isfile(recipefile))
+        checkvars = {}
+        checkvars['SUMMARY'] = 'A library for working with the color formats defined by HTML and CSS.'
+        checkvars['LICENSE'] = set(['BSD-3-Clause'])
+        checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=702b1ef12cf66832a88f24c8f2ee9c19'
+        checkvars['SRC_URI[sha256sum]'] = 'c225b674c83fa923be93d235330ce0300373d02885cef23238813b0d5668304a'
+        inherits = ['python_setuptools_build_meta', 'pypi']
+
+        self._test_recipe_contents(recipefile, checkvars, inherits)
+
+    def test_recipetool_create_python3_pep517_poetry_core_masonry_api(self):
+        # This test require python 3.11 or above for the tomllib module or tomli module to be installed
+        needTomllib(self)
+
+        # Test creating python3 package from tarball (using poetry.core.masonry.api class)
+        temprecipe = os.path.join(self.tempdir, 'recipe')
+        os.makedirs(temprecipe)
+        pn = 'iso8601'
+        pv = '2.1.0'
+        recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
+        srcuri = 'https://files.pythonhosted.org/packages/b9/f3/ef59cee614d5e0accf6fd0cbba025b93b272e626ca89fb70a3e9187c5d15/%s-%s.tar.gz' % (pn, pv)
+        result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+        self.assertTrue(os.path.isfile(recipefile))
+        checkvars = {}
+        checkvars['SUMMARY'] = 'Simple module to parse ISO 8601 dates'
+        checkvars['LICENSE'] = set(['MIT'])
+        checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=aab31f2ef7ba214a5a341eaa47a7f367'
+        checkvars['SRC_URI[sha256sum]'] = '6b1d3829ee8921c4301998c909f7829fa9ed3cbdac0d3b16af2d743aed1ba8df'
+        inherits = ['python_poetry_core', 'pypi']
+
+        self._test_recipe_contents(recipefile, checkvars, inherits)
+
+    def test_recipetool_create_python3_pep517_flit_core_buildapi(self):
+        # This test require python 3.11 or above for the tomllib module or tomli module to be installed
+        needTomllib(self)
+
+        # Test creating python3 package from tarball (using flit_core.buildapi class)
+        temprecipe = os.path.join(self.tempdir, 'recipe')
+        os.makedirs(temprecipe)
+        pn = 'typing-extensions'
+        pv = '4.8.0'
+        recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
+        srcuri = 'https://files.pythonhosted.org/packages/1f/7a/8b94bb016069caa12fc9f587b28080ac33b4fbb8ca369b98bc0a4828543e/typing_extensions-%s.tar.gz' % pv
+        result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+        self.assertTrue(os.path.isfile(recipefile))
+        checkvars = {}
+        checkvars['SUMMARY'] = 'Backported and Experimental Type Hints for Python 3.8+'
+        checkvars['LICENSE'] = set(['PSF-2.0'])
+        checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=fcf6b249c2641540219a727f35d8d2c2'
+        checkvars['SRC_URI[sha256sum]'] = 'df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef'
+        inherits = ['python_flit_core', 'pypi']
+
+        self._test_recipe_contents(recipefile, checkvars, inherits)
+
+    def test_recipetool_create_python3_pep517_hatchling(self):
+        # This test require python 3.11 or above for the tomllib module or tomli module to be installed
+        needTomllib(self)
+
+        # Test creating python3 package from tarball (using hatchling class)
+        temprecipe = os.path.join(self.tempdir, 'recipe')
+        os.makedirs(temprecipe)
+        pn = 'jsonschema'
+        pv = '4.19.1'
+        recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
+        srcuri = 'https://files.pythonhosted.org/packages/e4/43/087b24516db11722c8687e0caf0f66c7785c0b1c51b0ab951dfde924e3f5/jsonschema-%s.tar.gz' % pv
+        result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+        self.assertTrue(os.path.isfile(recipefile))
+        checkvars = {}
+        checkvars['SUMMARY'] = 'An implementation of JSON Schema validation for Python'
+        checkvars['HOMEPAGE'] = 'https://github.com/python-jsonschema/jsonschema'
+        checkvars['LICENSE'] = set(['MIT'])
+        checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=7a60a81c146ec25599a3e1dabb8610a8 file://json/LICENSE;md5=9d4de43111d33570c8fe49b4cb0e01af'
+        checkvars['SRC_URI[sha256sum]'] = 'ec84cc37cfa703ef7cd4928db24f9cb31428a5d0fa77747b8b51a847458e0bbf'
+        inherits = ['python_hatchling', 'pypi']
+
+        self._test_recipe_contents(recipefile, checkvars, inherits)
+
+    def test_recipetool_create_python3_pep517_maturin(self):
+        # This test require python 3.11 or above for the tomllib module or tomli module to be installed
+        needTomllib(self)
+
+        # Test creating python3 package from tarball (using maturin class)
+        temprecipe = os.path.join(self.tempdir, 'recipe')
+        os.makedirs(temprecipe)
+        pn = 'pydantic-core'
+        pv = '2.14.5'
+        recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
+        srcuri = 'https://files.pythonhosted.org/packages/64/26/cffb93fe9c6b5a91c497f37fae14a4b073ecbc47fc36a9979c7aa888b245/pydantic_core-%s.tar.gz' % pv
+        result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+        self.assertTrue(os.path.isfile(recipefile))
+        checkvars = {}
+        checkvars['HOMEPAGE'] = 'https://github.com/pydantic/pydantic-core'
+        checkvars['LICENSE'] = set(['MIT'])
+        checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=ab599c188b4a314d2856b3a55030c75c'
+        checkvars['SRC_URI[sha256sum]'] = '6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71'
+        inherits = ['python_maturin', 'pypi']
+
+        self._test_recipe_contents(recipefile, checkvars, inherits)
+
+    def test_recipetool_create_python3_pep517_mesonpy(self):
+        # This test require python 3.11 or above for the tomllib module or tomli module to be installed
+        needTomllib(self)
+
+        # Test creating python3 package from tarball (using mesonpy class)
+        temprecipe = os.path.join(self.tempdir, 'recipe')
+        os.makedirs(temprecipe)
+        pn = 'siphash24'
+        pv = '1.4'
+        recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
+        srcuri = 'https://files.pythonhosted.org/packages/c2/32/b934a70592f314afcfa86c7f7e388804a8061be65b822e2aa07e573b6477/%s-%s.tar.gz' % (pn, pv)
         result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
         self.assertTrue(os.path.isfile(recipefile))
         checkvars = {}
-        checkvars['LICENSE'] = set(['PSF', '&', 'BSD', 'GPL'])
-        checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING.txt;md5=35a23d42b615470583563132872c97d6'
-        checkvars['SRC_URI'] = 'https://files.pythonhosted.org/packages/84/f4/5771e41fdf52aabebbadecc9381d11dea0fa34e4759b4071244fa094804c/docutils-${PV}.tar.gz'
-        checkvars['SRC_URI[md5sum]'] = 'c53768d63db3873b7d452833553469de'
-        checkvars['SRC_URI[sha256sum]'] = '51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274'
-        inherits = ['distutils3']
+        checkvars['SRC_URI[sha256sum]'] = '7fd65e39b2a7c8c4ddc3a168a687f4610751b0ac2ebb518783c0cdfc30bec4a0'
+        inherits = ['python_mesonpy', 'pypi']
+
         self._test_recipe_contents(recipefile, checkvars, inherits)
 
     def test_recipetool_create_github_tarball(self):
-        # Basic test to ensure github URL mangling doesn't apply to release tarballs
+        # Basic test to ensure github URL mangling doesn't apply to release tarballs.
+        # Deliberately use an older release of Meson at present so we don't need a toml parser.
         temprecipe = os.path.join(self.tempdir, 'recipe')
         os.makedirs(temprecipe)
-        pv = '0.32.0'
-        recipefile = os.path.join(temprecipe, 'meson_%s.bb' % pv)
+        pv = '0.52.1'
+        recipefile = os.path.join(temprecipe, 'python3-meson_%s.bb' % pv)
         srcuri = 'https://github.com/mesonbuild/meson/releases/download/%s/meson-%s.tar.gz' % (pv, pv)
         result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
         self.assertTrue(os.path.isfile(recipefile))
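The run of new pep517 tests above maps PyPA build backends onto the corresponding oe-core classes; each needs a TOML parser on the host (tomllib in Python 3.11+, the third-party tomli backport before that), hence the needTomllib() guards. A sketch of the underlying idea, with a backend table that mirrors the pairings exercised by the tests (the real selection logic lives in scripts/lib/recipetool/create_buildsys_python.py):

# Sketch only: read build-backend from a source tree's pyproject.toml and
# pick a bbclass for the generated recipe.
import sys

if sys.version_info >= (3, 11):
    import tomllib
else:
    import tomli as tomllib  # backport module on older hosts

BACKEND_TO_CLASS = {
    'setuptools.build_meta': 'python_setuptools_build_meta',
    'poetry.core.masonry.api': 'python_poetry_core',
    'flit_core.buildapi': 'python_flit_core',
    'hatchling.build': 'python_hatchling',
    'maturin': 'python_maturin',
    'mesonpy': 'python_mesonpy',
}

def choose_bbclass(pyproject_path):
    with open(pyproject_path, 'rb') as f:
        backend = tomllib.load(f).get('build-system', {}).get('build-backend', '')
    return BACKEND_TO_CLASS.get(backend, 'setuptools3')  # assumed fallback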
@@ -504,19 +719,93 @@ class RecipetoolTests(RecipetoolBase):
         inherits = ['setuptools3']
         self._test_recipe_contents(recipefile, checkvars, inherits)
 
-    def test_recipetool_create_git_http(self):
+    def _test_recipetool_create_git(self, srcuri, branch=None):
         # Basic test to check http git URL mangling works
         temprecipe = os.path.join(self.tempdir, 'recipe')
         os.makedirs(temprecipe)
-        recipefile = os.path.join(temprecipe, 'matchbox-terminal_git.bb')
-        srcuri = 'http://git.yoctoproject.org/git/matchbox-terminal'
-        result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+        name = srcuri.split(';')[0].split('/')[-1]
+        recipefile = os.path.join(temprecipe, name + '_git.bb')
+        options = ' -B %s' % branch if branch else ''
+        result = runCmd('recipetool create -o %s%s "%s"' % (temprecipe, options, srcuri))
         self.assertTrue(os.path.isfile(recipefile))
         checkvars = {}
-        checkvars['LICENSE'] = set(['GPLv2'])
-        checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/matchbox-terminal;protocol=http'
-        inherits = ['pkgconfig', 'autotools']
+        checkvars['SRC_URI'] = srcuri
+        for scheme in ['http', 'https']:
+            if srcuri.startswith(scheme + ":"):
+                checkvars['SRC_URI'] = 'git%s;protocol=%s' % (srcuri[len(scheme):], scheme)
+        if ';branch=' not in srcuri:
+            checkvars['SRC_URI'] += ';branch=' + (branch or 'master')
+        self._test_recipe_contents(recipefile, checkvars, [])
+
+    def test_recipetool_create_git_http(self):
+        self._test_recipetool_create_git('http://git.yoctoproject.org/git/matchbox-keyboard')
+
+    def test_recipetool_create_git_srcuri_master(self):
+        self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard;branch=master;protocol=https')
+
+    def test_recipetool_create_git_srcuri_branch(self):
+        self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard;branch=matchbox-keyboard-0-1;protocol=https')
+
+    def test_recipetool_create_git_srcbranch(self):
+        self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard;protocol=https', 'matchbox-keyboard-0-1')
+
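The consolidated helper above encodes recipetool's URL mangling rule: an http(s) clone URL becomes a git:// SRC_URI with the original scheme preserved in a protocol parameter, and a branch parameter is appended when the URL does not already carry one. The same transformation as a standalone sketch (illustrative only):

# Sketch only: mirror the SRC_URI expectation computed by
# _test_recipetool_create_git() above.
def mangle_git_url(srcuri, branch=None):
    for scheme in ('http', 'https'):
        if srcuri.startswith(scheme + ':'):
            srcuri = 'git%s;protocol=%s' % (srcuri[len(scheme):], scheme)
    if ';branch=' not in srcuri:
        srcuri += ';branch=' + (branch or 'master')
    return srcuri

assert (mangle_git_url('http://git.yoctoproject.org/git/matchbox-keyboard')
        == 'git://git.yoctoproject.org/git/matchbox-keyboard;protocol=http;branch=master')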
+    def _go_urifiy(self, url, version, modulepath = None, pathmajor = None, subdir = None):
+        modulepath = ",path='%s'" % modulepath if len(modulepath) else ''
+        pathmajor = ",pathmajor='%s'" % pathmajor if len(pathmajor) else ''
+        subdir = ",subdir='%s'" % subdir if len(subdir) else ''
+        return "${@go_src_uri('%s','%s'%s%s%s)}" % (url, version, modulepath, pathmajor, subdir)
+
+    def test_recipetool_create_go(self):
+        # Basic test to check go recipe generation
+        self.maxDiff = None
+
+        temprecipe = os.path.join(self.tempdir, 'recipe')
+        os.makedirs(temprecipe)
+
+        recipefile = os.path.join(temprecipe, 'recipetool-go-test_git.bb')
+
+        srcuri = 'https://git.yoctoproject.org/recipetool-go-test.git'
+        srcrev = "c3e213c01b6c1406b430df03ef0d1ae77de5d2f7"
+        srcbranch = "main"
+
+        result = runCmd('recipetool create -o %s %s -S %s -B %s' % (temprecipe, srcuri, srcrev, srcbranch))
+
+        inherits = ['go-mod', 'go-mod-update-modules']
+
+        checkvars = {}
+        checkvars['GO_IMPORT'] = "git.yoctoproject.org/recipetool-go-test"
+        checkvars['SRC_URI'] = {'git://${GO_IMPORT};protocol=https;nobranch=1;destsuffix=${GO_SRCURI_DESTSUFFIX}'}
+        checkvars['LIC_FILES_CHKSUM'] = {
+            'file://src/${GO_IMPORT}/LICENSE;md5=4e3933dd47afbf115e484d11385fb3bd',
+            'file://src/${GO_IMPORT}/is/LICENSE;md5=62beaee5a116dd1e80161667b1df39ab'
+        }
+
         self._test_recipe_contents(recipefile, checkvars, inherits)
+        self.assertNotIn('Traceback', result.output)
+
+        lics_require_file = os.path.join(temprecipe, 'recipetool-go-test-licenses.inc')
+        self.assertFileExists(lics_require_file)
+        checkvars = {}
+        checkvars['LIC_FILES_CHKSUM'] = {'file://pkg/mod/github.com/godbus/dbus/v5@v5.1.0/LICENSE;md5=09042bd5c6c96a2b9e45ddf1bc517eed;spdx=BSD-2-Clause'}
+        self._test_recipe_contents(lics_require_file, checkvars, [])
+
+        deps_require_file = os.path.join(temprecipe, 'recipetool-go-test-go-mods.inc')
+        self.assertFileExists(deps_require_file)
+        checkvars = {}
+        checkvars['SRC_URI'] = {'gomod://github.com/godbus/dbus/v5;version=v5.1.0;sha256sum=03dfa8e71089a6f477310d15c4d3a036d82d028532881b50fee254358e782ad9'}
+        self._test_recipe_contents(deps_require_file, checkvars, [])
+
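The new go test expects module dependencies to land as gomod:// fetcher entries in a generated -go-mods.inc file, with license checksums split into a matching -licenses.inc. A sketch of formatting one such dependency line from a (module, version, hash) triple (names here are illustrative, not the generator's API):

# Sketch only: format a go module dependency the way the generated
# recipetool-go-test-go-mods.inc expresses it.
def gomod_src_uri(module, version, sha256):
    return 'gomod://%s;version=%s;sha256sum=%s' % (module, version, sha256)

print(gomod_src_uri('github.com/godbus/dbus/v5', 'v5.1.0',
                    '03dfa8e71089a6f477310d15c4d3a036d82d028532881b50fee254358e782ad9'))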
+class RecipetoolTests(RecipetoolBase):
+
+    @classmethod
+    def setUpClass(cls):
+        import sys
+
+        super(RecipetoolTests, cls).setUpClass()
+        bb_vars = get_bb_vars(['BBPATH'])
+        cls.bbpath = bb_vars['BBPATH']
+        libpath = os.path.join(get_bb_var('COREBASE'), 'scripts', 'lib', 'recipetool')
+        sys.path.insert(0, libpath)
 
     def _copy_file_with_cleanup(self, srcfile, basedstdir, *paths):
         dstdir = basedstdir
@@ -524,7 +813,15 @@ class RecipetoolTests(RecipetoolBase):
         for p in paths:
             dstdir = os.path.join(dstdir, p)
             if not os.path.exists(dstdir):
-                os.makedirs(dstdir)
+                try:
+                    os.makedirs(dstdir)
+                except PermissionError:
+                    return False
+                except OSError as e:
+                    if e.errno == errno.EROFS:
+                        return False
+                    else:
+                        raise e
             if p == "lib":
                 # Can race with other tests
                 self.add_command_to_tearDown('rmdir --ignore-fail-on-non-empty %s' % dstdir)
@@ -532,8 +829,12 @@ class RecipetoolTests(RecipetoolBase):
                 self.track_for_cleanup(dstdir)
         dstfile = os.path.join(dstdir, os.path.basename(srcfile))
         if srcfile != dstfile:
-            shutil.copy(srcfile, dstfile)
+            try:
+                shutil.copy(srcfile, dstfile)
+            except PermissionError:
+                return False
             self.track_for_cleanup(dstfile)
+        return True
 
     def test_recipetool_load_plugin(self):
         """Test that recipetool loads only the first found plugin in BBPATH."""
@@ -547,20 +848,148 @@ class RecipetoolTests(RecipetoolBase):
             plugincontent = fh.readlines()
         try:
             self.assertIn('meta-selftest', srcfile, 'wrong bbpath plugin found')
-            for path in searchpath:
-                self._copy_file_with_cleanup(srcfile, path, 'lib', 'recipetool')
+            searchpath = [
+                path for path in searchpath
+                if self._copy_file_with_cleanup(srcfile, path, 'lib', 'recipetool')
+            ]
             result = runCmd("recipetool --quiet count")
             self.assertEqual(result.output, '1')
             result = runCmd("recipetool --quiet multiloaded")
             self.assertEqual(result.output, "no")
             for path in searchpath:
                 result = runCmd("recipetool --quiet bbdir")
-                self.assertEqual(result.output, path)
+                self.assertEqual(os.path.realpath(result.output), os.path.realpath(path))
                 os.unlink(os.path.join(result.output, 'lib', 'recipetool', 'bbpath.py'))
         finally:
             with open(srcfile, 'w') as fh:
                 fh.writelines(plugincontent)
 
+    def test_recipetool_handle_license_vars(self):
+        from create import handle_license_vars
+        from unittest.mock import Mock
+
+        commonlicdir = get_bb_var('COMMON_LICENSE_DIR')
+
+        class DataConnectorCopy(bb.tinfoil.TinfoilDataStoreConnector):
+            pass
+
+        d = DataConnectorCopy
+        d.getVar = Mock(return_value=commonlicdir)
+        d.expand = Mock(side_effect=lambda x: x)
+
+        srctree = tempfile.mkdtemp(prefix='recipetoolqa')
+        self.track_for_cleanup(srctree)
+
+        # Multiple licenses
+        licenses = ['MIT', 'ISC', 'BSD-3-Clause', 'Apache-2.0']
+        for licence in licenses:
+            shutil.copy(os.path.join(commonlicdir, licence), os.path.join(srctree, 'LICENSE.' + licence))
+        # Duplicate license
+        shutil.copy(os.path.join(commonlicdir, 'MIT'), os.path.join(srctree, 'LICENSE'))
+
+        extravalues = {
+            # Duplicate and missing licenses
+            'LICENSE': 'Zlib & BSD-2-Clause & Zlib',
+            'LIC_FILES_CHKSUM': [
+                'file://README.md;md5=0123456789abcdef0123456789abcd'
+            ]
+        }
+        lines_before = []
+        handled = []
+        licvalues = handle_license_vars(srctree, lines_before, handled, extravalues, d)
+        expected_lines_before = [
+            '# WARNING: the following LICENSE and LIC_FILES_CHKSUM values are best guesses - it is',
+            '# your responsibility to verify that the values are complete and correct.',
+            '# NOTE: Original package / source metadata indicates license is: BSD-2-Clause & Zlib',
+            '#',
+            '# NOTE: multiple licenses have been detected; they have been separated with &',
+            '# in the LICENSE value for now since it is a reasonable assumption that all',
+            '# of the licenses apply. If instead there is a choice between the multiple',
+            '# licenses then you should change the value to separate the licenses with |',
+            '# instead of &. If there is any doubt, check the accompanying documentation',
+            '# to determine which situation is applicable.',
+            'LICENSE = "Apache-2.0 & BSD-2-Clause & BSD-3-Clause & ISC & MIT & Zlib"',
+            'LIC_FILES_CHKSUM = "file://LICENSE;md5=0835ade698e0bcf8506ecda2f7b4f302 \\\n'
+            '                    file://LICENSE.Apache-2.0;md5=89aea4e17d99a7cacdbeed46a0096b10 \\\n'
+            '                    file://LICENSE.BSD-3-Clause;md5=550794465ba0ec5312d6919e203a55f9 \\\n'
+            '                    file://LICENSE.ISC;md5=f3b90e78ea0cffb20bf5cca7947a896d \\\n'
+            '                    file://LICENSE.MIT;md5=0835ade698e0bcf8506ecda2f7b4f302 \\\n'
+            '                    file://README.md;md5=0123456789abcdef0123456789abcd"',
+            ''
+        ]
+        self.assertEqual(lines_before, expected_lines_before)
+        expected_licvalues = [
+            ('MIT', 'LICENSE', '0835ade698e0bcf8506ecda2f7b4f302'),
+            ('Apache-2.0', 'LICENSE.Apache-2.0', '89aea4e17d99a7cacdbeed46a0096b10'),
+            ('BSD-3-Clause', 'LICENSE.BSD-3-Clause', '550794465ba0ec5312d6919e203a55f9'),
+            ('ISC', 'LICENSE.ISC', 'f3b90e78ea0cffb20bf5cca7947a896d'),
+            ('MIT', 'LICENSE.MIT', '0835ade698e0bcf8506ecda2f7b4f302')
+        ]
+        self.assertEqual(handled, [('license', expected_licvalues)])
+        self.assertEqual(extravalues, {})
+        self.assertEqual(licvalues, expected_licvalues)
+
+
+    def test_recipetool_split_pkg_licenses(self):
+        from create import split_pkg_licenses
+        licvalues = [
+            # Duplicate licenses
+            ('BSD-2-Clause', 'x/COPYING', None),
+            ('BSD-2-Clause', 'x/LICENSE', None),
+            # Multiple licenses
+            ('MIT', 'x/a/LICENSE.MIT', None),
+            ('ISC', 'x/a/LICENSE.ISC', None),
+            # Alternative licenses
+            ('(MIT | ISC)', 'x/b/LICENSE', None),
+            # Alternative licenses without brackets
+            ('MIT | BSD-2-Clause', 'x/c/LICENSE', None),
+            # Multi licenses with alternatives
+            ('MIT', 'x/d/COPYING', None),
+            ('MIT | BSD-2-Clause', 'x/d/LICENSE', None),
+            # Multi licenses with alternatives and brackets
+            ('Apache-2.0 & ((MIT | ISC) & BSD-3-Clause)', 'x/e/LICENSE', None)
+        ]
+        packages = {
+            '${PN}': '',
+            'a': 'x/a',
+            'b': 'x/b',
+            'c': 'x/c',
+            'd': 'x/d',
+            'e': 'x/e',
+            'f': 'x/f',
+            'g': 'x/g',
+        }
+        fallback_licenses = {
+            # Ignored
+            'a': 'BSD-3-Clause',
+            # Used
+            'f': 'BSD-3-Clause'
+        }
+        outlines = []
+        outlicenses = split_pkg_licenses(licvalues, packages, outlines, fallback_licenses)
+        expected_outlicenses = {
+            '${PN}': ['BSD-2-Clause'],
+            'a': ['ISC', 'MIT'],
+            'b': ['(ISC | MIT)'],
+            'c': ['(BSD-2-Clause | MIT)'],
+            'd': ['(BSD-2-Clause | MIT)', 'MIT'],
+            'e': ['(ISC | MIT)', 'Apache-2.0', 'BSD-3-Clause'],
+            'f': ['BSD-3-Clause'],
+            'g': ['Unknown']
+        }
+        self.assertEqual(outlicenses, expected_outlicenses)
+        expected_outlines = [
+            'LICENSE:${PN} = "BSD-2-Clause"',
+            'LICENSE:a = "ISC & MIT"',
+            'LICENSE:b = "(ISC | MIT)"',
+            'LICENSE:c = "(BSD-2-Clause | MIT)"',
+            'LICENSE:d = "(BSD-2-Clause | MIT) & MIT"',
+            'LICENSE:e = "(ISC | MIT) & Apache-2.0 & BSD-3-Clause"',
+            'LICENSE:f = "BSD-3-Clause"',
+            'LICENSE:g = "Unknown"'
+        ]
+        self.assertEqual(outlines, expected_outlines)
+
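The handle_license_vars test above exercises a plugin function outside a running tinfoil session, so it substitutes a mocked datastore: getVar always answers with COMMON_LICENSE_DIR and expand is an identity function. The same stand-in pattern reduced to its essentials (sketch only; the target function is hypothetical):

# Sketch only: a minimal mocked BitBake datastore for unit-testing code that
# only calls d.getVar() and d.expand().
from unittest.mock import Mock

def make_fake_datastore(common_license_dir):
    d = Mock()
    d.getVar = Mock(return_value=common_license_dir)
    d.expand = Mock(side_effect=lambda x: x)  # pass expressions through untouched
    return d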
 
 class RecipetoolAppendsrcBase(RecipetoolBase):
     def _try_recipetool_appendsrcfile(self, testrecipe, newfile, destfile, options, expectedlines, expectedfiles):
@@ -593,9 +1022,9 @@ class RecipetoolAppendsrcBase(RecipetoolBase):
         for uri in src_uri:
             p = urllib.parse.urlparse(uri)
             if p.scheme == 'file':
-                return p.netloc + p.path
+                return p.netloc + p.path, uri
 
-    def _test_appendsrcfile(self, testrecipe, filename=None, destdir=None, has_src_uri=True, srcdir=None, newfile=None, options=''):
+    def _test_appendsrcfile(self, testrecipe, filename=None, destdir=None, has_src_uri=True, srcdir=None, newfile=None, remove=None, machine=None , options=''):
         if newfile is None:
             newfile = self.testfile
 
@@ -620,14 +1049,42 @@ class RecipetoolAppendsrcBase(RecipetoolBase):
620 else: 1049 else:
621 destpath = '.' + os.sep 1050 destpath = '.' + os.sep
622 1051
623 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 1052 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
624 '\n'] 1053 '\n']
1054
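        # With a machine given, recipetool should emit machine overrides (:append:<machine>, :remove:<machine>) and make the recipe machine-specific via PACKAGE_ARCH.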
1055 override = ""
1056 if machine:
1057 options += ' -m %s' % machine
1058 override = ':append:%s' % machine
1059 expectedlines.extend(['PACKAGE_ARCH = "${MACHINE_ARCH}"\n',
1060 '\n'])
1061
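        # Each entry in 'remove' should yield a SRC_URI:remove line, qualified with the machine override when one is set.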
1062 if remove:
1063 for entry in remove:
1064 if machine:
1065 entry_remove_line = 'SRC_URI:remove:%s = " %s"\n' % (machine, entry)
1066 else:
1067 entry_remove_line = 'SRC_URI:remove = "%s"\n' % entry
1068
1069 expectedlines.extend([entry_remove_line,
1070 '\n'])
1071
625 if has_src_uri: 1072 if has_src_uri:
626 uri = 'file://%s' % filename 1073 uri = 'file://%s' % filename
627 if expected_subdir: 1074 if expected_subdir:
628 uri += ';subdir=%s' % expected_subdir 1075 uri += ';subdir=%s' % expected_subdir
629 expectedlines[0:0] = ['SRC_URI += "%s"\n' % uri, 1076 if machine:
630 '\n'] 1077 src_uri_line = 'SRC_URI%s = " %s"\n' % (override, uri)
1078 else:
1079 src_uri_line = 'SRC_URI += "%s"\n' % uri
1080
1081 expectedlines.extend([src_uri_line, '\n'])
1082
1086 if machine:
1087 filename = '%s/%s' % (machine, filename)
631 1088
632 return self._try_recipetool_appendsrcfile(testrecipe, newfile, destpath, options, expectedlines, [filename]) 1089 return self._try_recipetool_appendsrcfile(testrecipe, newfile, destpath, options, expectedlines, [filename])
633 1090
@@ -674,34 +1131,62 @@ class RecipetoolAppendsrcTests(RecipetoolAppendsrcBase):
674 1131
675 def test_recipetool_appendsrcfile_srcdir_basic(self): 1132 def test_recipetool_appendsrcfile_srcdir_basic(self):
676 testrecipe = 'bash' 1133 testrecipe = 'bash'
677 bb_vars = get_bb_vars(['S', 'WORKDIR'], testrecipe) 1134 bb_vars = get_bb_vars(['S', 'UNPACKDIR'], testrecipe)
678 srcdir = bb_vars['S'] 1135 srcdir = bb_vars['S']
679 workdir = bb_vars['WORKDIR'] 1136 unpackdir = bb_vars['UNPACKDIR']
680 subdir = os.path.relpath(srcdir, workdir) 1137 subdir = os.path.relpath(srcdir, unpackdir)
681 self._test_appendsrcfile(testrecipe, 'a-file', srcdir=subdir) 1138 self._test_appendsrcfile(testrecipe, 'a-file', srcdir=subdir)
682 1139
683 def test_recipetool_appendsrcfile_existing_in_src_uri(self): 1140 def test_recipetool_appendsrcfile_existing_in_src_uri(self):
684 testrecipe = 'base-files' 1141 testrecipe = 'base-files'
685 filepath = self._get_first_file_uri(testrecipe) 1142 filepath, _ = self._get_first_file_uri(testrecipe)
686 self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe) 1143 self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe)
687 self._test_appendsrcfile(testrecipe, filepath, has_src_uri=False) 1144 self._test_appendsrcfile(testrecipe, filepath, has_src_uri=False)
688 1145
689 def test_recipetool_appendsrcfile_existing_in_src_uri_diff_params(self): 1146 def test_recipetool_appendsrcfile_existing_in_src_uri_diff_params(self, machine=None):
690 testrecipe = 'base-files' 1147 testrecipe = 'base-files'
691 subdir = 'tmp' 1148 subdir = 'tmp'
692 filepath = self._get_first_file_uri(testrecipe) 1149 filepath, srcuri_entry = self._get_first_file_uri(testrecipe)
693 self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe) 1150 self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe)
694 1151
695 output = self._test_appendsrcfile(testrecipe, filepath, subdir, has_src_uri=False) 1152 self._test_appendsrcfile(testrecipe, filepath, subdir, machine=machine, remove=[srcuri_entry])
696 self.assertTrue(any('with different parameters' in l for l in output)) 1153
1154 def test_recipetool_appendsrcfile_machine(self):
1155 # A very basic test
1156 self._test_appendsrcfile('base-files', 'a-file', machine='mymachine')
1157
1158 # Force cleaning the output of the previous test
1159 self.tearDownLocal()
1160
1161 # A more complex test: an existing entry in SRC_URI with a different parameter
1162 self.test_recipetool_appendsrcfile_existing_in_src_uri_diff_params(machine='mymachine')
1163
1164 def test_recipetool_appendsrcfile_update_recipe_basic(self):
1165 testrecipe = "mtd-utils-selftest"
1166 recipefile = get_bb_var('FILE', testrecipe)
1167 self.assertIn('meta-selftest', recipefile, 'This test expects the %s recipe to be in meta-selftest' % testrecipe)
1168 cmd = 'recipetool appendsrcfile -W -u meta-selftest %s %s' % (testrecipe, self.testfile)
1169 result = runCmd(cmd)
1170 self.assertNotIn('Traceback', result.output)
1171 self.add_command_to_tearDown('cd %s; rm -f %s/%s; git checkout .' % (os.path.dirname(recipefile), testrecipe, os.path.basename(self.testfile)))
1172
1173 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
1174 ('??', '.*/%s/%s$' % (testrecipe, os.path.basename(self.testfile)))]
1175 self._check_repo_status(os.path.dirname(recipefile), expected_status)
1176 result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile))
1177 removelines = []
1178 addlines = [
1179 'file://%s \\\\' % os.path.basename(self.testfile),
1180 ]
1181 self._check_diff(result.output, addlines, removelines)
697 1182
698 def test_recipetool_appendsrcfile_replace_file_srcdir(self): 1183 def test_recipetool_appendsrcfile_replace_file_srcdir(self):
699 testrecipe = 'bash' 1184 testrecipe = 'bash'
700 filepath = 'Makefile.in' 1185 filepath = 'Makefile.in'
701 bb_vars = get_bb_vars(['S', 'WORKDIR'], testrecipe) 1186 bb_vars = get_bb_vars(['S', 'UNPACKDIR'], testrecipe)
702 srcdir = bb_vars['S'] 1187 srcdir = bb_vars['S']
703 workdir = bb_vars['WORKDIR'] 1188 unpackdir = bb_vars['UNPACKDIR']
704 subdir = os.path.relpath(srcdir, workdir) 1189 subdir = os.path.relpath(srcdir, unpackdir)
705 1190
706 self._test_appendsrcfile(testrecipe, filepath, srcdir=subdir) 1191 self._test_appendsrcfile(testrecipe, filepath, srcdir=subdir)
707 bitbake('%s:do_unpack' % testrecipe) 1192 bitbake('%s:do_unpack' % testrecipe)
diff --git a/meta/lib/oeqa/selftest/cases/recipeutils.py b/meta/lib/oeqa/selftest/cases/recipeutils.py
index 747870383b..e697fd2920 100644
--- a/meta/lib/oeqa/selftest/cases/recipeutils.py
+++ b/meta/lib/oeqa/selftest/cases/recipeutils.py
@@ -1,15 +1,13 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import os
6import re
7import time
8import logging
9import bb.tinfoil 7import bb.tinfoil
10 8
11from oeqa.selftest.case import OESelftestTestCase 9from oeqa.selftest.case import OESelftestTestCase
12from oeqa.utils.commands import runCmd, get_test_layer 10from oeqa.utils.commands import get_test_layer
13 11
14 12
15def setUpModule(): 13def setUpModule():
@@ -40,7 +38,7 @@ class RecipeUtilsTests(OESelftestTestCase):
40 SUMMARY = "Python framework to process interdependent tasks in a pool of workers" 38 SUMMARY = "Python framework to process interdependent tasks in a pool of workers"
41 HOMEPAGE = "http://github.com/gitpython-developers/async" 39 HOMEPAGE = "http://github.com/gitpython-developers/async"
42 SECTION = "devel/python" 40 SECTION = "devel/python"
43-LICENSE = "BSD" 41-LICENSE = "BSD-3-Clause"
44+LICENSE = "something" 42+LICENSE = "something"
45 LIC_FILES_CHKSUM = "file://PKG-INFO;beginline=8;endline=8;md5=88df8e78b9edfd744953862179f2d14e" 43 LIC_FILES_CHKSUM = "file://PKG-INFO;beginline=8;endline=8;md5=88df8e78b9edfd744953862179f2d14e"
46 44
@@ -52,7 +50,7 @@ class RecipeUtilsTests(OESelftestTestCase):
52+SRC_URI[md5sum] = "aaaaaa" 50+SRC_URI[md5sum] = "aaaaaa"
53 SRC_URI[sha256sum] = "ac6894d876e45878faae493b0cf61d0e28ec417334448ac0a6ea2229d8343051" 51 SRC_URI[sha256sum] = "ac6894d876e45878faae493b0cf61d0e28ec417334448ac0a6ea2229d8343051"
54 52
55 RDEPENDS_${PN} += "${PYTHON_PN}-threading" 53 RDEPENDS:${PN} += "python3-threading"
56""" 54"""
57 patchlines = [] 55 patchlines = []
58 for f in patches: 56 for f in patches:
@@ -74,13 +72,13 @@ class RecipeUtilsTests(OESelftestTestCase):
74 expected_patch = """ 72 expected_patch = """
75--- a/recipes-test/recipeutils/recipeutils-test_1.2.bb 73--- a/recipes-test/recipeutils/recipeutils-test_1.2.bb
76+++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb 74+++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb
77@@ -8,6 +8,4 @@ 75@@ -10,6 +10,4 @@
78 76
79 BBCLASSEXTEND = "native nativesdk" 77 BBCLASSEXTEND = "native nativesdk"
80 78
81-SRC_URI += "file://somefile" 79-SRC_URI += "file://somefile"
82- 80-
83 SRC_URI_append = " file://anotherfile" 81 SRC_URI:append = " file://anotherfile"
84""" 82"""
85 patchlines = [] 83 patchlines = []
86 for f in patches: 84 for f in patches:
@@ -99,13 +97,13 @@ class RecipeUtilsTests(OESelftestTestCase):
99 expected_patch = """ 97 expected_patch = """
100--- a/recipes-test/recipeutils/recipeutils-test_1.2.bb 98--- a/recipes-test/recipeutils/recipeutils-test_1.2.bb
101+++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb 99+++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb
102@@ -8,6 +8,3 @@ 100@@ -10,6 +10,3 @@
103 101
104 BBCLASSEXTEND = "native nativesdk" 102 BBCLASSEXTEND = "native nativesdk"
105 103
106-SRC_URI += "file://somefile" 104-SRC_URI += "file://somefile"
107- 105-
108-SRC_URI_append = " file://anotherfile" 106-SRC_URI:append = " file://anotherfile"
109""" 107"""
110 patchlines = [] 108 patchlines = []
111 for f in patches: 109 for f in patches:
diff --git a/meta/lib/oeqa/selftest/cases/reproducible.py b/meta/lib/oeqa/selftest/cases/reproducible.py
index 0d0259477e..f06027cb03 100644
--- a/meta/lib/oeqa/selftest/cases/reproducible.py
+++ b/meta/lib/oeqa/selftest/cases/reproducible.py
@@ -9,35 +9,13 @@ import bb.utils
9import functools 9import functools
10import multiprocessing 10import multiprocessing
11import textwrap 11import textwrap
12import json
13import unittest
14import tempfile 12import tempfile
15import shutil 13import shutil
16import stat 14import stat
17import os 15import os
18import datetime 16import datetime
19 17
20# For sample packages, see:
21# https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20201127-0t7wr_oo/
22# https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20201127-4s9ejwyp/
23# https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20201127-haiwdlbr/
24# https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20201127-hwds3mcl/
25# https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20201203-sua0pzvc/
26# (both packages/ and packages-excluded/)
27
28# ruby-ri-docs, meson:
29#https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20210215-0_td9la2/packages/diff-html/
30exclude_packages = [ 18exclude_packages = [
31 'glide',
32 'go-dep',
33 'go-helloworld',
34 'go-runtime',
35 'go_',
36 'go-',
37 'meson',
38 'ovmf-shell-efi',
39 'perf',
40 'ruby-ri-docs'
41 ] 19 ]
42 20
43def is_excluded(package): 21def is_excluded(package):
@@ -65,13 +43,14 @@ class CompareResult(object):
65 return (self.status, self.test) < (other.status, other.test) 43 return (self.status, self.test) < (other.status, other.test)
66 44
67class PackageCompareResults(object): 45class PackageCompareResults(object):
68 def __init__(self): 46 def __init__(self, exclusions):
69 self.total = [] 47 self.total = []
70 self.missing = [] 48 self.missing = []
71 self.different = [] 49 self.different = []
72 self.different_excluded = [] 50 self.different_excluded = []
73 self.same = [] 51 self.same = []
74 self.active_exclusions = set() 52 self.active_exclusions = set()
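        # Merge caller-supplied exclusions (a space-separated string, possibly empty) into the module-level exclude_packages list.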
53 exclude_packages.extend((exclusions or "").split())
75 54
76 def add_result(self, r): 55 def add_result(self, r):
77 self.total.append(r) 56 self.total.append(r)
@@ -118,8 +97,11 @@ def compare_file(reference, test, diffutils_sysroot):
118 result.status = SAME 97 result.status = SAME
119 return result 98 return result
120 99
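# --no-default-limits is kept, but the explicit --max-* options let callers cap the report and diff block sizes.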
121def run_diffoscope(a_dir, b_dir, html_dir, **kwargs): 100def run_diffoscope(a_dir, b_dir, html_dir, max_report_size=0, max_diff_block_lines=1024, max_diff_block_lines_saved=0, **kwargs):
122 return runCmd(['diffoscope', '--no-default-limits', '--exclude-directory-metadata', 'yes', '--html-dir', html_dir, a_dir, b_dir], 101 return runCmd(['diffoscope', '--no-default-limits', '--max-report-size', str(max_report_size),
102 '--max-diff-block-lines-saved', str(max_diff_block_lines_saved),
103 '--max-diff-block-lines', str(max_diff_block_lines),
104 '--exclude-directory-metadata', 'yes', '--html-dir', html_dir, a_dir, b_dir],
123 **kwargs) 105 **kwargs)
124 106
125class DiffoscopeTests(OESelftestTestCase): 107class DiffoscopeTests(OESelftestTestCase):
@@ -149,10 +131,21 @@ class ReproducibleTests(OESelftestTestCase):
149 131
150 package_classes = ['deb', 'ipk', 'rpm'] 132 package_classes = ['deb', 'ipk', 'rpm']
151 133
134 # Maximum report size, in bytes
135 max_report_size = 250 * 1024 * 1024
136
137 # Maximum diff block size, in lines
138 max_diff_block_lines = 1024
139 # Maximum diff block size (saved in memory), in lines
140 max_diff_block_lines_saved = max_diff_block_lines
141
152 # targets are the things we want to test the reproducibility of 142 # targets are the things we want to test the reproducibility of
153 targets = ['core-image-minimal', 'core-image-sato', 'core-image-full-cmdline', 'core-image-weston', 'world'] 143 # Have to add the virtual targets manually for now as builds may or may not include them as they're excluded from world
144 targets = ['core-image-minimal', 'core-image-sato', 'core-image-full-cmdline', 'core-image-weston', 'world', 'virtual/librpc', 'virtual/libsdl2', 'virtual/crypt']
145
154 # sstate targets are things to pull from sstate to potentially cut build/debugging time 146 # sstate targets are things to pull from sstate to potentially cut build/debugging time
155 sstate_targets = [] 147 sstate_targets = []
148
156 save_results = False 149 save_results = False
157 if 'OEQA_DEBUGGING_SAVED_OUTPUT' in os.environ: 150 if 'OEQA_DEBUGGING_SAVED_OUTPUT' in os.environ:
158 save_results = os.environ['OEQA_DEBUGGING_SAVED_OUTPUT'] 151 save_results = os.environ['OEQA_DEBUGGING_SAVED_OUTPUT']
@@ -167,20 +160,40 @@ class ReproducibleTests(OESelftestTestCase):
167 160
168 def setUpLocal(self): 161 def setUpLocal(self):
169 super().setUpLocal() 162 super().setUpLocal()
170 needed_vars = ['TOPDIR', 'TARGET_PREFIX', 'BB_NUMBER_THREADS'] 163 needed_vars = [
164 'TOPDIR',
165 'TARGET_PREFIX',
166 'BB_NUMBER_THREADS',
167 'BB_HASHSERVE',
168 'OEQA_REPRODUCIBLE_TEST_PACKAGE',
169 'OEQA_REPRODUCIBLE_TEST_TARGET',
170 'OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS',
171 'OEQA_REPRODUCIBLE_EXCLUDED_PACKAGES',
172 'OEQA_REPRODUCIBLE_TEST_LEAF_TARGETS',
173 ]
171 bb_vars = get_bb_vars(needed_vars) 174 bb_vars = get_bb_vars(needed_vars)
172 for v in needed_vars: 175 for v in needed_vars:
173 setattr(self, v.lower(), bb_vars[v]) 176 setattr(self, v.lower(), bb_vars[v])
174 177
178 if bb_vars['OEQA_REPRODUCIBLE_TEST_PACKAGE']:
179 self.package_classes = bb_vars['OEQA_REPRODUCIBLE_TEST_PACKAGE'].split()
180
181 if bb_vars['OEQA_REPRODUCIBLE_TEST_TARGET'] or bb_vars['OEQA_REPRODUCIBLE_TEST_LEAF_TARGETS']:
182 self.targets = (bb_vars['OEQA_REPRODUCIBLE_TEST_TARGET'] or "").split() + (bb_vars['OEQA_REPRODUCIBLE_TEST_LEAF_TARGETS'] or "").split()
183
184 if bb_vars['OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS']:
185 self.sstate_targets = bb_vars['OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS'].split()
186
187 if bb_vars['OEQA_REPRODUCIBLE_TEST_LEAF_TARGETS']:
188 # Setup to build every DEPENDS of leaf recipes using sstate
189 for leaf_recipe in bb_vars['OEQA_REPRODUCIBLE_TEST_LEAF_TARGETS'].split():
190 self.sstate_targets.extend(get_bb_var('DEPENDS', leaf_recipe).split())
191
175 self.extraresults = {} 192 self.extraresults = {}
176 self.extraresults.setdefault('reproducible.rawlogs', {})['log'] = ''
177 self.extraresults.setdefault('reproducible', {}).setdefault('files', {}) 193 self.extraresults.setdefault('reproducible', {}).setdefault('files', {})
178 194
179 def append_to_log(self, msg):
180 self.extraresults['reproducible.rawlogs']['log'] += msg
181
182 def compare_packages(self, reference_dir, test_dir, diffutils_sysroot): 195 def compare_packages(self, reference_dir, test_dir, diffutils_sysroot):
183 result = PackageCompareResults() 196 result = PackageCompareResults(self.oeqa_reproducible_excluded_packages)
184 197
185 old_cwd = os.getcwd() 198 old_cwd = os.getcwd()
186 try: 199 try:
@@ -205,7 +218,7 @@ class ReproducibleTests(OESelftestTestCase):
205 218
206 def write_package_list(self, package_class, name, packages): 219 def write_package_list(self, package_class, name, packages):
207 self.extraresults['reproducible']['files'].setdefault(package_class, {})[name] = [ 220 self.extraresults['reproducible']['files'].setdefault(package_class, {})[name] = [
208 {'reference': p.reference, 'test': p.test} for p in packages] 221 p.reference.split("/./")[1] for p in packages]
209 222
210 def copy_file(self, source, dest): 223 def copy_file(self, source, dest):
211 bb.utils.mkdirhier(os.path.dirname(dest)) 224 bb.utils.mkdirhier(os.path.dirname(dest))
@@ -217,14 +230,11 @@ class ReproducibleTests(OESelftestTestCase):
217 tmpdir = os.path.join(self.topdir, name, 'tmp') 230 tmpdir = os.path.join(self.topdir, name, 'tmp')
218 if os.path.exists(tmpdir): 231 if os.path.exists(tmpdir):
219 bb.utils.remove(tmpdir, recurse=True) 232 bb.utils.remove(tmpdir, recurse=True)
220
221 config = textwrap.dedent('''\ 233 config = textwrap.dedent('''\
222 INHERIT += "reproducible_build"
223 PACKAGE_CLASSES = "{package_classes}" 234 PACKAGE_CLASSES = "{package_classes}"
224 INHIBIT_PACKAGE_STRIP = "1"
225 TMPDIR = "{tmpdir}" 235 TMPDIR = "{tmpdir}"
226 LICENSE_FLAGS_WHITELIST = "commercial" 236 LICENSE_FLAGS_ACCEPTED = "commercial"
227 DISTRO_FEATURES_append = ' systemd pam' 237 DISTRO_FEATURES:append = ' pam'
228 USERADDEXTENSION = "useradd-staticids" 238 USERADDEXTENSION = "useradd-staticids"
229 USERADD_ERROR_DYNAMIC = "skip" 239 USERADD_ERROR_DYNAMIC = "skip"
230 USERADD_UID_TABLES += "files/static-passwd" 240 USERADD_UID_TABLES += "files/static-passwd"
@@ -232,25 +242,70 @@ class ReproducibleTests(OESelftestTestCase):
232 ''').format(package_classes=' '.join('package_%s' % c for c in self.package_classes), 242 ''').format(package_classes=' '.join('package_%s' % c for c in self.package_classes),
233 tmpdir=tmpdir) 243 tmpdir=tmpdir)
234 244
245 # Export BB_CONSOLELOG to the calling function and make it constant to
246 # avoid the case where bitbake gets a timestamp-based filename and
247 # oe-selftest later gets a different one.
248 capture_vars.append("BB_CONSOLELOG")
249 config += 'BB_CONSOLELOG = "${LOG_DIR}/cooker/${MACHINE}/console.log"\n'
250
251 # We want different log files for each build, but a persistent bitbake
252 # may reuse the previous log file, so restart the bitbake server.
253 bitbake("--kill-server")
254
255 def print_condensed_error_log(logs, context_lines=10, tail_lines=20):
256 """Prints errors with context and the end of the log."""
257
258 logs = logs.split("\n")
259 for i, line in enumerate(logs):
260 if line.startswith("ERROR"):
261 self.logger.info("Found ERROR (line %d):" % (i + 1))
262 for l in logs[max(0, i - context_lines):i + context_lines]:  # clamp the start so a negative index doesn't wrap to the end
263 self.logger.info(" " + l)
264
265 self.logger.info("End of log:")
266 for l in logs[-tail_lines:]:
267 self.logger.info(" " + l)
268
269 bitbake_failure_count = 0
235 if not use_sstate: 270 if not use_sstate:
236 if self.sstate_targets: 271 if self.sstate_targets:
237 self.logger.info("Building prebuild for %s (sstate allowed)..." % (name)) 272 self.logger.info("Building prebuild for %s (sstate allowed)..." % (name))
238 self.write_config(config) 273 self.write_config(config)
239 bitbake(' '.join(self.sstate_targets)) 274 try:
275 bitbake("--continue " + ' '.join(self.sstate_targets))
276 except AssertionError as e:
277 bitbake_failure_count += 1
278 self.logger.error("Bitbake failed, but continuing... Log:")
279 print_condensed_error_log(str(e))
240 280
241 # This config fragment will disable using shared and the sstate 281 # This config fragment will disable using shared and the sstate
242 # mirror, forcing a complete build from scratch 282 # mirror, forcing a complete build from scratch
243 config += textwrap.dedent('''\ 283 config += textwrap.dedent('''\
244 SSTATE_DIR = "${TMPDIR}/sstate" 284 SSTATE_DIR = "${TMPDIR}/sstate"
245 SSTATE_MIRRORS = "" 285 SSTATE_MIRRORS = "file://.*/.*-native.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH file://.*/.*-cross.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH"
246 ''') 286 ''')
247 287
248 self.logger.info("Building %s (sstate%s allowed)..." % (name, '' if use_sstate else ' NOT')) 288 self.logger.info("Building %s (sstate%s allowed)..." % (name, '' if use_sstate else ' NOT'))
249 self.write_config(config) 289 self.write_config(config)
250 d = get_bb_vars(capture_vars) 290 d = get_bb_vars(capture_vars)
251 # targets used to be called images 291 try:
252 bitbake(' '.join(getattr(self, 'images', self.targets))) 292 # targets used to be called images
253 return d 293 bitbake("--continue " + ' '.join(getattr(self, 'images', self.targets)))
294 except AssertionError as e:
295 bitbake_failure_count += 1
296 self.logger.error("Bitbake failed, but continuing... Log:")
297 print_condensed_error_log(str(e))
298
299 # The calling function expects the existence of the deploy
300 # directories containing the packages.
301 # If bitbake failed to create them, do it manually
302 for c in self.package_classes:
303 deploy = d['DEPLOY_DIR_' + c.upper()]
304 if not os.path.exists(deploy):
305 self.logger.info("Manually creating %s" % deploy)
306 bb.utils.mkdirhier(deploy)
307
308 return (d, bitbake_failure_count)
254 309
255 def test_reproducible_builds(self): 310 def test_reproducible_builds(self):
256 def strip_topdir(s): 311 def strip_topdir(s):
@@ -272,15 +327,30 @@ class ReproducibleTests(OESelftestTestCase):
272 os.chmod(save_dir, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH) 327 os.chmod(save_dir, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
273 self.logger.info('Non-reproducible packages will be copied to %s', save_dir) 328 self.logger.info('Non-reproducible packages will be copied to %s', save_dir)
274 329
275 vars_A = self.do_test_build('reproducibleA', self.build_from_sstate) 330 # The bug below shows that some reproducibility issues depend on the build directory path length.
331 # https://bugzilla.yoctoproject.org/show_bug.cgi?id=15554
332 # The reproducibleA and reproducibleB-extended directory names are therefore chosen so their path lengths differ.
276 333
277 vars_B = self.do_test_build('reproducibleB', False) 334 fails = []
335 vars_list = [None, None]
336
337 for i, (name, use_sstate) in enumerate(
338 (('reproducibleA', self.build_from_sstate),
339 ('reproducibleB-extended', False))):
340 (variables, bitbake_failure_count) = self.do_test_build(name, use_sstate)
341 if bitbake_failure_count > 0:
342 self.logger.error('%s build failed. Trying to compute built package differences, but the test will fail.' % name)
343 fails.append("Bitbake %s failure" % name)
344 if self.save_results:
345 failure_log_path = os.path.join(save_dir, "bitbake-%s.log" % name)
346 self.logger.info('Failure log for %s will be copied to %s' % (name, failure_log_path))
347 self.copy_file(variables["BB_CONSOLELOG"], failure_log_path)
348 vars_list[i] = variables
278 349
350 vars_A, vars_B = vars_list
279 # NOTE: The temp directories from the reproducible build are purposely 351 # NOTE: The temp directories from the reproducible build are purposely
280 # kept after the build so it can be diffed for debugging. 352 # kept after the build so it can be diffed for debugging.
281 353
282 fails = []
283
284 for c in self.package_classes: 354 for c in self.package_classes:
285 with self.subTest(package_class=c): 355 with self.subTest(package_class=c):
286 package_class = 'package_' + c 356 package_class = 'package_' + c
@@ -293,8 +363,6 @@ class ReproducibleTests(OESelftestTestCase):
293 363
294 self.logger.info('Reproducibility summary for %s: %s' % (c, result)) 364 self.logger.info('Reproducibility summary for %s: %s' % (c, result))
295 365
296 self.append_to_log('\n'.join("%s: %s" % (r.status, r.test) for r in result.total))
297
298 self.write_package_list(package_class, 'missing', result.missing) 366 self.write_package_list(package_class, 'missing', result.missing)
299 self.write_package_list(package_class, 'different', result.different) 367 self.write_package_list(package_class, 'different', result.different)
300 self.write_package_list(package_class, 'different_excluded', result.different_excluded) 368 self.write_package_list(package_class, 'different_excluded', result.different_excluded)
@@ -309,9 +377,13 @@ class ReproducibleTests(OESelftestTestCase):
309 self.copy_file(d.reference, '/'.join([save_dir, 'packages-excluded', strip_topdir(d.reference)])) 377 self.copy_file(d.reference, '/'.join([save_dir, 'packages-excluded', strip_topdir(d.reference)]))
310 self.copy_file(d.test, '/'.join([save_dir, 'packages-excluded', strip_topdir(d.test)])) 378 self.copy_file(d.test, '/'.join([save_dir, 'packages-excluded', strip_topdir(d.test)]))
311 379
312 if result.missing or result.different: 380 if result.different:
313 fails.append("The following %s packages are missing or different and not in exclusion list: %s" % 381 fails.append("The following %s packages are different and not in exclusion list:\n%s" %
314 (c, '\n'.join(r.test for r in (result.missing + result.different)))) 382 (c, '\n'.join(r.test for r in (result.different))))
383
384 if result.missing and len(self.sstate_targets) == 0:
385 fails.append("The following %s packages are missing and not in exclusion list:\n%s" %
386 (c, '\n'.join(r.test for r in (result.missing))))
315 387
316 # Clean up empty directories 388 # Clean up empty directories
317 if self.save_results: 389 if self.save_results:
@@ -325,7 +397,9 @@ class ReproducibleTests(OESelftestTestCase):
325 # Copy jquery to improve the diffoscope output usability 397 # Copy jquery to improve the diffoscope output usability
326 self.copy_file(os.path.join(jquery_sysroot, 'usr/share/javascript/jquery/jquery.min.js'), os.path.join(package_html_dir, 'jquery.js')) 398 self.copy_file(os.path.join(jquery_sysroot, 'usr/share/javascript/jquery/jquery.min.js'), os.path.join(package_html_dir, 'jquery.js'))
327 399
328 run_diffoscope('reproducibleA', 'reproducibleB', package_html_dir, 400 run_diffoscope('reproducibleA', 'reproducibleB-extended', package_html_dir, max_report_size=self.max_report_size,
401 max_diff_block_lines_saved=self.max_diff_block_lines_saved,
402 max_diff_block_lines=self.max_diff_block_lines,
329 native_sysroot=diffoscope_sysroot, ignore_status=True, cwd=package_dir) 403 native_sysroot=diffoscope_sysroot, ignore_status=True, cwd=package_dir)
330 404
331 if fails: 405 if fails:
diff --git a/meta/lib/oeqa/selftest/cases/resulttooltests.py b/meta/lib/oeqa/selftest/cases/resulttooltests.py
index dac5c46801..c3303f3fbb 100644
--- a/meta/lib/oeqa/selftest/cases/resulttooltests.py
+++ b/meta/lib/oeqa/selftest/cases/resulttooltests.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -69,7 +71,7 @@ class ResultToolTests(OESelftestTestCase):
69 self.assertTrue('target_result1' in results['runtime/mydistro/qemux86/image'], msg="Pair not correct:%s" % results) 71 self.assertTrue('target_result1' in results['runtime/mydistro/qemux86/image'], msg="Pair not correct:%s" % results)
70 self.assertTrue('target_result3' in results['runtime/mydistro/qemux86-64/image'], msg="Pair not correct:%s" % results) 72 self.assertTrue('target_result3' in results['runtime/mydistro/qemux86-64/image'], msg="Pair not correct:%s" % results)
71 73
72 def test_regrresion_can_get_regression_result(self): 74 def test_regression_can_get_regression_result(self):
73 base_result_data = {'result': {'test1': {'status': 'PASSED'}, 75 base_result_data = {'result': {'test1': {'status': 'PASSED'},
74 'test2': {'status': 'PASSED'}, 76 'test2': {'status': 'PASSED'},
75 'test3': {'status': 'FAILED'}, 77 'test3': {'status': 'FAILED'},
@@ -96,3 +98,278 @@ class ResultToolTests(OESelftestTestCase):
96 resultutils.append_resultsdata(results, ResultToolTests.target_results_data, configmap=resultutils.flatten_map) 98 resultutils.append_resultsdata(results, ResultToolTests.target_results_data, configmap=resultutils.flatten_map)
97 self.assertEqual(len(results[''].keys()), 5, msg="Flattened results not correct %s" % str(results)) 99 self.assertEqual(len(results[''].keys()), 5, msg="Flattened results not correct %s" % str(results))
98 100
101 def test_results_without_metadata_can_be_compared(self):
102 base_configuration = {"configuration": {
103 "TEST_TYPE": "oeselftest",
104 "TESTSERIES": "series1",
105 "IMAGE_BASENAME": "image",
106 "IMAGE_PKGTYPE": "ipk",
107 "DISTRO": "mydistro",
108 "MACHINE": "qemux86",
109 "STARTTIME": 1672527600
110 }, "result": {}}
111 target_configuration = {"configuration": {
112 "TEST_TYPE": "oeselftest",
113 "TESTSERIES": "series1",
114 "IMAGE_BASENAME": "image",
115 "IMAGE_PKGTYPE": "ipk",
116 "DISTRO": "mydistro",
117 "MACHINE": "qemux86",
118 "STARTTIME": 1672527600
119 }, "result": {}}
120 self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
121 msg="incorrect metadata filtering, tests without metadata should be compared")
122
123 def test_target_result_with_missing_metadata_can_not_be_compared(self):
124 base_configuration = {"configuration": {
125 "TEST_TYPE": "oeselftest",
126 "TESTSERIES": "series1",
127 "IMAGE_BASENAME": "image",
128 "IMAGE_PKGTYPE": "ipk",
129 "DISTRO": "mydistro",
130 "MACHINE": "qemux86",
131 "OESELFTEST_METADATA": {
132 "run_all_tests": True,
133 "run_tests": None,
134 "skips": None,
135 "machine": None,
136 "select_tags": ["toolchain-user", "toolchain-system"],
137 "exclude_tags": None
138 }}, "result": {}}
139 target_configuration = {"configuration": {"TEST_TYPE": "oeselftest",
140 "TESTSERIES": "series1",
141 "IMAGE_BASENAME": "image",
142 "IMAGE_PKGTYPE": "ipk",
143 "DISTRO": "mydistro",
144 "MACHINE": "qemux86",
145 "STARTTIME": 1672527600
146 }, "result": {}}
147 self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
148 msg="incorrect metadata filtering, tests should not be compared")
149
150 def test_results_with_matching_metadata_can_be_compared(self):
151 base_configuration = {"configuration": {
152 "TEST_TYPE": "oeselftest",
153 "TESTSERIES": "series1",
154 "IMAGE_BASENAME": "image",
155 "IMAGE_PKGTYPE": "ipk",
156 "DISTRO": "mydistro",
157 "MACHINE": "qemux86",
158 "STARTTIME": 1672527600,
159 "OESELFTEST_METADATA": {"run_all_tests": True,
160 "run_tests": None,
161 "skips": None,
162 "machine": None,
163 "select_tags": ["toolchain-user", "toolchain-system"],
164 "exclude_tags": None}
165 }, "result": {}}
166 target_configuration = {"configuration": {
167 "TEST_TYPE": "oeselftest",
168 "TESTSERIES": "series1",
169 "IMAGE_BASENAME": "image",
170 "IMAGE_PKGTYPE": "ipk",
171 "DISTRO": "mydistro",
172 "MACHINE": "qemux86",
173 "STARTTIME": 1672527600,
174 "OESELFTEST_METADATA": {"run_all_tests": True,
175 "run_tests": None,
176 "skips": None,
177 "machine": None,
178 "select_tags": ["toolchain-user", "toolchain-system"],
179 "exclude_tags": None}
180 }, "result": {}}
181 self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
182 msg="incorrect metadata filtering, tests with matching metadata should be compared")
183
184 def test_results_with_mismatching_metadata_can_not_be_compared(self):
185 base_configuration = {"configuration": {
186 "TEST_TYPE": "oeselftest",
187 "TESTSERIES": "series1",
188 "IMAGE_BASENAME": "image",
189 "IMAGE_PKGTYPE": "ipk",
190 "DISTRO": "mydistro",
191 "MACHINE": "qemux86",
192 "STARTTIME": 1672527600,
193 "OESELFTEST_METADATA": {"run_all_tests": True,
194 "run_tests": None,
195 "skips": None,
196 "machine": None,
197 "select_tags": ["toolchain-user", "toolchain-system"],
198 "exclude_tags": None}
199 }, "result": {}}
200 target_configuration = {"configuration": {
201 "TEST_TYPE": "oeselftest",
202 "TESTSERIES": "series1",
203 "IMAGE_BASENAME": "image",
204 "IMAGE_PKGTYPE": "ipk",
205 "DISTRO": "mydistro",
206 "MACHINE": "qemux86",
207 "STARTTIME": 1672527600,
208 "OESELFTEST_METADATA": {"run_all_tests": True,
209 "run_tests": None,
210 "skips": None,
211 "machine": None,
212 "select_tags": ["machine"],
213 "exclude_tags": None}
214 }, "result": {}}
215 self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
216 msg="incorrect metadata filtering, tests with mismatching metadata should not be compared")
217
218 def test_metadata_matching_is_only_checked_for_relevant_test_type(self):
219 base_configuration = {"configuration": {"TEST_TYPE": "runtime",
220 "TESTSERIES": "series1",
221 "IMAGE_BASENAME": "image",
222 "IMAGE_PKGTYPE": "ipk",
223 "DISTRO": "mydistro",
224 "MACHINE": "qemux86",
225 "STARTTIME": 1672527600,
226 "OESELFTEST_METADATA": {"run_all_tests": True,
227 "run_tests": None,
228 "skips": None,
229 "machine": None,
230 "select_tags": ["toolchain-user", "toolchain-system"],
231 "exclude_tags": None}}, "result": {}}
232 target_configuration = {"configuration": {"TEST_TYPE": "runtime",
233 "TESTSERIES": "series1",
234 "IMAGE_BASENAME": "image",
235 "IMAGE_PKGTYPE": "ipk",
236 "DISTRO": "mydistro",
237 "MACHINE": "qemux86",
238 "STARTTIME": 1672527600,
239 "OESELFTEST_METADATA": {"run_all_tests": True,
240 "run_tests": None,
241 "skips": None,
242 "machine": None,
243 "select_tags": ["machine"],
244 "exclude_tags": None}}, "result": {}}
245 self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
246 msg="incorrect metadata filtering, %s tests should be compared" % base_configuration['configuration']['TEST_TYPE'])
247
248 def test_machine_matches(self):
249 base_configuration = {"configuration": {
250 "TEST_TYPE": "runtime",
251 "MACHINE": "qemux86"}, "result": {}}
252 target_configuration = {"configuration": {
253 "TEST_TYPE": "runtime",
254 "MACHINE": "qemux86"
255 }, "result": {}}
256 self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
257 msg="incorrect machine filtering, identical machine tests should be compared")
258
259 def test_machine_mismatches(self):
260 base_configuration = {"configuration": {
261 "TEST_TYPE": "runtime",
262 "MACHINE": "qemux86"
263 }, "result": {}}
264 target_configuration = {"configuration": {
265 "TEST_TYPE": "runtime",
266 "MACHINE": "qemux86_64"
267 }, "result": {}}
268 self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
269 msg="incorrect machine filtering, mismatching machine tests should not be compared")
270
271 def test_can_not_compare_non_ltp_tests(self):
272 base_configuration = {"configuration": {
273 "TEST_TYPE": "runtime",
274 "MACHINE": "qemux86"
275 }, "result": {
276 "ltpresult_foo": {
277 "status": "PASSED"
278 }}}
279 target_configuration = {"configuration": {
280 "TEST_TYPE": "runtime",
281 "MACHINE": "qemux86_64"
282 }, "result": {
283 "bar": {
284 "status": "PASSED"
285 }}}
286 self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
287 msg="incorrect ltpresult filtering, mismatching ltpresult content should not be compared")
288
289 def test_can_compare_ltp_tests(self):
290 base_configuration = {"configuration": {
291 "TEST_TYPE": "runtime",
292 "MACHINE": "qemux86"
293 }, "result": {
294 "ltpresult_foo": {
295 "status": "PASSED"
296 }}}
297 target_configuration = {"configuration": {
298 "TEST_TYPE": "runtime",
299 "MACHINE": "qemux86"
300 }, "result": {
301 "ltpresult_foo": {
302 "status": "PASSED"
303 }}}
304 self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
305 msg="incorrect ltpresult filtering, matching ltpresult content should be compared")
306
307 def test_can_match_non_static_ptest_names(self):
308 base_configuration = {"a": {
309 "conf_X": {
310 "configuration": {
311 "TEST_TYPE": "runtime",
312 "MACHINE": "qemux86"
313 }, "result": {
314 "ptestresult.lttng-tools.foo_-_bar_-_moo": {
315 "status": "PASSED"
316 },
317 "ptestresult.babeltrace.bar_-_moo_-_foo": {
318 "status": "PASSED"
319 },
320 "ptestresult.babeltrace2.moo_-_foo_-_bar": {
321 "status": "PASSED"
322 },
323 "ptestresult.curl.test_0000__foo_out_of_bar": {
324 "status": "PASSED"
325 },
326 "ptestresult.dbus.test_0000__foo_out_of_bar,_remaining:_00:02,_took_0.032s,_duration:_03:32_": {
327 "status": "PASSED"
328 },
329 "ptestresult.binutils-ld.in testcase /foo/build-st-bar/moo/ctf.exp": {
330 "status": "PASSED"
331 },
332 "ptestresult.gcc-libstdc++-v3.Couldn't create remote directory /tmp/runtest.30975 on target": {
333 "status": "PASSED"
334 },
335 "ptestresult.gcc-libgomp.Couldn't create remote directory /tmp/runtest.3657621 on": {
336 "status": "PASSED"
337 }
338 }}}}
339 target_configuration = {"a": {
340 "conf_Y": {
341 "configuration": {
342 "TEST_TYPE": "runtime",
343 "MACHINE": "qemux86"
344 }, "result": {
345 "ptestresult.lttng-tools.foo_-_yyy_-_zzz": {
346 "status": "PASSED"
347 },
348 "ptestresult.babeltrace.bar_-_zzz_-_xxx": {
349 "status": "PASSED"
350 },
351 "ptestresult.babeltrace2.moo_-_xxx_-_yyy": {
352 "status": "PASSED"
353 },
354 "ptestresult.curl.test_0000__xxx_out_of_yyy": {
355 "status": "PASSED"
356 },
357 "ptestresult.dbus.test_0000__yyy_out_of_zzz,_remaining:_00:03,_took_0.034s,_duration:_03:30_": {
358 "status": "PASSED"
359 },
360 "ptestresult.binutils-ld.in testcase /xxx/build-st-yyy/zzz/ctf.exp": {
361 "status": "PASSED"
362 },
363 "ptestresult.gcc-libstdc++-v3.Couldn't create remote directory /tmp/runtest.45678 on target": {
364 "status": "PASSED"
365 },
366 "ptestresult.gcc-libgomp.Couldn't create remote directory /tmp/runtest.3657621 on": {
367 "status": "PASSED"
368 }
369 }}}}
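        # fixup_ptest_names() normalizes the volatile parts of ptest result names (paths, PIDs, timings) so equivalent results from both runs can be matched.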
370 regression.fixup_ptest_names(base_configuration, self.logger)
371 regression.fixup_ptest_names(target_configuration, self.logger)
372 result, resultstring = regression.compare_result(
373 self.logger, "A", "B", base_configuration["a"]["conf_X"], target_configuration["a"]["conf_Y"])
374 self.assertDictEqual(
375 result, {}, msg=f"ptests should be compared: {resultstring}")
diff --git a/meta/lib/oeqa/selftest/cases/retain.py b/meta/lib/oeqa/selftest/cases/retain.py
new file mode 100644
index 0000000000..892be45857
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/retain.py
@@ -0,0 +1,241 @@
1# Tests for retain.bbclass
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: MIT
6#
7
8import os
9import glob
10import fnmatch
11import oe.path
12import shutil
13import tarfile
14from oeqa.utils.commands import bitbake, get_bb_vars
15from oeqa.selftest.case import OESelftestTestCase
16
17class Retain(OESelftestTestCase):
18
19 def test_retain_always(self):
20 """
21 Summary: Test retain class with RETAIN_DIRS_ALWAYS
22 Expected: Archive written to RETAIN_OUTDIR when build of test recipe completes
23 Product: oe-core
24 Author: Paul Eggleton <paul.eggleton@microsoft.com>
25 """
26
27 test_recipe = 'quilt-native'
28
29 features = 'INHERIT += "retain"\n'
30 features += 'RETAIN_DIRS_ALWAYS = "${T}"\n'
31 self.write_config(features)
32
33 bitbake('-c clean %s' % test_recipe)
34
35 bb_vars = get_bb_vars(['RETAIN_OUTDIR', 'TMPDIR'])
36 retain_outdir = bb_vars['RETAIN_OUTDIR'] or ''
37 tmpdir = bb_vars['TMPDIR']
38 if len(retain_outdir) < 5:
39 self.fail('RETAIN_OUTDIR value "%s" is invalid' % retain_outdir)
40 if not oe.path.is_path_parent(tmpdir, retain_outdir):
41 self.fail('RETAIN_OUTDIR (%s) is not underneath TMPDIR (%s)' % (retain_outdir, tmpdir))
42 try:
43 shutil.rmtree(retain_outdir)
44 except FileNotFoundError:
45 pass
46
47 bitbake(test_recipe)
48 if not glob.glob(os.path.join(retain_outdir, '%s_temp_*.tar.gz' % test_recipe)):
49 self.fail('No output archive for %s created' % test_recipe)
50
51
52 def test_retain_failure(self):
53 """
54 Summary: Test retain class default behaviour
55 Expected: Archive written to RETAIN_OUTDIR only when build of test
56 recipe fails, and archive contents are as expected
57 Product: oe-core
58 Author: Paul Eggleton <paul.eggleton@microsoft.com>
59 """
60
61 test_recipe_fail = 'error'
62
63 features = 'INHERIT += "retain"\n'
64 self.write_config(features)
65
66 bb_vars = get_bb_vars(['RETAIN_OUTDIR', 'TMPDIR', 'RETAIN_DIRS_ALWAYS', 'RETAIN_DIRS_GLOBAL_ALWAYS'])
67 if bb_vars['RETAIN_DIRS_ALWAYS']:
68 self.fail('RETAIN_DIRS_ALWAYS is set, this interferes with the test')
69 if bb_vars['RETAIN_DIRS_GLOBAL_ALWAYS']:
70 self.fail('RETAIN_DIRS_GLOBAL_ALWAYS is set, this interferes with the test')
71 retain_outdir = bb_vars['RETAIN_OUTDIR'] or ''
72 tmpdir = bb_vars['TMPDIR']
73 if len(retain_outdir) < 5:
74 self.fail('RETAIN_OUTDIR value "%s" is invalid' % retain_outdir)
75 if not oe.path.is_path_parent(tmpdir, retain_outdir):
76 self.fail('RETAIN_OUTDIR (%s) is not underneath TMPDIR (%s)' % (retain_outdir, tmpdir))
77
78 try:
79 shutil.rmtree(retain_outdir)
80 except FileNotFoundError:
81 pass
82
83 bitbake('-c clean %s' % test_recipe_fail)
84
85 if os.path.exists(retain_outdir):
86 retain_dirlist = os.listdir(retain_outdir)
87 if retain_dirlist:
88 self.fail('RETAIN_OUTDIR should be empty without failure, contents:\n%s' % '\n'.join(retain_dirlist))
89
90 result = bitbake('-c compile %s' % test_recipe_fail, ignore_status=True)
91 if result.status == 0:
92 self.fail('Build of %s did not fail as expected' % test_recipe_fail)
93
94 archives = glob.glob(os.path.join(retain_outdir, '%s_*.tar.gz' % test_recipe_fail))
95 if not archives:
96 self.fail('No output archive for %s created' % test_recipe_fail)
97 if len(archives) > 1:
98 self.fail('More than one archive for %s created' % test_recipe_fail)
99 for archive in archives:
100 found = False
101 archive_prefix = os.path.basename(archive).split('.tar')[0]
102 expected_prefix_start = '%s_workdir' % test_recipe_fail
103 if not archive_prefix.startswith(expected_prefix_start):
104 self.fail('Archive %s name does not start with expected prefix "%s"' % (os.path.basename(archive), expected_prefix_start))
105 with tarfile.open(archive) as tf:
106 for ti in tf:
107 if not fnmatch.fnmatch(ti.name, '%s/*' % archive_prefix):
108 self.fail('File without tarball-named subdirectory within tarball %s: %s' % (os.path.basename(archive), ti.name))
109 if ti.name.endswith('/temp/log.do_compile'):
110 found = True
111 if not found:
112 self.fail('Did not find log.do_compile in output archive %s' % os.path.basename(archive))
113
114
115 def test_retain_global(self):
116 """
117 Summary: Test retain class RETAIN_DIRS_GLOBAL_* behaviour
118 Expected: Ensure RETAIN_DIRS_GLOBAL_ALWAYS always causes an
119 archive to be created, and RETAIN_DIRS_GLOBAL_FAILURE
120 only causes an archive to be created on failure.
121 Also test archive naming (with : character) as an
122 added bonus.
123 Product: oe-core
124 Author: Paul Eggleton <paul.eggleton@microsoft.com>
125 """
126
127 test_recipe = 'quilt-native'
128 test_recipe_fail = 'error'
129
130 features = 'INHERIT += "retain"\n'
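        # The ';prefix=' option overrides the default archive name prefix (otherwise the directory basename is used, as for STAMPS_DIR below).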
131 features += 'RETAIN_DIRS_GLOBAL_ALWAYS = "${LOG_DIR};prefix=buildlogs"\n'
132 features += 'RETAIN_DIRS_GLOBAL_FAILURE = "${STAMPS_DIR}"\n'
133 self.write_config(features)
134
135 bitbake('-c clean %s' % test_recipe)
136
137 bb_vars = get_bb_vars(['RETAIN_OUTDIR', 'TMPDIR', 'STAMPS_DIR'])
138 retain_outdir = bb_vars['RETAIN_OUTDIR'] or ''
139 tmpdir = bb_vars['TMPDIR']
140 if len(retain_outdir) < 5:
141 self.fail('RETAIN_OUTDIR value "%s" is invalid' % retain_outdir)
142 if not oe.path.is_path_parent(tmpdir, retain_outdir):
143 self.fail('RETAIN_OUTDIR (%s) is not underneath TMPDIR (%s)' % (retain_outdir, tmpdir))
144 try:
145 shutil.rmtree(retain_outdir)
146 except FileNotFoundError:
147 pass
148
149 # Test success case
150 bitbake(test_recipe)
151 if not glob.glob(os.path.join(retain_outdir, 'buildlogs_*.tar.gz')):
152 self.fail('No output archive for LOG_DIR created')
153 stamps_dir = bb_vars['STAMPS_DIR']
154 if glob.glob(os.path.join(retain_outdir, '%s_*.tar.gz' % os.path.basename(stamps_dir))):
155 self.fail('Output archive for STAMPS_DIR created when it should not have been')
156
157 # Test failure case
158 result = bitbake('-c compile %s' % test_recipe_fail, ignore_status=True)
159 if result.status == 0:
160 self.fail('Build of %s did not fail as expected' % test_recipe_fail)
161 if not glob.glob(os.path.join(retain_outdir, '%s_*.tar.gz' % os.path.basename(stamps_dir))):
162 self.fail('Output archive for STAMPS_DIR not created')
163 if len(glob.glob(os.path.join(retain_outdir, 'buildlogs_*.tar.gz'))) != 2:
164 self.fail('Should be exactly two buildlogs archives in output dir')
165
166
167 def test_retain_misc(self):
168 """
169 Summary: Test retain class with RETAIN_ENABLED and RETAIN_TARBALL_SUFFIX
170 Expected: Archive written to RETAIN_OUTDIR only when RETAIN_ENABLED is set
171 and archive contents are as expected. Also test archive naming
172 (with : character) as an added bonus.
173 Product: oe-core
174 Author: Paul Eggleton <paul.eggleton@microsoft.com>
175 """
176
177 test_recipe_fail = 'error'
178
179 features = 'INHERIT += "retain"\n'
180 features += 'RETAIN_DIRS_ALWAYS = "${T}"\n'
181 features += 'RETAIN_ENABLED = "0"\n'
182 self.write_config(features)
183
184 bb_vars = get_bb_vars(['RETAIN_OUTDIR', 'TMPDIR'])
185 retain_outdir = bb_vars['RETAIN_OUTDIR'] or ''
186 tmpdir = bb_vars['TMPDIR']
187 if len(retain_outdir) < 5:
188 self.fail('RETAIN_OUTDIR value "%s" is invalid' % retain_outdir)
189 if not oe.path.is_path_parent(tmpdir, retain_outdir):
190 self.fail('RETAIN_OUTDIR (%s) is not underneath TMPDIR (%s)' % (retain_outdir, tmpdir))
191
192 try:
193 shutil.rmtree(retain_outdir)
194 except FileNotFoundError:
195 pass
196
197 bitbake('-c clean %s' % test_recipe_fail)
198 result = bitbake('-c compile %s' % test_recipe_fail, ignore_status=True)
199 if result.status == 0:
200 self.fail('Build of %s did not fail as expected' % test_recipe_fail)
201
202 if os.path.exists(retain_outdir) and os.listdir(retain_outdir):
203 self.fail('RETAIN_OUTDIR should be empty with RETAIN_ENABLED = "0"')
204
205 features = 'INHERIT += "retain"\n'
206 features += 'RETAIN_DIRS_ALWAYS = "${T};prefix=recipelogs"\n'
207 features += 'RETAIN_TARBALL_SUFFIX = "${DATETIME}-testsuffix.tar.bz2"\n'
208 features += 'RETAIN_ENABLED = "1"\n'
209 self.write_config(features)
210
211 result = bitbake('-c compile %s' % test_recipe_fail, ignore_status=True)
212 if result.status == 0:
213 self.fail('Build of %s did not fail as expected' % test_recipe_fail)
214
215 archives = glob.glob(os.path.join(retain_outdir, '%s_*-testsuffix.tar.bz2' % test_recipe_fail))
216 if not archives:
217 self.fail('No output archive for %s created' % test_recipe_fail)
218 if len(archives) != 2:
219 self.fail('Two archives for %s expected, but %d exist' % (test_recipe_fail, len(archives)))
220 recipelogs_found = False
221 workdir_found = False
222 for archive in archives:
223 contents_found = False
224 archive_prefix = os.path.basename(archive).split('.tar')[0]
225 if archive_prefix.startswith('%s_recipelogs' % test_recipe_fail):
226 recipelogs_found = True
227 if archive_prefix.startswith('%s_workdir' % test_recipe_fail):
228 workdir_found = True
229 with tarfile.open(archive, 'r:bz2') as tf:
230 for ti in tf:
231 if not fnmatch.fnmatch(ti.name, '%s/*' % archive_prefix):
232 self.fail('File without tarball-named subdirectory within tarball %s: %s' % (os.path.basename(archive), ti.name))
233 if ti.name.endswith('/log.do_compile'):
234 contents_found = True
235 if not contents_found:
236 # Both archives should contain this file
237 self.fail('Did not find log.do_compile in output archive %s' % os.path.basename(archive))
238 if not recipelogs_found:
239 self.fail('No archive with expected "recipelogs" prefix found')
240 if not workdir_found:
241 self.fail('No archive with expected "workdir" prefix found')
diff --git a/meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py b/meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py
new file mode 100644
index 0000000000..44e2c09a6f
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py
@@ -0,0 +1,97 @@
1# SPDX-FileCopyrightText: Huawei Inc.
2#
3# SPDX-License-Identifier: MIT
4
5import os
6import oe
7import unittest
8from oeqa.selftest.case import OESelftestTestCase
9from oeqa.utils.commands import bitbake, get_bb_vars
10
11class ShadowUtilsTidyFiles(OESelftestTestCase):
12 """
13 Check if shadow image rootfs files are tidy.
14
15 The tests are focused on testing the functionality provided by the
16 'tidy_shadowutils_files' rootfs postprocess command (via
17 SORT_PASSWD_POSTPROCESS_COMMAND).
18 """
19
20 def sysconf_build(self):
21 """
22 Verify if shadow tidy files tests are to be run and if yes, build a
23 test image and return its sysconf rootfs path.
24 """
25
26 test_image = "core-image-minimal"
27
28 config = 'IMAGE_CLASSES += "extrausers"\n'
29 config += 'EXTRA_USERS_PARAMS = "groupadd -g 1000 oeqatester; "\n'
30 config += 'EXTRA_USERS_PARAMS += "useradd -p \'\' -u 1000 -N -g 1000 oeqatester; "\n'
31 self.write_config(config)
32
33 vars = get_bb_vars(("IMAGE_ROOTFS", "SORT_PASSWD_POSTPROCESS_COMMAND", "sysconfdir"),
34 test_image)
35 passwd_postprocess_cmd = vars["SORT_PASSWD_POSTPROCESS_COMMAND"]
36 self.assertIsNotNone(passwd_postprocess_cmd)
37 if passwd_postprocess_cmd.strip() != 'tidy_shadowutils_files;':
38 raise unittest.SkipTest("Test case skipped as the 'tidy_shadowutils_files' "
39 "rootfs postprocess command is not set as SORT_PASSWD_POSTPROCESS_COMMAND.")
40
41 rootfs = vars["IMAGE_ROOTFS"]
42 self.assertIsNotNone(rootfs)
43 sysconfdir = vars["sysconfdir"]
44 self.assertIsNotNone(sysconfdir)
45 bitbake(test_image)
46
47 return oe.path.join(rootfs, sysconfdir)
48
49 def test_shadowutils_backup_files(self):
50 """
51 Test that the rootfs doesn't include any known shadow backup files.
52 """
53
54 backup_files = (
55 'group-',
56 'gshadow-',
57 'passwd-',
58 'shadow-',
59 'subgid-',
60 'subuid-',
61 )
62
63 rootfs_sysconfdir = self.sysconf_build()
64 found = []
65 for backup_file in backup_files:
66 backup_filepath = oe.path.join(rootfs_sysconfdir, backup_file)
67 if os.path.exists(backup_filepath):
68 found.append(backup_file)
69 if found:
70 raise Exception('The following shadow backup files were found in '
71 'the rootfs: %s' % found)
72
73 def test_shadowutils_sorted_files(self):
74 """
75 Test that the 'passwd' and the 'group' shadow utils files are ordered
76 by ID.
77 """
78
79 files = (
80 'passwd',
81 'group',
82 )
83
84 rootfs_sysconfdir = self.sysconf_build()
85 unsorted = []
86 for file in files:
87 filepath = oe.path.join(rootfs_sysconfdir, file)
88 with open(filepath, 'rb') as f:
89 ids = []
90 lines = f.readlines()
91 for line in lines:
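                    # The numeric UID/GID is the third colon-separated field of each passwd/group entry.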
92 entries = line.split(b':')
93 ids.append(int(entries[2]))
94 if ids != sorted(ids):
95 unsorted.append(file)
96 if unsorted:
97 raise Exception("The following files were not sorted by ID as expected: %s" % unsorted)
diff --git a/meta/lib/oeqa/selftest/cases/rpmtests.py b/meta/lib/oeqa/selftest/cases/rpmtests.py
new file mode 100644
index 0000000000..902d7dca3d
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/rpmtests.py
@@ -0,0 +1,14 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import bitbake
9
10class BitbakeTests(OESelftestTestCase):
11
12 def test_rpm_filenames(self):
13 test_recipe = "testrpm"
14 bitbake(test_recipe)
diff --git a/meta/lib/oeqa/selftest/cases/runcmd.py b/meta/lib/oeqa/selftest/cases/runcmd.py
index fa6113d7fa..70047ca0ca 100644
--- a/meta/lib/oeqa/selftest/cases/runcmd.py
+++ b/meta/lib/oeqa/selftest/cases/runcmd.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -27,8 +29,8 @@ class RunCmdTests(OESelftestTestCase):
27 29
28 # The delta is intentionally smaller than the timeout, to detect cases where 30 # The delta is intentionally smaller than the timeout, to detect cases where
29 # we incorrectly apply the timeout more than once. 31 # we incorrectly apply the timeout more than once.
30 TIMEOUT = 5 32 TIMEOUT = 10
31 DELTA = 3 33 DELTA = 8
32 34
33 def test_result_okay(self): 35 def test_result_okay(self):
34 result = runCmd("true") 36 result = runCmd("true")
@@ -56,11 +58,11 @@ class RunCmdTests(OESelftestTestCase):
56 self.assertEqual(result.status, 0) 58 self.assertEqual(result.status, 0)
57 59
58 def test_result_assertion(self): 60 def test_result_assertion(self):
59 self.assertRaisesRegexp(AssertionError, "Command 'echo .* false' returned non-zero exit status 1:\nfoobar", 61 self.assertRaisesRegex(AssertionError, "Command 'echo .* false' returned non-zero exit status 1:\nfoobar",
60 runCmd, "echo foobar >&2; false", shell=True) 62 runCmd, "echo foobar >&2; false", shell=True)
61 63
62 def test_result_exception(self): 64 def test_result_exception(self):
63 self.assertRaisesRegexp(CommandError, "Command 'echo .* false' returned non-zero exit status 1 with output: foobar", 65 self.assertRaisesRegex(CommandError, "Command 'echo .* false' returned non-zero exit status 1 with output: foobar",
64 runCmd, "echo foobar >&2; false", shell=True, assert_error=False) 66 runCmd, "echo foobar >&2; false", shell=True, assert_error=False)
65 67
66 def test_output(self): 68 def test_output(self):
diff --git a/meta/lib/oeqa/selftest/cases/runqemu.py b/meta/lib/oeqa/selftest/cases/runqemu.py
index 7e676bcb41..f01e1eec66 100644
--- a/meta/lib/oeqa/selftest/cases/runqemu.py
+++ b/meta/lib/oeqa/selftest/cases/runqemu.py
@@ -4,14 +4,17 @@
4# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
5# 5#
6 6
7import os
7import re 8import re
8import tempfile
9import time 9import time
10import oe.types 10import oe.types
11from oeqa.core.decorator import OETestTag 11from oeqa.core.decorator import OETestTag
12from oeqa.core.decorator.data import skipIfNotArch, skipIfNotMachine
12from oeqa.selftest.case import OESelftestTestCase 13from oeqa.selftest.case import OESelftestTestCase
13from oeqa.utils.commands import bitbake, runqemu, get_bb_var, runCmd 14from oeqa.utils.commands import bitbake, runqemu, get_bb_var
14 15
16
17@OETestTag("runqemu")
15class RunqemuTests(OESelftestTestCase): 18class RunqemuTests(OESelftestTestCase):
16 """Runqemu test class""" 19 """Runqemu test class"""
17 20
@@ -21,23 +24,26 @@ class RunqemuTests(OESelftestTestCase):
21 def setUpLocal(self): 24 def setUpLocal(self):
22 super(RunqemuTests, self).setUpLocal() 25 super(RunqemuTests, self).setUpLocal()
23 self.recipe = 'core-image-minimal' 26 self.recipe = 'core-image-minimal'
24 self.machine = 'qemux86-64' 27 self.machine = self.td['MACHINE']
25 self.fstypes = "ext4 iso hddimg wic.vmdk wic.qcow2 wic.vdi" 28 self.image_link_name = get_bb_var('IMAGE_LINK_NAME', self.recipe)
26 self.cmd_common = "runqemu nographic"
27 29
28 kvm = oe.types.qemu_use_kvm(get_bb_var('QEMU_USE_KVM'), 'x86_64') 30 self.fstypes = "ext4"
31 if self.td["HOST_ARCH"] in ('i586', 'i686', 'x86_64'):
32 self.fstypes += " iso hddimg"
33 if self.machine == "qemux86-64":
34 self.fstypes += " wic.vmdk wic.qcow2 wic.vdi"
35
36 self.cmd_common = "runqemu nographic"
37 kvm = oe.types.qemu_use_kvm(get_bb_var('QEMU_USE_KVM'), self.td["TARGET_ARCH"])
29 if kvm: 38 if kvm:
30 self.cmd_common += " kvm" 39 self.cmd_common += " kvm"
31 40
32 self.write_config( 41 self.write_config(
33""" 42"""
34MACHINE = "%s"
35IMAGE_FSTYPES = "%s" 43IMAGE_FSTYPES = "%s"
36# 10 means 1 second 44# 10 means 1 second
37SYSLINUX_TIMEOUT = "10" 45SYSLINUX_TIMEOUT = "10"
38""" 46""" % self.fstypes)
39% (self.machine, self.fstypes)
40 )
41 47
42 if not RunqemuTests.image_is_ready: 48 if not RunqemuTests.image_is_ready:
43 RunqemuTests.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') 49 RunqemuTests.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
@@ -56,14 +62,17 @@ SYSLINUX_TIMEOUT = "10"
56 cmd = "%s %s ext4" % (self.cmd_common, self.machine) 62 cmd = "%s %s ext4" % (self.cmd_common, self.machine)
57 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: 63 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
58 with open(qemu.qemurunnerlog) as f: 64 with open(qemu.qemurunnerlog) as f:
59 self.assertIn('rootfs.ext4', f.read(), "Failed: %s" % cmd) 65 regexp = r'\nROOTFS: .*\.ext4]\n'
66 self.assertRegex(f.read(), regexp, "Failed to find '%s' in '%s' after running '%s'" % (regexp, qemu.qemurunnerlog, cmd))
60 67
68 @skipIfNotArch(['i586', 'i686', 'x86_64'])
61 def test_boot_machine_iso(self): 69 def test_boot_machine_iso(self):
62 """Test runqemu machine iso""" 70 """Test runqemu machine iso"""
63 cmd = "%s %s iso" % (self.cmd_common, self.machine) 71 cmd = "%s %s iso" % (self.cmd_common, self.machine)
64 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: 72 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
65 with open(qemu.qemurunnerlog) as f: 73 with open(qemu.qemurunnerlog) as f:
66 self.assertIn('media=cdrom', f.read(), "Failed: %s" % cmd) 74 text_in = 'media=cdrom'
75 self.assertIn(text_in, f.read(), "Failed to find '%s' in '%s' after running '%s'" % (text_in, qemu.qemurunnerlog, cmd))
67 76
68 def test_boot_recipe_image(self): 77 def test_boot_recipe_image(self):
69 """Test runqemu recipe-image""" 78 """Test runqemu recipe-image"""
@@ -72,20 +81,24 @@ SYSLINUX_TIMEOUT = "10"
72 with open(qemu.qemurunnerlog) as f: 81 with open(qemu.qemurunnerlog) as f:
73 self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read())) 82 self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read()))
74 83
75 84 # https://bugzilla.yoctoproject.org/show_bug.cgi?id=14963
85 @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
76 def test_boot_recipe_image_vmdk(self): 86 def test_boot_recipe_image_vmdk(self):
77 """Test runqemu recipe-image vmdk""" 87 """Test runqemu recipe-image vmdk"""
78 cmd = "%s %s wic.vmdk" % (self.cmd_common, self.recipe) 88 cmd = "%s %s wic.vmdk" % (self.cmd_common, self.recipe)
79 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: 89 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
80 with open(qemu.qemurunnerlog) as f: 90 with open(qemu.qemurunnerlog) as f:
81 self.assertIn('format=vmdk', f.read(), "Failed: %s" % cmd) 91 text_in = 'format=vmdk'
92 self.assertIn(text_in, f.read(), "Failed to find '%s' in '%s' after running '%s'" % (text_in, qemu.qemurunnerlog, cmd))
82 93
94 @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
83 def test_boot_recipe_image_vdi(self): 95 def test_boot_recipe_image_vdi(self):
84 """Test runqemu recipe-image vdi""" 96 """Test runqemu recipe-image vdi"""
85 cmd = "%s %s wic.vdi" % (self.cmd_common, self.recipe) 97 cmd = "%s %s wic.vdi" % (self.cmd_common, self.recipe)
86 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: 98 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
87 with open(qemu.qemurunnerlog) as f: 99 with open(qemu.qemurunnerlog) as f:
88 self.assertIn('format=vdi', f.read(), "Failed: %s" % cmd) 100 text_in = 'format=vdi'
101 self.assertIn(text_in, f.read(), "Failed to find '%s' in '%s' after running '%s'" % (text_in, qemu.qemurunnerlog, cmd))
89 102
90 def test_boot_deploy(self): 103 def test_boot_deploy(self):
91 """Test runqemu deploy_dir_image""" 104 """Test runqemu deploy_dir_image"""
@@ -94,7 +107,7 @@ SYSLINUX_TIMEOUT = "10"
94 with open(qemu.qemurunnerlog) as f: 107 with open(qemu.qemurunnerlog) as f:
95 self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read())) 108 self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read()))
96 109
97 110 @skipIfNotArch(['i586', 'i686', 'x86_64'])
98 def test_boot_deploy_hddimg(self): 111 def test_boot_deploy_hddimg(self):
99 """Test runqemu deploy_dir_image hddimg""" 112 """Test runqemu deploy_dir_image hddimg"""
100 cmd = "%s %s hddimg" % (self.cmd_common, self.deploy_dir_image) 113 cmd = "%s %s hddimg" % (self.cmd_common, self.deploy_dir_image)
@@ -109,6 +122,7 @@ SYSLINUX_TIMEOUT = "10"
109 with open(qemu.qemurunnerlog) as f: 122 with open(qemu.qemurunnerlog) as f:
110 self.assertIn(' -netdev user', f.read(), "Failed: %s" % cmd) 123 self.assertIn(' -netdev user', f.read(), "Failed: %s" % cmd)
111 124
125 @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
112 def test_boot_machine_slirp_qcow2(self): 126 def test_boot_machine_slirp_qcow2(self):
113 """Test runqemu machine slirp qcow2""" 127 """Test runqemu machine slirp qcow2"""
114 cmd = "%s slirp wic.qcow2 %s" % (self.cmd_common, self.machine) 128 cmd = "%s slirp wic.qcow2 %s" % (self.cmd_common, self.machine)
@@ -118,7 +132,7 @@ SYSLINUX_TIMEOUT = "10"
118 132
119 def test_boot_qemu_boot(self): 133 def test_boot_qemu_boot(self):
120 """Test runqemu /path/to/image.qemuboot.conf""" 134 """Test runqemu /path/to/image.qemuboot.conf"""
121 qemuboot_conf = "%s-%s.qemuboot.conf" % (self.recipe, self.machine) 135 qemuboot_conf = "%s.qemuboot.conf" % (self.image_link_name)
122 qemuboot_conf = os.path.join(self.deploy_dir_image, qemuboot_conf) 136 qemuboot_conf = os.path.join(self.deploy_dir_image, qemuboot_conf)
123 if not os.path.exists(qemuboot_conf): 137 if not os.path.exists(qemuboot_conf):
124 self.skipTest("%s not found" % qemuboot_conf) 138 self.skipTest("%s not found" % qemuboot_conf)
@@ -129,7 +143,7 @@ SYSLINUX_TIMEOUT = "10"
129 143
130 def test_boot_rootfs(self): 144 def test_boot_rootfs(self):
131 """Test runqemu /path/to/rootfs.ext4""" 145 """Test runqemu /path/to/rootfs.ext4"""
132 rootfs = "%s-%s.ext4" % (self.recipe, self.machine) 146 rootfs = "%s.ext4" % (self.image_link_name)
133 rootfs = os.path.join(self.deploy_dir_image, rootfs) 147 rootfs = os.path.join(self.deploy_dir_image, rootfs)
134 if not os.path.exists(rootfs): 148 if not os.path.exists(rootfs):
135 self.skipTest("%s not found" % rootfs) 149 self.skipTest("%s not found" % rootfs)
@@ -149,26 +163,27 @@ SYSLINUX_TIMEOUT = "10"
149# boot up various filesystem types, including live images (iso and hddimg) 163# boot up various filesystem types, including live images (iso and hddimg)
150# which were not supported on all qemu architectures. 164# which were not supported on all qemu architectures.
151@OETestTag("machine") 165@OETestTag("machine")
166@OETestTag("runqemu")
152class QemuTest(OESelftestTestCase): 167class QemuTest(OESelftestTestCase):
153 168
154 @classmethod 169 @classmethod
155 def setUpClass(cls): 170 def setUpClass(cls):
156 super(QemuTest, cls).setUpClass() 171 super(QemuTest, cls).setUpClass()
157 cls.recipe = 'core-image-minimal' 172 cls.recipe = 'core-image-minimal'
158 cls.machine = get_bb_var('MACHINE') 173 cls.machine = get_bb_var('MACHINE')
159 cls.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') 174 cls.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
175 cls.image_link_name = get_bb_var('IMAGE_LINK_NAME', cls.recipe)
160 cls.cmd_common = "runqemu nographic" 176 cls.cmd_common = "runqemu nographic"
161 cls.qemuboot_conf = "%s-%s.qemuboot.conf" % (cls.recipe, cls.machine) 177 cls.qemuboot_conf = "%s.qemuboot.conf" % (cls.image_link_name)
162 cls.qemuboot_conf = os.path.join(cls.deploy_dir_image, cls.qemuboot_conf) 178 cls.qemuboot_conf = os.path.join(cls.deploy_dir_image, cls.qemuboot_conf)
163 bitbake(cls.recipe) 179 bitbake(cls.recipe)
164 180
165 def _start_qemu_shutdown_check_if_shutdown_succeeded(self, qemu, timeout): 181 def _start_qemu_shutdown_check_if_shutdown_succeeded(self, qemu, timeout):
182 # Allow the runner's LoggingThread instance to exit without errors
183 # (such as the exception "Console connection closed unexpectedly")
184 # as qemu will disappear when we shut it down
185 qemu.runner.allowexit()
166 qemu.run_serial("shutdown -h now") 186 qemu.run_serial("shutdown -h now")
167 # Stop thread will stop the LoggingThread instance used for logging
168 # qemu through serial console, stop thread will prevent this code
169 # from facing exception (Console connection closed unexpectedly)
170 # when qemu was shutdown by the above shutdown command
171 qemu.runner.stop_thread()
172 time_track = 0 187 time_track = 0
173 try: 188 try:
174 while True: 189 while True:
@@ -190,22 +205,12 @@ class QemuTest(OESelftestTestCase):
190 qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout) 205 qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout)
191 self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout)) 206 self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout))
192 207
193 # Need to have portmap/rpcbind running to allow this test to work and 208 def test_qemu_can_boot_nfs_and_shutdown(self):
194 # current autobuilder setup does not have this. 209 rootfs_tar = "%s.tar.bz2" % (self.image_link_name)
195 def disabled_test_qemu_can_boot_nfs_and_shutdown(self):
196 self.assertExists(self.qemuboot_conf)
197 bitbake('meta-ide-support')
198 rootfs_tar = "%s-%s.tar.bz2" % (self.recipe, self.machine)
199 rootfs_tar = os.path.join(self.deploy_dir_image, rootfs_tar) 210 rootfs_tar = os.path.join(self.deploy_dir_image, rootfs_tar)
200 self.assertExists(rootfs_tar) 211 self.assertExists(rootfs_tar)
201 tmpdir = tempfile.mkdtemp(prefix='qemu_nfs') 212 cmd = "%s %s" % (self.cmd_common, rootfs_tar)
202 tmpdir_nfs = os.path.join(tmpdir, 'nfs')
203 cmd_extract_nfs = 'runqemu-extract-sdk %s %s' % (rootfs_tar, tmpdir_nfs)
204 result = runCmd(cmd_extract_nfs)
205 self.assertEqual(0, result.status, "runqemu-extract-sdk didn't run as expected. %s" % result.output)
206 cmd = "%s nfs %s %s" % (self.cmd_common, self.qemuboot_conf, tmpdir_nfs)
207 shutdown_timeout = 120 213 shutdown_timeout = 120
208 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: 214 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
209 qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout) 215 qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout)
210 self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout)) 216 self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout))
211 runCmd('rm -rf %s' % tmpdir)
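
[Editor's note] The runqemu.py hunks above replace the hard-coded qemux86-64 machine and hand-assembled "<recipe>-<machine>" artifact names with the configured MACHINE and IMAGE_LINK_NAME, and gate machine- or arch-specific cases with decorators. A minimal sketch of that pattern, not part of the patch (the image and fstype here are illustrative assumptions):

    from oeqa.core.decorator.data import skipIfNotArch, skipIfNotMachine
    from oeqa.selftest.case import OESelftestTestCase
    from oeqa.utils.commands import get_bb_var, runqemu

    class BootSketch(OESelftestTestCase):
        @skipIfNotArch(['i586', 'i686', 'x86_64'])     # live images are x86-only
        @skipIfNotMachine("qemux86-64", "wic fstypes are qemux86-64 specific")
        def test_boot_sketch(self):
            recipe = 'core-image-minimal'
            # IMAGE_LINK_NAME resolves the deployed artifact name for any MACHINE
            link_name = get_bb_var('IMAGE_LINK_NAME', recipe)
            cmd = "runqemu nographic %s wic.qcow2" % self.td['MACHINE']
            with runqemu(recipe, ssh=False, launch_cmd=cmd) as qemu:
                self.assertTrue(qemu.runner.logged,
                                "qemu produced no log for %s" % link_name)
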
diff --git a/meta/lib/oeqa/selftest/cases/runtime_test.py b/meta/lib/oeqa/selftest/cases/runtime_test.py
index b20c5b427b..d58ffa80f5 100644
--- a/meta/lib/oeqa/selftest/cases/runtime_test.py
+++ b/meta/lib/oeqa/selftest/cases/runtime_test.py
@@ -1,24 +1,20 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5from oeqa.selftest.case import OESelftestTestCase 7from oeqa.selftest.case import OESelftestTestCase
6from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu 8from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu
7from oeqa.utils.sshcontrol import SSHControl 9from oeqa.core.decorator import OETestTag
8import os 10import os
9import re
10import tempfile 11import tempfile
11import shutil
12import oe.lsb 12import oe.lsb
13from oeqa.core.decorator.data import skipIfNotQemu 13from oeqa.core.decorator.data import skipIfNotQemu, skipIfNotMachine
14 14
15class TestExport(OESelftestTestCase): 15class TestExport(OESelftestTestCase):
16 16
17 @classmethod 17 @OETestTag("runqemu")
18 def tearDownClass(cls):
19 runCmd("rm -rf /tmp/sdk")
20 super(TestExport, cls).tearDownClass()
21
22 def test_testexport_basic(self): 18 def test_testexport_basic(self):
23 """ 19 """
24 Summary: Check basic testexport functionality with only ping test enabled. 20 Summary: Check basic testexport functionality with only ping test enabled.
@@ -29,7 +25,7 @@ class TestExport(OESelftestTestCase):
29 Author: Mariano Lopez <mariano.lopez@intel.com> 25 Author: Mariano Lopez <mariano.lopez@intel.com>
30 """ 26 """
31 27
32 features = 'INHERIT += "testexport"\n' 28 features = 'IMAGE_CLASSES += "testexport"\n'
33 # These aren't the actual IP addresses but testexport class needs something defined 29 # These aren't the actual IP addresses but testexport class needs something defined
34 features += 'TEST_SERVER_IP = "192.168.7.1"\n' 30 features += 'TEST_SERVER_IP = "192.168.7.1"\n'
35 features += 'TEST_TARGET_IP = "192.168.7.1"\n' 31 features += 'TEST_TARGET_IP = "192.168.7.1"\n'
@@ -70,7 +66,7 @@ class TestExport(OESelftestTestCase):
70 Author: Mariano Lopez <mariano.lopez@intel.com> 66 Author: Mariano Lopez <mariano.lopez@intel.com>
71 """ 67 """
72 68
73 features = 'INHERIT += "testexport"\n' 69 features = 'IMAGE_CLASSES += "testexport"\n'
74 # These aren't the actual IP addresses but testexport class needs something defined 70 # These aren't the actual IP addresses but testexport class needs something defined
75 features += 'TEST_SERVER_IP = "192.168.7.1"\n' 71 features += 'TEST_SERVER_IP = "192.168.7.1"\n'
76 features += 'TEST_TARGET_IP = "192.168.7.1"\n' 72 features += 'TEST_TARGET_IP = "192.168.7.1"\n'
@@ -95,21 +91,23 @@ class TestExport(OESelftestTestCase):
95 msg = "Couldn't find SDK tarball: %s" % tarball_path 91 msg = "Couldn't find SDK tarball: %s" % tarball_path
96 self.assertEqual(os.path.isfile(tarball_path), True, msg) 92 self.assertEqual(os.path.isfile(tarball_path), True, msg)
97 93
98 # Extract SDK and run tar from SDK 94 with tempfile.TemporaryDirectory() as tmpdirname:
99 result = runCmd("%s -y -d /tmp/sdk" % tarball_path) 95 # Extract SDK and run tar from SDK
100 self.assertEqual(0, result.status, "Couldn't extract SDK") 96 result = runCmd("%s -y -d %s" % (tarball_path, tmpdirname))
97 self.assertEqual(0, result.status, "Couldn't extract SDK")
101 98
102 env_script = result.output.split()[-1] 99 env_script = result.output.split()[-1]
103 result = runCmd(". %s; which tar" % env_script, shell=True) 100 result = runCmd(". %s; which tar" % env_script, shell=True)
104 self.assertEqual(0, result.status, "Couldn't setup SDK environment") 101 self.assertEqual(0, result.status, "Couldn't setup SDK environment")
105 is_sdk_tar = True if "/tmp/sdk" in result.output else False 102 is_sdk_tar = True if tmpdirname in result.output else False
106 self.assertTrue(is_sdk_tar, "Couldn't setup SDK environment") 103 self.assertTrue(is_sdk_tar, "Couldn't setup SDK environment")
107 104
108 tar_sdk = result.output 105 tar_sdk = result.output
109 result = runCmd("%s --version" % tar_sdk) 106 result = runCmd("%s --version" % tar_sdk)
110 self.assertEqual(0, result.status, "Couldn't run tar from SDK") 107 self.assertEqual(0, result.status, "Couldn't run tar from SDK")
111 108
112 109
110@OETestTag("runqemu")
113class TestImage(OESelftestTestCase): 111class TestImage(OESelftestTestCase):
114 112
115 def test_testimage_install(self): 113 def test_testimage_install(self):
@@ -123,15 +121,30 @@ class TestImage(OESelftestTestCase):
123 if get_bb_var('DISTRO') == 'poky-tiny': 121 if get_bb_var('DISTRO') == 'poky-tiny':
124 self.skipTest('core-image-full-cmdline not buildable for poky-tiny') 122 self.skipTest('core-image-full-cmdline not buildable for poky-tiny')
125 123
126 features = 'INHERIT += "testimage"\n' 124 features = 'IMAGE_CLASSES += "testimage"\n'
127 features += 'IMAGE_INSTALL_append = " libssl"\n' 125 features += 'IMAGE_INSTALL:append = " libssl"\n'
128 features += 'TEST_SUITES = "ping ssh selftest"\n' 126 features += 'TEST_SUITES = "ping ssh selftest"\n'
129 self.write_config(features) 127 self.write_config(features)
130 128
131 # Build core-image-sato and testimage
132 bitbake('core-image-full-cmdline socat') 129 bitbake('core-image-full-cmdline socat')
133 bitbake('-c testimage core-image-full-cmdline') 130 bitbake('-c testimage core-image-full-cmdline')
134 131
132 def test_testimage_slirp(self):
133 """
134 Summary: Check basic testimage functionality with qemu and slirp networking.
135 """
136
137 features = '''
138IMAGE_CLASSES:append = " testimage"
139IMAGE_FEATURES:append = " ssh-server-dropbear"
140IMAGE_ROOTFS_EXTRA_SPACE:append = "${@bb.utils.contains("IMAGE_CLASSES", "testimage", " + 5120", "", d)}"
141TEST_RUNQEMUPARAMS += " slirp"
142'''
143 self.write_config(features)
144
145 bitbake('core-image-minimal')
146 bitbake('-c testimage core-image-minimal')
147
135 def test_testimage_dnf(self): 148 def test_testimage_dnf(self):
136 """ 149 """
137 Summary: Check package feeds functionality for dnf 150 Summary: Check package feeds functionality for dnf
@@ -142,7 +155,7 @@ class TestImage(OESelftestTestCase):
142 if get_bb_var('DISTRO') == 'poky-tiny': 155 if get_bb_var('DISTRO') == 'poky-tiny':
143 self.skipTest('core-image-full-cmdline not buildable for poky-tiny') 156 self.skipTest('core-image-full-cmdline not buildable for poky-tiny')
144 157
145 features = 'INHERIT += "testimage"\n' 158 features = 'IMAGE_CLASSES += "testimage"\n'
146 features += 'TEST_SUITES = "ping ssh dnf_runtime dnf.DnfBasicTest.test_dnf_help"\n' 159 features += 'TEST_SUITES = "ping ssh dnf_runtime dnf.DnfBasicTest.test_dnf_help"\n'
147 # We don't yet know what the server ip and port will be - they will be patched 160 # We don't yet know what the server ip and port will be - they will be patched
148 # in at the start of the on-image test 161 # in at the start of the on-image test
@@ -161,13 +174,50 @@ class TestImage(OESelftestTestCase):
161 features += 'PACKAGE_FEED_GPG_NAME = "testuser"\n' 174 features += 'PACKAGE_FEED_GPG_NAME = "testuser"\n'
162 features += 'PACKAGE_FEED_GPG_PASSPHRASE_FILE = "%s"\n' % os.path.join(signing_key_dir, 'key.passphrase') 175 features += 'PACKAGE_FEED_GPG_PASSPHRASE_FILE = "%s"\n' % os.path.join(signing_key_dir, 'key.passphrase')
163 features += 'GPG_PATH = "%s"\n' % self.gpg_home 176 features += 'GPG_PATH = "%s"\n' % self.gpg_home
164 features += 'PSEUDO_IGNORE_PATHS .= ",%s"\n' % self.gpg_home 177 self.write_config(features)
178
179 bitbake('core-image-full-cmdline socat')
180 bitbake('-c testimage core-image-full-cmdline')
181
182 def test_testimage_apt(self):
183 """
184 Summary: Check package feeds functionality for apt
185 Expected: 1. Check that remote package feeds can be accessed
186 Product: oe-core
187 Author: Ferry Toth <fntoth@gmail.com>
188 """
189 if get_bb_var('DISTRO') == 'poky-tiny':
190 self.skipTest('core-image-full-cmdline not buildable for poky-tiny')
191
192 features = 'IMAGE_CLASSES += "testimage"\n'
193 features += 'TEST_SUITES = "ping ssh apt.AptRepoTest.test_apt_install_from_repo"\n'
194 # We don't yet know what the server ip and port will be - they will be patched
195 # in at the start of the on-image test
196 features += 'PACKAGE_FEED_URIS = "http://bogus_ip:bogus_port"\n'
197 features += 'EXTRA_IMAGE_FEATURES += "package-management"\n'
198 features += 'PACKAGE_CLASSES = "package_deb"\n'
199 # We need gnupg on the target to install keys
200 features += 'IMAGE_INSTALL:append:pn-core-image-full-cmdline = " gnupg"\n'
201
202 bitbake('gnupg-native -c addto_recipe_sysroot')
203
204 # Enable package feed signing
205 self.gpg_home = tempfile.mkdtemp(prefix="oeqa-feed-sign-")
206 self.track_for_cleanup(self.gpg_home)
207 signing_key_dir = os.path.join(self.testlayer_path, 'files', 'signing')
208 runCmd('gpgconf --list-dirs --homedir %s; gpg -v --batch --homedir %s --import %s' % (self.gpg_home, self.gpg_home, os.path.join(signing_key_dir, 'key.secret')), native_sysroot=get_bb_var("RECIPE_SYSROOT_NATIVE", "gnupg-native"), shell=True)
209 features += 'INHERIT += "sign_package_feed"\n'
210 features += 'PACKAGE_FEED_GPG_NAME = "testuser"\n'
211 features += 'PACKAGE_FEED_GPG_PASSPHRASE_FILE = "%s"\n' % os.path.join(signing_key_dir, 'key.passphrase')
212 features += 'GPG_PATH = "%s"\n' % self.gpg_home
165 self.write_config(features) 213 self.write_config(features)
166 214
167 # Build core-image-sato and testimage 215 # Build core-image-sato and testimage
168 bitbake('core-image-full-cmdline socat') 216 bitbake('core-image-full-cmdline socat')
169 bitbake('-c testimage core-image-full-cmdline') 217 bitbake('-c testimage core-image-full-cmdline')
170 218
219 # https://bugzilla.yoctoproject.org/show_bug.cgi?id=14966
220 @skipIfNotMachine("qemux86-64", "test needs qemux86-64")
171 def test_testimage_virgl_gtk_sdl(self): 221 def test_testimage_virgl_gtk_sdl(self):
172 """ 222 """
173 Summary: Check host-assisted accelerate OpenGL functionality in qemu with gtk and SDL frontends 223 Summary: Check host-assisted accelerate OpenGL functionality in qemu with gtk and SDL frontends
@@ -190,25 +240,26 @@ class TestImage(OESelftestTestCase):
190 240
191 qemu_packageconfig = get_bb_var('PACKAGECONFIG', 'qemu-system-native') 241 qemu_packageconfig = get_bb_var('PACKAGECONFIG', 'qemu-system-native')
192 qemu_distrofeatures = get_bb_var('DISTRO_FEATURES', 'qemu-system-native') 242 qemu_distrofeatures = get_bb_var('DISTRO_FEATURES', 'qemu-system-native')
193 features = 'INHERIT += "testimage"\n' 243 features = 'IMAGE_CLASSES += "testimage"\n'
194 if 'gtk+' not in qemu_packageconfig: 244 if 'gtk+' not in qemu_packageconfig:
195 features += 'PACKAGECONFIG_append_pn-qemu-system-native = " gtk+"\n' 245 features += 'PACKAGECONFIG:append:pn-qemu-system-native = " gtk+"\n'
196 if 'sdl' not in qemu_packageconfig: 246 if 'sdl' not in qemu_packageconfig:
197 features += 'PACKAGECONFIG_append_pn-qemu-system-native = " sdl"\n' 247 features += 'PACKAGECONFIG:append:pn-qemu-system-native = " sdl"\n'
198 if 'opengl' not in qemu_distrofeatures: 248 if 'opengl' not in qemu_distrofeatures:
199 features += 'DISTRO_FEATURES_append = " opengl"\n' 249 features += 'DISTRO_FEATURES:append = " opengl"\n'
200 features += 'TEST_SUITES = "ping ssh virgl"\n' 250 features += 'TEST_SUITES = "ping ssh virgl"\n'
201 features += 'IMAGE_FEATURES_append = " ssh-server-dropbear"\n' 251 features += 'IMAGE_FEATURES:append = " ssh-server-dropbear"\n'
202 features += 'IMAGE_INSTALL_append = " kmscube"\n' 252 features += 'IMAGE_INSTALL:append = " kmscube"\n'
203 features_gtk = features + 'TEST_RUNQEMUPARAMS = "gtk gl"\n' 253 features_gtk = features + 'TEST_RUNQEMUPARAMS += " gtk gl"\n'
204 self.write_config(features_gtk) 254 self.write_config(features_gtk)
205 bitbake('core-image-minimal') 255 bitbake('core-image-minimal')
206 bitbake('-c testimage core-image-minimal') 256 bitbake('-c testimage core-image-minimal')
207 features_sdl = features + 'TEST_RUNQEMUPARAMS = "sdl gl"\n' 257 features_sdl = features + 'TEST_RUNQEMUPARAMS += " sdl gl"\n'
208 self.write_config(features_sdl) 258 self.write_config(features_sdl)
209 bitbake('core-image-minimal') 259 bitbake('core-image-minimal')
210 bitbake('-c testimage core-image-minimal') 260 bitbake('-c testimage core-image-minimal')
211 261
262 @skipIfNotMachine("qemux86-64", "test needs qemux86-64")
212 def test_testimage_virgl_headless(self): 263 def test_testimage_virgl_headless(self):
213 """ 264 """
214 Summary: Check host-assisted accelerate OpenGL functionality in qemu with egl-headless frontend 265 Summary: Check host-assisted accelerate OpenGL functionality in qemu with egl-headless frontend
@@ -218,28 +269,27 @@ class TestImage(OESelftestTestCase):
218 Author: Alexander Kanavin <alex.kanavin@gmail.com> 269 Author: Alexander Kanavin <alex.kanavin@gmail.com>
219 """ 270 """
220 import subprocess, os 271 import subprocess, os
221 try: 272
222 content = os.listdir("/dev/dri") 273 distro = oe.lsb.distro_identifier()
223 if len([i for i in content if i.startswith('render')]) == 0: 274 # Merge request to address the issue on centos/rhel/derivatives:
224 self.skipTest("No render nodes found in /dev/dri: %s" %(content)) 275 # https://gitlab.com/cki-project/kernel-ark/-/merge_requests/3449
225 except FileNotFoundError: 276 if distro and (distro in ['debian-9', 'debian-10', 'centos-7', 'centos-8', 'centos-9', 'ubuntu-16.04', 'ubuntu-18.04'] or
226 self.skipTest("/dev/dri directory does not exist; no render nodes available on this machine.") 277 distro.startswith('almalinux') or distro.startswith('rocky')):
227 try: 278 self.skipTest('virgl headless cannot be tested with %s' %(distro))
228 dripath = subprocess.check_output("pkg-config --variable=dridriverdir dri", shell=True) 279
229 except subprocess.CalledProcessError as e:
230 self.skipTest("Could not determine the path to dri drivers on the host via pkg-config.\nPlease install Mesa development files (particularly, dri.pc) on the host machine.")
231 qemu_distrofeatures = get_bb_var('DISTRO_FEATURES', 'qemu-system-native') 280 qemu_distrofeatures = get_bb_var('DISTRO_FEATURES', 'qemu-system-native')
232 features = 'INHERIT += "testimage"\n' 281 features = 'IMAGE_CLASSES += "testimage"\n'
233 if 'opengl' not in qemu_distrofeatures: 282 if 'opengl' not in qemu_distrofeatures:
234 features += 'DISTRO_FEATURES_append = " opengl"\n' 283 features += 'DISTRO_FEATURES:append = " opengl"\n'
235 features += 'TEST_SUITES = "ping ssh virgl"\n' 284 features += 'TEST_SUITES = "ping ssh virgl"\n'
236 features += 'IMAGE_FEATURES_append = " ssh-server-dropbear"\n' 285 features += 'IMAGE_FEATURES:append = " ssh-server-dropbear"\n'
237 features += 'IMAGE_INSTALL_append = " kmscube"\n' 286 features += 'IMAGE_INSTALL:append = " kmscube"\n'
238 features += 'TEST_RUNQEMUPARAMS = "egl-headless"\n' 287 features += 'TEST_RUNQEMUPARAMS += " egl-headless"\n'
239 self.write_config(features) 288 self.write_config(features)
240 bitbake('core-image-minimal') 289 bitbake('core-image-minimal')
241 bitbake('-c testimage core-image-minimal') 290 bitbake('-c testimage core-image-minimal')
242 291
292@OETestTag("runqemu")
243class Postinst(OESelftestTestCase): 293class Postinst(OESelftestTestCase):
244 294
245 def init_manager_loop(self, init_manager): 295 def init_manager_loop(self, init_manager):
@@ -260,10 +310,7 @@ class Postinst(OESelftestTestCase):
260 features += 'IMAGE_FEATURES += "package-management empty-root-password"\n' 310 features += 'IMAGE_FEATURES += "package-management empty-root-password"\n'
261 features += 'PACKAGE_CLASSES = "%s"\n' % classes 311 features += 'PACKAGE_CLASSES = "%s"\n' % classes
262 if init_manager == "systemd": 312 if init_manager == "systemd":
263 features += 'DISTRO_FEATURES_append = " systemd"\n' 313 features += 'INIT_MANAGER = "systemd"\n'
264 features += 'VIRTUAL-RUNTIME_init_manager = "systemd"\n'
265 features += 'DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit"\n'
266 features += 'VIRTUAL-RUNTIME_initscripts = ""\n'
267 self.write_config(features) 314 self.write_config(features)
268 315
269 bitbake('core-image-minimal') 316 bitbake('core-image-minimal')
@@ -280,7 +327,7 @@ class Postinst(OESelftestTestCase):
280 327
281 328
282 329
283 @skipIfNotQemu('qemuall', 'Test only runs in qemu') 330 @skipIfNotQemu()
284 def test_postinst_rootfs_and_boot_sysvinit(self): 331 def test_postinst_rootfs_and_boot_sysvinit(self):
285 """ 332 """
286 Summary: The purpose of this test case is to verify Post-installation 333 Summary: The purpose of this test case is to verify Post-installation
@@ -301,7 +348,7 @@ class Postinst(OESelftestTestCase):
301 self.init_manager_loop("sysvinit") 348 self.init_manager_loop("sysvinit")
302 349
303 350
304 @skipIfNotQemu('qemuall', 'Test only runs in qemu') 351 @skipIfNotQemu()
305 def test_postinst_rootfs_and_boot_systemd(self): 352 def test_postinst_rootfs_and_boot_systemd(self):
306 """ 353 """
307 Summary: The purpose of this test case is to verify Post-installation 354 Summary: The purpose of this test case is to verify Post-installation
@@ -357,6 +404,7 @@ class Postinst(OESelftestTestCase):
357 self.assertFalse(os.path.isfile(os.path.join(hosttestdir, "rootfs-after-failure")), 404 self.assertFalse(os.path.isfile(os.path.join(hosttestdir, "rootfs-after-failure")),
358 "rootfs-after-failure file was created") 405 "rootfs-after-failure file was created")
359 406
407@OETestTag("runqemu")
360class SystemTap(OESelftestTestCase): 408class SystemTap(OESelftestTestCase):
361 """ 409 """
362 Summary: The purpose of this test case is to verify native crosstap 410 Summary: The purpose of this test case is to verify native crosstap
@@ -377,14 +425,14 @@ TEST_SERVER_IP = "192.168.7.1"
377TEST_TARGET_IP = "192.168.7.2" 425TEST_TARGET_IP = "192.168.7.2"
378 426
379EXTRA_IMAGE_FEATURES += "tools-profile dbg-pkgs" 427EXTRA_IMAGE_FEATURES += "tools-profile dbg-pkgs"
380IMAGE_FEATURES_append = " ssh-server-dropbear" 428IMAGE_FEATURES:append = " ssh-server-dropbear"
381 429
382# enables kernel debug symbols 430# enables kernel debug symbols
383KERNEL_EXTRA_FEATURES_append = " features/debug/debug-kernel.scc" 431KERNEL_EXTRA_FEATURES:append = " features/debug/debug-kernel.scc"
384KERNEL_EXTRA_FEATURES_append = " features/systemtap/systemtap.scc" 432KERNEL_EXTRA_FEATURES:append = " features/systemtap/systemtap.scc"
385 433
386# add systemtap run-time into target image if it is not there yet 434# add systemtap run-time into target image if it is not there yet
387IMAGE_INSTALL_append = " systemtap-runtime" 435IMAGE_INSTALL:append = " systemtap-runtime"
388""" 436"""
389 437
390 def test_crosstap_helloworld(self): 438 def test_crosstap_helloworld(self):
@@ -433,4 +481,3 @@ IMAGE_INSTALL_append = " systemtap-runtime"
433 cmd = "crosstap -r root@192.168.7.2 -s %s/process/ syscalls_by_pid.stp" % systemtap_examples 481 cmd = "crosstap -r root@192.168.7.2 -s %s/process/ syscalls_by_pid.stp" % systemtap_examples
434 result = runCmd(cmd) 482 result = runCmd(cmd)
435 self.assertEqual(0, result.status, 'crosstap syscalls_by_pid returned a non 0 status:%s' % result.output) 483 self.assertEqual(0, result.status, 'crosstap syscalls_by_pid returned a non 0 status:%s' % result.output)
436
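
[Editor's note] Across runtime_test.py the edits converge on the same conventions: testimage/testexport are enabled through IMAGE_CLASSES rather than INHERIT, overrides use the colon syntax (IMAGE_INSTALL:append instead of IMAGE_INSTALL_append), TEST_RUNQEMUPARAMS is appended to rather than overwritten, and temporary paths come from tempfile instead of /tmp. A hedged sketch of the combined pattern (the package and test suites below are placeholders):

    from oeqa.selftest.case import OESelftestTestCase
    from oeqa.utils.commands import bitbake

    class RuntimeSketch(OESelftestTestCase):
        def test_runtime_sketch(self):
            # IMAGE_CLASSES (not INHERIT) enables testimage; overrides use ':' syntax
            features = 'IMAGE_CLASSES += "testimage"\n'
            features += 'IMAGE_INSTALL:append = " libssl"\n'
            features += 'TEST_SUITES = "ping ssh"\n'
            features += 'TEST_RUNQEMUPARAMS += " slirp"\n'   # append, do not overwrite
            self.write_config(features)
            bitbake('core-image-minimal')
            bitbake('-c testimage core-image-minimal')
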
diff --git a/meta/lib/oeqa/selftest/cases/rust.py b/meta/lib/oeqa/selftest/cases/rust.py
new file mode 100644
index 0000000000..d99a58d6b9
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/rust.py
@@ -0,0 +1,135 @@
1# SPDX-License-Identifier: MIT
2import subprocess
3import time
4from oeqa.core.decorator import OETestTag
5from oeqa.core.decorator.data import skipIfArch
6from oeqa.core.case import OEPTestResultTestCase
7from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import runCmd, bitbake, get_bb_var, runqemu
9from oeqa.utils.sshcontrol import SSHControl
10
11def parse_results(filename):
12 tests = {}
13 with open(filename, "r") as f:
14 lines = f.readlines()
15 for line in lines:
16 if "..." in line and "test [" in line:
17 test = line.split("test ")[1].split(" ... ")[0]
18 if "] " in test:
19 test = test.split("] ", 1)[1]
20 result = line.split(" ... ")[1].strip()
21 if result == "ok":
22 result = "PASS"
23 elif result == "failed":
24 result = "FAIL"
25 elif "ignored" in result:
26 result = "SKIPPED"
27 if test in tests:
28 if tests[test] != result:
29 print("Duplicate and mismatching result %s for %s" % (result, test))
30 else:
31 print("Duplicate result %s for %s" % (result, test))
32 else:
33 tests[test] = result
34 return tests
35
36# Total time taken for testing is about 2hr 20min, with PARALLEL_MAKE set to 40 parallel jobs.
37@OETestTag("toolchain-system")
38@OETestTag("toolchain-user")
39@OETestTag("runqemu")
40class RustSelfTestSystemEmulated(OESelftestTestCase, OEPTestResultTestCase):
41
42 @skipIfArch(['mips', 'mips64'])
43 def test_rust(self, *args, **kwargs):
44 # build remote-test-server before image build
45 recipe = "rust"
46 start_time = time.time()
47 bitbake("{} -c test_compile".format(recipe))
48 builddir = get_bb_var("RUSTSRC", "rust")
49 # build core-image-minimal with required packages
50 default_installed_packages = ["libgcc", "libstdc++", "libatomic", "libgomp"]
51 features = []
52 features.append('IMAGE_FEATURES += "ssh-server-dropbear"')
53 features.append('CORE_IMAGE_EXTRA_INSTALL += "{0}"'.format(" ".join(default_installed_packages)))
54 self.write_config("\n".join(features))
55 bitbake("core-image-minimal")
56
57 # Exclude the test folders that error out while building
58 # TODO: Fix the errors and include them for testing
59 # no-fail-fast: Run all tests regardless of failure.
60 # bless: First runs rustfmt to format the codebase,
61 # then runs tidy checks.
62 exclude_list = [
63 'src/bootstrap',
64 'src/doc/rustc',
65 'src/doc/rustdoc',
66 'src/doc/unstable-book',
67 'src/etc/test-float-parse',
68 'src/librustdoc',
69 'src/rustdoc-json-types',
70 'src/tools/jsondoclint',
71 'src/tools/lint-docs',
72 'src/tools/replace-version-placeholder',
73 'src/tools/rust-analyzer',
74 'src/tools/rustdoc-themes',
75 'src/tools/rust-installer',
76 'src/tools/suggest-tests',
77 'tests/assembly/asm/aarch64-outline-atomics.rs',
78 'tests/codegen/issues/issue-122805.rs',
79 'tests/codegen/thread-local.rs',
80 'tests/mir-opt/',
81 'tests/run-make',
82 'tests/run-make-fulldeps',
83 'tests/rustdoc',
84 'tests/rustdoc-json',
85 'tests/rustdoc-js-std',
86 'tests/ui/abi/stack-probes-lto.rs',
87 'tests/ui/abi/stack-probes.rs',
88 'tests/ui/codegen/mismatched-data-layouts.rs',
89 'tests/codegen/rust-abi-arch-specific-adjustment.rs',
90 'tests/ui/debuginfo/debuginfo-emit-llvm-ir-and-split-debuginfo.rs',
91 'tests/ui/feature-gates/version_check.rs',
92 'tests/ui-fulldeps/',
93 'tests/ui/process/nofile-limit.rs',
94 'tidyselftest'
95 ]
96
97 exclude_fail_tests = " ".join([" --exclude " + item for item in exclude_list])
98 # Add exclude_fail_tests with other test arguments
99 testargs = exclude_fail_tests + " --no-fail-fast --bless"
100
101 # Wrap the execution with a qemu instance booted with 512 MB of RAM ("-m 512");
102 # the full suite then runs against it over the network via remote-test-server.
103 with runqemu("core-image-minimal", runqemuparams = "nographic", qemuparams = "-m 512") as qemu:
104 # Copy remote-test-server to image through scp
105 host_sys = get_bb_var("RUST_BUILD_SYS", "rust")
106 ssh = SSHControl(ip=qemu.ip, logfile=qemu.sshlog, user="root")
107 ssh.copy_to(builddir + "/build/" + host_sys + "/stage2-tools-bin/remote-test-server","~/")
108 # Execute remote-test-server on image through background ssh
109 command = '~/remote-test-server --bind 0.0.0.0:12345 -v'
110 sshrun=subprocess.Popen(("ssh", '-o', 'UserKnownHostsFile=/dev/null', '-o', 'StrictHostKeyChecking=no', '-f', "root@%s" % qemu.ip, command), shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
111 # Get the values of variables.
112 tcpath = get_bb_var("TARGET_SYS", "rust")
113 targetsys = get_bb_var("RUST_TARGET_SYS", "rust")
114 rustlibpath = get_bb_var("WORKDIR", "rust")
115 tmpdir = get_bb_var("TMPDIR", "rust")
116
117 # Set path for target-poky-linux-gcc, RUST_TARGET_PATH and hosttools.
118 cmd = "export TARGET_VENDOR=\"-poky\";"
119 cmd = cmd + " export PATH=%s/recipe-sysroot-native/usr/bin/python3-native:%s/recipe-sysroot-native/usr/bin:%s/recipe-sysroot-native/usr/bin/%s:%s/hosttools:$PATH;" % (rustlibpath, rustlibpath, rustlibpath, tcpath, tmpdir)
120 cmd = cmd + " export RUST_TARGET_PATH=%s/rust-targets;" % rustlibpath
121 # Trigger testing.
122 cmd = cmd + " export TEST_DEVICE_ADDR=\"%s:12345\";" % qemu.ip
123 cmd = cmd + " cd %s; python3 src/bootstrap/bootstrap.py test %s --target %s" % (builddir, testargs, targetsys)
124 retval = runCmd(cmd)
125 end_time = time.time()
126
127 resultlog = rustlibpath + "/results-log.txt"
128 with open(resultlog, "w") as f:
129 f.write(retval.output)
130
131 ptestsuite = "rust"
132 self.ptest_section(ptestsuite, duration = int(end_time - start_time), logfile=resultlog)
133 test_results = parse_results(resultlog)
134 for test in test_results:
135 self.ptest_result(ptestsuite, test, test_results[test])
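
[Editor's note] parse_results() above keys on rustc's "test <name> ... <result>" log lines. A short worked example of the mapping it produces (the log contents are made up for illustration):

    # Given a results log containing, e.g.:
    #   test [ui] tests/ui/foo.rs ... ok
    #   test [ui] tests/ui/bar.rs ... failed
    #   test [ui] tests/ui/baz.rs ... ignored
    # parse_results() strips the "[suite] " prefix and normalises the verdicts:
    tests = parse_results("results-log.txt")
    # -> {'tests/ui/foo.rs': 'PASS',
    #     'tests/ui/bar.rs': 'FAIL',
    #     'tests/ui/baz.rs': 'SKIPPED'}
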
diff --git a/meta/lib/oeqa/selftest/cases/sdk.py b/meta/lib/oeqa/selftest/cases/sdk.py
new file mode 100644
index 0000000000..3971365029
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/sdk.py
@@ -0,0 +1,39 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os.path
8
9from oeqa.selftest.case import OESelftestTestCase
10from oeqa.utils.commands import bitbake, get_bb_vars
11
12class SDKTests(OESelftestTestCase):
13
14 def load_manifest(self, filename):
15 manifest = {}
16 with open(filename) as f:
17 for line in f:
18 name, arch, version = line.split(maxsplit=3)
19 manifest[name] = (version, arch)
20 return manifest
21
22 def test_sdk_manifests(self):
23 image = "core-image-minimal"
24
25 self.write_config("""
26TOOLCHAIN_HOST_TASK:append = " nativesdk-selftest-hello"
27IMAGE_INSTALL:append = " selftest-hello"
28""")
29
30 bitbake(f"{image} -c populate_sdk")
31 vars = get_bb_vars(['SDK_DEPLOY', 'TOOLCHAIN_OUTPUTNAME'], image)
32
33 path = os.path.join(vars["SDK_DEPLOY"], vars["TOOLCHAIN_OUTPUTNAME"] + ".host.manifest")
34 self.assertNotEqual(os.path.getsize(path), 0, msg="Host manifest is empty")
35 self.assertIn("nativesdk-selftest-hello", self.load_manifest(path))
36
37 path = os.path.join(vars["SDK_DEPLOY"], vars["TOOLCHAIN_OUTPUTNAME"] + ".target.manifest")
38 self.assertNotEqual(os.path.getsize(path), 0, msg="Target manifest is empty")
39 self.assertIn("selftest-hello", self.load_manifest(path))
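
[Editor's note] The manifests load_manifest() parses are plain whitespace-separated lines. A small illustration of the format and return value, as it would run inside SDKTests.test_sdk_manifests above (the entry is hypothetical):

    # A .host.manifest / .target.manifest line looks like:
    #   nativesdk-selftest-hello x86_64-nativesdk 1.0
    # and load_manifest() indexes it by package name:
    manifest = self.load_manifest(path)
    # -> {'nativesdk-selftest-hello': ('1.0', 'x86_64-nativesdk')}
    self.assertIn("nativesdk-selftest-hello", manifest)
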
diff --git a/meta/lib/oeqa/selftest/cases/selftest.py b/meta/lib/oeqa/selftest/cases/selftest.py
index af080dcf03..a80a8651a5 100644
--- a/meta/lib/oeqa/selftest/cases/selftest.py
+++ b/meta/lib/oeqa/selftest/cases/selftest.py
@@ -1,9 +1,10 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import importlib 7import importlib
6from oeqa.utils.commands import runCmd
7import oeqa.selftest 8import oeqa.selftest
8from oeqa.selftest.case import OESelftestTestCase 9from oeqa.selftest.case import OESelftestTestCase
9 10
diff --git a/meta/lib/oeqa/selftest/cases/signing.py b/meta/lib/oeqa/selftest/cases/signing.py
index a28c7eb19a..4df45ba032 100644
--- a/meta/lib/oeqa/selftest/cases/signing.py
+++ b/meta/lib/oeqa/selftest/cases/signing.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -81,6 +83,8 @@ class Signing(OESelftestTestCase):
81 feature += 'RPM_GPG_PASSPHRASE = "test123"\n' 83 feature += 'RPM_GPG_PASSPHRASE = "test123"\n'
82 feature += 'RPM_GPG_NAME = "testuser"\n' 84 feature += 'RPM_GPG_NAME = "testuser"\n'
83 feature += 'GPG_PATH = "%s"\n' % self.gpg_dir 85 feature += 'GPG_PATH = "%s"\n' % self.gpg_dir
86 feature += 'PACKAGECONFIG:append:pn-rpm-native = " sequoia"\n'
87 feature += 'PACKAGECONFIG:append:pn-rpm = " sequoia"\n'
84 88
85 self.write_config(feature) 89 self.write_config(feature)
86 90
@@ -145,7 +149,7 @@ class Signing(OESelftestTestCase):
145 feature += 'GPG_PATH = "%s"\n' % self.gpg_dir 149 feature += 'GPG_PATH = "%s"\n' % self.gpg_dir
146 feature += 'SSTATE_DIR = "%s"\n' % sstatedir 150 feature += 'SSTATE_DIR = "%s"\n' % sstatedir
147 # Any mirror might have partial sstate without .sig files, triggering failures 151 # Any mirror might have partial sstate without .sig files, triggering failures
148 feature += 'SSTATE_MIRRORS_forcevariable = ""\n' 152 feature += 'SSTATE_MIRRORS:forcevariable = ""\n'
149 153
150 self.write_config(feature) 154 self.write_config(feature)
151 155
@@ -159,13 +163,13 @@ class Signing(OESelftestTestCase):
159 bitbake('-c clean %s' % test_recipe) 163 bitbake('-c clean %s' % test_recipe)
160 bitbake('-c populate_lic %s' % test_recipe) 164 bitbake('-c populate_lic %s' % test_recipe)
161 165
162 recipe_sig = glob.glob(sstatedir + '/*/*/*:ed:*_populate_lic.tgz.sig') 166 recipe_sig = glob.glob(sstatedir + '/*/*/*:ed:*_populate_lic.tar.zst.sig')
163 recipe_tgz = glob.glob(sstatedir + '/*/*/*:ed:*_populate_lic.tgz') 167 recipe_archive = glob.glob(sstatedir + '/*/*/*:ed:*_populate_lic.tar.zst')
164 168
165 self.assertEqual(len(recipe_sig), 1, 'Failed to find .sig file.') 169 self.assertEqual(len(recipe_sig), 1, 'Failed to find .sig file.')
166 self.assertEqual(len(recipe_tgz), 1, 'Failed to find .tgz file.') 170 self.assertEqual(len(recipe_archive), 1, 'Failed to find .tar.zst file.')
167 171
168 ret = runCmd('gpg --homedir %s --verify %s %s' % (self.gpg_dir, recipe_sig[0], recipe_tgz[0])) 172 ret = runCmd('gpg --homedir %s --verify %s %s' % (self.gpg_dir, recipe_sig[0], recipe_archive[0]))
169 # gpg: Signature made Thu 22 Oct 2015 01:45:09 PM EEST using RSA key ID 61EEFB30 173 # gpg: Signature made Thu 22 Oct 2015 01:45:09 PM EEST using RSA key ID 61EEFB30
170 # gpg: Good signature from "testuser (nocomment) <testuser@email.com>" 174 # gpg: Good signature from "testuser (nocomment) <testuser@email.com>"
171 self.assertIn('gpg: Good signature from', ret.output, 'Package signed incorrectly.') 175 self.assertIn('gpg: Good signature from', ret.output, 'Package signed incorrectly.')
@@ -189,7 +193,7 @@ class LockedSignatures(OESelftestTestCase):
189 193
190 bitbake(test_recipe) 194 bitbake(test_recipe)
191 # Generate locked sigs include file 195 # Generate locked sigs include file
192 bitbake('-S none %s' % test_recipe) 196 bitbake('-S lockedsigs %s' % test_recipe)
193 197
194 feature = 'require %s\n' % locked_sigs_file 198 feature = 'require %s\n' % locked_sigs_file
195 feature += 'SIGGEN_LOCKEDSIGS_TASKSIG_CHECK = "warn"\n' 199 feature += 'SIGGEN_LOCKEDSIGS_TASKSIG_CHECK = "warn"\n'
@@ -206,7 +210,7 @@ class LockedSignatures(OESelftestTestCase):
206 # Use uuid so the hash equivalence server isn't triggered 210 # Use uuid so the hash equivalence server isn't triggered
207 recipe_append_file = test_recipe + '_' + get_bb_var('PV', test_recipe) + '.bbappend' 211 recipe_append_file = test_recipe + '_' + get_bb_var('PV', test_recipe) + '.bbappend'
208 recipe_append_path = os.path.join(templayerdir, 'recipes-test', test_recipe, recipe_append_file) 212 recipe_append_path = os.path.join(templayerdir, 'recipes-test', test_recipe, recipe_append_file)
209 feature = 'SUMMARY_${PN} = "test locked signature%s"\n' % uuid.uuid4() 213 feature = 'SUMMARY:${PN} = "test locked signature%s"\n' % uuid.uuid4()
210 214
211 os.mkdir(os.path.join(templayerdir, 'recipes-test')) 215 os.mkdir(os.path.join(templayerdir, 'recipes-test'))
212 os.mkdir(os.path.join(templayerdir, 'recipes-test', test_recipe)) 216 os.mkdir(os.path.join(templayerdir, 'recipes-test', test_recipe))
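
[Editor's note] The signing hunks track two behavioural changes: sstate objects are now .tar.zst archives with detached .sig files, and locked-signature include files are generated with "bitbake -S lockedsigs" instead of "-S none". A hedged sketch of the detached-signature check used above (sstatedir and gpg_dir stand in for the values the Signing test sets up; not runnable standalone):

    import glob
    from oeqa.utils.commands import runCmd

    # The detached signature sits next to the sstate archive
    sig = glob.glob(sstatedir + '/*/*/*_populate_lic.tar.zst.sig')[0]
    archive = sig[:-len('.sig')]        # strip the .sig suffix to get the archive
    ret = runCmd('gpg --homedir %s --verify %s %s' % (gpg_dir, sig, archive))
    assert 'gpg: Good signature from' in ret.output
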
diff --git a/meta/lib/oeqa/selftest/cases/spdx.py b/meta/lib/oeqa/selftest/cases/spdx.py
new file mode 100644
index 0000000000..8cd4e83ca2
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/spdx.py
@@ -0,0 +1,288 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import json
8import os
9import textwrap
10import hashlib
11from pathlib import Path
12from oeqa.selftest.case import OESelftestTestCase
13from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runCmd
14import oe.spdx30
15
16
17class SPDX22Check(OESelftestTestCase):
18 @classmethod
19 def setUpClass(cls):
20 super().setUpClass()
21 bitbake("python3-spdx-tools-native")
22 bitbake("-c addto_recipe_sysroot python3-spdx-tools-native")
23
24 def check_recipe_spdx(self, high_level_dir, spdx_file, target_name):
25 config = textwrap.dedent(
26 """\
27 INHERIT:remove = "create-spdx"
28 INHERIT += "create-spdx-2.2"
29 """
30 )
31 self.write_config(config)
32
33 deploy_dir = get_bb_var("DEPLOY_DIR")
34 arch_dir = get_bb_var("PACKAGE_ARCH", target_name)
35 spdx_version = get_bb_var("SPDX_VERSION")
36 # qemux86-64 creates the directory qemux86_64
37 #arch_dir = arch_var.replace("-", "_")
38
39 full_file_path = os.path.join(
40 deploy_dir, "spdx", spdx_version, arch_dir, high_level_dir, spdx_file
41 )
42
43 try:
44 os.remove(full_file_path)
45 except FileNotFoundError:
46 pass
47
48 bitbake("%s -c create_spdx" % target_name)
49
50 def check_spdx_json(filename):
51 with open(filename) as f:
52 report = json.load(f)
53 self.assertNotEqual(report, None)
54 self.assertNotEqual(report["SPDXID"], None)
55
56 python = os.path.join(
57 get_bb_var("STAGING_BINDIR", "python3-spdx-tools-native"),
58 "nativepython3",
59 )
60 validator = os.path.join(
61 get_bb_var("STAGING_BINDIR", "python3-spdx-tools-native"), "pyspdxtools"
62 )
63 result = runCmd("{} {} -i {}".format(python, validator, filename))
64
65 self.assertExists(full_file_path)
66 result = check_spdx_json(full_file_path)
67
68 def test_spdx_base_files(self):
69 self.check_recipe_spdx("packages", "base-files.spdx.json", "base-files")
70
71 def test_spdx_tar(self):
72 self.check_recipe_spdx("packages", "tar.spdx.json", "tar")
73
74
75class SPDX3CheckBase(object):
76 """
77 Base class for checking SPDX 3 based tests
78 """
79
80 def check_spdx_file(self, filename):
81 self.assertExists(filename)
82
83 # Read the file
84 objset = oe.spdx30.SHACLObjectSet()
85 with open(filename, "r") as f:
86 d = oe.spdx30.JSONLDDeserializer()
87 d.read(f, objset)
88
89 return objset
90
91 def check_recipe_spdx(self, target_name, spdx_path, *, task=None, extraconf=""):
92 config = (
93 textwrap.dedent(
94 f"""\
95 INHERIT:remove = "create-spdx"
96 INHERIT += "{self.SPDX_CLASS}"
97 """
98 )
99 + textwrap.dedent(extraconf)
100 )
101
102 self.write_config(config)
103
104 if task:
105 bitbake(f"-c {task} {target_name}")
106 else:
107 bitbake(target_name)
108
109 filename = spdx_path.format(
110 **get_bb_vars(
111 [
112 "DEPLOY_DIR_IMAGE",
113 "DEPLOY_DIR_SPDX",
114 "MACHINE",
115 "MACHINE_ARCH",
116 "SDKMACHINE",
117 "SDK_DEPLOY",
118 "SPDX_VERSION",
119 "SSTATE_PKGARCH",
120 "TOOLCHAIN_OUTPUTNAME",
121 ],
122 target_name,
123 )
124 )
125
126 return self.check_spdx_file(filename)
127
128 def check_objset_missing_ids(self, objset):
129 for o in objset.foreach_type(oe.spdx30.SpdxDocument):
130 doc = o
131 break
132 else:
133 self.assertTrue(False, "Unable to find SpdxDocument")
134
135 missing_ids = objset.missing_ids - set(i.externalSpdxId for i in doc.import_)
136 if missing_ids:
137 self.assertTrue(
138 False,
139 "The following SPDXIDs are unresolved:\n " + "\n ".join(missing_ids),
140 )
141
142
143class SPDX30Check(SPDX3CheckBase, OESelftestTestCase):
144 SPDX_CLASS = "create-spdx-3.0"
145
146 def test_base_files(self):
147 self.check_recipe_spdx(
148 "base-files",
149 "{DEPLOY_DIR_SPDX}/{MACHINE_ARCH}/packages/package-base-files.spdx.json",
150 )
151
152 def test_gcc_include_source(self):
153 objset = self.check_recipe_spdx(
154 "gcc",
155 "{DEPLOY_DIR_SPDX}/{SSTATE_PKGARCH}/recipes/recipe-gcc.spdx.json",
156 extraconf="""\
157 SPDX_INCLUDE_SOURCES = "1"
158 """,
159 )
160
161 gcc_pv = get_bb_var("PV", "gcc")
162 filename = f"gcc-{gcc_pv}/README"
163 found = False
164 for software_file in objset.foreach_type(oe.spdx30.software_File):
165 if software_file.name == filename:
166 found = True
167 self.logger.info(
168 f"The spdxId of {filename} in recipe-gcc.spdx.json is {software_file.spdxId}"
169 )
170 break
171
172 self.assertTrue(
173 found, f"Not found source file {filename} in recipe-gcc.spdx.json\n"
174 )
175
176 def test_core_image_minimal(self):
177 objset = self.check_recipe_spdx(
178 "core-image-minimal",
179 "{DEPLOY_DIR_IMAGE}/core-image-minimal-{MACHINE}.rootfs.spdx.json",
180 )
181
182 # Document should be fully linked
183 self.check_objset_missing_ids(objset)
184
185 def test_core_image_minimal_sdk(self):
186 objset = self.check_recipe_spdx(
187 "core-image-minimal",
188 "{SDK_DEPLOY}/{TOOLCHAIN_OUTPUTNAME}.spdx.json",
189 task="populate_sdk",
190 )
191
192 # Document should be fully linked
193 self.check_objset_missing_ids(objset)
194
195 def test_baremetal_helloworld(self):
196 objset = self.check_recipe_spdx(
197 "baremetal-helloworld",
198 "{DEPLOY_DIR_IMAGE}/baremetal-helloworld-image-{MACHINE}.spdx.json",
199 extraconf="""\
200 TCLIBC = "baremetal"
201 """,
202 )
203
204 # Document should be fully linked
205 self.check_objset_missing_ids(objset)
206
207 def test_extra_opts(self):
208 HOST_SPDXID = "http://foo.bar/spdx/bar2"
209
210 EXTRACONF = textwrap.dedent(
211 f"""\
212 SPDX_INVOKED_BY_name = "CI Tool"
213 SPDX_INVOKED_BY_type = "software"
214
215 SPDX_ON_BEHALF_OF_name = "John Doe"
216 SPDX_ON_BEHALF_OF_type = "person"
217 SPDX_ON_BEHALF_OF_id_email = "John.Doe@noreply.com"
218
219 SPDX_PACKAGE_SUPPLIER_name = "ACME Embedded Widgets"
220 SPDX_PACKAGE_SUPPLIER_type = "organization"
221
222 SPDX_AUTHORS += "authorA"
223 SPDX_AUTHORS_authorA_ref = "SPDX_ON_BEHALF_OF"
224
225 SPDX_BUILD_HOST = "host"
226
227 SPDX_IMPORTS += "host"
228 SPDX_IMPORTS_host_spdxid = "{HOST_SPDXID}"
229
230 SPDX_INCLUDE_BUILD_VARIABLES = "1"
231 SPDX_INCLUDE_BITBAKE_PARENT_BUILD = "1"
232 SPDX_INCLUDE_TIMESTAMPS = "1"
233
234 SPDX_PRETTY = "1"
235 """
236 )
237 extraconf_hash = hashlib.sha1(EXTRACONF.encode("utf-8")).hexdigest()
238
239 objset = self.check_recipe_spdx(
240 "core-image-minimal",
241 "{DEPLOY_DIR_IMAGE}/core-image-minimal-{MACHINE}.rootfs.spdx.json",
242 # Many SPDX variables do not trigger a rebuild, since they are
243 # intended to record information at the time of the build. As such,
244 # the extra configuration alone may not trigger a rebuild, and even
245 # if it does, the task hash won't necessarily be unique. In order
246 # to make sure rebuilds happen, but still allow these test objects
247 # to be pulled from sstate (e.g. remain reproducible), change the
248 # namespace prefix to include the hash of the extra configuration
249 extraconf=textwrap.dedent(
250 f"""\
251 SPDX_NAMESPACE_PREFIX = "http://spdx.org/spdxdocs/{extraconf_hash}"
252 """
253 )
254 + EXTRACONF,
255 )
256
257 # Document should be fully linked
258 self.check_objset_missing_ids(objset)
259
260 for o in objset.foreach_type(oe.spdx30.SoftwareAgent):
261 if o.name == "CI Tool":
262 break
263 else:
264 self.assertTrue(False, "Unable to find software tool")
265
266 for o in objset.foreach_type(oe.spdx30.Person):
267 if o.name == "John Doe":
268 break
269 else:
270 self.assertTrue(False, "Unable to find person")
271
272 for o in objset.foreach_type(oe.spdx30.Organization):
273 if o.name == "ACME Embedded Widgets":
274 break
275 else:
276 self.assertTrue(False, "Unable to find organization")
277
278 for o in objset.foreach_type(oe.spdx30.SpdxDocument):
279 doc = o
280 break
281 else:
282 self.assertTrue(False, "Unable to find SpdxDocument")
283
284 for i in doc.import_:
285 if i.externalSpdxId == HOST_SPDXID:
286 break
287 else:
288 self.assertTrue(False, "Unable to find imported Host SpdxID")
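
[Editor's note] The new SPDX 3.0 tests all follow the same read pattern: deserialize a JSON-LD document into a SHACLObjectSet, then walk objects by type with foreach_type(). A minimal standalone sketch (the filename is a placeholder):

    import oe.spdx30

    objset = oe.spdx30.SHACLObjectSet()
    with open("recipe-gcc.spdx.json") as f:
        oe.spdx30.JSONLDDeserializer().read(f, objset)

    # Report any SPDXIDs referenced but not resolved by the document's imports
    for doc in objset.foreach_type(oe.spdx30.SpdxDocument):
        missing = objset.missing_ids - set(i.externalSpdxId for i in doc.import_)
        print(doc.spdxId, "unresolved:", sorted(missing))
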
diff --git a/meta/lib/oeqa/selftest/cases/sstate.py b/meta/lib/oeqa/selftest/cases/sstate.py
deleted file mode 100644
index 80ce9e353c..0000000000
--- a/meta/lib/oeqa/selftest/cases/sstate.py
+++ /dev/null
@@ -1,67 +0,0 @@
1#
2# SPDX-License-Identifier: MIT
3#
4
5import datetime
6import unittest
7import os
8import re
9import shutil
10
11import oeqa.utils.ftools as ftools
12from oeqa.selftest.case import OESelftestTestCase
13from oeqa.utils.commands import runCmd, bitbake, get_bb_vars, get_test_layer
14
15
16class SStateBase(OESelftestTestCase):
17
18 def setUpLocal(self):
19 super(SStateBase, self).setUpLocal()
20 self.temp_sstate_location = None
21 needed_vars = ['SSTATE_DIR', 'NATIVELSBSTRING', 'TCLIBC', 'TUNE_ARCH',
22 'TOPDIR', 'TARGET_VENDOR', 'TARGET_OS']
23 bb_vars = get_bb_vars(needed_vars)
24 self.sstate_path = bb_vars['SSTATE_DIR']
25 self.hostdistro = bb_vars['NATIVELSBSTRING']
26 self.tclibc = bb_vars['TCLIBC']
27 self.tune_arch = bb_vars['TUNE_ARCH']
28 self.topdir = bb_vars['TOPDIR']
29 self.target_vendor = bb_vars['TARGET_VENDOR']
30 self.target_os = bb_vars['TARGET_OS']
31 self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro)
32
33 # Creates a special sstate configuration with the option to add sstate mirrors
34 def config_sstate(self, temp_sstate_location=False, add_local_mirrors=[]):
35 self.temp_sstate_location = temp_sstate_location
36
37 if self.temp_sstate_location:
38 temp_sstate_path = os.path.join(self.builddir, "temp_sstate_%s" % datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
39 config_temp_sstate = "SSTATE_DIR = \"%s\"" % temp_sstate_path
40 self.append_config(config_temp_sstate)
41 self.track_for_cleanup(temp_sstate_path)
42 bb_vars = get_bb_vars(['SSTATE_DIR', 'NATIVELSBSTRING'])
43 self.sstate_path = bb_vars['SSTATE_DIR']
44 self.hostdistro = bb_vars['NATIVELSBSTRING']
45 self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro)
46
47 if add_local_mirrors:
48 config_set_sstate_if_not_set = 'SSTATE_MIRRORS ?= ""'
49 self.append_config(config_set_sstate_if_not_set)
50 for local_mirror in add_local_mirrors:
51 self.assertFalse(os.path.join(local_mirror) == os.path.join(self.sstate_path), msg='Cannot add the current sstate path as a sstate mirror')
52 config_sstate_mirror = "SSTATE_MIRRORS += \"file://.* file:///%s/PATH\"" % local_mirror
53 self.append_config(config_sstate_mirror)
54
55 # Returns a list containing sstate files
56 def search_sstate(self, filename_regex, distro_specific=True, distro_nonspecific=True):
57 result = []
58 for root, dirs, files in os.walk(self.sstate_path):
59 if distro_specific and re.search(r"%s/%s/[a-z0-9]{2}/[a-z0-9]{2}$" % (self.sstate_path, self.hostdistro), root):
60 for f in files:
61 if re.search(filename_regex, f):
62 result.append(f)
63 if distro_nonspecific and re.search(r"%s/[a-z0-9]{2}/[a-z0-9]{2}$" % self.sstate_path, root):
64 for f in files:
65 if re.search(filename_regex, f):
66 result.append(f)
67 return result
diff --git a/meta/lib/oeqa/selftest/cases/sstatetests.py b/meta/lib/oeqa/selftest/cases/sstatetests.py
index c46e8ba489..08f94b168a 100644
--- a/meta/lib/oeqa/selftest/cases/sstatetests.py
+++ b/meta/lib/oeqa/selftest/cases/sstatetests.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -7,17 +9,205 @@ import shutil
7import glob 9import glob
8import subprocess 10import subprocess
9import tempfile 11import tempfile
12import datetime
13import re
10 14
15from oeqa.utils.commands import runCmd, bitbake, get_bb_var, create_temp_layer, get_bb_vars
11from oeqa.selftest.case import OESelftestTestCase 16from oeqa.selftest.case import OESelftestTestCase
12from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_test_layer, create_temp_layer 17from oeqa.core.decorator import OETestTag
13from oeqa.selftest.cases.sstate import SStateBase
14 18
19import oe
15import bb.siggen 20import bb.siggen
16 21
22# Set to True to preserve temporary files (e.g. the temp sstate directory) after test execution, for debugging failures
23keep_temp_files = False
24
25class SStateBase(OESelftestTestCase):
26
27 def setUpLocal(self):
28 super(SStateBase, self).setUpLocal()
29 self.temp_sstate_location = None
30 needed_vars = ['SSTATE_DIR', 'TCLIBC', 'TUNE_ARCH',
31 'TOPDIR', 'TARGET_VENDOR', 'TARGET_OS']
32 bb_vars = get_bb_vars(needed_vars)
33 self.sstate_path = bb_vars['SSTATE_DIR']
34 self.tclibc = bb_vars['TCLIBC']
35 self.tune_arch = bb_vars['TUNE_ARCH']
36 self.topdir = bb_vars['TOPDIR']
37 self.target_vendor = bb_vars['TARGET_VENDOR']
38 self.target_os = bb_vars['TARGET_OS']
39
40 def track_for_cleanup(self, path):
41 if not keep_temp_files:
42 super().track_for_cleanup(path)
43
44 # Creates a special sstate configuration with the option to add sstate mirrors
45 def config_sstate(self, temp_sstate_location=False, add_local_mirrors=[]):
46 self.temp_sstate_location = temp_sstate_location
47
48 if self.temp_sstate_location:
49 temp_sstate_path = os.path.join(self.builddir, "temp_sstate_%s" % datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
50 config_temp_sstate = "SSTATE_DIR = \"%s\"" % temp_sstate_path
51 self.append_config(config_temp_sstate)
52 self.track_for_cleanup(temp_sstate_path)
53 self.sstate_path = get_bb_var('SSTATE_DIR')
54
55 if add_local_mirrors:
56 config_set_sstate_if_not_set = 'SSTATE_MIRRORS ?= ""'
57 self.append_config(config_set_sstate_if_not_set)
58 for local_mirror in add_local_mirrors:
59 self.assertFalse(os.path.join(local_mirror) == os.path.join(self.sstate_path), msg='Cannot add the current sstate path as a sstate mirror')
60 config_sstate_mirror = "SSTATE_MIRRORS += \"file://.* file:///%s/PATH\"" % local_mirror
61 self.append_config(config_sstate_mirror)
62
63 def set_hostdistro(self):
64 # This needs to be read after a BuildStarted event in case it gets changed by event
65 # handling in uninative.bbclass
66 self.hostdistro = get_bb_var('NATIVELSBSTRING')
67 self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro)
68
69 # Returns a list containing sstate files
70 def search_sstate(self, filename_regex, distro_specific=True, distro_nonspecific=True):
71 self.set_hostdistro()
72
73 result = []
74 for root, dirs, files in os.walk(self.sstate_path):
75 if distro_specific and re.search(r"%s/%s/[a-z0-9]{2}/[a-z0-9]{2}$" % (self.sstate_path, self.hostdistro), root):
76 for f in files:
77 if re.search(filename_regex, f):
78 result.append(f)
79 if distro_nonspecific and re.search(r"%s/[a-z0-9]{2}/[a-z0-9]{2}$" % self.sstate_path, root):
80 for f in files:
81 if re.search(filename_regex, f):
82 result.append(f)
83 return result
84
85 # Test sstate file creation, file location, and directory permissions
86 def run_test_sstate_creation(self, targets, hostdistro_specific):
87 self.config_sstate(True, [self.sstate_path])
88
89 bitbake(['-cclean'] + targets)
90
91 # Set it to a umask we know will be 'wrong'
92 with bb.utils.umask(0o022):
93 bitbake(targets)
94
95 # Distro specific files
96 distro_specific_files = self.search_sstate('|'.join(map(str, targets)), True, False)
97
98 # Distro non-specific
99 distro_non_specific_files = []
100 results = self.search_sstate('|'.join(map(str, targets)), False, True)
101 for r in results:
102 if r.endswith(("_populate_lic.tar.zst", "_populate_lic.tar.zst.siginfo", "_fetch.tar.zst.siginfo", "_unpack.tar.zst.siginfo", "_patch.tar.zst.siginfo")):
103 continue
104 distro_non_specific_files.append(r)
105
106 if hostdistro_specific:
 107 self.assertTrue(distro_specific_files, msg="Could not find sstate files for: %s" % ', '.join(map(str, targets)))
108 self.assertFalse(distro_non_specific_files, msg="Found sstate files in the wrong place for: %s (found %s)" % (', '.join(map(str, targets)), str(distro_non_specific_files)))
109 else:
 110 self.assertTrue(distro_non_specific_files, msg="Could not find sstate files for: %s" % ', '.join(map(str, targets)))
111 self.assertFalse(distro_specific_files, msg="Found sstate files in the wrong place for: %s (found %s)" % (', '.join(map(str, targets)), str(distro_specific_files)))
112
 113 # Walk the tree and check directory modes for incorrect permissions.
114 badperms = []
115 for root, dirs, files in os.walk(self.sstate_path):
116 for directory in dirs:
117 mode = os.stat(os.path.join(root, directory)).st_mode & 0o777
118 if mode != 0o775:
119 badperms.append("%s: %s vs %s" % (os.path.join(root, directory), mode, 0o775))
120
121 # Check badperms is empty
 122 self.assertFalse(badperms, msg="Found sstate directories with the wrong permissions: %s (found %s)" % (', '.join(map(str, targets)), str(badperms)))
123
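A umask of 0o022 would ordinarily yield 0o755 directories, so asserting 0o775 everywhere verifies that the sstate code normalizes directory permissions rather than inheriting the process umask. A minimal standalone sketch of the masking arithmetic (path hypothetical):

    import os
    import tempfile

    old = os.umask(0o022)
    demo = os.path.join(tempfile.gettempdir(), "umask-demo")
    os.mkdir(demo)  # the default mode 0o777 is masked by the umask
    print(oct(os.stat(demo).st_mode & 0o777))  # prints 0o755, not 0o775
    os.rmdir(demo)
    os.umask(old)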
124 # Test the sstate files deletion part of the do_cleansstate task
125 def run_test_cleansstate_task(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True):
126 self.config_sstate(temp_sstate_location, [self.sstate_path])
127
128 bitbake(['-ccleansstate'] + targets)
129
130 bitbake(targets)
131 archives_created = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific, distro_nonspecific)
132 self.assertTrue(archives_created, msg="Could not find sstate .tar.zst files for: %s (%s)" % (', '.join(map(str, targets)), str(archives_created)))
133
134 siginfo_created = self.search_sstate('|'.join(map(str, [s + r'.*?\.siginfo$' for s in targets])), distro_specific, distro_nonspecific)
135 self.assertTrue(siginfo_created, msg="Could not find sstate .siginfo files for: %s (%s)" % (', '.join(map(str, targets)), str(siginfo_created)))
136
137 bitbake(['-ccleansstate'] + targets)
138 archives_removed = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific, distro_nonspecific)
139 self.assertTrue(not archives_removed, msg="do_cleansstate didn't remove .tar.zst sstate files for: %s (%s)" % (', '.join(map(str, targets)), str(archives_removed)))
140
141 # Test rebuilding of distro-specific sstate files
142 def run_test_rebuild_distro_specific_sstate(self, targets, temp_sstate_location=True):
143 self.config_sstate(temp_sstate_location, [self.sstate_path])
144
145 bitbake(['-ccleansstate'] + targets)
146
147 self.set_hostdistro()
148
149 bitbake(targets)
150 results = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific=False, distro_nonspecific=True)
151 filtered_results = []
152 for r in results:
153 if r.endswith(("_populate_lic.tar.zst", "_populate_lic.tar.zst.siginfo")):
154 continue
155 filtered_results.append(r)
156 self.assertTrue(filtered_results == [], msg="Found distro non-specific sstate for: %s (%s)" % (', '.join(map(str, targets)), str(filtered_results)))
157 file_tracker_1 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific=True, distro_nonspecific=False)
 158 self.assertTrue(len(file_tracker_1) >= len(targets), msg="Not all sstate files were created for: %s" % ', '.join(map(str, targets)))
159
160 self.track_for_cleanup(self.distro_specific_sstate + "_old")
161 shutil.copytree(self.distro_specific_sstate, self.distro_specific_sstate + "_old")
162 shutil.rmtree(self.distro_specific_sstate)
163
164 bitbake(['-cclean'] + targets)
165 bitbake(targets)
166 file_tracker_2 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific=True, distro_nonspecific=False)
 167 self.assertTrue(len(file_tracker_2) >= len(targets), msg="Not all sstate files were created for: %s" % ', '.join(map(str, targets)))
168
169 not_recreated = [x for x in file_tracker_1 if x not in file_tracker_2]
170 self.assertTrue(not_recreated == [], msg="The following sstate files were not recreated: %s" % ', '.join(map(str, not_recreated)))
171
172 created_once = [x for x in file_tracker_2 if x not in file_tracker_1]
173 self.assertTrue(created_once == [], msg="The following sstate files were created only in the second run: %s" % ', '.join(map(str, created_once)))
174
175 def sstate_common_samesigs(self, configA, configB, allarch=False):
176
177 self.write_config(configA)
178 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
179 bitbake("world meta-toolchain -S none")
180 self.write_config(configB)
181 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
182 bitbake("world meta-toolchain -S none")
183
184 def get_files(d, result):
185 for root, dirs, files in os.walk(d):
186 for name in files:
187 if "meta-environment" in root or "cross-canadian" in root:
188 continue
189 if "do_build" not in name:
190 # 1.4.1+gitAUTOINC+302fca9f4c-r0.do_package_write_ipk.sigdata.f3a2a38697da743f0dbed8b56aafcf79
191 (_, task, _, shash) = name.rsplit(".", 3)
192 result[os.path.join(os.path.basename(root), task)] = shash
193
194 files1 = {}
195 files2 = {}
196 subdirs = sorted(glob.glob(self.topdir + "/tmp-sstatesamehash/stamps/*-nativesdk*-linux"))
197 if allarch:
198 subdirs.extend(sorted(glob.glob(self.topdir + "/tmp-sstatesamehash/stamps/all-*-linux")))
199
200 for subdir in subdirs:
201 nativesdkdir = os.path.basename(subdir)
202 get_files(self.topdir + "/tmp-sstatesamehash/stamps/" + nativesdkdir, files1)
203 get_files(self.topdir + "/tmp-sstatesamehash2/stamps/" + nativesdkdir, files2)
204
205 self.maxDiff = None
206 self.assertEqual(files1, files2)
207
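As a sanity check, the stamp filename parsing in get_files() above can be exercised in isolation, reusing the example name from the comment:

    name = "1.4.1+gitAUTOINC+302fca9f4c-r0.do_package_write_ipk.sigdata.f3a2a38697da743f0dbed8b56aafcf79"
    _, task, _, shash = name.rsplit(".", 3)
    assert task == "do_package_write_ipk"
    assert shash == "f3a2a38697da743f0dbed8b56aafcf79"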
17class SStateTests(SStateBase): 208class SStateTests(SStateBase):
18 def test_autorev_sstate_works(self): 209 def test_autorev_sstate_works(self):
19 # Test that a git repository which changes is correctly handled by SRCREV = ${AUTOREV} 210 # Test that a git repository which changes is correctly handled by SRCREV = ${AUTOREV}
20 # when PV does not contain SRCPV
21 211
22 tempdir = tempfile.mkdtemp(prefix='sstate_autorev') 212 tempdir = tempfile.mkdtemp(prefix='sstate_autorev')
23 tempdldir = tempfile.mkdtemp(prefix='sstate_autorev_dldir') 213 tempdldir = tempfile.mkdtemp(prefix='sstate_autorev_dldir')
@@ -39,7 +229,7 @@ class SStateTests(SStateBase):
39 229
40 recipefile = os.path.join(tempdir, "recipes-test", "dbus-wait-test", 'dbus-wait-test_git.bb') 230 recipefile = os.path.join(tempdir, "recipes-test", "dbus-wait-test", 'dbus-wait-test_git.bb')
41 os.makedirs(os.path.dirname(recipefile)) 231 os.makedirs(os.path.dirname(recipefile))
42 srcuri = 'git://' + srcdir + ';protocol=file' 232 srcuri = 'git://' + srcdir + ';protocol=file;branch=master'
43 result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri]) 233 result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri])
44 self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output) 234 self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output)
45 235
@@ -53,61 +243,14 @@ class SStateTests(SStateBase):
53 result = runCmd('git add bar.txt; git commit -asm "add bar"', cwd=srcdir) 243 result = runCmd('git add bar.txt; git commit -asm "add bar"', cwd=srcdir)
54 bitbake("dbus-wait-test -c unpack") 244 bitbake("dbus-wait-test -c unpack")
55 245
246class SStateCreation(SStateBase):
247 def test_sstate_creation_distro_specific(self):
248 self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], hostdistro_specific=True)
56 249
57 # Test sstate files creation and their location 250 def test_sstate_creation_distro_nonspecific(self):
58 def run_test_sstate_creation(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True, should_pass=True): 251 self.run_test_sstate_creation(['linux-libc-headers'], hostdistro_specific=False)
59 self.config_sstate(temp_sstate_location, [self.sstate_path])
60
61 if self.temp_sstate_location:
62 bitbake(['-cclean'] + targets)
63 else:
64 bitbake(['-ccleansstate'] + targets)
65
66 bitbake(targets)
67 file_tracker = []
68 results = self.search_sstate('|'.join(map(str, targets)), distro_specific, distro_nonspecific)
69 if distro_nonspecific:
70 for r in results:
71 if r.endswith(("_populate_lic.tgz", "_populate_lic.tgz.siginfo", "_fetch.tgz.siginfo", "_unpack.tgz.siginfo", "_patch.tgz.siginfo")):
72 continue
73 file_tracker.append(r)
74 else:
75 file_tracker = results
76
77 if should_pass:
78 self.assertTrue(file_tracker , msg="Could not find sstate files for: %s" % ', '.join(map(str, targets)))
79 else:
80 self.assertTrue(not file_tracker , msg="Found sstate files in the wrong place for: %s (found %s)" % (', '.join(map(str, targets)), str(file_tracker)))
81
82 def test_sstate_creation_distro_specific_pass(self):
83 self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
84
85 def test_sstate_creation_distro_specific_fail(self):
86 self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True, should_pass=False)
87
88 def test_sstate_creation_distro_nonspecific_pass(self):
89 self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
90
91 def test_sstate_creation_distro_nonspecific_fail(self):
92 self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True, should_pass=False)
93
94 # Test the sstate files deletion part of the do_cleansstate task
95 def run_test_cleansstate_task(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True):
96 self.config_sstate(temp_sstate_location, [self.sstate_path])
97
98 bitbake(['-ccleansstate'] + targets)
99
100 bitbake(targets)
101 tgz_created = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific, distro_nonspecific)
102 self.assertTrue(tgz_created, msg="Could not find sstate .tgz files for: %s (%s)" % (', '.join(map(str, targets)), str(tgz_created)))
103
104 siginfo_created = self.search_sstate('|'.join(map(str, [s + r'.*?\.siginfo$' for s in targets])), distro_specific, distro_nonspecific)
105 self.assertTrue(siginfo_created, msg="Could not find sstate .siginfo files for: %s (%s)" % (', '.join(map(str, targets)), str(siginfo_created)))
106
107 bitbake(['-ccleansstate'] + targets)
108 tgz_removed = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific, distro_nonspecific)
109 self.assertTrue(not tgz_removed, msg="do_cleansstate didn't remove .tgz sstate files for: %s (%s)" % (', '.join(map(str, targets)), str(tgz_removed)))
110 252
253class SStateCleanup(SStateBase):
111 def test_cleansstate_task_distro_specific_nonspecific(self): 254 def test_cleansstate_task_distro_specific_nonspecific(self):
112 targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native'] 255 targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native']
113 targets.append('linux-libc-headers') 256 targets.append('linux-libc-headers')
@@ -121,39 +264,7 @@ class SStateTests(SStateBase):
121 targets.append('linux-libc-headers') 264 targets.append('linux-libc-headers')
122 self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=False, temp_sstate_location=True) 265 self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
123 266
124 267class SStateDistroTests(SStateBase):
125 # Test rebuilding of distro-specific sstate files
126 def run_test_rebuild_distro_specific_sstate(self, targets, temp_sstate_location=True):
127 self.config_sstate(temp_sstate_location, [self.sstate_path])
128
129 bitbake(['-ccleansstate'] + targets)
130
131 bitbake(targets)
132 results = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific=False, distro_nonspecific=True)
133 filtered_results = []
134 for r in results:
135 if r.endswith(("_populate_lic.tgz", "_populate_lic.tgz.siginfo")):
136 continue
137 filtered_results.append(r)
138 self.assertTrue(filtered_results == [], msg="Found distro non-specific sstate for: %s (%s)" % (', '.join(map(str, targets)), str(filtered_results)))
139 file_tracker_1 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific=True, distro_nonspecific=False)
140 self.assertTrue(len(file_tracker_1) >= len(targets), msg = "Not all sstate files ware created for: %s" % ', '.join(map(str, targets)))
141
142 self.track_for_cleanup(self.distro_specific_sstate + "_old")
143 shutil.copytree(self.distro_specific_sstate, self.distro_specific_sstate + "_old")
144 shutil.rmtree(self.distro_specific_sstate)
145
146 bitbake(['-cclean'] + targets)
147 bitbake(targets)
148 file_tracker_2 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific=True, distro_nonspecific=False)
149 self.assertTrue(len(file_tracker_2) >= len(targets), msg = "Not all sstate files ware created for: %s" % ', '.join(map(str, targets)))
150
151 not_recreated = [x for x in file_tracker_1 if x not in file_tracker_2]
152 self.assertTrue(not_recreated == [], msg="The following sstate files ware not recreated: %s" % ', '.join(map(str, not_recreated)))
153
154 created_once = [x for x in file_tracker_2 if x not in file_tracker_1]
155 self.assertTrue(created_once == [], msg="The following sstate files ware created only in the second run: %s" % ', '.join(map(str, created_once)))
156
157 def test_rebuild_distro_specific_sstate_cross_native_targets(self): 268 def test_rebuild_distro_specific_sstate_cross_native_targets(self):
158 self.run_test_rebuild_distro_specific_sstate(['binutils-cross-' + self.tune_arch, 'binutils-native'], temp_sstate_location=True) 269 self.run_test_rebuild_distro_specific_sstate(['binutils-cross-' + self.tune_arch, 'binutils-native'], temp_sstate_location=True)
159 270
@@ -163,48 +274,48 @@ class SStateTests(SStateBase):
163 def test_rebuild_distro_specific_sstate_native_target(self): 274 def test_rebuild_distro_specific_sstate_native_target(self):
164 self.run_test_rebuild_distro_specific_sstate(['binutils-native'], temp_sstate_location=True) 275 self.run_test_rebuild_distro_specific_sstate(['binutils-native'], temp_sstate_location=True)
165 276
166 277class SStateCacheManagement(SStateBase):
167 # Test the sstate-cache-management script. Each element in the global_config list is used with the corresponding element in the target_config list 278 # Test the sstate-cache-management script. Each element in the global_config list is used with the corresponding element in the target_config list
168 # global_config elements are expected to not generate any sstate files that would be removed by sstate-cache-management.sh (such as changing the value of MACHINE) 279 # global_config elements are expected to not generate any sstate files that would be removed by sstate-cache-management.py (such as changing the value of MACHINE)
169 def run_test_sstate_cache_management_script(self, target, global_config=[''], target_config=[''], ignore_patterns=[]): 280 def run_test_sstate_cache_management_script(self, target, global_config=[''], target_config=[''], ignore_patterns=[]):
170 self.assertTrue(global_config) 281 self.assertTrue(global_config)
171 self.assertTrue(target_config) 282 self.assertTrue(target_config)
172 self.assertTrue(len(global_config) == len(target_config), msg='Lists global_config and target_config should have the same number of elements') 283 self.assertTrue(len(global_config) == len(target_config), msg='Lists global_config and target_config should have the same number of elements')
173 self.config_sstate(temp_sstate_location=True, add_local_mirrors=[self.sstate_path])
174 284
175 # If buildhistory is enabled, we need to disable version-going-backwards 285 for idx in range(len(target_config)):
176 # QA checks for this test. It may report errors otherwise. 286 self.append_config(global_config[idx])
177 self.append_config('ERROR_QA_remove = "version-going-backwards"') 287 self.append_recipeinc(target, target_config[idx])
288 bitbake(target)
289 self.remove_config(global_config[idx])
290 self.remove_recipeinc(target, target_config[idx])
178 291
179 # For not this only checks if random sstate tasks are handled correctly as a group. 292 self.config_sstate(temp_sstate_location=True, add_local_mirrors=[self.sstate_path])
293
294 # For now this only checks if random sstate tasks are handled correctly as a group.
180 # In the future we should add control over what tasks we check for. 295 # In the future we should add control over what tasks we check for.
181 296
182 sstate_archs_list = []
183 expected_remaining_sstate = [] 297 expected_remaining_sstate = []
184 for idx in range(len(target_config)): 298 for idx in range(len(target_config)):
185 self.append_config(global_config[idx]) 299 self.append_config(global_config[idx])
186 self.append_recipeinc(target, target_config[idx]) 300 self.append_recipeinc(target, target_config[idx])
187 sstate_arch = get_bb_var('SSTATE_PKGARCH', target)
188 if not sstate_arch in sstate_archs_list:
189 sstate_archs_list.append(sstate_arch)
190 if target_config[idx] == target_config[-1]: 301 if target_config[idx] == target_config[-1]:
191 target_sstate_before_build = self.search_sstate(target + r'.*?\.tgz$') 302 target_sstate_before_build = self.search_sstate(target + r'.*?\.tar.zst$')
192 bitbake("-cclean %s" % target) 303 bitbake("-cclean %s" % target)
193 result = bitbake(target, ignore_status=True) 304 result = bitbake(target, ignore_status=True)
194 if target_config[idx] == target_config[-1]: 305 if target_config[idx] == target_config[-1]:
195 target_sstate_after_build = self.search_sstate(target + r'.*?\.tgz$') 306 target_sstate_after_build = self.search_sstate(target + r'.*?\.tar.zst$')
196 expected_remaining_sstate += [x for x in target_sstate_after_build if x not in target_sstate_before_build if not any(pattern in x for pattern in ignore_patterns)] 307 expected_remaining_sstate += [x for x in target_sstate_after_build if x not in target_sstate_before_build if not any(pattern in x for pattern in ignore_patterns)]
197 self.remove_config(global_config[idx]) 308 self.remove_config(global_config[idx])
198 self.remove_recipeinc(target, target_config[idx]) 309 self.remove_recipeinc(target, target_config[idx])
199 self.assertEqual(result.status, 0, msg = "build of %s failed with %s" % (target, result.output)) 310 self.assertEqual(result.status, 0, msg = "build of %s failed with %s" % (target, result.output))
200 311
201 runCmd("sstate-cache-management.sh -y --cache-dir=%s --remove-duplicated --extra-archs=%s" % (self.sstate_path, ','.join(map(str, sstate_archs_list)))) 312 runCmd("sstate-cache-management.py -y --cache-dir=%s --remove-duplicated" % (self.sstate_path))
202 actual_remaining_sstate = [x for x in self.search_sstate(target + r'.*?\.tgz$') if not any(pattern in x for pattern in ignore_patterns)] 313 actual_remaining_sstate = [x for x in self.search_sstate(target + r'.*?\.tar.zst$') if not any(pattern in x for pattern in ignore_patterns)]
203 314
204 actual_not_expected = [x for x in actual_remaining_sstate if x not in expected_remaining_sstate] 315 actual_not_expected = [x for x in actual_remaining_sstate if x not in expected_remaining_sstate]
205 self.assertFalse(actual_not_expected, msg="Files should have been removed but ware not: %s" % ', '.join(map(str, actual_not_expected))) 316 self.assertFalse(actual_not_expected, msg="Files should have been removed but were not: %s" % ', '.join(map(str, actual_not_expected)))
206 expected_not_actual = [x for x in expected_remaining_sstate if x not in actual_remaining_sstate] 317 expected_not_actual = [x for x in expected_remaining_sstate if x not in actual_remaining_sstate]
 207 self.assertFalse(expected_not_actual, msg="Extra files ware removed: %s" ', '.join(map(str, expected_not_actual))) 318 self.assertFalse(expected_not_actual, msg="Extra files were removed: %s" % ', '.join(map(str, expected_not_actual)))
208 319
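For reference, the cache-management invocation driven above can also be run by hand against any cache directory (the path shown is hypothetical); as exercised here, -y answers the confirmation prompt and --remove-duplicated removes objects superseded by newer ones:

    sstate-cache-management.py -y --cache-dir=/srv/sstate-cache --remove-duplicated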
209 def test_sstate_cache_management_script_using_pr_1(self): 320 def test_sstate_cache_management_script_using_pr_1(self):
210 global_config = [] 321 global_config = []
@@ -242,18 +353,12 @@ class SStateTests(SStateBase):
242 target_config.append('') 353 target_config.append('')
243 self.run_test_sstate_cache_management_script('m4', global_config, target_config, ignore_patterns=['populate_lic']) 354 self.run_test_sstate_cache_management_script('m4', global_config, target_config, ignore_patterns=['populate_lic'])
244 355
245 def test_sstate_32_64_same_hash(self): 356class SStateHashSameSigs(SStateBase):
246 """ 357 def sstate_hashtest(self, sdkmachine):
247 The sstate checksums for both native and target should not vary whether
248 they're built on a 32 or 64 bit system. Rather than requiring two different
249 build machines and running a builds, override the variables calling uname()
250 manually and check using bitbake -S.
251 """
252 358
253 self.write_config(""" 359 self.write_config("""
254MACHINE = "qemux86" 360MACHINE = "qemux86"
255TMPDIR = "${TOPDIR}/tmp-sstatesamehash" 361TMPDIR = "${TOPDIR}/tmp-sstatesamehash"
256TCLIBCAPPEND = ""
257BUILD_ARCH = "x86_64" 362BUILD_ARCH = "x86_64"
258BUILD_OS = "linux" 363BUILD_OS = "linux"
259SDKMACHINE = "x86_64" 364SDKMACHINE = "x86_64"
@@ -261,24 +366,23 @@ PACKAGE_CLASSES = "package_rpm package_ipk package_deb"
261BB_SIGNATURE_HANDLER = "OEBasicHash" 366BB_SIGNATURE_HANDLER = "OEBasicHash"
262""") 367""")
263 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") 368 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
264 bitbake("core-image-sato -S none") 369 bitbake("core-image-weston -S none")
265 self.write_config(""" 370 self.write_config("""
266MACHINE = "qemux86" 371MACHINE = "qemux86"
267TMPDIR = "${TOPDIR}/tmp-sstatesamehash2" 372TMPDIR = "${TOPDIR}/tmp-sstatesamehash2"
268TCLIBCAPPEND = ""
269BUILD_ARCH = "i686" 373BUILD_ARCH = "i686"
270BUILD_OS = "linux" 374BUILD_OS = "linux"
271SDKMACHINE = "i686" 375SDKMACHINE = "%s"
272PACKAGE_CLASSES = "package_rpm package_ipk package_deb" 376PACKAGE_CLASSES = "package_rpm package_ipk package_deb"
273BB_SIGNATURE_HANDLER = "OEBasicHash" 377BB_SIGNATURE_HANDLER = "OEBasicHash"
274""") 378""" % sdkmachine)
275 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2") 379 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
276 bitbake("core-image-sato -S none") 380 bitbake("core-image-weston -S none")
277 381
278 def get_files(d): 382 def get_files(d):
279 f = [] 383 f = []
280 for root, dirs, files in os.walk(d): 384 for root, dirs, files in os.walk(d):
281 if "core-image-sato" in root: 385 if "core-image-weston" in root:
282 # SDKMACHINE changing will change 386 # SDKMACHINE changing will change
283 # do_rootfs/do_testimage/do_build stamps of images which 387 # do_rootfs/do_testimage/do_build stamps of images which
284 # is safe to ignore. 388 # is safe to ignore.
@@ -291,6 +395,20 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
291 self.maxDiff = None 395 self.maxDiff = None
292 self.assertCountEqual(files1, files2) 396 self.assertCountEqual(files1, files2)
293 397
398 def test_sstate_32_64_same_hash(self):
399 """
400 The sstate checksums for both native and target should not vary whether
401 they're built on a 32 or 64 bit system. Rather than requiring two different
 402 build machines and running the builds, override the variables calling uname()
403 manually and check using bitbake -S.
404 """
405 self.sstate_hashtest("i686")
406
407 def test_sstate_sdk_arch_same_hash(self):
408 """
409 Similarly, test an arm SDK has the same hashes
410 """
411 self.sstate_hashtest("aarch64")
294 412
295 def test_sstate_nativelsbstring_same_hash(self): 413 def test_sstate_nativelsbstring_same_hash(self):
296 """ 414 """
@@ -301,20 +419,18 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
301 419
302 self.write_config(""" 420 self.write_config("""
303TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" 421TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
304TCLIBCAPPEND = \"\"
305NATIVELSBSTRING = \"DistroA\" 422NATIVELSBSTRING = \"DistroA\"
306BB_SIGNATURE_HANDLER = "OEBasicHash" 423BB_SIGNATURE_HANDLER = "OEBasicHash"
307""") 424""")
308 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") 425 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
309 bitbake("core-image-sato -S none") 426 bitbake("core-image-weston -S none")
310 self.write_config(""" 427 self.write_config("""
311TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" 428TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
312TCLIBCAPPEND = \"\"
313NATIVELSBSTRING = \"DistroB\" 429NATIVELSBSTRING = \"DistroB\"
314BB_SIGNATURE_HANDLER = "OEBasicHash" 430BB_SIGNATURE_HANDLER = "OEBasicHash"
315""") 431""")
316 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2") 432 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
317 bitbake("core-image-sato -S none") 433 bitbake("core-image-weston -S none")
318 434
319 def get_files(d): 435 def get_files(d):
320 f = [] 436 f = []
@@ -327,6 +443,7 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
327 self.maxDiff = None 443 self.maxDiff = None
328 self.assertCountEqual(files1, files2) 444 self.assertCountEqual(files1, files2)
329 445
446class SStateHashSameSigs2(SStateBase):
330 def test_sstate_allarch_samesigs(self): 447 def test_sstate_allarch_samesigs(self):
331 """ 448 """
332 The sstate checksums of allarch packages should be independent of whichever 449 The sstate checksums of allarch packages should be independent of whichever
@@ -337,17 +454,19 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
337 454
338 configA = """ 455 configA = """
339TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" 456TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
340TCLIBCAPPEND = \"\"
341MACHINE = \"qemux86-64\" 457MACHINE = \"qemux86-64\"
342BB_SIGNATURE_HANDLER = "OEBasicHash" 458BB_SIGNATURE_HANDLER = "OEBasicHash"
343""" 459"""
 460 # OLDEST_KERNEL is arch-specific, so set it to a different value here for testing
344 configB = """ 461 configB = """
345TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" 462TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
346TCLIBCAPPEND = \"\"
347MACHINE = \"qemuarm\" 463MACHINE = \"qemuarm\"
464OLDEST_KERNEL = \"3.3.0\"
348BB_SIGNATURE_HANDLER = "OEBasicHash" 465BB_SIGNATURE_HANDLER = "OEBasicHash"
466ERROR_QA:append = " somenewoption"
467WARN_QA:append = " someotheroption"
349""" 468"""
350 self.sstate_allarch_samesigs(configA, configB) 469 self.sstate_common_samesigs(configA, configB, allarch=True)
351 470
352 def test_sstate_nativesdk_samesigs_multilib(self): 471 def test_sstate_nativesdk_samesigs_multilib(self):
353 """ 472 """
@@ -356,51 +475,22 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
356 475
357 configA = """ 476 configA = """
358TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" 477TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
359TCLIBCAPPEND = \"\"
360MACHINE = \"qemux86-64\" 478MACHINE = \"qemux86-64\"
361require conf/multilib.conf 479require conf/multilib.conf
362MULTILIBS = \"multilib:lib32\" 480MULTILIBS = \"multilib:lib32\"
363DEFAULTTUNE_virtclass-multilib-lib32 = \"x86\" 481DEFAULTTUNE:virtclass-multilib-lib32 = \"x86\"
364BB_SIGNATURE_HANDLER = "OEBasicHash" 482BB_SIGNATURE_HANDLER = "OEBasicHash"
365""" 483"""
366 configB = """ 484 configB = """
367TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" 485TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
368TCLIBCAPPEND = \"\"
369MACHINE = \"qemuarm\" 486MACHINE = \"qemuarm\"
370require conf/multilib.conf 487require conf/multilib.conf
371MULTILIBS = \"\" 488MULTILIBS = \"\"
372BB_SIGNATURE_HANDLER = "OEBasicHash" 489BB_SIGNATURE_HANDLER = "OEBasicHash"
373""" 490"""
374 self.sstate_allarch_samesigs(configA, configB) 491 self.sstate_common_samesigs(configA, configB)
375
376 def sstate_allarch_samesigs(self, configA, configB):
377
378 self.write_config(configA)
379 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
380 bitbake("world meta-toolchain -S none")
381 self.write_config(configB)
382 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
383 bitbake("world meta-toolchain -S none")
384
385 def get_files(d):
386 f = {}
387 for root, dirs, files in os.walk(d):
388 for name in files:
389 if "meta-environment" in root or "cross-canadian" in root:
390 continue
391 if "do_build" not in name:
392 # 1.4.1+gitAUTOINC+302fca9f4c-r0.do_package_write_ipk.sigdata.f3a2a38697da743f0dbed8b56aafcf79
393 (_, task, _, shash) = name.rsplit(".", 3)
394 f[os.path.join(os.path.basename(root), task)] = shash
395 return f
396
397 nativesdkdir = os.path.basename(glob.glob(self.topdir + "/tmp-sstatesamehash/stamps/*-nativesdk*-linux")[0])
398
399 files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps/" + nativesdkdir)
400 files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps/" + nativesdkdir)
401 self.maxDiff = None
402 self.assertEqual(files1, files2)
403 492
493class SStateHashSameSigs3(SStateBase):
404 def test_sstate_sametune_samesigs(self): 494 def test_sstate_sametune_samesigs(self):
405 """ 495 """
406 The sstate checksums of two identical machines (using the same tune) should be the 496 The sstate checksums of two identical machines (using the same tune) should be the
@@ -410,22 +500,20 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
410 500
411 self.write_config(""" 501 self.write_config("""
412TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" 502TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
413TCLIBCAPPEND = \"\"
414MACHINE = \"qemux86\" 503MACHINE = \"qemux86\"
415require conf/multilib.conf 504require conf/multilib.conf
416MULTILIBS = "multilib:lib32" 505MULTILIBS = "multilib:lib32"
417DEFAULTTUNE_virtclass-multilib-lib32 = "x86" 506DEFAULTTUNE:virtclass-multilib-lib32 = "x86"
418BB_SIGNATURE_HANDLER = "OEBasicHash" 507BB_SIGNATURE_HANDLER = "OEBasicHash"
419""") 508""")
420 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") 509 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
421 bitbake("world meta-toolchain -S none") 510 bitbake("world meta-toolchain -S none")
422 self.write_config(""" 511 self.write_config("""
423TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" 512TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
424TCLIBCAPPEND = \"\"
425MACHINE = \"qemux86copy\" 513MACHINE = \"qemux86copy\"
426require conf/multilib.conf 514require conf/multilib.conf
427MULTILIBS = "multilib:lib32" 515MULTILIBS = "multilib:lib32"
428DEFAULTTUNE_virtclass-multilib-lib32 = "x86" 516DEFAULTTUNE:virtclass-multilib-lib32 = "x86"
429BB_SIGNATURE_HANDLER = "OEBasicHash" 517BB_SIGNATURE_HANDLER = "OEBasicHash"
430""") 518""")
431 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2") 519 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
@@ -435,7 +523,7 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
435 f = [] 523 f = []
436 for root, dirs, files in os.walk(d): 524 for root, dirs, files in os.walk(d):
437 for name in files: 525 for name in files:
438 if "meta-environment" in root or "cross-canadian" in root: 526 if "meta-environment" in root or "cross-canadian" in root or 'meta-ide-support' in root:
439 continue 527 continue
440 if "qemux86copy-" in root or "qemux86-" in root: 528 if "qemux86copy-" in root or "qemux86-" in root:
441 continue 529 continue
@@ -458,18 +546,16 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
458 546
459 self.write_config(""" 547 self.write_config("""
460TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" 548TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
461TCLIBCAPPEND = \"\"
462MACHINE = \"qemux86\" 549MACHINE = \"qemux86\"
463require conf/multilib.conf 550require conf/multilib.conf
464MULTILIBS = "multilib:lib32" 551MULTILIBS = "multilib:lib32"
465DEFAULTTUNE_virtclass-multilib-lib32 = "x86" 552DEFAULTTUNE:virtclass-multilib-lib32 = "x86"
466BB_SIGNATURE_HANDLER = "OEBasicHash" 553BB_SIGNATURE_HANDLER = "OEBasicHash"
467""") 554""")
468 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") 555 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
469 bitbake("binutils-native -S none") 556 bitbake("binutils-native -S none")
470 self.write_config(""" 557 self.write_config("""
471TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" 558TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
472TCLIBCAPPEND = \"\"
473MACHINE = \"qemux86copy\" 559MACHINE = \"qemux86copy\"
474BB_SIGNATURE_HANDLER = "OEBasicHash" 560BB_SIGNATURE_HANDLER = "OEBasicHash"
475""") 561""")
@@ -488,7 +574,7 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
488 self.maxDiff = None 574 self.maxDiff = None
489 self.assertCountEqual(files1, files2) 575 self.assertCountEqual(files1, files2)
490 576
491 577class SStateHashSameSigs4(SStateBase):
492 def test_sstate_noop_samesigs(self): 578 def test_sstate_noop_samesigs(self):
493 """ 579 """
494 The sstate checksums of two builds with these variables changed or 580 The sstate checksums of two builds with these variables changed or
@@ -497,13 +583,12 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
497 583
498 self.write_config(""" 584 self.write_config("""
499TMPDIR = "${TOPDIR}/tmp-sstatesamehash" 585TMPDIR = "${TOPDIR}/tmp-sstatesamehash"
500TCLIBCAPPEND = ""
501BB_NUMBER_THREADS = "${@oe.utils.cpu_count()}" 586BB_NUMBER_THREADS = "${@oe.utils.cpu_count()}"
502PARALLEL_MAKE = "-j 1" 587PARALLEL_MAKE = "-j 1"
503DL_DIR = "${TOPDIR}/download1" 588DL_DIR = "${TOPDIR}/download1"
504TIME = "111111" 589TIME = "111111"
505DATE = "20161111" 590DATE = "20161111"
506INHERIT_remove = "buildstats-summary buildhistory uninative" 591INHERIT:remove = "buildstats-summary buildhistory uninative"
507http_proxy = "" 592http_proxy = ""
508BB_SIGNATURE_HANDLER = "OEBasicHash" 593BB_SIGNATURE_HANDLER = "OEBasicHash"
509""") 594""")
@@ -512,14 +597,13 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
512 bitbake("world meta-toolchain -S none") 597 bitbake("world meta-toolchain -S none")
513 self.write_config(""" 598 self.write_config("""
514TMPDIR = "${TOPDIR}/tmp-sstatesamehash2" 599TMPDIR = "${TOPDIR}/tmp-sstatesamehash2"
515TCLIBCAPPEND = ""
516BB_NUMBER_THREADS = "${@oe.utils.cpu_count()+1}" 600BB_NUMBER_THREADS = "${@oe.utils.cpu_count()+1}"
517PARALLEL_MAKE = "-j 2" 601PARALLEL_MAKE = "-j 2"
518DL_DIR = "${TOPDIR}/download2" 602DL_DIR = "${TOPDIR}/download2"
519TIME = "222222" 603TIME = "222222"
520DATE = "20161212" 604DATE = "20161212"
521# Always remove uninative as we're changing proxies 605# Always remove uninative as we're changing proxies
522INHERIT_remove = "uninative" 606INHERIT:remove = "uninative"
523INHERIT += "buildstats-summary buildhistory" 607INHERIT += "buildstats-summary buildhistory"
524http_proxy = "http://example.com/" 608http_proxy = "http://example.com/"
525BB_SIGNATURE_HANDLER = "OEBasicHash" 609BB_SIGNATURE_HANDLER = "OEBasicHash"
@@ -573,3 +657,334 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
573 compare_sigfiles(rest, files1, files2, compare=False) 657 compare_sigfiles(rest, files1, files2, compare=False)
574 658
575 self.fail("sstate hashes not identical.") 659 self.fail("sstate hashes not identical.")
660
661 def test_sstate_movelayer_samesigs(self):
662 """
663 The sstate checksums of two builds with the same oe-core layer in two
664 different locations should be the same.
665 """
666 core_layer = os.path.join(
667 self.tc.td["COREBASE"], 'meta')
668 copy_layer_1 = self.topdir + "/meta-copy1/meta"
669 copy_layer_2 = self.topdir + "/meta-copy2/meta"
670
671 oe.path.copytree(core_layer, copy_layer_1)
672 os.symlink(os.path.dirname(core_layer) + "/scripts", self.topdir + "/meta-copy1/scripts")
673 self.write_config("""
674TMPDIR = "${TOPDIR}/tmp-sstatesamehash"
675""")
676 bblayers_conf = 'BBLAYERS += "%s"\nBBLAYERS:remove = "%s"' % (copy_layer_1, core_layer)
677 self.write_bblayers_config(bblayers_conf)
678 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
679 bitbake("bash -S none")
680
681 oe.path.copytree(core_layer, copy_layer_2)
682 os.symlink(os.path.dirname(core_layer) + "/scripts", self.topdir + "/meta-copy2/scripts")
683 self.write_config("""
684TMPDIR = "${TOPDIR}/tmp-sstatesamehash2"
685""")
686 bblayers_conf = 'BBLAYERS += "%s"\nBBLAYERS:remove = "%s"' % (copy_layer_2, core_layer)
687 self.write_bblayers_config(bblayers_conf)
688 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
689 bitbake("bash -S none")
690
691 def get_files(d):
692 f = []
693 for root, dirs, files in os.walk(d):
694 for name in files:
695 f.append(os.path.join(root, name))
696 return f
697 files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps")
698 files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps")
699 files2 = [x.replace("tmp-sstatesamehash2", "tmp-sstatesamehash") for x in files2]
700 self.maxDiff = None
701 self.assertCountEqual(files1, files2)
702
703class SStateFindSiginfo(SStateBase):
704 def test_sstate_compare_sigfiles_and_find_siginfo(self):
705 """
706 Test the functionality of the find_siginfo: basic function and callback in compare_sigfiles
707 """
708 self.write_config("""
709TMPDIR = \"${TOPDIR}/tmp-sstates-findsiginfo\"
710MACHINE = \"qemux86-64\"
711require conf/multilib.conf
712MULTILIBS = "multilib:lib32"
713DEFAULTTUNE:virtclass-multilib-lib32 = "x86"
714BB_SIGNATURE_HANDLER = "OEBasicHash"
715""")
716 self.track_for_cleanup(self.topdir + "/tmp-sstates-findsiginfo")
717
718 pns = ["binutils", "binutils-native", "lib32-binutils"]
719 target_configs = [
720"""
721TMPVAL1 = "tmpval1"
722TMPVAL2 = "tmpval2"
723do_tmptask1() {
724 echo ${TMPVAL1}
725}
726do_tmptask2() {
727 echo ${TMPVAL2}
728}
729addtask do_tmptask1
730addtask tmptask2 before do_tmptask1
731""",
732"""
733TMPVAL3 = "tmpval3"
734TMPVAL4 = "tmpval4"
735do_tmptask1() {
736 echo ${TMPVAL3}
737}
738do_tmptask2() {
739 echo ${TMPVAL4}
740}
741addtask do_tmptask1
742addtask tmptask2 before do_tmptask1
743"""
744 ]
745
746 for target_config in target_configs:
747 self.write_recipeinc("binutils", target_config)
748 for pn in pns:
749 bitbake("%s -c do_tmptask1 -S none" % pn)
750 self.delete_recipeinc("binutils")
751
752 with bb.tinfoil.Tinfoil() as tinfoil:
753 tinfoil.prepare(config_only=True)
754
755 def find_siginfo(pn, taskname, sigs=None):
756 result = None
757 command_complete = False
758 tinfoil.set_event_mask(["bb.event.FindSigInfoResult",
759 "bb.command.CommandCompleted"])
760 ret = tinfoil.run_command("findSigInfo", pn, taskname, sigs)
761 if ret:
762 while result is None or not command_complete:
763 event = tinfoil.wait_event(1)
764 if event:
765 if isinstance(event, bb.command.CommandCompleted):
766 command_complete = True
767 elif isinstance(event, bb.event.FindSigInfoResult):
768 result = event.result
769 return result
770
771 def recursecb(key, hash1, hash2):
772 nonlocal recursecb_count
773 recursecb_count += 1
774 hashes = [hash1, hash2]
775 hashfiles = find_siginfo(key, None, hashes)
776 self.assertCountEqual(hashes, hashfiles)
777 bb.siggen.compare_sigfiles(hashfiles[hash1]['path'], hashfiles[hash2]['path'], recursecb)
778
779 for pn in pns:
780 recursecb_count = 0
781 matches = find_siginfo(pn, "do_tmptask1")
782 self.assertGreaterEqual(len(matches), 2)
783 latesthashes = sorted(matches.keys(), key=lambda h: matches[h]['time'])[-2:]
784 bb.siggen.compare_sigfiles(matches[latesthashes[-2]]['path'], matches[latesthashes[-1]]['path'], recursecb)
 785 self.assertEqual(recursecb_count, 1)
786
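As a practical aside, the same find_siginfo/compare_sigfiles machinery backs the bitbake-diffsigs utility, so a rough manual equivalent of what this test drives (recipe and task as above) would be:

    bitbake-diffsigs -t binutils do_tmptask1

which locates the two most recent signature files for the task and prints their differences.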
787class SStatePrintdiff(SStateBase):
788 def run_test_printdiff_changerecipe(self, target, change_recipe, change_bbtask, change_content, expected_sametmp_output, expected_difftmp_output):
789 import time
790 self.write_config("""
791TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-sametmp-{}"
792""".format(time.time()))
793 # Use runall do_build to ensure any indirect sstate is created, e.g. tzcode-native on both x86 and
794 # aarch64 hosts since only allarch target recipes depend upon it and it may not be built otherwise.
 795 # For example, running bitbake -c cleansstate tzcode-native would cause some of these tests to error.
796 bitbake("--runall build --runall deploy_source_date_epoch {}".format(target))
797 bitbake("-S none {}".format(target))
798 bitbake(change_bbtask)
799 self.write_recipeinc(change_recipe, change_content)
800 result_sametmp = bitbake("-S printdiff {}".format(target))
801
802 self.write_config("""
803TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-difftmp-{}"
804""".format(time.time()))
805 result_difftmp = bitbake("-S printdiff {}".format(target))
806
807 self.delete_recipeinc(change_recipe)
808 for item in expected_sametmp_output:
809 self.assertIn(item, result_sametmp.output, msg = "Item {} not found in output:\n{}".format(item, result_sametmp.output))
810 for item in expected_difftmp_output:
811 self.assertIn(item, result_difftmp.output, msg = "Item {} not found in output:\n{}".format(item, result_difftmp.output))
812
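The sequence these helpers automate can be reproduced manually when debugging a signature change; a hypothetical session against an illustrative target:

    bitbake --runall build --runall deploy_source_date_epoch core-image-minimal
    bitbake -S none core-image-minimal       # record the current task signatures
    # ...modify a recipe or the configuration...
    bitbake -S printdiff core-image-minimal  # report which tasks can no longer be reused, and why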
813 def run_test_printdiff_changeconfig(self, target, change_bbtasks, change_content, expected_sametmp_output, expected_difftmp_output):
814 import time
815 self.write_config("""
816TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-sametmp-{}"
817""".format(time.time()))
818 bitbake("--runall build --runall deploy_source_date_epoch {}".format(target))
819 bitbake("-S none {}".format(target))
820 bitbake(" ".join(change_bbtasks))
821 self.append_config(change_content)
822 result_sametmp = bitbake("-S printdiff {}".format(target))
823
824 self.write_config("""
825TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-difftmp-{}"
826""".format(time.time()))
827 self.append_config(change_content)
828 result_difftmp = bitbake("-S printdiff {}".format(target))
829
830 for item in expected_sametmp_output:
831 self.assertIn(item, result_sametmp.output, msg = "Item {} not found in output:\n{}".format(item, result_sametmp.output))
832 for item in expected_difftmp_output:
833 self.assertIn(item, result_difftmp.output, msg = "Item {} not found in output:\n{}".format(item, result_difftmp.output))
834
835
 836 # Check that printdiff walks the full dependency chain from the image target down to the changed recipe
837 def test_image_minimal_vs_perlcross(self):
838 expected_output = ("Task perlcross-native:do_install couldn't be used from the cache because:",
839"We need hash",
840"most recent matching task was")
841 expected_sametmp_output = expected_output + (
842"Variable do_install value changed",
843'+ echo "this changes the task signature"')
844 expected_difftmp_output = expected_output
845
846 self.run_test_printdiff_changerecipe("core-image-minimal", "perlcross", "-c do_install perlcross-native",
847"""
848do_install:append() {
849 echo "this changes the task signature"
850}
851""",
852expected_sametmp_output, expected_difftmp_output)
853
854 # Check if changes to gcc-source (which uses tmp/work-shared) are correctly discovered
855 def test_gcc_runtime_vs_gcc_source(self):
856 gcc_source_pn = 'gcc-source-%s' % get_bb_vars(['PV'], 'gcc')['PV']
857
858 expected_output = ("Task {}:do_preconfigure couldn't be used from the cache because:".format(gcc_source_pn),
859"We need hash",
860"most recent matching task was")
861 expected_sametmp_output = expected_output + (
862"Variable do_preconfigure value changed",
863'+ print("this changes the task signature")')
864 expected_difftmp_output = expected_output
865
866 self.run_test_printdiff_changerecipe("gcc-runtime", "gcc-source", "-c do_preconfigure {}".format(gcc_source_pn),
867"""
868python do_preconfigure:append() {
869 print("this changes the task signature")
870}
871""",
872expected_sametmp_output, expected_difftmp_output)
873
 874 # Check that changing a base task definition is reported against the multiple core recipes using it
875 def test_image_minimal_vs_base_do_configure(self):
876 change_bbtasks = ('zstd-native:do_configure',
877'texinfo-dummy-native:do_configure',
878'ldconfig-native:do_configure',
879'gettext-minimal-native:do_configure',
880'tzcode-native:do_configure',
881'makedevs-native:do_configure',
882'pigz-native:do_configure',
883'update-rc.d-native:do_configure',
884'unzip-native:do_configure',
885'gnu-config-native:do_configure')
886
887 expected_output = ["Task {} couldn't be used from the cache because:".format(t) for t in change_bbtasks] + [
888"We need hash",
889"most recent matching task was"]
890
891 expected_sametmp_output = expected_output + [
892"Variable base_do_configure value changed",
893'+ echo "this changes base_do_configure() definiton "']
894 expected_difftmp_output = expected_output
895
896 self.run_test_printdiff_changeconfig("core-image-minimal",change_bbtasks,
897"""
898INHERIT += "base-do-configure-modified"
899""",
900expected_sametmp_output, expected_difftmp_output)
901
902class SStateCheckObjectPresence(SStateBase):
903 def check_bb_output(self, output, targets, exceptions, check_cdn):
904 def is_exception(object, exceptions):
905 for e in exceptions:
906 if re.search(e, object):
907 return True
908 return False
909
910 # sstate is checked for existence of these, but they never get written out to begin with
911 exceptions += ["{}.*image_qa".format(t) for t in targets.split()]
912 exceptions += ["{}.*deploy_source_date_epoch".format(t) for t in targets.split()]
913 exceptions += ["{}.*image_complete".format(t) for t in targets.split()]
914 exceptions += ["linux-yocto.*shared_workdir"]
 915 # these are influenced by IMAGE_FSTYPES tweaks in yocto-autobuilder-helper's config.json (on x86-64)
 916 # additionally, they depend on image tasks (package, install, etc.) that are noexec and so leave no stamps,
 917 # which makes tracing other changes difficult
918 exceptions += ["{}.*create_.*spdx".format(t) for t in targets.split()]
919
920 output_l = output.splitlines()
921 for l in output_l:
922 if l.startswith("Sstate summary"):
923 for idx, item in enumerate(l.split()):
924 if item == 'Missed':
925 missing_objects = int(l.split()[idx+1])
926 break
927 else:
 928 self.fail("Did not find the missing object count in the sstate summary: {}".format(l))
929 break
930 else:
931 self.fail("Did not find 'Sstate summary' line in bitbake output")
932
933 failed_urls = []
934 failed_urls_extrainfo = []
935 for l in output_l:
936 if "SState: Unsuccessful fetch test for" in l and check_cdn:
937 missing_object = l.split()[6]
938 elif "SState: Looked for but didn't find file" in l and not check_cdn:
939 missing_object = l.split()[8]
940 else:
941 missing_object = None
942 if missing_object:
943 if not is_exception(missing_object, exceptions):
944 failed_urls.append(missing_object)
945 else:
946 missing_objects -= 1
947
948 if "urlopen failed for" in l and not is_exception(l, exceptions):
949 failed_urls_extrainfo.append(l)
950
 951 self.assertEqual(len(failed_urls), missing_objects, "Number of reported missing objects does not match failed URLs: {}\nFailed URLs:\n{}\nFetcher diagnostics:\n{}".format(missing_objects, "\n".join(failed_urls), "\n".join(failed_urls_extrainfo)))
952 self.assertEqual(len(failed_urls), 0, "Missing objects in the cache:\n{}\nFetcher diagnostics:\n{}".format("\n".join(failed_urls), "\n".join(failed_urls_extrainfo)))
953
954@OETestTag("yocto-mirrors")
955class SStateMirrors(SStateCheckObjectPresence):
956 def run_test(self, machine, targets, exceptions, check_cdn = True, ignore_errors = False):
957 if check_cdn:
958 self.config_sstate(True)
959 self.append_config("""
960MACHINE = "{}"
961BB_HASHSERVE_UPSTREAM = "hashserv.yoctoproject.org:8686"
962SSTATE_MIRRORS ?= "file://.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH"
963""".format(machine))
964 else:
965 self.append_config("""
966MACHINE = "{}"
967""".format(machine))
968 result = bitbake("-DD -n {}".format(targets))
969 bitbake("-S none {}".format(targets))
970 if ignore_errors:
971 return
972 self.check_bb_output(result.output, targets, exceptions, check_cdn)
973
974 def test_cdn_mirror_qemux86_64(self):
975 exceptions = []
976 self.run_test("qemux86-64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, ignore_errors = True)
977 self.run_test("qemux86-64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions)
978
979 def test_cdn_mirror_qemuarm64(self):
980 exceptions = []
981 self.run_test("qemuarm64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, ignore_errors = True)
982 self.run_test("qemuarm64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions)
983
984 def test_local_cache_qemux86_64(self):
985 exceptions = []
986 self.run_test("qemux86-64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, check_cdn = False)
987
988 def test_local_cache_qemuarm64(self):
989 exceptions = []
990 self.run_test("qemuarm64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, check_cdn = False)
diff --git a/meta/lib/oeqa/selftest/cases/sysroot.py b/meta/lib/oeqa/selftest/cases/sysroot.py
index 6e34927c90..ef854f6fee 100644
--- a/meta/lib/oeqa/selftest/cases/sysroot.py
+++ b/meta/lib/oeqa/selftest/cases/sysroot.py
@@ -1,11 +1,13 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import uuid 7import uuid
6 8
7from oeqa.selftest.case import OESelftestTestCase 9from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import bitbake 10from oeqa.utils.commands import bitbake
9 11
10class SysrootTests(OESelftestTestCase): 12class SysrootTests(OESelftestTestCase):
11 def test_sysroot_cleanup(self): 13 def test_sysroot_cleanup(self):
@@ -24,14 +26,61 @@ class SysrootTests(OESelftestTestCase):
24 self.write_config(""" 26 self.write_config("""
25PREFERRED_PROVIDER_virtual/sysroot-test = "sysroot-test-arch1" 27PREFERRED_PROVIDER_virtual/sysroot-test = "sysroot-test-arch1"
26MACHINE = "qemux86" 28MACHINE = "qemux86"
27TESTSTRING_pn-sysroot-test-arch1 = "%s" 29TESTSTRING:pn-sysroot-test-arch1 = "%s"
28TESTSTRING_pn-sysroot-test-arch2 = "%s" 30TESTSTRING:pn-sysroot-test-arch2 = "%s"
29""" % (uuid1, uuid2)) 31""" % (uuid1, uuid2))
30 bitbake("sysroot-test") 32 bitbake("sysroot-test")
31 self.write_config(""" 33 self.write_config("""
32PREFERRED_PROVIDER_virtual/sysroot-test = "sysroot-test-arch2" 34PREFERRED_PROVIDER_virtual/sysroot-test = "sysroot-test-arch2"
33MACHINE = "qemux86copy" 35MACHINE = "qemux86copy"
34TESTSTRING_pn-sysroot-test-arch1 = "%s" 36TESTSTRING:pn-sysroot-test-arch1 = "%s"
35TESTSTRING_pn-sysroot-test-arch2 = "%s" 37TESTSTRING:pn-sysroot-test-arch2 = "%s"
36""" % (uuid1, uuid2)) 38""" % (uuid1, uuid2))
37 bitbake("sysroot-test") 39 bitbake("sysroot-test")
40
41 def test_sysroot_max_shebang(self):
42 """
43 Summary: Check max shebang triggers. To confirm [YOCTO #11053] is closed.
 44 Expected: Fail when a shebang exceeds the maximum shebang size.
45 Author: Paulo Neves <ptsneves@gmail.com>
46 """
47 expected = "maximum shebang size exceeded, the maximum size is 128. [shebang-size]"
48 res = bitbake("sysroot-shebang-test-native -c populate_sysroot", ignore_status=True)
49 self.assertTrue(expected in res.output, msg=res.output)
50 self.assertTrue(res.status != 0)
51
52 def test_sysroot_la(self):
53 """
54 Summary: Check that workdir paths are not contained in .la files.
55 Expected: Fail when a workdir path is found in the file content.
56 Author: Paulo Neves <ptsneves@gmail.com>
57 """
58 expected = "la-test.la failed sanity test (workdir) in path"
59
60 res = bitbake("sysroot-la-test -c populate_sysroot", ignore_status=True)
61 self.assertTrue(expected in res.output, msg=res.output)
62 self.assertTrue('[la]' in res.output, msg=res.output)
63 self.assertTrue(res.status != 0)
64
65 res = bitbake("sysroot-la-test-native -c populate_sysroot", ignore_status=True)
66 self.assertTrue(expected in res.output, msg=res.output)
67 self.assertTrue('[la]' in res.output, msg=res.output)
68 self.assertTrue(res.status != 0)
69
70 def test_sysroot_pkgconfig(self):
71 """
72 Summary: Check that tmpdir paths are not contained in .pc files.
73 Expected: Fail when a tmpdir path is found in the file content.
74 Author: Paulo Neves <ptsneves@gmail.com>
75 """
76 expected = "test.pc failed sanity test (tmpdir) in path"
77
78 res = bitbake("sysroot-pc-test -c populate_sysroot", ignore_status=True)
79 self.assertTrue('[pkgconfig]' in res.output, msg=res.output)
80 self.assertTrue(expected in res.output, msg=res.output)
81 self.assertTrue(res.status != 0)
82
83 res = bitbake("sysroot-pc-test-native -c populate_sysroot", ignore_status=True)
84 self.assertTrue(expected in res.output, msg=res.output)
85 self.assertTrue('[pkgconfig]' in res.output, msg=res.output)
86 self.assertTrue(res.status != 0)
diff --git a/meta/lib/oeqa/selftest/cases/tinfoil.py b/meta/lib/oeqa/selftest/cases/tinfoil.py
index a51c6048d3..21c8686b2a 100644
--- a/meta/lib/oeqa/selftest/cases/tinfoil.py
+++ b/meta/lib/oeqa/selftest/cases/tinfoil.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -9,7 +11,6 @@ import logging
9import bb.tinfoil 11import bb.tinfoil
10 12
11from oeqa.selftest.case import OESelftestTestCase 13from oeqa.selftest.case import OESelftestTestCase
12from oeqa.utils.commands import runCmd
13 14
14class TinfoilTests(OESelftestTestCase): 15class TinfoilTests(OESelftestTestCase):
15 """ Basic tests for the tinfoil API """ 16 """ Basic tests for the tinfoil API """
@@ -47,6 +48,17 @@ class TinfoilTests(OESelftestTestCase):
47 rd = tinfoil.parse_recipe_file(best[3]) 48 rd = tinfoil.parse_recipe_file(best[3])
48 self.assertEqual(testrecipe, rd.getVar('PN')) 49 self.assertEqual(testrecipe, rd.getVar('PN'))
49 50
51 def test_parse_virtual_recipe(self):
52 with bb.tinfoil.Tinfoil() as tinfoil:
53 tinfoil.prepare(config_only=False, quiet=2)
54 testrecipe = 'nativesdk-gcc'
55 best = tinfoil.find_best_provider(testrecipe)
56 if not best:
57 self.fail('Unable to find recipe providing %s' % testrecipe)
58 rd = tinfoil.parse_recipe_file(best[3])
59 self.assertEqual(testrecipe, rd.getVar('PN'))
60 self.assertIsNotNone(rd.getVar('FILE_LAYERNAME'))
61
50 def test_parse_recipe_copy_expand(self): 62 def test_parse_recipe_copy_expand(self):
51 with bb.tinfoil.Tinfoil() as tinfoil: 63 with bb.tinfoil.Tinfoil() as tinfoil:
52 tinfoil.prepare(config_only=False, quiet=2) 64 tinfoil.prepare(config_only=False, quiet=2)
@@ -65,6 +77,32 @@ class TinfoilTests(OESelftestTestCase):
65 localdata.setVar('PN', 'hello') 77 localdata.setVar('PN', 'hello')
66 self.assertEqual('hello', localdata.getVar('BPN')) 78 self.assertEqual('hello', localdata.getVar('BPN'))
67 79
80 # The config_data API to parse_recipe_file is used by:
81 # layerindex-web layerindex/update_layer.py
82 def test_parse_recipe_custom_data(self):
83 with bb.tinfoil.Tinfoil() as tinfoil:
84 tinfoil.prepare(config_only=False, quiet=2)
85 localdata = bb.data.createCopy(tinfoil.config_data)
86 localdata.setVar("TESTVAR", "testval")
87 testrecipe = 'mdadm'
88 best = tinfoil.find_best_provider(testrecipe)
89 if not best:
90 self.fail('Unable to find recipe providing %s' % testrecipe)
91 rd = tinfoil.parse_recipe_file(best[3], config_data=localdata)
92 self.assertEqual("testval", rd.getVar('TESTVAR'))
93
94 def test_parse_virtual_recipe_custom_data(self):
95 with bb.tinfoil.Tinfoil() as tinfoil:
96 tinfoil.prepare(config_only=False, quiet=2)
97 localdata = bb.data.createCopy(tinfoil.config_data)
98 localdata.setVar("TESTVAR", "testval")
99 testrecipe = 'nativesdk-gcc'
100 best = tinfoil.find_best_provider(testrecipe)
101 if not best:
102 self.fail('Unable to find recipe providing %s' % testrecipe)
103 rd = tinfoil.parse_recipe_file(best[3], config_data=localdata)
104 self.assertEqual("testval", rd.getVar('TESTVAR'))
105
68 def test_list_recipes(self): 106 def test_list_recipes(self):
69 with bb.tinfoil.Tinfoil() as tinfoil: 107 with bb.tinfoil.Tinfoil() as tinfoil:
70 tinfoil.prepare(config_only=False, quiet=2) 108 tinfoil.prepare(config_only=False, quiet=2)
@@ -87,21 +125,20 @@ class TinfoilTests(OESelftestTestCase):
87 with bb.tinfoil.Tinfoil() as tinfoil: 125 with bb.tinfoil.Tinfoil() as tinfoil:
88 tinfoil.prepare(config_only=True) 126 tinfoil.prepare(config_only=True)
89 127
90 tinfoil.set_event_mask(['bb.event.FilesMatchingFound', 'bb.command.CommandCompleted']) 128 tinfoil.set_event_mask(['bb.event.FilesMatchingFound', 'bb.command.CommandCompleted', 'bb.command.CommandFailed', 'bb.command.CommandExit'])
91 129
92 # Need to drain events otherwise events that were masked may still be in the queue 130 # Need to drain events otherwise events that were masked may still be in the queue
93 while tinfoil.wait_event(): 131 while tinfoil.wait_event():
94 pass 132 pass
95 133
96 pattern = 'conf' 134 pattern = 'conf'
97 res = tinfoil.run_command('findFilesMatchingInDir', pattern, 'conf/machine') 135 res = tinfoil.run_command('testCookerCommandEvent', pattern, handle_events=False)
98 self.assertTrue(res) 136 self.assertTrue(res)
99 137
100 eventreceived = False 138 eventreceived = False
101 commandcomplete = False 139 commandcomplete = False
102 start = time.time() 140 start = time.time()
103 # Wait for maximum 60s in total so we'd detect spurious heartbeat events for example 141 # Wait for maximum 60s in total so we'd detect spurious heartbeat events for example
104 # The test is IO load sensitive too
105 while (not (eventreceived == True and commandcomplete == True) 142 while (not (eventreceived == True and commandcomplete == True)
106 and (time.time() - start < 60)): 143 and (time.time() - start < 60)):
107 # if we received both events (on let's say a good day), we are done 144 # if we received both events (on let's say a good day), we are done
@@ -111,14 +148,15 @@ class TinfoilTests(OESelftestTestCase):
111 commandcomplete = True 148 commandcomplete = True
112 elif isinstance(event, bb.event.FilesMatchingFound): 149 elif isinstance(event, bb.event.FilesMatchingFound):
113 self.assertEqual(pattern, event._pattern) 150 self.assertEqual(pattern, event._pattern)
114 self.assertIn('qemuarm.conf', event._matches) 151 self.assertIn('A', event._matches)
152 self.assertIn('B', event._matches)
115 eventreceived = True 153 eventreceived = True
116 elif isinstance(event, logging.LogRecord): 154 elif isinstance(event, logging.LogRecord):
117 continue 155 continue
118 else: 156 else:
119 self.fail('Unexpected event: %s' % event) 157 self.fail('Unexpected event: %s' % event)
120 158
121 self.assertTrue(commandcomplete, 'Timed out waiting for CommandCompleted event from bitbake server') 159 self.assertTrue(commandcomplete, 'Timed out waiting for CommandCompleted event from bitbake server (Matching event received: %s)' % str(eventreceived))
122 self.assertTrue(eventreceived, 'Did not receive FilesMatchingFound event from bitbake server') 160 self.assertTrue(eventreceived, 'Did not receive FilesMatchingFound event from bitbake server')
123 161
124 def test_setvariable_clean(self): 162 def test_setvariable_clean(self):
@@ -173,8 +211,8 @@ class TinfoilTests(OESelftestTestCase):
173 self.assertEqual(value, 'origvalue', 'Variable renamed using config_data.renameVar() does not appear with new name') 211 self.assertEqual(value, 'origvalue', 'Variable renamed using config_data.renameVar() does not appear with new name')
174 # Test overrides 212 # Test overrides
175 tinfoil.config_data.setVar('TESTVAR', 'original') 213 tinfoil.config_data.setVar('TESTVAR', 'original')
176 tinfoil.config_data.setVar('TESTVAR_overrideone', 'one') 214 tinfoil.config_data.setVar('TESTVAR:overrideone', 'one')
177 tinfoil.config_data.setVar('TESTVAR_overridetwo', 'two') 215 tinfoil.config_data.setVar('TESTVAR:overridetwo', 'two')
178 tinfoil.config_data.appendVar('OVERRIDES', ':overrideone') 216 tinfoil.config_data.appendVar('OVERRIDES', ':overrideone')
179 value = tinfoil.config_data.getVar('TESTVAR') 217 value = tinfoil.config_data.getVar('TESTVAR')
180 self.assertEqual(value, 'one', 'Variable overrides not functioning correctly') 218 self.assertEqual(value, 'one', 'Variable overrides not functioning correctly')
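
Taken together, the new cases boil the custom-datastore pattern down to a few calls. A condensed sketch of that flow, outside the test harness (assumes an initialised build environment; 'mdadm' is only an example provider):

    import bb.data
    import bb.tinfoil

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False, quiet=2)
        # Clone the global datastore and inject a custom variable.
        localdata = bb.data.createCopy(tinfoil.config_data)
        localdata.setVar("TESTVAR", "testval")
        # Resolve the preferred provider, then parse it against the clone.
        best = tinfoil.find_best_provider("mdadm")
        rd = tinfoil.parse_recipe_file(best[3], config_data=localdata)
        print(rd.getVar("PN"), rd.getVar("TESTVAR"))
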
diff --git a/meta/lib/oeqa/selftest/cases/toolchain.py b/meta/lib/oeqa/selftest/cases/toolchain.py
new file mode 100644
index 0000000000..b4b280d037
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/toolchain.py
@@ -0,0 +1,71 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import shutil
8import subprocess
9import tempfile
10from types import SimpleNamespace
11
12import oe.path
13from oeqa.selftest.case import OESelftestTestCase
14from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars
15
16class ToolchainTests(OESelftestTestCase):
17
18 def test_toolchain_switching(self):
19 """
20 Test that a configuration that uses GCC by default but clang for one
21 specific recipe does in fact do that.
22 """
23
24 def extract_comment(objcopy, filename):
25 """
26 Using the specified `objcopy`, return the .comment segment from
27 `filename` as a bytes().
28 """
29 with tempfile.NamedTemporaryFile(prefix="comment-") as f:
30 cmd = [objcopy, "--dump-section", ".comment=" + f.name, filename]
31 subprocess.run(cmd, check=True)
32 # clang's objcopy writes to a temporary file and renames, so we need to re-open.
33 with open(f.name, "rb") as f2:
34 return f2.read()
35
36 def check_recipe(recipe, filename, override, comment_present, comment_absent=None):
37 """
38 Check that `filename` in `recipe`'s bindir contains `comment_present` and
39 not `comment_absent`, and that the overrides contain `override`.
40 """
41 d = SimpleNamespace(**get_bb_vars(("D", "bindir", "OBJCOPY", "OVERRIDES", "PATH"), target=recipe))
42
43 self.assertIn(override, d.OVERRIDES)
44
45 binary = oe.path.join(d.D, d.bindir, filename)
46
47 objcopy = shutil.which(d.OBJCOPY, path=d.PATH)
48 self.assertIsNotNone(objcopy)
49
50 comment = extract_comment(objcopy, binary)
51 self.assertIn(comment_present, comment)
52 if comment_absent:
53 self.assertNotIn(comment_absent, comment)
54
55
56 # GCC by default, clang for selftest-hello.
57 self.write_config("""
58TOOLCHAIN = "gcc"
59TOOLCHAIN:pn-selftest-hello = "clang"
60 """)
61
62 # Force these recipes to re-install so we can extract the .comments from
63 # the install directory, as they're stripped out of the final packages.
64 bitbake("m4 selftest-hello -C install")
65
66 # m4 should be built with GCC and only GCC
67 check_recipe("m4", "m4", "toolchain-gcc", b"GCC: (GNU)", b"clang")
68
69 # helloworld should be built with clang. We can't assert that GCC is not
70 # present as it will be linked against glibc which is built with GCC.
71 check_recipe("selftest-hello", "helloworld", "toolchain-clang", b"clang version")
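
For reference, the .comment extraction above can be reproduced standalone; a minimal sketch assuming a binutils-style objcopy on PATH and an illustrative binary path:

    import subprocess
    import tempfile

    def read_comment_section(objcopy, filename):
        # Dump the .comment section into a temporary file, then re-open it
        # by name, since some objcopy implementations write via rename.
        with tempfile.NamedTemporaryFile(prefix="comment-") as f:
            cmd = [objcopy, "--dump-section", ".comment=" + f.name, filename]
            subprocess.run(cmd, check=True)
            with open(f.name, "rb") as f2:
                return f2.read()

    # e.g. b"GCC: (GNU) ..." for a gcc-built binary, b"clang version ..." for clang
    print(read_comment_section("objcopy", "/usr/bin/m4"))
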
diff --git a/meta/lib/oeqa/selftest/cases/uboot.py b/meta/lib/oeqa/selftest/cases/uboot.py
new file mode 100644
index 0000000000..980ea327f0
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/uboot.py
@@ -0,0 +1,98 @@
1# Qemu-based u-boot bootloader integration testing
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: MIT
6#
7
8from oeqa.selftest.case import OESelftestTestCase
9from oeqa.utils.commands import bitbake, runqemu, get_bb_var, get_bb_vars, runCmd
10from oeqa.core.decorator.data import skipIfNotArch, skipIfNotBuildArch
11from oeqa.core.decorator import OETestTag
12
13uboot_boot_patterns = {
14 'search_reached_prompt': "stop autoboot",
15 'search_login_succeeded': "=>",
16 'search_cmd_finished': "=>"
17 }
18
19
20class UBootTest(OESelftestTestCase):
21
22 @skipIfNotArch(['arm', 'aarch64'])
23 @OETestTag("runqemu")
24 def test_boot_uboot(self):
25 """
26 Tests building u-boot and booting it with QEMU
27 """
28
29 self.write_config("""
30QB_DEFAULT_BIOS = "u-boot.bin"
31PREFERRED_PROVIDER_virtual/bootloader = "u-boot"
32QEMU_USE_KVM = "False"
33""")
34 bitbake("virtual/bootloader core-image-minimal")
35
36 with runqemu('core-image-minimal', ssh=False, runqemuparams='nographic',
37 boot_patterns=uboot_boot_patterns) as qemu:
38
39 # test if u-boot console works
40 cmd = "version"
41 status, output = qemu.run_serial(cmd)
42 self.assertEqual(status, 1, msg=output)
43 self.assertTrue("U-Boot" in output, msg=output)
44
45 @skipIfNotArch(['aarch64'])
46 @skipIfNotBuildArch(['aarch64'])
47 @OETestTag("runqemu")
48 def test_boot_uboot_kvm_to_full_target(self):
49 """
50 Tests building u-boot and booting it with QEMU and KVM.
51 Requires working KVM on build host. See "kvm-ok" output.
52 """
53
54 runCmd("kvm-ok")
55
56 image = "core-image-minimal"
57 vars = get_bb_vars(['HOST_ARCH', 'BUILD_ARCH'], image)
58 host_arch = vars['HOST_ARCH']
59 build_arch = vars['BUILD_ARCH']
60
61 self.assertEqual(host_arch, build_arch, 'HOST_ARCH %s and BUILD_ARCH %s must match for KVM' % (host_arch, build_arch))
62
63 self.write_config("""
64QEMU_USE_KVM = "1"
65
66# Using u-boot in EFI mode, need ESP partition for grub/systemd-boot/kernel etc
67IMAGE_FSTYPES:pn-core-image-minimal:append = " wic"
68
69# easiest to follow genericarm64 setup with wks file, initrd and EFI loader
70INITRAMFS_IMAGE = "core-image-initramfs-boot"
71EFI_PROVIDER = "${@bb.utils.contains("DISTRO_FEATURES", "systemd", "systemd-boot", "grub-efi", d)}"
72WKS_FILE = "genericarm64.wks.in"
73
74# use wic image with ESP for u-boot, not ext4
75QB_DEFAULT_FSTYPE = "wic"
76
77PREFERRED_PROVIDER_virtual/bootloader = "u-boot"
78QB_DEFAULT_BIOS = "u-boot.bin"
79
80# let u-boot or EFI loader load kernel from ESP
81QB_DEFAULT_KERNEL = "none"
82
83# use virtio PCI rather than SCSI; u-boot lacks the support needed to find the ESP on SCSI
84QB_DRIVE_TYPE = "/dev/vd"
85""")
86 bitbake("virtual/bootloader %s" % image)
87
88 runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', image) or ""
89 with runqemu(image, ssh=False, runqemuparams='nographic kvm %s' % runqemu_params) as qemu:
90
91 # booting to the target and logging in worked; with KVM this should have been fast
92 cmd = "dmesg"
93 status, output = qemu.run_serial(cmd)
94 self.assertEqual(status, 1, msg=output)
95 # Machine is qemu
96 self.assertTrue("Machine model: linux,dummy-virt" in output, msg=output)
97 # with KVM enabled
98 self.assertTrue("KVM: hypervisor services detected" in output, msg=output)
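
The boot_patterns dict at the top of the file is what makes the first test work: runqemu's serial matching normally waits for a Linux login prompt, so the test points all three patterns at the autoboot banner and the "=>" u-boot prompt instead. A condensed sketch of the pattern (image name illustrative; note run_serial reports 1 on success, as the assertions above rely on):

    from oeqa.utils.commands import runqemu

    patterns = {
        'search_reached_prompt': "stop autoboot",  # banner printed before autoboot
        'search_login_succeeded': "=>",            # u-boot prompt stands in for login
        'search_cmd_finished': "=>",               # prompt returning marks command end
    }
    with runqemu('core-image-minimal', ssh=False, runqemuparams='nographic',
                 boot_patterns=patterns) as qemu:
        status, output = qemu.run_serial("version")
        assert status == 1 and "U-Boot" in output
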
diff --git a/meta/lib/oeqa/selftest/cases/uki.py b/meta/lib/oeqa/selftest/cases/uki.py
new file mode 100644
index 0000000000..9a1aa4e269
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/uki.py
@@ -0,0 +1,141 @@
1# Based on runqemu.py test file
2#
3# Copyright (c) 2017 Wind River Systems, Inc.
4#
5# SPDX-License-Identifier: MIT
6#
7
8from oeqa.selftest.case import OESelftestTestCase
9from oeqa.utils.commands import bitbake, runqemu, get_bb_var
10from oeqa.core.decorator.data import skipIfNotArch
11from oeqa.core.decorator import OETestTag
12import oe.types
13
14class UkiTest(OESelftestTestCase):
15 """Boot Unified Kernel Image (UKI) generated with uki.bbclass on UEFI firmware (omvf/edk2)"""
16
17 @skipIfNotArch(['i586', 'i686', 'x86_64'])
18 @OETestTag("runqemu")
19 def test_uki_boot_systemd(self):
20 """Build and boot into UEFI firmware (omvf/edk2), systemd-boot, initrd without systemd, rootfs with systemd"""
21 image = "core-image-minimal"
22 runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', image) or ""
23 cmd = "runqemu %s nographic serial wic ovmf" % (runqemu_params)
24 if oe.types.qemu_use_kvm(self.td.get('QEMU_USE_KVM', 0), self.td["TARGET_ARCH"]):
25 cmd += " kvm"
26
27 self.write_config("""
28# efi firmware must load systemd-boot, not grub
29EFI_PROVIDER = "systemd-boot"
30
31# image format must be wic, needs esp partition for firmware etc
32IMAGE_FSTYPES:pn-%s:append = " wic"
33WKS_FILE = "efi-uki-bootdisk.wks.in"
34
35# efi, uki and systemd features must be enabled
36INIT_MANAGER = "systemd"
37MACHINE_FEATURES:append = " efi"
38IMAGE_CLASSES:append:pn-core-image-minimal = " uki"
39
40# the uki also embeds an initrd
41INITRAMFS_IMAGE = "core-image-minimal-initramfs"
42
43# runqemu must not load kernel separately, it's in the uki
44QB_KERNEL_ROOT = ""
45QB_DEFAULT_KERNEL = "none"
46
47# boot command line provided via uki, not via bootloader
48UKI_CMDLINE = "rootwait root=LABEL=root console=${KERNEL_CONSOLE}"
49
50# disable kvm, as it breaks the boot
51QEMU_USE_KVM = ""
52
53IMAGE_CLASSES:remove = 'testimage'
54""" % (image))
55
56 uki_filename = get_bb_var('UKI_FILENAME', image)
57
58 bitbake(image + " ovmf")
59 with runqemu(image, ssh=False, launch_cmd=cmd) as qemu:
60 self.assertTrue(qemu.runner.logged, "Failed: %s" % cmd)
61
62 # Verify from efivars that firmware was:
63 # x86_64, qemux86_64, ovmf = edk2
64 cmd = "echo $( cat /sys/firmware/efi/efivars/LoaderFirmwareInfo-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep 'EDK II'"
65 status, output = qemu.run_serial(cmd)
66 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
67
68 # Check that systemd-boot was the loader
69 cmd = "echo $( cat /sys/firmware/efi/efivars/LoaderInfo-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep systemd-boot"
70 status, output = qemu.run_serial(cmd)
71 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
72
73 # Check that systemd-stub was used
74 cmd = "echo $( cat /sys/firmware/efi/efivars/StubInfo-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep systemd-stub"
75 status, output = qemu.run_serial(cmd)
76 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
77
78 # Check that the compiled uki file was booted into
79 cmd = "echo $( cat /sys/firmware/efi/efivars/LoaderEntrySelected-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep '%s'" % (uki_filename)
80 status, output = qemu.run_serial(cmd)
81 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
82
83 @skipIfNotArch(['i586', 'i686', 'x86_64'])
84 @OETestTag("runqemu")
85 def test_uki_sysvinit(self):
86 """Build and boot into UEFI firmware (omvf/edk2), systemd-boot, initrd with sysvinit, rootfs with sysvinit"""
87 config = """
88# efi firmware must load systemd-boot, not grub
89EFI_PROVIDER = "systemd-boot"
90
91# image format must be wic, needs esp partition for firmware etc
92IMAGE_FSTYPES:pn-core-image-base:append = " wic"
93WKS_FILE = "efi-uki-bootdisk.wks.in"
94
95# efi, uki and systemd features must be enabled
96MACHINE_FEATURES:append = " efi"
97IMAGE_CLASSES:append:pn-core-image-base = " uki"
98
99# the uki also embeds an initrd, without systemd or udev
100INITRAMFS_IMAGE = "core-image-initramfs-boot"
101
102# runqemu must not load kernel separately, it's in the uki
103QB_KERNEL_ROOT = ""
104QB_DEFAULT_KERNEL = "none"
105
106# boot command line provided via uki, not via bootloader
107UKI_CMDLINE = "rootwait root=LABEL=root console=${KERNEL_CONSOLE}"
108
109# disable kvm, as it breaks the boot
110QEMU_USE_KVM = ""
111
112IMAGE_CLASSES:remove = 'testimage'
113"""
114 self.append_config(config)
115 bitbake('core-image-base ovmf')
116 runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'core-image-base') or ""
117 uki_filename = get_bb_var('UKI_FILENAME', 'core-image-base')
118 self.remove_config(config)
119
120 with runqemu('core-image-base', ssh=False,
121 runqemuparams='%s slirp nographic ovmf' % (runqemu_params), image_fstype='wic') as qemu:
122 # Verify from efivars that firmware was:
123 # x86_64, qemux86_64, ovmf = edk2
124 cmd = "echo $( cat /sys/firmware/efi/efivars/LoaderFirmwareInfo-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep 'EDK II'"
125 status, output = qemu.run_serial(cmd)
126 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
127
128 # Check that systemd-boot was the loader
129 cmd = "echo $( cat /sys/firmware/efi/efivars/LoaderInfo-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep systemd-boot"
130 status, output = qemu.run_serial(cmd)
131 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
132
133 # Check that systemd-stub was used
134 cmd = "echo $( cat /sys/firmware/efi/efivars/StubInfo-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep systemd-stub"
135 status, output = qemu.run_serial(cmd)
136 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
137
138 # Check that the compiled uki file was booted into
139 cmd = "echo $( cat /sys/firmware/efi/efivars/LoaderEntrySelected-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f ) | grep '%s'" % (uki_filename)
140 status, output = qemu.run_serial(cmd)
141 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
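
All of the efivars probes above use the systemd boot loader interface, whose variables (LoaderInfo, StubInfo, LoaderEntrySelected, ...) are published under the vendor GUID 4a67b082-0a4c-41cf-b6c7-440b29bb8c4f. A rough sketch of reading one directly on the target, assuming efivarfs is mounted and the usual layout of a 4-byte attribute header followed by a UTF-16LE string:

    EFIVARS = "/sys/firmware/efi/efivars"
    LOADER_GUID = "4a67b082-0a4c-41cf-b6c7-440b29bb8c4f"

    def read_loader_var(name):
        with open("%s/%s-%s" % (EFIVARS, name, LOADER_GUID), "rb") as f:
            data = f.read()
        # Skip the 4-byte efivarfs attribute header and decode the payload.
        return data[4:].decode("utf-16-le").rstrip("\x00")

    print(read_loader_var("LoaderInfo"))  # e.g. "systemd-boot ..."
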
diff --git a/meta/lib/oeqa/selftest/cases/usergrouptests.py b/meta/lib/oeqa/selftest/cases/usergrouptests.py
new file mode 100644
index 0000000000..3c59b0f290
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/usergrouptests.py
@@ -0,0 +1,57 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8import shutil
9from oeqa.selftest.case import OESelftestTestCase
10from oeqa.utils.commands import bitbake
10from oeqa.utils.commands import bitbake, get_bb_var
11from oeqa.utils.commands import get_test_layer
13class UserGroupTests(OESelftestTestCase):
14 def test_group_from_dep_package(self):
15 self.logger.info("Building creategroup2")
16 bitbake(' creategroup2 creategroup1')
17 bitbake(' creategroup2 creategroup1 -c clean')
18 self.logger.info("Packaging creategroup2")
19 self.assertTrue(bitbake(' creategroup2 -c package'))
20
21 def test_add_task_between_p_sysroot_and_package(self):
22 # Test for YOCTO #14961
23 self.assertTrue(bitbake('useraddbadtask -C fetch'))
24
25 def test_postinst_order(self):
26 self.logger.info("Building dcreategroup")
27 self.assertTrue(bitbake(' dcreategroup'))
28
29 def test_static_useradd_from_dynamic(self):
30 metaselftestpath = get_test_layer()
31 self.logger.info("Building core-image-minimal to generate passwd/group file")
32 bitbake(' core-image-minimal')
33 self.logger.info("Setting up useradd-staticids")
34 repropassdir = os.path.join(metaselftestpath, "conf/include")
35 os.makedirs(repropassdir)
36 etcdir = os.path.join(get_bb_var("TMPDIR"), "work", \
37 get_bb_var("MACHINE").replace("-", "_") + "-poky-linux", "core-image-minimal/1.0/rootfs/etc")
38 shutil.copy(os.path.join(etcdir, "passwd"), os.path.join(repropassdir, "reproducible-passwd"))
39 shutil.copy(os.path.join(etcdir, "group"), os.path.join(repropassdir, "reproducible-group"))
40 # Copy the original local.conf
41 shutil.copyfile(os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf'), os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf.orig'))
42
43 self.write_config("USERADDEXTENSION = \"useradd-staticids\"")
44 self.write_config("USERADD_ERROR_DYNAMIC ??= \"error\"")
45 self.write_config("USERADD_UID_TABLES += \"conf/include/reproducible-passwd\"")
46 self.write_config("USERADD_GID_TABLES += \"conf/include/reproducible-group\"")
47 self.logger.info("Rebuild with staticids")
48 bitbake(' core-image-minimal')
49 shutil.copyfile(os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf.orig'), os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf'))
50 self.logger.info("Rebuild without staticids")
51 bitbake(' core-image-minimal')
52 self.write_config("USERADDEXTENSION = \"useradd-staticids\"")
53 self.write_config("USERADD_ERROR_DYNAMIC ??= \"error\"")
54 self.write_config("USERADD_UID_TABLES += \"files/static-passwd\"")
55 self.write_config("USERADD_GID_TABLES += \"files/static-group\"")
56 self.logger.info("Rebuild with other staticids")
57 self.assertTrue(bitbake(' core-image-minimal'))
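
The staticids round trip above reduces to four configuration lines; a minimal sketch of the equivalent fragment, written as a single write_config() call rather than one call per line (the table paths are the ones the test generates earlier):

    config = (
        'USERADDEXTENSION = "useradd-staticids"\n'
        # Fail the build if a recipe would still allocate a dynamic uid/gid.
        'USERADD_ERROR_DYNAMIC ??= "error"\n'
        # Tables captured from a previously built rootfs keep the ids stable.
        'USERADD_UID_TABLES += "conf/include/reproducible-passwd"\n'
        'USERADD_GID_TABLES += "conf/include/reproducible-group"\n'
    )
    self.write_config(config)
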
diff --git a/meta/lib/oeqa/selftest/cases/wic.py b/meta/lib/oeqa/selftest/cases/wic.py
index 2bf5cb9a86..680f99d381 100644
--- a/meta/lib/oeqa/selftest/cases/wic.py
+++ b/meta/lib/oeqa/selftest/cases/wic.py
@@ -11,39 +11,20 @@
11import os 11import os
12import sys 12import sys
13import unittest 13import unittest
14import hashlib
15import subprocess
14 16
15from glob import glob 17from glob import glob
16from shutil import rmtree, copy 18from shutil import rmtree, copy
17from functools import wraps, lru_cache
18from tempfile import NamedTemporaryFile 19from tempfile import NamedTemporaryFile
20from tempfile import TemporaryDirectory
19 21
20from oeqa.selftest.case import OESelftestTestCase 22from oeqa.selftest.case import OESelftestTestCase
23from oeqa.core.decorator import OETestTag
24from oeqa.core.decorator.data import skipIfNotArch
21from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu 25from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu
22 26
23 27
24@lru_cache(maxsize=32)
25def get_host_arch(recipe):
26 """A cached call to get_bb_var('HOST_ARCH', <recipe>)"""
27 return get_bb_var('HOST_ARCH', recipe)
28
29
30def only_for_arch(archs, image='core-image-minimal'):
31 """Decorator for wrapping test cases that can be run only for specific target
32 architectures. A list of compatible architectures is passed in `archs`.
33 Current architecture will be determined by parsing bitbake output for
34 `image` recipe.
35 """
36 def wrapper(func):
37 @wraps(func)
38 def wrapped_f(*args, **kwargs):
39 arch = get_host_arch(image)
40 if archs and arch not in archs:
41 raise unittest.SkipTest("Testcase arch dependency not met: %s" % arch)
42 return func(*args, **kwargs)
43 wrapped_f.__name__ = func.__name__
44 return wrapped_f
45 return wrapper
46
47def extract_files(debugfs_output): 28def extract_files(debugfs_output):
48 """ 29 """
49 extract file names from the output of debugfs -R 'ls -p', 30 extract file names from the output of debugfs -R 'ls -p',
@@ -77,22 +58,18 @@ class WicTestCase(OESelftestTestCase):
77 58
78 def setUpLocal(self): 59 def setUpLocal(self):
79 """This code is executed before each test method.""" 60 """This code is executed before each test method."""
80 self.resultdir = self.builddir + "/wic-tmp/" 61 self.resultdir = os.path.join(self.builddir, "wic-tmp")
81 super(WicTestCase, self).setUpLocal() 62 super(WicTestCase, self).setUpLocal()
82 63
83 # Do this here instead of in setUpClass as the base setUp does some 64 # Do this here instead of in setUpClass as the base setUp does some
84 # clean up which can result in the native tools built earlier in 65 # clean up which can result in the native tools built earlier in
85 # setUpClass being unavailable. 66 # setUpClass being unavailable.
86 if not WicTestCase.image_is_ready: 67 if not WicTestCase.image_is_ready:
87 if get_bb_var('USE_NLS') == 'yes': 68 if self.td['USE_NLS'] != 'yes':
88 bitbake('wic-tools') 69 self.skipTest('wic-tools needs USE_NLS=yes')
89 else:
90 self.skipTest('wic-tools cannot be built due its (intltool|gettext)-native dependency and NLS disable')
91 70
92 bitbake('core-image-minimal') 71 bitbake('wic-tools core-image-minimal core-image-minimal-mtdutils')
93 bitbake('core-image-minimal-mtdutils')
94 WicTestCase.image_is_ready = True 72 WicTestCase.image_is_ready = True
95
96 rmtree(self.resultdir, ignore_errors=True) 73 rmtree(self.resultdir, ignore_errors=True)
97 74
98 def tearDownLocal(self): 75 def tearDownLocal(self):
@@ -103,15 +80,13 @@ class WicTestCase(OESelftestTestCase):
103 def _get_image_env_path(self, image): 80 def _get_image_env_path(self, image):
104 """Generate and obtain the path to <image>.env""" 81 """Generate and obtain the path to <image>.env"""
105 if image not in WicTestCase.wicenv_cache: 82 if image not in WicTestCase.wicenv_cache:
106 self.assertEqual(0, bitbake('%s -c do_rootfs_wicenv' % image).status) 83 bitbake('%s -c do_rootfs_wicenv' % image)
107 bb_vars = get_bb_vars(['STAGING_DIR', 'MACHINE'], image) 84 stdir = get_bb_var('STAGING_DIR', image)
108 stdir = bb_vars['STAGING_DIR'] 85 machine = self.td["MACHINE"]
109 machine = bb_vars['MACHINE']
110 WicTestCase.wicenv_cache[image] = os.path.join(stdir, machine, 'imgdata') 86 WicTestCase.wicenv_cache[image] = os.path.join(stdir, machine, 'imgdata')
111 return WicTestCase.wicenv_cache[image] 87 return WicTestCase.wicenv_cache[image]
112 88
113class Wic(WicTestCase): 89class CLITests(OESelftestTestCase):
114
115 def test_version(self): 90 def test_version(self):
116 """Test wic --version""" 91 """Test wic --version"""
117 runCmd('wic --version') 92 runCmd('wic --version')
@@ -172,68 +147,136 @@ class Wic(WicTestCase):
172 """Test wic without command""" 147 """Test wic without command"""
173 self.assertEqual(1, runCmd('wic', ignore_status=True).status) 148 self.assertEqual(1, runCmd('wic', ignore_status=True).status)
174 149
150class Wic(WicTestCase):
151 def test_skip_kernel_install(self):
152 """Test the functionality of not installing the kernel in the boot directory using the wic plugin"""
153 # create a temporary file for the WKS content
154 with NamedTemporaryFile("w", suffix=".wks") as wks:
155 wks.write(
156 'part --source bootimg_efi '
157 '--sourceparams="loader=grub-efi,install-kernel-into-boot-dir=false" '
158 '--label boot --active\n'
159 )
160 wks.flush()
161 # create a temporary directory to extract the disk image to
162 with TemporaryDirectory() as tmpdir:
163 img = 'core-image-minimal'
164 # build the image using the WKS file
165 cmd = "wic create %s -e %s -o %s" % (
166 wks.name, img, self.resultdir)
167 runCmd(cmd)
168 wksname = os.path.splitext(os.path.basename(wks.name))[0]
169 out = glob(os.path.join(
170 self.resultdir, "%s-*.direct" % wksname))
171 self.assertEqual(1, len(out))
172 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
173 # extract the content of the disk image to the temporary directory
174 cmd = "wic cp %s:1 %s -n %s" % (out[0], tmpdir, sysroot)
175 runCmd(cmd)
176 # check if the kernel is installed or not
177 kimgtype = get_bb_var('KERNEL_IMAGETYPE', img)
178 for file in os.listdir(tmpdir):
179 if file == kimgtype:
180 raise AssertionError(
181 "The kernel image '{}' was found in the partition".format(kimgtype)
182 )
183
184 def test_kernel_install(self):
185 """Test the installation of the kernel to the boot directory in the wic plugin"""
186 # create a temporary file for the WKS content
187 with NamedTemporaryFile("w", suffix=".wks") as wks:
188 wks.write(
189 'part --source bootimg_efi '
190 '--sourceparams="loader=grub-efi,install-kernel-into-boot-dir=true" '
191 '--label boot --active\n'
192 )
193 wks.flush()
194 # create a temporary directory to extract the disk image to
195 with TemporaryDirectory() as tmpdir:
196 img = 'core-image-minimal'
197 # build the image using the WKS file
198 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
199 runCmd(cmd)
200 wksname = os.path.splitext(os.path.basename(wks.name))[0]
201 out = glob(os.path.join(self.resultdir, "%s-*.direct" % wksname))
202 self.assertEqual(1, len(out))
203 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
204 # extract the content of the disk image to the temporary directory
205 cmd = "wic cp %s:1 %s -n %s" % (out[0], tmpdir, sysroot)
206 runCmd(cmd)
207 # check if the kernel is installed or not
208 kimgtype = get_bb_var('KERNEL_IMAGETYPE', img)
209 found = False
210 for file in os.listdir(tmpdir):
211 if file == kimgtype:
212 found = True
213 break
214 self.assertTrue(
215 found, "The kernel image '{}' was not found in the boot partition".format(kimgtype)
216 )
217
175 def test_build_image_name(self): 218 def test_build_image_name(self):
176 """Test wic create wictestdisk --image-name=core-image-minimal""" 219 """Test wic create wictestdisk --image-name=core-image-minimal"""
177 cmd = "wic create wictestdisk --image-name=core-image-minimal -o %s" % self.resultdir 220 cmd = "wic create wictestdisk --image-name=core-image-minimal -o %s" % self.resultdir
178 runCmd(cmd) 221 runCmd(cmd)
179 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) 222 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
180 223
181 @only_for_arch(['i586', 'i686', 'x86_64']) 224 @skipIfNotArch(['i586', 'i686', 'x86_64'])
182 def test_gpt_image(self): 225 def test_gpt_image(self):
183 """Test creation of core-image-minimal with gpt table and UUID boot""" 226 """Test creation of core-image-minimal with gpt table and UUID boot"""
184 cmd = "wic create directdisk-gpt --image-name core-image-minimal -o %s" % self.resultdir 227 cmd = "wic create directdisk-gpt --image-name core-image-minimal -o %s" % self.resultdir
185 runCmd(cmd) 228 runCmd(cmd)
186 self.assertEqual(1, len(glob(self.resultdir + "directdisk-*.direct"))) 229 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "directdisk-*.direct"))))
187 230
188 @only_for_arch(['i586', 'i686', 'x86_64']) 231 @skipIfNotArch(['i586', 'i686', 'x86_64'])
189 def test_iso_image(self): 232 def test_iso_image(self):
190 """Test creation of hybrid iso image with legacy and EFI boot""" 233 """Test creation of hybrid iso image with legacy and EFI boot"""
191 config = 'INITRAMFS_IMAGE = "core-image-minimal-initramfs"\n'\ 234 config = 'INITRAMFS_IMAGE = "core-image-minimal-initramfs"\n'\
192 'MACHINE_FEATURES_append = " efi"\n'\ 235 'MACHINE_FEATURES:append = " efi"\n'\
193 'DEPENDS_pn-core-image-minimal += "syslinux"\n' 236 'DEPENDS:pn-core-image-minimal += "syslinux"\n'
194 self.append_config(config) 237 self.append_config(config)
195 bitbake('core-image-minimal core-image-minimal-initramfs') 238 bitbake('core-image-minimal core-image-minimal-initramfs')
196 self.remove_config(config) 239 self.remove_config(config)
197 cmd = "wic create mkhybridiso --image-name core-image-minimal -o %s" % self.resultdir 240 cmd = "wic create mkhybridiso --image-name core-image-minimal -o %s" % self.resultdir
198 runCmd(cmd) 241 runCmd(cmd)
199 self.assertEqual(1, len(glob(self.resultdir + "HYBRID_ISO_IMG-*.direct"))) 242 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "HYBRID_ISO_IMG-*.direct"))))
200 self.assertEqual(1, len(glob(self.resultdir + "HYBRID_ISO_IMG-*.iso"))) 243 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "HYBRID_ISO_IMG-*.iso"))))
201 244
202 @only_for_arch(['i586', 'i686', 'x86_64']) 245 @skipIfNotArch(['i586', 'i686', 'x86_64'])
203 def test_qemux86_directdisk(self): 246 def test_qemux86_directdisk(self):
204 """Test creation of qemux-86-directdisk image""" 247 """Test creation of qemux-86-directdisk image"""
205 cmd = "wic create qemux86-directdisk -e core-image-minimal -o %s" % self.resultdir 248 cmd = "wic create qemux86-directdisk -e core-image-minimal -o %s" % self.resultdir
206 runCmd(cmd) 249 runCmd(cmd)
207 self.assertEqual(1, len(glob(self.resultdir + "qemux86-directdisk-*direct"))) 250 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "qemux86-directdisk-*direct"))))
208 251
209 @only_for_arch(['i586', 'i686', 'x86_64']) 252 @skipIfNotArch(['i586', 'i686', 'x86_64', 'aarch64'])
210 def test_mkefidisk(self): 253 def test_mkefidisk(self):
211 """Test creation of mkefidisk image""" 254 """Test creation of mkefidisk image"""
212 cmd = "wic create mkefidisk -e core-image-minimal -o %s" % self.resultdir 255 cmd = "wic create mkefidisk -e core-image-minimal -o %s" % self.resultdir
213 runCmd(cmd) 256 runCmd(cmd)
214 self.assertEqual(1, len(glob(self.resultdir + "mkefidisk-*direct"))) 257 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "mkefidisk-*direct"))))
215 258
216 @only_for_arch(['i586', 'i686', 'x86_64']) 259 @skipIfNotArch(['i586', 'i686', 'x86_64'])
217 def test_bootloader_config(self): 260 def test_bootloader_config(self):
218 """Test creation of directdisk-bootloader-config image""" 261 """Test creation of directdisk-bootloader-config image"""
219 config = 'DEPENDS_pn-core-image-minimal += "syslinux"\n' 262 config = 'DEPENDS:pn-core-image-minimal += "syslinux"\n'
220 self.append_config(config) 263 self.append_config(config)
221 bitbake('core-image-minimal') 264 bitbake('core-image-minimal')
222 self.remove_config(config) 265 self.remove_config(config)
223 cmd = "wic create directdisk-bootloader-config -e core-image-minimal -o %s" % self.resultdir 266 cmd = "wic create directdisk-bootloader-config -e core-image-minimal -o %s" % self.resultdir
224 runCmd(cmd) 267 runCmd(cmd)
225 self.assertEqual(1, len(glob(self.resultdir + "directdisk-bootloader-config-*direct"))) 268 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "directdisk-bootloader-config-*direct"))))
226 269
227 @only_for_arch(['i586', 'i686', 'x86_64']) 270 @skipIfNotArch(['i586', 'i686', 'x86_64', 'aarch64'])
228 def test_systemd_bootdisk(self): 271 def test_systemd_bootdisk(self):
229 """Test creation of systemd-bootdisk image""" 272 """Test creation of systemd-bootdisk image"""
230 config = 'MACHINE_FEATURES_append = " efi"\n' 273 config = 'MACHINE_FEATURES:append = " efi"\n'
231 self.append_config(config) 274 self.append_config(config)
232 bitbake('core-image-minimal') 275 bitbake('core-image-minimal')
233 self.remove_config(config) 276 self.remove_config(config)
234 cmd = "wic create systemd-bootdisk -e core-image-minimal -o %s" % self.resultdir 277 cmd = "wic create systemd-bootdisk -e core-image-minimal -o %s" % self.resultdir
235 runCmd(cmd) 278 runCmd(cmd)
236 self.assertEqual(1, len(glob(self.resultdir + "systemd-bootdisk-*direct"))) 279 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "systemd-bootdisk-*direct"))))
237 280
238 def test_efi_bootpart(self): 281 def test_efi_bootpart(self):
239 """Test creation of efi-bootpart image""" 282 """Test creation of efi-bootpart image"""
@@ -242,7 +285,7 @@ class Wic(WicTestCase):
242 self.append_config('IMAGE_EFI_BOOT_FILES = "%s;kernel"\n' % kimgtype) 285 self.append_config('IMAGE_EFI_BOOT_FILES = "%s;kernel"\n' % kimgtype)
243 runCmd(cmd) 286 runCmd(cmd)
244 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') 287 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
245 images = glob(self.resultdir + "mkefidisk-*.direct") 288 images = glob(os.path.join(self.resultdir, "mkefidisk-*.direct"))
246 result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot)) 289 result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot))
247 self.assertIn("kernel",result.output) 290 self.assertIn("kernel",result.output)
248 291
@@ -252,14 +295,15 @@ class Wic(WicTestCase):
252 kimgtype = get_bb_var('KERNEL_IMAGETYPE', 'core-image-minimal') 295 kimgtype = get_bb_var('KERNEL_IMAGETYPE', 'core-image-minimal')
253 self.write_config('IMAGE_BOOT_FILES = "%s"\n' % kimgtype) 296 self.write_config('IMAGE_BOOT_FILES = "%s"\n' % kimgtype)
254 runCmd(cmd) 297 runCmd(cmd)
255 self.assertEqual(1, len(glob(self.resultdir + "sdimage-bootpart-*direct"))) 298 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "sdimage-bootpart-*direct"))))
256 299
257 @only_for_arch(['i586', 'i686', 'x86_64']) 300 # TODO this doesn't have to be x86-specific
301 @skipIfNotArch(['i586', 'i686', 'x86_64'])
258 def test_default_output_dir(self): 302 def test_default_output_dir(self):
259 """Test default output location""" 303 """Test default output location"""
260 for fname in glob("directdisk-*.direct"): 304 for fname in glob("directdisk-*.direct"):
261 os.remove(fname) 305 os.remove(fname)
262 config = 'DEPENDS_pn-core-image-minimal += "syslinux"\n' 306 config = 'DEPENDS:pn-core-image-minimal += "syslinux"\n'
263 self.append_config(config) 307 self.append_config(config)
264 bitbake('core-image-minimal') 308 bitbake('core-image-minimal')
265 self.remove_config(config) 309 self.remove_config(config)
@@ -267,7 +311,7 @@ class Wic(WicTestCase):
267 runCmd(cmd) 311 runCmd(cmd)
268 self.assertEqual(1, len(glob("directdisk-*.direct"))) 312 self.assertEqual(1, len(glob("directdisk-*.direct")))
269 313
270 @only_for_arch(['i586', 'i686', 'x86_64']) 314 @skipIfNotArch(['i586', 'i686', 'x86_64'])
271 def test_build_artifacts(self): 315 def test_build_artifacts(self):
272 """Test wic create directdisk providing all artifacts.""" 316 """Test wic create directdisk providing all artifacts."""
273 bb_vars = get_bb_vars(['STAGING_DATADIR', 'RECIPE_SYSROOT_NATIVE'], 317 bb_vars = get_bb_vars(['STAGING_DATADIR', 'RECIPE_SYSROOT_NATIVE'],
@@ -282,28 +326,28 @@ class Wic(WicTestCase):
282 "-n %(recipe_sysroot_native)s " 326 "-n %(recipe_sysroot_native)s "
283 "-r %(image_rootfs)s " 327 "-r %(image_rootfs)s "
284 "-o %(resultdir)s" % bbvars) 328 "-o %(resultdir)s" % bbvars)
285 self.assertEqual(1, len(glob(self.resultdir + "directdisk-*.direct"))) 329 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "directdisk-*.direct"))))
286 330
287 def test_compress_gzip(self): 331 def test_compress_gzip(self):
288 """Test compressing an image with gzip""" 332 """Test compressing an image with gzip"""
289 runCmd("wic create wictestdisk " 333 runCmd("wic create wictestdisk "
290 "--image-name core-image-minimal " 334 "--image-name core-image-minimal "
291 "-c gzip -o %s" % self.resultdir) 335 "-c gzip -o %s" % self.resultdir)
292 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct.gz"))) 336 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct.gz"))))
293 337
294 def test_compress_bzip2(self): 338 def test_compress_bzip2(self):
295 """Test compressing an image with bzip2""" 339 """Test compressing an image with bzip2"""
296 runCmd("wic create wictestdisk " 340 runCmd("wic create wictestdisk "
297 "--image-name=core-image-minimal " 341 "--image-name=core-image-minimal "
298 "-c bzip2 -o %s" % self.resultdir) 342 "-c bzip2 -o %s" % self.resultdir)
299 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct.bz2"))) 343 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct.bz2"))))
300 344
301 def test_compress_xz(self): 345 def test_compress_xz(self):
302 """Test compressing an image with xz""" 346 """Test compressing an image with xz"""
303 runCmd("wic create wictestdisk " 347 runCmd("wic create wictestdisk "
304 "--image-name=core-image-minimal " 348 "--image-name=core-image-minimal "
305 "--compress-with=xz -o %s" % self.resultdir) 349 "--compress-with=xz -o %s" % self.resultdir)
306 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct.xz"))) 350 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct.xz"))))
307 351
308 def test_wrong_compressor(self): 352 def test_wrong_compressor(self):
309 """Test how wic breaks if wrong compressor is provided""" 353 """Test how wic breaks if wrong compressor is provided"""
@@ -317,23 +361,23 @@ class Wic(WicTestCase):
317 runCmd("wic create wictestdisk " 361 runCmd("wic create wictestdisk "
318 "--image-name=core-image-minimal " 362 "--image-name=core-image-minimal "
319 "-D -o %s" % self.resultdir) 363 "-D -o %s" % self.resultdir)
320 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) 364 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
321 self.assertEqual(1, len(glob(self.resultdir + "tmp.wic*"))) 365 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "tmp.wic*"))))
322 366
323 def test_debug_long(self): 367 def test_debug_long(self):
324 """Test --debug option""" 368 """Test --debug option"""
325 runCmd("wic create wictestdisk " 369 runCmd("wic create wictestdisk "
326 "--image-name=core-image-minimal " 370 "--image-name=core-image-minimal "
327 "--debug -o %s" % self.resultdir) 371 "--debug -o %s" % self.resultdir)
328 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) 372 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
329 self.assertEqual(1, len(glob(self.resultdir + "tmp.wic*"))) 373 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "tmp.wic*"))))
330 374
331 def test_skip_build_check_short(self): 375 def test_skip_build_check_short(self):
332 """Test -s option""" 376 """Test -s option"""
333 runCmd("wic create wictestdisk " 377 runCmd("wic create wictestdisk "
334 "--image-name=core-image-minimal " 378 "--image-name=core-image-minimal "
335 "-s -o %s" % self.resultdir) 379 "-s -o %s" % self.resultdir)
336 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) 380 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
337 381
338 def test_skip_build_check_long(self): 382 def test_skip_build_check_long(self):
339 """Test --skip-build-check option""" 383 """Test --skip-build-check option"""
@@ -341,14 +385,14 @@ class Wic(WicTestCase):
341 "--image-name=core-image-minimal " 385 "--image-name=core-image-minimal "
342 "--skip-build-check " 386 "--skip-build-check "
343 "--outdir %s" % self.resultdir) 387 "--outdir %s" % self.resultdir)
344 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) 388 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
345 389
346 def test_build_rootfs_short(self): 390 def test_build_rootfs_short(self):
347 """Test -f option""" 391 """Test -f option"""
348 runCmd("wic create wictestdisk " 392 runCmd("wic create wictestdisk "
349 "--image-name=core-image-minimal " 393 "--image-name=core-image-minimal "
350 "-f -o %s" % self.resultdir) 394 "-f -o %s" % self.resultdir)
351 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) 395 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
352 396
353 def test_build_rootfs_long(self): 397 def test_build_rootfs_long(self):
354 """Test --build-rootfs option""" 398 """Test --build-rootfs option"""
@@ -356,9 +400,10 @@ class Wic(WicTestCase):
356 "--image-name=core-image-minimal " 400 "--image-name=core-image-minimal "
357 "--build-rootfs " 401 "--build-rootfs "
358 "--outdir %s" % self.resultdir) 402 "--outdir %s" % self.resultdir)
359 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) 403 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
360 404
361 @only_for_arch(['i586', 'i686', 'x86_64']) 405 # TODO this doesn't have to be x86-specific
406 @skipIfNotArch(['i586', 'i686', 'x86_64'])
362 def test_rootfs_indirect_recipes(self): 407 def test_rootfs_indirect_recipes(self):
363 """Test usage of rootfs plugin with rootfs recipes""" 408 """Test usage of rootfs plugin with rootfs recipes"""
364 runCmd("wic create directdisk-multi-rootfs " 409 runCmd("wic create directdisk-multi-rootfs "
@@ -366,9 +411,10 @@ class Wic(WicTestCase):
366 "--rootfs rootfs1=core-image-minimal " 411 "--rootfs rootfs1=core-image-minimal "
367 "--rootfs rootfs2=core-image-minimal " 412 "--rootfs rootfs2=core-image-minimal "
368 "--outdir %s" % self.resultdir) 413 "--outdir %s" % self.resultdir)
369 self.assertEqual(1, len(glob(self.resultdir + "directdisk-multi-rootfs*.direct"))) 414 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "directdisk-multi-rootfs*.direct"))))
370 415
371 @only_for_arch(['i586', 'i686', 'x86_64']) 416 # TODO this doesn't have to be x86-specific
417 @skipIfNotArch(['i586', 'i686', 'x86_64'])
372 def test_rootfs_artifacts(self): 418 def test_rootfs_artifacts(self):
373 """Test usage of rootfs plugin with rootfs paths""" 419 """Test usage of rootfs plugin with rootfs paths"""
374 bb_vars = get_bb_vars(['STAGING_DATADIR', 'RECIPE_SYSROOT_NATIVE'], 420 bb_vars = get_bb_vars(['STAGING_DATADIR', 'RECIPE_SYSROOT_NATIVE'],
@@ -385,7 +431,7 @@ class Wic(WicTestCase):
385 "--rootfs-dir rootfs1=%(image_rootfs)s " 431 "--rootfs-dir rootfs1=%(image_rootfs)s "
386 "--rootfs-dir rootfs2=%(image_rootfs)s " 432 "--rootfs-dir rootfs2=%(image_rootfs)s "
387 "--outdir %(resultdir)s" % bbvars) 433 "--outdir %(resultdir)s" % bbvars)
388 self.assertEqual(1, len(glob(self.resultdir + "%(wks)s-*.direct" % bbvars))) 434 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "%(wks)s-*.direct" % bbvars))))
389 435
390 def test_exclude_path(self): 436 def test_exclude_path(self):
391 """Test --exclude-path wks option.""" 437 """Test --exclude-path wks option."""
@@ -400,19 +446,20 @@ class Wic(WicTestCase):
400 wks.write(""" 446 wks.write("""
401part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path usr 447part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path usr
402part /usr --source rootfs --ondisk mmcblk0 --fstype=ext4 --rootfs-dir %s/usr 448part /usr --source rootfs --ondisk mmcblk0 --fstype=ext4 --rootfs-dir %s/usr
403part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --rootfs-dir %s/usr""" 449part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --rootfs-dir %s/usr
404 % (rootfs_dir, rootfs_dir)) 450part /mnt --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/whoami --rootfs-dir %s/usr"""
451 % (rootfs_dir, rootfs_dir, rootfs_dir))
405 runCmd("wic create %s -e core-image-minimal -o %s" \ 452 runCmd("wic create %s -e core-image-minimal -o %s" \
406 % (wks_file, self.resultdir)) 453 % (wks_file, self.resultdir))
407 454
408 os.remove(wks_file) 455 os.remove(wks_file)
409 wicout = glob(self.resultdir + "%s-*direct" % 'temp') 456 wicout = glob(os.path.join(self.resultdir, "%s-*direct" % 'temp'))
410 self.assertEqual(1, len(wicout)) 457 self.assertEqual(1, len(wicout))
411 458
412 wicimg = wicout[0] 459 wicimg = wicout[0]
413 460
414 # verify partition size with wic 461 # verify partition size with wic
415 res = runCmd("parted -m %s unit b p 2>/dev/null" % wicimg) 462 res = runCmd("parted -m %s unit b p" % wicimg, stderr=subprocess.PIPE)
416 463
417 # parse parted output which looks like this: 464 # parse parted output which looks like this:
418 # BYT;\n 465 # BYT;\n
@@ -420,9 +467,9 @@ part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --r
420 # 1:0.00MiB:200MiB:200MiB:ext4::;\n 467 # 1:0.00MiB:200MiB:200MiB:ext4::;\n
421 partlns = res.output.splitlines()[2:] 468 partlns = res.output.splitlines()[2:]
422 469
423 self.assertEqual(3, len(partlns)) 470 self.assertEqual(4, len(partlns))
424 471
425 for part in [1, 2, 3]: 472 for part in [1, 2, 3, 4]:
426 part_file = os.path.join(self.resultdir, "selftest_img.part%d" % part) 473 part_file = os.path.join(self.resultdir, "selftest_img.part%d" % part)
427 partln = partlns[part-1].split(":") 474 partln = partlns[part-1].split(":")
428 self.assertEqual(7, len(partln)) 475 self.assertEqual(7, len(partln))
@@ -433,16 +480,16 @@ part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --r
433 480
434 # Test partition 1, should contain the normal root directories, except 481 # Test partition 1, should contain the normal root directories, except
435 # /usr. 482 # /usr.
436 res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % \ 483 res = runCmd("debugfs -R 'ls -p' %s" % \
437 os.path.join(self.resultdir, "selftest_img.part1")) 484 os.path.join(self.resultdir, "selftest_img.part1"), stderr=subprocess.PIPE)
438 files = extract_files(res.output) 485 files = extract_files(res.output)
439 self.assertIn("etc", files) 486 self.assertIn("etc", files)
440 self.assertNotIn("usr", files) 487 self.assertNotIn("usr", files)
441 488
442 # Partition 2, should contain common directories for /usr, not root 489 # Partition 2, should contain common directories for /usr, not root
443 # directories. 490 # directories.
444 res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % \ 491 res = runCmd("debugfs -R 'ls -p' %s" % \
445 os.path.join(self.resultdir, "selftest_img.part2")) 492 os.path.join(self.resultdir, "selftest_img.part2"), stderr=subprocess.PIPE)
446 files = extract_files(res.output) 493 files = extract_files(res.output)
447 self.assertNotIn("etc", files) 494 self.assertNotIn("etc", files)
448 self.assertNotIn("usr", files) 495 self.assertNotIn("usr", files)
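
Every content assertion in this test funnels through extract_files() over the output of debugfs -R 'ls -p'. A rough sketch of that parsing, assuming debugfs's slash-delimited record format (/inode/mode/uid/gid/name/size/):

    import subprocess

    def list_partition(debugfs, image):
        out = subprocess.run([debugfs, "-R", "ls -p", image],
                             capture_output=True, text=True).stdout
        names = []
        for line in out.splitlines():
            fields = line.split('/')
            if len(fields) >= 7:
                names.append(fields[5])  # field 5 carries the file name
        return names
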
@@ -450,27 +497,78 @@ part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --r
450 497
451 # Partition 3, should contain the same as partition 2, including the bin 498 # Partition 3, should contain the same as partition 2, including the bin
452 # directory, but not the files inside it. 499 # directory, but not the files inside it.
453 res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % \ 500 res = runCmd("debugfs -R 'ls -p' %s" % \
454 os.path.join(self.resultdir, "selftest_img.part3")) 501 os.path.join(self.resultdir, "selftest_img.part3"), stderr=subprocess.PIPE)
455 files = extract_files(res.output) 502 files = extract_files(res.output)
456 self.assertNotIn("etc", files) 503 self.assertNotIn("etc", files)
457 self.assertNotIn("usr", files) 504 self.assertNotIn("usr", files)
458 self.assertIn("share", files) 505 self.assertIn("share", files)
459 self.assertIn("bin", files) 506 self.assertIn("bin", files)
460 res = runCmd("debugfs -R 'ls -p bin' %s 2>/dev/null" % \ 507 res = runCmd("debugfs -R 'ls -p bin' %s" % \
461 os.path.join(self.resultdir, "selftest_img.part3")) 508 os.path.join(self.resultdir, "selftest_img.part3"), stderr=subprocess.PIPE)
462 files = extract_files(res.output) 509 files = extract_files(res.output)
463 self.assertIn(".", files) 510 self.assertIn(".", files)
464 self.assertIn("..", files) 511 self.assertIn("..", files)
465 self.assertEqual(2, len(files)) 512 self.assertEqual(2, len(files))
466 513
467 for part in [1, 2, 3]: 514 # Partition 4, should contain the same as partition 2, including the bin
515 # directory, but not whoami (a symlink to busybox.nosuid) inside it.
516 res = runCmd("debugfs -R 'ls -p' %s" % \
517 os.path.join(self.resultdir, "selftest_img.part4"), stderr=subprocess.PIPE)
518 files = extract_files(res.output)
519 self.assertNotIn("etc", files)
520 self.assertNotIn("usr", files)
521 self.assertIn("share", files)
522 self.assertIn("bin", files)
523 res = runCmd("debugfs -R 'ls -p bin' %s" % \
524 os.path.join(self.resultdir, "selftest_img.part4"), stderr=subprocess.PIPE)
525 files = extract_files(res.output)
526 self.assertIn(".", files)
527 self.assertIn("..", files)
528 self.assertIn("who", files)
529 self.assertNotIn("whoami", files)
530
531 for part in [1, 2, 3, 4]:
468 part_file = os.path.join(self.resultdir, "selftest_img.part%d" % part) 532 part_file = os.path.join(self.resultdir, "selftest_img.part%d" % part)
469 os.remove(part_file) 533 os.remove(part_file)
470 534
471 finally: 535 finally:
472 os.environ['PATH'] = oldpath 536 os.environ['PATH'] = oldpath
473 537
538 def test_exclude_path_with_extra_space(self):
539 """Test having --exclude-path with IMAGE_ROOTFS_EXTRA_SPACE. [Yocto #15555]"""
540
541 with NamedTemporaryFile("w", suffix=".wks") as wks:
542 wks.writelines(
543 ['bootloader --ptable gpt\n',
544 'part /boot --size=100M --active --fstype=ext4 --label boot\n',
545 'part / --source rootfs --fstype=ext4 --label root --exclude-path boot/\n'])
546 wks.flush()
547 config = 'IMAGE_ROOTFS_EXTRA_SPACE = "500000"\n'\
548 'DEPENDS:pn-core-image-minimal += "wic-tools"\n'\
549 'IMAGE_FSTYPES += "wic ext4"\n'\
550 'WKS_FILE = "%s"\n' % wks.name
551 self.append_config(config)
552 bitbake('core-image-minimal')
553
554 """
555 the output of "wic ls <image>.wic" will look something like:
556 Num Start End Size Fstype
557 1 17408 136332287 136314880 ext4
558 2 136332288 171464703 35132416 ext4
559 we are looking for the size of partition 2,
560 i.e. in this case the number 35,132,416.
561 Without the fix the size will be around 85,403,648;
562 with the fix it should be around 799,960,064.
563 """
564 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'MACHINE'], 'core-image-minimal')
565 deploy_dir = bb_vars['DEPLOY_DIR_IMAGE']
566 machine = bb_vars['MACHINE']
567 nativesysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
568 wicout = glob(os.path.join(deploy_dir, "core-image-minimal-%s.rootfs-*.wic" % machine))[0]
569 size_of_root_partition = int(runCmd("wic ls %s --native-sysroot %s" % (wicout, nativesysroot)).output.split('\n')[2].split()[3])
570 self.assertGreater(size_of_root_partition, 500000000)
571
474 def test_include_path(self): 572 def test_include_path(self):
475 """Test --include-path wks option.""" 573 """Test --include-path wks option."""
476 574
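
The size check in test_exclude_path_with_extra_space screen-scrapes `wic ls`; a small helper sketch of that parse, assuming the column layout shown in the comment above (Num Start End Size Fstype):

    def partition_size(wic_ls_output, num):
        # Line 0 is the header, so partition N sits on line N; column 3 is Size.
        return int(wic_ls_output.splitlines()[num].split()[3])

    # size_of_root_partition = partition_size(runCmd(cmd).output, 2)
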
@@ -496,13 +594,13 @@ part /part2 --source rootfs --ondisk mmcblk0 --fstype=ext4 --include-path %s"""
496 part2 = glob(os.path.join(self.resultdir, 'temp-*.direct.p2'))[0] 594 part2 = glob(os.path.join(self.resultdir, 'temp-*.direct.p2'))[0]
497 595
498 # Test partition 1, should not contain 'test-file' 596 # Test partition 1, should not contain 'test-file'
499 res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % (part1)) 597 res = runCmd("debugfs -R 'ls -p' %s" % (part1), stderr=subprocess.PIPE)
500 files = extract_files(res.output) 598 files = extract_files(res.output)
501 self.assertNotIn('test-file', files) 599 self.assertNotIn('test-file', files)
502 self.assertEqual(True, files_own_by_root(res.output)) 600 self.assertEqual(True, files_own_by_root(res.output))
503 601
504 # Test partition 2, should contain 'test-file' 602 # Test partition 2, should contain 'test-file'
505 res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % (part2)) 603 res = runCmd("debugfs -R 'ls -p' %s" % (part2), stderr=subprocess.PIPE)
506 files = extract_files(res.output) 604 files = extract_files(res.output)
507 self.assertIn('test-file', files) 605 self.assertIn('test-file', files)
508 self.assertEqual(True, files_own_by_root(res.output)) 606 self.assertEqual(True, files_own_by_root(res.output))
@@ -531,12 +629,12 @@ part / --source rootfs --fstype=ext4 --include-path %s --include-path core-imag
531 629
532 part1 = glob(os.path.join(self.resultdir, 'temp-*.direct.p1'))[0] 630 part1 = glob(os.path.join(self.resultdir, 'temp-*.direct.p1'))[0]
533 631
534 res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % (part1)) 632 res = runCmd("debugfs -R 'ls -p' %s" % (part1), stderr=subprocess.PIPE)
535 files = extract_files(res.output) 633 files = extract_files(res.output)
536 self.assertIn('test-file', files) 634 self.assertIn('test-file', files)
537 self.assertEqual(True, files_own_by_root(res.output)) 635 self.assertEqual(True, files_own_by_root(res.output))
538 636
539 res = runCmd("debugfs -R 'ls -p /export/etc/' %s 2>/dev/null" % (part1)) 637 res = runCmd("debugfs -R 'ls -p /export/etc/' %s" % (part1), stderr=subprocess.PIPE)
540 files = extract_files(res.output) 638 files = extract_files(res.output)
541 self.assertIn('passwd', files) 639 self.assertIn('passwd', files)
542 self.assertEqual(True, files_own_by_root(res.output)) 640 self.assertEqual(True, files_own_by_root(res.output))
@@ -623,7 +721,7 @@ part /etc --source rootfs --fstype=ext4 --change-directory=etc
623 % (wks_file, self.resultdir)) 721 % (wks_file, self.resultdir))
624 722
625 for part in glob(os.path.join(self.resultdir, 'temp-*.direct.p*')): 723 for part in glob(os.path.join(self.resultdir, 'temp-*.direct.p*')):
626 res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % (part)) 724 res = runCmd("debugfs -R 'ls -p' %s" % (part), stderr=subprocess.PIPE)
627 self.assertEqual(True, files_own_by_root(res.output)) 725 self.assertEqual(True, files_own_by_root(res.output))
628 726
629 config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "%s"\n' % wks_file 727 config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "%s"\n' % wks_file
@@ -633,7 +731,7 @@ part /etc --source rootfs --fstype=ext4 --change-directory=etc
633 731
634 # check each partition for permission 732 # check each partition for permission
635 for part in glob(os.path.join(tmpdir, 'temp-*.direct.p*')): 733 for part in glob(os.path.join(tmpdir, 'temp-*.direct.p*')):
636 res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % (part)) 734 res = runCmd("debugfs -R 'ls -p' %s" % (part), stderr=subprocess.PIPE)
637 self.assertTrue(files_own_by_root(res.output) 735 self.assertTrue(files_own_by_root(res.output)
638 ,msg='Files permission incorrect using wks set "%s"' % test) 736 ,msg='Files permission incorrect using wks set "%s"' % test)
639 737
@@ -661,7 +759,7 @@ part /etc --source rootfs --fstype=ext4 --change-directory=etc
661 759
662 part1 = glob(os.path.join(self.resultdir, 'temp-*.direct.p1'))[0] 760 part1 = glob(os.path.join(self.resultdir, 'temp-*.direct.p1'))[0]
663 761
664 res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % (part1)) 762 res = runCmd("debugfs -R 'ls -p' %s" % (part1), stderr=subprocess.PIPE)
665 files = extract_files(res.output) 763 files = extract_files(res.output)
666 self.assertIn('passwd', files) 764 self.assertIn('passwd', files)
667 765
@@ -686,21 +784,185 @@ part /etc --source rootfs --fstype=ext4 --change-directory=etc
686 % (wks_file, self.resultdir), ignore_status=True).status) 784 % (wks_file, self.resultdir), ignore_status=True).status)
687 os.remove(wks_file) 785 os.remove(wks_file)
688 786
787 def test_no_fstab_update(self):
788 """Test --no-fstab-update wks option."""
789
790 oldpath = os.environ['PATH']
791 os.environ['PATH'] = get_bb_var("PATH", "wic-tools")
792
793 # Get stock fstab from base-files recipe
794 bitbake('base-files -c do_install')
795 bf_fstab = os.path.join(get_bb_var('D', 'base-files'), 'etc', 'fstab')
796 self.assertEqual(True, os.path.exists(bf_fstab))
797 bf_fstab_md5sum = runCmd('md5sum %s ' % bf_fstab).output.split(" ")[0]
798
799 try:
800 no_fstab_update_path = os.path.join(self.resultdir, 'test-no-fstab-update')
801 os.makedirs(no_fstab_update_path)
802 wks_file = os.path.join(no_fstab_update_path, 'temp.wks')
803 with open(wks_file, 'w') as wks:
804 wks.writelines(['part / --source rootfs --fstype=ext4 --label rootfs\n',
805 'part /mnt/p2 --source rootfs --rootfs-dir=core-image-minimal ',
806 '--fstype=ext4 --label p2 --no-fstab-update\n'])
807 runCmd("wic create %s -e core-image-minimal -o %s" \
808 % (wks_file, self.resultdir))
809
810 part_fstab_md5sum = []
811 for i in range(1, 3):
812 part = glob(os.path.join(self.resultdir, 'temp-*.direct.p') + str(i))[0]
813 part_fstab = runCmd("debugfs -R 'cat etc/fstab' %s" % (part), stderr=subprocess.PIPE)
814 part_fstab_md5sum.append(hashlib.md5((part_fstab.output + "\n\n").encode('utf-8')).hexdigest())
815
816 # '/etc/fstab' in partition 2 should contain the same stock fstab file
817 # as the one installed by the base-files recipe.
818 self.assertEqual(bf_fstab_md5sum, part_fstab_md5sum[1])
819
820 # '/etc/fstab' in partition 1 should contain an updated fstab file.
821 self.assertNotEqual(bf_fstab_md5sum, part_fstab_md5sum[0])
822
823 finally:
824 os.environ['PATH'] = oldpath
825
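The check above works by hashing file contents rather than diffing them: if the partition's /etc/fstab hashes to the same digest as the stock file from base-files, it was not touched. A minimal standalone sketch of the same idea, using only the Python standard library (the paths are hypothetical); note that the test itself appends two newlines before hashing, presumably to compensate for trailing-newline stripping in the captured debugfs output:

import hashlib

def md5_of(path):
    # Any difference in content changes the digest.
    with open(path, 'rb') as f:
        return hashlib.md5(f.read()).hexdigest()

# Hypothetical paths: the stock fstab and a copy extracted from a partition.
unchanged = md5_of('/tmp/stock-fstab') == md5_of('/tmp/extracted-fstab')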
826 def test_no_fstab_update_errors(self):
827 """Test --no-fstab-update wks option error handling."""
828 wks_file = 'temp.wks'
829
830 # Absolute argument.
831 with open(wks_file, 'w') as wks:
832 wks.write("part / --source rootfs --fstype=ext4 --no-fstab-update /etc")
833 self.assertNotEqual(0, runCmd("wic create %s -e core-image-minimal -o %s" \
834 % (wks_file, self.resultdir), ignore_status=True).status)
835 os.remove(wks_file)
836
837 # Argument pointing to parent directory.
838 with open(wks_file, 'w') as wks:
839 wks.write("part / --source rootfs --fstype=ext4 --no-fstab-update ././..")
840 self.assertNotEqual(0, runCmd("wic create %s -e core-image-minimal -o %s" \
841 % (wks_file, self.resultdir), ignore_status=True).status)
842 os.remove(wks_file)
843
844 def test_extra_space(self):
845 """Test --extra-space wks option."""
846 extraspace = 1024**3
847 runCmd("wic create wictestdisk "
848 "--image-name core-image-minimal "
849 "--extra-space %i -o %s" % (extraspace ,self.resultdir))
850 wicout = glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))
851 self.assertEqual(1, len(wicout))
852 size = os.path.getsize(wicout[0])
853 self.assertTrue(size > extraspace, msg="Extra space not present (%s vs %s)" % (size, extraspace))
854
855 def test_no_table(self):
856 """Test --no-table wks option."""
857 wks_file = 'temp.wks'
858
859 # A partition created with --no-table still occupies space in the image.
860 with open(wks_file, 'w') as wks:
861 wks.write("part testspace --no-table --fixed-size 16k --offset 4080k")
862 runCmd("wic create %s --image-name core-image-minimal -o %s" % (wks_file, self.resultdir))
863
864 wicout = glob(os.path.join(self.resultdir, "*.*"))
865
866 self.assertEqual(1, len(wicout))
867 size = os.path.getsize(wicout[0])
868 self.assertEqual(size, 4 * 1024 * 1024)
869
870 os.remove(wks_file)
871
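The 4 MiB expectation is pure arithmetic from the kickstart line: a 16 KiB partition placed at offset 4080 KiB ends at exactly 4096 KiB, and with --no-table there is no partition table adding space beyond that. A quick check:

# 4080 KiB offset + 16 KiB fixed size = 4096 KiB = 4 MiB
assert (4080 + 16) * 1024 == 4 * 1024 * 1024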
872 def test_partition_hidden_attributes(self):
873 """Test --hidden wks option."""
874 wks_file = 'temp.wks'
875 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
876 try:
877 with open(wks_file, 'w') as wks:
878 wks.write("""
879 part / --source rootfs --fstype=ext4
880 part / --source rootfs --fstype=ext4 --hidden
881 bootloader --ptable gpt"""
882
883 runCmd("wic create %s -e core-image-minimal -o %s" \
884 % (wks_file, self.resultdir))
885 wicout = os.path.join(self.resultdir, "*.direct")
886
887 result = runCmd("%s/usr/sbin/sfdisk --part-attrs %s 1" % (sysroot, wicout))
888 self.assertEqual('', result.output)
889 result = runCmd("%s/usr/sbin/sfdisk --part-attrs %s 2" % (sysroot, wicout))
890 self.assertEqual('RequiredPartition', result.output)
891
892 finally:
893 os.remove(wks_file)
894
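sfdisk prints an empty string for --part-attrs when no GPT attribute bits are set, which is exactly what the two assertions above rely on. A hedged sketch of the same query outside the test harness (the image path and the sfdisk binary location are assumptions):

import subprocess

def gpt_part_attrs(image, partnum, sfdisk='sfdisk'):
    # Returns e.g. '' or 'RequiredPartition' for the given partition.
    out = subprocess.run([sfdisk, '--part-attrs', image, str(partnum)],
                         capture_output=True, text=True, check=True)
    return out.stdout.strip()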
895 def test_wic_sector_size(self):
896 """Test generation image sector size"""
897
898 oldpath = os.environ['PATH']
899 os.environ['PATH'] = get_bb_var("PATH", "wic-tools")
900
901 try:
902 # Add WIC_SECTOR_SIZE into config
903 config = 'WIC_SECTOR_SIZE = "4096"\n'\
904 'WICVARS:append = " WIC_SECTOR_SIZE"\n'
905 self.append_config(config)
906 bitbake('core-image-minimal')
907
908 # Check that WIC_SECTOR_SIZE was applied to the bitbake variable
909 wic_sector_size_str = get_bb_var('WIC_SECTOR_SIZE', 'core-image-minimal')
910 wic_sector_size = int(wic_sector_size_str)
911 self.assertEqual(4096, wic_sector_size)
912
913 self.logger.info("Test wic_sector_size: %d\n" % wic_sector_size)
914
915 with NamedTemporaryFile("w", suffix=".wks") as wks:
916 wks.writelines(
917 ['bootloader --ptable gpt\n',
918 'part --fstype ext4 --source rootfs --label rofs-a --mkfs-extraopts "-b 4096"\n',
919 'part --fstype ext4 --source rootfs --use-uuid --mkfs-extraopts "-b 4096"\n'])
920 wks.flush()
921 cmd = "wic create %s -e core-image-minimal -o %s" % (wks.name, self.resultdir)
922 runCmd(cmd)
923 wksname = os.path.splitext(os.path.basename(wks.name))[0]
924 images = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
925 self.assertEqual(1, len(images))
926
927 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
928 # list partitions
929 result = runCmd("wic ls %s -n %s" % (images[0], sysroot))
930 self.assertEqual(3, len(result.output.split('\n')))
931
932 # verify the sector sizes with parted
933 res = runCmd("export PARTED_SECTOR_SIZE=%d; parted -m %s unit b p" % (wic_sector_size, images[0]),
934 stderr=subprocess.PIPE)
935
936 # parse parted output which looks like this:
937 # BYT;\n
938 # /var/tmp/wic/build/tmpgjzzefdd-202410281021-sda.direct:78569472B:file:4096:4096:gpt::;\n
939 # 1:139264B:39284735B:39145472B:ext4:rofs-a:;\n
940 # 2:39284736B:78430207B:39145472B:ext4:primary:;\n
941 disk_info = res.output.splitlines()[1]
942 # Check sector sizes
943 sector_size_logical = int(disk_info.split(":")[3])
944 sector_size_physical = int(disk_info.split(":")[4])
945 self.assertEqual(wic_sector_size, sector_size_logical, "Logical sector size is not %d." % wic_sector_size)
946 self.assertEqual(wic_sector_size, sector_size_physical, "Physical sector size is not %d." % wic_sector_size)
947
948 finally:
949 os.environ['PATH'] = oldpath
950
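The parted output parsed above is the machine-readable -m format: a 'BYT;' header, then one colon-separated record per line, with the disk record carrying the logical and physical sector sizes in fields 4 and 5. A minimal parser, fed the sample quoted in the comments above:

sample = """BYT;
/var/tmp/wic/build/tmpgjzzefdd-202410281021-sda.direct:78569472B:file:4096:4096:gpt::;
1:139264B:39284735B:39145472B:ext4:rofs-a:;
2:39284736B:78430207B:39145472B:ext4:primary:;"""

lines = sample.splitlines()
disk = lines[1].split(':')
logical, physical = int(disk[3]), int(disk[4])  # sector size fields
parts = [l.split(':') for l in lines[2:]]       # start/end/size per partition
assert logical == physical == 4096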
689 class Wic2(WicTestCase): 951 class Wic2(WicTestCase):
690 952
691 def test_bmap_short(self): 953 def test_bmap_short(self):
692 """Test generation of .bmap file -m option""" 954 """Test generation of .bmap file -m option"""
693 cmd = "wic create wictestdisk -e core-image-minimal -m -o %s" % self.resultdir 955 cmd = "wic create wictestdisk -e core-image-minimal -m -o %s" % self.resultdir
694 runCmd(cmd) 956 runCmd(cmd)
695 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct"))) 957 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct"))))
696 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct.bmap"))) 958 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct.bmap"))))
697 959
698 def test_bmap_long(self): 960 def test_bmap_long(self):
699 """Test generation of .bmap file --bmap option""" 961 """Test generation of .bmap file --bmap option"""
700 cmd = "wic create wictestdisk -e core-image-minimal --bmap -o %s" % self.resultdir 962 cmd = "wic create wictestdisk -e core-image-minimal --bmap -o %s" % self.resultdir
701 runCmd(cmd) 963 runCmd(cmd)
702 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct"))) 964 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct"))))
703 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct.bmap"))) 965 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct.bmap"))))
704 966
705 def test_image_env(self): 967 def test_image_env(self):
706 """Test generation of <image>.env files.""" 968 """Test generation of <image>.env files."""
@@ -711,7 +973,7 @@ class Wic2(WicTestCase):
711 basename = bb_vars['IMAGE_BASENAME'] 973 basename = bb_vars['IMAGE_BASENAME']
712 self.assertEqual(basename, image) 974 self.assertEqual(basename, image)
713 path = os.path.join(imgdatadir, basename) + '.env' 975 path = os.path.join(imgdatadir, basename) + '.env'
714 self.assertTrue(os.path.isfile(path)) 976 self.assertTrue(os.path.isfile(path), msg="File %s wasn't generated as expected" % path)
715 977
716 wicvars = set(bb_vars['WICVARS'].split()) 978 wicvars = set(bb_vars['WICVARS'].split())
717 # filter out optional variables 979 # filter out optional variables
@@ -724,7 +986,7 @@ class Wic2(WicTestCase):
724 # test if variables used by wic present in the .env file 986 # test if variables used by wic present in the .env file
725 for var in wicvars: 987 for var in wicvars:
726 self.assertTrue(var in content, "%s is not in .env file" % var) 988 self.assertTrue(var in content, "%s is not in .env file" % var)
727 self.assertTrue(content[var]) 989 self.assertTrue(content[var], "%s doesn't have a value (%s)" % (var, content[var]))
728 990
729 def test_image_vars_dir_short(self): 991 def test_image_vars_dir_short(self):
730 """Test image vars directory selection -v option""" 992 """Test image vars directory selection -v option"""
@@ -736,7 +998,7 @@ class Wic2(WicTestCase):
736 "--image-name=%s -v %s -n %s -o %s" 998 "--image-name=%s -v %s -n %s -o %s"
737 % (image, imgenvdir, native_sysroot, 999 % (image, imgenvdir, native_sysroot,
738 self.resultdir)) 1000 self.resultdir))
739 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct"))) 1001 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct"))))
740 1002
741 def test_image_vars_dir_long(self): 1003 def test_image_vars_dir_long(self):
742 """Test image vars directory selection --vars option""" 1004 """Test image vars directory selection --vars option"""
@@ -751,58 +1013,99 @@ class Wic2(WicTestCase):
751 "--outdir %s" 1013 "--outdir %s"
752 % (image, imgenvdir, native_sysroot, 1014 % (image, imgenvdir, native_sysroot,
753 self.resultdir)) 1015 self.resultdir))
754 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct"))) 1016 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct"))))
755 1017
756 @only_for_arch(['i586', 'i686', 'x86_64']) 1018 # TODO this test could also work on aarch64
1019 @skipIfNotArch(['i586', 'i686', 'x86_64'])
757 def test_wic_image_type(self): 1020 def test_wic_image_type(self):
758 """Test building wic images by bitbake""" 1021 """Test building wic images by bitbake"""
759 config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\ 1022 config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\
760 'MACHINE_FEATURES_append = " efi"\n' 1023 'MACHINE_FEATURES:append = " efi"\n'
1024 image_recipe_append = """
1025 do_image_wic[postfuncs] += "run_wic_cmd"
1026 run_wic_cmd() {
1027 echo "test" >> ${WORKDIR}/test.wic-cp
1028 wic cp --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" ${WORKDIR}/test.wic-cp ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic:1/
1029 wic ls --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic:1/
1030 wic rm --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic:1/test.wic-cp
1031 wic cp --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" ${WORKDIR}/test.wic-cp ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic:1/
1032 }
1033 """
1034 self.write_recipeinc('images', image_recipe_append)
1035
761 self.append_config(config) 1036 self.append_config(config)
762 self.assertEqual(0, bitbake('wic-image-minimal').status) 1037 image = 'wic-image-minimal'
1038 bitbake(image)
763 self.remove_config(config) 1039 self.remove_config(config)
764 1040
765 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'MACHINE']) 1041 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
766 deploy_dir = bb_vars['DEPLOY_DIR_IMAGE'] 1042 prefix = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.' % bb_vars['IMAGE_LINK_NAME'])
767 machine = bb_vars['MACHINE'] 1043
768 prefix = os.path.join(deploy_dir, 'wic-image-minimal-%s.' % machine) 1044 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
1045 # check if file is there
1046 result = runCmd("wic ls %s:1/ -n %s" % (prefix+"wic", sysroot))
1047 self.assertIn("test.wic-cp", result.output)
1048
769 # check that we have result image and manifest symlinks 1049 # check that we have result image and manifest symlinks
770 # pointing to existing files 1050 # pointing to existing files
771 for suffix in ('wic', 'manifest'): 1051 for suffix in ('wic', 'manifest'):
772 path = prefix + suffix 1052 path = prefix + suffix
773 self.assertTrue(os.path.islink(path)) 1053 self.assertTrue(os.path.islink(path), msg="Link %s wasn't generated as expected" % path)
774 self.assertTrue(os.path.isfile(os.path.realpath(path))) 1054 self.assertTrue(os.path.isfile(os.path.realpath(path)), msg="File linked to by %s wasn't generated as expected" % path)
775 1055
776 @only_for_arch(['i586', 'i686', 'x86_64']) 1056 # TODO this should work on aarch64
1057 @skipIfNotArch(['i586', 'i686', 'x86_64'])
1058 @OETestTag("runqemu")
777 def test_qemu(self): 1059 def test_qemu(self):
778 """Test wic-image-minimal under qemu""" 1060 """Test wic-image-minimal under qemu"""
779 config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\ 1061 config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\
780 'MACHINE_FEATURES_append = " efi"\n' 1062 'MACHINE_FEATURES:append = " efi"\n'
781 self.append_config(config) 1063 self.append_config(config)
782 self.assertEqual(0, bitbake('wic-image-minimal').status) 1064 image_recipe_append = """
1065 do_image_wic[postfuncs] += "run_wic_cmd"
1066 run_wic_cmd() {
1067 echo "test" >> ${WORKDIR}/test.wic-cp
1068 wic cp --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" ${WORKDIR}/test.wic-cp ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic:1/
1069 wic ls --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic:1/
1070 wic rm --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic:1/test.wic-cp
1071 wic cp --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" ${WORKDIR}/test.wic-cp ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic:1/
1072 }
1073 """
1074 self.write_recipeinc('images', image_recipe_append)
1075 bitbake('wic-image-minimal')
1076
1077 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
1078 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], "wic-image-minimal")
1079 image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], bb_vars['IMAGE_LINK_NAME'])
1080 # check if file is there
1081 result = runCmd("wic ls %s:1/ -n %s" % (image_path+".wic", sysroot))
1082 self.assertIn("test.wic-cp", result.output)
783 self.remove_config(config) 1083 self.remove_config(config)
784 1084
785 with runqemu('wic-image-minimal', ssh=False) as qemu: 1085 runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'wic-image-minimal') or ""
1086 with runqemu('wic-image-minimal', ssh=False, runqemuparams='%s nographic' % (runqemu_params)) as qemu:
786 cmd = "mount | grep '^/dev/' | cut -f1,3 -d ' ' | egrep -c -e '/dev/sda1 /boot' " \ 1087 cmd = "mount | grep '^/dev/' | cut -f1,3 -d ' ' | egrep -c -e '/dev/sda1 /boot' " \
787 "-e '/dev/root /|/dev/sda2 /' -e '/dev/sda3 /media' -e '/dev/sda4 /mnt'" 1088 "-e '/dev/root /|/dev/sda2 /' -e '/dev/sda3 /media' -e '/dev/sda4 /mnt'"
788 status, output = qemu.run_serial(cmd) 1089 status, output = qemu.run_serial(cmd)
789 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) 1090 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
790 self.assertEqual(output, '4') 1091 self.assertEqual(output, '4')
791 cmd = "grep UUID= /etc/fstab" 1092 cmd = "grep UUID=2c71ef06-a81d-4735-9d3a-379b69c6bdba /etc/fstab"
792 status, output = qemu.run_serial(cmd) 1093 status, output = qemu.run_serial(cmd)
793 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) 1094 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
794 self.assertEqual(output, 'UUID=2c71ef06-a81d-4735-9d3a-379b69c6bdba\t/media\text4\tdefaults\t0\t0') 1095 self.assertEqual(output, 'UUID=2c71ef06-a81d-4735-9d3a-379b69c6bdba\t/media\text4\tdefaults\t0\t0')
795 1096
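The serial command above counts how many of the four expected device/mountpoint pairs are mounted. The same check expressed in Python over /proc/mounts-style text (the sample data is made up for illustration):

sample = """/dev/sda1 /boot ext4 rw 0 0
/dev/sda2 / ext4 rw 0 0
/dev/sda3 /media ext4 rw 0 0
/dev/sda4 /mnt ext4 rw 0 0"""

expected = {('/dev/sda1', '/boot'), ('/dev/sda2', '/'),
            ('/dev/sda3', '/media'), ('/dev/sda4', '/mnt')}
found = {tuple(line.split()[:2]) for line in sample.splitlines()}
assert expected <= found  # all four partitions are mounted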
796 @only_for_arch(['i586', 'i686', 'x86_64']) 1097 @skipIfNotArch(['i586', 'i686', 'x86_64'])
1098 @OETestTag("runqemu")
797 def test_qemu_efi(self): 1099 def test_qemu_efi(self):
798 """Test core-image-minimal efi image under qemu""" 1100 """Test core-image-minimal efi image under qemu"""
799 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "mkefidisk.wks"\n' 1101 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "mkefidisk.wks"\n'
800 self.append_config(config) 1102 self.append_config(config)
801 self.assertEqual(0, bitbake('core-image-minimal ovmf').status) 1103 bitbake('core-image-minimal ovmf')
802 self.remove_config(config) 1104 self.remove_config(config)
803 1105
1106 runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'core-image-minimal') or ""
804 with runqemu('core-image-minimal', ssh=False, 1107 with runqemu('core-image-minimal', ssh=False,
805 runqemuparams='ovmf', image_fstype='wic') as qemu: 1108 runqemuparams='%s nographic ovmf' % (runqemu_params), image_fstype='wic') as qemu:
806 cmd = "grep sda. /proc/partitions |wc -l" 1109 cmd = "grep sda. /proc/partitions |wc -l"
807 status, output = qemu.run_serial(cmd) 1110 status, output = qemu.run_serial(cmd)
808 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) 1111 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
@@ -831,7 +1134,7 @@ class Wic2(WicTestCase):
831 1134
832 wksname = os.path.splitext(os.path.basename(wkspath))[0] 1135 wksname = os.path.splitext(os.path.basename(wkspath))[0]
833 1136
834 wicout = glob(self.resultdir + "%s-*direct" % wksname) 1137 wicout = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
835 1138
836 if not wicout: 1139 if not wicout:
837 return (p, None) 1140 return (p, None)
@@ -842,8 +1145,8 @@ class Wic2(WicTestCase):
842 native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "wic-tools") 1145 native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "wic-tools")
843 1146
844 # verify partition sizes with parted 1147 # verify partition sizes with parted
845 res = runCmd("parted -m %s unit kib p 2>/dev/null" % wicimg, 1148 res = runCmd("parted -m %s unit kib p" % wicimg,
846 native_sysroot=native_sysroot) 1149 native_sysroot=native_sysroot, stderr=subprocess.PIPE)
847 1150
848 # parse parted output which looks like this: 1151 # parse parted output which looks like this:
849 # BYT;\n 1152 # BYT;\n
@@ -882,71 +1185,71 @@ class Wic2(WicTestCase):
882 with NamedTemporaryFile("w", suffix=".wks") as tempf: 1185 with NamedTemporaryFile("w", suffix=".wks") as tempf:
883 # Test that partitions are placed at the correct offsets, default KB 1186 # Test that partitions are placed at the correct offsets, default KB
884 tempf.write("bootloader --ptable gpt\n" \ 1187 tempf.write("bootloader --ptable gpt\n" \
885 "part / --source rootfs --ondisk hda --offset 32 --fixed-size 100M --fstype=ext4\n" \ 1188 "part / --source rootfs --ondisk hda --offset 32 --fixed-size 200M --fstype=ext4\n" \
886 "part /bar --ondisk hda --offset 102432 --fixed-size 100M --fstype=ext4\n") 1189 "part /bar --ondisk hda --offset 204832 --fixed-size 100M --fstype=ext4\n")
887 tempf.flush() 1190 tempf.flush()
888 1191
889 _, partlns = self._get_wic_partitions(tempf.name, native_sysroot) 1192 _, partlns = self._get_wic_partitions(tempf.name, native_sysroot)
890 self.assertEqual(partlns, [ 1193 self.assertEqual(partlns, [
891 "1:32.0kiB:102432kiB:102400kiB:ext4:primary:;", 1194 "1:32.0kiB:204832kiB:204800kiB:ext4:primary:;",
892 "2:102432kiB:204832kiB:102400kiB:ext4:primary:;", 1195 "2:204832kiB:307232kiB:102400kiB:ext4:primary:;",
893 ]) 1196 ])
894 1197
895 with NamedTemporaryFile("w", suffix=".wks") as tempf: 1198 with NamedTemporaryFile("w", suffix=".wks") as tempf:
896 # Test that partitions are placed at the correct offsets, same with explicit KB 1199 # Test that partitions are placed at the correct offsets, same with explicit KB
897 tempf.write("bootloader --ptable gpt\n" \ 1200 tempf.write("bootloader --ptable gpt\n" \
898 "part / --source rootfs --ondisk hda --offset 32K --fixed-size 100M --fstype=ext4\n" \ 1201 "part / --source rootfs --ondisk hda --offset 32K --fixed-size 200M --fstype=ext4\n" \
899 "part /bar --ondisk hda --offset 102432K --fixed-size 100M --fstype=ext4\n") 1202 "part /bar --ondisk hda --offset 204832K --fixed-size 100M --fstype=ext4\n")
900 tempf.flush() 1203 tempf.flush()
901 1204
902 _, partlns = self._get_wic_partitions(tempf.name, native_sysroot) 1205 _, partlns = self._get_wic_partitions(tempf.name, native_sysroot)
903 self.assertEqual(partlns, [ 1206 self.assertEqual(partlns, [
904 "1:32.0kiB:102432kiB:102400kiB:ext4:primary:;", 1207 "1:32.0kiB:204832kiB:204800kiB:ext4:primary:;",
905 "2:102432kiB:204832kiB:102400kiB:ext4:primary:;", 1208 "2:204832kiB:307232kiB:102400kiB:ext4:primary:;",
906 ]) 1209 ])
907 1210
908 with NamedTemporaryFile("w", suffix=".wks") as tempf: 1211 with NamedTemporaryFile("w", suffix=".wks") as tempf:
909 # Test that partitions are placed at the correct offsets using MB 1212 # Test that partitions are placed at the correct offsets using MB
910 tempf.write("bootloader --ptable gpt\n" \ 1213 tempf.write("bootloader --ptable gpt\n" \
911 "part / --source rootfs --ondisk hda --offset 32K --fixed-size 100M --fstype=ext4\n" \ 1214 "part / --source rootfs --ondisk hda --offset 32K --fixed-size 200M --fstype=ext4\n" \
912 "part /bar --ondisk hda --offset 101M --fixed-size 100M --fstype=ext4\n") 1215 "part /bar --ondisk hda --offset 201M --fixed-size 100M --fstype=ext4\n")
913 tempf.flush() 1216 tempf.flush()
914 1217
915 _, partlns = self._get_wic_partitions(tempf.name, native_sysroot) 1218 _, partlns = self._get_wic_partitions(tempf.name, native_sysroot)
916 self.assertEqual(partlns, [ 1219 self.assertEqual(partlns, [
917 "1:32.0kiB:102432kiB:102400kiB:ext4:primary:;", 1220 "1:32.0kiB:204832kiB:204800kiB:ext4:primary:;",
918 "2:103424kiB:205824kiB:102400kiB:ext4:primary:;", 1221 "2:205824kiB:308224kiB:102400kiB:ext4:primary:;",
919 ]) 1222 ])
920 1223
921 with NamedTemporaryFile("w", suffix=".wks") as tempf: 1224 with NamedTemporaryFile("w", suffix=".wks") as tempf:
922 # Test that partitions can be placed on a 512 byte sector boundary 1225 # Test that partitions can be placed on a 512 byte sector boundary
923 tempf.write("bootloader --ptable gpt\n" \ 1226 tempf.write("bootloader --ptable gpt\n" \
924 "part / --source rootfs --ondisk hda --offset 65s --fixed-size 99M --fstype=ext4\n" \ 1227 "part / --source rootfs --ondisk hda --offset 65s --fixed-size 199M --fstype=ext4\n" \
925 "part /bar --ondisk hda --offset 102432 --fixed-size 100M --fstype=ext4\n") 1228 "part /bar --ondisk hda --offset 204832 --fixed-size 100M --fstype=ext4\n")
926 tempf.flush() 1229 tempf.flush()
927 1230
928 _, partlns = self._get_wic_partitions(tempf.name, native_sysroot) 1231 _, partlns = self._get_wic_partitions(tempf.name, native_sysroot)
929 self.assertEqual(partlns, [ 1232 self.assertEqual(partlns, [
930 "1:32.5kiB:101408kiB:101376kiB:ext4:primary:;", 1233 "1:32.5kiB:203808kiB:203776kiB:ext4:primary:;",
931 "2:102432kiB:204832kiB:102400kiB:ext4:primary:;", 1234 "2:204832kiB:307232kiB:102400kiB:ext4:primary:;",
932 ]) 1235 ])
933 1236
934 with NamedTemporaryFile("w", suffix=".wks") as tempf: 1237 with NamedTemporaryFile("w", suffix=".wks") as tempf:
935 # Test that a partition can be placed immediately after a MSDOS partition table 1238 # Test that a partition can be placed immediately after a MSDOS partition table
936 tempf.write("bootloader --ptable msdos\n" \ 1239 tempf.write("bootloader --ptable msdos\n" \
937 "part / --source rootfs --ondisk hda --offset 1s --fixed-size 100M --fstype=ext4\n") 1240 "part / --source rootfs --ondisk hda --offset 1s --fixed-size 200M --fstype=ext4\n")
938 tempf.flush() 1241 tempf.flush()
939 1242
940 _, partlns = self._get_wic_partitions(tempf.name, native_sysroot) 1243 _, partlns = self._get_wic_partitions(tempf.name, native_sysroot)
941 self.assertEqual(partlns, [ 1244 self.assertEqual(partlns, [
942 "1:0.50kiB:102400kiB:102400kiB:ext4::;", 1245 "1:0.50kiB:204800kiB:204800kiB:ext4::;",
943 ]) 1246 ])
944 1247
945 with NamedTemporaryFile("w", suffix=".wks") as tempf: 1248 with NamedTemporaryFile("w", suffix=".wks") as tempf:
946 # Test that image creation fails if the partitions would overlap 1249 # Test that image creation fails if the partitions would overlap
947 tempf.write("bootloader --ptable gpt\n" \ 1250 tempf.write("bootloader --ptable gpt\n" \
948 "part / --source rootfs --ondisk hda --offset 32 --fixed-size 100M --fstype=ext4\n" \ 1251 "part / --source rootfs --ondisk hda --offset 32 --fixed-size 200M --fstype=ext4\n" \
949 "part /bar --ondisk hda --offset 102431 --fixed-size 100M --fstype=ext4\n") 1252 "part /bar --ondisk hda --offset 204831 --fixed-size 100M --fstype=ext4\n")
950 tempf.flush() 1253 tempf.flush()
951 1254
952 p, _ = self._get_wic_partitions(tempf.name, ignore_status=True) 1255 p, _ = self._get_wic_partitions(tempf.name, ignore_status=True)
@@ -955,7 +1258,7 @@ class Wic2(WicTestCase):
955 with NamedTemporaryFile("w", suffix=".wks") as tempf: 1258 with NamedTemporaryFile("w", suffix=".wks") as tempf:
956 # Test that partitions are not allowed to overlap with the bootloader 1259 # Test that partitions are not allowed to overlap with the bootloader
957 tempf.write("bootloader --ptable gpt\n" \ 1260 tempf.write("bootloader --ptable gpt\n" \
958 "part / --source rootfs --ondisk hda --offset 8 --fixed-size 100M --fstype=ext4\n") 1261 "part / --source rootfs --ondisk hda --offset 8 --fixed-size 200M --fstype=ext4\n")
959 tempf.flush() 1262 tempf.flush()
960 1263
961 p, _ = self._get_wic_partitions(tempf.name, ignore_status=True) 1264 p, _ = self._get_wic_partitions(tempf.name, ignore_status=True)
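The expected parted lines in these cases follow from simple bookkeeping: a partition at offset 32 KiB with a fixed size of 200 MiB (204800 KiB) ends at 204832 KiB, so the next partition may start at 204832 but not at 204831, and an offset of 8 KiB collides with the GPT structures themselves. A sketch of that arithmetic:

KIB = 1024

def end_kib(offset_kib, size_mib):
    return offset_kib + size_mib * KIB

first_end = end_kib(32, 200)
assert first_end == 204832        # matches the expected parted output
assert 204831 < first_end         # starting 1 KiB earlier overlaps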
@@ -976,50 +1279,74 @@ class Wic2(WicTestCase):
976 size = int(size[:-3]) 1279 size = int(size[:-3])
977 self.assertGreaterEqual(size, 204800) 1280 self.assertGreaterEqual(size, 204800)
978 1281
979 @only_for_arch(['i586', 'i686', 'x86_64']) 1282 # TODO this test could also work on aarch64
1283 @skipIfNotArch(['i586', 'i686', 'x86_64'])
1284 @OETestTag("runqemu")
980 def test_rawcopy_plugin_qemu(self): 1285 def test_rawcopy_plugin_qemu(self):
981 """Test rawcopy plugin in qemu""" 1286 """Test rawcopy plugin in qemu"""
982 # build ext4 and wic images 1287 # build an ext4 image and then use it in a wic image
983 for fstype in ("ext4", "wic"): 1288 config = 'IMAGE_FSTYPES = "ext4"\n'
984 config = 'IMAGE_FSTYPES = "%s"\nWKS_FILE = "test_rawcopy_plugin.wks.in"\n' % fstype 1289 self.append_config(config)
985 self.append_config(config) 1290 bitbake('core-image-minimal')
986 self.assertEqual(0, bitbake('core-image-minimal').status) 1291 image_link_name = get_bb_var('IMAGE_LINK_NAME', 'core-image-minimal')
987 self.remove_config(config) 1292 self.remove_config(config)
988 1293
989 with runqemu('core-image-minimal', ssh=False, image_fstype='wic') as qemu: 1294 config = 'IMAGE_FSTYPES = "wic"\n' \
1295 'IMAGE_LINK_NAME_CORE_IMAGE_MINIMAL = "%s"\n'\
1296 'WKS_FILE = "test_rawcopy_plugin.wks.in"\n'\
1297 % image_link_name
1298 self.append_config(config)
1299 bitbake('core-image-minimal-mtdutils')
1300 self.remove_config(config)
1301
1302 runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'core-image-minimal-mtdutils') or ""
1303 with runqemu('core-image-minimal-mtdutils', ssh=False,
1304 runqemuparams='%s nographic' % (runqemu_params), image_fstype='wic') as qemu:
990 cmd = "grep sda. /proc/partitions |wc -l" 1305 cmd = "grep sda. /proc/partitions |wc -l"
991 status, output = qemu.run_serial(cmd) 1306 status, output = qemu.run_serial(cmd)
992 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) 1307 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
993 self.assertEqual(output, '2') 1308 self.assertEqual(output, '2')
994 1309
995 def test_rawcopy_plugin(self): 1310 def _rawcopy_plugin(self, fstype):
996 """Test rawcopy plugin""" 1311 """Test rawcopy plugin"""
997 img = 'core-image-minimal' 1312 image = 'core-image-minimal'
998 machine = get_bb_var('MACHINE', img) 1313 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
1314 params = ',unpack' if fstype.endswith('.gz') else ''
999 with NamedTemporaryFile("w", suffix=".wks") as wks: 1315 with NamedTemporaryFile("w", suffix=".wks") as wks:
1000 wks.writelines(['part /boot --active --source bootimg-pcbios\n', 1316 wks.write('part / --source rawcopy --sourceparams="file=%s.%s%s"\n'\
1001 'part / --source rawcopy --sourceparams="file=%s-%s.ext4" --use-uuid\n'\ 1317 % (bb_vars['IMAGE_LINK_NAME'], fstype, params))
1002 % (img, machine),
1003 'bootloader --timeout=0 --append="console=ttyS0,115200n8"\n'])
1004 wks.flush() 1318 wks.flush()
1005 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir) 1319 cmd = "wic create %s -e %s -o %s" % (wks.name, image, self.resultdir)
1006 runCmd(cmd) 1320 runCmd(cmd)
1007 wksname = os.path.splitext(os.path.basename(wks.name))[0] 1321 wksname = os.path.splitext(os.path.basename(wks.name))[0]
1008 out = glob(self.resultdir + "%s-*direct" % wksname) 1322 out = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
1009 self.assertEqual(1, len(out)) 1323 self.assertEqual(1, len(out))
1010 1324
1325 def test_rawcopy_plugin(self):
1326 config = 'IMAGE_FSTYPES = "ext4"\n'
1327 self.append_config(config)
1328 self.assertEqual(0, bitbake('core-image-minimal').status)
1329 self.remove_config(config)
1330 self._rawcopy_plugin('ext4')
1331
1332 def test_rawcopy_plugin_unpack(self):
1333 fstype = 'ext4.gz'
1334 config = 'IMAGE_FSTYPES = "%s"\n' % fstype
1335 self.append_config(config)
1336 self.assertEqual(0, bitbake('core-image-minimal').status)
1337 self.remove_config(config)
1338 self._rawcopy_plugin(fstype)
1339
1011 def test_empty_plugin(self): 1340 def test_empty_plugin(self):
1012 """Test empty plugin""" 1341 """Test empty plugin"""
1013 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_empty_plugin.wks"\n' 1342 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_empty_plugin.wks"\n'
1014 self.append_config(config) 1343 self.append_config(config)
1015 self.assertEqual(0, bitbake('core-image-minimal').status) 1344 image = 'core-image-minimal'
1345 bitbake(image)
1016 self.remove_config(config) 1346 self.remove_config(config)
1017 1347 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
1018 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'MACHINE']) 1348 image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.wic' % bb_vars['IMAGE_LINK_NAME'])
1019 deploy_dir = bb_vars['DEPLOY_DIR_IMAGE'] 1349 self.assertTrue(os.path.exists(image_path), msg="Image file %s wasn't generated as expected" % image_path)
1020 machine = bb_vars['MACHINE']
1021 image_path = os.path.join(deploy_dir, 'core-image-minimal-%s.wic' % machine)
1022 self.assertEqual(True, os.path.exists(image_path))
1023 1350
1024 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') 1351 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
1025 1352
@@ -1028,15 +1355,18 @@ class Wic2(WicTestCase):
1028 result = runCmd("wic ls %s -n %s | awk -F ' ' '{print $1 \" \" $5}' | grep '^2' | wc -w" % (image_path, sysroot)) 1355 result = runCmd("wic ls %s -n %s | awk -F ' ' '{print $1 \" \" $5}' | grep '^2' | wc -w" % (image_path, sysroot))
1029 self.assertEqual('1', result.output) 1356 self.assertEqual('1', result.output)
1030 1357
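The awk pipeline above keeps the partition-number and filesystem columns of the 'wic ls' listing, filters to the row for partition 2, and counts the words: a count of 1 means partition 2 has a number but no filesystem, which is what the empty plugin should produce. The same filter in Python, over an assumed listing:

listing = """Num  Start  End    Size   Fstype
1    1024   10240  9216   ext4
2    10241  20480  10239"""

words = []
for line in listing.splitlines():
    cols = line.split()
    row = cols[0:1] + cols[4:5]       # fields 1 and 5, if present
    if row and row[0].startswith('2'):
        words.extend(row)
assert len(words) == 1                # partition 2 carries no fstype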
1031 @only_for_arch(['i586', 'i686', 'x86_64']) 1358 @skipIfNotArch(['i586', 'i686', 'x86_64'])
1359 @OETestTag("runqemu")
1032 def test_biosplusefi_plugin_qemu(self): 1360 def test_biosplusefi_plugin_qemu(self):
1033 """Test biosplusefi plugin in qemu""" 1361 """Test biosplusefi plugin in qemu"""
1034 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_biosplusefi_plugin.wks"\nMACHINE_FEATURES_append = " efi"\n' 1362 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_biosplusefi_plugin.wks"\nMACHINE_FEATURES:append = " efi"\n'
1035 self.append_config(config) 1363 self.append_config(config)
1036 self.assertEqual(0, bitbake('core-image-minimal').status) 1364 bitbake('core-image-minimal')
1037 self.remove_config(config) 1365 self.remove_config(config)
1038 1366
1039 with runqemu('core-image-minimal', ssh=False, image_fstype='wic') as qemu: 1367 runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'core-image-minimal') or ""
1368 with runqemu('core-image-minimal', ssh=False,
1369 runqemuparams='%s nographic' % (runqemu_params), image_fstype='wic') as qemu:
1040 # Check that we have ONLY two /dev/sda* partitions (/boot and /) 1370 # Check that we have ONLY two /dev/sda* partitions (/boot and /)
1041 cmd = "grep sda. /proc/partitions | wc -l" 1371 cmd = "grep sda. /proc/partitions | wc -l"
1042 status, output = qemu.run_serial(cmd) 1372 status, output = qemu.run_serial(cmd)
@@ -1059,32 +1389,178 @@ class Wic2(WicTestCase):
1059 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) 1389 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
1060 self.assertEqual(output, '*') 1390 self.assertEqual(output, '*')
1061 1391
1062 @only_for_arch(['i586', 'i686', 'x86_64']) 1392 @skipIfNotArch(['i586', 'i686', 'x86_64'])
1063 def test_biosplusefi_plugin(self): 1393 def test_biosplusefi_plugin(self):
1064 """Test biosplusefi plugin""" 1394 """Test biosplusefi plugin"""
1065 # Wic generation below may fail depending on the order of the unittests 1395 # Wic generation below may fail depending on the order of the unittests
1066 # This is because bootimg-pcbios (that bootimg-biosplusefi uses) generate its MBR inside STAGING_DATADIR directory 1396 # This is because bootimg_pcbios (that bootimg_biosplusefi uses) generate its MBR inside STAGING_DATADIR directory
1067 # which may or may not exists depending on what was built already 1397 # which may or may not exists depending on what was built already
1068 # If an image hasn't been built yet, directory ${STAGING_DATADIR}/syslinux won't exists and _get_bootimg_dir() 1398 # If an image hasn't been built yet, directory ${STAGING_DATADIR}/syslinux won't exists and _get_bootimg_dir()
1069 # will raise with "Couldn't find correct bootimg_dir" 1399 # will raise with "Couldn't find correct bootimg_dir"
1070 # The easiest way to work-around this issue is to make sure we already built an image here, hence the bitbake call 1400 # The easiest way to work-around this issue is to make sure we already built an image here, hence the bitbake call
1071 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_biosplusefi_plugin.wks"\nMACHINE_FEATURES_append = " efi"\n' 1401 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_biosplusefi_plugin.wks"\nMACHINE_FEATURES:append = " efi"\n'
1072 self.append_config(config) 1402 self.append_config(config)
1073 self.assertEqual(0, bitbake('core-image-minimal').status) 1403 bitbake('core-image-minimal')
1404 self.remove_config(config)
1405
1406 img = 'core-image-minimal'
1407 with NamedTemporaryFile("w", suffix=".wks") as wks:
1408 wks.writelines(['part /boot --active --source bootimg_biosplusefi --sourceparams="loader=grub-efi"\n',
1409 'part / --source rootfs --fstype=ext4 --align 1024 --use-uuid\n'\
1410 'bootloader --timeout=0 --append="console=ttyS0,115200n8"\n'])
1411 wks.flush()
1412 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
1413 runCmd(cmd)
1414 wksname = os.path.splitext(os.path.basename(wks.name))[0]
1415 out = glob(os.path.join(self.resultdir, "%s-*.direct" % wksname))
1416 self.assertEqual(1, len(out))
1417
1418 @skipIfNotArch(['i586', 'i686', 'x86_64', 'aarch64'])
1419 def test_uefi_kernel(self):
1420 """ Test uefi-kernel in wic """
1421 config = 'IMAGE_EFI_BOOT_FILES="/etc/fstab;testfile"\nIMAGE_FSTYPES = "wic"\nWKS_FILE = "test_uefikernel.wks"\nMACHINE_FEATURES:append = " efi"\n'
1422 self.append_config(config)
1423 bitbake('core-image-minimal')
1074 self.remove_config(config) 1424 self.remove_config(config)
1075 1425
1076 img = 'core-image-minimal' 1426 img = 'core-image-minimal'
1077 with NamedTemporaryFile("w", suffix=".wks") as wks: 1427 with NamedTemporaryFile("w", suffix=".wks") as wks:
1078 wks.writelines(['part /boot --active --source bootimg-biosplusefi --sourceparams="loader=grub-efi"\n', 1428 wks.writelines(['part /boot --source bootimg_efi --sourceparams="loader=uefi-kernel"\n'
1079 'part / --source rootfs --fstype=ext4 --align 1024 --use-uuid\n'\ 1429 'part / --source rootfs --fstype=ext4 --align 1024 --use-uuid\n'\
1080 'bootloader --timeout=0 --append="console=ttyS0,115200n8"\n']) 1430 'bootloader --timeout=0 --append="console=ttyS0,115200n8"\n'])
1081 wks.flush() 1431 wks.flush()
1082 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir) 1432 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
1083 runCmd(cmd) 1433 runCmd(cmd)
1084 wksname = os.path.splitext(os.path.basename(wks.name))[0] 1434 wksname = os.path.splitext(os.path.basename(wks.name))[0]
1085 out = glob(self.resultdir + "%s-*.direct" % wksname) 1435 out = glob(os.path.join(self.resultdir, "%s-*.direct" % wksname))
1086 self.assertEqual(1, len(out)) 1436 self.assertEqual(1, len(out))
1087 1437
1438 # TODO this test could also work on aarch64
1439 @skipIfNotArch(['i586', 'i686', 'x86_64'])
1440 @OETestTag("runqemu")
1441 def test_efi_plugin_unified_kernel_image_qemu(self):
1442 """Test Unified Kernel Image feature in qemu without systemd in initramfs or rootfs"""
1443 config = """
1444 # efi firmware must load systemd-boot, not grub
1445 EFI_PROVIDER = "systemd-boot"
1446
1447 # image format must be wic, needs esp partition for firmware etc
1448 IMAGE_FSTYPES:pn-core-image-base:append = " wic"
1449 WKS_FILE = "test_efi_plugin.wks"
1450
1451 # efi, uki and systemd features must be enabled
1452 MACHINE_FEATURES:append = " efi"
1453 IMAGE_CLASSES:append:pn-core-image-base = " uki"
1454
1455 # the uki also embeds an initrd, without systemd or udev
1456 INITRAMFS_IMAGE = "core-image-initramfs-boot"
1457
1458 # runqemu must not load the kernel separately, it's in the uki
1459 QB_KERNEL_ROOT = ""
1460 QB_DEFAULT_KERNEL = "none"
1461
1462 # boot command line provided via the uki, not via the bootloader
1463 UKI_CMDLINE = "rootwait root=LABEL=root console=${KERNEL_CONSOLE}"
1464
1465 """
1466 self.append_config(config)
1467 bitbake('core-image-base ovmf')
1468 runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'core-image-base') or ""
1469 uki_filename = get_bb_var('UKI_FILENAME', 'core-image-base')
1470 self.remove_config(config)
1471
1472 with runqemu('core-image-base', ssh=False,
1473 runqemuparams='%s nographic ovmf' % (runqemu_params), image_fstype='wic') as qemu:
1474 # Check that /boot has EFI boot*.efi (required for EFI)
1475 cmd = "ls /boot/EFI/BOOT/boot*.efi | wc -l"
1476 status, output = qemu.run_serial(cmd)
1477 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
1478 self.assertEqual(output, '1')
1479 # Check that /boot has EFI/Linux/${UKI_FILENAME} (required for Unified Kernel Images auto detection)
1480 cmd = "ls /boot/EFI/Linux/%s | wc -l" % (uki_filename)
1481 status, output = qemu.run_serial(cmd)
1482 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
1483 self.assertEqual(output, '1')
1484 # Check that /boot doesn't have loader/entries/boot.conf (Unified Kernel Images are auto detected by the bootloader)
1485 cmd = "ls /boot/loader/entries/boot.conf 2&>/dev/null | wc -l"
1486 status, output = qemu.run_serial(cmd)
1487 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
1488 self.assertEqual(output, '0')
1489
1490 @skipIfNotArch(['aarch64'])
1491 @OETestTag("runqemu")
1492 def test_efi_plugin_plain_systemd_boot_qemu_aarch64(self):
1493 """Test plain systemd-boot in qemu with systemd"""
1494 config = """
1495 INIT_MANAGER = "systemd"
1496 EFI_PROVIDER = "systemd-boot"
1497
1498 # image format must be wic, needs esp partition for firmware etc
1499 IMAGE_FSTYPES:pn-core-image-base:append = " wic"
1500 WKS_FILE = "test_efi_plugin_plain_systemd-boot.wks"
1501
1502 INITRAMFS_IMAGE = "core-image-initramfs-boot"
1503
1504 # to configure runqemu
1505 IMAGE_CLASSES += "qemuboot"
1506 # u-boot efi firmware
1507 QB_DEFAULT_BIOS = "u-boot.bin"
1508 # need to use virtio, scsi not supported by u-boot by default
1509 QB_DRIVE_TYPE = "/dev/vd"
1510
1511 # disable kvm, breaks boot
1512 QEMU_USE_KVM = ""
1513
1514 IMAGE_CLASSES:remove = 'testimage'
1515 """
1516 self.append_config(config)
1517 bitbake('core-image-base u-boot')
1518 runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'core-image-base') or ""
1519
1520 with runqemu('core-image-base', ssh=False,
1521 runqemuparams='%s nographic' % (runqemu_params), image_fstype='wic') as qemu:
1522 # Check that /boot has EFI boot*.efi (required for EFI)
1523 cmd = "ls /boot/EFI/BOOT/boot*.efi | wc -l"
1524 status, output = qemu.run_serial(cmd)
1525 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
1526 self.assertEqual(output, '1')
1527 # Check that boot.conf exists
1528 cmd = "cat /boot/loader/entries/boot.conf"
1529 status, output = qemu.run_serial(cmd)
1530 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
1531 self.remove_config(config)
1532
1533 @skipIfNotArch(['i586', 'i686', 'x86_64'])
1534 @OETestTag("runqemu")
1535 def test_efi_plugin_plain_systemd_boot_qemu_x86(self):
1536 """Test plain systemd-boot to systemd in qemu"""
1537 config = """
1538 INIT_MANAGER = "systemd"
1539 EFI_PROVIDER = "systemd-boot"
1540
1541 # image format must be wic, needs esp partition for firmware etc
1542 IMAGE_FSTYPES:pn-core-image-base:append = " wic"
1543 WKS_FILE = "test_efi_plugin_plain_systemd-boot.wks"
1544
1545 INITRAMFS_IMAGE = "core-image-initramfs-boot"
1546 """
1547 self.append_config(config)
1548 bitbake('core-image-base ovmf')
1549 runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'core-image-base') or ""
1550 self.remove_config(config)
1551
1552 with runqemu('core-image-base', ssh=False,
1553 runqemuparams='%s nographic ovmf' % (runqemu_params), image_fstype='wic') as qemu:
1554 # Check that /boot has EFI boot*.efi (required for EFI)
1555 cmd = "ls /boot/EFI/BOOT/boot*.efi | wc -l"
1556 status, output = qemu.run_serial(cmd)
1557 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
1558 self.assertEqual(output, '1')
1559 # Check that boot.conf exists
1560 cmd = "cat /boot/loader/entries/boot.conf"
1561 status, output = qemu.run_serial(cmd)
1562 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
1563
1088 def test_fs_types(self): 1564 def test_fs_types(self):
1089 """Test filesystem types for empty and not empty partitions""" 1565 """Test filesystem types for empty and not empty partitions"""
1090 img = 'core-image-minimal' 1566 img = 'core-image-minimal'
@@ -1101,7 +1577,7 @@ class Wic2(WicTestCase):
1101 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir) 1577 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
1102 runCmd(cmd) 1578 runCmd(cmd)
1103 wksname = os.path.splitext(os.path.basename(wks.name))[0] 1579 wksname = os.path.splitext(os.path.basename(wks.name))[0]
1104 out = glob(self.resultdir + "%s-*direct" % wksname) 1580 out = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
1105 self.assertEqual(1, len(out)) 1581 self.assertEqual(1, len(out))
1106 1582
1107 def test_kickstart_parser(self): 1583 def test_kickstart_parser(self):
@@ -1113,7 +1589,7 @@ class Wic2(WicTestCase):
1113 cmd = "wic create %s -e core-image-minimal -o %s" % (wks.name, self.resultdir) 1589 cmd = "wic create %s -e core-image-minimal -o %s" % (wks.name, self.resultdir)
1114 runCmd(cmd) 1590 runCmd(cmd)
1115 wksname = os.path.splitext(os.path.basename(wks.name))[0] 1591 wksname = os.path.splitext(os.path.basename(wks.name))[0]
1116 out = glob(self.resultdir + "%s-*direct" % wksname) 1592 out = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
1117 self.assertEqual(1, len(out)) 1593 self.assertEqual(1, len(out))
1118 1594
1119 def test_image_bootpart_globbed(self): 1595 def test_image_bootpart_globbed(self):
@@ -1124,11 +1600,11 @@ class Wic2(WicTestCase):
1124 self.append_config(config) 1600 self.append_config(config)
1125 runCmd(cmd) 1601 runCmd(cmd)
1126 self.remove_config(config) 1602 self.remove_config(config)
1127 self.assertEqual(1, len(glob(self.resultdir + "sdimage-bootpart-*direct"))) 1603 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "sdimage-bootpart-*direct"))))
1128 1604
1129 def test_sparse_copy(self): 1605 def test_sparse_copy(self):
1130 """Test sparse_copy with FIEMAP and SEEK_HOLE filemap APIs""" 1606 """Test sparse_copy with FIEMAP and SEEK_HOLE filemap APIs"""
1131 libpath = os.path.join(get_bb_var('COREBASE'), 'scripts', 'lib', 'wic') 1607 libpath = os.path.join(self.td['COREBASE'], 'scripts', 'lib', 'wic')
1132 sys.path.insert(0, libpath) 1608 sys.path.insert(0, libpath)
1133 from filemap import FilemapFiemap, FilemapSeek, sparse_copy, ErrorNotSupp 1609 from filemap import FilemapFiemap, FilemapSeek, sparse_copy, ErrorNotSupp
1134 with NamedTemporaryFile("w", suffix=".wic-sparse") as sparse: 1610 with NamedTemporaryFile("w", suffix=".wic-sparse") as sparse:
@@ -1154,12 +1630,148 @@ class Wic2(WicTestCase):
1154 self.assertEqual(dest_stat.st_blocks, 8) 1630 self.assertEqual(dest_stat.st_blocks, 8)
1155 os.unlink(dest) 1631 os.unlink(dest)
1156 1632
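st_blocks counts 512-byte blocks actually allocated on disk, so a sparse file reports far fewer blocks than its nominal size implies; that is what the st_blocks assertions above measure. A standalone illustration using only the standard library:

import os, tempfile

with tempfile.NamedTemporaryFile(delete=False) as f:
    f.truncate(1024 * 1024)       # a 1 MiB hole, no data written
    path = f.name

st = os.stat(path)
# A fully sparse file allocates (almost) no blocks despite its size.
print(st.st_size, st.st_blocks * 512)
os.unlink(path)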
1633 def test_mkfs_extraopts(self):
1634 """Test wks option --mkfs-extraopts for empty and not empty partitions"""
1635 img = 'core-image-minimal'
1636 with NamedTemporaryFile("w", suffix=".wks") as wks:
1637 wks.writelines(
1638 ['part ext2 --fstype ext2 --source rootfs --mkfs-extraopts "-D -F -i 8192"\n',
1639 "part btrfs --fstype btrfs --source rootfs --size 40M --mkfs-extraopts='--quiet'\n",
1640 'part squash --fstype squashfs --source rootfs --mkfs-extraopts "-no-sparse -b 4096"\n',
1641 'part emptyvfat --fstype vfat --size 1M --mkfs-extraopts "-S 1024 -s 64"\n',
1642 'part emptymsdos --fstype msdos --size 1M --mkfs-extraopts "-S 1024 -s 64"\n',
1643 'part emptyext2 --fstype ext2 --size 1M --mkfs-extraopts "-D -F -i 8192"\n',
1644 'part emptybtrfs --fstype btrfs --size 100M --mkfs-extraopts "--mixed -K"\n'])
1645 wks.flush()
1646 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
1647 runCmd(cmd)
1648 wksname = os.path.splitext(os.path.basename(wks.name))[0]
1649 out = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
1650 self.assertEqual(1, len(out))
1651
1652 @skipIfNotArch(['i586', 'i686', 'x86_64'])
1653 @OETestTag("runqemu")
1654 def test_expand_mbr_image(self):
1655 """Test wic write --expand command for mbr image"""
1656 # build an image
1657 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "directdisk.wks"\n'
1658 self.append_config(config)
1659 image = 'core-image-minimal'
1660 bitbake(image)
1661
1662 # get path to the image
1663 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
1664 image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.wic' % bb_vars['IMAGE_LINK_NAME'])
1665
1666 self.remove_config(config)
1667
1668 try:
1669 # expand image to 1G
1670 new_image_path = None
1671 with NamedTemporaryFile(mode='wb', suffix='.wic.exp',
1672 dir=bb_vars['DEPLOY_DIR_IMAGE'], delete=False) as sparse:
1673 sparse.truncate(1024 ** 3)
1674 new_image_path = sparse.name
1675
1676 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
1677 cmd = "wic write -n %s --expand 1:0 %s %s" % (sysroot, image_path, new_image_path)
1678 runCmd(cmd)
1679
1680 # check if partitions are expanded
1681 orig = runCmd("wic ls %s -n %s" % (image_path, sysroot))
1682 exp = runCmd("wic ls %s -n %s" % (new_image_path, sysroot))
1683 orig_sizes = [int(line.split()[3]) for line in orig.output.split('\n')[1:]]
1684 exp_sizes = [int(line.split()[3]) for line in exp.output.split('\n')[1:]]
1685 self.assertEqual(orig_sizes[0], exp_sizes[0]) # first partition is not resized
1686 self.assertTrue(orig_sizes[1] < exp_sizes[1], msg="Partition size wasn't enlarged (%s vs %s)" % (orig_sizes[1], exp_sizes[1]))
1687
1688 # Check if all free space is partitioned
1689 result = runCmd("%s/usr/sbin/sfdisk -F %s" % (sysroot, new_image_path))
1690 self.assertIn("0 B, 0 bytes, 0 sectors", result.output)
1691
1692 os.rename(image_path, image_path + '.bak')
1693 os.rename(new_image_path, image_path)
1694
1695 runqemu_params = get_bb_var('TEST_RUNQEMUPARAMS', 'core-image-minimal') or ""
1696 with runqemu('core-image-minimal', ssh=False, runqemuparams='%s nographic' % (runqemu_params)) as qemu:
1697 cmd = "ls /etc/"
1698 status, output = qemu.run_serial(cmd)
1699 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
1700 finally:
1701 if os.path.exists(new_image_path):
1702 os.unlink(new_image_path)
1703 if os.path.exists(image_path + '.bak'):
1704 os.rename(image_path + '.bak', image_path)
1705
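The expansion check parses the size column (field 4) out of two 'wic ls' listings and compares them pairwise: the first partition must be untouched and the expanded one must have grown. A sketch of that comparison, with the listing text standing in for real wic ls output:

listing_before = """Num  Start  End    Size   Fstype
1    1024   10240  9216   vfat
2    10241  20480  10239  ext4"""
listing_after = """Num  Start  End     Size    Fstype
1    1024   10240   9216    vfat
2    10241  999999  989758  ext4"""

def sizes(listing):
    return [int(line.split()[3]) for line in listing.splitlines()[1:]]

before, after = sizes(listing_before), sizes(listing_after)
assert before[0] == after[0]      # first partition is not resized
assert before[1] < after[1]       # second grew into the extra space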
1706 def test_gpt_partition_name(self):
1707 """Test --part-name argument to set partition name in GPT table"""
1708 config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "test_gpt_partition_name.wks"\n'
1709 self.append_config(config)
1710 image = 'core-image-minimal'
1711 bitbake(image)
1712 self.remove_config(config)
1713 deploy_dir = get_bb_var('DEPLOY_DIR_IMAGE')
1714 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
1715 image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.wic' % bb_vars['IMAGE_LINK_NAME'])
1716
1717 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
1718
1719 # Check that the image was created
1720 self.assertTrue(os.path.exists(image_path), "image file %s doesn't exist" % image_path)
1721
1722 # Check the names of the three partitions
1723 # as listed in test_gpt_partition_name.wks
1724 result = runCmd("%s/usr/sbin/sfdisk --part-label %s 1" % (sysroot, image_path))
1725 self.assertEqual('boot-A', result.output)
1726 result = runCmd("%s/usr/sbin/sfdisk --part-label %s 2" % (sysroot, image_path))
1727 self.assertEqual('root-A', result.output)
1728 # When the --part-name is not defined, the partition name is equal to the --label
1729 result = runCmd("%s/usr/sbin/sfdisk --part-label %s 3" % (sysroot, image_path))
1730 self.assertEqual('ext-space', result.output)
1731
1732 def test_empty_zeroize_plugin(self):
1733 img = 'core-image-minimal'
1734 expected_size = [ 1024*1024, # 1M
1735 512*1024, # 512K
1736 2*1024*1024] # 2M
1737 # Check combinations of sourceparams
1738 with NamedTemporaryFile("w", suffix=".wks") as wks:
1739 wks.writelines(
1740 ['part empty --source empty --sourceparams="fill" --ondisk sda --fixed-size 1M\n',
1741 'part empty --source empty --sourceparams="size=512K" --ondisk sda --size 1M --align 1024\n',
1742 'part empty --source empty --sourceparams="size=2048k,bs=512K" --ondisk sda --size 4M --align 1024\n'
1743 ])
1744 wks.flush()
1745 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
1746 runCmd(cmd)
1747 wksname = os.path.splitext(os.path.basename(wks.name))[0]
1748 wicout = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
1749 # Skip the complete image and just look at the individual partitions
1750 for idx, value in enumerate(wicout[1:]):
1751 self.logger.info(wicout[idx])
1752 # Check if partitions are actually zeroized
1753 with open(wicout[idx], mode="rb") as fd:
1754 ba = bytearray(fd.read())
1755 for b in ba:
1756 self.assertEqual(b, 0)
1757 self.assertEqual(expected_size[idx], os.path.getsize(wicout[idx]))
1758
1759 # Check that the inconsistent combination of "fill" and "--size" is rejected
1760 with NamedTemporaryFile("w", suffix=".wks") as wks:
1761 wks.writelines(['part empty --source empty --sourceparams="fill" --ondisk sda --size 1M\n'])
1762 wks.flush()
1763 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
1764 result = runCmd(cmd, ignore_status=True)
1765 self.assertIn("Source parameter 'fill' only works with the '--fixed-size' option, exiting.", result.output)
1766 self.assertNotEqual(0, result.status)
1767
1768 class ModifyTests(WicTestCase):
1157 def test_wic_ls(self): 1769 def test_wic_ls(self):
1158 """Test listing image content using 'wic ls'""" 1770 """Test listing image content using 'wic ls'"""
1159 runCmd("wic create wictestdisk " 1771 runCmd("wic create wictestdisk "
1160 "--image-name=core-image-minimal " 1772 "--image-name=core-image-minimal "
1161 "-D -o %s" % self.resultdir) 1773 "-D -o %s" % self.resultdir)
1162 images = glob(self.resultdir + "wictestdisk-*.direct") 1774 images = glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))
1163 self.assertEqual(1, len(images)) 1775 self.assertEqual(1, len(images))
1164 1776
1165 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') 1777 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
@@ -1177,7 +1789,7 @@ class Wic2(WicTestCase):
1177 runCmd("wic create wictestdisk " 1789 runCmd("wic create wictestdisk "
1178 "--image-name=core-image-minimal " 1790 "--image-name=core-image-minimal "
1179 "-D -o %s" % self.resultdir) 1791 "-D -o %s" % self.resultdir)
1180 images = glob(self.resultdir + "wictestdisk-*.direct") 1792 images = glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))
1181 self.assertEqual(1, len(images)) 1793 self.assertEqual(1, len(images))
1182 1794
1183 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') 1795 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
@@ -1195,7 +1807,7 @@ class Wic2(WicTestCase):
1195 # check if file is there 1807 # check if file is there
1196 result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot)) 1808 result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot))
1197 self.assertEqual(7, len(result.output.split('\n'))) 1809 self.assertEqual(7, len(result.output.split('\n')))
1198 self.assertTrue(os.path.basename(testfile.name) in result.output) 1810 self.assertIn(os.path.basename(testfile.name), result.output)
1199 1811
1200 # prepare directory 1812 # prepare directory
1201 testdir = os.path.join(self.resultdir, 'wic-test-cp-dir') 1813 testdir = os.path.join(self.resultdir, 'wic-test-cp-dir')
@@ -1209,13 +1821,13 @@ class Wic2(WicTestCase):
1209 # check if directory is there 1821 # check if directory is there
1210 result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot)) 1822 result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot))
1211 self.assertEqual(8, len(result.output.split('\n'))) 1823 self.assertEqual(8, len(result.output.split('\n')))
1212 self.assertTrue(os.path.basename(testdir) in result.output) 1824 self.assertIn(os.path.basename(testdir), result.output)
1213 1825
1214 # copy the file from the partition and check that it succeeds 1826 # copy the file from the partition and check that it succeeds
1215 dest = '%s-cp' % testfile.name 1827 dest = '%s-cp' % testfile.name
1216 runCmd("wic cp %s:1/%s %s -n %s" % (images[0], 1828 runCmd("wic cp %s:1/%s %s -n %s" % (images[0],
1217 os.path.basename(testfile.name), dest, sysroot)) 1829 os.path.basename(testfile.name), dest, sysroot))
1218 self.assertTrue(os.path.exists(dest)) 1830 self.assertTrue(os.path.exists(dest), msg="File %s wasn't generated as expected" % dest)
1219 1831
1220 1832
1221 def test_wic_rm(self): 1833 def test_wic_rm(self):
@@ -1223,105 +1835,35 @@ class Wic2(WicTestCase):
1223 runCmd("wic create mkefidisk " 1835 runCmd("wic create mkefidisk "
1224 "--image-name=core-image-minimal " 1836 "--image-name=core-image-minimal "
1225 "-D -o %s" % self.resultdir) 1837 "-D -o %s" % self.resultdir)
1226 images = glob(self.resultdir + "mkefidisk-*.direct") 1838 images = glob(os.path.join(self.resultdir, "mkefidisk-*.direct"))
1227 self.assertEqual(1, len(images)) 1839 self.assertEqual(1, len(images))
1228 1840
1229 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') 1841 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
1842 # Not bulletproof but hopefully sufficient
1843 kerneltype = get_bb_var('KERNEL_IMAGETYPE', 'virtual/kernel')
1230 1844
1231 # list directory content of the first partition 1845 # list directory content of the first partition
1232 result = runCmd("wic ls %s:1 -n %s" % (images[0], sysroot)) 1846 result = runCmd("wic ls %s:1 -n %s" % (images[0], sysroot))
1233 self.assertIn('\nBZIMAGE ', result.output) 1847 self.assertIn('\n%s ' % kerneltype.upper(), result.output)
1234 self.assertIn('\nEFI <DIR> ', result.output) 1848 self.assertIn('\nEFI <DIR> ', result.output)
1235 1849
1236 # remove file 1850 # remove file. EFI partitions are case-insensitive so exercise that too
1237 runCmd("wic rm %s:1/bzimage -n %s" % (images[0], sysroot)) 1851 runCmd("wic rm %s:1/%s -n %s" % (images[0], kerneltype.lower(), sysroot))
1238 1852
1239 # remove directory 1853 # remove directory
1240 runCmd("wic rm %s:1/efi -n %s" % (images[0], sysroot)) 1854 runCmd("wic rm %s:1/efi -n %s" % (images[0], sysroot))
1241 1855
1242 # check if they're removed 1856 # check if they're removed
1243 result = runCmd("wic ls %s:1 -n %s" % (images[0], sysroot)) 1857 result = runCmd("wic ls %s:1 -n %s" % (images[0], sysroot))
1244 self.assertNotIn('\nBZIMAGE ', result.output) 1858 self.assertNotIn('\n%s ' % kerneltype.upper(), result.output)
1245 self.assertNotIn('\nEFI <DIR> ', result.output) 1859 self.assertNotIn('\nEFI <DIR> ', result.output)
1246 1860
1247 def test_mkfs_extraopts(self):
1248 """Test wks option --mkfs-extraopts for empty and not empty partitions"""
1249 img = 'core-image-minimal'
1250 with NamedTemporaryFile("w", suffix=".wks") as wks:
1251 wks.writelines(
1252 ['part ext2 --fstype ext2 --source rootfs --mkfs-extraopts "-D -F -i 8192"\n',
1253 "part btrfs --fstype btrfs --source rootfs --size 40M --mkfs-extraopts='--quiet'\n",
1254 'part squash --fstype squashfs --source rootfs --mkfs-extraopts "-no-sparse -b 4096"\n',
1255 'part emptyvfat --fstype vfat --size 1M --mkfs-extraopts "-S 1024 -s 64"\n',
1256 'part emptymsdos --fstype msdos --size 1M --mkfs-extraopts "-S 1024 -s 64"\n',
1257 'part emptyext2 --fstype ext2 --size 1M --mkfs-extraopts "-D -F -i 8192"\n',
1258 'part emptybtrfs --fstype btrfs --size 100M --mkfs-extraopts "--mixed -K"\n'])
1259 wks.flush()
1260 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
1261 runCmd(cmd)
1262 wksname = os.path.splitext(os.path.basename(wks.name))[0]
1263 out = glob(self.resultdir + "%s-*direct" % wksname)
1264 self.assertEqual(1, len(out))
1265
1266 def test_expand_mbr_image(self):
1267 """Test wic write --expand command for mbr image"""
1268 # build an image
1269 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "directdisk.wks"\n'
1270 self.append_config(config)
1271 self.assertEqual(0, bitbake('core-image-minimal').status)
1272
1273 # get path to the image
1274 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'MACHINE'])
1275 deploy_dir = bb_vars['DEPLOY_DIR_IMAGE']
1276 machine = bb_vars['MACHINE']
1277 image_path = os.path.join(deploy_dir, 'core-image-minimal-%s.wic' % machine)
1278
1279 self.remove_config(config)
1280
1281 try:
1282 # expand image to 1G
1283 new_image_path = None
1284 with NamedTemporaryFile(mode='wb', suffix='.wic.exp',
1285 dir=deploy_dir, delete=False) as sparse:
1286 sparse.truncate(1024 ** 3)
1287 new_image_path = sparse.name
1288
1289 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
1290 cmd = "wic write -n %s --expand 1:0 %s %s" % (sysroot, image_path, new_image_path)
1291 runCmd(cmd)
1292
1293 # check if partitions are expanded
1294 orig = runCmd("wic ls %s -n %s" % (image_path, sysroot))
1295 exp = runCmd("wic ls %s -n %s" % (new_image_path, sysroot))
1296 orig_sizes = [int(line.split()[3]) for line in orig.output.split('\n')[1:]]
1297 exp_sizes = [int(line.split()[3]) for line in exp.output.split('\n')[1:]]
1298 self.assertEqual(orig_sizes[0], exp_sizes[0]) # first partition is not resized
1299 self.assertTrue(orig_sizes[1] < exp_sizes[1])
1300
1301 # Check if all free space is partitioned
1302 result = runCmd("%s/usr/sbin/sfdisk -F %s" % (sysroot, new_image_path))
1303 self.assertTrue("0 B, 0 bytes, 0 sectors" in result.output)
1304
1305 os.rename(image_path, image_path + '.bak')
1306 os.rename(new_image_path, image_path)
1307
1308 # Check if it boots in qemu
1309 with runqemu('core-image-minimal', ssh=False) as qemu:
1310 cmd = "ls /etc/"
1311 status, output = qemu.run_serial('true')
1312 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
1313 finally:
1314 if os.path.exists(new_image_path):
1315 os.unlink(new_image_path)
1316 if os.path.exists(image_path + '.bak'):
1317 os.rename(image_path + '.bak', image_path)
1318
1319 def test_wic_ls_ext(self): 1861 def test_wic_ls_ext(self):
1320 """Test listing content of the ext partition using 'wic ls'""" 1862 """Test listing content of the ext partition using 'wic ls'"""
1321 runCmd("wic create wictestdisk " 1863 runCmd("wic create wictestdisk "
1322 "--image-name=core-image-minimal " 1864 "--image-name=core-image-minimal "
1323 "-D -o %s" % self.resultdir) 1865 "-D -o %s" % self.resultdir)
1324 images = glob(self.resultdir + "wictestdisk-*.direct") 1866 images = glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))
1325 self.assertEqual(1, len(images)) 1867 self.assertEqual(1, len(images))
1326 1868
1327 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') 1869 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
@@ -1329,14 +1871,14 @@ class Wic2(WicTestCase):
1329 # list directory content of the second ext4 partition 1871 # list directory content of the second ext4 partition
1330 result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot)) 1872 result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot))
1331 self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset( 1873 self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset(
1332 set(line.split()[-1] for line in result.output.split('\n') if line))) 1874 set(line.split()[-1] for line in result.output.split('\n') if line)), msg="Expected directories not present %s" % result.output)
1333 1875
1334 def test_wic_cp_ext(self): 1876 def test_wic_cp_ext(self):
1335 """Test copy files and directories to the ext partition.""" 1877 """Test copy files and directories to the ext partition."""
1336 runCmd("wic create wictestdisk " 1878 runCmd("wic create wictestdisk "
1337 "--image-name=core-image-minimal " 1879 "--image-name=core-image-minimal "
1338 "-D -o %s" % self.resultdir) 1880 "-D -o %s" % self.resultdir)
1339 images = glob(self.resultdir + "wictestdisk-*.direct") 1881 images = glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))
1340 self.assertEqual(1, len(images)) 1882 self.assertEqual(1, len(images))
1341 1883
1342 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') 1884 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
@@ -1344,7 +1886,7 @@ class Wic2(WicTestCase):
1344 # list directory content of the ext4 partition 1886 # list directory content of the ext4 partition
1345 result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot)) 1887 result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot))
1346 dirs = set(line.split()[-1] for line in result.output.split('\n') if line) 1888 dirs = set(line.split()[-1] for line in result.output.split('\n') if line)
1347 self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset(dirs)) 1889 self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset(dirs), msg="Expected directories not present %s" % dirs)
1348 1890
1349 with NamedTemporaryFile("w", suffix=".wic-cp") as testfile: 1891 with NamedTemporaryFile("w", suffix=".wic-cp") as testfile:
1350 testfile.write("test") 1892 testfile.write("test")
@@ -1359,12 +1901,12 @@ class Wic2(WicTestCase):
1359 1901
1360 # check if the file to copy is in the partition 1902 # check if the file to copy is in the partition
1361 result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot)) 1903 result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot))
1362 self.assertTrue('fstab' in [line.split()[-1] for line in result.output.split('\n') if line]) 1904 self.assertIn('fstab', [line.split()[-1] for line in result.output.split('\n') if line])
1363 1905
1364 # copy file from the partition, replace the temporary file content with it and 1906 # copy file from the partition, replace the temporary file content with it and
1365 # check for the file size to validate the copy 1907 # check for the file size to validate the copy
1366 runCmd("wic cp %s:2/etc/fstab %s -n %s" % (images[0], testfile.name, sysroot)) 1908 runCmd("wic cp %s:2/etc/fstab %s -n %s" % (images[0], testfile.name, sysroot))
1367 self.assertTrue(os.stat(testfile.name).st_size > 0) 1909 self.assertTrue(os.stat(testfile.name).st_size > 0, msg="Filesize not as expected %s" % os.stat(testfile.name).st_size)
1368 1910
1369 1911
1370 def test_wic_rm_ext(self): 1912 def test_wic_rm_ext(self):
@@ -1372,25 +1914,25 @@ class Wic2(WicTestCase):
1372 runCmd("wic create mkefidisk " 1914 runCmd("wic create mkefidisk "
1373 "--image-name=core-image-minimal " 1915 "--image-name=core-image-minimal "
1374 "-D -o %s" % self.resultdir) 1916 "-D -o %s" % self.resultdir)
1375 images = glob(self.resultdir + "mkefidisk-*.direct") 1917 images = glob(os.path.join(self.resultdir, "mkefidisk-*.direct"))
1376 self.assertEqual(1, len(images)) 1918 self.assertEqual(1, len(images))
1377 1919
1378 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') 1920 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
1379 1921
1380 # list directory content of the /etc directory on ext4 partition 1922 # list directory content of the /etc directory on ext4 partition
1381 result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot)) 1923 result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot))
1382 self.assertTrue('fstab' in [line.split()[-1] for line in result.output.split('\n') if line]) 1924 self.assertIn('fstab', [line.split()[-1] for line in result.output.split('\n') if line])
1383 1925
1384 # remove file 1926 # remove file
1385 runCmd("wic rm %s:2/etc/fstab -n %s" % (images[0], sysroot)) 1927 runCmd("wic rm %s:2/etc/fstab -n %s" % (images[0], sysroot))
1386 1928
1387 # check if it's removed 1929 # check if it's removed
1388 result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot)) 1930 result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot))
1389 self.assertTrue('fstab' not in [line.split()[-1] for line in result.output.split('\n') if line]) 1931 self.assertNotIn('fstab', [line.split()[-1] for line in result.output.split('\n') if line])
1390 1932
1391 # remove non-empty directory 1933 # remove non-empty directory
1392 runCmd("wic rm -r %s:2/etc/ -n %s" % (images[0], sysroot)) 1934 runCmd("wic rm -r %s:2/etc/ -n %s" % (images[0], sysroot))
1393 1935
1394 # check if it's removed 1936 # check if it's removed
1395 result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot)) 1937 result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot))
1396 self.assertTrue('etc' not in [line.split()[-1] for line in result.output.split('\n') if line]) 1938 self.assertNotIn('etc', [line.split()[-1] for line in result.output.split('\n') if line])
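Note: the kernel checks above derive the expected FAT directory entry from KERNEL_IMAGETYPE instead of hardcoding BZIMAGE, so the test also passes on machines whose kernel image is e.g. 'Image' or 'zImage'. A minimal sketch of the naming logic (the helper name is illustrative, not part of the patch):

    def expected_fat_entry(kernel_imagetype):
        # 'wic ls' prints FAT (mdir) entries in upper case, e.g. '\nBZIMAGE '
        return '\n%s ' % kernel_imagetype.upper()

    assert expected_fat_entry('bzImage') == '\nBZIMAGE '
    assert expected_fat_entry('Image') == '\nIMAGE '   # e.g. arm64 kernels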
diff --git a/meta/lib/oeqa/selftest/cases/wrapper.py b/meta/lib/oeqa/selftest/cases/wrapper.py
new file mode 100644
index 0000000000..f2be44262c
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/wrapper.py
@@ -0,0 +1,16 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6from oeqa.selftest.case import OESelftestTestCase
7from oeqa.utils.commands import bitbake
8
9class WrapperTests(OESelftestTestCase):
10 def test_shebang_wrapper(self):
11 """
12 Summary: Build a recipe which will fail if the cmdline_shebang_wrapper function is defective.
13 Expected: Exit status to be 0.
14 Author: Paulo Neves <ptsneves@gmail.com>
15 """
16 bitbake("cmdline-shebang-wrapper-test -c install", ignore_status=False)
diff --git a/meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py b/meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py
new file mode 100644
index 0000000000..312edb6431
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py
@@ -0,0 +1,39 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8import sys
9import subprocess
10import shutil
11from oeqa.selftest.case import OESelftestTestCase
12basepath = os.path.abspath(os.path.dirname(__file__) + '/../../../../../')
13lib_path = basepath + '/scripts/lib'
14sys.path = sys.path + [lib_path]
15from yocto_testresults_query import get_sha1, create_workdir
16
17
18class TestResultsQueryTests(OESelftestTestCase):
19 def test_get_sha1(self):
20 test_data_get_sha1 = [
21 {"input": "yocto-4.0", "expected": "00cfdde791a0176c134f31e5a09eff725e75b905"},
22 {"input": "4.1_M1", "expected": "95066dde6861ee08fdb505ab3e0422156cc24fae"},
23 ]
24 for data in test_data_get_sha1:
25 test_name = data["input"]
26 with self.subTest(f"Test SHA1 from {test_name}"):
27 self.assertEqual(
28 get_sha1(basepath, data["input"]), data["expected"])
29
30 def test_create_workdir(self):
31 workdir = create_workdir()
32 try:
33 url = subprocess.check_output(
34 ["git", "-C", workdir, "remote", "get-url", "origin"]).strip().decode("utf-8")
35 except Exception:
36 shutil.rmtree(workdir, ignore_errors=True)
37 self.fail(f"Cannot execute git commands in {workdir}")
38 shutil.rmtree(workdir)
39 self.assertEqual(url, "git://git.yoctoproject.org/yocto-testresults")
diff --git a/meta/lib/oeqa/selftest/context.py b/meta/lib/oeqa/selftest/context.py
index 1659926975..16f82c6737 100644
--- a/meta/lib/oeqa/selftest/context.py
+++ b/meta/lib/oeqa/selftest/context.py
@@ -16,19 +16,32 @@ from random import choice
16import oeqa 16import oeqa
17import oe 17import oe
18import bb.utils 18import bb.utils
19import bb.tinfoil
19 20
20from oeqa.core.context import OETestContext, OETestContextExecutor 21from oeqa.core.context import OETestContext, OETestContextExecutor
21from oeqa.core.exception import OEQAPreRun, OEQATestNotFound 22from oeqa.core.exception import OEQAPreRun, OEQATestNotFound
22 23
23from oeqa.utils.commands import runCmd, get_bb_vars, get_test_layer 24from oeqa.utils.commands import runCmd, get_bb_vars, get_test_layer
24 25
26OESELFTEST_METADATA=["run_all_tests", "run_tests", "skips", "machine", "select_tags", "exclude_tags"]
27
28def get_oeselftest_metadata(args):
29 result = {}
30 raw_args = vars(args)
31 for metadata in OESELFTEST_METADATA:
32 if metadata in raw_args:
33 result[metadata] = raw_args[metadata]
34
35 return result
36
25class NonConcurrentTestSuite(unittest.TestSuite): 37class NonConcurrentTestSuite(unittest.TestSuite):
26 def __init__(self, suite, processes, setupfunc, removefunc): 38 def __init__(self, suite, processes, setupfunc, removefunc, bb_vars):
27 super().__init__([suite]) 39 super().__init__([suite])
28 self.processes = processes 40 self.processes = processes
29 self.suite = suite 41 self.suite = suite
30 self.setupfunc = setupfunc 42 self.setupfunc = setupfunc
31 self.removefunc = removefunc 43 self.removefunc = removefunc
44 self.bb_vars = bb_vars
32 45
33 def run(self, result): 46 def run(self, result):
34 (builddir, newbuilddir) = self.setupfunc("-st", None, self.suite) 47 (builddir, newbuilddir) = self.setupfunc("-st", None, self.suite)
@@ -39,7 +52,7 @@ class NonConcurrentTestSuite(unittest.TestSuite):
39 52
40def removebuilddir(d): 53def removebuilddir(d):
41 delay = 5 54 delay = 5
42 while delay and os.path.exists(d + "/bitbake.lock"): 55 while delay and (os.path.exists(d + "/bitbake.lock") or os.path.exists(d + "/cache/hashserv.db-wal")):
43 time.sleep(1) 56 time.sleep(1)
44 delay = delay - 1 57 delay = delay - 1
45 # Deleting these directories takes a lot of time, use autobuilder 58 # Deleting these directories takes a lot of time, use autobuilder
@@ -57,8 +70,6 @@ class OESelftestTestContext(OETestContext):
57 def __init__(self, td=None, logger=None, machines=None, config_paths=None, newbuilddir=None, keep_builddir=None): 70 def __init__(self, td=None, logger=None, machines=None, config_paths=None, newbuilddir=None, keep_builddir=None):
58 super(OESelftestTestContext, self).__init__(td, logger) 71 super(OESelftestTestContext, self).__init__(td, logger)
59 72
60 self.machines = machines
61 self.custommachine = None
62 self.config_paths = config_paths 73 self.config_paths = config_paths
63 self.newbuilddir = newbuilddir 74 self.newbuilddir = newbuilddir
64 75
@@ -67,10 +78,15 @@ class OESelftestTestContext(OETestContext):
67 else: 78 else:
68 self.removebuilddir = removebuilddir 79 self.removebuilddir = removebuilddir
69 80
81 def set_variables(self, vars):
82 self.bb_vars = vars
83
70 def setup_builddir(self, suffix, selftestdir, suite): 84 def setup_builddir(self, suffix, selftestdir, suite):
85 sstatedir = self.bb_vars['SSTATE_DIR']
86
71 builddir = os.environ['BUILDDIR'] 87 builddir = os.environ['BUILDDIR']
72 if not selftestdir: 88 if not selftestdir:
73 selftestdir = get_test_layer() 89 selftestdir = get_test_layer(self.bb_vars['BBLAYERS'])
74 if self.newbuilddir: 90 if self.newbuilddir:
75 newbuilddir = os.path.join(self.newbuilddir, 'build' + suffix) 91 newbuilddir = os.path.join(self.newbuilddir, 'build' + suffix)
76 else: 92 else:
@@ -86,16 +102,40 @@ class OESelftestTestContext(OETestContext):
86 oe.path.copytree(builddir + "/cache", newbuilddir + "/cache") 102 oe.path.copytree(builddir + "/cache", newbuilddir + "/cache")
87 oe.path.copytree(selftestdir, newselftestdir) 103 oe.path.copytree(selftestdir, newselftestdir)
88 104
105 # If the last line of local.conf in newbuilddir is not empty and does not end with a newline, add one
106 localconf_path = newbuilddir + "/conf/local.conf"
107 with open(localconf_path, "r+", encoding="utf-8") as f:
108 last_line = f.readlines()[-1]
109 if last_line and not last_line.endswith("\n"):
110 f.write("\n")
111
112 subprocess.check_output("git init && git add * && git commit -a -m 'initial'", cwd=newselftestdir, shell=True)
113
114 # Tried to use bitbake-layers add/remove but it requires recipe parsing and hence is too slow
115 subprocess.check_output("sed %s/conf/bblayers.conf -i -e 's#%s#%s#g'" % (newbuilddir, selftestdir, newselftestdir), cwd=newbuilddir, shell=True)
116
117 # Relative paths in BBLAYERS only work when the new build dir shares the same ancestor directory
118 if self.newbuilddir:
119 bblayers = subprocess.check_output("bitbake-getvar --value BBLAYERS | tail -1", cwd=builddir, shell=True, text=True)
120 if '..' in bblayers:
121 bblayers_abspath = [os.path.abspath(path) for path in bblayers.split()]
122 with open("%s/conf/bblayers.conf" % newbuilddir, "a") as f:
123 newbblayers = "# new bblayers to be used by selftest in the new build dir '%s'\n" % newbuilddir
124 newbblayers += 'unset BBLAYERS\n'
125 newbblayers += 'BBLAYERS = "%s"\n' % ' '.join(bblayers_abspath)
126 f.write(newbblayers)
127
128 # Rewrite builddir paths seen in environment variables
89 for e in os.environ: 129 for e in os.environ:
90 if builddir + "/" in os.environ[e]: 130 # Rewrite paths that absolutely point inside builddir
131 # (e.g. $builddir/conf/ would be rewritten but not $builddir/../bitbake/)
132 if builddir + "/" in os.environ[e] and builddir + "/" in os.path.abspath(os.environ[e]):
91 os.environ[e] = os.environ[e].replace(builddir + "/", newbuilddir + "/") 133 os.environ[e] = os.environ[e].replace(builddir + "/", newbuilddir + "/")
92 if os.environ[e].endswith(builddir): 134 if os.environ[e].endswith(builddir):
93 os.environ[e] = os.environ[e].replace(builddir, newbuilddir) 135 os.environ[e] = os.environ[e].replace(builddir, newbuilddir)
94 136
95 subprocess.check_output("git init; git add *; git commit -a -m 'initial'", cwd=newselftestdir, shell=True) 137 # Set SSTATE_DIR to match the parent SSTATE_DIR
96 138 subprocess.check_output("echo 'SSTATE_DIR ?= \"%s\"' >> %s/conf/local.conf" % (sstatedir, newbuilddir), cwd=newbuilddir, shell=True)
97 # Tried to used bitbake-layers add/remove but it requires recipe parsing and hence is too slow
98 subprocess.check_output("sed %s/conf/bblayers.conf -i -e 's#%s#%s#g'" % (newbuilddir, selftestdir, newselftestdir), cwd=newbuilddir, shell=True)
99 139
100 os.chdir(newbuilddir) 140 os.chdir(newbuilddir)
101 141
@@ -124,17 +164,11 @@ class OESelftestTestContext(OETestContext):
124 if processes: 164 if processes:
125 from oeqa.core.utils.concurrencytest import ConcurrentTestSuite 165 from oeqa.core.utils.concurrencytest import ConcurrentTestSuite
126 166
127 return ConcurrentTestSuite(suites, processes, self.setup_builddir, self.removebuilddir) 167 return ConcurrentTestSuite(suites, processes, self.setup_builddir, self.removebuilddir, self.bb_vars)
128 else: 168 else:
129 return NonConcurrentTestSuite(suites, processes, self.setup_builddir, self.removebuilddir) 169 return NonConcurrentTestSuite(suites, processes, self.setup_builddir, self.removebuilddir, self.bb_vars)
130 170
131 def runTests(self, processes=None, machine=None, skips=[]): 171 def runTests(self, processes=None, machine=None, skips=[]):
132 if machine:
133 self.custommachine = machine
134 if machine == 'random':
135 self.custommachine = choice(self.machines)
136 self.logger.info('Run tests with custom MACHINE set to: %s' % \
137 self.custommachine)
138 return super(OESelftestTestContext, self).runTests(processes, skips) 172 return super(OESelftestTestContext, self).runTests(processes, skips)
139 173
140 def listTests(self, display_type, machine=None): 174 def listTests(self, display_type, machine=None):
@@ -154,9 +188,6 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
154 group.add_argument('-a', '--run-all-tests', default=False, 188 group.add_argument('-a', '--run-all-tests', default=False,
155 action="store_true", dest="run_all_tests", 189 action="store_true", dest="run_all_tests",
156 help='Run all (unhidden) tests') 190 help='Run all (unhidden) tests')
157 group.add_argument('-R', '--skip-tests', required=False, action='store',
158 nargs='+', dest="skips", default=None,
159 help='Run all (unhidden) tests except the ones specified. Format should be <module>[.<class>[.<test_method>]]')
160 group.add_argument('-r', '--run-tests', required=False, action='store', 191 group.add_argument('-r', '--run-tests', required=False, action='store',
161 nargs='+', dest="run_tests", default=None, 192 nargs='+', dest="run_tests", default=None,
162 help='Select what tests to run (modules, classes or test methods). Format should be: <module>.<class>.<test_method>') 193 help='Select what tests to run (modules, classes or test methods). Format should be: <module>.<class>.<test_method>')
@@ -171,11 +202,26 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
171 action="store_true", default=False, 202 action="store_true", default=False,
172 help='List all available tests.') 203 help='List all available tests.')
173 204
174 parser.add_argument('-j', '--num-processes', dest='processes', action='store', 205 parser.add_argument('-R', '--skip-tests', required=False, action='store',
175 type=int, help="number of processes to execute in parallel with") 206 nargs='+', dest="skips", default=None,
207 help='Skip the tests specified. Format should be <module>[.<class>[.<test_method>]]')
208
209 def check_parallel_support(parameter):
210 if not parameter.isdigit():
211 import argparse
212 raise argparse.ArgumentTypeError("argument -j/--num-processes: invalid int value: '%s' " % str(parameter))
213
214 processes = int(parameter)
215 if processes:
216 try:
217 import testtools, subunit
218 except ImportError:
219 print("Failed to import testtools or subunit, the testcases will run serially")
220 processes = None
221 return processes
176 222
177 parser.add_argument('--machine', required=False, choices=['random', 'all'], 223 parser.add_argument('-j', '--num-processes', dest='processes', action='store',
178 help='Run tests on different machines (random/all).') 224 type=check_parallel_support, help="number of processes to execute in parallel with")
179 225
180 parser.add_argument('-t', '--select-tag', dest="select_tags", 226 parser.add_argument('-t', '--select-tag', dest="select_tags",
181 action='append', default=None, 227 action='append', default=None,
@@ -191,20 +237,6 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
191 parser.add_argument('-v', '--verbose', action='store_true') 237 parser.add_argument('-v', '--verbose', action='store_true')
192 parser.set_defaults(func=self.run) 238 parser.set_defaults(func=self.run)
193 239
194 def _get_available_machines(self):
195 machines = []
196
197 bbpath = self.tc_kwargs['init']['td']['BBPATH'].split(':')
198
199 for path in bbpath:
200 found_machines = glob.glob(os.path.join(path, 'conf', 'machine', '*.conf'))
201 if found_machines:
202 for i in found_machines:
203 # eg: '/home/<user>/poky/meta-intel/conf/machine/intel-core2-32.conf'
204 machines.append(os.path.splitext(os.path.basename(i))[0])
205
206 return machines
207
208 def _get_cases_paths(self, bbpath): 240 def _get_cases_paths(self, bbpath):
209 cases_paths = [] 241 cases_paths = []
210 for layer in bbpath: 242 for layer in bbpath:
@@ -235,11 +267,10 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
235 args.list_tests = 'name' 267 args.list_tests = 'name'
236 268
237 self.tc_kwargs['init']['td'] = bbvars 269 self.tc_kwargs['init']['td'] = bbvars
238 self.tc_kwargs['init']['machines'] = self._get_available_machines()
239 270
240 builddir = os.environ.get("BUILDDIR") 271 builddir = os.environ.get("BUILDDIR")
241 self.tc_kwargs['init']['config_paths'] = {} 272 self.tc_kwargs['init']['config_paths'] = {}
242 self.tc_kwargs['init']['config_paths']['testlayer_path'] = get_test_layer() 273 self.tc_kwargs['init']['config_paths']['testlayer_path'] = get_test_layer(bbvars["BBLAYERS"])
243 self.tc_kwargs['init']['config_paths']['builddir'] = builddir 274 self.tc_kwargs['init']['config_paths']['builddir'] = builddir
244 self.tc_kwargs['init']['config_paths']['localconf'] = os.path.join(builddir, "conf/local.conf") 275 self.tc_kwargs['init']['config_paths']['localconf'] = os.path.join(builddir, "conf/local.conf")
245 self.tc_kwargs['init']['config_paths']['bblayers'] = os.path.join(builddir, "conf/bblayers.conf") 276 self.tc_kwargs['init']['config_paths']['bblayers'] = os.path.join(builddir, "conf/bblayers.conf")
@@ -275,14 +306,14 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
275 os.chdir(builddir) 306 os.chdir(builddir)
276 307
277 if not "meta-selftest" in self.tc.td["BBLAYERS"]: 308 if not "meta-selftest" in self.tc.td["BBLAYERS"]:
278 self.tc.logger.warning("meta-selftest layer not found in BBLAYERS, adding it") 309 self.tc.logger.info("meta-selftest layer not found in BBLAYERS, adding it")
279 meta_selftestdir = os.path.join( 310 meta_selftestdir = os.path.join(
280 self.tc.td["BBLAYERS_FETCH_DIR"], 'meta-selftest') 311 self.tc.td["BBLAYERS_FETCH_DIR"], 'meta-selftest')
281 if os.path.isdir(meta_selftestdir): 312 if os.path.isdir(meta_selftestdir):
282 runCmd("bitbake-layers add-layer %s" %meta_selftestdir) 313 runCmd("bitbake-layers add-layer %s" % meta_selftestdir)
283 # reloading the data is needed because a meta-selftest layer was added 314 # reloading the data is needed because a meta-selftest layer was added
284 self.tc.td = get_bb_vars() 315 self.tc.td = get_bb_vars()
285 self.tc.config_paths['testlayer_path'] = get_test_layer() 316 self.tc.config_paths['testlayer_path'] = get_test_layer(self.tc.td["BBLAYERS"])
286 else: 317 else:
287 self.tc.logger.error("could not locate meta-selftest in:\n%s" % meta_selftestdir) 318 self.tc.logger.error("could not locate meta-selftest in:\n%s" % meta_selftestdir)
288 raise OEQAPreRun 319 raise OEQAPreRun
@@ -320,8 +351,15 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
320 351
321 _add_layer_libs() 352 _add_layer_libs()
322 353
323 self.tc.logger.info("Running bitbake -e to test the configuration is valid/parsable") 354 self.tc.logger.info("Checking base configuration is valid/parsable")
324 runCmd("bitbake -e") 355
356 with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
357 tinfoil.prepare(quiet=2, config_only=True)
358 d = tinfoil.config_data
359 vars = {}
360 vars['SSTATE_DIR'] = str(d.getVar('SSTATE_DIR'))
361 vars['BBLAYERS'] = str(d.getVar('BBLAYERS'))
362 self.tc.set_variables(vars)
325 363
326 def get_json_result_dir(self, args): 364 def get_json_result_dir(self, args):
327 json_result_dir = os.path.join(self.tc.td["LOG_DIR"], 'oeqa') 365 json_result_dir = os.path.join(self.tc.td["LOG_DIR"], 'oeqa')
@@ -334,12 +372,14 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
334 import platform 372 import platform
335 from oeqa.utils.metadata import metadata_from_bb 373 from oeqa.utils.metadata import metadata_from_bb
336 metadata = metadata_from_bb() 374 metadata = metadata_from_bb()
375 oeselftest_metadata = get_oeselftest_metadata(args)
337 configuration = {'TEST_TYPE': 'oeselftest', 376 configuration = {'TEST_TYPE': 'oeselftest',
338 'STARTTIME': args.test_start_time, 377 'STARTTIME': args.test_start_time,
339 'MACHINE': self.tc.td["MACHINE"], 378 'MACHINE': self.tc.td["MACHINE"],
340 'HOST_DISTRO': oe.lsb.distro_identifier().replace(' ', '-'), 379 'HOST_DISTRO': oe.lsb.distro_identifier().replace(' ', '-'),
341 'HOST_NAME': metadata['hostname'], 380 'HOST_NAME': metadata['hostname'],
342 'LAYERS': metadata['layers']} 381 'LAYERS': metadata['layers'],
382 'OESELFTEST_METADATA': oeselftest_metadata}
343 return configuration 383 return configuration
344 384
345 def get_result_id(self, configuration): 385 def get_result_id(self, configuration):
@@ -374,37 +414,14 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
374 414
375 rc = None 415 rc = None
376 try: 416 try:
377 if args.machine: 417 rc = self._internal_run(logger, args)
378 logger.info('Custom machine mode enabled. MACHINE set to %s' %
379 args.machine)
380
381 if args.machine == 'all':
382 results = []
383 for m in self.tc_kwargs['init']['machines']:
384 self.tc_kwargs['run']['machine'] = m
385 results.append(self._internal_run(logger, args))
386
387 # XXX: the oe-selftest script only needs to know if one
388 # machine run fails
389 for r in results:
390 rc = r
391 if not r.wasSuccessful():
392 break
393
394 else:
395 self.tc_kwargs['run']['machine'] = args.machine
396 return self._internal_run(logger, args)
397
398 else:
399 self.tc_kwargs['run']['machine'] = args.machine
400 rc = self._internal_run(logger, args)
401 finally: 418 finally:
402 config_paths = self.tc_kwargs['init']['config_paths'] 419 config_paths = self.tc_kwargs['init']['config_paths']
403 420
404 output_link = os.path.join(os.path.dirname(args.output_log), 421 output_link = os.path.join(os.path.dirname(args.output_log),
405 "%s-results.log" % self.name) 422 "%s-results.log" % self.name)
406 if os.path.lexists(output_link): 423 if os.path.lexists(output_link):
407 os.remove(output_link) 424 os.unlink(output_link)
408 os.symlink(args.output_log, output_link) 425 os.symlink(args.output_log, output_link)
409 426
410 return rc 427 return rc
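For reference, get_oeselftest_metadata() added above simply projects the parsed argparse namespace onto the OESELFTEST_METADATA key list, so stored results record how oe-selftest was invoked. A self-contained sketch of that behaviour with a hand-built namespace (the values are illustrative):

    import argparse

    OESELFTEST_METADATA = ["run_all_tests", "run_tests", "skips", "machine",
                           "select_tags", "exclude_tags"]

    def get_oeselftest_metadata(args):
        result = {}
        raw_args = vars(args)  # argparse.Namespace -> dict
        for metadata in OESELFTEST_METADATA:
            if metadata in raw_args:
                result[metadata] = raw_args[metadata]
        return result

    args = argparse.Namespace(run_all_tests=True, run_tests=None, skips=None,
                              machine=None, select_tags=['machine'],
                              exclude_tags=None, verbose=True)
    print(get_oeselftest_metadata(args))
    # 'verbose' is not in OESELFTEST_METADATA, so it is dropped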
diff --git a/meta/lib/oeqa/targetcontrol.py b/meta/lib/oeqa/targetcontrol.py
index 19f5a4ea7e..cdf382ee21 100644
--- a/meta/lib/oeqa/targetcontrol.py
+++ b/meta/lib/oeqa/targetcontrol.py
@@ -7,17 +7,14 @@
7# This module is used by testimage.bbclass for setting up and controlling a target machine. 7# This module is used by testimage.bbclass for setting up and controlling a target machine.
8 8
9import os 9import os
10import shutil
11import subprocess 10import subprocess
12import bb 11import bb
13import traceback
14import sys
15import logging 12import logging
16from oeqa.utils.sshcontrol import SSHControl 13from oeqa.utils.sshcontrol import SSHControl
17from oeqa.utils.qemurunner import QemuRunner 14from oeqa.utils.qemurunner import QemuRunner
18from oeqa.utils.qemutinyrunner import QemuTinyRunner 15from oeqa.utils.qemutinyrunner import QemuTinyRunner
19from oeqa.utils.dump import TargetDumper 16from oeqa.utils.dump import TargetDumper
20from oeqa.controllers.testtargetloader import TestTargetLoader 17from oeqa.utils.dump import MonitorDumper
21from abc import ABCMeta, abstractmethod 18from abc import ABCMeta, abstractmethod
22 19
23class BaseTarget(object, metaclass=ABCMeta): 20class BaseTarget(object, metaclass=ABCMeta):
@@ -41,7 +38,7 @@ class BaseTarget(object, metaclass=ABCMeta):
41 if os.path.islink(sshloglink): 38 if os.path.islink(sshloglink):
42 os.unlink(sshloglink) 39 os.unlink(sshloglink)
43 os.symlink(self.sshlog, sshloglink) 40 os.symlink(self.sshlog, sshloglink)
44 self.logger.info("SSH log file: %s" % self.sshlog) 41 self.logger.info("SSH log file: %s" % self.sshlog)
45 42
46 @abstractmethod 43 @abstractmethod
47 def start(self, params=None, ssh=True, extra_bootparams=None): 44 def start(self, params=None, ssh=True, extra_bootparams=None):
@@ -91,7 +88,7 @@ class QemuTarget(BaseTarget):
91 88
92 supported_image_fstypes = ['ext3', 'ext4', 'cpio.gz', 'wic'] 89 supported_image_fstypes = ['ext3', 'ext4', 'cpio.gz', 'wic']
93 90
94 def __init__(self, d, logger, image_fstype=None): 91 def __init__(self, d, logger, image_fstype=None, boot_patterns=None):
95 92
96 import oe.types 93 import oe.types
97 94
@@ -106,8 +103,7 @@ class QemuTarget(BaseTarget):
106 self.rootfs = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), d.getVar("IMAGE_LINK_NAME") + '.' + self.image_fstype) 103 self.rootfs = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), d.getVar("IMAGE_LINK_NAME") + '.' + self.image_fstype)
107 self.kernel = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), d.getVar("KERNEL_IMAGETYPE", False) + '-' + d.getVar('MACHINE', False) + '.bin') 104 self.kernel = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), d.getVar("KERNEL_IMAGETYPE", False) + '-' + d.getVar('MACHINE', False) + '.bin')
108 self.qemulog = os.path.join(self.testdir, "qemu_boot_log.%s" % self.datetime) 105 self.qemulog = os.path.join(self.testdir, "qemu_boot_log.%s" % self.datetime)
109 dump_target_cmds = d.getVar("testimage_dump_target") 106 dump_monitor_cmds = d.getVar("testimage_dump_monitor")
110 dump_host_cmds = d.getVar("testimage_dump_host")
111 dump_dir = d.getVar("TESTIMAGE_DUMP_DIR") 107 dump_dir = d.getVar("TESTIMAGE_DUMP_DIR")
112 if not dump_dir: 108 if not dump_dir:
113 dump_dir = os.path.join(d.getVar('LOG_DIR'), 'runtime-hostdump') 109 dump_dir = os.path.join(d.getVar('LOG_DIR'), 'runtime-hostdump')
@@ -131,6 +127,7 @@ class QemuTarget(BaseTarget):
131 logfile = self.qemulog, 127 logfile = self.qemulog,
132 kernel = self.kernel, 128 kernel = self.kernel,
133 boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT")), 129 boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT")),
130 tmpfsdir = d.getVar("RUNQEMU_TMPFS_DIR"),
134 logger = logger) 131 logger = logger)
135 else: 132 else:
136 self.runner = QemuRunner(machine=d.getVar("MACHINE"), 133 self.runner = QemuRunner(machine=d.getVar("MACHINE"),
@@ -142,11 +139,14 @@ class QemuTarget(BaseTarget):
142 boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT")), 139 boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT")),
143 use_kvm = use_kvm, 140 use_kvm = use_kvm,
144 dump_dir = dump_dir, 141 dump_dir = dump_dir,
145 dump_host_cmds = d.getVar("testimage_dump_host"),
146 logger = logger, 142 logger = logger,
147 serial_ports = len(d.getVar("SERIAL_CONSOLES").split())) 143 tmpfsdir = d.getVar("RUNQEMU_TMPFS_DIR"),
144 serial_ports = len(d.getVar("SERIAL_CONSOLES").split()),
145 boot_patterns = boot_patterns)
148 146
149 self.target_dumper = TargetDumper(dump_target_cmds, dump_dir, self.runner) 147 self.monitor_dumper = MonitorDumper(dump_monitor_cmds, dump_dir, self.runner)
148 if (self.monitor_dumper):
149 self.monitor_dumper.create_dir("qmp")
150 150
151 def deploy(self): 151 def deploy(self):
152 bb.utils.mkdirhier(self.testdir) 152 bb.utils.mkdirhier(self.testdir)
@@ -156,7 +156,7 @@ class QemuTarget(BaseTarget):
156 os.unlink(qemuloglink) 156 os.unlink(qemuloglink)
157 os.symlink(self.qemulog, qemuloglink) 157 os.symlink(self.qemulog, qemuloglink)
158 158
159 self.logger.info("rootfs file: %s" % self.rootfs) 159 self.logger.info("rootfs file: %s" % self.rootfs)
160 self.logger.info("Qemu log file: %s" % self.qemulog) 160 self.logger.info("Qemu log file: %s" % self.qemulog)
161 super(QemuTarget, self).deploy() 161 super(QemuTarget, self).deploy()
162 162
@@ -198,7 +198,7 @@ class QemuTarget(BaseTarget):
198 self.server_ip = self.runner.server_ip 198 self.server_ip = self.runner.server_ip
199 self.connection = SSHControl(ip=self.ip, logfile=self.sshlog) 199 self.connection = SSHControl(ip=self.ip, logfile=self.sshlog)
200 else: 200 else:
201 raise RuntimError("%s - FAILED to re-start qemu - check the task log and the boot log" % self.pn) 201 raise RuntimeError("%s - FAILED to re-start qemu - check the task log and the boot log" % self.pn)
202 202
203 def run_serial(self, command, timeout=60): 203 def run_serial(self, command, timeout=60):
204 return self.runner.run_serial(command, timeout=timeout) 204 return self.runner.run_serial(command, timeout=timeout)
diff --git a/meta/lib/oeqa/utils/__init__.py b/meta/lib/oeqa/utils/__init__.py
index 6d1ec4cb99..e03f7e33bb 100644
--- a/meta/lib/oeqa/utils/__init__.py
+++ b/meta/lib/oeqa/utils/__init__.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4# Enable other layers to have modules in the same named directory 6# Enable other layers to have modules in the same named directory
@@ -88,3 +90,16 @@ def load_test_components(logger, executor):
88 "_executor_class defined." % (comp_name, comp_context)) 90 "_executor_class defined." % (comp_name, comp_context))
89 91
90 return components 92 return components
93
94def get_json_result_dir(d):
95 json_result_dir = os.path.join(d.getVar("LOG_DIR"), 'oeqa')
96 custom_json_result_dir = d.getVar("OEQA_JSON_RESULT_DIR")
97 if custom_json_result_dir:
98 json_result_dir = custom_json_result_dir
99 return json_result_dir
100
101def get_artefact_dir(d):
102 custom_json_result_dir = d.getVar("OEQA_ARTEFACT_DIR")
103 if custom_json_result_dir:
104 return custom_json_result_dir
105 return os.path.join(d.getVar("LOG_DIR"), 'oeqa-artefacts')
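Both helpers added above follow the same override-with-default shape: an optional variable wins, otherwise a path under LOG_DIR is used. A runnable sketch with a stub datastore (StubData is a stand-in for bitbake's data object, not a real API):

    import os

    class StubData:
        def __init__(self, values): self.values = values
        def getVar(self, name): return self.values.get(name)

    def get_json_result_dir(d):
        json_result_dir = os.path.join(d.getVar("LOG_DIR"), 'oeqa')
        custom_json_result_dir = d.getVar("OEQA_JSON_RESULT_DIR")
        if custom_json_result_dir:
            json_result_dir = custom_json_result_dir
        return json_result_dir

    d = StubData({"LOG_DIR": "/tmp/log"})
    print(get_json_result_dir(d))            # /tmp/log/oeqa
    d.values["OEQA_JSON_RESULT_DIR"] = "/srv/results"
    print(get_json_result_dir(d))            # /srv/results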
diff --git a/meta/lib/oeqa/utils/buildproject.py b/meta/lib/oeqa/utils/buildproject.py
index e6d80cc8dc..dfb9661868 100644
--- a/meta/lib/oeqa/utils/buildproject.py
+++ b/meta/lib/oeqa/utils/buildproject.py
@@ -18,6 +18,7 @@ class BuildProject(metaclass=ABCMeta):
18 def __init__(self, uri, foldername=None, tmpdir=None, dl_dir=None): 18 def __init__(self, uri, foldername=None, tmpdir=None, dl_dir=None):
19 self.uri = uri 19 self.uri = uri
20 self.archive = os.path.basename(uri) 20 self.archive = os.path.basename(uri)
21 self.tempdirobj = None
21 if not tmpdir: 22 if not tmpdir:
22 self.tempdirobj = tempfile.TemporaryDirectory(prefix='buildproject-') 23 self.tempdirobj = tempfile.TemporaryDirectory(prefix='buildproject-')
23 tmpdir = self.tempdirobj.name 24 tmpdir = self.tempdirobj.name
@@ -57,6 +58,8 @@ class BuildProject(metaclass=ABCMeta):
57 return self._run('cd %s; make install %s' % (self.targetdir, install_args)) 58 return self._run('cd %s; make install %s' % (self.targetdir, install_args))
58 59
59 def clean(self): 60 def clean(self):
61 if self.tempdirobj:
62 self.tempdirobj.cleanup()
60 if not self.needclean: 63 if not self.needclean:
61 return 64 return
62 self._run('rm -rf %s' % self.targetdir) 65 self._run('rm -rf %s' % self.targetdir)
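The buildproject.py change makes clean() release the TemporaryDirectory that the constructor may have created: self.tempdirobj stays None when the caller supplied tmpdir, so only self-created directories are removed. A stripped-down sketch of the pattern:

    import os
    import tempfile

    class Project:
        def __init__(self, tmpdir=None):
            self.tempdirobj = None  # stays None when the caller owns tmpdir
            if not tmpdir:
                self.tempdirobj = tempfile.TemporaryDirectory(prefix='buildproject-')
                tmpdir = self.tempdirobj.name
            self.tmpdir = tmpdir

        def clean(self):
            if self.tempdirobj:     # only remove what we created
                self.tempdirobj.cleanup()

    p = Project()
    print(os.path.exists(p.tmpdir))  # True
    p.clean()
    print(os.path.exists(p.tmpdir))  # False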
diff --git a/meta/lib/oeqa/utils/commands.py b/meta/lib/oeqa/utils/commands.py
index a71c16ab14..b60a6e6c38 100644
--- a/meta/lib/oeqa/utils/commands.py
+++ b/meta/lib/oeqa/utils/commands.py
@@ -8,11 +8,8 @@
8# This module is mainly used by scripts/oe-selftest and modules under meta/oeqa/selftest 8# This module is mainly used by scripts/oe-selftest and modules under meta/oeqa/selftest
9# It provides a class and methods for running commands on the host in a convenient way for tests. 9# It provides a class and methods for running commands on the host in a convenient way for tests.
10 10
11
12
13import os 11import os
14import sys 12import sys
15import signal
16import subprocess 13import subprocess
17import threading 14import threading
18import time 15import time
@@ -21,6 +18,7 @@ from oeqa.utils import CommandError
21from oeqa.utils import ftools 18from oeqa.utils import ftools
22import re 19import re
23import contextlib 20import contextlib
21import errno
24# Export test doesn't require bb 22# Export test doesn't require bb
25try: 23try:
26 import bb 24 import bb
@@ -85,7 +83,7 @@ class Command(object):
85 except OSError as ex: 83 except OSError as ex:
86 # It's not an error when the command does not consume all 84 # It's not an error when the command does not consume all
87 # of our data. subprocess.communicate() also ignores that. 85 # of our data. subprocess.communicate() also ignores that.
88 if ex.errno != EPIPE: 86 if ex.errno != errno.EPIPE:
89 raise 87 raise
90 88
91 # We write in a separate thread because then we can read 89 # We write in a separate thread because then we can read
@@ -117,7 +115,7 @@ class Command(object):
117 else: 115 else:
118 deadline = time.time() + self.timeout 116 deadline = time.time() + self.timeout
119 for thread in self.threads: 117 for thread in self.threads:
120 timeout = deadline - time.time() 118 timeout = deadline - time.time()
121 if timeout < 0: 119 if timeout < 0:
122 timeout = 0 120 timeout = 0
123 thread.join(timeout) 121 thread.join(timeout)
@@ -168,18 +166,22 @@ class Result(object):
168 166
169 167
170def runCmd(command, ignore_status=False, timeout=None, assert_error=True, sync=True, 168def runCmd(command, ignore_status=False, timeout=None, assert_error=True, sync=True,
171 native_sysroot=None, limit_exc_output=0, output_log=None, **options): 169 native_sysroot=None, target_sys=None, limit_exc_output=0, output_log=None, **options):
172 result = Result() 170 result = Result()
173 171
174 if native_sysroot: 172 if native_sysroot:
175 extra_paths = "%s/sbin:%s/usr/sbin:%s/usr/bin" % \ 173 new_env = dict(options.get('env', os.environ))
176 (native_sysroot, native_sysroot, native_sysroot) 174 paths = new_env["PATH"].split(":")
177 extra_libpaths = "%s/lib:%s/usr/lib" % \ 175 paths = [
178 (native_sysroot, native_sysroot) 176 os.path.join(native_sysroot, "bin"),
179 nenv = dict(options.get('env', os.environ)) 177 os.path.join(native_sysroot, "sbin"),
180 nenv['PATH'] = extra_paths + ':' + nenv.get('PATH', '') 178 os.path.join(native_sysroot, "usr", "bin"),
181 nenv['LD_LIBRARY_PATH'] = extra_libpaths + ':' + nenv.get('LD_LIBRARY_PATH', '') 179 os.path.join(native_sysroot, "usr", "sbin"),
182 options['env'] = nenv 180 ] + paths
181 if target_sys:
182 paths = [os.path.join(native_sysroot, "usr", "bin", target_sys)] + paths
183 new_env["PATH"] = ":".join(paths)
184 options['env'] = new_env
183 185
184 cmd = Command(command, timeout=timeout, output_log=output_log, **options) 186 cmd = Command(command, timeout=timeout, output_log=output_log, **options)
185 cmd.run() 187 cmd.run()
@@ -201,6 +203,8 @@ def runCmd(command, ignore_status=False, timeout=None, assert_error=True, sync=T
201 203
202 if result.status and not ignore_status: 204 if result.status and not ignore_status:
203 exc_output = result.output 205 exc_output = result.output
206 if result.error:
207 exc_output = exc_output + result.error
204 if limit_exc_output > 0: 208 if limit_exc_output > 0:
205 split = result.output.splitlines() 209 split = result.output.splitlines()
206 if len(split) > limit_exc_output: 210 if len(split) > limit_exc_output:
@@ -281,10 +285,25 @@ def get_bb_vars(variables=None, target=None, postconfig=None):
281 return values 285 return values
282 286
283def get_bb_var(var, target=None, postconfig=None): 287def get_bb_var(var, target=None, postconfig=None):
284 return get_bb_vars([var], target, postconfig)[var] 288 if postconfig:
285 289 return get_bb_vars([var], target, postconfig)[var]
286def get_test_layer(): 290 else:
287 layers = get_bb_var("BBLAYERS").split() 291 # Fast-path for the non-postconfig case
292 cmd = ["bitbake-getvar", "--quiet", "--value", var]
293 if target:
294 cmd.extend(["--recipe", target])
295 try:
296 return subprocess.run(cmd, check=True, text=True, stdout=subprocess.PIPE).stdout.strip()
297 except subprocess.CalledProcessError as e:
298 # We need to return None not the empty string if the variable hasn't been set.
299 if e.returncode == 1:
300 return None
301 raise
302
303def get_test_layer(bblayers=None):
304 if bblayers is None:
305 bblayers = get_bb_var("BBLAYERS")
306 layers = bblayers.split()
288 testlayer = None 307 testlayer = None
289 for l in layers: 308 for l in layers:
290 if '~' in l: 309 if '~' in l:
@@ -296,6 +315,7 @@ def get_test_layer():
296 315
297def create_temp_layer(templayerdir, templayername, priority=999, recipepathspec='recipes-*/*'): 316def create_temp_layer(templayerdir, templayername, priority=999, recipepathspec='recipes-*/*'):
298 os.makedirs(os.path.join(templayerdir, 'conf')) 317 os.makedirs(os.path.join(templayerdir, 'conf'))
318 corenames = get_bb_var('LAYERSERIES_CORENAMES')
299 with open(os.path.join(templayerdir, 'conf', 'layer.conf'), 'w') as f: 319 with open(os.path.join(templayerdir, 'conf', 'layer.conf'), 'w') as f:
300 f.write('BBPATH .= ":${LAYERDIR}"\n') 320 f.write('BBPATH .= ":${LAYERDIR}"\n')
301 f.write('BBFILES += "${LAYERDIR}/%s/*.bb \\' % recipepathspec) 321 f.write('BBFILES += "${LAYERDIR}/%s/*.bb \\' % recipepathspec)
@@ -304,12 +324,29 @@ def create_temp_layer(templayerdir, templayername, priority=999, recipepathspec=
304 f.write('BBFILE_PATTERN_%s = "^${LAYERDIR}/"\n' % templayername) 324 f.write('BBFILE_PATTERN_%s = "^${LAYERDIR}/"\n' % templayername)
305 f.write('BBFILE_PRIORITY_%s = "%d"\n' % (templayername, priority)) 325 f.write('BBFILE_PRIORITY_%s = "%d"\n' % (templayername, priority))
306 f.write('BBFILE_PATTERN_IGNORE_EMPTY_%s = "1"\n' % templayername) 326 f.write('BBFILE_PATTERN_IGNORE_EMPTY_%s = "1"\n' % templayername)
307 f.write('LAYERSERIES_COMPAT_%s = "${LAYERSERIES_COMPAT_core}"\n' % templayername) 327 f.write('LAYERSERIES_COMPAT_%s = "%s"\n' % (templayername, corenames))
308 328
309@contextlib.contextmanager 329@contextlib.contextmanager
310def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None, launch_cmd=None, qemuparams=None, overrides={}, discard_writes=True): 330def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None, launch_cmd=None, qemuparams=None, overrides={}, boot_patterns = {}, discard_writes=True):
311 """ 331 """
312 launch_cmd means directly run the command, don't need set rootfs or env vars. 332 Starts a context manager for an 'oeqa.targetcontrol.QemuTarget' resource.
333 The underlying Qemu will be booted into a shell when the generator yields
334 and stopped when the 'with' block exits.
335
336 Usage:
337
338 with runqemu('core-image-minimal') as qemu:
339 qemu.run_serial('cat /proc/cpuinfo')
340
341 Args:
342 pn (str): (image) recipe to run on
343 ssh (boolean): whether or not to enable SSH (network access)
344 runqemuparams (str): space-separated list of params to pass to 'runqemu' script (like 'nographics', 'ovmf', etc.)
345 image_fstype (str): IMAGE_FSTYPE to use
346 launch_cmd (str): directly run this command and bypass automatic runqemu parameter generation
347 overrides (dict): dict of "'<bitbake-variable>': value" pairs that allows overriding bitbake variables
348 boot_patterns (dict): dict of "'<pattern-name>': value" pairs to override default boot patterns, e.g. when not booting Linux
349 discard_writes (boolean): enables qemu -snapshot feature to prevent modifying original image
313 """ 350 """
314 351
315 import bb.tinfoil 352 import bb.tinfoil
@@ -340,7 +377,7 @@ def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None, launch_cmd=None,
340 377
341 logdir = recipedata.getVar("TEST_LOG_DIR") 378 logdir = recipedata.getVar("TEST_LOG_DIR")
342 379
343 qemu = oeqa.targetcontrol.QemuTarget(recipedata, targetlogger, image_fstype) 380 qemu = oeqa.targetcontrol.QemuTarget(recipedata, targetlogger, image_fstype, boot_patterns=boot_patterns)
344 finally: 381 finally:
345 # We need to shut down tinfoil early here in case we actually want 382 # We need to shut down tinfoil early here in case we actually want
346 # to run tinfoil-using utilities with the running QEMU instance. 383 # to run tinfoil-using utilities with the running QEMU instance.
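The new boot_patterns argument is passed through QemuTarget down to QemuRunner, so callers can override how boot progress is detected, e.g. for images that do not boot to a standard Linux login. A hedged usage sketch (the 'search_reached_prompt' key is assumed to be one of QemuRunner's recognised pattern names; adjust for the image in use):

    with runqemu('core-image-minimal', ssh=False,
                 runqemuparams='nographic',
                 boot_patterns={'search_reached_prompt': 'login:'}) as qemu:
        status, output = qemu.run_serial('uname -a')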
diff --git a/meta/lib/oeqa/utils/decorators.py b/meta/lib/oeqa/utils/decorators.py
index aabf4110cb..ea90164e5e 100644
--- a/meta/lib/oeqa/utils/decorators.py
+++ b/meta/lib/oeqa/utils/decorators.py
@@ -16,91 +16,6 @@ import threading
16import signal 16import signal
17from functools import wraps 17from functools import wraps
18 18
19#get the "result" object from one of the upper frames provided that one of these upper frames is a unittest.case frame
20class getResults(object):
21 def __init__(self):
22 #dynamically determine the unittest.case frame and use it to get the name of the test method
23 ident = threading.current_thread().ident
24 upperf = sys._current_frames()[ident]
25 while (upperf.f_globals['__name__'] != 'unittest.case'):
26 upperf = upperf.f_back
27
28 def handleList(items):
29 ret = []
30 # items is a list of tuples, (test, failure) or (_ErrorHandler(), Exception())
31 for i in items:
32 s = i[0].id()
33 #Handle the _ErrorHolder objects from skipModule failures
34 if "setUpModule (" in s:
35 ret.append(s.replace("setUpModule (", "").replace(")",""))
36 else:
37 ret.append(s)
38 # Append also the test without the full path
39 testname = s.split('.')[-1]
40 if testname:
41 ret.append(testname)
42 return ret
43 self.faillist = handleList(upperf.f_locals['result'].failures)
44 self.errorlist = handleList(upperf.f_locals['result'].errors)
45 self.skiplist = handleList(upperf.f_locals['result'].skipped)
46
47 def getFailList(self):
48 return self.faillist
49
50 def getErrorList(self):
51 return self.errorlist
52
53 def getSkipList(self):
54 return self.skiplist
55
56class skipIfFailure(object):
57
58 def __init__(self,testcase):
59 self.testcase = testcase
60
61 def __call__(self,f):
62 @wraps(f)
63 def wrapped_f(*args, **kwargs):
64 res = getResults()
65 if self.testcase in (res.getFailList() or res.getErrorList()):
66 raise unittest.SkipTest("Testcase dependency not met: %s" % self.testcase)
67 return f(*args, **kwargs)
68 wrapped_f.__name__ = f.__name__
69 return wrapped_f
70
71class skipIfSkipped(object):
72
73 def __init__(self,testcase):
74 self.testcase = testcase
75
76 def __call__(self,f):
77 @wraps(f)
78 def wrapped_f(*args, **kwargs):
79 res = getResults()
80 if self.testcase in res.getSkipList():
81 raise unittest.SkipTest("Testcase dependency not met: %s" % self.testcase)
82 return f(*args, **kwargs)
83 wrapped_f.__name__ = f.__name__
84 return wrapped_f
85
86class skipUnlessPassed(object):
87
88 def __init__(self,testcase):
89 self.testcase = testcase
90
91 def __call__(self,f):
92 @wraps(f)
93 def wrapped_f(*args, **kwargs):
94 res = getResults()
95 if self.testcase in res.getSkipList() or \
96 self.testcase in res.getFailList() or \
97 self.testcase in res.getErrorList():
98 raise unittest.SkipTest("Testcase dependency not met: %s" % self.testcase)
99 return f(*args, **kwargs)
100 wrapped_f.__name__ = f.__name__
101 wrapped_f._depends_on = self.testcase
102 return wrapped_f
103
104class testcase(object): 19class testcase(object):
105 def __init__(self, test_case): 20 def __init__(self, test_case):
106 self.test_case = test_case 21 self.test_case = test_case
diff --git a/meta/lib/oeqa/utils/dump.py b/meta/lib/oeqa/utils/dump.py
index 09a44329e0..d4d271369f 100644
--- a/meta/lib/oeqa/utils/dump.py
+++ b/meta/lib/oeqa/utils/dump.py
@@ -1,9 +1,12 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import os 7import os
6import sys 8import sys
9import json
7import errno 10import errno
8import datetime 11import datetime
9import itertools 12import itertools
@@ -17,6 +20,7 @@ class BaseDumper(object):
17 # Some testing doesn't inherit testimage, so it is needed 20 # Some testing doesn't inherit testimage, so it is needed
18 # to set some defaults. 21 # to set some defaults.
19 self.parent_dir = parent_dir 22 self.parent_dir = parent_dir
23 self.dump_dir = parent_dir
20 dft_cmds = """ top -bn1 24 dft_cmds = """ top -bn1
21 iostat -x -z -N -d -p ALL 20 2 25 iostat -x -z -N -d -p ALL 20 2
22 ps -ef 26 ps -ef
@@ -46,11 +50,11 @@ class BaseDumper(object):
46 raise err 50 raise err
47 self.dump_dir = dump_dir 51 self.dump_dir = dump_dir
48 52
49 def _write_dump(self, command, output): 53 def _construct_filename(self, command):
50 if isinstance(self, HostDumper): 54 if isinstance(self, TargetDumper):
51 prefix = "host"
52 elif isinstance(self, TargetDumper):
53 prefix = "target" 55 prefix = "target"
56 elif isinstance(self, MonitorDumper):
57 prefix = "qmp"
54 else: 58 else:
55 prefix = "unknown" 59 prefix = "unknown"
56 for i in itertools.count(): 60 for i in itertools.count():
@@ -58,41 +62,80 @@ class BaseDumper(object):
58 fullname = os.path.join(self.dump_dir, filename) 62 fullname = os.path.join(self.dump_dir, filename)
59 if not os.path.exists(fullname): 63 if not os.path.exists(fullname):
60 break 64 break
61 with open(fullname, 'w') as dump_file: 65 return fullname
62 dump_file.write(output)
63
64
65class HostDumper(BaseDumper):
66 """ Class to get dumps from the host running the tests """
67
68 def __init__(self, cmds, parent_dir):
69 super(HostDumper, self).__init__(cmds, parent_dir)
70 66
71 def dump_host(self, dump_dir=""): 67 def _write_dump(self, command, output):
72 if dump_dir: 68 fullname = self._construct_filename(command)
73 self.dump_dir = dump_dir 69 os.makedirs(os.path.dirname(fullname), exist_ok=True)
74 env = os.environ.copy() 70 if isinstance(self, MonitorDumper):
75 env['PATH'] = '/usr/sbin:/sbin:/usr/bin:/bin' 71 with open(fullname, 'w') as json_file:
76 env['COLUMNS'] = '9999' 72 json.dump(output, json_file, indent=4)
77 for cmd in self.cmds: 73 else:
78 result = runCmd(cmd, ignore_status=True, env=env) 74 with open(fullname, 'w') as dump_file:
79 self._write_dump(cmd.split()[0], result.output) 75 dump_file.write(output)
80 76
81class TargetDumper(BaseDumper): 77class TargetDumper(BaseDumper):
82 """ Class to get dumps from target, it only works with QemuRunner """ 78 """ Class to get dumps from target, it only works with QemuRunner.
79 Will give up permanently after 5 errors from running commands over
80 serial console. This helps to end testing when target is really dead, hanging
81 or unresponsive.
82 """
83 83
84 def __init__(self, cmds, parent_dir, runner): 84 def __init__(self, cmds, parent_dir, runner):
85 super(TargetDumper, self).__init__(cmds, parent_dir) 85 super(TargetDumper, self).__init__(cmds, parent_dir)
86 self.runner = runner 86 self.runner = runner
87 self.errors = 0
87 88
88 def dump_target(self, dump_dir=""): 89 def dump_target(self, dump_dir=""):
90 if self.errors >= 5:
91 print("Too many errors when dumping data from target, assuming it is dead! Will not dump data anymore!")
92 return
89 if dump_dir: 93 if dump_dir:
90 self.dump_dir = dump_dir 94 self.dump_dir = dump_dir
91 for cmd in self.cmds: 95 for cmd in self.cmds:
92 # We can continue with the testing if serial commands fail 96 # We can continue with the testing if serial commands fail
93 try: 97 try:
94 (status, output) = self.runner.run_serial(cmd) 98 (status, output) = self.runner.run_serial(cmd)
99 if status == 0: # run_serial() reports failure as status 0
100 self.errors = self.errors + 1
95 self._write_dump(cmd.split()[0], output) 101 self._write_dump(cmd.split()[0], output)
96 except: 102 except:
103 self.errors = self.errors + 1
97 print("Tried to dump info from target but " 104 print("Tried to dump info from target but "
98 "serial console failed") 105 "serial console failed")
106 print("Failed CMD: %s" % (cmd))
107
108class MonitorDumper(BaseDumper):
109 """ Class to get dumps via the Qemu Monitor, it only works with QemuRunner
110 Will stop completely if there are more than 5 errors when dumping monitor data.
111 This helps to end testing when target is really dead, hanging or unresponsive.
112 """
113
114 def __init__(self, cmds, parent_dir, runner):
115 super(MonitorDumper, self).__init__(cmds, parent_dir)
116 self.runner = runner
117 self.errors = 0
118
119 def dump_monitor(self, dump_dir=""):
120 if self.runner is None:
121 return
122 if dump_dir:
123 self.dump_dir = dump_dir
124 if self.errors >= 5:
125 print("Too many errors when dumping data from qemu monitor, assuming it is dead! Will not dump data anymore!")
126 return
127 for cmd in self.cmds:
128 cmd_name = cmd.split()[0]
129 try:
130 if len(cmd.split()) > 1:
131 cmd_args = cmd.split()[1]
132 if "%s" in cmd_args:
133 filename = self._construct_filename(cmd_name)
134 cmd_data = json.loads(cmd_args % (filename))
135 output = self.runner.run_monitor(cmd_name, cmd_data)
136 else:
137 output = self.runner.run_monitor(cmd_name)
138 self._write_dump(cmd_name, output)
139 except Exception as e:
140 self.errors = self.errors + 1
141 print("Failed to dump QMP CMD: %s with\nException: %s" % (cmd_name, e))
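Each entry in the MonitorDumper command list is a QMP command name, optionally followed by a single JSON argument blob in which '%s' is replaced with a dump filename before parsing. A standalone sketch of that parsing step (the command and path are illustrative):

    import json

    cmd = 'dump-guest-memory {"paging":false,"protocol":"file:%s.img"}'
    cmd_name = cmd.split()[0]   # 'dump-guest-memory'
    cmd_args = cmd.split()[1]   # JSON template containing '%s'
    filename = '/tmp/qmp/dump-guest-memory_00'  # what _construct_filename() returns
    cmd_data = json.loads(cmd_args % filename)
    print(cmd_name, cmd_data)
    # dump-guest-memory {'paging': False, 'protocol': 'file:/tmp/qmp/dump-guest-memory_00.img'}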
diff --git a/meta/lib/oeqa/utils/ftools.py b/meta/lib/oeqa/utils/ftools.py
index 3093419cc7..a50aaa84c2 100644
--- a/meta/lib/oeqa/utils/ftools.py
+++ b/meta/lib/oeqa/utils/ftools.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/utils/gitarchive.py b/meta/lib/oeqa/utils/gitarchive.py
index 6e8040eb5c..7e1d505748 100644
--- a/meta/lib/oeqa/utils/gitarchive.py
+++ b/meta/lib/oeqa/utils/gitarchive.py
@@ -67,7 +67,7 @@ def git_commit_data(repo, data_dir, branch, message, exclude, notes, log):

     # Remove files that are excluded
     if exclude:
-        repo.run_cmd(['rm', '--cached'] + [f for f in exclude], env_update)
+        repo.run_cmd(['rm', '--cached', '--ignore-unmatch'] + [f for f in exclude], env_update)

     tree = repo.run_cmd('write-tree', env_update)

@@ -100,9 +100,44 @@ def git_commit_data(repo, data_dir, branch, message, exclude, notes, log):
     if os.path.exists(tmp_index):
         os.unlink(tmp_index)

+def get_tags(repo, log, pattern=None, url=None):
+    """ Fetch remote tags from current repository
+
+    A pattern can be provided to filter returned tags list
+    A URL can be provided if local repository has no valid remote configured
+    """
+
+    base_cmd = ['ls-remote', '--refs', '--tags', '-q']
+    cmd = base_cmd.copy()
+
+    # First try to fetch tags from repository configured remote
+    cmd.append('origin')
+    if pattern:
+        cmd.append("refs/tags/"+pattern)
+    try:
+        tags_refs = repo.run_cmd(cmd)
+        tags = ["".join(d.split()[1].split('/', 2)[2:]) for d in tags_refs.splitlines()]
+    except GitError as e:
+        # If it fails, retry with repository url if one is provided
+        if url:
+            log.info("No remote repository configured, use provided url")
+            cmd = base_cmd.copy()
+            cmd.append(url)
+            if pattern:
+                cmd.append(pattern)
+            tags_refs = repo.run_cmd(cmd)
+            tags = ["".join(d.split()[1].split('/', 2)[2:]) for d in tags_refs.splitlines()]
+        else:
+            log.info("Read local tags only, some remote tags may be missed")
+            cmd = ["tag"]
+            if pattern:
+                cmd += ["-l", pattern]
+            tags = repo.run_cmd(cmd).splitlines()
+
+    return tags

 def expand_tag_strings(repo, name_pattern, msg_subj_pattern, msg_body_pattern,
-                       keywords):
+                       url, log, keywords):
     """Generate tag name and message, with support for running id number"""
     keyws = keywords.copy()
     # Tag number is handled specially: if not defined, we autoincrement it
@@ -111,12 +146,12 @@ def expand_tag_strings(repo, name_pattern, msg_subj_pattern, msg_body_pattern,
         keyws['tag_number'] = '{tag_number}'
     tag_re = format_str(name_pattern, keyws)
     # Replace parentheses for proper regex matching
-    tag_re = tag_re.replace('(', '\(').replace(')', '\)') + '$'
+    tag_re = tag_re.replace('(', r'\(').replace(')', r'\)') + '$'
     # Inject regex group pattern for 'tag_number'
     tag_re = tag_re.format(tag_number='(?P<tag_number>[0-9]{1,5})')

     keyws['tag_number'] = 0
-    for existing_tag in repo.run_cmd('tag').splitlines():
+    for existing_tag in get_tags(repo, log, url=url):
         match = re.match(tag_re, existing_tag)

         if match and int(match.group('tag_number')) >= keyws['tag_number']:
@@ -143,7 +178,8 @@ def gitarchive(data_dir, git_dir, no_create, bare, commit_msg_subject, commit_ms
     if not no_tag and tagname:
         tag_name, tag_msg = expand_tag_strings(data_repo, tagname,
                                                tag_msg_subject,
-                                               tag_msg_body, keywords)
+                                               tag_msg_body,
+                                               push, log, keywords)

     # Commit data
     commit = git_commit_data(data_repo, data_dir, branch_name,
@@ -166,6 +202,8 @@ def gitarchive(data_dir, git_dir, no_create, bare, commit_msg_subject, commit_ms
         log.info("Pushing data to remote")
         data_repo.run_cmd(cmd)

+    return tag_name
+
 # Container class for tester revisions
 TestedRev = namedtuple('TestedRev', 'commit commit_number tags')

171 209
@@ -181,7 +219,7 @@ def get_test_runs(log, repo, tag_name, **kwargs):

     # Get a list of all matching tags
     tag_pattern = tag_name.format(**str_fields)
-    tags = repo.run_cmd(['tag', '-l', tag_pattern]).splitlines()
+    tags = get_tags(repo, log, pattern=tag_pattern)
     log.debug("Found %d tags matching pattern '%s'", len(tags), tag_pattern)

     # Parse undefined fields from tag names
@@ -199,6 +237,8 @@ def get_test_runs(log, repo, tag_name, **kwargs):
     revs = []
     for tag in tags:
         m = tag_re.match(tag)
+        if not m:
+            continue
         groups = m.groupdict()
         revs.append([groups[f] for f in undef_fields] + [tag])

@@ -219,7 +259,15 @@ def get_test_revs(log, repo, tag_name, **kwargs):
         if not commit in revs:
             revs[commit] = TestedRev(commit, commit_num, [tag])
         else:
-            assert commit_num == revs[commit].commit_number, "Commit numbers do not match"
+            if commit_num != revs[commit].commit_number:
+                # Historically we have incorrect commit counts of '1' in the repo so fix these up
+                if int(revs[commit].commit_number) < 5:
+                    tags = revs[commit].tags
+                    revs[commit] = TestedRev(commit, commit_num, [tags])
+                elif int(commit_num) < 5:
+                    pass
+                else:
+                    sys.exit("Commit numbers for commit %s don't match (%s vs %s)" % (commit, commit_num, revs[commit].commit_number))
             revs[commit].tags.append(tag)

     # Return in sorted table
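The heart of get_tags() is the comprehension that strips the refs/tags/ prefix from `git ls-remote` output while keeping slashes inside the tag name itself. A runnable sketch with fabricated output lines:

    # Fabricated `git ls-remote --refs --tags -q origin` output (hash then ref per line).
    tags_refs = "deadbeef refs/tags/yocto-4.0\ncafef00d refs/tags/perf/build/20230101-5"
    tags = ["".join(d.split()[1].split('/', 2)[2:]) for d in tags_refs.splitlines()]
    print(tags)  # ['yocto-4.0', 'perf/build/20230101-5']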
diff --git a/meta/lib/oeqa/utils/httpserver.py b/meta/lib/oeqa/utils/httpserver.py
index 58d3c3b3f8..80752c1377 100644
--- a/meta/lib/oeqa/utils/httpserver.py
+++ b/meta/lib/oeqa/utils/httpserver.py
@@ -1,11 +1,13 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: MIT
 #

 import http.server
+import logging
 import multiprocessing
 import os
-import traceback
 import signal
 from socketserver import ThreadingMixIn

@@ -13,20 +15,24 @@ class HTTPServer(ThreadingMixIn, http.server.HTTPServer):

     def server_start(self, root_dir, logger):
         os.chdir(root_dir)
+        self.logger = logger
         self.serve_forever()

 class HTTPRequestHandler(http.server.SimpleHTTPRequestHandler):

     def log_message(self, format_str, *args):
-        pass
+        self.server.logger.info(format_str, *args)

-class HTTPService(object):
+class HTTPService:

     def __init__(self, root_dir, host='', port=0, logger=None):
         self.root_dir = root_dir
         self.host = host
         self.port = port
-        self.logger = logger
+        if logger:
+            self.logger = logger.getChild("HTTPService")
+        else:
+            self.logger = logging.getLogger("HTTPService")

     def start(self):
         if not os.path.exists(self.root_dir):
@@ -38,6 +44,12 @@ class HTTPService(object):
         self.port = self.server.server_port
         self.process = multiprocessing.Process(target=self.server.server_start, args=[self.root_dir, self.logger])

+        def handle_error(self, request, client_address):
+            import traceback
+            exception = traceback.format_exc()
+            self.logger.warn("Exception when handling %s: %s" % (request, exception))
+        self.server.handle_error = handle_error
+
         # The signal handler from testimage.bbclass can cause deadlocks here
         # if the HTTPServer is terminated before it can restore the standard
         #signal behaviour
@@ -47,7 +59,7 @@ class HTTPService(object):
         signal.signal(signal.SIGTERM, orig)

         if self.logger:
-            self.logger.info("Started HTTPService on %s:%s" % (self.host, self.port))
+            self.logger.info("Started HTTPService for %s on %s:%s" % (self.root_dir, self.host, self.port))


     def stop(self):
@@ -59,3 +71,10 @@ class HTTPService(object):
         if self.logger:
             self.logger.info("Stopped HTTPService on %s:%s" % (self.host, self.port))

+if __name__ == "__main__":
+    import sys, logging
+
+    logger = logging.getLogger(__name__)
+    logging.basicConfig(level=logging.DEBUG)
+    httpd = HTTPService(sys.argv[1], port=8888, logger=logger)
+    httpd.start()
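With this change the service always has a usable logger: a passed-in parent logger is namespaced via getChild(), otherwise a module-level logger is used. A small standard-library sketch of that behaviour (the parent logger name is an assumption):

    import logging

    logging.basicConfig(level=logging.INFO)
    parent = logging.getLogger("oeqa")            # assumed test-suite logger name
    child = parent.getChild("HTTPService")        # what __init__() now does
    child.info("Started HTTPService for %s on %s:%s", "/srv/www", "127.0.0.1", 8888)
    # INFO:oeqa.HTTPService:Started HTTPService for /srv/www on 127.0.0.1:8888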
diff --git a/meta/lib/oeqa/utils/logparser.py b/meta/lib/oeqa/utils/logparser.py
index 60e16d500e..496d9e0c90 100644
--- a/meta/lib/oeqa/utils/logparser.py
+++ b/meta/lib/oeqa/utils/logparser.py
@@ -1,8 +1,10 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: MIT
 #

-import sys
+import enum
 import os
 import re

@@ -42,6 +44,8 @@ class PtestParser(object):
                result = section_regex['begin'].search(line)
                if result:
                    current_section['name'] = result.group(1)
+                    if current_section['name'] not in self.results:
+                        self.results[current_section['name']] = {}
                    continue

                result = section_regex['end'].search(line)
@@ -73,9 +77,10 @@ class PtestParser(object):
                for t in test_regex:
                    result = test_regex[t].search(line)
                    if result:
-                        if current_section['name'] not in self.results:
-                            self.results[current_section['name']] = {}
-                        self.results[current_section['name']][result.group(1).strip()] = t
+                        try:
+                            self.results[current_section['name']][result.group(1).strip()] = t
+                        except KeyError:
+                            bb.warn("Result with no section: %s - %s" % (t, result.group(1).strip()))

        # Python performance for repeatedly joining long strings is poor, do it all at once at the end.
        # For 2.1 million lines in a log this reduces 18 hours to 12s.
@@ -101,30 +106,48 @@ class PtestParser(object):
                f.write(status + ": " + test_name + "\n")


-# ltp log parsing
-class LtpParser(object):
-    def __init__(self):
-        self.results = {}
-        self.section = {'duration': "", 'log': ""}
-
+class LtpParser:
+    """
+    Parse the machine-readable LTP log output into a ptest-friendly data structure.
+    """
     def parse(self, logfile):
-        test_regex = {}
-        test_regex['PASSED'] = re.compile(r"PASS")
-        test_regex['FAILED'] = re.compile(r"FAIL")
-        test_regex['SKIPPED'] = re.compile(r"SKIP")
-
-        with open(logfile, errors='replace') as f:
+        results = {}
+        # Accumulate the duration here but as the log rounds quick tests down
+        # to 0 seconds this is very much a lower bound. The caller can replace
+        # the value.
+        section = {"duration": 0, "log": ""}
+
+        class LtpExitCode(enum.IntEnum):
+            # Exit codes as defined in ltp/include/tst_res_flags.h
+            TPASS = 0  # Test passed flag
+            TFAIL = 1  # Test failed flag
+            TBROK = 2  # Test broken flag
+            TWARN = 4  # Test warning flag
+            TINFO = 16  # Test information flag
+            TCONF = 32  # Test not appropriate for configuration flag
+
+        with open(logfile, errors="replace") as f:
+            # Lines look like this:
+            # tag=cfs_bandwidth01 stime=1689762564 dur=0 exit=exited stat=32 core=no cu=0 cs=0
             for line in f:
-                for t in test_regex:
-                    result = test_regex[t].search(line)
-                    if result:
-                        self.results[line.split()[0].strip()] = t
+                if not line.startswith("tag="):
+                    continue

-        for test in self.results:
-            result = self.results[test]
-            self.section['log'] = self.section['log'] + ("%s: %s\n" % (result.strip()[:-2], test.strip()))
+                values = dict(s.split("=") for s in line.strip().split())

-        return self.results, self.section
+                section["duration"] += int(values["dur"])
+                exitcode = int(values["stat"])
+                if values["exit"] == "exited" and exitcode == LtpExitCode.TCONF:
+                    # Exited normally with the "invalid configuration" code
+                    results[values["tag"]] = "SKIPPED"
+                elif exitcode == LtpExitCode.TPASS:
+                    # Successful exit
+                    results[values["tag"]] = "PASSED"
+                else:
+                    # Other exit
+                    results[values["tag"]] = "FAILED"
+
+        return results, section


# ltp Compliance log parsing
@@ -135,30 +158,27 @@ class LtpComplianceParser(object):

    def parse(self, logfile):
        test_regex = {}
-        test_regex['PASSED'] = re.compile(r"^PASS")
-        test_regex['FAILED'] = re.compile(r"^FAIL")
-        test_regex['SKIPPED'] = re.compile(r"(?:UNTESTED)|(?:UNSUPPORTED)")
+        test_regex['FAILED'] = re.compile(r"FAIL")

        section_regex = {}
-        section_regex['test'] = re.compile(r"^Testing")
+        section_regex['test'] = re.compile(r"^Executing")

        with open(logfile, errors='replace') as f:
+            name = logfile
+            result = "PASSED"
            for line in f:
-                result = section_regex['test'].search(line)
-                if result:
-                    self.name = ""
-                    self.name = line.split()[1].strip()
-                    self.results[self.name] = "PASSED"
-                    failed = 0
+                regex_result = section_regex['test'].search(line)
+                if regex_result:
+                    name = line.split()[1].strip()

-                failed_result = test_regex['FAILED'].search(line)
-                if failed_result:
-                    failed = line.split()[1].strip()
-                    if int(failed) > 0:
-                        self.results[self.name] = "FAILED"
+                regex_result = test_regex['FAILED'].search(line)
+                if regex_result:
+                    result = "FAILED"
+                self.results[name] = result

        for test in self.results:
            result = self.results[test]
+            print (self.results)
            self.section['log'] = self.section['log'] + ("%s: %s\n" % (result.strip()[:-2], test.strip()))

        return self.results, self.section
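The rewritten LtpParser consumes LTP's machine-readable result lines instead of grepping for PASS/FAIL strings. A runnable sketch feeding it one fabricated line, assuming the class is importable from oeqa.utils.logparser:

    import tempfile
    from oeqa.utils.logparser import LtpParser

    line = "tag=cfs_bandwidth01 stime=1689762564 dur=3 exit=exited stat=0 core=no cu=0 cs=0\n"
    with tempfile.NamedTemporaryFile("w", suffix=".log", delete=False) as f:
        f.write(line)
    results, section = LtpParser().parse(f.name)
    print(results)              # {'cfs_bandwidth01': 'PASSED'}, since stat=0 is TPASS
    print(section["duration"])  # 3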
diff --git a/meta/lib/oeqa/utils/metadata.py b/meta/lib/oeqa/utils/metadata.py
index 8013aa684d..b320df67e0 100644
--- a/meta/lib/oeqa/utils/metadata.py
+++ b/meta/lib/oeqa/utils/metadata.py
@@ -27,9 +27,9 @@ def metadata_from_bb():
    data_dict = get_bb_vars()

    # Distro information
-    info_dict['distro'] = {'id': data_dict['DISTRO'],
-                           'version_id': data_dict['DISTRO_VERSION'],
-                           'pretty_name': '%s %s' % (data_dict['DISTRO'], data_dict['DISTRO_VERSION'])}
+    info_dict['distro'] = {'id': data_dict.get('DISTRO', 'NODISTRO'),
+                           'version_id': data_dict.get('DISTRO_VERSION', 'NO_DISTRO_VERSION'),
+                           'pretty_name': '%s %s' % (data_dict.get('DISTRO', 'NODISTRO'), data_dict.get('DISTRO_VERSION', 'NO_DISTRO_VERSION'))}

    # Host distro information
    os_release = get_os_release()
@@ -76,6 +76,10 @@ def git_rev_info(path):
            info['commit_count'] = int(subprocess.check_output(["git", "rev-list", "--count", "HEAD"], cwd=path).decode('utf-8').strip())
        except subprocess.CalledProcessError:
            pass
+        try:
+            info['commit_time'] = int(subprocess.check_output(["git", "show", "--no-patch", "--format=%ct", "HEAD"], cwd=path).decode('utf-8').strip())
+        except subprocess.CalledProcessError:
+            pass
        return info
    try:
        repo = Repo(path, search_parent_directories=True)
@@ -83,6 +87,7 @@ def git_rev_info(path):
        return info
    info['commit'] = repo.head.commit.hexsha
    info['commit_count'] = repo.head.commit.count()
+    info['commit_time'] = repo.head.commit.committed_date
    try:
        info['branch'] = repo.active_branch.name
    except TypeError:
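Both branches now record the committer timestamp of HEAD. The subprocess fallback is equivalent to the following, runnable inside any git checkout:

    import subprocess

    out = subprocess.check_output(
        ["git", "show", "--no-patch", "--format=%ct", "HEAD"]).decode("utf-8").strip()
    print(int(out))  # committer date of HEAD as seconds since the epoch, e.g. 1689762564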
diff --git a/meta/lib/oeqa/utils/network.py b/meta/lib/oeqa/utils/network.py
index 59d01723a1..da4ffda9a9 100644
--- a/meta/lib/oeqa/utils/network.py
+++ b/meta/lib/oeqa/utils/network.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: MIT
 #

diff --git a/meta/lib/oeqa/utils/nfs.py b/meta/lib/oeqa/utils/nfs.py
index a37686c914..903469bfee 100644
--- a/meta/lib/oeqa/utils/nfs.py
+++ b/meta/lib/oeqa/utils/nfs.py
@@ -1,4 +1,8 @@
+#
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: MIT
+#
 import os
 import sys
 import tempfile
@@ -8,7 +12,7 @@ from oeqa.utils.commands import bitbake, get_bb_var, Command
 from oeqa.utils.network import get_free_port

 @contextlib.contextmanager
-def unfs_server(directory, logger = None):
+def unfs_server(directory, logger = None, udp = True):
     unfs_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "unfs3-native")
     if not os.path.exists(os.path.join(unfs_sysroot, "usr", "bin", "unfsd")):
         # build native tool
@@ -22,11 +26,11 @@ def unfs_server(directory, logger = None):
            exports.write("{0} (rw,no_root_squash,no_all_squash,insecure)\n".format(directory).encode())

        # find some ports for the server
-        nfsport, mountport = get_free_port(udp = True), get_free_port(udp = True)
+        nfsport, mountport = get_free_port(udp), get_free_port(udp)

        nenv = dict(os.environ)
        nenv['PATH'] = "{0}/sbin:{0}/usr/sbin:{0}/usr/bin:".format(unfs_sysroot) + nenv.get('PATH', '')
-        cmd = Command(["unfsd", "-d", "-p", "-N", "-e", exports.name, "-n", str(nfsport), "-m", str(mountport)],
+        cmd = Command(["unfsd", "-d", "-p", "-e", exports.name, "-n", str(nfsport), "-m", str(mountport)],
                      bg = True, env = nenv, output_log = logger)
        cmd.run()
        yield nfsport, mountport
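A hedged usage sketch of the new udp parameter; running it needs a bitbake environment (unfs3-native is built on demand) and the export path is hypothetical:

    from oeqa.utils.nfs import unfs_server

    # udp=True keeps the old behaviour; udp=False asks get_free_port() for TCP ports.
    with unfs_server("/path/to/export", udp=False) as (nfsport, mountport):
        print("unfsd: nfs port %d, mount port %d" % (nfsport, mountport))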
diff --git a/meta/lib/oeqa/utils/package_manager.py b/meta/lib/oeqa/utils/package_manager.py
index 6b67f22fdd..db799b64d6 100644
--- a/meta/lib/oeqa/utils/package_manager.py
+++ b/meta/lib/oeqa/utils/package_manager.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: MIT
 #

diff --git a/meta/lib/oeqa/utils/postactions.py b/meta/lib/oeqa/utils/postactions.py
new file mode 100644
index 0000000000..c69481db6c
--- /dev/null
+++ b/meta/lib/oeqa/utils/postactions.py
@@ -0,0 +1,102 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+# Run a set of actions after tests. The runner provides internal data
+# dictionary as well as test context to any action to run.
+
+import datetime
+import io
+import os
+import stat
+import subprocess
+import tempfile
+from oeqa.utils import get_artefact_dir
+
+##################################################################
+# Host/target statistics
+##################################################################
+
+def get_target_disk_usage(d, tc, artifacts_list, outputdir):
+    output_file = os.path.join(outputdir, "target_disk_usage.txt")
+    try:
+        (status, output) = tc.target.run('df -h')
+        with open(output_file, 'w') as f:
+            f.write(output)
+            f.write("\n")
+    except Exception as e:
+        bb.warn(f"Can not get target disk usage: {e}")
+
+def get_host_disk_usage(d, tc, artifacts_list, outputdir):
+    import subprocess
+
+    output_file = os.path.join(outputdir, "host_disk_usage.txt")
+    try:
+        with open(output_file, 'w') as f:
+            output = subprocess.run(['df', '-hl'], check=True, text=True, stdout=f, env={})
+    except Exception as e:
+        bb.warn(f"Can not get host disk usage: {e}")
+
+##################################################################
+# Artifacts retrieval
+##################################################################
+
+def get_artifacts_list(target, raw_list):
+    result = []
+    # Passed list may contain patterns in paths, expand them directly on target
+    for raw_path in raw_list.split():
+        cmd = f"for p in {raw_path}; do if [ -e $p ]; then echo $p; fi; done"
+        try:
+            status, output = target.run(cmd)
+            if status != 0 or not output:
+                raise Exception()
+            result += output.split()
+        except:
+            bb.note(f"No file/directory matching path {raw_path}")
+
+    return result
+
+def list_and_fetch_failed_tests_artifacts(d, tc, artifacts_list, outputdir):
+    artifacts_list = get_artifacts_list(tc.target, artifacts_list)
+    if not artifacts_list:
+        bb.warn("Could not load artifacts list, skip artifacts retrieval")
+        return
+    try:
+        # We need gnu tar for sparse files, not busybox
+        cmd = "tar --sparse -zcf - " + " ".join(artifacts_list)
+        (status, output) = tc.target.run(cmd, raw = True)
+        if status != 0 or not output:
+            raise Exception("Error while fetching compressed artifacts")
+        archive_name = os.path.join(outputdir, "tests_artifacts.tar.gz")
+        with open(archive_name, "wb") as f:
+            f.write(output)
+    except Exception as e:
+        bb.warn(f"Can not retrieve artifacts from test target: {e}")
+
+
+##################################################################
+# General post actions runner
+##################################################################
+
+def run_failed_tests_post_actions(d, tc):
+    artifacts = d.getVar("TESTIMAGE_FAILED_QA_ARTIFACTS")
+    # Allow all the code to be disabled by having no artifacts set, e.g. for systems with no ssh support
+    if not artifacts:
+        return
+
+    outputdir = get_artefact_dir(d)
+    os.makedirs(outputdir, exist_ok=True)
+    datestr = datetime.datetime.now().strftime('%Y%m%d')
+    outputdir = tempfile.mkdtemp(prefix='oeqa-target-artefacts-%s-' % datestr, dir=outputdir)
+    os.chmod(outputdir, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
+
+    post_actions=[
+        list_and_fetch_failed_tests_artifacts,
+        get_target_disk_usage,
+        get_host_disk_usage
+    ]
+
+    for action in post_actions:
+        action(d, tc, artifacts, outputdir)
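get_artifacts_list() expands glob patterns with a small shell loop run on the target. The same loop can be exercised locally to see what it produces; the pattern is an illustrative assumption:

    import subprocess

    raw_path = "/var/log/*.log"   # e.g. one TESTIMAGE_FAILED_QA_ARTIFACTS entry
    cmd = f"for p in {raw_path}; do if [ -e $p ]; then echo $p; fi; done"
    output = subprocess.check_output(["sh", "-c", cmd], text=True)
    print(output.split())         # matching paths, or [] when nothing matched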
diff --git a/meta/lib/oeqa/utils/qemurunner.py b/meta/lib/oeqa/utils/qemurunner.py
index 77ec939ad7..c4db0cf038 100644
--- a/meta/lib/oeqa/utils/qemurunner.py
+++ b/meta/lib/oeqa/utils/qemurunner.py
@@ -19,20 +19,33 @@ import errno
 import string
 import threading
 import codecs
-import logging
-from oeqa.utils.dump import HostDumper
+import tempfile
 from collections import defaultdict
+from contextlib import contextmanager
+import importlib
+import traceback

 # Get Unicode non printable control chars
 control_range = list(range(0,32))+list(range(127,160))
 control_chars = [chr(x) for x in control_range
                 if chr(x) not in string.printable]
 re_control_char = re.compile('[%s]' % re.escape("".join(control_chars)))
+# Regex to remove the ANSI (color) control codes from console strings in order to match the text only
+re_vt100 = re.compile(r'(\x1b\[|\x9b)[^@-_a-z]*[@-_a-z]|\x1b[@-_a-z]')
+
+def getOutput(o):
+    import fcntl
+    fl = fcntl.fcntl(o, fcntl.F_GETFL)
+    fcntl.fcntl(o, fcntl.F_SETFL, fl | os.O_NONBLOCK)
+    try:
+        return os.read(o.fileno(), 1000000).decode("utf-8")
+    except BlockingIOError:
+        return ""

 class QemuRunner:

-    def __init__(self, machine, rootfs, display, tmpdir, deploy_dir_image, logfile, boottime, dump_dir, dump_host_cmds,
-                 use_kvm, logger, use_slirp=False, serial_ports=2, boot_patterns = defaultdict(str), use_ovmf=False, workdir=None):
+    def __init__(self, machine, rootfs, display, tmpdir, deploy_dir_image, logfile, boottime, dump_dir, use_kvm, logger, use_slirp=False,
+                 serial_ports=2, boot_patterns = defaultdict(str), use_ovmf=False, workdir=None, tmpfsdir=None):

         # Popen object for runqemu
         self.runqemu = None
@@ -55,21 +68,24 @@ class QemuRunner:
         self.boottime = boottime
         self.logged = False
         self.thread = None
+        self.threadsock = None
         self.use_kvm = use_kvm
         self.use_ovmf = use_ovmf
         self.use_slirp = use_slirp
         self.serial_ports = serial_ports
         self.msg = ''
         self.boot_patterns = boot_patterns
+        self.tmpfsdir = tmpfsdir

-        self.runqemutime = 120
+        self.runqemutime = 300
         if not workdir:
             workdir = os.getcwd()
         self.qemu_pidfile = workdir + '/pidfile_' + str(os.getpid())
-        self.host_dumper = HostDumper(dump_host_cmds, dump_dir)
         self.monitorpipe = None

         self.logger = logger
+        # Whether we're expecting an exit and should show related errors
+        self.canexit = False

         # Enable testing other OS's
         # Set commands for target communication, and default to Linux ALWAYS
@@ -80,20 +96,21 @@ class QemuRunner:
         accepted_patterns = ['search_reached_prompt', 'send_login_user', 'search_login_succeeded', 'search_cmd_finished']
         default_boot_patterns = defaultdict(str)
         # Default to the usual patterns used to communicate with the target
-        default_boot_patterns['search_reached_prompt'] = b' login:'
+        default_boot_patterns['search_reached_prompt'] = ' login:'
         default_boot_patterns['send_login_user'] = 'root\n'
         default_boot_patterns['search_login_succeeded'] = r"root@[a-zA-Z0-9\-]+:~#"
         default_boot_patterns['search_cmd_finished'] = r"[a-zA-Z0-9]+@[a-zA-Z0-9\-]+:~#"

         # Only override patterns that were set e.g. login user TESTIMAGE_BOOT_PATTERNS[send_login_user] = "webserver\n"
         for pattern in accepted_patterns:
-            if not self.boot_patterns[pattern]:
+            if pattern not in self.boot_patterns or not self.boot_patterns[pattern]:
                 self.boot_patterns[pattern] = default_boot_patterns[pattern]

     def create_socket(self):
         try:
             sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
             sock.setblocking(0)
+            sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
             sock.bind(("127.0.0.1",0))
             sock.listen(2)
             port = sock.getsockname()[1]
@@ -104,30 +121,24 @@ class QemuRunner:
             sock.close()
             raise

-    def log(self, msg):
-        if self.logfile:
-            # It is needed to sanitize the data received from qemu
-            # because is possible to have control characters
-            msg = msg.decode("utf-8", errors='ignore')
-            msg = re_control_char.sub('', msg)
-            self.msg += msg
-            with codecs.open(self.logfile, "a", encoding="utf-8") as f:
-                f.write("%s" % msg)
-
-    def getOutput(self, o):
-        import fcntl
-        fl = fcntl.fcntl(o, fcntl.F_GETFL)
-        fcntl.fcntl(o, fcntl.F_SETFL, fl | os.O_NONBLOCK)
-        return os.read(o.fileno(), 1000000).decode("utf-8")
+    def decode_qemulog(self, todecode):
+        # Sanitize the data received from qemu as it may contain control characters
+        msg = todecode.decode("utf-8", errors='backslashreplace')
+        msg = re_control_char.sub('', msg)
+        return msg

+    def log(self, msg, extension=""):
+        if self.logfile:
+            with codecs.open(self.logfile + extension, "ab") as f:
+                f.write(msg)
+        self.msg += self.decode_qemulog(msg)

     def handleSIGCHLD(self, signum, frame):
         if self.runqemu and self.runqemu.poll():
             if self.runqemu.returncode:
                 self.logger.error('runqemu exited with code %d' % self.runqemu.returncode)
-                self.logger.error('Output from runqemu:\n%s' % self.getOutput(self.runqemu.stdout))
+                self.logger.error('Output from runqemu:\n%s' % getOutput(self.runqemu.stdout))
             self.stop()
-            self._dump_host()

     def start(self, qemuparams = None, get_ip = True, extra_bootparams = None, runqemuparams='', launch_cmd=None, discard_writes=True):
         env = os.environ.copy()
@@ -150,6 +161,9 @@ class QemuRunner:
         else:
             env["DEPLOY_DIR_IMAGE"] = self.deploy_dir_image

+        if self.tmpfsdir:
+            env["RUNQEMU_TMPFS_DIR"] = self.tmpfsdir
+
         if not launch_cmd:
             launch_cmd = 'runqemu %s' % ('snapshot' if discard_writes else '')
             if self.use_kvm:
155 if self.use_kvm: 169 if self.use_kvm:
@@ -163,11 +177,38 @@ class QemuRunner:
                 launch_cmd += ' slirp'
             if self.use_ovmf:
                 launch_cmd += ' ovmf'
-            launch_cmd += ' %s %s %s' % (runqemuparams, self.machine, self.rootfs)
+            launch_cmd += ' %s %s' % (runqemuparams, self.machine)
+            if self.rootfs.endswith('.vmdk'):
+                self.logger.debug('Bypassing VMDK rootfs for runqemu')
+            else:
+                launch_cmd += ' %s' % (self.rootfs)

         return self.launch(launch_cmd, qemuparams=qemuparams, get_ip=get_ip, extra_bootparams=extra_bootparams, env=env)

     def launch(self, launch_cmd, get_ip = True, qemuparams = None, extra_bootparams = None, env = None):
+        # use logfile to determine the recipe-sysroot-native path and
+        # then add in the site-packages path components and add that
+        # to the python sys.path so the qmp module can be found.
+        python_path = os.path.dirname(os.path.dirname(self.logfile))
+        python_path += "/recipe-sysroot-native/usr/lib/qemu-python"
+        sys.path.append(python_path)
+        importlib.invalidate_caches()
+        try:
+            qmp = importlib.import_module("qmp")
+        except Exception as e:
+            self.logger.error("qemurunner: qmp module missing, please ensure it's installed in %s (%s)" % (python_path, str(e)))
+            return False
+        # Path relative to tmpdir used as cwd for qemu below to avoid unix socket path length issues
+        qmp_file = "." + next(tempfile._get_candidate_names())
+        qmp_param = ' -S -qmp unix:./%s,server,wait' % (qmp_file)
+        qmp_port = self.tmpdir + "/" + qmp_file
+        # Create a second socket connection for debugging use,
+        # note this will NOT cause qemu to block waiting for the connection
+        qmp_file2 = "." + next(tempfile._get_candidate_names())
+        qmp_param += ' -qmp unix:./%s,server,nowait' % (qmp_file2)
+        qmp_port2 = self.tmpdir + "/" + qmp_file2
+        self.logger.info("QMP Available for connection at %s" % (qmp_port2))
+
         try:
             if self.serial_ports >= 2:
                 self.threadsock, threadport = self.create_socket()
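The QMP wiring added above amounts to two extra qemu arguments; -S freezes the guest CPU until run_monitor('cont') releases it, so the monitor connection is established before boot begins. A sketch of the assembled parameters (socket names are random in practice, fixed here for clarity):

    qmp_file, qmp_file2 = ".qmp-main", ".qmp-debug"   # hypothetical names
    qmp_param = ' -S -qmp unix:./%s,server,wait' % (qmp_file)
    qmp_param += ' -qmp unix:./%s,server,nowait' % (qmp_file2)
    print(qmp_param)
    # -S -qmp unix:./.qmp-main,server,wait -qmp unix:./.qmp-debug,server,nowait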
@@ -176,7 +217,7 @@ class QemuRunner:
             self.logger.error("Failed to create listening socket: %s" % msg[1])
             return False

-        bootparams = 'console=tty1 console=ttyS0,115200n8 printk.time=1'
+        bootparams = ' printk.time=1'
         if extra_bootparams:
             bootparams = bootparams + ' ' + extra_bootparams

@@ -184,7 +225,8 @@ class QemuRunner:
         # and analyze descendants in order to determine it.
         if os.path.exists(self.qemu_pidfile):
             os.remove(self.qemu_pidfile)
-        self.qemuparams = 'bootparams="{0}" qemuparams="-pidfile {1}"'.format(bootparams, self.qemu_pidfile)
+        self.qemuparams = 'bootparams="{0}" qemuparams="-pidfile {1} {2}"'.format(bootparams, self.qemu_pidfile, qmp_param)
+
         if qemuparams:
             self.qemuparams = self.qemuparams[:-1] + " " + qemuparams + " " + '\"'

@@ -196,14 +238,15 @@ class QemuRunner:
         self.origchldhandler = signal.getsignal(signal.SIGCHLD)
         signal.signal(signal.SIGCHLD, self.handleSIGCHLD)

-        self.logger.debug('launchcmd=%s'%(launch_cmd))
+        self.logger.debug('launchcmd=%s' % (launch_cmd))

         # FIXME: We pass in stdin=subprocess.PIPE here to work around stty
         # blocking at the end of the runqemu script when using this within
         # oe-selftest (this makes stty error out immediately). There ought
         # to be a proper fix but this will suffice for now.
-        self.runqemu = subprocess.Popen(launch_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, stdin=subprocess.PIPE, preexec_fn=os.setpgrp, env=env)
+        self.runqemu = subprocess.Popen(launch_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, stdin=subprocess.PIPE, preexec_fn=os.setpgrp, env=env, cwd=self.tmpdir)
         output = self.runqemu.stdout
+        launch_time = time.time()

208 # 251 #
209 # We need the preexec_fn above so that all runqemu processes can easily be killed 252 # We need the preexec_fn above so that all runqemu processes can easily be killed
@@ -224,35 +267,41 @@ class QemuRunner:
             self.monitorpipe = os.fdopen(w, "w")
         else:
             # child process
-            os.setpgrp()
-            os.close(w)
-            r = os.fdopen(r)
-            x = r.read()
-            os.killpg(os.getpgid(self.runqemu.pid), signal.SIGTERM)
-            sys.exit(0)
+            try:
+                os.setpgrp()
+                os.close(w)
+                r = os.fdopen(r)
+                x = r.read()
+                os.killpg(os.getpgid(self.runqemu.pid), signal.SIGTERM)
+            finally:
+                # We must exit under all circumstances
+                os._exit(0)

         self.logger.debug("runqemu started, pid is %s" % self.runqemu.pid)
-        self.logger.debug("waiting at most %s seconds for qemu pid (%s)" %
+        self.logger.debug("waiting at most %d seconds for qemu pid (%s)" %
                            (self.runqemutime, time.strftime("%D %H:%M:%S")))
         endtime = time.time() + self.runqemutime
         while not self.is_alive() and time.time() < endtime:
             if self.runqemu.poll():
                 if self.runqemu_exited:
+                    self.logger.warning("runqemu during is_alive() test")
                     return False
                 if self.runqemu.returncode:
                     # No point waiting any longer
                     self.logger.warning('runqemu exited with code %d' % self.runqemu.returncode)
-                    self._dump_host()
-                    self.logger.warning("Output from runqemu:\n%s" % self.getOutput(output))
+                    self.logger.warning("Output from runqemu:\n%s" % getOutput(output))
                     self.stop()
                     return False
             time.sleep(0.5)

         if self.runqemu_exited:
-            return False
+            self.logger.warning("runqemu after timeout")
+
+        if self.runqemu.returncode:
+            self.logger.warning('runqemu exited with code %d' % self.runqemu.returncode)

         if not self.is_alive():
-            self.logger.error("Qemu pid didn't appear in %s seconds (%s)" %
+            self.logger.error("Qemu pid didn't appear in %d seconds (%s)" %
                               (self.runqemutime, time.strftime("%D %H:%M:%S")))

         qemu_pid = None
@@ -267,8 +316,7 @@ class QemuRunner:
             ps = subprocess.Popen(['ps', 'axww', '-o', 'pid,ppid,pri,ni,command '], stdout=subprocess.PIPE).communicate()[0]
             processes = ps.decode("utf-8")
             self.logger.debug("Running processes:\n%s" % processes)
-            self._dump_host()
-            op = self.getOutput(output)
+            op = getOutput(output)
             self.stop()
             if op:
                 self.logger.error("Output from runqemu:\n%s" % op)
@@ -276,10 +324,79 @@ class QemuRunner:
                 self.logger.error("No output from runqemu.\n")
             return False

+        # Create the client socket for the QEMU Monitor Control Socket
+        # This will allow us to read status from Qemu if the process
+        # is still alive
+        self.logger.debug("QMP Initializing to %s" % (qmp_port))
+        # chdir dance for path length issues with unix sockets
+        origpath = os.getcwd()
+        try:
+            os.chdir(os.path.dirname(qmp_port))
+            try:
+                from qmp.legacy import QEMUMonitorProtocol
+                self.qmp = QEMUMonitorProtocol(os.path.basename(qmp_port))
+            except OSError as msg:
+                self.logger.warning("Failed to initialize qemu monitor socket: %s File: %s" % (msg, msg.filename))
+                return False
+
+            self.logger.debug("QMP Connecting to %s" % (qmp_port))
+            if not os.path.exists(qmp_port) and self.is_alive():
+                self.logger.debug("QMP Port does not exist waiting for it to be created")
+                endtime = time.time() + self.runqemutime
+                while not os.path.exists(qmp_port) and self.is_alive() and time.time() < endtime:
+                    self.logger.info("QMP port does not exist yet!")
+                    time.sleep(0.5)
+                if not os.path.exists(qmp_port) and self.is_alive():
+                    self.logger.warning("QMP Port still does not exist but QEMU is alive")
+                    return False
+
+            try:
+                # set timeout value for all QMP calls
+                self.qmp.settimeout(self.runqemutime)
+                self.qmp.connect()
+                connect_time = time.time()
+                self.logger.info("QMP connected to QEMU at %s and took %.2f seconds" %
+                                  (time.strftime("%D %H:%M:%S"),
+                                   time.time() - launch_time))
+            except OSError as msg:
+                self.logger.warning("Failed to connect qemu monitor socket: %s File: %s" % (msg, msg.filename))
+                return False
+            except qmp.legacy.QMPError as msg:
+                self.logger.warning("Failed to communicate with qemu monitor: %s" % (msg))
+                return False
+        finally:
+            os.chdir(origpath)
+
+        # We worry that mmap'd libraries may cause page faults which hang the qemu VM for periods
+        # causing failures. Before we "start" qemu, read through its mapped files to try and
+        # ensure we don't hit page faults later
+        mapdir = "/proc/" + str(self.qemupid) + "/map_files/"
+        try:
+            for f in os.listdir(mapdir):
+                try:
+                    linktarget = os.readlink(os.path.join(mapdir, f))
+                    if not linktarget.startswith("/") or linktarget.startswith("/dev") or "deleted" in linktarget:
+                        continue
+                    with open(linktarget, "rb") as readf:
+                        data = True
+                        while data:
+                            data = readf.read(4096)
+                except FileNotFoundError:
+                    continue
+        # Centos7 doesn't allow us to read /map_files/
+        except PermissionError:
+            pass
+
+        # Release the qemu process to continue running
+        self.run_monitor('cont')
+        self.logger.info("QMP released QEMU at %s and took %.2f seconds from connect" %
+                          (time.strftime("%D %H:%M:%S"),
+                           time.time() - connect_time))
+
         # We are alive: qemu is running
-        out = self.getOutput(output)
+        out = getOutput(output)
         netconf = False # network configuration is not required by default
-        self.logger.debug("qemu started in %s seconds - qemu procces pid is %s (%s)" %
+        self.logger.debug("qemu started in %.2f seconds - qemu process pid is %s (%s)" %
                           (time.time() - (endtime - self.runqemutime),
                            self.qemupid, time.strftime("%D %H:%M:%S")))
         cmdline = ''
@@ -291,9 +408,10 @@ class QemuRunner:
             cmdline = re_control_char.sub(' ', cmdline)
         try:
             if self.use_slirp:
-                tcp_ports = cmdline.split("hostfwd=tcp::")[1]
+                tcp_ports = cmdline.split("hostfwd=tcp:")[1]
+                ip, tcp_ports = tcp_ports.split(":")[:2]
                 host_port = tcp_ports[:tcp_ports.find('-')]
-                self.ip = "localhost:%s" % host_port
+                self.ip = "%s:%s" % (ip, host_port)
             else:
                 ips = re.findall(r"((?:[0-9]{1,3}\.){3}[0-9]{1,3})", cmdline.split("ip=")[1])
                 self.ip = ips[0]
@@ -301,8 +419,8 @@ class QemuRunner:
             self.logger.debug("qemu cmdline used:\n{}".format(cmdline))
         except (IndexError, ValueError):
             # Try to get network configuration from runqemu output
-            match = re.match(r'.*Network configuration: (?:ip=)*([0-9.]+)::([0-9.]+):([0-9.]+)$.*',
-                             out, re.MULTILINE|re.DOTALL)
+            match = re.match(r'.*Network configuration: (?:ip=)*([0-9.]+)::([0-9.]+):([0-9.]+).*',
+                             out, re.MULTILINE | re.DOTALL)
             if match:
                 self.ip, self.server_ip, self.netmask = match.groups()
                 # network configuration is required as we couldn't get it
@@ -313,16 +431,16 @@ class QemuRunner:
                 self.logger.error("Couldn't get ip from qemu command line and runqemu output! "
                                   "Here is the qemu command line used:\n%s\n"
                                   "and output from runqemu:\n%s" % (cmdline, out))
-                self._dump_host()
                 self.stop()
                 return False

         self.logger.debug("Target IP: %s" % self.ip)
         self.logger.debug("Server IP: %s" % self.server_ip)

+        self.thread = LoggingThread(self.log, self.threadsock, self.logger, self.runqemu.stdout)
+        self.thread.start()
+
         if self.serial_ports >= 2:
-            self.thread = LoggingThread(self.log, self.threadsock, self.logger)
-            self.thread.start()
             if not self.thread.connection_established.wait(self.boottime):
                 self.logger.error("Didn't receive a console connection from qemu. "
                                   "Here is the qemu command line used:\n%s\nand "
@@ -334,7 +452,7 @@ class QemuRunner:
         self.logger.debug("Waiting at most %d seconds for login banner (%s)" %
                           (self.boottime, time.strftime("%D %H:%M:%S")))
         endtime = time.time() + self.boottime
-        socklist = [self.server_socket]
+        filelist = [self.server_socket]
         reachedlogin = False
         stopread = False
         qemusock = None
@@ -342,64 +460,84 @@ class QemuRunner:
         data = b''
         while time.time() < endtime and not stopread:
             try:
-                sread, swrite, serror = select.select(socklist, [], [], 5)
+                sread, swrite, serror = select.select(filelist, [], [], 5)
             except InterruptedError:
                 continue
-            for sock in sread:
-                if sock is self.server_socket:
+            for file in sread:
+                if file is self.server_socket:
                     qemusock, addr = self.server_socket.accept()
-                    qemusock.setblocking(0)
-                    socklist.append(qemusock)
-                    socklist.remove(self.server_socket)
+                    qemusock.setblocking(False)
+                    filelist.append(qemusock)
+                    filelist.remove(self.server_socket)
                     self.logger.debug("Connection from %s:%s" % addr)
                 else:
-                    data = data + sock.recv(1024)
+                    # try to avoid reading only a single character at a time
+                    time.sleep(0.1)
+                    if hasattr(file, 'read'):
+                        read = file.read(1024)
+                    elif hasattr(file, 'recv'):
+                        read = file.recv(1024)
+                    else:
+                        self.logger.error('Invalid file type: %s' % (file))
+                        read = b''
+
+                    self.logger.debug2('Partial boot log:\n%s' % (read.decode('utf-8', errors='backslashreplace')))
+                    data = data + read
                     if data:
                         bootlog += data
-                        if self.serial_ports < 2:
-                            # this socket has mixed console/kernel data, log it to logfile
-                            self.log(data)
-
+                        self.log(data, extension = ".2")
                         data = b''
-                        if self.boot_patterns['search_reached_prompt'] in bootlog:
+
+                        if bytes(self.boot_patterns['search_reached_prompt'], 'utf-8') in bootlog:
+                            self.server_socket.close()
                             self.server_socket = qemusock
                             stopread = True
                             reachedlogin = True
-                            self.logger.debug("Reached login banner in %s seconds (%s)" %
+                            self.logger.debug("Reached login banner in %.2f seconds (%s)" %
                                               (time.time() - (endtime - self.boottime),
                                                time.strftime("%D %H:%M:%S")))
                     else:
                         # no need to check if reachedlogin unless we support multiple connections
                         self.logger.debug("QEMU socket disconnected before login banner reached. (%s)" %
                                           time.strftime("%D %H:%M:%S"))
-                        socklist.remove(sock)
-                        sock.close()
+                        filelist.remove(file)
+                        file.close()
                         stopread = True

-
         if not reachedlogin:
             if time.time() >= endtime:
                 self.logger.warning("Target didn't reach login banner in %d seconds (%s)" %
                                     (self.boottime, time.strftime("%D %H:%M:%S")))
             tail = lambda l: "\n".join(l.splitlines()[-25:])
-            bootlog = bootlog.decode("utf-8")
-            # in case bootlog is empty, use tail qemu log store at self.msg
-            lines = tail(bootlog if bootlog else self.msg)
-            self.logger.warning("Last 25 lines of text:\n%s" % lines)
+            bootlog = self.decode_qemulog(bootlog)
+            self.logger.warning("Last 25 lines of login console (%d):\n%s" % (len(bootlog), tail(bootlog)))
+            self.logger.warning("Last 25 lines of all logging (%d):\n%s" % (len(self.msg), tail(self.msg)))
             self.logger.warning("Check full boot log: %s" % self.logfile)
-            self._dump_host()
             self.stop()
+            data = True
+            while data:
+                try:
+                    time.sleep(1)
+                    data = qemusock.recv(1024)
+                    self.log(data, extension = ".2")
+                    self.logger.warning('Extra log data read: %s\n' % (data.decode('utf-8', errors='backslashreplace')))
+                except Exception as e:
+                    self.logger.warning('Extra log data exception %s' % repr(e))
+                    data = None
             return False

+        with self.thread.serial_lock:
+            self.thread.set_serialsock(self.server_socket)
+
         # If we are not able to login the tests can continue
         try:
             (status, output) = self.run_serial(self.boot_patterns['send_login_user'], raw=True, timeout=120)
             if re.search(self.boot_patterns['search_login_succeeded'], output):
                 self.logged = True
-                self.logger.debug("Logged as root in serial console")
+                self.logger.debug("Logged in as %s in serial console" % self.boot_patterns['send_login_user'].replace("\n", ""))
                 if netconf:
                     # configure guest networking
-                    cmd = "ifconfig eth0 %s netmask %s up\n" % (self.ip, self.netmask)
+                    cmd = "ip addr add %s/%s dev eth0\nip link set dev eth0 up\n" % (self.ip, self.netmask)
                     output = self.run_serial(cmd, raw=True)[1]
                     if re.search(r"root@[a-zA-Z0-9\-]+:~#", output):
                         self.logger.debug("configured ip address %s", self.ip)
@@ -407,7 +545,7 @@ class QemuRunner:
                        self.logger.debug("Couldn't configure guest networking")
             else:
                 self.logger.warning("Couldn't login into serial console"
-                        " as root using blank password")
+                        " as %s using blank password" % self.boot_patterns['send_login_user'].replace("\n", ""))
                 self.logger.warning("The output:\n%s" % output)
         except:
             self.logger.warning("Serial console failed while trying to login")
@@ -427,16 +565,24 @@ class QemuRunner:
             except OSError as e:
                 if e.errno != errno.ESRCH:
                     raise
-            endtime = time.time() + self.runqemutime
-            while self.runqemu.poll() is None and time.time() < endtime:
-                time.sleep(1)
-            if self.runqemu.poll() is None:
+            try:
+                outs, errs = self.runqemu.communicate(timeout=self.runqemutime)
+                if outs:
+                    self.logger.info("Output from runqemu:\n%s", outs.decode("utf-8"))
+                if errs:
+                    self.logger.info("Stderr from runqemu:\n%s", errs.decode("utf-8"))
+            except subprocess.TimeoutExpired:
                 self.logger.debug("Sending SIGKILL to runqemu")
                 os.killpg(os.getpgid(self.runqemu.pid), signal.SIGKILL)
+            if not self.runqemu.stdout.closed:
+                self.logger.info("Output from runqemu:\n%s" % getOutput(self.runqemu.stdout))
             self.runqemu.stdin.close()
             self.runqemu.stdout.close()
             self.runqemu_exited = True

+        if hasattr(self, 'qmp') and self.qmp:
+            self.qmp.close()
+            self.qmp = None
         if hasattr(self, 'server_socket') and self.server_socket:
             self.server_socket.close()
             self.server_socket = None
@@ -467,6 +613,11 @@ class QemuRunner:
             self.thread.stop()
             self.thread.join()

+    def allowexit(self):
+        self.canexit = True
+        if self.thread:
+            self.thread.allowexit()
+
     def restart(self, qemuparams = None):
         self.logger.warning("Restarting qemu process")
         if self.runqemu.poll() is None:
@@ -483,8 +634,12 @@ class QemuRunner:
483 # so it's possible that the file has been created but the content is empty 634 # so it's possible that the file has been created but the content is empty
484 pidfile_timeout = time.time() + 3 635 pidfile_timeout = time.time() + 3
485 while time.time() < pidfile_timeout: 636 while time.time() < pidfile_timeout:
486 with open(self.qemu_pidfile, 'r') as f: 637 try:
487 qemu_pid = f.read().strip() 638 with open(self.qemu_pidfile, 'r') as f:
639 qemu_pid = f.read().strip()
640 except FileNotFoundError:
641             # The pid file can legitimately disappear during shutdown, so treat a missing file as qemu exiting
642 return False
488 # file created but not yet written contents 643 # file created but not yet written contents
489 if not qemu_pid: 644 if not qemu_pid:
490 time.sleep(0.5) 645 time.sleep(0.5)
@@ -495,34 +650,49 @@ class QemuRunner:
495 return True 650 return True
496 return False 651 return False
497 652
653 def run_monitor(self, command, args=None, timeout=60):
654 if hasattr(self, 'qmp') and self.qmp:
655 self.qmp.settimeout(timeout)
656 if args is not None:
657 return self.qmp.cmd_raw(command, args)
658 else:
659 return self.qmp.cmd_raw(command)
660
498 def run_serial(self, command, raw=False, timeout=60): 661 def run_serial(self, command, raw=False, timeout=60):
662 # Returns (status, output) where status is 1 on success and 0 on error
663
499 # We assume the target system has echo so we can obtain the command status 664
500 if not raw: 665 if not raw:
501 command = "%s; echo $?\n" % command 666 command = "%s; echo $?\n" % command
502 667
503 data = '' 668 data = ''
504 status = 0 669 status = 0
505 self.server_socket.sendall(command.encode('utf-8')) 670 with self.thread.serial_lock:
506 start = time.time() 671 self.server_socket.sendall(command.encode('utf-8'))
507 end = start + timeout 672 start = time.time()
508 while True: 673 end = start + timeout
509 now = time.time() 674 while True:
510 if now >= end: 675 now = time.time()
511 data += "<<< run_serial(): command timed out after %d seconds without output >>>\r\n\r\n" % timeout 676 if now >= end:
512 break 677 data += "<<< run_serial(): command timed out after %d seconds without output >>>\r\n\r\n" % timeout
513 try: 678 break
514 sread, _, _ = select.select([self.server_socket],[],[], end - now) 679 try:
515 except InterruptedError: 680 sread, _, _ = select.select([self.server_socket],[],[], end - now)
516 continue 681 except InterruptedError:
517 if sread: 682 continue
518 answer = self.server_socket.recv(1024) 683 if sread:
519 if answer: 684 # try to avoid reading single character at a time
520 data += answer.decode('utf-8') 685 time.sleep(0.1)
521 # Search the prompt to stop 686 answer = self.server_socket.recv(1024)
522 if re.search(self.boot_patterns['search_cmd_finished'], data): 687 if answer:
523 break 688 data += re_vt100.sub("", answer.decode('utf-8'))
524 else: 689 # Search the prompt to stop
525 raise Exception("No data on serial console socket") 690 if re.search(self.boot_patterns['search_cmd_finished'], data):
691 break
692 else:
693 if self.canexit:
694 return (1, "")
695 raise Exception("No data on serial console socket, connection closed?")
526 696
527 if data: 697 if data:
528 if raw: 698 if raw:
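The reworked run_serial() above serializes console access through the logging thread's serial_lock, pauses briefly before each recv() so bytes arrive in batches rather than one at a time, and strips VT100 control sequences (via the module's re_vt100, defined outside this hunk) before matching the prompt. A minimal standalone sketch of that read-until-prompt loop; the re_vt100 pattern here is an assumed stand-in for the module's own, and the socket and prompt regex are supplied by the caller:

    import re
    import select
    import time

    # Assumed stand-in for the module-level regex the hunk references.
    re_vt100 = re.compile(r'\x1b\[[0-9;?]*[a-zA-Z]')

    def read_until_prompt(sock, prompt_re, timeout=60):
        data = ''
        end = time.time() + timeout
        while True:
            now = time.time()
            if now >= end:
                data += "<<< timed out after %d seconds >>>" % timeout
                break
            try:
                sread, _, _ = select.select([sock], [], [], end - now)
            except InterruptedError:
                continue
            if not sread:
                continue            # select timed out; loop re-checks the deadline
            time.sleep(0.1)         # let more bytes arrive; avoids 1-byte reads
            chunk = sock.recv(1024)
            if not chunk:
                raise Exception("No data on serial console socket, connection closed?")
            data += re_vt100.sub("", chunk.decode('utf-8'))
            if re.search(prompt_re, data):
                break
        return data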
@@ -541,34 +711,48 @@ class QemuRunner:
541 status = 1 711 status = 1
542 return (status, str(data)) 712 return (status, str(data))
543 713
544 714@contextmanager
545 def _dump_host(self): 715def nonblocking_lock(lock):
546 self.host_dumper.create_dir("qemu") 716 locked = lock.acquire(False)
547 self.logger.warning("Qemu ended unexpectedly, dump data from host" 717 try:
548 " is in %s" % self.host_dumper.dump_dir) 718 yield locked
549 self.host_dumper.dump_host() 719 finally:
720 if locked:
721 lock.release()
550 722
551# This class is for reading data from a socket and passing it to logfunc 723# This class is for reading data from a socket and passing it to logfunc
552# to be processed. It's completely event driven and has a straightforward 724# to be processed. It's completely event driven and has a straightforward
553# event loop. The mechanism for stopping the thread is a simple pipe which 725# event loop. The mechanism for stopping the thread is a simple pipe which
554# will wake up the poll and allow for tearing everything down. 726# will wake up the poll and allow for tearing everything down.
555class LoggingThread(threading.Thread): 727class LoggingThread(threading.Thread):
556 def __init__(self, logfunc, sock, logger): 728 def __init__(self, logfunc, sock, logger, qemuoutput):
557 self.connection_established = threading.Event() 729 self.connection_established = threading.Event()
730 self.serial_lock = threading.Lock()
731
558 self.serversock = sock 732 self.serversock = sock
733 self.serialsock = None
734 self.qemuoutput = qemuoutput
559 self.logfunc = logfunc 735 self.logfunc = logfunc
560 self.logger = logger 736 self.logger = logger
561 self.readsock = None 737 self.readsock = None
562 self.running = False 738 self.running = False
739 self.canexit = False
563 740
564 self.errorevents = select.POLLERR | select.POLLHUP | select.POLLNVAL 741 self.errorevents = select.POLLERR | select.POLLHUP | select.POLLNVAL
565 self.readevents = select.POLLIN | select.POLLPRI 742 self.readevents = select.POLLIN | select.POLLPRI
566 743
567 threading.Thread.__init__(self, target=self.threadtarget) 744 threading.Thread.__init__(self, target=self.threadtarget)
568 745
746 def set_serialsock(self, serialsock):
747 self.serialsock = serialsock
748
569 def threadtarget(self): 749 def threadtarget(self):
570 try: 750 try:
571 self.eventloop() 751 self.eventloop()
752 except Exception:
753 exc_type, exc_value, exc_traceback = sys.exc_info()
754 self.logger.warning("Exception %s in logging thread" %
755 traceback.format_exception(exc_type, exc_value, exc_traceback))
572 finally: 756 finally:
573 self.teardown() 757 self.teardown()
574 758
@@ -584,7 +768,8 @@ class LoggingThread(threading.Thread):
584 768
585 def teardown(self): 769 def teardown(self):
586 self.logger.debug("Tearing down logging thread") 770 self.logger.debug("Tearing down logging thread")
587 self.close_socket(self.serversock) 771 if self.serversock:
772 self.close_socket(self.serversock)
588 773
589 if self.readsock is not None: 774 if self.readsock is not None:
590 self.close_socket(self.readsock) 775 self.close_socket(self.readsock)
@@ -593,30 +778,37 @@ class LoggingThread(threading.Thread):
593 self.close_ignore_error(self.writepipe) 778 self.close_ignore_error(self.writepipe)
594 self.running = False 779 self.running = False
595 780
781 def allowexit(self):
782 self.canexit = True
783
596 def eventloop(self): 784 def eventloop(self):
597 poll = select.poll() 785 poll = select.poll()
598 event_read_mask = self.errorevents | self.readevents 786 event_read_mask = self.errorevents | self.readevents
599 poll.register(self.serversock.fileno()) 787 if self.serversock:
788 poll.register(self.serversock.fileno())
789 serial_registered = False
790 poll.register(self.qemuoutput.fileno())
600 poll.register(self.readpipe, event_read_mask) 791 poll.register(self.readpipe, event_read_mask)
601 792
602 breakout = False 793 breakout = False
603 self.running = True 794 self.running = True
604 self.logger.debug("Starting thread event loop") 795 self.logger.debug("Starting thread event loop")
605 while not breakout: 796 while not breakout:
606 events = poll.poll() 797 events = poll.poll(2)
607 for event in events: 798 for fd, event in events:
799
608 # An error occurred, bail out 800 # An error occurred, bail out
609 if event[1] & self.errorevents: 801 if event & self.errorevents:
610 raise Exception(self.stringify_event(event[1])) 802 raise Exception(self.stringify_event(event))
611 803
612 # Event to stop the thread 804 # Event to stop the thread
613 if self.readpipe == event[0]: 805 if self.readpipe == fd:
614 self.logger.debug("Stop event received") 806 self.logger.debug("Stop event received")
615 breakout = True 807 breakout = True
616 break 808 break
617 809
618 # A connection request was received 810 # A connection request was received
619 elif self.serversock.fileno() == event[0]: 811 elif self.serversock and self.serversock.fileno() == fd:
620 self.logger.debug("Connection request received") 812 self.logger.debug("Connection request received")
621 self.readsock, _ = self.serversock.accept() 813 self.readsock, _ = self.serversock.accept()
622 self.readsock.setblocking(0) 814 self.readsock.setblocking(0)
@@ -627,18 +819,40 @@ class LoggingThread(threading.Thread):
627 self.connection_established.set() 819 self.connection_established.set()
628 820
629 # Actual data to be logged 821 # Actual data to be logged
630 elif self.readsock.fileno() == event[0]: 822 elif self.readsock and self.readsock.fileno() == fd:
631 data = self.recv(1024) 823 data = self.recv(1024, self.readsock)
632 self.logfunc(data) 824 self.logfunc(data)
825 elif self.qemuoutput.fileno() == fd:
826 data = self.qemuoutput.read()
827 self.logger.debug("Data received on qemu stdout %s" % data)
828 self.logfunc(data, ".stdout")
829 elif self.serialsock and self.serialsock.fileno() == fd:
830 if self.serial_lock.acquire(blocking=False):
831 try:
832 data = self.recv(1024, self.serialsock)
833 self.logger.debug("Data received serial thread %s" % data.decode('utf-8', 'replace'))
834 self.logfunc(data, ".2")
835 finally:
836 self.serial_lock.release()
837 else:
838 serial_registered = False
839 poll.unregister(self.serialsock.fileno())
840
841 if not serial_registered and self.serialsock:
842 with nonblocking_lock(self.serial_lock) as l:
843 if l:
844 serial_registered = True
845 poll.register(self.serialsock.fileno(), event_read_mask)
846
633 847
634 # Since the socket is non-blocking make sure to honor EAGAIN 848 # Since the socket is non-blocking make sure to honor EAGAIN
635 # and EWOULDBLOCK. 849 # and EWOULDBLOCK.
636 def recv(self, count): 850 def recv(self, count, sock):
637 try: 851 try:
638 data = self.readsock.recv(count) 852 data = sock.recv(count)
639 except socket.error as e: 853 except socket.error as e:
640 if e.errno == errno.EAGAIN or e.errno == errno.EWOULDBLOCK: 854 if e.errno == errno.EAGAIN or e.errno == errno.EWOULDBLOCK:
641 return '' 855 return b''
642 else: 856 else:
643 raise 857 raise
644 858
@@ -649,7 +863,9 @@ class LoggingThread(threading.Thread):
649 # happened. But for this code it counts as an 863 # happened. But for this code it counts as an
650 # error since the connection shouldn't go away 864 # error since the connection shouldn't go away
651 # until qemu exits. 865 # until qemu exits.
652 raise Exception("Console connection closed unexpectedly") 866 if not self.canexit:
867 raise Exception("Console connection closed unexpectedly")
868 return b''
653 869
654 return data 870 return data
655 871
@@ -661,6 +877,9 @@ class LoggingThread(threading.Thread):
661 val = 'POLLHUP' 877 val = 'POLLHUP'
662 elif select.POLLNVAL == event: 878 elif select.POLLNVAL == event:
663 val = 'POLLNVAL' 879 val = 'POLLNVAL'
880 else:
881 val = "0x%x" % (event)
882
664 return val 883 return val
665 884
666 def close_socket(self, sock): 885 def close_socket(self, sock):
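Two pieces of the qemurunner changes above interlock: run_serial() holds serial_lock for the duration of a command, while the logging thread uses the new nonblocking_lock() helper so its short poll timeout never blocks on that lock. When the lock is busy, the thread unregisters the serial fd and re-registers it on a later poll tick. A minimal sketch of the helper and its intended use, assuming a threading.Lock:

    import threading
    from contextlib import contextmanager

    @contextmanager
    def nonblocking_lock(lock):
        locked = lock.acquire(False)   # non-blocking attempt
        try:
            yield locked               # caller checks whether it got the lock
        finally:
            if locked:
                lock.release()

    serial_lock = threading.Lock()
    with nonblocking_lock(serial_lock) as held:
        if held:
            pass   # safe to read the serial socket on this poll tick
        else:
            pass   # run_serial() owns the console; retry on the next tick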
diff --git a/meta/lib/oeqa/utils/qemutinyrunner.py b/meta/lib/oeqa/utils/qemutinyrunner.py
index 5c92941c0a..20009401ca 100644
--- a/meta/lib/oeqa/utils/qemutinyrunner.py
+++ b/meta/lib/oeqa/utils/qemutinyrunner.py
@@ -19,7 +19,7 @@ from .qemurunner import QemuRunner
19 19
20class QemuTinyRunner(QemuRunner): 20class QemuTinyRunner(QemuRunner):
21 21
22 def __init__(self, machine, rootfs, display, tmpdir, deploy_dir_image, logfile, kernel, boottime, logger): 22 def __init__(self, machine, rootfs, display, tmpdir, deploy_dir_image, logfile, kernel, boottime, logger, tmpfsdir=None):
23 23
24 # Popen object for runqemu 24 # Popen object for runqemu
25 self.runqemu = None 25 self.runqemu = None
@@ -37,6 +37,7 @@ class QemuTinyRunner(QemuRunner):
37 self.deploy_dir_image = deploy_dir_image 37 self.deploy_dir_image = deploy_dir_image
38 self.logfile = logfile 38 self.logfile = logfile
39 self.boottime = boottime 39 self.boottime = boottime
40 self.tmpfsdir = tmpfsdir
40 41
41 self.runqemutime = 60 42 self.runqemutime = 60
42 self.socketfile = "console.sock" 43 self.socketfile = "console.sock"
@@ -83,6 +84,9 @@ class QemuTinyRunner(QemuRunner):
83 return False 84 return False
84 else: 85 else:
85 os.environ["DEPLOY_DIR_IMAGE"] = self.deploy_dir_image 86 os.environ["DEPLOY_DIR_IMAGE"] = self.deploy_dir_image
87 if self.tmpfsdir:
88 env["RUNQEMU_TMPFS_DIR"] = self.tmpfsdir
89
86 90
87 # Set this flag so that Qemu doesn't do any grabs as SDL grabs interact 91 # Set this flag so that Qemu doesn't do any grabs as SDL grabs interact
88 # badly with screensavers. 92 # badly with screensavers.
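One detail worth noting in the qemutinyrunner hunk: the pre-existing line assigns DEPLOY_DIR_IMAGE into os.environ, while the added tmpfsdir lines assign into an env mapping, so the two settings do not obviously land in the same place. A sketch of passing both through the child environment consistently, assuming the runner launches runqemu via subprocess (the command line here is hypothetical):

    import os
    import subprocess

    def launch_runqemu(deploy_dir_image, tmpfsdir=None):
        env = os.environ.copy()
        env["DEPLOY_DIR_IMAGE"] = deploy_dir_image
        if tmpfsdir:
            env["RUNQEMU_TMPFS_DIR"] = tmpfsdir
        # hypothetical invocation; the real runner builds a longer command
        return subprocess.Popen(["runqemu"], env=env)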
diff --git a/meta/lib/oeqa/utils/sshcontrol.py b/meta/lib/oeqa/utils/sshcontrol.py
index 36c2ecb3db..88a61aff63 100644
--- a/meta/lib/oeqa/utils/sshcontrol.py
+++ b/meta/lib/oeqa/utils/sshcontrol.py
@@ -57,8 +57,10 @@ class SSHProcess(object):
57 if select.select([self.process.stdout], [], [], 5)[0] != []: 57 if select.select([self.process.stdout], [], [], 5)[0] != []:
58 data = os.read(self.process.stdout.fileno(), 1024) 58 data = os.read(self.process.stdout.fileno(), 1024)
59 if not data: 59 if not data:
60 self.process.stdout.close() 60 self.process.poll()
61 eof = True 61 if self.process.returncode is not None:
62 self.process.stdout.close()
63 eof = True
62 else: 64 else:
63 data = data.decode("utf-8") 65 data = data.decode("utf-8")
64 output += data 66 output += data
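The sshcontrol change above stops treating a transient empty read as end-of-file: stdout is only closed once poll() confirms the child has exited. Condensed into a standalone sketch, assuming a Popen object with stdout piped:

    import os
    import select

    def drain(process):
        output, eof = "", False
        while not eof:
            if select.select([process.stdout], [], [], 5)[0]:
                data = os.read(process.stdout.fileno(), 1024)
                if not data:
                    process.poll()
                    if process.returncode is not None:
                        process.stdout.close()   # real EOF: child has exited
                        eof = True
                    # otherwise: spurious empty read, retry (the real code
                    # also enforces an overall timeout around this loop)
                else:
                    output += data.decode("utf-8")
        return output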
diff --git a/meta/lib/oeqa/utils/subprocesstweak.py b/meta/lib/oeqa/utils/subprocesstweak.py
index b47975a4bc..1774513023 100644
--- a/meta/lib/oeqa/utils/subprocesstweak.py
+++ b/meta/lib/oeqa/utils/subprocesstweak.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4import subprocess 6import subprocess
@@ -6,16 +8,11 @@ import subprocess
6class OETestCalledProcessError(subprocess.CalledProcessError): 8class OETestCalledProcessError(subprocess.CalledProcessError):
7 def __str__(self): 9 def __str__(self):
8 def strify(o): 10 def strify(o):
9 if isinstance(o, bytes): 11 return o.decode("utf-8", errors="replace") if isinstance(o, bytes) else o
10 return o.decode("utf-8", errors="replace")
11 else:
12 return o
13 12
14 s = "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode) 13 s = super().__str__()
15 if hasattr(self, "output") and self.output: 14 s = s + "\nStandard Output: " + strify(self.output)
16 s = s + "\nStandard Output: " + strify(self.output) 15 s = s + "\nStandard Error: " + strify(self.stderr)
17 if hasattr(self, "stderr") and self.stderr:
18 s = s + "\nStandard Error: " + strify(self.stderr)
19 return s 16 return s
20 17
21def errors_have_output(): 18def errors_have_output():
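The simplified OETestCalledProcessError now leans on super().__str__() for the base message and unconditionally appends the captured streams; note that strify() passes None through, so this assumes callers captured output and stderr. A usage sketch, assuming errors_have_output() installs the subclass in place of subprocess.CalledProcessError (as its name and its use in the test suite suggest):

    import subprocess
    from oeqa.utils.subprocesstweak import errors_have_output

    errors_have_output()

    try:
        subprocess.run(["false"], check=True, capture_output=True)
    except subprocess.CalledProcessError as e:
        print(e)   # message now ends with Standard Output / Standard Error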
diff --git a/meta/lib/oeqa/utils/targetbuild.py b/meta/lib/oeqa/utils/targetbuild.py
index 1055810ca3..09738add1d 100644
--- a/meta/lib/oeqa/utils/targetbuild.py
+++ b/meta/lib/oeqa/utils/targetbuild.py
@@ -19,6 +19,7 @@ class BuildProject(metaclass=ABCMeta):
19 self.d = d 19 self.d = d
20 self.uri = uri 20 self.uri = uri
21 self.archive = os.path.basename(uri) 21 self.archive = os.path.basename(uri)
22 self.tempdirobj = None
22 if not tmpdir: 23 if not tmpdir:
23 tmpdir = self.d.getVar('WORKDIR') 24 tmpdir = self.d.getVar('WORKDIR')
24 if not tmpdir: 25 if not tmpdir:
@@ -71,9 +72,10 @@ class BuildProject(metaclass=ABCMeta):
71 return self._run('cd %s; make install %s' % (self.targetdir, install_args)) 72 return self._run('cd %s; make install %s' % (self.targetdir, install_args))
72 73
73 def clean(self): 74 def clean(self):
75 if self.tempdirobj:
76 self.tempdirobj.cleanup()
74 self._run('rm -rf %s' % self.targetdir) 77 self._run('rm -rf %s' % self.targetdir)
75 subprocess.check_call('rm -f %s' % self.localarchive, shell=True) 78 subprocess.check_call('rm -f %s' % self.localarchive, shell=True)
76 pass
77 79
78class TargetBuildProject(BuildProject): 80class TargetBuildProject(BuildProject):
79 81
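BuildProject.clean() now disposes of tempdirobj explicitly instead of relying on the TemporaryDirectory finalizer. A minimal sketch of the pattern, under the assumption (not shown in this hunk) that later code assigns a tempfile.TemporaryDirectory to tempdirobj when no tmpdir is supplied:

    import tempfile

    class Project:
        def __init__(self, tmpdir=None):
            self.tempdirobj = None
            if not tmpdir:
                self.tempdirobj = tempfile.TemporaryDirectory(prefix="build-")
                tmpdir = self.tempdirobj.name
            self.tmpdir = tmpdir

        def clean(self):
            if self.tempdirobj:
                self.tempdirobj.cleanup()   # remove the directory immediately
            # the real clean() also removes targetdir and the source archive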
diff --git a/meta/lib/oeqa/utils/testexport.py b/meta/lib/oeqa/utils/testexport.py
index e89d130a9c..3ab024d9e9 100644
--- a/meta/lib/oeqa/utils/testexport.py
+++ b/meta/lib/oeqa/utils/testexport.py
@@ -60,17 +60,17 @@ def process_binaries(d, params):
60 export_env = d.getVar("TEST_EXPORT_ONLY") 60 export_env = d.getVar("TEST_EXPORT_ONLY")
61 61
62 def extract_binary(pth_to_pkg, dest_pth=None): 62 def extract_binary(pth_to_pkg, dest_pth=None):
63 cpio_command = runCmd("which cpio") 63 tar_command = runCmd("which tar")
64 rpm2cpio_command = runCmd("ls /usr/bin/rpm2cpio") 64 rpm2archive_command = runCmd("ls /usr/bin/rpm2archive")
65 if (cpio_command.status != 0) and (rpm2cpio_command.status != 0): 65 if (tar_command.status != 0) and (rpm2archive_command.status != 0):
66 bb.fatal("Either \"rpm2cpio\" or \"cpio\" tools are not available on your system." 66 bb.fatal("Either \"rpm2archive\" or \"tar\" tools are not available on your system."
67 "All binaries extraction processes will not be available, crashing all related tests." 67 "All binaries extraction processes will not be available, crashing all related tests."
68 "Please install them according to your OS recommendations") # will exit here 68 "Please install them according to your OS recommendations") # will exit here
69 if dest_pth: 69 if dest_pth:
70 os.chdir(dest_pth) 70 os.chdir(dest_pth)
71 else: 71 else:
72 os.chdir("%s" % os.sep)# this is for native package 72 os.chdir("%s" % os.sep)# this is for native package
73 extract_bin_command = runCmd("%s %s | %s -idm" % (rpm2cpio_command.output, pth_to_pkg, cpio_command.output)) # semi-hardcoded because of a bug on poky's rpm2cpio 73 extract_bin_command = runCmd("%s -n %s | %s xv" % (rpm2archive_command.output, pth_to_pkg, tar_command.output)) # semi-hardcoded because of a bug on poky's rpm2cpio
74 return extract_bin_command 74 return extract_bin_command
75 75
76 if determine_if_poky_env(): # machine with poky environment 76 if determine_if_poky_env(): # machine with poky environment
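extract_binary() now builds an rpm2archive | tar pipeline instead of rpm2cpio | cpio (the trailing comment still refers to the old rpm2cpio bug). A sketch that mirrors the exact command string the patch constructs, without the which/ls tool lookups:

    import subprocess

    def extract_rpm(pth_to_pkg, dest_pth=None):
        # Mirrors the patch's command: rpm2archive -n <pkg> | tar xv
        cmd = "rpm2archive -n %s | tar xv" % pth_to_pkg
        return subprocess.run(cmd, shell=True, cwd=dest_pth,
                              capture_output=True, text=True)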
diff --git a/meta/lib/patchtest/README.md b/meta/lib/patchtest/README.md
new file mode 100644
index 0000000000..27cc61c802
--- /dev/null
+++ b/meta/lib/patchtest/README.md
@@ -0,0 +1,20 @@
1# patchtest selftests for openembedded-core
2
3This directory provides a test suite and selftest script for use with the
4patchtest repository: <https://git.yoctoproject.org/patchtest/>
5
6To set up for use:
7
81. Clone <https://git.openembedded.org/openembedded-core> (this repo) and <https://git.openembedded.org/bitbake/>
92. Clone <https://git.yoctoproject.org/patchtest>
103. Install the necessary Python modules: in meta/lib/patchtest or the patchtest
11 repo, do `pip install -r requirements.txt`
124. Add patchtest to PATH: `export PATH=/path/to/patchtest/repo:$PATH`
135. Initialize the environment: `source oe-init-build-env`
146. Add meta-selftest to bblayers.conf: `bitbake-layers add-layer
15 /path/to/meta-selftest/` (the selftests use this layer's recipes as test
16 targets)
177. Finally, run the selftest script: `./meta/lib/patchtest/selftest/selftest`
18
19For more information on using patchtest, see the patchtest repo at
20<https://git.yoctoproject.org/patchtest/>.
diff --git a/meta/lib/patchtest/mbox.py b/meta/lib/patchtest/mbox.py
new file mode 100644
index 0000000000..1d95819b7a
--- /dev/null
+++ b/meta/lib/patchtest/mbox.py
@@ -0,0 +1,108 @@
1#! /usr/bin/env python3
2
3# mbox.py
4#
5# Read a series' mbox file and get information about the patches
6# contained
7#
8# Copyright (C) 2024 BayLibre SAS
9#
10# SPDX-License-Identifier: GPL-2.0-only
11#
12
13import email
14import re
15
16# From: https://stackoverflow.com/questions/59681461/read-a-big-mbox-file-with-python
17class MboxReader:
18 def __init__(self, filepath):
19 self.handle = open(filepath, 'rb')
20 assert self.handle.readline().startswith(b'From ')
21
22 def __enter__(self):
23 return self
24
25 def __exit__(self, exc_type, exc_value, exc_traceback):
26 self.handle.close()
27
28 def __iter__(self):
29 return iter(self.__next__())
30
31 def __next__(self):
32 lines = []
33 while True:
34 line = self.handle.readline()
35 if line == b'' or line.startswith(b'From '):
36 yield email.message_from_bytes(b''.join(lines))
37 if line == b'':
38 break
39 lines = []
40 continue
41 lines.append(line)
42
43class Patch:
44 def __init__(self, data):
45 self.author = data['From']
46 self.to = data['To']
47 self.cc = data['Cc']
48 self.subject = data['Subject']
49 self.split_body = re.split('---', data.get_payload(), maxsplit=1)
50 self.commit_message = self.split_body[0]
51 self.diff = self.split_body[1]
52
53class PatchSeries:
54 def __init__(self, filepath):
55 with MboxReader(filepath) as mbox:
56 self.patches = [Patch(message) for message in mbox]
57
58 assert self.patches
59 self.patch_count = len(self.patches)
60 self.path = filepath
61
62 @property
63 def path(self):
64 return self.path
65
66 self.branch = self.get_branch()
67
68 def get_branch(self):
69 fullprefix = ""
70 pattern = re.compile(r"(\[.*\])", re.DOTALL)
71
72 # There should be at least one patch in the series and it should
73 # include the branch name in the subject, so parse that
74 match = pattern.search(self.patches[0].subject)
75 if match:
76 fullprefix = match.group(1)
77
78 branch, branches, valid_branches = None, [], []
79
80 if fullprefix:
81 prefix = fullprefix.strip('[]')
82 branches = [ b.strip() for b in prefix.split(',')]
83 valid_branches = [b for b in branches if PatchSeries.valid_branch(b)]
84
85 if len(valid_branches):
86 branch = valid_branches[0]
87
88 # Get the branch name excluding any brackets. If nothing was
89 # found, then assume there was no branch tag in the subject line
90 # and that the patch targets master
91 if branch is not None:
92 return branch.split(']')[0]
93 else:
94 return "master"
95
96 @staticmethod
97 def valid_branch(branch):
98 """ Check if branch is valid name """
99 lbranch = branch.lower()
100
101 invalid = lbranch.startswith('patch') or \
102 lbranch.startswith('rfc') or \
103 lbranch.startswith('resend') or \
104 re.search(r'^v\d+', lbranch) or \
105 re.search(r'^\d+/\d+', lbranch)
106
107 return not invalid
108
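One caveat in PatchSeries as written: __init__ assigns self.path, but the class also defines a path property returning self.path, a data descriptor that would both block that assignment and recurse if read, and the self.branch line sits unreachably after the property's return. A sketch of the apparent intent, with path as a plain attribute and branch computed once in the constructor:

    class PatchSeries:
        def __init__(self, filepath):
            with MboxReader(filepath) as mbox:
                self.patches = [Patch(message) for message in mbox]

            assert self.patches
            self.patch_count = len(self.patches)
            self.path = filepath              # plain attribute, no property
            self.branch = self.get_branch()   # now actually executed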
diff --git a/meta/lib/patchtest/patchtest_parser.py b/meta/lib/patchtest/patchtest_parser.py
new file mode 100644
index 0000000000..2a11cb76c2
--- /dev/null
+++ b/meta/lib/patchtest/patchtest_parser.py
@@ -0,0 +1,78 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# patchtest_parser: module used to share command-line arguments between
5# patchtest & test suite and a data store between test cases
6#
7# Copyright (C) 2016 Intel Corporation
8#
9# SPDX-License-Identifier: GPL-2.0-only
10#
11# NOTE: Strictly speaking, unit tests should be isolated from the outside,
12# but the patchtest test suite uses command-line input data, and
13# the pretest and test cases may use the datastore defined
14# in this module
15
16import os
17import argparse
18
19default_testdir = os.path.abspath(os.path.dirname(__file__) + "/tests")
20default_repodir = os.path.abspath(os.path.dirname(__file__) + "/../../..")
21
22class PatchtestParser(object):
23 """Abstract the patchtest argument parser"""
24
25 @classmethod
26 def set_namespace(cls):
27 parser = cls.get_parser()
28 parser.parse_args(namespace=cls)
29
30 @classmethod
31 def get_parser(cls):
32 parser = argparse.ArgumentParser()
33
34 target_patch_group = parser.add_mutually_exclusive_group(required=True)
35
36 target_patch_group.add_argument('--patch', metavar='PATCH', dest='patch_path',
37 help='The patch to be tested')
38
39 target_patch_group.add_argument('--directory', metavar='DIRECTORY', dest='patch_path',
40 help='The directory containing patches to be tested')
41
42 parser.add_argument('--repodir', metavar='REPO',
43 default=default_repodir,
44 help="Name of the repository where patch is merged")
45
46 parser.add_argument('--testdir', metavar='TESTDIR',
47 default=default_testdir,
48 help="Directory where test cases are located")
49
50 parser.add_argument('--top-level-directory', '-t',
51 dest='topdir',
52 default=None,
53 help="Top level directory of project (defaults to start directory)")
54
55 parser.add_argument('--pattern', '-p',
56 dest='pattern',
57 default='test*.py',
58 help="Pattern to match test files")
59
60 parser.add_argument('--base-branch', '-b',
61 dest='basebranch',
62 help="Branch name used by patchtest to branch from. By default, it uses the current one.")
63
64 parser.add_argument('--base-commit', '-c',
65 dest='basecommit',
66 help="Commit ID used by patchtest to branch from. By default, it uses HEAD.")
67
68 parser.add_argument('--debug', '-d',
69 action='store_true',
70 help='Enable debug output')
71
72 parser.add_argument('--log-results',
73 action='store_true',
74 help='Enable logging to a file matching the target patch name with ".testresult" appended')
75
76
77 return parser
78
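Because set_namespace() passes namespace=cls to parse_args(), the parsed options land as class attributes on PatchtestParser itself, so tests can import the class and read the shared configuration without passing a namespace object around. A usage sketch, assuming meta/lib/patchtest is on sys.path; the attribute names follow the dest= values above:

    from patchtest_parser import PatchtestParser

    PatchtestParser.set_namespace()        # parses sys.argv once
    print(PatchtestParser.patch_path)      # from --patch or --directory
    print(PatchtestParser.repodir)         # defaults to the oe-core checkout
    print(PatchtestParser.basebranch)      # None unless --base-branch given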
diff --git a/meta/lib/patchtest/patchtest_patterns.py b/meta/lib/patchtest/patchtest_patterns.py
new file mode 100644
index 0000000000..50637cf499
--- /dev/null
+++ b/meta/lib/patchtest/patchtest_patterns.py
@@ -0,0 +1,98 @@
1# common pyparsing variables
2#
3# Copyright (C) 2016 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6
7import pyparsing
8import re
9
10# general
11colon = pyparsing.Literal(":")
12line_start = pyparsing.LineStart()
13line_end = pyparsing.LineEnd()
14lessthan = pyparsing.Literal("<")
15greaterthan = pyparsing.Literal(">")
16inappropriate = pyparsing.CaselessLiteral("Inappropriate")
17submitted = pyparsing.CaselessLiteral("Submitted")
18
19# word related
20nestexpr = pyparsing.nestedExpr(opener='[', closer=']')
21inappropriateinfo = pyparsing.Literal("Inappropriate") + nestexpr
22submittedinfo = pyparsing.Literal("Submitted") + nestexpr
23word = pyparsing.Word(pyparsing.alphas)
24worddot = pyparsing.Word(pyparsing.alphas+".")
25
26# metadata
27
28metadata_lic = 'LICENSE'
29invalid_license = 'PATCHTESTINVALID'
30metadata_chksum = 'LIC_FILES_CHKSUM'
31license_var = 'LICENSE'
32closed = 'CLOSED'
33lictag_re = pyparsing.AtLineStart("License-Update:")
34lic_chksum_added = pyparsing.AtLineStart("+" + metadata_chksum)
35lic_chksum_removed = pyparsing.AtLineStart("-" + metadata_chksum)
36add_mark = pyparsing.Regex('\\+ ')
37patch_max_line_length = 200
38metadata_src_uri = "SRC_URI"
39metadata_summary = "SUMMARY"
40cve_check_ignore_var = "CVE_CHECK_IGNORE"
41cve_status_var = "CVE_STATUS"
42endcommit_messages_regex = re.compile(
43 r"\(From \w+-\w+ rev:|(?<!\S)Signed-off-by|(?<!\S)---\n"
44)
45patchmetadata_regex = re.compile(
46 r"-{3} \S+|\+{3} \S+|@{2} -\d+,\d+ \+\d+,\d+ @{2} \S+"
47)
48
49# mbox
50auh_email = 'auh@yoctoproject.org'
51
52invalid_submitters = [pyparsing.Regex("^Upgrade Helper.+"),
53 pyparsing.Regex(auh_email),
54 pyparsing.Regex("uh@not\.set"),
55 pyparsing.Regex("\S+@example\.com")]
56
57mbox_bugzilla = pyparsing.Regex('\[\s?YOCTO.*\]')
58mbox_bugzilla_validation = pyparsing.Regex('\[(\s?YOCTO\s?#\s?(\d+)\s?,?)+\]')
59mbox_revert_shortlog_regex = pyparsing.Regex('Revert\s+".*"')
60mbox_shortlog_maxlength = 90
61# based on https://stackoverflow.com/questions/30281026/regex-parsing-github-usernames-javascript
62mbox_github_username = pyparsing.Regex('\B@([a-z0-9](?:-(?=[a-z0-9])|[a-z0-9]){0,38}(?<=[a-z0-9]))')
63
64# patch
65
66cve = pyparsing.Regex("CVE\-\d{4}\-\d+")
67cve_payload_tag = pyparsing.Regex("\+CVE:(\s+CVE\-\d{4}\-\d+)+")
68upstream_status_regex = pyparsing.AtLineStart("+" + "Upstream-Status")
69
70# shortlog
71
72shortlog_target = pyparsing.OneOrMore(pyparsing.Word(pyparsing.printables.replace(':','')))
73shortlog_summary = pyparsing.OneOrMore(pyparsing.Word(pyparsing.printables))
74shortlog = line_start + shortlog_target + colon + shortlog_summary + line_end
75
76# signed-off-bys
77
78email_pattern = pyparsing.Regex(r"(?P<user>[A-Za-z0-9._%+-]+)@(?P<hostname>[A-Za-z0-9.-]+)\.(?P<domain>[A-Za-z]{2,})")
79
80signed_off_by_prefix = pyparsing.Literal("Signed-off-by:")
81signed_off_by_name = pyparsing.Regex('\S+.*(?= <)')
82signed_off_by_email = lessthan + email_pattern + greaterthan
83signed_off_by = pyparsing.AtLineStart(signed_off_by_prefix + signed_off_by_name + signed_off_by_email)
84patch_signed_off_by = pyparsing.AtLineStart("+" + signed_off_by_prefix + signed_off_by_name + signed_off_by_email)
85
86# upstream-status
87
88upstream_status_literal_valid_status = ["Pending", "Backport", "Denied", "Inappropriate", "Submitted", "Inactive-Upstream"]
89upstream_status_nonliteral_valid_status = ["Pending", "Backport", "Denied", "Inappropriate [reason]", "Submitted [where]", "Inactive-Upstream [lastcommit: when (and/or) lastrelease: when]"]
90
91upstream_status_valid_status = pyparsing.Or(
92 [pyparsing.Literal(status) for status in upstream_status_literal_valid_status]
93)
94
95upstream_status_prefix = pyparsing.Literal("Upstream-Status")
96upstream_status = line_start + upstream_status_prefix + colon + upstream_status_valid_status
97upstream_status_inappropriate_info = line_start + upstream_status_prefix + colon + inappropriateinfo
98upstream_status_submitted_info = line_start + upstream_status_prefix + colon + submittedinfo
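These pyparsing expressions are meant to be matched against individual lines of a commit message or patch. A small sketch of how a test might validate an Upstream-Status line with them, assuming meta/lib/patchtest is on sys.path:

    import pyparsing
    from patchtest_patterns import upstream_status

    line = "Upstream-Status: Submitted [oe-core]"
    try:
        tokens = upstream_status.parseString(line)
        print(list(tokens))    # expected: ['Upstream-Status', ':', 'Submitted']
    except pyparsing.ParseException as exc:
        print("invalid Upstream-Status line:", exc)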
diff --git a/meta/lib/patchtest/repo.py b/meta/lib/patchtest/repo.py
new file mode 100644
index 0000000000..8ec8f68a0b
--- /dev/null
+++ b/meta/lib/patchtest/repo.py
@@ -0,0 +1,85 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# patchtestrepo: PatchTestRepo class used mainly to control a git repo from patchtest
5#
6# Copyright (C) 2016 Intel Corporation
7#
8# SPDX-License-Identifier: GPL-2.0-only
9#
10
11import git
12import os
13import mbox
14
15class PatchTestRepo(object):
16
17 # prefix used for temporary branches/stashes
18 prefix = 'patchtest'
19
20 def __init__(self, patch, repodir, commit=None, branch=None):
21 self.repodir = repodir
22 self.repo = git.Repo.init(repodir)
23 self.patch = mbox.PatchSeries(patch)
24 self.current_branch = self.repo.active_branch.name
25
26 # targeted branch defined on the patch may be invalid, so make sure there
27 # is a corresponding remote branch
28 valid_patch_branch = None
29 if self.patch.branch in self.repo.branches:
30 valid_patch_branch = self.patch.branch
31
32 # Target Commit
33 # Priority (top has highest priority):
34 # 1. commit given at cmd line
35 # 2. branch given at cmd line
36 # 3. branch given at the patch
37 # 4. current HEAD
38 self._commit = self._get_commitid(commit) or \
39 self._get_commitid(branch) or \
40 self._get_commitid(valid_patch_branch) or \
41 self._get_commitid('HEAD')
42
43 self._workingbranch = "%s_%s" % (PatchTestRepo.prefix, os.getpid())
44
45 # create working branch. Use the '-B' flag so that we just
46 # check out the existing one if it's there
47 self.repo.git.execute(['git', 'checkout', '-B', self._workingbranch, self._commit])
48
49 self._patchmerged = False
50
51 # Check if patch can be merged using git-am
52 self._patchcanbemerged = True
53 try:
54 # Make sure to get the absolute path of the file
55 self.repo.git.execute(['git', 'apply', '--check', os.path.abspath(self.patch.path)], with_exceptions=True)
56 except git.exc.GitCommandError as ce:
57 self._patchcanbemerged = False
58
59 def ismerged(self):
60 return self._patchmerged
61
62 def canbemerged(self):
63 return self._patchcanbemerged
64
65 def _get_commitid(self, commit):
66
67 if not commit:
68 return None
69
70 try:
71 return self.repo.rev_parse(commit).hexsha
72 except Exception as e:
73 print(f"Couldn't find commit {commit} in repo")
74
75 return None
76
77 def merge(self):
78 if self._patchcanbemerged:
79 self.repo.git.execute(['git', 'am', '--keep-cr', os.path.abspath(self.patch.path)])
80 self._patchmerged = True
81
82 def clean(self):
83 self.repo.git.execute(['git', 'checkout', self.current_branch])
84 self.repo.git.execute(['git', 'branch', '-D', self._workingbranch])
85 self._patchmerged = False
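A typical PatchTestRepo lifecycle, sketched from the methods above (the paths are hypothetical): construct it against a checkout, merge the series onto the throwaway patchtest_<pid> branch if git apply --check succeeded, then restore the original branch:

    from repo import PatchTestRepo

    repo = PatchTestRepo("/path/to/series.mbox", "/path/to/openembedded-core")
    if repo.canbemerged():
        repo.merge()             # git am --keep-cr onto the working branch
    print("merged:", repo.ismerged())
    repo.clean()                 # checkout original branch, delete work branch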
diff --git a/meta/lib/patchtest/requirements.txt b/meta/lib/patchtest/requirements.txt
new file mode 100644
index 0000000000..4247b91f09
--- /dev/null
+++ b/meta/lib/patchtest/requirements.txt
@@ -0,0 +1,7 @@
1boto3
2git-pw>=2.5.0
3GitPython
4jinja2
5pylint
6pyparsing>=3.0.9
7unidiff
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.fail
new file mode 100644
index 0000000000..30c1bc4624
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.fail
@@ -0,0 +1,43 @@
1From 04eb94a0134ef5eb5b5fd783b303104fbfcd8437 Mon Sep 17 00:00:00 2001
2From: First Last <first.last@example.com>
3Date: Fri, 31 May 2024 11:03:47 -0400
4Subject: [PATCH] selftest-hello: add selftest-hello-extra
5
6This should fail the test_author_valid test.
7
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9---
10 .../selftest-hello-extra_1.0.bb | 21 +++++++++++++++++++
11 1 file changed, 21 insertions(+)
12 create mode 100644 meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
13
14diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
15new file mode 100644
16index 00000000000..f3dec1b220c
17--- /dev/null
18+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
19@@ -0,0 +1,21 @@
20+SUMMARY = "This is an example summary"
21+DESCRIPTION = "Simple helloworld application -- selftest variant"
22+SECTION = "examples"
23+LICENSE = "MIT"
24+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
25+
26+SRC_URI = "file://helloworld.c"
27+
28+S = "${WORKDIR}/sources"
29+UNPACKDIR = "${S}"
30+
31+do_compile() {
32+ ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld
33+}
34+
35+do_install() {
36+ install -d ${D}${bindir}
37+ install -m 0755 helloworld ${D}${bindir}
38+}
39+
40+BBCLASSEXTEND = "native nativesdk"
41--
422.45.1
43
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.pass
new file mode 100644
index 0000000000..6e82b08bc6
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.pass
@@ -0,0 +1,43 @@
1From 04eb94a0134ef5eb5b5fd783b303104fbfcd8437 Mon Sep 17 00:00:00 2001
2From: First Last <first.last@address.com>
3Date: Fri, 31 May 2024 11:03:47 -0400
4Subject: [PATCH] selftest-hello: add selftest-hello-extra
5
6This should pass the test_author_valid test.
7
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9---
10 .../selftest-hello-extra_1.0.bb | 21 +++++++++++++++++++
11 1 file changed, 21 insertions(+)
12 create mode 100644 meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
13
14diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
15new file mode 100644
16index 00000000000..f3dec1b220c
17--- /dev/null
18+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
19@@ -0,0 +1,21 @@
20+SUMMARY = "This is an example summary"
21+DESCRIPTION = "Simple helloworld application -- selftest variant"
22+SECTION = "examples"
23+LICENSE = "MIT"
24+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
25+
26+SRC_URI = "file://helloworld.c"
27+
28+S = "${WORKDIR}/sources"
29+UNPACKDIR = "${S}"
30+
31+do_compile() {
32+ ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld
33+}
34+
35+do_install() {
36+ install -d ${D}${bindir}
37+ install -m 0755 helloworld ${D}${bindir}
38+}
39+
40+BBCLASSEXTEND = "native nativesdk"
41--
422.45.1
43
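The selftest fixtures in this series are complete mbox files named TestSuite.test_name[.N].pass, .fail, or .skip, so the expected outcome is encoded in the filename suffix. A hedged sketch of how a driver could recover the target test and expected result from a fixture path (helper name hypothetical; the real selftest script may differ):

    import os

    def expected_result(fixture_path):
        # "TestMbox.test_author_valid.1.fail" -> ("TestMbox", "test_author_valid", "fail")
        parts = os.path.basename(fixture_path).split(".")
        suite, testname, outcome = parts[0], parts[1], parts[-1]
        return suite, testname, outcome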
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.fail
new file mode 100644
index 0000000000..745a8f45d9
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.fail
@@ -0,0 +1,43 @@
1From 04eb94a0134ef5eb5b5fd783b303104fbfcd8437 Mon Sep 17 00:00:00 2001
2From: Upgrade Helper <auh@auh.yoctoproject.org>
3Date: Fri, 31 May 2024 11:03:47 -0400
4Subject: [PATCH] selftest-hello: add selftest-hello-extra
5
6This should fail the test_author_valid test.
7
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9---
10 .../selftest-hello-extra_1.0.bb | 21 +++++++++++++++++++
11 1 file changed, 21 insertions(+)
12 create mode 100644 meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
13
14diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
15new file mode 100644
16index 00000000000..f3dec1b220c
17--- /dev/null
18+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
19@@ -0,0 +1,21 @@
20+SUMMARY = "This is an example summary"
21+DESCRIPTION = "Simple helloworld application -- selftest variant"
22+SECTION = "examples"
23+LICENSE = "MIT"
24+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
25+
26+SRC_URI = "file://helloworld.c"
27+
28+S = "${WORKDIR}/sources"
29+UNPACKDIR = "${S}"
30+
31+do_compile() {
32+ ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld
33+}
34+
35+do_install() {
36+ install -d ${D}${bindir}
37+ install -m 0755 helloworld ${D}${bindir}
38+}
39+
40+BBCLASSEXTEND = "native nativesdk"
41--
422.45.1
43
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.pass
new file mode 100644
index 0000000000..56cb77fa69
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.pass
@@ -0,0 +1,43 @@
1From 04eb94a0134ef5eb5b5fd783b303104fbfcd8437 Mon Sep 17 00:00:00 2001
2From: First Last <averylongemailaddressthatishardtoread.from@address.com>
3Date: Fri, 31 May 2024 11:03:47 -0400
4Subject: [PATCH] selftest-hello: add selftest-hello-extra
5
6This should pass the test_author_valid test.
7
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9---
10 .../selftest-hello-extra_1.0.bb | 21 +++++++++++++++++++
11 1 file changed, 21 insertions(+)
12 create mode 100644 meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
13
14diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
15new file mode 100644
16index 00000000000..f3dec1b220c
17--- /dev/null
18+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
19@@ -0,0 +1,21 @@
20+SUMMARY = "This is an example summary"
21+DESCRIPTION = "Simple helloworld application -- selftest variant"
22+SECTION = "examples"
23+LICENSE = "MIT"
24+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
25+
26+SRC_URI = "file://helloworld.c"
27+
28+S = "${WORKDIR}/sources"
29+UNPACKDIR = "${S}"
30+
31+do_compile() {
32+ ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld
33+}
34+
35+do_install() {
36+ install -d ${D}${bindir}
37+ install -m 0755 helloworld ${D}${bindir}
38+}
39+
40+BBCLASSEXTEND = "native nativesdk"
41--
422.45.1
43
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.fail
new file mode 100644
index 0000000000..6facb8c756
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.fail
@@ -0,0 +1,67 @@
1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5
6This should fail the test_bugzilla_entry_format test.
7
8[YOCTO 1234]
9CVE: CVE-1234-56789
10
11Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
12---
13 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
14 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
15 2 files changed, 29 insertions(+), 1 deletion(-)
16 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
17
18diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
19new file mode 100644
20index 00000000000..8a4f9329303
21--- /dev/null
22+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
23@@ -0,0 +1,26 @@
24+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
25+From: Trevor Gamblin <tgamblin@baylibre.com>
26+Date: Tue, 29 Aug 2023 14:08:20 -0400
27+Subject: [PATCH] Fix CVE-NOT-REAL
28+
29+CVE: CVE-1234-56789
30+Upstream-Status: Backport(http://example.com/example)
31+
32+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
33+---
34+ strlen.c | 1 +
35+ 1 file changed, 1 insertion(+)
36+
37+diff --git a/strlen.c b/strlen.c
38+index 1788f38..83d7918 100644
39+--- a/strlen.c
40++++ b/strlen.c
41+
42+int main() {
43+
44+ printf("%d\n", str_len(string1));
45+ printf("%d\n", str_len(string2));
46+ printf("CVE FIXED!!!\n");
47+
48+ return 0;
49+}
50diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
51index 2dc352d479e..d937759f157 100644
52--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
53+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
54@@ -3,7 +3,9 @@ SECTION = "examples"
55 LICENSE = "MIT"
56 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
57
58-SRC_URI = "file://helloworld.c"
59+SRC_URI = "file://helloworld.c \
60+ file://0001-Fix-CVE-1234-56789.patch \
61+ "
62
63 S = "${WORKDIR}/sources"
64 UNPACKDIR = "${S}"
65--
662.45.1
67
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.pass
new file mode 100644
index 0000000000..2f35458b4f
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.pass
@@ -0,0 +1,67 @@
1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5
6This should pass the test_bugzilla_entry_format test.
7
8[YOCTO #1234]
9CVE: CVE-1234-56789
10
11Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
12---
13 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
14 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
15 2 files changed, 29 insertions(+), 1 deletion(-)
16 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
17
18diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
19new file mode 100644
20index 00000000000..8a4f9329303
21--- /dev/null
22+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
23@@ -0,0 +1,26 @@
24+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
25+From: Trevor Gamblin <tgamblin@baylibre.com>
26+Date: Tue, 29 Aug 2023 14:08:20 -0400
27+Subject: [PATCH] Fix CVE-NOT-REAL
28+
29+CVE: CVE-1234-56789
30+Upstream-Status: Backport(http://example.com/example)
31+
32+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
33+---
34+ strlen.c | 1 +
35+ 1 file changed, 1 insertion(+)
36+
37+diff --git a/strlen.c b/strlen.c
38+index 1788f38..83d7918 100644
39+--- a/strlen.c
40++++ b/strlen.c
41+
42+int main() {
43+
44+ printf("%d\n", str_len(string1));
45+ printf("%d\n", str_len(string2));
46+ printf("CVE FIXED!!!\n");
47+
48+ return 0;
49+}
50diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
51index 2dc352d479e..d937759f157 100644
52--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
53+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
54@@ -3,7 +3,9 @@ SECTION = "examples"
55 LICENSE = "MIT"
56 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
57
58-SRC_URI = "file://helloworld.c"
59+SRC_URI = "file://helloworld.c \
60+ file://0001-Fix-CVE-1234-56789.patch \
61+ "
62
63 S = "${WORKDIR}/sources"
64 UNPACKDIR = "${S}"
65--
662.45.1
67
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.fail
new file mode 100644
index 0000000000..6f4e61c0da
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.fail
@@ -0,0 +1,62 @@
1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5
6Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
7---
8 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
9 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
10 2 files changed, 29 insertions(+), 1 deletion(-)
11 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
12
13diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
14new file mode 100644
15index 00000000000..8a4f9329303
16--- /dev/null
17+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
18@@ -0,0 +1,26 @@
19+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
20+From: Trevor Gamblin <tgamblin@baylibre.com>
21+Date: Tue, 29 Aug 2023 14:08:20 -0400
22+Subject: [PATCH] Fix CVE-NOT-REAL
23+
24+CVE: CVE-1234-56789
25+Upstream-Status: Backport(http://example.com/example)
26+
27+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
28+---
29+ strlen.c | 1 +
30+ 1 file changed, 1 insertion(+)
31+
32+diff --git a/strlen.c b/strlen.c
33+index 1788f38..83d7918 100644
34+--- a/strlen.c
35++++ b/strlen.c
36+
37+int main() {
38+
39+ printf("%d\n", str_len(string1));
40+ printf("%d\n", str_len(string2));
41+ printf("CVE FIXED!!!\n");
42+
43+ return 0;
44+}
45diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
46index 2dc352d479e..d937759f157 100644
47--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
48+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
49@@ -3,7 +3,9 @@ SECTION = "examples"
50 LICENSE = "MIT"
51 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
52
53-SRC_URI = "file://helloworld.c"
54+SRC_URI = "file://helloworld.c \
55+ file://0001-Fix-CVE-1234-56789.patch \
56+ "
57
58 S = "${WORKDIR}/sources"
59 UNPACKDIR = "${S}"
60--
612.45.1
62
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.pass
new file mode 100644
index 0000000000..3fbc23fd00
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.pass
@@ -0,0 +1,66 @@
1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5
6This should pass the test_commit_message_presence test.
7
8CVE: CVE-1234-56789
9
10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
11---
12 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
14 2 files changed, 29 insertions(+), 1 deletion(-)
15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16
17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
18new file mode 100644
19index 00000000000..8a4f9329303
20--- /dev/null
21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
22@@ -0,0 +1,26 @@
23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
24+From: Trevor Gamblin <tgamblin@baylibre.com>
25+Date: Tue, 29 Aug 2023 14:08:20 -0400
26+Subject: [PATCH] Fix CVE-NOT-REAL
27+
28+CVE: CVE-1234-56789
29+Upstream-Status: Backport(http://example.com/example)
30+
31+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
32+---
33+ strlen.c | 1 +
34+ 1 file changed, 1 insertion(+)
35+
36+diff --git a/strlen.c b/strlen.c
37+index 1788f38..83d7918 100644
38+--- a/strlen.c
39++++ b/strlen.c
40+
41+int main() {
42+
43+ printf("%d\n", str_len(string1));
44+ printf("%d\n", str_len(string2));
45+ printf("CVE FIXED!!!\n");
46+
47+ return 0;
48+}
49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
50index 2dc352d479e..d937759f157 100644
51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
53@@ -3,7 +3,9 @@ SECTION = "examples"
54 LICENSE = "MIT"
55 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
56
57-SRC_URI = "file://helloworld.c"
58+SRC_URI = "file://helloworld.c \
59+ file://0001-Fix-CVE-1234-56789.patch \
60+ "
61
62 S = "${WORKDIR}/sources"
63 UNPACKDIR = "${S}"
64--
652.45.1
66
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_user_tags.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_user_tags.fail
new file mode 100644
index 0000000000..9d54af9644
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_user_tags.fail
@@ -0,0 +1,65 @@
1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5
6This should fail the test_commit_message_user_tags test because of this
7string: @teststring
8
9Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
10---
11 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
12 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
13 2 files changed, 29 insertions(+), 1 deletion(-)
14 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
15
16diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
17new file mode 100644
18index 00000000000..8a4f9329303
19--- /dev/null
20+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
21@@ -0,0 +1,26 @@
22+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
23+From: Trevor Gamblin <tgamblin@baylibre.com>
24+Date: Tue, 29 Aug 2023 14:08:20 -0400
25+Subject: [PATCH] Fix CVE-NOT-REAL
26+
27+CVE: CVE-1234-56789
28+Upstream-Status: Backport(http://example.com/example)
29+
30+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
31+---
32+ strlen.c | 1 +
33+ 1 file changed, 1 insertion(+)
34+
35+diff --git a/strlen.c b/strlen.c
36+index 1788f38..83d7918 100644
37+--- a/strlen.c
38++++ b/strlen.c
39+
40+int main() {
41+
42+ printf("%d\n", str_len(string1));
43+ printf("%d\n", str_len(string2));
44+ printf("CVE FIXED!!!\n");
45+
46+ return 0;
47+}
48diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
49index 2dc352d479e..d937759f157 100644
50--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
51+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52@@ -3,7 +3,9 @@ SECTION = "examples"
53 LICENSE = "MIT"
54 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
55
56-SRC_URI = "file://helloworld.c"
57+SRC_URI = "file://helloworld.c \
58+ file://0001-Fix-CVE-1234-56789.patch \
59+ "
60
61 S = "${WORKDIR}/sources"
62 UNPACKDIR = "${S}"
63--
642.45.1
65
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_user_tags.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_user_tags.pass
new file mode 100644
index 0000000000..57f2fc8a8e
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_user_tags.pass
@@ -0,0 +1,66 @@
1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5
6This should pass the test_commit_message_user_tags test.
7
8CVE: CVE-1234-56789
9
10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
11---
12 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
14 2 files changed, 29 insertions(+), 1 deletion(-)
15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16
17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
18new file mode 100644
19index 00000000000..8a4f9329303
20--- /dev/null
21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
22@@ -0,0 +1,26 @@
23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
24+From: Trevor Gamblin <tgamblin@baylibre.com>
25+Date: Tue, 29 Aug 2023 14:08:20 -0400
26+Subject: [PATCH] Fix CVE-NOT-REAL
27+
28+CVE: CVE-1234-56789
29+Upstream-Status: Backport(http://example.com/example)
30+
31+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
32+---
33+ strlen.c | 1 +
34+ 1 file changed, 1 insertion(+)
35+
36+diff --git a/strlen.c b/strlen.c
37+index 1788f38..83d7918 100644
38+--- a/strlen.c
39++++ b/strlen.c
40+
41+int main() {
42+
43+ printf("%d\n", str_len(string1));
44+ printf("%d\n", str_len(string2));
45+ printf("CVE FIXED!!!\n");
46+
47+ return 0;
48+}
49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
50index 2dc352d479e..d937759f157 100644
51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
53@@ -3,7 +3,9 @@ SECTION = "examples"
54 LICENSE = "MIT"
55 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
56
57-SRC_URI = "file://helloworld.c"
58+SRC_URI = "file://helloworld.c \
59+ file://0001-Fix-CVE-1234-56789.patch \
60+ "
61
62 S = "${WORKDIR}/sources"
63 UNPACKDIR = "${S}"
64--
652.45.1
66
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.fail
new file mode 100644
index 0000000000..0dda6802d1
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.fail
@@ -0,0 +1,66 @@
1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5
6This should fail the test_mbox_format test.
7
8CVE: CVE-1234-56789
9
10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
11---
12 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
14 2 files changed, 29 insertions(+), 1 deletion(-)
15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16
17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
18new file mode 100644
19index 00000000000..8a4f9329303
20--- /dev/null
21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
22@@ -0,0 +1,26 @@
23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
24+From: Trevor Gamblin <tgamblin@baylibre.com>
25+Date: Tue, 29 Aug 2023 14:08:20 -0400
26+Subject: [PATCH] Fix CVE-NOT-REAL
27+
28+CVE: CVE-1234-56789
29+Upstream-Status: Backport(http://example.com/example)
30+
31+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
32+---
33+ strlen.c | 1 +
34+ 1 file changed, 1 insertion(+)
35+
36+diff --git a/strlen.c b/strlen.c
37+index 1788f38..83d7918 100644
38+--- a/strlen.c
39++++ b/strlen.c
40+
41+int main() {
42+
43+ printf("%d\n", str_len(string1));
44+ printf("%d\n", str_len(string2));
45+ printf("CVE FIXED!!!\n");
46+
47+ return 0;
48+}
49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
50index 2dc352d479e..d937759f157 100644
51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
53@@ -3,7 +3,9 @@ SECTION = "examples"
54 LICENSE = "MIT"
55 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
56
57-SRC_URI = "file://helloworld.c"
58+SRC_URI = "file://helloworld.c \
59%+ file://0001-Fix-CVE-1234-56789.patch \
60+ "
61
62 S = "${WORKDIR}/sources"
63 UNPACKDIR = "${S}"
64--
652.45.1
66
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.pass
new file mode 100644
index 0000000000..f06ae11d04
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.pass
@@ -0,0 +1,66 @@
1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5
6This should pass the test_mbox_format test.
7
8CVE: CVE-1234-56789
9
10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
11---
12 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
14 2 files changed, 29 insertions(+), 1 deletion(-)
15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16
17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
18new file mode 100644
19index 00000000000..8a4f9329303
20--- /dev/null
21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
22@@ -0,0 +1,26 @@
23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
24+From: Trevor Gamblin <tgamblin@baylibre.com>
25+Date: Tue, 29 Aug 2023 14:08:20 -0400
26+Subject: [PATCH] Fix CVE-NOT-REAL
27+
28+CVE: CVE-1234-56789
29+Upstream-Status: Backport(http://example.com/example)
30+
31+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
32+---
33+ strlen.c | 1 +
34+ 1 file changed, 1 insertion(+)
35+
36+diff --git a/strlen.c b/strlen.c
37+index 1788f38..83d7918 100644
38+--- a/strlen.c
39++++ b/strlen.c
40+
41+int main() {
42+
43+ printf("%d\n", str_len(string1));
44+ printf("%d\n", str_len(string2));
45+ printf("CVE FIXED!!!\n");
46+
47+ return 0;
48+}
49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
50index 2dc352d479e..d937759f157 100644
51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
53@@ -3,7 +3,9 @@ SECTION = "examples"
54 LICENSE = "MIT"
55 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
56
57-SRC_URI = "file://helloworld.c"
58+SRC_URI = "file://helloworld.c \
59+ file://0001-Fix-CVE-1234-56789.patch \
60+ "
61
62 S = "${WORKDIR}/sources"
63 UNPACKDIR = "${S}"
64--
652.45.1
66
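The fixture pair above exercises test_mbox_format: the mbox must parse as a well-formed patch, and the .fail variant corrupts a SRC_URI continuation line with a stray "%". A minimal sketch of that kind of parseability check, reusing the unidiff library that patchtest's base class imports (an illustration, not the actual patchtest implementation):

    # Minimal sketch: report whether a patch file parses cleanly,
    # mirroring the unidiff.UnidiffParseError handling in patchtest's
    # Base.setUpClass.
    import unidiff

    def mbox_parses(path):
        try:
            unidiff.PatchSet.from_filename(path, encoding="UTF-8")
            return True
        except unidiff.UnidiffParseError:
            return False
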
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_series_merge_on_head.1.skip b/meta/lib/patchtest/selftest/files/TestMbox.test_series_merge_on_head.1.skip
new file mode 100644
index 0000000000..072ccc28c0
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_series_merge_on_head.1.skip
@@ -0,0 +1,35 @@
1From d12db4cfa913b0e7a4b5bd858d3019acc53ce426 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Wed, 30 Aug 2023 12:15:00 -0400
4Subject: [PATCH] selftest-hello: upgrade 1.0 -> 1.1
5
6This file should skip the test_series_merge_on_head test.
7
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9---
10 .../{selftest-hello_1.0.bb => selftest-hello_1.1.bb} | 3 ++-
11 1 file changed, 2 insertions(+), 1 deletion(-)
12 rename meta-selftest/recipes-test/selftest-hello/{selftest-hello_1.0.bb => selftest-hello_1.1.bb} (88%)
13
14diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb
15similarity index 88%
16rename from meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
17rename to meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb
18index 547587bef4..acc388ec2c 100644
19--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
20+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb
21@@ -1,3 +1,4 @@
22+SUMMARY = "Hello!"
23 DESCRIPTION = "Simple helloworld application -- selftest variant"
24 SECTION = "examples"
25 LICENSE = "MIT"
26@@ -16,4 +17,4 @@ do_install() {
27 install -m 0755 helloworld ${D}${bindir}
28 }
29
30-BBCLASSEXTEND = "native nativesdk"
31\ No newline at end of file
32+BBCLASSEXTEND = "native nativesdk"
33--
342.41.0
35
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_series_merge_on_head.2.skip b/meta/lib/patchtest/selftest/files/TestMbox.test_series_merge_on_head.2.skip
new file mode 100644
index 0000000000..49bd1f8ede
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_series_merge_on_head.2.skip
@@ -0,0 +1,41 @@
1From 55208224f492af0ad929555ffc9b95ff1d301c5f Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Thu, 17 Aug 2023 15:02:38 -0400
4Subject: [PATCH] python3-dtc: upgrade 1.6.1 -> 1.7.0
5
6Changelog: https://kernel.googlesource.com/pub/scm/utils/dtc/dtc/+log/039a99414e778332d8f9c04cbd3072e1dcc62798
7
8Remove custom PV from the recipe since the relevant functionality is in
91.7.0:
10
11[tgamblin@megalith dtc]$ git tag --contains c001fc01a43e7a06447c06ea3d50bd60641322b8
12v1.7.0
13
14Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
15Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
16---
17 .../python/{python3-dtc_1.6.1.bb => python3-dtc_1.7.0.bb} | 3 +--
18 1 file changed, 1 insertion(+), 2 deletions(-)
19 rename meta/recipes-devtools/python/{python3-dtc_1.6.1.bb => python3-dtc_1.7.0.bb} (92%)
20
21diff --git a/meta/recipes-devtools/python/python3-dtc_1.6.1.bb b/meta/recipes-devtools/python/python3-dtc_1.7.0.bb
22similarity index 92%
23rename from meta/recipes-devtools/python/python3-dtc_1.6.1.bb
24rename to meta/recipes-devtools/python/python3-dtc_1.7.0.bb
25index 95ab0be474..85e48d4694 100644
26--- a/meta/recipes-devtools/python/python3-dtc_1.6.1.bb
27+++ b/meta/recipes-devtools/python/python3-dtc_1.7.0.bb
28@@ -14,9 +14,8 @@ UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>\d+(\.\d+)+)"
29
30 LIC_FILES_CHKSUM = "file://pylibfdt/libfdt.i;beginline=1;endline=6;md5=afda088c974174a29108c8d80b5dce90"
31
32-SRCREV = "c001fc01a43e7a06447c06ea3d50bd60641322b8"
33+SRCREV = "039a99414e778332d8f9c04cbd3072e1dcc62798"
34
35-PV = "1.6.1+git"
36 S = "${WORKDIR}/git"
37
38 PYPA_WHEEL = "${S}/dist/libfdt-1.6.2*.whl"
39--
402.41.0
41
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.fail
new file mode 100644
index 0000000000..c5e4df2549
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.fail
@@ -0,0 +1,66 @@
1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello% fix CVE-1234-56789
5
6This should fail the test_shortlog_format test.
7
8CVE: CVE-1234-56789
9
10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
11---
12 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
14 2 files changed, 29 insertions(+), 1 deletion(-)
15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16
17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
18new file mode 100644
19index 00000000000..8a4f9329303
20--- /dev/null
21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
22@@ -0,0 +1,26 @@
23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
24+From: Trevor Gamblin <tgamblin@baylibre.com>
25+Date: Tue, 29 Aug 2023 14:08:20 -0400
26+Subject: [PATCH] Fix CVE-NOT-REAL
27+
28+CVE: CVE-1234-56789
29+Upstream-Status: Backport(http://example.com/example)
30+
31+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
32+---
33+ strlen.c | 1 +
34+ 1 file changed, 1 insertion(+)
35+
36+diff --git a/strlen.c b/strlen.c
37+index 1788f38..83d7918 100644
38+--- a/strlen.c
39++++ b/strlen.c
40+
41+int main() {
42+
43+ printf("%d\n", str_len(string1));
44+ printf("%d\n", str_len(string2));
45+ printf("CVE FIXED!!!\n");
46+
47+ return 0;
48+}
49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
50index 2dc352d479e..d937759f157 100644
51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
53@@ -3,7 +3,9 @@ SECTION = "examples"
54 LICENSE = "MIT"
55 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
56
57-SRC_URI = "file://helloworld.c"
58+SRC_URI = "file://helloworld.c \
59+ file://0001-Fix-CVE-1234-56789.patch \
60+ "
61
62 S = "${WORKDIR}/sources"
63 UNPACKDIR = "${S}"
64--
652.45.1
66
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.pass
new file mode 100644
index 0000000000..4948e26afc
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.pass
@@ -0,0 +1,66 @@
1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5
6This should pass the test_shortlog_format test.
7
8CVE: CVE-1234-56789
9
10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
11---
12 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
14 2 files changed, 29 insertions(+), 1 deletion(-)
15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16
17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
18new file mode 100644
19index 00000000000..8a4f9329303
20--- /dev/null
21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
22@@ -0,0 +1,26 @@
23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
24+From: Trevor Gamblin <tgamblin@baylibre.com>
25+Date: Tue, 29 Aug 2023 14:08:20 -0400
26+Subject: [PATCH] Fix CVE-NOT-REAL
27+
28+CVE: CVE-1234-56789
29+Upstream-Status: Backport(http://example.com/example)
30+
31+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
32+---
33+ strlen.c | 1 +
34+ 1 file changed, 1 insertion(+)
35+
36+diff --git a/strlen.c b/strlen.c
37+index 1788f38..83d7918 100644
38+--- a/strlen.c
39++++ b/strlen.c
40+
41+int main() {
42+
43+ printf("%d\n", str_len(string1));
44+ printf("%d\n", str_len(string2));
45+ printf("CVE FIXED!!!\n");
46+
47+ return 0;
48+}
49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
50index 2dc352d479e..d937759f157 100644
51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
53@@ -3,7 +3,9 @@ SECTION = "examples"
54 LICENSE = "MIT"
55 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
56
57-SRC_URI = "file://helloworld.c"
58+SRC_URI = "file://helloworld.c \
59+ file://0001-Fix-CVE-1234-56789.patch \
60+ "
61
62 S = "${WORKDIR}/sources"
63 UNPACKDIR = "${S}"
64--
652.45.1
66
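These two fixtures drive test_shortlog_format: the shortlog must follow the "<target>: <summary>" convention, and the .fail variant replaces the colon with "%". The authoritative pattern lives in patchtest_patterns; a simplified illustration of the rule:

    # Simplified illustration of the "<target>: <summary>" shortlog
    # rule; the real regex is defined in patchtest_patterns.
    import re

    shortlog_re = re.compile(r'^\S+: \S.*$')

    assert shortlog_re.match("selftest-hello: fix CVE-1234-56789")
    assert not shortlog_re.match("selftest-hello% fix CVE-1234-56789")
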
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.fail
new file mode 100644
index 0000000000..4ed1242821
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.fail
@@ -0,0 +1,66 @@
1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789 this is a very long commit shortlog with way too many words included in it to pass the test
5
6This should fail the test_shortlog_length test.
7
8CVE: CVE-1234-56789
9
10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
11---
12 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
14 2 files changed, 29 insertions(+), 1 deletion(-)
15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16
17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
18new file mode 100644
19index 00000000000..8a4f9329303
20--- /dev/null
21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
22@@ -0,0 +1,26 @@
23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
24+From: Trevor Gamblin <tgamblin@baylibre.com>
25+Date: Tue, 29 Aug 2023 14:08:20 -0400
26+Subject: [PATCH] Fix CVE-NOT-REAL
27+
28+CVE: CVE-1234-56789
29+Upstream-Status: Backport(http://example.com/example)
30+
31+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
32+---
33+ strlen.c | 1 +
34+ 1 file changed, 1 insertion(+)
35+
36+diff --git a/strlen.c b/strlen.c
37+index 1788f38..83d7918 100644
38+--- a/strlen.c
39++++ b/strlen.c
40+
41+int main() {
42+
43+ printf("%d\n", str_len(string1));
44+ printf("%d\n", str_len(string2));
45+ printf("CVE FIXED!!!\n");
46+
47+ return 0;
48+}
49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
50index 2dc352d479e..d937759f157 100644
51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
53@@ -3,7 +3,9 @@ SECTION = "examples"
54 LICENSE = "MIT"
55 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
56
57-SRC_URI = "file://helloworld.c"
58+SRC_URI = "file://helloworld.c \
59+ file://0001-Fix-CVE-1234-56789.patch \
60+ "
61
62 S = "${WORKDIR}/sources"
63 UNPACKDIR = "${S}"
64--
652.45.1
66
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.pass
new file mode 100644
index 0000000000..ef5066a650
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.pass
@@ -0,0 +1,66 @@
1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5
6This should pass the test_shortlog_length test.
7
8CVE: CVE-1234-56789
9
10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
11---
12 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
14 2 files changed, 29 insertions(+), 1 deletion(-)
15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16
17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
18new file mode 100644
19index 00000000000..8a4f9329303
20--- /dev/null
21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
22@@ -0,0 +1,26 @@
23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
24+From: Trevor Gamblin <tgamblin@baylibre.com>
25+Date: Tue, 29 Aug 2023 14:08:20 -0400
26+Subject: [PATCH] Fix CVE-NOT-REAL
27+
28+CVE: CVE-1234-56789
29+Upstream-Status: Backport(http://example.com/example)
30+
31+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
32+---
33+ strlen.c | 1 +
34+ 1 file changed, 1 insertion(+)
35+
36+diff --git a/strlen.c b/strlen.c
37+index 1788f38..83d7918 100644
38+--- a/strlen.c
39++++ b/strlen.c
40+
41+int main() {
42+
43+ printf("%d\n", str_len(string1));
44+ printf("%d\n", str_len(string2));
45+ printf("CVE FIXED!!!\n");
46+
47+ return 0;
48+}
49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
50index 2dc352d479e..d937759f157 100644
51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
53@@ -3,7 +3,9 @@ SECTION = "examples"
54 LICENSE = "MIT"
55 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
56
57-SRC_URI = "file://helloworld.c"
58+SRC_URI = "file://helloworld.c \
59+ file://0001-Fix-CVE-1234-56789.patch \
60+ "
61
62 S = "${WORKDIR}/sources"
63 UNPACKDIR = "${S}"
64--
652.45.1
66
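The shortlog length pair works the same way: the .fail variant pads the subject far past the limit defined in patchtest_patterns (assumed to be 90 characters here; check the current source for the exact value). The check itself reduces to:

    # Sketch of the length check; the limit is assumed to match
    # patchtest_patterns (90 characters at the time of writing).
    MAX_SHORTLOG_LEN = 90

    def shortlog_length_ok(shortlog):
        return len(shortlog.strip()) <= MAX_SHORTLOG_LEN
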
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.1.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.1.fail
new file mode 100644
index 0000000000..4ede7271ee
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.1.fail
@@ -0,0 +1,65 @@
1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5
6This should fail the test_signed_off_by_presence test.
7
8CVE: CVE-1234-56789
9
10---
11 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
12 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
13 2 files changed, 29 insertions(+), 1 deletion(-)
14 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
15
16diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
17new file mode 100644
18index 00000000000..8a4f9329303
19--- /dev/null
20+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
21@@ -0,0 +1,26 @@
22+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
23+From: Trevor Gamblin <tgamblin@baylibre.com>
24+Date: Tue, 29 Aug 2023 14:08:20 -0400
25+Subject: [PATCH] Fix CVE-NOT-REAL
26+
27+CVE: CVE-1234-56789
28+Upstream-Status: Backport(http://example.com/example)
29+
30+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
31+---
32+ strlen.c | 1 +
33+ 1 file changed, 1 insertion(+)
34+
35+diff --git a/strlen.c b/strlen.c
36+index 1788f38..83d7918 100644
37+--- a/strlen.c
38++++ b/strlen.c
39+
40+int main() {
41+
42+ printf("%d\n", str_len(string1));
43+ printf("%d\n", str_len(string2));
44+ printf("CVE FIXED!!!\n");
45+
46+ return 0;
47+}
48diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
49index 2dc352d479e..d937759f157 100644
50--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
51+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52@@ -3,7 +3,9 @@ SECTION = "examples"
53 LICENSE = "MIT"
54 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
55
56-SRC_URI = "file://helloworld.c"
57+SRC_URI = "file://helloworld.c \
58+ file://0001-Fix-CVE-1234-56789.patch \
59+ "
60
61 S = "${WORKDIR}/sources"
62 UNPACKDIR = "${S}"
63--
642.45.1
65
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.2.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.2.fail
new file mode 100644
index 0000000000..f7c3f5145a
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.2.fail
@@ -0,0 +1,66 @@
1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5
6This should fail the test_signed_off_by_presence test.
7
8CVE: CVE-1234-56789
9
10Approved-of-by: Trevor Gamblin <tgamblin@baylibre.com>
11---
12 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
14 2 files changed, 29 insertions(+), 1 deletion(-)
15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16
17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
18new file mode 100644
19index 00000000000..8a4f9329303
20--- /dev/null
21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
22@@ -0,0 +1,26 @@
23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
24+From: Trevor Gamblin <tgamblin@baylibre.com>
25+Date: Tue, 29 Aug 2023 14:08:20 -0400
26+Subject: [PATCH] Fix CVE-NOT-REAL
27+
28+CVE: CVE-1234-56789
29+Upstream-Status: Backport(http://example.com/example)
30+
31+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
32+---
33+ strlen.c | 1 +
34+ 1 file changed, 1 insertion(+)
35+
36+diff --git a/strlen.c b/strlen.c
37+index 1788f38..83d7918 100644
38+--- a/strlen.c
39++++ b/strlen.c
40+
41+int main() {
42+
43+ printf("%d\n", str_len(string1));
44+ printf("%d\n", str_len(string2));
45+ printf("CVE FIXED!!!\n");
46+
47+ return 0;
48+}
49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
50index 2dc352d479e..d937759f157 100644
51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
53@@ -3,7 +3,9 @@ SECTION = "examples"
54 LICENSE = "MIT"
55 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
56
57-SRC_URI = "file://helloworld.c"
58+SRC_URI = "file://helloworld.c \
59+ file://0001-Fix-CVE-1234-56789.patch \
60+ "
61
62 S = "${WORKDIR}/sources"
63 UNPACKDIR = "${S}"
64--
652.45.1
66
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.pass
new file mode 100644
index 0000000000..2661c1416f
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.pass
@@ -0,0 +1,66 @@
1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5
6This should pass the test_signed_off_by_presence test.
7
8CVE: CVE-1234-56789
9
10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
11---
12 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
14 2 files changed, 29 insertions(+), 1 deletion(-)
15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16
17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
18new file mode 100644
19index 00000000000..8a4f9329303
20--- /dev/null
21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
22@@ -0,0 +1,26 @@
23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
24+From: Trevor Gamblin <tgamblin@baylibre.com>
25+Date: Tue, 29 Aug 2023 14:08:20 -0400
26+Subject: [PATCH] Fix CVE-NOT-REAL
27+
28+CVE: CVE-1234-56789
29+Upstream-Status: Backport(http://example.com/example)
30+
31+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
32+---
33+ strlen.c | 1 +
34+ 1 file changed, 1 insertion(+)
35+
36+diff --git a/strlen.c b/strlen.c
37+index 1788f38..83d7918 100644
38+--- a/strlen.c
39++++ b/strlen.c
40+
41+int main() {
42+
43+ printf("%d\n", str_len(string1));
44+ printf("%d\n", str_len(string2));
45+ printf("CVE FIXED!!!\n");
46+
47+ return 0;
48+}
49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
50index 2dc352d479e..d937759f157 100644
51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
53@@ -3,7 +3,9 @@ SECTION = "examples"
54 LICENSE = "MIT"
55 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
56
57-SRC_URI = "file://helloworld.c"
58+SRC_URI = "file://helloworld.c \
59+ file://0001-Fix-CVE-1234-56789.patch \
60+ "
61
62 S = "${WORKDIR}/sources"
63 UNPACKDIR = "${S}"
64--
652.45.1
66
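The three fixtures above target test_signed_off_by_presence: a commit must carry a well-formed Signed-off-by trailer (it is missing entirely in .1.fail and misspelled as "Approved-of-by" in .2.fail). A simplified version of the check, not the patchtest implementation:

    # Simplified Signed-off-by presence check; the real pattern is
    # defined in patchtest_patterns.
    import re

    sob_re = re.compile(r'^Signed-off-by:\s+.+\s+<\S+@\S+>$', re.MULTILINE)

    def has_signoff(commit_message):
        return sob_re.search(commit_message) is not None
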
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.fail b/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.fail
new file mode 100644
index 0000000000..dccafcd9bc
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.fail
@@ -0,0 +1,25 @@
1From 60450eefbc2c438a37c5e08759d021b18f0df0a3 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 09:18:17 -0400
4Subject: [PATCH] selftest-hello: add CVE_CHECK_IGNORE
5
6This should fail the test_cve_check_ignore selftest.
7
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9---
10 meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 1 +
11 1 file changed, 1 insertion(+)
12
13diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
14index 2dc352d479e..cc103de6e2e 100644
15--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
16+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
17@@ -17,4 +17,5 @@ do_install() {
18 install -m 0755 helloworld ${D}${bindir}
19 }
20
21+CVE_CHECK_IGNORE = "CVE-2024-12345"
22 BBCLASSEXTEND = "native nativesdk"
23--
242.45.1
25
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.pass b/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.pass
new file mode 100644
index 0000000000..93a6cc91fb
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.pass
@@ -0,0 +1,25 @@
1From f91073242268d2b2c1a1a705e7fd585679f78a59 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 09:18:17 -0400
4Subject: [PATCH] selftest-hello: add CVE_STATUS
5
6This should pass the test_cve_check_ignore selftest.
7
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9---
10 meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 1 +
11 1 file changed, 1 insertion(+)
12
13diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
14index 2dc352d479e..88c5c98608f 100644
15--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
16+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
17@@ -17,4 +17,5 @@ do_install() {
18 install -m 0755 helloworld ${D}${bindir}
19 }
20
21+CVE_STATUS[CVE-2024-12345] = "not-applicable-platform: Issue only applies on Windows"
22 BBCLASSEXTEND = "native nativesdk"
23--
242.45.1
25
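This pair encodes a migration rather than a format rule: the deprecated CVE_CHECK_IGNORE variable fails, while the CVE_STATUS flag syntax with a reason string passes. Screening added recipe lines for the old variable could look like this (hypothetical helper, not the patchtest code):

    # Hypothetical helper: flag added lines that still use the
    # deprecated CVE_CHECK_IGNORE instead of CVE_STATUS.
    def uses_deprecated_cve_ignore(added_lines):
        return any(l.lstrip('+').lstrip().startswith('CVE_CHECK_IGNORE')
                   for l in added_lines)

    assert uses_deprecated_cve_ignore(['+CVE_CHECK_IGNORE = "CVE-2024-12345"'])
    assert not uses_deprecated_cve_ignore(['+CVE_STATUS[CVE-2024-12345] = "..."'])
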
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.fail b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.fail
new file mode 100644
index 0000000000..61b3784e3c
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.fail
@@ -0,0 +1,28 @@
1From 974c3a143bc67faaff9abcc0a06a3d5e692fc660 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 11:51:15 -0400
4Subject: [PATCH] selftest-hello: update LIC_FILES_CHKSUM
5
6This should fail the test_lic_files_chksum_modified_not_mentioned test.
7
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9---
10 meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 2 +-
11 1 file changed, 1 insertion(+), 1 deletion(-)
12
13diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
14index 2dc352d479e..356921db1dd 100644
15--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
16+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
17@@ -1,7 +1,7 @@
18 DESCRIPTION = "Simple helloworld application -- selftest variant"
19 SECTION = "examples"
20 LICENSE = "MIT"
21-LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
22+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f303"
23
24 SRC_URI = "file://helloworld.c"
25
26--
272.45.1
28
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.pass b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.pass
new file mode 100644
index 0000000000..b7be1e8e55
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.pass
@@ -0,0 +1,30 @@
1From 974c3a143bc67faaff9abcc0a06a3d5e692fc660 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 11:51:15 -0400
4Subject: [PATCH] selftest-hello: update LIC_FILES_CHKSUM
5
6This should pass the test_lic_files_chksum_modified_not_mentioned test.
7
8License-Update: Stuff happened!
9
10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
11---
12 meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 2 +-
13 1 file changed, 1 insertion(+), 1 deletion(-)
14
15diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
16index 2dc352d479e..356921db1dd 100644
17--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
18+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
19@@ -1,7 +1,7 @@
20 DESCRIPTION = "Simple helloworld application -- selftest variant"
21 SECTION = "examples"
22 LICENSE = "MIT"
23-LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
24+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f303"
25
26 SRC_URI = "file://helloworld.c"
27
28--
292.45.1
30
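The rule behind test_lic_files_chksum_modified_not_mentioned is that any change to LIC_FILES_CHKSUM must be justified with a License-Update tag in the commit message; only the .pass fixture carries one. Reduced to a minimal sketch (plain-string inputs, not the patchtest API):

    # Minimal sketch: a diff touching LIC_FILES_CHKSUM requires a
    # License-Update tag in the commit message.
    def license_update_ok(diff_text, commit_message):
        if 'LIC_FILES_CHKSUM' not in diff_text:
            return True
        return 'License-Update:' in commit_message
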
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.fail b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.fail
new file mode 100644
index 0000000000..a7a0b0bacb
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.fail
@@ -0,0 +1,42 @@
1From 74bc209a4fbe4da2f57e153ccfff3d2241dada8d Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 11:03:47 -0400
4Subject: [PATCH] selftest-hello: add selftest-hello-extra
5
6This should fail the test_lic_files_chksum_presence test.
7
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9---
10 .../selftest-hello-extra_1.0.bb | 20 +++++++++++++++++++
11 1 file changed, 20 insertions(+)
12 create mode 100644 meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
13
14diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
15new file mode 100644
16index 00000000000..875bcbef859
17--- /dev/null
18+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
19@@ -0,0 +1,20 @@
20+SUMMARY = "This is an example summary"
21+DESCRIPTION = "Simple helloworld application -- selftest variant"
22+SECTION = "examples"
23+LICENSE = "MIT"
24+
25+SRC_URI = "file://helloworld.c"
26+
27+S = "${WORKDIR}/sources"
28+UNPACKDIR = "${S}"
29+
30+do_compile() {
31+ ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld
32+}
33+
34+do_install() {
35+ install -d ${D}${bindir}
36+ install -m 0755 helloworld ${D}${bindir}
37+}
38+
39+BBCLASSEXTEND = "native nativesdk"
40--
412.45.1
42
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.pass b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.pass
new file mode 100644
index 0000000000..8ffa97ec56
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.pass
@@ -0,0 +1,43 @@
1From 04eb94a0134ef5eb5b5fd783b303104fbfcd8437 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 11:03:47 -0400
4Subject: [PATCH] selftest-hello: add selftest-hello-extra
5
6This should pass the test_lic_files_chksum_presence test.
7
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9---
10 .../selftest-hello-extra_1.0.bb | 21 +++++++++++++++++++
11 1 file changed, 21 insertions(+)
12 create mode 100644 meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
13
14diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
15new file mode 100644
16index 00000000000..f3dec1b220c
17--- /dev/null
18+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
19@@ -0,0 +1,21 @@
20+SUMMARY = "This is an example summary"
21+DESCRIPTION = "Simple helloworld application -- selftest variant"
22+SECTION = "examples"
23+LICENSE = "MIT"
24+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
25+
26+SRC_URI = "file://helloworld.c"
27+
28+S = "${WORKDIR}/sources"
29+UNPACKDIR = "${S}"
30+
31+do_compile() {
32+ ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld
33+}
34+
35+do_install() {
36+ install -d ${D}${bindir}
37+ install -m 0755 helloworld ${D}${bindir}
38+}
39+
40+BBCLASSEXTEND = "native nativesdk"
41--
422.45.1
43
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.fail b/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.fail
new file mode 100644
index 0000000000..0a402d0a3e
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.fail
@@ -0,0 +1,28 @@
1From f2f7b6bcb831289bc3ba2343ad7dc5bee6d6e0cd Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 08:45:41 -0400
4Subject: [PATCH] selftest-hello: remove helloworld.c
5
6This should fail the test_src_uri_left_files selftest.
7
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9---
10 meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 2 --
11 1 file changed, 2 deletions(-)
12
13diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
14index 2dc352d479e..e95270adaeb 100644
15--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
16+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
17@@ -3,8 +3,6 @@ SECTION = "examples"
18 LICENSE = "MIT"
19 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
20
21-SRC_URI = "file://helloworld.c"
22-
23 S = "${WORKDIR}/sources"
24 UNPACKDIR = "${S}"
25
26--
272.45.1
28
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.pass b/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.pass
new file mode 100644
index 0000000000..a675c028d0
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.pass
@@ -0,0 +1,44 @@
1From e79933e2fc68570066eca66f0b599d259b7a1731 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 08:18:48 -0400
4Subject: [PATCH] selftest-hello: remove helloworld.c
5
6This should pass the test_src_uri_left_files selftest.
7
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9---
10 .../recipes-test/selftest-hello/files/helloworld.c | 8 --------
11 .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 2 --
12 2 files changed, 10 deletions(-)
13 delete mode 100644 meta-selftest/recipes-test/selftest-hello/files/helloworld.c
14
15diff --git a/meta-selftest/recipes-test/selftest-hello/files/helloworld.c b/meta-selftest/recipes-test/selftest-hello/files/helloworld.c
16deleted file mode 100644
17index fc7169b7b83..00000000000
18--- a/meta-selftest/recipes-test/selftest-hello/files/helloworld.c
19+++ /dev/null
20@@ -1,8 +0,0 @@
21-#include <stdio.h>
22-
23-int main(void)
24-{
25- printf("Hello world!\n");
26-
27- return 0;
28-}
29diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
30index 2dc352d479e..e95270adaeb 100644
31--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
32+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
33@@ -3,8 +3,6 @@ SECTION = "examples"
34 LICENSE = "MIT"
35 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
36
37-SRC_URI = "file://helloworld.c"
38-
39 S = "${WORKDIR}/sources"
40 UNPACKDIR = "${S}"
41
42--
432.45.1
44
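test_src_uri_left_files enforces the inverse bookkeeping: a file:// entry dropped from SRC_URI should be accompanied by deletion of the file itself, which only the .pass variant does. A rough sketch with hypothetical helper names:

    # Rough sketch, hypothetical helper: files dropped from SRC_URI
    # should also be deleted by the same patch.
    def left_files(dropped_src_uri_entries, deleted_files):
        dropped = {e.replace('file://', '') for e in dropped_src_uri_entries}
        return sorted(dropped - set(deleted_files))

    assert left_files(['file://helloworld.c'], []) == ['helloworld.c']
    assert left_files(['file://helloworld.c'], ['helloworld.c']) == []
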
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.fail b/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.fail
new file mode 100644
index 0000000000..1087843619
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.fail
@@ -0,0 +1,42 @@
1From f4b72cc24f5e2a290a8637775c4d41c16d5d83aa Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 11:03:47 -0400
4Subject: [PATCH] selftest-hello: add selftest-hello-extra
5
6This should fail the test_summary_presence test.
7
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9---
10 .../selftest-hello-extra_1.0.bb | 20 +++++++++++++++++++
11 1 file changed, 20 insertions(+)
12 create mode 100644 meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
13
14diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
15new file mode 100644
16index 00000000000..2dc352d479e
17--- /dev/null
18+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
19@@ -0,0 +1,20 @@
20+DESCRIPTION = "Simple helloworld application -- selftest variant"
21+SECTION = "examples"
22+LICENSE = "MIT"
23+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
24+
25+SRC_URI = "file://helloworld.c"
26+
27+S = "${WORKDIR}/sources"
28+UNPACKDIR = "${S}"
29+
30+do_compile() {
31+ ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld
32+}
33+
34+do_install() {
35+ install -d ${D}${bindir}
36+ install -m 0755 helloworld ${D}${bindir}
37+}
38+
39+BBCLASSEXTEND = "native nativesdk"
40--
412.45.1
42
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.pass b/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.pass
new file mode 100644
index 0000000000..3d35a8d8fb
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.pass
@@ -0,0 +1,43 @@
1From 04eb94a0134ef5eb5b5fd783b303104fbfcd8437 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 11:03:47 -0400
4Subject: [PATCH] selftest-hello: add selftest-hello-extra
5
6This should pass the test_summary_presence test.
7
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9---
10 .../selftest-hello-extra_1.0.bb | 21 +++++++++++++++++++
11 1 file changed, 21 insertions(+)
12 create mode 100644 meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
13
14diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
15new file mode 100644
16index 00000000000..f3dec1b220c
17--- /dev/null
18+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
19@@ -0,0 +1,21 @@
20+SUMMARY = "This is an example summary"
21+DESCRIPTION = "Simple helloworld application -- selftest variant"
22+SECTION = "examples"
23+LICENSE = "MIT"
24+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
25+
26+SRC_URI = "file://helloworld.c"
27+
28+S = "${WORKDIR}/sources"
29+UNPACKDIR = "${S}"
30+
31+do_compile() {
32+ ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld
33+}
34+
35+do_install() {
36+ install -d ${D}${bindir}
37+ install -m 0755 helloworld ${D}${bindir}
38+}
39+
40+BBCLASSEXTEND = "native nativesdk"
41--
422.45.1
43
diff --git a/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.fail b/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.fail
new file mode 100644
index 0000000000..f64f2a40b0
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.fail
@@ -0,0 +1,66 @@
1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5
6This should fail the test_cve_tag_format test.
7
8CVE: CVE-1234-56789
9
10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
11---
12 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
14 2 files changed, 29 insertions(+), 1 deletion(-)
15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16
17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
18new file mode 100644
19index 00000000000..8a4f9329303
20--- /dev/null
21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
22@@ -0,0 +1,26 @@
23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
24+From: Trevor Gamblin <tgamblin@baylibre.com>
25+Date: Tue, 29 Aug 2023 14:08:20 -0400
26+Subject: [PATCH] Fix CVE-NOT-REAL
27+
28+CVE: CVE-BAD_FORMAT
29+Upstream-Status: Backport(http://example.com/example)
30+
31+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
32+---
33+ strlen.c | 1 +
34+ 1 file changed, 1 insertion(+)
35+
36+diff --git a/strlen.c b/strlen.c
37+index 1788f38..83d7918 100644
38+--- a/strlen.c
39++++ b/strlen.c
40+
41+int main() {
42+
43+ printf("%d\n", str_len(string1));
44+ printf("%d\n", str_len(string2));
45+ printf("CVE FIXED!!!\n");
46+
47+ return 0;
48+}
49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
50index 2dc352d479e..d937759f157 100644
51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
53@@ -3,7 +3,9 @@ SECTION = "examples"
54 LICENSE = "MIT"
55 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
56
57-SRC_URI = "file://helloworld.c"
58+SRC_URI = "file://helloworld.c \
59+ file://0001-Fix-CVE-1234-56789.patch \
60+ "
61
62 S = "${WORKDIR}/sources"
63 UNPACKDIR = "${S}"
64--
652.45.1
66
diff --git a/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.pass b/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.pass
new file mode 100644
index 0000000000..3819487041
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.pass
@@ -0,0 +1,66 @@
1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5
6This should pass the test_cve_tag_format test.
7
8CVE: CVE-1234-56789
9
10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
11---
12 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
14 2 files changed, 29 insertions(+), 1 deletion(-)
15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16
17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
18new file mode 100644
19index 00000000000..8a4f9329303
20--- /dev/null
21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
22@@ -0,0 +1,26 @@
23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
24+From: Trevor Gamblin <tgamblin@baylibre.com>
25+Date: Tue, 29 Aug 2023 14:08:20 -0400
26+Subject: [PATCH] Fix CVE-NOT-REAL
27+
28+CVE: CVE-1234-56789
29+Upstream-Status: Backport(http://example.com/example)
30+
31+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
32+---
33+ strlen.c | 1 +
34+ 1 file changed, 1 insertion(+)
35+
36+diff --git a/strlen.c b/strlen.c
37+index 1788f38..83d7918 100644
38+--- a/strlen.c
39++++ b/strlen.c
40+
41+int main() {
42+
43+ printf("%d\n", str_len(string1));
44+ printf("%d\n", str_len(string2));
45+ printf("CVE FIXED!!!\n");
46+
47+ return 0;
48+}
49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
50index 2dc352d479e..d937759f157 100644
51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
53@@ -3,7 +3,9 @@ SECTION = "examples"
54 LICENSE = "MIT"
55 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
56
57-SRC_URI = "file://helloworld.c"
58+SRC_URI = "file://helloworld.c \
59+ file://0001-Fix-CVE-1234-56789.patch \
60+ "
61
62 S = "${WORKDIR}/sources"
63 UNPACKDIR = "${S}"
64--
652.45.1
66
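Here the patch embedded in the .fail fixture carries "CVE: CVE-BAD_FORMAT" while the .pass one carries "CVE: CVE-1234-56789", exercising test_cve_tag_format. CVE identifiers follow CVE-YYYY-NNNN with four or more digits in the sequence part; a simplified check:

    # Simplified CVE tag format check (CVE-<year>-<4+ digit sequence>);
    # the authoritative regex lives in patchtest_patterns.
    import re

    cve_re = re.compile(r'^CVE:\s+CVE-\d{4}-\d{4,}$', re.MULTILINE)

    assert cve_re.search("CVE: CVE-1234-56789")
    assert not cve_re.search("CVE: CVE-BAD_FORMAT")
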
diff --git a/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.fail b/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.fail
new file mode 100644
index 0000000000..b2d0fab9e3
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.fail
@@ -0,0 +1,65 @@
1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5
6This should fail the test_signed_off_by_presence test.
7
8CVE: CVE-1234-56789
9
10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
11---
12 .../files/0001-Fix-CVE-1234-56789.patch | 25 +++++++++++++++++++
13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
14 2 files changed, 28 insertions(+), 1 deletion(-)
15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16
17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
18new file mode 100644
19index 00000000000..8a4f9329303
20--- /dev/null
21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
22@@ -0,0 +1,25 @@
23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
24+From: Trevor Gamblin <tgamblin@baylibre.com>
25+Date: Tue, 29 Aug 2023 14:08:20 -0400
26+Subject: [PATCH] Fix CVE-NOT-REAL
27+
28+CVE: CVE-1234-56789
29+Upstream-Status: Backport(http://example.com/example)
30+
31+---
32+ strlen.c | 1 +
33+ 1 file changed, 1 insertion(+)
34+
35+diff --git a/strlen.c b/strlen.c
36+index 1788f38..83d7918 100644
37+--- a/strlen.c
38++++ b/strlen.c
39+
40+int main() {
41+
42+ printf("%d\n", str_len(string1));
43+ printf("%d\n", str_len(string2));
44+ printf("CVE FIXED!!!\n");
45+
46+ return 0;
47+}
48diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
49index 2dc352d479e..d937759f157 100644
50--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
51+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52@@ -3,7 +3,9 @@ SECTION = "examples"
53 LICENSE = "MIT"
54 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
55
56-SRC_URI = "file://helloworld.c"
57+SRC_URI = "file://helloworld.c \
58+ file://0001-Fix-CVE-1234-56789.patch \
59+ "
60
61 S = "${WORKDIR}/sources"
62 UNPACKDIR = "${S}"
63--
642.45.1
65
diff --git a/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.pass b/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.pass
new file mode 100644
index 0000000000..2661c1416f
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.pass
@@ -0,0 +1,66 @@
1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5
6This should pass the test_signed_off_by_presence test.
7
8CVE: CVE-1234-56789
9
10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
11---
12 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
14 2 files changed, 29 insertions(+), 1 deletion(-)
15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16
17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
18new file mode 100644
19index 00000000000..8a4f9329303
20--- /dev/null
21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
22@@ -0,0 +1,26 @@
23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
24+From: Trevor Gamblin <tgamblin@baylibre.com>
25+Date: Tue, 29 Aug 2023 14:08:20 -0400
26+Subject: [PATCH] Fix CVE-NOT-REAL
27+
28+CVE: CVE-1234-56789
29+Upstream-Status: Backport(http://example.com/example)
30+
31+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
32+---
33+ strlen.c | 1 +
34+ 1 file changed, 1 insertion(+)
35+
36+diff --git a/strlen.c b/strlen.c
37+index 1788f38..83d7918 100644
38+--- a/strlen.c
39++++ b/strlen.c
40+
41+int main() {
42+
43+ printf("%d\n", str_len(string1));
44+ printf("%d\n", str_len(string2));
45+ printf("CVE FIXED!!!\n");
46+
47+ return 0;
48+}
49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
50index 2dc352d479e..d937759f157 100644
51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
53@@ -3,7 +3,9 @@ SECTION = "examples"
54 LICENSE = "MIT"
55 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
56
57-SRC_URI = "file://helloworld.c"
58+SRC_URI = "file://helloworld.c \
59+ file://0001-Fix-CVE-1234-56789.patch \
60+ "
61
62 S = "${WORKDIR}/sources"
63 UNPACKDIR = "${S}"
64--
652.45.1
66
diff --git a/meta/lib/patchtest/selftest/selftest b/meta/lib/patchtest/selftest/selftest
new file mode 100755
index 0000000000..3cf1c361f7
--- /dev/null
+++ b/meta/lib/patchtest/selftest/selftest
@@ -0,0 +1,94 @@
1#!/usr/bin/env python3
2
3# Test every patch in the files folder and report an error on failure
4#
5# Copyright (C) 2016 Intel Corporation
6#
7# SPDX-License-Identifier: GPL-2.0-only
8
9import os
10import subprocess
11import sys
12
13currentdir = os.path.dirname(os.path.abspath(__file__))
14patchesdir = os.path.join(currentdir, 'files')
15topdir = os.path.dirname(currentdir)
16parentdir = os.path.dirname(topdir)
17
18# path to the repo root
19repodir = os.path.dirname(os.path.dirname(parentdir))
20
21def print_results(passcount, failcount, skipcount, xpasscount, xfailcount, xskipcount, errorcount):
22 total = passcount + skipcount + failcount + xpasscount + xfailcount + xskipcount + errorcount
23 print("============================================================================")
24 print("Testsuite summary for %s" % os.path.basename(topdir))
25 print("============================================================================")
26 print("# TOTAL: %s" % str(total))
27 print("# XPASS: %s" % str(xpasscount))
28 print("# XFAIL: %s" % str(xfailcount))
29 print("# XSKIP: %s" % str(xskipcount))
30 print("# PASS: %s" % str(passcount))
31 print("# FAIL: %s" % str(failcount))
32 print("# SKIP: %s" % str(skipcount))
33 print("# ERROR: %s" % str(errorcount))
34 print("============================================================================")
35
36# Once the tests are in oe-core, we can remove the testdir param and use os.path.dirname to get relative paths
37def test(root, patch):
39 patchpath = os.path.abspath(os.path.join(root, patch))
40
41 cmd = 'patchtest --base-commit HEAD --repodir %s --testdir %s/tests --patch %s' % (repodir, topdir, patchpath)
42 results = subprocess.check_output(cmd, stderr=subprocess.STDOUT, universal_newlines=True, shell=True)
43
44 return results
45
46if __name__ == '__main__':
47 passcount = 0
48 failcount = 0
49 skipcount = 0
50 xpasscount = 0
51 xfailcount = 0
52 xskipcount = 0
53 errorcount = 0
54
55 results = None
56
57 for root, dirs, patches in os.walk(patchesdir):
58 for patch in patches:
59 results = test(root, patch)
60
61 a = patch.split('.')
62 klass, testname = a[0], a[1]
63 expected_result = a[-1]
64 testid = ".%s.%s" % (klass, testname)
65
66 for resultline in results.splitlines():
67 if testid in resultline:
68 result, _ = resultline.split(':', 1)
69
70 if expected_result.upper() == "FAIL" and result.upper() == "FAIL":
71 xfailcount = xfailcount + 1
72 print("XFAIL: %s (file: %s)" % (testid.strip("."), os.path.basename(patch)))
73 elif expected_result.upper() == "PASS" and result.upper() == "PASS":
74 xpasscount = xpasscount + 1
75 print("XPASS: %s (file: %s)" % (testid.strip("."), os.path.basename(patch)))
76 elif expected_result.upper() == "SKIP" and result.upper() == "SKIP":
77 xskipcount = xskipcount + 1
78 print("XSKIP: %s (file: %s)" % (testid.strip("."), os.path.basename(patch)))
79 else:
80 print("%s: %s (%s)" % (result.upper(), testid.strip("."), os.path.basename(patch)))
81 if result.upper() == "PASS":
82 passcount = passcount + 1
83 elif result.upper() == "FAIL":
84 failcount = failcount + 1
85 elif result.upper() == "SKIP":
86 skipcount = skipcount + 1
87 else:
88 print("Bad result on test %s against %s" % (testid.strip("."), os.path.basename(patch)))
89 errorcount = errorcount + 1
90 break
91 else:
92 print("No test for %s" % patch)
93
94 print_results(passcount, failcount, skipcount, xpasscount, xfailcount, xskipcount, errorcount)
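The driver above derives everything from the fixture file names, which follow <TestClass>.<test_name>[.<n>].<expected-result>; the optional numeric component distinguishes multiple fixtures for one test. The parsing it performs reduces to:

    # The same split the selftest driver performs on each fixture name.
    name = "TestMbox.test_series_merge_on_head.1.skip"
    parts = name.split('.')
    klass, testname, expected = parts[0], parts[1], parts[-1]
    assert (klass, testname, expected) == \
        ("TestMbox", "test_series_merge_on_head", "skip")
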
diff --git a/meta/lib/patchtest/tests/__init__.py b/meta/lib/patchtest/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/meta/lib/patchtest/tests/__init__.py
diff --git a/meta/lib/patchtest/tests/base.py b/meta/lib/patchtest/tests/base.py
new file mode 100644
index 0000000000..919ca136bb
--- /dev/null
+++ b/meta/lib/patchtest/tests/base.py
@@ -0,0 +1,252 @@
1# Base class to be used by all test cases defined in the suite
2#
3# Copyright (C) 2016 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6
7import unittest
8import logging
9import json
10import unidiff
11from patchtest_parser import PatchtestParser
12import mailbox
13import patchtest_patterns
14import collections
15import sys
16import os
17import re
18
19logger = logging.getLogger("patchtest")
20debug = logger.debug
21info = logger.info
22warn = logger.warning
23error = logger.error
24
25Commit = collections.namedtuple(
26 "Commit", ["author", "subject", "commit_message", "shortlog", "payload"]
27)
30
31class PatchtestOEError(Exception):
32 """Exception for handling patchtest-oe errors"""
33 def __init__(self, message, exitcode=1):
34 super().__init__(message)
35 self.exitcode = exitcode
36
37class Base(unittest.TestCase):
38 # if a unit test fails, the failure message will contain at least the following JSON: {"id": <testid>}
39
40 @staticmethod
41 def msg_to_commit(msg):
42 payload = msg.get_payload()
43        return Commit(subject=msg['subject'].replace('\n', ' ').replace('  ', ' '),
44 author=msg.get('From'),
45 shortlog=Base.shortlog(msg['subject']),
46 commit_message=Base.commit_message(payload),
47 payload=payload)
48
49 @staticmethod
50 def commit_message(payload):
51 commit_message = payload.__str__()
52 match = patchtest_patterns.endcommit_messages_regex.search(payload)
53 if match:
54 commit_message = payload[:match.start()]
55 return commit_message
56
57 @staticmethod
58 def shortlog(shlog):
59 # remove possible prefix (between brackets) before colon
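        # e.g. '[OE-core] busybox: fix build' -> 'busybox: fix build'
        # (illustrative example of the stripping below)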
60 start = shlog.find(']', 0, shlog.find(':'))
61        # also strip newlines and surrounding whitespace
62 return shlog[start + 1:].replace('\n', '').strip()
63
64 @classmethod
65 def setUpClass(cls):
66
67 # General objects: mailbox.mbox and patchset
68 cls.mbox = mailbox.mbox(PatchtestParser.repo.patch.path)
69
70 # Patch may be malformed, so try parsing it
71 cls.unidiff_parse_error = ''
72 cls.patchset = None
73 try:
74 cls.patchset = unidiff.PatchSet.from_filename(
75 PatchtestParser.repo.patch.path, encoding="UTF-8"
76 )
77 except unidiff.UnidiffParseError as upe:
78 cls.patchset = []
79 cls.unidiff_parse_error = str(upe)
80
81 # Easy to iterate list of commits
82 cls.commits = []
83 for msg in cls.mbox:
84 if msg['subject'] and msg.get_payload():
85 cls.commits.append(Base.msg_to_commit(msg))
86
87 cls.setUpClassLocal()
88
89 @classmethod
90 def tearDownClass(cls):
91 cls.tearDownClassLocal()
92
93 @classmethod
94 def setUpClassLocal(cls):
95 pass
96
97 @classmethod
98 def tearDownClassLocal(cls):
99 pass
100
101 def fail(self, issue, fix=None, commit=None, data=None):
102        """Convert failure data to a JSON string"""
103 value = {'id': self.id(),
104 'issue': issue}
105
106 if fix:
107 value['fix'] = fix
108 if commit:
109 value['commit'] = {'subject': commit.subject,
110 'shortlog': commit.shortlog}
111
112 # extend return value with other useful info
113 if data:
114 value['data'] = data
115
116 return super(Base, self).fail(json.dumps(value))
117
118 def skip(self, issue, data=None):
119        """Convert the skip reason to a JSON string"""
120 value = {'id': self.id(),
121 'issue': issue}
122
123 # extend return value with other useful info
124 if data:
125 value['data'] = data
126
127 return super(Base, self).skipTest(json.dumps(value))
128
129 def shortid(self):
130 return self.id().split('.')[-1]
131
132 def __str__(self):
133 return json.dumps({'id': self.id()})
134
135class Metadata(Base):
136 @classmethod
137 def setUpClassLocal(cls):
138 cls.tinfoil = cls.setup_tinfoil()
139
140 # get info about added/modified/remove recipes
141 cls.added, cls.modified, cls.removed = cls.get_metadata_stats(cls.patchset)
142
143 @classmethod
144 def tearDownClassLocal(cls):
145 cls.tinfoil.shutdown()
146
147 @classmethod
148 def setup_tinfoil(cls, config_only=False):
149 """Initialize tinfoil api from bitbake"""
150
151 # import relevant libraries
152 try:
153 scripts_path = os.path.join(PatchtestParser.repodir, "scripts", "lib")
154 if scripts_path not in sys.path:
155 sys.path.insert(0, scripts_path)
156 import scriptpath
157 scriptpath.add_bitbake_lib_path()
158 import bb.tinfoil
159 except ImportError:
160 raise PatchtestOEError('Could not import tinfoil module')
161
162 orig_cwd = os.path.abspath(os.curdir)
163
164 # Load tinfoil
165 tinfoil = None
166 try:
167 builddir = os.environ.get('BUILDDIR')
168 if not builddir:
169                logger.warning('Bitbake environment not loaded?')
170 return tinfoil
171 os.chdir(builddir)
172 tinfoil = bb.tinfoil.Tinfoil()
173 tinfoil.prepare(config_only=config_only)
174        except bb.tinfoil.TinfoilUIException:
175            if tinfoil:
176                tinfoil.shutdown()
177            raise PatchtestOEError('Could not properly prepare tinfoil (TinfoilUIException)')
178 except Exception as e:
179 if tinfoil:
180 tinfoil.shutdown()
181 raise e
182 finally:
183 os.chdir(orig_cwd)
184
185 return tinfoil
186
187 @classmethod
188 def get_metadata_stats(cls, patchset):
189 """Get lists of added, modified and removed metadata files"""
190
191 def find_pn(data, path):
192 """Find the PN from data"""
193 pn = None
194 pn_native = None
195 for _path, _pn in data:
196 if path in _path:
197 if 'native' in _pn:
198 # store the native PN but look for the non-native one first
199 pn_native = _pn
200 else:
201 pn = _pn
202 break
203 else:
204                # return the native PN if one was found earlier
205 if pn_native:
206 return pn_native
207
208 # on renames (usually upgrades), we need to check (FILE) base names
209            # because the unidiff library does not provide the new filename, just the modified one
210 # and tinfoil datastore, once the patch is merged, will contain the new filename
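            # e.g. both 'foo_1.0.bb' and 'foo_1.1.bb' reduce to the base name
            # 'foo', so a version bump still resolves to the same PN
            # (illustrative recipe names)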
211 path_basename = path.split('_')[0]
212 for _path, _pn in data:
213 _path_basename = _path.split('_')[0]
214 if path_basename == _path_basename:
215 pn = _pn
216 return pn
217
218 if not cls.tinfoil:
219 cls.tinfoil = cls.setup_tinfoil()
220
221 added_paths, modified_paths, removed_paths = [], [], []
222 added, modified, removed = [], [], []
223
224 # get metadata filename additions, modification and removals
225 for patch in patchset:
226            if patch.path.endswith(('.bb', '.bbappend', '.inc')):
227 if patch.is_added_file:
228 added_paths.append(
229 os.path.join(
230 os.path.abspath(PatchtestParser.repodir), patch.path
231 )
232 )
233 elif patch.is_modified_file:
234 modified_paths.append(
235 os.path.join(
236 os.path.abspath(PatchtestParser.repodir), patch.path
237 )
238 )
239 elif patch.is_removed_file:
240 removed_paths.append(
241 os.path.join(
242 os.path.abspath(PatchtestParser.repodir), patch.path
243 )
244 )
245
246 data = cls.tinfoil.cooker.recipecaches[''].pkg_fn.items()
247
248        added = [find_pn(data, path) for path in added_paths]
249        modified = [find_pn(data, path) for path in modified_paths]
250        removed = [find_pn(data, path) for path in removed_paths]
251
252 return [a for a in added if a], [m for m in modified if m], [r for r in removed if r]
diff --git a/meta/lib/patchtest/tests/test_mbox.py b/meta/lib/patchtest/tests/test_mbox.py
new file mode 100644
index 0000000000..dab733ea77
--- /dev/null
+++ b/meta/lib/patchtest/tests/test_mbox.py
@@ -0,0 +1,179 @@
1# Checks related to the patch's mbox: author, shortlog, commit message and target project
2#
3# Copyright (C) 2016 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6
7import base
8import collections
9import patchtest_patterns
10import pyparsing
11import re
12import subprocess
13from patchtest_parser import PatchtestParser
14
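# Return the HEAD commit of the repo under test as [hash, author, date, subject];
# the '#' characters in the pretty format below are the separators that the
# split() relies on (the date field keeps a trailing ':' from the format).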
15def headlog():
16 output = subprocess.check_output(
17 "cd %s; git log --pretty='%%h#%%aN#%%cD:#%%s' -1" % PatchtestParser.repodir,
18 universal_newlines=True,
19 shell=True
20 )
21 return output.split('#')
22
23class TestMbox(base.Base):
24
25 # base paths of main yocto project sub-projects
26 paths = {
27 'oe-core': ['meta-selftest', 'meta-skeleton', 'meta', 'scripts'],
28 'bitbake': ['bitbake'],
29 'documentation': ['documentation'],
30 'poky': ['meta-poky','meta-yocto-bsp'],
31 'oe': ['meta-gpe', 'meta-gnome', 'meta-efl', 'meta-networking', 'meta-multimedia','meta-initramfs', 'meta-ruby', 'contrib', 'meta-xfce', 'meta-filesystems', 'meta-perl', 'meta-webserver', 'meta-systemd', 'meta-oe', 'meta-python']
32 }
33
34 # scripts folder is a mix of oe-core and poky, most is oe-core code except:
35 poky_scripts = ['scripts/yocto-bsp', 'scripts/yocto-kernel', 'scripts/yocto-layer', 'scripts/lib/bsp']
36
37 Project = collections.namedtuple('Project', ['name', 'listemail', 'gitrepo', 'paths'])
38
39 bitbake = Project(name='Bitbake', listemail='bitbake-devel@lists.openembedded.org', gitrepo='http://git.openembedded.org/bitbake/', paths=paths['bitbake'])
40    doc = Project(name='Documentation', listemail='yocto@yoctoproject.org', gitrepo='http://git.yoctoproject.org/cgit/cgit.cgi/yocto-docs/', paths=paths['documentation'])
41 poky = Project(name='Poky', listemail='poky@yoctoproject.org', gitrepo='http://git.yoctoproject.org/cgit/cgit.cgi/poky/', paths=paths['poky'])
42 oe = Project(name='oe', listemail='openembedded-devel@lists.openembedded.org', gitrepo='http://git.openembedded.org/meta-openembedded/', paths=paths['oe'])
43
44
45 def test_signed_off_by_presence(self):
46 for commit in self.commits:
47            # skip patches that revert older commits; these do not require the tag
48 if patchtest_patterns.mbox_revert_shortlog_regex.search_string(commit.shortlog):
49 continue
50 if not patchtest_patterns.signed_off_by.search_string(commit.payload):
51 self.fail(
52 'Mbox is missing Signed-off-by. Add it manually or with "git commit --amend -s"',
53 commit=commit,
54 )
55
56 def test_shortlog_format(self):
57 for commit in self.commits:
58 shortlog = commit.shortlog
59 if not shortlog.strip():
60 self.skip('Empty shortlog, no reason to execute shortlog format test')
61 else:
62 # no reason to re-check on revert shortlogs
63 if shortlog.startswith('Revert "'):
64 continue
65 try:
66 patchtest_patterns.shortlog.parseString(shortlog)
67 except pyparsing.ParseException as pe:
68 self.fail('Commit shortlog (first line of commit message) should follow the format "<target>: <summary>"',
69 commit=commit)
70
71 def test_shortlog_length(self):
72 for commit in self.commits:
73 # no reason to re-check on revert shortlogs
74            shortlog = re.sub(r'^(\[.*?\])+ ', '', commit.shortlog)
75 if shortlog.startswith('Revert "'):
76 continue
77            length = len(shortlog)
78            if length > patchtest_patterns.mbox_shortlog_maxlength:
79                self.fail(
80                    "Edit shortlog so that it is %d characters or less (currently %d characters)"
81                    % (patchtest_patterns.mbox_shortlog_maxlength, length),
82                    commit=commit,
83                )
84
85 def test_series_merge_on_head(self):
86        self.skip("Merge test is disabled for now")  # skip() raises SkipTest, so the checks below do not run
87 if PatchtestParser.repo.patch.branch != "master":
88 self.skip(
89 "Skipping merge test since patch is not intended"
90 " for master branch. Target detected is %s"
91 % PatchtestParser.repo.patch.branch
92 )
93 if not PatchtestParser.repo.canbemerged:
94 commithash, author, date, shortlog = headlog()
95 self.fail(
96 "Series does not apply on top of target branch %s"
97 % PatchtestParser.repo.patch.branch,
98 data=[
99 (
100 "Targeted branch",
101 "%s (currently at %s)"
102 % (PatchtestParser.repo.patch.branch, commithash),
103 )
104 ],
105 )
106
107 def test_target_mailing_list(self):
108 """Check for other targeted projects"""
109
110        # a meta project may be indicated in the message subject; if this is the case, just fail
111 # TODO: there may be other project with no-meta prefix, we also need to detect these
112        project_regex = pyparsing.Regex(r"\[(?P<project>meta-.+)\]")
113 for commit in self.commits:
114 match = project_regex.search_string(commit.subject)
115 if match:
116 self.fail('Series sent to the wrong mailing list or some patches from the series correspond to different mailing lists',
117 commit=commit)
118
119 for patch in self.patchset:
120 folders = patch.path.split('/')
121 base_path = folders[0]
122 for project in [self.bitbake, self.doc, self.oe, self.poky]:
123 if base_path in project.paths:
124 self.fail('Series sent to the wrong mailing list or some patches from the series correspond to different mailing lists',
125 data=[('Suggested ML', '%s [%s]' % (project.listemail, project.gitrepo)),
126 ('Patch\'s path:', patch.path)])
127
128 # check for poky's scripts code
129 if base_path.startswith('scripts'):
130 for poky_file in self.poky_scripts:
131 if patch.path.startswith(poky_file):
132 self.fail('Series sent to the wrong mailing list or some patches from the series correspond to different mailing lists',
133 data=[('Suggested ML', '%s [%s]' % (self.poky.listemail, self.poky.gitrepo)),('Patch\'s path:', patch.path)])
134
135 def test_mbox_format(self):
136 if self.unidiff_parse_error:
137 self.fail('Series has malformed diff lines. Create the series again using git-format-patch and ensure it applies using git am',
138 data=[('Diff line',self.unidiff_parse_error)])
139
140 def test_commit_message_presence(self):
141 for commit in self.commits:
142 if not commit.commit_message.strip():
143 self.fail('Please include a commit message on your patch explaining the change', commit=commit)
144
145 # This may incorrectly report a failure if something such as a
146 # Python decorator is included in the commit message, but this
147 # scenario is much less common than the username case it is written
148 # to protect against
149 def test_commit_message_user_tags(self):
150 for commit in self.commits:
151 if patchtest_patterns.mbox_github_username.search_string(commit.commit_message):
152 self.fail('Mbox includes one or more GitHub-style username tags. Ensure that any "@" symbols are stripped out of usernames', commit=commit)
153
154 def test_bugzilla_entry_format(self):
155 for commit in self.commits:
156 if not patchtest_patterns.mbox_bugzilla.search_string(commit.commit_message):
157 self.skip("No bug ID found")
158 elif not patchtest_patterns.mbox_bugzilla_validation.search_string(
159 commit.commit_message
160 ):
161 self.fail(
162 'Bugzilla issue ID is not correctly formatted - specify it with format: "[YOCTO #<bugzilla ID>]"',
163 commit=commit,
164 )
165
166 def test_author_valid(self):
167 for commit in self.commits:
168 for invalid in patchtest_patterns.invalid_submitters:
169 if invalid.search_string(commit.author):
170 self.fail('Invalid author %s. Resend the series with a valid patch author' % commit.author, commit=commit)
171
172 def test_non_auh_upgrade(self):
173 for commit in self.commits:
174 if patchtest_patterns.auh_email in commit.commit_message:
175 self.fail(
176 "Invalid author %s. Resend the series with a valid patch author"
177 % patchtest_patterns.auh_email,
178 commit=commit,
179 )
diff --git a/meta/lib/patchtest/tests/test_metadata.py b/meta/lib/patchtest/tests/test_metadata.py
new file mode 100644
index 0000000000..2dee80b002
--- /dev/null
+++ b/meta/lib/patchtest/tests/test_metadata.py
@@ -0,0 +1,212 @@
1# Checks related to the patch's metadata variables (LICENSE, LIC_FILES_CHKSUM, SRC_URI, SUMMARY, CVE_CHECK_IGNORE)
2#
3# Copyright (C) 2016 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6
7import base
8import collections
9import os
10import patchtest_patterns
11import pyparsing
12from patchtest_parser import PatchtestParser
13
14# Data store commonly used to share values between pre and post-merge tests
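# Keys take the form "<shortid>-<variable>-<pn>"; the pre-merge variant of a
# test stores under its own shortid (e.g. "pretest_src_uri_left_files-..."),
# which the post-merge test reads back by prefixing "pre" to its shortid.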
15PatchTestDataStore = collections.defaultdict(str)
16
17class TestMetadata(base.Metadata):
18
19 def test_license_presence(self):
20 if not self.added:
21 self.skip('No added recipes, skipping test')
22
23        # TODO: this is a workaround so we can parse recipes that do not yet
24        # set the LICENSE variable: append a default LICENSE to auto.conf
25        # before parsing, then remove it again afterwards
26 auto_conf = os.path.join(os.environ.get('BUILDDIR'), 'conf', 'auto.conf')
27 open_flag = 'w'
28 if os.path.exists(auto_conf):
29 open_flag = 'a'
30        with open(auto_conf, open_flag) as fd:
31            # a single weak default assignment covers every added recipe
32            fd.write('LICENSE ??= "%s"\n' % patchtest_patterns.invalid_license)
33
34 no_license = False
35 for pn in self.added:
36 rd = self.tinfoil.parse_recipe(pn)
37 license = rd.getVar(patchtest_patterns.metadata_lic)
38 if license == patchtest_patterns.invalid_license:
39 no_license = True
40 break
41
42 # remove auto.conf line or the file itself
43 if open_flag == 'w':
44 os.remove(auto_conf)
45 else:
46            with open(auto_conf, 'r') as fd:
47                lines = fd.readlines()
48            # drop the line appended above
49            with open(auto_conf, 'w') as fd:
50                fd.write(''.join(lines[:-1]))
51
52 if no_license:
53 self.fail('Recipe does not have the LICENSE field set.')
54
55 def test_lic_files_chksum_presence(self):
56 if not self.added:
57 self.skip('No added recipes, skipping test')
58
59 for pn in self.added:
60 rd = self.tinfoil.parse_recipe(pn)
61 pathname = rd.getVar('FILE')
62 # we are not interested in images
63 if '/images/' in pathname:
64 continue
65 lic_files_chksum = rd.getVar(patchtest_patterns.metadata_chksum)
66 if rd.getVar(patchtest_patterns.license_var) == patchtest_patterns.closed:
67 continue
68 if not lic_files_chksum:
69 self.fail(
70 "%s is missing in newly added recipe" % patchtest_patterns.metadata_chksum
71 )
72
73 def test_lic_files_chksum_modified_not_mentioned(self):
74 if not self.modified:
75 self.skip('No modified recipes, skipping test')
76
77 for patch in self.patchset:
78 # for the moment, we are just interested in metadata
79 if patch.path.endswith('.patch'):
80 continue
81 payload = str(patch)
82 if patchtest_patterns.lic_chksum_added.search_string(
83 payload
84 ) or patchtest_patterns.lic_chksum_removed.search_string(payload):
85 # if any patch on the series contain reference on the metadata, fail
86 for commit in self.commits:
87 if patchtest_patterns.lictag_re.search_string(commit.commit_message):
88 break
89 else:
90 self.fail('LIC_FILES_CHKSUM changed without "License-Update:" tag and description in commit message')
91
92 def test_max_line_length(self):
93 for patch in self.patchset:
94 # for the moment, we are just interested in metadata
95 if patch.path.endswith('.patch'):
96 continue
97 payload = str(patch)
98 for line in payload.splitlines():
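                # add_mark presumably matches lines added by the patch (those
                # with a leading '+'); len(line[1:]) measures the line without
                # that diff marker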
99 if patchtest_patterns.add_mark.search_string(line):
100 current_line_length = len(line[1:])
101 if current_line_length > patchtest_patterns.patch_max_line_length:
102 self.fail(
103 "Patch line too long (current length %s, maximum is %s)"
104 % (current_line_length, patchtest_patterns.patch_max_line_length),
105 data=[
106 ("Patch", patch.path),
107 ("Line", "%s ..." % line[0:80]),
108 ],
109 )
110
111 def pretest_src_uri_left_files(self):
112 # these tests just make sense on patches that can be merged
113 if not PatchtestParser.repo.canbemerged:
114 self.skip("Patch cannot be merged")
115 if not self.modified:
116 self.skip('No modified recipes, skipping pretest')
117
118 # get the proper metadata values
119 for pn in self.modified:
120 # we are not interested in images
121 if 'core-image' in pn:
122 continue
123 rd = self.tinfoil.parse_recipe(pn)
124 PatchTestDataStore[
125 "%s-%s-%s" % (self.shortid(), patchtest_patterns.metadata_src_uri, pn)
126 ] = rd.getVar(patchtest_patterns.metadata_src_uri)
127
128 def test_src_uri_left_files(self):
129 # these tests just make sense on patches that can be merged
130 if not PatchtestParser.repo.canbemerged:
131 self.skip("Patch cannot be merged")
132 if not self.modified:
133            self.skip('No modified recipes, skipping test')
134
135 # get the proper metadata values
136 for pn in self.modified:
137 # we are not interested in images
138 if 'core-image' in pn:
139 continue
140 rd = self.tinfoil.parse_recipe(pn)
141 PatchTestDataStore[
142 "%s-%s-%s" % (self.shortid(), patchtest_patterns.metadata_src_uri, pn)
143 ] = rd.getVar(patchtest_patterns.metadata_src_uri)
144
145 for pn in self.modified:
146 pretest_src_uri = PatchTestDataStore[
147 "pre%s-%s-%s" % (self.shortid(), patchtest_patterns.metadata_src_uri, pn)
148 ].split()
149 test_src_uri = PatchTestDataStore[
150 "%s-%s-%s" % (self.shortid(), patchtest_patterns.metadata_src_uri, pn)
151 ].split()
152
153 pretest_files = set([os.path.basename(patch) for patch in pretest_src_uri if patch.startswith('file://')])
154 test_files = set([os.path.basename(patch) for patch in test_src_uri if patch.startswith('file://')])
155
156 # check if files were removed
157 if len(test_files) < len(pretest_files):
158
159 # get removals from patchset
160 filesremoved_from_patchset = set()
161 for patch in self.patchset:
162 if patch.is_removed_file:
163 filesremoved_from_patchset.add(os.path.basename(patch.path))
164
165 # get the deleted files from the SRC_URI
166                filesremoved_from_src_uri = pretest_files - test_files
167
168 # finally, get those patches removed at SRC_URI and not removed from the patchset
169 # TODO: we are not taking into account renames, so test may raise false positives
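                # e.g. a patch renamed in SRC_URI from 'fix.patch' to
                # '0001-fix.patch' would show up here as removed even though it
                # was only renamed (illustrative case of such a false positive)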
170                not_removed = filesremoved_from_src_uri - filesremoved_from_patchset
171 if not_removed:
172 self.fail('Patches not removed from tree. Remove them and amend the submitted mbox',
173 data=[('Patch', f) for f in not_removed])
174
175 def test_summary_presence(self):
176 if not self.added:
177 self.skip('No added recipes, skipping test')
178
179 for pn in self.added:
180 # we are not interested in images
181 if 'core-image' in pn:
182 continue
183 rd = self.tinfoil.parse_recipe(pn)
184 summary = rd.getVar(patchtest_patterns.metadata_summary)
185
186            # "${PN} version ${PV}-${PR}" is the default SUMMARY, so fail if it is unchanged
187 if summary.startswith("%s version" % pn):
188 self.fail(
189 "%s is missing in newly added recipe" % patchtest_patterns.metadata_summary
190 )
191
192 def test_cve_check_ignore(self):
193        # Skip if no recipes were modified or if the target branch predates
194        # Nanbield, where CVE_CHECK_IGNORE was first deprecated.
195 if (
196 not self.modified
197 or PatchtestParser.repo.patch.branch == "kirkstone"
198 or PatchtestParser.repo.patch.branch == "dunfell"
199 ):
200 self.skip("No modified recipes or older target branch, skipping test")
201 for pn in self.modified:
202 # we are not interested in images
203 if 'core-image' in pn:
204 continue
205 rd = self.tinfoil.parse_recipe(pn)
206 cve_check_ignore = rd.getVar(patchtest_patterns.cve_check_ignore_var)
207
208 if cve_check_ignore is not None:
209 self.fail(
210 "%s is deprecated and should be replaced by %s"
211 % (patchtest_patterns.cve_check_ignore_var, patchtest_patterns.cve_status_var)
212 )
diff --git a/meta/lib/patchtest/tests/test_patch.py b/meta/lib/patchtest/tests/test_patch.py
new file mode 100644
index 0000000000..d08b8a5019
--- /dev/null
+++ b/meta/lib/patchtest/tests/test_patch.py
@@ -0,0 +1,131 @@
1# Checks related to patches added by the series: Upstream-Status, Signed-off-by and CVE tags
2#
3# Copyright (C) 2016 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import base
9import os
10import patchtest_patterns
11import pyparsing
12
13class TestPatch(base.Base):
14
15 @classmethod
16 def setUpClassLocal(cls):
17 cls.newpatches = []
18 # get just those relevant patches: new software patches
19 for patch in cls.patchset:
20 if patch.path.endswith('.patch') and patch.is_added_file:
21 cls.newpatches.append(patch)
22
23 cls.mark = str(patchtest_patterns.signed_off_by_prefix).strip('"')
24
25        # match the Signed-off-by mark with a '+' preceding it (i.e. on an added line)
26 cls.prog = patchtest_patterns.patch_signed_off_by
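        # i.e. the pattern should match added lines such as
        #   +Signed-off-by: Jane Doe <jane.doe@example.com>
        # (illustrative line; the exact pattern lives in patchtest_patterns)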
27
28 def setUp(self):
29 if self.unidiff_parse_error:
30 self.skip('Parse error %s' % self.unidiff_parse_error)
31
32 self.valid_status = ", ".join(patchtest_patterns.upstream_status_nonliteral_valid_status)
33 self.standard_format = "Upstream-Status: <Valid status>"
34
35 # we are just interested in series that introduce CVE patches, thus discard other
36 # possibilities: modification to current CVEs, patch directly introduced into the
37 # recipe, upgrades already including the CVE, etc.
38 new_cves = [p for p in self.patchset if p.path.endswith('.patch') and p.is_added_file]
39 if not new_cves:
40 self.skip('No new CVE patches introduced')
41
42 def test_upstream_status_presence_format(self):
43 if not TestPatch.newpatches:
44 self.skip("There are no new software patches, no reason to test Upstream-Status presence/format")
45
46 for newpatch in TestPatch.newpatches:
47            payload = str(newpatch)
48 if not patchtest_patterns.upstream_status_regex.search_string(payload):
49 self.fail(
50 "Added patch file is missing Upstream-Status: <Valid status> in the commit message",
51 data=[
52 ("Standard format", self.standard_format),
53 ("Valid status", self.valid_status),
54 ],
55 )
56 for line in payload.splitlines():
57 if patchtest_patterns.patchmetadata_regex.match(line):
58 continue
59 if patchtest_patterns.upstream_status_regex.search_string(line):
60                    if patchtest_patterns.inappropriate.search_string(line):
61 try:
62 patchtest_patterns.upstream_status_inappropriate_info.parseString(
63 line.lstrip("+")
64 )
65 except pyparsing.ParseException as pe:
66 self.fail(
67 "Upstream-Status is Inappropriate, but no reason was provided",
68 data=[
69 ("Current", pe.pstr),
70 (
71 "Standard format",
72 "Upstream-Status: Inappropriate [reason]",
73 ),
74 ],
75 )
76                elif patchtest_patterns.submitted.search_string(line):
77 try:
78 patchtest_patterns.upstream_status_submitted_info.parseString(
79 line.lstrip("+")
80 )
81 except pyparsing.ParseException as pe:
82 self.fail(
83 "Upstream-Status is Submitted, but it is not mentioned where",
84 data=[
85 ("Current", pe.pstr),
86 (
87 "Standard format",
88 "Upstream-Status: Submitted [where]",
89 ),
90 ],
91 )
92 else:
93 try:
94 patchtest_patterns.upstream_status.parseString(line.lstrip("+"))
95 except pyparsing.ParseException as pe:
96 self.fail(
97 "Upstream-Status is in incorrect format",
98 data=[
99 ("Current", pe.pstr),
100 ("Standard format", self.standard_format),
101 ("Valid status", self.valid_status),
102 ],
103 )
104
105    def test_signed_off_by_presence(self):
106        if not TestPatch.newpatches:
107            self.skip("There are no new software patches, no reason to test %s presence" % TestPatch.mark)
108
109        for newpatch in TestPatch.newpatches:
110            # prog matches the Signed-off-by mark on an added ('+') line anywhere in the payload
111            payload = str(newpatch)
112            if not TestPatch.prog.search_string(payload):
113                self.fail('A patch file has been added without a Signed-off-by tag: \'%s\'' % os.path.basename(newpatch.path))
118
119 def test_cve_tag_format(self):
120        for commit in self.commits:
121 if patchtest_patterns.cve.search_string(
122 commit.shortlog
123 ) or patchtest_patterns.cve.search_string(commit.commit_message):
124 tag_found = False
125 for line in commit.payload.splitlines():
126 if patchtest_patterns.cve_payload_tag.search_string(line):
127 tag_found = True
128 break
129 if not tag_found:
130 self.fail('Missing or incorrectly formatted CVE tag in patch file. Correct or include the CVE tag in the patch with format: "CVE: CVE-YYYY-XXXX"',
131 commit=commit)
diff --git a/meta/lib/patchtest/tests/test_python_pylint.py b/meta/lib/patchtest/tests/test_python_pylint.py
new file mode 100644
index 0000000000..ec9129bc79
--- /dev/null
+++ b/meta/lib/patchtest/tests/test_python_pylint.py
@@ -0,0 +1,65 @@
1# Checks related to the python code done with pylint
2#
3# Copyright (C) 2016 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6
7import base
8from io import StringIO
9from patchtest_parser import PatchtestParser
10from pylint.reporters.text import TextReporter
11import pylint.lint as lint
12
13
14class PyLint(base.Base):
15 pythonpatches = []
16 pylint_pretest = {}
17 pylint_test = {}
18    pylint_options = ["-E", "--disable=E0611,E1101,F0401,E0602", "--msg-template=L:{line} F:{module} I:{msg}"]
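    # With this msg-template, pylint emits lines like
    #   "L:12 F:mymodule I:Undefined variable 'foo'" (illustrative),
    # which pretest_pylint()/test_pylint() below split on the first space
    # into {location: message} entries for comparison.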
19
20 @classmethod
21 def setUpClassLocal(cls):
22 # get just those patches touching python files
23 cls.pythonpatches = []
24 for patch in cls.patchset:
25 if patch.path.endswith('.py'):
26 if not patch.is_removed_file:
27 cls.pythonpatches.append(patch)
28
29 def setUp(self):
30 if self.unidiff_parse_error:
31 self.skip('Python-unidiff parse error')
32 if not PyLint.pythonpatches:
33 self.skip('No python related patches, skipping test')
34
35 def pretest_pylint(self):
36 for pythonpatch in self.pythonpatches:
37 if pythonpatch.is_modified_file:
38 pylint_output = StringIO()
39 reporter = TextReporter(pylint_output)
40                lint.Run(self.pylint_options + [pythonpatch.path], reporter=reporter, exit=False)
41                for line in pylint_output.getvalue().splitlines():
42                    if '*' not in line and line.strip():
43                        location, message = line.strip().split(' ', 1)
44                        self.pylint_pretest[location] = message
45
46 def test_pylint(self):
47 for pythonpatch in self.pythonpatches:
48            # detect renamed files by comparing source and target paths,
49            # since unidiff does not expose renames directly yet
50            if pythonpatch.target_file != pythonpatch.path:
51                path = pythonpatch.target_file[2:]
52            else:
53                path = pythonpatch.path
54            pylint_output = StringIO()
55            reporter = TextReporter(pylint_output)
56            lint.Run(self.pylint_options + [path], reporter=reporter, exit=False)
57            for line in pylint_output.getvalue().splitlines():
58                if '*' not in line and line.strip():
59                    location, message = line.strip().split(' ', 1)
60                    self.pylint_test[location] = message
61
62 for issue in self.pylint_test:
63 if self.pylint_test[issue] not in self.pylint_pretest.values():
64 self.fail('Errors in your Python code were encountered. Please check your code with a linter and resubmit',
65                          data=[('Output', 'Please fix the listed issues:'), ('', issue + ' ' + self.pylint_test[issue])])