Diffstat (limited to 'lib')
-rw-r--r--  lib/oeqa/runtime/cases/dldt_inference_engine.py                        109
-rw-r--r--  lib/oeqa/runtime/cases/dldt_model_optimizer.py                          38
-rw-r--r--  lib/oeqa/runtime/files/dldt-inference-engine/classification_sample.py  135
-rw-r--r--  lib/oeqa/runtime/miutils/dldtutils.py                                     3
-rw-r--r--  lib/oeqa/runtime/miutils/tests/dldt_inference_engine_test.py            56
-rw-r--r--  lib/oeqa/runtime/miutils/tests/dldt_model_optimizer_test.py             23
-rw-r--r--  lib/oeqa/runtime/miutils/tests/squeezenet_model_download_test.py        25
-rw-r--r--  lib/oeqa/selftest/cases/secureboot.py                                   176
8 files changed, 0 insertions, 565 deletions
diff --git a/lib/oeqa/runtime/cases/dldt_inference_engine.py b/lib/oeqa/runtime/cases/dldt_inference_engine.py
deleted file mode 100644
index fb35d52f..00000000
--- a/lib/oeqa/runtime/cases/dldt_inference_engine.py
+++ /dev/null
@@ -1,109 +0,0 @@
from oeqa.runtime.case import OERuntimeTestCase
from oeqa.runtime.decorator.package import OEHasPackage
from oeqa.core.decorator.depends import OETestDepends
from oeqa.runtime.miutils.targets.oeqatarget import OEQATarget
from oeqa.runtime.miutils.tests.squeezenet_model_download_test import SqueezenetModelDownloadTest
from oeqa.runtime.miutils.tests.dldt_model_optimizer_test import DldtModelOptimizerTest
from oeqa.runtime.miutils.tests.dldt_inference_engine_test import DldtInferenceEngineTest
from oeqa.runtime.miutils.dldtutils import get_testdata_config

class DldtInferenceEngine(OERuntimeTestCase):

    @classmethod
    def setUpClass(cls):
        cls.sqn_download = SqueezenetModelDownloadTest(OEQATarget(cls.tc.target), '/tmp/ie/md')
        cls.sqn_download.setup()
        cls.dldt_mo = DldtModelOptimizerTest(OEQATarget(cls.tc.target), '/tmp/ie/ir')
        cls.dldt_mo.setup()
        cls.dldt_ie = DldtInferenceEngineTest(OEQATarget(cls.tc.target), '/tmp/ie/inputs')
        cls.dldt_ie.setup()
        cls.ir_files_dir = cls.dldt_mo.work_dir

    @classmethod
    def tearDownClass(cls):
        cls.dldt_ie.tear_down()
        cls.dldt_mo.tear_down()
        cls.sqn_download.tear_down()

    @OEHasPackage(['dldt-model-optimizer'])
    @OEHasPackage(['wget'])
    def test_dldt_ie_can_create_ir_and_download_input(self):
        proxy_port = get_testdata_config(self.tc.td, 'DLDT_PIP_PROXY')
        if not proxy_port:
            self.skipTest('Need to configure bitbake configuration (DLDT_PIP_PROXY="proxy.server:port").')
        (status, output) = self.sqn_download.test_can_download_squeezenet_model(proxy_port)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
        (status, output) = self.sqn_download.test_can_download_squeezenet_prototxt(proxy_port)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

        mo_exe_dir = get_testdata_config(self.tc.td, 'DLDT_MO_EXE_DIR')
        if not mo_exe_dir:
            self.skipTest('Need to configure bitbake configuration (DLDT_MO_EXE_DIR="directory_to_mo.py").')
        mo_files_dir = self.sqn_download.work_dir
        (status, output) = self.dldt_mo.test_dldt_mo_can_create_ir(mo_exe_dir, mo_files_dir)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

        (status, output) = self.dldt_ie.test_can_download_input_file(proxy_port)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

    @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
    @OEHasPackage(['dldt-inference-engine'])
    @OEHasPackage(['dldt-inference-engine-samples'])
    def test_dldt_ie_classification_with_cpu(self):
        (status, output) = self.dldt_ie.test_dldt_ie_classification_with_device('CPU', self.ir_files_dir)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

    @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
    @OEHasPackage(['dldt-inference-engine'])
    @OEHasPackage(['dldt-inference-engine-samples'])
    @OEHasPackage(['intel-compute-runtime'])
    @OEHasPackage(['ocl-icd'])
    def test_dldt_ie_classification_with_gpu(self):
        (status, output) = self.dldt_ie.test_dldt_ie_classification_with_device('GPU', self.ir_files_dir)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

    @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
    @OEHasPackage(['dldt-inference-engine'])
    @OEHasPackage(['dldt-inference-engine-samples'])
    @OEHasPackage(['dldt-inference-engine-vpu-firmware'])
    def test_dldt_ie_classification_with_myriad(self):
        device = 'MYRIAD'
        (status, output) = self.dldt_ie.test_check_if_openvino_device_available(device)
        if not status:
            self.skipTest('OpenVINO %s device not available on target machine (available devices: %s)' % (device, output))
        (status, output) = self.dldt_ie.test_dldt_ie_classification_with_device(device, self.ir_files_dir)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

    @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
    @OEHasPackage(['dldt-inference-engine'])
    @OEHasPackage(['dldt-inference-engine-python3'])
    @OEHasPackage(['python3-opencv'])
    @OEHasPackage(['python3-numpy'])
    def test_dldt_ie_classification_python_api_with_cpu(self):
        (status, output) = self.dldt_ie.test_dldt_ie_classification_python_api_with_device('CPU', self.ir_files_dir)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

    @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
    @OEHasPackage(['dldt-inference-engine'])
    @OEHasPackage(['dldt-inference-engine-python3'])
    @OEHasPackage(['intel-compute-runtime'])
    @OEHasPackage(['ocl-icd'])
    @OEHasPackage(['python3-opencv'])
    @OEHasPackage(['python3-numpy'])
    def test_dldt_ie_classification_python_api_with_gpu(self):
        (status, output) = self.dldt_ie.test_dldt_ie_classification_python_api_with_device('GPU', self.ir_files_dir)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

    @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
    @OEHasPackage(['dldt-inference-engine'])
    @OEHasPackage(['dldt-inference-engine-python3'])
    @OEHasPackage(['dldt-inference-engine-vpu-firmware'])
    @OEHasPackage(['python3-opencv'])
    @OEHasPackage(['python3-numpy'])
    def test_dldt_ie_classification_python_api_with_myriad(self):
        device = 'MYRIAD'
        (status, output) = self.dldt_ie.test_check_if_openvino_device_available(device)
        if not status:
            self.skipTest('OpenVINO %s device not available on target machine (available devices: %s)' % (device, output))
        (status, output) = self.dldt_ie.test_dldt_ie_classification_python_api_with_device(device, self.ir_files_dir)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
diff --git a/lib/oeqa/runtime/cases/dldt_model_optimizer.py b/lib/oeqa/runtime/cases/dldt_model_optimizer.py
deleted file mode 100644
index 736ea661..00000000
--- a/lib/oeqa/runtime/cases/dldt_model_optimizer.py
+++ /dev/null
@@ -1,38 +0,0 @@
from oeqa.runtime.case import OERuntimeTestCase
from oeqa.runtime.decorator.package import OEHasPackage
from oeqa.runtime.miutils.targets.oeqatarget import OEQATarget
from oeqa.runtime.miutils.tests.squeezenet_model_download_test import SqueezenetModelDownloadTest
from oeqa.runtime.miutils.tests.dldt_model_optimizer_test import DldtModelOptimizerTest
from oeqa.runtime.miutils.dldtutils import get_testdata_config

class DldtModelOptimizer(OERuntimeTestCase):

    @classmethod
    def setUpClass(cls):
        cls.sqn_download = SqueezenetModelDownloadTest(OEQATarget(cls.tc.target), '/tmp/mo/md')
        cls.sqn_download.setup()
        cls.dldt_mo = DldtModelOptimizerTest(OEQATarget(cls.tc.target), '/tmp/mo/ir')
        cls.dldt_mo.setup()

    @classmethod
    def tearDownClass(cls):
        cls.dldt_mo.tear_down()
        cls.sqn_download.tear_down()

    @OEHasPackage(['dldt-model-optimizer'])
    @OEHasPackage(['wget'])
    def test_dldt_mo_can_create_ir(self):
        proxy_port = get_testdata_config(self.tc.td, 'DLDT_PIP_PROXY')
        if not proxy_port:
            self.skipTest('Need to configure bitbake configuration (DLDT_PIP_PROXY="proxy.server:port").')
        (status, output) = self.sqn_download.test_can_download_squeezenet_model(proxy_port)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
        (status, output) = self.sqn_download.test_can_download_squeezenet_prototxt(proxy_port)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

        mo_exe_dir = get_testdata_config(self.tc.td, 'DLDT_MO_EXE_DIR')
        if not mo_exe_dir:
            self.skipTest('Need to configure bitbake configuration (DLDT_MO_EXE_DIR="directory_to_mo.py").')
        mo_files_dir = self.sqn_download.work_dir
        (status, output) = self.dldt_mo.test_dldt_mo_can_create_ir(mo_exe_dir, mo_files_dir)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
diff --git a/lib/oeqa/runtime/files/dldt-inference-engine/classification_sample.py b/lib/oeqa/runtime/files/dldt-inference-engine/classification_sample.py
deleted file mode 100644
index 1906e9fe..00000000
--- a/lib/oeqa/runtime/files/dldt-inference-engine/classification_sample.py
+++ /dev/null
@@ -1,135 +0,0 @@
#!/usr/bin/env python3
"""
 Copyright (C) 2018-2019 Intel Corporation

 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
"""
from __future__ import print_function
import sys
import os
from argparse import ArgumentParser, SUPPRESS
import cv2
import numpy as np
import logging as log
from time import time
from openvino.inference_engine import IENetwork, IECore


def build_argparser():
    parser = ArgumentParser(add_help=False)
    args = parser.add_argument_group('Options')
    args.add_argument('-h', '--help', action='help', default=SUPPRESS, help='Show this help message and exit.')
    args.add_argument("-m", "--model", help="Required. Path to an .xml file with a trained model.", required=True,
                      type=str)
    args.add_argument("-i", "--input", help="Required. Path to a folder with images or path to image files",
                      required=True,
                      type=str, nargs="+")
    args.add_argument("-l", "--cpu_extension",
                      help="Optional. Required for CPU custom layers. "
                           "MKLDNN (CPU)-targeted custom layers. Absolute path to a shared library with the"
                           " kernels implementations.", type=str, default=None)
    args.add_argument("-d", "--device",
                      help="Optional. Specify the target device to infer on; CPU, GPU, FPGA, HDDL, MYRIAD or HETERO: is "
                           "acceptable. The sample will look for a suitable plugin for device specified. Default "
                           "value is CPU",
                      default="CPU", type=str)
    args.add_argument("--labels", help="Optional. Path to a labels mapping file", default=None, type=str)
    args.add_argument("-nt", "--number_top", help="Optional. Number of top results", default=10, type=int)

    return parser


def main():
    log.basicConfig(format="[ %(levelname)s ] %(message)s", level=log.INFO, stream=sys.stdout)
    args = build_argparser().parse_args()
    model_xml = args.model
    model_bin = os.path.splitext(model_xml)[0] + ".bin"

    # Plugin initialization for specified device and load extensions library if specified
    log.info("Creating Inference Engine")
    ie = IECore()
    if args.cpu_extension and 'CPU' in args.device:
        ie.add_extension(args.cpu_extension, "CPU")
    # Read IR
    log.info("Loading network files:\n\t{}\n\t{}".format(model_xml, model_bin))
    net = IENetwork(model=model_xml, weights=model_bin)

    if "CPU" in args.device:
        supported_layers = ie.query_network(net, "CPU")
        not_supported_layers = [l for l in net.layers.keys() if l not in supported_layers]
        if len(not_supported_layers) != 0:
            log.error("Following layers are not supported by the plugin for specified device {}:\n {}".
                      format(args.device, ', '.join(not_supported_layers)))
            log.error("Please try to specify cpu extensions library path in sample's command line parameters using -l "
                      "or --cpu_extension command line argument")
            sys.exit(1)

    assert len(net.inputs.keys()) == 1, "Sample supports only single input topologies"
    assert len(net.outputs) == 1, "Sample supports only single output topologies"

    log.info("Preparing input blobs")
    input_blob = next(iter(net.inputs))
    out_blob = next(iter(net.outputs))
    net.batch_size = len(args.input)

    # Read and pre-process input images
    n, c, h, w = net.inputs[input_blob].shape
    images = np.ndarray(shape=(n, c, h, w))
    for i in range(n):
        image = cv2.imread(args.input[i])
        if image.shape[:-1] != (h, w):
            log.warning("Image {} is resized from {} to {}".format(args.input[i], image.shape[:-1], (h, w)))
            image = cv2.resize(image, (w, h))
        image = image.transpose((2, 0, 1))  # Change data layout from HWC to CHW
        images[i] = image
    log.info("Batch size is {}".format(n))

    # Loading model to the plugin
    log.info("Loading model to the plugin")
    exec_net = ie.load_network(network=net, device_name=args.device)

    # Start sync inference
    log.info("Starting inference in synchronous mode")
    res = exec_net.infer(inputs={input_blob: images})

    # Processing output blob
    log.info("Processing output blob")
    res = res[out_blob]
    log.info("Top {} results: ".format(args.number_top))
    if args.labels:
        with open(args.labels, 'r') as f:
            labels_map = [x.split(sep=' ', maxsplit=1)[-1].strip() for x in f]
    else:
        labels_map = None
    classid_str = "classid"
    probability_str = "probability"
    for i, probs in enumerate(res):
        probs = np.squeeze(probs)
        top_ind = np.argsort(probs)[-args.number_top:][::-1]
        print("Image {}\n".format(args.input[i]))
        print(classid_str, probability_str)
        print("{} {}".format('-' * len(classid_str), '-' * len(probability_str)))
        for id in top_ind:
            det_label = labels_map[id] if labels_map else "{}".format(id)
            label_length = len(det_label)
            space_num_before = (len(classid_str) - label_length) // 2
            space_num_after = len(classid_str) - (space_num_before + label_length) + 2
            space_num_before_prob = (len(probability_str) - len(str(probs[id]))) // 2
            print("{}{}{}{}{:.7f}".format(' ' * space_num_before, det_label,
                                          ' ' * space_num_after, ' ' * space_num_before_prob,
                                          probs[id]))
        print("\n")
    log.info("This sample is an API example, for any performance measurements please use the dedicated benchmark_app tool\n")

if __name__ == '__main__':
    sys.exit(main() or 0)
diff --git a/lib/oeqa/runtime/miutils/dldtutils.py b/lib/oeqa/runtime/miutils/dldtutils.py
deleted file mode 100644
index 45bf2e12..00000000
--- a/lib/oeqa/runtime/miutils/dldtutils.py
+++ /dev/null
@@ -1,3 +0,0 @@

def get_testdata_config(testdata, config):
    return testdata.get(config)
diff --git a/lib/oeqa/runtime/miutils/tests/dldt_inference_engine_test.py b/lib/oeqa/runtime/miutils/tests/dldt_inference_engine_test.py
deleted file mode 100644
index 31bfb539..00000000
--- a/lib/oeqa/runtime/miutils/tests/dldt_inference_engine_test.py
+++ /dev/null
@@ -1,56 +0,0 @@
import os
script_path = os.path.dirname(os.path.realpath(__file__))
files_path = os.path.join(script_path, '../../files/')

class DldtInferenceEngineTest(object):
    ie_input_files = {'ie_python_sample': 'classification_sample.py',
                      'input': 'chicky_512.png',
                      'input_download': 'https://raw.githubusercontent.com/opencv/opencv/master/samples/data/chicky_512.png',
                      'model': 'squeezenet_v1.1.xml'}

    def __init__(self, target, work_dir):
        self.target = target
        self.work_dir = work_dir

    def setup(self):
        self.target.run('mkdir -p %s' % self.work_dir)
        self.target.copy_to(os.path.join(files_path, 'dldt-inference-engine', self.ie_input_files['ie_python_sample']),
                            self.work_dir)
        python_cmd = 'from openvino.inference_engine import IENetwork, IECore; ie = IECore(); print(ie.available_devices)'
        __, output = self.target.run('python3 -c "%s"' % python_cmd)
        self.available_devices = output

    def tear_down(self):
        self.target.run('rm -rf %s' % self.work_dir)

    def test_check_if_openvino_device_available(self, device):
        if device not in self.available_devices:
            return False, self.available_devices
        return True, self.available_devices

    def test_can_download_input_file(self, proxy_port):
        return self.target.run('cd %s; wget %s -e https_proxy=%s' %
                               (self.work_dir,
                                self.ie_input_files['input_download'],
                                proxy_port))

    def test_dldt_ie_classification_with_device(self, device, ir_files_dir):
        return self.target.run('classification_sample_async -d %s -i %s -m %s' %
                               (device,
                                os.path.join(self.work_dir, self.ie_input_files['input']),
                                os.path.join(ir_files_dir, self.ie_input_files['model'])))

    def test_dldt_ie_classification_python_api_with_device(self, device, ir_files_dir, extension=''):
        if extension:
            return self.target.run('python3 %s -d %s -i %s -m %s -l %s' %
                                   (os.path.join(self.work_dir, self.ie_input_files['ie_python_sample']),
                                    device,
                                    os.path.join(self.work_dir, self.ie_input_files['input']),
                                    os.path.join(ir_files_dir, self.ie_input_files['model']),
                                    extension))
        else:
            return self.target.run('python3 %s -d %s -i %s -m %s' %
                                   (os.path.join(self.work_dir, self.ie_input_files['ie_python_sample']),
                                    device,
                                    os.path.join(self.work_dir, self.ie_input_files['input']),
                                    os.path.join(ir_files_dir, self.ie_input_files['model'])))
diff --git a/lib/oeqa/runtime/miutils/tests/dldt_model_optimizer_test.py b/lib/oeqa/runtime/miutils/tests/dldt_model_optimizer_test.py
deleted file mode 100644
index 7d3db15b..00000000
--- a/lib/oeqa/runtime/miutils/tests/dldt_model_optimizer_test.py
+++ /dev/null
@@ -1,23 +0,0 @@
import os

class DldtModelOptimizerTest(object):
    mo_input_files = {'model': 'squeezenet_v1.1.caffemodel',
                      'prototxt': 'deploy.prototxt'}
    mo_exe = 'mo.py'

    def __init__(self, target, work_dir):
        self.target = target
        self.work_dir = work_dir

    def setup(self):
        self.target.run('mkdir -p %s' % self.work_dir)

    def tear_down(self):
        self.target.run('rm -rf %s' % self.work_dir)

    def test_dldt_mo_can_create_ir(self, mo_exe_dir, mo_files_dir):
        return self.target.run('python3 %s --input_model %s --input_proto %s --output_dir %s --data_type FP16' %
                               (os.path.join(mo_exe_dir, self.mo_exe),
                                os.path.join(mo_files_dir, self.mo_input_files['model']),
                                os.path.join(mo_files_dir, self.mo_input_files['prototxt']),
                                self.work_dir))
diff --git a/lib/oeqa/runtime/miutils/tests/squeezenet_model_download_test.py b/lib/oeqa/runtime/miutils/tests/squeezenet_model_download_test.py
deleted file mode 100644
index a3e46a0a..00000000
--- a/lib/oeqa/runtime/miutils/tests/squeezenet_model_download_test.py
+++ /dev/null
@@ -1,25 +0,0 @@
class SqueezenetModelDownloadTest(object):
    download_files = {'squeezenet1.1.prototxt': 'https://raw.githubusercontent.com/DeepScale/SqueezeNet/a47b6f13d30985279789d08053d37013d67d131b/SqueezeNet_v1.1/deploy.prototxt',
                      'squeezenet1.1.caffemodel': 'https://github.com/DeepScale/SqueezeNet/raw/a47b6f13d30985279789d08053d37013d67d131b/SqueezeNet_v1.1/squeezenet_v1.1.caffemodel'}

    def __init__(self, target, work_dir):
        self.target = target
        self.work_dir = work_dir

    def setup(self):
        self.target.run('mkdir -p %s' % self.work_dir)

    def tear_down(self):
        self.target.run('rm -rf %s' % self.work_dir)

    def test_can_download_squeezenet_model(self, proxy_port):
        return self.target.run('cd %s; wget %s -e https_proxy=%s' %
                               (self.work_dir,
                                self.download_files['squeezenet1.1.caffemodel'],
                                proxy_port))

    def test_can_download_squeezenet_prototxt(self, proxy_port):
        return self.target.run('cd %s; wget %s -e https_proxy=%s' %
                               (self.work_dir,
                                self.download_files['squeezenet1.1.prototxt'],
                                proxy_port))
diff --git a/lib/oeqa/selftest/cases/secureboot.py b/lib/oeqa/selftest/cases/secureboot.py
deleted file mode 100644
index 4c059e25..00000000
--- a/lib/oeqa/selftest/cases/secureboot.py
+++ /dev/null
@@ -1,176 +0,0 @@
#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (c) 2017, Intel Corporation.
# All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# AUTHORS
# Mikko Ylinen <mikko.ylinen@linux.intel.com>
#
# Based on meta/lib/oeqa/selftest/* and meta-refkit/lib/oeqa/selftest/*

"""Test cases for secure boot with QEMU running OVMF."""

import os
import unittest
import re
import glob
from shutil import rmtree, copy

from oeqa.core.decorator.depends import OETestDepends
from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu

class SecureBootTests(OESelftestTestCase):
    """Secure Boot test class."""

    ovmf_keys_enrolled = False
    ovmf_qemuparams = ''
    ovmf_dir = ''
    test_image_unsigned = 'secureboot-selftest-image-unsigned'
    test_image_signed = 'secureboot-selftest-image-signed'
    correct_key = 'refkit-db'
    incorrect_key = 'incorrect'

    @classmethod
    def setUpLocal(self):

        if not SecureBootTests.ovmf_keys_enrolled:
            bitbake('ovmf ovmf-shell-image-enrollkeys', output_log=self.logger)

            bb_vars = get_bb_vars(['TMPDIR', 'DEPLOY_DIR_IMAGE'])

            SecureBootTests.ovmf_dir = os.path.join(bb_vars['TMPDIR'], 'oeselftest', 'secureboot', 'ovmf')
            bb.utils.mkdirhier(SecureBootTests.ovmf_dir)

            # Copy (all) OVMF in a temporary location
            for src in glob.glob('%s/ovmf.*' % bb_vars['DEPLOY_DIR_IMAGE']):
                copy(src, SecureBootTests.ovmf_dir)

            SecureBootTests.ovmf_qemuparams = '-drive if=pflash,format=qcow2,file=%s/ovmf.secboot.qcow2' % SecureBootTests.ovmf_dir

            cmd = ("runqemu "
                   "qemuparams='%s' "
                   "ovmf-shell-image-enrollkeys wic intel-corei7-64 "
                   "nographic slirp") % SecureBootTests.ovmf_qemuparams
            print('Running "%s"' % cmd)
            status = runCmd(cmd)

            if not re.search('info: success', status.output, re.M):
                self.fail('Failed to enroll keys. EFI shell log:\n%s' % status.output)
            else:
                # keys enrolled in ovmf.secboot.vars
                SecureBootTests.ovmf_keys_enrolled = True

    @classmethod
    def tearDownLocal(self):
        # Seems this is mandatory between the tests (a signed image is booted
        # when running test_boot_unsigned_image after test_boot_signed_image).
        # bitbake('-c clean %s' % test_image, output_log=self.logger)
        #
        # Whatever the problem was, it no longer seems to be necessary, so
        # we can skip the time-consuming clean + full rebuild (5:04 min instead
        # of 6:55min here).
        pass

    @classmethod
    def tearDownClass(self):
        bitbake('ovmf-shell-image-enrollkeys:do_cleanall', output_log=self.logger)
        rmtree(self.ovmf_dir, ignore_errors=True)

    def secureboot_with_image(self, boot_timeout=300, signing_key=None):
        """Boot the image with UEFI SecureBoot enabled and see the result."""

        config = ""

        if signing_key:
            test_image = self.test_image_signed
            config += 'SECURE_BOOT_SIGNING_KEY = "${THISDIR}/files/%s.key"\n' % signing_key
            config += 'SECURE_BOOT_SIGNING_CERT = "${THISDIR}/files/%s.crt"\n' % signing_key
        else:
            test_image = self.test_image_unsigned

        self.write_config(config)
        bitbake(test_image, output_log=self.logger)
        self.remove_config(config)

        # Some of the cases depend on the timeout to expire. Allow overrides
        # so that we don't have to wait 1000s which is the default.
        overrides = {
            'TEST_QEMUBOOT_TIMEOUT': boot_timeout,
        }

        print('Booting %s' % test_image)

        try:
            with runqemu(test_image, ssh=False,
                         runqemuparams='nographic slirp',
                         qemuparams=self.ovmf_qemuparams,
                         overrides=overrides,
                         image_fstype='wic') as qemu:

                cmd = 'uname -a'

                status, output = qemu.run_serial(cmd)

                self.assertTrue(status, 'Could not run \'uname -a\' (status=%s):\n%s' % (status, output))

                # if we got this far without a correctly signed image, something went wrong
                if signing_key != self.correct_key:
                    self.fail('The image did not give a Security Violation when expected. Boot log:\n%s' % output)


        except Exception:

            # Currently runqemu() fails if 'login:' prompt is not seen and it's
            # not possible to login as 'root'. Those conditions aren't met when
            # booting to EFI shell (See [YOCTO #11438]). We catch the failure
            # and parse the boot log to determine the success. Note: the
            # timeout triggers verbose bb.error() but that's normal with some
            # of the test cases.

            workdir = get_bb_var('WORKDIR', test_image)
            bootlog = "%s/testimage/qemu_boot_log" % workdir

            with open(bootlog, "r") as log:

                # This isn't right but all we can do at this point. The right
                # approach would run commands in the EFI shell to determine
                # the BIOS rejects unsigned and/or images signed with keys in
                # dbx key store but that needs changes in oeqa framework.

                output = log.read()

                # PASS if we see a security violation on unsigned or incorrectly signed images, otherwise fail
                if signing_key == self.correct_key:
                    self.fail('Correctly signed image failed to boot. Boot log:\n%s' % output)
                elif not re.search('Security Violation', output):
                    self.fail('The image did not give a Security Violation when expected. Boot log:\n%s' % output)

    def test_boot_unsigned_image(self):
        """ Boot unsigned image with secureboot enabled in UEFI."""
        self.secureboot_with_image(boot_timeout=120, signing_key=None)

    @OETestDepends(['secureboot.SecureBootTests.test_boot_unsigned_image'])
    def test_boot_incorrectly_signed_image(self):
        """ Boot incorrectly signed image with secureboot enabled in UEFI."""
        self.secureboot_with_image(boot_timeout=120, signing_key=self.incorrect_key)

    @OETestDepends(['secureboot.SecureBootTests.test_boot_incorrectly_signed_image'])
    def test_boot_correctly_signed_image(self):
        """ Boot (correctly) signed image with secureboot enabled in UEFI."""
        self.secureboot_with_image(boot_timeout=150, signing_key=self.correct_key)