Diffstat (limited to 'lib/oeqa/runtime')
-rw-r--r--  lib/oeqa/runtime/cases/cyclictest.py  39
-rw-r--r--  lib/oeqa/runtime/cases/dldt_inference_engine.py  109
-rw-r--r--  lib/oeqa/runtime/cases/dldt_model_optimizer.py  38
-rw-r--r--  lib/oeqa/runtime/cases/intel_mediasdk.py  34
-rw-r--r--  lib/oeqa/runtime/cases/intel_vaapi_driver.py  27
-rw-r--r--  lib/oeqa/runtime/cases/isal.py  24
-rw-r--r--  lib/oeqa/runtime/cases/libipt.py  23
-rw-r--r--  lib/oeqa/runtime/cases/libxcam.py  37
-rw-r--r--  lib/oeqa/runtime/cases/microcode.py  31
-rw-r--r--  lib/oeqa/runtime/cases/mkl_dnn.py  67
-rw-r--r--  lib/oeqa/runtime/cases/parselogs-ignores-intel-core2-32.txt  9
-rw-r--r--  lib/oeqa/runtime/cases/parselogs-ignores-intel-corei7-64.txt  14
-rw-r--r--  lib/oeqa/runtime/cases/thermald.py  47
-rw-r--r--  lib/oeqa/runtime/files/dldt-inference-engine/classification_sample.py  135
-rw-r--r--  lib/oeqa/runtime/miutils/dldtutils.py  3
-rw-r--r--  lib/oeqa/runtime/miutils/targets/oeqatarget.py  11
-rw-r--r--  lib/oeqa/runtime/miutils/tests/dldt_inference_engine_test.py  56
-rw-r--r--  lib/oeqa/runtime/miutils/tests/dldt_model_optimizer_test.py  23
-rw-r--r--  lib/oeqa/runtime/miutils/tests/mkl_dnn_test.py  55
-rw-r--r--  lib/oeqa/runtime/miutils/tests/squeezenet_model_download_test.py  25
20 files changed, 807 insertions, 0 deletions
diff --git a/lib/oeqa/runtime/cases/cyclictest.py b/lib/oeqa/runtime/cases/cyclictest.py
new file mode 100644
index 00000000..8890651a
--- /dev/null
+++ b/lib/oeqa/runtime/cases/cyclictest.py
@@ -0,0 +1,39 @@
1from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.core.decorator.depends import OETestDepends
3from oeqa.runtime.decorator.package import OEHasPackage
4import os
5import subprocess
6import datetime
7
8class CyclicTest(OERuntimeTestCase):
9
10 @OEHasPackage(['rt-tests'])
11 @OETestDepends(['ssh.SSHTest.test_ssh'])
12 def test_cyclic(self):
13 # Cyclictest command and arguments are based on the public setup for Intel(R) Core(TM) i7-6700
14 # https://www.osadl.org/Latency-plot-of-system-in-rack-9-slot.qa-latencyplot-r9s5.0.html?shadow=1
15 # Command line: cyclictest -l100000000 -m -Sp99 -i200 -h400 -q
16 status, output = self.target.run('cyclictest -l100000000 -m -Sp99 -i200 -h400')
17 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
18 test_log_dir = self.td.get('TEST_LOG_DIR', '')
19
20 if not test_log_dir:
21 test_log_dir = os.path.join(self.td.get('WORKDIR', ''), 'testimage')
22
23 cyclic_log_dir = os.path.join(test_log_dir, '%s.%s' % ('cyclic_test', datetime.datetime.now().strftime('%Y%m%d%H%M%S')))
24 os.makedirs(cyclic_log_dir)
25 log_path = os.path.join(cyclic_log_dir, 'cyclic_log')
26 with open(log_path, 'w') as f:
27 f.write(output)
28
29 max_latency = subprocess.check_output(('grep "Max Latencies" %s | tr " " "\n" | sort -n | tail -1 | sed s/^0*//') % log_path, shell=True).strip()
30 max_latency = int(max_latency)
31
32 # Default target latency is the max latency (24us) captured at the public execution, multiplied by 1.2 (20% buffer)
33 # https://www.osadl.org/Latency-plot-of-system-in-rack-9-slot.qa-latencyplot-r9s5.0.html?shadow=1
34 target_latency = 1.2*24
35 user_defined_target_latency = self.tc.td.get("RTKERNEL_TARGET_LATENCY")
36 if user_defined_target_latency:
37 target_latency = int(user_defined_target_latency)
38 self.assertTrue(max_latency < target_latency,
39 msg="Max latency (%sus) is greater than target (%sus)." % (max_latency, target_latency))
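
The test above derives its default pass threshold from the OSADL reference run: 1.2 x 24us = 28.8us. Both the threshold and the log location can be overridden from the build configuration; a minimal, illustrative local.conf sketch (the values shown are placeholders, not recommendations):

    # Override the cyclictest pass threshold in microseconds.
    # When unset, the test falls back to 1.2 * 24us = 28.8us.
    RTKERNEL_TARGET_LATENCY = "30"
    # Optional: directory under which the cyclic_test.<timestamp> log folder is created;
    # the test falls back to ${WORKDIR}/testimage when this is unset.
    TEST_LOG_DIR = "${TOPDIR}/cyclictest-logs"
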
diff --git a/lib/oeqa/runtime/cases/dldt_inference_engine.py b/lib/oeqa/runtime/cases/dldt_inference_engine.py
new file mode 100644
index 00000000..fb35d52f
--- /dev/null
+++ b/lib/oeqa/runtime/cases/dldt_inference_engine.py
@@ -0,0 +1,109 @@
1from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.runtime.decorator.package import OEHasPackage
3from oeqa.core.decorator.depends import OETestDepends
4from oeqa.runtime.miutils.targets.oeqatarget import OEQATarget
5from oeqa.runtime.miutils.tests.squeezenet_model_download_test import SqueezenetModelDownloadTest
6from oeqa.runtime.miutils.tests.dldt_model_optimizer_test import DldtModelOptimizerTest
7from oeqa.runtime.miutils.tests.dldt_inference_engine_test import DldtInferenceEngineTest
8from oeqa.runtime.miutils.dldtutils import get_testdata_config
9
10class DldtInferenceEngine(OERuntimeTestCase):
11
12 @classmethod
13 def setUpClass(cls):
14 cls.sqn_download = SqueezenetModelDownloadTest(OEQATarget(cls.tc.target), '/tmp/ie/md')
15 cls.sqn_download.setup()
16 cls.dldt_mo = DldtModelOptimizerTest(OEQATarget(cls.tc.target), '/tmp/ie/ir')
17 cls.dldt_mo.setup()
18 cls.dldt_ie = DldtInferenceEngineTest(OEQATarget(cls.tc.target), '/tmp/ie/inputs')
19 cls.dldt_ie.setup()
20 cls.ir_files_dir = cls.dldt_mo.work_dir
21
22 @classmethod
23 def tearDownClass(cls):
24 cls.dldt_ie.tear_down()
25 cls.dldt_mo.tear_down()
26 cls.sqn_download.tear_down()
27
28 @OEHasPackage(['dldt-model-optimizer'])
29 @OEHasPackage(['wget'])
30 def test_dldt_ie_can_create_ir_and_download_input(self):
31 proxy_port = get_testdata_config(self.tc.td, 'DLDT_PIP_PROXY')
32 if not proxy_port:
33 self.skipTest('Need to set DLDT_PIP_PROXY="proxy.server:port" in the bitbake configuration.')
34 (status, output) = self.sqn_download.test_can_download_squeezenet_model(proxy_port)
35 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
36 (status, output) = self.sqn_download.test_can_download_squeezenet_prototxt(proxy_port)
37 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
38
39 mo_exe_dir = get_testdata_config(self.tc.td, 'DLDT_MO_EXE_DIR')
40 if not mo_exe_dir:
41 self.skipTest('Need to set DLDT_MO_EXE_DIR="directory_to_mo.py" in the bitbake configuration.')
42 mo_files_dir = self.sqn_download.work_dir
43 (status, output) = self.dldt_mo.test_dldt_mo_can_create_ir(mo_exe_dir, mo_files_dir)
44 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
45
46 (status, output) = self.dldt_ie.test_can_download_input_file(proxy_port)
47 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
48
49 @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
50 @OEHasPackage(['dldt-inference-engine'])
51 @OEHasPackage(['dldt-inference-engine-samples'])
52 def test_dldt_ie_classification_with_cpu(self):
53 (status, output) = self.dldt_ie.test_dldt_ie_classification_with_device('CPU', self.ir_files_dir)
54 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
55
56 @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
57 @OEHasPackage(['dldt-inference-engine'])
58 @OEHasPackage(['dldt-inference-engine-samples'])
59 @OEHasPackage(['intel-compute-runtime'])
60 @OEHasPackage(['ocl-icd'])
61 def test_dldt_ie_classification_with_gpu(self):
62 (status, output) = self.dldt_ie.test_dldt_ie_classification_with_device('GPU', self.ir_files_dir)
63 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
64
65 @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
66 @OEHasPackage(['dldt-inference-engine'])
67 @OEHasPackage(['dldt-inference-engine-samples'])
68 @OEHasPackage(['dldt-inference-engine-vpu-firmware'])
69 def test_dldt_ie_classification_with_myriad(self):
70 device = 'MYRIAD'
71 (status, output) = self.dldt_ie.test_check_if_openvino_device_available(device)
72 if not status:
73 self.skipTest('OpenVINO %s device not available on target machine (available devices: %s)' % (device, output))
74 (status, output) = self.dldt_ie.test_dldt_ie_classification_with_device(device, self.ir_files_dir)
75 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
76
77 @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
78 @OEHasPackage(['dldt-inference-engine'])
79 @OEHasPackage(['dldt-inference-engine-python3'])
80 @OEHasPackage(['python3-opencv'])
81 @OEHasPackage(['python3-numpy'])
82 def test_dldt_ie_classification_python_api_with_cpu(self):
83 (status, output) = self.dldt_ie.test_dldt_ie_classification_python_api_with_device('CPU', self.ir_files_dir)
84 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
85
86 @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
87 @OEHasPackage(['dldt-inference-engine'])
88 @OEHasPackage(['dldt-inference-engine-python3'])
89 @OEHasPackage(['intel-compute-runtime'])
90 @OEHasPackage(['ocl-icd'])
91 @OEHasPackage(['python3-opencv'])
92 @OEHasPackage(['python3-numpy'])
93 def test_dldt_ie_classification_python_api_with_gpu(self):
94 (status, output) = self.dldt_ie.test_dldt_ie_classification_python_api_with_device('GPU', self.ir_files_dir)
95 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
96
97 @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
98 @OEHasPackage(['dldt-inference-engine'])
99 @OEHasPackage(['dldt-inference-engine-python3'])
100 @OEHasPackage(['dldt-inference-engine-vpu-firmware'])
101 @OEHasPackage(['python3-opencv'])
102 @OEHasPackage(['python3-numpy'])
103 def test_dldt_ie_classification_python_api_with_myriad(self):
104 device = 'MYRIAD'
105 (status, output) = self.dldt_ie.test_check_if_openvino_device_available(device)
106 if not status:
107 self.skipTest('OpenVINO %s device not available on target machine (available devices: %s)' % (device, output))
108 (status, output) = self.dldt_ie.test_dldt_ie_classification_python_api_with_device(device, self.ir_files_dir)
109 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
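
Both DLDT cases above skip unless two variables are present in the bitbake test data; a minimal, illustrative local.conf sketch (the proxy host/port and the mo.py directory are placeholders for whatever applies to the target image):

    # Used as the https_proxy for the wget downloads of the SqueezeNet model, prototxt and test input.
    DLDT_PIP_PROXY = "proxy.example.com:911"
    # Directory on the target that contains the model optimizer entry point (mo.py).
    DLDT_MO_EXE_DIR = "/path/to/model-optimizer"
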
diff --git a/lib/oeqa/runtime/cases/dldt_model_optimizer.py b/lib/oeqa/runtime/cases/dldt_model_optimizer.py
new file mode 100644
index 00000000..736ea661
--- /dev/null
+++ b/lib/oeqa/runtime/cases/dldt_model_optimizer.py
@@ -0,0 +1,38 @@
1from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.runtime.decorator.package import OEHasPackage
3from oeqa.runtime.miutils.targets.oeqatarget import OEQATarget
4from oeqa.runtime.miutils.tests.squeezenet_model_download_test import SqueezenetModelDownloadTest
5from oeqa.runtime.miutils.tests.dldt_model_optimizer_test import DldtModelOptimizerTest
6from oeqa.runtime.miutils.dldtutils import get_testdata_config
7
8class DldtModelOptimizer(OERuntimeTestCase):
9
10 @classmethod
11 def setUpClass(cls):
12 cls.sqn_download = SqueezenetModelDownloadTest(OEQATarget(cls.tc.target), '/tmp/mo/md')
13 cls.sqn_download.setup()
14 cls.dldt_mo = DldtModelOptimizerTest(OEQATarget(cls.tc.target), '/tmp/mo/ir')
15 cls.dldt_mo.setup()
16
17 @classmethod
18 def tearDownClass(cls):
19 cls.dldt_mo.tear_down()
20 cls.sqn_download.tear_down()
21
22 @OEHasPackage(['dldt-model-optimizer'])
23 @OEHasPackage(['wget'])
24 def test_dldt_mo_can_create_ir(self):
25 proxy_port = get_testdata_config(self.tc.td, 'DLDT_PIP_PROXY')
26 if not proxy_port:
27 self.skipTest('Need to set DLDT_PIP_PROXY="proxy.server:port" in the bitbake configuration.')
28 (status, output) = self.sqn_download.test_can_download_squeezenet_model(proxy_port)
29 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
30 (status, output) = self.sqn_download.test_can_download_squeezenet_prototxt(proxy_port)
31 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
32
33 mo_exe_dir = get_testdata_config(self.tc.td, 'DLDT_MO_EXE_DIR')
34 if not mo_exe_dir:
35 self.skipTest('Need to set DLDT_MO_EXE_DIR="directory_to_mo.py" in the bitbake configuration.')
36 mo_files_dir = self.sqn_download.work_dir
37 (status, output) = self.dldt_mo.test_dldt_mo_can_create_ir(mo_exe_dir, mo_files_dir)
38 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
diff --git a/lib/oeqa/runtime/cases/intel_mediasdk.py b/lib/oeqa/runtime/cases/intel_mediasdk.py
new file mode 100644
index 00000000..4ae7d580
--- /dev/null
+++ b/lib/oeqa/runtime/cases/intel_mediasdk.py
@@ -0,0 +1,34 @@
1from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.runtime.decorator.package import OEHasPackage
3from oeqa.core.decorator.depends import OETestDepends
4
5class MsdkTest(OERuntimeTestCase):
6
7 @classmethod
8 def tearDownClass(cls):
9 cls.tc.target.run("rm /tmp/mtest_h264.mp4")
10
11 @OEHasPackage(['gstreamer1.0-plugins-base'])
12 @OEHasPackage(['gstreamer1.0-plugins-good'])
13 @OEHasPackage(['gstreamer1.0-plugins-bad'])
14 @OEHasPackage(['intel-mediasdk'])
15 @OEHasPackage(['intel-media-driver', 'libigfxcmrt7'])
16 def test_gstreamer_can_encode_with_msdk_and_intel_media_driver(self):
17 (status, output) = self.target.run('gst-inspect-1.0 msdk')
18 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
19
20 (status, output) = self.target.run('export LIBVA_DRIVER_NAME=iHD; '
21 'gst-launch-1.0 -ev videotestsrc num-buffers=120 ! timeoverlay ! '
22 'msdkh264enc ! video/x-h264,profile=main ! h264parse ! '
23 'filesink location=/tmp/mtest_h264.mp4')
24 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
25
26 @OETestDepends(['intel_mediasdk.MsdkTest.test_gstreamer_can_encode_with_msdk_and_intel_media_driver'])
27 def test_gstreamer_can_decode_with_msdk_and_intel_media_driver(self):
28 (status, output) = self.target.run('export LIBVA_DRIVER_NAME=iHD; '
29 'gst-launch-1.0 filesrc location=/tmp/mtest_h264.mp4 ! '
30 'h264parse ! msdkh264dec ! '
31 'msdkh265enc rate-control=cbr bitrate=5000 gop-size=30 b-frames=2 ! '
32 'video/x-h265,profile=main ! h265parse ! fakesink')
33 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
34
diff --git a/lib/oeqa/runtime/cases/intel_vaapi_driver.py b/lib/oeqa/runtime/cases/intel_vaapi_driver.py
new file mode 100644
index 00000000..31e11a81
--- /dev/null
+++ b/lib/oeqa/runtime/cases/intel_vaapi_driver.py
@@ -0,0 +1,27 @@
1from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.runtime.decorator.package import OEHasPackage
3from oeqa.core.decorator.depends import OETestDepends
4
5class VaapiDriverTest(OERuntimeTestCase):
6
7 @classmethod
8 def tearDownClass(cls):
9 cls.tc.target.run("rm /tmp/vtest_h264.mp4")
10
11 @OEHasPackage(['gstreamer1.0-plugins-base'])
12 @OEHasPackage(['gstreamer1.0-plugins-good'])
13 @OEHasPackage(['gstreamer1.0-vaapi'])
14 @OEHasPackage(['intel-vaapi-driver'])
15 def test_gstreamer_can_encode_with_intel_vaapi_driver(self):
16 (status, output) = self.target.run('gst-inspect-1.0 vaapi')
17 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
18
19 (status, output) = self.target.run('gst-launch-1.0 -ev videotestsrc num-buffers=60 ! '
20 'timeoverlay ! vaapih264enc ! h264parse ! mp4mux ! filesink location=/tmp/vtest_h264.mp4')
21 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
22
23 @OETestDepends(['intel_vaapi_driver.VaapiDriverTest.test_gstreamer_can_encode_with_intel_vaapi_driver'])
24 def test_gstreamer_can_decode_with_intel_vaapi_driver(self):
25 (status, output) = self.target.run('gst-launch-1.0 filesrc location=/tmp/vtest_h264.mp4 ! '
26 'qtdemux ! h264parse ! vaapih264dec ! vaapisink')
27 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
diff --git a/lib/oeqa/runtime/cases/isal.py b/lib/oeqa/runtime/cases/isal.py
new file mode 100644
index 00000000..5025f986
--- /dev/null
+++ b/lib/oeqa/runtime/cases/isal.py
@@ -0,0 +1,24 @@
1import os
2from oeqa.runtime.decorator.package import OEHasPackage
3from oeqa.runtime.case import OERuntimeTestCase
4from oeqa.core.decorator.depends import OETestDepends
5
6class IsalTest(OERuntimeTestCase):
7
8 @OEHasPackage(['isa-l'])
9 def test_isal_igzip_version(self):
10 command = 'igzip -V'
11 (status, output) = self.target.run(command)
12 self.assertEqual(status, 0, msg="Error messages: %s" % output)
13
14 @OETestDepends(['isal.IsalTest.test_isal_igzip_version'])
15 def test_isal_igzip_can_compress(self):
16 command = 'echo "hello" > /tmp/igzip_sample'
17 (status, output) = self.target.run(command)
18 self.assertEqual(status, 0, msg="Error messages: %s" % output)
19 command = 'igzip -z /tmp/igzip_sample'
20 (status, output) = self.target.run(command)
21 self.assertEqual(status, 0, msg="Error messages: %s" % output)
22 command = 'rm /tmp/igzip_sample*'
23 (status, output) = self.target.run(command)
24 self.assertEqual(status, 0, msg="Error messages: %s" % output)
diff --git a/lib/oeqa/runtime/cases/libipt.py b/lib/oeqa/runtime/cases/libipt.py
new file mode 100644
index 00000000..4adb13f0
--- /dev/null
+++ b/lib/oeqa/runtime/cases/libipt.py
@@ -0,0 +1,23 @@
1from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.runtime.decorator.package import OEHasPackage
3from oeqa.core.decorator.depends import OETestDepends
4
5class LibiptTest(OERuntimeTestCase):
6 libipt_bin_dir = '/usr/bin/libipt/'
7
8 @classmethod
9 def tearDownClass(cls):
10 cls.tc.target.run('rm /tmp/loop-tnt*')
11
12 @OEHasPackage(['libipt', 'libipt2'])
13 @OEHasPackage(['libipt-test'])
14 @OEHasPackage(['yasm'])
15 def test_libipt_can_generate_trace_packet(self):
16 (status, output) = self.target.run('cd /tmp; %spttc %s/tests/loop-tnt.ptt' %
17 (self.libipt_bin_dir, self.libipt_bin_dir))
18 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
19
20 @OETestDepends(['libipt.LibiptTest.test_libipt_can_generate_trace_packet'])
21 def test_libipt_can_perform_trace_packet_dump(self):
22 (status, output) = self.target.run('cd /tmp; %sptdump loop-tnt.pt' % self.libipt_bin_dir)
23 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
diff --git a/lib/oeqa/runtime/cases/libxcam.py b/lib/oeqa/runtime/cases/libxcam.py
new file mode 100644
index 00000000..57192f07
--- /dev/null
+++ b/lib/oeqa/runtime/cases/libxcam.py
@@ -0,0 +1,37 @@
1from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.runtime.decorator.package import OEHasPackage
3from oeqa.core.decorator.depends import OETestDepends
4
5class LibxcamTest(OERuntimeTestCase):
6 yuv_file = 'vtest.yuv'
7 soft_test_app_file = 'test-soft-image'
8 libxcam_test_app_dir = '/usr/bin/libxcam/'
9 libxcam_file_dir = '/tmp/'
10
11 @classmethod
12 def tearDownClass(cls):
13 cls.tc.target.run("rm %s%s" % (cls.libxcam_file_dir, cls.yuv_file))
14
15 @OEHasPackage(['gstreamer1.0-plugins-base'])
16 @OEHasPackage(['gstreamer1.0-plugins-good'])
17 @OEHasPackage(['gstreamer1.0-vaapi'])
18 @OEHasPackage(['intel-vaapi-driver'])
19 def test_libxcam_can_generate_yuv_file_with_gstreamer(self):
20 (status, output) = self.target.run('gst-inspect-1.0 vaapi')
21 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
22
23 (status, output) = self.target.run('gst-launch-1.0 -ev videotestsrc num-buffers=60 ! '
24 'timeoverlay ! filesink location=%s%s' %
25 (self.libxcam_file_dir, self.yuv_file))
26 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
27
28 @OEHasPackage(['libxcam'])
29 @OEHasPackage(['libxcam-test'])
30 @OETestDepends(['libxcam.LibxcamTest.test_libxcam_can_generate_yuv_file_with_gstreamer'])
31 def test_libxcam_can_execute_soft_image_sample_app(self):
32 (status, output) = self.target.run('%s%s --type remap --input0 %s%s --output soft_out.nv12 --save false' %
33 (self.libxcam_test_app_dir,
34 self.soft_test_app_file,
35 self.libxcam_file_dir,
36 self.yuv_file))
37 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
diff --git a/lib/oeqa/runtime/cases/microcode.py b/lib/oeqa/runtime/cases/microcode.py
new file mode 100644
index 00000000..52c1cdb4
--- /dev/null
+++ b/lib/oeqa/runtime/cases/microcode.py
@@ -0,0 +1,31 @@
1from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.runtime.decorator.package import OEHasPackage
3import re
4
5class MicrocodeTest(OERuntimeTestCase):
6
7 def get_revision_from_microcode_string_list(self, microcode_string_list, re_pattern):
8 re_compile = re.compile(re_pattern)
9 rev_list = []
10 for s in microcode_string_list:
11 matched_revs = re_compile.findall(s)
12 if matched_revs:
13 for mr in matched_revs:
14 rev_list.append(int(mr, 16))
15 return rev_list
16
17 @OEHasPackage(["iucode-tool"])
18 def test_microcode_update(self):
19 (status, output) = self.target.run('iucode_tool /lib/firmware/intel-ucode/ -tb -l --scan-system=2 | grep rev')
20
21 selected_microcodes = output.splitlines()
22 selected_rev_list = self.get_revision_from_microcode_string_list(selected_microcodes, r"rev (\w*)")
23
24 (status, output) = self.target.run("dmesg | grep 'microcode updated early'")
25
26 updated_microcodes = output.splitlines()
27 updated_rev_list = self.get_revision_from_microcode_string_list(updated_microcodes, r"revision (\w*)")
28
29 for ul in updated_rev_list:
30 self.assertTrue(ul in selected_rev_list, msg="Updated revision, %s, not in selected revision list (%s)" %
31 (ul, selected_rev_list))
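
The helper above copes with the different revision notations of iucode_tool and dmesg because both patterns capture a hex token and int(mr, 16) normalizes it. A standalone sketch of the same parsing, using hypothetical sample lines (real output varies by system):

    import re

    def get_revisions(lines, pattern):
        # Same logic as MicrocodeTest.get_revision_from_microcode_string_list()
        return [int(m, 16) for line in lines for m in re.findall(pattern, line)]

    # Hypothetical sample lines, for illustration only.
    selected = ['001/001: sig 0x000506e3, pf_mask 0x36, 2019-10-03, rev 0x00d6, size 101376']
    updated = ['microcode: microcode updated early to revision 0xd6, date = 2019-10-03']

    print(get_revisions(selected, r"rev (\w*)"))      # [214]
    print(get_revisions(updated, r"revision (\w*)"))  # [214]
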
diff --git a/lib/oeqa/runtime/cases/mkl_dnn.py b/lib/oeqa/runtime/cases/mkl_dnn.py
new file mode 100644
index 00000000..8d50df54
--- /dev/null
+++ b/lib/oeqa/runtime/cases/mkl_dnn.py
@@ -0,0 +1,67 @@
1from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.runtime.decorator.package import OEHasPackage
3from oeqa.core.decorator.depends import OETestDepends
4from oeqa.runtime.miutils.targets.oeqatarget import OEQATarget
5from oeqa.runtime.miutils.tests.mkl_dnn_test import MkldnnTest
6
7class MklDnn(OERuntimeTestCase):
8
9 @classmethod
10 def setUpClass(cls):
11 cls.mkldnntest = MkldnnTest(OEQATarget(cls.tc.target))
12
13 @classmethod
14 def tearDownClass(cls):
15 cls.mkldnntest.tear_down()
16
17 @OEHasPackage(['onednn', 'libdnnl2'])
18 @OEHasPackage(['onednn-src', 'libdnnl-src'])
19 @OEHasPackage(['onednn-dev', 'libdnnl-dev'])
20 @OEHasPackage(['gcc'])
21 @OEHasPackage(['gcc-symlinks'])
22 @OEHasPackage(['libstdc++-dev'])
23 @OEHasPackage(['binutils'])
24 def test_mkldnn_can_compile_and_execute(self):
25 (status, output) = self.mkldnntest.test_mkldnn_can_compile_and_execute()
26 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
27
28 @OEHasPackage(['onednn', 'libdnnl2'])
29 @OEHasPackage(['onednn-test', 'libdnnl-test'])
30 def test_mkldnn_benchdnn_package_available(self):
31 (status, output) = self.mkldnntest.test_mkldnn_benchdnn_package_available()
32 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
33
34 @OETestDepends(['mkl_dnn.MklDnn.test_mkldnn_benchdnn_package_available'])
35 def test_mkldnn_conv_api(self):
36 (status, output) = self.mkldnntest.test_mkldnn_conv_api()
37 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
38
39 @OETestDepends(['mkl_dnn.MklDnn.test_mkldnn_benchdnn_package_available'])
40 def test_mkldnn_bnorm_api(self):
41 (status, output) = self.mkldnntest.test_mkldnn_bnorm_api()
42 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
43
44 @OETestDepends(['mkl_dnn.MklDnn.test_mkldnn_benchdnn_package_available'])
45 def test_mkldnn_deconv_api(self):
46 (status, output) = self.mkldnntest.test_mkldnn_deconv_api()
47 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
48
49 @OETestDepends(['mkl_dnn.MklDnn.test_mkldnn_benchdnn_package_available'])
50 def test_mkldnn_ip_api(self):
51 (status, output) = self.mkldnntest.test_mkldnn_ip_api()
52 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
53
54 @OETestDepends(['mkl_dnn.MklDnn.test_mkldnn_benchdnn_package_available'])
55 def test_mkldnn_reorder_api(self):
56 (status, output) = self.mkldnntest.test_mkldnn_reorder_api()
57 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
58
59 @OETestDepends(['mkl_dnn.MklDnn.test_mkldnn_benchdnn_package_available'])
60 def test_mkldnn_rnn_api(self):
61 (status, output) = self.mkldnntest.test_mkldnn_rnn_api()
62 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
63
64 @OETestDepends(['mkl_dnn.MklDnn.test_mkldnn_benchdnn_package_available'])
65 def test_mkldnn_shuffle_api(self):
66 (status, output) = self.mkldnntest.test_mkldnn_shuffle_api()
67 self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
diff --git a/lib/oeqa/runtime/cases/parselogs-ignores-intel-core2-32.txt b/lib/oeqa/runtime/cases/parselogs-ignores-intel-core2-32.txt
new file mode 100644
index 00000000..84ce8168
--- /dev/null
+++ b/lib/oeqa/runtime/cases/parselogs-ignores-intel-core2-32.txt
@@ -0,0 +1,9 @@
1# These should be reviewed to see if they are still needed
2ACPI: No _BQC method, cannot determine initial brightness
3[Firmware Bug]: ACPI: No _BQC method, cannot determine initial brightness
4(EE) Failed to load module "psb"
5(EE) Failed to load module "psbdrv"
6(EE) open /dev/fb0: No such file or directory
7(EE) AIGLX: reverting to software rendering
8dmi: Firmware registration failed.
9ioremap error for 0x78
diff --git a/lib/oeqa/runtime/cases/parselogs-ignores-intel-corei7-64.txt b/lib/oeqa/runtime/cases/parselogs-ignores-intel-corei7-64.txt
new file mode 100644
index 00000000..5c9b4bc7
--- /dev/null
+++ b/lib/oeqa/runtime/cases/parselogs-ignores-intel-corei7-64.txt
@@ -0,0 +1,14 @@
1# These should be reviewed to see if they are still needed
2can't set Max Payload Size to 256
3intel_punit_ipc: can't request region for resource
4[drm] parse error at position 4 in video mode 'efifb'
5ACPI Error: Could not enable RealTimeClock event
6ACPI Warning: Could not enable fixed event - RealTimeClock
7hci_intel INT33E1:00: Unable to retrieve gpio
8hci_intel: probe of INT33E1:00 failed
9can't derive routing for PCI INT A
10failed to read out thermal zone
11Bluetooth: hci0: Setting Intel event mask failed
12ttyS2 - failed to request DMA
13Bluetooth: hci0: Failed to send firmware data (-38)
14atkbd serio0: Failed to enable keyboard on isa0060/serio0
diff --git a/lib/oeqa/runtime/cases/thermald.py b/lib/oeqa/runtime/cases/thermald.py
new file mode 100644
index 00000000..a0b6a92b
--- /dev/null
+++ b/lib/oeqa/runtime/cases/thermald.py
@@ -0,0 +1,47 @@
1from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.core.decorator.depends import OETestDepends
3from oeqa.runtime.decorator.package import OEHasPackage
4import threading
5import time
6import re
7
8class ThermaldTest(OERuntimeTestCase):
9 def get_thermal_zone_with_target_type(self, target_type):
10 i = 0
11 while True:
12 status, output = self.target.run('cat /sys/class/thermal/thermal_zone%s/type' % i)
13 if status:
14 return -1
15 if output == target_type:
16 return i
17 i = i + 1
18
19 def run_thermald_emulation_to_exceed_setpoint_then_end_thermald_process(self, run_args):
20 time.sleep(2)
21 self.target.run('echo 106000 > /sys/class/thermal/thermal_zone%s/emul_temp' % run_args)
22 time.sleep(5)
23 __, output = self.target.run('pidof thermald')
24 self.target.run('kill -9 %s' % output)
25
26 def test_thermald_emulation_mode(self):
27 # The thermald test depends on thermal emulation, which requires CONFIG_THERMAL_EMULATION=y in the kernel
28 # To enable thermal emulation, refer to https://github.com/intel/thermal_daemon/blob/master/test/readme_test.txt
29 (status, output) = self.target.run('gzip -dc /proc/config.gz | grep CONFIG_THERMAL_EMULATION=y')
30 if status:
31 self.skipTest("CONFIG_THERMAL_EMULATION is not set")
32
33 @OEHasPackage(['thermald'])
34 @OETestDepends(['thermald.ThermaldTest.test_thermald_emulation_mode'])
35 def test_thermald_can_track_thermal_exceed_setpoint(self):
36 x86_thermal_zone_index = self.get_thermal_zone_with_target_type('x86_pkg_temp')
37 if x86_thermal_zone_index < 0:
38 self.skipTest('Could not get the thermal zone index for target type (%s)' % 'x86_pkg_temp')
39 td_thread = threading.Thread(target=self.run_thermald_emulation_to_exceed_setpoint_then_end_thermald_process,
40 args=(x86_thermal_zone_index,))
41 td_thread.start()
42 td_thread.join()
43 status, output = self.target.run('timeout 3s thermald --no-daemon --loglevel=info')
44 regex_search = ".*thd_cdev_set_state.*106000"
45 regex_comp = re.compile(regex_search)
46 m = regex_comp.search(output)
47 self.assertTrue(m, msg='status and output: %s and %s' % (status, output))
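
test_thermald_emulation_mode above only checks /proc/config.gz for the option; enabling it is a kernel configuration change. A sketch of a kernel config fragment that could be added to the kernel recipe (the fragment name and how it is appended are illustrative; see the thermal_daemon test readme linked in the comment):

    # thermal-emulation.cfg -- illustrative kernel config fragment
    CONFIG_THERMAL_EMULATION=y
    # /proc/config.gz must exist for the check in test_thermald_emulation_mode
    CONFIG_IKCONFIG=y
    CONFIG_IKCONFIG_PROC=y
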
diff --git a/lib/oeqa/runtime/files/dldt-inference-engine/classification_sample.py b/lib/oeqa/runtime/files/dldt-inference-engine/classification_sample.py
new file mode 100644
index 00000000..1906e9fe
--- /dev/null
+++ b/lib/oeqa/runtime/files/dldt-inference-engine/classification_sample.py
@@ -0,0 +1,135 @@
1#!/usr/bin/env python3
2"""
3 Copyright (C) 2018-2019 Intel Corporation
4
5 Licensed under the Apache License, Version 2.0 (the "License");
6 you may not use this file except in compliance with the License.
7 You may obtain a copy of the License at
8
9 http://www.apache.org/licenses/LICENSE-2.0
10
11 Unless required by applicable law or agreed to in writing, software
12 distributed under the License is distributed on an "AS IS" BASIS,
13 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 See the License for the specific language governing permissions and
15 limitations under the License.
16"""
17from __future__ import print_function
18import sys
19import os
20from argparse import ArgumentParser, SUPPRESS
21import cv2
22import numpy as np
23import logging as log
24from time import time
25from openvino.inference_engine import IENetwork, IECore
26
27
28def build_argparser():
29 parser = ArgumentParser(add_help=False)
30 args = parser.add_argument_group('Options')
31 args.add_argument('-h', '--help', action='help', default=SUPPRESS, help='Show this help message and exit.')
32 args.add_argument("-m", "--model", help="Required. Path to an .xml file with a trained model.", required=True,
33 type=str)
34 args.add_argument("-i", "--input", help="Required. Path to a folder with images or path to image files",
35 required=True,
36 type=str, nargs="+")
37 args.add_argument("-l", "--cpu_extension",
38 help="Optional. Required for CPU custom layers. "
39 "MKLDNN (CPU)-targeted custom layers. Absolute path to a shared library with the"
40 " kernels implementations.", type=str, default=None)
41 args.add_argument("-d", "--device",
42 help="Optional. Specify the target device to infer on; CPU, GPU, FPGA, HDDL, MYRIAD or HETERO: is "
43 "acceptable. The sample will look for a suitable plugin for device specified. Default "
44 "value is CPU",
45 default="CPU", type=str)
46 args.add_argument("--labels", help="Optional. Path to a labels mapping file", default=None, type=str)
47 args.add_argument("-nt", "--number_top", help="Optional. Number of top results", default=10, type=int)
48
49 return parser
50
51
52def main():
53 log.basicConfig(format="[ %(levelname)s ] %(message)s", level=log.INFO, stream=sys.stdout)
54 args = build_argparser().parse_args()
55 model_xml = args.model
56 model_bin = os.path.splitext(model_xml)[0] + ".bin"
57
58 # Plugin initialization for specified device and load extensions library if specified
59 log.info("Creating Inference Engine")
60 ie = IECore()
61 if args.cpu_extension and 'CPU' in args.device:
62 ie.add_extension(args.cpu_extension, "CPU")
63 # Read IR
64 log.info("Loading network files:\n\t{}\n\t{}".format(model_xml, model_bin))
65 net = IENetwork(model=model_xml, weights=model_bin)
66
67 if "CPU" in args.device:
68 supported_layers = ie.query_network(net, "CPU")
69 not_supported_layers = [l for l in net.layers.keys() if l not in supported_layers]
70 if len(not_supported_layers) != 0:
71 log.error("Following layers are not supported by the plugin for specified device {}:\n {}".
72 format(args.device, ', '.join(not_supported_layers)))
73 log.error("Please try to specify cpu extensions library path in sample's command line parameters using -l "
74 "or --cpu_extension command line argument")
75 sys.exit(1)
76
77 assert len(net.inputs.keys()) == 1, "Sample supports only single input topologies"
78 assert len(net.outputs) == 1, "Sample supports only single output topologies"
79
80 log.info("Preparing input blobs")
81 input_blob = next(iter(net.inputs))
82 out_blob = next(iter(net.outputs))
83 net.batch_size = len(args.input)
84
85 # Read and pre-process input images
86 n, c, h, w = net.inputs[input_blob].shape
87 images = np.ndarray(shape=(n, c, h, w))
88 for i in range(n):
89 image = cv2.imread(args.input[i])
90 if image.shape[:-1] != (h, w):
91 log.warning("Image {} is resized from {} to {}".format(args.input[i], image.shape[:-1], (h, w)))
92 image = cv2.resize(image, (w, h))
93 image = image.transpose((2, 0, 1)) # Change data layout from HWC to CHW
94 images[i] = image
95 log.info("Batch size is {}".format(n))
96
97 # Loading model to the plugin
98 log.info("Loading model to the plugin")
99 exec_net = ie.load_network(network=net, device_name=args.device)
100
101 # Start sync inference
102 log.info("Starting inference in synchronous mode")
103 res = exec_net.infer(inputs={input_blob: images})
104
105 # Processing output blob
106 log.info("Processing output blob")
107 res = res[out_blob]
108 log.info("Top {} results: ".format(args.number_top))
109 if args.labels:
110 with open(args.labels, 'r') as f:
111 labels_map = [x.split(sep=' ', maxsplit=1)[-1].strip() for x in f]
112 else:
113 labels_map = None
114 classid_str = "classid"
115 probability_str = "probability"
116 for i, probs in enumerate(res):
117 probs = np.squeeze(probs)
118 top_ind = np.argsort(probs)[-args.number_top:][::-1]
119 print("Image {}\n".format(args.input[i]))
120 print(classid_str, probability_str)
121 print("{} {}".format('-' * len(classid_str), '-' * len(probability_str)))
122 for id in top_ind:
123 det_label = labels_map[id] if labels_map else "{}".format(id)
124 label_length = len(det_label)
125 space_num_before = (len(classid_str) - label_length) // 2
126 space_num_after = len(classid_str) - (space_num_before + label_length) + 2
127 space_num_before_prob = (len(probability_str) - len(str(probs[id]))) // 2
128 print("{}{}{}{}{:.7f}".format(' ' * space_num_before, det_label,
129 ' ' * space_num_after, ' ' * space_num_before_prob,
130 probs[id]))
131 print("\n")
132 log.info("This sample is an API example, for any performance measurements please use the dedicated benchmark_app tool\n")
133
134if __name__ == '__main__':
135 sys.exit(main() or 0)
diff --git a/lib/oeqa/runtime/miutils/dldtutils.py b/lib/oeqa/runtime/miutils/dldtutils.py
new file mode 100644
index 00000000..45bf2e12
--- /dev/null
+++ b/lib/oeqa/runtime/miutils/dldtutils.py
@@ -0,0 +1,3 @@
1
2def get_testdata_config(testdata, config):
3 return testdata.get(config)
diff --git a/lib/oeqa/runtime/miutils/targets/oeqatarget.py b/lib/oeqa/runtime/miutils/targets/oeqatarget.py
new file mode 100644
index 00000000..a9f7f1b4
--- /dev/null
+++ b/lib/oeqa/runtime/miutils/targets/oeqatarget.py
@@ -0,0 +1,11 @@
1
2class OEQATarget(object):
3
4 def __init__(self, target):
5 self.target = target
6
7 def run(self, cmd):
8 return self.target.run(cmd)
9
10 def copy_to(self, source, destination_dir):
11 self.target.copyTo(source, destination_dir)
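
A condensed sketch of how the runtime cases above use this adapter, mirroring DldtModelOptimizer.setUpClass(): the OEQA target (cls.tc.target) is wrapped so the miutils helpers only depend on run() and copy_to():

    from oeqa.runtime.miutils.targets.oeqatarget import OEQATarget
    from oeqa.runtime.miutils.tests.squeezenet_model_download_test import SqueezenetModelDownloadTest

    def make_download_helper(tc, work_dir='/tmp/mo/md'):
        # Wrap the OEQA target and hand it to a miutils helper; setup() creates
        # the work directory on the target, tear_down() removes it later.
        helper = SqueezenetModelDownloadTest(OEQATarget(tc.target), work_dir)
        helper.setup()
        return helper
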
diff --git a/lib/oeqa/runtime/miutils/tests/dldt_inference_engine_test.py b/lib/oeqa/runtime/miutils/tests/dldt_inference_engine_test.py
new file mode 100644
index 00000000..31bfb539
--- /dev/null
+++ b/lib/oeqa/runtime/miutils/tests/dldt_inference_engine_test.py
@@ -0,0 +1,56 @@
1import os
2script_path = os.path.dirname(os.path.realpath(__file__))
3files_path = os.path.join(script_path, '../../files/')
4
5class DldtInferenceEngineTest(object):
6 ie_input_files = {'ie_python_sample': 'classification_sample.py',
7 'input': 'chicky_512.png',
8 'input_download': 'https://raw.githubusercontent.com/opencv/opencv/master/samples/data/chicky_512.png',
9 'model': 'squeezenet_v1.1.xml'}
10
11 def __init__(self, target, work_dir):
12 self.target = target
13 self.work_dir = work_dir
14
15 def setup(self):
16 self.target.run('mkdir -p %s' % self.work_dir)
17 self.target.copy_to(os.path.join(files_path, 'dldt-inference-engine', self.ie_input_files['ie_python_sample']),
18 self.work_dir)
19 python_cmd = 'from openvino.inference_engine import IENetwork, IECore; ie = IECore(); print(ie.available_devices)'
20 __, output = self.target.run('python3 -c "%s"' % python_cmd)
21 self.available_devices = output
22
23 def tear_down(self):
24 self.target.run('rm -rf %s' % self.work_dir)
25
26 def test_check_if_openvino_device_available(self, device):
27 if device not in self.available_devices:
28 return False, self.available_devices
29 return True, self.available_devices
30
31 def test_can_download_input_file(self, proxy_port):
32 return self.target.run('cd %s; wget %s -e https_proxy=%s' %
33 (self.work_dir,
34 self.ie_input_files['input_download'],
35 proxy_port))
36
37 def test_dldt_ie_classification_with_device(self, device, ir_files_dir):
38 return self.target.run('classification_sample_async -d %s -i %s -m %s' %
39 (device,
40 os.path.join(self.work_dir, self.ie_input_files['input']),
41 os.path.join(ir_files_dir, self.ie_input_files['model'])))
42
43 def test_dldt_ie_classification_python_api_with_device(self, device, ir_files_dir, extension=''):
44 if extension:
45 return self.target.run('python3 %s -d %s -i %s -m %s -l %s' %
46 (os.path.join(self.work_dir, self.ie_input_files['ie_python_sample']),
47 device,
48 os.path.join(self.work_dir, self.ie_input_files['input']),
49 os.path.join(ir_files_dir, self.ie_input_files['model']),
50 extension))
51 else:
52 return self.target.run('python3 %s -d %s -i %s -m %s' %
53 (os.path.join(self.work_dir, self.ie_input_files['ie_python_sample']),
54 device,
55 os.path.join(self.work_dir, self.ie_input_files['input']),
56 os.path.join(ir_files_dir, self.ie_input_files['model'])))
diff --git a/lib/oeqa/runtime/miutils/tests/dldt_model_optimizer_test.py b/lib/oeqa/runtime/miutils/tests/dldt_model_optimizer_test.py
new file mode 100644
index 00000000..7d3db15b
--- /dev/null
+++ b/lib/oeqa/runtime/miutils/tests/dldt_model_optimizer_test.py
@@ -0,0 +1,23 @@
1import os
2
3class DldtModelOptimizerTest(object):
4 mo_input_files = {'model': 'squeezenet_v1.1.caffemodel',
5 'prototxt': 'deploy.prototxt'}
6 mo_exe = 'mo.py'
7
8 def __init__(self, target, work_dir):
9 self.target = target
10 self.work_dir = work_dir
11
12 def setup(self):
13 self.target.run('mkdir -p %s' % self.work_dir)
14
15 def tear_down(self):
16 self.target.run('rm -rf %s' % self.work_dir)
17
18 def test_dldt_mo_can_create_ir(self, mo_exe_dir, mo_files_dir):
19 return self.target.run('python3 %s --input_model %s --input_proto %s --output_dir %s --data_type FP16' %
20 (os.path.join(mo_exe_dir, self.mo_exe),
21 os.path.join(mo_files_dir, self.mo_input_files['model']),
22 os.path.join(mo_files_dir, self.mo_input_files['prototxt']),
23 self.work_dir))
diff --git a/lib/oeqa/runtime/miutils/tests/mkl_dnn_test.py b/lib/oeqa/runtime/miutils/tests/mkl_dnn_test.py
new file mode 100644
index 00000000..869a4cbe
--- /dev/null
+++ b/lib/oeqa/runtime/miutils/tests/mkl_dnn_test.py
@@ -0,0 +1,55 @@
1import os
2class MkldnnTest(object):
3 mkldnn_target_test_filename = 'mkl-dnn-c'
4
5 def __init__(self, target):
6 self.target = target
7
8 def tear_down(self):
9 self.target.run('rm /tmp/%s' % self.mkldnn_target_test_filename)
10
11 def test_mkldnn_can_compile_and_execute(self):
12 mkldnn_src_dir = '/usr/src/debug/onednn/'
13 mkldnn_src_test_filename = 'api.c'
14 mkldnn_src_test_file = ''
15
16 (__, output) = self.target.run('cd %s; find -name %s' % (mkldnn_src_dir, mkldnn_src_test_filename))
17 if 'No such file or directory' in output:
18 return -1, output
19
20 mkldnn_src_test_file = os.path.join(mkldnn_src_dir, output)
21 (status, output) = self.target.run('gcc %s -o /tmp/%s -ldnnl' % (mkldnn_src_test_file, self.mkldnn_target_test_filename))
22 if status:
23 return status, output
24
25 (status, output) = self.target.run('cd /tmp; ./%s' % self.mkldnn_target_test_filename)
26 return status, output
27
28 def test_mkldnn_benchdnn_package_available(self):
29 (status, output) = self.target.run('ls /usr/bin/mkl-dnn/tests/benchdnn')
30 return status, output
31
32 def _run_mkldnn_benchdnn_test(self, cmd):
33 (status, output) = self.target.run('cd /usr/bin/mkl-dnn/tests/benchdnn; %s' % cmd)
34 return status, output
35
36 def test_mkldnn_conv_api(self):
37 return self._run_mkldnn_benchdnn_test('./benchdnn --conv --batch=inputs/conv/test_conv_3d')
38
39 def test_mkldnn_bnorm_api(self):
40 return self._run_mkldnn_benchdnn_test('./benchdnn --bnorm --batch=inputs/bnorm/test_bnorm_regressions')
41
42 def test_mkldnn_deconv_api(self):
43 return self._run_mkldnn_benchdnn_test('./benchdnn --deconv --batch=inputs/deconv/test_deconv_bfloat16')
44
45 def test_mkldnn_ip_api(self):
46 return self._run_mkldnn_benchdnn_test('./benchdnn --ip --batch=inputs/ip/test_ip_bfloat16')
47
48 def test_mkldnn_reorder_api(self):
49 return self._run_mkldnn_benchdnn_test('./benchdnn --reorder --batch=inputs/reorder/test_reorder_bfloat16')
50
51 def test_mkldnn_rnn_api(self):
52 return self._run_mkldnn_benchdnn_test('./benchdnn --rnn --batch=inputs/rnn/test_rnn_all')
53
54 def test_mkldnn_shuffle_api(self):
55 return self._run_mkldnn_benchdnn_test('./benchdnn --shuffle --batch=inputs/shuffle/test_shuffle_bfloat16')
\ No newline at end of file
diff --git a/lib/oeqa/runtime/miutils/tests/squeezenet_model_download_test.py b/lib/oeqa/runtime/miutils/tests/squeezenet_model_download_test.py
new file mode 100644
index 00000000..a3e46a0a
--- /dev/null
+++ b/lib/oeqa/runtime/miutils/tests/squeezenet_model_download_test.py
@@ -0,0 +1,25 @@
1class SqueezenetModelDownloadTest(object):
2 download_files = {'squeezenet1.1.prototxt': 'https://raw.githubusercontent.com/DeepScale/SqueezeNet/a47b6f13d30985279789d08053d37013d67d131b/SqueezeNet_v1.1/deploy.prototxt',
3 'squeezenet1.1.caffemodel': 'https://github.com/DeepScale/SqueezeNet/raw/a47b6f13d30985279789d08053d37013d67d131b/SqueezeNet_v1.1/squeezenet_v1.1.caffemodel'}
4
5 def __init__(self, target, work_dir):
6 self.target = target
7 self.work_dir = work_dir
8
9 def setup(self):
10 self.target.run('mkdir -p %s' % self.work_dir)
11
12 def tear_down(self):
13 self.target.run('rm -rf %s' % self.work_dir)
14
15 def test_can_download_squeezenet_model(self, proxy_port):
16 return self.target.run('cd %s; wget %s -e https_proxy=%s' %
17 (self.work_dir,
18 self.download_files['squeezenet1.1.caffemodel'],
19 proxy_port))
20
21 def test_can_download_squeezenet_prototxt(self, proxy_port):
22 return self.target.run('cd %s; wget %s -e https_proxy=%s' %
23 (self.work_dir,
24 self.download_files['squeezenet1.1.prototxt'],
25 proxy_port))