author    Yeoh Ee Peng <ee.peng.yeoh@intel.com>    2019-12-04 11:17:42 +0800
committer Anuj Mittal <anuj.mittal@intel.com>      2019-12-10 13:31:24 +0800
commit    1e514838dfad1d2339f896f850425bc33621d297 (patch)
tree      f93a016de883f4af953592e7c635cae6e6f56308 /lib/oeqa/runtime/cases
parent    f39ad91524768c729cd68ce367dacf9b4dfc4cf7 (diff)
oeqa/runtime/cases/dldt: Enable inference engine and model optimizer tests
Add sanity tests for inference engine:
- test inference engine c/cpp shared library
- test inference engine python api
- test inference engine cpu, gpu, myriad plugin

Add sanity tests for model optimizer:
- test model optimizer can generate ir

Licenses:
- classification_sample.py
  license: Apache 2.0
  source: <install_root>/deployment_tools/inference_engine/samples/*

Signed-off-by: Yeoh Ee Peng <ee.peng.yeoh@intel.com>
Signed-off-by: Anuj Mittal <anuj.mittal@intel.com>
Diffstat (limited to 'lib/oeqa/runtime/cases')
-rw-r--r--  lib/oeqa/runtime/cases/dldt_inference_engine.py  101
-rw-r--r--  lib/oeqa/runtime/cases/dldt_model_optimizer.py    38
2 files changed, 139 insertions(+), 0 deletions(-)
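Note: both new cases read their configuration from the bitbake test data at runtime, so the build that runs them needs the two variables referenced in the code below set in local.conf. A minimal sketch, assuming the stock testimage flow and an image that already installs the dldt packages; the proxy and path values are placeholders, not defaults:

# Proxy the target uses to download the squeezenet model and prototxt
DLDT_PIP_PROXY = "proxy.server:port"
# Directory on the target that contains the model optimizer entry point (mo.py)
DLDT_MO_EXE_DIR = "/path/to/model-optimizer"
# Pick up the new runtime cases with the usual testimage flow
INHERIT += "testimage"
TEST_SUITES_append = " dldt_model_optimizer dldt_inference_engine"

With that in place, bitbake <image> -c testimage should run the cases below; each case skips itself, rather than failing, when its variable is missing.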
diff --git a/lib/oeqa/runtime/cases/dldt_inference_engine.py b/lib/oeqa/runtime/cases/dldt_inference_engine.py
new file mode 100644
index 00000000..2e969975
--- /dev/null
+++ b/lib/oeqa/runtime/cases/dldt_inference_engine.py
@@ -0,0 +1,101 @@
from oeqa.runtime.case import OERuntimeTestCase
from oeqa.runtime.decorator.package import OEHasPackage
from oeqa.core.decorator.depends import OETestDepends
from oeqa.runtime.miutils.targets.oeqatarget import OEQATarget
from oeqa.runtime.miutils.tests.squeezenet_model_download_test import SqueezenetModelDownloadTest
from oeqa.runtime.miutils.tests.dldt_model_optimizer_test import DldtModelOptimizerTest
from oeqa.runtime.miutils.tests.dldt_inference_engine_test import DldtInferenceEngineTest
from oeqa.runtime.miutils.dldtutils import get_testdata_config

class DldtInferenceEngine(OERuntimeTestCase):

    @classmethod
    def setUpClass(cls):
        cls.sqn_download = SqueezenetModelDownloadTest(OEQATarget(cls.tc.target), '/tmp/ie/md')
        cls.sqn_download.setup()
        cls.dldt_mo = DldtModelOptimizerTest(OEQATarget(cls.tc.target), '/tmp/ie/ir')
        cls.dldt_mo.setup()
        cls.dldt_ie = DldtInferenceEngineTest(OEQATarget(cls.tc.target), '/tmp/ie/inputs')
        cls.dldt_ie.setup()
        cls.ir_files_dir = cls.dldt_mo.work_dir

    @classmethod
    def tearDownClass(cls):
        cls.dldt_ie.tear_down()
        cls.dldt_mo.tear_down()
        cls.sqn_download.tear_down()

    @OEHasPackage(['dldt-model-optimizer'])
    @OEHasPackage(['wget'])
    def test_dldt_ie_can_create_ir_and_download_input(self):
        proxy_port = get_testdata_config(self.tc.td, 'DLDT_PIP_PROXY')
        if not proxy_port:
            self.skipTest('Need to configure bitbake configuration (DLDT_PIP_PROXY="proxy.server:port").')
        (status, output) = self.sqn_download.test_can_download_squeezenet_model(proxy_port)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
        (status, output) = self.sqn_download.test_can_download_squeezenet_prototxt(proxy_port)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

        mo_exe_dir = get_testdata_config(self.tc.td, 'DLDT_MO_EXE_DIR')
        if not mo_exe_dir:
            self.skipTest('Need to configure bitbake configuration (DLDT_MO_EXE_DIR="directory_to_mo.py").')
        mo_files_dir = self.sqn_download.work_dir
        (status, output) = self.dldt_mo.test_dldt_mo_can_create_ir(mo_exe_dir, mo_files_dir)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

        (status, output) = self.dldt_ie.test_can_download_input_file(proxy_port)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

    @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
    @OEHasPackage(['dldt-inference-engine'])
    @OEHasPackage(['dldt-inference-engine-samples'])
    def test_dldt_ie_classification_with_cpu(self):
        (status, output) = self.dldt_ie.test_dldt_ie_classification_with_device('CPU', self.ir_files_dir)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

    @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
    @OEHasPackage(['dldt-inference-engine'])
    @OEHasPackage(['dldt-inference-engine-samples'])
    @OEHasPackage(['intel-compute-runtime'])
    @OEHasPackage(['opencl-icd-loader'])
    def test_dldt_ie_classification_with_gpu(self):
        (status, output) = self.dldt_ie.test_dldt_ie_classification_with_device('GPU', self.ir_files_dir)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

    @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
    @OEHasPackage(['dldt-inference-engine'])
    @OEHasPackage(['dldt-inference-engine-samples'])
    @OEHasPackage(['dldt-inference-engine-vpu-firmware'])
    def test_dldt_ie_classification_with_myriad(self):
        (status, output) = self.dldt_ie.test_dldt_ie_classification_with_device('MYRIAD', self.ir_files_dir)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

    @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
    @OEHasPackage(['dldt-inference-engine'])
    @OEHasPackage(['dldt-inference-engine-python3'])
    @OEHasPackage(['python3-opencv'])
    @OEHasPackage(['python3-numpy'])
    def test_dldt_ie_classification_python_api_with_cpu(self):
        (status, output) = self.dldt_ie.test_dldt_ie_classification_python_api_with_device('CPU', self.ir_files_dir, 'libcpu_extension.so')
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

    @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
    @OEHasPackage(['dldt-inference-engine'])
    @OEHasPackage(['dldt-inference-engine-python3'])
    @OEHasPackage(['intel-compute-runtime'])
    @OEHasPackage(['opencl-icd-loader'])
    @OEHasPackage(['python3-opencv'])
    @OEHasPackage(['python3-numpy'])
    def test_dldt_ie_classification_python_api_with_gpu(self):
        (status, output) = self.dldt_ie.test_dldt_ie_classification_python_api_with_device('GPU', self.ir_files_dir)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

    @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
    @OEHasPackage(['dldt-inference-engine'])
    @OEHasPackage(['dldt-inference-engine-python3'])
    @OEHasPackage(['dldt-inference-engine-vpu-firmware'])
    @OEHasPackage(['python3-opencv'])
    @OEHasPackage(['python3-numpy'])
    def test_dldt_ie_classification_python_api_with_myriad(self):
        (status, output) = self.dldt_ie.test_dldt_ie_classification_python_api_with_device('MYRIAD', self.ir_files_dir)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
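The DldtInferenceEngineTest helper exercised above lives under lib/oeqa/runtime/miutils/tests/ and is not part of this diff. For illustration only, a hypothetical sketch of what a device-parameterised classification check could look like, assuming the classification_sample binary from the samples package is on the target's PATH and takes the usual -m/-i/-d options; the class, file names and paths below are assumptions, not the helper's real API:

import os

class ClassificationCheckSketch(object):
    # Hypothetical stand-in for DldtInferenceEngineTest (illustration only).
    def __init__(self, target, work_dir='/tmp/ie/inputs'):
        self.target = target      # object exposing run(cmd) -> (status, output)
        self.work_dir = work_dir  # where the downloaded input image is kept

    def classification_with_device(self, device, ir_files_dir):
        # Assumes the model optimizer step already wrote the squeezenet IR
        # (.xml/.bin pair) into ir_files_dir and an input image was downloaded.
        model_xml = os.path.join(ir_files_dir, 'squeezenet_v1.1.xml')
        input_img = os.path.join(self.work_dir, 'input.bmp')
        cmd = 'classification_sample -m %s -i %s -d %s' % (model_xml, input_img, device)
        return self.target.run(cmd)

Each plugin test above only varies the device argument (CPU, GPU or MYRIAD), which is why they can all depend on the single IR-creation/download fixture test.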
diff --git a/lib/oeqa/runtime/cases/dldt_model_optimizer.py b/lib/oeqa/runtime/cases/dldt_model_optimizer.py
new file mode 100644
index 00000000..736ea661
--- /dev/null
+++ b/lib/oeqa/runtime/cases/dldt_model_optimizer.py
@@ -0,0 +1,38 @@
from oeqa.runtime.case import OERuntimeTestCase
from oeqa.runtime.decorator.package import OEHasPackage
from oeqa.runtime.miutils.targets.oeqatarget import OEQATarget
from oeqa.runtime.miutils.tests.squeezenet_model_download_test import SqueezenetModelDownloadTest
from oeqa.runtime.miutils.tests.dldt_model_optimizer_test import DldtModelOptimizerTest
from oeqa.runtime.miutils.dldtutils import get_testdata_config

class DldtModelOptimizer(OERuntimeTestCase):

    @classmethod
    def setUpClass(cls):
        cls.sqn_download = SqueezenetModelDownloadTest(OEQATarget(cls.tc.target), '/tmp/mo/md')
        cls.sqn_download.setup()
        cls.dldt_mo = DldtModelOptimizerTest(OEQATarget(cls.tc.target), '/tmp/mo/ir')
        cls.dldt_mo.setup()

    @classmethod
    def tearDownClass(cls):
        cls.dldt_mo.tear_down()
        cls.sqn_download.tear_down()

    @OEHasPackage(['dldt-model-optimizer'])
    @OEHasPackage(['wget'])
    def test_dldt_mo_can_create_ir(self):
        proxy_port = get_testdata_config(self.tc.td, 'DLDT_PIP_PROXY')
        if not proxy_port:
            self.skipTest('Need to configure bitbake configuration (DLDT_PIP_PROXY="proxy.server:port").')
        (status, output) = self.sqn_download.test_can_download_squeezenet_model(proxy_port)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
        (status, output) = self.sqn_download.test_can_download_squeezenet_prototxt(proxy_port)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

        mo_exe_dir = get_testdata_config(self.tc.td, 'DLDT_MO_EXE_DIR')
        if not mo_exe_dir:
            self.skipTest('Need to configure bitbake configuration (DLDT_MO_EXE_DIR="directory_to_mo.py").')
        mo_files_dir = self.sqn_download.work_dir
        (status, output) = self.dldt_mo.test_dldt_mo_can_create_ir(mo_exe_dir, mo_files_dir)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
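Similarly, DldtModelOptimizerTest.test_dldt_mo_can_create_ir is defined under lib/oeqa/runtime/miutils/tests/ rather than in this diff. As a rough illustration of the conversion it wraps, a sketch that composes an mo.py call for the downloaded squeezenet files; the flag names are standard Model Optimizer options, but the helper and the exact file names are assumptions:

import os

def build_mo_command_sketch(mo_exe_dir, mo_files_dir, output_dir='/tmp/mo/ir'):
    # Illustrative only: an mo.py invocation along the lines of what the
    # helper presumably runs on the target. DLDT_MO_EXE_DIR points at the
    # directory holding mo.py; mo_files_dir holds the downloaded caffemodel
    # and prototxt from SqueezenetModelDownloadTest.
    mo_py = os.path.join(mo_exe_dir, 'mo.py')
    model = os.path.join(mo_files_dir, 'squeezenet_v1.1.caffemodel')
    proto = os.path.join(mo_files_dir, 'squeezenet_v1.1.prototxt')
    return ('python3 %s --input_model %s --input_proto %s --output_dir %s'
            % (mo_py, model, proto, output_dir))

The assertEqual above only checks the exit status; the generated .xml/.bin IR pair in the work directory is what the inference engine cases then pick up via cls.ir_files_dir.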