author      Michael Wood <michael.g.wood@intel.com>                2016-11-23 12:24:36 +0000
committer   Richard Purdie <richard.purdie@linuxfoundation.org>    2016-11-30 15:48:06 +0000
commit      cfdc5b853e7ae299fa9f539a017b4fab12553329 (patch)
tree        cd06be59907d54c2f08542e082c0aaac09e7286c
parent      0e503bc5b2991b945c282152eb700e9dd1d47366 (diff)
download    poky-cfdc5b853e7ae299fa9f539a017b4fab12553329.tar.gz
oe-selftest: toaster Remove redundant Toaster test
This test has been ported to be run as part of Toaster's own tests.
(From OE-Core rev: f6366781a34dbdb8ec2d73f4fb36359bc15c4f42)
Signed-off-by: Michael Wood <michael.g.wood@intel.com>
Signed-off-by: Ross Burton <ross.burton@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
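Note: Toaster's own tests are ordinary Django unit tests, so the ported checks would typically be run with Django's test runner from the Toaster source tree. A minimal sketch (the app label is an illustrative assumption, not taken from this commit):

    cd bitbake/lib/toaster
    ./manage.py test          # run Toaster's Django test suite
    ./manage.py test orm      # or limit the run to a single app, e.g. the ORM models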
-rw-r--r--   meta/lib/oeqa/selftest/_toaster.py   320
1 file changed, 0 insertions, 320 deletions
diff --git a/meta/lib/oeqa/selftest/_toaster.py b/meta/lib/oeqa/selftest/_toaster.py
deleted file mode 100644
index 15ea9df9ef..0000000000
--- a/meta/lib/oeqa/selftest/_toaster.py
+++ /dev/null
@@ -1,320 +0,0 @@
import unittest
import os
import sys
import shlex, subprocess
import urllib.request, urllib.parse, urllib.error, time, getpass, re, json

import oeqa.utils.ftools as ftools
from oeqa.selftest.base import oeSelfTest
from oeqa.utils.commands import runCmd

# Make the Toaster Django project importable and point Django at its settings
# so that the ORM models used below can be queried directly.
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../../', 'bitbake/lib/toaster')))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "toastermain.settings")

import toastermain.settings
from django.db.models import Q
from orm.models import *
from oeqa.utils.decorators import testcase

class ToasterSetup(oeSelfTest):

    # Return the value assigned to 'var' in the given recipe file.
    def recipe_parse(self, file_path, var):
        for line in open(file_path, 'r'):
            if line.find(var) > -1:
                val = line.split(" = ")[1].replace("\"", "").strip()
                return val

    # Paths of the form "prefix:prefix:/real/path" keep only the real path component.
    def fix_file_path(self, file_path):
        if ":" in file_path:
            file_path = file_path.split(":")[2]
        return file_path

class Toaster_DB_Tests(ToasterSetup):

    # Check if build name is unique - tc_id=795
    @testcase(795)
    def test_Build_Unique_Name(self):
        all_builds = Build.objects.all().count()
        distinct_builds = Build.objects.values('id').distinct().count()
        self.assertEqual(distinct_builds, all_builds, msg = 'Build name is not unique')

    # Check if build cooker log path is unique - tc_id=819
    @testcase(819)
    def test_Build_Unique_Cooker_Log_Path(self):
        distinct_path = Build.objects.values('cooker_log_path').distinct().count()
        total_builds = Build.objects.values('id').count()
        self.assertEqual(distinct_path, total_builds, msg = 'Build cooker log path is not unique')

    # Check if task order is unique for one build - tc=824
    @testcase(824)
    def test_Task_Unique_Order(self):
        builds = Build.objects.values('id')
        cnt_err = []
        for build in builds:
            total_task_order = Task.objects.filter(build = build['id']).values('order').count()
            distinct_task_order = Task.objects.filter(build = build['id']).values('order').distinct().count()
            if (total_task_order != distinct_task_order):
                cnt_err.append(build['id'])
        self.assertEqual(len(cnt_err), 0, msg = 'Errors for build id: %s' % cnt_err)

    # Check task order sequence for one build - tc=825
    @testcase(825)
    def test_Task_Order_Sequence(self):
        builds = Build.objects.values('id')
        cnt_err = []
        for build in builds:
            tasks = Task.objects.filter(Q(build = build['id']), ~Q(order = None), ~Q(task_name__contains = '_setscene')).values('id', 'order').order_by("order")
            cnt_tasks = 0
            for task in tasks:
                cnt_tasks += 1
                if (task['order'] != cnt_tasks):
                    cnt_err.append(task['id'])
        self.assertEqual(len(cnt_err), 0, msg = 'Errors for task id: %s' % cnt_err)

    # Check if disk_io matches the difference between EndTimeIO and StartTimeIO in build stats - tc=828
    ### this needs to be updated ###
    #def test_Task_Disk_IO_TC828(self):

    # Check if outcome = 2 (SSTATE) then sstate_result must be 3 (RESTORED) - tc=832
    @testcase(832)
    def test_Task_If_Outcome_2_Sstate_Result_Must_Be_3(self):
        tasks = Task.objects.filter(outcome = 2).values('id', 'sstate_result')
        cnt_err = []
        for task in tasks:
            if (task['sstate_result'] != 3):
                cnt_err.append(task['id'])
        self.assertEqual(len(cnt_err), 0, msg = 'Errors for task id: %s' % cnt_err)

    # Check if outcome = 1 (COVERED) or 3 (EXISTING) then sstate_result must be 0 (SSTATE_NA) - tc=833
    @testcase(833)
    def test_Task_If_Outcome_1_3_Sstate_Result_Must_Be_0(self):
        tasks = Task.objects.filter(outcome__in = (1, 3)).values('id', 'sstate_result')
        cnt_err = []
        for task in tasks:
            if (task['sstate_result'] != 0):
                cnt_err.append(task['id'])
        self.assertEqual(len(cnt_err), 0, msg = 'Errors for task id: %s' % cnt_err)

    # Check if outcome is 0 (SUCCESS) or 4 (FAILED) then sstate_result must be 0 (NA), 1 (MISS) or 2 (FAILED) - tc=834
    @testcase(834)
    def test_Task_If_Outcome_0_4_Sstate_Result_Must_Be_0_1_2(self):
        tasks = Task.objects.filter(outcome__in = (0, 4)).values('id', 'sstate_result')
        cnt_err = []
        for task in tasks:
            if (task['sstate_result'] not in [0, 1, 2]):
                cnt_err.append(task['id'])
        self.assertEqual(len(cnt_err), 0, msg = 'Errors for task id: %s' % cnt_err)

    # Check if task_executed = TRUE (1), script_type must be 0 (CODING_NA), 2 (CODING_PYTHON), 3 (CODING_SHELL) - tc=891
    @testcase(891)
    def test_Task_If_Task_Executed_True_Script_Type_0_2_3(self):
        tasks = Task.objects.filter(task_executed = 1).values('id', 'script_type')
        cnt_err = []
        for task in tasks:
            if (task['script_type'] not in [0, 2, 3]):
                cnt_err.append(task['id'])
        self.assertEqual(len(cnt_err), 0, msg = 'Errors for task id: %s' % cnt_err)

    # Check if task_executed = TRUE (1), outcome must be 0 (SUCCESS) or 4 (FAILED) - tc=836
    @testcase(836)
    def test_Task_If_Task_Executed_True_Outcome_0_4(self):
        tasks = Task.objects.filter(task_executed = 1).values('id', 'outcome')
        cnt_err = []
        for task in tasks:
            if (task['outcome'] not in [0, 4]):
                cnt_err.append(task['id'])
        self.assertEqual(len(cnt_err), 0, msg = 'Errors for task id: %s' % cnt_err)

    # Check if task_executed = FALSE (0), script_type must be 0 - tc=890
    @testcase(890)
    def test_Task_If_Task_Executed_False_Script_Type_0(self):
        tasks = Task.objects.filter(task_executed = 0).values('id', 'script_type')
        cnt_err = []
        for task in tasks:
            if (task['script_type'] != 0):
                cnt_err.append(task['id'])
        self.assertEqual(len(cnt_err), 0, msg = 'Errors for task id: %s' % cnt_err)

    # Check if task_executed = FALSE (0) and build outcome = SUCCEEDED (0), task outcome must be 1 (COVERED), 2 (CACHED), 3 (PREBUILT), 5 (EMPTY) - tc=837
    @testcase(837)
    def test_Task_If_Task_Executed_False_Outcome_1_2_3_5(self):
        builds = Build.objects.filter(outcome = 0).values('id')
        cnt_err = []
        for build in builds:
            tasks = Task.objects.filter(build = build['id'], task_executed = 0).values('id', 'outcome')
            for task in tasks:
                if (task['outcome'] not in [1, 2, 3, 5]):
                    cnt_err.append(task['id'])
        self.assertEqual(len(cnt_err), 0, msg = 'Errors for task id: %s' % cnt_err)

    # Key verification - tc=888
    @testcase(888)
    def test_Target_Installed_Package(self):
        rows = Target_Installed_Package.objects.values('id', 'target_id', 'package_id')
        cnt_err = []
        for row in rows:
            target = Target.objects.filter(id = row['target_id']).values('id')
            package = Package.objects.filter(id = row['package_id']).values('id')
            if (not target or not package):
                cnt_err.append(row['id'])
        self.assertEqual(len(cnt_err), 0, msg = 'Errors for target installed package id: %s' % cnt_err)

    # Key verification - tc=889
    @testcase(889)
    def test_Task_Dependency(self):
        rows = Task_Dependency.objects.values('id', 'task_id', 'depends_on_id')
        cnt_err = []
        for row in rows:
            task_id = Task.objects.filter(id = row['task_id']).values('id')
            depends_on_id = Task.objects.filter(id = row['depends_on_id']).values('id')
            if (not task_id or not depends_on_id):
                cnt_err.append(row['id'])
        self.assertEqual(len(cnt_err), 0, msg = 'Errors for task dependency id: %s' % cnt_err)

    # If is_image=true and orm_build.outcome=0, check that the build target file_name
    # is populated, that the file exists and that its size matches the file_size value
    ### Need to add the tc in the test run
    @testcase(1037)
    def test_Target_File_Name_Populated(self):
        builds = Build.objects.filter(outcome = 0).values('id')
        for build in builds:
            targets = Target.objects.filter(build_id = build['id'], is_image = 1).values('id')
            for target in targets:
                target_files = Target_Image_File.objects.filter(target_id = target['id']).values('id', 'file_name', 'file_size')
                cnt_err = []
                for file_info in target_files:
                    target_id = file_info['id']
                    target_file_name = file_info['file_name']
                    target_file_size = file_info['file_size']
                    if (not target_file_name or not target_file_size):
                        cnt_err.append(target_id)
                    else:
                        if (not os.path.exists(target_file_name)):
                            cnt_err.append(target_id)
                        else:
                            if (os.path.getsize(target_file_name) != target_file_size):
                                cnt_err.append(target_id)
                self.assertEqual(len(cnt_err), 0, msg = 'Errors for target image file id: %s' % cnt_err)

    # Key verification - tc=884
    @testcase(884)
    def test_Package_Dependency(self):
        cnt_err = []
        deps = Package_Dependency.objects.values('id', 'package_id', 'depends_on_id')
        for dep in deps:
            if (dep['package_id'] == dep['depends_on_id']):
                cnt_err.append(dep['id'])
        self.assertEqual(len(cnt_err), 0, msg = 'Errors for package dependency id: %s' % cnt_err)

    # Recipe key verification, recipe name does not depend on a recipe having the same name - tc=883
    @testcase(883)
    def test_Recipe_Dependency(self):
        deps = Recipe_Dependency.objects.values('id', 'recipe_id', 'depends_on_id')
        cnt_err = []
        for dep in deps:
            if (not dep['recipe_id'] or not dep['depends_on_id']):
                cnt_err.append(dep['id'])
            else:
                name = Recipe.objects.filter(id = dep['recipe_id']).values('name')
                dep_name = Recipe.objects.filter(id = dep['depends_on_id']).values('name')
                if (name == dep_name):
                    cnt_err.append(dep['id'])
        self.assertEqual(len(cnt_err), 0, msg = 'Errors for recipe dependency id: %s' % cnt_err)

    # Check if package name does not start with a number (0-9) - tc=846
    @testcase(846)
    def test_Package_Name_For_Number(self):
        packages = Package.objects.filter(~Q(size = -1)).values('id', 'name')
        cnt_err = []
        for package in packages:
            if (package['name'][0].isdigit() is True):
                cnt_err.append(package['id'])
        self.assertEqual(len(cnt_err), 0, msg = 'Errors for package id: %s' % cnt_err)

    # Check if package version starts with a number (0-9) - tc=847
    @testcase(847)
    def test_Package_Version_Starts_With_Number(self):
        packages = Package.objects.filter(~Q(size = -1)).values('id', 'version')
        cnt_err = []
        for package in packages:
            if (package['version'][0].isdigit() is False):
                cnt_err.append(package['id'])
        self.assertEqual(len(cnt_err), 0, msg = 'Errors for package id: %s' % cnt_err)

    # Check if package revision starts with 'r' - tc=848
    @testcase(848)
    def test_Package_Revision_Starts_With_r(self):
        packages = Package.objects.filter(~Q(size = -1)).values('id', 'revision')
        cnt_err = []
        for package in packages:
            if (package['revision'].startswith("r") is False):
                cnt_err.append(package['id'])
        self.assertEqual(len(cnt_err), 0, msg = 'Errors for package id: %s' % cnt_err)

    # Check the validity of the package build_id
    ### TC must be added in test run
    @testcase(1038)
    def test_Package_Build_Id(self):
        packages = Package.objects.filter(~Q(size = -1)).values('id', 'build_id')
        cnt_err = []
        for package in packages:
            build_id = Build.objects.filter(id = package['build_id']).values('id')
            if (not build_id):
                cnt_err.append(package['id'])
        self.assertEqual(len(cnt_err), 0, msg = 'Errors for package id: %s' % cnt_err)

    # Check the validity of package recipe_id
    ### TC must be added in test run
    @testcase(1039)
    def test_Package_Recipe_Id(self):
        packages = Package.objects.filter(~Q(size = -1)).values('id', 'recipe_id')
        cnt_err = []
        for package in packages:
            recipe_id = Recipe.objects.filter(id = package['recipe_id']).values('id')
            if (not recipe_id):
                cnt_err.append(package['id'])
        self.assertEqual(len(cnt_err), 0, msg = 'Errors for package id: %s' % cnt_err)

    # Check if package installed_size field is not null
    ### TC must be added in test run
    @testcase(1040)
    def test_Package_Installed_Size_Not_NULL(self):
        packages = Package.objects.filter(installed_size__isnull = True).values('id')
        cnt_err = []
        for package in packages:
            cnt_err.append(package['id'])
        self.assertEqual(len(cnt_err), 0, msg = 'Errors for package id: %s' % cnt_err)

    # Check that requests to all layer index URLs return code 200 - tc=843
    @testcase(843)
    def test_Layers_Requests_Exit_Code(self):
        layers = Layer.objects.values('id', 'layer_index_url')
        cnt_err = []
        for layer in layers:
            resp = urllib.request.urlopen(layer['layer_index_url'])
            if (resp.getcode() != 200):
                cnt_err.append(layer['id'])
        self.assertEqual(len(cnt_err), 0, msg = 'Errors for layer id: %s' % cnt_err)

    # Check if the Django server starts regardless of the timezone set on the machine - tc=905
    @testcase(905)
    def test_Start_Django_Timezone(self):
        current_path = os.getcwd()
        zonefilelist = []
        ZONEINFOPATH = '/usr/share/zoneinfo/'
        os.chdir("../bitbake/lib/toaster/")
        cnt_err = []
        for filename in os.listdir(ZONEINFOPATH):
            if os.path.isfile(os.path.join(ZONEINFOPATH, filename)):
                zonefilelist.append(filename)
        # Only the first few zoneinfo files are tried, to keep the test short
        for k in range(len(zonefilelist)):
            if k <= 5:
                files = zonefilelist[k]
                os.system("export TZ="+str(files)+"; python manage.py runserver > /dev/null 2>&1 &")
                time.sleep(3)
                pid = subprocess.check_output("ps aux | grep '[/u]sr/bin/python manage.py runserver' | awk '{print $2}'", shell = True)
                if pid:
                    os.system("kill -9 "+pid.decode().strip())
                else:
                    cnt_err.append(zonefilelist[k])
        self.assertEqual(len(cnt_err), 0, msg = 'Errors: Django server does not start with timezone: %s' % cnt_err)
        os.chdir(current_path)