diff options
author | Adrian Dudau <adrian.dudau@enea.com> | 2014-06-26 14:36:22 +0200 |
---|---|---|
committer | Adrian Dudau <adrian.dudau@enea.com> | 2014-06-26 15:32:53 +0200 |
commit | f4cf9fe05bb3f32fabea4e54dd92d368967a80da (patch) | |
tree | 487180fa9866985ea7b28e625651765d86f515c3 /meta/classes/insane.bbclass | |
download | poky-f4cf9fe05bb3f32fabea4e54dd92d368967a80da.tar.gz |
initial commit for Enea Linux 4.0
Migrated from the internal git server on the daisy-enea branch
Signed-off-by: Adrian Dudau <adrian.dudau@enea.com>
Diffstat (limited to 'meta/classes/insane.bbclass')
-rw-r--r-- | meta/classes/insane.bbclass | 1005 |
1 files changed, 1005 insertions, 0 deletions
diff --git a/meta/classes/insane.bbclass b/meta/classes/insane.bbclass new file mode 100644 index 0000000000..9ce336415a --- /dev/null +++ b/meta/classes/insane.bbclass | |||
@@ -0,0 +1,1005 @@ | |||
1 | # BB Class inspired by ebuild.sh | ||
2 | # | ||
3 | # This class will test files after installation for certain | ||
4 | # security issues and other kind of issues. | ||
5 | # | ||
6 | # Checks we do: | ||
7 | # -Check the ownership and permissions | ||
8 | # -Check the RUNTIME path for the $TMPDIR | ||
9 | # -Check if .la files wrongly point to workdir | ||
10 | # -Check if .pc files wrongly point to workdir | ||
11 | # -Check if packages contains .debug directories or .so files | ||
12 | # where they should be in -dev or -dbg | ||
13 | # -Check if config.log contains traces to broken autoconf tests | ||
14 | # -Ensure that binaries in base_[bindir|sbindir|libdir] do not link | ||
15 | # into exec_prefix | ||
16 | # -Check that scripts in base_[bindir|sbindir|libdir] do not reference | ||
17 | # files under exec_prefix | ||
18 | |||
19 | |||
# Hook the QA scan into packaging.
PACKAGE_DEPENDS += "${QADEPENDS}"
PACKAGEFUNCS += " do_package_qa "

# unsafe-references-in-binaries requires prelink-rtld from
# prelink-native, but we don't want this DEPENDS for -native builds
QADEPENDS = "prelink-native"
QADEPENDS_class-native = ""
QADEPENDS_class-nativesdk = ""
# Set to False (via d.setVar) by package_qa_handle_error when a fatal QA
# issue is found.
QA_SANE = "True"

# Elect whether a given type of error is a warning or error, they may
# have been set by other files.
WARN_QA ?= "ldflags useless-rpaths rpaths staticdev libdir xorg-driver-abi \
            textrel already-stripped incompatible-license files-invalid \
            installed-vs-shipped compile-host-path install-host-path \
            pn-overrides infodir \
            "
ERROR_QA ?= "dev-so debug-deps dev-deps debug-files arch pkgconfig la \
            perms dep-cmp pkgvarcheck perm-config perm-line perm-link \
            split-strip packages-list pkgv-undefined var-undefined \
            version-going-backwards \
            "

# Union of every known QA test name.
ALL_QA = "${WARN_QA} ${ERROR_QA}"

# Configure options to ignore; presumably consumed by a configure-option QA
# check defined outside this chunk — confirm against the rest of the class.
UNKNOWN_CONFIGURE_WHITELIST ?= "--enable-nls --disable-nls --disable-silent-rules --disable-dependency-tracking --with-libtool-sysroot"
47 | # | ||
48 | # dictionary for elf headers | ||
49 | # | ||
50 | # feel free to add and correct. | ||
51 | # | ||
52 | # TARGET_OS TARGET_ARCH MACHINE, OSABI, ABIVERSION, Little Endian, 32bit? | ||
def package_qa_get_machine_dict():
    """
    Return the table of expected ELF header values per target.

    Layout: TARGET_OS -> TARGET_ARCH -> (ELF machine number, OSABI,
    ABIVERSION, little-endian?, word size in bits).  Consumed by
    package_qa_check_arch to verify packaged binaries match the target.
    Feel free to add and correct entries.
    """
    return {
            "darwin9" : {
                        "arm" :       (40,     0,    0,          True,          32),
                      },
            "linux" : {
                        "aarch64" :   (183,    0,    0,          True,          64),
                        "aarch64_be" :(183,    0,    0,          False,         64),
                        "arm" :       (40,    97,    0,          True,          32),
                        "armeb":      (40,    97,    0,          False,         32),
                        "powerpc":    (20,     0,    0,          False,         32),
                        "powerpc64":  (21,     0,    0,          False,         64),
                        "i386":       ( 3,     0,    0,          True,          32),
                        "i486":       ( 3,     0,    0,          True,          32),
                        "i586":       ( 3,     0,    0,          True,          32),
                        "i686":       ( 3,     0,    0,          True,          32),
                        "x86_64":     (62,     0,    0,          True,          64),
                        "ia64":       (50,     0,    0,          True,          64),
                        "alpha":      (36902,  0,    0,          True,          64),
                        "hppa":       (15,     3,    0,          False,         32),
                        "m68k":       ( 4,     0,    0,          False,         32),
                        "mips":       ( 8,     0,    0,          False,         32),
                        "mipsel":     ( 8,     0,    0,          True,          32),
                        "mips64":     ( 8,     0,    0,          False,         64),
                        "mips64el":   ( 8,     0,    0,          True,          64),
                        "s390":       (22,     0,    0,          False,         32),
                        "sh4":        (42,     0,    0,          True,          32),
                        "sparc":      ( 2,     0,    0,          False,         32),
                        "microblaze": (189,    0,    0,          False,         32),
                        "microblazeel":(189,   0,    0,          True,          32),
                      },
            "linux-uclibc" : {
                        "arm" :       (  40,  97,    0,          True,          32),
                        "armeb":      (  40,  97,    0,          False,         32),
                        "powerpc":    (  20,   0,    0,          False,         32),
                        "i386":       (   3,   0,    0,          True,          32),
                        "i486":       (   3,   0,    0,          True,          32),
                        "i586":       (   3,   0,    0,          True,          32),
                        "i686":       (   3,   0,    0,          True,          32),
                        "x86_64":     (  62,   0,    0,          True,          64),
                        "mips":       (   8,   0,    0,          False,         32),
                        "mipsel":     (   8,   0,    0,          True,          32),
                        "mips64":     (   8,   0,    0,          False,         64),
                        "mips64el":   (   8,   0,    0,          True,          64),
                        "avr32":      (6317,   0,    0,          False,         32),
                        "sh4":        (42,     0,    0,          True,          32),

                      },
            "uclinux-uclibc" : {
                        "bfin":       ( 106,   0,    0,          True,          32),
                      },
            "linux-gnueabi" : {
                        "arm" :       (40,     0,    0,          True,          32),
                        "armeb" :     (40,     0,    0,          False,         32),
                      },
            "linux-uclibceabi" : {
                        "arm" :       (40,     0,    0,          True,          32),
                        "armeb" :     (40,     0,    0,          False,         32),
                      },
            "linux-gnuspe" : {
                        "powerpc":    (20,     0,    0,          False,         32),
                      },
            "linux-uclibcspe" : {
                        "powerpc":    (20,     0,    0,          False,         32),
                      },
            "linux-gnu" :       {
                        "powerpc":    (20,     0,    0,          False,         32),
                        "sh4":        (42,     0,    0,          True,          32),
                      },
            # x32: 32-bit pointers on an x86_64 ELF machine
            "linux-gnux32" :       {
                        "x86_64":     (62,     0,    0,          True,          32),
                      },
            # n32: 32-bit ABI on 64-bit MIPS
            "linux-gnun32" :       {
                        "mips64":       ( 8,     0,    0,          False,         32),
                        "mips64el":     ( 8,     0,    0,          True,          32),
                      },
       }
130 | |||
131 | |||
def package_qa_clean_path(path,d):
    """Strip the common TMPDIR prefix from *path* so QA messages stay short."""
    tmpdir = d.getVar('TMPDIR', True)
    return path.replace(tmpdir, "")
135 | |||
def package_qa_write_error(error, d):
    """
    Append a QA issue to the configured QA log file.

    Writes one line of the form "<P>: <error>" (P = package name/version)
    to QA_LOGFILE.  No-op when QA_LOGFILE is unset.
    """
    logfile = d.getVar('QA_LOGFILE', True)
    if logfile:
        p = d.getVar('P', True)
        # Use a context manager instead of the legacy file()/"print >>" idiom:
        # the handle is now closed even if the write raises, and the code no
        # longer depends on Python-2-only syntax.
        with open(logfile, "a+") as f:
            f.write("%s: %s\n" % (p, error))
143 | |||
def package_qa_handle_error(error_class, error_msg, d):
    """
    Report a QA issue according to its configured severity.

    The message is always appended to the QA log.  When error_class is in
    ERROR_QA the issue is fatal: QA_SANE is cleared and False is returned.
    When it is in WARN_QA it is emitted as a warning, otherwise merely
    noted; both non-fatal cases return True.
    """
    package_qa_write_error(error_msg, d)

    fatal_classes = (d.getVar("ERROR_QA", True) or "").split()
    if error_class in fatal_classes:
        bb.error("QA Issue: %s" % error_msg)
        d.setVar("QA_SANE", False)
        return False

    warn_classes = (d.getVar("WARN_QA", True) or "").split()
    if error_class in warn_classes:
        bb.warn("QA Issue: %s" % error_msg)
    else:
        bb.note("QA Issue: %s" % error_msg)
    return True
155 | |||
# Register the 'libexec' per-file QA test (varflag consumed by the QA task).
QAPATHTEST[libexec] = "package_qa_check_libexec"
def package_qa_check_libexec(path,name, d, elf, messages):
    """
    Flag files shipped under a 'libexec' directory unless the distro's
    configured libexecdir genuinely is /usr/libexec.
    """
    libexec = d.getVar('libexecdir', True)
    # Nothing to complain about when /usr/libexec is the sanctioned location.
    if libexec == "/usr/libexec":
        return True

    if 'libexec' not in path.split(os.path.sep):
        return True

    messages.append("%s: %s is using libexec please relocate to %s" % (name, package_qa_clean_path(path, d), libexec))
    return False
169 | |||
# Register the 'rpaths' per-file QA test (varflag consumed by the QA task).
QAPATHTEST[rpaths] = "package_qa_check_rpath"
def package_qa_check_rpath(file,name, d, elf, messages):
    """
    Flag ELF binaries whose RPATH points into the build tree
    (BASE_WORKDIR or STAGING_DIR_TARGET) - such paths are dangerous
    on the target.
    """
    if not elf:
        return
    # Symlink targets are inspected in their own right.
    if os.path.islink(file):
        return

    forbidden = [d.getVar('BASE_WORKDIR', True),
                 d.getVar('STAGING_DIR_TARGET', True)]

    import re
    rpath_re = re.compile("\s+RPATH\s+(.*)")

    for line in elf.run_objdump("-p", d).split("\n"):
        match = rpath_re.match(line)
        if not match:
            continue
        rpath = match.group(1)
        for bad in forbidden:
            if bad in rpath:
                messages.append("package %s contains bad RPATH %s in file %s" % (name, rpath, file))
# Register the 'useless-rpaths' per-file QA test (varflag consumed by the QA task).
QAPATHTEST[useless-rpaths] = "package_qa_check_useless_rpaths"
def package_qa_check_useless_rpaths(file, name, d, elf, messages):
    """
    Flag RPATH entries that merely duplicate the default dynamic-linker
    search path (libdir / base_libdir): harmless but redundant.
    """
    if not elf:
        return
    if os.path.islink(file):
        return

    def same_dir(a, b):
        return os.path.normpath(a) == os.path.normpath(b)

    libdir = d.getVar("libdir", True)
    base_libdir = d.getVar("base_libdir", True)

    import re
    rpath_re = re.compile("\s+RPATH\s+(.*)")

    for line in elf.run_objdump("-p", d).split("\n"):
        match = rpath_re.match(line)
        if not match:
            continue
        rpath = match.group(1)
        # The dynamic linker searches libdir/base_libdir anyway; there is
        # no point in looking there again.
        if same_dir(rpath, libdir) or same_dir(rpath, base_libdir):
            messages.append("%s: %s contains probably-redundant RPATH %s" % (name, package_qa_clean_path(file, d), rpath))
# Register the 'dev-so' per-file QA test (varflag consumed by the QA task).
QAPATHTEST[dev-so] = "package_qa_check_dev"
def package_qa_check_dev(path, name, d, elf, messages):
    """
    Flag unversioned ".so" symlinks shipped outside of -dev/-dbg/-ptest
    or nativesdk packages (such links belong in the -dev package).
    """
    exempt = (name.endswith("-dev") or name.endswith("-dbg")
              or name.endswith("-ptest") or name.startswith("nativesdk-"))
    if exempt:
        return

    if path.endswith(".so") and os.path.islink(path):
        messages.append("non -dev/-dbg/-nativesdk package contains symlink .so: %s path '%s'" % \
                 (name, package_qa_clean_path(path,d)))
234 | |||
# Register the 'staticdev' per-file QA test (varflag consumed by the QA task).
QAPATHTEST[staticdev] = "package_qa_check_staticdev"
def package_qa_check_staticdev(path, name, d, elf, messages):
    """
    Flag static ".a" libraries shipped outside -staticdev packages.

    Exceptions: -pic packages may carry static libraries, *_nonshared.a
    belongs with the -dev package, and -ptest packages are skipped.
    """
    if name.endswith("-pic") or name.endswith("-staticdev") or name.endswith("-ptest"):
        return

    if path.endswith(".a") and not path.endswith("_nonshared.a"):
        messages.append("non -staticdev package contains static .a library: %s path '%s'" % \
                 (name, package_qa_clean_path(path,d)))
247 | |||
def package_qa_check_libdir(d):
    """
    Check for wrong library installation paths. For instance, catch
    recipes installing /lib/bar.so when ${base_libdir}="lib32" or
    installing in /usr/lib64 when ${libdir}="/usr/lib"

    Walks PKGDEST (one subdirectory per package), collects all offending
    paths and reports them in a single "libdir" QA issue.
    """
    import re

    pkgdest = d.getVar('PKGDEST', True)
    base_libdir = d.getVar("base_libdir",True) + os.sep
    libdir = d.getVar("libdir", True) + os.sep
    exec_prefix = d.getVar("exec_prefix", True) + os.sep

    messages = []

    # Shared objects directly under a top-level /lib* directory ...
    lib_re = re.compile("^/lib.+\.so(\..+)?$")
    # ... and under exec_prefix (e.g. /usr/lib*, /usr/*/lib*).
    exec_re = re.compile("^%s.*/lib.+\.so(\..+)?$" % exec_prefix)

    for root, dirs, files in os.walk(pkgdest):
        if root == pkgdest:
            # Skip subdirectories for any packages with libdir in INSANE_SKIP
            # (mutating 'dirs' in place prunes the walk for those packages).
            skippackages = []
            for package in dirs:
                if 'libdir' in (d.getVar('INSANE_SKIP_' + package, True) or "").split():
                    bb.note("Package %s skipping libdir QA test" % (package))
                    skippackages.append(package)
            for package in skippackages:
                dirs.remove(package)
        for file in files:
            full_path = os.path.join(root, file)
            rel_path = os.path.relpath(full_path, pkgdest)
            if os.sep in rel_path:
                # First path component is the package name; the rest is the
                # path as it will appear on the target.
                package, rel_path = rel_path.split(os.sep, 1)
                rel_path = os.sep + rel_path
                if lib_re.match(rel_path):
                    if base_libdir not in rel_path:
                        messages.append("%s: found library in wrong location: %s" % (package, rel_path))
                if exec_re.match(rel_path):
                    if libdir not in rel_path:
                        messages.append("%s: found library in wrong location: %s" % (package, rel_path))

    if messages:
        package_qa_handle_error("libdir", "\n".join(messages), d)
291 | |||
# Register the 'debug-files' per-file QA test (varflag consumed by the QA task).
QAPATHTEST[debug-files] = "package_qa_check_dbg"
def package_qa_check_dbg(path, name, d, elf, messages):
    """
    Flag ".debug" files or directories shipped outside of -dbg (and
    -ptest) packages.
    """
    if "-dbg" in name or "-ptest" in name:
        return

    if '.debug' in path.split(os.path.sep):
        messages.append("non debug package contains .debug directory: %s path %s" % \
                 (name, package_qa_clean_path(path,d)))
302 | |||
# Register the 'perms' per-file QA test (varflag consumed by the QA task).
QAPATHTEST[perms] = "package_qa_check_perm"
def package_qa_check_perm(path,name,d, elf, messages):
    """
    Check the permission of files.

    Currently a deliberate no-op placeholder kept so the 'perms' test
    name remains registered.
    """
    return
309 | |||
# Register the 'unsafe-references-in-binaries' per-file QA test.
QAPATHTEST[unsafe-references-in-binaries] = "package_qa_check_unsafe_references_in_binaries"
def package_qa_check_unsafe_references_in_binaries(path, name, d, elf, messages):
    """
    Ensure binaries in base_[bindir|sbindir|libdir] do not link to files under exec_prefix

    Uses prelink-rtld (from prelink-native, see QADEPENDS) to resolve the
    binary's shared-library dependencies against the target sysroot, then
    flags any dependency that resolves under exec_prefix.
    """
    if unsafe_references_skippable(path, name, d):
        return

    if elf:
        import subprocess as sub
        pn = d.getVar('PN', True)

        exec_prefix = d.getVar('exec_prefix', True)
        sysroot_path = d.getVar('STAGING_DIR_TARGET', True)
        sysroot_path_usr = sysroot_path + exec_prefix

        try:
            # Resolve dependencies as the runtime linker would, rooted in the sysroot.
            ldd_output = bb.process.Popen(["prelink-rtld", "--root", sysroot_path, path], stdout=sub.PIPE).stdout.read()
        except bb.process.CmdError:
            error_msg = pn + ": prelink-rtld aborted when processing %s" % path
            package_qa_handle_error("unsafe-references-in-binaries", error_msg, d)
            return False

        if sysroot_path_usr in ldd_output:
            # Strip the sysroot prefix so reported paths look like target paths.
            ldd_output = ldd_output.replace(sysroot_path, "")

            pkgdest = d.getVar('PKGDEST', True)
            packages = d.getVar('PACKAGES', True)

            # Find which package this file belongs to, for a readable message.
            for package in packages.split():
                short_path = path.replace('%s/%s' % (pkgdest, package), "", 1)
                if (short_path != path):
                    break

            base_err = pn + ": %s, installed in the base_prefix, requires a shared library under exec_prefix (%s)" % (short_path, exec_prefix)
            for line in ldd_output.split('\n'):
                if exec_prefix in line:
                    error_msg = "%s: %s" % (base_err, line.strip())
                    package_qa_handle_error("unsafe-references-in-binaries", error_msg, d)

        # NOTE(review): returns False on every elf path, even when no issue
        # was reported — confirm whether the return value is actually used.
        return False
351 | |||
# Register the 'unsafe-references-in-scripts' per-file QA test.
QAPATHTEST[unsafe-references-in-scripts] = "package_qa_check_unsafe_references_in_scripts"
def package_qa_check_unsafe_references_in_scripts(path, name, d, elf, messages):
    """
    Warn if scripts in base_[bindir|sbindir|libdir] reference files under exec_prefix

    Only executable non-ELF files are checked; the scan is a plain grep
    for "<exec_prefix>/" in the file contents.
    """
    if unsafe_references_skippable(path, name, d):
        return

    if not elf:
        import stat
        import subprocess
        pn = d.getVar('PN', True)

        # Ensure we're checking an executable script
        statinfo = os.stat(path)
        if bool(statinfo.st_mode & stat.S_IXUSR):
            # grep shell scripts for possible references to /exec_prefix/
            # NOTE(review): path/exec_prefix are interpolated into a shell
            # command line; fine for build-controlled paths, but confirm no
            # whitespace/metacharacters can ever appear in them.
            exec_prefix = d.getVar('exec_prefix', True)
            statement = "grep -e '%s/' %s > /dev/null" % (exec_prefix, path)
            if subprocess.call(statement, shell=True) == 0:
                error_msg = pn + ": Found a reference to %s/ in %s" % (exec_prefix, path)
                package_qa_handle_error("unsafe-references-in-scripts", error_msg, d)
                error_msg = "Shell scripts in base_bindir and base_sbindir should not reference anything in exec_prefix"
                package_qa_handle_error("unsafe-references-in-scripts", error_msg, d)
376 | |||
def unsafe_references_skippable(path, name, d):
    """
    Decide whether the unsafe-references QA tests are irrelevant for this
    file/package: native builds, -dbg/-dev packages, kernel modules,
    symlinks, layouts without a base/exec_prefix split, or files outside
    base_[bindir|sbindir|libdir].
    """
    # native/nativesdk recipes run on the build host, where the
    # base_prefix/exec_prefix split does not apply.
    if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d):
        return True

    if "-dbg" in name or "-dev" in name:
        return True

    # Other package names to skip:
    if name.startswith("kernel-module-"):
        return True

    # Symlinks are checked via their targets.
    if os.path.islink(path):
        return True

    # An empty exec_prefix means there is no /usr split at all.
    exec_prefix = d.getVar('exec_prefix', True)
    if exec_prefix == "":
        return True

    pkgroot = os.path.abspath(d.getVar('PKGDEST', True) + "/" + name)
    base_dirs = [pkgroot + d.getVar(var, True)
                 for var in ('base_bindir', 'base_sbindir', 'base_libdir')]
    usr_dirs = [pkgroot + d.getVar(var, True)
                for var in ('bindir', 'sbindir', 'libdir')]

    # If the base and exec_prefix locations coincide, the test is meaningless.
    if base_dirs == usr_dirs:
        return True

    # Skip files not in base_[bindir|sbindir|libdir].
    abspath = os.path.abspath(path)
    if not any(basedir in abspath for basedir in base_dirs):
        return True

    return False
416 | |||
# Register the 'arch' per-file QA test (varflag consumed by the QA task).
QAPATHTEST[arch] = "package_qa_check_arch"
def package_qa_check_arch(path,name,d, elf, messages):
    """
    Check that an ELF binary matches the configured target: machine type,
    word size and endianness, per package_qa_get_machine_dict().
    """
    if not elf:
        return

    target_os = d.getVar('TARGET_OS', True)
    target_arch = d.getVar('TARGET_ARCH', True)
    provides = d.getVar('PROVIDES', True)
    bpn = d.getVar('BPN', True)

    # FIXME: Cross packages confuse this check, so just skip them
    for cls in ('cross', 'nativesdk', 'cross-canadian'):
        if bb.data.inherits_class(cls, d):
            return

    # avoid following links to /usr/bin (e.g. on udev builds);
    # the files pointed to are checked anyway
    if os.path.islink(path):
        return

    # if this throws an exception, fix the machine dict above
    (machine, osabi, abiversion, littleendian, bits) \
        = package_qa_get_machine_dict()[target_os][target_arch]

    def matches_x32_kernel():
        # x32 kernels/modules legitimately carry non-matching ELF headers.
        return ((("virtual/kernel" in provides) or bb.data.inherits_class("module", d))
                and (target_os == "linux-gnux32"))

    # Check the architecture, word size and endianness of the binary.
    if not (machine == elf.machine() or matches_x32_kernel()):
        messages.append("Architecture did not match (%d to %d) on %s" % \
                 (machine, elf.machine(), package_qa_clean_path(path,d)))
    elif not (bits == elf.abiSize() or matches_x32_kernel()):
        messages.append("Bit size did not match (%d to %d) %s on %s" % \
                 (bits, elf.abiSize(), bpn, package_qa_clean_path(path,d)))
    elif not littleendian == elf.isLittleEndian():
        messages.append("Endiannes did not match (%d to %d) on %s" % \
                 (littleendian, elf.isLittleEndian(), package_qa_clean_path(path,d)))
456 | |||
# Register the 'desktop' per-file QA test (varflag consumed by the QA task).
QAPATHTEST[desktop] = "package_qa_check_desktop"
def package_qa_check_desktop(path, name, d, elf, messages):
    """
    Run all desktop files through desktop-file-validate (from the native
    sysroot) and report each line of validator output as an issue.
    """
    if not path.endswith(".desktop"):
        return

    validator = os.path.join(d.getVar('STAGING_BINDIR_NATIVE', True),
                             'desktop-file-validate')
    # desktop-file-validate only produces output on errors
    for line in os.popen("%s %s" % (validator, path)):
        messages.append("Desktop file issue: " + line.strip())
468 | |||
# Register the 'textrel' per-file QA test (varflag consumed by the QA task).
QAPATHTEST[textrel] = "package_qa_textrel"
def package_qa_textrel(path, name, d, elf, messages):
    """
    Flag ELF binaries carrying a TEXTREL dynamic tag, i.e. runtime
    relocations in .text.
    """
    if not elf:
        return
    if os.path.islink(path):
        return

    import re
    textrel_re = re.compile("\s+TEXTREL\s+")

    phdrs = elf.run_objdump("-p", d)
    if any(textrel_re.match(line) for line in phdrs.split("\n")):
        messages.append("ELF binary '%s' has relocations in .text" % path)
492 | |||
# Register the 'ldflags' per-file QA test (varflag consumed by the QA task).
QAPATHTEST[ldflags] = "package_qa_hash_style"
def package_qa_hash_style(path, name, d, elf, messages):
    """
    When LDFLAGS requests GNU hash sections (--hash-style=gnu or =both),
    verify that binaries with a symbol table actually got a GNU_HASH
    section.  MIPS uses its own hashing scheme and is exempt.
    """
    if not elf:
        return
    if os.path.islink(path):
        return

    ldflags = d.getVar('LDFLAGS', True)
    if "--hash-style=gnu" not in ldflags and "--hash-style=both" not in ldflags:
        # GNU hash was never requested; nothing to verify.
        return

    has_syms = False
    has_gnu_hash = False

    # If this binary has symbols, we expect it to have GNU_HASH too.
    for line in elf.run_objdump("-p", d).split("\n"):
        if "SYMTAB" in line:
            has_syms = True
        if "GNU_HASH" in line:
            has_gnu_hash = True
        if "[mips32]" in line or "[mips64]" in line:
            has_gnu_hash = True

    if has_syms and not has_gnu_hash:
        messages.append("No GNU_HASH in the elf binary: '%s'" % path)
527 | |||
528 | |||
# Register the 'buildpaths' per-file QA test (varflag consumed by the QA task).
QAPATHTEST[buildpaths] = "package_qa_check_buildpaths"
def package_qa_check_buildpaths(path, name, d, elf, messages):
    """
    Flag packaged files whose contents mention TMPDIR (build paths leaking
    into target files).
    """
    # .debug files refer to build paths by nature; not interesting.
    if ".debug" in path:
        return

    # Symlinks have no content of their own.
    if os.path.islink(path):
        return

    tmpdir = d.getVar('TMPDIR', True)
    with open(path) as f:
        if tmpdir in f.read():
            messages.append("File %s in package contained reference to tmpdir" % package_qa_clean_path(path,d))
547 | |||
548 | |||
# Register the 'xorg-driver-abi' per-file QA test (varflag consumed by the QA task).
QAPATHTEST[xorg-driver-abi] = "package_qa_check_xorg_driver_abi"
def package_qa_check_xorg_driver_abi(path, name, d, elf, messages):
    """
    Packages shipping Xorg driver modules must runtime-depend on an
    xorg-abi-* virtual so ABI mismatches are caught.
    """
    # Skip dev, dbg or nativesdk packages
    if name.endswith("-dev") or name.endswith("-dbg") or name.startswith("nativesdk-"):
        return

    driverdir = d.expand("${libdir}/xorg/modules/drivers/")
    if driverdir not in path or not path.endswith(".so"):
        return

    mlprefix = d.getVar('MLPREFIX', True) or ''
    for rdep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + name, True) or ""):
        if rdep.startswith("%sxorg-abi-" % mlprefix):
            return
    messages.append("Package %s contains Xorg driver (%s) but no xorg-abi- dependencies" % (name, os.path.basename(path)))
566 | |||
# Register the 'infodir' per-file QA test (varflag consumed by the QA task).
QAPATHTEST[infodir] = "package_qa_check_infodir"
def package_qa_check_infodir(path, name, d, elf, messages):
    """
    The info "dir" index file is maintained on the target and must not be
    shipped by any individual package.
    """
    info_dir_file = d.expand("${infodir}/dir")
    if info_dir_file in path:
        messages.append("The /usr/share/info/dir file is not meant to be shipped in a particular package.")
576 | |||
# Register the 'symlink-to-sysroot' per-file QA test (varflag consumed by the QA task).
QAPATHTEST[symlink-to-sysroot] = "package_qa_check_symlink_to_sysroot"
def package_qa_check_symlink_to_sysroot(path, name, d, elf, messages):
    """
    Flag absolute symlinks that resolve inside TMPDIR - they point at the
    build sysroot and will dangle on the target.
    """
    if not os.path.islink(path):
        return
    target = os.readlink(path)
    if not os.path.isabs(target):
        return
    tmpdir = d.getVar('TMPDIR', True)
    if target.startswith(tmpdir):
        # Report the path relative to this package's PKGDEST subdirectory.
        trimmed = path.replace(os.path.join(d.getVar("PKGDEST", True), name), "")
        messages.append("Symlink %s in %s points to TMPDIR" % (trimmed, name))
589 | |||
def package_qa_check_license(workdir, d):
    """
    Check for changes in the license files

    Verifies every LIC_FILES_CHKSUM entry: the referenced file must exist
    and its md5 (optionally restricted to beginline..endline) must match
    the checksum recorded in the recipe.  Returns False (or raises
    bb.build.FuncFailed for malformed entries) on failure.
    """
    import tempfile
    sane = True

    lic_files = d.getVar('LIC_FILES_CHKSUM', True)
    lic = d.getVar('LICENSE', True)
    pn = d.getVar('PN', True)

    # CLOSED-source recipes are exempt from license tracking.
    if lic == "CLOSED":
        return True

    if not lic_files:
        bb.error(pn + ": Recipe file does not have license file information (LIC_FILES_CHKSUM)")
        return False

    srcdir = d.getVar('S', True)

    for url in lic_files.split():
        try:
            (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
        except bb.fetch.MalformedUrl:
            raise bb.build.FuncFailed( pn + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
        srclicfile = os.path.join(srcdir, path)
        if not os.path.isfile(srclicfile):
            raise bb.build.FuncFailed( pn + ": LIC_FILES_CHKSUM points to an invalid file: " + srclicfile)

        recipemd5 = parm.get('md5', '')
        # beginline/endline restrict the checksum to a slice of the file
        # (0 means "unbounded" at that end).
        beginline, endline = 0, 0
        if 'beginline' in parm:
            beginline = int(parm['beginline'])
        if 'endline' in parm:
            endline = int(parm['endline'])

        if (not beginline) and (not endline):
            md5chksum = bb.utils.md5_file(srclicfile)
        else:
            # Copy the requested line range to a temp file and checksum that.
            fi = open(srclicfile, 'rb')
            fo = tempfile.NamedTemporaryFile(mode='wb', prefix='poky.', suffix='.tmp', delete=False)
            tmplicfile = fo.name;
            lineno = 0
            linesout = 0
            for line in fi:
                lineno += 1
                if (lineno >= beginline):
                    if ((lineno <= endline) or not endline):
                        fo.write(line)
                        linesout += 1
                    else:
                        break
            fo.flush()
            fo.close()
            fi.close()
            md5chksum = bb.utils.md5_file(tmplicfile)
            os.unlink(tmplicfile)

        if recipemd5 == md5chksum:
            bb.note (pn + ": md5 checksum matched for ", url)
        else:
            if recipemd5:
                bb.error(pn + ": md5 data is not matching for ", url)
                bb.error(pn + ": The new md5 checksum is ", md5chksum)
                # Describe exactly which slice of the file was checksummed.
                if beginline:
                    if endline:
                        srcfiledesc = "%s (lines %d through to %d)" % (srclicfile, beginline, endline)
                    else:
                        srcfiledesc = "%s (beginning on line %d)" % (srclicfile, beginline)
                elif endline:
                    srcfiledesc = "%s (ending on line %d)" % (srclicfile, endline)
                else:
                    srcfiledesc = srclicfile
                bb.error(pn + ": Check if the license information has changed in %s to verify that the LICENSE value \"%s\" remains valid" % (srcfiledesc, lic))
            else:
                bb.error(pn + ": md5 checksum is not specified for ", url)
                bb.error(pn + ": The md5 checksum is ", md5chksum)
            sane = False

    return sane
670 | |||
def package_qa_check_staged(path,d):
    """
    Check staged .la and .pc files for sanity - e.g. paths that still
    point into WORKDIR/TMPDIR.

    As this runs after every stage, the recipe responsible for an error is
    easy to find even though every .la and .pc file is inspected.
    Returns True when no problems were found.
    """
    sane = True
    tmpdir = d.getVar('TMPDIR', True)
    workdir = os.path.join(tmpdir, "work")

    installed = "installed=yes"  # historical; not referenced below
    # For native/cross, .pc files legitimately refer to the tmpdir-located
    # sysroot, so only references to the work directory are suspicious.
    if bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d):
        pkgconfigcheck = workdir
    else:
        pkgconfigcheck = tmpdir

    # Scan every .la/.pc file below 'path' and check its contents for
    # things that look wrong.
    for root, dirs, files in os.walk(path):
        for fname in files:
            fpath = os.path.join(root, fname)
            if fname.endswith(".la"):
                with open(fpath) as f:
                    if workdir in f.read():
                        error_msg = "%s failed sanity test (workdir) in path %s" % (fname, root)
                        sane = package_qa_handle_error("la", error_msg, d)
            elif fname.endswith(".pc"):
                with open(fpath) as f:
                    if pkgconfigcheck in f.read():
                        error_msg = "%s failed sanity test (tmpdir) in path %s" % (fname, root)
                        sane = package_qa_handle_error("pkgconfig", error_msg, d)

    return sane
711 | |||
# Walk over all files in a directory and call func
def package_qa_walk(path, warnfuncs, errorfuncs, skip, package, d):
    """
    Run every warn/error QA function over each file of *package*,
    reporting and logging the collected messages.

    Returns True when no error-level messages were produced.
    NOTE(review): iterates 'pkgfiles[package]' - a global populated
    elsewhere in this class (not visible in this chunk); confirm it is
    set before this is called.  The 'path' parameter is shadowed by the
    loop variable and effectively unused here.
    """
    import oe.qa

    #if this will throw an exception, then fix the dict above
    target_os = d.getVar('TARGET_OS', True)
    target_arch = d.getVar('TARGET_ARCH', True)

    warnings = []
    errors = []
    for path in pkgfiles[package]:
        elf = oe.qa.ELFFile(path)
        try:
            elf.open()
        except:
            # Not an ELF file (or unreadable): pass elf=None so the checks
            # treat it as a plain file.
            elf = None
        for func in warnfuncs:
            func(path, package, d, elf, warnings)
        for func in errorfuncs:
            func(path, package, d, elf, errors)

    for w in warnings:
        bb.warn("QA Issue: %s" % w)
        package_qa_write_error(w, d)
    for e in errors:
        bb.error("QA Issue: %s" % e)
        package_qa_write_error(e, d)

    return len(errors) == 0
741 | |||
def package_qa_check_rdepends(pkg, pkgdest, skip, d):
    """
    Check a package's runtime dependencies: non-dbg packages must not
    rdepend on -dbg packages ("debug-deps"), and non-dev packages must not
    rdepend on -dev packages ("dev-deps").  Returns True when sane.
    """
    # Don't do this check for kernel/module recipes, there aren't too many debug/development
    # packages and you can get false positives e.g. on kernel-module-lirc-dev
    if bb.data.inherits_class("kernel", d) or bb.data.inherits_class("module-base", d):
        return True

    sane = True
    if not "-dbg" in pkg and not "packagegroup-" in pkg and not "-image" in pkg:
        # Evaluate RDEPENDS with this package name as an override.
        localdata = bb.data.createCopy(d)
        localdata.setVar('OVERRIDES', pkg)
        bb.data.update_data(localdata)

        # Now check the RDEPENDS
        rdepends = bb.utils.explode_deps(localdata.getVar('RDEPENDS', True) or "")

        # Now do the sanity check!!!
        for rdepend in rdepends:
            if "-dbg" in rdepend and "debug-deps" not in skip:
                error_msg = "%s rdepends on %s" % (pkg,rdepend)
                sane = package_qa_handle_error("debug-deps", error_msg, d)
            if (not "-dev" in pkg and not "-staticdev" in pkg) and rdepend.endswith("-dev") and "dev-deps" not in skip:
                error_msg = "%s rdepends on %s" % (pkg, rdepend)
                sane = package_qa_handle_error("dev-deps", error_msg, d)

    return sane
767 | |||
def package_qa_check_deps(pkg, pkgdest, skip, d):
    """Validate the version comparison operators used in the R* dependency
    variables of 'pkg' - only <, =, >, <= and >= are allowed.

    Returns True when every dependency string parses and uses a valid
    comparison; raises via bb.fatal on an unparseable dependency string.
    """
    # Evaluate the variables with the package name as an override so
    # package-specific values (e.g. RDEPENDS_<pkg>) are seen.
    localdata = bb.data.createCopy(d)
    localdata.setVar('OVERRIDES', pkg)
    bb.data.update_data(localdata)

    def check_valid_deps(var):
        sane = True
        try:
            rvar = bb.utils.explode_dep_versions2(localdata.getVar(var, True) or "")
        except ValueError as e:
            bb.fatal("%s_%s: %s" % (var, pkg, e))
        for dep in rvar:
            for v in rvar[dep]:
                if v and not v.startswith(('< ', '= ', '> ', '<= ', '>=')):
                    error_msg = "%s_%s is invalid: %s (%s) only comparisons <, =, >, <=, and >= are allowed" % (var, pkg, dep, v)
                    # 'and sane' so an earlier failure is not overwritten by
                    # a later warning-only result (was a plain assignment).
                    sane = package_qa_handle_error("dep-cmp", error_msg, d) and sane
        return sane

    # Check every runtime dependency variable; a single failure marks the
    # whole package insane.  (Replaces six copy-pasted if-blocks and a
    # duplicated 'sane = True' initialisation.)
    sane = True
    for var in ('RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RPROVIDES', 'RREPLACES', 'RCONFLICTS'):
        if not check_valid_deps(var):
            sane = False

    return sane
803 | |||
# The PACKAGE FUNC to scan each package
# Runs as part of PACKAGEFUNCS: checks the compile/install logs for host
# contamination, then runs the QAPATHTEST check matrix, the package-name
# regex, and the rdepends/deps sanity checks over every packaged file.
python do_package_qa () {
    import subprocess

    bb.note("DO PACKAGE QA")

    logdir = d.getVar('T', True)
    pkg = d.getVar('PN', True)

    # Check the compile log for host contamination
    compilelog = os.path.join(logdir,"log.do_compile")

    if os.path.exists(compilelog):
        # The markers are emitted by the toolchain wrappers when host
        # include/library paths leak into a cross build.
        statement = "grep -e 'CROSS COMPILE Badness:' -e 'is unsafe for cross-compilation' %s > /dev/null" % compilelog
        if subprocess.call(statement, shell=True) == 0:
            msg = "%s: The compile log indicates that host include and/or library paths were used.\n \
        Please check the log '%s' for more information." % (pkg, compilelog)
            package_qa_handle_error("compile-host-path", msg, d)

    # Check the install log for host contamination
    installlog = os.path.join(logdir,"log.do_install")

    if os.path.exists(installlog):
        statement = "grep -e 'CROSS COMPILE Badness:' -e 'is unsafe for cross-compilation' %s > /dev/null" % installlog
        if subprocess.call(statement, shell=True) == 0:
            msg = "%s: The install log indicates that host include and/or library paths were used.\n \
        Please check the log '%s' for more information." % (pkg, installlog)
            package_qa_handle_error("install-host-path", msg, d)

    # Scan the packages...
    pkgdest = d.getVar('PKGDEST', True)
    packages = d.getVar('PACKAGES', True)

    # no packages should be scanned
    if not packages:
        return

    # QAPATHTEST var-flags map QA test names to checker function names
    # defined in this class's global namespace.
    testmatrix = d.getVarFlags("QAPATHTEST")
    import re
    # The package name matches the [a-z0-9.+-]+ regular expression
    pkgname_pattern = re.compile("^[a-z0-9.+-]+$")

    g = globals()
    walk_sane = True
    rdepends_sane = True
    deps_sane = True
    for package in packages.split():
        # INSANE_SKIP_<pkg> lists QA test names to skip for this package.
        skip = (d.getVar('INSANE_SKIP_' + package, True) or "").split()
        if skip:
            bb.note("Package %s skipping QA tests: %s" % (package, str(skip)))
        # Resolve the WARN_QA / ERROR_QA test names into checker callables.
        warnchecks = []
        for w in (d.getVar("WARN_QA", True) or "").split():
            if w in skip:
               continue
            if w in testmatrix and testmatrix[w] in g:
                warnchecks.append(g[testmatrix[w]])
        errorchecks = []
        for e in (d.getVar("ERROR_QA", True) or "").split():
            if e in skip:
               continue
            if e in testmatrix and testmatrix[e] in g:
                errorchecks.append(g[testmatrix[e]])

        bb.note("Checking Package: %s" % package)
        # Check package name
        if not pkgname_pattern.match(package):
            package_qa_handle_error("pkgname",
                    "%s doesn't match the [a-z0-9.+-]+ regex\n" % package, d)

        path = "%s/%s" % (pkgdest, package)
        if not package_qa_walk(path, warnchecks, errorchecks, skip, package, d):
            walk_sane  = False
        if not package_qa_check_rdepends(package, pkgdest, skip, d):
            rdepends_sane = False
        if not package_qa_check_deps(package, pkgdest, skip, d):
            deps_sane = False


    if 'libdir' in d.getVar("ALL_QA", True).split():
        package_qa_check_libdir(d)

    # QA_SANE is cleared by package_qa_handle_error for fatal issues;
    # any insane result aborts the build here.
    qa_sane = d.getVar("QA_SANE", True)
    if not walk_sane or not rdepends_sane or not deps_sane or not qa_sane:
        bb.fatal("QA run found fatal errors. Please consider fixing them.")
    bb.note("DONE with PACKAGE QA")
}
890 | |||
891 | |||
# Verify nothing broken was staged into the sysroot (e.g. .la/.pc files
# pointing at the workdir).  Hooked as a do_populate_sysroot postfunc.
python do_qa_staging() {
    bb.note("QA checking staging")

    if not package_qa_check_staged(d.expand('${SYSROOT_DESTDIR}${STAGING_LIBDIR}'), d):
        bb.fatal("QA staging was broken by the package built above")
}
898 | |||
# Post-configure QA: scan config.log files for cross-compile contamination,
# verify gettext is in DEPENDS when configure uses AM_GNU_GETTEXT, check
# LIC_FILES_CHKSUM, and flag unrecognised configure options.
python do_qa_configure() {
    import subprocess

    ###########################################################################
    # Check config.log for cross compile issues
    ###########################################################################

    configs = []
    workdir = d.getVar('WORKDIR', True)
    bb.note("Checking autotools environment for common misconfiguration")
    for root, dirs, files in os.walk(workdir):
        # NOTE: the grep statement is built unconditionally but only run
        # when a config.log actually exists in this directory.
        statement = "grep -e 'CROSS COMPILE Badness:' -e 'is unsafe for cross-compilation' %s > /dev/null" % \
                    os.path.join(root,"config.log")
        if "config.log" in files:
            if subprocess.call(statement, shell=True) == 0:
                bb.fatal("""This autoconf log indicates errors, it looked at host include and/or library paths while determining system capabilities.
Rerun configure task after fixing this. The path was '%s'""" % root)

        # Collect configure sources for the gettext check below.
        if "configure.ac" in files:
            configs.append(os.path.join(root,"configure.ac"))
        if "configure.in" in files:
            configs.append(os.path.join(root, "configure.in"))

    ###########################################################################
    # Check gettext configuration and dependencies are correct
    ###########################################################################

    cnf = d.getVar('EXTRA_OECONF', True) or ""
    if "gettext" not in d.getVar('P', True) and "gcc-runtime" not in d.getVar('P', True) and "--disable-nls" not in cnf:
        ml = d.getVar("MLPREFIX", True) or ""
        # Pick the gettext provider appropriate to the recipe's class.
        if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('nativesdk', d):
            gt = "gettext-native"
        elif bb.data.inherits_class('cross-canadian', d):
            gt = "nativesdk-gettext"
        else:
            gt = "virtual/" + ml + "gettext"
        deps = bb.utils.explode_deps(d.getVar('DEPENDS', True) or "")
        if gt not in deps:
            # Only fatal if a configure source really uses AM_GNU_GETTEXT.
            for config in configs:
                gnu = "grep \"^[[:space:]]*AM_GNU_GETTEXT\" %s >/dev/null" % config
                if subprocess.call(gnu, shell=True) == 0:
                    bb.fatal("""%s required but not in DEPENDS for file %s.
Missing inherit gettext?""" % (gt, config))

    ###########################################################################
    # Check license variables
    ###########################################################################

    if not package_qa_check_license(workdir, d):
        bb.fatal("Licensing Error: LIC_FILES_CHKSUM does not match, please fix")

    ###########################################################################
    # Check unrecognised configure options (with a white list)
    ###########################################################################
    if bb.data.inherits_class("autotools", d):
        bb.note("Checking configure output for unrecognised options")
        try:
            flag = "WARNING: unrecognized options:"
            log = os.path.join(d.getVar('B', True), 'config.log')
            # grep exits non-zero when nothing matches, which raises
            # CalledProcessError and is treated as "no unknown options".
            output = subprocess.check_output(['grep', '-F', flag, log]).replace(', ', ' ')
            options = set()
            for line in output.splitlines():
                options |= set(line.partition(flag)[2].split())
            whitelist = set(d.getVar("UNKNOWN_CONFIGURE_WHITELIST", True).split())
            options -= whitelist
            if options:
                pn = d.getVar('PN', True)
                error_msg = pn + ": configure was passed unrecognised options: " + " ".join(options)
                package_qa_handle_error("unknown-configure-option", error_msg, d)
        except subprocess.CalledProcessError:
            pass
}
# The Staging Func, to check all staging
# Run as a postfunc rather than a separate task so failures surface inside
# do_populate_sysroot itself.
#addtask qa_staging after do_populate_sysroot before do_build
do_populate_sysroot[postfuncs] += "do_qa_staging "

# Check broken config.log files, for packages requiring Gettext which don't
# have it in DEPENDS and for correct LIC_FILES_CHKSUM
#addtask qa_configure after do_configure before do_compile
do_configure[postfuncs] += "do_qa_configure "
979 | |||
# Anonymous function: runs at recipe parse time to validate recipe-level
# variables before any task executes.
python () {
    tests = d.getVar('ALL_QA', True).split()
    if "desktop" in tests:
        d.appendVar("PACKAGE_DEPENDS", "desktop-file-utils-native")

    ###########################################################################
    # Check various variables
    ###########################################################################

    # Pre-OE-core recipes defined a do_stage function; refuse to parse them.
    if d.getVar('do_stage', True) is not None:
        bb.fatal("Legacy staging found for %s as it has a do_stage function. This will need conversion to a do_install or often simply removal to work with OE-core" % d.getVar("FILE", True))

    overrides = d.getVar('OVERRIDES', True).split(':')
    pn = d.getVar('PN', True)
    if pn in overrides:
        msg = 'Recipe %s has PN of "%s" which is in OVERRIDES, this can result in unexpected behaviour.' % (d.getVar("FILE", True), pn)
        package_qa_handle_error("pn-overrides", msg, d)

    # Packaging variables set without a _<pkg> suffix apply to every package;
    # flag them so the recipe author scopes them explicitly.
    issues = []
    if (d.getVar('PACKAGES', True) or "").split():
        for var in 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RCONFLICTS', 'RPROVIDES', 'RREPLACES', 'FILES', 'pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm', 'ALLOW_EMPTY':
            # NOTE(review): expand flag deliberately omitted here, so the raw
            # (unexpanded) value is tested - confirm this is intentional.
            if d.getVar(var):
                issues.append(var)
    for i in issues:
        package_qa_handle_error("pkgvarcheck", "%s: Variable %s is set as not being package specific, please fix this." % (d.getVar("FILE", True), i), d)
}