diff options
Diffstat (limited to 'meta/classes/insane.bbclass')
-rw-r--r-- | meta/classes/insane.bbclass | 1403 |
1 files changed, 0 insertions, 1403 deletions
diff --git a/meta/classes/insane.bbclass b/meta/classes/insane.bbclass deleted file mode 100644 index 53230fc667..0000000000 --- a/meta/classes/insane.bbclass +++ /dev/null | |||
@@ -1,1403 +0,0 @@ | |||
1 | # BB Class inspired by ebuild.sh | ||
2 | # | ||
3 | # This class will test files after installation for certain | ||
4 | # security issues and other kind of issues. | ||
5 | # | ||
6 | # Checks we do: | ||
7 | # -Check the ownership and permissions | ||
8 | # -Check the RUNTIME path for the $TMPDIR | ||
9 | # -Check if .la files wrongly point to workdir | ||
10 | # -Check if .pc files wrongly point to workdir | ||
11 | # -Check if packages contains .debug directories or .so files | ||
12 | # where they should be in -dev or -dbg | ||
13 | # -Check if config.log contains traces to broken autoconf tests | ||
14 | # -Check invalid characters (non-utf8) on some package metadata | ||
15 | # -Ensure that binaries in base_[bindir|sbindir|libdir] do not link | ||
16 | # into exec_prefix | ||
17 | # -Check that scripts in base_[bindir|sbindir|libdir] do not reference | ||
18 | # files under exec_prefix | ||
19 | # -Check if the package name is upper case | ||
20 | |||
# Set to "False" by package_qa_handle_error() whenever an ERROR_QA class
# issue fires; checked at the end of QA processing to fail the task.
QA_SANE = "True"

# Elect whether a given type of error is a warning or error, they may
# have been set by other files.
# Issue classes listed in WARN_QA only produce warnings ...
WARN_QA ?= " libdir xorg-driver-abi \
            textrel incompatible-license files-invalid \
            infodir build-deps src-uri-bad symlink-to-sysroot multilib \
            invalid-packageconfig host-user-contaminated uppercase-pn patch-fuzz \
            mime mime-xdg unlisted-pkg-lics unhandled-features-check \
            missing-update-alternatives native-last \
            "
# ... while classes listed in ERROR_QA fail the build (see
# package_qa_handle_error).
ERROR_QA ?= "dev-so debug-deps dev-deps debug-files arch pkgconfig la \
            perms dep-cmp pkgvarcheck perm-config perm-line perm-link \
            split-strip packages-list pkgv-undefined var-undefined \
            version-going-backwards expanded-d invalid-chars \
            license-checksum dev-elf file-rdeps configure-unsafe \
            configure-gettext perllocalpod shebang-size \
            already-stripped installed-vs-shipped ldflags compile-host-path \
            install-host-path pn-overrides unknown-configure-option \
            useless-rpaths rpaths staticdev \
            "
# Add usrmerge QA check based on distro feature
ERROR_QA_append = "${@bb.utils.contains('DISTRO_FEATURES', 'usrmerge', ' usrmerge', '', d)}"

FAKEROOT_QA = "host-user-contaminated"
FAKEROOT_QA[doc] = "QA tests which need to run under fakeroot. If any \
enabled tests are listed here, the do_package_qa task will run under fakeroot."

# Union of both lists; used when expanding INSANE_SKIP style overrides.
ALL_QA = "${WARN_QA} ${ERROR_QA}"

# Configure options that the unknown-configure-option check never reports.
UNKNOWN_CONFIGURE_WHITELIST ?= "--enable-nls --disable-nls --disable-silent-rules --disable-dependency-tracking --with-libtool-sysroot --disable-static"
52 | |||
def package_qa_clean_path(path, d, pkg=None):
    """
    Strip redundant prefixes from *path* for display.

    When *pkg* is given the PKGDEST/<pkg> prefix is removed; the TMPDIR
    prefix is always removed. Doubled slashes left behind by the
    replacements are collapsed to a single one.
    """
    cleaned = path
    if pkg:
        pkg_prefix = os.path.join(d.getVar("PKGDEST"), pkg)
        cleaned = cleaned.replace(pkg_prefix, "/")
    cleaned = cleaned.replace(d.getVar("TMPDIR"), "/")
    return cleaned.replace("//", "/")
61 | |||
def package_qa_write_error(type, error, d):
    """Append one QA issue line to QA_LOGFILE, if that variable is set."""
    logfile = d.getVar('QA_LOGFILE')
    if not logfile:
        return
    with open(logfile, "a+") as logf:
        logf.write("%s: %s [%s]\n" % (d.getVar('P'), error, type))
68 | |||
def package_qa_handle_error(error_class, error_msg, d):
    """
    Report a QA issue as error, warning or note depending on whether
    *error_class* appears in ERROR_QA or WARN_QA.

    Errors also mark QA_SANE false and are written to the QA logfile;
    returns False for errors, True otherwise.
    """
    text = "QA Issue: %s [%s]" % (error_msg, error_class)
    if error_class in (d.getVar("ERROR_QA") or "").split():
        package_qa_write_error(error_class, error_msg, d)
        bb.error(text)
        d.setVar("QA_SANE", False)
        return False
    if error_class in (d.getVar("WARN_QA") or "").split():
        package_qa_write_error(error_class, error_msg, d)
        bb.warn(text)
    else:
        bb.note(text)
    return True
81 | |||
def package_qa_add_message(messages, section, new_msg):
    """Accumulate *new_msg* under *section*, joining repeats with newlines."""
    if section in messages:
        messages[section] += "\n" + new_msg
    else:
        messages[section] = new_msg
87 | |||
88 | QAPATHTEST[shebang-size] = "package_qa_check_shebang_size" | ||
def package_qa_check_shebang_size(path, name, d, elf, messages):
    """
    Flag scripts whose '#!' line exceeds the 128-byte kernel limit.
    Symlinks, FIFOs and ELF binaries are skipped.
    """
    import stat
    if os.path.islink(path) or stat.S_ISFIFO(os.stat(path).st_mode) or elf:
        return

    try:
        with open(path, 'rb') as script:
            first_line = script.readline(130)
    except IOError:
        return

    if not first_line.startswith(b'#!'):
        # No shebang present - nothing to check
        return

    try:
        decoded = first_line.decode("utf-8")
    except UnicodeDecodeError:
        # Binary data after '#!': not actually a script
        return

    if len(decoded) > 129:
        package_qa_add_message(messages, "shebang-size", "%s: %s maximum shebang size exceeded, the maximum size is 128." % (name, package_qa_clean_path(path, d)))
111 | |||
112 | QAPATHTEST[libexec] = "package_qa_check_libexec" | ||
def package_qa_check_libexec(path, name, d, elf, messages):
    """Warn when files live under a 'libexec' dir that is not ${libexecdir}."""
    libexec = d.getVar('libexecdir')
    # Nothing to do when the distro already uses the standard location
    if libexec == "/usr/libexec":
        return True

    if 'libexec' not in path.split(os.path.sep):
        return True

    package_qa_add_message(messages, "libexec", "%s: %s is using libexec please relocate to %s" % (name, package_qa_clean_path(path, d), libexec))
    return False
125 | |||
126 | QAPATHTEST[rpaths] = "package_qa_check_rpath" | ||
def package_qa_check_rpath(file, name, d, elf, messages):
    """
    Flag ELF binaries whose RPATH points into the build tree
    (BASE_WORKDIR or STAGING_DIR_TARGET).
    """
    if not elf or os.path.islink(file):
        return

    bad_dirs = [d.getVar('BASE_WORKDIR'), d.getVar('STAGING_DIR_TARGET')]

    import re
    rpath_pattern = re.compile(r"\s+RPATH\s+(.*)")
    for phdr_line in elf.run_objdump("-p", d).split("\n"):
        match = rpath_pattern.match(phdr_line)
        if not match:
            continue
        rpath = match.group(1)
        for bad in bad_dirs:
            if bad in rpath:
                package_qa_add_message(messages, "rpaths", "package %s contains bad RPATH %s in file %s" % (name, rpath, file))
150 | |||
151 | QAPATHTEST[useless-rpaths] = "package_qa_check_useless_rpaths" | ||
def package_qa_check_useless_rpaths(file, name, d, elf, messages):
    """
    Flag RPATH entries naming only the default linker search dirs
    (${libdir}, ${base_libdir}) - harmless but redundant.
    """
    if not elf or os.path.islink(file):
        return

    libdir = d.getVar("libdir")
    base_libdir = d.getVar("base_libdir")

    def same_path(a, b):
        return os.path.normpath(a) == os.path.normpath(b)

    import re
    rpath_pattern = re.compile(r"\s+RPATH\s+(.*)")
    for phdr_line in elf.run_objdump("-p", d).split("\n"):
        match = rpath_pattern.match(phdr_line)
        if not match:
            continue
        rpath = match.group(1)
        # The dynamic linker searches both these places anyway. There is no point in
        # looking there again.
        if same_path(rpath, libdir) or same_path(rpath, base_libdir):
            package_qa_add_message(messages, "useless-rpaths", "%s: %s contains probably-redundant RPATH %s" % (name, package_qa_clean_path(file, d), rpath))
180 | |||
181 | QAPATHTEST[dev-so] = "package_qa_check_dev" | ||
def package_qa_check_dev(path, name, d, elf, messages):
    """
    Versionless .so symlinks belong in -dev packages; flag them anywhere
    else (except -dbg, -ptest and nativesdk- packages).
    """
    exempt = name.endswith(("-dev", "-dbg", "-ptest")) or name.startswith("nativesdk-")
    if exempt or not path.endswith(".so") or not os.path.islink(path):
        return
    package_qa_add_message(messages, "dev-so", "non -dev/-dbg/nativesdk- package contains symlink .so: %s path '%s'" % \
                           (name, package_qa_clean_path(path,d)))
190 | |||
191 | QAPATHTEST[dev-elf] = "package_qa_check_dev_elf" | ||
def package_qa_check_dev_elf(path, name, d, elf, messages):
    """
    A -dev package should only carry .so symlinks (or linker scripts),
    never real ELF shared objects - those belong in the runtime package.
    The elf check distinguishes linker-script .so files from real ones.
    """
    if not name.endswith("-dev"):
        return
    if path.endswith(".so") and elf and not os.path.islink(path):
        package_qa_add_message(messages, "dev-elf", "-dev package contains non-symlink .so: %s path '%s'" % \
                               (name, package_qa_clean_path(path,d)))
201 | |||
202 | QAPATHTEST[staticdev] = "package_qa_check_staticdev" | ||
def package_qa_check_staticdev(path, name, d, elf, messages):
    """
    Static archives (.a) belong in -staticdev packages. -pic and -ptest
    packages, *_nonshared.a archives and static debug data directories
    are exempt.
    """
    if name.endswith(("-pic", "-staticdev", "-ptest")):
        return
    if not path.endswith(".a") or path.endswith("_nonshared.a"):
        return
    if '/usr/lib/debug-static/' in path or '/.debug-static/' in path:
        return
    package_qa_add_message(messages, "staticdev", "non -staticdev package contains static .a library: %s path '%s'" % \
                           (name, package_qa_clean_path(path,d)))
214 | |||
215 | QAPATHTEST[mime] = "package_qa_check_mime" | ||
def package_qa_check_mime(path, name, d, elf, messages):
    """
    Shipping ${datadir}/mime/packages/*.xml requires inheriting
    mime.bbclass so the shared-mime-info database gets updated.
    """
    mime_dir = d.getVar("datadir") + "/mime/packages"
    if mime_dir in path and path.endswith('.xml') and not bb.data.inherits_class("mime", d):
        package_qa_add_message(messages, "mime", "package contains mime types but does not inherit mime: %s path '%s'" % \
                               (name, package_qa_clean_path(path,d)))
225 | |||
226 | QAPATHTEST[mime-xdg] = "package_qa_check_mime_xdg" | ||
def package_qa_check_mime_xdg(path, name, d, elf, messages):
    """
    Check if a package installs a desktop file containing a MimeType key,
    which requires inheriting mime-xdg.bbclass so that
    /usr/share/applications/mimeinfo.cache gets created.

    :param path: absolute path of the packaged file being inspected
    :param name: package name
    :param d: the BitBake datastore
    :param elf: unused (standard QAPATHTEST signature)
    :param messages: dict collecting QA messages keyed by check name

    Fixes: "inhert" typos in the user-visible messages corrected to
    "inherit"; bare except narrowed to except Exception.
    """
    if d.getVar("datadir") + "/applications" in path and path.endswith('.desktop') and not bb.data.inherits_class("mime-xdg", d):
        mime_type_found = False
        try:
            with open(path, 'r') as f:
                for line in f.read().split('\n'):
                    if 'MimeType' in line:
                        mime_type_found = True
                        break
        except Exception:
            # At least libreoffice installs symlinks with absolute paths that are dangling here.
            # We could implement some magic but for few (one) recipes it is not worth the effort so just warn:
            wstr = "%s cannot open %s - is it a symlink with absolute path?\n" % (name, package_qa_clean_path(path,d))
            wstr += "Please check if (linked) file contains key 'MimeType'.\n"
            pkgname = name
            if name == d.getVar('PN'):
                pkgname = '${PN}'
            wstr += "If yes: add \'inherit mime-xdg\' and \'MIME_XDG_PACKAGES += \"%s\"\' / if no add \'INSANE_SKIP_%s += \"mime-xdg\"\' to recipe." % (pkgname, pkgname)
            package_qa_add_message(messages, "mime-xdg", wstr)
        if mime_type_found:
            package_qa_add_message(messages, "mime-xdg", "package contains desktop file with key 'MimeType' but does not inherit mime-xdg: %s path '%s'" % \
                    (name, package_qa_clean_path(path,d)))
254 | |||
def package_qa_check_libdir(d):
    """
    Check for wrong library installation paths. For instance, catch
    recipes installing /lib/bar.so when ${base_libdir}="lib32" or
    installing in /usr/lib64 when ${libdir}="/usr/lib"

    Walks the entire PKGDEST tree once and reports every offending file
    in a single "libdir" QA issue via package_qa_handle_error().
    """
    import re

    pkgdest = d.getVar('PKGDEST')
    # Trailing os.sep makes the substring tests below match whole path
    # components rather than prefixes of longer names
    base_libdir = d.getVar("base_libdir") + os.sep
    libdir = d.getVar("libdir") + os.sep
    libexecdir = d.getVar("libexecdir") + os.sep
    exec_prefix = d.getVar("exec_prefix") + os.sep

    messages = []

    # The re's are purposely fuzzy, as some there are some .so.x.y.z files
    # that don't follow the standard naming convention. It checks later
    # that they are actual ELF files
    lib_re = re.compile(r"^/lib.+\.so(\..+)?$")
    exec_re = re.compile(r"^%s.*/lib.+\.so(\..+)?$" % exec_prefix)

    for root, dirs, files in os.walk(pkgdest):
        if root == pkgdest:
            # Skip subdirectories for any packages with libdir in INSANE_SKIP
            skippackages = []
            for package in dirs:
                if 'libdir' in (d.getVar('INSANE_SKIP_' + package) or "").split():
                    bb.note("Package %s skipping libdir QA test" % (package))
                    skippackages.append(package)
                elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory' and package.endswith("-dbg"):
                    bb.note("Package %s skipping libdir QA test for PACKAGE_DEBUG_SPLIT_STYLE equals debug-file-directory" % (package))
                    skippackages.append(package)
            # Pruning dirs in place stops os.walk from descending into them
            for package in skippackages:
                dirs.remove(package)
        for file in files:
            full_path = os.path.join(root, file)
            rel_path = os.path.relpath(full_path, pkgdest)
            if os.sep in rel_path:
                # First path component under PKGDEST is the package name
                package, rel_path = rel_path.split(os.sep, 1)
                rel_path = os.sep + rel_path
                if lib_re.match(rel_path):
                    if base_libdir not in rel_path:
                        # make sure it's an actual ELF file
                        elf = oe.qa.ELFFile(full_path)
                        try:
                            elf.open()
                            messages.append("%s: found library in wrong location: %s" % (package, rel_path))
                        except (oe.qa.NotELFFileError):
                            pass
                if exec_re.match(rel_path):
                    if libdir not in rel_path and libexecdir not in rel_path:
                        # make sure it's an actual ELF file
                        elf = oe.qa.ELFFile(full_path)
                        try:
                            elf.open()
                            messages.append("%s: found library in wrong location: %s" % (package, rel_path))
                        except (oe.qa.NotELFFileError):
                            pass

    if messages:
        package_qa_handle_error("libdir", "\n".join(messages), d)
317 | |||
318 | QAPATHTEST[debug-files] = "package_qa_check_dbg" | ||
def package_qa_check_dbg(path, name, d, elf, messages):
    """'.debug' trees may only appear in -dbg (or -ptest) packages."""
    if "-dbg" in name or "-ptest" in name:
        return
    if '.debug' in path.split(os.path.sep):
        package_qa_add_message(messages, "debug-files", "non debug package contains .debug directory: %s path %s" % \
                (name, package_qa_clean_path(path,d)))
328 | |||
329 | QAPATHTEST[arch] = "package_qa_check_arch" | ||
def package_qa_check_arch(path,name,d, elf, messages):
    """
    Check that a packaged ELF file matches the target architecture, word
    size and endianness the recipe is being built for.

    :param path: absolute path of the packaged file
    :param name: package name
    :param d: the BitBake datastore
    :param elf: an opened oe.qa.ELFFile, or None for non-ELF files
    :param messages: dict collecting QA messages keyed by check name

    Fix: "Endiannes" typo in the user-visible message corrected to
    "Endianness".
    """
    import re, oe.elf

    if not elf:
        return

    target_os = d.getVar('TARGET_OS')
    target_arch = d.getVar('TARGET_ARCH')
    provides = d.getVar('PROVIDES')
    bpn = d.getVar('BPN')

    if target_arch == "allarch":
        pn = d.getVar('PN')
        package_qa_add_message(messages, "arch", pn + ": Recipe inherits the allarch class, but has packaged architecture-specific binaries")
        return

    # FIXME: Cross package confuse this check, so just skip them
    for s in ['cross', 'nativesdk', 'cross-canadian']:
        if bb.data.inherits_class(s, d):
            return

    # avoid following links to /usr/bin (e.g. on udev builds)
    # we will check the files pointed to anyway...
    if os.path.islink(path):
        return

    #if this will throw an exception, then fix the dict above
    (machine, osabi, abiversion, littleendian, bits) \
        = oe.elf.machine_dict(d)[target_os][target_arch]

    # Check the architecture and endianness of the binary.
    # 32-bit userspaces on 64-bit kernels (x32, ilp32, mips n32) are
    # tolerated for kernel/module packages.
    is_32 = (("virtual/kernel" in provides) or bb.data.inherits_class("module", d)) and \
            (target_os == "linux-gnux32" or target_os == "linux-muslx32" or \
            target_os == "linux-gnu_ilp32" or re.match(r'mips64.*32', d.getVar('DEFAULTTUNE')))
    is_bpf = (oe.qa.elf_machine_to_string(elf.machine()) == "BPF")
    if not ((machine == elf.machine()) or is_32 or is_bpf):
        package_qa_add_message(messages, "arch", "Architecture did not match (%s, expected %s) in %s" % \
                (oe.qa.elf_machine_to_string(elf.machine()), oe.qa.elf_machine_to_string(machine), package_qa_clean_path(path, d, name)))
    elif not ((bits == elf.abiSize()) or is_32 or is_bpf):
        package_qa_add_message(messages, "arch", "Bit size did not match (%d, expected %d) in %s" % \
                (elf.abiSize(), bits, package_qa_clean_path(path, d, name)))
    elif not ((littleendian == elf.isLittleEndian()) or is_bpf):
        package_qa_add_message(messages, "arch", "Endianness did not match (%d, expected %d) in %s" % \
                (elf.isLittleEndian(), littleendian, package_qa_clean_path(path,d, name)))
377 | |||
378 | QAPATHTEST[desktop] = "package_qa_check_desktop" | ||
def package_qa_check_desktop(path, name, d, elf, messages):
    """
    Run all desktop files through desktop-file-validate and report any
    output it produces (the tool is silent on valid files).

    Fix: use subprocess.run with an argument list instead of os.popen
    with a shell-interpolated string, so paths containing spaces or
    shell metacharacters are handled safely.
    """
    if path.endswith(".desktop"):
        import subprocess
        desktop_file_validate = os.path.join(d.getVar('STAGING_BINDIR_NATIVE'), 'desktop-file-validate')
        result = subprocess.run([desktop_file_validate, path], stdout=subprocess.PIPE, universal_newlines=True)
        # This only produces output on errors
        for l in result.stdout.splitlines():
            package_qa_add_message(messages, "desktop", "Desktop file issue: " + l.strip())
389 | |||
390 | QAPATHTEST[textrel] = "package_qa_textrel" | ||
def package_qa_textrel(path, name, d, elf, messages):
    """
    Flag ELF binaries whose dynamic section carries a TEXTREL entry,
    i.e. binaries with relocations in .text.
    """
    if not elf or os.path.islink(path):
        return

    import re
    textrel_marker = re.compile(r"\s+TEXTREL\s+")
    for phdr_line in elf.run_objdump("-p", d).split("\n"):
        if textrel_marker.match(phdr_line):
            package_qa_add_message(messages, "textrel", "%s: ELF binary %s has relocations in .text" % (name, package_qa_clean_path(path, d, name)))
            return
415 | |||
416 | QAPATHTEST[ldflags] = "package_qa_hash_style" | ||
def package_qa_hash_style(path, name, d, elf, messages):
    """
    Verify that binaries built with --hash-style=gnu (or both) actually
    carry a GNU_HASH section - its absence usually means the recipe
    dropped LDFLAGS somewhere in the build.
    """
    if not elf:
        return
    if os.path.islink(path):
        return

    ldflags = d.getVar('LDFLAGS')
    if "--hash-style=gnu" not in ldflags and "--hash-style=both" not in ldflags:
        return

    has_syms = False
    found_hash = False
    # If this binary has symbols, we expect it to have GNU_HASH too.
    for line in elf.run_objdump("-p", d).split("\n"):
        if "SYMTAB" in line:
            has_syms = True
        if "GNU_HASH" in line or "DT_MIPS_XHASH" in line:
            found_hash = True
        # mips with musl has no GNU_HASH; that is expected
        if ("[mips32]" in line or "[mips64]" in line) and d.getVar('TCLIBC') == "musl":
            found_hash = True

    if has_syms and not found_hash:
        package_qa_add_message(messages, "ldflags", "File %s in package %s doesn't have GNU_HASH (didn't pass LDFLAGS?)" % (package_qa_clean_path(path, d, name), name))
450 | |||
451 | |||
452 | QAPATHTEST[buildpaths] = "package_qa_check_buildpaths" | ||
def package_qa_check_buildpaths(path, name, d, elf, messages):
    """
    Look for literal TMPDIR references embedded in packaged files.
    .debug files and symlinks are skipped.
    """
    # Ignore .debug files, not interesting
    if ".debug" in path:
        return

    # Ignore symlinks
    if os.path.islink(path):
        return

    tmpdir_bytes = d.getVar('TMPDIR').encode("utf-8")
    with open(path, 'rb') as f:
        if tmpdir_bytes in f.read():
            trimmed = path.replace(os.path.join(d.getVar("PKGDEST"), name), "")
            package_qa_add_message(messages, "buildpaths", "File %s in package %s contains reference to TMPDIR" % (trimmed, name))
471 | |||
472 | |||
473 | QAPATHTEST[xorg-driver-abi] = "package_qa_check_xorg_driver_abi" | ||
def package_qa_check_xorg_driver_abi(path, name, d, elf, messages):
    """
    Packages shipping Xorg driver modules must RDEPEND on an xorg-abi-*
    virtual so ABI breakage is caught; -dev/-dbg/nativesdk are exempt.
    """
    # Skip dev, dbg or nativesdk packages
    if name.endswith(("-dev", "-dbg")) or name.startswith("nativesdk-"):
        return

    driverdir = d.expand("${libdir}/xorg/modules/drivers/")
    if driverdir not in path or not path.endswith(".so"):
        return

    abi_prefix = "%sxorg-abi-" % (d.getVar('MLPREFIX') or '')
    for rdep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + name) or ""):
        if rdep.startswith(abi_prefix):
            return
    package_qa_add_message(messages, "xorg-driver-abi", "Package %s contains Xorg driver (%s) but no xorg-abi- dependencies" % (name, os.path.basename(path)))
490 | |||
491 | QAPATHTEST[infodir] = "package_qa_check_infodir" | ||
def package_qa_check_infodir(path, name, d, elf, messages):
    """
    The info 'dir' index is generated on the target; no package may
    ship ${infodir}/dir itself.
    """
    if d.expand("${infodir}/dir") in path:
        package_qa_add_message(messages, "infodir", "The /usr/share/info/dir file is not meant to be shipped in a particular package.")
500 | |||
501 | QAPATHTEST[symlink-to-sysroot] = "package_qa_check_symlink_to_sysroot" | ||
def package_qa_check_symlink_to_sysroot(path, name, d, elf, messages):
    """
    Absolute symlinks pointing into TMPDIR will dangle on the target -
    flag them.
    """
    if not os.path.islink(path):
        return
    target = os.readlink(path)
    if not os.path.isabs(target):
        return
    if target.startswith(d.getVar('TMPDIR')):
        trimmed = path.replace(os.path.join(d.getVar("PKGDEST"), name), "")
        package_qa_add_message(messages, "symlink-to-sysroot", "Symlink %s in %s points to TMPDIR" % (trimmed, name))
513 | |||
# Check license variables
do_populate_lic[postfuncs] += "populate_lic_qa_checksum"
python populate_lic_qa_checksum() {
    """
    Check for changes in the license files.

    Verifies every LIC_FILES_CHKSUM entry: the URL must parse, the file
    must exist, and its md5 (over the optional beginline/endline range)
    must match the recorded checksum. Mismatches are reported through
    package_qa_handle_error as "license-checksum" issues; any error-class
    result fails the task.
    """
    sane = True

    lic_files = d.getVar('LIC_FILES_CHKSUM') or ''
    lic = d.getVar('LICENSE')
    pn = d.getVar('PN')

    # CLOSED licenses are exempt from checksum tracking
    if lic == "CLOSED":
        return

    if not lic_files and d.getVar('SRC_URI'):
        sane &= package_qa_handle_error("license-checksum", pn + ": Recipe file fetches files and does not have license file information (LIC_FILES_CHKSUM)", d)

    srcdir = d.getVar('S')
    corebase_licensefile = d.getVar('COREBASE') + "/LICENSE"
    for url in lic_files.split():
        try:
            (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
        except bb.fetch.MalformedUrl:
            sane &= package_qa_handle_error("license-checksum", pn + ": LIC_FILES_CHKSUM contains an invalid URL: " + url, d)
            continue
        srclicfile = os.path.join(srcdir, path)
        if not os.path.isfile(srclicfile):
            sane &= package_qa_handle_error("license-checksum", pn + ": LIC_FILES_CHKSUM points to an invalid file: " + srclicfile, d)
            continue

        if (srclicfile == corebase_licensefile):
            bb.warn("${COREBASE}/LICENSE is not a valid license file, please use '${COMMON_LICENSE_DIR}/MIT' for a MIT License file in LIC_FILES_CHKSUM. This will become an error in the future")

        recipemd5 = parm.get('md5', '')
        # beginline/endline restrict the checksummed region; 0 means unset
        beginline, endline = 0, 0
        if 'beginline' in parm:
            beginline = int(parm['beginline'])
        if 'endline' in parm:
            endline = int(parm['endline'])

        if (not beginline) and (not endline):
            # Whole-file checksum: use the fast helper
            md5chksum = bb.utils.md5_file(srclicfile)
            with open(srclicfile, 'r', errors='replace') as f:
                license = f.read().splitlines()
        else:
            # Partial checksum: hash only lines beginline..endline while
            # also collecting them for the diagnostic excerpt below
            with open(srclicfile, 'rb') as f:
                import hashlib
                lineno = 0
                license = []
                m = hashlib.md5()
                for line in f:
                    lineno += 1
                    if (lineno >= beginline):
                        if ((lineno <= endline) or not endline):
                            m.update(line)
                            license.append(line.decode('utf-8', errors='replace').rstrip())
                        else:
                            break
                md5chksum = m.hexdigest()
        if recipemd5 == md5chksum:
            bb.note (pn + ": md5 checksum matched for ", url)
        else:
            if recipemd5:
                msg = pn + ": The LIC_FILES_CHKSUM does not match for " + url
                msg = msg + "\n" + pn + ": The new md5 checksum is " + md5chksum
                # Trim the quoted license excerpt to QA_MAX_LICENSE_LINES,
                # eliding the middle with '...'
                max_lines = int(d.getVar('QA_MAX_LICENSE_LINES') or 20)
                if not license or license[-1] != '':
                    # Ensure that our license text ends with a line break
                    # (will be added with join() below).
                    license.append('')
                remove = len(license) - max_lines
                if remove > 0:
                    start = max_lines // 2
                    end = start + remove - 1
                    del license[start:end]
                    license.insert(start, '...')
                msg = msg + "\n" + pn + ": Here is the selected license text:" + \
                      "\n" + \
                      "{:v^70}".format(" beginline=%d " % beginline if beginline else "") + \
                      "\n" + "\n".join(license) + \
                      "{:^^70}".format(" endline=%d " % endline if endline else "")
                if beginline:
                    if endline:
                        srcfiledesc = "%s (lines %d through to %d)" % (srclicfile, beginline, endline)
                    else:
                        srcfiledesc = "%s (beginning on line %d)" % (srclicfile, beginline)
                elif endline:
                    srcfiledesc = "%s (ending on line %d)" % (srclicfile, endline)
                else:
                    srcfiledesc = srclicfile
                msg = msg + "\n" + pn + ": Check if the license information has changed in %s to verify that the LICENSE value \"%s\" remains valid" % (srcfiledesc, lic)

            else:
                msg = pn + ": LIC_FILES_CHKSUM is not specified for " + url
                msg = msg + "\n" + pn + ": The md5 checksum is " + md5chksum
            sane &= package_qa_handle_error("license-checksum", msg, d)

    if not sane:
        bb.fatal("Fatal QA errors found, failing task.")
}
615 | |||
def qa_check_staged(path,d):
    """
    Check staged la and pc files for common problems like references to the work
    directory.

    As this is run after every stage we should be able to find the one
    responsible for the errors easily even if we look at every .pc and .la file.

    Returns True when no error-class issues were found. Checks can be
    suppressed per recipe with INSANE_SKIP += "la" / "pkgconfig".
    """

    sane = True
    tmpdir = d.getVar('TMPDIR')
    workdir = os.path.join(tmpdir, "work")
    recipesysroot = d.getVar("RECIPE_SYSROOT")

    # native/cross .pc files legitimately reference TMPDIR paths, so for
    # them only references to the work directory are treated as errors
    if bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d):
        pkgconfigcheck = workdir
    else:
        pkgconfigcheck = tmpdir

    skip = (d.getVar('INSANE_SKIP') or "").split()
    skip_la = False
    if 'la' in skip:
        bb.note("Recipe %s skipping qa checking: la" % d.getVar('PN'))
        skip_la = True

    skip_pkgconfig = False
    if 'pkgconfig' in skip:
        bb.note("Recipe %s skipping qa checking: pkgconfig" % d.getVar('PN'))
        skip_pkgconfig = True

    # find all .la and .pc files
    # read the content
    # and check for stuff that looks wrong
    for root, dirs, files in os.walk(path):
        for file in files:
            # NOTE(review): this rebinding shadows the 'path' parameter;
            # harmless here since the walk root was captured above
            path = os.path.join(root,file)
            if file.endswith(".la") and not skip_la:
                with open(path) as f:
                    file_content = f.read()
                    # RECIPE_SYSROOT paths are expected; strip before checking
                    file_content = file_content.replace(recipesysroot, "")
                    if workdir in file_content:
                        error_msg = "%s failed sanity test (workdir) in path %s" % (file,root)
                        sane &= package_qa_handle_error("la", error_msg, d)
            elif file.endswith(".pc") and not skip_pkgconfig:
                with open(path) as f:
                    file_content = f.read()
                    file_content = file_content.replace(recipesysroot, "")
                    if pkgconfigcheck in file_content:
                        error_msg = "%s failed sanity test (tmpdir) in path %s" % (file,root)
                        sane &= package_qa_handle_error("pkgconfig", error_msg, d)

    return sane
668 | |||
# Run all package-wide warnfuncs and errorfuncs
def package_qa_package(warnfuncs, errorfuncs, package, d):
    """
    Invoke the given package-wide QA callbacks on *package* and report
    everything they collect. Returns True when no error-class issues
    were recorded.
    """
    warnings = {}
    errors = {}

    for check in warnfuncs:
        check(package, d, warnings)
    for check in errorfuncs:
        check(package, d, errors)

    for issue, text in warnings.items():
        package_qa_handle_error(issue, text, d)
    for issue, text in errors.items():
        package_qa_handle_error(issue, text, d)

    return not errors
685 | |||
# Run all recipe-wide warnfuncs and errorfuncs
def package_qa_recipe(warnfuncs, errorfuncs, pn, d):
    """
    Invoke the given recipe-wide QA callbacks on *pn* and report
    everything they collect. Returns True when no error-class issues
    were recorded.
    """
    warnings = {}
    errors = {}

    for check in warnfuncs:
        check(pn, d, warnings)
    for check in errorfuncs:
        check(pn, d, errors)

    for issue, text in warnings.items():
        package_qa_handle_error(issue, text, d)
    for issue, text in errors.items():
        package_qa_handle_error(issue, text, d)

    return not errors
702 | |||
703 | # Walk over all files in a directory and call func | ||
704 | def package_qa_walk(warnfuncs, errorfuncs, package, d): | ||
705 | import oe.qa | ||
706 | |||
707 | #if this will throw an exception, then fix the dict above | ||
708 | target_os = d.getVar('TARGET_OS') | ||
709 | target_arch = d.getVar('TARGET_ARCH') | ||
710 | |||
711 | warnings = {} | ||
712 | errors = {} | ||
713 | for path in pkgfiles[package]: | ||
714 | elf = None | ||
715 | if os.path.isfile(path): | ||
716 | elf = oe.qa.ELFFile(path) | ||
717 | try: | ||
718 | elf.open() | ||
719 | except oe.qa.NotELFFileError: | ||
720 | elf = None | ||
721 | for func in warnfuncs: | ||
722 | func(path, package, d, elf, warnings) | ||
723 | for func in errorfuncs: | ||
724 | func(path, package, d, elf, errors) | ||
725 | |||
726 | for w in warnings: | ||
727 | package_qa_handle_error(w, warnings[w], d) | ||
728 | for e in errors: | ||
729 | package_qa_handle_error(e, errors[e], d) | ||
730 | |||
def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
    """
    Sanity-check the runtime dependencies of *pkg*.

    Three classes of problems are reported (each suppressible via the
    corresponding entry in *skip*, the INSANE_SKIP set):
      - debug-deps: a non -dbg package rdepends on a -dbg package
      - dev-deps:   a non -dev/-staticdev package rdepends on a -dev package
      - build-deps / file-rdeps: a runtime dependency (explicit or derived
        from file dependencies) has no matching build-time dependency

    taskdeps is the set of recipe names this recipe's task graph depends on;
    packages is the set of packages produced by this recipe.
    """
    # Don't do this check for kernel/module recipes, there aren't too many debug/development
    # packages and you can get false positives e.g. on kernel-module-lirc-dev
    if bb.data.inherits_class("kernel", d) or bb.data.inherits_class("module-base", d):
        return

    # Debug, packagegroup and image packages legitimately have unusual rdeps.
    if not "-dbg" in pkg and not "packagegroup-" in pkg and not "-image" in pkg:
        # Evaluate RDEPENDS with the package name as an override so
        # RDEPENDS_${PN}-style assignments are visible.
        localdata = bb.data.createCopy(d)
        localdata.setVar('OVERRIDES', localdata.getVar('OVERRIDES') + ':' + pkg)

        # Now check the RDEPENDS
        rdepends = bb.utils.explode_deps(localdata.getVar('RDEPENDS') or "")

        # Now do the sanity check!!!
        if "build-deps" not in skip:
            for rdepend in rdepends:
                if "-dbg" in rdepend and "debug-deps" not in skip:
                    error_msg = "%s rdepends on %s" % (pkg,rdepend)
                    package_qa_handle_error("debug-deps", error_msg, d)
                if (not "-dev" in pkg and not "-staticdev" in pkg) and rdepend.endswith("-dev") and "dev-deps" not in skip:
                    error_msg = "%s rdepends on %s" % (pkg, rdepend)
                    package_qa_handle_error("dev-deps", error_msg, d)
                if rdepend not in packages:
                    # Dependency on a package from another recipe: make sure
                    # that recipe is actually in our build-time task graph.
                    rdep_data = oe.packagedata.read_subpkgdata(rdepend, d)
                    if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
                        continue
                    if not rdep_data or not 'PN' in rdep_data:
                        # No direct pkgdata: the dependency may be satisfied via
                        # RPROVIDES, so scan the runtime-rprovides candidates.
                        pkgdata_dir = d.getVar("PKGDATA_DIR")
                        try:
                            possibles = os.listdir("%s/runtime-rprovides/%s/" % (pkgdata_dir, rdepend))
                        except OSError:
                            possibles = []
                        for p in possibles:
                            rdep_data = oe.packagedata.read_subpkgdata(p, d)
                            if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
                                break
                    if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
                        continue
                    if rdep_data and 'PN' in rdep_data:
                        error_msg = "%s rdepends on %s, but it isn't a build dependency, missing %s in DEPENDS or PACKAGECONFIG?" % (pkg, rdepend, rdep_data['PN'])
                    else:
                        error_msg = "%s rdepends on %s, but it isn't a build dependency?" % (pkg, rdepend)
                    package_qa_handle_error("build-deps", error_msg, d)

        if "file-rdeps" not in skip:
            # Dependencies injected by rpmdeps (shebangs, shared libs) that
            # are always provided by the base system.
            ignored_file_rdeps = set(['/bin/sh', '/usr/bin/env', 'rtld(GNU_HASH)'])
            if bb.data.inherits_class('nativesdk', d):
                ignored_file_rdeps |= set(['/bin/bash', '/usr/bin/perl', 'perl'])
            # For Saving the FILERDEPENDS
            filerdepends = {}
            rdep_data = oe.packagedata.read_subpkgdata(pkg, d)
            for key in rdep_data:
                if key.startswith("FILERDEPENDS_"):
                    for subkey in bb.utils.explode_deps(rdep_data[key]):
                        if subkey not in ignored_file_rdeps and \
                                not subkey.startswith('perl('):
                            # We already know it starts with FILERDEPENDS_
                            # (13 chars), so store only the file suffix.
                            filerdepends[subkey] = key[13:]

            if filerdepends:
                done = rdepends[:]
                # Add the rprovides of itself
                if pkg not in done:
                    done.insert(0, pkg)

                # The python is not a package, but python-core provides it, so
                # skip checking /usr/bin/python if python is in the rdeps, in
                # case there is a RDEPENDS_pkg = "python" in the recipe.
                for py in [ d.getVar('MLPREFIX') + "python", "python" ]:
                    if py in done:
                        filerdepends.pop("/usr/bin/python",None)
                        done.remove(py)
                for rdep in done:
                    # The file dependencies may contain package names, e.g.,
                    # perl
                    filerdepends.pop(rdep,None)

                    # For Saving the FILERPROVIDES, RPROVIDES and FILES_INFO
                    rdep_data = oe.packagedata.read_subpkgdata(rdep, d)
                    for key in rdep_data:
                        if key.startswith("FILERPROVIDES_") or key.startswith("RPROVIDES_"):
                            for subkey in bb.utils.explode_deps(rdep_data[key]):
                                filerdepends.pop(subkey,None)
                        # Add the files list to the rprovides
                        if key == "FILES_INFO":
                            # Use eval() to make it as a dict
                            # NOTE(review): FILES_INFO comes from do_package's
                            # generated pkgdata, not external input, so eval()
                            # is assumed trusted here — confirm before reuse.
                            for subkey in eval(rdep_data[key]):
                                filerdepends.pop(subkey,None)
                    if not filerdepends:
                        # Break if all the file rdepends are met
                        break
            if filerdepends:
                # Anything still unresolved has no provider among the rdepends.
                for key in filerdepends:
                    error_msg = "%s contained in package %s requires %s, but no providers found in RDEPENDS_%s?" % \
                            (filerdepends[key].replace("_%s" % pkg, "").replace("@underscore@", "_"), pkg, key, pkg)
                    package_qa_handle_error("file-rdeps", error_msg, d)
package_qa_check_rdepends[vardepsexclude] = "OVERRIDES"
828 | |||
def package_qa_check_deps(pkg, pkgdest, d):
    """
    Validate the version comparisons used in the runtime dependency
    variables of *pkg*: only <, =, >, <= and >= are legal operators.
    """
    localdata = bb.data.createCopy(d)
    localdata.setVar('OVERRIDES', pkg)

    def check_valid_deps(var):
        # explode_dep_versions2 raises ValueError on malformed syntax.
        try:
            entries = bb.utils.explode_dep_versions2(localdata.getVar(var) or "")
        except ValueError as e:
            bb.fatal("%s_%s: %s" % (var, pkg, e))
        for dep, versions in entries.items():
            for v in versions:
                if v and not v.startswith(('< ', '= ', '> ', '<= ', '>=')):
                    error_msg = "%s_%s is invalid: %s (%s) only comparisons <, =, >, <=, and >= are allowed" % (var, pkg, dep, v)
                    package_qa_handle_error("dep-cmp", error_msg, d)

    for var in ('RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS',
                'RPROVIDES', 'RREPLACES', 'RCONFLICTS'):
        check_valid_deps(var)
851 | |||
852 | QAPKGTEST[usrmerge] = "package_qa_check_usrmerge" | ||
def package_qa_check_usrmerge(pkg, d, messages):
    """
    With the usrmerge distro feature enabled, the top-level bin, sbin, lib
    directories (and the multilib variants) must be symlinks into /usr.
    Report the first real directory found and fail the check.
    """
    root = d.getVar('PKGDEST') + os.sep + pkg + os.sep
    candidates = ['bin', 'sbin', 'lib'] + d.getVar('MULTILIB_VARIANTS').split()
    for entry in candidates:
        target = root + entry
        # An existing non-symlink entry violates usrmerge.
        if os.path.exists(target) and not os.path.islink(target):
            msg = "%s package is not obeying usrmerge distro feature. /%s should be relocated to /usr." % (pkg, entry)
            package_qa_add_message(messages, "usrmerge", msg)
            return False
    return True
863 | |||
864 | QAPKGTEST[perllocalpod] = "package_qa_check_perllocalpod" | ||
def package_qa_check_perllocalpod(pkg, d, messages):
    """
    Check that the recipe didn't ship a perllocal.pod file, which shouldn't be
    installed in a distribution package. cpan.bbclass sets NO_PERLLOCAL=1 to
    handle this for most recipes.
    """
    import glob
    pkgd = oe.path.join(d.getVar('PKGDEST'), pkg)
    podpath = oe.path.join(pkgd, d.getVar("libdir"), "perl*", "*", "*", "perllocal.pod")

    hits = glob.glob(podpath)
    if not hits:
        return
    # Report paths relative to the package, not the build tree.
    cleaned = [package_qa_clean_path(p, d, pkg) for p in hits]
    msg = "%s contains perllocal.pod (%s), should not be installed" % (pkg, " ".join(cleaned))
    package_qa_add_message(messages, "perllocalpod", msg)
880 | |||
881 | QAPKGTEST[expanded-d] = "package_qa_check_expanded_d" | ||
def package_qa_check_expanded_d(package, d, messages):
    """
    Check for the expanded D (${D}) value in pkg_* and FILES
    variables, warn the user to use it correctly.

    Returns False (and records an "expanded-d" message) when any of the
    checked variables for *package* contains the expanded ${D} path.
    """
    expanded_d = d.getVar('D')
    sane = True

    for var in 'FILES','pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm':
        value = d.getVar(var + "_" + package) or ""
        if expanded_d not in value:
            continue
        if var == 'FILES':
            # FILES describes the target filesystem; ${D} never belongs there.
            package_qa_add_message(messages, "expanded-d", "FILES in %s recipe should not contain the ${D} variable as it references the local build directory not the target filesystem, best solution is to remove the ${D} reference" % package)
        else:
            # Scriptlets run on target/rootfs and must use the runtime $D.
            package_qa_add_message(messages, "expanded-d", "%s in %s recipe contains ${D}, it should be replaced by $D instead" % (var, package))
        sane = False
    return sane
900 | |||
901 | QAPKGTEST[unlisted-pkg-lics] = "package_qa_check_unlisted_pkg_lics" | ||
def package_qa_check_unlisted_pkg_lics(package, d, messages):
    """
    Check that all licenses for a package are among the licenses for the recipe.
    """
    pkg_lics = d.getVar('LICENSE_' + package)
    # No per-package LICENSE override: nothing to verify.
    if not pkg_lics:
        return True

    unlisted = oe.license.list_licenses(pkg_lics) - oe.license.list_licenses(d.getVar('LICENSE'))
    if unlisted:
        package_qa_add_message(messages, "unlisted-pkg-lics",
                               "LICENSE_%s includes licenses (%s) that are not "
                               "listed in LICENSE" % (package, ' '.join(unlisted)))
        return False
    return True
919 | |||
def package_qa_check_encoding(keys, encode, d):
    """
    Verify that each datastore variable named in *keys* can be encoded with
    the codec *encode* (e.g. 'utf-8'), reporting an "invalid-chars" QA issue
    and stopping at the first variable that fails.

    Fix: str.encode() raises UnicodeEncodeError (e.g. for lone surrogates
    left behind by a bad decode), but the original code only caught
    UnicodeDecodeError, so an offending value crashed the task instead of
    being reported. Catch both exception types.
    """
    def check_encoding(key, enc):
        sane = True
        value = d.getVar(key)
        if value:
            try:
                value.encode(enc)
            except (UnicodeDecodeError, UnicodeEncodeError):
                error_msg = "%s has non %s characters" % (key, enc)
                sane = False
                package_qa_handle_error("invalid-chars", error_msg, d)
        return sane

    # Stop at the first variable that fails to encode.
    for key in keys:
        if not check_encoding(key, encode):
            break
937 | |||
938 | HOST_USER_UID := "${@os.getuid()}" | ||
939 | HOST_USER_GID := "${@os.getgid()}" | ||
940 | |||
941 | QAPATHTEST[host-user-contaminated] = "package_qa_check_host_user" | ||
def package_qa_check_host_user(path, name, d, elf, messages):
    """Check for paths outside of /home which are owned by the user running bitbake."""

    if not os.path.lexists(path):
        return

    dest = d.getVar('PKGDEST')
    pn = d.getVar('PN')
    home = os.path.join(dest, 'home')
    # Files packaged under /home may legitimately carry the build user's ids.
    if path == home or path.startswith(home + os.sep):
        return

    import errno
    try:
        stat = os.lstat(path)
    except OSError as exc:
        # The file vanished between lexists() and lstat(): nothing to check.
        if exc.errno != errno.ENOENT:
            raise
        return

    check_uid = int(d.getVar('HOST_USER_UID'))
    if stat.st_uid == check_uid:
        package_qa_add_message(messages, "host-user-contaminated", "%s: %s is owned by uid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_uid))
        return False

    check_gid = int(d.getVar('HOST_USER_GID'))
    if stat.st_gid == check_gid:
        package_qa_add_message(messages, "host-user-contaminated", "%s: %s is owned by gid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_gid))
        return False
    return True
971 | |||
972 | QARECIPETEST[src-uri-bad] = "package_qa_check_src_uri" | ||
def package_qa_check_src_uri(pn, d, messages):
    """
    Flag common SRC_URI mistakes: referencing ${PN} (which breaks
    multilib/variant builds — ${BPN} should be used), and fetching
    autogenerated GitHub/GitLab archive tarballs whose checksums are
    not stable over time.
    """
    import re

    # Inspect the unexpanded value so the literal "${PN}" is visible.
    if "${PN}" in d.getVar("SRC_URI", False):
        package_qa_handle_error("src-uri-bad", "%s: SRC_URI uses PN not BPN" % pn, d)

    archive_re = re.compile(r"git(hu|la)b\.com/.+/.+/archive/.+")
    for url in d.getVar("SRC_URI").split():
        if archive_re.search(url):
            package_qa_handle_error("src-uri-bad", "%s: SRC_URI uses unstable GitHub/GitLab archives, convert recipe to use git protocol" % pn, d)
982 | |||
983 | QARECIPETEST[unhandled-features-check] = "package_qa_check_unhandled_features_check" | ||
def package_qa_check_unhandled_features_check(pn, d, messages):
    """
    ANY_OF_/REQUIRED_/CONFLICT_ *_FEATURES variables only take effect when
    the recipe inherits features_check; warn when one is set without it.
    """
    if bb.data.inherits_class('features_check', d):
        return
    for kind in ('DISTRO', 'MACHINE', 'COMBINED'):
        for prefix in ('ANY_OF_', 'REQUIRED_', 'CONFLICT_'):
            var = prefix + kind + '_FEATURES'
            # Check both the plain value and any override-conditional setting.
            if d.getVar(var) is not None or d.overridedata.get(var) is not None:
                package_qa_handle_error("unhandled-features-check", "%s: recipe doesn't inherit features_check" % pn, d)
                return
993 | |||
994 | QARECIPETEST[missing-update-alternatives] = "package_qa_check_missing_update_alternatives" | ||
def package_qa_check_missing_update_alternatives(pn, d, messages):
    """
    Warn when any package sets an ALTERNATIVE_<pkg> variable while the
    recipe does not inherit update-alternatives — the alternative would
    never be registered and do_rootfs may fail later.
    """
    for pkg in (d.getVar('PACKAGES') or '').split():
        if not d.getVar('ALTERNATIVE_%s' % pkg):
            continue
        if not bb.data.inherits_class('update-alternatives', d):
            package_qa_handle_error("missing-update-alternatives", "%s: recipe defines ALTERNATIVE_%s but doesn't inherit update-alternatives. This might fail during do_rootfs later!" % (pn, pkg), d)
1001 | |||
1002 | # The PACKAGE FUNC to scan each package | ||
python do_package_qa () {
    # Main QA task: runs every registered path/package/recipe-level check
    # over all packages produced by this recipe and fails the build when
    # QA_SANE has been cleared by an error-class finding.
    import subprocess
    import oe.packagedata

    bb.note("DO PACKAGE QA")

    bb.build.exec_func("read_subpackage_metadata", d)

    # Check non UTF-8 characters on recipe's metadata
    package_qa_check_encoding(['DESCRIPTION', 'SUMMARY', 'LICENSE', 'SECTION'], 'utf-8', d)

    logdir = d.getVar('T')
    pn = d.getVar('PN')

    # Check the compile log for host contamination
    compilelog = os.path.join(logdir,"log.do_compile")

    if os.path.exists(compilelog):
        statement = "grep -e 'CROSS COMPILE Badness:' -e 'is unsafe for cross-compilation' %s > /dev/null" % compilelog
        if subprocess.call(statement, shell=True) == 0:
            msg = "%s: The compile log indicates that host include and/or library paths were used.\n \
        Please check the log '%s' for more information." % (pn, compilelog)
            package_qa_handle_error("compile-host-path", msg, d)

    # Check the install log for host contamination
    installlog = os.path.join(logdir,"log.do_install")

    if os.path.exists(installlog):
        statement = "grep -e 'CROSS COMPILE Badness:' -e 'is unsafe for cross-compilation' %s > /dev/null" % installlog
        if subprocess.call(statement, shell=True) == 0:
            msg = "%s: The install log indicates that host include and/or library paths were used.\n \
        Please check the log '%s' for more information." % (pn, installlog)
            package_qa_handle_error("install-host-path", msg, d)

    # Scan the packages...
    pkgdest = d.getVar('PKGDEST')
    packages = set((d.getVar('PACKAGES') or '').split())

    # pkgfiles is a module-global consumed by package_qa_walk(): it maps
    # each package name to the list of files it ships.
    global pkgfiles
    pkgfiles = {}
    for pkg in packages:
        pkgfiles[pkg] = []
        pkgdir = os.path.join(pkgdest, pkg)
        for walkroot, dirs, files in os.walk(pkgdir):
            # Don't walk into top-level CONTROL or DEBIAN directories as these
            # are temporary directories created by do_package.
            if walkroot == pkgdir:
                for control in ("CONTROL", "DEBIAN"):
                    if control in dirs:
                        dirs.remove(control)
            for file in files:
                pkgfiles[pkg].append(os.path.join(walkroot, file))

    # Nothing produced any packages: no QA to run.
    if not packages:
        return

    import re
    # The package name matches the [a-z0-9.+-]+ regular expression
    pkgname_pattern = re.compile(r"^[a-z0-9.+-]+$")

    # Collect the set of recipe names we depend on at build time; the
    # rdepends check uses it to verify runtime deps are backed by DEPENDS.
    taskdepdata = d.getVar("BB_TASKDEPDATA", False)
    taskdeps = set()
    for dep in taskdepdata:
        taskdeps.add(taskdepdata[dep][0])

    def parse_test_matrix(matrix_name):
        # Resolve the varflag matrix (e.g. QAPATHTEST[foo] = "check_func")
        # into (warn, error) lists of callables, honouring INSANE_SKIP.
        # NOTE(review): this closure reads `skip` from the enclosing loop
        # below; the QARECIPETEST call after the loop therefore sees the
        # last package's skip set — confirm this is intended.
        testmatrix = d.getVarFlags(matrix_name) or {}
        g = globals()
        warnchecks = []
        for w in (d.getVar("WARN_QA") or "").split():
            if w in skip:
                continue
            if w in testmatrix and testmatrix[w] in g:
                warnchecks.append(g[testmatrix[w]])

        errorchecks = []
        for e in (d.getVar("ERROR_QA") or "").split():
            if e in skip:
                continue
            if e in testmatrix and testmatrix[e] in g:
                errorchecks.append(g[testmatrix[e]])
        return warnchecks, errorchecks

    for package in packages:
        skip = set((d.getVar('INSANE_SKIP') or "").split() +
                   (d.getVar('INSANE_SKIP_' + package) or "").split())
        if skip:
            bb.note("Package %s skipping QA tests: %s" % (package, str(skip)))

        bb.note("Checking Package: %s" % package)
        # Check package name
        if not pkgname_pattern.match(package):
            package_qa_handle_error("pkgname",
                    "%s doesn't match the [a-z0-9.+-]+ regex" % package, d)

        warn_checks, error_checks = parse_test_matrix("QAPATHTEST")
        package_qa_walk(warn_checks, error_checks, package, d)

        warn_checks, error_checks = parse_test_matrix("QAPKGTEST")
        package_qa_package(warn_checks, error_checks, package, d)

        package_qa_check_rdepends(package, pkgdest, skip, taskdeps, packages, d)
        package_qa_check_deps(package, pkgdest, d)

    # Recipe-wide checks run once, after the per-package loop.
    warn_checks, error_checks = parse_test_matrix("QARECIPETEST")
    package_qa_recipe(warn_checks, error_checks, pn, d)

    if 'libdir' in d.getVar("ALL_QA").split():
        package_qa_check_libdir(d)

    # QA_SANE is cleared by package_qa_handle_error() for error-class
    # findings; a falsy value here aborts the build.
    qa_sane = d.getVar("QA_SANE")
    if not qa_sane:
        bb.fatal("QA run found fatal errors. Please consider fixing them.")
    bb.note("DONE with PACKAGE QA")
}
1119 | |||
1120 | # binutils is used for most checks, so need to set as dependency | ||
1121 | # POPULATESYSROOTDEPS is defined in staging class. | ||
1122 | do_package_qa[depends] += "${POPULATESYSROOTDEPS}" | ||
1123 | do_package_qa[vardepsexclude] = "BB_TASKDEPDATA" | ||
1124 | do_package_qa[rdeptask] = "do_packagedata" | ||
1125 | addtask do_package_qa after do_packagedata do_package before do_build | ||
1126 | |||
1127 | # Add the package specific INSANE_SKIPs to the sstate dependencies | ||
python() {
    # Fold every per-package INSANE_SKIP_<pkg> into do_package_qa's
    # signature so that editing a skip list re-triggers the QA task.
    for pkg in (d.getVar('PACKAGES') or '').split():
        d.appendVarFlag("do_package_qa", "vardeps", " INSANE_SKIP_{}".format(pkg))
}
1133 | |||
1134 | SSTATETASKS += "do_package_qa" | ||
1135 | do_package_qa[sstate-inputdirs] = "" | ||
1136 | do_package_qa[sstate-outputdirs] = "" | ||
python do_package_qa_setscene () {
    # No outputs to restore (sstate-inputdirs/outputdirs are empty); run the
    # standard setscene handler so the task can be skipped from sstate.
    sstate_setscene(d)
}
1140 | addtask do_package_qa_setscene | ||
1141 | |||
python do_qa_staging() {
    bb.note("QA checking staging")
    # qa_check_staged() (defined earlier in this class) scans the staged
    # libdir for files that leak build-directory paths (e.g. .la/.pc files).
    if not qa_check_staged(d.expand('${SYSROOT_DESTDIR}${libdir}'), d):
        bb.fatal("QA staging was broken by the package built above")
}
1147 | |||
python do_qa_patch() {
    # Scan log.do_patch for fuzz markers emitted during patching and report
    # them via the patch-fuzz QA test (error or warning per ERROR_QA/WARN_QA).
    import subprocess

    ###########################################################################
    # Check patch.log for fuzz warnings
    #
    # Further information on why we check for patch fuzz warnings:
    # http://lists.openembedded.org/pipermail/openembedded-core/2018-March/148675.html
    # https://bugzilla.yoctoproject.org/show_bug.cgi?id=10450
    ###########################################################################

    logdir = d.getVar('T')
    patchlog = os.path.join(logdir,"log.do_patch")

    if os.path.exists(patchlog):
        fuzzheader = '--- Patch fuzz start ---'
        fuzzfooter = '--- Patch fuzz end ---'
        statement = "grep -e '%s' %s > /dev/null" % (fuzzheader, patchlog)
        if subprocess.call(statement, shell=True) == 0:
            msg = "Fuzz detected:\n\n"
            fuzzmsg = ""
            inFuzzInfo = False
            # Fix: use a context manager so the log file is closed even if
            # parsing raises (the original left the handle open on error).
            with open(patchlog, "r") as f:
                for line in f:
                    if fuzzheader in line:
                        # Start of a fuzz report: reset the accumulator.
                        inFuzzInfo = True
                        fuzzmsg = ""
                    elif fuzzfooter in line:
                        # End of a report: collapse blank lines and append.
                        fuzzmsg = fuzzmsg.replace('\n\n', '\n')
                        msg += fuzzmsg
                        msg += "\n"
                        inFuzzInfo = False
                    elif inFuzzInfo and not 'Now at patch' in line:
                        fuzzmsg += line
            msg += "The context lines in the patches can be updated with devtool:\n"
            msg += "\n"
            msg += "    devtool modify %s\n" % d.getVar('PN')
            msg += "    devtool finish --force-patch-refresh %s <layer_path>\n\n" % d.getVar('PN')
            msg += "Don't forget to review changes done by devtool!\n"
            # Emit the full fuzz details at the configured severity, then
            # record a short summary through the common QA error handler.
            if 'patch-fuzz' in d.getVar('ERROR_QA'):
                bb.error(msg)
            elif 'patch-fuzz' in d.getVar('WARN_QA'):
                bb.warn(msg)
            msg = "Patch log indicates that patches do not apply cleanly."
            package_qa_handle_error("patch-fuzz", msg, d)
}
1195 | |||
python do_qa_configure() {
    # Post-configure QA: detect host contamination in autoconf runs, missing
    # gettext dependencies, unrecognised configure options and invalid
    # PACKAGECONFIG entries.
    import subprocess

    ###########################################################################
    # Check config.log for cross compile issues
    ###########################################################################

    configs = []
    workdir = d.getVar('WORKDIR')

    skip = (d.getVar('INSANE_SKIP') or "").split()
    skip_configure_unsafe = False
    if 'configure-unsafe' in skip:
        bb.note("Recipe %s skipping qa checking: configure-unsafe" % d.getVar('PN'))
        skip_configure_unsafe = True

    if bb.data.inherits_class('autotools', d) and not skip_configure_unsafe:
        bb.note("Checking autotools environment for common misconfiguration")
        for root, dirs, files in os.walk(workdir):
            statement = "grep -q -F -e 'CROSS COMPILE Badness:' -e 'is unsafe for cross-compilation' %s" % \
                        os.path.join(root,"config.log")
            if "config.log" in files:
                if subprocess.call(statement, shell=True) == 0:
                    error_msg = """This autoconf log indicates errors, it looked at host include and/or library paths while determining system capabilities.
Rerun configure task after fixing this."""
                    package_qa_handle_error("configure-unsafe", error_msg, d)

            # Remember every configure script found; the gettext check
            # below greps them for AM_GNU_GETTEXT.
            if "configure.ac" in files:
                configs.append(os.path.join(root,"configure.ac"))
            if "configure.in" in files:
                configs.append(os.path.join(root, "configure.in"))

    ###########################################################################
    # Check gettext configuration and dependencies are correct
    ###########################################################################

    skip_configure_gettext = False
    if 'configure-gettext' in skip:
        bb.note("Recipe %s skipping qa checking: configure-gettext" % d.getVar('PN'))
        skip_configure_gettext = True

    cnf = d.getVar('EXTRA_OECONF') or ""
    # gettext itself, gcc-runtime and NLS-disabled builds are exempt.
    if not ("gettext" in d.getVar('P') or "gcc-runtime" in d.getVar('P') or \
            "--disable-nls" in cnf or skip_configure_gettext):
        ml = d.getVar("MLPREFIX") or ""
        if bb.data.inherits_class('cross-canadian', d):
            gt = "nativesdk-gettext"
        else:
            gt = "gettext-native"
        deps = bb.utils.explode_deps(d.getVar('DEPENDS') or "")
        if gt not in deps:
            # Using AM_GNU_GETTEXT without the matching DEPENDS means the
            # recipe must inherit gettext.
            for config in configs:
                gnu = "grep \"^[[:space:]]*AM_GNU_GETTEXT\" %s >/dev/null" % config
                if subprocess.call(gnu, shell=True) == 0:
                    error_msg = "AM_GNU_GETTEXT used but no inherit gettext"
                    package_qa_handle_error("configure-gettext", error_msg, d)

    ###########################################################################
    # Check unrecognised configure options (with a white list)
    ###########################################################################
    if bb.data.inherits_class("autotools", d) or bb.data.inherits_class("meson", d):
        bb.note("Checking configure output for unrecognised options")
        try:
            # If a recipe somehow inherits both classes, the meson settings
            # win (they are assigned last).
            if bb.data.inherits_class("autotools", d):
                flag = "WARNING: unrecognized options:"
                log = os.path.join(d.getVar('B'), 'config.log')
            if bb.data.inherits_class("meson", d):
                flag = "WARNING: Unknown options:"
                log = os.path.join(d.getVar('T'), 'log.do_configure')
            output = subprocess.check_output(['grep', '-F', flag, log]).decode("utf-8").replace(', ', ' ').replace('"', '')
            options = set()
            for line in output.splitlines():
                options |= set(line.partition(flag)[2].split())
            whitelist = set(d.getVar("UNKNOWN_CONFIGURE_WHITELIST").split())
            options -= whitelist
            if options:
                pn = d.getVar('PN')
                error_msg = pn + ": configure was passed unrecognised options: " + " ".join(options)
                package_qa_handle_error("unknown-configure-option", error_msg, d)
        except subprocess.CalledProcessError:
            # grep exits non-zero when no warning line exists: nothing to do.
            pass

    # Check invalid PACKAGECONFIG
    pkgconfig = (d.getVar("PACKAGECONFIG") or "").split()
    if pkgconfig:
        pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
        for pconfig in pkgconfig:
            if pconfig not in pkgconfigflags:
                pn = d.getVar('PN')
                error_msg = "%s: invalid PACKAGECONFIG: %s" % (pn, pconfig)
                package_qa_handle_error("invalid-packageconfig", error_msg, d)

    # Abort if any error-class finding cleared QA_SANE.
    qa_sane = d.getVar("QA_SANE")
    if not qa_sane:
        bb.fatal("Fatal QA errors found, failing task.")
}
1292 | |||
python do_qa_unpack() {
    # After do_unpack, ${S} must exist whenever SRC_URI is set; a missing
    # source directory makes later tasks fail in confusing ways.
    s_dir = d.getVar('S')
    if d.getVar('SRC_URI') and not os.path.exists(s_dir):
        bb.warn('%s: the directory %s (%s) pointed to by the S variable doesn\'t exist - please set S within the recipe to point to where the source has been unpacked to' % (d.getVar('PN'), d.getVar('S', False), s_dir))
}
1299 | |||
1300 | # The Staging Func, to check all staging | ||
1301 | #addtask qa_staging after do_populate_sysroot before do_build | ||
1302 | do_populate_sysroot[postfuncs] += "do_qa_staging " | ||
1303 | |||
1304 | # Check for patch fuzz | ||
1305 | do_patch[postfuncs] += "do_qa_patch " | ||
1306 | |||
1307 | # Check broken config.log files, for packages requiring Gettext which | ||
1308 | # don't have it in DEPENDS. | ||
1309 | #addtask qa_configure after do_configure before do_compile | ||
1310 | do_configure[postfuncs] += "do_qa_configure " | ||
1311 | |||
# Check that S exists.
1313 | do_unpack[postfuncs] += "do_qa_unpack" | ||
1314 | |||
python () {
    # Anonymous (parse-time) QA: validate recipe-level variables and set up
    # do_package_qa's dependencies/fakeroot before any task runs.
    import re

    tests = d.getVar('ALL_QA').split()
    if "desktop" in tests:
        d.appendVar("PACKAGE_DEPENDS", " desktop-file-utils-native")

    ###########################################################################
    # Check various variables
    ###########################################################################

    # Checking ${FILESEXTRAPATHS}
    # A plain assignment wipes the '__default' marker, which indicates the
    # variable was clobbered instead of prepended/appended.
    extrapaths = (d.getVar("FILESEXTRAPATHS") or "")
    if '__default' not in extrapaths.split(":"):
        msg = "FILESEXTRAPATHS-variable, must always use _prepend (or _append)\n"
        msg += "type of assignment, and don't forget the colon.\n"
        msg += "Please assign it with the format of:\n"
        msg += " FILESEXTRAPATHS_append := \":${THISDIR}/Your_Files_Path\" or\n"
        msg += " FILESEXTRAPATHS_prepend := \"${THISDIR}/Your_Files_Path:\"\n"
        msg += "in your bbappend file\n\n"
        msg += "Your incorrect assignment is:\n"
        msg += "%s\n" % extrapaths
        bb.warn(msg)

    overrides = d.getVar('OVERRIDES').split(':')
    pn = d.getVar('PN')
    if pn in overrides:
        msg = 'Recipe %s has PN of "%s" which is in OVERRIDES, this can result in unexpected behaviour.' % (d.getVar("FILE"), pn)
        package_qa_handle_error("pn-overrides", msg, d)
    prog = re.compile(r'[A-Z]')
    if prog.search(pn):
        package_qa_handle_error("uppercase-pn", 'PN: %s is upper case, this can result in unexpected behavior.' % pn, d)

    # Some people mistakenly use DEPENDS_${PN} instead of DEPENDS and wonder
    # why it doesn't work.
    if (d.getVar(d.expand('DEPENDS_${PN}'))):
        package_qa_handle_error("pkgvarcheck", "recipe uses DEPENDS_${PN}, should use DEPENDS", d)

    issues = []
    if (d.getVar('PACKAGES') or "").split():
        for dep in (d.getVar('QADEPENDS') or "").split():
            d.appendVarFlag('do_package_qa', 'depends', " %s:do_populate_sysroot" % dep)
        # These variables must always be package-specific (VAR_${PN}); a
        # bare assignment applies to every package and is almost never
        # intended.
        for var in 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RCONFLICTS', 'RPROVIDES', 'RREPLACES', 'FILES', 'pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm', 'ALLOW_EMPTY':
            if d.getVar(var, False):
                issues.append(var)

        # QA tests listed in FAKEROOT_QA need do_package_qa to run under
        # pseudo so file ownership is observed correctly.
        fakeroot_tests = d.getVar('FAKEROOT_QA').split()
        if set(tests) & set(fakeroot_tests):
            d.setVarFlag('do_package_qa', 'fakeroot', '1')
            d.appendVarFlag('do_package_qa', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
    else:
        d.setVarFlag('do_package_qa', 'rdeptask', '')
    for i in issues:
        package_qa_handle_error("pkgvarcheck", "%s: Variable %s is set as not being package specific, please fix this." % (d.getVar("FILE"), i), d)

    # native/nativesdk must be the last classes inherited, otherwise later
    # classes can silently override their settings. Walk the inherit cache
    # backwards and report everything inherited after them.
    if 'native-last' not in (d.getVar('INSANE_SKIP') or "").split():
        for native_class in ['native', 'nativesdk']:
            if bb.data.inherits_class(native_class, d):

                inherited_classes = d.getVar('__inherit_cache', False) or []
                needle = os.path.join('classes', native_class)

                bbclassextend = (d.getVar('BBCLASSEXTEND') or '').split()
                # BBCLASSEXTEND items are always added in the end
                skip_classes = bbclassextend
                if bb.data.inherits_class('native', d) or 'native' in bbclassextend:
                    # native also inherits nopackages and relocatable bbclasses
                    skip_classes.extend(['nopackages', 'relocatable'])

                broken_order = []
                for class_item in reversed(inherited_classes):
                    if needle not in class_item:
                        # for/else: only record classes that are not in the
                        # expected-to-follow (skip) list.
                        for extend_item in skip_classes:
                            if os.path.join('classes', '%s.bbclass' % extend_item) in class_item:
                                break
                        else:
                            pn = d.getVar('PN')
                            broken_order.append(os.path.basename(class_item))
                    else:
                        break
                if broken_order:
                    package_qa_handle_error("native-last", "%s: native/nativesdk class is not inherited last, this can result in unexpected behaviour. "
                                             "Classes inherited after native/nativesdk: %s" % (pn, " ".join(broken_order)), d)


    # Abort parsing if any error-class finding cleared QA_SANE.
    qa_sane = d.getVar("QA_SANE")
    if not qa_sane:
        bb.fatal("Fatal QA errors found, failing task.")
}