diff options
Diffstat (limited to 'meta/classes-global/insane.bbclass')
-rw-r--r-- | meta/classes-global/insane.bbclass | 1453 |
1 files changed, 1453 insertions, 0 deletions
diff --git a/meta/classes-global/insane.bbclass b/meta/classes-global/insane.bbclass new file mode 100644 index 0000000000..46ea41e271 --- /dev/null +++ b/meta/classes-global/insane.bbclass | |||
@@ -0,0 +1,1453 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | |||
7 | # BB Class inspired by ebuild.sh | ||
8 | # | ||
9 | # This class will test files after installation for certain | ||
# security issues and other kinds of issues.
11 | # | ||
12 | # Checks we do: | ||
13 | # -Check the ownership and permissions | ||
14 | # -Check the RUNTIME path for the $TMPDIR | ||
15 | # -Check if .la files wrongly point to workdir | ||
16 | # -Check if .pc files wrongly point to workdir | ||
# -Check if packages contain .debug directories or .so files
18 | # where they should be in -dev or -dbg | ||
19 | # -Check if config.log contains traces to broken autoconf tests | ||
20 | # -Check invalid characters (non-utf8) on some package metadata | ||
21 | # -Ensure that binaries in base_[bindir|sbindir|libdir] do not link | ||
22 | # into exec_prefix | ||
23 | # -Check that scripts in base_[bindir|sbindir|libdir] do not reference | ||
24 | # files under exec_prefix | ||
25 | # -Check if the package name is upper case | ||
26 | |||
27 | # Elect whether a given type of error is a warning or error, they may | ||
28 | # have been set by other files. | ||
29 | WARN_QA ?= " libdir xorg-driver-abi buildpaths \ | ||
30 | textrel incompatible-license files-invalid \ | ||
31 | infodir build-deps src-uri-bad symlink-to-sysroot multilib \ | ||
32 | invalid-packageconfig host-user-contaminated uppercase-pn patch-fuzz \ | ||
33 | mime mime-xdg unlisted-pkg-lics unhandled-features-check \ | ||
34 | missing-update-alternatives native-last missing-ptest \ | ||
35 | license-exists license-no-generic license-syntax license-format \ | ||
36 | license-incompatible license-file-missing obsolete-license \ | ||
37 | " | ||
38 | ERROR_QA ?= "dev-so debug-deps dev-deps debug-files arch pkgconfig la \ | ||
39 | perms dep-cmp pkgvarcheck perm-config perm-line perm-link \ | ||
40 | split-strip packages-list pkgv-undefined var-undefined \ | ||
41 | version-going-backwards expanded-d invalid-chars \ | ||
42 | license-checksum dev-elf file-rdeps configure-unsafe \ | ||
43 | configure-gettext perllocalpod shebang-size \ | ||
44 | already-stripped installed-vs-shipped ldflags compile-host-path \ | ||
45 | install-host-path pn-overrides unknown-configure-option \ | ||
46 | useless-rpaths rpaths staticdev empty-dirs \ | ||
47 | " | ||
48 | # Add usrmerge QA check based on distro feature | ||
49 | ERROR_QA:append = "${@bb.utils.contains('DISTRO_FEATURES', 'usrmerge', ' usrmerge', '', d)}" | ||
50 | |||
51 | FAKEROOT_QA = "host-user-contaminated" | ||
52 | FAKEROOT_QA[doc] = "QA tests which need to run under fakeroot. If any \ | ||
53 | enabled tests are listed here, the do_package_qa task will run under fakeroot." | ||
54 | |||
55 | ALL_QA = "${WARN_QA} ${ERROR_QA}" | ||
56 | |||
57 | UNKNOWN_CONFIGURE_OPT_IGNORE ?= "--enable-nls --disable-nls --disable-silent-rules --disable-dependency-tracking --with-libtool-sysroot --disable-static" | ||
58 | |||
59 | # This is a list of directories that are expected to be empty. | ||
60 | QA_EMPTY_DIRS ?= " \ | ||
61 | /dev/pts \ | ||
62 | /media \ | ||
63 | /proc \ | ||
64 | /run \ | ||
65 | /tmp \ | ||
66 | ${localstatedir}/run \ | ||
67 | ${localstatedir}/volatile \ | ||
68 | " | ||
69 | # It is possible to specify why a directory is expected to be empty by defining | ||
70 | # QA_EMPTY_DIRS_RECOMMENDATION:<path>, which will then be included in the error | ||
71 | # message if the directory is not empty. If it is not specified for a directory, | ||
72 | # then "but it is expected to be empty" will be used. | ||
73 | |||
def package_qa_clean_path(path, d, pkg=None):
    """
    Strip redundant prefixes from a path for display purposes.

    If pkg is given, the PKGDEST/<pkg> prefix is collapsed to "/";
    in all cases any TMPDIR prefix is collapsed and doubled slashes
    are reduced to a single one.
    """
    if pkg:
        pkg_prefix = os.path.join(d.getVar("PKGDEST"), pkg)
        path = path.replace(pkg_prefix, "/")
    shortened = path.replace(d.getVar("TMPDIR"), "/")
    return shortened.replace("//", "/")
82 | |||
83 | QAPATHTEST[shebang-size] = "package_qa_check_shebang_size" | ||
def package_qa_check_shebang_size(path, name, d, elf, messages):
    """
    Warn when a script's shebang line exceeds 128 characters, the
    limit enforced by the Linux kernel's binfmt_script loader.
    """
    import stat
    # Symlinks, FIFOs and ELF binaries cannot carry a meaningful shebang
    if os.path.islink(path) or stat.S_ISFIFO(os.stat(path).st_mode) or elf:
        return

    try:
        with open(path, 'rb') as f:
            # Read at most 130 bytes: enough to detect a >128-char shebang
            stanza = f.readline(130)
    except IOError:
        # Unreadable file: nothing we can check
        return

    if stanza.startswith(b'#!'):
        # A shebang line was found; decode it to measure its length
        try:
            stanza = stanza.decode("utf-8")
        except UnicodeDecodeError:
            #If it is not a text file, it is not a script
            return

        # 129 = "#!" marker plus the kernel's 128-character limit exceeded
        if len(stanza) > 129:
            oe.qa.add_message(messages, "shebang-size", "%s: %s maximum shebang size exceeded, the maximum size is 128." % (name, package_qa_clean_path(path, d)))
            return
106 | |||
107 | QAPATHTEST[libexec] = "package_qa_check_libexec" | ||
def package_qa_check_libexec(path, name, d, elf, messages):
    """
    Flag files installed under a 'libexec' directory when the
    configured ${libexecdir} is not the standard /usr/libexec.
    """
    libexec = d.getVar('libexecdir')

    if libexec == "/usr/libexec":
        # Standard location; never a problem.
        return True

    uses_libexec = 'libexec' in path.split(os.path.sep)
    if not uses_libexec:
        return True

    msg = "%s: %s is using libexec please relocate to %s" % (name, package_qa_clean_path(path, d), libexec)
    oe.qa.add_message(messages, "libexec", msg)
    return False
120 | |||
121 | QAPATHTEST[rpaths] = "package_qa_check_rpath" | ||
def package_qa_check_rpath(file, name, d, elf, messages):
    """
    Flag ELF files whose RPATH points into the build tree
    (BASE_WORKDIR or the target staging area).
    """
    if not elf or os.path.islink(file):
        return

    bad_dirs = [d.getVar('BASE_WORKDIR'), d.getVar('STAGING_DIR_TARGET')]

    import re
    rpath_re = re.compile(r"\s+RPATH\s+(.*)")

    for line in elf.run_objdump("-p", d).split("\n"):
        match = rpath_re.match(line)
        if not match:
            continue
        rpath = match.group(1)
        for bad in bad_dirs:
            if bad in rpath:
                oe.qa.add_message(messages, "rpaths", "package %s contains bad RPATH %s in file %s" % (name, rpath, file))
145 | |||
146 | QAPATHTEST[useless-rpaths] = "package_qa_check_useless_rpaths" | ||
def package_qa_check_useless_rpaths(file, name, d, elf, messages):
    """
    Flag RPATH entries that merely duplicate the default dynamic-linker
    search path (libdir / base_libdir): harmless but pointless.
    """
    if not elf or os.path.islink(file):
        return

    def same_path(a, b):
        return os.path.normpath(a) == os.path.normpath(b)

    libdir = d.getVar("libdir")
    base_libdir = d.getVar("base_libdir")

    import re
    rpath_re = re.compile(r"\s+RPATH\s+(.*)")

    for line in elf.run_objdump("-p", d).split("\n"):
        match = rpath_re.match(line)
        if not match:
            continue
        rpath = match.group(1)
        if same_path(rpath, libdir) or same_path(rpath, base_libdir):
            # The dynamic linker searches these locations by default,
            # so an explicit RPATH adds nothing.
            oe.qa.add_message(messages, "useless-rpaths", "%s: %s contains probably-redundant RPATH %s" % (name, package_qa_clean_path(file, d, name), rpath))
175 | |||
176 | QAPATHTEST[dev-so] = "package_qa_check_dev" | ||
def package_qa_check_dev(path, name, d, elf, messages):
    """
    Flag bare ".so" symlinks shipped outside of -dev/-dbg/-ptest or
    nativesdk- packages: those links belong in the -dev package.
    """
    exempt = (name.endswith("-dev") or name.endswith("-dbg")
              or name.endswith("-ptest") or name.startswith("nativesdk-"))
    if exempt:
        return
    if path.endswith(".so") and os.path.islink(path):
        oe.qa.add_message(messages, "dev-so", "non -dev/-dbg/nativesdk- package %s contains symlink .so '%s'" % \
                 (name, package_qa_clean_path(path, d, name)))
185 | |||
186 | QAPATHTEST[dev-elf] = "package_qa_check_dev_elf" | ||
def package_qa_check_dev_elf(path, name, d, elf, messages):
    """
    Flag real shared libraries (ELF object, not a symlink) shipped in a
    -dev package; .so files that are linker scripts are fine, actual
    libraries are not.
    """
    if not name.endswith("-dev"):
        return
    if path.endswith(".so") and elf and not os.path.islink(path):
        oe.qa.add_message(messages, "dev-elf", "-dev package %s contains non-symlink .so '%s'" % \
                 (name, package_qa_clean_path(path, d, name)))
196 | |||
197 | QAPATHTEST[staticdev] = "package_qa_check_staticdev" | ||
def package_qa_check_staticdev(path, name, d, elf, messages):
    """
    Flag static ".a" libraries shipped outside of -staticdev packages.

    Exceptions: -pic packages may legitimately carry static archives,
    *_nonshared.a files live with their -dev package, and archives under
    debug-static directories are skipped.
    """
    if name.endswith("-pic") or name.endswith("-staticdev") or name.endswith("-ptest"):
        return
    if not path.endswith(".a") or path.endswith("_nonshared.a"):
        return
    if '/usr/lib/debug-static/' in path or '/.debug-static/' in path:
        return
    oe.qa.add_message(messages, "staticdev", "non -staticdev package contains static .a library: %s path '%s'" % \
             (name, package_qa_clean_path(path,d, name)))
209 | |||
210 | QAPATHTEST[mime] = "package_qa_check_mime" | ||
def package_qa_check_mime(path, name, d, elf, messages):
    """
    Flag packages that install MIME type XML files under
    ${datadir}/mime/packages without inheriting mime.bbclass.
    """
    mime_dir = d.getVar("datadir") + "/mime/packages"
    if mime_dir not in path or not path.endswith('.xml'):
        return
    if bb.data.inherits_class("mime", d):
        return
    oe.qa.add_message(messages, "mime", "package contains mime types but does not inherit mime: %s path '%s'" % \
             (name, package_qa_clean_path(path,d)))
220 | |||
221 | QAPATHTEST[mime-xdg] = "package_qa_check_mime_xdg" | ||
def package_qa_check_mime_xdg(path, name, d, elf, messages):
    """
    Flag packages installing a .desktop file that declares MimeType
    without inheriting mime-xdg.bbclass, which is needed to generate
    /usr/share/applications/mimeinfo.cache on the target.
    """
    if d.getVar("datadir") + "/applications" in path and path.endswith('.desktop') and not bb.data.inherits_class("mime-xdg", d):
        mime_type_found = False
        try:
            with open(path, 'r') as f:
                for line in f.read().split('\n'):
                    if 'MimeType' in line:
                        mime_type_found = True
                        break
        # Narrowed from a bare except: dangling symlinks (OSError) and
        # undecodable files (UnicodeDecodeError) both end up here.
        except Exception:
            # At least libreoffice installs symlinks with absolute paths that are dangling here.
            # We could implement some magic but for few (one) recipes it is not worth the effort so just warn:
            wstr = "%s cannot open %s - is it a symlink with absolute path?\n" % (name, package_qa_clean_path(path,d))
            wstr += "Please check if (linked) file contains key 'MimeType'.\n"
            pkgname = name
            if name == d.getVar('PN'):
                pkgname = '${PN}'
            # Typo fix: "inhert" -> "inherit" so the suggested recipe line is valid.
            wstr += "If yes: add \'inherit mime-xdg\' and \'MIME_XDG_PACKAGES += \"%s\"\' / if no add \'INSANE_SKIP:%s += \"mime-xdg\"\' to recipe." % (pkgname, pkgname)
            oe.qa.add_message(messages, "mime-xdg", wstr)
        if mime_type_found:
            oe.qa.add_message(messages, "mime-xdg", "package contains desktop file with key 'MimeType' but does not inherit mime-xdg: %s path '%s'" % \
                    (name, package_qa_clean_path(path,d)))
249 | |||
def package_qa_check_libdir(d):
    """
    Check for wrong library installation paths. For instance, catch
    recipes installing /lib/bar.so when ${base_libdir}="lib32" or
    installing in /usr/lib64 when ${libdir}="/usr/lib"
    """
    import re

    pkgdest = d.getVar('PKGDEST')
    base_libdir = d.getVar("base_libdir") + os.sep
    libdir = d.getVar("libdir") + os.sep
    libexecdir = d.getVar("libexecdir") + os.sep
    exec_prefix = d.getVar("exec_prefix") + os.sep

    messages = []

    def is_elf(fullpath):
        # Only genuine ELF objects count; .so-named linker scripts are fine.
        elf = oe.qa.ELFFile(fullpath)
        try:
            elf.open()
            return True
        except oe.qa.NotELFFileError:
            return False

    # The re's are purposely fuzzy, as there are some .so.x.y.z files
    # that don't follow the standard naming convention. We verify below
    # that the matches are actual ELF files.
    lib_re = re.compile(r"^/lib.+\.so(\..+)?$")
    exec_re = re.compile(r"^%s.*/lib.+\.so(\..+)?$" % exec_prefix)

    for root, dirs, files in os.walk(pkgdest):
        if root == pkgdest:
            # Skip subdirectories for any packages with libdir in INSANE_SKIP
            skippackages = []
            for package in dirs:
                if 'libdir' in (d.getVar('INSANE_SKIP:' + package) or "").split():
                    bb.note("Package %s skipping libdir QA test" % (package))
                    skippackages.append(package)
                elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory' and package.endswith("-dbg"):
                    bb.note("Package %s skipping libdir QA test for PACKAGE_DEBUG_SPLIT_STYLE equals debug-file-directory" % (package))
                    skippackages.append(package)
            for package in skippackages:
                dirs.remove(package)
        for file in files:
            full_path = os.path.join(root, file)
            rel_path = os.path.relpath(full_path, pkgdest)
            if os.sep in rel_path:
                package, rel_path = rel_path.split(os.sep, 1)
                rel_path = os.sep + rel_path
                # A /lib path outside ${base_libdir} is misplaced.
                if lib_re.match(rel_path) and base_libdir not in rel_path and is_elf(full_path):
                    messages.append("%s: found library in wrong location: %s" % (package, rel_path))
                # A ${exec_prefix} lib path outside ${libdir}/${libexecdir} is misplaced.
                if exec_re.match(rel_path) and libdir not in rel_path and libexecdir not in rel_path and is_elf(full_path):
                    messages.append("%s: found library in wrong location: %s" % (package, rel_path))

    if messages:
        oe.qa.handle_error("libdir", "\n".join(messages), d)
312 | |||
313 | QAPATHTEST[debug-files] = "package_qa_check_dbg" | ||
def package_qa_check_dbg(path, name, d, elf, messages):
    """
    Flag ".debug" directories shipped outside of -dbg/-ptest packages.
    """
    if "-dbg" in name or "-ptest" in name:
        return
    if '.debug' in path.split(os.path.sep):
        oe.qa.add_message(messages, "debug-files", "non debug package contains .debug directory: %s path %s" % \
                (name, package_qa_clean_path(path,d)))
323 | |||
324 | QAPATHTEST[arch] = "package_qa_check_arch" | ||
def package_qa_check_arch(path, name, d, elf, messages):
    """
    Check that an ELF file's architecture, word size and endianness
    match the target this recipe is being built for.
    """
    if not elf:
        return

    # Imports deferred until we know this is an ELF file at all;
    # saves work for the (common) non-ELF files in a package.
    import re, oe.elf

    target_os = d.getVar('HOST_OS')
    target_arch = d.getVar('HOST_ARCH')
    provides = d.getVar('PROVIDES')
    bpn = d.getVar('BPN')

    if target_arch == "allarch":
        pn = d.getVar('PN')
        oe.qa.add_message(messages, "arch", pn + ": Recipe inherits the allarch class, but has packaged architecture-specific binaries")
        return

    # FIXME: Cross package confuse this check, so just skip them
    for s in ['cross', 'nativesdk', 'cross-canadian']:
        if bb.data.inherits_class(s, d):
            return

    # avoid following links to /usr/bin (e.g. on udev builds)
    # we will check the files pointed to anyway...
    if os.path.islink(path):
        return

    #if this will throw an exception, then fix the dict above
    (machine, osabi, abiversion, littleendian, bits) \
        = oe.elf.machine_dict(d)[target_os][target_arch]

    # Check the architecture and endianness of the binary
    is_32 = (("virtual/kernel" in provides) or bb.data.inherits_class("module", d)) and \
            (target_os == "linux-gnux32" or target_os == "linux-muslx32" or \
            target_os == "linux-gnu_ilp32" or re.match(r'mips64.*32', d.getVar('DEFAULTTUNE')))
    is_bpf = (oe.qa.elf_machine_to_string(elf.machine()) == "BPF")
    if not ((machine == elf.machine()) or is_32 or is_bpf):
        oe.qa.add_message(messages, "arch", "Architecture did not match (%s, expected %s) in %s" % \
                (oe.qa.elf_machine_to_string(elf.machine()), oe.qa.elf_machine_to_string(machine), package_qa_clean_path(path, d, name)))
    elif not ((bits == elf.abiSize()) or is_32 or is_bpf):
        oe.qa.add_message(messages, "arch", "Bit size did not match (%d, expected %d) in %s" % \
                (elf.abiSize(), bits, package_qa_clean_path(path, d, name)))
    elif not ((littleendian == elf.isLittleEndian()) or is_bpf):
        # Typo fix in message: "Endiannes" -> "Endianness"
        oe.qa.add_message(messages, "arch", "Endianness did not match (%d, expected %d) in %s" % \
                (elf.isLittleEndian(), littleendian, package_qa_clean_path(path,d, name)))
372 | |||
373 | QAPATHTEST[desktop] = "package_qa_check_desktop" | ||
def package_qa_check_desktop(path, name, d, elf, messages):
    """
    Run all desktop files through desktop-file-validate.
    """
    if path.endswith(".desktop"):
        desktop_file_validate = os.path.join(d.getVar('STAGING_BINDIR_NATIVE'),'desktop-file-validate')
        # Use an argument list (not a shell-interpolated string as the old
        # os.popen call did) so paths containing spaces or shell
        # metacharacters are passed through verbatim.
        import subprocess
        result = subprocess.run([desktop_file_validate, path], capture_output=True, text=True)
        # desktop-file-validate only produces output on errors
        for l in result.stdout.splitlines():
            oe.qa.add_message(messages, "desktop", "Desktop file issue: " + l.strip())
384 | |||
385 | QAPATHTEST[textrel] = "package_qa_textrel" | ||
def package_qa_textrel(path, name, d, elf, messages):
    """
    Flag ELF binaries that carry relocations in their .text segment
    (reported as a TEXTREL dynamic tag by objdump -p).
    """
    if not elf or os.path.islink(path):
        return

    import re
    textrel_re = re.compile(r"\s+TEXTREL\s+")

    phdrs = elf.run_objdump("-p", d)
    has_textrel = any(textrel_re.match(line) for line in phdrs.split("\n"))

    if has_textrel:
        shown = package_qa_clean_path(path, d, name)
        oe.qa.add_message(messages, "textrel", "%s: ELF binary %s has relocations in .text" % (name, shown))
410 | |||
411 | QAPATHTEST[ldflags] = "package_qa_hash_style" | ||
def package_qa_hash_style(path, name, d, elf, messages):
    """
    Verify that ELF binaries with a symbol table were linked with GNU
    hash sections, i.e. that the recipe's LDFLAGS actually took effect.
    """
    if not elf or os.path.islink(path):
        return

    ldflags = d.getVar('LDFLAGS')
    if "--hash-style=gnu" not in ldflags and "--hash-style=both" not in ldflags:
        # GNU hash was never requested; nothing to verify.
        return

    has_syms = False
    sane = False

    for line in elf.run_objdump("-p", d).split("\n"):
        # A binary with symbols is expected to have GNU_HASH too.
        if "SYMTAB" in line:
            has_syms = True
        if "GNU_HASH" in line or "MIPS_XHASH" in line:
            sane = True
        if ("[mips32]" in line or "[mips64]" in line) and d.getVar('TCLIBC') == "musl":
            # musl on MIPS does not use GNU hash sections.
            sane = True

    if has_syms and not sane:
        shown = package_qa_clean_path(path, d, name)
        oe.qa.add_message(messages, "ldflags", "File %s in package %s doesn't have GNU_HASH (didn't pass LDFLAGS?)" % (shown, name))
445 | |||
446 | |||
447 | QAPATHTEST[buildpaths] = "package_qa_check_buildpaths" | ||
def package_qa_check_buildpaths(path, name, d, elf, messages):
    """
    Check for build paths inside target files and error if paths are not
    explicitly ignored.
    """
    import stat

    # Symlinks, devices, fifos and sockets have no contents to scan.
    mode = os.lstat(path).st_mode
    special = (stat.S_ISLNK(mode) or stat.S_ISBLK(mode) or stat.S_ISFIFO(mode)
               or stat.S_ISCHR(mode) or stat.S_ISSOCK(mode))
    if special:
        return

    tmpdir = bytes(d.getVar('TMPDIR'), encoding="utf-8")
    with open(path, 'rb') as f:
        contents = f.read()

    if tmpdir in contents:
        trimmed = path.replace(os.path.join (d.getVar("PKGDEST"), name), "")
        oe.qa.add_message(messages, "buildpaths", "File %s in package %s contains reference to TMPDIR" % (trimmed, name))
466 | |||
467 | |||
468 | QAPATHTEST[xorg-driver-abi] = "package_qa_check_xorg_driver_abi" | ||
def package_qa_check_xorg_driver_abi(path, name, d, elf, messages):
    """
    Check that all packages containing Xorg drivers have ABI dependencies
    """
    # Skip dev, dbg or nativesdk packages
    if name.endswith(("-dev", "-dbg")) or name.startswith("nativesdk-"):
        return

    driverdir = d.expand("${libdir}/xorg/modules/drivers/")
    if driverdir not in path or not path.endswith(".so"):
        return

    mlprefix = d.getVar('MLPREFIX') or ''
    abi_prefix = "%sxorg-abi-" % mlprefix
    for rdep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + name) or ""):
        if rdep.startswith(abi_prefix):
            return
    oe.qa.add_message(messages, "xorg-driver-abi", "Package %s contains Xorg driver (%s) but no xorg-abi- dependencies" % (name, os.path.basename(path)))
485 | |||
486 | QAPATHTEST[infodir] = "package_qa_check_infodir" | ||
def package_qa_check_infodir(path, name, d, elf, messages):
    """
    Ensure the GNU info directory index (${infodir}/dir) is not shipped
    by any package; it is generated on the target instead.
    """
    info_index = d.expand("${infodir}/dir")
    if info_index not in path:
        return
    oe.qa.add_message(messages, "infodir", "The /usr/share/info/dir file is not meant to be shipped in a particular package.")
495 | |||
496 | QAPATHTEST[symlink-to-sysroot] = "package_qa_check_symlink_to_sysroot" | ||
def package_qa_check_symlink_to_sysroot(path, name, d, elf, messages):
    """
    Check that the package doesn't contain any absolute symlinks to the sysroot.
    """
    if not os.path.islink(path):
        return
    target = os.readlink(path)
    if not os.path.isabs(target):
        return
    if target.startswith(d.getVar('TMPDIR')):
        trimmed = path.replace(os.path.join (d.getVar("PKGDEST"), name), "")
        oe.qa.add_message(messages, "symlink-to-sysroot", "Symlink %s in %s points to TMPDIR" % (trimmed, name))
508 | |||
509 | # Check license variables | ||
# Check license variables
# Runs as a postfunc of do_populate_lic so every recipe's
# LIC_FILES_CHKSUM entries are validated right after license staging.
do_populate_lic[postfuncs] += "populate_lic_qa_checksum"
python populate_lic_qa_checksum() {
    """
    Check for changes in the license files.
    """

    lic_files = d.getVar('LIC_FILES_CHKSUM') or ''
    lic = d.getVar('LICENSE')
    pn = d.getVar('PN')

    # CLOSED licenses are exempt from checksum tracking by design.
    if lic == "CLOSED":
        return

    if not lic_files and d.getVar('SRC_URI'):
        oe.qa.handle_error("license-checksum", pn + ": Recipe file fetches files and does not have license file information (LIC_FILES_CHKSUM)", d)

    srcdir = d.getVar('S')
    corebase_licensefile = d.getVar('COREBASE') + "/LICENSE"
    for url in lic_files.split():
        try:
            (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
        except bb.fetch.MalformedUrl:
            oe.qa.handle_error("license-checksum", pn + ": LIC_FILES_CHKSUM contains an invalid URL: " + url, d)
            continue
        srclicfile = os.path.join(srcdir, path)
        if not os.path.isfile(srclicfile):
            oe.qa.handle_error("license-checksum", pn + ": LIC_FILES_CHKSUM points to an invalid file: " + srclicfile, d)
            continue

        # Pointing at the top-level OE-Core LICENSE file is deprecated.
        if (srclicfile == corebase_licensefile):
            bb.warn("${COREBASE}/LICENSE is not a valid license file, please use '${COMMON_LICENSE_DIR}/MIT' for a MIT License file in LIC_FILES_CHKSUM. This will become an error in the future")

        recipemd5 = parm.get('md5', '')
        beginline, endline = 0, 0
        if 'beginline' in parm:
            beginline = int(parm['beginline'])
        if 'endline' in parm:
            endline = int(parm['endline'])

        if (not beginline) and (not endline):
            # Whole-file case: hash the entire license file.
            md5chksum = bb.utils.md5_file(srclicfile)
            with open(srclicfile, 'r', errors='replace') as f:
                license = f.read().splitlines()
        else:
            # Partial case: hash only the beginline..endline window,
            # collecting the same lines for the diagnostic output below.
            with open(srclicfile, 'rb') as f:
                import hashlib
                lineno = 0
                license = []
                m = hashlib.new('MD5', usedforsecurity=False)
                for line in f:
                    lineno += 1
                    if (lineno >= beginline):
                        if ((lineno <= endline) or not endline):
                            m.update(line)
                            license.append(line.decode('utf-8', errors='replace').rstrip())
                        else:
                            break
                md5chksum = m.hexdigest()
        if recipemd5 == md5chksum:
            # NOTE(review): bb.note is given two arguments here; confirm
            # the resulting separator/concatenation is the intended output.
            bb.note (pn + ": md5 checksum matched for ", url)
        else:
            if recipemd5:
                msg = pn + ": The LIC_FILES_CHKSUM does not match for " + url
                msg = msg + "\n" + pn + ": The new md5 checksum is " + md5chksum
                max_lines = int(d.getVar('QA_MAX_LICENSE_LINES') or 20)
                if not license or license[-1] != '':
                    # Ensure that our license text ends with a line break
                    # (will be added with join() below).
                    license.append('')
                # Trim overly long license text from the middle so the
                # message stays within QA_MAX_LICENSE_LINES lines.
                remove = len(license) - max_lines
                if remove > 0:
                    start = max_lines // 2
                    end = start + remove - 1
                    del license[start:end]
                    license.insert(start, '...')
                # The {:v^70} / {:^^70} specs draw 70-column separator rules
                # (filled with 'v' / '^') around the quoted license text.
                msg = msg + "\n" + pn + ": Here is the selected license text:" + \
                        "\n" + \
                        "{:v^70}".format(" beginline=%d " % beginline if beginline else "") + \
                        "\n" + "\n".join(license) + \
                        "{:^^70}".format(" endline=%d " % endline if endline else "")
                if beginline:
                    if endline:
                        srcfiledesc = "%s (lines %d through to %d)" % (srclicfile, beginline, endline)
                    else:
                        srcfiledesc = "%s (beginning on line %d)" % (srclicfile, beginline)
                elif endline:
                    srcfiledesc = "%s (ending on line %d)" % (srclicfile, endline)
                else:
                    srcfiledesc = srclicfile
                msg = msg + "\n" + pn + ": Check if the license information has changed in %s to verify that the LICENSE value \"%s\" remains valid" % (srcfiledesc, lic)

            else:
                msg = pn + ": LIC_FILES_CHKSUM is not specified for " + url
                msg = msg + "\n" + pn + ": The md5 checksum is " + md5chksum
            oe.qa.handle_error("license-checksum", msg, d)

    oe.qa.exit_if_errors(d)
}
608 | |||
def qa_check_staged(path, d):
    """
    Check staged la and pc files for common problems like references to the work
    directory.

    As this is run after every stage we should be able to find the one
    responsible for the errors easily even if we look at every .pc and .la file.
    """

    tmpdir = d.getVar('TMPDIR')
    workdir = os.path.join(tmpdir, "work")
    recipesysroot = d.getVar("RECIPE_SYSROOT")

    # native/cross sysroots legitimately contain TMPDIR paths, so only
    # flag references into the work directory for them.
    if bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d):
        pkgconfigcheck = workdir
    else:
        pkgconfigcheck = tmpdir

    skip = (d.getVar('INSANE_SKIP') or "").split()
    skip_la = 'la' in skip
    if skip_la:
        bb.note("Recipe %s skipping qa checking: la" % d.getVar('PN'))

    skip_pkgconfig = 'pkgconfig' in skip
    if skip_pkgconfig:
        bb.note("Recipe %s skipping qa checking: pkgconfig" % d.getVar('PN'))

    skip_shebang_size = 'shebang-size' in skip
    if skip_shebang_size:
        # Typo fix: "checkking" -> "checking" in the log message.
        bb.note("Recipe %s skipping qa checking: shebang-size" % d.getVar('PN'))

    # find all .la and .pc files
    # read the content
    # and check for stuff that looks wrong
    for root, dirs, files in os.walk(path):
        for file in files:
            # Renamed from 'path' to avoid clobbering the parameter.
            filepath = os.path.join(root, file)
            if file.endswith(".la") and not skip_la:
                with open(filepath) as f:
                    file_content = f.read()
                    file_content = file_content.replace(recipesysroot, "")
                    if workdir in file_content:
                        error_msg = "%s failed sanity test (workdir) in path %s" % (file, root)
                        oe.qa.handle_error("la", error_msg, d)
            elif file.endswith(".pc") and not skip_pkgconfig:
                with open(filepath) as f:
                    file_content = f.read()
                    file_content = file_content.replace(recipesysroot, "")
                    if pkgconfigcheck in file_content:
                        error_msg = "%s failed sanity test (tmpdir) in path %s" % (file, root)
                        oe.qa.handle_error("pkgconfig", error_msg, d)

            # Every staged file also gets the shebang length check.
            if not skip_shebang_size:
                errors = {}
                package_qa_check_shebang_size(filepath, "", d, None, errors)
                for e in errors:
                    oe.qa.handle_error(e, errors[e], d)
669 | |||
670 | |||
671 | # Run all package-wide warnfuncs and errorfuncs | ||
def package_qa_package(warnfuncs, errorfuncs, package, d):
    """
    Run every package-wide QA function against one package.

    Both warning- and error-level findings are reported through
    oe.qa.handle_error; only error-level findings make this return False.
    """
    warnings = {}
    errors = {}

    for check in warnfuncs:
        check(package, d, warnings)
    for check in errorfuncs:
        check(package, d, errors)

    for issue, detail in warnings.items():
        oe.qa.handle_error(issue, detail, d)
    for issue, detail in errors.items():
        oe.qa.handle_error(issue, detail, d)

    return not errors
687 | |||
688 | # Run all recipe-wide warnfuncs and errorfuncs | ||
def package_qa_recipe(warnfuncs, errorfuncs, pn, d):
    """
    Run every recipe-wide QA function for this recipe.

    Both warning- and error-level findings are reported through
    oe.qa.handle_error; only error-level findings make this return False.
    """
    warnings = {}
    errors = {}

    for check in warnfuncs:
        check(pn, d, warnings)
    for check in errorfuncs:
        check(pn, d, errors)

    for issue, detail in warnings.items():
        oe.qa.handle_error(issue, detail, d)
    for issue, detail in errors.items():
        oe.qa.handle_error(issue, detail, d)

    return not errors
704 | |||
def prepopulate_objdump_p(elf, d):
    """Run 'objdump -p' on one ELF file and return (name, output) so the
    result can be cached on the ELFFile object by the caller."""
    return (elf.name, elf.run_objdump("-p", d))
708 | |||
# Walk over all files in a package and run the per-path QA check functions
def package_qa_walk(warnfuncs, errorfuncs, package, d):
    """Run path-level QA checks over every file in a package.

    ELF files are identified up front and their 'objdump -p' output is
    pre-computed in parallel (via multiprocess_launch) so the individual
    checks can reuse it cheaply. Each check is called as
    func(path, package, d, elf_or_None, messages); findings are routed
    through oe.qa.handle_error.

    Note: the original fetched HOST_OS/HOST_ARCH into locals that were
    never used (a leftover from an older dict-based dispatch); those dead
    reads have been removed.
    """
    warnings = {}
    errors = {}

    # Identify which of the package's files are ELF binaries.
    elves = {}
    for path in pkgfiles[package]:
        elf = None
        if os.path.isfile(path):
            elf = oe.qa.ELFFile(path)
            try:
                elf.open()
                elf.close()
            except oe.qa.NotELFFileError:
                elf = None
        if elf:
            elves[path] = elf

    # Pre-populate the 'objdump -p' cache for all ELF files in parallel.
    results = oe.utils.multiprocess_launch(prepopulate_objdump_p, elves.values(), d, extraargs=(d,))
    for item in results:
        elves[item[0]].set_objdump("-p", item[1])

    for path in pkgfiles[package]:
        if path in elves:
            elves[path].open()
        for func in warnfuncs:
            func(path, package, d, elves.get(path), warnings)
        for func in errorfuncs:
            func(path, package, d, elves.get(path), errors)
        if path in elves:
            elves[path].close()

    for w in warnings:
        oe.qa.handle_error(w, warnings[w], d)
    for e in errors:
        oe.qa.handle_error(e, errors[e], d)
748 | |||
def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
    """Sanity-check the runtime dependencies of a single package.

    Reports (via oe.qa.handle_error):
      - debug-deps: a non -dbg package rdepending on a -dbg package
      - dev-deps:   a non -dev/-staticdev package rdepending on a -dev package
      - build-deps: an RDEPENDS entry whose providing recipe is not among
                    this recipe's task dependencies
      - file-rdeps: per-file dependencies recorded by do_package with no
                    provider found in RDEPENDS

    skip is the set of INSANE_SKIP entries for this package; taskdeps the
    set of recipe names in BB_TASKDEPDATA; packages the set of packages
    produced by this recipe.
    """
    # Don't do this check for kernel/module recipes, there aren't too many debug/development
    # packages and you can get false positives e.g. on kernel-module-lirc-dev
    if bb.data.inherits_class("kernel", d) or bb.data.inherits_class("module-base", d):
        return

    if not "-dbg" in pkg and not "packagegroup-" in pkg and not "-image" in pkg:
        # Evaluate RDEPENDS with the package name added to OVERRIDES so
        # per-package values (RDEPENDS:<pkg>) are folded in.
        localdata = bb.data.createCopy(d)
        localdata.setVar('OVERRIDES', localdata.getVar('OVERRIDES') + ':' + pkg)

        # Now check the RDEPENDS
        rdepends = bb.utils.explode_deps(localdata.getVar('RDEPENDS') or "")

        # Now do the sanity check!!!
        if "build-deps" not in skip:
            for rdepend in rdepends:
                if "-dbg" in rdepend and "debug-deps" not in skip:
                    error_msg = "%s rdepends on %s" % (pkg,rdepend)
                    oe.qa.handle_error("debug-deps", error_msg, d)
                if (not "-dev" in pkg and not "-staticdev" in pkg) and rdepend.endswith("-dev") and "dev-deps" not in skip:
                    error_msg = "%s rdepends on %s" % (pkg, rdepend)
                    oe.qa.handle_error("dev-deps", error_msg, d)
                if rdepend not in packages:
                    # The dependency is not produced by this recipe: consult the
                    # global pkgdata to find which recipe provides it and check
                    # whether that recipe is one of our build dependencies.
                    rdep_data = oe.packagedata.read_subpkgdata(rdepend, d)
                    if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
                        continue
                    if not rdep_data or not 'PN' in rdep_data:
                        # No direct pkgdata entry; the name may be an RPROVIDES
                        # of some other package, so search those too.
                        pkgdata_dir = d.getVar("PKGDATA_DIR")
                        try:
                            possibles = os.listdir("%s/runtime-rprovides/%s/" % (pkgdata_dir, rdepend))
                        except OSError:
                            possibles = []
                        for p in possibles:
                            rdep_data = oe.packagedata.read_subpkgdata(p, d)
                            if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
                                break
                    if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
                        continue
                    if rdep_data and 'PN' in rdep_data:
                        error_msg = "%s rdepends on %s, but it isn't a build dependency, missing %s in DEPENDS or PACKAGECONFIG?" % (pkg, rdepend, rdep_data['PN'])
                    else:
                        error_msg = "%s rdepends on %s, but it isn't a build dependency?" % (pkg, rdepend)
                    oe.qa.handle_error("build-deps", error_msg, d)

        if "file-rdeps" not in skip:
            ignored_file_rdeps = set(['/bin/sh', '/usr/bin/env', 'rtld(GNU_HASH)'])
            if bb.data.inherits_class('nativesdk', d):
                ignored_file_rdeps |= set(['/bin/bash', '/usr/bin/perl', 'perl'])
            # Collect the per-file runtime dependencies recorded in pkgdata,
            # mapping each dependency to the FILERDEPENDS key that declared it.
            filerdepends = {}
            rdep_data = oe.packagedata.read_subpkgdata(pkg, d)
            for key in rdep_data:
                if key.startswith("FILERDEPENDS:"):
                    for subkey in bb.utils.explode_deps(rdep_data[key]):
                        if subkey not in ignored_file_rdeps and \
                                not subkey.startswith('perl('):
                            # Strip the leading "FILERDEPENDS:" prefix (13
                            # characters), keeping the file-specific suffix.
                            filerdepends[subkey] = key[13:]

            if filerdepends:
                done = rdepends[:]
                # Add the rprovides of itself
                if pkg not in done:
                    done.insert(0, pkg)

                # The python is not a package, but python-core provides it, so
                # skip checking /usr/bin/python if python is in the rdeps, in
                # case there is a RDEPENDS:pkg = "python" in the recipe.
                for py in [ d.getVar('MLPREFIX') + "python", "python" ]:
                    if py in done:
                        filerdepends.pop("/usr/bin/python",None)
                        done.remove(py)
                for rdep in done:
                    # The file dependencies may contain package names, e.g.,
                    # perl
                    filerdepends.pop(rdep,None)

                    # Tick off anything provided by this rdep's FILERPROVIDES,
                    # RPROVIDES or actual file list (FILES_INFO).
                    rdep_data = oe.packagedata.read_subpkgdata(rdep, d)
                    for key in rdep_data:
                        if key.startswith("FILERPROVIDES:") or key.startswith("RPROVIDES:"):
                            for subkey in bb.utils.explode_deps(rdep_data[key]):
                                filerdepends.pop(subkey,None)
                        # Add the files list to the rprovides
                        if key.startswith("FILES_INFO:"):
                            # Use eval() to make it as a dict
                            # NOTE(review): eval() here parses locally-generated
                            # pkgdata, not external input.
                            for subkey in eval(rdep_data[key]):
                                filerdepends.pop(subkey,None)
                    if not filerdepends:
                        # Break if all the file rdepends are met
                        break
            if filerdepends:
                # Whatever remains had no provider anywhere in RDEPENDS.
                for key in filerdepends:
                    error_msg = "%s contained in package %s requires %s, but no providers found in RDEPENDS:%s?" % \
                            (filerdepends[key].replace(":%s" % pkg, "").replace("@underscore@", "_"), pkg, key, pkg)
                    oe.qa.handle_error("file-rdeps", error_msg, d)
845 | package_qa_check_rdepends[vardepsexclude] = "OVERRIDES" | ||
846 | |||
def package_qa_check_deps(pkg, pkgdest, d):
    """Validate versioned runtime dependency fields for one package.

    Each R* variable is exploded with explode_dep_versions2 and every
    version constraint must start with one of the comparison operators
    the package managers understand.
    """
    localdata = bb.data.createCopy(d)
    localdata.setVar('OVERRIDES', pkg)

    def check_valid_deps(var):
        try:
            exploded = bb.utils.explode_dep_versions2(localdata.getVar(var) or "")
        except ValueError as e:
            bb.fatal("%s:%s: %s" % (var, pkg, e))
        for dep, constraints in exploded.items():
            for v in constraints:
                if v and not v.startswith(('< ', '= ', '> ', '<= ', '>=')):
                    error_msg = "%s:%s is invalid: %s (%s)   only comparisons <, =, >, <=, and >= are allowed" % (var, pkg, dep, v)
                    oe.qa.handle_error("dep-cmp", error_msg, d)

    for var in ('RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RPROVIDES', 'RREPLACES', 'RCONFLICTS'):
        check_valid_deps(var)
869 | |||
870 | QAPKGTEST[usrmerge] = "package_qa_check_usrmerge" | ||
def package_qa_check_usrmerge(pkg, d, messages):
    """Flag packages that install top-level /bin, /sbin or /lib* directories
    instead of the /usr-merged locations when usrmerge is in effect."""
    pkgdest = d.getVar('PKGDEST')
    pkg_dir = pkgdest + os.sep + pkg + os.sep
    candidates = ['bin', 'sbin', 'lib'] + d.getVar('MULTILIB_VARIANTS').split()
    for entry in candidates:
        target = pkg_dir + entry
        # A symlink (into /usr) is fine; a real directory is a violation.
        if os.path.exists(target) and not os.path.islink(target):
            msg = "%s package is not obeying usrmerge distro feature. /%s should be relocated to /usr." % (pkg, entry)
            oe.qa.add_message(messages, "usrmerge", msg)
            return False
    return True
882 | |||
883 | QAPKGTEST[perllocalpod] = "package_qa_check_perllocalpod" | ||
def package_qa_check_perllocalpod(pkg, d, messages):
    """
    Check that the recipe didn't ship a perllocal.pod file, which shouldn't be
    installed in a distribution package. cpan.bbclass sets NO_PERLLOCAL=1 to
    handle this for most recipes.
    """
    import glob
    pkgd = oe.path.join(d.getVar('PKGDEST'), pkg)
    pattern = oe.path.join(pkgd, d.getVar("libdir"), "perl*", "*", "*", "perllocal.pod")

    hits = glob.glob(pattern)
    if hits:
        cleaned = [package_qa_clean_path(p, d, pkg) for p in hits]
        msg = "%s contains perllocal.pod (%s), should not be installed" % (pkg, " ".join(cleaned))
        oe.qa.add_message(messages, "perllocalpod", msg)
899 | |||
900 | QAPKGTEST[expanded-d] = "package_qa_check_expanded_d" | ||
def package_qa_check_expanded_d(package, d, messages):
    """
    Check for the expanded D (${D}) value in pkg_* and FILES
    variables, warn the user to use it correctly.
    """
    expanded_d = d.getVar('D')
    sane = True

    for var in ('FILES', 'pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm'):
        value = d.getVar(var + ":" + package) or ""
        if expanded_d not in value:
            continue
        sane = False
        if var == 'FILES':
            oe.qa.add_message(messages, "expanded-d", "FILES in %s recipe should not contain the ${D} variable as it references the local build directory not the target filesystem, best solution is to remove the ${D} reference" % package)
        else:
            oe.qa.add_message(messages, "expanded-d", "%s in %s recipe contains ${D}, it should be replaced by $D instead" % (var, package))
    return sane
919 | |||
920 | QAPKGTEST[unlisted-pkg-lics] = "package_qa_check_unlisted_pkg_lics" | ||
def package_qa_check_unlisted_pkg_lics(package, d, messages):
    """
    Check that all licenses for a package are among the licenses for the
    recipe, and that the package does not add obsolete license names that
    the recipe-level check would not have caught.
    """
    pkg_lics = d.getVar('LICENSE:' + package)
    if not pkg_lics:
        # No per-package LICENSE override; nothing to compare.
        return True

    recipe_lics_set = oe.license.list_licenses(d.getVar('LICENSE'))
    package_lics = oe.license.list_licenses(pkg_lics)
    unlisted = package_lics - recipe_lics_set
    if unlisted:
        oe.qa.add_message(messages, "unlisted-pkg-lics",
                               "LICENSE:%s includes licenses (%s) that are not "
                               "listed in LICENSE" % (package, ' '.join(unlisted)))
        return False
    # Parenthesized deliberately: binary '-' binds tighter than '&', so the
    # unparenthesized original computed obsolete & (package_lics - recipe_lics_set),
    # which is always empty here (we returned above if that set was non-empty),
    # leaving this check unreachable.
    obsolete = (set(oe.license.obsolete_license_list()) & package_lics) - recipe_lics_set
    if obsolete:
        oe.qa.add_message(messages, "obsolete-license",
                               "LICENSE:%s includes obsolete licenses %s" % (package, ' '.join(obsolete)))
        return False
    return True
943 | |||
944 | QAPKGTEST[empty-dirs] = "package_qa_check_empty_dirs" | ||
def package_qa_check_empty_dirs(pkg, d, messages):
    """
    Report files installed into directories that are expected to remain
    empty, as configured via QA_EMPTY_DIRS (with an optional per-directory
    QA_EMPTY_DIRS_RECOMMENDATION message).
    """
    pkgd = oe.path.join(d.getVar('PKGDEST'), pkg)
    for dirname in (d.getVar('QA_EMPTY_DIRS') or "").split():
        candidate = oe.path.join(pkgd, dirname)
        if not os.path.exists(candidate) or not os.listdir(candidate):
            continue
        recommendation = d.getVar('QA_EMPTY_DIRS_RECOMMENDATION:' + dirname) or "but it is expected to be empty"
        oe.qa.add_message(messages, "empty-dirs",
                          "%s installs files in %s, %s" % (pkg, dirname, recommendation))
959 | |||
def package_qa_check_encoding(keys, encode, d):
    """Check that the listed metadata variables can be encoded with the
    given encoding (normally utf-8), reporting invalid-chars otherwise.

    keys:   iterable of variable names to check
    encode: target encoding name
    d:      the datastore

    Stops at the first variable that fails to encode.
    """
    def check_encoding(key, enc):
        sane = True
        value = d.getVar(key)
        if value:
            try:
                value.encode(enc)
            # str.encode() raises UnicodeEncodeError (e.g. on lone
            # surrogates); the original caught UnicodeDecodeError and so
            # let real encode failures escape. UnicodeError is the common
            # parent and covers both.
            except UnicodeError:
                error_msg = "%s has non %s characters" % (key,enc)
                sane = False
                oe.qa.handle_error("invalid-chars", error_msg, d)
        return sane

    for key in keys:
        sane = check_encoding(key, encode)
        if not sane:
            break
977 | |||
978 | HOST_USER_UID := "${@os.getuid()}" | ||
979 | HOST_USER_GID := "${@os.getgid()}" | ||
980 | |||
981 | QAPATHTEST[host-user-contaminated] = "package_qa_check_host_user" | ||
def package_qa_check_host_user(path, name, d, elf, messages):
    """Check for paths outside of /home which are owned by the user running bitbake."""

    if not os.path.lexists(path):
        return

    dest = d.getVar('PKGDEST')
    pn = d.getVar('PN')

    # Ownership under the package's own /home tree is legitimate.
    home = os.path.join(dest, name, 'home')
    if path == home or path.startswith(home + os.sep):
        return

    try:
        st = os.lstat(path)
    except OSError as exc:
        import errno
        # A file vanishing underneath us is tolerable; anything else is real.
        if exc.errno != errno.ENOENT:
            raise
    else:
        build_uid = int(d.getVar('HOST_USER_UID'))
        if st.st_uid == build_uid:
            oe.qa.add_message(messages, "host-user-contaminated", "%s: %s is owned by uid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), build_uid))
            return False

        build_gid = int(d.getVar('HOST_USER_GID'))
        if st.st_gid == build_gid:
            oe.qa.add_message(messages, "host-user-contaminated", "%s: %s is owned by gid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), build_gid))
            return False
    return True
1011 | |||
1012 | QARECIPETEST[unhandled-features-check] = "package_qa_check_unhandled_features_check" | ||
def package_qa_check_unhandled_features_check(pn, d, messages):
    """Warn when ANY_OF_/REQUIRED_/CONFLICT_ *_FEATURES variables are set
    but the recipe does not inherit the features_check class that acts on
    them (so the variables silently do nothing)."""
    if bb.data.inherits_class('features_check', d):
        return
    check_vars = [prefix + kind + '_FEATURES'
                  for kind in ('DISTRO', 'MACHINE', 'COMBINED')
                  for prefix in ('ANY_OF_', 'REQUIRED_', 'CONFLICT_')]
    if any(d.getVar(v) is not None or d.hasOverrides(v) for v in check_vars):
        oe.qa.handle_error("unhandled-features-check", "%s: recipe doesn't inherit features_check" % pn, d)
1022 | |||
1023 | QARECIPETEST[missing-update-alternatives] = "package_qa_check_missing_update_alternatives" | ||
def package_qa_check_missing_update_alternatives(pn, d, messages):
    """Warn for any package that sets ALTERNATIVE without the recipe
    inheriting the update-alternatives class that processes it."""
    for pkg in (d.getVar('PACKAGES') or '').split():
        defines_alternative = d.getVar('ALTERNATIVE:%s' % pkg)
        if defines_alternative and not bb.data.inherits_class('update-alternatives', d):
            oe.qa.handle_error("missing-update-alternatives", "%s: recipe defines ALTERNATIVE:%s but doesn't inherit update-alternatives. This might fail during do_rootfs later!" % (pn, pkg), d)
1030 | |||
# The PACKAGE FUNC to scan each package
python do_package_qa () {
    import subprocess
    import oe.packagedata

    bb.note("DO PACKAGE QA")

    # NOTE(review): main_lic appears to be unused below; LICENSE is re-read
    # directly on the next line.
    main_lic = d.getVar('LICENSE')

    # Check for obsolete license references in main LICENSE (packages are checked below for any changes)
    main_licenses = oe.license.list_licenses(d.getVar('LICENSE'))
    obsolete = set(oe.license.obsolete_license_list()) & main_licenses
    if obsolete:
        oe.qa.handle_error("obsolete-license", "Recipe LICENSE includes obsolete licenses %s" % ' '.join(obsolete), d)

    # Load the per-package metadata (pkgdata) produced by do_package.
    bb.build.exec_func("read_subpackage_metadata", d)

    # Check non UTF-8 characters on recipe's metadata
    package_qa_check_encoding(['DESCRIPTION', 'SUMMARY', 'LICENSE', 'SECTION'], 'utf-8', d)

    logdir = d.getVar('T')
    pn = d.getVar('PN')

    # Scan the packages...
    pkgdest = d.getVar('PKGDEST')
    packages = set((d.getVar('PACKAGES') or '').split())

    # pkgfiles is a module-level global consumed by package_qa_walk.
    global pkgfiles
    pkgfiles = {}
    for pkg in packages:
        pkgfiles[pkg] = []
        pkgdir = os.path.join(pkgdest, pkg)
        for walkroot, dirs, files in os.walk(pkgdir):
            # Don't walk into top-level CONTROL or DEBIAN directories as these
            # are temporary directories created by do_package.
            if walkroot == pkgdir:
                for control in ("CONTROL", "DEBIAN"):
                    if control in dirs:
                        dirs.remove(control)
            for file in files:
                pkgfiles[pkg].append(os.path.join(walkroot, file))

    # Nothing to scan when the recipe produces no packages.
    if not packages:
        return

    import re
    # The package name matches the [a-z0-9.+-]+ regular expression
    pkgname_pattern = re.compile(r"^[a-z0-9.+-]+$")

    # Collect the recipe names of all task dependencies; used by the
    # build-deps check to verify RDEPENDS providers are build dependencies.
    taskdepdata = d.getVar("BB_TASKDEPDATA", False)
    taskdeps = set()
    for dep in taskdepdata:
        taskdeps.add(taskdepdata[dep][0])

    # Map WARN_QA/ERROR_QA entries to check functions via the varflags of
    # the given matrix (QAPATHTEST/QAPKGTEST/QARECIPETEST). Note this reads
    # `skip` from the enclosing per-package loop via closure.
    def parse_test_matrix(matrix_name):
        testmatrix = d.getVarFlags(matrix_name) or {}
        g = globals()
        warnchecks = []
        for w in (d.getVar("WARN_QA") or "").split():
            if w in skip:
                continue
            if w in testmatrix and testmatrix[w] in g:
                warnchecks.append(g[testmatrix[w]])

        errorchecks = []
        for e in (d.getVar("ERROR_QA") or "").split():
            if e in skip:
                continue
            if e in testmatrix and testmatrix[e] in g:
                errorchecks.append(g[testmatrix[e]])
        return warnchecks, errorchecks

    for package in packages:
        skip = set((d.getVar('INSANE_SKIP') or "").split() +
                   (d.getVar('INSANE_SKIP:' + package) or "").split())
        if skip:
            bb.note("Package %s skipping QA tests: %s" % (package, str(skip)))

        bb.note("Checking Package: %s" % package)
        # Check package name
        if not pkgname_pattern.match(package):
            oe.qa.handle_error("pkgname",
                    "%s doesn't match the [a-z0-9.+-]+ regex" % package, d)

        warn_checks, error_checks = parse_test_matrix("QAPATHTEST")
        package_qa_walk(warn_checks, error_checks, package, d)

        warn_checks, error_checks = parse_test_matrix("QAPKGTEST")
        package_qa_package(warn_checks, error_checks, package, d)

        package_qa_check_rdepends(package, pkgdest, skip, taskdeps, packages, d)
        package_qa_check_deps(package, pkgdest, d)

        warn_checks, error_checks = parse_test_matrix("QARECIPETEST")
        package_qa_recipe(warn_checks, error_checks, pn, d)

    if 'libdir' in d.getVar("ALL_QA").split():
        package_qa_check_libdir(d)

    # Abort the build here if any of the above raised error-class issues.
    oe.qa.exit_if_errors(d)
}
1133 | |||
1134 | # binutils is used for most checks, so need to set as dependency | ||
1135 | # POPULATESYSROOTDEPS is defined in staging class. | ||
1136 | do_package_qa[depends] += "${POPULATESYSROOTDEPS}" | ||
1137 | do_package_qa[vardeps] = "${@bb.utils.contains('ERROR_QA', 'empty-dirs', 'QA_EMPTY_DIRS', '', d)}" | ||
1138 | do_package_qa[vardepsexclude] = "BB_TASKDEPDATA" | ||
1139 | do_package_qa[rdeptask] = "do_packagedata" | ||
1140 | addtask do_package_qa after do_packagedata do_package before do_build | ||
1141 | |||
# Add the package specific INSANE_SKIPs to the sstate dependencies
python() {
    # Each per-package INSANE_SKIP:<pkg> value changes what do_package_qa
    # reports, so fold them into the task's vardeps (and thus its signature)
    # to get correct sstate reuse.
    pkgs = (d.getVar('PACKAGES') or '').split()
    for pkg in pkgs:
        d.appendVarFlag("do_package_qa", "vardeps", " INSANE_SKIP:{}".format(pkg))
}
1148 | |||
1149 | SSTATETASKS += "do_package_qa" | ||
1150 | do_package_qa[sstate-inputdirs] = "" | ||
1151 | do_package_qa[sstate-outputdirs] = "" | ||
python do_package_qa_setscene () {
    # Standard setscene handler: satisfy do_package_qa from shared state.
    sstate_setscene(d)
}
1155 | addtask do_package_qa_setscene | ||
1156 | |||
python do_qa_sysroot() {
    # QA-check everything this recipe stages into the sysroot; findings
    # (collected by qa_check_staged) fail do_populate_sysroot.
    bb.note("QA checking do_populate_sysroot")
    sysroot_destdir = d.expand('${SYSROOT_DESTDIR}')
    for sysroot_dir in d.expand('${SYSROOT_DIRS}').split():
        qa_check_staged(sysroot_destdir + sysroot_dir, d)
    oe.qa.exit_with_message_if_errors("do_populate_sysroot for this recipe installed files with QA issues", d)
}
1164 | do_populate_sysroot[postfuncs] += "do_qa_sysroot" | ||
1165 | |||
python do_qa_patch() {
    import subprocess

    ###########################################################################
    # Check patch.log for fuzz warnings
    #
    # Further information on why we check for patch fuzz warnings:
    # http://lists.openembedded.org/pipermail/openembedded-core/2018-March/148675.html
    # https://bugzilla.yoctoproject.org/show_bug.cgi?id=10450
    ###########################################################################

    logdir = d.getVar('T')
    patchlog = os.path.join(logdir,"log.do_patch")

    if os.path.exists(patchlog):
        fuzzheader = '--- Patch fuzz start ---'
        fuzzfooter = '--- Patch fuzz end ---'
        # Cheap pre-check with grep before parsing the log line by line.
        statement = "grep -e '%s' %s > /dev/null" % (fuzzheader, patchlog)
        if subprocess.call(statement, shell=True) == 0:
            msg = "Fuzz detected:\n\n"
            fuzzmsg = ""
            inFuzzInfo = False
            # NOTE(review): file opened without a context manager; closed
            # explicitly below.
            f = open(patchlog, "r")
            # Collect the text between each fuzz start/end marker pair.
            for line in f:
                if fuzzheader in line:
                    inFuzzInfo = True
                    fuzzmsg = ""
                elif fuzzfooter in line:
                    fuzzmsg = fuzzmsg.replace('\n\n', '\n')
                    msg += fuzzmsg
                    msg += "\n"
                    inFuzzInfo = False
                elif inFuzzInfo and not 'Now at patch' in line:
                    fuzzmsg += line
            f.close()
            msg += "The context lines in the patches can be updated with devtool:\n"
            msg += "\n"
            msg += "    devtool modify %s\n" % d.getVar('PN')
            msg += "    devtool finish --force-patch-refresh %s <layer_path>\n\n" % d.getVar('PN')
            msg += "Don't forget to review changes done by devtool!\n"
            # Emit the detailed message at the configured severity, then
            # record the short summary through the normal QA machinery.
            if bb.utils.filter('ERROR_QA', 'patch-fuzz', d):
                bb.error(msg)
            elif bb.utils.filter('WARN_QA', 'patch-fuzz', d):
                bb.warn(msg)
            msg = "Patch log indicates that patches do not apply cleanly."
            oe.qa.handle_error("patch-fuzz", msg, d)

    # Check if the patch contains a correctly formatted and spelled Upstream-Status
    import re
    from oe import patch

    for url in patch.src_patches(d):
        (_, _, fullpath, _, _, _) = bb.fetch.decodeurl(url)

        # skip patches not in oe-core
        if '/meta/' not in fullpath:
            continue

        # Loose match catches misspelled/misformatted headers; strict match
        # requires the exact accepted Upstream-Status values.
        kinda_status_re = re.compile(r"^.*upstream.*status.*$", re.IGNORECASE | re.MULTILINE)
        strict_status_re = re.compile(r"^Upstream-Status: (Pending|Submitted|Denied|Accepted|Inappropriate|Backport|Inactive-Upstream)( .+)?$", re.MULTILINE)
        guidelines = "https://www.openembedded.org/wiki/Commit_Patch_Message_Guidelines#Patch_Header_Recommendations:_Upstream-Status"

        with open(fullpath, encoding='utf-8', errors='ignore') as f:
            file_content = f.read()
            match_kinda = kinda_status_re.search(file_content)
            match_strict = strict_status_re.search(file_content)

        if not match_strict:
            if match_kinda:
                bb.error("Malformed Upstream-Status in patch\n%s\nPlease correct according to %s :\n%s" % (fullpath, guidelines, match_kinda.group(0)))
            else:
                bb.error("Missing Upstream-Status in patch\n%s\nPlease add according to %s ." % (fullpath, guidelines))
}
1239 | |||
python do_qa_configure() {
    import subprocess

    ###########################################################################
    # Check config.log for cross compile issues
    ###########################################################################

    configs = []
    workdir = d.getVar('WORKDIR')

    skip = (d.getVar('INSANE_SKIP') or "").split()
    skip_configure_unsafe = False
    if 'configure-unsafe' in skip:
        bb.note("Recipe %s skipping qa checking: configure-unsafe" % d.getVar('PN'))
        skip_configure_unsafe = True

    if bb.data.inherits_class('autotools', d) and not skip_configure_unsafe:
        bb.note("Checking autotools environment for common misconfiguration")
        for root, dirs, files in os.walk(workdir):
            # The grep command is built for every directory but only run
            # when a config.log is actually present in it.
            statement = "grep -q -F -e 'is unsafe for cross-compilation' %s" % \
                        os.path.join(root,"config.log")
            if "config.log" in files:
                if subprocess.call(statement, shell=True) == 0:
                    error_msg = """This autoconf log indicates errors, it looked at host include and/or library paths while determining system capabilities.
Rerun configure task after fixing this."""
                    oe.qa.handle_error("configure-unsafe", error_msg, d)

            # Remember configure sources for the gettext check below.
            if "configure.ac" in files:
                configs.append(os.path.join(root,"configure.ac"))
            if "configure.in" in files:
                configs.append(os.path.join(root, "configure.in"))

    ###########################################################################
    # Check gettext configuration and dependencies are correct
    ###########################################################################

    skip_configure_gettext = False
    if 'configure-gettext' in skip:
        bb.note("Recipe %s skipping qa checking: configure-gettext" % d.getVar('PN'))
        skip_configure_gettext = True

    cnf = d.getVar('EXTRA_OECONF') or ""
    if not ("gettext" in d.getVar('P') or "gcc-runtime" in d.getVar('P') or \
            "--disable-nls" in cnf or skip_configure_gettext):
        ml = d.getVar("MLPREFIX") or ""
        if bb.data.inherits_class('cross-canadian', d):
            gt = "nativesdk-gettext"
        else:
            gt = "gettext-native"
        deps = bb.utils.explode_deps(d.getVar('DEPENDS') or "")
        if gt not in deps:
            # AM_GNU_GETTEXT in any configure source implies a gettext
            # build dependency the recipe does not declare.
            for config in configs:
                gnu = "grep \"^[[:space:]]*AM_GNU_GETTEXT\" %s >/dev/null" % config
                if subprocess.call(gnu, shell=True) == 0:
                    error_msg = "AM_GNU_GETTEXT used but no inherit gettext"
                    oe.qa.handle_error("configure-gettext", error_msg, d)

    ###########################################################################
    # Check unrecognised configure options (with a white list)
    ###########################################################################
    if bb.data.inherits_class("autotools", d):
        bb.note("Checking configure output for unrecognised options")
        try:
            if bb.data.inherits_class("autotools", d):
                flag = "WARNING: unrecognized options:"
                log = os.path.join(d.getVar('B'), 'config.log')
                output = subprocess.check_output(['grep', '-F', flag, log]).decode("utf-8").replace(', ', ' ').replace('"', '')
                options = set()
                for line in output.splitlines():
                    options |= set(line.partition(flag)[2].split())
                # Filter out options the distro explicitly tolerates.
                ignore_opts = set(d.getVar("UNKNOWN_CONFIGURE_OPT_IGNORE").split())
                options -= ignore_opts
                if options:
                    pn = d.getVar('PN')
                    error_msg = pn + ": configure was passed unrecognised options: " + " ".join(options)
                    oe.qa.handle_error("unknown-configure-option", error_msg, d)
        except subprocess.CalledProcessError:
            # grep exits non-zero when no match: no unrecognised options.
            pass

    # Check invalid PACKAGECONFIG
    pkgconfig = (d.getVar("PACKAGECONFIG") or "").split()
    if pkgconfig:
        pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
        for pconfig in pkgconfig:
            if pconfig not in pkgconfigflags:
                pn = d.getVar('PN')
                error_msg = "%s: invalid PACKAGECONFIG: %s" % (pn, pconfig)
                oe.qa.handle_error("invalid-packageconfig", error_msg, d)

    oe.qa.exit_if_errors(d)
}
1331 | |||
def unpack_check_src_uri(pn, d):
    """QA checks on SRC_URI performed at unpack time: use of ${PN} where
    ${BPN} is meant, and unstable autogenerated GitHub/GitLab archive URLs."""
    import re

    if 'src-uri-bad' in (d.getVar('INSANE_SKIP') or "").split():
        bb.note("Recipe %s skipping qa checking: src-uri-bad" % d.getVar('PN'))
        return

    # Inspect the unexpanded value so literal ${PN} references are visible.
    if "${PN}" in d.getVar("SRC_URI", False):
        oe.qa.handle_error("src-uri-bad", "%s: SRC_URI uses PN not BPN" % pn, d)

    # github.com/gitlab.com /archive/ tarballs are generated on demand and
    # are not guaranteed to stay bit-identical; fetch via git instead.
    archive_re = re.compile(r"git(hu|la)b\.com/.+/.+/archive/.+")
    for url in d.getVar("SRC_URI").split():
        if archive_re.search(url):
            oe.qa.handle_error("src-uri-bad", "%s: SRC_URI uses unstable GitHub/GitLab archives, convert recipe to use git protocol" % pn, d)
1347 | |||
python do_qa_unpack() {
    # Warn when S points at a directory that does not exist after unpack,
    # which usually means the recipe's S setting is wrong.
    src_uri = d.getVar('SRC_URI')
    s_dir = d.getVar('S')
    if src_uri and not os.path.exists(s_dir):
        bb.warn('%s: the directory %s (%s) pointed to by the S variable doesn\'t exist - please set S within the recipe to point to where the source has been unpacked to' % (d.getVar('PN'), d.getVar('S', False), s_dir))

    # Also validate SRC_URI itself (PN-vs-BPN misuse, unstable archive URLs).
    unpack_check_src_uri(d.getVar('PN'), d)
}
1356 | |||
1357 | # Check for patch fuzz | ||
1358 | do_patch[postfuncs] += "do_qa_patch " | ||
1359 | |||
1360 | # Check broken config.log files, for packages requiring Gettext which | ||
1361 | # don't have it in DEPENDS. | ||
1362 | #addtask qa_configure after do_configure before do_compile | ||
1363 | do_configure[postfuncs] += "do_qa_configure " | ||
1364 | |||
1365 | # Check does S exist. | ||
1366 | do_unpack[postfuncs] += "do_qa_unpack" | ||
1367 | |||
# Anonymous function: executed at recipe parse time to run QA checks on the
# recipe metadata itself (variable misuse, PN problems, inherit ordering),
# as opposed to the build/package-time checks implemented above.
python () {
    import re

    # ALL_QA is the combined list of enabled QA tests (WARN_QA + ERROR_QA).
    tests = d.getVar('ALL_QA').split()
    if "desktop" in tests:
        # The "desktop" test needs desktop-file-utils-native at packaging time.
        d.appendVar("PACKAGE_DEPENDS", " desktop-file-utils-native")

    ###########################################################################
    # Check various variables
    ###########################################################################

    # Checking ${FILESEXTRAPATHS}
    # A plain assignment (instead of :prepend/:append) drops the '__default'
    # placeholder from FILESEXTRAPATHS, which breaks the normal FILESPATH
    # search order for every other recipe/bbappend, so warn about it.
    extrapaths = (d.getVar("FILESEXTRAPATHS") or "")
    if '__default' not in extrapaths.split(":"):
        msg = "FILESEXTRAPATHS-variable, must always use :prepend (or :append)\n"
        msg += "type of assignment, and don't forget the colon.\n"
        msg += "Please assign it with the format of:\n"
        msg += " FILESEXTRAPATHS:append := \":${THISDIR}/Your_Files_Path\" or\n"
        msg += " FILESEXTRAPATHS:prepend := \"${THISDIR}/Your_Files_Path:\"\n"
        msg += "in your bbappend file\n\n"
        msg += "Your incorrect assignment is:\n"
        msg += "%s\n" % extrapaths
        bb.warn(msg)

    # A PN that appears in OVERRIDES makes every FOO:<pn> override fire for
    # this recipe, which is rarely what the author intended.
    overrides = d.getVar('OVERRIDES').split(':')
    pn = d.getVar('PN')
    if pn in overrides:
        msg = 'Recipe %s has PN of "%s" which is in OVERRIDES, this can result in unexpected behaviour.' % (d.getVar("FILE"), pn)
        oe.qa.handle_error("pn-overrides", msg, d)
    # Upper-case characters in PN clash with lower-cased override names.
    prog = re.compile(r'[A-Z]')
    if prog.search(pn):
        oe.qa.handle_error("uppercase-pn", 'PN: %s is upper case, this can result in unexpected behavior.' % pn, d)

    # Some people mistakenly use DEPENDS:${PN} instead of DEPENDS and wonder
    # why it doesn't work.
    if (d.getVar(d.expand('DEPENDS:${PN}'))):
        oe.qa.handle_error("pkgvarcheck", "recipe uses DEPENDS:${PN}, should use DEPENDS", d)

    issues = []
    if (d.getVar('PACKAGES') or "").split():
        # Recipe produces packages: wire up QADEPENDS as do_package_qa task
        # dependencies and flag package-specific variables that were set
        # without a :<pkg> suffix (getVar with expand=False so we see the
        # raw, unexpanded assignment).
        for dep in (d.getVar('QADEPENDS') or "").split():
            d.appendVarFlag('do_package_qa', 'depends', " %s:do_populate_sysroot" % dep)
        for var in 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RCONFLICTS', 'RPROVIDES', 'RREPLACES', 'FILES', 'pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm', 'ALLOW_EMPTY':
            if d.getVar(var, False):
                issues.append(var)

        # Some QA tests (FAKEROOT_QA) need fakeroot to inspect file
        # ownership correctly, so run do_package_qa under pseudo.
        fakeroot_tests = d.getVar('FAKEROOT_QA').split()
        if set(tests) & set(fakeroot_tests):
            d.setVarFlag('do_package_qa', 'fakeroot', '1')
            d.appendVarFlag('do_package_qa', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
    else:
        # No packages produced: do_package_qa has no runtime deps to follow.
        d.setVarFlag('do_package_qa', 'rdeptask', '')
    for i in issues:
        oe.qa.handle_error("pkgvarcheck", "%s: Variable %s is set as not being package specific, please fix this." % (d.getVar("FILE"), i), d)

    # native/nativesdk must be inherited last so their overrides win; walk
    # the inherit cache backwards and report any class that follows them.
    if 'native-last' not in (d.getVar('INSANE_SKIP') or "").split():
        for native_class in ['native', 'nativesdk']:
            if bb.data.inherits_class(native_class, d):

                # __inherit_cache holds the inherited class file paths in
                # inheritance order (expand=False: raw internal list).
                inherited_classes = d.getVar('__inherit_cache', False) or []
                needle = "/" + native_class

                bbclassextend = (d.getVar('BBCLASSEXTEND') or '').split()
                # BBCLASSEXTEND items are always added in the end
                # NOTE: this aliases bbclassextend, so extend() below mutates
                # both names — harmless, as bbclassextend is rebuilt on the
                # next loop iteration and not read again afterwards.
                skip_classes = bbclassextend
                if bb.data.inherits_class('native', d) or 'native' in bbclassextend:
                    # native also inherits nopackages and relocatable bbclasses
                    skip_classes.extend(['nopackages', 'relocatable'])

                # Scan from the end of the inherit list: every class after
                # native/nativesdk that is not in skip_classes violates the
                # "inherit native last" convention; stop at the first
                # native/nativesdk entry found.
                broken_order = []
                for class_item in reversed(inherited_classes):
                    if needle not in class_item:
                        for extend_item in skip_classes:
                            if '/%s.bbclass' % extend_item in class_item:
                                break
                        else:
                            pn = d.getVar('PN')
                            broken_order.append(os.path.basename(class_item))
                    else:
                        break
                if broken_order:
                    oe.qa.handle_error("native-last", "%s: native/nativesdk class is not inherited last, this can result in unexpected behaviour. "
                                                     "Classes inherited after native/nativesdk: %s" % (pn, " ".join(broken_order)), d)

    # Abort parsing now if any of the handled errors were fatal (ERROR_QA).
    oe.qa.exit_if_errors(d)
}