Diffstat (limited to 'meta/classes-global/insane.bbclass')
-rw-r--r-- | meta/classes-global/insane.bbclass | 592
1 file changed, 280 insertions, 312 deletions
diff --git a/meta/classes-global/insane.bbclass b/meta/classes-global/insane.bbclass
index e963001d09..4ef664b3ce 100644
--- a/meta/classes-global/insane.bbclass
+++ b/meta/classes-global/insane.bbclass
@@ -24,41 +24,36 @@ | |||
24 | # files under exec_prefix | 24 | # files under exec_prefix |
25 | # -Check if the package name is upper case | 25 | # -Check if the package name is upper case |
26 | 26 | ||
27 | # These tests are required to be enabled and pass for Yocto Project Compatible Status | ||
28 | # for a layer. To change this list, please contact the Yocto Project TSC. | ||
29 | CHECKLAYER_REQUIRED_TESTS = "\ | ||
30 | configure-gettext configure-unsafe debug-files dep-cmp expanded-d files-invalid \ | ||
31 | host-user-contaminated incompatible-license infodir installed-vs-shipped invalid-chars \ | ||
32 | invalid-packageconfig la \ | ||
33 | license-checksum license-exception license-exists license-file-missing license-format license-no-generic license-syntax \ | ||
34 | mime mime-xdg missing-update-alternatives multilib obsolete-license \ | ||
35 | packages-list patch-fuzz patch-status perllocalpod perm-config perm-line perm-link recipe-naming \ | ||
36 | pkgconfig pkgvarcheck pkgv-undefined pn-overrides shebang-size src-uri-bad symlink-to-sysroot \ | ||
37 | unhandled-features-check unknown-configure-option unlisted-pkg-lics uppercase-pn useless-rpaths \ | ||
38 | var-undefined virtual-slash xorg-driver-abi" | ||
39 | |||
27 | # Elect whether a given type of error is a warning or error, they may | 40 | # Elect whether a given type of error is a warning or error, they may |
28 | # have been set by other files. | 41 | # have been set by other files. |
29 | WARN_QA ?= " libdir xorg-driver-abi buildpaths \ | 42 | WARN_QA ?= "32bit-time native-last pep517-backend" |
30 | textrel incompatible-license files-invalid \ | 43 | ERROR_QA ?= "\ |
31 | infodir build-deps src-uri-bad symlink-to-sysroot multilib \ | 44 | already-stripped arch buildpaths build-deps debug-deps dev-deps dev-elf dev-so empty-dirs file-rdeps \ |
32 | invalid-packageconfig host-user-contaminated uppercase-pn \ | 45 | ldflags libdir missing-ptest rpaths staticdev textrel version-going-backwards \ |
33 | mime mime-xdg unlisted-pkg-lics unhandled-features-check \ | 46 | ${CHECKLAYER_REQUIRED_TESTS}" |
34 | missing-update-alternatives native-last missing-ptest \ | 47 | |
35 | license-exists license-no-generic license-syntax license-format \ | ||
36 | license-incompatible license-file-missing obsolete-license \ | ||
37 | 32bit-time virtual-slash \ | ||
38 | " | ||
39 | ERROR_QA ?= "dev-so debug-deps dev-deps debug-files arch pkgconfig la \ | ||
40 | perms dep-cmp pkgvarcheck perm-config perm-line perm-link \ | ||
41 | split-strip packages-list pkgv-undefined var-undefined \ | ||
42 | version-going-backwards expanded-d invalid-chars \ | ||
43 | license-checksum dev-elf file-rdeps configure-unsafe \ | ||
44 | configure-gettext perllocalpod shebang-size \ | ||
45 | already-stripped installed-vs-shipped ldflags compile-host-path \ | ||
46 | install-host-path pn-overrides unknown-configure-option \ | ||
47 | useless-rpaths rpaths staticdev empty-dirs \ | ||
48 | patch-fuzz \ | ||
49 | " | ||
50 | # Add usrmerge QA check based on distro feature | 48 | # Add usrmerge QA check based on distro feature |
51 | ERROR_QA:append = "${@bb.utils.contains('DISTRO_FEATURES', 'usrmerge', ' usrmerge', '', d)}" | 49 | ERROR_QA:append = "${@bb.utils.contains('DISTRO_FEATURES', 'usrmerge', ' usrmerge', '', d)}" |
52 | ERROR_QA:append:layer-core = " patch-status" | ||
53 | WARN_QA:append:layer-core = " missing-metadata missing-maintainer" | 50 | WARN_QA:append:layer-core = " missing-metadata missing-maintainer" |
54 | 51 | ||
55 | FAKEROOT_QA = "host-user-contaminated" | 52 | FAKEROOT_QA = "host-user-contaminated" |
56 | FAKEROOT_QA[doc] = "QA tests which need to run under fakeroot. If any \ | 53 | FAKEROOT_QA[doc] = "QA tests which need to run under fakeroot. If any \ |
57 | enabled tests are listed here, the do_package_qa task will run under fakeroot." | 54 | enabled tests are listed here, the do_package_qa task will run under fakeroot." |
58 | 55 | ||
59 | ALL_QA = "${WARN_QA} ${ERROR_QA}" | 56 | UNKNOWN_CONFIGURE_OPT_IGNORE ?= "--enable-nls --disable-nls --disable-silent-rules --disable-dependency-tracking --disable-static" |
60 | |||
61 | UNKNOWN_CONFIGURE_OPT_IGNORE ?= "--enable-nls --disable-nls --disable-silent-rules --disable-dependency-tracking --with-libtool-sysroot --disable-static" | ||
62 | 57 | ||
63 | # This is a list of directories that are expected to be empty. | 58 | # This is a list of directories that are expected to be empty. |
64 | QA_EMPTY_DIRS ?= " \ | 59 | QA_EMPTY_DIRS ?= " \ |
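The reshuffled lists above feed the same dispatch that every check now reaches directly through oe.qa.handle_error(). The sketch below is a simplified, hypothetical model of that dispatch (the real logic lives in meta/lib/oe/qa.py and differs in detail): membership in ERROR_QA makes an issue fatal, WARN_QA makes it a warning, and INSANE_SKIP suppresses it per recipe.

```python
# Hypothetical, simplified model of the WARN_QA/ERROR_QA/INSANE_SKIP dispatch;
# the real implementation is oe.qa.handle_error() in meta/lib/oe/qa.py.
def classify_qa_issue(issue, error_qa, warn_qa, insane_skip):
    """Return how a named QA issue would be reported."""
    if issue in insane_skip:
        return "skipped"      # suppressed per-recipe via INSANE_SKIP
    if issue in error_qa:
        return "error"        # fails do_package_qa
    if issue in warn_qa:
        return "warning"      # reported, build continues
    return "note"             # test not enabled at either severity

# 'patch-fuzz' is part of CHECKLAYER_REQUIRED_TESTS, which ERROR_QA now pulls in.
print(classify_qa_issue("patch-fuzz",
                        error_qa={"patch-fuzz", "ldflags"},
                        warn_qa={"32bit-time"},
                        insane_skip=set()))      # -> "error"
```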
@@ -85,9 +80,10 @@ def package_qa_clean_path(path, d, pkg=None): | |||
85 | return path.replace(d.getVar("TMPDIR"), "/").replace("//", "/") | 80 | return path.replace(d.getVar("TMPDIR"), "/").replace("//", "/") |
86 | 81 | ||
87 | QAPATHTEST[shebang-size] = "package_qa_check_shebang_size" | 82 | QAPATHTEST[shebang-size] = "package_qa_check_shebang_size" |
88 | def package_qa_check_shebang_size(path, name, d, elf, messages): | 83 | def package_qa_check_shebang_size(path, name, d, elf): |
89 | import stat | 84 | global cpath |
90 | if os.path.islink(path) or stat.S_ISFIFO(os.stat(path).st_mode) or elf: | 85 | |
86 | if elf or cpath.islink(path) or not cpath.isfile(path): | ||
91 | return | 87 | return |
92 | 88 | ||
93 | try: | 89 | try: |
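Several path tests in this diff now consult a shared `cpath` object (an oe.cachedpath.CachedPath instance set up later in qa_check_staged and do_package_qa) instead of calling os.path directly, so repeated stat lookups across thousands of packaged files can be served from a cache. The toy class below only illustrates the idea; it is not the real implementation in meta/lib/oe/cachedpath.py.

```python
# Toy illustration of a cached-lstat helper in the spirit of
# oe.cachedpath.CachedPath (assumption: the real class caches more kinds of
# lookups and handles symlink targets and directories differently).
import os
import stat

class TinyCachedPath:
    def __init__(self):
        self._cache = {}

    def _lstat(self, path):
        if path not in self._cache:
            try:
                self._cache[path] = os.lstat(path)
            except OSError:
                self._cache[path] = None
        return self._cache[path]

    def islink(self, path):
        st = self._lstat(path)
        return st is not None and stat.S_ISLNK(st.st_mode)

    def isfile(self, path):
        # Simplification: the real helper resolves symlinks for isfile().
        st = self._lstat(path)
        return st is not None and stat.S_ISREG(st.st_mode)

cpath = TinyCachedPath()
print(cpath.isfile("/etc/hostname"), cpath.islink("/etc/hostname"))
```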
@@ -104,50 +100,44 @@ def package_qa_check_shebang_size(path, name, d, elf, messages): | |||
104 | return | 100 | return |
105 | 101 | ||
106 | if len(stanza) > 129: | 102 | if len(stanza) > 129: |
107 | oe.qa.add_message(messages, "shebang-size", "%s: %s maximum shebang size exceeded, the maximum size is 128." % (name, package_qa_clean_path(path, d, name))) | 103 | oe.qa.handle_error("shebang-size", "%s: %s maximum shebang size exceeded, the maximum size is 128." % (name, package_qa_clean_path(path, d, name)), d) |
108 | return | 104 | return |
109 | 105 | ||
110 | QAPATHTEST[libexec] = "package_qa_check_libexec" | 106 | QAPATHTEST[libexec] = "package_qa_check_libexec" |
111 | def package_qa_check_libexec(path,name, d, elf, messages): | 107 | def package_qa_check_libexec(path,name, d, elf): |
112 | 108 | ||
113 | # Skip the case where the default is explicitly /usr/libexec | 109 | # Skip the case where the default is explicitly /usr/libexec |
114 | libexec = d.getVar('libexecdir') | 110 | libexec = d.getVar('libexecdir') |
115 | if libexec == "/usr/libexec": | 111 | if libexec == "/usr/libexec": |
116 | return True | 112 | return |
117 | 113 | ||
118 | if 'libexec' in path.split(os.path.sep): | 114 | if 'libexec' in path.split(os.path.sep): |
119 | oe.qa.add_message(messages, "libexec", "%s: %s is using libexec please relocate to %s" % (name, package_qa_clean_path(path, d, name), libexec)) | 115 | oe.qa.handle_error("libexec", "%s: %s is using libexec please relocate to %s" % (name, package_qa_clean_path(path, d, name), libexec), d) |
120 | return False | ||
121 | |||
122 | return True | ||
123 | 116 | ||
124 | QAPATHTEST[rpaths] = "package_qa_check_rpath" | 117 | QAPATHTEST[rpaths] = "package_qa_check_rpath" |
125 | def package_qa_check_rpath(file,name, d, elf, messages): | 118 | def package_qa_check_rpath(file, name, d, elf): |
126 | """ | 119 | """ |
127 | Check for dangerous RPATHs | 120 | Check for dangerous RPATHs |
128 | """ | 121 | """ |
129 | if not elf: | 122 | if not elf: |
130 | return | 123 | return |
131 | 124 | ||
132 | if os.path.islink(file): | ||
133 | return | ||
134 | |||
135 | bad_dirs = [d.getVar('BASE_WORKDIR'), d.getVar('STAGING_DIR_TARGET')] | 125 | bad_dirs = [d.getVar('BASE_WORKDIR'), d.getVar('STAGING_DIR_TARGET')] |
136 | 126 | ||
137 | phdrs = elf.run_objdump("-p", d) | 127 | phdrs = elf.run_objdump("-p", d) |
138 | 128 | ||
139 | import re | 129 | import re |
140 | rpath_re = re.compile(r"\s+RPATH\s+(.*)") | 130 | rpath_re = re.compile(r"\s+(?:RPATH|RUNPATH)\s+(.*)") |
141 | for line in phdrs.split("\n"): | 131 | for line in phdrs.split("\n"): |
142 | m = rpath_re.match(line) | 132 | m = rpath_re.match(line) |
143 | if m: | 133 | if m: |
144 | rpath = m.group(1) | 134 | rpath = m.group(1) |
145 | for dir in bad_dirs: | 135 | for dir in bad_dirs: |
146 | if dir in rpath: | 136 | if dir in rpath: |
147 | oe.qa.add_message(messages, "rpaths", "package %s contains bad RPATH %s in file %s" % (name, rpath, file)) | 137 | oe.qa.handle_error("rpaths", "%s: %s contains bad RPATH %s" % (name, package_qa_clean_path(file, d, name), rpath), d) |
148 | 138 | ||
149 | QAPATHTEST[useless-rpaths] = "package_qa_check_useless_rpaths" | 139 | QAPATHTEST[useless-rpaths] = "package_qa_check_useless_rpaths" |
150 | def package_qa_check_useless_rpaths(file, name, d, elf, messages): | 140 | def package_qa_check_useless_rpaths(file, name, d, elf): |
151 | """ | 141 | """ |
152 | Check for RPATHs that are useless but not dangerous | 142 | Check for RPATHs that are useless but not dangerous |
153 | """ | 143 | """ |
@@ -157,16 +147,13 @@ def package_qa_check_useless_rpaths(file, name, d, elf, messages): | |||
157 | if not elf: | 147 | if not elf: |
158 | return | 148 | return |
159 | 149 | ||
160 | if os.path.islink(file): | ||
161 | return | ||
162 | |||
163 | libdir = d.getVar("libdir") | 150 | libdir = d.getVar("libdir") |
164 | base_libdir = d.getVar("base_libdir") | 151 | base_libdir = d.getVar("base_libdir") |
165 | 152 | ||
166 | phdrs = elf.run_objdump("-p", d) | 153 | phdrs = elf.run_objdump("-p", d) |
167 | 154 | ||
168 | import re | 155 | import re |
169 | rpath_re = re.compile(r"\s+RPATH\s+(.*)") | 156 | rpath_re = re.compile(r"\s+(?:RPATH|RUNPATH)\s+(.*)") |
170 | for line in phdrs.split("\n"): | 157 | for line in phdrs.split("\n"): |
171 | m = rpath_re.match(line) | 158 | m = rpath_re.match(line) |
172 | if m: | 159 | if m: |
@@ -174,31 +161,32 @@ def package_qa_check_useless_rpaths(file, name, d, elf, messages): | |||
174 | if rpath_eq(rpath, libdir) or rpath_eq(rpath, base_libdir): | 161 | if rpath_eq(rpath, libdir) or rpath_eq(rpath, base_libdir): |
175 | # The dynamic linker searches both these places anyway. There is no point in | 162 | # The dynamic linker searches both these places anyway. There is no point in |
176 | # looking there again. | 163 | # looking there again. |
177 | oe.qa.add_message(messages, "useless-rpaths", "%s: %s contains probably-redundant RPATH %s" % (name, package_qa_clean_path(file, d, name), rpath)) | 164 | oe.qa.handle_error("useless-rpaths", "%s: %s contains probably-redundant RPATH %s" % (name, package_qa_clean_path(file, d, name), rpath), d) |
178 | 165 | ||
179 | QAPATHTEST[dev-so] = "package_qa_check_dev" | 166 | QAPATHTEST[dev-so] = "package_qa_check_dev" |
180 | def package_qa_check_dev(path, name, d, elf, messages): | 167 | def package_qa_check_dev(path, name, d, elf): |
181 | """ | 168 | """ |
182 | Check for ".so" library symlinks in non-dev packages | 169 | Check for ".so" library symlinks in non-dev packages |
183 | """ | 170 | """ |
184 | 171 | global cpath | |
185 | if not name.endswith("-dev") and not name.endswith("-dbg") and not name.endswith("-ptest") and not name.startswith("nativesdk-") and path.endswith(".so") and os.path.islink(path): | 172 | if not name.endswith("-dev") and not name.endswith("-dbg") and not name.endswith("-ptest") and not name.startswith("nativesdk-") and path.endswith(".so") and cpath.islink(path): |
186 | oe.qa.add_message(messages, "dev-so", "non -dev/-dbg/nativesdk- package %s contains symlink .so '%s'" % \ | 173 | oe.qa.handle_error("dev-so", "non -dev/-dbg/nativesdk- package %s contains symlink .so '%s'" % \ |
187 | (name, package_qa_clean_path(path, d, name))) | 174 | (name, package_qa_clean_path(path, d, name)), d) |
188 | 175 | ||
189 | QAPATHTEST[dev-elf] = "package_qa_check_dev_elf" | 176 | QAPATHTEST[dev-elf] = "package_qa_check_dev_elf" |
190 | def package_qa_check_dev_elf(path, name, d, elf, messages): | 177 | def package_qa_check_dev_elf(path, name, d, elf): |
191 | """ | 178 | """ |
192 | Check that -dev doesn't contain real shared libraries. The test has to | 179 | Check that -dev doesn't contain real shared libraries. The test has to |
193 | check that the file is not a link and is an ELF object as some recipes | 180 | check that the file is not a link and is an ELF object as some recipes |
194 | install link-time .so files that are linker scripts. | 181 | install link-time .so files that are linker scripts. |
195 | """ | 182 | """ |
196 | if name.endswith("-dev") and path.endswith(".so") and not os.path.islink(path) and elf: | 183 | global cpath |
197 | oe.qa.add_message(messages, "dev-elf", "-dev package %s contains non-symlink .so '%s'" % \ | 184 | if name.endswith("-dev") and path.endswith(".so") and not cpath.islink(path) and elf: |
198 | (name, package_qa_clean_path(path, d, name))) | 185 | oe.qa.handle_error("dev-elf", "-dev package %s contains non-symlink .so '%s'" % \ |
186 | (name, package_qa_clean_path(path, d, name)), d) | ||
199 | 187 | ||
200 | QAPATHTEST[staticdev] = "package_qa_check_staticdev" | 188 | QAPATHTEST[staticdev] = "package_qa_check_staticdev" |
201 | def package_qa_check_staticdev(path, name, d, elf, messages): | 189 | def package_qa_check_staticdev(path, name, d, elf): |
202 | """ | 190 | """ |
203 | Check for ".a" library in non-staticdev packages | 191 | Check for ".a" library in non-staticdev packages |
204 | There are a number of exceptions to this rule, -pic packages can contain | 192 | There are a number of exceptions to this rule, -pic packages can contain |
@@ -207,22 +195,22 @@ def package_qa_check_staticdev(path, name, d, elf, messages): | |||
207 | """ | 195 | """ |
208 | 196 | ||
209 | if not name.endswith("-pic") and not name.endswith("-staticdev") and not name.endswith("-ptest") and path.endswith(".a") and not path.endswith("_nonshared.a") and not '/usr/lib/debug-static/' in path and not '/.debug-static/' in path: | 197 | if not name.endswith("-pic") and not name.endswith("-staticdev") and not name.endswith("-ptest") and path.endswith(".a") and not path.endswith("_nonshared.a") and not '/usr/lib/debug-static/' in path and not '/.debug-static/' in path: |
210 | oe.qa.add_message(messages, "staticdev", "non -staticdev package contains static .a library: %s path '%s'" % \ | 198 | oe.qa.handle_error("staticdev", "non -staticdev package contains static .a library: %s path '%s'" % \ |
211 | (name, package_qa_clean_path(path, d, name))) | 199 | (name, package_qa_clean_path(path, d, name)), d) |
212 | 200 | ||
213 | QAPATHTEST[mime] = "package_qa_check_mime" | 201 | QAPATHTEST[mime] = "package_qa_check_mime" |
214 | def package_qa_check_mime(path, name, d, elf, messages): | 202 | def package_qa_check_mime(path, name, d, elf): |
215 | """ | 203 | """ |
216 | Check if package installs mime types to /usr/share/mime/packages | 204 | Check if package installs mime types to /usr/share/mime/packages |
217 | without inheriting mime.bbclass | 205 |
218 | """ | 206 | """ |
219 | 207 | ||
220 | if d.getVar("datadir") + "/mime/packages" in path and path.endswith('.xml') and not bb.data.inherits_class("mime", d): | 208 | if d.getVar("datadir") + "/mime/packages" in path and path.endswith('.xml') and not bb.data.inherits_class("mime", d): |
221 | oe.qa.add_message(messages, "mime", "package contains mime types but does not inherit mime: %s path '%s'" % \ | 209 | oe.qa.handle_error("mime", "package contains mime types but does not inherit mime: %s path '%s'" % \ |
222 | (name, package_qa_clean_path(path, d, name))) | 210 | (name, package_qa_clean_path(path, d, name)), d) |
223 | 211 | ||
224 | QAPATHTEST[mime-xdg] = "package_qa_check_mime_xdg" | 212 | QAPATHTEST[mime-xdg] = "package_qa_check_mime_xdg" |
225 | def package_qa_check_mime_xdg(path, name, d, elf, messages): | 213 | def package_qa_check_mime_xdg(path, name, d, elf): |
226 | """ | 214 | """ |
227 | Check if package installs desktop file containing MimeType and requires | 215 | Check if package installs desktop file containing MimeType and requires |
228 | mime-types.bbclass to create /usr/share/applications/mimeinfo.cache | 216 | mime-types.bbclass to create /usr/share/applications/mimeinfo.cache |
@@ -245,10 +233,10 @@ def package_qa_check_mime_xdg(path, name, d, elf, messages): | |||
245 | if name == d.getVar('PN'): | 233 | if name == d.getVar('PN'): |
246 | pkgname = '${PN}' | 234 | pkgname = '${PN}' |
247 | wstr += "If yes: add \'inherit mime-xdg\' and \'MIME_XDG_PACKAGES += \"%s\"\' / if no add \'INSANE_SKIP:%s += \"mime-xdg\"\' to recipe." % (pkgname, pkgname) | 235 |
248 | oe.qa.add_message(messages, "mime-xdg", wstr) | 236 | oe.qa.handle_error("mime-xdg", wstr, d) |
249 | if mime_type_found: | 237 | if mime_type_found: |
250 | oe.qa.add_message(messages, "mime-xdg", "%s: contains desktop file with key 'MimeType' but does not inherit mime-xdg: %s" % \ | 238 | oe.qa.handle_error("mime-xdg", "%s: contains desktop file with key 'MimeType' but does not inherit mime-xdg: %s" % \
251 | (name, package_qa_clean_path(path, d, name))) | 239 | (name, package_qa_clean_path(path, d, name)), d) |
252 | 240 | ||
253 | def package_qa_check_libdir(d): | 241 | def package_qa_check_libdir(d): |
254 | """ | 242 | """ |
@@ -298,7 +286,7 @@ def package_qa_check_libdir(d): | |||
298 | try: | 286 | try: |
299 | elf.open() | 287 | elf.open() |
300 | messages.append("%s: found library in wrong location: %s" % (package, rel_path)) | 288 | messages.append("%s: found library in wrong location: %s" % (package, rel_path)) |
301 | except (oe.qa.NotELFFileError): | 289 | except (oe.qa.NotELFFileError, FileNotFoundError): |
302 | pass | 290 | pass |
303 | if exec_re.match(rel_path): | 291 | if exec_re.match(rel_path): |
304 | if libdir not in rel_path and libexecdir not in rel_path: | 292 | if libdir not in rel_path and libexecdir not in rel_path: |
@@ -307,25 +295,25 @@ def package_qa_check_libdir(d): | |||
307 | try: | 295 | try: |
308 | elf.open() | 296 | elf.open() |
309 | messages.append("%s: found library in wrong location: %s" % (package, rel_path)) | 297 | messages.append("%s: found library in wrong location: %s" % (package, rel_path)) |
310 | except (oe.qa.NotELFFileError): | 298 | except (oe.qa.NotELFFileError, FileNotFoundError): |
311 | pass | 299 | pass |
312 | 300 | ||
313 | if messages: | 301 | if messages: |
314 | oe.qa.handle_error("libdir", "\n".join(messages), d) | 302 | oe.qa.handle_error("libdir", "\n".join(messages), d) |
315 | 303 | ||
316 | QAPATHTEST[debug-files] = "package_qa_check_dbg" | 304 | QAPATHTEST[debug-files] = "package_qa_check_dbg" |
317 | def package_qa_check_dbg(path, name, d, elf, messages): | 305 | def package_qa_check_dbg(path, name, d, elf): |
318 | """ | 306 | """ |
319 | Check for ".debug" files or directories outside of the dbg package | 307 | Check for ".debug" files or directories outside of the dbg package |
320 | """ | 308 | """ |
321 | 309 | ||
322 | if not "-dbg" in name and not "-ptest" in name: | 310 | if not "-dbg" in name and not "-ptest" in name: |
323 | if '.debug' in path.split(os.path.sep): | 311 | if '.debug' in path.split(os.path.sep): |
324 | oe.qa.add_message(messages, "debug-files", "%s: non debug package contains .debug directory %s" % \ | 312 | oe.qa.handle_error("debug-files", "%s: non debug package contains .debug directory %s" % \ |
325 | (name, package_qa_clean_path(path, d, name))) | 313 | (name, package_qa_clean_path(path, d, name)), d) |
326 | 314 | ||
327 | QAPATHTEST[arch] = "package_qa_check_arch" | 315 | QAPATHTEST[arch] = "package_qa_check_arch" |
328 | def package_qa_check_arch(path,name,d, elf, messages): | 316 | def package_qa_check_arch(path,name,d, elf): |
329 | """ | 317 | """ |
330 | Check if archs are compatible | 318 | Check if archs are compatible |
331 | """ | 319 | """ |
@@ -334,47 +322,47 @@ def package_qa_check_arch(path,name,d, elf, messages): | |||
334 | if not elf: | 322 | if not elf: |
335 | return | 323 | return |
336 | 324 | ||
337 | target_os = d.getVar('HOST_OS') | 325 | host_os = d.getVar('HOST_OS') |
338 | target_arch = d.getVar('HOST_ARCH') | 326 | host_arch = d.getVar('HOST_ARCH') |
339 | provides = d.getVar('PROVIDES') | 327 | provides = d.getVar('PROVIDES') |
340 | bpn = d.getVar('BPN') | ||
341 | 328 | ||
342 | if target_arch == "allarch": | 329 | if host_arch == "allarch": |
343 | pn = d.getVar('PN') | 330 | oe.qa.handle_error("arch", "%s: inherits the allarch class, but has architecture-specific binaries %s" % \ |
344 | oe.qa.add_message(messages, "arch", pn + ": Recipe inherits the allarch class, but has packaged architecture-specific binaries") | 331 | (name, package_qa_clean_path(path, d, name)), d) |
345 | return | 332 | return |
346 | 333 | ||
347 | # FIXME: Cross package confuse this check, so just skip them | 334 | # If this throws an exception, the machine_dict needs expanding |
348 | for s in ['cross', 'nativesdk', 'cross-canadian']: | 335 | (expected_machine, expected_osabi, expected_abiversion, expected_littleendian, expected_bits) \ |
349 | if bb.data.inherits_class(s, d): | 336 | = oe.elf.machine_dict(d)[host_os][host_arch] |
350 | return | 337 | |
338 | actual_machine = elf.machine() | ||
339 | actual_bits = elf.abiSize() | ||
340 | actual_littleendian = elf.isLittleEndian() | ||
351 | 341 | ||
352 | # avoid following links to /usr/bin (e.g. on udev builds) | 342 | # BPF doesn't match the target |
353 | # we will check the files pointed to anyway... | 343 | if oe.qa.elf_machine_to_string(actual_machine) == "BPF": |
354 | if os.path.islink(path): | ||
355 | return | 344 | return |
356 | 345 | ||
357 | #if this will throw an exception, then fix the dict above | 346 | # These targets have 32-bit userspace but 64-bit kernel, so fudge the expected values |
358 | (machine, osabi, abiversion, littleendian, bits) \ | 347 | if (("virtual/kernel" in provides) or bb.data.inherits_class("module", d)) and (host_os in ("linux-gnux32", "linux-muslx32", "linux-gnu_ilp32") or re.match(r'mips64.*32', d.getVar('DEFAULTTUNE'))): |
359 | = oe.elf.machine_dict(d)[target_os][target_arch] | 348 | expected_bits = 64 |
360 | 349 | ||
361 | # Check the architecture and endianness of the binary | 350 | # Check the architecture and endianness of the binary |
362 | is_32 = (("virtual/kernel" in provides) or bb.data.inherits_class("module", d)) and \ | 351 | if expected_machine != actual_machine: |
363 | (target_os == "linux-gnux32" or target_os == "linux-muslx32" or \ | 352 | oe.qa.handle_error("arch", "Architecture did not match (%s, expected %s) in %s" % \ |
364 | target_os == "linux-gnu_ilp32" or re.match(r'mips64.*32', d.getVar('DEFAULTTUNE'))) | 353 | (oe.qa.elf_machine_to_string(actual_machine), oe.qa.elf_machine_to_string(expected_machine), package_qa_clean_path(path, d, name)), d) |
365 | is_bpf = (oe.qa.elf_machine_to_string(elf.machine()) == "BPF") | 354 | |
366 | if not ((machine == elf.machine()) or is_32 or is_bpf): | 355 | if expected_bits != actual_bits: |
367 | oe.qa.add_message(messages, "arch", "Architecture did not match (%s, expected %s) in %s" % \ | 356 | oe.qa.handle_error("arch", "Bit size did not match (%d, expected %d) in %s" % \ |
368 | (oe.qa.elf_machine_to_string(elf.machine()), oe.qa.elf_machine_to_string(machine), package_qa_clean_path(path, d, name))) | 357 | (actual_bits, expected_bits, package_qa_clean_path(path, d, name)), d) |
369 | elif not ((bits == elf.abiSize()) or is_32 or is_bpf): | 358 | |
370 | oe.qa.add_message(messages, "arch", "Bit size did not match (%d, expected %d) in %s" % \ | 359 | if expected_littleendian != actual_littleendian: |
371 | (elf.abiSize(), bits, package_qa_clean_path(path, d, name))) | 360 | oe.qa.handle_error("arch", "Endiannes did not match (%d, expected %d) in %s" % \ |
372 | elif not ((littleendian == elf.isLittleEndian()) or is_bpf): | 361 | oe.qa.handle_error("arch", "Endianness did not match (%d, expected %d) in %s" % \
373 | oe.qa.add_message(messages, "arch", "Endianness did not match (%d, expected %d) in %s" % \ | 362 | (actual_littleendian, expected_littleendian, package_qa_clean_path(path, d, name)), d) |
374 | (elf.isLittleEndian(), littleendian, package_qa_clean_path(path, d, name))) | ||
375 | 363 | ||
376 | QAPATHTEST[desktop] = "package_qa_check_desktop" | 364 | QAPATHTEST[desktop] = "package_qa_check_desktop" |
377 | def package_qa_check_desktop(path, name, d, elf, messages): | 365 | def package_qa_check_desktop(path, name, d, elf): |
378 | """ | 366 | """ |
379 | Run all desktop files through desktop-file-validate. | 367 | Run all desktop files through desktop-file-validate. |
380 | """ | 368 | """ |
@@ -383,10 +371,10 @@ def package_qa_check_desktop(path, name, d, elf, messages): | |||
383 | output = os.popen("%s %s" % (desktop_file_validate, path)) | 371 | output = os.popen("%s %s" % (desktop_file_validate, path)) |
384 | # This only produces output on errors | 372 | # This only produces output on errors |
385 | for l in output: | 373 | for l in output: |
386 | oe.qa.add_message(messages, "desktop", "Desktop file issue: " + l.strip()) | 374 | oe.qa.handle_error("desktop", "Desktop file issue: " + l.strip(), d) |
387 | 375 | ||
388 | QAPATHTEST[textrel] = "package_qa_textrel" | 376 | QAPATHTEST[textrel] = "package_qa_textrel" |
389 | def package_qa_textrel(path, name, d, elf, messages): | 377 | def package_qa_textrel(path, name, d, elf): |
390 | """ | 378 | """ |
391 | Check if the binary contains relocations in .text | 379 | Check if the binary contains relocations in .text |
392 | """ | 380 | """ |
@@ -394,25 +382,18 @@ def package_qa_textrel(path, name, d, elf, messages): | |||
394 | if not elf: | 382 | if not elf: |
395 | return | 383 | return |
396 | 384 | ||
397 | if os.path.islink(path): | ||
398 | return | ||
399 | |||
400 | phdrs = elf.run_objdump("-p", d) | 385 | phdrs = elf.run_objdump("-p", d) |
401 | sane = True | ||
402 | 386 | ||
403 | import re | 387 | import re |
404 | textrel_re = re.compile(r"\s+TEXTREL\s+") | 388 | textrel_re = re.compile(r"\s+TEXTREL\s+") |
405 | for line in phdrs.split("\n"): | 389 | for line in phdrs.split("\n"): |
406 | if textrel_re.match(line): | 390 | if textrel_re.match(line): |
407 | sane = False | 391 | path = package_qa_clean_path(path, d, name) |
408 | break | 392 | oe.qa.handle_error("textrel", "%s: ELF binary %s has relocations in .text" % (name, path), d) |
409 | 393 | return | |
410 | if not sane: | ||
411 | path = package_qa_clean_path(path, d, name) | ||
412 | oe.qa.add_message(messages, "textrel", "%s: ELF binary %s has relocations in .text" % (name, path)) | ||
413 | 394 | ||
414 | QAPATHTEST[ldflags] = "package_qa_hash_style" | 395 | QAPATHTEST[ldflags] = "package_qa_hash_style" |
415 | def package_qa_hash_style(path, name, d, elf, messages): | 396 | def package_qa_hash_style(path, name, d, elf): |
416 | """ | 397 | """ |
417 | Check if the binary has the right hash style... | 398 | Check if the binary has the right hash style... |
418 | """ | 399 | """ |
@@ -420,9 +401,6 @@ def package_qa_hash_style(path, name, d, elf, messages): | |||
420 | if not elf: | 401 | if not elf: |
421 | return | 402 | return |
422 | 403 | ||
423 | if os.path.islink(path): | ||
424 | return | ||
425 | |||
426 | gnu_hash = "--hash-style=gnu" in d.getVar('LDFLAGS') | 404 | gnu_hash = "--hash-style=gnu" in d.getVar('LDFLAGS') |
427 | if not gnu_hash: | 405 | if not gnu_hash: |
428 | gnu_hash = "--hash-style=both" in d.getVar('LDFLAGS') | 406 | gnu_hash = "--hash-style=both" in d.getVar('LDFLAGS') |
@@ -444,17 +422,17 @@ def package_qa_hash_style(path, name, d, elf, messages): | |||
444 | sane = True | 422 | sane = True |
445 | if has_syms and not sane: | 423 | if has_syms and not sane: |
446 | path = package_qa_clean_path(path, d, name) | 424 | path = package_qa_clean_path(path, d, name) |
447 | oe.qa.add_message(messages, "ldflags", "File %s in package %s doesn't have GNU_HASH (didn't pass LDFLAGS?)" % (path, name)) | 425 | oe.qa.handle_error("ldflags", "File %s in package %s doesn't have GNU_HASH (didn't pass LDFLAGS?)" % (path, name), d) |
426 | package_qa_hash_style[vardepsexclude] = "TCLIBC" | ||
448 | 427 | ||
449 | 428 | ||
450 | QAPATHTEST[buildpaths] = "package_qa_check_buildpaths" | 429 | QAPATHTEST[buildpaths] = "package_qa_check_buildpaths" |
451 | def package_qa_check_buildpaths(path, name, d, elf, messages): | 430 | def package_qa_check_buildpaths(path, name, d, elf): |
452 | """ | 431 | """ |
453 | Check for build paths inside target files and error if paths are not | 432 | Check for build paths inside target files and error if paths are not |
454 | explicitly ignored. | 433 | explicitly ignored. |
455 | """ | 434 | """ |
456 | import stat | 435 | import stat |
457 | |||
458 | # Ignore symlinks/devs/fifos | 436 | # Ignore symlinks/devs/fifos |
459 | mode = os.lstat(path).st_mode | 437 | mode = os.lstat(path).st_mode |
460 | if stat.S_ISLNK(mode) or stat.S_ISBLK(mode) or stat.S_ISFIFO(mode) or stat.S_ISCHR(mode) or stat.S_ISSOCK(mode): | 438 | if stat.S_ISLNK(mode) or stat.S_ISBLK(mode) or stat.S_ISFIFO(mode) or stat.S_ISCHR(mode) or stat.S_ISSOCK(mode): |
@@ -464,12 +442,12 @@ def package_qa_check_buildpaths(path, name, d, elf, messages): | |||
464 | with open(path, 'rb') as f: | 442 | with open(path, 'rb') as f: |
465 | file_content = f.read() | 443 | file_content = f.read() |
466 | if tmpdir in file_content: | 444 | if tmpdir in file_content: |
467 | trimmed = path.replace(os.path.join (d.getVar("PKGDEST"), name), "") | 445 | path = package_qa_clean_path(path, d, name) |
468 | oe.qa.add_message(messages, "buildpaths", "File %s in package %s contains reference to TMPDIR" % (trimmed, name)) | 446 | oe.qa.handle_error("buildpaths", "File %s in package %s contains reference to TMPDIR" % (path, name), d) |
469 | 447 | ||
470 | 448 | ||
471 | QAPATHTEST[xorg-driver-abi] = "package_qa_check_xorg_driver_abi" | 449 | QAPATHTEST[xorg-driver-abi] = "package_qa_check_xorg_driver_abi" |
472 | def package_qa_check_xorg_driver_abi(path, name, d, elf, messages): | 450 | def package_qa_check_xorg_driver_abi(path, name, d, elf): |
473 | """ | 451 | """ |
474 | Check that all packages containing Xorg drivers have ABI dependencies | 452 | Check that all packages containing Xorg drivers have ABI dependencies |
475 | """ | 453 | """ |
@@ -484,33 +462,34 @@ def package_qa_check_xorg_driver_abi(path, name, d, elf, messages): | |||
484 | for rdep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + name) or ""): | 462 | for rdep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + name) or ""): |
485 | if rdep.startswith("%sxorg-abi-" % mlprefix): | 463 | if rdep.startswith("%sxorg-abi-" % mlprefix): |
486 | return | 464 | return |
487 | oe.qa.add_message(messages, "xorg-driver-abi", "Package %s contains Xorg driver (%s) but no xorg-abi- dependencies" % (name, os.path.basename(path))) | 465 | oe.qa.handle_error("xorg-driver-abi", "Package %s contains Xorg driver (%s) but no xorg-abi- dependencies" % (name, os.path.basename(path)), d) |
488 | 466 | ||
489 | QAPATHTEST[infodir] = "package_qa_check_infodir" | 467 | QAPATHTEST[infodir] = "package_qa_check_infodir" |
490 | def package_qa_check_infodir(path, name, d, elf, messages): | 468 | def package_qa_check_infodir(path, name, d, elf): |
491 | """ | 469 | """ |
492 | Check that /usr/share/info/dir isn't shipped in a particular package | 470 | Check that /usr/share/info/dir isn't shipped in a particular package |
493 | """ | 471 | """ |
494 | infodir = d.expand("${infodir}/dir") | 472 | infodir = d.expand("${infodir}/dir") |
495 | 473 | ||
496 | if infodir in path: | 474 | if infodir in path: |
497 | oe.qa.add_message(messages, "infodir", "The /usr/share/info/dir file is not meant to be shipped in a particular package.") | 475 | oe.qa.handle_error("infodir", "The %s file is not meant to be shipped in a particular package." % infodir, d) |
498 | 476 | ||
499 | QAPATHTEST[symlink-to-sysroot] = "package_qa_check_symlink_to_sysroot" | 477 | QAPATHTEST[symlink-to-sysroot] = "package_qa_check_symlink_to_sysroot" |
500 | def package_qa_check_symlink_to_sysroot(path, name, d, elf, messages): | 478 | def package_qa_check_symlink_to_sysroot(path, name, d, elf): |
501 | """ | 479 | """ |
502 | Check that the package doesn't contain any absolute symlinks to the sysroot. | 480 | Check that the package doesn't contain any absolute symlinks to the sysroot. |
503 | """ | 481 | """ |
504 | if os.path.islink(path): | 482 | global cpath |
483 | if cpath.islink(path): | ||
505 | target = os.readlink(path) | 484 | target = os.readlink(path) |
506 | if os.path.isabs(target): | 485 | if os.path.isabs(target): |
507 | tmpdir = d.getVar('TMPDIR') | 486 | tmpdir = d.getVar('TMPDIR') |
508 | if target.startswith(tmpdir): | 487 | if target.startswith(tmpdir): |
509 | trimmed = path.replace(os.path.join (d.getVar("PKGDEST"), name), "") | 488 | path = package_qa_clean_path(path, d, name) |
510 | oe.qa.add_message(messages, "symlink-to-sysroot", "Symlink %s in %s points to TMPDIR" % (trimmed, name)) | 489 | oe.qa.handle_error("symlink-to-sysroot", "Symlink %s in %s points to TMPDIR" % (path, name), d) |
511 | 490 | ||
512 | QAPATHTEST[32bit-time] = "check_32bit_symbols" | 491 | QAPATHTEST[32bit-time] = "check_32bit_symbols" |
513 | def check_32bit_symbols(path, packagename, d, elf, messages): | 492 | def check_32bit_symbols(path, packagename, d, elf): |
514 | """ | 493 | """ |
515 | Check that ELF files do not use any 32 bit time APIs from glibc. | 494 | Check that ELF files do not use any 32 bit time APIs from glibc. |
516 | """ | 495 | """ |
@@ -615,7 +594,7 @@ def check_32bit_symbols(path, packagename, d, elf, messages): | |||
615 | ) | 594 | ) |
616 | 595 | ||
617 | # elf is a oe.qa.ELFFile object | 596 | # elf is a oe.qa.ELFFile object |
618 | if elf is not None: | 597 | if elf: |
619 | phdrs = elf.run_objdump("-tw", d) | 598 | phdrs = elf.run_objdump("-tw", d) |
620 | syms = re.finditer(ptrn, phdrs) | 599 | syms = re.finditer(ptrn, phdrs) |
621 | usedapis = {sym.group('notag') for sym in syms} | 600 | usedapis = {sym.group('notag') for sym in syms} |
@@ -629,11 +608,9 @@ def check_32bit_symbols(path, packagename, d, elf, messages): | |||
629 | if not allowed: | 608 | if not allowed: |
630 | msgformat = elfpath + " uses 32-bit api '%s'" | 609 | msgformat = elfpath + " uses 32-bit api '%s'" |
631 | for sym in usedapis: | 610 | for sym in usedapis: |
632 | oe.qa.add_message(messages, '32bit-time', msgformat % sym) | 611 | oe.qa.handle_error('32bit-time', msgformat % sym, d) |
633 | oe.qa.add_message( | 612 | oe.qa.handle_error('32bit-time', 'Suppress with INSANE_SKIP = "32bit-time"', d) |
634 | messages, '32bit-time', | 613 | check_32bit_symbols[vardepsexclude] = "OVERRIDES" |
635 | 'Suppress with INSANE_SKIP = "32bit-time"' | ||
636 | ) | ||
637 | 614 | ||
638 | # Check license variables | 615 | # Check license variables |
639 | do_populate_lic[postfuncs] += "populate_lic_qa_checksum" | 616 | do_populate_lic[postfuncs] += "populate_lic_qa_checksum" |
@@ -794,62 +771,19 @@ def qa_check_staged(path,d): | |||
794 | oe.qa.handle_error("pkgconfig", error_msg, d) | 771 | oe.qa.handle_error("pkgconfig", error_msg, d) |
795 | 772 | ||
796 | if not skip_shebang_size: | 773 | if not skip_shebang_size: |
797 | errors = {} | 774 | global cpath |
798 | package_qa_check_shebang_size(path, "", d, None, errors) | 775 | cpath = oe.cachedpath.CachedPath() |
799 | for e in errors: | 776 | package_qa_check_shebang_size(path, "", d, None) |
800 | oe.qa.handle_error(e, errors[e], d) | 777 | cpath = None |
801 | |||
802 | |||
803 | # Run all package-wide warnfuncs and errorfuncs | ||
804 | def package_qa_package(warnfuncs, errorfuncs, package, d): | ||
805 | warnings = {} | ||
806 | errors = {} | ||
807 | |||
808 | for func in warnfuncs: | ||
809 | func(package, d, warnings) | ||
810 | for func in errorfuncs: | ||
811 | func(package, d, errors) | ||
812 | |||
813 | for w in warnings: | ||
814 | oe.qa.handle_error(w, warnings[w], d) | ||
815 | for e in errors: | ||
816 | oe.qa.handle_error(e, errors[e], d) | ||
817 | |||
818 | return len(errors) == 0 | ||
819 | |||
820 | # Run all recipe-wide warnfuncs and errorfuncs | ||
821 | def package_qa_recipe(warnfuncs, errorfuncs, pn, d): | ||
822 | warnings = {} | ||
823 | errors = {} | ||
824 | |||
825 | for func in warnfuncs: | ||
826 | func(pn, d, warnings) | ||
827 | for func in errorfuncs: | ||
828 | func(pn, d, errors) | ||
829 | |||
830 | for w in warnings: | ||
831 | oe.qa.handle_error(w, warnings[w], d) | ||
832 | for e in errors: | ||
833 | oe.qa.handle_error(e, errors[e], d) | ||
834 | |||
835 | return len(errors) == 0 | ||
836 | |||
837 | def prepopulate_objdump_p(elf, d): | ||
838 | output = elf.run_objdump("-p", d) | ||
839 | return (elf.name, output) | ||
840 | 778 | ||
841 | # Walk over all files in a directory and call func | 779 | # Walk over all files in a directory and call func |
842 | def package_qa_walk(warnfuncs, errorfuncs, package, d): | 780 | def package_qa_walk(checkfuncs, package, d): |
843 | #if this will throw an exception, then fix the dict above | 781 | global cpath |
844 | target_os = d.getVar('HOST_OS') | ||
845 | target_arch = d.getVar('HOST_ARCH') | ||
846 | 782 | ||
847 | warnings = {} | ||
848 | errors = {} | ||
849 | elves = {} | 783 | elves = {} |
850 | for path in pkgfiles[package]: | 784 | for path in pkgfiles[package]: |
851 | elf = None | 785 | elf = None |
852 | if os.path.isfile(path): | 786 | if cpath.isfile(path) and not cpath.islink(path): |
853 | elf = oe.qa.ELFFile(path) | 787 | elf = oe.qa.ELFFile(path) |
854 | try: | 788 | try: |
855 | elf.open() | 789 | elf.open() |
@@ -859,24 +793,22 @@ def package_qa_walk(warnfuncs, errorfuncs, package, d): | |||
859 | if elf: | 793 | if elf: |
860 | elves[path] = elf | 794 | elves[path] = elf |
861 | 795 | ||
796 | def prepopulate_objdump_p(elf, d): | ||
797 | output = elf.run_objdump("-p", d) | ||
798 | return (elf.name, output) | ||
799 | |||
862 | results = oe.utils.multiprocess_launch(prepopulate_objdump_p, elves.values(), d, extraargs=(d,)) | 800 | results = oe.utils.multiprocess_launch(prepopulate_objdump_p, elves.values(), d, extraargs=(d,)) |
863 | for item in results: | 801 | for item in results: |
864 | elves[item[0]].set_objdump("-p", item[1]) | 802 | elves[item[0]].set_objdump("-p", item[1]) |
865 | 803 | ||
866 | for path in pkgfiles[package]: | 804 | for path in pkgfiles[package]: |
867 | if path in elves: | 805 | elf = elves.get(path) |
868 | elves[path].open() | 806 | if elf: |
869 | for func in warnfuncs: | 807 | elf.open() |
870 | func(path, package, d, elves.get(path), warnings) | 808 | for func in checkfuncs: |
871 | for func in errorfuncs: | 809 | func(path, package, d, elf) |
872 | func(path, package, d, elves.get(path), errors) | 810 | if elf: |
873 | if path in elves: | 811 | elf.close() |
874 | elves[path].close() | ||
875 | |||
876 | for w in warnings: | ||
877 | oe.qa.handle_error(w, warnings[w], d) | ||
878 | for e in errors: | ||
879 | oe.qa.handle_error(e, errors[e], d) | ||
880 | 812 | ||
881 | def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d): | 813 | def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d): |
882 | # Don't do this check for kernel/module recipes, there aren't too many debug/development | 814 | # Don't do this check for kernel/module recipes, there aren't too many debug/development |
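The reworked package_qa_walk() above takes a single flat list of check functions and calls each as func(path, package, d, elf); severity is decided inside oe.qa.handle_error() rather than by separate warning and error dictionaries. A condensed, self-contained sketch of that loop (the stand-in check and file list are hypothetical, and the real walk also pre-populates objdump output via oe.utils.multiprocess_launch):

```python
# Condensed sketch of the reworked walk; ELF open/close and the multiprocess
# objdump pre-population are omitted, and the inputs below are hypothetical.
def walk_package(checkfuncs, files, elves, package, d=None):
    for path in files:
        elf = elves.get(path)          # None for non-ELF files
        for func in checkfuncs:
            func(path, package, d, elf)

def demo_check(path, package, d, elf):
    kind = "ELF" if elf else "plain"
    print("%s: checked %s (%s)" % (package, path, kind))

walk_package([demo_check],
             ["/pkg/usr/bin/tool", "/pkg/etc/tool.conf"],
             {"/pkg/usr/bin/tool": object()},
             "example-pkg")
```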
@@ -893,25 +825,30 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d): | |||
893 | 825 | ||
894 | # Now do the sanity check!!! | 826 | # Now do the sanity check!!! |
895 | if "build-deps" not in skip: | 827 | if "build-deps" not in skip: |
828 | def check_rdep(rdep_data, possible_pn): | ||
829 | if rdep_data and "PN" in rdep_data: | ||
830 | possible_pn.add(rdep_data["PN"]) | ||
831 | return rdep_data["PN"] in taskdeps | ||
832 | return False | ||
833 | |||
896 | for rdepend in rdepends: | 834 | for rdepend in rdepends: |
897 | if "-dbg" in rdepend and "debug-deps" not in skip: | 835 | if rdepend.endswith("-dbg") and "debug-deps" not in skip: |
898 | error_msg = "%s rdepends on %s" % (pkg,rdepend) | 836 | error_msg = "%s rdepends on %s" % (pkg,rdepend) |
899 | oe.qa.handle_error("debug-deps", error_msg, d) | 837 | oe.qa.handle_error("debug-deps", error_msg, d) |
900 | if (not "-dev" in pkg and not "-staticdev" in pkg) and rdepend.endswith("-dev") and "dev-deps" not in skip: | 838 | if (not "-dev" in pkg and not "-staticdev" in pkg) and rdepend.endswith("-dev") and "dev-deps" not in skip: |
901 | error_msg = "%s rdepends on %s" % (pkg, rdepend) | 839 | error_msg = "%s rdepends on %s" % (pkg, rdepend) |
902 | oe.qa.handle_error("dev-deps", error_msg, d) | 840 | oe.qa.handle_error("dev-deps", error_msg, d) |
903 | if rdepend not in packages: | 841 | if rdepend not in packages: |
842 | possible_pn = set() | ||
904 | rdep_data = oe.packagedata.read_subpkgdata(rdepend, d) | 843 | rdep_data = oe.packagedata.read_subpkgdata(rdepend, d) |
905 | if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps: | 844 | if check_rdep(rdep_data, possible_pn): |
906 | continue | 845 | continue |
907 | if not rdep_data or not 'PN' in rdep_data: | 846 | |
908 | for _, rdep_data in oe.packagedata.foreach_runtime_provider_pkgdata(d, rdepend): | 847 | if any(check_rdep(rdep_data, possible_pn) for _, rdep_data in oe.packagedata.foreach_runtime_provider_pkgdata(d, rdepend)): |
909 | if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps: | ||
910 | break | ||
911 | if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps: | ||
912 | continue | 848 | continue |
913 | if rdep_data and 'PN' in rdep_data: | 849 | |
914 | error_msg = "%s rdepends on %s, but it isn't a build dependency, missing %s in DEPENDS or PACKAGECONFIG?" % (pkg, rdepend, rdep_data['PN']) | 850 | if possible_pn: |
851 | error_msg = "%s rdepends on %s, but it isn't a build dependency, missing one of %s in DEPENDS or PACKAGECONFIG?" % (pkg, rdepend, ", ".join(possible_pn)) | ||
915 | else: | 852 | else: |
916 | error_msg = "%s rdepends on %s, but it isn't a build dependency?" % (pkg, rdepend) | 853 | error_msg = "%s rdepends on %s, but it isn't a build dependency?" % (pkg, rdepend) |
917 | oe.qa.handle_error("build-deps", error_msg, d) | 854 | oe.qa.handle_error("build-deps", error_msg, d) |
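The build-deps logic is now factored into a small check_rdep() helper that records every provider PN it sees, so the final error message can list all candidates for DEPENDS/PACKAGECONFIG. A standalone version of that helper with made-up pkgdata (the real one closes over taskdeps from the calling scope):

```python
# Standalone version of check_rdep(); the pkgdata dictionaries are examples.
def check_rdep(rdep_data, possible_pn, taskdeps):
    if rdep_data and "PN" in rdep_data:
        possible_pn.add(rdep_data["PN"])
        return rdep_data["PN"] in taskdeps
    return False

taskdeps = {"busybox", "glibc"}
possible_pn = set()
providers = [{"PN": "coreutils"}, {"PN": "busybox"}]
satisfied = any(check_rdep(p, possible_pn, taskdeps) for p in providers)
print(satisfied, sorted(possible_pn))   # True ['busybox', 'coreutils']
```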
@@ -998,20 +935,19 @@ def package_qa_check_deps(pkg, pkgdest, d): | |||
998 | check_valid_deps('RCONFLICTS') | 935 | check_valid_deps('RCONFLICTS') |
999 | 936 | ||
1000 | QAPKGTEST[usrmerge] = "package_qa_check_usrmerge" | 937 | QAPKGTEST[usrmerge] = "package_qa_check_usrmerge" |
1001 | def package_qa_check_usrmerge(pkg, d, messages): | 938 | def package_qa_check_usrmerge(pkg, d): |
1002 | 939 | global cpath | |
1003 | pkgdest = d.getVar('PKGDEST') | 940 | pkgdest = d.getVar('PKGDEST') |
1004 | pkg_dir = pkgdest + os.sep + pkg + os.sep | 941 | pkg_dir = pkgdest + os.sep + pkg + os.sep |
1005 | merged_dirs = ['bin', 'sbin', 'lib'] + d.getVar('MULTILIB_VARIANTS').split() | 942 | merged_dirs = ['bin', 'sbin', 'lib'] + d.getVar('MULTILIB_VARIANTS').split() |
1006 | for f in merged_dirs: | 943 | for f in merged_dirs: |
1007 | if os.path.exists(pkg_dir + f) and not os.path.islink(pkg_dir + f): | 944 | if cpath.exists(pkg_dir + f) and not cpath.islink(pkg_dir + f): |
1008 | msg = "%s package is not obeying usrmerge distro feature. /%s should be relocated to /usr." % (pkg, f) | 945 | msg = "%s package is not obeying usrmerge distro feature. /%s should be relocated to /usr." % (pkg, f) |
1009 | oe.qa.add_message(messages, "usrmerge", msg) | 946 | oe.qa.handle_error("usrmerge", msg, d) |
1010 | return False | 947 | return |
1011 | return True | ||
1012 | 948 | ||
1013 | QAPKGTEST[perllocalpod] = "package_qa_check_perllocalpod" | 949 | QAPKGTEST[perllocalpod] = "package_qa_check_perllocalpod" |
1014 | def package_qa_check_perllocalpod(pkg, d, messages): | 950 | def package_qa_check_perllocalpod(pkg, d): |
1015 | """ | 951 | """ |
1016 | Check that the recipe didn't ship a perllocal.pod file, which shouldn't be | 952 |
1017 | installed in a distribution package. cpan.bbclass sets NO_PERLLOCAL=1 to | 953 | installed in a distribution package. cpan.bbclass sets NO_PERLLOCAL=1 to |
@@ -1025,67 +961,61 @@ def package_qa_check_perllocalpod(pkg, d, messages): | |||
1025 | if matches: | 961 | if matches: |
1026 | matches = [package_qa_clean_path(path, d, pkg) for path in matches] | 962 | matches = [package_qa_clean_path(path, d, pkg) for path in matches] |
1027 | msg = "%s contains perllocal.pod (%s), should not be installed" % (pkg, " ".join(matches)) | 963 | msg = "%s contains perllocal.pod (%s), should not be installed" % (pkg, " ".join(matches)) |
1028 | oe.qa.add_message(messages, "perllocalpod", msg) | 964 | oe.qa.handle_error("perllocalpod", msg, d) |
1029 | 965 | ||
1030 | QAPKGTEST[expanded-d] = "package_qa_check_expanded_d" | 966 | QAPKGTEST[expanded-d] = "package_qa_check_expanded_d" |
1031 | def package_qa_check_expanded_d(package, d, messages): | 967 | def package_qa_check_expanded_d(package, d): |
1032 | """ | 968 | """ |
1033 | Check for the expanded D (${D}) value in pkg_* and FILES | 969 | Check for the expanded D (${D}) value in pkg_* and FILES |
1034 | variables, warn the user to use it correctly. | 970 | variables, warn the user to use it correctly. |
1035 | """ | 971 | """ |
1036 | sane = True | ||
1037 | expanded_d = d.getVar('D') | 972 | expanded_d = d.getVar('D') |
1038 | 973 | ||
1039 | for var in 'FILES','pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm': | 974 | for var in 'FILES','pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm': |
1040 | bbvar = d.getVar(var + ":" + package) or "" | 975 | bbvar = d.getVar(var + ":" + package) or "" |
1041 | if expanded_d in bbvar: | 976 | if expanded_d in bbvar: |
1042 | if var == 'FILES': | 977 | if var == 'FILES': |
1043 | oe.qa.add_message(messages, "expanded-d", "FILES in %s recipe should not contain the ${D} variable as it references the local build directory not the target filesystem, best solution is to remove the ${D} reference" % package) | 978 | oe.qa.handle_error("expanded-d", "FILES in %s recipe should not contain the ${D} variable as it references the local build directory not the target filesystem, best solution is to remove the ${D} reference" % package, d) |
1044 | sane = False | ||
1045 | else: | 979 | else: |
1046 | oe.qa.add_message(messages, "expanded-d", "%s in %s recipe contains ${D}, it should be replaced by $D instead" % (var, package)) | 980 | oe.qa.handle_error("expanded-d", "%s in %s recipe contains ${D}, it should be replaced by $D instead" % (var, package), d) |
1047 | sane = False | ||
1048 | return sane | ||
1049 | 981 | ||
1050 | QAPKGTEST[unlisted-pkg-lics] = "package_qa_check_unlisted_pkg_lics" | 982 | QAPKGTEST[unlisted-pkg-lics] = "package_qa_check_unlisted_pkg_lics" |
1051 | def package_qa_check_unlisted_pkg_lics(package, d, messages): | 983 | def package_qa_check_unlisted_pkg_lics(package, d): |
1052 | """ | 984 | """ |
1053 | Check that all licenses for a package are among the licenses for the recipe. | 985 | Check that all licenses for a package are among the licenses for the recipe. |
1054 | """ | 986 | """ |
1055 | pkg_lics = d.getVar('LICENSE:' + package) | 987 | pkg_lics = d.getVar('LICENSE:' + package) |
1056 | if not pkg_lics: | 988 | if not pkg_lics: |
1057 | return True | 989 | return |
1058 | 990 | ||
1059 | recipe_lics_set = oe.license.list_licenses(d.getVar('LICENSE')) | 991 | recipe_lics_set = oe.license.list_licenses(d.getVar('LICENSE')) |
1060 | package_lics = oe.license.list_licenses(pkg_lics) | 992 | package_lics = oe.license.list_licenses(pkg_lics) |
1061 | unlisted = package_lics - recipe_lics_set | 993 | unlisted = package_lics - recipe_lics_set |
1062 | if unlisted: | 994 | if unlisted: |
1063 | oe.qa.add_message(messages, "unlisted-pkg-lics", | 995 | oe.qa.handle_error("unlisted-pkg-lics", |
1064 | "LICENSE:%s includes licenses (%s) that are not " | 996 | "LICENSE:%s includes licenses (%s) that are not " |
1065 | "listed in LICENSE" % (package, ' '.join(unlisted))) | 997 | "listed in LICENSE" % (package, ' '.join(unlisted)), d) |
1066 | return False | ||
1067 | obsolete = set(oe.license.obsolete_license_list()) & package_lics - recipe_lics_set | 998 | obsolete = set(oe.license.obsolete_license_list()) & package_lics - recipe_lics_set |
1068 | if obsolete: | 999 | if obsolete: |
1069 | oe.qa.add_message(messages, "obsolete-license", | 1000 | oe.qa.handle_error("obsolete-license", |
1070 | "LICENSE:%s includes obsolete licenses %s" % (package, ' '.join(obsolete))) | 1001 | "LICENSE:%s includes obsolete licenses %s" % (package, ' '.join(obsolete)), d) |
1071 | return False | ||
1072 | return True | ||
1073 | 1002 | ||
1074 | QAPKGTEST[empty-dirs] = "package_qa_check_empty_dirs" | 1003 | QAPKGTEST[empty-dirs] = "package_qa_check_empty_dirs" |
1075 | def package_qa_check_empty_dirs(pkg, d, messages): | 1004 | def package_qa_check_empty_dirs(pkg, d): |
1076 | """ | 1005 | """ |
1077 | Check for the existence of files in directories that are expected to be | 1006 | Check for the existence of files in directories that are expected to be |
1078 | empty. | 1007 | empty. |
1079 | """ | 1008 | """ |
1080 | 1009 | ||
1010 | global cpath | ||
1081 | pkgd = oe.path.join(d.getVar('PKGDEST'), pkg) | 1011 | pkgd = oe.path.join(d.getVar('PKGDEST'), pkg) |
1082 | for dir in (d.getVar('QA_EMPTY_DIRS') or "").split(): | 1012 | for dir in (d.getVar('QA_EMPTY_DIRS') or "").split(): |
1083 | empty_dir = oe.path.join(pkgd, dir) | 1013 | empty_dir = oe.path.join(pkgd, dir) |
1084 | if os.path.exists(empty_dir) and os.listdir(empty_dir): | 1014 | if cpath.exists(empty_dir) and os.listdir(empty_dir): |
1085 | recommendation = (d.getVar('QA_EMPTY_DIRS_RECOMMENDATION:' + dir) or | 1015 | recommendation = (d.getVar('QA_EMPTY_DIRS_RECOMMENDATION:' + dir) or |
1086 | "but it is expected to be empty") | 1016 | "but it is expected to be empty") |
1087 | msg = "%s installs files in %s, %s" % (pkg, dir, recommendation) | 1017 | msg = "%s installs files in %s, %s" % (pkg, dir, recommendation) |
1088 | oe.qa.add_message(messages, "empty-dirs", msg) | 1018 | oe.qa.handle_error("empty-dirs", msg, d) |
1089 | 1019 | ||
1090 | def package_qa_check_encoding(keys, encode, d): | 1020 | def package_qa_check_encoding(keys, encode, d): |
1091 | def check_encoding(key, enc): | 1021 | def check_encoding(key, enc): |
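The unlisted-pkg-lics and obsolete-license checks above reduce to set arithmetic over oe.license.list_licenses() results, and both findings are now reported instead of returning after the first. A plain-set illustration (license names are examples):

```python
# Plain-set illustration of the license checks; in the class the sets come
# from oe.license.list_licenses() and oe.license.obsolete_license_list().
recipe_lics = {"MIT", "GPL-2.0-only"}
package_lics = {"MIT", "BSD-3-Clause"}
obsolete_known = {"GPLv2", "BSD"}

unlisted = package_lics - recipe_lics
obsolete = obsolete_known & package_lics - recipe_lics   # '&' binds after '-'
if unlisted:
    print("LICENSE:pkg includes licenses (%s) that are not listed in LICENSE"
          % " ".join(sorted(unlisted)))
if obsolete:
    print("LICENSE:pkg includes obsolete licenses %s" % " ".join(sorted(obsolete)))
```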
@@ -1109,10 +1039,11 @@ HOST_USER_UID := "${@os.getuid()}" | |||
1109 | HOST_USER_GID := "${@os.getgid()}" | 1039 | HOST_USER_GID := "${@os.getgid()}" |
1110 | 1040 | ||
1111 | QAPATHTEST[host-user-contaminated] = "package_qa_check_host_user" | 1041 | QAPATHTEST[host-user-contaminated] = "package_qa_check_host_user" |
1112 | def package_qa_check_host_user(path, name, d, elf, messages): | 1042 | def package_qa_check_host_user(path, name, d, elf): |
1113 | """Check for paths outside of /home which are owned by the user running bitbake.""" | 1043 | """Check for paths outside of /home which are owned by the user running bitbake.""" |
1044 | global cpath | ||
1114 | 1045 | ||
1115 | if not os.path.lexists(path): | 1046 | if not cpath.lexists(path): |
1116 | return | 1047 | return |
1117 | 1048 | ||
1118 | dest = d.getVar('PKGDEST') | 1049 | dest = d.getVar('PKGDEST') |
@@ -1130,17 +1061,15 @@ def package_qa_check_host_user(path, name, d, elf, messages): | |||
1130 | else: | 1061 | else: |
1131 | check_uid = int(d.getVar('HOST_USER_UID')) | 1062 | check_uid = int(d.getVar('HOST_USER_UID')) |
1132 | if stat.st_uid == check_uid: | 1063 | if stat.st_uid == check_uid: |
1133 | oe.qa.add_message(messages, "host-user-contaminated", "%s: %s is owned by uid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_uid)) | 1064 | oe.qa.handle_error("host-user-contaminated", "%s: %s is owned by uid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_uid), d) |
1134 | return False | ||
1135 | 1065 | ||
1136 | check_gid = int(d.getVar('HOST_USER_GID')) | 1066 | check_gid = int(d.getVar('HOST_USER_GID')) |
1137 | if stat.st_gid == check_gid: | 1067 | if stat.st_gid == check_gid: |
1138 | oe.qa.add_message(messages, "host-user-contaminated", "%s: %s is owned by gid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_gid)) | 1068 | oe.qa.handle_error("host-user-contaminated", "%s: %s is owned by gid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_gid), d) |
1139 | return False | 1069 | package_qa_check_host_user[vardepsexclude] = "HOST_USER_UID HOST_USER_GID" |
1140 | return True | ||
1141 | 1070 | ||
1142 | QARECIPETEST[unhandled-features-check] = "package_qa_check_unhandled_features_check" | 1071 | QARECIPETEST[unhandled-features-check] = "package_qa_check_unhandled_features_check" |
1143 | def package_qa_check_unhandled_features_check(pn, d, messages): | 1072 | def package_qa_check_unhandled_features_check(pn, d): |
1144 | if not bb.data.inherits_class('features_check', d): | 1073 | if not bb.data.inherits_class('features_check', d): |
1145 | var_set = False | 1074 | var_set = False |
1146 | for kind in ['DISTRO', 'MACHINE', 'COMBINED']: | 1075 | for kind in ['DISTRO', 'MACHINE', 'COMBINED']: |
@@ -1151,22 +1080,36 @@ def package_qa_check_unhandled_features_check(pn, d, messages): | |||
1151 | oe.qa.handle_error("unhandled-features-check", "%s: recipe doesn't inherit features_check" % pn, d) | 1080 | oe.qa.handle_error("unhandled-features-check", "%s: recipe doesn't inherit features_check" % pn, d) |
1152 | 1081 | ||
1153 | QARECIPETEST[missing-update-alternatives] = "package_qa_check_missing_update_alternatives" | 1082 | QARECIPETEST[missing-update-alternatives] = "package_qa_check_missing_update_alternatives" |
1154 | def package_qa_check_missing_update_alternatives(pn, d, messages): | 1083 | def package_qa_check_missing_update_alternatives(pn, d): |
1155 | # Look at all packages and find out if any of those sets ALTERNATIVE variable | 1084 | # Look at all packages and find out if any of those sets ALTERNATIVE variable |
1156 | # without inheriting update-alternatives class | 1085 | # without inheriting update-alternatives class |
1157 | for pkg in (d.getVar('PACKAGES') or '').split(): | 1086 | for pkg in (d.getVar('PACKAGES') or '').split(): |
1158 | if d.getVar('ALTERNATIVE:%s' % pkg) and not bb.data.inherits_class('update-alternatives', d): | 1087 | if d.getVar('ALTERNATIVE:%s' % pkg) and not bb.data.inherits_class('update-alternatives', d): |
1159 | oe.qa.handle_error("missing-update-alternatives", "%s: recipe defines ALTERNATIVE:%s but doesn't inherit update-alternatives. This might fail during do_rootfs later!" % (pn, pkg), d) | 1088 | oe.qa.handle_error("missing-update-alternatives", "%s: recipe defines ALTERNATIVE:%s but doesn't inherit update-alternatives. This might fail during do_rootfs later!" % (pn, pkg), d) |
1160 | 1089 | ||
1090 | def parse_test_matrix(matrix_name, skip, d): | ||
1091 | testmatrix = d.getVarFlags(matrix_name) or {} | ||
1092 | g = globals() | ||
1093 | checks = [] | ||
1094 | for w in (d.getVar("WARN_QA") or "").split(): | ||
1095 | if w in skip: | ||
1096 | continue | ||
1097 | if w in testmatrix and testmatrix[w] in g: | ||
1098 | checks.append(g[testmatrix[w]]) | ||
1099 | |||
1100 | for e in (d.getVar("ERROR_QA") or "").split(): | ||
1101 | if e in skip: | ||
1102 | continue | ||
1103 | if e in testmatrix and testmatrix[e] in g: | ||
1104 | checks.append(g[testmatrix[e]]) | ||
1105 | return checks | ||
1106 | parse_test_matrix[vardepsexclude] = "ERROR_QA WARN_QA" | ||
1107 | |||
1108 | |||
1161 | # The PACKAGE FUNC to scan each package | 1109 | # The PACKAGE FUNC to scan each package |
1162 | python do_package_qa () { | 1110 | python do_package_qa () { |
1163 | import subprocess | ||
1164 | import oe.packagedata | 1111 | import oe.packagedata |
1165 | 1112 | ||
1166 | bb.note("DO PACKAGE QA") | ||
1167 | |||
1168 | main_lic = d.getVar('LICENSE') | ||
1169 | |||
1170 | # Check for obsolete license references in main LICENSE (packages are checked below for any changes) | 1113 | # Check for obsolete license references in main LICENSE (packages are checked below for any changes) |
1171 | main_licenses = oe.license.list_licenses(d.getVar('LICENSE')) | 1114 | main_licenses = oe.license.list_licenses(d.getVar('LICENSE')) |
1172 | obsolete = set(oe.license.obsolete_license_list()) & main_licenses | 1115 | obsolete = set(oe.license.obsolete_license_list()) & main_licenses |
@@ -1182,27 +1125,28 @@ python do_package_qa () { | |||
1182 | pn = d.getVar('PN') | 1125 | pn = d.getVar('PN') |
1183 | 1126 | ||
1184 | # Scan the packages... | 1127 | # Scan the packages... |
1185 | pkgdest = d.getVar('PKGDEST') | ||
1186 | packages = set((d.getVar('PACKAGES') or '').split()) | 1128 | packages = set((d.getVar('PACKAGES') or '').split()) |
1129 | # nothing to scan if there are no packages | ||
1130 | if not packages: | ||
1131 | return | ||
1187 | 1132 | ||
1188 | global pkgfiles | 1133 | global pkgfiles, cpath |
1189 | pkgfiles = {} | 1134 | pkgfiles = {} |
1135 | cpath = oe.cachedpath.CachedPath() | ||
1136 | pkgdest = d.getVar('PKGDEST') | ||
1190 | for pkg in packages: | 1137 | for pkg in packages: |
1191 | pkgfiles[pkg] = [] | ||
1192 | pkgdir = os.path.join(pkgdest, pkg) | 1138 | pkgdir = os.path.join(pkgdest, pkg) |
1139 | pkgfiles[pkg] = [] | ||
1193 | for walkroot, dirs, files in os.walk(pkgdir): | 1140 | for walkroot, dirs, files in os.walk(pkgdir): |
1194 | # Don't walk into top-level CONTROL or DEBIAN directories as these | 1141 | # Don't walk into top-level CONTROL or DEBIAN directories as these |
1195 | # are temporary directories created by do_package. | 1142 | # are temporary directories created by do_package. |
1196 | if walkroot == pkgdir: | 1143 | if walkroot == pkgdir: |
1197 | for control in ("CONTROL", "DEBIAN"): | 1144 | for removedir in ("CONTROL", "DEBIAN"): |
1198 | if control in dirs: | 1145 | try: |
1199 | dirs.remove(control) | 1146 | dirs.remove(removedir) |
1200 | for file in files: | 1147 | except ValueError: |
1201 | pkgfiles[pkg].append(os.path.join(walkroot, file)) | 1148 | pass |
1202 | 1149 | pkgfiles[pkg].extend((os.path.join(walkroot, f) for f in files)) | |
1203 | # no packages should be scanned | ||
1204 | if not packages: | ||
1205 | return | ||
1206 | 1150 | ||
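For illustration, a hypothetical check showing why pkgfiles is kept as a module-level global: checks dispatched later in the task can reuse the walk result instead of re-scanning PKGDEST.

```
# Hypothetical sketch only: a per-package check reusing the pkgfiles mapping
# built above (package name -> list of staged file paths under PKGDEST).
QAPKGTEST[example-empty-pkg] = "package_qa_check_example_empty_pkg"
def package_qa_check_example_empty_pkg(pkg, d):
    if not pkgfiles.get(pkg):
        bb.note("%s: no files staged under PKGDEST for this package" % pkg)
```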
1207 | import re | 1151 | import re |
1208 | # The package name matches the [a-z0-9.+-]+ regular expression | 1152 | # The package name matches the [a-z0-9.+-]+ regular expression |
@@ -1213,24 +1157,6 @@ python do_package_qa () { | |||
1213 | for dep in taskdepdata: | 1157 | for dep in taskdepdata: |
1214 | taskdeps.add(taskdepdata[dep][0]) | 1158 | taskdeps.add(taskdepdata[dep][0]) |
1215 | 1159 | ||
1216 | def parse_test_matrix(matrix_name): | ||
1217 | testmatrix = d.getVarFlags(matrix_name) or {} | ||
1218 | g = globals() | ||
1219 | warnchecks = [] | ||
1220 | for w in (d.getVar("WARN_QA") or "").split(): | ||
1221 | if w in skip: | ||
1222 | continue | ||
1223 | if w in testmatrix and testmatrix[w] in g: | ||
1224 | warnchecks.append(g[testmatrix[w]]) | ||
1225 | |||
1226 | errorchecks = [] | ||
1227 | for e in (d.getVar("ERROR_QA") or "").split(): | ||
1228 | if e in skip: | ||
1229 | continue | ||
1230 | if e in testmatrix and testmatrix[e] in g: | ||
1231 | errorchecks.append(g[testmatrix[e]]) | ||
1232 | return warnchecks, errorchecks | ||
1233 | |||
1234 | for package in packages: | 1160 | for package in packages: |
1235 | skip = set((d.getVar('INSANE_SKIP') or "").split() + | 1161 | skip = set((d.getVar('INSANE_SKIP') or "").split() + |
1236 | (d.getVar('INSANE_SKIP:' + package) or "").split()) | 1162 | (d.getVar('INSANE_SKIP:' + package) or "").split()) |
@@ -1243,21 +1169,23 @@ python do_package_qa () { | |||
1243 | oe.qa.handle_error("pkgname", | 1169 | oe.qa.handle_error("pkgname", |
1244 | "%s doesn't match the [a-z0-9.+-]+ regex" % package, d) | 1170 | "%s doesn't match the [a-z0-9.+-]+ regex" % package, d) |
1245 | 1171 | ||
1246 | warn_checks, error_checks = parse_test_matrix("QAPATHTEST") | 1172 | checks = parse_test_matrix("QAPATHTEST", skip, d) |
1247 | package_qa_walk(warn_checks, error_checks, package, d) | 1173 | package_qa_walk(checks, package, d) |
1248 | 1174 | ||
1249 | warn_checks, error_checks = parse_test_matrix("QAPKGTEST") | 1175 | checks = parse_test_matrix("QAPKGTEST", skip, d) |
1250 | package_qa_package(warn_checks, error_checks, package, d) | 1176 | for func in checks: |
1177 | func(package, d) | ||
1251 | 1178 | ||
1252 | package_qa_check_rdepends(package, pkgdest, skip, taskdeps, packages, d) | 1179 | package_qa_check_rdepends(package, pkgdest, skip, taskdeps, packages, d) |
1253 | package_qa_check_deps(package, pkgdest, d) | 1180 | package_qa_check_deps(package, pkgdest, d) |
1254 | 1181 | ||
1255 | warn_checks, error_checks = parse_test_matrix("QARECIPETEST") | 1182 | checks = parse_test_matrix("QARECIPETEST", skip, d) |
1256 | package_qa_recipe(warn_checks, error_checks, pn, d) | 1183 | for func in checks: |
1184 | func(pn, d) | ||
1257 | 1185 | ||
1258 | if 'libdir' in d.getVar("ALL_QA").split(): | 1186 | package_qa_check_libdir(d) |
1259 | package_qa_check_libdir(d) | ||
1260 | 1187 | ||
1188 | cpath = None | ||
1261 | oe.qa.exit_if_errors(d) | 1189 | oe.qa.exit_if_errors(d) |
1262 | } | 1190 | } |
1263 | 1191 | ||
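The knobs this loop consumes are the usual QA variables; illustrative settings (the test names are chosen only as examples) look like:

```
# Illustrative configuration only, not part of the change:
# in a recipe, silence one named test for a single output package:
INSANE_SKIP:${PN} += "host-user-contaminated"
# in a distro or local configuration, promote a warning-only test to an error:
ERROR_QA:append = " pep517-backend"
WARN_QA:remove = "pep517-backend"
```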
@@ -1269,11 +1197,17 @@ do_package_qa[vardepsexclude] = "BB_TASKDEPDATA" | |||
1269 | do_package_qa[rdeptask] = "do_packagedata" | 1197 | do_package_qa[rdeptask] = "do_packagedata" |
1270 | addtask do_package_qa after do_packagedata do_package before do_build | 1198 | addtask do_package_qa after do_packagedata do_package before do_build |
1271 | 1199 | ||
1200 | do_build[rdeptask] += "do_package_qa" | ||
1201 | |||
1272 | # Add the package specific INSANE_SKIPs to the sstate dependencies | 1202 | # Add the package specific INSANE_SKIPs to the sstate dependencies |
1273 | python() { | 1203 | python() { |
1274 | pkgs = (d.getVar('PACKAGES') or '').split() | 1204 | pkgs = (d.getVar('PACKAGES') or '').split() |
1275 | for pkg in pkgs: | 1205 | for pkg in pkgs: |
1276 | d.appendVarFlag("do_package_qa", "vardeps", " INSANE_SKIP:{}".format(pkg)) | 1206 | d.appendVarFlag("do_package_qa", "vardeps", " INSANE_SKIP:{}".format(pkg)) |
1207 | funcs = d.getVarFlags("QAPATHTEST") | ||
1208 | funcs.update(d.getVarFlags("QAPKGTEST")) | ||
1209 | funcs.update(d.getVarFlags("QARECIPETEST")) | ||
1210 | d.appendVarFlag("do_package_qa", "vardeps", " ".join(funcs.values())) | ||
1277 | } | 1211 | } |
1278 | 1212 | ||
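Roughly speaking, and for a hypothetical recipe producing packages foo and foo-dev, the anonymous function above amounts to the hand-written flag appends below, so changing a package's INSANE_SKIP or renaming a registered QA function changes the do_package_qa signature and reruns the task:

```
# Illustrative expansion only (package names are hypothetical):
do_package_qa[vardeps] += " INSANE_SKIP:foo INSANE_SKIP:foo-dev"
do_package_qa[vardeps] += " package_qa_check_missing_update_alternatives"
```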
1279 | SSTATETASKS += "do_package_qa" | 1213 | SSTATETASKS += "do_package_qa" |
@@ -1373,10 +1307,10 @@ python do_qa_patch() { | |||
1373 | srcdir = d.getVar('S') | 1307 | srcdir = d.getVar('S') |
1374 | if not bb.utils.contains('DISTRO_FEATURES', 'ptest', True, False, d): | 1308 | if not bb.utils.contains('DISTRO_FEATURES', 'ptest', True, False, d): |
1375 | pass | 1309 | pass |
1310 | elif not (bb.utils.contains('ERROR_QA', 'unimplemented-ptest', True, False, d) or bb.utils.contains('WARN_QA', 'unimplemented-ptest', True, False, d)): | ||
1311 | pass | ||
1376 | elif bb.data.inherits_class('ptest', d): | 1312 | elif bb.data.inherits_class('ptest', d): |
1377 | bb.note("Package %s QA: skipping unimplemented-ptest: ptest implementation detected" % d.getVar('PN')) | 1313 | bb.note("Package %s QA: skipping unimplemented-ptest: ptest implementation detected" % d.getVar('PN')) |
1378 | elif srcdir == d.getVar('WORKDIR'): | ||
1379 | bb.note("Package %s QA: skipping unimplemented-ptest: This check is not supported for recipe with \"S = \"${WORKDIR}\"" % d.getVar('PN')) | ||
1380 | 1314 | ||
1381 | # Detect perl Test:: based tests | 1315 | # Detect perl Test:: based tests |
1382 | elif os.path.exists(os.path.join(srcdir, "t")) and any(filename.endswith('.t') for filename in os.listdir(os.path.join(srcdir, 't'))): | 1316 | elif os.path.exists(os.path.join(srcdir, "t")) and any(filename.endswith('.t') for filename in os.listdir(os.path.join(srcdir, 't'))): |
@@ -1398,8 +1332,15 @@ python do_qa_patch() { | |||
1398 | elif os.path.exists(os.path.join(srcdir, "Makefile.in")) and (match_line_in_files(srcdir, "**/Makefile.in", r'\s*TESTS\s*\+?=') or match_line_in_files(srcdir,"**/*.at",r'.*AT_INIT')): | 1332 | elif os.path.exists(os.path.join(srcdir, "Makefile.in")) and (match_line_in_files(srcdir, "**/Makefile.in", r'\s*TESTS\s*\+?=') or match_line_in_files(srcdir,"**/*.at",r'.*AT_INIT')): |
1399 | oe.qa.handle_error("unimplemented-ptest", "%s: autotools-based tests detected" % d.getVar('PN'), d) | 1333 | oe.qa.handle_error("unimplemented-ptest", "%s: autotools-based tests detected" % d.getVar('PN'), d) |
1400 | 1334 | ||
1335 | # Detect cargo-based tests | ||
1336 | elif os.path.exists(os.path.join(srcdir, "Cargo.toml")) and ( | ||
1337 | match_line_in_files(srcdir, "**/*.rs", r'\s*#\s*\[\s*test\s*\]') or | ||
1338 | match_line_in_files(srcdir, "**/*.rs", r'\s*#\s*\[\s*cfg\s*\(\s*test\s*\)\s*\]') | ||
1339 | ): | ||
1340 | oe.qa.handle_error("unimplemented-ptest", "%s: cargo-based tests detected" % d.getVar('PN'), d) | ||
1341 | |||
1401 | # Last resort, detect a test directory in sources | 1342 | # Last resort, detect a test directory in sources |
1402 | elif any(filename.lower() in ["test", "tests"] for filename in os.listdir(srcdir)): | 1343 | elif os.path.exists(srcdir) and any(filename.lower() in ["test", "tests"] for filename in os.listdir(srcdir)): |
1403 | oe.qa.handle_error("unimplemented-ptest", "%s: test subdirectory detected" % d.getVar('PN'), d) | 1344 | oe.qa.handle_error("unimplemented-ptest", "%s: test subdirectory detected" % d.getVar('PN'), d) |
1404 | 1345 | ||
1405 | oe.qa.exit_if_errors(d) | 1346 | oe.qa.exit_if_errors(d) |
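Since the detection above only runs when unimplemented-ptest appears in WARN_QA or ERROR_QA, opting in and addressing a cargo-based hit might look like the following, assuming the ptest-cargo class shipped by oe-core:

```
# Illustrative only: opt in to the check in a distro or local configuration,
WARN_QA:append = " unimplemented-ptest"
# ...and, in a Rust recipe flagged by the cargo-based detection, package the
# "cargo test" binaries as a ptest instead of leaving the tests unshipped.
inherit ptest-cargo
```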
@@ -1484,22 +1425,20 @@ Rerun configure task after fixing this.""" | |||
1484 | except subprocess.CalledProcessError: | 1425 | except subprocess.CalledProcessError: |
1485 | pass | 1426 | pass |
1486 | 1427 | ||
1487 | # Check invalid PACKAGECONFIG | ||
1488 | pkgconfig = (d.getVar("PACKAGECONFIG") or "").split() | ||
1489 | if pkgconfig: | ||
1490 | pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {} | ||
1491 | for pconfig in pkgconfig: | ||
1492 | if pconfig not in pkgconfigflags: | ||
1493 | pn = d.getVar('PN') | ||
1494 | error_msg = "%s: invalid PACKAGECONFIG: %s" % (pn, pconfig) | ||
1495 | oe.qa.handle_error("invalid-packageconfig", error_msg, d) | ||
1496 | |||
1497 | oe.qa.exit_if_errors(d) | 1428 | oe.qa.exit_if_errors(d) |
1498 | } | 1429 | } |
1499 | 1430 | ||
1500 | python do_qa_unpack() { | 1431 | python do_qa_unpack() { |
1501 | src_uri = d.getVar('SRC_URI') | 1432 | src_uri = d.getVar('SRC_URI') |
1502 | s_dir = d.getVar('S') | 1433 | s_dir = d.getVar('S') |
1434 | s_dir_orig = d.getVar('S', False) | ||
1435 | |||
1436 | if s_dir_orig == '${WORKDIR}/git' or s_dir_orig == '${UNPACKDIR}/git': | ||
1437 | bb.fatal('Recipes that set S = "${WORKDIR}/git" or S = "${UNPACKDIR}/git" should remove that assignment, as the default S set by bitbake.conf in oe-core now works.') | ||
1438 | |||
1439 | if '${WORKDIR}' in s_dir_orig: | ||
1440 | bb.fatal('S should be set relative to UNPACKDIR, e.g. replace WORKDIR with UNPACKDIR in "S = {}"'.format(s_dir_orig)) | ||
1441 | |||
1503 | if src_uri and not os.path.exists(s_dir): | 1442 | if src_uri and not os.path.exists(s_dir): |
1504 | bb.warn('%s: the directory %s (%s) pointed to by the S variable doesn\'t exist - please set S within the recipe to point to where the source has been unpacked to' % (d.getVar('PN'), d.getVar('S', False), s_dir)) | 1443 | bb.warn('%s: the directory %s (%s) pointed to by the S variable doesn\'t exist - please set S within the recipe to point to where the source has been unpacked to' % (d.getVar('PN'), d.getVar('S', False), s_dir)) |
1505 | } | 1444 | } |
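For illustration, the S values that now fail at do_qa_unpack and a form that remains acceptable (the recipe-specific path is an assumption):

```
# Illustrative recipe fragments only:
#   S = "${WORKDIR}/git"            -> fatal: drop the assignment entirely
#   S = "${WORKDIR}/${BPN}-${PV}"   -> fatal: S must not reference WORKDIR
# when an explicit value is still needed, anchor it under UNPACKDIR instead:
S = "${UNPACKDIR}/${BPN}-${PV}"
```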
@@ -1507,6 +1446,12 @@ python do_qa_unpack() { | |||
1507 | python do_recipe_qa() { | 1446 | python do_recipe_qa() { |
1508 | import re | 1447 | import re |
1509 | 1448 | ||
1449 | def test_naming(pn, d): | ||
1450 | if pn.endswith("-native") and not bb.data.inherits_class("native", d): | ||
1451 | oe.qa.handle_error("recipe-naming", "Recipe %s appears to be native but does not inherit the native class" % pn, d) | ||
1452 | if pn.startswith("nativesdk-") and not bb.data.inherits_class("nativesdk", d): | ||
1453 | oe.qa.handle_error("recipe-naming", "Recipe %s appears to be nativesdk but does not inherit the nativesdk class" % pn, d) | ||
1454 | |||
1510 | def test_missing_metadata(pn, d): | 1455 | def test_missing_metadata(pn, d): |
1511 | fn = d.getVar("FILE") | 1456 | fn = d.getVar("FILE") |
1512 | srcfile = d.getVar('SRC_URI').split() | 1457 | srcfile = d.getVar('SRC_URI').split() |
@@ -1541,10 +1486,21 @@ python do_recipe_qa() { | |||
1541 | if re.search(r"git(hu|la)b\.com/.+/.+/archive/.+", url) or "//codeload.github.com/" in url: | 1486 | if re.search(r"git(hu|la)b\.com/.+/.+/archive/.+", url) or "//codeload.github.com/" in url: |
1542 | oe.qa.handle_error("src-uri-bad", "%s: SRC_URI uses unstable GitHub/GitLab archives, convert recipe to use git protocol" % pn, d) | 1487 | oe.qa.handle_error("src-uri-bad", "%s: SRC_URI uses unstable GitHub/GitLab archives, convert recipe to use git protocol" % pn, d) |
1543 | 1488 | ||
1489 | def test_packageconfig(pn, d): | ||
1490 | pkgconfigs = (d.getVar("PACKAGECONFIG") or "").split() | ||
1491 | if pkgconfigs: | ||
1492 | pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {} | ||
1493 | invalid_pkgconfigs = set(pkgconfigs) - set(pkgconfigflags) | ||
1494 | if invalid_pkgconfigs: | ||
1495 | error_msg = "%s: invalid PACKAGECONFIG(s): %s" % (pn, " ".join(sorted(invalid_pkgconfigs))) | ||
1496 | oe.qa.handle_error("invalid-packageconfig", error_msg, d) | ||
1497 | |||
1544 | pn = d.getVar('PN') | 1498 | pn = d.getVar('PN') |
1499 | test_naming(pn, d) | ||
1545 | test_missing_metadata(pn, d) | 1500 | test_missing_metadata(pn, d) |
1546 | test_missing_maintainer(pn, d) | 1501 | test_missing_maintainer(pn, d) |
1547 | test_srcuri(pn, d) | 1502 | test_srcuri(pn, d) |
1503 | test_packageconfig(pn, d) | ||
1548 | oe.qa.exit_if_errors(d) | 1504 | oe.qa.exit_if_errors(d) |
1549 | } | 1505 | } |
1550 | 1506 | ||
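A minimal sketch of what the relocated invalid-packageconfig test expects; the option name and configure switches are illustrative:

```
# Illustrative only: every option enabled in PACKAGECONFIG needs a matching
# PACKAGECONFIG[<option>] definition; enabling an undefined option now fails
# at do_recipe_qa rather than at configure time.
PACKAGECONFIG ??= "zlib"
PACKAGECONFIG[zlib] = "--with-zlib,--without-zlib,zlib"
```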
@@ -1572,8 +1528,7 @@ do_unpack[postfuncs] += "do_qa_unpack" | |||
1572 | python () { | 1528 | python () { |
1573 | import re | 1529 | import re |
1574 | 1530 | ||
1575 | tests = d.getVar('ALL_QA').split() | 1531 | if bb.utils.contains('ERROR_QA', 'desktop', True, False, d) or bb.utils.contains('WARN_QA', 'desktop', True, False, d): |
1576 | if "desktop" in tests: | ||
1577 | d.appendVar("PACKAGE_DEPENDS", " desktop-file-utils-native") | 1532 | d.appendVar("PACKAGE_DEPENDS", " desktop-file-utils-native") |
1578 | 1533 | ||
1579 | ########################################################################### | 1534 | ########################################################################### |
@@ -1602,17 +1557,31 @@ python () { | |||
1602 | if prog.search(pn): | 1557 | if prog.search(pn): |
1603 | oe.qa.handle_error("uppercase-pn", 'PN: %s is upper case, this can result in unexpected behavior.' % pn, d) | 1558 | oe.qa.handle_error("uppercase-pn", 'PN: %s is upper case, this can result in unexpected behavior.' % pn, d) |
1604 | 1559 | ||
1560 | sourcedir = d.getVar("S") | ||
1561 | builddir = d.getVar("B") | ||
1562 | workdir = d.getVar("WORKDIR") | ||
1563 | unpackdir = d.getVar("UNPACKDIR") | ||
1564 | if sourcedir == workdir: | ||
1565 | bb.fatal("Using S = ${WORKDIR} is no longer supported") | ||
1566 | if builddir == workdir: | ||
1567 | bb.fatal("Using B = ${WORKDIR} is no longer supported") | ||
1568 | if unpackdir == workdir: | ||
1569 | bb.fatal("Using UNPACKDIR = ${WORKDIR} is not supported") | ||
1570 | if sourcedir[-1] == '/': | ||
1571 | bb.warn("Recipe %s sets the S variable with a trailing slash ('%s'); please remove it" % (d.getVar("PN"), d.getVar("S"))) | ||
1572 | if builddir[-1] == '/': | ||
1573 | bb.warn("Recipe %s sets the B variable with a trailing slash ('%s'); please remove it" % (d.getVar("PN"), d.getVar("B"))) | ||
1574 | |||
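Illustrative values for the parse-time checks above, plus a common out-of-tree layout (the build directory name is an assumption):

```
# Illustrative only:
#   S = "${WORKDIR}"            -> fatal, no longer supported
#   B = "${WORKDIR}"            -> fatal, no longer supported
#   S = "${UNPACKDIR}/${BP}/"   -> warning, drop the trailing slash
# a typical separate build directory, as used by out-of-tree builds:
B = "${WORKDIR}/build"
```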
1605 | # Some people mistakenly use DEPENDS:${PN} instead of DEPENDS and wonder | 1575 | # Some people mistakenly use DEPENDS:${PN} instead of DEPENDS and wonder |
1606 | # why it doesn't work. | 1576 | # why it doesn't work. |
1607 | if (d.getVar(d.expand('DEPENDS:${PN}'))): | 1577 | if (d.getVar(d.expand('DEPENDS:${PN}'))): |
1608 | oe.qa.handle_error("pkgvarcheck", "recipe uses DEPENDS:${PN}, should use DEPENDS", d) | 1578 | oe.qa.handle_error("pkgvarcheck", "recipe uses DEPENDS:${PN}, should use DEPENDS", d) |
1609 | 1579 | ||
1610 | # virtual/ is meaningless for these variables | 1580 | # virtual/ is meaningless for these variables |
1611 | if "virtual-slash" in (d.getVar("ALL_QA") or "").split(): | 1581 | for k in ['RDEPENDS', 'RPROVIDES']: |
1612 | for k in ['RDEPENDS', 'RPROVIDES']: | 1582 | for var in bb.utils.explode_deps(d.getVar(k + ':' + pn) or ""): |
1613 | for var in bb.utils.explode_deps(d.getVar(k + ':' + pn) or ""): | 1583 | if var.startswith("virtual/"): |
1614 | if var.startswith("virtual/"): | 1584 | oe.qa.handle_error("virtual-slash", "%s is set to %s but the substring 'virtual/' holds no meaning in this context. It only works for build time dependencies, not runtime ones. It is suggested to use 'VIRTUAL-RUNTIME_' variables instead." % (k, var), d) |
1615 | oe.qa.handle_error("virtual-slash", "%s is set to %s but the substring 'virtual/' holds no meaning in this context. It only works for build time dependencies, not runtime ones. It is suggested to use 'VIRTUAL-RUNTIME_' variables instead." % (k, var), d) | ||
1616 | 1585 | ||
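As a hedged sketch of the remedy the message suggests (the chosen virtual runtime provider is an assumption):

```
# Illustrative only: virtual/ providers resolve at build time, so runtime
# dependencies should go through a VIRTUAL-RUNTIME_ indirection instead.
#   RDEPENDS:${PN} += "virtual/base-utils"    # flagged by virtual-slash
VIRTUAL-RUNTIME_base-utils ?= "busybox"
RDEPENDS:${PN} += "${VIRTUAL-RUNTIME_base-utils}"
```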
1617 | issues = [] | 1586 | issues = [] |
1618 | if (d.getVar('PACKAGES') or "").split(): | 1587 | if (d.getVar('PACKAGES') or "").split(): |
@@ -1622,8 +1591,7 @@ python () { | |||
1622 | if d.getVar(var, False): | 1591 | if d.getVar(var, False): |
1623 | issues.append(var) | 1592 | issues.append(var) |
1624 | 1593 | ||
1625 | fakeroot_tests = d.getVar('FAKEROOT_QA').split() | 1594 | if bb.utils.contains('ERROR_QA', 'host-user-contaminated', True, False, d) or bb.utils.contains('WARN_QA', 'host-user-contaminated', True, False, d): |
1626 | if set(tests) & set(fakeroot_tests): | ||
1627 | d.setVarFlag('do_package_qa', 'fakeroot', '1') | 1595 | d.setVarFlag('do_package_qa', 'fakeroot', '1') |
1628 | d.appendVarFlag('do_package_qa', 'depends', ' virtual/fakeroot-native:do_populate_sysroot') | 1596 | d.appendVarFlag('do_package_qa', 'depends', ' virtual/fakeroot-native:do_populate_sysroot') |
1629 | else: | 1597 | else: |