| author | Mike Crowe <mac@mcrowe.com> | 2021-10-15 15:39:53 +0100 |
|---|---|---|
| committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2021-10-17 11:56:32 +0100 |
| commit | 51b1611e204b32b3e65176ef86d4562e2f330835 (patch) | |
| tree | 7845a123d2eddbc2c9d5f9f72e1c2be9c2dea730 | |
| parent | 25675706b6339b6a11e69dbad1c885a4b79fc57c (diff) | |
| download | poky-51b1611e204b32b3e65176ef86d4562e2f330835.tar.gz | |
lib/oe/qa,insane: Move extra error handling functions to library

Extract package_qa_write_error, package_qa_handle_error and
package_qa_add_message functions from insane.bbclass to lib/oe/qa.py and
drop the package_qa_ prefixes.

Update various bbclasses to use the new functions. No import is required
since base.bbclass puts oe.qa in OE_IMPORTS.

Stop requiring callers to manually track whether a fatal error has been
encountered via a "sane" flag. Instead, replace the QA_SANE variable with
QA_ERRORS_FOUND and call oe.qa.exit_if_errors or
oe.qa.exit_with_message_if_errors at the end of each task.

Inspired by discussion resulting from
https://lists.openembedded.org/g/openembedded-core/message/156793 and
https://lists.openembedded.org/g/openembedded-core/message/156900

(From OE-Core rev: f0ad152ef4cc15c042bc9eeefb6af096d054b220)

Signed-off-by: Mike Crowe <mac@mcrowe.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
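
The hunk for the new meta/lib/oe/qa.py is not reproduced in this view (the diffstat below only records 34 lines added to it). Reconstructed from the package_qa_* functions removed from insane.bbclass further down and from the new call sites, the relocated helpers plausibly look like the following sketch; the exact message wording and the QA_ERRORS_FOUND bookkeeping are assumptions rather than a verbatim copy of the commit.

```python
# Sketch of the helpers this commit moves into meta/lib/oe/qa.py, reconstructed
# from the package_qa_* functions removed from insane.bbclass below; not verbatim.
import bb  # provided by the BitBake environment these lib/oe modules run in

def write_error(type, error, d):
    logfile = d.getVar('QA_LOGFILE')
    if logfile:
        p = d.getVar('P')
        with open(logfile, "a+") as f:
            f.write("%s: %s [%s]\n" % (p, error, type))

def handle_error(error_class, error_msg, d):
    if error_class in (d.getVar("ERROR_QA") or "").split():
        write_error(error_class, error_msg, d)
        bb.error("QA Issue: %s [%s]" % (error_msg, error_class))
        # Assumed replacement for the old d.setVar("QA_SANE", False).
        d.setVar("QA_ERRORS_FOUND", "True")
        return False
    elif error_class in (d.getVar("WARN_QA") or "").split():
        write_error(error_class, error_msg, d)
        bb.warn("QA Issue: %s [%s]" % (error_msg, error_class))
    else:
        bb.note("QA Issue: %s [%s]" % (error_msg, error_class))
    return True

def add_message(messages, section, new_msg):
    if section not in messages:
        messages[section] = new_msg
    else:
        messages[section] = messages[section] + "\n" + new_msg

def exit_with_message_if_errors(message, d):
    # Fail the task once, at the end, if any handled issue was classed ERROR_QA.
    if bb.utils.to_boolean(d.getVar("QA_ERRORS_FOUND"), False):
        bb.fatal(message)

def exit_if_errors(d):
    exit_with_message_if_errors("Fatal QA errors were found, failing task.", d)
```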
| -rw-r--r-- | meta/classes/buildhistory.bbclass | 3 |
| -rw-r--r-- | meta/classes/insane.bbclass | 180 |
| -rw-r--r-- | meta/classes/multilib.bbclass | 3 |
| -rw-r--r-- | meta/classes/package.bbclass | 26 |
| -rw-r--r-- | meta/classes/ptest.bbclass | 2 |
| -rw-r--r-- | meta/lib/oe/qa.py | 34 |
6 files changed, 120 insertions, 128 deletions
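
Before wading into the hunks, note the shape of the second half of the change: tasks no longer consult QA_SANE themselves. Each check just reports through oe.qa.handle_error(), and the task calls a single exit helper at the end. Paraphrased from the do_qa_staging hunk further down (illustrative, not an exact copy of that hunk):

```python
python do_qa_staging() {
    bb.note("QA checking staging")
    qa_check_staged(d.expand('${SYSROOT_DESTDIR}${libdir}'), d)
    # The task fails here only if a reported issue was classed as ERROR_QA.
    oe.qa.exit_with_message_if_errors("QA staging was broken by the package built above", d)
}
```
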
diff --git a/meta/classes/buildhistory.bbclass b/meta/classes/buildhistory.bbclass
index 7c44fec2d1..62d0d781a1 100644
--- a/meta/classes/buildhistory.bbclass
+++ b/meta/classes/buildhistory.bbclass
| @@ -287,7 +287,7 @@ python buildhistory_emit_pkghistory() { | |||
| 287 | r = bb.utils.vercmp((pkge, pkgv, pkgr), (last_pkge, last_pkgv, last_pkgr)) | 287 | r = bb.utils.vercmp((pkge, pkgv, pkgr), (last_pkge, last_pkgv, last_pkgr)) |
| 288 | if r < 0: | 288 | if r < 0: |
| 289 | msg = "Package version for package %s went backwards which would break package feeds (from %s:%s-%s to %s:%s-%s)" % (pkg, last_pkge, last_pkgv, last_pkgr, pkge, pkgv, pkgr) | 289 | msg = "Package version for package %s went backwards which would break package feeds (from %s:%s-%s to %s:%s-%s)" % (pkg, last_pkge, last_pkgv, last_pkgr, pkge, pkgv, pkgr) |
| 290 | package_qa_handle_error("version-going-backwards", msg, d) | 290 | oe.qa.handle_error("version-going-backwards", msg, d) |
| 291 | 291 | ||
| 292 | pkginfo = PackageInfo(pkg) | 292 | pkginfo = PackageInfo(pkg) |
| 293 | # Apparently the version can be different on a per-package basis (see Python) | 293 | # Apparently the version can be different on a per-package basis (see Python) |
| @@ -321,6 +321,7 @@ python buildhistory_emit_pkghistory() { | |||
| 321 | 321 | ||
| 322 | # Create files-in-<package-name>.txt files containing a list of files of each recipe's package | 322 | # Create files-in-<package-name>.txt files containing a list of files of each recipe's package |
| 323 | bb.build.exec_func("buildhistory_list_pkg_files", d) | 323 | bb.build.exec_func("buildhistory_list_pkg_files", d) |
| 324 | oe.qa.exit_if_errors(d) | ||
| 324 | } | 325 | } |
| 325 | 326 | ||
| 326 | python buildhistory_emit_outputsigs() { | 327 | python buildhistory_emit_outputsigs() { |
diff --git a/meta/classes/insane.bbclass b/meta/classes/insane.bbclass
index 433e4dfa33..9ad9771dfa 100644
--- a/meta/classes/insane.bbclass
+++ b/meta/classes/insane.bbclass
| @@ -18,8 +18,6 @@ | |||
| 18 | # files under exec_prefix | 18 | # files under exec_prefix |
| 19 | # -Check if the package name is upper case | 19 | # -Check if the package name is upper case |
| 20 | 20 | ||
| 21 | QA_SANE = "True" | ||
| 22 | |||
| 23 | # Elect whether a given type of error is a warning or error, they may | 21 | # Elect whether a given type of error is a warning or error, they may |
| 24 | # have been set by other files. | 22 | # have been set by other files. |
| 25 | WARN_QA ?= " libdir xorg-driver-abi \ | 23 | WARN_QA ?= " libdir xorg-driver-abi \ |
| @@ -59,32 +57,6 @@ def package_qa_clean_path(path, d, pkg=None): | |||
| 59 | path = path.replace(os.path.join(d.getVar("PKGDEST"), pkg), "/") | 57 | path = path.replace(os.path.join(d.getVar("PKGDEST"), pkg), "/") |
| 60 | return path.replace(d.getVar("TMPDIR"), "/").replace("//", "/") | 58 | return path.replace(d.getVar("TMPDIR"), "/").replace("//", "/") |
| 61 | 59 | ||
| 62 | def package_qa_write_error(type, error, d): | ||
| 63 | logfile = d.getVar('QA_LOGFILE') | ||
| 64 | if logfile: | ||
| 65 | p = d.getVar('P') | ||
| 66 | with open(logfile, "a+") as f: | ||
| 67 | f.write("%s: %s [%s]\n" % (p, error, type)) | ||
| 68 | |||
| 69 | def package_qa_handle_error(error_class, error_msg, d): | ||
| 70 | if error_class in (d.getVar("ERROR_QA") or "").split(): | ||
| 71 | package_qa_write_error(error_class, error_msg, d) | ||
| 72 | bb.error("QA Issue: %s [%s]" % (error_msg, error_class)) | ||
| 73 | d.setVar("QA_SANE", False) | ||
| 74 | return False | ||
| 75 | elif error_class in (d.getVar("WARN_QA") or "").split(): | ||
| 76 | package_qa_write_error(error_class, error_msg, d) | ||
| 77 | bb.warn("QA Issue: %s [%s]" % (error_msg, error_class)) | ||
| 78 | else: | ||
| 79 | bb.note("QA Issue: %s [%s]" % (error_msg, error_class)) | ||
| 80 | return True | ||
| 81 | |||
| 82 | def package_qa_add_message(messages, section, new_msg): | ||
| 83 | if section not in messages: | ||
| 84 | messages[section] = new_msg | ||
| 85 | else: | ||
| 86 | messages[section] = messages[section] + "\n" + new_msg | ||
| 87 | |||
| 88 | QAPATHTEST[shebang-size] = "package_qa_check_shebang_size" | 60 | QAPATHTEST[shebang-size] = "package_qa_check_shebang_size" |
| 89 | def package_qa_check_shebang_size(path, name, d, elf, messages): | 61 | def package_qa_check_shebang_size(path, name, d, elf, messages): |
| 90 | import stat | 62 | import stat |
| @@ -106,7 +78,7 @@ def package_qa_check_shebang_size(path, name, d, elf, messages): | |||
| 106 | return | 78 | return |
| 107 | 79 | ||
| 108 | if len(stanza) > 129: | 80 | if len(stanza) > 129: |
| 109 | package_qa_add_message(messages, "shebang-size", "%s: %s maximum shebang size exceeded, the maximum size is 128." % (name, package_qa_clean_path(path, d))) | 81 | oe.qa.add_message(messages, "shebang-size", "%s: %s maximum shebang size exceeded, the maximum size is 128." % (name, package_qa_clean_path(path, d))) |
| 110 | return | 82 | return |
| 111 | 83 | ||
| 112 | QAPATHTEST[libexec] = "package_qa_check_libexec" | 84 | QAPATHTEST[libexec] = "package_qa_check_libexec" |
| @@ -118,7 +90,7 @@ def package_qa_check_libexec(path,name, d, elf, messages): | |||
| 118 | return True | 90 | return True |
| 119 | 91 | ||
| 120 | if 'libexec' in path.split(os.path.sep): | 92 | if 'libexec' in path.split(os.path.sep): |
| 121 | package_qa_add_message(messages, "libexec", "%s: %s is using libexec please relocate to %s" % (name, package_qa_clean_path(path, d), libexec)) | 93 | oe.qa.add_message(messages, "libexec", "%s: %s is using libexec please relocate to %s" % (name, package_qa_clean_path(path, d), libexec)) |
| 122 | return False | 94 | return False |
| 123 | 95 | ||
| 124 | return True | 96 | return True |
| @@ -146,7 +118,7 @@ def package_qa_check_rpath(file,name, d, elf, messages): | |||
| 146 | rpath = m.group(1) | 118 | rpath = m.group(1) |
| 147 | for dir in bad_dirs: | 119 | for dir in bad_dirs: |
| 148 | if dir in rpath: | 120 | if dir in rpath: |
| 149 | package_qa_add_message(messages, "rpaths", "package %s contains bad RPATH %s in file %s" % (name, rpath, file)) | 121 | oe.qa.add_message(messages, "rpaths", "package %s contains bad RPATH %s in file %s" % (name, rpath, file)) |
| 150 | 122 | ||
| 151 | QAPATHTEST[useless-rpaths] = "package_qa_check_useless_rpaths" | 123 | QAPATHTEST[useless-rpaths] = "package_qa_check_useless_rpaths" |
| 152 | def package_qa_check_useless_rpaths(file, name, d, elf, messages): | 124 | def package_qa_check_useless_rpaths(file, name, d, elf, messages): |
| @@ -176,7 +148,7 @@ def package_qa_check_useless_rpaths(file, name, d, elf, messages): | |||
| 176 | if rpath_eq(rpath, libdir) or rpath_eq(rpath, base_libdir): | 148 | if rpath_eq(rpath, libdir) or rpath_eq(rpath, base_libdir): |
| 177 | # The dynamic linker searches both these places anyway. There is no point in | 149 | # The dynamic linker searches both these places anyway. There is no point in |
| 178 | # looking there again. | 150 | # looking there again. |
| 179 | package_qa_add_message(messages, "useless-rpaths", "%s: %s contains probably-redundant RPATH %s" % (name, package_qa_clean_path(file, d, name), rpath)) | 151 | oe.qa.add_message(messages, "useless-rpaths", "%s: %s contains probably-redundant RPATH %s" % (name, package_qa_clean_path(file, d, name), rpath)) |
| 180 | 152 | ||
| 181 | QAPATHTEST[dev-so] = "package_qa_check_dev" | 153 | QAPATHTEST[dev-so] = "package_qa_check_dev" |
| 182 | def package_qa_check_dev(path, name, d, elf, messages): | 154 | def package_qa_check_dev(path, name, d, elf, messages): |
| @@ -185,7 +157,7 @@ def package_qa_check_dev(path, name, d, elf, messages): | |||
| 185 | """ | 157 | """ |
| 186 | 158 | ||
| 187 | if not name.endswith("-dev") and not name.endswith("-dbg") and not name.endswith("-ptest") and not name.startswith("nativesdk-") and path.endswith(".so") and os.path.islink(path): | 159 | if not name.endswith("-dev") and not name.endswith("-dbg") and not name.endswith("-ptest") and not name.startswith("nativesdk-") and path.endswith(".so") and os.path.islink(path): |
| 188 | package_qa_add_message(messages, "dev-so", "non -dev/-dbg/nativesdk- package %s contains symlink .so '%s'" % \ | 160 | oe.qa.add_message(messages, "dev-so", "non -dev/-dbg/nativesdk- package %s contains symlink .so '%s'" % \ |
| 189 | (name, package_qa_clean_path(path, d, name))) | 161 | (name, package_qa_clean_path(path, d, name))) |
| 190 | 162 | ||
| 191 | QAPATHTEST[dev-elf] = "package_qa_check_dev_elf" | 163 | QAPATHTEST[dev-elf] = "package_qa_check_dev_elf" |
| @@ -196,7 +168,7 @@ def package_qa_check_dev_elf(path, name, d, elf, messages): | |||
| 196 | install link-time .so files that are linker scripts. | 168 | install link-time .so files that are linker scripts. |
| 197 | """ | 169 | """ |
| 198 | if name.endswith("-dev") and path.endswith(".so") and not os.path.islink(path) and elf: | 170 | if name.endswith("-dev") and path.endswith(".so") and not os.path.islink(path) and elf: |
| 199 | package_qa_add_message(messages, "dev-elf", "-dev package %s contains non-symlink .so '%s'" % \ | 171 | oe.qa.add_message(messages, "dev-elf", "-dev package %s contains non-symlink .so '%s'" % \ |
| 200 | (name, package_qa_clean_path(path, d, name))) | 172 | (name, package_qa_clean_path(path, d, name))) |
| 201 | 173 | ||
| 202 | QAPATHTEST[staticdev] = "package_qa_check_staticdev" | 174 | QAPATHTEST[staticdev] = "package_qa_check_staticdev" |
| @@ -209,7 +181,7 @@ def package_qa_check_staticdev(path, name, d, elf, messages): | |||
| 209 | """ | 181 | """ |
| 210 | 182 | ||
| 211 | if not name.endswith("-pic") and not name.endswith("-staticdev") and not name.endswith("-ptest") and path.endswith(".a") and not path.endswith("_nonshared.a") and not '/usr/lib/debug-static/' in path and not '/.debug-static/' in path: | 183 | if not name.endswith("-pic") and not name.endswith("-staticdev") and not name.endswith("-ptest") and path.endswith(".a") and not path.endswith("_nonshared.a") and not '/usr/lib/debug-static/' in path and not '/.debug-static/' in path: |
| 212 | package_qa_add_message(messages, "staticdev", "non -staticdev package contains static .a library: %s path '%s'" % \ | 184 | oe.qa.add_message(messages, "staticdev", "non -staticdev package contains static .a library: %s path '%s'" % \ |
| 213 | (name, package_qa_clean_path(path,d, name))) | 185 | (name, package_qa_clean_path(path,d, name))) |
| 214 | 186 | ||
| 215 | QAPATHTEST[mime] = "package_qa_check_mime" | 187 | QAPATHTEST[mime] = "package_qa_check_mime" |
| @@ -220,7 +192,7 @@ def package_qa_check_mime(path, name, d, elf, messages): | |||
| 220 | """ | 192 | """ |
| 221 | 193 | ||
| 222 | if d.getVar("datadir") + "/mime/packages" in path and path.endswith('.xml') and not bb.data.inherits_class("mime", d): | 194 | if d.getVar("datadir") + "/mime/packages" in path and path.endswith('.xml') and not bb.data.inherits_class("mime", d): |
| 223 | package_qa_add_message(messages, "mime", "package contains mime types but does not inherit mime: %s path '%s'" % \ | 195 | oe.qa.add_message(messages, "mime", "package contains mime types but does not inherit mime: %s path '%s'" % \ |
| 224 | (name, package_qa_clean_path(path,d))) | 196 | (name, package_qa_clean_path(path,d))) |
| 225 | 197 | ||
| 226 | QAPATHTEST[mime-xdg] = "package_qa_check_mime_xdg" | 198 | QAPATHTEST[mime-xdg] = "package_qa_check_mime_xdg" |
| @@ -247,9 +219,9 @@ def package_qa_check_mime_xdg(path, name, d, elf, messages): | |||
| 247 | if name == d.getVar('PN'): | 219 | if name == d.getVar('PN'): |
| 248 | pkgname = '${PN}' | 220 | pkgname = '${PN}' |
| 249 | wstr += "If yes: add \'inhert mime-xdg\' and \'MIME_XDG_PACKAGES += \"%s\"\' / if no add \'INSANE_SKIP:%s += \"mime-xdg\"\' to recipe." % (pkgname, pkgname) | 221 | wstr += "If yes: add \'inhert mime-xdg\' and \'MIME_XDG_PACKAGES += \"%s\"\' / if no add \'INSANE_SKIP:%s += \"mime-xdg\"\' to recipe." % (pkgname, pkgname) |
| 250 | package_qa_add_message(messages, "mime-xdg", wstr) | 222 | oe.qa.add_message(messages, "mime-xdg", wstr) |
| 251 | if mime_type_found: | 223 | if mime_type_found: |
| 252 | package_qa_add_message(messages, "mime-xdg", "package contains desktop file with key 'MimeType' but does not inhert mime-xdg: %s path '%s'" % \ | 224 | oe.qa.add_message(messages, "mime-xdg", "package contains desktop file with key 'MimeType' but does not inhert mime-xdg: %s path '%s'" % \ |
| 253 | (name, package_qa_clean_path(path,d))) | 225 | (name, package_qa_clean_path(path,d))) |
| 254 | 226 | ||
| 255 | def package_qa_check_libdir(d): | 227 | def package_qa_check_libdir(d): |
| @@ -313,7 +285,7 @@ def package_qa_check_libdir(d): | |||
| 313 | pass | 285 | pass |
| 314 | 286 | ||
| 315 | if messages: | 287 | if messages: |
| 316 | package_qa_handle_error("libdir", "\n".join(messages), d) | 288 | oe.qa.handle_error("libdir", "\n".join(messages), d) |
| 317 | 289 | ||
| 318 | QAPATHTEST[debug-files] = "package_qa_check_dbg" | 290 | QAPATHTEST[debug-files] = "package_qa_check_dbg" |
| 319 | def package_qa_check_dbg(path, name, d, elf, messages): | 291 | def package_qa_check_dbg(path, name, d, elf, messages): |
| @@ -323,7 +295,7 @@ def package_qa_check_dbg(path, name, d, elf, messages): | |||
| 323 | 295 | ||
| 324 | if not "-dbg" in name and not "-ptest" in name: | 296 | if not "-dbg" in name and not "-ptest" in name: |
| 325 | if '.debug' in path.split(os.path.sep): | 297 | if '.debug' in path.split(os.path.sep): |
| 326 | package_qa_add_message(messages, "debug-files", "non debug package contains .debug directory: %s path %s" % \ | 298 | oe.qa.add_message(messages, "debug-files", "non debug package contains .debug directory: %s path %s" % \ |
| 327 | (name, package_qa_clean_path(path,d))) | 299 | (name, package_qa_clean_path(path,d))) |
| 328 | 300 | ||
| 329 | QAPATHTEST[arch] = "package_qa_check_arch" | 301 | QAPATHTEST[arch] = "package_qa_check_arch" |
| @@ -343,7 +315,7 @@ def package_qa_check_arch(path,name,d, elf, messages): | |||
| 343 | 315 | ||
| 344 | if target_arch == "allarch": | 316 | if target_arch == "allarch": |
| 345 | pn = d.getVar('PN') | 317 | pn = d.getVar('PN') |
| 346 | package_qa_add_message(messages, "arch", pn + ": Recipe inherits the allarch class, but has packaged architecture-specific binaries") | 318 | oe.qa.add_message(messages, "arch", pn + ": Recipe inherits the allarch class, but has packaged architecture-specific binaries") |
| 347 | return | 319 | return |
| 348 | 320 | ||
| 349 | # FIXME: Cross package confuse this check, so just skip them | 321 | # FIXME: Cross package confuse this check, so just skip them |
| @@ -366,13 +338,13 @@ def package_qa_check_arch(path,name,d, elf, messages): | |||
| 366 | target_os == "linux-gnu_ilp32" or re.match(r'mips64.*32', d.getVar('DEFAULTTUNE'))) | 338 | target_os == "linux-gnu_ilp32" or re.match(r'mips64.*32', d.getVar('DEFAULTTUNE'))) |
| 367 | is_bpf = (oe.qa.elf_machine_to_string(elf.machine()) == "BPF") | 339 | is_bpf = (oe.qa.elf_machine_to_string(elf.machine()) == "BPF") |
| 368 | if not ((machine == elf.machine()) or is_32 or is_bpf): | 340 | if not ((machine == elf.machine()) or is_32 or is_bpf): |
| 369 | package_qa_add_message(messages, "arch", "Architecture did not match (%s, expected %s) in %s" % \ | 341 | oe.qa.add_message(messages, "arch", "Architecture did not match (%s, expected %s) in %s" % \ |
| 370 | (oe.qa.elf_machine_to_string(elf.machine()), oe.qa.elf_machine_to_string(machine), package_qa_clean_path(path, d, name))) | 342 | (oe.qa.elf_machine_to_string(elf.machine()), oe.qa.elf_machine_to_string(machine), package_qa_clean_path(path, d, name))) |
| 371 | elif not ((bits == elf.abiSize()) or is_32 or is_bpf): | 343 | elif not ((bits == elf.abiSize()) or is_32 or is_bpf): |
| 372 | package_qa_add_message(messages, "arch", "Bit size did not match (%d, expected %d) in %s" % \ | 344 | oe.qa.add_message(messages, "arch", "Bit size did not match (%d, expected %d) in %s" % \ |
| 373 | (elf.abiSize(), bits, package_qa_clean_path(path, d, name))) | 345 | (elf.abiSize(), bits, package_qa_clean_path(path, d, name))) |
| 374 | elif not ((littleendian == elf.isLittleEndian()) or is_bpf): | 346 | elif not ((littleendian == elf.isLittleEndian()) or is_bpf): |
| 375 | package_qa_add_message(messages, "arch", "Endiannes did not match (%d, expected %d) in %s" % \ | 347 | oe.qa.add_message(messages, "arch", "Endiannes did not match (%d, expected %d) in %s" % \ |
| 376 | (elf.isLittleEndian(), littleendian, package_qa_clean_path(path,d, name))) | 348 | (elf.isLittleEndian(), littleendian, package_qa_clean_path(path,d, name))) |
| 377 | 349 | ||
| 378 | QAPATHTEST[desktop] = "package_qa_check_desktop" | 350 | QAPATHTEST[desktop] = "package_qa_check_desktop" |
| @@ -385,7 +357,7 @@ def package_qa_check_desktop(path, name, d, elf, messages): | |||
| 385 | output = os.popen("%s %s" % (desktop_file_validate, path)) | 357 | output = os.popen("%s %s" % (desktop_file_validate, path)) |
| 386 | # This only produces output on errors | 358 | # This only produces output on errors |
| 387 | for l in output: | 359 | for l in output: |
| 388 | package_qa_add_message(messages, "desktop", "Desktop file issue: " + l.strip()) | 360 | oe.qa.add_message(messages, "desktop", "Desktop file issue: " + l.strip()) |
| 389 | 361 | ||
| 390 | QAPATHTEST[textrel] = "package_qa_textrel" | 362 | QAPATHTEST[textrel] = "package_qa_textrel" |
| 391 | def package_qa_textrel(path, name, d, elf, messages): | 363 | def package_qa_textrel(path, name, d, elf, messages): |
| @@ -411,7 +383,7 @@ def package_qa_textrel(path, name, d, elf, messages): | |||
| 411 | 383 | ||
| 412 | if not sane: | 384 | if not sane: |
| 413 | path = package_qa_clean_path(path, d, name) | 385 | path = package_qa_clean_path(path, d, name) |
| 414 | package_qa_add_message(messages, "textrel", "%s: ELF binary %s has relocations in .text" % (name, path)) | 386 | oe.qa.add_message(messages, "textrel", "%s: ELF binary %s has relocations in .text" % (name, path)) |
| 415 | 387 | ||
| 416 | QAPATHTEST[ldflags] = "package_qa_hash_style" | 388 | QAPATHTEST[ldflags] = "package_qa_hash_style" |
| 417 | def package_qa_hash_style(path, name, d, elf, messages): | 389 | def package_qa_hash_style(path, name, d, elf, messages): |
| @@ -446,7 +418,7 @@ def package_qa_hash_style(path, name, d, elf, messages): | |||
| 446 | sane = True | 418 | sane = True |
| 447 | if has_syms and not sane: | 419 | if has_syms and not sane: |
| 448 | path = package_qa_clean_path(path, d, name) | 420 | path = package_qa_clean_path(path, d, name) |
| 449 | package_qa_add_message(messages, "ldflags", "File %s in package %s doesn't have GNU_HASH (didn't pass LDFLAGS?)" % (path, name)) | 421 | oe.qa.add_message(messages, "ldflags", "File %s in package %s doesn't have GNU_HASH (didn't pass LDFLAGS?)" % (path, name)) |
| 450 | 422 | ||
| 451 | 423 | ||
| 452 | QAPATHTEST[buildpaths] = "package_qa_check_buildpaths" | 424 | QAPATHTEST[buildpaths] = "package_qa_check_buildpaths" |
| @@ -467,7 +439,7 @@ def package_qa_check_buildpaths(path, name, d, elf, messages): | |||
| 467 | file_content = f.read() | 439 | file_content = f.read() |
| 468 | if tmpdir in file_content: | 440 | if tmpdir in file_content: |
| 469 | trimmed = path.replace(os.path.join (d.getVar("PKGDEST"), name), "") | 441 | trimmed = path.replace(os.path.join (d.getVar("PKGDEST"), name), "") |
| 470 | package_qa_add_message(messages, "buildpaths", "File %s in package %s contains reference to TMPDIR" % (trimmed, name)) | 442 | oe.qa.add_message(messages, "buildpaths", "File %s in package %s contains reference to TMPDIR" % (trimmed, name)) |
| 471 | 443 | ||
| 472 | 444 | ||
| 473 | QAPATHTEST[xorg-driver-abi] = "package_qa_check_xorg_driver_abi" | 445 | QAPATHTEST[xorg-driver-abi] = "package_qa_check_xorg_driver_abi" |
| @@ -486,7 +458,7 @@ def package_qa_check_xorg_driver_abi(path, name, d, elf, messages): | |||
| 486 | for rdep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + name) or ""): | 458 | for rdep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + name) or ""): |
| 487 | if rdep.startswith("%sxorg-abi-" % mlprefix): | 459 | if rdep.startswith("%sxorg-abi-" % mlprefix): |
| 488 | return | 460 | return |
| 489 | package_qa_add_message(messages, "xorg-driver-abi", "Package %s contains Xorg driver (%s) but no xorg-abi- dependencies" % (name, os.path.basename(path))) | 461 | oe.qa.add_message(messages, "xorg-driver-abi", "Package %s contains Xorg driver (%s) but no xorg-abi- dependencies" % (name, os.path.basename(path))) |
| 490 | 462 | ||
| 491 | QAPATHTEST[infodir] = "package_qa_check_infodir" | 463 | QAPATHTEST[infodir] = "package_qa_check_infodir" |
| 492 | def package_qa_check_infodir(path, name, d, elf, messages): | 464 | def package_qa_check_infodir(path, name, d, elf, messages): |
| @@ -496,7 +468,7 @@ def package_qa_check_infodir(path, name, d, elf, messages): | |||
| 496 | infodir = d.expand("${infodir}/dir") | 468 | infodir = d.expand("${infodir}/dir") |
| 497 | 469 | ||
| 498 | if infodir in path: | 470 | if infodir in path: |
| 499 | package_qa_add_message(messages, "infodir", "The /usr/share/info/dir file is not meant to be shipped in a particular package.") | 471 | oe.qa.add_message(messages, "infodir", "The /usr/share/info/dir file is not meant to be shipped in a particular package.") |
| 500 | 472 | ||
| 501 | QAPATHTEST[symlink-to-sysroot] = "package_qa_check_symlink_to_sysroot" | 473 | QAPATHTEST[symlink-to-sysroot] = "package_qa_check_symlink_to_sysroot" |
| 502 | def package_qa_check_symlink_to_sysroot(path, name, d, elf, messages): | 474 | def package_qa_check_symlink_to_sysroot(path, name, d, elf, messages): |
| @@ -509,7 +481,7 @@ def package_qa_check_symlink_to_sysroot(path, name, d, elf, messages): | |||
| 509 | tmpdir = d.getVar('TMPDIR') | 481 | tmpdir = d.getVar('TMPDIR') |
| 510 | if target.startswith(tmpdir): | 482 | if target.startswith(tmpdir): |
| 511 | trimmed = path.replace(os.path.join (d.getVar("PKGDEST"), name), "") | 483 | trimmed = path.replace(os.path.join (d.getVar("PKGDEST"), name), "") |
| 512 | package_qa_add_message(messages, "symlink-to-sysroot", "Symlink %s in %s points to TMPDIR" % (trimmed, name)) | 484 | oe.qa.add_message(messages, "symlink-to-sysroot", "Symlink %s in %s points to TMPDIR" % (trimmed, name)) |
| 513 | 485 | ||
| 514 | # Check license variables | 486 | # Check license variables |
| 515 | do_populate_lic[postfuncs] += "populate_lic_qa_checksum" | 487 | do_populate_lic[postfuncs] += "populate_lic_qa_checksum" |
| @@ -517,7 +489,6 @@ python populate_lic_qa_checksum() { | |||
| 517 | """ | 489 | """ |
| 518 | Check for changes in the license files. | 490 | Check for changes in the license files. |
| 519 | """ | 491 | """ |
| 520 | sane = True | ||
| 521 | 492 | ||
| 522 | lic_files = d.getVar('LIC_FILES_CHKSUM') or '' | 493 | lic_files = d.getVar('LIC_FILES_CHKSUM') or '' |
| 523 | lic = d.getVar('LICENSE') | 494 | lic = d.getVar('LICENSE') |
| @@ -527,7 +498,7 @@ python populate_lic_qa_checksum() { | |||
| 527 | return | 498 | return |
| 528 | 499 | ||
| 529 | if not lic_files and d.getVar('SRC_URI'): | 500 | if not lic_files and d.getVar('SRC_URI'): |
| 530 | sane &= package_qa_handle_error("license-checksum", pn + ": Recipe file fetches files and does not have license file information (LIC_FILES_CHKSUM)", d) | 501 | oe.qa.handle_error("license-checksum", pn + ": Recipe file fetches files and does not have license file information (LIC_FILES_CHKSUM)", d) |
| 531 | 502 | ||
| 532 | srcdir = d.getVar('S') | 503 | srcdir = d.getVar('S') |
| 533 | corebase_licensefile = d.getVar('COREBASE') + "/LICENSE" | 504 | corebase_licensefile = d.getVar('COREBASE') + "/LICENSE" |
| @@ -535,11 +506,11 @@ python populate_lic_qa_checksum() { | |||
| 535 | try: | 506 | try: |
| 536 | (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(url) | 507 | (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(url) |
| 537 | except bb.fetch.MalformedUrl: | 508 | except bb.fetch.MalformedUrl: |
| 538 | sane &= package_qa_handle_error("license-checksum", pn + ": LIC_FILES_CHKSUM contains an invalid URL: " + url, d) | 509 | oe.qa.handle_error("license-checksum", pn + ": LIC_FILES_CHKSUM contains an invalid URL: " + url, d) |
| 539 | continue | 510 | continue |
| 540 | srclicfile = os.path.join(srcdir, path) | 511 | srclicfile = os.path.join(srcdir, path) |
| 541 | if not os.path.isfile(srclicfile): | 512 | if not os.path.isfile(srclicfile): |
| 542 | sane &= package_qa_handle_error("license-checksum", pn + ": LIC_FILES_CHKSUM points to an invalid file: " + srclicfile, d) | 513 | oe.qa.handle_error("license-checksum", pn + ": LIC_FILES_CHKSUM points to an invalid file: " + srclicfile, d) |
| 543 | continue | 514 | continue |
| 544 | 515 | ||
| 545 | if (srclicfile == corebase_licensefile): | 516 | if (srclicfile == corebase_licensefile): |
| @@ -607,10 +578,9 @@ python populate_lic_qa_checksum() { | |||
| 607 | else: | 578 | else: |
| 608 | msg = pn + ": LIC_FILES_CHKSUM is not specified for " + url | 579 | msg = pn + ": LIC_FILES_CHKSUM is not specified for " + url |
| 609 | msg = msg + "\n" + pn + ": The md5 checksum is " + md5chksum | 580 | msg = msg + "\n" + pn + ": The md5 checksum is " + md5chksum |
| 610 | sane &= package_qa_handle_error("license-checksum", msg, d) | 581 | oe.qa.handle_error("license-checksum", msg, d) |
| 611 | 582 | ||
| 612 | if not sane: | 583 | oe.qa.exit_if_errors(d) |
| 613 | bb.fatal("Fatal QA errors found, failing task.") | ||
| 614 | } | 584 | } |
| 615 | 585 | ||
| 616 | def qa_check_staged(path,d): | 586 | def qa_check_staged(path,d): |
| @@ -622,7 +592,6 @@ def qa_check_staged(path,d): | |||
| 622 | responsible for the errors easily even if we look at every .pc and .la file. | 592 | responsible for the errors easily even if we look at every .pc and .la file. |
| 623 | """ | 593 | """ |
| 624 | 594 | ||
| 625 | sane = True | ||
| 626 | tmpdir = d.getVar('TMPDIR') | 595 | tmpdir = d.getVar('TMPDIR') |
| 627 | workdir = os.path.join(tmpdir, "work") | 596 | workdir = os.path.join(tmpdir, "work") |
| 628 | recipesysroot = d.getVar("RECIPE_SYSROOT") | 597 | recipesysroot = d.getVar("RECIPE_SYSROOT") |
| @@ -655,16 +624,14 @@ def qa_check_staged(path,d): | |||
| 655 | file_content = file_content.replace(recipesysroot, "") | 624 | file_content = file_content.replace(recipesysroot, "") |
| 656 | if workdir in file_content: | 625 | if workdir in file_content: |
| 657 | error_msg = "%s failed sanity test (workdir) in path %s" % (file,root) | 626 | error_msg = "%s failed sanity test (workdir) in path %s" % (file,root) |
| 658 | sane &= package_qa_handle_error("la", error_msg, d) | 627 | oe.qa.handle_error("la", error_msg, d) |
| 659 | elif file.endswith(".pc") and not skip_pkgconfig: | 628 | elif file.endswith(".pc") and not skip_pkgconfig: |
| 660 | with open(path) as f: | 629 | with open(path) as f: |
| 661 | file_content = f.read() | 630 | file_content = f.read() |
| 662 | file_content = file_content.replace(recipesysroot, "") | 631 | file_content = file_content.replace(recipesysroot, "") |
| 663 | if pkgconfigcheck in file_content: | 632 | if pkgconfigcheck in file_content: |
| 664 | error_msg = "%s failed sanity test (tmpdir) in path %s" % (file,root) | 633 | error_msg = "%s failed sanity test (tmpdir) in path %s" % (file,root) |
| 665 | sane &= package_qa_handle_error("pkgconfig", error_msg, d) | 634 | oe.qa.handle_error("pkgconfig", error_msg, d) |
| 666 | |||
| 667 | return sane | ||
| 668 | 635 | ||
| 669 | # Run all package-wide warnfuncs and errorfuncs | 636 | # Run all package-wide warnfuncs and errorfuncs |
| 670 | def package_qa_package(warnfuncs, errorfuncs, package, d): | 637 | def package_qa_package(warnfuncs, errorfuncs, package, d): |
| @@ -677,9 +644,9 @@ def package_qa_package(warnfuncs, errorfuncs, package, d): | |||
| 677 | func(package, d, errors) | 644 | func(package, d, errors) |
| 678 | 645 | ||
| 679 | for w in warnings: | 646 | for w in warnings: |
| 680 | package_qa_handle_error(w, warnings[w], d) | 647 | oe.qa.handle_error(w, warnings[w], d) |
| 681 | for e in errors: | 648 | for e in errors: |
| 682 | package_qa_handle_error(e, errors[e], d) | 649 | oe.qa.handle_error(e, errors[e], d) |
| 683 | 650 | ||
| 684 | return len(errors) == 0 | 651 | return len(errors) == 0 |
| 685 | 652 | ||
| @@ -694,9 +661,9 @@ def package_qa_recipe(warnfuncs, errorfuncs, pn, d): | |||
| 694 | func(pn, d, errors) | 661 | func(pn, d, errors) |
| 695 | 662 | ||
| 696 | for w in warnings: | 663 | for w in warnings: |
| 697 | package_qa_handle_error(w, warnings[w], d) | 664 | oe.qa.handle_error(w, warnings[w], d) |
| 698 | for e in errors: | 665 | for e in errors: |
| 699 | package_qa_handle_error(e, errors[e], d) | 666 | oe.qa.handle_error(e, errors[e], d) |
| 700 | 667 | ||
| 701 | return len(errors) == 0 | 668 | return len(errors) == 0 |
| 702 | 669 | ||
| @@ -722,9 +689,9 @@ def package_qa_walk(warnfuncs, errorfuncs, package, d): | |||
| 722 | func(path, package, d, elf, errors) | 689 | func(path, package, d, elf, errors) |
| 723 | 690 | ||
| 724 | for w in warnings: | 691 | for w in warnings: |
| 725 | package_qa_handle_error(w, warnings[w], d) | 692 | oe.qa.handle_error(w, warnings[w], d) |
| 726 | for e in errors: | 693 | for e in errors: |
| 727 | package_qa_handle_error(e, errors[e], d) | 694 | oe.qa.handle_error(e, errors[e], d) |
| 728 | 695 | ||
| 729 | def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d): | 696 | def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d): |
| 730 | # Don't do this check for kernel/module recipes, there aren't too many debug/development | 697 | # Don't do this check for kernel/module recipes, there aren't too many debug/development |
| @@ -744,10 +711,10 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d): | |||
| 744 | for rdepend in rdepends: | 711 | for rdepend in rdepends: |
| 745 | if "-dbg" in rdepend and "debug-deps" not in skip: | 712 | if "-dbg" in rdepend and "debug-deps" not in skip: |
| 746 | error_msg = "%s rdepends on %s" % (pkg,rdepend) | 713 | error_msg = "%s rdepends on %s" % (pkg,rdepend) |
| 747 | package_qa_handle_error("debug-deps", error_msg, d) | 714 | oe.qa.handle_error("debug-deps", error_msg, d) |
| 748 | if (not "-dev" in pkg and not "-staticdev" in pkg) and rdepend.endswith("-dev") and "dev-deps" not in skip: | 715 | if (not "-dev" in pkg and not "-staticdev" in pkg) and rdepend.endswith("-dev") and "dev-deps" not in skip: |
| 749 | error_msg = "%s rdepends on %s" % (pkg, rdepend) | 716 | error_msg = "%s rdepends on %s" % (pkg, rdepend) |
| 750 | package_qa_handle_error("dev-deps", error_msg, d) | 717 | oe.qa.handle_error("dev-deps", error_msg, d) |
| 751 | if rdepend not in packages: | 718 | if rdepend not in packages: |
| 752 | rdep_data = oe.packagedata.read_subpkgdata(rdepend, d) | 719 | rdep_data = oe.packagedata.read_subpkgdata(rdepend, d) |
| 753 | if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps: | 720 | if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps: |
| @@ -768,7 +735,7 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d): | |||
| 768 | error_msg = "%s rdepends on %s, but it isn't a build dependency, missing %s in DEPENDS or PACKAGECONFIG?" % (pkg, rdepend, rdep_data['PN']) | 735 | error_msg = "%s rdepends on %s, but it isn't a build dependency, missing %s in DEPENDS or PACKAGECONFIG?" % (pkg, rdepend, rdep_data['PN']) |
| 769 | else: | 736 | else: |
| 770 | error_msg = "%s rdepends on %s, but it isn't a build dependency?" % (pkg, rdepend) | 737 | error_msg = "%s rdepends on %s, but it isn't a build dependency?" % (pkg, rdepend) |
| 771 | package_qa_handle_error("build-deps", error_msg, d) | 738 | oe.qa.handle_error("build-deps", error_msg, d) |
| 772 | 739 | ||
| 773 | if "file-rdeps" not in skip: | 740 | if "file-rdeps" not in skip: |
| 774 | ignored_file_rdeps = set(['/bin/sh', '/usr/bin/env', 'rtld(GNU_HASH)']) | 741 | ignored_file_rdeps = set(['/bin/sh', '/usr/bin/env', 'rtld(GNU_HASH)']) |
| @@ -821,7 +788,7 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d): | |||
| 821 | for key in filerdepends: | 788 | for key in filerdepends: |
| 822 | error_msg = "%s contained in package %s requires %s, but no providers found in RDEPENDS:%s?" % \ | 789 | error_msg = "%s contained in package %s requires %s, but no providers found in RDEPENDS:%s?" % \ |
| 823 | (filerdepends[key].replace(":%s" % pkg, "").replace("@underscore@", "_"), pkg, key, pkg) | 790 | (filerdepends[key].replace(":%s" % pkg, "").replace("@underscore@", "_"), pkg, key, pkg) |
| 824 | package_qa_handle_error("file-rdeps", error_msg, d) | 791 | oe.qa.handle_error("file-rdeps", error_msg, d) |
| 825 | package_qa_check_rdepends[vardepsexclude] = "OVERRIDES" | 792 | package_qa_check_rdepends[vardepsexclude] = "OVERRIDES" |
| 826 | 793 | ||
| 827 | def package_qa_check_deps(pkg, pkgdest, d): | 794 | def package_qa_check_deps(pkg, pkgdest, d): |
| @@ -838,7 +805,7 @@ def package_qa_check_deps(pkg, pkgdest, d): | |||
| 838 | for v in rvar[dep]: | 805 | for v in rvar[dep]: |
| 839 | if v and not v.startswith(('< ', '= ', '> ', '<= ', '>=')): | 806 | if v and not v.startswith(('< ', '= ', '> ', '<= ', '>=')): |
| 840 | error_msg = "%s:%s is invalid: %s (%s) only comparisons <, =, >, <=, and >= are allowed" % (var, pkg, dep, v) | 807 | error_msg = "%s:%s is invalid: %s (%s) only comparisons <, =, >, <=, and >= are allowed" % (var, pkg, dep, v) |
| 841 | package_qa_handle_error("dep-cmp", error_msg, d) | 808 | oe.qa.handle_error("dep-cmp", error_msg, d) |
| 842 | 809 | ||
| 843 | check_valid_deps('RDEPENDS') | 810 | check_valid_deps('RDEPENDS') |
| 844 | check_valid_deps('RRECOMMENDS') | 811 | check_valid_deps('RRECOMMENDS') |
| @@ -849,13 +816,14 @@ def package_qa_check_deps(pkg, pkgdest, d): | |||
| 849 | 816 | ||
| 850 | QAPKGTEST[usrmerge] = "package_qa_check_usrmerge" | 817 | QAPKGTEST[usrmerge] = "package_qa_check_usrmerge" |
| 851 | def package_qa_check_usrmerge(pkg, d, messages): | 818 | def package_qa_check_usrmerge(pkg, d, messages): |
| 819 | |||
| 852 | pkgdest = d.getVar('PKGDEST') | 820 | pkgdest = d.getVar('PKGDEST') |
| 853 | pkg_dir = pkgdest + os.sep + pkg + os.sep | 821 | pkg_dir = pkgdest + os.sep + pkg + os.sep |
| 854 | merged_dirs = ['bin', 'sbin', 'lib'] + d.getVar('MULTILIB_VARIANTS').split() | 822 | merged_dirs = ['bin', 'sbin', 'lib'] + d.getVar('MULTILIB_VARIANTS').split() |
| 855 | for f in merged_dirs: | 823 | for f in merged_dirs: |
| 856 | if os.path.exists(pkg_dir + f) and not os.path.islink(pkg_dir + f): | 824 | if os.path.exists(pkg_dir + f) and not os.path.islink(pkg_dir + f): |
| 857 | msg = "%s package is not obeying usrmerge distro feature. /%s should be relocated to /usr." % (pkg, f) | 825 | msg = "%s package is not obeying usrmerge distro feature. /%s should be relocated to /usr." % (pkg, f) |
| 858 | package_qa_add_message(messages, "usrmerge", msg) | 826 | oe.qa.add_message(messages, "usrmerge", msg) |
| 859 | return False | 827 | return False |
| 860 | return True | 828 | return True |
| 861 | 829 | ||
| @@ -874,7 +842,7 @@ def package_qa_check_perllocalpod(pkg, d, messages): | |||
| 874 | if matches: | 842 | if matches: |
| 875 | matches = [package_qa_clean_path(path, d, pkg) for path in matches] | 843 | matches = [package_qa_clean_path(path, d, pkg) for path in matches] |
| 876 | msg = "%s contains perllocal.pod (%s), should not be installed" % (pkg, " ".join(matches)) | 844 | msg = "%s contains perllocal.pod (%s), should not be installed" % (pkg, " ".join(matches)) |
| 877 | package_qa_add_message(messages, "perllocalpod", msg) | 845 | oe.qa.add_message(messages, "perllocalpod", msg) |
| 878 | 846 | ||
| 879 | QAPKGTEST[expanded-d] = "package_qa_check_expanded_d" | 847 | QAPKGTEST[expanded-d] = "package_qa_check_expanded_d" |
| 880 | def package_qa_check_expanded_d(package, d, messages): | 848 | def package_qa_check_expanded_d(package, d, messages): |
| @@ -889,10 +857,10 @@ def package_qa_check_expanded_d(package, d, messages): | |||
| 889 | bbvar = d.getVar(var + ":" + package) or "" | 857 | bbvar = d.getVar(var + ":" + package) or "" |
| 890 | if expanded_d in bbvar: | 858 | if expanded_d in bbvar: |
| 891 | if var == 'FILES': | 859 | if var == 'FILES': |
| 892 | package_qa_add_message(messages, "expanded-d", "FILES in %s recipe should not contain the ${D} variable as it references the local build directory not the target filesystem, best solution is to remove the ${D} reference" % package) | 860 | oe.qa.add_message(messages, "expanded-d", "FILES in %s recipe should not contain the ${D} variable as it references the local build directory not the target filesystem, best solution is to remove the ${D} reference" % package) |
| 893 | sane = False | 861 | sane = False |
| 894 | else: | 862 | else: |
| 895 | package_qa_add_message(messages, "expanded-d", "%s in %s recipe contains ${D}, it should be replaced by $D instead" % (var, package)) | 863 | oe.qa.add_message(messages, "expanded-d", "%s in %s recipe contains ${D}, it should be replaced by $D instead" % (var, package)) |
| 896 | sane = False | 864 | sane = False |
| 897 | return sane | 865 | return sane |
| 898 | 866 | ||
| @@ -910,7 +878,7 @@ def package_qa_check_unlisted_pkg_lics(package, d, messages): | |||
| 910 | if not unlisted: | 878 | if not unlisted: |
| 911 | return True | 879 | return True |
| 912 | 880 | ||
| 913 | package_qa_add_message(messages, "unlisted-pkg-lics", | 881 | oe.qa.add_message(messages, "unlisted-pkg-lics", |
| 914 | "LICENSE:%s includes licenses (%s) that are not " | 882 | "LICENSE:%s includes licenses (%s) that are not " |
| 915 | "listed in LICENSE" % (package, ' '.join(unlisted))) | 883 | "listed in LICENSE" % (package, ' '.join(unlisted))) |
| 916 | return False | 884 | return False |
| @@ -925,7 +893,7 @@ def package_qa_check_encoding(keys, encode, d): | |||
| 925 | except UnicodeDecodeError as e: | 893 | except UnicodeDecodeError as e: |
| 926 | error_msg = "%s has non %s characters" % (key,enc) | 894 | error_msg = "%s has non %s characters" % (key,enc) |
| 927 | sane = False | 895 | sane = False |
| 928 | package_qa_handle_error("invalid-chars", error_msg, d) | 896 | oe.qa.handle_error("invalid-chars", error_msg, d) |
| 929 | return sane | 897 | return sane |
| 930 | 898 | ||
| 931 | for key in keys: | 899 | for key in keys: |
| @@ -958,12 +926,12 @@ def package_qa_check_host_user(path, name, d, elf, messages): | |||
| 958 | else: | 926 | else: |
| 959 | check_uid = int(d.getVar('HOST_USER_UID')) | 927 | check_uid = int(d.getVar('HOST_USER_UID')) |
| 960 | if stat.st_uid == check_uid: | 928 | if stat.st_uid == check_uid: |
| 961 | package_qa_add_message(messages, "host-user-contaminated", "%s: %s is owned by uid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_uid)) | 929 | oe.qa.add_message(messages, "host-user-contaminated", "%s: %s is owned by uid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_uid)) |
| 962 | return False | 930 | return False |
| 963 | 931 | ||
| 964 | check_gid = int(d.getVar('HOST_USER_GID')) | 932 | check_gid = int(d.getVar('HOST_USER_GID')) |
| 965 | if stat.st_gid == check_gid: | 933 | if stat.st_gid == check_gid: |
| 966 | package_qa_add_message(messages, "host-user-contaminated", "%s: %s is owned by gid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_gid)) | 934 | oe.qa.add_message(messages, "host-user-contaminated", "%s: %s is owned by gid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_gid)) |
| 967 | return False | 935 | return False |
| 968 | return True | 936 | return True |
| 969 | 937 | ||
| @@ -972,11 +940,11 @@ def package_qa_check_src_uri(pn, d, messages): | |||
| 972 | import re | 940 | import re |
| 973 | 941 | ||
| 974 | if "${PN}" in d.getVar("SRC_URI", False): | 942 | if "${PN}" in d.getVar("SRC_URI", False): |
| 975 | package_qa_handle_error("src-uri-bad", "%s: SRC_URI uses PN not BPN" % pn, d) | 943 | oe.qa.handle_error("src-uri-bad", "%s: SRC_URI uses PN not BPN" % pn, d) |
| 976 | 944 | ||
| 977 | for url in d.getVar("SRC_URI").split(): | 945 | for url in d.getVar("SRC_URI").split(): |
| 978 | if re.search(r"git(hu|la)b\.com/.+/.+/archive/.+", url): | 946 | if re.search(r"git(hu|la)b\.com/.+/.+/archive/.+", url): |
| 979 | package_qa_handle_error("src-uri-bad", "%s: SRC_URI uses unstable GitHub/GitLab archives, convert recipe to use git protocol" % pn, d) | 947 | oe.qa.handle_error("src-uri-bad", "%s: SRC_URI uses unstable GitHub/GitLab archives, convert recipe to use git protocol" % pn, d) |
| 980 | 948 | ||
| 981 | QARECIPETEST[unhandled-features-check] = "package_qa_check_unhandled_features_check" | 949 | QARECIPETEST[unhandled-features-check] = "package_qa_check_unhandled_features_check" |
| 982 | def package_qa_check_unhandled_features_check(pn, d, messages): | 950 | def package_qa_check_unhandled_features_check(pn, d, messages): |
| @@ -987,7 +955,7 @@ def package_qa_check_unhandled_features_check(pn, d, messages): | |||
| 987 | if d.getVar(var) is not None or d.overridedata.get(var) is not None: | 955 | if d.getVar(var) is not None or d.overridedata.get(var) is not None: |
| 988 | var_set = True | 956 | var_set = True |
| 989 | if var_set: | 957 | if var_set: |
| 990 | package_qa_handle_error("unhandled-features-check", "%s: recipe doesn't inherit features_check" % pn, d) | 958 | oe.qa.handle_error("unhandled-features-check", "%s: recipe doesn't inherit features_check" % pn, d) |
| 991 | 959 | ||
| 992 | QARECIPETEST[missing-update-alternatives] = "package_qa_check_missing_update_alternatives" | 960 | QARECIPETEST[missing-update-alternatives] = "package_qa_check_missing_update_alternatives" |
| 993 | def package_qa_check_missing_update_alternatives(pn, d, messages): | 961 | def package_qa_check_missing_update_alternatives(pn, d, messages): |
| @@ -995,7 +963,7 @@ def package_qa_check_missing_update_alternatives(pn, d, messages): | |||
| 995 | # without inheriting update-alternatives class | 963 | # without inheriting update-alternatives class |
| 996 | for pkg in (d.getVar('PACKAGES') or '').split(): | 964 | for pkg in (d.getVar('PACKAGES') or '').split(): |
| 997 | if d.getVar('ALTERNATIVE:%s' % pkg) and not bb.data.inherits_class('update-alternatives', d): | 965 | if d.getVar('ALTERNATIVE:%s' % pkg) and not bb.data.inherits_class('update-alternatives', d): |
| 998 | package_qa_handle_error("missing-update-alternatives", "%s: recipe defines ALTERNATIVE:%s but doesn't inherit update-alternatives. This might fail during do_rootfs later!" % (pn, pkg), d) | 966 | oe.qa.handle_error("missing-update-alternatives", "%s: recipe defines ALTERNATIVE:%s but doesn't inherit update-alternatives. This might fail during do_rootfs later!" % (pn, pkg), d) |
| 999 | 967 | ||
| 1000 | # The PACKAGE FUNC to scan each package | 968 | # The PACKAGE FUNC to scan each package |
| 1001 | python do_package_qa () { | 969 | python do_package_qa () { |
| @@ -1071,7 +1039,7 @@ python do_package_qa () { | |||
| 1071 | bb.note("Checking Package: %s" % package) | 1039 | bb.note("Checking Package: %s" % package) |
| 1072 | # Check package name | 1040 | # Check package name |
| 1073 | if not pkgname_pattern.match(package): | 1041 | if not pkgname_pattern.match(package): |
| 1074 | package_qa_handle_error("pkgname", | 1042 | oe.qa.handle_error("pkgname", |
| 1075 | "%s doesn't match the [a-z0-9.+-]+ regex" % package, d) | 1043 | "%s doesn't match the [a-z0-9.+-]+ regex" % package, d) |
| 1076 | 1044 | ||
| 1077 | warn_checks, error_checks = parse_test_matrix("QAPATHTEST") | 1045 | warn_checks, error_checks = parse_test_matrix("QAPATHTEST") |
| @@ -1089,10 +1057,7 @@ python do_package_qa () { | |||
| 1089 | if 'libdir' in d.getVar("ALL_QA").split(): | 1057 | if 'libdir' in d.getVar("ALL_QA").split(): |
| 1090 | package_qa_check_libdir(d) | 1058 | package_qa_check_libdir(d) |
| 1091 | 1059 | ||
| 1092 | qa_sane = d.getVar("QA_SANE") | 1060 | oe.qa.exit_if_errors(d) |
| 1093 | if not qa_sane: | ||
| 1094 | bb.fatal("QA run found fatal errors. Please consider fixing them.") | ||
| 1095 | bb.note("DONE with PACKAGE QA") | ||
| 1096 | } | 1061 | } |
| 1097 | 1062 | ||
| 1098 | # binutils is used for most checks, so need to set as dependency | 1063 | # binutils is used for most checks, so need to set as dependency |
| @@ -1119,8 +1084,8 @@ addtask do_package_qa_setscene | |||
| 1119 | 1084 | ||
| 1120 | python do_qa_staging() { | 1085 | python do_qa_staging() { |
| 1121 | bb.note("QA checking staging") | 1086 | bb.note("QA checking staging") |
| 1122 | if not qa_check_staged(d.expand('${SYSROOT_DESTDIR}${libdir}'), d): | 1087 | qa_check_staged(d.expand('${SYSROOT_DESTDIR}${libdir}'), d) |
| 1123 | bb.fatal("QA staging was broken by the package built above") | 1088 | oe.qa.exit_with_message_if_errors("QA staging was broken by the package built above", d) |
| 1124 | } | 1089 | } |
| 1125 | 1090 | ||
| 1126 | python do_qa_patch() { | 1091 | python do_qa_patch() { |
| @@ -1168,7 +1133,7 @@ python do_qa_patch() { | |||
| 1168 | elif 'patch-fuzz' in d.getVar('WARN_QA'): | 1133 | elif 'patch-fuzz' in d.getVar('WARN_QA'): |
| 1169 | bb.warn(msg) | 1134 | bb.warn(msg) |
| 1170 | msg = "Patch log indicates that patches do not apply cleanly." | 1135 | msg = "Patch log indicates that patches do not apply cleanly." |
| 1171 | package_qa_handle_error("patch-fuzz", msg, d) | 1136 | oe.qa.handle_error("patch-fuzz", msg, d) |
| 1172 | } | 1137 | } |
| 1173 | 1138 | ||
| 1174 | python do_qa_configure() { | 1139 | python do_qa_configure() { |
| @@ -1196,7 +1161,7 @@ python do_qa_configure() { | |||
| 1196 | if subprocess.call(statement, shell=True) == 0: | 1161 | if subprocess.call(statement, shell=True) == 0: |
| 1197 | error_msg = """This autoconf log indicates errors, it looked at host include and/or library paths while determining system capabilities. | 1162 | error_msg = """This autoconf log indicates errors, it looked at host include and/or library paths while determining system capabilities. |
| 1198 | Rerun configure task after fixing this.""" | 1163 | Rerun configure task after fixing this.""" |
| 1199 | package_qa_handle_error("configure-unsafe", error_msg, d) | 1164 | oe.qa.handle_error("configure-unsafe", error_msg, d) |
| 1200 | 1165 | ||
| 1201 | if "configure.ac" in files: | 1166 | if "configure.ac" in files: |
| 1202 | configs.append(os.path.join(root,"configure.ac")) | 1167 | configs.append(os.path.join(root,"configure.ac")) |
| @@ -1226,7 +1191,7 @@ Rerun configure task after fixing this.""" | |||
| 1226 | gnu = "grep \"^[[:space:]]*AM_GNU_GETTEXT\" %s >/dev/null" % config | 1191 | gnu = "grep \"^[[:space:]]*AM_GNU_GETTEXT\" %s >/dev/null" % config |
| 1227 | if subprocess.call(gnu, shell=True) == 0: | 1192 | if subprocess.call(gnu, shell=True) == 0: |
| 1228 | error_msg = "AM_GNU_GETTEXT used but no inherit gettext" | 1193 | error_msg = "AM_GNU_GETTEXT used but no inherit gettext" |
| 1229 | package_qa_handle_error("configure-gettext", error_msg, d) | 1194 | oe.qa.handle_error("configure-gettext", error_msg, d) |
| 1230 | 1195 | ||
| 1231 | ########################################################################### | 1196 | ########################################################################### |
| 1232 | # Check unrecognised configure options (with a white list) | 1197 | # Check unrecognised configure options (with a white list) |
| @@ -1249,7 +1214,7 @@ Rerun configure task after fixing this.""" | |||
| 1249 | if options: | 1214 | if options: |
| 1250 | pn = d.getVar('PN') | 1215 | pn = d.getVar('PN') |
| 1251 | error_msg = pn + ": configure was passed unrecognised options: " + " ".join(options) | 1216 | error_msg = pn + ": configure was passed unrecognised options: " + " ".join(options) |
| 1252 | package_qa_handle_error("unknown-configure-option", error_msg, d) | 1217 | oe.qa.handle_error("unknown-configure-option", error_msg, d) |
| 1253 | except subprocess.CalledProcessError: | 1218 | except subprocess.CalledProcessError: |
| 1254 | pass | 1219 | pass |
| 1255 | 1220 | ||
| @@ -1261,11 +1226,9 @@ Rerun configure task after fixing this.""" | |||
| 1261 | if pconfig not in pkgconfigflags: | 1226 | if pconfig not in pkgconfigflags: |
| 1262 | pn = d.getVar('PN') | 1227 | pn = d.getVar('PN') |
| 1263 | error_msg = "%s: invalid PACKAGECONFIG: %s" % (pn, pconfig) | 1228 | error_msg = "%s: invalid PACKAGECONFIG: %s" % (pn, pconfig) |
| 1264 | package_qa_handle_error("invalid-packageconfig", error_msg, d) | 1229 | oe.qa.handle_error("invalid-packageconfig", error_msg, d) |
| 1265 | 1230 | ||
| 1266 | qa_sane = d.getVar("QA_SANE") | 1231 | oe.qa.exit_if_errors(d) |
| 1267 | if not qa_sane: | ||
| 1268 | bb.fatal("Fatal QA errors found, failing task.") | ||
| 1269 | } | 1232 | } |
| 1270 | 1233 | ||
| 1271 | python do_qa_unpack() { | 1234 | python do_qa_unpack() { |
| @@ -1318,15 +1281,15 @@ python () { | |||
| 1318 | pn = d.getVar('PN') | 1281 | pn = d.getVar('PN') |
| 1319 | if pn in overrides: | 1282 | if pn in overrides: |
| 1320 | msg = 'Recipe %s has PN of "%s" which is in OVERRIDES, this can result in unexpected behaviour.' % (d.getVar("FILE"), pn) | 1283 | msg = 'Recipe %s has PN of "%s" which is in OVERRIDES, this can result in unexpected behaviour.' % (d.getVar("FILE"), pn) |
| 1321 | package_qa_handle_error("pn-overrides", msg, d) | 1284 | oe.qa.handle_error("pn-overrides", msg, d) |
| 1322 | prog = re.compile(r'[A-Z]') | 1285 | prog = re.compile(r'[A-Z]') |
| 1323 | if prog.search(pn): | 1286 | if prog.search(pn): |
| 1324 | package_qa_handle_error("uppercase-pn", 'PN: %s is upper case, this can result in unexpected behavior.' % pn, d) | 1287 | oe.qa.handle_error("uppercase-pn", 'PN: %s is upper case, this can result in unexpected behavior.' % pn, d) |
| 1325 | 1288 | ||
| 1326 | # Some people mistakenly use DEPENDS:${PN} instead of DEPENDS and wonder | 1289 | # Some people mistakenly use DEPENDS:${PN} instead of DEPENDS and wonder |
| 1327 | # why it doesn't work. | 1290 | # why it doesn't work. |
| 1328 | if (d.getVar(d.expand('DEPENDS:${PN}'))): | 1291 | if (d.getVar(d.expand('DEPENDS:${PN}'))): |
| 1329 | package_qa_handle_error("pkgvarcheck", "recipe uses DEPENDS:${PN}, should use DEPENDS", d) | 1292 | oe.qa.handle_error("pkgvarcheck", "recipe uses DEPENDS:${PN}, should use DEPENDS", d) |
| 1330 | 1293 | ||
| 1331 | issues = [] | 1294 | issues = [] |
| 1332 | if (d.getVar('PACKAGES') or "").split(): | 1295 | if (d.getVar('PACKAGES') or "").split(): |
| @@ -1343,7 +1306,7 @@ python () { | |||
| 1343 | else: | 1306 | else: |
| 1344 | d.setVarFlag('do_package_qa', 'rdeptask', '') | 1307 | d.setVarFlag('do_package_qa', 'rdeptask', '') |
| 1345 | for i in issues: | 1308 | for i in issues: |
| 1346 | package_qa_handle_error("pkgvarcheck", "%s: Variable %s is set as not being package specific, please fix this." % (d.getVar("FILE"), i), d) | 1309 | oe.qa.handle_error("pkgvarcheck", "%s: Variable %s is set as not being package specific, please fix this." % (d.getVar("FILE"), i), d) |
| 1347 | 1310 | ||
| 1348 | if 'native-last' not in (d.getVar('INSANE_SKIP') or "").split(): | 1311 | if 'native-last' not in (d.getVar('INSANE_SKIP') or "").split(): |
| 1349 | for native_class in ['native', 'nativesdk']: | 1312 | for native_class in ['native', 'nativesdk']: |
| @@ -1371,11 +1334,8 @@ python () { | |||
| 1371 | else: | 1334 | else: |
| 1372 | break | 1335 | break |
| 1373 | if broken_order: | 1336 | if broken_order: |
| 1374 | package_qa_handle_error("native-last", "%s: native/nativesdk class is not inherited last, this can result in unexpected behaviour. " | 1337 | oe.qa.handle_error("native-last", "%s: native/nativesdk class is not inherited last, this can result in unexpected behaviour. " |
| 1375 | "Classes inherited after native/nativesdk: %s" % (pn, " ".join(broken_order)), d) | 1338 | "Classes inherited after native/nativesdk: %s" % (pn, " ".join(broken_order)), d) |
| 1376 | 1339 | ||
| 1377 | 1340 | oe.qa.exit_if_errors(d) | |
| 1378 | qa_sane = d.getVar("QA_SANE") | ||
| 1379 | if not qa_sane: | ||
| 1380 | bb.fatal("Fatal QA errors found, failing task.") | ||
| 1381 | } | 1341 | } |
diff --git a/meta/classes/multilib.bbclass b/meta/classes/multilib.bbclass
index b210c49c0c..73ad2ab7b3 100644
--- a/meta/classes/multilib.bbclass
+++ b/meta/classes/multilib.bbclass
| @@ -210,7 +210,7 @@ python do_package_qa_multilib() { | |||
| 210 | if len(candidates) > 0: | 210 | if len(candidates) > 0: |
| 211 | msg = "%s package %s - suspicious values '%s' in %s" \ | 211 | msg = "%s package %s - suspicious values '%s' in %s" \ |
| 212 | % (d.getVar('PN'), pkg, ' '.join(candidates), var) | 212 | % (d.getVar('PN'), pkg, ' '.join(candidates), var) |
| 213 | package_qa_handle_error("multilib", msg, d) | 213 | oe.qa.handle_error("multilib", msg, d) |
| 214 | 214 | ||
| 215 | ml = d.getVar('MLPREFIX') | 215 | ml = d.getVar('MLPREFIX') |
| 216 | if not ml: | 216 | if not ml: |
| @@ -228,4 +228,5 @@ python do_package_qa_multilib() { | |||
| 228 | check_mlprefix(pkg, 'RSUGGESTS', ml) | 228 | check_mlprefix(pkg, 'RSUGGESTS', ml) |
| 229 | check_mlprefix(pkg, 'RREPLACES', ml) | 229 | check_mlprefix(pkg, 'RREPLACES', ml) |
| 230 | check_mlprefix(pkg, 'RCONFLICTS', ml) | 230 | check_mlprefix(pkg, 'RCONFLICTS', ml) |
| 231 | oe.qa.exit_if_errors(d) | ||
| 231 | } | 232 | } |
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
index 985dfacd09..92eba98892 100644
--- a/meta/classes/package.bbclass
+++ b/meta/classes/package.bbclass
| @@ -41,8 +41,6 @@ | |||
| 41 | inherit packagedata | 41 | inherit packagedata |
| 42 | inherit chrpath | 42 | inherit chrpath |
| 43 | inherit package_pkgdata | 43 | inherit package_pkgdata |
| 44 | |||
| 45 | # Need the package_qa_handle_error() in insane.bbclass | ||
| 46 | inherit insane | 44 | inherit insane |
| 47 | 45 | ||
| 48 | PKGD = "${WORKDIR}/package" | 46 | PKGD = "${WORKDIR}/package" |
| @@ -865,7 +863,7 @@ python fixup_perms () { | |||
| 865 | self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7]) | 863 | self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7]) |
| 866 | else: | 864 | else: |
| 867 | msg = "Fixup Perms: invalid config line %s" % line | 865 | msg = "Fixup Perms: invalid config line %s" % line |
| 868 | package_qa_handle_error("perm-config", msg, d) | 866 | oe.qa.handle_error("perm-config", msg, d) |
| 869 | self.path = None | 867 | self.path = None |
| 870 | self.link = None | 868 | self.link = None |
| 871 | 869 | ||
| @@ -1005,7 +1003,7 @@ python fixup_perms () { | |||
| 1005 | continue | 1003 | continue |
| 1006 | if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"): | 1004 | if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"): |
| 1007 | msg = "Fixup perms: %s invalid line: %s" % (conf, line) | 1005 | msg = "Fixup perms: %s invalid line: %s" % (conf, line) |
| 1008 | package_qa_handle_error("perm-line", msg, d) | 1006 | oe.qa.handle_error("perm-line", msg, d) |
| 1009 | continue | 1007 | continue |
| 1010 | entry = fs_perms_entry(d.expand(line)) | 1008 | entry = fs_perms_entry(d.expand(line)) |
| 1011 | if entry and entry.path: | 1009 | if entry and entry.path: |
| @@ -1042,7 +1040,7 @@ python fixup_perms () { | |||
| 1042 | ptarget = os.path.join(os.path.dirname(dir), link) | 1040 | ptarget = os.path.join(os.path.dirname(dir), link) |
| 1043 | if os.path.exists(target): | 1041 | if os.path.exists(target): |
| 1044 | msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget) | 1042 | msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget) |
| 1045 | package_qa_handle_error("perm-link", msg, d) | 1043 | oe.qa.handle_error("perm-link", msg, d) |
| 1046 | continue | 1044 | continue |
| 1047 | 1045 | ||
| 1048 | # Create path to move directory to, move it, and then setup the symlink | 1046 | # Create path to move directory to, move it, and then setup the symlink |
| @@ -1202,7 +1200,7 @@ python split_and_strip_files () { | |||
| 1202 | bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn)) | 1200 | bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn)) |
| 1203 | else: | 1201 | else: |
| 1204 | msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn) | 1202 | msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn) |
| 1205 | package_qa_handle_error("already-stripped", msg, d) | 1203 | oe.qa.handle_error("already-stripped", msg, d) |
| 1206 | continue | 1204 | continue |
| 1207 | 1205 | ||
| 1208 | # At this point we have an unstripped elf file. We need to: | 1206 | # At this point we have an unstripped elf file. We need to: |
| @@ -1362,7 +1360,7 @@ python populate_packages () { | |||
| 1362 | for i, pkg in enumerate(packages): | 1360 | for i, pkg in enumerate(packages): |
| 1363 | if pkg in package_dict: | 1361 | if pkg in package_dict: |
| 1364 | msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg | 1362 | msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg |
| 1365 | package_qa_handle_error("packages-list", msg, d) | 1363 | oe.qa.handle_error("packages-list", msg, d) |
| 1366 | # Ensure the source package gets the chance to pick up the source files | 1364 | # Ensure the source package gets the chance to pick up the source files |
| 1367 | # before the debug package by ordering it first in PACKAGES. Whether it | 1365 | # before the debug package by ordering it first in PACKAGES. Whether it |
| 1368 | # actually picks up any source files is controlled by | 1366 | # actually picks up any source files is controlled by |
| @@ -1399,7 +1397,7 @@ python populate_packages () { | |||
| 1399 | filesvar = d.getVar('FILES:%s' % pkg) or "" | 1397 | filesvar = d.getVar('FILES:%s' % pkg) or "" |
| 1400 | if "//" in filesvar: | 1398 | if "//" in filesvar: |
| 1401 | msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg | 1399 | msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg |
| 1402 | package_qa_handle_error("files-invalid", msg, d) | 1400 | oe.qa.handle_error("files-invalid", msg, d) |
| 1403 | filesvar.replace("//", "/") | 1401 | filesvar.replace("//", "/") |
| 1404 | 1402 | ||
| 1405 | origfiles = filesvar.split() | 1403 | origfiles = filesvar.split() |
| @@ -1468,7 +1466,7 @@ python populate_packages () { | |||
| 1468 | licenses = d.getVar('LICENSE_EXCLUSION-' + pkg) | 1466 | licenses = d.getVar('LICENSE_EXCLUSION-' + pkg) |
| 1469 | if licenses: | 1467 | if licenses: |
| 1470 | msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, licenses) | 1468 | msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, licenses) |
| 1471 | package_qa_handle_error("incompatible-license", msg, d) | 1469 | oe.qa.handle_error("incompatible-license", msg, d) |
| 1472 | else: | 1470 | else: |
| 1473 | package_list.append(pkg) | 1471 | package_list.append(pkg) |
| 1474 | d.setVar('PACKAGES', ' '.join(package_list)) | 1472 | d.setVar('PACKAGES', ' '.join(package_list)) |
| @@ -1492,7 +1490,7 @@ python populate_packages () { | |||
| 1492 | msg = msg + "\n " + f | 1490 | msg = msg + "\n " + f |
| 1493 | msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n" | 1491 | msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n" |
| 1494 | msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped)) | 1492 | msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped)) |
| 1495 | package_qa_handle_error("installed-vs-shipped", msg, d) | 1493 | oe.qa.handle_error("installed-vs-shipped", msg, d) |
| 1496 | } | 1494 | } |
| 1497 | populate_packages[dirs] = "${D}" | 1495 | populate_packages[dirs] = "${D}" |
| 1498 | 1496 | ||
| @@ -1838,7 +1836,7 @@ python package_do_shlibs() { | |||
| 1838 | ver = d.getVar('PKGV') | 1836 | ver = d.getVar('PKGV') |
| 1839 | if not ver: | 1837 | if not ver: |
| 1840 | msg = "PKGV not defined" | 1838 | msg = "PKGV not defined" |
| 1841 | package_qa_handle_error("pkgv-undefined", msg, d) | 1839 | oe.qa.handle_error("pkgv-undefined", msg, d) |
| 1842 | return | 1840 | return |
| 1843 | 1841 | ||
| 1844 | pkgdest = d.getVar('PKGDEST') | 1842 | pkgdest = d.getVar('PKGDEST') |
| @@ -2402,7 +2400,7 @@ python do_package () { | |||
| 2402 | 2400 | ||
| 2403 | if not workdir or not outdir or not dest or not dvar or not pn: | 2401 | if not workdir or not outdir or not dest or not dvar or not pn: |
| 2404 | msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package" | 2402 | msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package" |
| 2405 | package_qa_handle_error("var-undefined", msg, d) | 2403 | oe.qa.handle_error("var-undefined", msg, d) |
| 2406 | return | 2404 | return |
| 2407 | 2405 | ||
| 2408 | bb.build.exec_func("package_convert_pr_autoinc", d) | 2406 | bb.build.exec_func("package_convert_pr_autoinc", d) |
| @@ -2455,9 +2453,7 @@ python do_package () { | |||
| 2455 | for f in (d.getVar('PACKAGEFUNCS') or '').split(): | 2453 | for f in (d.getVar('PACKAGEFUNCS') or '').split(): |
| 2456 | bb.build.exec_func(f, d) | 2454 | bb.build.exec_func(f, d) |
| 2457 | 2455 | ||
| 2458 | qa_sane = d.getVar("QA_SANE") | 2456 | oe.qa.exit_if_errors(d) |
| 2459 | if not qa_sane: | ||
| 2460 | bb.fatal("Fatal QA errors found, failing task.") | ||
| 2461 | } | 2457 | } |
| 2462 | 2458 | ||
| 2463 | do_package[dirs] = "${SHLIBSWORKDIR} ${PKGDESTWORK} ${D}" | 2459 | do_package[dirs] = "${SHLIBSWORKDIR} ${PKGDESTWORK} ${D}" |
diff --git a/meta/classes/ptest.bbclass b/meta/classes/ptest.bbclass index 77614ae860..1ec23c0923 100644 --- a/meta/classes/ptest.bbclass +++ b/meta/classes/ptest.bbclass | |||
| @@ -129,4 +129,4 @@ def package_qa_check_missing_ptest(pn, d, messages): | |||
| 129 | 129 | ||
| 130 | enabled_ptests = " ".join([d.getVar('PTESTS_FAST'), d.getVar('PTESTS_SLOW'), d.getVar('PTESTS_PROBLEMS')]).split() | 130 | enabled_ptests = " ".join([d.getVar('PTESTS_FAST'), d.getVar('PTESTS_SLOW'), d.getVar('PTESTS_PROBLEMS')]).split() |
| 131 | if (pn + "-ptest").replace(d.getVar('MLPREFIX'), '') not in enabled_ptests: | 131 | if (pn + "-ptest").replace(d.getVar('MLPREFIX'), '') not in enabled_ptests: |
| 132 | package_qa_handle_error("missing-ptest", "supports ptests but is not included in oe-core's ptest-packagelists.inc", d) | 132 | oe.qa.handle_error("missing-ptest", "supports ptests but is not included in oe-core's ptest-packagelists.inc", d) |
diff --git a/meta/lib/oe/qa.py b/meta/lib/oe/qa.py index e8a854a302..efab7e8564 100644 --- a/meta/lib/oe/qa.py +++ b/meta/lib/oe/qa.py | |||
| @@ -171,6 +171,40 @@ def elf_machine_to_string(machine): | |||
| 171 | except: | 171 | except: |
| 172 | return "Unknown (%s)" % repr(machine) | 172 | return "Unknown (%s)" % repr(machine) |
| 173 | 173 | ||
| 174 | def write_error(type, error, d): | ||
| 175 | logfile = d.getVar('QA_LOGFILE') | ||
| 176 | if logfile: | ||
| 177 | p = d.getVar('P') | ||
| 178 | with open(logfile, "a+") as f: | ||
| 179 | f.write("%s: %s [%s]\n" % (p, error, type)) | ||
| 180 | |||
| 181 | def handle_error(error_class, error_msg, d): | ||
| 182 | if error_class in (d.getVar("ERROR_QA") or "").split(): | ||
| 183 | write_error(error_class, error_msg, d) | ||
| 184 | bb.error("QA Issue: %s [%s]" % (error_msg, error_class)) | ||
| 185 | d.setVar("QA_ERRORS_FOUND", "True") | ||
| 186 | return False | ||
| 187 | elif error_class in (d.getVar("WARN_QA") or "").split(): | ||
| 188 | write_error(error_class, error_msg, d) | ||
| 189 | bb.warn("QA Issue: %s [%s]" % (error_msg, error_class)) | ||
| 190 | else: | ||
| 191 | bb.note("QA Issue: %s [%s]" % (error_msg, error_class)) | ||
| 192 | return True | ||
| 193 | |||
| 194 | def add_message(messages, section, new_msg): | ||
| 195 | if section not in messages: | ||
| 196 | messages[section] = new_msg | ||
| 197 | else: | ||
| 198 | messages[section] = messages[section] + "\n" + new_msg | ||
| 199 | |||
| 200 | def exit_with_message_if_errors(message, d): | ||
| 201 | qa_fatal_errors = bb.utils.to_boolean(d.getVar("QA_ERRORS_FOUND"), False) | ||
| 202 | if qa_fatal_errors: | ||
| 203 | bb.fatal(message) | ||
| 204 | |||
| 205 | def exit_if_errors(d): | ||
| 206 | exit_with_message_if_errors("Fatal QA errors were found, failing task.", d) | ||
| 207 | |||
| 174 | if __name__ == "__main__": | 208 | if __name__ == "__main__": |
| 175 | import sys | 209 | import sys |
| 176 | 210 | ||
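For illustration only (not part of the commit): a minimal sketch of how a QA check in a bbclass might use the relocated helpers, assuming the usual datastore d; the issue name "missing-summary" and the task name are hypothetical:

    python do_qa_example () {
        messages = {}
        pn = d.getVar("PN")

        if not d.getVar("SUMMARY"):
            # add_message() collects per-section text, appending with newlines.
            oe.qa.add_message(messages, "missing-summary", "%s: SUMMARY is not set" % pn)

        for section, message in messages.items():
            # Issues listed in ERROR_QA are logged as errors, set QA_ERRORS_FOUND
            # and return False; WARN_QA issues warn; anything else is only noted.
            oe.qa.handle_error(section, message, d)

        # Fail the task with a custom message if any fatal issue was recorded.
        oe.qa.exit_with_message_if_errors("Example QA check failed for %s." % pn, d)
    }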
