Diffstat (limited to 'meta/classes/insane.bbclass')
-rw-r--r--  meta/classes/insane.bbclass | 862
1 file changed, 398 insertions(+), 464 deletions(-)
diff --git a/meta/classes/insane.bbclass b/meta/classes/insane.bbclass
index 2f3f768d58..0564f9c2a4 100644
--- a/meta/classes/insane.bbclass
+++ b/meta/classes/insane.bbclass
@@ -16,13 +16,8 @@
# into exec_prefix
# -Check that scripts in base_[bindir|sbindir|libdir] do not reference
# files under exec_prefix
+# -Check if the package name is upper case
-
-# unsafe-references-in-binaries requires prelink-rtld from
-# prelink-native, but we don't want this DEPENDS for -native builds
-QADEPENDS = "prelink-native"
-QADEPENDS_class-native = ""
-QADEPENDS_class-nativesdk = ""
QA_SANE = "True"
# Elect whether a given type of error is a warning or error, they may
@@ -30,16 +25,20 @@ QA_SANE = "True"
WARN_QA ?= "ldflags useless-rpaths rpaths staticdev libdir xorg-driver-abi \
textrel already-stripped incompatible-license files-invalid \
installed-vs-shipped compile-host-path install-host-path \
- pn-overrides infodir build-deps file-rdeps \
+ pn-overrides infodir build-deps src-uri-bad \
unknown-configure-option symlink-to-sysroot multilib \
- invalid-packageconfig host-user-contaminated \
+ invalid-packageconfig host-user-contaminated uppercase-pn patch-fuzz \
"
ERROR_QA ?= "dev-so debug-deps dev-deps debug-files arch pkgconfig la \
perms dep-cmp pkgvarcheck perm-config perm-line perm-link \
split-strip packages-list pkgv-undefined var-undefined \
version-going-backwards expanded-d invalid-chars \
- license-checksum dev-elf \
+ license-checksum dev-elf file-rdeps configure-unsafe \
+ configure-gettext perllocalpod \
"
+# Add usrmerge QA check based on distro feature
+ERROR_QA_append = "${@bb.utils.contains('DISTRO_FEATURES', 'usrmerge', ' usrmerge', '', d)}"
+
FAKEROOT_QA = "host-user-contaminated"
FAKEROOT_QA[doc] = "QA tests which need to run under fakeroot. If any \
enabled tests are listed here, the do_package_qa task will run under fakeroot."
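Note: whether a given test merely warns or fails the build is determined solely by which of the two lists it appears in, so a distro or local configuration can reclassify a test. An illustrative local.conf tweak (not part of this patch) that promotes the new patch-fuzz check to a hard error:

    WARN_QA_remove = "patch-fuzz"
    ERROR_QA_append = " patch-fuzz"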
@@ -48,160 +47,30 @@ ALL_QA = "${WARN_QA} ${ERROR_QA}"
UNKNOWN_CONFIGURE_WHITELIST ?= "--enable-nls --disable-nls --disable-silent-rules --disable-dependency-tracking --with-libtool-sysroot --disable-static"
-#
-# dictionary for elf headers
-#
-# feel free to add and correct.
-#
-# TARGET_OS TARGET_ARCH MACHINE, OSABI, ABIVERSION, Little Endian, 32bit?
-def package_qa_get_machine_dict(d):
- machdata = {
- "darwin9" : {
- "arm" : (40, 0, 0, True, 32),
- },
- "eabi" : {
- "arm" : (40, 0, 0, True, 32),
- },
- "elf" : {
- "i586" : (3, 0, 0, True, 32),
- "x86_64": (62, 0, 0, True, 64),
- "epiphany": (4643, 0, 0, True, 32),
- },
- "linux" : {
- "aarch64" : (183, 0, 0, True, 64),
- "aarch64_be" :(183, 0, 0, False, 64),
- "arm" : (40, 97, 0, True, 32),
- "armeb": (40, 97, 0, False, 32),
- "powerpc": (20, 0, 0, False, 32),
- "powerpc64": (21, 0, 0, False, 64),
- "i386": ( 3, 0, 0, True, 32),
- "i486": ( 3, 0, 0, True, 32),
- "i586": ( 3, 0, 0, True, 32),
- "i686": ( 3, 0, 0, True, 32),
- "x86_64": (62, 0, 0, True, 64),
- "ia64": (50, 0, 0, True, 64),
- "alpha": (36902, 0, 0, True, 64),
- "hppa": (15, 3, 0, False, 32),
- "m68k": ( 4, 0, 0, False, 32),
- "mips": ( 8, 0, 0, False, 32),
- "mipsel": ( 8, 0, 0, True, 32),
- "mips64": ( 8, 0, 0, False, 64),
- "mips64el": ( 8, 0, 0, True, 64),
- "nios2": (113, 0, 0, True, 32),
- "s390": (22, 0, 0, False, 32),
- "sh4": (42, 0, 0, True, 32),
- "sparc": ( 2, 0, 0, False, 32),
- "microblaze": (189, 0, 0, False, 32),
- "microblazeeb":(189, 0, 0, False, 32),
- "microblazeel":(189, 0, 0, True, 32),
- },
- "linux-uclibc" : {
- "arm" : ( 40, 97, 0, True, 32),
- "armeb": ( 40, 97, 0, False, 32),
- "powerpc": ( 20, 0, 0, False, 32),
- "i386": ( 3, 0, 0, True, 32),
- "i486": ( 3, 0, 0, True, 32),
- "i586": ( 3, 0, 0, True, 32),
- "i686": ( 3, 0, 0, True, 32),
- "x86_64": ( 62, 0, 0, True, 64),
- "mips": ( 8, 0, 0, False, 32),
- "mipsel": ( 8, 0, 0, True, 32),
- "mips64": ( 8, 0, 0, False, 64),
- "mips64el": ( 8, 0, 0, True, 64),
- "avr32": (6317, 0, 0, False, 32),
- "sh4": (42, 0, 0, True, 32),
-
- },
- "linux-musl" : {
- "aarch64" : (183, 0, 0, True, 64),
- "aarch64_be" :(183, 0, 0, False, 64),
- "arm" : ( 40, 97, 0, True, 32),
- "armeb": ( 40, 97, 0, False, 32),
- "powerpc": ( 20, 0, 0, False, 32),
- "i386": ( 3, 0, 0, True, 32),
- "i486": ( 3, 0, 0, True, 32),
- "i586": ( 3, 0, 0, True, 32),
- "i686": ( 3, 0, 0, True, 32),
- "x86_64": ( 62, 0, 0, True, 64),
- "mips": ( 8, 0, 0, False, 32),
- "mipsel": ( 8, 0, 0, True, 32),
- "mips64": ( 8, 0, 0, False, 64),
- "mips64el": ( 8, 0, 0, True, 64),
- "microblaze": (189, 0, 0, False, 32),
- "microblazeeb":(189, 0, 0, False, 32),
- "microblazeel":(189, 0, 0, True, 32),
- },
- "uclinux-uclibc" : {
- "bfin": ( 106, 0, 0, True, 32),
- },
- "linux-gnueabi" : {
- "arm" : (40, 0, 0, True, 32),
- "armeb" : (40, 0, 0, False, 32),
- },
- "linux-musleabi" : {
- "arm" : (40, 0, 0, True, 32),
- "armeb" : (40, 0, 0, False, 32),
- },
- "linux-uclibceabi" : {
- "arm" : (40, 0, 0, True, 32),
- "armeb" : (40, 0, 0, False, 32),
- },
- "linux-gnuspe" : {
- "powerpc": (20, 0, 0, False, 32),
- },
- "linux-muslspe" : {
- "powerpc": (20, 0, 0, False, 32),
- },
- "linux-uclibcspe" : {
- "powerpc": (20, 0, 0, False, 32),
- },
- "linux-gnu" : {
- "powerpc": (20, 0, 0, False, 32),
- "sh4": (42, 0, 0, True, 32),
- },
- "linux-gnux32" : {
- "x86_64": (62, 0, 0, True, 32),
- },
- "linux-gnun32" : {
- "mips64": ( 8, 0, 0, False, 32),
- "mips64el": ( 8, 0, 0, True, 32),
- },
- }
-
- # Add in any extra user supplied data which may come from a BSP layer, removing the
- # need to always change this class directly
- extra_machdata = (d.getVar("PACKAGEQA_EXTRA_MACHDEFFUNCS", True) or "").split()
- for m in extra_machdata:
- call = m + "(machdata, d)"
- locs = { "machdata" : machdata, "d" : d}
- machdata = bb.utils.better_eval(call, locs)
-
- return machdata
-
-
def package_qa_clean_path(path, d, pkg=None):
"""
Remove redundant paths from the path for display. If pkg isn't set then
TMPDIR is stripped, otherwise PKGDEST/pkg is stripped.
"""
if pkg:
- path = path.replace(os.path.join(d.getVar("PKGDEST", True), pkg), "/")
- return path.replace(d.getVar("TMPDIR", True), "/").replace("//", "/")
+ path = path.replace(os.path.join(d.getVar("PKGDEST"), pkg), "/")
+ return path.replace(d.getVar("TMPDIR"), "/").replace("//", "/")
def package_qa_write_error(type, error, d):
- logfile = d.getVar('QA_LOGFILE', True)
+ logfile = d.getVar('QA_LOGFILE')
if logfile:
- p = d.getVar('P', True)
+ p = d.getVar('P')
with open(logfile, "a+") as f:
f.write("%s: %s [%s]\n" % (p, error, type))
def package_qa_handle_error(error_class, error_msg, d):
- package_qa_write_error(error_class, error_msg, d)
- if error_class in (d.getVar("ERROR_QA", True) or "").split():
+ if error_class in (d.getVar("ERROR_QA") or "").split():
+ package_qa_write_error(error_class, error_msg, d)
bb.error("QA Issue: %s [%s]" % (error_msg, error_class))
d.setVar("QA_SANE", False)
return False
- elif error_class in (d.getVar("WARN_QA", True) or "").split():
+ elif error_class in (d.getVar("WARN_QA") or "").split():
+ package_qa_write_error(error_class, error_msg, d)
bb.warn("QA Issue: %s [%s]" % (error_msg, error_class))
else:
bb.note("QA Issue: %s [%s]" % (error_msg, error_class))
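For reference, the error class names handled here are the same names recipes can list in INSANE_SKIP to opt out of individual checks; do_package_qa below honours both the recipe-wide and the per-package form. An illustrative recipe snippet (an assumption, not taken from this patch):

    # silence one check for the main package only
    INSANE_SKIP_${PN} += "already-stripped"
    # or recipe-wide, also honoured by the staging/configure checks
    INSANE_SKIP += "la"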
@@ -217,7 +86,7 @@ QAPATHTEST[libexec] = "package_qa_check_libexec"
def package_qa_check_libexec(path,name, d, elf, messages):
# Skip the case where the default is explicitly /usr/libexec
- libexec = d.getVar('libexecdir', True)
+ libexec = d.getVar('libexecdir')
if libexec == "/usr/libexec":
return True
@@ -238,12 +107,12 @@ def package_qa_check_rpath(file,name, d, elf, messages):
if os.path.islink(file):
return
- bad_dirs = [d.getVar('BASE_WORKDIR', True), d.getVar('STAGING_DIR_TARGET', True)]
+ bad_dirs = [d.getVar('BASE_WORKDIR'), d.getVar('STAGING_DIR_TARGET')]
phdrs = elf.run_objdump("-p", d)
import re
- rpath_re = re.compile("\s+RPATH\s+(.*)")
+ rpath_re = re.compile(r"\s+RPATH\s+(.*)")
for line in phdrs.split("\n"):
m = rpath_re.match(line)
if m:
@@ -266,13 +135,13 @@ def package_qa_check_useless_rpaths(file, name, d, elf, messages):
if os.path.islink(file):
return
- libdir = d.getVar("libdir", True)
- base_libdir = d.getVar("base_libdir", True)
+ libdir = d.getVar("libdir")
+ base_libdir = d.getVar("base_libdir")
phdrs = elf.run_objdump("-p", d)
import re
- rpath_re = re.compile("\s+RPATH\s+(.*)")
+ rpath_re = re.compile(r"\s+RPATH\s+(.*)")
for line in phdrs.split("\n"):
m = rpath_re.match(line)
if m:
@@ -324,29 +193,29 @@ def package_qa_check_libdir(d):
"""
import re
- pkgdest = d.getVar('PKGDEST', True)
- base_libdir = d.getVar("base_libdir",True) + os.sep
- libdir = d.getVar("libdir", True) + os.sep
- libexecdir = d.getVar("libexecdir", True) + os.sep
- exec_prefix = d.getVar("exec_prefix", True) + os.sep
+ pkgdest = d.getVar('PKGDEST')
+ base_libdir = d.getVar("base_libdir") + os.sep
+ libdir = d.getVar("libdir") + os.sep
+ libexecdir = d.getVar("libexecdir") + os.sep
+ exec_prefix = d.getVar("exec_prefix") + os.sep
messages = []
# The re's are purposely fuzzy, as some there are some .so.x.y.z files
# that don't follow the standard naming convention. It checks later
# that they are actual ELF files
- lib_re = re.compile("^/lib.+\.so(\..+)?$")
- exec_re = re.compile("^%s.*/lib.+\.so(\..+)?$" % exec_prefix)
+ lib_re = re.compile(r"^/lib.+\.so(\..+)?$")
+ exec_re = re.compile(r"^%s.*/lib.+\.so(\..+)?$" % exec_prefix)
for root, dirs, files in os.walk(pkgdest):
if root == pkgdest:
# Skip subdirectories for any packages with libdir in INSANE_SKIP
skippackages = []
for package in dirs:
- if 'libdir' in (d.getVar('INSANE_SKIP_' + package, True) or "").split():
+ if 'libdir' in (d.getVar('INSANE_SKIP_' + package) or "").split():
bb.note("Package %s skipping libdir QA test" % (package))
skippackages.append(package)
- elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory' and package.endswith("-dbg"):
+ elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory' and package.endswith("-dbg"):
bb.note("Package %s skipping libdir QA test for PACKAGE_DEBUG_SPLIT_STYLE equals debug-file-directory" % (package))
skippackages.append(package)
for package in skippackages:
@@ -387,138 +256,26 @@ def package_qa_check_dbg(path, name, d, elf, messages):
if not "-dbg" in name and not "-ptest" in name:
if '.debug' in path.split(os.path.sep):
- messages("debug-files", "non debug package contains .debug directory: %s path %s" % \
+ package_qa_add_message(messages, "debug-files", "non debug package contains .debug directory: %s path %s" % \
(name, package_qa_clean_path(path,d)))
-QAPATHTEST[perms] = "package_qa_check_perm"
-def package_qa_check_perm(path,name,d, elf, messages):
- """
- Check the permission of files
- """
- return
-
-QAPATHTEST[unsafe-references-in-binaries] = "package_qa_check_unsafe_references_in_binaries"
-def package_qa_check_unsafe_references_in_binaries(path, name, d, elf, messages):
- """
- Ensure binaries in base_[bindir|sbindir|libdir] do not link to files under exec_prefix
- """
- if unsafe_references_skippable(path, name, d):
- return
-
- if elf:
- import subprocess as sub
- pn = d.getVar('PN', True)
-
- exec_prefix = d.getVar('exec_prefix', True)
- sysroot_path = d.getVar('STAGING_DIR_TARGET', True)
- sysroot_path_usr = sysroot_path + exec_prefix
-
- try:
- ldd_output = bb.process.Popen(["prelink-rtld", "--root", sysroot_path, path], stdout=sub.PIPE).stdout.read().decode("utf-8")
- except bb.process.CmdError:
- error_msg = pn + ": prelink-rtld aborted when processing %s" % path
- package_qa_handle_error("unsafe-references-in-binaries", error_msg, d)
- return False
-
- if sysroot_path_usr in ldd_output:
- ldd_output = ldd_output.replace(sysroot_path, "")
-
- pkgdest = d.getVar('PKGDEST', True)
- packages = d.getVar('PACKAGES', True)
-
- for package in packages.split():
- short_path = path.replace('%s/%s' % (pkgdest, package), "", 1)
- if (short_path != path):
- break
-
- base_err = pn + ": %s, installed in the base_prefix, requires a shared library under exec_prefix (%s)" % (short_path, exec_prefix)
- for line in ldd_output.split('\n'):
- if exec_prefix in line:
- error_msg = "%s: %s" % (base_err, line.strip())
- package_qa_handle_error("unsafe-references-in-binaries", error_msg, d)
-
- return False
-
-QAPATHTEST[unsafe-references-in-scripts] = "package_qa_check_unsafe_references_in_scripts"
-def package_qa_check_unsafe_references_in_scripts(path, name, d, elf, messages):
- """
- Warn if scripts in base_[bindir|sbindir|libdir] reference files under exec_prefix
- """
- if unsafe_references_skippable(path, name, d):
- return
-
- if not elf:
- import stat
- import subprocess
- pn = d.getVar('PN', True)
-
- # Ensure we're checking an executable script
- statinfo = os.stat(path)
- if bool(statinfo.st_mode & stat.S_IXUSR):
- # grep shell scripts for possible references to /exec_prefix/
- exec_prefix = d.getVar('exec_prefix', True)
- statement = "grep -e '%s/[^ :]\{1,\}/[^ :]\{1,\}' %s > /dev/null" % (exec_prefix, path)
- if subprocess.call(statement, shell=True) == 0:
- error_msg = pn + ": Found a reference to %s/ in %s" % (exec_prefix, path)
- package_qa_handle_error("unsafe-references-in-scripts", error_msg, d)
- error_msg = "Shell scripts in base_bindir and base_sbindir should not reference anything in exec_prefix"
- package_qa_handle_error("unsafe-references-in-scripts", error_msg, d)
-
-def unsafe_references_skippable(path, name, d):
- if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d):
- return True
-
- if "-dbg" in name or "-dev" in name:
- return True
-
- # Other package names to skip:
- if name.startswith("kernel-module-"):
- return True
-
- # Skip symlinks
- if os.path.islink(path):
- return True
-
- # Skip unusual rootfs layouts which make these tests irrelevant
- exec_prefix = d.getVar('exec_prefix', True)
- if exec_prefix == "":
- return True
-
- pkgdest = d.getVar('PKGDEST', True)
- pkgdest = pkgdest + "/" + name
- pkgdest = os.path.abspath(pkgdest)
- base_bindir = pkgdest + d.getVar('base_bindir', True)
- base_sbindir = pkgdest + d.getVar('base_sbindir', True)
- base_libdir = pkgdest + d.getVar('base_libdir', True)
- bindir = pkgdest + d.getVar('bindir', True)
- sbindir = pkgdest + d.getVar('sbindir', True)
- libdir = pkgdest + d.getVar('libdir', True)
-
- if base_bindir == bindir and base_sbindir == sbindir and base_libdir == libdir:
- return True
-
- # Skip files not in base_[bindir|sbindir|libdir]
- path = os.path.abspath(path)
- if not (base_bindir in path or base_sbindir in path or base_libdir in path):
- return True
-
- return False
-
QAPATHTEST[arch] = "package_qa_check_arch"
def package_qa_check_arch(path,name,d, elf, messages):
"""
Check if archs are compatible
"""
+ import re, oe.elf
+
if not elf:
return
- target_os = d.getVar('TARGET_OS', True)
- target_arch = d.getVar('TARGET_ARCH', True)
- provides = d.getVar('PROVIDES', True)
- bpn = d.getVar('BPN', True)
+ target_os = d.getVar('TARGET_OS')
+ target_arch = d.getVar('TARGET_ARCH')
+ provides = d.getVar('PROVIDES')
+ bpn = d.getVar('BPN')
if target_arch == "allarch":
- pn = d.getVar('PN', True)
+ pn = d.getVar('PN')
package_qa_add_message(messages, "arch", pn + ": Recipe inherits the allarch class, but has packaged architecture-specific binaries")
return
@@ -534,18 +291,20 @@ def package_qa_check_arch(path,name,d, elf, messages):
#if this will throw an exception, then fix the dict above
(machine, osabi, abiversion, littleendian, bits) \
- = package_qa_get_machine_dict(d)[target_os][target_arch]
+ = oe.elf.machine_dict(d)[target_os][target_arch]
# Check the architecture and endiannes of the binary
- if not ((machine == elf.machine()) or \
- ((("virtual/kernel" in provides) or bb.data.inherits_class("module", d) ) and (target_os == "linux-gnux32" or target_os == "linux-gnun32"))):
- package_qa_add_message(messages, "arch", "Architecture did not match (%d to %d) on %s" % \
- (machine, elf.machine(), package_qa_clean_path(path,d)))
- elif not ((bits == elf.abiSize()) or \
- ((("virtual/kernel" in provides) or bb.data.inherits_class("module", d) ) and (target_os == "linux-gnux32" or target_os == "linux-gnun32"))):
+ is_32 = (("virtual/kernel" in provides) or bb.data.inherits_class("module", d)) and \
+ (target_os == "linux-gnux32" or target_os == "linux-muslx32" or \
+ target_os == "linux-gnu_ilp32" or re.match(r'mips64.*32', d.getVar('DEFAULTTUNE')))
+ is_bpf = (oe.qa.elf_machine_to_string(elf.machine()) == "BPF")
+ if not ((machine == elf.machine()) or is_32 or is_bpf):
+ package_qa_add_message(messages, "arch", "Architecture did not match (%s, expected %s) on %s" % \
+ (oe.qa.elf_machine_to_string(elf.machine()), oe.qa.elf_machine_to_string(machine), package_qa_clean_path(path,d)))
+ elif not ((bits == elf.abiSize()) or is_32 or is_bpf):
package_qa_add_message(messages, "arch", "Bit size did not match (%d to %d) %s on %s" % \
(bits, elf.abiSize(), bpn, package_qa_clean_path(path,d)))
- elif not littleendian == elf.isLittleEndian():
+ elif not ((littleendian == elf.isLittleEndian()) or is_bpf):
package_qa_add_message(messages, "arch", "Endiannes did not match (%d to %d) on %s" % \
(littleendian, elf.isLittleEndian(), package_qa_clean_path(path,d)))
@@ -555,7 +314,7 @@ def package_qa_check_desktop(path, name, d, elf, messages):
Run all desktop files through desktop-file-validate.
"""
if path.endswith(".desktop"):
- desktop_file_validate = os.path.join(d.getVar('STAGING_BINDIR_NATIVE',True),'desktop-file-validate')
+ desktop_file_validate = os.path.join(d.getVar('STAGING_BINDIR_NATIVE'),'desktop-file-validate')
output = os.popen("%s %s" % (desktop_file_validate, path))
# This only produces output on errors
for l in output:
@@ -577,13 +336,15 @@ def package_qa_textrel(path, name, d, elf, messages):
sane = True
import re
- textrel_re = re.compile("\s+TEXTREL\s+")
+ textrel_re = re.compile(r"\s+TEXTREL\s+")
for line in phdrs.split("\n"):
if textrel_re.match(line):
sane = False
+ break
if not sane:
- package_qa_add_message(messages, "textrel", "ELF binary '%s' has relocations in .text" % path)
+ path = package_qa_clean_path(path, d, name)
+ package_qa_add_message(messages, "textrel", "%s: ELF binary %s has relocations in .text" % (name, path))
QAPATHTEST[ldflags] = "package_qa_hash_style"
def package_qa_hash_style(path, name, d, elf, messages):
@@ -597,9 +358,9 @@ def package_qa_hash_style(path, name, d, elf, messages):
if os.path.islink(path):
return
- gnu_hash = "--hash-style=gnu" in d.getVar('LDFLAGS', True)
+ gnu_hash = "--hash-style=gnu" in d.getVar('LDFLAGS')
if not gnu_hash:
- gnu_hash = "--hash-style=both" in d.getVar('LDFLAGS', True)
+ gnu_hash = "--hash-style=both" in d.getVar('LDFLAGS')
if not gnu_hash:
return
@@ -618,7 +379,7 @@ def package_qa_hash_style(path, name, d, elf, messages):
sane = True
if has_syms and not sane:
- package_qa_add_message(messages, "ldflags", "No GNU_HASH in the elf binary: '%s'" % path)
+ package_qa_add_message(messages, "ldflags", "No GNU_HASH in the ELF binary %s, didn't pass LDFLAGS?" % path)
QAPATHTEST[buildpaths] = "package_qa_check_buildpaths"
@@ -638,11 +399,12 @@ def package_qa_check_buildpaths(path, name, d, elf, messages):
if path.find(name + "/CONTROL/") != -1 or path.find(name + "/DEBIAN/") != -1:
return
- tmpdir = d.getVar('TMPDIR', True)
- with open(path) as f:
+ tmpdir = bytes(d.getVar('TMPDIR'), encoding="utf-8")
+ with open(path, 'rb') as f:
file_content = f.read()
if tmpdir in file_content:
- package_qa_add_message(messages, "buildpaths", "File %s in package contained reference to tmpdir" % package_qa_clean_path(path,d))
+ trimmed = path.replace(os.path.join (d.getVar("PKGDEST"), name), "")
+ package_qa_add_message(messages, "buildpaths", "File %s in package %s contains reference to TMPDIR" % (trimmed, name))
QAPATHTEST[xorg-driver-abi] = "package_qa_check_xorg_driver_abi"
@@ -657,8 +419,8 @@ def package_qa_check_xorg_driver_abi(path, name, d, elf, messages):
driverdir = d.expand("${libdir}/xorg/modules/drivers/")
if driverdir in path and path.endswith(".so"):
- mlprefix = d.getVar('MLPREFIX', True) or ''
- for rdep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + name, True) or ""):
+ mlprefix = d.getVar('MLPREFIX') or ''
+ for rdep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + name) or ""):
if rdep.startswith("%sxorg-abi-" % mlprefix):
return
package_qa_add_message(messages, "xorg-driver-abi", "Package %s contains Xorg driver (%s) but no xorg-abi- dependencies" % (name, os.path.basename(path)))
@@ -681,9 +443,9 @@ def package_qa_check_symlink_to_sysroot(path, name, d, elf, messages):
if os.path.islink(path):
target = os.readlink(path)
if os.path.isabs(target):
- tmpdir = d.getVar('TMPDIR', True)
+ tmpdir = d.getVar('TMPDIR')
if target.startswith(tmpdir):
- trimmed = path.replace(os.path.join (d.getVar("PKGDEST", True), name), "")
+ trimmed = path.replace(os.path.join (d.getVar("PKGDEST"), name), "")
package_qa_add_message(messages, "symlink-to-sysroot", "Symlink %s in %s points to TMPDIR" % (trimmed, name))
# Check license variables
@@ -692,32 +454,34 @@ python populate_lic_qa_checksum() {
"""
Check for changes in the license files.
"""
- import tempfile
sane = True
- lic_files = d.getVar('LIC_FILES_CHKSUM', True) or ''
- lic = d.getVar('LICENSE', True)
- pn = d.getVar('PN', True)
+ lic_files = d.getVar('LIC_FILES_CHKSUM') or ''
+ lic = d.getVar('LICENSE')
+ pn = d.getVar('PN')
if lic == "CLOSED":
return
- if not lic_files and d.getVar('SRC_URI', True):
- sane = package_qa_handle_error("license-checksum", pn + ": Recipe file fetches files and does not have license file information (LIC_FILES_CHKSUM)", d)
-
- srcdir = d.getVar('S', True)
+ if not lic_files and d.getVar('SRC_URI'):
+ sane &= package_qa_handle_error("license-checksum", pn + ": Recipe file fetches files and does not have license file information (LIC_FILES_CHKSUM)", d)
+ srcdir = d.getVar('S')
+ corebase_licensefile = d.getVar('COREBASE') + "/LICENSE"
for url in lic_files.split():
try:
(type, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
except bb.fetch.MalformedUrl:
- sane = package_qa_handle_error("license-checksum", pn + ": LIC_FILES_CHKSUM contains an invalid URL: " + url, d)
+ sane &= package_qa_handle_error("license-checksum", pn + ": LIC_FILES_CHKSUM contains an invalid URL: " + url, d)
continue
srclicfile = os.path.join(srcdir, path)
if not os.path.isfile(srclicfile):
- package_qa_handle_error("license-checksum", pn + ": LIC_FILES_CHKSUM points to an invalid file: " + srclicfile, d)
+ sane &= package_qa_handle_error("license-checksum", pn + ": LIC_FILES_CHKSUM points to an invalid file: " + srclicfile, d)
continue
+ if (srclicfile == corebase_licensefile):
+ bb.warn("${COREBASE}/LICENSE is not a valid license file, please use '${COMMON_LICENSE_DIR}/MIT' for a MIT License file in LIC_FILES_CHKSUM. This will become an error in the future")
+
recipemd5 = parm.get('md5', '')
beginline, endline = 0, 0
if 'beginline' in parm:
@@ -727,32 +491,45 @@ python populate_lic_qa_checksum() {
if (not beginline) and (not endline):
md5chksum = bb.utils.md5_file(srclicfile)
+ with open(srclicfile, 'r', errors='replace') as f:
+ license = f.read().splitlines()
else:
- fi = open(srclicfile, 'rb')
- fo = tempfile.NamedTemporaryFile(mode='wb', prefix='poky.', suffix='.tmp', delete=False)
- tmplicfile = fo.name;
- lineno = 0
- linesout = 0
- for line in fi:
- lineno += 1
- if (lineno >= beginline):
- if ((lineno <= endline) or not endline):
- fo.write(line)
- linesout += 1
- else:
- break
- fo.flush()
- fo.close()
- fi.close()
- md5chksum = bb.utils.md5_file(tmplicfile)
- os.unlink(tmplicfile)
-
+ with open(srclicfile, 'rb') as f:
+ import hashlib
+ lineno = 0
+ license = []
+ m = hashlib.md5()
+ for line in f:
+ lineno += 1
+ if (lineno >= beginline):
+ if ((lineno <= endline) or not endline):
+ m.update(line)
+ license.append(line.decode('utf-8', errors='replace').rstrip())
+ else:
+ break
+ md5chksum = m.hexdigest()
if recipemd5 == md5chksum:
bb.note (pn + ": md5 checksum matched for ", url)
else:
if recipemd5:
msg = pn + ": The LIC_FILES_CHKSUM does not match for " + url
msg = msg + "\n" + pn + ": The new md5 checksum is " + md5chksum
+ max_lines = int(d.getVar('QA_MAX_LICENSE_LINES') or 20)
+ if not license or license[-1] != '':
+ # Ensure that our license text ends with a line break
+ # (will be added with join() below).
+ license.append('')
+ remove = len(license) - max_lines
+ if remove > 0:
+ start = max_lines // 2
+ end = start + remove - 1
+ del license[start:end]
+ license.insert(start, '...')
+ msg = msg + "\n" + pn + ": Here is the selected license text:" + \
+ "\n" + \
+ "{:v^70}".format(" beginline=%d " % beginline if beginline else "") + \
+ "\n" + "\n".join(license) + \
+ "{:^^70}".format(" endline=%d " % endline if endline else "")
if beginline:
if endline:
srcfiledesc = "%s (lines %d through to %d)" % (srclicfile, beginline, endline)
@@ -767,13 +544,13 @@ python populate_lic_qa_checksum() {
else:
msg = pn + ": LIC_FILES_CHKSUM is not specified for " + url
msg = msg + "\n" + pn + ": The md5 checksum is " + md5chksum
- sane = package_qa_handle_error("license-checksum", msg, d)
+ sane &= package_qa_handle_error("license-checksum", msg, d)
if not sane:
bb.fatal("Fatal QA errors found, failing task.")
}
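The beginline/endline handling above means the recorded md5 covers only the selected line range, not the whole file. A minimal sketch of a matching recipe entry (file name, range and checksum are placeholders):

    LIC_FILES_CHKSUM = "file://src/foo.c;beginline=1;endline=20;md5=<md5 of lines 1-20>"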
-def package_qa_check_staged(path,d):
+def qa_check_staged(path,d):
"""
Check staged la and pc files for common problems like references to the work
directory.
@@ -783,42 +560,90 @@ def package_qa_check_staged(path,d):
"""
sane = True
- tmpdir = d.getVar('TMPDIR', True)
+ tmpdir = d.getVar('TMPDIR')
workdir = os.path.join(tmpdir, "work")
+ recipesysroot = d.getVar("RECIPE_SYSROOT")
if bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d):
pkgconfigcheck = workdir
else:
pkgconfigcheck = tmpdir
+ skip = (d.getVar('INSANE_SKIP') or "").split()
+ skip_la = False
+ if 'la' in skip:
+ bb.note("Recipe %s skipping qa checking: la" % d.getVar('PN'))
+ skip_la = True
+
+ skip_pkgconfig = False
+ if 'pkgconfig' in skip:
+ bb.note("Recipe %s skipping qa checking: pkgconfig" % d.getVar('PN'))
+ skip_pkgconfig = True
+
# find all .la and .pc files
# read the content
# and check for stuff that looks wrong
for root, dirs, files in os.walk(path):
for file in files:
path = os.path.join(root,file)
- if file.endswith(".la"):
+ if file.endswith(".la") and not skip_la:
with open(path) as f:
file_content = f.read()
+ file_content = file_content.replace(recipesysroot, "")
if workdir in file_content:
error_msg = "%s failed sanity test (workdir) in path %s" % (file,root)
- sane = package_qa_handle_error("la", error_msg, d)
- elif file.endswith(".pc"):
+ sane &= package_qa_handle_error("la", error_msg, d)
+ elif file.endswith(".pc") and not skip_pkgconfig:
with open(path) as f:
file_content = f.read()
+ file_content = file_content.replace(recipesysroot, "")
if pkgconfigcheck in file_content:
error_msg = "%s failed sanity test (tmpdir) in path %s" % (file,root)
- sane = package_qa_handle_error("pkgconfig", error_msg, d)
+ sane &= package_qa_handle_error("pkgconfig", error_msg, d)
return sane
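With the new skip handling, a recipe that deliberately ships workdir references in its .la or .pc files can opt out of these staging checks recipe-wide, e.g. (illustrative):

    INSANE_SKIP += "la pkgconfig"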
+# Run all package-wide warnfuncs and errorfuncs
+def package_qa_package(warnfuncs, errorfuncs, package, d):
+ warnings = {}
+ errors = {}
+
+ for func in warnfuncs:
+ func(package, d, warnings)
+ for func in errorfuncs:
+ func(package, d, errors)
+
+ for w in warnings:
+ package_qa_handle_error(w, warnings[w], d)
+ for e in errors:
+ package_qa_handle_error(e, errors[e], d)
+
+ return len(errors) == 0
+
+# Run all recipe-wide warnfuncs and errorfuncs
+def package_qa_recipe(warnfuncs, errorfuncs, pn, d):
+ warnings = {}
+ errors = {}
+
+ for func in warnfuncs:
+ func(pn, d, warnings)
+ for func in errorfuncs:
+ func(pn, d, errors)
+
+ for w in warnings:
+ package_qa_handle_error(w, warnings[w], d)
+ for e in errors:
+ package_qa_handle_error(e, errors[e], d)
+
+ return len(errors) == 0
+
# Walk over all files in a directory and call func
-def package_qa_walk(warnfuncs, errorfuncs, skip, package, d):
+def package_qa_walk(warnfuncs, errorfuncs, package, d):
import oe.qa
#if this will throw an exception, then fix the dict above
- target_os = d.getVar('TARGET_OS', True)
- target_arch = d.getVar('TARGET_ARCH', True)
+ target_os = d.getVar('TARGET_OS')
+ target_arch = d.getVar('TARGET_ARCH')
warnings = {}
errors = {}
@@ -847,11 +672,10 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
if not "-dbg" in pkg and not "packagegroup-" in pkg and not "-image" in pkg:
localdata = bb.data.createCopy(d)
- localdata.setVar('OVERRIDES', pkg)
- bb.data.update_data(localdata)
+ localdata.setVar('OVERRIDES', localdata.getVar('OVERRIDES') + ':' + pkg)
# Now check the RDEPENDS
- rdepends = bb.utils.explode_deps(localdata.getVar('RDEPENDS', True) or "")
+ rdepends = bb.utils.explode_deps(localdata.getVar('RDEPENDS') or "")
# Now do the sanity check!!!
if "build-deps" not in skip:
@@ -867,7 +691,7 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
continue
if not rdep_data or not 'PN' in rdep_data:
- pkgdata_dir = d.getVar("PKGDATA_DIR", True)
+ pkgdata_dir = d.getVar("PKGDATA_DIR")
try:
possibles = os.listdir("%s/runtime-rprovides/%s/" % (pkgdata_dir, rdepend))
except OSError:
@@ -887,38 +711,20 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
if "file-rdeps" not in skip:
ignored_file_rdeps = set(['/bin/sh', '/usr/bin/env', 'rtld(GNU_HASH)'])
if bb.data.inherits_class('nativesdk', d):
- ignored_file_rdeps |= set(['/bin/bash', '/usr/bin/perl'])
+ ignored_file_rdeps |= set(['/bin/bash', '/usr/bin/perl', 'perl'])
# For Saving the FILERDEPENDS
filerdepends = {}
rdep_data = oe.packagedata.read_subpkgdata(pkg, d)
for key in rdep_data:
if key.startswith("FILERDEPENDS_"):
- for subkey in rdep_data[key].split():
- if subkey not in ignored_file_rdeps:
+ for subkey in bb.utils.explode_deps(rdep_data[key]):
+ if subkey not in ignored_file_rdeps and \
+ not subkey.startswith('perl('):
# We already know it starts with FILERDEPENDS_
filerdepends[subkey] = key[13:]
if filerdepends:
- next = rdepends
done = rdepends[:]
- # Find all the rdepends on the dependency chain
- while next:
- new = []
- for rdep in next:
- rdep_data = oe.packagedata.read_subpkgdata(rdep, d)
- sub_rdeps = rdep_data.get("RDEPENDS_" + rdep)
- if not sub_rdeps:
- continue
- for sub_rdep in sub_rdeps.split():
- if sub_rdep in done:
- continue
- if not sub_rdep.startswith('(') and \
- oe.packagedata.has_subpkgdata(sub_rdep, d):
- # It's a new rdep
- done.append(sub_rdep)
- new.append(sub_rdep)
- next = new
-
# Add the rprovides of itself
if pkg not in done:
done.insert(0, pkg)
@@ -926,16 +732,20 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
# The python is not a package, but python-core provides it, so
# skip checking /usr/bin/python if python is in the rdeps, in
# case there is a RDEPENDS_pkg = "python" in the recipe.
- for py in [ d.getVar('MLPREFIX', True) + "python", "python" ]:
+ for py in [ d.getVar('MLPREFIX') + "python", "python" ]:
if py in done:
filerdepends.pop("/usr/bin/python",None)
done.remove(py)
for rdep in done:
+ # The file dependencies may contain package names, e.g.,
+ # perl
+ filerdepends.pop(rdep,None)
+
# For Saving the FILERPROVIDES, RPROVIDES and FILES_INFO
rdep_data = oe.packagedata.read_subpkgdata(rdep, d)
for key in rdep_data:
if key.startswith("FILERPROVIDES_") or key.startswith("RPROVIDES_"):
- for subkey in rdep_data[key].split():
+ for subkey in bb.utils.explode_deps(rdep_data[key]):
filerdepends.pop(subkey,None)
# Add the files list to the rprovides
if key == "FILES_INFO":
@@ -949,17 +759,17 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
for key in filerdepends:
error_msg = "%s contained in package %s requires %s, but no providers found in RDEPENDS_%s?" % \
(filerdepends[key].replace("_%s" % pkg, "").replace("@underscore@", "_"), pkg, key, pkg)
- package_qa_handle_error("file-rdeps", error_msg, d)
+ package_qa_handle_error("file-rdeps", error_msg, d)
+package_qa_check_rdepends[vardepsexclude] = "OVERRIDES"
-def package_qa_check_deps(pkg, pkgdest, skip, d):
+def package_qa_check_deps(pkg, pkgdest, d):
localdata = bb.data.createCopy(d)
localdata.setVar('OVERRIDES', pkg)
- bb.data.update_data(localdata)
def check_valid_deps(var):
try:
- rvar = bb.utils.explode_dep_versions2(localdata.getVar(var, True) or "")
+ rvar = bb.utils.explode_dep_versions2(localdata.getVar(var) or "")
except ValueError as e:
bb.fatal("%s_%s: %s" % (var, pkg, e))
for dep in rvar:
@@ -975,37 +785,59 @@ def package_qa_check_deps(pkg, pkgdest, skip, d):
check_valid_deps('RREPLACES')
check_valid_deps('RCONFLICTS')
-QAPATHTEST[expanded-d] = "package_qa_check_expanded_d"
-def package_qa_check_expanded_d(path,name,d,elf,messages):
+QAPKGTEST[usrmerge] = "package_qa_check_usrmerge"
+def package_qa_check_usrmerge(pkg, d, messages):
+ pkgdest = d.getVar('PKGDEST')
+ pkg_dir = pkgdest + os.sep + pkg + os.sep
+ merged_dirs = ['bin', 'sbin', 'lib'] + d.getVar('MULTILIB_VARIANTS').split()
+ for f in merged_dirs:
+ if os.path.exists(pkg_dir + f) and not os.path.islink(pkg_dir + f):
+ msg = "%s package is not obeying usrmerge distro feature. /%s should be relocated to /usr." % (pkg, f)
+ package_qa_add_message(messages, "usrmerge", msg)
+ return False
+ return True
+
+QAPKGTEST[perllocalpod] = "package_qa_check_perllocalpod"
+def package_qa_check_perllocalpod(pkg, d, messages):
+ """
+ Check that the recipe didn't ship a perlocal.pod file, which shouldn't be
+ installed in a distribution package. cpan.bbclass sets NO_PERLLOCAL=1 to
+ handle this for most recipes.
+ """
+ import glob
+ pkgd = oe.path.join(d.getVar('PKGDEST'), pkg)
+ podpath = oe.path.join(pkgd, d.getVar("libdir"), "perl*", "*", "*", "perllocal.pod")
+
+ matches = glob.glob(podpath)
+ if matches:
+ matches = [package_qa_clean_path(path, d, pkg) for path in matches]
+ msg = "%s contains perllocal.pod (%s), should not be installed" % (pkg, " ".join(matches))
+ package_qa_add_message(messages, "perllocalpod", msg)
+
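These QAPKGTEST hooks receive (pkg, d, messages) and run once per package, unlike the per-file QAPATHTEST hooks. A minimal sketch of how a layer could register an additional package-wide check through the same mechanism (the check name and function are hypothetical, and the name would also have to be listed in WARN_QA or ERROR_QA for parse_test_matrix below to pick it up):

    QAPKGTEST[no-empty-dirs] = "package_qa_check_no_empty_dirs"
    def package_qa_check_no_empty_dirs(pkg, d, messages):
        # hypothetical check: complain about empty directories shipped in the package
        pkgdir = os.path.join(d.getVar('PKGDEST'), pkg)
        for root, dirs, files in os.walk(pkgdir):
            if root != pkgdir and not dirs and not files:
                package_qa_add_message(messages, "no-empty-dirs",
                    "%s: installs empty directory %s" % (pkg, root[len(pkgdir):]))
                return False
        return True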
+QAPKGTEST[expanded-d] = "package_qa_check_expanded_d"
+def package_qa_check_expanded_d(package, d, messages):
"""
Check for the expanded D (${D}) value in pkg_* and FILES
variables, warn the user to use it correctly.
"""
-
sane = True
- expanded_d = d.getVar('D',True)
-
- # Get packages for current recipe and iterate
- packages = d.getVar('PACKAGES', True).split(" ")
- for pak in packages:
- # Go through all variables and check if expanded D is found, warn the user accordingly
- for var in 'FILES','pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm':
- bbvar = d.getVar(var + "_" + pak, False)
- if bbvar:
- # Bitbake expands ${D} within bbvar during the previous step, so we check for its expanded value
- if expanded_d in bbvar:
- if var == 'FILES':
- package_qa_add_message(messages, "expanded-d", "FILES in %s recipe should not contain the ${D} variable as it references the local build directory not the target filesystem, best solution is to remove the ${D} reference" % pak)
- sane = False
- else:
- package_qa_add_message(messages, "expanded-d", "%s in %s recipe contains ${D}, it should be replaced by $D instead" % (var, pak))
- sane = False
+ expanded_d = d.getVar('D')
+
+ for var in 'FILES','pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm':
+ bbvar = d.getVar(var + "_" + package) or ""
+ if expanded_d in bbvar:
+ if var == 'FILES':
+ package_qa_add_message(messages, "expanded-d", "FILES in %s recipe should not contain the ${D} variable as it references the local build directory not the target filesystem, best solution is to remove the ${D} reference" % package)
+ sane = False
+ else:
+ package_qa_add_message(messages, "expanded-d", "%s in %s recipe contains ${D}, it should be replaced by $D instead" % (var, package))
+ sane = False
return sane
def package_qa_check_encoding(keys, encode, d):
def check_encoding(key, enc):
sane = True
- value = d.getVar(key, True)
+ value = d.getVar(key)
if value:
try:
s = value.encode(enc)
@@ -1030,8 +862,8 @@ def package_qa_check_host_user(path, name, d, elf, messages):
if not os.path.lexists(path):
return
- dest = d.getVar('PKGDEST', True)
- pn = d.getVar('PN', True)
+ dest = d.getVar('PKGDEST')
+ pn = d.getVar('PN')
home = os.path.join(dest, 'home')
if path == home or path.startswith(home + os.sep):
return
@@ -1043,18 +875,29 @@ def package_qa_check_host_user(path, name, d, elf, messages):
if exc.errno != errno.ENOENT:
raise
else:
- rootfs_path = path[len(dest):]
- check_uid = int(d.getVar('HOST_USER_UID', True))
+ check_uid = int(d.getVar('HOST_USER_UID'))
if stat.st_uid == check_uid:
- package_qa_add_message(messages, "host-user-contaminated", "%s: %s is owned by uid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, rootfs_path, check_uid))
+ package_qa_add_message(messages, "host-user-contaminated", "%s: %s is owned by uid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_uid))
return False
- check_gid = int(d.getVar('HOST_USER_GID', True))
+ check_gid = int(d.getVar('HOST_USER_GID'))
if stat.st_gid == check_gid:
- package_qa_add_message(messages, "host-user-contaminated", "%s: %s is owned by gid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, rootfs_path, check_gid))
+ package_qa_add_message(messages, "host-user-contaminated", "%s: %s is owned by gid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_gid))
return False
return True
+QARECIPETEST[src-uri-bad] = "package_qa_check_src_uri"
+def package_qa_check_src_uri(pn, d, messages):
+ import re
+
+ if "${PN}" in d.getVar("SRC_URI", False):
+ package_qa_handle_error("src-uri-bad", "%s: SRC_URI uses PN not BPN" % pn, d)
+
+ for url in d.getVar("SRC_URI").split():
+ if re.search(r"github\.com/.+/.+/archive/.+", url):
+ package_qa_handle_error("src-uri-bad", "%s: SRC_URI uses unstable GitHub archives" % pn, d)
+
+
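The new src-uri-bad check flags two patterns: a literal ${PN} in the unexpanded SRC_URI, and GitHub auto-generated archive tarballs, whose contents are not stable over time. Illustrative before/after (the project name is a placeholder):

    # flagged:
    SRC_URI = "https://github.com/example/${PN}/archive/v${PV}.tar.gz"
    # preferred: use ${BPN} and fetch from the repository itself
    SRC_URI = "git://github.com/example/${BPN}.git;protocol=https"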
# The PACKAGE FUNC to scan each package
python do_package_qa () {
import subprocess
@@ -1067,8 +910,8 @@ python do_package_qa () {
# Check non UTF-8 characters on recipe's metadata
package_qa_check_encoding(['DESCRIPTION', 'SUMMARY', 'LICENSE', 'SECTION'], 'utf-8', d)
- logdir = d.getVar('T', True)
- pkg = d.getVar('PN', True)
+ logdir = d.getVar('T')
+ pn = d.getVar('PN')
# Check the compile log for host contamination
compilelog = os.path.join(logdir,"log.do_compile")
@@ -1077,7 +920,7 @@ python do_package_qa () {
statement = "grep -e 'CROSS COMPILE Badness:' -e 'is unsafe for cross-compilation' %s > /dev/null" % compilelog
if subprocess.call(statement, shell=True) == 0:
msg = "%s: The compile log indicates that host include and/or library paths were used.\n \
- Please check the log '%s' for more information." % (pkg, compilelog)
+ Please check the log '%s' for more information." % (pn, compilelog)
package_qa_handle_error("compile-host-path", msg, d)
# Check the install log for host contamination
@@ -1087,12 +930,12 @@ python do_package_qa () {
statement = "grep -e 'CROSS COMPILE Badness:' -e 'is unsafe for cross-compilation' %s > /dev/null" % installlog
if subprocess.call(statement, shell=True) == 0:
msg = "%s: The install log indicates that host include and/or library paths were used.\n \
- Please check the log '%s' for more information." % (pkg, installlog)
+ Please check the log '%s' for more information." % (pn, installlog)
package_qa_handle_error("install-host-path", msg, d)
# Scan the packages...
- pkgdest = d.getVar('PKGDEST', True)
- packages = set((d.getVar('PACKAGES', True) or '').split())
+ pkgdest = d.getVar('PKGDEST')
+ packages = set((d.getVar('PACKAGES') or '').split())
cpath = oe.cachedpath.CachedPath()
global pkgfiles
@@ -1107,38 +950,38 @@ python do_package_qa () {
if not packages:
return
- testmatrix = d.getVarFlags("QAPATHTEST")
import re
# The package name matches the [a-z0-9.+-]+ regular expression
- pkgname_pattern = re.compile("^[a-z0-9.+-]+$")
+ pkgname_pattern = re.compile(r"^[a-z0-9.+-]+$")
taskdepdata = d.getVar("BB_TASKDEPDATA", False)
taskdeps = set()
for dep in taskdepdata:
taskdeps.add(taskdepdata[dep][0])
- g = globals()
- for package in packages:
- skip = (d.getVar('INSANE_SKIP_' + package, True) or "").split()
- if skip:
- bb.note("Package %s skipping QA tests: %s" % (package, str(skip)))
+ def parse_test_matrix(matrix_name):
+ testmatrix = d.getVarFlags(matrix_name) or {}
+ g = globals()
warnchecks = []
- for w in (d.getVar("WARN_QA", True) or "").split():
+ for w in (d.getVar("WARN_QA") or "").split():
if w in skip:
continue
if w in testmatrix and testmatrix[w] in g:
warnchecks.append(g[testmatrix[w]])
- if w == 'unsafe-references-in-binaries':
- oe.utils.write_ld_so_conf(d)
errorchecks = []
- for e in (d.getVar("ERROR_QA", True) or "").split():
+ for e in (d.getVar("ERROR_QA") or "").split():
if e in skip:
continue
if e in testmatrix and testmatrix[e] in g:
errorchecks.append(g[testmatrix[e]])
- if e == 'unsafe-references-in-binaries':
- oe.utils.write_ld_so_conf(d)
+ return warnchecks, errorchecks
+
+ for package in packages:
+ skip = set((d.getVar('INSANE_SKIP') or "").split() +
+ (d.getVar('INSANE_SKIP_' + package) or "").split())
+ if skip:
+ bb.note("Package %s skipping QA tests: %s" % (package, str(skip)))
bb.note("Checking Package: %s" % package)
# Check package name
@@ -1146,25 +989,41 @@ python do_package_qa () {
package_qa_handle_error("pkgname",
"%s doesn't match the [a-z0-9.+-]+ regex" % package, d)
- path = "%s/%s" % (pkgdest, package)
- package_qa_walk(warnchecks, errorchecks, skip, package, d)
+ warn_checks, error_checks = parse_test_matrix("QAPATHTEST")
+ package_qa_walk(warn_checks, error_checks, package, d)
+
+ warn_checks, error_checks = parse_test_matrix("QAPKGTEST")
+ package_qa_package(warn_checks, error_checks, package, d)
package_qa_check_rdepends(package, pkgdest, skip, taskdeps, packages, d)
- package_qa_check_deps(package, pkgdest, skip, d)
+ package_qa_check_deps(package, pkgdest, d)
- if 'libdir' in d.getVar("ALL_QA", True).split():
+ warn_checks, error_checks = parse_test_matrix("QARECIPETEST")
+ package_qa_recipe(warn_checks, error_checks, pn, d)
+
+ if 'libdir' in d.getVar("ALL_QA").split():
package_qa_check_libdir(d)
- qa_sane = d.getVar("QA_SANE", True)
+ qa_sane = d.getVar("QA_SANE")
if not qa_sane:
bb.fatal("QA run found fatal errors. Please consider fixing them.")
bb.note("DONE with PACKAGE QA")
}
+# binutils is used for most checks, so need to set as dependency
+# POPULATESYSROOTDEPS is defined in staging class.
+do_package_qa[depends] += "${POPULATESYSROOTDEPS}"
do_package_qa[vardepsexclude] = "BB_TASKDEPDATA"
do_package_qa[rdeptask] = "do_packagedata"
addtask do_package_qa after do_packagedata do_package before do_build
+# Add the package specific INSANE_SKIPs to the sstate dependencies
+python() {
+ pkgs = (d.getVar('PACKAGES') or '').split()
+ for pkg in pkgs:
+ d.appendVarFlag("do_package_qa", "vardeps", " INSANE_SKIP_{}".format(pkg))
+}
+
SSTATETASKS += "do_package_qa"
do_package_qa[sstate-inputdirs] = ""
do_package_qa[sstate-outputdirs] = ""
@@ -1175,11 +1034,58 @@ addtask do_package_qa_setscene
python do_qa_staging() {
bb.note("QA checking staging")
-
- if not package_qa_check_staged(d.expand('${SYSROOT_DESTDIR}${libdir}'), d):
+ if not qa_check_staged(d.expand('${SYSROOT_DESTDIR}${libdir}'), d):
bb.fatal("QA staging was broken by the package built above")
}
+python do_qa_patch() {
+ import subprocess
+
+ ###########################################################################
+ # Check patch.log for fuzz warnings
+ #
+ # Further information on why we check for patch fuzz warnings:
+ # http://lists.openembedded.org/pipermail/openembedded-core/2018-March/148675.html
+ # https://bugzilla.yoctoproject.org/show_bug.cgi?id=10450
+ ###########################################################################
+
+ logdir = d.getVar('T')
+ patchlog = os.path.join(logdir,"log.do_patch")
+
+ if os.path.exists(patchlog):
+ fuzzheader = '--- Patch fuzz start ---'
+ fuzzfooter = '--- Patch fuzz end ---'
+ statement = "grep -e '%s' %s > /dev/null" % (fuzzheader, patchlog)
+ if subprocess.call(statement, shell=True) == 0:
+ msg = "Fuzz detected:\n\n"
+ fuzzmsg = ""
+ inFuzzInfo = False
+ f = open(patchlog, "r")
+ for line in f:
+ if fuzzheader in line:
+ inFuzzInfo = True
+ fuzzmsg = ""
+ elif fuzzfooter in line:
+ fuzzmsg = fuzzmsg.replace('\n\n', '\n')
+ msg += fuzzmsg
+ msg += "\n"
+ inFuzzInfo = False
+ elif inFuzzInfo and not 'Now at patch' in line:
+ fuzzmsg += line
+ f.close()
+ msg += "The context lines in the patches can be updated with devtool:\n"
+ msg += "\n"
+ msg += " devtool modify %s\n" % d.getVar('PN')
+ msg += " devtool finish --force-patch-refresh %s <layer_path>\n\n" % d.getVar('PN')
+ msg += "Don't forget to review changes done by devtool!\n"
+ if 'patch-fuzz' in d.getVar('ERROR_QA'):
+ bb.error(msg)
+ elif 'patch-fuzz' in d.getVar('WARN_QA'):
+ bb.warn(msg)
+ msg = "Patch log indicates that patches do not apply cleanly."
+ package_qa_handle_error("patch-fuzz", msg, d)
+}
+
python do_qa_configure() {
import subprocess
@@ -1188,17 +1094,24 @@ python do_qa_configure() {
###########################################################################
configs = []
- workdir = d.getVar('WORKDIR', True)
+ workdir = d.getVar('WORKDIR')
- if bb.data.inherits_class('autotools', d):
+ skip = (d.getVar('INSANE_SKIP') or "").split()
+ skip_configure_unsafe = False
+ if 'configure-unsafe' in skip:
+ bb.note("Recipe %s skipping qa checking: configure-unsafe" % d.getVar('PN'))
+ skip_configure_unsafe = True
+
+ if bb.data.inherits_class('autotools', d) and not skip_configure_unsafe:
bb.note("Checking autotools environment for common misconfiguration")
for root, dirs, files in os.walk(workdir):
statement = "grep -q -F -e 'CROSS COMPILE Badness:' -e 'is unsafe for cross-compilation' %s" % \
os.path.join(root,"config.log")
if "config.log" in files:
if subprocess.call(statement, shell=True) == 0:
- bb.fatal("""This autoconf log indicates errors, it looked at host include and/or library paths while determining system capabilities.
-Rerun configure task after fixing this.""")
+ error_msg = """This autoconf log indicates errors, it looked at host include and/or library paths while determining system capabilities.
+Rerun configure task after fixing this."""
+ package_qa_handle_error("configure-unsafe", error_msg, d)
if "configure.ac" in files:
configs.append(os.path.join(root,"configure.ac"))
@@ -1209,70 +1122,81 @@ Rerun configure task after fixing this.""")
# Check gettext configuration and dependencies are correct
###########################################################################
- cnf = d.getVar('EXTRA_OECONF', True) or ""
- if "gettext" not in d.getVar('P', True) and "gcc-runtime" not in d.getVar('P', True) and "--disable-nls" not in cnf:
- ml = d.getVar("MLPREFIX", True) or ""
- if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('nativesdk', d):
- gt = "gettext-native"
- elif bb.data.inherits_class('cross-canadian', d):
+ skip_configure_gettext = False
+ if 'configure-gettext' in skip:
+ bb.note("Recipe %s skipping qa checking: configure-gettext" % d.getVar('PN'))
+ skip_configure_gettext = True
+
+ cnf = d.getVar('EXTRA_OECONF') or ""
+ if not ("gettext" in d.getVar('P') or "gcc-runtime" in d.getVar('P') or \
+ "--disable-nls" in cnf or skip_configure_gettext):
+ ml = d.getVar("MLPREFIX") or ""
+ if bb.data.inherits_class('cross-canadian', d):
gt = "nativesdk-gettext"
else:
- gt = "virtual/" + ml + "gettext"
- deps = bb.utils.explode_deps(d.getVar('DEPENDS', True) or "")
+ gt = "gettext-native"
+ deps = bb.utils.explode_deps(d.getVar('DEPENDS') or "")
if gt not in deps:
for config in configs:
gnu = "grep \"^[[:space:]]*AM_GNU_GETTEXT\" %s >/dev/null" % config
if subprocess.call(gnu, shell=True) == 0:
- bb.fatal("""%s required but not in DEPENDS for file %s.
-Missing inherit gettext?""" % (gt, config))
+ error_msg = "AM_GNU_GETTEXT used but no inherit gettext"
+ package_qa_handle_error("configure-gettext", error_msg, d)
###########################################################################
# Check unrecognised configure options (with a white list)
###########################################################################
- if bb.data.inherits_class("autotools", d):
+ if bb.data.inherits_class("autotools", d) or bb.data.inherits_class("meson", d):
bb.note("Checking configure output for unrecognised options")
try:
- flag = "WARNING: unrecognized options:"
- log = os.path.join(d.getVar('B', True), 'config.log')
- output = subprocess.check_output(['grep', '-F', flag, log]).decode("utf-8").replace(', ', ' ')
+ if bb.data.inherits_class("autotools", d):
+ flag = "WARNING: unrecognized options:"
+ log = os.path.join(d.getVar('B'), 'config.log')
+ if bb.data.inherits_class("meson", d):
+ flag = "WARNING: Unknown options:"
+ log = os.path.join(d.getVar('T'), 'log.do_configure')
+ output = subprocess.check_output(['grep', '-F', flag, log]).decode("utf-8").replace(', ', ' ').replace('"', '')
options = set()
for line in output.splitlines():
options |= set(line.partition(flag)[2].split())
- whitelist = set(d.getVar("UNKNOWN_CONFIGURE_WHITELIST", True).split())
+ whitelist = set(d.getVar("UNKNOWN_CONFIGURE_WHITELIST").split())
options -= whitelist
if options:
- pn = d.getVar('PN', True)
+ pn = d.getVar('PN')
error_msg = pn + ": configure was passed unrecognised options: " + " ".join(options)
package_qa_handle_error("unknown-configure-option", error_msg, d)
except subprocess.CalledProcessError:
pass
# Check invalid PACKAGECONFIG
- pkgconfig = (d.getVar("PACKAGECONFIG", True) or "").split()
+ pkgconfig = (d.getVar("PACKAGECONFIG") or "").split()
if pkgconfig:
pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
for pconfig in pkgconfig:
if pconfig not in pkgconfigflags:
- pn = d.getVar('PN', True)
+ pn = d.getVar('PN')
error_msg = "%s: invalid PACKAGECONFIG: %s" % (pn, pconfig)
package_qa_handle_error("invalid-packageconfig", error_msg, d)
- qa_sane = d.getVar("QA_SANE", True)
+ qa_sane = d.getVar("QA_SANE")
if not qa_sane:
bb.fatal("Fatal QA errors found, failing task.")
}
python do_qa_unpack() {
- src_uri = d.getVar('SRC_URI', True)
- s_dir = d.getVar('S', True)
+ src_uri = d.getVar('SRC_URI')
+ s_dir = d.getVar('S')
if src_uri and not os.path.exists(s_dir):
- bb.warn('%s: the directory %s (%s) pointed to by the S variable doesn\'t exist - please set S within the recipe to point to where the source has been unpacked to' % (d.getVar('PN', True), d.getVar('S', False), s_dir))
+ bb.warn('%s: the directory %s (%s) pointed to by the S variable doesn\'t exist - please set S within the recipe to point to where the source has been unpacked to' % (d.getVar('PN'), d.getVar('S', False), s_dir))
}
# The Staging Func, to check all staging
#addtask qa_staging after do_populate_sysroot before do_build
do_populate_sysroot[postfuncs] += "do_qa_staging "
+# Check for patch fuzz
+do_patch[postfuncs] += "do_qa_patch "
+
# Check broken config.log files, for packages requiring Gettext which
# don't have it in DEPENDS.
#addtask qa_configure after do_configure before do_compile
@@ -1282,7 +1206,9 @@ do_configure[postfuncs] += "do_qa_configure "
do_unpack[postfuncs] += "do_qa_unpack"
python () {
- tests = d.getVar('ALL_QA', True).split()
+ import re
+
+ tests = d.getVar('ALL_QA').split()
if "desktop" in tests:
d.appendVar("PACKAGE_DEPENDS", " desktop-file-utils-native")
@@ -1291,7 +1217,7 @@ python () {
###########################################################################
# Checking ${FILESEXTRAPATHS}
- extrapaths = (d.getVar("FILESEXTRAPATHS", True) or "")
+ extrapaths = (d.getVar("FILESEXTRAPATHS") or "")
if '__default' not in extrapaths.split(":"):
msg = "FILESEXTRAPATHS-variable, must always use _prepend (or _append)\n"
msg += "type of assignment, and don't forget the colon.\n"
@@ -1303,29 +1229,37 @@ python () {
msg += "%s\n" % extrapaths
bb.warn(msg)
- overrides = d.getVar('OVERRIDES', True).split(':')
- pn = d.getVar('PN', True)
+ overrides = d.getVar('OVERRIDES').split(':')
+ pn = d.getVar('PN')
if pn in overrides:
- msg = 'Recipe %s has PN of "%s" which is in OVERRIDES, this can result in unexpected behaviour.' % (d.getVar("FILE", True), pn)
+ msg = 'Recipe %s has PN of "%s" which is in OVERRIDES, this can result in unexpected behaviour.' % (d.getVar("FILE"), pn)
package_qa_handle_error("pn-overrides", msg, d)
+ prog = re.compile(r'[A-Z]')
+ if prog.search(pn):
+ package_qa_handle_error("uppercase-pn", 'PN: %s is upper case, this can result in unexpected behavior.' % pn, d)
+
+ # Some people mistakenly use DEPENDS_${PN} instead of DEPENDS and wonder
+ # why it doesn't work.
+ if (d.getVar(d.expand('DEPENDS_${PN}'))):
+ package_qa_handle_error("pkgvarcheck", "recipe uses DEPENDS_${PN}, should use DEPENDS", d)
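An illustrative misuse caught by this check, versus the intended form (DEPENDS is recipe-wide, not package-specific):

    DEPENDS_${PN} = "zlib"   # flagged by pkgvarcheck
    DEPENDS = "zlib"         # correct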
issues = []
- if (d.getVar('PACKAGES', True) or "").split():
- for dep in (d.getVar('QADEPENDS', True) or "").split():
+ if (d.getVar('PACKAGES') or "").split():
+ for dep in (d.getVar('QADEPENDS') or "").split():
d.appendVarFlag('do_package_qa', 'depends', " %s:do_populate_sysroot" % dep)
for var in 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RCONFLICTS', 'RPROVIDES', 'RREPLACES', 'FILES', 'pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm', 'ALLOW_EMPTY':
if d.getVar(var, False):
issues.append(var)
- fakeroot_tests = d.getVar('FAKEROOT_QA', True).split()
+ fakeroot_tests = d.getVar('FAKEROOT_QA').split()
if set(tests) & set(fakeroot_tests):
d.setVarFlag('do_package_qa', 'fakeroot', '1')
d.appendVarFlag('do_package_qa', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
else:
d.setVarFlag('do_package_qa', 'rdeptask', '')
for i in issues:
- package_qa_handle_error("pkgvarcheck", "%s: Variable %s is set as not being package specific, please fix this." % (d.getVar("FILE", True), i), d)
- qa_sane = d.getVar("QA_SANE", True)
+ package_qa_handle_error("pkgvarcheck", "%s: Variable %s is set as not being package specific, please fix this." % (d.getVar("FILE"), i), d)
+ qa_sane = d.getVar("QA_SANE")
if not qa_sane:
bb.fatal("Fatal QA errors found, failing task.")
}