Diffstat (limited to 'meta/lib')
-rw-r--r--  meta/lib/buildstats.py | 4
-rw-r--r--  meta/lib/oe/cve_check.py | 147
-rw-r--r--  meta/lib/oe/license.py | 6
-rw-r--r--  meta/lib/oe/package_manager.py | 13
-rw-r--r--  meta/lib/oe/packagedata.py | 11
-rw-r--r--  meta/lib/oe/patch.py | 6
-rw-r--r--  meta/lib/oe/reproducible.py | 5
-rw-r--r--  meta/lib/oe/rootfs.py | 4
-rw-r--r--  meta/lib/oe/sbom.py | 84
-rw-r--r--  meta/lib/oe/spdx.py | 357
-rw-r--r--  meta/lib/oe/sstatesig.py | 5
-rw-r--r--  meta/lib/oe/terminal.py | 4
-rw-r--r--  meta/lib/oe/utils.py | 3
-rw-r--r--  meta/lib/oeqa/core/target/ssh.py | 4
-rw-r--r--  meta/lib/oeqa/manual/eclipse-plugin.json | 6
-rw-r--r--  meta/lib/oeqa/manual/toaster-managed-mode.json | 2
-rw-r--r--  meta/lib/oeqa/runtime/cases/date.py | 13
-rw-r--r--  meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py | 36
-rw-r--r--  meta/lib/oeqa/runtime/cases/ksample.py | 2
-rw-r--r--  meta/lib/oeqa/runtime/cases/ltp.py | 2
-rw-r--r--  meta/lib/oeqa/runtime/cases/parselogs.py | 19
-rw-r--r--  meta/lib/oeqa/runtime/cases/ping.py | 20
-rw-r--r--  meta/lib/oeqa/runtime/cases/rpm.py | 23
-rw-r--r--  meta/lib/oeqa/runtime/cases/rtc.py | 40
-rw-r--r--  meta/lib/oeqa/runtime/cases/scp.py | 2
-rw-r--r--  meta/lib/oeqa/runtime/cases/suspend.py | 33
-rw-r--r--  meta/lib/oeqa/runtime/cases/terminal.py | 21
-rw-r--r--  meta/lib/oeqa/runtime/cases/usb_hid.py | 22
-rw-r--r--  meta/lib/oeqa/runtime/context.py | 33
-rw-r--r--  meta/lib/oeqa/sdk/cases/buildepoxy.py | 2
-rw-r--r--  meta/lib/oeqa/selftest/cases/archiver.py | 16
-rw-r--r--  meta/lib/oeqa/selftest/cases/bbtests.py | 13
-rw-r--r--  meta/lib/oeqa/selftest/cases/cve_check.py | 178
-rw-r--r--  meta/lib/oeqa/selftest/cases/devtool.py | 19
-rw-r--r--  meta/lib/oeqa/selftest/cases/diffoscope/A/file.txt | 1
-rw-r--r--  meta/lib/oeqa/selftest/cases/diffoscope/B/file.txt | 1
-rw-r--r--  meta/lib/oeqa/selftest/cases/glibc.py | 8
-rw-r--r--  meta/lib/oeqa/selftest/cases/gotoolchain.py | 6
-rw-r--r--  meta/lib/oeqa/selftest/cases/imagefeatures.py | 2
-rw-r--r--  meta/lib/oeqa/selftest/cases/oelib/utils.py | 3
-rw-r--r--  meta/lib/oeqa/selftest/cases/oescripts.py | 3
-rw-r--r--  meta/lib/oeqa/selftest/cases/prservice.py | 2
-rw-r--r--  meta/lib/oeqa/selftest/cases/recipetool.py | 6
-rw-r--r--  meta/lib/oeqa/selftest/cases/reproducible.py | 37
-rw-r--r--  meta/lib/oeqa/selftest/cases/runcmd.py | 4
-rw-r--r--  meta/lib/oeqa/selftest/cases/runtime_test.py | 43
-rw-r--r--  meta/lib/oeqa/selftest/cases/sstatetests.py | 14
-rw-r--r--  meta/lib/oeqa/selftest/cases/tinfoil.py | 28
-rw-r--r--  meta/lib/oeqa/utils/buildproject.py | 3
-rw-r--r--  meta/lib/oeqa/utils/metadata.py | 6
-rw-r--r--  meta/lib/oeqa/utils/nfs.py | 4
-rw-r--r--  meta/lib/oeqa/utils/qemurunner.py | 20
-rw-r--r--  meta/lib/oeqa/utils/targetbuild.py | 4
53 files changed, 1190 insertions, 160 deletions
diff --git a/meta/lib/buildstats.py b/meta/lib/buildstats.py
index 8627ed3c31..c52b6c3b72 100644
--- a/meta/lib/buildstats.py
+++ b/meta/lib/buildstats.py
@@ -43,8 +43,8 @@ class SystemStats:
# depends on the heartbeat event, which fires less often.
self.min_seconds = 1
- self.meminfo_regex = re.compile(b'^(MemTotal|MemFree|Buffers|Cached|SwapTotal|SwapFree):\s*(\d+)')
- self.diskstats_regex = re.compile(b'^([hsv]d.|mtdblock\d|mmcblk\d|cciss/c\d+d\d+.*)$')
+ self.meminfo_regex = re.compile(rb'^(MemTotal|MemFree|Buffers|Cached|SwapTotal|SwapFree):\s*(\d+)')
+ self.diskstats_regex = re.compile(rb'^([hsv]d.|mtdblock\d|mmcblk\d|cciss/c\d+d\d+.*)$')
self.diskstats_ltime = None
self.diskstats_data = None
self.stat_ltimes = None
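
For illustration only (not part of the patch above), a minimal sketch of why the rb'' prefix matters here: \s and \d are not valid escape sequences in a plain bytes literal, so newer Python releases warn about the old form, while the compiled pattern is unchanged. The pattern below is trimmed to two keys for brevity.

    import re

    # Raw bytes pattern, as in the patched code above (trimmed for brevity).
    meminfo_regex = re.compile(rb'^(MemTotal|MemFree):\s*(\d+)')

    m = meminfo_regex.match(b'MemTotal:       16384256 kB')
    print(m.group(1), m.group(2))   # b'MemTotal' b'16384256'
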
diff --git a/meta/lib/oe/cve_check.py b/meta/lib/oe/cve_check.py
index a1d7c292af..ed4af18ced 100644
--- a/meta/lib/oe/cve_check.py
+++ b/meta/lib/oe/cve_check.py
@@ -63,3 +63,150 @@ def _cmpkey(release, patch_l, pre_l, pre_v):
else:
_pre = float(pre_v) if pre_v else float('-inf')
return _release, _patch, _pre
+
+def cve_check_merge_jsons(output, data):
+ """
+ Merge the data in the "package" property to the main data file
+ output
+ """
+ if output["version"] != data["version"]:
+ bb.error("Version mismatch when merging JSON outputs")
+ return
+
+ for product in output["package"]:
+ if product["name"] == data["package"][0]["name"]:
+ bb.error("Error adding the same package %s twice" % product["name"])
+ return
+
+ output["package"].append(data["package"][0])
+
+def update_symlinks(target_path, link_path):
+ """
+ Update a symbolic link link_path to point to target_path.
+ Remove the link and recreate it if it exists and is different.
+ """
+ if link_path != target_path and os.path.exists(target_path):
+ if os.path.exists(os.path.realpath(link_path)):
+ os.remove(link_path)
+ os.symlink(os.path.basename(target_path), link_path)
+
+def get_patched_cves(d):
+ """
+ Get patches that solve CVEs using the "CVE: " tag.
+ """
+
+ import re
+ import oe.patch
+
+ pn = d.getVar("PN")
+ cve_match = re.compile("CVE:( CVE\-\d{4}\-\d+)+")
+
+ # Matches the last "CVE-YYYY-ID" in the file name, also if written
+ # in lowercase. Possible to have multiple CVE IDs in a single
+ # file name, but only the last one will be detected from the file name.
+ # However, patch file contents addressing multiple CVE IDs are supported
+ # (cve_match regular expression)
+
+ cve_file_name_match = re.compile(".*([Cc][Vv][Ee]\-\d{4}\-\d+)")
+
+ patched_cves = set()
+ bb.debug(2, "Looking for patches that solves CVEs for %s" % pn)
+ for url in oe.patch.src_patches(d):
+ patch_file = bb.fetch.decodeurl(url)[2]
+
+ # Check patch file name for CVE ID
+ fname_match = cve_file_name_match.search(patch_file)
+ if fname_match:
+ cve = fname_match.group(1).upper()
+ patched_cves.add(cve)
+ bb.debug(2, "Found CVE %s from patch file name %s" % (cve, patch_file))
+
+ # Remote patches won't be present and compressed patches won't be
+ # unpacked, so say we're not scanning them
+ if not os.path.isfile(patch_file):
+ bb.note("%s is remote or compressed, not scanning content" % patch_file)
+ continue
+
+ with open(patch_file, "r", encoding="utf-8") as f:
+ try:
+ patch_text = f.read()
+ except UnicodeDecodeError:
+ bb.debug(1, "Failed to read patch %s using UTF-8 encoding"
+ " trying with iso8859-1" % patch_file)
+ f.close()
+ with open(patch_file, "r", encoding="iso8859-1") as f:
+ patch_text = f.read()
+
+ # Search for one or more "CVE: " lines
+ text_match = False
+ for match in cve_match.finditer(patch_text):
+ # Get only the CVEs without the "CVE: " tag
+ cves = patch_text[match.start()+5:match.end()]
+ for cve in cves.split():
+ bb.debug(2, "Patch %s solves %s" % (patch_file, cve))
+ patched_cves.add(cve)
+ text_match = True
+
+ if not fname_match and not text_match:
+ bb.debug(2, "Patch %s doesn't solve CVEs" % patch_file)
+
+ return patched_cves
+
+
+def get_cpe_ids(cve_product, version):
+ """
+ Get list of CPE identifiers for the given product and version
+ """
+
+ version = version.split("+git")[0]
+
+ cpe_ids = []
+ for product in cve_product.split():
+ # CVE_PRODUCT in recipes may include vendor information for CPE identifiers. If not,
+ # use wildcard for vendor.
+ if ":" in product:
+ vendor, product = product.split(":", 1)
+ else:
+ vendor = "*"
+
+ cpe_id = 'cpe:2.3:a:{}:{}:{}:*:*:*:*:*:*:*'.format(vendor, product, version)
+ cpe_ids.append(cpe_id)
+
+ return cpe_ids
+
+def convert_cve_version(version):
+ """
+ This function converts from CVE format to Yocto version format.
+ e.g. 8.3_p1 -> 8.3p1, 6.2_rc1 -> 6.2-rc1
+
+ Unless it is redefined using CVE_VERSION in the recipe,
+ cve_check uses the version in the name of the recipe (${PV})
+ to check vulnerabilities against a CVE in the database downloaded from NVD.
+
+ When the version has an update, i.e.
+ "p1" in OpenSSH 8.3p1,
+ "-rc1" in linux kernel 6.2-rc1,
+ the database stores the version as version_update (8.3_p1, 6.2_rc1).
+ Therefore, we must transform this version before comparing to the
+ recipe version.
+
+ In this case, the parameter of the function is 8.3_p1.
+ If the version uses the Release Candidate format, "rc",
+ this function replaces the '_' by '-'.
+ If the version uses the Update format, "p",
+ this function removes the '_' completely.
+ """
+ import re
+
+ matches = re.match('^([0-9.]+)_((p|rc)[0-9]+)$', version)
+
+ if not matches:
+ return version
+
+ version = matches.group(1)
+ update = matches.group(2)
+
+ if matches.group(3) == "rc":
+ return version + '-' + update
+
+ return version + update
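
For illustration only (not part of the patch), a sketch of how the helpers added above behave, assuming meta/lib is on sys.path so oe.cve_check can be imported; the product string and versions are made-up examples.

    from oe.cve_check import convert_cve_version, get_cpe_ids

    # The first product carries an explicit vendor, the second falls back to the "*" wildcard.
    print(get_cpe_ids("openssh_project:openssh zlib", "8.3p1+gitAUTOINC+123abc"))
    # ['cpe:2.3:a:openssh_project:openssh:8.3p1:*:*:*:*:*:*:*',
    #  'cpe:2.3:a:*:zlib:8.3p1:*:*:*:*:*:*:*']

    print(convert_cve_version("8.3_p1"))    # '8.3p1'   (update suffix: '_' removed)
    print(convert_cve_version("6.2_rc1"))   # '6.2-rc1' (release candidate: '_' -> '-')
    print(convert_cve_version("1.2.3"))     # '1.2.3'   (no update part: unchanged)
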
diff --git a/meta/lib/oe/license.py b/meta/lib/oe/license.py
index c1274a61de..c4efbe142b 100644
--- a/meta/lib/oe/license.py
+++ b/meta/lib/oe/license.py
@@ -81,6 +81,9 @@ class FlattenVisitor(LicenseVisitor):
def visit_Str(self, node):
self.licenses.append(node.s)
+ def visit_Constant(self, node):
+ self.licenses.append(node.value)
+
def visit_BinOp(self, node):
if isinstance(node.op, ast.BitOr):
left = FlattenVisitor(self.choose_licenses)
@@ -234,6 +237,9 @@ class ListVisitor(LicenseVisitor):
def visit_Str(self, node):
self.licenses.add(node.s)
+ def visit_Constant(self, node):
+ self.licenses.add(node.value)
+
def list_licenses(licensestr):
"""Simply get a list of all licenses mentioned in a license string.
Binary operators are not applied or taken into account in any way"""
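
A small sketch (not part of the patch) of why visit_Constant is added alongside visit_Str: oe.license quotes the license names and parses the result with ast, and on Python 3.8+ those quoted strings become ast.Constant nodes while ast.Str is deprecated, so a visitor with only visit_Str misses them. The expression below is a made-up example.

    import ast

    class Collector(ast.NodeVisitor):
        def __init__(self):
            self.licenses = []

        def visit_Str(self, node):        # node type used before Python 3.8
            self.licenses.append(node.s)

        def visit_Constant(self, node):   # node type used on Python 3.8+
            self.licenses.append(node.value)

    v = Collector()
    v.visit(ast.parse('"MIT" | "GPL-2.0-only"', mode="eval"))
    print(v.licenses)   # ['MIT', 'GPL-2.0-only']
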
diff --git a/meta/lib/oe/package_manager.py b/meta/lib/oe/package_manager.py
index db988d9247..502dfbe3ed 100644
--- a/meta/lib/oe/package_manager.py
+++ b/meta/lib/oe/package_manager.py
@@ -611,12 +611,13 @@ class PackageManager(object, metaclass=ABCMeta):
"'%s' returned %d:\n%s" %
(' '.join(cmd), e.returncode, e.output.decode("utf-8")))
- target_arch = self.d.getVar('TARGET_ARCH')
- localedir = oe.path.join(self.target_rootfs, self.d.getVar("libdir"), "locale")
- if os.path.exists(localedir) and os.listdir(localedir):
- generate_locale_archive(self.d, self.target_rootfs, target_arch, localedir)
- # And now delete the binary locales
- self.remove(fnmatch.filter(self.list_installed(), "glibc-binary-localedata-*"), False)
+ if self.d.getVar('IMAGE_LOCALES_ARCHIVE') == '1':
+ target_arch = self.d.getVar('TARGET_ARCH')
+ localedir = oe.path.join(self.target_rootfs, self.d.getVar("libdir"), "locale")
+ if os.path.exists(localedir) and os.listdir(localedir):
+ generate_locale_archive(self.d, self.target_rootfs, target_arch, localedir)
+ # And now delete the binary locales
+ self.remove(fnmatch.filter(self.list_installed(), "glibc-binary-localedata-*"), False)
def deploy_dir_lock(self):
if self.deploy_dir is None:
diff --git a/meta/lib/oe/packagedata.py b/meta/lib/oe/packagedata.py
index a82085a792..feb834c0e3 100644
--- a/meta/lib/oe/packagedata.py
+++ b/meta/lib/oe/packagedata.py
@@ -57,6 +57,17 @@ def read_subpkgdata_dict(pkg, d):
ret[newvar] = subd[var]
return ret
+def read_subpkgdata_extended(pkg, d):
+ import json
+ import gzip
+
+ fn = d.expand("${PKGDATA_DIR}/extended/%s.json.gz" % pkg)
+ try:
+ with gzip.open(fn, "rt", encoding="utf-8") as f:
+ return json.load(f)
+ except FileNotFoundError:
+ return None
+
def _pkgmap(d):
"""Return a dictionary mapping package to recipe name."""
diff --git a/meta/lib/oe/patch.py b/meta/lib/oe/patch.py
index 7cd8436da5..feb6ee7082 100644
--- a/meta/lib/oe/patch.py
+++ b/meta/lib/oe/patch.py
@@ -2,6 +2,9 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import os
+import shlex
+import subprocess
import oe.path
import oe.types
@@ -24,7 +27,6 @@ class CmdError(bb.BBHandledException):
def runcmd(args, dir = None):
- import pipes
import subprocess
if dir:
@@ -35,7 +37,7 @@ def runcmd(args, dir = None):
# print("cwd: %s -> %s" % (olddir, dir))
try:
- args = [ pipes.quote(str(arg)) for arg in args ]
+ args = [ shlex.quote(str(arg)) for arg in args ]
cmd = " ".join(args)
# print("cmd: %s" % cmd)
(exitstatus, output) = subprocess.getstatusoutput(cmd)
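
A sketch (not part of the patch) showing that shlex.quote is a drop-in replacement for pipes.quote in runcmd(): the pipes module is deprecated (PEP 594), and shlex.quote produces the same shell-safe quoting.

    import shlex
    import subprocess

    args = ["echo", "hello world", "it's quoted"]
    cmd = " ".join(shlex.quote(str(arg)) for arg in args)
    print(cmd)                               # echo 'hello world' 'it'"'"'s quoted'
    print(subprocess.getstatusoutput(cmd))   # (0, "hello world it's quoted")
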
diff --git a/meta/lib/oe/reproducible.py b/meta/lib/oe/reproducible.py
index 204b9bd734..1ed79b18ca 100644
--- a/meta/lib/oe/reproducible.py
+++ b/meta/lib/oe/reproducible.py
@@ -41,7 +41,7 @@ def find_git_folder(d, sourcedir):
for root, dirs, files in os.walk(workdir, topdown=True):
dirs[:] = [d for d in dirs if d not in exclude]
if '.git' in dirs:
- return root
+ return os.path.join(root, ".git")
bb.warn("Failed to find a git repository in WORKDIR: %s" % workdir)
return None
@@ -62,7 +62,8 @@ def get_source_date_epoch_from_git(d, sourcedir):
return None
bb.debug(1, "git repository: %s" % gitpath)
- p = subprocess.run(['git', '--git-dir', gitpath, 'log', '-1', '--pretty=%ct'], check=True, stdout=subprocess.PIPE)
+ p = subprocess.run(['git', '-c', 'log.showSignature=false', '--git-dir', gitpath, 'log', '-1', '--pretty=%ct'],
+ check=True, stdout=subprocess.PIPE)
return int(p.stdout.decode('utf-8'))
def get_source_date_epoch_from_youngest_file(d, sourcedir):
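
A sketch (not part of the patch) of the command the change above runs: forcing log.showSignature=false keeps the output to a single integer even when commits are signed and the user's git config turns signature display on. '/path/to/repo/.git' is a placeholder.

    import subprocess

    p = subprocess.run(
        ['git', '-c', 'log.showSignature=false', '--git-dir', '/path/to/repo/.git',
         'log', '-1', '--pretty=%ct'],
        check=True, stdout=subprocess.PIPE)

    source_date_epoch = int(p.stdout.decode('utf-8'))   # committer timestamp of HEAD
    print(source_date_epoch)
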
diff --git a/meta/lib/oe/rootfs.py b/meta/lib/oe/rootfs.py
index 9e9f7f1f08..5391c25af9 100644
--- a/meta/lib/oe/rootfs.py
+++ b/meta/lib/oe/rootfs.py
@@ -321,7 +321,9 @@ class Rootfs(object, metaclass=ABCMeta):
if not os.path.exists(kernel_abi_ver_file):
bb.fatal("No kernel-abiversion file found (%s), cannot run depmod, aborting" % kernel_abi_ver_file)
- kernel_ver = open(kernel_abi_ver_file).read().strip(' \n')
+ with open(kernel_abi_ver_file) as f:
+ kernel_ver = f.read().strip(' \n')
+
versioned_modules_dir = os.path.join(self.image_rootfs, modules_dir, kernel_ver)
bb.utils.mkdirhier(versioned_modules_dir)
diff --git a/meta/lib/oe/sbom.py b/meta/lib/oe/sbom.py
new file mode 100644
index 0000000000..22ed5070ea
--- /dev/null
+++ b/meta/lib/oe/sbom.py
@@ -0,0 +1,84 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import collections
+import os.path
+
+DepRecipe = collections.namedtuple("DepRecipe", ("doc", "doc_sha1", "recipe"))
+DepSource = collections.namedtuple("DepSource", ("doc", "doc_sha1", "recipe", "file"))
+
+
+def get_recipe_spdxid(d):
+ return "SPDXRef-%s-%s" % ("Recipe", d.getVar("PN"))
+
+
+def get_download_spdxid(d, idx):
+ return "SPDXRef-Download-%s-%d" % (d.getVar("PN"), idx)
+
+
+def get_package_spdxid(pkg):
+ return "SPDXRef-Package-%s" % pkg
+
+
+def get_source_file_spdxid(d, idx):
+ return "SPDXRef-SourceFile-%s-%d" % (d.getVar("PN"), idx)
+
+
+def get_packaged_file_spdxid(pkg, idx):
+ return "SPDXRef-PackagedFile-%s-%d" % (pkg, idx)
+
+
+def get_image_spdxid(img):
+ return "SPDXRef-Image-%s" % img
+
+
+def get_sdk_spdxid(sdk):
+ return "SPDXRef-SDK-%s" % sdk
+
+
+def write_doc(d, spdx_doc, subdir, spdx_deploy=None, indent=None):
+ from pathlib import Path
+
+ if spdx_deploy is None:
+ spdx_deploy = Path(d.getVar("SPDXDEPLOY"))
+
+ dest = spdx_deploy / subdir / (spdx_doc.name + ".spdx.json")
+ dest.parent.mkdir(exist_ok=True, parents=True)
+ with dest.open("wb") as f:
+ doc_sha1 = spdx_doc.to_json(f, sort_keys=True, indent=indent)
+
+ l = spdx_deploy / "by-namespace" / spdx_doc.documentNamespace.replace("/", "_")
+ l.parent.mkdir(exist_ok=True, parents=True)
+ l.symlink_to(os.path.relpath(dest, l.parent))
+
+ return doc_sha1
+
+
+def read_doc(fn):
+ import hashlib
+ import oe.spdx
+ import io
+ import contextlib
+
+ @contextlib.contextmanager
+ def get_file():
+ if isinstance(fn, io.IOBase):
+ yield fn
+ else:
+ with fn.open("rb") as f:
+ yield f
+
+ with get_file() as f:
+ sha1 = hashlib.sha1()
+ while True:
+ chunk = f.read(4096)
+ if not chunk:
+ break
+ sha1.update(chunk)
+
+ f.seek(0)
+ doc = oe.spdx.SPDXDocument.from_json(f)
+
+ return (doc, sha1.hexdigest())
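
A sketch (not part of the patch) of how read_doc() above is meant to be used: it accepts either an open binary file object or a path-like object with .open(), and returns the parsed document together with the SHA-1 of its bytes. 'doc.spdx.json' is a placeholder path.

    from pathlib import Path

    import oe.sbom

    doc, doc_sha1 = oe.sbom.read_doc(Path("doc.spdx.json"))
    print(doc.name, doc_sha1)
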
diff --git a/meta/lib/oe/spdx.py b/meta/lib/oe/spdx.py
new file mode 100644
index 0000000000..7aaf2af5ed
--- /dev/null
+++ b/meta/lib/oe/spdx.py
@@ -0,0 +1,357 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+#
+# This library is intended to capture the JSON SPDX specification in a type
+# safe manner. It is not intended to encode any particular OE specific
+# behaviors, see the sbom.py for that.
+#
+# The SPDX specification document doesn't cover the JSON syntax for every
+# particular construct, which can make it hard to determine what the JSON
+# syntax should be. I've found it is actually much simpler to read the official
+# SPDX JSON schema which can be found here: https://github.com/spdx/spdx-spec
+# in schemas/spdx-schema.json
+#
+
+import hashlib
+import itertools
+import json
+
+SPDX_VERSION = "2.2"
+
+
+#
+# The following are the support classes that are used to implement SPDX objects
+#
+
+class _Property(object):
+ """
+ A generic SPDX object property. The different types will derive from this
+ class
+ """
+
+ def __init__(self, *, default=None):
+ self.default = default
+
+ def setdefault(self, dest, name):
+ if self.default is not None:
+ dest.setdefault(name, self.default)
+
+
+class _String(_Property):
+ """
+ A scalar string property for an SPDX object
+ """
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+
+ def set_property(self, attrs, name):
+ def get_helper(obj):
+ return obj._spdx[name]
+
+ def set_helper(obj, value):
+ obj._spdx[name] = value
+
+ def del_helper(obj):
+ del obj._spdx[name]
+
+ attrs[name] = property(get_helper, set_helper, del_helper)
+
+ def init(self, source):
+ return source
+
+
+class _Object(_Property):
+ """
+ A scalar SPDX object as a property of another SPDX object
+ """
+
+ def __init__(self, cls, **kwargs):
+ super().__init__(**kwargs)
+ self.cls = cls
+
+ def set_property(self, attrs, name):
+ def get_helper(obj):
+ if not name in obj._spdx:
+ obj._spdx[name] = self.cls()
+ return obj._spdx[name]
+
+ def set_helper(obj, value):
+ obj._spdx[name] = value
+
+ def del_helper(obj):
+ del obj._spdx[name]
+
+ attrs[name] = property(get_helper, set_helper)
+
+ def init(self, source):
+ return self.cls(**source)
+
+
+class _ListProperty(_Property):
+ """
+ A list of SPDX properties
+ """
+
+ def __init__(self, prop, **kwargs):
+ super().__init__(**kwargs)
+ self.prop = prop
+
+ def set_property(self, attrs, name):
+ def get_helper(obj):
+ if not name in obj._spdx:
+ obj._spdx[name] = []
+ return obj._spdx[name]
+
+ def set_helper(obj, value):
+ obj._spdx[name] = list(value)
+
+ def del_helper(obj):
+ del obj._spdx[name]
+
+ attrs[name] = property(get_helper, set_helper, del_helper)
+
+ def init(self, source):
+ return [self.prop.init(o) for o in source]
+
+
+class _StringList(_ListProperty):
+ """
+ A list of strings as a property for an SPDX object
+ """
+
+ def __init__(self, **kwargs):
+ super().__init__(_String(), **kwargs)
+
+
+class _ObjectList(_ListProperty):
+ """
+ A list of SPDX objects as a property for an SPDX object
+ """
+
+ def __init__(self, cls, **kwargs):
+ super().__init__(_Object(cls), **kwargs)
+
+
+class MetaSPDXObject(type):
+ """
+ A metaclass that allows properties (anything derived from a _Property
+ class) to be defined for a SPDX object
+ """
+ def __new__(mcls, name, bases, attrs):
+ attrs["_properties"] = {}
+
+ for key in attrs.keys():
+ if isinstance(attrs[key], _Property):
+ prop = attrs[key]
+ attrs["_properties"][key] = prop
+ prop.set_property(attrs, key)
+
+ return super().__new__(mcls, name, bases, attrs)
+
+
+class SPDXObject(metaclass=MetaSPDXObject):
+ """
+ The base SPDX object; all SPDX spec classes must derive from this class
+ """
+ def __init__(self, **d):
+ self._spdx = {}
+
+ for name, prop in self._properties.items():
+ prop.setdefault(self._spdx, name)
+ if name in d:
+ self._spdx[name] = prop.init(d[name])
+
+ def serializer(self):
+ return self._spdx
+
+ def __setattr__(self, name, value):
+ if name in self._properties or name == "_spdx":
+ super().__setattr__(name, value)
+ return
+ raise KeyError("%r is not a valid SPDX property" % name)
+
+#
+# These are the SPDX objects implemented from the spec. The *only* properties
+# that can be added to these objects are ones directly specified in the SPDX
+# spec, however you may add helper functions to make operations easier.
+#
+# Defaults should *only* be specified if the SPDX spec says there is a certain
+# required value for a field (e.g. dataLicense), or if the field is mandatory
+# and has some sane "this field is unknown" value (e.g. "NOASSERTION")
+#
+
+class SPDXAnnotation(SPDXObject):
+ annotationDate = _String()
+ annotationType = _String()
+ annotator = _String()
+ comment = _String()
+
+class SPDXChecksum(SPDXObject):
+ algorithm = _String()
+ checksumValue = _String()
+
+
+class SPDXRelationship(SPDXObject):
+ spdxElementId = _String()
+ relatedSpdxElement = _String()
+ relationshipType = _String()
+ comment = _String()
+ annotations = _ObjectList(SPDXAnnotation)
+
+
+class SPDXExternalReference(SPDXObject):
+ referenceCategory = _String()
+ referenceType = _String()
+ referenceLocator = _String()
+
+
+class SPDXPackageVerificationCode(SPDXObject):
+ packageVerificationCodeValue = _String()
+ packageVerificationCodeExcludedFiles = _StringList()
+
+
+class SPDXPackage(SPDXObject):
+ ALLOWED_CHECKSUMS = [
+ "SHA1",
+ "SHA224",
+ "SHA256",
+ "SHA384",
+ "SHA512",
+ "MD2",
+ "MD4",
+ "MD5",
+ "MD6",
+ ]
+
+ name = _String()
+ SPDXID = _String()
+ versionInfo = _String()
+ downloadLocation = _String(default="NOASSERTION")
+ supplier = _String(default="NOASSERTION")
+ homepage = _String()
+ licenseConcluded = _String(default="NOASSERTION")
+ licenseDeclared = _String(default="NOASSERTION")
+ summary = _String()
+ description = _String()
+ sourceInfo = _String()
+ copyrightText = _String(default="NOASSERTION")
+ licenseInfoFromFiles = _StringList(default=["NOASSERTION"])
+ externalRefs = _ObjectList(SPDXExternalReference)
+ packageVerificationCode = _Object(SPDXPackageVerificationCode)
+ hasFiles = _StringList()
+ packageFileName = _String()
+ annotations = _ObjectList(SPDXAnnotation)
+ checksums = _ObjectList(SPDXChecksum)
+
+
+class SPDXFile(SPDXObject):
+ SPDXID = _String()
+ fileName = _String()
+ licenseConcluded = _String(default="NOASSERTION")
+ copyrightText = _String(default="NOASSERTION")
+ licenseInfoInFiles = _StringList(default=["NOASSERTION"])
+ checksums = _ObjectList(SPDXChecksum)
+ fileTypes = _StringList()
+
+
+class SPDXCreationInfo(SPDXObject):
+ created = _String()
+ licenseListVersion = _String()
+ comment = _String()
+ creators = _StringList()
+
+
+class SPDXExternalDocumentRef(SPDXObject):
+ externalDocumentId = _String()
+ spdxDocument = _String()
+ checksum = _Object(SPDXChecksum)
+
+
+class SPDXExtractedLicensingInfo(SPDXObject):
+ name = _String()
+ comment = _String()
+ licenseId = _String()
+ extractedText = _String()
+
+
+class SPDXDocument(SPDXObject):
+ spdxVersion = _String(default="SPDX-" + SPDX_VERSION)
+ dataLicense = _String(default="CC0-1.0")
+ SPDXID = _String(default="SPDXRef-DOCUMENT")
+ name = _String()
+ documentNamespace = _String()
+ creationInfo = _Object(SPDXCreationInfo)
+ packages = _ObjectList(SPDXPackage)
+ files = _ObjectList(SPDXFile)
+ relationships = _ObjectList(SPDXRelationship)
+ externalDocumentRefs = _ObjectList(SPDXExternalDocumentRef)
+ hasExtractedLicensingInfos = _ObjectList(SPDXExtractedLicensingInfo)
+
+ def __init__(self, **d):
+ super().__init__(**d)
+
+ def to_json(self, f, *, sort_keys=False, indent=None, separators=None):
+ class Encoder(json.JSONEncoder):
+ def default(self, o):
+ if isinstance(o, SPDXObject):
+ return o.serializer()
+
+ return super().default(o)
+
+ sha1 = hashlib.sha1()
+ for chunk in Encoder(
+ sort_keys=sort_keys,
+ indent=indent,
+ separators=separators,
+ ).iterencode(self):
+ chunk = chunk.encode("utf-8")
+ f.write(chunk)
+ sha1.update(chunk)
+
+ return sha1.hexdigest()
+
+ @classmethod
+ def from_json(cls, f):
+ return cls(**json.load(f))
+
+ def add_relationship(self, _from, relationship, _to, *, comment=None, annotation=None):
+ if isinstance(_from, SPDXObject):
+ from_spdxid = _from.SPDXID
+ else:
+ from_spdxid = _from
+
+ if isinstance(_to, SPDXObject):
+ to_spdxid = _to.SPDXID
+ else:
+ to_spdxid = _to
+
+ r = SPDXRelationship(
+ spdxElementId=from_spdxid,
+ relatedSpdxElement=to_spdxid,
+ relationshipType=relationship,
+ )
+
+ if comment is not None:
+ r.comment = comment
+
+ if annotation is not None:
+ r.annotations.append(annotation)
+
+ self.relationships.append(r)
+
+ def find_by_spdxid(self, spdxid):
+ for o in itertools.chain(self.packages, self.files):
+ if o.SPDXID == spdxid:
+ return o
+ return None
+
+ def find_external_document_ref(self, namespace):
+ for r in self.externalDocumentRefs:
+ if r.spdxDocument == namespace:
+ return r
+ return None
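
A sketch (not part of the patch) of the intended use of the classes above: build a document, attach a package, record a relationship, and serialize it with to_json(). All field values are made-up examples.

    import io

    import oe.spdx

    doc = oe.spdx.SPDXDocument()
    doc.name = "example-doc"
    doc.documentNamespace = "http://example.com/spdxdocs/example-doc"

    pkg = oe.spdx.SPDXPackage()
    pkg.name = "example"
    pkg.SPDXID = "SPDXRef-Package-example"
    pkg.versionInfo = "1.0"
    doc.packages.append(pkg)

    # DESCRIBES relationship between the document and the package
    doc.add_relationship(doc, "DESCRIBES", pkg)

    buf = io.BytesIO()
    doc_sha1 = doc.to_json(buf, sort_keys=True, indent=2)   # returns the SHA-1 of the JSON bytes
    print(doc_sha1)
    print(buf.getvalue().decode("utf-8")[:120])
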
diff --git a/meta/lib/oe/sstatesig.py b/meta/lib/oe/sstatesig.py
index aeceb100d7..65bb4efe25 100644
--- a/meta/lib/oe/sstatesig.py
+++ b/meta/lib/oe/sstatesig.py
@@ -480,8 +480,10 @@ def OEOuthashBasic(path, sigfile, task, d):
if "package_write_" in task or task == "package_qa":
include_owners = False
include_timestamps = False
+ include_root = True
if task == "package":
include_timestamps = d.getVar('BUILD_REPRODUCIBLE_BINARIES') == '1'
+ include_root = False
extra_content = d.getVar('HASHEQUIV_HASH_VERSION')
try:
@@ -592,7 +594,8 @@ def OEOuthashBasic(path, sigfile, task, d):
update_hash("\n")
# Process this directory and all its child files
- process(root)
+ if include_root or root != ".":
+ process(root)
for f in files:
if f == 'fixmepath':
continue
diff --git a/meta/lib/oe/terminal.py b/meta/lib/oe/terminal.py
index 2ac39df9e1..a0c166d884 100644
--- a/meta/lib/oe/terminal.py
+++ b/meta/lib/oe/terminal.py
@@ -102,6 +102,10 @@ class Rxvt(XTerminal):
command = 'rxvt -T "{title}" -e {command}'
priority = 1
+class URxvt(XTerminal):
+ command = 'urxvt -T "{title}" -e {command}'
+ priority = 1
+
class Screen(Terminal):
command = 'screen -D -m -t "{title}" -S devshell {command}'
diff --git a/meta/lib/oe/utils.py b/meta/lib/oe/utils.py
index 83d298906b..3e016244c5 100644
--- a/meta/lib/oe/utils.py
+++ b/meta/lib/oe/utils.py
@@ -481,7 +481,8 @@ class ThreadedWorker(Thread):
try:
func(self, *args, **kargs)
except Exception as e:
- print(e)
+ # Eat all exceptions
+ bb.mainlogger.debug("Worker task raised %s" % e, exc_info=e)
finally:
self.tasks.task_done()
diff --git a/meta/lib/oeqa/core/target/ssh.py b/meta/lib/oeqa/core/target/ssh.py
index aefb576805..832b6216f6 100644
--- a/meta/lib/oeqa/core/target/ssh.py
+++ b/meta/lib/oeqa/core/target/ssh.py
@@ -34,6 +34,7 @@ class OESSHTarget(OETarget):
self.timeout = timeout
self.user = user
ssh_options = [
+ '-o', 'HostKeyAlgorithms=+ssh-rsa',
'-o', 'UserKnownHostsFile=/dev/null',
'-o', 'StrictHostKeyChecking=no',
'-o', 'LogLevel=ERROR'
@@ -225,6 +226,9 @@ def SSHCall(command, logger, timeout=None, **opts):
endtime = time.time() + timeout
except InterruptedError:
continue
+ except BlockingIOError:
+ logger.debug('BlockingIOError')
+ continue
# process hasn't returned yet
if not eof:
diff --git a/meta/lib/oeqa/manual/eclipse-plugin.json b/meta/lib/oeqa/manual/eclipse-plugin.json
index d77d0e673b..6c110d0656 100644
--- a/meta/lib/oeqa/manual/eclipse-plugin.json
+++ b/meta/lib/oeqa/manual/eclipse-plugin.json
@@ -44,7 +44,7 @@
"expected_results": ""
},
"2": {
- "action": "wget autobuilder.yoctoproject.org/pub/releases//machines/qemu/qemux86/qemu (ex:core-image-sato-sdk-qemux86-date-rootfs-tar-bz2) \nsource /opt/poky/version/environment-setup-i585-poky-linux \n\nExtract qemu with runqemu-extract-sdk /home/user/file(ex.core-image-sato-sdk-qemux86.bz2) \n/home/user/qemux86-sato-sdk \n\n",
+ "action": "wget https://downloads.yoctoproject.org/releases/yocto/yocto-$VERSION/machines/qemu/qemux86/ (ex:core-image-sato-sdk-qemux86-date-rootfs-tar-bz2) \nsource /opt/poky/version/environment-setup-i585-poky-linux \n\nExtract qemu with runqemu-extract-sdk /home/user/file(ex.core-image-sato-sdk-qemux86.bz2) \n/home/user/qemux86-sato-sdk \n\n",
"expected_results": " Qemu can be lauched normally."
},
"3": {
@@ -60,7 +60,7 @@
"expected_results": ""
},
"6": {
- "action": "(d) QEMU: \nSelect this option if you will be using the QEMU emulator. Specify the Kernel matching the QEMU architecture you are using. \n wget autobuilder.yoctoproject.org/pub/releases//machines/qemu/qemux86/bzImage-qemux86.bin \n e.g: /home/$USER/yocto/adt-installer/download_image/bzImage-qemux86.bin \n\n",
+ "action": "(d) QEMU: \nSelect this option if you will be using the QEMU emulator. Specify the Kernel matching the QEMU architecture you are using. \n wget https://downloads.yoctoproject.org/releases/yocto/yocto-$VERSION/machines/qemu/qemux86/bzImage-qemux86.bin \n e.g: /home/$USER/yocto/adt-installer/download_image/bzImage-qemux86.bin \n\n",
"expected_results": ""
},
"7": {
@@ -247,7 +247,7 @@
"execution": {
"1": {
"action": "Clone eclipse-poky source. \n \n - git clone git://git.yoctoproject.org/eclipse-poky \n\n",
- "expected_results": "Eclipse plugin is successfully installed \n\nDocumentation is there. For example if you have release yocto-2.0.1 you will found on http://autobuilder.yoctoproject.org/pub/releases/yocto-2.0.1/eclipse-plugin/mars/ archive with documentation like org.yocto.doc-development-$date.zip \n \n"
+ "expected_results": "Eclipse plugin is successfully installed \n\nDocumentation is there. For example if you have release yocto-2.0.1 you will found on https://downloads.yoctoproject.org/releases/yocto/yocto-2.0.1/eclipse-plugin/mars/ archive with documentation like org.yocto.doc-development-$date.zip \n \n"
},
"2": {
"action": "Checkout correct tag. \n\n - git checkout <eclipse-version>/<yocto-version> \n\n",
diff --git a/meta/lib/oeqa/manual/toaster-managed-mode.json b/meta/lib/oeqa/manual/toaster-managed-mode.json
index 12374c7c64..9566d9d10e 100644
--- a/meta/lib/oeqa/manual/toaster-managed-mode.json
+++ b/meta/lib/oeqa/manual/toaster-managed-mode.json
@@ -136,7 +136,7 @@
"expected_results": ""
},
"3": {
- "action": "Check that default values are as follows: \n\tDISTRO - poky \n\tIMAGE_FSTYPES - ext3 jffs2 tar.bz2 \n\tIMAGE_INSTALL_append - \"Not set\" \n\tPACKAGE_CLASES - package_rpm \n SSTATE_DIR - /homeDirectory/poky/sstate-cache \n\n",
+ "action": "Check that default values are as follows: \n\tDISTRO - poky \n\tIMAGE_FSTYPES - ext3 jffs2 tar.bz2 \n\tIMAGE_INSTALL_append - \"Not set\" \n\tPACKAGE_CLASSES - package_rpm \n SSTATE_DIR - /homeDirectory/poky/sstate-cache \n\n",
"expected_results": ""
},
"4": {
diff --git a/meta/lib/oeqa/runtime/cases/date.py b/meta/lib/oeqa/runtime/cases/date.py
index fdd2a6ae58..bd6537400e 100644
--- a/meta/lib/oeqa/runtime/cases/date.py
+++ b/meta/lib/oeqa/runtime/cases/date.py
@@ -13,12 +13,12 @@ class DateTest(OERuntimeTestCase):
def setUp(self):
if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd':
self.logger.debug('Stopping systemd-timesyncd daemon')
- self.target.run('systemctl disable --now systemd-timesyncd')
+ self.target.run('systemctl disable --now --runtime systemd-timesyncd')
def tearDown(self):
if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd':
self.logger.debug('Starting systemd-timesyncd daemon')
- self.target.run('systemctl enable --now systemd-timesyncd')
+ self.target.run('systemctl enable --now --runtime systemd-timesyncd')
@OETestDepends(['ssh.SSHTest.test_ssh'])
@OEHasPackage(['coreutils', 'busybox'])
@@ -28,14 +28,13 @@ class DateTest(OERuntimeTestCase):
self.assertEqual(status, 0, msg=msg)
oldDate = output
- sampleDate = '"2016-08-09 10:00:00"'
- (status, output) = self.target.run("date -s %s" % sampleDate)
+ sampleTimestamp = 1488800000
+ (status, output) = self.target.run("date -s @%d" % sampleTimestamp)
self.assertEqual(status, 0, msg='Date set failed, output: %s' % output)
- (status, output) = self.target.run("date -R")
- p = re.match('Tue, 09 Aug 2016 10:00:.. \+0000', output)
+ (status, output) = self.target.run('date +"%s"')
msg = 'The date was not set correctly, output: %s' % output
- self.assertTrue(p, msg=msg)
+ self.assertTrue(int(output) - sampleTimestamp < 300, msg=msg)
(status, output) = self.target.run('date -s "%s"' % oldDate)
msg = 'Failed to reset date, output: %s' % output
diff --git a/meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py b/meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py
new file mode 100644
index 0000000000..e010612838
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py
@@ -0,0 +1,36 @@
+from oeqa.runtime.case import OERuntimeTestCase
+from oeqa.core.decorator.depends import OETestDepends
+from oeqa.core.decorator.data import skipIfQemu
+
+class Ethernet_Test(OERuntimeTestCase):
+
+ def set_ip(self, x):
+ x = x.split(".")
+ sample_host_address = '150'
+ x[3] = sample_host_address
+ x = '.'.join(x)
+ return x
+
+ @skipIfQemu('qemuall', 'Test only runs on real hardware')
+ @OETestDepends(['ssh.SSHTest.test_ssh'])
+ def test_set_virtual_ip(self):
+ (status, output) = self.target.run("ifconfig eth0 | grep 'inet ' | awk '{print $2}'")
+ self.assertEqual(status, 0, msg='Failed to get ip address. Make sure you have an ethernet connection on your device, output: %s' % output)
+ original_ip = output
+ virtual_ip = self.set_ip(original_ip)
+
+ (status, output) = self.target.run("ifconfig eth0:1 %s netmask 255.255.255.0 && sleep 2 && ping -c 5 %s && ifconfig eth0:1 down" % (virtual_ip,virtual_ip))
+ self.assertEqual(status, 0, msg='Failed to create virtual ip address, output: %s' % output)
+
+ @OETestDepends(['ethernet_ip_connman.Ethernet_Test.test_set_virtual_ip'])
+ def test_get_ip_from_dhcp(self):
+ (status, output) = self.target.run("connmanctl services | grep -E '*AO Wired|*AR Wired' | awk '{print $3}'")
+ self.assertEqual(status, 0, msg='No wired interfaces are detected, output: %s' % output)
+ wired_interfaces = output
+
+ (status, output) = self.target.run("ip route | grep default | awk '{print $3}'")
+ self.assertEqual(status, 0, msg='Failed to retrieve the default gateway, output: %s' % output)
+ default_gateway = output
+
+ (status, output) = self.target.run("connmanctl config %s --ipv4 dhcp && sleep 2 && ping -c 5 %s" % (wired_interfaces,default_gateway))
+ self.assertEqual(status, 0, msg='Failed to get dynamic IP address via DHCP in connmand, output: %s' % output)
\ No newline at end of file
diff --git a/meta/lib/oeqa/runtime/cases/ksample.py b/meta/lib/oeqa/runtime/cases/ksample.py
index a9a1620ebd..9883aa9aa8 100644
--- a/meta/lib/oeqa/runtime/cases/ksample.py
+++ b/meta/lib/oeqa/runtime/cases/ksample.py
@@ -10,7 +10,7 @@ from oeqa.core.decorator.depends import OETestDepends
from oeqa.core.decorator.data import skipIfNotFeature
# need some kernel fragments
-# echo "KERNEL_FEATURES_append += \" features\/kernel\-sample\/kernel\-sample.scc\"" >> local.conf
+# echo "KERNEL_FEATURES_append = \" features\/kernel\-sample\/kernel\-sample.scc\"" >> local.conf
class KSample(OERuntimeTestCase):
def cmd_and_check(self, cmd='', match_string=''):
status, output = self.target.run(cmd)
diff --git a/meta/lib/oeqa/runtime/cases/ltp.py b/meta/lib/oeqa/runtime/cases/ltp.py
index a66d5d13d7..879f2a673c 100644
--- a/meta/lib/oeqa/runtime/cases/ltp.py
+++ b/meta/lib/oeqa/runtime/cases/ltp.py
@@ -67,7 +67,7 @@ class LtpTest(LtpTestBase):
def runltp(self, ltp_group):
cmd = '/opt/ltp/runltp -f %s -p -q -r /opt/ltp -l /opt/ltp/results/%s -I 1 -d /opt/ltp' % (ltp_group, ltp_group)
starttime = time.time()
- (status, output) = self.target.run(cmd)
+ (status, output) = self.target.run(cmd, timeout=1200)
endtime = time.time()
with open(os.path.join(self.ltptest_log_dir, "%s-raw.log" % ltp_group), 'w') as f:
diff --git a/meta/lib/oeqa/runtime/cases/parselogs.py b/meta/lib/oeqa/runtime/cases/parselogs.py
index f703927660..1cac59725d 100644
--- a/meta/lib/oeqa/runtime/cases/parselogs.py
+++ b/meta/lib/oeqa/runtime/cases/parselogs.py
@@ -32,7 +32,7 @@ common_errors = [
"Failed to load module \"fbdev\"",
"Failed to load module fbdev",
"Failed to load module glx",
- "[drm] Cannot find any crtc or sizes - going 1024x768",
+ "[drm] Cannot find any crtc or sizes",
"_OSC failed (AE_NOT_FOUND); disabling ASPM",
"Open ACPI failed (/var/run/acpid.socket) (No such file or directory)",
"NX (Execute Disable) protection cannot be enabled: non-PAE kernel!",
@@ -61,6 +61,8 @@ common_errors = [
"[rdrand]: Initialization Failed",
"[pulseaudio] authkey.c: Failed to open cookie file",
"[pulseaudio] authkey.c: Failed to load authentication key",
+ "was skipped because of a failed condition check",
+ "was skipped because all trigger condition checks failed",
]
video_related = [
@@ -90,6 +92,7 @@ qemux86_common = [
"glamor initialization failed",
"blk_update_request: I/O error, dev fd0, sector 0 op 0x0:(READ)",
"floppy: error",
+ 'failed to IDENTIFY (I/O error, err_mask=0x4)',
] + common_errors
ignore_errors = {
@@ -295,7 +298,7 @@ class ParseLogsTest(OERuntimeTestCase):
grepcmd = 'grep '
grepcmd += '-Ei "'
for error in errors:
- grepcmd += '\<' + error + '\>' + '|'
+ grepcmd += r'\<' + error + r'\>' + '|'
grepcmd = grepcmd[:-1]
grepcmd += '" ' + str(log) + " | grep -Eiv \'"
@@ -306,13 +309,13 @@ class ParseLogsTest(OERuntimeTestCase):
errorlist = ignore_errors['default']
for ignore_error in errorlist:
- ignore_error = ignore_error.replace('(', '\(')
- ignore_error = ignore_error.replace(')', '\)')
+ ignore_error = ignore_error.replace('(', r'\(')
+ ignore_error = ignore_error.replace(')', r'\)')
ignore_error = ignore_error.replace("'", '.')
- ignore_error = ignore_error.replace('?', '\?')
- ignore_error = ignore_error.replace('[', '\[')
- ignore_error = ignore_error.replace(']', '\]')
- ignore_error = ignore_error.replace('*', '\*')
+ ignore_error = ignore_error.replace('?', r'\?')
+ ignore_error = ignore_error.replace('[', r'\[')
+ ignore_error = ignore_error.replace(']', r'\]')
+ ignore_error = ignore_error.replace('*', r'\*')
ignore_error = ignore_error.replace('0-9', '[0-9]')
grepcmd += ignore_error + '|'
grepcmd = grepcmd[:-1]
diff --git a/meta/lib/oeqa/runtime/cases/ping.py b/meta/lib/oeqa/runtime/cases/ping.py
index f6603f75ec..498f80d0a5 100644
--- a/meta/lib/oeqa/runtime/cases/ping.py
+++ b/meta/lib/oeqa/runtime/cases/ping.py
@@ -6,6 +6,7 @@ from subprocess import Popen, PIPE
from oeqa.runtime.case import OERuntimeTestCase
from oeqa.core.decorator.oetimeout import OETimeout
+from oeqa.core.exception import OEQATimeoutError
class PingTest(OERuntimeTestCase):
@@ -13,14 +14,17 @@ class PingTest(OERuntimeTestCase):
def test_ping(self):
output = ''
count = 0
- while count < 5:
- cmd = 'ping -c 1 %s' % self.target.ip
- proc = Popen(cmd, shell=True, stdout=PIPE)
- output += proc.communicate()[0].decode('utf-8')
- if proc.poll() == 0:
- count += 1
- else:
- count = 0
+ try:
+ while count < 5:
+ cmd = 'ping -c 1 %s' % self.target.ip
+ proc = Popen(cmd, shell=True, stdout=PIPE)
+ output += proc.communicate()[0].decode('utf-8')
+ if proc.poll() == 0:
+ count += 1
+ else:
+ count = 0
+ except OEQATimeoutError:
+ self.fail("Ping timeout error for address %s, count %s, output: %s" % (self.target.ip, count, output))
msg = ('Expected 5 consecutive, got %d.\n'
'ping output is:\n%s' % (count,output))
self.assertEqual(count, 5, msg = msg)
diff --git a/meta/lib/oeqa/runtime/cases/rpm.py b/meta/lib/oeqa/runtime/cases/rpm.py
index 7a9d62c003..203fcc8505 100644
--- a/meta/lib/oeqa/runtime/cases/rpm.py
+++ b/meta/lib/oeqa/runtime/cases/rpm.py
@@ -49,21 +49,20 @@ class RpmBasicTest(OERuntimeTestCase):
msg = 'status: %s. Cannot run rpm -qa: %s' % (status, output)
self.assertEqual(status, 0, msg=msg)
- def check_no_process_for_user(u):
- _, output = self.target.run(self.tc.target_cmds['ps'])
- if u + ' ' in output:
- return False
- else:
- return True
+ def wait_for_no_process_for_user(u, timeout = 120):
+ timeout_at = time.time() + timeout
+ while time.time() < timeout_at:
+ _, output = self.target.run(self.tc.target_cmds['ps'])
+ if u + ' ' not in output:
+ return
+ time.sleep(1)
+ user_pss = [ps for ps in output.split("\n") if u + ' ' in ps]
+ msg = "User %s has processes still running: %s" % (u, "\n".join(user_pss))
+ self.fail(msg=msg)
def unset_up_test_user(u):
# ensure no test1 process in running
- timeout = time.time() + 30
- while time.time() < timeout:
- if check_no_process_for_user(u):
- break
- else:
- time.sleep(1)
+ wait_for_no_process_for_user(u)
status, output = self.target.run('userdel -r %s' % u)
msg = 'Failed to erase user: %s' % output
self.assertTrue(status == 0, msg=msg)
diff --git a/meta/lib/oeqa/runtime/cases/rtc.py b/meta/lib/oeqa/runtime/cases/rtc.py
new file mode 100644
index 0000000000..39f4d29f23
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/rtc.py
@@ -0,0 +1,40 @@
+from oeqa.runtime.case import OERuntimeTestCase
+from oeqa.core.decorator.depends import OETestDepends
+from oeqa.core.decorator.data import skipIfFeature
+from oeqa.runtime.decorator.package import OEHasPackage
+
+import re
+
+class RTCTest(OERuntimeTestCase):
+
+ def setUp(self):
+ if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd':
+ self.logger.debug('Stopping systemd-timesyncd daemon')
+ self.target.run('systemctl disable --now --runtime systemd-timesyncd')
+
+ def tearDown(self):
+ if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd':
+ self.logger.debug('Starting systemd-timesyncd daemon')
+ self.target.run('systemctl enable --now --runtime systemd-timesyncd')
+
+ @skipIfFeature('read-only-rootfs',
+ 'Test does not work with read-only-rootfs in IMAGE_FEATURES')
+ @OETestDepends(['ssh.SSHTest.test_ssh'])
+ @OEHasPackage(['coreutils', 'busybox'])
+ def test_rtc(self):
+ (status, output) = self.target.run('hwclock -r')
+ self.assertEqual(status, 0, msg='Failed to get RTC time, output: %s' % output)
+
+ (status, current_datetime) = self.target.run('date +"%m%d%H%M%Y"')
+ self.assertEqual(status, 0, msg='Failed to get system current date & time, output: %s' % current_datetime)
+
+ example_datetime = '062309452008'
+ (status, output) = self.target.run('date %s ; hwclock -w ; hwclock -r' % example_datetime)
+ check_hwclock = re.search('2008-06-23 09:45:..', output)
+ self.assertTrue(check_hwclock, msg='The RTC time was not set correctly, output: %s' % output)
+
+ (status, output) = self.target.run('date %s' % current_datetime)
+ self.assertEqual(status, 0, msg='Failed to reset system date & time, output: %s' % output)
+
+ (status, output) = self.target.run('hwclock -w')
+ self.assertEqual(status, 0, msg='Failed to reset RTC time, output: %s' % output)
diff --git a/meta/lib/oeqa/runtime/cases/scp.py b/meta/lib/oeqa/runtime/cases/scp.py
index 3a5f292152..f2bbc947d6 100644
--- a/meta/lib/oeqa/runtime/cases/scp.py
+++ b/meta/lib/oeqa/runtime/cases/scp.py
@@ -23,7 +23,7 @@ class ScpTest(OERuntimeTestCase):
os.remove(cls.tmp_path)
@OETestDepends(['ssh.SSHTest.test_ssh'])
- @OEHasPackage(['openssh-scp', 'dropbear'])
+ @OEHasPackage(['openssh-scp'])
def test_scp_file(self):
dst = '/tmp/test_scp_file'
diff --git a/meta/lib/oeqa/runtime/cases/suspend.py b/meta/lib/oeqa/runtime/cases/suspend.py
new file mode 100644
index 0000000000..67b6f7e56f
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/suspend.py
@@ -0,0 +1,33 @@
+from oeqa.runtime.case import OERuntimeTestCase
+from oeqa.core.decorator.depends import OETestDepends
+from oeqa.core.decorator.data import skipIfQemu
+import threading
+import time
+
+class Suspend_Test(OERuntimeTestCase):
+
+ def test_date(self):
+ (status, output) = self.target.run('date')
+ self.assertEqual(status, 0, msg = 'Failed to run date command, output : %s' % output)
+
+ def test_ping(self):
+ t_thread = threading.Thread(target=self.target.run, args=("ping 8.8.8.8",))
+ t_thread.start()
+ time.sleep(2)
+
+ status, output = self.target.run('pidof ping')
+ self.target.run('kill -9 %s' % output)
+ self.assertEqual(status, 0, msg = 'Not able to find process that runs ping, output : %s' % output)
+
+ def set_suspend(self):
+ (status, output) = self.target.run('sudo rtcwake -m mem -s 10')
+ self.assertEqual(status, 0, msg = 'Failed to suspend the system to RAM, output : %s' % output)
+
+ @skipIfQemu('qemuall', 'Test only runs on real hardware')
+ @OETestDepends(['ssh.SSHTest.test_ssh'])
+ def test_suspend(self):
+ self.test_date()
+ self.test_ping()
+ self.set_suspend()
+ self.test_date()
+ self.test_ping()
diff --git a/meta/lib/oeqa/runtime/cases/terminal.py b/meta/lib/oeqa/runtime/cases/terminal.py
new file mode 100644
index 0000000000..8fcca99f47
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/terminal.py
@@ -0,0 +1,21 @@
+from oeqa.runtime.case import OERuntimeTestCase
+from oeqa.core.decorator.depends import OETestDepends
+from oeqa.runtime.decorator.package import OEHasPackage
+
+import threading
+import time
+
+class TerminalTest(OERuntimeTestCase):
+
+ @OEHasPackage(['matchbox-terminal'])
+ @OETestDepends(['ssh.SSHTest.test_ssh'])
+ def test_terminal_running(self):
+ t_thread = threading.Thread(target=self.target.run, args=("export DISPLAY=:0 && matchbox-terminal -e 'sh -c \"uname -a && exec sh\"'",))
+ t_thread.start()
+ time.sleep(2)
+
+ status, output = self.target.run('pidof matchbox-terminal')
+ number_of_terminal = len(output.split())
+ self.assertEqual(number_of_terminal, 1, msg='There should be only one terminal being launched. Number of terminals launched : %s' % number_of_terminal)
+ self.target.run('kill -9 %s' % output)
+ self.assertEqual(status, 0, msg='Not able to find process that runs terminal.')
diff --git a/meta/lib/oeqa/runtime/cases/usb_hid.py b/meta/lib/oeqa/runtime/cases/usb_hid.py
new file mode 100644
index 0000000000..3c292cf661
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/usb_hid.py
@@ -0,0 +1,22 @@
+from oeqa.runtime.case import OERuntimeTestCase
+from oeqa.core.decorator.depends import OETestDepends
+from oeqa.core.decorator.data import skipIfQemu
+from oeqa.runtime.decorator.package import OEHasPackage
+
+class USB_HID_Test(OERuntimeTestCase):
+
+ def keyboard_mouse_simulation(self):
+ (status, output) = self.target.run('export DISPLAY=:0 && xdotool key F2 && xdotool mousemove 100 100')
+ return self.assertEqual(status, 0, msg = 'Failed to simulate keyboard/mouse input event, output : %s' % output)
+
+ def set_suspend(self):
+ (status, output) = self.target.run('sudo rtcwake -m mem -s 10')
+ return self.assertEqual(status, 0, msg = 'Failed to suspend the system to RAM, output : %s' % output)
+
+ @OEHasPackage(['xdotool'])
+ @skipIfQemu('qemuall', 'Test only runs on real hardware')
+ @OETestDepends(['ssh.SSHTest.test_ssh'])
+ def test_USB_Hid_input(self):
+ self.keyboard_mouse_simulation()
+ self.set_suspend()
+ self.keyboard_mouse_simulation()
diff --git a/meta/lib/oeqa/runtime/context.py b/meta/lib/oeqa/runtime/context.py
index 3826f27642..8a0dbd0736 100644
--- a/meta/lib/oeqa/runtime/context.py
+++ b/meta/lib/oeqa/runtime/context.py
@@ -5,6 +5,7 @@
#
import os
+import sys
from oeqa.core.context import OETestContext, OETestContextExecutor
from oeqa.core.target.ssh import OESSHTarget
@@ -66,11 +67,11 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
% self.default_target_type)
runtime_group.add_argument('--target-ip', action='store',
default=self.default_target_ip,
- help="IP address of device under test, default: %s" \
+ help="IP address and optionally ssh port (default 22) of device under test, for example '192.168.0.7:22'. Default: %s" \
% self.default_target_ip)
runtime_group.add_argument('--server-ip', action='store',
default=self.default_target_ip,
- help="IP address of device under test, default: %s" \
+ help="IP address of the test host from test target machine, default: %s" \
% self.default_server_ip)
runtime_group.add_argument('--host-dumper-dir', action='store',
@@ -119,8 +120,7 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
# XXX: Don't base your targets on this code it will be refactored
# in the near future.
# Custom target module loading
- target_modules_path = kwargs.get('target_modules_path', '')
- controller = OERuntimeTestContextExecutor.getControllerModule(target_type, target_modules_path)
+ controller = OERuntimeTestContextExecutor.getControllerModule(target_type)
target = controller(logger, target_ip, server_ip, **kwargs)
return target
@@ -130,15 +130,15 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
# AttributeError raised if not found.
# ImportError raised if a provided module can not be imported.
@staticmethod
- def getControllerModule(target, target_modules_path):
- controllerslist = OERuntimeTestContextExecutor._getControllerModulenames(target_modules_path)
+ def getControllerModule(target):
+ controllerslist = OERuntimeTestContextExecutor._getControllerModulenames()
controller = OERuntimeTestContextExecutor._loadControllerFromName(target, controllerslist)
return controller
# Return a list of all python modules in lib/oeqa/controllers for each
# layer in bbpath
@staticmethod
- def _getControllerModulenames(target_modules_path):
+ def _getControllerModulenames():
controllerslist = []
@@ -153,9 +153,8 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
else:
raise RuntimeError("Duplicate controller module found for %s. Layers should create unique controller module names" % module)
- extpath = target_modules_path.split(':')
- for p in extpath:
- controllerpath = os.path.join(p, 'lib', 'oeqa', 'controllers')
+ for p in sys.path:
+ controllerpath = os.path.join(p, 'oeqa', 'controllers')
if os.path.exists(controllerpath):
add_controller_list(controllerpath)
return controllerslist
@@ -175,16 +174,12 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
# Search for and return a controller or None from given module name
@staticmethod
def _loadControllerFromModule(target, modulename):
- obj = None
- # import module, allowing it to raise import exception
- module = __import__(modulename, globals(), locals(), [target])
- # look for target class in the module, catching any exceptions as it
- # is valid that a module may not have the target class.
try:
- obj = getattr(module, target)
- except:
- obj = None
- return obj
+ import importlib
+ module = importlib.import_module(modulename)
+ return getattr(module, target)
+ except AttributeError:
+ return None
@staticmethod
def readPackagesManifest(manifest):
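
A sketch (not part of the patch) of the importlib-based lookup that replaces the __import__ call above; the helper name is hypothetical and the demonstration uses a stdlib module rather than a real controller module.

    import importlib

    def load_controller(target, modulename):
        """Return class `target` from `modulename`, or None if the module lacks it."""
        try:
            module = importlib.import_module(modulename)   # may raise ImportError
            return getattr(module, target)
        except AttributeError:
            return None

    print(load_controller("JSONDecoder", "json"))    # <class 'json.decoder.JSONDecoder'>
    print(load_controller("DoesNotExist", "json"))   # None
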
diff --git a/meta/lib/oeqa/sdk/cases/buildepoxy.py b/meta/lib/oeqa/sdk/cases/buildepoxy.py
index 385f8ccca8..f69f720cd6 100644
--- a/meta/lib/oeqa/sdk/cases/buildepoxy.py
+++ b/meta/lib/oeqa/sdk/cases/buildepoxy.py
@@ -17,7 +17,7 @@ class EpoxyTest(OESDKTestCase):
"""
def setUp(self):
if not (self.tc.hasHostPackage("nativesdk-meson")):
- raise unittest.SkipTest("GalculatorTest class: SDK doesn't contain Meson")
+ raise unittest.SkipTest("EpoxyTest class: SDK doesn't contain Meson")
def test_epoxy(self):
with tempfile.TemporaryDirectory(prefix="epoxy", dir=self.tc.sdk_dir) as testdir:
diff --git a/meta/lib/oeqa/selftest/cases/archiver.py b/meta/lib/oeqa/selftest/cases/archiver.py
index bc5447d2a3..6a5c8ec71e 100644
--- a/meta/lib/oeqa/selftest/cases/archiver.py
+++ b/meta/lib/oeqa/selftest/cases/archiver.py
@@ -35,11 +35,11 @@ class Archiver(OESelftestTestCase):
src_path = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['TARGET_SYS'])
# Check that include_recipe was included
- included_present = len(glob.glob(src_path + '/%s-*' % include_recipe))
+ included_present = len(glob.glob(src_path + '/%s-*/*' % include_recipe))
self.assertTrue(included_present, 'Recipe %s was not included.' % include_recipe)
# Check that exclude_recipe was excluded
- excluded_present = len(glob.glob(src_path + '/%s-*' % exclude_recipe))
+ excluded_present = len(glob.glob(src_path + '/%s-*/*' % exclude_recipe))
self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % exclude_recipe)
def test_archiver_filters_by_type(self):
@@ -67,11 +67,11 @@ class Archiver(OESelftestTestCase):
src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS'])
# Check that target_recipe was included
- included_present = len(glob.glob(src_path_target + '/%s-*' % target_recipe))
+ included_present = len(glob.glob(src_path_target + '/%s-*/*' % target_recipe))
self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipe)
# Check that native_recipe was excluded
- excluded_present = len(glob.glob(src_path_native + '/%s-*' % native_recipe))
+ excluded_present = len(glob.glob(src_path_native + '/%s-*/*' % native_recipe))
self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipe)
def test_archiver_filters_by_type_and_name(self):
@@ -104,17 +104,17 @@ class Archiver(OESelftestTestCase):
src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS'])
# Check that target_recipe[0] and native_recipes[1] were included
- included_present = len(glob.glob(src_path_target + '/%s-*' % target_recipes[0]))
+ included_present = len(glob.glob(src_path_target + '/%s-*/*' % target_recipes[0]))
self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipes[0])
- included_present = len(glob.glob(src_path_native + '/%s-*' % native_recipes[1]))
+ included_present = len(glob.glob(src_path_native + '/%s-*/*' % native_recipes[1]))
self.assertTrue(included_present, 'Recipe %s was not included.' % native_recipes[1])
# Check that native_recipes[0] and target_recipes[1] were excluded
- excluded_present = len(glob.glob(src_path_native + '/%s-*' % native_recipes[0]))
+ excluded_present = len(glob.glob(src_path_native + '/%s-*/*' % native_recipes[0]))
self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipes[0])
- excluded_present = len(glob.glob(src_path_target + '/%s-*' % target_recipes[1]))
+ excluded_present = len(glob.glob(src_path_target + '/%s-*/*' % target_recipes[1]))
self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % target_recipes[1])
diff --git a/meta/lib/oeqa/selftest/cases/bbtests.py b/meta/lib/oeqa/selftest/cases/bbtests.py
index dc423ec439..0b88316950 100644
--- a/meta/lib/oeqa/selftest/cases/bbtests.py
+++ b/meta/lib/oeqa/selftest/cases/bbtests.py
@@ -148,9 +148,6 @@ INHERIT_remove = \"report-error\"
self.delete_recipeinc('man-db')
self.assertEqual(result.status, 1, msg="Command succeded when it should have failed. bitbake output: %s" % result.output)
self.assertIn('Fetcher failure: Unable to find file file://invalid anywhere. The paths that were searched were:', result.output)
- line = self.getline(result, 'Fetcher failure for URL: \'file://invalid\'. Unable to fetch URL from any source.')
- self.assertTrue(line and line.startswith("ERROR:"), msg = "\"invalid\" file \
-doesn't exist, yet fetcher didn't report any error. bitbake output: %s" % result.output)
def test_rename_downloaded_file(self):
# TODO unique dldir instead of using cleanall
@@ -160,7 +157,7 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\"
""")
self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
- data = 'SRC_URI = "${GNU_MIRROR}/aspell/aspell-${PV}.tar.gz;downloadfilename=test-aspell.tar.gz"'
+ data = 'SRC_URI = "https://downloads.yoctoproject.org/mirror/sources/aspell-${PV}.tar.gz;downloadfilename=test-aspell.tar.gz"'
self.write_recipeinc('aspell', data)
result = bitbake('-f -c fetch aspell', ignore_status=True)
self.delete_recipeinc('aspell')
@@ -188,6 +185,10 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\"
self.assertTrue(find, "No version returned for searched recipe. bitbake output: %s" % result.output)
def test_prefile(self):
+ # Test when the prefile does not exist
+ result = runCmd('bitbake -r conf/prefile.conf', ignore_status=True)
+ self.assertEqual(1, result.status, "bitbake didn't error and should have when a specified prefile didn't exist: %s" % result.output)
+ # Test when the prefile exists
preconf = os.path.join(self.builddir, 'conf/prefile.conf')
self.track_for_cleanup(preconf)
ftools.write_file(preconf ,"TEST_PREFILE=\"prefile\"")
@@ -198,6 +199,10 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\"
self.assertIn('localconf', result.output)
def test_postfile(self):
+ # Test when the postfile does not exist
+ result = runCmd('bitbake -R conf/postfile.conf', ignore_status=True)
+ self.assertEqual(1, result.status, "bitbake didn't error and should have when a specified postfile didn't exist: %s" % result.output)
+ # Test when the postfile exists
postconf = os.path.join(self.builddir, 'conf/postfile.conf')
self.track_for_cleanup(postconf)
ftools.write_file(postconf , "TEST_POSTFILE=\"postfile\"")
diff --git a/meta/lib/oeqa/selftest/cases/cve_check.py b/meta/lib/oeqa/selftest/cases/cve_check.py
index d1947baffc..22ffeffd29 100644
--- a/meta/lib/oeqa/selftest/cases/cve_check.py
+++ b/meta/lib/oeqa/selftest/cases/cve_check.py
@@ -1,9 +1,13 @@
-from oe.cve_check import Version
+import json
+import os
from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_vars
class CVECheck(OESelftestTestCase):
def test_version_compare(self):
+ from oe.cve_check import Version
+
result = Version("100") > Version("99")
self.assertTrue( result, msg="Failed to compare version '100' > '99'")
result = Version("2.3.1") > Version("2.2.3")
@@ -42,3 +46,175 @@ class CVECheck(OESelftestTestCase):
self.assertTrue( result ,msg="Failed to compare version with suffix '1.0p2' > '1.0p1'")
result = Version("1.0_patch2","patch") < Version("1.0_patch3","patch")
self.assertTrue( result ,msg="Failed to compare version with suffix '1.0_patch2' < '1.0_patch3'")
+
+
+ def test_convert_cve_version(self):
+ from oe.cve_check import convert_cve_version
+
+ # Default format
+ self.assertEqual(convert_cve_version("8.3"), "8.3")
+ self.assertEqual(convert_cve_version(""), "")
+
+ # OpenSSL format version
+ self.assertEqual(convert_cve_version("1.1.1t"), "1.1.1t")
+
+ # OpenSSH format
+ self.assertEqual(convert_cve_version("8.3_p1"), "8.3p1")
+ self.assertEqual(convert_cve_version("8.3_p22"), "8.3p22")
+
+ # Linux kernel format
+ self.assertEqual(convert_cve_version("6.2_rc8"), "6.2-rc8")
+ self.assertEqual(convert_cve_version("6.2_rc31"), "6.2-rc31")
+
+
+ def test_recipe_report_json(self):
+ config = """
+INHERIT += "cve-check"
+CVE_CHECK_FORMAT_JSON = "1"
+"""
+ self.write_config(config)
+
+ vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "m4-native_cve.json")
+
+ try:
+ os.remove(summary_json)
+ os.remove(recipe_json)
+ except FileNotFoundError:
+ pass
+
+ bitbake("m4-native -c cve_check")
+
+ def check_m4_json(filename):
+ with open(filename) as f:
+ report = json.load(f)
+ self.assertEqual(report["version"], "1")
+ self.assertEqual(len(report["package"]), 1)
+ package = report["package"][0]
+ self.assertEqual(package["name"], "m4-native")
+ found_cves = { issue["id"]: issue["status"] for issue in package["issue"]}
+ self.assertIn("CVE-2008-1687", found_cves)
+ self.assertEqual(found_cves["CVE-2008-1687"], "Patched")
+
+ self.assertExists(summary_json)
+ check_m4_json(summary_json)
+ self.assertExists(recipe_json)
+ check_m4_json(recipe_json)
+
+
+ def test_image_json(self):
+ config = """
+INHERIT += "cve-check"
+CVE_CHECK_FORMAT_JSON = "1"
+"""
+ self.write_config(config)
+
+ vars = get_bb_vars(["CVE_CHECK_DIR", "CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ report_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ print(report_json)
+ try:
+ os.remove(report_json)
+ except FileNotFoundError:
+ pass
+
+ bitbake("core-image-minimal-initramfs")
+ self.assertExists(report_json)
+
+        # Check that the summary report lists more than one package
+ with open(report_json) as f:
+ report = json.load(f)
+ self.assertEqual(report["version"], "1")
+ self.assertGreater(len(report["package"]), 1)
+
+ # Check that a random recipe wrote a recipe report to deploy/cve/
+ recipename = report["package"][0]["name"]
+ recipe_report = os.path.join(vars["CVE_CHECK_DIR"], recipename + "_cve.json")
+ self.assertExists(recipe_report)
+ with open(recipe_report) as f:
+ report = json.load(f)
+ self.assertEqual(report["version"], "1")
+ self.assertEqual(len(report["package"]), 1)
+ self.assertEqual(report["package"][0]["name"], recipename)
+
+
+ def test_recipe_report_json_unpatched(self):
+ config = """
+INHERIT += "cve-check"
+CVE_CHECK_FORMAT_JSON = "1"
+CVE_CHECK_REPORT_PATCHED = "0"
+"""
+ self.write_config(config)
+
+ vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "m4-native_cve.json")
+
+ try:
+ os.remove(summary_json)
+ os.remove(recipe_json)
+ except FileNotFoundError:
+ pass
+
+ bitbake("m4-native -c cve_check")
+
+ def check_m4_json(filename):
+ with open(filename) as f:
+ report = json.load(f)
+ self.assertEqual(report["version"], "1")
+ self.assertEqual(len(report["package"]), 1)
+ package = report["package"][0]
+ self.assertEqual(package["name"], "m4-native")
+            # m4 has only Patched CVEs, so the issue list will be empty
+ self.assertEqual(package["issue"], [])
+
+ self.assertExists(summary_json)
+ check_m4_json(summary_json)
+ self.assertExists(recipe_json)
+ check_m4_json(recipe_json)
+
+
+ def test_recipe_report_json_ignored(self):
+ config = """
+INHERIT += "cve-check"
+CVE_CHECK_FORMAT_JSON = "1"
+CVE_CHECK_REPORT_PATCHED = "1"
+"""
+ self.write_config(config)
+
+ vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "logrotate_cve.json")
+
+ try:
+ os.remove(summary_json)
+ os.remove(recipe_json)
+ except FileNotFoundError:
+ pass
+
+ bitbake("logrotate -c cve_check")
+
+        def check_logrotate_json(filename):
+ with open(filename) as f:
+ report = json.load(f)
+ self.assertEqual(report["version"], "1")
+ self.assertEqual(len(report["package"]), 1)
+ package = report["package"][0]
+ self.assertEqual(package["name"], "logrotate")
+ found_cves = { issue["id"]: issue["status"] for issue in package["issue"]}
+ # m4 CVE should not be in logrotate
+ self.assertNotIn("CVE-2008-1687", found_cves)
+ # logrotate has both Patched and Ignored CVEs
+ self.assertIn("CVE-2011-1098", found_cves)
+ self.assertEqual(found_cves["CVE-2011-1098"], "Patched")
+ self.assertIn("CVE-2011-1548", found_cves)
+ self.assertEqual(found_cves["CVE-2011-1548"], "Ignored")
+ self.assertIn("CVE-2011-1549", found_cves)
+ self.assertEqual(found_cves["CVE-2011-1549"], "Ignored")
+ self.assertIn("CVE-2011-1550", found_cves)
+ self.assertEqual(found_cves["CVE-2011-1550"], "Ignored")
+
+ self.assertExists(summary_json)
+        check_logrotate_json(summary_json)
+        self.assertExists(recipe_json)
+        check_logrotate_json(recipe_json)
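
The assertions above pin down the shape of the cve-check JSON reports: a version string plus a "package" list whose entries carry an "issue" list of id/status pairs. As a reader's aid, a minimal standalone sketch that walks a report of that shape; the file path in the usage line is hypothetical, and only the json module is needed.

import json

def summarize_cve_report(path):
    # Walk a report of the shape asserted above:
    # {"version": "1", "package": [{"name": ..., "issue": [{"id": ..., "status": ...}]}]}
    with open(path) as f:
        report = json.load(f)
    if report.get("version") != "1":
        raise ValueError("unexpected report version: %r" % report.get("version"))
    for package in report["package"]:
        statuses = {}
        for issue in package.get("issue", []):
            statuses.setdefault(issue["status"], []).append(issue["id"])
        yield package["name"], statuses

if __name__ == "__main__":
    # Hypothetical path; the tests build it from CVE_CHECK_SUMMARY_DIR and
    # CVE_CHECK_SUMMARY_FILE_NAME_JSON instead.
    for name, statuses in summarize_cve_report("build/tmp/deploy/cve/summary.json"):
        print(name, {status: len(ids) for status, ids in statuses.items()})
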
diff --git a/meta/lib/oeqa/selftest/cases/devtool.py b/meta/lib/oeqa/selftest/cases/devtool.py
index 0985434238..9efe342a0d 100644
--- a/meta/lib/oeqa/selftest/cases/devtool.py
+++ b/meta/lib/oeqa/selftest/cases/devtool.py
@@ -8,6 +8,7 @@ import shutil
import tempfile
import glob
import fnmatch
+import unittest
import oeqa.utils.ftools as ftools
from oeqa.selftest.case import OESelftestTestCase
@@ -38,6 +39,13 @@ def setUpModule():
canonical_layerpath = os.path.realpath(canonical_layerpath) + '/'
edited_layers.append(layerpath)
oldmetapath = os.path.realpath(layerpath)
+
+            # When poky was downloaded as a tar.gz archive rather than cloned, these tests are skipped (BUG 12389)
+            try:
+                runCmd('git rev-parse --is-inside-work-tree', cwd=canonical_layerpath)
+            except Exception:
+                raise unittest.SkipTest("devtool tests require folder to be a git repo")
+
result = runCmd('git rev-parse --show-toplevel', cwd=canonical_layerpath)
oldreporoot = result.output.rstrip()
newmetapath = os.path.join(corecopydir, os.path.relpath(oldmetapath, oldreporoot))
@@ -340,7 +348,7 @@ class DevtoolAddTests(DevtoolBase):
checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263'
checkvars['S'] = '${WORKDIR}/git'
checkvars['PV'] = '0.1+git${SRCPV}'
- checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/dbus-wait;protocol=https'
+ checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/dbus-wait;protocol=https;branch=master'
checkvars['SRCREV'] = srcrev
checkvars['DEPENDS'] = set(['dbus'])
self._test_recipe_contents(recipefile, checkvars, [])
@@ -442,6 +450,7 @@ class DevtoolAddTests(DevtoolBase):
tempdir = tempfile.mkdtemp(prefix='devtoolqa')
self.track_for_cleanup(tempdir)
url = 'gitsm://git.yoctoproject.org/mraa'
+ url_branch = '%s;branch=master' % url
checkrev = 'ae127b19a50aa54255e4330ccfdd9a5d058e581d'
testrecipe = 'mraa'
srcdir = os.path.join(tempdir, testrecipe)
@@ -462,7 +471,7 @@ class DevtoolAddTests(DevtoolBase):
checkvars = {}
checkvars['S'] = '${WORKDIR}/git'
checkvars['PV'] = '1.0+git${SRCPV}'
- checkvars['SRC_URI'] = url
+ checkvars['SRC_URI'] = url_branch
checkvars['SRCREV'] = '${AUTOREV}'
self._test_recipe_contents(recipefile, checkvars, [])
# Try with revision and version specified
@@ -481,7 +490,7 @@ class DevtoolAddTests(DevtoolBase):
checkvars = {}
checkvars['S'] = '${WORKDIR}/git'
checkvars['PV'] = '1.5+git${SRCPV}'
- checkvars['SRC_URI'] = url
+ checkvars['SRC_URI'] = url_branch
checkvars['SRCREV'] = checkrev
self._test_recipe_contents(recipefile, checkvars, [])
@@ -880,7 +889,7 @@ class DevtoolUpdateTests(DevtoolBase):
self._check_repo_status(os.path.dirname(recipefile), expected_status)
result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile))
- addlines = ['SRCREV = ".*"', 'SRC_URI = "git://git.infradead.org/mtd-utils.git"']
+ addlines = ['SRCREV = ".*"', 'SRC_URI = "git://git.infradead.org/mtd-utils.git;branch=master"']
srcurilines = src_uri.split()
srcurilines[0] = 'SRC_URI = "' + srcurilines[0]
srcurilines.append('"')
@@ -1322,7 +1331,7 @@ class DevtoolExtractTests(DevtoolBase):
# Now really test deploy-target
result = runCmd('devtool deploy-target -c %s root@%s' % (testrecipe, qemu.ip))
# Run a test command to see if it was installed properly
- sshargs = '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
+ sshargs = '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o HostKeyAlgorithms=+ssh-rsa'
result = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, testcommand))
# Check if it deployed all of the files with the right ownership/perms
# First look on the host - need to do this under pseudo to get the correct ownership/perms
diff --git a/meta/lib/oeqa/selftest/cases/diffoscope/A/file.txt b/meta/lib/oeqa/selftest/cases/diffoscope/A/file.txt
new file mode 100644
index 0000000000..f70f10e4db
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/diffoscope/A/file.txt
@@ -0,0 +1 @@
+A
diff --git a/meta/lib/oeqa/selftest/cases/diffoscope/B/file.txt b/meta/lib/oeqa/selftest/cases/diffoscope/B/file.txt
new file mode 100644
index 0000000000..223b7836fb
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/diffoscope/B/file.txt
@@ -0,0 +1 @@
+B
diff --git a/meta/lib/oeqa/selftest/cases/glibc.py b/meta/lib/oeqa/selftest/cases/glibc.py
index c687f6ef93..c1f6e4c1fb 100644
--- a/meta/lib/oeqa/selftest/cases/glibc.py
+++ b/meta/lib/oeqa/selftest/cases/glibc.py
@@ -33,7 +33,7 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
ptestsuite = "glibc-user" if ssh is None else "glibc"
self.ptest_section(ptestsuite)
- with open(os.path.join(builddir, "tests.sum"), "r") as f:
+ with open(os.path.join(builddir, "tests.sum"), "r", errors='replace') as f:
for test, result in parse_values(f):
self.ptest_result(ptestsuite, test, result)
@@ -41,7 +41,7 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
with contextlib.ExitStack() as s:
# use the base work dir, as the nfs mount, since the recipe directory may not exist
tmpdir = get_bb_var("BASE_WORKDIR")
- nfsport, mountport = s.enter_context(unfs_server(tmpdir))
+ nfsport, mountport = s.enter_context(unfs_server(tmpdir, udp = False))
# build core-image-minimal with required packages
default_installed_packages = [
@@ -61,7 +61,7 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
bitbake("core-image-minimal")
# start runqemu
- qemu = s.enter_context(runqemu("core-image-minimal", runqemuparams = "nographic"))
+ qemu = s.enter_context(runqemu("core-image-minimal", runqemuparams = "nographic", qemuparams = "-m 1024"))
# validate that SSH is working
status, _ = qemu.run("uname")
@@ -70,7 +70,7 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
# setup nfs mount
if qemu.run("mkdir -p \"{0}\"".format(tmpdir))[0] != 0:
raise Exception("Failed to setup NFS mount directory on target")
- mountcmd = "mount -o noac,nfsvers=3,port={0},udp,mountport={1} \"{2}:{3}\" \"{3}\"".format(nfsport, mountport, qemu.server_ip, tmpdir)
+ mountcmd = "mount -o noac,nfsvers=3,port={0},mountport={1} \"{2}:{3}\" \"{3}\"".format(nfsport, mountport, qemu.server_ip, tmpdir)
status, output = qemu.run(mountcmd)
if status != 0:
raise Exception("Failed to setup NFS mount on target ({})".format(repr(output)))
diff --git a/meta/lib/oeqa/selftest/cases/gotoolchain.py b/meta/lib/oeqa/selftest/cases/gotoolchain.py
index 3119520f0d..59f80aad28 100644
--- a/meta/lib/oeqa/selftest/cases/gotoolchain.py
+++ b/meta/lib/oeqa/selftest/cases/gotoolchain.py
@@ -43,6 +43,12 @@ class oeGoToolchainSelfTest(OESelftestTestCase):
@classmethod
def tearDownClass(cls):
+        # Go creates files which are read-only
+ for dirpath, dirnames, filenames in os.walk(cls.tmpdir_SDKQA):
+ for filename in filenames + dirnames:
+ f = os.path.join(dirpath, filename)
+ if not os.path.islink(f):
+ os.chmod(f, 0o775)
shutil.rmtree(cls.tmpdir_SDKQA, ignore_errors=True)
super(oeGoToolchainSelfTest, cls).tearDownClass()
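
The walk-and-chmod above is needed because the Go module cache is written read-only, which makes a plain shutil.rmtree fail. A minimal alternative sketch of the same idea, using shutil.rmtree's onerror hook; this is not what the patch does, just the pattern in isolation, and force_rmtree is a made-up name.

import os
import shutil
import stat

def force_rmtree(path):
    # Remove a tree even when Go has left read-only files or directories behind.
    def make_writable_and_retry(func, failed_path, exc_info):
        os.chmod(failed_path, stat.S_IRWXU)  # clear the read-only bit
        func(failed_path)                    # retry the step that failed
    shutil.rmtree(path, onerror=make_writable_and_retry)

# e.g. force_rmtree(cls.tmpdir_SDKQA) instead of the manual os.walk loop
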
diff --git a/meta/lib/oeqa/selftest/cases/imagefeatures.py b/meta/lib/oeqa/selftest/cases/imagefeatures.py
index 2b9c4998f7..535d80cb86 100644
--- a/meta/lib/oeqa/selftest/cases/imagefeatures.py
+++ b/meta/lib/oeqa/selftest/cases/imagefeatures.py
@@ -240,7 +240,7 @@ USERADD_GID_TABLES += "files/static-group"
def test_no_busybox_base_utils(self):
config = """
# Enable x11
-DISTRO_FEATURES_append += "x11"
+DISTRO_FEATURES_append = " x11"
# Switch to systemd
DISTRO_FEATURES += "systemd"
diff --git a/meta/lib/oeqa/selftest/cases/oelib/utils.py b/meta/lib/oeqa/selftest/cases/oelib/utils.py
index a7214beb4c..bbf67bf9c9 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/utils.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/utils.py
@@ -64,7 +64,7 @@ class TestMultiprocessLaunch(TestCase):
import bb
def testfunction(item, d):
- if item == "2" or item == "1":
+ if item == "2":
raise KeyError("Invalid number %s" % item)
return "Found %s" % item
@@ -99,5 +99,4 @@ class TestMultiprocessLaunch(TestCase):
# Assert the function prints exceptions
with captured_output() as (out, err):
self.assertRaises(bb.BBHandledException, multiprocess_launch, testfunction, ["1", "2", "3", "4", "5", "6"], d, extraargs=(d,))
- self.assertIn("KeyError: 'Invalid number 1'", out.getvalue())
self.assertIn("KeyError: 'Invalid number 2'", out.getvalue())
diff --git a/meta/lib/oeqa/selftest/cases/oescripts.py b/meta/lib/oeqa/selftest/cases/oescripts.py
index 726daff7c6..fb99be447e 100644
--- a/meta/lib/oeqa/selftest/cases/oescripts.py
+++ b/meta/lib/oeqa/selftest/cases/oescripts.py
@@ -133,7 +133,8 @@ class OEListPackageconfigTests(OEScriptTests):
def check_endlines(self, results, expected_endlines):
for line in results.output.splitlines():
for el in expected_endlines:
- if line.split() == el.split():
+ if line and line.split()[0] == el.split()[0] and \
+ ' '.join(sorted(el.split())) in ' '.join(sorted(line.split())):
expected_endlines.remove(el)
break
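
The new check_endlines() comparison matches on the first token and then treats the remaining tokens as order-insensitive, so PACKAGECONFIG flags printed in a different order still count as a match. A small sketch with made-up data showing the same comparison outside the test class:

def line_matches(line, expected):
    # Same comparison as above: identical first token, and the expected tokens
    # (sorted and re-joined) must appear in the sorted actual line.
    return bool(line) and line.split()[0] == expected.split()[0] and \
        ' '.join(sorted(expected.split())) in ' '.join(sorted(line.split()))

# Made-up output lines: flag order should not matter.
assert line_matches("tar acl selinux", "tar selinux acl")
assert not line_matches("gzip acl", "tar acl")
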
diff --git a/meta/lib/oeqa/selftest/cases/prservice.py b/meta/lib/oeqa/selftest/cases/prservice.py
index 578b2b4dd9..fdc1e40058 100644
--- a/meta/lib/oeqa/selftest/cases/prservice.py
+++ b/meta/lib/oeqa/selftest/cases/prservice.py
@@ -75,7 +75,7 @@ class BitbakePrTests(OESelftestTestCase):
exported_db_path = os.path.join(self.builddir, 'export.inc')
export_result = runCmd("bitbake-prserv-tool export %s" % exported_db_path, ignore_status=True)
self.assertEqual(export_result.status, 0, msg="PR Service database export failed: %s" % export_result.output)
- self.assertTrue(os.path.exists(exported_db_path))
+ self.assertTrue(os.path.exists(exported_db_path), msg="%s didn't exist, tool output %s" % (exported_db_path, export_result.output))
if replace_current_db:
current_db_path = os.path.join(get_bb_var('PERSISTENT_DIR'), 'prserv.sqlite3')
diff --git a/meta/lib/oeqa/selftest/cases/recipetool.py b/meta/lib/oeqa/selftest/cases/recipetool.py
index c2ade2543a..e8aeea3023 100644
--- a/meta/lib/oeqa/selftest/cases/recipetool.py
+++ b/meta/lib/oeqa/selftest/cases/recipetool.py
@@ -370,7 +370,7 @@ class RecipetoolTests(RecipetoolBase):
tempsrc = os.path.join(self.tempdir, 'srctree')
os.makedirs(tempsrc)
recipefile = os.path.join(self.tempdir, 'libmatchbox.bb')
- srcuri = 'git://git.yoctoproject.org/libmatchbox'
+ srcuri = 'git://git.yoctoproject.org/libmatchbox;branch=master'
result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri + ";rev=9f7cf8895ae2d39c465c04cc78e918c157420269", '-x', tempsrc])
self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output)
checkvars = {}
@@ -456,7 +456,7 @@ class RecipetoolTests(RecipetoolBase):
self.assertTrue(os.path.isfile(recipefile))
checkvars = {}
checkvars['LICENSE'] = set(['Apache-2.0'])
- checkvars['SRC_URI'] = 'git://github.com/mesonbuild/meson;protocol=https'
+ checkvars['SRC_URI'] = 'git://github.com/mesonbuild/meson;protocol=https;branch=master'
inherits = ['setuptools3']
self._test_recipe_contents(recipefile, checkvars, inherits)
@@ -523,7 +523,7 @@ class RecipetoolTests(RecipetoolBase):
self.assertTrue(os.path.isfile(recipefile))
checkvars = {}
checkvars['LICENSE'] = set(['GPLv2'])
- checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/matchbox-terminal;protocol=http'
+ checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/matchbox-terminal;protocol=http;branch=master'
inherits = ['pkgconfig', 'autotools']
self._test_recipe_contents(recipefile, checkvars, inherits)
diff --git a/meta/lib/oeqa/selftest/cases/reproducible.py b/meta/lib/oeqa/selftest/cases/reproducible.py
index 0e44ce4dbf..be4cdcc429 100644
--- a/meta/lib/oeqa/selftest/cases/reproducible.py
+++ b/meta/lib/oeqa/selftest/cases/reproducible.py
@@ -31,7 +31,6 @@ exclude_packages = [
'bootchart2-doc',
'epiphany',
'gcr',
- 'git',
'glide',
'go-dep',
'go-helloworld',
@@ -40,11 +39,9 @@ exclude_packages = [
'gstreamer1.0-python',
'hwlatdetect',
'kernel-devsrc',
- 'libaprutil',
'libcap-ng',
'libjson',
'libproxy',
- 'lsb-release',
'lttng-tools-dbg',
'lttng-tools-ptest',
'ltp',
@@ -55,14 +52,12 @@ exclude_packages = [
'pybootchartgui',
'qemu',
'quilt-ptest',
- "rpm",
'rsync',
'ruby',
'stress-ng',
'systemd-bootchart',
'systemtap',
'valgrind-ptest',
- 'vim',
'webkitgtk',
]
@@ -144,6 +139,32 @@ def compare_file(reference, test, diffutils_sysroot):
result.status = SAME
return result
+def run_diffoscope(a_dir, b_dir, html_dir, **kwargs):
+ return runCmd(['diffoscope', '--no-default-limits', '--exclude-directory-metadata', 'yes', '--html-dir', html_dir, a_dir, b_dir],
+ **kwargs)
+
+class DiffoscopeTests(OESelftestTestCase):
+ diffoscope_test_files = os.path.join(os.path.dirname(os.path.abspath(__file__)), "diffoscope")
+
+ def test_diffoscope(self):
+ bitbake("diffoscope-native -c addto_recipe_sysroot")
+ diffoscope_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "diffoscope-native")
+
+ # Check that diffoscope doesn't return an error when the files compare
+ # the same (a general check that diffoscope is working)
+ with tempfile.TemporaryDirectory() as tmpdir:
+ run_diffoscope('A', 'A', tmpdir,
+ native_sysroot=diffoscope_sysroot, cwd=self.diffoscope_test_files)
+
+ # Check that diffoscope generates an index.html file when the files are
+ # different
+ with tempfile.TemporaryDirectory() as tmpdir:
+ r = run_diffoscope('A', 'B', tmpdir,
+ native_sysroot=diffoscope_sysroot, ignore_status=True, cwd=self.diffoscope_test_files)
+
+ self.assertNotEqual(r.status, 0, msg="diffoscope was successful when an error was expected")
+ self.assertTrue(os.path.exists(os.path.join(tmpdir, 'index.html')), "HTML index not found!")
+
class ReproducibleTests(OESelftestTestCase):
# Test the reproducibility of whatever is built between sstate_targets and targets
@@ -167,7 +188,7 @@ class ReproducibleTests(OESelftestTestCase):
def setUpLocal(self):
super().setUpLocal()
- needed_vars = ['TOPDIR', 'TARGET_PREFIX', 'BB_NUMBER_THREADS']
+ needed_vars = ['TOPDIR', 'TARGET_PREFIX', 'BB_NUMBER_THREADS', 'BB_HASHSERVE']
bb_vars = get_bb_vars(needed_vars)
for v in needed_vars:
setattr(self, v.lower(), bb_vars[v])
@@ -238,7 +259,7 @@ class ReproducibleTests(OESelftestTestCase):
# mirror, forcing a complete build from scratch
config += textwrap.dedent('''\
SSTATE_DIR = "${TMPDIR}/sstate"
- SSTATE_MIRRORS = ""
+ SSTATE_MIRRORS = "file://.*/.*-native.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH file://.*/.*-cross.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH"
''')
self.logger.info("Building %s (sstate%s allowed)..." % (name, '' if use_sstate else ' NOT'))
@@ -321,7 +342,7 @@ class ReproducibleTests(OESelftestTestCase):
# Copy jquery to improve the diffoscope output usability
self.copy_file(os.path.join(jquery_sysroot, 'usr/share/javascript/jquery/jquery.min.js'), os.path.join(package_html_dir, 'jquery.js'))
- runCmd(['diffoscope', '--no-default-limits', '--exclude-directory-metadata', '--html-dir', package_html_dir, 'reproducibleA', 'reproducibleB'],
+ run_diffoscope('reproducibleA', 'reproducibleB', package_html_dir,
native_sysroot=diffoscope_sysroot, ignore_status=True, cwd=package_dir)
if fails:
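
run_diffoscope() above simply wraps runCmd so both call sites pass the same arguments and run the tool from the diffoscope-native sysroot. Roughly the same invocation as a plain subprocess call, assuming a diffoscope binary is already on the host PATH (which the selftest deliberately does not assume):

import subprocess

def run_diffoscope_plain(a_dir, b_dir, html_dir):
    # diffoscope exits non-zero when the inputs differ, so do not raise on that.
    return subprocess.run(
        ['diffoscope', '--no-default-limits',
         '--exclude-directory-metadata', 'yes',
         '--html-dir', html_dir, a_dir, b_dir],
        check=False)
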
diff --git a/meta/lib/oeqa/selftest/cases/runcmd.py b/meta/lib/oeqa/selftest/cases/runcmd.py
index fa6113d7fa..e9612389fe 100644
--- a/meta/lib/oeqa/selftest/cases/runcmd.py
+++ b/meta/lib/oeqa/selftest/cases/runcmd.py
@@ -27,8 +27,8 @@ class RunCmdTests(OESelftestTestCase):
# The delta is intentionally smaller than the timeout, to detect cases where
# we incorrectly apply the timeout more than once.
- TIMEOUT = 5
- DELTA = 3
+ TIMEOUT = 10
+ DELTA = 8
def test_result_okay(self):
result = runCmd("true")
diff --git a/meta/lib/oeqa/selftest/cases/runtime_test.py b/meta/lib/oeqa/selftest/cases/runtime_test.py
index 976b513727..cc4190c1d6 100644
--- a/meta/lib/oeqa/selftest/cases/runtime_test.py
+++ b/meta/lib/oeqa/selftest/cases/runtime_test.py
@@ -14,11 +14,6 @@ from oeqa.core.decorator.data import skipIfNotQemu
class TestExport(OESelftestTestCase):
- @classmethod
- def tearDownClass(cls):
- runCmd("rm -rf /tmp/sdk")
- super(TestExport, cls).tearDownClass()
-
def test_testexport_basic(self):
"""
Summary: Check basic testexport functionality with only ping test enabled.
@@ -95,19 +90,20 @@ class TestExport(OESelftestTestCase):
msg = "Couldn't find SDK tarball: %s" % tarball_path
self.assertEqual(os.path.isfile(tarball_path), True, msg)
- # Extract SDK and run tar from SDK
- result = runCmd("%s -y -d /tmp/sdk" % tarball_path)
- self.assertEqual(0, result.status, "Couldn't extract SDK")
+ with tempfile.TemporaryDirectory() as tmpdirname:
+ # Extract SDK and run tar from SDK
+ result = runCmd("%s -y -d %s" % (tarball_path, tmpdirname))
+ self.assertEqual(0, result.status, "Couldn't extract SDK")
- env_script = result.output.split()[-1]
- result = runCmd(". %s; which tar" % env_script, shell=True)
- self.assertEqual(0, result.status, "Couldn't setup SDK environment")
- is_sdk_tar = True if "/tmp/sdk" in result.output else False
- self.assertTrue(is_sdk_tar, "Couldn't setup SDK environment")
+ env_script = result.output.split()[-1]
+ result = runCmd(". %s; which tar" % env_script, shell=True)
+ self.assertEqual(0, result.status, "Couldn't setup SDK environment")
+ is_sdk_tar = True if tmpdirname in result.output else False
+ self.assertTrue(is_sdk_tar, "Couldn't setup SDK environment")
- tar_sdk = result.output
- result = runCmd("%s --version" % tar_sdk)
- self.assertEqual(0, result.status, "Couldn't run tar from SDK")
+ tar_sdk = result.output
+ result = runCmd("%s --version" % tar_sdk)
+ self.assertEqual(0, result.status, "Couldn't run tar from SDK")
class TestImage(OESelftestTestCase):
@@ -179,12 +175,24 @@ class TestImage(OESelftestTestCase):
if "DISPLAY" not in os.environ:
self.skipTest("virgl gtk test must be run inside a X session")
distro = oe.lsb.distro_identifier()
+ if distro and distro.startswith('almalinux'):
+ self.skipTest('virgl isn\'t working with Alma Linux')
+ if distro and distro.startswith('rocky'):
+ self.skipTest('virgl isn\'t working with Rocky Linux')
if distro and distro == 'debian-8':
self.skipTest('virgl isn\'t working with Debian 8')
if distro and distro == 'centos-7':
self.skipTest('virgl isn\'t working with Centos 7')
+ if distro and distro == 'centos-8':
+ self.skipTest('virgl isn\'t working with Centos 8')
+ if distro and distro.startswith('fedora'):
+ self.skipTest('virgl isn\'t working with Fedora')
if distro and distro == 'opensuseleap-15.0':
self.skipTest('virgl isn\'t working with Opensuse 15.0')
+ if distro and distro == 'ubuntu-22.04':
+ self.skipTest('virgl isn\'t working with Ubuntu 22.04')
+ if distro and distro == 'ubuntu-22.10':
+ self.skipTest('virgl isn\'t working with Ubuntu 22.10')
qemu_packageconfig = get_bb_var('PACKAGECONFIG', 'qemu-system-native')
sdl_packageconfig = get_bb_var('PACKAGECONFIG', 'libsdl2-native')
@@ -220,6 +228,7 @@ class TestImage(OESelftestTestCase):
Author: Alexander Kanavin <alex.kanavin@gmail.com>
"""
import subprocess, os
+ self.skipTest("Crashes in mesa observed with this test on dunfell: https://bugzilla.yoctoproject.org/show_bug.cgi?id=14527")
try:
content = os.listdir("/dev/dri")
if len([i for i in content if i.startswith('render')]) == 0:
@@ -227,7 +236,7 @@ class TestImage(OESelftestTestCase):
except FileNotFoundError:
self.skipTest("/dev/dri directory does not exist; no render nodes available on this machine.")
try:
- dripath = subprocess.check_output("pkg-config --variable=dridriverdir dri", shell=True)
+ dripath = subprocess.check_output("PATH=/bin:/usr/bin:$PATH pkg-config --variable=dridriverdir dri", shell=True)
except subprocess.CalledProcessError as e:
self.skipTest("Could not determine the path to dri drivers on the host via pkg-config.\nPlease install Mesa development files (particularly, dri.pc) on the host machine.")
qemu_packageconfig = get_bb_var('PACKAGECONFIG', 'qemu-system-native')
diff --git a/meta/lib/oeqa/selftest/cases/sstatetests.py b/meta/lib/oeqa/selftest/cases/sstatetests.py
index c46e8ba489..1bfe88c87d 100644
--- a/meta/lib/oeqa/selftest/cases/sstatetests.py
+++ b/meta/lib/oeqa/selftest/cases/sstatetests.py
@@ -39,7 +39,7 @@ class SStateTests(SStateBase):
recipefile = os.path.join(tempdir, "recipes-test", "dbus-wait-test", 'dbus-wait-test_git.bb')
os.makedirs(os.path.dirname(recipefile))
- srcuri = 'git://' + srcdir + ';protocol=file'
+ srcuri = 'git://' + srcdir + ';protocol=file;branch=master'
result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri])
self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output)
@@ -137,7 +137,7 @@ class SStateTests(SStateBase):
filtered_results.append(r)
self.assertTrue(filtered_results == [], msg="Found distro non-specific sstate for: %s (%s)" % (', '.join(map(str, targets)), str(filtered_results)))
file_tracker_1 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific=True, distro_nonspecific=False)
- self.assertTrue(len(file_tracker_1) >= len(targets), msg = "Not all sstate files ware created for: %s" % ', '.join(map(str, targets)))
+ self.assertTrue(len(file_tracker_1) >= len(targets), msg = "Not all sstate files were created for: %s" % ', '.join(map(str, targets)))
self.track_for_cleanup(self.distro_specific_sstate + "_old")
shutil.copytree(self.distro_specific_sstate, self.distro_specific_sstate + "_old")
@@ -146,13 +146,13 @@ class SStateTests(SStateBase):
bitbake(['-cclean'] + targets)
bitbake(targets)
file_tracker_2 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific=True, distro_nonspecific=False)
- self.assertTrue(len(file_tracker_2) >= len(targets), msg = "Not all sstate files ware created for: %s" % ', '.join(map(str, targets)))
+ self.assertTrue(len(file_tracker_2) >= len(targets), msg = "Not all sstate files were created for: %s" % ', '.join(map(str, targets)))
not_recreated = [x for x in file_tracker_1 if x not in file_tracker_2]
- self.assertTrue(not_recreated == [], msg="The following sstate files ware not recreated: %s" % ', '.join(map(str, not_recreated)))
+ self.assertTrue(not_recreated == [], msg="The following sstate files were not recreated: %s" % ', '.join(map(str, not_recreated)))
created_once = [x for x in file_tracker_2 if x not in file_tracker_1]
- self.assertTrue(created_once == [], msg="The following sstate files ware created only in the second run: %s" % ', '.join(map(str, created_once)))
+ self.assertTrue(created_once == [], msg="The following sstate files were created only in the second run: %s" % ', '.join(map(str, created_once)))
def test_rebuild_distro_specific_sstate_cross_native_targets(self):
self.run_test_rebuild_distro_specific_sstate(['binutils-cross-' + self.tune_arch, 'binutils-native'], temp_sstate_location=True)
@@ -202,9 +202,9 @@ class SStateTests(SStateBase):
actual_remaining_sstate = [x for x in self.search_sstate(target + r'.*?\.tgz$') if not any(pattern in x for pattern in ignore_patterns)]
actual_not_expected = [x for x in actual_remaining_sstate if x not in expected_remaining_sstate]
- self.assertFalse(actual_not_expected, msg="Files should have been removed but ware not: %s" % ', '.join(map(str, actual_not_expected)))
+ self.assertFalse(actual_not_expected, msg="Files should have been removed but were not: %s" % ', '.join(map(str, actual_not_expected)))
expected_not_actual = [x for x in expected_remaining_sstate if x not in actual_remaining_sstate]
- self.assertFalse(expected_not_actual, msg="Extra files ware removed: %s" ', '.join(map(str, expected_not_actual)))
+ self.assertFalse(expected_not_actual, msg="Extra files were removed: %s" ', '.join(map(str, expected_not_actual)))
def test_sstate_cache_management_script_using_pr_1(self):
global_config = []
diff --git a/meta/lib/oeqa/selftest/cases/tinfoil.py b/meta/lib/oeqa/selftest/cases/tinfoil.py
index a51c6048d3..6668d7cdc8 100644
--- a/meta/lib/oeqa/selftest/cases/tinfoil.py
+++ b/meta/lib/oeqa/selftest/cases/tinfoil.py
@@ -65,6 +65,20 @@ class TinfoilTests(OESelftestTestCase):
localdata.setVar('PN', 'hello')
self.assertEqual('hello', localdata.getVar('BPN'))
+    # The config_data API to parse_recipe_file is used by:
+ # layerindex-web layerindex/update_layer.py
+ def test_parse_recipe_custom_data(self):
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=False, quiet=2)
+ localdata = bb.data.createCopy(tinfoil.config_data)
+ localdata.setVar("TESTVAR", "testval")
+ testrecipe = 'mdadm'
+ best = tinfoil.find_best_provider(testrecipe)
+ if not best:
+ self.fail('Unable to find recipe providing %s' % testrecipe)
+ rd = tinfoil.parse_recipe_file(best[3], config_data=localdata)
+ self.assertEqual("testval", rd.getVar('TESTVAR'))
+
def test_list_recipes(self):
with bb.tinfoil.Tinfoil() as tinfoil:
tinfoil.prepare(config_only=False, quiet=2)
@@ -87,23 +101,22 @@ class TinfoilTests(OESelftestTestCase):
with bb.tinfoil.Tinfoil() as tinfoil:
tinfoil.prepare(config_only=True)
- tinfoil.set_event_mask(['bb.event.FilesMatchingFound', 'bb.command.CommandCompleted'])
+ tinfoil.set_event_mask(['bb.event.FilesMatchingFound', 'bb.command.CommandCompleted', 'bb.command.CommandFailed', 'bb.command.CommandExit'])
# Need to drain events otherwise events that were masked may still be in the queue
while tinfoil.wait_event():
pass
pattern = 'conf'
- res = tinfoil.run_command('findFilesMatchingInDir', pattern, 'conf/machine')
+ res = tinfoil.run_command('testCookerCommandEvent', pattern, handle_events=False)
self.assertTrue(res)
eventreceived = False
commandcomplete = False
start = time.time()
- # Wait for maximum 60s in total so we'd detect spurious heartbeat events for example
- # The test is IO load sensitive too
+ # Wait for maximum 120s in total so we'd detect spurious heartbeat events for example
while (not (eventreceived == True and commandcomplete == True)
- and (time.time() - start < 60)):
+ and (time.time() - start < 120)):
# if we received both events (on let's say a good day), we are done
event = tinfoil.wait_event(1)
if event:
@@ -111,14 +124,15 @@ class TinfoilTests(OESelftestTestCase):
commandcomplete = True
elif isinstance(event, bb.event.FilesMatchingFound):
self.assertEqual(pattern, event._pattern)
- self.assertIn('qemuarm.conf', event._matches)
+ self.assertIn('A', event._matches)
+ self.assertIn('B', event._matches)
eventreceived = True
elif isinstance(event, logging.LogRecord):
continue
else:
self.fail('Unexpected event: %s' % event)
- self.assertTrue(commandcomplete, 'Timed out waiting for CommandCompleted event from bitbake server')
+ self.assertTrue(commandcomplete, 'Timed out waiting for CommandCompleted event from bitbake server (Matching event received: %s)' % str(eventreceived))
self.assertTrue(eventreceived, 'Did not receive FilesMatchingFound event from bitbake server')
def test_setvariable_clean(self):
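
The wider event mask and the timeout bump above both serve the same wait loop: poll tinfoil.wait_event() with a short timeout until every expected event type has been seen or a deadline passes. A condensed sketch of that loop, assuming an already prepared Tinfoil connection with the relevant events unmasked; wait_for_events is a made-up helper name.

import time

def wait_for_events(tinfoil, wanted_types, deadline=120):
    # Poll with a 1s timeout until each wanted event type has been seen once,
    # or until the overall deadline expires; other events are simply ignored here.
    seen = set()
    start = time.time()
    while len(seen) < len(wanted_types) and time.time() - start < deadline:
        event = tinfoil.wait_event(1)
        if event is None:
            continue
        for event_type in wanted_types:
            if isinstance(event, event_type):
                seen.add(event_type)
    return seen
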
diff --git a/meta/lib/oeqa/utils/buildproject.py b/meta/lib/oeqa/utils/buildproject.py
index e6d80cc8dc..dfb9661868 100644
--- a/meta/lib/oeqa/utils/buildproject.py
+++ b/meta/lib/oeqa/utils/buildproject.py
@@ -18,6 +18,7 @@ class BuildProject(metaclass=ABCMeta):
def __init__(self, uri, foldername=None, tmpdir=None, dl_dir=None):
self.uri = uri
self.archive = os.path.basename(uri)
+ self.tempdirobj = None
if not tmpdir:
self.tempdirobj = tempfile.TemporaryDirectory(prefix='buildproject-')
tmpdir = self.tempdirobj.name
@@ -57,6 +58,8 @@ class BuildProject(metaclass=ABCMeta):
return self._run('cd %s; make install %s' % (self.targetdir, install_args))
def clean(self):
+ if self.tempdirobj:
+ self.tempdirobj.cleanup()
if not self.needclean:
return
self._run('rm -rf %s' % self.targetdir)
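
Storing the TemporaryDirectory object (rather than only its name) is what lets clean() call cleanup() deterministically instead of relying on garbage collection. The same shape in isolation; Workspace is a made-up name standing in for BuildProject.

import tempfile

class Workspace:
    def __init__(self, tmpdir=None):
        self.tempdirobj = None
        if not tmpdir:
            # Keep the object, not just .name, so its lifetime is under our control.
            self.tempdirobj = tempfile.TemporaryDirectory(prefix='workspace-')
            tmpdir = self.tempdirobj.name
        self.tmpdir = tmpdir

    def clean(self):
        if self.tempdirobj:
            self.tempdirobj.cleanup()
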
diff --git a/meta/lib/oeqa/utils/metadata.py b/meta/lib/oeqa/utils/metadata.py
index 8013aa684d..15ec190c4a 100644
--- a/meta/lib/oeqa/utils/metadata.py
+++ b/meta/lib/oeqa/utils/metadata.py
@@ -27,9 +27,9 @@ def metadata_from_bb():
data_dict = get_bb_vars()
# Distro information
- info_dict['distro'] = {'id': data_dict['DISTRO'],
- 'version_id': data_dict['DISTRO_VERSION'],
- 'pretty_name': '%s %s' % (data_dict['DISTRO'], data_dict['DISTRO_VERSION'])}
+ info_dict['distro'] = {'id': data_dict.get('DISTRO', 'NODISTRO'),
+ 'version_id': data_dict.get('DISTRO_VERSION', 'NO_DISTRO_VERSION'),
+ 'pretty_name': '%s %s' % (data_dict.get('DISTRO', 'NODISTRO'), data_dict.get('DISTRO_VERSION', 'NO_DISTRO_VERSION'))}
# Host distro information
os_release = get_os_release()
diff --git a/meta/lib/oeqa/utils/nfs.py b/meta/lib/oeqa/utils/nfs.py
index a37686c914..c9bac050a4 100644
--- a/meta/lib/oeqa/utils/nfs.py
+++ b/meta/lib/oeqa/utils/nfs.py
@@ -8,7 +8,7 @@ from oeqa.utils.commands import bitbake, get_bb_var, Command
from oeqa.utils.network import get_free_port
@contextlib.contextmanager
-def unfs_server(directory, logger = None):
+def unfs_server(directory, logger = None, udp = True):
unfs_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "unfs3-native")
if not os.path.exists(os.path.join(unfs_sysroot, "usr", "bin", "unfsd")):
# build native tool
@@ -22,7 +22,7 @@ def unfs_server(directory, logger = None):
exports.write("{0} (rw,no_root_squash,no_all_squash,insecure)\n".format(directory).encode())
# find some ports for the server
- nfsport, mountport = get_free_port(udp = True), get_free_port(udp = True)
+ nfsport, mountport = get_free_port(udp), get_free_port(udp)
nenv = dict(os.environ)
nenv['PATH'] = "{0}/sbin:{0}/usr/sbin:{0}/usr/bin:".format(unfs_sysroot) + nenv.get('PATH', '')
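
With the new udp flag, callers such as the glibc ptest run above can ask for TCP-only ports. A usage sketch; this only works inside an oe-selftest build environment, since unfs_server() builds unfs3-native on demand, and the export path here is hypothetical.

from oeqa.utils.nfs import unfs_server

with unfs_server("/tmp/nfs-export", udp=False) as (nfsport, mountport):
    # The two ports feed the client-side mount options, matching the mountcmd
    # change in the glibc test above (no 'udp' option on the mount).
    print("mount -o noac,nfsvers=3,port=%d,mountport=%d <server>:/tmp/nfs-export /mnt"
          % (nfsport, mountport))
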
diff --git a/meta/lib/oeqa/utils/qemurunner.py b/meta/lib/oeqa/utils/qemurunner.py
index 79db2cc247..c84d299a80 100644
--- a/meta/lib/oeqa/utils/qemurunner.py
+++ b/meta/lib/oeqa/utils/qemurunner.py
@@ -120,7 +120,10 @@ class QemuRunner:
import fcntl
fl = fcntl.fcntl(o, fcntl.F_GETFL)
fcntl.fcntl(o, fcntl.F_SETFL, fl | os.O_NONBLOCK)
- return os.read(o.fileno(), 1000000).decode("utf-8")
+ try:
+ return os.read(o.fileno(), 1000000).decode("utf-8")
+ except BlockingIOError:
+ return ""
def handleSIGCHLD(self, signum, frame):
@@ -231,7 +234,7 @@ class QemuRunner:
r = os.fdopen(r)
x = r.read()
os.killpg(os.getpgid(self.runqemu.pid), signal.SIGTERM)
- sys.exit(0)
+ os._exit(0)
self.logger.debug("runqemu started, pid is %s" % self.runqemu.pid)
self.logger.debug("waiting at most %s seconds for qemu pid (%s)" %
@@ -429,12 +432,17 @@ class QemuRunner:
except OSError as e:
if e.errno != errno.ESRCH:
raise
- endtime = time.time() + self.runqemutime
- while self.runqemu.poll() is None and time.time() < endtime:
- time.sleep(1)
- if self.runqemu.poll() is None:
+ try:
+ outs, errs = self.runqemu.communicate(timeout = self.runqemutime)
+ if outs:
+ self.logger.info("Output from runqemu:\n%s", outs.decode("utf-8"))
+ if errs:
+ self.logger.info("Stderr from runqemu:\n%s", errs.decode("utf-8"))
+ except TimeoutExpired:
self.logger.debug("Sending SIGKILL to runqemu")
os.killpg(os.getpgid(self.runqemu.pid), signal.SIGKILL)
+ if not self.runqemu.stdout.closed:
+ self.logger.info("Output from runqemu:\n%s" % self.getOutput(self.runqemu.stdout))
self.runqemu.stdin.close()
self.runqemu.stdout.close()
self.runqemu_exited = True
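
Replacing the poll/sleep loop with communicate(timeout=...) both reaps the child and captures its output in one call. A minimal standalone sketch of that pattern, assuming the child was started in its own process group (e.g. with start_new_session=True) so killpg is valid; the logger plumbing from the real code is omitted.

import os
import signal
import subprocess

def stop_with_timeout(proc, timeout):
    # Wait for the child to exit and collect its output; on timeout,
    # kill the whole process group and then reap it.
    try:
        return proc.communicate(timeout=timeout)
    except subprocess.TimeoutExpired:
        os.killpg(os.getpgid(proc.pid), signal.SIGKILL)
        return proc.communicate()
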
diff --git a/meta/lib/oeqa/utils/targetbuild.py b/meta/lib/oeqa/utils/targetbuild.py
index 1055810ca3..09738add1d 100644
--- a/meta/lib/oeqa/utils/targetbuild.py
+++ b/meta/lib/oeqa/utils/targetbuild.py
@@ -19,6 +19,7 @@ class BuildProject(metaclass=ABCMeta):
self.d = d
self.uri = uri
self.archive = os.path.basename(uri)
+ self.tempdirobj = None
if not tmpdir:
tmpdir = self.d.getVar('WORKDIR')
if not tmpdir:
@@ -71,9 +72,10 @@ class BuildProject(metaclass=ABCMeta):
return self._run('cd %s; make install %s' % (self.targetdir, install_args))
def clean(self):
+ if self.tempdirobj:
+ self.tempdirobj.cleanup()
self._run('rm -rf %s' % self.targetdir)
subprocess.check_call('rm -f %s' % self.localarchive, shell=True)
- pass
class TargetBuildProject(BuildProject):