Diffstat (limited to 'meta/lib/oeqa/selftest/cases')
-rw-r--r-- meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py | 4
-rw-r--r-- meta/lib/oeqa/selftest/cases/archiver.py | 34
-rw-r--r-- meta/lib/oeqa/selftest/cases/baremetal.py | 14
-rw-r--r-- meta/lib/oeqa/selftest/cases/bblayers.py | 125
-rw-r--r-- meta/lib/oeqa/selftest/cases/bblock.py | 203
-rw-r--r-- meta/lib/oeqa/selftest/cases/bblogging.py | 20
-rw-r--r-- meta/lib/oeqa/selftest/cases/bbtests.py | 56
-rw-r--r-- meta/lib/oeqa/selftest/cases/binutils.py | 11
-rw-r--r-- meta/lib/oeqa/selftest/cases/buildhistory.py | 2
-rw-r--r-- meta/lib/oeqa/selftest/cases/buildoptions.py | 5
-rw-r--r-- meta/lib/oeqa/selftest/cases/c_cpp.py | 60
-rw-r--r-- meta/lib/oeqa/selftest/cases/containerimage.py | 2
-rw-r--r-- meta/lib/oeqa/selftest/cases/cve_check.py | 200
-rw-r--r-- meta/lib/oeqa/selftest/cases/debuginfod.py | 158
-rw-r--r-- meta/lib/oeqa/selftest/cases/devtool.py | 1143
-rw-r--r-- meta/lib/oeqa/selftest/cases/distrodata.py | 44
-rw-r--r-- meta/lib/oeqa/selftest/cases/efibootpartition.py | 41
-rw-r--r-- meta/lib/oeqa/selftest/cases/esdk.py (renamed from meta/lib/oeqa/selftest/cases/eSDK.py) | 2
-rw-r--r-- meta/lib/oeqa/selftest/cases/externalsrc.py | 44
-rw-r--r-- meta/lib/oeqa/selftest/cases/fetch.py | 6
-rw-r--r-- meta/lib/oeqa/selftest/cases/fitimage.py | 75
-rw-r--r-- meta/lib/oeqa/selftest/cases/gcc.py | 12
-rw-r--r-- meta/lib/oeqa/selftest/cases/gdbserver.py | 67
-rw-r--r-- meta/lib/oeqa/selftest/cases/gitarchivetests.py | 136
-rw-r--r-- meta/lib/oeqa/selftest/cases/glibc.py | 19
-rw-r--r-- meta/lib/oeqa/selftest/cases/gotoolchain.py | 11
-rw-r--r-- meta/lib/oeqa/selftest/cases/image_typedep.py | 2
-rw-r--r-- meta/lib/oeqa/selftest/cases/imagefeatures.py | 147
-rw-r--r-- meta/lib/oeqa/selftest/cases/incompatible_lic.py | 25
-rw-r--r-- meta/lib/oeqa/selftest/cases/intercept.py (renamed from meta/lib/oeqa/selftest/cases/git.py) | 6
-rw-r--r-- meta/lib/oeqa/selftest/cases/kerneldevelopment.py | 7
-rw-r--r-- meta/lib/oeqa/selftest/cases/layerappend.py | 2
-rw-r--r-- meta/lib/oeqa/selftest/cases/liboe.py | 4
-rw-r--r-- meta/lib/oeqa/selftest/cases/lic_checksum.py | 4
-rw-r--r-- meta/lib/oeqa/selftest/cases/locales.py | 54
-rw-r--r-- meta/lib/oeqa/selftest/cases/manifest.py | 2
-rw-r--r-- meta/lib/oeqa/selftest/cases/meta_ide.py | 21
-rw-r--r-- meta/lib/oeqa/selftest/cases/minidebuginfo.py | 44
-rw-r--r-- meta/lib/oeqa/selftest/cases/multiconfig.py | 15
-rw-r--r-- meta/lib/oeqa/selftest/cases/newlib.py | 2
-rw-r--r-- meta/lib/oeqa/selftest/cases/oelib/buildhistory.py | 26
-rw-r--r-- meta/lib/oeqa/selftest/cases/oelib/elf.py | 2
-rw-r--r-- meta/lib/oeqa/selftest/cases/oelib/license.py | 2
-rw-r--r-- meta/lib/oeqa/selftest/cases/oelib/path.py | 2
-rw-r--r-- meta/lib/oeqa/selftest/cases/oelib/types.py | 2
-rw-r--r-- meta/lib/oeqa/selftest/cases/oelib/utils.py | 2
-rw-r--r-- meta/lib/oeqa/selftest/cases/oescripts.py | 43
-rw-r--r-- meta/lib/oeqa/selftest/cases/overlayfs.py | 163
-rw-r--r-- meta/lib/oeqa/selftest/cases/package.py | 37
-rw-r--r-- meta/lib/oeqa/selftest/cases/pkgdata.py | 11
-rw-r--r-- meta/lib/oeqa/selftest/cases/prservice.py | 33
-rw-r--r-- meta/lib/oeqa/selftest/cases/pseudo.py | 2
-rw-r--r-- meta/lib/oeqa/selftest/cases/recipetool.py | 605
-rw-r--r-- meta/lib/oeqa/selftest/cases/recipeutils.py | 4
-rw-r--r-- meta/lib/oeqa/selftest/cases/reproducible.py | 42
-rw-r--r-- meta/lib/oeqa/selftest/cases/resulttooltests.py | 279
-rw-r--r-- meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py | 97
-rw-r--r-- meta/lib/oeqa/selftest/cases/rpmtests.py | 14
-rw-r--r-- meta/lib/oeqa/selftest/cases/runcmd.py | 6
-rw-r--r-- meta/lib/oeqa/selftest/cases/runqemu.py | 72
-rw-r--r-- meta/lib/oeqa/selftest/cases/runtime_test.py | 59
-rw-r--r-- meta/lib/oeqa/selftest/cases/rust.py | 231
-rw-r--r-- meta/lib/oeqa/selftest/cases/selftest.py | 2
-rw-r--r-- meta/lib/oeqa/selftest/cases/signing.py | 4
-rw-r--r-- meta/lib/oeqa/selftest/cases/spdx.py | 54
-rw-r--r-- meta/lib/oeqa/selftest/cases/sstate.py | 64
-rw-r--r-- meta/lib/oeqa/selftest/cases/sstatetests.py | 613
-rw-r--r-- meta/lib/oeqa/selftest/cases/sysroot.py | 49
-rw-r--r-- meta/lib/oeqa/selftest/cases/tinfoil.py | 39
-rw-r--r-- meta/lib/oeqa/selftest/cases/usergrouptests.py | 57
-rw-r--r-- meta/lib/oeqa/selftest/cases/wic.py | 334
-rw-r--r-- meta/lib/oeqa/selftest/cases/wrapper.py | 16
-rw-r--r-- meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py | 39
73 files changed, 5081 insertions, 753 deletions
diff --git a/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py b/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py
index bff6e7740c..2c9584d329 100644
--- a/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py
+++ b/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -7,7 +9,7 @@ import shutil
import oeqa.utils.ftools as ftools
from oeqa.utils.commands import runCmd, bitbake, get_bb_var
-from oeqa.selftest.cases.sstate import SStateBase
+from oeqa.selftest.cases.sstatetests import SStateBase
class RebuildFromSState(SStateBase):
diff --git a/meta/lib/oeqa/selftest/cases/archiver.py b/meta/lib/oeqa/selftest/cases/archiver.py
index 75195241b7..3cb888c506 100644
--- a/meta/lib/oeqa/selftest/cases/archiver.py
+++ b/meta/lib/oeqa/selftest/cases/archiver.py
@@ -1,9 +1,12 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
import os
import glob
+import re
from oeqa.utils.commands import bitbake, get_bb_vars
from oeqa.selftest.case import OESelftestTestCase
@@ -117,7 +120,38 @@ class Archiver(OESelftestTestCase):
excluded_present = len(glob.glob(src_path_target + '/%s-*/*' % target_recipes[1]))
self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % target_recipes[1])
+ def test_archiver_multiconfig_shared_unpack_and_patch(self):
+ """
+ Test that shared recipes in original mode with diff enabled works in multiconfig,
+ otherwise it will not build when using the same TMP dir.
+ """
+
+ features = 'BBMULTICONFIG = "mc1 mc2"\n'
+ features += 'INHERIT += "archiver"\n'
+ features += 'ARCHIVER_MODE[src] = "original"\n'
+ features += 'ARCHIVER_MODE[diff] = "1"\n'
+ self.write_config(features)
+ # We can use any machine in multiconfig as long as they are different
+ self.write_config('MACHINE = "qemuarm"\n', 'mc1')
+ self.write_config('MACHINE = "qemux86"\n', 'mc2')
+
+ task = 'do_unpack_and_patch'
+ # Use gcc-source as it is a shared recipe (appends the pv to the pn)
+ pn = 'gcc-source-%s' % get_bb_vars(['PV'], 'gcc')['PV']
+
+ # Generate the tasks signatures
+ bitbake('mc:mc1:%s mc:mc2:%s -c %s -S lockedsigs' % (pn, pn, task))
+
+ # Check the tasks signatures
+ # To be machine agnostic the tasks needs to generate the same signature for each machine
+ locked_sigs_inc = "%s/locked-sigs.inc" % self.builddir
+ locked_sigs = open(locked_sigs_inc).read()
+ task_sigs = re.findall(r"%s:%s:.*" % (pn, task), locked_sigs)
+ uniq_sigs = set(task_sigs)
+ self.assertFalse(len(uniq_sigs) - 1, \
+ 'The task "%s" of the recipe "%s" has different signatures in "%s" for each machine in multiconfig' \
+ % (task, pn, locked_sigs_inc))
def test_archiver_srpm_mode(self):
"""
diff --git a/meta/lib/oeqa/selftest/cases/baremetal.py b/meta/lib/oeqa/selftest/cases/baremetal.py
new file mode 100644
index 0000000000..cadaea2f1a
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/baremetal.py
@@ -0,0 +1,14 @@
+
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+
+class BaremetalTest(OESelftestTestCase):
+ def test_baremetal(self):
+ self.write_config('TCLIBC = "baremetal"')
+ bitbake('baremetal-helloworld')
diff --git a/meta/lib/oeqa/selftest/cases/bblayers.py b/meta/lib/oeqa/selftest/cases/bblayers.py
index 7d74833f61..695d17377d 100644
--- a/meta/lib/oeqa/selftest/cases/bblayers.py
+++ b/meta/lib/oeqa/selftest/cases/bblayers.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -6,12 +8,18 @@ import os
import re
import oeqa.utils.ftools as ftools
-from oeqa.utils.commands import runCmd, get_bb_var, get_bb_vars
+from oeqa.utils.commands import runCmd, get_bb_var, get_bb_vars, bitbake
from oeqa.selftest.case import OESelftestTestCase
class BitbakeLayers(OESelftestTestCase):
+ @classmethod
+ def setUpClass(cls):
+ super(BitbakeLayers, cls).setUpClass()
+ bitbake("python3-jsonschema-native")
+ bitbake("-c addto_recipe_sysroot python3-jsonschema-native")
+
def test_bitbakelayers_layerindexshowdepends(self):
result = runCmd('bitbake-layers layerindex-show-depends meta-poky')
find_in_contents = re.search("openembedded-core", result.output)
@@ -46,7 +54,7 @@ class BitbakeLayers(OESelftestTestCase):
bb_file = os.path.join(testoutdir, recipe_path, recipe_file)
self.assertTrue(os.path.isfile(bb_file), msg = "Cannot find xcursor-transparent-theme_0.1.1.bb in the test_bitbakelayers_flatten local dir.")
contents = ftools.read_file(bb_file)
- find_in_contents = re.search("##### bbappended from meta-selftest #####\n(.*\n)*include test_recipe.inc", contents)
+ find_in_contents = re.search(r"##### bbappended from meta-selftest #####\n(.*\n)*include test_recipe.inc", contents)
self.assertTrue(find_in_contents, msg = "Flattening layers did not work. bitbake-layers flatten output: %s" % result.output)
def test_bitbakelayers_add_remove(self):
@@ -77,8 +85,9 @@ class BitbakeLayers(OESelftestTestCase):
result = runCmd('bitbake-layers show-recipes -i image')
self.assertIn('core-image-minimal', result.output)
self.assertNotIn('mtd-utils:', result.output)
- result = runCmd('bitbake-layers show-recipes -i cmake,pkgconfig')
+ result = runCmd('bitbake-layers show-recipes -i meson,pkgconfig')
self.assertIn('libproxy:', result.output)
+ result = runCmd('bitbake-layers show-recipes -i cmake,pkgconfig')
self.assertNotIn('mtd-utils:', result.output) # doesn't inherit either
self.assertNotIn('wget:', result.output) # doesn't inherit cmake
self.assertNotIn('waffle:', result.output) # doesn't inherit pkgconfig
@@ -111,6 +120,11 @@ class BitbakeLayers(OESelftestTestCase):
self.assertEqual(bb_vars['BBFILE_PRIORITY_%s' % layername], str(priority), 'BBFILE_PRIORITY_%s != %d' % (layername, priority))
+ result = runCmd('bitbake-layers save-build-conf {} {}'.format(layerpath, "buildconf-1"))
+ for f in ('local.conf.sample', 'bblayers.conf.sample', 'conf-summary.txt', 'conf-notes.txt'):
+ fullpath = os.path.join(layerpath, "conf", "templates", "buildconf-1", f)
+ self.assertTrue(os.path.exists(fullpath), "Template configuration file {} not found".format(fullpath))
+
def get_recipe_basename(self, recipe):
recipe_file = ""
result = runCmd("bitbake-layers show-recipes -f %s" % recipe)
@@ -121,3 +135,108 @@ class BitbakeLayers(OESelftestTestCase):
self.assertTrue(os.path.isfile(recipe_file), msg = "Can't find recipe file for %s" % recipe)
return os.path.basename(recipe_file)
+
+ def validate_layersjson(self, json):
+ python = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-jsonschema-native'), 'nativepython3')
+ jsonvalidator = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-jsonschema-native'), 'jsonschema')
+ jsonschema = os.path.join(get_bb_var('COREBASE'), 'meta/files/layers.schema.json')
+ result = runCmd("{} {} -i {} {}".format(python, jsonvalidator, json, jsonschema))
+
+ def test_validate_examplelayersjson(self):
+ json = os.path.join(get_bb_var('COREBASE'), "meta/files/layers.example.json")
+ self.validate_layersjson(json)
+
+ def test_bitbakelayers_setup(self):
+ result = runCmd('bitbake-layers create-layers-setup {}'.format(self.testlayer_path))
+ jsonfile = os.path.join(self.testlayer_path, "setup-layers.json")
+ self.validate_layersjson(jsonfile)
+
+ # The revision-under-test may not necessarily be available on the remote server,
+ # so replace it with a revision that has a yocto-4.1 tag.
+ import json
+ with open(jsonfile) as f:
+ data = json.load(f)
+ for s in data['sources']:
+ data['sources'][s]['git-remote']['rev'] = '5200799866b92259e855051112520006e1aaaac0'
+ with open(jsonfile, 'w') as f:
+ json.dump(data, f)
+
+ testcheckoutdir = os.path.join(self.builddir, 'test-layer-checkout')
+ result = runCmd('{}/setup-layers --destdir {}'.format(self.testlayer_path, testcheckoutdir))
+ layers_json = os.path.join(testcheckoutdir, ".oe-layers.json")
+ self.assertTrue(os.path.exists(layers_json), "File {} not found in test layer checkout".format(layers_json))
+
+ # As setup-layers checkout out an old revision of poky, there is no setup-build symlink,
+ # and we need to run oe-setup-build directly from the current poky tree under test
+ oe_setup_build = os.path.join(get_bb_var('COREBASE'), 'scripts/oe-setup-build')
+ oe_setup_build_l = os.path.join(testcheckoutdir, 'setup-build')
+ os.symlink(oe_setup_build,oe_setup_build_l)
+
+ cmd = '{} --layerlist {} list -v'.format(oe_setup_build_l, layers_json)
+ result = runCmd(cmd)
+ cond = "conf/templates/default" in result.output
+ self.assertTrue(cond, "Incorrect output from {}: {}".format(cmd, result.output))
+
+ # rather than hardcode the build setup cmdline here, let's actually run what the tool suggests to the user
+ conf = None
+ if 'poky-default' in result.output:
+ conf = 'poky-default'
+ elif 'meta-default' in result.output:
+ conf = 'meta-default'
+ self.assertIsNotNone(conf, "Could not find the configuration to set up a build in the output: {}".format(result.output))
+
+ cmd = '{} --layerlist {} setup -c {} --no-shell'.format(oe_setup_build_l, layers_json, conf)
+ result = runCmd(cmd)
+
+ def test_bitbakelayers_updatelayer(self):
+ result = runCmd('bitbake-layers create-layers-setup {}'.format(self.testlayer_path))
+ jsonfile = os.path.join(self.testlayer_path, "setup-layers.json")
+ self.validate_layersjson(jsonfile)
+
+ import json
+ with open(jsonfile) as f:
+ data = json.load(f)
+ repos = []
+ for s in data['sources']:
+ repos.append(s)
+
+ self.assertTrue(len(repos) > 1, "Not enough repositories available")
+ self.validate_layersjson(jsonfile)
+
+ test_ref_1 = 'ref_1'
+ test_ref_2 = 'ref_2'
+
+ # Create a new layers setup using custom references
+ result = runCmd('bitbake-layers create-layers-setup --use-custom-reference {first_repo}:{test_ref} --use-custom-reference {second_repo}:{test_ref} {path}'
+ .format(first_repo=repos[0], second_repo=repos[1], test_ref=test_ref_1, path=self.testlayer_path))
+ self.validate_layersjson(jsonfile)
+
+ with open(jsonfile) as f:
+ data = json.load(f)
+ first_rev_1 = data['sources'][repos[0]]['git-remote']['rev']
+ first_desc_1 = data['sources'][repos[0]]['git-remote']['describe']
+ second_rev_1 = data['sources'][repos[1]]['git-remote']['rev']
+ second_desc_1 = data['sources'][repos[1]]['git-remote']['describe']
+
+ self.assertEqual(first_rev_1, test_ref_1, "Revision not set correctly: '{}'".format(first_rev_1))
+ self.assertEqual(first_desc_1, '', "Describe not cleared: '{}'".format(first_desc_1))
+ self.assertEqual(second_rev_1, test_ref_1, "Revision not set correctly: '{}'".format(second_rev_1))
+ self.assertEqual(second_desc_1, '', "Describe not cleared: '{}'".format(second_desc_1))
+
+ # Update one of the repositories in the layers setup using a different custom reference
+ # This should only update the selected repository, everything else should remain as is
+ result = runCmd('bitbake-layers create-layers-setup --update --use-custom-reference {first_repo}:{test_ref} {path}'
+ .format(first_repo=repos[0], test_ref=test_ref_2, path=self.testlayer_path))
+ self.validate_layersjson(jsonfile)
+
+ with open(jsonfile) as f:
+ data = json.load(f)
+ first_rev_2 = data['sources'][repos[0]]['git-remote']['rev']
+ first_desc_2 = data['sources'][repos[0]]['git-remote']['describe']
+ second_rev_2 = data['sources'][repos[1]]['git-remote']['rev']
+ second_desc_2 = data['sources'][repos[1]]['git-remote']['describe']
+
+ self.assertEqual(first_rev_2, test_ref_2, "Revision not set correctly: '{}'".format(first_rev_2))
+ self.assertEqual(first_desc_2, '', "Describe not cleared: '{}'".format(first_desc_2))
+ self.assertEqual(second_rev_2, second_rev_1, "Revision should not be updated: '{}'".format(second_rev_2))
+ self.assertEqual(second_desc_2, second_desc_1, "Describe should not be updated: '{}'".format(second_desc_2))
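
[Editorial note, not part of the patch] The two create-layers-setup tests above only ever read data['sources'][<repo>]['git-remote']['rev'] and ['describe'] from setup-layers.json. A rough sketch of the subset of the file they rely on; repository names and values here are illustrative and this is not the complete schema (the full schema lives in meta/files/layers.schema.json):

    # Illustrative subset of setup-layers.json exercised by the tests above;
    # names and values are made up, and only the keys the tests touch are shown.
    layers_setup = {
        "sources": {
            "poky": {
                "git-remote": {
                    "rev": "ref_1",   # pinned revision or custom reference
                    "describe": "",   # cleared when a custom reference is used
                }
            },
            "meta-example": {
                "git-remote": {"rev": "ref_1", "describe": ""}
            },
        }
    }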
diff --git a/meta/lib/oeqa/selftest/cases/bblock.py b/meta/lib/oeqa/selftest/cases/bblock.py
new file mode 100644
index 0000000000..2b62d2a0aa
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/bblock.py
@@ -0,0 +1,203 @@
+#
+# Copyright (c) 2023 BayLibre, SAS
+# Author: Julien Stepahn <jstephan@baylibre.com>
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import os
+import re
+import bb.tinfoil
+
+import oeqa.utils.ftools as ftools
+from oeqa.utils.commands import runCmd, get_bb_var, get_bb_vars, bitbake
+
+from oeqa.selftest.case import OESelftestTestCase
+
+
+class BBLock(OESelftestTestCase):
+ @classmethod
+ def setUpClass(cls):
+ super(BBLock, cls).setUpClass()
+ cls.lockfile = cls.builddir + "/conf/bblock.conf"
+
+ def unlock_recipes(self, recipes=None, tasks=None):
+ cmd = "bblock -r "
+ if recipes:
+ cmd += " ".join(recipes)
+ if tasks:
+ cmd += " -t " + ",".join(tasks)
+ result = runCmd(cmd)
+
+ if recipes:
+ # ensure all signatures are removed from lockfile
+ contents = ftools.read_file(self.lockfile)
+ for recipe in recipes:
+ for task in tasks:
+ find_in_contents = re.search(
+ 'SIGGEN_LOCKEDSIGS_.+\s\+=\s"%s:%s:.*"' % (recipe, task),
+ contents,
+ )
+ self.assertFalse(
+ find_in_contents,
+ msg="%s:%s should not be present into bblock.conf anymore"
+ % (recipe, task),
+ )
+ self.assertExists(self.lockfile)
+ else:
+ self.assertNotExists(self.lockfile)
+
+ def lock_recipes(self, recipes, tasks=None):
+ cmd = "bblock " + " ".join(recipes)
+ if tasks:
+ cmd += " -t " + ",".join(tasks)
+
+ result = runCmd(cmd)
+
+ self.assertExists(self.lockfile)
+
+ # ensure all signatures are added to lockfile
+ contents = ftools.read_file(self.lockfile)
+ for recipe in recipes:
+ if tasks:
+ for task in tasks:
+ find_in_contents = re.search(
+ 'SIGGEN_LOCKEDSIGS_.+\s\+=\s"%s:%s:.*"' % (recipe, task),
+ contents,
+ )
+ self.assertTrue(
+ find_in_contents,
+ msg="%s:%s was not added into bblock.conf. bblock output: %s"
+ % (recipe, task, result.output),
+ )
+
+ def modify_tasks(self, recipes, tasks):
+ task_append = ""
+ for recipe in recipes:
+ bb_vars = get_bb_vars(["PV"], recipe)
+ recipe_pv = bb_vars["PV"]
+ recipe_append_file = recipe + "_" + recipe_pv + ".bbappend"
+
+ os.mkdir(os.path.join(self.testlayer_path, "recipes-test", recipe))
+ recipe_append_path = os.path.join(
+ self.testlayer_path, "recipes-test", recipe, recipe_append_file
+ )
+
+ for task in tasks:
+ task_append += "%s:append() {\n#modify task hash \n}\n" % task
+ ftools.write_file(recipe_append_path, task_append)
+ self.add_command_to_tearDown(
+ "rm -rf %s" % os.path.join(self.testlayer_path, "recipes-test", recipe)
+ )
+
+ def test_lock_single_recipe_single_task(self):
+ recipes = ["quilt"]
+ tasks = ["do_compile"]
+ self._run_test(recipes, tasks)
+
+ def test_lock_single_recipe_multiple_tasks(self):
+ recipes = ["quilt"]
+ tasks = ["do_compile", "do_install"]
+ self._run_test(recipes, tasks)
+
+ def test_lock_single_recipe_all_tasks(self):
+ recipes = ["quilt"]
+ self._run_test(recipes, None)
+
+ def test_lock_multiple_recipe_single_task(self):
+ recipes = ["quilt", "bc"]
+ tasks = ["do_compile"]
+ self._run_test(recipes, tasks)
+
+ def test_lock_architecture_specific(self):
+ # unlock all recipes and ensure no bblock.conf file exist
+ self.unlock_recipes()
+
+ recipes = ["quilt"]
+ tasks = ["do_compile"]
+
+ # lock quilt's do_compile task for another machine
+ if self.td["MACHINE"] == "qemux86-64":
+ machine = "qemuarm"
+ else:
+ machine = "qemux86-64"
+
+ self.write_config('MACHINE = "%s"\n' % machine)
+
+ self.lock_recipes(recipes, tasks)
+
+ self.write_config('MACHINE = "%s"\n' % self.td["MACHINE"])
+ # modify quilt's do_compile task
+ self.modify_tasks(recipes, tasks)
+
+ # build quilt using the default machine
+ # No Note/Warning should be emitted since sig is locked for another machine
+ # (quilt package is architecture dependant)
+ info_message = "NOTE: The following recipes have locked tasks: " + recipes[0]
+ warn_message = "The %s:%s sig is computed to be" % (recipes[0], tasks[0])
+ result = bitbake(recipes[0] + " -n")
+ self.assertNotIn(info_message, result.output)
+ self.assertNotIn(warn_message, result.output)
+
+ # unlock all recipes
+ self.unlock_recipes()
+
+ def _run_test(self, recipes, tasks=None):
+ # unlock all recipes and ensure no bblock.conf file exist
+ self.unlock_recipes()
+
+ self.write_config('BB_SIGNATURE_HANDLER = "OEBasicHash"')
+
+ # lock tasks for recipes
+ result = self.lock_recipes(recipes, tasks)
+
+ if not tasks:
+ tasks = []
+ result = bitbake("-c listtasks " + recipes[0])
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=False, quiet=2)
+ d = tinfoil.parse_recipe(recipes[0])
+
+ for line in result.output.splitlines():
+ if line.startswith("do_"):
+ task = line.split()[0]
+ if "setscene" in task:
+ continue
+ if d.getVarFlag(task, "nostamp"):
+ continue
+ tasks.append(task)
+
+ # build recipes. At this stage we should have a Note about recipes
+ # having locked task's sig, but no warning since sig still match
+ info_message = "NOTE: The following recipes have locked tasks: " + " ".join(
+ recipes
+ )
+ for recipe in recipes:
+ result = bitbake(recipe + " -n")
+ self.assertIn(info_message, result.output)
+ for task in tasks:
+ warn_message = "The %s:%s sig is computed to be" % (recipe, task)
+ self.assertNotIn(warn_message, result.output)
+
+ # modify all tasks that are locked to trigger a sig change then build the recipes
+ # at this stage we should have a Note as before, but also a Warning for all
+ # locked tasks indicating the sig mismatch
+ self.modify_tasks(recipes, tasks)
+ for recipe in recipes:
+ result = bitbake(recipe + " -n")
+ self.assertIn(info_message, result.output)
+ for task in tasks:
+ warn_message = "The %s:%s sig is computed to be" % (recipe, task)
+ self.assertIn(warn_message, result.output)
+
+ # unlock all tasks and rebuild, no more Note/Warning should remain
+ self.unlock_recipes(recipes, tasks)
+ for recipe in recipes:
+ result = bitbake(recipe + " -n")
+ self.assertNotIn(info_message, result.output)
+ for task in tasks:
+ warn_message = "The %s:%s sig is computed to be" % (recipe, task)
+ self.assertNotIn(warn_message, result.output)
+
+ # unlock all recipes
+ self.unlock_recipes()
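
[Editorial note, not part of the patch] The lock/unlock helpers above reduce to one question: does conf/bblock.conf contain a SIGGEN_LOCKEDSIGS entry for a given recipe and task? A minimal standalone sketch of that check, assuming the same 'recipe:task:signature' entry format the tests grep for; the path in the usage comment is hypothetical:

    import re

    def is_locked(bblock_conf, recipe, task):
        """Return True if bblock.conf pins a signature for recipe:task."""
        try:
            with open(bblock_conf) as f:
                contents = f.read()
        except FileNotFoundError:
            # bblock -r with no arguments removes the lockfile entirely
            return False
        pattern = r'SIGGEN_LOCKEDSIGS_\S+\s*\+=\s*"%s:%s:' % (re.escape(recipe), re.escape(task))
        return re.search(pattern, contents) is not None

    # e.g. is_locked("build/conf/bblock.conf", "quilt", "do_compile")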
diff --git a/meta/lib/oeqa/selftest/cases/bblogging.py b/meta/lib/oeqa/selftest/cases/bblogging.py
index 317e68b82f..040c6db089 100644
--- a/meta/lib/oeqa/selftest/cases/bblogging.py
+++ b/meta/lib/oeqa/selftest/cases/bblogging.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -103,16 +105,14 @@ class BitBakeLogging(OESelftestTestCase):
self.write_config('BBINCLUDELOGS = ""')
result = bitbake("logging-test -c pythontest_exit -f -v", ignore_status = True)
self.assertIn("ERROR: Logfile of failure stored in:", result.output)
- # python tasks don't log output with -v currently
- #self.assertCount(result.output, "This is python stdout", 1)
+ self.assertCount(result.output, "This is python stdout", 1)
def test_python_exit_loggingD(self):
# logs, verbose
self.write_config('BBINCLUDELOGS = "yes"')
result = bitbake("logging-test -c pythontest_exit -f -v", ignore_status = True)
self.assertIn("ERROR: Logfile of failure stored in:", result.output)
- # python tasks don't log output with -v currently
- #self.assertCount(result.output, "This is python stdout", 1)
+ self.assertCount(result.output, "This is python stdout", 1)
def test_python_exec_func_python_loggingA(self):
# no logs, no verbose
@@ -137,8 +137,7 @@ class BitBakeLogging(OESelftestTestCase):
result = bitbake("logging-test -c pythontest_exec_func_python -f -v",
ignore_status = True)
self.assertIn("ERROR: Logfile of failure stored in:", result.output)
- # python tasks don't log output with -v currently
- #self.assertCount(result.output, "This is python stdout", 1)
+ self.assertCount(result.output, "This is python stdout", 1)
def test_python_exec_func_python_loggingD(self):
# logs, verbose
@@ -146,8 +145,7 @@ class BitBakeLogging(OESelftestTestCase):
result = bitbake("logging-test -c pythontest_exec_func_python -f -v",
ignore_status = True)
self.assertIn("ERROR: Logfile of failure stored in:", result.output)
- # python tasks don't log output with -v currently
- #self.assertCount(result.output, "This is python stdout", 1)
+ self.assertCount(result.output, "This is python stdout", 1)
def test_python_fatal_loggingA(self):
# no logs, no verbose
@@ -171,8 +169,7 @@ class BitBakeLogging(OESelftestTestCase):
self.write_config('BBINCLUDELOGS = ""')
result = bitbake("logging-test -c pythontest_fatal -f -v", ignore_status = True)
self.assertIn("ERROR: Logfile of failure stored in:", result.output)
- # python tasks don't log output with -v currently
- #self.assertCount(result.output, "This is python fatal test stdout", 1)
+ self.assertCount(result.output, "This is python fatal test stdout", 1)
self.assertCount(result.output, "This is a fatal error", 1)
def test_python_fatal_loggingD(self):
@@ -180,7 +177,6 @@ class BitBakeLogging(OESelftestTestCase):
self.write_config('BBINCLUDELOGS = "yes"')
result = bitbake("logging-test -c pythontest_fatal -f -v", ignore_status = True)
self.assertIn("ERROR: Logfile of failure stored in:", result.output)
- # python tasks don't log output with -v currently
- #self.assertCount(result.output, "This is python fatal test stdout", 1)
+ self.assertCount(result.output, "This is python fatal test stdout", 1)
self.assertCount(result.output, "This is a fatal error", 1)
diff --git a/meta/lib/oeqa/selftest/cases/bbtests.py b/meta/lib/oeqa/selftest/cases/bbtests.py
index cfac7afcf4..98e9f81661 100644
--- a/meta/lib/oeqa/selftest/cases/bbtests.py
+++ b/meta/lib/oeqa/selftest/cases/bbtests.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -39,7 +41,7 @@ class BitbakeTests(OESelftestTestCase):
def test_event_handler(self):
self.write_config("INHERIT += \"test_events\"")
- result = bitbake('m4-native')
+ result = bitbake('selftest-hello-native')
find_build_started = re.search(r"NOTE: Test for bb\.event\.BuildStarted(\n.*)*NOTE: Executing.*Tasks", result.output)
find_build_completed = re.search(r"Tasks Summary:.*(\n.*)*NOTE: Test for bb\.event\.BuildCompleted", result.output)
self.assertTrue(find_build_started, msg = "Match failed in:\n%s" % result.output)
@@ -47,11 +49,11 @@ class BitbakeTests(OESelftestTestCase):
self.assertNotIn('Test for bb.event.InvalidEvent', result.output)
def test_local_sstate(self):
- bitbake('m4-native')
- bitbake('m4-native -cclean')
- result = bitbake('m4-native')
- find_setscene = re.search("m4-native.*do_.*_setscene", result.output)
- self.assertTrue(find_setscene, msg = "No \"m4-native.*do_.*_setscene\" message found during bitbake m4-native. bitbake output: %s" % result.output )
+ bitbake('selftest-hello-native')
+ bitbake('selftest-hello-native -cclean')
+ result = bitbake('selftest-hello-native')
+ find_setscene = re.search("selftest-hello-native.*do_.*_setscene", result.output)
+ self.assertTrue(find_setscene, msg = "No \"selftest-hello-native.*do_.*_setscene\" message found during bitbake selftest-hello-native. bitbake output: %s" % result.output )
def test_bitbake_invalid_recipe(self):
result = bitbake('-b asdf', ignore_status=True)
@@ -145,12 +147,10 @@ INHERIT:remove = \"report-error\"
""")
self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
- bitbake('-ccleanall man-db')
result = bitbake('-c fetch man-db', ignore_status=True)
- bitbake('-ccleanall man-db')
self.delete_recipeinc('man-db')
self.assertEqual(result.status, 1, msg="Command succeded when it should have failed. bitbake output: %s" % result.output)
- self.assertIn('Fetcher failure: Unable to find file file://invalid anywhere. The paths that were searched were:', result.output)
+ self.assertIn('Unable to get checksum for man-db SRC_URI entry invalid: file could not be found', result.output)
def test_rename_downloaded_file(self):
# TODO unique dldir instead of using cleanall
@@ -175,7 +175,7 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\"
self.assertIn('localconf', result.output)
def test_dry_run(self):
- result = runCmd('bitbake -n m4-native')
+ result = runCmd('bitbake -n selftest-hello-native')
self.assertEqual(0, result.status, "bitbake dry run didn't run as expected. %s" % result.output)
def test_just_parse(self):
@@ -188,6 +188,10 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\"
self.assertTrue(find, "No version returned for searched recipe. bitbake output: %s" % result.output)
def test_prefile(self):
+ # Test when the prefile does not exist
+ result = runCmd('bitbake -r conf/prefile.conf', ignore_status=True)
+ self.assertEqual(1, result.status, "bitbake didn't error and should have when a specified prefile didn't exist: %s" % result.output)
+ # Test when the prefile exists
preconf = os.path.join(self.builddir, 'conf/prefile.conf')
self.track_for_cleanup(preconf)
ftools.write_file(preconf ,"TEST_PREFILE=\"prefile\"")
@@ -198,6 +202,10 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\"
self.assertIn('localconf', result.output)
def test_postfile(self):
+ # Test when the postfile does not exist
+ result = runCmd('bitbake -R conf/postfile.conf', ignore_status=True)
+ self.assertEqual(1, result.status, "bitbake didn't error and should have when a specified postfile didn't exist: %s" % result.output)
+ # Test when the postfile exists
postconf = os.path.join(self.builddir, 'conf/postfile.conf')
self.track_for_cleanup(postconf)
ftools.write_file(postconf , "TEST_POSTFILE=\"postfile\"")
@@ -224,16 +232,21 @@ INHERIT:remove = \"report-error\"
self.assertLess(errorpos,continuepos, msg = "bitbake didn't pass do_fail_task. bitbake output: %s" % result.output)
def test_non_gplv3(self):
- self.write_config('INCOMPATIBLE_LICENSE = "GPL-3.0-or-later"')
+ self.write_config('''INCOMPATIBLE_LICENSE = "GPL-3.0-or-later"
+require conf/distro/include/no-gplv3.inc
+''')
result = bitbake('selftest-ed', ignore_status=True)
self.assertEqual(result.status, 0, "Bitbake failed, exit code %s, output %s" % (result.status, result.output))
lic_dir = get_bb_var('LICENSE_DIRECTORY')
- self.assertFalse(os.path.isfile(os.path.join(lic_dir, 'selftest-ed/generic_GPL-3.0-or-later')))
- self.assertTrue(os.path.isfile(os.path.join(lic_dir, 'selftest-ed/generic_GPL-2.0-or-later')))
+ arch = get_bb_var('SSTATE_PKGARCH')
+ filename = os.path.join(lic_dir, arch, 'selftest-ed', 'generic_GPL-3.0-or-later')
+ self.assertFalse(os.path.isfile(filename), msg="License file %s exists and shouldn't" % filename)
+ filename = os.path.join(lic_dir, arch, 'selftest-ed', 'generic_GPL-2.0-or-later')
+ self.assertTrue(os.path.isfile(filename), msg="License file %s doesn't exist" % filename)
def test_setscene_only(self):
""" Bitbake option to restore from sstate only within a build (i.e. execute no real tasks, only setscene)"""
- test_recipe = 'ed'
+ test_recipe = 'selftest-hello-native'
bitbake(test_recipe)
bitbake('-c clean %s' % test_recipe)
@@ -246,7 +259,7 @@ INHERIT:remove = \"report-error\"
'Executed tasks were: %s' % (task, str(tasks)))
def test_skip_setscene(self):
- test_recipe = 'ed'
+ test_recipe = 'selftest-hello-native'
bitbake(test_recipe)
bitbake('-c clean %s' % test_recipe)
@@ -350,4 +363,15 @@ INHERIT:remove = \"report-error\"
self.write_config("DISTROOVERRIDES .= \":gitunpack-enable-recipe\"")
result = bitbake('gitunpackoffline-fail -c fetch', ignore_status=True)
- self.assertTrue("Recipe uses a floating tag/branch without a fixed SRCREV" in result.output, msg = "Recipe without PV set to SRCPV should have failed: %s" % result.output)
+ self.assertTrue(re.search("Recipe uses a floating tag/branch .* for repo .* without a fixed SRCREV yet doesn't call bb.fetch2.get_srcrev()", result.output), msg = "Recipe without PV set to SRCPV should have failed: %s" % result.output)
+
+ def test_unexpanded_variable_in_path(self):
+ """
+ Test that bitbake fails if directory contains unexpanded bitbake variable in the name
+ """
+ recipe_name = "gitunpackoffline"
+ self.write_config('PV:pn-gitunpackoffline:append = "+${UNDEFVAL}"')
+ result = bitbake('{}'.format(recipe_name), ignore_status=True)
+ self.assertGreater(result.status, 0, "Build should have failed if ${ is in the path")
+ self.assertTrue(re.search("ERROR: Directory name /.* contains unexpanded bitbake variable. This may cause build failures and WORKDIR polution",
+ result.output), msg = "mkdirhier with unexpanded variable should have failed: %s" % result.output)
diff --git a/meta/lib/oeqa/selftest/cases/binutils.py b/meta/lib/oeqa/selftest/cases/binutils.py
index 3b0b44b390..1688eabe4e 100644
--- a/meta/lib/oeqa/selftest/cases/binutils.py
+++ b/meta/lib/oeqa/selftest/cases/binutils.py
@@ -1,5 +1,10 @@
+#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
+#
import os
+import time
from oeqa.core.decorator import OETestTag
from oeqa.core.case import OEPTestResultTestCase
from oeqa.selftest.case import OESelftestTestCase
@@ -32,15 +37,19 @@ class BinutilsCrossSelfTest(OESelftestTestCase, OEPTestResultTestCase):
bb_vars = get_bb_vars(["B", "TARGET_SYS", "T"], recipe)
builddir, target_sys, tdir = bb_vars["B"], bb_vars["TARGET_SYS"], bb_vars["T"]
+ start_time = time.time()
+
bitbake("{0} -c check".format(recipe))
+ end_time = time.time()
+
sumspath = os.path.join(builddir, suite, "{0}.sum".format(suite))
if not os.path.exists(sumspath):
sumspath = os.path.join(builddir, suite, "testsuite", "{0}.sum".format(suite))
logpath = os.path.splitext(sumspath)[0] + ".log"
ptestsuite = "binutils-{}".format(suite) if suite != "binutils" else suite
- self.ptest_section(ptestsuite, logfile = logpath)
+ self.ptest_section(ptestsuite, duration = int(end_time - start_time), logfile = logpath)
with open(sumspath, "r") as f:
for test, result in parse_values(f):
self.ptest_result(ptestsuite, test, result)
diff --git a/meta/lib/oeqa/selftest/cases/buildhistory.py b/meta/lib/oeqa/selftest/cases/buildhistory.py
index d865da6252..2d55994916 100644
--- a/meta/lib/oeqa/selftest/cases/buildhistory.py
+++ b/meta/lib/oeqa/selftest/cases/buildhistory.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/buildoptions.py b/meta/lib/oeqa/selftest/cases/buildoptions.py
index ad604d6ae2..31dafaa9c5 100644
--- a/meta/lib/oeqa/selftest/cases/buildoptions.py
+++ b/meta/lib/oeqa/selftest/cases/buildoptions.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -9,8 +11,10 @@ import shutil
import tempfile
from oeqa.selftest.case import OESelftestTestCase
from oeqa.selftest.cases.buildhistory import BuildhistoryBase
+from oeqa.core.decorator.data import skipIfMachine
from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars
import oeqa.utils.ftools as ftools
+from oeqa.core.decorator import OETestTag
class ImageOptionsTests(OESelftestTestCase):
@@ -201,6 +205,7 @@ class ToolchainOptions(OESelftestTestCase):
self.write_config(features)
bitbake('fortran-helloworld')
+@OETestTag("yocto-mirrors")
class SourceMirroring(OESelftestTestCase):
# Can we download everything from the Yocto Sources Mirror over http only
def test_yocto_source_mirror(self):
diff --git a/meta/lib/oeqa/selftest/cases/c_cpp.py b/meta/lib/oeqa/selftest/cases/c_cpp.py
new file mode 100644
index 0000000000..9a70ce29f5
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/c_cpp.py
@@ -0,0 +1,60 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.core.decorator.data import skipIfNotQemuUsermode
+from oeqa.utils.commands import bitbake
+
+
+class CCppTests(OESelftestTestCase):
+
+ @skipIfNotQemuUsermode()
+ def _qemu_usermode(self, recipe_name):
+ self.add_command_to_tearDown("bitbake -c clean %s" % recipe_name)
+ bitbake("%s -c run_tests" % recipe_name)
+
+ @skipIfNotQemuUsermode()
+ def _qemu_usermode_failing(self, recipe_name):
+ config = 'PACKAGECONFIG:pn-%s = "failing_test"' % recipe_name
+ self.write_config(config)
+ self.add_command_to_tearDown("bitbake -c clean %s" % recipe_name)
+ result = bitbake("%s -c run_tests" % recipe_name, ignore_status=True)
+ self.assertNotEqual(0, result.status, "command: %s is expected to fail but passed, status: %s, output: %s, error: %s" % (
+ result.command, result.status, result.output, result.error))
+
+
+class CMakeTests(CCppTests):
+ def test_cmake_qemu(self):
+ """Test for cmake-qemu.bbclass good case
+
+ compile the cmake-example and verify the CTests pass in qemu-user.
+ qemu-user is configured by CMAKE_CROSSCOMPILING_EMULATOR.
+ """
+ self._qemu_usermode("cmake-example")
+
+ def test_cmake_qemu_failing(self):
+ """Test for cmake-qemu.bbclass bad case
+
+ Break the comparison in the test code and verify the CTests do not pass.
+ """
+ self._qemu_usermode_failing("cmake-example")
+
+
+class MesonTests(CCppTests):
+ def test_meson_qemu(self):
+ """Test the qemu-user feature of the meson.bbclass good case
+
+ compile the meson-example and verify the Unit Test pass in qemu-user.
+ qemu-user is configured by meson's exe_wrapper option.
+ """
+ self._qemu_usermode("meson-example")
+
+ def test_meson_qemu_failing(self):
+ """Test the qemu-user feature of the meson.bbclass bad case
+
+ Break the comparison in the test code and verify the Unit Test does not pass in qemu-user.
+ """
+ self._qemu_usermode_failing("meson-example")
diff --git a/meta/lib/oeqa/selftest/cases/containerimage.py b/meta/lib/oeqa/selftest/cases/containerimage.py
index e0aea1a1ef..23c0a1408a 100644
--- a/meta/lib/oeqa/selftest/cases/containerimage.py
+++ b/meta/lib/oeqa/selftest/cases/containerimage.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/cve_check.py b/meta/lib/oeqa/selftest/cases/cve_check.py
index d1947baffc..60cecd1328 100644
--- a/meta/lib/oeqa/selftest/cases/cve_check.py
+++ b/meta/lib/oeqa/selftest/cases/cve_check.py
@@ -1,9 +1,19 @@
-from oe.cve_check import Version
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import json
+import os
from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_vars
class CVECheck(OESelftestTestCase):
def test_version_compare(self):
+ from oe.cve_check import Version
+
result = Version("100") > Version("99")
self.assertTrue( result, msg="Failed to compare version '100' > '99'")
result = Version("2.3.1") > Version("2.2.3")
@@ -42,3 +52,191 @@ class CVECheck(OESelftestTestCase):
self.assertTrue( result ,msg="Failed to compare version with suffix '1.0p2' > '1.0p1'")
result = Version("1.0_patch2","patch") < Version("1.0_patch3","patch")
self.assertTrue( result ,msg="Failed to compare version with suffix '1.0_patch2' < '1.0_patch3'")
+
+
+ def test_convert_cve_version(self):
+ from oe.cve_check import convert_cve_version
+
+ # Default format
+ self.assertEqual(convert_cve_version("8.3"), "8.3")
+ self.assertEqual(convert_cve_version(""), "")
+
+ # OpenSSL format version
+ self.assertEqual(convert_cve_version("1.1.1t"), "1.1.1t")
+
+ # OpenSSH format
+ self.assertEqual(convert_cve_version("8.3_p1"), "8.3p1")
+ self.assertEqual(convert_cve_version("8.3_p22"), "8.3p22")
+
+ # Linux kernel format
+ self.assertEqual(convert_cve_version("6.2_rc8"), "6.2-rc8")
+ self.assertEqual(convert_cve_version("6.2_rc31"), "6.2-rc31")
+
+
+ def test_recipe_report_json(self):
+ config = """
+INHERIT += "cve-check"
+CVE_CHECK_FORMAT_JSON = "1"
+"""
+ self.write_config(config)
+
+ vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "m4-native_cve.json")
+
+ try:
+ os.remove(summary_json)
+ os.remove(recipe_json)
+ except FileNotFoundError:
+ pass
+
+ bitbake("m4-native -c cve_check")
+
+ def check_m4_json(filename):
+ with open(filename) as f:
+ report = json.load(f)
+ self.assertEqual(report["version"], "1")
+ self.assertEqual(len(report["package"]), 1)
+ package = report["package"][0]
+ self.assertEqual(package["name"], "m4-native")
+ found_cves = { issue["id"]: issue["status"] for issue in package["issue"]}
+ self.assertIn("CVE-2008-1687", found_cves)
+ self.assertEqual(found_cves["CVE-2008-1687"], "Patched")
+
+ self.assertExists(summary_json)
+ check_m4_json(summary_json)
+ self.assertExists(recipe_json)
+ check_m4_json(recipe_json)
+
+
+ def test_image_json(self):
+ config = """
+INHERIT += "cve-check"
+CVE_CHECK_FORMAT_JSON = "1"
+"""
+ self.write_config(config)
+
+ vars = get_bb_vars(["CVE_CHECK_DIR", "CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ report_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ print(report_json)
+ try:
+ os.remove(report_json)
+ except FileNotFoundError:
+ pass
+
+ bitbake("core-image-minimal-initramfs")
+ self.assertExists(report_json)
+
+ # Check that the summary report lists at least one package
+ with open(report_json) as f:
+ report = json.load(f)
+ self.assertEqual(report["version"], "1")
+ self.assertGreater(len(report["package"]), 1)
+
+ # Check that a random recipe wrote a recipe report to deploy/cve/
+ recipename = report["package"][0]["name"]
+ recipe_report = os.path.join(vars["CVE_CHECK_DIR"], recipename + "_cve.json")
+ self.assertExists(recipe_report)
+ with open(recipe_report) as f:
+ report = json.load(f)
+ self.assertEqual(report["version"], "1")
+ self.assertEqual(len(report["package"]), 1)
+ self.assertEqual(report["package"][0]["name"], recipename)
+
+
+ def test_recipe_report_json_unpatched(self):
+ config = """
+INHERIT += "cve-check"
+CVE_CHECK_FORMAT_JSON = "1"
+CVE_CHECK_REPORT_PATCHED = "0"
+"""
+ self.write_config(config)
+
+ vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "m4-native_cve.json")
+
+ try:
+ os.remove(summary_json)
+ os.remove(recipe_json)
+ except FileNotFoundError:
+ pass
+
+ bitbake("m4-native -c cve_check")
+
+ def check_m4_json(filename):
+ with open(filename) as f:
+ report = json.load(f)
+ self.assertEqual(report["version"], "1")
+ self.assertEqual(len(report["package"]), 1)
+ package = report["package"][0]
+ self.assertEqual(package["name"], "m4-native")
+ #m4 had only Patched CVEs, so the issues array will be empty
+ self.assertEqual(package["issue"], [])
+
+ self.assertExists(summary_json)
+ check_m4_json(summary_json)
+ self.assertExists(recipe_json)
+ check_m4_json(recipe_json)
+
+
+ def test_recipe_report_json_ignored(self):
+ config = """
+INHERIT += "cve-check"
+CVE_CHECK_FORMAT_JSON = "1"
+CVE_CHECK_REPORT_PATCHED = "1"
+"""
+ self.write_config(config)
+
+ vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "logrotate_cve.json")
+
+ try:
+ os.remove(summary_json)
+ os.remove(recipe_json)
+ except FileNotFoundError:
+ pass
+
+ bitbake("logrotate -c cve_check")
+
+ def check_m4_json(filename):
+ with open(filename) as f:
+ report = json.load(f)
+ self.assertEqual(report["version"], "1")
+ self.assertEqual(len(report["package"]), 1)
+ package = report["package"][0]
+ self.assertEqual(package["name"], "logrotate")
+ found_cves = {}
+ for issue in package["issue"]:
+ found_cves[issue["id"]] = {
+ "status" : issue["status"],
+ "detail" : issue["detail"] if "detail" in issue else "",
+ "description" : issue["description"] if "description" in issue else ""
+ }
+ # m4 CVE should not be in logrotate
+ self.assertNotIn("CVE-2008-1687", found_cves)
+ # logrotate has both Patched and Ignored CVEs
+ self.assertIn("CVE-2011-1098", found_cves)
+ self.assertEqual(found_cves["CVE-2011-1098"]["status"], "Patched")
+ self.assertEqual(len(found_cves["CVE-2011-1098"]["detail"]), 0)
+ self.assertEqual(len(found_cves["CVE-2011-1098"]["description"]), 0)
+ detail = "not-applicable-platform"
+ description = "CVE is debian, gentoo or SUSE specific on the way logrotate was installed/used"
+ self.assertIn("CVE-2011-1548", found_cves)
+ self.assertEqual(found_cves["CVE-2011-1548"]["status"], "Ignored")
+ self.assertEqual(found_cves["CVE-2011-1548"]["detail"], detail)
+ self.assertEqual(found_cves["CVE-2011-1548"]["description"], description)
+ self.assertIn("CVE-2011-1549", found_cves)
+ self.assertEqual(found_cves["CVE-2011-1549"]["status"], "Ignored")
+ self.assertEqual(found_cves["CVE-2011-1549"]["detail"], detail)
+ self.assertEqual(found_cves["CVE-2011-1549"]["description"], description)
+ self.assertIn("CVE-2011-1550", found_cves)
+ self.assertEqual(found_cves["CVE-2011-1550"]["status"], "Ignored")
+ self.assertEqual(found_cves["CVE-2011-1550"]["detail"], detail)
+ self.assertEqual(found_cves["CVE-2011-1550"]["description"], description)
+
+ self.assertExists(summary_json)
+ check_m4_json(summary_json)
+ self.assertExists(recipe_json)
+ check_m4_json(recipe_json)
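
[Editorial note, not part of the patch] The three JSON report tests above all check the same layout: a top-level "version", a "package" list, and per-package "issue" entries carrying "id", "status" and, for ignored CVEs, "detail" and "description". A minimal sketch of consuming that layout outside the test suite; the path and output in the usage comment are illustrative:

    import json

    def summarize_cve_report(path):
        """Tally CVE statuses in a cve-check JSON report (per-recipe or summary)."""
        with open(path) as f:
            report = json.load(f)
        assert report["version"] == "1"
        counts = {}
        for package in report["package"]:
            for issue in package["issue"]:
                counts[issue["status"]] = counts.get(issue["status"], 0) + 1
        return counts

    # e.g. summarize_cve_report("tmp/deploy/cve/logrotate_cve.json")
    # might return {"Patched": 1, "Ignored": 3}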
diff --git a/meta/lib/oeqa/selftest/cases/debuginfod.py b/meta/lib/oeqa/selftest/cases/debuginfod.py
new file mode 100644
index 0000000000..505b4be837
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/debuginfod.py
@@ -0,0 +1,158 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+import os
+import socketserver
+import subprocess
+import time
+import urllib
+import pathlib
+
+from oeqa.core.decorator import OETestTag
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_var, runqemu
+
+
+class Debuginfod(OESelftestTestCase):
+
+ def wait_for_debuginfod(self, port):
+ """
+ debuginfod takes time to scan the packages and requesting too early may
+ result in a test failure if the right packages haven't been scanned yet.
+
+ Request the metrics endpoint periodically and wait for there to be no
+ busy scanning threads.
+
+ Returns if debuginfod is ready, raises an exception if not within the
+ timeout.
+ """
+
+ # Wait two minutes
+ countdown = 24
+ delay = 5
+ latest = None
+
+ while countdown:
+ self.logger.info("waiting...")
+ time.sleep(delay)
+
+ self.logger.info("polling server")
+ if self.debuginfod.poll():
+ self.logger.info("server dead")
+ self.debuginfod.communicate()
+ self.fail("debuginfod terminated unexpectedly")
+ self.logger.info("server alive")
+
+ try:
+ with urllib.request.urlopen("http://localhost:%d/metrics" % port, timeout=10) as f:
+ for line in f.read().decode("ascii").splitlines():
+ key, value = line.rsplit(" ", 1)
+ if key == "thread_busy{role=\"scan\"}":
+ latest = int(value)
+ self.logger.info("Waiting for %d scan jobs to finish" % latest)
+ if latest == 0:
+ return
+ except urllib.error.URLError as e:
+ # TODO: how to catch just timeouts?
+ self.logger.error(e)
+
+ countdown -= 1
+
+ raise TimeoutError("Cannot connect debuginfod, still %d scan jobs running" % latest)
+
+ def start_debuginfod(self):
+ # We assume that the caller has already bitbake'd elfutils-native:do_addto_recipe_sysroot
+
+ # Save some useful paths for later
+ native_sysroot = pathlib.Path(get_bb_var("RECIPE_SYSROOT_NATIVE", "elfutils-native"))
+ native_bindir = native_sysroot / "usr" / "bin"
+ self.debuginfod = native_bindir / "debuginfod"
+ self.debuginfod_find = native_bindir / "debuginfod-find"
+
+ cmd = [
+ self.debuginfod,
+ "--verbose",
+ # In-memory database, this is a one-shot test
+ "--database=:memory:",
+ # Don't use all the host cores
+ "--concurrency=8",
+ "--connection-pool=8",
+ # Disable rescanning, this is a one-shot test
+ "--rescan-time=0",
+ "--groom-time=0",
+ get_bb_var("DEPLOY_DIR"),
+ ]
+
+ format = get_bb_var("PACKAGE_CLASSES").split()[0]
+ if format == "package_deb":
+ cmd.append("--scan-deb-dir")
+ elif format == "package_ipk":
+ cmd.append("--scan-deb-dir")
+ elif format == "package_rpm":
+ cmd.append("--scan-rpm-dir")
+ else:
+ self.fail("Unknown package class %s" % format)
+
+ # Find a free port. Racey but the window is small.
+ with socketserver.TCPServer(("localhost", 0), None) as s:
+ self.port = s.server_address[1]
+ cmd.append("--port=%d" % self.port)
+
+ self.logger.info(f"Starting server {cmd}")
+ self.debuginfod = subprocess.Popen(cmd, env={})
+ self.wait_for_debuginfod(self.port)
+
+
+ def test_debuginfod_native(self):
+ """
+ Test debuginfod outside of qemu, by building a package and looking up a
+ binary's debuginfo using elfutils-native.
+ """
+
+ self.write_config("""
+TMPDIR = "${TOPDIR}/tmp-debuginfod"
+DISTRO_FEATURES:append = " debuginfod"
+""")
+ bitbake("elfutils-native:do_addto_recipe_sysroot xz xz:do_package")
+
+ try:
+ self.start_debuginfod()
+
+ env = os.environ.copy()
+ env["DEBUGINFOD_URLS"] = "http://localhost:%d/" % self.port
+
+ pkgs = pathlib.Path(get_bb_var("PKGDEST", "xz"))
+ cmd = (self.debuginfod_find, "debuginfo", pkgs / "xz" / "usr" / "bin" / "xz.xz")
+ self.logger.info(f"Starting client {cmd}")
+ output = subprocess.check_output(cmd, env=env, text=True)
+ # This should be more comprehensive
+ self.assertIn("/.cache/debuginfod_client/", output)
+ finally:
+ self.debuginfod.kill()
+
+ @OETestTag("runqemu")
+ def test_debuginfod_qemu(self):
+ """
+ Test debuginfod-find inside a qemu, talking to a debuginfod on the host.
+ """
+
+ self.write_config("""
+TMPDIR = "${TOPDIR}/tmp-debuginfod"
+DISTRO_FEATURES:append = " debuginfod"
+CORE_IMAGE_EXTRA_INSTALL += "elfutils xz"
+ """)
+ bitbake("core-image-minimal elfutils-native:do_addto_recipe_sysroot")
+
+ try:
+ self.start_debuginfod()
+
+ with runqemu("core-image-minimal", runqemuparams="nographic") as qemu:
+ cmd = "DEBUGINFOD_URLS=http://%s:%d/ debuginfod-find debuginfo /usr/bin/xz" % (qemu.server_ip, self.port)
+ self.logger.info(f"Starting client {cmd}")
+ status, output = qemu.run_serial(cmd)
+ # This should be more comprehensive
+ self.assertIn("/.cache/debuginfod_client/", output)
+ finally:
+ self.debuginfod.kill()
diff --git a/meta/lib/oeqa/selftest/cases/devtool.py b/meta/lib/oeqa/selftest/cases/devtool.py
index 3eea2b1a0e..51949e3c93 100644
--- a/meta/lib/oeqa/selftest/cases/devtool.py
+++ b/meta/lib/oeqa/selftest/cases/devtool.py
@@ -1,13 +1,18 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
+import errno
import os
import re
import shutil
import tempfile
import glob
import fnmatch
+import unittest
+import json
from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import runCmd, bitbake, get_bb_var, create_temp_layer
@@ -24,6 +29,9 @@ def setUpModule():
corecopydir = os.path.join(templayerdir, 'core-copy')
bblayers_conf = os.path.join(os.environ['BUILDDIR'], 'conf', 'bblayers.conf')
edited_layers = []
+ # make sure user doesn't have a local workspace
+ result = runCmd('bitbake-layers show-layers')
+ assert "workspacelayer" not in result.output, "Devtool test suite cannot be run with a local workspace directory"
# We need to take a copy of the meta layer so we can modify it and not
# have any races against other tests that might be running in parallel
@@ -38,10 +46,17 @@ def setUpModule():
canonical_layerpath = os.path.realpath(canonical_layerpath) + '/'
edited_layers.append(layerpath)
oldmetapath = os.path.realpath(layerpath)
+
+ # when downloading poky from tar.gz some tests will be skipped (BUG 12389)
+ try:
+ runCmd('git rev-parse --is-inside-work-tree', cwd=canonical_layerpath)
+ except:
+ raise unittest.SkipTest("devtool tests require folder to be a git repo")
+
result = runCmd('git rev-parse --show-toplevel', cwd=canonical_layerpath)
oldreporoot = result.output.rstrip()
newmetapath = os.path.join(corecopydir, os.path.relpath(oldmetapath, oldreporoot))
- runCmd('git clone %s %s' % (oldreporoot, corecopydir), cwd=templayerdir)
+ runCmd('git clone file://%s %s' % (oldreporoot, corecopydir), cwd=templayerdir)
# Now we need to copy any modified files
# You might ask "why not just copy the entire tree instead of
# cloning and doing this?" - well, the problem with that is
@@ -218,6 +233,75 @@ class DevtoolTestCase(OESelftestTestCase):
filelist.append(' '.join(splitline))
return filelist
+ def _check_diff(self, diffoutput, addlines, removelines):
+ """Check output from 'git diff' matches expectation"""
+ remaining_addlines = addlines[:]
+ remaining_removelines = removelines[:]
+ for line in diffoutput.splitlines():
+ if line.startswith('+++') or line.startswith('---'):
+ continue
+ elif line.startswith('+'):
+ matched = False
+ for item in addlines:
+ if re.match(item, line[1:].strip()):
+ matched = True
+ remaining_addlines.remove(item)
+ break
+ self.assertTrue(matched, 'Unexpected diff add line: %s' % line)
+ elif line.startswith('-'):
+ matched = False
+ for item in removelines:
+ if re.match(item, line[1:].strip()):
+ matched = True
+ remaining_removelines.remove(item)
+ break
+ self.assertTrue(matched, 'Unexpected diff remove line: %s' % line)
+ if remaining_addlines:
+ self.fail('Expected added lines not found: %s' % remaining_addlines)
+ if remaining_removelines:
+ self.fail('Expected removed lines not found: %s' % remaining_removelines)
+
+ def _check_runqemu_prerequisites(self):
+ """Check runqemu is available
+
+ Whilst some tests would seemingly be better placed as a runtime test,
+ unfortunately the runtime tests run under bitbake and you can't run
+ devtool within bitbake (since devtool needs to run bitbake itself).
+ Additionally we are testing build-time functionality as well, so
+ really this has to be done as an oe-selftest test.
+ """
+ machine = get_bb_var('MACHINE')
+ if not machine.startswith('qemu'):
+ self.skipTest('This test only works with qemu machines')
+ if not os.path.exists('/etc/runqemu-nosudo'):
+ self.skipTest('You must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
+ result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ip tuntap show', ignore_status=True)
+ if result.status != 0:
+ result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ifconfig -a', ignore_status=True)
+ if result.status != 0:
+ self.skipTest('Failed to determine if tap devices exist with ifconfig or ip: %s' % result.output)
+ for line in result.output.splitlines():
+ if line.startswith('tap'):
+ break
+ else:
+ self.skipTest('No tap devices found - you must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
+
+ def _test_devtool_add_git_url(self, git_url, version, pn, resulting_src_uri):
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool add --version %s %s %s' % (version, pn, git_url))
+ self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created')
+ # Check the recipe name is correct
+ recipefile = get_bb_var('FILE', pn)
+ self.assertIn('%s_git.bb' % pn, recipefile, 'Recipe file incorrectly named')
+ self.assertIn(recipefile, result.output)
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(pn, result.output)
+ self.assertIn(recipefile, result.output)
+ checkvars = {}
+ checkvars['SRC_URI'] = resulting_src_uri
+ self._test_recipe_contents(recipefile, checkvars, [])
class DevtoolBase(DevtoolTestCase):
@@ -230,6 +314,7 @@ class DevtoolBase(DevtoolTestCase):
cls.sstate_conf = 'SSTATE_DIR = "%s"\n' % cls.devtool_sstate
cls.sstate_conf += ('SSTATE_MIRRORS += "file://.* file:///%s/PATH"\n'
% cls.original_sstate)
+ cls.sstate_conf += ('BB_HASHSERVE_UPSTREAM = "hashserv.yocto.io:8687"\n')
@classmethod
def tearDownClass(cls):
@@ -311,6 +396,38 @@ class DevtoolAddTests(DevtoolBase):
bindir = bindir[1:]
self.assertTrue(os.path.isfile(os.path.join(installdir, bindir, 'pv')), 'pv binary not found in D')
+ def test_devtool_add_binary(self):
+ # Create a binary package containing a known test file
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ pn = 'tst-bin'
+ pv = '1.0'
+ test_file_dir = "var/lib/%s/" % pn
+ test_file_name = "test_file"
+ test_file_content = "TEST CONTENT"
+ test_file_package_root = os.path.join(tempdir, pn)
+ test_file_dir_full = os.path.join(test_file_package_root, test_file_dir)
+ bb.utils.mkdirhier(test_file_dir_full)
+ with open(os.path.join(test_file_dir_full, test_file_name), "w") as f:
+ f.write(test_file_content)
+ bin_package_path = os.path.join(tempdir, "%s.tar.gz" % pn)
+ runCmd("tar czf %s -C %s ." % (bin_package_path, test_file_package_root))
+
+ # Test devtool add -b on the binary package
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c cleansstate %s' % pn)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool add -b %s %s' % (pn, bin_package_path))
+ self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created')
+
+ # Build the resulting recipe
+ result = runCmd('devtool build %s' % pn)
+ installdir = get_bb_var('D', pn)
+ self.assertTrue(installdir, 'Could not query installdir variable')
+
+ # Check that a known file from the binary package has indeed been installed
+ self.assertTrue(os.path.isfile(os.path.join(installdir, test_file_dir, test_file_name)), '%s not found in D' % test_file_name)
+
def test_devtool_add_git_local(self):
# We need dbus built so that DEPENDS recognition works
bitbake('dbus')
@@ -346,12 +463,28 @@ class DevtoolAddTests(DevtoolBase):
checkvars['LICENSE'] = 'GPL-2.0-only'
checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263'
checkvars['S'] = '${WORKDIR}/git'
- checkvars['PV'] = '0.1+git${SRCPV}'
+ checkvars['PV'] = '0.1+git'
checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/dbus-wait;protocol=https;branch=master'
checkvars['SRCREV'] = srcrev
checkvars['DEPENDS'] = set(['dbus'])
self._test_recipe_contents(recipefile, checkvars, [])
+ def test_devtool_add_git_style1(self):
+ version = 'v3.1.0'
+ pn = 'mbedtls'
+ # this will trigger reformat_git_uri with branch parameter in url
+ git_url = "'git://git@github.com/ARMmbed/mbedtls.git;branch=mbedtls-2.28;protocol=https'"
+ resulting_src_uri = "git://git@github.com/ARMmbed/mbedtls.git;branch=mbedtls-2.28;protocol=https"
+ self._test_devtool_add_git_url(git_url, version, pn, resulting_src_uri)
+
+ def test_devtool_add_git_style2(self):
+ version = 'v3.1.0'
+ pn = 'mbedtls'
+ # this will trigger reformat_git_uri without a branch parameter in the url
+ git_url = "'git://git@github.com/ARMmbed/mbedtls.git;protocol=https'"
+ resulting_src_uri = "gitsm://git@github.com/ARMmbed/mbedtls.git;protocol=https;branch=master"
+ self._test_devtool_add_git_url(git_url, version, pn, resulting_src_uri)
+
def test_devtool_add_library(self):
# Fetch source
tempdir = tempfile.mkdtemp(prefix='devtoolqa')
@@ -412,7 +545,7 @@ class DevtoolAddTests(DevtoolBase):
self.track_for_cleanup(self.workspacedir)
self.add_command_to_tearDown('bitbake -c cleansstate %s' % testrecipe)
self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
- result = runCmd('devtool add %s %s -f %s' % (testrecipe, srcdir, url))
+ result = runCmd('devtool add --no-pypi %s %s -f %s' % (testrecipe, srcdir, url))
self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. %s' % result.output)
self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory')
self.assertTrue(os.path.isdir(os.path.join(srcdir, '.git')), 'git repository for external source tree was not created')
@@ -431,7 +564,7 @@ class DevtoolAddTests(DevtoolBase):
result = runCmd('devtool reset -n %s' % testrecipe)
shutil.rmtree(srcdir)
fakever = '1.9'
- result = runCmd('devtool add %s %s -f %s -V %s' % (testrecipe, srcdir, url, fakever))
+ result = runCmd('devtool add --no-pypi %s %s -f %s -V %s' % (testrecipe, srcdir, url, fakever))
self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory')
# Test devtool status
result = runCmd('devtool status')
@@ -469,7 +602,7 @@ class DevtoolAddTests(DevtoolBase):
self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named')
checkvars = {}
checkvars['S'] = '${WORKDIR}/git'
- checkvars['PV'] = '1.0+git${SRCPV}'
+ checkvars['PV'] = '1.0+git'
checkvars['SRC_URI'] = url_branch
checkvars['SRCREV'] = '${AUTOREV}'
self._test_recipe_contents(recipefile, checkvars, [])
@@ -488,7 +621,7 @@ class DevtoolAddTests(DevtoolBase):
self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named')
checkvars = {}
checkvars['S'] = '${WORKDIR}/git'
- checkvars['PV'] = '1.5+git${SRCPV}'
+ checkvars['PV'] = '1.5+git'
checkvars['SRC_URI'] = url_branch
checkvars['SRCREV'] = checkrev
self._test_recipe_contents(recipefile, checkvars, [])
@@ -512,7 +645,7 @@ class DevtoolAddTests(DevtoolBase):
result = runCmd('devtool status')
self.assertIn(testrecipe, result.output)
self.assertIn(srcdir, result.output)
- # Check recipe
+ # Check the recipe generated by devtool add
recipefile = get_bb_var('FILE', testrecipe)
self.assertIn('%s_%s.bb' % (testrecipe, testver), recipefile, 'Recipe file incorrectly named')
checkvars = {}
@@ -544,6 +677,19 @@ class DevtoolAddTests(DevtoolBase):
# Test devtool build
result = runCmd('devtool build %s' % pn)
+ def test_devtool_add_python_egg_requires(self):
+ # Fetch source
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ testver = '0.14.0'
+ url = 'https://files.pythonhosted.org/packages/e9/9e/25d59f5043cf763833b2581c8027fa92342c4cf8ee523b498ecdf460c16d/uvicorn-%s.tar.gz' % testver
+ testrecipe = 'python3-uvicorn'
+ srcdir = os.path.join(tempdir, testrecipe)
+ # Test devtool add
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool add %s %s -f %s' % (testrecipe, srcdir, url))
+
class DevtoolModifyTests(DevtoolBase):
def test_devtool_modify(self):
@@ -705,6 +851,7 @@ class DevtoolModifyTests(DevtoolBase):
self.assertTrue(bbclassextended, 'None of these recipes are BBCLASSEXTENDed to native - need to adjust testrecipes list: %s' % ', '.join(testrecipes))
self.assertTrue(inheritnative, 'None of these recipes do "inherit native" - need to adjust testrecipes list: %s' % ', '.join(testrecipes))
+
def test_devtool_modify_localfiles_only(self):
# Check preconditions
testrecipe = 'base-files'
@@ -771,6 +918,122 @@ class DevtoolModifyTests(DevtoolBase):
# Try building
bitbake(testrecipe)
+ def test_devtool_modify_git_no_extract(self):
+ # Check preconditions
+ testrecipe = 'psplash'
+ src_uri = get_bb_var('SRC_URI', testrecipe)
+ self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe)
+ # Clean up anything in the workdir/sysroot/sstate cache
+ bitbake('%s -c cleansstate' % testrecipe)
+ # Try modifying a recipe
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('git clone https://git.yoctoproject.org/psplash %s && devtool modify -n %s %s' % (tempdir, testrecipe, tempdir))
+ self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. devtool output: %s' % result.output)
+ matches = glob.glob(os.path.join(self.workspacedir, 'appends', 'psplash_*.bbappend'))
+ self.assertTrue(matches, 'bbappend not created')
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(testrecipe, result.output)
+ self.assertIn(tempdir, result.output)
+
+ def test_devtool_modify_git_crates_subpath(self):
+ # This tests two things in devtool context:
+ # - that we support local git dependencies for cargo based recipe
+ # - that we support patches in SRC_URI when git url contains subpath parameter
+
+ # Check preconditions:
+ # recipe inherits cargo
+ # git:// uri with a subpath as the main package
+ # some crate:// in SRC_URI
+ # others git:// in SRC_URI
+ # contains a patch
+ testrecipe = 'hello-rs'
+ bb_vars = get_bb_vars(['SRC_URI', 'FILE', 'WORKDIR', 'CARGO_HOME'], testrecipe)
+ recipefile = bb_vars['FILE']
+ workdir = bb_vars['WORKDIR']
+ cargo_home = bb_vars['CARGO_HOME']
+ src_uri = bb_vars['SRC_URI'].split()
+ self.assertTrue(src_uri[0].startswith('git://'),
+ 'This test expects the %s recipe to have a git repo as its main uri' % testrecipe)
+ self.assertIn(';subpath=', src_uri[0],
+ 'This test expects the %s recipe to have a git uri with subpath' % testrecipe)
+ self.assertTrue(any([uri.startswith('crate://') for uri in src_uri]),
+ 'This test expects the %s recipe to have some crates in its src uris' % testrecipe)
+ self.assertGreaterEqual(sum(map(lambda x:x.startswith('git://'), src_uri)), 2,
+ 'This test expects the %s recipe to have several git:// uris' % testrecipe)
+ self.assertTrue(any([uri.startswith('file://') and '.patch' in uri for uri in src_uri]),
+ 'This test expects the %s recipe to have a patch in its src uris' % testrecipe)
+
+ self._test_recipe_contents(recipefile, {}, ['ptest-cargo'])
+
+ # Clean up anything in the workdir/sysroot/sstate cache
+ bitbake('%s -c cleansstate' % testrecipe)
+ # Try modifying a recipe
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
+ self.assertExists(os.path.join(tempdir, 'Cargo.toml'), 'Extracted source could not be found')
+ self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. devtool output: %s' % result.output)
+ matches = glob.glob(os.path.join(self.workspacedir, 'appends', '%s_*.bbappend' % testrecipe))
+ self.assertTrue(matches, 'bbappend not created')
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(testrecipe, result.output)
+ self.assertIn(tempdir, result.output)
+ # Check git repo
+ self._check_src_repo(tempdir)
+ # Check that the patch is correctly applied.
+ # The last commit message in the tree must contain the following note:
+ # Notes (devtool):
+ # original patch: <patchname>
+ # ..
+ patchname = None
+ for uri in src_uri:
+ if uri.startswith('file://') and '.patch' in uri:
+ patchname = uri.replace("file://", "").partition('.patch')[0] + '.patch'
+ self.assertIsNotNone(patchname)
+ result = runCmd('git -C %s log -1' % tempdir)
+ self.assertIn("Notes (devtool):\n original patch: %s" % patchname, result.output)
+
+ # Configure the recipe to check that the git dependencies are correctly patched in cargo config
+ bitbake('-c configure %s' % testrecipe)
+
+ cargo_config_path = os.path.join(cargo_home, 'config')
+ with open(cargo_config_path, "r") as f:
+ cargo_config_contents = [line.strip('\n') for line in f.readlines()]
+
+ # Get back git dependencies of the recipe (ignoring the main one)
+ # and check that they are all correctly patched to be fetched locally
+ git_deps = [uri for uri in src_uri if uri.startswith("git://")][1:]
+ for git_dep in git_deps:
+ raw_url, _, raw_parms = git_dep.partition(";")
+ parms = {}
+ for parm in raw_parms.split(";"):
+ name_parm, _, value_parm = parm.partition('=')
+ parms[name_parm] = value_parm
+ self.assertIn('protocol', parms, 'git dependencies uri should contain the "protocol" parameter')
+ self.assertIn('name', parms, 'git dependencies uri should contain the "name" parameter')
+ self.assertIn('destsuffix', parms, 'git dependencies uri should contain the "destsuffix" parameter')
+ self.assertIn('type', parms, 'git dependencies uri should contain the "type" parameter')
+ self.assertEqual(parms['type'], 'git-dependency', 'git dependencies uri should have "type=git-dependency"')
+ raw_url = raw_url.replace("git://", '%s://' % parms['protocol'])
+ patch_line = '[patch."%s"]' % raw_url
+ path_patched = os.path.join(workdir, parms['destsuffix'])
+ path_override_line = '%s = { path = "%s" }' % (parms['name'], path_patched)
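+ # For illustration, the entries checked below are expected to look like
+ # the following (hypothetical URL/name/path values):
+ #   [patch."https://example.com/some-dep.git"]
+ #   some-dep = { path = "<WORKDIR>/some-dep-dest" }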
+ # Would have been better to use tomllib to read this file :/
+ self.assertIn(patch_line, cargo_config_contents)
+ self.assertIn(path_override_line, cargo_config_contents)
+
+ # Try to package the recipe
+ bitbake('-c package_qa %s' % testrecipe)
+
def test_devtool_modify_localfiles(self):
# Check preconditions
testrecipe = 'lighttpd'
@@ -836,12 +1099,43 @@ class DevtoolModifyTests(DevtoolBase):
runCmd('git -C %s checkout %s' % (tempdir, branch))
with open(source, "rt") as f:
content = f.read()
- self.assertEquals(content, expected)
- check('devtool', 'This is a test for something\n')
+ self.assertEqual(content, expected)
+ if self.td["MACHINE"] == "qemux86":
+ check('devtool', 'This is a test for qemux86\n')
+ elif self.td["MACHINE"] == "qemuarm":
+ check('devtool', 'This is a test for qemuarm\n')
+ else:
+ check('devtool', 'This is a test for something\n')
check('devtool-no-overrides', 'This is a test for something\n')
check('devtool-override-qemuarm', 'This is a test for qemuarm\n')
check('devtool-override-qemux86', 'This is a test for qemux86\n')
+ def test_devtool_modify_multiple_sources(self):
+ # This test checks that recipes fetching several sources can be used with devtool modify/build
+ # Check preconditions
+ testrecipe = 'bzip2'
+ src_uri = get_bb_var('SRC_URI', testrecipe)
+ src1 = 'https://' in src_uri
+ src2 = 'git://' in src_uri
+ self.assertTrue(src1 and src2, 'This test expects the %s recipe to fetch both a git source and a tarball and it seems that it no longer does' % testrecipe)
+ # Clean up anything in the workdir/sysroot/sstate cache
+ bitbake('%s -c cleansstate' % testrecipe)
+ # Try modifying a recipe
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
+ self.assertEqual(result.status, 0, "Could not modify recipe %s. Output: %s" % (testrecipe, result.output))
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(testrecipe, result.output)
+ self.assertIn(tempdir, result.output)
+ # Try building
+ result = bitbake(testrecipe)
+ self.assertEqual(result.status, 0, "Bitbake failed, exit code %s, output %s" % (result.status, result.output))
+
class DevtoolUpdateTests(DevtoolBase):
def test_devtool_update_recipe(self):
@@ -871,14 +1165,15 @@ class DevtoolUpdateTests(DevtoolBase):
result = runCmd('git commit -m "Add a new file"', cwd=tempdir)
self.add_command_to_tearDown('cd %s; rm %s/*.patch; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)))
result = runCmd('devtool update-recipe %s' % testrecipe)
+ result = runCmd('git add minicom', cwd=os.path.dirname(recipefile))
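+ # 'git status --porcelain' codes checked below: ' M' = modified in the
+ # working tree, 'A ' = newly added to the index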
expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
- ('??', '.*/0001-Change-the-README.patch$'),
- ('??', '.*/0002-Add-a-new-file.patch$')]
+ ('A ', '.*/0001-Change-the-README.patch$'),
+ ('A ', '.*/0002-Add-a-new-file.patch$')]
self._check_repo_status(os.path.dirname(recipefile), expected_status)
def test_devtool_update_recipe_git(self):
# Check preconditions
- testrecipe = 'mtd-utils'
+ testrecipe = 'mtd-utils-selftest'
bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe)
recipefile = bb_vars['FILE']
src_uri = bb_vars['SRC_URI']
@@ -917,23 +1212,7 @@ class DevtoolUpdateTests(DevtoolBase):
srcurilines[0] = 'SRC_URI = "' + srcurilines[0]
srcurilines.append('"')
removelines = ['SRCREV = ".*"'] + srcurilines
- for line in result.output.splitlines():
- if line.startswith('+++') or line.startswith('---'):
- continue
- elif line.startswith('+'):
- matched = False
- for item in addlines:
- if re.match(item, line[1:].strip()):
- matched = True
- break
- self.assertTrue(matched, 'Unexpected diff add line: %s' % line)
- elif line.startswith('-'):
- matched = False
- for item in removelines:
- if re.match(item, line[1:].strip()):
- matched = True
- break
- self.assertTrue(matched, 'Unexpected diff remove line: %s' % line)
+ self._check_diff(result.output, addlines, removelines)
# Now try with auto mode
runCmd('cd %s; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, os.path.basename(recipefile)))
result = runCmd('devtool update-recipe %s' % testrecipe)
@@ -1015,10 +1294,11 @@ class DevtoolUpdateTests(DevtoolBase):
def test_devtool_update_recipe_append_git(self):
# Check preconditions
- testrecipe = 'mtd-utils'
- bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe)
+ testrecipe = 'mtd-utils-selftest'
+ bb_vars = get_bb_vars(['FILE', 'SRC_URI', 'LAYERSERIES_CORENAMES'], testrecipe)
recipefile = bb_vars['FILE']
src_uri = bb_vars['SRC_URI']
+ corenames = bb_vars['LAYERSERIES_CORENAMES']
self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe)
for entry in src_uri.split():
if entry.startswith('git://'):
@@ -1049,7 +1329,7 @@ class DevtoolUpdateTests(DevtoolBase):
f.write('BBFILE_PATTERN_oeselftesttemplayer = "^${LAYERDIR}/"\n')
f.write('BBFILE_PRIORITY_oeselftesttemplayer = "999"\n')
f.write('BBFILE_PATTERN_IGNORE_EMPTY_oeselftesttemplayer = "1"\n')
- f.write('LAYERSERIES_COMPAT_oeselftesttemplayer = "${LAYERSERIES_COMPAT_core}"\n')
+ f.write('LAYERSERIES_COMPAT_oeselftesttemplayer = "%s"\n' % corenames)
self.add_command_to_tearDown('bitbake-layers remove-layer %s || true' % templayerdir)
result = runCmd('bitbake-layers add-layer %s' % templayerdir, cwd=self.builddir)
# Create the bbappend
@@ -1125,14 +1405,30 @@ class DevtoolUpdateTests(DevtoolBase):
runCmd('echo "Bar" > new-file', cwd=tempdir)
runCmd('git add new-file', cwd=tempdir)
runCmd('git commit -m "Add new file"', cwd=tempdir)
- self.add_command_to_tearDown('cd %s; git clean -fd .; git checkout .' %
- os.path.dirname(recipefile))
runCmd('devtool update-recipe %s' % testrecipe)
expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
(' M', '.*/makedevs/makedevs.c$'),
('??', '.*/makedevs/new-local$'),
('??', '.*/makedevs/0001-Add-new-file.patch$')]
self._check_repo_status(os.path.dirname(recipefile), expected_status)
+ # Now try to update recipe in another layer, so first, clean it
+ runCmd('cd %s; git clean -fd .; git checkout .' % os.path.dirname(recipefile))
+ # Create a temporary layer and add it to bblayers.conf
+ self._create_temp_layer(templayerdir, True, 'templayer')
+ # Update recipe in templayer
+ result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir))
+ self.assertNotIn('WARNING:', result.output)
+ # Check recipe is still clean
+ self._check_repo_status(os.path.dirname(recipefile), [])
+ splitpath = os.path.dirname(recipefile).split(os.sep)
+ appenddir = os.path.join(templayerdir, splitpath[-2], splitpath[-1])
+ bbappendfile = self._check_bbappend(testrecipe, recipefile, appenddir)
+ patchfile = os.path.join(appenddir, testrecipe, '0001-Add-new-file.patch')
+ new_local_file = os.path.join(appenddir, testrecipe, 'new_local')
+ local_file = os.path.join(appenddir, testrecipe, 'makedevs.c')
+ self.assertExists(patchfile, 'Patch file 0001-Add-new-file.patch not created')
+ self.assertExists(local_file, 'File makedevs.c not created')
+ self.assertExists(new_local_file, 'File new_local not created')
def test_devtool_update_recipe_local_files_2(self):
"""Check local source files support when oe-local-files is in Git"""
@@ -1267,7 +1563,7 @@ class DevtoolUpdateTests(DevtoolBase):
# Modify one file
srctree = os.path.join(self.workspacedir, 'sources', testrecipe)
runCmd('echo "Another line" >> README', cwd=srctree)
- runCmd('git commit -a --amend --no-edit', cwd=srctree)
+ runCmd('git commit -a --amend --no-edit --no-verify', cwd=srctree)
self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)))
result = runCmd('devtool update-recipe %s' % testrecipe)
expected_status = [(' M', '.*/%s/readme.patch.gz$' % testrecipe)]
@@ -1303,6 +1599,121 @@ class DevtoolUpdateTests(DevtoolBase):
expected_status = []
self._check_repo_status(os.path.dirname(recipefile), expected_status)
+ def test_devtool_finish_modify_git_subdir(self):
+ # Check preconditions
+ testrecipe = 'dos2unix'
+ self.append_config('ERROR_QA:remove:pn-dos2unix = "patch-status"\n')
+ bb_vars = get_bb_vars(['SRC_URI', 'S', 'WORKDIR', 'FILE'], testrecipe)
+ self.assertIn('git://', bb_vars['SRC_URI'], 'This test expects the %s recipe to be a git recipe' % testrecipe)
+ workdir_git = '%s/git/' % bb_vars['WORKDIR']
+ if not bb_vars['S'].startswith(workdir_git):
+ self.fail('This test expects the %s recipe to be building from a subdirectory of the git repo' % testrecipe)
+ subdir = bb_vars['S'].split(workdir_git, 1)[1]
+ # Clean up anything in the workdir/sysroot/sstate cache
+ bitbake('%s -c cleansstate' % testrecipe)
+ # Try modifying a recipe
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
+ testsrcfile = os.path.join(tempdir, subdir, 'dos2unix.c')
+ self.assertExists(testsrcfile, 'Extracted source could not be found')
+ self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. devtool output: %s' % result.output)
+ self.assertNotExists(os.path.join(tempdir, subdir, '.git'), 'Subdirectory has been initialised as a git repo')
+ # Check git repo
+ self._check_src_repo(tempdir)
+ # Modify file
+ runCmd("sed -i '1s:^:/* Add a comment */\\n:' %s" % testsrcfile)
+ result = runCmd('git commit -a -m "Add a comment"', cwd=tempdir)
+ # Now try updating original recipe
+ recipefile = bb_vars['FILE']
+ recipedir = os.path.dirname(recipefile)
+ self.add_command_to_tearDown('cd %s; rm -f %s/*.patch; git checkout .' % (recipedir, testrecipe))
+ result = runCmd('devtool update-recipe %s' % testrecipe)
+ expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
+ ('??', '.*/%s/%s/$' % (testrecipe, testrecipe))]
+ self._check_repo_status(os.path.dirname(recipefile), expected_status)
+ result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile))
+ removelines = ['SRC_URI = "git://.*"']
+ addlines = [
+ 'SRC_URI = "git://.* \\\\',
+ 'file://0001-Add-a-comment.patch;patchdir=.. \\\\',
+ '"'
+ ]
+ self._check_diff(result.output, addlines, removelines)
+ # Put things back so we can run devtool finish on a different layer
+ runCmd('cd %s; rm -f %s/*.patch; git checkout .' % (recipedir, testrecipe))
+ # Run devtool finish
+ res = re.search('recipes-.*', recipedir)
+ self.assertTrue(res, 'Unable to find recipe subdirectory')
+ recipesubdir = res[0]
+ self.add_command_to_tearDown('rm -rf %s' % os.path.join(self.testlayer_path, recipesubdir))
+ result = runCmd('devtool finish %s meta-selftest' % testrecipe)
+ # Check bbappend file contents
+ appendfn = os.path.join(self.testlayer_path, recipesubdir, '%s_%%.bbappend' % testrecipe)
+ with open(appendfn, 'r') as f:
+ appendlines = f.readlines()
+ expected_appendlines = [
+ 'FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
+ '\n',
+ 'SRC_URI += "file://0001-Add-a-comment.patch;patchdir=.."\n',
+ '\n'
+ ]
+ self.assertEqual(appendlines, expected_appendlines)
+ self.assertExists(os.path.join(os.path.dirname(appendfn), testrecipe, '0001-Add-a-comment.patch'))
+ # Try building
+ bitbake('%s -c patch' % testrecipe)
+
+ def test_devtool_git_submodules(self):
+ # This tests if we can add a patch in a git submodule and extract it properly using devtool finish
+ # Check preconditions
+ self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+ self.track_for_cleanup(self.workspacedir)
+ recipe = 'vulkan-samples'
+ src_uri = get_bb_var('SRC_URI', recipe)
+ self.assertIn('gitsm://', src_uri, 'This test expects the %s recipe to be a git recipe with submodules' % recipe)
+ oldrecipefile = get_bb_var('FILE', recipe)
+ recipedir = os.path.dirname(oldrecipefile)
+ result = runCmd('git status --porcelain .', cwd=recipedir)
+ if result.output.strip():
+ self.fail('Recipe directory for %s contains uncommitted changes' % recipe)
+ self.assertIn('/meta/', recipedir)
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool modify %s %s' % (recipe, tempdir))
+ self.assertExists(os.path.join(tempdir, 'CMakeLists.txt'), 'Extracted source could not be found')
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(recipe, result.output)
+ self.assertIn(tempdir, result.output)
+ # Modify a source file in a submodule (grab the first one)
+ result = runCmd('git submodule --quiet foreach \'echo $sm_path\'', cwd=tempdir)
+ submodule = result.output.splitlines()[0]
+ submodule_path = os.path.join(tempdir, submodule)
+ runCmd('echo "#This is a first comment" >> testfile', cwd=submodule_path)
+ result = runCmd('git status --porcelain . ', cwd=submodule_path)
+ self.assertIn("testfile", result.output)
+ runCmd('git add testfile; git commit -m "Adding a new file"', cwd=submodule_path)
+
+ # Try finish to the original layer
+ self.add_command_to_tearDown('rm -rf %s ; cd %s ; git checkout %s' % (recipedir, os.path.dirname(recipedir), recipedir))
+ runCmd('devtool finish -f %s meta' % recipe)
+ result = runCmd('devtool status')
+ self.assertNotIn(recipe, result.output, 'Recipe should have been reset by finish but wasn\'t')
+ self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after finish')
+ expected_status = [(' M', '.*/%s$' % os.path.basename(oldrecipefile)),
+ ('??', '.*/.*-Adding-a-new-file.patch$')]
+ self._check_repo_status(recipedir, expected_status)
+ # Make sure the patch is added to the recipe with the correct "patchdir" option
+ result = runCmd('git diff .', cwd=recipedir)
+ addlines = [
+ 'file://0001-Adding-a-new-file.patch;patchdir=%s \\\\' % submodule
+ ]
+ self._check_diff(result.output, addlines, [])
+
class DevtoolExtractTests(DevtoolBase):
def test_devtool_extract(self):
@@ -1353,28 +1764,7 @@ class DevtoolExtractTests(DevtoolBase):
@OETestTag("runqemu")
def test_devtool_deploy_target(self):
- # NOTE: Whilst this test would seemingly be better placed as a runtime test,
- # unfortunately the runtime tests run under bitbake and you can't run
- # devtool within bitbake (since devtool needs to run bitbake itself).
- # Additionally we are testing build-time functionality as well, so
- # really this has to be done as an oe-selftest test.
- #
- # Check preconditions
- machine = get_bb_var('MACHINE')
- if not machine.startswith('qemu'):
- self.skipTest('This test only works with qemu machines')
- if not os.path.exists('/etc/runqemu-nosudo'):
- self.skipTest('You must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
- result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ip tuntap show', ignore_status=True)
- if result.status != 0:
- result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ifconfig -a', ignore_status=True)
- if result.status != 0:
- self.skipTest('Failed to determine if tap devices exist with ifconfig or ip: %s' % result.output)
- for line in result.output.splitlines():
- if line.startswith('tap'):
- break
- else:
- self.skipTest('No tap devices found - you must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
+ self._check_runqemu_prerequisites()
self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
# Definitions
testrecipe = 'mdadm'
@@ -1560,6 +1950,54 @@ class DevtoolUpgradeTests(DevtoolBase):
self.assertNotIn(recipe, result.output)
self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after resetting')
+ def test_devtool_upgrade_drop_md5sum(self):
+ # Check preconditions
+ self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ # For the moment, we are using a real recipe.
+ recipe = 'devtool-upgrade-test3'
+ version = '1.6.0'
+ oldrecipefile = get_bb_var('FILE', recipe)
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ # Check upgrade. The code does not check whether the new PV is older or newer than the current PV,
+ # so we may actually be downgrading instead of upgrading.
+ result = runCmd('devtool upgrade %s %s -V %s' % (recipe, tempdir, version))
+ # Check new recipe file is present
+ newrecipefile = os.path.join(self.workspacedir, 'recipes', recipe, '%s_%s.bb' % (recipe, version))
+ self.assertExists(newrecipefile, 'Recipe file should exist after upgrade')
+ # Check recipe got changed as expected
+ with open(oldrecipefile + '.upgraded', 'r') as f:
+ desiredlines = f.readlines()
+ with open(newrecipefile, 'r') as f:
+ newlines = f.readlines()
+ self.assertEqual(desiredlines, newlines)
+
+ def test_devtool_upgrade_all_checksums(self):
+ # Check preconditions
+ self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ # For the moment, we are using a real recipe.
+ recipe = 'devtool-upgrade-test4'
+ version = '1.6.0'
+ oldrecipefile = get_bb_var('FILE', recipe)
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ # Check upgrade. The code does not check whether the new PV is older or newer than the current PV,
+ # so we may actually be downgrading instead of upgrading.
+ result = runCmd('devtool upgrade %s %s -V %s' % (recipe, tempdir, version))
+ # Check new recipe file is present
+ newrecipefile = os.path.join(self.workspacedir, 'recipes', recipe, '%s_%s.bb' % (recipe, version))
+ self.assertExists(newrecipefile, 'Recipe file should exist after upgrade')
+ # Check recipe got changed as expected
+ with open(oldrecipefile + '.upgraded', 'r') as f:
+ desiredlines = f.readlines()
+ with open(newrecipefile, 'r') as f:
+ newlines = f.readlines()
+ self.assertEqual(desiredlines, newlines)
+
def test_devtool_layer_plugins(self):
"""Test that devtool can use plugins from other layers.
@@ -1578,7 +2016,15 @@ class DevtoolUpgradeTests(DevtoolBase):
for p in paths:
dstdir = os.path.join(dstdir, p)
if not os.path.exists(dstdir):
- os.makedirs(dstdir)
+ try:
+ os.makedirs(dstdir)
+ except PermissionError:
+ return False
+ except OSError as e:
+ if e.errno == errno.EROFS:
+ return False
+ else:
+ raise e
if p == "lib":
# Can race with other tests
self.add_command_to_tearDown('rmdir --ignore-fail-on-non-empty %s' % dstdir)
@@ -1586,8 +2032,12 @@ class DevtoolUpgradeTests(DevtoolBase):
self.track_for_cleanup(dstdir)
dstfile = os.path.join(dstdir, os.path.basename(srcfile))
if srcfile != dstfile:
- shutil.copy(srcfile, dstfile)
+ try:
+ shutil.copy(srcfile, dstfile)
+ except PermissionError:
+ return False
self.track_for_cleanup(dstfile)
+ return True
def test_devtool_load_plugin(self):
"""Test that devtool loads only the first found plugin in BBPATH."""
@@ -1605,15 +2055,17 @@ class DevtoolUpgradeTests(DevtoolBase):
plugincontent = fh.readlines()
try:
self.assertIn('meta-selftest', srcfile, 'wrong bbpath plugin found')
- for path in searchpath:
- self._copy_file_with_cleanup(srcfile, path, 'lib', 'devtool')
+ searchpath = [
+ path for path in searchpath
+ if self._copy_file_with_cleanup(srcfile, path, 'lib', 'devtool')
+ ]
result = runCmd("devtool --quiet count")
self.assertEqual(result.output, '1')
result = runCmd("devtool --quiet multiloaded")
self.assertEqual(result.output, "no")
for path in searchpath:
result = runCmd("devtool --quiet bbdir")
- self.assertEqual(result.output, path)
+ self.assertEqual(os.path.realpath(result.output), os.path.realpath(path))
os.unlink(os.path.join(result.output, 'lib', 'devtool', 'bbpath.py'))
finally:
with open(srcfile, 'w') as fh:
@@ -1794,6 +2246,52 @@ class DevtoolUpgradeTests(DevtoolBase):
if files:
self.fail('Unexpected file(s) copied next to bbappend: %s' % ', '.join(files))
+ def test_devtool_finish_update_patch(self):
+ # This test uses a modified version of the sysdig recipe from meta-oe.
+ # - The patches have been renamed.
+ # - The dependencies are commented out since the recipe is not being
+ # built.
+ #
+ # The sysdig recipe is interesting in that it fetches two different Git
+ # repositories, and there are patches for both. This means that
+ # devtool will create ignore commits, as it uses Git submodules to keep
+ # track of the second repository.
+ #
+ # This test will verify that the ignored commits actually are ignored
+ # when a commit in between is modified. It will also verify that the
+ # updated patch keeps its original name.
+
+ # Check preconditions
+ self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+ # Try modifying a recipe
+ self.track_for_cleanup(self.workspacedir)
+ recipe = 'sysdig-selftest'
+ recipefile = get_bb_var('FILE', recipe)
+ recipedir = os.path.dirname(recipefile)
+ result = runCmd('git status --porcelain .', cwd=recipedir)
+ if result.output.strip():
+ self.fail('Recipe directory for %s contains uncommitted changes' % recipe)
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool modify %s %s' % (recipe, tempdir))
+ self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (recipedir, recipe, recipe, os.path.basename(recipefile)))
+ self.assertExists(os.path.join(tempdir, 'CMakeLists.txt'), 'Extracted source could not be found')
+ # Make a change to one of the existing commits
+ result = runCmd('echo "# A comment " >> CMakeLists.txt', cwd=tempdir)
+ result = runCmd('git status --porcelain', cwd=tempdir)
+ self.assertIn('M CMakeLists.txt', result.output)
+ result = runCmd('git commit --fixup HEAD^ CMakeLists.txt', cwd=tempdir)
+ result = runCmd('git show -s --format=%s', cwd=tempdir)
+ self.assertIn('fixup! cmake: Pass PROBE_NAME via CFLAGS', result.output)
+ result = runCmd('GIT_SEQUENCE_EDITOR=true git rebase -i --autosquash devtool-base', cwd=tempdir)
+ result = runCmd('devtool finish %s meta-selftest' % recipe)
+ result = runCmd('devtool status')
+ self.assertNotIn(recipe, result.output, 'Recipe should have been reset by finish but wasn\'t')
+ self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after finish')
+ expected_status = [(' M', '.*/0099-cmake-Pass-PROBE_NAME-via-CFLAGS.patch$')]
+ self._check_repo_status(recipedir, expected_status)
+
def test_devtool_rename(self):
# Check preconditions
self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
@@ -1830,7 +2328,6 @@ class DevtoolUpgradeTests(DevtoolBase):
self._test_recipe_contents(newrecipefile, checkvars, [])
# Try again - change just name this time
result = runCmd('devtool reset -n %s' % newrecipename)
- shutil.rmtree(newsrctree)
add_recipe()
newrecipefile = os.path.join(self.workspacedir, 'recipes', newrecipename, '%s_%s.bb' % (newrecipename, recipever))
result = runCmd('devtool rename %s %s' % (recipename, newrecipename))
@@ -1843,7 +2340,6 @@ class DevtoolUpgradeTests(DevtoolBase):
self._test_recipe_contents(newrecipefile, checkvars, [])
# Try again - change just version this time
result = runCmd('devtool reset -n %s' % newrecipename)
- shutil.rmtree(newsrctree)
add_recipe()
newrecipefile = os.path.join(self.workspacedir, 'recipes', recipename, '%s_%s.bb' % (recipename, newrecipever))
result = runCmd('devtool rename %s -V %s' % (recipename, newrecipever))
@@ -1914,7 +2410,9 @@ class DevtoolUpgradeTests(DevtoolBase):
#Modify the kernel source
modfile = os.path.join(tempdir, 'init/version.c')
- runCmd("sed -i 's/Linux/LiNuX/g' %s" % (modfile))
+ # Moved to uts.h in 6.1 onwards
+ modfile2 = os.path.join(tempdir, 'include/linux/uts.h')
+ runCmd("sed -i 's/Linux/LiNuX/g' %s %s" % (modfile, modfile2))
#Modify the configuration
codeconfigfile = os.path.join(tempdir, '.config.new')
@@ -1929,3 +2427,518 @@ class DevtoolUpgradeTests(DevtoolBase):
#Step 4.5
runCmd("grep %s %s" % (modconfopt, codeconfigfile))
+
+
+class DevtoolIdeSdkTests(DevtoolBase):
+ def _write_bb_config(self, recipe_names):
+ """Helper to write the bitbake local.conf file"""
+ conf_lines = [
+ 'IMAGE_CLASSES += "image-combined-dbg"',
+ 'IMAGE_GEN_DEBUGFS = "1"',
+ 'IMAGE_INSTALL:append = " gdbserver %s"' % ' '.join(
+ [r + '-ptest' for r in recipe_names])
+ ]
+ self.write_config("\n".join(conf_lines))
+
+ def _check_workspace(self):
+ """Check if a workspace directory is available and setup the cleanup"""
+ self.assertTrue(not os.path.exists(self.workspacedir),
+ 'This test cannot be run with a workspace directory under the build directory')
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+
+ def _workspace_scripts_dir(self, recipe_name):
+ return os.path.realpath(os.path.join(self.builddir, 'workspace', 'ide-sdk', recipe_name, 'scripts'))
+
+ def _sources_scripts_dir(self, src_dir):
+ return os.path.realpath(os.path.join(src_dir, 'oe-scripts'))
+
+ def _workspace_gdbinit_dir(self, recipe_name):
+ return os.path.realpath(os.path.join(self.builddir, 'workspace', 'ide-sdk', recipe_name, 'scripts', 'gdbinit'))
+
+ def _sources_gdbinit_dir(self, src_dir):
+ return os.path.realpath(os.path.join(src_dir, 'oe-gdbinit'))
+
+ def _devtool_ide_sdk_recipe(self, recipe_name, build_file, testimage):
+ """Setup a recipe for working with devtool ide-sdk
+
+ Basically devtool modify -x followed by some tests
+ """
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.add_command_to_tearDown('bitbake -c clean %s' % recipe_name)
+
+ result = runCmd('devtool modify %s -x %s' % (recipe_name, tempdir))
+ self.assertExists(os.path.join(tempdir, build_file),
+ 'Extracted source could not be found')
+ self.assertExists(os.path.join(self.workspacedir, 'conf',
+ 'layer.conf'), 'Workspace directory not created')
+ matches = glob.glob(os.path.join(self.workspacedir,
+ 'appends', recipe_name + '.bbappend'))
+ self.assertTrue(matches, 'bbappend not created %s' % result.output)
+
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(recipe_name, result.output)
+ self.assertIn(tempdir, result.output)
+ self._check_src_repo(tempdir)
+
+ # Usually devtool ide-sdk would initiate the build of the SDK.
+ # But there is a circular dependency between starting Qemu and passing the IP of runqemu to devtool ide-sdk.
+ if testimage:
+ bitbake("%s qemu-native qemu-helper-native" % testimage)
+ deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
+ self.add_command_to_tearDown('bitbake -c clean %s' % testimage)
+ self.add_command_to_tearDown(
+ 'rm -f %s/%s*' % (deploy_dir_image, testimage))
+
+ return tempdir
+
+ def _get_recipe_ids(self, recipe_name):
+ """IDs needed to write recipe specific config entries into IDE config files"""
+ package_arch = get_bb_var('PACKAGE_ARCH', recipe_name)
+ recipe_id = recipe_name + "-" + package_arch
+ recipe_id_pretty = recipe_name + ": " + package_arch
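+ # e.g. ('cmake-example-core2-64', 'cmake-example: core2-64') for a
+ # core2-64 PACKAGE_ARCH (illustrative values)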
+ return (recipe_id, recipe_id_pretty)
+
+ def _verify_install_script_code(self, tempdir, recipe_name):
+ """Verify the scripts referred by the tasks.json file are fine.
+
+ This function does not depend on Qemu. It verifies that the scripts
+ exist and that the delete step works as expected, but it does not try to
+ deploy to Qemu.
+ """
+ recipe_id, recipe_id_pretty = self._get_recipe_ids(recipe_name)
+ with open(os.path.join(tempdir, '.vscode', 'tasks.json')) as tasks_j:
+ tasks_d = json.load(tasks_j)
+ tasks = tasks_d["tasks"]
+ task_install = next(
+ (task for task in tasks if task["label"] == "install && deploy-target %s" % recipe_id_pretty), None)
+ self.assertIsNot(task_install, None)
+ # Only verify that the install and deploy script exists; executing it would require e.g. Qemu running.
+ i_and_d_script = "install_and_deploy_" + recipe_id
+ i_and_d_script_path = os.path.join(
+ self._workspace_scripts_dir(recipe_name), i_and_d_script)
+ self.assertExists(i_and_d_script_path)
+ del_script = "delete_package_dirs_" + recipe_id
+ del_script_path = os.path.join(
+ self._workspace_scripts_dir(recipe_name), del_script)
+ self.assertExists(del_script_path)
+ runCmd(del_script_path, cwd=tempdir)
+
+ def _devtool_ide_sdk_qemu(self, tempdir, qemu, recipe_name, example_exe):
+ """Verify deployment and execution in Qemu system work for one recipe.
+
+ This function checks the entire SDK workflow: changing the code, recompiling
+ it and deploying it back to Qemu, and checking that the changes have been
+ incorporated into the provided binaries. It also runs the tests of the recipe.
+ """
+ recipe_id, _ = self._get_recipe_ids(recipe_name)
+ i_and_d_script = "install_and_deploy_" + recipe_id
+ install_deploy_cmd = os.path.join(
+ self._workspace_scripts_dir(recipe_name), i_and_d_script)
+ self.assertExists(install_deploy_cmd,
+ '%s script not found' % install_deploy_cmd)
+ runCmd(install_deploy_cmd)
+
+ MAGIC_STRING_ORIG = "Magic: 123456789"
+ MAGIC_STRING_NEW = "Magic: 987654321"
+ ptest_cmd = "ptest-runner " + recipe_name
+
+ # validate that SSH is working
+ status, _ = qemu.run("uname")
+ self.assertEqual(
+ status, 0, msg="Failed to connect to the SSH server on Qemu")
+
+ # Verify the unmodified example prints the magic string
+ status, output = qemu.run(example_exe)
+ self.assertEqual(status, 0, msg="%s failed: %s" %
+ (example_exe, output))
+ self.assertIn(MAGIC_STRING_ORIG, output)
+
+ # Verify the unmodified ptests work
+ status, output = qemu.run(ptest_cmd)
+ self.assertEqual(status, 0, msg="%s failed: %s" % (ptest_cmd, output))
+ self.assertIn("PASS: cpp-example-lib", output)
+
+ # Verify remote debugging works
+ self._gdb_cross_debugging(
+ qemu, recipe_name, example_exe, MAGIC_STRING_ORIG)
+
+ # Replace the Magic String in the code, compile and deploy to Qemu
+ cpp_example_lib_hpp = os.path.join(tempdir, 'cpp-example-lib.hpp')
+ with open(cpp_example_lib_hpp, 'r') as file:
+ cpp_code = file.read()
+ cpp_code = cpp_code.replace(MAGIC_STRING_ORIG, MAGIC_STRING_NEW)
+ with open(cpp_example_lib_hpp, 'w') as file:
+ file.write(cpp_code)
+ runCmd(install_deploy_cmd, cwd=tempdir)
+
+ # Verify the modified example prints the modified magic string
+ status, output = qemu.run(example_exe)
+ self.assertEqual(status, 0, msg="%s failed: %s" %
+ (example_exe, output))
+ self.assertNotIn(MAGIC_STRING_ORIG, output)
+ self.assertIn(MAGIC_STRING_NEW, output)
+
+ # Verify the modified example ptests work
+ status, output = qemu.run(ptest_cmd)
+ self.assertEqual(status, 0, msg="%s failed: %s" % (ptest_cmd, output))
+ self.assertIn("PASS: cpp-example-lib", output)
+
+ # Verify remote debugging works with the modified magic string
+ self._gdb_cross_debugging(
+ qemu, recipe_name, example_exe, MAGIC_STRING_NEW)
+
+ def _gdb_cross(self):
+ """Verify gdb-cross is provided by devtool ide-sdk"""
+ target_arch = self.td["TARGET_ARCH"]
+ target_sys = self.td["TARGET_SYS"]
+ gdb_recipe = "gdb-cross-" + target_arch
+ gdb_binary = target_sys + "-gdb"
+
+ native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", gdb_recipe)
+ r = runCmd("%s --version" % gdb_binary,
+ native_sysroot=native_sysroot, target_sys=target_sys)
+ self.assertEqual(r.status, 0)
+ self.assertIn("GNU gdb", r.output)
+
+ def _gdb_cross_debugging(self, qemu, recipe_name, example_exe, magic_string):
+ """Verify gdb-cross is working
+
+ Test remote debugging:
+ break main
+ run
+ continue
+ break CppExample::print_json()
+ continue
+ print CppExample::test_string.compare("cpp-example-lib Magic: 123456789")
+ $1 = 0
+ print CppExample::test_string.compare("cpp-example-lib Magic: 123456789aaa")
+ $2 = -3
+ list cpp-example-lib.hpp:13,13
+ 13 inline static const std::string test_string = "cpp-example-lib Magic: 123456789";
+ continue
+ """
+ sshargs = '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
+ gdbserver_script = os.path.join(self._workspace_scripts_dir(
+ recipe_name), 'gdbserver_1234_usr-bin-' + example_exe + '_m')
+ gdb_script = os.path.join(self._workspace_scripts_dir(
+ recipe_name), 'gdb_1234_usr-bin-' + example_exe)
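+ # The script names are assumed to encode the gdbserver port (1234) and the
+ # target path /usr/bin/<example_exe>, with '/' replaced by '-'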
+
+ # Start a gdbserver
+ r = runCmd(gdbserver_script)
+ self.assertEqual(r.status, 0)
+
+ # Check there is a gdbserver running
+ r = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, 'ps'))
+ self.assertEqual(r.status, 0)
+ self.assertIn("gdbserver ", r.output)
+
+ # Check the pid file is correct
+ test_cmd = "cat /proc/$(cat /tmp/gdbserver_1234_usr-bin-" + \
+ example_exe + "/pid)/cmdline"
+ r = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, test_cmd))
+ self.assertEqual(r.status, 0)
+ self.assertIn("gdbserver", r.output)
+
+ # Test remote debugging works
+ gdb_batch_cmd = " --batch -ex 'break main' -ex 'run'"
+ gdb_batch_cmd += " -ex 'break CppExample::print_json()' -ex 'continue'"
+ gdb_batch_cmd += " -ex 'print CppExample::test_string.compare(\"cpp-example-lib %s\")'" % magic_string
+ gdb_batch_cmd += " -ex 'print CppExample::test_string.compare(\"cpp-example-lib %saaa\")'" % magic_string
+ gdb_batch_cmd += " -ex 'list cpp-example-lib.hpp:13,13'"
+ gdb_batch_cmd += " -ex 'continue'"
+ r = runCmd(gdb_script + gdb_batch_cmd)
+ self.logger.debug("%s %s returned: %s", gdb_script,
+ gdb_batch_cmd, r.output)
+ self.assertEqual(r.status, 0)
+ self.assertIn("Breakpoint 1, main", r.output)
+ self.assertIn("$1 = 0", r.output) # test.string.compare equal
+ self.assertIn("$2 = -3", r.output) # test.string.compare longer
+ self.assertIn(
+ 'inline static const std::string test_string = "cpp-example-lib %s";' % magic_string, r.output)
+ self.assertIn("exited normally", r.output)
+
+ # Stop the gdbserver
+ r = runCmd(gdbserver_script + ' stop')
+ self.assertEqual(r.status, 0)
+
+ # Check there is no gdbserver running
+ r = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, 'ps'))
+ self.assertEqual(r.status, 0)
+ self.assertNotIn("gdbserver ", r.output)
+
+ def _verify_cmake_preset(self, tempdir):
+ """Verify the generated cmake preset works as expected
+
+ Check if compiling works
+ Check if unit tests can be executed in qemu (not qemu-system)
+ """
+ with open(os.path.join(tempdir, 'CMakeUserPresets.json')) as cmake_preset_j:
+ cmake_preset_d = json.load(cmake_preset_j)
+ config_presets = cmake_preset_d["configurePresets"]
+ self.assertEqual(len(config_presets), 1)
+ cmake_exe = config_presets[0]["cmakeExecutable"]
+ preset_name = config_presets[0]["name"]
+
+ # Verify the wrapper for cmake native is available
+ self.assertExists(cmake_exe)
+
+ # Verify the cmake preset generated by devtool ide-sdk is available
+ result = runCmd('%s --list-presets' % cmake_exe, cwd=tempdir)
+ self.assertIn(preset_name, result.output)
+
+ # Verify cmake re-uses the o files compiled by bitbake
+ result = runCmd('%s --build --preset %s' %
+ (cmake_exe, preset_name), cwd=tempdir)
+ self.assertIn("ninja: no work to do.", result.output)
+
+ # Verify the unit tests work (in Qemu user mode)
+ result = runCmd('%s --build --preset %s --target test' %
+ (cmake_exe, preset_name), cwd=tempdir)
+ self.assertIn("100% tests passed", result.output)
+
+ # Verify re-building and testing works again
+ result = runCmd('%s --build --preset %s --target clean' %
+ (cmake_exe, preset_name), cwd=tempdir)
+ self.assertIn("Cleaning", result.output)
+ result = runCmd('%s --build --preset %s' %
+ (cmake_exe, preset_name), cwd=tempdir)
+ self.assertIn("Building", result.output)
+ self.assertIn("Linking", result.output)
+ result = runCmd('%s --build --preset %s --target test' %
+ (cmake_exe, preset_name), cwd=tempdir)
+ self.assertIn("Running tests...", result.output)
+ self.assertIn("100% tests passed", result.output)
+
+ @OETestTag("runqemu")
+ def test_devtool_ide_sdk_none_qemu(self):
+ """Start qemu-system and run tests for multiple recipes. ide=none is used."""
+ recipe_names = ["cmake-example", "meson-example"]
+ testimage = "oe-selftest-image"
+
+ self._check_workspace()
+ self._write_bb_config(recipe_names)
+ self._check_runqemu_prerequisites()
+
+ # Verify deployment to Qemu (system mode) works
+ bitbake(testimage)
+ with runqemu(testimage, runqemuparams="nographic") as qemu:
+ # cmake-example recipe
+ recipe_name = "cmake-example"
+ example_exe = "cmake-example"
+ build_file = "CMakeLists.txt"
+ tempdir = self._devtool_ide_sdk_recipe(
+ recipe_name, build_file, testimage)
+ bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@%s -c --ide=none' % (
+ recipe_name, testimage, qemu.ip)
+ runCmd(bitbake_sdk_cmd)
+ self._gdb_cross()
+ self._verify_cmake_preset(tempdir)
+ self._devtool_ide_sdk_qemu(tempdir, qemu, recipe_name, example_exe)
+ # Verify the oe-scripts sym-link is valid
+ self.assertEqual(self._workspace_scripts_dir(
+ recipe_name), self._sources_scripts_dir(tempdir))
+
+ # meson-example recipe
+ recipe_name = "meson-example"
+ example_exe = "mesonex"
+ build_file = "meson.build"
+ tempdir = self._devtool_ide_sdk_recipe(
+ recipe_name, build_file, testimage)
+ bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@%s -c --ide=none' % (
+ recipe_name, testimage, qemu.ip)
+ runCmd(bitbake_sdk_cmd)
+ self._gdb_cross()
+ self._devtool_ide_sdk_qemu(tempdir, qemu, recipe_name, example_exe)
+ # Verify the oe-scripts sym-link is valid
+ self.assertEqual(self._workspace_scripts_dir(
+ recipe_name), self._sources_scripts_dir(tempdir))
+
+ def test_devtool_ide_sdk_code_cmake(self):
+ """Verify a cmake recipe works with ide=code mode"""
+ recipe_name = "cmake-example"
+ build_file = "CMakeLists.txt"
+ testimage = "oe-selftest-image"
+
+ self._check_workspace()
+ self._write_bb_config([recipe_name])
+ tempdir = self._devtool_ide_sdk_recipe(
+ recipe_name, build_file, testimage)
+ bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@192.168.17.17 -c --ide=code' % (
+ recipe_name, testimage)
+ runCmd(bitbake_sdk_cmd)
+ self._verify_cmake_preset(tempdir)
+ self._verify_install_script_code(tempdir, recipe_name)
+ self._gdb_cross()
+
+ def test_devtool_ide_sdk_code_meson(self):
+ """Verify a meson recipe works with ide=code mode"""
+ recipe_name = "meson-example"
+ build_file = "meson.build"
+ testimage = "oe-selftest-image"
+
+ self._check_workspace()
+ self._write_bb_config([recipe_name])
+ tempdir = self._devtool_ide_sdk_recipe(
+ recipe_name, build_file, testimage)
+ bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@192.168.17.17 -c --ide=code' % (
+ recipe_name, testimage)
+ runCmd(bitbake_sdk_cmd)
+
+ with open(os.path.join(tempdir, '.vscode', 'settings.json')) as settings_j:
+ settings_d = json.load(settings_j)
+ meson_exe = settings_d["mesonbuild.mesonPath"]
+ meson_build_folder = settings_d["mesonbuild.buildFolder"]
+
+ # Verify the wrapper for meson native is available
+ self.assertExists(meson_exe)
+
+ # Verify meson re-uses the o files compiled by bitbake
+ result = runCmd('%s compile -C %s' %
+ (meson_exe, meson_build_folder), cwd=tempdir)
+ self.assertIn("ninja: no work to do.", result.output)
+
+ # Verify the unit tests work (in Qemu)
+ runCmd('%s test -C %s' % (meson_exe, meson_build_folder), cwd=tempdir)
+
+ # Verify re-building and testing works again
+ result = runCmd('%s compile -C %s --clean' %
+ (meson_exe, meson_build_folder), cwd=tempdir)
+ self.assertIn("Cleaning...", result.output)
+ result = runCmd('%s compile -C %s' %
+ (meson_exe, meson_build_folder), cwd=tempdir)
+ self.assertIn("Linking target", result.output)
+ runCmd('%s test -C %s' % (meson_exe, meson_build_folder), cwd=tempdir)
+
+ self._verify_install_script_code(tempdir, recipe_name)
+ self._gdb_cross()
+
+ def test_devtool_ide_sdk_shared_sysroots(self):
+ """Verify the shared sysroot SDK"""
+
+ # Handle the workspace (which is not needed by this test case)
+ self._check_workspace()
+
+ result_init = runCmd(
+ 'devtool ide-sdk -m shared oe-selftest-image cmake-example meson-example --ide=code')
+ bb_vars = get_bb_vars(
+ ['REAL_MULTIMACH_TARGET_SYS', 'DEPLOY_DIR_IMAGE', 'COREBASE'], "meta-ide-support")
+ environment_script = 'environment-setup-%s' % bb_vars['REAL_MULTIMACH_TARGET_SYS']
+ deploydir = bb_vars['DEPLOY_DIR_IMAGE']
+ environment_script_path = os.path.join(deploydir, environment_script)
+ cpp_example_src = os.path.join(
+ bb_vars['COREBASE'], 'meta-selftest', 'recipes-test', 'cpp', 'files')
+
+ # Verify the cross environment script is available
+ self.assertExists(environment_script_path)
+
+ def runCmdEnv(cmd, cwd):
+ cmd = '/bin/sh -c ". %s > /dev/null && %s"' % (
+ environment_script_path, cmd)
+ return runCmd(cmd, cwd=cwd)
+
+ # Verify building the C++ example works with CMake
+ tempdir_cmake = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir_cmake)
+
+ result_cmake = runCmdEnv("which cmake", cwd=tempdir_cmake)
+ cmake_native = os.path.normpath(result_cmake.output.strip())
+ self.assertExists(cmake_native)
+
+ runCmdEnv('cmake %s' % cpp_example_src, cwd=tempdir_cmake)
+ runCmdEnv('cmake --build %s' % tempdir_cmake, cwd=tempdir_cmake)
+
+ # Verify the printed note really refers to a cmake executable
+ cmake_native_code = ""
+ for line in result_init.output.splitlines():
+ m = re.search(r'"cmake.cmakePath": "(.*)"', line)
+ if m:
+ cmake_native_code = m.group(1)
+ break
+ self.assertExists(cmake_native_code)
+ self.assertEqual(cmake_native, cmake_native_code)
+
+ # Verify building the C++ example works with Meson
+ tempdir_meson = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir_meson)
+
+ result_cmake = runCmdEnv("which meson", cwd=tempdir_meson)
+ meson_native = os.path.normpath(result_cmake.output.strip())
+ self.assertExists(meson_native)
+
+ runCmdEnv('meson setup %s' % tempdir_meson, cwd=cpp_example_src)
+ runCmdEnv('meson compile', cwd=tempdir_meson)
+
+ def test_devtool_ide_sdk_plugins(self):
+ """Test that devtool ide-sdk can use plugins from other layers."""
+
+ # We need a workspace layer and a modified recipe (but no image)
+ modified_recipe_name = "meson-example"
+ modified_build_file = "meson.build"
+ testimage = "oe-selftest-image"
+ shared_recipe_name = "cmake-example"
+
+ self._check_workspace()
+ self._write_bb_config([modified_recipe_name])
+ tempdir = self._devtool_ide_sdk_recipe(
+ modified_recipe_name, modified_build_file, None)
+
+ IDE_RE = re.compile(r'.*--ide \{(.*)\}.*')
+
+ def get_ides_from_help(help_str):
+ m = IDE_RE.search(help_str)
+ return m.group(1).split(',')
+
+ # verify the default plugins are available but the foo plugin is not
+ result = runCmd('devtool ide-sdk -h')
+ found_ides = get_ides_from_help(result.output)
+ self.assertIn('code', found_ides)
+ self.assertIn('none', found_ides)
+ self.assertNotIn('foo', found_ides)
+
+ shared_config_file = os.path.join(tempdir, 'shared-config.txt')
+ shared_config_str = 'Dummy shared IDE config'
+ modified_config_file = os.path.join(tempdir, 'modified-config.txt')
+ modified_config_str = 'Dummy modified IDE config'
+
+ # Generate a foo plugin in the workspace layer
+ plugin_dir = os.path.join(
+ self.workspacedir, 'lib', 'devtool', 'ide_plugins')
+ os.makedirs(plugin_dir)
+ plugin_code = 'from devtool.ide_plugins import IdeBase\n\n'
+ plugin_code += 'class IdeFoo(IdeBase):\n'
+ plugin_code += ' def setup_shared_sysroots(self, shared_env):\n'
+ plugin_code += ' with open("%s", "w") as config_file:\n' % shared_config_file
+ plugin_code += ' config_file.write("%s")\n\n' % shared_config_str
+ plugin_code += ' def setup_modified_recipe(self, args, image_recipe, modified_recipe):\n'
+ plugin_code += ' with open("%s", "w") as config_file:\n' % modified_config_file
+ plugin_code += ' config_file.write("%s")\n\n' % modified_config_str
+ plugin_code += 'def register_ide_plugin(ide_plugins):\n'
+ plugin_code += ' ide_plugins["foo"] = IdeFoo\n'
+
+ plugin_py = os.path.join(plugin_dir, 'ide_foo.py')
+ with open(plugin_py, 'w') as plugin_file:
+ plugin_file.write(plugin_code)
+
+ # Verify the foo plugin is available as well
+ result = runCmd('devtool ide-sdk -h')
+ found_ides = get_ides_from_help(result.output)
+ self.assertIn('code', found_ides)
+ self.assertIn('none', found_ides)
+ self.assertIn('foo', found_ides)
+
+ # Verify the foo plugin generates a shared config
+ result = runCmd(
+ 'devtool ide-sdk -m shared --skip-bitbake --ide foo %s' % shared_recipe_name)
+ with open(shared_config_file) as shared_config:
+ shared_config_new = shared_config.read()
+ self.assertEqual(shared_config_str, shared_config_new)
+
+ # Verify the foo plugin generates a modified config
+ result = runCmd('devtool ide-sdk --skip-bitbake --ide foo %s %s' %
+ (modified_recipe_name, testimage))
+ with open(modified_config_file) as modified_config:
+ modified_config_new = modified_config.read()
+ self.assertEqual(modified_config_str, modified_config_new)
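For readability, the ide_foo.py module that the test above writes into the workspace layer expands to roughly the following Python (the two open() paths are placeholders for the tempdir-specific shared-config.txt and modified-config.txt files the test creates):

    # Sketch of the generated plugin; the open() paths stand in for the
    # shared-config.txt / modified-config.txt files under the test tempdir.
    from devtool.ide_plugins import IdeBase

    class IdeFoo(IdeBase):
        def setup_shared_sysroots(self, shared_env):
            with open("/tmp/devtoolqaXXXX/shared-config.txt", "w") as config_file:
                config_file.write("Dummy shared IDE config")

        def setup_modified_recipe(self, args, image_recipe, modified_recipe):
            with open("/tmp/devtoolqaXXXX/modified-config.txt", "w") as config_file:
                config_file.write("Dummy modified IDE config")

    def register_ide_plugin(ide_plugins):
        ide_plugins["foo"] = IdeFoo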
diff --git a/meta/lib/oeqa/selftest/cases/distrodata.py b/meta/lib/oeqa/selftest/cases/distrodata.py
index b80d091c1c..ad952c004b 100644
--- a/meta/lib/oeqa/selftest/cases/distrodata.py
+++ b/meta/lib/oeqa/selftest/cases/distrodata.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -37,42 +39,6 @@ but their recipes claim otherwise by setting UPSTREAM_VERSION_UNKNOWN. Please re
""" + "\n".join(regressed_successes)
self.assertTrue(len(regressed_failures) == 0 and len(regressed_successes) == 0, msg)
- def test_missing_homepg(self):
- """
- Summary: Test for oe-core recipes that don't have a HOMEPAGE or DESCRIPTION
- Expected: All oe-core recipes should have a DESCRIPTION entry
- Expected: All oe-core recipes should have a HOMEPAGE entry except for recipes that are not fetched from external sources.
- Product: oe-core
- """
- with bb.tinfoil.Tinfoil() as tinfoil:
- tinfoil.prepare(config_only=False)
- no_description = []
- no_homepage = []
- for fn in tinfoil.all_recipe_files(variants=False):
- if not '/meta/recipes-' in fn:
- # We are only interested in OE-Core
- continue
- rd = tinfoil.parse_recipe_file(fn, appends=False)
- pn = rd.getVar('BPN')
- srcfile = rd.getVar('SRC_URI').split()
- #Since DESCRIPTION defaults to SUMMARY if not set, we are only interested in recipes without DESCRIPTION or SUMMARY
- if not (rd.getVar('SUMMARY') or rd.getVar('DESCRIPTION')):
- no_description.append((pn, fn))
- if not rd.getVar('HOMEPAGE'):
- if srcfile and srcfile[0].startswith('file') or not rd.getVar('SRC_URI'):
- # We are only interested in recipes SRC_URI fetched from external sources
- continue
- no_homepage.append((pn, fn))
- if no_homepage:
- self.fail("""
-The following recipes do not have a HOMEPAGE. Please add an entry for HOMEPAGE in the recipe.
-""" + "\n".join(['%s (%s)' % i for i in no_homepage]))
-
- if no_description:
- self.fail("""
-The following recipes do not have a DESCRIPTION. Please add an entry for DESCRIPTION in the recipe.
-""" + "\n".join(['%s (%s)' % i for i in no_description]))
-
def test_maintainers(self):
"""
Summary: Test that oe-core recipes have a maintainer and entries in maintainers list have a recipe
@@ -82,15 +48,15 @@ The following recipes do not have a DESCRIPTION. Please add an entry for DESCRIP
Author: Alexander Kanavin <alex.kanavin@gmail.com>
"""
def is_exception(pkg):
- exceptions = ["packagegroup-", "initramfs-", "systemd-machine-units", "target-sdk-provides-dummy"]
+ exceptions = ["packagegroup-",]
for i in exceptions:
if i in pkg:
return True
return False
def is_maintainer_exception(entry):
- exceptions = ["musl", "newlib", "linux-yocto", "linux-dummy", "mesa-gl", "libgfortran",
- "cve-update-db-native", "rust"]
+ exceptions = ["musl", "newlib", "linux-yocto", "linux-dummy", "mesa-gl", "libgfortran", "libx11-compose-data",
+ "cve-update-nvd2-native",]
for i in exceptions:
if i in entry:
return True
diff --git a/meta/lib/oeqa/selftest/cases/efibootpartition.py b/meta/lib/oeqa/selftest/cases/efibootpartition.py
index 26de3a07c9..fa74103dec 100644
--- a/meta/lib/oeqa/selftest/cases/efibootpartition.py
+++ b/meta/lib/oeqa/selftest/cases/efibootpartition.py
@@ -5,42 +5,29 @@
# SPDX-License-Identifier: MIT
#
-import re
-
from oeqa.selftest.case import OESelftestTestCase
-from oeqa.utils.commands import bitbake, runqemu, get_bb_var
+from oeqa.utils.commands import bitbake, runqemu
+from oeqa.core.decorator.data import skipIfNotMachine
+import oe.types
class GenericEFITest(OESelftestTestCase):
"""EFI booting test class"""
+ @skipIfNotMachine("qemux86-64", "test is qemux86-64 specific currently")
+ def test_boot_efi(self):
+ cmd = "runqemu nographic serial wic ovmf"
+ if oe.types.qemu_use_kvm(self.td.get('QEMU_USE_KVM', 0), self.td["TARGET_ARCH"]):
+ cmd += " kvm"
+ image = "core-image-minimal"
- cmd_common = "runqemu nographic serial wic ovmf"
- efi_provider = "systemd-boot"
- image = "core-image-minimal"
- machine = "qemux86-64"
- recipes_built = False
-
- @classmethod
- def setUpLocal(self):
- super(GenericEFITest, self).setUpLocal(self)
-
- self.write_config(self,
-"""
-EFI_PROVIDER = "%s"
+ self.write_config("""
+EFI_PROVIDER = "systemd-boot"
IMAGE_FSTYPES:pn-%s:append = " wic"
-MACHINE = "%s"
MACHINE_FEATURES:append = " efi"
WKS_FILE = "efi-bootdisk.wks.in"
IMAGE_INSTALL:append = " grub-efi systemd-boot kernel-image-bzimage"
"""
-% (self.efi_provider, self.image, self.machine))
- if not self.recipes_built:
- bitbake("ovmf")
- bitbake(self.image)
- self.recipes_built = True
+% (image))
- @classmethod
- def test_boot_efi(self):
- """Test generic boot partition with qemu"""
- cmd = "%s %s" % (self.cmd_common, self.machine)
- with runqemu(self.image, ssh=False, launch_cmd=cmd) as qemu:
+ bitbake(image + " ovmf")
+ with runqemu(image, ssh=False, launch_cmd=cmd) as qemu:
self.assertTrue(qemu.runner.logged, "Failed: %s" % cmd)
diff --git a/meta/lib/oeqa/selftest/cases/eSDK.py b/meta/lib/oeqa/selftest/cases/esdk.py
index 3ea0f66357..9f5de2cde7 100644
--- a/meta/lib/oeqa/selftest/cases/eSDK.py
+++ b/meta/lib/oeqa/selftest/cases/esdk.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/externalsrc.py b/meta/lib/oeqa/selftest/cases/externalsrc.py
new file mode 100644
index 0000000000..1d800dc82c
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/externalsrc.py
@@ -0,0 +1,44 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import shutil
+import tempfile
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import get_bb_var, runCmd
+
+class ExternalSrc(OESelftestTestCase):
+ # test that srctree_hash_files does not crash
+ # we should actually be checking do_compile[file-checksums] but oeqa currently does not support it
+ # so we check only that a recipe with externalsrc can be parsed
+ def test_externalsrc_srctree_hash_files(self):
+ test_recipe = "git-submodule-test"
+ git_url = "git://git.yoctoproject.org/git-submodule-test"
+ externalsrc_dir = tempfile.TemporaryDirectory(prefix="externalsrc").name
+
+ self.write_config(
+ """
+INHERIT += "externalsrc"
+EXTERNALSRC:pn-%s = "%s"
+""" % (test_recipe, externalsrc_dir)
+ )
+
+ # test with git without submodules
+ runCmd('git clone %s %s' % (git_url, externalsrc_dir))
+ os.unlink(externalsrc_dir + "/.gitmodules")
+ open(".gitmodules", 'w').close() # local file .gitmodules in cwd should not affect externalsrc parsing
+ self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S is not equal to EXTERNALSRC")
+ os.unlink(".gitmodules")
+
+ # test with git with submodules
+ runCmd('git checkout .gitmodules', cwd=externalsrc_dir)
+ runCmd('git submodule update --init --recursive', cwd=externalsrc_dir)
+ self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S is not equal to EXTERNALSRC")
+
+ # test without git
+ shutil.rmtree(os.path.join(externalsrc_dir, ".git"))
+ self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S is not equal to EXTERNALSRC")
diff --git a/meta/lib/oeqa/selftest/cases/fetch.py b/meta/lib/oeqa/selftest/cases/fetch.py
index be14272e63..44099176fc 100644
--- a/meta/lib/oeqa/selftest/cases/fetch.py
+++ b/meta/lib/oeqa/selftest/cases/fetch.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -34,6 +36,7 @@ PREMIRRORS:forcevariable = ""
# No mirrors and broken git, should fail
features = """
DL_DIR = "%s"
+SRC_URI:pn-dbus-wait = "git://git.yoctoproject.org/dbus-wait;branch=master;protocol=git"
GIT_PROXY_COMMAND = "false"
MIRRORS:forcevariable = ""
PREMIRRORS:forcevariable = ""
@@ -46,6 +49,7 @@ PREMIRRORS:forcevariable = ""
# Broken git but a specific mirror
features = """
DL_DIR = "%s"
+SRC_URI:pn-dbus-wait = "git://git.yoctoproject.org/dbus-wait;branch=master;protocol=git"
GIT_PROXY_COMMAND = "false"
MIRRORS:forcevariable = "git://.*/.* http://downloads.yoctoproject.org/mirror/sources/"
""" % dldir
@@ -99,7 +103,7 @@ class Dependencies(OESelftestTestCase):
r = """
LICENSE="CLOSED"
- SRC_URI="git://example.com/repo;branch=master"
+ SRC_URI="git://example.com/repo;branch=master;rev=ffffffffffffffffffffffffffffffffffffffff"
"""
f = self.write_recipe(textwrap.dedent(r), tempdir)
d = tinfoil.parse_recipe_file(f)
diff --git a/meta/lib/oeqa/selftest/cases/fitimage.py b/meta/lib/oeqa/selftest/cases/fitimage.py
index e6bfd1257e..347c065377 100644
--- a/meta/lib/oeqa/selftest/cases/fitimage.py
+++ b/meta/lib/oeqa/selftest/cases/fitimage.py
@@ -1,9 +1,11 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
from oeqa.selftest.case import OESelftestTestCase
-from oeqa.utils.commands import runCmd, bitbake, get_bb_var
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
import os
import re
@@ -31,6 +33,8 @@ KERNEL_CLASSES = " kernel-fitimage "
# RAM disk variables including load address and entrypoint for kernel and RAM disk
IMAGE_FSTYPES += "cpio.gz"
INITRAMFS_IMAGE = "core-image-minimal"
+# core-image-minimal is used as initramfs here, drop the rootfs suffix
+IMAGE_NAME_SUFFIX:pn-core-image-minimal = ""
UBOOT_RD_LOADADDRESS = "0x88000000"
UBOOT_RD_ENTRYPOINT = "0x88000000"
UBOOT_LOADADDRESS = "0x80080000"
@@ -40,15 +44,14 @@ FIT_DESC = "A model description"
self.write_config(config)
# fitImage is created as part of linux recipe
- bitbake("virtual/kernel")
+ image = "virtual/kernel"
+ bitbake(image)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'INITRAMFS_IMAGE_NAME', 'KERNEL_FIT_LINK_NAME'], image)
- image_type = "core-image-minimal"
- deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
- machine = get_bb_var('MACHINE')
- fitimage_its_path = os.path.join(deploy_dir_image,
- "fitImage-its-%s-%s-%s" % (image_type, machine, machine))
- fitimage_path = os.path.join(deploy_dir_image,
- "fitImage-%s-%s-%s" % (image_type, machine, machine))
+ fitimage_its_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],
+ "fitImage-its-%s-%s" % (bb_vars['INITRAMFS_IMAGE_NAME'], bb_vars['KERNEL_FIT_LINK_NAME']))
+ fitimage_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],
+ "fitImage-%s-%s" % (bb_vars['INITRAMFS_IMAGE_NAME'], bb_vars['KERNEL_FIT_LINK_NAME']))
self.assertTrue(os.path.exists(fitimage_its_path),
"%s image tree source doesn't exist" % (fitimage_its_path))
@@ -121,15 +124,14 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'"
self.write_config(config)
# fitImage is created as part of linux recipe
- bitbake("virtual/kernel")
+ image = "virtual/kernel"
+ bitbake(image)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'KERNEL_FIT_LINK_NAME'], image)
- image_type = "core-image-minimal"
- deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
- machine = get_bb_var('MACHINE')
- fitimage_its_path = os.path.join(deploy_dir_image,
- "fitImage-its-%s" % (machine,))
- fitimage_path = os.path.join(deploy_dir_image,
- "fitImage-%s.bin" % (machine,))
+ fitimage_its_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],
+ "fitImage-its-%s" % (bb_vars['KERNEL_FIT_LINK_NAME']))
+ fitimage_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],
+ "fitImage-%s.bin" % (bb_vars['KERNEL_FIT_LINK_NAME']))
self.assertTrue(os.path.exists(fitimage_its_path),
"%s image tree source doesn't exist" % (fitimage_its_path))
@@ -202,7 +204,7 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'"
signed_sections = {}
for line in result.output.splitlines():
if line.startswith((' Configuration', ' Image')):
- in_signed = re.search('\((.*)\)', line).groups()[0]
+ in_signed = re.search(r'\((.*)\)', line).groups()[0]
elif re.match('^ *', line) in (' ', ''):
in_signed = None
elif in_signed:
@@ -275,8 +277,8 @@ FIT_SIGN_INDIVIDUAL = "1"
"""
self.write_config(config)
- # The U-Boot fitImage is created as part of linux recipe
- bitbake("virtual/kernel")
+ # The U-Boot fitImage is created as part of the U-Boot recipe
+ bitbake("virtual/bootloader")
deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
machine = get_bb_var('MACHINE')
@@ -348,7 +350,8 @@ UBOOT_LOADADDRESS = "0x80080000"
UBOOT_ENTRYPOINT = "0x80080000"
UBOOT_FIT_DESC = "A model description"
KERNEL_IMAGETYPES += " fitImage "
-KERNEL_CLASSES = " kernel-fitimage test-mkimage-wrapper "
+KERNEL_CLASSES = " kernel-fitimage "
+INHERIT += "test-mkimage-wrapper"
UBOOT_SIGN_ENABLE = "1"
FIT_GENERATE_KEYS = "1"
UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
@@ -359,8 +362,8 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart U-Boot comment'"
"""
self.write_config(config)
- # The U-Boot fitImage is created as part of linux recipe
- bitbake("virtual/kernel")
+ # The U-Boot fitImage is created as part of the U-Boot recipe
+ bitbake("virtual/bootloader")
deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
machine = get_bb_var('MACHINE')
@@ -430,7 +433,8 @@ UBOOT_MACHINE = "am57xx_evm_defconfig"
SPL_BINARY = "MLO"
# The kernel-fitimage class is a dependency even if we're only
# creating/signing the U-Boot fitImage
-KERNEL_CLASSES = " kernel-fitimage test-mkimage-wrapper "
+KERNEL_CLASSES = " kernel-fitimage"
+INHERIT += "test-mkimage-wrapper"
# Enable creation and signing of the U-Boot fitImage
UBOOT_FITIMAGE_ENABLE = "1"
SPL_SIGN_ENABLE = "1"
@@ -449,8 +453,8 @@ UBOOT_FIT_HASH_ALG = "sha256"
"""
self.write_config(config)
- # The U-Boot fitImage is created as part of linux recipe
- bitbake("virtual/kernel")
+ # The U-Boot fitImage is created as part of the U-Boot recipe
+ bitbake("virtual/bootloader")
image_type = "core-image-minimal"
deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
@@ -521,7 +525,7 @@ UBOOT_FIT_HASH_ALG = "sha256"
signed_sections = {}
for line in result.output.splitlines():
if line.startswith((' Image')):
- in_signed = re.search('\((.*)\)', line).groups()[0]
+ in_signed = re.search(r'\((.*)\)', line).groups()[0]
elif re.match(' \w', line):
in_signed = None
elif in_signed:
@@ -538,7 +542,7 @@ UBOOT_FIT_HASH_ALG = "sha256"
self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section)
# Check for SPL_MKIMAGE_SIGN_ARGS
- result = runCmd('bitbake -e virtual/kernel | grep ^T=')
+ result = runCmd('bitbake -e virtual/bootloader | grep ^T=')
tempdir = result.output.split('=', 1)[1].strip().strip('')
result = runCmd('grep "a smart U-Boot comment" %s/run.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
self.assertEqual(result.status, 0, 'SPL_MKIMAGE_SIGN_ARGS value did not get used')
@@ -593,7 +597,8 @@ UBOOT_EXTLINUX = "0"
UBOOT_FIT_GENERATE_KEYS = "1"
UBOOT_FIT_HASH_ALG = "sha256"
KERNEL_IMAGETYPES += " fitImage "
-KERNEL_CLASSES = " kernel-fitimage test-mkimage-wrapper "
+KERNEL_CLASSES = " kernel-fitimage "
+INHERIT += "test-mkimage-wrapper"
UBOOT_SIGN_ENABLE = "1"
FIT_GENERATE_KEYS = "1"
UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
@@ -603,8 +608,8 @@ FIT_SIGN_INDIVIDUAL = "1"
"""
self.write_config(config)
- # The U-Boot fitImage is created as part of linux recipe
- bitbake("virtual/kernel")
+ # The U-Boot fitImage is created as part of the U-Boot recipe
+ bitbake("virtual/bootloader")
image_type = "core-image-minimal"
deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
@@ -675,7 +680,7 @@ FIT_SIGN_INDIVIDUAL = "1"
signed_sections = {}
for line in result.output.splitlines():
if line.startswith((' Image')):
- in_signed = re.search('\((.*)\)', line).groups()[0]
+ in_signed = re.search(r'\((.*)\)', line).groups()[0]
elif re.match(' \w', line):
in_signed = None
elif in_signed:
@@ -692,7 +697,7 @@ FIT_SIGN_INDIVIDUAL = "1"
self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section)
# Check for SPL_MKIMAGE_SIGN_ARGS
- result = runCmd('bitbake -e virtual/kernel | grep ^T=')
+ result = runCmd('bitbake -e virtual/bootloader | grep ^T=')
tempdir = result.output.split('=', 1)[1].strip().strip('')
result = runCmd('grep "a smart cascaded U-Boot comment" %s/run.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
self.assertEqual(result.status, 0, 'SPL_MKIMAGE_SIGN_ARGS value did not get used')
@@ -738,6 +743,7 @@ UBOOT_LOADADDRESS = "0x80000000"
UBOOT_DTB_LOADADDRESS = "0x82000000"
UBOOT_ARCH = "arm"
UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
+UBOOT_MKIMAGE_KERNEL_TYPE = "kernel"
UBOOT_EXTLINUX = "0"
FIT_GENERATE_KEYS = "1"
KERNEL_IMAGETYPE_REPLACEMENT = "zImage"
@@ -763,6 +769,7 @@ FIT_HASH_ALG = "sha256"
kernel_load = str(get_bb_var('UBOOT_LOADADDRESS'))
kernel_entry = str(get_bb_var('UBOOT_ENTRYPOINT'))
+ kernel_type = str(get_bb_var('UBOOT_MKIMAGE_KERNEL_TYPE'))
kernel_compression = str(get_bb_var('FIT_KERNEL_COMP_ALG'))
uboot_arch = str(get_bb_var('UBOOT_ARCH'))
fit_hash_alg = str(get_bb_var('FIT_HASH_ALG'))
@@ -775,7 +782,7 @@ FIT_HASH_ALG = "sha256"
'kernel-1 {',
'description = "Linux kernel";',
'data = /incbin/("linux.bin");',
- 'type = "kernel";',
+ 'type = "' + kernel_type + '";',
'arch = "' + uboot_arch + '";',
'os = "linux";',
'compression = "' + kernel_compression + '";',
diff --git a/meta/lib/oeqa/selftest/cases/gcc.py b/meta/lib/oeqa/selftest/cases/gcc.py
index b9ea03ae62..89360178fe 100644
--- a/meta/lib/oeqa/selftest/cases/gcc.py
+++ b/meta/lib/oeqa/selftest/cases/gcc.py
@@ -1,5 +1,10 @@
+#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
+#
import os
+import time
from oeqa.core.decorator import OETestTag
from oeqa.core.case import OEPTestResultTestCase
from oeqa.selftest.case import OESelftestTestCase
@@ -39,8 +44,13 @@ class GccSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
self.write_config("\n".join(features))
recipe = "gcc-runtime"
+
+ start_time = time.time()
+
bitbake("{} -c check".format(recipe))
+ end_time = time.time()
+
bb_vars = get_bb_vars(["B", "TARGET_SYS"], recipe)
builddir, target_sys = bb_vars["B"], bb_vars["TARGET_SYS"]
@@ -54,7 +64,7 @@ class GccSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
ptestsuite = "gcc-{}".format(suite) if suite != "gcc" else suite
ptestsuite = ptestsuite + "-user" if ssh is None else ptestsuite
- self.ptest_section(ptestsuite, logfile = logpath)
+ self.ptest_section(ptestsuite, duration = int(end_time - start_time), logfile = logpath)
with open(sumspath, "r") as f:
for test, result in parse_values(f):
self.ptest_result(ptestsuite, test, result)
diff --git a/meta/lib/oeqa/selftest/cases/gdbserver.py b/meta/lib/oeqa/selftest/cases/gdbserver.py
new file mode 100644
index 0000000000..9da97ae780
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/gdbserver.py
@@ -0,0 +1,67 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+import os
+import time
+import tempfile
+import shutil
+import concurrent.futures
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runqemu, runCmd
+
+class GdbServerTest(OESelftestTestCase):
+ def test_gdb_server(self):
+ target_arch = self.td["TARGET_ARCH"]
+ target_sys = self.td["TARGET_SYS"]
+
+ features = """
+IMAGE_GEN_DEBUGFS = "1"
+IMAGE_FSTYPES_DEBUGFS = "tar.bz2"
+CORE_IMAGE_EXTRA_INSTALL = "gdbserver"
+ """
+ self.write_config(features)
+
+ gdb_recipe = "gdb-cross-" + target_arch
+ gdb_binary = target_sys + "-gdb"
+
+ bitbake("core-image-minimal %s:do_addto_recipe_sysroot" % gdb_recipe)
+
+ native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", gdb_recipe)
+ r = runCmd("%s --version" % gdb_binary, native_sysroot=native_sysroot, target_sys=target_sys)
+ self.assertEqual(r.status, 0)
+ self.assertIn("GNU gdb", r.output)
+ image = 'core-image-minimal'
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+
+ with tempfile.TemporaryDirectory(prefix="debugfs-") as debugfs:
+ filename = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s-dbg.tar.bz2" % bb_vars['IMAGE_LINK_NAME'])
+ shutil.unpack_archive(filename, debugfs)
+ filename = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.tar.bz2" % bb_vars['IMAGE_LINK_NAME'])
+ shutil.unpack_archive(filename, debugfs)
+
+ with runqemu("core-image-minimal", runqemuparams="nographic") as qemu:
+ status, output = qemu.run_serial("kmod --help")
+ self.assertIn("modprobe", output)
+
+ with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
+ def run_gdb():
+ for _ in range(5):
+ time.sleep(2)
+ cmd = "%s --batch -ex 'set sysroot %s' -ex \"target extended-remote %s:9999\" -ex \"info line kmod_help\"" % (gdb_binary, debugfs, qemu.ip)
+ self.logger.warning("starting gdb %s" % cmd)
+ r = runCmd(cmd, native_sysroot=native_sysroot, target_sys=target_sys)
+ self.assertEqual(0, r.status)
+ line_re = r"Line \d+ of \"/usr/src/debug/kmod/.*/tools/kmod.c\" starts at address 0x[0-9A-Fa-f]+ <kmod_help>"
+ self.assertRegex(r.output, line_re)
+ break
+ else:
+ self.fail("Timed out connecting to gdb")
+ future = executor.submit(run_gdb)
+
+ status, output = qemu.run_serial("gdbserver --once :9999 kmod --help")
+ self.assertEqual(status, 1)
+ # The future either returns None, or raises an exception
+ future.result()
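The structure of test_gdb_server above -- submit the gdb client to a one-worker executor, run the blocking gdbserver command over the serial console, then call future.result() -- means any assertion raised inside run_gdb() is re-raised in the test thread. A minimal sketch of that coordination pattern (the division by zero is just a stand-in failure):

    import concurrent.futures

    with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
        future = executor.submit(lambda: 1 / 0)   # stand-in for run_gdb()
        # ... blocking work happens here (gdbserver on the serial console) ...
        future.result()   # re-raises the worker's exception, failing the test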
diff --git a/meta/lib/oeqa/selftest/cases/gitarchivetests.py b/meta/lib/oeqa/selftest/cases/gitarchivetests.py
new file mode 100644
index 0000000000..71382089c1
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/gitarchivetests.py
@@ -0,0 +1,136 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import sys
+basepath = os.path.abspath(os.path.dirname(__file__) + '/../../../../../')
+lib_path = basepath + '/scripts/lib'
+sys.path = sys.path + [lib_path]
+import oeqa.utils.gitarchive as ga
+from oeqa.utils.git import GitError
+import tempfile
+import shutil
+import scriptutils
+import logging
+from oeqa.selftest.case import OESelftestTestCase
+
+logger = scriptutils.logger_create('resulttool')
+
+def create_fake_repository(commit, tag_list=[], add_remote=True):
+ """ Create a testing git directory
+
+ Initialize a simple git repository with one initial commit, and as many
+ tags on this commit as listed in tag_list.
+ Returns both the git directory path and the gitarchive git object.
+ If commit is true, the fake data will be committed, otherwise it stays in the staging area.
+ If commit is true and tag_list is non-empty, all tags in tag_list will be
+ created on the initial commit.
+ A fake remote is also added so that git ls-remote works.
+ """
+ fake_data_file = "fake_data.txt"
+ tempdir = tempfile.mkdtemp(prefix='fake_results.')
+ repo = ga.init_git_repo(tempdir, False, False, logger)
+ if add_remote:
+ repo.run_cmd(["remote", "add", "origin", "."])
+ with open(os.path.join(tempdir, fake_data_file), "w") as fake_data:
+ fake_data.write("Fake data")
+ if commit:
+ repo.run_cmd(["add", fake_data_file])
+ repo.run_cmd(["commit", "-m", "\"Add fake data\""])
+ for tag in tag_list:
+ repo.run_cmd(["tag", tag])
+
+ return tempdir, repo
+
+def delete_fake_repository(path):
+ shutil.rmtree(path)
+
+def tag_exists(git_obj, target_tag):
+ for tag in git_obj.run_cmd(["tag"]).splitlines():
+ if target_tag == tag:
+ return True
+ return False
+
+class GitArchiveTests(OESelftestTestCase):
+ TEST_BRANCH="main"
+ TEST_COMMIT="0f7d5df"
+ TEST_COMMIT_COUNT="42"
+
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+ cls.log = logging.getLogger('gitarchivetests')
+ cls.log.setLevel(logging.DEBUG)
+
+ def test_create_first_test_tag(self):
+ path, git_obj = create_fake_repository(False)
+ keywords = {'commit': self.TEST_COMMIT, 'branch': self.TEST_BRANCH, "commit_count": self.TEST_COMMIT_COUNT}
+ target_tag = f"{self.TEST_BRANCH}/{self.TEST_COMMIT_COUNT}-g{self.TEST_COMMIT}/0"
+
+ ga.gitarchive(path, path, True, False,
+ "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}",
+ False, "{branch}/{commit_count}-g{commit}/{tag_number}",
+ 'Test run #{tag_number} of {branch}:{commit}', '',
+ [], [], False, keywords, logger)
+ self.assertTrue(tag_exists(git_obj, target_tag), msg=f"Tag {target_tag} has not been created")
+ delete_fake_repository(path)
+
+ def test_create_second_test_tag(self):
+ first_tag = f"{self.TEST_BRANCH}/{self.TEST_COMMIT_COUNT}-g{self.TEST_COMMIT}/0"
+ second_tag = f"{self.TEST_BRANCH}/{self.TEST_COMMIT_COUNT}-g{self.TEST_COMMIT}/1"
+ keywords = {'commit': self.TEST_COMMIT, 'branch': self.TEST_BRANCH, "commit_count": self.TEST_COMMIT_COUNT}
+
+ path, git_obj = create_fake_repository(True, [first_tag])
+ ga.gitarchive(path, path, True, False,
+ "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}",
+ False, "{branch}/{commit_count}-g{commit}/{tag_number}",
+ 'Test run #{tag_number} of {branch}:{commit}', '',
+ [], [], False, keywords, logger)
+ self.assertTrue(tag_exists(git_obj, second_tag), msg=f"Second tag {second_tag} has not been created")
+ delete_fake_repository(path)
+
+ def test_get_revs_on_branch(self):
+ fake_tags_list=["main/10-g0f7d5df/0", "main/10-g0f7d5df/1", "foo/20-g2468f5d/0"]
+ tag_name = "{branch}/{commit_number}-g{commit}/{tag_number}"
+
+ path, git_obj = create_fake_repository(True, fake_tags_list)
+ revs = ga.get_test_revs(logger, git_obj, tag_name, branch="main")
+ self.assertEqual(len(revs), 1)
+ self.assertEqual(revs[0].commit, "0f7d5df")
+ self.assertEqual(len(revs[0].tags), 2)
+ self.assertEqual(revs[0].tags, ['main/10-g0f7d5df/0', 'main/10-g0f7d5df/1'])
+ delete_fake_repository(path)
+
+ def test_get_tags_without_valid_remote(self):
+ url = 'git://git.yoctoproject.org/poky'
+ path, git_obj = create_fake_repository(False, None, False)
+
+ tags = ga.get_tags(git_obj, self.log, pattern="yocto-*", url=url)
+ """Test for some well established tags (released tags)"""
+ self.assertIn("yocto-4.0", tags)
+ self.assertIn("yocto-4.1", tags)
+ self.assertIn("yocto-4.2", tags)
+ delete_fake_repository(path)
+
+ def test_get_tags_with_only_local_tag(self):
+ fake_tags_list=["main/10-g0f7d5df/0", "main/10-g0f7d5df/1", "foo/20-g2468f5d/0"]
+ path, git_obj = create_fake_repository(True, fake_tags_list, False)
+
+ """No remote is configured and no url is passed: get_tags must fall
+ back to local tags
+ """
+ tags = ga.get_tags(git_obj, self.log)
+ self.assertCountEqual(tags, fake_tags_list)
+ delete_fake_repository(path)
+
+ def test_get_tags_without_valid_remote_and_wrong_url(self):
+ url = 'git://git.foo.org/bar'
+ path, git_obj = create_fake_repository(False, None, False)
+
+ """Test for some well established tags (released tags)"""
+ with self.assertRaises(GitError):
+ tags = ga.get_tags(git_obj, self.log, pattern="yocto-*", url=url)
+ delete_fake_repository(path)
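As a quick illustration of the tag naming these tests rely on, the keyword values expand the tag pattern as follows (tag_number is normally supplied by gitarchive itself and is set by hand here only for the example):

    keywords = {'branch': 'main', 'commit': '0f7d5df', 'commit_count': '42', 'tag_number': 0}
    tag = "{branch}/{commit_count}-g{commit}/{tag_number}".format(**keywords)
    # tag == "main/42-g0f7d5df/0", the target_tag checked in test_create_first_test_tag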
diff --git a/meta/lib/oeqa/selftest/cases/glibc.py b/meta/lib/oeqa/selftest/cases/glibc.py
index 6fc98e9cb4..bd56b2f6e7 100644
--- a/meta/lib/oeqa/selftest/cases/glibc.py
+++ b/meta/lib/oeqa/selftest/cases/glibc.py
@@ -1,5 +1,10 @@
+#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
+#
import os
+import time
import contextlib
from oeqa.core.decorator import OETestTag
from oeqa.core.case import OEPTestResultTestCase
@@ -24,15 +29,19 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
features.append('TOOLCHAIN_TEST_HOST_USER = "root"')
features.append('TOOLCHAIN_TEST_HOST_PORT = "22"')
# force single threaded test execution
- features.append('EGLIBCPARALLELISM_task-check:pn-glibc-testsuite = "PARALLELMFLAGS="-j1""')
+ features.append('EGLIBCPARALLELISM:task-check:pn-glibc-testsuite = "PARALLELMFLAGS="-j1""')
self.write_config("\n".join(features))
+ start_time = time.time()
+
bitbake("glibc-testsuite -c check")
+ end_time = time.time()
+
builddir = get_bb_var("B", "glibc-testsuite")
ptestsuite = "glibc-user" if ssh is None else "glibc"
- self.ptest_section(ptestsuite)
+ self.ptest_section(ptestsuite, duration = int(end_time - start_time))
with open(os.path.join(builddir, "tests.sum"), "r", errors='replace') as f:
for test, result in parse_values(f):
self.ptest_result(ptestsuite, test, result)
@@ -41,7 +50,7 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
with contextlib.ExitStack() as s:
# use the base work dir, as the nfs mount, since the recipe directory may not exist
tmpdir = get_bb_var("BASE_WORKDIR")
- nfsport, mountport = s.enter_context(unfs_server(tmpdir))
+ nfsport, mountport = s.enter_context(unfs_server(tmpdir, udp = False))
# build core-image-minimal with required packages
default_installed_packages = [
@@ -61,7 +70,7 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
bitbake("core-image-minimal")
# start runqemu
- qemu = s.enter_context(runqemu("core-image-minimal", runqemuparams = "nographic"))
+ qemu = s.enter_context(runqemu("core-image-minimal", runqemuparams = "nographic", qemuparams = "-m 1024"))
# validate that SSH is working
status, _ = qemu.run("uname")
@@ -70,7 +79,7 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
# setup nfs mount
if qemu.run("mkdir -p \"{0}\"".format(tmpdir))[0] != 0:
raise Exception("Failed to setup NFS mount directory on target")
- mountcmd = "mount -o noac,nfsvers=3,port={0},udp,mountport={1} \"{2}:{3}\" \"{3}\"".format(nfsport, mountport, qemu.server_ip, tmpdir)
+ mountcmd = "mount -o noac,nfsvers=3,port={0},mountport={1} \"{2}:{3}\" \"{3}\"".format(nfsport, mountport, qemu.server_ip, tmpdir)
status, output = qemu.run(mountcmd)
if status != 0:
raise Exception("Failed to setup NFS mount on target ({})".format(repr(output)))
diff --git a/meta/lib/oeqa/selftest/cases/gotoolchain.py b/meta/lib/oeqa/selftest/cases/gotoolchain.py
index c809d7c9b1..ee2cf4b09a 100644
--- a/meta/lib/oeqa/selftest/cases/gotoolchain.py
+++ b/meta/lib/oeqa/selftest/cases/gotoolchain.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -43,12 +45,6 @@ class oeGoToolchainSelfTest(OESelftestTestCase):
@classmethod
def tearDownClass(cls):
- # Go creates file which are readonly
- for dirpath, dirnames, filenames in os.walk(cls.tmpdir_SDKQA):
- for filename in filenames + dirnames:
- f = os.path.join(dirpath, filename)
- if not os.path.islink(f):
- os.chmod(f, 0o775)
shutil.rmtree(cls.tmpdir_SDKQA, ignore_errors=True)
super(oeGoToolchainSelfTest, cls).tearDownClass()
@@ -56,6 +52,9 @@ class oeGoToolchainSelfTest(OESelftestTestCase):
cmd = "cd %s/src/%s/%s; " % (self.go_path, proj, name)
cmd = cmd + ". %s; " % self.env_SDK
cmd = cmd + "export GOPATH=%s; " % self.go_path
+ cmd = cmd + "export GOFLAGS=-modcacherw; "
+ cmd = cmd + "export CGO_ENABLED=1; "
+ cmd = cmd + "export GOPROXY=https://proxy.golang.org,direct; "
cmd = cmd + "${CROSS_COMPILE}go %s" % gocmd
return runCmd(cmd).status
diff --git a/meta/lib/oeqa/selftest/cases/image_typedep.py b/meta/lib/oeqa/selftest/cases/image_typedep.py
index 5b182a8f94..17c98baf14 100644
--- a/meta/lib/oeqa/selftest/cases/image_typedep.py
+++ b/meta/lib/oeqa/selftest/cases/image_typedep.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/imagefeatures.py b/meta/lib/oeqa/selftest/cases/imagefeatures.py
index 6d010b3e3a..dc88c222bd 100644
--- a/meta/lib/oeqa/selftest/cases/imagefeatures.py
+++ b/meta/lib/oeqa/selftest/cases/imagefeatures.py
@@ -1,10 +1,12 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
from oeqa.selftest.case import OESelftestTestCase
from oeqa.core.decorator import OETestTag
-from oeqa.utils.commands import runCmd, bitbake, get_bb_var, runqemu
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu
from oeqa.utils.sshcontrol import SSHControl
import glob
import os
@@ -100,12 +102,11 @@ class ImageFeatures(OESelftestTestCase):
features = 'IMAGE_FSTYPES += " ext4 ext4.bmap ext4.bmap.gz"'
self.write_config(features)
- image_name = 'core-image-minimal'
- bitbake(image_name)
+ image = 'core-image-minimal'
+ bitbake(image)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
- deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
- link_name = get_bb_var('IMAGE_LINK_NAME', image_name)
- image_path = os.path.join(deploy_dir_image, "%s.ext4" % link_name)
+ image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.ext4" % bb_vars['IMAGE_LINK_NAME'])
bmap_path = "%s.bmap" % image_path
gzip_path = "%s.gz" % bmap_path
@@ -118,8 +119,8 @@ class ImageFeatures(OESelftestTestCase):
image_stat = os.stat(image_path)
self.assertGreater(image_stat.st_size, image_stat.st_blocks * 512)
- # check if the resulting gzip is valid
- self.assertTrue(runCmd('gzip -t %s' % gzip_path))
+ # check if the resulting gzip is valid, --force is needed in case gzip_path is a symlink
+ self.assertTrue(runCmd('gzip --test --force %s' % gzip_path))
def test_hypervisor_fmts(self):
"""
@@ -134,17 +135,16 @@ class ImageFeatures(OESelftestTestCase):
img_types = [ 'vmdk', 'vdi', 'qcow2' ]
features = ""
for itype in img_types:
- features += 'IMAGE_FSTYPES += "wic.%s"\n' % itype
+ features += 'IMAGE_FSTYPES += "ext4.%s"\n' % itype
self.write_config(features)
- image_name = 'core-image-minimal'
- bitbake(image_name)
+ image = 'core-image-minimal'
+ bitbake(image)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
- deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
- link_name = get_bb_var('IMAGE_LINK_NAME', image_name)
for itype in img_types:
- image_path = os.path.join(deploy_dir_image, "%s.wic.%s" %
- (link_name, itype))
+ image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.ext4.%s" %
+ (bb_vars['IMAGE_LINK_NAME'], itype))
# check if result image file is in deploy directory
self.assertTrue(os.path.exists(image_path))
@@ -164,24 +164,22 @@ class ImageFeatures(OESelftestTestCase):
"""
Summary: Check for chaining many CONVERSION_CMDs together
Expected: 1. core-image-minimal can be built with
- ext4.bmap.gz.bz2.lzo.xz.u-boot and also create a
+ ext4.bmap.gz.bz2.zst.xz.u-boot and also create a
sha256sum
2. The above image has a valid sha256sum
Product: oe-core
Author: Tom Rini <trini@konsulko.com>
"""
- conv = "ext4.bmap.gz.bz2.lzo.xz.u-boot"
+ conv = "ext4.bmap.gz.bz2.zst.xz.u-boot"
features = 'IMAGE_FSTYPES += "%s %s.sha256sum"' % (conv, conv)
self.write_config(features)
- image_name = 'core-image-minimal'
- bitbake(image_name)
-
- deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
- link_name = get_bb_var('IMAGE_LINK_NAME', image_name)
- image_path = os.path.join(deploy_dir_image, "%s.%s" %
- (link_name, conv))
+ image = 'core-image-minimal'
+ bitbake(image)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+ image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.%s" %
+ (bb_vars['IMAGE_LINK_NAME'], conv))
# check if resulting image is in the deploy directory
self.assertTrue(os.path.exists(image_path))
@@ -189,7 +187,7 @@ class ImageFeatures(OESelftestTestCase):
# check if the resulting sha256sum agrees
self.assertTrue(runCmd('cd %s;sha256sum -c %s.%s.sha256sum' %
- (deploy_dir_image, link_name, conv)))
+ (bb_vars['DEPLOY_DIR_IMAGE'], bb_vars['IMAGE_LINK_NAME'], conv)))
def test_image_fstypes(self):
"""
@@ -198,26 +196,43 @@ class ImageFeatures(OESelftestTestCase):
Product: oe-core
Author: Ed Bartosh <ed.bartosh@linux.intel.com>
"""
- image_name = 'core-image-minimal'
+ image = 'core-image-minimal'
- all_image_types = set(get_bb_var("IMAGE_TYPES", image_name).split())
- skip_image_types = set(('container', 'elf', 'f2fs', 'multiubi', 'tar.zst', 'wic.zst'))
+ all_image_types = set(get_bb_var("IMAGE_TYPES", image).split())
+ skip_image_types = set(('container', 'elf', 'f2fs', 'tar.zst', 'wic.zst', 'squashfs-lzo', 'vfat'))
img_types = all_image_types - skip_image_types
- config = 'IMAGE_FSTYPES += "%s"\n'\
- 'MKUBIFS_ARGS ?= "-m 2048 -e 129024 -c 2047"\n'\
- 'UBINIZE_ARGS ?= "-m 2048 -p 128KiB -s 512"' % ' '.join(img_types)
+ config = """
+IMAGE_FSTYPES += "%s"
+WKS_FILE = "wictestdisk.wks"
+MKUBIFS_ARGS ?= "-m 2048 -e 129024 -c 2047"
+UBINIZE_ARGS ?= "-m 2048 -p 128KiB -s 512"
+MULTIUBI_BUILD += "mtd_2_128"
+MKUBIFS_ARGS_mtd_2_128 ?= "-m 2048 -e 129024 -c 2047"
+UBINIZE_ARGS_mtd_2_128 ?= "-m 2048 -p 128KiB -s 512"
+MULTIUBI_BUILD += "mtd_4_256"
+MKUBIFS_ARGS_mtd_4_256 ?= "-m 4096 -e 253952 -c 4096"
+UBINIZE_ARGS_mtd_4_256 ?= "-m 4096 -p 256KiB"
+""" % ' '.join(img_types)
self.write_config(config)
- bitbake(image_name)
+ bitbake(image)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME', 'MULTIUBI_BUILD'], image)
- deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
- link_name = get_bb_var('IMAGE_LINK_NAME', image_name)
for itype in img_types:
- image_path = os.path.join(deploy_dir_image, "%s.%s" % (link_name, itype))
- # check if result image is in deploy directory
- self.assertTrue(os.path.exists(image_path),
- "%s image %s doesn't exist" % (itype, image_path))
+ if itype == 'multiubi':
+ # For multiubi builds we iterate over the MULTIUBI_BUILD entries, each of
+ # which is appended to IMAGE_LINK_NAME to form the artifact name
+ for vname in bb_vars['MULTIUBI_BUILD'].split():
+ image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s_%s.ubifs" % (bb_vars['IMAGE_LINK_NAME'], vname))
+ # check if result image is in deploy directory
+ self.assertTrue(os.path.exists(image_path),
+ "%s image %s doesn't exist" % (itype, image_path))
+ else:
+ image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.%s" % (bb_vars['IMAGE_LINK_NAME'], itype))
+ # check if result image is in deploy directory
+ self.assertTrue(os.path.exists(image_path),
+ "%s image %s doesn't exist" % (itype, image_path))
def test_useradd_static(self):
config = """
@@ -235,7 +250,7 @@ USERADD_GID_TABLES += "files/static-group"
DISTRO_FEATURES:append = " pam opengl wayland"
# Switch to systemd
-DISTRO_FEATURES:append = " systemd"
+DISTRO_FEATURES:append = " systemd usrmerge"
VIRTUAL-RUNTIME_init_manager = "systemd"
VIRTUAL-RUNTIME_initscripts = ""
VIRTUAL-RUNTIME_syslog = ""
@@ -266,20 +281,20 @@ SKIP_RECIPE[busybox] = "Don't build this"
Yeoh Ee Peng <ee.peng.yeoh@intel.com>
"""
- image_name = 'core-image-minimal'
+ image = 'core-image-minimal'
+ image_fstypes_debugfs = 'tar.bz2'
features = 'IMAGE_GEN_DEBUGFS = "1"\n'
- features += 'IMAGE_FSTYPES_DEBUGFS = "tar.bz2"\n'
- features += 'MACHINE = "genericx86-64"\n'
+ features += 'IMAGE_FSTYPES_DEBUGFS = "%s"\n' % image_fstypes_debugfs
self.write_config(features)
- bitbake(image_name)
- deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
- dbg_tar_file = os.path.join(deploy_dir_image, "*-dbg.rootfs.tar.bz2")
- debug_files = glob.glob(dbg_tar_file)
- self.assertNotEqual(len(debug_files), 0, 'debug filesystem not generated at %s' % dbg_tar_file)
- result = runCmd('cd %s; tar xvf %s' % (deploy_dir_image, dbg_tar_file))
+ bitbake(image)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+
+ dbg_tar_file = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s-dbg.%s" % (bb_vars['IMAGE_LINK_NAME'], image_fstypes_debugfs))
+ self.assertTrue(os.path.exists(dbg_tar_file), 'debug filesystem not generated at %s' % dbg_tar_file)
+ result = runCmd('cd %s; tar xvf %s' % (bb_vars['DEPLOY_DIR_IMAGE'], dbg_tar_file))
self.assertEqual(result.status, 0, msg='Failed to extract %s: %s' % (dbg_tar_file, result.output))
- result = runCmd('find %s -name %s' % (deploy_dir_image, "udevadm"))
+ result = runCmd('find %s -name %s' % (bb_vars['DEPLOY_DIR_IMAGE'], "udevadm"))
self.assertTrue("udevadm" in result.output, msg='Failed to find udevadm: %s' % result.output)
dbg_symbols_targets = result.output.splitlines()
self.assertTrue(dbg_symbols_targets, msg='Failed to split udevadm: %s' % dbg_symbols_targets)
@@ -289,9 +304,33 @@ SKIP_RECIPE[busybox] = "Don't build this"
def test_empty_image(self):
"""Test creation of image with no packages"""
- bitbake('test-empty-image')
- res_dir = get_bb_var('DEPLOY_DIR_IMAGE')
- images = os.path.join(res_dir, "test-empty-image-*.manifest")
- result = glob.glob(images)
- with open(result[1],"r") as f:
+ image = 'test-empty-image'
+ bitbake(image)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+ manifest = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.manifest" % bb_vars['IMAGE_LINK_NAME'])
+ self.assertTrue(os.path.exists(manifest))
+
+ with open(manifest, "r") as f:
self.assertEqual(len(f.read().strip()),0)
+
+ def test_mandb(self):
+ """
+ Test that an image containing manpages has working man and apropos commands.
+ """
+ config = """
+DISTRO_FEATURES:append = " api-documentation"
+CORE_IMAGE_EXTRA_INSTALL = "man-pages kmod-doc"
+"""
+ self.write_config(config)
+ bitbake("core-image-minimal")
+
+ with runqemu('core-image-minimal', ssh=False, runqemuparams='nographic') as qemu:
+ # This manpage is provided by man-pages
+ status, output = qemu.run_serial("apropos 8859")
+ self.assertEqual(status, 1, 'Failed to run apropos: %s' % (output))
+ self.assertIn("iso_8859_15", output)
+
+ # This manpage is provided by kmod
+ status, output = qemu.run_serial("man --pager=cat modprobe")
+ self.assertEqual(status, 1, 'Failed to run man: %s' % (output))
+ self.assertIn("force-modversion", output)
diff --git a/meta/lib/oeqa/selftest/cases/incompatible_lic.py b/meta/lib/oeqa/selftest/cases/incompatible_lic.py
index 6279d74245..f4af67a239 100644
--- a/meta/lib/oeqa/selftest/cases/incompatible_lic.py
+++ b/meta/lib/oeqa/selftest/cases/incompatible_lic.py
@@ -1,3 +1,8 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import bitbake
@@ -108,6 +113,7 @@ INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*"
raise AssertionError(result.output)
def test_bash_and_license(self):
+ self.disable_class("create-spdx")
self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " & SomeLicense"')
error_msg = "ERROR: core-image-minimal-1.0-r0 do_rootfs: Package bash cannot be installed into the image because it has incompatible license(s): GPL-3.0-or-later"
@@ -116,6 +122,7 @@ INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*"
raise AssertionError(result.output)
def test_bash_or_license(self):
+ self.disable_class("create-spdx")
self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " | SomeLicense"')
bitbake('core-image-minimal')
@@ -134,23 +141,11 @@ INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*"
def test_core_image_full_cmdline_weston(self):
self.write_config("""
-INHERIT += "testimage"
+IMAGE_CLASSES += "testimage"
INCOMPATIBLE_LICENSE:pn-core-image-full-cmdline = "GPL-3.0* LGPL-3.0*"
INCOMPATIBLE_LICENSE:pn-core-image-weston = "GPL-3.0* LGPL-3.0*"
-# Settings for full-cmdline
-RDEPENDS:packagegroup-core-full-cmdline-utils:remove = "bash bc coreutils cpio ed findutils gawk grep mc mc-fish mc-helpers mc-helpers-perl sed tar time"
-RDEPENDS:packagegroup-core-full-cmdline-dev-utils:remove = "diffutils m4 make patch"
-RDEPENDS:packagegroup-core-full-cmdline-multiuser:remove = "gzip"
-# Settings for weston
-# direct gpl3 dependencies
-RRECOMMENDS:packagegroup-base-vfat:remove = "dosfstools"
-PACKAGECONFIG:remove:pn-bluez5 = "readline"
-# dnf pulls in gpg which is gpl3; it also pulls in python3-rpm which pulls in rpm-build which pulls in bash
-# so install rpm but not dnf
-IMAGE_FEATURES:remove:pn-core-image-weston = "package-management"
-CORE_IMAGE_EXTRA_INSTALL:pn-core-image-weston += "rpm"
-# matchbox-terminal depends on vte, which is gpl3
-CORE_IMAGE_BASE_INSTALL:remove:pn-core-image-weston = "matchbox-terminal"
+
+require conf/distro/include/no-gplv3.inc
""")
bitbake('core-image-full-cmdline core-image-weston')
bitbake('-c testimage core-image-full-cmdline core-image-weston')
diff --git a/meta/lib/oeqa/selftest/cases/git.py b/meta/lib/oeqa/selftest/cases/intercept.py
index f12874dc7d..12583c3099 100644
--- a/meta/lib/oeqa/selftest/cases/git.py
+++ b/meta/lib/oeqa/selftest/cases/intercept.py
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import bitbake
diff --git a/meta/lib/oeqa/selftest/cases/kerneldevelopment.py b/meta/lib/oeqa/selftest/cases/kerneldevelopment.py
index b1623a1885..b1f78a0cd1 100644
--- a/meta/lib/oeqa/selftest/cases/kerneldevelopment.py
+++ b/meta/lib/oeqa/selftest/cases/kerneldevelopment.py
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
import os
from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import runCmd, get_bb_var
@@ -58,6 +64,7 @@ class KernelDev(OESelftestTestCase):
recipe_append = os.path.join(self.recipeskernel_dir, 'linux-yocto_%.bbappend')
with open(recipe_append, 'w+') as fh:
fh.write('SRC_URI += "file://%s"\n' % patch_name)
+ fh.write('ERROR_QA:remove:pn-linux-yocto = "patch-status"\n')
fh.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"')
runCmd('bitbake virtual/kernel -c clean')
diff --git a/meta/lib/oeqa/selftest/cases/layerappend.py b/meta/lib/oeqa/selftest/cases/layerappend.py
index 8fb1e6c530..379ed589ad 100644
--- a/meta/lib/oeqa/selftest/cases/layerappend.py
+++ b/meta/lib/oeqa/selftest/cases/layerappend.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/liboe.py b/meta/lib/oeqa/selftest/cases/liboe.py
index afe8f8809f..d5ffffdcb4 100644
--- a/meta/lib/oeqa/selftest/cases/liboe.py
+++ b/meta/lib/oeqa/selftest/cases/liboe.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -97,6 +99,6 @@ class LibOE(OESelftestTestCase):
dstcnt = len(os.listdir(dst))
srccnt = len(os.listdir(src))
- self.assertEquals(dstcnt, len(testfiles), "Number of files in dst (%s) differs from number of files in src(%s)." % (dstcnt, srccnt))
+ self.assertEqual(dstcnt, len(testfiles), "Number of files in dst (%s) differs from number of files in src(%s)." % (dstcnt, srccnt))
oe.path.remove(testloc)
diff --git a/meta/lib/oeqa/selftest/cases/lic_checksum.py b/meta/lib/oeqa/selftest/cases/lic_checksum.py
index 8f1226e6a5..2d0b805b90 100644
--- a/meta/lib/oeqa/selftest/cases/lic_checksum.py
+++ b/meta/lib/oeqa/selftest/cases/lic_checksum.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -26,6 +28,7 @@ LIC_FILES_CHKSUM = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e"
SRC_URI = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e"
""" % (urllib.parse.quote(lic_path), urllib.parse.quote(lic_path)))
result = bitbake(bitbake_cmd)
+ self.delete_recipeinc('emptytest')
# Verify that changing a license file that has an absolute path causes
@@ -51,5 +54,6 @@ SRC_URI = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e"
f.write("data")
result = bitbake(bitbake_cmd, ignore_status=True)
+ self.delete_recipeinc('emptytest')
if error_msg not in result.output:
raise AssertionError(result.output)
diff --git a/meta/lib/oeqa/selftest/cases/locales.py b/meta/lib/oeqa/selftest/cases/locales.py
new file mode 100644
index 0000000000..4ca8ffb7aa
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/locales.py
@@ -0,0 +1,54 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.core.decorator import OETestTag
+from oeqa.utils.commands import bitbake, runqemu
+
+class LocalesTest(OESelftestTestCase):
+
+ @OETestTag("runqemu")
+
+ def run_locales_test(self, binary_enabled):
+ features = []
+ features.append('EXTRA_IMAGE_FEATURES = "empty-root-password allow-empty-password allow-root-login"')
+ features.append('IMAGE_INSTALL:append = " glibc-utils localedef"')
+ features.append('GLIBC_GENERATE_LOCALES = "en_US.UTF-8 fr_FR.UTF-8"')
+ features.append('IMAGE_LINGUAS:append = " en-us fr-fr"')
+ if binary_enabled:
+ features.append('ENABLE_BINARY_LOCALE_GENERATION = "1"')
+ else:
+ features.append('ENABLE_BINARY_LOCALE_GENERATION = "0"')
+ self.write_config("\n".join(features))
+
+ # Build a core-image-minimal
+ bitbake('core-image-minimal')
+
+ with runqemu("core-image-minimal", ssh=False, runqemuparams='nographic') as qemu:
+ cmd = "locale -a"
+ status, output = qemu.run_serial(cmd)
+ # output must include fr_FR or fr_FR.UTF-8
+ self.assertEqual(status, 1, msg='locale test command failed: output: %s' % output)
+ self.assertIn("fr_FR", output, msg='locale -a test failed: output: %s' % output)
+
+ cmd = "localedef --list-archive -v"
+ status, output = qemu.run_serial(cmd)
+ # output must include fr_FR.utf8
+ self.assertEqual(status, 1, msg='localedef test command failed: output: %s' % output)
+ self.assertIn("fr_FR.utf8", output, msg='localedef test failed: output: %s' % output)
+
+ def test_locales_on(self):
+ """
+ Summary: Test the locales are generated
+ Expected: 1. Check the locale exist in the locale-archive
+ 2. Check the locale exist for the glibc
+ 3. Check the locale can be generated
+ Product: oe-core
+ Author: Louis Rannou <lrannou@baylibre.com>
+ AutomatedBy: Louis Rannou <lrannou@baylibre.com>
+ """
+ self.run_locales_test(True)
+
+ def test_locales_off(self):
+ self.run_locales_test(False)
diff --git a/meta/lib/oeqa/selftest/cases/manifest.py b/meta/lib/oeqa/selftest/cases/manifest.py
index 0a04c13a85..07a6c80489 100644
--- a/meta/lib/oeqa/selftest/cases/manifest.py
+++ b/meta/lib/oeqa/selftest/cases/manifest.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/meta_ide.py b/meta/lib/oeqa/selftest/cases/meta_ide.py
index 6f10d30dc9..ffe0d2604d 100644
--- a/meta/lib/oeqa/selftest/cases/meta_ide.py
+++ b/meta/lib/oeqa/selftest/cases/meta_ide.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -16,13 +18,15 @@ class MetaIDE(OESelftestTestCase):
def setUpClass(cls):
super(MetaIDE, cls).setUpClass()
bitbake('meta-ide-support')
- bb_vars = get_bb_vars(['MULTIMACH_TARGET_SYS', 'TMPDIR', 'COREBASE'])
+ bitbake('build-sysroots -c build_native_sysroot')
+ bitbake('build-sysroots -c build_target_sysroot')
+ bb_vars = get_bb_vars(['MULTIMACH_TARGET_SYS', 'DEPLOY_DIR_IMAGE', 'COREBASE'])
cls.environment_script = 'environment-setup-%s' % bb_vars['MULTIMACH_TARGET_SYS']
- cls.tmpdir = bb_vars['TMPDIR']
- cls.environment_script_path = '%s/%s' % (cls.tmpdir, cls.environment_script)
+ cls.deploydir = bb_vars['DEPLOY_DIR_IMAGE']
+ cls.environment_script_path = '%s/%s' % (cls.deploydir, cls.environment_script)
cls.corebasedir = bb_vars['COREBASE']
cls.tmpdir_metaideQA = tempfile.mkdtemp(prefix='metaide')
-
+
@classmethod
def tearDownClass(cls):
shutil.rmtree(cls.tmpdir_metaideQA, ignore_errors=True)
@@ -40,12 +44,17 @@ class MetaIDE(OESelftestTestCase):
def test_meta_ide_can_build_cpio_project(self):
dl_dir = self.td.get('DL_DIR', None)
self.project = SDKBuildProject(self.tmpdir_metaideQA + "/cpio/", self.environment_script_path,
- "https://ftp.gnu.org/gnu/cpio/cpio-2.13.tar.gz",
+ "https://ftp.gnu.org/gnu/cpio/cpio-2.15.tar.gz",
self.tmpdir_metaideQA, self.td['DATETIME'], dl_dir=dl_dir)
self.project.download_archive()
- self.assertEqual(self.project.run_configure('$CONFIGURE_FLAGS --disable-maintainer-mode','sed -i -e "/char \*program_name/d" src/global.c;'), 0,
+ self.assertEqual(self.project.run_configure('$CONFIGURE_FLAGS'), 0,
msg="Running configure failed")
self.assertEqual(self.project.run_make(), 0,
msg="Running make failed")
self.assertEqual(self.project.run_install(), 0,
msg="Running make install failed")
+
+ def test_meta_ide_can_run_sdk_tests(self):
+ bitbake('-c populate_sysroot gtk+3')
+ bitbake('build-sysroots -c build_target_sysroot')
+ bitbake('-c testsdk meta-ide-support')
diff --git a/meta/lib/oeqa/selftest/cases/minidebuginfo.py b/meta/lib/oeqa/selftest/cases/minidebuginfo.py
new file mode 100644
index 0000000000..2919f07939
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/minidebuginfo.py
@@ -0,0 +1,44 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+import os
+import subprocess
+import tempfile
+import shutil
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runCmd
+
+
+class Minidebuginfo(OESelftestTestCase):
+ def test_minidebuginfo(self):
+ target_sys = get_bb_var("TARGET_SYS")
+ binutils = "binutils-cross-{}".format(get_bb_var("TARGET_ARCH"))
+
+ image = 'core-image-minimal'
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME', 'READELF'], image)
+
+ self.write_config("""
+DISTRO_FEATURES:append = " minidebuginfo"
+IMAGE_FSTYPES = "tar.bz2"
+""")
+ bitbake("{} {}:do_addto_recipe_sysroot".format(image, binutils))
+
+ native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", binutils)
+
+ # confirm that executables and shared libraries contain an ELF section
+ # ".gnu_debugdata" which stores minidebuginfo.
+ with tempfile.TemporaryDirectory(prefix = "unpackfs-") as unpackedfs:
+ filename = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "{}.tar.bz2".format(bb_vars['IMAGE_LINK_NAME']))
+ shutil.unpack_archive(filename, unpackedfs)
+
+ r = runCmd([bb_vars['READELF'], "-W", "-S", os.path.join(unpackedfs, "bin", "busybox")],
+ native_sysroot = native_sysroot, target_sys = target_sys)
+ self.assertIn(".gnu_debugdata", r.output)
+
+ r = runCmd([bb_vars['READELF'], "-W", "-S", os.path.join(unpackedfs, "lib", "libc.so.6")],
+ native_sysroot = native_sysroot, target_sys = target_sys)
+ self.assertIn(".gnu_debugdata", r.output)
+
diff --git a/meta/lib/oeqa/selftest/cases/multiconfig.py b/meta/lib/oeqa/selftest/cases/multiconfig.py
index baae9b456f..f509cbf607 100644
--- a/meta/lib/oeqa/selftest/cases/multiconfig.py
+++ b/meta/lib/oeqa/selftest/cases/multiconfig.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -70,3 +72,16 @@ TMPDIR = "${TOPDIR}/tmp-mc-tiny"
result = bitbake('mc:test:multiconfig-test-parse -c showvar')
self.assertIn('MCTESTVAR=test2', result.output.splitlines())
+
+ def test_multiconfig_inlayer(self):
+ """
+ Test that a multiconfig from meta-selftest works.
+ """
+
+ config = """
+BBMULTICONFIG = "muslmc"
+"""
+ self.write_config(config)
+
+ # Only a dry-run parse of bash is needed to check the multiconfig is present
+ bitbake('mc:muslmc:bash -n')
diff --git a/meta/lib/oeqa/selftest/cases/newlib.py b/meta/lib/oeqa/selftest/cases/newlib.py
index 999e3e78b0..fe57aa51f2 100644
--- a/meta/lib/oeqa/selftest/cases/newlib.py
+++ b/meta/lib/oeqa/selftest/cases/newlib.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py b/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py
index 802a91a488..042ccdd2b4 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py
@@ -1,8 +1,11 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
import os
+import sys
from oeqa.selftest.case import OESelftestTestCase
import tempfile
import operator
@@ -11,15 +14,14 @@ from oeqa.utils.commands import get_bb_var
class TestBlobParsing(OESelftestTestCase):
def setUp(self):
- import time
self.repo_path = tempfile.mkdtemp(prefix='selftest-buildhistory',
dir=get_bb_var('TOPDIR'))
try:
from git import Repo
self.repo = Repo.init(self.repo_path)
- except ImportError:
- self.skipTest('Python module GitPython is not present')
+ except ImportError as e:
+ self.skipTest('Python module GitPython is not present (%s) (%s)' % (e, sys.path))
self.test_file = "test"
self.var_map = {}
@@ -28,6 +30,16 @@ class TestBlobParsing(OESelftestTestCase):
import shutil
shutil.rmtree(self.repo_path)
+ @property
+ def heads_default(self):
+ """
+ Support repos defaulting to master or to main branch
+ """
+ try:
+ return self.repo.heads.main
+ except AttributeError:
+ return self.repo.heads.master
+
def commit_vars(self, to_add={}, to_remove = [], msg="A commit message"):
if len(to_add) == 0 and len(to_remove) == 0:
return
@@ -65,10 +77,10 @@ class TestBlobParsing(OESelftestTestCase):
changesmap = { "foo-2" : ("2", "8"), "bar" : ("","4"), "bar-2" : ("","5")}
self.commit_vars(to_add = { "foo" : "1", "foo-2" : "2", "foo-3" : "3" })
- blob1 = self.repo.heads.master.commit.tree.blobs[0]
+ blob1 = self.heads_default.commit.tree.blobs[0]
self.commit_vars(to_add = { "foo-2" : "8", "bar" : "4", "bar-2" : "5" })
- blob2 = self.repo.heads.master.commit.tree.blobs[0]
+ blob2 = self.heads_default.commit.tree.blobs[0]
change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file),
blob1, blob2, False, False)
@@ -84,10 +96,10 @@ class TestBlobParsing(OESelftestTestCase):
defaultmap = { x : ("default", "1") for x in ["PKG", "PKGE", "PKGV", "PKGR"]}
self.commit_vars(to_add = { "foo" : "1" })
- blob1 = self.repo.heads.master.commit.tree.blobs[0]
+ blob1 = self.heads_default.commit.tree.blobs[0]
self.commit_vars(to_add = { "PKG" : "1", "PKGE" : "1", "PKGV" : "1", "PKGR" : "1" })
- blob2 = self.repo.heads.master.commit.tree.blobs[0]
+ blob2 = self.heads_default.commit.tree.blobs[0]
change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file),
blob1, blob2, False, False)
diff --git a/meta/lib/oeqa/selftest/cases/oelib/elf.py b/meta/lib/oeqa/selftest/cases/oelib/elf.py
index 5a5f9b4fdf..7bf550b6fd 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/elf.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/elf.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/oelib/license.py b/meta/lib/oeqa/selftest/cases/oelib/license.py
index 3b359396b6..5eea12e761 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/license.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/license.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/oelib/path.py b/meta/lib/oeqa/selftest/cases/oelib/path.py
index a1cfa08c09..b963e447e3 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/path.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/path.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/oelib/types.py b/meta/lib/oeqa/selftest/cases/oelib/types.py
index 7eb49e6f95..58318b18b2 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/types.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/types.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/oelib/utils.py b/meta/lib/oeqa/selftest/cases/oelib/utils.py
index bbf67bf9c9..0cb46425a0 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/utils.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/utils.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/oescripts.py b/meta/lib/oeqa/selftest/cases/oescripts.py
index cd687816c8..f69efccfee 100644
--- a/meta/lib/oeqa/selftest/cases/oescripts.py
+++ b/meta/lib/oeqa/selftest/cases/oescripts.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -21,7 +23,7 @@ class BuildhistoryDiffTests(BuildhistoryBase):
pkgv = result.output.rstrip()
result = runCmd("buildhistory-diff -p %s" % get_bb_var('BUILDHISTORY_DIR'))
expected_endlines = [
- "xcursor-transparent-theme-dev: RDEPENDS: removed \"xcursor-transparent-theme (['= %s-r1'])\", added \"xcursor-transparent-theme (['= %s-r0'])\"" % (pkgv, pkgv),
+ "xcursor-transparent-theme-dev: RRECOMMENDS: removed \"xcursor-transparent-theme (['= %s-r1'])\", added \"xcursor-transparent-theme (['= %s-r0'])\"" % (pkgv, pkgv),
"xcursor-transparent-theme-staticdev: RDEPENDS: removed \"xcursor-transparent-theme-dev (['= %s-r1'])\", added \"xcursor-transparent-theme-dev (['= %s-r0'])\"" % (pkgv, pkgv)
]
for line in result.output.splitlines():
@@ -34,18 +36,16 @@ class BuildhistoryDiffTests(BuildhistoryBase):
if expected_endlines:
self.fail('Missing expected line endings:\n %s' % '\n '.join(expected_endlines))
-class OEScriptTests(OESelftestTestCase):
- scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')
-
@unittest.skipUnless(importlib.util.find_spec("cairo"), "Python cairo module is not present")
-class OEPybootchartguyTests(OEScriptTests):
+class OEPybootchartguyTests(OESelftestTestCase):
@classmethod
def setUpClass(cls):
- super(OEScriptTests, cls).setUpClass()
+ super().setUpClass()
bitbake("core-image-minimal -c rootfs -f")
cls.tmpdir = get_bb_var('TMPDIR')
cls.buildstats = cls.tmpdir + "/buildstats/" + sorted(os.listdir(cls.tmpdir + "/buildstats"))[-1]
+ cls.scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')
def test_pybootchartguy_help(self):
runCmd('%s/pybootchartgui/pybootchartgui.py --help' % self.scripts_dir)
@@ -63,7 +63,12 @@ class OEPybootchartguyTests(OEScriptTests):
self.assertTrue(os.path.exists(self.tmpdir + "/charts.pdf"))
-class OEGitproxyTests(OEScriptTests):
+class OEGitproxyTests(OESelftestTestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+ cls.scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')
def test_oegitproxy_help(self):
try:
@@ -121,15 +126,22 @@ class OEGitproxyTests(OEScriptTests):
class OeRunNativeTest(OESelftestTestCase):
def test_oe_run_native(self):
bitbake("qemu-helper-native -c addto_recipe_sysroot")
- result = runCmd("oe-run-native qemu-helper-native tunctl -h")
- self.assertIn("Delete: tunctl -d device-name [-f tun-clone-device]", result.output)
+ result = runCmd("oe-run-native qemu-helper-native qemu-oe-bridge-helper --help")
+ self.assertIn("Helper function to find and exec qemu-bridge-helper", result.output)
+
+class OEListPackageconfigTests(OESelftestTestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+ cls.scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')
-class OEListPackageconfigTests(OEScriptTests):
#oe-core.scripts.List_all_the_PACKAGECONFIG's_flags
def check_endlines(self, results, expected_endlines):
for line in results.output.splitlines():
for el in expected_endlines:
- if line.split() == el.split():
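+                # Match on the recipe name (first token), then check that the sorted
+                # expected tokens all appear within the sorted tokens of the output line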
+ if line and line.split()[0] == el.split()[0] and \
+ ' '.join(sorted(el.split())) in ' '.join(sorted(line.split())):
expected_endlines.remove(el)
break
@@ -145,7 +157,7 @@ class OEListPackageconfigTests(OEScriptTests):
results = runCmd('%s/contrib/list-packageconfig-flags.py' % self.scripts_dir)
expected_endlines = []
expected_endlines.append("RECIPE NAME PACKAGECONFIG FLAGS")
- expected_endlines.append("pinentry gtk2 libcap ncurses qt secret")
+ expected_endlines.append("pinentry gtk2 ncurses qt secret")
expected_endlines.append("tar acl selinux")
self.check_endlines(results, expected_endlines)
@@ -163,11 +175,10 @@ class OEListPackageconfigTests(OEScriptTests):
def test_packageconfig_flags_option_all(self):
results = runCmd('%s/contrib/list-packageconfig-flags.py -a' % self.scripts_dir)
expected_endlines = []
- expected_endlines.append("pinentry-1.2.0")
- expected_endlines.append("PACKAGECONFIG ncurses libcap")
+ expected_endlines.append("pinentry-1.2.1")
+ expected_endlines.append("PACKAGECONFIG ncurses")
expected_endlines.append("PACKAGECONFIG[qt] --enable-pinentry-qt, --disable-pinentry-qt, qtbase-native qtbase")
expected_endlines.append("PACKAGECONFIG[gtk2] --enable-pinentry-gtk2, --disable-pinentry-gtk2, gtk+ glib-2.0")
- expected_endlines.append("PACKAGECONFIG[libcap] --with-libcap, --without-libcap, libcap")
expected_endlines.append("PACKAGECONFIG[ncurses] --enable-ncurses --with-ncurses-include-dir=${STAGING_INCDIR}, --disable-ncurses, ncurses")
expected_endlines.append("PACKAGECONFIG[secret] --enable-libsecret, --disable-libsecret, libsecret")
@@ -177,7 +188,7 @@ class OEListPackageconfigTests(OEScriptTests):
results = runCmd('%s/contrib/list-packageconfig-flags.py -p' % self.scripts_dir)
expected_endlines = []
expected_endlines.append("RECIPE NAME PACKAGECONFIG FLAGS")
- expected_endlines.append("pinentry gtk2 libcap ncurses qt secret")
+ expected_endlines.append("pinentry gtk2 ncurses qt secret")
self.check_endlines(results, expected_endlines)
diff --git a/meta/lib/oeqa/selftest/cases/overlayfs.py b/meta/lib/oeqa/selftest/cases/overlayfs.py
index 96beb8b869..e31063567b 100644
--- a/meta/lib/oeqa/selftest/cases/overlayfs.py
+++ b/meta/lib/oeqa/selftest/cases/overlayfs.py
@@ -1,10 +1,13 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import bitbake, runqemu
from oeqa.core.decorator import OETestTag
+from oeqa.core.decorator.data import skipIfNotMachine
def getline_qemu(out, line):
for l in out.split('\n'):
@@ -55,7 +58,7 @@ inherit overlayfs
config = """
IMAGE_INSTALL:append = " overlayfs-user"
-DISTRO_FEATURES:append = " systemd overlayfs"
+DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
"""
self.write_config(config)
@@ -76,7 +79,7 @@ DISTRO_FEATURES:append = " systemd overlayfs"
config = """
IMAGE_INSTALL:append = " overlayfs-user"
-DISTRO_FEATURES += "systemd overlayfs"
+DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
OVERLAYFS_QA_SKIP[mnt-overlay] = "mount-configured"
"""
@@ -94,7 +97,7 @@ OVERLAYFS_QA_SKIP[mnt-overlay] = "mount-configured"
config = """
IMAGE_INSTALL:append = " overlayfs-user"
-DISTRO_FEATURES:append = " systemd overlayfs"
+DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
"""
self.write_config(config)
@@ -112,7 +115,7 @@ DISTRO_FEATURES:append = " systemd overlayfs"
config = """
IMAGE_INSTALL:append = " overlayfs-user"
-DISTRO_FEATURES:append = " systemd overlayfs"
+DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
"""
wrong_machine_config = """
@@ -136,10 +139,10 @@ OVERLAYFS_MOUNT_POINT[usr-share-overlay] = "/usr/share/overlay"
config = """
IMAGE_INSTALL:append = " overlayfs-user systemd-machine-units"
-DISTRO_FEATURES:append = " systemd overlayfs"
+DISTRO_FEATURES:append = " overlayfs"
# Use systemd as init manager
-VIRTUAL-RUNTIME_init_manager = "systemd"
+INIT_MANAGER = "systemd"
# enable overlayfs in the kernel
KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
@@ -271,10 +274,8 @@ class OverlayFSEtcRunTimeTests(OESelftestTestCase):
"""
configBase = """
-DISTRO_FEATURES:append = " systemd"
-
# Use systemd as init manager
-VIRTUAL-RUNTIME_init_manager = "systemd"
+INIT_MANAGER = "systemd"
# enable overlayfs in the kernel
KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
@@ -313,10 +314,8 @@ OVERLAYFS_ETC_DEVICE = "/dev/mmcblk0p1"
"""
config = """
-DISTRO_FEATURES:append = " systemd"
-
# Use systemd as init manager
-VIRTUAL-RUNTIME_init_manager = "systemd"
+INIT_MANAGER = "systemd"
# enable overlayfs in the kernel
KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
@@ -331,28 +330,18 @@ EXTRA_IMAGE_FEATURES += "package-management"
self.assertTrue("overlayfs-etc" in res.output, msg=res.output)
self.assertTrue("package-management" in res.output, msg=res.output)
- def test_image_feature_is_missing_class_included(self):
- configAppend = """
-INHERIT += "overlayfs-etc"
-"""
- self.run_check_image_feature(configAppend)
-
+ # https://bugzilla.yoctoproject.org/show_bug.cgi?id=14963
+ @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
def test_image_feature_is_missing(self):
- self.run_check_image_feature()
-
- def run_check_image_feature(self, appendToConfig=""):
"""
Summary: Overlayfs-etc class is not applied when image feature is not set
- even if we inherit it directly,
Expected: Image is created successfully but /etc is not an overlay
Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
"""
- config = f"""
-DISTRO_FEATURES:append = " systemd"
-
+ config = """
# Use systemd as init manager
-VIRTUAL-RUNTIME_init_manager = "systemd"
+INIT_MANAGER = "systemd"
# enable overlayfs in the kernel
KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
@@ -364,7 +353,7 @@ EXTRA_IMAGE_FEATURES += "read-only-rootfs"
# Image configuration for overlayfs-etc
OVERLAYFS_ETC_MOUNT_POINT = "/data"
OVERLAYFS_ETC_DEVICE = "/dev/sda3"
-{appendToConfig}
+OVERLAYFS_ROOTFS_TYPE = "ext4"
"""
self.write_config(config)
@@ -377,45 +366,32 @@ OVERLAYFS_ETC_DEVICE = "/dev/sda3"
line = getline_qemu(output, "upperdir=/data/overlay-etc/upper")
self.assertFalse(line, msg=output)
+ @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
def test_sbin_init_preinit(self):
- self.run_sbin_init(False)
+ self.run_sbin_init(False, "ext4")
+ @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
def test_sbin_init_original(self):
- self.run_sbin_init(True)
+ self.run_sbin_init(True, "ext4")
+
+ @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
+ def test_sbin_init_read_only(self):
+ self.run_sbin_init(True, "squashfs")
- def run_sbin_init(self, origInit):
+ def run_sbin_init(self, origInit, rootfsType):
"""
Summary: Confirm we can replace original init and mount overlay on top of /etc
Expected: Image is created successfully and /etc is mounted as an overlay
Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
"""
- config = """
-DISTRO_FEATURES:append = " systemd"
-
-# Use systemd as init manager
-VIRTUAL-RUNTIME_init_manager = "systemd"
-
-# enable overlayfs in the kernel
-KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
-
-IMAGE_FSTYPES += "wic"
-OVERLAYFS_INIT_OPTION = "{OVERLAYFS_INIT_OPTION}"
-WKS_FILE = "overlayfs_etc.wks.in"
-
-EXTRA_IMAGE_FEATURES += "read-only-rootfs"
-# Image configuration for overlayfs-etc
-EXTRA_IMAGE_FEATURES += "overlayfs-etc"
-IMAGE_FEATURES:remove = "package-management"
-OVERLAYFS_ETC_MOUNT_POINT = "/data"
-OVERLAYFS_ETC_FSTYPE = "ext4"
-OVERLAYFS_ETC_DEVICE = "/dev/sda3"
-OVERLAYFS_ETC_USE_ORIG_INIT_NAME = "{OVERLAYFS_ETC_USE_ORIG_INIT_NAME}"
-"""
+ config = self.get_working_config()
args = {
'OVERLAYFS_INIT_OPTION': "" if origInit else "init=/sbin/preinit",
- 'OVERLAYFS_ETC_USE_ORIG_INIT_NAME': int(origInit == True)
+ 'OVERLAYFS_ETC_USE_ORIG_INIT_NAME': int(origInit == True),
+ 'OVERLAYFS_ROOTFS_TYPE': rootfsType,
+ 'OVERLAYFS_ETC_CREATE_MOUNT_DIRS': int(rootfsType == "ext4")
}
self.write_config(config.format(**args))
@@ -432,6 +408,11 @@ OVERLAYFS_ETC_USE_ORIG_INIT_NAME = "{OVERLAYFS_ETC_USE_ORIG_INIT_NAME}"
line = getline_qemu(output, "upperdir=/data/overlay-etc/upper")
self.assertTrue(line and line.startswith("/data/overlay-etc/upper on /etc type overlay"), msg=output)
+ # check that lower layer is not available
+ status, output = qemu.run_serial("ls -1 /data/overlay-etc/lower")
+ line = getline_qemu(output, "No such file or directory")
+ self.assertTrue(line, msg=output)
+
status, output = qemu.run_serial("touch " + testFile)
status, output = qemu.run_serial("sync")
status, output = qemu.run_serial("ls -1 " + testFile)
@@ -443,3 +424,79 @@ OVERLAYFS_ETC_USE_ORIG_INIT_NAME = "{OVERLAYFS_ETC_USE_ORIG_INIT_NAME}"
status, output = qemu.run_serial("ls -1 " + testFile)
line = getline_qemu(output, testFile)
self.assertTrue(line and line.startswith(testFile), msg=output)
+
+ @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
+ def test_lower_layer_access(self):
+ """
+ Summary: Test that lower layer of /etc is available read-only when configured
+        Expected: Can't write to lower layer. The files on lower and upper differ after
+                  modification
+ Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
+ """
+
+ config = self.get_working_config()
+
+ configLower = """
+OVERLAYFS_ETC_EXPOSE_LOWER = "1"
+IMAGE_INSTALL:append = " overlayfs-user"
+"""
+ testFile = "lower-layer-test.txt"
+
+ args = {
+ 'OVERLAYFS_INIT_OPTION': "",
+ 'OVERLAYFS_ETC_USE_ORIG_INIT_NAME': 1,
+ 'OVERLAYFS_ROOTFS_TYPE': "ext4",
+ 'OVERLAYFS_ETC_CREATE_MOUNT_DIRS': 1
+ }
+
+ self.write_config(config.format(**args))
+
+ self.append_config(configLower)
+ bitbake('core-image-minimal')
+
+ with runqemu('core-image-minimal', image_fstype='wic') as qemu:
+ status, output = qemu.run_serial("echo \"Modified in upper\" > /etc/" + testFile)
+ status, output = qemu.run_serial("diff /etc/" + testFile + " /data/overlay-etc/lower/" + testFile)
+ line = getline_qemu(output, "Modified in upper")
+ self.assertTrue(line, msg=output)
+ line = getline_qemu(output, "Original file")
+ self.assertTrue(line, msg=output)
+
+ status, output = qemu.run_serial("touch /data/overlay-etc/lower/ro-test.txt")
+ line = getline_qemu(output, "Read-only file system")
+ self.assertTrue(line, msg=output)
+
+ def get_working_config(self):
+ return """
+# Use systemd as init manager
+INIT_MANAGER = "systemd"
+
+# enable overlayfs in the kernel
+KERNEL_EXTRA_FEATURES:append = " \
+ features/overlayfs/overlayfs.scc \
+ cfg/fs/squashfs.scc"
+
+IMAGE_FSTYPES += "wic"
+OVERLAYFS_INIT_OPTION = "{OVERLAYFS_INIT_OPTION}"
+OVERLAYFS_ROOTFS_TYPE = "{OVERLAYFS_ROOTFS_TYPE}"
+OVERLAYFS_ETC_CREATE_MOUNT_DIRS = "{OVERLAYFS_ETC_CREATE_MOUNT_DIRS}"
+WKS_FILE = "overlayfs_etc.wks.in"
+
+EXTRA_IMAGE_FEATURES += "read-only-rootfs"
+# Image configuration for overlayfs-etc
+EXTRA_IMAGE_FEATURES += "overlayfs-etc"
+IMAGE_FEATURES:remove = "package-management"
+OVERLAYFS_ETC_MOUNT_POINT = "/data"
+OVERLAYFS_ETC_FSTYPE = "ext4"
+OVERLAYFS_ETC_DEVICE = "/dev/sda3"
+OVERLAYFS_ETC_USE_ORIG_INIT_NAME = "{OVERLAYFS_ETC_USE_ORIG_INIT_NAME}"
+
+ROOTFS_POSTPROCESS_COMMAND += "{OVERLAYFS_ROOTFS_TYPE}_rootfs"
+
+ext4_rootfs() {{
+}}
+
+squashfs_rootfs() {{
+ mkdir -p ${{IMAGE_ROOTFS}}/data
+}}
+"""
diff --git a/meta/lib/oeqa/selftest/cases/package.py b/meta/lib/oeqa/selftest/cases/package.py
index 51d835259e..1aa6c03f8a 100644
--- a/meta/lib/oeqa/selftest/cases/package.py
+++ b/meta/lib/oeqa/selftest/cases/package.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -87,6 +89,13 @@ class VersionOrdering(OESelftestTestCase):
self.assertEqual(status - 100, sort, "%s %s (%d) failed" % (ver1, ver2, sort))
class PackageTests(OESelftestTestCase):
+ # Verify that a recipe cannot rename a package into an existing one
+ def test_package_name_conflict(self):
+ res = bitbake("packagenameconflict", ignore_status=True)
+ self.assertNotEqual(res.status, 0)
+ err = "package name already exists"
+ self.assertTrue(err in res.output)
+
# Verify that a recipe which sets up hardlink files has those preserved into split packages
# Also test file sparseness is preserved
def test_preserve_sparse_hardlinks(self):
@@ -133,8 +142,10 @@ class PackageTests(OESelftestTestCase):
self.logger.error("No debugging symbols found. GDB result:\n%s" % output)
return False
- # Check debugging symbols works correctly
- elif re.match(r"Breakpoint 1.*hello\.c.*4", l):
+                # Check debugging symbols work correctly. Don't look for a
+ # source file as optimisation can put the breakpoint inside
+ # stdio.h.
+ elif "Breakpoint 1 at" in l:
return True
self.logger.error("GDB result:\n%d: %s", status, output)
@@ -149,25 +160,25 @@ class PackageTests(OESelftestTestCase):
self.fail('GDB %s failed' % binary)
def test_preserve_ownership(self):
- import os, stat, oe.cachedpath
features = 'IMAGE_INSTALL:append = " selftest-chown"\n'
self.write_config(features)
bitbake("core-image-minimal")
- sysconfdir = get_bb_var('sysconfdir', 'selftest-chown')
- def check_ownership(qemu, gid, uid, path):
+ def check_ownership(qemu, expected_gid, expected_uid, path):
self.logger.info("Check ownership of %s", path)
- status, output = qemu.run_serial(r'/bin/stat -c "%U %G" ' + path, timeout=60)
- output = output.split(" ")
- if output[0] != uid or output[1] != gid :
- self.logger.error("Incrrect ownership %s [%s:%s]", path, output[0], output[1])
- return False
- return True
+ status, output = qemu.run_serial('stat -c "%U %G" ' + path)
+ self.assertEqual(status, 1, "stat failed: " + output)
+ try:
+ uid, gid = output.split()
+ self.assertEqual(uid, expected_uid)
+ self.assertEqual(gid, expected_gid)
+ except ValueError:
+ self.fail("Cannot parse output: " + output)
+ sysconfdir = get_bb_var('sysconfdir', 'selftest-chown')
with runqemu('core-image-minimal') as qemu:
for path in [ sysconfdir + "/selftest-chown/file",
sysconfdir + "/selftest-chown/dir",
sysconfdir + "/selftest-chown/symlink",
sysconfdir + "/selftest-chown/fifotest/fifo"]:
- if not check_ownership(qemu, "test", "test", path):
- self.fail('Test ownership %s failed' % path)
+ check_ownership(qemu, "test", "test", path)
diff --git a/meta/lib/oeqa/selftest/cases/pkgdata.py b/meta/lib/oeqa/selftest/cases/pkgdata.py
index 254abc40c6..d786c33018 100644
--- a/meta/lib/oeqa/selftest/cases/pkgdata.py
+++ b/meta/lib/oeqa/selftest/cases/pkgdata.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -47,8 +49,8 @@ class OePkgdataUtilTests(OESelftestTestCase):
self.assertGreater(pkgsize, 1, "Size should be greater than 1. %s" % result.output)
def test_find_path(self):
- result = runCmd('oe-pkgdata-util find-path /lib/libz.so.1')
- self.assertEqual(result.output, 'zlib: /lib/libz.so.1')
+ result = runCmd('oe-pkgdata-util find-path /usr/lib/libz.so.1')
+ self.assertEqual(result.output, 'zlib: /usr/lib/libz.so.1')
result = runCmd('oe-pkgdata-util find-path /usr/bin/m4')
self.assertEqual(result.output, 'm4: /usr/bin/m4')
result = runCmd('oe-pkgdata-util find-path /not/exist', ignore_status=True)
@@ -120,8 +122,7 @@ class OePkgdataUtilTests(OESelftestTestCase):
curpkg = line.split(':')[0]
files[curpkg] = []
return files
- bb_vars = get_bb_vars(['base_libdir', 'libdir', 'includedir', 'mandir'])
- base_libdir = bb_vars['base_libdir']
+ bb_vars = get_bb_vars(['libdir', 'includedir', 'mandir'])
libdir = bb_vars['libdir']
includedir = bb_vars['includedir']
mandir = bb_vars['mandir']
@@ -138,7 +139,7 @@ class OePkgdataUtilTests(OESelftestTestCase):
self.assertIn('libz1', list(files.keys()), "listed pkgs. files: %s" %result.output)
self.assertIn('libz-dev', list(files.keys()), "listed pkgs. files: %s" %result.output)
self.assertGreater(len(files['libz1']), 1)
- libspec = os.path.join(base_libdir, 'libz.so.1.*')
+ libspec = os.path.join(libdir, 'libz.so.1.*')
found = False
for fileitem in files['libz1']:
if fnmatch.fnmatchcase(fileitem, libspec):
diff --git a/meta/lib/oeqa/selftest/cases/prservice.py b/meta/lib/oeqa/selftest/cases/prservice.py
index 10158ca7c2..8da3739c57 100644
--- a/meta/lib/oeqa/selftest/cases/prservice.py
+++ b/meta/lib/oeqa/selftest/cases/prservice.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -12,6 +14,8 @@ from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import runCmd, bitbake, get_bb_var
from oeqa.utils.network import get_free_port
+import bb.utils
+
class BitbakePrTests(OESelftestTestCase):
@classmethod
@@ -19,6 +23,16 @@ class BitbakePrTests(OESelftestTestCase):
super(BitbakePrTests, cls).setUpClass()
cls.pkgdata_dir = get_bb_var('PKGDATA_DIR')
+ cls.exported_db_path = os.path.join(cls.builddir, 'export.inc')
+ cls.current_db_path = os.path.join(get_bb_var('PERSISTENT_DIR'), 'prserv.sqlite3')
+
+ def cleanup(self):
+ # Ensure any memory resident bitbake is stopped
+ bitbake("-m")
+ # Remove any existing export file or prserv database
+ bb.utils.remove(self.exported_db_path)
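+        # bb.utils.remove() expands globs, so the trailing "*" below also removes any
+        # auxiliary files created alongside the prserv database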
+ bb.utils.remove(self.current_db_path + "*")
+
def get_pr_version(self, package_name):
package_data_file = os.path.join(self.pkgdata_dir, 'runtime', package_name)
package_data = ftools.read_file(package_data_file)
@@ -47,6 +61,7 @@ class BitbakePrTests(OESelftestTestCase):
self.assertEqual(res.status, 0, msg=res.output)
def config_pr_tests(self, package_name, package_type='rpm', pr_socket='localhost:0'):
+ self.cleanup()
config_package_data = 'PACKAGE_CLASSES = "package_%s"' % package_type
self.write_config(config_package_data)
config_server_data = 'PRSERV_HOST = "%s"' % pr_socket
@@ -66,24 +81,24 @@ class BitbakePrTests(OESelftestTestCase):
self.assertTrue(pr_2 - pr_1 == 1, "New PR %s did not increment as expected (from %s), difference should be 1" % (pr_2, pr_1))
self.assertTrue(stamp_1 != stamp_2, "Different pkg rev. but same stamp: %s" % stamp_1)
+ self.cleanup()
+
def run_test_pr_export_import(self, package_name, replace_current_db=True):
self.config_pr_tests(package_name)
self.increment_package_pr(package_name)
pr_1 = self.get_pr_version(package_name)
- exported_db_path = os.path.join(self.builddir, 'export.inc')
- export_result = runCmd("bitbake-prserv-tool export %s" % exported_db_path, ignore_status=True)
+ export_result = runCmd("bitbake-prserv-tool export %s" % self.exported_db_path, ignore_status=True)
self.assertEqual(export_result.status, 0, msg="PR Service database export failed: %s" % export_result.output)
- self.assertTrue(os.path.exists(exported_db_path))
+ self.assertTrue(os.path.exists(self.exported_db_path), msg="%s didn't exist, tool output %s" % (self.exported_db_path, export_result.output))
if replace_current_db:
- current_db_path = os.path.join(get_bb_var('PERSISTENT_DIR'), 'prserv.sqlite3')
- self.assertTrue(os.path.exists(current_db_path), msg="Path to current PR Service database is invalid: %s" % current_db_path)
- os.remove(current_db_path)
+ self.assertTrue(os.path.exists(self.current_db_path), msg="Path to current PR Service database is invalid: %s" % self.current_db_path)
+ os.remove(self.current_db_path)
- import_result = runCmd("bitbake-prserv-tool import %s" % exported_db_path, ignore_status=True)
- os.remove(exported_db_path)
+ import_result = runCmd("bitbake-prserv-tool import %s" % self.exported_db_path, ignore_status=True)
+ #os.remove(self.exported_db_path)
self.assertEqual(import_result.status, 0, msg="PR Service database import failed: %s" % import_result.output)
self.increment_package_pr(package_name)
@@ -91,6 +106,8 @@ class BitbakePrTests(OESelftestTestCase):
self.assertTrue(pr_2 - pr_1 == 1, "New PR %s did not increment as expected (from %s), difference should be 1" % (pr_2, pr_1))
+ self.cleanup()
+
def test_import_export_replace_db(self):
self.run_test_pr_export_import('m4')
diff --git a/meta/lib/oeqa/selftest/cases/pseudo.py b/meta/lib/oeqa/selftest/cases/pseudo.py
index 33593d5ce9..3ef8786022 100644
--- a/meta/lib/oeqa/selftest/cases/pseudo.py
+++ b/meta/lib/oeqa/selftest/cases/pseudo.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/recipetool.py b/meta/lib/oeqa/selftest/cases/recipetool.py
index 510dae6bad..aebea42502 100644
--- a/meta/lib/oeqa/selftest/cases/recipetool.py
+++ b/meta/lib/oeqa/selftest/cases/recipetool.py
@@ -1,7 +1,10 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
+import errno
import os
import shutil
import tempfile
@@ -25,6 +28,16 @@ def tearDownModule():
runCmd('rm -rf %s' % templayerdir)
+def needTomllib(test):
+    # This test requires python 3.11 or above for the tomllib module, or the tomli module to be installed
+ try:
+ import tomllib
+ except ImportError:
+ try:
+ import tomli
+ except ImportError:
+ test.skipTest('Test requires python 3.11 or above for tomllib module or tomli module')
+
class RecipetoolBase(devtool.DevtoolTestCase):
def setUpLocal(self):
@@ -35,6 +48,8 @@ class RecipetoolBase(devtool.DevtoolTestCase):
self.testfile = os.path.join(self.tempdir, 'testfile')
with open(self.testfile, 'w') as f:
f.write('Test file\n')
+ config = 'BBMASK += "meta-poky/recipes-core/base-files/base-files_%.bbappend"\n'
+ self.append_config(config)
def tearDownLocal(self):
runCmd('rm -rf %s/recipes-*' % self.templayerdir)
@@ -346,7 +361,6 @@ class RecipetoolCreateTests(RecipetoolBase):
checkvars['LICENSE'] = 'GPL-2.0-only'
checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263'
checkvars['SRC_URI'] = 'https://github.com/logrotate/logrotate/releases/download/${PV}/logrotate-${PV}.tar.xz'
- checkvars['SRC_URI[md5sum]'] = 'a560c57fac87c45b2fc17406cdf79288'
checkvars['SRC_URI[sha256sum]'] = '2e6a401cac9024db2288297e3be1a8ab60e7401ba8e91225218aaf4a27e82a07'
self._test_recipe_contents(recipefile, checkvars, [])
@@ -359,14 +373,14 @@ class RecipetoolCreateTests(RecipetoolBase):
tempsrc = os.path.join(self.tempdir, 'srctree')
os.makedirs(tempsrc)
recipefile = os.path.join(self.tempdir, 'libmatchbox.bb')
- srcuri = 'git://git.yoctoproject.org/libmatchbox'
+ srcuri = 'git://git.yoctoproject.org/libmatchbox;protocol=https'
result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri + ";rev=9f7cf8895ae2d39c465c04cc78e918c157420269", '-x', tempsrc])
self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output)
checkvars = {}
checkvars['LICENSE'] = 'LGPL-2.1-only'
checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=7fbc338309ac38fefcd64b04bb903e34'
checkvars['S'] = '${WORKDIR}/git'
- checkvars['PV'] = '1.11+git${SRCPV}'
+ checkvars['PV'] = '1.11+git'
checkvars['SRC_URI'] = srcuri + ';branch=master'
checkvars['DEPENDS'] = set(['libcheck', 'libjpeg-turbo', 'libpng', 'libx11', 'libxext', 'pango'])
inherits = ['autotools', 'pkgconfig']
@@ -404,7 +418,6 @@ class RecipetoolCreateTests(RecipetoolBase):
checkvars = {}
checkvars['LICENSE'] = set(['LGPL-2.1-only', 'MPL-1.1-only'])
checkvars['SRC_URI'] = 'http://taglib.github.io/releases/taglib-${PV}.tar.gz'
- checkvars['SRC_URI[md5sum]'] = 'cee7be0ccfc892fa433d6c837df9522a'
checkvars['SRC_URI[sha256sum]'] = 'b6d1a5a610aae6ff39d93de5efd0fdc787aa9e9dc1e7026fa4c961b26563526b'
checkvars['DEPENDS'] = set(['boost', 'zlib'])
inherits = ['cmake']
@@ -440,44 +453,257 @@ class RecipetoolCreateTests(RecipetoolBase):
self._test_recipe_contents(recipefile, checkvars, inherits)
def test_recipetool_create_github(self):
- # Basic test to see if github URL mangling works
+ # Basic test to see if github URL mangling works. Deliberately use an
+ # older release of Meson at present so we don't need a toml parser.
temprecipe = os.path.join(self.tempdir, 'recipe')
os.makedirs(temprecipe)
- recipefile = os.path.join(temprecipe, 'meson_git.bb')
- srcuri = 'https://github.com/mesonbuild/meson;rev=0.32.0'
- result = runCmd(['recipetool', 'create', '-o', temprecipe, srcuri])
- self.assertTrue(os.path.isfile(recipefile))
+ recipefile = os.path.join(temprecipe, 'python3-meson_git.bb')
+ srcuri = 'https://github.com/mesonbuild/meson;rev=0.52.1'
+ cmd = ['recipetool', 'create', '-o', temprecipe, srcuri]
+ result = runCmd(cmd)
+ self.assertTrue(os.path.isfile(recipefile), msg="recipe %s not created for command %s, output %s" % (recipefile, " ".join(cmd), result.output))
checkvars = {}
- checkvars['LICENSE'] = set(['Apache-2.0'])
- checkvars['SRC_URI'] = 'git://github.com/mesonbuild/meson;protocol=https;branch=master'
+ checkvars['LICENSE'] = set(['Apache-2.0', "Unknown"])
+ checkvars['SRC_URI'] = 'git://github.com/mesonbuild/meson;protocol=https;branch=0.52'
inherits = ['setuptools3']
self._test_recipe_contents(recipefile, checkvars, inherits)
def test_recipetool_create_python3_setuptools(self):
# Test creating python3 package from tarball (using setuptools3 class)
+        # Use the --no-pypi switch to avoid creating a pypi enabled recipe,
+        # and check the created recipe as if it were a more general tarball
temprecipe = os.path.join(self.tempdir, 'recipe')
os.makedirs(temprecipe)
pn = 'python-magic'
pv = '0.4.15'
recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv))
srcuri = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-%s.tar.gz' % pv
- result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ result = runCmd('recipetool create --no-pypi -o %s %s' % (temprecipe, srcuri))
self.assertTrue(os.path.isfile(recipefile))
checkvars = {}
checkvars['LICENSE'] = set(['MIT'])
checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88'
checkvars['SRC_URI'] = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-${PV}.tar.gz'
- checkvars['SRC_URI[md5sum]'] = 'e384c95a47218f66c6501cd6dd45ff59'
checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5'
inherits = ['setuptools3']
self._test_recipe_contents(recipefile, checkvars, inherits)
+ def test_recipetool_create_python3_setuptools_pypi_tarball(self):
+ # Test creating python3 package from tarball (using setuptools3 and pypi classes)
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'python-magic'
+ pv = '0.4.15'
+ recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv))
+ srcuri = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-%s.tar.gz' % pv
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['LICENSE'] = set(['MIT'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88'
+ checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5'
+ checkvars['PYPI_PACKAGE'] = pn
+ inherits = ['setuptools3', 'pypi']
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_python3_setuptools_pypi(self):
+ # Test creating python3 package from pypi url (using setuptools3 and pypi classes)
+        # Intentionally using the setuptools3 class here instead of any of the pep517 classes
+        # to avoid the toml dependency and allow this test to run on host autobuilders
+        # with older versions of python
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'python-magic'
+ pv = '0.4.15'
+ recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv))
+ # First specify the required version in the url
+ srcuri = 'https://pypi.org/project/%s/%s' % (pn, pv)
+ runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['LICENSE'] = set(['MIT'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88'
+ checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5'
+ checkvars['PYPI_PACKAGE'] = pn
+ inherits = ['setuptools3', "pypi"]
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ # Now specify the version as a recipetool parameter
+ runCmd('rm -rf %s' % recipefile)
+ self.assertFalse(os.path.isfile(recipefile))
+ srcuri = 'https://pypi.org/project/%s' % pn
+ runCmd('recipetool create -o %s %s --version %s' % (temprecipe, srcuri, pv))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['LICENSE'] = set(['MIT'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88'
+ checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5'
+ checkvars['PYPI_PACKAGE'] = pn
+ inherits = ['setuptools3', "pypi"]
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+        # Now, try to grab the latest version of the package. We cannot guess the recipe
+        # name without hardcoding the latest version, which would mean updating the test
+        # for each release, so use a regexp instead
+ runCmd('rm -rf %s' % recipefile)
+ self.assertFalse(os.path.isfile(recipefile))
+ recipefile_re = r'%s_(.*)\.bb' % pn
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ dirlist = os.listdir(temprecipe)
+ if len(dirlist) > 1:
+ self.fail('recipetool created more than just one file; output:\n%s\ndirlist:\n%s' % (result.output, str(dirlist)))
+ if len(dirlist) < 1 or not os.path.isfile(os.path.join(temprecipe, dirlist[0])):
+ self.fail('recipetool did not create recipe file; output:\n%s\ndirlist:\n%s' % (result.output, str(dirlist)))
+ import re
+ match = re.match(recipefile_re, dirlist[0])
+ self.assertTrue(match)
+ latest_pv = match.group(1)
+ self.assertTrue(latest_pv != pv)
+ recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, latest_pv))
+        # Do not check LIC_FILES_CHKSUM and the SRC_URI checksum here to avoid having to update the test on each release
+ checkvars = {}
+ checkvars['LICENSE'] = set(['MIT'])
+ checkvars['PYPI_PACKAGE'] = pn
+ inherits = ['setuptools3', "pypi"]
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_python3_pep517_setuptools_build_meta(self):
+        # This test requires python 3.11 or above for the tomllib module, or the tomli module to be installed
+ needTomllib(self)
+
+ # Test creating python3 package from tarball (using setuptools.build_meta class)
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'webcolors'
+ pv = '1.13'
+ recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
+ srcuri = 'https://files.pythonhosted.org/packages/a1/fb/f95560c6a5d4469d9c49e24cf1b5d4d21ffab5608251c6020a965fb7791c/%s-%s.tar.gz' % (pn, pv)
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['SUMMARY'] = 'A library for working with the color formats defined by HTML and CSS.'
+ checkvars['LICENSE'] = set(['BSD-3-Clause'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=702b1ef12cf66832a88f24c8f2ee9c19'
+ checkvars['SRC_URI[sha256sum]'] = 'c225b674c83fa923be93d235330ce0300373d02885cef23238813b0d5668304a'
+ inherits = ['python_setuptools_build_meta', 'pypi']
+
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_python3_pep517_poetry_core_masonry_api(self):
+        # This test requires python 3.11 or above for the tomllib module, or the tomli module to be installed
+ needTomllib(self)
+
+ # Test creating python3 package from tarball (using poetry.core.masonry.api class)
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'iso8601'
+ pv = '2.1.0'
+ recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
+ srcuri = 'https://files.pythonhosted.org/packages/b9/f3/ef59cee614d5e0accf6fd0cbba025b93b272e626ca89fb70a3e9187c5d15/%s-%s.tar.gz' % (pn, pv)
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['SUMMARY'] = 'Simple module to parse ISO 8601 dates'
+ checkvars['LICENSE'] = set(['MIT'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=aab31f2ef7ba214a5a341eaa47a7f367'
+ checkvars['SRC_URI[sha256sum]'] = '6b1d3829ee8921c4301998c909f7829fa9ed3cbdac0d3b16af2d743aed1ba8df'
+ inherits = ['python_poetry_core', 'pypi']
+
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_python3_pep517_flit_core_buildapi(self):
+        # This test requires python 3.11 or above for the tomllib module, or the tomli module to be installed
+ needTomllib(self)
+
+ # Test creating python3 package from tarball (using flit_core.buildapi class)
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'typing-extensions'
+ pv = '4.8.0'
+ recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
+ srcuri = 'https://files.pythonhosted.org/packages/1f/7a/8b94bb016069caa12fc9f587b28080ac33b4fbb8ca369b98bc0a4828543e/typing_extensions-%s.tar.gz' % pv
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['SUMMARY'] = 'Backported and Experimental Type Hints for Python 3.8+'
+ checkvars['LICENSE'] = set(['PSF-2.0'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=fcf6b249c2641540219a727f35d8d2c2'
+ checkvars['SRC_URI[sha256sum]'] = 'df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef'
+ inherits = ['python_flit_core', 'pypi']
+
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_python3_pep517_hatchling(self):
+        # This test requires python 3.11 or above for the tomllib module, or the tomli module to be installed
+ needTomllib(self)
+
+ # Test creating python3 package from tarball (using hatchling class)
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'jsonschema'
+ pv = '4.19.1'
+ recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
+ srcuri = 'https://files.pythonhosted.org/packages/e4/43/087b24516db11722c8687e0caf0f66c7785c0b1c51b0ab951dfde924e3f5/jsonschema-%s.tar.gz' % pv
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['SUMMARY'] = 'An implementation of JSON Schema validation for Python'
+ checkvars['HOMEPAGE'] = 'https://github.com/python-jsonschema/jsonschema'
+ checkvars['LICENSE'] = set(['MIT'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=7a60a81c146ec25599a3e1dabb8610a8 file://json/LICENSE;md5=9d4de43111d33570c8fe49b4cb0e01af'
+ checkvars['SRC_URI[sha256sum]'] = 'ec84cc37cfa703ef7cd4928db24f9cb31428a5d0fa77747b8b51a847458e0bbf'
+ inherits = ['python_hatchling', 'pypi']
+
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_python3_pep517_maturin(self):
+        # This test requires python 3.11 or above for the tomllib module, or the tomli module to be installed
+ needTomllib(self)
+
+ # Test creating python3 package from tarball (using maturin class)
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'pydantic-core'
+ pv = '2.14.5'
+ recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
+ srcuri = 'https://files.pythonhosted.org/packages/64/26/cffb93fe9c6b5a91c497f37fae14a4b073ecbc47fc36a9979c7aa888b245/pydantic_core-%s.tar.gz' % pv
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['HOMEPAGE'] = 'https://github.com/pydantic/pydantic-core'
+ checkvars['LICENSE'] = set(['MIT'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=ab599c188b4a314d2856b3a55030c75c'
+ checkvars['SRC_URI[sha256sum]'] = '6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71'
+ inherits = ['python_maturin', 'pypi']
+
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_python3_pep517_mesonpy(self):
+        # This test requires python 3.11 or above for the tomllib module, or the tomli module to be installed
+ needTomllib(self)
+
+ # Test creating python3 package from tarball (using mesonpy class)
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'siphash24'
+ pv = '1.4'
+ recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
+ srcuri = 'https://files.pythonhosted.org/packages/c2/32/b934a70592f314afcfa86c7f7e388804a8061be65b822e2aa07e573b6477/%s-%s.tar.gz' % (pn, pv)
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['SRC_URI[sha256sum]'] = '7fd65e39b2a7c8c4ddc3a168a687f4610751b0ac2ebb518783c0cdfc30bec4a0'
+ inherits = ['python_mesonpy', 'pypi']
+
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
def test_recipetool_create_github_tarball(self):
- # Basic test to ensure github URL mangling doesn't apply to release tarballs
+ # Basic test to ensure github URL mangling doesn't apply to release tarballs.
+ # Deliberately use an older release of Meson at present so we don't need a toml parser.
temprecipe = os.path.join(self.tempdir, 'recipe')
os.makedirs(temprecipe)
- pv = '0.32.0'
- recipefile = os.path.join(temprecipe, 'meson_%s.bb' % pv)
+ pv = '0.52.1'
+ recipefile = os.path.join(temprecipe, 'python3-meson_%s.bb' % pv)
srcuri = 'https://github.com/mesonbuild/meson/releases/download/%s/meson-%s.tar.gz' % (pv, pv)
result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
self.assertTrue(os.path.isfile(recipefile))
@@ -509,14 +735,252 @@ class RecipetoolCreateTests(RecipetoolBase):
self._test_recipetool_create_git('http://git.yoctoproject.org/git/matchbox-keyboard')
def test_recipetool_create_git_srcuri_master(self):
- self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard;branch=master')
+ self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard;branch=master;protocol=https')
def test_recipetool_create_git_srcuri_branch(self):
- self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard;branch=matchbox-keyboard-0-1')
+ self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard;branch=matchbox-keyboard-0-1;protocol=https')
def test_recipetool_create_git_srcbranch(self):
- self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard', 'matchbox-keyboard-0-1')
+ self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard;protocol=https', 'matchbox-keyboard-0-1')
+
+ def _go_urifiy(self, url, version, modulepath = None, pathmajor = None, subdir = None):
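+        # Helper to build the ${@go_src_uri(...)} expression expected in the generated
+        # go modules .inc file, adding only the optional arguments that are non-empty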
+ modulepath = ",path='%s'" % modulepath if len(modulepath) else ''
+ pathmajor = ",pathmajor='%s'" % pathmajor if len(pathmajor) else ''
+ subdir = ",subdir='%s'" % subdir if len(subdir) else ''
+ return "${@go_src_uri('%s','%s'%s%s%s)}" % (url, version, modulepath, pathmajor, subdir)
+
+ def test_recipetool_create_go(self):
+ # Basic test to check go recipe generation
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+
+ recipefile = os.path.join(temprecipe, 'edgex-go_git.bb')
+ deps_require_file = os.path.join(temprecipe, 'edgex-go', 'edgex-go-modules.inc')
+ lics_require_file = os.path.join(temprecipe, 'edgex-go', 'edgex-go-licenses.inc')
+ modules_txt_file = os.path.join(temprecipe, 'edgex-go', 'modules.txt')
+
+ srcuri = 'https://github.com/edgexfoundry/edgex-go.git'
+ srcrev = "v3.0.0"
+ srcbranch = "main"
+
+ result = runCmd('recipetool create -o %s %s -S %s -B %s' % (temprecipe, srcuri, srcrev, srcbranch))
+
+ self.maxDiff = None
+ inherits = ['go-vendor']
+
+ checkvars = {}
+ checkvars['GO_IMPORT'] = "github.com/edgexfoundry/edgex-go"
+ checkvars['SRC_URI'] = {'git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https',
+ 'file://modules.txt'}
+ checkvars['LIC_FILES_CHKSUM'] = {'file://src/${GO_IMPORT}/LICENSE;md5=8f8bc924cf73f6a32381e5fd4c58d603'}
+
+ self.assertTrue(os.path.isfile(recipefile))
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ checkvars = {}
+ checkvars['VENDORED_LIC_FILES_CHKSUM'] = set(
+ ['file://src/${GO_IMPORT}/vendor/github.com/Microsoft/go-winio/LICENSE;md5=69205ff73858f2c22b2ca135b557e8ef',
+ 'file://src/${GO_IMPORT}/vendor/github.com/armon/go-metrics/LICENSE;md5=d2d77030c0183e3d1e66d26dc1f243be',
+ 'file://src/${GO_IMPORT}/vendor/github.com/cenkalti/backoff/LICENSE;md5=1571d94433e3f3aa05267efd4dbea68b',
+ 'file://src/${GO_IMPORT}/vendor/github.com/davecgh/go-spew/LICENSE;md5=c06795ed54b2a35ebeeb543cd3a73e56',
+ 'file://src/${GO_IMPORT}/vendor/github.com/eclipse/paho.mqtt.golang/LICENSE;md5=dcdb33474b60c38efd27356d8f2edec7',
+ 'file://src/${GO_IMPORT}/vendor/github.com/eclipse/paho.mqtt.golang/edl-v10;md5=3adfcc70f5aeb7a44f3f9b495aa1fbf3',
+ 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-bootstrap/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff',
+ 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-configuration/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff',
+ 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-core-contracts/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff',
+ 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-messaging/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff',
+ 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-registry/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff',
+ 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-secrets/v3/LICENSE;md5=f9fa2f4f8e0ef8cc7b5dd150963eb457',
+ 'file://src/${GO_IMPORT}/vendor/github.com/fatih/color/LICENSE.md;md5=316e6d590bdcde7993fb175662c0dd5a',
+ 'file://src/${GO_IMPORT}/vendor/github.com/fxamacker/cbor/v2/LICENSE;md5=827f5a2fa861382d35a3943adf9ebb86',
+ 'file://src/${GO_IMPORT}/vendor/github.com/go-jose/go-jose/v3/LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57',
+ 'file://src/${GO_IMPORT}/vendor/github.com/go-jose/go-jose/v3/json/LICENSE;md5=591778525c869cdde0ab5a1bf283cd81',
+ 'file://src/${GO_IMPORT}/vendor/github.com/go-kit/log/LICENSE;md5=5b7c15ad5fffe2ff6e9d58a6c161f082',
+ 'file://src/${GO_IMPORT}/vendor/github.com/go-logfmt/logfmt/LICENSE;md5=98e39517c38127f969de33057067091e',
+ 'file://src/${GO_IMPORT}/vendor/github.com/go-playground/locales/LICENSE;md5=3ccbda375ee345400ad1da85ba522301',
+ 'file://src/${GO_IMPORT}/vendor/github.com/go-playground/universal-translator/LICENSE;md5=2e2b21ef8f61057977d27c727c84bef1',
+ 'file://src/${GO_IMPORT}/vendor/github.com/go-playground/validator/v10/LICENSE;md5=a718a0f318d76f7c5d510cbae84f0b60',
+ 'file://src/${GO_IMPORT}/vendor/github.com/go-redis/redis/v7/LICENSE;md5=58103aa5ea1ee9b7a369c9c4a95ef9b5',
+ 'file://src/${GO_IMPORT}/vendor/github.com/golang/protobuf/LICENSE;md5=939cce1ec101726fa754e698ac871622',
+ 'file://src/${GO_IMPORT}/vendor/github.com/gomodule/redigo/LICENSE;md5=2ee41112a44fe7014dce33e26468ba93',
+ 'file://src/${GO_IMPORT}/vendor/github.com/google/uuid/LICENSE;md5=88073b6dd8ec00fe09da59e0b6dfded1',
+ 'file://src/${GO_IMPORT}/vendor/github.com/gorilla/mux/LICENSE;md5=33fa1116c45f9e8de714033f99edde13',
+ 'file://src/${GO_IMPORT}/vendor/github.com/gorilla/websocket/LICENSE;md5=c007b54a1743d596f46b2748d9f8c044',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/consul/api/LICENSE;md5=b8a277a612171b7526e9be072f405ef4',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/errwrap/LICENSE;md5=b278a92d2c1509760384428817710378',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-cleanhttp/LICENSE;md5=65d26fcc2f35ea6a181ac777e42db1ea',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-hclog/LICENSE;md5=ec7f605b74b9ad03347d0a93a5cc7eb8',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-immutable-radix/LICENSE;md5=65d26fcc2f35ea6a181ac777e42db1ea',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-multierror/LICENSE;md5=d44fdeb607e2d2614db9464dbedd4094',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-rootcerts/LICENSE;md5=65d26fcc2f35ea6a181ac777e42db1ea',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/golang-lru/LICENSE;md5=f27a50d2e878867827842f2c60e30bfc',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/serf/LICENSE;md5=b278a92d2c1509760384428817710378',
+ 'file://src/${GO_IMPORT}/vendor/github.com/leodido/go-urn/LICENSE;md5=8f50db5538ec1148a9b3d14ed96c3418',
+ 'file://src/${GO_IMPORT}/vendor/github.com/mattn/go-colorable/LICENSE;md5=24ce168f90aec2456a73de1839037245',
+ 'file://src/${GO_IMPORT}/vendor/github.com/mattn/go-isatty/LICENSE;md5=f509beadd5a11227c27b5d2ad6c9f2c6',
+ 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/consulstructure/LICENSE;md5=96ada10a9e51c98c4656f2cede08c673',
+ 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/copystructure/LICENSE;md5=56da355a12d4821cda57b8f23ec34bc4',
+ 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/go-homedir/LICENSE;md5=3f7765c3d4f58e1f84c4313cecf0f5bd',
+ 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/mapstructure/LICENSE;md5=3f7765c3d4f58e1f84c4313cecf0f5bd',
+ 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/reflectwalk/LICENSE;md5=3f7765c3d4f58e1f84c4313cecf0f5bd',
+ 'file://src/${GO_IMPORT}/vendor/github.com/nats-io/nats.go/LICENSE;md5=86d3f3a95c324c9479bd8986968f4327',
+ 'file://src/${GO_IMPORT}/vendor/github.com/nats-io/nkeys/LICENSE;md5=86d3f3a95c324c9479bd8986968f4327',
+ 'file://src/${GO_IMPORT}/vendor/github.com/nats-io/nuid/LICENSE;md5=86d3f3a95c324c9479bd8986968f4327',
+ 'file://src/${GO_IMPORT}/vendor/github.com/pmezard/go-difflib/LICENSE;md5=e9a2ebb8de779a07500ddecca806145e',
+ 'file://src/${GO_IMPORT}/vendor/github.com/rcrowley/go-metrics/LICENSE;md5=1bdf5d819f50f141366dabce3be1460f',
+ 'file://src/${GO_IMPORT}/vendor/github.com/spiffe/go-spiffe/v2/LICENSE;md5=86d3f3a95c324c9479bd8986968f4327',
+ 'file://src/${GO_IMPORT}/vendor/github.com/stretchr/objx/LICENSE;md5=d023fd31d3ca39ec61eec65a91732735',
+ 'file://src/${GO_IMPORT}/vendor/github.com/stretchr/testify/LICENSE;md5=188f01994659f3c0d310612333d2a26f',
+ 'file://src/${GO_IMPORT}/vendor/github.com/x448/float16/LICENSE;md5=de8f8e025d57fe7ee0b67f30d571323b',
+ 'file://src/${GO_IMPORT}/vendor/github.com/zeebo/errs/LICENSE;md5=84914ab36fc0eb48edbaa53e66e8d326',
+ 'file://src/${GO_IMPORT}/vendor/golang.org/x/crypto/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
+ 'file://src/${GO_IMPORT}/vendor/golang.org/x/mod/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
+ 'file://src/${GO_IMPORT}/vendor/golang.org/x/net/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
+ 'file://src/${GO_IMPORT}/vendor/golang.org/x/sync/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
+ 'file://src/${GO_IMPORT}/vendor/golang.org/x/sys/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
+ 'file://src/${GO_IMPORT}/vendor/golang.org/x/text/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
+ 'file://src/${GO_IMPORT}/vendor/golang.org/x/tools/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
+ 'file://src/${GO_IMPORT}/vendor/google.golang.org/genproto/LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57',
+ 'file://src/${GO_IMPORT}/vendor/google.golang.org/grpc/LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57',
+ 'file://src/${GO_IMPORT}/vendor/google.golang.org/protobuf/LICENSE;md5=02d4002e9171d41a8fad93aa7faf3956',
+ 'file://src/${GO_IMPORT}/vendor/gopkg.in/eapache/queue.v1/LICENSE;md5=1bfd4408d3de090ef6b908b0cc45a316',
+ 'file://src/${GO_IMPORT}/vendor/gopkg.in/yaml.v3/LICENSE;md5=3c91c17266710e16afdbb2b6d15c761c'])
+
+ self.assertTrue(os.path.isfile(lics_require_file))
+ self._test_recipe_contents(lics_require_file, checkvars, [])
+
+ dependencies = \
+ [ ('github.com/eclipse/paho.mqtt.golang','v1.4.2', '', '', ''),
+ ('github.com/edgexfoundry/go-mod-bootstrap','v3.0.1','github.com/edgexfoundry/go-mod-bootstrap/v3','/v3', ''),
+ ('github.com/edgexfoundry/go-mod-configuration','v3.0.0','github.com/edgexfoundry/go-mod-configuration/v3','/v3', ''),
+ ('github.com/edgexfoundry/go-mod-core-contracts','v3.0.0','github.com/edgexfoundry/go-mod-core-contracts/v3','/v3', ''),
+ ('github.com/edgexfoundry/go-mod-messaging','v3.0.0','github.com/edgexfoundry/go-mod-messaging/v3','/v3', ''),
+ ('github.com/edgexfoundry/go-mod-secrets','v3.0.1','github.com/edgexfoundry/go-mod-secrets/v3','/v3', ''),
+ ('github.com/fxamacker/cbor','v2.4.0','github.com/fxamacker/cbor/v2','/v2', ''),
+ ('github.com/gomodule/redigo','v1.8.9', '', '', ''),
+ ('github.com/google/uuid','v1.3.0', '', '', ''),
+ ('github.com/gorilla/mux','v1.8.0', '', '', ''),
+ ('github.com/rcrowley/go-metrics','v0.0.0-20201227073835-cf1acfcdf475', '', '', ''),
+ ('github.com/spiffe/go-spiffe','v2.1.4','github.com/spiffe/go-spiffe/v2','/v2', ''),
+ ('github.com/stretchr/testify','v1.8.2', '', '', ''),
+ ('go.googlesource.com/crypto','v0.8.0','golang.org/x/crypto', '', ''),
+ ('gopkg.in/eapache/queue.v1','v1.1.0', '', '', ''),
+ ('gopkg.in/yaml.v3','v3.0.1', '', '', ''),
+ ('github.com/microsoft/go-winio','v0.6.0','github.com/Microsoft/go-winio', '', ''),
+ ('github.com/hashicorp/go-metrics','v0.3.10','github.com/armon/go-metrics', '', ''),
+ ('github.com/cenkalti/backoff','v2.2.1+incompatible', '', '', ''),
+ ('github.com/davecgh/go-spew','v1.1.1', '', '', ''),
+ ('github.com/edgexfoundry/go-mod-registry','v3.0.0','github.com/edgexfoundry/go-mod-registry/v3','/v3', ''),
+ ('github.com/fatih/color','v1.9.0', '', '', ''),
+ ('github.com/go-jose/go-jose','v3.0.0','github.com/go-jose/go-jose/v3','/v3', ''),
+ ('github.com/go-kit/log','v0.2.1', '', '', ''),
+ ('github.com/go-logfmt/logfmt','v0.5.1', '', '', ''),
+ ('github.com/go-playground/locales','v0.14.1', '', '', ''),
+ ('github.com/go-playground/universal-translator','v0.18.1', '', '', ''),
+ ('github.com/go-playground/validator','v10.13.0','github.com/go-playground/validator/v10','/v10', ''),
+ ('github.com/go-redis/redis','v7.3.0','github.com/go-redis/redis/v7','/v7', ''),
+ ('github.com/golang/protobuf','v1.5.2', '', '', ''),
+ ('github.com/gorilla/websocket','v1.4.2', '', '', ''),
+ ('github.com/hashicorp/consul','v1.20.0','github.com/hashicorp/consul/api', '', 'api'),
+ ('github.com/hashicorp/errwrap','v1.0.0', '', '', ''),
+ ('github.com/hashicorp/go-cleanhttp','v0.5.1', '', '', ''),
+ ('github.com/hashicorp/go-hclog','v0.14.1', '', '', ''),
+ ('github.com/hashicorp/go-immutable-radix','v1.3.0', '', '', ''),
+ ('github.com/hashicorp/go-multierror','v1.1.1', '', '', ''),
+ ('github.com/hashicorp/go-rootcerts','v1.0.2', '', '', ''),
+ ('github.com/hashicorp/golang-lru','v0.5.4', '', '', ''),
+ ('github.com/hashicorp/serf','v0.10.1', '', '', ''),
+ ('github.com/leodido/go-urn','v1.2.3', '', '', ''),
+ ('github.com/mattn/go-colorable','v0.1.12', '', '', ''),
+ ('github.com/mattn/go-isatty','v0.0.14', '', '', ''),
+ ('github.com/mitchellh/consulstructure','v0.0.0-20190329231841-56fdc4d2da54', '', '', ''),
+ ('github.com/mitchellh/copystructure','v1.2.0', '', '', ''),
+ ('github.com/mitchellh/go-homedir','v1.1.0', '', '', ''),
+ ('github.com/mitchellh/mapstructure','v1.5.0', '', '', ''),
+ ('github.com/mitchellh/reflectwalk','v1.0.2', '', '', ''),
+ ('github.com/nats-io/nats.go','v1.25.0', '', '', ''),
+ ('github.com/nats-io/nkeys','v0.4.4', '', '', ''),
+ ('github.com/nats-io/nuid','v1.0.1', '', '', ''),
+ ('github.com/pmezard/go-difflib','v1.0.0', '', '', ''),
+ ('github.com/stretchr/objx','v0.5.0', '', '', ''),
+ ('github.com/x448/float16','v0.8.4', '', '', ''),
+ ('github.com/zeebo/errs','v1.3.0', '', '', ''),
+ ('go.googlesource.com/mod','v0.8.0','golang.org/x/mod', '', ''),
+ ('go.googlesource.com/net','v0.9.0','golang.org/x/net', '', ''),
+ ('go.googlesource.com/sync','v0.1.0','golang.org/x/sync', '', ''),
+ ('go.googlesource.com/sys','v0.7.0','golang.org/x/sys', '', ''),
+ ('go.googlesource.com/text','v0.9.0','golang.org/x/text', '', ''),
+ ('go.googlesource.com/tools','v0.6.0','golang.org/x/tools', '', ''),
+ ('github.com/googleapis/go-genproto','v0.0.0-20230223222841-637eb2293923','google.golang.org/genproto', '', ''),
+ ('github.com/grpc/grpc-go','v1.53.0','google.golang.org/grpc', '', ''),
+ ('go.googlesource.com/protobuf','v1.28.1','google.golang.org/protobuf', '', ''),
+ ]
+ src_uri = set()
+ for d in dependencies:
+ src_uri.add(self._go_urifiy(*d))
+
+ checkvars = {}
+ checkvars['GO_DEPENDENCIES_SRC_URI'] = src_uri
+
+ self.assertTrue(os.path.isfile(deps_require_file))
+ self._test_recipe_contents(deps_require_file, checkvars, [])
+
+ def test_recipetool_create_go_replace_modules(self):
+ # Check handling of replaced modules
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+
+ recipefile = os.path.join(temprecipe, 'openapi-generator_git.bb')
+ deps_require_file = os.path.join(temprecipe, 'openapi-generator', 'go-modules.inc')
+ lics_require_file = os.path.join(temprecipe, 'openapi-generator', 'go-licenses.inc')
+ modules_txt_file = os.path.join(temprecipe, 'openapi-generator', 'modules.txt')
+
+ srcuri = 'https://github.com/OpenAPITools/openapi-generator.git'
+ srcrev = "v7.2.0"
+ srcbranch = "master"
+ srcsubdir = "samples/openapi3/client/petstore/go"
+
+ result = runCmd('recipetool create -o %s %s -S %s -B %s --src-subdir %s' % (temprecipe, srcuri, srcrev, srcbranch, srcsubdir))
+
+ self.maxDiff = None
+ inherits = ['go-vendor']
+
+ checkvars = {}
+ checkvars['GO_IMPORT'] = "github.com/OpenAPITools/openapi-generator/samples/openapi3/client/petstore/go"
+ checkvars['SRC_URI'] = {'git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https',
+ 'file://modules.txt'}
+
+ self.assertNotIn('Traceback', result.output)
+ self.assertIn('No license file was detected for the main module', result.output)
+ self.assertTrue(os.path.isfile(recipefile))
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ # make sure that dependencies don't mention local directory ./go-petstore
+ dependencies = \
+ [ ('github.com/stretchr/testify','v1.8.4', '', '', ''),
+ ('go.googlesource.com/oauth2','v0.10.0','golang.org/x/oauth2', '', ''),
+ ('github.com/davecgh/go-spew','v1.1.1', '', '', ''),
+ ('github.com/golang/protobuf','v1.5.3', '', '', ''),
+ ('github.com/kr/pretty','v0.3.0', '', '', ''),
+ ('github.com/pmezard/go-difflib','v1.0.0', '', '', ''),
+ ('github.com/rogpeppe/go-internal','v1.9.0', '', '', ''),
+ ('go.googlesource.com/net','v0.12.0','golang.org/x/net', '', ''),
+ ('github.com/golang/appengine','v1.6.7','google.golang.org/appengine', '', ''),
+ ('go.googlesource.com/protobuf','v1.31.0','google.golang.org/protobuf', '', ''),
+ ('gopkg.in/check.v1','v1.0.0-20201130134442-10cb98267c6c', '', '', ''),
+ ('gopkg.in/yaml.v3','v3.0.1', '', '', ''),
+ ]
+
+ src_uri = set()
+ for d in dependencies:
+ src_uri.add(self._go_urifiy(*d))
+
+ checkvars = {}
+ checkvars['GO_DEPENDENCIES_SRC_URI'] = src_uri
+
+ self.assertTrue(os.path.isfile(deps_require_file))
+ self._test_recipe_contents(deps_require_file, checkvars, [])
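For reference: the dependency tuples above are turned into SRC_URI entries by the suite's _go_urifiy() helper, whose implementation is not shown in this patch. A minimal sketch of the assumed expansion follows; the function name go_urify and the go_src_uri() expression are illustrative only and may not match the real helper:

    # Sketch only: expand one (url, version, modulepath, pathmajor, subdir)
    # tuple into the single entry expected in GO_DEPENDENCIES_SRC_URI.
    def go_urify(url, version, modulepath='', pathmajor='', subdir=''):
        extra = ''
        if modulepath:
            extra += ", path='%s'" % modulepath
        if pathmajor:
            extra += ", pathmajor='%s'" % pathmajor
        if subdir:
            extra += ", subdir='%s'" % subdir
        return "${@go_src_uri('%s', '%s'%s)}" % (url, version, extra)

    # e.g. go_urify('github.com/hashicorp/consul', 'v1.20.0',
    #               'github.com/hashicorp/consul/api', '', 'api')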
class RecipetoolTests(RecipetoolBase):
@@ -536,7 +1000,15 @@ class RecipetoolTests(RecipetoolBase):
for p in paths:
dstdir = os.path.join(dstdir, p)
if not os.path.exists(dstdir):
- os.makedirs(dstdir)
+ try:
+ os.makedirs(dstdir)
+ except PermissionError:
+ return False
+ except OSError as e:
+ if e.errno == errno.EROFS:
+ return False
+ else:
+ raise e
if p == "lib":
# Can race with other tests
self.add_command_to_tearDown('rmdir --ignore-fail-on-non-empty %s' % dstdir)
@@ -544,8 +1016,12 @@ class RecipetoolTests(RecipetoolBase):
self.track_for_cleanup(dstdir)
dstfile = os.path.join(dstdir, os.path.basename(srcfile))
if srcfile != dstfile:
- shutil.copy(srcfile, dstfile)
+ try:
+ shutil.copy(srcfile, dstfile)
+ except PermissionError:
+ return False
self.track_for_cleanup(dstfile)
+ return True
def test_recipetool_load_plugin(self):
"""Test that recipetool loads only the first found plugin in BBPATH."""
@@ -559,15 +1035,17 @@ class RecipetoolTests(RecipetoolBase):
plugincontent = fh.readlines()
try:
self.assertIn('meta-selftest', srcfile, 'wrong bbpath plugin found')
- for path in searchpath:
- self._copy_file_with_cleanup(srcfile, path, 'lib', 'recipetool')
+ searchpath = [
+ path for path in searchpath
+ if self._copy_file_with_cleanup(srcfile, path, 'lib', 'recipetool')
+ ]
result = runCmd("recipetool --quiet count")
self.assertEqual(result.output, '1')
result = runCmd("recipetool --quiet multiloaded")
self.assertEqual(result.output, "no")
for path in searchpath:
result = runCmd("recipetool --quiet bbdir")
- self.assertEqual(result.output, path)
+ self.assertEqual(os.path.realpath(result.output), os.path.realpath(path))
os.unlink(os.path.join(result.output, 'lib', 'recipetool', 'bbpath.py'))
finally:
with open(srcfile, 'w') as fh:
@@ -579,7 +1057,10 @@ class RecipetoolTests(RecipetoolBase):
commonlicdir = get_bb_var('COMMON_LICENSE_DIR')
- d = bb.tinfoil.TinfoilDataStoreConnector
+ class DataConnectorCopy(bb.tinfoil.TinfoilDataStoreConnector):
+ pass
+
+ d = DataConnectorCopy
d.getVar = Mock(return_value=commonlicdir)
srctree = tempfile.mkdtemp(prefix='recipetoolqa')
@@ -727,9 +1208,9 @@ class RecipetoolAppendsrcBase(RecipetoolBase):
for uri in src_uri:
p = urllib.parse.urlparse(uri)
if p.scheme == 'file':
- return p.netloc + p.path
+ return p.netloc + p.path, uri
- def _test_appendsrcfile(self, testrecipe, filename=None, destdir=None, has_src_uri=True, srcdir=None, newfile=None, options=''):
+ def _test_appendsrcfile(self, testrecipe, filename=None, destdir=None, has_src_uri=True, srcdir=None, newfile=None, remove=None, machine=None , options=''):
if newfile is None:
newfile = self.testfile
@@ -756,12 +1237,40 @@ class RecipetoolAppendsrcBase(RecipetoolBase):
expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
'\n']
+
+ override = ""
+ if machine:
+ options += ' -m %s' % machine
+ override = ':append:%s' % machine
+ expectedlines.extend(['PACKAGE_ARCH = "${MACHINE_ARCH}"\n',
+ '\n'])
+
+ if remove:
+ for entry in remove:
+ if machine:
+ entry_remove_line = 'SRC_URI:remove:%s = " %s"\n' % (machine, entry)
+ else:
+ entry_remove_line = 'SRC_URI:remove = "%s"\n' % entry
+
+ expectedlines.extend([entry_remove_line,
+ '\n'])
+
if has_src_uri:
uri = 'file://%s' % filename
if expected_subdir:
uri += ';subdir=%s' % expected_subdir
- expectedlines[0:0] = ['SRC_URI += "%s"\n' % uri,
- '\n']
+ if machine:
+ src_uri_line = 'SRC_URI%s = " %s"\n' % (override, uri)
+ else:
+ src_uri_line = 'SRC_URI += "%s"\n' % uri
+
+ expectedlines.extend([src_uri_line, '\n'])
+
+ if machine:
+ filename = '%s/%s' % (machine, filename)
return self._try_recipetool_appendsrcfile(testrecipe, newfile, destpath, options, expectedlines, [filename])
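With a machine passed in, the expected lines assembled above amount to a bbappend of roughly the following shape (machine 'mymachine' and file 'a-file' are the values used by the tests below; the remove line only appears when an existing entry is dropped), with the added file stored under a 'mymachine/' subdirectory:

    FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"

    PACKAGE_ARCH = "${MACHINE_ARCH}"

    SRC_URI:remove:mymachine = " file://<original-entry>"

    SRC_URI:append:mymachine = " file://a-file"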
@@ -816,18 +1325,46 @@ class RecipetoolAppendsrcTests(RecipetoolAppendsrcBase):
def test_recipetool_appendsrcfile_existing_in_src_uri(self):
testrecipe = 'base-files'
- filepath = self._get_first_file_uri(testrecipe)
+ filepath,_ = self._get_first_file_uri(testrecipe)
self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe)
self._test_appendsrcfile(testrecipe, filepath, has_src_uri=False)
- def test_recipetool_appendsrcfile_existing_in_src_uri_diff_params(self):
+ def test_recipetool_appendsrcfile_existing_in_src_uri_diff_params(self, machine=None):
testrecipe = 'base-files'
subdir = 'tmp'
- filepath = self._get_first_file_uri(testrecipe)
+ filepath, srcuri_entry = self._get_first_file_uri(testrecipe)
self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe)
- output = self._test_appendsrcfile(testrecipe, filepath, subdir, has_src_uri=False)
- self.assertTrue(any('with different parameters' in l for l in output))
+ self._test_appendsrcfile(testrecipe, filepath, subdir, machine=machine, remove=[srcuri_entry])
+
+ def test_recipetool_appendsrcfile_machine(self):
+ # A very basic test
+ self._test_appendsrcfile('base-files', 'a-file', machine='mymachine')
+
+        # Force cleanup of the previous test's output
+ self.tearDownLocal()
+
+ # A more complex test: existing entry in src_uri with different param
+ self.test_recipetool_appendsrcfile_existing_in_src_uri_diff_params(machine='mymachine')
+
+ def test_recipetool_appendsrcfile_update_recipe_basic(self):
+ testrecipe = "mtd-utils-selftest"
+ recipefile = get_bb_var('FILE', testrecipe)
+        self.assertIn('meta-selftest', recipefile, 'This test expects %s recipe to be in meta-selftest' % testrecipe)
+ cmd = 'recipetool appendsrcfile -W -u meta-selftest %s %s' % (testrecipe, self.testfile)
+ result = runCmd(cmd)
+ self.assertNotIn('Traceback', result.output)
+ self.add_command_to_tearDown('cd %s; rm -f %s/%s; git checkout .' % (os.path.dirname(recipefile), testrecipe, os.path.basename(self.testfile)))
+
+ expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
+ ('??', '.*/%s/%s$' % (testrecipe, os.path.basename(self.testfile)))]
+ self._check_repo_status(os.path.dirname(recipefile), expected_status)
+ result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile))
+ removelines = []
+ addlines = [
+ 'file://%s \\\\' % os.path.basename(self.testfile),
+ ]
+ self._check_diff(result.output, addlines, removelines)
def test_recipetool_appendsrcfile_replace_file_srcdir(self):
testrecipe = 'bash'
diff --git a/meta/lib/oeqa/selftest/cases/recipeutils.py b/meta/lib/oeqa/selftest/cases/recipeutils.py
index 74b2098ae8..2cb4445f81 100644
--- a/meta/lib/oeqa/selftest/cases/recipeutils.py
+++ b/meta/lib/oeqa/selftest/cases/recipeutils.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -48,7 +50,7 @@ class RecipeUtilsTests(OESelftestTestCase):
+SRC_URI[md5sum] = "aaaaaa"
SRC_URI[sha256sum] = "ac6894d876e45878faae493b0cf61d0e28ec417334448ac0a6ea2229d8343051"
- RDEPENDS:${PN} += "${PYTHON_PN}-threading"
+ RDEPENDS:${PN} += "python3-threading"
"""
patchlines = []
for f in patches:
diff --git a/meta/lib/oeqa/selftest/cases/reproducible.py b/meta/lib/oeqa/selftest/cases/reproducible.py
index 5042c11d8e..80e830136f 100644
--- a/meta/lib/oeqa/selftest/cases/reproducible.py
+++ b/meta/lib/oeqa/selftest/cases/reproducible.py
@@ -43,13 +43,14 @@ class CompareResult(object):
return (self.status, self.test) < (other.status, other.test)
class PackageCompareResults(object):
- def __init__(self):
+ def __init__(self, exclusions):
self.total = []
self.missing = []
self.different = []
self.different_excluded = []
self.same = []
self.active_exclusions = set()
+ exclude_packages.extend((exclusions or "").split())
def add_result(self, r):
self.total.append(r)
@@ -133,8 +134,10 @@ class ReproducibleTests(OESelftestTestCase):
# targets are the things we want to test the reproducibility of
targets = ['core-image-minimal', 'core-image-sato', 'core-image-full-cmdline', 'core-image-weston', 'world']
+
# sstate targets are things to pull from sstate to potentially cut build/debugging time
sstate_targets = []
+
save_results = False
if 'OEQA_DEBUGGING_SAVED_OUTPUT' in os.environ:
save_results = os.environ['OEQA_DEBUGGING_SAVED_OUTPUT']
@@ -149,11 +152,29 @@ class ReproducibleTests(OESelftestTestCase):
def setUpLocal(self):
super().setUpLocal()
- needed_vars = ['TOPDIR', 'TARGET_PREFIX', 'BB_NUMBER_THREADS']
+ needed_vars = [
+ 'TOPDIR',
+ 'TARGET_PREFIX',
+ 'BB_NUMBER_THREADS',
+ 'BB_HASHSERVE',
+ 'OEQA_REPRODUCIBLE_TEST_PACKAGE',
+ 'OEQA_REPRODUCIBLE_TEST_TARGET',
+ 'OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS',
+ 'OEQA_REPRODUCIBLE_EXCLUDED_PACKAGES',
+ ]
bb_vars = get_bb_vars(needed_vars)
for v in needed_vars:
setattr(self, v.lower(), bb_vars[v])
+ if bb_vars['OEQA_REPRODUCIBLE_TEST_PACKAGE']:
+ self.package_classes = bb_vars['OEQA_REPRODUCIBLE_TEST_PACKAGE'].split()
+
+ if bb_vars['OEQA_REPRODUCIBLE_TEST_TARGET']:
+ self.targets = bb_vars['OEQA_REPRODUCIBLE_TEST_TARGET'].split()
+
+ if bb_vars['OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS']:
+ self.sstate_targets = bb_vars['OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS'].split()
+
self.extraresults = {}
self.extraresults.setdefault('reproducible.rawlogs', {})['log'] = ''
self.extraresults.setdefault('reproducible', {}).setdefault('files', {})
@@ -162,7 +183,7 @@ class ReproducibleTests(OESelftestTestCase):
self.extraresults['reproducible.rawlogs']['log'] += msg
def compare_packages(self, reference_dir, test_dir, diffutils_sysroot):
- result = PackageCompareResults()
+ result = PackageCompareResults(self.oeqa_reproducible_excluded_packages)
old_cwd = os.getcwd()
try:
@@ -202,10 +223,9 @@ class ReproducibleTests(OESelftestTestCase):
config = textwrap.dedent('''\
PACKAGE_CLASSES = "{package_classes}"
- INHIBIT_PACKAGE_STRIP = "1"
TMPDIR = "{tmpdir}"
LICENSE_FLAGS_ACCEPTED = "commercial"
- DISTRO_FEATURES:append = ' systemd pam'
+ DISTRO_FEATURES:append = ' pam'
USERADDEXTENSION = "useradd-staticids"
USERADD_ERROR_DYNAMIC = "skip"
USERADD_UID_TABLES += "files/static-passwd"
@@ -223,7 +243,7 @@ class ReproducibleTests(OESelftestTestCase):
# mirror, forcing a complete build from scratch
config += textwrap.dedent('''\
SSTATE_DIR = "${TMPDIR}/sstate"
- SSTATE_MIRRORS = ""
+ SSTATE_MIRRORS = "file://.*/.*-native.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH file://.*/.*-cross.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH"
''')
self.logger.info("Building %s (sstate%s allowed)..." % (name, '' if use_sstate else ' NOT'))
@@ -290,9 +310,13 @@ class ReproducibleTests(OESelftestTestCase):
self.copy_file(d.reference, '/'.join([save_dir, 'packages-excluded', strip_topdir(d.reference)]))
self.copy_file(d.test, '/'.join([save_dir, 'packages-excluded', strip_topdir(d.test)]))
- if result.missing or result.different:
- fails.append("The following %s packages are missing or different and not in exclusion list: %s" %
- (c, '\n'.join(r.test for r in (result.missing + result.different))))
+ if result.different:
+ fails.append("The following %s packages are different and not in exclusion list:\n%s" %
+ (c, '\n'.join(r.test for r in (result.different))))
+
+ if result.missing and len(self.sstate_targets) == 0:
+ fails.append("The following %s packages are missing and not in exclusion list:\n%s" %
+ (c, '\n'.join(r.test for r in (result.missing))))
# Clean up empty directories
if self.save_results:
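The OEQA_REPRODUCIBLE_* variables read in setUpLocal() above let local.conf narrow the reproducibility test matrix without editing the test itself; for example (values purely illustrative):

    OEQA_REPRODUCIBLE_TEST_TARGET = "core-image-minimal"
    OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS = "core-image-minimal"
    OEQA_REPRODUCIBLE_EXCLUDED_PACKAGES = "bash-ptest"

OEQA_REPRODUCIBLE_TEST_PACKAGE can likewise override which package classes are compared.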
diff --git a/meta/lib/oeqa/selftest/cases/resulttooltests.py b/meta/lib/oeqa/selftest/cases/resulttooltests.py
index dac5c46801..c3303f3fbb 100644
--- a/meta/lib/oeqa/selftest/cases/resulttooltests.py
+++ b/meta/lib/oeqa/selftest/cases/resulttooltests.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -69,7 +71,7 @@ class ResultToolTests(OESelftestTestCase):
self.assertTrue('target_result1' in results['runtime/mydistro/qemux86/image'], msg="Pair not correct:%s" % results)
self.assertTrue('target_result3' in results['runtime/mydistro/qemux86-64/image'], msg="Pair not correct:%s" % results)
- def test_regrresion_can_get_regression_result(self):
+ def test_regression_can_get_regression_result(self):
base_result_data = {'result': {'test1': {'status': 'PASSED'},
'test2': {'status': 'PASSED'},
'test3': {'status': 'FAILED'},
@@ -96,3 +98,278 @@ class ResultToolTests(OESelftestTestCase):
resultutils.append_resultsdata(results, ResultToolTests.target_results_data, configmap=resultutils.flatten_map)
self.assertEqual(len(results[''].keys()), 5, msg="Flattened results not correct %s" % str(results))
+ def test_results_without_metadata_can_be_compared(self):
+ base_configuration = {"configuration": {
+ "TEST_TYPE": "oeselftest",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600
+ }, "result": {}}
+ target_configuration = {"configuration": {
+ "TEST_TYPE": "oeselftest",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600
+ }, "result": {}}
+ self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect metadata filtering, tests without metadata should be compared")
+
+ def test_target_result_with_missing_metadata_can_not_be_compared(self):
+ base_configuration = {"configuration": {
+ "TEST_TYPE": "oeselftest",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "OESELFTEST_METADATA": {
+ "run_all_tests": True,
+ "run_tests": None,
+ "skips": None,
+ "machine": None,
+ "select_tags": ["toolchain-user", "toolchain-system"],
+ "exclude_tags": None
+ }}, "result": {}}
+ target_configuration = {"configuration": {"TEST_TYPE": "oeselftest",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600
+ }, "result": {}}
+ self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect metadata filtering, tests should not be compared")
+
+ def test_results_with_matching_metadata_can_be_compared(self):
+ base_configuration = {"configuration": {
+ "TEST_TYPE": "oeselftest",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600,
+ "OESELFTEST_METADATA": {"run_all_tests": True,
+ "run_tests": None,
+ "skips": None,
+ "machine": None,
+ "select_tags": ["toolchain-user", "toolchain-system"],
+ "exclude_tags": None}
+ }, "result": {}}
+ target_configuration = {"configuration": {
+ "TEST_TYPE": "oeselftest",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600,
+ "OESELFTEST_METADATA": {"run_all_tests": True,
+ "run_tests": None,
+ "skips": None,
+ "machine": None,
+ "select_tags": ["toolchain-user", "toolchain-system"],
+ "exclude_tags": None}
+ }, "result": {}}
+ self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect metadata filtering, tests with matching metadata should be compared")
+
+ def test_results_with_mismatching_metadata_can_not_be_compared(self):
+ base_configuration = {"configuration": {
+ "TEST_TYPE": "oeselftest",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600,
+ "OESELFTEST_METADATA": {"run_all_tests": True,
+ "run_tests": None,
+ "skips": None,
+ "machine": None,
+ "select_tags": ["toolchain-user", "toolchain-system"],
+ "exclude_tags": None}
+ }, "result": {}}
+ target_configuration = {"configuration": {
+ "TEST_TYPE": "oeselftest",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600,
+ "OESELFTEST_METADATA": {"run_all_tests": True,
+ "run_tests": None,
+ "skips": None,
+ "machine": None,
+ "select_tags": ["machine"],
+ "exclude_tags": None}
+ }, "result": {}}
+ self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect metadata filtering, tests with mismatching metadata should not be compared")
+
+ def test_metadata_matching_is_only_checked_for_relevant_test_type(self):
+ base_configuration = {"configuration": {"TEST_TYPE": "runtime",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600,
+ "OESELFTEST_METADATA": {"run_all_tests": True,
+ "run_tests": None,
+ "skips": None,
+ "machine": None,
+ "select_tags": ["toolchain-user", "toolchain-system"],
+ "exclude_tags": None}}, "result": {}}
+ target_configuration = {"configuration": {"TEST_TYPE": "runtime",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600,
+ "OESELFTEST_METADATA": {"run_all_tests": True,
+ "run_tests": None,
+ "skips": None,
+ "machine": None,
+ "select_tags": ["machine"],
+ "exclude_tags": None}}, "result": {}}
+ self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect metadata filtering, %s tests should be compared" % base_configuration['configuration']['TEST_TYPE'])
+
+ def test_machine_matches(self):
+ base_configuration = {"configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86"}, "result": {}}
+ target_configuration = {"configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86"
+ }, "result": {}}
+ self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect machine filtering, identical machine tests should be compared")
+
+ def test_machine_mismatches(self):
+ base_configuration = {"configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86"
+ }, "result": {}}
+ target_configuration = {"configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86_64"
+ }, "result": {}}
+ self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect machine filtering, mismatching machine tests should not be compared")
+
+ def test_can_not_compare_non_ltp_tests(self):
+ base_configuration = {"configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86"
+ }, "result": {
+ "ltpresult_foo": {
+ "status": "PASSED"
+ }}}
+ target_configuration = {"configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86_64"
+ }, "result": {
+ "bar": {
+ "status": "PASSED"
+ }}}
+ self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect ltpresult filtering, mismatching ltpresult content should not be compared")
+
+ def test_can_compare_ltp_tests(self):
+ base_configuration = {"configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86"
+ }, "result": {
+ "ltpresult_foo": {
+ "status": "PASSED"
+ }}}
+ target_configuration = {"configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86"
+ }, "result": {
+ "ltpresult_foo": {
+ "status": "PASSED"
+ }}}
+ self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect ltpresult filtering, matching ltpresult content should be compared")
+
+ def test_can_match_non_static_ptest_names(self):
+ base_configuration = {"a": {
+ "conf_X": {
+ "configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86"
+ }, "result": {
+ "ptestresult.lttng-tools.foo_-_bar_-_moo": {
+ "status": "PASSED"
+ },
+ "ptestresult.babeltrace.bar_-_moo_-_foo": {
+ "status": "PASSED"
+ },
+ "ptestresult.babeltrace2.moo_-_foo_-_bar": {
+ "status": "PASSED"
+ },
+ "ptestresult.curl.test_0000__foo_out_of_bar": {
+ "status": "PASSED"
+ },
+ "ptestresult.dbus.test_0000__foo_out_of_bar,_remaining:_00:02,_took_0.032s,_duration:_03:32_": {
+ "status": "PASSED"
+ },
+ "ptestresult.binutils-ld.in testcase /foo/build-st-bar/moo/ctf.exp": {
+ "status": "PASSED"
+ },
+ "ptestresult.gcc-libstdc++-v3.Couldn't create remote directory /tmp/runtest.30975 on target": {
+ "status": "PASSED"
+ },
+ "ptestresult.gcc-libgomp.Couldn't create remote directory /tmp/runtest.3657621 on": {
+ "status": "PASSED"
+ }
+ }}}}
+ target_configuration = {"a": {
+ "conf_Y": {
+ "configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86"
+ }, "result": {
+ "ptestresult.lttng-tools.foo_-_yyy_-_zzz": {
+ "status": "PASSED"
+ },
+ "ptestresult.babeltrace.bar_-_zzz_-_xxx": {
+ "status": "PASSED"
+ },
+ "ptestresult.babeltrace2.moo_-_xxx_-_yyy": {
+ "status": "PASSED"
+ },
+ "ptestresult.curl.test_0000__xxx_out_of_yyy": {
+ "status": "PASSED"
+ },
+ "ptestresult.dbus.test_0000__yyy_out_of_zzz,_remaining:_00:03,_took_0.034s,_duration:_03:30_": {
+ "status": "PASSED"
+ },
+ "ptestresult.binutils-ld.in testcase /xxx/build-st-yyy/zzz/ctf.exp": {
+ "status": "PASSED"
+ },
+ "ptestresult.gcc-libstdc++-v3.Couldn't create remote directory /tmp/runtest.45678 on target": {
+ "status": "PASSED"
+ },
+ "ptestresult.gcc-libgomp.Couldn't create remote directory /tmp/runtest.3657621 on": {
+ "status": "PASSED"
+ }
+ }}}}
+ regression.fixup_ptest_names(base_configuration, self.logger)
+ regression.fixup_ptest_names(target_configuration, self.logger)
+ result, resultstring = regression.compare_result(
+ self.logger, "A", "B", base_configuration["a"]["conf_X"], target_configuration["a"]["conf_Y"])
+ self.assertDictEqual(
+ result, {}, msg=f"ptests should be compared: {resultstring}")
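The ptest-name test above depends on regression.fixup_ptest_names(), which is implemented in resulttool itself rather than in this file. Its purpose is to strip volatile parts of ptest names (temporary paths, PIDs, timing suffixes) before results are matched. A minimal sketch of that idea, with made-up patterns that do not claim to be the real ones:

    import re

    def normalise_ptest_name(name):
        # drop dbus-style timing suffixes ",_remaining:_...,_took_...s,..."
        name = re.sub(r',_remaining:_.*', '', name)
        # collapse per-run remote directories like /tmp/runtest.<pid>
        name = re.sub(r'/tmp/runtest\.\d+', '/tmp/runtest.N', name)
        # collapse per-run build paths like /foo/build-st-bar/...
        name = re.sub(r'/[^ ]*/build-st-[^/]+', '/BUILDDIR', name)
        return name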
diff --git a/meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py b/meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py
new file mode 100644
index 0000000000..44e2c09a6f
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py
@@ -0,0 +1,97 @@
+# SPDX-FileCopyrightText: Huawei Inc.
+#
+# SPDX-License-Identifier: MIT
+
+import os
+import oe
+import unittest
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_vars
+
+class ShadowUtilsTidyFiles(OESelftestTestCase):
+ """
+ Check if shadow image rootfs files are tidy.
+
+ The tests are focused on testing the functionality provided by the
+ 'tidy_shadowutils_files' rootfs postprocess command (via
+ SORT_PASSWD_POSTPROCESS_COMMAND).
+ """
+
+ def sysconf_build(self):
+ """
+        Verify whether the shadow tidy-files tests should run and, if so, build a
+        test image and return its sysconf rootfs path.
+ """
+
+ test_image = "core-image-minimal"
+
+ config = 'IMAGE_CLASSES += "extrausers"\n'
+ config += 'EXTRA_USERS_PARAMS = "groupadd -g 1000 oeqatester; "\n'
+ config += 'EXTRA_USERS_PARAMS += "useradd -p \'\' -u 1000 -N -g 1000 oeqatester; "\n'
+ self.write_config(config)
+
+ vars = get_bb_vars(("IMAGE_ROOTFS", "SORT_PASSWD_POSTPROCESS_COMMAND", "sysconfdir"),
+ test_image)
+ passwd_postprocess_cmd = vars["SORT_PASSWD_POSTPROCESS_COMMAND"]
+ self.assertIsNotNone(passwd_postprocess_cmd)
+ if (passwd_postprocess_cmd.strip() != 'tidy_shadowutils_files;'):
+            raise unittest.SkipTest("Testcase skipped because the 'tidy_shadowutils_files' "
+                "rootfs postprocess command is not set as SORT_PASSWD_POSTPROCESS_COMMAND.")
+
+ rootfs = vars["IMAGE_ROOTFS"]
+ self.assertIsNotNone(rootfs)
+ sysconfdir = vars["sysconfdir"]
+ bitbake(test_image)
+ self.assertIsNotNone(sysconfdir)
+
+ return oe.path.join(rootfs, sysconfdir)
+
+ def test_shadowutils_backup_files(self):
+ """
+ Test that the rootfs doesn't include any known shadow backup files.
+ """
+
+ backup_files = (
+ 'group-',
+ 'gshadow-',
+ 'passwd-',
+ 'shadow-',
+ 'subgid-',
+ 'subuid-',
+ )
+
+ rootfs_sysconfdir = self.sysconf_build()
+ found = []
+ for backup_file in backup_files:
+ backup_filepath = oe.path.join(rootfs_sysconfdir, backup_file)
+ if os.path.exists(backup_filepath):
+ found.append(backup_file)
+ if (found):
+ raise Exception('The following shadow backup files were found in '
+ 'the rootfs: %s' % found)
+
+ def test_shadowutils_sorted_files(self):
+ """
+ Test that the 'passwd' and the 'group' shadow utils files are ordered
+ by ID.
+ """
+
+ files = (
+ 'passwd',
+ 'group',
+ )
+
+ rootfs_sysconfdir = self.sysconf_build()
+ unsorted = []
+ for file in files:
+ filepath = oe.path.join(rootfs_sysconfdir, file)
+ with open(filepath, 'rb') as f:
+ ids = []
+ lines = f.readlines()
+ for line in lines:
+ entries = line.split(b':')
+ ids.append(int(entries[2]))
+ if (ids != sorted(ids)):
+ unsorted.append(file)
+ if (unsorted):
+ raise Exception("The following files were not sorted by ID as expected: %s" % unsorted)
diff --git a/meta/lib/oeqa/selftest/cases/rpmtests.py b/meta/lib/oeqa/selftest/cases/rpmtests.py
new file mode 100644
index 0000000000..902d7dca3d
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/rpmtests.py
@@ -0,0 +1,14 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+
+class BitbakeTests(OESelftestTestCase):
+
+ def test_rpm_filenames(self):
+ test_recipe = "testrpm"
+ bitbake(test_recipe)
diff --git a/meta/lib/oeqa/selftest/cases/runcmd.py b/meta/lib/oeqa/selftest/cases/runcmd.py
index e9612389fe..70047ca0ca 100644
--- a/meta/lib/oeqa/selftest/cases/runcmd.py
+++ b/meta/lib/oeqa/selftest/cases/runcmd.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -56,11 +58,11 @@ class RunCmdTests(OESelftestTestCase):
self.assertEqual(result.status, 0)
def test_result_assertion(self):
- self.assertRaisesRegexp(AssertionError, "Command 'echo .* false' returned non-zero exit status 1:\nfoobar",
+ self.assertRaisesRegex(AssertionError, "Command 'echo .* false' returned non-zero exit status 1:\nfoobar",
runCmd, "echo foobar >&2; false", shell=True)
def test_result_exception(self):
- self.assertRaisesRegexp(CommandError, "Command 'echo .* false' returned non-zero exit status 1 with output: foobar",
+ self.assertRaisesRegex(CommandError, "Command 'echo .* false' returned non-zero exit status 1 with output: foobar",
runCmd, "echo foobar >&2; false", shell=True, assert_error=False)
def test_output(self):
diff --git a/meta/lib/oeqa/selftest/cases/runqemu.py b/meta/lib/oeqa/selftest/cases/runqemu.py
index c1d277a095..f01e1eec66 100644
--- a/meta/lib/oeqa/selftest/cases/runqemu.py
+++ b/meta/lib/oeqa/selftest/cases/runqemu.py
@@ -4,13 +4,15 @@
# SPDX-License-Identifier: MIT
#
+import os
import re
-import tempfile
import time
import oe.types
from oeqa.core.decorator import OETestTag
+from oeqa.core.decorator.data import skipIfNotArch, skipIfNotMachine
from oeqa.selftest.case import OESelftestTestCase
-from oeqa.utils.commands import bitbake, runqemu, get_bb_var, runCmd
+from oeqa.utils.commands import bitbake, runqemu, get_bb_var
+
@OETestTag("runqemu")
class RunqemuTests(OESelftestTestCase):
@@ -22,23 +24,26 @@ class RunqemuTests(OESelftestTestCase):
def setUpLocal(self):
super(RunqemuTests, self).setUpLocal()
self.recipe = 'core-image-minimal'
- self.machine = 'qemux86-64'
- self.fstypes = "ext4 iso hddimg wic.vmdk wic.qcow2 wic.vdi"
- self.cmd_common = "runqemu nographic"
+ self.machine = self.td['MACHINE']
+ self.image_link_name = get_bb_var('IMAGE_LINK_NAME', self.recipe)
- kvm = oe.types.qemu_use_kvm(get_bb_var('QEMU_USE_KVM'), 'x86_64')
+ self.fstypes = "ext4"
+ if self.td["HOST_ARCH"] in ('i586', 'i686', 'x86_64'):
+ self.fstypes += " iso hddimg"
+ if self.machine == "qemux86-64":
+ self.fstypes += " wic.vmdk wic.qcow2 wic.vdi"
+
+ self.cmd_common = "runqemu nographic"
+ kvm = oe.types.qemu_use_kvm(get_bb_var('QEMU_USE_KVM'), self.td["TARGET_ARCH"])
if kvm:
self.cmd_common += " kvm"
self.write_config(
"""
-MACHINE = "%s"
IMAGE_FSTYPES = "%s"
# 10 means 1 second
SYSLINUX_TIMEOUT = "10"
-"""
-% (self.machine, self.fstypes)
- )
+""" % self.fstypes)
if not RunqemuTests.image_is_ready:
RunqemuTests.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
@@ -57,14 +62,17 @@ SYSLINUX_TIMEOUT = "10"
cmd = "%s %s ext4" % (self.cmd_common, self.machine)
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
with open(qemu.qemurunnerlog) as f:
- self.assertIn('rootfs.ext4', f.read(), "Failed: %s" % cmd)
+ regexp = r'\nROOTFS: .*\.ext4]\n'
+ self.assertRegex(f.read(), regexp, "Failed to find '%s' in '%s' after running '%s'" % (regexp, qemu.qemurunnerlog, cmd))
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_boot_machine_iso(self):
"""Test runqemu machine iso"""
cmd = "%s %s iso" % (self.cmd_common, self.machine)
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
with open(qemu.qemurunnerlog) as f:
- self.assertIn('media=cdrom', f.read(), "Failed: %s" % cmd)
+ text_in = 'media=cdrom'
+ self.assertIn(text_in, f.read(), "Failed to find '%s' in '%s' after running '%s'" % (text_in, qemu.qemurunnerlog, cmd))
def test_boot_recipe_image(self):
"""Test runqemu recipe-image"""
@@ -73,20 +81,24 @@ SYSLINUX_TIMEOUT = "10"
with open(qemu.qemurunnerlog) as f:
self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read()))
-
+ # https://bugzilla.yoctoproject.org/show_bug.cgi?id=14963
+ @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
def test_boot_recipe_image_vmdk(self):
"""Test runqemu recipe-image vmdk"""
cmd = "%s %s wic.vmdk" % (self.cmd_common, self.recipe)
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
with open(qemu.qemurunnerlog) as f:
- self.assertIn('format=vmdk', f.read(), "Failed: %s" % cmd)
+ text_in = 'format=vmdk'
+ self.assertIn(text_in, f.read(), "Failed to find '%s' in '%s' after running '%s'" % (text_in, qemu.qemurunnerlog, cmd))
+ @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
def test_boot_recipe_image_vdi(self):
"""Test runqemu recipe-image vdi"""
cmd = "%s %s wic.vdi" % (self.cmd_common, self.recipe)
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
with open(qemu.qemurunnerlog) as f:
- self.assertIn('format=vdi', f.read(), "Failed: %s" % cmd)
+ text_in = 'format=vdi'
+ self.assertIn(text_in, f.read(), "Failed to find '%s' in '%s' after running '%s'" % (text_in, qemu.qemurunnerlog, cmd))
def test_boot_deploy(self):
"""Test runqemu deploy_dir_image"""
@@ -95,7 +107,7 @@ SYSLINUX_TIMEOUT = "10"
with open(qemu.qemurunnerlog) as f:
self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read()))
-
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_boot_deploy_hddimg(self):
"""Test runqemu deploy_dir_image hddimg"""
cmd = "%s %s hddimg" % (self.cmd_common, self.deploy_dir_image)
@@ -110,6 +122,7 @@ SYSLINUX_TIMEOUT = "10"
with open(qemu.qemurunnerlog) as f:
self.assertIn(' -netdev user', f.read(), "Failed: %s" % cmd)
+ @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
def test_boot_machine_slirp_qcow2(self):
"""Test runqemu machine slirp qcow2"""
cmd = "%s slirp wic.qcow2 %s" % (self.cmd_common, self.machine)
@@ -119,7 +132,7 @@ SYSLINUX_TIMEOUT = "10"
def test_boot_qemu_boot(self):
"""Test runqemu /path/to/image.qemuboot.conf"""
- qemuboot_conf = "%s-%s.qemuboot.conf" % (self.recipe, self.machine)
+ qemuboot_conf = "%s.qemuboot.conf" % (self.image_link_name)
qemuboot_conf = os.path.join(self.deploy_dir_image, qemuboot_conf)
if not os.path.exists(qemuboot_conf):
self.skipTest("%s not found" % qemuboot_conf)
@@ -130,7 +143,7 @@ SYSLINUX_TIMEOUT = "10"
def test_boot_rootfs(self):
"""Test runqemu /path/to/rootfs.ext4"""
- rootfs = "%s-%s.ext4" % (self.recipe, self.machine)
+ rootfs = "%s.ext4" % (self.image_link_name)
rootfs = os.path.join(self.deploy_dir_image, rootfs)
if not os.path.exists(rootfs):
self.skipTest("%s not found" % rootfs)
@@ -157,10 +170,11 @@ class QemuTest(OESelftestTestCase):
def setUpClass(cls):
super(QemuTest, cls).setUpClass()
cls.recipe = 'core-image-minimal'
- cls.machine = get_bb_var('MACHINE')
- cls.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
+ cls.machine = get_bb_var('MACHINE')
+ cls.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
+ cls.image_link_name = get_bb_var('IMAGE_LINK_NAME', cls.recipe)
cls.cmd_common = "runqemu nographic"
- cls.qemuboot_conf = "%s-%s.qemuboot.conf" % (cls.recipe, cls.machine)
+ cls.qemuboot_conf = "%s.qemuboot.conf" % (cls.image_link_name)
cls.qemuboot_conf = os.path.join(cls.deploy_dir_image, cls.qemuboot_conf)
bitbake(cls.recipe)
@@ -191,22 +205,12 @@ class QemuTest(OESelftestTestCase):
qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout)
self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout))
- # Need to have portmap/rpcbind running to allow this test to work and
- # current autobuilder setup does not have this.
- def disabled_test_qemu_can_boot_nfs_and_shutdown(self):
- self.assertExists(self.qemuboot_conf)
- bitbake('meta-ide-support')
- rootfs_tar = "%s-%s.tar.bz2" % (self.recipe, self.machine)
+ def test_qemu_can_boot_nfs_and_shutdown(self):
+ rootfs_tar = "%s.tar.bz2" % (self.image_link_name)
rootfs_tar = os.path.join(self.deploy_dir_image, rootfs_tar)
self.assertExists(rootfs_tar)
- tmpdir = tempfile.mkdtemp(prefix='qemu_nfs')
- tmpdir_nfs = os.path.join(tmpdir, 'nfs')
- cmd_extract_nfs = 'runqemu-extract-sdk %s %s' % (rootfs_tar, tmpdir_nfs)
- result = runCmd(cmd_extract_nfs)
- self.assertEqual(0, result.status, "runqemu-extract-sdk didn't run as expected. %s" % result.output)
- cmd = "%s nfs %s %s" % (self.cmd_common, self.qemuboot_conf, tmpdir_nfs)
+ cmd = "%s %s" % (self.cmd_common, rootfs_tar)
shutdown_timeout = 120
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout)
self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout))
- runCmd('rm -rf %s' % tmpdir)
diff --git a/meta/lib/oeqa/selftest/cases/runtime_test.py b/meta/lib/oeqa/selftest/cases/runtime_test.py
index 8eacde40ad..12000aac16 100644
--- a/meta/lib/oeqa/selftest/cases/runtime_test.py
+++ b/meta/lib/oeqa/selftest/cases/runtime_test.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -8,7 +10,7 @@ from oeqa.core.decorator import OETestTag
import os
import tempfile
import oe.lsb
-from oeqa.core.decorator.data import skipIfNotQemu
+from oeqa.core.decorator.data import skipIfNotQemu, skipIfNotMachine
class TestExport(OESelftestTestCase):
@@ -23,7 +25,7 @@ class TestExport(OESelftestTestCase):
Author: Mariano Lopez <mariano.lopez@intel.com>
"""
- features = 'INHERIT += "testexport"\n'
+ features = 'IMAGE_CLASSES += "testexport"\n'
# These aren't the actual IP addresses but testexport class needs something defined
features += 'TEST_SERVER_IP = "192.168.7.1"\n'
features += 'TEST_TARGET_IP = "192.168.7.1"\n'
@@ -64,7 +66,7 @@ class TestExport(OESelftestTestCase):
Author: Mariano Lopez <mariano.lopez@intel.com>
"""
- features = 'INHERIT += "testexport"\n'
+ features = 'IMAGE_CLASSES += "testexport"\n'
# These aren't the actual IP addresses but testexport class needs something defined
features += 'TEST_SERVER_IP = "192.168.7.1"\n'
features += 'TEST_TARGET_IP = "192.168.7.1"\n'
@@ -119,7 +121,7 @@ class TestImage(OESelftestTestCase):
if get_bb_var('DISTRO') == 'poky-tiny':
self.skipTest('core-image-full-cmdline not buildable for poky-tiny')
- features = 'INHERIT += "testimage"\n'
+ features = 'IMAGE_CLASSES += "testimage"\n'
features += 'IMAGE_INSTALL:append = " libssl"\n'
features += 'TEST_SUITES = "ping ssh selftest"\n'
self.write_config(features)
@@ -127,6 +129,22 @@ class TestImage(OESelftestTestCase):
bitbake('core-image-full-cmdline socat')
bitbake('-c testimage core-image-full-cmdline')
+ def test_testimage_slirp(self):
+ """
+ Summary: Check basic testimage functionality with qemu and slirp networking.
+ """
+
+ features = '''
+IMAGE_CLASSES:append = " testimage"
+IMAGE_FEATURES:append = " ssh-server-dropbear"
+IMAGE_ROOTFS_EXTRA_SPACE:append = "${@bb.utils.contains("IMAGE_CLASSES", "testimage", " + 5120", "", d)}"
+TEST_RUNQEMUPARAMS += " slirp"
+'''
+ self.write_config(features)
+
+ bitbake('core-image-minimal')
+ bitbake('-c testimage core-image-minimal')
+
def test_testimage_dnf(self):
"""
Summary: Check package feeds functionality for dnf
@@ -137,7 +155,7 @@ class TestImage(OESelftestTestCase):
if get_bb_var('DISTRO') == 'poky-tiny':
self.skipTest('core-image-full-cmdline not buildable for poky-tiny')
- features = 'INHERIT += "testimage"\n'
+ features = 'IMAGE_CLASSES += "testimage"\n'
features += 'TEST_SUITES = "ping ssh dnf_runtime dnf.DnfBasicTest.test_dnf_help"\n'
# We don't yet know what the server ip and port will be - they will be patched
# in at the start of the on-image test
@@ -172,7 +190,7 @@ class TestImage(OESelftestTestCase):
if get_bb_var('DISTRO') == 'poky-tiny':
self.skipTest('core-image-full-cmdline not buildable for poky-tiny')
- features = 'INHERIT += "testimage"\n'
+ features = 'IMAGE_CLASSES += "testimage"\n'
features += 'TEST_SUITES = "ping ssh apt.AptRepoTest.test_apt_install_from_repo"\n'
# We don't yet know what the server ip and port will be - they will be patched
# in at the start of the on-image test
@@ -200,6 +218,8 @@ class TestImage(OESelftestTestCase):
bitbake('core-image-full-cmdline socat')
bitbake('-c testimage core-image-full-cmdline')
+ # https://bugzilla.yoctoproject.org/show_bug.cgi?id=14966
+ @skipIfNotMachine("qemux86-64", "test needs qemux86-64")
def test_testimage_virgl_gtk_sdl(self):
"""
Summary: Check host-assisted accelerate OpenGL functionality in qemu with gtk and SDL frontends
@@ -222,7 +242,7 @@ class TestImage(OESelftestTestCase):
qemu_packageconfig = get_bb_var('PACKAGECONFIG', 'qemu-system-native')
qemu_distrofeatures = get_bb_var('DISTRO_FEATURES', 'qemu-system-native')
- features = 'INHERIT += "testimage"\n'
+ features = 'IMAGE_CLASSES += "testimage"\n'
if 'gtk+' not in qemu_packageconfig:
features += 'PACKAGECONFIG:append:pn-qemu-system-native = " gtk+"\n'
if 'sdl' not in qemu_packageconfig:
@@ -232,15 +252,16 @@ class TestImage(OESelftestTestCase):
features += 'TEST_SUITES = "ping ssh virgl"\n'
features += 'IMAGE_FEATURES:append = " ssh-server-dropbear"\n'
features += 'IMAGE_INSTALL:append = " kmscube"\n'
- features_gtk = features + 'TEST_RUNQEMUPARAMS = "gtk gl"\n'
+ features_gtk = features + 'TEST_RUNQEMUPARAMS += " gtk gl"\n'
self.write_config(features_gtk)
bitbake('core-image-minimal')
bitbake('-c testimage core-image-minimal')
- features_sdl = features + 'TEST_RUNQEMUPARAMS = "sdl gl"\n'
+ features_sdl = features + 'TEST_RUNQEMUPARAMS += " sdl gl"\n'
self.write_config(features_sdl)
bitbake('core-image-minimal')
bitbake('-c testimage core-image-minimal')
+ @skipIfNotMachine("qemux86-64", "test needs qemux86-64")
def test_testimage_virgl_headless(self):
"""
Summary: Check host-assisted accelerate OpenGL functionality in qemu with egl-headless frontend
@@ -252,28 +273,18 @@ class TestImage(OESelftestTestCase):
import subprocess, os
distro = oe.lsb.distro_identifier()
- if distro and distro in ['debian-9', 'debian-10', 'centos-7', 'centos-8', 'ubuntu-16.04', 'ubuntu-18.04', 'almalinux-8.5', 'almalinux-8.6']:
+ if distro and (distro in ['debian-9', 'debian-10', 'centos-7', 'centos-8', 'ubuntu-16.04', 'ubuntu-18.04'] or
+ distro.startswith('almalinux') or distro.startswith('rocky')):
self.skipTest('virgl headless cannot be tested with %s' %(distro))
- render_hint = """If /dev/dri/renderD* is absent due to lack of suitable GPU, 'modprobe vgem' will create one suitable for mesa llvmpipe software renderer."""
- try:
- content = os.listdir("/dev/dri")
- if len([i for i in content if i.startswith('render')]) == 0:
- self.fail("No render nodes found in /dev/dri: %s. %s" %(content, render_hint))
- except FileNotFoundError:
- self.fail("/dev/dri directory does not exist; no render nodes available on this machine. %s" %(render_hint))
- try:
- dripath = subprocess.check_output("pkg-config --variable=dridriverdir dri", shell=True)
- except subprocess.CalledProcessError as e:
- self.fail("Could not determine the path to dri drivers on the host via pkg-config.\nPlease install Mesa development files (particularly, dri.pc) on the host machine.")
qemu_distrofeatures = get_bb_var('DISTRO_FEATURES', 'qemu-system-native')
- features = 'INHERIT += "testimage"\n'
+ features = 'IMAGE_CLASSES += "testimage"\n'
if 'opengl' not in qemu_distrofeatures:
features += 'DISTRO_FEATURES:append = " opengl"\n'
features += 'TEST_SUITES = "ping ssh virgl"\n'
features += 'IMAGE_FEATURES:append = " ssh-server-dropbear"\n'
features += 'IMAGE_INSTALL:append = " kmscube"\n'
- features += 'TEST_RUNQEMUPARAMS = "egl-headless"\n'
+ features += 'TEST_RUNQEMUPARAMS += " egl-headless"\n'
self.write_config(features)
bitbake('core-image-minimal')
bitbake('-c testimage core-image-minimal')
@@ -299,7 +310,7 @@ class Postinst(OESelftestTestCase):
features += 'IMAGE_FEATURES += "package-management empty-root-password"\n'
features += 'PACKAGE_CLASSES = "%s"\n' % classes
if init_manager == "systemd":
- features += 'DISTRO_FEATURES:append = " systemd"\n'
+ features += 'DISTRO_FEATURES:append = " systemd usrmerge"\n'
features += 'VIRTUAL-RUNTIME_init_manager = "systemd"\n'
features += 'DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit"\n'
features += 'VIRTUAL-RUNTIME_initscripts = ""\n'
diff --git a/meta/lib/oeqa/selftest/cases/rust.py b/meta/lib/oeqa/selftest/cases/rust.py
new file mode 100644
index 0000000000..ad14189c6d
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/rust.py
@@ -0,0 +1,231 @@
+# SPDX-License-Identifier: MIT
+import os
+import subprocess
+import time
+from oeqa.core.decorator import OETestTag
+from oeqa.core.case import OEPTestResultTestCase
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu, Command
+from oeqa.utils.sshcontrol import SSHControl
+
+def parse_results(filename):
+ tests = {}
+ with open(filename, "r") as f:
+ lines = f.readlines()
+ for line in lines:
+ if "..." in line and "test [" in line:
+ test = line.split("test ")[1].split(" ... ")[0]
+ if "] " in test:
+ test = test.split("] ", 1)[1]
+ result = line.split(" ... ")[1].strip()
+ if result == "ok":
+ result = "PASS"
+ elif result == "failed":
+ result = "FAIL"
+ elif "ignored" in result:
+ result = "SKIPPED"
+ if test in tests:
+ if tests[test] != result:
+ print("Duplicate and mismatching result %s for %s" % (result, test))
+ else:
+ print("Duplicate result %s for %s" % (result, test))
+ else:
+ tests[test] = result
+ return tests
+
+# Total time taken for testing is about 2hr 20min, with PARALLEL_MAKE set to 40 jobs.
+@OETestTag("toolchain-system")
+@OETestTag("toolchain-user")
+@OETestTag("runqemu")
+class RustSelfTestSystemEmulated(OESelftestTestCase, OEPTestResultTestCase):
+ def test_rust(self, *args, **kwargs):
+        # Uncomment the following line to disable the Rust oe-selftest
+ #self.skipTest("The Rust Oe-selftest is disabled.")
+
+ # Skip mips32 target since it is unstable with rust tests
+ machine = get_bb_var('MACHINE')
+ if machine == "qemumips":
+ self.skipTest("The mips32 target is skipped for Rust Oe-selftest.")
+
+ # build remote-test-server before image build
+ recipe = "rust"
+ start_time = time.time()
+ bitbake("{} -c test_compile".format(recipe))
+ builddir = get_bb_var("RUSTSRC", "rust")
+ # build core-image-minimal with required packages
+ default_installed_packages = ["libgcc", "libstdc++", "libatomic", "libgomp"]
+ features = []
+ features.append('IMAGE_FEATURES += "ssh-server-dropbear"')
+ features.append('CORE_IMAGE_EXTRA_INSTALL += "{0}"'.format(" ".join(default_installed_packages)))
+ self.write_config("\n".join(features))
+ bitbake("core-image-minimal")
+
+ # Exclude the test folders that error out while building
+ # TODO: Fix the errors and include them for testing
+ # no-fail-fast: Run all tests regardless of failure.
+ # bless: First runs rustfmt to format the codebase,
+ # then runs tidy checks.
+ exclude_list = [
+ 'compiler/rustc',
+ 'compiler/rustc_interface/src/tests.rs',
+ 'library/panic_abort',
+ 'library/panic_unwind',
+ 'library/test/src/stats/tests.rs',
+ 'src/bootstrap/builder/tests.rs',
+ 'src/doc/rustc',
+ 'src/doc/rustdoc',
+ 'src/doc/unstable-book',
+ 'src/librustdoc',
+ 'src/rustdoc-json-types',
+ 'src/tools/compiletest/src/common.rs',
+ 'src/tools/lint-docs',
+ 'src/tools/rust-analyzer',
+ 'src/tools/rustdoc-themes',
+ 'src/tools/tidy',
+ 'tests/assembly/asm/aarch64-outline-atomics.rs',
+ 'tests/codegen/abi-main-signature-32bit-c-int.rs',
+ 'tests/codegen/abi-repr-ext.rs',
+ 'tests/codegen/abi-x86-interrupt.rs',
+ 'tests/codegen/branch-protection.rs',
+ 'tests/codegen/catch-unwind.rs',
+ 'tests/codegen/cf-protection.rs',
+ 'tests/codegen/enum-bounds-check-derived-idx.rs',
+ 'tests/codegen/force-unwind-tables.rs',
+ 'tests/codegen/intrinsic-no-unnamed-attr.rs',
+ 'tests/codegen/issues/issue-103840.rs',
+ 'tests/codegen/issues/issue-47278.rs',
+ 'tests/codegen/issues/issue-73827-bounds-check-index-in-subexpr.rs',
+ 'tests/codegen/lifetime_start_end.rs',
+ 'tests/codegen/local-generics-in-exe-internalized.rs',
+ 'tests/codegen/match-unoptimized.rs',
+ 'tests/codegen/noalias-rwlockreadguard.rs',
+ 'tests/codegen/non-terminate/nonempty-infinite-loop.rs',
+ 'tests/codegen/noreturn-uninhabited.rs',
+ 'tests/codegen/repr-transparent-aggregates-3.rs',
+ 'tests/codegen/riscv-abi/call-llvm-intrinsics.rs',
+ 'tests/codegen/riscv-abi/riscv64-lp64f-lp64d-abi.rs',
+ 'tests/codegen/riscv-abi/riscv64-lp64d-abi.rs',
+ 'tests/codegen/sse42-implies-crc32.rs',
+ 'tests/codegen/thread-local.rs',
+ 'tests/codegen/uninit-consts.rs',
+ 'tests/pretty/raw-str-nonexpr.rs',
+ 'tests/run-make',
+ 'tests/run-make-fulldeps',
+ 'tests/rustdoc',
+ 'tests/rustdoc-json',
+ 'tests/rustdoc-js-std',
+ 'tests/rustdoc-ui/cfg-test.rs',
+ 'tests/rustdoc-ui/check-cfg-test.rs',
+ 'tests/rustdoc-ui/display-output.rs',
+ 'tests/rustdoc-ui/doc-comment-multi-line-attr.rs',
+ 'tests/rustdoc-ui/doc-comment-multi-line-cfg-attr.rs',
+ 'tests/rustdoc-ui/doc-test-doctest-feature.rs',
+ 'tests/rustdoc-ui/doctest-multiline-crate-attribute.rs',
+ 'tests/rustdoc-ui/doctest-output.rs',
+ 'tests/rustdoc-ui/doc-test-rustdoc-feature.rs',
+ 'tests/rustdoc-ui/failed-doctest-compile-fail.rs',
+ 'tests/rustdoc-ui/issue-80992.rs',
+ 'tests/rustdoc-ui/issue-91134.rs',
+ 'tests/rustdoc-ui/nocapture-fail.rs',
+ 'tests/rustdoc-ui/nocapture.rs',
+ 'tests/rustdoc-ui/no-run-flag.rs',
+ 'tests/rustdoc-ui/run-directory.rs',
+ 'tests/rustdoc-ui/test-no_std.rs',
+ 'tests/rustdoc-ui/test-type.rs',
+ 'tests/rustdoc/unit-return.rs',
+ 'tests/ui/abi/stack-probes-lto.rs',
+ 'tests/ui/abi/stack-probes.rs',
+ 'tests/ui/array-slice-vec/subslice-patterns-const-eval-match.rs',
+ 'tests/ui/asm/x86_64/sym.rs',
+ 'tests/ui/associated-type-bounds/fn-apit.rs',
+ 'tests/ui/associated-type-bounds/fn-dyn-apit.rs',
+ 'tests/ui/associated-type-bounds/fn-wrap-apit.rs',
+ 'tests/ui/debuginfo/debuginfo-emit-llvm-ir-and-split-debuginfo.rs',
+ 'tests/ui/drop/dynamic-drop.rs',
+ 'tests/ui/empty_global_asm.rs',
+ 'tests/ui/functions-closures/fn-help-with-err.rs',
+ 'tests/ui/linkage-attr/issue-10755.rs',
+ 'tests/ui/macros/restricted-shadowing-legacy.rs',
+ 'tests/ui/process/nofile-limit.rs',
+ 'tests/ui/process/process-panic-after-fork.rs',
+ 'tests/ui/process/process-sigpipe.rs',
+ 'tests/ui/simd/target-feature-mixup.rs',
+ 'tests/ui/structs-enums/multiple-reprs.rs',
+ 'src/tools/jsondoclint',
+ 'src/tools/replace-version-placeholder',
+ 'tests/codegen/abi-efiapi.rs',
+ 'tests/codegen/abi-sysv64.rs',
+ 'tests/codegen/align-byval.rs',
+ 'tests/codegen/align-fn.rs',
+ 'tests/codegen/asm-powerpc-clobbers.rs',
+ 'tests/codegen/async-fn-debug-awaitee-field.rs',
+ 'tests/codegen/binary-search-index-no-bound-check.rs',
+ 'tests/codegen/call-metadata.rs',
+ 'tests/codegen/debug-column.rs',
+ 'tests/codegen/debug-limited.rs',
+ 'tests/codegen/debuginfo-generic-closure-env-names.rs',
+ 'tests/codegen/drop.rs',
+ 'tests/codegen/dst-vtable-align-nonzero.rs',
+ 'tests/codegen/enable-lto-unit-splitting.rs',
+ 'tests/codegen/enum/enum-u128.rs',
+ 'tests/codegen/fn-impl-trait-self.rs',
+ 'tests/codegen/inherit_overflow.rs',
+ 'tests/codegen/inline-function-args-debug-info.rs',
+ 'tests/codegen/intrinsics/mask.rs',
+ 'tests/codegen/intrinsics/transmute-niched.rs',
+ 'tests/codegen/issues/issue-73258.rs',
+ 'tests/codegen/issues/issue-75546.rs',
+ 'tests/codegen/issues/issue-77812.rs',
+ 'tests/codegen/issues/issue-98156-const-arg-temp-lifetime.rs',
+ 'tests/codegen/llvm-ident.rs',
+ 'tests/codegen/mainsubprogram.rs',
+ 'tests/codegen/move-operands.rs',
+ 'tests/codegen/repr/transparent-mips64.rs',
+ 'tests/mir-opt/',
+ 'tests/rustdoc-json',
+ 'tests/rustdoc-ui/doc-test-rustdoc-feature.rs',
+ 'tests/rustdoc-ui/no-run-flag.rs',
+ 'tests/ui-fulldeps/',
+ 'tests/ui/numbers-arithmetic/u128.rs'
+ ]
+
+ exclude_fail_tests = " ".join([" --exclude " + item for item in exclude_list])
+ # Add exclude_fail_tests with other test arguments
+ testargs = exclude_fail_tests + " --doc --no-fail-fast --bless"
+
+ # wrap the execution with a qemu instance.
+        # The guest is given 512 MB of memory (qemu -m 512).
+ with runqemu("core-image-minimal", runqemuparams = "nographic", qemuparams = "-m 512") as qemu:
+ # Copy remote-test-server to image through scp
+ host_sys = get_bb_var("RUST_BUILD_SYS", "rust")
+ ssh = SSHControl(ip=qemu.ip, logfile=qemu.sshlog, user="root")
+ ssh.copy_to(builddir + "/build/" + host_sys + "/stage1-tools-bin/remote-test-server","~/")
+ # Execute remote-test-server on image through background ssh
+ command = '~/remote-test-server --bind 0.0.0.0:12345 -v'
+ sshrun=subprocess.Popen(("ssh", '-o', 'UserKnownHostsFile=/dev/null', '-o', 'StrictHostKeyChecking=no', '-f', "root@%s" % qemu.ip, command), shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ # Get the values of variables.
+ tcpath = get_bb_var("TARGET_SYS", "rust")
+ targetsys = get_bb_var("RUST_TARGET_SYS", "rust")
+ rustlibpath = get_bb_var("WORKDIR", "rust")
+ tmpdir = get_bb_var("TMPDIR", "rust")
+
+ # Set path for target-poky-linux-gcc, RUST_TARGET_PATH and hosttools.
+ cmd = " export PATH=%s/recipe-sysroot-native/usr/bin:$PATH;" % rustlibpath
+ cmd = cmd + " export TARGET_VENDOR=\"-poky\";"
+ cmd = cmd + " export PATH=%s/recipe-sysroot-native/usr/bin/%s:%s/hosttools:$PATH;" % (rustlibpath, tcpath, tmpdir)
+ cmd = cmd + " export RUST_TARGET_PATH=%s/rust-targets;" % rustlibpath
+ # Trigger testing.
+ cmd = cmd + " export TEST_DEVICE_ADDR=\"%s:12345\";" % qemu.ip
+ cmd = cmd + " cd %s; python3 src/bootstrap/bootstrap.py test %s --target %s" % (builddir, testargs, targetsys)
+ retval = runCmd(cmd)
+ end_time = time.time()
+
+ resultlog = rustlibpath + "/results-log.txt"
+ with open(resultlog, "w") as f:
+ f.write(retval.output)
+
+ ptestsuite = "rust"
+ self.ptest_section(ptestsuite, duration = int(end_time - start_time), logfile=resultlog)
+ test_results = parse_results(resultlog)
+ for test in test_results:
+ self.ptest_result(ptestsuite, test, test_results[test])
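For reference, parse_results() above consumes the standard Rust test harness log; the sample lines below are invented, and the mapping follows the code above:

    # Given a results file containing lines such as:
    sample = [
        "test [ui] tests/ui/foo.rs ... ok",
        "test [codegen] tests/codegen/bar.rs ... failed",
        "test [ui] tests/ui/baz.rs ... ignored",
    ]
    # parse_results() would return:
    #   {'tests/ui/foo.rs': 'PASS',
    #    'tests/codegen/bar.rs': 'FAIL',
    #    'tests/ui/baz.rs': 'SKIPPED'}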
diff --git a/meta/lib/oeqa/selftest/cases/selftest.py b/meta/lib/oeqa/selftest/cases/selftest.py
index 7268e25939..a80a8651a5 100644
--- a/meta/lib/oeqa/selftest/cases/selftest.py
+++ b/meta/lib/oeqa/selftest/cases/selftest.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/signing.py b/meta/lib/oeqa/selftest/cases/signing.py
index 6f3d4aeae9..18cce0ba25 100644
--- a/meta/lib/oeqa/selftest/cases/signing.py
+++ b/meta/lib/oeqa/selftest/cases/signing.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -189,7 +191,7 @@ class LockedSignatures(OESelftestTestCase):
bitbake(test_recipe)
# Generate locked sigs include file
- bitbake('-S none %s' % test_recipe)
+ bitbake('-S lockedsigs %s' % test_recipe)
feature = 'require %s\n' % locked_sigs_file
feature += 'SIGGEN_LOCKEDSIGS_TASKSIG_CHECK = "warn"\n'
diff --git a/meta/lib/oeqa/selftest/cases/spdx.py b/meta/lib/oeqa/selftest/cases/spdx.py
new file mode 100644
index 0000000000..05fc4e390b
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/spdx.py
@@ -0,0 +1,54 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import json
+import os
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_var, runCmd
+
+class SPDXCheck(OESelftestTestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super(SPDXCheck, cls).setUpClass()
+ bitbake("python3-spdx-tools-native")
+ bitbake("-c addto_recipe_sysroot python3-spdx-tools-native")
+
+ def check_recipe_spdx(self, high_level_dir, spdx_file, target_name):
+ config = """
+INHERIT += "create-spdx"
+"""
+ self.write_config(config)
+
+ deploy_dir = get_bb_var("DEPLOY_DIR")
+ machine_var = get_bb_var("MACHINE")
+ # qemux86-64 creates the directory qemux86_64
+ machine_dir = machine_var.replace("-", "_")
+
+ full_file_path = os.path.join(deploy_dir, "spdx", machine_dir, high_level_dir, spdx_file)
+
+ try:
+ os.remove(full_file_path)
+ except FileNotFoundError:
+ pass
+
+ bitbake("%s -c create_spdx" % target_name)
+
+ def check_spdx_json(filename):
+ with open(filename) as f:
+ report = json.load(f)
+ self.assertNotEqual(report, None)
+ self.assertNotEqual(report["SPDXID"], None)
+
+ python = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-spdx-tools-native'), 'nativepython3')
+ validator = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-spdx-tools-native'), 'pyspdxtools')
+ result = runCmd("{} {} -i {}".format(python, validator, filename))
+
+ self.assertExists(full_file_path)
+ result = check_spdx_json(full_file_path)
+
+ def test_spdx_base_files(self):
+ self.check_recipe_spdx("packages", "base-files.spdx.json", "base-files")
diff --git a/meta/lib/oeqa/selftest/cases/sstate.py b/meta/lib/oeqa/selftest/cases/sstate.py
deleted file mode 100644
index 176766331a..0000000000
--- a/meta/lib/oeqa/selftest/cases/sstate.py
+++ /dev/null
@@ -1,64 +0,0 @@
-#
-# SPDX-License-Identifier: MIT
-#
-
-import datetime
-import os
-import re
-
-from oeqa.selftest.case import OESelftestTestCase
-from oeqa.utils.commands import get_bb_vars
-
-
-class SStateBase(OESelftestTestCase):
-
- def setUpLocal(self):
- super(SStateBase, self).setUpLocal()
- self.temp_sstate_location = None
- needed_vars = ['SSTATE_DIR', 'NATIVELSBSTRING', 'TCLIBC', 'TUNE_ARCH',
- 'TOPDIR', 'TARGET_VENDOR', 'TARGET_OS']
- bb_vars = get_bb_vars(needed_vars)
- self.sstate_path = bb_vars['SSTATE_DIR']
- self.hostdistro = bb_vars['NATIVELSBSTRING']
- self.tclibc = bb_vars['TCLIBC']
- self.tune_arch = bb_vars['TUNE_ARCH']
- self.topdir = bb_vars['TOPDIR']
- self.target_vendor = bb_vars['TARGET_VENDOR']
- self.target_os = bb_vars['TARGET_OS']
- self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro)
-
- # Creates a special sstate configuration with the option to add sstate mirrors
- def config_sstate(self, temp_sstate_location=False, add_local_mirrors=[]):
- self.temp_sstate_location = temp_sstate_location
-
- if self.temp_sstate_location:
- temp_sstate_path = os.path.join(self.builddir, "temp_sstate_%s" % datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
- config_temp_sstate = "SSTATE_DIR = \"%s\"" % temp_sstate_path
- self.append_config(config_temp_sstate)
- self.track_for_cleanup(temp_sstate_path)
- bb_vars = get_bb_vars(['SSTATE_DIR', 'NATIVELSBSTRING'])
- self.sstate_path = bb_vars['SSTATE_DIR']
- self.hostdistro = bb_vars['NATIVELSBSTRING']
- self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro)
-
- if add_local_mirrors:
- config_set_sstate_if_not_set = 'SSTATE_MIRRORS ?= ""'
- self.append_config(config_set_sstate_if_not_set)
- for local_mirror in add_local_mirrors:
- self.assertFalse(os.path.join(local_mirror) == os.path.join(self.sstate_path), msg='Cannot add the current sstate path as a sstate mirror')
- config_sstate_mirror = "SSTATE_MIRRORS += \"file://.* file:///%s/PATH\"" % local_mirror
- self.append_config(config_sstate_mirror)
-
- # Returns a list containing sstate files
- def search_sstate(self, filename_regex, distro_specific=True, distro_nonspecific=True):
- result = []
- for root, dirs, files in os.walk(self.sstate_path):
- if distro_specific and re.search(r"%s/%s/[a-z0-9]{2}/[a-z0-9]{2}$" % (self.sstate_path, self.hostdistro), root):
- for f in files:
- if re.search(filename_regex, f):
- result.append(f)
- if distro_nonspecific and re.search(r"%s/[a-z0-9]{2}/[a-z0-9]{2}$" % self.sstate_path, root):
- for f in files:
- if re.search(filename_regex, f):
- result.append(f)
- return result
diff --git a/meta/lib/oeqa/selftest/cases/sstatetests.py b/meta/lib/oeqa/selftest/cases/sstatetests.py
index 4a32af902f..86d6cd7464 100644
--- a/meta/lib/oeqa/selftest/cases/sstatetests.py
+++ b/meta/lib/oeqa/selftest/cases/sstatetests.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -7,54 +9,77 @@ import shutil
import glob
import subprocess
import tempfile
+import datetime
+import re
-from oeqa.utils.commands import runCmd, bitbake, get_bb_var, create_temp_layer
-from oeqa.selftest.cases.sstate import SStateBase
-import oe
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, create_temp_layer, get_bb_vars
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.core.decorator import OETestTag
+import oe
import bb.siggen
-class SStateTests(SStateBase):
- def test_autorev_sstate_works(self):
- # Test that a git repository which changes is correctly handled by SRCREV = ${AUTOREV}
- # when PV does not contain SRCPV
-
- tempdir = tempfile.mkdtemp(prefix='sstate_autorev')
- tempdldir = tempfile.mkdtemp(prefix='sstate_autorev_dldir')
- self.track_for_cleanup(tempdir)
- self.track_for_cleanup(tempdldir)
- create_temp_layer(tempdir, 'selftestrecipetool')
- self.add_command_to_tearDown('bitbake-layers remove-layer %s' % tempdir)
- self.append_config("DL_DIR = \"%s\"" % tempdldir)
- runCmd('bitbake-layers add-layer %s' % tempdir)
-
- # Use dbus-wait as a local git repo we can add a commit between two builds in
- pn = 'dbus-wait'
- srcrev = '6cc6077a36fe2648a5f993fe7c16c9632f946517'
- url = 'git://git.yoctoproject.org/dbus-wait'
- result = runCmd('git clone %s noname' % url, cwd=tempdir)
- srcdir = os.path.join(tempdir, 'noname')
- result = runCmd('git reset --hard %s' % srcrev, cwd=srcdir)
- self.assertTrue(os.path.isfile(os.path.join(srcdir, 'configure.ac')), 'Unable to find configure script in source directory')
-
- recipefile = os.path.join(tempdir, "recipes-test", "dbus-wait-test", 'dbus-wait-test_git.bb')
- os.makedirs(os.path.dirname(recipefile))
- srcuri = 'git://' + srcdir + ';protocol=file;branch=master'
- result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri])
- self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output)
-
- with open(recipefile, 'a') as f:
- f.write('SRCREV = "${AUTOREV}"\n')
- f.write('PV = "1.0"\n')
-
- bitbake("dbus-wait-test -c fetch")
- with open(os.path.join(srcdir, "bar.txt"), "w") as f:
- f.write("foo")
- result = runCmd('git add bar.txt; git commit -asm "add bar"', cwd=srcdir)
- bitbake("dbus-wait-test -c unpack")
-
-
- # Test sstate files creation and their location
+# Set to True to preserve the temporary directories (and their stamp files) after test execution for debugging failures
+keep_temp_files = False
+
+class SStateBase(OESelftestTestCase):
+
+ def setUpLocal(self):
+ super(SStateBase, self).setUpLocal()
+ self.temp_sstate_location = None
+ needed_vars = ['SSTATE_DIR', 'NATIVELSBSTRING', 'TCLIBC', 'TUNE_ARCH',
+ 'TOPDIR', 'TARGET_VENDOR', 'TARGET_OS']
+ bb_vars = get_bb_vars(needed_vars)
+ self.sstate_path = bb_vars['SSTATE_DIR']
+ self.hostdistro = bb_vars['NATIVELSBSTRING']
+ self.tclibc = bb_vars['TCLIBC']
+ self.tune_arch = bb_vars['TUNE_ARCH']
+ self.topdir = bb_vars['TOPDIR']
+ self.target_vendor = bb_vars['TARGET_VENDOR']
+ self.target_os = bb_vars['TARGET_OS']
+ self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro)
+
+ def track_for_cleanup(self, path):
+ if not keep_temp_files:
+ super().track_for_cleanup(path)
+
+ # Creates a special sstate configuration with the option to add sstate mirrors
+ def config_sstate(self, temp_sstate_location=False, add_local_mirrors=[]):
+ self.temp_sstate_location = temp_sstate_location
+
+ if self.temp_sstate_location:
+ temp_sstate_path = os.path.join(self.builddir, "temp_sstate_%s" % datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
+ config_temp_sstate = "SSTATE_DIR = \"%s\"" % temp_sstate_path
+ self.append_config(config_temp_sstate)
+ self.track_for_cleanup(temp_sstate_path)
+ bb_vars = get_bb_vars(['SSTATE_DIR', 'NATIVELSBSTRING'])
+ self.sstate_path = bb_vars['SSTATE_DIR']
+ self.hostdistro = bb_vars['NATIVELSBSTRING']
+ self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro)
+
+ if add_local_mirrors:
+ config_set_sstate_if_not_set = 'SSTATE_MIRRORS ?= ""'
+ self.append_config(config_set_sstate_if_not_set)
+ for local_mirror in add_local_mirrors:
+ self.assertFalse(os.path.join(local_mirror) == os.path.join(self.sstate_path), msg='Cannot add the current sstate path as a sstate mirror')
+ config_sstate_mirror = "SSTATE_MIRRORS += \"file://.* file:///%s/PATH\"" % local_mirror
+ self.append_config(config_sstate_mirror)
+
+ # Returns a list containing sstate files
+ def search_sstate(self, filename_regex, distro_specific=True, distro_nonspecific=True):
+ result = []
+ for root, dirs, files in os.walk(self.sstate_path):
+ if distro_specific and re.search(r"%s/%s/[a-z0-9]{2}/[a-z0-9]{2}$" % (self.sstate_path, self.hostdistro), root):
+ for f in files:
+ if re.search(filename_regex, f):
+ result.append(f)
+ if distro_nonspecific and re.search(r"%s/[a-z0-9]{2}/[a-z0-9]{2}$" % self.sstate_path, root):
+ for f in files:
+ if re.search(filename_regex, f):
+ result.append(f)
+ return result
+
+ # Test sstate files creation and their location and directory perms
def run_test_sstate_creation(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True, should_pass=True):
self.config_sstate(temp_sstate_location, [self.sstate_path])
@@ -63,6 +88,19 @@ class SStateTests(SStateBase):
else:
bitbake(['-ccleansstate'] + targets)
+ # We need to test that the environment umask does not affect sstate directory creation.
+ # So, first, we'll get the current umask and set it to something we know is incorrect.
+ # See: sstate_task_postfunc for correct umask of os.umask(0o002)
+ import os
+ def current_umask():
+ current_umask = os.umask(0)
+ os.umask(current_umask)
+ return current_umask
+
+ orig_umask = current_umask()
+ # Set it to a umask we know will be 'wrong'
+ os.umask(0o022)
+
bitbake(targets)
file_tracker = []
results = self.search_sstate('|'.join(map(str, targets)), distro_specific, distro_nonspecific)
@@ -79,17 +117,18 @@ class SStateTests(SStateBase):
else:
self.assertTrue(not file_tracker , msg="Found sstate files in the wrong place for: %s (found %s)" % (', '.join(map(str, targets)), str(file_tracker)))
- def test_sstate_creation_distro_specific_pass(self):
- self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
-
- def test_sstate_creation_distro_specific_fail(self):
- self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True, should_pass=False)
+ # Now we'll walk the tree to check the mode and see if things are incorrect.
+ badperms = []
+ for root, dirs, files in os.walk(self.sstate_path):
+ for directory in dirs:
+ if (os.stat(os.path.join(root, directory)).st_mode & 0o777) != 0o775:
+ badperms.append(os.path.join(root, directory))
- def test_sstate_creation_distro_nonspecific_pass(self):
- self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
+ # Return to original umask
+ os.umask(orig_umask)
- def test_sstate_creation_distro_nonspecific_fail(self):
- self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True, should_pass=False)
+ if should_pass:
+ self.assertTrue(badperms , msg="Found sstate directories with the wrong permissions: %s (found %s)" % (', '.join(map(str, targets)), str(badperms)))
# Test the sstate files deletion part of the do_cleansstate task
def run_test_cleansstate_task(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True):
@@ -108,20 +147,6 @@ class SStateTests(SStateBase):
archives_removed = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific, distro_nonspecific)
self.assertTrue(not archives_removed, msg="do_cleansstate didn't remove .tar.zst sstate files for: %s (%s)" % (', '.join(map(str, targets)), str(archives_removed)))
- def test_cleansstate_task_distro_specific_nonspecific(self):
- targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native']
- targets.append('linux-libc-headers')
- self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True)
-
- def test_cleansstate_task_distro_nonspecific(self):
- self.run_test_cleansstate_task(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
-
- def test_cleansstate_task_distro_specific(self):
- targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native']
- targets.append('linux-libc-headers')
- self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
-
-
# Test rebuilding of distro-specific sstate files
def run_test_rebuild_distro_specific_sstate(self, targets, temp_sstate_location=True):
self.config_sstate(temp_sstate_location, [self.sstate_path])
@@ -154,6 +179,105 @@ class SStateTests(SStateBase):
created_once = [x for x in file_tracker_2 if x not in file_tracker_1]
self.assertTrue(created_once == [], msg="The following sstate files were created only in the second run: %s" % ', '.join(map(str, created_once)))
+ def sstate_common_samesigs(self, configA, configB, allarch=False):
+
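+ # Build world and meta-toolchain signatures (-S none) under two configurations and assert that the
+ # nativesdk (and optionally allarch) stamps produce identical task hashes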
+ self.write_config(configA)
+ self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
+ bitbake("world meta-toolchain -S none")
+ self.write_config(configB)
+ self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
+ bitbake("world meta-toolchain -S none")
+
+ def get_files(d, result):
+ for root, dirs, files in os.walk(d):
+ for name in files:
+ if "meta-environment" in root or "cross-canadian" in root:
+ continue
+ if "do_build" not in name:
+ # 1.4.1+gitAUTOINC+302fca9f4c-r0.do_package_write_ipk.sigdata.f3a2a38697da743f0dbed8b56aafcf79
+ (_, task, _, shash) = name.rsplit(".", 3)
+ result[os.path.join(os.path.basename(root), task)] = shash
+
+ files1 = {}
+ files2 = {}
+ subdirs = sorted(glob.glob(self.topdir + "/tmp-sstatesamehash/stamps/*-nativesdk*-linux"))
+ if allarch:
+ subdirs.extend(sorted(glob.glob(self.topdir + "/tmp-sstatesamehash/stamps/all-*-linux")))
+
+ for subdir in subdirs:
+ nativesdkdir = os.path.basename(subdir)
+ get_files(self.topdir + "/tmp-sstatesamehash/stamps/" + nativesdkdir, files1)
+ get_files(self.topdir + "/tmp-sstatesamehash2/stamps/" + nativesdkdir, files2)
+
+ self.maxDiff = None
+ self.assertEqual(files1, files2)
+
+class SStateTests(SStateBase):
+ def test_autorev_sstate_works(self):
+ # Test that a git repository which changes is correctly handled by SRCREV = ${AUTOREV}
+
+ tempdir = tempfile.mkdtemp(prefix='sstate_autorev')
+ tempdldir = tempfile.mkdtemp(prefix='sstate_autorev_dldir')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(tempdldir)
+ create_temp_layer(tempdir, 'selftestrecipetool')
+ self.add_command_to_tearDown('bitbake-layers remove-layer %s' % tempdir)
+ self.append_config("DL_DIR = \"%s\"" % tempdldir)
+ runCmd('bitbake-layers add-layer %s' % tempdir)
+
+ # Use dbus-wait as a local git repo to which we can add a commit between the two builds
+ pn = 'dbus-wait'
+ srcrev = '6cc6077a36fe2648a5f993fe7c16c9632f946517'
+ url = 'git://git.yoctoproject.org/dbus-wait'
+ result = runCmd('git clone %s noname' % url, cwd=tempdir)
+ srcdir = os.path.join(tempdir, 'noname')
+ result = runCmd('git reset --hard %s' % srcrev, cwd=srcdir)
+ self.assertTrue(os.path.isfile(os.path.join(srcdir, 'configure.ac')), 'Unable to find configure script in source directory')
+
+ recipefile = os.path.join(tempdir, "recipes-test", "dbus-wait-test", 'dbus-wait-test_git.bb')
+ os.makedirs(os.path.dirname(recipefile))
+ srcuri = 'git://' + srcdir + ';protocol=file;branch=master'
+ result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri])
+ self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output)
+
+ with open(recipefile, 'a') as f:
+ f.write('SRCREV = "${AUTOREV}"\n')
+ f.write('PV = "1.0"\n')
+
+ bitbake("dbus-wait-test -c fetch")
+ with open(os.path.join(srcdir, "bar.txt"), "w") as f:
+ f.write("foo")
+ result = runCmd('git add bar.txt; git commit -asm "add bar"', cwd=srcdir)
+ bitbake("dbus-wait-test -c unpack")
+
+class SStateCreation(SStateBase):
+ def test_sstate_creation_distro_specific_pass(self):
+ self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
+
+ def test_sstate_creation_distro_specific_fail(self):
+ self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True, should_pass=False)
+
+ def test_sstate_creation_distro_nonspecific_pass(self):
+ self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
+
+ def test_sstate_creation_distro_nonspecific_fail(self):
+ self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True, should_pass=False)
+
+class SStateCleanup(SStateBase):
+ def test_cleansstate_task_distro_specific_nonspecific(self):
+ targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native']
+ targets.append('linux-libc-headers')
+ self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True)
+
+ def test_cleansstate_task_distro_nonspecific(self):
+ self.run_test_cleansstate_task(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
+
+ def test_cleansstate_task_distro_specific(self):
+ targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native']
+ targets.append('linux-libc-headers')
+ self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
+
+class SStateDistroTests(SStateBase):
def test_rebuild_distro_specific_sstate_cross_native_targets(self):
self.run_test_rebuild_distro_specific_sstate(['binutils-cross-' + self.tune_arch, 'binutils-native'], temp_sstate_location=True)
@@ -163,30 +287,30 @@ class SStateTests(SStateBase):
def test_rebuild_distro_specific_sstate_native_target(self):
self.run_test_rebuild_distro_specific_sstate(['binutils-native'], temp_sstate_location=True)
-
+class SStateCacheManagement(SStateBase):
# Test the sstate-cache-management script. Each element in the global_config list is used with the corresponding element in the target_config list
- # global_config elements are expected to not generate any sstate files that would be removed by sstate-cache-management.sh (such as changing the value of MACHINE)
+ # global_config elements are expected to not generate any sstate files that would be removed by sstate-cache-management.py (such as changing the value of MACHINE)
def run_test_sstate_cache_management_script(self, target, global_config=[''], target_config=[''], ignore_patterns=[]):
self.assertTrue(global_config)
self.assertTrue(target_config)
self.assertTrue(len(global_config) == len(target_config), msg='Lists global_config and target_config should have the same number of elements')
- self.config_sstate(temp_sstate_location=True, add_local_mirrors=[self.sstate_path])
- # If buildhistory is enabled, we need to disable version-going-backwards
- # QA checks for this test. It may report errors otherwise.
- self.append_config('ERROR_QA:remove = "version-going-backwards"')
+ for idx in range(len(target_config)):
+ self.append_config(global_config[idx])
+ self.append_recipeinc(target, target_config[idx])
+ bitbake(target)
+ self.remove_config(global_config[idx])
+ self.remove_recipeinc(target, target_config[idx])
+
+ self.config_sstate(temp_sstate_location=True, add_local_mirrors=[self.sstate_path])
# For now this only checks if random sstate tasks are handled correctly as a group.
# In the future we should add control over what tasks we check for.
- sstate_archs_list = []
expected_remaining_sstate = []
for idx in range(len(target_config)):
self.append_config(global_config[idx])
self.append_recipeinc(target, target_config[idx])
- sstate_arch = get_bb_var('SSTATE_PKGARCH', target)
- if not sstate_arch in sstate_archs_list:
- sstate_archs_list.append(sstate_arch)
if target_config[idx] == target_config[-1]:
target_sstate_before_build = self.search_sstate(target + r'.*?\.tar.zst$')
bitbake("-cclean %s" % target)
@@ -198,7 +322,7 @@ class SStateTests(SStateBase):
self.remove_recipeinc(target, target_config[idx])
self.assertEqual(result.status, 0, msg = "build of %s failed with %s" % (target, result.output))
- runCmd("sstate-cache-management.sh -y --cache-dir=%s --remove-duplicated --extra-archs=%s" % (self.sstate_path, ','.join(map(str, sstate_archs_list))))
+ runCmd("sstate-cache-management.py -y --cache-dir=%s --remove-duplicated" % (self.sstate_path))
actual_remaining_sstate = [x for x in self.search_sstate(target + r'.*?\.tar.zst$') if not any(pattern in x for pattern in ignore_patterns)]
actual_not_expected = [x for x in actual_remaining_sstate if x not in expected_remaining_sstate]
@@ -242,6 +366,7 @@ class SStateTests(SStateBase):
target_config.append('')
self.run_test_sstate_cache_management_script('m4', global_config, target_config, ignore_patterns=['populate_lic'])
+class SStateHashSameSigs(SStateBase):
def test_sstate_32_64_same_hash(self):
"""
The sstate checksums for both native and target should not vary whether
@@ -327,6 +452,7 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
self.maxDiff = None
self.assertCountEqual(files1, files2)
+class SStateHashSameSigs2(SStateBase):
def test_sstate_allarch_samesigs(self):
"""
The sstate checksums of allarch packages should be independent of whichever
@@ -375,41 +501,7 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
"""
self.sstate_common_samesigs(configA, configB)
- def sstate_common_samesigs(self, configA, configB, allarch=False):
-
- self.write_config(configA)
- self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
- bitbake("world meta-toolchain -S none")
- self.write_config(configB)
- self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
- bitbake("world meta-toolchain -S none")
-
- def get_files(d):
- f = {}
- for root, dirs, files in os.walk(d):
- for name in files:
- if "meta-environment" in root or "cross-canadian" in root:
- continue
- if "do_build" not in name:
- # 1.4.1+gitAUTOINC+302fca9f4c-r0.do_package_write_ipk.sigdata.f3a2a38697da743f0dbed8b56aafcf79
- (_, task, _, shash) = name.rsplit(".", 3)
- f[os.path.join(os.path.basename(root), task)] = shash
- return f
-
- nativesdkdir = os.path.basename(glob.glob(self.topdir + "/tmp-sstatesamehash/stamps/*-nativesdk*-linux")[0])
-
- files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps/" + nativesdkdir)
- files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps/" + nativesdkdir)
- self.maxDiff = None
- self.assertEqual(files1, files2)
-
- if allarch:
- allarchdir = os.path.basename(glob.glob(self.topdir + "/tmp-sstatesamehash/stamps/all-*-linux")[0])
-
- files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps/" + allarchdir)
- files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps/" + allarchdir)
- self.assertEqual(files1, files2)
-
+class SStateHashSameSigs3(SStateBase):
def test_sstate_sametune_samesigs(self):
"""
The sstate checksums of two identical machines (using the same tune) should be the
@@ -444,7 +536,7 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
f = []
for root, dirs, files in os.walk(d):
for name in files:
- if "meta-environment" in root or "cross-canadian" in root:
+ if "meta-environment" in root or "cross-canadian" in root or 'meta-ide-support' in root:
continue
if "qemux86copy-" in root or "qemux86-" in root:
continue
@@ -497,7 +589,7 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
self.maxDiff = None
self.assertCountEqual(files1, files2)
-
+class SStateHashSameSigs4(SStateBase):
def test_sstate_noop_samesigs(self):
"""
The sstate checksums of two builds with these variables changed or
@@ -594,6 +686,7 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
copy_layer_2 = self.topdir + "/meta-copy2/meta"
oe.path.copytree(core_layer, copy_layer_1)
+ os.symlink(os.path.dirname(core_layer) + "/scripts", self.topdir + "/meta-copy1/scripts")
self.write_config("""
TMPDIR = "${TOPDIR}/tmp-sstatesamehash"
""")
@@ -603,6 +696,7 @@ TMPDIR = "${TOPDIR}/tmp-sstatesamehash"
bitbake("bash -S none")
oe.path.copytree(core_layer, copy_layer_2)
+ os.symlink(os.path.dirname(core_layer) + "/scripts", self.topdir + "/meta-copy2/scripts")
self.write_config("""
TMPDIR = "${TOPDIR}/tmp-sstatesamehash2"
""")
@@ -623,3 +717,292 @@ TMPDIR = "${TOPDIR}/tmp-sstatesamehash2"
self.maxDiff = None
self.assertCountEqual(files1, files2)
+class SStateFindSiginfo(SStateBase):
+ def test_sstate_compare_sigfiles_and_find_siginfo(self):
+ """
+ Test the functionality of find_siginfo: both direct calls and its use from the compare_sigfiles callback
+ """
+ self.write_config("""
+TMPDIR = \"${TOPDIR}/tmp-sstates-findsiginfo\"
+TCLIBCAPPEND = \"\"
+MACHINE = \"qemux86-64\"
+require conf/multilib.conf
+MULTILIBS = "multilib:lib32"
+DEFAULTTUNE:virtclass-multilib-lib32 = "x86"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
+""")
+ self.track_for_cleanup(self.topdir + "/tmp-sstates-findsiginfo")
+
+ pns = ["binutils", "binutils-native", "lib32-binutils"]
+ target_configs = [
+"""
+TMPVAL1 = "tmpval1"
+TMPVAL2 = "tmpval2"
+do_tmptask1() {
+ echo ${TMPVAL1}
+}
+do_tmptask2() {
+ echo ${TMPVAL2}
+}
+addtask do_tmptask1
+addtask tmptask2 before do_tmptask1
+""",
+"""
+TMPVAL3 = "tmpval3"
+TMPVAL4 = "tmpval4"
+do_tmptask1() {
+ echo ${TMPVAL3}
+}
+do_tmptask2() {
+ echo ${TMPVAL4}
+}
+addtask do_tmptask1
+addtask tmptask2 before do_tmptask1
+"""
+ ]
+
+ for target_config in target_configs:
+ self.write_recipeinc("binutils", target_config)
+ for pn in pns:
+ bitbake("%s -c do_tmptask1 -S none" % pn)
+ self.delete_recipeinc("binutils")
+
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=True)
+
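+ # Query the bitbake server for matching siginfo files via the findSigInfo command,
+ # waiting for the FindSigInfoResult event before returning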
+ def find_siginfo(pn, taskname, sigs=None):
+ result = None
+ command_complete = False
+ tinfoil.set_event_mask(["bb.event.FindSigInfoResult",
+ "bb.command.CommandCompleted"])
+ ret = tinfoil.run_command("findSigInfo", pn, taskname, sigs)
+ if ret:
+ while result is None or not command_complete:
+ event = tinfoil.wait_event(1)
+ if event:
+ if isinstance(event, bb.command.CommandCompleted):
+ command_complete = True
+ elif isinstance(event, bb.event.FindSigInfoResult):
+ result = event.result
+ return result
+
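+ # Callback used by compare_sigfiles: for each differing dependency hash pair, look up
+ # both siginfo files and recurse into them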
+ def recursecb(key, hash1, hash2):
+ nonlocal recursecb_count
+ recursecb_count += 1
+ hashes = [hash1, hash2]
+ hashfiles = find_siginfo(key, None, hashes)
+ self.assertCountEqual(hashes, hashfiles)
+ bb.siggen.compare_sigfiles(hashfiles[hash1]['path'], hashfiles[hash2]['path'], recursecb)
+
+ for pn in pns:
+ recursecb_count = 0
+ matches = find_siginfo(pn, "do_tmptask1")
+ self.assertGreaterEqual(len(matches), 2)
+ latesthashes = sorted(matches.keys(), key=lambda h: matches[h]['time'])[-2:]
+ bb.siggen.compare_sigfiles(matches[latesthashes[-2]]['path'], matches[latesthashes[-1]]['path'], recursecb)
+ self.assertEqual(recursecb_count,1)
+
+class SStatePrintdiff(SStateBase):
+ def run_test_printdiff_changerecipe(self, target, change_recipe, change_bbtask, change_content, expected_sametmp_output, expected_difftmp_output):
+ import time
+ self.write_config("""
+TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-sametmp-{}"
+""".format(time.time()))
+ # Use runall do_build to ensure any indirect sstate is created, e.g. tzcode-native on both x86 and
+ # aarch64 hosts since only allarch target recipes depend upon it and it may not be built otherwise.
+ # A bitbake -c cleansstate tzcode-native would cause some of these tests to error for example.
+ bitbake("--runall build --runall deploy_source_date_epoch {}".format(target))
+ bitbake("-S none {}".format(target))
+ bitbake(change_bbtask)
+ self.write_recipeinc(change_recipe, change_content)
+ result_sametmp = bitbake("-S printdiff {}".format(target))
+
+ self.write_config("""
+TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-difftmp-{}"
+""".format(time.time()))
+ result_difftmp = bitbake("-S printdiff {}".format(target))
+
+ self.delete_recipeinc(change_recipe)
+ for item in expected_sametmp_output:
+ self.assertIn(item, result_sametmp.output, msg = "Item {} not found in output:\n{}".format(item, result_sametmp.output))
+ for item in expected_difftmp_output:
+ self.assertIn(item, result_difftmp.output, msg = "Item {} not found in output:\n{}".format(item, result_difftmp.output))
+
+ def run_test_printdiff_changeconfig(self, target, change_bbtasks, change_content, expected_sametmp_output, expected_difftmp_output):
+ import time
+ self.write_config("""
+TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-sametmp-{}"
+""".format(time.time()))
+ bitbake("--runall build --runall deploy_source_date_epoch {}".format(target))
+ bitbake("-S none {}".format(target))
+ bitbake(" ".join(change_bbtasks))
+ self.append_config(change_content)
+ result_sametmp = bitbake("-S printdiff {}".format(target))
+
+ self.write_config("""
+TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-difftmp-{}"
+""".format(time.time()))
+ self.append_config(change_content)
+ result_difftmp = bitbake("-S printdiff {}".format(target))
+
+ for item in expected_sametmp_output:
+ self.assertIn(item, result_sametmp.output, msg = "Item {} not found in output:\n{}".format(item, result_sametmp.output))
+ for item in expected_difftmp_output:
+ self.assertIn(item, result_difftmp.output, msg = "Item {} not found in output:\n{}".format(item, result_difftmp.output))
+
+
+ # Check that printdiff walks the full dependency chain from the image target down to the specific recipe where the change was made
+ def test_image_minimal_vs_perlcross(self):
+ expected_output = ("Task perlcross-native:do_install couldn't be used from the cache because:",
+"We need hash",
+"most recent matching task was")
+ expected_sametmp_output = expected_output + (
+"Variable do_install value changed",
+'+ echo "this changes the task signature"')
+ expected_difftmp_output = expected_output
+
+ self.run_test_printdiff_changerecipe("core-image-minimal", "perlcross", "-c do_install perlcross-native",
+"""
+do_install:append() {
+ echo "this changes the task signature"
+}
+""",
+expected_sametmp_output, expected_difftmp_output)
+
+ # Check if changes to gcc-source (which uses tmp/work-shared) are correctly discovered
+ def test_gcc_runtime_vs_gcc_source(self):
+ gcc_source_pn = 'gcc-source-%s' % get_bb_vars(['PV'], 'gcc')['PV']
+
+ expected_output = ("Task {}:do_preconfigure couldn't be used from the cache because:".format(gcc_source_pn),
+"We need hash",
+"most recent matching task was")
+ expected_sametmp_output = expected_output + (
+"Variable do_preconfigure value changed",
+'+ print("this changes the task signature")')
+ expected_difftmp_output = expected_output
+
+ self.run_test_printdiff_changerecipe("gcc-runtime", "gcc-source", "-c do_preconfigure {}".format(gcc_source_pn),
+"""
+python do_preconfigure:append() {
+ print("this changes the task signature")
+}
+""",
+expected_sametmp_output, expected_difftmp_output)
+
+ # Check if changing a base task definition (base_do_configure) is reported against multiple core recipes using it
+ def test_image_minimal_vs_base_do_configure(self):
+ change_bbtasks = ('zstd-native:do_configure',
+'texinfo-dummy-native:do_configure',
+'ldconfig-native:do_configure',
+'gettext-minimal-native:do_configure',
+'tzcode-native:do_configure',
+'makedevs-native:do_configure',
+'pigz-native:do_configure',
+'update-rc.d-native:do_configure',
+'unzip-native:do_configure',
+'gnu-config-native:do_configure')
+
+ expected_output = ["Task {} couldn't be used from the cache because:".format(t) for t in change_bbtasks] + [
+"We need hash",
+"most recent matching task was"]
+
+ expected_sametmp_output = expected_output + [
+"Variable base_do_configure value changed",
+'+ echo "this changes base_do_configure() definiton "']
+ expected_difftmp_output = expected_output
+
+ self.run_test_printdiff_changeconfig("core-image-minimal",change_bbtasks,
+"""
+INHERIT += "base-do-configure-modified"
+""",
+expected_sametmp_output, expected_difftmp_output)
+
+@OETestTag("yocto-mirrors")
+class SStateMirrors(SStateBase):
+ def check_bb_output(self, output, exceptions, check_cdn):
+ def is_exception(object, exceptions):
+ for e in exceptions:
+ if re.search(e, object):
+ return True
+ return False
+
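+ # Extract the number of missed sstate objects from bitbake's 'Sstate summary' line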
+ output_l = output.splitlines()
+ for l in output_l:
+ if l.startswith("Sstate summary"):
+ for idx, item in enumerate(l.split()):
+ if item == 'Missed':
+ missing_objects = int(l.split()[idx+1])
+ break
+ else:
+ self.fail("Did not find missing objects amount in sstate summary: {}".format(l))
+ break
+ else:
+ self.fail("Did not find 'Sstate summary' line in bitbake output")
+
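+ # Collect the missing objects reported by the fetch test, drop known exceptions, and keep
+ # fetcher diagnostics for the failure messages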
+ failed_urls = []
+ failed_urls_extrainfo = []
+ for l in output_l:
+ if "SState: Unsuccessful fetch test for" in l and check_cdn:
+ missing_object = l.split()[6]
+ elif "SState: Looked for but didn't find file" in l and not check_cdn:
+ missing_object = l.split()[8]
+ else:
+ missing_object = None
+ if missing_object:
+ if not is_exception(missing_object, exceptions):
+ failed_urls.append(missing_object)
+ else:
+ missing_objects -= 1
+
+ if "urlopen failed for" in l and not is_exception(l, exceptions):
+ failed_urls_extrainfo.append(l)
+
+ self.assertEqual(len(failed_urls), missing_objects, "Amount of reported missing objects does not match failed URLs: {}\nFailed URLs:\n{}\nFetcher diagnostics:\n{}".format(missing_objects, "\n".join(failed_urls), "\n".join(failed_urls_extrainfo)))
+ self.assertEqual(len(failed_urls), 0, "Missing objects in the cache:\n{}\nFetcher diagnostics:\n{}".format("\n".join(failed_urls), "\n".join(failed_urls_extrainfo)))
+
+ def run_test(self, machine, targets, exceptions, check_cdn = True, ignore_errors = False):
+ # sstate is checked for existence of these, but they never get written out to begin with
+ exceptions += ["{}.*image_qa".format(t) for t in targets.split()]
+ exceptions += ["{}.*deploy_source_date_epoch".format(t) for t in targets.split()]
+ exceptions += ["{}.*image_complete".format(t) for t in targets.split()]
+ exceptions += ["linux-yocto.*shared_workdir"]
+ # these get influenced by IMAGE_FSTYPES tweaks in yocto-autobuilder-helper's config.json (on x86-64);
+ # additionally, they depend on noexec (and therefore stamp-less) package, install, etc. image tasks,
+ # which makes tracing other changes difficult
+ exceptions += ["{}.*create_spdx".format(t) for t in targets.split()]
+ exceptions += ["{}.*create_runtime_spdx".format(t) for t in targets.split()]
+
+ if check_cdn:
+ self.config_sstate(True)
+ self.append_config("""
+MACHINE = "{}"
+BB_HASHSERVE_UPSTREAM = "hashserv.yocto.io:8687"
+SSTATE_MIRRORS ?= "file://.* http://cdn.jsdelivr.net/yocto/sstate/all/PATH;downloadfilename=PATH"
+""".format(machine))
+ else:
+ self.append_config("""
+MACHINE = "{}"
+""".format(machine))
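+ # Dry-run (-n) with extra debug (-DD) so sstate object availability is probed without building anything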
+ result = bitbake("-DD -n {}".format(targets))
+ bitbake("-S none {}".format(targets))
+ if ignore_errors:
+ return
+ self.check_bb_output(result.output, exceptions, check_cdn)
+
+ def test_cdn_mirror_qemux86_64(self):
+ exceptions = []
+ self.run_test("qemux86-64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, ignore_errors = True)
+ self.run_test("qemux86-64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions)
+
+ def test_cdn_mirror_qemuarm64(self):
+ exceptions = []
+ self.run_test("qemuarm64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, ignore_errors = True)
+ self.run_test("qemuarm64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions)
+
+ def test_local_cache_qemux86_64(self):
+ exceptions = []
+ self.run_test("qemux86-64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, check_cdn = False)
+
+ def test_local_cache_qemuarm64(self):
+ exceptions = []
+ self.run_test("qemuarm64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, check_cdn = False)
diff --git a/meta/lib/oeqa/selftest/cases/sysroot.py b/meta/lib/oeqa/selftest/cases/sysroot.py
index 315d1a61c2..ef854f6fee 100644
--- a/meta/lib/oeqa/selftest/cases/sysroot.py
+++ b/meta/lib/oeqa/selftest/cases/sysroot.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -35,3 +37,50 @@ TESTSTRING:pn-sysroot-test-arch1 = "%s"
TESTSTRING:pn-sysroot-test-arch2 = "%s"
""" % (uuid1, uuid2))
bitbake("sysroot-test")
+
+ def test_sysroot_max_shebang(self):
+ """
+ Summary: Check that the maximum shebang size check triggers. Confirms [YOCTO #11053] is closed.
+ Expected: Fail when a shebang bigger than the max shebang-size is reached.
+ Author: Paulo Neves <ptsneves@gmail.com>
+ """
+ expected = "maximum shebang size exceeded, the maximum size is 128. [shebang-size]"
+ res = bitbake("sysroot-shebang-test-native -c populate_sysroot", ignore_status=True)
+ self.assertTrue(expected in res.output, msg=res.output)
+ self.assertTrue(res.status != 0)
+
+ def test_sysroot_la(self):
+ """
+ Summary: Check that workdir paths are not contained in .la files.
+ Expected: Fail when a workdir path is found in the file content.
+ Author: Paulo Neves <ptsneves@gmail.com>
+ """
+ expected = "la-test.la failed sanity test (workdir) in path"
+
+ res = bitbake("sysroot-la-test -c populate_sysroot", ignore_status=True)
+ self.assertTrue(expected in res.output, msg=res.output)
+ self.assertTrue('[la]' in res.output, msg=res.output)
+ self.assertTrue(res.status != 0)
+
+ res = bitbake("sysroot-la-test-native -c populate_sysroot", ignore_status=True)
+ self.assertTrue(expected in res.output, msg=res.output)
+ self.assertTrue('[la]' in res.output, msg=res.output)
+ self.assertTrue(res.status != 0)
+
+ def test_sysroot_pkgconfig(self):
+ """
+ Summary: Check that tmpdir paths are not contained in .pc files.
+ Expected: Fail when a tmpdir path is found in the file content.
+ Author: Paulo Neves <ptsneves@gmail.com>
+ """
+ expected = "test.pc failed sanity test (tmpdir) in path"
+
+ res = bitbake("sysroot-pc-test -c populate_sysroot", ignore_status=True)
+ self.assertTrue('[pkgconfig]' in res.output, msg=res.output)
+ self.assertTrue(expected in res.output, msg=res.output)
+ self.assertTrue(res.status != 0)
+
+ res = bitbake("sysroot-pc-test-native -c populate_sysroot", ignore_status=True)
+ self.assertTrue(expected in res.output, msg=res.output)
+ self.assertTrue('[pkgconfig]' in res.output, msg=res.output)
+ self.assertTrue(res.status != 0)
diff --git a/meta/lib/oeqa/selftest/cases/tinfoil.py b/meta/lib/oeqa/selftest/cases/tinfoil.py
index c81d56d82b..21c8686b2a 100644
--- a/meta/lib/oeqa/selftest/cases/tinfoil.py
+++ b/meta/lib/oeqa/selftest/cases/tinfoil.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -46,6 +48,17 @@ class TinfoilTests(OESelftestTestCase):
rd = tinfoil.parse_recipe_file(best[3])
self.assertEqual(testrecipe, rd.getVar('PN'))
+ def test_parse_virtual_recipe(self):
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=False, quiet=2)
+ testrecipe = 'nativesdk-gcc'
+ best = tinfoil.find_best_provider(testrecipe)
+ if not best:
+ self.fail('Unable to find recipe providing %s' % testrecipe)
+ rd = tinfoil.parse_recipe_file(best[3])
+ self.assertEqual(testrecipe, rd.getVar('PN'))
+ self.assertIsNotNone(rd.getVar('FILE_LAYERNAME'))
+
def test_parse_recipe_copy_expand(self):
with bb.tinfoil.Tinfoil() as tinfoil:
tinfoil.prepare(config_only=False, quiet=2)
@@ -64,6 +77,32 @@ class TinfoilTests(OESelftestTestCase):
localdata.setVar('PN', 'hello')
self.assertEqual('hello', localdata.getVar('BPN'))
+ # The config_data API to parse_recipe_file is used by:
+ # layerindex-web layerindex/update_layer.py
+ def test_parse_recipe_custom_data(self):
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=False, quiet=2)
+ localdata = bb.data.createCopy(tinfoil.config_data)
+ localdata.setVar("TESTVAR", "testval")
+ testrecipe = 'mdadm'
+ best = tinfoil.find_best_provider(testrecipe)
+ if not best:
+ self.fail('Unable to find recipe providing %s' % testrecipe)
+ rd = tinfoil.parse_recipe_file(best[3], config_data=localdata)
+ self.assertEqual("testval", rd.getVar('TESTVAR'))
+
+ def test_parse_virtual_recipe_custom_data(self):
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=False, quiet=2)
+ localdata = bb.data.createCopy(tinfoil.config_data)
+ localdata.setVar("TESTVAR", "testval")
+ testrecipe = 'nativesdk-gcc'
+ best = tinfoil.find_best_provider(testrecipe)
+ if not best:
+ self.fail('Unable to find recipe providing %s' % testrecipe)
+ rd = tinfoil.parse_recipe_file(best[3], config_data=localdata)
+ self.assertEqual("testval", rd.getVar('TESTVAR'))
+
def test_list_recipes(self):
with bb.tinfoil.Tinfoil() as tinfoil:
tinfoil.prepare(config_only=False, quiet=2)
diff --git a/meta/lib/oeqa/selftest/cases/usergrouptests.py b/meta/lib/oeqa/selftest/cases/usergrouptests.py
new file mode 100644
index 0000000000..3c59b0f290
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/usergrouptests.py
@@ -0,0 +1,57 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import shutil
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+from oeqa.utils.commands import bitbake, get_bb_var, get_test_layer
+
+class UserGroupTests(OESelftestTestCase):
+ def test_group_from_dep_package(self):
+ self.logger.info("Building creategroup2")
+ bitbake(' creategroup2 creategroup1')
+ bitbake(' creategroup2 creategroup1 -c clean')
+ self.logger.info("Packaging creategroup2")
+ self.assertTrue(bitbake(' creategroup2 -c package'))
+
+ def test_add_task_between_p_sysroot_and_package(self):
+ # Test for YOCTO #14961
+ self.assertTrue(bitbake('useraddbadtask -C fetch'))
+
+ def test_postinst_order(self):
+ self.logger.info("Building dcreategroup")
+ self.assertTrue(bitbake(' dcreategroup'))
+
+ def test_static_useradd_from_dynamic(self):
+ metaselftestpath = get_test_layer()
+ self.logger.info("Building core-image-minimal to generate passwd/group file")
+ bitbake(' core-image-minimal')
+ self.logger.info("Setting up useradd-staticids")
+ repropassdir = os.path.join(metaselftestpath, "conf/include")
+ os.makedirs(repropassdir)
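+ # Locate the /etc directory of the core-image-minimal rootfs built above so its passwd/group
+ # files can be reused as static ID tables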
+ etcdir = os.path.join(get_bb_var("TMPDIR"), "work",
+ get_bb_var("MACHINE").replace("-", "_") + "-poky-linux",
+ "core-image-minimal/1.0/rootfs/etc")
+ shutil.copy(os.path.join(etcdir, "passwd"), os.path.join(repropassdir, "reproducible-passwd"))
+ shutil.copy(os.path.join(etcdir, "group"), os.path.join(repropassdir, "reproducible-group"))
+ # Copy the original local.conf
+ shutil.copyfile(os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf'), os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf.orig'))
+
+ self.write_config("USERADDEXTENSION = \"useradd-staticids\"")
+ self.write_config("USERADD_ERROR_DYNAMIC ??= \"error\"")
+ self.write_config("USERADD_UID_TABLES += \"conf/include/reproducible-passwd\"")
+ self.write_config("USERADD_GID_TABLES += \"conf/include/reproducible-group\"")
+ self.logger.info("Rebuild with staticids")
+ bitbake(' core-image-minimal')
+ shutil.copyfile(os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf.orig'), os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf'))
+ self.logger.info("Rebuild without staticids")
+ bitbake(' core-image-minimal')
+ self.write_config("USERADDEXTENSION = \"useradd-staticids\"")
+ self.write_config("USERADD_ERROR_DYNAMIC ??= \"error\"")
+ self.write_config("USERADD_UID_TABLES += \"files/static-passwd\"")
+ self.write_config("USERADD_GID_TABLES += \"files/static-group\"")
+ self.logger.info("Rebuild with other staticids")
+ self.assertTrue(bitbake(' core-image-minimal'))
diff --git a/meta/lib/oeqa/selftest/cases/wic.py b/meta/lib/oeqa/selftest/cases/wic.py
index de74c07a03..b616759209 100644
--- a/meta/lib/oeqa/selftest/cases/wic.py
+++ b/meta/lib/oeqa/selftest/cases/wic.py
@@ -15,33 +15,15 @@ import hashlib
from glob import glob
from shutil import rmtree, copy
-from functools import wraps, lru_cache
from tempfile import NamedTemporaryFile
+from tempfile import TemporaryDirectory
from oeqa.selftest.case import OESelftestTestCase
from oeqa.core.decorator import OETestTag
+from oeqa.core.decorator.data import skipIfNotArch
from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu
-@lru_cache()
-def get_host_arch():
- return get_bb_var('HOST_ARCH')
-
-
-def only_for_arch(archs):
- """Decorator for wrapping test cases that can be run only for specific target
- architectures. A list of compatible architectures is passed in `archs`.
- """
- def wrapper(func):
- @wraps(func)
- def wrapped_f(*args, **kwargs):
- arch = get_host_arch()
- if archs and arch not in archs:
- raise unittest.SkipTest("Testcase arch dependency not met: %s" % arch)
- return func(*args, **kwargs)
- return wrapped_f
- return wrapper
-
def extract_files(debugfs_output):
"""
extract file names from the output of debugfs -R 'ls -p',
@@ -165,20 +147,87 @@ class CLITests(OESelftestTestCase):
self.assertEqual(1, runCmd('wic', ignore_status=True).status)
class Wic(WicTestCase):
+ def test_skip_kernel_install(self):
+ """Test that the bootimg-efi wic plugin can skip installing the kernel into the boot partition"""
+ # create a temporary file for the WKS content
+ with NamedTemporaryFile("w", suffix=".wks") as wks:
+ wks.write(
+ 'part --source bootimg-efi '
+ '--sourceparams="loader=grub-efi,install-kernel-into-boot-dir=false" '
+ '--label boot --active\n'
+ )
+ wks.flush()
+ # create a temporary directory to extract the disk image to
+ with TemporaryDirectory() as tmpdir:
+ img = 'core-image-minimal'
+ # build the image using the WKS file
+ cmd = "wic create %s -e %s -o %s" % (
+ wks.name, img, self.resultdir)
+ runCmd(cmd)
+ wksname = os.path.splitext(os.path.basename(wks.name))[0]
+ out = glob(os.path.join(
+ self.resultdir, "%s-*.direct" % wksname))
+ self.assertEqual(1, len(out))
+ sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
+ # extract the content of the disk image to the temporary directory
+ cmd = "wic cp %s:1 %s -n %s" % (out[0], tmpdir, sysroot)
+ runCmd(cmd)
+ # check if the kernel is installed or not
+ kimgtype = get_bb_var('KERNEL_IMAGETYPE', img)
+ for file in os.listdir(tmpdir):
+ if file == kimgtype:
+ raise AssertionError(
+ "The kernel image '{}' was found in the partition".format(kimgtype)
+ )
+
+ def test_kernel_install(self):
+ """Test that the bootimg-efi wic plugin installs the kernel into the boot partition"""
+ # create a temporary file for the WKS content
+ with NamedTemporaryFile("w", suffix=".wks") as wks:
+ wks.write(
+ 'part --source bootimg-efi '
+ '--sourceparams="loader=grub-efi,install-kernel-into-boot-dir=true" '
+ '--label boot --active\n'
+ )
+ wks.flush()
+ # create a temporary directory to extract the disk image to
+ with TemporaryDirectory() as tmpdir:
+ img = 'core-image-minimal'
+ # build the image using the WKS file
+ cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
+ runCmd(cmd)
+ wksname = os.path.splitext(os.path.basename(wks.name))[0]
+ out = glob(os.path.join(self.resultdir, "%s-*.direct" % wksname))
+ self.assertEqual(1, len(out))
+ sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
+ # extract the content of the disk image to the temporary directory
+ cmd = "wic cp %s:1 %s -n %s" % (out[0], tmpdir, sysroot)
+ runCmd(cmd)
+ # check if the kernel is installed or not
+ kimgtype = get_bb_var('KERNEL_IMAGETYPE', img)
+ found = False
+ for file in os.listdir(tmpdir):
+ if file == kimgtype:
+ found = True
+ break
+ self.assertTrue(
+ found, "The kernel image '{}' was not found in the boot partition".format(kimgtype)
+ )
+
def test_build_image_name(self):
"""Test wic create wictestdisk --image-name=core-image-minimal"""
cmd = "wic create wictestdisk --image-name=core-image-minimal -o %s" % self.resultdir
runCmd(cmd)
self.assertEqual(1, len(glob(os.path.join (self.resultdir, "wictestdisk-*.direct"))))
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_gpt_image(self):
"""Test creation of core-image-minimal with gpt table and UUID boot"""
cmd = "wic create directdisk-gpt --image-name core-image-minimal -o %s" % self.resultdir
runCmd(cmd)
self.assertEqual(1, len(glob(os.path.join(self.resultdir, "directdisk-*.direct"))))
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_iso_image(self):
"""Test creation of hybrid iso image with legacy and EFI boot"""
config = 'INITRAMFS_IMAGE = "core-image-minimal-initramfs"\n'\
@@ -192,21 +241,21 @@ class Wic(WicTestCase):
self.assertEqual(1, len(glob(os.path.join(self.resultdir, "HYBRID_ISO_IMG-*.direct"))))
self.assertEqual(1, len(glob(os.path.join (self.resultdir, "HYBRID_ISO_IMG-*.iso"))))
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_qemux86_directdisk(self):
"""Test creation of qemux-86-directdisk image"""
cmd = "wic create qemux86-directdisk -e core-image-minimal -o %s" % self.resultdir
runCmd(cmd)
self.assertEqual(1, len(glob(os.path.join(self.resultdir, "qemux86-directdisk-*direct"))))
- @only_for_arch(['i586', 'i686', 'x86_64', 'aarch64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64', 'aarch64'])
def test_mkefidisk(self):
"""Test creation of mkefidisk image"""
cmd = "wic create mkefidisk -e core-image-minimal -o %s" % self.resultdir
runCmd(cmd)
self.assertEqual(1, len(glob(os.path.join(self.resultdir, "mkefidisk-*direct"))))
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_bootloader_config(self):
"""Test creation of directdisk-bootloader-config image"""
config = 'DEPENDS:pn-core-image-minimal += "syslinux"\n'
@@ -217,7 +266,7 @@ class Wic(WicTestCase):
runCmd(cmd)
self.assertEqual(1, len(glob(os.path.join(self.resultdir, "directdisk-bootloader-config-*direct"))))
- @only_for_arch(['i586', 'i686', 'x86_64', 'aarch64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64', 'aarch64'])
def test_systemd_bootdisk(self):
"""Test creation of systemd-bootdisk image"""
config = 'MACHINE_FEATURES:append = " efi"\n'
@@ -248,7 +297,7 @@ class Wic(WicTestCase):
self.assertEqual(1, len(glob(os.path.join(self.resultdir, "sdimage-bootpart-*direct"))))
# TODO this doesn't have to be x86-specific
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_default_output_dir(self):
"""Test default output location"""
for fname in glob("directdisk-*.direct"):
@@ -261,7 +310,7 @@ class Wic(WicTestCase):
runCmd(cmd)
self.assertEqual(1, len(glob("directdisk-*.direct")))
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_build_artifacts(self):
"""Test wic create directdisk providing all artifacts."""
bb_vars = get_bb_vars(['STAGING_DATADIR', 'RECIPE_SYSROOT_NATIVE'],
@@ -353,7 +402,7 @@ class Wic(WicTestCase):
self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
# TODO this doesn't have to be x86-specific
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_rootfs_indirect_recipes(self):
"""Test usage of rootfs plugin with rootfs recipes"""
runCmd("wic create directdisk-multi-rootfs "
@@ -364,7 +413,7 @@ class Wic(WicTestCase):
self.assertEqual(1, len(glob(os.path.join(self.resultdir, "directdisk-multi-rootfs*.direct"))))
# TODO this doesn't have to be x86-specific
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_rootfs_artifacts(self):
"""Test usage of rootfs plugin with rootfs paths"""
bb_vars = get_bb_vars(['STAGING_DATADIR', 'RECIPE_SYSROOT_NATIVE'],
@@ -748,7 +797,48 @@ part /etc --source rootfs --fstype=ext4 --change-directory=etc
wicout = glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))
self.assertEqual(1, len(wicout))
size = os.path.getsize(wicout[0])
- self.assertTrue(size > extraspace)
+ self.assertTrue(size > extraspace, msg="Extra space not present (%s vs %s)" % (size, extraspace))
+
+ def test_no_table(self):
+ """Test --no-table wks option."""
+ wks_file = 'temp.wks'
+
+ # Use an absolute --offset so the final image size is deterministic (4080k offset + 16k partition = 4096k).
+ with open(wks_file, 'w') as wks:
+ wks.write("part testspace --no-table --fixed-size 16k --offset 4080k")
+ runCmd("wic create %s --image-name core-image-minimal -o %s" % (wks_file, self.resultdir))
+
+ wicout = glob(os.path.join(self.resultdir, "*.*"))
+
+ self.assertEqual(1, len(wicout))
+ size = os.path.getsize(wicout[0])
+ self.assertEqual(size, 4 * 1024 * 1024)
+
+ os.remove(wks_file)
+
+ def test_partition_hidden_attributes(self):
+ """Test --hidden wks option."""
+ wks_file = 'temp.wks'
+ sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
+ try:
+ with open(wks_file, 'w') as wks:
+ wks.write("""
+part / --source rootfs --fstype=ext4
+part / --source rootfs --fstype=ext4 --hidden
+bootloader --ptable gpt""")
+
+ runCmd("wic create %s -e core-image-minimal -o %s" \
+ % (wks_file, self.resultdir))
+ wicout = os.path.join(self.resultdir, "*.direct")
+
+ result = runCmd("%s/usr/sbin/sfdisk --part-attrs %s 1" % (sysroot, wicout))
+ self.assertEqual('', result.output)
+ result = runCmd("%s/usr/sbin/sfdisk --part-attrs %s 2" % (sysroot, wicout))
+ self.assertEqual('RequiredPartition', result.output)
+
+ finally:
+ os.remove(wks_file)
+
class Wic2(WicTestCase):
@@ -775,7 +865,7 @@ class Wic2(WicTestCase):
basename = bb_vars['IMAGE_BASENAME']
self.assertEqual(basename, image)
path = os.path.join(imgdatadir, basename) + '.env'
- self.assertTrue(os.path.isfile(path))
+ self.assertTrue(os.path.isfile(path), msg="File %s wasn't generated as expected" % path)
wicvars = set(bb_vars['WICVARS'].split())
# filter out optional variables
@@ -788,7 +878,7 @@ class Wic2(WicTestCase):
# test if variables used by wic present in the .env file
for var in wicvars:
self.assertTrue(var in content, "%s is not in .env file" % var)
- self.assertTrue(content[var])
+ self.assertTrue(content[var], "%s doesn't have a value (%s)" % (var, content[var]))
def test_image_vars_dir_short(self):
"""Test image vars directory selection -v option"""
@@ -817,27 +907,29 @@ class Wic2(WicTestCase):
self.resultdir))
self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct"))))
- @only_for_arch(['i586', 'i686', 'x86_64', 'aarch64'])
+ # TODO this test could also work on aarch64
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_wic_image_type(self):
"""Test building wic images by bitbake"""
config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\
'MACHINE_FEATURES:append = " efi"\n'
self.append_config(config)
- bitbake('wic-image-minimal')
+ image = 'wic-image-minimal'
+ bitbake(image)
self.remove_config(config)
- deploy_dir = get_bb_var('DEPLOY_DIR_IMAGE')
- machine = self.td['MACHINE']
- prefix = os.path.join(deploy_dir, 'wic-image-minimal-%s.' % machine)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+ prefix = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.' % bb_vars['IMAGE_LINK_NAME'])
+
# check if we have result image and manifests symlinks
# pointing to existing files
for suffix in ('wic', 'manifest'):
path = prefix + suffix
- self.assertTrue(os.path.islink(path))
- self.assertTrue(os.path.isfile(os.path.realpath(path)))
+ self.assertTrue(os.path.islink(path), msg="Link %s wasn't generated as expected" % path)
+ self.assertTrue(os.path.isfile(os.path.realpath(path)), msg="File linked to by %s wasn't generated as expected" % path)
# TODO this should work on aarch64
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
@OETestTag("runqemu")
def test_qemu(self):
"""Test wic-image-minimal under qemu"""
@@ -853,12 +945,12 @@ class Wic2(WicTestCase):
status, output = qemu.run_serial(cmd)
self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
self.assertEqual(output, '4')
- cmd = "grep UUID= /etc/fstab"
+ cmd = "grep UUID=2c71ef06-a81d-4735-9d3a-379b69c6bdba /etc/fstab"
status, output = qemu.run_serial(cmd)
self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
self.assertEqual(output, 'UUID=2c71ef06-a81d-4735-9d3a-379b69c6bdba\t/media\text4\tdefaults\t0\t0')
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
@OETestTag("runqemu")
def test_qemu_efi(self):
"""Test core-image-minimal efi image under qemu"""
@@ -1042,7 +1134,8 @@ class Wic2(WicTestCase):
size = int(size[:-3])
self.assertGreaterEqual(size, 204800)
- @only_for_arch(['i586', 'i686', 'x86_64', 'aarch64'])
+ # TODO this test could also work on aarch64
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
@OETestTag("runqemu")
def test_rawcopy_plugin_qemu(self):
"""Test rawcopy plugin in qemu"""
@@ -1050,9 +1143,13 @@ class Wic2(WicTestCase):
config = 'IMAGE_FSTYPES = "ext4"\n'
self.append_config(config)
bitbake('core-image-minimal')
+ image_link_name = get_bb_var('IMAGE_LINK_NAME', 'core-image-minimal')
self.remove_config(config)
- config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_rawcopy_plugin.wks.in"\n'
+ config = 'IMAGE_FSTYPES = "wic"\n' \
+ 'IMAGE_LINK_NAME_CORE_IMAGE_MINIMAL = "%s"\n'\
+ 'WKS_FILE = "test_rawcopy_plugin.wks.in"\n'\
+ % image_link_name
self.append_config(config)
bitbake('core-image-minimal-mtdutils')
self.remove_config(config)
@@ -1066,14 +1163,14 @@ class Wic2(WicTestCase):
def _rawcopy_plugin(self, fstype):
"""Test rawcopy plugin"""
- img = 'core-image-minimal'
- machine = self.td["MACHINE"]
+ image = 'core-image-minimal'
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
params = ',unpack' if fstype.endswith('.gz') else ''
with NamedTemporaryFile("w", suffix=".wks") as wks:
- wks.write('part / --source rawcopy --sourceparams="file=%s-%s.%s%s"\n'\
- % (img, machine, fstype, params))
+ wks.write('part / --source rawcopy --sourceparams="file=%s.%s%s"\n'\
+ % (bb_vars['IMAGE_LINK_NAME'], fstype, params))
wks.flush()
- cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
+ cmd = "wic create %s -e %s -o %s" % (wks.name, image, self.resultdir)
runCmd(cmd)
wksname = os.path.splitext(os.path.basename(wks.name))[0]
out = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
@@ -1094,13 +1191,12 @@ class Wic2(WicTestCase):
"""Test empty plugin"""
config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_empty_plugin.wks"\n'
self.append_config(config)
- bitbake('core-image-minimal')
+ image = 'core-image-minimal'
+ bitbake(image)
self.remove_config(config)
- deploy_dir = get_bb_var('DEPLOY_DIR_IMAGE')
- machine = self.td['MACHINE']
-
- image_path = os.path.join(deploy_dir, 'core-image-minimal-%s.wic' % machine)
- self.assertTrue(os.path.exists(image_path))
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+ image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.wic' % bb_vars['IMAGE_LINK_NAME'])
+ self.assertTrue(os.path.exists(image_path), msg="Image file %s wasn't generated as expected" % image_path)
sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
@@ -1109,7 +1205,7 @@ class Wic2(WicTestCase):
result = runCmd("wic ls %s -n %s | awk -F ' ' '{print $1 \" \" $5}' | grep '^2' | wc -w" % (image_path, sysroot))
self.assertEqual('1', result.output)
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
@OETestTag("runqemu")
def test_biosplusefi_plugin_qemu(self):
"""Test biosplusefi plugin in qemu"""
@@ -1142,7 +1238,7 @@ class Wic2(WicTestCase):
self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
self.assertEqual(output, '*')
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_biosplusefi_plugin(self):
"""Test biosplusefi plugin"""
# Wic generation below may fail depending on the order of the unittests
@@ -1168,8 +1264,28 @@ class Wic2(WicTestCase):
out = glob(os.path.join(self.resultdir, "%s-*.direct" % wksname))
self.assertEqual(1, len(out))
+ @skipIfNotArch(['i586', 'i686', 'x86_64', 'aarch64'])
+ def test_uefi_kernel(self):
+ """ Test uefi-kernel in wic """
+ config = 'IMAGE_EFI_BOOT_FILES="/etc/fstab;testfile"\nIMAGE_FSTYPES = "wic"\nWKS_FILE = "test_uefikernel.wks"\nMACHINE_FEATURES:append = " efi"\n'
+ self.append_config(config)
+ bitbake('core-image-minimal')
+ self.remove_config(config)
+
+ img = 'core-image-minimal'
+ with NamedTemporaryFile("w", suffix=".wks") as wks:
+ wks.writelines(['part /boot --source bootimg-efi --sourceparams="loader=uefi-kernel"\n',
+ 'part / --source rootfs --fstype=ext4 --align 1024 --use-uuid\n',
+ 'bootloader --timeout=0 --append="console=ttyS0,115200n8"\n'])
+ wks.flush()
+ cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
+ runCmd(cmd)
+ wksname = os.path.splitext(os.path.basename(wks.name))[0]
+ out = glob(os.path.join(self.resultdir, "%s-*.direct" % wksname))
+ self.assertEqual(1, len(out))
+
# TODO this test could also work on aarch64
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
@OETestTag("runqemu")
def test_efi_plugin_unified_kernel_image_qemu(self):
"""Test efi plugin's Unified Kernel Image feature in qemu"""
@@ -1287,19 +1403,19 @@ class Wic2(WicTestCase):
out = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
self.assertEqual(1, len(out))
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
@OETestTag("runqemu")
def test_expand_mbr_image(self):
"""Test wic write --expand command for mbr image"""
# build an image
config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "directdisk.wks"\n'
self.append_config(config)
- bitbake('core-image-minimal')
+ image = 'core-image-minimal'
+ bitbake(image)
# get path to the image
- deploy_dir = get_bb_var('DEPLOY_DIR_IMAGE')
- machine = self.td['MACHINE']
- image_path = os.path.join(deploy_dir, 'core-image-minimal-%s.wic' % machine)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+ image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.wic' % bb_vars['IMAGE_LINK_NAME'])
self.remove_config(config)
@@ -1307,7 +1423,7 @@ class Wic2(WicTestCase):
# expand image to 1G
new_image_path = None
with NamedTemporaryFile(mode='wb', suffix='.wic.exp',
- dir=deploy_dir, delete=False) as sparse:
+ dir=bb_vars['DEPLOY_DIR_IMAGE'], delete=False) as sparse:
sparse.truncate(1024 ** 3)
new_image_path = sparse.name
@@ -1321,11 +1437,11 @@ class Wic2(WicTestCase):
orig_sizes = [int(line.split()[3]) for line in orig.output.split('\n')[1:]]
exp_sizes = [int(line.split()[3]) for line in exp.output.split('\n')[1:]]
self.assertEqual(orig_sizes[0], exp_sizes[0]) # first partition is not resized
- self.assertTrue(orig_sizes[1] < exp_sizes[1])
+ self.assertTrue(orig_sizes[1] < exp_sizes[1], msg="Partition size wasn't enlarged (%s vs %s)" % (orig_sizes[1], exp_sizes[1]))
# Check if all free space is partitioned
result = runCmd("%s/usr/sbin/sfdisk -F %s" % (sysroot, new_image_path))
- self.assertTrue("0 B, 0 bytes, 0 sectors" in result.output)
+ self.assertIn("0 B, 0 bytes, 0 sectors", result.output)
os.rename(image_path, image_path + '.bak')
os.rename(new_image_path, image_path)
@@ -1341,6 +1457,68 @@ class Wic2(WicTestCase):
if os.path.exists(image_path + '.bak'):
os.rename(image_path + '.bak', image_path)
+ def test_gpt_partition_name(self):
+ """Test --part-name argument to set partition name in GPT table"""
+ config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "test_gpt_partition_name.wks"\n'
+ self.append_config(config)
+ image = 'core-image-minimal'
+ bitbake(image)
+ self.remove_config(config)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+ image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.wic' % bb_vars['IMAGE_LINK_NAME'])
+
+ sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
+
+ # Image is created
+ self.assertTrue(os.path.exists(image_path), "image file %s doesn't exist" % image_path)
+
+ # Check the names of the three partitions
+ # as listed in test_gpt_partition_name.wks
+ result = runCmd("%s/usr/sbin/sfdisk --part-label %s 1" % (sysroot, image_path))
+ self.assertEqual('boot-A', result.output)
+ result = runCmd("%s/usr/sbin/sfdisk --part-label %s 2" % (sysroot, image_path))
+ self.assertEqual('root-A', result.output)
+ # When --part-name is not defined, the partition name falls back to the --label value
+ result = runCmd("%s/usr/sbin/sfdisk --part-label %s 3" % (sysroot, image_path))
+ self.assertEqual('ext-space', result.output)
+
+ def test_empty_zeroize_plugin(self):
+ img = 'core-image-minimal'
+ expected_size = [ 1024*1024, # 1M
+ 512*1024, # 512K
+ 2*1024*1024] # 2M
+ # Check combination of sourceparams
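+ # Expected empty-plugin semantics: "fill" zero-fills the whole --fixed-size
+ # partition, "size=" writes that many zero bytes, and "bs=" sets the write block size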
+ with NamedTemporaryFile("w", suffix=".wks") as wks:
+ wks.writelines(
+ ['part empty --source empty --sourceparams="fill" --ondisk sda --fixed-size 1M\n',
+ 'part empty --source empty --sourceparams="size=512K" --ondisk sda --size 1M --align 1024\n',
+ 'part empty --source empty --sourceparams="size=2048k,bs=512K" --ondisk sda --size 4M --align 1024\n'
+ ])
+ wks.flush()
+ cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
+ runCmd(cmd)
+ wksname = os.path.splitext(os.path.basename(wks.name))[0]
+ wicout = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
+ # Skip the complete image and just look at the single partitions
+ for idx, value in enumerate(wicout[1:]):
+ self.logger.info(wicout[idx])
+ # Check if partitions are actually zeroized
+ with open(wicout[idx], mode="rb") as fd:
+ ba = bytearray(fd.read())
+ for b in ba:
+ self.assertEqual(b, 0)
+ self.assertEqual(expected_size[idx], os.path.getsize(wicout[idx]))
+
+ # Check the inconsistency check between the "fill" and "--size" parameters
+ with NamedTemporaryFile("w", suffix=".wks") as wks:
+ wks.writelines(['part empty --source empty --sourceparams="fill" --ondisk sda --size 1M\n'])
+ wks.flush()
+ cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
+ result = runCmd(cmd, ignore_status=True)
+ self.assertIn("Source parameter 'fill' only works with the '--fixed-size' option, exiting.", result.output)
+ self.assertNotEqual(0, result.status)
+
class ModifyTests(WicTestCase):
def test_wic_ls(self):
"""Test listing image content using 'wic ls'"""
@@ -1383,7 +1561,7 @@ class ModifyTests(WicTestCase):
# check if file is there
result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot))
self.assertEqual(7, len(result.output.split('\n')))
- self.assertTrue(os.path.basename(testfile.name) in result.output)
+ self.assertIn(os.path.basename(testfile.name), result.output)
# prepare directory
testdir = os.path.join(self.resultdir, 'wic-test-cp-dir')
@@ -1397,13 +1575,13 @@ class ModifyTests(WicTestCase):
# check if directory is there
result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot))
self.assertEqual(8, len(result.output.split('\n')))
- self.assertTrue(os.path.basename(testdir) in result.output)
+ self.assertIn(os.path.basename(testdir), result.output)
# copy the file from the partition and check if it success
dest = '%s-cp' % testfile.name
runCmd("wic cp %s:1/%s %s -n %s" % (images[0],
os.path.basename(testfile.name), dest, sysroot))
- self.assertTrue(os.path.exists(dest))
+ self.assertTrue(os.path.exists(dest), msg="File %s wasn't generated as expected" % dest)
def test_wic_rm(self):
@@ -1420,7 +1598,7 @@ class ModifyTests(WicTestCase):
# list directory content of the first partition
result = runCmd("wic ls %s:1 -n %s" % (images[0], sysroot))
- self.assertIn('\n%s ' % kerneltype.upper(), result.output)
+ self.assertIn('\n%s ' % kerneltype.upper(), result.output)
self.assertIn('\nEFI <DIR> ', result.output)
# remove file. EFI partitions are case-insensitive so exercise that too
@@ -1447,7 +1625,7 @@ class ModifyTests(WicTestCase):
# list directory content of the second ext4 partition
result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot))
self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset(
- set(line.split()[-1] for line in result.output.split('\n') if line)))
+ set(line.split()[-1] for line in result.output.split('\n') if line)), msg="Expected directories not present in %s" % result.output)
def test_wic_cp_ext(self):
"""Test copy files and directories to the ext partition."""
@@ -1462,7 +1640,7 @@ class ModifyTests(WicTestCase):
# list directory content of the ext4 partition
result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot))
dirs = set(line.split()[-1] for line in result.output.split('\n') if line)
- self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset(dirs))
+ self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset(dirs), msg="Expected directories not present in %s" % dirs)
with NamedTemporaryFile("w", suffix=".wic-cp") as testfile:
testfile.write("test")
@@ -1477,12 +1655,12 @@ class ModifyTests(WicTestCase):
# check if the file to copy is in the partition
result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot))
- self.assertTrue('fstab' in [line.split()[-1] for line in result.output.split('\n') if line])
+ self.assertIn('fstab', [line.split()[-1] for line in result.output.split('\n') if line])
# copy file from the partition, replace the temporary file content with it and
# check for the file size to validate the copy
runCmd("wic cp %s:2/etc/fstab %s -n %s" % (images[0], testfile.name, sysroot))
- self.assertTrue(os.stat(testfile.name).st_size > 0)
+ self.assertTrue(os.stat(testfile.name).st_size > 0, msg="File size not as expected (%s)" % os.stat(testfile.name).st_size)
def test_wic_rm_ext(self):
@@ -1497,18 +1675,18 @@ class ModifyTests(WicTestCase):
# list directory content of the /etc directory on ext4 partition
result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot))
- self.assertTrue('fstab' in [line.split()[-1] for line in result.output.split('\n') if line])
+ self.assertIn('fstab', [line.split()[-1] for line in result.output.split('\n') if line])
# remove file
runCmd("wic rm %s:2/etc/fstab -n %s" % (images[0], sysroot))
# check if it's removed
result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot))
- self.assertTrue('fstab' not in [line.split()[-1] for line in result.output.split('\n') if line])
+ self.assertNotIn('fstab', [line.split()[-1] for line in result.output.split('\n') if line])
# remove non-empty directory
runCmd("wic rm -r %s:2/etc/ -n %s" % (images[0], sysroot))
# check if it's removed
result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot))
- self.assertTrue('etc' not in [line.split()[-1] for line in result.output.split('\n') if line])
+ self.assertNotIn('etc', [line.split()[-1] for line in result.output.split('\n') if line])
diff --git a/meta/lib/oeqa/selftest/cases/wrapper.py b/meta/lib/oeqa/selftest/cases/wrapper.py
new file mode 100644
index 0000000000..f2be44262c
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/wrapper.py
@@ -0,0 +1,16 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+
+class WrapperTests(OESelftestTestCase):
+ def test_shebang_wrapper(self):
+ """
+ Summary: Build a recipe which will fail if the cmdline_shebang_wrapper function is defective.
+ Expected: Exit status to be 0.
+ Author: Paulo Neves <ptsneves@gmail.com>
+ """
+ res = bitbake("cmdline-shebang-wrapper-test -c install", ignore_status=False)
diff --git a/meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py b/meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py
new file mode 100644
index 0000000000..312edb6431
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py
@@ -0,0 +1,39 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import sys
+import subprocess
+import shutil
+from oeqa.selftest.case import OESelftestTestCase
+basepath = os.path.abspath(os.path.dirname(__file__) + '/../../../../../')
+lib_path = basepath + '/scripts/lib'
+sys.path = sys.path + [lib_path]
+from yocto_testresults_query import get_sha1, create_workdir
+
+
+class TestResultsQueryTests(OESelftestTestCase):
+ def test_get_sha1(self):
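+ # Each known release tag/milestone should always resolve to the same fixed poky commit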
+ test_data_get_sha1 = [
+ {"input": "yocto-4.0", "expected": "00cfdde791a0176c134f31e5a09eff725e75b905"},
+ {"input": "4.1_M1", "expected": "95066dde6861ee08fdb505ab3e0422156cc24fae"},
+ ]
+ for data in test_data_get_sha1:
+ test_name = data["input"]
+ with self.subTest(f"Test SHA1 from {test_name}"):
+ self.assertEqual(
+ get_sha1(basepath, data["input"]), data["expected"])
+
+ def test_create_workdir(self):
+ workdir = create_workdir()
+ try:
+ url = subprocess.check_output(
+ ["git", "-C", workdir, "remote", "get-url", "origin"]).strip().decode("utf-8")
+ except (subprocess.CalledProcessError, OSError):
+ shutil.rmtree(workdir, ignore_errors=True)
+ self.fail(f"Cannot execute git commands in {workdir}")
+ shutil.rmtree(workdir)
+ self.assertEqual(url, "git://git.yoctoproject.org/yocto-testresults")