Diffstat (limited to 'meta/lib/oeqa/selftest/cases')
-rw-r--r--  meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py | 14
-rw-r--r--  meta/lib/oeqa/selftest/cases/archiver.py | 263
-rw-r--r--  meta/lib/oeqa/selftest/cases/baremetal.py | 14
-rw-r--r--  meta/lib/oeqa/selftest/cases/bblayers.py | 175
-rw-r--r--  meta/lib/oeqa/selftest/cases/bblock.py | 203
-rw-r--r--  meta/lib/oeqa/selftest/cases/bblogging.py | 182
-rw-r--r--  meta/lib/oeqa/selftest/cases/bbtests.py | 256
-rw-r--r--  meta/lib/oeqa/selftest/cases/binutils.py | 56
-rw-r--r--  meta/lib/oeqa/selftest/cases/buildhistory.py | 6
-rw-r--r--  meta/lib/oeqa/selftest/cases/buildoptions.py | 139
-rw-r--r--  meta/lib/oeqa/selftest/cases/c_cpp.py | 60
-rw-r--r--  meta/lib/oeqa/selftest/cases/containerimage.py | 22
-rw-r--r--  meta/lib/oeqa/selftest/cases/cve_check.py | 242
-rw-r--r--  meta/lib/oeqa/selftest/cases/debuginfod.py | 158
-rw-r--r--  meta/lib/oeqa/selftest/cases/devtool.py | 1621
-rw-r--r--  meta/lib/oeqa/selftest/cases/diffoscope/A/file.txt | 1
-rw-r--r--  meta/lib/oeqa/selftest/cases/diffoscope/B/file.txt | 1
-rw-r--r--  meta/lib/oeqa/selftest/cases/distrodata.py | 109
-rw-r--r--  meta/lib/oeqa/selftest/cases/efibootpartition.py | 33
-rw-r--r--  meta/lib/oeqa/selftest/cases/esdk.py (renamed from meta/lib/oeqa/selftest/cases/eSDK.py) | 37
-rw-r--r--  meta/lib/oeqa/selftest/cases/externalsrc.py | 44
-rw-r--r--  meta/lib/oeqa/selftest/cases/fetch.py | 110
-rw-r--r--  meta/lib/oeqa/selftest/cases/fitimage.py | 846
-rw-r--r--  meta/lib/oeqa/selftest/cases/gcc.py | 169
-rw-r--r--  meta/lib/oeqa/selftest/cases/gdbserver.py | 67
-rw-r--r--  meta/lib/oeqa/selftest/cases/gitarchivetests.py | 136
-rw-r--r--  meta/lib/oeqa/selftest/cases/glibc.py | 99
-rw-r--r--  meta/lib/oeqa/selftest/cases/gotoolchain.py | 24
-rw-r--r--  meta/lib/oeqa/selftest/cases/image_typedep.py | 15
-rw-r--r--  meta/lib/oeqa/selftest/cases/imagefeatures.py | 228
-rw-r--r--  meta/lib/oeqa/selftest/cases/incompatible_lic.py | 152
-rw-r--r--  meta/lib/oeqa/selftest/cases/intercept.py | 21
-rw-r--r--  meta/lib/oeqa/selftest/cases/kerneldevelopment.py | 74
-rw-r--r--  meta/lib/oeqa/selftest/cases/layerappend.py | 20
-rw-r--r--  meta/lib/oeqa/selftest/cases/liboe.py | 12
-rw-r--r--  meta/lib/oeqa/selftest/cases/lic_checksum.py | 32
-rw-r--r--  meta/lib/oeqa/selftest/cases/locales.py | 54
-rw-r--r--  meta/lib/oeqa/selftest/cases/manifest.py | 18
-rw-r--r--  meta/lib/oeqa/selftest/cases/meta_ide.py | 60
-rw-r--r--  meta/lib/oeqa/selftest/cases/minidebuginfo.py | 44
-rw-r--r--  meta/lib/oeqa/selftest/cases/multiconfig.py | 87
-rw-r--r--  meta/lib/oeqa/selftest/cases/newlib.py | 13
-rw-r--r--  meta/lib/oeqa/selftest/cases/oelib/buildhistory.py | 82
-rw-r--r--  meta/lib/oeqa/selftest/cases/oelib/elf.py | 9
-rw-r--r--  meta/lib/oeqa/selftest/cases/oelib/license.py | 28
-rw-r--r--  meta/lib/oeqa/selftest/cases/oelib/path.py | 16
-rw-r--r--  meta/lib/oeqa/selftest/cases/oelib/types.py | 6
-rw-r--r--  meta/lib/oeqa/selftest/cases/oelib/utils.py | 55
-rw-r--r--  meta/lib/oeqa/selftest/cases/oescripts.py | 189
-rw-r--r--  meta/lib/oeqa/selftest/cases/overlayfs.py | 502
-rw-r--r--  meta/lib/oeqa/selftest/cases/package.py | 110
-rw-r--r--  meta/lib/oeqa/selftest/cases/pkgdata.py | 35
-rw-r--r--  meta/lib/oeqa/selftest/cases/prservice.py | 59
-rw-r--r--  meta/lib/oeqa/selftest/cases/pseudo.py | 29
-rw-r--r--  meta/lib/oeqa/selftest/cases/recipetool.py | 969
-rw-r--r--  meta/lib/oeqa/selftest/cases/recipeutils.py | 138
-rw-r--r--  meta/lib/oeqa/selftest/cases/reproducible.py | 338
-rw-r--r--  meta/lib/oeqa/selftest/cases/resulttooltests.py | 375
-rw-r--r--  meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py | 97
-rw-r--r--  meta/lib/oeqa/selftest/cases/rpmtests.py | 14
-rw-r--r--  meta/lib/oeqa/selftest/cases/runcmd.py | 56
-rw-r--r--  meta/lib/oeqa/selftest/cases/runqemu.py | 150
-rw-r--r--  meta/lib/oeqa/selftest/cases/runtime_test.py | 387
-rw-r--r--  meta/lib/oeqa/selftest/cases/rust.py | 231
-rw-r--r--  meta/lib/oeqa/selftest/cases/selftest.py | 9
-rw-r--r--  meta/lib/oeqa/selftest/cases/signing.py | 125
-rw-r--r--  meta/lib/oeqa/selftest/cases/spdx.py | 54
-rw-r--r--  meta/lib/oeqa/selftest/cases/sstate.py | 63
-rw-r--r--  meta/lib/oeqa/selftest/cases/sstatetests.py | 776
-rw-r--r--  meta/lib/oeqa/selftest/cases/sysroot.py | 86
-rw-r--r--  meta/lib/oeqa/selftest/cases/tinfoil.py | 73
-rw-r--r--  meta/lib/oeqa/selftest/cases/usergrouptests.py | 57
-rw-r--r--  meta/lib/oeqa/selftest/cases/wic.py | 1521
-rw-r--r--  meta/lib/oeqa/selftest/cases/wrapper.py | 16
-rw-r--r--  meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py | 39
75 files changed, 11064 insertions, 1547 deletions
diff --git a/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py b/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py
index 0e5896234c..2c9584d329 100644
--- a/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py
+++ b/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py
@@ -1,9 +1,15 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
import os
import shutil
import oeqa.utils.ftools as ftools
-from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_test_layer
-from oeqa.selftest.cases.sstate import SStateBase
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var
+from oeqa.selftest.cases.sstatetests import SStateBase
class RebuildFromSState(SStateBase):
@@ -86,7 +92,7 @@ class RebuildFromSState(SStateBase):
self.assertFalse(failed_cleansstate, msg="The following recipes have failed cleansstate(all others have passed both cleansstate and rebuild from sstate tests): %s" % ' '.join(map(str, failed_cleansstate)))
def test_sstate_relocation(self):
- self.run_test_sstate_rebuild(['core-image-sato-sdk'], relocate=True, rebuild_dependencies=True)
+ self.run_test_sstate_rebuild(['core-image-weston-sdk'], relocate=True, rebuild_dependencies=True)
def test_sstate_rebuild(self):
- self.run_test_sstate_rebuild(['core-image-sato-sdk'], relocate=False, rebuild_dependencies=True)
+ self.run_test_sstate_rebuild(['core-image-weston-sdk'], relocate=False, rebuild_dependencies=True)
diff --git a/meta/lib/oeqa/selftest/cases/archiver.py b/meta/lib/oeqa/selftest/cases/archiver.py
index f61a522017..3cb888c506 100644
--- a/meta/lib/oeqa/selftest/cases/archiver.py
+++ b/meta/lib/oeqa/selftest/cases/archiver.py
@@ -1,12 +1,17 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
import os
import glob
+import re
from oeqa.utils.commands import bitbake, get_bb_vars
from oeqa.selftest.case import OESelftestTestCase
-from oeqa.core.decorator.oeid import OETestID
class Archiver(OESelftestTestCase):
- @OETestID(1345)
def test_archiver_allows_to_filter_on_recipe_name(self):
"""
Summary: The archiver should offer the possibility to filter on the recipe. (#6929)
@@ -17,8 +22,8 @@ class Archiver(OESelftestTestCase):
AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
"""
- include_recipe = 'busybox'
- exclude_recipe = 'zlib'
+ include_recipe = 'selftest-ed'
+ exclude_recipe = 'initscripts'
features = 'INHERIT += "archiver"\n'
features += 'ARCHIVER_MODE[src] = "original"\n'
@@ -33,14 +38,13 @@ class Archiver(OESelftestTestCase):
src_path = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['TARGET_SYS'])
# Check that include_recipe was included
- included_present = len(glob.glob(src_path + '/%s-*' % include_recipe))
+ included_present = len(glob.glob(src_path + '/%s-*/*' % include_recipe))
self.assertTrue(included_present, 'Recipe %s was not included.' % include_recipe)
# Check that exclude_recipe was excluded
- excluded_present = len(glob.glob(src_path + '/%s-*' % exclude_recipe))
+ excluded_present = len(glob.glob(src_path + '/%s-*/*' % exclude_recipe))
self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % exclude_recipe)
- @OETestID(1900)
def test_archiver_filters_by_type(self):
"""
Summary: The archiver is documented to filter on the recipe type.
@@ -50,8 +54,8 @@ class Archiver(OESelftestTestCase):
Author: André Draszik <adraszik@tycoint.com>
"""
- target_recipe = 'initscripts'
- native_recipe = 'zlib-native'
+ target_recipe = 'selftest-ed'
+ native_recipe = 'selftest-ed-native'
features = 'INHERIT += "archiver"\n'
features += 'ARCHIVER_MODE[src] = "original"\n'
@@ -66,14 +70,13 @@ class Archiver(OESelftestTestCase):
src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS'])
# Check that target_recipe was included
- included_present = len(glob.glob(src_path_target + '/%s-*' % target_recipe))
+ included_present = len(glob.glob(src_path_target + '/%s-*/*' % target_recipe))
self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipe)
# Check that native_recipe was excluded
- excluded_present = len(glob.glob(src_path_native + '/%s-*' % native_recipe))
+ excluded_present = len(glob.glob(src_path_native + '/%s-*/*' % native_recipe))
self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipe)
- @OETestID(1901)
def test_archiver_filters_by_type_and_name(self):
"""
Summary: Test that the archiver archives by recipe type, taking the
@@ -86,8 +89,8 @@ class Archiver(OESelftestTestCase):
Author: André Draszik <adraszik@tycoint.com>
"""
- target_recipes = [ 'initscripts', 'zlib' ]
- native_recipes = [ 'update-rc.d-native', 'zlib-native' ]
+ target_recipes = [ 'initscripts', 'selftest-ed' ]
+ native_recipes = [ 'update-rc.d-native', 'selftest-ed-native' ]
features = 'INHERIT += "archiver"\n'
features += 'ARCHIVER_MODE[src] = "original"\n'
@@ -104,15 +107,239 @@ class Archiver(OESelftestTestCase):
src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS'])
# Check that target_recipe[0] and native_recipes[1] were included
- included_present = len(glob.glob(src_path_target + '/%s-*' % target_recipes[0]))
+ included_present = len(glob.glob(src_path_target + '/%s-*/*' % target_recipes[0]))
self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipes[0])
- included_present = len(glob.glob(src_path_native + '/%s-*' % native_recipes[1]))
+ included_present = len(glob.glob(src_path_native + '/%s-*/*' % native_recipes[1]))
self.assertTrue(included_present, 'Recipe %s was not included.' % native_recipes[1])
# Check that native_recipes[0] and target_recipes[1] were excluded
- excluded_present = len(glob.glob(src_path_native + '/%s-*' % native_recipes[0]))
+ excluded_present = len(glob.glob(src_path_native + '/%s-*/*' % native_recipes[0]))
self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipes[0])
- excluded_present = len(glob.glob(src_path_target + '/%s-*' % target_recipes[1]))
+ excluded_present = len(glob.glob(src_path_target + '/%s-*/*' % target_recipes[1]))
self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % target_recipes[1])
+
+ def test_archiver_multiconfig_shared_unpack_and_patch(self):
+ """
+ Test that shared recipes in original mode with diff enabled works in multiconfig,
+ otherwise it will not build when using the same TMP dir.
+ """
+
+ features = 'BBMULTICONFIG = "mc1 mc2"\n'
+ features += 'INHERIT += "archiver"\n'
+ features += 'ARCHIVER_MODE[src] = "original"\n'
+ features += 'ARCHIVER_MODE[diff] = "1"\n'
+ self.write_config(features)
+
+ # We can use any machine in multiconfig as long as they are different
+ self.write_config('MACHINE = "qemuarm"\n', 'mc1')
+ self.write_config('MACHINE = "qemux86"\n', 'mc2')
+
+ task = 'do_unpack_and_patch'
+ # Use gcc-source as it is a shared recipe (appends the pv to the pn)
+ pn = 'gcc-source-%s' % get_bb_vars(['PV'], 'gcc')['PV']
+
+ # Generate the tasks signatures
+ bitbake('mc:mc1:%s mc:mc2:%s -c %s -S lockedsigs' % (pn, pn, task))
+
+ # Check the tasks signatures
+ # To be machine agnostic the tasks needs to generate the same signature for each machine
+ locked_sigs_inc = "%s/locked-sigs.inc" % self.builddir
+ locked_sigs = open(locked_sigs_inc).read()
+ task_sigs = re.findall(r"%s:%s:.*" % (pn, task), locked_sigs)
+ uniq_sigs = set(task_sigs)
+ self.assertFalse(len(uniq_sigs) - 1, \
+ 'The task "%s" of the recipe "%s" has different signatures in "%s" for each machine in multiconfig' \
+ % (task, pn, locked_sigs_inc))
+
+ def test_archiver_srpm_mode(self):
+ """
+ Test that in srpm mode, the added recipe dependencies at least exist/work [YOCTO #11121]
+ """
+
+ features = 'INHERIT += "archiver"\n'
+ features += 'ARCHIVER_MODE[srpm] = "1"\n'
+ features += 'PACKAGE_CLASSES = "package_rpm"\n'
+ self.write_config(features)
+
+ bitbake('-n selftest-nopackages selftest-ed')
+
+ def _test_archiver_mode(self, mode, target_file_name, extra_config=None):
+ target = 'selftest-ed-native'
+
+ features = 'INHERIT += "archiver"\n'
+ features += 'ARCHIVER_MODE[src] = "%s"\n' % (mode)
+ if extra_config:
+ features += extra_config
+ self.write_config(features)
+
+ bitbake('-c clean %s' % (target))
+ bitbake('-c deploy_archives %s' % (target))
+
+ bb_vars = get_bb_vars(['DEPLOY_DIR_SRC', 'BUILD_SYS'])
+ glob_str = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS'], '%s-*' % (target))
+ glob_result = glob.glob(glob_str)
+ self.assertTrue(glob_result, 'Missing archiver directory for %s' % (target))
+
+ archive_path = os.path.join(glob_result[0], target_file_name)
+ self.assertTrue(os.path.exists(archive_path), 'Missing archive file %s' % (target_file_name))
+
+ def test_archiver_mode_original(self):
+ """
+ Test that the archiver works with `ARCHIVER_MODE[src] = "original"`.
+ """
+
+ self._test_archiver_mode('original', 'ed-1.14.1.tar.lz')
+
+ def test_archiver_mode_patched(self):
+ """
+ Test that the archiver works with `ARCHIVER_MODE[src] = "patched"`.
+ """
+
+ self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-patched.tar.xz')
+
+ def test_archiver_mode_configured(self):
+ """
+ Test that the archiver works with `ARCHIVER_MODE[src] = "configured"`.
+ """
+
+ self._test_archiver_mode('configured', 'selftest-ed-native-1.14.1-r0-configured.tar.xz')
+
+ def test_archiver_mode_recipe(self):
+ """
+ Test that the archiver works with `ARCHIVER_MODE[recipe] = "1"`.
+ """
+
+ self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-recipe.tar.xz',
+ 'ARCHIVER_MODE[recipe] = "1"\n')
+
+ def test_archiver_mode_diff(self):
+ """
+ Test that the archiver works with `ARCHIVER_MODE[diff] = "1"`.
+ Exclusions controlled by `ARCHIVER_MODE[diff-exclude]` are not yet tested.
+ """
+
+ self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-diff.gz',
+ 'ARCHIVER_MODE[diff] = "1"\n')
+
+ def test_archiver_mode_dumpdata(self):
+ """
+ Test that the archiver works with `ARCHIVER_MODE[dumpdata] = "1"`.
+ """
+
+ self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-showdata.dump',
+ 'ARCHIVER_MODE[dumpdata] = "1"\n')
+
+ def test_archiver_mode_mirror(self):
+ """
+ Test that the archiver works with `ARCHIVER_MODE[src] = "mirror"`.
+ """
+
+ self._test_archiver_mode('mirror', 'ed-1.14.1.tar.lz',
+ 'BB_GENERATE_MIRROR_TARBALLS = "1"\n')
+
+ def test_archiver_mode_mirror_excludes(self):
+ """
+ Test that the archiver works with `ARCHIVER_MODE[src] = "mirror"` and
+ correctly excludes an archive when its URL matches
+ `ARCHIVER_MIRROR_EXCLUDE`.
+ """
+
+ target='selftest-ed'
+ target_file_name = 'ed-1.14.1.tar.lz'
+
+ features = 'INHERIT += "archiver"\n'
+ features += 'ARCHIVER_MODE[src] = "mirror"\n'
+ features += 'BB_GENERATE_MIRROR_TARBALLS = "1"\n'
+ features += 'ARCHIVER_MIRROR_EXCLUDE = "${GNU_MIRROR}"\n'
+ self.write_config(features)
+
+ bitbake('-c clean %s' % (target))
+ bitbake('-c deploy_archives %s' % (target))
+
+ bb_vars = get_bb_vars(['DEPLOY_DIR_SRC', 'TARGET_SYS'])
+ glob_str = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['TARGET_SYS'], '%s-*' % (target))
+ glob_result = glob.glob(glob_str)
+ self.assertTrue(glob_result, 'Missing archiver directory for %s' % (target))
+
+ archive_path = os.path.join(glob_result[0], target_file_name)
+ self.assertFalse(os.path.exists(archive_path), 'Failed to exclude archive file %s' % (target_file_name))
+
+ def test_archiver_mode_mirror_combined(self):
+ """
+ Test that the archiver works with `ARCHIVER_MODE[src] = "mirror"`
+ and `ARCHIVER_MODE[mirror] = "combined"`. Archives for multiple recipes
+ should all end up in the 'mirror' directory.
+ """
+
+ features = 'INHERIT += "archiver"\n'
+ features += 'ARCHIVER_MODE[src] = "mirror"\n'
+ features += 'ARCHIVER_MODE[mirror] = "combined"\n'
+ features += 'BB_GENERATE_MIRROR_TARBALLS = "1"\n'
+ features += 'COPYLEFT_LICENSE_INCLUDE = "*"\n'
+ self.write_config(features)
+
+ for target in ['selftest-ed', 'selftest-hardlink']:
+ bitbake('-c clean %s' % (target))
+ bitbake('-c deploy_archives %s' % (target))
+
+ bb_vars = get_bb_vars(['DEPLOY_DIR_SRC'])
+ for target_file_name in ['ed-1.14.1.tar.lz', 'hello.c']:
+ glob_str = os.path.join(bb_vars['DEPLOY_DIR_SRC'], 'mirror', target_file_name)
+ glob_result = glob.glob(glob_str)
+ self.assertTrue(glob_result, 'Missing archive file %s' % (target_file_name))
+
+ def test_archiver_mode_mirror_gitsm(self):
+ """
+ Test that the archiver correctly handles git submodules with
+ `ARCHIVER_MODE[src] = "mirror"`.
+ """
+ features = 'INHERIT += "archiver"\n'
+ features += 'ARCHIVER_MODE[src] = "mirror"\n'
+ features += 'ARCHIVER_MODE[mirror] = "combined"\n'
+ features += 'BB_GENERATE_MIRROR_TARBALLS = "1"\n'
+ features += 'COPYLEFT_LICENSE_INCLUDE = "*"\n'
+ self.write_config(features)
+
+ bitbake('-c clean git-submodule-test')
+ bitbake('-c deploy_archives -f git-submodule-test')
+
+ bb_vars = get_bb_vars(['DEPLOY_DIR_SRC'])
+ for target_file_name in [
+ 'git2_git.yoctoproject.org.git-submodule-test.tar.gz',
+ 'git2_git.yoctoproject.org.bitbake-gitsm-test1.tar.gz',
+ 'git2_git.yoctoproject.org.bitbake-gitsm-test2.tar.gz',
+ 'git2_git.openembedded.org.bitbake.tar.gz'
+ ]:
+ target_path = os.path.join(bb_vars['DEPLOY_DIR_SRC'], 'mirror', target_file_name)
+ self.assertTrue(os.path.exists(target_path))
+
+ def test_archiver_mode_mirror_gitsm_shallow(self):
+ """
+ Test that the archiver correctly handles git submodules with
+ `ARCHIVER_MODE[src] = "mirror"`.
+ """
+ features = 'INHERIT += "archiver"\n'
+ features += 'ARCHIVER_MODE[src] = "mirror"\n'
+ features += 'ARCHIVER_MODE[mirror] = "combined"\n'
+ features += 'BB_GENERATE_MIRROR_TARBALLS = "1"\n'
+ features += 'COPYLEFT_LICENSE_INCLUDE = "*"\n'
+ features += 'BB_GIT_SHALLOW = "1"\n'
+ features += 'BB_GENERATE_SHALLOW_TARBALLS = "1"\n'
+ features += 'DL_DIR = "${TOPDIR}/downloads-shallow"\n'
+ self.write_config(features)
+
+ bitbake('-c clean git-submodule-test')
+ bitbake('-c deploy_archives -f git-submodule-test')
+
+ bb_vars = get_bb_vars(['DEPLOY_DIR_SRC'])
+ for target_file_name in [
+ 'gitsmshallow_git.yoctoproject.org.git-submodule-test_a2885dd-1_master.tar.gz',
+ 'gitsmshallow_git.yoctoproject.org.bitbake-gitsm-test1_bare_120f4c7-1.tar.gz',
+ 'gitsmshallow_git.yoctoproject.org.bitbake-gitsm-test2_bare_f66699e-1.tar.gz',
+ 'gitsmshallow_git.openembedded.org.bitbake_bare_52a144a-1.tar.gz',
+ 'gitsmshallow_git.openembedded.org.bitbake_bare_c39b997-1.tar.gz'
+ ]:
+ target_path = os.path.join(bb_vars['DEPLOY_DIR_SRC'], 'mirror', target_file_name)
+ self.assertTrue(os.path.exists(target_path))
diff --git a/meta/lib/oeqa/selftest/cases/baremetal.py b/meta/lib/oeqa/selftest/cases/baremetal.py
new file mode 100644
index 0000000000..cadaea2f1a
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/baremetal.py
@@ -0,0 +1,14 @@
+
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+
+class BaremetalTest(OESelftestTestCase):
+ def test_baremetal(self):
+ self.write_config('TCLIBC = "baremetal"')
+ bitbake('baremetal-helloworld')
diff --git a/meta/lib/oeqa/selftest/cases/bblayers.py b/meta/lib/oeqa/selftest/cases/bblayers.py
index 90a2249b08..695d17377d 100644
--- a/meta/lib/oeqa/selftest/cases/bblayers.py
+++ b/meta/lib/oeqa/selftest/cases/bblayers.py
@@ -1,37 +1,48 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
import os
import re
import oeqa.utils.ftools as ftools
-from oeqa.utils.commands import runCmd, get_bb_var
+from oeqa.utils.commands import runCmd, get_bb_var, get_bb_vars, bitbake
from oeqa.selftest.case import OESelftestTestCase
-from oeqa.core.decorator.oeid import OETestID
class BitbakeLayers(OESelftestTestCase):
- @OETestID(756)
+ @classmethod
+ def setUpClass(cls):
+ super(BitbakeLayers, cls).setUpClass()
+ bitbake("python3-jsonschema-native")
+ bitbake("-c addto_recipe_sysroot python3-jsonschema-native")
+
+ def test_bitbakelayers_layerindexshowdepends(self):
+ result = runCmd('bitbake-layers layerindex-show-depends meta-poky')
+ find_in_contents = re.search("openembedded-core", result.output)
+ self.assertTrue(find_in_contents, msg = "openembedded-core should have been listed at this step. bitbake-layers layerindex-show-depends meta-poky output: %s" % result.output)
+
def test_bitbakelayers_showcrossdepends(self):
result = runCmd('bitbake-layers show-cross-depends')
- self.assertTrue('aspell' in result.output, msg = "No dependencies were shown. bitbake-layers show-cross-depends output: %s" % result.output)
+ self.assertIn('aspell', result.output)
- @OETestID(83)
def test_bitbakelayers_showlayers(self):
result = runCmd('bitbake-layers show-layers')
- self.assertTrue('meta-selftest' in result.output, msg = "No layers were shown. bitbake-layers show-layers output: %s" % result.output)
+ self.assertIn('meta-selftest', result.output)
- @OETestID(93)
def test_bitbakelayers_showappends(self):
recipe = "xcursor-transparent-theme"
bb_file = self.get_recipe_basename(recipe)
result = runCmd('bitbake-layers show-appends')
- self.assertTrue(bb_file in result.output, msg="%s file was not recognised. bitbake-layers show-appends output: %s" % (bb_file, result.output))
+ self.assertIn(bb_file, result.output)
- @OETestID(90)
def test_bitbakelayers_showoverlayed(self):
result = runCmd('bitbake-layers show-overlayed')
- self.assertTrue('aspell' in result.output, msg="aspell overlayed recipe was not recognised bitbake-layers show-overlayed %s" % result.output)
+ self.assertIn('aspell', result.output)
- @OETestID(95)
def test_bitbakelayers_flatten(self):
recipe = "xcursor-transparent-theme"
recipe_path = "recipes-graphics/xcursor-transparent-theme"
@@ -43,10 +54,9 @@ class BitbakeLayers(OESelftestTestCase):
bb_file = os.path.join(testoutdir, recipe_path, recipe_file)
self.assertTrue(os.path.isfile(bb_file), msg = "Cannot find xcursor-transparent-theme_0.1.1.bb in the test_bitbakelayers_flatten local dir.")
contents = ftools.read_file(bb_file)
- find_in_contents = re.search("##### bbappended from meta-selftest #####\n(.*\n)*include test_recipe.inc", contents)
+ find_in_contents = re.search(r"##### bbappended from meta-selftest #####\n(.*\n)*include test_recipe.inc", contents)
self.assertTrue(find_in_contents, msg = "Flattening layers did not work. bitbake-layers flatten output: %s" % result.output)
- @OETestID(1195)
def test_bitbakelayers_add_remove(self):
test_layer = os.path.join(get_bb_var('COREBASE'), 'meta-skeleton')
result = runCmd('bitbake-layers show-layers')
@@ -64,7 +74,6 @@ class BitbakeLayers(OESelftestTestCase):
result = runCmd('bitbake-layers show-layers')
self.assertNotIn('meta-skeleton', result.output, msg = "meta-skeleton should have been removed at this step. bitbake-layers show-layers output: %s" % result.output)
- @OETestID(1384)
def test_bitbakelayers_showrecipes(self):
result = runCmd('bitbake-layers show-recipes')
self.assertIn('aspell:', result.output)
@@ -76,8 +85,9 @@ class BitbakeLayers(OESelftestTestCase):
result = runCmd('bitbake-layers show-recipes -i image')
self.assertIn('core-image-minimal', result.output)
self.assertNotIn('mtd-utils:', result.output)
- result = runCmd('bitbake-layers show-recipes -i cmake,pkgconfig')
+ result = runCmd('bitbake-layers show-recipes -i meson,pkgconfig')
self.assertIn('libproxy:', result.output)
+ result = runCmd('bitbake-layers show-recipes -i cmake,pkgconfig')
self.assertNotIn('mtd-utils:', result.output) # doesn't inherit either
self.assertNotIn('wget:', result.output) # doesn't inherit cmake
self.assertNotIn('waffle:', result.output) # doesn't inherit pkgconfig
@@ -85,6 +95,36 @@ class BitbakeLayers(OESelftestTestCase):
self.assertNotEqual(result.status, 0, 'bitbake-layers show-recipes -i nonexistentclass should have failed')
self.assertIn('ERROR:', result.output)
+ def test_bitbakelayers_createlayer(self):
+ priority = 10
+ layername = 'test-bitbakelayer-layercreate'
+ layerpath = os.path.join(self.builddir, layername)
+ self.assertFalse(os.path.exists(layerpath), '%s should not exist at this point in time' % layerpath)
+ result = runCmd('bitbake-layers create-layer --priority=%d %s' % (priority, layerpath))
+ self.track_for_cleanup(layerpath)
+ result = runCmd('bitbake-layers add-layer %s' % layerpath)
+ self.add_command_to_tearDown('bitbake-layers remove-layer %s' % layerpath)
+ result = runCmd('bitbake-layers show-layers')
+ find_in_contents = re.search(re.escape(layername) + r'\s+' + re.escape(layerpath) + r'\s+' + re.escape(str(priority)), result.output)
+ self.assertTrue(find_in_contents, "%s not found in layers\n%s" % (layername, result.output))
+
+ layervars = ['BBFILE_PRIORITY', 'BBFILE_PATTERN', 'LAYERDEPENDS', 'LAYERSERIES_COMPAT']
+ bb_vars = get_bb_vars(['BBFILE_COLLECTIONS'] + ['%s_%s' % (v, layername) for v in layervars])
+
+ for v in layervars:
+ varname = '%s_%s' % (v, layername)
+ self.assertIsNotNone(bb_vars[varname], "%s not found" % varname)
+
+ find_in_contents = re.search(r'(^|\s)' + re.escape(layername) + r'($|\s)', bb_vars['BBFILE_COLLECTIONS'])
+ self.assertTrue(find_in_contents, "%s not in BBFILE_COLLECTIONS" % layername)
+
+ self.assertEqual(bb_vars['BBFILE_PRIORITY_%s' % layername], str(priority), 'BBFILE_PRIORITY_%s != %d' % (layername, priority))
+
+ result = runCmd('bitbake-layers save-build-conf {} {}'.format(layerpath, "buildconf-1"))
+ for f in ('local.conf.sample', 'bblayers.conf.sample', 'conf-summary.txt', 'conf-notes.txt'):
+ fullpath = os.path.join(layerpath, "conf", "templates", "buildconf-1", f)
+ self.assertTrue(os.path.exists(fullpath), "Template configuration file {} not found".format(fullpath))
+
def get_recipe_basename(self, recipe):
recipe_file = ""
result = runCmd("bitbake-layers show-recipes -f %s" % recipe)
@@ -95,3 +135,108 @@ class BitbakeLayers(OESelftestTestCase):
self.assertTrue(os.path.isfile(recipe_file), msg = "Can't find recipe file for %s" % recipe)
return os.path.basename(recipe_file)
+
+ def validate_layersjson(self, json):
+ python = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-jsonschema-native'), 'nativepython3')
+ jsonvalidator = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-jsonschema-native'), 'jsonschema')
+ jsonschema = os.path.join(get_bb_var('COREBASE'), 'meta/files/layers.schema.json')
+ result = runCmd("{} {} -i {} {}".format(python, jsonvalidator, json, jsonschema))
+
+ def test_validate_examplelayersjson(self):
+ json = os.path.join(get_bb_var('COREBASE'), "meta/files/layers.example.json")
+ self.validate_layersjson(json)
+
+ def test_bitbakelayers_setup(self):
+ result = runCmd('bitbake-layers create-layers-setup {}'.format(self.testlayer_path))
+ jsonfile = os.path.join(self.testlayer_path, "setup-layers.json")
+ self.validate_layersjson(jsonfile)
+
+ # The revision-under-test may not necessarily be available on the remote server,
+ # so replace it with a revision that has a yocto-4.1 tag.
+ import json
+ with open(jsonfile) as f:
+ data = json.load(f)
+ for s in data['sources']:
+ data['sources'][s]['git-remote']['rev'] = '5200799866b92259e855051112520006e1aaaac0'
+ with open(jsonfile, 'w') as f:
+ json.dump(data, f)
+
+ testcheckoutdir = os.path.join(self.builddir, 'test-layer-checkout')
+ result = runCmd('{}/setup-layers --destdir {}'.format(self.testlayer_path, testcheckoutdir))
+ layers_json = os.path.join(testcheckoutdir, ".oe-layers.json")
+ self.assertTrue(os.path.exists(layers_json), "File {} not found in test layer checkout".format(layers_json))
+
+ # As setup-layers checks out an old revision of poky, there is no setup-build symlink,
+ # and we need to run oe-setup-build directly from the current poky tree under test
+ oe_setup_build = os.path.join(get_bb_var('COREBASE'), 'scripts/oe-setup-build')
+ oe_setup_build_l = os.path.join(testcheckoutdir, 'setup-build')
+ os.symlink(oe_setup_build,oe_setup_build_l)
+
+ cmd = '{} --layerlist {} list -v'.format(oe_setup_build_l, layers_json)
+ result = runCmd(cmd)
+ cond = "conf/templates/default" in result.output
+ self.assertTrue(cond, "Incorrect output from {}: {}".format(cmd, result.output))
+
+ # rather than hardcode the build setup cmdline here, let's actually run what the tool suggests to the user
+ conf = None
+ if 'poky-default' in result.output:
+ conf = 'poky-default'
+ elif 'meta-default' in result.output:
+ conf = 'meta-default'
+ self.assertIsNotNone(conf, "Could not find the configuration to set up a build in the output: {}".format(result.output))
+
+ cmd = '{} --layerlist {} setup -c {} --no-shell'.format(oe_setup_build_l, layers_json, conf)
+ result = runCmd(cmd)
+
+ def test_bitbakelayers_updatelayer(self):
+ result = runCmd('bitbake-layers create-layers-setup {}'.format(self.testlayer_path))
+ jsonfile = os.path.join(self.testlayer_path, "setup-layers.json")
+ self.validate_layersjson(jsonfile)
+
+ import json
+ with open(jsonfile) as f:
+ data = json.load(f)
+ repos = []
+ for s in data['sources']:
+ repos.append(s)
+
+ self.assertTrue(len(repos) > 1, "Not enough repositories available")
+ self.validate_layersjson(jsonfile)
+
+ test_ref_1 = 'ref_1'
+ test_ref_2 = 'ref_2'
+
+ # Create a new layers setup using custom references
+ result = runCmd('bitbake-layers create-layers-setup --use-custom-reference {first_repo}:{test_ref} --use-custom-reference {second_repo}:{test_ref} {path}'
+ .format(first_repo=repos[0], second_repo=repos[1], test_ref=test_ref_1, path=self.testlayer_path))
+ self.validate_layersjson(jsonfile)
+
+ with open(jsonfile) as f:
+ data = json.load(f)
+ first_rev_1 = data['sources'][repos[0]]['git-remote']['rev']
+ first_desc_1 = data['sources'][repos[0]]['git-remote']['describe']
+ second_rev_1 = data['sources'][repos[1]]['git-remote']['rev']
+ second_desc_1 = data['sources'][repos[1]]['git-remote']['describe']
+
+ self.assertEqual(first_rev_1, test_ref_1, "Revision not set correctly: '{}'".format(first_rev_1))
+ self.assertEqual(first_desc_1, '', "Describe not cleared: '{}'".format(first_desc_1))
+ self.assertEqual(second_rev_1, test_ref_1, "Revision not set correctly: '{}'".format(second_rev_1))
+ self.assertEqual(second_desc_1, '', "Describe not cleared: '{}'".format(second_desc_1))
+
+ # Update one of the repositories in the layers setup using a different custom reference
+ # This should only update the selected repository, everything else should remain as is
+ result = runCmd('bitbake-layers create-layers-setup --update --use-custom-reference {first_repo}:{test_ref} {path}'
+ .format(first_repo=repos[0], test_ref=test_ref_2, path=self.testlayer_path))
+ self.validate_layersjson(jsonfile)
+
+ with open(jsonfile) as f:
+ data = json.load(f)
+ first_rev_2 = data['sources'][repos[0]]['git-remote']['rev']
+ first_desc_2 = data['sources'][repos[0]]['git-remote']['describe']
+ second_rev_2 = data['sources'][repos[1]]['git-remote']['rev']
+ second_desc_2 = data['sources'][repos[1]]['git-remote']['describe']
+
+ self.assertEqual(first_rev_2, test_ref_2, "Revision not set correctly: '{}'".format(first_rev_2))
+ self.assertEqual(first_desc_2, '', "Describe not cleared: '{}'".format(first_desc_2))
+ self.assertEqual(second_rev_2, second_rev_1, "Revision should not be updated: '{}'".format(second_rev_2))
+ self.assertEqual(second_desc_2, second_desc_1, "Describe should not be updated: '{}'".format(second_desc_2))
diff --git a/meta/lib/oeqa/selftest/cases/bblock.py b/meta/lib/oeqa/selftest/cases/bblock.py
new file mode 100644
index 0000000000..2b62d2a0aa
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/bblock.py
@@ -0,0 +1,203 @@
+#
+# Copyright (c) 2023 BayLibre, SAS
+# Author: Julien Stepahn <jstephan@baylibre.com>
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import os
+import re
+import bb.tinfoil
+
+import oeqa.utils.ftools as ftools
+from oeqa.utils.commands import runCmd, get_bb_var, get_bb_vars, bitbake
+
+from oeqa.selftest.case import OESelftestTestCase
+
+
+class BBLock(OESelftestTestCase):
+ @classmethod
+ def setUpClass(cls):
+ super(BBLock, cls).setUpClass()
+ cls.lockfile = cls.builddir + "/conf/bblock.conf"
+
+ def unlock_recipes(self, recipes=None, tasks=None):
+ cmd = "bblock -r "
+ if recipes:
+ cmd += " ".join(recipes)
+ if tasks:
+ cmd += " -t " + ",".join(tasks)
+ result = runCmd(cmd)
+
+ if recipes:
+ # ensure all signatures are removed from lockfile
+ contents = ftools.read_file(self.lockfile)
+ for recipe in recipes:
+ for task in tasks:
+ find_in_contents = re.search(
+ 'SIGGEN_LOCKEDSIGS_.+\s\+=\s"%s:%s:.*"' % (recipe, task),
+ contents,
+ )
+ self.assertFalse(
+ find_in_contents,
+ msg="%s:%s should not be present into bblock.conf anymore"
+ % (recipe, task),
+ )
+ self.assertExists(self.lockfile)
+ else:
+ self.assertNotExists(self.lockfile)
+
+ def lock_recipes(self, recipes, tasks=None):
+ cmd = "bblock " + " ".join(recipes)
+ if tasks:
+ cmd += " -t " + ",".join(tasks)
+
+ result = runCmd(cmd)
+
+ self.assertExists(self.lockfile)
+
+ # ensure all signatures are added to lockfile
+ contents = ftools.read_file(self.lockfile)
+ for recipe in recipes:
+ if tasks:
+ for task in tasks:
+ find_in_contents = re.search(
+ 'SIGGEN_LOCKEDSIGS_.+\s\+=\s"%s:%s:.*"' % (recipe, task),
+ contents,
+ )
+ self.assertTrue(
+ find_in_contents,
+ msg="%s:%s was not added into bblock.conf. bblock output: %s"
+ % (recipe, task, result.output),
+ )
+
+ def modify_tasks(self, recipes, tasks):
+ task_append = ""
+ for recipe in recipes:
+ bb_vars = get_bb_vars(["PV"], recipe)
+ recipe_pv = bb_vars["PV"]
+ recipe_append_file = recipe + "_" + recipe_pv + ".bbappend"
+
+ os.mkdir(os.path.join(self.testlayer_path, "recipes-test", recipe))
+ recipe_append_path = os.path.join(
+ self.testlayer_path, "recipes-test", recipe, recipe_append_file
+ )
+
+ for task in tasks:
+ task_append += "%s:append() {\n#modify task hash \n}\n" % task
+ ftools.write_file(recipe_append_path, task_append)
+ self.add_command_to_tearDown(
+ "rm -rf %s" % os.path.join(self.testlayer_path, "recipes-test", recipe)
+ )
+
+ def test_lock_single_recipe_single_task(self):
+ recipes = ["quilt"]
+ tasks = ["do_compile"]
+ self._run_test(recipes, tasks)
+
+ def test_lock_single_recipe_multiple_tasks(self):
+ recipes = ["quilt"]
+ tasks = ["do_compile", "do_install"]
+ self._run_test(recipes, tasks)
+
+ def test_lock_single_recipe_all_tasks(self):
+ recipes = ["quilt"]
+ self._run_test(recipes, None)
+
+ def test_lock_multiple_recipe_single_task(self):
+ recipes = ["quilt", "bc"]
+ tasks = ["do_compile"]
+ self._run_test(recipes, tasks)
+
+ def test_lock_architecture_specific(self):
+ # unlock all recipes and ensure no bblock.conf file exist
+ self.unlock_recipes()
+
+ recipes = ["quilt"]
+ tasks = ["do_compile"]
+
+ # lock quilt's do_compile task for another machine
+ if self.td["MACHINE"] == "qemux86-64":
+ machine = "qemuarm"
+ else:
+ machine = "qemux86-64"
+
+ self.write_config('MACHINE = "%s"\n' % machine)
+
+ self.lock_recipes(recipes, tasks)
+
+ self.write_config('MACHINE = "%s"\n' % self.td["MACHINE"])
+ # modify quilt's do_compile task
+ self.modify_tasks(recipes, tasks)
+
+ # build quilt using the default machine
+ # No Note/Warning should be emitted since sig is locked for another machine
+ # (quilt package is architecture dependent)
+ info_message = "NOTE: The following recipes have locked tasks: " + recipes[0]
+ warn_message = "The %s:%s sig is computed to be" % (recipes[0], tasks[0])
+ result = bitbake(recipes[0] + " -n")
+ self.assertNotIn(info_message, result.output)
+ self.assertNotIn(warn_message, result.output)
+
+ # unlock all recipes
+ self.unlock_recipes()
+
+ def _run_test(self, recipes, tasks=None):
+ # unlock all recipes and ensure no bblock.conf file exist
+ self.unlock_recipes()
+
+ self.write_config('BB_SIGNATURE_HANDLER = "OEBasicHash"')
+
+ # lock tasks for recipes
+ result = self.lock_recipes(recipes, tasks)
+
+ if not tasks:
+ tasks = []
+ result = bitbake("-c listtasks " + recipes[0])
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=False, quiet=2)
+ d = tinfoil.parse_recipe(recipes[0])
+
+ for line in result.output.splitlines():
+ if line.startswith("do_"):
+ task = line.split()[0]
+ if "setscene" in task:
+ continue
+ if d.getVarFlag(task, "nostamp"):
+ continue
+ tasks.append(task)
+
+ # build recipes. At this stage we should have a Note about recipes
+ # having locked task's sig, but no warning since sig still match
+ info_message = "NOTE: The following recipes have locked tasks: " + " ".join(
+ recipes
+ )
+ for recipe in recipes:
+ result = bitbake(recipe + " -n")
+ self.assertIn(info_message, result.output)
+ for task in tasks:
+ warn_message = "The %s:%s sig is computed to be" % (recipe, task)
+ self.assertNotIn(warn_message, result.output)
+
+ # modify all tasks that are locked to trigger a sig change then build the recipes
+ # at this stage we should have a Note as before, but also a Warning for all
+ # locked tasks indicating the sig mismatch
+ self.modify_tasks(recipes, tasks)
+ for recipe in recipes:
+ result = bitbake(recipe + " -n")
+ self.assertIn(info_message, result.output)
+ for task in tasks:
+ warn_message = "The %s:%s sig is computed to be" % (recipe, task)
+ self.assertIn(warn_message, result.output)
+
+ # unlock all tasks and rebuild, no more Note/Warning should remain
+ self.unlock_recipes(recipes, tasks)
+ for recipe in recipes:
+ result = bitbake(recipe + " -n")
+ self.assertNotIn(info_message, result.output)
+ for task in tasks:
+ warn_message = "The %s:%s sig is computed to be" % (recipe, task)
+ self.assertNotIn(warn_message, result.output)
+
+ # unlock all recipes
+ self.unlock_recipes()
diff --git a/meta/lib/oeqa/selftest/cases/bblogging.py b/meta/lib/oeqa/selftest/cases/bblogging.py
new file mode 100644
index 0000000000..040c6db089
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/bblogging.py
@@ -0,0 +1,182 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+
+class BitBakeLogging(OESelftestTestCase):
+
+ def assertCount(self, item, entry, count):
+ self.assertEqual(item.count(entry), count, msg="Output:\n'''\n%s\n'''\ndoesn't contain %d copies of:\n'''\n%s\n'''\n" % (item, count, entry))
+
+ def test_shell_loggingA(self):
+ # no logs, no verbose
+ self.write_config('BBINCLUDELOGS = ""')
+ result = bitbake("logging-test -c shelltest -f", ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ self.assertNotIn("This is shell stdout", result.output)
+ self.assertNotIn("This is shell stderr", result.output)
+
+ def test_shell_loggingB(self):
+ # logs, no verbose
+ self.write_config('BBINCLUDELOGS = "yes"')
+ result = bitbake("logging-test -c shelltest -f", ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ self.assertCount(result.output, "This is shell stdout", 1)
+ self.assertCount(result.output, "This is shell stderr", 1)
+
+ def test_shell_loggingC(self):
+ # no logs, verbose
+ self.write_config('BBINCLUDELOGS = ""')
+ result = bitbake("logging-test -c shelltest -f -v", ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ # two copies due to set +x
+ self.assertCount(result.output, "This is shell stdout", 2)
+ self.assertCount(result.output, "This is shell stderr", 2)
+
+ def test_shell_loggingD(self):
+ # logs, verbose
+ self.write_config('BBINCLUDELOGS = "yes"')
+ result = bitbake("logging-test -c shelltest -f -v", ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ # two copies due to set +x
+ self.assertCount(result.output, "This is shell stdout", 2)
+ self.assertCount(result.output, "This is shell stderr", 2)
+
+ def test_python_exec_func_shell_loggingA(self):
+ # no logs, no verbose
+ self.write_config('BBINCLUDELOGS = ""')
+ result = bitbake("logging-test -c pythontest_exec_func_shell -f",
+ ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ self.assertNotIn("This is shell stdout", result.output)
+ self.assertNotIn("This is shell stderr", result.output)
+
+ def test_python_exec_func_shell_loggingB(self):
+ # logs, no verbose
+ self.write_config('BBINCLUDELOGS = "yes"')
+ result = bitbake("logging-test -c pythontest_exec_func_shell -f",
+ ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ self.assertCount(result.output, "This is shell stdout", 1)
+ self.assertCount(result.output, "This is shell stderr", 1)
+
+ def test_python_exec_func_shell_loggingC(self):
+ # no logs, verbose
+ self.write_config('BBINCLUDELOGS = ""')
+ result = bitbake("logging-test -c pythontest_exec_func_shell -f -v",
+ ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ # two copies due to set +x
+ self.assertCount(result.output, "This is shell stdout", 2)
+ self.assertCount(result.output, "This is shell stderr", 2)
+
+ def test_python_exec_func_shell_loggingD(self):
+ # logs, verbose
+ self.write_config('BBINCLUDELOGS = "yes"')
+ result = bitbake("logging-test -c pythontest_exec_func_shell -f -v",
+ ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ # two copies due to set +x
+ self.assertCount(result.output, "This is shell stdout", 2)
+ self.assertCount(result.output, "This is shell stderr", 2)
+
+ def test_python_exit_loggingA(self):
+ # no logs, no verbose
+ self.write_config('BBINCLUDELOGS = ""')
+ result = bitbake("logging-test -c pythontest_exit -f", ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ self.assertNotIn("This is python stdout", result.output)
+
+ def test_python_exit_loggingB(self):
+ # logs, no verbose
+ self.write_config('BBINCLUDELOGS = "yes"')
+ result = bitbake("logging-test -c pythontest_exit -f", ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ # A sys.exit() should include the output
+ self.assertCount(result.output, "This is python stdout", 1)
+
+ def test_python_exit_loggingC(self):
+ # no logs, verbose
+ self.write_config('BBINCLUDELOGS = ""')
+ result = bitbake("logging-test -c pythontest_exit -f -v", ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ self.assertCount(result.output, "This is python stdout", 1)
+
+ def test_python_exit_loggingD(self):
+ # logs, verbose
+ self.write_config('BBINCLUDELOGS = "yes"')
+ result = bitbake("logging-test -c pythontest_exit -f -v", ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ self.assertCount(result.output, "This is python stdout", 1)
+
+ def test_python_exec_func_python_loggingA(self):
+ # no logs, no verbose
+ self.write_config('BBINCLUDELOGS = ""')
+ result = bitbake("logging-test -c pythontest_exec_func_python -f",
+ ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ self.assertNotIn("This is python stdout", result.output)
+
+ def test_python_exec_func_python_loggingB(self):
+ # logs, no verbose
+ self.write_config('BBINCLUDELOGS = "yes"')
+ result = bitbake("logging-test -c pythontest_exec_func_python -f",
+ ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ # A sys.exit() should include the output
+ self.assertCount(result.output, "This is python stdout", 1)
+
+ def test_python_exec_func_python_loggingC(self):
+ # no logs, verbose
+ self.write_config('BBINCLUDELOGS = ""')
+ result = bitbake("logging-test -c pythontest_exec_func_python -f -v",
+ ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ self.assertCount(result.output, "This is python stdout", 1)
+
+ def test_python_exec_func_python_loggingD(self):
+ # logs, verbose
+ self.write_config('BBINCLUDELOGS = "yes"')
+ result = bitbake("logging-test -c pythontest_exec_func_python -f -v",
+ ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ self.assertCount(result.output, "This is python stdout", 1)
+
+ def test_python_fatal_loggingA(self):
+ # no logs, no verbose
+ self.write_config('BBINCLUDELOGS = ""')
+ result = bitbake("logging-test -c pythontest_fatal -f", ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ self.assertNotIn("This is python fatal test stdout", result.output)
+ self.assertCount(result.output, "This is a fatal error", 1)
+
+ def test_python_fatal_loggingB(self):
+ # logs, no verbose
+ self.write_config('BBINCLUDELOGS = "yes"')
+ result = bitbake("logging-test -c pythontest_fatal -f", ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ # A bb.fatal() should not include the output
+ self.assertNotIn("This is python fatal test stdout", result.output)
+ self.assertCount(result.output, "This is a fatal error", 1)
+
+ def test_python_fatal_loggingC(self):
+ # no logs, verbose
+ self.write_config('BBINCLUDELOGS = ""')
+ result = bitbake("logging-test -c pythontest_fatal -f -v", ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ self.assertCount(result.output, "This is python fatal test stdout", 1)
+ self.assertCount(result.output, "This is a fatal error", 1)
+
+ def test_python_fatal_loggingD(self):
+ # logs, verbose
+ self.write_config('BBINCLUDELOGS = "yes"')
+ result = bitbake("logging-test -c pythontest_fatal -f -v", ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ self.assertCount(result.output, "This is python fatal test stdout", 1)
+ self.assertCount(result.output, "This is a fatal error", 1)
+
diff --git a/meta/lib/oeqa/selftest/cases/bbtests.py b/meta/lib/oeqa/selftest/cases/bbtests.py
index 4c82049032..98e9f81661 100644
--- a/meta/lib/oeqa/selftest/cases/bbtests.py
+++ b/meta/lib/oeqa/selftest/cases/bbtests.py
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
import os
import re
@@ -5,7 +11,6 @@ import oeqa.utils.ftools as ftools
from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
from oeqa.selftest.case import OESelftestTestCase
-from oeqa.core.decorator.oeid import OETestID
class BitbakeTests(OESelftestTestCase):
@@ -14,75 +19,81 @@ class BitbakeTests(OESelftestTestCase):
if line in l:
return l
- @OETestID(789)
+ # Test bitbake can run from the <builddir>/conf directory
def test_run_bitbake_from_dir_1(self):
os.chdir(os.path.join(self.builddir, 'conf'))
self.assertEqual(bitbake('-e').status, 0, msg = "bitbake couldn't run from \"conf\" dir")
- @OETestID(790)
+ # Test bitbake can run from the <builddir>'s parent directory
def test_run_bitbake_from_dir_2(self):
my_env = os.environ.copy()
my_env['BBPATH'] = my_env['BUILDDIR']
os.chdir(os.path.dirname(os.environ['BUILDDIR']))
- self.assertEqual(bitbake('-e', env=my_env).status, 0, msg = "bitbake couldn't run from builddir")
+ self.assertEqual(bitbake('-e', env=my_env).status, 0, msg = "bitbake couldn't run from builddir's parent directory")
+
+ # Test bitbake can run from some other random system location (we use /tmp/)
+ def test_run_bitbake_from_dir_3(self):
+ my_env = os.environ.copy()
+ my_env['BBPATH'] = my_env['BUILDDIR']
+ os.chdir("/tmp/")
+ self.assertEqual(bitbake('-e', env=my_env).status, 0, msg = "bitbake couldn't run from /tmp/")
+
- @OETestID(806)
def test_event_handler(self):
self.write_config("INHERIT += \"test_events\"")
- result = bitbake('m4-native')
- find_build_started = re.search("NOTE: Test for bb\.event\.BuildStarted(\n.*)*NOTE: Executing RunQueue Tasks", result.output)
- find_build_completed = re.search("Tasks Summary:.*(\n.*)*NOTE: Test for bb\.event\.BuildCompleted", result.output)
+ result = bitbake('selftest-hello-native')
+ find_build_started = re.search(r"NOTE: Test for bb\.event\.BuildStarted(\n.*)*NOTE: Executing.*Tasks", result.output)
+ find_build_completed = re.search(r"Tasks Summary:.*(\n.*)*NOTE: Test for bb\.event\.BuildCompleted", result.output)
self.assertTrue(find_build_started, msg = "Match failed in:\n%s" % result.output)
self.assertTrue(find_build_completed, msg = "Match failed in:\n%s" % result.output)
- self.assertFalse('Test for bb.event.InvalidEvent' in result.output, msg = "\"Test for bb.event.InvalidEvent\" message found during bitbake process. bitbake output: %s" % result.output)
+ self.assertNotIn('Test for bb.event.InvalidEvent', result.output)
- @OETestID(103)
def test_local_sstate(self):
- bitbake('m4-native')
- bitbake('m4-native -cclean')
- result = bitbake('m4-native')
- find_setscene = re.search("m4-native.*do_.*_setscene", result.output)
- self.assertTrue(find_setscene, msg = "No \"m4-native.*do_.*_setscene\" message found during bitbake m4-native. bitbake output: %s" % result.output )
+ bitbake('selftest-hello-native')
+ bitbake('selftest-hello-native -cclean')
+ result = bitbake('selftest-hello-native')
+ find_setscene = re.search("selftest-hello-native.*do_.*_setscene", result.output)
+ self.assertTrue(find_setscene, msg = "No \"selftest-hello-native.*do_.*_setscene\" message found during bitbake selftest-hello-native. bitbake output: %s" % result.output )
- @OETestID(105)
def test_bitbake_invalid_recipe(self):
result = bitbake('-b asdf', ignore_status=True)
self.assertTrue("ERROR: Unable to find any recipe file matching 'asdf'" in result.output, msg = "Though asdf recipe doesn't exist, bitbake didn't output any err. message. bitbake output: %s" % result.output)
- @OETestID(107)
def test_bitbake_invalid_target(self):
result = bitbake('asdf', ignore_status=True)
- self.assertTrue("ERROR: Nothing PROVIDES 'asdf'" in result.output, msg = "Though no 'asdf' target exists, bitbake didn't output any err. message. bitbake output: %s" % result.output)
+ self.assertIn("ERROR: Nothing PROVIDES 'asdf'", result.output)
- @OETestID(106)
def test_warnings_errors(self):
result = bitbake('-b asdf', ignore_status=True)
- find_warnings = re.search("Summary: There w.{2,3}? [1-9][0-9]* WARNING messages* shown", result.output)
- find_errors = re.search("Summary: There w.{2,3}? [1-9][0-9]* ERROR messages* shown", result.output)
+ find_warnings = re.search("Summary: There w.{2,3}? [1-9][0-9]* WARNING messages*", result.output)
+ find_errors = re.search("Summary: There w.{2,3}? [1-9][0-9]* ERROR messages*", result.output)
self.assertTrue(find_warnings, msg="Did not find the mumber of warnings at the end of the build:\n" + result.output)
self.assertTrue(find_errors, msg="Did not find the mumber of errors at the end of the build:\n" + result.output)
- @OETestID(108)
def test_invalid_patch(self):
- # This patch already exists in SRC_URI so adding it again will cause the
- # patch to fail.
- self.write_recipeinc('man', 'SRC_URI += "file://man-1.5h1-make.patch"')
- self.write_config("INHERIT_remove = \"report-error\"")
- result = bitbake('man -c patch', ignore_status=True)
- self.delete_recipeinc('man')
- bitbake('-cclean man')
- line = self.getline(result, "Function failed: patch_do_patch")
- self.assertTrue(line and line.startswith("ERROR:"), msg = "Repeated patch application didn't fail. bitbake output: %s" % result.output)
-
- @OETestID(1354)
+ # This patch should fail to apply.
+ self.write_recipeinc('man-db', 'FILESEXTRAPATHS:prepend := "${THISDIR}/files:"\nSRC_URI += "file://0001-Test-patch-here.patch"')
+ self.write_config("INHERIT:remove = \"report-error\"")
+ result = bitbake('man-db -c patch', ignore_status=True)
+ self.delete_recipeinc('man-db')
+ bitbake('-cclean man-db')
+ found = False
+ for l in result.output.split('\n'):
+ if l.startswith("ERROR:") and "failed" in l and "do_patch" in l:
+ found = l
+ self.assertTrue(found and found.startswith("ERROR:"), msg = "Incorrectly formed patch application didn't fail. bitbake output: %s" % result.output)
+
def test_force_task_1(self):
# test 1 from bug 5875
+ import uuid
test_recipe = 'zlib'
- test_data = "Microsoft Made No Profit From Anyone's Zunes Yo"
+ # Need to use uuid otherwise hash equivalence would change the workflow
+ test_data = "Microsoft Made No Profit From Anyone's Zunes Yo %s" % uuid.uuid1()
bb_vars = get_bb_vars(['D', 'PKGDEST', 'mandir'], test_recipe)
image_dir = bb_vars['D']
pkgsplit_dir = bb_vars['PKGDEST']
man_dir = bb_vars['mandir']
+ self.write_config("PACKAGE_CLASSES = \"package_rpm\"")
bitbake('-c clean %s' % test_recipe)
bitbake('-c package -f %s' % test_recipe)
@@ -99,7 +110,6 @@ class BitbakeTests(OESelftestTestCase):
ret = bitbake(test_recipe)
self.assertIn('task do_package_write_rpm:', ret.output, 'Task do_package_write_rpm did not re-executed.')
- @OETestID(163)
def test_force_task_2(self):
# test 2 from bug 5875
test_recipe = 'zlib'
@@ -112,15 +122,14 @@ class BitbakeTests(OESelftestTestCase):
for task in look_for_tasks:
self.assertIn(task, result.output, msg="Couldn't find %s task.")
- @OETestID(167)
def test_bitbake_g(self):
- result = bitbake('-g core-image-minimal')
- for f in ['pn-buildlist', 'recipe-depends.dot', 'task-depends.dot']:
+ recipe = 'base-files'
+ result = bitbake('-g %s' % recipe)
+ for f in ['pn-buildlist', 'task-depends.dot']:
self.addCleanup(os.remove, f)
self.assertTrue('Task dependencies saved to \'task-depends.dot\'' in result.output, msg = "No task dependency \"task-depends.dot\" file was generated for the given task target. bitbake output: %s" % result.output)
- self.assertTrue('busybox' in ftools.read_file(os.path.join(self.builddir, 'task-depends.dot')), msg = "No \"busybox\" dependency found in task-depends.dot file.")
+ self.assertIn(recipe, ftools.read_file(os.path.join(self.builddir, 'task-depends.dot')))
- @OETestID(899)
def test_image_manifest(self):
bitbake('core-image-minimal')
bb_vars = get_bb_vars(["DEPLOY_DIR_IMAGE", "IMAGE_LINK_NAME"], "core-image-minimal")
@@ -129,28 +138,20 @@ class BitbakeTests(OESelftestTestCase):
manifest = os.path.join(deploydir, imagename + ".manifest")
self.assertTrue(os.path.islink(manifest), msg="No manifest file created for image. It should have been created in %s" % manifest)
- @OETestID(168)
def test_invalid_recipe_src_uri(self):
data = 'SRC_URI = "file://invalid"'
- self.write_recipeinc('man', data)
+ self.write_recipeinc('man-db', data)
self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\"
SSTATE_DIR = \"${TOPDIR}/download-selftest\"
-INHERIT_remove = \"report-error\"
+INHERIT:remove = \"report-error\"
""")
self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
- bitbake('-ccleanall man')
- result = bitbake('-c fetch man', ignore_status=True)
- bitbake('-ccleanall man')
- self.delete_recipeinc('man')
+ result = bitbake('-c fetch man-db', ignore_status=True)
+ self.delete_recipeinc('man-db')
self.assertEqual(result.status, 1, msg="Command succeded when it should have failed. bitbake output: %s" % result.output)
- self.assertTrue('Fetcher failure: Unable to find file file://invalid anywhere. The paths that were searched were:' in result.output, msg = "\"invalid\" file \
-doesn't exist, yet no error message encountered. bitbake output: %s" % result.output)
- line = self.getline(result, 'Fetcher failure for URL: \'file://invalid\'. Unable to fetch URL from any source.')
- self.assertTrue(line and line.startswith("ERROR:"), msg = "\"invalid\" file \
-doesn't exist, yet fetcher didn't report any error. bitbake output: %s" % result.output)
+ self.assertIn('Unable to get checksum for man-db SRC_URI entry invalid: file could not be found', result.output)
- @OETestID(171)
def test_rename_downloaded_file(self):
# TODO unique dldir instead of using cleanall
# TODO: need to set sstatedir?
@@ -159,7 +160,7 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\"
""")
self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
- data = 'SRC_URI = "${GNU_MIRROR}/aspell/aspell-${PV}.tar.gz;downloadfilename=test-aspell.tar.gz"'
+ data = 'SRC_URI = "https://downloads.yoctoproject.org/mirror/sources/aspell-${PV}.tar.gz;downloadfilename=test-aspell.tar.gz"'
self.write_recipeinc('aspell', data)
result = bitbake('-f -c fetch aspell', ignore_status=True)
self.delete_recipeinc('aspell')
@@ -168,81 +169,84 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\"
self.assertTrue(os.path.isfile(os.path.join(dl_dir, 'test-aspell.tar.gz')), msg = "File rename failed. No corresponding test-aspell.tar.gz file found under %s" % dl_dir)
self.assertTrue(os.path.isfile(os.path.join(dl_dir, 'test-aspell.tar.gz.done')), "File rename failed. No corresponding test-aspell.tar.gz.done file found under %s" % dl_dir)
- @OETestID(1028)
def test_environment(self):
self.write_config("TEST_ENV=\"localconf\"")
result = runCmd('bitbake -e | grep TEST_ENV=')
- self.assertTrue('localconf' in result.output, msg = "bitbake didn't report any value for TEST_ENV variable. To test, run 'bitbake -e | grep TEST_ENV='")
+ self.assertIn('localconf', result.output)
- @OETestID(1029)
def test_dry_run(self):
- result = runCmd('bitbake -n m4-native')
+ result = runCmd('bitbake -n selftest-hello-native')
self.assertEqual(0, result.status, "bitbake dry run didn't run as expected. %s" % result.output)
- @OETestID(1030)
def test_just_parse(self):
result = runCmd('bitbake -p')
self.assertEqual(0, result.status, "errors encountered when parsing recipes. %s" % result.output)
- @OETestID(1031)
def test_version(self):
result = runCmd('bitbake -s | grep wget')
- find = re.search("wget *:([0-9a-zA-Z\.\-]+)", result.output)
+ find = re.search(r"wget *:([0-9a-zA-Z\.\-]+)", result.output)
self.assertTrue(find, "No version returned for searched recipe. bitbake output: %s" % result.output)
- @OETestID(1032)
def test_prefile(self):
+ # Test when the prefile does not exist
+ result = runCmd('bitbake -r conf/prefile.conf', ignore_status=True)
+        self.assertEqual(1, result.status, "bitbake should have errored because the specified prefile does not exist: %s" % result.output)
+ # Test when the prefile exists
preconf = os.path.join(self.builddir, 'conf/prefile.conf')
self.track_for_cleanup(preconf)
ftools.write_file(preconf ,"TEST_PREFILE=\"prefile\"")
result = runCmd('bitbake -r conf/prefile.conf -e | grep TEST_PREFILE=')
- self.assertTrue('prefile' in result.output, "Preconfigure file \"prefile.conf\"was not taken into consideration. ")
+ self.assertIn('prefile', result.output)
self.write_config("TEST_PREFILE=\"localconf\"")
result = runCmd('bitbake -r conf/prefile.conf -e | grep TEST_PREFILE=')
- self.assertTrue('localconf' in result.output, "Preconfigure file \"prefile.conf\"was not taken into consideration.")
+ self.assertIn('localconf', result.output)
- @OETestID(1033)
def test_postfile(self):
+ # Test when the postfile does not exist
+ result = runCmd('bitbake -R conf/postfile.conf', ignore_status=True)
+        self.assertEqual(1, result.status, "bitbake should have errored because the specified postfile does not exist: %s" % result.output)
+ # Test when the postfile exists
postconf = os.path.join(self.builddir, 'conf/postfile.conf')
self.track_for_cleanup(postconf)
ftools.write_file(postconf , "TEST_POSTFILE=\"postfile\"")
self.write_config("TEST_POSTFILE=\"localconf\"")
result = runCmd('bitbake -R conf/postfile.conf -e | grep TEST_POSTFILE=')
- self.assertTrue('postfile' in result.output, "Postconfigure file \"postfile.conf\"was not taken into consideration.")
+ self.assertIn('postfile', result.output)
- @OETestID(1034)
def test_checkuri(self):
result = runCmd('bitbake -c checkuri m4')
self.assertEqual(0, result.status, msg = "\"checkuri\" task was not executed. bitbake output: %s" % result.output)
- @OETestID(1035)
def test_continue(self):
self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\"
SSTATE_DIR = \"${TOPDIR}/download-selftest\"
-INHERIT_remove = \"report-error\"
+INHERIT:remove = \"report-error\"
""")
self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
- self.write_recipeinc('man',"\ndo_fail_task () {\nexit 1 \n}\n\naddtask do_fail_task before do_fetch\n" )
- runCmd('bitbake -c cleanall man xcursor-transparent-theme')
- result = runCmd('bitbake -c unpack -k man xcursor-transparent-theme', ignore_status=True)
+ self.write_recipeinc('man-db',"\ndo_fail_task () {\nexit 1 \n}\n\naddtask do_fail_task before do_fetch\n" )
+ runCmd('bitbake -c cleanall man-db xcursor-transparent-theme')
+ result = runCmd('bitbake -c unpack -k man-db xcursor-transparent-theme', ignore_status=True)
errorpos = result.output.find('ERROR: Function failed: do_fail_task')
manver = re.search("NOTE: recipe xcursor-transparent-theme-(.*?): task do_unpack: Started", result.output)
continuepos = result.output.find('NOTE: recipe xcursor-transparent-theme-%s: task do_unpack: Started' % manver.group(1))
self.assertLess(errorpos,continuepos, msg = "bitbake didn't pass do_fail_task. bitbake output: %s" % result.output)
- @OETestID(1119)
def test_non_gplv3(self):
- self.write_config('INCOMPATIBLE_LICENSE = "GPLv3"')
+ self.write_config('''INCOMPATIBLE_LICENSE = "GPL-3.0-or-later"
+require conf/distro/include/no-gplv3.inc
+''')
result = bitbake('selftest-ed', ignore_status=True)
self.assertEqual(result.status, 0, "Bitbake failed, exit code %s, output %s" % (result.status, result.output))
lic_dir = get_bb_var('LICENSE_DIRECTORY')
- self.assertFalse(os.path.isfile(os.path.join(lic_dir, 'selftest-ed/generic_GPLv3')))
- self.assertTrue(os.path.isfile(os.path.join(lic_dir, 'selftest-ed/generic_GPLv2')))
+ arch = get_bb_var('SSTATE_PKGARCH')
+ filename = os.path.join(lic_dir, arch, 'selftest-ed', 'generic_GPL-3.0-or-later')
+ self.assertFalse(os.path.isfile(filename), msg="License file %s exists and shouldn't" % filename)
+ filename = os.path.join(lic_dir, arch, 'selftest-ed', 'generic_GPL-2.0-or-later')
+ self.assertTrue(os.path.isfile(filename), msg="License file %s doesn't exist" % filename)
- @OETestID(1422)
def test_setscene_only(self):
""" Bitbake option to restore from sstate only within a build (i.e. execute no real tasks, only setscene)"""
- test_recipe = 'ed'
+ test_recipe = 'selftest-hello-native'
bitbake(test_recipe)
bitbake('-c clean %s' % test_recipe)
@@ -254,7 +258,36 @@ INHERIT_remove = \"report-error\"
self.assertIn('_setscene', task, 'A task different from _setscene ran: %s.\n'
'Executed tasks were: %s' % (task, str(tasks)))
- @OETestID(1425)
+ def test_skip_setscene(self):
+ test_recipe = 'selftest-hello-native'
+
+ bitbake(test_recipe)
+ bitbake('-c clean %s' % test_recipe)
+
+ ret = bitbake('--setscene-only %s' % test_recipe)
+ tasks = re.findall(r'task\s+(do_\S+):', ret.output)
+
+ for task in tasks:
+ self.assertIn('_setscene', task, 'A task different from _setscene ran: %s.\n'
+ 'Executed tasks were: %s' % (task, str(tasks)))
+
+ # Run without setscene. Should do nothing
+ ret = bitbake('--skip-setscene %s' % test_recipe)
+ tasks = re.findall(r'task\s+(do_\S+):', ret.output)
+
+ self.assertFalse(tasks, 'Tasks %s ran when they should not have' % (str(tasks)))
+
+ # Clean (leave sstate cache) and run with --skip-setscene. No setscene
+ # tasks should run
+ bitbake('-c clean %s' % test_recipe)
+
+ ret = bitbake('--skip-setscene %s' % test_recipe)
+ tasks = re.findall(r'task\s+(do_\S+):', ret.output)
+
+ for task in tasks:
+ self.assertNotIn('_setscene', task, 'A _setscene task ran: %s.\n'
+ 'Executed tasks were: %s' % (task, str(tasks)))
+
def test_bbappend_order(self):
""" Bitbake should bbappend to recipe in a predictable order """
test_recipe = 'ed'
@@ -277,3 +310,68 @@ INHERIT_remove = \"report-error\"
test_recipe_summary_after = get_bb_var('SUMMARY', test_recipe)
self.assertEqual(expected_recipe_summary, test_recipe_summary_after)
+
+ def test_git_patchtool(self):
+ """ PATCHTOOL=git should work with non-git sources like tarballs
+        the test recipe must NOT contain a git:// repository in SRC_URI
+ """
+ test_recipe = "man-db"
+ self.write_recipeinc(test_recipe, 'PATCHTOOL=\"git\"')
+ src = get_bb_var("SRC_URI",test_recipe)
+ gitscm = re.search("git://", src)
+ self.assertFalse(gitscm, "test_git_patchtool pre-condition failed: {} test recipe contains git repo!".format(test_recipe))
+ result = bitbake('{} -c patch'.format(test_recipe), ignore_status=False)
+        fatal = re.search(r"fatal: not a git repository \(or any of the parent directories\)", result.output)
+ self.assertFalse(fatal, "Failed to patch using PATCHTOOL=\"git\"")
+ self.delete_recipeinc(test_recipe)
+ bitbake('-cclean {}'.format(test_recipe))
+
+ def test_git_patchtool2(self):
+        """ Test if PATCHTOOL=git works with a git repo and doesn't reinitialize it
+ """
+ test_recipe = "gitrepotest"
+ src = get_bb_var("SRC_URI",test_recipe)
+ gitscm = re.search("git://", src)
+        self.assertTrue(gitscm, "test_git_patchtool2 pre-condition failed: {} test recipe doesn't contain a git repo!".format(test_recipe))
+ result = bitbake('{} -c patch'.format(test_recipe), ignore_status=False)
+ srcdir = get_bb_var('S', test_recipe)
+ result = runCmd("git log", cwd = srcdir)
+ self.assertFalse("bitbake_patching_started" in result.output, msg = "Repository has been reinitialized. {}".format(srcdir))
+ self.delete_recipeinc(test_recipe)
+ bitbake('-cclean {}'.format(test_recipe))
+
+ def test_git_unpack_nonetwork(self):
+ """
+ Test that a recipe with a floating tag that needs to be resolved upstream doesn't
+        access the network in a patch task run in a separate build invocation
+ """
+
+ # Enable the recipe to float using a distro override
+ self.write_config("DISTROOVERRIDES .= \":gitunpack-enable-recipe\"")
+
+ bitbake('gitunpackoffline -c fetch')
+ bitbake('gitunpackoffline -c patch')
+
+ def test_git_unpack_nonetwork_fail(self):
+ """
+        Test that a recipe with a floating tag, which never calls bb.fetch2.get_srcrev()
+        (e.g. because its PV does not use SRCPV), raises an error when the fetcher is called.
+ """
+
+ # Enable the recipe to float using a distro override
+ self.write_config("DISTROOVERRIDES .= \":gitunpack-enable-recipe\"")
+
+ result = bitbake('gitunpackoffline-fail -c fetch', ignore_status=True)
+ self.assertTrue(re.search("Recipe uses a floating tag/branch .* for repo .* without a fixed SRCREV yet doesn't call bb.fetch2.get_srcrev()", result.output), msg = "Recipe without PV set to SRCPV should have failed: %s" % result.output)
+
+ def test_unexpanded_variable_in_path(self):
+ """
+        Test that bitbake fails if a directory name contains an unexpanded bitbake variable
+ """
+ recipe_name = "gitunpackoffline"
+ self.write_config('PV:pn-gitunpackoffline:append = "+${UNDEFVAL}"')
+ result = bitbake('{}'.format(recipe_name), ignore_status=True)
+ self.assertGreater(result.status, 0, "Build should have failed if ${ is in the path")
+ self.assertTrue(re.search("ERROR: Directory name /.* contains unexpanded bitbake variable. This may cause build failures and WORKDIR polution",
+ result.output), msg = "mkdirhier with unexpanded variable should have failed: %s" % result.output)
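
The setscene tests above all lean on the same convention: tasks restored from sstate report their name with a '_setscene' suffix in bitbake's console output, so filtering the log with a single regular expression is enough to tell real execution apart from sstate restoration. A minimal sketch of that extraction (the log line format matches the one test_continue greps for; the recipe version shown here is made up):

    import re

    # Hypothetical console line; only the "task do_...:" portion matters here.
    line = "NOTE: recipe selftest-hello-native-1.0-r0: task do_populate_sysroot_setscene: Started"
    tasks = re.findall(r'task\s+(do_\S+):', line)
    print(tasks)                                    # ['do_populate_sysroot_setscene']
    print(all('_setscene' in t for t in tasks))     # True when only setscene tasks ran
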
diff --git a/meta/lib/oeqa/selftest/cases/binutils.py b/meta/lib/oeqa/selftest/cases/binutils.py
new file mode 100644
index 0000000000..1688eabe4e
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/binutils.py
@@ -0,0 +1,56 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+import os
+import time
+from oeqa.core.decorator import OETestTag
+from oeqa.core.case import OEPTestResultTestCase
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_vars
+
+def parse_values(content):
+ for i in content:
+ for v in ["PASS", "FAIL", "XPASS", "XFAIL", "UNRESOLVED", "UNSUPPORTED", "UNTESTED", "ERROR", "WARNING"]:
+ if i.startswith(v + ": "):
+ yield i[len(v) + 2:].strip(), v
+ break
+
+@OETestTag("toolchain-user", "toolchain-system")
+class BinutilsCrossSelfTest(OESelftestTestCase, OEPTestResultTestCase):
+ def test_binutils(self):
+ self.run_binutils("binutils")
+
+ def test_gas(self):
+ self.run_binutils("gas")
+
+ def test_ld(self):
+ self.run_binutils("ld")
+
+ def run_binutils(self, suite):
+ features = []
+ features.append('CHECK_TARGETS = "{0}"'.format(suite))
+ self.write_config("\n".join(features))
+
+ recipe = "binutils-cross-testsuite"
+ bb_vars = get_bb_vars(["B", "TARGET_SYS", "T"], recipe)
+ builddir, target_sys, tdir = bb_vars["B"], bb_vars["TARGET_SYS"], bb_vars["T"]
+
+ start_time = time.time()
+
+ bitbake("{0} -c check".format(recipe))
+
+ end_time = time.time()
+
+ sumspath = os.path.join(builddir, suite, "{0}.sum".format(suite))
+ if not os.path.exists(sumspath):
+ sumspath = os.path.join(builddir, suite, "testsuite", "{0}.sum".format(suite))
+ logpath = os.path.splitext(sumspath)[0] + ".log"
+
+ ptestsuite = "binutils-{}".format(suite) if suite != "binutils" else suite
+ self.ptest_section(ptestsuite, duration = int(end_time - start_time), logfile = logpath)
+ with open(sumspath, "r") as f:
+ for test, result in parse_values(f):
+ self.ptest_result(ptestsuite, test, result)
+
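
parse_values() above understands the DejaGnu summary (.sum) format that the binutils, gas and ld testsuites produce: every result line starts with a status keyword followed by ': ' and the test name, and all other lines are ignored. A small illustration with made-up result lines:

    lines = [
        "Running target unix",              # ignored: no status prefix
        "PASS: ar non-deterministic mode",
        "XFAIL: weak symbols",
    ]
    for name, status in parse_values(lines):
        print(status, name)
    # PASS ar non-deterministic mode
    # XFAIL weak symbols
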
diff --git a/meta/lib/oeqa/selftest/cases/buildhistory.py b/meta/lib/oeqa/selftest/cases/buildhistory.py
index 06792d9146..2d55994916 100644
--- a/meta/lib/oeqa/selftest/cases/buildhistory.py
+++ b/meta/lib/oeqa/selftest/cases/buildhistory.py
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
import os
import re
import datetime
diff --git a/meta/lib/oeqa/selftest/cases/buildoptions.py b/meta/lib/oeqa/selftest/cases/buildoptions.py
index cf221c33af..31dafaa9c5 100644
--- a/meta/lib/oeqa/selftest/cases/buildoptions.py
+++ b/meta/lib/oeqa/selftest/cases/buildoptions.py
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
import os
import re
import glob as g
@@ -5,13 +11,13 @@ import shutil
import tempfile
from oeqa.selftest.case import OESelftestTestCase
from oeqa.selftest.cases.buildhistory import BuildhistoryBase
-from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
+from oeqa.core.decorator.data import skipIfMachine
+from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars
import oeqa.utils.ftools as ftools
-from oeqa.core.decorator.oeid import OETestID
+from oeqa.core.decorator import OETestTag
class ImageOptionsTests(OESelftestTestCase):
- @OETestID(761)
def test_incremental_image_generation(self):
image_pkgtype = get_bb_var("IMAGE_PKGTYPE")
if image_pkgtype != 'rpm':
@@ -30,42 +36,42 @@ class ImageOptionsTests(OESelftestTestCase):
incremental_removed = re.search(r"Erasing\s*:\s*packagegroup-core-ssh-openssh", log_data_removed)
self.assertTrue(incremental_removed, msg = "Match failed in:\n%s" % log_data_removed)
- @OETestID(286)
def test_ccache_tool(self):
bitbake("ccache-native")
bb_vars = get_bb_vars(['SYSROOT_DESTDIR', 'bindir'], 'ccache-native')
p = bb_vars['SYSROOT_DESTDIR'] + bb_vars['bindir'] + "/" + "ccache"
self.assertTrue(os.path.isfile(p), msg = "No ccache found (%s)" % p)
self.write_config('INHERIT += "ccache"')
- self.add_command_to_tearDown('bitbake -c clean m4')
- bitbake("m4 -f -c compile")
- log_compile = os.path.join(get_bb_var("WORKDIR","m4"), "temp/log.do_compile")
- res = runCmd("grep ccache %s" % log_compile, ignore_status=True)
- self.assertEqual(0, res.status, msg="No match for ccache in m4 log.do_compile. For further details: %s" % log_compile)
+ recipe = "libgcc-initial"
+ self.add_command_to_tearDown('bitbake -c clean %s' % recipe)
+ bitbake("%s -c clean" % recipe)
+ bitbake("%s -f -c compile" % recipe)
+ log_compile = os.path.join(get_bb_var("WORKDIR", recipe), "temp/log.do_compile")
+ with open(log_compile, "r") as f:
+ loglines = "".join(f.readlines())
+        self.assertIn("ccache", loglines, msg="No match for ccache in %s log.do_compile. For further details: %s" % (recipe, log_compile))
- @OETestID(1435)
def test_read_only_image(self):
distro_features = get_bb_var('DISTRO_FEATURES')
if not ('x11' in distro_features and 'opengl' in distro_features):
- self.skipTest('core-image-sato requires x11 and opengl in distro features')
+ self.skipTest('core-image-sato/weston requires x11 and opengl in distro features')
self.write_config('IMAGE_FEATURES += "read-only-rootfs"')
- bitbake("core-image-sato")
+ bitbake("core-image-sato core-image-weston")
# do_image will fail if there are any pending postinsts
class DiskMonTest(OESelftestTestCase):
- @OETestID(277)
def test_stoptask_behavior(self):
- self.write_config('BB_DISKMON_DIRS = "STOPTASKS,${TMPDIR},100000G,100K"')
- res = bitbake("m4", ignore_status = True)
+ self.write_config('BB_DISKMON_DIRS = "STOPTASKS,${TMPDIR},100000G,100K"\nBB_HEARTBEAT_EVENT = "1"')
+ res = bitbake("delay -c delay", ignore_status = True)
self.assertTrue('ERROR: No new tasks can be executed since the disk space monitor action is "STOPTASKS"!' in res.output, msg = "Tasks should have stopped. Disk monitor is set to STOPTASK: %s" % res.output)
self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output))
- self.write_config('BB_DISKMON_DIRS = "ABORT,${TMPDIR},100000G,100K"')
- res = bitbake("m4", ignore_status = True)
- self.assertTrue('ERROR: Immediately abort since the disk space monitor action is "ABORT"!' in res.output, "Tasks should have been aborted immediatelly. Disk monitor is set to ABORT: %s" % res.output)
+ self.write_config('BB_DISKMON_DIRS = "HALT,${TMPDIR},100000G,100K"\nBB_HEARTBEAT_EVENT = "1"')
+ res = bitbake("delay -c delay", ignore_status = True)
+ self.assertTrue('ERROR: Immediately halt since the disk space monitor action is "HALT"!' in res.output, "Tasks should have been halted immediately. Disk monitor is set to HALT: %s" % res.output)
self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output))
- self.write_config('BB_DISKMON_DIRS = "WARN,${TMPDIR},100000G,100K"')
- res = bitbake("m4")
+ self.write_config('BB_DISKMON_DIRS = "WARN,${TMPDIR},100000G,100K"\nBB_HEARTBEAT_EVENT = "1"')
+ res = bitbake("delay -c delay")
self.assertTrue('WARNING: The free space' in res.output, msg = "A warning should have been displayed for disk monitor is set to WARN: %s" %res.output)
class SanityOptionsTest(OESelftestTestCase):
@@ -74,12 +80,11 @@ class SanityOptionsTest(OESelftestTestCase):
if line in l:
return l
- @OETestID(927)
def test_options_warnqa_errorqa_switch(self):
- self.write_config("INHERIT_remove = \"report-error\"")
+ self.write_config("INHERIT:remove = \"report-error\"")
if "packages-list" not in get_bb_var("ERROR_QA"):
- self.append_config("ERROR_QA_append = \" packages-list\"")
+ self.append_config("ERROR_QA:append = \" packages-list\"")
self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"')
self.add_command_to_tearDown('bitbake -c clean xcursor-transparent-theme')
@@ -89,14 +94,13 @@ class SanityOptionsTest(OESelftestTestCase):
self.assertTrue(line and line.startswith("ERROR:"), msg=res.output)
self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output))
self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"')
- self.append_config('ERROR_QA_remove = "packages-list"')
- self.append_config('WARN_QA_append = " packages-list"')
+ self.append_config('ERROR_QA:remove = "packages-list"')
+ self.append_config('WARN_QA:append = " packages-list"')
res = bitbake("xcursor-transparent-theme -f -c package")
self.delete_recipeinc('xcursor-transparent-theme')
line = self.getline(res, "QA Issue: xcursor-transparent-theme-dbg is listed in PACKAGES multiple times, this leads to packaging errors.")
self.assertTrue(line and line.startswith("WARNING:"), msg=res.output)
- @OETestID(1421)
def test_layer_without_git_dir(self):
"""
Summary: Test that layer git revisions are displayed and do not fail without git repository
@@ -138,29 +142,98 @@ class SanityOptionsTest(OESelftestTestCase):
class BuildhistoryTests(BuildhistoryBase):
- @OETestID(293)
def test_buildhistory_basic(self):
self.run_buildhistory_operation('xcursor-transparent-theme')
self.assertTrue(os.path.isdir(get_bb_var('BUILDHISTORY_DIR')), "buildhistory dir was not created.")
- @OETestID(294)
def test_buildhistory_buildtime_pr_backwards(self):
target = 'xcursor-transparent-theme'
- error = "ERROR:.*QA Issue: Package version for package %s went backwards which would break package feeds from (.*-r1.* to .*-r0.*)" % target
+ error = "ERROR:.*QA Issue: Package version for package %s went backwards which would break package feeds \(from .*-r1.* to .*-r0.*\)" % target
self.run_buildhistory_operation(target, target_config="PR = \"r1\"", change_bh_location=True)
self.run_buildhistory_operation(target, target_config="PR = \"r0\"", change_bh_location=False, expect_error=True, error_regex=error)
+ def test_fileinfo(self):
+ self.config_buildhistory()
+ bitbake('hicolor-icon-theme')
+ history_dir = get_bb_var('BUILDHISTORY_DIR_PACKAGE', 'hicolor-icon-theme')
+ self.assertTrue(os.path.isdir(history_dir), 'buildhistory dir was not created.')
+
+ def load_bh(f):
+ d = {}
+ for line in open(f):
+ split = [s.strip() for s in line.split('=', 1)]
+ if len(split) > 1:
+ d[split[0]] = split[1]
+ return d
+
+ data = load_bh(os.path.join(history_dir, 'hicolor-icon-theme', 'latest'))
+ self.assertIn('FILELIST', data)
+ self.assertEqual(data['FILELIST'], '/usr/share/icons/hicolor/index.theme')
+ self.assertGreater(int(data['PKGSIZE']), 0)
+
+ data = load_bh(os.path.join(history_dir, 'hicolor-icon-theme-dev', 'latest'))
+ if 'FILELIST' in data:
+ self.assertEqual(data['FILELIST'], '')
+ self.assertEqual(int(data['PKGSIZE']), 0)
+
class ArchiverTest(OESelftestTestCase):
- @OETestID(926)
def test_arch_work_dir_and_export_source(self):
"""
Test for archiving the work directory and exporting the source files.
"""
- self.write_config("INHERIT += \"archiver\"\nARCHIVER_MODE[src] = \"original\"\nARCHIVER_MODE[srpm] = \"1\"")
+ self.write_config("""
+INHERIT += "archiver"
+PACKAGE_CLASSES = "package_rpm"
+ARCHIVER_MODE[src] = "original"
+ARCHIVER_MODE[srpm] = "1"
+""")
res = bitbake("xcursor-transparent-theme", ignore_status=True)
self.assertEqual(res.status, 0, "\nCouldn't build xcursortransparenttheme.\nbitbake output %s" % res.output)
deploy_dir_src = get_bb_var('DEPLOY_DIR_SRC')
pkgs_path = g.glob(str(deploy_dir_src) + "/allarch*/xcurs*")
src_file_glob = str(pkgs_path[0]) + "/xcursor*.src.rpm"
- tar_file_glob = str(pkgs_path[0]) + "/xcursor*.tar.gz"
- self.assertTrue((g.glob(src_file_glob) and g.glob(tar_file_glob)), "Couldn't find .src.rpm and .tar.gz files under %s/allarch*/xcursor*" % deploy_dir_src)
+ tar_file_glob = str(pkgs_path[0]) + "/xcursor*.tar.xz"
+ self.assertTrue((g.glob(src_file_glob) and g.glob(tar_file_glob)), "Couldn't find .src.rpm and .tar.xz files under %s/allarch*/xcursor*" % deploy_dir_src)
+
+class ToolchainOptions(OESelftestTestCase):
+ def test_toolchain_fortran(self):
+ """
+ Test that Fortran works by building a Hello, World binary.
+ """
+
+ features = 'FORTRAN:forcevariable = ",fortran"\n'
+ self.write_config(features)
+ bitbake('fortran-helloworld')
+
+@OETestTag("yocto-mirrors")
+class SourceMirroring(OESelftestTestCase):
+ # Can we download everything from the Yocto Sources Mirror over http only
+ def test_yocto_source_mirror(self):
+ self.write_config("""
+BB_ALLOWED_NETWORKS = "downloads.yoctoproject.org"
+MIRRORS = ""
+DL_DIR = "${TMPDIR}/test_downloads"
+STAMPS_DIR = "${TMPDIR}/test_stamps"
+SSTATE_DIR = "${TMPDIR}/test_sstate-cache"
+PREMIRRORS = "\\
+ bzr://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
+ cvs://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
+ git://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
+ gitsm://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
+ hg://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
+ osc://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
+ p4://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
+ svn://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
+ ftp://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
+ http://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
+ https://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n"
+""")
+
+ bitbake("world --runall fetch")
+
+
+class Poisoning(OESelftestTestCase):
+ def test_poisoning(self):
+ res = bitbake("poison", ignore_status=True)
+ self.assertNotEqual(res.status, 0)
+ self.assertTrue("is unsafe for cross-compilation" in res.output)
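
For reference on the DiskMonTest cases earlier in this file: each BB_DISKMON_DIRS entry is, per the BitBake manual, a comma-separated "<action>,<watched directory>,<minimum free space>,<minimum free inodes>" tuple, and BB_HEARTBEAT_EVENT sets the polling interval in seconds so the monitor reacts promptly. A minimal sketch of the configuration those tests write, with values copied from test_stoptask_behavior (the local.conf path assumes an initialised build environment):

    import os

    # 100000G free space is deliberately impossible, so the monitor always fires.
    with open(os.path.join(os.environ["BUILDDIR"], "conf", "local.conf"), "a") as f:
        f.write('BB_DISKMON_DIRS = "WARN,${TMPDIR},100000G,100K"\n')
        f.write('BB_HEARTBEAT_EVENT = "1"\n')
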
diff --git a/meta/lib/oeqa/selftest/cases/c_cpp.py b/meta/lib/oeqa/selftest/cases/c_cpp.py
new file mode 100644
index 0000000000..9a70ce29f5
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/c_cpp.py
@@ -0,0 +1,60 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.core.decorator.data import skipIfNotQemuUsermode
+from oeqa.utils.commands import bitbake
+
+
+class CCppTests(OESelftestTestCase):
+
+ @skipIfNotQemuUsermode()
+ def _qemu_usermode(self, recipe_name):
+ self.add_command_to_tearDown("bitbake -c clean %s" % recipe_name)
+ bitbake("%s -c run_tests" % recipe_name)
+
+ @skipIfNotQemuUsermode()
+ def _qemu_usermode_failing(self, recipe_name):
+ config = 'PACKAGECONFIG:pn-%s = "failing_test"' % recipe_name
+ self.write_config(config)
+ self.add_command_to_tearDown("bitbake -c clean %s" % recipe_name)
+ result = bitbake("%s -c run_tests" % recipe_name, ignore_status=True)
+ self.assertNotEqual(0, result.status, "command: %s is expected to fail but passed, status: %s, output: %s, error: %s" % (
+ result.command, result.status, result.output, result.error))
+
+
+class CMakeTests(CCppTests):
+ def test_cmake_qemu(self):
+ """Test for cmake-qemu.bbclass good case
+
+ compile the cmake-example and verify the CTests pass in qemu-user.
+ qemu-user is configured by CMAKE_CROSSCOMPILING_EMULATOR.
+ """
+ self._qemu_usermode("cmake-example")
+
+ def test_cmake_qemu_failing(self):
+ """Test for cmake-qemu.bbclass bad case
+
+ Break the comparison in the test code and verify the CTests do not pass.
+ """
+ self._qemu_usermode_failing("cmake-example")
+
+
+class MesonTests(CCppTests):
+ def test_meson_qemu(self):
+ """Test the qemu-user feature of the meson.bbclass good case
+
+        compile the meson-example and verify the unit tests pass in qemu-user.
+ qemu-user is configured by meson's exe_wrapper option.
+ """
+ self._qemu_usermode("meson-example")
+
+ def test_meson_qemu_failing(self):
+ """Test the qemu-user feature of the meson.bbclass bad case
+
+ Break the comparison in the test code and verify the Unit Test does not pass in qemu-user.
+ """
+ self._qemu_usermode_failing("meson-example")
diff --git a/meta/lib/oeqa/selftest/cases/containerimage.py b/meta/lib/oeqa/selftest/cases/containerimage.py
index 99a5cc9e57..23c0a1408a 100644
--- a/meta/lib/oeqa/selftest/cases/containerimage.py
+++ b/meta/lib/oeqa/selftest/cases/containerimage.py
@@ -1,8 +1,13 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
import os
from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import bitbake, get_bb_vars, runCmd
-from oeqa.core.decorator.oeid import OETestID
# This test builds an image with using the "container" IMAGE_FSTYPE, and
# ensures that then files in the image are only the ones expected.
@@ -10,7 +15,7 @@ from oeqa.core.decorator.oeid import OETestID
# The only package added to the image is container_image_testpkg, which
# contains one file. However, due to some other things not cleaning up during
# rootfs creation, there is some cruft. Ideally bugs will be filed and the
-# cruft removed, but for now we whitelist some known set.
+# cruft removed, but for now we ignore some known set.
#
# Also for performance reasons we're only checking the cruft when using ipk.
# When using deb, and rpm it is a bit different and we could test all
@@ -19,9 +24,8 @@ from oeqa.core.decorator.oeid import OETestID
#
class ContainerImageTests(OESelftestTestCase):
- # Verify that when specifying a IMAGE_TYPEDEP_ of the form "foo.bar" that
+ # Verify that when specifying a IMAGE_TYPEDEP: of the form "foo.bar" that
# the conversion type bar gets added as a dep as well
- @OETestID(1619)
def test_expected_files(self):
def get_each_path_part(path):
@@ -39,6 +43,10 @@ class ContainerImageTests(OESelftestTestCase):
IMAGE_FSTYPES = "container"
PACKAGE_CLASSES = "package_ipk"
IMAGE_FEATURES = ""
+IMAGE_BUILDINFO_FILE = ""
+INIT_MANAGER = "sysvinit"
+IMAGE_INSTALL:remove = "ssh-pregen-hostkeys"
+
""")
bbvars = get_bb_vars(['bindir', 'sysconfdir', 'localstatedir',
@@ -54,11 +62,7 @@ IMAGE_FEATURES = ""
'.{sysconfdir}/version',
'./run/',
'.{localstatedir}/cache/',
- '.{localstatedir}/cache/ldconfig/',
- '.{localstatedir}/cache/ldconfig/aux-cache',
- '.{localstatedir}/cache/opkg/',
- '.{localstatedir}/lib/',
- '.{localstatedir}/lib/opkg/'
+ '.{localstatedir}/lib/'
]
expected_files = [ x.format(bindir=bbvars['bindir'],
diff --git a/meta/lib/oeqa/selftest/cases/cve_check.py b/meta/lib/oeqa/selftest/cases/cve_check.py
new file mode 100644
index 0000000000..60cecd1328
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/cve_check.py
@@ -0,0 +1,242 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import json
+import os
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_vars
+
+class CVECheck(OESelftestTestCase):
+
+ def test_version_compare(self):
+ from oe.cve_check import Version
+
+ result = Version("100") > Version("99")
+ self.assertTrue( result, msg="Failed to compare version '100' > '99'")
+ result = Version("2.3.1") > Version("2.2.3")
+ self.assertTrue( result, msg="Failed to compare version '2.3.1' > '2.2.3'")
+ result = Version("2021-01-21") > Version("2020-12-25")
+ self.assertTrue( result, msg="Failed to compare version '2021-01-21' > '2020-12-25'")
+ result = Version("1.2-20200910") < Version("1.2-20200920")
+ self.assertTrue( result, msg="Failed to compare version '1.2-20200910' < '1.2-20200920'")
+
+ result = Version("1.0") >= Version("1.0beta")
+ self.assertTrue( result, msg="Failed to compare version '1.0' >= '1.0beta'")
+ result = Version("1.0-rc2") > Version("1.0-rc1")
+ self.assertTrue( result, msg="Failed to compare version '1.0-rc2' > '1.0-rc1'")
+ result = Version("1.0.alpha1") < Version("1.0")
+ self.assertTrue( result, msg="Failed to compare version '1.0.alpha1' < '1.0'")
+ result = Version("1.0_dev") <= Version("1.0")
+ self.assertTrue( result, msg="Failed to compare version '1.0_dev' <= '1.0'")
+
+ # ignore "p1" and "p2", so these should be equal
+ result = Version("1.0p2") == Version("1.0p1")
+ self.assertTrue( result ,msg="Failed to compare version '1.0p2' to '1.0p1'")
+ # ignore the "b" and "r"
+ result = Version("1.0b") == Version("1.0r")
+ self.assertTrue( result ,msg="Failed to compare version '1.0b' to '1.0r'")
+
+ # consider the trailing alphabet as patched level when comparing
+ result = Version("1.0b","alphabetical") < Version("1.0r","alphabetical")
+ self.assertTrue( result ,msg="Failed to compare version with suffix '1.0b' < '1.0r'")
+ result = Version("1.0b","alphabetical") > Version("1.0","alphabetical")
+ self.assertTrue( result ,msg="Failed to compare version with suffix '1.0b' > '1.0'")
+
+ # consider the trailing "p" and "patch" as patched released when comparing
+ result = Version("1.0","patch") < Version("1.0p1","patch")
+ self.assertTrue( result ,msg="Failed to compare version with suffix '1.0' < '1.0p1'")
+ result = Version("1.0p2","patch") > Version("1.0p1","patch")
+ self.assertTrue( result ,msg="Failed to compare version with suffix '1.0p2' > '1.0p1'")
+ result = Version("1.0_patch2","patch") < Version("1.0_patch3","patch")
+ self.assertTrue( result ,msg="Failed to compare version with suffix '1.0_patch2' < '1.0_patch3'")
+
+
+ def test_convert_cve_version(self):
+ from oe.cve_check import convert_cve_version
+
+ # Default format
+ self.assertEqual(convert_cve_version("8.3"), "8.3")
+ self.assertEqual(convert_cve_version(""), "")
+
+ # OpenSSL format version
+ self.assertEqual(convert_cve_version("1.1.1t"), "1.1.1t")
+
+ # OpenSSH format
+ self.assertEqual(convert_cve_version("8.3_p1"), "8.3p1")
+ self.assertEqual(convert_cve_version("8.3_p22"), "8.3p22")
+
+ # Linux kernel format
+ self.assertEqual(convert_cve_version("6.2_rc8"), "6.2-rc8")
+ self.assertEqual(convert_cve_version("6.2_rc31"), "6.2-rc31")
+
+
+ def test_recipe_report_json(self):
+ config = """
+INHERIT += "cve-check"
+CVE_CHECK_FORMAT_JSON = "1"
+"""
+ self.write_config(config)
+
+ vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "m4-native_cve.json")
+
+ try:
+ os.remove(summary_json)
+ os.remove(recipe_json)
+ except FileNotFoundError:
+ pass
+
+ bitbake("m4-native -c cve_check")
+
+ def check_m4_json(filename):
+ with open(filename) as f:
+ report = json.load(f)
+ self.assertEqual(report["version"], "1")
+ self.assertEqual(len(report["package"]), 1)
+ package = report["package"][0]
+ self.assertEqual(package["name"], "m4-native")
+ found_cves = { issue["id"]: issue["status"] for issue in package["issue"]}
+ self.assertIn("CVE-2008-1687", found_cves)
+ self.assertEqual(found_cves["CVE-2008-1687"], "Patched")
+
+ self.assertExists(summary_json)
+ check_m4_json(summary_json)
+ self.assertExists(recipe_json)
+ check_m4_json(recipe_json)
+
+
+ def test_image_json(self):
+ config = """
+INHERIT += "cve-check"
+CVE_CHECK_FORMAT_JSON = "1"
+"""
+ self.write_config(config)
+
+ vars = get_bb_vars(["CVE_CHECK_DIR", "CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ report_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ print(report_json)
+ try:
+ os.remove(report_json)
+ except FileNotFoundError:
+ pass
+
+ bitbake("core-image-minimal-initramfs")
+ self.assertExists(report_json)
+
+        # Check that the summary report lists more than one package
+ with open(report_json) as f:
+ report = json.load(f)
+ self.assertEqual(report["version"], "1")
+ self.assertGreater(len(report["package"]), 1)
+
+ # Check that a random recipe wrote a recipe report to deploy/cve/
+ recipename = report["package"][0]["name"]
+ recipe_report = os.path.join(vars["CVE_CHECK_DIR"], recipename + "_cve.json")
+ self.assertExists(recipe_report)
+ with open(recipe_report) as f:
+ report = json.load(f)
+ self.assertEqual(report["version"], "1")
+ self.assertEqual(len(report["package"]), 1)
+ self.assertEqual(report["package"][0]["name"], recipename)
+
+
+ def test_recipe_report_json_unpatched(self):
+ config = """
+INHERIT += "cve-check"
+CVE_CHECK_FORMAT_JSON = "1"
+CVE_CHECK_REPORT_PATCHED = "0"
+"""
+ self.write_config(config)
+
+ vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "m4-native_cve.json")
+
+ try:
+ os.remove(summary_json)
+ os.remove(recipe_json)
+ except FileNotFoundError:
+ pass
+
+ bitbake("m4-native -c cve_check")
+
+ def check_m4_json(filename):
+ with open(filename) as f:
+ report = json.load(f)
+ self.assertEqual(report["version"], "1")
+ self.assertEqual(len(report["package"]), 1)
+ package = report["package"][0]
+ self.assertEqual(package["name"], "m4-native")
+            # m4 has only Patched CVEs, so the issue list will be empty
+ self.assertEqual(package["issue"], [])
+
+ self.assertExists(summary_json)
+ check_m4_json(summary_json)
+ self.assertExists(recipe_json)
+ check_m4_json(recipe_json)
+
+
+ def test_recipe_report_json_ignored(self):
+ config = """
+INHERIT += "cve-check"
+CVE_CHECK_FORMAT_JSON = "1"
+CVE_CHECK_REPORT_PATCHED = "1"
+"""
+ self.write_config(config)
+
+ vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "logrotate_cve.json")
+
+ try:
+ os.remove(summary_json)
+ os.remove(recipe_json)
+ except FileNotFoundError:
+ pass
+
+ bitbake("logrotate -c cve_check")
+
+        def check_logrotate_json(filename):
+ with open(filename) as f:
+ report = json.load(f)
+ self.assertEqual(report["version"], "1")
+ self.assertEqual(len(report["package"]), 1)
+ package = report["package"][0]
+ self.assertEqual(package["name"], "logrotate")
+ found_cves = {}
+ for issue in package["issue"]:
+ found_cves[issue["id"]] = {
+ "status" : issue["status"],
+ "detail" : issue["detail"] if "detail" in issue else "",
+ "description" : issue["description"] if "description" in issue else ""
+ }
+ # m4 CVE should not be in logrotate
+ self.assertNotIn("CVE-2008-1687", found_cves)
+ # logrotate has both Patched and Ignored CVEs
+ self.assertIn("CVE-2011-1098", found_cves)
+ self.assertEqual(found_cves["CVE-2011-1098"]["status"], "Patched")
+ self.assertEqual(len(found_cves["CVE-2011-1098"]["detail"]), 0)
+ self.assertEqual(len(found_cves["CVE-2011-1098"]["description"]), 0)
+ detail = "not-applicable-platform"
+ description = "CVE is debian, gentoo or SUSE specific on the way logrotate was installed/used"
+ self.assertIn("CVE-2011-1548", found_cves)
+ self.assertEqual(found_cves["CVE-2011-1548"]["status"], "Ignored")
+ self.assertEqual(found_cves["CVE-2011-1548"]["detail"], detail)
+ self.assertEqual(found_cves["CVE-2011-1548"]["description"], description)
+ self.assertIn("CVE-2011-1549", found_cves)
+ self.assertEqual(found_cves["CVE-2011-1549"]["status"], "Ignored")
+ self.assertEqual(found_cves["CVE-2011-1549"]["detail"], detail)
+ self.assertEqual(found_cves["CVE-2011-1549"]["description"], description)
+ self.assertIn("CVE-2011-1550", found_cves)
+ self.assertEqual(found_cves["CVE-2011-1550"]["status"], "Ignored")
+ self.assertEqual(found_cves["CVE-2011-1550"]["detail"], detail)
+ self.assertEqual(found_cves["CVE-2011-1550"]["description"], description)
+
+ self.assertExists(summary_json)
+        check_logrotate_json(summary_json)
+ self.assertExists(recipe_json)
+        check_logrotate_json(recipe_json)
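
All of the cve-check assertions above probe the same JSON report shape: a top-level "version" string and a "package" list, where every package has a "name" and an "issue" list whose entries carry at least an "id" and a "status" (Ignored entries in the logrotate test also carry "detail" and "description"). A minimal sketch of what json.load() is expected to return, using values from the m4-native test:

    report = {
        "version": "1",
        "package": [
            {
                "name": "m4-native",
                "issue": [
                    {"id": "CVE-2008-1687", "status": "Patched"},
                ],
            },
        ],
    }
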
diff --git a/meta/lib/oeqa/selftest/cases/debuginfod.py b/meta/lib/oeqa/selftest/cases/debuginfod.py
new file mode 100644
index 0000000000..505b4be837
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/debuginfod.py
@@ -0,0 +1,158 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+import os
+import socketserver
+import subprocess
+import time
+import urllib
+import pathlib
+
+from oeqa.core.decorator import OETestTag
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_var, runqemu
+
+
+class Debuginfod(OESelftestTestCase):
+
+ def wait_for_debuginfod(self, port):
+ """
+ debuginfod takes time to scan the packages and requesting too early may
+ result in a test failure if the right packages haven't been scanned yet.
+
+ Request the metrics endpoint periodically and wait for there to be no
+ busy scanning threads.
+
+ Returns if debuginfod is ready, raises an exception if not within the
+ timeout.
+ """
+
+ # Wait two minutes
+ countdown = 24
+ delay = 5
+ latest = None
+
+ while countdown:
+ self.logger.info("waiting...")
+ time.sleep(delay)
+
+ self.logger.info("polling server")
+ if self.debuginfod.poll():
+ self.logger.info("server dead")
+ self.debuginfod.communicate()
+ self.fail("debuginfod terminated unexpectedly")
+ self.logger.info("server alive")
+
+ try:
+ with urllib.request.urlopen("http://localhost:%d/metrics" % port, timeout=10) as f:
+ for line in f.read().decode("ascii").splitlines():
+ key, value = line.rsplit(" ", 1)
+ if key == "thread_busy{role=\"scan\"}":
+ latest = int(value)
+ self.logger.info("Waiting for %d scan jobs to finish" % latest)
+ if latest == 0:
+ return
+ except urllib.error.URLError as e:
+ # TODO: how to catch just timeouts?
+ self.logger.error(e)
+
+ countdown -= 1
+
+        raise TimeoutError("Cannot connect to debuginfod, still %d scan jobs running" % latest)
+
+ def start_debuginfod(self):
+ # We assume that the caller has already bitbake'd elfutils-native:do_addto_recipe_sysroot
+
+ # Save some useful paths for later
+ native_sysroot = pathlib.Path(get_bb_var("RECIPE_SYSROOT_NATIVE", "elfutils-native"))
+ native_bindir = native_sysroot / "usr" / "bin"
+ self.debuginfod = native_bindir / "debuginfod"
+ self.debuginfod_find = native_bindir / "debuginfod-find"
+
+ cmd = [
+ self.debuginfod,
+ "--verbose",
+ # In-memory database, this is a one-shot test
+ "--database=:memory:",
+ # Don't use all the host cores
+ "--concurrency=8",
+ "--connection-pool=8",
+ # Disable rescanning, this is a one-shot test
+ "--rescan-time=0",
+ "--groom-time=0",
+ get_bb_var("DEPLOY_DIR"),
+ ]
+
+ format = get_bb_var("PACKAGE_CLASSES").split()[0]
+ if format == "package_deb":
+ cmd.append("--scan-deb-dir")
+ elif format == "package_ipk":
+ cmd.append("--scan-deb-dir")
+ elif format == "package_rpm":
+ cmd.append("--scan-rpm-dir")
+ else:
+ self.fail("Unknown package class %s" % format)
+
+ # Find a free port. Racey but the window is small.
+ with socketserver.TCPServer(("localhost", 0), None) as s:
+ self.port = s.server_address[1]
+ cmd.append("--port=%d" % self.port)
+
+ self.logger.info(f"Starting server {cmd}")
+ self.debuginfod = subprocess.Popen(cmd, env={})
+ self.wait_for_debuginfod(self.port)
+
+
+ def test_debuginfod_native(self):
+ """
+ Test debuginfod outside of qemu, by building a package and looking up a
+ binary's debuginfo using elfutils-native.
+ """
+
+ self.write_config("""
+TMPDIR = "${TOPDIR}/tmp-debuginfod"
+DISTRO_FEATURES:append = " debuginfod"
+""")
+ bitbake("elfutils-native:do_addto_recipe_sysroot xz xz:do_package")
+
+ try:
+ self.start_debuginfod()
+
+ env = os.environ.copy()
+ env["DEBUGINFOD_URLS"] = "http://localhost:%d/" % self.port
+
+ pkgs = pathlib.Path(get_bb_var("PKGDEST", "xz"))
+ cmd = (self.debuginfod_find, "debuginfo", pkgs / "xz" / "usr" / "bin" / "xz.xz")
+ self.logger.info(f"Starting client {cmd}")
+ output = subprocess.check_output(cmd, env=env, text=True)
+ # This should be more comprehensive
+ self.assertIn("/.cache/debuginfod_client/", output)
+ finally:
+ self.debuginfod.kill()
+
+ @OETestTag("runqemu")
+ def test_debuginfod_qemu(self):
+ """
+ Test debuginfod-find inside a qemu, talking to a debuginfod on the host.
+ """
+
+ self.write_config("""
+TMPDIR = "${TOPDIR}/tmp-debuginfod"
+DISTRO_FEATURES:append = " debuginfod"
+CORE_IMAGE_EXTRA_INSTALL += "elfutils xz"
+ """)
+ bitbake("core-image-minimal elfutils-native:do_addto_recipe_sysroot")
+
+ try:
+ self.start_debuginfod()
+
+ with runqemu("core-image-minimal", runqemuparams="nographic") as qemu:
+ cmd = "DEBUGINFOD_URLS=http://%s:%d/ debuginfod-find debuginfo /usr/bin/xz" % (qemu.server_ip, self.port)
+ self.logger.info(f"Starting client {cmd}")
+ status, output = qemu.run_serial(cmd)
+ # This should be more comprehensive
+ self.assertIn("/.cache/debuginfod_client/", output)
+ finally:
+ self.debuginfod.kill()
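
Both debuginfod tests exercise the same client workflow: export DEBUGINFOD_URLS pointing at the freshly started server and ask debuginfod-find for the debuginfo of a stripped binary; on success the tool downloads the file into its client cache and prints the cached path, which is why both tests check for "/.cache/debuginfod_client/". A rough host-side equivalent of what test_debuginfod_native runs (the port and the binary path are illustrative, and debuginfod-find is assumed to be on PATH):

    import os
    import subprocess

    env = dict(os.environ, DEBUGINFOD_URLS="http://localhost:8002/")  # port is whatever start_debuginfod() picked
    out = subprocess.check_output(
        ["debuginfod-find", "debuginfo", "/path/to/stripped/xz"],     # illustrative binary path
        env=env, text=True)
    print(out)   # cached path under ~/.cache/debuginfod_client/ on success
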
diff --git a/meta/lib/oeqa/selftest/cases/devtool.py b/meta/lib/oeqa/selftest/cases/devtool.py
index 43280cdc0e..c8f9534e41 100644
--- a/meta/lib/oeqa/selftest/cases/devtool.py
+++ b/meta/lib/oeqa/selftest/cases/devtool.py
@@ -1,24 +1,142 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import errno
import os
import re
import shutil
import tempfile
import glob
import fnmatch
+import unittest
+import json
-import oeqa.utils.ftools as ftools
from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import runCmd, bitbake, get_bb_var, create_temp_layer
from oeqa.utils.commands import get_bb_vars, runqemu, get_test_layer
-from oeqa.core.decorator.oeid import OETestID
+from oeqa.core.decorator import OETestTag
+
+oldmetapath = None
+
+def setUpModule():
+ import bb.utils
+
+ global templayerdir
+ templayerdir = tempfile.mkdtemp(prefix='devtoolqa')
+ corecopydir = os.path.join(templayerdir, 'core-copy')
+ bblayers_conf = os.path.join(os.environ['BUILDDIR'], 'conf', 'bblayers.conf')
+ edited_layers = []
+ # make sure user doesn't have a local workspace
+ result = runCmd('bitbake-layers show-layers')
+ assert "workspacelayer" not in result.output, "Devtool test suite cannot be run with a local workspace directory"
+
+ # We need to take a copy of the meta layer so we can modify it and not
+ # have any races against other tests that might be running in parallel
+ # however things like COREBASE mean that you can't just copy meta, you
+ # need the whole repository.
+ def bblayers_edit_cb(layerpath, canonical_layerpath):
+ global oldmetapath
+ if not canonical_layerpath.endswith('/'):
+ # This helps us match exactly when we're using this path later
+ canonical_layerpath += '/'
+ if not edited_layers and canonical_layerpath.endswith('/meta/'):
+ canonical_layerpath = os.path.realpath(canonical_layerpath) + '/'
+ edited_layers.append(layerpath)
+ oldmetapath = os.path.realpath(layerpath)
+
+ # when downloading poky from tar.gz some tests will be skipped (BUG 12389)
+ try:
+ runCmd('git rev-parse --is-inside-work-tree', cwd=canonical_layerpath)
+ except:
+ raise unittest.SkipTest("devtool tests require folder to be a git repo")
+
+ result = runCmd('git rev-parse --show-toplevel', cwd=canonical_layerpath)
+ oldreporoot = result.output.rstrip()
+ newmetapath = os.path.join(corecopydir, os.path.relpath(oldmetapath, oldreporoot))
+ runCmd('git clone file://%s %s' % (oldreporoot, corecopydir), cwd=templayerdir)
+ # Now we need to copy any modified files
+ # You might ask "why not just copy the entire tree instead of
+ # cloning and doing this?" - well, the problem with that is
+ # TMPDIR or an equally large subdirectory might exist
+ # under COREBASE and we don't want to copy that, so we have
+ # to be selective.
+ result = runCmd('git status --porcelain', cwd=oldreporoot)
+ for line in result.output.splitlines():
+ if line.startswith(' M ') or line.startswith('?? '):
+ relpth = line.split()[1]
+ pth = os.path.join(oldreporoot, relpth)
+ if pth.startswith(canonical_layerpath):
+ if relpth.endswith('/'):
+ destdir = os.path.join(corecopydir, relpth)
+ # avoid race condition by not copying .pyc files YPBZ#13421,13803
+ shutil.copytree(pth, destdir, ignore=shutil.ignore_patterns('*.pyc', '__pycache__'))
+ else:
+ destdir = os.path.join(corecopydir, os.path.dirname(relpth))
+ bb.utils.mkdirhier(destdir)
+ shutil.copy2(pth, destdir)
+ return newmetapath
+ else:
+ return layerpath
+ bb.utils.edit_bblayers_conf(bblayers_conf, None, None, bblayers_edit_cb)
+
+def tearDownModule():
+ if oldmetapath:
+ edited_layers = []
+ def bblayers_edit_cb(layerpath, canonical_layerpath):
+ if not edited_layers and canonical_layerpath.endswith('/meta'):
+ edited_layers.append(layerpath)
+ return oldmetapath
+ else:
+ return layerpath
+ bblayers_conf = os.path.join(os.environ['BUILDDIR'], 'conf', 'bblayers.conf')
+ bb.utils.edit_bblayers_conf(bblayers_conf, None, None, bblayers_edit_cb)
+ shutil.rmtree(templayerdir)
-class DevtoolBase(OESelftestTestCase):
+class DevtoolTestCase(OESelftestTestCase):
- buffer = True
+ def setUp(self):
+ """Test case setup function"""
+ super(DevtoolTestCase, self).setUp()
+ self.workspacedir = os.path.join(self.builddir, 'workspace')
+ self.assertTrue(not os.path.exists(self.workspacedir),
+ 'This test cannot be run with a workspace directory '
+ 'under the build directory')
+
+ def _check_src_repo(self, repo_dir):
+ """Check srctree git repository"""
+ self.assertTrue(os.path.isdir(os.path.join(repo_dir, '.git')),
+ 'git repository for external source tree not found')
+ result = runCmd('git status --porcelain', cwd=repo_dir)
+ self.assertEqual(result.output.strip(), "",
+ 'Created git repo is not clean')
+ result = runCmd('git symbolic-ref HEAD', cwd=repo_dir)
+ self.assertEqual(result.output.strip(), "refs/heads/devtool",
+ 'Wrong branch in git repo')
+
+ def _check_repo_status(self, repo_dir, expected_status):
+ """Check the worktree status of a repository"""
+ result = runCmd('git status . --porcelain',
+ cwd=repo_dir)
+ for line in result.output.splitlines():
+ for ind, (f_status, fn_re) in enumerate(expected_status):
+ if re.match(fn_re, line[3:]):
+ if f_status != line[:2]:
+ self.fail('Unexpected status in line: %s' % line)
+ expected_status.pop(ind)
+ break
+ else:
+ self.fail('Unexpected modified file in line: %s' % line)
+ if expected_status:
+ self.fail('Missing file changes: %s' % expected_status)
def _test_recipe_contents(self, recipefile, checkvars, checkinherits):
with open(recipefile, 'r') as f:
invar = None
invalue = None
+ inherits = set()
for line in f:
var = None
if invar:
@@ -40,7 +158,7 @@ class DevtoolBase(OESelftestTestCase):
invar = var
continue
elif line.startswith('inherit '):
- inherits = line.split()[1:]
+ inherits.update(line.split()[1:])
if var and var in checkvars:
needvalue = checkvars.pop(var)
@@ -115,66 +233,113 @@ class DevtoolBase(OESelftestTestCase):
filelist.append(' '.join(splitline))
return filelist
+ def _check_diff(self, diffoutput, addlines, removelines):
+ """Check output from 'git diff' matches expectation"""
+ remaining_addlines = addlines[:]
+ remaining_removelines = removelines[:]
+ for line in diffoutput.splitlines():
+ if line.startswith('+++') or line.startswith('---'):
+ continue
+ elif line.startswith('+'):
+ matched = False
+ for item in addlines:
+ if re.match(item, line[1:].strip()):
+ matched = True
+ remaining_addlines.remove(item)
+ break
+ self.assertTrue(matched, 'Unexpected diff add line: %s' % line)
+ elif line.startswith('-'):
+ matched = False
+ for item in removelines:
+ if re.match(item, line[1:].strip()):
+ matched = True
+ remaining_removelines.remove(item)
+ break
+ self.assertTrue(matched, 'Unexpected diff remove line: %s' % line)
+ if remaining_addlines:
+ self.fail('Expected added lines not found: %s' % remaining_addlines)
+ if remaining_removelines:
+ self.fail('Expected removed lines not found: %s' % remaining_removelines)
+
+ def _check_runqemu_prerequisites(self):
+ """Check runqemu is available
+
+ Whilst some tests would seemingly be better placed as a runtime test,
+ unfortunately the runtime tests run under bitbake and you can't run
+ devtool within bitbake (since devtool needs to run bitbake itself).
+ Additionally we are testing build-time functionality as well, so
+ really this has to be done as an oe-selftest test.
+ """
+ machine = get_bb_var('MACHINE')
+ if not machine.startswith('qemu'):
+ self.skipTest('This test only works with qemu machines')
+ if not os.path.exists('/etc/runqemu-nosudo'):
+ self.skipTest('You must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
+ result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ip tuntap show', ignore_status=True)
+ if result.status != 0:
+ result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ifconfig -a', ignore_status=True)
+ if result.status != 0:
+ self.skipTest('Failed to determine if tap devices exist with ifconfig or ip: %s' % result.output)
+ for line in result.output.splitlines():
+ if line.startswith('tap'):
+ break
+ else:
+ self.skipTest('No tap devices found - you must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
+
+ def _test_devtool_add_git_url(self, git_url, version, pn, resulting_src_uri, srcrev=None):
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ command = 'devtool add --version %s %s %s' % (version, pn, git_url)
+        if srcrev:
+            command += ' --srcrev %s' % srcrev
+ result = runCmd(command)
+ self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created')
+ # Check the recipe name is correct
+ recipefile = get_bb_var('FILE', pn)
+ self.assertIn('%s_git.bb' % pn, recipefile, 'Recipe file incorrectly named')
+ self.assertIn(recipefile, result.output)
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(pn, result.output)
+ self.assertIn(recipefile, result.output)
+ checkvars = {}
+ checkvars['SRC_URI'] = resulting_src_uri
+ self._test_recipe_contents(recipefile, checkvars, [])
-class DevtoolTests(DevtoolBase):
+class DevtoolBase(DevtoolTestCase):
@classmethod
def setUpClass(cls):
- super(DevtoolTests, cls).setUpClass()
+ super(DevtoolBase, cls).setUpClass()
bb_vars = get_bb_vars(['TOPDIR', 'SSTATE_DIR'])
cls.original_sstate = bb_vars['SSTATE_DIR']
cls.devtool_sstate = os.path.join(bb_vars['TOPDIR'], 'sstate_devtool')
cls.sstate_conf = 'SSTATE_DIR = "%s"\n' % cls.devtool_sstate
cls.sstate_conf += ('SSTATE_MIRRORS += "file://.* file:///%s/PATH"\n'
% cls.original_sstate)
+ cls.sstate_conf += ('BB_HASHSERVE_UPSTREAM = "hashserv.yocto.io:8687"\n')
@classmethod
def tearDownClass(cls):
cls.logger.debug('Deleting devtool sstate cache on %s' % cls.devtool_sstate)
runCmd('rm -rf %s' % cls.devtool_sstate)
- super(DevtoolTests, cls).tearDownClass()
+ super(DevtoolBase, cls).tearDownClass()
def setUp(self):
"""Test case setup function"""
- super(DevtoolTests, self).setUp()
- self.workspacedir = os.path.join(self.builddir, 'workspace')
- self.assertTrue(not os.path.exists(self.workspacedir),
- 'This test cannot be run with a workspace directory '
- 'under the build directory')
+ super(DevtoolBase, self).setUp()
self.append_config(self.sstate_conf)
- def _check_src_repo(self, repo_dir):
- """Check srctree git repository"""
- self.assertTrue(os.path.isdir(os.path.join(repo_dir, '.git')),
- 'git repository for external source tree not found')
- result = runCmd('git status --porcelain', cwd=repo_dir)
- self.assertEqual(result.output.strip(), "",
- 'Created git repo is not clean')
- result = runCmd('git symbolic-ref HEAD', cwd=repo_dir)
- self.assertEqual(result.output.strip(), "refs/heads/devtool",
- 'Wrong branch in git repo')
- def _check_repo_status(self, repo_dir, expected_status):
- """Check the worktree status of a repository"""
- result = runCmd('git status . --porcelain',
- cwd=repo_dir)
- for line in result.output.splitlines():
- for ind, (f_status, fn_re) in enumerate(expected_status):
- if re.match(fn_re, line[3:]):
- if f_status != line[:2]:
- self.fail('Unexpected status in line: %s' % line)
- expected_status.pop(ind)
- break
- else:
- self.fail('Unexpected modified file in line: %s' % line)
- if expected_status:
- self.fail('Missing file changes: %s' % expected_status)
+class DevtoolTests(DevtoolBase):
- @OETestID(1158)
def test_create_workspace(self):
# Check preconditions
result = runCmd('bitbake-layers show-layers')
- self.assertTrue('/workspace' not in result.output, 'This test cannot be run with a workspace layer in bblayers.conf')
+ self.assertTrue('\nworkspace' not in result.output, 'This test cannot be run with a workspace layer in bblayers.conf')
+ # remove conf/devtool.conf to avoid it corrupting tests
+ devtoolconf = os.path.join(self.builddir, 'conf', 'devtool.conf')
+ self.track_for_cleanup(devtoolconf)
# Try creating a workspace layer with a specific path
tempdir = tempfile.mkdtemp(prefix='devtoolqa')
self.track_for_cleanup(tempdir)
@@ -191,14 +356,15 @@ class DevtoolTests(DevtoolBase):
self.assertNotIn(tempdir, result.output)
self.assertIn(self.workspacedir, result.output)
- @OETestID(1159)
+class DevtoolAddTests(DevtoolBase):
+
def test_devtool_add(self):
# Fetch source
tempdir = tempfile.mkdtemp(prefix='devtoolqa')
self.track_for_cleanup(tempdir)
pn = 'pv'
pv = '1.5.3'
- url = 'http://www.ivarch.com/programs/sources/pv-1.5.3.tar.bz2'
+ url = 'http://downloads.yoctoproject.org/mirror/sources/pv-1.5.3.tar.bz2'
result = runCmd('wget %s' % url, cwd=tempdir)
result = runCmd('tar xfv %s' % os.path.basename(url), cwd=tempdir)
srcdir = os.path.join(tempdir, '%s-%s' % (pn, pv))
@@ -233,8 +399,41 @@ class DevtoolTests(DevtoolBase):
bindir = bindir[1:]
self.assertTrue(os.path.isfile(os.path.join(installdir, bindir, 'pv')), 'pv binary not found in D')
- @OETestID(1423)
+ def test_devtool_add_binary(self):
+ # Create a binary package containing a known test file
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ pn = 'tst-bin'
+ pv = '1.0'
+ test_file_dir = "var/lib/%s/" % pn
+ test_file_name = "test_file"
+ test_file_content = "TEST CONTENT"
+ test_file_package_root = os.path.join(tempdir, pn)
+ test_file_dir_full = os.path.join(test_file_package_root, test_file_dir)
+ bb.utils.mkdirhier(test_file_dir_full)
+ with open(os.path.join(test_file_dir_full, test_file_name), "w") as f:
+ f.write(test_file_content)
+ bin_package_path = os.path.join(tempdir, "%s.tar.gz" % pn)
+ runCmd("tar czf %s -C %s ." % (bin_package_path, test_file_package_root))
+
+ # Test devtool add -b on the binary package
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c cleansstate %s' % pn)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool add -b %s %s' % (pn, bin_package_path))
+ self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created')
+
+ # Build the resulting recipe
+ result = runCmd('devtool build %s' % pn)
+ installdir = get_bb_var('D', pn)
+ self.assertTrue(installdir, 'Could not query installdir variable')
+
+ # Check that a known file from the binary package has indeed been installed
+ self.assertTrue(os.path.isfile(os.path.join(installdir, test_file_dir, test_file_name)), '%s not found in D' % test_file_name)
+
def test_devtool_add_git_local(self):
+ # We need dbus built so that DEPENDS recognition works
+ bitbake('dbus')
# Fetch source from a remote URL, but do it outside of devtool
tempdir = tempfile.mkdtemp(prefix='devtoolqa')
self.track_for_cleanup(tempdir)
@@ -264,16 +463,32 @@ class DevtoolTests(DevtoolBase):
self.assertIn(srcdir, result.output)
self.assertIn(recipefile, result.output)
checkvars = {}
- checkvars['LICENSE'] = 'GPLv2'
+ checkvars['LICENSE'] = 'GPL-2.0-only'
checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263'
checkvars['S'] = '${WORKDIR}/git'
- checkvars['PV'] = '0.1+git${SRCPV}'
- checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/dbus-wait;protocol=https'
+ checkvars['PV'] = '0.1+git'
+ checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/dbus-wait;protocol=https;branch=master'
checkvars['SRCREV'] = srcrev
checkvars['DEPENDS'] = set(['dbus'])
self._test_recipe_contents(recipefile, checkvars, [])
- @OETestID(1162)
+ def test_devtool_add_git_style1(self):
+ version = 'v3.1.0'
+ pn = 'mbedtls'
+ # this will trigger reformat_git_uri with branch parameter in url
+ git_url = "'git://git@github.com/ARMmbed/mbedtls.git;branch=mbedtls-2.28;protocol=https'"
+ resulting_src_uri = "git://git@github.com/ARMmbed/mbedtls.git;branch=mbedtls-2.28;protocol=https"
+ self._test_devtool_add_git_url(git_url, version, pn, resulting_src_uri)
+
+ def test_devtool_add_git_style2(self):
+ version = 'v3.1.0'
+ srcrev = 'v3.1.0'
+ pn = 'mbedtls'
+ # this will trigger reformat_git_uri without a branch parameter in the url
+ git_url = "'git://git@github.com/ARMmbed/mbedtls.git;protocol=https'"
+ resulting_src_uri = "git://git@github.com/ARMmbed/mbedtls.git;protocol=https;branch=master"
+ self._test_devtool_add_git_url(git_url, version, pn, resulting_src_uri, srcrev)
+
def test_devtool_add_library(self):
# Fetch source
tempdir = tempfile.mkdtemp(prefix='devtoolqa')
@@ -302,7 +517,7 @@ class DevtoolTests(DevtoolBase):
recipefile = '%s/recipes/libftdi/libftdi_%s.bb' % (self.workspacedir, version)
result = runCmd('recipetool setvar %s EXTRA_OECMAKE -- \'-DPYTHON_BINDINGS=OFF -DLIBFTDI_CMAKE_CONFIG_DIR=${datadir}/cmake/Modules\'' % recipefile)
with open(recipefile, 'a') as f:
- f.write('\nFILES_${PN}-dev += "${datadir}/cmake/Modules"\n')
+ f.write('\nFILES:${PN}-dev += "${datadir}/cmake/Modules"\n')
# We don't have the ability to pick up this dependency automatically yet...
f.write('\nDEPENDS += "libusb1"\n')
f.write('\nTESTLIBOUTPUT = "${COMPONENTS_DIR}/${TUNE_PKGARCH}/${PN}/${libdir}"\n')
@@ -322,20 +537,19 @@ class DevtoolTests(DevtoolBase):
self.assertFalse(matches, 'Stamp files exist for recipe libftdi that should have been cleaned')
self.assertFalse(os.path.isfile(os.path.join(staging_libdir, 'libftdi1.so.2.1.0')), 'libftdi binary still found in STAGING_LIBDIR after cleaning')
- @OETestID(1160)
def test_devtool_add_fetch(self):
# Fetch source
tempdir = tempfile.mkdtemp(prefix='devtoolqa')
self.track_for_cleanup(tempdir)
testver = '0.23'
- url = 'https://pypi.python.org/packages/source/M/MarkupSafe/MarkupSafe-%s.tar.gz' % testver
+ url = 'https://files.pythonhosted.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-%s.tar.gz' % testver
testrecipe = 'python-markupsafe'
srcdir = os.path.join(tempdir, testrecipe)
# Test devtool add
self.track_for_cleanup(self.workspacedir)
self.add_command_to_tearDown('bitbake -c cleansstate %s' % testrecipe)
self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
- result = runCmd('devtool add %s %s -f %s' % (testrecipe, srcdir, url))
+ result = runCmd('devtool add --no-pypi %s %s -f %s' % (testrecipe, srcdir, url))
self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. %s' % result.output)
self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory')
self.assertTrue(os.path.isdir(os.path.join(srcdir, '.git')), 'git repository for external source tree was not created')
@@ -354,7 +568,7 @@ class DevtoolTests(DevtoolBase):
result = runCmd('devtool reset -n %s' % testrecipe)
shutil.rmtree(srcdir)
fakever = '1.9'
- result = runCmd('devtool add %s %s -f %s -V %s' % (testrecipe, srcdir, url, fakever))
+ result = runCmd('devtool add --no-pypi %s %s -f %s -V %s' % (testrecipe, srcdir, url, fakever))
self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory')
# Test devtool status
result = runCmd('devtool status')
@@ -368,11 +582,11 @@ class DevtoolTests(DevtoolBase):
checkvars['SRC_URI'] = url
self._test_recipe_contents(recipefile, checkvars, [])
- @OETestID(1161)
def test_devtool_add_fetch_git(self):
tempdir = tempfile.mkdtemp(prefix='devtoolqa')
self.track_for_cleanup(tempdir)
url = 'gitsm://git.yoctoproject.org/mraa'
+ url_branch = '%s;branch=master' % url
checkrev = 'ae127b19a50aa54255e4330ccfdd9a5d058e581d'
testrecipe = 'mraa'
srcdir = os.path.join(tempdir, testrecipe)
@@ -392,8 +606,8 @@ class DevtoolTests(DevtoolBase):
self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named')
checkvars = {}
checkvars['S'] = '${WORKDIR}/git'
- checkvars['PV'] = '1.0+git${SRCPV}'
- checkvars['SRC_URI'] = url
+ checkvars['PV'] = '1.0+git'
+ checkvars['SRC_URI'] = url_branch
checkvars['SRCREV'] = '${AUTOREV}'
self._test_recipe_contents(recipefile, checkvars, [])
# Try with revision and version specified
@@ -411,12 +625,11 @@ class DevtoolTests(DevtoolBase):
self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named')
checkvars = {}
checkvars['S'] = '${WORKDIR}/git'
- checkvars['PV'] = '1.5+git${SRCPV}'
- checkvars['SRC_URI'] = url
+ checkvars['PV'] = '1.5+git'
+ checkvars['SRC_URI'] = url_branch
checkvars['SRCREV'] = checkrev
self._test_recipe_contents(recipefile, checkvars, [])
- @OETestID(1391)
def test_devtool_add_fetch_simple(self):
# Fetch source from a remote URL, auto-detecting name
tempdir = tempfile.mkdtemp(prefix='devtoolqa')
@@ -436,7 +649,7 @@ class DevtoolTests(DevtoolBase):
result = runCmd('devtool status')
self.assertIn(testrecipe, result.output)
self.assertIn(srcdir, result.output)
# Check recipe
recipefile = get_bb_var('FILE', testrecipe)
self.assertIn('%s_%s.bb' % (testrecipe, testver), recipefile, 'Recipe file incorrectly named')
checkvars = {}
@@ -444,15 +657,53 @@ class DevtoolTests(DevtoolBase):
checkvars['SRC_URI'] = url.replace(testver, '${PV}')
self._test_recipe_contents(recipefile, checkvars, [])
- @OETestID(1164)
+ def test_devtool_add_npm(self):
+ collections = get_bb_var('BBFILE_COLLECTIONS').split()
+ if "openembedded-layer" not in collections:
+ self.skipTest("Test needs meta-oe for nodejs")
+
+ pn = 'savoirfairelinux-node-server-example'
+ pv = '1.0.0'
+ url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=' + pv
+ # Test devtool add
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c cleansstate %s' % pn)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool add \'%s\'' % url)
+ self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created')
+ self.assertExists(os.path.join(self.workspacedir, 'recipes', pn, '%s_%s.bb' % (pn, pv)), 'Recipe not created')
+ self.assertExists(os.path.join(self.workspacedir, 'recipes', pn, pn, 'npm-shrinkwrap.json'), 'Shrinkwrap not created')
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(pn, result.output)
+ # Clean up anything in the workdir/sysroot/sstate cache (have to do this *after* devtool add since the recipe only exists then)
+ bitbake('%s -c cleansstate' % pn)
+ # Test devtool build
+ result = runCmd('devtool build %s' % pn)
+
+ def test_devtool_add_python_egg_requires(self):
+ # Fetch source
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ testver = '0.14.0'
+ url = 'https://files.pythonhosted.org/packages/e9/9e/25d59f5043cf763833b2581c8027fa92342c4cf8ee523b498ecdf460c16d/uvicorn-%s.tar.gz' % testver
+ testrecipe = 'python3-uvicorn'
+ srcdir = os.path.join(tempdir, testrecipe)
+ # Test devtool add
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool add %s %s -f %s' % (testrecipe, srcdir, url))
+
+class DevtoolModifyTests(DevtoolBase):
+
def test_devtool_modify(self):
import oe.path
tempdir = tempfile.mkdtemp(prefix='devtoolqa')
self.track_for_cleanup(tempdir)
self.track_for_cleanup(self.workspacedir)
- self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
self.add_command_to_tearDown('bitbake -c clean mdadm')
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
result = runCmd('devtool modify mdadm -x %s' % tempdir)
self.assertExists(os.path.join(tempdir, 'Makefile'), 'Extracted source could not be found')
self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created')
@@ -502,7 +753,6 @@ class DevtoolTests(DevtoolBase):
result = runCmd('devtool status')
self.assertNotIn('mdadm', result.output)
- @OETestID(1620)
def test_devtool_buildclean(self):
def assertFile(path, *paths):
f = os.path.join(path, *paths)
@@ -521,8 +771,8 @@ class DevtoolTests(DevtoolBase):
self.track_for_cleanup(tempdir_m4)
self.track_for_cleanup(builddir_m4)
self.track_for_cleanup(self.workspacedir)
- self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
self.add_command_to_tearDown('bitbake -c clean mdadm m4')
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
self.write_recipeinc('m4', 'EXTERNALSRC_BUILD = "%s"\ndo_clean() {\n\t:\n}\n' % builddir_m4)
try:
runCmd('devtool modify mdadm -x %s' % tempdir_mdadm)
@@ -538,6 +788,7 @@ class DevtoolTests(DevtoolBase):
bitbake('mdadm m4 -c buildclean')
assertNoFile(tempdir_mdadm, 'mdadm')
assertNoFile(builddir_m4, 'src/m4')
+ runCmd('echo "#Trigger rebuild" >> %s/Makefile' % tempdir_mdadm)
bitbake('mdadm m4 -c compile')
assertFile(tempdir_mdadm, 'mdadm')
assertFile(builddir_m4, 'src/m4')
@@ -549,7 +800,6 @@ class DevtoolTests(DevtoolBase):
finally:
self.delete_recipeinc('m4')
- @OETestID(1166)
def test_devtool_modify_invalid(self):
# Try modifying some recipes
tempdir = tempfile.mkdtemp(prefix='devtoolqa')
@@ -557,7 +807,7 @@ class DevtoolTests(DevtoolBase):
self.track_for_cleanup(self.workspacedir)
self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
- testrecipes = 'perf kernel-devsrc package-index core-image-minimal meta-toolchain packagegroup-core-sdk meta-ide-support'.split()
+ testrecipes = 'perf kernel-devsrc package-index core-image-minimal meta-toolchain packagegroup-core-sdk'.split()
# Find actual name of gcc-source since it now includes the version - crude, but good enough for this purpose
result = runCmd('bitbake-layers show-recipes gcc-source*')
for line in result.output.splitlines():
@@ -578,7 +828,6 @@ class DevtoolTests(DevtoolBase):
self.assertNotEqual(result.status, 0, 'devtool modify on %s should have failed. devtool output: %s' % (testrecipe, result.output))
self.assertIn('ERROR: ', result.output, 'devtool modify on %s should have given an ERROR' % testrecipe)
- @OETestID(1365)
def test_devtool_modify_native(self):
# Check preconditions
self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
@@ -590,7 +839,7 @@ class DevtoolTests(DevtoolBase):
bbclassextended = False
inheritnative = False
- testrecipes = 'mtools-native apt-native desktop-file-utils-native'.split()
+ testrecipes = 'cdrtools-native mtools-native apt-native desktop-file-utils-native'.split()
for testrecipe in testrecipes:
checkextend = 'native' in (get_bb_var('BBCLASSEXTEND', testrecipe) or '').split()
if not bbclassextended:
@@ -607,11 +856,48 @@ class DevtoolTests(DevtoolBase):
self.assertTrue(bbclassextended, 'None of these recipes are BBCLASSEXTENDed to native - need to adjust testrecipes list: %s' % ', '.join(testrecipes))
self.assertTrue(inheritnative, 'None of these recipes do "inherit native" - need to adjust testrecipes list: %s' % ', '.join(testrecipes))
+ def test_devtool_modify_localfiles_only(self):
+ # Check preconditions
+ testrecipe = 'base-files'
+ src_uri = (get_bb_var('SRC_URI', testrecipe) or '').split()
+ foundlocalonly = False
+ correct_symlink = False
+ for item in src_uri:
+ if item.startswith('file://'):
+ if '.patch' not in item:
+ foundlocalonly = True
+ else:
+ foundlocalonly = False
+ break
+ self.assertTrue(foundlocalonly, 'This test expects the %s recipe to fetch local files only and it seems that it no longer does' % testrecipe)
+ # Clean up anything in the workdir/sysroot/sstate cache
+ bitbake('%s -c cleansstate' % testrecipe)
+ # Try modifying a recipe
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
+ srcfile = os.path.join(tempdir, 'oe-local-files/share/dot.bashrc')
+ srclink = os.path.join(tempdir, 'share/dot.bashrc')
+ self.assertExists(srcfile, 'Extracted source could not be found')
+ if os.path.islink(srclink) and os.path.exists(srclink) and os.path.samefile(srcfile, srclink):
+ correct_symlink = True
+ self.assertTrue(correct_symlink, 'Source symlink to oe-local-files is broken')
+
+ matches = glob.glob(os.path.join(self.workspacedir, 'appends', '%s_*.bbappend' % testrecipe))
+ self.assertTrue(matches, 'bbappend not created')
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(testrecipe, result.output)
+ self.assertIn(tempdir, result.output)
+ # Try building
+ bitbake(testrecipe)
- @OETestID(1165)
def test_devtool_modify_git(self):
# Check preconditions
- testrecipe = 'mkelfimage'
+ testrecipe = 'psplash'
src_uri = get_bb_var('SRC_URI', testrecipe)
self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe)
# Clean up anything in the workdir/sysroot/sstate cache
@@ -620,12 +906,12 @@ class DevtoolTests(DevtoolBase):
tempdir = tempfile.mkdtemp(prefix='devtoolqa')
self.track_for_cleanup(tempdir)
self.track_for_cleanup(self.workspacedir)
- self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
- self.assertExists(os.path.join(tempdir, 'Makefile'), 'Extracted source could not be found')
+ self.assertExists(os.path.join(tempdir, 'Makefile.am'), 'Extracted source could not be found')
self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. devtool output: %s' % result.output)
- matches = glob.glob(os.path.join(self.workspacedir, 'appends', 'mkelfimage_*.bbappend'))
+ matches = glob.glob(os.path.join(self.workspacedir, 'appends', 'psplash_*.bbappend'))
self.assertTrue(matches, 'bbappend not created')
# Test devtool status
result = runCmd('devtool status')
@@ -636,7 +922,122 @@ class DevtoolTests(DevtoolBase):
# Try building
bitbake(testrecipe)
- @OETestID(1167)
+ def test_devtool_modify_git_no_extract(self):
+ # Check preconditions
+ testrecipe = 'psplash'
+ src_uri = get_bb_var('SRC_URI', testrecipe)
+ self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe)
+ # Clean up anything in the workdir/sysroot/sstate cache
+ bitbake('%s -c cleansstate' % testrecipe)
+ # Try modifying a recipe
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('git clone https://git.yoctoproject.org/psplash %s && devtool modify -n %s %s' % (tempdir, testrecipe, tempdir))
+ self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. devtool output: %s' % result.output)
+ matches = glob.glob(os.path.join(self.workspacedir, 'appends', 'psplash_*.bbappend'))
+ self.assertTrue(matches, 'bbappend not created')
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(testrecipe, result.output)
+ self.assertIn(tempdir, result.output)
+
+ def test_devtool_modify_git_crates_subpath(self):
+ # This tests two things in devtool context:
+ # - that we support local git dependencies for cargo-based recipes
+ # - that we support patches in SRC_URI when the git url contains a subpath parameter
+
+ # Check preconditions:
+ # recipe inherits cargo
+ # git:// uri with a subpath as the main package
+ # some crate:// in SRC_URI
+ # other git:// uris in SRC_URI
+ # contains a patch
+ testrecipe = 'hello-rs'
+ bb_vars = get_bb_vars(['SRC_URI', 'FILE', 'WORKDIR', 'CARGO_HOME'], testrecipe)
+ recipefile = bb_vars['FILE']
+ workdir = bb_vars['WORKDIR']
+ cargo_home = bb_vars['CARGO_HOME']
+ src_uri = bb_vars['SRC_URI'].split()
+ self.assertTrue(src_uri[0].startswith('git://'),
+ 'This test expects the %s recipe to have a git repo as its main uri' % testrecipe)
+ self.assertIn(';subpath=', src_uri[0],
+ 'This test expects the %s recipe to have a git uri with subpath' % testrecipe)
+ self.assertTrue(any([uri.startswith('crate://') for uri in src_uri]),
+ 'This test expects the %s recipe to have some crates in its src uris' % testrecipe)
+ self.assertGreaterEqual(sum(map(lambda x: x.startswith('git://'), src_uri)), 2,
+ 'This test expects the %s recipe to have several git:// uris' % testrecipe)
+ self.assertTrue(any([uri.startswith('file://') and '.patch' in uri for uri in src_uri]),
+ 'This test expects the %s recipe to have a patch in its src uris' % testrecipe)
+
+ self._test_recipe_contents(recipefile, {}, ['ptest-cargo'])
+
+ # Clean up anything in the workdir/sysroot/sstate cache
+ bitbake('%s -c cleansstate' % testrecipe)
+ # Try modifying a recipe
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
+ self.assertExists(os.path.join(tempdir, 'Cargo.toml'), 'Extracted source could not be found')
+ self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. devtool output: %s' % result.output)
+ matches = glob.glob(os.path.join(self.workspacedir, 'appends', '%s_*.bbappend' % testrecipe))
+ self.assertTrue(matches, 'bbappend not created')
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(testrecipe, result.output)
+ self.assertIn(tempdir, result.output)
+ # Check git repo
+ self._check_src_repo(tempdir)
+ # Check that the patch is correctly applied.
+ # The last commit message in the tree must contain the following note:
+ # Notes (devtool):
+ # original patch: <patchname>
+ # ..
+ patchname = None
+ for uri in src_uri:
+ if uri.startswith('file://') and '.patch' in uri:
+ patchname = uri.replace("file://", "").partition('.patch')[0] + '.patch'
+ self.assertIsNotNone(patchname)
+ result = runCmd('git -C %s log -1' % tempdir)
+ self.assertIn("Notes (devtool):\n original patch: %s" % patchname, result.output)
+
+ # Configure the recipe to check that the git dependencies are correctly patched in cargo config
+ bitbake('-c configure %s' % testrecipe)
+
+ cargo_config_path = os.path.join(cargo_home, 'config')
+ with open(cargo_config_path, "r") as f:
+ cargo_config_contents = [line.strip('\n') for line in f.readlines()]
+
+ # Get back git dependencies of the recipe (ignoring the main one)
+ # and check that they are all correctly patched to be fetched locally
+ git_deps = [uri for uri in src_uri if uri.startswith("git://")][1:]
+ for git_dep in git_deps:
+ raw_url, _, raw_parms = git_dep.partition(";")
+ parms = {}
+ for parm in raw_parms.split(";"):
+ name_parm, _, value_parm = parm.partition('=')
+ parms[name_parm] = value_parm
+ self.assertIn('protocol', parms, 'git dependencies uri should contain the "protocol" parameter')
+ self.assertIn('name', parms, 'git dependencies uri should contain the "name" parameter')
+ self.assertIn('destsuffix', parms, 'git dependencies uri should contain the "destsuffix" parameter')
+ self.assertIn('type', parms, 'git dependencies uri should contain the "type" parameter')
+ self.assertEqual(parms['type'], 'git-dependency', 'git dependencies uri should have "type=git-dependency"')
+ raw_url = raw_url.replace("git://", '%s://' % parms['protocol'])
+ patch_line = '[patch."%s"]' % raw_url
+ path_patched = os.path.join(workdir, parms['destsuffix'])
+ path_override_line = '%s = { path = "%s" }' % (parms['name'], path_patched)
+ # Would have been better to use tomllib to read this file :/
+ self.assertIn(patch_line, cargo_config_contents)
+ self.assertIn(path_override_line, cargo_config_contents)
+
+ # Try to package the recipe
+ bitbake('-c package_qa %s' % testrecipe)
+
def test_devtool_modify_localfiles(self):
# Check preconditions
testrecipe = 'lighttpd'
@@ -653,8 +1054,8 @@ class DevtoolTests(DevtoolBase):
tempdir = tempfile.mkdtemp(prefix='devtoolqa')
self.track_for_cleanup(tempdir)
self.track_for_cleanup(self.workspacedir)
- self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
self.assertExists(os.path.join(tempdir, 'configure.ac'), 'Extracted source could not be found')
self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created')
@@ -667,7 +1068,6 @@ class DevtoolTests(DevtoolBase):
# Try building
bitbake(testrecipe)
- @OETestID(1378)
def test_devtool_modify_virtual(self):
# Try modifying a virtual recipe
virtrecipe = 'virtual/make'
@@ -689,8 +1089,59 @@ class DevtoolTests(DevtoolBase):
self._check_src_repo(tempdir)
# This is probably sufficient
+ def test_devtool_modify_overrides(self):
+ # Try modifying a recipe with patches in overrides
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool modify devtool-patch-overrides -x %s' % (tempdir))
+
+ self._check_src_repo(tempdir)
+ source = os.path.join(tempdir, "source")
+ def check(branch, expected):
+ runCmd('git -C %s checkout %s' % (tempdir, branch))
+ with open(source, "rt") as f:
+ content = f.read()
+ self.assertEqual(content, expected)
+ if self.td["MACHINE"] == "qemux86":
+ check('devtool', 'This is a test for qemux86\n')
+ elif self.td["MACHINE"] == "qemuarm":
+ check('devtool', 'This is a test for qemuarm\n')
+ else:
+ check('devtool', 'This is a test for something\n')
+ check('devtool-no-overrides', 'This is a test for something\n')
+ check('devtool-override-qemuarm', 'This is a test for qemuarm\n')
+ check('devtool-override-qemux86', 'This is a test for qemux86\n')
+
+ def test_devtool_modify_multiple_sources(self):
+ # This test checks that recipes fetching several sources can be used with devtool modify/build
+ # Check preconditions
+ testrecipe = 'bzip2'
+ src_uri = get_bb_var('SRC_URI', testrecipe)
+ src1 = 'https://' in src_uri
+ src2 = 'git://' in src_uri
+ self.assertTrue(src1 and src2, 'This test expects the %s recipe to fetch both a git source and a tarball and it seems that it no longer does' % testrecipe)
+ # Clean up anything in the workdir/sysroot/sstate cache
+ bitbake('%s -c cleansstate' % testrecipe)
+ # Try modifying a recipe
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
+ self.assertEqual(result.status, 0, "Could not modify recipe %s. Output: %s" % (testrecipe, result.output))
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(testrecipe, result.output)
+ self.assertIn(tempdir, result.output)
+ # Try building
+ result = bitbake(testrecipe)
+ self.assertEqual(result.status, 0, "Bitbake failed, exit code %s, output %s" % (result.status, result.output))
+
+class DevtoolUpdateTests(DevtoolBase):
- @OETestID(1169)
def test_devtool_update_recipe(self):
# Check preconditions
testrecipe = 'minicom'
@@ -718,15 +1169,15 @@ class DevtoolTests(DevtoolBase):
result = runCmd('git commit -m "Add a new file"', cwd=tempdir)
self.add_command_to_tearDown('cd %s; rm %s/*.patch; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)))
result = runCmd('devtool update-recipe %s' % testrecipe)
+ result = runCmd('git add minicom', cwd=os.path.dirname(recipefile))
expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
- ('??', '.*/0001-Change-the-README.patch$'),
- ('??', '.*/0002-Add-a-new-file.patch$')]
+ ('A ', '.*/0001-Change-the-README.patch$'),
+ ('A ', '.*/0002-Add-a-new-file.patch$')]
self._check_repo_status(os.path.dirname(recipefile), expected_status)
- @OETestID(1172)
def test_devtool_update_recipe_git(self):
# Check preconditions
- testrecipe = 'mtd-utils'
+ testrecipe = 'mtd-utils-selftest'
bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe)
recipefile = bb_vars['FILE']
src_uri = bb_vars['SRC_URI']
@@ -760,28 +1211,12 @@ class DevtoolTests(DevtoolBase):
self._check_repo_status(os.path.dirname(recipefile), expected_status)
result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile))
- addlines = ['SRCREV = ".*"', 'SRC_URI = "git://git.infradead.org/mtd-utils.git"']
+ addlines = ['SRCREV = ".*"', 'SRC_URI = "git://git.infradead.org/mtd-utils.git;branch=master"']
srcurilines = src_uri.split()
srcurilines[0] = 'SRC_URI = "' + srcurilines[0]
srcurilines.append('"')
removelines = ['SRCREV = ".*"'] + srcurilines
- for line in result.output.splitlines():
- if line.startswith('+++') or line.startswith('---'):
- continue
- elif line.startswith('+'):
- matched = False
- for item in addlines:
- if re.match(item, line[1:].strip()):
- matched = True
- break
- self.assertTrue(matched, 'Unexpected diff add line: %s' % line)
- elif line.startswith('-'):
- matched = False
- for item in removelines:
- if re.match(item, line[1:].strip()):
- matched = True
- break
- self.assertTrue(matched, 'Unexpected diff remove line: %s' % line)
+ self._check_diff(result.output, addlines, removelines)
# Now try with auto mode
runCmd('cd %s; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, os.path.basename(recipefile)))
result = runCmd('devtool update-recipe %s' % testrecipe)
@@ -793,7 +1228,6 @@ class DevtoolTests(DevtoolBase):
('??', '%s/0002-Add-a-new-file.patch' % relpatchpath)]
self._check_repo_status(os.path.dirname(recipefile), expected_status)
- @OETestID(1170)
def test_devtool_update_recipe_append(self):
# Check preconditions
testrecipe = 'mdadm'
@@ -832,7 +1266,7 @@ class DevtoolTests(DevtoolBase):
self.assertExists(patchfile, 'Patch file not created')
# Check bbappend contents
- expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
'\n',
'SRC_URI += "file://0001-Add-our-custom-version.patch"\n',
'\n']
@@ -847,7 +1281,7 @@ class DevtoolTests(DevtoolBase):
result = runCmd('git reset HEAD^', cwd=tempsrcdir)
result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir))
self.assertNotExists(patchfile, 'Patch file not deleted')
- expectedlines2 = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ expectedlines2 = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
'\n']
with open(bbappendfile, 'r') as f:
self.assertEqual(expectedlines2, f.readlines())
@@ -862,13 +1296,13 @@ class DevtoolTests(DevtoolBase):
self.assertEqual(expectedlines, f.readlines())
# Deleting isn't expected to work under these circumstances
- @OETestID(1171)
def test_devtool_update_recipe_append_git(self):
# Check preconditions
- testrecipe = 'mtd-utils'
- bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe)
+ testrecipe = 'mtd-utils-selftest'
+ bb_vars = get_bb_vars(['FILE', 'SRC_URI', 'LAYERSERIES_CORENAMES'], testrecipe)
recipefile = bb_vars['FILE']
src_uri = bb_vars['SRC_URI']
+ corenames = bb_vars['LAYERSERIES_CORENAMES']
self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe)
for entry in src_uri.split():
if entry.startswith('git://'):
@@ -899,6 +1333,7 @@ class DevtoolTests(DevtoolBase):
f.write('BBFILE_PATTERN_oeselftesttemplayer = "^${LAYERDIR}/"\n')
f.write('BBFILE_PRIORITY_oeselftesttemplayer = "999"\n')
f.write('BBFILE_PATTERN_IGNORE_EMPTY_oeselftesttemplayer = "1"\n')
+ f.write('LAYERSERIES_COMPAT_oeselftesttemplayer = "%s"\n' % corenames)
self.add_command_to_tearDown('bitbake-layers remove-layer %s || true' % templayerdir)
result = runCmd('bitbake-layers add-layer %s' % templayerdir, cwd=self.builddir)
# Create the bbappend
@@ -952,7 +1387,6 @@ class DevtoolTests(DevtoolBase):
self.assertEqual(expectedlines, set(f.readlines()))
# Deleting isn't expected to work under these circumstances
- @OETestID(1370)
def test_devtool_update_recipe_local_files(self):
"""Check that local source files are copied over instead of patched"""
testrecipe = 'makedevs'
@@ -975,20 +1409,39 @@ class DevtoolTests(DevtoolBase):
runCmd('echo "Bar" > new-file', cwd=tempdir)
runCmd('git add new-file', cwd=tempdir)
runCmd('git commit -m "Add new file"', cwd=tempdir)
- self.add_command_to_tearDown('cd %s; git clean -fd .; git checkout .' %
- os.path.dirname(recipefile))
runCmd('devtool update-recipe %s' % testrecipe)
expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
(' M', '.*/makedevs/makedevs.c$'),
('??', '.*/makedevs/new-local$'),
('??', '.*/makedevs/0001-Add-new-file.patch$')]
self._check_repo_status(os.path.dirname(recipefile), expected_status)
+ # Now try to update recipe in another layer, so first, clean it
+ runCmd('cd %s; git clean -fd .; git checkout .' % os.path.dirname(recipefile))
+ # Create a temporary layer and add it to bblayers.conf
+ self._create_temp_layer(templayerdir, True, 'templayer')
+ # Update recipe in templayer
+ result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir))
+ self.assertNotIn('WARNING:', result.output)
+ # Check recipe is still clean
+ self._check_repo_status(os.path.dirname(recipefile), [])
+ splitpath = os.path.dirname(recipefile).split(os.sep)
+ appenddir = os.path.join(templayerdir, splitpath[-2], splitpath[-1])
+ bbappendfile = self._check_bbappend(testrecipe, recipefile, appenddir)
+ patchfile = os.path.join(appenddir, testrecipe, '0001-Add-new-file.patch')
+ new_local_file = os.path.join(appenddir, testrecipe, 'new-local')
+ local_file = os.path.join(appenddir, testrecipe, 'makedevs.c')
+ self.assertExists(patchfile, 'Patch file 0001-Add-new-file.patch not created')
+ self.assertExists(local_file, 'File makedevs.c not created')
+ self.assertExists(new_local_file, 'File new-local not created')
- @OETestID(1371)
def test_devtool_update_recipe_local_files_2(self):
"""Check local source files support when oe-local-files is in Git"""
- testrecipe = 'lzo'
+ testrecipe = 'devtool-test-local'
recipefile = get_bb_var('FILE', testrecipe)
+ recipedir = os.path.dirname(recipefile)
+ result = runCmd('git status --porcelain .', cwd=recipedir)
+ if result.output.strip():
+ self.fail('Recipe directory for %s contains uncommitted changes' % testrecipe)
# Setup srctree for modifying the recipe
tempdir = tempfile.mkdtemp(prefix='devtoolqa')
self.track_for_cleanup(tempdir)
@@ -1002,9 +1455,9 @@ class DevtoolTests(DevtoolBase):
runCmd('git add oe-local-files', cwd=tempdir)
runCmd('git commit -m "Add local sources"', cwd=tempdir)
# Edit / commit local sources
- runCmd('echo "# Foobar" >> oe-local-files/acinclude.m4', cwd=tempdir)
+ runCmd('echo "# Foobar" >> oe-local-files/file1', cwd=tempdir)
runCmd('git commit -am "Edit existing file"', cwd=tempdir)
- runCmd('git rm oe-local-files/run-ptest', cwd=tempdir)
+ runCmd('git rm oe-local-files/file2', cwd=tempdir)
runCmd('git commit -m"Remove file"', cwd=tempdir)
runCmd('echo "Foo" > oe-local-files/new-local', cwd=tempdir)
runCmd('git add oe-local-files/new-local', cwd=tempdir)
@@ -1016,16 +1469,68 @@ class DevtoolTests(DevtoolBase):
os.path.dirname(recipefile))
# Checkout unmodified file to working copy -> devtool should still pick
# the modified version from HEAD
- runCmd('git checkout HEAD^ -- oe-local-files/acinclude.m4', cwd=tempdir)
+ runCmd('git checkout HEAD^ -- oe-local-files/file1', cwd=tempdir)
runCmd('devtool update-recipe %s' % testrecipe)
expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
- (' M', '.*/acinclude.m4$'),
- (' D', '.*/run-ptest$'),
+ (' M', '.*/file1$'),
+ (' D', '.*/file2$'),
('??', '.*/new-local$'),
('??', '.*/0001-Add-new-file.patch$')]
self._check_repo_status(os.path.dirname(recipefile), expected_status)
- @OETestID(1627)
+ def test_devtool_update_recipe_with_gitignore(self):
+ # First, modify the recipe
+ testrecipe = 'devtool-test-ignored'
+ bb_vars = get_bb_vars(['FILE'], testrecipe)
+ recipefile = bb_vars['FILE']
+ patchfile = os.path.join(os.path.dirname(recipefile), testrecipe, testrecipe + '.patch')
+ newpatchfile = os.path.join(os.path.dirname(recipefile), testrecipe, testrecipe + '.patch.expected')
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ # (don't bother with cleaning the recipe on teardown, we won't be building it)
+ result = runCmd('devtool modify %s' % testrecipe)
+ self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)))
+ result = runCmd('devtool finish --force-patch-refresh %s meta-selftest' % testrecipe)
+ # Check recipe got changed as expected
+ with open(newpatchfile, 'r') as f:
+ desiredlines = f.readlines()
+ with open(patchfile, 'r') as f:
+ newlines = f.readlines()
+ # Ignore the initial lines, because oe-selftest creates its own meta-selftest repo,
+ # which changes the patch header metadata, but keep .patch.expected as it is, so that
+ # if someone runs devtool finish --force-patch-refresh devtool-test-ignored manually,
+ # it generates exactly the same .patch file
+ self.assertEqual(desiredlines[5:], newlines[5:])
+
+ def test_devtool_update_recipe_long_filename(self):
+ # First, modify the recipe
+ testrecipe = 'devtool-test-long-filename'
+ bb_vars = get_bb_vars(['FILE'], testrecipe)
+ recipefile = bb_vars['FILE']
+ patchfilename = '0001-I-ll-patch-you-only-if-devtool-lets-me-to-do-it-corr.patch'
+ patchfile = os.path.join(os.path.dirname(recipefile), testrecipe, patchfilename)
+ newpatchfile = os.path.join(os.path.dirname(recipefile), testrecipe, patchfilename + '.expected')
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ # (don't bother with cleaning the recipe on teardown, we won't be building it)
+ result = runCmd('devtool modify %s' % testrecipe)
+ self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)))
+ result = runCmd('devtool finish --force-patch-refresh %s meta-selftest' % testrecipe)
+ # Check recipe got changed as expected
+ with open(newpatchfile, 'r') as f:
+ desiredlines = f.readlines()
+ with open(patchfile, 'r') as f:
+ newlines = f.readlines()
+ # Ignore the initial lines, because oe-selftest creates its own meta-selftest repo,
+ # which changes the patch header metadata, but keep the .patch.expected file as it is, so that
+ # if someone runs devtool finish --force-patch-refresh devtool-test-long-filename manually,
+ # it generates exactly the same .patch file
+ self.assertEqual(desiredlines[5:], newlines[5:])
+
def test_devtool_update_recipe_local_files_3(self):
# First, modify the recipe
testrecipe = 'devtool-test-localonly'
@@ -1045,7 +1550,6 @@ class DevtoolTests(DevtoolBase):
expected_status = [(' M', '.*/%s/file2$' % testrecipe)]
self._check_repo_status(os.path.dirname(recipefile), expected_status)
- @OETestID(1629)
def test_devtool_update_recipe_local_patch_gz(self):
# First, modify the recipe
testrecipe = 'devtool-test-patch-gz'
@@ -1063,7 +1567,7 @@ class DevtoolTests(DevtoolBase):
# Modify one file
srctree = os.path.join(self.workspacedir, 'sources', testrecipe)
runCmd('echo "Another line" >> README', cwd=srctree)
- runCmd('git commit -a --amend --no-edit', cwd=srctree)
+ runCmd('git commit -a --amend --no-edit --no-verify', cwd=srctree)
self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)))
result = runCmd('devtool update-recipe %s' % testrecipe)
expected_status = [(' M', '.*/%s/readme.patch.gz$' % testrecipe)]
@@ -1073,7 +1577,6 @@ class DevtoolTests(DevtoolBase):
if 'gzip compressed data' not in result.output:
self.fail('New patch file is not gzipped - file reports:\n%s' % result.output)
- @OETestID(1628)
def test_devtool_update_recipe_local_files_subdir(self):
# Try devtool update-recipe on a recipe that has a file with subdir= set in
# SRC_URI such that it overwrites a file that was in an archive that
@@ -1100,7 +1603,123 @@ class DevtoolTests(DevtoolBase):
expected_status = []
self._check_repo_status(os.path.dirname(recipefile), expected_status)
- @OETestID(1163)
+ def test_devtool_finish_modify_git_subdir(self):
+ # Check preconditions
+ testrecipe = 'dos2unix'
+ self.append_config('ERROR_QA:remove:pn-dos2unix = "patch-status"\n')
+ bb_vars = get_bb_vars(['SRC_URI', 'S', 'WORKDIR', 'FILE'], testrecipe)
+ self.assertIn('git://', bb_vars['SRC_URI'], 'This test expects the %s recipe to be a git recipe' % testrecipe)
+ workdir_git = '%s/git/' % bb_vars['WORKDIR']
+ if not bb_vars['S'].startswith(workdir_git):
+ self.fail('This test expects the %s recipe to be building from a subdirectory of the git repo' % testrecipe)
+ subdir = bb_vars['S'].split(workdir_git, 1)[1]
+ # Clean up anything in the workdir/sysroot/sstate cache
+ bitbake('%s -c cleansstate' % testrecipe)
+ # Try modifying a recipe
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
+ testsrcfile = os.path.join(tempdir, subdir, 'dos2unix.c')
+ self.assertExists(testsrcfile, 'Extracted source could not be found')
+ self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. devtool output: %s' % result.output)
+ self.assertNotExists(os.path.join(tempdir, subdir, '.git'), 'Subdirectory has been initialised as a git repo')
+ # Check git repo
+ self._check_src_repo(tempdir)
+ # Modify file
+ runCmd("sed -i '1s:^:/* Add a comment */\\n:' %s" % testsrcfile)
+ result = runCmd('git commit -a -m "Add a comment"', cwd=tempdir)
+ # Now try updating original recipe
+ recipefile = bb_vars['FILE']
+ recipedir = os.path.dirname(recipefile)
+ self.add_command_to_tearDown('cd %s; rm -f %s/*.patch; git checkout .' % (recipedir, testrecipe))
+ result = runCmd('devtool update-recipe %s' % testrecipe)
+ expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
+ ('??', '.*/%s/%s/$' % (testrecipe, testrecipe))]
+ self._check_repo_status(os.path.dirname(recipefile), expected_status)
+ result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile))
+ removelines = ['SRC_URI = "git://.*"']
+ addlines = [
+ 'SRC_URI = "git://.* \\\\',
+ 'file://0001-Add-a-comment.patch;patchdir=.. \\\\',
+ '"'
+ ]
+ self._check_diff(result.output, addlines, removelines)
+ # Put things back so we can run devtool finish on a different layer
+ runCmd('cd %s; rm -f %s/*.patch; git checkout .' % (recipedir, testrecipe))
+ # Run devtool finish
+ res = re.search('recipes-.*', recipedir)
+ self.assertTrue(res, 'Unable to find recipe subdirectory')
+ recipesubdir = res[0]
+ self.add_command_to_tearDown('rm -rf %s' % os.path.join(self.testlayer_path, recipesubdir))
+ result = runCmd('devtool finish %s meta-selftest' % testrecipe)
+ # Check bbappend file contents
+ appendfn = os.path.join(self.testlayer_path, recipesubdir, '%s_%%.bbappend' % testrecipe)
+ with open(appendfn, 'r') as f:
+ appendlines = f.readlines()
+ expected_appendlines = [
+ 'FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
+ '\n',
+ 'SRC_URI += "file://0001-Add-a-comment.patch;patchdir=.."\n',
+ '\n'
+ ]
+ self.assertEqual(appendlines, expected_appendlines)
+ self.assertExists(os.path.join(os.path.dirname(appendfn), testrecipe, '0001-Add-a-comment.patch'))
+ # Try building
+ bitbake('%s -c patch' % testrecipe)
+
+ def test_devtool_git_submodules(self):
+ # This tests if we can add a patch in a git submodule and extract it properly using devtool finish
+ # Check preconditions
+ self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+ self.track_for_cleanup(self.workspacedir)
+ recipe = 'vulkan-samples'
+ src_uri = get_bb_var('SRC_URI', recipe)
+ self.assertIn('gitsm://', src_uri, 'This test expects the %s recipe to be a git recipe with submodules' % recipe)
+ oldrecipefile = get_bb_var('FILE', recipe)
+ recipedir = os.path.dirname(oldrecipefile)
+ result = runCmd('git status --porcelain .', cwd=recipedir)
+ if result.output.strip():
+ self.fail('Recipe directory for %s contains uncommitted changes' % recipe)
+ self.assertIn('/meta/', recipedir)
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool modify %s %s' % (recipe, tempdir))
+ self.assertExists(os.path.join(tempdir, 'CMakeLists.txt'), 'Extracted source could not be found')
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(recipe, result.output)
+ self.assertIn(tempdir, result.output)
+ # Modify a source file in a submodule, (grab the first one)
+ result = runCmd('git submodule --quiet foreach \'echo $sm_path\'', cwd=tempdir)
+ submodule = result.output.splitlines()[0]
+ submodule_path = os.path.join(tempdir, submodule)
+ runCmd('echo "#This is a first comment" >> testfile', cwd=submodule_path)
+ result = runCmd('git status --porcelain . ', cwd=submodule_path)
+ self.assertIn("testfile", result.output)
+ runCmd('git add testfile; git commit -m "Adding a new file"', cwd=submodule_path)
+
+ # Try finish to the original layer
+ self.add_command_to_tearDown('rm -rf %s ; cd %s ; git checkout %s' % (recipedir, os.path.dirname(recipedir), recipedir))
+ runCmd('devtool finish -f %s meta' % recipe)
+ result = runCmd('devtool status')
+ self.assertNotIn(recipe, result.output, 'Recipe should have been reset by finish but wasn\'t')
+ self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after finish')
+ expected_status = [(' M', '.*/%s$' % os.path.basename(oldrecipefile)),
+ ('??', '.*/.*-Adding-a-new-file.patch$')]
+ self._check_repo_status(recipedir, expected_status)
+ # Make sure the patch is added to the recipe with the correct "patchdir" option
+ result = runCmd('git diff .', cwd=recipedir)
+ addlines = [
+ 'file://0001-Adding-a-new-file.patch;patchdir=%s \\\\' % submodule
+ ]
+ self._check_diff(result.output, addlines, [])
+
+class DevtoolExtractTests(DevtoolBase):
+
def test_devtool_extract(self):
tempdir = tempfile.mkdtemp(prefix='devtoolqa')
# Try devtool extract
@@ -1111,7 +1730,6 @@ class DevtoolTests(DevtoolBase):
self.assertExists(os.path.join(tempdir, 'Makefile.am'), 'Extracted source could not be found')
self._check_src_repo(tempdir)
- @OETestID(1379)
def test_devtool_extract_virtual(self):
tempdir = tempfile.mkdtemp(prefix='devtoolqa')
# Try devtool extract
@@ -1122,7 +1740,6 @@ class DevtoolTests(DevtoolBase):
self.assertExists(os.path.join(tempdir, 'Makefile.am'), 'Extracted source could not be found')
self._check_src_repo(tempdir)
- @OETestID(1168)
def test_devtool_reset_all(self):
tempdir = tempfile.mkdtemp(prefix='devtoolqa')
self.track_for_cleanup(tempdir)
@@ -1149,30 +1766,9 @@ class DevtoolTests(DevtoolBase):
matches2 = glob.glob(stampprefix2 + '*')
self.assertFalse(matches2, 'Stamp files exist for recipe %s that should have been cleaned' % testrecipe2)
- @OETestID(1272)
+ @OETestTag("runqemu")
def test_devtool_deploy_target(self):
- # NOTE: Whilst this test would seemingly be better placed as a runtime test,
- # unfortunately the runtime tests run under bitbake and you can't run
- # devtool within bitbake (since devtool needs to run bitbake itself).
- # Additionally we are testing build-time functionality as well, so
- # really this has to be done as an oe-selftest test.
- #
- # Check preconditions
- machine = get_bb_var('MACHINE')
- if not machine.startswith('qemu'):
- self.skipTest('This test only works with qemu machines')
- if not os.path.exists('/etc/runqemu-nosudo'):
- self.skipTest('You must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
- result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ip tuntap show', ignore_status=True)
- if result.status != 0:
- result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ifconfig -a', ignore_status=True)
- if result.status != 0:
- self.skipTest('Failed to determine if tap devices exist with ifconfig or ip: %s' % result.output)
- for line in result.output.splitlines():
- if line.startswith('tap'):
- break
- else:
- self.skipTest('No tap devices found - you must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
+ self._check_runqemu_prerequisites()
self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
# Definitions
testrecipe = 'mdadm'
@@ -1190,8 +1786,8 @@ class DevtoolTests(DevtoolBase):
tempdir = tempfile.mkdtemp(prefix='devtoolqa')
self.track_for_cleanup(tempdir)
self.track_for_cleanup(self.workspacedir)
- self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
# Test that deploy-target at this point fails (properly)
result = runCmd('devtool deploy-target -n %s root@localhost' % testrecipe, ignore_status=True)
@@ -1214,7 +1810,7 @@ class DevtoolTests(DevtoolBase):
installdir = bb_vars['D']
fakerootenv = bb_vars['FAKEROOTENV']
fakerootcmd = bb_vars['FAKEROOTCMD']
- result = runCmd('%s %s find . -type f -exec ls -l {} \;' % (fakerootenv, fakerootcmd), cwd=installdir)
+ result = runCmd('%s %s find . -type f -exec ls -l {} \\;' % (fakerootenv, fakerootcmd), cwd=installdir)
filelist1 = self._process_ls_output(result.output)
# Now look on the target
@@ -1235,15 +1831,14 @@ class DevtoolTests(DevtoolBase):
result = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, testcommand), ignore_status=True)
self.assertNotEqual(result, 0, 'undeploy-target did not remove command as it should have')
- @OETestID(1366)
def test_devtool_build_image(self):
"""Test devtool build-image plugin"""
# Check preconditions
self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
image = 'core-image-minimal'
self.track_for_cleanup(self.workspacedir)
- self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
self.add_command_to_tearDown('bitbake -c clean %s' % image)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
bitbake('%s -c clean' % image)
# Add target and native recipes to workspace
recipes = ['mdadm', 'parted-native']
@@ -1269,7 +1864,16 @@ class DevtoolTests(DevtoolBase):
if reqpkgs:
self.fail('The following packages were not present in the image as expected: %s' % ', '.join(reqpkgs))
- @OETestID(1367)
+class DevtoolUpgradeTests(DevtoolBase):
+
+ def setUp(self):
+ super().setUp()
+ try:
+ runCmd("git config --global user.name")
+ runCmd("git config --global user.email")
+ except Exception:
+ self.skipTest("Git user.name and user.email must be set")
+
def test_devtool_upgrade(self):
# Check preconditions
self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
@@ -1314,7 +1918,6 @@ class DevtoolTests(DevtoolBase):
self.assertNotIn(recipe, result.output)
self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after resetting')
- @OETestID(1433)
def test_devtool_upgrade_git(self):
# Check preconditions
self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
@@ -1351,7 +1954,54 @@ class DevtoolTests(DevtoolBase):
self.assertNotIn(recipe, result.output)
self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after resetting')
- @OETestID(1352)
+ def test_devtool_upgrade_drop_md5sum(self):
+ # Check preconditions
+ self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ # For the moment, we are using a real recipe.
+ recipe = 'devtool-upgrade-test3'
+ version = '1.6.0'
+ oldrecipefile = get_bb_var('FILE', recipe)
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ # Check upgrade. The code does not check whether the new PV is older or newer than the
+ # current PV, so it may be that we are downgrading instead of upgrading.
+ result = runCmd('devtool upgrade %s %s -V %s' % (recipe, tempdir, version))
+ # Check new recipe file is present
+ newrecipefile = os.path.join(self.workspacedir, 'recipes', recipe, '%s_%s.bb' % (recipe, version))
+ self.assertExists(newrecipefile, 'Recipe file should exist after upgrade')
+ # Check recipe got changed as expected
+ with open(oldrecipefile + '.upgraded', 'r') as f:
+ desiredlines = f.readlines()
+ with open(newrecipefile, 'r') as f:
+ newlines = f.readlines()
+ self.assertEqual(desiredlines, newlines)
+
+ def test_devtool_upgrade_all_checksums(self):
+ # Check preconditions
+ self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ # For the moment, we are using a real recipe.
+ recipe = 'devtool-upgrade-test4'
+ version = '1.6.0'
+ oldrecipefile = get_bb_var('FILE', recipe)
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ # Check upgrade. The code does not check whether the new PV is older or newer than the
+ # current PV, so it may be that we are downgrading instead of upgrading.
+ result = runCmd('devtool upgrade %s %s -V %s' % (recipe, tempdir, version))
+ # Check new recipe file is present
+ newrecipefile = os.path.join(self.workspacedir, 'recipes', recipe, '%s_%s.bb' % (recipe, version))
+ self.assertExists(newrecipefile, 'Recipe file should exist after upgrade')
+ # Check recipe got changed as expected
+ with open(oldrecipefile + '.upgraded', 'r') as f:
+ desiredlines = f.readlines()
+ with open(newrecipefile, 'r') as f:
+ newlines = f.readlines()
+ self.assertEqual(desiredlines, newlines)
+
def test_devtool_layer_plugins(self):
"""Test that devtool can use plugins from other layers.
@@ -1370,14 +2020,29 @@ class DevtoolTests(DevtoolBase):
for p in paths:
dstdir = os.path.join(dstdir, p)
if not os.path.exists(dstdir):
- os.makedirs(dstdir)
- self.track_for_cleanup(dstdir)
+ try:
+ os.makedirs(dstdir)
+ except PermissionError:
+ return False
+ except OSError as e:
+ if e.errno == errno.EROFS:
+ return False
+ else:
+ raise e
+ if p == "lib":
+ # Can race with other tests
+ self.add_command_to_tearDown('rmdir --ignore-fail-on-non-empty %s' % dstdir)
+ else:
+ self.track_for_cleanup(dstdir)
dstfile = os.path.join(dstdir, os.path.basename(srcfile))
if srcfile != dstfile:
- shutil.copy(srcfile, dstfile)
+ try:
+ shutil.copy(srcfile, dstfile)
+ except PermissionError:
+ return False
self.track_for_cleanup(dstfile)
+ return True
- @OETestID(1625)
def test_devtool_load_plugin(self):
"""Test that devtool loads only the first found plugin in BBPATH."""
@@ -1394,15 +2059,17 @@ class DevtoolTests(DevtoolBase):
plugincontent = fh.readlines()
try:
self.assertIn('meta-selftest', srcfile, 'wrong bbpath plugin found')
- for path in searchpath:
- self._copy_file_with_cleanup(srcfile, path, 'lib', 'devtool')
+ searchpath = [
+ path for path in searchpath
+ if self._copy_file_with_cleanup(srcfile, path, 'lib', 'devtool')
+ ]
result = runCmd("devtool --quiet count")
self.assertEqual(result.output, '1')
result = runCmd("devtool --quiet multiloaded")
self.assertEqual(result.output, "no")
for path in searchpath:
result = runCmd("devtool --quiet bbdir")
- self.assertEqual(result.output, path)
+ self.assertEqual(os.path.realpath(result.output), os.path.realpath(path))
os.unlink(os.path.join(result.output, 'lib', 'devtool', 'bbpath.py'))
finally:
with open(srcfile, 'w') as fh:
@@ -1442,12 +2109,13 @@ class DevtoolTests(DevtoolBase):
recipedir = os.path.dirname(oldrecipefile)
olddir = os.path.join(recipedir, recipe + '-' + oldversion)
patchfn = '0001-Add-a-note-line-to-the-quick-reference.patch'
+ backportedpatchfn = 'backported.patch'
self.assertExists(os.path.join(olddir, patchfn), 'Original patch file does not exist')
- return recipe, oldrecipefile, recipedir, olddir, newversion, patchfn
+ self.assertExists(os.path.join(olddir, backportedpatchfn), 'Backported patch file does not exist')
+ return recipe, oldrecipefile, recipedir, olddir, newversion, patchfn, backportedpatchfn
- @OETestID(1623)
def test_devtool_finish_upgrade_origlayer(self):
- recipe, oldrecipefile, recipedir, olddir, newversion, patchfn = self._setup_test_devtool_finish_upgrade()
+ recipe, oldrecipefile, recipedir, olddir, newversion, patchfn, backportedpatchfn = self._setup_test_devtool_finish_upgrade()
# Ensure the recipe is where we think it should be (so that cleanup doesn't trash things)
self.assertIn('/meta-selftest/', recipedir)
# Try finish to the original layer
@@ -1458,15 +2126,23 @@ class DevtoolTests(DevtoolBase):
self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after finish')
self.assertNotExists(oldrecipefile, 'Old recipe file should have been deleted but wasn\'t')
self.assertNotExists(os.path.join(olddir, patchfn), 'Old patch file should have been deleted but wasn\'t')
+ self.assertNotExists(os.path.join(olddir, backportedpatchfn), 'Old backported patch file should have been deleted but wasn\'t')
newrecipefile = os.path.join(recipedir, '%s_%s.bb' % (recipe, newversion))
newdir = os.path.join(recipedir, recipe + '-' + newversion)
self.assertExists(newrecipefile, 'New recipe file should have been copied into existing layer but wasn\'t')
self.assertExists(os.path.join(newdir, patchfn), 'Patch file should have been copied into new directory but wasn\'t')
+ self.assertNotExists(os.path.join(newdir, backportedpatchfn), 'Backported patch file should not have been copied into new directory but was')
self.assertExists(os.path.join(newdir, '0002-Add-a-comment-to-the-code.patch'), 'New patch file should have been created but wasn\'t')
+ with open(newrecipefile, 'r') as f:
+ newcontent = f.read()
+ self.assertNotIn(backportedpatchfn, newcontent, "Backported patch should have been removed from the recipe but wasn't")
+            self.assertIn(patchfn, newcontent, "Old patch should not have been removed from the recipe but was")
+ self.assertIn("0002-Add-a-comment-to-the-code.patch", newcontent, "New patch should have been added to the recipe but wasn't")
+ self.assertIn("http://www.ivarch.com/programs/sources/pv-${PV}.tar.gz", newcontent, "New recipe no longer has upstream source in SRC_URI")
+
- @OETestID(1624)
def test_devtool_finish_upgrade_otherlayer(self):
- recipe, oldrecipefile, recipedir, olddir, newversion, patchfn = self._setup_test_devtool_finish_upgrade()
+ recipe, oldrecipefile, recipedir, olddir, newversion, patchfn, backportedpatchfn = self._setup_test_devtool_finish_upgrade()
# Ensure the recipe is where we think it should be (so that cleanup doesn't trash things)
self.assertIn('/meta-selftest/', recipedir)
# Try finish to a different layer - should create a bbappend
@@ -1482,10 +2158,18 @@ class DevtoolTests(DevtoolBase):
self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after finish')
self.assertExists(oldrecipefile, 'Old recipe file should not have been deleted')
self.assertExists(os.path.join(olddir, patchfn), 'Old patch file should not have been deleted')
+ self.assertExists(os.path.join(olddir, backportedpatchfn), 'Old backported patch file should not have been deleted')
newdir = os.path.join(newrecipedir, recipe + '-' + newversion)
self.assertExists(newrecipefile, 'New recipe file should have been copied into existing layer but wasn\'t')
self.assertExists(os.path.join(newdir, patchfn), 'Patch file should have been copied into new directory but wasn\'t')
+ self.assertNotExists(os.path.join(newdir, backportedpatchfn), 'Backported patch file should not have been copied into new directory but was')
self.assertExists(os.path.join(newdir, '0002-Add-a-comment-to-the-code.patch'), 'New patch file should have been created but wasn\'t')
+ with open(newrecipefile, 'r') as f:
+ newcontent = f.read()
+ self.assertNotIn(backportedpatchfn, newcontent, "Backported patch should have been removed from the recipe but wasn't")
+            self.assertIn(patchfn, newcontent, "Old patch should not have been removed from the recipe but was")
+ self.assertIn("0002-Add-a-comment-to-the-code.patch", newcontent, "New patch should have been added to the recipe but wasn't")
+ self.assertIn("http://www.ivarch.com/programs/sources/pv-${PV}.tar.gz", newcontent, "New recipe no longer has upstream source in SRC_URI")
def _setup_test_devtool_finish_modify(self):
# Check preconditions
@@ -1520,7 +2204,6 @@ class DevtoolTests(DevtoolBase):
self.fail('Unable to find recipe files directory for %s' % recipe)
return recipe, oldrecipefile, recipedir, filesdir
- @OETestID(1621)
def test_devtool_finish_modify_origlayer(self):
recipe, oldrecipefile, recipedir, filesdir = self._setup_test_devtool_finish_modify()
# Ensure the recipe is where we think it should be (so that cleanup doesn't trash things)
@@ -1535,7 +2218,6 @@ class DevtoolTests(DevtoolBase):
('??', '.*/.*-Add-a-comment-to-the-code.patch$')]
self._check_repo_status(recipedir, expected_status)
- @OETestID(1622)
def test_devtool_finish_modify_otherlayer(self):
recipe, oldrecipefile, recipedir, filesdir = self._setup_test_devtool_finish_modify()
# Ensure the recipe is where we think it should be (so that cleanup doesn't trash things)
@@ -1568,7 +2250,52 @@ class DevtoolTests(DevtoolBase):
if files:
self.fail('Unexpected file(s) copied next to bbappend: %s' % ', '.join(files))
- @OETestID(1626)
+ def test_devtool_finish_update_patch(self):
+ # This test uses a modified version of the sysdig recipe from meta-oe.
+ # - The patches have been renamed.
+ # - The dependencies are commented out since the recipe is not being
+ # built.
+ #
+ # The sysdig recipe is interesting in that it fetches two different Git
+        # repositories, and there are patches for both. Because devtool uses Git
+        # submodules to keep track of the second repository, it ends up creating
+        # commits that have to be ignored.
+ #
+ # This test will verify that the ignored commits actually are ignored
+ # when a commit in between is modified. It will also verify that the
+ # updated patch keeps its original name.
+
+ # Check preconditions
+ self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+ # Try modifying a recipe
+ self.track_for_cleanup(self.workspacedir)
+ recipe = 'sysdig-selftest'
+ recipefile = get_bb_var('FILE', recipe)
+ recipedir = os.path.dirname(recipefile)
+ result = runCmd('git status --porcelain .', cwd=recipedir)
+ if result.output.strip():
+ self.fail('Recipe directory for %s contains uncommitted changes' % recipe)
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool modify %s %s' % (recipe, tempdir))
+ self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (recipedir, recipe, recipe, os.path.basename(recipefile)))
+ self.assertExists(os.path.join(tempdir, 'CMakeLists.txt'), 'Extracted source could not be found')
+ # Make a change to one of the existing commits
+ result = runCmd('echo "# A comment " >> CMakeLists.txt', cwd=tempdir)
+ result = runCmd('git status --porcelain', cwd=tempdir)
+ self.assertIn('M CMakeLists.txt', result.output)
+ result = runCmd('git commit --fixup HEAD^ CMakeLists.txt', cwd=tempdir)
+ result = runCmd('git show -s --format=%s', cwd=tempdir)
+ self.assertIn('fixup! cmake: Pass PROBE_NAME via CFLAGS', result.output)
+ result = runCmd('GIT_SEQUENCE_EDITOR=true git rebase -i --autosquash devtool-base', cwd=tempdir)
+ result = runCmd('devtool finish %s meta-selftest' % recipe)
+ result = runCmd('devtool status')
+ self.assertNotIn(recipe, result.output, 'Recipe should have been reset by finish but wasn\'t')
+ self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after finish')
+ expected_status = [(' M', '.*/0099-cmake-Pass-PROBE_NAME-via-CFLAGS.patch$')]
+ self._check_repo_status(recipedir, expected_status)
+
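
A minimal standalone sketch of the fixup/autosquash flow that the test above drives through runCmd; the function name and arguments are illustrative, and devtool-base is the ref created by devtool modify in the extracted source tree:

    import os
    import subprocess

    def fixup_previous_commit(src_dir, touched_file):
        # Record the change as a fixup of the commit before HEAD.
        subprocess.run(['git', 'commit', '--fixup', 'HEAD^', touched_file],
                       cwd=src_dir, check=True)
        # Replay the series onto devtool-base, letting --autosquash fold the
        # fixup commit into its target while leaving the other commits alone.
        env = dict(os.environ, GIT_SEQUENCE_EDITOR='true')
        subprocess.run(['git', 'rebase', '-i', '--autosquash', 'devtool-base'],
                       cwd=src_dir, check=True, env=env)
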
def test_devtool_rename(self):
# Check preconditions
self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
@@ -1605,7 +2332,6 @@ class DevtoolTests(DevtoolBase):
self._test_recipe_contents(newrecipefile, checkvars, [])
# Try again - change just name this time
result = runCmd('devtool reset -n %s' % newrecipename)
- shutil.rmtree(newsrctree)
add_recipe()
newrecipefile = os.path.join(self.workspacedir, 'recipes', newrecipename, '%s_%s.bb' % (newrecipename, recipever))
result = runCmd('devtool rename %s %s' % (recipename, newrecipename))
@@ -1618,7 +2344,6 @@ class DevtoolTests(DevtoolBase):
self._test_recipe_contents(newrecipefile, checkvars, [])
# Try again - change just version this time
result = runCmd('devtool reset -n %s' % newrecipename)
- shutil.rmtree(newsrctree)
add_recipe()
newrecipefile = os.path.join(self.workspacedir, 'recipes', recipename, '%s_%s.bb' % (recipename, newrecipever))
result = runCmd('devtool rename %s -V %s' % (recipename, newrecipever))
@@ -1629,7 +2354,6 @@ class DevtoolTests(DevtoolBase):
checkvars['SRC_URI'] = url
self._test_recipe_contents(newrecipefile, checkvars, [])
- @OETestID(1577)
def test_devtool_virtual_kernel_modify(self):
"""
Summary: The purpose of this test case is to verify that
@@ -1651,17 +2375,18 @@ class DevtoolTests(DevtoolBase):
Expected: devtool modify is able to checkout the source of the kernel
and modification to the source and configurations are reflected
when building the kernel.
- """
- kernel_provider = get_bb_var('PREFERRED_PROVIDER_virtual/kernel')
- # Clean up the enviroment
+ """
+ kernel_provider = self.td['PREFERRED_PROVIDER_virtual/kernel']
+
+ # Clean up the environment
bitbake('%s -c clean' % kernel_provider)
tempdir = tempfile.mkdtemp(prefix='devtoolqa')
tempdir_cfg = tempfile.mkdtemp(prefix='config_qa')
self.track_for_cleanup(tempdir)
self.track_for_cleanup(tempdir_cfg)
self.track_for_cleanup(self.workspacedir)
- self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
self.add_command_to_tearDown('bitbake -c clean %s' % kernel_provider)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
#Step 1
#Here is just generated the config file instead of all the kernel to optimize the
#time of executing this test case.
@@ -1679,33 +2404,545 @@ class DevtoolTests(DevtoolBase):
self.assertExists(os.path.join(tempdir, 'Makefile'), 'Extracted source could not be found')
#Step 4.2
configfile = os.path.join(tempdir,'.config')
- diff = runCmd('diff %s %s' % (tmpconfig, configfile))
- self.assertEqual(0,diff.status,'Kernel .config file is not the same using bitbake and devtool')
+ runCmd('diff %s %s' % (tmpconfig, configfile))
+
#Step 4.3
#NOTE: virtual/kernel is mapped to kernel_provider
- result = runCmd('devtool build %s' % kernel_provider)
- self.assertEqual(0,result.status,'Cannot build kernel using `devtool build`')
+ runCmd('devtool build %s' % kernel_provider)
kernelfile = os.path.join(get_bb_var('KBUILD_OUTPUT', kernel_provider), 'vmlinux')
        self.assertExists(kernelfile, 'Kernel was not built correctly')
#Modify the kernel source
- modfile = os.path.join(tempdir,'arch/x86/boot/header.S')
- modstring = "Use a boot loader. Devtool testing."
- modapplied = runCmd("sed -i 's/Use a boot loader./%s/' %s" % (modstring, modfile))
- self.assertEqual(0,modapplied.status,'Modification to %s on kernel source failed' % modfile)
+ modfile = os.path.join(tempdir, 'init/version.c')
+ # Moved to uts.h in 6.1 onwards
+ modfile2 = os.path.join(tempdir, 'include/linux/uts.h')
+ runCmd("sed -i 's/Linux/LiNuX/g' %s %s" % (modfile, modfile2))
+
#Modify the configuration
- codeconfigfile = os.path.join(tempdir,'.config.new')
+ codeconfigfile = os.path.join(tempdir, '.config.new')
modconfopt = "CONFIG_SG_POOL=n"
- modconf = runCmd("sed -i 's/CONFIG_SG_POOL=y/%s/' %s" % (modconfopt, codeconfigfile))
- self.assertEqual(0,modconf.status,'Modification to %s failed' % codeconfigfile)
+ runCmd("sed -i 's/CONFIG_SG_POOL=y/%s/' %s" % (modconfopt, codeconfigfile))
+
#Build again kernel with devtool
- rebuild = runCmd('devtool build %s' % kernel_provider)
- self.assertEqual(0,rebuild.status,'Fail to build kernel after modification of source and config')
+ runCmd('devtool build %s' % kernel_provider)
+
#Step 4.4
- bzimagename = 'bzImage-' + get_bb_var('KERNEL_VERSION_NAME', kernel_provider)
- bzimagefile = os.path.join(get_bb_var('D', kernel_provider),'boot', bzimagename)
- checkmodcode = runCmd("grep '%s' %s" % (modstring, bzimagefile))
- self.assertEqual(0,checkmodcode.status,'Modification on kernel source failed')
+ runCmd("grep '%s' %s" % ('LiNuX', kernelfile))
+
#Step 4.5
- checkmodconfg = runCmd("grep %s %s" % (modconfopt, codeconfigfile))
- self.assertEqual(0,checkmodconfg.status,'Modification to configuration file failed')
+ runCmd("grep %s %s" % (modconfopt, codeconfigfile))
+
+
+class DevtoolIdeSdkTests(DevtoolBase):
+ def _write_bb_config(self, recipe_names):
+ """Helper to write the bitbake local.conf file"""
+ conf_lines = [
+ 'IMAGE_CLASSES += "image-combined-dbg"',
+ 'IMAGE_GEN_DEBUGFS = "1"',
+ 'IMAGE_INSTALL:append = " gdbserver %s"' % ' '.join(
+ [r + '-ptest' for r in recipe_names])
+ ]
+ self.write_config("\n".join(conf_lines))
+
+ def _check_workspace(self):
+        """Check that no workspace directory exists yet and set up its cleanup"""
+ self.assertTrue(not os.path.exists(self.workspacedir),
+ 'This test cannot be run with a workspace directory under the build directory')
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+
+ def _workspace_scripts_dir(self, recipe_name):
+ return os.path.realpath(os.path.join(self.builddir, 'workspace', 'ide-sdk', recipe_name, 'scripts'))
+
+ def _sources_scripts_dir(self, src_dir):
+ return os.path.realpath(os.path.join(src_dir, 'oe-scripts'))
+
+ def _workspace_gdbinit_dir(self, recipe_name):
+ return os.path.realpath(os.path.join(self.builddir, 'workspace', 'ide-sdk', recipe_name, 'scripts', 'gdbinit'))
+
+ def _sources_gdbinit_dir(self, src_dir):
+ return os.path.realpath(os.path.join(src_dir, 'oe-gdbinit'))
+
+ def _devtool_ide_sdk_recipe(self, recipe_name, build_file, testimage):
+        """Set up a recipe for working with devtool ide-sdk
+
+ Basically devtool modify -x followed by some tests
+ """
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.add_command_to_tearDown('bitbake -c clean %s' % recipe_name)
+
+ result = runCmd('devtool modify %s -x %s' % (recipe_name, tempdir))
+ self.assertExists(os.path.join(tempdir, build_file),
+ 'Extracted source could not be found')
+ self.assertExists(os.path.join(self.workspacedir, 'conf',
+ 'layer.conf'), 'Workspace directory not created')
+ matches = glob.glob(os.path.join(self.workspacedir,
+ 'appends', recipe_name + '.bbappend'))
+ self.assertTrue(matches, 'bbappend not created %s' % result.output)
+
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(recipe_name, result.output)
+ self.assertIn(tempdir, result.output)
+ self._check_src_repo(tempdir)
+
+        # Usually devtool ide-sdk would initiate the build of the SDK.
+        # But there is a circular dependency: Qemu has to be started first so that
+        # its IP address can be passed to devtool ide-sdk.
+ if testimage:
+ bitbake("%s qemu-native qemu-helper-native" % testimage)
+ deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
+ self.add_command_to_tearDown('bitbake -c clean %s' % testimage)
+ self.add_command_to_tearDown(
+ 'rm -f %s/%s*' % (deploy_dir_image, testimage))
+
+ return tempdir
+
+ def _get_recipe_ids(self, recipe_name):
+ """IDs needed to write recipe specific config entries into IDE config files"""
+ package_arch = get_bb_var('PACKAGE_ARCH', recipe_name)
+ recipe_id = recipe_name + "-" + package_arch
+ recipe_id_pretty = recipe_name + ": " + package_arch
+ return (recipe_id, recipe_id_pretty)
+
+ def _verify_install_script_code(self, tempdir, recipe_name):
+        """Verify that the scripts referred to by the tasks.json file are fine.
+
+        This function does not depend on Qemu. It therefore verifies that the
+        scripts exist and that the delete step works as expected, but it does
+        not try to deploy to Qemu.
+ """
+ recipe_id, recipe_id_pretty = self._get_recipe_ids(recipe_name)
+ with open(os.path.join(tempdir, '.vscode', 'tasks.json')) as tasks_j:
+ tasks_d = json.load(tasks_j)
+ tasks = tasks_d["tasks"]
+ task_install = next(
+ (task for task in tasks if task["label"] == "install && deploy-target %s" % recipe_id_pretty), None)
+ self.assertIsNot(task_install, None)
+        # Only check that the install-and-deploy script exists and run the delete
+        # script, since actually deploying would require e.g. Qemu running.
+ i_and_d_script = "install_and_deploy_" + recipe_id
+ i_and_d_script_path = os.path.join(
+ self._workspace_scripts_dir(recipe_name), i_and_d_script)
+ self.assertExists(i_and_d_script_path)
+ del_script = "delete_package_dirs_" + recipe_id
+ del_script_path = os.path.join(
+ self._workspace_scripts_dir(recipe_name), del_script)
+ self.assertExists(del_script_path)
+ runCmd(del_script_path, cwd=tempdir)
+
+ def _devtool_ide_sdk_qemu(self, tempdir, qemu, recipe_name, example_exe):
+ """Verify deployment and execution in Qemu system work for one recipe.
+
+ This function checks the entire SDK workflow: changing the code, recompiling
+ it and deploying it back to Qemu, and checking that the changes have been
+ incorporated into the provided binaries. It also runs the tests of the recipe.
+ """
+ recipe_id, _ = self._get_recipe_ids(recipe_name)
+ i_and_d_script = "install_and_deploy_" + recipe_id
+ install_deploy_cmd = os.path.join(
+ self._workspace_scripts_dir(recipe_name), i_and_d_script)
+ self.assertExists(install_deploy_cmd,
+ '%s script not found' % install_deploy_cmd)
+ runCmd(install_deploy_cmd)
+
+ MAGIC_STRING_ORIG = "Magic: 123456789"
+ MAGIC_STRING_NEW = "Magic: 987654321"
+ ptest_cmd = "ptest-runner " + recipe_name
+
+ # validate that SSH is working
+ status, _ = qemu.run("uname")
+ self.assertEqual(
+ status, 0, msg="Failed to connect to the SSH server on Qemu")
+
+ # Verify the unmodified example prints the magic string
+ status, output = qemu.run(example_exe)
+ self.assertEqual(status, 0, msg="%s failed: %s" %
+ (example_exe, output))
+ self.assertIn(MAGIC_STRING_ORIG, output)
+
+ # Verify the unmodified ptests work
+ status, output = qemu.run(ptest_cmd)
+ self.assertEqual(status, 0, msg="%s failed: %s" % (ptest_cmd, output))
+ self.assertIn("PASS: cpp-example-lib", output)
+
+ # Verify remote debugging works
+ self._gdb_cross_debugging(
+ qemu, recipe_name, example_exe, MAGIC_STRING_ORIG)
+
+ # Replace the Magic String in the code, compile and deploy to Qemu
+ cpp_example_lib_hpp = os.path.join(tempdir, 'cpp-example-lib.hpp')
+ with open(cpp_example_lib_hpp, 'r') as file:
+ cpp_code = file.read()
+ cpp_code = cpp_code.replace(MAGIC_STRING_ORIG, MAGIC_STRING_NEW)
+ with open(cpp_example_lib_hpp, 'w') as file:
+ file.write(cpp_code)
+ runCmd(install_deploy_cmd, cwd=tempdir)
+
+ # Verify the modified example prints the modified magic string
+ status, output = qemu.run(example_exe)
+ self.assertEqual(status, 0, msg="%s failed: %s" %
+ (example_exe, output))
+ self.assertNotIn(MAGIC_STRING_ORIG, output)
+ self.assertIn(MAGIC_STRING_NEW, output)
+
+ # Verify the modified example ptests work
+ status, output = qemu.run(ptest_cmd)
+ self.assertEqual(status, 0, msg="%s failed: %s" % (ptest_cmd, output))
+ self.assertIn("PASS: cpp-example-lib", output)
+
+        # Verify remote debugging works with the modified magic string
+ self._gdb_cross_debugging(
+ qemu, recipe_name, example_exe, MAGIC_STRING_NEW)
+
+ def _gdb_cross(self):
+ """Verify gdb-cross is provided by devtool ide-sdk"""
+ target_arch = self.td["TARGET_ARCH"]
+ target_sys = self.td["TARGET_SYS"]
+ gdb_recipe = "gdb-cross-" + target_arch
+ gdb_binary = target_sys + "-gdb"
+
+ native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", gdb_recipe)
+ r = runCmd("%s --version" % gdb_binary,
+ native_sysroot=native_sysroot, target_sys=target_sys)
+ self.assertEqual(r.status, 0)
+ self.assertIn("GNU gdb", r.output)
+
+ def _gdb_cross_debugging(self, qemu, recipe_name, example_exe, magic_string):
+ """Verify gdb-cross is working
+
+ Test remote debugging:
+ break main
+ run
+ continue
+ break CppExample::print_json()
+ continue
+ print CppExample::test_string.compare("cpp-example-lib Magic: 123456789")
+ $1 = 0
+ print CppExample::test_string.compare("cpp-example-lib Magic: 123456789aaa")
+ $2 = -3
+ list cpp-example-lib.hpp:13,13
+ 13 inline static const std::string test_string = "cpp-example-lib Magic: 123456789";
+ continue
+ """
+ sshargs = '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
+ gdbserver_script = os.path.join(self._workspace_scripts_dir(
+ recipe_name), 'gdbserver_1234_usr-bin-' + example_exe + '_m')
+ gdb_script = os.path.join(self._workspace_scripts_dir(
+ recipe_name), 'gdb_1234_usr-bin-' + example_exe)
+
+ # Start a gdbserver
+ r = runCmd(gdbserver_script)
+ self.assertEqual(r.status, 0)
+
+ # Check there is a gdbserver running
+ r = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, 'ps'))
+ self.assertEqual(r.status, 0)
+ self.assertIn("gdbserver ", r.output)
+
+ # Check the pid file is correct
+ test_cmd = "cat /proc/$(cat /tmp/gdbserver_1234_usr-bin-" + \
+ example_exe + "/pid)/cmdline"
+ r = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, test_cmd))
+ self.assertEqual(r.status, 0)
+ self.assertIn("gdbserver", r.output)
+
+ # Test remote debugging works
+ gdb_batch_cmd = " --batch -ex 'break main' -ex 'run'"
+ gdb_batch_cmd += " -ex 'break CppExample::print_json()' -ex 'continue'"
+ gdb_batch_cmd += " -ex 'print CppExample::test_string.compare(\"cpp-example-lib %s\")'" % magic_string
+ gdb_batch_cmd += " -ex 'print CppExample::test_string.compare(\"cpp-example-lib %saaa\")'" % magic_string
+ gdb_batch_cmd += " -ex 'list cpp-example-lib.hpp:13,13'"
+ gdb_batch_cmd += " -ex 'continue'"
+ r = runCmd(gdb_script + gdb_batch_cmd)
+ self.logger.debug("%s %s returned: %s", gdb_script,
+ gdb_batch_cmd, r.output)
+ self.assertEqual(r.status, 0)
+ self.assertIn("Breakpoint 1, main", r.output)
+        self.assertIn("$1 = 0", r.output)  # test_string.compare: strings are equal
+        self.assertIn("$2 = -3", r.output)  # test_string.compare: compared string is longer
+ self.assertIn(
+ 'inline static const std::string test_string = "cpp-example-lib %s";' % magic_string, r.output)
+ self.assertIn("exited normally", r.output)
+
+ # Stop the gdbserver
+ r = runCmd(gdbserver_script + ' stop')
+ self.assertEqual(r.status, 0)
+
+ # Check there is no gdbserver running
+ r = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, 'ps'))
+ self.assertEqual(r.status, 0)
+ self.assertNotIn("gdbserver ", r.output)
+
+ def _verify_cmake_preset(self, tempdir):
+ """Verify the generated cmake preset works as expected
+
+ Check if compiling works
+ Check if unit tests can be executed in qemu (not qemu-system)
+ """
+ with open(os.path.join(tempdir, 'CMakeUserPresets.json')) as cmake_preset_j:
+ cmake_preset_d = json.load(cmake_preset_j)
+ config_presets = cmake_preset_d["configurePresets"]
+ self.assertEqual(len(config_presets), 1)
+ cmake_exe = config_presets[0]["cmakeExecutable"]
+ preset_name = config_presets[0]["name"]
+
+ # Verify the wrapper for cmake native is available
+ self.assertExists(cmake_exe)
+
+ # Verify the cmake preset generated by devtool ide-sdk is available
+ result = runCmd('%s --list-presets' % cmake_exe, cwd=tempdir)
+ self.assertIn(preset_name, result.output)
+
+ # Verify cmake re-uses the o files compiled by bitbake
+ result = runCmd('%s --build --preset %s' %
+ (cmake_exe, preset_name), cwd=tempdir)
+ self.assertIn("ninja: no work to do.", result.output)
+
+ # Verify the unit tests work (in Qemu user mode)
+ result = runCmd('%s --build --preset %s --target test' %
+ (cmake_exe, preset_name), cwd=tempdir)
+ self.assertIn("100% tests passed", result.output)
+
+ # Verify re-building and testing works again
+ result = runCmd('%s --build --preset %s --target clean' %
+ (cmake_exe, preset_name), cwd=tempdir)
+ self.assertIn("Cleaning", result.output)
+ result = runCmd('%s --build --preset %s' %
+ (cmake_exe, preset_name), cwd=tempdir)
+ self.assertIn("Building", result.output)
+ self.assertIn("Linking", result.output)
+ result = runCmd('%s --build --preset %s --target test' %
+ (cmake_exe, preset_name), cwd=tempdir)
+ self.assertIn("Running tests...", result.output)
+ self.assertIn("100% tests passed", result.output)
+
+ @OETestTag("runqemu")
+ def test_devtool_ide_sdk_none_qemu(self):
+ """Start qemu-system and run tests for multiple recipes. ide=none is used."""
+ recipe_names = ["cmake-example", "meson-example"]
+ testimage = "oe-selftest-image"
+
+ self._check_workspace()
+ self._write_bb_config(recipe_names)
+ self._check_runqemu_prerequisites()
+
+ # Verify deployment to Qemu (system mode) works
+ bitbake(testimage)
+ with runqemu(testimage, runqemuparams="nographic") as qemu:
+ # cmake-example recipe
+ recipe_name = "cmake-example"
+ example_exe = "cmake-example"
+ build_file = "CMakeLists.txt"
+ tempdir = self._devtool_ide_sdk_recipe(
+ recipe_name, build_file, testimage)
+ bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@%s -c --ide=none' % (
+ recipe_name, testimage, qemu.ip)
+ runCmd(bitbake_sdk_cmd)
+ self._gdb_cross()
+ self._verify_cmake_preset(tempdir)
+ self._devtool_ide_sdk_qemu(tempdir, qemu, recipe_name, example_exe)
+ # Verify the oe-scripts sym-link is valid
+ self.assertEqual(self._workspace_scripts_dir(
+ recipe_name), self._sources_scripts_dir(tempdir))
+
+ # meson-example recipe
+ recipe_name = "meson-example"
+ example_exe = "mesonex"
+ build_file = "meson.build"
+ tempdir = self._devtool_ide_sdk_recipe(
+ recipe_name, build_file, testimage)
+ bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@%s -c --ide=none' % (
+ recipe_name, testimage, qemu.ip)
+ runCmd(bitbake_sdk_cmd)
+ self._gdb_cross()
+ self._devtool_ide_sdk_qemu(tempdir, qemu, recipe_name, example_exe)
+ # Verify the oe-scripts sym-link is valid
+ self.assertEqual(self._workspace_scripts_dir(
+ recipe_name), self._sources_scripts_dir(tempdir))
+
+ def test_devtool_ide_sdk_code_cmake(self):
+ """Verify a cmake recipe works with ide=code mode"""
+ recipe_name = "cmake-example"
+ build_file = "CMakeLists.txt"
+ testimage = "oe-selftest-image"
+
+ self._check_workspace()
+ self._write_bb_config([recipe_name])
+ tempdir = self._devtool_ide_sdk_recipe(
+ recipe_name, build_file, testimage)
+ bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@192.168.17.17 -c --ide=code' % (
+ recipe_name, testimage)
+ runCmd(bitbake_sdk_cmd)
+ self._verify_cmake_preset(tempdir)
+ self._verify_install_script_code(tempdir, recipe_name)
+ self._gdb_cross()
+
+ def test_devtool_ide_sdk_code_meson(self):
+ """Verify a meson recipe works with ide=code mode"""
+ recipe_name = "meson-example"
+ build_file = "meson.build"
+ testimage = "oe-selftest-image"
+
+ self._check_workspace()
+ self._write_bb_config([recipe_name])
+ tempdir = self._devtool_ide_sdk_recipe(
+ recipe_name, build_file, testimage)
+ bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@192.168.17.17 -c --ide=code' % (
+ recipe_name, testimage)
+ runCmd(bitbake_sdk_cmd)
+
+ with open(os.path.join(tempdir, '.vscode', 'settings.json')) as settings_j:
+ settings_d = json.load(settings_j)
+ meson_exe = settings_d["mesonbuild.mesonPath"]
+ meson_build_folder = settings_d["mesonbuild.buildFolder"]
+
+ # Verify the wrapper for meson native is available
+ self.assertExists(meson_exe)
+
+ # Verify meson re-uses the o files compiled by bitbake
+ result = runCmd('%s compile -C %s' %
+ (meson_exe, meson_build_folder), cwd=tempdir)
+ self.assertIn("ninja: no work to do.", result.output)
+
+ # Verify the unit tests work (in Qemu)
+ runCmd('%s test -C %s' % (meson_exe, meson_build_folder), cwd=tempdir)
+
+ # Verify re-building and testing works again
+ result = runCmd('%s compile -C %s --clean' %
+ (meson_exe, meson_build_folder), cwd=tempdir)
+ self.assertIn("Cleaning...", result.output)
+ result = runCmd('%s compile -C %s' %
+ (meson_exe, meson_build_folder), cwd=tempdir)
+ self.assertIn("Linking target", result.output)
+ runCmd('%s test -C %s' % (meson_exe, meson_build_folder), cwd=tempdir)
+
+ self._verify_install_script_code(tempdir, recipe_name)
+ self._gdb_cross()
+
+ def test_devtool_ide_sdk_shared_sysroots(self):
+ """Verify the shared sysroot SDK"""
+
+ # Handle the workspace (which is not needed by this test case)
+ self._check_workspace()
+
+ result_init = runCmd(
+ 'devtool ide-sdk -m shared oe-selftest-image cmake-example meson-example --ide=code')
+ bb_vars = get_bb_vars(
+ ['REAL_MULTIMACH_TARGET_SYS', 'DEPLOY_DIR_IMAGE', 'COREBASE'], "meta-ide-support")
+ environment_script = 'environment-setup-%s' % bb_vars['REAL_MULTIMACH_TARGET_SYS']
+ deploydir = bb_vars['DEPLOY_DIR_IMAGE']
+ environment_script_path = os.path.join(deploydir, environment_script)
+ cpp_example_src = os.path.join(
+ bb_vars['COREBASE'], 'meta-selftest', 'recipes-test', 'cpp', 'files')
+
+ # Verify the cross environment script is available
+ self.assertExists(environment_script_path)
+
+ def runCmdEnv(cmd, cwd):
+ cmd = '/bin/sh -c ". %s > /dev/null && %s"' % (
+ environment_script_path, cmd)
+            return runCmd(cmd, cwd=cwd)
+
+ # Verify building the C++ example works with CMake
+ tempdir_cmake = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir_cmake)
+
+ result_cmake = runCmdEnv("which cmake", cwd=tempdir_cmake)
+ cmake_native = os.path.normpath(result_cmake.output.strip())
+ self.assertExists(cmake_native)
+
+ runCmdEnv('cmake %s' % cpp_example_src, cwd=tempdir_cmake)
+ runCmdEnv('cmake --build %s' % tempdir_cmake, cwd=tempdir_cmake)
+
+        # Verify the printed note really refers to a cmake executable
+ cmake_native_code = ""
+ for line in result_init.output.splitlines():
+ m = re.search(r'"cmake.cmakePath": "(.*)"', line)
+ if m:
+ cmake_native_code = m.group(1)
+ break
+ self.assertExists(cmake_native_code)
+ self.assertEqual(cmake_native, cmake_native_code)
+
+ # Verify building the C++ example works with Meson
+ tempdir_meson = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir_meson)
+
+        result_meson = runCmdEnv("which meson", cwd=tempdir_meson)
+        meson_native = os.path.normpath(result_meson.output.strip())
+ self.assertExists(meson_native)
+
+ runCmdEnv('meson setup %s' % tempdir_meson, cwd=cpp_example_src)
+ runCmdEnv('meson compile', cwd=tempdir_meson)
+
+ def test_devtool_ide_sdk_plugins(self):
+ """Test that devtool ide-sdk can use plugins from other layers."""
+
+ # We need a workspace layer and a modified recipe (but no image)
+ modified_recipe_name = "meson-example"
+ modified_build_file = "meson.build"
+ testimage = "oe-selftest-image"
+ shared_recipe_name = "cmake-example"
+
+ self._check_workspace()
+ self._write_bb_config([modified_recipe_name])
+ tempdir = self._devtool_ide_sdk_recipe(
+ modified_recipe_name, modified_build_file, None)
+
+ IDE_RE = re.compile(r'.*--ide \{(.*)\}.*')
+
+ def get_ides_from_help(help_str):
+ m = IDE_RE.search(help_str)
+ return m.group(1).split(',')
+
+ # verify the default plugins are available but the foo plugin is not
+ result = runCmd('devtool ide-sdk -h')
+ found_ides = get_ides_from_help(result.output)
+ self.assertIn('code', found_ides)
+ self.assertIn('none', found_ides)
+ self.assertNotIn('foo', found_ides)
+
+ shared_config_file = os.path.join(tempdir, 'shared-config.txt')
+ shared_config_str = 'Dummy shared IDE config'
+ modified_config_file = os.path.join(tempdir, 'modified-config.txt')
+ modified_config_str = 'Dummy modified IDE config'
+
+ # Generate a foo plugin in the workspace layer
+ plugin_dir = os.path.join(
+ self.workspacedir, 'lib', 'devtool', 'ide_plugins')
+ os.makedirs(plugin_dir)
+ plugin_code = 'from devtool.ide_plugins import IdeBase\n\n'
+ plugin_code += 'class IdeFoo(IdeBase):\n'
+ plugin_code += ' def setup_shared_sysroots(self, shared_env):\n'
+ plugin_code += ' with open("%s", "w") as config_file:\n' % shared_config_file
+ plugin_code += ' config_file.write("%s")\n\n' % shared_config_str
+ plugin_code += ' def setup_modified_recipe(self, args, image_recipe, modified_recipe):\n'
+ plugin_code += ' with open("%s", "w") as config_file:\n' % modified_config_file
+ plugin_code += ' config_file.write("%s")\n\n' % modified_config_str
+ plugin_code += 'def register_ide_plugin(ide_plugins):\n'
+ plugin_code += ' ide_plugins["foo"] = IdeFoo\n'
+
+ plugin_py = os.path.join(plugin_dir, 'ide_foo.py')
+ with open(plugin_py, 'w') as plugin_file:
+ plugin_file.write(plugin_code)
+
+ # Verify the foo plugin is available as well
+ result = runCmd('devtool ide-sdk -h')
+ found_ides = get_ides_from_help(result.output)
+ self.assertIn('code', found_ides)
+ self.assertIn('none', found_ides)
+ self.assertIn('foo', found_ides)
+
+ # Verify the foo plugin generates a shared config
+ result = runCmd(
+ 'devtool ide-sdk -m shared --skip-bitbake --ide foo %s' % shared_recipe_name)
+ with open(shared_config_file) as shared_config:
+ shared_config_new = shared_config.read()
+ self.assertEqual(shared_config_str, shared_config_new)
+
+ # Verify the foo plugin generates a modified config
+ result = runCmd('devtool ide-sdk --skip-bitbake --ide foo %s %s' %
+ (modified_recipe_name, testimage))
+ with open(modified_config_file) as modified_config:
+ modified_config_new = modified_config.read()
+ self.assertEqual(modified_config_str, modified_config_new)
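
For readability, the plugin module that test_devtool_ide_sdk_plugins assembles as a string above corresponds roughly to the following file; the two config file paths are placeholders for the tempdir paths the test substitutes in:

    # ide_foo.py, written into <workspace>/lib/devtool/ide_plugins/ by the test
    from devtool.ide_plugins import IdeBase

    class IdeFoo(IdeBase):
        def setup_shared_sysroots(self, shared_env):
            # Placeholder path; the test uses its shared-config.txt tempdir path here.
            with open("/path/to/shared-config.txt", "w") as config_file:
                config_file.write("Dummy shared IDE config")

        def setup_modified_recipe(self, args, image_recipe, modified_recipe):
            # Placeholder path; the test uses its modified-config.txt tempdir path here.
            with open("/path/to/modified-config.txt", "w") as config_file:
                config_file.write("Dummy modified IDE config")

    def register_ide_plugin(ide_plugins):
        ide_plugins["foo"] = IdeFoo
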
diff --git a/meta/lib/oeqa/selftest/cases/diffoscope/A/file.txt b/meta/lib/oeqa/selftest/cases/diffoscope/A/file.txt
new file mode 100644
index 0000000000..f70f10e4db
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/diffoscope/A/file.txt
@@ -0,0 +1 @@
+A
diff --git a/meta/lib/oeqa/selftest/cases/diffoscope/B/file.txt b/meta/lib/oeqa/selftest/cases/diffoscope/B/file.txt
new file mode 100644
index 0000000000..223b7836fb
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/diffoscope/B/file.txt
@@ -0,0 +1 @@
+B
diff --git a/meta/lib/oeqa/selftest/cases/distrodata.py b/meta/lib/oeqa/selftest/cases/distrodata.py
index 12540adc7d..ad952c004b 100644
--- a/meta/lib/oeqa/selftest/cases/distrodata.py
+++ b/meta/lib/oeqa/selftest/cases/distrodata.py
@@ -1,31 +1,29 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
from oeqa.selftest.case import OESelftestTestCase
-from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
-from oeqa.utils.decorators import testcase
-from oeqa.utils.ftools import write_file
-from oeqa.core.decorator.oeid import OETestID
-class Distrodata(OESelftestTestCase):
+import oe.recipeutils
- @classmethod
- def setUpClass(cls):
- super(Distrodata, cls).setUpClass()
+class Distrodata(OESelftestTestCase):
- @OETestID(1902)
def test_checkpkg(self):
"""
Summary: Test that upstream version checks do not regress
Expected: Upstream version checks should succeed except for the recipes listed in the exception list.
Product: oe-core
- Author: Alexander Kanavin <alexander.kanavin@intel.com>
+ Author: Alexander Kanavin <alex.kanavin@gmail.com>
"""
- feature = 'INHERIT += "distrodata"\n'
- feature += 'LICENSE_FLAGS_WHITELIST += " commercial"\n'
-
+ feature = 'LICENSE_FLAGS_ACCEPTED += " commercial"\n'
self.write_config(feature)
- bitbake('-c checkpkg world')
- checkpkg_result = open(os.path.join(get_bb_var("LOG_DIR"), "checkpkg.csv")).readlines()[1:]
- regressed_failures = [pkg_data[0] for pkg_data in [pkg_line.split('\t') for pkg_line in checkpkg_result] if pkg_data[11] == 'UNKNOWN_BROKEN']
- regressed_successes = [pkg_data[0] for pkg_data in [pkg_line.split('\t') for pkg_line in checkpkg_result] if pkg_data[11] == 'KNOWN_BROKEN']
+
+ pkgs = oe.recipeutils.get_recipe_upgrade_status()
+
+ regressed_failures = [pkg[0] for pkg in pkgs if pkg[1] == 'UNKNOWN_BROKEN']
+ regressed_successes = [pkg[0] for pkg in pkgs if pkg[1] == 'KNOWN_BROKEN']
msg = ""
if len(regressed_failures) > 0:
msg = msg + """
@@ -40,3 +38,80 @@ The following packages have been checked successfully for upstream versions,
but their recipes claim otherwise by setting UPSTREAM_VERSION_UNKNOWN. Please remove that line from the recipes.
""" + "\n".join(regressed_successes)
self.assertTrue(len(regressed_failures) == 0 and len(regressed_successes) == 0, msg)
+
+ def test_maintainers(self):
+ """
+ Summary: Test that oe-core recipes have a maintainer and entries in maintainers list have a recipe
+ Expected: All oe-core recipes (except a few special static/testing ones) should have a maintainer listed in maintainers.inc file.
+ Expected: All entries in maintainers list should have a recipe file that matches them
+ Product: oe-core
+ Author: Alexander Kanavin <alex.kanavin@gmail.com>
+ """
+ def is_exception(pkg):
+ exceptions = ["packagegroup-",]
+ for i in exceptions:
+ if i in pkg:
+ return True
+ return False
+
+ def is_maintainer_exception(entry):
+ exceptions = ["musl", "newlib", "linux-yocto", "linux-dummy", "mesa-gl", "libgfortran", "libx11-compose-data",
+ "cve-update-nvd2-native",]
+ for i in exceptions:
+ if i in entry:
+ return True
+ return False
+
+ feature = 'require conf/distro/include/maintainers.inc\nLICENSE_FLAGS_ACCEPTED += " commercial"\nPARSE_ALL_RECIPES = "1"\nPACKAGE_CLASSES = "package_ipk package_deb package_rpm"\n'
+ self.write_config(feature)
+
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=False)
+
+ with_maintainer_list = []
+ no_maintainer_list = []
+
+ missing_recipes = []
+ recipes = []
+ prefix = "RECIPE_MAINTAINER:pn-"
+
+ # We could have used all_recipes() here, but this method will find
+ # every recipe if we ever move to setting RECIPE_MAINTAINER in recipe files
+ # instead of maintainers.inc
+ for fn in tinfoil.all_recipe_files(variants=False):
+ if not '/meta/recipes-' in fn:
+ # We are only interested in OE-Core
+ continue
+ rd = tinfoil.parse_recipe_file(fn, appends=False)
+ pn = rd.getVar('PN')
+ recipes.append(pn)
+ if is_exception(pn):
+ continue
+ if rd.getVar('RECIPE_MAINTAINER'):
+ with_maintainer_list.append((pn, fn))
+ else:
+ no_maintainer_list.append((pn, fn))
+
+ maintainers = tinfoil.config_data.keys()
+ for key in maintainers:
+ if key.startswith(prefix):
+ recipe = tinfoil.config_data.expand(key[len(prefix):])
+ if is_maintainer_exception(recipe):
+ continue
+ if recipe not in recipes:
+ missing_recipes.append(recipe)
+
+ if no_maintainer_list:
+ self.fail("""
+The following recipes do not have a maintainer assigned to them. Please add an entry to meta/conf/distro/include/maintainers.inc file.
+""" + "\n".join(['%s (%s)' % i for i in no_maintainer_list]))
+
+ if not with_maintainer_list:
+ self.fail("""
+The list of oe-core recipes with maintainers is empty. This may indicate that the test has regressed and needs fixing.
+""")
+
+ if missing_recipes:
+ self.fail("""
+Unable to find recipes for the following entries in maintainers.inc:
+""" + "\n".join(['%s' % i for i in missing_recipes]))
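
As a side note, the data that test_checkpkg consumes above can be inspected with a short snippet like the one below; it assumes only the two tuple fields the test itself uses (recipe name and upstream-check status) and has to run inside an initialised build environment:

    # Sketch: list recipes whose upstream version check looks wrong.
    import oe.recipeutils

    for pkg in oe.recipeutils.get_recipe_upgrade_status():
        name, status = pkg[0], pkg[1]
        if status in ('UNKNOWN_BROKEN', 'KNOWN_BROKEN'):
            print(name, status)
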
diff --git a/meta/lib/oeqa/selftest/cases/efibootpartition.py b/meta/lib/oeqa/selftest/cases/efibootpartition.py
new file mode 100644
index 0000000000..fa74103dec
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/efibootpartition.py
@@ -0,0 +1,33 @@
+# Based on runqemu.py test file
+#
+# Copyright (c) 2017 Wind River Systems, Inc.
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, runqemu
+from oeqa.core.decorator.data import skipIfNotMachine
+import oe.types
+
+class GenericEFITest(OESelftestTestCase):
+ """EFI booting test class"""
+ @skipIfNotMachine("qemux86-64", "test is qemux86-64 specific currently")
+ def test_boot_efi(self):
+ cmd = "runqemu nographic serial wic ovmf"
+ if oe.types.qemu_use_kvm(self.td.get('QEMU_USE_KVM', 0), self.td["TARGET_ARCH"]):
+ cmd += " kvm"
+ image = "core-image-minimal"
+
+ self.write_config("""
+EFI_PROVIDER = "systemd-boot"
+IMAGE_FSTYPES:pn-%s:append = " wic"
+MACHINE_FEATURES:append = " efi"
+WKS_FILE = "efi-bootdisk.wks.in"
+IMAGE_INSTALL:append = " grub-efi systemd-boot kernel-image-bzimage"
+"""
+% (image))
+
+ bitbake(image + " ovmf")
+ with runqemu(image, ssh=False, launch_cmd=cmd) as qemu:
+ self.assertTrue(qemu.runner.logged, "Failed: %s" % cmd)
diff --git a/meta/lib/oeqa/selftest/cases/eSDK.py b/meta/lib/oeqa/selftest/cases/esdk.py
index d03188f2f7..9f5de2cde7 100644
--- a/meta/lib/oeqa/selftest/cases/eSDK.py
+++ b/meta/lib/oeqa/selftest/cases/esdk.py
@@ -1,10 +1,16 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
import tempfile
import shutil
import os
import glob
-from oeqa.core.decorator.oeid import OETestID
+import time
from oeqa.selftest.case import OESelftestTestCase
-from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
+from oeqa.utils.commands import runCmd, bitbake, get_bb_vars
class oeSDKExtSelfTest(OESelftestTestCase):
"""
@@ -31,7 +37,7 @@ class oeSDKExtSelfTest(OESelftestTestCase):
if not 'shell' in options:
options['shell'] = True
- runCmd("cd %s; . %s; %s" % (tmpdir_eSDKQA, env_eSDK, cmd), **options)
+ runCmd("cd %s; unset BBPATH; unset BUILDDIR; . %s; %s" % (tmpdir_eSDKQA, env_eSDK, cmd), **options)
@staticmethod
def generate_eSDK(image):
@@ -59,7 +65,7 @@ class oeSDKExtSelfTest(OESelftestTestCase):
cls.env_eSDK = oeSDKExtSelfTest.get_esdk_environment('', cls.tmpdir_eSDKQA)
sstate_config="""
-SDK_LOCAL_CONF_WHITELIST = "SSTATE_MIRRORS"
+ESDK_LOCALCONF_ALLOW = "SSTATE_MIRRORS"
SSTATE_MIRRORS = "file://.* file://%s/PATH"
CORE_IMAGE_EXTRA_INSTALL = "perl"
""" % sstate_dir
@@ -70,11 +76,13 @@ CORE_IMAGE_EXTRA_INSTALL = "perl"
@classmethod
def setUpClass(cls):
super(oeSDKExtSelfTest, cls).setUpClass()
- cls.tmpdir_eSDKQA = tempfile.mkdtemp(prefix='eSDKQA')
+ cls.image = 'core-image-minimal'
- sstate_dir = get_bb_var('SSTATE_DIR')
+ bb_vars = get_bb_vars(['SSTATE_DIR', 'WORKDIR'], cls.image)
+ bb.utils.mkdirhier(bb_vars["WORKDIR"])
+ cls.tmpdirobj = tempfile.TemporaryDirectory(prefix="selftest-esdk-", dir=bb_vars["WORKDIR"])
+ cls.tmpdir_eSDKQA = cls.tmpdirobj.name
- cls.image = 'core-image-minimal'
oeSDKExtSelfTest.generate_eSDK(cls.image)
# Install eSDK
@@ -85,25 +93,28 @@ CORE_IMAGE_EXTRA_INSTALL = "perl"
# Configure eSDK to use sstate mirror from poky
sstate_config="""
-SDK_LOCAL_CONF_WHITELIST = "SSTATE_MIRRORS"
+ESDK_LOCALCONF_ALLOW = "SSTATE_MIRRORS"
SSTATE_MIRRORS = "file://.* file://%s/PATH"
- """ % sstate_dir
+ """ % bb_vars["SSTATE_DIR"]
with open(os.path.join(cls.tmpdir_eSDKQA, 'conf', 'local.conf'), 'a+') as f:
f.write(sstate_config)
@classmethod
def tearDownClass(cls):
- shutil.rmtree(cls.tmpdir_eSDKQA, ignore_errors=True)
- super(oeSDKExtSelfTest, cls).tearDownClass()
+ for i in range(0, 10):
+ if os.path.exists(os.path.join(cls.tmpdir_eSDKQA, 'bitbake.lock')) or os.path.exists(os.path.join(cls.tmpdir_eSDKQA, 'cache/hashserv.db-wal')):
+ time.sleep(1)
+ else:
+ break
+ cls.tmpdirobj.cleanup()
+ super().tearDownClass()
- @OETestID(1602)
def test_install_libraries_headers(self):
pn_sstate = 'bc'
bitbake(pn_sstate)
cmd = "devtool sdk-install %s " % pn_sstate
oeSDKExtSelfTest.run_esdk_cmd(self.env_eSDK, self.tmpdir_eSDKQA, cmd)
- @OETestID(1603)
def test_image_generation_binary_feeds(self):
image = 'core-image-minimal'
cmd = "devtool build-image %s" % image
diff --git a/meta/lib/oeqa/selftest/cases/externalsrc.py b/meta/lib/oeqa/selftest/cases/externalsrc.py
new file mode 100644
index 0000000000..1d800dc82c
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/externalsrc.py
@@ -0,0 +1,44 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import shutil
+import tempfile
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import get_bb_var, runCmd
+
+class ExternalSrc(OESelftestTestCase):
+ # test that srctree_hash_files does not crash
+ # we should be actually checking do_compile[file-checksums] but oeqa currently does not support it
+    # we should actually be checking do_compile[file-checksums], but oeqa currently does not support it
+ def test_externalsrc_srctree_hash_files(self):
+ test_recipe = "git-submodule-test"
+ git_url = "git://git.yoctoproject.org/git-submodule-test"
+ externalsrc_dir = tempfile.TemporaryDirectory(prefix="externalsrc").name
+
+ self.write_config(
+ """
+INHERIT += "externalsrc"
+EXTERNALSRC:pn-%s = "%s"
+""" % (test_recipe, externalsrc_dir)
+ )
+
+ # test with git without submodules
+ runCmd('git clone %s %s' % (git_url, externalsrc_dir))
+ os.unlink(externalsrc_dir + "/.gitmodules")
+ open(".gitmodules", 'w').close() # local file .gitmodules in cwd should not affect externalsrc parsing
+        self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg="S is not equal to EXTERNALSRC")
+ os.unlink(".gitmodules")
+
+ # test with git with submodules
+ runCmd('git checkout .gitmodules', cwd=externalsrc_dir)
+ runCmd('git submodule update --init --recursive', cwd=externalsrc_dir)
+        self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg="S is not equal to EXTERNALSRC")
+
+ # test without git
+ shutil.rmtree(os.path.join(externalsrc_dir, ".git"))
+        self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg="S is not equal to EXTERNALSRC")
diff --git a/meta/lib/oeqa/selftest/cases/fetch.py b/meta/lib/oeqa/selftest/cases/fetch.py
new file mode 100644
index 0000000000..44099176fc
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/fetch.py
@@ -0,0 +1,110 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import tempfile
+import textwrap
+import bb.tinfoil
+import oe.path
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+
+class Fetch(OESelftestTestCase):
+ def test_git_mirrors(self):
+ """
+ Verify that the git fetcher will fall back to the HTTP mirrors. The
+ recipe needs to be one that we have on the Yocto Project source mirror
+ and is hosted in git.
+ """
+
+        # TODO: use tempfile.mkdtemp instead of hardcoding the path
+ dldir = os.path.join(self.builddir, "download-git-mirrors")
+ self.track_for_cleanup(dldir)
+
+ # No mirrors, should use git to fetch successfully
+ features = """
+DL_DIR = "%s"
+MIRRORS:forcevariable = ""
+PREMIRRORS:forcevariable = ""
+""" % dldir
+ self.write_config(features)
+ oe.path.remove(dldir, recurse=True)
+ bitbake("dbus-wait -c fetch -f")
+
+ # No mirrors and broken git, should fail
+ features = """
+DL_DIR = "%s"
+SRC_URI:pn-dbus-wait = "git://git.yoctoproject.org/dbus-wait;branch=master;protocol=git"
+GIT_PROXY_COMMAND = "false"
+MIRRORS:forcevariable = ""
+PREMIRRORS:forcevariable = ""
+""" % dldir
+ self.write_config(features)
+ oe.path.remove(dldir, recurse=True)
+ with self.assertRaises(AssertionError):
+ bitbake("dbus-wait -c fetch -f")
+
+ # Broken git but a specific mirror
+ features = """
+DL_DIR = "%s"
+SRC_URI:pn-dbus-wait = "git://git.yoctoproject.org/dbus-wait;branch=master;protocol=git"
+GIT_PROXY_COMMAND = "false"
+MIRRORS:forcevariable = "git://.*/.* http://downloads.yoctoproject.org/mirror/sources/"
+""" % dldir
+ self.write_config(features)
+ oe.path.remove(dldir, recurse=True)
+ bitbake("dbus-wait -c fetch -f")
+
+
+class Dependencies(OESelftestTestCase):
+ def write_recipe(self, content, tempdir):
+ f = os.path.join(tempdir, "test.bb")
+ with open(f, "w") as fd:
+ fd.write(content)
+ return f
+
+ def test_dependencies(self):
+ """
+ Verify that the correct dependencies are generated for specific SRC_URI entries.
+ """
+
+ with bb.tinfoil.Tinfoil() as tinfoil, tempfile.TemporaryDirectory(prefix="selftest-fetch") as tempdir:
+ tinfoil.prepare(config_only=False, quiet=2)
+
+ r = """
+ LICENSE="CLOSED"
+ SRC_URI="http://example.com/tarball.zip"
+ """
+ f = self.write_recipe(textwrap.dedent(r), tempdir)
+ d = tinfoil.parse_recipe_file(f)
+ self.assertIn("wget-native", d.getVarFlag("do_fetch", "depends"))
+ self.assertIn("unzip-native", d.getVarFlag("do_unpack", "depends"))
+
+ # Verify that the downloadfilename overrides the URI
+ r = """
+ LICENSE="CLOSED"
+ SRC_URI="https://example.com/tarball;downloadfilename=something.zip"
+ """
+ f = self.write_recipe(textwrap.dedent(r), tempdir)
+ d = tinfoil.parse_recipe_file(f)
+ self.assertIn("wget-native", d.getVarFlag("do_fetch", "depends"))
+ self.assertIn("unzip-native", d.getVarFlag("do_unpack", "depends") or "")
+
+ r = """
+ LICENSE="CLOSED"
+ SRC_URI="ftp://example.com/tarball.lz"
+ """
+ f = self.write_recipe(textwrap.dedent(r), tempdir)
+ d = tinfoil.parse_recipe_file(f)
+ self.assertIn("wget-native", d.getVarFlag("do_fetch", "depends"))
+ self.assertIn("lzip-native", d.getVarFlag("do_unpack", "depends"))
+
+ r = """
+ LICENSE="CLOSED"
+ SRC_URI="git://example.com/repo;branch=master;rev=ffffffffffffffffffffffffffffffffffffffff"
+ """
+ f = self.write_recipe(textwrap.dedent(r), tempdir)
+ d = tinfoil.parse_recipe_file(f)
+ self.assertIn("git-native", d.getVarFlag("do_fetch", "depends"))
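
A minimal sketch of the same tinfoil-based dependency inspection outside the selftest harness; the recipe path is hypothetical and any parseable .bb file will do:

    # Sketch: print the fetch/unpack task dependencies a SRC_URI entry pulls in.
    import bb.tinfoil

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False, quiet=2)
        d = tinfoil.parse_recipe_file("/path/to/test.bb")
        print(d.getVarFlag("do_fetch", "depends"))   # e.g. contains wget-native for http:// URIs
        print(d.getVarFlag("do_unpack", "depends"))  # e.g. contains unzip-native for .zip archives
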
diff --git a/meta/lib/oeqa/selftest/cases/fitimage.py b/meta/lib/oeqa/selftest/cases/fitimage.py
new file mode 100644
index 0000000000..347c065377
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/fitimage.py
@@ -0,0 +1,846 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
+import os
+import re
+
+class FitImageTests(OESelftestTestCase):
+
+ def test_fit_image(self):
+ """
+ Summary: Check if FIT image and Image Tree Source (its) are built
+ and the Image Tree Source has the correct fields.
+ Expected: 1. fitImage and fitImage-its can be built
+ 2. The type, load address, entrypoint address and
+ default values of kernel and ramdisk are as expected
+ in the Image Tree Source. Not all the fields are tested,
+                     only the key fields that won't vary between different
+ architectures.
+ Product: oe-core
+ Author: Usama Arif <usama.arif@arm.com>
+ """
+ config = """
+# Enable creation of fitImage
+KERNEL_IMAGETYPE = "Image"
+KERNEL_IMAGETYPES += " fitImage "
+KERNEL_CLASSES = " kernel-fitimage "
+
+# RAM disk variables including load address and entrypoint for kernel and RAM disk
+IMAGE_FSTYPES += "cpio.gz"
+INITRAMFS_IMAGE = "core-image-minimal"
+# core-image-minimal is used as initramfs here, drop the rootfs suffix
+IMAGE_NAME_SUFFIX:pn-core-image-minimal = ""
+UBOOT_RD_LOADADDRESS = "0x88000000"
+UBOOT_RD_ENTRYPOINT = "0x88000000"
+UBOOT_LOADADDRESS = "0x80080000"
+UBOOT_ENTRYPOINT = "0x80080000"
+FIT_DESC = "A model description"
+"""
+ self.write_config(config)
+
+ # fitImage is created as part of linux recipe
+ image = "virtual/kernel"
+ bitbake(image)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'INITRAMFS_IMAGE_NAME', 'KERNEL_FIT_LINK_NAME'], image)
+
+ fitimage_its_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],
+ "fitImage-its-%s-%s" % (bb_vars['INITRAMFS_IMAGE_NAME'], bb_vars['KERNEL_FIT_LINK_NAME']))
+ fitimage_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],
+ "fitImage-%s-%s" % (bb_vars['INITRAMFS_IMAGE_NAME'], bb_vars['KERNEL_FIT_LINK_NAME']))
+
+ self.assertTrue(os.path.exists(fitimage_its_path),
+ "%s image tree source doesn't exist" % (fitimage_its_path))
+ self.assertTrue(os.path.exists(fitimage_path),
+ "%s FIT image doesn't exist" % (fitimage_path))
+
+ # Check that the type, load address, entrypoint address and default
+ # values for kernel and ramdisk in Image Tree Source are as expected.
+ # The order of fields in the below array is important. Not all the
+        # fields are tested, only the key fields that won't vary between
+ # different architectures.
+ its_field_check = [
+ 'description = "A model description";',
+ 'type = "kernel";',
+ 'load = <0x80080000>;',
+ 'entry = <0x80080000>;',
+ 'type = "ramdisk";',
+ 'load = <0x88000000>;',
+ 'entry = <0x88000000>;',
+ 'default = "conf-1";',
+ 'kernel = "kernel-1";',
+ 'ramdisk = "ramdisk-1";'
+ ]
+
+ with open(fitimage_its_path) as its_file:
+ field_index = 0
+ for line in its_file:
+ if field_index == len(its_field_check):
+ break
+ if its_field_check[field_index] in line:
+                    field_index += 1
+
+        if field_index != len(its_field_check): # if it's equal, the test passed (and indexing below would be out of range)
+ self.assertTrue(field_index == len(its_field_check),
+ "Fields in Image Tree Source File %s did not match, error in finding %s"
+ % (fitimage_its_path, its_field_check[field_index]))
+
+
+ def test_sign_fit_image(self):
+ """
+ Summary: Check if FIT image and Image Tree Source (its) are created
+ and signed correctly.
+ Expected: 1) its and FIT image are built successfully
+ 2) Scanning the its file indicates signing is enabled
+ as requested by UBOOT_SIGN_ENABLE (using keys generated
+ via FIT_GENERATE_KEYS)
+ 3) Dumping the FIT image indicates signature values
+ are present (including for images as enabled via
+ FIT_SIGN_INDIVIDUAL)
+ 4) Examination of the do_assemble_fitimage runfile/logfile
+                     indicates that UBOOT_MKIMAGE, UBOOT_MKIMAGE_SIGN and
+ UBOOT_MKIMAGE_SIGN_ARGS are working as expected.
+ Product: oe-core
+ Author: Paul Eggleton <paul.eggleton@microsoft.com> based upon
+ work by Usama Arif <usama.arif@arm.com>
+ """
+ config = """
+# Enable creation of fitImage
+MACHINE = "beaglebone-yocto"
+KERNEL_IMAGETYPES += " fitImage "
+KERNEL_CLASSES = " kernel-fitimage test-mkimage-wrapper "
+UBOOT_SIGN_ENABLE = "1"
+FIT_GENERATE_KEYS = "1"
+UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
+UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest"
+UBOOT_SIGN_KEYNAME = "cfg-oe-selftest"
+FIT_SIGN_INDIVIDUAL = "1"
+UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'"
+"""
+ self.write_config(config)
+
+ # fitImage is created as part of linux recipe
+ image = "virtual/kernel"
+ bitbake(image)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'KERNEL_FIT_LINK_NAME'], image)
+
+ fitimage_its_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],
+ "fitImage-its-%s" % (bb_vars['KERNEL_FIT_LINK_NAME']))
+ fitimage_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],
+ "fitImage-%s.bin" % (bb_vars['KERNEL_FIT_LINK_NAME']))
+
+ self.assertTrue(os.path.exists(fitimage_its_path),
+ "%s image tree source doesn't exist" % (fitimage_its_path))
+ self.assertTrue(os.path.exists(fitimage_path),
+ "%s FIT image doesn't exist" % (fitimage_path))
+
+ req_itspaths = [
+ ['/', 'images', 'kernel-1'],
+ ['/', 'images', 'kernel-1', 'signature-1'],
+ ['/', 'images', 'fdt-am335x-boneblack.dtb'],
+ ['/', 'images', 'fdt-am335x-boneblack.dtb', 'signature-1'],
+ ['/', 'configurations', 'conf-am335x-boneblack.dtb'],
+ ['/', 'configurations', 'conf-am335x-boneblack.dtb', 'signature-1'],
+ ]
+
+ itspath = []
+ itspaths = []
+ linect = 0
+ sigs = {}
+ with open(fitimage_its_path) as its_file:
+            for line in its_file:
+                linect += 1
+ line = line.strip()
+ if line.endswith('};'):
+ itspath.pop()
+ elif line.endswith('{'):
+ itspath.append(line[:-1].strip())
+ itspaths.append(itspath[:])
+ elif itspath and itspath[-1] == 'signature-1':
+ itsdotpath = '.'.join(itspath)
+ if not itsdotpath in sigs:
+ sigs[itsdotpath] = {}
+ if not '=' in line or not line.endswith(';'):
+ self.fail('Unexpected formatting in %s sigs section line %d:%s' % (fitimage_its_path, linect, line))
+ key, value = line.split('=', 1)
+ sigs[itsdotpath][key.rstrip()] = value.lstrip().rstrip(';')
+
+ for reqpath in req_itspaths:
+ if not reqpath in itspaths:
+ self.fail('Missing section in its file: %s' % reqpath)
+
+ reqsigvalues_image = {
+ 'algo': '"sha256,rsa2048"',
+ 'key-name-hint': '"img-oe-selftest"',
+ }
+ reqsigvalues_config = {
+ 'algo': '"sha256,rsa2048"',
+ 'key-name-hint': '"cfg-oe-selftest"',
+ 'sign-images': '"kernel", "fdt"',
+ }
+
+ for itspath, values in sigs.items():
+ if 'conf-' in itspath:
+ reqsigvalues = reqsigvalues_config
+ else:
+ reqsigvalues = reqsigvalues_image
+ for reqkey, reqvalue in reqsigvalues.items():
+ value = values.get(reqkey, None)
+ if value is None:
+ self.fail('Missing key "%s" in its file signature section %s' % (reqkey, itspath))
+ self.assertEqual(value, reqvalue)
+
+ # Dump the image to see if it really got signed
+ bitbake("u-boot-tools-native -c addto_recipe_sysroot")
+ result = runCmd('bitbake -e u-boot-tools-native | grep ^RECIPE_SYSROOT_NATIVE=')
+ recipe_sysroot_native = result.output.split('=')[1].strip('"')
+ dumpimage_path = os.path.join(recipe_sysroot_native, 'usr', 'bin', 'dumpimage')
+ result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path))
+ in_signed = None
+ signed_sections = {}
+ for line in result.output.splitlines():
+ if line.startswith((' Configuration', ' Image')):
+ in_signed = re.search(r'\((.*)\)', line).groups()[0]
+            elif re.match(r'^ *', line).group() in (' ', ''):
+ in_signed = None
+ elif in_signed:
+ if not in_signed in signed_sections:
+ signed_sections[in_signed] = {}
+ key, value = line.split(':', 1)
+ signed_sections[in_signed][key.strip()] = value.strip()
+ self.assertIn('kernel-1', signed_sections)
+ self.assertIn('fdt-am335x-boneblack.dtb', signed_sections)
+ self.assertIn('conf-am335x-boneblack.dtb', signed_sections)
+ for signed_section, values in signed_sections.items():
+ value = values.get('Sign algo', None)
+ if signed_section.startswith("conf"):
+ self.assertEqual(value, 'sha256,rsa2048:cfg-oe-selftest', 'Signature algorithm for %s not expected value' % signed_section)
+ else:
+ self.assertEqual(value, 'sha256,rsa2048:img-oe-selftest', 'Signature algorithm for %s not expected value' % signed_section)
+ value = values.get('Sign value', None)
+ self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section)
+
+ # Check for UBOOT_MKIMAGE_SIGN_ARGS
+ result = runCmd('bitbake -e virtual/kernel | grep ^T=')
+        tempdir = result.output.split('=', 1)[1].strip().strip('"')
+ result = runCmd('grep "a smart comment" %s/run.do_assemble_fitimage' % tempdir, ignore_status=True)
+ self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN_ARGS value did not get used')
+
+ # Check for evidence of test-mkimage-wrapper class
+ result = runCmd('grep "### uboot-mkimage wrapper message" %s/log.do_assemble_fitimage' % tempdir, ignore_status=True)
+ self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE did not work')
+ result = runCmd('grep "### uboot-mkimage signing wrapper message" %s/log.do_assemble_fitimage' % tempdir, ignore_status=True)
+ self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN did not work')
+
+ def test_uboot_fit_image(self):
+ """
+ Summary: Check if Uboot FIT image and Image Tree Source
+ (its) are built and the Image Tree Source has the
+ correct fields.
+ Expected: 1. u-boot-fitImage and u-boot-its can be built
+ 2. The type, load address, entrypoint address and
+ default values of U-boot image are correct in the
+ Image Tree Source. Not all the fields are tested,
+                     only the key fields that won't vary between
+ different architectures.
+ Product: oe-core
+ Author: Klaus Heinrich Kiwi <klaus@linux.vnet.ibm.com>
+ based on work by Usama Arif <usama.arif@arm.com>
+ """
+ config = """
+# We need at least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
+MACHINE = "qemuarm"
+UBOOT_MACHINE = "am57xx_evm_defconfig"
+SPL_BINARY = "MLO"
+
+# Enable creation of the U-Boot fitImage
+UBOOT_FITIMAGE_ENABLE = "1"
+
+# (U-boot) fitImage properties
+UBOOT_LOADADDRESS = "0x80080000"
+UBOOT_ENTRYPOINT = "0x80080000"
+UBOOT_FIT_DESC = "A model description"
+
+# Enable creation of Kernel fitImage
+KERNEL_IMAGETYPES += " fitImage "
+KERNEL_CLASSES = " kernel-fitimage"
+UBOOT_SIGN_ENABLE = "1"
+FIT_GENERATE_KEYS = "1"
+UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
+UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest"
+UBOOT_SIGN_KEYNAME = "cfg-oe-selftest"
+FIT_SIGN_INDIVIDUAL = "1"
+"""
+ self.write_config(config)
+
+ # The U-Boot fitImage is created as part of the U-Boot recipe
+ bitbake("virtual/bootloader")
+
+ deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
+ machine = get_bb_var('MACHINE')
+ fitimage_its_path = os.path.join(deploy_dir_image,
+ "u-boot-its-%s" % (machine,))
+ fitimage_path = os.path.join(deploy_dir_image,
+ "u-boot-fitImage-%s" % (machine,))
+
+ self.assertTrue(os.path.exists(fitimage_its_path),
+ "%s image tree source doesn't exist" % (fitimage_its_path))
+ self.assertTrue(os.path.exists(fitimage_path),
+ "%s FIT image doesn't exist" % (fitimage_path))
+
+ # Check that the type, load address, entrypoint address and default
+ # values for kernel and ramdisk in Image Tree Source are as expected.
+ # The order of fields in the below array is important. Not all the
+ # fields are tested, only the key fields that won't vary between
+ # different architectures.
+ its_field_check = [
+ 'description = "A model description";',
+ 'type = "standalone";',
+ 'load = <0x80080000>;',
+ 'entry = <0x80080000>;',
+ 'default = "conf";',
+ 'loadables = "uboot";',
+ 'fdt = "fdt";'
+ ]
+
+ with open(fitimage_its_path) as its_file:
+ field_index = 0
+ for line in its_file:
+ if field_index == len(its_field_check):
+ break
+ if its_field_check[field_index] in line:
+ field_index +=1
+
+ if field_index != len(its_field_check): # if it's equal, the test passed
+ self.assertTrue(field_index == len(its_field_check),
+ "Fields in Image Tree Source File %s did not match, error in finding %s"
+ % (fitimage_its_path, its_field_check[field_index]))
+
+ def test_uboot_sign_fit_image(self):
+ """
+ Summary: Check if U-Boot FIT image and Image Tree Source
+ (its) are built and the Image Tree Source has the
+ correct fields, in the scenario where the Kernel
+ is also creating/signing its fitImage.
+ Expected: 1. u-boot-fitImage and u-boot-its can be built
+ 2. The type, load address, entrypoint address and
+ default values of U-boot image are correct in the
+ Image Tree Source. Not all the fields are tested,
+ only the key fields that won't vary between
+ different architectures.
+ Product: oe-core
+ Author: Klaus Heinrich Kiwi <klaus@linux.vnet.ibm.com>
+ based on work by Usama Arif <usama.arif@arm.com>
+ """
+ config = """
+# We need at least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
+MACHINE = "qemuarm"
+UBOOT_MACHINE = "am57xx_evm_defconfig"
+SPL_BINARY = "MLO"
+
+# Enable creation of the U-Boot fitImage
+UBOOT_FITIMAGE_ENABLE = "1"
+
+# (U-boot) fitImage properties
+UBOOT_LOADADDRESS = "0x80080000"
+UBOOT_ENTRYPOINT = "0x80080000"
+UBOOT_FIT_DESC = "A model description"
+KERNEL_IMAGETYPES += " fitImage "
+KERNEL_CLASSES = " kernel-fitimage "
+INHERIT += "test-mkimage-wrapper"
+UBOOT_SIGN_ENABLE = "1"
+FIT_GENERATE_KEYS = "1"
+UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
+UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest"
+UBOOT_SIGN_KEYNAME = "cfg-oe-selftest"
+FIT_SIGN_INDIVIDUAL = "1"
+UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart U-Boot comment'"
+"""
+ self.write_config(config)
+
+ # The U-Boot fitImage is created as part of the U-Boot recipe
+ bitbake("virtual/bootloader")
+
+ deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
+ machine = get_bb_var('MACHINE')
+ fitimage_its_path = os.path.join(deploy_dir_image,
+ "u-boot-its-%s" % (machine,))
+ fitimage_path = os.path.join(deploy_dir_image,
+ "u-boot-fitImage-%s" % (machine,))
+
+ self.assertTrue(os.path.exists(fitimage_its_path),
+ "%s image tree source doesn't exist" % (fitimage_its_path))
+ self.assertTrue(os.path.exists(fitimage_path),
+ "%s FIT image doesn't exist" % (fitimage_path))
+
+ # Check that the type, load address, entrypoint address and default
+ # values for kernel and ramdisk in Image Tree Source are as expected.
+ # The order of fields in the below array is important. Not all the
+ # fields are tested, only the key fields that won't vary between
+ # different architectures.
+ its_field_check = [
+ 'description = "A model description";',
+ 'type = "standalone";',
+ 'load = <0x80080000>;',
+ 'entry = <0x80080000>;',
+ 'default = "conf";',
+ 'loadables = "uboot";',
+ 'fdt = "fdt";'
+ ]
+
+ with open(fitimage_its_path) as its_file:
+ field_index = 0
+ for line in its_file:
+ if field_index == len(its_field_check):
+ break
+ if its_field_check[field_index] in line:
+ field_index +=1
+
+ if field_index != len(its_field_check): # if it's equal, the test passed
+ self.assertTrue(field_index == len(its_field_check),
+ "Fields in Image Tree Source File %s did not match, error in finding %s"
+ % (fitimage_its_path, its_field_check[field_index]))
+
+
+ def test_sign_standalone_uboot_fit_image(self):
+ """
+ Summary: Check if U-Boot FIT image and Image Tree Source (its) are
+ created and signed correctly for the scenario where only
+ the U-Boot proper fitImage is being created and signed.
+ Expected: 1) U-Boot its and FIT image are built successfully
+ 2) Scanning the its file indicates signing is enabled
+ as requested by SPL_SIGN_ENABLE (using keys generated
+ via UBOOT_FIT_GENERATE_KEYS)
+ 3) Dumping the FIT image indicates signature values
+ are present
+ 4) Examination of the do_uboot_assemble_fitimage
+ runfile/logfile indicates that UBOOT_MKIMAGE, UBOOT_MKIMAGE_SIGN
+ and SPL_MKIMAGE_SIGN_ARGS are working as expected.
+ Product: oe-core
+ Author: Klaus Heinrich Kiwi <klaus@linux.vnet.ibm.com> based upon
+ work by Paul Eggleton <paul.eggleton@microsoft.com> and
+ Usama Arif <usama.arif@arm.com>
+ """
+ config = """
+# There's no U-Boot defconfig with CONFIG_FIT_SIGNATURE yet, so we need at
+# least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
+MACHINE = "qemuarm"
+UBOOT_MACHINE = "am57xx_evm_defconfig"
+SPL_BINARY = "MLO"
+# The kernel-fitimage class is a dependency even if we're only
+# creating/signing the U-Boot fitImage
+KERNEL_CLASSES = " kernel-fitimage"
+INHERIT += "test-mkimage-wrapper"
+# Enable creation and signing of the U-Boot fitImage
+UBOOT_FITIMAGE_ENABLE = "1"
+SPL_SIGN_ENABLE = "1"
+SPL_SIGN_KEYNAME = "spl-oe-selftest"
+SPL_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
+UBOOT_DTB_BINARY = "u-boot.dtb"
+UBOOT_ENTRYPOINT = "0x80000000"
+UBOOT_LOADADDRESS = "0x80000000"
+UBOOT_DTB_LOADADDRESS = "0x82000000"
+UBOOT_ARCH = "arm"
+SPL_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
+SPL_MKIMAGE_SIGN_ARGS = "-c 'a smart U-Boot comment'"
+UBOOT_EXTLINUX = "0"
+UBOOT_FIT_GENERATE_KEYS = "1"
+UBOOT_FIT_HASH_ALG = "sha256"
+"""
+ self.write_config(config)
+
+ # The U-Boot fitImage is created as part of the U-Boot recipe
+ bitbake("virtual/bootloader")
+
+ image_type = "core-image-minimal"
+ deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
+ machine = get_bb_var('MACHINE')
+ fitimage_its_path = os.path.join(deploy_dir_image,
+ "u-boot-its-%s" % (machine,))
+ fitimage_path = os.path.join(deploy_dir_image,
+ "u-boot-fitImage-%s" % (machine,))
+
+ self.assertTrue(os.path.exists(fitimage_its_path),
+ "%s image tree source doesn't exist" % (fitimage_its_path))
+ self.assertTrue(os.path.exists(fitimage_path),
+ "%s FIT image doesn't exist" % (fitimage_path))
+
+ req_itspaths = [
+ ['/', 'images', 'uboot'],
+ ['/', 'images', 'uboot', 'signature'],
+ ['/', 'images', 'fdt'],
+ ['/', 'images', 'fdt', 'signature'],
+ ]
+
+ itspath = []
+ itspaths = []
+ linect = 0
+ sigs = {}
+ with open(fitimage_its_path) as its_file:
+ for line in its_file:
+ linect += 1
+ line = line.strip()
+ if line.endswith('};'):
+ itspath.pop()
+ elif line.endswith('{'):
+ itspath.append(line[:-1].strip())
+ itspaths.append(itspath[:])
+ elif itspath and itspath[-1] == 'signature':
+ itsdotpath = '.'.join(itspath)
+ if not itsdotpath in sigs:
+ sigs[itsdotpath] = {}
+ if not '=' in line or not line.endswith(';'):
+ self.fail('Unexpected formatting in %s sigs section line %d:%s' % (fitimage_its_path, linect, line))
+ key, value = line.split('=', 1)
+ sigs[itsdotpath][key.rstrip()] = value.lstrip().rstrip(';')
+
+ for reqpath in req_itspaths:
+ if not reqpath in itspaths:
+ self.fail('Missing section in its file: %s' % reqpath)
+
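+ # Illustrative sketch (not the generated file itself) of the its fragment
+ # the loop above walks; the signature properties end up in sigs under the
+ # dotted path '/.images.uboot.signature':
+ #   / {
+ #       images {
+ #           uboot {
+ #               signature {
+ #                   algo = "sha256,rsa2048";
+ #                   key-name-hint = "spl-oe-selftest";
+ #               };
+ #           };
+ #       };
+ #   };
+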
+ reqsigvalues_image = {
+ 'algo': '"sha256,rsa2048"',
+ 'key-name-hint': '"spl-oe-selftest"',
+ }
+
+ for itspath, values in sigs.items():
+ reqsigvalues = reqsigvalues_image
+ for reqkey, reqvalue in reqsigvalues.items():
+ value = values.get(reqkey, None)
+ if value is None:
+ self.fail('Missing key "%s" in its file signature section %s' % (reqkey, itspath))
+ self.assertEqual(value, reqvalue)
+
+ # Dump the image to see if it really got signed
+ bitbake("u-boot-tools-native -c addto_recipe_sysroot")
+ result = runCmd('bitbake -e u-boot-tools-native | grep ^RECIPE_SYSROOT_NATIVE=')
+ recipe_sysroot_native = result.output.split('=')[1].strip('"')
+ dumpimage_path = os.path.join(recipe_sysroot_native, 'usr', 'bin', 'dumpimage')
+ result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path))
+ in_signed = None
+ signed_sections = {}
+ for line in result.output.splitlines():
+ if line.startswith(' Image'):
+ in_signed = re.search(r'\((.*)\)', line).groups()[0]
+ elif re.match(r' \w', line):
+ in_signed = None
+ elif in_signed:
+ if not in_signed in signed_sections:
+ signed_sections[in_signed] = {}
+ key, value = line.split(':', 1)
+ signed_sections[in_signed][key.strip()] = value.strip()
+ self.assertIn('uboot', signed_sections)
+ self.assertIn('fdt', signed_sections)
+ for signed_section, values in signed_sections.items():
+ value = values.get('Sign algo', None)
+ self.assertEqual(value, 'sha256,rsa2048:spl-oe-selftest', 'Signature algorithm for %s not expected value' % signed_section)
+ value = values.get('Sign value', None)
+ self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section)
+
+ # Check for SPL_MKIMAGE_SIGN_ARGS
+ result = runCmd('bitbake -e virtual/bootloader | grep ^T=')
+ tempdir = result.output.split('=', 1)[1].strip().strip('"')
+ result = runCmd('grep "a smart U-Boot comment" %s/run.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
+ self.assertEqual(result.status, 0, 'SPL_MKIMAGE_SIGN_ARGS value did not get used')
+
+ # Check for evidence of test-mkimage-wrapper class
+ result = runCmd('grep "### uboot-mkimage wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
+ self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE did not work')
+ result = runCmd('grep "### uboot-mkimage signing wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
+ self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN did not work')
+
+ def test_sign_cascaded_uboot_fit_image(self):
+ """
+ Summary: Check if U-Boot FIT image and Image Tree Source (its) are
+ created and signed correctly for the scenario where both
+ U-Boot proper and Kernel fitImages are being created and
+ signed.
+ Expected: 1) U-Boot its and FIT image are built successfully
+ 2) Scanning the its file indicates signing is enabled
+ as requested by SPL_SIGN_ENABLE (using keys generated
+ via UBOOT_FIT_GENERATE_KEYS)
+ 3) Dumping the FIT image indicates signature values
+ are present
+ 4) Examination of the do_uboot_assemble_fitimage
+ runfile/logfile indicates that UBOOT_MKIMAGE, UBOOT_MKIMAGE_SIGN
+ and SPL_MKIMAGE_SIGN_ARGS are working as expected.
+ Product: oe-core
+ Author: Klaus Heinrich Kiwi <klaus@linux.vnet.ibm.com> based upon
+ work by Paul Eggleton <paul.eggleton@microsoft.com> and
+ Usama Arif <usama.arif@arm.com>
+ """
+ config = """
+# There's no U-Boot defconfig with CONFIG_FIT_SIGNATURE yet, so we need at
+# least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
+MACHINE = "qemuarm"
+UBOOT_MACHINE = "am57xx_evm_defconfig"
+SPL_BINARY = "MLO"
+# Enable creation and signing of the U-Boot fitImage
+UBOOT_FITIMAGE_ENABLE = "1"
+SPL_SIGN_ENABLE = "1"
+SPL_SIGN_KEYNAME = "spl-cascaded-oe-selftest"
+SPL_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
+UBOOT_DTB_BINARY = "u-boot.dtb"
+UBOOT_ENTRYPOINT = "0x80000000"
+UBOOT_LOADADDRESS = "0x80000000"
+UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
+UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart cascaded Kernel comment'"
+UBOOT_DTB_LOADADDRESS = "0x82000000"
+UBOOT_ARCH = "arm"
+SPL_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
+SPL_MKIMAGE_SIGN_ARGS = "-c 'a smart cascaded U-Boot comment'"
+UBOOT_EXTLINUX = "0"
+UBOOT_FIT_GENERATE_KEYS = "1"
+UBOOT_FIT_HASH_ALG = "sha256"
+KERNEL_IMAGETYPES += " fitImage "
+KERNEL_CLASSES = " kernel-fitimage "
+INHERIT += "test-mkimage-wrapper"
+UBOOT_SIGN_ENABLE = "1"
+FIT_GENERATE_KEYS = "1"
+UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
+UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest"
+UBOOT_SIGN_KEYNAME = "cfg-oe-selftest"
+FIT_SIGN_INDIVIDUAL = "1"
+"""
+ self.write_config(config)
+
+ # The U-Boot fitImage is created as part of the U-Boot recipe
+ bitbake("virtual/bootloader")
+
+ image_type = "core-image-minimal"
+ deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
+ machine = get_bb_var('MACHINE')
+ fitimage_its_path = os.path.join(deploy_dir_image,
+ "u-boot-its-%s" % (machine,))
+ fitimage_path = os.path.join(deploy_dir_image,
+ "u-boot-fitImage-%s" % (machine,))
+
+ self.assertTrue(os.path.exists(fitimage_its_path),
+ "%s image tree source doesn't exist" % (fitimage_its_path))
+ self.assertTrue(os.path.exists(fitimage_path),
+ "%s FIT image doesn't exist" % (fitimage_path))
+
+ req_itspaths = [
+ ['/', 'images', 'uboot'],
+ ['/', 'images', 'uboot', 'signature'],
+ ['/', 'images', 'fdt'],
+ ['/', 'images', 'fdt', 'signature'],
+ ]
+
+ itspath = []
+ itspaths = []
+ linect = 0
+ sigs = {}
+ with open(fitimage_its_path) as its_file:
+ for line in its_file:
+ linect += 1
+ line = line.strip()
+ if line.endswith('};'):
+ itspath.pop()
+ elif line.endswith('{'):
+ itspath.append(line[:-1].strip())
+ itspaths.append(itspath[:])
+ elif itspath and itspath[-1] == 'signature':
+ itsdotpath = '.'.join(itspath)
+ if not itsdotpath in sigs:
+ sigs[itsdotpath] = {}
+ if not '=' in line or not line.endswith(';'):
+ self.fail('Unexpected formatting in %s sigs section line %d:%s' % (fitimage_its_path, linect, line))
+ key, value = line.split('=', 1)
+ sigs[itsdotpath][key.rstrip()] = value.lstrip().rstrip(';')
+
+ for reqpath in req_itspaths:
+ if not reqpath in itspaths:
+ self.fail('Missing section in its file: %s' % reqpath)
+
+ reqsigvalues_image = {
+ 'algo': '"sha256,rsa2048"',
+ 'key-name-hint': '"spl-cascaded-oe-selftest"',
+ }
+
+ for itspath, values in sigs.items():
+ reqsigvalues = reqsigvalues_image
+ for reqkey, reqvalue in reqsigvalues.items():
+ value = values.get(reqkey, None)
+ if value is None:
+ self.fail('Missing key "%s" in its file signature section %s' % (reqkey, itspath))
+ self.assertEqual(value, reqvalue)
+
+ # Dump the image to see if it really got signed
+ bitbake("u-boot-tools-native -c addto_recipe_sysroot")
+ result = runCmd('bitbake -e u-boot-tools-native | grep ^RECIPE_SYSROOT_NATIVE=')
+ recipe_sysroot_native = result.output.split('=')[1].strip('"')
+ dumpimage_path = os.path.join(recipe_sysroot_native, 'usr', 'bin', 'dumpimage')
+ result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path))
+ in_signed = None
+ signed_sections = {}
+ for line in result.output.splitlines():
+ if line.startswith(' Image'):
+ in_signed = re.search(r'\((.*)\)', line).groups()[0]
+ elif re.match(r' \w', line):
+ in_signed = None
+ elif in_signed:
+ if not in_signed in signed_sections:
+ signed_sections[in_signed] = {}
+ key, value = line.split(':', 1)
+ signed_sections[in_signed][key.strip()] = value.strip()
+ self.assertIn('uboot', signed_sections)
+ self.assertIn('fdt', signed_sections)
+ for signed_section, values in signed_sections.items():
+ value = values.get('Sign algo', None)
+ self.assertEqual(value, 'sha256,rsa2048:spl-cascaded-oe-selftest', 'Signature algorithm for %s not expected value' % signed_section)
+ value = values.get('Sign value', None)
+ self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section)
+
+ # Check for SPL_MKIMAGE_SIGN_ARGS
+ result = runCmd('bitbake -e virtual/bootloader | grep ^T=')
+ tempdir = result.output.split('=', 1)[1].strip().strip('"')
+ result = runCmd('grep "a smart cascaded U-Boot comment" %s/run.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
+ self.assertEqual(result.status, 0, 'SPL_MKIMAGE_SIGN_ARGS value did not get used')
+
+ # Check for evidence of test-mkimage-wrapper class
+ result = runCmd('grep "### uboot-mkimage wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
+ self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE did not work')
+ result = runCmd('grep "### uboot-mkimage signing wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
+ self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN did not work')
+
+
+
+ def test_initramfs_bundle(self):
+ """
+ Summary: Verifies the content of the initramfs bundle node in the FIT Image Tree Source (its)
+ The FIT settings are set by the test case.
+ The machine used is beaglebone-yocto.
+ Expected: 1. The ITS is generated with initramfs bundle support
+ 2. All the fields in the kernel node are as expected (matching the
+ conf settings)
+ 3. The kernel is included in all the available configurations and
+ its hash is included in the configuration signature
+
+ Product: oe-core
+ Author: Abdellatif El Khlifi <abdellatif.elkhlifi@arm.com>
+ """
+
+ config = """
+DISTRO="poky"
+MACHINE = "beaglebone-yocto"
+INITRAMFS_IMAGE_BUNDLE = "1"
+INITRAMFS_IMAGE = "core-image-minimal-initramfs"
+INITRAMFS_SCRIPTS = ""
+UBOOT_MACHINE = "am335x_evm_defconfig"
+KERNEL_CLASSES = " kernel-fitimage "
+KERNEL_IMAGETYPES = "fitImage"
+UBOOT_SIGN_ENABLE = "1"
+UBOOT_SIGN_KEYNAME = "beaglebonekey"
+UBOOT_SIGN_KEYDIR ?= "${DEPLOY_DIR_IMAGE}"
+UBOOT_DTB_BINARY = "u-boot.dtb"
+UBOOT_ENTRYPOINT = "0x80000000"
+UBOOT_LOADADDRESS = "0x80000000"
+UBOOT_DTB_LOADADDRESS = "0x82000000"
+UBOOT_ARCH = "arm"
+UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
+UBOOT_MKIMAGE_KERNEL_TYPE = "kernel"
+UBOOT_EXTLINUX = "0"
+FIT_GENERATE_KEYS = "1"
+KERNEL_IMAGETYPE_REPLACEMENT = "zImage"
+FIT_KERNEL_COMP_ALG = "none"
+FIT_HASH_ALG = "sha256"
+"""
+ self.write_config(config)
+
+ # fitImage is created as part of linux recipe
+ bitbake("virtual/kernel")
+
+ image_type = get_bb_var('INITRAMFS_IMAGE')
+ deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
+ machine = get_bb_var('MACHINE')
+ fitimage_its_path = os.path.join(deploy_dir_image,
+ "fitImage-its-%s-%s-%s" % (image_type, machine, machine))
+ fitimage_path = os.path.join(deploy_dir_image,"fitImage")
+
+ self.assertTrue(os.path.exists(fitimage_its_path),
+ "%s image tree source doesn't exist" % (fitimage_its_path))
+ self.assertTrue(os.path.exists(fitimage_path),
+ "%s FIT image doesn't exist" % (fitimage_path))
+
+ kernel_load = str(get_bb_var('UBOOT_LOADADDRESS'))
+ kernel_entry = str(get_bb_var('UBOOT_ENTRYPOINT'))
+ kernel_type = str(get_bb_var('UBOOT_MKIMAGE_KERNEL_TYPE'))
+ kernel_compression = str(get_bb_var('FIT_KERNEL_COMP_ALG'))
+ uboot_arch = str(get_bb_var('UBOOT_ARCH'))
+ fit_hash_alg = str(get_bb_var('FIT_HASH_ALG'))
+
+ with open(fitimage_its_path) as its_file:
+ its_lines = [line.strip() for line in its_file.readlines()]
+
+ exp_node_lines = [
+ 'kernel-1 {',
+ 'description = "Linux kernel";',
+ 'data = /incbin/("linux.bin");',
+ 'type = "' + kernel_type + '";',
+ 'arch = "' + uboot_arch + '";',
+ 'os = "linux";',
+ 'compression = "' + kernel_compression + '";',
+ 'load = <' + kernel_load + '>;',
+ 'entry = <' + kernel_entry + '>;',
+ 'hash-1 {',
+ 'algo = "' + fit_hash_alg +'";',
+ '};',
+ '};'
+ ]
+
+ node_str = exp_node_lines[0]
+
+ test_passed = False
+
+ print ("checking kernel node\n")
+
+ if node_str in its_lines:
+ node_start_idx = its_lines.index(node_str)
+ node = its_lines[node_start_idx:(node_start_idx + len(exp_node_lines))]
+ if node == exp_node_lines:
+ print("kernel node verified")
+ else:
+ self.assertTrue(test_passed == True,"kernel node does not match expectation")
+
+ rx_configs = re.compile("^conf-.*")
+ its_configs = list(filter(rx_configs.match, its_lines))
+
+ for cfg_str in its_configs:
+ cfg_start_idx = its_lines.index(cfg_str)
+ line_idx = cfg_start_idx + 2
+ node_end = False
+ while node_end == False:
+ if its_lines[line_idx] == "};" and its_lines[line_idx-1] == "};" :
+ node_end = True
+ line_idx = line_idx + 1
+
+ node = its_lines[cfg_start_idx:line_idx]
+ print("checking configuration " + cfg_str.rstrip(" {"))
+ rx_desc_line = re.compile("^description.*1 Linux kernel.*")
+ if len(list(filter(rx_desc_line.match, node))) != 1:
+ self.assertTrue(test_passed == True,"kernel keyword not found in the description line")
+ break
+ else:
+ print("kernel keyword found in the description line")
+
+ if 'kernel = "kernel-1";' not in node:
+ self.assertTrue(test_passed == True,"kernel line not found")
+ break
+ else:
+ print("kernel line found")
+
+ rx_sign_line = re.compile("^sign-images.*kernel.*")
+ if len(list(filter(rx_sign_line.match, node))) != 1:
+ self.assertTrue(test_passed == True,"kernel hash not signed")
+ break
+ else:
+ print("kernel hash signed")
+
+ test_passed = True
+ self.assertTrue(test_passed == True,"Initramfs bundle test success")
diff --git a/meta/lib/oeqa/selftest/cases/gcc.py b/meta/lib/oeqa/selftest/cases/gcc.py
new file mode 100644
index 0000000000..89360178fe
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/gcc.py
@@ -0,0 +1,169 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+import os
+import time
+from oeqa.core.decorator import OETestTag
+from oeqa.core.case import OEPTestResultTestCase
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runqemu
+
+def parse_values(content):
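+ """Yield (test name, status) pairs from DejaGnu summary (.sum) content.
+
+ Illustrative example (hypothetical line): given
+     PASS: gcc.dg/pr44545.c (test for excess errors)
+ this yields ("gcc.dg/pr44545.c (test for excess errors)", "PASS").
+ """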
+ for i in content:
+ for v in ["PASS", "FAIL", "XPASS", "XFAIL", "UNRESOLVED", "UNSUPPORTED", "UNTESTED", "ERROR", "WARNING"]:
+ if i.startswith(v + ": "):
+ yield i[len(v) + 2:].strip(), v
+ break
+
+class GccSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
+ def check_skip(self, suite):
+ targets = get_bb_var("RUNTIMETARGET", "gcc-runtime").split()
+ if suite not in targets:
+ self.skipTest("Target does not use {0}".format(suite))
+
+ def run_check(self, *suites, ssh = None):
+ targets = set()
+ for s in suites:
+ if s == "gcc":
+ targets.add("check-gcc-c")
+ elif s == "g++":
+ targets.add("check-gcc-c++")
+ else:
+ targets.add("check-target-{}".format(s))
+
+ # configure ssh target
+ features = []
+ features.append('MAKE_CHECK_TARGETS = "{0}"'.format(" ".join(targets)))
+ if ssh is not None:
+ features.append('TOOLCHAIN_TEST_TARGET = "ssh"')
+ features.append('TOOLCHAIN_TEST_HOST = "{0}"'.format(ssh))
+ features.append('TOOLCHAIN_TEST_HOST_USER = "root"')
+ features.append('TOOLCHAIN_TEST_HOST_PORT = "22"')
+ self.write_config("\n".join(features))
+
+ recipe = "gcc-runtime"
+
+ start_time = time.time()
+
+ bitbake("{} -c check".format(recipe))
+
+ end_time = time.time()
+
+ bb_vars = get_bb_vars(["B", "TARGET_SYS"], recipe)
+ builddir, target_sys = bb_vars["B"], bb_vars["TARGET_SYS"]
+
+ for suite in suites:
+ sumspath = os.path.join(builddir, "gcc", "testsuite", suite, "{0}.sum".format(suite))
+ if not os.path.exists(sumspath): # check in target dirs
+ sumspath = os.path.join(builddir, target_sys, suite, "testsuite", "{0}.sum".format(suite))
+ if not os.path.exists(sumspath): # handle libstdc++-v3 -> libstdc++
+ sumspath = os.path.join(builddir, target_sys, suite, "testsuite", "{0}.sum".format(suite.split("-")[0]))
+ logpath = os.path.splitext(sumspath)[0] + ".log"
+
+ ptestsuite = "gcc-{}".format(suite) if suite != "gcc" else suite
+ ptestsuite = ptestsuite + "-user" if ssh is None else ptestsuite
+ self.ptest_section(ptestsuite, duration = int(end_time - start_time), logfile = logpath)
+ with open(sumspath, "r") as f:
+ for test, result in parse_values(f):
+ self.ptest_result(ptestsuite, test, result)
+
+ def run_check_emulated(self, *args, **kwargs):
+ # build core-image-minimal with required packages
+ default_installed_packages = ["libgcc", "libstdc++", "libatomic", "libgomp"]
+ features = []
+ features.append('IMAGE_FEATURES += "ssh-server-openssh"')
+ features.append('CORE_IMAGE_EXTRA_INSTALL += "{0}"'.format(" ".join(default_installed_packages)))
+ self.write_config("\n".join(features))
+ bitbake("core-image-minimal")
+
+ # wrap the execution with a qemu instance
+ with runqemu("core-image-minimal", runqemuparams = "nographic") as qemu:
+ # validate that SSH is working
+ status, _ = qemu.run("uname")
+ self.assertEqual(status, 0)
+
+ return self.run_check(*args, ssh=qemu.ip, **kwargs)
+
+@OETestTag("toolchain-user")
+class GccCrossSelfTest(GccSelfTestBase):
+ def test_cross_gcc(self):
+ self.run_check("gcc")
+
+@OETestTag("toolchain-user")
+class GxxCrossSelfTest(GccSelfTestBase):
+ def test_cross_gxx(self):
+ self.run_check("g++")
+
+@OETestTag("toolchain-user")
+class GccLibAtomicSelfTest(GccSelfTestBase):
+ def test_libatomic(self):
+ self.run_check("libatomic")
+
+@OETestTag("toolchain-user")
+class GccLibGompSelfTest(GccSelfTestBase):
+ def test_libgomp(self):
+ self.run_check("libgomp")
+
+@OETestTag("toolchain-user")
+class GccLibStdCxxSelfTest(GccSelfTestBase):
+ def test_libstdcxx(self):
+ self.run_check("libstdc++-v3")
+
+@OETestTag("toolchain-user")
+class GccLibSspSelfTest(GccSelfTestBase):
+ def test_libssp(self):
+ self.check_skip("libssp")
+ self.run_check("libssp")
+
+@OETestTag("toolchain-user")
+class GccLibItmSelfTest(GccSelfTestBase):
+ def test_libitm(self):
+ self.check_skip("libitm")
+ self.run_check("libitm")
+
+@OETestTag("toolchain-system")
+@OETestTag("runqemu")
+class GccCrossSelfTestSystemEmulated(GccSelfTestBase):
+ def test_cross_gcc(self):
+ self.run_check_emulated("gcc")
+
+@OETestTag("toolchain-system")
+@OETestTag("runqemu")
+class GxxCrossSelfTestSystemEmulated(GccSelfTestBase):
+ def test_cross_gxx(self):
+ self.run_check_emulated("g++")
+
+@OETestTag("toolchain-system")
+@OETestTag("runqemu")
+class GccLibAtomicSelfTestSystemEmulated(GccSelfTestBase):
+ def test_libatomic(self):
+ self.run_check_emulated("libatomic")
+
+@OETestTag("toolchain-system")
+@OETestTag("runqemu")
+class GccLibGompSelfTestSystemEmulated(GccSelfTestBase):
+ def test_libgomp(self):
+ self.run_check_emulated("libgomp")
+
+@OETestTag("toolchain-system")
+@OETestTag("runqemu")
+class GccLibStdCxxSelfTestSystemEmulated(GccSelfTestBase):
+ def test_libstdcxx(self):
+ self.run_check_emulated("libstdc++-v3")
+
+@OETestTag("toolchain-system")
+@OETestTag("runqemu")
+class GccLibSspSelfTestSystemEmulated(GccSelfTestBase):
+ def test_libssp(self):
+ self.check_skip("libssp")
+ self.run_check_emulated("libssp")
+
+@OETestTag("toolchain-system")
+@OETestTag("runqemu")
+class GccLibItmSelfTestSystemEmulated(GccSelfTestBase):
+ def test_libitm(self):
+ self.check_skip("libitm")
+ self.run_check_emulated("libitm")
+
diff --git a/meta/lib/oeqa/selftest/cases/gdbserver.py b/meta/lib/oeqa/selftest/cases/gdbserver.py
new file mode 100644
index 0000000000..9da97ae780
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/gdbserver.py
@@ -0,0 +1,67 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+import os
+import time
+import tempfile
+import shutil
+import concurrent.futures
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runqemu, runCmd
+
+class GdbServerTest(OESelftestTestCase):
+ def test_gdb_server(self):
+ target_arch = self.td["TARGET_ARCH"]
+ target_sys = self.td["TARGET_SYS"]
+
+ features = """
+IMAGE_GEN_DEBUGFS = "1"
+IMAGE_FSTYPES_DEBUGFS = "tar.bz2"
+CORE_IMAGE_EXTRA_INSTALL = "gdbserver"
+ """
+ self.write_config(features)
+
+ gdb_recipe = "gdb-cross-" + target_arch
+ gdb_binary = target_sys + "-gdb"
+
+ bitbake("core-image-minimal %s:do_addto_recipe_sysroot" % gdb_recipe)
+
+ native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", gdb_recipe)
+ r = runCmd("%s --version" % gdb_binary, native_sysroot=native_sysroot, target_sys=target_sys)
+ self.assertEqual(r.status, 0)
+ self.assertIn("GNU gdb", r.output)
+ image = 'core-image-minimal'
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+
+ with tempfile.TemporaryDirectory(prefix="debugfs-") as debugfs:
+ filename = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s-dbg.tar.bz2" % bb_vars['IMAGE_LINK_NAME'])
+ shutil.unpack_archive(filename, debugfs)
+ filename = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.tar.bz2" % bb_vars['IMAGE_LINK_NAME'])
+ shutil.unpack_archive(filename, debugfs)
+
+ with runqemu("core-image-minimal", runqemuparams="nographic") as qemu:
+ status, output = qemu.run_serial("kmod --help")
+ self.assertIn("modprobe", output)
+
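+ # Drive gdb in batch mode from a worker thread while gdbserver runs on the
+ # target: "set sysroot" points gdb at the unpacked debug rootfs and
+ # "target extended-remote <ip>:9999" attaches to gdbserver. The connection
+ # is retried a few times because gdbserver may not be listening yet when
+ # the thread starts.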
+ with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
+ def run_gdb():
+ for _ in range(5):
+ time.sleep(2)
+ cmd = "%s --batch -ex 'set sysroot %s' -ex \"target extended-remote %s:9999\" -ex \"info line kmod_help\"" % (gdb_binary, debugfs, qemu.ip)
+ self.logger.warning("starting gdb %s" % cmd)
+ r = runCmd(cmd, native_sysroot=native_sysroot, target_sys=target_sys)
+ self.assertEqual(0, r.status)
+ line_re = r"Line \d+ of \"/usr/src/debug/kmod/.*/tools/kmod.c\" starts at address 0x[0-9A-Fa-f]+ <kmod_help>"
+ self.assertRegex(r.output, line_re)
+ break
+ else:
+ self.fail("Timed out connecting to gdb")
+ future = executor.submit(run_gdb)
+
+ status, output = qemu.run_serial("gdbserver --once :9999 kmod --help")
+ self.assertEqual(status, 1)
+ # The future either returns None, or raises an exception
+ future.result()
diff --git a/meta/lib/oeqa/selftest/cases/gitarchivetests.py b/meta/lib/oeqa/selftest/cases/gitarchivetests.py
new file mode 100644
index 0000000000..71382089c1
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/gitarchivetests.py
@@ -0,0 +1,136 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import sys
+basepath = os.path.abspath(os.path.dirname(__file__) + '/../../../../../')
+lib_path = basepath + '/scripts/lib'
+sys.path = sys.path + [lib_path]
+import oeqa.utils.gitarchive as ga
+from oeqa.utils.git import GitError
+import tempfile
+import shutil
+import scriptutils
+import logging
+from oeqa.selftest.case import OESelftestTestCase
+
+logger = scriptutils.logger_create('resulttool')
+
+def create_fake_repository(commit, tag_list=[], add_remote=True):
+ """ Create a testing git directory
+
+ Initialize a simple git repository with one initial commit, and as many
+ tags on this commit as listed in tag_list
+ Returns both git directory path and gitarchive git object
+ If commit is True, the fake data will be committed, otherwise it will stay in the staging area.
+ If commit is True and tag_list is non-empty, all tags in tag_list will be
+ created on the initial commit.
+ If add_remote is True, a fake remote is also added so that git ls-remote works.
+ """
+ fake_data_file = "fake_data.txt"
+ tempdir = tempfile.mkdtemp(prefix='fake_results.')
+ repo = ga.init_git_repo(tempdir, False, False, logger)
+ if add_remote:
+ repo.run_cmd(["remote", "add", "origin", "."])
+ with open(os.path.join(tempdir, fake_data_file), "w") as fake_data:
+ fake_data.write("Fake data")
+ if commit:
+ repo.run_cmd(["add", fake_data_file])
+ repo.run_cmd(["commit", "-m", "\"Add fake data\""])
+ for tag in tag_list:
+ repo.run_cmd(["tag", tag])
+
+ return tempdir, repo
+
+def delete_fake_repository(path):
+ shutil.rmtree(path)
+
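+# Minimal usage sketch for the helpers above (hypothetical tag name):
+#   path, repo = create_fake_repository(True, ["main/10-g0f7d5df/0"])
+#   try:
+#       ...  # exercise oeqa.utils.gitarchive against `repo`
+#   finally:
+#       delete_fake_repository(path)
+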
+def tag_exists(git_obj, target_tag):
+ for tag in git_obj.run_cmd(["tag"]).splitlines():
+ if target_tag == tag:
+ return True
+ return False
+
+class GitArchiveTests(OESelftestTestCase):
+ TEST_BRANCH="main"
+ TEST_COMMIT="0f7d5df"
+ TEST_COMMIT_COUNT="42"
+
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+ cls.log = logging.getLogger('gitarchivetests')
+ cls.log.setLevel(logging.DEBUG)
+
+ def test_create_first_test_tag(self):
+ path, git_obj = create_fake_repository(False)
+ keywords = {'commit': self.TEST_COMMIT, 'branch': self.TEST_BRANCH, "commit_count": self.TEST_COMMIT_COUNT}
+ target_tag = f"{self.TEST_BRANCH}/{self.TEST_COMMIT_COUNT}-g{self.TEST_COMMIT}/0"
+
+ ga.gitarchive(path, path, True, False,
+ "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}",
+ False, "{branch}/{commit_count}-g{commit}/{tag_number}",
+ 'Test run #{tag_number} of {branch}:{commit}', '',
+ [], [], False, keywords, logger)
+ self.assertTrue(tag_exists(git_obj, target_tag), msg=f"Tag {target_tag} has not been created")
+ delete_fake_repository(path)
+
+ def test_create_second_test_tag(self):
+ first_tag = f"{self.TEST_BRANCH}/{self.TEST_COMMIT_COUNT}-g{self.TEST_COMMIT}/0"
+ second_tag = f"{self.TEST_BRANCH}/{self.TEST_COMMIT_COUNT}-g{self.TEST_COMMIT}/1"
+ keywords = {'commit': self.TEST_COMMIT, 'branch': self.TEST_BRANCH, "commit_count": self.TEST_COMMIT_COUNT}
+
+ path, git_obj = create_fake_repository(True, [first_tag])
+ ga.gitarchive(path, path, True, False,
+ "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}",
+ False, "{branch}/{commit_count}-g{commit}/{tag_number}",
+ 'Test run #{tag_number} of {branch}:{commit}', '',
+ [], [], False, keywords, logger)
+ self.assertTrue(tag_exists(git_obj, second_tag), msg=f"Second tag {second_tag} has not been created")
+ delete_fake_repository(path)
+
+ def test_get_revs_on_branch(self):
+ fake_tags_list=["main/10-g0f7d5df/0", "main/10-g0f7d5df/1", "foo/20-g2468f5d/0"]
+ tag_name = "{branch}/{commit_number}-g{commit}/{tag_number}"
+
+ path, git_obj = create_fake_repository(True, fake_tags_list)
+ revs = ga.get_test_revs(logger, git_obj, tag_name, branch="main")
+ self.assertEqual(len(revs), 1)
+ self.assertEqual(revs[0].commit, "0f7d5df")
+ self.assertEqual(len(revs[0].tags), 2)
+ self.assertEqual(revs[0].tags, ['main/10-g0f7d5df/0', 'main/10-g0f7d5df/1'])
+ delete_fake_repository(path)
+
+ def test_get_tags_without_valid_remote(self):
+ url = 'git://git.yoctoproject.org/poky'
+ path, git_obj = create_fake_repository(False, None, False)
+
+ tags = ga.get_tags(git_obj, self.log, pattern="yocto-*", url=url)
+ """Test for some well established tags (released tags)"""
+ self.assertIn("yocto-4.0", tags)
+ self.assertIn("yocto-4.1", tags)
+ self.assertIn("yocto-4.2", tags)
+ delete_fake_repository(path)
+
+ def test_get_tags_with_only_local_tag(self):
+ fake_tags_list=["main/10-g0f7d5df/0", "main/10-g0f7d5df/1", "foo/20-g2468f5d/0"]
+ path, git_obj = create_fake_repository(True, fake_tags_list, False)
+
+ """No remote is configured and no url is passed: get_tags must fall
+ back to local tags
+ """
+ tags = ga.get_tags(git_obj, self.log)
+ self.assertCountEqual(tags, fake_tags_list)
+ delete_fake_repository(path)
+
+ def test_get_tags_without_valid_remote_and_wrong_url(self):
+ url = 'git://git.foo.org/bar'
+ path, git_obj = create_fake_repository(False, None, False)
+
+ """Test for some well established tags (released tags)"""
+ with self.assertRaises(GitError):
+ tags = ga.get_tags(git_obj, self.log, pattern="yocto-*", url=url)
+ delete_fake_repository(path)
diff --git a/meta/lib/oeqa/selftest/cases/glibc.py b/meta/lib/oeqa/selftest/cases/glibc.py
new file mode 100644
index 0000000000..bd56b2f6e7
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/glibc.py
@@ -0,0 +1,99 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+import os
+import time
+import contextlib
+from oeqa.core.decorator import OETestTag
+from oeqa.core.case import OEPTestResultTestCase
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_var, runqemu
+from oeqa.utils.nfs import unfs_server
+
+def parse_values(content):
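+ """Yield (test name, status) pairs from glibc's tests.sum content.
+
+ Illustrative example (hypothetical line): given
+     FAIL: nptl/tst-mutex10
+ this yields ("nptl/tst-mutex10", "FAIL").
+ """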
+ for i in content:
+ for v in ["PASS", "FAIL", "XPASS", "XFAIL", "UNRESOLVED", "UNSUPPORTED", "UNTESTED", "ERROR", "WARNING"]:
+ if i.startswith(v + ": "):
+ yield i[len(v) + 2:].strip(), v
+ break
+
+class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
+ def run_check(self, ssh = None):
+ # configure ssh target
+ features = []
+ if ssh is not None:
+ features.append('TOOLCHAIN_TEST_TARGET = "ssh"')
+ features.append('TOOLCHAIN_TEST_HOST = "{0}"'.format(ssh))
+ features.append('TOOLCHAIN_TEST_HOST_USER = "root"')
+ features.append('TOOLCHAIN_TEST_HOST_PORT = "22"')
+ # force single threaded test execution
+ features.append('EGLIBCPARALLELISM:task-check:pn-glibc-testsuite = "PARALLELMFLAGS="-j1""')
+ self.write_config("\n".join(features))
+
+ start_time = time.time()
+
+ bitbake("glibc-testsuite -c check")
+
+ end_time = time.time()
+
+ builddir = get_bb_var("B", "glibc-testsuite")
+
+ ptestsuite = "glibc-user" if ssh is None else "glibc"
+ self.ptest_section(ptestsuite, duration = int(end_time - start_time))
+ with open(os.path.join(builddir, "tests.sum"), "r", errors='replace') as f:
+ for test, result in parse_values(f):
+ self.ptest_result(ptestsuite, test, result)
+
+ def run_check_emulated(self):
+ with contextlib.ExitStack() as s:
+ # use the base work dir, as the nfs mount, since the recipe directory may not exist
+ tmpdir = get_bb_var("BASE_WORKDIR")
+ nfsport, mountport = s.enter_context(unfs_server(tmpdir, udp = False))
+
+ # build core-image-minimal with required packages
+ default_installed_packages = [
+ "glibc-charmaps",
+ "libgcc",
+ "libstdc++",
+ "libatomic",
+ "libgomp",
+ # "python3",
+ # "python3-pexpect",
+ "nfs-utils",
+ ]
+ features = []
+ features.append('IMAGE_FEATURES += "ssh-server-openssh"')
+ features.append('CORE_IMAGE_EXTRA_INSTALL += "{0}"'.format(" ".join(default_installed_packages)))
+ self.write_config("\n".join(features))
+ bitbake("core-image-minimal")
+
+ # start runqemu
+ qemu = s.enter_context(runqemu("core-image-minimal", runqemuparams = "nographic", qemuparams = "-m 1024"))
+
+ # validate that SSH is working
+ status, _ = qemu.run("uname")
+ self.assertEqual(status, 0)
+
+ # setup nfs mount
+ if qemu.run("mkdir -p \"{0}\"".format(tmpdir))[0] != 0:
+ raise Exception("Failed to setup NFS mount directory on target")
+ mountcmd = "mount -o noac,nfsvers=3,port={0},mountport={1} \"{2}:{3}\" \"{3}\"".format(nfsport, mountport, qemu.server_ip, tmpdir)
+ status, output = qemu.run(mountcmd)
+ if status != 0:
+ raise Exception("Failed to setup NFS mount on target ({})".format(repr(output)))
+
+ self.run_check(ssh = qemu.ip)
+
+@OETestTag("toolchain-user")
+class GlibcSelfTest(GlibcSelfTestBase):
+ def test_glibc(self):
+ self.run_check()
+
+@OETestTag("toolchain-system")
+@OETestTag("runqemu")
+class GlibcSelfTestSystemEmulated(GlibcSelfTestBase):
+ def test_glibc(self):
+ self.run_check_emulated()
+
diff --git a/meta/lib/oeqa/selftest/cases/gotoolchain.py b/meta/lib/oeqa/selftest/cases/gotoolchain.py
index 1e23257f4d..ee2cf4b09a 100644
--- a/meta/lib/oeqa/selftest/cases/gotoolchain.py
+++ b/meta/lib/oeqa/selftest/cases/gotoolchain.py
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
import glob
import os
import shutil
@@ -42,26 +48,28 @@ class oeGoToolchainSelfTest(OESelftestTestCase):
shutil.rmtree(cls.tmpdir_SDKQA, ignore_errors=True)
super(oeGoToolchainSelfTest, cls).tearDownClass()
- def run_sdk_go_command(self, gocmd):
- cmd = "cd %s; " % self.tmpdir_SDKQA
+ def run_sdk_go_command(self, gocmd, proj, name):
+ cmd = "cd %s/src/%s/%s; " % (self.go_path, proj, name)
cmd = cmd + ". %s; " % self.env_SDK
cmd = cmd + "export GOPATH=%s; " % self.go_path
+ cmd = cmd + "export GOFLAGS=-modcacherw; "
+ cmd = cmd + "export CGO_ENABLED=1; "
+ cmd = cmd + "export GOPROXY=https://proxy.golang.org,direct; "
cmd = cmd + "${CROSS_COMPILE}go %s" % gocmd
return runCmd(cmd).status
def test_go_dep_build(self):
- proj = "github.com/golang"
- name = "dep"
- ver = "v0.3.1"
+ proj = "github.com/direnv"
+ name = "direnv"
+ ver = "v2.27.0"
archive = ".tar.gz"
url = "https://%s/%s/archive/%s%s" % (proj, name, ver, archive)
runCmd("cd %s; wget %s" % (self.tmpdir_SDKQA, url))
runCmd("cd %s; tar -xf %s" % (self.tmpdir_SDKQA, ver+archive))
runCmd("mkdir -p %s/src/%s" % (self.go_path, proj))
- runCmd("mv %s/dep-0.3.1 %s/src/%s/%s"
+ runCmd("mv %s/direnv-2.27.0 %s/src/%s/%s"
% (self.tmpdir_SDKQA, self.go_path, proj, name))
- retv = self.run_sdk_go_command('build %s/%s/cmd/dep'
- % (proj, name))
+ retv = self.run_sdk_go_command('build', proj, name)
self.assertEqual(retv, 0,
msg="Running go build failed for %s" % name)
diff --git a/meta/lib/oeqa/selftest/cases/image_typedep.py b/meta/lib/oeqa/selftest/cases/image_typedep.py
index e6788853a3..17c98baf14 100644
--- a/meta/lib/oeqa/selftest/cases/image_typedep.py
+++ b/meta/lib/oeqa/selftest/cases/image_typedep.py
@@ -1,14 +1,18 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
import os
from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import bitbake
-from oeqa.core.decorator.oeid import OETestID
class ImageTypeDepTests(OESelftestTestCase):
- # Verify that when specifying a IMAGE_TYPEDEP_ of the form "foo.bar" that
+ # Verify that when specifying a IMAGE_TYPEDEP: of the form "foo.bar" that
# the conversion type bar gets added as a dep as well
- @OETestID(1633)
def test_conversion_typedep_added(self):
self.write_recipeinc('emptytest', """
@@ -20,7 +24,7 @@ LICENSE = "MIT"
IMAGE_FSTYPES = "testfstype"
IMAGE_TYPES_MASKED += "testfstype"
-IMAGE_TYPEDEP_testfstype = "tar.bz2"
+IMAGE_TYPEDEP:testfstype = "tar.bz2"
inherit image
@@ -29,11 +33,14 @@ inherit image
# like CONVERSION_DEPENDS_bz2="somedep"
result = bitbake('-e emptytest')
+ dep = None
for line in result.output.split('\n'):
if line.startswith('CONVERSION_DEPENDS_bz2'):
dep = line.split('=')[1].strip('"')
break
+ self.assertIsNotNone(dep, "CONVERSION_DEPENDS_bz2 dependency not found in bitbake -e output")
+
# Now get the dependency task list and check for the expected task
# dependency
bitbake('-g emptytest')
diff --git a/meta/lib/oeqa/selftest/cases/imagefeatures.py b/meta/lib/oeqa/selftest/cases/imagefeatures.py
index 0ffb686921..dc88c222bd 100644
--- a/meta/lib/oeqa/selftest/cases/imagefeatures.py
+++ b/meta/lib/oeqa/selftest/cases/imagefeatures.py
@@ -1,7 +1,14 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
from oeqa.selftest.case import OESelftestTestCase
-from oeqa.utils.commands import runCmd, bitbake, get_bb_var, runqemu
-from oeqa.core.decorator.oeid import OETestID
+from oeqa.core.decorator import OETestTag
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu
from oeqa.utils.sshcontrol import SSHControl
+import glob
import os
import json
@@ -10,9 +17,7 @@ class ImageFeatures(OESelftestTestCase):
test_user = 'tester'
root_user = 'root'
- buffer = True
-
- @OETestID(1107)
+ @OETestTag("runqemu")
def test_non_root_user_can_connect_via_ssh_without_password(self):
"""
Summary: Check if non root user can connect via ssh without password
@@ -23,7 +28,7 @@ class ImageFeatures(OESelftestTestCase):
AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
"""
- features = 'EXTRA_IMAGE_FEATURES = "ssh-server-openssh empty-root-password allow-empty-password"\n'
+ features = 'EXTRA_IMAGE_FEATURES = "ssh-server-openssh empty-root-password allow-empty-password allow-root-login"\n'
features += 'INHERIT += "extrausers"\n'
features += 'EXTRA_USERS_PARAMS = "useradd -p \'\' {}; usermod -s /bin/sh {};"'.format(self.test_user, self.test_user)
self.write_config(features)
@@ -38,7 +43,7 @@ class ImageFeatures(OESelftestTestCase):
status, output = ssh.run("true")
self.assertEqual(status, 0, 'ssh to user %s failed with %s' % (user, output))
- @OETestID(1115)
+ @OETestTag("runqemu")
def test_all_users_can_connect_via_ssh_without_password(self):
"""
Summary: Check if all users can connect via ssh without password
@@ -49,7 +54,7 @@ class ImageFeatures(OESelftestTestCase):
AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
"""
- features = 'EXTRA_IMAGE_FEATURES = "ssh-server-openssh allow-empty-password"\n'
+ features = 'EXTRA_IMAGE_FEATURES = "ssh-server-openssh allow-empty-password allow-root-login"\n'
features += 'INHERIT += "extrausers"\n'
features += 'EXTRA_USERS_PARAMS = "useradd -p \'\' {}; usermod -s /bin/sh {};"'.format(self.test_user, self.test_user)
self.write_config(features)
@@ -68,20 +73,6 @@ class ImageFeatures(OESelftestTestCase):
self.assertEqual(status, 0, 'ssh to user tester failed with %s' % output)
- @OETestID(1116)
- def test_clutter_image_can_be_built(self):
- """
- Summary: Check if clutter image can be built
- Expected: 1. core-image-clutter can be built
- Product: oe-core
- Author: Ionut Chisanovici <ionutx.chisanovici@intel.com>
- AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
- """
-
- # Build a core-image-clutter
- bitbake('core-image-clutter')
-
- @OETestID(1117)
def test_wayland_support_in_image(self):
"""
Summary: Check Wayland support in image
@@ -99,7 +90,6 @@ class ImageFeatures(OESelftestTestCase):
# Build a core-image-weston
bitbake('core-image-weston')
- @OETestID(1497)
def test_bmap(self):
"""
Summary: Check bmap support
@@ -112,12 +102,11 @@ class ImageFeatures(OESelftestTestCase):
features = 'IMAGE_FSTYPES += " ext4 ext4.bmap ext4.bmap.gz"'
self.write_config(features)
- image_name = 'core-image-minimal'
- bitbake(image_name)
+ image = 'core-image-minimal'
+ bitbake(image)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
- deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
- link_name = get_bb_var('IMAGE_LINK_NAME', image_name)
- image_path = os.path.join(deploy_dir_image, "%s.ext4" % link_name)
+ image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.ext4" % bb_vars['IMAGE_LINK_NAME'])
bmap_path = "%s.bmap" % image_path
gzip_path = "%s.gz" % bmap_path
@@ -128,12 +117,11 @@ class ImageFeatures(OESelftestTestCase):
# check if result image is sparse
image_stat = os.stat(image_path)
- self.assertTrue(image_stat.st_size > image_stat.st_blocks * 512)
+ self.assertGreater(image_stat.st_size, image_stat.st_blocks * 512)
- # check if the resulting gzip is valid
- self.assertTrue(runCmd('gzip -t %s' % gzip_path))
+ # check if the resulting gzip is valid, --force is needed in case gzip_path is a symlink
+ self.assertTrue(runCmd('gzip --test --force %s' % gzip_path))
- @OETestID(1903)
def test_hypervisor_fmts(self):
"""
Summary: Check various hypervisor formats
@@ -147,17 +135,16 @@ class ImageFeatures(OESelftestTestCase):
img_types = [ 'vmdk', 'vdi', 'qcow2' ]
features = ""
for itype in img_types:
- features += 'IMAGE_FSTYPES += "wic.%s"\n' % itype
+ features += 'IMAGE_FSTYPES += "ext4.%s"\n' % itype
self.write_config(features)
- image_name = 'core-image-minimal'
- bitbake(image_name)
+ image = 'core-image-minimal'
+ bitbake(image)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
- deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
- link_name = get_bb_var('IMAGE_LINK_NAME', image_name)
for itype in img_types:
- image_path = os.path.join(deploy_dir_image, "%s.wic.%s" %
- (link_name, itype))
+ image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.ext4.%s" %
+ (bb_vars['IMAGE_LINK_NAME'], itype))
# check if result image file is in deploy directory
self.assertTrue(os.path.exists(image_path))
@@ -166,31 +153,33 @@ class ImageFeatures(OESelftestTestCase):
sysroot = get_bb_var('STAGING_DIR_NATIVE', 'core-image-minimal')
result = runCmd('qemu-img info --output json %s' % image_path,
native_sysroot=sysroot)
- self.assertTrue(json.loads(result.output).get('format') == itype)
+ try:
+ data = json.loads(result.output)
+ self.assertEqual(data.get('format'), itype,
+ msg="Unexpected format in '%s'" % (result.output))
+ except json.decoder.JSONDecodeError:
+ self.fail("Could not parse '%ss'" % result.output)
- @OETestID(1905)
def test_long_chain_conversion(self):
"""
Summary: Check for chaining many CONVERSION_CMDs together
Expected: 1. core-image-minimal can be built with
- ext4.bmap.gz.bz2.lzo.xz.u-boot and also create a
+ ext4.bmap.gz.bz2.zst.xz.u-boot and also create a
sha256sum
2. The above image has a valid sha256sum
Product: oe-core
Author: Tom Rini <trini@konsulko.com>
"""
- conv = "ext4.bmap.gz.bz2.lzo.xz.u-boot"
+ conv = "ext4.bmap.gz.bz2.zst.xz.u-boot"
features = 'IMAGE_FSTYPES += "%s %s.sha256sum"' % (conv, conv)
self.write_config(features)
- image_name = 'core-image-minimal'
- bitbake(image_name)
-
- deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
- link_name = get_bb_var('IMAGE_LINK_NAME', image_name)
- image_path = os.path.join(deploy_dir_image, "%s.%s" %
- (link_name, conv))
+ image = 'core-image-minimal'
+ bitbake(image)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+ image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.%s" %
+ (bb_vars['IMAGE_LINK_NAME'], conv))
# check if resulting image is in the deploy directory
self.assertTrue(os.path.exists(image_path))
@@ -198,9 +187,8 @@ class ImageFeatures(OESelftestTestCase):
# check if the resulting sha256sum agrees
self.assertTrue(runCmd('cd %s;sha256sum -c %s.%s.sha256sum' %
- (deploy_dir_image, link_name, conv)))
+ (bb_vars['DEPLOY_DIR_IMAGE'], bb_vars['IMAGE_LINK_NAME'], conv)))
- @OETestID(1904)
def test_image_fstypes(self):
"""
Summary: Check if image of supported image fstypes can be built
@@ -208,26 +196,43 @@ class ImageFeatures(OESelftestTestCase):
Product: oe-core
Author: Ed Bartosh <ed.bartosh@linux.intel.com>
"""
- image_name = 'core-image-minimal'
-
- img_types = [itype for itype in get_bb_var("IMAGE_TYPES", image_name).split() \
- if itype not in ('container', 'elf', 'multiubi')]
+ image = 'core-image-minimal'
- config = 'IMAGE_FSTYPES += "%s"\n'\
- 'MKUBIFS_ARGS ?= "-m 2048 -e 129024 -c 2047"\n'\
- 'UBINIZE_ARGS ?= "-m 2048 -p 128KiB -s 512"' % ' '.join(img_types)
+ all_image_types = set(get_bb_var("IMAGE_TYPES", image).split())
+ skip_image_types = set(('container', 'elf', 'f2fs', 'tar.zst', 'wic.zst', 'squashfs-lzo', 'vfat'))
+ img_types = all_image_types - skip_image_types
+ config = """
+IMAGE_FSTYPES += "%s"
+WKS_FILE = "wictestdisk.wks"
+MKUBIFS_ARGS ?= "-m 2048 -e 129024 -c 2047"
+UBINIZE_ARGS ?= "-m 2048 -p 128KiB -s 512"
+MULTIUBI_BUILD += "mtd_2_128"
+MKUBIFS_ARGS_mtd_2_128 ?= "-m 2048 -e 129024 -c 2047"
+UBINIZE_ARGS_mtd_2_128 ?= "-m 2048 -p 128KiB -s 512"
+MULTIUBI_BUILD += "mtd_4_256"
+MKUBIFS_ARGS_mtd_4_256 ?= "-m 4096 -e 253952 -c 4096"
+UBINIZE_ARGS_mtd_4_256 ?= "-m 4096 -p 256KiB"
+""" % ' '.join(img_types)
self.write_config(config)
- bitbake(image_name)
+ bitbake(image)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME', 'MULTIUBI_BUILD'], image)
- deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
- link_name = get_bb_var('IMAGE_LINK_NAME', image_name)
for itype in img_types:
- image_path = os.path.join(deploy_dir_image, "%s.%s" % (link_name, itype))
- # check if result image is in deploy directory
- self.assertTrue(os.path.exists(image_path),
- "%s image %s doesn't exist" % (itype, image_path))
+ if itype == 'multiubi':
+ # For multiubi builds, walk the MULTIUBI_BUILD entries, since each one
+ # appends its name to IMAGE_LINK_NAME
+ for vname in bb_vars['MULTIUBI_BUILD'].split():
+ image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s_%s.ubifs" % (bb_vars['IMAGE_LINK_NAME'], vname))
+ # check if result image is in deploy directory
+ self.assertTrue(os.path.exists(image_path),
+ "%s image %s doesn't exist" % (itype, image_path))
+ else:
+ image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.%s" % (bb_vars['IMAGE_LINK_NAME'], itype))
+ # check if result image is in deploy directory
+ self.assertTrue(os.path.exists(image_path),
+ "%s image %s doesn't exist" % (itype, image_path))
def test_useradd_static(self):
config = """
@@ -238,3 +243,94 @@ USERADD_GID_TABLES += "files/static-group"
"""
self.write_config(config)
bitbake("core-image-base")
+
+ def test_no_busybox_base_utils(self):
+ config = """
+# Enable wayland
+DISTRO_FEATURES:append = " pam opengl wayland"
+
+# Switch to systemd
+DISTRO_FEATURES:append = " systemd usrmerge"
+VIRTUAL-RUNTIME_init_manager = "systemd"
+VIRTUAL-RUNTIME_initscripts = ""
+VIRTUAL-RUNTIME_syslog = ""
+VIRTUAL-RUNTIME_login_manager = "shadow-base"
+DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit"
+
+# Replace busybox
+PREFERRED_PROVIDER_virtual/base-utils = "packagegroup-core-base-utils"
+VIRTUAL-RUNTIME_base-utils = "packagegroup-core-base-utils"
+VIRTUAL-RUNTIME_base-utils-hwclock = "util-linux-hwclock"
+VIRTUAL-RUNTIME_base-utils-syslog = ""
+
+# Skip busybox
+SKIP_RECIPE[busybox] = "Don't build this"
+"""
+ self.write_config(config)
+
+ bitbake("--graphviz core-image-weston")
+
+ def test_image_gen_debugfs(self):
+ """
+ Summary: Check debugfs generation
+ Expected: 1. core-image-minimal can be built with the IMAGE_GEN_DEBUGFS variable set
+ 2. debug filesystem is created when the variable is set
+ 3. debug symbols available
+ Product: oe-core
+ Author: Humberto Ibarra <humberto.ibarra.lopez@intel.com>
+ Yeoh Ee Peng <ee.peng.yeoh@intel.com>
+ """
+
+ image = 'core-image-minimal'
+ image_fstypes_debugfs = 'tar.bz2'
+ features = 'IMAGE_GEN_DEBUGFS = "1"\n'
+ features += 'IMAGE_FSTYPES_DEBUGFS = "%s"\n' % image_fstypes_debugfs
+ self.write_config(features)
+
+ bitbake(image)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+
+ dbg_tar_file = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s-dbg.%s" % (bb_vars['IMAGE_LINK_NAME'], image_fstypes_debugfs))
+ self.assertTrue(os.path.exists(dbg_tar_file), 'debug filesystem not generated at %s' % dbg_tar_file)
+ result = runCmd('cd %s; tar xvf %s' % (bb_vars['DEPLOY_DIR_IMAGE'], dbg_tar_file))
+ self.assertEqual(result.status, 0, msg='Failed to extract %s: %s' % (dbg_tar_file, result.output))
+ result = runCmd('find %s -name %s' % (bb_vars['DEPLOY_DIR_IMAGE'], "udevadm"))
+ self.assertTrue("udevadm" in result.output, msg='Failed to find udevadm: %s' % result.output)
+ dbg_symbols_targets = result.output.splitlines()
+ self.assertTrue(dbg_symbols_targets, msg='Failed to find any udevadm binaries: %s' % dbg_symbols_targets)
+ for t in dbg_symbols_targets:
+ result = runCmd('objdump --syms %s | grep debug' % t)
+ self.assertTrue("debug" in result.output, msg='Failed to find debug symbol: %s' % result.output)
+
+ def test_empty_image(self):
+ """Test creation of image with no packages"""
+ image = 'test-empty-image'
+ bitbake(image)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+ manifest = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.manifest" % bb_vars['IMAGE_LINK_NAME'])
+ self.assertTrue(os.path.exists(manifest))
+
+ with open(manifest, "r") as f:
+ self.assertEqual(len(f.read().strip()),0)
+
+ def test_mandb(self):
+ """
+ Test that an image containing manpages has working man and apropos commands.
+ """
+ config = """
+DISTRO_FEATURES:append = " api-documentation"
+CORE_IMAGE_EXTRA_INSTALL = "man-pages kmod-doc"
+"""
+ self.write_config(config)
+ bitbake("core-image-minimal")
+
+ with runqemu('core-image-minimal', ssh=False, runqemuparams='nographic') as qemu:
+ # This manpage is provided by man-pages
+ status, output = qemu.run_serial("apropos 8859")
+ self.assertEqual(status, 1, 'Failed to run apropos: %s' % (output))
+ self.assertIn("iso_8859_15", output)
+
+ # This manpage is provided by kmod
+ status, output = qemu.run_serial("man --pager=cat modprobe")
+ self.assertEqual(status, 1, 'Failed to run man: %s' % (output))
+ self.assertIn("force-modversion", output)
diff --git a/meta/lib/oeqa/selftest/cases/incompatible_lic.py b/meta/lib/oeqa/selftest/cases/incompatible_lic.py
new file mode 100644
index 0000000000..f4af67a239
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/incompatible_lic.py
@@ -0,0 +1,152 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+
+class IncompatibleLicenseTestObsolete(OESelftestTestCase):
+
+ def lic_test(self, pn, pn_lic, lic, error_msg=None):
+ if not error_msg:
+ error_msg = 'ERROR: Nothing PROVIDES \'%s\'\n%s was skipped: it has incompatible license(s): %s' % (pn, pn, pn_lic)
+
+ self.write_config("INCOMPATIBLE_LICENSE += \"%s\"" % (lic))
+
+ result = bitbake('%s --dry-run' % (pn), ignore_status=True)
+ if error_msg not in result.output:
+ raise AssertionError(result.output)
+
+ # Verify that a package with an SPDX license cannot be built when
+ # INCOMPATIBLE_LICENSE contains an alias (in SPDXLICENSEMAP) of this SPDX
+ # license
+ def test_incompatible_alias_spdx_license(self):
+ self.lic_test('incompatible-license', 'GPL-3.0-only', 'GPLv3', "is an obsolete license, please use an SPDX reference in INCOMPATIBLE_LICENSE")
+
+ # Verify that a package with an SPDX license cannot be built when
+ # INCOMPATIBLE_LICENSE contains a wildcarded alias license matching this
+ # SPDX license
+ def test_incompatible_alias_spdx_license_wildcard(self):
+ self.lic_test('incompatible-license', 'GPL-3.0-only', '*GPLv3', "*GPLv3 is an invalid license wildcard entry")
+
+ # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX
+ # license cannot be built when INCOMPATIBLE_LICENSE contains this alias
+ def test_incompatible_alias_spdx_license_alias(self):
+ self.lic_test('incompatible-license-alias', 'GPL-3.0-only', 'GPLv3', "is an obsolete license, please use an SPDX reference in INCOMPATIBLE_LICENSE")
+
+ # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX
+ # license cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded
+ # license matching this SPDX license
+ def test_incompatible_spdx_license_alias_wildcard(self):
+ self.lic_test('incompatible-license-alias', 'GPL-3.0-only', '*GPL-3.0', "*GPL-3.0 is an invalid license wildcard entry")
+
+ # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX
+ # license cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded
+ # alias license matching the SPDX license
+ def test_incompatible_alias_spdx_license_alias_wildcard(self):
+ self.lic_test('incompatible-license-alias', 'GPL-3.0-only', '*GPLv3', "*GPLv3 is an invalid license wildcard entry")
+
+
+ # Verify that a package with multiple SPDX licenses cannot be built when
+ # INCOMPATIBLE_LICENSE contains a wildcard to some of them
+ def test_incompatible_spdx_licenses_wildcard(self):
+ self.lic_test('incompatible-licenses', 'GPL-3.0-only LGPL-3.0-only', '*GPL-3.0-only', "*GPL-3.0-only is an invalid license wildcard entry")
+
+
+ # Verify that a package with multiple SPDX licenses cannot be built when
+ # INCOMPATIBLE_LICENSE contains a wildcard matching all licenses
+ def test_incompatible_all_licenses_wildcard(self):
+ self.lic_test('incompatible-licenses', 'GPL-2.0-only GPL-3.0-only LGPL-3.0-only', '*', "* is an invalid license wildcard entry")
+
+class IncompatibleLicenseTests(OESelftestTestCase):
+
+ def lic_test(self, pn, pn_lic, lic):
+ error_msg = 'ERROR: Nothing PROVIDES \'%s\'\n%s was skipped: it has incompatible license(s): %s' % (pn, pn, pn_lic)
+
+ self.write_config("INCOMPATIBLE_LICENSE += \"%s\"" % (lic))
+
+ result = bitbake('%s --dry-run' % (pn), ignore_status=True)
+ if error_msg not in result.output:
+ raise AssertionError(result.output)
+
+ # Verify that a package with an SPDX license cannot be built when
+ # INCOMPATIBLE_LICENSE contains this SPDX license
+ def test_incompatible_spdx_license(self):
+ self.lic_test('incompatible-license', 'GPL-3.0-only', 'GPL-3.0-only')
+
+ # Verify that a package with an SPDX license cannot be built when
+ # INCOMPATIBLE_LICENSE contains a wildcarded license matching this SPDX
+ # license
+ def test_incompatible_spdx_license_wildcard(self):
+ self.lic_test('incompatible-license', 'GPL-3.0-only', 'GPL-3.0*')
+
+ # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX
+ # license cannot be built when INCOMPATIBLE_LICENSE contains this SPDX
+ # license
+ def test_incompatible_spdx_license_alias(self):
+ self.lic_test('incompatible-license-alias', 'GPL-3.0-only', 'GPL-3.0-only')
+
+ # Verify that a package with multiple SPDX licenses cannot be built when
+ # INCOMPATIBLE_LICENSE contains some of them
+ def test_incompatible_spdx_licenses(self):
+ self.lic_test('incompatible-licenses', 'GPL-3.0-only LGPL-3.0-only', 'GPL-3.0-only LGPL-3.0-only')
+
+ # Verify that a package with a non-SPDX license cannot be built when
+ # INCOMPATIBLE_LICENSE contains this license
+ def test_incompatible_nonspdx_license(self):
+ self.lic_test('incompatible-nonspdx-license', 'FooLicense', 'FooLicense')
+
+class IncompatibleLicensePerImageTests(OESelftestTestCase):
+ def default_config(self):
+ return """
+IMAGE_INSTALL:append = " bash"
+INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*"
+"""
+
+ def test_bash_default(self):
+ self.write_config(self.default_config())
+ error_msg = "ERROR: core-image-minimal-1.0-r0 do_rootfs: Package bash cannot be installed into the image because it has incompatible license(s): GPL-3.0-or-later"
+
+ result = bitbake('core-image-minimal', ignore_status=True)
+ if error_msg not in result.output:
+ raise AssertionError(result.output)
+
+ def test_bash_and_license(self):
+ self.disable_class("create-spdx")
+ self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " & SomeLicense"')
+ error_msg = "ERROR: core-image-minimal-1.0-r0 do_rootfs: Package bash cannot be installed into the image because it has incompatible license(s): GPL-3.0-or-later"
+
+ result = bitbake('core-image-minimal', ignore_status=True)
+ if error_msg not in result.output:
+ raise AssertionError(result.output)
+
+ def test_bash_or_license(self):
+ self.disable_class("create-spdx")
+ self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " | SomeLicense"')
+
+ bitbake('core-image-minimal')
+
+ def test_bash_license_exceptions(self):
+ self.write_config(self.default_config() + '\nINCOMPATIBLE_LICENSE_EXCEPTIONS:pn-core-image-minimal = "bash:GPL-3.0-or-later"')
+
+ bitbake('core-image-minimal')
+
+class NoGPL3InImagesTests(OESelftestTestCase):
+ def test_core_image_minimal(self):
+ self.write_config("""
+INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*"
+""")
+ bitbake('core-image-minimal')
+
+ def test_core_image_full_cmdline_weston(self):
+ self.write_config("""
+IMAGE_CLASSES += "testimage"
+INCOMPATIBLE_LICENSE:pn-core-image-full-cmdline = "GPL-3.0* LGPL-3.0*"
+INCOMPATIBLE_LICENSE:pn-core-image-weston = "GPL-3.0* LGPL-3.0*"
+
+require conf/distro/include/no-gplv3.inc
+""")
+ bitbake('core-image-full-cmdline core-image-weston')
+ bitbake('-c testimage core-image-full-cmdline core-image-weston')
+
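The tests above pin down how INCOMPATIBLE_LICENSE wildcards are interpreted: suffix patterns such as "GPL-3.0*" are matched against SPDX names, while entries beginning with "*" and obsolete SPDXLICENSEMAP aliases are rejected outright. A minimal illustrative sketch of those rules (fnmatch-based, not the actual oe.license code) is:

    import fnmatch

    OBSOLETE_ALIASES = {"GPLv3": "GPL-3.0-only"}   # illustrative subset of SPDXLICENSEMAP

    def check_incompatible(license_name, incompatible_entries):
        for entry in incompatible_entries:
            if entry.startswith("*"):
                raise ValueError("%s is an invalid license wildcard entry" % entry)
            if entry in OBSOLETE_ALIASES:
                raise ValueError("%s is an obsolete license, please use an SPDX reference" % entry)
            # e.g. "GPL-3.0*" matches both GPL-3.0-only and GPL-3.0-or-later
            if fnmatch.fnmatchcase(license_name, entry):
                return True
        return False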
diff --git a/meta/lib/oeqa/selftest/cases/intercept.py b/meta/lib/oeqa/selftest/cases/intercept.py
new file mode 100644
index 0000000000..12583c3099
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/intercept.py
@@ -0,0 +1,21 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+
+class GitCheck(OESelftestTestCase):
+ def test_git_intercept(self):
+ """
+ Git binaries with CVE-2022-24765 fixed will refuse to operate on a
+ repository which is owned by a different user. This breaks our
+ do_install task as that runs inside pseudo, so the git repository is
+ owned by the build user but git is running as (fake)root.
+
+ We have an intercept which disables pseudo, so verify that it works.
+ """
+ bitbake("git-submodule-test -c test_git_as_user")
+ bitbake("git-submodule-test -c test_git_as_root")
diff --git a/meta/lib/oeqa/selftest/cases/kerneldevelopment.py b/meta/lib/oeqa/selftest/cases/kerneldevelopment.py
new file mode 100644
index 0000000000..b1f78a0cd1
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/kerneldevelopment.py
@@ -0,0 +1,74 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import runCmd, get_bb_var
+from oeqa.utils.git import GitRepo
+
+class KernelDev(OESelftestTestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super(KernelDev, cls).setUpClass()
+ # Create the recipe directory structure inside the created layer
+ cls.layername = 'meta-kerneltest'
+ runCmd('bitbake-layers create-layer %s' % cls.layername)
+ runCmd('mkdir -p %s/recipes-kernel/linux/linux-yocto' % cls.layername)
+ cls.recipes_linuxyocto_dir = os.path.join \
+ (cls.builddir, cls.layername, 'recipes-kernel', 'linux', 'linux-yocto')
+ cls.recipeskernel_dir = os.path.dirname(cls.recipes_linuxyocto_dir)
+ runCmd('bitbake-layers add-layer %s' % cls.layername)
+
+ @classmethod
+ def tearDownClass(cls):
+ runCmd('bitbake-layers remove-layer %s' % cls.layername, ignore_status=True)
+ runCmd('rm -rf %s' % cls.layername)
+ super(KernelDev, cls).tearDownClass()
+
+ def setUp(self):
+ super(KernelDev, self).setUp()
+ self.set_machine_config('MACHINE = "qemux86-64"\n')
+
+ def test_apply_patches(self):
+ """
+ Summary: Able to apply a single patch to the Linux kernel source
+ Expected: The README file should exist and the patch changes should be
+ displayed at the end of the file.
+ Product: Kernel Development
+ Author: Yeoh Ee Peng <ee.peng.yeoh@intel.com>
+ AutomatedBy: Mazliana Mohamad <mazliana.mohamad@intel.com>
+ """
+ runCmd('bitbake virtual/kernel -c patch')
+ kernel_source = get_bb_var('STAGING_KERNEL_DIR')
+ readme = os.path.join(kernel_source, 'README')
+
+ # This test step adds the modified 'README' file to git and creates a
+ # patch file '0001-KERNEL_DEV_TEST_CASE.patch' in the same location as the file
+ patch_content = 'This is a test to apply a patch to the kernel'
+ with open(readme, 'a+') as f:
+ f.write(patch_content)
+ repo = GitRepo('%s' % kernel_source, is_topdir=True)
+ repo.run_cmd('add %s' % readme)
+ repo.run_cmd(['commit', '-m', 'KERNEL_DEV_TEST_CASE'])
+ repo.run_cmd(['format-patch', '-1'])
+ patch_name = '0001-KERNEL_DEV_TEST_CASE.patch'
+ patchpath = os.path.join(kernel_source, patch_name)
+ runCmd('mv %s %s' % (patchpath, self.recipes_linuxyocto_dir))
+ runCmd('rm %s ' % readme)
+ self.assertFalse(os.path.exists(readme))
+
+ recipe_append = os.path.join(self.recipeskernel_dir, 'linux-yocto_%.bbappend')
+ with open(recipe_append, 'w+') as fh:
+ fh.write('SRC_URI += "file://%s"\n' % patch_name)
+ fh.write('ERROR_QA:remove:pn-linux-yocto = "patch-status"\n')
+ fh.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"')
+
+ runCmd('bitbake virtual/kernel -c clean')
+ runCmd('bitbake virtual/kernel -c patch')
+ self.assertTrue(os.path.exists(readme))
+ result = runCmd('tail -n 1 %s' % readme)
+ self.assertEqual(result.output, patch_content)
diff --git a/meta/lib/oeqa/selftest/cases/layerappend.py b/meta/lib/oeqa/selftest/cases/layerappend.py
index 2fd5cdb0c6..379ed589ad 100644
--- a/meta/lib/oeqa/selftest/cases/layerappend.py
+++ b/meta/lib/oeqa/selftest/cases/layerappend.py
@@ -1,9 +1,14 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
import os
from oeqa.selftest.case import OESelftestTestCase
-from oeqa.utils.commands import runCmd, bitbake, get_bb_var
+from oeqa.utils.commands import bitbake, get_bb_var
import oeqa.utils.ftools as ftools
-from oeqa.core.decorator.oeid import OETestID
class LayerAppendTests(OESelftestTestCase):
layerconf = """
@@ -27,20 +32,20 @@ python do_build() {
addtask build
"""
append = """
-FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"
+FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"
-SRC_URI_append = " file://appendtest.txt"
+SRC_URI:append = " file://appendtest.txt"
-sysroot_stage_all_append() {
+sysroot_stage_all:append() {
install -m 644 ${WORKDIR}/appendtest.txt ${SYSROOT_DESTDIR}/
}
"""
append2 = """
-FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"
+FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"
-SRC_URI_append = " file://appendtest.txt"
+SRC_URI:append = " file://appendtest.txt"
"""
layerappend = ''
@@ -49,7 +54,6 @@ SRC_URI_append = " file://appendtest.txt"
ftools.remove_from_file(self.builddir + "/conf/bblayers.conf", self.layerappend)
super(LayerAppendTests, self).tearDownLocal()
- @OETestID(1196)
def test_layer_appends(self):
corebase = get_bb_var("COREBASE")
diff --git a/meta/lib/oeqa/selftest/cases/liboe.py b/meta/lib/oeqa/selftest/cases/liboe.py
index e84609246a..d5ffffdcb4 100644
--- a/meta/lib/oeqa/selftest/cases/liboe.py
+++ b/meta/lib/oeqa/selftest/cases/liboe.py
@@ -1,5 +1,10 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
from oeqa.selftest.case import OESelftestTestCase
-from oeqa.core.decorator.oeid import OETestID
from oeqa.utils.commands import get_bb_var, get_bb_vars, bitbake, runCmd
import oe.path
import os
@@ -11,7 +16,6 @@ class LibOE(OESelftestTestCase):
super(LibOE, cls).setUpClass()
cls.tmp_dir = get_bb_var('TMPDIR')
- @OETestID(1635)
def test_copy_tree_special(self):
"""
Summary: oe.path.copytree() should copy files with special character
@@ -37,7 +41,6 @@ class LibOE(OESelftestTestCase):
oe.path.remove(testloc)
- @OETestID(1636)
def test_copy_tree_xattr(self):
"""
Summary: oe.path.copytree() should preserve xattr on copied files
@@ -72,7 +75,6 @@ class LibOE(OESelftestTestCase):
oe.path.remove(testloc)
- @OETestID(1634)
def test_copy_hardlink_tree_count(self):
"""
Summary: oe.path.copyhardlinktree() shouldn't miss out files
@@ -97,6 +99,6 @@ class LibOE(OESelftestTestCase):
dstcnt = len(os.listdir(dst))
srccnt = len(os.listdir(src))
- self.assertEquals(dstcnt, len(testfiles), "Number of files in dst (%s) differs from number of files in src(%s)." % (dstcnt, srccnt))
+ self.assertEqual(dstcnt, len(testfiles), "Number of files in dst (%s) differs from number of files in src(%s)." % (dstcnt, srccnt))
oe.path.remove(testloc)
diff --git a/meta/lib/oeqa/selftest/cases/lic_checksum.py b/meta/lib/oeqa/selftest/cases/lic_checksum.py
index 37407157c1..2d0b805b90 100644
--- a/meta/lib/oeqa/selftest/cases/lic_checksum.py
+++ b/meta/lib/oeqa/selftest/cases/lic_checksum.py
@@ -1,16 +1,38 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
import os
import tempfile
+import urllib
from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import bitbake
-from oeqa.utils import CommandError
-from oeqa.core.decorator.oeid import OETestID
class LicenseTests(OESelftestTestCase):
+ def test_checksum_with_space(self):
+ bitbake_cmd = '-c populate_lic emptytest'
+
+ lic_file, lic_path = tempfile.mkstemp(" -afterspace")
+ os.close(lic_file)
+ #self.track_for_cleanup(lic_path)
+
+ self.write_config("INHERIT:remove = \"report-error\"")
+
+ self.write_recipeinc('emptytest', """
+INHIBIT_DEFAULT_DEPS = "1"
+LIC_FILES_CHKSUM = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e"
+SRC_URI = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e"
+""" % (urllib.parse.quote(lic_path), urllib.parse.quote(lic_path)))
+ result = bitbake(bitbake_cmd)
+ self.delete_recipeinc('emptytest')
+
+
# Verify that changing a license file that has an absolute path causes
# the license qa to fail due to a mismatched md5sum.
- @OETestID(1197)
def test_nonmatching_checksum(self):
bitbake_cmd = '-c populate_lic emptytest'
error_msg = 'emptytest: The new md5 checksum is 8d777f385d3dfec8815d20f7496026dc'
@@ -19,6 +41,8 @@ class LicenseTests(OESelftestTestCase):
os.close(lic_file)
self.track_for_cleanup(lic_path)
+ self.write_config("INHERIT:remove = \"report-error\"")
+
self.write_recipeinc('emptytest', """
INHIBIT_DEFAULT_DEPS = "1"
LIC_FILES_CHKSUM = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e"
@@ -29,7 +53,7 @@ SRC_URI = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e"
with open(lic_path, "w") as f:
f.write("data")
- self.write_config("INHERIT_remove = \"report-error\"")
result = bitbake(bitbake_cmd, ignore_status=True)
+ self.delete_recipeinc('emptytest')
if error_msg not in result.output:
raise AssertionError(result.output)
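For context on the urllib.parse.quote() call in test_checksum_with_space above: SRC_URI entries are whitespace-separated, so a literal space in the license path would be read as two entries; percent-encoding keeps it as a single file:// URI. The temp file name below is hypothetical:

    import urllib.parse

    lic_path = "/tmp/tmpabc123 -afterspace"      # hypothetical mkstemp() result
    print(urllib.parse.quote(lic_path))          # -> /tmp/tmpabc123%20-afterspace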
diff --git a/meta/lib/oeqa/selftest/cases/locales.py b/meta/lib/oeqa/selftest/cases/locales.py
new file mode 100644
index 0000000000..4ca8ffb7aa
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/locales.py
@@ -0,0 +1,54 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.core.decorator import OETestTag
+from oeqa.utils.commands import bitbake, runqemu
+
+class LocalesTest(OESelftestTestCase):
+
+ @OETestTag("runqemu")
+
+ def run_locales_test(self, binary_enabled):
+ features = []
+ features.append('EXTRA_IMAGE_FEATURES = "empty-root-password allow-empty-password allow-root-login"')
+ features.append('IMAGE_INSTALL:append = " glibc-utils localedef"')
+ features.append('GLIBC_GENERATE_LOCALES = "en_US.UTF-8 fr_FR.UTF-8"')
+ features.append('IMAGE_LINGUAS:append = " en-us fr-fr"')
+ if binary_enabled:
+ features.append('ENABLE_BINARY_LOCALE_GENERATION = "1"')
+ else:
+ features.append('ENABLE_BINARY_LOCALE_GENERATION = "0"')
+ self.write_config("\n".join(features))
+
+ # Build a core-image-minimal
+ bitbake('core-image-minimal')
+
+ with runqemu("core-image-minimal", ssh=False, runqemuparams='nographic') as qemu:
+ cmd = "locale -a"
+ status, output = qemu.run_serial(cmd)
+ # output must include fr_FR or fr_FR.UTF-8
+ self.assertEqual(status, 1, msg='locale test command failed: output: %s' % output)
+ self.assertIn("fr_FR", output, msg='locale -a test failed: output: %s' % output)
+
+ cmd = "localedef --list-archive -v"
+ status, output = qemu.run_serial(cmd)
+ # output must include fr_FR.utf8
+ self.assertEqual(status, 1, msg='localedef test command failed: output: %s' % output)
+ self.assertIn("fr_FR.utf8", output, msg='localedef test failed: output: %s' % output)
+
+ def test_locales_on(self):
+ """
+ Summary: Test the locales are generated
+ Expected: 1. Check the locale exists in the locale-archive
+ 2. Check the locale exists for glibc
+ 3. Check the locale can be generated
+ Product: oe-core
+ Author: Louis Rannou <lrannou@baylibre.com>
+ AutomatedBy: Louis Rannou <lrannou@baylibre.com>
+ """
+ self.run_locales_test(True)
+
+ def test_locales_off(self):
+ self.run_locales_test(False)
diff --git a/meta/lib/oeqa/selftest/cases/manifest.py b/meta/lib/oeqa/selftest/cases/manifest.py
index 146071934d..07a6c80489 100644
--- a/meta/lib/oeqa/selftest/cases/manifest.py
+++ b/meta/lib/oeqa/selftest/cases/manifest.py
@@ -1,8 +1,13 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
import os
from oeqa.selftest.case import OESelftestTestCase
-from oeqa.utils.commands import get_bb_var, get_bb_vars, bitbake
-from oeqa.core.decorator.oeid import OETestID
+from oeqa.utils.commands import get_bb_var, bitbake
class ManifestEntry:
'''A manifest item of a collection able to list missing packages'''
@@ -59,7 +64,6 @@ class VerifyManifest(OESelftestTestCase):
self.skipTest("{}: Cannot setup testing scenario"\
.format(self.classname))
- @OETestID(1380)
def test_SDK_manifest_entries(self):
'''Verifying the SDK manifest entries exist, this may take a build'''
@@ -84,11 +88,8 @@ class VerifyManifest(OESelftestTestCase):
try:
mdir = self.get_dir_from_bb_var('SDK_DEPLOY', self.buildtarget)
for k in d_target.keys():
- bb_vars = get_bb_vars(['SDK_NAME', 'SDK_VERSION'], self.buildtarget)
- mfilename[k] = "{}-toolchain-{}.{}.manifest".format(
- bb_vars['SDK_NAME'],
- bb_vars['SDK_VERSION'],
- k)
+ toolchain_outputname = get_bb_var('TOOLCHAIN_OUTPUTNAME', self.buildtarget)
+ mfilename[k] = "{}.{}.manifest".format(toolchain_outputname, k)
mpath[k] = os.path.join(mdir, mfilename[k])
if not os.path.isfile(mpath[k]):
self.logger.debug("{}: {} does not exist".format(
@@ -126,7 +127,6 @@ class VerifyManifest(OESelftestTestCase):
self.logger.info(msg)
self.fail(logmsg)
- @OETestID(1381)
def test_image_manifest_entries(self):
'''Verifying the image manifest entries exist'''
diff --git a/meta/lib/oeqa/selftest/cases/meta_ide.py b/meta/lib/oeqa/selftest/cases/meta_ide.py
new file mode 100644
index 0000000000..ffe0d2604d
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/meta_ide.py
@@ -0,0 +1,60 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.sdk.utils.sdkbuildproject import SDKBuildProject
+from oeqa.utils.commands import bitbake, get_bb_vars, runCmd
+from oeqa.core.decorator import OETestTag
+import tempfile
+import shutil
+
+@OETestTag("machine")
+class MetaIDE(OESelftestTestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super(MetaIDE, cls).setUpClass()
+ bitbake('meta-ide-support')
+ bitbake('build-sysroots -c build_native_sysroot')
+ bitbake('build-sysroots -c build_target_sysroot')
+ bb_vars = get_bb_vars(['MULTIMACH_TARGET_SYS', 'DEPLOY_DIR_IMAGE', 'COREBASE'])
+ cls.environment_script = 'environment-setup-%s' % bb_vars['MULTIMACH_TARGET_SYS']
+ cls.deploydir = bb_vars['DEPLOY_DIR_IMAGE']
+ cls.environment_script_path = '%s/%s' % (cls.deploydir, cls.environment_script)
+ cls.corebasedir = bb_vars['COREBASE']
+ cls.tmpdir_metaideQA = tempfile.mkdtemp(prefix='metaide')
+
+ @classmethod
+ def tearDownClass(cls):
+ shutil.rmtree(cls.tmpdir_metaideQA, ignore_errors=True)
+ super(MetaIDE, cls).tearDownClass()
+
+ def test_meta_ide_had_installed_meta_ide_support(self):
+ self.assertExists(self.environment_script_path)
+
+ def test_meta_ide_can_compile_c_program(self):
+ runCmd('cp %s/test.c %s' % (self.tc.files_dir, self.tmpdir_metaideQA))
+ runCmd("cd %s; . %s; $CC test.c -lm" % (self.tmpdir_metaideQA, self.environment_script_path))
+ compiled_file = '%s/a.out' % self.tmpdir_metaideQA
+ self.assertExists(compiled_file)
+
+ def test_meta_ide_can_build_cpio_project(self):
+ dl_dir = self.td.get('DL_DIR', None)
+ self.project = SDKBuildProject(self.tmpdir_metaideQA + "/cpio/", self.environment_script_path,
+ "https://ftp.gnu.org/gnu/cpio/cpio-2.15.tar.gz",
+ self.tmpdir_metaideQA, self.td['DATETIME'], dl_dir=dl_dir)
+ self.project.download_archive()
+ self.assertEqual(self.project.run_configure('$CONFIGURE_FLAGS'), 0,
+ msg="Running configure failed")
+ self.assertEqual(self.project.run_make(), 0,
+ msg="Running make failed")
+ self.assertEqual(self.project.run_install(), 0,
+ msg="Running make install failed")
+
+ def test_meta_ide_can_run_sdk_tests(self):
+ bitbake('-c populate_sysroot gtk+3')
+ bitbake('build-sysroots -c build_target_sysroot')
+ bitbake('-c testsdk meta-ide-support')
diff --git a/meta/lib/oeqa/selftest/cases/minidebuginfo.py b/meta/lib/oeqa/selftest/cases/minidebuginfo.py
new file mode 100644
index 0000000000..2919f07939
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/minidebuginfo.py
@@ -0,0 +1,44 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+import os
+import subprocess
+import tempfile
+import shutil
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runCmd
+
+
+class Minidebuginfo(OESelftestTestCase):
+ def test_minidebuginfo(self):
+ target_sys = get_bb_var("TARGET_SYS")
+ binutils = "binutils-cross-{}".format(get_bb_var("TARGET_ARCH"))
+
+ image = 'core-image-minimal'
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME', 'READELF'], image)
+
+ self.write_config("""
+DISTRO_FEATURES:append = " minidebuginfo"
+IMAGE_FSTYPES = "tar.bz2"
+""")
+ bitbake("{} {}:do_addto_recipe_sysroot".format(image, binutils))
+
+ native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", binutils)
+
+ # confirm that executables and shared libraries contain an ELF section
+ # ".gnu_debugdata" which stores minidebuginfo.
+ with tempfile.TemporaryDirectory(prefix = "unpackfs-") as unpackedfs:
+ filename = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "{}.tar.bz2".format(bb_vars['IMAGE_LINK_NAME']))
+ shutil.unpack_archive(filename, unpackedfs)
+
+ r = runCmd([bb_vars['READELF'], "-W", "-S", os.path.join(unpackedfs, "bin", "busybox")],
+ native_sysroot = native_sysroot, target_sys = target_sys)
+ self.assertIn(".gnu_debugdata", r.output)
+
+ r = runCmd([bb_vars['READELF'], "-W", "-S", os.path.join(unpackedfs, "lib", "libc.so.6")],
+ native_sysroot = native_sysroot, target_sys = target_sys)
+ self.assertIn(".gnu_debugdata", r.output)
+
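For reference, the .gnu_debugdata section asserted on above holds an xz-compressed ELF containing a reduced symbol table (the MiniDebugInfo format consumed by gdb). Extracting it could be sketched roughly as below; the objcopy/xz invocation is an assumption for illustration, not something this test performs:

    import subprocess

    def extract_minidebuginfo(binary, objcopy="objcopy"):
        xz_file = binary + ".gnu_debugdata.xz"
        # Dump the raw section, then decompress it into a standalone ELF
        # holding the reduced symbol table.
        subprocess.run([objcopy, "-O", "binary", "--only-section=.gnu_debugdata",
                        binary, xz_file], check=True)
        subprocess.run(["xz", "-d", xz_file], check=True)
        return binary + ".gnu_debugdata"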
diff --git a/meta/lib/oeqa/selftest/cases/multiconfig.py b/meta/lib/oeqa/selftest/cases/multiconfig.py
new file mode 100644
index 0000000000..f509cbf607
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/multiconfig.py
@@ -0,0 +1,87 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import textwrap
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+
+class MultiConfig(OESelftestTestCase):
+
+ def test_multiconfig(self):
+ """
+ Test that a simple multiconfig build works. This uses the mcextend class and the
+ multiconfig-image-packager test recipe to build a core-image-full-cmdline image which
+ contains a tiny core-image-minimal and a musl core-image-minimal, installed as packages.
+ """
+
+ config = """
+IMAGE_INSTALL:append:pn-core-image-full-cmdline = " multiconfig-image-packager-tiny multiconfig-image-packager-musl"
+BBMULTICONFIG = "tiny musl"
+"""
+ self.write_config(config)
+
+ muslconfig = """
+MACHINE = "qemux86-64"
+DISTRO = "poky"
+TCLIBC = "musl"
+TMPDIR = "${TOPDIR}/tmp-mc-musl"
+"""
+ self.write_config(muslconfig, 'musl')
+
+ tinyconfig = """
+MACHINE = "qemux86"
+DISTRO = "poky-tiny"
+TMPDIR = "${TOPDIR}/tmp-mc-tiny"
+"""
+ self.write_config(tinyconfig, 'tiny')
+
+ # Build a core-image-full-cmdline
+ bitbake('core-image-full-cmdline')
+
+ def test_multiconfig_reparse(self):
+ """
+ Test that changes to a multiconfig conf file are correctly detected and
+ cause a reparse/rebuild of a recipe.
+ """
+ config = textwrap.dedent('''\
+ MCTESTVAR = "test"
+ BBMULTICONFIG = "test"
+ ''')
+ self.write_config(config)
+
+ testconfig = textwrap.dedent('''\
+ MCTESTVAR:append = "1"
+ ''')
+ self.write_config(testconfig, 'test')
+
+ # Check 1) that the task executed and 2) that it output the correct
+ # value. Note "bitbake -e" is not used because it always reparses the
+ # recipe and we want to ensure that the automatic reparsing and parse
+ # caching is detected.
+ result = bitbake('mc:test:multiconfig-test-parse -c showvar')
+ self.assertIn('MCTESTVAR=test1', result.output.splitlines())
+
+ testconfig = textwrap.dedent('''\
+ MCTESTVAR:append = "2"
+ ''')
+ self.write_config(testconfig, 'test')
+
+ result = bitbake('mc:test:multiconfig-test-parse -c showvar')
+ self.assertIn('MCTESTVAR=test2', result.output.splitlines())
+
+ def test_multiconfig_inlayer(self):
+ """
+ Test that a multiconfig from meta-selftest works.
+ """
+
+ config = """
+BBMULTICONFIG = "muslmc"
+"""
+ self.write_config(config)
+
+ # Only a dry run is needed to check the config is present
+ bitbake('mc:muslmc:bash -n')
diff --git a/meta/lib/oeqa/selftest/cases/newlib.py b/meta/lib/oeqa/selftest/cases/newlib.py
new file mode 100644
index 0000000000..fe57aa51f2
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/newlib.py
@@ -0,0 +1,13 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+
+class NewlibTest(OESelftestTestCase):
+ def test_newlib(self):
+ self.write_config('TCLIBC = "newlib"')
+ bitbake("newlib libgloss")
diff --git a/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py b/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py
index 08675fd820..042ccdd2b4 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py
@@ -1,21 +1,27 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
import os
+import sys
from oeqa.selftest.case import OESelftestTestCase
import tempfile
+import operator
from oeqa.utils.commands import get_bb_var
-from oeqa.core.decorator.oeid import OETestID
class TestBlobParsing(OESelftestTestCase):
def setUp(self):
- import time
self.repo_path = tempfile.mkdtemp(prefix='selftest-buildhistory',
dir=get_bb_var('TOPDIR'))
try:
from git import Repo
self.repo = Repo.init(self.repo_path)
- except ImportError:
- self.skipTest('Python module GitPython is not present')
+ except ImportError as e:
+ self.skipTest('Python module GitPython is not present (%s) (%s)' % (e, sys.path))
self.test_file = "test"
self.var_map = {}
@@ -24,6 +30,16 @@ class TestBlobParsing(OESelftestTestCase):
import shutil
shutil.rmtree(self.repo_path)
+ @property
+ def heads_default(self):
+ """
+ Support repos defaulting to master or to main branch
+ """
+ try:
+ return self.repo.heads.main
+ except AttributeError:
+ return self.repo.heads.master
+
def commit_vars(self, to_add={}, to_remove = [], msg="A commit message"):
if len(to_add) == 0 and len(to_remove) == 0:
return
@@ -40,10 +56,9 @@ class TestBlobParsing(OESelftestTestCase):
self.repo.git.add("--all")
self.repo.git.commit(message=msg)
- @OETestID(1859)
def test_blob_to_dict(self):
"""
- Test convertion of git blobs to dictionary
+ Test conversion of git blobs to dictionary
"""
from oe.buildhistory_analysis import blob_to_dict
valuesmap = { "foo" : "1", "bar" : "2" }
@@ -53,7 +68,6 @@ class TestBlobParsing(OESelftestTestCase):
self.assertEqual(valuesmap, blob_to_dict(blob),
"commit was not translated correctly to dictionary")
- @OETestID(1860)
def test_compare_dict_blobs(self):
"""
Test comparison of dictionaries extracted from git blobs
@@ -63,10 +77,10 @@ class TestBlobParsing(OESelftestTestCase):
changesmap = { "foo-2" : ("2", "8"), "bar" : ("","4"), "bar-2" : ("","5")}
self.commit_vars(to_add = { "foo" : "1", "foo-2" : "2", "foo-3" : "3" })
- blob1 = self.repo.heads.master.commit.tree.blobs[0]
+ blob1 = self.heads_default.commit.tree.blobs[0]
self.commit_vars(to_add = { "foo-2" : "8", "bar" : "4", "bar-2" : "5" })
- blob2 = self.repo.heads.master.commit.tree.blobs[0]
+ blob2 = self.heads_default.commit.tree.blobs[0]
change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file),
blob1, blob2, False, False)
@@ -74,7 +88,6 @@ class TestBlobParsing(OESelftestTestCase):
var_changes = { x.fieldname : (x.oldvalue, x.newvalue) for x in change_records}
self.assertEqual(changesmap, var_changes, "Changes not reported correctly")
- @OETestID(1861)
def test_compare_dict_blobs_default(self):
"""
Test default values for comparison of git blob dictionaries
@@ -83,10 +96,10 @@ class TestBlobParsing(OESelftestTestCase):
defaultmap = { x : ("default", "1") for x in ["PKG", "PKGE", "PKGV", "PKGR"]}
self.commit_vars(to_add = { "foo" : "1" })
- blob1 = self.repo.heads.master.commit.tree.blobs[0]
+ blob1 = self.heads_default.commit.tree.blobs[0]
self.commit_vars(to_add = { "PKG" : "1", "PKGE" : "1", "PKGV" : "1", "PKGR" : "1" })
- blob2 = self.repo.heads.master.commit.tree.blobs[0]
+ blob2 = self.heads_default.commit.tree.blobs[0]
change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file),
blob1, blob2, False, False)
@@ -97,3 +110,48 @@ class TestBlobParsing(OESelftestTestCase):
var_changes[x.fieldname] = (oldvalue, x.newvalue)
self.assertEqual(defaultmap, var_changes, "Defaults not set properly")
+
+class TestFileListCompare(OESelftestTestCase):
+
+ def test_compare_file_lists(self):
+ # Test that a directory tree that moves location such as /lib/modules/5.4.40-yocto-standard -> /lib/modules/5.4.43-yocto-standard
+ # is correctly identified as a move
+ from oe.buildhistory_analysis import compare_file_lists, FileChange
+
+ with open(self.tc.files_dir + "/buildhistory_filelist1.txt", "r") as f:
+ filelist1 = f.readlines()
+ with open(self.tc.files_dir + "/buildhistory_filelist2.txt", "r") as f:
+ filelist2 = f.readlines()
+
+ expectedResult = [
+ '/lib/libcap.so.2 changed symlink target from libcap.so.2.33 to libcap.so.2.34',
+ '/lib/libcap.so.2.33 moved to /lib/libcap.so.2.34',
+ '/lib/modules/5.4.40-yocto-standard moved to /lib/modules/5.4.43-yocto-standard',
+ '/lib/modules/5.4.43-yocto-standard/modules.builtin.alias.bin was added',
+ '/usr/bin/gawk-5.0.1 moved to /usr/bin/gawk-5.1.0',
+ '/usr/lib/libbtrfsutil.so changed symlink target from libbtrfsutil.so.1.1.1 to libbtrfsutil.so.1.2.0',
+ '/usr/lib/libbtrfsutil.so.1 changed symlink target from libbtrfsutil.so.1.1.1 to libbtrfsutil.so.1.2.0',
+ '/usr/lib/libbtrfsutil.so.1.1.1 moved to /usr/lib/libbtrfsutil.so.1.2.0',
+ '/usr/lib/libkmod.so changed symlink target from libkmod.so.2.3.4 to libkmod.so.2.3.5',
+ '/usr/lib/libkmod.so.2 changed symlink target from libkmod.so.2.3.4 to libkmod.so.2.3.5',
+ '/usr/lib/libkmod.so.2.3.4 moved to /usr/lib/libkmod.so.2.3.5',
+ '/usr/lib/libpixman-1.so.0 changed symlink target from libpixman-1.so.0.38.4 to libpixman-1.so.0.40.0',
+ '/usr/lib/libpixman-1.so.0.38.4 moved to /usr/lib/libpixman-1.so.0.40.0',
+ '/usr/lib/opkg/alternatives/rtcwake was added',
+ '/usr/lib/python3.8/site-packages/PyGObject-3.34.0.egg-info moved to /usr/lib/python3.8/site-packages/PyGObject-3.36.1.egg-info',
+ '/usr/lib/python3.8/site-packages/btrfsutil-1.1.1-py3.8.egg-info moved to /usr/lib/python3.8/site-packages/btrfsutil-1.2.0-py3.8.egg-info',
+ '/usr/lib/python3.8/site-packages/pycairo-1.19.0.egg-info moved to /usr/lib/python3.8/site-packages/pycairo-1.19.1.egg-info',
+ '/usr/sbin/rtcwake changed type from file to symlink',
+ '/usr/sbin/rtcwake changed permissions from rwxr-xr-x to rwxrwxrwx',
+ '/usr/sbin/rtcwake changed symlink target from None to /usr/sbin/rtcwake.util-linux',
+ '/usr/sbin/rtcwake.util-linux was added'
+ ]
+
+ result = compare_file_lists(filelist1, filelist2)
+ rendered = []
+ for entry in sorted(result, key=operator.attrgetter("path")):
+ rendered.append(str(entry))
+
+ self.maxDiff = None
+ self.assertCountEqual(rendered, expectedResult)
+
diff --git a/meta/lib/oeqa/selftest/cases/oelib/elf.py b/meta/lib/oeqa/selftest/cases/oelib/elf.py
index 74ee6a11cc..7bf550b6fd 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/elf.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/elf.py
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
from unittest.case import TestCase
import oe.qa
@@ -15,7 +21,8 @@ class TestElf(TestCase):
self.assertEqual(oe.qa.elf_machine_to_string(0x32), "IA-64")
self.assertEqual(oe.qa.elf_machine_to_string(0x3E), "x86-64")
self.assertEqual(oe.qa.elf_machine_to_string(0xB7), "AArch64")
+ self.assertEqual(oe.qa.elf_machine_to_string(0xF7), "BPF")
- self.assertEqual(oe.qa.elf_machine_to_string(0x00), "Unknown (0)")
+ self.assertEqual(oe.qa.elf_machine_to_string(0x00), "Unset")
self.assertEqual(oe.qa.elf_machine_to_string(0xDEADBEEF), "Unknown (3735928559)")
self.assertEqual(oe.qa.elf_machine_to_string("foobar"), "Unknown ('foobar')")
diff --git a/meta/lib/oeqa/selftest/cases/oelib/license.py b/meta/lib/oeqa/selftest/cases/oelib/license.py
index d7f91fb2f4..5eea12e761 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/license.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/license.py
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
from unittest.case import TestCase
import oe.license
@@ -11,11 +17,11 @@ class SeenVisitor(oe.license.LicenseVisitor):
class TestSingleLicense(TestCase):
licenses = [
- "GPLv2",
- "LGPL-2.0",
- "Artistic",
+ "GPL-2.0-only",
+ "LGPL-2.0-only",
+ "Artistic-1.0",
"MIT",
- "GPLv3+",
+ "GPL-3.0-or-later",
"FOO_BAR",
]
invalid_licenses = ["GPL/BSD"]
@@ -63,9 +69,9 @@ class TestComplexCombinations(TestSimpleCombinations):
"FOO & (BAR | BAZ)&MOO": ["FOO", "BAR", "MOO"],
"(ALPHA|(BETA&THETA)|OMEGA)&DELTA": ["OMEGA", "DELTA"],
"((ALPHA|BETA)&FOO)|BAZ": ["BETA", "FOO"],
- "(GPL-2.0|Proprietary)&BSD-4-clause&MIT": ["GPL-2.0", "BSD-4-clause", "MIT"],
+ "(GPL-2.0-only|Proprietary)&BSD-4-clause&MIT": ["GPL-2.0-only", "BSD-4-clause", "MIT"],
}
- preferred = ["BAR", "OMEGA", "BETA", "GPL-2.0"]
+ preferred = ["BAR", "OMEGA", "BETA", "GPL-2.0-only"]
class TestIsIncluded(TestCase):
tests = {
@@ -83,12 +89,12 @@ class TestIsIncluded(TestCase):
[True, ["BAR", "FOOBAR"]],
("(FOO | BAR) & FOOBAR | BAZ & MOO & BARFOO", None, "FOO"):
[True, ["BAZ", "MOO", "BARFOO"]],
- ("GPL-3.0 & GPL-2.0 & LGPL-2.1 | Proprietary", None, None):
- [True, ["GPL-3.0", "GPL-2.0", "LGPL-2.1"]],
- ("GPL-3.0 & GPL-2.0 & LGPL-2.1 | Proprietary", None, "GPL-3.0"):
+ ("GPL-3.0-or-later & GPL-2.0-only & LGPL-2.1-only | Proprietary", None, None):
+ [True, ["GPL-3.0-or-later", "GPL-2.0-only", "LGPL-2.1-only"]],
+ ("GPL-3.0-or-later & GPL-2.0-only & LGPL-2.1-only | Proprietary", None, "GPL-3.0-or-later"):
[True, ["Proprietary"]],
- ("GPL-3.0 & GPL-2.0 & LGPL-2.1 | Proprietary", None, "GPL-3.0 Proprietary"):
- [False, ["GPL-3.0"]]
+ ("GPL-3.0-or-later & GPL-2.0-only & LGPL-2.1-only | Proprietary", None, "GPL-3.0-or-later Proprietary"):
+ [False, ["GPL-3.0-or-later"]]
}
def test_tests(self):
diff --git a/meta/lib/oeqa/selftest/cases/oelib/path.py b/meta/lib/oeqa/selftest/cases/oelib/path.py
index 75a27c06f7..b963e447e3 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/path.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/path.py
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
from unittest.case import TestCase
import oe, oe.path
import tempfile
@@ -38,13 +44,6 @@ class TestRealPath(TestCase):
( "b/test", errno.ENOENT ),
]
- def __del__(self):
- try:
- #os.system("tree -F %s" % self.tmpdir)
- shutil.rmtree(self.tmpdir)
- except:
- pass
-
def setUp(self):
self.tmpdir = tempfile.mkdtemp(prefix = "oe-test_path")
self.root = os.path.join(self.tmpdir, "R")
@@ -59,6 +58,9 @@ class TestRealPath(TestCase):
for l in self.LINKS:
os.symlink(l[1], os.path.join(self.root, l[0]))
+ def tearDown(self):
+ shutil.rmtree(self.tmpdir)
+
def __realpath(self, file, use_physdir, assume_dir = True):
return oe.path.realpath(os.path.join(self.root, file), self.root,
use_physdir, assume_dir = assume_dir)
diff --git a/meta/lib/oeqa/selftest/cases/oelib/types.py b/meta/lib/oeqa/selftest/cases/oelib/types.py
index 6b53aa64e5..58318b18b2 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/types.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/types.py
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
from unittest.case import TestCase
from oe.maketype import create
diff --git a/meta/lib/oeqa/selftest/cases/oelib/utils.py b/meta/lib/oeqa/selftest/cases/oelib/utils.py
index 9fb6c1576e..0cb46425a0 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/utils.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/utils.py
@@ -1,5 +1,14 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import sys
from unittest.case import TestCase
-from oe.utils import packages_filter_out_system, trim_version
+from contextlib import contextmanager
+from io import StringIO
+from oe.utils import packages_filter_out_system, trim_version, multiprocess_launch
class TestPackagesFilterOutSystem(TestCase):
def test_filter(self):
@@ -49,3 +58,47 @@ class TestTrimVersion(TestCase):
self.assertEqual(trim_version("1.2.3", 2), "1.2")
self.assertEqual(trim_version("1.2.3", 3), "1.2.3")
self.assertEqual(trim_version("1.2.3", 4), "1.2.3")
+
+
+class TestMultiprocessLaunch(TestCase):
+
+ def test_multiprocesslaunch(self):
+ import bb
+
+ def testfunction(item, d):
+ if item == "2":
+ raise KeyError("Invalid number %s" % item)
+ return "Found %s" % item
+
+ def dummyerror(msg):
+ print("ERROR: %s" % msg)
+ def dummyfatal(msg):
+ print("ERROR: %s" % msg)
+ raise bb.BBHandledException()
+
+ @contextmanager
+ def captured_output():
+ new_out, new_err = StringIO(), StringIO()
+ old_out, old_err = sys.stdout, sys.stderr
+ try:
+ sys.stdout, sys.stderr = new_out, new_err
+ yield sys.stdout, sys.stderr
+ finally:
+ sys.stdout, sys.stderr = old_out, old_err
+
+ d = bb.data_smart.DataSmart()
+ bb.error = dummyerror
+ bb.fatal = dummyfatal
+
+ # Assert the function returns the right results
+ result = multiprocess_launch(testfunction, ["3", "4", "5", "6"], d, extraargs=(d,))
+ self.assertIn("Found 3", result)
+ self.assertIn("Found 4", result)
+ self.assertIn("Found 5", result)
+ self.assertIn("Found 6", result)
+ self.assertEqual(len(result), 4)
+
+ # Assert the function prints exceptions
+ with captured_output() as (out, err):
+ self.assertRaises(bb.BBHandledException, multiprocess_launch, testfunction, ["1", "2", "3", "4", "5", "6"], d, extraargs=(d,))
+ self.assertIn("KeyError: 'Invalid number 2'", out.getvalue())
diff --git a/meta/lib/oeqa/selftest/cases/oescripts.py b/meta/lib/oeqa/selftest/cases/oescripts.py
index 1ee753763e..f69efccfee 100644
--- a/meta/lib/oeqa/selftest/cases/oescripts.py
+++ b/meta/lib/oeqa/selftest/cases/oescripts.py
@@ -1,15 +1,194 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import shutil
+import importlib
+import unittest
from oeqa.selftest.case import OESelftestTestCase
from oeqa.selftest.cases.buildhistory import BuildhistoryBase
-from oeqa.utils.commands import Command, runCmd, bitbake, get_bb_var, get_test_layer
-from oeqa.core.decorator.oeid import OETestID
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var
+from oeqa.utils import CommandError
class BuildhistoryDiffTests(BuildhistoryBase):
- @OETestID(295)
def test_buildhistory_diff(self):
target = 'xcursor-transparent-theme'
self.run_buildhistory_operation(target, target_config="PR = \"r1\"", change_bh_location=True)
self.run_buildhistory_operation(target, target_config="PR = \"r0\"", change_bh_location=False, expect_error=True)
+ result = runCmd("oe-pkgdata-util read-value PKGV %s" % target)
+ pkgv = result.output.rstrip()
result = runCmd("buildhistory-diff -p %s" % get_bb_var('BUILDHISTORY_DIR'))
- expected_output = 'PR changed from "r1" to "r0"'
- self.assertTrue(expected_output in result.output, msg="Did not find expected output: %s" % result.output)
+ expected_endlines = [
+ "xcursor-transparent-theme-dev: RRECOMMENDS: removed \"xcursor-transparent-theme (['= %s-r1'])\", added \"xcursor-transparent-theme (['= %s-r0'])\"" % (pkgv, pkgv),
+ "xcursor-transparent-theme-staticdev: RDEPENDS: removed \"xcursor-transparent-theme-dev (['= %s-r1'])\", added \"xcursor-transparent-theme-dev (['= %s-r0'])\"" % (pkgv, pkgv)
+ ]
+ for line in result.output.splitlines():
+ for el in expected_endlines:
+ if line.endswith(el):
+ expected_endlines.remove(el)
+ break
+ else:
+ self.fail('Unexpected line:\n%s\nExpected line endings:\n %s' % (line, '\n '.join(expected_endlines)))
+ if expected_endlines:
+ self.fail('Missing expected line endings:\n %s' % '\n '.join(expected_endlines))
+
+@unittest.skipUnless(importlib.util.find_spec("cairo"), "Python cairo module is not present")
+class OEPybootchartguyTests(OESelftestTestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+ bitbake("core-image-minimal -c rootfs -f")
+ cls.tmpdir = get_bb_var('TMPDIR')
+ cls.buildstats = cls.tmpdir + "/buildstats/" + sorted(os.listdir(cls.tmpdir + "/buildstats"))[-1]
+ cls.scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')
+
+ def test_pybootchartguy_help(self):
+ runCmd('%s/pybootchartgui/pybootchartgui.py --help' % self.scripts_dir)
+
+ def test_pybootchartguy_to_generate_build_png_output(self):
+ runCmd('%s/pybootchartgui/pybootchartgui.py %s -o %s/charts -f png' % (self.scripts_dir, self.buildstats, self.tmpdir))
+ self.assertTrue(os.path.exists(self.tmpdir + "/charts.png"))
+
+ def test_pybootchartguy_to_generate_build_svg_output(self):
+ runCmd('%s/pybootchartgui/pybootchartgui.py %s -o %s/charts -f svg' % (self.scripts_dir, self.buildstats, self.tmpdir))
+ self.assertTrue(os.path.exists(self.tmpdir + "/charts.svg"))
+
+ def test_pybootchartguy_to_generate_build_pdf_output(self):
+ runCmd('%s/pybootchartgui/pybootchartgui.py %s -o %s/charts -f pdf' % (self.scripts_dir, self.buildstats, self.tmpdir))
+ self.assertTrue(os.path.exists(self.tmpdir + "/charts.pdf"))
+
+
+class OEGitproxyTests(OESelftestTestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+ cls.scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')
+
+ def test_oegitproxy_help(self):
+ try:
+ res = runCmd('%s/oe-git-proxy --help' % self.scripts_dir, assert_error=False)
+ self.assertTrue(False)
+ except CommandError as e:
+ self.assertEqual(2, e.retcode)
+
+ def run_oegitproxy(self, custom_shell=None):
+ os.environ['SOCAT'] = shutil.which("echo")
+ os.environ['ALL_PROXY'] = "https://proxy.example.com:3128"
+ os.environ['NO_PROXY'] = "*.example.com,.no-proxy.org,192.168.42.0/24,127.*.*.*"
+
+ if custom_shell is None:
+ prefix = ''
+ else:
+ prefix = custom_shell + ' '
+
+ # outside, use the proxy
+ res = runCmd('%s%s/oe-git-proxy host.outside-example.com 9418' %
+ (prefix,self.scripts_dir))
+ self.assertIn('PROXY:', res.output)
+ # match with wildcard suffix
+ res = runCmd('%s%s/oe-git-proxy host.example.com 9418' %
+ (prefix, self.scripts_dir))
+ self.assertIn('TCP:', res.output)
+ # match just suffix
+ res = runCmd('%s%s/oe-git-proxy host.no-proxy.org 9418' %
+ (prefix, self.scripts_dir))
+ self.assertIn('TCP:', res.output)
+ # match IP subnet
+ res = runCmd('%s%s/oe-git-proxy 192.168.42.42 9418' %
+ (prefix, self.scripts_dir))
+ self.assertIn('TCP:', res.output)
+ # match IP wildcard
+ res = runCmd('%s%s/oe-git-proxy 127.1.2.3 9418' %
+ (prefix, self.scripts_dir))
+ self.assertIn('TCP:', res.output)
+
+ # test that * globbing is off
+ os.environ['NO_PROXY'] = "*"
+ res = runCmd('%s%s/oe-git-proxy host.example.com 9418' %
+ (prefix, self.scripts_dir))
+ self.assertIn('TCP:', res.output)
+
+ def test_oegitproxy_proxy(self):
+ self.run_oegitproxy()
+
+ def test_oegitproxy_proxy_dash(self):
+ dash = shutil.which("dash")
+ if dash is None:
+ self.skipTest("No \"dash\" found on test system.")
+ self.run_oegitproxy(custom_shell=dash)
+
+class OeRunNativeTest(OESelftestTestCase):
+ def test_oe_run_native(self):
+ bitbake("qemu-helper-native -c addto_recipe_sysroot")
+ result = runCmd("oe-run-native qemu-helper-native qemu-oe-bridge-helper --help")
+ self.assertIn("Helper function to find and exec qemu-bridge-helper", result.output)
+
+class OEListPackageconfigTests(OESelftestTestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+ cls.scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')
+
+ #oe-core.scripts.List_all_the_PACKAGECONFIG's_flags
+ def check_endlines(self, results, expected_endlines):
+ for line in results.output.splitlines():
+ for el in expected_endlines:
+ if line and line.split()[0] == el.split()[0] and \
+ ' '.join(sorted(el.split())) in ' '.join(sorted(line.split())):
+ expected_endlines.remove(el)
+ break
+
+ if expected_endlines:
+ self.fail('Missing expected listings:\n %s' % '\n '.join(expected_endlines))
+
+
+ #oe-core.scripts.List_all_the_PACKAGECONFIG's_flags
+ def test_packageconfig_flags_help(self):
+ runCmd('%s/contrib/list-packageconfig-flags.py -h' % self.scripts_dir)
+
+ def test_packageconfig_flags_default(self):
+ results = runCmd('%s/contrib/list-packageconfig-flags.py' % self.scripts_dir)
+ expected_endlines = []
+ expected_endlines.append("RECIPE NAME PACKAGECONFIG FLAGS")
+ expected_endlines.append("pinentry gtk2 ncurses qt secret")
+ expected_endlines.append("tar acl selinux")
+
+ self.check_endlines(results, expected_endlines)
+
+
+ def test_packageconfig_flags_option_flags(self):
+ results = runCmd('%s/contrib/list-packageconfig-flags.py -f' % self.scripts_dir)
+ expected_endlines = []
+ expected_endlines.append("PACKAGECONFIG FLAG RECIPE NAMES")
+ expected_endlines.append("qt nativesdk-pinentry pinentry pinentry-native")
+ expected_endlines.append("secret nativesdk-pinentry pinentry pinentry-native")
+
+ self.check_endlines(results, expected_endlines)
+
+ def test_packageconfig_flags_option_all(self):
+ results = runCmd('%s/contrib/list-packageconfig-flags.py -a' % self.scripts_dir)
+ expected_endlines = []
+ expected_endlines.append("pinentry-1.2.1")
+ expected_endlines.append("PACKAGECONFIG ncurses")
+ expected_endlines.append("PACKAGECONFIG[qt] --enable-pinentry-qt, --disable-pinentry-qt, qtbase-native qtbase")
+ expected_endlines.append("PACKAGECONFIG[gtk2] --enable-pinentry-gtk2, --disable-pinentry-gtk2, gtk+ glib-2.0")
+ expected_endlines.append("PACKAGECONFIG[ncurses] --enable-ncurses --with-ncurses-include-dir=${STAGING_INCDIR}, --disable-ncurses, ncurses")
+ expected_endlines.append("PACKAGECONFIG[secret] --enable-libsecret, --disable-libsecret, libsecret")
+
+ self.check_endlines(results, expected_endlines)
+
+ def test_packageconfig_flags_options_preferred_only(self):
+ results = runCmd('%s/contrib/list-packageconfig-flags.py -p' % self.scripts_dir)
+ expected_endlines = []
+ expected_endlines.append("RECIPE NAME PACKAGECONFIG FLAGS")
+ expected_endlines.append("pinentry gtk2 ncurses qt secret")
+
+ self.check_endlines(results, expected_endlines)
+
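The comments in run_oegitproxy above enumerate the NO_PROXY forms the script must honour: a wildcard suffix, a bare domain suffix, a CIDR subnet, an IP wildcard, and a lone "*". A compact sketch of that matching logic (illustrative only, not the oe-git-proxy implementation) is:

    import fnmatch
    import ipaddress

    def bypass_proxy(host, no_proxy):
        for entry in no_proxy.split(","):
            if entry.startswith("."):                  # bare suffix, e.g. .no-proxy.org
                if host.endswith(entry):
                    return True
            elif "/" in entry:                         # CIDR subnet, e.g. 192.168.42.0/24
                try:
                    if ipaddress.ip_address(host) in ipaddress.ip_network(entry):
                        return True
                except ValueError:
                    pass                               # host is not an IP address
            elif fnmatch.fnmatchcase(host, entry):     # wildcards, e.g. *.example.com or *
                return True
        return False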
diff --git a/meta/lib/oeqa/selftest/cases/overlayfs.py b/meta/lib/oeqa/selftest/cases/overlayfs.py
new file mode 100644
index 0000000000..e31063567b
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/overlayfs.py
@@ -0,0 +1,502 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, runqemu
+from oeqa.core.decorator import OETestTag
+from oeqa.core.decorator.data import skipIfNotMachine
+
+def getline_qemu(out, line):
+ for l in out.split('\n'):
+ if line in l:
+ return l
+
+def getline(res, line):
+ return getline_qemu(res.output, line)
+
+class OverlayFSTests(OESelftestTestCase):
+ """Overlayfs class usage tests"""
+
+ def add_overlay_conf_to_machine(self):
+ machine_inc = """
+OVERLAYFS_MOUNT_POINT[mnt-overlay] = "/mnt/overlay"
+"""
+ self.set_machine_config(machine_inc)
+
+ def test_distro_features_missing(self):
+ """
+ Summary: Check that required DISTRO_FEATURES are set
+ Expected: Fail when either systemd or overlayfs is not in DISTRO_FEATURES
+ Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
+ """
+
+ config = """
+IMAGE_INSTALL:append = " overlayfs-user"
+"""
+ overlayfs_recipe_append = """
+inherit overlayfs
+"""
+ self.write_config(config)
+ self.add_overlay_conf_to_machine()
+ self.write_recipeinc('overlayfs-user', overlayfs_recipe_append)
+
+ res = bitbake('core-image-minimal', ignore_status=True)
+ line = getline(res, "overlayfs-user was skipped: missing required distro features")
+ self.assertTrue("overlayfs" in res.output, msg=res.output)
+ self.assertTrue("systemd" in res.output, msg=res.output)
+ self.assertTrue("ERROR: Required build target 'core-image-minimal' has no buildable providers." in res.output, msg=res.output)
+
+ def test_not_all_units_installed(self):
+ """
+ Summary: Test QA check that we have required mount units in the image
+ Expected: Fail because mount unit for overlay partition is not installed
+ Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
+ """
+
+ config = """
+IMAGE_INSTALL:append = " overlayfs-user"
+DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
+"""
+
+ self.write_config(config)
+ self.add_overlay_conf_to_machine()
+
+ res = bitbake('core-image-minimal', ignore_status=True)
+ line = getline(res, " Mount path /mnt/overlay not found in fstab and unit mnt-overlay.mount not found in systemd unit directories")
+ self.assertTrue(line and line.startswith("WARNING:"), msg=res.output)
+ line = getline(res, "Not all mount paths and units are installed in the image")
+ self.assertTrue(line and line.startswith("ERROR:"), msg=res.output)
+
+ def test_not_all_units_installed_but_qa_skipped(self):
+ """
+ Summary: Test skipping the QA check
+ Expected: Image is created successfully
+ Author: Claudius Heine <ch@denx.de>
+ """
+
+ config = """
+IMAGE_INSTALL:append = " overlayfs-user"
+DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
+OVERLAYFS_QA_SKIP[mnt-overlay] = "mount-configured"
+"""
+
+ self.write_config(config)
+ self.add_overlay_conf_to_machine()
+
+ bitbake('core-image-minimal')
+
+ def test_mount_unit_not_set(self):
+ """
+ Summary: Test whether mount unit was set properly
+ Expected: Fail because mount unit was not set
+ Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
+ """
+
+ config = """
+IMAGE_INSTALL:append = " overlayfs-user"
+DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
+"""
+
+ self.write_config(config)
+
+ res = bitbake('core-image-minimal', ignore_status=True)
+ line = getline(res, "A recipe uses overlayfs class but there is no OVERLAYFS_MOUNT_POINT set in your MACHINE configuration")
+ self.assertTrue(line and line.startswith("Parsing recipes...ERROR:"), msg=res.output)
+
+ def test_wrong_mount_unit_set(self):
+ """
+ Summary: Test whether mount unit was set properly
+ Expected: Fail because not the correct flag used for mount unit
+ Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
+ """
+
+ config = """
+IMAGE_INSTALL:append = " overlayfs-user"
+DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
+"""
+
+ wrong_machine_config = """
+OVERLAYFS_MOUNT_POINT[usr-share-overlay] = "/usr/share/overlay"
+"""
+
+ self.write_config(config)
+ self.set_machine_config(wrong_machine_config)
+
+ res = bitbake('core-image-minimal', ignore_status=True)
+ line = getline(res, "Missing required mount point for OVERLAYFS_MOUNT_POINT[mnt-overlay] in your MACHINE configuration")
+ self.assertTrue(line and line.startswith("Parsing recipes...ERROR:"), msg=res.output)
+
+ def _test_correct_image(self, recipe, data):
+ """
+ Summary: Check that we can create an image when all parameters are
+ set correctly
+ Expected: Image is created successfully
+ Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
+ """
+
+ config = """
+IMAGE_INSTALL:append = " overlayfs-user systemd-machine-units"
+DISTRO_FEATURES:append = " overlayfs"
+
+# Use systemd as init manager
+INIT_MANAGER = "systemd"
+
+# enable overlayfs in the kernel
+KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
+"""
+
+ overlayfs_recipe_append = """
+OVERLAYFS_WRITABLE_PATHS[mnt-overlay] += "/usr/share/another-overlay-mount"
+
+SYSTEMD_SERVICE:${PN} += " \
+ my-application.service \
+"
+
+do_install:append() {
+ install -d ${D}${systemd_system_unitdir}
+ cat <<EOT > ${D}${systemd_system_unitdir}/my-application.service
+[Unit]
+Description=Sample application start-up unit
+After=overlayfs-user-overlays.service
+Requires=overlayfs-user-overlays.service
+
+[Service]
+Type=oneshot
+ExecStart=/bin/true
+RemainAfterExit=true
+
+[Install]
+WantedBy=multi-user.target
+EOT
+}
+"""
+
+ self.write_config(config)
+ self.add_overlay_conf_to_machine()
+ self.write_recipeinc(recipe, data)
+ self.write_recipeinc('overlayfs-user', overlayfs_recipe_append)
+
+ bitbake('core-image-minimal')
+
+ with runqemu('core-image-minimal') as qemu:
+ # Check that application service started
+ status, output = qemu.run_serial("systemctl status my-application")
+ self.assertTrue("active (exited)" in output, msg=output)
+
+ # Check that overlay mounts are dependencies of our application unit
+ status, output = qemu.run_serial("systemctl list-dependencies my-application")
+ self.assertTrue("overlayfs-user-overlays.service" in output, msg=output)
+
+ status, output = qemu.run_serial("systemctl list-dependencies overlayfs-user-overlays")
+ self.assertTrue("usr-share-another\\x2doverlay\\x2dmount.mount" in output, msg=output)
+ self.assertTrue("usr-share-my\\x2dapplication.mount" in output, msg=output)
+
+ # Check that we have /mnt/overlay fs mounted as tmpfs and
+ # /usr/share/my-application as an overlay (see overlayfs-user recipe)
+ status, output = qemu.run_serial("/bin/mount -t tmpfs,overlay")
+
+ line = getline_qemu(output, "on /mnt/overlay")
+ self.assertTrue(line and line.startswith("tmpfs"), msg=output)
+
+ line = getline_qemu(output, "upperdir=/mnt/overlay/upper/usr/share/my-application")
+ self.assertTrue(line and line.startswith("overlay"), msg=output)
+
+ line = getline_qemu(output, "upperdir=/mnt/overlay/upper/usr/share/another-overlay-mount")
+ self.assertTrue(line and line.startswith("overlay"), msg=output)
+
+ @OETestTag("runqemu")
+ def test_correct_image_fstab(self):
+ """
+ Summary: Check that we can create an image when all parameters are
+ set correctly via fstab
+ Expected: Image is created successfully
+ Author: Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>
+ """
+
+ base_files_append = """
+do_install:append() {
+ cat <<EOT >> ${D}${sysconfdir}/fstab
+tmpfs /mnt/overlay tmpfs mode=1777,strictatime,nosuid,nodev 0 0
+EOT
+}
+"""
+
+ self._test_correct_image('base-files', base_files_append)
+
+ @OETestTag("runqemu")
+ def test_correct_image_unit(self):
+ """
+ Summary: Check that we can create an image when all parameters are
+ set correctly via mount unit
+ Expected: Image is created successfully
+ Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
+ """
+
+ systemd_machine_unit_append = """
+SYSTEMD_SERVICE:${PN} += " \
+ mnt-overlay.mount \
+"
+
+do_install:append() {
+ install -d ${D}${systemd_system_unitdir}
+ cat <<EOT > ${D}${systemd_system_unitdir}/mnt-overlay.mount
+[Unit]
+Description=Tmpfs directory
+DefaultDependencies=no
+
+[Mount]
+What=tmpfs
+Where=/mnt/overlay
+Type=tmpfs
+Options=mode=1777,strictatime,nosuid,nodev
+
+[Install]
+WantedBy=multi-user.target
+EOT
+}
+
+"""
+
+ self._test_correct_image('systemd-machine-units', systemd_machine_unit_append)
+
+@OETestTag("runqemu")
+class OverlayFSEtcRunTimeTests(OESelftestTestCase):
+ """overlayfs-etc class tests"""
+
+ def test_all_required_variables_set(self):
+ """
+ Summary: Check that required variables are set
+ Expected: Fail when any of the required variables is missing
+ Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
+ """
+
+ configBase = """
+# Use systemd as init manager
+INIT_MANAGER = "systemd"
+
+# enable overlayfs in the kernel
+KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
+
+# Image configuration for overlayfs-etc
+EXTRA_IMAGE_FEATURES += "overlayfs-etc"
+IMAGE_FEATURES:remove = "package-management"
+"""
+ configMountPoint = """
+OVERLAYFS_ETC_MOUNT_POINT = "/data"
+"""
+ configDevice = """
+OVERLAYFS_ETC_DEVICE = "/dev/mmcblk0p1"
+"""
+
+ self.write_config(configBase)
+ res = bitbake('core-image-minimal', ignore_status=True)
+ line = getline(res, "OVERLAYFS_ETC_MOUNT_POINT must be set in your MACHINE configuration")
+ self.assertTrue(line, msg=res.output)
+
+ self.append_config(configMountPoint)
+ res = bitbake('core-image-minimal', ignore_status=True)
+ line = getline(res, "OVERLAYFS_ETC_DEVICE must be set in your MACHINE configuration")
+ self.assertTrue(line, msg=res.output)
+
+ self.append_config(configDevice)
+ res = bitbake('core-image-minimal', ignore_status=True)
+ line = getline(res, "OVERLAYFS_ETC_FSTYPE should contain a valid file system type on /dev/mmcblk0p1")
+ self.assertTrue(line, msg=res.output)
+
+ def test_image_feature_conflict(self):
+ """
+ Summary: Overlayfs-etc is not allowed to be used with package-management
+ Expected: Feature conflict
+ Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
+ """
+
+ config = """
+# Use systemd as init manager
+INIT_MANAGER = "systemd"
+
+# enable overlayfs in the kernel
+KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
+EXTRA_IMAGE_FEATURES += "overlayfs-etc"
+EXTRA_IMAGE_FEATURES += "package-management"
+"""
+
+ self.write_config(config)
+
+ res = bitbake('core-image-minimal', ignore_status=True)
+ line = getline(res, "contains conflicting IMAGE_FEATURES")
+ self.assertTrue("overlayfs-etc" in res.output, msg=res.output)
+ self.assertTrue("package-management" in res.output, msg=res.output)
+
+ # https://bugzilla.yoctoproject.org/show_bug.cgi?id=14963
+ @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
+ def test_image_feature_is_missing(self):
+ """
+ Summary: Overlayfs-etc class is not applied when image feature is not set
+ Expected: Image is created successfully but /etc is not an overlay
+ Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
+ """
+
+ config = """
+# Use systemd as init manager
+INIT_MANAGER = "systemd"
+
+# enable overlayfs in the kernel
+KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
+
+IMAGE_FSTYPES += "wic"
+WKS_FILE = "overlayfs_etc.wks.in"
+
+EXTRA_IMAGE_FEATURES += "read-only-rootfs"
+# Image configuration for overlayfs-etc
+OVERLAYFS_ETC_MOUNT_POINT = "/data"
+OVERLAYFS_ETC_DEVICE = "/dev/sda3"
+OVERLAYFS_ROOTFS_TYPE = "ext4"
+"""
+
+ self.write_config(config)
+
+ bitbake('core-image-minimal')
+
+ with runqemu('core-image-minimal', image_fstype='wic') as qemu:
+ status, output = qemu.run_serial("/bin/mount")
+
+ line = getline_qemu(output, "upperdir=/data/overlay-etc/upper")
+ self.assertFalse(line, msg=output)
+
+ @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
+ def test_sbin_init_preinit(self):
+ self.run_sbin_init(False, "ext4")
+
+ @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
+ def test_sbin_init_original(self):
+ self.run_sbin_init(True, "ext4")
+
+ @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
+ def test_sbin_init_read_only(self):
+ self.run_sbin_init(True, "squashfs")
+
+ def run_sbin_init(self, origInit, rootfsType):
+ """
+ Summary: Confirm we can replace original init and mount overlay on top of /etc
+ Expected: Image is created successfully and /etc is mounted as an overlay
+ Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
+ """
+
+ config = self.get_working_config()
+
+ args = {
+ 'OVERLAYFS_INIT_OPTION': "" if origInit else "init=/sbin/preinit",
+ 'OVERLAYFS_ETC_USE_ORIG_INIT_NAME': int(origInit == True),
+ 'OVERLAYFS_ROOTFS_TYPE': rootfsType,
+ 'OVERLAYFS_ETC_CREATE_MOUNT_DIRS': int(rootfsType == "ext4")
+ }
+
+ self.write_config(config.format(**args))
+
+ bitbake('core-image-minimal')
+ testFile = "/etc/my-test-data"
+
+ with runqemu('core-image-minimal', image_fstype='wic', discard_writes=False) as qemu:
+ status, output = qemu.run_serial("/bin/mount")
+
+ line = getline_qemu(output, "/dev/sda3")
+ self.assertTrue("/data" in output, msg=output)
+
+ line = getline_qemu(output, "upperdir=/data/overlay-etc/upper")
+ self.assertTrue(line and line.startswith("/data/overlay-etc/upper on /etc type overlay"), msg=output)
+
+ # check that lower layer is not available
+ status, output = qemu.run_serial("ls -1 /data/overlay-etc/lower")
+ line = getline_qemu(output, "No such file or directory")
+ self.assertTrue(line, msg=output)
+
+ status, output = qemu.run_serial("touch " + testFile)
+ status, output = qemu.run_serial("sync")
+ status, output = qemu.run_serial("ls -1 " + testFile)
+ line = getline_qemu(output, testFile)
+ self.assertTrue(line and line.startswith(testFile), msg=output)
+
+ # Check that file exists in /etc after reboot
+ with runqemu('core-image-minimal', image_fstype='wic') as qemu:
+ status, output = qemu.run_serial("ls -1 " + testFile)
+ line = getline_qemu(output, testFile)
+ self.assertTrue(line and line.startswith(testFile), msg=output)
+
+ @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
+ def test_lower_layer_access(self):
+ """
+ Summary: Test that lower layer of /etc is available read-only when configured
+ Expected: Can't write to the lower layer. The files on the lower and upper layers
+ differ after modification
+ Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
+ """
+
+ config = self.get_working_config()
+
+ configLower = """
+OVERLAYFS_ETC_EXPOSE_LOWER = "1"
+IMAGE_INSTALL:append = " overlayfs-user"
+"""
+ testFile = "lower-layer-test.txt"
+
+ args = {
+ 'OVERLAYFS_INIT_OPTION': "",
+ 'OVERLAYFS_ETC_USE_ORIG_INIT_NAME': 1,
+ 'OVERLAYFS_ROOTFS_TYPE': "ext4",
+ 'OVERLAYFS_ETC_CREATE_MOUNT_DIRS': 1
+ }
+
+ self.write_config(config.format(**args))
+
+ self.append_config(configLower)
+ bitbake('core-image-minimal')
+
+ with runqemu('core-image-minimal', image_fstype='wic') as qemu:
+ status, output = qemu.run_serial("echo \"Modified in upper\" > /etc/" + testFile)
+ status, output = qemu.run_serial("diff /etc/" + testFile + " /data/overlay-etc/lower/" + testFile)
+ line = getline_qemu(output, "Modified in upper")
+ self.assertTrue(line, msg=output)
+ line = getline_qemu(output, "Original file")
+ self.assertTrue(line, msg=output)
+
+ status, output = qemu.run_serial("touch /data/overlay-etc/lower/ro-test.txt")
+ line = getline_qemu(output, "Read-only file system")
+ self.assertTrue(line, msg=output)
+
+ def get_working_config(self):
+ return """
+# Use systemd as init manager
+INIT_MANAGER = "systemd"
+
+# enable overlayfs in the kernel
+KERNEL_EXTRA_FEATURES:append = " \
+ features/overlayfs/overlayfs.scc \
+ cfg/fs/squashfs.scc"
+
+IMAGE_FSTYPES += "wic"
+OVERLAYFS_INIT_OPTION = "{OVERLAYFS_INIT_OPTION}"
+OVERLAYFS_ROOTFS_TYPE = "{OVERLAYFS_ROOTFS_TYPE}"
+OVERLAYFS_ETC_CREATE_MOUNT_DIRS = "{OVERLAYFS_ETC_CREATE_MOUNT_DIRS}"
+WKS_FILE = "overlayfs_etc.wks.in"
+
+EXTRA_IMAGE_FEATURES += "read-only-rootfs"
+# Image configuration for overlayfs-etc
+EXTRA_IMAGE_FEATURES += "overlayfs-etc"
+IMAGE_FEATURES:remove = "package-management"
+OVERLAYFS_ETC_MOUNT_POINT = "/data"
+OVERLAYFS_ETC_FSTYPE = "ext4"
+OVERLAYFS_ETC_DEVICE = "/dev/sda3"
+OVERLAYFS_ETC_USE_ORIG_INIT_NAME = "{OVERLAYFS_ETC_USE_ORIG_INIT_NAME}"
+
+ROOTFS_POSTPROCESS_COMMAND += "{OVERLAYFS_ROOTFS_TYPE}_rootfs"
+
+ext4_rootfs() {{
+}}
+
+squashfs_rootfs() {{
+ mkdir -p ${{IMAGE_ROOTFS}}/data
+}}
+"""
diff --git a/meta/lib/oeqa/selftest/cases/package.py b/meta/lib/oeqa/selftest/cases/package.py
index 169698f780..1aa6c03f8a 100644
--- a/meta/lib/oeqa/selftest/cases/package.py
+++ b/meta/lib/oeqa/selftest/cases/package.py
@@ -1,8 +1,14 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
from oeqa.selftest.case import OESelftestTestCase
-from oeqa.core.decorator.oeid import OETestID
-from oeqa.utils.commands import bitbake, get_bb_vars
+from oeqa.utils.commands import bitbake, get_bb_vars, get_bb_var, runqemu
import subprocess, os
import oe.path
+import re
class VersionOrdering(OESelftestTestCase):
# version1, version2, sort order
@@ -29,13 +35,12 @@ class VersionOrdering(OESelftestTestCase):
cls.bindir = oe.path.join(cls.staging, vars["bindir_native"])
cls.libdir = oe.path.join(cls.staging, vars["libdir_native"])
- def setUp(self):
+ def setUpLocal(self):
# Just for convenience
self.staging = type(self).staging
self.bindir = type(self).bindir
self.libdir = type(self).libdir
- @OETestID(1880)
def test_dpkg(self):
for ver1, ver2, sort in self.tests:
op = { -1: "<<", 0: "=", 1: ">>" }[sort]
@@ -52,7 +57,6 @@ class VersionOrdering(OESelftestTestCase):
status = subprocess.call((oe.path.join(self.bindir, "dpkg"), "--compare-versions", ver1, op, ver2))
self.assertNotEqual(status, 0, "%s %s %s failed" % (ver1, op, ver2))
- @OETestID(1881)
def test_opkg(self):
for ver1, ver2, sort in self.tests:
op = { -1: "<<", 0: "=", 1: ">>" }[sort]
@@ -69,7 +73,6 @@ class VersionOrdering(OESelftestTestCase):
status = subprocess.call((oe.path.join(self.bindir, "opkg"), "compare-versions", ver1, op, ver2))
self.assertNotEqual(status, 0, "%s %s %s failed" % (ver1, op, ver2))
- @OETestID(1882)
def test_rpm(self):
# Need to tell the Python bindings where to find its configuration
env = os.environ.copy()
@@ -84,3 +87,98 @@ class VersionOrdering(OESelftestTestCase):
status = subprocess.call(command, env=env)
self.assertIn(status, (99, 100, 101))
self.assertEqual(status - 100, sort, "%s %s (%d) failed" % (ver1, ver2, sort))
+
+class PackageTests(OESelftestTestCase):
+ # Verify that a recipe cannot rename a package into an existing one
+ def test_package_name_conflict(self):
+ res = bitbake("packagenameconflict", ignore_status=True)
+ self.assertNotEqual(res.status, 0)
+ err = "package name already exists"
+ self.assertTrue(err in res.output)
+
+ # Verify that a recipe which sets up hardlink files has those preserved into split packages
+ # Also test file sparseness is preserved
+ def test_preserve_sparse_hardlinks(self):
+ bitbake("selftest-hardlink -c package")
+
+ dest = get_bb_var('PKGDEST', 'selftest-hardlink')
+ bindir = get_bb_var('bindir', 'selftest-hardlink')
+
+ def checkfiles():
+ # The recipe creates 4 hardlinked files; there is a copy in package/ and a copy in
+ # packages-split/, so expect 8 in total.
+ self.assertEqual(os.stat(dest + "/selftest-hardlink" + bindir + "/hello1").st_nlink, 8)
+
+ # Test a sparse file remains sparse
+ sparsestat = os.stat(dest + "/selftest-hardlink" + bindir + "/sparsetest")
+ self.assertEqual(sparsestat.st_blocks, 0)
+ self.assertEqual(sparsestat.st_size, 1048576)
+
+ checkfiles()
+
+ # Clean and re-run do_package so it's now definitely from sstate, then retest.
+ bitbake("selftest-hardlink -c clean")
+ bitbake("selftest-hardlink -c package")
+
+ checkfiles()
+
+ # Verify that gdb reads symbols correctly from the separated debug file of a hardlinked binary
+ def test_gdb_hardlink_debug(self):
+ features = 'IMAGE_INSTALL:append = " selftest-hardlink"\n'
+ features += 'IMAGE_INSTALL:append = " selftest-hardlink-dbg"\n'
+ features += 'IMAGE_INSTALL:append = " selftest-hardlink-gdb"\n'
+ self.write_config(features)
+ bitbake("core-image-minimal")
+
+ def gdbtest(qemu, binary):
+ """
+ Check that gdb can read symbols for ``binary`` from the separated debug file
+ """
+ self.logger.info("gdbtest %s" % binary)
+ status, output = qemu.run_serial('/usr/bin/gdb.sh %s' % binary, timeout=60)
+ for l in output.split('\n'):
+ # Check debugging symbols exists
+ if '(no debugging symbols found)' in l:
+ self.logger.error("No debugging symbols found. GDB result:\n%s" % output)
+ return False
+
+ # Check that debugging symbols work correctly. Don't look for a
+ # source file as optimisation can put the breakpoint inside
+ # stdio.h.
+ elif "Breakpoint 1 at" in l:
+ return True
+
+ self.logger.error("GDB result:\n%d: %s", status, output)
+ return False
+
+ with runqemu('core-image-minimal') as qemu:
+ for binary in ['/usr/bin/hello1',
+ '/usr/bin/hello2',
+ '/usr/libexec/hello3',
+ '/usr/libexec/hello4']:
+ if not gdbtest(qemu, binary):
+ self.fail('GDB %s failed' % binary)
+
+ def test_preserve_ownership(self):
+ features = 'IMAGE_INSTALL:append = " selftest-chown"\n'
+ self.write_config(features)
+ bitbake("core-image-minimal")
+
+ def check_ownership(qemu, expected_gid, expected_uid, path):
+ self.logger.info("Check ownership of %s", path)
+ status, output = qemu.run_serial('stat -c "%U %G" ' + path)
+ self.assertEqual(status, 1, "stat failed: " + output)
+ try:
+ uid, gid = output.split()
+ self.assertEqual(uid, expected_uid)
+ self.assertEqual(gid, expected_gid)
+ except ValueError:
+ self.fail("Cannot parse output: " + output)
+
+ sysconfdir = get_bb_var('sysconfdir', 'selftest-chown')
+ with runqemu('core-image-minimal') as qemu:
+ for path in [ sysconfdir + "/selftest-chown/file",
+ sysconfdir + "/selftest-chown/dir",
+ sysconfdir + "/selftest-chown/symlink",
+ sysconfdir + "/selftest-chown/fifotest/fifo"]:
+ check_ownership(qemu, "test", "test", path)
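
For reference, the two properties asserted by checkfiles() in test_preserve_sparse_hardlinks above can be reproduced with plain os.stat(); a small standalone sketch using temporary files (not tied to PKGDEST, and st_blocks behaviour can vary by filesystem):

    import os
    import tempfile

    # Hardlink count is visible as st_nlink; a file containing only a hole
    # normally reports st_blocks == 0 while st_size stays at the full size.
    with tempfile.TemporaryDirectory() as tmp:
        hello = os.path.join(tmp, 'hello1')
        open(hello, 'w').close()
        os.link(hello, os.path.join(tmp, 'hello2'))      # second hardlink
        print(os.stat(hello).st_nlink)                   # -> 2

        sparse = os.path.join(tmp, 'sparsetest')
        with open(sparse, 'wb') as f:
            f.truncate(1048576)                          # 1 MiB hole, no data written
        st = os.stat(sparse)
        print(st.st_size, st.st_blocks)                  # -> 1048576, usually 0
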
diff --git a/meta/lib/oeqa/selftest/cases/pkgdata.py b/meta/lib/oeqa/selftest/cases/pkgdata.py
index 0b4caf1b2c..d786c33018 100644
--- a/meta/lib/oeqa/selftest/cases/pkgdata.py
+++ b/meta/lib/oeqa/selftest/cases/pkgdata.py
@@ -1,10 +1,15 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
import os
import tempfile
import fnmatch
from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
-from oeqa.core.decorator.oeid import OETestID
class OePkgdataUtilTests(OESelftestTestCase):
@@ -13,9 +18,9 @@ class OePkgdataUtilTests(OESelftestTestCase):
super(OePkgdataUtilTests, cls).setUpClass()
# Ensure we have the right data in pkgdata
cls.logger.info('Running bitbake to generate pkgdata')
+ bitbake('target-sdk-provides-dummy -c clean')
bitbake('busybox zlib m4')
- @OETestID(1203)
def test_lookup_pkg(self):
# Forward tests
result = runCmd('oe-pkgdata-util lookup-pkg "zlib busybox"')
@@ -34,7 +39,6 @@ class OePkgdataUtilTests(OESelftestTestCase):
self.assertEqual(result.status, 1, "Status different than 1. output: %s" % result.output)
self.assertEqual(result.output, 'ERROR: The following packages could not be found: nonexistentpkg')
- @OETestID(1205)
def test_read_value(self):
result = runCmd('oe-pkgdata-util read-value PN libz1')
self.assertEqual(result.output, 'zlib')
@@ -44,17 +48,15 @@ class OePkgdataUtilTests(OESelftestTestCase):
pkgsize = int(result.output.strip())
self.assertGreater(pkgsize, 1, "Size should be greater than 1. %s" % result.output)
- @OETestID(1198)
def test_find_path(self):
- result = runCmd('oe-pkgdata-util find-path /lib/libz.so.1')
- self.assertEqual(result.output, 'zlib: /lib/libz.so.1')
+ result = runCmd('oe-pkgdata-util find-path /usr/lib/libz.so.1')
+ self.assertEqual(result.output, 'zlib: /usr/lib/libz.so.1')
result = runCmd('oe-pkgdata-util find-path /usr/bin/m4')
self.assertEqual(result.output, 'm4: /usr/bin/m4')
result = runCmd('oe-pkgdata-util find-path /not/exist', ignore_status=True)
self.assertEqual(result.status, 1, "Status different than 1. output: %s" % result.output)
self.assertEqual(result.output, 'ERROR: Unable to find any package producing path /not/exist')
- @OETestID(1204)
def test_lookup_recipe(self):
result = runCmd('oe-pkgdata-util lookup-recipe "libz-staticdev busybox"')
self.assertEqual(result.output, 'zlib\nbusybox')
@@ -64,7 +66,6 @@ class OePkgdataUtilTests(OESelftestTestCase):
self.assertEqual(result.status, 1, "Status different than 1. output: %s" % result.output)
self.assertEqual(result.output, 'ERROR: The following packages could not be found: nonexistentpkg')
- @OETestID(1202)
def test_list_pkgs(self):
# No arguments
result = runCmd('oe-pkgdata-util list-pkgs')
@@ -82,7 +83,7 @@ class OePkgdataUtilTests(OESelftestTestCase):
pkglist.remove('zlib-ptest') # in case ptest is disabled
except ValueError:
pass
- self.assertEqual(pkglist, ['zlib', 'zlib-dbg', 'zlib-dev', 'zlib-doc', 'zlib-staticdev'], "Packages listed after remove: %s" % result.output)
+ self.assertEqual(pkglist, ['zlib', 'zlib-dbg', 'zlib-dev', 'zlib-doc', 'zlib-src', 'zlib-staticdev'], "Packages listed after remove: %s" % result.output)
# With recipe specified, runtime
result = runCmd('oe-pkgdata-util list-pkgs -p zlib -r')
pkglist = sorted(result.output.split())
@@ -90,7 +91,7 @@ class OePkgdataUtilTests(OESelftestTestCase):
pkglist.remove('libz-ptest') # in case ptest is disabled
except ValueError:
pass
- self.assertEqual(pkglist, ['libz-dbg', 'libz-dev', 'libz-doc', 'libz-staticdev', 'libz1'], "Packages listed after remove: %s" % result.output)
+ self.assertEqual(pkglist, ['libz-dbg', 'libz-dev', 'libz-doc', 'libz-src', 'libz-staticdev', 'libz1'], "Packages listed after remove: %s" % result.output)
# With recipe specified and unpackaged
result = runCmd('oe-pkgdata-util list-pkgs -p zlib -u')
pkglist = sorted(result.output.split())
@@ -108,7 +109,6 @@ class OePkgdataUtilTests(OESelftestTestCase):
pkglist = sorted(result.output.split())
self.assertEqual(pkglist, ['libz-dbg', 'libz-dev', 'libz-doc'], "Packages listed: %s" % result.output)
- @OETestID(1201)
def test_list_pkg_files(self):
def splitoutput(output):
files = {}
@@ -122,8 +122,7 @@ class OePkgdataUtilTests(OESelftestTestCase):
curpkg = line.split(':')[0]
files[curpkg] = []
return files
- bb_vars = get_bb_vars(['base_libdir', 'libdir', 'includedir', 'mandir'])
- base_libdir = bb_vars['base_libdir']
+ bb_vars = get_bb_vars(['libdir', 'includedir', 'mandir'])
libdir = bb_vars['libdir']
includedir = bb_vars['includedir']
mandir = bb_vars['mandir']
@@ -140,7 +139,7 @@ class OePkgdataUtilTests(OESelftestTestCase):
self.assertIn('libz1', list(files.keys()), "listed pkgs. files: %s" %result.output)
self.assertIn('libz-dev', list(files.keys()), "listed pkgs. files: %s" %result.output)
self.assertGreater(len(files['libz1']), 1)
- libspec = os.path.join(base_libdir, 'libz.so.1.*')
+ libspec = os.path.join(libdir, 'libz.so.1.*')
found = False
for fileitem in files['libz1']:
if fnmatch.fnmatchcase(fileitem, libspec):
@@ -198,7 +197,6 @@ class OePkgdataUtilTests(OESelftestTestCase):
self.assertIn(os.path.join(mandir, 'man3/zlib.3'), files['libz-doc'])
self.assertIn(os.path.join(libdir, 'libz.a'), files['libz-staticdev'])
- @OETestID(1200)
def test_glob(self):
tempdir = tempfile.mkdtemp(prefix='pkgdataqa')
self.track_for_cleanup(tempdir)
@@ -218,7 +216,12 @@ class OePkgdataUtilTests(OESelftestTestCase):
self.assertNotIn('libz-dev', resultlist)
self.assertNotIn('libz-dbg', resultlist)
- @OETestID(1206)
def test_specify_pkgdatadir(self):
result = runCmd('oe-pkgdata-util -p %s lookup-pkg zlib' % get_bb_var('PKGDATA_DIR'))
self.assertEqual(result.output, 'libz1')
+
+ def test_no_param(self):
+ result = runCmd('oe-pkgdata-util', ignore_status=True)
+ self.assertEqual(result.status, 2, "Status different than 2. output: %s" % result.output)
+ currpos = result.output.find('usage: oe-pkgdata-util')
+ self.assertTrue(currpos != -1, msg="Test failed: usage/help text not displayed in %s" % result.output)
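
The splitoutput() helper shown in the hunk above parses oe-pkgdata-util list-pkg-files output into a package-to-files mapping. A standalone sketch of the same parsing, assuming the usual 'pkg:' header followed by tab-indented paths:

    # Standalone version of the splitoutput() parsing pattern, assuming
    # list-pkg-files prints "pkg:" headers followed by tab-indented file paths.
    def splitoutput(output):
        files = {}
        curpkg = None
        for line in output.splitlines():
            if line.startswith('\t'):
                files[curpkg].append(line.strip())
            elif line.strip():
                curpkg = line.split(':')[0]
                files[curpkg] = []
        return files

    sample = 'libz1:\n\t/usr/lib/libz.so.1\nlibz-dev:\n\t/usr/include/zlib.h\n'
    print(splitoutput(sample))
    # {'libz1': ['/usr/lib/libz.so.1'], 'libz-dev': ['/usr/include/zlib.h']}
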
diff --git a/meta/lib/oeqa/selftest/cases/prservice.py b/meta/lib/oeqa/selftest/cases/prservice.py
index 479e520618..8da3739c57 100644
--- a/meta/lib/oeqa/selftest/cases/prservice.py
+++ b/meta/lib/oeqa/selftest/cases/prservice.py
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
import os
import re
import shutil
@@ -6,9 +12,10 @@ import datetime
import oeqa.utils.ftools as ftools
from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import runCmd, bitbake, get_bb_var
-from oeqa.core.decorator.oeid import OETestID
from oeqa.utils.network import get_free_port
+import bb.utils
+
class BitbakePrTests(OESelftestTestCase):
@classmethod
@@ -16,11 +23,21 @@ class BitbakePrTests(OESelftestTestCase):
super(BitbakePrTests, cls).setUpClass()
cls.pkgdata_dir = get_bb_var('PKGDATA_DIR')
+ cls.exported_db_path = os.path.join(cls.builddir, 'export.inc')
+ cls.current_db_path = os.path.join(get_bb_var('PERSISTENT_DIR'), 'prserv.sqlite3')
+
+ def cleanup(self):
+ # Ensure any memory resident bitbake is stopped
+ bitbake("-m")
+ # Remove any existing export file or prserv database
+ bb.utils.remove(self.exported_db_path)
+ bb.utils.remove(self.current_db_path + "*")
+
def get_pr_version(self, package_name):
package_data_file = os.path.join(self.pkgdata_dir, 'runtime', package_name)
package_data = ftools.read_file(package_data_file)
- find_pr = re.search("PKGR: r[0-9]+\.([0-9]+)", package_data)
- self.assertTrue(find_pr, "No PKG revision found in %s" % package_data_file)
+ find_pr = re.search(r"PKGR: r[0-9]+\.([0-9]+)", package_data)
+ self.assertTrue(find_pr, r"No PKG revision found via regex 'PKGR: r[0-9]+\.([0-9]+)' in %s" % package_data_file)
return int(find_pr.group(1))
def get_task_stamp(self, package_name, recipe_task):
@@ -29,7 +46,7 @@ class BitbakePrTests(OESelftestTestCase):
package_stamps_path = "/".join(stampdata[:-1])
stamps = []
for stamp in os.listdir(package_stamps_path):
- find_stamp = re.match("%s\.%s\.([a-z0-9]{32})" % (re.escape(prefix), recipe_task), stamp)
+ find_stamp = re.match(r"%s\.%s\.([a-z0-9]{32})" % (re.escape(prefix), recipe_task), stamp)
if find_stamp:
stamps.append(find_stamp.group(1))
self.assertFalse(len(stamps) == 0, msg="Could not find stamp for task %s for recipe %s" % (recipe_task, package_name))
@@ -37,13 +54,14 @@ class BitbakePrTests(OESelftestTestCase):
return str(stamps[0])
def increment_package_pr(self, package_name):
- inc_data = "do_package_append() {\n bb.build.exec_func('do_test_prserv', d)\n}\ndo_test_prserv() {\necho \"The current date is: %s\"\n}" % datetime.datetime.now()
+ inc_data = "do_package:append() {\n bb.build.exec_func('do_test_prserv', d)\n}\ndo_test_prserv() {\necho \"The current date is: %s\" > ${PKGDESTWORK}/${PN}.datestamp\n}" % datetime.datetime.now()
self.write_recipeinc(package_name, inc_data)
res = bitbake(package_name, ignore_status=True)
self.delete_recipeinc(package_name)
self.assertEqual(res.status, 0, msg=res.output)
def config_pr_tests(self, package_name, package_type='rpm', pr_socket='localhost:0'):
+ self.cleanup()
config_package_data = 'PACKAGE_CLASSES = "package_%s"' % package_type
self.write_config(config_package_data)
config_server_data = 'PRSERV_HOST = "%s"' % pr_socket
@@ -60,67 +78,60 @@ class BitbakePrTests(OESelftestTestCase):
pr_2 = self.get_pr_version(package_name)
stamp_2 = self.get_task_stamp(package_name, track_task)
- self.assertTrue(pr_2 - pr_1 == 1, "Step between same pkg. revision is greater than 1")
+ self.assertTrue(pr_2 - pr_1 == 1, "New PR %s did not increment as expected (from %s), difference should be 1" % (pr_2, pr_1))
self.assertTrue(stamp_1 != stamp_2, "Different pkg rev. but same stamp: %s" % stamp_1)
+ self.cleanup()
+
def run_test_pr_export_import(self, package_name, replace_current_db=True):
self.config_pr_tests(package_name)
self.increment_package_pr(package_name)
pr_1 = self.get_pr_version(package_name)
- exported_db_path = os.path.join(self.builddir, 'export.inc')
- export_result = runCmd("bitbake-prserv-tool export %s" % exported_db_path, ignore_status=True)
+ export_result = runCmd("bitbake-prserv-tool export %s" % self.exported_db_path, ignore_status=True)
self.assertEqual(export_result.status, 0, msg="PR Service database export failed: %s" % export_result.output)
- self.assertTrue(os.path.exists(exported_db_path))
+ self.assertTrue(os.path.exists(self.exported_db_path), msg="%s didn't exist, tool output %s" % (self.exported_db_path, export_result.output))
if replace_current_db:
- current_db_path = os.path.join(get_bb_var('PERSISTENT_DIR'), 'prserv.sqlite3')
- self.assertTrue(os.path.exists(current_db_path), msg="Path to current PR Service database is invalid: %s" % current_db_path)
- os.remove(current_db_path)
+ self.assertTrue(os.path.exists(self.current_db_path), msg="Path to current PR Service database is invalid: %s" % self.current_db_path)
+ os.remove(self.current_db_path)
- import_result = runCmd("bitbake-prserv-tool import %s" % exported_db_path, ignore_status=True)
- os.remove(exported_db_path)
+ import_result = runCmd("bitbake-prserv-tool import %s" % self.exported_db_path, ignore_status=True)
+ #os.remove(self.exported_db_path)
self.assertEqual(import_result.status, 0, msg="PR Service database import failed: %s" % import_result.output)
self.increment_package_pr(package_name)
pr_2 = self.get_pr_version(package_name)
- self.assertTrue(pr_2 - pr_1 == 1, "Step between same pkg. revision is greater than 1")
+ self.assertTrue(pr_2 - pr_1 == 1, "New PR %s did not increment as expected (from %s), difference should be 1" % (pr_2, pr_1))
+
+ self.cleanup()
- @OETestID(930)
def test_import_export_replace_db(self):
self.run_test_pr_export_import('m4')
- @OETestID(931)
def test_import_export_override_db(self):
self.run_test_pr_export_import('m4', replace_current_db=False)
- @OETestID(932)
def test_pr_service_rpm_arch_dep(self):
self.run_test_pr_service('m4', 'rpm', 'do_package')
- @OETestID(934)
def test_pr_service_deb_arch_dep(self):
self.run_test_pr_service('m4', 'deb', 'do_package')
- @OETestID(933)
def test_pr_service_ipk_arch_dep(self):
self.run_test_pr_service('m4', 'ipk', 'do_package')
- @OETestID(935)
def test_pr_service_rpm_arch_indep(self):
self.run_test_pr_service('xcursor-transparent-theme', 'rpm', 'do_package')
- @OETestID(937)
def test_pr_service_deb_arch_indep(self):
self.run_test_pr_service('xcursor-transparent-theme', 'deb', 'do_package')
- @OETestID(936)
def test_pr_service_ipk_arch_indep(self):
self.run_test_pr_service('xcursor-transparent-theme', 'ipk', 'do_package')
- @OETestID(1419)
def test_stopping_prservice_message(self):
port = get_free_port()
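
get_pr_version() above pulls the PR-service suffix out of the PKGR line in pkgdata; a quick illustration of that regex on a sample value:

    import re

    # 'r0.3' is base revision r0 with PR-service suffix 3; the tests expect the
    # suffix to grow by exactly 1 between builds.
    sample = 'PKGR: r0.3'
    match = re.search(r"PKGR: r[0-9]+\.([0-9]+)", sample)
    print(int(match.group(1)))   # -> 3
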
diff --git a/meta/lib/oeqa/selftest/cases/pseudo.py b/meta/lib/oeqa/selftest/cases/pseudo.py
new file mode 100644
index 0000000000..3ef8786022
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/pseudo.py
@@ -0,0 +1,29 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import glob
+import os
+import shutil
+from oeqa.utils.commands import bitbake, get_test_layer
+from oeqa.selftest.case import OESelftestTestCase
+
+class Pseudo(OESelftestTestCase):
+
+ def test_pseudo_pyc_creation(self):
+ self.write_config("")
+
+ metaselftestpath = get_test_layer()
+ pycache_path = os.path.join(metaselftestpath, 'lib/__pycache__')
+ if os.path.exists(pycache_path):
+ shutil.rmtree(pycache_path)
+
+ bitbake('pseudo-pyc-test -c install')
+
+ test1_pyc_present = len(glob.glob(os.path.join(pycache_path, 'pseudo_pyc_test1.*.pyc')))
+ self.assertTrue(test1_pyc_present, 'test1 pyc file missing, should be created outside of pseudo context.')
+
+ test2_pyc_present = len(glob.glob(os.path.join(pycache_path, 'pseudo_pyc_test2.*.pyc')))
+ self.assertFalse(test2_pyc_present, 'test2 pyc file present, should not be created in pseudo context.')
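
The assertions above depend on CPython's __pycache__ naming scheme, which is where the '<module>.*.pyc' glob pattern comes from; a short standalone sketch:

    import importlib.util
    import sys

    # CPython tags byte-compiled files with sys.implementation.cache_tag,
    # hence the 'pseudo_pyc_test1.*.pyc' glob in the test above.
    print(sys.implementation.cache_tag)                            # e.g. 'cpython-311'
    print(importlib.util.cache_from_source('pseudo_pyc_test1.py'))
    # -> __pycache__/pseudo_pyc_test1.cpython-311.pyc (tag varies by interpreter)
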
diff --git a/meta/lib/oeqa/selftest/cases/recipetool.py b/meta/lib/oeqa/selftest/cases/recipetool.py
index 754ea94982..aebea42502 100644
--- a/meta/lib/oeqa/selftest/cases/recipetool.py
+++ b/meta/lib/oeqa/selftest/cases/recipetool.py
@@ -1,3 +1,10 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import errno
import os
import shutil
import tempfile
@@ -5,7 +12,6 @@ import urllib.parse
from oeqa.utils.commands import runCmd, bitbake, get_bb_var
from oeqa.utils.commands import get_bb_vars, create_temp_layer
-from oeqa.core.decorator.oeid import OETestID
from oeqa.selftest.cases import devtool
templayerdir = None
@@ -22,7 +28,17 @@ def tearDownModule():
runCmd('rm -rf %s' % templayerdir)
-class RecipetoolBase(devtool.DevtoolBase):
+def needTomllib(test):
+ # This test requires Python 3.11 or above (for the tomllib module) or the tomli module to be installed
+ try:
+ import tomllib
+ except ImportError:
+ try:
+ import tomli
+ except ImportError:
+ test.skipTest('Test requires python 3.11 or above for tomllib module or tomli module')
+
+class RecipetoolBase(devtool.DevtoolTestCase):
def setUpLocal(self):
super(RecipetoolBase, self).setUpLocal()
@@ -32,6 +48,8 @@ class RecipetoolBase(devtool.DevtoolBase):
self.testfile = os.path.join(self.tempdir, 'testfile')
with open(self.testfile, 'w') as f:
f.write('Test file\n')
+ config = 'BBMASK += "meta-poky/recipes-core/base-files/base-files_%.bbappend"\n'
+ self.append_config(config)
def tearDownLocal(self):
runCmd('rm -rf %s/recipes-*' % self.templayerdir)
@@ -65,17 +83,16 @@ class RecipetoolBase(devtool.DevtoolBase):
return bbappendfile, result.output
-class RecipetoolTests(RecipetoolBase):
+class RecipetoolAppendTests(RecipetoolBase):
@classmethod
def setUpClass(cls):
- super(RecipetoolTests, cls).setUpClass()
+ super(RecipetoolAppendTests, cls).setUpClass()
# Ensure we have the right data in shlibs/pkgdata
cls.logger.info('Running bitbake to generate pkgdata')
bitbake('-c packagedata base-files coreutils busybox selftest-recipetool-appendfile')
- bb_vars = get_bb_vars(['COREBASE', 'BBPATH'])
+ bb_vars = get_bb_vars(['COREBASE'])
cls.corebase = bb_vars['COREBASE']
- cls.bbpath = bb_vars['BBPATH']
def _try_recipetool_appendfile(self, testrecipe, destfile, newfile, options, expectedlines, expectedfiles):
cmd = 'recipetool appendfile %s %s %s %s' % (self.templayerdir, destfile, newfile, options)
@@ -89,22 +106,19 @@ class RecipetoolTests(RecipetoolBase):
for errorstr in checkerror:
self.assertIn(errorstr, result.output)
- @OETestID(1177)
def test_recipetool_appendfile_basic(self):
# Basic test
- expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
'\n']
_, output = self._try_recipetool_appendfile('base-files', '/etc/motd', self.testfile, '', expectedlines, ['motd'])
self.assertNotIn('WARNING: ', output)
- @OETestID(1183)
def test_recipetool_appendfile_invalid(self):
# Test some commands that should error
self._try_recipetool_appendfile_fail('/etc/passwd', self.testfile, ['ERROR: /etc/passwd cannot be handled by this tool', 'useradd', 'extrausers'])
self._try_recipetool_appendfile_fail('/etc/timestamp', self.testfile, ['ERROR: /etc/timestamp cannot be handled by this tool'])
self._try_recipetool_appendfile_fail('/dev/console', self.testfile, ['ERROR: /dev/console cannot be handled by this tool'])
- @OETestID(1176)
def test_recipetool_appendfile_alternatives(self):
# Now try with a file we know should be an alternative
# (this is very much a fake example, but one we know is reliably an alternative)
@@ -112,11 +126,11 @@ class RecipetoolTests(RecipetoolBase):
# Need a test file - should be executable
testfile2 = os.path.join(self.corebase, 'oe-init-build-env')
testfile2name = os.path.basename(testfile2)
- expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
'\n',
'SRC_URI += "file://%s"\n' % testfile2name,
'\n',
- 'do_install_append() {\n',
+ 'do_install:append() {\n',
' install -d ${D}${base_bindir}\n',
' install -m 0755 ${WORKDIR}/%s ${D}${base_bindir}/ls\n' % testfile2name,
'}\n']
@@ -128,7 +142,6 @@ class RecipetoolTests(RecipetoolBase):
result = runCmd('diff -q %s %s' % (testfile2, copiedfile), ignore_status=True)
self.assertNotEqual(result.status, 0, 'New file should have been copied but was not %s' % result.output)
- @OETestID(1178)
def test_recipetool_appendfile_binary(self):
# Try appending a binary file
# /bin/ls can be a symlink to /usr/bin/ls
@@ -137,14 +150,13 @@ class RecipetoolTests(RecipetoolBase):
self.assertIn('WARNING: ', result.output)
self.assertIn('is a binary', result.output)
- @OETestID(1173)
def test_recipetool_appendfile_add(self):
# Try arbitrary file add to a recipe
- expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
'\n',
'SRC_URI += "file://testfile"\n',
'\n',
- 'do_install_append() {\n',
+ 'do_install:append() {\n',
' install -d ${D}${datadir}\n',
' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n',
'}\n']
@@ -153,125 +165,102 @@ class RecipetoolTests(RecipetoolBase):
# (so we're testing that, plus modifying an existing bbappend)
testfile2 = os.path.join(self.corebase, 'oe-init-build-env')
testfile2name = os.path.basename(testfile2)
- expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
'\n',
'SRC_URI += "file://testfile \\\n',
' file://%s \\\n' % testfile2name,
' "\n',
'\n',
- 'do_install_append() {\n',
+ 'do_install:append() {\n',
' install -d ${D}${datadir}\n',
' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n',
' install -m 0755 ${WORKDIR}/%s ${D}${datadir}/scriptname\n' % testfile2name,
'}\n']
self._try_recipetool_appendfile('netbase', '/usr/share/scriptname', testfile2, '-r netbase', expectedlines, ['testfile', testfile2name])
- @OETestID(1174)
def test_recipetool_appendfile_add_bindir(self):
# Try arbitrary file add to a recipe, this time to a location such that should be installed as executable
- expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
'\n',
'SRC_URI += "file://testfile"\n',
'\n',
- 'do_install_append() {\n',
+ 'do_install:append() {\n',
' install -d ${D}${bindir}\n',
' install -m 0755 ${WORKDIR}/testfile ${D}${bindir}/selftest-recipetool-testbin\n',
'}\n']
_, output = self._try_recipetool_appendfile('netbase', '/usr/bin/selftest-recipetool-testbin', self.testfile, '-r netbase', expectedlines, ['testfile'])
self.assertNotIn('WARNING: ', output)
- @OETestID(1175)
def test_recipetool_appendfile_add_machine(self):
# Try arbitrary file add to a recipe, this time to a location such that should be installed as executable
- expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
'\n',
'PACKAGE_ARCH = "${MACHINE_ARCH}"\n',
'\n',
- 'SRC_URI_append_mymachine = " file://testfile"\n',
+ 'SRC_URI:append:mymachine = " file://testfile"\n',
'\n',
- 'do_install_append_mymachine() {\n',
+ 'do_install:append:mymachine() {\n',
' install -d ${D}${datadir}\n',
' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n',
'}\n']
_, output = self._try_recipetool_appendfile('netbase', '/usr/share/something', self.testfile, '-r netbase -m mymachine', expectedlines, ['mymachine/testfile'])
self.assertNotIn('WARNING: ', output)
- @OETestID(1184)
def test_recipetool_appendfile_orig(self):
# A file that's in SRC_URI and in do_install with the same name
- expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
'\n']
_, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-orig', self.testfile, '', expectedlines, ['selftest-replaceme-orig'])
self.assertNotIn('WARNING: ', output)
- @OETestID(1191)
def test_recipetool_appendfile_todir(self):
# A file that's in SRC_URI and in do_install with destination directory rather than file
- expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
'\n']
_, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-todir', self.testfile, '', expectedlines, ['selftest-replaceme-todir'])
self.assertNotIn('WARNING: ', output)
- @OETestID(1187)
def test_recipetool_appendfile_renamed(self):
# A file that's in SRC_URI with a different name to the destination file
- expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
'\n']
_, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-renamed', self.testfile, '', expectedlines, ['file1'])
self.assertNotIn('WARNING: ', output)
- @OETestID(1190)
def test_recipetool_appendfile_subdir(self):
# A file that's in SRC_URI in a subdir
- expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
'\n',
'SRC_URI += "file://testfile"\n',
'\n',
- 'do_install_append() {\n',
+ 'do_install:append() {\n',
' install -d ${D}${datadir}\n',
' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-subdir\n',
'}\n']
_, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-subdir', self.testfile, '', expectedlines, ['testfile'])
self.assertNotIn('WARNING: ', output)
- @OETestID(1189)
- def test_recipetool_appendfile_src_glob(self):
- # A file that's in SRC_URI as a glob
- expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
- '\n',
- 'SRC_URI += "file://testfile"\n',
- '\n',
- 'do_install_append() {\n',
- ' install -d ${D}${datadir}\n',
- ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-src-globfile\n',
- '}\n']
- _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-src-globfile', self.testfile, '', expectedlines, ['testfile'])
- self.assertNotIn('WARNING: ', output)
-
- @OETestID(1181)
def test_recipetool_appendfile_inst_glob(self):
# A file that's in do_install as a glob
- expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
'\n']
_, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-globfile', self.testfile, '', expectedlines, ['selftest-replaceme-inst-globfile'])
self.assertNotIn('WARNING: ', output)
- @OETestID(1182)
def test_recipetool_appendfile_inst_todir_glob(self):
# A file that's in do_install as a glob with destination as a directory
- expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
'\n']
_, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-todir-globfile', self.testfile, '', expectedlines, ['selftest-replaceme-inst-todir-globfile'])
self.assertNotIn('WARNING: ', output)
- @OETestID(1185)
def test_recipetool_appendfile_patch(self):
# A file that's added by a patch in SRC_URI
- expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
'\n',
'SRC_URI += "file://testfile"\n',
'\n',
- 'do_install_append() {\n',
+ 'do_install:append() {\n',
' install -d ${D}${sysconfdir}\n',
' install -m 0644 ${WORKDIR}/testfile ${D}${sysconfdir}/selftest-replaceme-patched\n',
'}\n']
@@ -283,45 +272,41 @@ class RecipetoolTests(RecipetoolBase):
else:
self.fail('Patch warning not found in output:\n%s' % output)
- @OETestID(1188)
def test_recipetool_appendfile_script(self):
# Now, a file that's in SRC_URI but installed by a script (so no mention in do_install)
- expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
'\n',
'SRC_URI += "file://testfile"\n',
'\n',
- 'do_install_append() {\n',
+ 'do_install:append() {\n',
' install -d ${D}${datadir}\n',
' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-scripted\n',
'}\n']
_, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-scripted', self.testfile, '', expectedlines, ['testfile'])
self.assertNotIn('WARNING: ', output)
- @OETestID(1180)
def test_recipetool_appendfile_inst_func(self):
# A file that's installed from a function called by do_install
- expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
'\n']
_, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-func', self.testfile, '', expectedlines, ['selftest-replaceme-inst-func'])
self.assertNotIn('WARNING: ', output)
- @OETestID(1186)
def test_recipetool_appendfile_postinstall(self):
# A file that's created by a postinstall script (and explicitly mentioned in it)
# First try without specifying recipe
self._try_recipetool_appendfile_fail('/usr/share/selftest-replaceme-postinst', self.testfile, ['File /usr/share/selftest-replaceme-postinst may be written out in a pre/postinstall script of the following recipes:', 'selftest-recipetool-appendfile'])
# Now specify recipe
- expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
'\n',
'SRC_URI += "file://testfile"\n',
'\n',
- 'do_install_append() {\n',
+ 'do_install:append() {\n',
' install -d ${D}${datadir}\n',
' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-postinst\n',
'}\n']
_, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-postinst', self.testfile, '-r selftest-recipetool-appendfile', expectedlines, ['testfile'])
- @OETestID(1179)
def test_recipetool_appendfile_extlayer(self):
# Try creating a bbappend in a layer that's not in bblayers.conf and has a different structure
exttemplayerdir = os.path.join(self.tempdir, 'extlayer')
@@ -337,7 +322,6 @@ class RecipetoolTests(RecipetoolBase):
'metadata/recipes/recipes-test/selftest-recipetool-appendfile/selftest-recipetool-appendfile/selftest-replaceme-orig']
self.assertEqual(sorted(createdfiles), sorted(expectedfiles))
- @OETestID(1192)
def test_recipetool_appendfile_wildcard(self):
def try_appendfile_wc(options):
@@ -362,7 +346,9 @@ class RecipetoolTests(RecipetoolBase):
filename = try_appendfile_wc('-w')
self.assertEqual(filename, recipefn.split('_')[0] + '_%.bbappend')
- @OETestID(1193)
+
+class RecipetoolCreateTests(RecipetoolBase):
+
def test_recipetool_create(self):
# Try adding a recipe
tempsrc = os.path.join(self.tempdir, 'srctree')
@@ -372,15 +358,13 @@ class RecipetoolTests(RecipetoolBase):
result = runCmd('recipetool create -o %s %s -x %s' % (recipefile, srcuri, tempsrc))
self.assertTrue(os.path.isfile(recipefile))
checkvars = {}
- checkvars['LICENSE'] = 'GPLv2'
+ checkvars['LICENSE'] = 'GPL-2.0-only'
checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263'
checkvars['SRC_URI'] = 'https://github.com/logrotate/logrotate/releases/download/${PV}/logrotate-${PV}.tar.xz'
- checkvars['SRC_URI[md5sum]'] = 'a560c57fac87c45b2fc17406cdf79288'
checkvars['SRC_URI[sha256sum]'] = '2e6a401cac9024db2288297e3be1a8ab60e7401ba8e91225218aaf4a27e82a07'
self._test_recipe_contents(recipefile, checkvars, [])
- @OETestID(1194)
- def test_recipetool_create_git(self):
+ def test_recipetool_create_autotools(self):
if 'x11' not in get_bb_var('DISTRO_FEATURES'):
self.skipTest('Test requires x11 as distro feature')
# Ensure we have the right data in shlibs/pkgdata
@@ -389,26 +373,25 @@ class RecipetoolTests(RecipetoolBase):
tempsrc = os.path.join(self.tempdir, 'srctree')
os.makedirs(tempsrc)
recipefile = os.path.join(self.tempdir, 'libmatchbox.bb')
- srcuri = 'git://git.yoctoproject.org/libmatchbox'
+ srcuri = 'git://git.yoctoproject.org/libmatchbox;protocol=https'
result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri + ";rev=9f7cf8895ae2d39c465c04cc78e918c157420269", '-x', tempsrc])
self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output)
checkvars = {}
- checkvars['LICENSE'] = 'LGPLv2.1'
+ checkvars['LICENSE'] = 'LGPL-2.1-only'
checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=7fbc338309ac38fefcd64b04bb903e34'
checkvars['S'] = '${WORKDIR}/git'
- checkvars['PV'] = '1.11+git${SRCPV}'
- checkvars['SRC_URI'] = srcuri
+ checkvars['PV'] = '1.11+git'
+ checkvars['SRC_URI'] = srcuri + ';branch=master'
checkvars['DEPENDS'] = set(['libcheck', 'libjpeg-turbo', 'libpng', 'libx11', 'libxext', 'pango'])
inherits = ['autotools', 'pkgconfig']
self._test_recipe_contents(recipefile, checkvars, inherits)
- @OETestID(1392)
def test_recipetool_create_simple(self):
# Try adding a recipe
temprecipe = os.path.join(self.tempdir, 'recipe')
os.makedirs(temprecipe)
- pv = '1.7.3.0'
- srcuri = 'http://www.dest-unreach.org/socat/download/socat-%s.tar.bz2' % pv
+ pv = '1.7.4.1'
+ srcuri = 'http://www.dest-unreach.org/socat/download/Archive/socat-%s.tar.bz2' % pv
result = runCmd('recipetool create %s -o %s' % (srcuri, temprecipe))
dirlist = os.listdir(temprecipe)
if len(dirlist) > 1:
@@ -417,7 +400,7 @@ class RecipetoolTests(RecipetoolBase):
self.fail('recipetool did not create recipe file; output:\n%s\ndirlist:\n%s' % (result.output, str(dirlist)))
self.assertEqual(dirlist[0], 'socat_%s.bb' % pv, 'Recipe file incorrectly named')
checkvars = {}
- checkvars['LICENSE'] = set(['Unknown', 'GPLv2'])
+ checkvars['LICENSE'] = set(['Unknown', 'GPL-2.0-only'])
checkvars['LIC_FILES_CHKSUM'] = set(['file://COPYING.OpenSSL;md5=5c9bccc77f67a8328ef4ebaf468116f4', 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263'])
# We don't check DEPENDS since they are variable for this recipe depending on what's in the sysroot
checkvars['S'] = None
@@ -425,84 +408,621 @@ class RecipetoolTests(RecipetoolBase):
inherits = ['autotools']
self._test_recipe_contents(os.path.join(temprecipe, dirlist[0]), checkvars, inherits)
- @OETestID(1418)
def test_recipetool_create_cmake(self):
- # Try adding a recipe
temprecipe = os.path.join(self.tempdir, 'recipe')
os.makedirs(temprecipe)
- recipefile = os.path.join(temprecipe, 'navit_0.5.0.bb')
- srcuri = 'http://downloads.sourceforge.net/project/navit/v0.5.0/navit-0.5.0.tar.gz'
+ recipefile = os.path.join(temprecipe, 'taglib_1.11.1.bb')
+ srcuri = 'http://taglib.github.io/releases/taglib-1.11.1.tar.gz'
result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
self.assertTrue(os.path.isfile(recipefile))
checkvars = {}
- checkvars['LICENSE'] = set(['Unknown', 'GPLv2', 'LGPLv2'])
- checkvars['SRC_URI'] = 'http://downloads.sourceforge.net/project/navit/v${PV}/navit-${PV}.tar.gz'
- checkvars['SRC_URI[md5sum]'] = '242f398e979a6b8c0f3c802b63435b68'
- checkvars['SRC_URI[sha256sum]'] = '13353481d7fc01a4f64e385dda460b51496366bba0fd2cc85a89a0747910e94d'
- checkvars['DEPENDS'] = set(['freetype', 'zlib', 'openssl', 'glib-2.0', 'virtual/libgl', 'virtual/egl', 'gtk+', 'libpng', 'libsdl', 'freeglut', 'dbus-glib'])
- inherits = ['cmake', 'python-dir', 'gettext', 'pkgconfig']
+ checkvars['LICENSE'] = set(['LGPL-2.1-only', 'MPL-1.1-only'])
+ checkvars['SRC_URI'] = 'http://taglib.github.io/releases/taglib-${PV}.tar.gz'
+ checkvars['SRC_URI[sha256sum]'] = 'b6d1a5a610aae6ff39d93de5efd0fdc787aa9e9dc1e7026fa4c961b26563526b'
+ checkvars['DEPENDS'] = set(['boost', 'zlib'])
+ inherits = ['cmake']
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_npm(self):
+ collections = get_bb_var('BBFILE_COLLECTIONS').split()
+ if "openembedded-layer" not in collections:
+ self.skipTest("Test needs meta-oe for nodejs")
+
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ recipefile = os.path.join(temprecipe, 'savoirfairelinux-node-server-example_1.0.0.bb')
+ shrinkwrap = os.path.join(temprecipe, 'savoirfairelinux-node-server-example', 'npm-shrinkwrap.json')
+ srcuri = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0'
+ result = runCmd('recipetool create -o %s \'%s\'' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ self.assertTrue(os.path.isfile(shrinkwrap))
+ checkvars = {}
+ checkvars['SUMMARY'] = 'Node Server Example'
+ checkvars['HOMEPAGE'] = 'https://github.com/savoirfairelinux/node-server-example#readme'
+ checkvars['LICENSE'] = 'BSD-3-Clause & ISC & MIT & Unknown'
+ urls = []
+ urls.append('npm://registry.npmjs.org/;package=@savoirfairelinux/node-server-example;version=${PV}')
+ urls.append('npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json')
+ checkvars['SRC_URI'] = set(urls)
+ checkvars['S'] = '${WORKDIR}/npm'
+ checkvars['LICENSE:${PN}'] = 'MIT'
+ checkvars['LICENSE:${PN}-base64'] = 'Unknown'
+ checkvars['LICENSE:${PN}-accepts'] = 'MIT'
+ checkvars['LICENSE:${PN}-inherits'] = 'ISC'
+ inherits = ['npm']
self._test_recipe_contents(recipefile, checkvars, inherits)
- @OETestID(1638)
def test_recipetool_create_github(self):
- # Basic test to see if github URL mangling works
+ # Basic test to see if github URL mangling works. Deliberately use an
+ # older release of Meson at present so we don't need a toml parser.
temprecipe = os.path.join(self.tempdir, 'recipe')
os.makedirs(temprecipe)
- recipefile = os.path.join(temprecipe, 'meson_git.bb')
- srcuri = 'https://github.com/mesonbuild/meson;rev=0.32.0'
- result = runCmd(['recipetool', 'create', '-o', temprecipe, srcuri])
+ recipefile = os.path.join(temprecipe, 'python3-meson_git.bb')
+ srcuri = 'https://github.com/mesonbuild/meson;rev=0.52.1'
+ cmd = ['recipetool', 'create', '-o', temprecipe, srcuri]
+ result = runCmd(cmd)
+ self.assertTrue(os.path.isfile(recipefile), msg="recipe %s not created for command %s, output %s" % (recipefile, " ".join(cmd), result.output))
+ checkvars = {}
+ checkvars['LICENSE'] = set(['Apache-2.0', "Unknown"])
+ checkvars['SRC_URI'] = 'git://github.com/mesonbuild/meson;protocol=https;branch=0.52'
+ inherits = ['setuptools3']
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_python3_setuptools(self):
+ # Test creating python3 package from tarball (using setuptools3 class)
+ # Use the --no-pypi switch to avoid creating a pypi-enabled recipe, and
+ # check the created recipe as if it were a more general tarball
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'python-magic'
+ pv = '0.4.15'
+ recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv))
+ srcuri = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-%s.tar.gz' % pv
+ result = runCmd('recipetool create --no-pypi -o %s %s' % (temprecipe, srcuri))
self.assertTrue(os.path.isfile(recipefile))
checkvars = {}
- checkvars['LICENSE'] = set(['Apache-2.0'])
- checkvars['SRC_URI'] = 'git://github.com/mesonbuild/meson;protocol=https'
- inherits = ['setuptools']
+ checkvars['LICENSE'] = set(['MIT'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88'
+ checkvars['SRC_URI'] = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-${PV}.tar.gz'
+ checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5'
+ inherits = ['setuptools3']
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_python3_setuptools_pypi_tarball(self):
+ # Test creating python3 package from tarball (using setuptools3 and pypi classes)
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'python-magic'
+ pv = '0.4.15'
+ recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv))
+ srcuri = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-%s.tar.gz' % pv
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['LICENSE'] = set(['MIT'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88'
+ checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5'
+ checkvars['PYPI_PACKAGE'] = pn
+ inherits = ['setuptools3', 'pypi']
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_python3_setuptools_pypi(self):
+ # Test creating python3 package from pypi url (using setuptools3 and pypi classes)
+ # Intentionally use the setuptools3 class here instead of any of the pep517 classes
+ # to avoid the toml dependency and allow this test to run on host autobuilders
+ # with older versions of Python
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'python-magic'
+ pv = '0.4.15'
+ recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv))
+ # First specify the required version in the url
+ srcuri = 'https://pypi.org/project/%s/%s' % (pn, pv)
+ runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['LICENSE'] = set(['MIT'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88'
+ checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5'
+ checkvars['PYPI_PACKAGE'] = pn
+ inherits = ['setuptools3', "pypi"]
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ # Now specify the version as a recipetool parameter
+ runCmd('rm -rf %s' % recipefile)
+ self.assertFalse(os.path.isfile(recipefile))
+ srcuri = 'https://pypi.org/project/%s' % pn
+ runCmd('recipetool create -o %s %s --version %s' % (temprecipe, srcuri, pv))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['LICENSE'] = set(['MIT'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88'
+ checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5'
+ checkvars['PYPI_PACKAGE'] = pn
+ inherits = ['setuptools3', "pypi"]
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ # Now try to grab the latest version of the package. We cannot guess the recipe name
+ # without hardcoding the latest version, which would mean updating the test for each
+ # release, so match the created file name with a regexp instead
+ runCmd('rm -rf %s' % recipefile)
+ self.assertFalse(os.path.isfile(recipefile))
+ recipefile_re = r'%s_(.*)\.bb' % pn
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ dirlist = os.listdir(temprecipe)
+ if len(dirlist) > 1:
+ self.fail('recipetool created more than just one file; output:\n%s\ndirlist:\n%s' % (result.output, str(dirlist)))
+ if len(dirlist) < 1 or not os.path.isfile(os.path.join(temprecipe, dirlist[0])):
+ self.fail('recipetool did not create recipe file; output:\n%s\ndirlist:\n%s' % (result.output, str(dirlist)))
+ import re
+ match = re.match(recipefile_re, dirlist[0])
+ self.assertTrue(match)
+ latest_pv = match.group(1)
+ self.assertTrue(latest_pv != pv)
+ recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, latest_pv))
+ # Do not check LIC_FILES_CHKSUM and the SRC_URI checksum here to avoid having to update the test on each release
+ checkvars = {}
+ checkvars['LICENSE'] = set(['MIT'])
+ checkvars['PYPI_PACKAGE'] = pn
+ inherits = ['setuptools3', "pypi"]
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_python3_pep517_setuptools_build_meta(self):
+ # This test requires Python 3.11 or above (for the tomllib module) or the tomli module to be installed
+ needTomllib(self)
+
+ # Test creating python3 package from tarball (using setuptools.build_meta class)
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'webcolors'
+ pv = '1.13'
+ recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
+ srcuri = 'https://files.pythonhosted.org/packages/a1/fb/f95560c6a5d4469d9c49e24cf1b5d4d21ffab5608251c6020a965fb7791c/%s-%s.tar.gz' % (pn, pv)
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['SUMMARY'] = 'A library for working with the color formats defined by HTML and CSS.'
+ checkvars['LICENSE'] = set(['BSD-3-Clause'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=702b1ef12cf66832a88f24c8f2ee9c19'
+ checkvars['SRC_URI[sha256sum]'] = 'c225b674c83fa923be93d235330ce0300373d02885cef23238813b0d5668304a'
+ inherits = ['python_setuptools_build_meta', 'pypi']
+
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_python3_pep517_poetry_core_masonry_api(self):
+ # This test requires Python 3.11 or above (for the tomllib module) or the tomli module to be installed
+ needTomllib(self)
+
+ # Test creating python3 package from tarball (using poetry.core.masonry.api class)
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'iso8601'
+ pv = '2.1.0'
+ recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
+ srcuri = 'https://files.pythonhosted.org/packages/b9/f3/ef59cee614d5e0accf6fd0cbba025b93b272e626ca89fb70a3e9187c5d15/%s-%s.tar.gz' % (pn, pv)
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['SUMMARY'] = 'Simple module to parse ISO 8601 dates'
+ checkvars['LICENSE'] = set(['MIT'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=aab31f2ef7ba214a5a341eaa47a7f367'
+ checkvars['SRC_URI[sha256sum]'] = '6b1d3829ee8921c4301998c909f7829fa9ed3cbdac0d3b16af2d743aed1ba8df'
+ inherits = ['python_poetry_core', 'pypi']
+
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_python3_pep517_flit_core_buildapi(self):
+        # This test requires Python 3.11 or above (for the tomllib module) or the tomli module to be installed
+ needTomllib(self)
+
+ # Test creating python3 package from tarball (using flit_core.buildapi class)
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'typing-extensions'
+ pv = '4.8.0'
+ recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
+ srcuri = 'https://files.pythonhosted.org/packages/1f/7a/8b94bb016069caa12fc9f587b28080ac33b4fbb8ca369b98bc0a4828543e/typing_extensions-%s.tar.gz' % pv
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['SUMMARY'] = 'Backported and Experimental Type Hints for Python 3.8+'
+ checkvars['LICENSE'] = set(['PSF-2.0'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=fcf6b249c2641540219a727f35d8d2c2'
+ checkvars['SRC_URI[sha256sum]'] = 'df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef'
+ inherits = ['python_flit_core', 'pypi']
+
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_python3_pep517_hatchling(self):
+        # This test requires Python 3.11 or above (for the tomllib module) or the tomli module to be installed
+ needTomllib(self)
+
+ # Test creating python3 package from tarball (using hatchling class)
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'jsonschema'
+ pv = '4.19.1'
+ recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
+ srcuri = 'https://files.pythonhosted.org/packages/e4/43/087b24516db11722c8687e0caf0f66c7785c0b1c51b0ab951dfde924e3f5/jsonschema-%s.tar.gz' % pv
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['SUMMARY'] = 'An implementation of JSON Schema validation for Python'
+ checkvars['HOMEPAGE'] = 'https://github.com/python-jsonschema/jsonschema'
+ checkvars['LICENSE'] = set(['MIT'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=7a60a81c146ec25599a3e1dabb8610a8 file://json/LICENSE;md5=9d4de43111d33570c8fe49b4cb0e01af'
+ checkvars['SRC_URI[sha256sum]'] = 'ec84cc37cfa703ef7cd4928db24f9cb31428a5d0fa77747b8b51a847458e0bbf'
+ inherits = ['python_hatchling', 'pypi']
+
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_python3_pep517_maturin(self):
+        # This test requires Python 3.11 or above (for the tomllib module) or the tomli module to be installed
+ needTomllib(self)
+
+ # Test creating python3 package from tarball (using maturin class)
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'pydantic-core'
+ pv = '2.14.5'
+ recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
+ srcuri = 'https://files.pythonhosted.org/packages/64/26/cffb93fe9c6b5a91c497f37fae14a4b073ecbc47fc36a9979c7aa888b245/pydantic_core-%s.tar.gz' % pv
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['HOMEPAGE'] = 'https://github.com/pydantic/pydantic-core'
+ checkvars['LICENSE'] = set(['MIT'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=ab599c188b4a314d2856b3a55030c75c'
+ checkvars['SRC_URI[sha256sum]'] = '6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71'
+ inherits = ['python_maturin', 'pypi']
+
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_python3_pep517_mesonpy(self):
+        # This test requires Python 3.11 or above (for the tomllib module) or the tomli module to be installed
+ needTomllib(self)
+
+ # Test creating python3 package from tarball (using mesonpy class)
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'siphash24'
+ pv = '1.4'
+ recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
+ srcuri = 'https://files.pythonhosted.org/packages/c2/32/b934a70592f314afcfa86c7f7e388804a8061be65b822e2aa07e573b6477/%s-%s.tar.gz' % (pn, pv)
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['SRC_URI[sha256sum]'] = '7fd65e39b2a7c8c4ddc3a168a687f4610751b0ac2ebb518783c0cdfc30bec4a0'
+ inherits = ['python_mesonpy', 'pypi']
+
self._test_recipe_contents(recipefile, checkvars, inherits)
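Taken together, the PEP 517 tests above pin down which bbclass recipetool is expected to inherit for each pyproject.toml build backend. The following is a summary of those test expectations only, not recipetool's actual detection code:

# Build backend (as named in the test comments) -> expected inherit
PEP517_BACKEND_TO_INHERIT = {
    'setuptools.build_meta': 'python_setuptools_build_meta',
    'poetry.core.masonry.api': 'python_poetry_core',
    'flit_core.buildapi': 'python_flit_core',
    'hatchling': 'python_hatchling',
    'maturin': 'python_maturin',
    'mesonpy': 'python_mesonpy',
}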
- @OETestID(1639)
def test_recipetool_create_github_tarball(self):
- # Basic test to ensure github URL mangling doesn't apply to release tarballs
+ # Basic test to ensure github URL mangling doesn't apply to release tarballs.
+ # Deliberately use an older release of Meson at present so we don't need a toml parser.
temprecipe = os.path.join(self.tempdir, 'recipe')
os.makedirs(temprecipe)
- pv = '0.32.0'
- recipefile = os.path.join(temprecipe, 'meson_%s.bb' % pv)
+ pv = '0.52.1'
+ recipefile = os.path.join(temprecipe, 'python3-meson_%s.bb' % pv)
srcuri = 'https://github.com/mesonbuild/meson/releases/download/%s/meson-%s.tar.gz' % (pv, pv)
result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
self.assertTrue(os.path.isfile(recipefile))
checkvars = {}
checkvars['LICENSE'] = set(['Apache-2.0'])
checkvars['SRC_URI'] = 'https://github.com/mesonbuild/meson/releases/download/${PV}/meson-${PV}.tar.gz'
- inherits = ['setuptools']
+ inherits = ['setuptools3']
self._test_recipe_contents(recipefile, checkvars, inherits)
- @OETestID(1637)
- def test_recipetool_create_git_http(self):
+ def _test_recipetool_create_git(self, srcuri, branch=None):
# Basic test to check http git URL mangling works
temprecipe = os.path.join(self.tempdir, 'recipe')
os.makedirs(temprecipe)
- recipefile = os.path.join(temprecipe, 'matchbox-terminal_git.bb')
- srcuri = 'http://git.yoctoproject.org/git/matchbox-terminal'
- result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ name = srcuri.split(';')[0].split('/')[-1]
+ recipefile = os.path.join(temprecipe, name + '_git.bb')
+ options = ' -B %s' % branch if branch else ''
+ result = runCmd('recipetool create -o %s%s "%s"' % (temprecipe, options, srcuri))
self.assertTrue(os.path.isfile(recipefile))
checkvars = {}
- checkvars['LICENSE'] = set(['GPLv2'])
- checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/matchbox-terminal;protocol=http'
- inherits = ['pkgconfig', 'autotools']
+ checkvars['SRC_URI'] = srcuri
+ for scheme in ['http', 'https']:
+ if srcuri.startswith(scheme + ":"):
+ checkvars['SRC_URI'] = 'git%s;protocol=%s' % (srcuri[len(scheme):], scheme)
+ if ';branch=' not in srcuri:
+ checkvars['SRC_URI'] += ';branch=' + (branch or 'master')
+ self._test_recipe_contents(recipefile, checkvars, [])
+
+ def test_recipetool_create_git_http(self):
+ self._test_recipetool_create_git('http://git.yoctoproject.org/git/matchbox-keyboard')
+
+ def test_recipetool_create_git_srcuri_master(self):
+ self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard;branch=master;protocol=https')
+
+ def test_recipetool_create_git_srcuri_branch(self):
+ self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard;branch=matchbox-keyboard-0-1;protocol=https')
+
+ def test_recipetool_create_git_srcbranch(self):
+ self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard;protocol=https', 'matchbox-keyboard-0-1')
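The SRC_URI expectation that _test_recipetool_create_git builds can be restated as a standalone sketch, for illustration only (recipetool itself performs the equivalent mangling when it writes the recipe):

def expected_git_src_uri(srcuri, branch=None):
    # http(s) source URLs are rewritten to git:// with an explicit protocol parameter
    expected = srcuri
    for scheme in ('http', 'https'):
        if srcuri.startswith(scheme + ':'):
            expected = 'git%s;protocol=%s' % (srcuri[len(scheme):], scheme)
    # a branch parameter is always expected; default to master if none was requested
    if ';branch=' not in srcuri:
        expected += ';branch=' + (branch or 'master')
    return expected

For example, 'http://git.yoctoproject.org/git/matchbox-keyboard' is expected to turn into 'git://git.yoctoproject.org/git/matchbox-keyboard;protocol=http;branch=master'.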
+
+    def _go_urifiy(self, url, version, modulepath='', pathmajor='', subdir=''):
+ modulepath = ",path='%s'" % modulepath if len(modulepath) else ''
+ pathmajor = ",pathmajor='%s'" % pathmajor if len(pathmajor) else ''
+ subdir = ",subdir='%s'" % subdir if len(subdir) else ''
+ return "${@go_src_uri('%s','%s'%s%s%s)}" % (url, version, modulepath, pathmajor, subdir)
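Two usage examples for the helper above, with values taken from the dependency tuples used further down (shown as comments only):

# self._go_urifiy('github.com/eclipse/paho.mqtt.golang', 'v1.4.2', '', '', '')
#   -> "${@go_src_uri('github.com/eclipse/paho.mqtt.golang','v1.4.2')}"
# self._go_urifiy('github.com/fxamacker/cbor', 'v2.4.0', 'github.com/fxamacker/cbor/v2', '/v2', '')
#   -> "${@go_src_uri('github.com/fxamacker/cbor','v2.4.0',path='github.com/fxamacker/cbor/v2',pathmajor='/v2')}"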
+
+ def test_recipetool_create_go(self):
+ # Basic test to check go recipe generation
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+
+ recipefile = os.path.join(temprecipe, 'edgex-go_git.bb')
+ deps_require_file = os.path.join(temprecipe, 'edgex-go', 'edgex-go-modules.inc')
+ lics_require_file = os.path.join(temprecipe, 'edgex-go', 'edgex-go-licenses.inc')
+ modules_txt_file = os.path.join(temprecipe, 'edgex-go', 'modules.txt')
+
+ srcuri = 'https://github.com/edgexfoundry/edgex-go.git'
+ srcrev = "v3.0.0"
+ srcbranch = "main"
+
+ result = runCmd('recipetool create -o %s %s -S %s -B %s' % (temprecipe, srcuri, srcrev, srcbranch))
+
+ self.maxDiff = None
+ inherits = ['go-vendor']
+
+ checkvars = {}
+ checkvars['GO_IMPORT'] = "github.com/edgexfoundry/edgex-go"
+ checkvars['SRC_URI'] = {'git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https',
+ 'file://modules.txt'}
+ checkvars['LIC_FILES_CHKSUM'] = {'file://src/${GO_IMPORT}/LICENSE;md5=8f8bc924cf73f6a32381e5fd4c58d603'}
+
+ self.assertTrue(os.path.isfile(recipefile))
self._test_recipe_contents(recipefile, checkvars, inherits)
+ checkvars = {}
+ checkvars['VENDORED_LIC_FILES_CHKSUM'] = set(
+ ['file://src/${GO_IMPORT}/vendor/github.com/Microsoft/go-winio/LICENSE;md5=69205ff73858f2c22b2ca135b557e8ef',
+ 'file://src/${GO_IMPORT}/vendor/github.com/armon/go-metrics/LICENSE;md5=d2d77030c0183e3d1e66d26dc1f243be',
+ 'file://src/${GO_IMPORT}/vendor/github.com/cenkalti/backoff/LICENSE;md5=1571d94433e3f3aa05267efd4dbea68b',
+ 'file://src/${GO_IMPORT}/vendor/github.com/davecgh/go-spew/LICENSE;md5=c06795ed54b2a35ebeeb543cd3a73e56',
+ 'file://src/${GO_IMPORT}/vendor/github.com/eclipse/paho.mqtt.golang/LICENSE;md5=dcdb33474b60c38efd27356d8f2edec7',
+ 'file://src/${GO_IMPORT}/vendor/github.com/eclipse/paho.mqtt.golang/edl-v10;md5=3adfcc70f5aeb7a44f3f9b495aa1fbf3',
+ 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-bootstrap/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff',
+ 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-configuration/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff',
+ 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-core-contracts/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff',
+ 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-messaging/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff',
+ 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-registry/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff',
+ 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-secrets/v3/LICENSE;md5=f9fa2f4f8e0ef8cc7b5dd150963eb457',
+ 'file://src/${GO_IMPORT}/vendor/github.com/fatih/color/LICENSE.md;md5=316e6d590bdcde7993fb175662c0dd5a',
+ 'file://src/${GO_IMPORT}/vendor/github.com/fxamacker/cbor/v2/LICENSE;md5=827f5a2fa861382d35a3943adf9ebb86',
+ 'file://src/${GO_IMPORT}/vendor/github.com/go-jose/go-jose/v3/LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57',
+ 'file://src/${GO_IMPORT}/vendor/github.com/go-jose/go-jose/v3/json/LICENSE;md5=591778525c869cdde0ab5a1bf283cd81',
+ 'file://src/${GO_IMPORT}/vendor/github.com/go-kit/log/LICENSE;md5=5b7c15ad5fffe2ff6e9d58a6c161f082',
+ 'file://src/${GO_IMPORT}/vendor/github.com/go-logfmt/logfmt/LICENSE;md5=98e39517c38127f969de33057067091e',
+ 'file://src/${GO_IMPORT}/vendor/github.com/go-playground/locales/LICENSE;md5=3ccbda375ee345400ad1da85ba522301',
+ 'file://src/${GO_IMPORT}/vendor/github.com/go-playground/universal-translator/LICENSE;md5=2e2b21ef8f61057977d27c727c84bef1',
+ 'file://src/${GO_IMPORT}/vendor/github.com/go-playground/validator/v10/LICENSE;md5=a718a0f318d76f7c5d510cbae84f0b60',
+ 'file://src/${GO_IMPORT}/vendor/github.com/go-redis/redis/v7/LICENSE;md5=58103aa5ea1ee9b7a369c9c4a95ef9b5',
+ 'file://src/${GO_IMPORT}/vendor/github.com/golang/protobuf/LICENSE;md5=939cce1ec101726fa754e698ac871622',
+ 'file://src/${GO_IMPORT}/vendor/github.com/gomodule/redigo/LICENSE;md5=2ee41112a44fe7014dce33e26468ba93',
+ 'file://src/${GO_IMPORT}/vendor/github.com/google/uuid/LICENSE;md5=88073b6dd8ec00fe09da59e0b6dfded1',
+ 'file://src/${GO_IMPORT}/vendor/github.com/gorilla/mux/LICENSE;md5=33fa1116c45f9e8de714033f99edde13',
+ 'file://src/${GO_IMPORT}/vendor/github.com/gorilla/websocket/LICENSE;md5=c007b54a1743d596f46b2748d9f8c044',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/consul/api/LICENSE;md5=b8a277a612171b7526e9be072f405ef4',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/errwrap/LICENSE;md5=b278a92d2c1509760384428817710378',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-cleanhttp/LICENSE;md5=65d26fcc2f35ea6a181ac777e42db1ea',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-hclog/LICENSE;md5=ec7f605b74b9ad03347d0a93a5cc7eb8',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-immutable-radix/LICENSE;md5=65d26fcc2f35ea6a181ac777e42db1ea',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-multierror/LICENSE;md5=d44fdeb607e2d2614db9464dbedd4094',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-rootcerts/LICENSE;md5=65d26fcc2f35ea6a181ac777e42db1ea',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/golang-lru/LICENSE;md5=f27a50d2e878867827842f2c60e30bfc',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/serf/LICENSE;md5=b278a92d2c1509760384428817710378',
+ 'file://src/${GO_IMPORT}/vendor/github.com/leodido/go-urn/LICENSE;md5=8f50db5538ec1148a9b3d14ed96c3418',
+ 'file://src/${GO_IMPORT}/vendor/github.com/mattn/go-colorable/LICENSE;md5=24ce168f90aec2456a73de1839037245',
+ 'file://src/${GO_IMPORT}/vendor/github.com/mattn/go-isatty/LICENSE;md5=f509beadd5a11227c27b5d2ad6c9f2c6',
+ 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/consulstructure/LICENSE;md5=96ada10a9e51c98c4656f2cede08c673',
+ 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/copystructure/LICENSE;md5=56da355a12d4821cda57b8f23ec34bc4',
+ 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/go-homedir/LICENSE;md5=3f7765c3d4f58e1f84c4313cecf0f5bd',
+ 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/mapstructure/LICENSE;md5=3f7765c3d4f58e1f84c4313cecf0f5bd',
+ 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/reflectwalk/LICENSE;md5=3f7765c3d4f58e1f84c4313cecf0f5bd',
+ 'file://src/${GO_IMPORT}/vendor/github.com/nats-io/nats.go/LICENSE;md5=86d3f3a95c324c9479bd8986968f4327',
+ 'file://src/${GO_IMPORT}/vendor/github.com/nats-io/nkeys/LICENSE;md5=86d3f3a95c324c9479bd8986968f4327',
+ 'file://src/${GO_IMPORT}/vendor/github.com/nats-io/nuid/LICENSE;md5=86d3f3a95c324c9479bd8986968f4327',
+ 'file://src/${GO_IMPORT}/vendor/github.com/pmezard/go-difflib/LICENSE;md5=e9a2ebb8de779a07500ddecca806145e',
+ 'file://src/${GO_IMPORT}/vendor/github.com/rcrowley/go-metrics/LICENSE;md5=1bdf5d819f50f141366dabce3be1460f',
+ 'file://src/${GO_IMPORT}/vendor/github.com/spiffe/go-spiffe/v2/LICENSE;md5=86d3f3a95c324c9479bd8986968f4327',
+ 'file://src/${GO_IMPORT}/vendor/github.com/stretchr/objx/LICENSE;md5=d023fd31d3ca39ec61eec65a91732735',
+ 'file://src/${GO_IMPORT}/vendor/github.com/stretchr/testify/LICENSE;md5=188f01994659f3c0d310612333d2a26f',
+ 'file://src/${GO_IMPORT}/vendor/github.com/x448/float16/LICENSE;md5=de8f8e025d57fe7ee0b67f30d571323b',
+ 'file://src/${GO_IMPORT}/vendor/github.com/zeebo/errs/LICENSE;md5=84914ab36fc0eb48edbaa53e66e8d326',
+ 'file://src/${GO_IMPORT}/vendor/golang.org/x/crypto/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
+ 'file://src/${GO_IMPORT}/vendor/golang.org/x/mod/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
+ 'file://src/${GO_IMPORT}/vendor/golang.org/x/net/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
+ 'file://src/${GO_IMPORT}/vendor/golang.org/x/sync/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
+ 'file://src/${GO_IMPORT}/vendor/golang.org/x/sys/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
+ 'file://src/${GO_IMPORT}/vendor/golang.org/x/text/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
+ 'file://src/${GO_IMPORT}/vendor/golang.org/x/tools/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
+ 'file://src/${GO_IMPORT}/vendor/google.golang.org/genproto/LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57',
+ 'file://src/${GO_IMPORT}/vendor/google.golang.org/grpc/LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57',
+ 'file://src/${GO_IMPORT}/vendor/google.golang.org/protobuf/LICENSE;md5=02d4002e9171d41a8fad93aa7faf3956',
+ 'file://src/${GO_IMPORT}/vendor/gopkg.in/eapache/queue.v1/LICENSE;md5=1bfd4408d3de090ef6b908b0cc45a316',
+ 'file://src/${GO_IMPORT}/vendor/gopkg.in/yaml.v3/LICENSE;md5=3c91c17266710e16afdbb2b6d15c761c'])
+
+ self.assertTrue(os.path.isfile(lics_require_file))
+ self._test_recipe_contents(lics_require_file, checkvars, [])
+
+ dependencies = \
+ [ ('github.com/eclipse/paho.mqtt.golang','v1.4.2', '', '', ''),
+ ('github.com/edgexfoundry/go-mod-bootstrap','v3.0.1','github.com/edgexfoundry/go-mod-bootstrap/v3','/v3', ''),
+ ('github.com/edgexfoundry/go-mod-configuration','v3.0.0','github.com/edgexfoundry/go-mod-configuration/v3','/v3', ''),
+ ('github.com/edgexfoundry/go-mod-core-contracts','v3.0.0','github.com/edgexfoundry/go-mod-core-contracts/v3','/v3', ''),
+ ('github.com/edgexfoundry/go-mod-messaging','v3.0.0','github.com/edgexfoundry/go-mod-messaging/v3','/v3', ''),
+ ('github.com/edgexfoundry/go-mod-secrets','v3.0.1','github.com/edgexfoundry/go-mod-secrets/v3','/v3', ''),
+ ('github.com/fxamacker/cbor','v2.4.0','github.com/fxamacker/cbor/v2','/v2', ''),
+ ('github.com/gomodule/redigo','v1.8.9', '', '', ''),
+ ('github.com/google/uuid','v1.3.0', '', '', ''),
+ ('github.com/gorilla/mux','v1.8.0', '', '', ''),
+ ('github.com/rcrowley/go-metrics','v0.0.0-20201227073835-cf1acfcdf475', '', '', ''),
+ ('github.com/spiffe/go-spiffe','v2.1.4','github.com/spiffe/go-spiffe/v2','/v2', ''),
+ ('github.com/stretchr/testify','v1.8.2', '', '', ''),
+ ('go.googlesource.com/crypto','v0.8.0','golang.org/x/crypto', '', ''),
+ ('gopkg.in/eapache/queue.v1','v1.1.0', '', '', ''),
+ ('gopkg.in/yaml.v3','v3.0.1', '', '', ''),
+ ('github.com/microsoft/go-winio','v0.6.0','github.com/Microsoft/go-winio', '', ''),
+ ('github.com/hashicorp/go-metrics','v0.3.10','github.com/armon/go-metrics', '', ''),
+ ('github.com/cenkalti/backoff','v2.2.1+incompatible', '', '', ''),
+ ('github.com/davecgh/go-spew','v1.1.1', '', '', ''),
+ ('github.com/edgexfoundry/go-mod-registry','v3.0.0','github.com/edgexfoundry/go-mod-registry/v3','/v3', ''),
+ ('github.com/fatih/color','v1.9.0', '', '', ''),
+ ('github.com/go-jose/go-jose','v3.0.0','github.com/go-jose/go-jose/v3','/v3', ''),
+ ('github.com/go-kit/log','v0.2.1', '', '', ''),
+ ('github.com/go-logfmt/logfmt','v0.5.1', '', '', ''),
+ ('github.com/go-playground/locales','v0.14.1', '', '', ''),
+ ('github.com/go-playground/universal-translator','v0.18.1', '', '', ''),
+ ('github.com/go-playground/validator','v10.13.0','github.com/go-playground/validator/v10','/v10', ''),
+ ('github.com/go-redis/redis','v7.3.0','github.com/go-redis/redis/v7','/v7', ''),
+ ('github.com/golang/protobuf','v1.5.2', '', '', ''),
+ ('github.com/gorilla/websocket','v1.4.2', '', '', ''),
+ ('github.com/hashicorp/consul','v1.20.0','github.com/hashicorp/consul/api', '', 'api'),
+ ('github.com/hashicorp/errwrap','v1.0.0', '', '', ''),
+ ('github.com/hashicorp/go-cleanhttp','v0.5.1', '', '', ''),
+ ('github.com/hashicorp/go-hclog','v0.14.1', '', '', ''),
+ ('github.com/hashicorp/go-immutable-radix','v1.3.0', '', '', ''),
+ ('github.com/hashicorp/go-multierror','v1.1.1', '', '', ''),
+ ('github.com/hashicorp/go-rootcerts','v1.0.2', '', '', ''),
+ ('github.com/hashicorp/golang-lru','v0.5.4', '', '', ''),
+ ('github.com/hashicorp/serf','v0.10.1', '', '', ''),
+ ('github.com/leodido/go-urn','v1.2.3', '', '', ''),
+ ('github.com/mattn/go-colorable','v0.1.12', '', '', ''),
+ ('github.com/mattn/go-isatty','v0.0.14', '', '', ''),
+ ('github.com/mitchellh/consulstructure','v0.0.0-20190329231841-56fdc4d2da54', '', '', ''),
+ ('github.com/mitchellh/copystructure','v1.2.0', '', '', ''),
+ ('github.com/mitchellh/go-homedir','v1.1.0', '', '', ''),
+ ('github.com/mitchellh/mapstructure','v1.5.0', '', '', ''),
+ ('github.com/mitchellh/reflectwalk','v1.0.2', '', '', ''),
+ ('github.com/nats-io/nats.go','v1.25.0', '', '', ''),
+ ('github.com/nats-io/nkeys','v0.4.4', '', '', ''),
+ ('github.com/nats-io/nuid','v1.0.1', '', '', ''),
+ ('github.com/pmezard/go-difflib','v1.0.0', '', '', ''),
+ ('github.com/stretchr/objx','v0.5.0', '', '', ''),
+ ('github.com/x448/float16','v0.8.4', '', '', ''),
+ ('github.com/zeebo/errs','v1.3.0', '', '', ''),
+ ('go.googlesource.com/mod','v0.8.0','golang.org/x/mod', '', ''),
+ ('go.googlesource.com/net','v0.9.0','golang.org/x/net', '', ''),
+ ('go.googlesource.com/sync','v0.1.0','golang.org/x/sync', '', ''),
+ ('go.googlesource.com/sys','v0.7.0','golang.org/x/sys', '', ''),
+ ('go.googlesource.com/text','v0.9.0','golang.org/x/text', '', ''),
+ ('go.googlesource.com/tools','v0.6.0','golang.org/x/tools', '', ''),
+ ('github.com/googleapis/go-genproto','v0.0.0-20230223222841-637eb2293923','google.golang.org/genproto', '', ''),
+ ('github.com/grpc/grpc-go','v1.53.0','google.golang.org/grpc', '', ''),
+ ('go.googlesource.com/protobuf','v1.28.1','google.golang.org/protobuf', '', ''),
+ ]
+
+ src_uri = set()
+ for d in dependencies:
+ src_uri.add(self._go_urifiy(*d))
+
+ checkvars = {}
+ checkvars['GO_DEPENDENCIES_SRC_URI'] = src_uri
+
+ self.assertTrue(os.path.isfile(deps_require_file))
+ self._test_recipe_contents(deps_require_file, checkvars, [])
+
+ def test_recipetool_create_go_replace_modules(self):
+ # Check handling of replaced modules
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+
+ recipefile = os.path.join(temprecipe, 'openapi-generator_git.bb')
+ deps_require_file = os.path.join(temprecipe, 'openapi-generator', 'go-modules.inc')
+ lics_require_file = os.path.join(temprecipe, 'openapi-generator', 'go-licenses.inc')
+ modules_txt_file = os.path.join(temprecipe, 'openapi-generator', 'modules.txt')
+
+ srcuri = 'https://github.com/OpenAPITools/openapi-generator.git'
+ srcrev = "v7.2.0"
+ srcbranch = "master"
+ srcsubdir = "samples/openapi3/client/petstore/go"
+
+ result = runCmd('recipetool create -o %s %s -S %s -B %s --src-subdir %s' % (temprecipe, srcuri, srcrev, srcbranch, srcsubdir))
+
+ self.maxDiff = None
+ inherits = ['go-vendor']
+
+ checkvars = {}
+ checkvars['GO_IMPORT'] = "github.com/OpenAPITools/openapi-generator/samples/openapi3/client/petstore/go"
+ checkvars['SRC_URI'] = {'git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https',
+ 'file://modules.txt'}
+
+ self.assertNotIn('Traceback', result.output)
+ self.assertIn('No license file was detected for the main module', result.output)
+ self.assertTrue(os.path.isfile(recipefile))
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ # make sure that dependencies don't mention local directory ./go-petstore
+ dependencies = \
+ [ ('github.com/stretchr/testify','v1.8.4', '', '', ''),
+ ('go.googlesource.com/oauth2','v0.10.0','golang.org/x/oauth2', '', ''),
+ ('github.com/davecgh/go-spew','v1.1.1', '', '', ''),
+ ('github.com/golang/protobuf','v1.5.3', '', '', ''),
+ ('github.com/kr/pretty','v0.3.0', '', '', ''),
+ ('github.com/pmezard/go-difflib','v1.0.0', '', '', ''),
+ ('github.com/rogpeppe/go-internal','v1.9.0', '', '', ''),
+ ('go.googlesource.com/net','v0.12.0','golang.org/x/net', '', ''),
+ ('github.com/golang/appengine','v1.6.7','google.golang.org/appengine', '', ''),
+ ('go.googlesource.com/protobuf','v1.31.0','google.golang.org/protobuf', '', ''),
+ ('gopkg.in/check.v1','v1.0.0-20201130134442-10cb98267c6c', '', '', ''),
+ ('gopkg.in/yaml.v3','v3.0.1', '', '', ''),
+ ]
+
+ src_uri = set()
+ for d in dependencies:
+ src_uri.add(self._go_urifiy(*d))
+
+ checkvars = {}
+ checkvars['GO_DEPENDENCIES_SRC_URI'] = src_uri
+
+ self.assertTrue(os.path.isfile(deps_require_file))
+ self._test_recipe_contents(deps_require_file, checkvars, [])
+
+class RecipetoolTests(RecipetoolBase):
+
+ @classmethod
+ def setUpClass(cls):
+ import sys
+
+ super(RecipetoolTests, cls).setUpClass()
+ bb_vars = get_bb_vars(['BBPATH'])
+ cls.bbpath = bb_vars['BBPATH']
+ libpath = os.path.join(get_bb_var('COREBASE'), 'scripts', 'lib', 'recipetool')
+ sys.path.insert(0, libpath)
+
def _copy_file_with_cleanup(self, srcfile, basedstdir, *paths):
dstdir = basedstdir
self.assertTrue(os.path.exists(dstdir))
for p in paths:
dstdir = os.path.join(dstdir, p)
if not os.path.exists(dstdir):
- os.makedirs(dstdir)
- self.track_for_cleanup(dstdir)
+ try:
+ os.makedirs(dstdir)
+ except PermissionError:
+ return False
+ except OSError as e:
+ if e.errno == errno.EROFS:
+ return False
+ else:
+ raise e
+ if p == "lib":
+ # Can race with other tests
+ self.add_command_to_tearDown('rmdir --ignore-fail-on-non-empty %s' % dstdir)
+ else:
+ self.track_for_cleanup(dstdir)
dstfile = os.path.join(dstdir, os.path.basename(srcfile))
if srcfile != dstfile:
- shutil.copy(srcfile, dstfile)
+ try:
+ shutil.copy(srcfile, dstfile)
+ except PermissionError:
+ return False
self.track_for_cleanup(dstfile)
+ return True
- @OETestID(1640)
def test_recipetool_load_plugin(self):
"""Test that recipetool loads only the first found plugin in BBPATH."""
@@ -515,20 +1035,147 @@ class RecipetoolTests(RecipetoolBase):
plugincontent = fh.readlines()
try:
self.assertIn('meta-selftest', srcfile, 'wrong bbpath plugin found')
- for path in searchpath:
- self._copy_file_with_cleanup(srcfile, path, 'lib', 'recipetool')
+ searchpath = [
+ path for path in searchpath
+ if self._copy_file_with_cleanup(srcfile, path, 'lib', 'recipetool')
+ ]
result = runCmd("recipetool --quiet count")
self.assertEqual(result.output, '1')
result = runCmd("recipetool --quiet multiloaded")
self.assertEqual(result.output, "no")
for path in searchpath:
result = runCmd("recipetool --quiet bbdir")
- self.assertEqual(result.output, path)
+ self.assertEqual(os.path.realpath(result.output), os.path.realpath(path))
os.unlink(os.path.join(result.output, 'lib', 'recipetool', 'bbpath.py'))
finally:
with open(srcfile, 'w') as fh:
fh.writelines(plugincontent)
+ def test_recipetool_handle_license_vars(self):
+ from create import handle_license_vars
+ from unittest.mock import Mock
+
+ commonlicdir = get_bb_var('COMMON_LICENSE_DIR')
+
+ class DataConnectorCopy(bb.tinfoil.TinfoilDataStoreConnector):
+ pass
+
+ d = DataConnectorCopy
+ d.getVar = Mock(return_value=commonlicdir)
+
+ srctree = tempfile.mkdtemp(prefix='recipetoolqa')
+ self.track_for_cleanup(srctree)
+
+ # Multiple licenses
+ licenses = ['MIT', 'ISC', 'BSD-3-Clause', 'Apache-2.0']
+ for licence in licenses:
+ shutil.copy(os.path.join(commonlicdir, licence), os.path.join(srctree, 'LICENSE.' + licence))
+ # Duplicate license
+ shutil.copy(os.path.join(commonlicdir, 'MIT'), os.path.join(srctree, 'LICENSE'))
+
+ extravalues = {
+ # Duplicate and missing licenses
+ 'LICENSE': 'Zlib & BSD-2-Clause & Zlib',
+ 'LIC_FILES_CHKSUM': [
+ 'file://README.md;md5=0123456789abcdef0123456789abcd'
+ ]
+ }
+ lines_before = []
+ handled = []
+ licvalues = handle_license_vars(srctree, lines_before, handled, extravalues, d)
+ expected_lines_before = [
+ '# WARNING: the following LICENSE and LIC_FILES_CHKSUM values are best guesses - it is',
+ '# your responsibility to verify that the values are complete and correct.',
+ '# NOTE: Original package / source metadata indicates license is: BSD-2-Clause & Zlib',
+ '#',
+ '# NOTE: multiple licenses have been detected; they have been separated with &',
+ '# in the LICENSE value for now since it is a reasonable assumption that all',
+ '# of the licenses apply. If instead there is a choice between the multiple',
+ '# licenses then you should change the value to separate the licenses with |',
+ '# instead of &. If there is any doubt, check the accompanying documentation',
+ '# to determine which situation is applicable.',
+ 'LICENSE = "Apache-2.0 & BSD-2-Clause & BSD-3-Clause & ISC & MIT & Zlib"',
+ 'LIC_FILES_CHKSUM = "file://LICENSE;md5=0835ade698e0bcf8506ecda2f7b4f302 \\\n'
+ ' file://LICENSE.Apache-2.0;md5=89aea4e17d99a7cacdbeed46a0096b10 \\\n'
+ ' file://LICENSE.BSD-3-Clause;md5=550794465ba0ec5312d6919e203a55f9 \\\n'
+ ' file://LICENSE.ISC;md5=f3b90e78ea0cffb20bf5cca7947a896d \\\n'
+ ' file://LICENSE.MIT;md5=0835ade698e0bcf8506ecda2f7b4f302 \\\n'
+ ' file://README.md;md5=0123456789abcdef0123456789abcd"',
+ ''
+ ]
+ self.assertEqual(lines_before, expected_lines_before)
+ expected_licvalues = [
+ ('MIT', 'LICENSE', '0835ade698e0bcf8506ecda2f7b4f302'),
+ ('Apache-2.0', 'LICENSE.Apache-2.0', '89aea4e17d99a7cacdbeed46a0096b10'),
+ ('BSD-3-Clause', 'LICENSE.BSD-3-Clause', '550794465ba0ec5312d6919e203a55f9'),
+ ('ISC', 'LICENSE.ISC', 'f3b90e78ea0cffb20bf5cca7947a896d'),
+ ('MIT', 'LICENSE.MIT', '0835ade698e0bcf8506ecda2f7b4f302')
+ ]
+ self.assertEqual(handled, [('license', expected_licvalues)])
+ self.assertEqual(extravalues, {})
+ self.assertEqual(licvalues, expected_licvalues)
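The md5 values asserted above are plain MD5 digests of the corresponding license files. A minimal way to reproduce one LIC_FILES_CHKSUM entry, for illustration only (this is not the code path handle_license_vars itself uses):

import hashlib

def lic_files_chksum_entry(relpath, abspath):
    # LIC_FILES_CHKSUM entries take the form "file://<path>;md5=<md5 of the whole file>"
    with open(abspath, 'rb') as f:
        digest = hashlib.md5(f.read()).hexdigest()
    return 'file://%s;md5=%s' % (relpath, digest)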
+
+
+ def test_recipetool_split_pkg_licenses(self):
+ from create import split_pkg_licenses
+ licvalues = [
+ # Duplicate licenses
+ ('BSD-2-Clause', 'x/COPYING', None),
+ ('BSD-2-Clause', 'x/LICENSE', None),
+ # Multiple licenses
+ ('MIT', 'x/a/LICENSE.MIT', None),
+ ('ISC', 'x/a/LICENSE.ISC', None),
+ # Alternative licenses
+ ('(MIT | ISC)', 'x/b/LICENSE', None),
+ # Alternative licenses without brackets
+ ('MIT | BSD-2-Clause', 'x/c/LICENSE', None),
+ # Multi licenses with alternatives
+ ('MIT', 'x/d/COPYING', None),
+ ('MIT | BSD-2-Clause', 'x/d/LICENSE', None),
+ # Multi licenses with alternatives and brackets
+ ('Apache-2.0 & ((MIT | ISC) & BSD-3-Clause)', 'x/e/LICENSE', None)
+ ]
+ packages = {
+ '${PN}': '',
+ 'a': 'x/a',
+ 'b': 'x/b',
+ 'c': 'x/c',
+ 'd': 'x/d',
+ 'e': 'x/e',
+ 'f': 'x/f',
+ 'g': 'x/g',
+ }
+ fallback_licenses = {
+ # Ignored
+ 'a': 'BSD-3-Clause',
+ # Used
+ 'f': 'BSD-3-Clause'
+ }
+ outlines = []
+ outlicenses = split_pkg_licenses(licvalues, packages, outlines, fallback_licenses)
+ expected_outlicenses = {
+ '${PN}': ['BSD-2-Clause'],
+ 'a': ['ISC', 'MIT'],
+ 'b': ['(ISC | MIT)'],
+ 'c': ['(BSD-2-Clause | MIT)'],
+ 'd': ['(BSD-2-Clause | MIT)', 'MIT'],
+ 'e': ['(ISC | MIT)', 'Apache-2.0', 'BSD-3-Clause'],
+ 'f': ['BSD-3-Clause'],
+ 'g': ['Unknown']
+ }
+ self.assertEqual(outlicenses, expected_outlicenses)
+ expected_outlines = [
+ 'LICENSE:${PN} = "BSD-2-Clause"',
+ 'LICENSE:a = "ISC & MIT"',
+ 'LICENSE:b = "(ISC | MIT)"',
+ 'LICENSE:c = "(BSD-2-Clause | MIT)"',
+ 'LICENSE:d = "(BSD-2-Clause | MIT) & MIT"',
+ 'LICENSE:e = "(ISC | MIT) & Apache-2.0 & BSD-3-Clause"',
+ 'LICENSE:f = "BSD-3-Clause"',
+ 'LICENSE:g = "Unknown"'
+ ]
+ self.assertEqual(outlines, expected_outlines)
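The expected LICENSE:<pkg> lines encode the convention this test relies on: license values found for a package are de-duplicated, sorted and joined with ' & ', while packages with nothing detected fall back to fallback_licenses or to 'Unknown'. A stripped-down sketch of that joining step (not the actual split_pkg_licenses implementation; the bracketing and sorting of '|' alternatives is omitted):

def license_value(pkg, found_licenses, fallback_licenses):
    # Illustrative only; mirrors the expected output format shown above.
    if found_licenses:
        return ' & '.join(sorted(set(found_licenses)))
    return fallback_licenses.get(pkg, 'Unknown')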
+
class RecipetoolAppendsrcBase(RecipetoolBase):
def _try_recipetool_appendsrcfile(self, testrecipe, newfile, destfile, options, expectedlines, expectedfiles):
@@ -561,9 +1208,9 @@ class RecipetoolAppendsrcBase(RecipetoolBase):
for uri in src_uri:
p = urllib.parse.urlparse(uri)
if p.scheme == 'file':
- return p.netloc + p.path
+ return p.netloc + p.path, uri
- def _test_appendsrcfile(self, testrecipe, filename=None, destdir=None, has_src_uri=True, srcdir=None, newfile=None, options=''):
+ def _test_appendsrcfile(self, testrecipe, filename=None, destdir=None, has_src_uri=True, srcdir=None, newfile=None, remove=None, machine=None , options=''):
if newfile is None:
newfile = self.testfile
@@ -588,14 +1235,42 @@ class RecipetoolAppendsrcBase(RecipetoolBase):
else:
destpath = '.' + os.sep
- expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
'\n']
+
+ override = ""
+ if machine:
+ options += ' -m %s' % machine
+ override = ':append:%s' % machine
+ expectedlines.extend(['PACKAGE_ARCH = "${MACHINE_ARCH}"\n',
+ '\n'])
+
+ if remove:
+ for entry in remove:
+ if machine:
+ entry_remove_line = 'SRC_URI:remove:%s = " %s"\n' % (machine, entry)
+ else:
+ entry_remove_line = 'SRC_URI:remove = "%s"\n' % entry
+
+ expectedlines.extend([entry_remove_line,
+ '\n'])
+
if has_src_uri:
uri = 'file://%s' % filename
if expected_subdir:
uri += ';subdir=%s' % expected_subdir
- expectedlines[0:0] = ['SRC_URI += "%s"\n' % uri,
- '\n']
+ if machine:
+ src_uri_line = 'SRC_URI%s = " %s"\n' % (override, uri)
+ else:
+ src_uri_line = 'SRC_URI += "%s"\n' % uri
+
+ expectedlines.extend([src_uri_line, '\n'])
+
+ if machine:
+ filename = '%s/%s' % (machine, filename)
return self._try_recipetool_appendsrcfile(testrecipe, newfile, destpath, options, expectedlines, [filename])
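For the machine-specific case, the expected lines assembled above correspond to a bbappend of roughly this shape (illustrative only; assuming machine='mymachine', filename='a-file' and no removed SRC_URI entries):

# FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"
#
# PACKAGE_ARCH = "${MACHINE_ARCH}"
#
# SRC_URI:append:mymachine = " file://a-file"

with the added file itself expected under the 'mymachine/' subdirectory of the bbappend's files directory.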
@@ -624,11 +1299,9 @@ class RecipetoolAppendsrcBase(RecipetoolBase):
class RecipetoolAppendsrcTests(RecipetoolAppendsrcBase):
- @OETestID(1273)
def test_recipetool_appendsrcfile_basic(self):
self._test_appendsrcfile('base-files', 'a-file')
- @OETestID(1274)
def test_recipetool_appendsrcfile_basic_wildcard(self):
testrecipe = 'base-files'
self._test_appendsrcfile(testrecipe, 'a-file', options='-w')
@@ -636,15 +1309,12 @@ class RecipetoolAppendsrcTests(RecipetoolAppendsrcBase):
bbappendfile = self._check_bbappend(testrecipe, recipefile, self.templayerdir)
self.assertEqual(os.path.basename(bbappendfile), '%s_%%.bbappend' % testrecipe)
- @OETestID(1281)
def test_recipetool_appendsrcfile_subdir_basic(self):
self._test_appendsrcfile('base-files', 'a-file', 'tmp')
- @OETestID(1282)
def test_recipetool_appendsrcfile_subdir_basic_dirdest(self):
self._test_appendsrcfile('base-files', destdir='tmp')
- @OETestID(1280)
def test_recipetool_appendsrcfile_srcdir_basic(self):
testrecipe = 'bash'
bb_vars = get_bb_vars(['S', 'WORKDIR'], testrecipe)
@@ -653,24 +1323,49 @@ class RecipetoolAppendsrcTests(RecipetoolAppendsrcBase):
subdir = os.path.relpath(srcdir, workdir)
self._test_appendsrcfile(testrecipe, 'a-file', srcdir=subdir)
- @OETestID(1275)
def test_recipetool_appendsrcfile_existing_in_src_uri(self):
testrecipe = 'base-files'
- filepath = self._get_first_file_uri(testrecipe)
+ filepath,_ = self._get_first_file_uri(testrecipe)
self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe)
self._test_appendsrcfile(testrecipe, filepath, has_src_uri=False)
- @OETestID(1276)
- def test_recipetool_appendsrcfile_existing_in_src_uri_diff_params(self):
+ def test_recipetool_appendsrcfile_existing_in_src_uri_diff_params(self, machine=None):
testrecipe = 'base-files'
subdir = 'tmp'
- filepath = self._get_first_file_uri(testrecipe)
+ filepath, srcuri_entry = self._get_first_file_uri(testrecipe)
self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe)
- output = self._test_appendsrcfile(testrecipe, filepath, subdir, has_src_uri=False)
- self.assertTrue(any('with different parameters' in l for l in output))
+ self._test_appendsrcfile(testrecipe, filepath, subdir, machine=machine, remove=[srcuri_entry])
+
+ def test_recipetool_appendsrcfile_machine(self):
+ # A very basic test
+ self._test_appendsrcfile('base-files', 'a-file', machine='mymachine')
+
+ # Force cleaning the output of previous test
+ self.tearDownLocal()
+
+ # A more complex test: existing entry in src_uri with different param
+ self.test_recipetool_appendsrcfile_existing_in_src_uri_diff_params(machine='mymachine')
+
+ def test_recipetool_appendsrcfile_update_recipe_basic(self):
+ testrecipe = "mtd-utils-selftest"
+ recipefile = get_bb_var('FILE', testrecipe)
+        self.assertIn('meta-selftest', recipefile, 'This test expects the %s recipe to be in meta-selftest' % testrecipe)
+ cmd = 'recipetool appendsrcfile -W -u meta-selftest %s %s' % (testrecipe, self.testfile)
+ result = runCmd(cmd)
+ self.assertNotIn('Traceback', result.output)
+ self.add_command_to_tearDown('cd %s; rm -f %s/%s; git checkout .' % (os.path.dirname(recipefile), testrecipe, os.path.basename(self.testfile)))
+
+ expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
+ ('??', '.*/%s/%s$' % (testrecipe, os.path.basename(self.testfile)))]
+ self._check_repo_status(os.path.dirname(recipefile), expected_status)
+ result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile))
+ removelines = []
+ addlines = [
+ 'file://%s \\\\' % os.path.basename(self.testfile),
+ ]
+ self._check_diff(result.output, addlines, removelines)
- @OETestID(1277)
def test_recipetool_appendsrcfile_replace_file_srcdir(self):
testrecipe = 'bash'
filepath = 'Makefile.in'
@@ -681,9 +1376,10 @@ class RecipetoolAppendsrcTests(RecipetoolAppendsrcBase):
self._test_appendsrcfile(testrecipe, filepath, srcdir=subdir)
bitbake('%s:do_unpack' % testrecipe)
- self.assertEqual(open(self.testfile, 'r').read(), open(os.path.join(srcdir, filepath), 'r').read())
+ with open(self.testfile, 'r') as testfile:
+ with open(os.path.join(srcdir, filepath), 'r') as makefilein:
+ self.assertEqual(testfile.read(), makefilein.read())
- @OETestID(1278)
def test_recipetool_appendsrcfiles_basic(self, destdir=None):
newfiles = [self.testfile]
for i in range(1, 5):
@@ -693,6 +1389,5 @@ class RecipetoolAppendsrcTests(RecipetoolAppendsrcBase):
newfiles.append(testfile)
self._test_appendsrcfiles('gcc', newfiles, destdir=destdir, options='-W')
- @OETestID(1279)
def test_recipetool_appendsrcfiles_basic_subdir(self):
self.test_recipetool_appendsrcfiles_basic(destdir='testdir')
diff --git a/meta/lib/oeqa/selftest/cases/recipeutils.py b/meta/lib/oeqa/selftest/cases/recipeutils.py
new file mode 100644
index 0000000000..2cb4445f81
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/recipeutils.py
@@ -0,0 +1,138 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import bb.tinfoil
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import get_test_layer
+
+
+def setUpModule():
+ global tinfoil
+ global metaselftestpath
+ metaselftestpath = get_test_layer()
+ tinfoil = bb.tinfoil.Tinfoil(tracking=True)
+ tinfoil.prepare(config_only=False, quiet=2)
+
+
+def tearDownModule():
+ tinfoil.shutdown()
+
+
+class RecipeUtilsTests(OESelftestTestCase):
+ """ Tests for the recipeutils module functions """
+
+ def test_patch_recipe_varflag(self):
+ import oe.recipeutils
+ rd = tinfoil.parse_recipe('python3-async-test')
+ vals = {'SRC_URI[md5sum]': 'aaaaaa', 'LICENSE': 'something'}
+ patches = oe.recipeutils.patch_recipe(rd, rd.getVar('FILE'), vals, patch=True, relpath=metaselftestpath)
+
+ expected_patch = """
+--- a/recipes-devtools/python/python-async-test.inc
++++ b/recipes-devtools/python/python-async-test.inc
+@@ -1,14 +1,14 @@
+ SUMMARY = "Python framework to process interdependent tasks in a pool of workers"
+ HOMEPAGE = "http://github.com/gitpython-developers/async"
+ SECTION = "devel/python"
+-LICENSE = "BSD-3-Clause"
++LICENSE = "something"
+ LIC_FILES_CHKSUM = "file://PKG-INFO;beginline=8;endline=8;md5=88df8e78b9edfd744953862179f2d14e"
+
+ inherit pypi
+
+ PYPI_PACKAGE = "async"
+
+-SRC_URI[md5sum] = "9b06b5997de2154f3bc0273f80bcef6b"
++SRC_URI[md5sum] = "aaaaaa"
+ SRC_URI[sha256sum] = "ac6894d876e45878faae493b0cf61d0e28ec417334448ac0a6ea2229d8343051"
+
+ RDEPENDS:${PN} += "python3-threading"
+"""
+ patchlines = []
+ for f in patches:
+ for line in f:
+ patchlines.append(line)
+ self.maxDiff = None
+ self.assertEqual(''.join(patchlines).strip(), expected_patch.strip())
+
+
+ def test_patch_recipe_singleappend(self):
+ import oe.recipeutils
+ rd = tinfoil.parse_recipe('recipeutils-test')
+ val = rd.getVar('SRC_URI', False).split()
+ del val[1]
+ val = ' '.join(val)
+ vals = {'SRC_URI': val}
+ patches = oe.recipeutils.patch_recipe(rd, rd.getVar('FILE'), vals, patch=True, relpath=metaselftestpath)
+
+ expected_patch = """
+--- a/recipes-test/recipeutils/recipeutils-test_1.2.bb
++++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb
+@@ -8,6 +8,4 @@
+
+ BBCLASSEXTEND = "native nativesdk"
+
+-SRC_URI += "file://somefile"
+-
+ SRC_URI:append = " file://anotherfile"
+"""
+ patchlines = []
+ for f in patches:
+ for line in f:
+ patchlines.append(line)
+ self.assertEqual(''.join(patchlines).strip(), expected_patch.strip())
+
+
+ def test_patch_recipe_appends(self):
+ import oe.recipeutils
+ rd = tinfoil.parse_recipe('recipeutils-test')
+ val = rd.getVar('SRC_URI', False).split()
+ vals = {'SRC_URI': val[0]}
+ patches = oe.recipeutils.patch_recipe(rd, rd.getVar('FILE'), vals, patch=True, relpath=metaselftestpath)
+
+ expected_patch = """
+--- a/recipes-test/recipeutils/recipeutils-test_1.2.bb
++++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb
+@@ -8,6 +8,3 @@
+
+ BBCLASSEXTEND = "native nativesdk"
+
+-SRC_URI += "file://somefile"
+-
+-SRC_URI:append = " file://anotherfile"
+"""
+ patchlines = []
+ for f in patches:
+ for line in f:
+ patchlines.append(line)
+ self.assertEqual(''.join(patchlines).strip(), expected_patch.strip())
+
+
+ def test_validate_pn(self):
+ import oe.recipeutils
+ expected_results = {
+ 'test': '',
+ 'glib-2.0': '',
+ 'gtk+': '',
+ 'forcevariable': 'reserved',
+ 'pn-something': 'reserved',
+ 'test.bb': 'file',
+ 'test_one': 'character',
+ 'test!': 'character',
+ }
+
+ for pn, expected in expected_results.items():
+ result = oe.recipeutils.validate_pn(pn)
+ if expected:
+ self.assertIn(expected, result)
+ else:
+ self.assertEqual(result, '')
+
+ def test_split_var_value(self):
+ import oe.recipeutils
+ res = oe.recipeutils.split_var_value('test.1 test.2 ${@call_function("hi there world", false)} test.4')
+ self.assertEqual(res, ['test.1', 'test.2', '${@call_function("hi there world", false)}', 'test.4'])
diff --git a/meta/lib/oeqa/selftest/cases/reproducible.py b/meta/lib/oeqa/selftest/cases/reproducible.py
new file mode 100644
index 0000000000..80e830136f
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/reproducible.py
@@ -0,0 +1,338 @@
+#
+# SPDX-License-Identifier: MIT
+#
+# Copyright 2019-2020 by Garmin Ltd. or its subsidiaries
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
+import bb.utils
+import functools
+import multiprocessing
+import textwrap
+import tempfile
+import shutil
+import stat
+import os
+import datetime
+
+exclude_packages = [
+ ]
+
+def is_excluded(package):
+ package_name = os.path.basename(package)
+ for i in exclude_packages:
+ if package_name.startswith(i):
+ return i
+ return None
+
+MISSING = 'MISSING'
+DIFFERENT = 'DIFFERENT'
+SAME = 'SAME'
+
+@functools.total_ordering
+class CompareResult(object):
+ def __init__(self):
+ self.reference = None
+ self.test = None
+ self.status = 'UNKNOWN'
+
+ def __eq__(self, other):
+ return (self.status, self.test) == (other.status, other.test)
+
+ def __lt__(self, other):
+ return (self.status, self.test) < (other.status, other.test)
+
+class PackageCompareResults(object):
+ def __init__(self, exclusions):
+ self.total = []
+ self.missing = []
+ self.different = []
+ self.different_excluded = []
+ self.same = []
+ self.active_exclusions = set()
+ exclude_packages.extend((exclusions or "").split())
+
+ def add_result(self, r):
+ self.total.append(r)
+ if r.status == MISSING:
+ self.missing.append(r)
+ elif r.status == DIFFERENT:
+ exclusion = is_excluded(r.reference)
+ if exclusion:
+ self.different_excluded.append(r)
+ self.active_exclusions.add(exclusion)
+ else:
+ self.different.append(r)
+ else:
+ self.same.append(r)
+
+ def sort(self):
+ self.total.sort()
+ self.missing.sort()
+ self.different.sort()
+ self.different_excluded.sort()
+ self.same.sort()
+
+ def __str__(self):
+ return 'same=%i different=%i different_excluded=%i missing=%i total=%i\nunused_exclusions=%s' % (len(self.same), len(self.different), len(self.different_excluded), len(self.missing), len(self.total), self.unused_exclusions())
+
+ def unused_exclusions(self):
+ return sorted(set(exclude_packages) - self.active_exclusions)
+
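A minimal usage sketch of the bookkeeping classes above (illustrative only; the package paths are invented):

results = PackageCompareResults(None)  # no extra exclusions

r = CompareResult()
r.reference = 'reproducibleA/deploy/ipk/core2-64/foo_1.0.ipk'
r.test = 'reproducibleB/deploy/ipk/core2-64/foo_1.0.ipk'
r.status = SAME
results.add_result(r)

results.sort()
print(results)  # same=1 different=0 different_excluded=0 missing=0 total=1 ...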
+def compare_file(reference, test, diffutils_sysroot):
+ result = CompareResult()
+ result.reference = reference
+ result.test = test
+
+ if not os.path.exists(reference):
+ result.status = MISSING
+ return result
+
+ r = runCmd(['cmp', '--quiet', reference, test], native_sysroot=diffutils_sysroot, ignore_status=True, sync=False)
+
+ if r.status:
+ result.status = DIFFERENT
+ return result
+
+ result.status = SAME
+ return result
+
+def run_diffoscope(a_dir, b_dir, html_dir, max_report_size=0, **kwargs):
+ return runCmd(['diffoscope', '--no-default-limits', '--max-report-size', str(max_report_size),
+ '--exclude-directory-metadata', 'yes', '--html-dir', html_dir, a_dir, b_dir],
+ **kwargs)
+
+class DiffoscopeTests(OESelftestTestCase):
+ diffoscope_test_files = os.path.join(os.path.dirname(os.path.abspath(__file__)), "diffoscope")
+
+ def test_diffoscope(self):
+ bitbake("diffoscope-native -c addto_recipe_sysroot")
+ diffoscope_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "diffoscope-native")
+
+ # Check that diffoscope doesn't return an error when the files compare
+ # the same (a general check that diffoscope is working)
+ with tempfile.TemporaryDirectory() as tmpdir:
+ run_diffoscope('A', 'A', tmpdir,
+ native_sysroot=diffoscope_sysroot, cwd=self.diffoscope_test_files)
+
+ # Check that diffoscope generates an index.html file when the files are
+ # different
+ with tempfile.TemporaryDirectory() as tmpdir:
+ r = run_diffoscope('A', 'B', tmpdir,
+ native_sysroot=diffoscope_sysroot, ignore_status=True, cwd=self.diffoscope_test_files)
+
+ self.assertNotEqual(r.status, 0, msg="diffoscope was successful when an error was expected")
+ self.assertTrue(os.path.exists(os.path.join(tmpdir, 'index.html')), "HTML index not found!")
+
+class ReproducibleTests(OESelftestTestCase):
+ # Test the reproducibility of whatever is built between sstate_targets and targets
+
+ package_classes = ['deb', 'ipk', 'rpm']
+
+ # Maximum report size, in bytes
+ max_report_size = 250 * 1024 * 1024
+
+ # targets are the things we want to test the reproducibility of
+ targets = ['core-image-minimal', 'core-image-sato', 'core-image-full-cmdline', 'core-image-weston', 'world']
+
+ # sstate targets are things to pull from sstate to potentially cut build/debugging time
+ sstate_targets = []
+
+ save_results = False
+ if 'OEQA_DEBUGGING_SAVED_OUTPUT' in os.environ:
+ save_results = os.environ['OEQA_DEBUGGING_SAVED_OUTPUT']
+
+ # This variable controls if one of the test builds is allowed to pull from
+ # an sstate cache/mirror. The other build is always done clean as a point of
+ # comparison.
+ # If you know that your sstate archives are reproducible, enabling this
+ # will test that and also make the test run faster. If your sstate is not
+ # reproducible, disable this in your derived test class
+ build_from_sstate = True
+
+ def setUpLocal(self):
+ super().setUpLocal()
+ needed_vars = [
+ 'TOPDIR',
+ 'TARGET_PREFIX',
+ 'BB_NUMBER_THREADS',
+ 'BB_HASHSERVE',
+ 'OEQA_REPRODUCIBLE_TEST_PACKAGE',
+ 'OEQA_REPRODUCIBLE_TEST_TARGET',
+ 'OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS',
+ 'OEQA_REPRODUCIBLE_EXCLUDED_PACKAGES',
+ ]
+ bb_vars = get_bb_vars(needed_vars)
+ for v in needed_vars:
+ setattr(self, v.lower(), bb_vars[v])
+
+ if bb_vars['OEQA_REPRODUCIBLE_TEST_PACKAGE']:
+ self.package_classes = bb_vars['OEQA_REPRODUCIBLE_TEST_PACKAGE'].split()
+
+ if bb_vars['OEQA_REPRODUCIBLE_TEST_TARGET']:
+ self.targets = bb_vars['OEQA_REPRODUCIBLE_TEST_TARGET'].split()
+
+ if bb_vars['OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS']:
+ self.sstate_targets = bb_vars['OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS'].split()
+
+ self.extraresults = {}
+ self.extraresults.setdefault('reproducible.rawlogs', {})['log'] = ''
+ self.extraresults.setdefault('reproducible', {}).setdefault('files', {})
+
+ def append_to_log(self, msg):
+ self.extraresults['reproducible.rawlogs']['log'] += msg
+
+ def compare_packages(self, reference_dir, test_dir, diffutils_sysroot):
+ result = PackageCompareResults(self.oeqa_reproducible_excluded_packages)
+
+ old_cwd = os.getcwd()
+ try:
+ file_result = {}
+ os.chdir(test_dir)
+ with multiprocessing.Pool(processes=int(self.bb_number_threads or 0)) as p:
+ for root, dirs, files in os.walk('.'):
+ async_result = []
+ for f in files:
+ reference_path = os.path.join(reference_dir, root, f)
+ test_path = os.path.join(test_dir, root, f)
+ async_result.append(p.apply_async(compare_file, (reference_path, test_path, diffutils_sysroot)))
+
+ for a in async_result:
+ result.add_result(a.get())
+
+ finally:
+ os.chdir(old_cwd)
+
+ result.sort()
+ return result
+
+ def write_package_list(self, package_class, name, packages):
+ self.extraresults['reproducible']['files'].setdefault(package_class, {})[name] = [
+ {'reference': p.reference, 'test': p.test} for p in packages]
+
+ def copy_file(self, source, dest):
+ bb.utils.mkdirhier(os.path.dirname(dest))
+ shutil.copyfile(source, dest)
+
+ def do_test_build(self, name, use_sstate):
+ capture_vars = ['DEPLOY_DIR_' + c.upper() for c in self.package_classes]
+
+ tmpdir = os.path.join(self.topdir, name, 'tmp')
+ if os.path.exists(tmpdir):
+ bb.utils.remove(tmpdir, recurse=True)
+
+ config = textwrap.dedent('''\
+ PACKAGE_CLASSES = "{package_classes}"
+ TMPDIR = "{tmpdir}"
+ LICENSE_FLAGS_ACCEPTED = "commercial"
+ DISTRO_FEATURES:append = ' pam'
+ USERADDEXTENSION = "useradd-staticids"
+ USERADD_ERROR_DYNAMIC = "skip"
+ USERADD_UID_TABLES += "files/static-passwd"
+ USERADD_GID_TABLES += "files/static-group"
+ ''').format(package_classes=' '.join('package_%s' % c for c in self.package_classes),
+ tmpdir=tmpdir)
+
+ if not use_sstate:
+ if self.sstate_targets:
+ self.logger.info("Building prebuild for %s (sstate allowed)..." % (name))
+ self.write_config(config)
+ bitbake(' '.join(self.sstate_targets))
+
+            # This config fragment will disable using the shared sstate directory and the
+            # sstate mirror, forcing a complete build from scratch
+ config += textwrap.dedent('''\
+ SSTATE_DIR = "${TMPDIR}/sstate"
+ SSTATE_MIRRORS = "file://.*/.*-native.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH file://.*/.*-cross.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH"
+ ''')
+
+ self.logger.info("Building %s (sstate%s allowed)..." % (name, '' if use_sstate else ' NOT'))
+ self.write_config(config)
+ d = get_bb_vars(capture_vars)
+ # targets used to be called images
+ bitbake(' '.join(getattr(self, 'images', self.targets)))
+ return d
+
+ def test_reproducible_builds(self):
+ def strip_topdir(s):
+ if s.startswith(self.topdir):
+ return s[len(self.topdir):]
+ return s
+
+ # Build native utilities
+ self.write_config('')
+ bitbake("diffoscope-native diffutils-native jquery-native -c addto_recipe_sysroot")
+ diffutils_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "diffutils-native")
+ diffoscope_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "diffoscope-native")
+ jquery_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "jquery-native")
+
+ if self.save_results:
+ os.makedirs(self.save_results, exist_ok=True)
+ datestr = datetime.datetime.now().strftime('%Y%m%d')
+ save_dir = tempfile.mkdtemp(prefix='oe-reproducible-%s-' % datestr, dir=self.save_results)
+ os.chmod(save_dir, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
+ self.logger.info('Non-reproducible packages will be copied to %s', save_dir)
+
+ vars_A = self.do_test_build('reproducibleA', self.build_from_sstate)
+
+ vars_B = self.do_test_build('reproducibleB', False)
+
+ # NOTE: The temp directories from the reproducible build are purposely
+ # kept after the build so it can be diffed for debugging.
+
+ fails = []
+
+ for c in self.package_classes:
+ with self.subTest(package_class=c):
+ package_class = 'package_' + c
+
+ deploy_A = vars_A['DEPLOY_DIR_' + c.upper()]
+ deploy_B = vars_B['DEPLOY_DIR_' + c.upper()]
+
+ self.logger.info('Checking %s packages for differences...' % c)
+ result = self.compare_packages(deploy_A, deploy_B, diffutils_sysroot)
+
+ self.logger.info('Reproducibility summary for %s: %s' % (c, result))
+
+ self.append_to_log('\n'.join("%s: %s" % (r.status, r.test) for r in result.total))
+
+ self.write_package_list(package_class, 'missing', result.missing)
+ self.write_package_list(package_class, 'different', result.different)
+ self.write_package_list(package_class, 'different_excluded', result.different_excluded)
+ self.write_package_list(package_class, 'same', result.same)
+
+ if self.save_results:
+ for d in result.different:
+ self.copy_file(d.reference, '/'.join([save_dir, 'packages', strip_topdir(d.reference)]))
+ self.copy_file(d.test, '/'.join([save_dir, 'packages', strip_topdir(d.test)]))
+
+ for d in result.different_excluded:
+ self.copy_file(d.reference, '/'.join([save_dir, 'packages-excluded', strip_topdir(d.reference)]))
+ self.copy_file(d.test, '/'.join([save_dir, 'packages-excluded', strip_topdir(d.test)]))
+
+ if result.different:
+ fails.append("The following %s packages are different and not in exclusion list:\n%s" %
+ (c, '\n'.join(r.test for r in (result.different))))
+
+ if result.missing and len(self.sstate_targets) == 0:
+ fails.append("The following %s packages are missing and not in exclusion list:\n%s" %
+ (c, '\n'.join(r.test for r in (result.missing))))
+
+ # Clean up empty directories
+ if self.save_results:
+ if not os.listdir(save_dir):
+ os.rmdir(save_dir)
+ else:
+ self.logger.info('Running diffoscope')
+ package_dir = os.path.join(save_dir, 'packages')
+ package_html_dir = os.path.join(package_dir, 'diff-html')
+
+ # Copy jquery to improve the diffoscope output usability
+ self.copy_file(os.path.join(jquery_sysroot, 'usr/share/javascript/jquery/jquery.min.js'), os.path.join(package_html_dir, 'jquery.js'))
+
+ run_diffoscope('reproducibleA', 'reproducibleB', package_html_dir, max_report_size=self.max_report_size,
+ native_sysroot=diffoscope_sysroot, ignore_status=True, cwd=package_dir)
+
+ if fails:
+ self.fail('\n'.join(fails))
+
diff --git a/meta/lib/oeqa/selftest/cases/resulttooltests.py b/meta/lib/oeqa/selftest/cases/resulttooltests.py
new file mode 100644
index 0000000000..c3303f3fbb
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/resulttooltests.py
@@ -0,0 +1,375 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import sys
+basepath = os.path.abspath(os.path.dirname(__file__) + '/../../../../../')
+lib_path = basepath + '/scripts/lib'
+sys.path = sys.path + [lib_path]
+from resulttool.report import ResultsTextReport
+from resulttool import regression as regression
+from resulttool import resultutils as resultutils
+from oeqa.selftest.case import OESelftestTestCase
+
+class ResultToolTests(OESelftestTestCase):
+ base_results_data = {'base_result1': {'configuration': {"TEST_TYPE": "runtime",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86"},
+ 'result': {}},
+ 'base_result2': {'configuration': {"TEST_TYPE": "runtime",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86-64"},
+ 'result': {}}}
+ target_results_data = {'target_result1': {'configuration': {"TEST_TYPE": "runtime",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86"},
+ 'result': {}},
+ 'target_result2': {'configuration': {"TEST_TYPE": "runtime",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86"},
+ 'result': {}},
+ 'target_result3': {'configuration': {"TEST_TYPE": "runtime",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86-64"},
+ 'result': {}}}
+
+ def test_report_can_aggregate_test_result(self):
+ result_data = {'result': {'test1': {'status': 'PASSED'},
+ 'test2': {'status': 'PASSED'},
+ 'test3': {'status': 'FAILED'},
+ 'test4': {'status': 'ERROR'},
+ 'test5': {'status': 'SKIPPED'}}}
+ report = ResultsTextReport()
+ result_report = report.get_aggregated_test_result(None, result_data, 'DummyMachine')
+ self.assertTrue(result_report['passed'] == 2, msg="Passed count not correct:%s" % result_report['passed'])
+ self.assertTrue(result_report['failed'] == 2, msg="Failed count not correct:%s" % result_report['failed'])
+ self.assertTrue(result_report['skipped'] == 1, msg="Skipped count not correct:%s" % result_report['skipped'])
+
+ def test_regression_can_get_regression_base_target_pair(self):
+
+ results = {}
+ resultutils.append_resultsdata(results, ResultToolTests.base_results_data)
+ resultutils.append_resultsdata(results, ResultToolTests.target_results_data)
+ self.assertTrue('target_result1' in results['runtime/mydistro/qemux86/image'], msg="Pair not correct:%s" % results)
+ self.assertTrue('target_result3' in results['runtime/mydistro/qemux86-64/image'], msg="Pair not correct:%s" % results)
+
+ def test_regression_can_get_regression_result(self):
+ base_result_data = {'result': {'test1': {'status': 'PASSED'},
+ 'test2': {'status': 'PASSED'},
+ 'test3': {'status': 'FAILED'},
+ 'test4': {'status': 'ERROR'},
+ 'test5': {'status': 'SKIPPED'}}}
+ target_result_data = {'result': {'test1': {'status': 'PASSED'},
+ 'test2': {'status': 'FAILED'},
+ 'test3': {'status': 'PASSED'},
+ 'test4': {'status': 'ERROR'},
+ 'test5': {'status': 'SKIPPED'}}}
+ result, text = regression.compare_result(self.logger, "BaseTestRunName", "TargetTestRunName", base_result_data, target_result_data)
+ self.assertTrue(result['test2']['base'] == 'PASSED',
+ msg="regression not correct:%s" % result['test2']['base'])
+ self.assertTrue(result['test2']['target'] == 'FAILED',
+ msg="regression not correct:%s" % result['test2']['target'])
+ self.assertTrue(result['test3']['base'] == 'FAILED',
+ msg="regression not correct:%s" % result['test3']['base'])
+ self.assertTrue(result['test3']['target'] == 'PASSED',
+ msg="regression not correct:%s" % result['test3']['target'])
+
+ def test_merge_can_merged_results(self):
+ results = {}
+ resultutils.append_resultsdata(results, ResultToolTests.base_results_data, configmap=resultutils.flatten_map)
+ resultutils.append_resultsdata(results, ResultToolTests.target_results_data, configmap=resultutils.flatten_map)
+ self.assertEqual(len(results[''].keys()), 5, msg="Flattened results not correct %s" % str(results))
+
+ def test_results_without_metadata_can_be_compared(self):
+ base_configuration = {"configuration": {
+ "TEST_TYPE": "oeselftest",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600
+ }, "result": {}}
+ target_configuration = {"configuration": {
+ "TEST_TYPE": "oeselftest",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600
+ }, "result": {}}
+ self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect metadata filtering, tests without metadata should be compared")
+
+ def test_target_result_with_missing_metadata_can_not_be_compared(self):
+ base_configuration = {"configuration": {
+ "TEST_TYPE": "oeselftest",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "OESELFTEST_METADATA": {
+ "run_all_tests": True,
+ "run_tests": None,
+ "skips": None,
+ "machine": None,
+ "select_tags": ["toolchain-user", "toolchain-system"],
+ "exclude_tags": None
+ }}, "result": {}}
+ target_configuration = {"configuration": {"TEST_TYPE": "oeselftest",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600
+ }, "result": {}}
+ self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect metadata filtering, tests should not be compared")
+
+ def test_results_with_matching_metadata_can_be_compared(self):
+ base_configuration = {"configuration": {
+ "TEST_TYPE": "oeselftest",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600,
+ "OESELFTEST_METADATA": {"run_all_tests": True,
+ "run_tests": None,
+ "skips": None,
+ "machine": None,
+ "select_tags": ["toolchain-user", "toolchain-system"],
+ "exclude_tags": None}
+ }, "result": {}}
+ target_configuration = {"configuration": {
+ "TEST_TYPE": "oeselftest",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600,
+ "OESELFTEST_METADATA": {"run_all_tests": True,
+ "run_tests": None,
+ "skips": None,
+ "machine": None,
+ "select_tags": ["toolchain-user", "toolchain-system"],
+ "exclude_tags": None}
+ }, "result": {}}
+ self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect metadata filtering, tests with matching metadata should be compared")
+
+ def test_results_with_mismatching_metadata_can_not_be_compared(self):
+ base_configuration = {"configuration": {
+ "TEST_TYPE": "oeselftest",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600,
+ "OESELFTEST_METADATA": {"run_all_tests": True,
+ "run_tests": None,
+ "skips": None,
+ "machine": None,
+ "select_tags": ["toolchain-user", "toolchain-system"],
+ "exclude_tags": None}
+ }, "result": {}}
+ target_configuration = {"configuration": {
+ "TEST_TYPE": "oeselftest",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600,
+ "OESELFTEST_METADATA": {"run_all_tests": True,
+ "run_tests": None,
+ "skips": None,
+ "machine": None,
+ "select_tags": ["machine"],
+ "exclude_tags": None}
+ }, "result": {}}
+ self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect metadata filtering, tests with mismatching metadata should not be compared")
+
+ def test_metadata_matching_is_only_checked_for_relevant_test_type(self):
+ base_configuration = {"configuration": {"TEST_TYPE": "runtime",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600,
+ "OESELFTEST_METADATA": {"run_all_tests": True,
+ "run_tests": None,
+ "skips": None,
+ "machine": None,
+ "select_tags": ["toolchain-user", "toolchain-system"],
+ "exclude_tags": None}}, "result": {}}
+ target_configuration = {"configuration": {"TEST_TYPE": "runtime",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600,
+ "OESELFTEST_METADATA": {"run_all_tests": True,
+ "run_tests": None,
+ "skips": None,
+ "machine": None,
+ "select_tags": ["machine"],
+ "exclude_tags": None}}, "result": {}}
+ self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect metadata filtering, %s tests should be compared" % base_configuration['configuration']['TEST_TYPE'])
+
+ def test_machine_matches(self):
+ base_configuration = {"configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86"}, "result": {}}
+ target_configuration = {"configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86"
+ }, "result": {}}
+ self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect machine filtering, identical machine tests should be compared")
+
+ def test_machine_mismatches(self):
+ base_configuration = {"configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86"
+ }, "result": {}}
+ target_configuration = {"configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86_64"
+ }, "result": {}}
+ self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect machine filtering, mismatching machine tests should not be compared")
+
+ def test_can_not_compare_non_ltp_tests(self):
+ base_configuration = {"configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86"
+ }, "result": {
+ "ltpresult_foo": {
+ "status": "PASSED"
+ }}}
+ target_configuration = {"configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86_64"
+ }, "result": {
+ "bar": {
+ "status": "PASSED"
+ }}}
+ self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect ltpresult filtering, mismatching ltpresult content should not be compared")
+
+ def test_can_compare_ltp_tests(self):
+ base_configuration = {"configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86"
+ }, "result": {
+ "ltpresult_foo": {
+ "status": "PASSED"
+ }}}
+ target_configuration = {"configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86"
+ }, "result": {
+ "ltpresult_foo": {
+ "status": "PASSED"
+ }}}
+ self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect ltpresult filtering, matching ltpresult content should be compared")
+
+ def test_can_match_non_static_ptest_names(self):
+ base_configuration = {"a": {
+ "conf_X": {
+ "configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86"
+ }, "result": {
+ "ptestresult.lttng-tools.foo_-_bar_-_moo": {
+ "status": "PASSED"
+ },
+ "ptestresult.babeltrace.bar_-_moo_-_foo": {
+ "status": "PASSED"
+ },
+ "ptestresult.babeltrace2.moo_-_foo_-_bar": {
+ "status": "PASSED"
+ },
+ "ptestresult.curl.test_0000__foo_out_of_bar": {
+ "status": "PASSED"
+ },
+ "ptestresult.dbus.test_0000__foo_out_of_bar,_remaining:_00:02,_took_0.032s,_duration:_03:32_": {
+ "status": "PASSED"
+ },
+ "ptestresult.binutils-ld.in testcase /foo/build-st-bar/moo/ctf.exp": {
+ "status": "PASSED"
+ },
+ "ptestresult.gcc-libstdc++-v3.Couldn't create remote directory /tmp/runtest.30975 on target": {
+ "status": "PASSED"
+ },
+ "ptestresult.gcc-libgomp.Couldn't create remote directory /tmp/runtest.3657621 on": {
+ "status": "PASSED"
+ }
+ }}}}
+ target_configuration = {"a": {
+ "conf_Y": {
+ "configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86"
+ }, "result": {
+ "ptestresult.lttng-tools.foo_-_yyy_-_zzz": {
+ "status": "PASSED"
+ },
+ "ptestresult.babeltrace.bar_-_zzz_-_xxx": {
+ "status": "PASSED"
+ },
+ "ptestresult.babeltrace2.moo_-_xxx_-_yyy": {
+ "status": "PASSED"
+ },
+ "ptestresult.curl.test_0000__xxx_out_of_yyy": {
+ "status": "PASSED"
+ },
+ "ptestresult.dbus.test_0000__yyy_out_of_zzz,_remaining:_00:03,_took_0.034s,_duration:_03:30_": {
+ "status": "PASSED"
+ },
+ "ptestresult.binutils-ld.in testcase /xxx/build-st-yyy/zzz/ctf.exp": {
+ "status": "PASSED"
+ },
+ "ptestresult.gcc-libstdc++-v3.Couldn't create remote directory /tmp/runtest.45678 on target": {
+ "status": "PASSED"
+ },
+ "ptestresult.gcc-libgomp.Couldn't create remote directory /tmp/runtest.3657621 on": {
+ "status": "PASSED"
+ }
+ }}}}
+ regression.fixup_ptest_names(base_configuration, self.logger)
+ regression.fixup_ptest_names(target_configuration, self.logger)
+ result, resultstring = regression.compare_result(
+ self.logger, "A", "B", base_configuration["a"]["conf_X"], target_configuration["a"]["conf_Y"])
+ self.assertDictEqual(
+ result, {}, msg=f"ptests should be compared: {resultstring}")
diff --git a/meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py b/meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py
new file mode 100644
index 0000000000..44e2c09a6f
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py
@@ -0,0 +1,97 @@
+# SPDX-FileCopyrightText: Huawei Inc.
+#
+# SPDX-License-Identifier: MIT
+
+import os
+import oe
+import unittest
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_vars
+
+class ShadowUtilsTidyFiles(OESelftestTestCase):
+ """
+ Check if shadow image rootfs files are tidy.
+
+ The tests are focused on testing the functionality provided by the
+ 'tidy_shadowutils_files' rootfs postprocess command (via
+ SORT_PASSWD_POSTPROCESS_COMMAND).
+ """
+
+ def sysconf_build(self):
+ """
+        Check whether the shadow tidy files tests should run and, if so,
+        build a test image and return its sysconf rootfs path.
+ """
+
+ test_image = "core-image-minimal"
+
+ config = 'IMAGE_CLASSES += "extrausers"\n'
+ config += 'EXTRA_USERS_PARAMS = "groupadd -g 1000 oeqatester; "\n'
+ config += 'EXTRA_USERS_PARAMS += "useradd -p \'\' -u 1000 -N -g 1000 oeqatester; "\n'
+ self.write_config(config)
+
+ vars = get_bb_vars(("IMAGE_ROOTFS", "SORT_PASSWD_POSTPROCESS_COMMAND", "sysconfdir"),
+ test_image)
+ passwd_postprocess_cmd = vars["SORT_PASSWD_POSTPROCESS_COMMAND"]
+ self.assertIsNotNone(passwd_postprocess_cmd)
+ if (passwd_postprocess_cmd.strip() != 'tidy_shadowutils_files;'):
+            raise unittest.SkipTest("Testcase skipped as the 'tidy_shadowutils_files' "
+                "rootfs postprocess command is not set as SORT_PASSWD_POSTPROCESS_COMMAND.")
+
+ rootfs = vars["IMAGE_ROOTFS"]
+ self.assertIsNotNone(rootfs)
+ sysconfdir = vars["sysconfdir"]
+ bitbake(test_image)
+ self.assertIsNotNone(sysconfdir)
+
+ return oe.path.join(rootfs, sysconfdir)
+
+ def test_shadowutils_backup_files(self):
+ """
+ Test that the rootfs doesn't include any known shadow backup files.
+ """
+
+ backup_files = (
+ 'group-',
+ 'gshadow-',
+ 'passwd-',
+ 'shadow-',
+ 'subgid-',
+ 'subuid-',
+ )
+
+ rootfs_sysconfdir = self.sysconf_build()
+ found = []
+ for backup_file in backup_files:
+ backup_filepath = oe.path.join(rootfs_sysconfdir, backup_file)
+ if os.path.exists(backup_filepath):
+ found.append(backup_file)
+ if (found):
+ raise Exception('The following shadow backup files were found in '
+ 'the rootfs: %s' % found)
+
+ def test_shadowutils_sorted_files(self):
+ """
+ Test that the 'passwd' and the 'group' shadow utils files are ordered
+ by ID.
+ """
+
+ files = (
+ 'passwd',
+ 'group',
+ )
+
+ rootfs_sysconfdir = self.sysconf_build()
+ unsorted = []
+ for file in files:
+ filepath = oe.path.join(rootfs_sysconfdir, file)
+ with open(filepath, 'rb') as f:
+ ids = []
+ lines = f.readlines()
+ for line in lines:
+ entries = line.split(b':')
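+                    # Field 2 is the numeric ID: the UID in passwd, the GID in group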
+ ids.append(int(entries[2]))
+ if (ids != sorted(ids)):
+ unsorted.append(file)
+ if (unsorted):
+ raise Exception("The following files were not sorted by ID as expected: %s" % unsorted)
diff --git a/meta/lib/oeqa/selftest/cases/rpmtests.py b/meta/lib/oeqa/selftest/cases/rpmtests.py
new file mode 100644
index 0000000000..902d7dca3d
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/rpmtests.py
@@ -0,0 +1,14 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+
+class BitbakeTests(OESelftestTestCase):
+
+ def test_rpm_filenames(self):
+ test_recipe = "testrpm"
+ bitbake(test_recipe)
diff --git a/meta/lib/oeqa/selftest/cases/runcmd.py b/meta/lib/oeqa/selftest/cases/runcmd.py
index d76d7063c6..70047ca0ca 100644
--- a/meta/lib/oeqa/selftest/cases/runcmd.py
+++ b/meta/lib/oeqa/selftest/cases/runcmd.py
@@ -1,7 +1,12 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import runCmd
from oeqa.utils import CommandError
-from oeqa.core.decorator.oeid import OETestID
import subprocess
import threading
@@ -24,111 +29,96 @@ class RunCmdTests(OESelftestTestCase):
# The delta is intentionally smaller than the timeout, to detect cases where
# we incorrectly apply the timeout more than once.
- TIMEOUT = 2
- DELTA = 1
+ TIMEOUT = 10
+ DELTA = 8
- @OETestID(1916)
def test_result_okay(self):
result = runCmd("true")
self.assertEqual(result.status, 0)
- @OETestID(1915)
def test_result_false(self):
result = runCmd("false", ignore_status=True)
self.assertEqual(result.status, 1)
- @OETestID(1917)
def test_shell(self):
# A shell is used for all string commands.
result = runCmd("false; true", ignore_status=True)
self.assertEqual(result.status, 0)
- @OETestID(1910)
def test_no_shell(self):
self.assertRaises(FileNotFoundError,
runCmd, "false; true", shell=False)
- @OETestID(1906)
def test_list_not_found(self):
self.assertRaises(FileNotFoundError,
runCmd, ["false; true"])
- @OETestID(1907)
def test_list_okay(self):
result = runCmd(["true"])
self.assertEqual(result.status, 0)
- @OETestID(1913)
def test_result_assertion(self):
- self.assertRaisesRegexp(AssertionError, "Command 'echo .* false' returned non-zero exit status 1:\nfoobar",
+ self.assertRaisesRegex(AssertionError, "Command 'echo .* false' returned non-zero exit status 1:\nfoobar",
runCmd, "echo foobar >&2; false", shell=True)
- @OETestID(1914)
def test_result_exception(self):
- self.assertRaisesRegexp(CommandError, "Command 'echo .* false' returned non-zero exit status 1 with output: foobar",
+ self.assertRaisesRegex(CommandError, "Command 'echo .* false' returned non-zero exit status 1 with output: foobar",
runCmd, "echo foobar >&2; false", shell=True, assert_error=False)
- @OETestID(1911)
def test_output(self):
- result = runCmd("echo stdout; echo stderr >&2", shell=True)
+ result = runCmd("echo stdout; echo stderr >&2", shell=True, sync=False)
self.assertEqual("stdout\nstderr", result.output)
self.assertEqual("", result.error)
- @OETestID(1912)
def test_output_split(self):
- result = runCmd("echo stdout; echo stderr >&2", shell=True, stderr=subprocess.PIPE)
+ result = runCmd("echo stdout; echo stderr >&2", shell=True, stderr=subprocess.PIPE, sync=False)
self.assertEqual("stdout", result.output)
self.assertEqual("stderr", result.error)
- @OETestID(1920)
def test_timeout(self):
numthreads = threading.active_count()
start = time.time()
# Killing a hanging process only works when not using a shell?!
- result = runCmd(['sleep', '60'], timeout=self.TIMEOUT, ignore_status=True)
+ result = runCmd(['sleep', '60'], timeout=self.TIMEOUT, ignore_status=True, sync=False)
self.assertEqual(result.status, -signal.SIGTERM)
end = time.time()
self.assertLess(end - start, self.TIMEOUT + self.DELTA)
- self.assertEqual(numthreads, threading.active_count())
+ self.assertEqual(numthreads, threading.active_count(), msg="Thread counts were not equal before (%s) and after (%s), active threads: %s" % (numthreads, threading.active_count(), threading.enumerate()))
- @OETestID(1921)
def test_timeout_split(self):
numthreads = threading.active_count()
start = time.time()
# Killing a hanging process only works when not using a shell?!
- result = runCmd(['sleep', '60'], timeout=self.TIMEOUT, ignore_status=True, stderr=subprocess.PIPE)
+ result = runCmd(['sleep', '60'], timeout=self.TIMEOUT, ignore_status=True, stderr=subprocess.PIPE, sync=False)
self.assertEqual(result.status, -signal.SIGTERM)
end = time.time()
self.assertLess(end - start, self.TIMEOUT + self.DELTA)
- self.assertEqual(numthreads, threading.active_count())
+ self.assertEqual(numthreads, threading.active_count(), msg="Thread counts were not equal before (%s) and after (%s), active threads: %s" % (numthreads, threading.active_count(), threading.enumerate()))
- @OETestID(1918)
def test_stdin(self):
numthreads = threading.active_count()
- result = runCmd("cat", data=b"hello world", timeout=self.TIMEOUT)
+ result = runCmd("cat", data=b"hello world", timeout=self.TIMEOUT, sync=False)
self.assertEqual("hello world", result.output)
- self.assertEqual(numthreads, threading.active_count())
+ self.assertEqual(numthreads, threading.active_count(), msg="Thread counts were not equal before (%s) and after (%s), active threads: %s" % (numthreads, threading.active_count(), threading.enumerate()))
+ self.assertEqual(numthreads, 1)
- @OETestID(1919)
def test_stdin_timeout(self):
numthreads = threading.active_count()
start = time.time()
- result = runCmd(['sleep', '60'], data=b"hello world", timeout=self.TIMEOUT, ignore_status=True)
+ result = runCmd(['sleep', '60'], data=b"hello world", timeout=self.TIMEOUT, ignore_status=True, sync=False)
self.assertEqual(result.status, -signal.SIGTERM)
end = time.time()
self.assertLess(end - start, self.TIMEOUT + self.DELTA)
- self.assertEqual(numthreads, threading.active_count())
+ self.assertEqual(numthreads, threading.active_count(), msg="Thread counts were not equal before (%s) and after (%s), active threads: %s" % (numthreads, threading.active_count(), threading.enumerate()))
- @OETestID(1908)
def test_log(self):
log = MemLogger()
- result = runCmd("echo stdout; echo stderr >&2", shell=True, output_log=log)
+ result = runCmd("echo stdout; echo stderr >&2", shell=True, output_log=log, sync=False)
self.assertEqual(["Running: echo stdout; echo stderr >&2", "stdout", "stderr"], log.info_msgs)
self.assertEqual([], log.error_msgs)
- @OETestID(1909)
def test_log_split(self):
log = MemLogger()
- result = runCmd("echo stdout; echo stderr >&2", shell=True, output_log=log, stderr=subprocess.PIPE)
+ result = runCmd("echo stdout; echo stderr >&2", shell=True, output_log=log, stderr=subprocess.PIPE, sync=False)
self.assertEqual(["Running: echo stdout; echo stderr >&2", "stdout"], log.info_msgs)
self.assertEqual(["stderr"], log.error_msgs)
diff --git a/meta/lib/oeqa/selftest/cases/runqemu.py b/meta/lib/oeqa/selftest/cases/runqemu.py
index 47d41f5218..f01e1eec66 100644
--- a/meta/lib/oeqa/selftest/cases/runqemu.py
+++ b/meta/lib/oeqa/selftest/cases/runqemu.py
@@ -1,138 +1,216 @@
#
# Copyright (c) 2017 Wind River Systems, Inc.
#
+# SPDX-License-Identifier: MIT
+#
+import os
import re
-
+import time
+import oe.types
+from oeqa.core.decorator import OETestTag
+from oeqa.core.decorator.data import skipIfNotArch, skipIfNotMachine
from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import bitbake, runqemu, get_bb_var
-from oeqa.core.decorator.oeid import OETestID
+
+@OETestTag("runqemu")
class RunqemuTests(OESelftestTestCase):
"""Runqemu test class"""
image_is_ready = False
deploy_dir_image = ''
- # We only want to print runqemu stdout/stderr if there is a test case failure
- buffer = True
def setUpLocal(self):
super(RunqemuTests, self).setUpLocal()
self.recipe = 'core-image-minimal'
- self.machine = 'qemux86-64'
- self.fstypes = "ext4 iso hddimg wic.vmdk wic.qcow2 wic.vdi"
+ self.machine = self.td['MACHINE']
+ self.image_link_name = get_bb_var('IMAGE_LINK_NAME', self.recipe)
+
+ self.fstypes = "ext4"
+ if self.td["HOST_ARCH"] in ('i586', 'i686', 'x86_64'):
+ self.fstypes += " iso hddimg"
+ if self.machine == "qemux86-64":
+ self.fstypes += " wic.vmdk wic.qcow2 wic.vdi"
+
self.cmd_common = "runqemu nographic"
+ kvm = oe.types.qemu_use_kvm(get_bb_var('QEMU_USE_KVM'), self.td["TARGET_ARCH"])
+ if kvm:
+ self.cmd_common += " kvm"
self.write_config(
"""
-MACHINE = "%s"
IMAGE_FSTYPES = "%s"
# 10 means 1 second
SYSLINUX_TIMEOUT = "10"
-"""
-% (self.machine, self.fstypes)
- )
+""" % self.fstypes)
if not RunqemuTests.image_is_ready:
RunqemuTests.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
bitbake(self.recipe)
RunqemuTests.image_is_ready = True
- @OETestID(2001)
def test_boot_machine(self):
"""Test runqemu machine"""
cmd = "%s %s" % (self.cmd_common, self.machine)
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
- self.assertTrue(qemu.runner.logged, "Failed: %s" % cmd)
+ with open(qemu.qemurunnerlog) as f:
+ self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read()))
- @OETestID(2002)
def test_boot_machine_ext4(self):
"""Test runqemu machine ext4"""
cmd = "%s %s ext4" % (self.cmd_common, self.machine)
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
with open(qemu.qemurunnerlog) as f:
- self.assertTrue('rootfs.ext4' in f.read(), "Failed: %s" % cmd)
+ regexp = r'\nROOTFS: .*\.ext4]\n'
+ self.assertRegex(f.read(), regexp, "Failed to find '%s' in '%s' after running '%s'" % (regexp, qemu.qemurunnerlog, cmd))
- @OETestID(2003)
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_boot_machine_iso(self):
"""Test runqemu machine iso"""
cmd = "%s %s iso" % (self.cmd_common, self.machine)
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
with open(qemu.qemurunnerlog) as f:
- self.assertTrue('media=cdrom' in f.read(), "Failed: %s" % cmd)
+ text_in = 'media=cdrom'
+ self.assertIn(text_in, f.read(), "Failed to find '%s' in '%s' after running '%s'" % (text_in, qemu.qemurunnerlog, cmd))
- @OETestID(2004)
def test_boot_recipe_image(self):
"""Test runqemu recipe-image"""
cmd = "%s %s" % (self.cmd_common, self.recipe)
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
- self.assertTrue(qemu.runner.logged, "Failed: %s" % cmd)
+ with open(qemu.qemurunnerlog) as f:
+ self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read()))
- @OETestID(2005)
+ # https://bugzilla.yoctoproject.org/show_bug.cgi?id=14963
+ @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
def test_boot_recipe_image_vmdk(self):
"""Test runqemu recipe-image vmdk"""
cmd = "%s %s wic.vmdk" % (self.cmd_common, self.recipe)
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
with open(qemu.qemurunnerlog) as f:
- self.assertTrue('format=vmdk' in f.read(), "Failed: %s" % cmd)
+ text_in = 'format=vmdk'
+ self.assertIn(text_in, f.read(), "Failed to find '%s' in '%s' after running '%s'" % (text_in, qemu.qemurunnerlog, cmd))
- @OETestID(2006)
+ @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
def test_boot_recipe_image_vdi(self):
"""Test runqemu recipe-image vdi"""
cmd = "%s %s wic.vdi" % (self.cmd_common, self.recipe)
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
with open(qemu.qemurunnerlog) as f:
- self.assertTrue('format=vdi' in f.read(), "Failed: %s" % cmd)
+ text_in = 'format=vdi'
+ self.assertIn(text_in, f.read(), "Failed to find '%s' in '%s' after running '%s'" % (text_in, qemu.qemurunnerlog, cmd))
- @OETestID(2007)
def test_boot_deploy(self):
"""Test runqemu deploy_dir_image"""
cmd = "%s %s" % (self.cmd_common, self.deploy_dir_image)
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
- self.assertTrue(qemu.runner.logged, "Failed: %s" % cmd)
+ with open(qemu.qemurunnerlog) as f:
+ self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read()))
- @OETestID(2008)
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_boot_deploy_hddimg(self):
"""Test runqemu deploy_dir_image hddimg"""
cmd = "%s %s hddimg" % (self.cmd_common, self.deploy_dir_image)
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
with open(qemu.qemurunnerlog) as f:
- self.assertTrue(re.search('file=.*.hddimg', f.read()), "Failed: %s" % cmd)
+ self.assertTrue(re.search('file=.*.hddimg', f.read()), "Failed: %s, %s" % (cmd, f.read()))
- @OETestID(2009)
def test_boot_machine_slirp(self):
"""Test runqemu machine slirp"""
cmd = "%s slirp %s" % (self.cmd_common, self.machine)
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
with open(qemu.qemurunnerlog) as f:
- self.assertTrue(' -netdev user' in f.read(), "Failed: %s" % cmd)
+ self.assertIn(' -netdev user', f.read(), "Failed: %s" % cmd)
- @OETestID(2009)
+ @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
def test_boot_machine_slirp_qcow2(self):
"""Test runqemu machine slirp qcow2"""
cmd = "%s slirp wic.qcow2 %s" % (self.cmd_common, self.machine)
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
with open(qemu.qemurunnerlog) as f:
- self.assertTrue('format=qcow2' in f.read(), "Failed: %s" % cmd)
+ self.assertIn('format=qcow2', f.read(), "Failed: %s" % cmd)
- @OETestID(2010)
def test_boot_qemu_boot(self):
"""Test runqemu /path/to/image.qemuboot.conf"""
- qemuboot_conf = "%s-%s.qemuboot.conf" % (self.recipe, self.machine)
+ qemuboot_conf = "%s.qemuboot.conf" % (self.image_link_name)
qemuboot_conf = os.path.join(self.deploy_dir_image, qemuboot_conf)
if not os.path.exists(qemuboot_conf):
self.skipTest("%s not found" % qemuboot_conf)
cmd = "%s %s" % (self.cmd_common, qemuboot_conf)
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
- self.assertTrue(qemu.runner.logged, "Failed: %s" % cmd)
+ with open(qemu.qemurunnerlog) as f:
+ self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read()))
- @OETestID(2011)
def test_boot_rootfs(self):
"""Test runqemu /path/to/rootfs.ext4"""
- rootfs = "%s-%s.ext4" % (self.recipe, self.machine)
+ rootfs = "%s.ext4" % (self.image_link_name)
rootfs = os.path.join(self.deploy_dir_image, rootfs)
if not os.path.exists(rootfs):
self.skipTest("%s not found" % rootfs)
cmd = "%s %s" % (self.cmd_common, rootfs)
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
- self.assertTrue(qemu.runner.logged, "Failed: %s" % cmd)
+ with open(qemu.qemurunnerlog) as f:
+ self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read()))
+
+
+# This test is kept in a separate class so that it can verify that the
+# shutdown command shuts down qemu as expected on each qemu architecture,
+# based on the MACHINE configuration in the config file (e.g. local.conf).
+#
+# This differs from RunqemuTests, which is dedicated to MACHINE=qemux86-64
+# and checks that qemux86-64 boots various filesystem types, including
+# live images (iso and hddimg) that are not supported on all qemu
+# architectures.
+@OETestTag("machine")
+@OETestTag("runqemu")
+class QemuTest(OESelftestTestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super(QemuTest, cls).setUpClass()
+ cls.recipe = 'core-image-minimal'
+ cls.machine = get_bb_var('MACHINE')
+ cls.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
+ cls.image_link_name = get_bb_var('IMAGE_LINK_NAME', cls.recipe)
+ cls.cmd_common = "runqemu nographic"
+ cls.qemuboot_conf = "%s.qemuboot.conf" % (cls.image_link_name)
+ cls.qemuboot_conf = os.path.join(cls.deploy_dir_image, cls.qemuboot_conf)
+ bitbake(cls.recipe)
+
+ def _start_qemu_shutdown_check_if_shutdown_succeeded(self, qemu, timeout):
+ # Allow the runner's LoggingThread instance to exit without errors
+ # (such as the exception "Console connection closed unexpectedly")
+ # as qemu will disappear when we shut it down
+ qemu.runner.allowexit()
+ qemu.run_serial("shutdown -h now")
+ time_track = 0
+ try:
+ while True:
+ is_alive = qemu.check()
+ if not is_alive:
+ return True
+ if time_track > timeout:
+ return False
+ time.sleep(1)
+ time_track += 1
+ except SystemExit:
+ return True
+
+ def test_qemu_can_shutdown(self):
+ self.assertExists(self.qemuboot_conf)
+ cmd = "%s %s" % (self.cmd_common, self.qemuboot_conf)
+ shutdown_timeout = 120
+ with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
+ qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout)
+ self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout))
+
+ def test_qemu_can_boot_nfs_and_shutdown(self):
+ rootfs_tar = "%s.tar.bz2" % (self.image_link_name)
+ rootfs_tar = os.path.join(self.deploy_dir_image, rootfs_tar)
+ self.assertExists(rootfs_tar)
+ cmd = "%s %s" % (self.cmd_common, rootfs_tar)
+ shutdown_timeout = 120
+ with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
+ qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout)
+ self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout))
diff --git a/meta/lib/oeqa/selftest/cases/runtime_test.py b/meta/lib/oeqa/selftest/cases/runtime_test.py
index 1c69255b56..12000aac16 100644
--- a/meta/lib/oeqa/selftest/cases/runtime_test.py
+++ b/meta/lib/oeqa/selftest/cases/runtime_test.py
@@ -1,20 +1,20 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu
-from oeqa.utils.sshcontrol import SSHControl
-from oeqa.core.decorator.oeid import OETestID
+from oeqa.core.decorator import OETestTag
import os
-import re
import tempfile
-import shutil
+import oe.lsb
+from oeqa.core.decorator.data import skipIfNotQemu, skipIfNotMachine
class TestExport(OESelftestTestCase):
- @classmethod
- def tearDownClass(cls):
- runCmd("rm -rf /tmp/sdk")
- super(TestExport, cls).tearDownClass()
-
- @OETestID(1499)
+ @OETestTag("runqemu")
def test_testexport_basic(self):
"""
Summary: Check basic testexport functionality with only ping test enabled.
@@ -25,7 +25,7 @@ class TestExport(OESelftestTestCase):
Author: Mariano Lopez <mariano.lopez@intel.com>
"""
- features = 'INHERIT += "testexport"\n'
+ features = 'IMAGE_CLASSES += "testexport"\n'
# These aren't the actual IP addresses but testexport class needs something defined
features += 'TEST_SERVER_IP = "192.168.7.1"\n'
features += 'TEST_TARGET_IP = "192.168.7.1"\n'
@@ -54,7 +54,6 @@ class TestExport(OESelftestTestCase):
         # Verify ping test was successful
self.assertEqual(0, result.status, 'oe-test runtime returned a non 0 status')
- @OETestID(1641)
def test_testexport_sdk(self):
"""
Summary: Check sdk functionality for testexport.
@@ -67,7 +66,7 @@ class TestExport(OESelftestTestCase):
Author: Mariano Lopez <mariano.lopez@intel.com>
"""
- features = 'INHERIT += "testexport"\n'
+ features = 'IMAGE_CLASSES += "testexport"\n'
# These aren't the actual IP addresses but testexport class needs something defined
features += 'TEST_SERVER_IP = "192.168.7.1"\n'
features += 'TEST_TARGET_IP = "192.168.7.1"\n'
@@ -92,24 +91,25 @@ class TestExport(OESelftestTestCase):
msg = "Couldn't find SDK tarball: %s" % tarball_path
self.assertEqual(os.path.isfile(tarball_path), True, msg)
- # Extract SDK and run tar from SDK
- result = runCmd("%s -y -d /tmp/sdk" % tarball_path)
- self.assertEqual(0, result.status, "Couldn't extract SDK")
+ with tempfile.TemporaryDirectory() as tmpdirname:
+ # Extract SDK and run tar from SDK
+ result = runCmd("%s -y -d %s" % (tarball_path, tmpdirname))
+ self.assertEqual(0, result.status, "Couldn't extract SDK")
- env_script = result.output.split()[-1]
- result = runCmd(". %s; which tar" % env_script, shell=True)
- self.assertEqual(0, result.status, "Couldn't setup SDK environment")
- is_sdk_tar = True if "/tmp/sdk" in result.output else False
- self.assertTrue(is_sdk_tar, "Couldn't setup SDK environment")
+ env_script = result.output.split()[-1]
+ result = runCmd(". %s; which tar" % env_script, shell=True)
+ self.assertEqual(0, result.status, "Couldn't setup SDK environment")
+ is_sdk_tar = True if tmpdirname in result.output else False
+ self.assertTrue(is_sdk_tar, "Couldn't setup SDK environment")
- tar_sdk = result.output
- result = runCmd("%s --version" % tar_sdk)
- self.assertEqual(0, result.status, "Couldn't run tar from SDK")
+ tar_sdk = result.output
+ result = runCmd("%s --version" % tar_sdk)
+ self.assertEqual(0, result.status, "Couldn't run tar from SDK")
+@OETestTag("runqemu")
class TestImage(OESelftestTestCase):
- @OETestID(1644)
def test_testimage_install(self):
"""
Summary: Check install packages functionality for testimage/testexport.
@@ -121,26 +121,41 @@ class TestImage(OESelftestTestCase):
if get_bb_var('DISTRO') == 'poky-tiny':
self.skipTest('core-image-full-cmdline not buildable for poky-tiny')
- features = 'INHERIT += "testimage"\n'
+ features = 'IMAGE_CLASSES += "testimage"\n'
+ features += 'IMAGE_INSTALL:append = " libssl"\n'
features += 'TEST_SUITES = "ping ssh selftest"\n'
self.write_config(features)
- # Build core-image-sato and testimage
bitbake('core-image-full-cmdline socat')
bitbake('-c testimage core-image-full-cmdline')
- @OETestID(1883)
+ def test_testimage_slirp(self):
+ """
+ Summary: Check basic testimage functionality with qemu and slirp networking.
+ """
+
+ features = '''
+IMAGE_CLASSES:append = " testimage"
+IMAGE_FEATURES:append = " ssh-server-dropbear"
+IMAGE_ROOTFS_EXTRA_SPACE:append = "${@bb.utils.contains("IMAGE_CLASSES", "testimage", " + 5120", "", d)}"
+TEST_RUNQEMUPARAMS += " slirp"
+'''
+ self.write_config(features)
+
+ bitbake('core-image-minimal')
+ bitbake('-c testimage core-image-minimal')
+
def test_testimage_dnf(self):
"""
Summary: Check package feeds functionality for dnf
Expected: 1. Check that remote package feeds can be accessed
Product: oe-core
- Author: Alexander Kanavin <alexander.kanavin@intel.com>
+ Author: Alexander Kanavin <alex.kanavin@gmail.com>
"""
if get_bb_var('DISTRO') == 'poky-tiny':
self.skipTest('core-image-full-cmdline not buildable for poky-tiny')
- features = 'INHERIT += "testimage"\n'
+ features = 'IMAGE_CLASSES += "testimage"\n'
features += 'TEST_SUITES = "ping ssh dnf_runtime dnf.DnfBasicTest.test_dnf_help"\n'
# We don't yet know what the server ip and port will be - they will be patched
# in at the start of the on-image test
@@ -148,27 +163,196 @@ class TestImage(OESelftestTestCase):
features += 'EXTRA_IMAGE_FEATURES += "package-management"\n'
features += 'PACKAGE_CLASSES = "package_rpm"\n'
+ bitbake('gnupg-native -c addto_recipe_sysroot')
+
+ # Enable package feed signing
+ self.gpg_home = tempfile.mkdtemp(prefix="oeqa-feed-sign-")
+ self.track_for_cleanup(self.gpg_home)
+ signing_key_dir = os.path.join(self.testlayer_path, 'files', 'signing')
+ runCmd('gpgconf --list-dirs --homedir %s; gpg -v --batch --homedir %s --import %s' % (self.gpg_home, self.gpg_home, os.path.join(signing_key_dir, 'key.secret')), native_sysroot=get_bb_var("RECIPE_SYSROOT_NATIVE", "gnupg-native"), shell=True)
+ features += 'INHERIT += "sign_package_feed"\n'
+ features += 'PACKAGE_FEED_GPG_NAME = "testuser"\n'
+ features += 'PACKAGE_FEED_GPG_PASSPHRASE_FILE = "%s"\n' % os.path.join(signing_key_dir, 'key.passphrase')
+ features += 'GPG_PATH = "%s"\n' % self.gpg_home
+ features += 'PSEUDO_IGNORE_PATHS .= ",%s"\n' % self.gpg_home
+ self.write_config(features)
+
+ bitbake('core-image-full-cmdline socat')
+ bitbake('-c testimage core-image-full-cmdline')
+
+ def test_testimage_apt(self):
+ """
+ Summary: Check package feeds functionality for apt
+ Expected: 1. Check that remote package feeds can be accessed
+ Product: oe-core
+ Author: Ferry Toth <fntoth@gmail.com>
+ """
+ if get_bb_var('DISTRO') == 'poky-tiny':
+ self.skipTest('core-image-full-cmdline not buildable for poky-tiny')
+
+ features = 'IMAGE_CLASSES += "testimage"\n'
+ features += 'TEST_SUITES = "ping ssh apt.AptRepoTest.test_apt_install_from_repo"\n'
+ # We don't yet know what the server ip and port will be - they will be patched
+ # in at the start of the on-image test
+ features += 'PACKAGE_FEED_URIS = "http://bogus_ip:bogus_port"\n'
+ features += 'EXTRA_IMAGE_FEATURES += "package-management"\n'
+ features += 'PACKAGE_CLASSES = "package_deb"\n'
+ # We need gnupg on the target to install keys
+ features += 'IMAGE_INSTALL:append:pn-core-image-full-cmdline = " gnupg"\n'
+
+ bitbake('gnupg-native -c addto_recipe_sysroot')
+
# Enable package feed signing
self.gpg_home = tempfile.mkdtemp(prefix="oeqa-feed-sign-")
+ self.track_for_cleanup(self.gpg_home)
signing_key_dir = os.path.join(self.testlayer_path, 'files', 'signing')
- runCmd('gpg --batch --homedir %s --import %s' % (self.gpg_home, os.path.join(signing_key_dir, 'key.secret')))
+ runCmd('gpgconf --list-dirs --homedir %s; gpg -v --batch --homedir %s --import %s' % (self.gpg_home, self.gpg_home, os.path.join(signing_key_dir, 'key.secret')), native_sysroot=get_bb_var("RECIPE_SYSROOT_NATIVE", "gnupg-native"), shell=True)
features += 'INHERIT += "sign_package_feed"\n'
features += 'PACKAGE_FEED_GPG_NAME = "testuser"\n'
features += 'PACKAGE_FEED_GPG_PASSPHRASE_FILE = "%s"\n' % os.path.join(signing_key_dir, 'key.passphrase')
features += 'GPG_PATH = "%s"\n' % self.gpg_home
+ features += 'PSEUDO_IGNORE_PATHS .= ",%s"\n' % self.gpg_home
self.write_config(features)
         # Build core-image-full-cmdline and run testimage
bitbake('core-image-full-cmdline socat')
bitbake('-c testimage core-image-full-cmdline')
- # remove the oeqa-feed-sign temporal directory
- shutil.rmtree(self.gpg_home, ignore_errors=True)
+ # https://bugzilla.yoctoproject.org/show_bug.cgi?id=14966
+ @skipIfNotMachine("qemux86-64", "test needs qemux86-64")
+ def test_testimage_virgl_gtk_sdl(self):
+ """
+ Summary: Check host-assisted accelerate OpenGL functionality in qemu with gtk and SDL frontends
+ Expected: 1. Check that virgl kernel driver is loaded and 3d acceleration is enabled
+ 2. Check that kmscube demo runs without crashing.
+ Product: oe-core
+ Author: Alexander Kanavin <alex.kanavin@gmail.com>
+ """
+ if "DISPLAY" not in os.environ:
+ self.skipTest("virgl gtk test must be run inside a X session")
+ distro = oe.lsb.distro_identifier()
+ if distro and distro == 'debian-8':
+ self.skipTest('virgl isn\'t working with Debian 8')
+ if distro and distro == 'debian-9':
+ self.skipTest('virgl isn\'t working with Debian 9')
+ if distro and distro == 'centos-7':
+ self.skipTest('virgl isn\'t working with Centos 7')
+ if distro and distro == 'opensuseleap-15.0':
+ self.skipTest('virgl isn\'t working with Opensuse 15.0')
+
+ qemu_packageconfig = get_bb_var('PACKAGECONFIG', 'qemu-system-native')
+ qemu_distrofeatures = get_bb_var('DISTRO_FEATURES', 'qemu-system-native')
+ features = 'IMAGE_CLASSES += "testimage"\n'
+ if 'gtk+' not in qemu_packageconfig:
+ features += 'PACKAGECONFIG:append:pn-qemu-system-native = " gtk+"\n'
+ if 'sdl' not in qemu_packageconfig:
+ features += 'PACKAGECONFIG:append:pn-qemu-system-native = " sdl"\n'
+ if 'opengl' not in qemu_distrofeatures:
+ features += 'DISTRO_FEATURES:append = " opengl"\n'
+ features += 'TEST_SUITES = "ping ssh virgl"\n'
+ features += 'IMAGE_FEATURES:append = " ssh-server-dropbear"\n'
+ features += 'IMAGE_INSTALL:append = " kmscube"\n'
+ features_gtk = features + 'TEST_RUNQEMUPARAMS += " gtk gl"\n'
+ self.write_config(features_gtk)
+ bitbake('core-image-minimal')
+ bitbake('-c testimage core-image-minimal')
+ features_sdl = features + 'TEST_RUNQEMUPARAMS += " sdl gl"\n'
+ self.write_config(features_sdl)
+ bitbake('core-image-minimal')
+ bitbake('-c testimage core-image-minimal')
+
+ @skipIfNotMachine("qemux86-64", "test needs qemux86-64")
+ def test_testimage_virgl_headless(self):
+ """
+ Summary: Check host-assisted accelerate OpenGL functionality in qemu with egl-headless frontend
+ Expected: 1. Check that virgl kernel driver is loaded and 3d acceleration is enabled
+ 2. Check that kmscube demo runs without crashing.
+ Product: oe-core
+ Author: Alexander Kanavin <alex.kanavin@gmail.com>
+ """
+ import subprocess, os
+
+ distro = oe.lsb.distro_identifier()
+ if distro and (distro in ['debian-9', 'debian-10', 'centos-7', 'centos-8', 'ubuntu-16.04', 'ubuntu-18.04'] or
+ distro.startswith('almalinux') or distro.startswith('rocky')):
+ self.skipTest('virgl headless cannot be tested with %s' %(distro))
+
+ qemu_distrofeatures = get_bb_var('DISTRO_FEATURES', 'qemu-system-native')
+ features = 'IMAGE_CLASSES += "testimage"\n'
+ if 'opengl' not in qemu_distrofeatures:
+ features += 'DISTRO_FEATURES:append = " opengl"\n'
+ features += 'TEST_SUITES = "ping ssh virgl"\n'
+ features += 'IMAGE_FEATURES:append = " ssh-server-dropbear"\n'
+ features += 'IMAGE_INSTALL:append = " kmscube"\n'
+ features += 'TEST_RUNQEMUPARAMS += " egl-headless"\n'
+ self.write_config(features)
+ bitbake('core-image-minimal')
+ bitbake('-c testimage core-image-minimal')
+@OETestTag("runqemu")
class Postinst(OESelftestTestCase):
- @OETestID(1540)
- @OETestID(1545)
- def test_postinst_rootfs_and_boot(self):
+
+ def init_manager_loop(self, init_manager):
+ import oe.path
+
+ vars = get_bb_vars(("IMAGE_ROOTFS", "sysconfdir"), "core-image-minimal")
+ rootfs = vars["IMAGE_ROOTFS"]
+ self.assertIsNotNone(rootfs)
+ sysconfdir = vars["sysconfdir"]
+ self.assertIsNotNone(sysconfdir)
+ # Need to use oe.path here as sysconfdir starts with /
+ hosttestdir = oe.path.join(rootfs, sysconfdir, "postinst-test")
+ targettestdir = os.path.join(sysconfdir, "postinst-test")
+
+ for classes in ("package_rpm", "package_deb", "package_ipk"):
+ with self.subTest(init_manager=init_manager, package_class=classes):
+ features = 'CORE_IMAGE_EXTRA_INSTALL = "postinst-delayed-b"\n'
+ features += 'IMAGE_FEATURES += "package-management empty-root-password"\n'
+ features += 'PACKAGE_CLASSES = "%s"\n' % classes
+ if init_manager == "systemd":
+ features += 'DISTRO_FEATURES:append = " systemd usrmerge"\n'
+ features += 'VIRTUAL-RUNTIME_init_manager = "systemd"\n'
+ features += 'DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit"\n'
+ features += 'VIRTUAL-RUNTIME_initscripts = ""\n'
+ self.write_config(features)
+
+ bitbake('core-image-minimal')
+
+ self.assertTrue(os.path.isfile(os.path.join(hosttestdir, "rootfs")),
+ "rootfs state file was not created")
+
+ with runqemu('core-image-minimal') as qemu:
+ # Make the test echo a string and search for that as
+                    # run_serial()'s status code is useless.
+ for filename in ("rootfs", "delayed-a", "delayed-b"):
+ status, output = qemu.run_serial("test -f %s && echo found" % os.path.join(targettestdir, filename))
+ self.assertIn("found", output, "%s was not present on boot" % filename)
+
+
+
+ @skipIfNotQemu()
+ def test_postinst_rootfs_and_boot_sysvinit(self):
+ """
+ Summary: The purpose of this test case is to verify Post-installation
+ scripts are called when rootfs is created and also test
+ that script can be delayed to run at first boot.
+ Dependencies: NA
+ Steps: 1. Add proper configuration to local.conf file
+ 2. Build a "core-image-minimal" image
+ 3. Verify that file created by postinst_rootfs recipe is
+ present on rootfs dir.
+ 4. Boot the image created on qemu and verify that the file
+ created by postinst_boot recipe is present on image.
+ Expected: The files are successfully created during rootfs and boot
+ time for 3 different package managers: rpm,ipk,deb and
+ for initialization managers: sysvinit.
+
+ """
+ self.init_manager_loop("sysvinit")
+
+
+ @skipIfNotQemu()
+ def test_postinst_rootfs_and_boot_systemd(self):
"""
Summary: The purpose of this test case is to verify Post-installation
scripts are called when rootfs is created and also test
@@ -182,10 +366,23 @@ class Postinst(OESelftestTestCase):
created by postinst_boot recipe is present on image.
Expected: The files are successfully created during rootfs and boot
time for 3 different package managers: rpm,ipk,deb and
- for initialization managers: sysvinit and systemd.
+ for initialization managers: systemd.
"""
+ self.init_manager_loop("systemd")
+
+
+ def test_failing_postinst(self):
+ """
+ Summary: The purpose of this test case is to verify that post-installation
+ scripts that contain errors are properly reported.
+ Expected: The scriptlet failure is properly reported.
+ The file that is created after the error in the scriptlet is not present.
+ Product: oe-core
+ Author: Alexander Kanavin <alex.kanavin@gmail.com>
+ """
+
import oe.path
vars = get_bb_vars(("IMAGE_ROOTFS", "sysconfdir"), "core-image-minimal")
@@ -195,29 +392,95 @@ class Postinst(OESelftestTestCase):
self.assertIsNotNone(sysconfdir)
# Need to use oe.path here as sysconfdir starts with /
hosttestdir = oe.path.join(rootfs, sysconfdir, "postinst-test")
- targettestdir = os.path.join(sysconfdir, "postinst-test")
- for init_manager in ("sysvinit", "systemd"):
- for classes in ("package_rpm", "package_deb", "package_ipk"):
- with self.subTest(init_manager=init_manager, package_class=classes):
- features = 'CORE_IMAGE_EXTRA_INSTALL = "postinst-delayed-b"\n'
- features += 'IMAGE_FEATURES += "package-management empty-root-password"\n'
- features += 'PACKAGE_CLASSES = "%s"\n' % classes
- if init_manager == "systemd":
- features += 'DISTRO_FEATURES_append = " systemd"\n'
- features += 'VIRTUAL-RUNTIME_init_manager = "systemd"\n'
- features += 'DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit"\n'
- features += 'VIRTUAL-RUNTIME_initscripts = ""\n'
- self.write_config(features)
-
- bitbake('core-image-minimal')
-
- self.assertTrue(os.path.isfile(os.path.join(hosttestdir, "rootfs")),
- "rootfs state file was not created")
-
- with runqemu('core-image-minimal') as qemu:
- # Make the test echo a string and search for that as
- # run_serial()'s status code is useless.'
- for filename in ("rootfs", "delayed-a", "delayed-b"):
- status, output = qemu.run_serial("test -f %s && echo found" % os.path.join(targettestdir, filename))
- self.assertEqual(output, "found", "%s was not present on boot" % filename)
+ for classes in ("package_rpm", "package_deb", "package_ipk"):
+ with self.subTest(package_class=classes):
+ features = 'CORE_IMAGE_EXTRA_INSTALL = "postinst-rootfs-failing"\n'
+ features += 'PACKAGE_CLASSES = "%s"\n' % classes
+ self.write_config(features)
+ bb_result = bitbake('core-image-minimal', ignore_status=True)
+ self.assertGreaterEqual(bb_result.output.find("Postinstall scriptlets of ['postinst-rootfs-failing'] have failed."), 0,
+ "Warning about a failed scriptlet not found in bitbake output: %s" %(bb_result.output))
+
+ self.assertTrue(os.path.isfile(os.path.join(hosttestdir, "rootfs-before-failure")),
+ "rootfs-before-failure file was not created")
+ self.assertFalse(os.path.isfile(os.path.join(hosttestdir, "rootfs-after-failure")),
+ "rootfs-after-failure file was created")
+
+@OETestTag("runqemu")
+class SystemTap(OESelftestTestCase):
+ """
+ Summary: The purpose of this test case is to verify native crosstap
+ works while talking to a target.
+ Expected: The script should successfully connect to the qemu machine
+ and run some systemtap examples on a qemu machine.
+                 and run some systemtap examples on it.
+
+ @classmethod
+ def setUpClass(cls):
+ super(SystemTap, cls).setUpClass()
+ cls.image = "core-image-minimal"
+
+ def default_config(self):
+ return """
+# These aren't the actual IP addresses but testexport class needs something defined
+TEST_SERVER_IP = "192.168.7.1"
+TEST_TARGET_IP = "192.168.7.2"
+
+EXTRA_IMAGE_FEATURES += "tools-profile dbg-pkgs"
+IMAGE_FEATURES:append = " ssh-server-dropbear"
+
+# enables kernel debug symbols
+KERNEL_EXTRA_FEATURES:append = " features/debug/debug-kernel.scc"
+KERNEL_EXTRA_FEATURES:append = " features/systemtap/systemtap.scc"
+
+# add systemtap run-time into target image if it is not there yet
+IMAGE_INSTALL:append = " systemtap-runtime"
+"""
+
+ def test_crosstap_helloworld(self):
+ self.write_config(self.default_config())
+ bitbake('systemtap-native')
+ systemtap_examples = os.path.join(get_bb_var("WORKDIR","systemtap-native"), "usr/share/systemtap/examples")
+ bitbake(self.image)
+
+ with runqemu(self.image) as qemu:
+ cmd = "crosstap -r root@192.168.7.2 -s %s/general/helloworld.stp " % systemtap_examples
+ result = runCmd(cmd)
+ self.assertEqual(0, result.status, 'crosstap helloworld returned a non 0 status:%s' % result.output)
+
+ def test_crosstap_pstree(self):
+ self.write_config(self.default_config())
+
+ bitbake('systemtap-native')
+ systemtap_examples = os.path.join(get_bb_var("WORKDIR","systemtap-native"), "usr/share/systemtap/examples")
+ bitbake(self.image)
+
+ with runqemu(self.image) as qemu:
+ cmd = "crosstap -r root@192.168.7.2 -s %s/process/pstree.stp" % systemtap_examples
+ result = runCmd(cmd)
+ self.assertEqual(0, result.status, 'crosstap pstree returned a non 0 status:%s' % result.output)
+
+ def test_crosstap_syscalls_by_proc(self):
+ self.write_config(self.default_config())
+
+ bitbake('systemtap-native')
+ systemtap_examples = os.path.join(get_bb_var("WORKDIR","systemtap-native"), "usr/share/systemtap/examples")
+ bitbake(self.image)
+
+ with runqemu(self.image) as qemu:
+            cmd = "crosstap -r root@192.168.7.2 -s %s/process/syscalls_by_proc.stp" % systemtap_examples
+ result = runCmd(cmd)
+ self.assertEqual(0, result.status, 'crosstap syscalls_by_proc returned a non 0 status:%s' % result.output)
+
+ def test_crosstap_syscalls_by_pid(self):
+ self.write_config(self.default_config())
+
+ bitbake('systemtap-native')
+ systemtap_examples = os.path.join(get_bb_var("WORKDIR","systemtap-native"), "usr/share/systemtap/examples")
+ bitbake(self.image)
+
+ with runqemu(self.image) as qemu:
+            cmd = "crosstap -r root@192.168.7.2 -s %s/process/syscalls_by_pid.stp" % systemtap_examples
+ result = runCmd(cmd)
+ self.assertEqual(0, result.status, 'crosstap syscalls_by_pid returned a non 0 status:%s' % result.output)
diff --git a/meta/lib/oeqa/selftest/cases/rust.py b/meta/lib/oeqa/selftest/cases/rust.py
new file mode 100644
index 0000000000..ad14189c6d
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/rust.py
@@ -0,0 +1,231 @@
+# SPDX-License-Identifier: MIT
+import os
+import subprocess
+import time
+from oeqa.core.decorator import OETestTag
+from oeqa.core.case import OEPTestResultTestCase
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu, Command
+from oeqa.utils.sshcontrol import SSHControl
+
+def parse_results(filename):
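+    """
+    Collect rust test results from a log file into a {test name: status} dict.
+
+    Illustrative note (assumed log format, not taken from this change): a
+    compiletest-style line such as
+        test [ui] tests/ui/foo.rs ... ok
+    would be recorded here as {'tests/ui/foo.rs': 'PASS'}.
+    """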
+ tests = {}
+ with open(filename, "r") as f:
+ lines = f.readlines()
+ for line in lines:
+ if "..." in line and "test [" in line:
+ test = line.split("test ")[1].split(" ... ")[0]
+ if "] " in test:
+ test = test.split("] ", 1)[1]
+ result = line.split(" ... ")[1].strip()
+ if result == "ok":
+ result = "PASS"
+ elif result == "failed":
+ result = "FAIL"
+ elif "ignored" in result:
+ result = "SKIPPED"
+ if test in tests:
+ if tests[test] != result:
+ print("Duplicate and mismatching result %s for %s" % (result, test))
+ else:
+ print("Duplicate result %s for %s" % (result, test))
+ else:
+ tests[test] = result
+ return tests
+
+# Total time taken for testing is about 2hr 20min with PARALLEL_MAKE set to 40 jobs.
+@OETestTag("toolchain-system")
+@OETestTag("toolchain-user")
+@OETestTag("runqemu")
+class RustSelfTestSystemEmulated(OESelftestTestCase, OEPTestResultTestCase):
+ def test_rust(self, *args, **kwargs):
+        # Uncomment the following line to disable the Rust Oe-selftest
+ #self.skipTest("The Rust Oe-selftest is disabled.")
+
+ # Skip mips32 target since it is unstable with rust tests
+ machine = get_bb_var('MACHINE')
+ if machine == "qemumips":
+ self.skipTest("The mips32 target is skipped for Rust Oe-selftest.")
+
+ # build remote-test-server before image build
+ recipe = "rust"
+ start_time = time.time()
+ bitbake("{} -c test_compile".format(recipe))
+ builddir = get_bb_var("RUSTSRC", "rust")
+ # build core-image-minimal with required packages
+ default_installed_packages = ["libgcc", "libstdc++", "libatomic", "libgomp"]
+ features = []
+ features.append('IMAGE_FEATURES += "ssh-server-dropbear"')
+ features.append('CORE_IMAGE_EXTRA_INSTALL += "{0}"'.format(" ".join(default_installed_packages)))
+ self.write_config("\n".join(features))
+ bitbake("core-image-minimal")
+
+ # Exclude the test folders that error out while building
+ # TODO: Fix the errors and include them for testing
+ # no-fail-fast: Run all tests regardless of failure.
+ # bless: First runs rustfmt to format the codebase,
+ # then runs tidy checks.
+ exclude_list = [
+ 'compiler/rustc',
+ 'compiler/rustc_interface/src/tests.rs',
+ 'library/panic_abort',
+ 'library/panic_unwind',
+ 'library/test/src/stats/tests.rs',
+ 'src/bootstrap/builder/tests.rs',
+ 'src/doc/rustc',
+ 'src/doc/rustdoc',
+ 'src/doc/unstable-book',
+ 'src/librustdoc',
+ 'src/rustdoc-json-types',
+ 'src/tools/compiletest/src/common.rs',
+ 'src/tools/lint-docs',
+ 'src/tools/rust-analyzer',
+ 'src/tools/rustdoc-themes',
+ 'src/tools/tidy',
+ 'tests/assembly/asm/aarch64-outline-atomics.rs',
+ 'tests/codegen/abi-main-signature-32bit-c-int.rs',
+ 'tests/codegen/abi-repr-ext.rs',
+ 'tests/codegen/abi-x86-interrupt.rs',
+ 'tests/codegen/branch-protection.rs',
+ 'tests/codegen/catch-unwind.rs',
+ 'tests/codegen/cf-protection.rs',
+ 'tests/codegen/enum-bounds-check-derived-idx.rs',
+ 'tests/codegen/force-unwind-tables.rs',
+ 'tests/codegen/intrinsic-no-unnamed-attr.rs',
+ 'tests/codegen/issues/issue-103840.rs',
+ 'tests/codegen/issues/issue-47278.rs',
+ 'tests/codegen/issues/issue-73827-bounds-check-index-in-subexpr.rs',
+ 'tests/codegen/lifetime_start_end.rs',
+ 'tests/codegen/local-generics-in-exe-internalized.rs',
+ 'tests/codegen/match-unoptimized.rs',
+ 'tests/codegen/noalias-rwlockreadguard.rs',
+ 'tests/codegen/non-terminate/nonempty-infinite-loop.rs',
+ 'tests/codegen/noreturn-uninhabited.rs',
+ 'tests/codegen/repr-transparent-aggregates-3.rs',
+ 'tests/codegen/riscv-abi/call-llvm-intrinsics.rs',
+ 'tests/codegen/riscv-abi/riscv64-lp64f-lp64d-abi.rs',
+ 'tests/codegen/riscv-abi/riscv64-lp64d-abi.rs',
+ 'tests/codegen/sse42-implies-crc32.rs',
+ 'tests/codegen/thread-local.rs',
+ 'tests/codegen/uninit-consts.rs',
+ 'tests/pretty/raw-str-nonexpr.rs',
+ 'tests/run-make',
+ 'tests/run-make-fulldeps',
+ 'tests/rustdoc',
+ 'tests/rustdoc-json',
+ 'tests/rustdoc-js-std',
+ 'tests/rustdoc-ui/cfg-test.rs',
+ 'tests/rustdoc-ui/check-cfg-test.rs',
+ 'tests/rustdoc-ui/display-output.rs',
+ 'tests/rustdoc-ui/doc-comment-multi-line-attr.rs',
+ 'tests/rustdoc-ui/doc-comment-multi-line-cfg-attr.rs',
+ 'tests/rustdoc-ui/doc-test-doctest-feature.rs',
+ 'tests/rustdoc-ui/doctest-multiline-crate-attribute.rs',
+ 'tests/rustdoc-ui/doctest-output.rs',
+ 'tests/rustdoc-ui/doc-test-rustdoc-feature.rs',
+ 'tests/rustdoc-ui/failed-doctest-compile-fail.rs',
+ 'tests/rustdoc-ui/issue-80992.rs',
+ 'tests/rustdoc-ui/issue-91134.rs',
+ 'tests/rustdoc-ui/nocapture-fail.rs',
+ 'tests/rustdoc-ui/nocapture.rs',
+ 'tests/rustdoc-ui/no-run-flag.rs',
+ 'tests/rustdoc-ui/run-directory.rs',
+ 'tests/rustdoc-ui/test-no_std.rs',
+ 'tests/rustdoc-ui/test-type.rs',
+ 'tests/rustdoc/unit-return.rs',
+ 'tests/ui/abi/stack-probes-lto.rs',
+ 'tests/ui/abi/stack-probes.rs',
+ 'tests/ui/array-slice-vec/subslice-patterns-const-eval-match.rs',
+ 'tests/ui/asm/x86_64/sym.rs',
+ 'tests/ui/associated-type-bounds/fn-apit.rs',
+ 'tests/ui/associated-type-bounds/fn-dyn-apit.rs',
+ 'tests/ui/associated-type-bounds/fn-wrap-apit.rs',
+ 'tests/ui/debuginfo/debuginfo-emit-llvm-ir-and-split-debuginfo.rs',
+ 'tests/ui/drop/dynamic-drop.rs',
+ 'tests/ui/empty_global_asm.rs',
+ 'tests/ui/functions-closures/fn-help-with-err.rs',
+ 'tests/ui/linkage-attr/issue-10755.rs',
+ 'tests/ui/macros/restricted-shadowing-legacy.rs',
+ 'tests/ui/process/nofile-limit.rs',
+ 'tests/ui/process/process-panic-after-fork.rs',
+ 'tests/ui/process/process-sigpipe.rs',
+ 'tests/ui/simd/target-feature-mixup.rs',
+ 'tests/ui/structs-enums/multiple-reprs.rs',
+ 'src/tools/jsondoclint',
+ 'src/tools/replace-version-placeholder',
+ 'tests/codegen/abi-efiapi.rs',
+ 'tests/codegen/abi-sysv64.rs',
+ 'tests/codegen/align-byval.rs',
+ 'tests/codegen/align-fn.rs',
+ 'tests/codegen/asm-powerpc-clobbers.rs',
+ 'tests/codegen/async-fn-debug-awaitee-field.rs',
+ 'tests/codegen/binary-search-index-no-bound-check.rs',
+ 'tests/codegen/call-metadata.rs',
+ 'tests/codegen/debug-column.rs',
+ 'tests/codegen/debug-limited.rs',
+ 'tests/codegen/debuginfo-generic-closure-env-names.rs',
+ 'tests/codegen/drop.rs',
+ 'tests/codegen/dst-vtable-align-nonzero.rs',
+ 'tests/codegen/enable-lto-unit-splitting.rs',
+ 'tests/codegen/enum/enum-u128.rs',
+ 'tests/codegen/fn-impl-trait-self.rs',
+ 'tests/codegen/inherit_overflow.rs',
+ 'tests/codegen/inline-function-args-debug-info.rs',
+ 'tests/codegen/intrinsics/mask.rs',
+ 'tests/codegen/intrinsics/transmute-niched.rs',
+ 'tests/codegen/issues/issue-73258.rs',
+ 'tests/codegen/issues/issue-75546.rs',
+ 'tests/codegen/issues/issue-77812.rs',
+ 'tests/codegen/issues/issue-98156-const-arg-temp-lifetime.rs',
+ 'tests/codegen/llvm-ident.rs',
+ 'tests/codegen/mainsubprogram.rs',
+ 'tests/codegen/move-operands.rs',
+ 'tests/codegen/repr/transparent-mips64.rs',
+ 'tests/mir-opt/',
+ 'tests/rustdoc-json',
+ 'tests/rustdoc-ui/doc-test-rustdoc-feature.rs',
+ 'tests/rustdoc-ui/no-run-flag.rs',
+ 'tests/ui-fulldeps/',
+ 'tests/ui/numbers-arithmetic/u128.rs'
+ ]
+
+ exclude_fail_tests = " ".join([" --exclude " + item for item in exclude_list])
+ # Add exclude_fail_tests with other test arguments
+ testargs = exclude_fail_tests + " --doc --no-fail-fast --bless"
+
+ # wrap the execution with a qemu instance.
+ # The guest is given 512 MB of memory via "-m 512".
+ with runqemu("core-image-minimal", runqemuparams = "nographic", qemuparams = "-m 512") as qemu:
+ # Copy remote-test-server to image through scp
+ host_sys = get_bb_var("RUST_BUILD_SYS", "rust")
+ ssh = SSHControl(ip=qemu.ip, logfile=qemu.sshlog, user="root")
+ ssh.copy_to(builddir + "/build/" + host_sys + "/stage1-tools-bin/remote-test-server","~/")
+ # Execute remote-test-server on image through background ssh
+ command = '~/remote-test-server --bind 0.0.0.0:12345 -v'
+ sshrun=subprocess.Popen(("ssh", '-o', 'UserKnownHostsFile=/dev/null', '-o', 'StrictHostKeyChecking=no', '-f', "root@%s" % qemu.ip, command), shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
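+ # remote-test-server now listens on port 12345 inside the guest; the Rust
+ # bootstrap test harness is expected to reach it through the TEST_DEVICE_ADDR
+ # value exported further below.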
+ # Gather the bitbake variables needed to assemble the cross test environment.
+ tcpath = get_bb_var("TARGET_SYS", "rust")
+ targetsys = get_bb_var("RUST_TARGET_SYS", "rust")
+ rustlibpath = get_bb_var("WORKDIR", "rust")
+ tmpdir = get_bb_var("TMPDIR", "rust")
+
+ # Set path for target-poky-linux-gcc, RUST_TARGET_PATH and hosttools.
+ cmd = " export PATH=%s/recipe-sysroot-native/usr/bin:$PATH;" % rustlibpath
+ cmd = cmd + " export TARGET_VENDOR=\"-poky\";"
+ cmd = cmd + " export PATH=%s/recipe-sysroot-native/usr/bin/%s:%s/hosttools:$PATH;" % (rustlibpath, tcpath, tmpdir)
+ cmd = cmd + " export RUST_TARGET_PATH=%s/rust-targets;" % rustlibpath
+ # Trigger testing.
+ cmd = cmd + " export TEST_DEVICE_ADDR=\"%s:12345\";" % qemu.ip
+ cmd = cmd + " cd %s; python3 src/bootstrap/bootstrap.py test %s --target %s" % (builddir, testargs, targetsys)
+ retval = runCmd(cmd)
+ end_time = time.time()
+
+ resultlog = rustlibpath + "/results-log.txt"
+ with open(resultlog, "w") as f:
+ f.write(retval.output)
+
+ ptestsuite = "rust"
+ self.ptest_section(ptestsuite, duration = int(end_time - start_time), logfile=resultlog)
+ test_results = parse_results(resultlog)
+ for test in test_results:
+ self.ptest_result(ptestsuite, test, test_results[test])
diff --git a/meta/lib/oeqa/selftest/cases/selftest.py b/meta/lib/oeqa/selftest/cases/selftest.py
index 4b3cb14463..a80a8651a5 100644
--- a/meta/lib/oeqa/selftest/cases/selftest.py
+++ b/meta/lib/oeqa/selftest/cases/selftest.py
@@ -1,12 +1,15 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
import importlib
-from oeqa.utils.commands import runCmd
import oeqa.selftest
from oeqa.selftest.case import OESelftestTestCase
-from oeqa.core.decorator.oeid import OETestID
class ExternalLayer(OESelftestTestCase):
- @OETestID(1885)
def test_list_imported(self):
"""
Summary: Checks functionality to import tests from other layers.
diff --git a/meta/lib/oeqa/selftest/cases/signing.py b/meta/lib/oeqa/selftest/cases/signing.py
index b3d1a8292e..18cce0ba25 100644
--- a/meta/lib/oeqa/selftest/cases/signing.py
+++ b/meta/lib/oeqa/selftest/cases/signing.py
@@ -1,11 +1,18 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
from oeqa.selftest.case import OESelftestTestCase
-from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, create_temp_layer
import os
+import oe
import glob
import re
import shutil
import tempfile
-from oeqa.core.decorator.oeid import OETestID
+from contextlib import contextmanager
from oeqa.utils.ftools import write_file
@@ -15,25 +22,43 @@ class Signing(OESelftestTestCase):
pub_key_path = ""
secret_key_path = ""
- @classmethod
- def setUpClass(cls):
- super(Signing, cls).setUpClass()
- # Check that we can find the gpg binary and fail early if we can't
- if not shutil.which("gpg"):
- raise AssertionError("This test needs GnuPG")
+ def setup_gpg(self):
+ bitbake('gnupg-native -c addto_recipe_sysroot')
+
+ self.gpg_dir = tempfile.mkdtemp(prefix="oeqa-signing-")
+ self.track_for_cleanup(self.gpg_dir)
+
+ self.pub_key_path = os.path.join(self.testlayer_path, 'files', 'signing', "key.pub")
+ self.secret_key_path = os.path.join(self.testlayer_path, 'files', 'signing', "key.secret")
+
+ nsysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "gnupg-native")
+
+ runCmd('gpg --agent-program=`which gpg-agent`\|--auto-expand-secmem --batch --homedir %s --import %s %s' % (self.gpg_dir, self.pub_key_path, self.secret_key_path), native_sysroot=nsysroot)
+ return nsysroot + get_bb_var("bindir_native")
- cls.gpg_dir = tempfile.mkdtemp(prefix="oeqa-signing-")
- cls.pub_key_path = os.path.join(cls.testlayer_path, 'files', 'signing', "key.pub")
- cls.secret_key_path = os.path.join(cls.testlayer_path, 'files', 'signing', "key.secret")
+ @contextmanager
+ def create_new_builddir(self, builddir, newbuilddir):
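+ # Copy conf/ and cache/ into a fresh build directory, temporarily rewrite any
+ # environment variables that reference the old build directory, and restore
+ # the environment and working directory on exit.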
+ bb.utils.mkdirhier(newbuilddir)
+ oe.path.copytree(builddir + "/conf", newbuilddir + "/conf")
+ oe.path.copytree(builddir + "/cache", newbuilddir + "/cache")
- runCmd('gpg --batch --homedir %s --import %s %s' % (cls.gpg_dir, cls.pub_key_path, cls.secret_key_path))
+ origenv = os.environ.copy()
- @classmethod
- def tearDownClass(cls):
- shutil.rmtree(cls.gpg_dir, ignore_errors=True)
+ for e in os.environ:
+ if builddir + "/" in os.environ[e]:
+ os.environ[e] = os.environ[e].replace(builddir + "/", newbuilddir + "/")
+ if os.environ[e].endswith(builddir):
+ os.environ[e] = os.environ[e].replace(builddir, newbuilddir)
+
+ os.chdir(newbuilddir)
+ try:
+ yield
+ finally:
+ for e in origenv:
+ os.environ[e] = origenv[e]
+ os.chdir(builddir)
- @OETestID(1362)
def test_signing_packages(self):
"""
Summary: Test that packages can be signed in the package feed
@@ -41,11 +66,13 @@ class Signing(OESelftestTestCase):
Expected: Images can be created from signed packages
Product: oe-core
Author: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
- Author: Alexander Kanavin <alexander.kanavin@intel.com>
+ Author: Alexander Kanavin <alex.kanavin@gmail.com>
AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
"""
import oe.packagedata
+ self.setup_gpg()
+
package_classes = get_bb_var('PACKAGE_CLASSES')
if 'package_rpm' not in package_classes:
self.skipTest('This test requires RPM Packaging.')
@@ -87,7 +114,7 @@ class Signing(OESelftestTestCase):
ret = runCmd('%s/rpmkeys --define "_dbpath %s" --checksig %s' %
(staging_bindir_native, rpmdb, pkg_deploy))
# tmp/deploy/rpm/i586/ed-1.9-r0.i586.rpm: rsa sha1 md5 OK
- self.assertIn('rsa sha1 (md5) pgp md5 OK', ret.output, 'Package signed incorrectly.')
+ self.assertIn('digests signatures OK', ret.output, 'Package signed incorrectly.')
shutil.rmtree(rpmdb)
#Check that an image can be built from signed packages
@@ -96,7 +123,6 @@ class Signing(OESelftestTestCase):
bitbake('core-image-minimal')
- @OETestID(1382)
def test_signing_sstate_archive(self):
"""
Summary: Test that sstate archives can be signed
@@ -108,11 +134,12 @@ class Signing(OESelftestTestCase):
test_recipe = 'ed'
- builddir = os.environ.get('BUILDDIR')
+ # Since we need gpg but we can't use gpg-native for sstate signatures, we
+ # build gpg-native in our original builddir then run the tests in a second one.
+ builddir = os.environ.get('BUILDDIR') + "-testsign"
sstatedir = os.path.join(builddir, 'test-sstate')
- self.add_command_to_tearDown('bitbake -c clean %s' % test_recipe)
- self.add_command_to_tearDown('rm -rf %s' % sstatedir)
+ nsysroot = self.setup_gpg()
feature = 'SSTATE_SIG_KEY ?= "testuser"\n'
feature += 'SSTATE_SIG_PASSPHRASE ?= "test123"\n'
@@ -120,28 +147,34 @@ class Signing(OESelftestTestCase):
feature += 'GPG_PATH = "%s"\n' % self.gpg_dir
feature += 'SSTATE_DIR = "%s"\n' % sstatedir
# Any mirror might have partial sstate without .sig files, triggering failures
- feature += 'SSTATE_MIRRORS_forcevariable = ""\n'
+ feature += 'SSTATE_MIRRORS:forcevariable = ""\n'
self.write_config(feature)
- bitbake('-c clean %s' % test_recipe)
- bitbake(test_recipe)
+ with self.create_new_builddir(os.environ['BUILDDIR'], builddir):
+
+ os.environ["PATH"] = nsysroot + ":" + os.environ["PATH"]
+ self.add_command_to_tearDown('bitbake -c clean %s' % test_recipe)
+ self.add_command_to_tearDown('rm -rf %s' % sstatedir)
+ self.add_command_to_tearDown('rm -rf %s' % builddir)
- recipe_sig = glob.glob(sstatedir + '/*/*:ed:*_package.tgz.sig')
- recipe_tgz = glob.glob(sstatedir + '/*/*:ed:*_package.tgz')
+ bitbake('-c clean %s' % test_recipe)
+ bitbake('-c populate_lic %s' % test_recipe)
- self.assertEqual(len(recipe_sig), 1, 'Failed to find .sig file.')
- self.assertEqual(len(recipe_tgz), 1, 'Failed to find .tgz file.')
+ recipe_sig = glob.glob(sstatedir + '/*/*/*:ed:*_populate_lic.tar.zst.sig')
+ recipe_archive = glob.glob(sstatedir + '/*/*/*:ed:*_populate_lic.tar.zst')
- ret = runCmd('gpg --homedir %s --verify %s %s' % (self.gpg_dir, recipe_sig[0], recipe_tgz[0]))
- # gpg: Signature made Thu 22 Oct 2015 01:45:09 PM EEST using RSA key ID 61EEFB30
- # gpg: Good signature from "testuser (nocomment) <testuser@email.com>"
- self.assertIn('gpg: Good signature from', ret.output, 'Package signed incorrectly.')
+ self.assertEqual(len(recipe_sig), 1, 'Failed to find .sig file.')
+ self.assertEqual(len(recipe_archive), 1, 'Failed to find .tar.zst file.')
+
+ ret = runCmd('gpg --homedir %s --verify %s %s' % (self.gpg_dir, recipe_sig[0], recipe_archive[0]))
+ # gpg: Signature made Thu 22 Oct 2015 01:45:09 PM EEST using RSA key ID 61EEFB30
+ # gpg: Good signature from "testuser (nocomment) <testuser@email.com>"
+ self.assertIn('gpg: Good signature from', ret.output, 'Package signed incorrectly.')
class LockedSignatures(OESelftestTestCase):
- @OETestID(1420)
def test_locked_signatures(self):
"""
Summary: Test locked signature mechanism
@@ -151,14 +184,14 @@ class LockedSignatures(OESelftestTestCase):
AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
"""
+ import uuid
+
test_recipe = 'ed'
locked_sigs_file = 'locked-sigs.inc'
- self.add_command_to_tearDown('rm -f %s' % os.path.join(self.builddir, locked_sigs_file))
-
bitbake(test_recipe)
# Generate locked sigs include file
- bitbake('-S none %s' % test_recipe)
+ bitbake('-S lockedsigs %s' % test_recipe)
feature = 'require %s\n' % locked_sigs_file
feature += 'SIGGEN_LOCKEDSIGS_TASKSIG_CHECK = "warn"\n'
@@ -167,21 +200,29 @@ class LockedSignatures(OESelftestTestCase):
# Build a locked recipe
bitbake(test_recipe)
+ templayerdir = tempfile.mkdtemp(prefix='signingqa')
+ create_temp_layer(templayerdir, 'selftestsigning')
+ runCmd('bitbake-layers add-layer %s' % templayerdir)
+
# Make a change that should cause the locked task signature to change
+ # Use uuid so the hash equivalence server isn't triggered
recipe_append_file = test_recipe + '_' + get_bb_var('PV', test_recipe) + '.bbappend'
- recipe_append_path = os.path.join(self.testlayer_path, 'recipes-test', test_recipe, recipe_append_file)
- feature = 'SUMMARY += "test locked signature"\n'
+ recipe_append_path = os.path.join(templayerdir, 'recipes-test', test_recipe, recipe_append_file)
+ feature = 'SUMMARY:${PN} = "test locked signature%s"\n' % uuid.uuid4()
- os.mkdir(os.path.join(self.testlayer_path, 'recipes-test', test_recipe))
+ os.mkdir(os.path.join(templayerdir, 'recipes-test'))
+ os.mkdir(os.path.join(templayerdir, 'recipes-test', test_recipe))
write_file(recipe_append_path, feature)
- self.add_command_to_tearDown('rm -rf %s' % os.path.join(self.testlayer_path, 'recipes-test', test_recipe))
+ self.add_command_to_tearDown('bitbake-layers remove-layer %s' % templayerdir)
+ self.add_command_to_tearDown('rm -f %s' % os.path.join(self.builddir, locked_sigs_file))
+ self.add_command_to_tearDown('rm -rf %s' % templayerdir)
# Build the recipe again
ret = bitbake(test_recipe)
# Verify you get the warning and that the real task *isn't* run (i.e. the locked signature has worked)
- patt = r'WARNING: The %s:do_package sig is computed to be \S+, but the sig is locked to \S+ in SIGGEN_LOCKEDSIGS\S+' % test_recipe
+ patt = r'The %s:do_package sig is computed to be \S+, but the sig is locked to \S+ in SIGGEN_LOCKEDSIGS\S+' % test_recipe
found_warn = re.search(patt, ret.output)
self.assertIsNotNone(found_warn, "Didn't find the expected warning message. Output: %s" % ret.output)
diff --git a/meta/lib/oeqa/selftest/cases/spdx.py b/meta/lib/oeqa/selftest/cases/spdx.py
new file mode 100644
index 0000000000..05fc4e390b
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/spdx.py
@@ -0,0 +1,54 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import json
+import os
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_var, runCmd
+
+class SPDXCheck(OESelftestTestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super(SPDXCheck, cls).setUpClass()
+ bitbake("python3-spdx-tools-native")
+ bitbake("-c addto_recipe_sysroot python3-spdx-tools-native")
+
+ def check_recipe_spdx(self, high_level_dir, spdx_file, target_name):
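+ # Enable the create-spdx class, remove any stale SPDX output for the target,
+ # rerun its do_create_spdx task and validate the resulting JSON document.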
+ config = """
+INHERIT += "create-spdx"
+"""
+ self.write_config(config)
+
+ deploy_dir = get_bb_var("DEPLOY_DIR")
+ machine_var = get_bb_var("MACHINE")
+ # qemux86-64 creates the directory qemux86_64
+ machine_dir = machine_var.replace("-", "_")
+
+ full_file_path = os.path.join(deploy_dir, "spdx", machine_dir, high_level_dir, spdx_file)
+
+ try:
+ os.remove(full_file_path)
+ except FileNotFoundError:
+ pass
+
+ bitbake("%s -c create_spdx" % target_name)
+
+ def check_spdx_json(filename):
+ with open(filename) as f:
+ report = json.load(f)
+ self.assertNotEqual(report, None)
+ self.assertNotEqual(report["SPDXID"], None)
+
+ python = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-spdx-tools-native'), 'nativepython3')
+ validator = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-spdx-tools-native'), 'pyspdxtools')
+ result = runCmd("{} {} -i {}".format(python, validator, filename))
+
+ self.assertExists(full_file_path)
+ result = check_spdx_json(full_file_path)
+
+ def test_spdx_base_files(self):
+ self.check_recipe_spdx("packages", "base-files.spdx.json", "base-files")
diff --git a/meta/lib/oeqa/selftest/cases/sstate.py b/meta/lib/oeqa/selftest/cases/sstate.py
deleted file mode 100644
index bc2fdbd8cc..0000000000
--- a/meta/lib/oeqa/selftest/cases/sstate.py
+++ /dev/null
@@ -1,63 +0,0 @@
-import datetime
-import unittest
-import os
-import re
-import shutil
-
-import oeqa.utils.ftools as ftools
-from oeqa.selftest.case import OESelftestTestCase
-from oeqa.utils.commands import runCmd, bitbake, get_bb_vars, get_test_layer
-
-
-class SStateBase(OESelftestTestCase):
-
- def setUpLocal(self):
- super(SStateBase, self).setUpLocal()
- self.temp_sstate_location = None
- needed_vars = ['SSTATE_DIR', 'NATIVELSBSTRING', 'TCLIBC', 'TUNE_ARCH',
- 'TOPDIR', 'TARGET_VENDOR', 'TARGET_OS']
- bb_vars = get_bb_vars(needed_vars)
- self.sstate_path = bb_vars['SSTATE_DIR']
- self.hostdistro = bb_vars['NATIVELSBSTRING']
- self.tclibc = bb_vars['TCLIBC']
- self.tune_arch = bb_vars['TUNE_ARCH']
- self.topdir = bb_vars['TOPDIR']
- self.target_vendor = bb_vars['TARGET_VENDOR']
- self.target_os = bb_vars['TARGET_OS']
- self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro)
-
- # Creates a special sstate configuration with the option to add sstate mirrors
- def config_sstate(self, temp_sstate_location=False, add_local_mirrors=[]):
- self.temp_sstate_location = temp_sstate_location
-
- if self.temp_sstate_location:
- temp_sstate_path = os.path.join(self.builddir, "temp_sstate_%s" % datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
- config_temp_sstate = "SSTATE_DIR = \"%s\"" % temp_sstate_path
- self.append_config(config_temp_sstate)
- self.track_for_cleanup(temp_sstate_path)
- bb_vars = get_bb_vars(['SSTATE_DIR', 'NATIVELSBSTRING'])
- self.sstate_path = bb_vars['SSTATE_DIR']
- self.hostdistro = bb_vars['NATIVELSBSTRING']
- self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro)
-
- if add_local_mirrors:
- config_set_sstate_if_not_set = 'SSTATE_MIRRORS ?= ""'
- self.append_config(config_set_sstate_if_not_set)
- for local_mirror in add_local_mirrors:
- self.assertFalse(os.path.join(local_mirror) == os.path.join(self.sstate_path), msg='Cannot add the current sstate path as a sstate mirror')
- config_sstate_mirror = "SSTATE_MIRRORS += \"file://.* file:///%s/PATH\"" % local_mirror
- self.append_config(config_sstate_mirror)
-
- # Returns a list containing sstate files
- def search_sstate(self, filename_regex, distro_specific=True, distro_nonspecific=True):
- result = []
- for root, dirs, files in os.walk(self.sstate_path):
- if distro_specific and re.search("%s/[a-z0-9]{2}$" % self.hostdistro, root):
- for f in files:
- if re.search(filename_regex, f):
- result.append(f)
- if distro_nonspecific and re.search("%s/[a-z0-9]{2}$" % self.sstate_path, root):
- for f in files:
- if re.search(filename_regex, f):
- result.append(f)
- return result
diff --git a/meta/lib/oeqa/selftest/cases/sstatetests.py b/meta/lib/oeqa/selftest/cases/sstatetests.py
index 47900886a3..86d6cd7464 100644
--- a/meta/lib/oeqa/selftest/cases/sstatetests.py
+++ b/meta/lib/oeqa/selftest/cases/sstatetests.py
@@ -1,18 +1,85 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
import os
import shutil
import glob
import subprocess
+import tempfile
+import datetime
+import re
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, create_temp_layer, get_bb_vars
from oeqa.selftest.case import OESelftestTestCase
-from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_test_layer
-from oeqa.selftest.cases.sstate import SStateBase
-from oeqa.core.decorator.oeid import OETestID
+from oeqa.core.decorator import OETestTag
+import oe
import bb.siggen
-class SStateTests(SStateBase):
-
- # Test sstate files creation and their location
+# Set to True to preserve stamp files after test execution for debugging failures
+keep_temp_files = False
+
+class SStateBase(OESelftestTestCase):
+
+ def setUpLocal(self):
+ super(SStateBase, self).setUpLocal()
+ self.temp_sstate_location = None
+ needed_vars = ['SSTATE_DIR', 'NATIVELSBSTRING', 'TCLIBC', 'TUNE_ARCH',
+ 'TOPDIR', 'TARGET_VENDOR', 'TARGET_OS']
+ bb_vars = get_bb_vars(needed_vars)
+ self.sstate_path = bb_vars['SSTATE_DIR']
+ self.hostdistro = bb_vars['NATIVELSBSTRING']
+ self.tclibc = bb_vars['TCLIBC']
+ self.tune_arch = bb_vars['TUNE_ARCH']
+ self.topdir = bb_vars['TOPDIR']
+ self.target_vendor = bb_vars['TARGET_VENDOR']
+ self.target_os = bb_vars['TARGET_OS']
+ self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro)
+
+ def track_for_cleanup(self, path):
+ if not keep_temp_files:
+ super().track_for_cleanup(path)
+
+ # Creates a special sstate configuration with the option to add sstate mirrors
+ def config_sstate(self, temp_sstate_location=False, add_local_mirrors=[]):
+ self.temp_sstate_location = temp_sstate_location
+
+ if self.temp_sstate_location:
+ temp_sstate_path = os.path.join(self.builddir, "temp_sstate_%s" % datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
+ config_temp_sstate = "SSTATE_DIR = \"%s\"" % temp_sstate_path
+ self.append_config(config_temp_sstate)
+ self.track_for_cleanup(temp_sstate_path)
+ bb_vars = get_bb_vars(['SSTATE_DIR', 'NATIVELSBSTRING'])
+ self.sstate_path = bb_vars['SSTATE_DIR']
+ self.hostdistro = bb_vars['NATIVELSBSTRING']
+ self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro)
+
+ if add_local_mirrors:
+ config_set_sstate_if_not_set = 'SSTATE_MIRRORS ?= ""'
+ self.append_config(config_set_sstate_if_not_set)
+ for local_mirror in add_local_mirrors:
+ self.assertFalse(os.path.join(local_mirror) == os.path.join(self.sstate_path), msg='Cannot add the current sstate path as a sstate mirror')
+ config_sstate_mirror = "SSTATE_MIRRORS += \"file://.* file:///%s/PATH\"" % local_mirror
+ self.append_config(config_sstate_mirror)
+
+ # Returns a list containing sstate files
+ def search_sstate(self, filename_regex, distro_specific=True, distro_nonspecific=True):
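+ # Distro-specific objects live under SSTATE_DIR/<NATIVELSBSTRING>/<xx>/<yy>/,
+ # distro-nonspecific ones under SSTATE_DIR/<xx>/<yy>/, where xx/yy are
+ # two-character hash prefix directories; match on that depth accordingly.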
+ result = []
+ for root, dirs, files in os.walk(self.sstate_path):
+ if distro_specific and re.search(r"%s/%s/[a-z0-9]{2}/[a-z0-9]{2}$" % (self.sstate_path, self.hostdistro), root):
+ for f in files:
+ if re.search(filename_regex, f):
+ result.append(f)
+ if distro_nonspecific and re.search(r"%s/[a-z0-9]{2}/[a-z0-9]{2}$" % self.sstate_path, root):
+ for f in files:
+ if re.search(filename_regex, f):
+ result.append(f)
+ return result
+
+ # Test sstate file creation, location and directory permissions
def run_test_sstate_creation(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True, should_pass=True):
self.config_sstate(temp_sstate_location, [self.sstate_path])
@@ -21,12 +88,25 @@ class SStateTests(SStateBase):
else:
bitbake(['-ccleansstate'] + targets)
+ # We need to test that the environment umask does not affect sstate directory creation.
+ # So, first, we get the current umask and set it to something we know is incorrect.
+ # See sstate_task_postfunc for the correct umask of os.umask(0o002).
+ import os
+ def current_umask():
+ current_umask = os.umask(0)
+ os.umask(current_umask)
+ return current_umask
+
+ orig_umask = current_umask()
+ # Set it to a umask we know will be 'wrong'
+ os.umask(0o022)
+
bitbake(targets)
file_tracker = []
results = self.search_sstate('|'.join(map(str, targets)), distro_specific, distro_nonspecific)
if distro_nonspecific:
for r in results:
- if r.endswith(("_populate_lic.tgz", "_populate_lic.tgz.siginfo", "_fetch.tgz.siginfo", "_unpack.tgz.siginfo", "_patch.tgz.siginfo")):
+ if r.endswith(("_populate_lic.tar.zst", "_populate_lic.tar.zst.siginfo", "_fetch.tar.zst.siginfo", "_unpack.tar.zst.siginfo", "_patch.tar.zst.siginfo")):
continue
file_tracker.append(r)
else:
@@ -37,21 +117,18 @@ class SStateTests(SStateBase):
else:
self.assertTrue(not file_tracker , msg="Found sstate files in the wrong place for: %s (found %s)" % (', '.join(map(str, targets)), str(file_tracker)))
- @OETestID(975)
- def test_sstate_creation_distro_specific_pass(self):
- self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
+ # Now we'll walk the tree to check the mode and see if things are incorrect.
+ badperms = []
+ for root, dirs, files in os.walk(self.sstate_path):
+ for directory in dirs:
+ if (os.stat(os.path.join(root, directory)).st_mode & 0o777) != 0o775:
+ badperms.append(os.path.join(root, directory))
- @OETestID(1374)
- def test_sstate_creation_distro_specific_fail(self):
- self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True, should_pass=False)
-
- @OETestID(976)
- def test_sstate_creation_distro_nonspecific_pass(self):
- self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
+ # Return to original umask
+ os.umask(orig_umask)
- @OETestID(1375)
- def test_sstate_creation_distro_nonspecific_fail(self):
- self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True, should_pass=False)
+ if should_pass:
+ self.assertFalse(badperms, msg="Found sstate directories with the wrong permissions for: %s (found %s)" % (', '.join(map(str, targets)), str(badperms)))
# Test the sstate files deletion part of the do_cleansstate task
def run_test_cleansstate_task(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True):
@@ -60,32 +137,15 @@ class SStateTests(SStateBase):
bitbake(['-ccleansstate'] + targets)
bitbake(targets)
- tgz_created = self.search_sstate('|'.join(map(str, [s + '.*?\.tgz$' for s in targets])), distro_specific, distro_nonspecific)
- self.assertTrue(tgz_created, msg="Could not find sstate .tgz files for: %s (%s)" % (', '.join(map(str, targets)), str(tgz_created)))
+ archives_created = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific, distro_nonspecific)
+ self.assertTrue(archives_created, msg="Could not find sstate .tar.zst files for: %s (%s)" % (', '.join(map(str, targets)), str(archives_created)))
- siginfo_created = self.search_sstate('|'.join(map(str, [s + '.*?\.siginfo$' for s in targets])), distro_specific, distro_nonspecific)
+ siginfo_created = self.search_sstate('|'.join(map(str, [s + r'.*?\.siginfo$' for s in targets])), distro_specific, distro_nonspecific)
self.assertTrue(siginfo_created, msg="Could not find sstate .siginfo files for: %s (%s)" % (', '.join(map(str, targets)), str(siginfo_created)))
bitbake(['-ccleansstate'] + targets)
- tgz_removed = self.search_sstate('|'.join(map(str, [s + '.*?\.tgz$' for s in targets])), distro_specific, distro_nonspecific)
- self.assertTrue(not tgz_removed, msg="do_cleansstate didn't remove .tgz sstate files for: %s (%s)" % (', '.join(map(str, targets)), str(tgz_removed)))
-
- @OETestID(977)
- def test_cleansstate_task_distro_specific_nonspecific(self):
- targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native']
- targets.append('linux-libc-headers')
- self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True)
-
- @OETestID(1376)
- def test_cleansstate_task_distro_nonspecific(self):
- self.run_test_cleansstate_task(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
-
- @OETestID(1377)
- def test_cleansstate_task_distro_specific(self):
- targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native']
- targets.append('linux-libc-headers')
- self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
-
+ archives_removed = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific, distro_nonspecific)
+ self.assertTrue(not archives_removed, msg="do_cleansstate didn't remove .tar.zst sstate files for: %s (%s)" % (', '.join(map(str, targets)), str(archives_removed)))
# Test rebuilding of distro-specific sstate files
def run_test_rebuild_distro_specific_sstate(self, targets, temp_sstate_location=True):
@@ -94,15 +154,15 @@ class SStateTests(SStateBase):
bitbake(['-ccleansstate'] + targets)
bitbake(targets)
- results = self.search_sstate('|'.join(map(str, [s + '.*?\.tgz$' for s in targets])), distro_specific=False, distro_nonspecific=True)
+ results = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific=False, distro_nonspecific=True)
filtered_results = []
for r in results:
- if r.endswith(("_populate_lic.tgz", "_populate_lic.tgz.siginfo")):
+ if r.endswith(("_populate_lic.tar.zst", "_populate_lic.tar.zst.siginfo")):
continue
filtered_results.append(r)
self.assertTrue(filtered_results == [], msg="Found distro non-specific sstate for: %s (%s)" % (', '.join(map(str, targets)), str(filtered_results)))
- file_tracker_1 = self.search_sstate('|'.join(map(str, [s + '.*?\.tgz$' for s in targets])), distro_specific=True, distro_nonspecific=False)
- self.assertTrue(len(file_tracker_1) >= len(targets), msg = "Not all sstate files ware created for: %s" % ', '.join(map(str, targets)))
+ file_tracker_1 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific=True, distro_nonspecific=False)
+ self.assertTrue(len(file_tracker_1) >= len(targets), msg = "Not all sstate files were created for: %s" % ', '.join(map(str, targets)))
self.track_for_cleanup(self.distro_specific_sstate + "_old")
shutil.copytree(self.distro_specific_sstate, self.distro_specific_sstate + "_old")
@@ -110,71 +170,166 @@ class SStateTests(SStateBase):
bitbake(['-cclean'] + targets)
bitbake(targets)
- file_tracker_2 = self.search_sstate('|'.join(map(str, [s + '.*?\.tgz$' for s in targets])), distro_specific=True, distro_nonspecific=False)
- self.assertTrue(len(file_tracker_2) >= len(targets), msg = "Not all sstate files ware created for: %s" % ', '.join(map(str, targets)))
+ file_tracker_2 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific=True, distro_nonspecific=False)
+ self.assertTrue(len(file_tracker_2) >= len(targets), msg = "Not all sstate files were created for: %s" % ', '.join(map(str, targets)))
not_recreated = [x for x in file_tracker_1 if x not in file_tracker_2]
- self.assertTrue(not_recreated == [], msg="The following sstate files ware not recreated: %s" % ', '.join(map(str, not_recreated)))
+ self.assertTrue(not_recreated == [], msg="The following sstate files were not recreated: %s" % ', '.join(map(str, not_recreated)))
created_once = [x for x in file_tracker_2 if x not in file_tracker_1]
- self.assertTrue(created_once == [], msg="The following sstate files ware created only in the second run: %s" % ', '.join(map(str, created_once)))
+ self.assertTrue(created_once == [], msg="The following sstate files were created only in the second run: %s" % ', '.join(map(str, created_once)))
+
+ def sstate_common_samesigs(self, configA, configB, allarch=False):
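+ # Build world and meta-toolchain with two different configurations into
+ # separate TMPDIRs and assert that the nativesdk (and optionally allarch)
+ # stamp signatures are identical between the two builds.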
+
+ self.write_config(configA)
+ self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
+ bitbake("world meta-toolchain -S none")
+ self.write_config(configB)
+ self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
+ bitbake("world meta-toolchain -S none")
- @OETestID(175)
+ def get_files(d, result):
+ for root, dirs, files in os.walk(d):
+ for name in files:
+ if "meta-environment" in root or "cross-canadian" in root:
+ continue
+ if "do_build" not in name:
+ # 1.4.1+gitAUTOINC+302fca9f4c-r0.do_package_write_ipk.sigdata.f3a2a38697da743f0dbed8b56aafcf79
+ (_, task, _, shash) = name.rsplit(".", 3)
+ result[os.path.join(os.path.basename(root), task)] = shash
+
+ files1 = {}
+ files2 = {}
+ subdirs = sorted(glob.glob(self.topdir + "/tmp-sstatesamehash/stamps/*-nativesdk*-linux"))
+ if allarch:
+ subdirs.extend(sorted(glob.glob(self.topdir + "/tmp-sstatesamehash/stamps/all-*-linux")))
+
+ for subdir in subdirs:
+ nativesdkdir = os.path.basename(subdir)
+ get_files(self.topdir + "/tmp-sstatesamehash/stamps/" + nativesdkdir, files1)
+ get_files(self.topdir + "/tmp-sstatesamehash2/stamps/" + nativesdkdir, files2)
+
+ self.maxDiff = None
+ self.assertEqual(files1, files2)
+
+class SStateTests(SStateBase):
+ def test_autorev_sstate_works(self):
+ # Test that a git repository which changes is correctly handled by SRCREV = ${AUTOREV}
+
+ tempdir = tempfile.mkdtemp(prefix='sstate_autorev')
+ tempdldir = tempfile.mkdtemp(prefix='sstate_autorev_dldir')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(tempdldir)
+ create_temp_layer(tempdir, 'selftestrecipetool')
+ self.add_command_to_tearDown('bitbake-layers remove-layer %s' % tempdir)
+ self.append_config("DL_DIR = \"%s\"" % tempdldir)
+ runCmd('bitbake-layers add-layer %s' % tempdir)
+
+ # Use dbus-wait as a local git repo we can add a commit between two builds in
+ pn = 'dbus-wait'
+ srcrev = '6cc6077a36fe2648a5f993fe7c16c9632f946517'
+ url = 'git://git.yoctoproject.org/dbus-wait'
+ result = runCmd('git clone %s noname' % url, cwd=tempdir)
+ srcdir = os.path.join(tempdir, 'noname')
+ result = runCmd('git reset --hard %s' % srcrev, cwd=srcdir)
+ self.assertTrue(os.path.isfile(os.path.join(srcdir, 'configure.ac')), 'Unable to find configure script in source directory')
+
+ recipefile = os.path.join(tempdir, "recipes-test", "dbus-wait-test", 'dbus-wait-test_git.bb')
+ os.makedirs(os.path.dirname(recipefile))
+ srcuri = 'git://' + srcdir + ';protocol=file;branch=master'
+ result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri])
+ self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output)
+
+ with open(recipefile, 'a') as f:
+ f.write('SRCREV = "${AUTOREV}"\n')
+ f.write('PV = "1.0"\n')
+
+ bitbake("dbus-wait-test -c fetch")
+ with open(os.path.join(srcdir, "bar.txt"), "w") as f:
+ f.write("foo")
+ result = runCmd('git add bar.txt; git commit -asm "add bar"', cwd=srcdir)
+ bitbake("dbus-wait-test -c unpack")
+
+class SStateCreation(SStateBase):
+ def test_sstate_creation_distro_specific_pass(self):
+ self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
+
+ def test_sstate_creation_distro_specific_fail(self):
+ self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True, should_pass=False)
+
+ def test_sstate_creation_distro_nonspecific_pass(self):
+ self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
+
+ def test_sstate_creation_distro_nonspecific_fail(self):
+ self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True, should_pass=False)
+
+class SStateCleanup(SStateBase):
+ def test_cleansstate_task_distro_specific_nonspecific(self):
+ targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native']
+ targets.append('linux-libc-headers')
+ self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True)
+
+ def test_cleansstate_task_distro_nonspecific(self):
+ self.run_test_cleansstate_task(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
+
+ def test_cleansstate_task_distro_specific(self):
+ targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native']
+ targets.append('linux-libc-headers')
+ self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
+
+class SStateDistroTests(SStateBase):
def test_rebuild_distro_specific_sstate_cross_native_targets(self):
self.run_test_rebuild_distro_specific_sstate(['binutils-cross-' + self.tune_arch, 'binutils-native'], temp_sstate_location=True)
- @OETestID(1372)
def test_rebuild_distro_specific_sstate_cross_target(self):
self.run_test_rebuild_distro_specific_sstate(['binutils-cross-' + self.tune_arch], temp_sstate_location=True)
- @OETestID(1373)
def test_rebuild_distro_specific_sstate_native_target(self):
self.run_test_rebuild_distro_specific_sstate(['binutils-native'], temp_sstate_location=True)
-
+class SStateCacheManagement(SStateBase):
# Test the sstate-cache-management script. Each element in the global_config list is used with the corresponding element in the target_config list
- # global_config elements are expected to not generate any sstate files that would be removed by sstate-cache-management.sh (such as changing the value of MACHINE)
+ # global_config elements (e.g. changing the value of MACHINE) are expected not to generate any sstate files that would be removed by sstate-cache-management.py
def run_test_sstate_cache_management_script(self, target, global_config=[''], target_config=[''], ignore_patterns=[]):
self.assertTrue(global_config)
self.assertTrue(target_config)
self.assertTrue(len(global_config) == len(target_config), msg='Lists global_config and target_config should have the same number of elements')
- self.config_sstate(temp_sstate_location=True, add_local_mirrors=[self.sstate_path])
- # If buildhistory is enabled, we need to disable version-going-backwards
- # QA checks for this test. It may report errors otherwise.
- self.append_config('ERROR_QA_remove = "version-going-backwards"')
+ for idx in range(len(target_config)):
+ self.append_config(global_config[idx])
+ self.append_recipeinc(target, target_config[idx])
+ bitbake(target)
+ self.remove_config(global_config[idx])
+ self.remove_recipeinc(target, target_config[idx])
+
+ self.config_sstate(temp_sstate_location=True, add_local_mirrors=[self.sstate_path])
- # For not this only checks if random sstate tasks are handled correctly as a group.
+ # For now this only checks if random sstate tasks are handled correctly as a group.
# In the future we should add control over what tasks we check for.
- sstate_archs_list = []
expected_remaining_sstate = []
for idx in range(len(target_config)):
self.append_config(global_config[idx])
self.append_recipeinc(target, target_config[idx])
- sstate_arch = get_bb_var('SSTATE_PKGARCH', target)
- if not sstate_arch in sstate_archs_list:
- sstate_archs_list.append(sstate_arch)
if target_config[idx] == target_config[-1]:
- target_sstate_before_build = self.search_sstate(target + '.*?\.tgz$')
+ target_sstate_before_build = self.search_sstate(target + r'.*?\.tar.zst$')
bitbake("-cclean %s" % target)
result = bitbake(target, ignore_status=True)
if target_config[idx] == target_config[-1]:
- target_sstate_after_build = self.search_sstate(target + '.*?\.tgz$')
+ target_sstate_after_build = self.search_sstate(target + r'.*?\.tar.zst$')
expected_remaining_sstate += [x for x in target_sstate_after_build if x not in target_sstate_before_build if not any(pattern in x for pattern in ignore_patterns)]
self.remove_config(global_config[idx])
self.remove_recipeinc(target, target_config[idx])
self.assertEqual(result.status, 0, msg = "build of %s failed with %s" % (target, result.output))
- runCmd("sstate-cache-management.sh -y --cache-dir=%s --remove-duplicated --extra-archs=%s" % (self.sstate_path, ','.join(map(str, sstate_archs_list))))
- actual_remaining_sstate = [x for x in self.search_sstate(target + '.*?\.tgz$') if not any(pattern in x for pattern in ignore_patterns)]
+ runCmd("sstate-cache-management.py -y --cache-dir=%s --remove-duplicated" % (self.sstate_path))
+ actual_remaining_sstate = [x for x in self.search_sstate(target + r'.*?\.tar.zst$') if not any(pattern in x for pattern in ignore_patterns)]
actual_not_expected = [x for x in actual_remaining_sstate if x not in expected_remaining_sstate]
- self.assertFalse(actual_not_expected, msg="Files should have been removed but ware not: %s" % ', '.join(map(str, actual_not_expected)))
+ self.assertFalse(actual_not_expected, msg="Files should have been removed but were not: %s" % ', '.join(map(str, actual_not_expected)))
expected_not_actual = [x for x in expected_remaining_sstate if x not in actual_remaining_sstate]
- self.assertFalse(expected_not_actual, msg="Extra files ware removed: %s" ', '.join(map(str, expected_not_actual)))
+ self.assertFalse(expected_not_actual, msg="Extra files were removed: %s" % ', '.join(map(str, expected_not_actual)))
- @OETestID(973)
def test_sstate_cache_management_script_using_pr_1(self):
global_config = []
target_config = []
@@ -182,7 +337,6 @@ class SStateTests(SStateBase):
target_config.append('PR = "0"')
self.run_test_sstate_cache_management_script('m4', global_config, target_config, ignore_patterns=['populate_lic'])
- @OETestID(978)
def test_sstate_cache_management_script_using_pr_2(self):
global_config = []
target_config = []
@@ -192,7 +346,6 @@ class SStateTests(SStateBase):
target_config.append('PR = "1"')
self.run_test_sstate_cache_management_script('m4', global_config, target_config, ignore_patterns=['populate_lic'])
- @OETestID(979)
def test_sstate_cache_management_script_using_pr_3(self):
global_config = []
target_config = []
@@ -204,7 +357,6 @@ class SStateTests(SStateBase):
target_config.append('PR = "1"')
self.run_test_sstate_cache_management_script('m4', global_config, target_config, ignore_patterns=['populate_lic'])
- @OETestID(974)
def test_sstate_cache_management_script_using_machine(self):
global_config = []
target_config = []
@@ -214,7 +366,7 @@ class SStateTests(SStateBase):
target_config.append('')
self.run_test_sstate_cache_management_script('m4', global_config, target_config, ignore_patterns=['populate_lic'])
- @OETestID(1270)
+class SStateHashSameSigs(SStateBase):
def test_sstate_32_64_same_hash(self):
"""
The sstate checksums for both native and target should not vary whether
@@ -226,28 +378,32 @@ class SStateTests(SStateBase):
self.write_config("""
MACHINE = "qemux86"
TMPDIR = "${TOPDIR}/tmp-sstatesamehash"
+TCLIBCAPPEND = ""
BUILD_ARCH = "x86_64"
BUILD_OS = "linux"
SDKMACHINE = "x86_64"
PACKAGE_CLASSES = "package_rpm package_ipk package_deb"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
""")
self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
- bitbake("core-image-sato -S none")
+ bitbake("core-image-weston -S none")
self.write_config("""
MACHINE = "qemux86"
TMPDIR = "${TOPDIR}/tmp-sstatesamehash2"
+TCLIBCAPPEND = ""
BUILD_ARCH = "i686"
BUILD_OS = "linux"
SDKMACHINE = "i686"
PACKAGE_CLASSES = "package_rpm package_ipk package_deb"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
""")
self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
- bitbake("core-image-sato -S none")
+ bitbake("core-image-weston -S none")
def get_files(d):
f = []
for root, dirs, files in os.walk(d):
- if "core-image-sato" in root:
+ if "core-image-weston" in root:
# SDKMACHINE changing will change
# do_rootfs/do_testimage/do_build stamps of images which
# is safe to ignore.
@@ -261,7 +417,6 @@ PACKAGE_CLASSES = "package_rpm package_ipk package_deb"
self.assertCountEqual(files1, files2)
- @OETestID(1271)
def test_sstate_nativelsbstring_same_hash(self):
"""
The sstate checksums should be independent of whichever NATIVELSBSTRING is
@@ -271,16 +426,20 @@ PACKAGE_CLASSES = "package_rpm package_ipk package_deb"
self.write_config("""
TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
+TCLIBCAPPEND = \"\"
NATIVELSBSTRING = \"DistroA\"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
""")
self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
- bitbake("core-image-sato -S none")
+ bitbake("core-image-weston -S none")
self.write_config("""
TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
+TCLIBCAPPEND = \"\"
NATIVELSBSTRING = \"DistroB\"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
""")
self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
- bitbake("core-image-sato -S none")
+ bitbake("core-image-weston -S none")
def get_files(d):
f = []
@@ -293,7 +452,7 @@ NATIVELSBSTRING = \"DistroB\"
self.maxDiff = None
self.assertCountEqual(files1, files2)
- @OETestID(1368)
+class SStateHashSameSigs2(SStateBase):
def test_sstate_allarch_samesigs(self):
"""
The sstate checksums of allarch packages should be independent of whichever
@@ -304,71 +463,45 @@ NATIVELSBSTRING = \"DistroB\"
configA = """
TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
+TCLIBCAPPEND = \"\"
MACHINE = \"qemux86-64\"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
"""
+ # OLDEST_KERNEL is arch-specific, so set it to a different value here for testing
configB = """
TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
+TCLIBCAPPEND = \"\"
MACHINE = \"qemuarm\"
+OLDEST_KERNEL = \"3.3.0\"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
"""
- self.sstate_allarch_samesigs(configA, configB)
+ self.sstate_common_samesigs(configA, configB, allarch=True)
- @OETestID(1645)
- def test_sstate_allarch_samesigs_multilib(self):
+ def test_sstate_nativesdk_samesigs_multilib(self):
"""
- The sstate checksums of allarch multilib packages should be independent of whichever
- MACHINE is set. Check this using bitbake -S.
- Also, rather than duplicate the test, check nativesdk stamps are the same between
- the two MACHINE values.
+ check nativesdk stamps are the same between the two MACHINE values.
"""
configA = """
TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
+TCLIBCAPPEND = \"\"
MACHINE = \"qemux86-64\"
require conf/multilib.conf
MULTILIBS = \"multilib:lib32\"
-DEFAULTTUNE_virtclass-multilib-lib32 = \"x86\"
+DEFAULTTUNE:virtclass-multilib-lib32 = \"x86\"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
"""
configB = """
TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
+TCLIBCAPPEND = \"\"
MACHINE = \"qemuarm\"
require conf/multilib.conf
MULTILIBS = \"\"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
"""
- self.sstate_allarch_samesigs(configA, configB)
-
- def sstate_allarch_samesigs(self, configA, configB):
-
- self.write_config(configA)
- self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
- bitbake("world meta-toolchain -S none")
- self.write_config(configB)
- self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
- bitbake("world meta-toolchain -S none")
-
- def get_files(d):
- f = {}
- for root, dirs, files in os.walk(d):
- for name in files:
- if "meta-environment" in root or "cross-canadian" in root:
- continue
- if "do_build" not in name:
- # 1.4.1+gitAUTOINC+302fca9f4c-r0.do_package_write_ipk.sigdata.f3a2a38697da743f0dbed8b56aafcf79
- (_, task, _, shash) = name.rsplit(".", 3)
- f[os.path.join(os.path.basename(root), task)] = shash
- return f
- files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps/all" + self.target_vendor + "-" + self.target_os)
- files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps/all" + self.target_vendor + "-" + self.target_os)
- self.maxDiff = None
- self.assertEqual(files1, files2)
-
- nativesdkdir = os.path.basename(glob.glob(self.topdir + "/tmp-sstatesamehash/stamps/*-nativesdk*-linux")[0])
+ self.sstate_common_samesigs(configA, configB)
- files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps/" + nativesdkdir)
- files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps/" + nativesdkdir)
- self.maxDiff = None
- self.assertEqual(files1, files2)
-
- @OETestID(1369)
+class SStateHashSameSigs3(SStateBase):
def test_sstate_sametune_samesigs(self):
"""
The sstate checksums of two identical machines (using the same tune) should be the
@@ -378,19 +511,23 @@ MULTILIBS = \"\"
self.write_config("""
TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
+TCLIBCAPPEND = \"\"
MACHINE = \"qemux86\"
require conf/multilib.conf
MULTILIBS = "multilib:lib32"
-DEFAULTTUNE_virtclass-multilib-lib32 = "x86"
+DEFAULTTUNE:virtclass-multilib-lib32 = "x86"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
""")
self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
bitbake("world meta-toolchain -S none")
self.write_config("""
TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
+TCLIBCAPPEND = \"\"
MACHINE = \"qemux86copy\"
require conf/multilib.conf
MULTILIBS = "multilib:lib32"
-DEFAULTTUNE_virtclass-multilib-lib32 = "x86"
+DEFAULTTUNE:virtclass-multilib-lib32 = "x86"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
""")
self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
bitbake("world meta-toolchain -S none")
@@ -399,7 +536,7 @@ DEFAULTTUNE_virtclass-multilib-lib32 = "x86"
f = []
for root, dirs, files in os.walk(d):
for name in files:
- if "meta-environment" in root or "cross-canadian" in root:
+ if "meta-environment" in root or "cross-canadian" in root or 'meta-ide-support' in root:
continue
if "qemux86copy-" in root or "qemux86-" in root:
continue
@@ -413,7 +550,46 @@ DEFAULTTUNE_virtclass-multilib-lib32 = "x86"
self.assertCountEqual(files1, files2)
- @OETestID(1498)
+ def test_sstate_multilib_or_not_native_samesigs(self):
+ """The sstate checksums of two native recipes (and their dependencies)
+ where the target is using multilib in one but not the other
+ should be the same. We use the qemux86copy machine to test
+ this.
+ """
+
+ self.write_config("""
+TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
+TCLIBCAPPEND = \"\"
+MACHINE = \"qemux86\"
+require conf/multilib.conf
+MULTILIBS = "multilib:lib32"
+DEFAULTTUNE:virtclass-multilib-lib32 = "x86"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
+""")
+ self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
+ bitbake("binutils-native -S none")
+ self.write_config("""
+TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
+TCLIBCAPPEND = \"\"
+MACHINE = \"qemux86copy\"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
+""")
+ self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
+ bitbake("binutils-native -S none")
+
+ def get_files(d):
+ f = []
+ for root, dirs, files in os.walk(d):
+ for name in files:
+ f.append(os.path.join(root, name))
+ return f
+ files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps")
+ files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps")
+ files2 = [x.replace("tmp-sstatesamehash2", "tmp-sstatesamehash") for x in files2]
+ self.maxDiff = None
+ self.assertCountEqual(files1, files2)
+
+class SStateHashSameSigs4(SStateBase):
def test_sstate_noop_samesigs(self):
"""
The sstate checksums of two builds with these variables changed or
@@ -422,28 +598,32 @@ DEFAULTTUNE_virtclass-multilib-lib32 = "x86"
self.write_config("""
TMPDIR = "${TOPDIR}/tmp-sstatesamehash"
+TCLIBCAPPEND = ""
BB_NUMBER_THREADS = "${@oe.utils.cpu_count()}"
PARALLEL_MAKE = "-j 1"
DL_DIR = "${TOPDIR}/download1"
TIME = "111111"
DATE = "20161111"
-INHERIT_remove = "buildstats-summary buildhistory uninative"
+INHERIT:remove = "buildstats-summary buildhistory uninative"
http_proxy = ""
+BB_SIGNATURE_HANDLER = "OEBasicHash"
""")
self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
self.track_for_cleanup(self.topdir + "/download1")
bitbake("world meta-toolchain -S none")
self.write_config("""
TMPDIR = "${TOPDIR}/tmp-sstatesamehash2"
+TCLIBCAPPEND = ""
BB_NUMBER_THREADS = "${@oe.utils.cpu_count()+1}"
PARALLEL_MAKE = "-j 2"
DL_DIR = "${TOPDIR}/download2"
TIME = "222222"
DATE = "20161212"
# Always remove uninative as we're changing proxies
-INHERIT_remove = "uninative"
+INHERIT:remove = "uninative"
INHERIT += "buildstats-summary buildhistory"
http_proxy = "http://example.com/"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
""")
self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
self.track_for_cleanup(self.topdir + "/download2")
@@ -490,7 +670,339 @@ http_proxy = "http://example.com/"
# this is an expensive computation, thus just compare the first 'max_sigfiles_to_compare' k files
max_sigfiles_to_compare = 20
first, rest = files[:max_sigfiles_to_compare], files[max_sigfiles_to_compare:]
- compare_sigfiles(first, files1.keys(), files2.keys(), compare=True)
- compare_sigfiles(rest, files1.keys(), files2.keys(), compare=False)
+ compare_sigfiles(first, files1, files2, compare=True)
+ compare_sigfiles(rest, files1, files2, compare=False)
self.fail("sstate hashes not identical.")
+
+ def test_sstate_movelayer_samesigs(self):
+ """
+ The sstate checksums of two builds with the same oe-core layer in two
+ different locations should be the same.
+ """
+ core_layer = os.path.join(
+ self.tc.td["COREBASE"], 'meta')
+ copy_layer_1 = self.topdir + "/meta-copy1/meta"
+ copy_layer_2 = self.topdir + "/meta-copy2/meta"
+
+ oe.path.copytree(core_layer, copy_layer_1)
+ os.symlink(os.path.dirname(core_layer) + "/scripts", self.topdir + "/meta-copy1/scripts")
+ self.write_config("""
+TMPDIR = "${TOPDIR}/tmp-sstatesamehash"
+""")
+ bblayers_conf = 'BBLAYERS += "%s"\nBBLAYERS:remove = "%s"' % (copy_layer_1, core_layer)
+ self.write_bblayers_config(bblayers_conf)
+ self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
+ bitbake("bash -S none")
+
+ oe.path.copytree(core_layer, copy_layer_2)
+ os.symlink(os.path.dirname(core_layer) + "/scripts", self.topdir + "/meta-copy2/scripts")
+ self.write_config("""
+TMPDIR = "${TOPDIR}/tmp-sstatesamehash2"
+""")
+ bblayers_conf = 'BBLAYERS += "%s"\nBBLAYERS:remove = "%s"' % (copy_layer_2, core_layer)
+ self.write_bblayers_config(bblayers_conf)
+ self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
+ bitbake("bash -S none")
+
+ def get_files(d):
+ f = []
+ for root, dirs, files in os.walk(d):
+ for name in files:
+ f.append(os.path.join(root, name))
+ return f
+ files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps")
+ files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps")
+ files2 = [x.replace("tmp-sstatesamehash2", "tmp-sstatesamehash") for x in files2]
+ self.maxDiff = None
+ self.assertCountEqual(files1, files2)
+
+class SStateFindSiginfo(SStateBase):
+ def test_sstate_compare_sigfiles_and_find_siginfo(self):
+ """
+ Test the functionality of the find_siginfo: basic function and callback in compare_sigfiles
+ """
+ self.write_config("""
+TMPDIR = \"${TOPDIR}/tmp-sstates-findsiginfo\"
+TCLIBCAPPEND = \"\"
+MACHINE = \"qemux86-64\"
+require conf/multilib.conf
+MULTILIBS = "multilib:lib32"
+DEFAULTTUNE:virtclass-multilib-lib32 = "x86"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
+""")
+ self.track_for_cleanup(self.topdir + "/tmp-sstates-findsiginfo")
+
+ pns = ["binutils", "binutils-native", "lib32-binutils"]
+ target_configs = [
+"""
+TMPVAL1 = "tmpval1"
+TMPVAL2 = "tmpval2"
+do_tmptask1() {
+ echo ${TMPVAL1}
+}
+do_tmptask2() {
+ echo ${TMPVAL2}
+}
+addtask do_tmptask1
+addtask tmptask2 before do_tmptask1
+""",
+"""
+TMPVAL3 = "tmpval3"
+TMPVAL4 = "tmpval4"
+do_tmptask1() {
+ echo ${TMPVAL3}
+}
+do_tmptask2() {
+ echo ${TMPVAL4}
+}
+addtask do_tmptask1
+addtask tmptask2 before do_tmptask1
+"""
+ ]
+
+ for target_config in target_configs:
+ self.write_recipeinc("binutils", target_config)
+ for pn in pns:
+ bitbake("%s -c do_tmptask1 -S none" % pn)
+ self.delete_recipeinc("binutils")
+
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=True)
+
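+ # Ask the bitbake server for siginfo files matching pn/taskname (optionally
+ # restricted to specific hashes) and collect the answer from the asynchronous
+ # FindSigInfoResult event once the command completes.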
+ def find_siginfo(pn, taskname, sigs=None):
+ result = None
+ command_complete = False
+ tinfoil.set_event_mask(["bb.event.FindSigInfoResult",
+ "bb.command.CommandCompleted"])
+ ret = tinfoil.run_command("findSigInfo", pn, taskname, sigs)
+ if ret:
+ while result is None or not command_complete:
+ event = tinfoil.wait_event(1)
+ if event:
+ if isinstance(event, bb.command.CommandCompleted):
+ command_complete = True
+ elif isinstance(event, bb.event.FindSigInfoResult):
+ result = event.result
+ return result
+
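+ # Callback used by compare_sigfiles: for each differing dependent task hash,
+ # look up both siginfo files and compare them recursively, counting invocations.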
+ def recursecb(key, hash1, hash2):
+ nonlocal recursecb_count
+ recursecb_count += 1
+ hashes = [hash1, hash2]
+ hashfiles = find_siginfo(key, None, hashes)
+ self.assertCountEqual(hashes, hashfiles)
+ bb.siggen.compare_sigfiles(hashfiles[hash1]['path'], hashfiles[hash2]['path'], recursecb)
+
+ for pn in pns:
+ recursecb_count = 0
+ matches = find_siginfo(pn, "do_tmptask1")
+ self.assertGreaterEqual(len(matches), 2)
+ latesthashes = sorted(matches.keys(), key=lambda h: matches[h]['time'])[-2:]
+ bb.siggen.compare_sigfiles(matches[latesthashes[-2]]['path'], matches[latesthashes[-1]]['path'], recursecb)
+ self.assertEqual(recursecb_count,1)
+
+class SStatePrintdiff(SStateBase):
+ def run_test_printdiff_changerecipe(self, target, change_recipe, change_bbtask, change_content, expected_sametmp_output, expected_difftmp_output):
+ import time
+ self.write_config("""
+TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-sametmp-{}"
+""".format(time.time()))
+ # Use runall do_build to ensure any indirect sstate is created, e.g. tzcode-native on both x86 and
+ # aarch64 hosts since only allarch target recipes depend upon it and it may not be built otherwise.
+ # A bitbake -c cleansstate tzcode-native would cause some of these tests to error for example.
+ bitbake("--runall build --runall deploy_source_date_epoch {}".format(target))
+ bitbake("-S none {}".format(target))
+ bitbake(change_bbtask)
+ self.write_recipeinc(change_recipe, change_content)
+ result_sametmp = bitbake("-S printdiff {}".format(target))
+
+ self.write_config("""
+TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-difftmp-{}"
+""".format(time.time()))
+ result_difftmp = bitbake("-S printdiff {}".format(target))
+
+ self.delete_recipeinc(change_recipe)
+ for item in expected_sametmp_output:
+ self.assertIn(item, result_sametmp.output, msg = "Item {} not found in output:\n{}".format(item, result_sametmp.output))
+ for item in expected_difftmp_output:
+ self.assertIn(item, result_difftmp.output, msg = "Item {} not found in output:\n{}".format(item, result_difftmp.output))
+
+ def run_test_printdiff_changeconfig(self, target, change_bbtasks, change_content, expected_sametmp_output, expected_difftmp_output):
+ import time
+ self.write_config("""
+TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-sametmp-{}"
+""".format(time.time()))
+ bitbake("--runall build --runall deploy_source_date_epoch {}".format(target))
+ bitbake("-S none {}".format(target))
+ bitbake(" ".join(change_bbtasks))
+ self.append_config(change_content)
+ result_sametmp = bitbake("-S printdiff {}".format(target))
+
+ self.write_config("""
+TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-difftmp-{}"
+""".format(time.time()))
+ self.append_config(change_content)
+ result_difftmp = bitbake("-S printdiff {}".format(target))
+
+ for item in expected_sametmp_output:
+ self.assertIn(item, result_sametmp.output, msg = "Item {} not found in output:\n{}".format(item, result_sametmp.output))
+ for item in expected_difftmp_output:
+ self.assertIn(item, result_difftmp.output, msg = "Item {} not found in output:\n{}".format(item, result_difftmp.output))
+
+
+ # Check if printdiff walks the full dependency chain from the image target to where the change is in a specific recipe
+ def test_image_minimal_vs_perlcross(self):
+ expected_output = ("Task perlcross-native:do_install couldn't be used from the cache because:",
+"We need hash",
+"most recent matching task was")
+ expected_sametmp_output = expected_output + (
+"Variable do_install value changed",
+'+ echo "this changes the task signature"')
+ expected_difftmp_output = expected_output
+
+ self.run_test_printdiff_changerecipe("core-image-minimal", "perlcross", "-c do_install perlcross-native",
+"""
+do_install:append() {
+ echo "this changes the task signature"
+}
+""",
+expected_sametmp_output, expected_difftmp_output)
+
+ # Check if changes to gcc-source (which uses tmp/work-shared) are correctly discovered
+ def test_gcc_runtime_vs_gcc_source(self):
+ gcc_source_pn = 'gcc-source-%s' % get_bb_vars(['PV'], 'gcc')['PV']
+
+ expected_output = ("Task {}:do_preconfigure couldn't be used from the cache because:".format(gcc_source_pn),
+"We need hash",
+"most recent matching task was")
+ expected_sametmp_output = expected_output + (
+"Variable do_preconfigure value changed",
+'+ print("this changes the task signature")')
+ expected_difftmp_output = expected_output
+
+ self.run_test_printdiff_changerecipe("gcc-runtime", "gcc-source", "-c do_preconfigure {}".format(gcc_source_pn),
+"""
+python do_preconfigure:append() {
+ print("this changes the task signature")
+}
+""",
+expected_sametmp_output, expected_difftmp_output)
+
+ # Check if changing a base task definition is reported against the multiple core recipes that use it
+ def test_image_minimal_vs_base_do_configure(self):
+ change_bbtasks = ('zstd-native:do_configure',
+'texinfo-dummy-native:do_configure',
+'ldconfig-native:do_configure',
+'gettext-minimal-native:do_configure',
+'tzcode-native:do_configure',
+'makedevs-native:do_configure',
+'pigz-native:do_configure',
+'update-rc.d-native:do_configure',
+'unzip-native:do_configure',
+'gnu-config-native:do_configure')
+
+ expected_output = ["Task {} couldn't be used from the cache because:".format(t) for t in change_bbtasks] + [
+"We need hash",
+"most recent matching task was"]
+
+ expected_sametmp_output = expected_output + [
+"Variable base_do_configure value changed",
+'+ echo "this changes base_do_configure() definiton "']
+ expected_difftmp_output = expected_output
+
+ self.run_test_printdiff_changeconfig("core-image-minimal",change_bbtasks,
+"""
+INHERIT += "base-do-configure-modified"
+""",
+expected_sametmp_output, expected_difftmp_output)
+
+@OETestTag("yocto-mirrors")
+class SStateMirrors(SStateBase):
+ def check_bb_output(self, output, exceptions, check_cdn):
+ def is_exception(obj, exceptions):
+ for e in exceptions:
+ if re.search(e, obj):
+ return True
+ return False
+
+ output_l = output.splitlines()
+ for l in output_l:
+ if l.startswith("Sstate summary"):
+ for idx, item in enumerate(l.split()):
+ if item == 'Missed':
+ missing_objects = int(l.split()[idx+1])
+ break
+ else:
+ self.fail("Did not find missing objects amount in sstate summary: {}".format(l))
+ break
+ else:
+ self.fail("Did not find 'Sstate summary' line in bitbake output")
+
+ failed_urls = []
+ failed_urls_extrainfo = []
+ for l in output_l:
+ if "SState: Unsuccessful fetch test for" in l and check_cdn:
+ missing_object = l.split()[6]
+ elif "SState: Looked for but didn't find file" in l and not check_cdn:
+ missing_object = l.split()[8]
+ else:
+ missing_object = None
+ if missing_object:
+ if not is_exception(missing_object, exceptions):
+ failed_urls.append(missing_object)
+ else:
+ missing_objects -= 1
+
+ if "urlopen failed for" in l and not is_exception(l, exceptions):
+ failed_urls_extrainfo.append(l)
+
+ self.assertEqual(len(failed_urls), missing_objects, "Reported number of missing objects ({}) does not match the number of failed URLs.\nFailed URLs:\n{}\nFetcher diagnostics:\n{}".format(missing_objects, "\n".join(failed_urls), "\n".join(failed_urls_extrainfo)))
+ self.assertEqual(len(failed_urls), 0, "Missing objects in the cache:\n{}\nFetcher diagnostics:\n{}".format("\n".join(failed_urls), "\n".join(failed_urls_extrainfo)))
+
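check_bb_output() scans the "Sstate summary" line token by token for the "Missed" count; a regex does the same job in one step. A small sketch with a made-up summary line:

import re

line = "Sstate summary: Wanted 100 Local 90 Mirrors 5 Missed 5 Current 0 (95% match, 0% complete)"
m = re.search(r"\bMissed\s+(\d+)", line)
if m is None:
    raise AssertionError("No 'Missed' field in sstate summary: %s" % line)
missed = int(m.group(1))
print(missed)   # -> 5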
+ def run_test(self, machine, targets, exceptions, check_cdn = True, ignore_errors = False):
+ # sstate is checked for existence of these, but they never get written out to begin with
+ exceptions += ["{}.*image_qa".format(t) for t in targets.split()]
+ exceptions += ["{}.*deploy_source_date_epoch".format(t) for t in targets.split()]
+ exceptions += ["{}.*image_complete".format(t) for t in targets.split()]
+ exceptions += ["linux-yocto.*shared_workdir"]
+ # these get influenced by IMAGE_FSTYPES tweaks in yocto-autobuilder-helper's config.json (on x86-64)
+ # additionally, they depend on noexec (thus, absent stamps) package, install, etc. image tasks,
+ # which makes tracing other changes difficult
+ exceptions += ["{}.*create_spdx".format(t) for t in targets.split()]
+ exceptions += ["{}.*create_runtime_spdx".format(t) for t in targets.split()]
+
+ if check_cdn:
+ self.config_sstate(True)
+ self.append_config("""
+MACHINE = "{}"
+BB_HASHSERVE_UPSTREAM = "hashserv.yocto.io:8687"
+SSTATE_MIRRORS ?= "file://.* http://cdn.jsdelivr.net/yocto/sstate/all/PATH;downloadfilename=PATH"
+""".format(machine))
+ else:
+ self.append_config("""
+MACHINE = "{}"
+""".format(machine))
+ result = bitbake("-DD -n {}".format(targets))
+ bitbake("-S none {}".format(targets))
+ if ignore_errors:
+ return
+ self.check_bb_output(result.output, exceptions, check_cdn)
+
+ def test_cdn_mirror_qemux86_64(self):
+ exceptions = []
+ self.run_test("qemux86-64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, ignore_errors = True)
+ self.run_test("qemux86-64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions)
+
+ def test_cdn_mirror_qemuarm64(self):
+ exceptions = []
+ self.run_test("qemuarm64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, ignore_errors = True)
+ self.run_test("qemuarm64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions)
+
+ def test_local_cache_qemux86_64(self):
+ exceptions = []
+ self.run_test("qemux86-64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, check_cdn = False)
+
+ def test_local_cache_qemuarm64(self):
+ exceptions = []
+ self.run_test("qemuarm64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, check_cdn = False)
diff --git a/meta/lib/oeqa/selftest/cases/sysroot.py b/meta/lib/oeqa/selftest/cases/sysroot.py
new file mode 100644
index 0000000000..ef854f6fee
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/sysroot.py
@@ -0,0 +1,86 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import uuid
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+
+class SysrootTests(OESelftestTestCase):
+ def test_sysroot_cleanup(self):
+ """
+ Build sysroot test which depends on virtual/sysroot-test for one machine,
+ switch machine, switch provider of virtual/sysroot-test and check that the
+ sysroot is correctly cleaned up. The files in the two providers overlap,
+ so they can cause errors if the sysroot code doesn't function correctly.
+ Ideally sysroot-test would be machine-specific to avoid this, but the
+ sysroot cleanup should cope with it regardless [YOCTO #13702].
+ """
+
+ uuid1 = uuid.uuid4()
+ uuid2 = uuid.uuid4()
+
+ self.write_config("""
+PREFERRED_PROVIDER_virtual/sysroot-test = "sysroot-test-arch1"
+MACHINE = "qemux86"
+TESTSTRING:pn-sysroot-test-arch1 = "%s"
+TESTSTRING:pn-sysroot-test-arch2 = "%s"
+""" % (uuid1, uuid2))
+ bitbake("sysroot-test")
+ self.write_config("""
+PREFERRED_PROVIDER_virtual/sysroot-test = "sysroot-test-arch2"
+MACHINE = "qemux86copy"
+TESTSTRING:pn-sysroot-test-arch1 = "%s"
+TESTSTRING:pn-sysroot-test-arch2 = "%s"
+""" % (uuid1, uuid2))
+ bitbake("sysroot-test")
+
+ def test_sysroot_max_shebang(self):
+ """
+ Summary: Check max shebang triggers. To confirm [YOCTO #11053] is closed.
+ Expected: Fail when a shebang bigger than the max shebang-size is reached.
+ Author: Paulo Neves <ptsneves@gmail.com>
+ """
+ expected = "maximum shebang size exceeded, the maximum size is 128. [shebang-size]"
+ res = bitbake("sysroot-shebang-test-native -c populate_sysroot", ignore_status=True)
+ self.assertTrue(expected in res.output, msg=res.output)
+ self.assertTrue(res.status != 0)
+
+ def test_sysroot_la(self):
+ """
+ Summary: Check that workdir paths are not contained in .la files.
+ Expected: Fail when a workdir path is found in the file content.
+ Author: Paulo Neves <ptsneves@gmail.com>
+ """
+ expected = "la-test.la failed sanity test (workdir) in path"
+
+ res = bitbake("sysroot-la-test -c populate_sysroot", ignore_status=True)
+ self.assertTrue(expected in res.output, msg=res.output)
+ self.assertTrue('[la]' in res.output, msg=res.output)
+ self.assertTrue(res.status != 0)
+
+ res = bitbake("sysroot-la-test-native -c populate_sysroot", ignore_status=True)
+ self.assertTrue(expected in res.output, msg=res.output)
+ self.assertTrue('[la]' in res.output, msg=res.output)
+ self.assertTrue(res.status != 0)
+
+ def test_sysroot_pkgconfig(self):
+ """
+ Summary: Check that tmpdir paths are not contained in .pc files.
+ Expected: Fail when a tmpdir path is found in the file content.
+ Author: Paulo Neves <ptsneves@gmail.com>
+ """
+ expected = "test.pc failed sanity test (tmpdir) in path"
+
+ res = bitbake("sysroot-pc-test -c populate_sysroot", ignore_status=True)
+ self.assertTrue('[pkgconfig]' in res.output, msg=res.output)
+ self.assertTrue(expected in res.output, msg=res.output)
+ self.assertTrue(res.status != 0)
+
+ res = bitbake("sysroot-pc-test-native -c populate_sysroot", ignore_status=True)
+ self.assertTrue(expected in res.output, msg=res.output)
+ self.assertTrue('[pkgconfig]' in res.output, msg=res.output)
+ self.assertTrue(res.status != 0)
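The shebang-size failure asserted above comes from a sysroot QA check that rejects overly long interpreter lines. A rough stdlib sketch of that kind of check, using the 128-byte limit quoted in the expected message (the path is hypothetical and this is not the actual QA implementation):

def shebang_too_long(path, limit=128):
    # read only the first line; a shebang longer than the limit is rejected
    with open(path, "rb") as f:
        first_line = f.readline()
    return first_line.startswith(b"#!") and len(first_line) > limit

# e.g. shebang_too_long("recipe-sysroot-native/usr/bin/some-script")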
diff --git a/meta/lib/oeqa/selftest/cases/tinfoil.py b/meta/lib/oeqa/selftest/cases/tinfoil.py
index f889a47b26..21c8686b2a 100644
--- a/meta/lib/oeqa/selftest/cases/tinfoil.py
+++ b/meta/lib/oeqa/selftest/cases/tinfoil.py
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
import os
import re
import time
@@ -5,13 +11,10 @@ import logging
import bb.tinfoil
from oeqa.selftest.case import OESelftestTestCase
-from oeqa.utils.commands import runCmd
-from oeqa.core.decorator.oeid import OETestID
class TinfoilTests(OESelftestTestCase):
""" Basic tests for the tinfoil API """
- @OETestID(1568)
def test_getvar(self):
with bb.tinfoil.Tinfoil() as tinfoil:
tinfoil.prepare(True)
@@ -19,7 +22,6 @@ class TinfoilTests(OESelftestTestCase):
if not machine:
self.fail('Unable to get MACHINE value - returned %s' % machine)
- @OETestID(1569)
def test_expand(self):
with bb.tinfoil.Tinfoil() as tinfoil:
tinfoil.prepare(True)
@@ -28,7 +30,6 @@ class TinfoilTests(OESelftestTestCase):
if not pid:
self.fail('Unable to expand "%s" - returned %s' % (expr, pid))
- @OETestID(1570)
def test_getvar_bb_origenv(self):
with bb.tinfoil.Tinfoil() as tinfoil:
tinfoil.prepare(True)
@@ -37,7 +38,6 @@ class TinfoilTests(OESelftestTestCase):
self.fail('Unable to get BB_ORIGENV value - returned %s' % origenv)
self.assertEqual(origenv.getVar('HOME', False), os.environ['HOME'])
- @OETestID(1571)
def test_parse_recipe(self):
with bb.tinfoil.Tinfoil() as tinfoil:
tinfoil.prepare(config_only=False, quiet=2)
@@ -48,7 +48,17 @@ class TinfoilTests(OESelftestTestCase):
rd = tinfoil.parse_recipe_file(best[3])
self.assertEqual(testrecipe, rd.getVar('PN'))
- @OETestID(1572)
+ def test_parse_virtual_recipe(self):
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=False, quiet=2)
+ testrecipe = 'nativesdk-gcc'
+ best = tinfoil.find_best_provider(testrecipe)
+ if not best:
+ self.fail('Unable to find recipe providing %s' % testrecipe)
+ rd = tinfoil.parse_recipe_file(best[3])
+ self.assertEqual(testrecipe, rd.getVar('PN'))
+ self.assertIsNotNone(rd.getVar('FILE_LAYERNAME'))
+
def test_parse_recipe_copy_expand(self):
with bb.tinfoil.Tinfoil() as tinfoil:
tinfoil.prepare(config_only=False, quiet=2)
@@ -67,21 +77,32 @@ class TinfoilTests(OESelftestTestCase):
localdata.setVar('PN', 'hello')
self.assertEqual('hello', localdata.getVar('BPN'))
- @OETestID(1573)
- def test_parse_recipe_initial_datastore(self):
+ # The config_data API to parse_recipe_file is used by:
+ # layerindex-web layerindex/update_layer.py
+ def test_parse_recipe_custom_data(self):
with bb.tinfoil.Tinfoil() as tinfoil:
tinfoil.prepare(config_only=False, quiet=2)
+ localdata = bb.data.createCopy(tinfoil.config_data)
+ localdata.setVar("TESTVAR", "testval")
testrecipe = 'mdadm'
best = tinfoil.find_best_provider(testrecipe)
if not best:
self.fail('Unable to find recipe providing %s' % testrecipe)
- dcopy = bb.data.createCopy(tinfoil.config_data)
- dcopy.setVar('MYVARIABLE', 'somevalue')
- rd = tinfoil.parse_recipe_file(best[3], config_data=dcopy)
- # Check we can get variable values
- self.assertEqual('somevalue', rd.getVar('MYVARIABLE'))
+ rd = tinfoil.parse_recipe_file(best[3], config_data=localdata)
+ self.assertEqual("testval", rd.getVar('TESTVAR'))
+
+ def test_parse_virtual_recipe_custom_data(self):
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=False, quiet=2)
+ localdata = bb.data.createCopy(tinfoil.config_data)
+ localdata.setVar("TESTVAR", "testval")
+ testrecipe = 'nativesdk-gcc'
+ best = tinfoil.find_best_provider(testrecipe)
+ if not best:
+ self.fail('Unable to find recipe providing %s' % testrecipe)
+ rd = tinfoil.parse_recipe_file(best[3], config_data=localdata)
+ self.assertEqual("testval", rd.getVar('TESTVAR'))
- @OETestID(1574)
def test_list_recipes(self):
with bb.tinfoil.Tinfoil() as tinfoil:
tinfoil.prepare(config_only=False, quiet=2)
@@ -100,43 +121,44 @@ class TinfoilTests(OESelftestTestCase):
if checkpns:
self.fail('Unable to find pkg_fn entries for: %s' % ', '.join(checkpns))
- @OETestID(1575)
def test_wait_event(self):
with bb.tinfoil.Tinfoil() as tinfoil:
tinfoil.prepare(config_only=True)
- tinfoil.set_event_mask(['bb.event.FilesMatchingFound', 'bb.command.CommandCompleted'])
+ tinfoil.set_event_mask(['bb.event.FilesMatchingFound', 'bb.command.CommandCompleted', 'bb.command.CommandFailed', 'bb.command.CommandExit'])
# Need to drain events otherwise events that were masked may still be in the queue
while tinfoil.wait_event():
pass
pattern = 'conf'
- res = tinfoil.run_command('findFilesMatchingInDir', pattern, 'conf/machine')
+ res = tinfoil.run_command('testCookerCommandEvent', pattern, handle_events=False)
self.assertTrue(res)
eventreceived = False
commandcomplete = False
start = time.time()
- # Wait for 5s in total so we'd detect spurious heartbeat events for example
- while time.time() - start < 5:
+ # Wait for maximum 60s in total so we'd detect spurious heartbeat events for example
+ while (not (eventreceived == True and commandcomplete == True)
+ and (time.time() - start < 60)):
+ # if we received both events (on let's say a good day), we are done
event = tinfoil.wait_event(1)
if event:
if isinstance(event, bb.command.CommandCompleted):
commandcomplete = True
elif isinstance(event, bb.event.FilesMatchingFound):
self.assertEqual(pattern, event._pattern)
- self.assertIn('qemuarm.conf', event._matches)
+ self.assertIn('A', event._matches)
+ self.assertIn('B', event._matches)
eventreceived = True
elif isinstance(event, logging.LogRecord):
continue
else:
self.fail('Unexpected event: %s' % event)
- self.assertTrue(commandcomplete, 'Timed out waiting for CommandCompleted event from bitbake server')
+ self.assertTrue(commandcomplete, 'Timed out waiting for CommandCompleted event from bitbake server (Matching event received: %s)' % str(eventreceived))
self.assertTrue(eventreceived, 'Did not receive FilesMatchingFound event from bitbake server')
- @OETestID(1576)
def test_setvariable_clean(self):
# First check that setVariable affects the datastore
with bb.tinfoil.Tinfoil() as tinfoil:
@@ -159,7 +181,6 @@ class TinfoilTests(OESelftestTestCase):
value = tinfoil.run_command('getVariable', 'TESTVAR')
self.assertEqual(value, 'specialvalue', 'Value set using config_data.setVar() is not reflected in config_data.getVar()')
- @OETestID(1884)
def test_datastore_operations(self):
with bb.tinfoil.Tinfoil() as tinfoil:
tinfoil.prepare(config_only=True)
@@ -190,8 +211,8 @@ class TinfoilTests(OESelftestTestCase):
self.assertEqual(value, 'origvalue', 'Variable renamed using config_data.renameVar() does not appear with new name')
# Test overrides
tinfoil.config_data.setVar('TESTVAR', 'original')
- tinfoil.config_data.setVar('TESTVAR_overrideone', 'one')
- tinfoil.config_data.setVar('TESTVAR_overridetwo', 'two')
+ tinfoil.config_data.setVar('TESTVAR:overrideone', 'one')
+ tinfoil.config_data.setVar('TESTVAR:overridetwo', 'two')
tinfoil.config_data.appendVar('OVERRIDES', ':overrideone')
value = tinfoil.config_data.getVar('TESTVAR')
self.assertEqual(value, 'one', 'Variable overrides not functioning correctly')
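The override assertions in test_datastore_operations rely on BitBake picking 'TESTVAR:overrideone' over plain 'TESTVAR' once 'overrideone' is in OVERRIDES. A toy, non-BitBake illustration of that selection (real override resolution is more involved; this only shows the suffix lookup):

def getvar(store, name, active_overrides):
    # later entries in OVERRIDES take precedence, hence reversed()
    for override in reversed(active_overrides):
        key = "%s:%s" % (name, override)
        if key in store:
            return store[key]
    return store.get(name)

store = {
    "TESTVAR": "original",
    "TESTVAR:overrideone": "one",
    "TESTVAR:overridetwo": "two",
}
print(getvar(store, "TESTVAR", ["overrideone"]))   # -> "one"
print(getvar(store, "TESTVAR", []))                # -> "original"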
diff --git a/meta/lib/oeqa/selftest/cases/usergrouptests.py b/meta/lib/oeqa/selftest/cases/usergrouptests.py
new file mode 100644
index 0000000000..3c59b0f290
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/usergrouptests.py
@@ -0,0 +1,57 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import shutil
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+from oeqa.utils.commands import bitbake, get_bb_var, get_test_layer
+
+class UserGroupTests(OESelftestTestCase):
+ def test_group_from_dep_package(self):
+ self.logger.info("Building creategroup2")
+ bitbake(' creategroup2 creategroup1')
+ bitbake(' creategroup2 creategroup1 -c clean')
+ self.logger.info("Packaging creategroup2")
+ self.assertTrue(bitbake(' creategroup2 -c package'))
+
+ def test_add_task_between_p_sysroot_and_package(self):
+ # Test for YOCTO #14961
+ self.assertTrue(bitbake('useraddbadtask -C fetch'))
+
+ def test_postinst_order(self):
+ self.logger.info("Building dcreategroup")
+ self.assertTrue(bitbake(' dcreategroup'))
+
+ def test_static_useradd_from_dynamic(self):
+ metaselftestpath = get_test_layer()
+ self.logger.info("Building core-image-minimal to generate passwd/group file")
+ bitbake(' core-image-minimal')
+ self.logger.info("Setting up useradd-staticids")
+ repropassdir = os.path.join(metaselftestpath, "conf/include")
+ os.makedirs(repropassdir)
+ etcdir = os.path.join(get_bb_var("TMPDIR"), "work",
+ get_bb_var("MACHINE").replace("-", "_") + "-poky-linux",
+ "core-image-minimal/1.0/rootfs/etc")
+ shutil.copy(os.path.join(etcdir, "passwd"), os.path.join(repropassdir, "reproducable-passwd"))
+ shutil.copy(os.path.join(etcdir, "group"), os.path.join(repropassdir, "reproducable-group"))
+ # Copy the original local.conf
+ shutil.copyfile(os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf'), os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf.orig'))
+
+ self.write_config("USERADDEXTENSION = \"useradd-staticids\"")
+ self.write_config("USERADD_ERROR_DYNAMIC ??= \"error\"")
+ self.write_config("USERADD_UID_TABLES += \"conf/include/reproducible-passwd\"")
+ self.write_config("USERADD_GID_TABLES += \"conf/include/reproducible-group\"")
+ self.logger.info("Rebuild with staticids")
+ bitbake(' core-image-minimal')
+ shutil.copyfile(os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf.orig'), os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf'))
+ self.logger.info("Rebuild without staticids")
+ bitbake(' core-image-minimal')
+ self.write_config("USERADDEXTENSION = \"useradd-staticids\"")
+ self.write_config("USERADD_ERROR_DYNAMIC ??= \"error\"")
+ self.write_config("USERADD_UID_TABLES += \"files/static-passwd\"")
+ self.write_config("USERADD_GID_TABLES += \"files/static-group\"")
+ self.logger.info("Rebuild with other staticids")
+ self.assertTrue(bitbake(' core-image-minimal'))
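The static-IDs rebuild above feeds passwd/group files from a previous image back in via USERADD_UID_TABLES/USERADD_GID_TABLES. A minimal sketch of reading such a passwd-format table into a name-to-UID map (the file name is only an example; this is not the useradd-staticids implementation):

def load_uid_table(path):
    table = {}
    with open(path) as f:
        for line in f:
            line = line.strip()
            if not line or line.startswith("#"):
                continue
            # passwd format: name:passwd:uid:gid:comment:home:shell
            fields = line.split(":")
            table[fields[0]] = int(fields[2])
    return table

# e.g. load_uid_table("conf/include/reproducible-passwd")["root"] -> 0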
diff --git a/meta/lib/oeqa/selftest/cases/wic.py b/meta/lib/oeqa/selftest/cases/wic.py
index 651d575dc3..b616759209 100644
--- a/meta/lib/oeqa/selftest/cases/wic.py
+++ b/meta/lib/oeqa/selftest/cases/wic.py
@@ -1,22 +1,7 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (c) 2015, Intel Corporation.
-# All rights reserved.
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
#
# AUTHORS
# Ed Bartosh <ed.bartosh@linux.intel.com>
@@ -26,243 +11,306 @@
import os
import sys
import unittest
+import hashlib
from glob import glob
from shutil import rmtree, copy
-from functools import wraps, lru_cache
from tempfile import NamedTemporaryFile
+from tempfile import TemporaryDirectory
from oeqa.selftest.case import OESelftestTestCase
+from oeqa.core.decorator import OETestTag
+from oeqa.core.decorator.data import skipIfNotArch
from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu
-from oeqa.core.decorator.oeid import OETestID
-
-
-@lru_cache(maxsize=32)
-def get_host_arch(recipe):
- """A cached call to get_bb_var('HOST_ARCH', <recipe>)"""
- return get_bb_var('HOST_ARCH', recipe)
-def only_for_arch(archs, image='core-image-minimal'):
- """Decorator for wrapping test cases that can be run only for specific target
- architectures. A list of compatible architectures is passed in `archs`.
- Current architecture will be determined by parsing bitbake output for
- `image` recipe.
+def extract_files(debugfs_output):
"""
- def wrapper(func):
- @wraps(func)
- def wrapped_f(*args, **kwargs):
- arch = get_host_arch(image)
- if archs and arch not in archs:
- raise unittest.SkipTest("Testcase arch dependency not met: %s" % arch)
- return func(*args, **kwargs)
- wrapped_f.__name__ = func.__name__
- return wrapped_f
- return wrapper
-
-
-class Wic(OESelftestTestCase):
+ extract file names from the output of debugfs -R 'ls -p',
+ which looks like this:
+
+ /2/040755/0/0/.//\n
+ /2/040755/0/0/..//\n
+ /11/040700/0/0/lost+found^M//\n
+ /12/040755/1002/1002/run//\n
+ /13/040755/1002/1002/sys//\n
+ /14/040755/1002/1002/bin//\n
+ /80/040755/1002/1002/var//\n
+ /92/040755/1002/1002/tmp//\n
+ """
+ # NOTE the occasional ^M in file names
+ return [line.split('/')[5].strip() for line in \
+ debugfs_output.strip().split('/\n')]
+
+def files_own_by_root(debugfs_output):
+ for line in debugfs_output.strip().split('/\n'):
+ if line.split('/')[3:5] != ['0', '0']:
+ print(debugfs_output)
+ return False
+ return True
+
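A quick usage example for the two helpers above, assuming they are in scope; the sample output is adapted from the extract_files() docstring, with owner/group forced to 0/0 so files_own_by_root() returns True:

sample = (
    "/2/040755/0/0/.//\n"
    "/2/040755/0/0/..//\n"
    "/11/040700/0/0/lost+found//\n"
    "/12/040755/0/0/run//\n"
)
print(extract_files(sample))      # ['.', '..', 'lost+found', 'run']
print(files_own_by_root(sample))  # True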
+class WicTestCase(OESelftestTestCase):
"""Wic test class."""
- resultdir = "/var/tmp/wic.oe-selftest/"
image_is_ready = False
- native_sysroot = None
wicenv_cache = {}
def setUpLocal(self):
"""This code is executed before each test method."""
- super(Wic, self).setUpLocal()
- if not self.native_sysroot:
- Wic.native_sysroot = get_bb_var('STAGING_DIR_NATIVE', 'wic-tools')
+ self.resultdir = os.path.join(self.builddir, "wic-tmp")
+ super(WicTestCase, self).setUpLocal()
# Do this here instead of in setUpClass as the base setUp does some
# clean up which can result in the native tools built earlier in
# setUpClass being unavailable.
- if not Wic.image_is_ready:
- if get_bb_var('USE_NLS') == 'yes':
- bitbake('wic-tools')
- else:
- self.skipTest('wic-tools cannot be built due its (intltool|gettext)-native dependency and NLS disable')
-
- bitbake('core-image-minimal')
- Wic.image_is_ready = True
+ if not WicTestCase.image_is_ready:
+ if self.td['USE_NLS'] != 'yes':
+ self.skipTest('wic-tools needs USE_NLS=yes')
+ bitbake('wic-tools core-image-minimal core-image-minimal-mtdutils')
+ WicTestCase.image_is_ready = True
rmtree(self.resultdir, ignore_errors=True)
def tearDownLocal(self):
"""Remove resultdir as it may contain images."""
rmtree(self.resultdir, ignore_errors=True)
- super(Wic, self).tearDownLocal()
+ super(WicTestCase, self).tearDownLocal()
- @OETestID(1552)
+ def _get_image_env_path(self, image):
+ """Generate and obtain the path to <image>.env"""
+ if image not in WicTestCase.wicenv_cache:
+ bitbake('%s -c do_rootfs_wicenv' % image)
+ stdir = get_bb_var('STAGING_DIR', image)
+ machine = self.td["MACHINE"]
+ WicTestCase.wicenv_cache[image] = os.path.join(stdir, machine, 'imgdata')
+ return WicTestCase.wicenv_cache[image]
+
+class CLITests(OESelftestTestCase):
def test_version(self):
"""Test wic --version"""
- self.assertEqual(0, runCmd('wic --version').status)
+ runCmd('wic --version')
- @OETestID(1208)
def test_help(self):
"""Test wic --help and wic -h"""
- self.assertEqual(0, runCmd('wic --help').status)
- self.assertEqual(0, runCmd('wic -h').status)
+ runCmd('wic --help')
+ runCmd('wic -h')
- @OETestID(1209)
def test_createhelp(self):
"""Test wic create --help"""
- self.assertEqual(0, runCmd('wic create --help').status)
+ runCmd('wic create --help')
- @OETestID(1210)
def test_listhelp(self):
"""Test wic list --help"""
- self.assertEqual(0, runCmd('wic list --help').status)
+ runCmd('wic list --help')
- @OETestID(1553)
def test_help_create(self):
"""Test wic help create"""
- self.assertEqual(0, runCmd('wic help create').status)
+ runCmd('wic help create')
- @OETestID(1554)
def test_help_list(self):
"""Test wic help list"""
- self.assertEqual(0, runCmd('wic help list').status)
+ runCmd('wic help list')
- @OETestID(1215)
def test_help_overview(self):
"""Test wic help overview"""
- self.assertEqual(0, runCmd('wic help overview').status)
+ runCmd('wic help overview')
- @OETestID(1216)
def test_help_plugins(self):
"""Test wic help plugins"""
- self.assertEqual(0, runCmd('wic help plugins').status)
+ runCmd('wic help plugins')
- @OETestID(1217)
def test_help_kickstart(self):
"""Test wic help kickstart"""
- self.assertEqual(0, runCmd('wic help kickstart').status)
+ runCmd('wic help kickstart')
- @OETestID(1555)
def test_list_images(self):
"""Test wic list images"""
- self.assertEqual(0, runCmd('wic list images').status)
+ runCmd('wic list images')
- @OETestID(1556)
def test_list_source_plugins(self):
"""Test wic list source-plugins"""
- self.assertEqual(0, runCmd('wic list source-plugins').status)
+ runCmd('wic list source-plugins')
- @OETestID(1557)
def test_listed_images_help(self):
"""Test wic listed images help"""
output = runCmd('wic list images').output
imagelist = [line.split()[0] for line in output.splitlines()]
for image in imagelist:
- self.assertEqual(0, runCmd('wic list %s help' % image).status)
+ runCmd('wic list %s help' % image)
- @OETestID(1213)
def test_unsupported_subcommand(self):
"""Test unsupported subcommand"""
self.assertNotEqual(0, runCmd('wic unsupported', ignore_status=True).status)
- @OETestID(1214)
def test_no_command(self):
"""Test wic without command"""
self.assertEqual(1, runCmd('wic', ignore_status=True).status)
- @OETestID(1211)
+class Wic(WicTestCase):
+ def test_skip_kernel_install(self):
+ """Test the functionality of not installing the kernel in the boot directory using the wic plugin"""
+ # create a temporary file for the WKS content
+ with NamedTemporaryFile("w", suffix=".wks") as wks:
+ wks.write(
+ 'part --source bootimg-efi '
+ '--sourceparams="loader=grub-efi,install-kernel-into-boot-dir=false" '
+ '--label boot --active\n'
+ )
+ wks.flush()
+ # create a temporary directory to extract the disk image to
+ with TemporaryDirectory() as tmpdir:
+ img = 'core-image-minimal'
+ # build the image using the WKS file
+ cmd = "wic create %s -e %s -o %s" % (
+ wks.name, img, self.resultdir)
+ runCmd(cmd)
+ wksname = os.path.splitext(os.path.basename(wks.name))[0]
+ out = glob(os.path.join(
+ self.resultdir, "%s-*.direct" % wksname))
+ self.assertEqual(1, len(out))
+ sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
+ # extract the content of the disk image to the temporary directory
+ cmd = "wic cp %s:1 %s -n %s" % (out[0], tmpdir, sysroot)
+ runCmd(cmd)
+ # check if the kernel is installed or not
+ kimgtype = get_bb_var('KERNEL_IMAGETYPE', img)
+ for file in os.listdir(tmpdir):
+ if file == kimgtype:
+ raise AssertionError(
+ "The kernel image '{}' was found in the partition".format(kimgtype)
+ )
+
+ def test_kernel_install(self):
+ """Test the installation of the kernel to the boot directory in the wic plugin"""
+ # create a temporary file for the WKS content
+ with NamedTemporaryFile("w", suffix=".wks") as wks:
+ wks.write(
+ 'part --source bootimg-efi '
+ '--sourceparams="loader=grub-efi,install-kernel-into-boot-dir=true" '
+ '--label boot --active\n'
+ )
+ wks.flush()
+ # create a temporary directory to extract the disk image to
+ with TemporaryDirectory() as tmpdir:
+ img = 'core-image-minimal'
+ # build the image using the WKS file
+ cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
+ runCmd(cmd)
+ wksname = os.path.splitext(os.path.basename(wks.name))[0]
+ out = glob(os.path.join(self.resultdir, "%s-*.direct" % wksname))
+ self.assertEqual(1, len(out))
+ sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
+ # extract the content of the disk image to the temporary directory
+ cmd = "wic cp %s:1 %s -n %s" % (out[0], tmpdir, sysroot)
+ runCmd(cmd)
+ # check if the kernel is installed or not
+ kimgtype = get_bb_var('KERNEL_IMAGETYPE', img)
+ found = False
+ for file in os.listdir(tmpdir):
+ if file == kimgtype:
+ found = True
+ break
+ self.assertTrue(
+ found, "The kernel image '{}' was not found in the boot partition".format(kimgtype)
+ )
+
def test_build_image_name(self):
"""Test wic create wictestdisk --image-name=core-image-minimal"""
cmd = "wic create wictestdisk --image-name=core-image-minimal -o %s" % self.resultdir
- self.assertEqual(0, runCmd(cmd).status)
- self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct")))
+ runCmd(cmd)
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
- @OETestID(1157)
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_gpt_image(self):
"""Test creation of core-image-minimal with gpt table and UUID boot"""
cmd = "wic create directdisk-gpt --image-name core-image-minimal -o %s" % self.resultdir
- self.assertEqual(0, runCmd(cmd).status)
- self.assertEqual(1, len(glob(self.resultdir + "directdisk-*.direct")))
+ runCmd(cmd)
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "directdisk-*.direct"))))
- @OETestID(1346)
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_iso_image(self):
"""Test creation of hybrid iso image with legacy and EFI boot"""
config = 'INITRAMFS_IMAGE = "core-image-minimal-initramfs"\n'\
- 'MACHINE_FEATURES_append = " efi"\n'\
- 'DEPENDS_pn-core-image-minimal += "syslinux"\n'
+ 'MACHINE_FEATURES:append = " efi"\n'\
+ 'DEPENDS:pn-core-image-minimal += "syslinux"\n'
self.append_config(config)
- bitbake('core-image-minimal')
+ bitbake('core-image-minimal core-image-minimal-initramfs')
self.remove_config(config)
cmd = "wic create mkhybridiso --image-name core-image-minimal -o %s" % self.resultdir
- self.assertEqual(0, runCmd(cmd).status)
- self.assertEqual(1, len(glob(self.resultdir + "HYBRID_ISO_IMG-*.direct")))
- self.assertEqual(1, len(glob(self.resultdir + "HYBRID_ISO_IMG-*.iso")))
+ runCmd(cmd)
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "HYBRID_ISO_IMG-*.direct"))))
+ self.assertEqual(1, len(glob(os.path.join (self.resultdir, "HYBRID_ISO_IMG-*.iso"))))
- @OETestID(1348)
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_qemux86_directdisk(self):
"""Test creation of qemux-86-directdisk image"""
cmd = "wic create qemux86-directdisk -e core-image-minimal -o %s" % self.resultdir
- self.assertEqual(0, runCmd(cmd).status)
- self.assertEqual(1, len(glob(self.resultdir + "qemux86-directdisk-*direct")))
+ runCmd(cmd)
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "qemux86-directdisk-*direct"))))
- @OETestID(1350)
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64', 'aarch64'])
def test_mkefidisk(self):
"""Test creation of mkefidisk image"""
cmd = "wic create mkefidisk -e core-image-minimal -o %s" % self.resultdir
- self.assertEqual(0, runCmd(cmd).status)
- self.assertEqual(1, len(glob(self.resultdir + "mkefidisk-*direct")))
+ runCmd(cmd)
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "mkefidisk-*direct"))))
- @OETestID(1385)
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_bootloader_config(self):
"""Test creation of directdisk-bootloader-config image"""
- config = 'DEPENDS_pn-core-image-minimal += "syslinux"\n'
+ config = 'DEPENDS:pn-core-image-minimal += "syslinux"\n'
self.append_config(config)
bitbake('core-image-minimal')
self.remove_config(config)
cmd = "wic create directdisk-bootloader-config -e core-image-minimal -o %s" % self.resultdir
- self.assertEqual(0, runCmd(cmd).status)
- self.assertEqual(1, len(glob(self.resultdir + "directdisk-bootloader-config-*direct")))
+ runCmd(cmd)
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "directdisk-bootloader-config-*direct"))))
- @OETestID(1560)
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64', 'aarch64'])
def test_systemd_bootdisk(self):
"""Test creation of systemd-bootdisk image"""
- config = 'MACHINE_FEATURES_append = " efi"\n'
+ config = 'MACHINE_FEATURES:append = " efi"\n'
self.append_config(config)
bitbake('core-image-minimal')
self.remove_config(config)
cmd = "wic create systemd-bootdisk -e core-image-minimal -o %s" % self.resultdir
- self.assertEqual(0, runCmd(cmd).status)
- self.assertEqual(1, len(glob(self.resultdir + "systemd-bootdisk-*direct")))
+ runCmd(cmd)
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "systemd-bootdisk-*direct"))))
+
+ def test_efi_bootpart(self):
+ """Test creation of efi-bootpart image"""
+ cmd = "wic create mkefidisk -e core-image-minimal -o %s" % self.resultdir
+ kimgtype = get_bb_var('KERNEL_IMAGETYPE', 'core-image-minimal')
+ self.append_config('IMAGE_EFI_BOOT_FILES = "%s;kernel"\n' % kimgtype)
+ runCmd(cmd)
+ sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
+ images = glob(os.path.join(self.resultdir, "mkefidisk-*.direct"))
+ result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot))
+ self.assertIn("kernel",result.output)
- @OETestID(1561)
def test_sdimage_bootpart(self):
"""Test creation of sdimage-bootpart image"""
cmd = "wic create sdimage-bootpart -e core-image-minimal -o %s" % self.resultdir
kimgtype = get_bb_var('KERNEL_IMAGETYPE', 'core-image-minimal')
self.write_config('IMAGE_BOOT_FILES = "%s"\n' % kimgtype)
- self.assertEqual(0, runCmd(cmd).status)
- self.assertEqual(1, len(glob(self.resultdir + "sdimage-bootpart-*direct")))
+ runCmd(cmd)
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "sdimage-bootpart-*direct"))))
- @OETestID(1562)
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ # TODO this doesn't have to be x86-specific
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_default_output_dir(self):
"""Test default output location"""
for fname in glob("directdisk-*.direct"):
os.remove(fname)
- config = 'DEPENDS_pn-core-image-minimal += "syslinux"\n'
+ config = 'DEPENDS:pn-core-image-minimal += "syslinux"\n'
self.append_config(config)
bitbake('core-image-minimal')
self.remove_config(config)
cmd = "wic create directdisk -e core-image-minimal"
- self.assertEqual(0, runCmd(cmd).status)
+ runCmd(cmd)
self.assertEqual(1, len(glob("directdisk-*.direct")))
- @OETestID(1212)
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_build_artifacts(self):
"""Test wic create directdisk providing all artifacts."""
bb_vars = get_bb_vars(['STAGING_DATADIR', 'RECIPE_SYSROOT_NATIVE'],
@@ -271,40 +319,35 @@ class Wic(OESelftestTestCase):
'core-image-minimal'))
bbvars = {key.lower(): value for key, value in bb_vars.items()}
bbvars['resultdir'] = self.resultdir
- status = runCmd("wic create directdisk "
+ runCmd("wic create directdisk "
"-b %(staging_datadir)s "
"-k %(deploy_dir_image)s "
"-n %(recipe_sysroot_native)s "
"-r %(image_rootfs)s "
- "-o %(resultdir)s" % bbvars).status
- self.assertEqual(0, status)
- self.assertEqual(1, len(glob(self.resultdir + "directdisk-*.direct")))
+ "-o %(resultdir)s" % bbvars)
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "directdisk-*.direct"))))
- @OETestID(1264)
def test_compress_gzip(self):
"""Test compressing an image with gzip"""
- self.assertEqual(0, runCmd("wic create wictestdisk "
+ runCmd("wic create wictestdisk "
"--image-name core-image-minimal "
- "-c gzip -o %s" % self.resultdir).status)
- self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct.gz")))
+ "-c gzip -o %s" % self.resultdir)
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct.gz"))))
- @OETestID(1265)
def test_compress_bzip2(self):
"""Test compressing an image with bzip2"""
- self.assertEqual(0, runCmd("wic create wictestdisk "
+ runCmd("wic create wictestdisk "
"--image-name=core-image-minimal "
- "-c bzip2 -o %s" % self.resultdir).status)
- self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct.bz2")))
+ "-c bzip2 -o %s" % self.resultdir)
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct.bz2"))))
- @OETestID(1266)
def test_compress_xz(self):
"""Test compressing an image with xz"""
- self.assertEqual(0, runCmd("wic create wictestdisk "
+ runCmd("wic create wictestdisk "
"--image-name=core-image-minimal "
- "--compress-with=xz -o %s" % self.resultdir).status)
- self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct.xz")))
+ "--compress-with=xz -o %s" % self.resultdir)
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct.xz"))))
- @OETestID(1267)
def test_wrong_compressor(self):
"""Test how wic breaks if wrong compressor is provided"""
self.assertEqual(2, runCmd("wic create wictestdisk "
@@ -312,70 +355,65 @@ class Wic(OESelftestTestCase):
"-c wrong -o %s" % self.resultdir,
ignore_status=True).status)
- @OETestID(1558)
def test_debug_short(self):
"""Test -D option"""
- self.assertEqual(0, runCmd("wic create wictestdisk "
+ runCmd("wic create wictestdisk "
"--image-name=core-image-minimal "
- "-D -o %s" % self.resultdir).status)
- self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct")))
+ "-D -o %s" % self.resultdir)
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "tmp.wic*"))))
- @OETestID(1658)
def test_debug_long(self):
"""Test --debug option"""
- self.assertEqual(0, runCmd("wic create wictestdisk "
+ runCmd("wic create wictestdisk "
"--image-name=core-image-minimal "
- "--debug -o %s" % self.resultdir).status)
- self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct")))
+ "--debug -o %s" % self.resultdir)
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "tmp.wic*"))))
- @OETestID(1563)
def test_skip_build_check_short(self):
"""Test -s option"""
- self.assertEqual(0, runCmd("wic create wictestdisk "
+ runCmd("wic create wictestdisk "
"--image-name=core-image-minimal "
- "-s -o %s" % self.resultdir).status)
- self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct")))
+ "-s -o %s" % self.resultdir)
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
- @OETestID(1671)
def test_skip_build_check_long(self):
"""Test --skip-build-check option"""
- self.assertEqual(0, runCmd("wic create wictestdisk "
+ runCmd("wic create wictestdisk "
"--image-name=core-image-minimal "
"--skip-build-check "
- "--outdir %s" % self.resultdir).status)
- self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct")))
+ "--outdir %s" % self.resultdir)
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
- @OETestID(1564)
def test_build_rootfs_short(self):
"""Test -f option"""
- self.assertEqual(0, runCmd("wic create wictestdisk "
+ runCmd("wic create wictestdisk "
"--image-name=core-image-minimal "
- "-f -o %s" % self.resultdir).status)
- self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct")))
+ "-f -o %s" % self.resultdir)
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
- @OETestID(1656)
def test_build_rootfs_long(self):
"""Test --build-rootfs option"""
- self.assertEqual(0, runCmd("wic create wictestdisk "
+ runCmd("wic create wictestdisk "
"--image-name=core-image-minimal "
"--build-rootfs "
- "--outdir %s" % self.resultdir).status)
- self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct")))
+ "--outdir %s" % self.resultdir)
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
- @OETestID(1268)
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ # TODO this doesn't have to be x86-specific
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_rootfs_indirect_recipes(self):
"""Test usage of rootfs plugin with rootfs recipes"""
- status = runCmd("wic create directdisk-multi-rootfs "
+ runCmd("wic create directdisk-multi-rootfs "
"--image-name=core-image-minimal "
"--rootfs rootfs1=core-image-minimal "
"--rootfs rootfs2=core-image-minimal "
- "--outdir %s" % self.resultdir).status
- self.assertEqual(0, status)
- self.assertEqual(1, len(glob(self.resultdir + "directdisk-multi-rootfs*.direct")))
+ "--outdir %s" % self.resultdir)
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "directdisk-multi-rootfs*.direct"))))
- @OETestID(1269)
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ # TODO this doesn't have to be x86-specific
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_rootfs_artifacts(self):
"""Test usage of rootfs plugin with rootfs paths"""
bb_vars = get_bb_vars(['STAGING_DATADIR', 'RECIPE_SYSROOT_NATIVE'],
@@ -385,17 +423,15 @@ class Wic(OESelftestTestCase):
bbvars = {key.lower(): value for key, value in bb_vars.items()}
bbvars['wks'] = "directdisk-multi-rootfs"
bbvars['resultdir'] = self.resultdir
- status = runCmd("wic create %(wks)s "
+ runCmd("wic create %(wks)s "
"--bootimg-dir=%(staging_datadir)s "
"--kernel-dir=%(deploy_dir_image)s "
"--native-sysroot=%(recipe_sysroot_native)s "
"--rootfs-dir rootfs1=%(image_rootfs)s "
"--rootfs-dir rootfs2=%(image_rootfs)s "
- "--outdir %(resultdir)s" % bbvars).status
- self.assertEqual(0, status)
- self.assertEqual(1, len(glob(self.resultdir + "%(wks)s-*.direct" % bbvars)))
+ "--outdir %(resultdir)s" % bbvars)
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "%(wks)s-*.direct" % bbvars))))
- @OETestID(1661)
def test_exclude_path(self):
"""Test --exclude-path wks option."""
@@ -411,18 +447,17 @@ part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path usr
part /usr --source rootfs --ondisk mmcblk0 --fstype=ext4 --rootfs-dir %s/usr
part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --rootfs-dir %s/usr"""
% (rootfs_dir, rootfs_dir))
- self.assertEqual(0, runCmd("wic create %s -e core-image-minimal -o %s" \
- % (wks_file, self.resultdir)).status)
+ runCmd("wic create %s -e core-image-minimal -o %s" \
+ % (wks_file, self.resultdir))
os.remove(wks_file)
- wicout = glob(self.resultdir + "%s-*direct" % 'temp')
+ wicout = glob(os.path.join(self.resultdir, "%s-*direct" % 'temp'))
self.assertEqual(1, len(wicout))
wicimg = wicout[0]
# verify partition size with wic
res = runCmd("parted -m %s unit b p 2>/dev/null" % wicimg)
- self.assertEqual(0, res.status)
# parse parted output which looks like this:
# BYT;\n
@@ -438,32 +473,13 @@ part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --r
self.assertEqual(7, len(partln))
start = int(partln[1].rstrip("B")) / 512
length = int(partln[3].rstrip("B")) / 512
- self.assertEqual(0, runCmd("dd if=%s of=%s skip=%d count=%d" %
- (wicimg, part_file, start, length)).status)
-
- def extract_files(debugfs_output):
- """
- extract file names from the output of debugfs -R 'ls -p',
- which looks like this:
-
- /2/040755/0/0/.//\n
- /2/040755/0/0/..//\n
- /11/040700/0/0/lost+found^M//\n
- /12/040755/1002/1002/run//\n
- /13/040755/1002/1002/sys//\n
- /14/040755/1002/1002/bin//\n
- /80/040755/1002/1002/var//\n
- /92/040755/1002/1002/tmp//\n
- """
- # NOTE the occasional ^M in file names
- return [line.split('/')[5].strip() for line in \
- debugfs_output.strip().split('/\n')]
+ runCmd("dd if=%s of=%s skip=%d count=%d" %
+ (wicimg, part_file, start, length))
# Test partition 1, should contain the normal root directories, except
# /usr.
res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % \
os.path.join(self.resultdir, "selftest_img.part1"))
- self.assertEqual(0, res.status)
files = extract_files(res.output)
self.assertIn("etc", files)
self.assertNotIn("usr", files)
@@ -472,7 +488,6 @@ part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --r
# directories.
res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % \
os.path.join(self.resultdir, "selftest_img.part2"))
- self.assertEqual(0, res.status)
files = extract_files(res.output)
self.assertNotIn("etc", files)
self.assertNotIn("usr", files)
@@ -482,7 +497,6 @@ part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --r
# directory, but not the files inside it.
res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % \
os.path.join(self.resultdir, "selftest_img.part3"))
- self.assertEqual(0, res.status)
files = extract_files(res.output)
self.assertNotIn("etc", files)
self.assertNotIn("usr", files)
@@ -490,7 +504,6 @@ part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --r
self.assertIn("bin", files)
res = runCmd("debugfs -R 'ls -p bin' %s 2>/dev/null" % \
os.path.join(self.resultdir, "selftest_img.part3"))
- self.assertEqual(0, res.status)
files = extract_files(res.output)
self.assertIn(".", files)
self.assertIn("..", files)
@@ -503,7 +516,104 @@ part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --r
finally:
os.environ['PATH'] = oldpath
- @OETestID(1662)
+ def test_include_path(self):
+ """Test --include-path wks option."""
+
+ oldpath = os.environ['PATH']
+ os.environ['PATH'] = get_bb_var("PATH", "wic-tools")
+
+ try:
+ include_path = os.path.join(self.resultdir, 'test-include')
+ os.makedirs(include_path)
+ with open(os.path.join(include_path, 'test-file'), 'w') as t:
+ t.write("test\n")
+ wks_file = os.path.join(include_path, 'temp.wks')
+ with open(wks_file, 'w') as wks:
+ rootfs_dir = get_bb_var('IMAGE_ROOTFS', 'core-image-minimal')
+ wks.write("""
+part /part1 --source rootfs --ondisk mmcblk0 --fstype=ext4
+part /part2 --source rootfs --ondisk mmcblk0 --fstype=ext4 --include-path %s"""
+ % (include_path))
+ runCmd("wic create %s -e core-image-minimal -o %s" \
+ % (wks_file, self.resultdir))
+
+ part1 = glob(os.path.join(self.resultdir, 'temp-*.direct.p1'))[0]
+ part2 = glob(os.path.join(self.resultdir, 'temp-*.direct.p2'))[0]
+
+ # Test partition 1, should not contain 'test-file'
+ res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % (part1))
+ files = extract_files(res.output)
+ self.assertNotIn('test-file', files)
+ self.assertEqual(True, files_own_by_root(res.output))
+
+ # Test partition 2, should contain 'test-file'
+ res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % (part2))
+ files = extract_files(res.output)
+ self.assertIn('test-file', files)
+ self.assertEqual(True, files_own_by_root(res.output))
+
+ finally:
+ os.environ['PATH'] = oldpath
+
+ def test_include_path_embeded(self):
+ """Test --include-path wks option."""
+
+ oldpath = os.environ['PATH']
+ os.environ['PATH'] = get_bb_var("PATH", "wic-tools")
+
+ try:
+ include_path = os.path.join(self.resultdir, 'test-include')
+ os.makedirs(include_path)
+ with open(os.path.join(include_path, 'test-file'), 'w') as t:
+ t.write("test\n")
+ wks_file = os.path.join(include_path, 'temp.wks')
+ with open(wks_file, 'w') as wks:
+ wks.write("""
+part / --source rootfs --fstype=ext4 --include-path %s --include-path core-image-minimal-mtdutils export/"""
+ % (include_path))
+ runCmd("wic create %s -e core-image-minimal -o %s" \
+ % (wks_file, self.resultdir))
+
+ part1 = glob(os.path.join(self.resultdir, 'temp-*.direct.p1'))[0]
+
+ res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % (part1))
+ files = extract_files(res.output)
+ self.assertIn('test-file', files)
+ self.assertEqual(True, files_own_by_root(res.output))
+
+ res = runCmd("debugfs -R 'ls -p /export/etc/' %s 2>/dev/null" % (part1))
+ files = extract_files(res.output)
+ self.assertIn('passwd', files)
+ self.assertEqual(True, files_own_by_root(res.output))
+
+ finally:
+ os.environ['PATH'] = oldpath
+
+ def test_include_path_errors(self):
+ """Test --include-path wks option error handling."""
+ wks_file = 'temp.wks'
+
+ # Absolute argument.
+ with open(wks_file, 'w') as wks:
+ wks.write("part / --source rootfs --fstype=ext4 --include-path core-image-minimal-mtdutils /export")
+ self.assertNotEqual(0, runCmd("wic create %s -e core-image-minimal -o %s" \
+ % (wks_file, self.resultdir), ignore_status=True).status)
+ os.remove(wks_file)
+
+ # Argument pointing to parent directory.
+ with open(wks_file, 'w') as wks:
+ wks.write("part / --source rootfs --fstype=ext4 --include-path core-image-minimal-mtdutils ././..")
+ self.assertNotEqual(0, runCmd("wic create %s -e core-image-minimal -o %s" \
+ % (wks_file, self.resultdir), ignore_status=True).status)
+ os.remove(wks_file)
+
+ # Too many arguments (three) passed to --include-path.
+ with open(wks_file, 'w') as wks:
+ wks.write("part / --source rootfs --fstype=ext4 --include-path core-image-minimal-mtdutils export/ dummy")
+ self.assertNotEqual(0, runCmd("wic create %s -e core-image-minimal -o %s" \
+ % (wks_file, self.resultdir), ignore_status=True).status)
+ os.remove(wks_file)
+
def test_exclude_path_errors(self):
"""Test --exclude-path wks option error handling."""
wks_file = 'temp.wks'
@@ -522,35 +632,230 @@ part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --r
% (wks_file, self.resultdir), ignore_status=True).status)
os.remove(wks_file)
- @OETestID(1496)
+ def test_permissions(self):
+ """Test permissions are respected"""
+
+ # prepare wicenv and rootfs
+ bitbake('core-image-minimal core-image-minimal-mtdutils -c do_rootfs_wicenv')
+
+ oldpath = os.environ['PATH']
+ os.environ['PATH'] = get_bb_var("PATH", "wic-tools")
+
+ t_normal = """
+part / --source rootfs --fstype=ext4
+"""
+ t_exclude = """
+part / --source rootfs --fstype=ext4 --exclude-path=home
+"""
+ t_multi = """
+part / --source rootfs --ondisk sda --fstype=ext4
+part /export --source rootfs --rootfs=core-image-minimal-mtdutils --fstype=ext4
+"""
+ t_change = """
+part / --source rootfs --ondisk sda --fstype=ext4 --exclude-path=etc/
+part /etc --source rootfs --fstype=ext4 --change-directory=etc
+"""
+ tests = [t_normal, t_exclude, t_multi, t_change]
+
+ try:
+ for test in tests:
+ include_path = os.path.join(self.resultdir, 'test-include')
+ os.makedirs(include_path)
+ wks_file = os.path.join(include_path, 'temp.wks')
+ with open(wks_file, 'w') as wks:
+ wks.write(test)
+ runCmd("wic create %s -e core-image-minimal -o %s" \
+ % (wks_file, self.resultdir))
+
+ for part in glob(os.path.join(self.resultdir, 'temp-*.direct.p*')):
+ res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % (part))
+ self.assertEqual(True, files_own_by_root(res.output))
+
+ config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "%s"\n' % wks_file
+ self.append_config(config)
+ bitbake('core-image-minimal')
+ tmpdir = os.path.join(get_bb_var('WORKDIR', 'core-image-minimal'),'build-wic')
+
+ # check each partition for permission
+ for part in glob(os.path.join(tmpdir, 'temp-*.direct.p*')):
+ res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % (part))
+ self.assertTrue(files_own_by_root(res.output),
+ msg='File permissions incorrect using wks "%s"' % test)
+
+ # clean config and result directory for next cases
+ self.remove_config(config)
+ rmtree(self.resultdir, ignore_errors=True)
+
+ finally:
+ os.environ['PATH'] = oldpath
+
+ def test_change_directory(self):
+ """Test --change-directory wks option."""
+
+ oldpath = os.environ['PATH']
+ os.environ['PATH'] = get_bb_var("PATH", "wic-tools")
+
+ try:
+ include_path = os.path.join(self.resultdir, 'test-include')
+ os.makedirs(include_path)
+ wks_file = os.path.join(include_path, 'temp.wks')
+ with open(wks_file, 'w') as wks:
+ wks.write("part /etc --source rootfs --fstype=ext4 --change-directory=etc")
+ runCmd("wic create %s -e core-image-minimal -o %s" \
+ % (wks_file, self.resultdir))
+
+ part1 = glob(os.path.join(self.resultdir, 'temp-*.direct.p1'))[0]
+
+ res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % (part1))
+ files = extract_files(res.output)
+ self.assertIn('passwd', files)
+
+ finally:
+ os.environ['PATH'] = oldpath
+
+ def test_change_directory_errors(self):
+ """Test --change-directory wks option error handling."""
+ wks_file = 'temp.wks'
+
+ # Absolute argument.
+ with open(wks_file, 'w') as wks:
+ wks.write("part / --source rootfs --fstype=ext4 --change-directory /usr")
+ self.assertNotEqual(0, runCmd("wic create %s -e core-image-minimal -o %s" \
+ % (wks_file, self.resultdir), ignore_status=True).status)
+ os.remove(wks_file)
+
+ # Argument pointing to parent directory.
+ with open(wks_file, 'w') as wks:
+ wks.write("part / --source rootfs --fstype=ext4 --change-directory ././..")
+ self.assertNotEqual(0, runCmd("wic create %s -e core-image-minimal -o %s" \
+ % (wks_file, self.resultdir), ignore_status=True).status)
+ os.remove(wks_file)
+
+ def test_no_fstab_update(self):
+ """Test --no-fstab-update wks option."""
+
+ oldpath = os.environ['PATH']
+ os.environ['PATH'] = get_bb_var("PATH", "wic-tools")
+
+ # Get stock fstab from base-files recipe
+ bitbake('base-files -c do_install')
+ bf_fstab = os.path.join(get_bb_var('D', 'base-files'), 'etc', 'fstab')
+ self.assertEqual(True, os.path.exists(bf_fstab))
+ bf_fstab_md5sum = runCmd('md5sum %s 2>/dev/null' % bf_fstab).output.split(" ")[0]
+
+ try:
+ no_fstab_update_path = os.path.join(self.resultdir, 'test-no-fstab-update')
+ os.makedirs(no_fstab_update_path)
+ wks_file = os.path.join(no_fstab_update_path, 'temp.wks')
+ with open(wks_file, 'w') as wks:
+ wks.writelines(['part / --source rootfs --fstype=ext4 --label rootfs\n',
+ 'part /mnt/p2 --source rootfs --rootfs-dir=core-image-minimal ',
+ '--fstype=ext4 --label p2 --no-fstab-update\n'])
+ runCmd("wic create %s -e core-image-minimal -o %s" \
+ % (wks_file, self.resultdir))
+
+ part_fstab_md5sum = []
+ for i in range(1, 3):
+ part = glob(os.path.join(self.resultdir, 'temp-*.direct.p') + str(i))[0]
+ part_fstab = runCmd("debugfs -R 'cat etc/fstab' %s 2>/dev/null" % (part))
+ part_fstab_md5sum.append(hashlib.md5((part_fstab.output + "\n\n").encode('utf-8')).hexdigest())
+
+ # '/etc/fstab' in partition 2 should contain the same stock fstab file
+ # as the one installed by the base-files recipe.
+ self.assertEqual(bf_fstab_md5sum, part_fstab_md5sum[1])
+
+ # '/etc/fstab' in partition 1 should contain an updated fstab file.
+ self.assertNotEqual(bf_fstab_md5sum, part_fstab_md5sum[0])
+
+ finally:
+ os.environ['PATH'] = oldpath
+
+ def test_no_fstab_update_errors(self):
+ """Test --no-fstab-update wks option error handling."""
+ wks_file = 'temp.wks'
+
+ # Absolute argument.
+ with open(wks_file, 'w') as wks:
+ wks.write("part / --source rootfs --fstype=ext4 --no-fstab-update /etc")
+ self.assertNotEqual(0, runCmd("wic create %s -e core-image-minimal -o %s" \
+ % (wks_file, self.resultdir), ignore_status=True).status)
+ os.remove(wks_file)
+
+ # Argument pointing to parent directory.
+ with open(wks_file, 'w') as wks:
+ wks.write("part / --source rootfs --fstype=ext4 --no-fstab-update ././..")
+ self.assertNotEqual(0, runCmd("wic create %s -e core-image-minimal -o %s" \
+ % (wks_file, self.resultdir), ignore_status=True).status)
+ os.remove(wks_file)
+
+ def test_extra_space(self):
+ """Test --extra-space wks option."""
+ extraspace = 1024**3
+ runCmd("wic create wictestdisk "
+ "--image-name core-image-minimal "
+ "--extra-space %i -o %s" % (extraspace ,self.resultdir))
+ wicout = glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))
+ self.assertEqual(1, len(wicout))
+ size = os.path.getsize(wicout[0])
+ self.assertTrue(size > extraspace, msg="Extra space not present (%s vs %s)" % (size, extraspace))
+
+ def test_no_table(self):
+ """Test --no-table wks option."""
+ wks_file = 'temp.wks'
+
+ # Standalone partition without a partition table.
+ with open(wks_file, 'w') as wks:
+ wks.write("part testspace --no-table --fixed-size 16k --offset 4080k")
+ runCmd("wic create %s --image-name core-image-minimal -o %s" % (wks_file, self.resultdir))
+
+ wicout = glob(os.path.join(self.resultdir, "*.*"))
+
+ self.assertEqual(1, len(wicout))
+ size = os.path.getsize(wicout[0])
+ self.assertEqual(size, 4 * 1024 * 1024)
+
+ os.remove(wks_file)
+
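# Side note (not from this patch): the 4 MiB expectation in test_no_table above
# follows directly from its .wks line: the partition starts at a 4080 KiB offset
# and is 16 KiB long, so the image ends at 4080 KiB + 16 KiB = 4096 KiB,
# i.e. 4 * 1024 * 1024 bytes.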
+ def test_partition_hidden_attributes(self):
+ """Test --hidden wks option."""
+ wks_file = 'temp.wks'
+ sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
+ try:
+ with open(wks_file, 'w') as wks:
+ wks.write("""
+part / --source rootfs --fstype=ext4
+part / --source rootfs --fstype=ext4 --hidden
+bootloader --ptable gpt""")
+
+ runCmd("wic create %s -e core-image-minimal -o %s" \
+ % (wks_file, self.resultdir))
+ wicout = os.path.join(self.resultdir, "*.direct")
+
+ result = runCmd("%s/usr/sbin/sfdisk --part-attrs %s 1" % (sysroot, wicout))
+ self.assertEqual('', result.output)
+ result = runCmd("%s/usr/sbin/sfdisk --part-attrs %s 2" % (sysroot, wicout))
+ self.assertEqual('RequiredPartition', result.output)
+
+ finally:
+ os.remove(wks_file)
+
+
+class Wic2(WicTestCase):
+
def test_bmap_short(self):
"""Test generation of .bmap file -m option"""
cmd = "wic create wictestdisk -e core-image-minimal -m -o %s" % self.resultdir
- status = runCmd(cmd).status
- self.assertEqual(0, status)
- self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct")))
- self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct.bmap")))
+ runCmd(cmd)
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct"))))
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct.bmap"))))
- @OETestID(1655)
def test_bmap_long(self):
"""Test generation of .bmap file --bmap option"""
cmd = "wic create wictestdisk -e core-image-minimal --bmap -o %s" % self.resultdir
- status = runCmd(cmd).status
- self.assertEqual(0, status)
- self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct")))
- self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct.bmap")))
+ runCmd(cmd)
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct"))))
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct.bmap"))))
- def _get_image_env_path(self, image):
- """Generate and obtain the path to <image>.env"""
- if image not in self.wicenv_cache:
- self.assertEqual(0, bitbake('%s -c do_rootfs_wicenv' % image).status)
- bb_vars = get_bb_vars(['STAGING_DIR', 'MACHINE'], image)
- stdir = bb_vars['STAGING_DIR']
- machine = bb_vars['MACHINE']
- self.wicenv_cache[image] = os.path.join(stdir, machine, 'imgdata')
- return self.wicenv_cache[image]
-
- @OETestID(1347)
def test_image_env(self):
"""Test generation of <image>.env files."""
image = 'core-image-minimal'
@@ -560,96 +865,102 @@ part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --r
basename = bb_vars['IMAGE_BASENAME']
self.assertEqual(basename, image)
path = os.path.join(imgdatadir, basename) + '.env'
- self.assertTrue(os.path.isfile(path))
+ self.assertTrue(os.path.isfile(path), msg="File %s wasn't generated as expected" % path)
wicvars = set(bb_vars['WICVARS'].split())
# filter out optional variables
wicvars = wicvars.difference(('DEPLOY_DIR_IMAGE', 'IMAGE_BOOT_FILES',
- 'INITRD', 'INITRD_LIVE', 'ISODIR'))
+ 'INITRD', 'INITRD_LIVE', 'ISODIR','INITRAMFS_IMAGE',
+ 'INITRAMFS_IMAGE_BUNDLE', 'INITRAMFS_LINK_NAME',
+ 'APPEND', 'IMAGE_EFI_BOOT_FILES'))
with open(path) as envfile:
content = dict(line.split("=", 1) for line in envfile)
# test if variables used by wic present in the .env file
for var in wicvars:
self.assertTrue(var in content, "%s is not in .env file" % var)
- self.assertTrue(content[var])
+ self.assertTrue(content[var], "%s doesn't have a value (%s)" % (var, content[var]))
- @OETestID(1559)
def test_image_vars_dir_short(self):
"""Test image vars directory selection -v option"""
image = 'core-image-minimal'
imgenvdir = self._get_image_env_path(image)
native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "wic-tools")
- self.assertEqual(0, runCmd("wic create wictestdisk "
+ runCmd("wic create wictestdisk "
"--image-name=%s -v %s -n %s -o %s"
% (image, imgenvdir, native_sysroot,
- self.resultdir)).status)
- self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct")))
+ self.resultdir))
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct"))))
- @OETestID(1665)
def test_image_vars_dir_long(self):
"""Test image vars directory selection --vars option"""
image = 'core-image-minimal'
imgenvdir = self._get_image_env_path(image)
native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "wic-tools")
- self.assertEqual(0, runCmd("wic create wictestdisk "
+ runCmd("wic create wictestdisk "
"--image-name=%s "
"--vars %s "
"--native-sysroot %s "
"--outdir %s"
% (image, imgenvdir, native_sysroot,
- self.resultdir)).status)
- self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct")))
+ self.resultdir))
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct"))))
- @OETestID(1351)
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ # TODO this test could also work on aarch64
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_wic_image_type(self):
"""Test building wic images by bitbake"""
config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\
- 'MACHINE_FEATURES_append = " efi"\n'
+ 'MACHINE_FEATURES:append = " efi"\n'
self.append_config(config)
- self.assertEqual(0, bitbake('wic-image-minimal').status)
+ image = 'wic-image-minimal'
+ bitbake(image)
self.remove_config(config)
- bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'MACHINE'])
- deploy_dir = bb_vars['DEPLOY_DIR_IMAGE']
- machine = bb_vars['MACHINE']
- prefix = os.path.join(deploy_dir, 'wic-image-minimal-%s.' % machine)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+ prefix = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.' % bb_vars['IMAGE_LINK_NAME'])
+
# check if we have result image and manifests symlinks
# pointing to existing files
for suffix in ('wic', 'manifest'):
path = prefix + suffix
- self.assertTrue(os.path.islink(path))
- self.assertTrue(os.path.isfile(os.path.realpath(path)))
+ self.assertTrue(os.path.islink(path), msg="Link %s wasn't generated as expected" % path)
+ self.assertTrue(os.path.isfile(os.path.realpath(path)), msg="File linked to by %s wasn't generated as expected" % path)
- @OETestID(1422)
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ # TODO this should work on aarch64
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
+ @OETestTag("runqemu")
def test_qemu(self):
"""Test wic-image-minimal under qemu"""
config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\
- 'MACHINE_FEATURES_append = " efi"\n'
+ 'MACHINE_FEATURES:append = " efi"\n'
self.append_config(config)
- self.assertEqual(0, bitbake('wic-image-minimal').status)
+ bitbake('wic-image-minimal')
self.remove_config(config)
- with runqemu('wic-image-minimal', ssh=False) as qemu:
- cmd = "mount |grep '^/dev/' | cut -f1,3 -d ' '"
+ with runqemu('wic-image-minimal', ssh=False, runqemuparams='nographic') as qemu:
+ cmd = "mount | grep '^/dev/' | cut -f1,3 -d ' ' | egrep -c -e '/dev/sda1 /boot' " \
+ "-e '/dev/root /|/dev/sda2 /' -e '/dev/sda3 /media' -e '/dev/sda4 /mnt'"
status, output = qemu.run_serial(cmd)
self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
- self.assertEqual(output, '/dev/root /\r\n/dev/sda1 /boot\r\n/dev/sda3 /mnt')
+ self.assertEqual(output, '4')
+ cmd = "grep UUID=2c71ef06-a81d-4735-9d3a-379b69c6bdba /etc/fstab"
+ status, output = qemu.run_serial(cmd)
+ self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
+ self.assertEqual(output, 'UUID=2c71ef06-a81d-4735-9d3a-379b69c6bdba\t/media\text4\tdefaults\t0\t0')
- @only_for_arch(['i586', 'i686', 'x86_64'])
- @OETestID(1852)
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
+ @OETestTag("runqemu")
def test_qemu_efi(self):
"""Test core-image-minimal efi image under qemu"""
config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "mkefidisk.wks"\n'
self.append_config(config)
- self.assertEqual(0, bitbake('core-image-minimal ovmf').status)
+ bitbake('core-image-minimal ovmf')
self.remove_config(config)
with runqemu('core-image-minimal', ssh=False,
- runqemuparams='ovmf', image_fstype='wic') as qemu:
+ runqemuparams='nographic ovmf', image_fstype='wic') as qemu:
cmd = "grep sda. /proc/partitions |wc -l"
status, output = qemu.run_serial(cmd)
self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
@@ -666,91 +977,344 @@ part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --r
tempf.write("part " \
"--source rootfs --ondisk hda --align 4 --fixed-size %d "
"--fstype=ext4\n" % size)
- wksname = os.path.splitext(os.path.basename(wkspath))[0]
- return wkspath, wksname
+ return wkspath
- @OETestID(1847)
- def test_fixed_size(self):
- """
- Test creation of a simple image with partition size controlled through
- --fixed-size flag
- """
- wkspath, wksname = Wic._make_fixed_size_wks(200)
+ def _get_wic_partitions(self, wkspath, native_sysroot=None, ignore_status=False):
+ p = runCmd("wic create %s -e core-image-minimal -o %s" % (wkspath, self.resultdir),
+ ignore_status=ignore_status)
- self.assertEqual(0, runCmd("wic create %s -e core-image-minimal -o %s" \
- % (wkspath, self.resultdir)).status)
- os.remove(wkspath)
- wicout = glob(self.resultdir + "%s-*direct" % wksname)
- self.assertEqual(1, len(wicout))
+ if p.status:
+ return (p, None)
+
+ wksname = os.path.splitext(os.path.basename(wkspath))[0]
+
+ wicout = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
+
+ if not wicout:
+ return (p, None)
wicimg = wicout[0]
+ if not native_sysroot:
+ native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "wic-tools")
+
# verify partition size with wic
- res = runCmd("parted -m %s unit mib p 2>/dev/null" % wicimg,
- ignore_status=True,
- native_sysroot=self.native_sysroot)
- self.assertEqual(0, res.status)
+ res = runCmd("parted -m %s unit kib p 2>/dev/null" % wicimg,
+ native_sysroot=native_sysroot)
# parse parted output which looks like this:
# BYT;\n
# /var/tmp/wic/build/tmpfwvjjkf_-201611101222-hda.direct:200MiB:file:512:512:msdos::;\n
# 1:0.00MiB:200MiB:200MiB:ext4::;\n
- partlns = res.output.splitlines()[2:]
+ return (p, res.output.splitlines()[2:])
- self.assertEqual(1, len(partlns))
- self.assertEqual("1:0.00MiB:200MiB:200MiB:ext4::;", partlns[0])
+ def test_fixed_size(self):
+ """
+ Test creation of a simple image with partition size controlled through
+ --fixed-size flag
+ """
+ wkspath = Wic2._make_fixed_size_wks(200)
+ _, partlns = self._get_wic_partitions(wkspath)
+ os.remove(wkspath)
+
+ self.assertEqual(partlns, [
+ "1:4.00kiB:204804kiB:204800kiB:ext4::;",
+ ])
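# Illustrative sketch (not from this patch): each entry returned by
# _get_wic_partitions() above is one machine-readable parted line with
# colon-separated fields, number:start:end:size:fstype:name:flags;.
# Under that assumption the fields can be unpacked like this
# (the _sketch name is made up for illustration):
def parse_parted_line_sketch(line):
    number, start, end, size, fstype, name, flags = line.rstrip(';').split(':')
    return {'number': int(number), 'start': start, 'end': end,
            'size': size, 'fstype': fstype, 'name': name, 'flags': flags}
# e.g. parse_parted_line_sketch("1:4.00kiB:204804kiB:204800kiB:ext4::;")['size']
# is "204800kiB", i.e. the 200 MiB requested via --fixed-size above.
# End of sketch.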
- @OETestID(1848)
def test_fixed_size_error(self):
"""
Test creation of a simple image with partition size controlled through
--fixed-size flag. The size of partition is intentionally set to 1MiB
in order to trigger an error in wic.
"""
- wkspath, wksname = Wic._make_fixed_size_wks(1)
-
- self.assertEqual(1, runCmd("wic create %s -e core-image-minimal -o %s" \
- % (wkspath, self.resultdir), ignore_status=True).status)
+ wkspath = Wic2._make_fixed_size_wks(1)
+ p, _ = self._get_wic_partitions(wkspath, ignore_status=True)
os.remove(wkspath)
- wicout = glob(self.resultdir + "%s-*direct" % wksname)
- self.assertEqual(0, len(wicout))
- @only_for_arch(['i586', 'i686', 'x86_64'])
- @OETestID(1854)
+ self.assertNotEqual(p.status, 0, "wic exited successfully when an error was expected:\n%s" % p.output)
+
+ def test_offset(self):
+ native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "wic-tools")
+
+ with NamedTemporaryFile("w", suffix=".wks") as tempf:
+ # Test that partitions are placed at the correct offsets, default KB
+ tempf.write("bootloader --ptable gpt\n" \
+ "part / --source rootfs --ondisk hda --offset 32 --fixed-size 100M --fstype=ext4\n" \
+ "part /bar --ondisk hda --offset 102432 --fixed-size 100M --fstype=ext4\n")
+ tempf.flush()
+
+ _, partlns = self._get_wic_partitions(tempf.name, native_sysroot)
+ self.assertEqual(partlns, [
+ "1:32.0kiB:102432kiB:102400kiB:ext4:primary:;",
+ "2:102432kiB:204832kiB:102400kiB:ext4:primary:;",
+ ])
+
+ with NamedTemporaryFile("w", suffix=".wks") as tempf:
+ # Test that partitions are placed at the correct offsets, same with explicit KB
+ tempf.write("bootloader --ptable gpt\n" \
+ "part / --source rootfs --ondisk hda --offset 32K --fixed-size 100M --fstype=ext4\n" \
+ "part /bar --ondisk hda --offset 102432K --fixed-size 100M --fstype=ext4\n")
+ tempf.flush()
+
+ _, partlns = self._get_wic_partitions(tempf.name, native_sysroot)
+ self.assertEqual(partlns, [
+ "1:32.0kiB:102432kiB:102400kiB:ext4:primary:;",
+ "2:102432kiB:204832kiB:102400kiB:ext4:primary:;",
+ ])
+
+ with NamedTemporaryFile("w", suffix=".wks") as tempf:
+ # Test that partitions are placed at the correct offsets using MB
+ tempf.write("bootloader --ptable gpt\n" \
+ "part / --source rootfs --ondisk hda --offset 32K --fixed-size 100M --fstype=ext4\n" \
+ "part /bar --ondisk hda --offset 101M --fixed-size 100M --fstype=ext4\n")
+ tempf.flush()
+
+ _, partlns = self._get_wic_partitions(tempf.name, native_sysroot)
+ self.assertEqual(partlns, [
+ "1:32.0kiB:102432kiB:102400kiB:ext4:primary:;",
+ "2:103424kiB:205824kiB:102400kiB:ext4:primary:;",
+ ])
+
+ with NamedTemporaryFile("w", suffix=".wks") as tempf:
+ # Test that partitions can be placed on a 512 byte sector boundary
+ tempf.write("bootloader --ptable gpt\n" \
+ "part / --source rootfs --ondisk hda --offset 65s --fixed-size 99M --fstype=ext4\n" \
+ "part /bar --ondisk hda --offset 102432 --fixed-size 100M --fstype=ext4\n")
+ tempf.flush()
+
+ _, partlns = self._get_wic_partitions(tempf.name, native_sysroot)
+ self.assertEqual(partlns, [
+ "1:32.5kiB:101408kiB:101376kiB:ext4:primary:;",
+ "2:102432kiB:204832kiB:102400kiB:ext4:primary:;",
+ ])
+
+ with NamedTemporaryFile("w", suffix=".wks") as tempf:
+ # Test that a partition can be placed immediately after a MSDOS partition table
+ tempf.write("bootloader --ptable msdos\n" \
+ "part / --source rootfs --ondisk hda --offset 1s --fixed-size 100M --fstype=ext4\n")
+ tempf.flush()
+
+ _, partlns = self._get_wic_partitions(tempf.name, native_sysroot)
+ self.assertEqual(partlns, [
+ "1:0.50kiB:102400kiB:102400kiB:ext4::;",
+ ])
+
+ with NamedTemporaryFile("w", suffix=".wks") as tempf:
+ # Test that image creation fails if the partitions would overlap
+ tempf.write("bootloader --ptable gpt\n" \
+ "part / --source rootfs --ondisk hda --offset 32 --fixed-size 100M --fstype=ext4\n" \
+ "part /bar --ondisk hda --offset 102431 --fixed-size 100M --fstype=ext4\n")
+ tempf.flush()
+
+ p, _ = self._get_wic_partitions(tempf.name, ignore_status=True)
+ self.assertNotEqual(p.status, 0, "wic exited successfully when an error was expected:\n%s" % p.output)
+
+ with NamedTemporaryFile("w", suffix=".wks") as tempf:
+ # Test that partitions are not allowed to overlap with the bootloader
+ tempf.write("bootloader --ptable gpt\n" \
+ "part / --source rootfs --ondisk hda --offset 8 --fixed-size 100M --fstype=ext4\n")
+ tempf.flush()
+
+ p, _ = self._get_wic_partitions(tempf.name, ignore_status=True)
+ self.assertNotEqual(p.status, 0, "wic exited successfully when an error was expected:\n%s" % p.output)
+
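# Side note (not from this patch): the 102432 KiB start of the second partition
# in the first two cases above is the first partition's 32 KiB offset plus its
# 100 MiB (102400 KiB) fixed size; the failing cases then move the second
# partition 1 KiB earlier (102431) or place the first partition at 8 KiB, inside
# the area reserved for the partition table and bootloader, so wic is expected
# to error out.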
+ def test_extra_space(self):
+ native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "wic-tools")
+
+ with NamedTemporaryFile("w", suffix=".wks") as tempf:
+ tempf.write("bootloader --ptable gpt\n" \
+ "part / --source rootfs --ondisk hda --extra-space 200M --fstype=ext4\n")
+ tempf.flush()
+
+ _, partlns = self._get_wic_partitions(tempf.name, native_sysroot)
+ self.assertEqual(len(partlns), 1)
+ size = partlns[0].split(':')[3]
+ self.assertRegex(size, r'^[0-9]+kiB$')
+ size = int(size[:-3])
+ self.assertGreaterEqual(size, 204800)
+
+ # TODO this test could also work on aarch64
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
+ @OETestTag("runqemu")
def test_rawcopy_plugin_qemu(self):
"""Test rawcopy plugin in qemu"""
- # build ext4 and wic images
- for fstype in ("ext4", "wic"):
- config = 'IMAGE_FSTYPES = "%s"\nWKS_FILE = "test_rawcopy_plugin.wks.in"\n' % fstype
- self.append_config(config)
- self.assertEqual(0, bitbake('core-image-minimal').status)
- self.remove_config(config)
-
- with runqemu('core-image-minimal', ssh=False, image_fstype='wic') as qemu:
+ # build ext4 and then use it for a wic image
+ config = 'IMAGE_FSTYPES = "ext4"\n'
+ self.append_config(config)
+ bitbake('core-image-minimal')
+ image_link_name = get_bb_var('IMAGE_LINK_NAME', 'core-image-minimal')
+ self.remove_config(config)
+
+ config = 'IMAGE_FSTYPES = "wic"\n' \
+ 'IMAGE_LINK_NAME_CORE_IMAGE_MINIMAL = "%s"\n'\
+ 'WKS_FILE = "test_rawcopy_plugin.wks.in"\n'\
+ % image_link_name
+ self.append_config(config)
+ bitbake('core-image-minimal-mtdutils')
+ self.remove_config(config)
+
+ with runqemu('core-image-minimal-mtdutils', ssh=False,
+ runqemuparams='nographic', image_fstype='wic') as qemu:
cmd = "grep sda. /proc/partitions |wc -l"
status, output = qemu.run_serial(cmd)
self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
self.assertEqual(output, '2')
- @OETestID(1853)
- def test_rawcopy_plugin(self):
+ def _rawcopy_plugin(self, fstype):
"""Test rawcopy plugin"""
+ image = 'core-image-minimal'
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+ params = ',unpack' if fstype.endswith('.gz') else ''
+ with NamedTemporaryFile("w", suffix=".wks") as wks:
+ wks.write('part / --source rawcopy --sourceparams="file=%s.%s%s"\n'\
+ % (bb_vars['IMAGE_LINK_NAME'], fstype, params))
+ wks.flush()
+ cmd = "wic create %s -e %s -o %s" % (wks.name, image, self.resultdir)
+ runCmd(cmd)
+ wksname = os.path.splitext(os.path.basename(wks.name))[0]
+ out = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
+ self.assertEqual(1, len(out))
+
+ def test_rawcopy_plugin(self):
+ self._rawcopy_plugin('ext4')
+
+ def test_rawcopy_plugin_unpack(self):
+ fstype = 'ext4.gz'
+ config = 'IMAGE_FSTYPES = "%s"\n' % fstype
+ self.append_config(config)
+ self.assertEqual(0, bitbake('core-image-minimal').status)
+ self.remove_config(config)
+ self._rawcopy_plugin(fstype)
+
+ def test_empty_plugin(self):
+ """Test empty plugin"""
+ config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_empty_plugin.wks"\n'
+ self.append_config(config)
+ image = 'core-image-minimal'
+ bitbake(image)
+ self.remove_config(config)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+ image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.wic' % bb_vars['IMAGE_LINK_NAME'])
+ self.assertTrue(os.path.exists(image_path), msg="Image file %s wasn't generated as expected" % image_path)
+
+ sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
+
+ # Fstype column from 'wic ls' should be empty for the second partition
+ # as listed in test_empty_plugin.wks
+ result = runCmd("wic ls %s -n %s | awk -F ' ' '{print $1 \" \" $5}' | grep '^2' | wc -w" % (image_path, sysroot))
+ self.assertEqual('1', result.output)
+
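# Side note (not from this patch): `wic ls` prints one row per partition with
# columns along the lines of Num, Start, End, Size and Fstype. The pipeline
# above keeps only columns 1 and 5 of the row for partition 2, so a word count
# of 1 means only the partition number survived, i.e. the Fstype column is
# empty, which is exactly what the empty plugin should produce.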
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
+ @OETestTag("runqemu")
+ def test_biosplusefi_plugin_qemu(self):
+ """Test biosplusefi plugin in qemu"""
+ config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_biosplusefi_plugin.wks"\nMACHINE_FEATURES:append = " efi"\n'
+ self.append_config(config)
+ bitbake('core-image-minimal')
+ self.remove_config(config)
+
+ with runqemu('core-image-minimal', ssh=False,
+ runqemuparams='nographic', image_fstype='wic') as qemu:
+ # Check that we have ONLY two /dev/sda* partitions (/boot and /)
+ cmd = "grep sda. /proc/partitions | wc -l"
+ status, output = qemu.run_serial(cmd)
+ self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
+ self.assertEqual(output, '2')
+ # Check that /dev/sda1 is /boot and that either /dev/root OR /dev/sda2 is /
+ cmd = "mount | grep '^/dev/' | cut -f1,3 -d ' ' | egrep -c -e '/dev/sda1 /boot' -e '/dev/root /|/dev/sda2 /'"
+ status, output = qemu.run_serial(cmd)
+ self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
+ self.assertEqual(output, '2')
+ # Check that /boot has EFI bootx64.efi (required for EFI)
+ cmd = "ls /boot/EFI/BOOT/bootx64.efi | wc -l"
+ status, output = qemu.run_serial(cmd)
+ self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
+ self.assertEqual(output, '1')
+ # Check that "BOOTABLE" flag is set on boot partition (required for PC-Bios)
+ # Trailing "cat" seems to be required; otherwise run_serial() sends back echo of the input command
+ cmd = "fdisk -l /dev/sda | grep /dev/sda1 | awk {print'$2'} | cat"
+ status, output = qemu.run_serial(cmd)
+ self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
+ self.assertEqual(output, '*')
+
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
+ def test_biosplusefi_plugin(self):
+ """Test biosplusefi plugin"""
+ # Wic generation below may fail depending on the order of the unittests.
+ # This is because bootimg-pcbios (which bootimg-biosplusefi uses) generates its MBR inside the STAGING_DATADIR directory,
+ # which may or may not exist depending on what was built already.
+ # If an image hasn't been built yet, the directory ${STAGING_DATADIR}/syslinux won't exist and _get_bootimg_dir()
+ # will raise with "Couldn't find correct bootimg_dir".
+ # The easiest way to work around this issue is to make sure we have already built an image here, hence the bitbake call.
+ config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_biosplusefi_plugin.wks"\nMACHINE_FEATURES:append = " efi"\n'
+ self.append_config(config)
+ bitbake('core-image-minimal')
+ self.remove_config(config)
+
+ img = 'core-image-minimal'
+ with NamedTemporaryFile("w", suffix=".wks") as wks:
+ wks.writelines(['part /boot --active --source bootimg-biosplusefi --sourceparams="loader=grub-efi"\n',
+ 'part / --source rootfs --fstype=ext4 --align 1024 --use-uuid\n'\
+ 'bootloader --timeout=0 --append="console=ttyS0,115200n8"\n'])
+ wks.flush()
+ cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
+ runCmd(cmd)
+ wksname = os.path.splitext(os.path.basename(wks.name))[0]
+ out = glob(os.path.join(self.resultdir, "%s-*.direct" % wksname))
+ self.assertEqual(1, len(out))
+
+ @skipIfNotArch(['i586', 'i686', 'x86_64', 'aarch64'])
+ def test_uefi_kernel(self):
+ """ Test uefi-kernel in wic """
+ config = 'IMAGE_EFI_BOOT_FILES="/etc/fstab;testfile"\nIMAGE_FSTYPES = "wic"\nWKS_FILE = "test_uefikernel.wks"\nMACHINE_FEATURES:append = " efi"\n'
+ self.append_config(config)
+ bitbake('core-image-minimal')
+ self.remove_config(config)
+
img = 'core-image-minimal'
- machine = get_bb_var('MACHINE', img)
with NamedTemporaryFile("w", suffix=".wks") as wks:
- wks.writelines(['part /boot --active --source bootimg-pcbios\n',
- 'part / --source rawcopy --sourceparams="file=%s-%s.ext4" --use-uuid\n'\
- % (img, machine),
+ wks.writelines(['part /boot --source bootimg-efi --sourceparams="loader=uefi-kernel"\n'
+ 'part / --source rootfs --fstype=ext4 --align 1024 --use-uuid\n'\
'bootloader --timeout=0 --append="console=ttyS0,115200n8"\n'])
wks.flush()
cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
- self.assertEqual(0, runCmd(cmd).status)
+ runCmd(cmd)
wksname = os.path.splitext(os.path.basename(wks.name))[0]
- out = glob(self.resultdir + "%s-*direct" % wksname)
+ out = glob(os.path.join(self.resultdir, "%s-*.direct" % wksname))
self.assertEqual(1, len(out))
- @OETestID(1849)
+ # TODO this test could also work on aarch64
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
+ @OETestTag("runqemu")
+ def test_efi_plugin_unified_kernel_image_qemu(self):
+ """Test efi plugin's Unified Kernel Image feature in qemu"""
+ config = 'IMAGE_FSTYPES = "wic"\n'\
+ 'INITRAMFS_IMAGE = "core-image-minimal-initramfs"\n'\
+ 'WKS_FILE = "test_efi_plugin.wks"\n'\
+ 'MACHINE_FEATURES:append = " efi"\n'
+ self.append_config(config)
+ bitbake('core-image-minimal core-image-minimal-initramfs ovmf')
+ self.remove_config(config)
+
+ with runqemu('core-image-minimal', ssh=False,
+ runqemuparams='nographic ovmf', image_fstype='wic') as qemu:
+ # Check that /boot has EFI bootx64.efi (required for EFI)
+ cmd = "ls /boot/EFI/BOOT/bootx64.efi | wc -l"
+ status, output = qemu.run_serial(cmd)
+ self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
+ self.assertEqual(output, '1')
+ # Check that /boot has EFI/Linux/linux.efi (required for Unified Kernel Images auto detection)
+ cmd = "ls /boot/EFI/Linux/linux.efi | wc -l"
+ status, output = qemu.run_serial(cmd)
+ self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
+ self.assertEqual(output, '1')
+ # Check that /boot doesn't have loader/entries/boot.conf (Unified Kernel Images are auto detected by the bootloader)
+ cmd = "ls /boot/loader/entries/boot.conf 2&>/dev/null | wc -l"
+ status, output = qemu.run_serial(cmd)
+ self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
+ self.assertEqual(output, '0')
+
def test_fs_types(self):
"""Test filesystem types for empty and not empty partitions"""
img = 'core-image-minimal'
@@ -762,15 +1326,14 @@ part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --r
'part emptyvfat --fstype vfat --size 1M\n',
'part emptymsdos --fstype msdos --size 1M\n',
'part emptyext2 --fstype ext2 --size 1M\n',
- 'part emptybtrfs --fstype btrfs --size 100M\n'])
+ 'part emptybtrfs --fstype btrfs --size 150M\n'])
wks.flush()
cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
- self.assertEqual(0, runCmd(cmd).status)
+ runCmd(cmd)
wksname = os.path.splitext(os.path.basename(wks.name))[0]
- out = glob(self.resultdir + "%s-*direct" % wksname)
+ out = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
self.assertEqual(1, len(out))
- @OETestID(1851)
def test_kickstart_parser(self):
"""Test wks parser options"""
with NamedTemporaryFile("w", suffix=".wks") as wks:
@@ -778,26 +1341,24 @@ part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --r
'--overhead-factor 1.2 --size 100k\n'])
wks.flush()
cmd = "wic create %s -e core-image-minimal -o %s" % (wks.name, self.resultdir)
- self.assertEqual(0, runCmd(cmd).status)
+ runCmd(cmd)
wksname = os.path.splitext(os.path.basename(wks.name))[0]
- out = glob(self.resultdir + "%s-*direct" % wksname)
+ out = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
self.assertEqual(1, len(out))
- @OETestID(1850)
def test_image_bootpart_globbed(self):
"""Test globbed sources with image-bootpart plugin"""
img = "core-image-minimal"
cmd = "wic create sdimage-bootpart -e %s -o %s" % (img, self.resultdir)
config = 'IMAGE_BOOT_FILES = "%s*"' % get_bb_var('KERNEL_IMAGETYPE', img)
self.append_config(config)
- self.assertEqual(0, runCmd(cmd).status)
+ runCmd(cmd)
self.remove_config(config)
- self.assertEqual(1, len(glob(self.resultdir + "sdimage-bootpart-*direct")))
+ self.assertEqual(1, len(glob(os.path.join(self.resultdir, "sdimage-bootpart-*direct"))))
- @OETestID(1855)
def test_sparse_copy(self):
"""Test sparse_copy with FIEMAP and SEEK_HOLE filemap APIs"""
- libpath = os.path.join(get_bb_var('COREBASE'), 'scripts', 'lib', 'wic')
+ libpath = os.path.join(self.td['COREBASE'], 'scripts', 'lib', 'wic')
sys.path.insert(0, libpath)
from filemap import FilemapFiemap, FilemapSeek, sparse_copy, ErrorNotSupp
with NamedTemporaryFile("w", suffix=".wic-sparse") as sparse:
@@ -823,55 +1384,184 @@ part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --r
self.assertEqual(dest_stat.st_blocks, 8)
os.unlink(dest)
- @OETestID(1857)
+ def test_mkfs_extraopts(self):
+ """Test wks option --mkfs-extraopts for empty and not empty partitions"""
+ img = 'core-image-minimal'
+ with NamedTemporaryFile("w", suffix=".wks") as wks:
+ wks.writelines(
+ ['part ext2 --fstype ext2 --source rootfs --mkfs-extraopts "-D -F -i 8192"\n',
+ "part btrfs --fstype btrfs --source rootfs --size 40M --mkfs-extraopts='--quiet'\n",
+ 'part squash --fstype squashfs --source rootfs --mkfs-extraopts "-no-sparse -b 4096"\n',
+ 'part emptyvfat --fstype vfat --size 1M --mkfs-extraopts "-S 1024 -s 64"\n',
+ 'part emptymsdos --fstype msdos --size 1M --mkfs-extraopts "-S 1024 -s 64"\n',
+ 'part emptyext2 --fstype ext2 --size 1M --mkfs-extraopts "-D -F -i 8192"\n',
+ 'part emptybtrfs --fstype btrfs --size 100M --mkfs-extraopts "--mixed -K"\n'])
+ wks.flush()
+ cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
+ runCmd(cmd)
+ wksname = os.path.splitext(os.path.basename(wks.name))[0]
+ out = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
+ self.assertEqual(1, len(out))
+
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
+ @OETestTag("runqemu")
+ def test_expand_mbr_image(self):
+ """Test wic write --expand command for mbr image"""
+ # build an image
+ config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "directdisk.wks"\n'
+ self.append_config(config)
+ image = 'core-image-minimal'
+ bitbake(image)
+
+ # get path to the image
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+ image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.wic' % bb_vars['IMAGE_LINK_NAME'])
+
+ self.remove_config(config)
+
+ try:
+ # expand image to 1G
+ new_image_path = None
+ with NamedTemporaryFile(mode='wb', suffix='.wic.exp',
+ dir=bb_vars['DEPLOY_DIR_IMAGE'], delete=False) as sparse:
+ sparse.truncate(1024 ** 3)
+ new_image_path = sparse.name
+
+ sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
+ cmd = "wic write -n %s --expand 1:0 %s %s" % (sysroot, image_path, new_image_path)
+ runCmd(cmd)
+
+ # check if partitions are expanded
+ orig = runCmd("wic ls %s -n %s" % (image_path, sysroot))
+ exp = runCmd("wic ls %s -n %s" % (new_image_path, sysroot))
+ orig_sizes = [int(line.split()[3]) for line in orig.output.split('\n')[1:]]
+ exp_sizes = [int(line.split()[3]) for line in exp.output.split('\n')[1:]]
+ self.assertEqual(orig_sizes[0], exp_sizes[0]) # first partition is not resized
+ self.assertTrue(orig_sizes[1] < exp_sizes[1], msg="Partition size wasn't enlarged (%s vs %s)" % (orig_sizes[1], exp_sizes[1]))
+
+ # Check if all free space is partitioned
+ result = runCmd("%s/usr/sbin/sfdisk -F %s" % (sysroot, new_image_path))
+ self.assertIn("0 B, 0 bytes, 0 sectors", result.output)
+
+ os.rename(image_path, image_path + '.bak')
+ os.rename(new_image_path, image_path)
+
+ # Check if it boots in qemu
+ with runqemu('core-image-minimal', ssh=False, runqemuparams='nographic') as qemu:
+ cmd = "ls /etc/"
+ status, output = qemu.run_serial('true')
+ self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
+ finally:
+ if os.path.exists(new_image_path):
+ os.unlink(new_image_path)
+ if os.path.exists(image_path + '.bak'):
+ os.rename(image_path + '.bak', image_path)
+
+ def test_gpt_partition_name(self):
+ """Test --part-name argument to set partition name in GPT table"""
+ config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "test_gpt_partition_name.wks"\n'
+ self.append_config(config)
+ image = 'core-image-minimal'
+ bitbake(image)
+ self.remove_config(config)
+ deploy_dir = get_bb_var('DEPLOY_DIR_IMAGE')
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+ image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.wic' % bb_vars['IMAGE_LINK_NAME'])
+
+ sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
+
+ # Image is created
+ self.assertTrue(os.path.exists(image_path), "image file %s doesn't exist" % image_path)
+
+ # Check the names of the three partitions
+ # as listed in test_gpt_partition_name.wks
+ result = runCmd("%s/usr/sbin/sfdisk --part-label %s 1" % (sysroot, image_path))
+ self.assertEqual('boot-A', result.output)
+ result = runCmd("%s/usr/sbin/sfdisk --part-label %s 2" % (sysroot, image_path))
+ self.assertEqual('root-A', result.output)
+ # When the --part-name is not defined, the partition name is equal to the --label
+ result = runCmd("%s/usr/sbin/sfdisk --part-label %s 3" % (sysroot, image_path))
+ self.assertEqual('ext-space', result.output)
+
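# Illustrative sketch (not from this patch): the three checks above assume
# test_gpt_partition_name.wks ships partitions whose GPT names come from
# --part-name when it is given and fall back to --label otherwise. A
# hypothetical .wks along those lines (options shown only for illustration,
# not the actual shipped file) could be:
#
#   part /boot --source bootimg-efi --label boot --part-name boot-A --use-uuid
#   part /     --source rootfs --fstype=ext4 --label root --part-name root-A --use-uuid
#   part       --fstype=ext4 --label ext-space --use-uuid
#   bootloader --ptable gpt
# End of sketch.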
+ def test_empty_zeroize_plugin(self):
+ img = 'core-image-minimal'
+ expected_size = [ 1024*1024, # 1M
+ 512*1024, # 512K
+ 2*1024*1024] # 2M
+ # Check combination of sourceparams
+ with NamedTemporaryFile("w", suffix=".wks") as wks:
+ wks.writelines(
+ ['part empty --source empty --sourceparams="fill" --ondisk sda --fixed-size 1M\n',
+ 'part empty --source empty --sourceparams="size=512K" --ondisk sda --size 1M --align 1024\n',
+ 'part empty --source empty --sourceparams="size=2048k,bs=512K" --ondisk sda --size 4M --align 1024\n'
+ ])
+ wks.flush()
+ cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
+ runCmd(cmd)
+ wksname = os.path.splitext(os.path.basename(wks.name))[0]
+ wicout = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
+ # Skip the complete image and just look at the single partitions
+ for idx, part in enumerate(wicout[1:]):
+ self.logger.info(part)
+ # Check if partitions are actually zeroized
+ with open(part, mode="rb") as fd:
+ ba = bytearray(fd.read())
+ for b in ba:
+ self.assertEqual(b, 0)
+ self.assertEqual(expected_size[idx], os.path.getsize(part))
+
+ # Check the consistency check between the "fill" sourceparam and the "--size" option
+ with NamedTemporaryFile("w", suffix=".wks") as wks:
+ wks.writelines(['part empty --source empty --sourceparams="fill" --ondisk sda --size 1M\n'])
+ wks.flush()
+ cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
+ result = runCmd(cmd, ignore_status=True)
+ self.assertIn("Source parameter 'fill' only works with the '--fixed-size' option, exiting.", result.output)
+ self.assertNotEqual(0, result.status)
+
+class ModifyTests(WicTestCase):
def test_wic_ls(self):
"""Test listing image content using 'wic ls'"""
- self.assertEqual(0, runCmd("wic create wictestdisk "
+ runCmd("wic create wictestdisk "
"--image-name=core-image-minimal "
- "-D -o %s" % self.resultdir).status)
- images = glob(self.resultdir + "wictestdisk-*.direct")
+ "-D -o %s" % self.resultdir)
+ images = glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))
self.assertEqual(1, len(images))
sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
# list partitions
result = runCmd("wic ls %s -n %s" % (images[0], sysroot))
- self.assertEqual(0, result.status)
self.assertEqual(3, len(result.output.split('\n')))
# list directory content of the first partition
result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot))
- self.assertEqual(0, result.status)
self.assertEqual(6, len(result.output.split('\n')))
- @OETestID(1856)
def test_wic_cp(self):
"""Test copy files and directories to the the wic image."""
- self.assertEqual(0, runCmd("wic create wictestdisk "
+ runCmd("wic create wictestdisk "
"--image-name=core-image-minimal "
- "-D -o %s" % self.resultdir).status)
- images = glob(self.resultdir + "wictestdisk-*.direct")
+ "-D -o %s" % self.resultdir)
+ images = glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))
self.assertEqual(1, len(images))
sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
# list directory content of the first partition
result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot))
- self.assertEqual(0, result.status)
self.assertEqual(6, len(result.output.split('\n')))
with NamedTemporaryFile("w", suffix=".wic-cp") as testfile:
testfile.write("test")
# copy file to the partition
- result = runCmd("wic cp %s %s:1/ -n %s" % (testfile.name, images[0], sysroot))
- self.assertEqual(0, result.status)
+ runCmd("wic cp %s %s:1/ -n %s" % (testfile.name, images[0], sysroot))
# check if file is there
result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot))
- self.assertEqual(0, result.status)
self.assertEqual(7, len(result.output.split('\n')))
- self.assertTrue(os.path.basename(testfile.name) in result.output)
+ self.assertIn(os.path.basename(testfile.name), result.output)
# prepare directory
testdir = os.path.join(self.resultdir, 'wic-test-cp-dir')
@@ -880,184 +1570,123 @@ part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --r
copy(testfile.name, testdir)
# copy directory to the partition
- result = runCmd("wic cp %s %s:1/ -n %s" % (testdir, images[0], sysroot))
- self.assertEqual(0, result.status)
+ runCmd("wic cp %s %s:1/ -n %s" % (testdir, images[0], sysroot))
# check if directory is there
result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot))
- self.assertEqual(0, result.status)
self.assertEqual(8, len(result.output.split('\n')))
- self.assertTrue(os.path.basename(testdir) in result.output)
+ self.assertIn(os.path.basename(testdir), result.output)
+
+ # copy the file from the partition and check that the copy succeeded
+ dest = '%s-cp' % testfile.name
+ runCmd("wic cp %s:1/%s %s -n %s" % (images[0],
+ os.path.basename(testfile.name), dest, sysroot))
+ self.assertTrue(os.path.exists(dest), msg="File %s wasn't generated as expected" % dest)
+
- @OETestID(1858)
def test_wic_rm(self):
"""Test removing files and directories from the the wic image."""
- self.assertEqual(0, runCmd("wic create mkefidisk "
+ runCmd("wic create mkefidisk "
"--image-name=core-image-minimal "
- "-D -o %s" % self.resultdir).status)
- images = glob(self.resultdir + "mkefidisk-*.direct")
+ "-D -o %s" % self.resultdir)
+ images = glob(os.path.join(self.resultdir, "mkefidisk-*.direct"))
self.assertEqual(1, len(images))
sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
+ # Not bulletproof but hopefully sufficient
+ kerneltype = get_bb_var('KERNEL_IMAGETYPE', 'virtual/kernel')
# list directory content of the first partition
result = runCmd("wic ls %s:1 -n %s" % (images[0], sysroot))
- self.assertEqual(0, result.status)
- self.assertIn('\nBZIMAGE ', result.output)
+ self.assertIn('\n%s ' % kerneltype.upper(), result.output)
self.assertIn('\nEFI <DIR> ', result.output)
- # remove file
- result = runCmd("wic rm %s:1/bzimage -n %s" % (images[0], sysroot))
- self.assertEqual(0, result.status)
+ # remove file. EFI partitions are case-insensitive so exercise that too
+ runCmd("wic rm %s:1/%s -n %s" % (images[0], kerneltype.lower(), sysroot))
# remove directory
- result = runCmd("wic rm %s:1/efi -n %s" % (images[0], sysroot))
- self.assertEqual(0, result.status)
+ runCmd("wic rm %s:1/efi -n %s" % (images[0], sysroot))
# check if they're removed
result = runCmd("wic ls %s:1 -n %s" % (images[0], sysroot))
- self.assertEqual(0, result.status)
- self.assertNotIn('\nBZIMAGE ', result.output)
+ self.assertNotIn('\n%s ' % kerneltype.upper(), result.output)
self.assertNotIn('\nEFI <DIR> ', result.output)
- @OETestID(1922)
- def test_mkfs_extraopts(self):
- """Test wks option --mkfs-extraopts for empty and not empty partitions"""
- img = 'core-image-minimal'
- with NamedTemporaryFile("w", suffix=".wks") as wks:
- wks.writelines(
- ['part ext2 --fstype ext2 --source rootfs --mkfs-extraopts "-D -F -i 8192"\n',
- "part btrfs --fstype btrfs --source rootfs --size 40M --mkfs-extraopts='--quiet'\n",
- 'part squash --fstype squashfs --source rootfs --mkfs-extraopts "-no-sparse -b 4096"\n',
- 'part emptyvfat --fstype vfat --size 1M --mkfs-extraopts "-S 1024 -s 64"\n',
- 'part emptymsdos --fstype msdos --size 1M --mkfs-extraopts "-S 1024 -s 64"\n',
- 'part emptyext2 --fstype ext2 --size 1M --mkfs-extraopts "-D -F -i 8192"\n',
- 'part emptybtrfs --fstype btrfs --size 100M --mkfs-extraopts "--mixed -K"\n'])
- wks.flush()
- cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
- self.assertEqual(0, runCmd(cmd).status)
- wksname = os.path.splitext(os.path.basename(wks.name))[0]
- out = glob(self.resultdir + "%s-*direct" % wksname)
- self.assertEqual(1, len(out))
-
- def test_expand_mbr_image(self):
- """Test wic write --expand command for mbr image"""
- # build an image
- config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "directdisk.wks"\n'
- self.append_config(config)
- self.assertEqual(0, bitbake('core-image-minimal').status)
-
- # get path to the image
- bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'MACHINE'])
- deploy_dir = bb_vars['DEPLOY_DIR_IMAGE']
- machine = bb_vars['MACHINE']
- image_path = os.path.join(deploy_dir, 'core-image-minimal-%s.wic' % machine)
-
- self.remove_config(config)
-
- try:
- # expand image to 1G
- new_image_path = None
- with NamedTemporaryFile(mode='wb', suffix='.wic.exp',
- dir=deploy_dir, delete=False) as sparse:
- sparse.truncate(1024 ** 3)
- new_image_path = sparse.name
-
- sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
- cmd = "wic write -n %s --expand 1:0 %s %s" % (sysroot, image_path, new_image_path)
- self.assertEqual(0, runCmd(cmd).status)
-
- # check if partitions are expanded
- orig = runCmd("wic ls %s -n %s" % (image_path, sysroot))
- exp = runCmd("wic ls %s -n %s" % (new_image_path, sysroot))
- orig_sizes = [int(line.split()[3]) for line in orig.output.split('\n')[1:]]
- exp_sizes = [int(line.split()[3]) for line in exp.output.split('\n')[1:]]
- self.assertEqual(orig_sizes[0], exp_sizes[0]) # first partition is not resized
- self.assertTrue(orig_sizes[1] < exp_sizes[1])
-
- # Check if all free space is partitioned
- result = runCmd("%s/usr/sbin/sfdisk -F %s" % (sysroot, new_image_path))
- self.assertTrue("0 B, 0 bytes, 0 sectors" in result.output)
-
- os.rename(image_path, image_path + '.bak')
- os.rename(new_image_path, image_path)
-
- # Check if it boots in qemu
- with runqemu('core-image-minimal', ssh=False) as qemu:
- cmd = "ls /etc/"
- status, output = qemu.run_serial('true')
- self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
- finally:
- if os.path.exists(new_image_path):
- os.unlink(new_image_path)
- if os.path.exists(image_path + '.bak'):
- os.rename(image_path + '.bak', image_path)
-
def test_wic_ls_ext(self):
"""Test listing content of the ext partition using 'wic ls'"""
- self.assertEqual(0, runCmd("wic create wictestdisk "
+ runCmd("wic create wictestdisk "
"--image-name=core-image-minimal "
- "-D -o %s" % self.resultdir).status)
- images = glob(self.resultdir + "wictestdisk-*.direct")
+ "-D -o %s" % self.resultdir)
+ images = glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))
self.assertEqual(1, len(images))
sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
# list directory content of the second ext4 partition
result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot))
- self.assertEqual(0, result.status)
self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset(
- set(line.split()[-1] for line in result.output.split('\n') if line)))
+ set(line.split()[-1] for line in result.output.split('\n') if line)), msg="Expected directories not present %s" % result.output)
def test_wic_cp_ext(self):
"""Test copy files and directories to the ext partition."""
- self.assertEqual(0, runCmd("wic create wictestdisk "
+ runCmd("wic create wictestdisk "
"--image-name=core-image-minimal "
- "-D -o %s" % self.resultdir).status)
- images = glob(self.resultdir + "wictestdisk-*.direct")
+ "-D -o %s" % self.resultdir)
+ images = glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))
self.assertEqual(1, len(images))
sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
# list directory content of the ext4 partition
result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot))
- self.assertEqual(0, result.status)
dirs = set(line.split()[-1] for line in result.output.split('\n') if line)
- self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset(dirs))
+ self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset(dirs), msg="Expected directories not present %s" % dirs)
with NamedTemporaryFile("w", suffix=".wic-cp") as testfile:
testfile.write("test")
# copy file to the partition
- result = runCmd("wic cp %s %s:2/ -n %s" % (testfile.name, images[0], sysroot))
- self.assertEqual(0, result.status)
+ runCmd("wic cp %s %s:2/ -n %s" % (testfile.name, images[0], sysroot))
# check if file is there
result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot))
- self.assertEqual(0, result.status)
newdirs = set(line.split()[-1] for line in result.output.split('\n') if line)
self.assertEqual(newdirs.difference(dirs), set([os.path.basename(testfile.name)]))
+ # check if the file to copy is in the partition
+ result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot))
+ self.assertIn('fstab', [line.split()[-1] for line in result.output.split('\n') if line])
+
+ # copy file from the partition, replace the temporary file content with it and
+ # check for the file size to validate the copy
+ runCmd("wic cp %s:2/etc/fstab %s -n %s" % (images[0], testfile.name, sysroot))
+ self.assertTrue(os.stat(testfile.name).st_size > 0, msg="Filesize not as expected %s" % os.stat(testfile.name).st_size)
+
+
def test_wic_rm_ext(self):
"""Test removing files from the ext partition."""
- self.assertEqual(0, runCmd("wic create mkefidisk "
+ runCmd("wic create mkefidisk "
"--image-name=core-image-minimal "
- "-D -o %s" % self.resultdir).status)
- images = glob(self.resultdir + "mkefidisk-*.direct")
+ "-D -o %s" % self.resultdir)
+ images = glob(os.path.join(self.resultdir, "mkefidisk-*.direct"))
self.assertEqual(1, len(images))
sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
# list directory content of the /etc directory on ext4 partition
result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot))
- self.assertEqual(0, result.status)
- self.assertTrue('fstab' in [line.split()[-1] for line in result.output.split('\n') if line])
+ self.assertIn('fstab', [line.split()[-1] for line in result.output.split('\n') if line])
# remove file
- result = runCmd("wic rm %s:2/etc/fstab -n %s" % (images[0], sysroot))
- self.assertEqual(0, result.status)
+ runCmd("wic rm %s:2/etc/fstab -n %s" % (images[0], sysroot))
# check if it's removed
result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot))
- self.assertEqual(0, result.status)
- self.assertTrue('fstab' not in [line.split()[-1] for line in result.output.split('\n') if line])
+ self.assertNotIn('fstab', [line.split()[-1] for line in result.output.split('\n') if line])
+
+ # remove non-empty directory
+ runCmd("wic rm -r %s:2/etc/ -n %s" % (images[0], sysroot))
+
+ # check if it's removed
+ result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot))
+ self.assertNotIn('etc', [line.split()[-1] for line in result.output.split('\n') if line])
diff --git a/meta/lib/oeqa/selftest/cases/wrapper.py b/meta/lib/oeqa/selftest/cases/wrapper.py
new file mode 100644
index 0000000000..f2be44262c
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/wrapper.py
@@ -0,0 +1,16 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+
+class WrapperTests(OESelftestTestCase):
+ def test_shebang_wrapper(self):
+ """
+ Summary: Build a recipe which will fail if the cmdline_shebang_wrapper function is defective.
+ Expected: Exit status to be 0.
+ Author: Paulo Neves <ptsneves@gmail.com>
+ """
+ res = bitbake("cmdline-shebang-wrapper-test -c install", ignore_status=False)
diff --git a/meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py b/meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py
new file mode 100644
index 0000000000..312edb6431
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py
@@ -0,0 +1,39 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import sys
+import subprocess
+import shutil
+from oeqa.selftest.case import OESelftestTestCase
+basepath = os.path.abspath(os.path.dirname(__file__) + '/../../../../../')
+lib_path = basepath + '/scripts/lib'
+sys.path = sys.path + [lib_path]
+from yocto_testresults_query import get_sha1, create_workdir
+
+
+class TestResultsQueryTests(OESelftestTestCase):
+ def test_get_sha1(self):
+ test_data_get_sha1 = [
+ {"input": "yocto-4.0", "expected": "00cfdde791a0176c134f31e5a09eff725e75b905"},
+ {"input": "4.1_M1", "expected": "95066dde6861ee08fdb505ab3e0422156cc24fae"},
+ ]
+ for data in test_data_get_sha1:
+ test_name = data["input"]
+ with self.subTest(f"Test SHA1 from {test_name}"):
+ self.assertEqual(
+ get_sha1(basepath, data["input"]), data["expected"])
+
+ def test_create_workdir(self):
+ workdir = create_workdir()
+ try:
+ url = subprocess.check_output(
+ ["git", "-C", workdir, "remote", "get-url", "origin"]).strip().decode("utf-8")
+ except Exception:
+ shutil.rmtree(workdir, ignore_errors=True)
+ self.fail(f"Cannot execute git commands in {workdir}")
+ shutil.rmtree(workdir)
+ self.assertEqual(url, "git://git.yoctoproject.org/yocto-testresults")