Diffstat (limited to 'meta/lib/oeqa/selftest/cases')
-rw-r--r--  meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py |   96
-rw-r--r--  meta/lib/oeqa/selftest/cases/archiver.py |  197
-rw-r--r--  meta/lib/oeqa/selftest/cases/bblayers.py |  118
-rw-r--r--  meta/lib/oeqa/selftest/cases/bbtests.py |  299
-rw-r--r--  meta/lib/oeqa/selftest/cases/binutils.py |   50
-rw-r--r--  meta/lib/oeqa/selftest/cases/buildhistory.py |   50
-rw-r--r--  meta/lib/oeqa/selftest/cases/buildoptions.py |  198
-rw-r--r--  meta/lib/oeqa/selftest/cases/containerimage.py |   88
-rw-r--r--  meta/lib/oeqa/selftest/cases/devtool.py | 1779
-rw-r--r--  meta/lib/oeqa/selftest/cases/distrodata.py |   89
-rw-r--r--  meta/lib/oeqa/selftest/cases/eSDK.py |  120
-rw-r--r--  meta/lib/oeqa/selftest/cases/efibootpartition.py |   46
-rw-r--r--  meta/lib/oeqa/selftest/cases/fetch.py |   51
-rw-r--r--  meta/lib/oeqa/selftest/cases/gcc.py |  152
-rw-r--r--  meta/lib/oeqa/selftest/cases/glibc.py |   89
-rw-r--r--  meta/lib/oeqa/selftest/cases/gotoolchain.py |   71
-rw-r--r--  meta/lib/oeqa/selftest/cases/image_typedep.py |   58
-rw-r--r--  meta/lib/oeqa/selftest/cases/imagefeatures.py |  264
-rw-r--r--  meta/lib/oeqa/selftest/cases/incompatible_lic.py |  135
-rw-r--r--  meta/lib/oeqa/selftest/cases/kerneldevelopment.py |   67
-rw-r--r--  meta/lib/oeqa/selftest/cases/layerappend.py |   97
-rw-r--r--  meta/lib/oeqa/selftest/cases/liboe.py |  102
-rw-r--r--  meta/lib/oeqa/selftest/cases/lic_checksum.py |   38
-rw-r--r--  meta/lib/oeqa/selftest/cases/manifest.py |  164
-rw-r--r--  meta/lib/oeqa/selftest/cases/meta_ide.py |   51
-rw-r--r--  meta/lib/oeqa/selftest/cases/multiconfig.py |   72
-rw-r--r--  meta/lib/oeqa/selftest/cases/oelib/__init__.py |    0
-rw-r--r--  meta/lib/oeqa/selftest/cases/oelib/buildhistory.py |   99
-rw-r--r--  meta/lib/oeqa/selftest/cases/oelib/elf.py |   26
-rw-r--r--  meta/lib/oeqa/selftest/cases/oelib/license.py |  103
-rw-r--r--  meta/lib/oeqa/selftest/cases/oelib/path.py |   89
-rw-r--r--  meta/lib/oeqa/selftest/cases/oelib/types.py |   54
-rw-r--r--  meta/lib/oeqa/selftest/cases/oelib/utils.py |  103
-rw-r--r--  meta/lib/oeqa/selftest/cases/oescripts.py |  188
-rw-r--r--  meta/lib/oeqa/selftest/cases/package.py |  150
-rw-r--r--  meta/lib/oeqa/selftest/cases/pkgdata.py |  220
-rw-r--r--  meta/lib/oeqa/selftest/cases/prservice.py |  125
-rw-r--r--  meta/lib/oeqa/selftest/cases/recipetool.py |  702
-rw-r--r--  meta/lib/oeqa/selftest/cases/recipeutils.py |  140
-rw-r--r--  meta/lib/oeqa/selftest/cases/reproducible.py |  202
-rw-r--r--  meta/lib/oeqa/selftest/cases/resulttooltests.py |   98
-rw-r--r--  meta/lib/oeqa/selftest/cases/runcmd.py |  121
-rw-r--r--  meta/lib/oeqa/selftest/cases/runqemu.py |  211
-rw-r--r--  meta/lib/oeqa/selftest/cases/runtime_test.py |  439
-rw-r--r--  meta/lib/oeqa/selftest/cases/selftest.py |   53
-rw-r--r--  meta/lib/oeqa/selftest/cases/signing.py |  224
-rw-r--r--  meta/lib/oeqa/selftest/cases/sstate.py |   67
-rw-r--r--  meta/lib/oeqa/selftest/cases/sstatetests.py |  532
-rw-r--r--  meta/lib/oeqa/selftest/cases/tinfoil.py |  224
-rw-r--r--  meta/lib/oeqa/selftest/cases/wic.py | 1051
50 files changed, 9762 insertions(+), 0 deletions(-)
diff --git a/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py b/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py
new file mode 100644
index 0000000000..f7c356ad09
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py
@@ -0,0 +1,96 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import shutil
+
+import oeqa.utils.ftools as ftools
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_test_layer
+from oeqa.selftest.cases.sstate import SStateBase
+
+
+class RebuildFromSState(SStateBase):
+
+ @classmethod
+ def setUpClass(self):
+ super(RebuildFromSState, self).setUpClass()
+ self.builddir = os.path.join(os.environ.get('BUILDDIR'))
+
+ def get_dep_targets(self, primary_targets):
+ found_targets = []
+ bitbake("-g " + ' '.join(map(str, primary_targets)))
+ with open(os.path.join(self.builddir, 'pn-buildlist'), 'r') as pnfile:
+ found_targets = pnfile.read().splitlines()
+ return found_targets
+
+ def configure_builddir(self, builddir):
+ os.mkdir(builddir)
+ self.track_for_cleanup(builddir)
+ os.mkdir(os.path.join(builddir, 'conf'))
+ shutil.copyfile(os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf'), os.path.join(builddir, 'conf/local.conf'))
+ config = {}
+ config['default_sstate_dir'] = "SSTATE_DIR ?= \"${TOPDIR}/sstate-cache\""
+ config['null_sstate_mirrors'] = "SSTATE_MIRRORS = \"\""
+ config['default_tmp_dir'] = "TMPDIR = \"${TOPDIR}/tmp\""
+ for key in config:
+ ftools.append_file(os.path.join(builddir, 'conf/selftest.inc'), config[key])
+ shutil.copyfile(os.path.join(os.environ.get('BUILDDIR'), 'conf/bblayers.conf'), os.path.join(builddir, 'conf/bblayers.conf'))
+ try:
+ shutil.copyfile(os.path.join(os.environ.get('BUILDDIR'), 'conf/auto.conf'), os.path.join(builddir, 'conf/auto.conf'))
+ except:
+ pass
+
+ def hardlink_tree(self, src, dst):
+ os.mkdir(dst)
+ self.track_for_cleanup(dst)
+ for root, dirs, files in os.walk(src):
+ if root == src:
+ continue
+ os.mkdir(os.path.join(dst, root.split(src)[1][1:]))
+ for sstate_file in files:
+ os.link(os.path.join(root, sstate_file), os.path.join(dst, root.split(src)[1][1:], sstate_file))
+
+ def run_test_sstate_rebuild(self, primary_targets, relocate=False, rebuild_dependencies=False):
+ buildA = os.path.join(self.builddir, 'buildA')
+ if relocate:
+ buildB = os.path.join(self.builddir, 'buildB')
+ else:
+ buildB = buildA
+
+ if rebuild_dependencies:
+ rebuild_targets = self.get_dep_targets(primary_targets)
+ else:
+ rebuild_targets = primary_targets
+
+ self.configure_builddir(buildA)
+ runCmd((". %s/oe-init-build-env %s && " % (get_bb_var('COREBASE'), buildA)) + 'bitbake ' + ' '.join(map(str, primary_targets)), shell=True, executable='/bin/bash')
+ self.hardlink_tree(os.path.join(buildA, 'sstate-cache'), os.path.join(self.builddir, 'sstate-cache-buildA'))
+ shutil.rmtree(buildA)
+
+ failed_rebuild = []
+ failed_cleansstate = []
+ for target in rebuild_targets:
+ self.configure_builddir(buildB)
+ self.hardlink_tree(os.path.join(self.builddir, 'sstate-cache-buildA'), os.path.join(buildB, 'sstate-cache'))
+
+ result_cleansstate = runCmd((". %s/oe-init-build-env %s && " % (get_bb_var('COREBASE'), buildB)) + 'bitbake -ccleansstate ' + target, ignore_status=True, shell=True, executable='/bin/bash')
+ if not result_cleansstate.status == 0:
+ failed_cleansstate.append(target)
+ shutil.rmtree(buildB)
+ continue
+
+ result_build = runCmd((". %s/oe-init-build-env %s && " % (get_bb_var('COREBASE'), buildB)) + 'bitbake ' + target, ignore_status=True, shell=True, executable='/bin/bash')
+ if not result_build.status == 0:
+ failed_rebuild.append(target)
+
+ shutil.rmtree(buildB)
+
+ self.assertFalse(failed_rebuild, msg="The following recipes have failed to rebuild: %s" % ' '.join(map(str, failed_rebuild)))
+ self.assertFalse(failed_cleansstate, msg="The following recipes have failed cleansstate(all others have passed both cleansstate and rebuild from sstate tests): %s" % ' '.join(map(str, failed_cleansstate)))
+
+ def test_sstate_relocation(self):
+ self.run_test_sstate_rebuild(['core-image-sato-sdk'], relocate=True, rebuild_dependencies=True)
+
+ def test_sstate_rebuild(self):
+ self.run_test_sstate_rebuild(['core-image-sato-sdk'], relocate=False, rebuild_dependencies=True)
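For orientation, a minimal sketch of what get_dep_targets() above relies on: 'bitbake -g <targets>' writes a pn-buildlist file into the build directory with one recipe name per line, and the helper simply returns those lines. The file contents below are invented for illustration.

    # Hypothetical pn-buildlist contents; real output depends on the targets built.
    import os
    import tempfile

    with tempfile.TemporaryDirectory() as builddir:
        with open(os.path.join(builddir, 'pn-buildlist'), 'w') as f:
            f.write('core-image-sato-sdk\nzlib\nbusybox\n')
        with open(os.path.join(builddir, 'pn-buildlist')) as f:
            dep_targets = f.read().splitlines()
        assert dep_targets == ['core-image-sato-sdk', 'zlib', 'busybox']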
diff --git a/meta/lib/oeqa/selftest/cases/archiver.py b/meta/lib/oeqa/selftest/cases/archiver.py
new file mode 100644
index 0000000000..6bd0e06ec4
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/archiver.py
@@ -0,0 +1,197 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import glob
+from oeqa.utils.commands import bitbake, get_bb_vars
+from oeqa.selftest.case import OESelftestTestCase
+
+class Archiver(OESelftestTestCase):
+
+ def test_archiver_allows_to_filter_on_recipe_name(self):
+ """
+ Summary: The archiver should offer the possibility to filter on the recipe. (#6929)
+ Expected: 1. Included recipe (busybox) should be included
+ 2. Excluded recipe (zlib) should be excluded
+ Product: oe-core
+ Author: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
+ AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
+ """
+
+ include_recipe = 'busybox'
+ exclude_recipe = 'zlib'
+
+ features = 'INHERIT += "archiver"\n'
+ features += 'ARCHIVER_MODE[src] = "original"\n'
+ features += 'COPYLEFT_PN_INCLUDE = "%s"\n' % include_recipe
+ features += 'COPYLEFT_PN_EXCLUDE = "%s"\n' % exclude_recipe
+ self.write_config(features)
+
+ bitbake('-c clean %s %s' % (include_recipe, exclude_recipe))
+ bitbake("-c deploy_archives %s %s" % (include_recipe, exclude_recipe))
+
+ bb_vars = get_bb_vars(['DEPLOY_DIR_SRC', 'TARGET_SYS'])
+ src_path = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['TARGET_SYS'])
+
+ # Check that include_recipe was included
+ included_present = len(glob.glob(src_path + '/%s-*' % include_recipe))
+ self.assertTrue(included_present, 'Recipe %s was not included.' % include_recipe)
+
+ # Check that exclude_recipe was excluded
+ excluded_present = len(glob.glob(src_path + '/%s-*' % exclude_recipe))
+ self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % exclude_recipe)
+
+ def test_archiver_filters_by_type(self):
+ """
+ Summary: The archiver is documented to filter on the recipe type.
+ Expected: 1. included recipe type (target) should be included
+ 2. other types should be excluded
+ Product: oe-core
+ Author: André Draszik <adraszik@tycoint.com>
+ """
+
+ target_recipe = 'initscripts'
+ native_recipe = 'zlib-native'
+
+ features = 'INHERIT += "archiver"\n'
+ features += 'ARCHIVER_MODE[src] = "original"\n'
+ features += 'COPYLEFT_RECIPE_TYPES = "target"\n'
+ self.write_config(features)
+
+ bitbake('-c clean %s %s' % (target_recipe, native_recipe))
+ bitbake("%s -c deploy_archives %s" % (target_recipe, native_recipe))
+
+ bb_vars = get_bb_vars(['DEPLOY_DIR_SRC', 'TARGET_SYS', 'BUILD_SYS'])
+ src_path_target = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['TARGET_SYS'])
+ src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS'])
+
+ # Check that target_recipe was included
+ included_present = len(glob.glob(src_path_target + '/%s-*' % target_recipe))
+ self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipe)
+
+ # Check that native_recipe was excluded
+ excluded_present = len(glob.glob(src_path_native + '/%s-*' % native_recipe))
+ self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipe)
+
+ def test_archiver_filters_by_type_and_name(self):
+ """
+ Summary: Test that the archiver archives by recipe type, taking the
+ recipe name into account.
+ Expected: 1. included recipe type (target) should be included
+ 2. other types should be excluded
+ 3. recipe by name should be included / excluded,
+ overriding previous decision by type
+ Product: oe-core
+ Author: André Draszik <adraszik@tycoint.com>
+ """
+
+ target_recipes = [ 'initscripts', 'zlib' ]
+ native_recipes = [ 'update-rc.d-native', 'zlib-native' ]
+
+ features = 'INHERIT += "archiver"\n'
+ features += 'ARCHIVER_MODE[src] = "original"\n'
+ features += 'COPYLEFT_RECIPE_TYPES = "target"\n'
+ features += 'COPYLEFT_PN_INCLUDE = "%s"\n' % native_recipes[1]
+ features += 'COPYLEFT_PN_EXCLUDE = "%s"\n' % target_recipes[1]
+ self.write_config(features)
+
+ bitbake('-c clean %s %s' % (' '.join(target_recipes), ' '.join(native_recipes)))
+ bitbake('-c deploy_archives %s %s' % (' '.join(target_recipes), ' '.join(native_recipes)))
+
+ bb_vars = get_bb_vars(['DEPLOY_DIR_SRC', 'TARGET_SYS', 'BUILD_SYS'])
+ src_path_target = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['TARGET_SYS'])
+ src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS'])
+
+ # Check that target_recipe[0] and native_recipes[1] were included
+ included_present = len(glob.glob(src_path_target + '/%s-*' % target_recipes[0]))
+ self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipes[0])
+
+ included_present = len(glob.glob(src_path_native + '/%s-*' % native_recipes[1]))
+ self.assertTrue(included_present, 'Recipe %s was not included.' % native_recipes[1])
+
+ # Check that native_recipes[0] and target_recipes[1] were excluded
+ excluded_present = len(glob.glob(src_path_native + '/%s-*' % native_recipes[0]))
+ self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipes[0])
+
+ excluded_present = len(glob.glob(src_path_target + '/%s-*' % target_recipes[1]))
+ self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % target_recipes[1])
+
+
+
+ def test_archiver_srpm_mode(self):
+ """
+ Test that in srpm mode, the added recipe dependencies at least exist/work [YOCTO #11121]
+ """
+
+ features = 'INHERIT += "archiver"\n'
+ features += 'ARCHIVER_MODE[srpm] = "1"\n'
+ self.write_config(features)
+
+ bitbake('-n core-image-sato')
+
+ def _test_archiver_mode(self, mode, target_file_name, extra_config=None):
+ target = "selftest-ed"
+
+ features = 'INHERIT += "archiver"\n'
+ features += 'ARCHIVER_MODE[src] = "%s"\n' % (mode)
+ if extra_config:
+ features += extra_config
+ self.write_config(features)
+
+ bitbake('-c clean %s' % (target))
+ bitbake('-c deploy_archives %s' % (target))
+
+ bb_vars = get_bb_vars(['DEPLOY_DIR_SRC', 'TARGET_SYS'])
+ glob_str = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['TARGET_SYS'], '%s-*' % (target))
+ glob_result = glob.glob(glob_str)
+ self.assertTrue(glob_result, 'Missing archiver directory for %s' % (target))
+
+ archive_path = os.path.join(glob_result[0], target_file_name)
+ self.assertTrue(os.path.exists(archive_path), 'Missing archive file %s' % (target_file_name))
+
+ def test_archiver_mode_original(self):
+ """
+ Test that the archiver works with `ARCHIVER_MODE[src] = "original"`.
+ """
+
+ self._test_archiver_mode('original', 'ed-1.14.1.tar.lz')
+
+ def test_archiver_mode_patched(self):
+ """
+ Test that the archiver works with `ARCHIVER_MODE[src] = "patched"`.
+ """
+
+ self._test_archiver_mode('patched', 'selftest-ed-1.14.1-r0-patched.tar.gz')
+
+ def test_archiver_mode_configured(self):
+ """
+ Test that the archiver works with `ARCHIVER_MODE[src] = "configured"`.
+ """
+
+ self._test_archiver_mode('configured', 'selftest-ed-1.14.1-r0-configured.tar.gz')
+
+ def test_archiver_mode_recipe(self):
+ """
+ Test that the archiver works with `ARCHIVER_MODE[recipe] = "1"`.
+ """
+
+ self._test_archiver_mode('patched', 'selftest-ed-1.14.1-r0-recipe.tar.gz',
+ 'ARCHIVER_MODE[recipe] = "1"\n')
+
+ def test_archiver_mode_diff(self):
+ """
+ Test that the archiver works with `ARCHIVER_MODE[diff] = "1"`.
+ Exclusions controlled by `ARCHIVER_MODE[diff-exclude]` are not yet tested.
+ """
+
+ self._test_archiver_mode('patched', 'selftest-ed-1.14.1-r0-diff.gz',
+ 'ARCHIVER_MODE[diff] = "1"\n')
+
+ def test_archiver_mode_dumpdata(self):
+ """
+ Test that the archiver works with `ARCHIVER_MODE[dumpdata] = "1"`.
+ """
+
+ self._test_archiver_mode('patched', 'selftest-ed-1.14.1-r0-showdata.dump',
+ 'ARCHIVER_MODE[dumpdata] = "1"\n')
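As a rough sketch of what _test_archiver_mode() above asserts: the deployed archive is looked up under DEPLOY_DIR_SRC/TARGET_SYS/<target>-*/<target_file_name>. The concrete directory names below are assumptions for illustration, not values queried from bitbake.

    import os

    deploy_dir_src = 'tmp/deploy/sources'   # assumed DEPLOY_DIR_SRC
    target_sys = 'core2-64-poky-linux'      # assumed TARGET_SYS
    archive = os.path.join(deploy_dir_src, target_sys,
                           'selftest-ed-1.14.1-r0',                 # matched by the 'selftest-ed-*' glob
                           'selftest-ed-1.14.1-r0-patched.tar.gz')  # target_file_name
    # The test passes if a file exists at a path of this shape.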
diff --git a/meta/lib/oeqa/selftest/cases/bblayers.py b/meta/lib/oeqa/selftest/cases/bblayers.py
new file mode 100644
index 0000000000..f131d9856c
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/bblayers.py
@@ -0,0 +1,118 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import re
+
+import oeqa.utils.ftools as ftools
+from oeqa.utils.commands import runCmd, get_bb_var, get_bb_vars
+
+from oeqa.selftest.case import OESelftestTestCase
+
+class BitbakeLayers(OESelftestTestCase):
+
+ def test_bitbakelayers_showcrossdepends(self):
+ result = runCmd('bitbake-layers show-cross-depends')
+ self.assertIn('aspell', result.output)
+
+ def test_bitbakelayers_showlayers(self):
+ result = runCmd('bitbake-layers show-layers')
+ self.assertIn('meta-selftest', result.output)
+
+ def test_bitbakelayers_showappends(self):
+ recipe = "xcursor-transparent-theme"
+ bb_file = self.get_recipe_basename(recipe)
+ result = runCmd('bitbake-layers show-appends')
+ self.assertIn(bb_file, result.output)
+
+ def test_bitbakelayers_showoverlayed(self):
+ result = runCmd('bitbake-layers show-overlayed')
+ self.assertIn('aspell', result.output)
+
+ def test_bitbakelayers_flatten(self):
+ recipe = "xcursor-transparent-theme"
+ recipe_path = "recipes-graphics/xcursor-transparent-theme"
+ recipe_file = self.get_recipe_basename(recipe)
+ testoutdir = os.path.join(self.builddir, 'test_bitbakelayers_flatten')
+ self.assertFalse(os.path.isdir(testoutdir), msg = "test_bitbakelayers_flatten should not exist at this point in time")
+ self.track_for_cleanup(testoutdir)
+ result = runCmd('bitbake-layers flatten %s' % testoutdir)
+ bb_file = os.path.join(testoutdir, recipe_path, recipe_file)
+ self.assertTrue(os.path.isfile(bb_file), msg = "Cannot find xcursor-transparent-theme_0.1.1.bb in the test_bitbakelayers_flatten local dir.")
+ contents = ftools.read_file(bb_file)
+ find_in_contents = re.search("##### bbappended from meta-selftest #####\n(.*\n)*include test_recipe.inc", contents)
+ self.assertTrue(find_in_contents, msg = "Flattening layers did not work. bitbake-layers flatten output: %s" % result.output)
+
+ def test_bitbakelayers_add_remove(self):
+ test_layer = os.path.join(get_bb_var('COREBASE'), 'meta-skeleton')
+ result = runCmd('bitbake-layers show-layers')
+ self.assertNotIn('meta-skeleton', result.output, "This test cannot run with meta-skeleton in bblayers.conf. bitbake-layers show-layers output: %s" % result.output)
+ result = runCmd('bitbake-layers add-layer %s' % test_layer)
+ result = runCmd('bitbake-layers show-layers')
+ self.assertIn('meta-skeleton', result.output, msg = "Something went wrong. meta-skeleton layer was not added to conf/bblayers.conf. bitbake-layers show-layers output: %s" % result.output)
+ result = runCmd('bitbake-layers remove-layer %s' % test_layer)
+ result = runCmd('bitbake-layers show-layers')
+ self.assertNotIn('meta-skeleton', result.output, msg = "meta-skeleton should have been removed at this step. bitbake-layers show-layers output: %s" % result.output)
+ result = runCmd('bitbake-layers add-layer %s' % test_layer)
+ result = runCmd('bitbake-layers show-layers')
+ self.assertIn('meta-skeleton', result.output, msg = "Something went wrong. meta-skeleton layer was not added to conf/bblayers.conf. bitbake-layers show-layers output: %s" % result.output)
+ result = runCmd('bitbake-layers remove-layer */meta-skeleton')
+ result = runCmd('bitbake-layers show-layers')
+ self.assertNotIn('meta-skeleton', result.output, msg = "meta-skeleton should have been removed at this step. bitbake-layers show-layers output: %s" % result.output)
+
+ def test_bitbakelayers_showrecipes(self):
+ result = runCmd('bitbake-layers show-recipes')
+ self.assertIn('aspell:', result.output)
+ self.assertIn('mtd-utils:', result.output)
+ self.assertIn('core-image-minimal:', result.output)
+ result = runCmd('bitbake-layers show-recipes mtd-utils')
+ self.assertIn('mtd-utils:', result.output)
+ self.assertNotIn('aspell:', result.output)
+ result = runCmd('bitbake-layers show-recipes -i image')
+ self.assertIn('core-image-minimal', result.output)
+ self.assertNotIn('mtd-utils:', result.output)
+ result = runCmd('bitbake-layers show-recipes -i cmake,pkgconfig')
+ self.assertIn('libproxy:', result.output)
+ self.assertNotIn('mtd-utils:', result.output) # doesn't inherit either
+ self.assertNotIn('wget:', result.output) # doesn't inherit cmake
+ self.assertNotIn('waffle:', result.output) # doesn't inherit pkgconfig
+ result = runCmd('bitbake-layers show-recipes -i nonexistentclass', ignore_status=True)
+ self.assertNotEqual(result.status, 0, 'bitbake-layers show-recipes -i nonexistentclass should have failed')
+ self.assertIn('ERROR:', result.output)
+
+ def test_bitbakelayers_createlayer(self):
+ priority = 10
+ layername = 'test-bitbakelayer-layercreate'
+ layerpath = os.path.join(self.builddir, layername)
+ self.assertFalse(os.path.exists(layerpath), '%s should not exist at this point in time' % layerpath)
+ result = runCmd('bitbake-layers create-layer --priority=%d %s' % (priority, layerpath))
+ self.track_for_cleanup(layerpath)
+ result = runCmd('bitbake-layers add-layer %s' % layerpath)
+ self.add_command_to_tearDown('bitbake-layers remove-layer %s' % layerpath)
+ result = runCmd('bitbake-layers show-layers')
+ find_in_contents = re.search(re.escape(layername) + r'\s+' + re.escape(layerpath) + r'\s+' + re.escape(str(priority)), result.output)
+ self.assertTrue(find_in_contents, "%s not found in layers\n%s" % (layername, result.output))
+
+ layervars = ['BBFILE_PRIORITY', 'BBFILE_PATTERN', 'LAYERDEPENDS', 'LAYERSERIES_COMPAT']
+ bb_vars = get_bb_vars(['BBFILE_COLLECTIONS'] + ['%s_%s' % (v, layername) for v in layervars])
+
+ for v in layervars:
+ varname = '%s_%s' % (v, layername)
+ self.assertIsNotNone(bb_vars[varname], "%s not found" % varname)
+
+ find_in_contents = re.search(r'(^|\s)' + re.escape(layername) + r'($|\s)', bb_vars['BBFILE_COLLECTIONS'])
+ self.assertTrue(find_in_contents, "%s not in BBFILE_COLLECTIONS" % layername)
+
+ self.assertEqual(bb_vars['BBFILE_PRIORITY_%s' % layername], str(priority), 'BBFILE_PRIORITY_%s != %d' % (layername, priority))
+
+ def get_recipe_basename(self, recipe):
+ recipe_file = ""
+ result = runCmd("bitbake-layers show-recipes -f %s" % recipe)
+ for line in result.output.splitlines():
+ if recipe in line:
+ recipe_file = line
+ break
+
+ self.assertTrue(os.path.isfile(recipe_file), msg = "Can't find recipe file for %s" % recipe)
+ return os.path.basename(recipe_file)
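For reference, a small self-contained sketch of the pattern test_bitbakelayers_createlayer uses to locate the newly created layer in 'bitbake-layers show-layers' output; the output row below is hypothetical.

    import re

    layername = 'test-bitbakelayer-layercreate'
    layerpath = '/home/user/build/test-bitbakelayer-layercreate'   # hypothetical path
    priority = 10
    row = '%-35s %-60s %d' % (layername, layerpath, priority)      # hypothetical show-layers row
    pattern = re.escape(layername) + r'\s+' + re.escape(layerpath) + r'\s+' + re.escape(str(priority))
    assert re.search(pattern, row)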
diff --git a/meta/lib/oeqa/selftest/cases/bbtests.py b/meta/lib/oeqa/selftest/cases/bbtests.py
new file mode 100644
index 0000000000..dc423ec439
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/bbtests.py
@@ -0,0 +1,299 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import re
+
+import oeqa.utils.ftools as ftools
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
+
+from oeqa.selftest.case import OESelftestTestCase
+
+class BitbakeTests(OESelftestTestCase):
+
+ def getline(self, res, line):
+ for l in res.output.split('\n'):
+ if line in l:
+ return l
+
+ # Test bitbake can run from the <builddir>/conf directory
+ def test_run_bitbake_from_dir_1(self):
+ os.chdir(os.path.join(self.builddir, 'conf'))
+ self.assertEqual(bitbake('-e').status, 0, msg = "bitbake couldn't run from \"conf\" dir")
+
+ # Test bitbake can run from the <builddir>'s parent directory
+ def test_run_bitbake_from_dir_2(self):
+ my_env = os.environ.copy()
+ my_env['BBPATH'] = my_env['BUILDDIR']
+ os.chdir(os.path.dirname(os.environ['BUILDDIR']))
+ self.assertEqual(bitbake('-e', env=my_env).status, 0, msg = "bitbake couldn't run from builddir's parent directory")
+
+ # Test bitbake can run from some other random system location (we use /tmp/)
+ def test_run_bitbake_from_dir_3(self):
+ my_env = os.environ.copy()
+ my_env['BBPATH'] = my_env['BUILDDIR']
+ os.chdir("/tmp/")
+ self.assertEqual(bitbake('-e', env=my_env).status, 0, msg = "bitbake couldn't run from /tmp/")
+
+
+ def test_event_handler(self):
+ self.write_config("INHERIT += \"test_events\"")
+ result = bitbake('m4-native')
+ find_build_started = re.search(r"NOTE: Test for bb\.event\.BuildStarted(\n.*)*NOTE: Executing.*Tasks", result.output)
+ find_build_completed = re.search(r"Tasks Summary:.*(\n.*)*NOTE: Test for bb\.event\.BuildCompleted", result.output)
+ self.assertTrue(find_build_started, msg = "Match failed in:\n%s" % result.output)
+ self.assertTrue(find_build_completed, msg = "Match failed in:\n%s" % result.output)
+ self.assertNotIn('Test for bb.event.InvalidEvent', result.output)
+
+ def test_local_sstate(self):
+ bitbake('m4-native')
+ bitbake('m4-native -cclean')
+ result = bitbake('m4-native')
+ find_setscene = re.search("m4-native.*do_.*_setscene", result.output)
+ self.assertTrue(find_setscene, msg = "No \"m4-native.*do_.*_setscene\" message found during bitbake m4-native. bitbake output: %s" % result.output )
+
+ def test_bitbake_invalid_recipe(self):
+ result = bitbake('-b asdf', ignore_status=True)
+ self.assertTrue("ERROR: Unable to find any recipe file matching 'asdf'" in result.output, msg = "Though asdf recipe doesn't exist, bitbake didn't output any err. message. bitbake output: %s" % result.output)
+
+ def test_bitbake_invalid_target(self):
+ result = bitbake('asdf', ignore_status=True)
+ self.assertIn("ERROR: Nothing PROVIDES 'asdf'", result.output)
+
+ def test_warnings_errors(self):
+ result = bitbake('-b asdf', ignore_status=True)
+ find_warnings = re.search("Summary: There w.{2,3}? [1-9][0-9]* WARNING messages* shown", result.output)
+ find_errors = re.search("Summary: There w.{2,3}? [1-9][0-9]* ERROR messages* shown", result.output)
+ self.assertTrue(find_warnings, msg="Did not find the number of warnings at the end of the build:\n" + result.output)
+ self.assertTrue(find_errors, msg="Did not find the number of errors at the end of the build:\n" + result.output)
+
+ def test_invalid_patch(self):
+ # This patch should fail to apply.
+ self.write_recipeinc('man-db', 'FILESEXTRAPATHS_prepend := "${THISDIR}/files:"\nSRC_URI += "file://0001-Test-patch-here.patch"')
+ self.write_config("INHERIT_remove = \"report-error\"")
+ result = bitbake('man-db -c patch', ignore_status=True)
+ self.delete_recipeinc('man-db')
+ bitbake('-cclean man-db')
+ found = False
+ for l in result.output.split('\n'):
+ if l.startswith("ERROR:") and "failed" in l and "do_patch" in l:
+ found = l
+ self.assertTrue(found and found.startswith("ERROR:"), msg = "Incorrectly formed patch application didn't fail. bitbake output: %s" % result.output)
+
+ def test_force_task_1(self):
+ # test 1 from bug 5875
+ test_recipe = 'zlib'
+ test_data = "Microsoft Made No Profit From Anyone's Zunes Yo"
+ bb_vars = get_bb_vars(['D', 'PKGDEST', 'mandir'], test_recipe)
+ image_dir = bb_vars['D']
+ pkgsplit_dir = bb_vars['PKGDEST']
+ man_dir = bb_vars['mandir']
+
+ bitbake('-c clean %s' % test_recipe)
+ bitbake('-c package -f %s' % test_recipe)
+ self.add_command_to_tearDown('bitbake -c clean %s' % test_recipe)
+
+ man_file = os.path.join(image_dir + man_dir, 'man3/zlib.3')
+ ftools.append_file(man_file, test_data)
+ bitbake('-c package -f %s' % test_recipe)
+
+ man_split_file = os.path.join(pkgsplit_dir, 'zlib-doc' + man_dir, 'man3/zlib.3')
+ man_split_content = ftools.read_file(man_split_file)
+ self.assertIn(test_data, man_split_content, 'The man file has not changed in packages-split.')
+
+ ret = bitbake(test_recipe)
+ self.assertIn('task do_package_write_rpm:', ret.output, 'Task do_package_write_rpm was not re-executed.')
+
+ def test_force_task_2(self):
+ # test 2 from bug 5875
+ test_recipe = 'zlib'
+
+ bitbake(test_recipe)
+ self.add_command_to_tearDown('bitbake -c clean %s' % test_recipe)
+
+ result = bitbake('-C compile %s' % test_recipe)
+ look_for_tasks = ['do_compile:', 'do_install:', 'do_populate_sysroot:', 'do_package:']
+ for task in look_for_tasks:
+ self.assertIn(task, result.output, msg="Couldn't find %s task." % task)
+
+ def test_bitbake_g(self):
+ recipe = 'base-files'
+ result = bitbake('-g %s' % recipe)
+ for f in ['pn-buildlist', 'task-depends.dot']:
+ self.addCleanup(os.remove, f)
+ self.assertTrue('Task dependencies saved to \'task-depends.dot\'' in result.output, msg = "No task dependency \"task-depends.dot\" file was generated for the given task target. bitbake output: %s" % result.output)
+ self.assertIn(recipe, ftools.read_file(os.path.join(self.builddir, 'task-depends.dot')))
+
+ def test_image_manifest(self):
+ bitbake('core-image-minimal')
+ bb_vars = get_bb_vars(["DEPLOY_DIR_IMAGE", "IMAGE_LINK_NAME"], "core-image-minimal")
+ deploydir = bb_vars["DEPLOY_DIR_IMAGE"]
+ imagename = bb_vars["IMAGE_LINK_NAME"]
+ manifest = os.path.join(deploydir, imagename + ".manifest")
+ self.assertTrue(os.path.islink(manifest), msg="No manifest file created for image. It should have been created in %s" % manifest)
+
+ def test_invalid_recipe_src_uri(self):
+ data = 'SRC_URI = "file://invalid"'
+ self.write_recipeinc('man-db', data)
+ self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\"
+SSTATE_DIR = \"${TOPDIR}/download-selftest\"
+INHERIT_remove = \"report-error\"
+""")
+ self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
+
+ bitbake('-ccleanall man-db')
+ result = bitbake('-c fetch man-db', ignore_status=True)
+ bitbake('-ccleanall man-db')
+ self.delete_recipeinc('man-db')
+ self.assertEqual(result.status, 1, msg="Command succeded when it should have failed. bitbake output: %s" % result.output)
+ self.assertIn('Fetcher failure: Unable to find file file://invalid anywhere. The paths that were searched were:', result.output)
+ line = self.getline(result, 'Fetcher failure for URL: \'file://invalid\'. Unable to fetch URL from any source.')
+ self.assertTrue(line and line.startswith("ERROR:"), msg = "\"invalid\" file \
+doesn't exist, yet fetcher didn't report any error. bitbake output: %s" % result.output)
+
+ def test_rename_downloaded_file(self):
+ # TODO unique dldir instead of using cleanall
+ # TODO: need to set sstatedir?
+ self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\"
+SSTATE_DIR = \"${TOPDIR}/download-selftest\"
+""")
+ self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
+
+ data = 'SRC_URI = "${GNU_MIRROR}/aspell/aspell-${PV}.tar.gz;downloadfilename=test-aspell.tar.gz"'
+ self.write_recipeinc('aspell', data)
+ result = bitbake('-f -c fetch aspell', ignore_status=True)
+ self.delete_recipeinc('aspell')
+ self.assertEqual(result.status, 0, msg = "Couldn't fetch aspell. %s" % result.output)
+ dl_dir = get_bb_var("DL_DIR")
+ self.assertTrue(os.path.isfile(os.path.join(dl_dir, 'test-aspell.tar.gz')), msg = "File rename failed. No corresponding test-aspell.tar.gz file found under %s" % dl_dir)
+ self.assertTrue(os.path.isfile(os.path.join(dl_dir, 'test-aspell.tar.gz.done')), "File rename failed. No corresponding test-aspell.tar.gz.done file found under %s" % dl_dir)
+
+ def test_environment(self):
+ self.write_config("TEST_ENV=\"localconf\"")
+ result = runCmd('bitbake -e | grep TEST_ENV=')
+ self.assertIn('localconf', result.output)
+
+ def test_dry_run(self):
+ result = runCmd('bitbake -n m4-native')
+ self.assertEqual(0, result.status, "bitbake dry run didn't run as expected. %s" % result.output)
+
+ def test_just_parse(self):
+ result = runCmd('bitbake -p')
+ self.assertEqual(0, result.status, "errors encountered when parsing recipes. %s" % result.output)
+
+ def test_version(self):
+ result = runCmd('bitbake -s | grep wget')
+ find = re.search(r"wget *:([0-9a-zA-Z\.\-]+)", result.output)
+ self.assertTrue(find, "No version returned for searched recipe. bitbake output: %s" % result.output)
+
+ def test_prefile(self):
+ preconf = os.path.join(self.builddir, 'conf/prefile.conf')
+ self.track_for_cleanup(preconf)
+ ftools.write_file(preconf ,"TEST_PREFILE=\"prefile\"")
+ result = runCmd('bitbake -r conf/prefile.conf -e | grep TEST_PREFILE=')
+ self.assertIn('prefile', result.output)
+ self.write_config("TEST_PREFILE=\"localconf\"")
+ result = runCmd('bitbake -r conf/prefile.conf -e | grep TEST_PREFILE=')
+ self.assertIn('localconf', result.output)
+
+ def test_postfile(self):
+ postconf = os.path.join(self.builddir, 'conf/postfile.conf')
+ self.track_for_cleanup(postconf)
+ ftools.write_file(postconf , "TEST_POSTFILE=\"postfile\"")
+ self.write_config("TEST_POSTFILE=\"localconf\"")
+ result = runCmd('bitbake -R conf/postfile.conf -e | grep TEST_POSTFILE=')
+ self.assertIn('postfile', result.output)
+
+ def test_checkuri(self):
+ result = runCmd('bitbake -c checkuri m4')
+ self.assertEqual(0, result.status, msg = "\"checkuri\" task was not executed. bitbake output: %s" % result.output)
+
+ def test_continue(self):
+ self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\"
+SSTATE_DIR = \"${TOPDIR}/download-selftest\"
+INHERIT_remove = \"report-error\"
+""")
+ self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
+ self.write_recipeinc('man-db',"\ndo_fail_task () {\nexit 1 \n}\n\naddtask do_fail_task before do_fetch\n" )
+ runCmd('bitbake -c cleanall man-db xcursor-transparent-theme')
+ result = runCmd('bitbake -c unpack -k man-db xcursor-transparent-theme', ignore_status=True)
+ errorpos = result.output.find('ERROR: Function failed: do_fail_task')
+ manver = re.search("NOTE: recipe xcursor-transparent-theme-(.*?): task do_unpack: Started", result.output)
+ continuepos = result.output.find('NOTE: recipe xcursor-transparent-theme-%s: task do_unpack: Started' % manver.group(1))
+ self.assertLess(errorpos, continuepos, msg = "bitbake didn't continue past do_fail_task. bitbake output: %s" % result.output)
+
+ def test_non_gplv3(self):
+ self.write_config('INCOMPATIBLE_LICENSE = "GPLv3"')
+ result = bitbake('selftest-ed', ignore_status=True)
+ self.assertEqual(result.status, 0, "Bitbake failed, exit code %s, output %s" % (result.status, result.output))
+ lic_dir = get_bb_var('LICENSE_DIRECTORY')
+ self.assertFalse(os.path.isfile(os.path.join(lic_dir, 'selftest-ed/generic_GPLv3')))
+ self.assertTrue(os.path.isfile(os.path.join(lic_dir, 'selftest-ed/generic_GPLv2')))
+
+ def test_setscene_only(self):
+ """ Bitbake option to restore from sstate only within a build (i.e. execute no real tasks, only setscene)"""
+ test_recipe = 'ed'
+
+ bitbake(test_recipe)
+ bitbake('-c clean %s' % test_recipe)
+ ret = bitbake('--setscene-only %s' % test_recipe)
+
+ tasks = re.findall(r'task\s+(do_\S+):', ret.output)
+
+ for task in tasks:
+ self.assertIn('_setscene', task, 'A task different from _setscene ran: %s.\n'
+ 'Executed tasks were: %s' % (task, str(tasks)))
+
+ def test_skip_setscene(self):
+ test_recipe = 'ed'
+
+ bitbake(test_recipe)
+ bitbake('-c clean %s' % test_recipe)
+
+ ret = bitbake('--setscene-only %s' % test_recipe)
+ tasks = re.findall(r'task\s+(do_\S+):', ret.output)
+
+ for task in tasks:
+ self.assertIn('_setscene', task, 'A task different from _setscene ran: %s.\n'
+ 'Executed tasks were: %s' % (task, str(tasks)))
+
+ # Run without setscene. Should do nothing
+ ret = bitbake('--skip-setscene %s' % test_recipe)
+ tasks = re.findall(r'task\s+(do_\S+):', ret.output)
+
+ self.assertFalse(tasks, 'Tasks %s ran when they should not have' % (str(tasks)))
+
+ # Clean (leave sstate cache) and run with --skip-setscene. No setscene
+ # tasks should run
+ bitbake('-c clean %s' % test_recipe)
+
+ ret = bitbake('--skip-setscene %s' % test_recipe)
+ tasks = re.findall(r'task\s+(do_\S+):', ret.output)
+
+ for task in tasks:
+ self.assertNotIn('_setscene', task, 'A _setscene task ran: %s.\n'
+ 'Executed tasks were: %s' % (task, str(tasks)))
+
+ def test_bbappend_order(self):
+ """ Bitbake should bbappend to recipe in a predictable order """
+ test_recipe = 'ed'
+ bb_vars = get_bb_vars(['SUMMARY', 'PV'], test_recipe)
+ test_recipe_summary_before = bb_vars['SUMMARY']
+ test_recipe_pv = bb_vars['PV']
+ recipe_append_file = test_recipe + '_' + test_recipe_pv + '.bbappend'
+ expected_recipe_summary = test_recipe_summary_before
+
+ for i in range(5):
+ recipe_append_dir = test_recipe + '_test_' + str(i)
+ recipe_append_path = os.path.join(self.testlayer_path, 'recipes-test', recipe_append_dir, recipe_append_file)
+ os.mkdir(os.path.join(self.testlayer_path, 'recipes-test', recipe_append_dir))
+ feature = 'SUMMARY += "%s"\n' % i
+ ftools.write_file(recipe_append_path, feature)
+ expected_recipe_summary += ' %s' % i
+
+ self.add_command_to_tearDown('rm -rf %s' % os.path.join(self.testlayer_path, 'recipes-test',
+ test_recipe + '_test_*'))
+
+ test_recipe_summary_after = get_bb_var('SUMMARY', test_recipe)
+ self.assertEqual(expected_recipe_summary, test_recipe_summary_after)
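A hedged illustration of the accumulation test_bbappend_order expects: five bbappends, each adding SUMMARY += "<i>", applied in a stable order. The base SUMMARY value below is an assumption for the 'ed' recipe, not read from bitbake.

    base_summary = 'Line-oriented text editor'   # assumed original SUMMARY of 'ed'
    expected = base_summary + ''.join(' %s' % i for i in range(5))
    assert expected == 'Line-oriented text editor 0 1 2 3 4'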
diff --git a/meta/lib/oeqa/selftest/cases/binutils.py b/meta/lib/oeqa/selftest/cases/binutils.py
new file mode 100644
index 0000000000..821f52f5a8
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/binutils.py
@@ -0,0 +1,50 @@
+# SPDX-License-Identifier: MIT
+import os
+import sys
+import re
+import logging
+from oeqa.core.decorator import OETestTag
+from oeqa.core.case import OEPTestResultTestCase
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars
+
+def parse_values(content):
+ for i in content:
+ for v in ["PASS", "FAIL", "XPASS", "XFAIL", "UNRESOLVED", "UNSUPPORTED", "UNTESTED", "ERROR", "WARNING"]:
+ if i.startswith(v + ": "):
+ yield i[len(v) + 2:].strip(), v
+ break
+
+@OETestTag("toolchain-user", "toolchain-system")
+class BinutilsCrossSelfTest(OESelftestTestCase, OEPTestResultTestCase):
+ def test_binutils(self):
+ self.run_binutils("binutils")
+
+ def test_gas(self):
+ self.run_binutils("gas")
+
+ def test_ld(self):
+ self.run_binutils("ld")
+
+ def run_binutils(self, suite):
+ features = []
+ features.append('CHECK_TARGETS = "{0}"'.format(suite))
+ self.write_config("\n".join(features))
+
+ recipe = "binutils-cross-testsuite"
+ bb_vars = get_bb_vars(["B", "TARGET_SYS", "T"], recipe)
+ builddir, target_sys, tdir = bb_vars["B"], bb_vars["TARGET_SYS"], bb_vars["T"]
+
+ bitbake("{0} -c check".format(recipe))
+
+ sumspath = os.path.join(builddir, suite, "{0}.sum".format(suite))
+ if not os.path.exists(sumspath):
+ sumspath = os.path.join(builddir, suite, "testsuite", "{0}.sum".format(suite))
+ logpath = os.path.splitext(sumspath)[0] + ".log"
+
+ ptestsuite = "binutils-{}".format(suite) if suite != "binutils" else suite
+ self.ptest_section(ptestsuite, logfile = logpath)
+ with open(sumspath, "r") as f:
+ for test, result in parse_values(f):
+ self.ptest_result(ptestsuite, test, result)
+
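A minimal usage sketch of parse_values() above, fed with invented DejaGnu-style .sum lines rather than real binutils output:

    sample_sum = [
        'Running target unix',                      # ignored: no recognised result prefix
        'PASS: size (hypothetical test name)',
        'XFAIL: weak symbols (hypothetical)',
        'UNSUPPORTED: plugin tests (hypothetical)',
    ]
    results = dict(parse_values(sample_sum))
    # results == {'size (hypothetical test name)': 'PASS',
    #             'weak symbols (hypothetical)': 'XFAIL',
    #             'plugin tests (hypothetical)': 'UNSUPPORTED'}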
diff --git a/meta/lib/oeqa/selftest/cases/buildhistory.py b/meta/lib/oeqa/selftest/cases/buildhistory.py
new file mode 100644
index 0000000000..d865da6252
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/buildhistory.py
@@ -0,0 +1,50 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import re
+import datetime
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_vars
+
+
+class BuildhistoryBase(OESelftestTestCase):
+
+ def config_buildhistory(self, tmp_bh_location=False):
+ bb_vars = get_bb_vars(['USER_CLASSES', 'INHERIT'])
+ if (not 'buildhistory' in bb_vars['USER_CLASSES']) and (not 'buildhistory' in bb_vars['INHERIT']):
+ add_buildhistory_config = 'INHERIT += "buildhistory"\nBUILDHISTORY_COMMIT = "1"'
+ self.append_config(add_buildhistory_config)
+
+ if tmp_bh_location:
+ # Using a temporary buildhistory location for testing
+ tmp_bh_dir = os.path.join(self.builddir, "tmp_buildhistory_%s" % datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
+ buildhistory_dir_config = "BUILDHISTORY_DIR = \"%s\"" % tmp_bh_dir
+ self.append_config(buildhistory_dir_config)
+ self.track_for_cleanup(tmp_bh_dir)
+
+ def run_buildhistory_operation(self, target, global_config='', target_config='', change_bh_location=False, expect_error=False, error_regex=''):
+ if change_bh_location:
+ tmp_bh_location = True
+ else:
+ tmp_bh_location = False
+ self.config_buildhistory(tmp_bh_location)
+
+ self.append_config(global_config)
+ self.append_recipeinc(target, target_config)
+ bitbake("-cclean %s" % target)
+ result = bitbake(target, ignore_status=True)
+ self.remove_config(global_config)
+ self.remove_recipeinc(target, target_config)
+
+ if expect_error:
+ self.assertEqual(result.status, 1, msg="Error expected for global config '%s' and target config '%s'" % (global_config, target_config))
+ search_for_error = re.search(error_regex, result.output)
+ self.assertTrue(search_for_error, msg="Could not find desired error in output: %s (%s)" % (error_regex, result.output))
+ else:
+ self.assertEqual(result.status, 0, msg="Command 'bitbake %s' has failed unexpectedly: %s" % (target, result.output))
+
+ # No tests should be added to the base class.
+ # Please create a new class that inherits from this one, or use one of those already available, for adding tests.
diff --git a/meta/lib/oeqa/selftest/cases/buildoptions.py b/meta/lib/oeqa/selftest/cases/buildoptions.py
new file mode 100644
index 0000000000..e91f0bd18f
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/buildoptions.py
@@ -0,0 +1,198 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import re
+import glob as g
+import shutil
+import tempfile
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.selftest.cases.buildhistory import BuildhistoryBase
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
+import oeqa.utils.ftools as ftools
+
+class ImageOptionsTests(OESelftestTestCase):
+
+ def test_incremental_image_generation(self):
+ image_pkgtype = get_bb_var("IMAGE_PKGTYPE")
+ if image_pkgtype != 'rpm':
+ self.skipTest('Not using RPM as main package format')
+ bitbake("-c clean core-image-minimal")
+ self.write_config('INC_RPM_IMAGE_GEN = "1"')
+ self.append_config('IMAGE_FEATURES += "ssh-server-openssh"')
+ bitbake("core-image-minimal")
+ log_data_file = os.path.join(get_bb_var("WORKDIR", "core-image-minimal"), "temp/log.do_rootfs")
+ log_data_created = ftools.read_file(log_data_file)
+ incremental_created = re.search(r"Installing\s*:\s*packagegroup-core-ssh-openssh", log_data_created)
+ self.remove_config('IMAGE_FEATURES += "ssh-server-openssh"')
+ self.assertTrue(incremental_created, msg = "Match failed in:\n%s" % log_data_created)
+ bitbake("core-image-minimal")
+ log_data_removed = ftools.read_file(log_data_file)
+ incremental_removed = re.search(r"Erasing\s*:\s*packagegroup-core-ssh-openssh", log_data_removed)
+ self.assertTrue(incremental_removed, msg = "Match failed in:\n%s" % log_data_removed)
+
+ def test_ccache_tool(self):
+ bitbake("ccache-native")
+ bb_vars = get_bb_vars(['SYSROOT_DESTDIR', 'bindir'], 'ccache-native')
+ p = bb_vars['SYSROOT_DESTDIR'] + bb_vars['bindir'] + "/" + "ccache"
+ self.assertTrue(os.path.isfile(p), msg = "No ccache found (%s)" % p)
+ self.write_config('INHERIT += "ccache"')
+ self.add_command_to_tearDown('bitbake -c clean m4-native')
+ bitbake("m4-native -c clean")
+ bitbake("m4-native -f -c compile")
+ log_compile = os.path.join(get_bb_var("WORKDIR","m4-native"), "temp/log.do_compile")
+ with open(log_compile, "r") as f:
+ loglines = "".join(f.readlines())
+ self.assertIn("ccache", loglines, msg="No match for ccache in m4-native log.do_compile. For further details: %s" % log_compile)
+
+ def test_read_only_image(self):
+ distro_features = get_bb_var('DISTRO_FEATURES')
+ if not ('x11' in distro_features and 'opengl' in distro_features):
+ self.skipTest('core-image-sato requires x11 and opengl in distro features')
+ self.write_config('IMAGE_FEATURES += "read-only-rootfs"')
+ bitbake("core-image-sato")
+ # do_image will fail if there are any pending postinsts
+
+class DiskMonTest(OESelftestTestCase):
+
+ def test_stoptask_behavior(self):
+ self.write_config('BB_DISKMON_DIRS = "STOPTASKS,${TMPDIR},100000G,100K"')
+ res = bitbake("delay -c delay", ignore_status = True)
+ self.assertTrue('ERROR: No new tasks can be executed since the disk space monitor action is "STOPTASKS"!' in res.output, msg = "Tasks should have stopped. Disk monitor is set to STOPTASKS: %s" % res.output)
+ self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output))
+ self.write_config('BB_DISKMON_DIRS = "ABORT,${TMPDIR},100000G,100K"')
+ res = bitbake("delay -c delay", ignore_status = True)
+ self.assertTrue('ERROR: Immediately abort since the disk space monitor action is "ABORT"!' in res.output, "Tasks should have been aborted immediately. Disk monitor is set to ABORT: %s" % res.output)
+ self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output))
+ self.write_config('BB_DISKMON_DIRS = "WARN,${TMPDIR},100000G,100K"')
+ res = bitbake("delay -c delay")
+ self.assertTrue('WARNING: The free space' in res.output, msg = "A warning should have been displayed when the disk monitor is set to WARN: %s" % res.output)
+
+class SanityOptionsTest(OESelftestTestCase):
+ def getline(self, res, line):
+ for l in res.output.split('\n'):
+ if line in l:
+ return l
+
+ def test_options_warnqa_errorqa_switch(self):
+
+ self.write_config("INHERIT_remove = \"report-error\"")
+ if "packages-list" not in get_bb_var("ERROR_QA"):
+ self.append_config("ERROR_QA_append = \" packages-list\"")
+
+ self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"')
+ self.add_command_to_tearDown('bitbake -c clean xcursor-transparent-theme')
+ res = bitbake("xcursor-transparent-theme -f -c package", ignore_status=True)
+ self.delete_recipeinc('xcursor-transparent-theme')
+ line = self.getline(res, "QA Issue: xcursor-transparent-theme-dbg is listed in PACKAGES multiple times, this leads to packaging errors.")
+ self.assertTrue(line and line.startswith("ERROR:"), msg=res.output)
+ self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output))
+ self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"')
+ self.append_config('ERROR_QA_remove = "packages-list"')
+ self.append_config('WARN_QA_append = " packages-list"')
+ res = bitbake("xcursor-transparent-theme -f -c package")
+ self.delete_recipeinc('xcursor-transparent-theme')
+ line = self.getline(res, "QA Issue: xcursor-transparent-theme-dbg is listed in PACKAGES multiple times, this leads to packaging errors.")
+ self.assertTrue(line and line.startswith("WARNING:"), msg=res.output)
+
+ def test_layer_without_git_dir(self):
+ """
+ Summary: Test that layer git revisions are displayed and do not fail without a git repository
+ Expected: The build to be successful and without "fatal" errors
+ Product: oe-core
+ Author: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
+ AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
+ """
+
+ dirpath = tempfile.mkdtemp()
+
+ dummy_layer_name = 'meta-dummy'
+ dummy_layer_path = os.path.join(dirpath, dummy_layer_name)
+ dummy_layer_conf_dir = os.path.join(dummy_layer_path, 'conf')
+ os.makedirs(dummy_layer_conf_dir)
+ dummy_layer_conf_path = os.path.join(dummy_layer_conf_dir, 'layer.conf')
+
+ dummy_layer_content = 'BBPATH .= ":${LAYERDIR}"\n' \
+ 'BBFILES += "${LAYERDIR}/recipes-*/*/*.bb ${LAYERDIR}/recipes-*/*/*.bbappend"\n' \
+ 'BBFILE_COLLECTIONS += "%s"\n' \
+ 'BBFILE_PATTERN_%s = "^${LAYERDIR}/"\n' \
+ 'BBFILE_PRIORITY_%s = "6"\n' % (dummy_layer_name, dummy_layer_name, dummy_layer_name)
+
+ ftools.write_file(dummy_layer_conf_path, dummy_layer_content)
+
+ bblayers_conf = 'BBLAYERS += "%s"\n' % dummy_layer_path
+ self.write_bblayers_config(bblayers_conf)
+
+ test_recipe = 'ed'
+
+ ret = bitbake('-n %s' % test_recipe)
+
+ err = 'fatal: Not a git repository'
+
+ shutil.rmtree(dirpath)
+
+ self.assertNotIn(err, ret.output)
+
+
+class BuildhistoryTests(BuildhistoryBase):
+
+ def test_buildhistory_basic(self):
+ self.run_buildhistory_operation('xcursor-transparent-theme')
+ self.assertTrue(os.path.isdir(get_bb_var('BUILDHISTORY_DIR')), "buildhistory dir was not created.")
+
+ def test_buildhistory_buildtime_pr_backwards(self):
+ target = 'xcursor-transparent-theme'
+ error = "ERROR:.*QA Issue: Package version for package %s went backwards which would break package feeds \(from .*-r1.* to .*-r0.*\)" % target
+ self.run_buildhistory_operation(target, target_config="PR = \"r1\"", change_bh_location=True)
+ self.run_buildhistory_operation(target, target_config="PR = \"r0\"", change_bh_location=False, expect_error=True, error_regex=error)
+
+class ArchiverTest(OESelftestTestCase):
+ def test_arch_work_dir_and_export_source(self):
+ """
+ Test for archiving the work directory and exporting the source files.
+ """
+ self.write_config("INHERIT += \"archiver\"\nARCHIVER_MODE[src] = \"original\"\nARCHIVER_MODE[srpm] = \"1\"")
+ res = bitbake("xcursor-transparent-theme", ignore_status=True)
+ self.assertEqual(res.status, 0, "\nCouldn't build xcursor-transparent-theme.\nbitbake output: %s" % res.output)
+ deploy_dir_src = get_bb_var('DEPLOY_DIR_SRC')
+ pkgs_path = g.glob(str(deploy_dir_src) + "/allarch*/xcurs*")
+ src_file_glob = str(pkgs_path[0]) + "/xcursor*.src.rpm"
+ tar_file_glob = str(pkgs_path[0]) + "/xcursor*.tar.gz"
+ self.assertTrue((g.glob(src_file_glob) and g.glob(tar_file_glob)), "Couldn't find .src.rpm and .tar.gz files under %s/allarch*/xcursor*" % deploy_dir_src)
+
+class ToolchainOptions(OESelftestTestCase):
+ def test_toolchain_fortran(self):
+ """
+ Test that Fortran works by building a Hello, World binary.
+ """
+
+ features = 'FORTRAN_forcevariable = ",fortran"\n'
+ self.write_config(features)
+ bitbake('fortran-helloworld')
+
+class SourceMirroring(OESelftestTestCase):
+ # Can we download everything from the Yocto Sources Mirror over http only
+ def test_yocto_source_mirror(self):
+ self.write_config("""
+BB_ALLOWED_NETWORKS = "downloads.yoctoproject.org"
+MIRRORS = ""
+DL_DIR = "${TMPDIR}/test_downloads"
+STAMPS_DIR = "${TMPDIR}/test_stamps"
+SSTATE_DIR = "${TMPDIR}/test_sstate-cache"
+PREMIRRORS = "\\
+ bzr://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
+ cvs://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
+ git://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
+ gitsm://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
+ hg://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
+ osc://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
+ p4://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
+ svn://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
+ ftp://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
+ http://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
+ https://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n"
+""")
+
+ bitbake("world --runall fetch")
+
diff --git a/meta/lib/oeqa/selftest/cases/containerimage.py b/meta/lib/oeqa/selftest/cases/containerimage.py
new file mode 100644
index 0000000000..c0998e319e
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/containerimage.py
@@ -0,0 +1,88 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_vars, runCmd
+
+# This test builds an image using the "container" IMAGE_FSTYPES, and
+# ensures that the files in the image are only the ones expected.
+#
+# The only package added to the image is container_image_testpkg, which
+# contains one file. However, due to some other things not cleaning up during
+# rootfs creation, there is some cruft. Ideally bugs will be filed and the
+# cruft removed, but for now we whitelist a known set.
+#
+# Also for performance reasons we're only checking the cruft when using ipk.
+# When using deb and rpm it is a bit different, and we could test all
+# of them, but this test is more to catch if other packages get added by
+# default other than what is in ROOTFS_BOOTSTRAP_INSTALL.
+#
+class ContainerImageTests(OESelftestTestCase):
+
+ # Verify that when an IMAGE_TYPEDEP_ of the form "foo.bar" is specified,
+ # the conversion type "bar" gets added as a dependency as well
+ def test_expected_files(self):
+
+ def get_each_path_part(path):
+ if path:
+ part = [ '.' + path + '/' ]
+ result = get_each_path_part(path.rsplit('/', 1)[0])
+ if result:
+ return part + result
+ else:
+ return part
+ else:
+ return None
+
+ self.write_config("""PREFERRED_PROVIDER_virtual/kernel = "linux-dummy"
+IMAGE_FSTYPES = "container"
+PACKAGE_CLASSES = "package_ipk"
+IMAGE_FEATURES = ""
+IMAGE_BUILDINFO_FILE = ""
+""")
+
+ bbvars = get_bb_vars(['bindir', 'sysconfdir', 'localstatedir',
+ 'DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'],
+ target='container-test-image')
+ expected_files = [
+ './',
+ '.{bindir}/theapp',
+ '.{sysconfdir}/default/',
+ '.{sysconfdir}/default/postinst',
+ '.{sysconfdir}/ld.so.cache',
+ '.{sysconfdir}/timestamp',
+ '.{sysconfdir}/version',
+ './run/',
+ '.{localstatedir}/cache/',
+ '.{localstatedir}/cache/ldconfig/',
+ '.{localstatedir}/cache/ldconfig/aux-cache',
+ '.{localstatedir}/cache/opkg/',
+ '.{localstatedir}/lib/',
+ '.{localstatedir}/lib/opkg/'
+ ]
+
+ expected_files = [ x.format(bindir=bbvars['bindir'],
+ sysconfdir=bbvars['sysconfdir'],
+ localstatedir=bbvars['localstatedir'])
+ for x in expected_files ]
+
+ # Since tar lists all directories individually, make sure each element
+ # from bindir, sysconfdir, etc is added
+ expected_files += get_each_path_part(bbvars['bindir'])
+ expected_files += get_each_path_part(bbvars['sysconfdir'])
+ expected_files += get_each_path_part(bbvars['localstatedir'])
+
+ expected_files = sorted(expected_files)
+
+ # Build the image of course
+ bitbake('container-test-image')
+
+ image = os.path.join(bbvars['DEPLOY_DIR_IMAGE'],
+ bbvars['IMAGE_LINK_NAME'] + '.tar.bz2')
+
+ # Ensure the files in the image are what we expect
+ result = runCmd("tar tf {} | sort".format(image), shell=True)
+ self.assertEqual(result.output.split('\n'), expected_files)
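To make the recursive helper above easier to follow, here is a condensed standalone copy of get_each_path_part() with a hypothetical input path; it returns every parent directory as a './'-prefixed entry, deepest first, so each intermediate directory listed by 'tar tf' is also expected.

    def get_each_path_part(path):
        # Condensed copy of the nested helper in test_expected_files, for illustration only.
        if not path:
            return None
        part = ['.' + path + '/']
        result = get_each_path_part(path.rsplit('/', 1)[0])
        return part + result if result else part

    assert get_each_path_part('/usr/bin') == ['./usr/bin/', './usr/']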
diff --git a/meta/lib/oeqa/selftest/cases/devtool.py b/meta/lib/oeqa/selftest/cases/devtool.py
new file mode 100644
index 0000000000..57e6662e4a
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/devtool.py
@@ -0,0 +1,1779 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import re
+import shutil
+import tempfile
+import glob
+import fnmatch
+
+import oeqa.utils.ftools as ftools
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, create_temp_layer
+from oeqa.utils.commands import get_bb_vars, runqemu, get_test_layer
+
+oldmetapath = None
+
+def setUpModule():
+ import bb.utils
+
+ global templayerdir
+ templayerdir = tempfile.mkdtemp(prefix='devtoolqa')
+ corecopydir = os.path.join(templayerdir, 'core-copy')
+ bblayers_conf = os.path.join(os.environ['BUILDDIR'], 'conf', 'bblayers.conf')
+ edited_layers = []
+
+ # We need to take a copy of the meta layer so we can modify it and not
+ # have any races against other tests that might be running in parallel
+ # however things like COREBASE mean that you can't just copy meta, you
+ # need the whole repository.
+ def bblayers_edit_cb(layerpath, canonical_layerpath):
+ global oldmetapath
+ if not canonical_layerpath.endswith('/'):
+ # This helps us match exactly when we're using this path later
+ canonical_layerpath += '/'
+ if not edited_layers and canonical_layerpath.endswith('/meta/'):
+ canonical_layerpath = os.path.realpath(canonical_layerpath) + '/'
+ edited_layers.append(layerpath)
+ oldmetapath = os.path.realpath(layerpath)
+ result = runCmd('git rev-parse --show-toplevel', cwd=canonical_layerpath)
+ oldreporoot = result.output.rstrip()
+ newmetapath = os.path.join(corecopydir, os.path.relpath(oldmetapath, oldreporoot))
+ runCmd('git clone %s %s' % (oldreporoot, corecopydir), cwd=templayerdir)
+ # Now we need to copy any modified files
+ # You might ask "why not just copy the entire tree instead of
+ # cloning and doing this?" - well, the problem with that is
+ # TMPDIR or an equally large subdirectory might exist
+ # under COREBASE and we don't want to copy that, so we have
+ # to be selective.
+ result = runCmd('git status --porcelain', cwd=oldreporoot)
+ for line in result.output.splitlines():
+ if line.startswith(' M ') or line.startswith('?? '):
+ relpth = line.split()[1]
+ pth = os.path.join(oldreporoot, relpth)
+ if pth.startswith(canonical_layerpath):
+ if relpth.endswith('/'):
+ destdir = os.path.join(corecopydir, relpth)
+ shutil.copytree(pth, destdir)
+ else:
+ destdir = os.path.join(corecopydir, os.path.dirname(relpth))
+ bb.utils.mkdirhier(destdir)
+ shutil.copy2(pth, destdir)
+ return newmetapath
+ else:
+ return layerpath
+ bb.utils.edit_bblayers_conf(bblayers_conf, None, None, bblayers_edit_cb)
+
+def tearDownModule():
+ if oldmetapath:
+ edited_layers = []
+ def bblayers_edit_cb(layerpath, canonical_layerpath):
+ if not edited_layers and canonical_layerpath.endswith('/meta'):
+ edited_layers.append(layerpath)
+ return oldmetapath
+ else:
+ return layerpath
+ bblayers_conf = os.path.join(os.environ['BUILDDIR'], 'conf', 'bblayers.conf')
+ bb.utils.edit_bblayers_conf(bblayers_conf, None, None, bblayers_edit_cb)
+ shutil.rmtree(templayerdir)
+
+class DevtoolBase(OESelftestTestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super(DevtoolBase, cls).setUpClass()
+ bb_vars = get_bb_vars(['TOPDIR', 'SSTATE_DIR'])
+ cls.original_sstate = bb_vars['SSTATE_DIR']
+ cls.devtool_sstate = os.path.join(bb_vars['TOPDIR'], 'sstate_devtool')
+ cls.sstate_conf = 'SSTATE_DIR = "%s"\n' % cls.devtool_sstate
+ cls.sstate_conf += ('SSTATE_MIRRORS += "file://.* file:///%s/PATH"\n'
+ % cls.original_sstate)
+
+ @classmethod
+ def tearDownClass(cls):
+ cls.logger.debug('Deleting devtool sstate cache on %s' % cls.devtool_sstate)
+ runCmd('rm -rf %s' % cls.devtool_sstate)
+ super(DevtoolBase, cls).tearDownClass()
+
+ def setUp(self):
+ """Test case setup function"""
+ super(DevtoolBase, self).setUp()
+ self.workspacedir = os.path.join(self.builddir, 'workspace')
+ self.assertTrue(not os.path.exists(self.workspacedir),
+ 'This test cannot be run with a workspace directory '
+ 'under the build directory')
+ self.append_config(self.sstate_conf)
+
+ def _check_src_repo(self, repo_dir):
+ """Check srctree git repository"""
+ self.assertTrue(os.path.isdir(os.path.join(repo_dir, '.git')),
+ 'git repository for external source tree not found')
+ result = runCmd('git status --porcelain', cwd=repo_dir)
+ self.assertEqual(result.output.strip(), "",
+ 'Created git repo is not clean')
+ result = runCmd('git symbolic-ref HEAD', cwd=repo_dir)
+ self.assertEqual(result.output.strip(), "refs/heads/devtool",
+ 'Wrong branch in git repo')
+
+ def _check_repo_status(self, repo_dir, expected_status):
+ """Check the worktree status of a repository"""
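+        # expected_status is a list of (status, path_regex) tuples, where
+        # status is the two-character 'git status --porcelain' code, e.g.
+        # ' M' for modified or '??' for untracked; every changed path must
+        # match an entry and every entry must be consumed.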
+ result = runCmd('git status . --porcelain',
+ cwd=repo_dir)
+ for line in result.output.splitlines():
+ for ind, (f_status, fn_re) in enumerate(expected_status):
+ if re.match(fn_re, line[3:]):
+ if f_status != line[:2]:
+ self.fail('Unexpected status in line: %s' % line)
+ expected_status.pop(ind)
+ break
+ else:
+ self.fail('Unexpected modified file in line: %s' % line)
+ if expected_status:
+ self.fail('Missing file changes: %s' % expected_status)
+
+ def _test_recipe_contents(self, recipefile, checkvars, checkinherits):
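+        """Check variable values and inherits in a recipe file.
+
+        checkvars maps variable names to expected values: a plain string, a
+        set of space-separated values, or None to assert the variable is not
+        set at all. checkinherits lists classes the recipe must inherit.
+        """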
+ with open(recipefile, 'r') as f:
+ invar = None
+ invalue = None
+ inherits = set()
+ for line in f:
+ var = None
+ if invar:
+ value = line.strip().strip('"')
+ if value.endswith('\\'):
+ invalue += ' ' + value[:-1].strip()
+ continue
+ else:
+ invalue += ' ' + value.strip()
+ var = invar
+ value = invalue
+ invar = None
+ elif '=' in line:
+ splitline = line.split('=', 1)
+ var = splitline[0].rstrip()
+ value = splitline[1].strip().strip('"')
+ if value.endswith('\\'):
+ invalue = value[:-1].strip()
+ invar = var
+ continue
+ elif line.startswith('inherit '):
+ inherits.update(line.split()[1:])
+
+ if var and var in checkvars:
+ needvalue = checkvars.pop(var)
+ if needvalue is None:
+ self.fail('Variable %s should not appear in recipe, but value is being set to "%s"' % (var, value))
+ if isinstance(needvalue, set):
+ if var == 'LICENSE':
+ value = set(value.split(' & '))
+ else:
+ value = set(value.split())
+ self.assertEqual(value, needvalue, 'values for %s do not match' % var)
+
+ missingvars = {}
+ for var, value in checkvars.items():
+ if value is not None:
+ missingvars[var] = value
+ self.assertEqual(missingvars, {}, 'Some expected variables not found in recipe: %s' % checkvars)
+
+ for inherit in checkinherits:
+ self.assertIn(inherit, inherits, 'Missing inherit of %s' % inherit)
+
+ def _check_bbappend(self, testrecipe, recipefile, appenddir):
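+        """Return the path of the bbappend created for testrecipe under
+        appenddir, located by parsing 'bitbake-layers show-appends' output."""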
+ result = runCmd('bitbake-layers show-appends', cwd=self.builddir)
+ resultlines = result.output.splitlines()
+ inrecipe = False
+ bbappends = []
+ bbappendfile = None
+ for line in resultlines:
+ if inrecipe:
+ if line.startswith(' '):
+ bbappends.append(line.strip())
+ else:
+ break
+ elif line == '%s:' % os.path.basename(recipefile):
+ inrecipe = True
+ self.assertLessEqual(len(bbappends), 2, '%s recipe is being bbappended by another layer - bbappends found:\n %s' % (testrecipe, '\n '.join(bbappends)))
+ for bbappend in bbappends:
+ if bbappend.startswith(appenddir):
+ bbappendfile = bbappend
+ break
+ else:
+ self.fail('bbappend for recipe %s does not seem to be created in test layer' % testrecipe)
+ return bbappendfile
+
+ def _create_temp_layer(self, templayerdir, addlayer, templayername, priority=999, recipepathspec='recipes-*/*'):
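+        """Create a temporary layer via create_temp_layer() and, if addlayer
+        is set, add it to bblayers.conf (removing it again on tearDown)."""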
+ create_temp_layer(templayerdir, templayername, priority, recipepathspec)
+ if addlayer:
+ self.add_command_to_tearDown('bitbake-layers remove-layer %s || true' % templayerdir)
+ result = runCmd('bitbake-layers add-layer %s' % templayerdir, cwd=self.builddir)
+
+ def _process_ls_output(self, output):
+ """
+ Convert ls -l output to a format we can reasonably compare from one context
+ to another (e.g. from host to target)
+ """
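+        # A typical 'ls -l' line has nine fields:
+        #   perms links owner group size month day time name
+        # The link count, size and timestamp fields vary between host and
+        # target, so only perms, owner, group and name are kept for comparison.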
+ filelist = []
+ for line in output.splitlines():
+ splitline = line.split()
+ if len(splitline) < 8:
+ self.fail('_process_ls_output: invalid output line: %s' % line)
+ # Remove trailing . on perms
+ splitline[0] = splitline[0].rstrip('.')
+ # Remove leading . on paths
+ splitline[-1] = splitline[-1].lstrip('.')
+ # Drop fields we don't want to compare
+ del splitline[7]
+ del splitline[6]
+ del splitline[5]
+ del splitline[4]
+ del splitline[1]
+ filelist.append(' '.join(splitline))
+ return filelist
+
+
+class DevtoolTests(DevtoolBase):
+
+ def test_create_workspace(self):
+ # Check preconditions
+ result = runCmd('bitbake-layers show-layers')
+ self.assertTrue('\nworkspace' not in result.output, 'This test cannot be run with a workspace layer in bblayers.conf')
+        # Remove conf/devtool.conf on teardown so it cannot corrupt later tests
+ devtoolconf = os.path.join(self.builddir, 'conf', 'devtool.conf')
+ self.track_for_cleanup(devtoolconf)
+ # Try creating a workspace layer with a specific path
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ result = runCmd('devtool create-workspace %s' % tempdir)
+ self.assertTrue(os.path.isfile(os.path.join(tempdir, 'conf', 'layer.conf')), msg = "No workspace created. devtool output: %s " % result.output)
+ result = runCmd('bitbake-layers show-layers')
+ self.assertIn(tempdir, result.output)
+ # Try creating a workspace layer with the default path
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool create-workspace')
+ self.assertTrue(os.path.isfile(os.path.join(self.workspacedir, 'conf', 'layer.conf')), msg = "No workspace created. devtool output: %s " % result.output)
+ result = runCmd('bitbake-layers show-layers')
+ self.assertNotIn(tempdir, result.output)
+ self.assertIn(self.workspacedir, result.output)
+
+class DevtoolAddTests(DevtoolBase):
+
+ def test_devtool_add(self):
+ # Fetch source
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ pn = 'pv'
+ pv = '1.5.3'
+ url = 'http://www.ivarch.com/programs/sources/pv-1.5.3.tar.bz2'
+ result = runCmd('wget %s' % url, cwd=tempdir)
+ result = runCmd('tar xfv %s' % os.path.basename(url), cwd=tempdir)
+ srcdir = os.path.join(tempdir, '%s-%s' % (pn, pv))
+ self.assertTrue(os.path.isfile(os.path.join(srcdir, 'configure')), 'Unable to find configure script in source directory')
+ # Test devtool add
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c cleansstate %s' % pn)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool add %s %s' % (pn, srcdir))
+ self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created')
+ # Test devtool status
+ result = runCmd('devtool status')
+ recipepath = '%s/recipes/%s/%s_%s.bb' % (self.workspacedir, pn, pn, pv)
+ self.assertIn(recipepath, result.output)
+ self.assertIn(srcdir, result.output)
+ # Test devtool find-recipe
+ result = runCmd('devtool -q find-recipe %s' % pn)
+ self.assertEqual(recipepath, result.output.strip())
+ # Test devtool edit-recipe
+ result = runCmd('VISUAL="echo 123" devtool -q edit-recipe %s' % pn)
+ self.assertEqual('123 %s' % recipepath, result.output.strip())
+ # Clean up anything in the workdir/sysroot/sstate cache (have to do this *after* devtool add since the recipe only exists then)
+ bitbake('%s -c cleansstate' % pn)
+ # Test devtool build
+ result = runCmd('devtool build %s' % pn)
+ bb_vars = get_bb_vars(['D', 'bindir'], pn)
+ installdir = bb_vars['D']
+ self.assertTrue(installdir, 'Could not query installdir variable')
+ bindir = bb_vars['bindir']
+ self.assertTrue(bindir, 'Could not query bindir variable')
+ if bindir[0] == '/':
+ bindir = bindir[1:]
+ self.assertTrue(os.path.isfile(os.path.join(installdir, bindir, 'pv')), 'pv binary not found in D')
+
+ def test_devtool_add_git_local(self):
+ # We need dbus built so that DEPENDS recognition works
+ bitbake('dbus')
+ # Fetch source from a remote URL, but do it outside of devtool
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ pn = 'dbus-wait'
+ srcrev = '6cc6077a36fe2648a5f993fe7c16c9632f946517'
+ # We choose an https:// git URL here to check rewriting the URL works
+ url = 'https://git.yoctoproject.org/git/dbus-wait'
+ # Force fetching to "noname" subdir so we verify we're picking up the name from autoconf
+ # instead of the directory name
+ result = runCmd('git clone %s noname' % url, cwd=tempdir)
+ srcdir = os.path.join(tempdir, 'noname')
+ result = runCmd('git reset --hard %s' % srcrev, cwd=srcdir)
+ self.assertTrue(os.path.isfile(os.path.join(srcdir, 'configure.ac')), 'Unable to find configure script in source directory')
+ # Test devtool add
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ # Don't specify a name since we should be able to auto-detect it
+ result = runCmd('devtool add %s' % srcdir)
+ self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created')
+ # Check the recipe name is correct
+ recipefile = get_bb_var('FILE', pn)
+ self.assertIn('%s_git.bb' % pn, recipefile, 'Recipe file incorrectly named')
+ self.assertIn(recipefile, result.output)
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(pn, result.output)
+ self.assertIn(srcdir, result.output)
+ self.assertIn(recipefile, result.output)
+ checkvars = {}
+ checkvars['LICENSE'] = 'GPLv2'
+ checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263'
+ checkvars['S'] = '${WORKDIR}/git'
+ checkvars['PV'] = '0.1+git${SRCPV}'
+ checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/dbus-wait;protocol=https'
+ checkvars['SRCREV'] = srcrev
+ checkvars['DEPENDS'] = set(['dbus'])
+ self._test_recipe_contents(recipefile, checkvars, [])
+
+ def test_devtool_add_library(self):
+ # Fetch source
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ version = '1.1'
+ url = 'https://www.intra2net.com/en/developer/libftdi/download/libftdi1-%s.tar.bz2' % version
+ result = runCmd('wget %s' % url, cwd=tempdir)
+ result = runCmd('tar xfv libftdi1-%s.tar.bz2' % version, cwd=tempdir)
+ srcdir = os.path.join(tempdir, 'libftdi1-%s' % version)
+ self.assertTrue(os.path.isfile(os.path.join(srcdir, 'CMakeLists.txt')), 'Unable to find CMakeLists.txt in source directory')
+ # Test devtool add (and use -V so we test that too)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool add libftdi %s -V %s' % (srcdir, version))
+ self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created')
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn('libftdi', result.output)
+ self.assertIn(srcdir, result.output)
+ # Clean up anything in the workdir/sysroot/sstate cache (have to do this *after* devtool add since the recipe only exists then)
+ bitbake('libftdi -c cleansstate')
+ # libftdi's python/CMakeLists.txt is a bit broken, so let's just disable it
+ # There's also the matter of it installing cmake files to a path we don't
+ # normally cover, which triggers the installed-vs-shipped QA test we have
+ # within do_package
+ recipefile = '%s/recipes/libftdi/libftdi_%s.bb' % (self.workspacedir, version)
+ result = runCmd('recipetool setvar %s EXTRA_OECMAKE -- \'-DPYTHON_BINDINGS=OFF -DLIBFTDI_CMAKE_CONFIG_DIR=${datadir}/cmake/Modules\'' % recipefile)
+ with open(recipefile, 'a') as f:
+ f.write('\nFILES_${PN}-dev += "${datadir}/cmake/Modules"\n')
+ # We don't have the ability to pick up this dependency automatically yet...
+ f.write('\nDEPENDS += "libusb1"\n')
+ f.write('\nTESTLIBOUTPUT = "${COMPONENTS_DIR}/${TUNE_PKGARCH}/${PN}/${libdir}"\n')
+ # Test devtool build
+ result = runCmd('devtool build libftdi')
+ bb_vars = get_bb_vars(['TESTLIBOUTPUT', 'STAMP'], 'libftdi')
+ staging_libdir = bb_vars['TESTLIBOUTPUT']
+ self.assertTrue(staging_libdir, 'Could not query TESTLIBOUTPUT variable')
+ self.assertTrue(os.path.isfile(os.path.join(staging_libdir, 'libftdi1.so.2.1.0')), "libftdi binary not found in STAGING_LIBDIR. Output of devtool build libftdi %s" % result.output)
+ # Test devtool reset
+ stampprefix = bb_vars['STAMP']
+ result = runCmd('devtool reset libftdi')
+ result = runCmd('devtool status')
+ self.assertNotIn('libftdi', result.output)
+ self.assertTrue(stampprefix, 'Unable to get STAMP value for recipe libftdi')
+ matches = glob.glob(stampprefix + '*')
+ self.assertFalse(matches, 'Stamp files exist for recipe libftdi that should have been cleaned')
+ self.assertFalse(os.path.isfile(os.path.join(staging_libdir, 'libftdi1.so.2.1.0')), 'libftdi binary still found in STAGING_LIBDIR after cleaning')
+
+ def test_devtool_add_fetch(self):
+ # Fetch source
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ testver = '0.23'
+ url = 'https://files.pythonhosted.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-%s.tar.gz' % testver
+ testrecipe = 'python-markupsafe'
+ srcdir = os.path.join(tempdir, testrecipe)
+ # Test devtool add
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c cleansstate %s' % testrecipe)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool add %s %s -f %s' % (testrecipe, srcdir, url))
+ self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. %s' % result.output)
+ self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory')
+ self.assertTrue(os.path.isdir(os.path.join(srcdir, '.git')), 'git repository for external source tree was not created')
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(testrecipe, result.output)
+ self.assertIn(srcdir, result.output)
+ # Check recipe
+ recipefile = get_bb_var('FILE', testrecipe)
+ self.assertIn('%s_%s.bb' % (testrecipe, testver), recipefile, 'Recipe file incorrectly named')
+ checkvars = {}
+ checkvars['S'] = '${WORKDIR}/MarkupSafe-${PV}'
+ checkvars['SRC_URI'] = url.replace(testver, '${PV}')
+ self._test_recipe_contents(recipefile, checkvars, [])
+ # Try with version specified
+ result = runCmd('devtool reset -n %s' % testrecipe)
+ shutil.rmtree(srcdir)
+ fakever = '1.9'
+ result = runCmd('devtool add %s %s -f %s -V %s' % (testrecipe, srcdir, url, fakever))
+ self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory')
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(testrecipe, result.output)
+ self.assertIn(srcdir, result.output)
+ # Check recipe
+ recipefile = get_bb_var('FILE', testrecipe)
+ self.assertIn('%s_%s.bb' % (testrecipe, fakever), recipefile, 'Recipe file incorrectly named')
+ checkvars = {}
+ checkvars['S'] = '${WORKDIR}/MarkupSafe-%s' % testver
+ checkvars['SRC_URI'] = url
+ self._test_recipe_contents(recipefile, checkvars, [])
+
+ def test_devtool_add_fetch_git(self):
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ url = 'gitsm://git.yoctoproject.org/mraa'
+ checkrev = 'ae127b19a50aa54255e4330ccfdd9a5d058e581d'
+ testrecipe = 'mraa'
+ srcdir = os.path.join(tempdir, testrecipe)
+ # Test devtool add
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c cleansstate %s' % testrecipe)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool add %s %s -a -f %s' % (testrecipe, srcdir, url))
+ self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created: %s' % result.output)
+ self.assertTrue(os.path.isfile(os.path.join(srcdir, 'imraa', 'imraa.c')), 'Unable to find imraa/imraa.c in source directory')
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(testrecipe, result.output)
+ self.assertIn(srcdir, result.output)
+ # Check recipe
+ recipefile = get_bb_var('FILE', testrecipe)
+ self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named')
+ checkvars = {}
+ checkvars['S'] = '${WORKDIR}/git'
+ checkvars['PV'] = '1.0+git${SRCPV}'
+ checkvars['SRC_URI'] = url
+ checkvars['SRCREV'] = '${AUTOREV}'
+ self._test_recipe_contents(recipefile, checkvars, [])
+ # Try with revision and version specified
+ result = runCmd('devtool reset -n %s' % testrecipe)
+ shutil.rmtree(srcdir)
+ url_rev = '%s;rev=%s' % (url, checkrev)
+ result = runCmd('devtool add %s %s -f "%s" -V 1.5' % (testrecipe, srcdir, url_rev))
+ self.assertTrue(os.path.isfile(os.path.join(srcdir, 'imraa', 'imraa.c')), 'Unable to find imraa/imraa.c in source directory')
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(testrecipe, result.output)
+ self.assertIn(srcdir, result.output)
+ # Check recipe
+ recipefile = get_bb_var('FILE', testrecipe)
+ self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named')
+ checkvars = {}
+ checkvars['S'] = '${WORKDIR}/git'
+ checkvars['PV'] = '1.5+git${SRCPV}'
+ checkvars['SRC_URI'] = url
+ checkvars['SRCREV'] = checkrev
+ self._test_recipe_contents(recipefile, checkvars, [])
+
+ def test_devtool_add_fetch_simple(self):
+ # Fetch source from a remote URL, auto-detecting name
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ testver = '1.6.0'
+ url = 'http://www.ivarch.com/programs/sources/pv-%s.tar.bz2' % testver
+ testrecipe = 'pv'
+ srcdir = os.path.join(self.workspacedir, 'sources', testrecipe)
+ # Test devtool add
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool add %s' % url)
+ self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. %s' % result.output)
+ self.assertTrue(os.path.isfile(os.path.join(srcdir, 'configure')), 'Unable to find configure script in source directory')
+ self.assertTrue(os.path.isdir(os.path.join(srcdir, '.git')), 'git repository for external source tree was not created')
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(testrecipe, result.output)
+ self.assertIn(srcdir, result.output)
+ # Check recipe
+ recipefile = get_bb_var('FILE', testrecipe)
+ self.assertIn('%s_%s.bb' % (testrecipe, testver), recipefile, 'Recipe file incorrectly named')
+ checkvars = {}
+ checkvars['S'] = None
+ checkvars['SRC_URI'] = url.replace(testver, '${PV}')
+ self._test_recipe_contents(recipefile, checkvars, [])
+
+class DevtoolModifyTests(DevtoolBase):
+
+ def test_devtool_modify(self):
+ import oe.path
+
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c clean mdadm')
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool modify mdadm -x %s' % tempdir)
+ self.assertExists(os.path.join(tempdir, 'Makefile'), 'Extracted source could not be found')
+ self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created')
+ matches = glob.glob(os.path.join(self.workspacedir, 'appends', 'mdadm_*.bbappend'))
+ self.assertTrue(matches, 'bbappend not created %s' % result.output)
+
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn('mdadm', result.output)
+ self.assertIn(tempdir, result.output)
+ self._check_src_repo(tempdir)
+
+ bitbake('mdadm -C unpack')
+
+ def check_line(checkfile, expected, message, present=True):
+ # Check for $expected, on a line on its own, in checkfile.
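+            # Note: the membership test below iterates the open file object
+            # line by line, so it matches whole lines only.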
+ with open(checkfile, 'r') as f:
+ if present:
+ self.assertIn(expected + '\n', f, message)
+ else:
+ self.assertNotIn(expected + '\n', f, message)
+
+ modfile = os.path.join(tempdir, 'mdadm.8.in')
+ bb_vars = get_bb_vars(['PKGD', 'mandir'], 'mdadm')
+ pkgd = bb_vars['PKGD']
+ self.assertTrue(pkgd, 'Could not query PKGD variable')
+ mandir = bb_vars['mandir']
+ self.assertTrue(mandir, 'Could not query mandir variable')
+ manfile = oe.path.join(pkgd, mandir, 'man8', 'mdadm.8')
+
+ check_line(modfile, 'Linux Software RAID', 'Could not find initial string')
+ check_line(modfile, 'antique pin sardine', 'Unexpectedly found replacement string', present=False)
+
+ result = runCmd("sed -i 's!^Linux Software RAID$!antique pin sardine!' %s" % modfile)
+ check_line(modfile, 'antique pin sardine', 'mdadm.8.in file not modified (sed failed)')
+
+ bitbake('mdadm -c package')
+ check_line(manfile, 'antique pin sardine', 'man file not modified. man searched file path: %s' % manfile)
+
+ result = runCmd('git checkout -- %s' % modfile, cwd=tempdir)
+ check_line(modfile, 'Linux Software RAID', 'man .in file not restored (git failed)')
+
+ bitbake('mdadm -c package')
+ check_line(manfile, 'Linux Software RAID', 'man file not updated. man searched file path: %s' % manfile)
+
+ result = runCmd('devtool reset mdadm')
+ result = runCmd('devtool status')
+ self.assertNotIn('mdadm', result.output)
+
+ def test_devtool_buildclean(self):
+ def assertFile(path, *paths):
+ f = os.path.join(path, *paths)
+ self.assertExists(f)
+ def assertNoFile(path, *paths):
+ f = os.path.join(path, *paths)
+ self.assertNotExists(f)
+
+ # Clean up anything in the workdir/sysroot/sstate cache
+ bitbake('mdadm m4 -c cleansstate')
+ # Try modifying a recipe
+ tempdir_mdadm = tempfile.mkdtemp(prefix='devtoolqa')
+ tempdir_m4 = tempfile.mkdtemp(prefix='devtoolqa')
+ builddir_m4 = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir_mdadm)
+ self.track_for_cleanup(tempdir_m4)
+ self.track_for_cleanup(builddir_m4)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c clean mdadm m4')
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ self.write_recipeinc('m4', 'EXTERNALSRC_BUILD = "%s"\ndo_clean() {\n\t:\n}\n' % builddir_m4)
+ try:
+ runCmd('devtool modify mdadm -x %s' % tempdir_mdadm)
+ runCmd('devtool modify m4 -x %s' % tempdir_m4)
+ assertNoFile(tempdir_mdadm, 'mdadm')
+ assertNoFile(builddir_m4, 'src/m4')
+ result = bitbake('m4 -e')
+ result = bitbake('mdadm m4 -c compile')
+ self.assertEqual(result.status, 0)
+ assertFile(tempdir_mdadm, 'mdadm')
+ assertFile(builddir_m4, 'src/m4')
+ # Check that buildclean task exists and does call make clean
+ bitbake('mdadm m4 -c buildclean')
+ assertNoFile(tempdir_mdadm, 'mdadm')
+ assertNoFile(builddir_m4, 'src/m4')
+ runCmd('echo "#Trigger rebuild" >> %s/Makefile' % tempdir_mdadm)
+ bitbake('mdadm m4 -c compile')
+ assertFile(tempdir_mdadm, 'mdadm')
+ assertFile(builddir_m4, 'src/m4')
+ bitbake('mdadm m4 -c clean')
+ # Check that buildclean task is run before clean for B == S
+ assertNoFile(tempdir_mdadm, 'mdadm')
+ # Check that buildclean task is not run before clean for B != S
+ assertFile(builddir_m4, 'src/m4')
+ finally:
+ self.delete_recipeinc('m4')
+
+ def test_devtool_modify_invalid(self):
+ # Try modifying some recipes
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+
+ testrecipes = 'perf kernel-devsrc package-index core-image-minimal meta-toolchain packagegroup-core-sdk meta-ide-support'.split()
+ # Find actual name of gcc-source since it now includes the version - crude, but good enough for this purpose
+ result = runCmd('bitbake-layers show-recipes gcc-source*')
+ for line in result.output.splitlines():
+ # just match those lines that contain a real target
+ m = re.match('(?P<recipe>^[a-zA-Z0-9.-]+)(?P<colon>:$)', line)
+ if m:
+ testrecipes.append(m.group('recipe'))
+ for testrecipe in testrecipes:
+ # Check it's a valid recipe
+ bitbake('%s -e' % testrecipe)
+ # devtool extract should fail
+ result = runCmd('devtool extract %s %s' % (testrecipe, os.path.join(tempdir, testrecipe)), ignore_status=True)
+ self.assertNotEqual(result.status, 0, 'devtool extract on %s should have failed. devtool output: %s' % (testrecipe, result.output))
+ self.assertNotIn('Fetching ', result.output, 'devtool extract on %s should have errored out before trying to fetch' % testrecipe)
+ self.assertIn('ERROR: ', result.output, 'devtool extract on %s should have given an ERROR' % testrecipe)
+ # devtool modify should fail
+ result = runCmd('devtool modify %s -x %s' % (testrecipe, os.path.join(tempdir, testrecipe)), ignore_status=True)
+ self.assertNotEqual(result.status, 0, 'devtool modify on %s should have failed. devtool output: %s' % (testrecipe, result.output))
+ self.assertIn('ERROR: ', result.output, 'devtool modify on %s should have given an ERROR' % testrecipe)
+
+ def test_devtool_modify_native(self):
+ # Check preconditions
+ self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+ # Try modifying some recipes
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+
+ bbclassextended = False
+ inheritnative = False
+ testrecipes = 'mtools-native apt-native desktop-file-utils-native'.split()
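+        # The list should cover both recipes that are BBCLASSEXTENDed to
+        # native and recipes that inherit native directly; track which of
+        # the two cases we have actually seen.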
+ for testrecipe in testrecipes:
+ checkextend = 'native' in (get_bb_var('BBCLASSEXTEND', testrecipe) or '').split()
+ if not bbclassextended:
+ bbclassextended = checkextend
+ if not inheritnative:
+ inheritnative = not checkextend
+ result = runCmd('devtool modify %s -x %s' % (testrecipe, os.path.join(tempdir, testrecipe)))
+ self.assertNotIn('ERROR: ', result.output, 'ERROR in devtool modify output: %s' % result.output)
+ result = runCmd('devtool build %s' % testrecipe)
+ self.assertNotIn('ERROR: ', result.output, 'ERROR in devtool build output: %s' % result.output)
+ result = runCmd('devtool reset %s' % testrecipe)
+ self.assertNotIn('ERROR: ', result.output, 'ERROR in devtool reset output: %s' % result.output)
+
+ self.assertTrue(bbclassextended, 'None of these recipes are BBCLASSEXTENDed to native - need to adjust testrecipes list: %s' % ', '.join(testrecipes))
+ self.assertTrue(inheritnative, 'None of these recipes do "inherit native" - need to adjust testrecipes list: %s' % ', '.join(testrecipes))
+
+ def test_devtool_modify_git(self):
+ # Check preconditions
+ testrecipe = 'psplash'
+ src_uri = get_bb_var('SRC_URI', testrecipe)
+ self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe)
+ # Clean up anything in the workdir/sysroot/sstate cache
+ bitbake('%s -c cleansstate' % testrecipe)
+ # Try modifying a recipe
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
+ self.assertExists(os.path.join(tempdir, 'Makefile.am'), 'Extracted source could not be found')
+ self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. devtool output: %s' % result.output)
+ matches = glob.glob(os.path.join(self.workspacedir, 'appends', 'psplash_*.bbappend'))
+ self.assertTrue(matches, 'bbappend not created')
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(testrecipe, result.output)
+ self.assertIn(tempdir, result.output)
+ # Check git repo
+ self._check_src_repo(tempdir)
+ # Try building
+ bitbake(testrecipe)
+
+ def test_devtool_modify_localfiles(self):
+ # Check preconditions
+ testrecipe = 'lighttpd'
+ src_uri = (get_bb_var('SRC_URI', testrecipe) or '').split()
+ foundlocal = False
+ for item in src_uri:
+ if item.startswith('file://') and '.patch' not in item:
+ foundlocal = True
+ break
+ self.assertTrue(foundlocal, 'This test expects the %s recipe to fetch local files and it seems that it no longer does' % testrecipe)
+ # Clean up anything in the workdir/sysroot/sstate cache
+ bitbake('%s -c cleansstate' % testrecipe)
+ # Try modifying a recipe
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
+ self.assertExists(os.path.join(tempdir, 'configure.ac'), 'Extracted source could not be found')
+ self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created')
+ matches = glob.glob(os.path.join(self.workspacedir, 'appends', '%s_*.bbappend' % testrecipe))
+ self.assertTrue(matches, 'bbappend not created')
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(testrecipe, result.output)
+ self.assertIn(tempdir, result.output)
+ # Try building
+ bitbake(testrecipe)
+
+ def test_devtool_modify_virtual(self):
+ # Try modifying a virtual recipe
+ virtrecipe = 'virtual/make'
+ realrecipe = 'make'
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool modify %s -x %s' % (virtrecipe, tempdir))
+ self.assertExists(os.path.join(tempdir, 'Makefile.am'), 'Extracted source could not be found')
+ self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created')
+ matches = glob.glob(os.path.join(self.workspacedir, 'appends', '%s_*.bbappend' % realrecipe))
+ self.assertTrue(matches, 'bbappend not created %s' % result.output)
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertNotIn(virtrecipe, result.output)
+ self.assertIn(realrecipe, result.output)
+ # Check git repo
+ self._check_src_repo(tempdir)
+        # Checking the virtual-to-real recipe mapping and the srctree repo is probably sufficient here
+
+class DevtoolUpdateTests(DevtoolBase):
+
+ def test_devtool_update_recipe(self):
+ # Check preconditions
+ testrecipe = 'minicom'
+ bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe)
+ recipefile = bb_vars['FILE']
+ src_uri = bb_vars['SRC_URI']
+ self.assertNotIn('git://', src_uri, 'This test expects the %s recipe to NOT be a git recipe' % testrecipe)
+ self._check_repo_status(os.path.dirname(recipefile), [])
+ # First, modify a recipe
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ # (don't bother with cleaning the recipe on teardown, we won't be building it)
+ # We don't use -x here so that we test the behaviour of devtool modify without it
+ result = runCmd('devtool modify %s %s' % (testrecipe, tempdir))
+ # Check git repo
+ self._check_src_repo(tempdir)
+ # Add a couple of commits
+ # FIXME: this only tests adding, need to also test update and remove
+ result = runCmd('echo "Additional line" >> README', cwd=tempdir)
+ result = runCmd('git commit -a -m "Change the README"', cwd=tempdir)
+ result = runCmd('echo "A new file" > devtool-new-file', cwd=tempdir)
+ result = runCmd('git add devtool-new-file', cwd=tempdir)
+ result = runCmd('git commit -m "Add a new file"', cwd=tempdir)
+ self.add_command_to_tearDown('cd %s; rm %s/*.patch; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)))
+ result = runCmd('devtool update-recipe %s' % testrecipe)
+ expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
+ ('??', '.*/0001-Change-the-README.patch$'),
+ ('??', '.*/0002-Add-a-new-file.patch$')]
+ self._check_repo_status(os.path.dirname(recipefile), expected_status)
+
+ def test_devtool_update_recipe_git(self):
+ # Check preconditions
+ testrecipe = 'mtd-utils'
+ bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe)
+ recipefile = bb_vars['FILE']
+ src_uri = bb_vars['SRC_URI']
+ self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe)
+ patches = []
+ for entry in src_uri.split():
+ if entry.startswith('file://') and entry.endswith('.patch'):
+ patches.append(entry[7:].split(';')[0])
+ self.assertGreater(len(patches), 0, 'The %s recipe does not appear to contain any patches, so this test will not be effective' % testrecipe)
+ self._check_repo_status(os.path.dirname(recipefile), [])
+ # First, modify a recipe
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ # (don't bother with cleaning the recipe on teardown, we won't be building it)
+ result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
+ # Check git repo
+ self._check_src_repo(tempdir)
+ # Add a couple of commits
+ # FIXME: this only tests adding, need to also test update and remove
+ result = runCmd('echo "# Additional line" >> Makefile.am', cwd=tempdir)
+ result = runCmd('git commit -a -m "Change the Makefile"', cwd=tempdir)
+ result = runCmd('echo "A new file" > devtool-new-file', cwd=tempdir)
+ result = runCmd('git add devtool-new-file', cwd=tempdir)
+ result = runCmd('git commit -m "Add a new file"', cwd=tempdir)
+ self.add_command_to_tearDown('cd %s; rm -rf %s; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)))
+ result = runCmd('devtool update-recipe -m srcrev %s' % testrecipe)
+ expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile))] + \
+ [(' D', '.*/%s$' % patch) for patch in patches]
+ self._check_repo_status(os.path.dirname(recipefile), expected_status)
+
+ result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile))
+ addlines = ['SRCREV = ".*"', 'SRC_URI = "git://git.infradead.org/mtd-utils.git"']
+ srcurilines = src_uri.split()
+ srcurilines[0] = 'SRC_URI = "' + srcurilines[0]
+ srcurilines.append('"')
+ removelines = ['SRCREV = ".*"'] + srcurilines
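+        # In srcrev mode the only recipe changes should be an updated SRCREV
+        # and a collapsed SRC_URI, so every added/removed line in the diff
+        # must match one of addlines/removelines respectively.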
+ for line in result.output.splitlines():
+ if line.startswith('+++') or line.startswith('---'):
+ continue
+ elif line.startswith('+'):
+ matched = False
+ for item in addlines:
+ if re.match(item, line[1:].strip()):
+ matched = True
+ break
+                self.assertTrue(matched, 'Unexpected diff add line: %s' % line)
+ elif line.startswith('-'):
+ matched = False
+ for item in removelines:
+ if re.match(item, line[1:].strip()):
+ matched = True
+ break
+ self.assertTrue(matched, 'Unexpected diff remove line: %s' % line)
+ # Now try with auto mode
+ runCmd('cd %s; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, os.path.basename(recipefile)))
+ result = runCmd('devtool update-recipe %s' % testrecipe)
+ result = runCmd('git rev-parse --show-toplevel', cwd=os.path.dirname(recipefile))
+ topleveldir = result.output.strip()
+ relpatchpath = os.path.join(os.path.relpath(os.path.dirname(recipefile), topleveldir), testrecipe)
+ expected_status = [(' M', os.path.relpath(recipefile, topleveldir)),
+ ('??', '%s/0001-Change-the-Makefile.patch' % relpatchpath),
+ ('??', '%s/0002-Add-a-new-file.patch' % relpatchpath)]
+ self._check_repo_status(os.path.dirname(recipefile), expected_status)
+
+ def test_devtool_update_recipe_append(self):
+ # Check preconditions
+ testrecipe = 'mdadm'
+ bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe)
+ recipefile = bb_vars['FILE']
+ src_uri = bb_vars['SRC_URI']
+ self.assertNotIn('git://', src_uri, 'This test expects the %s recipe to NOT be a git recipe' % testrecipe)
+ self._check_repo_status(os.path.dirname(recipefile), [])
+ # First, modify a recipe
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ tempsrcdir = os.path.join(tempdir, 'source')
+ templayerdir = os.path.join(tempdir, 'layer')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ # (don't bother with cleaning the recipe on teardown, we won't be building it)
+ result = runCmd('devtool modify %s -x %s' % (testrecipe, tempsrcdir))
+ # Check git repo
+ self._check_src_repo(tempsrcdir)
+ # Add a commit
+ result = runCmd("sed 's!\\(#define VERSION\\W*\"[^\"]*\\)\"!\\1-custom\"!' -i ReadMe.c", cwd=tempsrcdir)
+ result = runCmd('git commit -a -m "Add our custom version"', cwd=tempsrcdir)
+ self.add_command_to_tearDown('cd %s; rm -f %s/*.patch; git checkout .' % (os.path.dirname(recipefile), testrecipe))
+ # Create a temporary layer and add it to bblayers.conf
+ self._create_temp_layer(templayerdir, True, 'selftestupdaterecipe')
+ # Create the bbappend
+ result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir))
+ self.assertNotIn('WARNING:', result.output)
+ # Check recipe is still clean
+ self._check_repo_status(os.path.dirname(recipefile), [])
+ # Check bbappend was created
+ splitpath = os.path.dirname(recipefile).split(os.sep)
+ appenddir = os.path.join(templayerdir, splitpath[-2], splitpath[-1])
+ bbappendfile = self._check_bbappend(testrecipe, recipefile, appenddir)
+ patchfile = os.path.join(appenddir, testrecipe, '0001-Add-our-custom-version.patch')
+ self.assertExists(patchfile, 'Patch file not created')
+
+ # Check bbappend contents
+ expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ '\n',
+ 'SRC_URI += "file://0001-Add-our-custom-version.patch"\n',
+ '\n']
+ with open(bbappendfile, 'r') as f:
+ self.assertEqual(expectedlines, f.readlines())
+
+ # Check we can run it again and bbappend isn't modified
+ result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir))
+ with open(bbappendfile, 'r') as f:
+ self.assertEqual(expectedlines, f.readlines())
+ # Drop new commit and check patch gets deleted
+ result = runCmd('git reset HEAD^', cwd=tempsrcdir)
+ result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir))
+ self.assertNotExists(patchfile, 'Patch file not deleted')
+ expectedlines2 = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ '\n']
+ with open(bbappendfile, 'r') as f:
+ self.assertEqual(expectedlines2, f.readlines())
+ # Put commit back and check we can run it if layer isn't in bblayers.conf
+ os.remove(bbappendfile)
+ result = runCmd('git commit -a -m "Add our custom version"', cwd=tempsrcdir)
+ result = runCmd('bitbake-layers remove-layer %s' % templayerdir, cwd=self.builddir)
+ result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir))
+ self.assertIn('WARNING: Specified layer is not currently enabled in bblayers.conf', result.output)
+ self.assertExists(patchfile, 'Patch file not created (with disabled layer)')
+ with open(bbappendfile, 'r') as f:
+ self.assertEqual(expectedlines, f.readlines())
+ # Deleting isn't expected to work under these circumstances
+
+ def test_devtool_update_recipe_append_git(self):
+ # Check preconditions
+ testrecipe = 'mtd-utils'
+ bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe)
+ recipefile = bb_vars['FILE']
+ src_uri = bb_vars['SRC_URI']
+ self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe)
+ for entry in src_uri.split():
+ if entry.startswith('git://'):
+ git_uri = entry
+ break
+ self._check_repo_status(os.path.dirname(recipefile), [])
+ # First, modify a recipe
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ tempsrcdir = os.path.join(tempdir, 'source')
+ templayerdir = os.path.join(tempdir, 'layer')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ # (don't bother with cleaning the recipe on teardown, we won't be building it)
+ result = runCmd('devtool modify %s -x %s' % (testrecipe, tempsrcdir))
+ # Check git repo
+ self._check_src_repo(tempsrcdir)
+ # Add a commit
+ result = runCmd('echo "# Additional line" >> Makefile.am', cwd=tempsrcdir)
+ result = runCmd('git commit -a -m "Change the Makefile"', cwd=tempsrcdir)
+ self.add_command_to_tearDown('cd %s; rm -f %s/*.patch; git checkout .' % (os.path.dirname(recipefile), testrecipe))
+ # Create a temporary layer
+ os.makedirs(os.path.join(templayerdir, 'conf'))
+ with open(os.path.join(templayerdir, 'conf', 'layer.conf'), 'w') as f:
+ f.write('BBPATH .= ":${LAYERDIR}"\n')
+ f.write('BBFILES += "${LAYERDIR}/recipes-*/*/*.bbappend"\n')
+ f.write('BBFILE_COLLECTIONS += "oeselftesttemplayer"\n')
+ f.write('BBFILE_PATTERN_oeselftesttemplayer = "^${LAYERDIR}/"\n')
+ f.write('BBFILE_PRIORITY_oeselftesttemplayer = "999"\n')
+ f.write('BBFILE_PATTERN_IGNORE_EMPTY_oeselftesttemplayer = "1"\n')
+ f.write('LAYERSERIES_COMPAT_oeselftesttemplayer = "${LAYERSERIES_COMPAT_core}"\n')
+ self.add_command_to_tearDown('bitbake-layers remove-layer %s || true' % templayerdir)
+ result = runCmd('bitbake-layers add-layer %s' % templayerdir, cwd=self.builddir)
+ # Create the bbappend
+ result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir))
+ self.assertNotIn('WARNING:', result.output)
+ # Check recipe is still clean
+ self._check_repo_status(os.path.dirname(recipefile), [])
+ # Check bbappend was created
+ splitpath = os.path.dirname(recipefile).split(os.sep)
+ appenddir = os.path.join(templayerdir, splitpath[-2], splitpath[-1])
+ bbappendfile = self._check_bbappend(testrecipe, recipefile, appenddir)
+ self.assertNotExists(os.path.join(appenddir, testrecipe), 'Patch directory should not be created')
+
+ # Check bbappend contents
+ result = runCmd('git rev-parse HEAD', cwd=tempsrcdir)
+ expectedlines = set(['SRCREV = "%s"\n' % result.output,
+ '\n',
+ 'SRC_URI = "%s"\n' % git_uri,
+ '\n'])
+ with open(bbappendfile, 'r') as f:
+ self.assertEqual(expectedlines, set(f.readlines()))
+
+ # Check we can run it again and bbappend isn't modified
+ result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir))
+ with open(bbappendfile, 'r') as f:
+ self.assertEqual(expectedlines, set(f.readlines()))
+ # Drop new commit and check SRCREV changes
+ result = runCmd('git reset HEAD^', cwd=tempsrcdir)
+ result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir))
+ self.assertNotExists(os.path.join(appenddir, testrecipe), 'Patch directory should not be created')
+ result = runCmd('git rev-parse HEAD', cwd=tempsrcdir)
+ expectedlines = set(['SRCREV = "%s"\n' % result.output,
+ '\n',
+ 'SRC_URI = "%s"\n' % git_uri,
+ '\n'])
+ with open(bbappendfile, 'r') as f:
+ self.assertEqual(expectedlines, set(f.readlines()))
+ # Put commit back and check we can run it if layer isn't in bblayers.conf
+ os.remove(bbappendfile)
+ result = runCmd('git commit -a -m "Change the Makefile"', cwd=tempsrcdir)
+ result = runCmd('bitbake-layers remove-layer %s' % templayerdir, cwd=self.builddir)
+ result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir))
+ self.assertIn('WARNING: Specified layer is not currently enabled in bblayers.conf', result.output)
+ self.assertNotExists(os.path.join(appenddir, testrecipe), 'Patch directory should not be created')
+ result = runCmd('git rev-parse HEAD', cwd=tempsrcdir)
+ expectedlines = set(['SRCREV = "%s"\n' % result.output,
+ '\n',
+ 'SRC_URI = "%s"\n' % git_uri,
+ '\n'])
+ with open(bbappendfile, 'r') as f:
+ self.assertEqual(expectedlines, set(f.readlines()))
+ # Deleting isn't expected to work under these circumstances
+
+ def test_devtool_update_recipe_local_files(self):
+ """Check that local source files are copied over instead of patched"""
+ testrecipe = 'makedevs'
+ recipefile = get_bb_var('FILE', testrecipe)
+ # Setup srctree for modifying the recipe
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ # (don't bother with cleaning the recipe on teardown, we won't be
+ # building it)
+ result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
+ # Check git repo
+ self._check_src_repo(tempdir)
+ # Try building just to ensure we haven't broken that
+ bitbake("%s" % testrecipe)
+ # Edit / commit local source
+ runCmd('echo "/* Foobar */" >> oe-local-files/makedevs.c', cwd=tempdir)
+ runCmd('echo "Foo" > oe-local-files/new-local', cwd=tempdir)
+ runCmd('echo "Bar" > new-file', cwd=tempdir)
+ runCmd('git add new-file', cwd=tempdir)
+ runCmd('git commit -m "Add new file"', cwd=tempdir)
+ self.add_command_to_tearDown('cd %s; git clean -fd .; git checkout .' %
+ os.path.dirname(recipefile))
+ runCmd('devtool update-recipe %s' % testrecipe)
+ expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
+ (' M', '.*/makedevs/makedevs.c$'),
+ ('??', '.*/makedevs/new-local$'),
+ ('??', '.*/makedevs/0001-Add-new-file.patch$')]
+ self._check_repo_status(os.path.dirname(recipefile), expected_status)
+
+ def test_devtool_update_recipe_local_files_2(self):
+ """Check local source files support when oe-local-files is in Git"""
+ testrecipe = 'devtool-test-local'
+ recipefile = get_bb_var('FILE', testrecipe)
+ recipedir = os.path.dirname(recipefile)
+ result = runCmd('git status --porcelain .', cwd=recipedir)
+ if result.output.strip():
+ self.fail('Recipe directory for %s contains uncommitted changes' % testrecipe)
+ # Setup srctree for modifying the recipe
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
+ # Check git repo
+ self._check_src_repo(tempdir)
+ # Add oe-local-files to Git
+ runCmd('rm oe-local-files/.gitignore', cwd=tempdir)
+ runCmd('git add oe-local-files', cwd=tempdir)
+ runCmd('git commit -m "Add local sources"', cwd=tempdir)
+ # Edit / commit local sources
+ runCmd('echo "# Foobar" >> oe-local-files/file1', cwd=tempdir)
+ runCmd('git commit -am "Edit existing file"', cwd=tempdir)
+ runCmd('git rm oe-local-files/file2', cwd=tempdir)
+        runCmd('git commit -m "Remove file"', cwd=tempdir)
+ runCmd('echo "Foo" > oe-local-files/new-local', cwd=tempdir)
+ runCmd('git add oe-local-files/new-local', cwd=tempdir)
+ runCmd('git commit -m "Add new local file"', cwd=tempdir)
+ runCmd('echo "Gar" > new-file', cwd=tempdir)
+ runCmd('git add new-file', cwd=tempdir)
+ runCmd('git commit -m "Add new file"', cwd=tempdir)
+ self.add_command_to_tearDown('cd %s; git clean -fd .; git checkout .' %
+ os.path.dirname(recipefile))
+ # Checkout unmodified file to working copy -> devtool should still pick
+ # the modified version from HEAD
+ runCmd('git checkout HEAD^ -- oe-local-files/file1', cwd=tempdir)
+ runCmd('devtool update-recipe %s' % testrecipe)
+ expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
+ (' M', '.*/file1$'),
+ (' D', '.*/file2$'),
+ ('??', '.*/new-local$'),
+ ('??', '.*/0001-Add-new-file.patch$')]
+ self._check_repo_status(os.path.dirname(recipefile), expected_status)
+
+ def test_devtool_update_recipe_local_files_3(self):
+ # First, modify the recipe
+ testrecipe = 'devtool-test-localonly'
+ bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe)
+ recipefile = bb_vars['FILE']
+ src_uri = bb_vars['SRC_URI']
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ # (don't bother with cleaning the recipe on teardown, we won't be building it)
+ result = runCmd('devtool modify %s' % testrecipe)
+ # Modify one file
+ runCmd('echo "Another line" >> file2', cwd=os.path.join(self.workspacedir, 'sources', testrecipe, 'oe-local-files'))
+ self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)))
+ result = runCmd('devtool update-recipe %s' % testrecipe)
+ expected_status = [(' M', '.*/%s/file2$' % testrecipe)]
+ self._check_repo_status(os.path.dirname(recipefile), expected_status)
+
+ def test_devtool_update_recipe_local_patch_gz(self):
+ # First, modify the recipe
+ testrecipe = 'devtool-test-patch-gz'
+ if get_bb_var('DISTRO') == 'poky-tiny':
+ self.skipTest("The DISTRO 'poky-tiny' does not provide the dependencies needed by %s" % testrecipe)
+ bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe)
+ recipefile = bb_vars['FILE']
+ src_uri = bb_vars['SRC_URI']
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ # (don't bother with cleaning the recipe on teardown, we won't be building it)
+ result = runCmd('devtool modify %s' % testrecipe)
+ # Modify one file
+ srctree = os.path.join(self.workspacedir, 'sources', testrecipe)
+ runCmd('echo "Another line" >> README', cwd=srctree)
+ runCmd('git commit -a --amend --no-edit', cwd=srctree)
+ self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)))
+ result = runCmd('devtool update-recipe %s' % testrecipe)
+ expected_status = [(' M', '.*/%s/readme.patch.gz$' % testrecipe)]
+ self._check_repo_status(os.path.dirname(recipefile), expected_status)
+ patch_gz = os.path.join(os.path.dirname(recipefile), testrecipe, 'readme.patch.gz')
+ result = runCmd('file %s' % patch_gz)
+ if 'gzip compressed data' not in result.output:
+ self.fail('New patch file is not gzipped - file reports:\n%s' % result.output)
+
+ def test_devtool_update_recipe_local_files_subdir(self):
+ # Try devtool update-recipe on a recipe that has a file with subdir= set in
+ # SRC_URI such that it overwrites a file that was in an archive that
+ # was also in SRC_URI
+ # First, modify the recipe
+ testrecipe = 'devtool-test-subdir'
+ bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe)
+ recipefile = bb_vars['FILE']
+ src_uri = bb_vars['SRC_URI']
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ # (don't bother with cleaning the recipe on teardown, we won't be building it)
+ result = runCmd('devtool modify %s' % testrecipe)
+ testfile = os.path.join(self.workspacedir, 'sources', testrecipe, 'testfile')
+ self.assertExists(testfile, 'Extracted source could not be found')
+ with open(testfile, 'r') as f:
+ contents = f.read().rstrip()
+ self.assertEqual(contents, 'Modified version', 'File has apparently not been overwritten as it should have been')
+ # Test devtool update-recipe without modifying any files
+ self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)))
+ result = runCmd('devtool update-recipe %s' % testrecipe)
+ expected_status = []
+ self._check_repo_status(os.path.dirname(recipefile), expected_status)
+
+class DevtoolExtractTests(DevtoolBase):
+
+ def test_devtool_extract(self):
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ # Try devtool extract
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool extract matchbox-terminal %s' % tempdir)
+ self.assertExists(os.path.join(tempdir, 'Makefile.am'), 'Extracted source could not be found')
+ self._check_src_repo(tempdir)
+
+ def test_devtool_extract_virtual(self):
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ # Try devtool extract
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool extract virtual/make %s' % tempdir)
+ self.assertExists(os.path.join(tempdir, 'Makefile.am'), 'Extracted source could not be found')
+ self._check_src_repo(tempdir)
+
+ def test_devtool_reset_all(self):
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ testrecipe1 = 'mdadm'
+ testrecipe2 = 'cronie'
+ result = runCmd('devtool modify -x %s %s' % (testrecipe1, os.path.join(tempdir, testrecipe1)))
+ result = runCmd('devtool modify -x %s %s' % (testrecipe2, os.path.join(tempdir, testrecipe2)))
+ result = runCmd('devtool build %s' % testrecipe1)
+ result = runCmd('devtool build %s' % testrecipe2)
+ stampprefix1 = get_bb_var('STAMP', testrecipe1)
+ self.assertTrue(stampprefix1, 'Unable to get STAMP value for recipe %s' % testrecipe1)
+ stampprefix2 = get_bb_var('STAMP', testrecipe2)
+ self.assertTrue(stampprefix2, 'Unable to get STAMP value for recipe %s' % testrecipe2)
+ result = runCmd('devtool reset -a')
+ self.assertIn(testrecipe1, result.output)
+ self.assertIn(testrecipe2, result.output)
+ result = runCmd('devtool status')
+ self.assertNotIn(testrecipe1, result.output)
+ self.assertNotIn(testrecipe2, result.output)
+ matches1 = glob.glob(stampprefix1 + '*')
+ self.assertFalse(matches1, 'Stamp files exist for recipe %s that should have been cleaned' % testrecipe1)
+ matches2 = glob.glob(stampprefix2 + '*')
+ self.assertFalse(matches2, 'Stamp files exist for recipe %s that should have been cleaned' % testrecipe2)
+
+ def test_devtool_deploy_target(self):
+ # NOTE: Whilst this test would seemingly be better placed as a runtime test,
+ # unfortunately the runtime tests run under bitbake and you can't run
+ # devtool within bitbake (since devtool needs to run bitbake itself).
+ # Additionally we are testing build-time functionality as well, so
+ # really this has to be done as an oe-selftest test.
+ #
+ # Check preconditions
+ machine = get_bb_var('MACHINE')
+ if not machine.startswith('qemu'):
+ self.skipTest('This test only works with qemu machines')
+ if not os.path.exists('/etc/runqemu-nosudo'):
+ self.skipTest('You must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
+ result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ip tuntap show', ignore_status=True)
+ if result.status != 0:
+ result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ifconfig -a', ignore_status=True)
+ if result.status != 0:
+ self.skipTest('Failed to determine if tap devices exist with ifconfig or ip: %s' % result.output)
+ for line in result.output.splitlines():
+ if line.startswith('tap'):
+ break
+ else:
+ self.skipTest('No tap devices found - you must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
+ self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+ # Definitions
+ testrecipe = 'mdadm'
+ testfile = '/sbin/mdadm'
+ testimage = 'oe-selftest-image'
+ testcommand = '/sbin/mdadm --help'
+ # Build an image to run
+ bitbake("%s qemu-native qemu-helper-native" % testimage)
+ deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
+ self.add_command_to_tearDown('bitbake -c clean %s' % testimage)
+ self.add_command_to_tearDown('rm -f %s/%s*' % (deploy_dir_image, testimage))
+ # Clean recipe so the first deploy will fail
+ bitbake("%s -c clean" % testrecipe)
+ # Try devtool modify
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
+ # Test that deploy-target at this point fails (properly)
+ result = runCmd('devtool deploy-target -n %s root@localhost' % testrecipe, ignore_status=True)
+        self.assertNotEqual(result.status, 0, 'devtool deploy-target should have failed, output: %s' % result.output)
+        self.assertNotIn('Traceback', result.output, 'devtool deploy-target should have failed with a proper error not a traceback, output: %s' % result.output)
+ result = runCmd('devtool build %s' % testrecipe)
+ # First try a dry-run of deploy-target
+ result = runCmd('devtool deploy-target -n %s root@localhost' % testrecipe)
+ self.assertIn(' %s' % testfile, result.output)
+ # Boot the image
+ with runqemu(testimage) as qemu:
+ # Now really test deploy-target
+ result = runCmd('devtool deploy-target -c %s root@%s' % (testrecipe, qemu.ip))
+ # Run a test command to see if it was installed properly
+ sshargs = '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
+ result = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, testcommand))
+ # Check if it deployed all of the files with the right ownership/perms
+ # First look on the host - need to do this under pseudo to get the correct ownership/perms
+ bb_vars = get_bb_vars(['D', 'FAKEROOTENV', 'FAKEROOTCMD'], testrecipe)
+ installdir = bb_vars['D']
+ fakerootenv = bb_vars['FAKEROOTENV']
+ fakerootcmd = bb_vars['FAKEROOTCMD']
+ result = runCmd('%s %s find . -type f -exec ls -l {} \\;' % (fakerootenv, fakerootcmd), cwd=installdir)
+ filelist1 = self._process_ls_output(result.output)
+
+ # Now look on the target
+ tempdir2 = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir2)
+ tmpfilelist = os.path.join(tempdir2, 'files.txt')
+ with open(tmpfilelist, 'w') as f:
+ for line in filelist1:
+ splitline = line.split()
+ f.write(splitline[-1] + '\n')
+ result = runCmd('cat %s | ssh -q %s root@%s \'xargs ls -l\'' % (tmpfilelist, sshargs, qemu.ip))
+ filelist2 = self._process_ls_output(result.output)
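+ # Sort both listings by filename (the last ls field) so the host and target
+ # file lists can be compared entry by entry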
+ filelist1.sort(key=lambda item: item.split()[-1])
+ filelist2.sort(key=lambda item: item.split()[-1])
+ self.assertEqual(filelist1, filelist2)
+ # Test undeploy-target
+ result = runCmd('devtool undeploy-target -c %s root@%s' % (testrecipe, qemu.ip))
+ result = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, testcommand), ignore_status=True)
+ self.assertNotEqual(result.status, 0, 'undeploy-target did not remove command as it should have')
+
+ def test_devtool_build_image(self):
+ """Test devtool build-image plugin"""
+ # Check preconditions
+ self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+ image = 'core-image-minimal'
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c clean %s' % image)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ bitbake('%s -c clean' % image)
+ # Add target and native recipes to workspace
+ recipes = ['mdadm', 'parted-native']
+ for recipe in recipes:
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.add_command_to_tearDown('bitbake -c clean %s' % recipe)
+ runCmd('devtool modify %s -x %s' % (recipe, tempdir))
+ # Try to build image
+ result = runCmd('devtool build-image %s' % image)
+ self.assertEqual(result.status, 0, 'devtool build-image failed')
+ # Check if image contains expected packages
+ deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
+ image_link_name = get_bb_var('IMAGE_LINK_NAME', image)
+ reqpkgs = [item for item in recipes if not item.endswith('-native')]
+ with open(os.path.join(deploy_dir_image, image_link_name + '.manifest'), 'r') as f:
+ for line in f:
+ splitval = line.split()
+ if splitval:
+ pkg = splitval[0]
+ if pkg in reqpkgs:
+ reqpkgs.remove(pkg)
+ if reqpkgs:
+ self.fail('The following packages were not present in the image as expected: %s' % ', '.join(reqpkgs))
+
+class DevtoolUpgradeTests(DevtoolBase):
+
+ def test_devtool_upgrade(self):
+ # Check preconditions
+ self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ # Check parameters
+ result = runCmd('devtool upgrade -h')
+ for param in 'recipename srctree --version -V --branch -b --keep-temp --no-patch'.split():
+ self.assertIn(param, result.output)
+ # For the moment, we are using a real recipe.
+ recipe = 'devtool-upgrade-test1'
+ version = '1.6.0'
+ oldrecipefile = get_bb_var('FILE', recipe)
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ # Check that recipe is not already under devtool control
+ result = runCmd('devtool status')
+ self.assertNotIn(recipe, result.output)
+ # Check upgrade. The code does not check whether the new PV is older or newer than the current PV,
+ # so it may be that we are downgrading instead of upgrading.
+ result = runCmd('devtool upgrade %s %s -V %s' % (recipe, tempdir, version))
+ # Check if srctree at least is populated
+ self.assertTrue(len(os.listdir(tempdir)) > 0, 'srctree (%s) should be populated with new (%s) source code' % (tempdir, version))
+ # Check new recipe subdirectory is present
+ self.assertExists(os.path.join(self.workspacedir, 'recipes', recipe, '%s-%s' % (recipe, version)), 'Recipe folder should exist')
+ # Check new recipe file is present
+ newrecipefile = os.path.join(self.workspacedir, 'recipes', recipe, '%s_%s.bb' % (recipe, version))
+ self.assertExists(newrecipefile, 'Recipe file should exist after upgrade')
+ # Check devtool status and make sure recipe is present
+ result = runCmd('devtool status')
+ self.assertIn(recipe, result.output)
+ self.assertIn(tempdir, result.output)
+ # Check recipe got changed as expected
+ with open(oldrecipefile + '.upgraded', 'r') as f:
+ desiredlines = f.readlines()
+ with open(newrecipefile, 'r') as f:
+ newlines = f.readlines()
+ self.assertEqual(desiredlines, newlines)
+ # Check devtool reset recipe
+ result = runCmd('devtool reset %s -n' % recipe)
+ result = runCmd('devtool status')
+ self.assertNotIn(recipe, result.output)
+ self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after resetting')
+
+ def test_devtool_upgrade_git(self):
+ # Check preconditions
+ self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ recipe = 'devtool-upgrade-test2'
+ commit = '6cc6077a36fe2648a5f993fe7c16c9632f946517'
+ oldrecipefile = get_bb_var('FILE', recipe)
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ # Check that recipe is not already under devtool control
+ result = runCmd('devtool status')
+ self.assertNotIn(recipe, result.output)
+ # Check upgrade
+ result = runCmd('devtool upgrade %s %s -S %s' % (recipe, tempdir, commit))
+ # Check if srctree at least is populated
+ self.assertTrue(len(os.listdir(tempdir)) > 0, 'srctree (%s) should be populated with new (%s) source code' % (tempdir, commit))
+ # Check new recipe file is present
+ newrecipefile = os.path.join(self.workspacedir, 'recipes', recipe, os.path.basename(oldrecipefile))
+ self.assertExists(newrecipefile, 'Recipe file should exist after upgrade')
+ # Check devtool status and make sure recipe is present
+ result = runCmd('devtool status')
+ self.assertIn(recipe, result.output)
+ self.assertIn(tempdir, result.output)
+ # Check recipe got changed as expected
+ with open(oldrecipefile + '.upgraded', 'r') as f:
+ desiredlines = f.readlines()
+ with open(newrecipefile, 'r') as f:
+ newlines = f.readlines()
+ self.assertEqual(desiredlines, newlines)
+ # Check devtool reset recipe
+ result = runCmd('devtool reset %s -n' % recipe)
+ result = runCmd('devtool status')
+ self.assertNotIn(recipe, result.output)
+ self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after resetting')
+
+ def test_devtool_layer_plugins(self):
+ """Test that devtool can use plugins from other layers.
+
+ This test executes the selftest-reverse command from meta-selftest."""
+
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+
+ s = "Microsoft Made No Profit From Anyone's Zunes Yo"
+ result = runCmd("devtool --quiet selftest-reverse \"%s\"" % s)
+ self.assertEqual(result.output, s[::-1])
+
+ def _copy_file_with_cleanup(self, srcfile, basedstdir, *paths):
+ dstdir = basedstdir
+ self.assertExists(dstdir)
+ for p in paths:
+ dstdir = os.path.join(dstdir, p)
+ if not os.path.exists(dstdir):
+ os.makedirs(dstdir)
+ self.track_for_cleanup(dstdir)
+ dstfile = os.path.join(dstdir, os.path.basename(srcfile))
+ if srcfile != dstfile:
+ shutil.copy(srcfile, dstfile)
+ self.track_for_cleanup(dstfile)
+
+ def test_devtool_load_plugin(self):
+ """Test that devtool loads only the first found plugin in BBPATH."""
+
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+
+ devtool = runCmd("which devtool")
+ fromname = runCmd("devtool --quiet pluginfile")
+ srcfile = fromname.output
+ bbpath = get_bb_var('BBPATH')
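+ # devtool searches for plugins in the lib/devtool directory of each BBPATH
+ # entry plus the directory containing the devtool script itself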
+ searchpath = bbpath.split(':') + [os.path.dirname(devtool.output)]
+ plugincontent = []
+ with open(srcfile) as fh:
+ plugincontent = fh.readlines()
+ try:
+ self.assertIn('meta-selftest', srcfile, 'wrong bbpath plugin found')
+ for path in searchpath:
+ self._copy_file_with_cleanup(srcfile, path, 'lib', 'devtool')
+ result = runCmd("devtool --quiet count")
+ self.assertEqual(result.output, '1')
+ result = runCmd("devtool --quiet multiloaded")
+ self.assertEqual(result.output, "no")
+ for path in searchpath:
+ result = runCmd("devtool --quiet bbdir")
+ self.assertEqual(result.output, path)
+ os.unlink(os.path.join(result.output, 'lib', 'devtool', 'bbpath.py'))
+ finally:
+ with open(srcfile, 'w') as fh:
+ fh.writelines(plugincontent)
+
+ def _setup_test_devtool_finish_upgrade(self):
+ # Check preconditions
+ self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ # Use a "real" recipe from meta-selftest
+ recipe = 'devtool-upgrade-test1'
+ oldversion = '1.5.3'
+ newversion = '1.6.0'
+ oldrecipefile = get_bb_var('FILE', recipe)
+ recipedir = os.path.dirname(oldrecipefile)
+ result = runCmd('git status --porcelain .', cwd=recipedir)
+ if result.output.strip():
+ self.fail('Recipe directory for %s contains uncommitted changes' % recipe)
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ # Check that recipe is not already under devtool control
+ result = runCmd('devtool status')
+ self.assertNotIn(recipe, result.output)
+ # Do the upgrade
+ result = runCmd('devtool upgrade %s %s -V %s' % (recipe, tempdir, newversion))
+ # Check devtool status and make sure recipe is present
+ result = runCmd('devtool status')
+ self.assertIn(recipe, result.output)
+ self.assertIn(tempdir, result.output)
+ # Make a change to the source
+ result = runCmd('sed -i \'/^#include "pv.h"/a \\/* Here is a new comment *\\/\' src/pv/number.c', cwd=tempdir)
+ result = runCmd('git status --porcelain', cwd=tempdir)
+ self.assertIn('M src/pv/number.c', result.output)
+ result = runCmd('git commit src/pv/number.c -m "Add a comment to the code"', cwd=tempdir)
+ # Check if patch is there
+ recipedir = os.path.dirname(oldrecipefile)
+ olddir = os.path.join(recipedir, recipe + '-' + oldversion)
+ patchfn = '0001-Add-a-note-line-to-the-quick-reference.patch'
+ backportedpatchfn = 'backported.patch'
+ self.assertExists(os.path.join(olddir, patchfn), 'Original patch file does not exist')
+ self.assertExists(os.path.join(olddir, backportedpatchfn), 'Backported patch file does not exist')
+ return recipe, oldrecipefile, recipedir, olddir, newversion, patchfn, backportedpatchfn
+
+ def test_devtool_finish_upgrade_origlayer(self):
+ recipe, oldrecipefile, recipedir, olddir, newversion, patchfn, backportedpatchfn = self._setup_test_devtool_finish_upgrade()
+ # Ensure the recipe is where we think it should be (so that cleanup doesn't trash things)
+ self.assertIn('/meta-selftest/', recipedir)
+ # Try finish to the original layer
+ self.add_command_to_tearDown('rm -rf %s ; cd %s ; git checkout %s' % (recipedir, os.path.dirname(recipedir), recipedir))
+ result = runCmd('devtool finish %s meta-selftest' % recipe)
+ result = runCmd('devtool status')
+ self.assertNotIn(recipe, result.output, 'Recipe should have been reset by finish but wasn\'t')
+ self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after finish')
+ self.assertNotExists(oldrecipefile, 'Old recipe file should have been deleted but wasn\'t')
+ self.assertNotExists(os.path.join(olddir, patchfn), 'Old patch file should have been deleted but wasn\'t')
+ self.assertNotExists(os.path.join(olddir, backportedpatchfn), 'Old backported patch file should have been deleted but wasn\'t')
+ newrecipefile = os.path.join(recipedir, '%s_%s.bb' % (recipe, newversion))
+ newdir = os.path.join(recipedir, recipe + '-' + newversion)
+ self.assertExists(newrecipefile, 'New recipe file should have been copied into existing layer but wasn\'t')
+ self.assertExists(os.path.join(newdir, patchfn), 'Patch file should have been copied into new directory but wasn\'t')
+ self.assertNotExists(os.path.join(newdir, backportedpatchfn), 'Backported patch file should not have been copied into new directory but was')
+ self.assertExists(os.path.join(newdir, '0002-Add-a-comment-to-the-code.patch'), 'New patch file should have been created but wasn\'t')
+ with open(newrecipefile, 'r') as f:
+ newcontent = f.read()
+ self.assertNotIn(backportedpatchfn, newcontent, "Backported patch should have been removed from the recipe but wasn't")
+ self.assertIn(patchfn, newcontent, "Old patch should have not been removed from the recipe but was")
+ self.assertIn("0002-Add-a-comment-to-the-code.patch", newcontent, "New patch should have been added to the recipe but wasn't")
+ self.assertIn("http://www.ivarch.com/programs/sources/pv-${PV}.tar.gz", newcontent, "New recipe no longer has upstream source in SRC_URI")
+
+
+ def test_devtool_finish_upgrade_otherlayer(self):
+ recipe, oldrecipefile, recipedir, olddir, newversion, patchfn, backportedpatchfn = self._setup_test_devtool_finish_upgrade()
+ # Ensure the recipe is where we think it should be (so that cleanup doesn't trash things)
+ self.assertIn('/meta-selftest/', recipedir)
+ # Try finish to a different layer - should create a bbappend
+ # This cleanup isn't strictly necessary but do it anyway just in case it goes wrong and writes to here
+ self.add_command_to_tearDown('rm -rf %s ; cd %s ; git checkout %s' % (recipedir, os.path.dirname(recipedir), recipedir))
+ oe_core_dir = os.path.join(get_bb_var('COREBASE'), 'meta')
+ newrecipedir = os.path.join(oe_core_dir, 'recipes-test', 'devtool')
+ newrecipefile = os.path.join(newrecipedir, '%s_%s.bb' % (recipe, newversion))
+ self.track_for_cleanup(newrecipedir)
+ result = runCmd('devtool finish %s oe-core' % recipe)
+ result = runCmd('devtool status')
+ self.assertNotIn(recipe, result.output, 'Recipe should have been reset by finish but wasn\'t')
+ self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after finish')
+ self.assertExists(oldrecipefile, 'Old recipe file should not have been deleted')
+ self.assertExists(os.path.join(olddir, patchfn), 'Old patch file should not have been deleted')
+ self.assertExists(os.path.join(olddir, backportedpatchfn), 'Old backported patch file should not have been deleted')
+ newdir = os.path.join(newrecipedir, recipe + '-' + newversion)
+ self.assertExists(newrecipefile, 'New recipe file should have been copied into existing layer but wasn\'t')
+ self.assertExists(os.path.join(newdir, patchfn), 'Patch file should have been copied into new directory but wasn\'t')
+ self.assertNotExists(os.path.join(newdir, backportedpatchfn), 'Backported patch file should not have been copied into new directory but was')
+ self.assertExists(os.path.join(newdir, '0002-Add-a-comment-to-the-code.patch'), 'New patch file should have been created but wasn\'t')
+ with open(newrecipefile, 'r') as f:
+ newcontent = f.read()
+ self.assertNotIn(backportedpatchfn, newcontent, "Backported patch should have been removed from the recipe but wasn't")
+ self.assertIn(patchfn, newcontent, "Old patch should have not been removed from the recipe but was")
+ self.assertIn("0002-Add-a-comment-to-the-code.patch", newcontent, "New patch should have been added to the recipe but wasn't")
+ self.assertIn("http://www.ivarch.com/programs/sources/pv-${PV}.tar.gz", newcontent, "New recipe no longer has upstream source in SRC_URI")
+
+ def _setup_test_devtool_finish_modify(self):
+ # Check preconditions
+ self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+ # Try modifying a recipe
+ self.track_for_cleanup(self.workspacedir)
+ recipe = 'mdadm'
+ oldrecipefile = get_bb_var('FILE', recipe)
+ recipedir = os.path.dirname(oldrecipefile)
+ result = runCmd('git status --porcelain .', cwd=recipedir)
+ if result.output.strip():
+ self.fail('Recipe directory for %s contains uncommitted changes' % recipe)
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool modify %s %s' % (recipe, tempdir))
+ self.assertExists(os.path.join(tempdir, 'Makefile'), 'Extracted source could not be found')
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(recipe, result.output)
+ self.assertIn(tempdir, result.output)
+ # Make a change to the source
+ result = runCmd('sed -i \'/^#include "mdadm.h"/a \\/* Here is a new comment *\\/\' maps.c', cwd=tempdir)
+ result = runCmd('git status --porcelain', cwd=tempdir)
+ self.assertIn('M maps.c', result.output)
+ result = runCmd('git commit maps.c -m "Add a comment to the code"', cwd=tempdir)
+ for entry in os.listdir(recipedir):
+ filesdir = os.path.join(recipedir, entry)
+ if os.path.isdir(filesdir):
+ break
+ else:
+ self.fail('Unable to find recipe files directory for %s' % recipe)
+ return recipe, oldrecipefile, recipedir, filesdir
+
+ def test_devtool_finish_modify_origlayer(self):
+ recipe, oldrecipefile, recipedir, filesdir = self._setup_test_devtool_finish_modify()
+ # Ensure the recipe is where we think it should be (so that cleanup doesn't trash things)
+ self.assertIn('/meta/', recipedir)
+ # Try finish to the original layer
+ self.add_command_to_tearDown('rm -rf %s ; cd %s ; git checkout %s' % (recipedir, os.path.dirname(recipedir), recipedir))
+ result = runCmd('devtool finish %s meta' % recipe)
+ result = runCmd('devtool status')
+ self.assertNotIn(recipe, result.output, 'Recipe should have been reset by finish but wasn\'t')
+ self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after finish')
+ expected_status = [(' M', '.*/%s$' % os.path.basename(oldrecipefile)),
+ ('??', '.*/.*-Add-a-comment-to-the-code.patch$')]
+ self._check_repo_status(recipedir, expected_status)
+
+ def test_devtool_finish_modify_otherlayer(self):
+ recipe, oldrecipefile, recipedir, filesdir = self._setup_test_devtool_finish_modify()
+ # Ensure the recipe is where we think it should be (so that cleanup doesn't trash things)
+ self.assertIn('/meta/', recipedir)
+ relpth = os.path.relpath(recipedir, os.path.join(get_bb_var('COREBASE'), 'meta'))
+ appenddir = os.path.join(get_test_layer(), relpth)
+ self.track_for_cleanup(appenddir)
+ # Try finish to the original layer
+ self.add_command_to_tearDown('rm -rf %s ; cd %s ; git checkout %s' % (recipedir, os.path.dirname(recipedir), recipedir))
+ result = runCmd('devtool finish %s meta-selftest' % recipe)
+ result = runCmd('devtool status')
+ self.assertNotIn(recipe, result.output, 'Recipe should have been reset by finish but wasn\'t')
+ self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after finish')
+ result = runCmd('git status --porcelain .', cwd=recipedir)
+ if result.output.strip():
+ self.fail('Recipe directory for %s contains the following unexpected changes after finish:\n%s' % (recipe, result.output.strip()))
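+ # The bbappend is named after the recipe with the version replaced by a
+ # '%' wildcard so it applies to any version of the recipe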
+ recipefn = os.path.splitext(os.path.basename(oldrecipefile))[0]
+ recipefn = recipefn.split('_')[0] + '_%'
+ appendfile = os.path.join(appenddir, recipefn + '.bbappend')
+ self.assertExists(appendfile, 'bbappend %s should have been created but wasn\'t' % appendfile)
+ newdir = os.path.join(appenddir, recipe)
+ files = os.listdir(newdir)
+ foundpatch = None
+ for fn in files:
+ if fnmatch.fnmatch(fn, '*-Add-a-comment-to-the-code.patch'):
+ foundpatch = fn
+ if not foundpatch:
+ self.fail('No patch file created next to bbappend')
+ files.remove(foundpatch)
+ if files:
+ self.fail('Unexpected file(s) copied next to bbappend: %s' % ', '.join(files))
+
+ def test_devtool_rename(self):
+ # Check preconditions
+ self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+
+ # First run devtool add
+ # We already have this recipe in OE-Core, but that doesn't matter
+ recipename = 'i2c-tools'
+ recipever = '3.1.2'
+ recipefile = os.path.join(self.workspacedir, 'recipes', recipename, '%s_%s.bb' % (recipename, recipever))
+ url = 'http://downloads.yoctoproject.org/mirror/sources/i2c-tools-%s.tar.bz2' % recipever
+ def add_recipe():
+ result = runCmd('devtool add %s' % url)
+ self.assertExists(recipefile, 'Expected recipe file not created')
+ self.assertExists(os.path.join(self.workspacedir, 'sources', recipename), 'Source directory not created')
+ checkvars = {}
+ checkvars['S'] = None
+ checkvars['SRC_URI'] = url.replace(recipever, '${PV}')
+ self._test_recipe_contents(recipefile, checkvars, [])
+ add_recipe()
+ # Now rename it - change both name and version
+ newrecipename = 'mynewrecipe'
+ newrecipever = '456'
+ newrecipefile = os.path.join(self.workspacedir, 'recipes', newrecipename, '%s_%s.bb' % (newrecipename, newrecipever))
+ result = runCmd('devtool rename %s %s -V %s' % (recipename, newrecipename, newrecipever))
+ self.assertExists(newrecipefile, 'Recipe file not renamed')
+ self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipename), 'Old recipe directory still exists')
+ newsrctree = os.path.join(self.workspacedir, 'sources', newrecipename)
+ self.assertExists(newsrctree, 'Source directory not renamed')
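+ # With both name and version changed, S and SRC_URI can no longer use
+ # ${BPN}/${PV}, so the renamed recipe must spell them out explicitly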
+ checkvars = {}
+ checkvars['S'] = '${WORKDIR}/%s-%s' % (recipename, recipever)
+ checkvars['SRC_URI'] = url
+ self._test_recipe_contents(newrecipefile, checkvars, [])
+ # Try again - change just name this time
+ result = runCmd('devtool reset -n %s' % newrecipename)
+ shutil.rmtree(newsrctree)
+ add_recipe()
+ newrecipefile = os.path.join(self.workspacedir, 'recipes', newrecipename, '%s_%s.bb' % (newrecipename, recipever))
+ result = runCmd('devtool rename %s %s' % (recipename, newrecipename))
+ self.assertExists(newrecipefile, 'Recipe file not renamed')
+ self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipename), 'Old recipe directory still exists')
+ self.assertExists(os.path.join(self.workspacedir, 'sources', newrecipename), 'Source directory not renamed')
+ checkvars = {}
+ checkvars['S'] = '${WORKDIR}/%s-${PV}' % recipename
+ checkvars['SRC_URI'] = url.replace(recipever, '${PV}')
+ self._test_recipe_contents(newrecipefile, checkvars, [])
+ # Try again - change just version this time
+ result = runCmd('devtool reset -n %s' % newrecipename)
+ shutil.rmtree(newsrctree)
+ add_recipe()
+ newrecipefile = os.path.join(self.workspacedir, 'recipes', recipename, '%s_%s.bb' % (recipename, newrecipever))
+ result = runCmd('devtool rename %s -V %s' % (recipename, newrecipever))
+ self.assertExists(newrecipefile, 'Recipe file not renamed')
+ self.assertExists(os.path.join(self.workspacedir, 'sources', recipename), 'Source directory no longer exists')
+ checkvars = {}
+ checkvars['S'] = '${WORKDIR}/${BPN}-%s' % recipever
+ checkvars['SRC_URI'] = url
+ self._test_recipe_contents(newrecipefile, checkvars, [])
+
+ def test_devtool_virtual_kernel_modify(self):
+ """
+ Summary: The purpose of this test case is to verify that
+ devtool modify works correctly when building
+ the kernel.
+ Dependencies: NA
+ Steps: 1. Build kernel with bitbake.
+ 2. Save the config file generated.
+ 3. Clean the environment.
+ 4. Use `devtool modify virtual/kernel` to validate following:
+ 4.1 The source is checked out correctly.
+ 4.2 The resulting configuration is the same as
+ the one saved in step 2.
+ 4.3 The kernel can be built correctly.
+ 4.4 Changes made to the source are reflected in
+ subsequent builds.
+ 4.5 Changes to the configuration are reflected in
+ subsequent builds.
+ Expected: devtool modify is able to check out the kernel source,
+ and modifications to the source and configuration are reflected
+ when building the kernel.
+ """
+ kernel_provider = get_bb_var('PREFERRED_PROVIDER_virtual/kernel')
+ # Clean up the environment
+ bitbake('%s -c clean' % kernel_provider)
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ tempdir_cfg = tempfile.mkdtemp(prefix='config_qa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(tempdir_cfg)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c clean %s' % kernel_provider)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ #Step 1
+ #Only the config file is generated here instead of building the whole kernel,
+ #to reduce the execution time of this test case.
+ bitbake('%s -c configure' % kernel_provider)
+ bbconfig = os.path.join(get_bb_var('B', kernel_provider),'.config')
+ #Step 2
+ runCmd('cp %s %s' % (bbconfig, tempdir_cfg))
+ self.assertExists(os.path.join(tempdir_cfg, '.config'), 'Could not copy .config file from kernel')
+
+ tmpconfig = os.path.join(tempdir_cfg, '.config')
+ #Step 3
+ bitbake('%s -c clean' % kernel_provider)
+ #Step 4.1
+ runCmd('devtool modify virtual/kernel -x %s' % tempdir)
+ self.assertExists(os.path.join(tempdir, 'Makefile'), 'Extracted source could not be found')
+ #Step 4.2
+ configfile = os.path.join(tempdir,'.config')
+ diff = runCmd('diff %s %s' % (tmpconfig, configfile))
+ self.assertEqual(0,diff.status,'Kernel .config file is not the same using bitbake and devtool')
+ #Step 4.3
+ #NOTE: virtual/kernel is mapped to kernel_provider
+ result = runCmd('devtool build %s' % kernel_provider)
+ self.assertEqual(0,result.status,'Cannot build kernel using `devtool build`')
+ kernelfile = os.path.join(get_bb_var('KBUILD_OUTPUT', kernel_provider), 'vmlinux')
+ self.assertExists(kernelfile, 'Kernel was not built correctly')
+
+ #Modify the kernel source
+ modfile = os.path.join(tempdir,'arch/x86/boot/header.S')
+ modstring = "Use a boot loader. Devtool testing."
+ modapplied = runCmd("sed -i 's/Use a boot loader./%s/' %s" % (modstring, modfile))
+ self.assertEqual(0,modapplied.status,'Modification to %s on kernel source failed' % modfile)
+ #Modify the configuration
+ codeconfigfile = os.path.join(tempdir,'.config.new')
+ modconfopt = "CONFIG_SG_POOL=n"
+ modconf = runCmd("sed -i 's/CONFIG_SG_POOL=y/%s/' %s" % (modconfopt, codeconfigfile))
+ self.assertEqual(0,modconf.status,'Modification to %s failed' % codeconfigfile)
+ #Build the kernel again with devtool
+ rebuild = runCmd('devtool build %s' % kernel_provider)
+ self.assertEqual(0,rebuild.status,'Failed to build kernel after modification of source and config')
+ #Step 4.4
+ bzimagename = 'bzImage-' + get_bb_var('KERNEL_VERSION_NAME', kernel_provider)
+ bzimagefile = os.path.join(get_bb_var('D', kernel_provider),'boot', bzimagename)
+ checkmodcode = runCmd("grep '%s' %s" % (modstring, bzimagefile))
+ self.assertEqual(0,checkmodcode.status,'Modification to the kernel source was not found in the built kernel image')
+ #Step 4.5
+ checkmodconfg = runCmd("grep %s %s" % (modconfopt, codeconfigfile))
+ self.assertEqual(0,checkmodconfg.status,'Modification to configuration file failed')
diff --git a/meta/lib/oeqa/selftest/cases/distrodata.py b/meta/lib/oeqa/selftest/cases/distrodata.py
new file mode 100644
index 0000000000..68ba556485
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/distrodata.py
@@ -0,0 +1,89 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
+from oeqa.utils.decorators import testcase
+from oeqa.utils.ftools import write_file
+
+import oe.recipeutils
+import bb.tinfoil
+
+class Distrodata(OESelftestTestCase):
+
+ def test_checkpkg(self):
+ """
+ Summary: Test that upstream version checks do not regress
+ Expected: Upstream version checks should succeed except for the recipes listed in the exception list.
+ Product: oe-core
+ Author: Alexander Kanavin <alex.kanavin@gmail.com>
+ """
+ feature = 'LICENSE_FLAGS_WHITELIST += " commercial"\n'
+ self.write_config(feature)
+
+ pkgs = oe.recipeutils.get_recipe_upgrade_status()
+
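+ # 'UNKNOWN_BROKEN' means the upstream check failed without UPSTREAM_VERSION_UNKNOWN being set;
+ # 'KNOWN_BROKEN' means the check succeeded even though the recipe sets UPSTREAM_VERSION_UNKNOWN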
+ regressed_failures = [pkg[0] for pkg in pkgs if pkg[1] == 'UNKNOWN_BROKEN']
+ regressed_successes = [pkg[0] for pkg in pkgs if pkg[1] == 'KNOWN_BROKEN']
+ msg = ""
+ if len(regressed_failures) > 0:
+ msg = msg + """
+The following packages failed upstream version checks. Please fix them using UPSTREAM_CHECK_URI/UPSTREAM_CHECK_REGEX
+(when using tarballs) or UPSTREAM_CHECK_GITTAGREGEX (when using git). If an upstream version check cannot be performed
+(for example, if upstream does not use git tags), you can set UPSTREAM_VERSION_UNKNOWN to '1' in the recipe to acknowledge
+that the check cannot be performed.
+""" + "\n".join(regressed_failures)
+ if len(regressed_successes) > 0:
+ msg = msg + """
+The following packages have been checked successfully for upstream versions,
+but their recipes claim otherwise by setting UPSTREAM_VERSION_UNKNOWN. Please remove that line from the recipes.
+""" + "\n".join(regressed_successes)
+ self.assertTrue(len(regressed_failures) == 0 and len(regressed_successes) == 0, msg)
+
+ def test_maintainers(self):
+ """
+ Summary: Test that oe-core recipes have a maintainer
+ Expected: All oe-core recipes (except a few special static/testing ones) should have a maintainer listed in maintainers.inc file.
+ Product: oe-core
+ Author: Alexander Kanavin <alex.kanavin@gmail.com>
+ """
+ def is_exception(pkg):
+ exceptions = ["packagegroup-", "initramfs-", "systemd-machine-units", "target-sdk-provides-dummy"]
+ for i in exceptions:
+ if i in pkg:
+ return True
+ return False
+
+ feature = 'require conf/distro/include/maintainers.inc\n'
+ self.write_config(feature)
+
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=False)
+
+ with_maintainer_list = []
+ no_maintainer_list = []
+ # We could have used all_recipes() here, but this method will find
+ # every recipe if we ever move to setting RECIPE_MAINTAINER in recipe files
+ # instead of maintainers.inc
+ for fn in tinfoil.all_recipe_files(variants=False):
+ if not '/meta/recipes-' in fn:
+ # We are only interested in OE-Core
+ continue
+ rd = tinfoil.parse_recipe_file(fn, appends=False)
+ pn = rd.getVar('PN')
+ if is_exception(pn):
+ continue
+ if rd.getVar('RECIPE_MAINTAINER'):
+ with_maintainer_list.append((pn, fn))
+ else:
+ no_maintainer_list.append((pn, fn))
+
+ if no_maintainer_list:
+ self.fail("""
+The following recipes do not have a maintainer assigned to them. Please add an entry to meta/conf/distro/include/maintainers.inc file.
+""" + "\n".join(['%s (%s)' % i for i in no_maintainer_list]))
+
+ if not with_maintainer_list:
+ self.fail("""
+The list of oe-core recipes with maintainers is empty. This may indicate that the test has regressed and needs fixing.
+""")
diff --git a/meta/lib/oeqa/selftest/cases/eSDK.py b/meta/lib/oeqa/selftest/cases/eSDK.py
new file mode 100644
index 0000000000..862849af35
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/eSDK.py
@@ -0,0 +1,120 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import tempfile
+import shutil
+import os
+import glob
+import time
+import bb.utils
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
+
+class oeSDKExtSelfTest(OESelftestTestCase):
+ """
+ # Bugzilla Test Plan: 6033
+ # This code is planned to be part of the automation for eSDK containig
+ # Install libraries and headers, image generation binary feeds, sdk-update.
+ """
+
+ @staticmethod
+ def get_esdk_environment(env_eSDK, tmpdir_eSDKQA):
+ # XXX: for now use the first environment file found; it still needs to be
+ # investigated which environment (i586, x86_64) oe-selftest should load
+ pattern = os.path.join(tmpdir_eSDKQA, 'environment-setup-*')
+ return glob.glob(pattern)[0]
+
+ @staticmethod
+ def run_esdk_cmd(env_eSDK, tmpdir_eSDKQA, cmd, postconfig=None, **options):
+ if postconfig:
+ esdk_conf_file = os.path.join(tmpdir_eSDKQA, 'conf', 'local.conf')
+ with open(esdk_conf_file, 'a+') as f:
+ f.write(postconfig)
+ if not options:
+ options = {}
+ if not 'shell' in options:
+ options['shell'] = True
+
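+ # Source the eSDK environment script from the install directory with
+ # BBPATH/BUILDDIR unset so the command runs against the eSDK's own configuration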
+ runCmd("cd %s; unset BBPATH; unset BUILDDIR; . %s; %s" % (tmpdir_eSDKQA, env_eSDK, cmd), **options)
+
+ @staticmethod
+ def generate_eSDK(image):
+ pn_task = '%s -c populate_sdk_ext' % image
+ bitbake(pn_task)
+
+ @staticmethod
+ def get_eSDK_toolchain(image):
+ pn_task = '%s -c populate_sdk_ext' % image
+
+ bb_vars = get_bb_vars(['SDK_DEPLOY', 'TOOLCHAINEXT_OUTPUTNAME'], pn_task)
+ sdk_deploy = bb_vars['SDK_DEPLOY']
+ toolchain_name = bb_vars['TOOLCHAINEXT_OUTPUTNAME']
+ return os.path.join(sdk_deploy, toolchain_name + '.sh')
+
+ @staticmethod
+ def update_configuration(cls, image, tmpdir_eSDKQA, env_eSDK, ext_sdk_path):
+ sstate_dir = os.path.join(os.environ['BUILDDIR'], 'sstate-cache')
+
+ oeSDKExtSelfTest.generate_eSDK(cls.image)
+
+ cls.ext_sdk_path = oeSDKExtSelfTest.get_eSDK_toolchain(cls.image)
+ runCmd("%s -y -d \"%s\"" % (cls.ext_sdk_path, cls.tmpdir_eSDKQA))
+
+ cls.env_eSDK = oeSDKExtSelfTest.get_esdk_environment('', cls.tmpdir_eSDKQA)
+
+ sstate_config="""
+SDK_LOCAL_CONF_WHITELIST = "SSTATE_MIRRORS"
+SSTATE_MIRRORS = "file://.* file://%s/PATH"
+CORE_IMAGE_EXTRA_INSTALL = "perl"
+ """ % sstate_dir
+
+ with open(os.path.join(cls.tmpdir_eSDKQA, 'conf', 'local.conf'), 'a+') as f:
+ f.write(sstate_config)
+
+ @classmethod
+ def setUpClass(cls):
+ super(oeSDKExtSelfTest, cls).setUpClass()
+ cls.image = 'core-image-minimal'
+
+ bb_vars = get_bb_vars(['SSTATE_DIR', 'WORKDIR'], cls.image)
+ bb.utils.mkdirhier(bb_vars["WORKDIR"])
+ cls.tmpdirobj = tempfile.TemporaryDirectory(prefix="selftest-esdk-", dir=bb_vars["WORKDIR"])
+ cls.tmpdir_eSDKQA = cls.tmpdirobj.name
+
+ oeSDKExtSelfTest.generate_eSDK(cls.image)
+
+ # Install eSDK
+ cls.ext_sdk_path = oeSDKExtSelfTest.get_eSDK_toolchain(cls.image)
+ runCmd("%s -y -d \"%s\"" % (cls.ext_sdk_path, cls.tmpdir_eSDKQA))
+
+ cls.env_eSDK = oeSDKExtSelfTest.get_esdk_environment('', cls.tmpdir_eSDKQA)
+
+ # Configure eSDK to use sstate mirror from poky
+ sstate_config="""
+SDK_LOCAL_CONF_WHITELIST = "SSTATE_MIRRORS"
+SSTATE_MIRRORS = "file://.* file://%s/PATH"
+ """ % bb_vars["SSTATE_DIR"]
+ with open(os.path.join(cls.tmpdir_eSDKQA, 'conf', 'local.conf'), 'a+') as f:
+ f.write(sstate_config)
+
+ @classmethod
+ def tearDownClass(cls):
+ for i in range(0, 10):
+ if os.path.exists(os.path.join(cls.tmpdir_eSDKQA, 'bitbake.lock')):
+ time.sleep(1)
+ else:
+ break
+ cls.tmpdirobj.cleanup()
+ super().tearDownClass()
+
+ def test_install_libraries_headers(self):
+ pn_sstate = 'bc'
+ bitbake(pn_sstate)
+ cmd = "devtool sdk-install %s " % pn_sstate
+ oeSDKExtSelfTest.run_esdk_cmd(self.env_eSDK, self.tmpdir_eSDKQA, cmd)
+
+ def test_image_generation_binary_feeds(self):
+ image = 'core-image-minimal'
+ cmd = "devtool build-image %s" % image
+ oeSDKExtSelfTest.run_esdk_cmd(self.env_eSDK, self.tmpdir_eSDKQA, cmd)
+
diff --git a/meta/lib/oeqa/selftest/cases/efibootpartition.py b/meta/lib/oeqa/selftest/cases/efibootpartition.py
new file mode 100644
index 0000000000..a61cf9bcb3
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/efibootpartition.py
@@ -0,0 +1,46 @@
+# Based on runqemu.py test file
+#
+# Copyright (c) 2017 Wind River Systems, Inc.
+#
+# SPDX-License-Identifier: MIT
+#
+
+import re
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, runqemu, get_bb_var
+
+class GenericEFITest(OESelftestTestCase):
+ """EFI booting test class"""
+
+ cmd_common = "runqemu nographic serial wic ovmf"
+ efi_provider = "systemd-boot"
+ image = "core-image-minimal"
+ machine = "qemux86-64"
+ recipes_built = False
+
+ def setUpLocal(self):
+ super(GenericEFITest, self).setUpLocal()
+
+ self.write_config(
+"""
+EFI_PROVIDER = "%s"
+IMAGE_FSTYPES_pn-%s_append = " wic"
+MACHINE = "%s"
+MACHINE_FEATURES_append = " efi"
+WKS_FILE = "efi-bootdisk.wks.in"
+IMAGE_INSTALL_append = " grub-efi systemd-boot kernel-image-bzimage"
+"""
+% (self.efi_provider, self.image, self.machine))
+ if not self.recipes_built:
+ bitbake("ovmf")
+ bitbake(self.image)
+ self.recipes_built = True
+
+ def test_boot_efi(self):
+ """Test generic boot partition with qemu"""
+ cmd = "%s %s" % (self.cmd_common, self.machine)
+ with runqemu(self.image, ssh=False, launch_cmd=cmd) as qemu:
+ self.assertTrue(qemu.runner.logged, "Failed: %s" % cmd)
diff --git a/meta/lib/oeqa/selftest/cases/fetch.py b/meta/lib/oeqa/selftest/cases/fetch.py
new file mode 100644
index 0000000000..76cbadf2ff
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/fetch.py
@@ -0,0 +1,51 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+
+import oe.path
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+
+class Fetch(OESelftestTestCase):
+ def test_git_mirrors(self):
+ """
+ Verify that the git fetcher will fall back to the HTTP mirrors. The
+ recipe needs to be one that we have on the Yocto Project source mirror
+ and is hosted in git.
+ """
+
+ # TODO: mktempd instead of hardcoding
+ dldir = os.path.join(self.builddir, "download-git-mirrors")
+ self.track_for_cleanup(dldir)
+
+ # No mirrors, should use git to fetch successfully
+ features = """
+DL_DIR = "%s"
+MIRRORS_forcevariable = ""
+PREMIRRORS_forcevariable = ""
+""" % dldir
+ self.write_config(features)
+ oe.path.remove(dldir, recurse=True)
+ bitbake("dbus-wait -c fetch -f")
+
+ # No mirrors and broken git, should fail
+ features = """
+DL_DIR = "%s"
+GIT_PROXY_COMMAND = "false"
+MIRRORS_forcevariable = ""
+PREMIRRORS_forcevariable = ""
+""" % dldir
+ self.write_config(features)
+ oe.path.remove(dldir, recurse=True)
+ with self.assertRaises(AssertionError):
+ bitbake("dbus-wait -c fetch -f")
+
+ # Broken git but a specific mirror
+ features = """
+DL_DIR = "%s"
+GIT_PROXY_COMMAND = "false"
+MIRRORS_forcevariable = "git://.*/.* http://downloads.yoctoproject.org/mirror/sources/"
+""" % dldir
+ self.write_config(features)
+ oe.path.remove(dldir, recurse=True)
+ bitbake("dbus-wait -c fetch -f")
diff --git a/meta/lib/oeqa/selftest/cases/gcc.py b/meta/lib/oeqa/selftest/cases/gcc.py
new file mode 100644
index 0000000000..3efe15228f
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/gcc.py
@@ -0,0 +1,152 @@
+# SPDX-License-Identifier: MIT
+import os
+from oeqa.core.decorator import OETestTag
+from oeqa.core.case import OEPTestResultTestCase
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runqemu, Command
+
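+# parse_values() walks a testsuite .sum results file and yields (test name, status)
+# tuples for each line that starts with one of the known status keywords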
+def parse_values(content):
+ for i in content:
+ for v in ["PASS", "FAIL", "XPASS", "XFAIL", "UNRESOLVED", "UNSUPPORTED", "UNTESTED", "ERROR", "WARNING"]:
+ if i.startswith(v + ": "):
+ yield i[len(v) + 2:].strip(), v
+ break
+
+class GccSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
+ def check_skip(self, suite):
+ targets = get_bb_var("RUNTIMETARGET", "gcc-runtime").split()
+ if suite not in targets:
+ self.skipTest("Target does not use {0}".format(suite))
+
+ def run_check(self, *suites, ssh = None):
+ targets = set()
+ for s in suites:
+ if s == "gcc":
+ targets.add("check-gcc-c")
+ elif s == "g++":
+ targets.add("check-gcc-c++")
+ else:
+ targets.add("check-target-{}".format(s))
+
+ # configure ssh target
+ features = []
+ features.append('MAKE_CHECK_TARGETS = "{0}"'.format(" ".join(targets)))
+ if ssh is not None:
+ features.append('TOOLCHAIN_TEST_TARGET = "ssh"')
+ features.append('TOOLCHAIN_TEST_HOST = "{0}"'.format(ssh))
+ features.append('TOOLCHAIN_TEST_HOST_USER = "root"')
+ features.append('TOOLCHAIN_TEST_HOST_PORT = "22"')
+ self.write_config("\n".join(features))
+
+ recipe = "gcc-runtime"
+ bitbake("{} -c check".format(recipe))
+
+ bb_vars = get_bb_vars(["B", "TARGET_SYS"], recipe)
+ builddir, target_sys = bb_vars["B"], bb_vars["TARGET_SYS"]
+
+ for suite in suites:
+ sumspath = os.path.join(builddir, "gcc", "testsuite", suite, "{0}.sum".format(suite))
+ if not os.path.exists(sumspath): # check in target dirs
+ sumspath = os.path.join(builddir, target_sys, suite, "testsuite", "{0}.sum".format(suite))
+ if not os.path.exists(sumspath): # handle libstdc++-v3 -> libstdc++
+ sumspath = os.path.join(builddir, target_sys, suite, "testsuite", "{0}.sum".format(suite.split("-")[0]))
+ logpath = os.path.splitext(sumspath)[0] + ".log"
+
+ ptestsuite = "gcc-{}".format(suite) if suite != "gcc" else suite
+ ptestsuite = ptestsuite + "-user" if ssh is None else ptestsuite
+ self.ptest_section(ptestsuite, logfile = logpath)
+ with open(sumspath, "r") as f:
+ for test, result in parse_values(f):
+ self.ptest_result(ptestsuite, test, result)
+
+ def run_check_emulated(self, *args, **kwargs):
+ # build core-image-minimal with required packages
+ default_installed_packages = ["libgcc", "libstdc++", "libatomic", "libgomp"]
+ features = []
+ features.append('IMAGE_FEATURES += "ssh-server-openssh"')
+ features.append('CORE_IMAGE_EXTRA_INSTALL += "{0}"'.format(" ".join(default_installed_packages)))
+ self.write_config("\n".join(features))
+ bitbake("core-image-minimal")
+
+ # wrap the execution with a qemu instance
+ with runqemu("core-image-minimal", runqemuparams = "nographic") as qemu:
+ # validate that SSH is working
+ status, _ = qemu.run("uname")
+ self.assertEqual(status, 0)
+
+ return self.run_check(*args, ssh=qemu.ip, **kwargs)
+
+@OETestTag("toolchain-user")
+class GccCrossSelfTest(GccSelfTestBase):
+ def test_cross_gcc(self):
+ self.run_check("gcc")
+
+@OETestTag("toolchain-user")
+class GxxCrossSelfTest(GccSelfTestBase):
+ def test_cross_gxx(self):
+ self.run_check("g++")
+
+@OETestTag("toolchain-user")
+class GccLibAtomicSelfTest(GccSelfTestBase):
+ def test_libatomic(self):
+ self.run_check("libatomic")
+
+@OETestTag("toolchain-user")
+class GccLibGompSelfTest(GccSelfTestBase):
+ def test_libgomp(self):
+ self.run_check("libgomp")
+
+@OETestTag("toolchain-user")
+class GccLibStdCxxSelfTest(GccSelfTestBase):
+ def test_libstdcxx(self):
+ self.run_check("libstdc++-v3")
+
+@OETestTag("toolchain-user")
+class GccLibSspSelfTest(GccSelfTestBase):
+ def test_libssp(self):
+ self.check_skip("libssp")
+ self.run_check("libssp")
+
+@OETestTag("toolchain-user")
+class GccLibItmSelfTest(GccSelfTestBase):
+ def test_libitm(self):
+ self.check_skip("libitm")
+ self.run_check("libitm")
+
+@OETestTag("toolchain-system")
+class GccCrossSelfTestSystemEmulated(GccSelfTestBase):
+ def test_cross_gcc(self):
+ self.run_check_emulated("gcc")
+
+@OETestTag("toolchain-system")
+class GxxCrossSelfTestSystemEmulated(GccSelfTestBase):
+ def test_cross_gxx(self):
+ self.run_check_emulated("g++")
+
+@OETestTag("toolchain-system")
+class GccLibAtomicSelfTestSystemEmulated(GccSelfTestBase):
+ def test_libatomic(self):
+ self.run_check_emulated("libatomic")
+
+@OETestTag("toolchain-system")
+class GccLibGompSelfTestSystemEmulated(GccSelfTestBase):
+ def test_libgomp(self):
+ self.run_check_emulated("libgomp")
+
+@OETestTag("toolchain-system")
+class GccLibStdCxxSelfTestSystemEmulated(GccSelfTestBase):
+ def test_libstdcxx(self):
+ self.run_check_emulated("libstdc++-v3")
+
+@OETestTag("toolchain-system")
+class GccLibSspSelfTestSystemEmulated(GccSelfTestBase):
+ def test_libssp(self):
+ self.check_skip("libssp")
+ self.run_check_emulated("libssp")
+
+@OETestTag("toolchain-system")
+class GccLibItmSelfTestSystemEmulated(GccSelfTestBase):
+ def test_libitm(self):
+ self.check_skip("libitm")
+ self.run_check_emulated("libitm")
+
diff --git a/meta/lib/oeqa/selftest/cases/glibc.py b/meta/lib/oeqa/selftest/cases/glibc.py
new file mode 100644
index 0000000000..c687f6ef93
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/glibc.py
@@ -0,0 +1,89 @@
+# SPDX-License-Identifier: MIT
+import os
+import contextlib
+from oeqa.core.decorator import OETestTag
+from oeqa.core.case import OEPTestResultTestCase
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runqemu, Command
+from oeqa.utils.nfs import unfs_server
+
+def parse_values(content):
+ for i in content:
+ for v in ["PASS", "FAIL", "XPASS", "XFAIL", "UNRESOLVED", "UNSUPPORTED", "UNTESTED", "ERROR", "WARNING"]:
+ if i.startswith(v + ": "):
+ yield i[len(v) + 2:].strip(), v
+ break
+
+class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
+ def run_check(self, ssh = None):
+ # configure ssh target
+ features = []
+ if ssh is not None:
+ features.append('TOOLCHAIN_TEST_TARGET = "ssh"')
+ features.append('TOOLCHAIN_TEST_HOST = "{0}"'.format(ssh))
+ features.append('TOOLCHAIN_TEST_HOST_USER = "root"')
+ features.append('TOOLCHAIN_TEST_HOST_PORT = "22"')
+ # force single threaded test execution
+ features.append('EGLIBCPARALLELISM_task-check_pn-glibc-testsuite = "PARALLELMFLAGS="-j1""')
+ self.write_config("\n".join(features))
+
+ bitbake("glibc-testsuite -c check")
+
+ builddir = get_bb_var("B", "glibc-testsuite")
+
+ ptestsuite = "glibc-user" if ssh is None else "glibc"
+ self.ptest_section(ptestsuite)
+ with open(os.path.join(builddir, "tests.sum"), "r") as f:
+ for test, result in parse_values(f):
+ self.ptest_result(ptestsuite, test, result)
+
+ def run_check_emulated(self):
+ with contextlib.ExitStack() as s:
+ # use the base work dir as the nfs mount, since the recipe directory may not exist
+ tmpdir = get_bb_var("BASE_WORKDIR")
+ nfsport, mountport = s.enter_context(unfs_server(tmpdir))
+
+ # build core-image-minimal with required packages
+ default_installed_packages = [
+ "glibc-charmaps",
+ "libgcc",
+ "libstdc++",
+ "libatomic",
+ "libgomp",
+ # "python3",
+ # "python3-pexpect",
+ "nfs-utils",
+ ]
+ features = []
+ features.append('IMAGE_FEATURES += "ssh-server-openssh"')
+ features.append('CORE_IMAGE_EXTRA_INSTALL += "{0}"'.format(" ".join(default_installed_packages)))
+ self.write_config("\n".join(features))
+ bitbake("core-image-minimal")
+
+ # start runqemu
+ qemu = s.enter_context(runqemu("core-image-minimal", runqemuparams = "nographic"))
+
+ # validate that SSH is working
+ status, _ = qemu.run("uname")
+ self.assertEqual(status, 0)
+
+ # setup nfs mount
+ if qemu.run("mkdir -p \"{0}\"".format(tmpdir))[0] != 0:
+ raise Exception("Failed to setup NFS mount directory on target")
+ mountcmd = "mount -o noac,nfsvers=3,port={0},udp,mountport={1} \"{2}:{3}\" \"{3}\"".format(nfsport, mountport, qemu.server_ip, tmpdir)
+ status, output = qemu.run(mountcmd)
+ if status != 0:
+ raise Exception("Failed to setup NFS mount on target ({})".format(repr(output)))
+
+ self.run_check(ssh = qemu.ip)
+
+@OETestTag("toolchain-user")
+class GlibcSelfTest(GlibcSelfTestBase):
+ def test_glibc(self):
+ self.run_check()
+
+@OETestTag("toolchain-system")
+class GlibcSelfTestSystemEmulated(GlibcSelfTestBase):
+ def test_glibc(self):
+ self.run_check_emulated()
+
diff --git a/meta/lib/oeqa/selftest/cases/gotoolchain.py b/meta/lib/oeqa/selftest/cases/gotoolchain.py
new file mode 100644
index 0000000000..3119520f0d
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/gotoolchain.py
@@ -0,0 +1,71 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import glob
+import os
+import shutil
+import tempfile
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import runCmd, bitbake, get_bb_vars
+
+
+class oeGoToolchainSelfTest(OESelftestTestCase):
+ """
+ Test cases for OE's Go toolchain
+ """
+
+ @staticmethod
+ def get_sdk_environment(tmpdir_SDKQA):
+ pattern = os.path.join(tmpdir_SDKQA, "environment-setup-*")
+ # FIXME: this is a very naive implementation
+ return glob.glob(pattern)[0]
+
+ @staticmethod
+ def get_sdk_toolchain():
+ bb_vars = get_bb_vars(['SDK_DEPLOY', 'TOOLCHAIN_OUTPUTNAME'],
+ "meta-go-toolchain")
+ sdk_deploy = bb_vars['SDK_DEPLOY']
+ toolchain_name = bb_vars['TOOLCHAIN_OUTPUTNAME']
+ return os.path.join(sdk_deploy, toolchain_name + ".sh")
+
+ @classmethod
+ def setUpClass(cls):
+ super(oeGoToolchainSelfTest, cls).setUpClass()
+ cls.tmpdir_SDKQA = tempfile.mkdtemp(prefix='SDKQA')
+ cls.go_path = os.path.join(cls.tmpdir_SDKQA, "go")
+ # Build the SDK and locate it in DEPLOYDIR
+ bitbake("meta-go-toolchain")
+ cls.sdk_path = oeGoToolchainSelfTest.get_sdk_toolchain()
+ # Install the SDK into the tmpdir
+ runCmd("sh %s -y -d \"%s\"" % (cls.sdk_path, cls.tmpdir_SDKQA))
+ cls.env_SDK = oeGoToolchainSelfTest.get_sdk_environment(cls.tmpdir_SDKQA)
+
+ @classmethod
+ def tearDownClass(cls):
+ shutil.rmtree(cls.tmpdir_SDKQA, ignore_errors=True)
+ super(oeGoToolchainSelfTest, cls).tearDownClass()
+
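+ # Run a go command inside the installed SDK: source the SDK environment,
+ # point GOPATH at the temporary workspace and invoke the cross go tool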
+ def run_sdk_go_command(self, gocmd):
+ cmd = "cd %s; " % self.tmpdir_SDKQA
+ cmd = cmd + ". %s; " % self.env_SDK
+ cmd = cmd + "export GOPATH=%s; " % self.go_path
+ cmd = cmd + "${CROSS_COMPILE}go %s" % gocmd
+ return runCmd(cmd).status
+
+ def test_go_dep_build(self):
+ proj = "github.com/golang"
+ name = "dep"
+ ver = "v0.3.1"
+ archive = ".tar.gz"
+ url = "https://%s/%s/archive/%s%s" % (proj, name, ver, archive)
+
+ runCmd("cd %s; wget %s" % (self.tmpdir_SDKQA, url))
+ runCmd("cd %s; tar -xf %s" % (self.tmpdir_SDKQA, ver+archive))
+ runCmd("mkdir -p %s/src/%s" % (self.go_path, proj))
+ runCmd("mv %s/dep-0.3.1 %s/src/%s/%s"
+ % (self.tmpdir_SDKQA, self.go_path, proj, name))
+ retv = self.run_sdk_go_command('build %s/%s/cmd/dep'
+ % (proj, name))
+ self.assertEqual(retv, 0,
+ msg="Running go build failed for %s" % name)
diff --git a/meta/lib/oeqa/selftest/cases/image_typedep.py b/meta/lib/oeqa/selftest/cases/image_typedep.py
new file mode 100644
index 0000000000..52e1080f13
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/image_typedep.py
@@ -0,0 +1,58 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+
+class ImageTypeDepTests(OESelftestTestCase):
+
+ # Verify that when specifying a IMAGE_TYPEDEP_ of the form "foo.bar" that
+ # the conversion type bar gets added as a dep as well
+ def test_conversion_typedep_added(self):
+
+ self.write_recipeinc('emptytest', """
+# Try to empty out the default dependency list
+PACKAGE_INSTALL = ""
+DISTRO_EXTRA_RDEPENDS=""
+
+LICENSE = "MIT"
+IMAGE_FSTYPES = "testfstype"
+
+IMAGE_TYPES_MASKED += "testfstype"
+IMAGE_TYPEDEP_testfstype = "tar.bz2"
+
+inherit image
+
+""")
+ # First get the dependency that should exist for bz2, it will look
+ # like CONVERSION_DEPENDS_bz2="somedep"
+ result = bitbake('-e emptytest')
+
+ dep = None
+ for line in result.output.split('\n'):
+ if line.startswith('CONVERSION_DEPENDS_bz2'):
+ dep = line.split('=')[1].strip('"')
+ break
+
+ self.assertIsNotNone(dep, "CONVERSION_DEPENDS_bz2 dependency not found in bitbake -e output")
+
+ # Now get the dependency task list and check for the expected task
+ # dependency
+ bitbake('-g emptytest')
+
+ taskdependsfile = os.path.join(self.builddir, 'task-depends.dot')
+ dep = dep + ".do_populate_sysroot"
+ depfound = False
+ expectedline = '"emptytest.do_rootfs" -> "{}"'.format(dep)
+
+ with open(taskdependsfile, "r") as f:
+ for line in f:
+ if line.strip() == expectedline:
+ depfound = True
+ break
+
+ if not depfound:
+ raise AssertionError("\"{}\" not found".format(expectedline))
diff --git a/meta/lib/oeqa/selftest/cases/imagefeatures.py b/meta/lib/oeqa/selftest/cases/imagefeatures.py
new file mode 100644
index 0000000000..5c519ac3d6
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/imagefeatures.py
@@ -0,0 +1,264 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, runqemu
+from oeqa.utils.sshcontrol import SSHControl
+import os
+import json
+
+class ImageFeatures(OESelftestTestCase):
+
+ test_user = 'tester'
+ root_user = 'root'
+
+ def test_non_root_user_can_connect_via_ssh_without_password(self):
+ """
+ Summary: Check if non root user can connect via ssh without password
+ Expected: 1. Connection to the image via ssh using root user without providing a password should be allowed.
+ 2. Connection to the image via ssh using tester user without providing a password should be allowed.
+ Product: oe-core
+ Author: Ionut Chisanovici <ionutx.chisanovici@intel.com>
+ AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
+ """
+
+ features = 'EXTRA_IMAGE_FEATURES = "ssh-server-openssh empty-root-password allow-empty-password allow-root-login"\n'
+ features += 'INHERIT += "extrausers"\n'
+ features += 'EXTRA_USERS_PARAMS = "useradd -p \'\' {}; usermod -s /bin/sh {};"'.format(self.test_user, self.test_user)
+ self.write_config(features)
+
+ # Build a core-image-minimal
+ bitbake('core-image-minimal')
+
+ with runqemu("core-image-minimal") as qemu:
+ # Attempt to ssh with each user into qemu with empty password
+ for user in [self.root_user, self.test_user]:
+ ssh = SSHControl(ip=qemu.ip, logfile=qemu.sshlog, user=user)
+ status, output = ssh.run("true")
+ self.assertEqual(status, 0, 'ssh to user %s failed with %s' % (user, output))
+
+ def test_all_users_can_connect_via_ssh_without_password(self):
+ """
+ Summary: Check which users can connect via ssh without a password when empty-root-password is not enabled
+ Expected: 1. Connection to the image via ssh using root user without providing a password should NOT be allowed.
+ 2. Connection to the image via ssh using tester user without providing a password should be allowed.
+ Product: oe-core
+ Author: Ionut Chisanovici <ionutx.chisanovici@intel.com>
+ AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
+ """
+
+ features = 'EXTRA_IMAGE_FEATURES = "ssh-server-openssh allow-empty-password allow-root-login"\n'
+ features += 'INHERIT += "extrausers"\n'
+ features += 'EXTRA_USERS_PARAMS = "useradd -p \'\' {}; usermod -s /bin/sh {};"'.format(self.test_user, self.test_user)
+ self.write_config(features)
+
+ # Build a core-image-minimal
+ bitbake('core-image-minimal')
+
+ with runqemu("core-image-minimal") as qemu:
+ # Attempt to ssh with each user into qemu with empty password
+ for user in [self.root_user, self.test_user]:
+ ssh = SSHControl(ip=qemu.ip, logfile=qemu.sshlog, user=user)
+ status, output = ssh.run("true")
+ if user == 'root':
+ self.assertNotEqual(status, 0, 'ssh to user root was allowed when it should not have been')
+ else:
+ self.assertEqual(status, 0, 'ssh to user tester failed with %s' % output)
+
+
+ def test_clutter_image_can_be_built(self):
+ """
+ Summary: Check if clutter image can be built
+ Expected: 1. core-image-clutter can be built
+ Product: oe-core
+ Author: Ionut Chisanovici <ionutx.chisanovici@intel.com>
+ AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
+ """
+
+ # Build a core-image-clutter
+ bitbake('core-image-clutter')
+
+ def test_wayland_support_in_image(self):
+ """
+ Summary: Check Wayland support in image
+ Expected: 1. Wayland image can be built
+ 2. Wayland feature can be installed
+ Product: oe-core
+ Author: Ionut Chisanovici <ionutx.chisanovici@intel.com>
+ AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
+ """
+
+ distro_features = get_bb_var('DISTRO_FEATURES')
+ if not ('opengl' in distro_features and 'wayland' in distro_features):
+ self.skipTest('opengl and/or wayland not present in DISTRO_FEATURES so core-image-weston cannot be built')
+
+ # Build a core-image-weston
+ bitbake('core-image-weston')
+
+ def test_bmap(self):
+ """
+ Summary: Check bmap support
+ Expected: 1. core-image-minimal can be built with bmap support
+ 2. core-image-minimal is sparse
+ Product: oe-core
+ Author: Ed Bartosh <ed.bartosh@linux.intel.com>
+ """
+
+ features = 'IMAGE_FSTYPES += " ext4 ext4.bmap ext4.bmap.gz"'
+ self.write_config(features)
+
+ image_name = 'core-image-minimal'
+ bitbake(image_name)
+
+ deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
+ link_name = get_bb_var('IMAGE_LINK_NAME', image_name)
+ image_path = os.path.join(deploy_dir_image, "%s.ext4" % link_name)
+ bmap_path = "%s.bmap" % image_path
+ gzip_path = "%s.gz" % bmap_path
+
+ # check if result image, bmap and bmap.gz files are in deploy directory
+ self.assertTrue(os.path.exists(image_path))
+ self.assertTrue(os.path.exists(bmap_path))
+ self.assertTrue(os.path.exists(gzip_path))
+
+ # check if result image is sparse
+ image_stat = os.stat(image_path)
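+ # a sparse file allocates fewer 512-byte blocks than its apparent size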
+ self.assertGreater(image_stat.st_size, image_stat.st_blocks * 512)
+
+ # check if the resulting gzip is valid
+ self.assertTrue(runCmd('gzip -t %s' % gzip_path))
+
+ def test_hypervisor_fmts(self):
+ """
+ Summary: Check various hypervisor formats
+ Expected: 1. core-image-minimal can be built with vmdk, vdi and
+ qcow2 support.
+ 2. qemu-img says each image has the expected format
+ Product: oe-core
+ Author: Tom Rini <trini@konsulko.com>
+ """
+
+ img_types = [ 'vmdk', 'vdi', 'qcow2' ]
+ features = ""
+ for itype in img_types:
+ features += 'IMAGE_FSTYPES += "wic.%s"\n' % itype
+ self.write_config(features)
+
+ image_name = 'core-image-minimal'
+ bitbake(image_name)
+
+ deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
+ link_name = get_bb_var('IMAGE_LINK_NAME', image_name)
+ for itype in img_types:
+ image_path = os.path.join(deploy_dir_image, "%s.wic.%s" %
+ (link_name, itype))
+
+ # check if result image file is in deploy directory
+ self.assertTrue(os.path.exists(image_path))
+
+ # check if result image is vmdk
+ sysroot = get_bb_var('STAGING_DIR_NATIVE', 'core-image-minimal')
+ result = runCmd('qemu-img info --output json %s' % image_path,
+ native_sysroot=sysroot)
+ try:
+ data = json.loads(result.output)
+ self.assertEqual(data.get('format'), itype,
+ msg="Unexpected format in '%s'" % (result.output))
+ except json.decoder.JSONDecodeError:
+ self.fail("Could not parse '%ss'" % result.output)
+
+ def test_long_chain_conversion(self):
+ """
+ Summary: Check for chaining many CONVERSION_CMDs together
+ Expected: 1. core-image-minimal can be built with
+ ext4.bmap.gz.bz2.lzo.xz.u-boot and also create a
+ sha256sum
+ 2. The above image has a valid sha256sum
+ Product: oe-core
+ Author: Tom Rini <trini@konsulko.com>
+ """
+
+ conv = "ext4.bmap.gz.bz2.lzo.xz.u-boot"
+ features = 'IMAGE_FSTYPES += "%s %s.sha256sum"' % (conv, conv)
+ self.write_config(features)
+
+ image_name = 'core-image-minimal'
+ bitbake(image_name)
+
+ deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
+ link_name = get_bb_var('IMAGE_LINK_NAME', image_name)
+ image_path = os.path.join(deploy_dir_image, "%s.%s" %
+ (link_name, conv))
+
+ # check if resulting image is in the deploy directory
+ self.assertTrue(os.path.exists(image_path))
+ self.assertTrue(os.path.exists(image_path + ".sha256sum"))
+
+ # check if the resulting sha256sum agrees
+ self.assertTrue(runCmd('cd %s;sha256sum -c %s.%s.sha256sum' %
+ (deploy_dir_image, link_name, conv)))
+
+ def test_image_fstypes(self):
+ """
+ Summary: Check if image of supported image fstypes can be built
+ Expected: core-image-minimal can be built for various image types
+ Product: oe-core
+ Author: Ed Bartosh <ed.bartosh@linux.intel.com>
+ """
+ image_name = 'core-image-minimal'
+
+ all_image_types = set(get_bb_var("IMAGE_TYPES", image_name).split())
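+        # Exclude image types that need extra configuration or host tooling
+        # to build in a default selftest environment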
+ blacklist = set(('container', 'elf', 'f2fs', 'multiubi', 'tar.zst'))
+ img_types = all_image_types - blacklist
+
+ config = 'IMAGE_FSTYPES += "%s"\n'\
+ 'MKUBIFS_ARGS ?= "-m 2048 -e 129024 -c 2047"\n'\
+ 'UBINIZE_ARGS ?= "-m 2048 -p 128KiB -s 512"' % ' '.join(img_types)
+ self.write_config(config)
+
+ bitbake(image_name)
+
+ deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
+ link_name = get_bb_var('IMAGE_LINK_NAME', image_name)
+ for itype in img_types:
+ image_path = os.path.join(deploy_dir_image, "%s.%s" % (link_name, itype))
+ # check if result image is in deploy directory
+ self.assertTrue(os.path.exists(image_path),
+ "%s image %s doesn't exist" % (itype, image_path))
+
+ def test_useradd_static(self):
+ config = """
+USERADDEXTENSION = "useradd-staticids"
+USERADD_ERROR_DYNAMIC = "skip"
+USERADD_UID_TABLES += "files/static-passwd"
+USERADD_GID_TABLES += "files/static-group"
+"""
+ self.write_config(config)
+ bitbake("core-image-base")
+
+ def test_no_busybox_base_utils(self):
+ config = """
+# Enable x11
+DISTRO_FEATURES_append += "x11"
+
+# Switch to systemd
+DISTRO_FEATURES += "systemd"
+VIRTUAL-RUNTIME_init_manager = "systemd"
+VIRTUAL-RUNTIME_initscripts = ""
+VIRTUAL-RUNTIME_syslog = ""
+VIRTUAL-RUNTIME_login_manager = "shadow-base"
+DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit"
+
+# Replace busybox
+PREFERRED_PROVIDER_virtual/base-utils = "packagegroup-core-base-utils"
+VIRTUAL-RUNTIME_base-utils = "packagegroup-core-base-utils"
+VIRTUAL-RUNTIME_base-utils-hwclock = "util-linux-hwclock"
+VIRTUAL-RUNTIME_base-utils-syslog = ""
+
+# Blacklist busybox
+PNBLACKLIST[busybox] = "Don't build this"
+"""
+ self.write_config(config)
+
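+        # -g/--graphviz only resolves the task/dependency graph, which is
+        # enough to verify that nothing still pulls in the blacklisted busybox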
+ bitbake("--graphviz core-image-sato")
diff --git a/meta/lib/oeqa/selftest/cases/incompatible_lic.py b/meta/lib/oeqa/selftest/cases/incompatible_lic.py
new file mode 100644
index 0000000000..3eabd79097
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/incompatible_lic.py
@@ -0,0 +1,135 @@
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+
+class IncompatibleLicenseTests(OESelftestTestCase):
+
+ def lic_test(self, pn, pn_lic, lic):
+ error_msg = 'ERROR: Nothing PROVIDES \'%s\'\n%s was skipped: it has incompatible license(s): %s' % (pn, pn, pn_lic)
+
+ self.write_config("INCOMPATIBLE_LICENSE += \"%s\"" % (lic))
+
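+        # A dry run is enough: the recipe is skipped for its incompatible
+        # license, so bitbake reports that nothing PROVIDES it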
+ result = bitbake('%s --dry-run' % (pn), ignore_status=True)
+ if error_msg not in result.output:
+ raise AssertionError(result.output)
+
+ # Verify that a package with an SPDX license (from AVAILABLE_LICENSES)
+ # cannot be built when INCOMPATIBLE_LICENSE contains this SPDX license
+ def test_incompatible_spdx_license(self):
+ self.lic_test('incompatible-license', 'GPL-3.0', 'GPL-3.0')
+
+ # Verify that a package with an SPDX license (from AVAILABLE_LICENSES)
+ # cannot be built when INCOMPATIBLE_LICENSE contains an alias (in
+ # SPDXLICENSEMAP) of this SPDX license
+ def test_incompatible_alias_spdx_license(self):
+ self.lic_test('incompatible-license', 'GPL-3.0', 'GPLv3')
+
+ # Verify that a package with an SPDX license (from AVAILABLE_LICENSES)
+ # cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded license
+ # matching this SPDX license
+ def test_incompatible_spdx_license_wildcard(self):
+ self.lic_test('incompatible-license', 'GPL-3.0', '*GPL-3.0')
+
+ # Verify that a package with an SPDX license (from AVAILABLE_LICENSES)
+ # cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded alias
+ # license matching this SPDX license
+ def test_incompatible_alias_spdx_license_wildcard(self):
+ self.lic_test('incompatible-license', 'GPL-3.0', '*GPLv3')
+
+ # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX
+ # license cannot be built when INCOMPATIBLE_LICENSE contains this SPDX
+ # license
+ def test_incompatible_spdx_license_alias(self):
+ self.lic_test('incompatible-license-alias', 'GPL-3.0', 'GPL-3.0')
+
+ # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX
+ # license cannot be built when INCOMPATIBLE_LICENSE contains this alias
+ def test_incompatible_alias_spdx_license_alias(self):
+ self.lic_test('incompatible-license-alias', 'GPL-3.0', 'GPLv3')
+
+ # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX
+ # license cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded
+ # license matching this SPDX license
+ def test_incompatible_spdx_license_alias_wildcard(self):
+ self.lic_test('incompatible-license-alias', 'GPL-3.0', '*GPL-3.0')
+
+ # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX
+ # license cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded
+ # alias license matching the SPDX license
+ def test_incompatible_alias_spdx_license_alias_wildcard(self):
+ self.lic_test('incompatible-license-alias', 'GPL-3.0', '*GPLv3')
+
+ # Verify that a package with multiple SPDX licenses (from
+ # AVAILABLE_LICENSES) cannot be built when INCOMPATIBLE_LICENSE contains
+ # some of them
+ def test_incompatible_spdx_licenses(self):
+ self.lic_test('incompatible-licenses', 'GPL-3.0 LGPL-3.0', 'GPL-3.0 LGPL-3.0')
+
+ # Verify that a package with multiple SPDX licenses (from
+ # AVAILABLE_LICENSES) cannot be built when INCOMPATIBLE_LICENSE contains a
+ # wildcard to some of them
+ def test_incompatible_spdx_licenses_wildcard(self):
+ self.lic_test('incompatible-licenses', 'GPL-3.0 LGPL-3.0', '*GPL-3.0')
+
+ # Verify that a package with multiple SPDX licenses (from
+ # AVAILABLE_LICENSES) cannot be built when INCOMPATIBLE_LICENSE contains a
+ # wildcard matching all licenses
+ def test_incompatible_all_licenses_wildcard(self):
+ self.lic_test('incompatible-licenses', 'GPL-2.0 GPL-3.0 LGPL-3.0', '*')
+
+ # Verify that a package with a non-SPDX license (neither in
+ # AVAILABLE_LICENSES nor in SPDXLICENSEMAP) cannot be built when
+ # INCOMPATIBLE_LICENSE contains this license
+ def test_incompatible_nonspdx_license(self):
+ self.lic_test('incompatible-nonspdx-license', 'FooLicense', 'FooLicense')
+
+class IncompatibleLicensePerImageTests(OESelftestTestCase):
+ def default_config(self):
+ return """
+IMAGE_INSTALL_append = "bash"
+INCOMPATIBLE_LICENSE_pn-core-image-minimal = "GPL-3.0 LGPL-3.0"
+"""
+
+ def test_bash_default(self):
+ self.write_config(self.default_config())
+ error_msg = "ERROR: core-image-minimal-1.0-r0 do_rootfs: Package bash cannot be installed into the image because it has incompatible license(s): GPL-3.0+"
+
+ result = bitbake('core-image-minimal', ignore_status=True)
+ if error_msg not in result.output:
+ raise AssertionError(result.output)
+
+ def test_bash_and_license(self):
+ self.write_config(self.default_config() + '\nLICENSE_append_pn-bash = " & SomeLicense"')
+ error_msg = "ERROR: core-image-minimal-1.0-r0 do_rootfs: Package bash cannot be installed into the image because it has incompatible license(s): GPL-3.0+"
+
+ result = bitbake('core-image-minimal', ignore_status=True)
+ if error_msg not in result.output:
+ raise AssertionError(result.output)
+
+ def test_bash_or_license(self):
+ self.write_config(self.default_config() + '\nLICENSE_append_pn-bash = " | SomeLicense"')
+
+ bitbake('core-image-minimal')
+
+ def test_bash_whitelist(self):
+ self.write_config(self.default_config() + '\nWHITELIST_GPL-3.0_pn-core-image-minimal = "bash"')
+
+ bitbake('core-image-minimal')
+
+class NoGPL3InImagesTests(OESelftestTestCase):
+ def test_core_image_minimal(self):
+ self.write_config("""
+INCOMPATIBLE_LICENSE_pn-core-image-minimal = "GPL-3.0 LGPL-3.0"
+""")
+ bitbake('core-image-minimal')
+
+ def test_core_image_full_cmdline(self):
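+        # Drop the GPLv3-licensed tools from the cmdline packagegroups so the
+        # image can still be assembled with GPL-3.0/LGPL-3.0 excluded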
+ self.write_config("""
+INHERIT += "testimage"\n
+INCOMPATIBLE_LICENSE_pn-core-image-full-cmdline = "GPL-3.0 LGPL-3.0"\n
+RDEPENDS_packagegroup-core-full-cmdline-utils_remove = "bash bc coreutils cpio ed findutils gawk grep mc mc-fish mc-helpers mc-helpers-perl sed tar time"\n
+RDEPENDS_packagegroup-core-full-cmdline-dev-utils_remove = "diffutils m4 make patch"\n
+RDEPENDS_packagegroup-core-full-cmdline-multiuser_remove = "gzip"\n
+""")
+ bitbake('core-image-full-cmdline')
+ bitbake('-c testimage core-image-full-cmdline')
+
diff --git a/meta/lib/oeqa/selftest/cases/kerneldevelopment.py b/meta/lib/oeqa/selftest/cases/kerneldevelopment.py
new file mode 100644
index 0000000000..a61876ee61
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/kerneldevelopment.py
@@ -0,0 +1,67 @@
+import os
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import runCmd, get_bb_var
+from oeqa.utils.git import GitRepo
+
+class KernelDev(OESelftestTestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super(KernelDev, cls).setUpClass()
+ # Create the recipe directory structure inside the created layer
+ cls.layername = 'meta-kerneltest'
+ runCmd('bitbake-layers create-layer %s' % cls.layername)
+ runCmd('mkdir -p %s/recipes-kernel/linux/linux-yocto' % cls.layername)
+ cls.recipes_linuxyocto_dir = os.path.join \
+ (cls.builddir, cls.layername, 'recipes-kernel', 'linux', 'linux-yocto')
+ cls.recipeskernel_dir = os.path.dirname(cls.recipes_linuxyocto_dir)
+ runCmd('bitbake-layers add-layer %s' % cls.layername)
+
+ @classmethod
+ def tearDownClass(cls):
+ runCmd('bitbake-layers remove-layer %s' % cls.layername, ignore_status=True)
+ runCmd('rm -rf %s' % cls.layername)
+ super(KernelDev, cls).tearDownClass()
+
+ def setUp(self):
+ super(KernelDev, self).setUp()
+ self.set_machine_config('MACHINE = "qemux86-64"\n')
+
+ def test_apply_patches(self):
+ """
+ Summary: Able to apply a single patch to the Linux kernel source
+ Expected: The README file should exist and the patch changes should be
+ displayed at the end of the file.
+ Product: Kernel Development
+ Author: Yeoh Ee Peng <ee.peng.yeoh@intel.com>
+ AutomatedBy: Mazliana Mohamad <mazliana.mohamad@intel.com>
+ """
+ runCmd('bitbake virtual/kernel -c patch')
+ kernel_source = get_bb_var('STAGING_KERNEL_DIR')
+ readme = os.path.join(kernel_source, 'README')
+
+ # This test step adds modified file 'README' to git and creates a
+ # patch file '0001-KERNEL_DEV_TEST_CASE.patch' at the same location as file
+ patch_content = 'This is a test to apply a patch to the kernel'
+ with open(readme, 'a+') as f:
+ f.write(patch_content)
+ repo = GitRepo('%s' % kernel_source, is_topdir=True)
+ repo.run_cmd('add %s' % readme)
+ repo.run_cmd(['commit', '-m', 'KERNEL_DEV_TEST_CASE'])
+ repo.run_cmd(['format-patch', '-1'])
+ patch_name = '0001-KERNEL_DEV_TEST_CASE.patch'
+ patchpath = os.path.join(kernel_source, patch_name)
+ runCmd('mv %s %s' % (patchpath, self.recipes_linuxyocto_dir))
+ runCmd('rm %s ' % readme)
+ self.assertFalse(os.path.exists(readme))
+
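+        # '%' in the bbappend file name is a BitBake wildcard, so the append
+        # applies to any linux-yocto version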
+ recipe_append = os.path.join(self.recipeskernel_dir, 'linux-yocto_%.bbappend')
+ with open(recipe_append, 'w+') as fh:
+ fh.write('SRC_URI += "file://%s"\n' % patch_name)
+ fh.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"')
+
+ runCmd('bitbake virtual/kernel -c clean')
+ runCmd('bitbake virtual/kernel -c patch')
+ self.assertTrue(os.path.exists(readme))
+ result = runCmd('tail -n 1 %s' % readme)
+ self.assertEqual(result.output, patch_content)
diff --git a/meta/lib/oeqa/selftest/cases/layerappend.py b/meta/lib/oeqa/selftest/cases/layerappend.py
new file mode 100644
index 0000000000..05e9426fc6
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/layerappend.py
@@ -0,0 +1,97 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var
+import oeqa.utils.ftools as ftools
+
+class LayerAppendTests(OESelftestTestCase):
+ layerconf = """
+# We have a conf and classes directory, append to BBPATH
+BBPATH .= ":${LAYERDIR}"
+
+# We have a recipes directory, add to BBFILES
+BBFILES += "${LAYERDIR}/recipes*/*.bb ${LAYERDIR}/recipes*/*.bbappend"
+
+BBFILE_COLLECTIONS += "meta-layerINT"
+BBFILE_PATTERN_meta-layerINT := "^${LAYERDIR}/"
+BBFILE_PRIORITY_meta-layerINT = "6"
+"""
+ recipe = """
+LICENSE="CLOSED"
+INHIBIT_DEFAULT_DEPS = "1"
+
+python do_build() {
+ bb.plain('Building ...')
+}
+addtask build
+"""
+ append = """
+FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"
+
+SRC_URI_append = " file://appendtest.txt"
+
+sysroot_stage_all_append() {
+ install -m 644 ${WORKDIR}/appendtest.txt ${SYSROOT_DESTDIR}/
+}
+
+"""
+
+ append2 = """
+FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"
+
+SRC_URI_append = " file://appendtest.txt"
+"""
+ layerappend = ''
+
+ def tearDownLocal(self):
+ if self.layerappend:
+ ftools.remove_from_file(self.builddir + "/conf/bblayers.conf", self.layerappend)
+ super(LayerAppendTests, self).tearDownLocal()
+
+ def test_layer_appends(self):
+ corebase = get_bb_var("COREBASE")
+
+ for l in ["0", "1", "2"]:
+ layer = os.path.join(corebase, "meta-layertest" + l)
+ self.assertFalse(os.path.exists(layer))
+ os.mkdir(layer)
+ os.mkdir(layer + "/conf")
+ with open(layer + "/conf/layer.conf", "w") as f:
+ f.write(self.layerconf.replace("INT", l))
+ os.mkdir(layer + "/recipes-test")
+ if l == "0":
+ with open(layer + "/recipes-test/layerappendtest.bb", "w") as f:
+ f.write(self.recipe)
+ elif l == "1":
+ with open(layer + "/recipes-test/layerappendtest.bbappend", "w") as f:
+ f.write(self.append)
+ os.mkdir(layer + "/recipes-test/layerappendtest")
+ with open(layer + "/recipes-test/layerappendtest/appendtest.txt", "w") as f:
+ f.write("Layer 1 test")
+ elif l == "2":
+ with open(layer + "/recipes-test/layerappendtest.bbappend", "w") as f:
+ f.write(self.append2)
+ os.mkdir(layer + "/recipes-test/layerappendtest")
+ with open(layer + "/recipes-test/layerappendtest/appendtest.txt", "w") as f:
+ f.write("Layer 2 test")
+ self.track_for_cleanup(layer)
+
+ self.layerappend = "BBLAYERS += \"{0}/meta-layertest0 {0}/meta-layertest1 {0}/meta-layertest2\"".format(corebase)
+ ftools.append_file(self.builddir + "/conf/bblayers.conf", self.layerappend)
+ stagingdir = get_bb_var("SYSROOT_DESTDIR", "layerappendtest")
+ bitbake("layerappendtest")
+ data = ftools.read_file(stagingdir + "/appendtest.txt")
+ self.assertEqual(data, "Layer 2 test")
+ os.remove(corebase + "/meta-layertest2/recipes-test/layerappendtest/appendtest.txt")
+ bitbake("layerappendtest")
+ data = ftools.read_file(stagingdir + "/appendtest.txt")
+ self.assertEqual(data, "Layer 1 test")
+ with open(corebase + "/meta-layertest2/recipes-test/layerappendtest/appendtest.txt", "w") as f:
+ f.write("Layer 2 test")
+ bitbake("layerappendtest")
+ data = ftools.read_file(stagingdir + "/appendtest.txt")
+ self.assertEqual(data, "Layer 2 test")
diff --git a/meta/lib/oeqa/selftest/cases/liboe.py b/meta/lib/oeqa/selftest/cases/liboe.py
new file mode 100644
index 0000000000..afe8f8809f
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/liboe.py
@@ -0,0 +1,102 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import get_bb_var, get_bb_vars, bitbake, runCmd
+import oe.path
+import bb.utils
+import os
+
+class LibOE(OESelftestTestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super(LibOE, cls).setUpClass()
+ cls.tmp_dir = get_bb_var('TMPDIR')
+
+ def test_copy_tree_special(self):
+ """
+        Summary: oe.path.copytree() should copy files with special characters
+ Expected: 'test file with sp£c!al @nd spaces' should exist in
+ copy destination
+ Product: OE-Core
+ Author: Joshua Lock <joshua.g.lock@intel.com>
+ """
+ testloc = oe.path.join(self.tmp_dir, 'liboetests')
+ src = oe.path.join(testloc, 'src')
+ dst = oe.path.join(testloc, 'dst')
+ bb.utils.mkdirhier(testloc)
+ bb.utils.mkdirhier(src)
+ testfilename = 'test file with sp£c!al @nd spaces'
+
+ # create the test file and copy it
+ open(oe.path.join(src, testfilename), 'w+b').close()
+ oe.path.copytree(src, dst)
+
+ # ensure path exists in dest
+ fileindst = os.path.isfile(oe.path.join(dst, testfilename))
+ self.assertTrue(fileindst, "File with spaces doesn't exist in dst")
+
+ oe.path.remove(testloc)
+
+ def test_copy_tree_xattr(self):
+ """
+ Summary: oe.path.copytree() should preserve xattr on copied files
+ Expected: testxattr file in destination should have user.oetest
+ extended attribute
+ Product: OE-Core
+ Author: Joshua Lock <joshua.g.lock@intel.com>
+ """
+ testloc = oe.path.join(self.tmp_dir, 'liboetests')
+ src = oe.path.join(testloc, 'src')
+ dst = oe.path.join(testloc, 'dst')
+ bb.utils.mkdirhier(testloc)
+ bb.utils.mkdirhier(src)
+ testfilename = 'testxattr'
+
+ # ensure we have setfattr available
+ bitbake("attr-native")
+
+ bb_vars = get_bb_vars(['SYSROOT_DESTDIR', 'bindir'], 'attr-native')
+ destdir = bb_vars['SYSROOT_DESTDIR']
+ bindir = bb_vars['bindir']
+ bindir = destdir + bindir
+
+ # create a file with xattr and copy it
+ open(oe.path.join(src, testfilename), 'w+b').close()
+ runCmd('%s/setfattr -n user.oetest -v "testing liboe" %s' % (bindir, oe.path.join(src, testfilename)))
+ oe.path.copytree(src, dst)
+
+ # ensure file in dest has user.oetest xattr
+ result = runCmd('%s/getfattr -n user.oetest %s' % (bindir, oe.path.join(dst, testfilename)))
+        self.assertIn('user.oetest="testing liboe"', result.output, 'Extended attribute not set in dst')
+
+ oe.path.remove(testloc)
+
+ def test_copy_hardlink_tree_count(self):
+ """
+ Summary: oe.path.copyhardlinktree() shouldn't miss out files
+ Expected: src and dst should have the same number of files
+ Product: OE-Core
+ Author: Joshua Lock <joshua.g.lock@intel.com>
+ """
+ testloc = oe.path.join(self.tmp_dir, 'liboetests')
+ src = oe.path.join(testloc, 'src')
+ dst = oe.path.join(testloc, 'dst')
+ bb.utils.mkdirhier(testloc)
+ bb.utils.mkdirhier(src)
+ testfiles = ['foo', 'bar', '.baz', 'quux']
+
+ def touchfile(tf):
+ open(oe.path.join(src, tf), 'w+b').close()
+
+ for f in testfiles:
+ touchfile(f)
+
+ oe.path.copyhardlinktree(src, dst)
+
+ dstcnt = len(os.listdir(dst))
+ srccnt = len(os.listdir(src))
+        self.assertEqual(dstcnt, len(testfiles), "Number of files in dst (%s) differs from number of files in src (%s)." % (dstcnt, srccnt))
+
+ oe.path.remove(testloc)
diff --git a/meta/lib/oeqa/selftest/cases/lic_checksum.py b/meta/lib/oeqa/selftest/cases/lic_checksum.py
new file mode 100644
index 0000000000..bae935d697
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/lic_checksum.py
@@ -0,0 +1,38 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import tempfile
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+from oeqa.utils import CommandError
+
+class LicenseTests(OESelftestTestCase):
+
+ # Verify that changing a license file that has an absolute path causes
+ # the license qa to fail due to a mismatched md5sum.
+ def test_nonmatching_checksum(self):
+ bitbake_cmd = '-c populate_lic emptytest'
+ error_msg = 'emptytest: The new md5 checksum is 8d777f385d3dfec8815d20f7496026dc'
+
+ lic_file, lic_path = tempfile.mkstemp()
+ os.close(lic_file)
+ self.track_for_cleanup(lic_path)
+
+ self.write_config("INHERIT_remove = \"report-error\"")
+
+ self.write_recipeinc('emptytest', """
+INHIBIT_DEFAULT_DEPS = "1"
+LIC_FILES_CHKSUM = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e"
+SRC_URI = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e"
+""" % (lic_path, lic_path))
+ result = bitbake(bitbake_cmd)
+
+ with open(lic_path, "w") as f:
+ f.write("data")
+
+ result = bitbake(bitbake_cmd, ignore_status=True)
+ if error_msg not in result.output:
+ raise AssertionError(result.output)
diff --git a/meta/lib/oeqa/selftest/cases/manifest.py b/meta/lib/oeqa/selftest/cases/manifest.py
new file mode 100644
index 0000000000..5d13f35468
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/manifest.py
@@ -0,0 +1,164 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import get_bb_var, get_bb_vars, bitbake
+
+class ManifestEntry:
+ '''A manifest item of a collection able to list missing packages'''
+ def __init__(self, entry):
+ self.file = entry
+ self.missing = []
+
+class VerifyManifest(OESelftestTestCase):
+ '''Tests for the manifest files and contents of an image'''
+
+ @classmethod
+ def check_manifest_entries(self, manifest, path):
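+        # Each manifest line starts with a package name; verify that a matching
+        # entry exists under the given pkgdata path and collect any that do not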
+ manifest_errors = []
+ try:
+ with open(manifest, "r") as mfile:
+ for line in mfile:
+ manifest_entry = os.path.join(path, line.split()[0])
+ self.logger.debug("{}: looking for {}"\
+ .format(self.classname, manifest_entry))
+ if not os.path.isfile(manifest_entry):
+ manifest_errors.append(manifest_entry)
+ self.logger.debug("{}: {} not found"\
+ .format(self.classname, manifest_entry))
+ except OSError as e:
+ self.logger.debug("{}: checking of {} failed"\
+ .format(self.classname, manifest))
+ raise e
+
+ return manifest_errors
+
+ #this will possibly move from here
+ @classmethod
+ def get_dir_from_bb_var(self, bb_var, target = None):
+        target = self.buildtarget if target is None else target
+        directory = get_bb_var(bb_var, target)
+ if not directory or not os.path.isdir(directory):
+ self.logger.debug("{}: {} points to {} when target = {}"\
+ .format(self.classname, bb_var, directory, target))
+ raise OSError
+ return directory
+
+ @classmethod
+ def setUpClass(self):
+
+ super(VerifyManifest, self).setUpClass()
+ self.buildtarget = 'core-image-minimal'
+ self.classname = 'VerifyManifest'
+
+ self.logger.info("{}: doing bitbake {} as a prerequisite of the test"\
+ .format(self.classname, self.buildtarget))
+ if bitbake(self.buildtarget).status:
+ self.logger.debug("{} Failed to setup {}"\
+ .format(self.classname, self.buildtarget))
+ self.skipTest("{}: Cannot setup testing scenario"\
+ .format(self.classname))
+
+ def test_SDK_manifest_entries(self):
+ '''Verifying the SDK manifest entries exist, this may take a build'''
+
+ # the setup should bitbake core-image-minimal and here it is required
+ # to do an additional setup for the sdk
+ sdktask = '-c populate_sdk'
+ bbargs = sdktask + ' ' + self.buildtarget
+ self.logger.debug("{}: doing bitbake {} as a prerequisite of the test"\
+ .format(self.classname, bbargs))
+ if bitbake(bbargs).status:
+ self.logger.debug("{} Failed to bitbake {}"\
+ .format(self.classname, bbargs))
+ self.skipTest("{}: Cannot setup testing scenario"\
+ .format(self.classname))
+
+
+        pkgdata_dir = {}
+        reverse_dir = {}
+        mfilename = {}
+        mpath = {}
+        m_entry = {}
+ # get manifest location based on target to query about
+ d_target= dict(target = self.buildtarget,
+ host = 'nativesdk-packagegroup-sdk-host')
+ try:
+ mdir = self.get_dir_from_bb_var('SDK_DEPLOY', self.buildtarget)
+ for k in d_target.keys():
+ toolchain_outputname = get_bb_var('TOOLCHAIN_OUTPUTNAME', self.buildtarget)
+ mfilename[k] = "{}.{}.manifest".format(toolchain_outputname, k)
+ mpath[k] = os.path.join(mdir, mfilename[k])
+ if not os.path.isfile(mpath[k]):
+ self.logger.debug("{}: {} does not exist".format(
+ self.classname, mpath[k]))
+ raise IOError
+ m_entry[k] = ManifestEntry(mpath[k])
+
+ pkgdata_dir[k] = self.get_dir_from_bb_var('PKGDATA_DIR',
+ d_target[k])
+ reverse_dir[k] = os.path.join(pkgdata_dir[k],
+ 'runtime-reverse')
+ if not os.path.exists(reverse_dir[k]):
+ self.logger.debug("{}: {} does not exist".format(
+ self.classname, reverse_dir[k]))
+ raise IOError
+ except OSError:
+ raise self.skipTest("{}: Error in obtaining manifest dirs"\
+ .format(self.classname))
+ except IOError:
+ msg = "{}: Error cannot find manifests in the specified dir:\n{}"\
+ .format(self.classname, mdir)
+ self.fail(msg)
+
+ for k in d_target.keys():
+ self.logger.debug("{}: Check manifest {}".format(
+ self.classname, m_entry[k].file))
+
+ m_entry[k].missing = self.check_manifest_entries(\
+ m_entry[k].file,reverse_dir[k])
+ if m_entry[k].missing:
+ msg = '{}: {} Error has the following missing entries'\
+ .format(self.classname, m_entry[k].file)
+ logmsg = msg+':\n'+'\n'.join(m_entry[k].missing)
+ self.logger.debug(logmsg)
+ self.logger.info(msg)
+ self.fail(logmsg)
+
+ def test_image_manifest_entries(self):
+ '''Verifying the image manifest entries exist'''
+
+ # get manifest location based on target to query about
+ try:
+ mdir = self.get_dir_from_bb_var('DEPLOY_DIR_IMAGE',
+ self.buildtarget)
+ mfilename = get_bb_var("IMAGE_LINK_NAME", self.buildtarget)\
+ + ".manifest"
+ mpath = os.path.join(mdir, mfilename)
+ if not os.path.isfile(mpath): raise IOError
+ m_entry = ManifestEntry(mpath)
+
+ pkgdata_dir = self.get_dir_from_bb_var('PKGDATA_DIR',
+ self.buildtarget)
+ revdir = os.path.join(pkgdata_dir, 'runtime-reverse')
+ if not os.path.exists(revdir): raise IOError
+ except OSError:
+ raise self.skipTest("{}: Error in obtaining manifest dirs"\
+ .format(self.classname))
+ except IOError:
+ msg = "{}: Error cannot find manifests in dir:\n{}"\
+ .format(self.classname, mdir)
+ self.fail(msg)
+
+ self.logger.debug("{}: Check manifest {}"\
+ .format(self.classname, m_entry.file))
+ m_entry.missing = self.check_manifest_entries(\
+ m_entry.file, revdir)
+ if m_entry.missing:
+ msg = '{}: {} Error has the following missing entries'\
+ .format(self.classname, m_entry.file)
+ logmsg = msg+':\n'+'\n'.join(m_entry.missing)
+ self.logger.debug(logmsg)
+ self.logger.info(msg)
+ self.fail(logmsg)
diff --git a/meta/lib/oeqa/selftest/cases/meta_ide.py b/meta/lib/oeqa/selftest/cases/meta_ide.py
new file mode 100644
index 0000000000..03901a2f32
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/meta_ide.py
@@ -0,0 +1,51 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.sdk.utils.sdkbuildproject import SDKBuildProject
+from oeqa.utils.commands import bitbake, get_bb_vars, runCmd
+from oeqa.core.decorator import OETestTag
+import tempfile
+import shutil
+
+@OETestTag("machine")
+class MetaIDE(OESelftestTestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super(MetaIDE, cls).setUpClass()
+ bitbake('meta-ide-support')
+ bb_vars = get_bb_vars(['MULTIMACH_TARGET_SYS', 'TMPDIR', 'COREBASE'])
+ cls.environment_script = 'environment-setup-%s' % bb_vars['MULTIMACH_TARGET_SYS']
+ cls.tmpdir = bb_vars['TMPDIR']
+ cls.environment_script_path = '%s/%s' % (cls.tmpdir, cls.environment_script)
+ cls.corebasedir = bb_vars['COREBASE']
+ cls.tmpdir_metaideQA = tempfile.mkdtemp(prefix='metaide')
+
+ @classmethod
+ def tearDownClass(cls):
+ shutil.rmtree(cls.tmpdir_metaideQA, ignore_errors=True)
+ super(MetaIDE, cls).tearDownClass()
+
+ def test_meta_ide_had_installed_meta_ide_support(self):
+ self.assertExists(self.environment_script_path)
+
+ def test_meta_ide_can_compile_c_program(self):
+ runCmd('cp %s/test.c %s' % (self.tc.files_dir, self.tmpdir_metaideQA))
+ runCmd("cd %s; . %s; $CC test.c -lm" % (self.tmpdir_metaideQA, self.environment_script_path))
+ compiled_file = '%s/a.out' % self.tmpdir_metaideQA
+ self.assertExists(compiled_file)
+
+ def test_meta_ide_can_build_cpio_project(self):
+ dl_dir = self.td.get('DL_DIR', None)
+ self.project = SDKBuildProject(self.tmpdir_metaideQA + "/cpio/", self.environment_script_path,
+ "https://ftp.gnu.org/gnu/cpio/cpio-2.12.tar.gz",
+ self.tmpdir_metaideQA, self.td['DATETIME'], dl_dir=dl_dir)
+ self.project.download_archive()
+ self.assertEqual(self.project.run_configure(), 0,
+ msg="Running configure failed")
+ self.assertEqual(self.project.run_make(), 0,
+ msg="Running make failed")
+ self.assertEqual(self.project.run_install(), 0,
+ msg="Running make install failed")
diff --git a/meta/lib/oeqa/selftest/cases/multiconfig.py b/meta/lib/oeqa/selftest/cases/multiconfig.py
new file mode 100644
index 0000000000..39b92f2439
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/multiconfig.py
@@ -0,0 +1,72 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import textwrap
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+
+class MultiConfig(OESelftestTestCase):
+
+ def test_multiconfig(self):
+ """
+ Test that a simple multiconfig build works. This uses the mcextend class and the
+ multiconfig-image-packager test recipe to build a core-image-full-cmdline image which
+ contains a tiny core-image-minimal and a musl core-image-minimal, installed as packages.
+ """
+
+ config = """
+IMAGE_INSTALL_append_pn-core-image-full-cmdline = " multiconfig-image-packager-tiny multiconfig-image-packager-musl"
+BBMULTICONFIG = "tiny musl"
+"""
+ self.write_config(config)
+
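+        # Each multiconfig gets its own TMPDIR since they build for different
+        # MACHINE/DISTRO/TCLIBC combinations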
+ muslconfig = """
+MACHINE = "qemux86-64"
+DISTRO = "poky"
+TCLIBC = "musl"
+TMPDIR = "${TOPDIR}/tmp-mc-musl"
+"""
+ self.write_config(muslconfig, 'musl')
+
+ tinyconfig = """
+MACHINE = "qemux86"
+DISTRO = "poky-tiny"
+TMPDIR = "${TOPDIR}/tmp-mc-tiny"
+"""
+ self.write_config(tinyconfig, 'tiny')
+
+ # Build a core-image-minimal
+ bitbake('core-image-full-cmdline')
+
+ def test_multiconfig_reparse(self):
+ """
+ Test that changes to a multiconfig conf file are correctly detected and
+ cause a reparse/rebuild of a recipe.
+ """
+ config = textwrap.dedent('''\
+ MCTESTVAR = "test"
+ BBMULTICONFIG = "test"
+ ''')
+ self.write_config(config)
+
+ testconfig = textwrap.dedent('''\
+ MCTESTVAR_append = "1"
+ ''')
+ self.write_config(testconfig, 'test')
+
+ # Check that the 1) the task executed and 2) that it output the correct
+ # value. Note "bitbake -e" is not used because it always reparses the
+ # recipe and we want to ensure that the automatic reparsing and parse
+ # caching is detected.
+ result = bitbake('mc:test:multiconfig-test-parse -c showvar')
+ self.assertIn('MCTESTVAR=test1', result.output.splitlines())
+
+ testconfig = textwrap.dedent('''\
+ MCTESTVAR_append = "2"
+ ''')
+ self.write_config(testconfig, 'test')
+
+ result = bitbake('mc:test:multiconfig-test-parse -c showvar')
+ self.assertIn('MCTESTVAR=test2', result.output.splitlines())
diff --git a/meta/lib/oeqa/selftest/cases/oelib/__init__.py b/meta/lib/oeqa/selftest/cases/oelib/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/oelib/__init__.py
diff --git a/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py b/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py
new file mode 100644
index 0000000000..6d80827652
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py
@@ -0,0 +1,99 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+from oeqa.selftest.case import OESelftestTestCase
+import tempfile
+from oeqa.utils.commands import get_bb_var
+
+class TestBlobParsing(OESelftestTestCase):
+
+ def setUp(self):
+ import time
+ self.repo_path = tempfile.mkdtemp(prefix='selftest-buildhistory',
+ dir=get_bb_var('TOPDIR'))
+
+ try:
+ from git import Repo
+ self.repo = Repo.init(self.repo_path)
+ except ImportError:
+ self.skipTest('Python module GitPython is not present')
+
+ self.test_file = "test"
+ self.var_map = {}
+
+ def tearDown(self):
+ import shutil
+ shutil.rmtree(self.repo_path)
+
+ def commit_vars(self, to_add={}, to_remove = [], msg="A commit message"):
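+        # Apply the requested additions/removals to the variable map, write it
+        # out as "VAR = value" lines and commit the result to the test repo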
+ if len(to_add) == 0 and len(to_remove) == 0:
+ return
+
+ for k in to_remove:
+            self.var_map.pop(k, None)
+ for k in to_add:
+ self.var_map[k] = to_add[k]
+
+ with open(os.path.join(self.repo_path, self.test_file), 'w') as repo_file:
+ for k in self.var_map:
+ repo_file.write("%s = %s\n" % (k, self.var_map[k]))
+
+ self.repo.git.add("--all")
+ self.repo.git.commit(message=msg)
+
+ def test_blob_to_dict(self):
+ """
+        Test conversion of git blobs to a dictionary
+ """
+ from oe.buildhistory_analysis import blob_to_dict
+ valuesmap = { "foo" : "1", "bar" : "2" }
+ self.commit_vars(to_add = valuesmap)
+
+ blob = self.repo.head.commit.tree.blobs[0]
+ self.assertEqual(valuesmap, blob_to_dict(blob),
+ "commit was not translated correctly to dictionary")
+
+ def test_compare_dict_blobs(self):
+ """
+        Test comparison of dictionaries extracted from git blobs
+ """
+ from oe.buildhistory_analysis import compare_dict_blobs
+
+ changesmap = { "foo-2" : ("2", "8"), "bar" : ("","4"), "bar-2" : ("","5")}
+
+ self.commit_vars(to_add = { "foo" : "1", "foo-2" : "2", "foo-3" : "3" })
+ blob1 = self.repo.heads.master.commit.tree.blobs[0]
+
+ self.commit_vars(to_add = { "foo-2" : "8", "bar" : "4", "bar-2" : "5" })
+ blob2 = self.repo.heads.master.commit.tree.blobs[0]
+
+ change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file),
+ blob1, blob2, False, False)
+
+ var_changes = { x.fieldname : (x.oldvalue, x.newvalue) for x in change_records}
+ self.assertEqual(changesmap, var_changes, "Changes not reported correctly")
+
+ def test_compare_dict_blobs_default(self):
+ """
+        Test default values for comparison of git blob dictionaries
+ """
+ from oe.buildhistory_analysis import compare_dict_blobs
+ defaultmap = { x : ("default", "1") for x in ["PKG", "PKGE", "PKGV", "PKGR"]}
+
+ self.commit_vars(to_add = { "foo" : "1" })
+ blob1 = self.repo.heads.master.commit.tree.blobs[0]
+
+ self.commit_vars(to_add = { "PKG" : "1", "PKGE" : "1", "PKGV" : "1", "PKGR" : "1" })
+ blob2 = self.repo.heads.master.commit.tree.blobs[0]
+
+ change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file),
+ blob1, blob2, False, False)
+
+ var_changes = {}
+ for x in change_records:
+ oldvalue = "default" if ("default" in x.oldvalue) else x.oldvalue
+ var_changes[x.fieldname] = (oldvalue, x.newvalue)
+
+ self.assertEqual(defaultmap, var_changes, "Defaults not set properly")
diff --git a/meta/lib/oeqa/selftest/cases/oelib/elf.py b/meta/lib/oeqa/selftest/cases/oelib/elf.py
new file mode 100644
index 0000000000..d0a28090f2
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/oelib/elf.py
@@ -0,0 +1,26 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+from unittest.case import TestCase
+import oe.qa
+
+class TestElf(TestCase):
+ def test_machine_name(self):
+ """
+ Test elf_machine_to_string()
+ """
+ self.assertEqual(oe.qa.elf_machine_to_string(0x02), "SPARC")
+ self.assertEqual(oe.qa.elf_machine_to_string(0x03), "x86")
+ self.assertEqual(oe.qa.elf_machine_to_string(0x08), "MIPS")
+ self.assertEqual(oe.qa.elf_machine_to_string(0x14), "PowerPC")
+ self.assertEqual(oe.qa.elf_machine_to_string(0x28), "ARM")
+ self.assertEqual(oe.qa.elf_machine_to_string(0x2A), "SuperH")
+ self.assertEqual(oe.qa.elf_machine_to_string(0x32), "IA-64")
+ self.assertEqual(oe.qa.elf_machine_to_string(0x3E), "x86-64")
+ self.assertEqual(oe.qa.elf_machine_to_string(0xB7), "AArch64")
+ self.assertEqual(oe.qa.elf_machine_to_string(0xF7), "BPF")
+
+ self.assertEqual(oe.qa.elf_machine_to_string(0x00), "Unknown (0)")
+ self.assertEqual(oe.qa.elf_machine_to_string(0xDEADBEEF), "Unknown (3735928559)")
+ self.assertEqual(oe.qa.elf_machine_to_string("foobar"), "Unknown ('foobar')")
diff --git a/meta/lib/oeqa/selftest/cases/oelib/license.py b/meta/lib/oeqa/selftest/cases/oelib/license.py
new file mode 100644
index 0000000000..6ebbee589f
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/oelib/license.py
@@ -0,0 +1,103 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+from unittest.case import TestCase
+import oe.license
+
+class SeenVisitor(oe.license.LicenseVisitor):
+ def __init__(self):
+ self.seen = []
+ oe.license.LicenseVisitor.__init__(self)
+
+ def visit_Str(self, node):
+ self.seen.append(node.s)
+
+class TestSingleLicense(TestCase):
+ licenses = [
+ "GPLv2",
+ "LGPL-2.0",
+ "Artistic",
+ "MIT",
+ "GPLv3+",
+ "FOO_BAR",
+ ]
+ invalid_licenses = ["GPL/BSD"]
+
+ @staticmethod
+ def parse(licensestr):
+ visitor = SeenVisitor()
+ visitor.visit_string(licensestr)
+ return visitor.seen
+
+ def test_single_licenses(self):
+ for license in self.licenses:
+ licenses = self.parse(license)
+ self.assertListEqual(licenses, [license])
+
+ def test_invalid_licenses(self):
+ for license in self.invalid_licenses:
+ with self.assertRaises(oe.license.InvalidLicense) as cm:
+ self.parse(license)
+ self.assertEqual(cm.exception.license, license)
+
+class TestSimpleCombinations(TestCase):
+ tests = {
+ "FOO&BAR": ["FOO", "BAR"],
+ "BAZ & MOO": ["BAZ", "MOO"],
+ "ALPHA|BETA": ["ALPHA"],
+ "BAZ&MOO|FOO": ["FOO"],
+ "FOO&BAR|BAZ": ["FOO", "BAR"],
+ }
+ preferred = ["ALPHA", "FOO", "BAR"]
+
+ def test_tests(self):
+ def choose(a, b):
+ if all(lic in self.preferred for lic in b):
+ return b
+ else:
+ return a
+
+ for license, expected in self.tests.items():
+ licenses = oe.license.flattened_licenses(license, choose)
+ self.assertListEqual(licenses, expected)
+
+class TestComplexCombinations(TestSimpleCombinations):
+ tests = {
+ "FOO & (BAR | BAZ)&MOO": ["FOO", "BAR", "MOO"],
+ "(ALPHA|(BETA&THETA)|OMEGA)&DELTA": ["OMEGA", "DELTA"],
+ "((ALPHA|BETA)&FOO)|BAZ": ["BETA", "FOO"],
+ "(GPL-2.0|Proprietary)&BSD-4-clause&MIT": ["GPL-2.0", "BSD-4-clause", "MIT"],
+ }
+ preferred = ["BAR", "OMEGA", "BETA", "GPL-2.0"]
+
+class TestIsIncluded(TestCase):
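+    # key: (license expression, whitelist, blacklist) as passed to
+    # oe.license.is_included(); value: [expected verdict, expected license list]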
+ tests = {
+ ("FOO | BAR", None, None):
+ [True, ["FOO"]],
+ ("FOO | BAR", None, "FOO"):
+ [True, ["BAR"]],
+ ("FOO | BAR", "BAR", None):
+ [True, ["BAR"]],
+ ("FOO | BAR & FOOBAR", "*BAR", None):
+ [True, ["BAR", "FOOBAR"]],
+ ("FOO | BAR & FOOBAR", None, "FOO*"):
+ [False, ["FOOBAR"]],
+ ("(FOO | BAR) & FOOBAR | BARFOO", None, "FOO"):
+ [True, ["BAR", "FOOBAR"]],
+ ("(FOO | BAR) & FOOBAR | BAZ & MOO & BARFOO", None, "FOO"):
+ [True, ["BAZ", "MOO", "BARFOO"]],
+ ("GPL-3.0 & GPL-2.0 & LGPL-2.1 | Proprietary", None, None):
+ [True, ["GPL-3.0", "GPL-2.0", "LGPL-2.1"]],
+ ("GPL-3.0 & GPL-2.0 & LGPL-2.1 | Proprietary", None, "GPL-3.0"):
+ [True, ["Proprietary"]],
+ ("GPL-3.0 & GPL-2.0 & LGPL-2.1 | Proprietary", None, "GPL-3.0 Proprietary"):
+ [False, ["GPL-3.0"]]
+ }
+
+ def test_tests(self):
+ for args, expected in self.tests.items():
+ is_included, licenses = oe.license.is_included(
+ args[0], (args[1] or '').split(), (args[2] or '').split())
+ self.assertEqual(is_included, expected[0])
+ self.assertListEqual(licenses, expected[1])
diff --git a/meta/lib/oeqa/selftest/cases/oelib/path.py b/meta/lib/oeqa/selftest/cases/oelib/path.py
new file mode 100644
index 0000000000..a1cfa08c09
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/oelib/path.py
@@ -0,0 +1,89 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+from unittest.case import TestCase
+import oe, oe.path
+import tempfile
+import os
+import errno
+import shutil
+
+class TestRealPath(TestCase):
+ DIRS = [ "a", "b", "etc", "sbin", "usr", "usr/bin", "usr/binX", "usr/sbin", "usr/include", "usr/include/gdbm" ]
+ FILES = [ "etc/passwd", "b/file" ]
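+    # Each entry is (link name, link target, expected resolved path below the
+    # test root); None entries are skipped in test_norm and covered by
+    # LINKS_PHYS or EXCEPTIONS instead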
+ LINKS = [
+ ( "bin", "/usr/bin", "/usr/bin" ),
+ ( "binX", "usr/binX", "/usr/binX" ),
+ ( "c", "broken", "/broken" ),
+ ( "etc/passwd-1", "passwd", "/etc/passwd" ),
+ ( "etc/passwd-2", "passwd-1", "/etc/passwd" ),
+ ( "etc/passwd-3", "/etc/passwd-1", "/etc/passwd" ),
+ ( "etc/shadow-1", "/etc/shadow", "/etc/shadow" ),
+ ( "etc/shadow-2", "/etc/shadow-1", "/etc/shadow" ),
+ ( "prog-A", "bin/prog-A", "/usr/bin/prog-A" ),
+ ( "prog-B", "/bin/prog-B", "/usr/bin/prog-B" ),
+ ( "usr/bin/prog-C", "../../sbin/prog-C", "/sbin/prog-C" ),
+ ( "usr/bin/prog-D", "/sbin/prog-D", "/sbin/prog-D" ),
+ ( "usr/binX/prog-E", "../sbin/prog-E", None ),
+ ( "usr/bin/prog-F", "../../../sbin/prog-F", "/sbin/prog-F" ),
+ ( "loop", "a/loop", None ),
+ ( "a/loop", "../loop", None ),
+ ( "b/test", "file/foo", "/b/file/foo" ),
+ ]
+
+ LINKS_PHYS = [
+ ( "./", "/", "" ),
+ ( "binX/prog-E", "/usr/sbin/prog-E", "/sbin/prog-E" ),
+ ]
+
+ EXCEPTIONS = [
+ ( "loop", errno.ELOOP ),
+ ( "b/test", errno.ENOENT ),
+ ]
+
+ def setUp(self):
+ self.tmpdir = tempfile.mkdtemp(prefix = "oe-test_path")
+ self.root = os.path.join(self.tmpdir, "R")
+
+ os.mkdir(os.path.join(self.tmpdir, "_real"))
+ os.symlink("_real", self.root)
+
+ for d in self.DIRS:
+ os.mkdir(os.path.join(self.root, d))
+ for f in self.FILES:
+            open(os.path.join(self.root, f), "w").close()
+ for l in self.LINKS:
+ os.symlink(l[1], os.path.join(self.root, l[0]))
+
+ def tearDown(self):
+ shutil.rmtree(self.tmpdir)
+
+ def __realpath(self, file, use_physdir, assume_dir = True):
+ return oe.path.realpath(os.path.join(self.root, file), self.root,
+ use_physdir, assume_dir = assume_dir)
+
+ def test_norm(self):
+ for l in self.LINKS:
+ if l[2] == None:
+ continue
+
+ target_p = self.__realpath(l[0], True)
+ target_l = self.__realpath(l[0], False)
+
+ if l[2] != False:
+ self.assertEqual(target_p, target_l)
+ self.assertEqual(l[2], target_p[len(self.root):])
+
+ def test_phys(self):
+ for l in self.LINKS_PHYS:
+ target_p = self.__realpath(l[0], True)
+ target_l = self.__realpath(l[0], False)
+
+ self.assertEqual(l[1], target_p[len(self.root):])
+ self.assertEqual(l[2], target_l[len(self.root):])
+
+ def test_loop(self):
+ for e in self.EXCEPTIONS:
+ self.assertRaisesRegex(OSError, r'\[Errno %u\]' % e[1],
+ self.__realpath, e[0], False, False)
diff --git a/meta/lib/oeqa/selftest/cases/oelib/types.py b/meta/lib/oeqa/selftest/cases/oelib/types.py
new file mode 100644
index 0000000000..7eb49e6f95
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/oelib/types.py
@@ -0,0 +1,54 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+from unittest.case import TestCase
+from oe.maketype import create
+
+class TestBooleanType(TestCase):
+ def test_invalid(self):
+ self.assertRaises(ValueError, create, '', 'boolean')
+ self.assertRaises(ValueError, create, 'foo', 'boolean')
+ self.assertRaises(TypeError, create, object(), 'boolean')
+
+ def test_true(self):
+ self.assertTrue(create('y', 'boolean'))
+ self.assertTrue(create('yes', 'boolean'))
+ self.assertTrue(create('1', 'boolean'))
+ self.assertTrue(create('t', 'boolean'))
+ self.assertTrue(create('true', 'boolean'))
+ self.assertTrue(create('TRUE', 'boolean'))
+ self.assertTrue(create('truE', 'boolean'))
+
+ def test_false(self):
+ self.assertFalse(create('n', 'boolean'))
+ self.assertFalse(create('no', 'boolean'))
+ self.assertFalse(create('0', 'boolean'))
+ self.assertFalse(create('f', 'boolean'))
+ self.assertFalse(create('false', 'boolean'))
+ self.assertFalse(create('FALSE', 'boolean'))
+ self.assertFalse(create('faLse', 'boolean'))
+
+ def test_bool_equality(self):
+ self.assertEqual(create('n', 'boolean'), False)
+ self.assertNotEqual(create('n', 'boolean'), True)
+ self.assertEqual(create('y', 'boolean'), True)
+ self.assertNotEqual(create('y', 'boolean'), False)
+
+class TestList(TestCase):
+ def assertListEqual(self, value, valid, sep=None):
+ obj = create(value, 'list', separator=sep)
+ self.assertEqual(obj, valid)
+ if sep is not None:
+ self.assertEqual(obj.separator, sep)
+ self.assertEqual(str(obj), obj.separator.join(obj))
+
+ def test_list_nosep(self):
+ testlist = ['alpha', 'beta', 'theta']
+ self.assertListEqual('alpha beta theta', testlist)
+ self.assertListEqual('alpha beta\ttheta', testlist)
+ self.assertListEqual('alpha', ['alpha'])
+
+ def test_list_usersep(self):
+ self.assertListEqual('foo:bar', ['foo', 'bar'], ':')
+ self.assertListEqual('foo:bar:baz', ['foo', 'bar', 'baz'], ':')
diff --git a/meta/lib/oeqa/selftest/cases/oelib/utils.py b/meta/lib/oeqa/selftest/cases/oelib/utils.py
new file mode 100644
index 0000000000..a7214beb4c
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/oelib/utils.py
@@ -0,0 +1,103 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import sys
+from unittest.case import TestCase
+from contextlib import contextmanager
+from io import StringIO
+from oe.utils import packages_filter_out_system, trim_version, multiprocess_launch
+
+class TestPackagesFilterOutSystem(TestCase):
+ def test_filter(self):
+ """
+ Test that oe.utils.packages_filter_out_system works.
+ """
+ try:
+ import bb
+ except ImportError:
+ self.skipTest("Cannot import bb")
+
+ d = bb.data_smart.DataSmart()
+ d.setVar("PN", "foo")
+
+ d.setVar("PACKAGES", "foo foo-doc foo-dev")
+ pkgs = packages_filter_out_system(d)
+ self.assertEqual(pkgs, [])
+
+ d.setVar("PACKAGES", "foo foo-doc foo-data foo-dev")
+ pkgs = packages_filter_out_system(d)
+ self.assertEqual(pkgs, ["foo-data"])
+
+ d.setVar("PACKAGES", "foo foo-locale-en-gb")
+ pkgs = packages_filter_out_system(d)
+ self.assertEqual(pkgs, [])
+
+ d.setVar("PACKAGES", "foo foo-data foo-locale-en-gb")
+ pkgs = packages_filter_out_system(d)
+ self.assertEqual(pkgs, ["foo-data"])
+
+
+class TestTrimVersion(TestCase):
+ def test_version_exception(self):
+ with self.assertRaises(TypeError):
+ trim_version(None, 2)
+ with self.assertRaises(TypeError):
+ trim_version((1, 2, 3), 2)
+
+ def test_num_exception(self):
+ with self.assertRaises(ValueError):
+ trim_version("1.2.3", 0)
+ with self.assertRaises(ValueError):
+ trim_version("1.2.3", -1)
+
+ def test_valid(self):
+ self.assertEqual(trim_version("1.2.3", 1), "1")
+ self.assertEqual(trim_version("1.2.3", 2), "1.2")
+ self.assertEqual(trim_version("1.2.3", 3), "1.2.3")
+ self.assertEqual(trim_version("1.2.3", 4), "1.2.3")
+
+
+class TestMultiprocessLaunch(TestCase):
+
+ def test_multiprocesslaunch(self):
+ import bb
+
+ def testfunction(item, d):
+ if item == "2" or item == "1":
+ raise KeyError("Invalid number %s" % item)
+ return "Found %s" % item
+
+ def dummyerror(msg):
+ print("ERROR: %s" % msg)
+ def dummyfatal(msg):
+ print("ERROR: %s" % msg)
+ raise bb.BBHandledException()
+
+ @contextmanager
+ def captured_output():
+ new_out, new_err = StringIO(), StringIO()
+ old_out, old_err = sys.stdout, sys.stderr
+ try:
+ sys.stdout, sys.stderr = new_out, new_err
+ yield sys.stdout, sys.stderr
+ finally:
+ sys.stdout, sys.stderr = old_out, old_err
+
+ d = bb.data_smart.DataSmart()
+ bb.error = dummyerror
+ bb.fatal = dummyfatal
+
+ # Assert the function returns the right results
+ result = multiprocess_launch(testfunction, ["3", "4", "5", "6"], d, extraargs=(d,))
+ self.assertIn("Found 3", result)
+ self.assertIn("Found 4", result)
+ self.assertIn("Found 5", result)
+ self.assertIn("Found 6", result)
+ self.assertEqual(len(result), 4)
+
+ # Assert the function prints exceptions
+ with captured_output() as (out, err):
+ self.assertRaises(bb.BBHandledException, multiprocess_launch, testfunction, ["1", "2", "3", "4", "5", "6"], d, extraargs=(d,))
+ self.assertIn("KeyError: 'Invalid number 1'", out.getvalue())
+ self.assertIn("KeyError: 'Invalid number 2'", out.getvalue())
diff --git a/meta/lib/oeqa/selftest/cases/oescripts.py b/meta/lib/oeqa/selftest/cases/oescripts.py
new file mode 100644
index 0000000000..41cbe04808
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/oescripts.py
@@ -0,0 +1,188 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import shutil
+import unittest
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.selftest.cases.buildhistory import BuildhistoryBase
+from oeqa.utils.commands import Command, runCmd, bitbake, get_bb_var, get_test_layer
+from oeqa.utils import CommandError
+
+class BuildhistoryDiffTests(BuildhistoryBase):
+
+ def test_buildhistory_diff(self):
+ target = 'xcursor-transparent-theme'
+ self.run_buildhistory_operation(target, target_config="PR = \"r1\"", change_bh_location=True)
+ self.run_buildhistory_operation(target, target_config="PR = \"r0\"", change_bh_location=False, expect_error=True)
+ result = runCmd("oe-pkgdata-util read-value PKGV %s" % target)
+ pkgv = result.output.rstrip()
+ result = runCmd("buildhistory-diff -p %s" % get_bb_var('BUILDHISTORY_DIR'))
+ expected_endlines = [
+ "xcursor-transparent-theme-dev: RDEPENDS: removed \"xcursor-transparent-theme (['= %s-r1'])\", added \"xcursor-transparent-theme (['= %s-r0'])\"" % (pkgv, pkgv),
+ "xcursor-transparent-theme-staticdev: RDEPENDS: removed \"xcursor-transparent-theme-dev (['= %s-r1'])\", added \"xcursor-transparent-theme-dev (['= %s-r0'])\"" % (pkgv, pkgv)
+ ]
+ for line in result.output.splitlines():
+ for el in expected_endlines:
+ if line.endswith(el):
+ expected_endlines.remove(el)
+ break
+ else:
+ self.fail('Unexpected line:\n%s\nExpected line endings:\n %s' % (line, '\n '.join(expected_endlines)))
+ if expected_endlines:
+ self.fail('Missing expected line endings:\n %s' % '\n '.join(expected_endlines))
+
+class OEScriptTests(OESelftestTestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super(OEScriptTests, cls).setUpClass()
+ try:
+ import cairo
+ except ImportError:
+ raise unittest.SkipTest('Python module cairo is not present')
+ bitbake("core-image-minimal -c rootfs -f")
+ cls.tmpdir = get_bb_var('TMPDIR')
+ cls.buildstats = cls.tmpdir + "/buildstats/" + sorted(os.listdir(cls.tmpdir + "/buildstats"))[-1]
+
+        cls.scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')
+
+class OEPybootchartguyTests(OEScriptTests):
+
+ def test_pybootchartguy_help(self):
+ runCmd('%s/pybootchartgui/pybootchartgui.py --help' % self.scripts_dir)
+
+ def test_pybootchartguy_to_generate_build_png_output(self):
+ runCmd('%s/pybootchartgui/pybootchartgui.py %s -o %s/charts -f png' % (self.scripts_dir, self.buildstats, self.tmpdir))
+ self.assertTrue(os.path.exists(self.tmpdir + "/charts.png"))
+
+ def test_pybootchartguy_to_generate_build_svg_output(self):
+ runCmd('%s/pybootchartgui/pybootchartgui.py %s -o %s/charts -f svg' % (self.scripts_dir, self.buildstats, self.tmpdir))
+ self.assertTrue(os.path.exists(self.tmpdir + "/charts.svg"))
+
+ def test_pybootchartguy_to_generate_build_pdf_output(self):
+ runCmd('%s/pybootchartgui/pybootchartgui.py %s -o %s/charts -f pdf' % (self.scripts_dir, self.buildstats, self.tmpdir))
+ self.assertTrue(os.path.exists(self.tmpdir + "/charts.pdf"))
+
+
+class OEGitproxyTests(OESelftestTestCase):
+
+ scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')
+
+ def test_oegitproxy_help(self):
+ try:
+            runCmd('%s/oe-git-proxy --help' % self.scripts_dir, assert_error=False)
+            self.fail('oe-git-proxy --help should exit with a non-zero status')
+ except CommandError as e:
+ self.assertEqual(2, e.retcode)
+
+ def run_oegitproxy(self, custom_shell=None):
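+        # Point SOCAT at 'echo' so oe-git-proxy prints the socat arguments it
+        # would use instead of opening a real connection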
+ os.environ['SOCAT'] = shutil.which("echo")
+ os.environ['ALL_PROXY'] = "https://proxy.example.com:3128"
+ os.environ['NO_PROXY'] = "*.example.com,.no-proxy.org,192.168.42.0/24,127.*.*.*"
+
+ if custom_shell is None:
+ prefix = ''
+ else:
+ prefix = custom_shell + ' '
+
+ # outside, use the proxy
+ res = runCmd('%s%s/oe-git-proxy host.outside-example.com 9418' %
+ (prefix,self.scripts_dir))
+ self.assertIn('PROXY:', res.output)
+ # match with wildcard suffix
+ res = runCmd('%s%s/oe-git-proxy host.example.com 9418' %
+ (prefix, self.scripts_dir))
+ self.assertIn('TCP:', res.output)
+ # match just suffix
+ res = runCmd('%s%s/oe-git-proxy host.no-proxy.org 9418' %
+ (prefix, self.scripts_dir))
+ self.assertIn('TCP:', res.output)
+ # match IP subnet
+ res = runCmd('%s%s/oe-git-proxy 192.168.42.42 9418' %
+ (prefix, self.scripts_dir))
+ self.assertIn('TCP:', res.output)
+ # match IP wildcard
+ res = runCmd('%s%s/oe-git-proxy 127.1.2.3 9418' %
+ (prefix, self.scripts_dir))
+ self.assertIn('TCP:', res.output)
+
+        # test that a bare '*' in NO_PROXY matches all hosts (shell globbing must not expand it)
+ os.environ['NO_PROXY'] = "*"
+ res = runCmd('%s%s/oe-git-proxy host.example.com 9418' %
+ (prefix, self.scripts_dir))
+ self.assertIn('TCP:', res.output)
+
+ def test_oegitproxy_proxy(self):
+ self.run_oegitproxy()
+
+ def test_oegitproxy_proxy_dash(self):
+ dash = shutil.which("dash")
+ if dash is None:
+ self.skipTest("No \"dash\" found on test system.")
+ self.run_oegitproxy(custom_shell=dash)
+
+class OeRunNativeTest(OESelftestTestCase):
+ def test_oe_run_native(self):
+ bitbake("qemu-helper-native -c addto_recipe_sysroot")
+ result = runCmd("oe-run-native qemu-helper-native tunctl -h")
+ self.assertIn("Delete: tunctl -d device-name [-f tun-clone-device]", result.output)
+
+class OEListPackageconfigTests(OEScriptTests):
+ #oe-core.scripts.List_all_the_PACKAGECONFIG's_flags
+ def check_endlines(self, results, expected_endlines):
+ for line in results.output.splitlines():
+ for el in expected_endlines:
+ if line == el:
+ expected_endlines.remove(el)
+ break
+
+ if expected_endlines:
+ self.fail('Missing expected listings:\n %s' % '\n '.join(expected_endlines))
+
+
+ def test_packageconfig_flags_help(self):
+ runCmd('%s/contrib/list-packageconfig-flags.py -h' % self.scripts_dir)
+
+ def test_packageconfig_flags_default(self):
+ results = runCmd('%s/contrib/list-packageconfig-flags.py' % self.scripts_dir)
+ expected_endlines = []
+ expected_endlines.append("RECIPE NAME PACKAGECONFIG FLAGS")
+ expected_endlines.append("pinentry gtk2 libcap ncurses qt secret")
+ expected_endlines.append("tar acl")
+
+ self.check_endlines(results, expected_endlines)
+
+
+ def test_packageconfig_flags_option_flags(self):
+ results = runCmd('%s/contrib/list-packageconfig-flags.py -f' % self.scripts_dir)
+ expected_endlines = []
+ expected_endlines.append("PACKAGECONFIG FLAG RECIPE NAMES")
+ expected_endlines.append("qt nativesdk-pinentry pinentry pinentry-native")
+ expected_endlines.append("secret nativesdk-pinentry pinentry pinentry-native")
+
+ self.check_endlines(results, expected_endlines)
+
+ def test_packageconfig_flags_option_all(self):
+ results = runCmd('%s/contrib/list-packageconfig-flags.py -a' % self.scripts_dir)
+ expected_endlines = []
+ expected_endlines.append("pinentry-1.1.0")
+ expected_endlines.append("PACKAGECONFIG ncurses libcap")
+ expected_endlines.append("PACKAGECONFIG[qt] --enable-pinentry-qt, --disable-pinentry-qt, qtbase-native qtbase")
+ expected_endlines.append("PACKAGECONFIG[gtk2] --enable-pinentry-gtk2, --disable-pinentry-gtk2, gtk+ glib-2.0")
+ expected_endlines.append("PACKAGECONFIG[libcap] --with-libcap, --without-libcap, libcap")
+ expected_endlines.append("PACKAGECONFIG[ncurses] --enable-ncurses --with-ncurses-include-dir=${STAGING_INCDIR}, --disable-ncurses, ncurses")
+ expected_endlines.append("PACKAGECONFIG[secret] --enable-libsecret, --disable-libsecret, libsecret")
+
+ self.check_endlines(results, expected_endlines)
+
+    def test_packageconfig_flags_options_preferred_only(self):
+ results = runCmd('%s/contrib/list-packageconfig-flags.py -p' % self.scripts_dir)
+ expected_endlines = []
+ expected_endlines.append("RECIPE NAME PACKAGECONFIG FLAGS")
+ expected_endlines.append("pinentry gtk2 libcap ncurses qt secret")
+
+ self.check_endlines(results, expected_endlines)
+
diff --git a/meta/lib/oeqa/selftest/cases/package.py b/meta/lib/oeqa/selftest/cases/package.py
new file mode 100644
index 0000000000..291627877e
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/package.py
@@ -0,0 +1,150 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_vars, get_bb_var, runqemu
+import stat
+import subprocess, os
+import oe.path
+import re
+
+class VersionOrdering(OESelftestTestCase):
+ # version1, version2, sort order
+ tests = (
+ ("1.0", "1.0", 0),
+ ("1.0", "2.0", -1),
+ ("2.0", "1.0", 1),
+ ("2.0-rc", "2.0", 1),
+ ("2.0~rc", "2.0", -1),
+ ("1.2rc2", "1.2.0", -1)
+ )
+
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+
+ # Build the tools we need and populate a sysroot
+ bitbake("dpkg-native opkg-native rpm-native python3-native")
+ bitbake("build-sysroots -c build_native_sysroot")
+
+ # Get the paths so we can point into the sysroot correctly
+ vars = get_bb_vars(["STAGING_DIR", "BUILD_ARCH", "bindir_native", "libdir_native"])
+ cls.staging = oe.path.join(vars["STAGING_DIR"], vars["BUILD_ARCH"])
+ cls.bindir = oe.path.join(cls.staging, vars["bindir_native"])
+ cls.libdir = oe.path.join(cls.staging, vars["libdir_native"])
+
+ def setUpLocal(self):
+ # Just for convenience
+ self.staging = type(self).staging
+ self.bindir = type(self).bindir
+ self.libdir = type(self).libdir
+
+ def test_dpkg(self):
+ for ver1, ver2, sort in self.tests:
+ op = { -1: "<<", 0: "=", 1: ">>" }[sort]
+ status = subprocess.call((oe.path.join(self.bindir, "dpkg"), "--compare-versions", ver1, op, ver2))
+ self.assertEqual(status, 0, "%s %s %s failed" % (ver1, op, ver2))
+
+ # Now do it again but with incorrect operations
+ op = { -1: ">>", 0: ">>", 1: "<<" }[sort]
+ status = subprocess.call((oe.path.join(self.bindir, "dpkg"), "--compare-versions", ver1, op, ver2))
+ self.assertNotEqual(status, 0, "%s %s %s failed" % (ver1, op, ver2))
+
+ # Now do it again but with incorrect operations
+ op = { -1: "=", 0: "<<", 1: "=" }[sort]
+ status = subprocess.call((oe.path.join(self.bindir, "dpkg"), "--compare-versions", ver1, op, ver2))
+ self.assertNotEqual(status, 0, "%s %s %s failed" % (ver1, op, ver2))
+
+ def test_opkg(self):
+ for ver1, ver2, sort in self.tests:
+ op = { -1: "<<", 0: "=", 1: ">>" }[sort]
+ status = subprocess.call((oe.path.join(self.bindir, "opkg"), "compare-versions", ver1, op, ver2))
+ self.assertEqual(status, 0, "%s %s %s failed" % (ver1, op, ver2))
+
+ # Now do it again but with incorrect operations
+ op = { -1: ">>", 0: ">>", 1: "<<" }[sort]
+ status = subprocess.call((oe.path.join(self.bindir, "opkg"), "compare-versions", ver1, op, ver2))
+ self.assertNotEqual(status, 0, "%s %s %s failed" % (ver1, op, ver2))
+
+ # Now do it again but with incorrect operations
+ op = { -1: "=", 0: "<<", 1: "=" }[sort]
+ status = subprocess.call((oe.path.join(self.bindir, "opkg"), "compare-versions", ver1, op, ver2))
+ self.assertNotEqual(status, 0, "%s %s %s failed" % (ver1, op, ver2))
+
+ def test_rpm(self):
+ # Need to tell the Python bindings where to find its configuration
+ env = os.environ.copy()
+ env["RPM_CONFIGDIR"] = oe.path.join(self.libdir, "rpm")
+
+ for ver1, ver2, sort in self.tests:
+ # The only way to test rpm is via the Python module, so we need to
+ # execute python3-native. labelCompare returns -1/0/1 (like strcmp)
+ # so add 100 and use that as the exit code.
+ command = (oe.path.join(self.bindir, "python3-native", "python3"), "-c",
+ "import sys, rpm; v1=(None, \"%s\", None); v2=(None, \"%s\", None); sys.exit(rpm.labelCompare(v1, v2) + 100)" % (ver1, ver2))
+ status = subprocess.call(command, env=env)
+ self.assertIn(status, (99, 100, 101))
+ self.assertEqual(status - 100, sort, "%s %s (%d) failed" % (ver1, ver2, sort))
+
+class PackageTests(OESelftestTestCase):
+ # Verify that a recipe which sets up hardlink files has those preserved into split packages
+ # Also test file sparseness is preserved
+ def test_preserve_sparse_hardlinks(self):
+ bitbake("selftest-hardlink -c package")
+
+ dest = get_bb_var('PKGDEST', 'selftest-hardlink')
+ bindir = get_bb_var('bindir', 'selftest-hardlink')
+
+ def checkfiles():
+ # Recipe creates 4 hardlinked files, there is a copy in package/ and a copy in packages-split/
+ # so expect 8 in total.
+ self.assertEqual(os.stat(dest + "/selftest-hardlink" + bindir + "/hello1").st_nlink, 8)
+
+ # Test a sparse file remains sparse
+ sparsestat = os.stat(dest + "/selftest-hardlink" + bindir + "/sparsetest")
+ self.assertEqual(sparsestat.st_blocks, 0)
+ self.assertEqual(sparsestat.st_size, 1048576)
+
+ checkfiles()
+
+ # Clean and repackage so it's now definitely coming from sstate, then retest.
+ bitbake("selftest-hardlink -c clean")
+ bitbake("selftest-hardlink -c package")
+
+ checkfiles()
+
+ # Verify that gdb can correctly read symbols from the separated debug file for hardlinked binaries
+ def test_gdb_hardlink_debug(self):
+ features = 'IMAGE_INSTALL_append = " selftest-hardlink"\n'
+ features += 'IMAGE_INSTALL_append = " selftest-hardlink-dbg"\n'
+ features += 'IMAGE_INSTALL_append = " selftest-hardlink-gdb"\n'
+ self.write_config(features)
+ bitbake("core-image-minimal")
+
+ def gdbtest(qemu, binary):
+ """
+ Check that gdb ``binary`` to read symbols from separated debug file
+ """
+ self.logger.info("gdbtest %s" % binary)
+ status, output = qemu.run_serial('/usr/bin/gdb.sh %s' % binary, timeout=60)
+ for l in output.split('\n'):
+ # Check debugging symbols exists
+ if '(no debugging symbols found)' in l:
+ self.logger.error("No debugging symbols found. GDB result:\n%s" % output)
+ return False
+
+ # Check debugging symbols works correctly
+ elif re.match("Breakpoint 1.*hello\.c.*4", l):
+ return True
+
+ self.logger.error("GDB result:\n%d: %s", status, output)
+ return False
+
+ with runqemu('core-image-minimal') as qemu:
+ for binary in ['/usr/bin/hello1',
+ '/usr/bin/hello2',
+ '/usr/libexec/hello3',
+ '/usr/libexec/hello4']:
+ if not gdbtest(qemu, binary):
+ self.fail('GDB %s failed' % binary)
diff --git a/meta/lib/oeqa/selftest/cases/pkgdata.py b/meta/lib/oeqa/selftest/cases/pkgdata.py
new file mode 100644
index 0000000000..833a1803ba
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/pkgdata.py
@@ -0,0 +1,220 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import tempfile
+import fnmatch
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
+
+class OePkgdataUtilTests(OESelftestTestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super(OePkgdataUtilTests, cls).setUpClass()
+ # Ensure we have the right data in pkgdata
+ cls.logger.info('Running bitbake to generate pkgdata')
+ bitbake('target-sdk-provides-dummy -c clean')
+ bitbake('busybox zlib m4')
+
+ def test_lookup_pkg(self):
+ # Forward tests
+ result = runCmd('oe-pkgdata-util lookup-pkg "zlib busybox"')
+ self.assertEqual(result.output, 'libz1\nbusybox')
+ result = runCmd('oe-pkgdata-util lookup-pkg zlib-dev')
+ self.assertEqual(result.output, 'libz-dev')
+ result = runCmd('oe-pkgdata-util lookup-pkg nonexistentpkg', ignore_status=True)
+ self.assertEqual(result.status, 1, "Status different than 1. output: %s" % result.output)
+ self.assertEqual(result.output, 'ERROR: The following packages could not be found: nonexistentpkg')
+ # Reverse tests
+ result = runCmd('oe-pkgdata-util lookup-pkg -r "libz1 busybox"')
+ self.assertEqual(result.output, 'zlib\nbusybox')
+ result = runCmd('oe-pkgdata-util lookup-pkg -r libz-dev')
+ self.assertEqual(result.output, 'zlib-dev')
+ result = runCmd('oe-pkgdata-util lookup-pkg -r nonexistentpkg', ignore_status=True)
+ self.assertEqual(result.status, 1, "Status different than 1. output: %s" % result.output)
+ self.assertEqual(result.output, 'ERROR: The following packages could not be found: nonexistentpkg')
+
+ def test_read_value(self):
+ result = runCmd('oe-pkgdata-util read-value PN libz1')
+ self.assertEqual(result.output, 'zlib')
+ result = runCmd('oe-pkgdata-util read-value PKG libz1')
+ self.assertEqual(result.output, 'libz1')
+ result = runCmd('oe-pkgdata-util read-value PKGSIZE m4')
+ pkgsize = int(result.output.strip())
+ self.assertGreater(pkgsize, 1, "Size should be greater than 1. %s" % result.output)
+
+ def test_find_path(self):
+ result = runCmd('oe-pkgdata-util find-path /lib/libz.so.1')
+ self.assertEqual(result.output, 'zlib: /lib/libz.so.1')
+ result = runCmd('oe-pkgdata-util find-path /usr/bin/m4')
+ self.assertEqual(result.output, 'm4: /usr/bin/m4')
+ result = runCmd('oe-pkgdata-util find-path /not/exist', ignore_status=True)
+ self.assertEqual(result.status, 1, "Status different than 1. output: %s" % result.output)
+ self.assertEqual(result.output, 'ERROR: Unable to find any package producing path /not/exist')
+
+ def test_lookup_recipe(self):
+ result = runCmd('oe-pkgdata-util lookup-recipe "libz-staticdev busybox"')
+ self.assertEqual(result.output, 'zlib\nbusybox')
+ result = runCmd('oe-pkgdata-util lookup-recipe libz-dbg')
+ self.assertEqual(result.output, 'zlib')
+ result = runCmd('oe-pkgdata-util lookup-recipe nonexistentpkg', ignore_status=True)
+ self.assertEqual(result.status, 1, "Status different than 1. output: %s" % result.output)
+ self.assertEqual(result.output, 'ERROR: The following packages could not be found: nonexistentpkg')
+
+ def test_list_pkgs(self):
+ # No arguments
+ result = runCmd('oe-pkgdata-util list-pkgs')
+ pkglist = result.output.split()
+ self.assertIn('zlib', pkglist, "Listed packages: %s" % result.output)
+ self.assertIn('zlib-dev', pkglist, "Listed packages: %s" % result.output)
+ # No pkgspec, runtime
+ result = runCmd('oe-pkgdata-util list-pkgs -r')
+ pkglist = result.output.split()
+ self.assertIn('libz-dev', pkglist, "Listed packages: %s" % result.output)
+ # With recipe specified
+ result = runCmd('oe-pkgdata-util list-pkgs -p zlib')
+ pkglist = sorted(result.output.split())
+ try:
+ pkglist.remove('zlib-ptest') # in case ptest is disabled
+ except ValueError:
+ pass
+ self.assertEqual(pkglist, ['zlib', 'zlib-dbg', 'zlib-dev', 'zlib-doc', 'zlib-src', 'zlib-staticdev'], "Packages listed after remove: %s" % result.output)
+ # With recipe specified, runtime
+ result = runCmd('oe-pkgdata-util list-pkgs -p zlib -r')
+ pkglist = sorted(result.output.split())
+ try:
+ pkglist.remove('libz-ptest') # in case ptest is disabled
+ except ValueError:
+ pass
+ self.assertEqual(pkglist, ['libz-dbg', 'libz-dev', 'libz-doc', 'libz-src', 'libz-staticdev', 'libz1'], "Packages listed after remove: %s" % result.output)
+ # With recipe specified and unpackaged
+ result = runCmd('oe-pkgdata-util list-pkgs -p zlib -u')
+ pkglist = sorted(result.output.split())
+ self.assertIn('zlib-locale', pkglist, "Listed packages: %s" % result.output)
+ # With recipe specified and unpackaged, runtime
+ result = runCmd('oe-pkgdata-util list-pkgs -p zlib -u -r')
+ pkglist = sorted(result.output.split())
+ self.assertIn('libz-locale', pkglist, "Listed packages: %s" % result.output)
+ # With recipe specified and pkgspec
+ result = runCmd('oe-pkgdata-util list-pkgs -p zlib "*-d*"')
+ pkglist = sorted(result.output.split())
+ self.assertEqual(pkglist, ['zlib-dbg', 'zlib-dev', 'zlib-doc'], "Packages listed: %s" % result.output)
+ # With recipe specified and pkgspec, runtime
+ result = runCmd('oe-pkgdata-util list-pkgs -p zlib -r "*-d*"')
+ pkglist = sorted(result.output.split())
+ self.assertEqual(pkglist, ['libz-dbg', 'libz-dev', 'libz-doc'], "Packages listed: %s" % result.output)
+
+ def test_list_pkg_files(self):
+ def splitoutput(output):
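+ # list-pkg-files output is a series of "pkgname:" lines each followed by tab-indented file paths; parse it into a dict of package -> file list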
+ files = {}
+ curpkg = None
+ for line in output.splitlines():
+ if line.startswith('\t'):
+ self.assertTrue(curpkg, 'Unexpected non-package line:\n%s' % line)
+ files[curpkg].append(line.strip())
+ else:
+ self.assertTrue(line.rstrip().endswith(':'), 'Invalid package line in output:\n%s' % line)
+ curpkg = line.split(':')[0]
+ files[curpkg] = []
+ return files
+ bb_vars = get_bb_vars(['base_libdir', 'libdir', 'includedir', 'mandir'])
+ base_libdir = bb_vars['base_libdir']
+ libdir = bb_vars['libdir']
+ includedir = bb_vars['includedir']
+ mandir = bb_vars['mandir']
+ # Test recipe-space package name
+ result = runCmd('oe-pkgdata-util list-pkg-files zlib-dev zlib-doc')
+ files = splitoutput(result.output)
+ self.assertIn('zlib-dev', list(files.keys()), "listed pkgs. files: %s" %result.output)
+ self.assertIn('zlib-doc', list(files.keys()), "listed pkgs. files: %s" %result.output)
+ self.assertIn(os.path.join(includedir, 'zlib.h'), files['zlib-dev'])
+ self.assertIn(os.path.join(mandir, 'man3/zlib.3'), files['zlib-doc'])
+ # Test runtime package name
+ result = runCmd('oe-pkgdata-util list-pkg-files -r libz1 libz-dev')
+ files = splitoutput(result.output)
+ self.assertIn('libz1', list(files.keys()), "listed pkgs. files: %s" %result.output)
+ self.assertIn('libz-dev', list(files.keys()), "listed pkgs. files: %s" %result.output)
+ self.assertGreater(len(files['libz1']), 1)
+ libspec = os.path.join(base_libdir, 'libz.so.1.*')
+ found = False
+ for fileitem in files['libz1']:
+ if fnmatch.fnmatchcase(fileitem, libspec):
+ found = True
+ break
+ self.assertTrue(found, 'Could not find zlib library file %s in libz1 package file list: %s' % (libspec, files['libz1']))
+ self.assertIn(os.path.join(includedir, 'zlib.h'), files['libz-dev'])
+ # Test recipe
+ result = runCmd('oe-pkgdata-util list-pkg-files -p zlib')
+ files = splitoutput(result.output)
+ self.assertIn('zlib-dbg', list(files.keys()), "listed pkgs. files: %s" %result.output)
+ self.assertIn('zlib-doc', list(files.keys()), "listed pkgs. files: %s" %result.output)
+ self.assertIn('zlib-dev', list(files.keys()), "listed pkgs. files: %s" %result.output)
+ self.assertIn('zlib-staticdev', list(files.keys()), "listed pkgs. files: %s" %result.output)
+ self.assertIn('zlib', list(files.keys()), "listed pkgs. files: %s" %result.output)
+ self.assertNotIn('zlib-locale', list(files.keys()), "listed pkgs. files: %s" %result.output)
+ # (ignore ptest, might not be there depending on config)
+ self.assertIn(os.path.join(includedir, 'zlib.h'), files['zlib-dev'])
+ self.assertIn(os.path.join(mandir, 'man3/zlib.3'), files['zlib-doc'])
+ self.assertIn(os.path.join(libdir, 'libz.a'), files['zlib-staticdev'])
+ # Test recipe, runtime
+ result = runCmd('oe-pkgdata-util list-pkg-files -p zlib -r')
+ files = splitoutput(result.output)
+ self.assertIn('libz-dbg', list(files.keys()), "listed pkgs. files: %s" %result.output)
+ self.assertIn('libz-doc', list(files.keys()), "listed pkgs. files: %s" %result.output)
+ self.assertIn('libz-dev', list(files.keys()), "listed pkgs. files: %s" %result.output)
+ self.assertIn('libz-staticdev', list(files.keys()), "listed pkgs. files: %s" %result.output)
+ self.assertIn('libz1', list(files.keys()), "listed pkgs. files: %s" %result.output)
+ self.assertNotIn('libz-locale', list(files.keys()), "listed pkgs. files: %s" %result.output)
+ self.assertIn(os.path.join(includedir, 'zlib.h'), files['libz-dev'])
+ self.assertIn(os.path.join(mandir, 'man3/zlib.3'), files['libz-doc'])
+ self.assertIn(os.path.join(libdir, 'libz.a'), files['libz-staticdev'])
+ # Test recipe, unpackaged
+ result = runCmd('oe-pkgdata-util list-pkg-files -p zlib -u')
+ files = splitoutput(result.output)
+ self.assertIn('zlib-dbg', list(files.keys()), "listed pkgs. files: %s" %result.output)
+ self.assertIn('zlib-doc', list(files.keys()), "listed pkgs. files: %s" %result.output)
+ self.assertIn('zlib-dev', list(files.keys()), "listed pkgs. files: %s" %result.output)
+ self.assertIn('zlib-staticdev', list(files.keys()), "listed pkgs. files: %s" %result.output)
+ self.assertIn('zlib', list(files.keys()), "listed pkgs. files: %s" %result.output)
+ self.assertIn('zlib-locale', list(files.keys()), "listed pkgs. files: %s" %result.output) # this is the key one
+ self.assertIn(os.path.join(includedir, 'zlib.h'), files['zlib-dev'])
+ self.assertIn(os.path.join(mandir, 'man3/zlib.3'), files['zlib-doc'])
+ self.assertIn(os.path.join(libdir, 'libz.a'), files['zlib-staticdev'])
+ # Test recipe, runtime, unpackaged
+ result = runCmd('oe-pkgdata-util list-pkg-files -p zlib -r -u')
+ files = splitoutput(result.output)
+ self.assertIn('libz-dbg', list(files.keys()), "listed pkgs. files: %s" %result.output)
+ self.assertIn('libz-doc', list(files.keys()), "listed pkgs. files: %s" %result.output)
+ self.assertIn('libz-dev', list(files.keys()), "listed pkgs. files: %s" %result.output)
+ self.assertIn('libz-staticdev', list(files.keys()), "listed pkgs. files: %s" %result.output)
+ self.assertIn('libz1', list(files.keys()), "listed pkgs. files: %s" %result.output)
+ self.assertIn('libz-locale', list(files.keys()), "listed pkgs. files: %s" %result.output) # this is the key one
+ self.assertIn(os.path.join(includedir, 'zlib.h'), files['libz-dev'])
+ self.assertIn(os.path.join(mandir, 'man3/zlib.3'), files['libz-doc'])
+ self.assertIn(os.path.join(libdir, 'libz.a'), files['libz-staticdev'])
+
+ def test_glob(self):
+ tempdir = tempfile.mkdtemp(prefix='pkgdataqa')
+ self.track_for_cleanup(tempdir)
+ pkglistfile = os.path.join(tempdir, 'pkglist')
+ with open(pkglistfile, 'w') as f:
+ f.write('libz1\n')
+ f.write('busybox\n')
+ result = runCmd('oe-pkgdata-util glob %s "*-dev"' % pkglistfile)
+ desiredresult = ['libz-dev', 'busybox-dev']
+ self.assertEqual(sorted(result.output.split()), sorted(desiredresult))
+ # The following should not error (because when we use this during rootfs construction, sometimes the complementary package won't exist)
+ result = runCmd('oe-pkgdata-util glob %s "*-nonexistent"' % pkglistfile)
+ self.assertEqual(result.output, '')
+ # Test exclude option
+ result = runCmd('oe-pkgdata-util glob %s "*-dev *-dbg" -x "^libz"' % pkglistfile)
+ resultlist = result.output.split()
+ self.assertNotIn('libz-dev', resultlist)
+ self.assertNotIn('libz-dbg', resultlist)
+
+ def test_specify_pkgdatadir(self):
+ result = runCmd('oe-pkgdata-util -p %s lookup-pkg zlib' % get_bb_var('PKGDATA_DIR'))
+ self.assertEqual(result.output, 'libz1')
diff --git a/meta/lib/oeqa/selftest/cases/prservice.py b/meta/lib/oeqa/selftest/cases/prservice.py
new file mode 100644
index 0000000000..fe1f24ea6d
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/prservice.py
@@ -0,0 +1,125 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import re
+import shutil
+import datetime
+
+import oeqa.utils.ftools as ftools
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var
+from oeqa.utils.network import get_free_port
+
+class BitbakePrTests(OESelftestTestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super(BitbakePrTests, cls).setUpClass()
+ cls.pkgdata_dir = get_bb_var('PKGDATA_DIR')
+
+ def get_pr_version(self, package_name):
+ package_data_file = os.path.join(self.pkgdata_dir, 'runtime', package_name)
+ package_data = ftools.read_file(package_data_file)
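+ # PKGR is recorded as e.g. "PKGR: r0.1"; the digits after the dot are the PR service increment we track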
+ find_pr = re.search(r"PKGR: r[0-9]+\.([0-9]+)", package_data)
+ self.assertTrue(find_pr, "No PKG revision found in %s" % package_data_file)
+ return int(find_pr.group(1))
+
+ def get_task_stamp(self, package_name, recipe_task):
+ stampdata = get_bb_var('STAMP', target=package_name).split('/')
+ prefix = stampdata[-1]
+ package_stamps_path = "/".join(stampdata[:-1])
+ stamps = []
+ for stamp in os.listdir(package_stamps_path):
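+ # Stamp filenames have the form <prefix>.<task>.<32-character task hash>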
+ find_stamp = re.match(r"%s\.%s\.([a-z0-9]{32})" % (re.escape(prefix), recipe_task), stamp)
+ if find_stamp:
+ stamps.append(find_stamp.group(1))
+ self.assertFalse(len(stamps) == 0, msg="Could not find stamp for task %s for recipe %s" % (recipe_task, package_name))
+ self.assertFalse(len(stamps) > 1, msg="Found multiple %s stamps for the %s recipe in the %s directory." % (recipe_task, package_name, package_stamps_path))
+ return str(stamps[0])
+
+ def increment_package_pr(self, package_name):
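+ # Append a do_package hook that embeds the current timestamp, so the task signature changes and the PR service hands out a new revision on every build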
+ inc_data = "do_package_append() {\n bb.build.exec_func('do_test_prserv', d)\n}\ndo_test_prserv() {\necho \"The current date is: %s\"\n}" % datetime.datetime.now()
+ self.write_recipeinc(package_name, inc_data)
+ res = bitbake(package_name, ignore_status=True)
+ self.delete_recipeinc(package_name)
+ self.assertEqual(res.status, 0, msg=res.output)
+
+ def config_pr_tests(self, package_name, package_type='rpm', pr_socket='localhost:0'):
+ config_package_data = 'PACKAGE_CLASSES = "package_%s"' % package_type
+ self.write_config(config_package_data)
+ config_server_data = 'PRSERV_HOST = "%s"' % pr_socket
+ self.append_config(config_server_data)
+
+ def run_test_pr_service(self, package_name, package_type='rpm', track_task='do_package', pr_socket='localhost:0'):
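+ # Build the recipe twice with a forced change each time; the PR should step by exactly 1 and the tracked task stamp should differ between builds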
+ self.config_pr_tests(package_name, package_type, pr_socket)
+
+ self.increment_package_pr(package_name)
+ pr_1 = self.get_pr_version(package_name)
+ stamp_1 = self.get_task_stamp(package_name, track_task)
+
+ self.increment_package_pr(package_name)
+ pr_2 = self.get_pr_version(package_name)
+ stamp_2 = self.get_task_stamp(package_name, track_task)
+
+ self.assertTrue(pr_2 - pr_1 == 1, "Step between same pkg. revision is greater than 1")
+ self.assertTrue(stamp_1 != stamp_2, "Different pkg rev. but same stamp: %s" % stamp_1)
+
+ def run_test_pr_export_import(self, package_name, replace_current_db=True):
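+ # Export the PR database, optionally delete the live copy, re-import it, and check the PR sequence carries on from where it left off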
+ self.config_pr_tests(package_name)
+
+ self.increment_package_pr(package_name)
+ pr_1 = self.get_pr_version(package_name)
+
+ exported_db_path = os.path.join(self.builddir, 'export.inc')
+ export_result = runCmd("bitbake-prserv-tool export %s" % exported_db_path, ignore_status=True)
+ self.assertEqual(export_result.status, 0, msg="PR Service database export failed: %s" % export_result.output)
+ self.assertTrue(os.path.exists(exported_db_path))
+
+ if replace_current_db:
+ current_db_path = os.path.join(get_bb_var('PERSISTENT_DIR'), 'prserv.sqlite3')
+ self.assertTrue(os.path.exists(current_db_path), msg="Path to current PR Service database is invalid: %s" % current_db_path)
+ os.remove(current_db_path)
+
+ import_result = runCmd("bitbake-prserv-tool import %s" % exported_db_path, ignore_status=True)
+ os.remove(exported_db_path)
+ self.assertEqual(import_result.status, 0, msg="PR Service database import failed: %s" % import_result.output)
+
+ self.increment_package_pr(package_name)
+ pr_2 = self.get_pr_version(package_name)
+
+ self.assertTrue(pr_2 - pr_1 == 1, "Step between same pkg. revision is greater than 1")
+
+ def test_import_export_replace_db(self):
+ self.run_test_pr_export_import('m4')
+
+ def test_import_export_override_db(self):
+ self.run_test_pr_export_import('m4', replace_current_db=False)
+
+ def test_pr_service_rpm_arch_dep(self):
+ self.run_test_pr_service('m4', 'rpm', 'do_package')
+
+ def test_pr_service_deb_arch_dep(self):
+ self.run_test_pr_service('m4', 'deb', 'do_package')
+
+ def test_pr_service_ipk_arch_dep(self):
+ self.run_test_pr_service('m4', 'ipk', 'do_package')
+
+ def test_pr_service_rpm_arch_indep(self):
+ self.run_test_pr_service('xcursor-transparent-theme', 'rpm', 'do_package')
+
+ def test_pr_service_deb_arch_indep(self):
+ self.run_test_pr_service('xcursor-transparent-theme', 'deb', 'do_package')
+
+ def test_pr_service_ipk_arch_indep(self):
+ self.run_test_pr_service('xcursor-transparent-theme', 'ipk', 'do_package')
+
+ def test_stopping_prservice_message(self):
+ port = get_free_port()
+
+ runCmd('bitbake-prserv --host localhost --port %s --loglevel=DEBUG --start' % port)
+ ret = runCmd('bitbake-prserv --host localhost --port %s --loglevel=DEBUG --stop' % port)
+
+ self.assertEqual(ret.status, 0)
+
diff --git a/meta/lib/oeqa/selftest/cases/recipetool.py b/meta/lib/oeqa/selftest/cases/recipetool.py
new file mode 100644
index 0000000000..c1562c63b2
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/recipetool.py
@@ -0,0 +1,702 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import shutil
+import tempfile
+import urllib.parse
+
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var
+from oeqa.utils.commands import get_bb_vars, create_temp_layer
+from oeqa.selftest.cases import devtool
+
+templayerdir = None
+
+def setUpModule():
+ global templayerdir
+ templayerdir = tempfile.mkdtemp(prefix='recipetoolqa')
+ create_temp_layer(templayerdir, 'selftestrecipetool')
+ runCmd('bitbake-layers add-layer %s' % templayerdir)
+
+
+def tearDownModule():
+ runCmd('bitbake-layers remove-layer %s' % templayerdir, ignore_status=True)
+ runCmd('rm -rf %s' % templayerdir)
+
+
+class RecipetoolBase(devtool.DevtoolBase):
+
+ def setUpLocal(self):
+ super(RecipetoolBase, self).setUpLocal()
+ self.templayerdir = templayerdir
+ self.tempdir = tempfile.mkdtemp(prefix='recipetoolqa')
+ self.track_for_cleanup(self.tempdir)
+ self.testfile = os.path.join(self.tempdir, 'testfile')
+ with open(self.testfile, 'w') as f:
+ f.write('Test file\n')
+
+ def tearDownLocal(self):
+ runCmd('rm -rf %s/recipes-*' % self.templayerdir)
+ super(RecipetoolBase, self).tearDownLocal()
+
+ def _try_recipetool_appendcmd(self, cmd, testrecipe, expectedfiles, expectedlines=None):
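+ # Run the given recipetool command, then verify the generated bbappend (contents and copied files); returns (bbappendfile, command output)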
+ result = runCmd(cmd)
+ self.assertNotIn('Traceback', result.output)
+
+ # Check the bbappend was created and applies properly
+ recipefile = get_bb_var('FILE', testrecipe)
+ bbappendfile = self._check_bbappend(testrecipe, recipefile, self.templayerdir)
+
+ # Check the bbappend contents
+ if expectedlines is not None:
+ with open(bbappendfile, 'r') as f:
+ self.assertEqual(expectedlines, f.readlines(), "Expected lines are not present in %s" % bbappendfile)
+
+ # Check file was copied
+ filesdir = os.path.join(os.path.dirname(bbappendfile), testrecipe)
+ for expectedfile in expectedfiles:
+ self.assertTrue(os.path.isfile(os.path.join(filesdir, expectedfile)), 'Expected file %s to be copied next to bbappend, but it wasn\'t' % expectedfile)
+
+ # Check no other files created
+ createdfiles = []
+ for root, _, files in os.walk(filesdir):
+ for f in files:
+ createdfiles.append(os.path.relpath(os.path.join(root, f), filesdir))
+ self.assertEqual(sorted(createdfiles), sorted(expectedfiles))
+
+ return bbappendfile, result.output
+
+
+class RecipetoolTests(RecipetoolBase):
+
+ @classmethod
+ def setUpClass(cls):
+ super(RecipetoolTests, cls).setUpClass()
+ # Ensure we have the right data in shlibs/pkgdata
+ cls.logger.info('Running bitbake to generate pkgdata')
+ bitbake('-c packagedata base-files coreutils busybox selftest-recipetool-appendfile')
+ bb_vars = get_bb_vars(['COREBASE', 'BBPATH'])
+ cls.corebase = bb_vars['COREBASE']
+ cls.bbpath = bb_vars['BBPATH']
+
+ def _try_recipetool_appendfile(self, testrecipe, destfile, newfile, options, expectedlines, expectedfiles):
+ cmd = 'recipetool appendfile %s %s %s %s' % (self.templayerdir, destfile, newfile, options)
+ return self._try_recipetool_appendcmd(cmd, testrecipe, expectedfiles, expectedlines)
+
+ def _try_recipetool_appendfile_fail(self, destfile, newfile, checkerror):
+ cmd = 'recipetool appendfile %s %s %s' % (self.templayerdir, destfile, newfile)
+ result = runCmd(cmd, ignore_status=True)
+ self.assertNotEqual(result.status, 0, 'Command "%s" should have failed but didn\'t' % cmd)
+ self.assertNotIn('Traceback', result.output)
+ for errorstr in checkerror:
+ self.assertIn(errorstr, result.output)
+
+ def test_recipetool_appendfile_basic(self):
+ # Basic test
+ expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ '\n']
+ _, output = self._try_recipetool_appendfile('base-files', '/etc/motd', self.testfile, '', expectedlines, ['motd'])
+ self.assertNotIn('WARNING: ', output)
+
+ def test_recipetool_appendfile_invalid(self):
+ # Test some commands that should error
+ self._try_recipetool_appendfile_fail('/etc/passwd', self.testfile, ['ERROR: /etc/passwd cannot be handled by this tool', 'useradd', 'extrausers'])
+ self._try_recipetool_appendfile_fail('/etc/timestamp', self.testfile, ['ERROR: /etc/timestamp cannot be handled by this tool'])
+ self._try_recipetool_appendfile_fail('/dev/console', self.testfile, ['ERROR: /dev/console cannot be handled by this tool'])
+
+ def test_recipetool_appendfile_alternatives(self):
+ # Now try with a file we know should be an alternative
+ # (this is very much a fake example, but one we know is reliably an alternative)
+ self._try_recipetool_appendfile_fail('/bin/ls', self.testfile, ['ERROR: File /bin/ls is an alternative possibly provided by the following recipes:', 'coreutils', 'busybox'])
+ # Need a test file - should be executable
+ testfile2 = os.path.join(self.corebase, 'oe-init-build-env')
+ testfile2name = os.path.basename(testfile2)
+ expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ '\n',
+ 'SRC_URI += "file://%s"\n' % testfile2name,
+ '\n',
+ 'do_install_append() {\n',
+ ' install -d ${D}${base_bindir}\n',
+ ' install -m 0755 ${WORKDIR}/%s ${D}${base_bindir}/ls\n' % testfile2name,
+ '}\n']
+ self._try_recipetool_appendfile('coreutils', '/bin/ls', testfile2, '-r coreutils', expectedlines, [testfile2name])
+ # Now try bbappending the same file again, contents should not change
+ bbappendfile, _ = self._try_recipetool_appendfile('coreutils', '/bin/ls', self.testfile, '-r coreutils', expectedlines, [testfile2name])
+ # But the copied file should have been updated with the new contents
+ copiedfile = os.path.join(os.path.dirname(bbappendfile), 'coreutils', testfile2name)
+ result = runCmd('diff -q %s %s' % (testfile2, copiedfile), ignore_status=True)
+ self.assertNotEqual(result.status, 0, 'New file should have been copied but was not %s' % result.output)
+
+ def test_recipetool_appendfile_binary(self):
+ # Try appending a binary file
+ # /bin/ls can be a symlink to /usr/bin/ls
+ ls = os.path.realpath("/bin/ls")
+ result = runCmd('recipetool appendfile %s /bin/ls %s -r coreutils' % (self.templayerdir, ls))
+ self.assertIn('WARNING: ', result.output)
+ self.assertIn('is a binary', result.output)
+
+ def test_recipetool_appendfile_add(self):
+ # Try adding an arbitrary file to a recipe
+ expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ '\n',
+ 'SRC_URI += "file://testfile"\n',
+ '\n',
+ 'do_install_append() {\n',
+ ' install -d ${D}${datadir}\n',
+ ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n',
+ '}\n']
+ self._try_recipetool_appendfile('netbase', '/usr/share/something', self.testfile, '-r netbase', expectedlines, ['testfile'])
+ # Try adding another file, this time where the source file is executable
+ # (so we're testing that, plus modifying an existing bbappend)
+ testfile2 = os.path.join(self.corebase, 'oe-init-build-env')
+ testfile2name = os.path.basename(testfile2)
+ expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ '\n',
+ 'SRC_URI += "file://testfile \\\n',
+ ' file://%s \\\n' % testfile2name,
+ ' "\n',
+ '\n',
+ 'do_install_append() {\n',
+ ' install -d ${D}${datadir}\n',
+ ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n',
+ ' install -m 0755 ${WORKDIR}/%s ${D}${datadir}/scriptname\n' % testfile2name,
+ '}\n']
+ self._try_recipetool_appendfile('netbase', '/usr/share/scriptname', testfile2, '-r netbase', expectedlines, ['testfile', testfile2name])
+
+ def test_recipetool_appendfile_add_bindir(self):
+ # Try adding an arbitrary file to a recipe, this time to a location where it should be installed as executable
+ expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ '\n',
+ 'SRC_URI += "file://testfile"\n',
+ '\n',
+ 'do_install_append() {\n',
+ ' install -d ${D}${bindir}\n',
+ ' install -m 0755 ${WORKDIR}/testfile ${D}${bindir}/selftest-recipetool-testbin\n',
+ '}\n']
+ _, output = self._try_recipetool_appendfile('netbase', '/usr/bin/selftest-recipetool-testbin', self.testfile, '-r netbase', expectedlines, ['testfile'])
+ self.assertNotIn('WARNING: ', output)
+
+ def test_recipetool_appendfile_add_machine(self):
+ # Try adding an arbitrary file to a recipe, this time with a machine-specific override
+ expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ '\n',
+ 'PACKAGE_ARCH = "${MACHINE_ARCH}"\n',
+ '\n',
+ 'SRC_URI_append_mymachine = " file://testfile"\n',
+ '\n',
+ 'do_install_append_mymachine() {\n',
+ ' install -d ${D}${datadir}\n',
+ ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n',
+ '}\n']
+ _, output = self._try_recipetool_appendfile('netbase', '/usr/share/something', self.testfile, '-r netbase -m mymachine', expectedlines, ['mymachine/testfile'])
+ self.assertNotIn('WARNING: ', output)
+
+ def test_recipetool_appendfile_orig(self):
+ # A file that's in SRC_URI and in do_install with the same name
+ expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ '\n']
+ _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-orig', self.testfile, '', expectedlines, ['selftest-replaceme-orig'])
+ self.assertNotIn('WARNING: ', output)
+
+ def test_recipetool_appendfile_todir(self):
+ # A file that's in SRC_URI and in do_install with destination directory rather than file
+ expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ '\n']
+ _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-todir', self.testfile, '', expectedlines, ['selftest-replaceme-todir'])
+ self.assertNotIn('WARNING: ', output)
+
+ def test_recipetool_appendfile_renamed(self):
+ # A file that's in SRC_URI with a different name to the destination file
+ expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ '\n']
+ _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-renamed', self.testfile, '', expectedlines, ['file1'])
+ self.assertNotIn('WARNING: ', output)
+
+ def test_recipetool_appendfile_subdir(self):
+ # A file that's in SRC_URI in a subdir
+ expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ '\n',
+ 'SRC_URI += "file://testfile"\n',
+ '\n',
+ 'do_install_append() {\n',
+ ' install -d ${D}${datadir}\n',
+ ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-subdir\n',
+ '}\n']
+ _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-subdir', self.testfile, '', expectedlines, ['testfile'])
+ self.assertNotIn('WARNING: ', output)
+
+ def test_recipetool_appendfile_src_glob(self):
+ # A file that's in SRC_URI as a glob
+ expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ '\n',
+ 'SRC_URI += "file://testfile"\n',
+ '\n',
+ 'do_install_append() {\n',
+ ' install -d ${D}${datadir}\n',
+ ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-src-globfile\n',
+ '}\n']
+ _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-src-globfile', self.testfile, '', expectedlines, ['testfile'])
+ self.assertNotIn('WARNING: ', output)
+
+ def test_recipetool_appendfile_inst_glob(self):
+ # A file that's in do_install as a glob
+ expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ '\n']
+ _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-globfile', self.testfile, '', expectedlines, ['selftest-replaceme-inst-globfile'])
+ self.assertNotIn('WARNING: ', output)
+
+ def test_recipetool_appendfile_inst_todir_glob(self):
+ # A file that's in do_install as a glob with destination as a directory
+ expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ '\n']
+ _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-todir-globfile', self.testfile, '', expectedlines, ['selftest-replaceme-inst-todir-globfile'])
+ self.assertNotIn('WARNING: ', output)
+
+ def test_recipetool_appendfile_patch(self):
+ # A file that's added by a patch in SRC_URI
+ expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ '\n',
+ 'SRC_URI += "file://testfile"\n',
+ '\n',
+ 'do_install_append() {\n',
+ ' install -d ${D}${sysconfdir}\n',
+ ' install -m 0644 ${WORKDIR}/testfile ${D}${sysconfdir}/selftest-replaceme-patched\n',
+ '}\n']
+ _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/etc/selftest-replaceme-patched', self.testfile, '', expectedlines, ['testfile'])
+ for line in output.splitlines():
+ if 'WARNING: ' in line:
+ self.assertIn('add-file.patch', line, 'Unexpected warning found in output:\n%s' % line)
+ break
+ else:
+ self.fail('Patch warning not found in output:\n%s' % output)
+
+ def test_recipetool_appendfile_script(self):
+ # Now, a file that's in SRC_URI but installed by a script (so no mention in do_install)
+ expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ '\n',
+ 'SRC_URI += "file://testfile"\n',
+ '\n',
+ 'do_install_append() {\n',
+ ' install -d ${D}${datadir}\n',
+ ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-scripted\n',
+ '}\n']
+ _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-scripted', self.testfile, '', expectedlines, ['testfile'])
+ self.assertNotIn('WARNING: ', output)
+
+ def test_recipetool_appendfile_inst_func(self):
+ # A file that's installed from a function called by do_install
+ expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ '\n']
+ _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-func', self.testfile, '', expectedlines, ['selftest-replaceme-inst-func'])
+ self.assertNotIn('WARNING: ', output)
+
+ def test_recipetool_appendfile_postinstall(self):
+ # A file that's created by a postinstall script (and explicitly mentioned in it)
+ # First try without specifying recipe
+ self._try_recipetool_appendfile_fail('/usr/share/selftest-replaceme-postinst', self.testfile, ['File /usr/share/selftest-replaceme-postinst may be written out in a pre/postinstall script of the following recipes:', 'selftest-recipetool-appendfile'])
+ # Now specify recipe
+ expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ '\n',
+ 'SRC_URI += "file://testfile"\n',
+ '\n',
+ 'do_install_append() {\n',
+ ' install -d ${D}${datadir}\n',
+ ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-postinst\n',
+ '}\n']
+ _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-postinst', self.testfile, '-r selftest-recipetool-appendfile', expectedlines, ['testfile'])
+
+ def test_recipetool_appendfile_extlayer(self):
+ # Try creating a bbappend in a layer that's not in bblayers.conf and has a different structure
+ exttemplayerdir = os.path.join(self.tempdir, 'extlayer')
+ self._create_temp_layer(exttemplayerdir, False, 'oeselftestextlayer', recipepathspec='metadata/recipes/recipes-*/*')
+ result = runCmd('recipetool appendfile %s /usr/share/selftest-replaceme-orig %s' % (exttemplayerdir, self.testfile))
+ self.assertNotIn('Traceback', result.output)
+ createdfiles = []
+ for root, _, files in os.walk(exttemplayerdir):
+ for f in files:
+ createdfiles.append(os.path.relpath(os.path.join(root, f), exttemplayerdir))
+ createdfiles.remove('conf/layer.conf')
+ expectedfiles = ['metadata/recipes/recipes-test/selftest-recipetool-appendfile/selftest-recipetool-appendfile.bbappend',
+ 'metadata/recipes/recipes-test/selftest-recipetool-appendfile/selftest-recipetool-appendfile/selftest-replaceme-orig']
+ self.assertEqual(sorted(createdfiles), sorted(expectedfiles))
+
+ def test_recipetool_appendfile_wildcard(self):
+
+ def try_appendfile_wc(options):
+ result = runCmd('recipetool appendfile %s /etc/profile %s %s' % (self.templayerdir, self.testfile, options))
+ self.assertNotIn('Traceback', result.output)
+ bbappendfile = None
+ for root, _, files in os.walk(self.templayerdir):
+ for f in files:
+ if f.endswith('.bbappend'):
+ bbappendfile = f
+ break
+ if not bbappendfile:
+ self.fail('No bbappend file created')
+ runCmd('rm -rf %s/recipes-*' % self.templayerdir)
+ return bbappendfile
+
+ # Check without wildcard option
+ recipefn = os.path.basename(get_bb_var('FILE', 'base-files'))
+ filename = try_appendfile_wc('')
+ self.assertEqual(filename, recipefn.replace('.bb', '.bbappend'))
+ # Now check with wildcard option
+ filename = try_appendfile_wc('-w')
+ self.assertEqual(filename, recipefn.split('_')[0] + '_%.bbappend')
+
+ def test_recipetool_create(self):
+ # Try adding a recipe
+ tempsrc = os.path.join(self.tempdir, 'srctree')
+ os.makedirs(tempsrc)
+ recipefile = os.path.join(self.tempdir, 'logrotate_3.12.3.bb')
+ srcuri = 'https://github.com/logrotate/logrotate/releases/download/3.12.3/logrotate-3.12.3.tar.xz'
+ result = runCmd('recipetool create -o %s %s -x %s' % (recipefile, srcuri, tempsrc))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['LICENSE'] = 'GPLv2'
+ checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263'
+ checkvars['SRC_URI'] = 'https://github.com/logrotate/logrotate/releases/download/${PV}/logrotate-${PV}.tar.xz'
+ checkvars['SRC_URI[md5sum]'] = 'a560c57fac87c45b2fc17406cdf79288'
+ checkvars['SRC_URI[sha256sum]'] = '2e6a401cac9024db2288297e3be1a8ab60e7401ba8e91225218aaf4a27e82a07'
+ self._test_recipe_contents(recipefile, checkvars, [])
+
+ def test_recipetool_create_git(self):
+ if 'x11' not in get_bb_var('DISTRO_FEATURES'):
+ self.skipTest('Test requires x11 as distro feature')
+ # Ensure we have the right data in shlibs/pkgdata
+ bitbake('libpng pango libx11 libxext jpeg libcheck')
+ # Try adding a recipe
+ tempsrc = os.path.join(self.tempdir, 'srctree')
+ os.makedirs(tempsrc)
+ recipefile = os.path.join(self.tempdir, 'libmatchbox.bb')
+ srcuri = 'git://git.yoctoproject.org/libmatchbox'
+ result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri + ";rev=9f7cf8895ae2d39c465c04cc78e918c157420269", '-x', tempsrc])
+ self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output)
+ checkvars = {}
+ checkvars['LICENSE'] = 'LGPLv2.1'
+ checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=7fbc338309ac38fefcd64b04bb903e34'
+ checkvars['S'] = '${WORKDIR}/git'
+ checkvars['PV'] = '1.11+git${SRCPV}'
+ checkvars['SRC_URI'] = srcuri
+ checkvars['DEPENDS'] = set(['libcheck', 'libjpeg-turbo', 'libpng', 'libx11', 'libxext', 'pango'])
+ inherits = ['autotools', 'pkgconfig']
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_simple(self):
+ # Try adding a recipe
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pv = '1.7.3.0'
+ srcuri = 'http://www.dest-unreach.org/socat/download/socat-%s.tar.bz2' % pv
+ result = runCmd('recipetool create %s -o %s' % (srcuri, temprecipe))
+ dirlist = os.listdir(temprecipe)
+ if len(dirlist) > 1:
+ self.fail('recipetool created more than just one file; output:\n%s\ndirlist:\n%s' % (result.output, str(dirlist)))
+ if len(dirlist) < 1 or not os.path.isfile(os.path.join(temprecipe, dirlist[0])):
+ self.fail('recipetool did not create recipe file; output:\n%s\ndirlist:\n%s' % (result.output, str(dirlist)))
+ self.assertEqual(dirlist[0], 'socat_%s.bb' % pv, 'Recipe file incorrectly named')
+ checkvars = {}
+ checkvars['LICENSE'] = set(['Unknown', 'GPLv2'])
+ checkvars['LIC_FILES_CHKSUM'] = set(['file://COPYING.OpenSSL;md5=5c9bccc77f67a8328ef4ebaf468116f4', 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263'])
+ # We don't check DEPENDS since they are variable for this recipe depending on what's in the sysroot
+ checkvars['S'] = None
+ checkvars['SRC_URI'] = srcuri.replace(pv, '${PV}')
+ inherits = ['autotools']
+ self._test_recipe_contents(os.path.join(temprecipe, dirlist[0]), checkvars, inherits)
+
+ def test_recipetool_create_cmake(self):
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ recipefile = os.path.join(temprecipe, 'taglib_1.11.1.bb')
+ srcuri = 'http://taglib.github.io/releases/taglib-1.11.1.tar.gz'
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['LICENSE'] = set(['LGPLv2.1', 'MPL-1.1'])
+ checkvars['SRC_URI'] = 'http://taglib.github.io/releases/taglib-${PV}.tar.gz'
+ checkvars['SRC_URI[md5sum]'] = 'cee7be0ccfc892fa433d6c837df9522a'
+ checkvars['SRC_URI[sha256sum]'] = 'b6d1a5a610aae6ff39d93de5efd0fdc787aa9e9dc1e7026fa4c961b26563526b'
+ checkvars['DEPENDS'] = set(['boost', 'zlib'])
+ inherits = ['cmake']
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_github(self):
+ # Basic test to see if github URL mangling works
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ recipefile = os.path.join(temprecipe, 'meson_git.bb')
+ srcuri = 'https://github.com/mesonbuild/meson;rev=0.32.0'
+ result = runCmd(['recipetool', 'create', '-o', temprecipe, srcuri])
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['LICENSE'] = set(['Apache-2.0'])
+ checkvars['SRC_URI'] = 'git://github.com/mesonbuild/meson;protocol=https'
+ inherits = ['setuptools3']
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_python3_setuptools(self):
+ # Test creating python3 package from tarball (using setuptools3 class)
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'python-magic'
+ pv = '0.4.15'
+ recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv))
+ srcuri = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-%s.tar.gz' % pv
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['LICENSE'] = set(['MIT'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88'
+ checkvars['SRC_URI'] = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-${PV}.tar.gz'
+ checkvars['SRC_URI[md5sum]'] = 'e384c95a47218f66c6501cd6dd45ff59'
+ checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5'
+ inherits = ['setuptools3']
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_python3_distutils(self):
+ # Test creating python3 package from tarball (using distutils3 class)
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'docutils'
+ pv = '0.14'
+ recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv))
+ srcuri = 'https://files.pythonhosted.org/packages/84/f4/5771e41fdf52aabebbadecc9381d11dea0fa34e4759b4071244fa094804c/docutils-%s.tar.gz' % pv
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['LICENSE'] = set(['PSF', '&', 'BSD', 'GPL'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING.txt;md5=35a23d42b615470583563132872c97d6'
+ checkvars['SRC_URI'] = 'https://files.pythonhosted.org/packages/84/f4/5771e41fdf52aabebbadecc9381d11dea0fa34e4759b4071244fa094804c/docutils-${PV}.tar.gz'
+ checkvars['SRC_URI[md5sum]'] = 'c53768d63db3873b7d452833553469de'
+ checkvars['SRC_URI[sha256sum]'] = '51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274'
+ inherits = ['distutils3']
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_github_tarball(self):
+ # Basic test to ensure github URL mangling doesn't apply to release tarballs
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pv = '0.32.0'
+ recipefile = os.path.join(temprecipe, 'meson_%s.bb' % pv)
+ srcuri = 'https://github.com/mesonbuild/meson/releases/download/%s/meson-%s.tar.gz' % (pv, pv)
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['LICENSE'] = set(['Apache-2.0'])
+ checkvars['SRC_URI'] = 'https://github.com/mesonbuild/meson/releases/download/${PV}/meson-${PV}.tar.gz'
+ inherits = ['setuptools3']
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_git_http(self):
+ # Basic test to check http git URL mangling works
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ recipefile = os.path.join(temprecipe, 'matchbox-terminal_git.bb')
+ srcuri = 'http://git.yoctoproject.org/git/matchbox-terminal'
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['LICENSE'] = set(['GPLv2'])
+ checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/matchbox-terminal;protocol=http'
+ inherits = ['pkgconfig', 'autotools']
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def _copy_file_with_cleanup(self, srcfile, basedstdir, *paths):
+ dstdir = basedstdir
+ self.assertTrue(os.path.exists(dstdir))
+ for p in paths:
+ dstdir = os.path.join(dstdir, p)
+ if not os.path.exists(dstdir):
+ os.makedirs(dstdir)
+ self.track_for_cleanup(dstdir)
+ dstfile = os.path.join(dstdir, os.path.basename(srcfile))
+ if srcfile != dstfile:
+ shutil.copy(srcfile, dstfile)
+ self.track_for_cleanup(dstfile)
+
+ def test_recipetool_load_plugin(self):
+ """Test that recipetool loads only the first found plugin in BBPATH."""
+
+ recipetool = runCmd("which recipetool")
+ fromname = runCmd("recipetool --quiet pluginfile")
+ srcfile = fromname.output
+ searchpath = self.bbpath.split(':') + [os.path.dirname(recipetool.output)]
+ plugincontent = []
+ with open(srcfile) as fh:
+ plugincontent = fh.readlines()
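+ # Copy the plugin into every directory in the search path, check recipetool only loads one instance, then restore the original plugin file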
+ try:
+ self.assertIn('meta-selftest', srcfile, 'wrong bbpath plugin found')
+ for path in searchpath:
+ self._copy_file_with_cleanup(srcfile, path, 'lib', 'recipetool')
+ result = runCmd("recipetool --quiet count")
+ self.assertEqual(result.output, '1')
+ result = runCmd("recipetool --quiet multiloaded")
+ self.assertEqual(result.output, "no")
+ for path in searchpath:
+ result = runCmd("recipetool --quiet bbdir")
+ self.assertEqual(result.output, path)
+ os.unlink(os.path.join(result.output, 'lib', 'recipetool', 'bbpath.py'))
+ finally:
+ with open(srcfile, 'w') as fh:
+ fh.writelines(plugincontent)
+
+
+class RecipetoolAppendsrcBase(RecipetoolBase):
+ def _try_recipetool_appendsrcfile(self, testrecipe, newfile, destfile, options, expectedlines, expectedfiles):
+ cmd = 'recipetool appendsrcfile %s %s %s %s %s' % (options, self.templayerdir, testrecipe, newfile, destfile)
+ return self._try_recipetool_appendcmd(cmd, testrecipe, expectedfiles, expectedlines)
+
+ def _try_recipetool_appendsrcfiles(self, testrecipe, newfiles, expectedlines=None, expectedfiles=None, destdir=None, options=''):
+
+ if destdir:
+ options += ' -D %s' % destdir
+
+ if expectedfiles is None:
+ expectedfiles = [os.path.basename(f) for f in newfiles]
+
+ cmd = 'recipetool appendsrcfiles %s %s %s %s' % (options, self.templayerdir, testrecipe, ' '.join(newfiles))
+ return self._try_recipetool_appendcmd(cmd, testrecipe, expectedfiles, expectedlines)
+
+ def _try_recipetool_appendsrcfile_fail(self, testrecipe, newfile, destfile, checkerror):
+ cmd = 'recipetool appendsrcfile %s %s %s %s' % (self.templayerdir, testrecipe, newfile, destfile or '')
+ result = runCmd(cmd, ignore_status=True)
+ self.assertNotEqual(result.status, 0, 'Command "%s" should have failed but didn\'t' % cmd)
+ self.assertNotIn('Traceback', result.output)
+ for errorstr in checkerror:
+ self.assertIn(errorstr, result.output)
+
+ @staticmethod
+ def _get_first_file_uri(recipe):
+ '''Return the first file:// in SRC_URI for the specified recipe.'''
+ src_uri = get_bb_var('SRC_URI', recipe).split()
+ for uri in src_uri:
+ p = urllib.parse.urlparse(uri)
+ if p.scheme == 'file':
+ return p.netloc + p.path
+
+ def _test_appendsrcfile(self, testrecipe, filename=None, destdir=None, has_src_uri=True, srcdir=None, newfile=None, options=''):
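+ # Helper: run appendsrcfile and check that SRC_URI gains the expected file:// entry, with a subdir= parameter when a srcdir and/or destdir is in play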
+ if newfile is None:
+ newfile = self.testfile
+
+ if srcdir:
+ if destdir:
+ expected_subdir = os.path.join(srcdir, destdir)
+ else:
+ expected_subdir = srcdir
+ else:
+ options += " -W"
+ expected_subdir = destdir
+
+ if filename:
+ if destdir:
+ destpath = os.path.join(destdir, filename)
+ else:
+ destpath = filename
+ else:
+ filename = os.path.basename(newfile)
+ if destdir:
+ destpath = destdir + os.sep
+ else:
+ destpath = '.' + os.sep
+
+ expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+ '\n']
+ if has_src_uri:
+ uri = 'file://%s' % filename
+ if expected_subdir:
+ uri += ';subdir=%s' % expected_subdir
+ expectedlines[0:0] = ['SRC_URI += "%s"\n' % uri,
+ '\n']
+
+ return self._try_recipetool_appendsrcfile(testrecipe, newfile, destpath, options, expectedlines, [filename])
+
+ def _test_appendsrcfiles(self, testrecipe, newfiles, expectedfiles=None, destdir=None, options=''):
+ if expectedfiles is None:
+ expectedfiles = [os.path.basename(n) for n in newfiles]
+
+ self._try_recipetool_appendsrcfiles(testrecipe, newfiles, expectedfiles=expectedfiles, destdir=destdir, options=options)
+
+ bb_vars = get_bb_vars(['SRC_URI', 'FILE', 'FILESEXTRAPATHS'], testrecipe)
+ src_uri = bb_vars['SRC_URI'].split()
+ for f in expectedfiles:
+ if destdir:
+ self.assertIn('file://%s;subdir=%s' % (f, destdir), src_uri)
+ else:
+ self.assertIn('file://%s' % f, src_uri)
+
+ recipefile = bb_vars['FILE']
+ bbappendfile = self._check_bbappend(testrecipe, recipefile, self.templayerdir)
+ filesdir = os.path.join(os.path.dirname(bbappendfile), testrecipe)
+ filesextrapaths = bb_vars['FILESEXTRAPATHS'].split(':')
+ self.assertIn(filesdir, filesextrapaths)
+
+
+
+
+class RecipetoolAppendsrcTests(RecipetoolAppendsrcBase):
+
+ def test_recipetool_appendsrcfile_basic(self):
+ self._test_appendsrcfile('base-files', 'a-file')
+
+ def test_recipetool_appendsrcfile_basic_wildcard(self):
+ testrecipe = 'base-files'
+ self._test_appendsrcfile(testrecipe, 'a-file', options='-w')
+ recipefile = get_bb_var('FILE', testrecipe)
+ bbappendfile = self._check_bbappend(testrecipe, recipefile, self.templayerdir)
+ self.assertEqual(os.path.basename(bbappendfile), '%s_%%.bbappend' % testrecipe)
+
+ def test_recipetool_appendsrcfile_subdir_basic(self):
+ self._test_appendsrcfile('base-files', 'a-file', 'tmp')
+
+ def test_recipetool_appendsrcfile_subdir_basic_dirdest(self):
+ self._test_appendsrcfile('base-files', destdir='tmp')
+
+ def test_recipetool_appendsrcfile_srcdir_basic(self):
+ testrecipe = 'bash'
+ bb_vars = get_bb_vars(['S', 'WORKDIR'], testrecipe)
+ srcdir = bb_vars['S']
+ workdir = bb_vars['WORKDIR']
+ subdir = os.path.relpath(srcdir, workdir)
+ self._test_appendsrcfile(testrecipe, 'a-file', srcdir=subdir)
+
+ def test_recipetool_appendsrcfile_existing_in_src_uri(self):
+ testrecipe = 'base-files'
+ filepath = self._get_first_file_uri(testrecipe)
+ self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe)
+ self._test_appendsrcfile(testrecipe, filepath, has_src_uri=False)
+
+ def test_recipetool_appendsrcfile_existing_in_src_uri_diff_params(self):
+ testrecipe = 'base-files'
+ subdir = 'tmp'
+ filepath = self._get_first_file_uri(testrecipe)
+ self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe)
+
+ output = self._test_appendsrcfile(testrecipe, filepath, subdir, has_src_uri=False)
+ self.assertTrue(any('with different parameters' in l for l in output))
+
+ def test_recipetool_appendsrcfile_replace_file_srcdir(self):
+ testrecipe = 'bash'
+ filepath = 'Makefile.in'
+ bb_vars = get_bb_vars(['S', 'WORKDIR'], testrecipe)
+ srcdir = bb_vars['S']
+ workdir = bb_vars['WORKDIR']
+ subdir = os.path.relpath(srcdir, workdir)
+
+ self._test_appendsrcfile(testrecipe, filepath, srcdir=subdir)
+ bitbake('%s:do_unpack' % testrecipe)
+ with open(self.testfile, 'r') as testfile:
+ with open(os.path.join(srcdir, filepath), 'r') as makefilein:
+ self.assertEqual(testfile.read(), makefilein.read())
+
+ def test_recipetool_appendsrcfiles_basic(self, destdir=None):
+ newfiles = [self.testfile]
+ for i in range(1, 5):
+ testfile = os.path.join(self.tempdir, 'testfile%d' % i)
+ with open(testfile, 'w') as f:
+ f.write('Test file %d\n' % i)
+ newfiles.append(testfile)
+ self._test_appendsrcfiles('gcc', newfiles, destdir=destdir, options='-W')
+
+ def test_recipetool_appendsrcfiles_basic_subdir(self):
+ self.test_recipetool_appendsrcfiles_basic(destdir='testdir')
diff --git a/meta/lib/oeqa/selftest/cases/recipeutils.py b/meta/lib/oeqa/selftest/cases/recipeutils.py
new file mode 100644
index 0000000000..747870383b
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/recipeutils.py
@@ -0,0 +1,140 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import re
+import time
+import logging
+import bb.tinfoil
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import runCmd, get_test_layer
+
+
+def setUpModule():
+ global tinfoil
+ global metaselftestpath
+ metaselftestpath = get_test_layer()
+ tinfoil = bb.tinfoil.Tinfoil(tracking=True)
+ tinfoil.prepare(config_only=False, quiet=2)
+
+
+def tearDownModule():
+ tinfoil.shutdown()
+
+
+class RecipeUtilsTests(OESelftestTestCase):
+ """ Tests for the recipeutils module functions """
+
+ def test_patch_recipe_varflag(self):
+ import oe.recipeutils
+ rd = tinfoil.parse_recipe('python3-async-test')
+ vals = {'SRC_URI[md5sum]': 'aaaaaa', 'LICENSE': 'something'}
+ patches = oe.recipeutils.patch_recipe(rd, rd.getVar('FILE'), vals, patch=True, relpath=metaselftestpath)
+
+ expected_patch = """
+--- a/recipes-devtools/python/python-async-test.inc
++++ b/recipes-devtools/python/python-async-test.inc
+@@ -1,14 +1,14 @@
+ SUMMARY = "Python framework to process interdependent tasks in a pool of workers"
+ HOMEPAGE = "http://github.com/gitpython-developers/async"
+ SECTION = "devel/python"
+-LICENSE = "BSD"
++LICENSE = "something"
+ LIC_FILES_CHKSUM = "file://PKG-INFO;beginline=8;endline=8;md5=88df8e78b9edfd744953862179f2d14e"
+
+ inherit pypi
+
+ PYPI_PACKAGE = "async"
+
+-SRC_URI[md5sum] = "9b06b5997de2154f3bc0273f80bcef6b"
++SRC_URI[md5sum] = "aaaaaa"
+ SRC_URI[sha256sum] = "ac6894d876e45878faae493b0cf61d0e28ec417334448ac0a6ea2229d8343051"
+
+ RDEPENDS_${PN} += "${PYTHON_PN}-threading"
+"""
+ patchlines = []
+ for f in patches:
+ for line in f:
+ patchlines.append(line)
+ self.maxDiff = None
+ self.assertEqual(''.join(patchlines).strip(), expected_patch.strip())
+
+
+ def test_patch_recipe_singleappend(self):
+ import oe.recipeutils
+ rd = tinfoil.parse_recipe('recipeutils-test')
+ val = rd.getVar('SRC_URI', False).split()
+ del val[1]
+ val = ' '.join(val)
+ vals = {'SRC_URI': val}
+ patches = oe.recipeutils.patch_recipe(rd, rd.getVar('FILE'), vals, patch=True, relpath=metaselftestpath)
+
+ expected_patch = """
+--- a/recipes-test/recipeutils/recipeutils-test_1.2.bb
++++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb
+@@ -8,6 +8,4 @@
+
+ BBCLASSEXTEND = "native nativesdk"
+
+-SRC_URI += "file://somefile"
+-
+ SRC_URI_append = " file://anotherfile"
+"""
+ patchlines = []
+ for f in patches:
+ for line in f:
+ patchlines.append(line)
+ self.assertEqual(''.join(patchlines).strip(), expected_patch.strip())
+
+
+ def test_patch_recipe_appends(self):
+ import oe.recipeutils
+ rd = tinfoil.parse_recipe('recipeutils-test')
+ val = rd.getVar('SRC_URI', False).split()
+ vals = {'SRC_URI': val[0]}
+ patches = oe.recipeutils.patch_recipe(rd, rd.getVar('FILE'), vals, patch=True, relpath=metaselftestpath)
+
+ expected_patch = """
+--- a/recipes-test/recipeutils/recipeutils-test_1.2.bb
++++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb
+@@ -8,6 +8,3 @@
+
+ BBCLASSEXTEND = "native nativesdk"
+
+-SRC_URI += "file://somefile"
+-
+-SRC_URI_append = " file://anotherfile"
+"""
+ patchlines = []
+ for f in patches:
+ for line in f:
+ patchlines.append(line)
+ self.assertEqual(''.join(patchlines).strip(), expected_patch.strip())
+
+
+ def test_validate_pn(self):
+ import oe.recipeutils
+ expected_results = {
+ 'test': '',
+ 'glib-2.0': '',
+ 'gtk+': '',
+ 'forcevariable': 'reserved',
+ 'pn-something': 'reserved',
+ 'test.bb': 'file',
+ 'test_one': 'character',
+ 'test!': 'character',
+ }
+
+ for pn, expected in expected_results.items():
+ result = oe.recipeutils.validate_pn(pn)
+ if expected:
+ self.assertIn(expected, result)
+ else:
+ self.assertEqual(result, '')
+
+ def test_split_var_value(self):
+ import oe.recipeutils
+ res = oe.recipeutils.split_var_value('test.1 test.2 ${@call_function("hi there world", false)} test.4')
+ self.assertEqual(res, ['test.1', 'test.2', '${@call_function("hi there world", false)}', 'test.4'])
diff --git a/meta/lib/oeqa/selftest/cases/reproducible.py b/meta/lib/oeqa/selftest/cases/reproducible.py
new file mode 100644
index 0000000000..db538a4f89
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/reproducible.py
@@ -0,0 +1,202 @@
+#
+# SPDX-License-Identifier: MIT
+#
+# Copyright 2019 by Garmin Ltd. or its subsidiaries
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
+import bb.utils
+import functools
+import multiprocessing
+import textwrap
+import json
+import unittest
+import tempfile
+import shutil
+import stat
+import os
+
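+# Per-file comparison outcomes recorded in CompareResult.status.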
+MISSING = 'MISSING'
+DIFFERENT = 'DIFFERENT'
+SAME = 'SAME'
+
+@functools.total_ordering
+class CompareResult(object):
+ def __init__(self):
+ self.reference = None
+ self.test = None
+ self.status = 'UNKNOWN'
+
+ def __eq__(self, other):
+ return (self.status, self.test) == (other.status, other.test)
+
+ def __lt__(self, other):
+ return (self.status, self.test) < (other.status, other.test)
+
+class PackageCompareResults(object):
+ def __init__(self):
+ self.total = []
+ self.missing = []
+ self.different = []
+ self.same = []
+
+ def add_result(self, r):
+ self.total.append(r)
+ if r.status == MISSING:
+ self.missing.append(r)
+ elif r.status == DIFFERENT:
+ self.different.append(r)
+ else:
+ self.same.append(r)
+
+ def sort(self):
+ self.total.sort()
+ self.missing.sort()
+ self.different.sort()
+ self.same.sort()
+
+ def __str__(self):
+ return 'same=%i different=%i missing=%i total=%i' % (len(self.same), len(self.different), len(self.missing), len(self.total))
+
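+# Compare a single reference/test file pair: the result is MISSING when the
+# reference file does not exist, otherwise 'cmp' from the diffutils-native
+# sysroot decides between SAME and DIFFERENT.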
+def compare_file(reference, test, diffutils_sysroot):
+ result = CompareResult()
+ result.reference = reference
+ result.test = test
+
+ if not os.path.exists(reference):
+ result.status = MISSING
+ return result
+
+ r = runCmd(['cmp', '--quiet', reference, test], native_sysroot=diffutils_sysroot, ignore_status=True)
+
+ if r.status:
+ result.status = DIFFERENT
+ return result
+
+ result.status = SAME
+ return result
+
+class ReproducibleTests(OESelftestTestCase):
+ package_classes = ['deb', 'ipk']
+ images = ['core-image-minimal', 'core-image-sato', 'core-image-full-cmdline']
+ save_results = False
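+ # Set save_results to True to copy non-reproducible packages into a
+ # temporary directory for inspection after the test run.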
+
+ def setUpLocal(self):
+ super().setUpLocal()
+ needed_vars = ['TOPDIR', 'TARGET_PREFIX', 'BB_NUMBER_THREADS']
+ bb_vars = get_bb_vars(needed_vars)
+ for v in needed_vars:
+ setattr(self, v.lower(), bb_vars[v])
+
+ self.extrasresults = {}
+ self.extrasresults.setdefault('reproducible.rawlogs', {})['log'] = ''
+ self.extrasresults.setdefault('reproducible', {}).setdefault('files', {})
+
+ def append_to_log(self, msg):
+ self.extrasresults['reproducible.rawlogs']['log'] += msg
+
+ def compare_packages(self, reference_dir, test_dir, diffutils_sysroot):
+ result = PackageCompareResults()
+
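+ # Walk the test deploy directory and compare every file against its
+ # counterpart under the reference directory, spreading the per-file
+ # comparisons over a multiprocessing pool sized by BB_NUMBER_THREADS.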
+ old_cwd = os.getcwd()
+ try:
+ file_result = {}
+ os.chdir(test_dir)
+ with multiprocessing.Pool(processes=int(self.bb_number_threads or 0)) as p:
+ for root, dirs, files in os.walk('.'):
+ async_result = []
+ for f in files:
+ reference_path = os.path.join(reference_dir, root, f)
+ test_path = os.path.join(test_dir, root, f)
+ async_result.append(p.apply_async(compare_file, (reference_path, test_path, diffutils_sysroot)))
+
+ for a in async_result:
+ result.add_result(a.get())
+
+ finally:
+ os.chdir(old_cwd)
+
+ result.sort()
+ return result
+
+ def write_package_list(self, package_class, name, packages):
+ self.extrasresults['reproducible']['files'].setdefault(package_class, {})[name] = [
+ {'reference': p.reference, 'test': p.test} for p in packages]
+
+ def copy_file(self, source, dest):
+ bb.utils.mkdirhier(os.path.dirname(dest))
+ shutil.copyfile(source, dest)
+
+ def test_reproducible_builds(self):
+ capture_vars = ['DEPLOY_DIR_' + c.upper() for c in self.package_classes]
+
+ if self.save_results:
+ save_dir = tempfile.mkdtemp(prefix='oe-reproducible-')
+ os.chmod(save_dir, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
+ self.logger.info('Non-reproducible packages will be copied to %s', save_dir)
+
+ # Build native utilities
+ self.write_config('')
+ bitbake("diffutils-native -c addto_recipe_sysroot")
+ diffutils_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "diffutils-native")
+
+ # Reproducible builds should not pull from sstate or mirrors, but
+ # sharing DL_DIR is fine
+ common_config = textwrap.dedent('''\
+ INHERIT += "reproducible_build"
+ PACKAGE_CLASSES = "%s"
+ SSTATE_DIR = "${TMPDIR}/sstate"
+ ''') % (' '.join('package_%s' % c for c in self.package_classes))
+
+ # Perform a build.
+ reproducibleA_tmp = os.path.join(self.topdir, 'reproducibleA', 'tmp')
+ if os.path.exists(reproducibleA_tmp):
+ bb.utils.remove(reproducibleA_tmp, recurse=True)
+
+ self.write_config((textwrap.dedent('''\
+ TMPDIR = "%s"
+ ''') % reproducibleA_tmp) + common_config)
+ vars_A = get_bb_vars(capture_vars)
+ bitbake(' '.join(self.images))
+
+ # Perform another build.
+ reproducibleB_tmp = os.path.join(self.topdir, 'reproducibleB', 'tmp')
+ if os.path.exists(reproducibleB_tmp):
+ bb.utils.remove(reproducibleB_tmp, recurse=True)
+
+ self.write_config((textwrap.dedent('''\
+ SSTATE_MIRROR = ""
+ TMPDIR = "%s"
+ ''') % reproducibleB_tmp) + common_config)
+ vars_B = get_bb_vars(capture_vars)
+ bitbake(' '.join(self.images))
+
+ # NOTE: The temp directories from the reproducible build are purposely
+ # kept after the build so it can be diffed for debugging.
+
+ for c in self.package_classes:
+ with self.subTest(package_class=c):
+ package_class = 'package_' + c
+
+ deploy_A = vars_A['DEPLOY_DIR_' + c.upper()]
+ deploy_B = vars_B['DEPLOY_DIR_' + c.upper()]
+
+ result = self.compare_packages(deploy_A, deploy_B, diffutils_sysroot)
+
+ self.logger.info('Reproducibility summary for %s: %s' % (c, result))
+
+ self.append_to_log('\n'.join("%s: %s" % (r.status, r.test) for r in result.total))
+
+ self.write_package_list(package_class, 'missing', result.missing)
+ self.write_package_list(package_class, 'different', result.different)
+ self.write_package_list(package_class, 'same', result.same)
+
+ if self.save_results:
+ for d in result.different:
+ self.copy_file(d.reference, '/'.join([save_dir, d.reference]))
+ self.copy_file(d.test, '/'.join([save_dir, d.test]))
+
+ if result.missing or result.different:
+ self.fail("The following %s packages are missing or different: %s" %
+ (c, ' '.join(r.test for r in (result.missing + result.different))))
+
diff --git a/meta/lib/oeqa/selftest/cases/resulttooltests.py b/meta/lib/oeqa/selftest/cases/resulttooltests.py
new file mode 100644
index 0000000000..dac5c46801
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/resulttooltests.py
@@ -0,0 +1,98 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import sys
+basepath = os.path.abspath(os.path.dirname(__file__) + '/../../../../../')
+lib_path = basepath + '/scripts/lib'
+sys.path = sys.path + [lib_path]
+from resulttool.report import ResultsTextReport
+from resulttool import regression as regression
+from resulttool import resultutils as resultutils
+from oeqa.selftest.case import OESelftestTestCase
+
+class ResultToolTests(OESelftestTestCase):
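+ # Minimal result sets keyed by result name, carrying just enough
+ # configuration metadata (TEST_TYPE, TESTSERIES, IMAGE_BASENAME, DISTRO,
+ # MACHINE) for the regression-pairing and merge tests below.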
+ base_results_data = {'base_result1': {'configuration': {"TEST_TYPE": "runtime",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86"},
+ 'result': {}},
+ 'base_result2': {'configuration': {"TEST_TYPE": "runtime",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86-64"},
+ 'result': {}}}
+ target_results_data = {'target_result1': {'configuration': {"TEST_TYPE": "runtime",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86"},
+ 'result': {}},
+ 'target_result2': {'configuration': {"TEST_TYPE": "runtime",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86"},
+ 'result': {}},
+ 'target_result3': {'configuration': {"TEST_TYPE": "runtime",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86-64"},
+ 'result': {}}}
+
+ def test_report_can_aggregate_test_result(self):
+ result_data = {'result': {'test1': {'status': 'PASSED'},
+ 'test2': {'status': 'PASSED'},
+ 'test3': {'status': 'FAILED'},
+ 'test4': {'status': 'ERROR'},
+ 'test5': {'status': 'SKIPPED'}}}
+ report = ResultsTextReport()
+ result_report = report.get_aggregated_test_result(None, result_data, 'DummyMachine')
+ self.assertTrue(result_report['passed'] == 2, msg="Passed count not correct:%s" % result_report['passed'])
+ self.assertTrue(result_report['failed'] == 2, msg="Failed count not correct:%s" % result_report['failed'])
+ self.assertTrue(result_report['skipped'] == 1, msg="Skipped count not correct:%s" % result_report['skipped'])
+
+ def test_regression_can_get_regression_base_target_pair(self):
+
+ results = {}
+ resultutils.append_resultsdata(results, ResultToolTests.base_results_data)
+ resultutils.append_resultsdata(results, ResultToolTests.target_results_data)
+ self.assertTrue('target_result1' in results['runtime/mydistro/qemux86/image'], msg="Pair not correct:%s" % results)
+ self.assertTrue('target_result3' in results['runtime/mydistro/qemux86-64/image'], msg="Pair not correct:%s" % results)
+
+ def test_regression_can_get_regression_result(self):
+ base_result_data = {'result': {'test1': {'status': 'PASSED'},
+ 'test2': {'status': 'PASSED'},
+ 'test3': {'status': 'FAILED'},
+ 'test4': {'status': 'ERROR'},
+ 'test5': {'status': 'SKIPPED'}}}
+ target_result_data = {'result': {'test1': {'status': 'PASSED'},
+ 'test2': {'status': 'FAILED'},
+ 'test3': {'status': 'PASSED'},
+ 'test4': {'status': 'ERROR'},
+ 'test5': {'status': 'SKIPPED'}}}
+ result, text = regression.compare_result(self.logger, "BaseTestRunName", "TargetTestRunName", base_result_data, target_result_data)
+ self.assertTrue(result['test2']['base'] == 'PASSED',
+ msg="regression not correct:%s" % result['test2']['base'])
+ self.assertTrue(result['test2']['target'] == 'FAILED',
+ msg="regression not correct:%s" % result['test2']['target'])
+ self.assertTrue(result['test3']['base'] == 'FAILED',
+ msg="regression not correct:%s" % result['test3']['base'])
+ self.assertTrue(result['test3']['target'] == 'PASSED',
+ msg="regression not correct:%s" % result['test3']['target'])
+
+ def test_merge_can_merge_results(self):
+ results = {}
+ resultutils.append_resultsdata(results, ResultToolTests.base_results_data, configmap=resultutils.flatten_map)
+ resultutils.append_resultsdata(results, ResultToolTests.target_results_data, configmap=resultutils.flatten_map)
+ self.assertEqual(len(results[''].keys()), 5, msg="Flattened results not correct %s" % str(results))
+
diff --git a/meta/lib/oeqa/selftest/cases/runcmd.py b/meta/lib/oeqa/selftest/cases/runcmd.py
new file mode 100644
index 0000000000..3755764ee7
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/runcmd.py
@@ -0,0 +1,121 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import runCmd
+from oeqa.utils import CommandError
+
+import subprocess
+import threading
+import time
+import signal
+
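+# Minimal logger stand-in that simply records info/error messages in memory.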
+class MemLogger(object):
+ def __init__(self):
+ self.info_msgs = []
+ self.error_msgs = []
+
+ def info(self, msg):
+ self.info_msgs.append(msg)
+
+ def error(self, msg):
+ self.error_msgs.append(msg)
+
+class RunCmdTests(OESelftestTestCase):
+ """ Basic tests for runCmd() utility function """
+
+ # The delta is intentionally smaller than the timeout, to detect cases where
+ # we incorrectly apply the timeout more than once.
+ TIMEOUT = 5
+ DELTA = 3
+
+ def test_result_okay(self):
+ result = runCmd("true")
+ self.assertEqual(result.status, 0)
+
+ def test_result_false(self):
+ result = runCmd("false", ignore_status=True)
+ self.assertEqual(result.status, 1)
+
+ def test_shell(self):
+ # A shell is used for all string commands.
+ result = runCmd("false; true", ignore_status=True)
+ self.assertEqual(result.status, 0)
+
+ def test_no_shell(self):
+ self.assertRaises(FileNotFoundError,
+ runCmd, "false; true", shell=False)
+
+ def test_list_not_found(self):
+ self.assertRaises(FileNotFoundError,
+ runCmd, ["false; true"])
+
+ def test_list_okay(self):
+ result = runCmd(["true"])
+ self.assertEqual(result.status, 0)
+
+ def test_result_assertion(self):
+ self.assertRaisesRegex(AssertionError, "Command 'echo .* false' returned non-zero exit status 1:\nfoobar",
+ runCmd, "echo foobar >&2; false", shell=True)
+
+ def test_result_exception(self):
+ self.assertRaisesRegex(CommandError, "Command 'echo .* false' returned non-zero exit status 1 with output: foobar",
+ runCmd, "echo foobar >&2; false", shell=True, assert_error=False)
+
+ def test_output(self):
+ result = runCmd("echo stdout; echo stderr >&2", shell=True)
+ self.assertEqual("stdout\nstderr", result.output)
+ self.assertEqual("", result.error)
+
+ def test_output_split(self):
+ result = runCmd("echo stdout; echo stderr >&2", shell=True, stderr=subprocess.PIPE)
+ self.assertEqual("stdout", result.output)
+ self.assertEqual("stderr", result.error)
+
+ def test_timeout(self):
+ numthreads = threading.active_count()
+ start = time.time()
+ # Killing a hanging process only works when not using a shell?!
+ result = runCmd(['sleep', '60'], timeout=self.TIMEOUT, ignore_status=True)
+ self.assertEqual(result.status, -signal.SIGTERM)
+ end = time.time()
+ self.assertLess(end - start, self.TIMEOUT + self.DELTA)
+ self.assertEqual(numthreads, threading.active_count())
+
+ def test_timeout_split(self):
+ numthreads = threading.active_count()
+ start = time.time()
+ # Killing a hanging process only works when not using a shell?!
+ result = runCmd(['sleep', '60'], timeout=self.TIMEOUT, ignore_status=True, stderr=subprocess.PIPE)
+ self.assertEqual(result.status, -signal.SIGTERM)
+ end = time.time()
+ self.assertLess(end - start, self.TIMEOUT + self.DELTA)
+ self.assertEqual(numthreads, threading.active_count())
+
+ def test_stdin(self):
+ numthreads = threading.active_count()
+ result = runCmd("cat", data=b"hello world", timeout=self.TIMEOUT)
+ self.assertEqual("hello world", result.output)
+ self.assertEqual(numthreads, threading.active_count())
+
+ def test_stdin_timeout(self):
+ numthreads = threading.active_count()
+ start = time.time()
+ result = runCmd(['sleep', '60'], data=b"hello world", timeout=self.TIMEOUT, ignore_status=True)
+ self.assertEqual(result.status, -signal.SIGTERM)
+ end = time.time()
+ self.assertLess(end - start, self.TIMEOUT + self.DELTA)
+ self.assertEqual(numthreads, threading.active_count())
+
+ def test_log(self):
+ log = MemLogger()
+ result = runCmd("echo stdout; echo stderr >&2", shell=True, output_log=log)
+ self.assertEqual(["Running: echo stdout; echo stderr >&2", "stdout", "stderr"], log.info_msgs)
+ self.assertEqual([], log.error_msgs)
+
+ def test_log_split(self):
+ log = MemLogger()
+ result = runCmd("echo stdout; echo stderr >&2", shell=True, output_log=log, stderr=subprocess.PIPE)
+ self.assertEqual(["Running: echo stdout; echo stderr >&2", "stdout"], log.info_msgs)
+ self.assertEqual(["stderr"], log.error_msgs)
diff --git a/meta/lib/oeqa/selftest/cases/runqemu.py b/meta/lib/oeqa/selftest/cases/runqemu.py
new file mode 100644
index 0000000000..7e676bcb41
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/runqemu.py
@@ -0,0 +1,211 @@
+#
+# Copyright (c) 2017 Wind River Systems, Inc.
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import re
+import tempfile
+import time
+import oe.types
+from oeqa.core.decorator import OETestTag
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, runqemu, get_bb_var, runCmd
+
+class RunqemuTests(OESelftestTestCase):
+ """Runqemu test class"""
+
+ image_is_ready = False
+ deploy_dir_image = ''
+
+ def setUpLocal(self):
+ super(RunqemuTests, self).setUpLocal()
+ self.recipe = 'core-image-minimal'
+ self.machine = 'qemux86-64'
+ self.fstypes = "ext4 iso hddimg wic.vmdk wic.qcow2 wic.vdi"
+ self.cmd_common = "runqemu nographic"
+
+ kvm = oe.types.qemu_use_kvm(get_bb_var('QEMU_USE_KVM'), 'x86_64')
+ if kvm:
+ self.cmd_common += " kvm"
+
+ self.write_config(
+"""
+MACHINE = "%s"
+IMAGE_FSTYPES = "%s"
+# 10 means 1 second
+SYSLINUX_TIMEOUT = "10"
+"""
+% (self.machine, self.fstypes)
+ )
+
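+ # Build the image only once for the whole class; the class-level flag
+ # keeps the remaining test methods from rebuilding core-image-minimal.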
+ if not RunqemuTests.image_is_ready:
+ RunqemuTests.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
+ bitbake(self.recipe)
+ RunqemuTests.image_is_ready = True
+
+ def test_boot_machine(self):
+ """Test runqemu machine"""
+ cmd = "%s %s" % (self.cmd_common, self.machine)
+ with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
+ with open(qemu.qemurunnerlog) as f:
+ self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read()))
+
+ def test_boot_machine_ext4(self):
+ """Test runqemu machine ext4"""
+ cmd = "%s %s ext4" % (self.cmd_common, self.machine)
+ with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
+ with open(qemu.qemurunnerlog) as f:
+ self.assertIn('rootfs.ext4', f.read(), "Failed: %s" % cmd)
+
+ def test_boot_machine_iso(self):
+ """Test runqemu machine iso"""
+ cmd = "%s %s iso" % (self.cmd_common, self.machine)
+ with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
+ with open(qemu.qemurunnerlog) as f:
+ self.assertIn('media=cdrom', f.read(), "Failed: %s" % cmd)
+
+ def test_boot_recipe_image(self):
+ """Test runqemu recipe-image"""
+ cmd = "%s %s" % (self.cmd_common, self.recipe)
+ with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
+ with open(qemu.qemurunnerlog) as f:
+ self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read()))
+
+
+ def test_boot_recipe_image_vmdk(self):
+ """Test runqemu recipe-image vmdk"""
+ cmd = "%s %s wic.vmdk" % (self.cmd_common, self.recipe)
+ with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
+ with open(qemu.qemurunnerlog) as f:
+ self.assertIn('format=vmdk', f.read(), "Failed: %s" % cmd)
+
+ def test_boot_recipe_image_vdi(self):
+ """Test runqemu recipe-image vdi"""
+ cmd = "%s %s wic.vdi" % (self.cmd_common, self.recipe)
+ with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
+ with open(qemu.qemurunnerlog) as f:
+ self.assertIn('format=vdi', f.read(), "Failed: %s" % cmd)
+
+ def test_boot_deploy(self):
+ """Test runqemu deploy_dir_image"""
+ cmd = "%s %s" % (self.cmd_common, self.deploy_dir_image)
+ with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
+ with open(qemu.qemurunnerlog) as f:
+ self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read()))
+
+
+ def test_boot_deploy_hddimg(self):
+ """Test runqemu deploy_dir_image hddimg"""
+ cmd = "%s %s hddimg" % (self.cmd_common, self.deploy_dir_image)
+ with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
+ with open(qemu.qemurunnerlog) as f:
+ log = f.read()
+ self.assertTrue(re.search(r'file=.*\.hddimg', log), "Failed: %s, %s" % (cmd, log))
+
+ def test_boot_machine_slirp(self):
+ """Test runqemu machine slirp"""
+ cmd = "%s slirp %s" % (self.cmd_common, self.machine)
+ with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
+ with open(qemu.qemurunnerlog) as f:
+ self.assertIn(' -netdev user', f.read(), "Failed: %s" % cmd)
+
+ def test_boot_machine_slirp_qcow2(self):
+ """Test runqemu machine slirp qcow2"""
+ cmd = "%s slirp wic.qcow2 %s" % (self.cmd_common, self.machine)
+ with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
+ with open(qemu.qemurunnerlog) as f:
+ self.assertIn('format=qcow2', f.read(), "Failed: %s" % cmd)
+
+ def test_boot_qemu_boot(self):
+ """Test runqemu /path/to/image.qemuboot.conf"""
+ qemuboot_conf = "%s-%s.qemuboot.conf" % (self.recipe, self.machine)
+ qemuboot_conf = os.path.join(self.deploy_dir_image, qemuboot_conf)
+ if not os.path.exists(qemuboot_conf):
+ self.skipTest("%s not found" % qemuboot_conf)
+ cmd = "%s %s" % (self.cmd_common, qemuboot_conf)
+ with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
+ with open(qemu.qemurunnerlog) as f:
+ self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read()))
+
+ def test_boot_rootfs(self):
+ """Test runqemu /path/to/rootfs.ext4"""
+ rootfs = "%s-%s.ext4" % (self.recipe, self.machine)
+ rootfs = os.path.join(self.deploy_dir_image, rootfs)
+ if not os.path.exists(rootfs):
+ self.skipTest("%s not found" % rootfs)
+ cmd = "%s %s" % (self.cmd_common, rootfs)
+ with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
+ with open(qemu.qemurunnerlog) as f:
+ self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read()))
+
+
+# This test is kept in a separate class to verify that the shutdown
+# command shuts down qemu as expected on each qemu architecture, based
+# on the MACHINE configuration in the config file (e.g. local.conf).
+#
+# This differs from RunqemuTests, which is dedicated to
+# MACHINE=qemux86-64 and tests that qemux86-64 boots various filesystem
+# types, including live images (iso and hddimg), which are not
+# supported on every qemu architecture.
+@OETestTag("machine")
+class QemuTest(OESelftestTestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super(QemuTest, cls).setUpClass()
+ cls.recipe = 'core-image-minimal'
+ cls.machine = get_bb_var('MACHINE')
+ cls.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
+ cls.cmd_common = "runqemu nographic"
+ cls.qemuboot_conf = "%s-%s.qemuboot.conf" % (cls.recipe, cls.machine)
+ cls.qemuboot_conf = os.path.join(cls.deploy_dir_image, cls.qemuboot_conf)
+ bitbake(cls.recipe)
+
+ def _start_qemu_shutdown_check_if_shutdown_succeeded(self, qemu, timeout):
+ qemu.run_serial("shutdown -h now")
+ # Stop the LoggingThread instance used to log qemu over the serial
+ # console; this prevents a "Console connection closed unexpectedly"
+ # exception once qemu has been shut down by the command above.
+ qemu.runner.stop_thread()
+ time_track = 0
+ try:
+ while True:
+ is_alive = qemu.check()
+ if not is_alive:
+ return True
+ if time_track > timeout:
+ return False
+ time.sleep(1)
+ time_track += 1
+ except SystemExit:
+ return True
+
+ def test_qemu_can_shutdown(self):
+ self.assertExists(self.qemuboot_conf)
+ cmd = "%s %s" % (self.cmd_common, self.qemuboot_conf)
+ shutdown_timeout = 120
+ with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
+ qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout)
+ self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout))
+
+ # Need to have portmap/rpcbind running to allow this test to work and
+ # current autobuilder setup does not have this.
+ def disabled_test_qemu_can_boot_nfs_and_shutdown(self):
+ self.assertExists(self.qemuboot_conf)
+ bitbake('meta-ide-support')
+ rootfs_tar = "%s-%s.tar.bz2" % (self.recipe, self.machine)
+ rootfs_tar = os.path.join(self.deploy_dir_image, rootfs_tar)
+ self.assertExists(rootfs_tar)
+ tmpdir = tempfile.mkdtemp(prefix='qemu_nfs')
+ tmpdir_nfs = os.path.join(tmpdir, 'nfs')
+ cmd_extract_nfs = 'runqemu-extract-sdk %s %s' % (rootfs_tar, tmpdir_nfs)
+ result = runCmd(cmd_extract_nfs)
+ self.assertEqual(0, result.status, "runqemu-extract-sdk didn't run as expected. %s" % result.output)
+ cmd = "%s nfs %s %s" % (self.cmd_common, self.qemuboot_conf, tmpdir_nfs)
+ shutdown_timeout = 120
+ with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
+ qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout)
+ self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout))
+ runCmd('rm -rf %s' % tmpdir)
diff --git a/meta/lib/oeqa/selftest/cases/runtime_test.py b/meta/lib/oeqa/selftest/cases/runtime_test.py
new file mode 100644
index 0000000000..60cb2e01a6
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/runtime_test.py
@@ -0,0 +1,439 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu
+from oeqa.utils.sshcontrol import SSHControl
+import os
+import re
+import tempfile
+import shutil
+import oe.lsb
+from oeqa.core.decorator.data import skipIfNotQemu
+
+class TestExport(OESelftestTestCase):
+
+ @classmethod
+ def tearDownClass(cls):
+ runCmd("rm -rf /tmp/sdk")
+ super(TestExport, cls).tearDownClass()
+
+ def test_testexport_basic(self):
+ """
+ Summary: Check basic testexport functionality with only ping test enabled.
+ Expected: 1. testexport directory must be created.
+ 2. runexported.py must run without any error/exception.
+ 3. ping test must succeed.
+ Product: oe-core
+ Author: Mariano Lopez <mariano.lopez@intel.com>
+ """
+
+ features = 'INHERIT += "testexport"\n'
+ # These aren't the actual IP addresses but testexport class needs something defined
+ features += 'TEST_SERVER_IP = "192.168.7.1"\n'
+ features += 'TEST_TARGET_IP = "192.168.7.1"\n'
+ features += 'TEST_SUITES = "ping"\n'
+ self.write_config(features)
+
+ # Build testexport for core-image-minimal
+ bitbake('core-image-minimal')
+ bitbake('-c testexport core-image-minimal')
+
+ testexport_dir = get_bb_var('TEST_EXPORT_DIR', 'core-image-minimal')
+
+ # Verify if TEST_EXPORT_DIR was created
+ isdir = os.path.isdir(testexport_dir)
+ self.assertEqual(True, isdir, 'Failed to create testexport dir: %s' % testexport_dir)
+
+ with runqemu('core-image-minimal') as qemu:
+ # Attempt to run runexported.py to perform ping test
+ test_path = os.path.join(testexport_dir, "oe-test")
+ data_file = os.path.join(testexport_dir, 'data', 'testdata.json')
+ manifest = os.path.join(testexport_dir, 'data', 'manifest')
+ cmd = ("%s runtime --test-data-file %s --packages-manifest %s "
+ "--target-ip %s --server-ip %s --quiet"
+ % (test_path, data_file, manifest, qemu.ip, qemu.server_ip))
+ result = runCmd(cmd)
+ # Verify ping test was successful
+ self.assertEqual(0, result.status, 'oe-test runtime returned a non 0 status')
+
+ def test_testexport_sdk(self):
+ """
+ Summary: Check sdk functionality for testexport.
+ Expected: 1. testexport directory must be created.
+ 2. SDK tarball must exists.
+ 3. Uncompressing of tarball must succeed.
+ 4. Check if the SDK directory is added to PATH.
+ 5. Run tar from the SDK directory.
+ Product: oe-core
+ Author: Mariano Lopez <mariano.lopez@intel.com>
+ """
+
+ features = 'INHERIT += "testexport"\n'
+ # These aren't the actual IP addresses but testexport class needs something defined
+ features += 'TEST_SERVER_IP = "192.168.7.1"\n'
+ features += 'TEST_TARGET_IP = "192.168.7.1"\n'
+ features += 'TEST_SUITES = "ping"\n'
+ features += 'TEST_EXPORT_SDK_ENABLED = "1"\n'
+ features += 'TEST_EXPORT_SDK_PACKAGES = "nativesdk-tar"\n'
+ self.write_config(features)
+
+ # Build testexport for core-image-minimal
+ bitbake('core-image-minimal')
+ bitbake('-c testexport core-image-minimal')
+
+ needed_vars = ['TEST_EXPORT_DIR', 'TEST_EXPORT_SDK_DIR', 'TEST_EXPORT_SDK_NAME']
+ bb_vars = get_bb_vars(needed_vars, 'core-image-minimal')
+ testexport_dir = bb_vars['TEST_EXPORT_DIR']
+ sdk_dir = bb_vars['TEST_EXPORT_SDK_DIR']
+ sdk_name = bb_vars['TEST_EXPORT_SDK_NAME']
+
+ # Check for SDK
+ tarball_name = "%s.sh" % sdk_name
+ tarball_path = os.path.join(testexport_dir, sdk_dir, tarball_name)
+ msg = "Couldn't find SDK tarball: %s" % tarball_path
+ self.assertEqual(os.path.isfile(tarball_path), True, msg)
+
+ # Extract SDK and run tar from SDK
+ result = runCmd("%s -y -d /tmp/sdk" % tarball_path)
+ self.assertEqual(0, result.status, "Couldn't extract SDK")
+
+ env_script = result.output.split()[-1]
+ result = runCmd(". %s; which tar" % env_script, shell=True)
+ self.assertEqual(0, result.status, "Couldn't setup SDK environment")
+ is_sdk_tar = "/tmp/sdk" in result.output
+ self.assertTrue(is_sdk_tar, "Couldn't setup SDK environment")
+
+ tar_sdk = result.output
+ result = runCmd("%s --version" % tar_sdk)
+ self.assertEqual(0, result.status, "Couldn't run tar from SDK")
+
+
+class TestImage(OESelftestTestCase):
+
+ def test_testimage_install(self):
+ """
+ Summary: Check install packages functionality for testimage/testexport.
+ Expected: 1. Import tests from a directory other than meta.
+ 2. Check install/uninstall of socat.
+ Product: oe-core
+ Author: Mariano Lopez <mariano.lopez@intel.com>
+ """
+ if get_bb_var('DISTRO') == 'poky-tiny':
+ self.skipTest('core-image-full-cmdline not buildable for poky-tiny')
+
+ features = 'INHERIT += "testimage"\n'
+ features += 'IMAGE_INSTALL_append = " libssl"\n'
+ features += 'TEST_SUITES = "ping ssh selftest"\n'
+ self.write_config(features)
+
+ # Build core-image-full-cmdline and socat, then run testimage
+ bitbake('core-image-full-cmdline socat')
+ bitbake('-c testimage core-image-full-cmdline')
+
+ def test_testimage_dnf(self):
+ """
+ Summary: Check package feeds functionality for dnf
+ Expected: 1. Check that remote package feeds can be accessed
+ Product: oe-core
+ Author: Alexander Kanavin <alex.kanavin@gmail.com>
+ """
+ if get_bb_var('DISTRO') == 'poky-tiny':
+ self.skipTest('core-image-full-cmdline not buildable for poky-tiny')
+
+ features = 'INHERIT += "testimage"\n'
+ features += 'TEST_SUITES = "ping ssh dnf_runtime dnf.DnfBasicTest.test_dnf_help"\n'
+ # We don't yet know what the server ip and port will be - they will be patched
+ # in at the start of the on-image test
+ features += 'PACKAGE_FEED_URIS = "http://bogus_ip:bogus_port"\n'
+ features += 'EXTRA_IMAGE_FEATURES += "package-management"\n'
+ features += 'PACKAGE_CLASSES = "package_rpm"\n'
+
+ bitbake('gnupg-native -c addto_recipe_sysroot')
+
+ # Enable package feed signing
+ self.gpg_home = tempfile.mkdtemp(prefix="oeqa-feed-sign-")
+ self.track_for_cleanup(self.gpg_home)
+ signing_key_dir = os.path.join(self.testlayer_path, 'files', 'signing')
+ runCmd('gpg --batch --homedir %s --import %s' % (self.gpg_home, os.path.join(signing_key_dir, 'key.secret')), native_sysroot=get_bb_var("RECIPE_SYSROOT_NATIVE", "gnupg-native"))
+ features += 'INHERIT += "sign_package_feed"\n'
+ features += 'PACKAGE_FEED_GPG_NAME = "testuser"\n'
+ features += 'PACKAGE_FEED_GPG_PASSPHRASE_FILE = "%s"\n' % os.path.join(signing_key_dir, 'key.passphrase')
+ features += 'GPG_PATH = "%s"\n' % self.gpg_home
+ self.write_config(features)
+
+ # Build core-image-full-cmdline and socat, then run testimage
+ bitbake('core-image-full-cmdline socat')
+ bitbake('-c testimage core-image-full-cmdline')
+
+ def test_testimage_virgl_gtk_sdl(self):
+ """
+ Summary: Check host-assisted accelerate OpenGL functionality in qemu with gtk and SDL frontends
+ Expected: 1. Check that virgl kernel driver is loaded and 3d acceleration is enabled
+ 2. Check that kmscube demo runs without crashing.
+ Product: oe-core
+ Author: Alexander Kanavin <alex.kanavin@gmail.com>
+ """
+ if "DISPLAY" not in os.environ:
+ self.skipTest("virgl gtk test must be run inside a X session")
+ distro = oe.lsb.distro_identifier()
+ if distro and distro == 'debian-8':
+ self.skipTest('virgl isn\'t working with Debian 8')
+ if distro and distro == 'centos-7':
+ self.skipTest('virgl isn\'t working with Centos 7')
+ if distro and distro == 'opensuseleap-15.0':
+ self.skipTest('virgl isn\'t working with Opensuse 15.0')
+
+ qemu_packageconfig = get_bb_var('PACKAGECONFIG', 'qemu-system-native')
+ sdl_packageconfig = get_bb_var('PACKAGECONFIG', 'libsdl2-native')
+ features = 'INHERIT += "testimage"\n'
+ if 'gtk+' not in qemu_packageconfig:
+ features += 'PACKAGECONFIG_append_pn-qemu-system-native = " gtk+"\n'
+ if 'sdl' not in qemu_packageconfig:
+ features += 'PACKAGECONFIG_append_pn-qemu-system-native = " sdl"\n'
+ if 'virglrenderer' not in qemu_packageconfig:
+ features += 'PACKAGECONFIG_append_pn-qemu-system-native = " virglrenderer"\n'
+ if 'glx' not in qemu_packageconfig:
+ features += 'PACKAGECONFIG_append_pn-qemu-system-native = " glx"\n'
+ if 'opengl' not in sdl_packageconfig:
+ features += 'PACKAGECONFIG_append_pn-libsdl2-native = " opengl"\n'
+ features += 'TEST_SUITES = "ping ssh virgl"\n'
+ features += 'IMAGE_FEATURES_append = " ssh-server-dropbear"\n'
+ features += 'IMAGE_INSTALL_append = " kmscube"\n'
+ features_gtk = features + 'TEST_RUNQEMUPARAMS = "gtk gl"\n'
+ self.write_config(features_gtk)
+ bitbake('core-image-minimal')
+ bitbake('-c testimage core-image-minimal')
+ features_sdl = features + 'TEST_RUNQEMUPARAMS = "sdl gl"\n'
+ self.write_config(features_sdl)
+ bitbake('core-image-minimal')
+ bitbake('-c testimage core-image-minimal')
+
+ def test_testimage_virgl_headless(self):
+ """
+ Summary: Check host-assisted accelerate OpenGL functionality in qemu with egl-headless frontend
+ Expected: 1. Check that virgl kernel driver is loaded and 3d acceleration is enabled
+ 2. Check that kmscube demo runs without crashing.
+ Product: oe-core
+ Author: Alexander Kanavin <alex.kanavin@gmail.com>
+ """
+ import subprocess, os
+ try:
+ content = os.listdir("/dev/dri")
+ if len([i for i in content if i.startswith('render')]) == 0:
+ self.skipTest("No render nodes found in /dev/dri: %s" %(content))
+ except FileNotFoundError:
+ self.skipTest("/dev/dri directory does not exist; no render nodes available on this machine.")
+ try:
+ dripath = subprocess.check_output("pkg-config --variable=dridriverdir dri", shell=True)
+ except subprocess.CalledProcessError as e:
+ self.skipTest("Could not determine the path to dri drivers on the host via pkg-config.\nPlease install Mesa development files (particularly, dri.pc) on the host machine.")
+ qemu_packageconfig = get_bb_var('PACKAGECONFIG', 'qemu-system-native')
+ features = 'INHERIT += "testimage"\n'
+ if 'virglrenderer' not in qemu_packageconfig:
+ features += 'PACKAGECONFIG_append_pn-qemu-system-native = " virglrenderer"\n'
+ if 'glx' not in qemu_packageconfig:
+ features += 'PACKAGECONFIG_append_pn-qemu-system-native = " glx"\n'
+ features += 'TEST_SUITES = "ping ssh virgl"\n'
+ features += 'IMAGE_FEATURES_append = " ssh-server-dropbear"\n'
+ features += 'IMAGE_INSTALL_append = " kmscube"\n'
+ features += 'TEST_RUNQEMUPARAMS = "egl-headless"\n'
+ self.write_config(features)
+ bitbake('core-image-minimal')
+ bitbake('-c testimage core-image-minimal')
+
+class Postinst(OESelftestTestCase):
+
+ def init_manager_loop(self, init_manager):
+ import oe.path
+
+ vars = get_bb_vars(("IMAGE_ROOTFS", "sysconfdir"), "core-image-minimal")
+ rootfs = vars["IMAGE_ROOTFS"]
+ self.assertIsNotNone(rootfs)
+ sysconfdir = vars["sysconfdir"]
+ self.assertIsNotNone(sysconfdir)
+ # Need to use oe.path here as sysconfdir starts with /
+ hosttestdir = oe.path.join(rootfs, sysconfdir, "postinst-test")
+ targettestdir = os.path.join(sysconfdir, "postinst-test")
+
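+ # Run the rootfs and first-boot checks once per package backend; the
+ # systemd variant additionally switches the init manager and the
+ # DISTRO_FEATURES backfill in the generated configuration.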
+ for classes in ("package_rpm", "package_deb", "package_ipk"):
+ with self.subTest(init_manager=init_manager, package_class=classes):
+ features = 'CORE_IMAGE_EXTRA_INSTALL = "postinst-delayed-b"\n'
+ features += 'IMAGE_FEATURES += "package-management empty-root-password"\n'
+ features += 'PACKAGE_CLASSES = "%s"\n' % classes
+ if init_manager == "systemd":
+ features += 'DISTRO_FEATURES_append = " systemd"\n'
+ features += 'VIRTUAL-RUNTIME_init_manager = "systemd"\n'
+ features += 'DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit"\n'
+ features += 'VIRTUAL-RUNTIME_initscripts = ""\n'
+ self.write_config(features)
+
+ bitbake('core-image-minimal')
+
+ self.assertTrue(os.path.isfile(os.path.join(hosttestdir, "rootfs")),
+ "rootfs state file was not created")
+
+ with runqemu('core-image-minimal') as qemu:
+ # Make the test echo a string and search for that as
+ # run_serial()'s status code is useless.
+ for filename in ("rootfs", "delayed-a", "delayed-b"):
+ status, output = qemu.run_serial("test -f %s && echo found" % os.path.join(targettestdir, filename))
+ self.assertEqual(output, "found", "%s was not present on boot" % filename)
+
+
+
+ @skipIfNotQemu('qemuall', 'Test only runs in qemu')
+ def test_postinst_rootfs_and_boot_sysvinit(self):
+ """
+ Summary: The purpose of this test case is to verify Post-installation
+ scripts are called when rootfs is created and also test
+ that script can be delayed to run at first boot.
+ Dependencies: NA
+ Steps: 1. Add proper configuration to local.conf file
+ 2. Build a "core-image-minimal" image
+ 3. Verify that file created by postinst_rootfs recipe is
+ present on rootfs dir.
+ 4. Boot the image created on qemu and verify that the file
+ created by postinst_boot recipe is present on image.
+ Expected: The files are successfully created during rootfs and boot
+ time for 3 different package managers (rpm, ipk, deb) with
+ the sysvinit initialization manager.
+
+ """
+ self.init_manager_loop("sysvinit")
+
+
+ @skipIfNotQemu('qemuall', 'Test only runs in qemu')
+ def test_postinst_rootfs_and_boot_systemd(self):
+ """
+ Summary: The purpose of this test case is to verify Post-installation
+ scripts are called when rootfs is created and also test
+ that script can be delayed to run at first boot.
+ Dependencies: NA
+ Steps: 1. Add proper configuration to local.conf file
+ 2. Build a "core-image-minimal" image
+ 3. Verify that file created by postinst_rootfs recipe is
+ present on rootfs dir.
+ 4. Boot the image created on qemu and verify that the file
+ created by postinst_boot recipe is present on image.
+ Expected: The files are successfully created during rootfs and boot
+ time for 3 different package managers (rpm, ipk, deb) with
+ the systemd initialization manager.
+
+ """
+
+ self.init_manager_loop("systemd")
+
+
+ def test_failing_postinst(self):
+ """
+ Summary: The purpose of this test case is to verify that post-installation
+ scripts that contain errors are properly reported.
+ Expected: The scriptlet failure is properly reported.
+ The file that is created after the error in the scriptlet is not present.
+ Product: oe-core
+ Author: Alexander Kanavin <alex.kanavin@gmail.com>
+ """
+
+ import oe.path
+
+ vars = get_bb_vars(("IMAGE_ROOTFS", "sysconfdir"), "core-image-minimal")
+ rootfs = vars["IMAGE_ROOTFS"]
+ self.assertIsNotNone(rootfs)
+ sysconfdir = vars["sysconfdir"]
+ self.assertIsNotNone(sysconfdir)
+ # Need to use oe.path here as sysconfdir starts with /
+ hosttestdir = oe.path.join(rootfs, sysconfdir, "postinst-test")
+
+ for classes in ("package_rpm", "package_deb", "package_ipk"):
+ with self.subTest(package_class=classes):
+ features = 'CORE_IMAGE_EXTRA_INSTALL = "postinst-rootfs-failing"\n'
+ features += 'PACKAGE_CLASSES = "%s"\n' % classes
+ self.write_config(features)
+ bb_result = bitbake('core-image-minimal', ignore_status=True)
+ self.assertGreaterEqual(bb_result.output.find("Postinstall scriptlets of ['postinst-rootfs-failing'] have failed."), 0,
+ "Warning about a failed scriptlet not found in bitbake output: %s" %(bb_result.output))
+
+ self.assertTrue(os.path.isfile(os.path.join(hosttestdir, "rootfs-before-failure")),
+ "rootfs-before-failure file was not created")
+ self.assertFalse(os.path.isfile(os.path.join(hosttestdir, "rootfs-after-failure")),
+ "rootfs-after-failure file was created")
+
+class SystemTap(OESelftestTestCase):
+ """
+ Summary: The purpose of this test case is to verify native crosstap
+ works while talking to a target.
+ Expected: The script should successfully connect to the qemu machine
+ and run some systemtap examples on a qemu machine.
+ """
+
+ @classmethod
+ def setUpClass(cls):
+ super(SystemTap, cls).setUpClass()
+ cls.image = "core-image-minimal"
+
+ def default_config(self):
+ return """
+# These aren't the actual IP addresses but testexport class needs something defined
+TEST_SERVER_IP = "192.168.7.1"
+TEST_TARGET_IP = "192.168.7.2"
+
+EXTRA_IMAGE_FEATURES += "tools-profile dbg-pkgs"
+IMAGE_FEATURES_append = " ssh-server-dropbear"
+
+# enables kernel debug symbols
+KERNEL_EXTRA_FEATURES_append = " features/debug/debug-kernel.scc"
+KERNEL_EXTRA_FEATURES_append = " features/systemtap/systemtap.scc"
+
+# add systemtap run-time into target image if it is not there yet
+IMAGE_INSTALL_append = " systemtap"
+"""
+
+ def test_crosstap_helloworld(self):
+ self.write_config(self.default_config())
+ bitbake('systemtap-native')
+ systemtap_examples = os.path.join(get_bb_var("WORKDIR","systemtap-native"), "usr/share/systemtap/examples")
+ bitbake(self.image)
+
+ with runqemu(self.image) as qemu:
+ cmd = "crosstap -r root@192.168.7.2 -s %s/general/helloworld.stp " % systemtap_examples
+ result = runCmd(cmd)
+ self.assertEqual(0, result.status, 'crosstap helloworld returned a non 0 status:%s' % result.output)
+
+ def test_crosstap_pstree(self):
+ self.write_config(self.default_config())
+
+ bitbake('systemtap-native')
+ systemtap_examples = os.path.join(get_bb_var("WORKDIR","systemtap-native"), "usr/share/systemtap/examples")
+ bitbake(self.image)
+
+ with runqemu(self.image) as qemu:
+ cmd = "crosstap -r root@192.168.7.2 -s %s/process/pstree.stp" % systemtap_examples
+ result = runCmd(cmd)
+ self.assertEqual(0, result.status, 'crosstap pstree returned a non 0 status:%s' % result.output)
+
+ def test_crosstap_syscalls_by_proc(self):
+ self.write_config(self.default_config())
+
+ bitbake('systemtap-native')
+ systemtap_examples = os.path.join(get_bb_var("WORKDIR","systemtap-native"), "usr/share/systemtap/examples")
+ bitbake(self.image)
+
+ with runqemu(self.image) as qemu:
+ cmd = "crosstap -r root@192.168.7.2 -s %s/process/ syscalls_by_proc.stp" % systemtap_examples
+ result = runCmd(cmd)
+ self.assertEqual(0, result.status, 'crosstap syscalls_by_proc returned a non 0 status:%s' % result.output)
+
+ def test_crosstap_syscalls_by_pid(self):
+ self.write_config(self.default_config())
+
+ bitbake('systemtap-native')
+ systemtap_examples = os.path.join(get_bb_var("WORKDIR","systemtap-native"), "usr/share/systemtap/examples")
+ bitbake(self.image)
+
+ with runqemu(self.image) as qemu:
+ cmd = "crosstap -r root@192.168.7.2 -s %s/process/ syscalls_by_pid.stp" % systemtap_examples
+ result = runCmd(cmd)
+ self.assertEqual(0, result.status, 'crosstap syscalls_by_pid returned a non 0 status:%s' % result.output)
+
diff --git a/meta/lib/oeqa/selftest/cases/selftest.py b/meta/lib/oeqa/selftest/cases/selftest.py
new file mode 100644
index 0000000000..af080dcf03
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/selftest.py
@@ -0,0 +1,53 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import importlib
+from oeqa.utils.commands import runCmd
+import oeqa.selftest
+from oeqa.selftest.case import OESelftestTestCase
+
+class ExternalLayer(OESelftestTestCase):
+
+ def test_list_imported(self):
+ """
+ Summary: Checks functionality to import tests from other layers.
+ Expected: 1. File "external-layer.py" must be in
+ oeqa.selftest.__path__
+ 2. test_unconditional_pass method must exist
+ in ImportedTests class
+ Product: oe-core
+ Author: Mariano Lopez <mariano.lopez@intel.com>
+ """
+
+ test_file = "external-layer.py"
+ test_module = "oeqa.selftest.cases.external-layer"
+ method_name = "test_unconditional_pass"
+
+ # Check if "external-layer.py" is in oeqa path
+ found_file = search_test_file(test_file)
+ self.assertTrue(found_file, msg="Can't find %s in the oeqa path" % test_file)
+
+ # Import oeqa.selftest.external-layer module and search for
+ # test_unconditional_pass method of ImportedTests class
+ found_method = search_method(test_module, method_name)
+ self.assertTrue(found_method, msg="Can't find %s method" % method_name)
+
+def search_test_file(file_name):
+ for layer_path in oeqa.selftest.__path__:
+ for _, _, files in os.walk(layer_path):
+ for f in files:
+ if f == file_name:
+ return True
+ return False
+
+def search_method(module, method):
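+ # Import the module and check whether any OESelftestTestCase subclass it
+ # defines provides the named method.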
+ modlib = importlib.import_module(module)
+ for var in vars(modlib):
+ klass = vars(modlib)[var]
+ if isinstance(klass, type(OESelftestTestCase)) and issubclass(klass, OESelftestTestCase):
+ for m in dir(klass):
+ if m == method:
+ return True
+ return False
+
diff --git a/meta/lib/oeqa/selftest/cases/signing.py b/meta/lib/oeqa/selftest/cases/signing.py
new file mode 100644
index 0000000000..93b15ae681
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/signing.py
@@ -0,0 +1,224 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, create_temp_layer
+import os
+import oe
+import oe.path
+import bb.utils
+import glob
+import re
+import shutil
+import tempfile
+from contextlib import contextmanager
+from oeqa.utils.ftools import write_file
+
+
+class Signing(OESelftestTestCase):
+
+ gpg_dir = ""
+ pub_key_path = ""
+ secret_key_path = ""
+
+ def setup_gpg(self):
+ bitbake('gnupg-native -c addto_recipe_sysroot')
+
+ self.gpg_dir = tempfile.mkdtemp(prefix="oeqa-signing-")
+ self.track_for_cleanup(self.gpg_dir)
+
+ self.pub_key_path = os.path.join(self.testlayer_path, 'files', 'signing', "key.pub")
+ self.secret_key_path = os.path.join(self.testlayer_path, 'files', 'signing', "key.secret")
+
+ nsysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "gnupg-native")
+
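+ # Import the test public and secret keys into the temporary GPG homedir
+ # using gpg from the gnupg-native recipe sysroot.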
+ runCmd('gpg --agent-program=`which gpg-agent`\|--auto-expand-secmem --batch --homedir %s --import %s %s' % (self.gpg_dir, self.pub_key_path, self.secret_key_path), native_sysroot=nsysroot)
+ return nsysroot + get_bb_var("bindir_native")
+
+
+ @contextmanager
+ def create_new_builddir(self, builddir, newbuilddir):
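+ # Clone conf/ and cache/ into a fresh build directory, point every
+ # environment variable that references the old build directory at the
+ # new one, and chdir there for the duration of the context; the original
+ # environment and working directory are restored afterwards.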
+ bb.utils.mkdirhier(newbuilddir)
+ oe.path.copytree(builddir + "/conf", newbuilddir + "/conf")
+ oe.path.copytree(builddir + "/cache", newbuilddir + "/cache")
+
+ origenv = os.environ.copy()
+
+ for e in os.environ:
+ if builddir in os.environ[e]:
+ os.environ[e] = os.environ[e].replace(builddir, newbuilddir)
+
+ os.chdir(newbuilddir)
+ try:
+ yield
+ finally:
+ for e in origenv:
+ os.environ[e] = origenv[e]
+ os.chdir(builddir)
+
+ def test_signing_packages(self):
+ """
+ Summary: Test that packages can be signed in the package feed
+ Expected: Package should be signed with the correct key
+ Expected: Images can be created from signed packages
+ Product: oe-core
+ Author: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
+ Author: Alexander Kanavin <alex.kanavin@gmail.com>
+ AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
+ """
+ import oe.packagedata
+
+ self.setup_gpg()
+
+ package_classes = get_bb_var('PACKAGE_CLASSES')
+ if 'package_rpm' not in package_classes:
+ self.skipTest('This test requires RPM Packaging.')
+
+ test_recipe = 'ed'
+
+ feature = 'INHERIT += "sign_rpm"\n'
+ feature += 'RPM_GPG_PASSPHRASE = "test123"\n'
+ feature += 'RPM_GPG_NAME = "testuser"\n'
+ feature += 'GPG_PATH = "%s"\n' % self.gpg_dir
+
+ self.write_config(feature)
+
+ bitbake('-c clean %s' % test_recipe)
+ bitbake('-f -c package_write_rpm %s' % test_recipe)
+
+ self.add_command_to_tearDown('bitbake -c clean %s' % test_recipe)
+
+ needed_vars = ['PKGDATA_DIR', 'DEPLOY_DIR_RPM', 'PACKAGE_ARCH', 'STAGING_BINDIR_NATIVE']
+ bb_vars = get_bb_vars(needed_vars, test_recipe)
+ pkgdatadir = bb_vars['PKGDATA_DIR']
+ pkgdata = oe.packagedata.read_pkgdatafile(pkgdatadir + "/runtime/ed")
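+ # Reconstruct the deployed rpm file name from pkgdata (PN, optional
+ # PKGE, PKGV and PKGR).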
+ if 'PKGE' in pkgdata:
+ pf = pkgdata['PN'] + "-" + pkgdata['PKGE'] + pkgdata['PKGV'] + '-' + pkgdata['PKGR']
+ else:
+ pf = pkgdata['PN'] + "-" + pkgdata['PKGV'] + '-' + pkgdata['PKGR']
+ deploy_dir_rpm = bb_vars['DEPLOY_DIR_RPM']
+ package_arch = bb_vars['PACKAGE_ARCH'].replace('-', '_')
+ staging_bindir_native = bb_vars['STAGING_BINDIR_NATIVE']
+
+ pkg_deploy = os.path.join(deploy_dir_rpm, package_arch, '.'.join((pf, package_arch, 'rpm')))
+
+ # Use a temporary rpmdb
+ rpmdb = tempfile.mkdtemp(prefix='oeqa-rpmdb')
+
+ runCmd('%s/rpmkeys --define "_dbpath %s" --import %s' %
+ (staging_bindir_native, rpmdb, self.pub_key_path))
+
+ ret = runCmd('%s/rpmkeys --define "_dbpath %s" --checksig %s' %
+ (staging_bindir_native, rpmdb, pkg_deploy))
+ # tmp/deploy/rpm/i586/ed-1.9-r0.i586.rpm: rsa sha1 md5 OK
+ self.assertIn('digests signatures OK', ret.output, 'Package signed incorrectly.')
+ shutil.rmtree(rpmdb)
+
+ #Check that an image can be built from signed packages
+ self.add_command_to_tearDown('bitbake -c clean core-image-minimal')
+ bitbake('-c clean core-image-minimal')
+ bitbake('core-image-minimal')
+
+
+ def test_signing_sstate_archive(self):
+ """
+ Summary: Test that sstate archives can be signed
+ Expected: Package should be signed with the correct key
+ Product: oe-core
+ Author: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
+ AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
+ """
+
+ test_recipe = 'ed'
+
+ # Since we need gpg but we can't use gpg-native for sstate signatures, we
+ # build gpg-native in our original builddir then run the tests in a second one.
+ builddir = os.environ.get('BUILDDIR') + "-testsign"
+ sstatedir = os.path.join(builddir, 'test-sstate')
+
+ nsysroot = self.setup_gpg()
+
+ feature = 'SSTATE_SIG_KEY ?= "testuser"\n'
+ feature += 'SSTATE_SIG_PASSPHRASE ?= "test123"\n'
+ feature += 'SSTATE_VERIFY_SIG ?= "1"\n'
+ feature += 'GPG_PATH = "%s"\n' % self.gpg_dir
+ feature += 'SSTATE_DIR = "%s"\n' % sstatedir
+ # Any mirror might have partial sstate without .sig files, triggering failures
+ feature += 'SSTATE_MIRRORS_forcevariable = ""\n'
+
+ self.write_config(feature)
+
+ with self.create_new_builddir(os.environ['BUILDDIR'], builddir):
+
+ os.environ["PATH"] = nsysroot + ":" + os.environ["PATH"]
+ self.add_command_to_tearDown('bitbake -c clean %s' % test_recipe)
+ self.add_command_to_tearDown('rm -rf %s' % sstatedir)
+ self.add_command_to_tearDown('rm -rf %s' % builddir)
+
+ bitbake('-c clean %s' % test_recipe)
+ bitbake('-c populate_lic %s' % test_recipe)
+
+ recipe_sig = glob.glob(sstatedir + '/*/*:ed:*_populate_lic.tgz.sig')
+ recipe_tgz = glob.glob(sstatedir + '/*/*:ed:*_populate_lic.tgz')
+
+ self.assertEqual(len(recipe_sig), 1, 'Failed to find .sig file.')
+ self.assertEqual(len(recipe_tgz), 1, 'Failed to find .tgz file.')
+
+ ret = runCmd('gpg --homedir %s --verify %s %s' % (self.gpg_dir, recipe_sig[0], recipe_tgz[0]))
+ # gpg: Signature made Thu 22 Oct 2015 01:45:09 PM EEST using RSA key ID 61EEFB30
+ # gpg: Good signature from "testuser (nocomment) <testuser@email.com>"
+ self.assertIn('gpg: Good signature from', ret.output, 'Package signed incorrectly.')
+
+
+class LockedSignatures(OESelftestTestCase):
+
+ def test_locked_signatures(self):
+ """
+ Summary: Test locked signature mechanism
+ Expected: Locked signatures will prevent task to run
+ Product: oe-core
+ Author: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
+ AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
+ """
+
+ import uuid
+
+ test_recipe = 'ed'
+ locked_sigs_file = 'locked-sigs.inc'
+
+ bitbake(test_recipe)
+ # Generate locked sigs include file
+ bitbake('-S none %s' % test_recipe)
+
+ feature = 'require %s\n' % locked_sigs_file
+ feature += 'SIGGEN_LOCKEDSIGS_TASKSIG_CHECK = "warn"\n'
+ self.write_config(feature)
+
+ # Build a locked recipe
+ bitbake(test_recipe)
+
+ templayerdir = tempfile.mkdtemp(prefix='signingqa')
+ create_temp_layer(templayerdir, 'selftestsigning')
+ runCmd('bitbake-layers add-layer %s' % templayerdir)
+
+ # Make a change that should cause the locked task signature to change
+ # Use uuid so the hash equivalence server isn't triggered
+ recipe_append_file = test_recipe + '_' + get_bb_var('PV', test_recipe) + '.bbappend'
+ recipe_append_path = os.path.join(templayerdir, 'recipes-test', test_recipe, recipe_append_file)
+ feature = 'SUMMARY_${PN} = "test locked signature%s"\n' % uuid.uuid4()
+
+ os.mkdir(os.path.join(templayerdir, 'recipes-test'))
+ os.mkdir(os.path.join(templayerdir, 'recipes-test', test_recipe))
+ write_file(recipe_append_path, feature)
+
+ self.add_command_to_tearDown('bitbake-layers remove-layer %s' % templayerdir)
+ self.add_command_to_tearDown('rm -f %s' % os.path.join(self.builddir, locked_sigs_file))
+ self.add_command_to_tearDown('rm -rf %s' % templayerdir)
+
+ # Build the recipe again
+ ret = bitbake(test_recipe)
+
+ # Verify you get the warning and that the real task *isn't* run (i.e. the locked signature has worked)
+ patt = r'The %s:do_package sig is computed to be \S+, but the sig is locked to \S+ in SIGGEN_LOCKEDSIGS\S+' % test_recipe
+ found_warn = re.search(patt, ret.output)
+
+ self.assertIsNotNone(found_warn, "Didn't find the expected warning message. Output: %s" % ret.output)
diff --git a/meta/lib/oeqa/selftest/cases/sstate.py b/meta/lib/oeqa/selftest/cases/sstate.py
new file mode 100644
index 0000000000..410dec64fc
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/sstate.py
@@ -0,0 +1,67 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import datetime
+import unittest
+import os
+import re
+import shutil
+
+import oeqa.utils.ftools as ftools
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import runCmd, bitbake, get_bb_vars, get_test_layer
+
+
+class SStateBase(OESelftestTestCase):
+
+ def setUpLocal(self):
+ super(SStateBase, self).setUpLocal()
+ self.temp_sstate_location = None
+ needed_vars = ['SSTATE_DIR', 'NATIVELSBSTRING', 'TCLIBC', 'TUNE_ARCH',
+ 'TOPDIR', 'TARGET_VENDOR', 'TARGET_OS']
+ bb_vars = get_bb_vars(needed_vars)
+ self.sstate_path = bb_vars['SSTATE_DIR']
+ self.hostdistro = bb_vars['NATIVELSBSTRING']
+ self.tclibc = bb_vars['TCLIBC']
+ self.tune_arch = bb_vars['TUNE_ARCH']
+ self.topdir = bb_vars['TOPDIR']
+ self.target_vendor = bb_vars['TARGET_VENDOR']
+ self.target_os = bb_vars['TARGET_OS']
+ self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro)
+
+ # Creates a special sstate configuration with the option to add sstate mirrors
+ def config_sstate(self, temp_sstate_location=False, add_local_mirrors=[]):
+ self.temp_sstate_location = temp_sstate_location
+
+ if self.temp_sstate_location:
+ temp_sstate_path = os.path.join(self.builddir, "temp_sstate_%s" % datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
+ config_temp_sstate = "SSTATE_DIR = \"%s\"" % temp_sstate_path
+ self.append_config(config_temp_sstate)
+ self.track_for_cleanup(temp_sstate_path)
+ bb_vars = get_bb_vars(['SSTATE_DIR', 'NATIVELSBSTRING'])
+ self.sstate_path = bb_vars['SSTATE_DIR']
+ self.hostdistro = bb_vars['NATIVELSBSTRING']
+ self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro)
+
+ if add_local_mirrors:
+ config_set_sstate_if_not_set = 'SSTATE_MIRRORS ?= ""'
+ self.append_config(config_set_sstate_if_not_set)
+ for local_mirror in add_local_mirrors:
+ self.assertFalse(os.path.join(local_mirror) == os.path.join(self.sstate_path), msg='Cannot add the current sstate path as a sstate mirror')
+ config_sstate_mirror = "SSTATE_MIRRORS += \"file://.* file:///%s/PATH\"" % local_mirror
+ self.append_config(config_sstate_mirror)
+
+ # Returns a list containing sstate files
+ def search_sstate(self, filename_regex, distro_specific=True, distro_nonspecific=True):
+ result = []
+ for root, dirs, files in os.walk(self.sstate_path):
+ if distro_specific and re.search("%s/[a-z0-9]{2}$" % self.hostdistro, root):
+ for f in files:
+ if re.search(filename_regex, f):
+ result.append(f)
+ if distro_nonspecific and re.search("%s/[a-z0-9]{2}$" % self.sstate_path, root):
+ for f in files:
+ if re.search(filename_regex, f):
+ result.append(f)
+ return result
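SStateBase above is meant to be subclassed: a test typically redirects SSTATE_DIR to a throwaway location with config_sstate() and then inspects what landed there with search_sstate(). A minimal usage sketch, assuming the helpers behave as defined above and using m4-native purely as an illustrative target:

    from oeqa.utils.commands import bitbake
    from oeqa.selftest.cases.sstate import SStateBase

    class SStateUsageSketch(SStateBase):
        def test_m4_native_creates_sstate(self):
            # Build into a temporary sstate cache so the results are isolated
            self.config_sstate(temp_sstate_location=True)
            bitbake('m4-native')
            # Look for any m4-native sstate archive, distro-specific or not
            found = self.search_sstate(r'm4-native.*\.tgz$')
            self.assertTrue(found, 'No sstate archives produced for m4-native')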
diff --git a/meta/lib/oeqa/selftest/cases/sstatetests.py b/meta/lib/oeqa/selftest/cases/sstatetests.py
new file mode 100644
index 0000000000..6757a0ec68
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/sstatetests.py
@@ -0,0 +1,532 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import shutil
+import glob
+import subprocess
+import tempfile
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_test_layer, create_temp_layer
+from oeqa.selftest.cases.sstate import SStateBase
+
+import bb.siggen
+
+class SStateTests(SStateBase):
+ def test_autorev_sstate_works(self):
+ # Test that a git repository which changes is correctly handled by SRCREV = ${AUTOREV}
+ # when PV does not contain SRCPV
+
+ tempdir = tempfile.mkdtemp(prefix='oeqa')
+ self.track_for_cleanup(tempdir)
+ create_temp_layer(tempdir, 'selftestrecipetool')
+ self.add_command_to_tearDown('bitbake-layers remove-layer %s' % tempdir)
+ runCmd('bitbake-layers add-layer %s' % tempdir)
+
+ # Use dbus-wait as a local git repo we can add a commit to between two builds
+ pn = 'dbus-wait'
+ srcrev = '6cc6077a36fe2648a5f993fe7c16c9632f946517'
+ url = 'git://git.yoctoproject.org/dbus-wait'
+ result = runCmd('git clone %s noname' % url, cwd=tempdir)
+ srcdir = os.path.join(tempdir, 'noname')
+ result = runCmd('git reset --hard %s' % srcrev, cwd=srcdir)
+ self.assertTrue(os.path.isfile(os.path.join(srcdir, 'configure.ac')), 'Unable to find configure script in source directory')
+
+ recipefile = os.path.join(tempdir, "recipes-test", "dbus-wait-test", 'dbus-wait-test_git.bb')
+ os.makedirs(os.path.dirname(recipefile))
+ srcuri = 'git://' + srcdir + ';protocol=file'
+ result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri])
+ self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output)
+
+ with open(recipefile, 'a') as f:
+ f.write('SRCREV = "${AUTOREV}"\n')
+ f.write('PV = "1.0"\n')
+
+ bitbake("dbus-wait-test -c fetch")
+ with open(os.path.join(srcdir, "bar.txt"), "w") as f:
+ f.write("foo")
+ result = runCmd('git add bar.txt; git commit -asm "add bar"', cwd=srcdir)
+ bitbake("dbus-wait-test -c unpack")
+
+
+ # Test sstate files creation and their location
+ def run_test_sstate_creation(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True, should_pass=True):
+ self.config_sstate(temp_sstate_location, [self.sstate_path])
+
+ if self.temp_sstate_location:
+ bitbake(['-cclean'] + targets)
+ else:
+ bitbake(['-ccleansstate'] + targets)
+
+ bitbake(targets)
+ file_tracker = []
+ results = self.search_sstate('|'.join(map(str, targets)), distro_specific, distro_nonspecific)
+ if distro_nonspecific:
+ for r in results:
+ if r.endswith(("_populate_lic.tgz", "_populate_lic.tgz.siginfo", "_fetch.tgz.siginfo", "_unpack.tgz.siginfo", "_patch.tgz.siginfo")):
+ continue
+ file_tracker.append(r)
+ else:
+ file_tracker = results
+
+ if should_pass:
+ self.assertTrue(file_tracker , msg="Could not find sstate files for: %s" % ', '.join(map(str, targets)))
+ else:
+ self.assertTrue(not file_tracker , msg="Found sstate files in the wrong place for: %s (found %s)" % (', '.join(map(str, targets)), str(file_tracker)))
+
+ def test_sstate_creation_distro_specific_pass(self):
+ self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
+
+ def test_sstate_creation_distro_specific_fail(self):
+ self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True, should_pass=False)
+
+ def test_sstate_creation_distro_nonspecific_pass(self):
+ self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
+
+ def test_sstate_creation_distro_nonspecific_fail(self):
+ self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True, should_pass=False)
+
+ # Test the sstate files deletion part of the do_cleansstate task
+ def run_test_cleansstate_task(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True):
+ self.config_sstate(temp_sstate_location, [self.sstate_path])
+
+ bitbake(['-ccleansstate'] + targets)
+
+ bitbake(targets)
+ tgz_created = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific, distro_nonspecific)
+ self.assertTrue(tgz_created, msg="Could not find sstate .tgz files for: %s (%s)" % (', '.join(map(str, targets)), str(tgz_created)))
+
+ siginfo_created = self.search_sstate('|'.join(map(str, [s + r'.*?\.siginfo$' for s in targets])), distro_specific, distro_nonspecific)
+ self.assertTrue(siginfo_created, msg="Could not find sstate .siginfo files for: %s (%s)" % (', '.join(map(str, targets)), str(siginfo_created)))
+
+ bitbake(['-ccleansstate'] + targets)
+ tgz_removed = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific, distro_nonspecific)
+ self.assertTrue(not tgz_removed, msg="do_cleansstate didn't remove .tgz sstate files for: %s (%s)" % (', '.join(map(str, targets)), str(tgz_removed)))
+
+ def test_cleansstate_task_distro_specific_nonspecific(self):
+ targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native']
+ targets.append('linux-libc-headers')
+ self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True)
+
+ def test_cleansstate_task_distro_nonspecific(self):
+ self.run_test_cleansstate_task(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
+
+ def test_cleansstate_task_distro_specific(self):
+ targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native']
+ targets.append('linux-libc-headers')
+ self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
+
+
+ # Test rebuilding of distro-specific sstate files
+ def run_test_rebuild_distro_specific_sstate(self, targets, temp_sstate_location=True):
+ self.config_sstate(temp_sstate_location, [self.sstate_path])
+
+ bitbake(['-ccleansstate'] + targets)
+
+ bitbake(targets)
+ results = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific=False, distro_nonspecific=True)
+ filtered_results = []
+ for r in results:
+ if r.endswith(("_populate_lic.tgz", "_populate_lic.tgz.siginfo")):
+ continue
+ filtered_results.append(r)
+ self.assertTrue(filtered_results == [], msg="Found distro non-specific sstate for: %s (%s)" % (', '.join(map(str, targets)), str(filtered_results)))
+ file_tracker_1 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific=True, distro_nonspecific=False)
+ self.assertTrue(len(file_tracker_1) >= len(targets), msg = "Not all sstate files were created for: %s" % ', '.join(map(str, targets)))
+
+ self.track_for_cleanup(self.distro_specific_sstate + "_old")
+ shutil.copytree(self.distro_specific_sstate, self.distro_specific_sstate + "_old")
+ shutil.rmtree(self.distro_specific_sstate)
+
+ bitbake(['-cclean'] + targets)
+ bitbake(targets)
+ file_tracker_2 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific=True, distro_nonspecific=False)
+ self.assertTrue(len(file_tracker_2) >= len(targets), msg = "Not all sstate files were created for: %s" % ', '.join(map(str, targets)))
+
+ not_recreated = [x for x in file_tracker_1 if x not in file_tracker_2]
+ self.assertTrue(not_recreated == [], msg="The following sstate files were not recreated: %s" % ', '.join(map(str, not_recreated)))
+
+ created_once = [x for x in file_tracker_2 if x not in file_tracker_1]
+ self.assertTrue(created_once == [], msg="The following sstate files were created only in the second run: %s" % ', '.join(map(str, created_once)))
+
+ def test_rebuild_distro_specific_sstate_cross_native_targets(self):
+ self.run_test_rebuild_distro_specific_sstate(['binutils-cross-' + self.tune_arch, 'binutils-native'], temp_sstate_location=True)
+
+ def test_rebuild_distro_specific_sstate_cross_target(self):
+ self.run_test_rebuild_distro_specific_sstate(['binutils-cross-' + self.tune_arch], temp_sstate_location=True)
+
+ def test_rebuild_distro_specific_sstate_native_target(self):
+ self.run_test_rebuild_distro_specific_sstate(['binutils-native'], temp_sstate_location=True)
+
+
+ # Test the sstate-cache-management script. Each element in the global_config list is used with the corresponding element in the target_config list
+ # global_config elements are expected not to generate any sstate files that would be removed by sstate-cache-management.sh (e.g. changing the value of MACHINE)
+ def run_test_sstate_cache_management_script(self, target, global_config=[''], target_config=[''], ignore_patterns=[]):
+ self.assertTrue(global_config)
+ self.assertTrue(target_config)
+ self.assertTrue(len(global_config) == len(target_config), msg='Lists global_config and target_config should have the same number of elements')
+ self.config_sstate(temp_sstate_location=True, add_local_mirrors=[self.sstate_path])
+
+ # If buildhistory is enabled, we need to disable version-going-backwards
+ # QA checks for this test. It may report errors otherwise.
+ self.append_config('ERROR_QA_remove = "version-going-backwards"')
+
+ # For now this only checks whether random sstate tasks are handled correctly as a group.
+ # In the future we should add control over what tasks we check for.
+
+ sstate_archs_list = []
+ expected_remaining_sstate = []
+ for idx in range(len(target_config)):
+ self.append_config(global_config[idx])
+ self.append_recipeinc(target, target_config[idx])
+ sstate_arch = get_bb_var('SSTATE_PKGARCH', target)
+ if not sstate_arch in sstate_archs_list:
+ sstate_archs_list.append(sstate_arch)
+ if target_config[idx] == target_config[-1]:
+ target_sstate_before_build = self.search_sstate(target + r'.*?\.tgz$')
+ bitbake("-cclean %s" % target)
+ result = bitbake(target, ignore_status=True)
+ if target_config[idx] == target_config[-1]:
+ target_sstate_after_build = self.search_sstate(target + r'.*?\.tgz$')
+ expected_remaining_sstate += [x for x in target_sstate_after_build if x not in target_sstate_before_build if not any(pattern in x for pattern in ignore_patterns)]
+ self.remove_config(global_config[idx])
+ self.remove_recipeinc(target, target_config[idx])
+ self.assertEqual(result.status, 0, msg = "build of %s failed with %s" % (target, result.output))
+
+ runCmd("sstate-cache-management.sh -y --cache-dir=%s --remove-duplicated --extra-archs=%s" % (self.sstate_path, ','.join(map(str, sstate_archs_list))))
+ actual_remaining_sstate = [x for x in self.search_sstate(target + r'.*?\.tgz$') if not any(pattern in x for pattern in ignore_patterns)]
+
+ actual_not_expected = [x for x in actual_remaining_sstate if x not in expected_remaining_sstate]
+ self.assertFalse(actual_not_expected, msg="Files should have been removed but were not: %s" % ', '.join(map(str, actual_not_expected)))
+ expected_not_actual = [x for x in expected_remaining_sstate if x not in actual_remaining_sstate]
+ self.assertFalse(expected_not_actual, msg="Extra files were removed: %s" % ', '.join(map(str, expected_not_actual)))
+
+ def test_sstate_cache_management_script_using_pr_1(self):
+ global_config = []
+ target_config = []
+ global_config.append('')
+ target_config.append('PR = "0"')
+ self.run_test_sstate_cache_management_script('m4', global_config, target_config, ignore_patterns=['populate_lic'])
+
+ def test_sstate_cache_management_script_using_pr_2(self):
+ global_config = []
+ target_config = []
+ global_config.append('')
+ target_config.append('PR = "0"')
+ global_config.append('')
+ target_config.append('PR = "1"')
+ self.run_test_sstate_cache_management_script('m4', global_config, target_config, ignore_patterns=['populate_lic'])
+
+ def test_sstate_cache_management_script_using_pr_3(self):
+ global_config = []
+ target_config = []
+ global_config.append('MACHINE = "qemux86-64"')
+ target_config.append('PR = "0"')
+ global_config.append(global_config[0])
+ target_config.append('PR = "1"')
+ global_config.append('MACHINE = "qemux86"')
+ target_config.append('PR = "1"')
+ self.run_test_sstate_cache_management_script('m4', global_config, target_config, ignore_patterns=['populate_lic'])
+
+ def test_sstate_cache_management_script_using_machine(self):
+ global_config = []
+ target_config = []
+ global_config.append('MACHINE = "qemux86-64"')
+ target_config.append('')
+ global_config.append('MACHINE = "qemux86"')
+ target_config.append('')
+ self.run_test_sstate_cache_management_script('m4', global_config, target_config, ignore_patterns=['populate_lic'])
+
+ def test_sstate_32_64_same_hash(self):
+ """
+ The sstate checksums for both native and target should not vary whether
+ they're built on a 32-bit or 64-bit system. Rather than requiring two
+ different build machines and running builds, override the variables normally
+ derived from uname() and check using bitbake -S.
+ """
+
+ self.write_config("""
+MACHINE = "qemux86"
+TMPDIR = "${TOPDIR}/tmp-sstatesamehash"
+TCLIBCAPPEND = ""
+BUILD_ARCH = "x86_64"
+BUILD_OS = "linux"
+SDKMACHINE = "x86_64"
+PACKAGE_CLASSES = "package_rpm package_ipk package_deb"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
+""")
+ self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
+ bitbake("core-image-sato -S none")
+ self.write_config("""
+MACHINE = "qemux86"
+TMPDIR = "${TOPDIR}/tmp-sstatesamehash2"
+TCLIBCAPPEND = ""
+BUILD_ARCH = "i686"
+BUILD_OS = "linux"
+SDKMACHINE = "i686"
+PACKAGE_CLASSES = "package_rpm package_ipk package_deb"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
+""")
+ self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
+ bitbake("core-image-sato -S none")
+
+ def get_files(d):
+ f = []
+ for root, dirs, files in os.walk(d):
+ if "core-image-sato" in root:
+ # SDKMACHINE changing will change
+ # do_rootfs/do_testimage/do_build stamps of images which
+ # is safe to ignore.
+ continue
+ f.extend(os.path.join(root, name) for name in files)
+ return f
+ files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps/")
+ files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps/")
+ files2 = [x.replace("tmp-sstatesamehash2", "tmp-sstatesamehash").replace("i686-linux", "x86_64-linux").replace("i686" + self.target_vendor + "-linux", "x86_64" + self.target_vendor + "-linux", ) for x in files2]
+ self.maxDiff = None
+ self.assertCountEqual(files1, files2)
+
+
+ def test_sstate_nativelsbstring_same_hash(self):
+ """
+ The sstate checksums should be independent of whichever NATIVELSBSTRING is
+ detected. Rather than requiring two different build machines and running
+ builds, override the variables manually and check using bitbake -S.
+ """
+
+ self.write_config("""
+TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
+TCLIBCAPPEND = \"\"
+NATIVELSBSTRING = \"DistroA\"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
+""")
+ self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
+ bitbake("core-image-sato -S none")
+ self.write_config("""
+TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
+TCLIBCAPPEND = \"\"
+NATIVELSBSTRING = \"DistroB\"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
+""")
+ self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
+ bitbake("core-image-sato -S none")
+
+ def get_files(d):
+ f = []
+ for root, dirs, files in os.walk(d):
+ f.extend(os.path.join(root, name) for name in files)
+ return f
+ files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps/")
+ files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps/")
+ files2 = [x.replace("tmp-sstatesamehash2", "tmp-sstatesamehash") for x in files2]
+ self.maxDiff = None
+ self.assertCountEqual(files1, files2)
+
+ def test_sstate_allarch_samesigs(self):
+ """
+ The sstate checksums of allarch packages should be independent of whichever
+ MACHINE is set. Check this using bitbake -S.
+ Also, rather than duplicate the test, check nativesdk stamps are the same between
+ the two MACHINE values.
+ """
+
+ configA = """
+TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
+TCLIBCAPPEND = \"\"
+MACHINE = \"qemux86-64\"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
+"""
+ configB = """
+TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
+TCLIBCAPPEND = \"\"
+MACHINE = \"qemuarm\"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
+"""
+ self.sstate_allarch_samesigs(configA, configB)
+
+ def test_sstate_nativesdk_samesigs_multilib(self):
+ """
+ Check that nativesdk stamps are the same between the two MACHINE values.
+ """
+
+ configA = """
+TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
+TCLIBCAPPEND = \"\"
+MACHINE = \"qemux86-64\"
+require conf/multilib.conf
+MULTILIBS = \"multilib:lib32\"
+DEFAULTTUNE_virtclass-multilib-lib32 = \"x86\"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
+"""
+ configB = """
+TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
+TCLIBCAPPEND = \"\"
+MACHINE = \"qemuarm\"
+require conf/multilib.conf
+MULTILIBS = \"\"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
+"""
+ self.sstate_allarch_samesigs(configA, configB)
+
+ def sstate_allarch_samesigs(self, configA, configB):
+
+ self.write_config(configA)
+ self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
+ bitbake("world meta-toolchain -S none")
+ self.write_config(configB)
+ self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
+ bitbake("world meta-toolchain -S none")
+
+ def get_files(d):
+ f = {}
+ for root, dirs, files in os.walk(d):
+ for name in files:
+ if "meta-environment" in root or "cross-canadian" in root:
+ continue
+ if "do_build" not in name:
+ # 1.4.1+gitAUTOINC+302fca9f4c-r0.do_package_write_ipk.sigdata.f3a2a38697da743f0dbed8b56aafcf79
+ (_, task, _, shash) = name.rsplit(".", 3)
+ f[os.path.join(os.path.basename(root), task)] = shash
+ return f
+
+ nativesdkdir = os.path.basename(glob.glob(self.topdir + "/tmp-sstatesamehash/stamps/*-nativesdk*-linux")[0])
+
+ files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps/" + nativesdkdir)
+ files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps/" + nativesdkdir)
+ self.maxDiff = None
+ self.assertEqual(files1, files2)
+
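The stamp file name format shown in the comment inside get_files() above (version string, task name, "sigdata" and the signature hash joined by dots) is what the rsplit() call relies on. A small standalone sketch of that parse, reusing the example file name from the comment:

    # Example stamp file name quoted in the comment above
    name = "1.4.1+gitAUTOINC+302fca9f4c-r0.do_package_write_ipk.sigdata.f3a2a38697da743f0dbed8b56aafcf79"
    # Split from the right so the dots inside the version string are left alone
    version, task, kind, sighash = name.rsplit(".", 3)
    assert task == "do_package_write_ipk"
    assert kind == "sigdata"
    print(version, task, sighash)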
+ def test_sstate_sametune_samesigs(self):
+ """
+ The sstate checksums of two identical machines (using the same tune) should be the
+ same, apart from changes within the machine specific stamps directory. We use the
+ qemux86copy machine to test this. Also include multilibs in the test.
+ """
+
+ self.write_config("""
+TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
+TCLIBCAPPEND = \"\"
+MACHINE = \"qemux86\"
+require conf/multilib.conf
+MULTILIBS = "multilib:lib32"
+DEFAULTTUNE_virtclass-multilib-lib32 = "x86"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
+""")
+ self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
+ bitbake("world meta-toolchain -S none")
+ self.write_config("""
+TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
+TCLIBCAPPEND = \"\"
+MACHINE = \"qemux86copy\"
+require conf/multilib.conf
+MULTILIBS = "multilib:lib32"
+DEFAULTTUNE_virtclass-multilib-lib32 = "x86"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
+""")
+ self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
+ bitbake("world meta-toolchain -S none")
+
+ def get_files(d):
+ f = []
+ for root, dirs, files in os.walk(d):
+ for name in files:
+ if "meta-environment" in root or "cross-canadian" in root:
+ continue
+ if "qemux86copy-" in root or "qemux86-" in root:
+ continue
+ if "do_build" not in name and "do_populate_sdk" not in name:
+ f.append(os.path.join(root, name))
+ return f
+ files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps")
+ files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps")
+ files2 = [x.replace("tmp-sstatesamehash2", "tmp-sstatesamehash") for x in files2]
+ self.maxDiff = None
+ self.assertCountEqual(files1, files2)
+
+
+ def test_sstate_noop_samesigs(self):
+ """
+ The sstate checksums of two builds should remain the same even when these
+ variables are changed or these classes are inherited or removed.
+ """
+
+ self.write_config("""
+TMPDIR = "${TOPDIR}/tmp-sstatesamehash"
+TCLIBCAPPEND = ""
+BB_NUMBER_THREADS = "${@oe.utils.cpu_count()}"
+PARALLEL_MAKE = "-j 1"
+DL_DIR = "${TOPDIR}/download1"
+TIME = "111111"
+DATE = "20161111"
+INHERIT_remove = "buildstats-summary buildhistory uninative"
+http_proxy = ""
+BB_SIGNATURE_HANDLER = "OEBasicHash"
+""")
+ self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
+ self.track_for_cleanup(self.topdir + "/download1")
+ bitbake("world meta-toolchain -S none")
+ self.write_config("""
+TMPDIR = "${TOPDIR}/tmp-sstatesamehash2"
+TCLIBCAPPEND = ""
+BB_NUMBER_THREADS = "${@oe.utils.cpu_count()+1}"
+PARALLEL_MAKE = "-j 2"
+DL_DIR = "${TOPDIR}/download2"
+TIME = "222222"
+DATE = "20161212"
+# Always remove uninative as we're changing proxies
+INHERIT_remove = "uninative"
+INHERIT += "buildstats-summary buildhistory"
+http_proxy = "http://example.com/"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
+""")
+ self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
+ self.track_for_cleanup(self.topdir + "/download2")
+ bitbake("world meta-toolchain -S none")
+
+ def get_files(d):
+ f = {}
+ for root, dirs, files in os.walk(d):
+ for name in files:
+ name, shash = name.rsplit('.', 1)
+ # Extract just the machine and recipe name
+ base = os.sep.join(root.rsplit(os.sep, 2)[-2:] + [name])
+ f[base] = shash
+ return f
+
+ def compare_sigfiles(files, files1, files2, compare=False):
+ for k in files:
+ if k in files1 and k in files2:
+ print("%s differs:" % k)
+ if compare:
+ sigdatafile1 = self.topdir + "/tmp-sstatesamehash/stamps/" + k + "." + files1[k]
+ sigdatafile2 = self.topdir + "/tmp-sstatesamehash2/stamps/" + k + "." + files2[k]
+ output = bb.siggen.compare_sigfiles(sigdatafile1, sigdatafile2)
+ if output:
+ print('\n'.join(output))
+ elif k in files1 and k not in files2:
+ print("%s in files1" % k)
+ elif k not in files1 and k in files2:
+ print("%s in files2" % k)
+ else:
+ assert "shouldn't reach here"
+
+ files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps/")
+ files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps/")
+ # Remove items that are identical in both sets
+ for k,v in files1.items() & files2.items():
+ del files1[k]
+ del files2[k]
+ if not files1 and not files2:
+ # No changes, so we're done
+ return
+
+ files = list(files1.keys() | files2.keys())
+ # this is an expensive computation, thus just compare the first 'max_sigfiles_to_compare' files
+ max_sigfiles_to_compare = 20
+ first, rest = files[:max_sigfiles_to_compare], files[max_sigfiles_to_compare:]
+ compare_sigfiles(first, files1, files2, compare=True)
+ compare_sigfiles(rest, files1, files2, compare=False)
+
+ self.fail("sstate hashes not identical.")
diff --git a/meta/lib/oeqa/selftest/cases/tinfoil.py b/meta/lib/oeqa/selftest/cases/tinfoil.py
new file mode 100644
index 0000000000..42a1b6b4f4
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/tinfoil.py
@@ -0,0 +1,224 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import re
+import time
+import logging
+import bb.tinfoil
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import runCmd
+
+class TinfoilTests(OESelftestTestCase):
+ """ Basic tests for the tinfoil API """
+
+ def test_getvar(self):
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(True)
+ machine = tinfoil.config_data.getVar('MACHINE')
+ if not machine:
+ self.fail('Unable to get MACHINE value - returned %s' % machine)
+
+ def test_expand(self):
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(True)
+ expr = '${@os.getpid()}'
+ pid = tinfoil.config_data.expand(expr)
+ if not pid:
+ self.fail('Unable to expand "%s" - returned %s' % (expr, pid))
+
+ def test_getvar_bb_origenv(self):
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(True)
+ origenv = tinfoil.config_data.getVar('BB_ORIGENV', False)
+ if not origenv:
+ self.fail('Unable to get BB_ORIGENV value - returned %s' % origenv)
+ self.assertEqual(origenv.getVar('HOME', False), os.environ['HOME'])
+
+ def test_parse_recipe(self):
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=False, quiet=2)
+ testrecipe = 'mdadm'
+ best = tinfoil.find_best_provider(testrecipe)
+ if not best:
+ self.fail('Unable to find recipe providing %s' % testrecipe)
+ rd = tinfoil.parse_recipe_file(best[3])
+ self.assertEqual(testrecipe, rd.getVar('PN'))
+
+ def test_parse_recipe_copy_expand(self):
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=False, quiet=2)
+ testrecipe = 'mdadm'
+ best = tinfoil.find_best_provider(testrecipe)
+ if not best:
+ self.fail('Unable to find recipe providing %s' % testrecipe)
+ rd = tinfoil.parse_recipe_file(best[3])
+ # Check we can get variable values
+ self.assertEqual(testrecipe, rd.getVar('PN'))
+ # Check that expanding a value that includes a variable reference works
+ self.assertEqual(testrecipe, rd.getVar('BPN'))
+ # Now check that changing the referenced variable's value in a copy gives that
+ # value when expanding
+ localdata = bb.data.createCopy(rd)
+ localdata.setVar('PN', 'hello')
+ self.assertEqual('hello', localdata.getVar('BPN'))
+
+ def test_parse_recipe_initial_datastore(self):
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=False, quiet=2)
+ testrecipe = 'mdadm'
+ best = tinfoil.find_best_provider(testrecipe)
+ if not best:
+ self.fail('Unable to find recipe providing %s' % testrecipe)
+ dcopy = bb.data.createCopy(tinfoil.config_data)
+ dcopy.setVar('MYVARIABLE', 'somevalue')
+ rd = tinfoil.parse_recipe_file(best[3], config_data=dcopy)
+ # Check we can get variable values
+ self.assertEqual('somevalue', rd.getVar('MYVARIABLE'))
+
+ def test_list_recipes(self):
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=False, quiet=2)
+ # Check pkg_pn
+ checkpns = ['tar', 'automake', 'coreutils', 'm4-native', 'nativesdk-gcc']
+ pkg_pn = tinfoil.cooker.recipecaches[''].pkg_pn
+ for pn in checkpns:
+ self.assertIn(pn, pkg_pn)
+ # Check pkg_fn
+ checkfns = {'nativesdk-gcc': '^virtual:nativesdk:.*', 'coreutils': '.*/coreutils_.*.bb'}
+ for fn, pn in tinfoil.cooker.recipecaches[''].pkg_fn.items():
+ if pn in checkpns:
+ if pn in checkfns:
+ self.assertTrue(re.match(checkfns[pn], fn), 'Entry for %s: %s did not match %s' % (pn, fn, checkfns[pn]))
+ checkpns.remove(pn)
+ if checkpns:
+ self.fail('Unable to find pkg_fn entries for: %s' % ', '.join(checkpns))
+
+ def test_wait_event(self):
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=True)
+
+ tinfoil.set_event_mask(['bb.event.FilesMatchingFound', 'bb.command.CommandCompleted'])
+
+ # Need to drain events otherwise events that were masked may still be in the queue
+ while tinfoil.wait_event():
+ pass
+
+ pattern = 'conf'
+ res = tinfoil.run_command('findFilesMatchingInDir', pattern, 'conf/machine')
+ self.assertTrue(res)
+
+ eventreceived = False
+ commandcomplete = False
+ start = time.time()
+ # Wait for 5s in total so we'd detect spurious heartbeat events for example
+ while time.time() - start < 5:
+ event = tinfoil.wait_event(1)
+ if event:
+ if isinstance(event, bb.command.CommandCompleted):
+ commandcomplete = True
+ elif isinstance(event, bb.event.FilesMatchingFound):
+ self.assertEqual(pattern, event._pattern)
+ self.assertIn('qemuarm.conf', event._matches)
+ eventreceived = True
+ elif isinstance(event, logging.LogRecord):
+ continue
+ else:
+ self.fail('Unexpected event: %s' % event)
+
+ self.assertTrue(commandcomplete, 'Timed out waiting for CommandCompleted event from bitbake server')
+ self.assertTrue(eventreceived, 'Did not receive FilesMatchingFound event from bitbake server')
+
+ def test_setvariable_clean(self):
+ # First check that setVariable affects the datastore
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=True)
+ tinfoil.run_command('setVariable', 'TESTVAR', 'specialvalue')
+ self.assertEqual(tinfoil.config_data.getVar('TESTVAR'), 'specialvalue', 'Value set using setVariable is not reflected in client-side getVar()')
+
+ # Now check that the setVariable's effects are no longer present
+ # (this may legitimately break in future if we stop reinitialising
+ # the datastore, in which case we'll have to reconsider use of
+ # setVariable entirely)
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=True)
+ self.assertNotEqual(tinfoil.config_data.getVar('TESTVAR'), 'specialvalue', 'Value set using setVariable is still present!')
+
+ # Now check that setVar on the main datastore works (uses setVariable internally)
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=True)
+ tinfoil.config_data.setVar('TESTVAR', 'specialvalue')
+ value = tinfoil.run_command('getVariable', 'TESTVAR')
+ self.assertEqual(value, 'specialvalue', 'Value set using config_data.setVar() is not reflected in config_data.getVar()')
+
+ def test_datastore_operations(self):
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=True)
+ # Test setVarFlag() / getVarFlag()
+ tinfoil.config_data.setVarFlag('TESTVAR', 'flagname', 'flagval')
+ value = tinfoil.config_data.getVarFlag('TESTVAR', 'flagname')
+ self.assertEqual(value, 'flagval', 'Value set using config_data.setVarFlag() is not reflected in config_data.getVarFlag()')
+ # Test delVarFlag()
+ tinfoil.config_data.setVarFlag('TESTVAR', 'otherflag', 'othervalue')
+ tinfoil.config_data.delVarFlag('TESTVAR', 'flagname')
+ value = tinfoil.config_data.getVarFlag('TESTVAR', 'flagname')
+ self.assertEqual(value, None, 'Varflag deleted using config_data.delVarFlag() is not reflected in config_data.getVarFlag()')
+ value = tinfoil.config_data.getVarFlag('TESTVAR', 'otherflag')
+ self.assertEqual(value, 'othervalue', 'Varflag deleted using config_data.delVarFlag() caused unrelated flag to be removed')
+ # Test delVar()
+ tinfoil.config_data.setVar('TESTVAR', 'varvalue')
+ value = tinfoil.config_data.getVar('TESTVAR')
+ self.assertEqual(value, 'varvalue', 'Value set using config_data.setVar() is not reflected in config_data.getVar()')
+ tinfoil.config_data.delVar('TESTVAR')
+ value = tinfoil.config_data.getVar('TESTVAR')
+ self.assertEqual(value, None, 'Variable deleted using config_data.delVar() appears to still have a value')
+ # Test renameVar()
+ tinfoil.config_data.setVar('TESTVAROLD', 'origvalue')
+ tinfoil.config_data.renameVar('TESTVAROLD', 'TESTVARNEW')
+ value = tinfoil.config_data.getVar('TESTVAROLD')
+ self.assertEqual(value, None, 'Variable renamed using config_data.renameVar() still seems to exist')
+ value = tinfoil.config_data.getVar('TESTVARNEW')
+ self.assertEqual(value, 'origvalue', 'Variable renamed using config_data.renameVar() does not appear with new name')
+ # Test overrides
+ tinfoil.config_data.setVar('TESTVAR', 'original')
+ tinfoil.config_data.setVar('TESTVAR_overrideone', 'one')
+ tinfoil.config_data.setVar('TESTVAR_overridetwo', 'two')
+ tinfoil.config_data.appendVar('OVERRIDES', ':overrideone')
+ value = tinfoil.config_data.getVar('TESTVAR')
+ self.assertEqual(value, 'one', 'Variable overrides not functioning correctly')
+
+ def test_variable_history(self):
+ # Basic test to ensure that variable history works when tracking=True
+ with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
+ tinfoil.prepare(config_only=False, quiet=2)
+ # Note that _tracking for any datastore we get will be
+ # false here, that's currently expected - so we can't check
+ # for that
+ history = tinfoil.config_data.varhistory.variable('DL_DIR')
+ for entry in history:
+ if entry['file'].endswith('/bitbake.conf'):
+ if entry['op'] in ['set', 'set?']:
+ break
+ else:
+ self.fail('Did not find history entry setting DL_DIR in bitbake.conf. History: %s' % history)
+ # Check it works for recipes as well
+ testrecipe = 'zlib'
+ rd = tinfoil.parse_recipe(testrecipe)
+ history = rd.varhistory.variable('LICENSE')
+ bbfound = -1
+ recipefound = -1
+ for i, entry in enumerate(history):
+ if entry['file'].endswith('/bitbake.conf'):
+ if entry['detail'] == 'INVALID' and entry['op'] in ['set', 'set?']:
+ bbfound = i
+ elif entry['file'].endswith('.bb'):
+ if entry['op'] == 'set':
+ recipefound = i
+ if bbfound == -1:
+ self.fail('Did not find history entry setting LICENSE in bitbake.conf parsing %s recipe. History: %s' % (testrecipe, history))
+ if recipefound == -1:
+ self.fail('Did not find history entry setting LICENSE in %s recipe. History: %s' % (testrecipe, history))
+ if bbfound > recipefound:
+ self.fail('History entry setting LICENSE in %s recipe and in bitbake.conf in wrong order. History: %s' % (testrecipe, history))
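The tinfoil API exercised above can also be used from a short standalone script, provided it runs inside an initialised build environment (i.e. after sourcing oe-init-build-env). A minimal sketch that reads one configuration variable and one recipe variable, mirroring the calls made in the tests above:

    import bb.tinfoil

    with bb.tinfoil.Tinfoil() as tinfoil:
        # config_only=False parses recipes as well as the configuration
        tinfoil.prepare(config_only=False, quiet=2)
        print(tinfoil.config_data.getVar('MACHINE'))
        rd = tinfoil.parse_recipe('zlib')
        print(rd.getVar('LICENSE'))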
diff --git a/meta/lib/oeqa/selftest/cases/wic.py b/meta/lib/oeqa/selftest/cases/wic.py
new file mode 100644
index 0000000000..3c5be2f501
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/wic.py
@@ -0,0 +1,1051 @@
+#
+# Copyright (c) 2015, Intel Corporation.
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# AUTHORS
+# Ed Bartosh <ed.bartosh@linux.intel.com>
+
+"""Test cases for wic."""
+
+import os
+import sys
+import unittest
+
+from glob import glob
+from shutil import rmtree, copy
+from functools import wraps, lru_cache
+from tempfile import NamedTemporaryFile
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu
+
+
+@lru_cache(maxsize=32)
+def get_host_arch(recipe):
+ """A cached call to get_bb_var('HOST_ARCH', <recipe>)"""
+ return get_bb_var('HOST_ARCH', recipe)
+
+
+def only_for_arch(archs, image='core-image-minimal'):
+ """Decorator for wrapping test cases that can be run only for specific target
+ architectures. A list of compatible architectures is passed in `archs`.
+ Current architecture will be determined by parsing bitbake output for
+ `image` recipe.
+ """
+ def wrapper(func):
+ @wraps(func)
+ def wrapped_f(*args, **kwargs):
+ arch = get_host_arch(image)
+ if archs and arch not in archs:
+ raise unittest.SkipTest("Testcase arch dependency not met: %s" % arch)
+ return func(*args, **kwargs)
+ wrapped_f.__name__ = func.__name__
+ return wrapped_f
+ return wrapper
+
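The decorator above is applied per test method; a minimal usage sketch, assuming a hypothetical subclass of the WicTestCase class defined below:

    class ArchSpecificSketch(WicTestCase):
        @only_for_arch(['x86_64'])
        def test_x86_64_only(self):
            # Skipped automatically unless HOST_ARCH of core-image-minimal is x86_64
            runCmd('wic list images')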
+
+class WicTestCase(OESelftestTestCase):
+ """Wic test class."""
+
+ image_is_ready = False
+ wicenv_cache = {}
+
+ def setUpLocal(self):
+ """This code is executed before each test method."""
+ self.resultdir = self.builddir + "/wic-tmp/"
+ super(WicTestCase, self).setUpLocal()
+
+ # Do this here instead of in setUpClass as the base setUp does some
+ # clean up which can result in the native tools built earlier in
+ # setUpClass being unavailable.
+ if not WicTestCase.image_is_ready:
+ if get_bb_var('USE_NLS') == 'yes':
+ bitbake('wic-tools')
+ else:
+ self.skipTest('wic-tools cannot be built due to its (intltool|gettext)-native dependency when NLS is disabled')
+
+ bitbake('core-image-minimal')
+ WicTestCase.image_is_ready = True
+
+ rmtree(self.resultdir, ignore_errors=True)
+
+ def tearDownLocal(self):
+ """Remove resultdir as it may contain images."""
+ rmtree(self.resultdir, ignore_errors=True)
+ super(WicTestCase, self).tearDownLocal()
+
+ def _get_image_env_path(self, image):
+ """Generate and obtain the path to <image>.env"""
+ if image not in WicTestCase.wicenv_cache:
+ self.assertEqual(0, bitbake('%s -c do_rootfs_wicenv' % image).status)
+ bb_vars = get_bb_vars(['STAGING_DIR', 'MACHINE'], image)
+ stdir = bb_vars['STAGING_DIR']
+ machine = bb_vars['MACHINE']
+ WicTestCase.wicenv_cache[image] = os.path.join(stdir, machine, 'imgdata')
+ return WicTestCase.wicenv_cache[image]
+
+class Wic(WicTestCase):
+
+ def test_version(self):
+ """Test wic --version"""
+ runCmd('wic --version')
+
+ def test_help(self):
+ """Test wic --help and wic -h"""
+ runCmd('wic --help')
+ runCmd('wic -h')
+
+ def test_createhelp(self):
+ """Test wic create --help"""
+ runCmd('wic create --help')
+
+ def test_listhelp(self):
+ """Test wic list --help"""
+ runCmd('wic list --help')
+
+ def test_help_create(self):
+ """Test wic help create"""
+ runCmd('wic help create')
+
+ def test_help_list(self):
+ """Test wic help list"""
+ runCmd('wic help list')
+
+ def test_help_overview(self):
+ """Test wic help overview"""
+ runCmd('wic help overview')
+
+ def test_help_plugins(self):
+ """Test wic help plugins"""
+ runCmd('wic help plugins')
+
+ def test_help_kickstart(self):
+ """Test wic help kickstart"""
+ runCmd('wic help kickstart')
+
+ def test_list_images(self):
+ """Test wic list images"""
+ runCmd('wic list images')
+
+ def test_list_source_plugins(self):
+ """Test wic list source-plugins"""
+ runCmd('wic list source-plugins')
+
+ def test_listed_images_help(self):
+ """Test wic listed images help"""
+ output = runCmd('wic list images').output
+ imagelist = [line.split()[0] for line in output.splitlines()]
+ for image in imagelist:
+ runCmd('wic list %s help' % image)
+
+ def test_unsupported_subcommand(self):
+ """Test unsupported subcommand"""
+ self.assertNotEqual(0, runCmd('wic unsupported', ignore_status=True).status)
+
+ def test_no_command(self):
+ """Test wic without command"""
+ self.assertEqual(1, runCmd('wic', ignore_status=True).status)
+
+ def test_build_image_name(self):
+ """Test wic create wictestdisk --image-name=core-image-minimal"""
+ cmd = "wic create wictestdisk --image-name=core-image-minimal -o %s" % self.resultdir
+ runCmd(cmd)
+ self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct")))
+
+ @only_for_arch(['i586', 'i686', 'x86_64'])
+ def test_gpt_image(self):
+ """Test creation of core-image-minimal with gpt table and UUID boot"""
+ cmd = "wic create directdisk-gpt --image-name core-image-minimal -o %s" % self.resultdir
+ runCmd(cmd)
+ self.assertEqual(1, len(glob(self.resultdir + "directdisk-*.direct")))
+
+ @only_for_arch(['i586', 'i686', 'x86_64'])
+ def test_iso_image(self):
+ """Test creation of hybrid iso image with legacy and EFI boot"""
+ config = 'INITRAMFS_IMAGE = "core-image-minimal-initramfs"\n'\
+ 'MACHINE_FEATURES_append = " efi"\n'\
+ 'DEPENDS_pn-core-image-minimal += "syslinux"\n'
+ self.append_config(config)
+ bitbake('core-image-minimal core-image-minimal-initramfs')
+ self.remove_config(config)
+ cmd = "wic create mkhybridiso --image-name core-image-minimal -o %s" % self.resultdir
+ runCmd(cmd)
+ self.assertEqual(1, len(glob(self.resultdir + "HYBRID_ISO_IMG-*.direct")))
+ self.assertEqual(1, len(glob(self.resultdir + "HYBRID_ISO_IMG-*.iso")))
+
+ @only_for_arch(['i586', 'i686', 'x86_64'])
+ def test_qemux86_directdisk(self):
+ """Test creation of qemux-86-directdisk image"""
+ cmd = "wic create qemux86-directdisk -e core-image-minimal -o %s" % self.resultdir
+ runCmd(cmd)
+ self.assertEqual(1, len(glob(self.resultdir + "qemux86-directdisk-*direct")))
+
+ @only_for_arch(['i586', 'i686', 'x86_64'])
+ def test_mkefidisk(self):
+ """Test creation of mkefidisk image"""
+ cmd = "wic create mkefidisk -e core-image-minimal -o %s" % self.resultdir
+ runCmd(cmd)
+ self.assertEqual(1, len(glob(self.resultdir + "mkefidisk-*direct")))
+
+ @only_for_arch(['i586', 'i686', 'x86_64'])
+ def test_bootloader_config(self):
+ """Test creation of directdisk-bootloader-config image"""
+ config = 'DEPENDS_pn-core-image-minimal += "syslinux"\n'
+ self.append_config(config)
+ bitbake('core-image-minimal')
+ self.remove_config(config)
+ cmd = "wic create directdisk-bootloader-config -e core-image-minimal -o %s" % self.resultdir
+ runCmd(cmd)
+ self.assertEqual(1, len(glob(self.resultdir + "directdisk-bootloader-config-*direct")))
+
+ @only_for_arch(['i586', 'i686', 'x86_64'])
+ def test_systemd_bootdisk(self):
+ """Test creation of systemd-bootdisk image"""
+ config = 'MACHINE_FEATURES_append = " efi"\n'
+ self.append_config(config)
+ bitbake('core-image-minimal')
+ self.remove_config(config)
+ cmd = "wic create systemd-bootdisk -e core-image-minimal -o %s" % self.resultdir
+ runCmd(cmd)
+ self.assertEqual(1, len(glob(self.resultdir + "systemd-bootdisk-*direct")))
+
+ def test_sdimage_bootpart(self):
+ """Test creation of sdimage-bootpart image"""
+ cmd = "wic create sdimage-bootpart -e core-image-minimal -o %s" % self.resultdir
+ kimgtype = get_bb_var('KERNEL_IMAGETYPE', 'core-image-minimal')
+ self.write_config('IMAGE_BOOT_FILES = "%s"\n' % kimgtype)
+ runCmd(cmd)
+ self.assertEqual(1, len(glob(self.resultdir + "sdimage-bootpart-*direct")))
+
+ @only_for_arch(['i586', 'i686', 'x86_64'])
+ def test_default_output_dir(self):
+ """Test default output location"""
+ for fname in glob("directdisk-*.direct"):
+ os.remove(fname)
+ config = 'DEPENDS_pn-core-image-minimal += "syslinux"\n'
+ self.append_config(config)
+ bitbake('core-image-minimal')
+ self.remove_config(config)
+ cmd = "wic create directdisk -e core-image-minimal"
+ runCmd(cmd)
+ self.assertEqual(1, len(glob("directdisk-*.direct")))
+
+ @only_for_arch(['i586', 'i686', 'x86_64'])
+ def test_build_artifacts(self):
+ """Test wic create directdisk providing all artifacts."""
+ bb_vars = get_bb_vars(['STAGING_DATADIR', 'RECIPE_SYSROOT_NATIVE'],
+ 'wic-tools')
+ bb_vars.update(get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_ROOTFS'],
+ 'core-image-minimal'))
+ bbvars = {key.lower(): value for key, value in bb_vars.items()}
+ bbvars['resultdir'] = self.resultdir
+ runCmd("wic create directdisk "
+ "-b %(staging_datadir)s "
+ "-k %(deploy_dir_image)s "
+ "-n %(recipe_sysroot_native)s "
+ "-r %(image_rootfs)s "
+ "-o %(resultdir)s" % bbvars)
+ self.assertEqual(1, len(glob(self.resultdir + "directdisk-*.direct")))
+
+ def test_compress_gzip(self):
+ """Test compressing an image with gzip"""
+ runCmd("wic create wictestdisk "
+ "--image-name core-image-minimal "
+ "-c gzip -o %s" % self.resultdir)
+ self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct.gz")))
+
+ def test_compress_bzip2(self):
+ """Test compressing an image with bzip2"""
+ runCmd("wic create wictestdisk "
+ "--image-name=core-image-minimal "
+ "-c bzip2 -o %s" % self.resultdir)
+ self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct.bz2")))
+
+ def test_compress_xz(self):
+ """Test compressing an image with xz"""
+ runCmd("wic create wictestdisk "
+ "--image-name=core-image-minimal "
+ "--compress-with=xz -o %s" % self.resultdir)
+ self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct.xz")))
+
+ def test_wrong_compressor(self):
+ """Test how wic breaks if wrong compressor is provided"""
+ self.assertEqual(2, runCmd("wic create wictestdisk "
+ "--image-name=core-image-minimal "
+ "-c wrong -o %s" % self.resultdir,
+ ignore_status=True).status)
+
+ def test_debug_short(self):
+ """Test -D option"""
+ runCmd("wic create wictestdisk "
+ "--image-name=core-image-minimal "
+ "-D -o %s" % self.resultdir)
+ self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct")))
+
+ def test_debug_long(self):
+ """Test --debug option"""
+ runCmd("wic create wictestdisk "
+ "--image-name=core-image-minimal "
+ "--debug -o %s" % self.resultdir)
+ self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct")))
+
+ def test_skip_build_check_short(self):
+ """Test -s option"""
+ runCmd("wic create wictestdisk "
+ "--image-name=core-image-minimal "
+ "-s -o %s" % self.resultdir)
+ self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct")))
+
+ def test_skip_build_check_long(self):
+ """Test --skip-build-check option"""
+ runCmd("wic create wictestdisk "
+ "--image-name=core-image-minimal "
+ "--skip-build-check "
+ "--outdir %s" % self.resultdir)
+ self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct")))
+
+ def test_build_rootfs_short(self):
+ """Test -f option"""
+ runCmd("wic create wictestdisk "
+ "--image-name=core-image-minimal "
+ "-f -o %s" % self.resultdir)
+ self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct")))
+
+ def test_build_rootfs_long(self):
+ """Test --build-rootfs option"""
+ runCmd("wic create wictestdisk "
+ "--image-name=core-image-minimal "
+ "--build-rootfs "
+ "--outdir %s" % self.resultdir)
+ self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct")))
+
+ @only_for_arch(['i586', 'i686', 'x86_64'])
+ def test_rootfs_indirect_recipes(self):
+ """Test usage of rootfs plugin with rootfs recipes"""
+ runCmd("wic create directdisk-multi-rootfs "
+ "--image-name=core-image-minimal "
+ "--rootfs rootfs1=core-image-minimal "
+ "--rootfs rootfs2=core-image-minimal "
+ "--outdir %s" % self.resultdir)
+ self.assertEqual(1, len(glob(self.resultdir + "directdisk-multi-rootfs*.direct")))
+
+ @only_for_arch(['i586', 'i686', 'x86_64'])
+ def test_rootfs_artifacts(self):
+ """Test usage of rootfs plugin with rootfs paths"""
+ bb_vars = get_bb_vars(['STAGING_DATADIR', 'RECIPE_SYSROOT_NATIVE'],
+ 'wic-tools')
+ bb_vars.update(get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_ROOTFS'],
+ 'core-image-minimal'))
+ bbvars = {key.lower(): value for key, value in bb_vars.items()}
+ bbvars['wks'] = "directdisk-multi-rootfs"
+ bbvars['resultdir'] = self.resultdir
+ runCmd("wic create %(wks)s "
+ "--bootimg-dir=%(staging_datadir)s "
+ "--kernel-dir=%(deploy_dir_image)s "
+ "--native-sysroot=%(recipe_sysroot_native)s "
+ "--rootfs-dir rootfs1=%(image_rootfs)s "
+ "--rootfs-dir rootfs2=%(image_rootfs)s "
+ "--outdir %(resultdir)s" % bbvars)
+ self.assertEqual(1, len(glob(self.resultdir + "%(wks)s-*.direct" % bbvars)))
+
+ def test_exclude_path(self):
+ """Test --exclude-path wks option."""
+
+ oldpath = os.environ['PATH']
+ os.environ['PATH'] = get_bb_var("PATH", "wic-tools")
+
+ try:
+ wks_file = 'temp.wks'
+ with open(wks_file, 'w') as wks:
+ rootfs_dir = get_bb_var('IMAGE_ROOTFS', 'core-image-minimal')
+ wks.write("""
+part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path usr
+part /usr --source rootfs --ondisk mmcblk0 --fstype=ext4 --rootfs-dir %s/usr
+part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --rootfs-dir %s/usr"""
+ % (rootfs_dir, rootfs_dir))
+ runCmd("wic create %s -e core-image-minimal -o %s" \
+ % (wks_file, self.resultdir))
+
+ os.remove(wks_file)
+ wicout = glob(self.resultdir + "%s-*direct" % 'temp')
+ self.assertEqual(1, len(wicout))
+
+ wicimg = wicout[0]
+
+ # verify the partition sizes wic created, using parted
+ res = runCmd("parted -m %s unit b p 2>/dev/null" % wicimg)
+
+ # parse parted output which looks like this:
+ # BYT;\n
+ # /var/tmp/wic/build/tmpfwvjjkf_-201611101222-hda.direct:200MiB:file:512:512:msdos::;\n
+ # 1:0.00MiB:200MiB:200MiB:ext4::;\n
+ partlns = res.output.splitlines()[2:]
+
+ self.assertEqual(3, len(partlns))
+
+ for part in [1, 2, 3]:
+ part_file = os.path.join(self.resultdir, "selftest_img.part%d" % part)
+ partln = partlns[part-1].split(":")
+ self.assertEqual(7, len(partln))
+ start = int(partln[1].rstrip("B")) / 512
+ length = int(partln[3].rstrip("B")) / 512
+ runCmd("dd if=%s of=%s skip=%d count=%d" %
+ (wicimg, part_file, start, length))
+
+ def extract_files(debugfs_output):
+ """
+ extract file names from the output of debugfs -R 'ls -p',
+ which looks like this:
+
+ /2/040755/0/0/.//\n
+ /2/040755/0/0/..//\n
+ /11/040700/0/0/lost+found^M//\n
+ /12/040755/1002/1002/run//\n
+ /13/040755/1002/1002/sys//\n
+ /14/040755/1002/1002/bin//\n
+ /80/040755/1002/1002/var//\n
+ /92/040755/1002/1002/tmp//\n
+ """
+ # NOTE the occasional ^M in file names
+ return [line.split('/')[5].strip() for line in \
+ debugfs_output.strip().split('/\n')]
+
+ # Test partition 1, should contain the normal root directories, except
+ # /usr.
+ res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % \
+ os.path.join(self.resultdir, "selftest_img.part1"))
+ files = extract_files(res.output)
+ self.assertIn("etc", files)
+ self.assertNotIn("usr", files)
+
+ # Partition 2, should contain common directories for /usr, not root
+ # directories.
+ res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % \
+ os.path.join(self.resultdir, "selftest_img.part2"))
+ files = extract_files(res.output)
+ self.assertNotIn("etc", files)
+ self.assertNotIn("usr", files)
+ self.assertIn("share", files)
+
+ # Partition 3, should contain the same as partition 2, including the bin
+ # directory, but not the files inside it.
+ res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % \
+ os.path.join(self.resultdir, "selftest_img.part3"))
+ files = extract_files(res.output)
+ self.assertNotIn("etc", files)
+ self.assertNotIn("usr", files)
+ self.assertIn("share", files)
+ self.assertIn("bin", files)
+ res = runCmd("debugfs -R 'ls -p bin' %s 2>/dev/null" % \
+ os.path.join(self.resultdir, "selftest_img.part3"))
+ files = extract_files(res.output)
+ self.assertIn(".", files)
+ self.assertIn("..", files)
+ self.assertEqual(2, len(files))
+
+ for part in [1, 2, 3]:
+ part_file = os.path.join(self.resultdir, "selftest_img.part%d" % part)
+ os.remove(part_file)
+
+ finally:
+ os.environ['PATH'] = oldpath
+
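The partition-inspection technique used in test_exclude_path() above (parse the machine-readable `parted -m` listing, then dd each partition out by 512-byte sectors) can be sketched on its own. A minimal, self-contained example using the sample output quoted in the comments above rather than a real image:

    # Sample `parted -m` output as quoted in the comments above
    output = (
        "BYT;\n"
        "/var/tmp/wic/build/tmp-hda.direct:200MiB:file:512:512:msdos::;\n"
        "1:0.00MiB:200MiB:200MiB:ext4::;\n"
    )
    # Skip the two header lines, then split each partition record on ':'
    for line in output.splitlines()[2:]:
        fields = line.split(":")
        number, start, end, size, fstype = fields[0], fields[1], fields[2], fields[3], fields[4]
        print(number, start, end, size, fstype)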
+ def test_exclude_path_errors(self):
+ """Test --exclude-path wks option error handling."""
+ wks_file = 'temp.wks'
+
+ # Absolute argument.
+ with open(wks_file, 'w') as wks:
+ wks.write("part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path /usr")
+ self.assertNotEqual(0, runCmd("wic create %s -e core-image-minimal -o %s" \
+ % (wks_file, self.resultdir), ignore_status=True).status)
+ os.remove(wks_file)
+
+ # Argument pointing to parent directory.
+ with open(wks_file, 'w') as wks:
+ wks.write("part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path ././..")
+ self.assertNotEqual(0, runCmd("wic create %s -e core-image-minimal -o %s" \
+ % (wks_file, self.resultdir), ignore_status=True).status)
+ os.remove(wks_file)
+
+class Wic2(WicTestCase):
+
+ def test_bmap_short(self):
+ """Test generation of .bmap file -m option"""
+ cmd = "wic create wictestdisk -e core-image-minimal -m -o %s" % self.resultdir
+ runCmd(cmd)
+ self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct")))
+ self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct.bmap")))
+
+ def test_bmap_long(self):
+ """Test generation of .bmap file --bmap option"""
+ cmd = "wic create wictestdisk -e core-image-minimal --bmap -o %s" % self.resultdir
+ runCmd(cmd)
+ self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct")))
+ self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct.bmap")))
+
+ def test_image_env(self):
+ """Test generation of <image>.env files."""
+ image = 'core-image-minimal'
+ imgdatadir = self._get_image_env_path(image)
+
+ bb_vars = get_bb_vars(['IMAGE_BASENAME', 'WICVARS'], image)
+ basename = bb_vars['IMAGE_BASENAME']
+ self.assertEqual(basename, image)
+ path = os.path.join(imgdatadir, basename) + '.env'
+ self.assertTrue(os.path.isfile(path))
+
+ wicvars = set(bb_vars['WICVARS'].split())
+ # filter out optional variables
+ wicvars = wicvars.difference(('DEPLOY_DIR_IMAGE', 'IMAGE_BOOT_FILES',
+ 'INITRD', 'INITRD_LIVE', 'ISODIR','INITRAMFS_IMAGE',
+ 'INITRAMFS_IMAGE_BUNDLE', 'INITRAMFS_LINK_NAME'))
+ with open(path) as envfile:
+ content = dict(line.split("=", 1) for line in envfile)
+ # test that variables used by wic are present in the .env file
+ for var in wicvars:
+ self.assertTrue(var in content, "%s is not in .env file" % var)
+ self.assertTrue(content[var])
+
+ def test_image_vars_dir_short(self):
+ """Test image vars directory selection -v option"""
+ image = 'core-image-minimal'
+ imgenvdir = self._get_image_env_path(image)
+ native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "wic-tools")
+
+ runCmd("wic create wictestdisk "
+ "--image-name=%s -v %s -n %s -o %s"
+ % (image, imgenvdir, native_sysroot,
+ self.resultdir))
+ self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct")))
+
+ def test_image_vars_dir_long(self):
+ """Test image vars directory selection --vars option"""
+ image = 'core-image-minimal'
+ imgenvdir = self._get_image_env_path(image)
+ native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "wic-tools")
+
+ runCmd("wic create wictestdisk "
+ "--image-name=%s "
+ "--vars %s "
+ "--native-sysroot %s "
+ "--outdir %s"
+ % (image, imgenvdir, native_sysroot,
+ self.resultdir))
+ self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct")))
+
+ @only_for_arch(['i586', 'i686', 'x86_64'])
+ def test_wic_image_type(self):
+ """Test building wic images by bitbake"""
+ config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\
+ 'MACHINE_FEATURES_append = " efi"\n'
+ self.append_config(config)
+ self.assertEqual(0, bitbake('wic-image-minimal').status)
+ self.remove_config(config)
+
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'MACHINE'])
+ deploy_dir = bb_vars['DEPLOY_DIR_IMAGE']
+ machine = bb_vars['MACHINE']
+ prefix = os.path.join(deploy_dir, 'wic-image-minimal-%s.' % machine)
+ # check if we have result image and manifests symlinks
+ # pointing to existing files
+ for suffix in ('wic', 'manifest'):
+ path = prefix + suffix
+ self.assertTrue(os.path.islink(path))
+ self.assertTrue(os.path.isfile(os.path.realpath(path)))
+
+ @only_for_arch(['i586', 'i686', 'x86_64'])
+ def test_qemu(self):
+ """Test wic-image-minimal under qemu"""
+ config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\
+ 'MACHINE_FEATURES_append = " efi"\n'
+ self.append_config(config)
+ self.assertEqual(0, bitbake('wic-image-minimal').status)
+ self.remove_config(config)
+
+ with runqemu('wic-image-minimal', ssh=False) as qemu:
+ cmd = "mount | grep '^/dev/' | cut -f1,3 -d ' ' | egrep -c -e '/dev/sda1 /boot' " \
+ "-e '/dev/root /|/dev/sda2 /' -e '/dev/sda3 /media' -e '/dev/sda4 /mnt'"
+ status, output = qemu.run_serial(cmd)
+ self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
+ self.assertEqual(output, '4')
+ cmd = "grep UUID= /etc/fstab"
+ status, output = qemu.run_serial(cmd)
+ self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
+ self.assertEqual(output, 'UUID=2c71ef06-a81d-4735-9d3a-379b69c6bdba\t/media\text4\tdefaults\t0\t0')
+
+ @only_for_arch(['i586', 'i686', 'x86_64'])
+ def test_qemu_efi(self):
+ """Test core-image-minimal efi image under qemu"""
+ config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "mkefidisk.wks"\n'
+ self.append_config(config)
+ self.assertEqual(0, bitbake('core-image-minimal ovmf').status)
+ self.remove_config(config)
+
+ with runqemu('core-image-minimal', ssh=False,
+ runqemuparams='ovmf', image_fstype='wic') as qemu:
+ cmd = "grep sda. /proc/partitions |wc -l"
+ status, output = qemu.run_serial(cmd)
+ self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
+ self.assertEqual(output, '3')
+
+ @staticmethod
+ def _make_fixed_size_wks(size):
+ """
+ Create a wks for an image with a single partition. The size of the partition is
+ set using the --fixed-size flag. Returns a tuple: (path to wks file, wks image name)
+ """
+ with NamedTemporaryFile("w", suffix=".wks", delete=False) as tempf:
+ wkspath = tempf.name
+ tempf.write("part " \
+ "--source rootfs --ondisk hda --align 4 --fixed-size %d "
+ "--fstype=ext4\n" % size)
+ wksname = os.path.splitext(os.path.basename(wkspath))[0]
+
+ return wkspath, wksname
+
+ def test_fixed_size(self):
+ """
+ Test creation of a simple image with partition size controlled through
+ --fixed-size flag
+ """
+ wkspath, wksname = Wic2._make_fixed_size_wks(200)
+
+ runCmd("wic create %s -e core-image-minimal -o %s" \
+ % (wkspath, self.resultdir))
+ os.remove(wkspath)
+ wicout = glob(self.resultdir + "%s-*direct" % wksname)
+ self.assertEqual(1, len(wicout))
+
+ wicimg = wicout[0]
+
+ native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "wic-tools")
+
+ # verify the partition sizes wic created, using parted
+ res = runCmd("parted -m %s unit mib p 2>/dev/null" % wicimg,
+ native_sysroot=native_sysroot)
+
+ # parse parted output which looks like this:
+ # BYT;\n
+ # /var/tmp/wic/build/tmpfwvjjkf_-201611101222-hda.direct:200MiB:file:512:512:msdos::;\n
+ # 1:0.00MiB:200MiB:200MiB:ext4::;\n
+ partlns = res.output.splitlines()[2:]
+
+ self.assertEqual(1, len(partlns),
+ msg="Partition list '%s'" % res.output)
+ self.assertEqual("1:0.00MiB:200MiB:200MiB:ext4::;", partlns[0],
+ msg="Partition list '%s'" % res.output)
+
+ def test_fixed_size_error(self):
+ """
+ Test creation of a simple image with partition size controlled through
+        --fixed-size flag. The size of the partition is intentionally set to 1MiB
+ in order to trigger an error in wic.
+ """
+ wkspath, wksname = Wic2._make_fixed_size_wks(1)
+
+ self.assertEqual(1, runCmd("wic create %s -e core-image-minimal -o %s" \
+ % (wkspath, self.resultdir), ignore_status=True).status)
+ os.remove(wkspath)
+ wicout = glob(self.resultdir + "%s-*direct" % wksname)
+ self.assertEqual(0, len(wicout))
+
+ @only_for_arch(['i586', 'i686', 'x86_64'])
+ def test_rawcopy_plugin_qemu(self):
+ """Test rawcopy plugin in qemu"""
+ # build ext4 and wic images
+ for fstype in ("ext4", "wic"):
+ config = 'IMAGE_FSTYPES = "%s"\nWKS_FILE = "test_rawcopy_plugin.wks.in"\n' % fstype
+ self.append_config(config)
+ self.assertEqual(0, bitbake('core-image-minimal').status)
+ self.remove_config(config)
+
+ with runqemu('core-image-minimal', ssh=False, image_fstype='wic') as qemu:
+ cmd = "grep sda. /proc/partitions |wc -l"
+ status, output = qemu.run_serial(cmd)
+ self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
+ self.assertEqual(output, '2')
+
+ def test_rawcopy_plugin(self):
+ """Test rawcopy plugin"""
+ img = 'core-image-minimal'
+ machine = get_bb_var('MACHINE', img)
+ with NamedTemporaryFile("w", suffix=".wks") as wks:
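+            # /boot is populated by bootimg-pcbios; the root partition is raw-copied
+            # from the core-image-minimal ext4 image built for this machine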
+ wks.writelines(['part /boot --active --source bootimg-pcbios\n',
+ 'part / --source rawcopy --sourceparams="file=%s-%s.ext4" --use-uuid\n'\
+ % (img, machine),
+ 'bootloader --timeout=0 --append="console=ttyS0,115200n8"\n'])
+ wks.flush()
+ cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
+ runCmd(cmd)
+ wksname = os.path.splitext(os.path.basename(wks.name))[0]
+ out = glob(self.resultdir + "%s-*direct" % wksname)
+ self.assertEqual(1, len(out))
+
+ @only_for_arch(['i586', 'i686', 'x86_64'])
+ def test_biosplusefi_plugin_qemu(self):
+ """Test biosplusefi plugin in qemu"""
+ config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_biosplusefi_plugin.wks"\nMACHINE_FEATURES_append = " efi"\n'
+ self.append_config(config)
+ self.assertEqual(0, bitbake('core-image-minimal').status)
+ self.remove_config(config)
+
+ with runqemu('core-image-minimal', ssh=False, image_fstype='wic') as qemu:
+ # Check that we have ONLY two /dev/sda* partitions (/boot and /)
+ cmd = "grep sda. /proc/partitions | wc -l"
+ status, output = qemu.run_serial(cmd)
+ self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
+ self.assertEqual(output, '2')
+ # Check that /dev/sda1 is /boot and that either /dev/root OR /dev/sda2 is /
+ cmd = "mount | grep '^/dev/' | cut -f1,3 -d ' ' | egrep -c -e '/dev/sda1 /boot' -e '/dev/root /|/dev/sda2 /'"
+ status, output = qemu.run_serial(cmd)
+ self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
+ self.assertEqual(output, '2')
+ # Check that /boot has EFI bootx64.efi (required for EFI)
+ cmd = "ls /boot/EFI/BOOT/bootx64.efi | wc -l"
+ status, output = qemu.run_serial(cmd)
+ self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
+ self.assertEqual(output, '1')
+            # Check that the "BOOTABLE" flag is set on the boot partition (required for PC-BIOS)
+            # The trailing "cat" seems to be required; otherwise run_serial() sends back an echo of the input command
+            cmd = "fdisk -l /dev/sda | grep /dev/sda1 | awk '{print $2}' | cat"
+ status, output = qemu.run_serial(cmd)
+ self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
+ self.assertEqual(output, '*')
+
+ @only_for_arch(['i586', 'i686', 'x86_64'])
+ def test_biosplusefi_plugin(self):
+ """Test biosplusefi plugin"""
+        # Wic generation below may fail depending on the order of the unittests.
+        # This is because bootimg-pcbios (which bootimg-biosplusefi uses) generates its MBR inside the STAGING_DATADIR directory,
+        # which may or may not exist depending on what was built already.
+        # If an image hasn't been built yet, the directory ${STAGING_DATADIR}/syslinux won't exist and _get_bootimg_dir()
+        # will raise "Couldn't find correct bootimg_dir".
+        # The easiest way to work around this issue is to make sure an image has already been built here, hence the bitbake call.
+ config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_biosplusefi_plugin.wks"\nMACHINE_FEATURES_append = " efi"\n'
+ self.append_config(config)
+ self.assertEqual(0, bitbake('core-image-minimal').status)
+ self.remove_config(config)
+
+ img = 'core-image-minimal'
+ with NamedTemporaryFile("w", suffix=".wks") as wks:
+            wks.writelines(['part /boot --active --source bootimg-biosplusefi --sourceparams="loader=grub-efi"\n',
+                            'part / --source rootfs --fstype=ext4 --align 1024 --use-uuid\n',
+                            'bootloader --timeout=0 --append="console=ttyS0,115200n8"\n'])
+ wks.flush()
+ cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
+ runCmd(cmd)
+ wksname = os.path.splitext(os.path.basename(wks.name))[0]
+ out = glob(self.resultdir + "%s-*.direct" % wksname)
+ self.assertEqual(1, len(out))
+
+ def test_fs_types(self):
+ """Test filesystem types for empty and not empty partitions"""
+ img = 'core-image-minimal'
+ with NamedTemporaryFile("w", suffix=".wks") as wks:
+ wks.writelines(['part ext2 --fstype ext2 --source rootfs\n',
+ 'part btrfs --fstype btrfs --source rootfs --size 40M\n',
+ 'part squash --fstype squashfs --source rootfs\n',
+ 'part swap --fstype swap --size 1M\n',
+ 'part emptyvfat --fstype vfat --size 1M\n',
+ 'part emptymsdos --fstype msdos --size 1M\n',
+ 'part emptyext2 --fstype ext2 --size 1M\n',
+ 'part emptybtrfs --fstype btrfs --size 150M\n'])
+ wks.flush()
+ cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
+ runCmd(cmd)
+ wksname = os.path.splitext(os.path.basename(wks.name))[0]
+ out = glob(self.resultdir + "%s-*direct" % wksname)
+ self.assertEqual(1, len(out))
+
+ def test_kickstart_parser(self):
+ """Test wks parser options"""
+ with NamedTemporaryFile("w", suffix=".wks") as wks:
+ wks.writelines(['part / --fstype ext3 --source rootfs --system-id 0xFF '\
+ '--overhead-factor 1.2 --size 100k\n'])
+ wks.flush()
+ cmd = "wic create %s -e core-image-minimal -o %s" % (wks.name, self.resultdir)
+ runCmd(cmd)
+ wksname = os.path.splitext(os.path.basename(wks.name))[0]
+ out = glob(self.resultdir + "%s-*direct" % wksname)
+ self.assertEqual(1, len(out))
+
+ def test_image_bootpart_globbed(self):
+ """Test globbed sources with image-bootpart plugin"""
+ img = "core-image-minimal"
+ cmd = "wic create sdimage-bootpart -e %s -o %s" % (img, self.resultdir)
+ config = 'IMAGE_BOOT_FILES = "%s*"' % get_bb_var('KERNEL_IMAGETYPE', img)
+ self.append_config(config)
+ runCmd(cmd)
+ self.remove_config(config)
+ self.assertEqual(1, len(glob(self.resultdir + "sdimage-bootpart-*direct")))
+
+ def test_sparse_copy(self):
+ """Test sparse_copy with FIEMAP and SEEK_HOLE filemap APIs"""
+ libpath = os.path.join(get_bb_var('COREBASE'), 'scripts', 'lib', 'wic')
+ sys.path.insert(0, libpath)
+ from filemap import FilemapFiemap, FilemapSeek, sparse_copy, ErrorNotSupp
+ with NamedTemporaryFile("w", suffix=".wic-sparse") as sparse:
+ src_name = sparse.name
+ src_size = 1024 * 10
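+            # make the source a 10 KiB sparse file (no data blocks allocated yet)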
+ sparse.truncate(src_size)
+ # write one byte to the file
+ with open(src_name, 'r+b') as sfile:
+ sfile.seek(1024 * 4)
+ sfile.write(b'\x00')
+ dest = sparse.name + '.out'
+ # copy src file to dest using different filemap APIs
+ for api in (FilemapFiemap, FilemapSeek, None):
+ if os.path.exists(dest):
+ os.unlink(dest)
+ try:
+ sparse_copy(sparse.name, dest, api=api)
+ except ErrorNotSupp:
+ continue # skip unsupported API
+ dest_stat = os.stat(dest)
+ self.assertEqual(dest_stat.st_size, src_size)
+                # st_blocks counts 512-byte blocks, so 8 blocks == 4K:
+                # only the single 4K block containing the written byte is allocated
+ self.assertEqual(dest_stat.st_blocks, 8)
+ os.unlink(dest)
+
+ def test_wic_ls(self):
+ """Test listing image content using 'wic ls'"""
+ runCmd("wic create wictestdisk "
+ "--image-name=core-image-minimal "
+ "-D -o %s" % self.resultdir)
+ images = glob(self.resultdir + "wictestdisk-*.direct")
+ self.assertEqual(1, len(images))
+
+ sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
+
+ # list partitions
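+        # expect a header line plus one line per partition (wictestdisk has two partitions)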
+ result = runCmd("wic ls %s -n %s" % (images[0], sysroot))
+ self.assertEqual(3, len(result.output.split('\n')))
+
+ # list directory content of the first partition
+ result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot))
+ self.assertEqual(6, len(result.output.split('\n')))
+
+ def test_wic_cp(self):
+ """Test copy files and directories to the the wic image."""
+ runCmd("wic create wictestdisk "
+ "--image-name=core-image-minimal "
+ "-D -o %s" % self.resultdir)
+ images = glob(self.resultdir + "wictestdisk-*.direct")
+ self.assertEqual(1, len(images))
+
+ sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
+
+ # list directory content of the first partition
+ result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot))
+ self.assertEqual(6, len(result.output.split('\n')))
+
+ with NamedTemporaryFile("w", suffix=".wic-cp") as testfile:
+            testfile.write("test")
+            # flush so the data is on disk before wic cp copies the file
+            testfile.flush()
+
+ # copy file to the partition
+ runCmd("wic cp %s %s:1/ -n %s" % (testfile.name, images[0], sysroot))
+
+ # check if file is there
+ result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot))
+ self.assertEqual(7, len(result.output.split('\n')))
+ self.assertTrue(os.path.basename(testfile.name) in result.output)
+
+ # prepare directory
+ testdir = os.path.join(self.resultdir, 'wic-test-cp-dir')
+ testsubdir = os.path.join(testdir, 'subdir')
+            os.makedirs(testsubdir)
+ copy(testfile.name, testdir)
+
+ # copy directory to the partition
+ runCmd("wic cp %s %s:1/ -n %s" % (testdir, images[0], sysroot))
+
+ # check if directory is there
+ result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot))
+ self.assertEqual(8, len(result.output.split('\n')))
+ self.assertTrue(os.path.basename(testdir) in result.output)
+
+            # copy the file back from the partition and check that the copy succeeds
+ dest = '%s-cp' % testfile.name
+ runCmd("wic cp %s:1/%s %s -n %s" % (images[0],
+ os.path.basename(testfile.name), dest, sysroot))
+ self.assertTrue(os.path.exists(dest))
+
+
+ def test_wic_rm(self):
+ """Test removing files and directories from the the wic image."""
+ runCmd("wic create mkefidisk "
+ "--image-name=core-image-minimal "
+ "-D -o %s" % self.resultdir)
+ images = glob(self.resultdir + "mkefidisk-*.direct")
+ self.assertEqual(1, len(images))
+
+ sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
+
+ # list directory content of the first partition
+ result = runCmd("wic ls %s:1 -n %s" % (images[0], sysroot))
+ self.assertIn('\nBZIMAGE ', result.output)
+ self.assertIn('\nEFI <DIR> ', result.output)
+
+ # remove file
+ runCmd("wic rm %s:1/bzimage -n %s" % (images[0], sysroot))
+
+ # remove directory
+ runCmd("wic rm %s:1/efi -n %s" % (images[0], sysroot))
+
+ # check if they're removed
+ result = runCmd("wic ls %s:1 -n %s" % (images[0], sysroot))
+ self.assertNotIn('\nBZIMAGE ', result.output)
+ self.assertNotIn('\nEFI <DIR> ', result.output)
+
+ def test_mkfs_extraopts(self):
+ """Test wks option --mkfs-extraopts for empty and not empty partitions"""
+ img = 'core-image-minimal'
+ with NamedTemporaryFile("w", suffix=".wks") as wks:
+ wks.writelines(
+ ['part ext2 --fstype ext2 --source rootfs --mkfs-extraopts "-D -F -i 8192"\n',
+ "part btrfs --fstype btrfs --source rootfs --size 40M --mkfs-extraopts='--quiet'\n",
+ 'part squash --fstype squashfs --source rootfs --mkfs-extraopts "-no-sparse -b 4096"\n',
+ 'part emptyvfat --fstype vfat --size 1M --mkfs-extraopts "-S 1024 -s 64"\n',
+ 'part emptymsdos --fstype msdos --size 1M --mkfs-extraopts "-S 1024 -s 64"\n',
+ 'part emptyext2 --fstype ext2 --size 1M --mkfs-extraopts "-D -F -i 8192"\n',
+ 'part emptybtrfs --fstype btrfs --size 100M --mkfs-extraopts "--mixed -K"\n'])
+ wks.flush()
+ cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
+ runCmd(cmd)
+ wksname = os.path.splitext(os.path.basename(wks.name))[0]
+ out = glob(self.resultdir + "%s-*direct" % wksname)
+ self.assertEqual(1, len(out))
+
+ def test_expand_mbr_image(self):
+ """Test wic write --expand command for mbr image"""
+ # build an image
+ config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "directdisk.wks"\n'
+ self.append_config(config)
+ self.assertEqual(0, bitbake('core-image-minimal').status)
+
+ # get path to the image
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'MACHINE'])
+ deploy_dir = bb_vars['DEPLOY_DIR_IMAGE']
+ machine = bb_vars['MACHINE']
+ image_path = os.path.join(deploy_dir, 'core-image-minimal-%s.wic' % machine)
+
+ self.remove_config(config)
+
+ try:
+ # expand image to 1G
+ new_image_path = None
+ with NamedTemporaryFile(mode='wb', suffix='.wic.exp',
+ dir=deploy_dir, delete=False) as sparse:
+ sparse.truncate(1024 ** 3)
+ new_image_path = sparse.name
+
+ sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
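+            # write the original image into the 1 GiB sparse target file,
+            # expanding the partition layout to fill it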
+ cmd = "wic write -n %s --expand 1:0 %s %s" % (sysroot, image_path, new_image_path)
+ runCmd(cmd)
+
+ # check if partitions are expanded
+ orig = runCmd("wic ls %s -n %s" % (image_path, sysroot))
+ exp = runCmd("wic ls %s -n %s" % (new_image_path, sysroot))
+ orig_sizes = [int(line.split()[3]) for line in orig.output.split('\n')[1:]]
+ exp_sizes = [int(line.split()[3]) for line in exp.output.split('\n')[1:]]
+ self.assertEqual(orig_sizes[0], exp_sizes[0]) # first partition is not resized
+ self.assertTrue(orig_sizes[1] < exp_sizes[1])
+
+ # Check if all free space is partitioned
+ result = runCmd("%s/usr/sbin/sfdisk -F %s" % (sysroot, new_image_path))
+ self.assertTrue("0 B, 0 bytes, 0 sectors" in result.output)
+
+ os.rename(image_path, image_path + '.bak')
+ os.rename(new_image_path, image_path)
+
+ # Check if it boots in qemu
+ with runqemu('core-image-minimal', ssh=False) as qemu:
+ cmd = "ls /etc/"
+ status, output = qemu.run_serial('true')
+ self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
+ finally:
+ if os.path.exists(new_image_path):
+ os.unlink(new_image_path)
+ if os.path.exists(image_path + '.bak'):
+ os.rename(image_path + '.bak', image_path)
+
+ def test_wic_ls_ext(self):
+ """Test listing content of the ext partition using 'wic ls'"""
+ runCmd("wic create wictestdisk "
+ "--image-name=core-image-minimal "
+ "-D -o %s" % self.resultdir)
+ images = glob(self.resultdir + "wictestdisk-*.direct")
+ self.assertEqual(1, len(images))
+
+ sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
+
+ # list directory content of the second ext4 partition
+ result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot))
+ self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset(
+ set(line.split()[-1] for line in result.output.split('\n') if line)))
+
+ def test_wic_cp_ext(self):
+ """Test copy files and directories to the ext partition."""
+ runCmd("wic create wictestdisk "
+ "--image-name=core-image-minimal "
+ "-D -o %s" % self.resultdir)
+ images = glob(self.resultdir + "wictestdisk-*.direct")
+ self.assertEqual(1, len(images))
+
+ sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
+
+ # list directory content of the ext4 partition
+ result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot))
+ dirs = set(line.split()[-1] for line in result.output.split('\n') if line)
+ self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset(dirs))
+
+ with NamedTemporaryFile("w", suffix=".wic-cp") as testfile:
+            testfile.write("test")
+            # flush so the data is on disk before wic cp copies the file
+            testfile.flush()
+
+ # copy file to the partition
+ runCmd("wic cp %s %s:2/ -n %s" % (testfile.name, images[0], sysroot))
+
+ # check if file is there
+ result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot))
+ newdirs = set(line.split()[-1] for line in result.output.split('\n') if line)
+ self.assertEqual(newdirs.difference(dirs), set([os.path.basename(testfile.name)]))
+
+ # check if the file to copy is in the partition
+ result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot))
+ self.assertTrue('fstab' in [line.split()[-1] for line in result.output.split('\n') if line])
+
+            # copy a file back from the partition, replacing the temporary file's content,
+            # and check the file size to validate the copy
+ runCmd("wic cp %s:2/etc/fstab %s -n %s" % (images[0], testfile.name, sysroot))
+ self.assertTrue(os.stat(testfile.name).st_size > 0)
+
+
+ def test_wic_rm_ext(self):
+ """Test removing files from the ext partition."""
+ runCmd("wic create mkefidisk "
+ "--image-name=core-image-minimal "
+ "-D -o %s" % self.resultdir)
+ images = glob(self.resultdir + "mkefidisk-*.direct")
+ self.assertEqual(1, len(images))
+
+ sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
+
+ # list directory content of the /etc directory on ext4 partition
+ result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot))
+ self.assertTrue('fstab' in [line.split()[-1] for line in result.output.split('\n') if line])
+
+ # remove file
+ runCmd("wic rm %s:2/etc/fstab -n %s" % (images[0], sysroot))
+
+ # check if it's removed
+ result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot))
+ self.assertTrue('fstab' not in [line.split()[-1] for line in result.output.split('\n') if line])
+
+ # remove non-empty directory
+ runCmd("wic rm -r %s:2/etc/ -n %s" % (images[0], sysroot))
+
+ # check if it's removed
+ result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot))
+ self.assertTrue('etc' not in [line.split()[-1] for line in result.output.split('\n') if line])