From 7c552996597faaee2fbee185b250c0ee30ea3b5f Mon Sep 17 00:00:00 2001 From: Joshua Lock Date: Wed, 14 Dec 2016 21:13:04 +0000 Subject: meta: remove True option to getVar calls getVar() now defaults to expanding by default, thus remove the True option from getVar() calls with a regex search and replace. Search made with the following regex: getVar ?\(( ?[^,()]*), True\) Signed-off-by: Joshua Lock Signed-off-by: Ross Burton --- meta/lib/buildstats.py | 4 +- meta/lib/oe/classextend.py | 12 +-- meta/lib/oe/copy_buildsystem.py | 12 +-- meta/lib/oe/data.py | 2 +- meta/lib/oe/distro_check.py | 20 ++-- meta/lib/oe/gpg_sign.py | 4 +- meta/lib/oe/manifest.py | 26 ++--- meta/lib/oe/package.py | 2 +- meta/lib/oe/package_manager.py | 174 +++++++++++++++---------------- meta/lib/oe/packagedata.py | 2 +- meta/lib/oe/packagegroup.py | 6 +- meta/lib/oe/patch.py | 30 +++--- meta/lib/oe/path.py | 2 +- meta/lib/oe/prservice.py | 26 ++--- meta/lib/oe/qa.py | 4 +- meta/lib/oe/recipeutils.py | 38 +++---- meta/lib/oe/rootfs.py | 114 ++++++++++---------- meta/lib/oe/sdk.py | 76 +++++++------- meta/lib/oe/sstatesig.py | 30 +++--- meta/lib/oe/terminal.py | 2 +- meta/lib/oe/utils.py | 30 +++--- meta/lib/oeqa/controllers/masterimage.py | 16 +-- meta/lib/oeqa/oetest.py | 60 +++++------ meta/lib/oeqa/runexported.py | 8 +- meta/lib/oeqa/runtime/_ptest.py | 16 +-- meta/lib/oeqa/runtime/date.py | 4 +- meta/lib/oeqa/runtime/multilib.py | 2 +- meta/lib/oeqa/runtime/parselogs.py | 4 +- meta/lib/oeqa/runtime/rpm.py | 6 +- meta/lib/oeqa/runtime/scp.py | 2 +- meta/lib/oeqa/runtime/smart.py | 18 ++-- meta/lib/oeqa/runtime/systemd.py | 2 +- meta/lib/oeqa/runtime/x32lib.py | 2 +- meta/lib/oeqa/sdk/gcc.py | 2 +- meta/lib/oeqa/selftest/tinfoil.py | 16 +-- meta/lib/oeqa/targetcontrol.py | 52 ++++----- meta/lib/oeqa/utils/commands.py | 2 +- meta/lib/oeqa/utils/dump.py | 4 +- meta/lib/oeqa/utils/package_manager.py | 12 +-- meta/lib/oeqa/utils/targetbuild.py | 8 +- meta/lib/oeqa/utils/testexport.py | 14 +-- 41 files changed, 433 insertions(+), 433 deletions(-) (limited to 'meta/lib') diff --git a/meta/lib/buildstats.py b/meta/lib/buildstats.py index 854c38721f..c5d4c73cf5 100644 --- a/meta/lib/buildstats.py +++ b/meta/lib/buildstats.py @@ -8,8 +8,8 @@ import bb.event class SystemStats: def __init__(self, d): - bn = d.getVar('BUILDNAME', True) - bsdir = os.path.join(d.getVar('BUILDSTATS_BASE', True), bn) + bn = d.getVar('BUILDNAME') + bsdir = os.path.join(d.getVar('BUILDSTATS_BASE'), bn) bb.utils.mkdirhier(bsdir) self.proc_files = [] diff --git a/meta/lib/oe/classextend.py b/meta/lib/oe/classextend.py index 4c8a00070c..d2eeaf0e5c 100644 --- a/meta/lib/oe/classextend.py +++ b/meta/lib/oe/classextend.py @@ -25,7 +25,7 @@ class ClassExtender(object): return name def map_variable(self, varname, setvar = True): - var = self.d.getVar(varname, True) + var = self.d.getVar(varname) if not var: return "" var = var.split() @@ -38,7 +38,7 @@ class ClassExtender(object): return newdata def map_regexp_variable(self, varname, setvar = True): - var = self.d.getVar(varname, True) + var = self.d.getVar(varname) if not var: return "" var = var.split() @@ -60,7 +60,7 @@ class ClassExtender(object): return dep else: # Do not extend for that already have multilib prefix - var = self.d.getVar("MULTILIB_VARIANTS", True) + var = self.d.getVar("MULTILIB_VARIANTS") if var: var = var.split() for v in var: @@ -74,7 +74,7 @@ class ClassExtender(object): varname = varname + "_" + suffix orig = self.d.getVar("EXTENDPKGV", False) self.d.setVar("EXTENDPKGV", 
"EXTENDPKGV") - deps = self.d.getVar(varname, True) + deps = self.d.getVar(varname) if not deps: self.d.setVar("EXTENDPKGV", orig) return @@ -87,7 +87,7 @@ class ClassExtender(object): self.d.setVar("EXTENDPKGV", orig) def map_packagevars(self): - for pkg in (self.d.getVar("PACKAGES", True).split() + [""]): + for pkg in (self.d.getVar("PACKAGES").split() + [""]): self.map_depends_variable("RDEPENDS", pkg) self.map_depends_variable("RRECOMMENDS", pkg) self.map_depends_variable("RSUGGESTS", pkg) @@ -97,7 +97,7 @@ class ClassExtender(object): self.map_depends_variable("PKG", pkg) def rename_packages(self): - for pkg in (self.d.getVar("PACKAGES", True) or "").split(): + for pkg in (self.d.getVar("PACKAGES") or "").split(): if pkg.startswith(self.extname): self.pkgs_mapping.append([pkg.split(self.extname + "-")[1], pkg]) continue diff --git a/meta/lib/oe/copy_buildsystem.py b/meta/lib/oe/copy_buildsystem.py index 29ac6d418f..a372904183 100644 --- a/meta/lib/oe/copy_buildsystem.py +++ b/meta/lib/oe/copy_buildsystem.py @@ -21,8 +21,8 @@ class BuildSystem(object): def __init__(self, context, d): self.d = d self.context = context - self.layerdirs = [os.path.abspath(pth) for pth in d.getVar('BBLAYERS', True).split()] - self.layers_exclude = (d.getVar('SDK_LAYERS_EXCLUDE', True) or "").split() + self.layerdirs = [os.path.abspath(pth) for pth in d.getVar('BBLAYERS').split()] + self.layers_exclude = (d.getVar('SDK_LAYERS_EXCLUDE') or "").split() def copy_bitbake_and_layers(self, destdir, workspace_name=None): # Copy in all metadata layers + bitbake (as repositories) @@ -30,7 +30,7 @@ class BuildSystem(object): bb.utils.mkdirhier(destdir) layers = list(self.layerdirs) - corebase = os.path.abspath(self.d.getVar('COREBASE', True)) + corebase = os.path.abspath(self.d.getVar('COREBASE')) layers.append(corebase) # Exclude layers @@ -46,7 +46,7 @@ class BuildSystem(object): extranum += 1 workspace_newname = '%s-%d' % (workspace_name, extranum) - corebase_files = self.d.getVar('COREBASE_FILES', True).split() + corebase_files = self.d.getVar('COREBASE_FILES').split() corebase_files = [corebase + '/' +x for x in corebase_files] # Make sure bitbake goes in bitbake_dir = bb.__file__.rsplit('/', 3)[0] @@ -100,7 +100,7 @@ class BuildSystem(object): # Drop all bbappends except the one for the image the SDK is being built for # (because of externalsrc, the workspace bbappends will interfere with the # locked signatures if present, and we don't need them anyway) - image_bbappend = os.path.splitext(os.path.basename(self.d.getVar('FILE', True)))[0] + '.bbappend' + image_bbappend = os.path.splitext(os.path.basename(self.d.getVar('FILE')))[0] + '.bbappend' appenddir = os.path.join(layerdestpath, 'appends') if os.path.isdir(appenddir): for fn in os.listdir(appenddir): @@ -208,7 +208,7 @@ def create_locked_sstate_cache(lockedsigs, input_sstate_cache, output_sstate_cac import shutil bb.note('Generating sstate-cache...') - nativelsbstring = d.getVar('NATIVELSBSTRING', True) + nativelsbstring = d.getVar('NATIVELSBSTRING') bb.process.run("gen-lockedsig-cache %s %s %s %s %s" % (lockedsigs, input_sstate_cache, output_sstate_cache, nativelsbstring, filterfile or '')) if fixedlsbstring and nativelsbstring != fixedlsbstring: nativedir = output_sstate_cache + '/' + nativelsbstring diff --git a/meta/lib/oe/data.py b/meta/lib/oe/data.py index ee48950a82..032f68a847 100644 --- a/meta/lib/oe/data.py +++ b/meta/lib/oe/data.py @@ -12,6 +12,6 @@ def typed_value(key, d): flags = {} try: - return oe.maketype.create(d.getVar(key, True) or '', 
var_type, **flags) + return oe.maketype.create(d.getVar(key) or '', var_type, **flags) except (TypeError, ValueError) as exc: bb.msg.fatal("Data", "%s: %s" % (key, str(exc))) diff --git a/meta/lib/oe/distro_check.py b/meta/lib/oe/distro_check.py index c666ddc257..f54f4bb67d 100644 --- a/meta/lib/oe/distro_check.py +++ b/meta/lib/oe/distro_check.py @@ -224,37 +224,37 @@ def compare_in_distro_packages_list(distro_check_dir, d): localdata = bb.data.createCopy(d) pkglst_dir = os.path.join(distro_check_dir, "package_lists") matching_distros = [] - pn = recipe_name = d.getVar('PN', True) + pn = recipe_name = d.getVar('PN') bb.note("Checking: %s" % pn) if pn.find("-native") != -1: pnstripped = pn.split("-native") - localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) + localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) bb.data.update_data(localdata) recipe_name = pnstripped[0] if pn.startswith("nativesdk-"): pnstripped = pn.split("nativesdk-") - localdata.setVar('OVERRIDES', "pn-" + pnstripped[1] + ":" + d.getVar('OVERRIDES', True)) + localdata.setVar('OVERRIDES', "pn-" + pnstripped[1] + ":" + d.getVar('OVERRIDES')) bb.data.update_data(localdata) recipe_name = pnstripped[1] if pn.find("-cross") != -1: pnstripped = pn.split("-cross") - localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) + localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) bb.data.update_data(localdata) recipe_name = pnstripped[0] if pn.find("-initial") != -1: pnstripped = pn.split("-initial") - localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) + localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) bb.data.update_data(localdata) recipe_name = pnstripped[0] bb.note("Recipe: %s" % recipe_name) distro_exceptions = dict({"OE-Core":'OE-Core', "OpenedHand":'OpenedHand', "Intel":'Intel', "Upstream":'Upstream', "Windriver":'Windriver', "OSPDT":'OSPDT Approved', "Poky":'poky'}) - tmp = localdata.getVar('DISTRO_PN_ALIAS', True) or "" + tmp = localdata.getVar('DISTRO_PN_ALIAS') or "" for str in tmp.split(): if str and str.find("=") == -1 and distro_exceptions[str]: matching_distros.append(str) @@ -286,10 +286,10 @@ def compare_in_distro_packages_list(distro_check_dir, d): return matching_distros def create_log_file(d, logname): - logpath = d.getVar('LOG_DIR', True) + logpath = d.getVar('LOG_DIR') bb.utils.mkdirhier(logpath) logfn, logsuffix = os.path.splitext(logname) - logfile = os.path.join(logpath, "%s.%s%s" % (logfn, d.getVar('DATETIME', True), logsuffix)) + logfile = os.path.join(logpath, "%s.%s%s" % (logfn, d.getVar('DATETIME'), logsuffix)) if not os.path.exists(logfile): slogfile = os.path.join(logpath, logname) if os.path.exists(slogfile): @@ -301,8 +301,8 @@ def create_log_file(d, logname): def save_distro_check_result(result, datetime, result_file, d): - pn = d.getVar('PN', True) - logdir = d.getVar('LOG_DIR', True) + pn = d.getVar('PN') + logdir = d.getVar('LOG_DIR') if not logdir: bb.error("LOG_DIR variable is not defined, can't write the distro_check results") return diff --git a/meta/lib/oe/gpg_sign.py b/meta/lib/oe/gpg_sign.py index 38eb0cb137..dcd1990930 100644 --- a/meta/lib/oe/gpg_sign.py +++ b/meta/lib/oe/gpg_sign.py @@ -7,9 +7,9 @@ import oe.utils class LocalSigner(object): """Class for handling local (on the build host) signing""" def __init__(self, d): - self.gpg_bin = d.getVar('GPG_BIN', True) or \ + self.gpg_bin = 
d.getVar('GPG_BIN') or \ bb.utils.which(os.getenv('PATH'), 'gpg') - self.gpg_path = d.getVar('GPG_PATH', True) + self.gpg_path = d.getVar('GPG_PATH') self.rpm_bin = bb.utils.which(os.getenv('PATH'), "rpm") def export_pubkey(self, output_file, keyid, armor=True): diff --git a/meta/lib/oe/manifest.py b/meta/lib/oe/manifest.py index 95f8eb2df3..6ec9b1af8b 100644 --- a/meta/lib/oe/manifest.py +++ b/meta/lib/oe/manifest.py @@ -59,9 +59,9 @@ class Manifest(object, metaclass=ABCMeta): if manifest_dir is None: if manifest_type != self.MANIFEST_TYPE_IMAGE: - self.manifest_dir = self.d.getVar('SDK_DIR', True) + self.manifest_dir = self.d.getVar('SDK_DIR') else: - self.manifest_dir = self.d.getVar('WORKDIR', True) + self.manifest_dir = self.d.getVar('WORKDIR') else: self.manifest_dir = manifest_dir @@ -82,7 +82,7 @@ class Manifest(object, metaclass=ABCMeta): This will be used for testing until the class is implemented properly! """ def _create_dummy_initial(self): - image_rootfs = self.d.getVar('IMAGE_ROOTFS', True) + image_rootfs = self.d.getVar('IMAGE_ROOTFS') pkg_list = dict() if image_rootfs.find("core-image-sato-sdk") > 0: pkg_list[self.PKG_TYPE_MUST_INSTALL] = \ @@ -195,7 +195,7 @@ class RpmManifest(Manifest): for pkg in pkg_list.split(): pkg_type = self.PKG_TYPE_MUST_INSTALL - ml_variants = self.d.getVar('MULTILIB_VARIANTS', True).split() + ml_variants = self.d.getVar('MULTILIB_VARIANTS').split() for ml_variant in ml_variants: if pkg.startswith(ml_variant + '-'): @@ -216,13 +216,13 @@ class RpmManifest(Manifest): for var in self.var_maps[self.manifest_type]: if var in self.vars_to_split: - split_pkgs = self._split_multilib(self.d.getVar(var, True)) + split_pkgs = self._split_multilib(self.d.getVar(var)) if split_pkgs is not None: pkgs = dict(list(pkgs.items()) + list(split_pkgs.items())) else: - pkg_list = self.d.getVar(var, True) + pkg_list = self.d.getVar(var) if pkg_list is not None: - pkgs[self.var_maps[self.manifest_type][var]] = self.d.getVar(var, True) + pkgs[self.var_maps[self.manifest_type][var]] = self.d.getVar(var) for pkg_type in pkgs: for pkg in pkgs[pkg_type].split(): @@ -245,7 +245,7 @@ class OpkgManifest(Manifest): for pkg in pkg_list.split(): pkg_type = self.PKG_TYPE_MUST_INSTALL - ml_variants = self.d.getVar('MULTILIB_VARIANTS', True).split() + ml_variants = self.d.getVar('MULTILIB_VARIANTS').split() for ml_variant in ml_variants: if pkg.startswith(ml_variant + '-'): @@ -266,13 +266,13 @@ class OpkgManifest(Manifest): for var in self.var_maps[self.manifest_type]: if var in self.vars_to_split: - split_pkgs = self._split_multilib(self.d.getVar(var, True)) + split_pkgs = self._split_multilib(self.d.getVar(var)) if split_pkgs is not None: pkgs = dict(list(pkgs.items()) + list(split_pkgs.items())) else: - pkg_list = self.d.getVar(var, True) + pkg_list = self.d.getVar(var) if pkg_list is not None: - pkgs[self.var_maps[self.manifest_type][var]] = self.d.getVar(var, True) + pkgs[self.var_maps[self.manifest_type][var]] = self.d.getVar(var) for pkg_type in pkgs: for pkg in pkgs[pkg_type].split(): @@ -310,7 +310,7 @@ class DpkgManifest(Manifest): manifest.write(self.initial_manifest_file_header) for var in self.var_maps[self.manifest_type]: - pkg_list = self.d.getVar(var, True) + pkg_list = self.d.getVar(var) if pkg_list is None: continue @@ -332,7 +332,7 @@ def create_manifest(d, final_manifest=False, manifest_dir=None, 'ipk': OpkgManifest, 'deb': DpkgManifest} - manifest = manifest_map[d.getVar('IMAGE_PKGTYPE', True)](d, manifest_dir, manifest_type) + manifest = 
manifest_map[d.getVar('IMAGE_PKGTYPE')](d, manifest_dir, manifest_type) if final_manifest: manifest.create_final() diff --git a/meta/lib/oe/package.py b/meta/lib/oe/package.py index ae60a5843e..795389517f 100644 --- a/meta/lib/oe/package.py +++ b/meta/lib/oe/package.py @@ -104,7 +104,7 @@ def read_shlib_providers(d): import re shlib_provider = {} - shlibs_dirs = d.getVar('SHLIBSDIRS', True).split() + shlibs_dirs = d.getVar('SHLIBSDIRS').split() list_re = re.compile('^(.*)\.list$') # Go from least to most specific since the last one found wins for dir in reversed(shlibs_dirs): diff --git a/meta/lib/oe/package_manager.py b/meta/lib/oe/package_manager.py index e5e3c3b679..bb458691e3 100644 --- a/meta/lib/oe/package_manager.py +++ b/meta/lib/oe/package_manager.py @@ -107,16 +107,16 @@ class RpmIndexer(Indexer): target_os = collections.OrderedDict() if arch_var is not None and os_var is not None: - package_archs['default'] = self.d.getVar(arch_var, True).split() + package_archs['default'] = self.d.getVar(arch_var).split() package_archs['default'].reverse() - target_os['default'] = self.d.getVar(os_var, True).strip() + target_os['default'] = self.d.getVar(os_var).strip() else: - package_archs['default'] = self.d.getVar("PACKAGE_ARCHS", True).split() + package_archs['default'] = self.d.getVar("PACKAGE_ARCHS").split() # arch order is reversed. This ensures the -best- match is # listed first! package_archs['default'].reverse() - target_os['default'] = self.d.getVar("TARGET_OS", True).strip() - multilibs = self.d.getVar('MULTILIBS', True) or "" + target_os['default'] = self.d.getVar("TARGET_OS").strip() + multilibs = self.d.getVar('MULTILIBS') or "" for ext in multilibs.split(): eext = ext.split(':') if len(eext) > 1 and eext[0] == 'multilib': @@ -150,8 +150,8 @@ class RpmIndexer(Indexer): return (ml_prefix_list, target_os) def write_index(self): - sdk_pkg_archs = (self.d.getVar('SDK_PACKAGE_ARCHS', True) or "").replace('-', '_').split() - all_mlb_pkg_archs = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").replace('-', '_').split() + sdk_pkg_archs = (self.d.getVar('SDK_PACKAGE_ARCHS') or "").replace('-', '_').split() + all_mlb_pkg_archs = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or "").replace('-', '_').split() mlb_prefix_list = self.get_ml_prefix_and_os_list()[0] @@ -165,15 +165,15 @@ class RpmIndexer(Indexer): archs = archs.union(set(sdk_pkg_archs)) rpm_createrepo = bb.utils.which(os.getenv('PATH'), "createrepo") - if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1': - signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND', True)) + if self.d.getVar('PACKAGE_FEED_SIGN') == '1': + signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND')) else: signer = None index_cmds = [] repomd_files = [] rpm_dirs_found = False for arch in archs: - dbpath = os.path.join(self.d.getVar('WORKDIR', True), 'rpmdb', arch) + dbpath = os.path.join(self.d.getVar('WORKDIR'), 'rpmdb', arch) if os.path.exists(dbpath): bb.utils.remove(dbpath, True) arch_dir = os.path.join(self.deploy_dir, arch) @@ -197,11 +197,11 @@ class RpmIndexer(Indexer): # Sign repomd if signer: for repomd in repomd_files: - feed_sig_type = self.d.getVar('PACKAGE_FEED_GPG_SIGNATURE_TYPE', True) + feed_sig_type = self.d.getVar('PACKAGE_FEED_GPG_SIGNATURE_TYPE') is_ascii_sig = (feed_sig_type.upper() != "BIN") signer.detach_sign(repomd, - self.d.getVar('PACKAGE_FEED_GPG_NAME', True), - self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE', True), + self.d.getVar('PACKAGE_FEED_GPG_NAME'), + 
self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE'), armor=is_ascii_sig) @@ -212,8 +212,8 @@ class OpkgIndexer(Indexer): "MULTILIB_ARCHS"] opkg_index_cmd = bb.utils.which(os.getenv('PATH'), "opkg-make-index") - if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1': - signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND', True)) + if self.d.getVar('PACKAGE_FEED_SIGN') == '1': + signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND')) else: signer = None @@ -223,7 +223,7 @@ class OpkgIndexer(Indexer): index_cmds = set() index_sign_files = set() for arch_var in arch_vars: - archs = self.d.getVar(arch_var, True) + archs = self.d.getVar(arch_var) if archs is None: continue @@ -251,12 +251,12 @@ class OpkgIndexer(Indexer): bb.fatal('%s' % ('\n'.join(result))) if signer: - feed_sig_type = self.d.getVar('PACKAGE_FEED_GPG_SIGNATURE_TYPE', True) + feed_sig_type = self.d.getVar('PACKAGE_FEED_GPG_SIGNATURE_TYPE') is_ascii_sig = (feed_sig_type.upper() != "BIN") for f in index_sign_files: signer.detach_sign(f, - self.d.getVar('PACKAGE_FEED_GPG_NAME', True), - self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE', True), + self.d.getVar('PACKAGE_FEED_GPG_NAME'), + self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE'), armor=is_ascii_sig) @@ -290,16 +290,16 @@ class DpkgIndexer(Indexer): os.environ['APT_CONFIG'] = self.apt_conf_file - pkg_archs = self.d.getVar('PACKAGE_ARCHS', True) + pkg_archs = self.d.getVar('PACKAGE_ARCHS') if pkg_archs is not None: arch_list = pkg_archs.split() - sdk_pkg_archs = self.d.getVar('SDK_PACKAGE_ARCHS', True) + sdk_pkg_archs = self.d.getVar('SDK_PACKAGE_ARCHS') if sdk_pkg_archs is not None: for a in sdk_pkg_archs.split(): if a not in pkg_archs: arch_list.append(a) - all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").split() + all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or "").split() arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in arch_list) apt_ftparchive = bb.utils.which(os.getenv('PATH'), "apt-ftparchive") @@ -332,7 +332,7 @@ class DpkgIndexer(Indexer): result = oe.utils.multiprocess_exec(index_cmds, create_index) if result: bb.fatal('%s' % ('\n'.join(result))) - if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1': + if self.d.getVar('PACKAGE_FEED_SIGN') == '1': raise NotImplementedError('Package feed signing not implementd for dpkg') @@ -386,7 +386,7 @@ class RpmPkgsList(PkgsList): # Workaround for bug 3565. Simply look to see if we # know of a package with that name, if not try again! 
- filename = os.path.join(self.d.getVar('PKGDATA_DIR', True), + filename = os.path.join(self.d.getVar('PKGDATA_DIR'), 'runtime-reverse', new_pkg) if os.path.exists(filename): @@ -464,7 +464,7 @@ class OpkgPkgsList(PkgsList): self.opkg_cmd = bb.utils.which(os.getenv('PATH'), "opkg") self.opkg_args = "-f %s -o %s " % (config_file, rootfs_dir) - self.opkg_args += self.d.getVar("OPKG_ARGS", True) + self.opkg_args += self.d.getVar("OPKG_ARGS") def list_pkgs(self, format=None): cmd = "%s %s status" % (self.opkg_cmd, self.opkg_args) @@ -512,9 +512,9 @@ class PackageManager(object, metaclass=ABCMeta): self.d = d self.deploy_dir = None self.deploy_lock = None - self.feed_uris = self.d.getVar('PACKAGE_FEED_URIS', True) or "" - self.feed_base_paths = self.d.getVar('PACKAGE_FEED_BASE_PATHS', True) or "" - self.feed_archs = self.d.getVar('PACKAGE_FEED_ARCHS', True) + self.feed_uris = self.d.getVar('PACKAGE_FEED_URIS') or "" + self.feed_base_paths = self.d.getVar('PACKAGE_FEED_BASE_PATHS') or "" + self.feed_archs = self.d.getVar('PACKAGE_FEED_ARCHS') """ Update the package manager package database. @@ -568,7 +568,7 @@ class PackageManager(object, metaclass=ABCMeta): def install_complementary(self, globs=None): # we need to write the list of installed packages to a file because the # oe-pkgdata-util reads it from a file - installed_pkgs_file = os.path.join(self.d.getVar('WORKDIR', True), + installed_pkgs_file = os.path.join(self.d.getVar('WORKDIR'), "installed_pkgs.txt") with open(installed_pkgs_file, "w+") as installed_pkgs: pkgs = self.list_installed() @@ -576,10 +576,10 @@ class PackageManager(object, metaclass=ABCMeta): installed_pkgs.write(output) if globs is None: - globs = self.d.getVar('IMAGE_INSTALL_COMPLEMENTARY', True) + globs = self.d.getVar('IMAGE_INSTALL_COMPLEMENTARY') split_linguas = set() - for translation in self.d.getVar('IMAGE_LINGUAS', True).split(): + for translation in self.d.getVar('IMAGE_LINGUAS').split(): split_linguas.add(translation) split_linguas.add(translation.split('-')[0]) @@ -592,9 +592,9 @@ class PackageManager(object, metaclass=ABCMeta): return cmd = [bb.utils.which(os.getenv('PATH'), "oe-pkgdata-util"), - "-p", self.d.getVar('PKGDATA_DIR', True), "glob", installed_pkgs_file, + "-p", self.d.getVar('PKGDATA_DIR'), "glob", installed_pkgs_file, globs] - exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY', True) + exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY') if exclude: cmd.extend(['--exclude=' + '|'.join(exclude.split())]) try: @@ -659,7 +659,7 @@ class RpmPM(PackageManager): self.task_name = task_name self.providename = providename self.fullpkglist = list() - self.deploy_dir = self.d.getVar('DEPLOY_DIR_RPM', True) + self.deploy_dir = self.d.getVar('DEPLOY_DIR_RPM') self.etcrpm_dir = os.path.join(self.target_rootfs, "etc/rpm") self.install_dir_name = "oe_install" self.install_dir_path = os.path.join(self.target_rootfs, self.install_dir_name) @@ -669,7 +669,7 @@ class RpmPM(PackageManager): # 1 = --log-level=info (includes information about executing scriptlets and their output) # 2 = --log-level=debug # 3 = --log-level=debug plus dumps of scriplet content and command invocation - self.debug_level = int(d.getVar('ROOTFS_RPM_DEBUG', True) or "0") + self.debug_level = int(d.getVar('ROOTFS_RPM_DEBUG') or "0") self.smart_opt = ["--log-level=%s" % ("warning" if self.debug_level == 0 else "info" if self.debug_level == 1 else @@ -684,7 +684,7 @@ class RpmPM(PackageManager): if not os.path.exists(self.d.expand('${T}/saved')): 
bb.utils.mkdirhier(self.d.expand('${T}/saved')) - packageindex_dir = os.path.join(self.d.getVar('WORKDIR', True), 'rpms') + packageindex_dir = os.path.join(self.d.getVar('WORKDIR'), 'rpms') self.indexer = RpmIndexer(self.d, packageindex_dir) self.pkgs_list = RpmPkgsList(self.d, self.target_rootfs, arch_var, os_var) @@ -702,7 +702,7 @@ class RpmPM(PackageManager): # List must be prefered to least preferred order default_platform_extra = list() platform_extra = list() - bbextendvariant = self.d.getVar('BBEXTENDVARIANT', True) or "" + bbextendvariant = self.d.getVar('BBEXTENDVARIANT') or "" for mlib in self.ml_os_list: for arch in self.ml_prefix_list[mlib]: plt = arch.replace('-', '_') + '-.*-' + self.ml_os_list[mlib] @@ -750,7 +750,7 @@ class RpmPM(PackageManager): Create configs for rpm and smart, and multilib is supported ''' def create_configs(self): - target_arch = self.d.getVar('TARGET_ARCH', True) + target_arch = self.d.getVar('TARGET_ARCH') platform = '%s%s-%s' % (target_arch.replace('-', '_'), self.target_vendor, self.ml_os_list['default']) @@ -758,7 +758,7 @@ class RpmPM(PackageManager): # List must be prefered to least preferred order default_platform_extra = list() platform_extra = list() - bbextendvariant = self.d.getVar('BBEXTENDVARIANT', True) or "" + bbextendvariant = self.d.getVar('BBEXTENDVARIANT') or "" for mlib in self.ml_os_list: for arch in self.ml_prefix_list[mlib]: plt = arch.replace('-', '_') + '-.*-' + self.ml_os_list[mlib] @@ -841,7 +841,7 @@ class RpmPM(PackageManager): if not new_pkg: # Failed to translate, package not found! err_msg = '%s not found in the %s feeds (%s) in %s.' % \ - (pkg, mlib, " ".join(feed_archs), self.d.getVar('DEPLOY_DIR_RPM', True)) + (pkg, mlib, " ".join(feed_archs), self.d.getVar('DEPLOY_DIR_RPM')) if not attempt_only: bb.error(err_msg) bb.fatal("This is often caused by an empty package declared " \ @@ -860,7 +860,7 @@ class RpmPM(PackageManager): new_pkg = self._search_pkg_name_in_feeds(pkg, default_archs) if not new_pkg: err_msg = '%s not found in the feeds (%s) in %s.' % \ - (pkg, " ".join(default_archs), self.d.getVar('DEPLOY_DIR_RPM', True)) + (pkg, " ".join(default_archs), self.d.getVar('DEPLOY_DIR_RPM')) if not attempt_only: bb.error(err_msg) bb.fatal("This is often caused by an empty package declared " \ @@ -887,7 +887,7 @@ class RpmPM(PackageManager): channel_priority = 5 platform_dir = os.path.join(self.etcrpm_dir, "platform") - sdkos = self.d.getVar("SDK_OS", True) + sdkos = self.d.getVar("SDK_OS") with open(platform_dir, "w+") as platform_fd: platform_fd.write(platform + '\n') for pt in platform_extra: @@ -957,8 +957,8 @@ class RpmPM(PackageManager): bb.fatal("Create rpm database failed. 
Command '%s' " "returned %d:\n%s" % (' '.join(cmd), e.returncode, e.output.decode("utf-8"))) # Import GPG key to RPM database of the target system - if self.d.getVar('RPM_SIGN_PACKAGES', True) == '1': - pubkey_path = self.d.getVar('RPM_GPG_PUBKEY', True) + if self.d.getVar('RPM_SIGN_PACKAGES') == '1': + pubkey_path = self.d.getVar('RPM_GPG_PUBKEY') cmd = [self.rpm_cmd, '--root', self.target_rootfs, '--dbpath', '/var/lib/rpm', '--import', pubkey_path] try: subprocess.check_output(cmd, stderr=subprocess.STDOUT) @@ -974,10 +974,10 @@ class RpmPM(PackageManager): self._invoke_smart(['config', '--set', 'rpm-root=%s' % self.target_rootfs]) self._invoke_smart(['config', '--set', 'rpm-dbpath=/var/lib/rpm']) self._invoke_smart(['config', '--set', 'rpm-extra-macros._var=%s' % - self.d.getVar('localstatedir', True)]) + self.d.getVar('localstatedir')]) cmd = ["config", "--set", "rpm-extra-macros._tmppath=/%s/tmp" % self.install_dir_name] - prefer_color = self.d.getVar('RPM_PREFER_ELF_ARCH', True) + prefer_color = self.d.getVar('RPM_PREFER_ELF_ARCH') if prefer_color: if prefer_color not in ['0', '1', '2', '4']: bb.fatal("Invalid RPM_PREFER_ELF_ARCH: %s, it should be one of:\n" @@ -985,7 +985,7 @@ class RpmPM(PackageManager): "\t2: ELF64 wins\n" "\t4: ELF64 N32 wins (mips64 or mips64el only)" % prefer_color) - if prefer_color == "4" and self.d.getVar("TUNE_ARCH", True) not in \ + if prefer_color == "4" and self.d.getVar("TUNE_ARCH") not in \ ['mips64', 'mips64el']: bb.fatal("RPM_PREFER_ELF_ARCH = \"4\" is for mips64 or mips64el " "only.") @@ -998,17 +998,17 @@ class RpmPM(PackageManager): # Write common configuration for host and target usage self._invoke_smart(['config', '--set', 'rpm-nolinktos=1']) self._invoke_smart(['config', '--set', 'rpm-noparentdirs=1']) - check_signature = self.d.getVar('RPM_CHECK_SIGNATURES', True) + check_signature = self.d.getVar('RPM_CHECK_SIGNATURES') if check_signature and check_signature.strip() == "0": self._invoke_smart(['config', '--set rpm-check-signatures=false']) - for i in self.d.getVar('BAD_RECOMMENDATIONS', True).split(): + for i in self.d.getVar('BAD_RECOMMENDATIONS').split(): self._invoke_smart(['flag', '--set', 'ignore-recommends', i]) # Do the following configurations here, to avoid them being # saved for field upgrade - if self.d.getVar('NO_RECOMMENDATIONS', True).strip() == "1": + if self.d.getVar('NO_RECOMMENDATIONS').strip() == "1": self._invoke_smart(['config', '--set', 'ignore-all-recommends=1']) - pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE', True) or "" + pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE') or "" for i in pkg_exclude.split(): self._invoke_smart(['flag', '--set', 'exclude-packages', i]) @@ -1019,13 +1019,13 @@ class RpmPM(PackageManager): ch_already_added = [] for canonical_arch in platform_extra: arch = canonical_arch.split('-')[0] - arch_channel = os.path.join(self.d.getVar('WORKDIR', True), 'rpms', arch) + arch_channel = os.path.join(self.d.getVar('WORKDIR'), 'rpms', arch) oe.path.remove(arch_channel) deploy_arch_dir = os.path.join(self.deploy_dir, arch) if not os.path.exists(deploy_arch_dir): continue - lockfilename = self.d.getVar('DEPLOY_DIR_RPM', True) + "/rpm.lock" + lockfilename = self.d.getVar('DEPLOY_DIR_RPM') + "/rpm.lock" lf = bb.utils.lockfile(lockfilename, False) oe.path.copyhardlinktree(deploy_arch_dir, arch_channel) bb.utils.unlockfile(lf) @@ -1096,7 +1096,7 @@ class RpmPM(PackageManager): "fi\n" intercept_dir = self.d.expand('${WORKDIR}/intercept_scripts') - native_root = self.d.getVar('STAGING_DIR_NATIVE', True) + 
native_root = self.d.getVar('STAGING_DIR_NATIVE') scriptlet_content = SCRIPTLET_FORMAT % (os.environ['PATH'], self.target_rootfs, intercept_dir, @@ -1170,7 +1170,7 @@ class RpmPM(PackageManager): ml_pkgs = [] non_ml_pkgs = pkgs[:] for pkg in pkgs: - for mlib in (self.d.getVar("MULTILIB_VARIANTS", True) or "").split(): + for mlib in (self.d.getVar("MULTILIB_VARIANTS") or "").split(): if pkg.startswith(mlib + '-'): ml_pkgs.append(pkg) non_ml_pkgs.remove(pkg) @@ -1184,7 +1184,7 @@ class RpmPM(PackageManager): # correctly. pkgs_new = [] for pkg in non_ml_pkgs: - for mlib in (self.d.getVar("MULTILIB_VARIANTS", True) or "").split(): + for mlib in (self.d.getVar("MULTILIB_VARIANTS") or "").split(): mlib_pkg = mlib + "-" + pkg if mlib_pkg in ml_pkgs: pkgs_new.append(pkg) @@ -1401,7 +1401,7 @@ class RpmPM(PackageManager): self._invoke_smart(['config', '--set', 'rpm-nolinktos=1']) self._invoke_smart(['config', '--set', 'rpm-noparentdirs=1']) - for i in self.d.getVar('BAD_RECOMMENDATIONS', True).split(): + for i in self.d.getVar('BAD_RECOMMENDATIONS').split(): self._invoke_smart(['flag', '--set', 'ignore-recommends', i]) self._invoke_smart(['channel', '--add', 'rpmsys', 'type=rpm-sys', '-y']) @@ -1575,13 +1575,13 @@ class OpkgPM(OpkgDpkgPM): self.pkg_archs = archs self.task_name = task_name - self.deploy_dir = self.d.getVar("DEPLOY_DIR_IPK", True) + self.deploy_dir = self.d.getVar("DEPLOY_DIR_IPK") self.deploy_lock_file = os.path.join(self.deploy_dir, "deploy.lock") self.opkg_cmd = bb.utils.which(os.getenv('PATH'), "opkg") self.opkg_args = "--volatile-cache -f %s -t %s -o %s " % (self.config_file, self.d.expand('${T}/ipktemp/') ,target_rootfs) - self.opkg_args += self.d.getVar("OPKG_ARGS", True) + self.opkg_args += self.d.getVar("OPKG_ARGS") - opkg_lib_dir = self.d.getVar('OPKGLIBDIR', True) + opkg_lib_dir = self.d.getVar('OPKGLIBDIR') if opkg_lib_dir[0] == "/": opkg_lib_dir = opkg_lib_dir[1:] @@ -1593,7 +1593,7 @@ class OpkgPM(OpkgDpkgPM): if not os.path.exists(self.d.expand('${T}/saved')): bb.utils.mkdirhier(self.d.expand('${T}/saved')) - self.from_feeds = (self.d.getVar('BUILD_IMAGES_FROM_FEEDS', True) or "") == "1" + self.from_feeds = (self.d.getVar('BUILD_IMAGES_FROM_FEEDS') or "") == "1" if self.from_feeds: self._create_custom_config() else: @@ -1638,7 +1638,7 @@ class OpkgPM(OpkgDpkgPM): config_file.write("arch %s %d\n" % (arch, priority)) priority += 5 - for line in (self.d.getVar('IPK_FEED_URIS', True) or "").split(): + for line in (self.d.getVar('IPK_FEED_URIS') or "").split(): feed_match = re.match("^[ \t]*(.*)##([^ \t]*)[ \t]*$", line) if feed_match is not None: @@ -1655,17 +1655,17 @@ class OpkgPM(OpkgDpkgPM): specified as compatible for the current machine. NOTE: Development-helper feature, NOT a full-fledged feed. 
""" - if (self.d.getVar('FEED_DEPLOYDIR_BASE_URI', True) or "") != "": + if (self.d.getVar('FEED_DEPLOYDIR_BASE_URI') or "") != "": for arch in self.pkg_archs.split(): cfg_file_name = os.path.join(self.target_rootfs, - self.d.getVar("sysconfdir", True), + self.d.getVar("sysconfdir"), "opkg", "local-%s-feed.conf" % arch) with open(cfg_file_name, "w+") as cfg_file: cfg_file.write("src/gz local-%s %s/%s" % (arch, - self.d.getVar('FEED_DEPLOYDIR_BASE_URI', True), + self.d.getVar('FEED_DEPLOYDIR_BASE_URI'), arch)) if self.opkg_dir != '/var/lib/opkg': @@ -1674,8 +1674,8 @@ class OpkgPM(OpkgDpkgPM): # the default value of "/var/lib" as defined in opkg: # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_INFO_DIR "/var/lib/opkg/info" # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_STATUS_FILE "/var/lib/opkg/status" - cfg_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'info')) - cfg_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'status')) + cfg_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'info')) + cfg_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'status')) def _create_config(self): @@ -1699,8 +1699,8 @@ class OpkgPM(OpkgDpkgPM): # the default value of "/var/lib" as defined in opkg: # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_INFO_DIR "/var/lib/opkg/info" # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_STATUS_FILE "/var/lib/opkg/status" - config_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'info')) - config_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'status')) + config_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'info')) + config_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'status')) def insert_feeds_uris(self): if self.feed_uris == "": @@ -1755,9 +1755,9 @@ class OpkgPM(OpkgDpkgPM): os.environ['OFFLINE_ROOT'] = self.target_rootfs os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs - os.environ['INTERCEPT_DIR'] = os.path.join(self.d.getVar('WORKDIR', True), + os.environ['INTERCEPT_DIR'] = os.path.join(self.d.getVar('WORKDIR'), "intercept_scripts") - os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE', True) + os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE') try: bb.note("Installing the following packages: %s" % ' '.join(pkgs)) @@ -1808,7 +1808,7 @@ class OpkgPM(OpkgDpkgPM): return OpkgPkgsList(self.d, self.target_rootfs, self.config_file).list_pkgs() def handle_bad_recommendations(self): - bad_recommendations = self.d.getVar("BAD_RECOMMENDATIONS", True) or "" + bad_recommendations = self.d.getVar("BAD_RECOMMENDATIONS") or "" if bad_recommendations.strip() == "": return @@ -1859,7 +1859,7 @@ class OpkgPM(OpkgDpkgPM): bb.utils.mkdirhier(temp_opkg_dir) opkg_args = "-f %s -o %s " % (self.config_file, temp_rootfs) - opkg_args += self.d.getVar("OPKG_ARGS", True) + opkg_args += self.d.getVar("OPKG_ARGS") cmd = "%s %s update" % (self.opkg_cmd, opkg_args) try: @@ -1935,7 +1935,7 @@ class DpkgPM(OpkgDpkgPM): def __init__(self, d, target_rootfs, archs, base_archs, apt_conf_dir=None): super(DpkgPM, self).__init__(d) self.target_rootfs = target_rootfs - self.deploy_dir = self.d.getVar('DEPLOY_DIR_DEB', True) + self.deploy_dir = self.d.getVar('DEPLOY_DIR_DEB') if 
apt_conf_dir is None: self.apt_conf_dir = self.d.expand("${APTCONF_TARGET}/apt") else: @@ -1944,10 +1944,10 @@ class DpkgPM(OpkgDpkgPM): self.apt_get_cmd = bb.utils.which(os.getenv('PATH'), "apt-get") self.apt_cache_cmd = bb.utils.which(os.getenv('PATH'), "apt-cache") - self.apt_args = d.getVar("APT_ARGS", True) + self.apt_args = d.getVar("APT_ARGS") self.all_arch_list = archs.split() - all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").split() + all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or "").split() self.all_arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in self.all_arch_list) self._create_configs(archs, base_archs) @@ -2008,9 +2008,9 @@ class DpkgPM(OpkgDpkgPM): os.environ['OFFLINE_ROOT'] = self.target_rootfs os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs - os.environ['INTERCEPT_DIR'] = os.path.join(self.d.getVar('WORKDIR', True), + os.environ['INTERCEPT_DIR'] = os.path.join(self.d.getVar('WORKDIR'), "intercept_scripts") - os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE', True) + os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE') failed_pkgs = [] for pkg_name in installed_pkgs: @@ -2161,7 +2161,7 @@ class DpkgPM(OpkgDpkgPM): priority += 5 - pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE', True) or "" + pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE') or "" for pkg in pkg_exclude.split(): prefs_file.write( "Package: %s\n" @@ -2176,14 +2176,14 @@ class DpkgPM(OpkgDpkgPM): os.path.join(self.deploy_dir, arch)) base_arch_list = base_archs.split() - multilib_variants = self.d.getVar("MULTILIB_VARIANTS", True); + multilib_variants = self.d.getVar("MULTILIB_VARIANTS"); for variant in multilib_variants.split(): localdata = bb.data.createCopy(self.d) variant_tune = localdata.getVar("DEFAULTTUNE_virtclass-multilib-" + variant, False) - orig_arch = localdata.getVar("DPKG_ARCH", True) + orig_arch = localdata.getVar("DPKG_ARCH") localdata.setVar("DEFAULTTUNE", variant_tune) bb.data.update_data(localdata) - variant_arch = localdata.getVar("DPKG_ARCH", True) + variant_arch = localdata.getVar("DPKG_ARCH") if variant_arch not in base_arch_list: base_arch_list.append(variant_arch) @@ -2214,7 +2214,7 @@ class DpkgPM(OpkgDpkgPM): def remove_packaging_data(self): bb.utils.remove(os.path.join(self.target_rootfs, - self.d.getVar('opkglibdir', True)), True) + self.d.getVar('opkglibdir')), True) bb.utils.remove(self.target_rootfs + "/var/lib/dpkg/", True) def fix_broken_dependencies(self): @@ -2262,12 +2262,12 @@ class DpkgPM(OpkgDpkgPM): return tmp_dir def generate_index_files(d): - classes = d.getVar('PACKAGE_CLASSES', True).replace("package_", "").split() + classes = d.getVar('PACKAGE_CLASSES').replace("package_", "").split() indexer_map = { - "rpm": (RpmIndexer, d.getVar('DEPLOY_DIR_RPM', True)), - "ipk": (OpkgIndexer, d.getVar('DEPLOY_DIR_IPK', True)), - "deb": (DpkgIndexer, d.getVar('DEPLOY_DIR_DEB', True)) + "rpm": (RpmIndexer, d.getVar('DEPLOY_DIR_RPM')), + "ipk": (OpkgIndexer, d.getVar('DEPLOY_DIR_IPK')), + "deb": (DpkgIndexer, d.getVar('DEPLOY_DIR_DEB')) } result = None diff --git a/meta/lib/oe/packagedata.py b/meta/lib/oe/packagedata.py index 21d4de914f..32e5c82a94 100644 --- a/meta/lib/oe/packagedata.py +++ b/meta/lib/oe/packagedata.py @@ -57,7 +57,7 @@ def read_subpkgdata_dict(pkg, d): def _pkgmap(d): """Return a dictionary mapping package to recipe name.""" - pkgdatadir = d.getVar("PKGDATA_DIR", True) + pkgdatadir = d.getVar("PKGDATA_DIR") 
pkgmap = {} try: diff --git a/meta/lib/oe/packagegroup.py b/meta/lib/oe/packagegroup.py index 97819279b7..d68e5d322b 100644 --- a/meta/lib/oe/packagegroup.py +++ b/meta/lib/oe/packagegroup.py @@ -1,7 +1,7 @@ import itertools def is_optional(feature, d): - packages = d.getVar("FEATURE_PACKAGES_%s" % feature, True) + packages = d.getVar("FEATURE_PACKAGES_%s" % feature) if packages: return bool(d.getVarFlag("FEATURE_PACKAGES_%s" % feature, "optional", True)) else: @@ -9,9 +9,9 @@ def is_optional(feature, d): def packages(features, d): for feature in features: - packages = d.getVar("FEATURE_PACKAGES_%s" % feature, True) + packages = d.getVar("FEATURE_PACKAGES_%s" % feature) if not packages: - packages = d.getVar("PACKAGE_GROUP_%s" % feature, True) + packages = d.getVar("PACKAGE_GROUP_%s" % feature) for pkg in (packages or "").split(): yield pkg diff --git a/meta/lib/oe/patch.py b/meta/lib/oe/patch.py index 456ee70f7d..95674b3706 100644 --- a/meta/lib/oe/patch.py +++ b/meta/lib/oe/patch.py @@ -281,8 +281,8 @@ class GitApplyTree(PatchTree): def __init__(self, dir, d): PatchTree.__init__(self, dir, d) - self.commituser = d.getVar('PATCH_GIT_USER_NAME', True) - self.commitemail = d.getVar('PATCH_GIT_USER_EMAIL', True) + self.commituser = d.getVar('PATCH_GIT_USER_NAME') + self.commitemail = d.getVar('PATCH_GIT_USER_EMAIL') @staticmethod def extractPatchHeader(patchfile): @@ -371,8 +371,8 @@ class GitApplyTree(PatchTree): @staticmethod def gitCommandUserOptions(cmd, commituser=None, commitemail=None, d=None): if d: - commituser = d.getVar('PATCH_GIT_USER_NAME', True) - commitemail = d.getVar('PATCH_GIT_USER_EMAIL', True) + commituser = d.getVar('PATCH_GIT_USER_NAME') + commitemail = d.getVar('PATCH_GIT_USER_EMAIL') if commituser: cmd += ['-c', 'user.name="%s"' % commituser] if commitemail: @@ -551,7 +551,7 @@ class GitApplyTree(PatchTree): class QuiltTree(PatchSet): def _runcmd(self, args, run = True): - quiltrc = self.d.getVar('QUILTRCFILE', True) + quiltrc = self.d.getVar('QUILTRCFILE') if not run: return ["quilt"] + ["--quiltrc"] + [quiltrc] + args runcmd(["quilt"] + ["--quiltrc"] + [quiltrc] + args, self.dir) @@ -727,7 +727,7 @@ class UserResolver(Resolver): # Patch application failed patchcmd = self.patchset.Push(True, False, False) - t = self.patchset.d.getVar('T', True) + t = self.patchset.d.getVar('T') if not t: bb.msg.fatal("Build", "T not set") bb.utils.mkdirhier(t) @@ -792,7 +792,7 @@ def patch_path(url, fetch, workdir, expand=True): return local def src_patches(d, all=False, expand=True): - workdir = d.getVar('WORKDIR', True) + workdir = d.getVar('WORKDIR') fetch = bb.fetch2.Fetch([], d) patches = [] sources = [] @@ -839,13 +839,13 @@ def src_patches(d, all=False, expand=True): def should_apply(parm, d): if "mindate" in parm or "maxdate" in parm: - pn = d.getVar('PN', True) - srcdate = d.getVar('SRCDATE_%s' % pn, True) + pn = d.getVar('PN') + srcdate = d.getVar('SRCDATE_%s' % pn) if not srcdate: - srcdate = d.getVar('SRCDATE', True) + srcdate = d.getVar('SRCDATE') if srcdate == "now": - srcdate = d.getVar('DATE', True) + srcdate = d.getVar('DATE') if "maxdate" in parm and parm["maxdate"] < srcdate: return False, 'is outdated' @@ -855,22 +855,22 @@ def should_apply(parm, d): if "minrev" in parm: - srcrev = d.getVar('SRCREV', True) + srcrev = d.getVar('SRCREV') if srcrev and srcrev < parm["minrev"]: return False, 'applies to later revisions' if "maxrev" in parm: - srcrev = d.getVar('SRCREV', True) + srcrev = d.getVar('SRCREV') if srcrev and srcrev > parm["maxrev"]: return False, 'applies 
to earlier revisions' if "rev" in parm: - srcrev = d.getVar('SRCREV', True) + srcrev = d.getVar('SRCREV') if srcrev and parm["rev"] not in srcrev: return False, "doesn't apply to revision" if "notrev" in parm: - srcrev = d.getVar('SRCREV', True) + srcrev = d.getVar('SRCREV') if srcrev and parm["notrev"] in srcrev: return False, "doesn't apply to revision" diff --git a/meta/lib/oe/path.py b/meta/lib/oe/path.py index f73fd4ac07..804ecd5fea 100644 --- a/meta/lib/oe/path.py +++ b/meta/lib/oe/path.py @@ -52,7 +52,7 @@ def make_relative_symlink(path): def format_display(path, metadata): """ Prepare a path for display to the user. """ - rel = relative(metadata.getVar("TOPDIR", True), path) + rel = relative(metadata.getVar("TOPDIR"), path) if len(rel) > len(path): return path else: diff --git a/meta/lib/oe/prservice.py b/meta/lib/oe/prservice.py index 0054f954cc..32dfc15e88 100644 --- a/meta/lib/oe/prservice.py +++ b/meta/lib/oe/prservice.py @@ -1,7 +1,7 @@ def prserv_make_conn(d, check = False): import prserv.serv - host_params = list([_f for _f in (d.getVar("PRSERV_HOST", True) or '').split(':') if _f]) + host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f]) try: conn = None conn = prserv.serv.PRServerConnection(host_params[0], int(host_params[1])) @@ -15,11 +15,11 @@ def prserv_make_conn(d, check = False): return conn def prserv_dump_db(d): - if not d.getVar('PRSERV_HOST', True): + if not d.getVar('PRSERV_HOST'): bb.error("Not using network based PR service") return None - conn = d.getVar("__PRSERV_CONN", True) + conn = d.getVar("__PRSERV_CONN") if conn is None: conn = prserv_make_conn(d) if conn is None: @@ -27,18 +27,18 @@ def prserv_dump_db(d): return None #dump db - opt_version = d.getVar('PRSERV_DUMPOPT_VERSION', True) - opt_pkgarch = d.getVar('PRSERV_DUMPOPT_PKGARCH', True) - opt_checksum = d.getVar('PRSERV_DUMPOPT_CHECKSUM', True) - opt_col = ("1" == d.getVar('PRSERV_DUMPOPT_COL', True)) + opt_version = d.getVar('PRSERV_DUMPOPT_VERSION') + opt_pkgarch = d.getVar('PRSERV_DUMPOPT_PKGARCH') + opt_checksum = d.getVar('PRSERV_DUMPOPT_CHECKSUM') + opt_col = ("1" == d.getVar('PRSERV_DUMPOPT_COL')) return conn.export(opt_version, opt_pkgarch, opt_checksum, opt_col) def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksum=None): - if not d.getVar('PRSERV_HOST', True): + if not d.getVar('PRSERV_HOST'): bb.error("Not using network based PR service") return None - conn = d.getVar("__PRSERV_CONN", True) + conn = d.getVar("__PRSERV_CONN") if conn is None: conn = prserv_make_conn(d) if conn is None: @@ -58,7 +58,7 @@ def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksu (filter_checksum and filter_checksum != checksum): continue try: - value = int(d.getVar(remain + '$' + version + '$' + pkgarch + '$' + checksum, True)) + value = int(d.getVar(remain + '$' + version + '$' + pkgarch + '$' + checksum)) except BaseException as exc: bb.debug("Not valid value of %s:%s" % (v,str(exc))) continue @@ -72,8 +72,8 @@ def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksu def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False): import bb.utils #initilize the output file - bb.utils.mkdirhier(d.getVar('PRSERV_DUMPDIR', True)) - df = d.getVar('PRSERV_DUMPFILE', True) + bb.utils.mkdirhier(d.getVar('PRSERV_DUMPDIR')) + df = d.getVar('PRSERV_DUMPFILE') #write data lf = bb.utils.lockfile("%s.lock" % df) f = open(df, "a") @@ -114,7 +114,7 @@ def prserv_export_tofile(d, metainfo, datainfo, lockdown, 
nomax=False): bb.utils.unlockfile(lf) def prserv_check_avail(d): - host_params = list([_f for _f in (d.getVar("PRSERV_HOST", True) or '').split(':') if _f]) + host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f]) try: if len(host_params) != 2: raise TypeError diff --git a/meta/lib/oe/qa.py b/meta/lib/oe/qa.py index 22d76dcbcd..3231e60cea 100644 --- a/meta/lib/oe/qa.py +++ b/meta/lib/oe/qa.py @@ -129,11 +129,11 @@ class ELFFile: if cmd in self.objdump_output: return self.objdump_output[cmd] - objdump = d.getVar('OBJDUMP', True) + objdump = d.getVar('OBJDUMP') env = os.environ.copy() env["LC_ALL"] = "C" - env["PATH"] = d.getVar('PATH', True) + env["PATH"] = d.getVar('PATH') try: bb.note("%s %s %s" % (objdump, cmd, self.name)) diff --git a/meta/lib/oe/recipeutils.py b/meta/lib/oe/recipeutils.py index 26c926f214..a7fdd36e40 100644 --- a/meta/lib/oe/recipeutils.py +++ b/meta/lib/oe/recipeutils.py @@ -328,16 +328,16 @@ def copy_recipe_files(d, tgt_dir, whole_dir=False, download=True): # FIXME need a warning if the unexpanded SRC_URI value contains variable references - uris = (d.getVar('SRC_URI', True) or "").split() + uris = (d.getVar('SRC_URI') or "").split() fetch = bb.fetch2.Fetch(uris, d) if download: fetch.download() # Copy local files to target directory and gather any remote files - bb_dir = os.path.dirname(d.getVar('FILE', True)) + os.sep + bb_dir = os.path.dirname(d.getVar('FILE')) + os.sep remotes = [] copied = [] - includes = [path for path in d.getVar('BBINCLUDED', True).split() if + includes = [path for path in d.getVar('BBINCLUDED').split() if path.startswith(bb_dir) and os.path.exists(path)] for path in fetch.localpaths() + includes: # Only import files that are under the meta directory @@ -361,7 +361,7 @@ def copy_recipe_files(d, tgt_dir, whole_dir=False, download=True): def get_recipe_local_files(d, patches=False, archives=False): """Get a list of local files in SRC_URI within a recipe.""" import oe.patch - uris = (d.getVar('SRC_URI', True) or "").split() + uris = (d.getVar('SRC_URI') or "").split() fetch = bb.fetch2.Fetch(uris, d) # FIXME this list should be factored out somewhere else (such as the # fetcher) though note that this only encompasses actual container formats @@ -421,7 +421,7 @@ def get_recipe_patched_files(d): for patch in patches: _, _, patchfile, _, _, parm = bb.fetch.decodeurl(patch) striplevel = int(parm['striplevel']) - patchedfiles[patchfile] = oe.patch.PatchSet.getPatchedFiles(patchfile, striplevel, os.path.join(d.getVar('S', True), parm.get('patchdir', ''))) + patchedfiles[patchfile] = oe.patch.PatchSet.getPatchedFiles(patchfile, striplevel, os.path.join(d.getVar('S'), parm.get('patchdir', ''))) return patchedfiles @@ -459,9 +459,9 @@ def get_bbfile_path(d, destdir, extrapathhint=None): confdata.setVar('LAYERDIR', destlayerdir) destlayerconf = os.path.join(destlayerdir, "conf", "layer.conf") confdata = bb.cookerdata.parse_config_file(destlayerconf, confdata) - pn = d.getVar('PN', True) + pn = d.getVar('PN') - bbfilespecs = (confdata.getVar('BBFILES', True) or '').split() + bbfilespecs = (confdata.getVar('BBFILES') or '').split() if destdir == destlayerdir: for bbfilespec in bbfilespecs: if not bbfilespec.endswith('.bbappend'): @@ -474,8 +474,8 @@ def get_bbfile_path(d, destdir, extrapathhint=None): # Try to make up a path that matches BBFILES # this is a little crude, but better than nothing - bpn = d.getVar('BPN', True) - recipefn = os.path.basename(d.getVar('FILE', True)) + bpn = d.getVar('BPN') + recipefn = 
os.path.basename(d.getVar('FILE')) pathoptions = [destdir] if extrapathhint: pathoptions.append(os.path.join(destdir, extrapathhint)) @@ -499,7 +499,7 @@ def get_bbappend_path(d, destlayerdir, wildcardver=False): import bb.cookerdata destlayerdir = os.path.abspath(destlayerdir) - recipefile = d.getVar('FILE', True) + recipefile = d.getVar('FILE') recipefn = os.path.splitext(os.path.basename(recipefile))[0] if wildcardver and '_' in recipefn: recipefn = recipefn.split('_', 1)[0] + '_%' @@ -519,7 +519,7 @@ def get_bbappend_path(d, destlayerdir, wildcardver=False): appendpath = os.path.join(destlayerdir, os.path.relpath(os.path.dirname(recipefile), origlayerdir), appendfn) closepath = '' pathok = True - for bbfilespec in confdata.getVar('BBFILES', True).split(): + for bbfilespec in confdata.getVar('BBFILES').split(): if fnmatch.fnmatchcase(appendpath, bbfilespec): # Our append path works, we're done break @@ -592,7 +592,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, # FIXME check if the bbappend doesn't get overridden by a higher priority layer? - layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS', True).split()] + layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS').split()] if not os.path.abspath(destlayerdir) in layerdirs: bb.warn('Specified layer is not currently enabled in bblayers.conf, you will need to add it before this bbappend will be active') @@ -628,7 +628,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, else: bbappendlines.append((varname, op, value)) - destsubdir = rd.getVar('PN', True) + destsubdir = rd.getVar('PN') if srcfiles: bbappendlines.append(('FILESEXTRAPATHS_prepend', ':=', '${THISDIR}/${PN}:')) @@ -647,7 +647,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, srcurientry = 'file://%s' % srcfile # Double-check it's not there already # FIXME do we care if the entry is added by another bbappend that might go away? - if not srcurientry in rd.getVar('SRC_URI', True).split(): + if not srcurientry in rd.getVar('SRC_URI').split(): if machine: appendline('SRC_URI_append%s' % appendoverride, '=', ' ' + srcurientry) else: @@ -796,7 +796,7 @@ def replace_dir_vars(path, d): # Sort by length so we get the variables we're interested in first for var in sorted(list(d.keys()), key=len): if var.endswith('dir') and var.lower() == var: - value = d.getVar(var, True) + value = d.getVar(var) if value.startswith('/') and not '\n' in value and value not in dirvars: dirvars[value] = var for dirpath in sorted(list(dirvars.keys()), reverse=True): @@ -850,12 +850,12 @@ def get_recipe_upstream_version(rd): ru['type'] = 'U' ru['datetime'] = '' - pv = rd.getVar('PV', True) + pv = rd.getVar('PV') # XXX: If don't have SRC_URI means that don't have upstream sources so # returns the current recipe version, so that upstream version check # declares a match. - src_uris = rd.getVar('SRC_URI', True) + src_uris = rd.getVar('SRC_URI') if not src_uris: ru['version'] = pv ru['type'] = 'M' @@ -866,13 +866,13 @@ def get_recipe_upstream_version(rd): src_uri = src_uris.split()[0] uri_type, _, _, _, _, _ = decodeurl(src_uri) - manual_upstream_version = rd.getVar("RECIPE_UPSTREAM_VERSION", True) + manual_upstream_version = rd.getVar("RECIPE_UPSTREAM_VERSION") if manual_upstream_version: # manual tracking of upstream version. 
ru['version'] = manual_upstream_version ru['type'] = 'M' - manual_upstream_date = rd.getVar("CHECK_DATE", True) + manual_upstream_date = rd.getVar("CHECK_DATE") if manual_upstream_date: date = datetime.strptime(manual_upstream_date, "%b %d, %Y") else: diff --git a/meta/lib/oe/rootfs.py b/meta/lib/oe/rootfs.py index 74fc3bd256..d9a473006a 100644 --- a/meta/lib/oe/rootfs.py +++ b/meta/lib/oe/rootfs.py @@ -18,8 +18,8 @@ class Rootfs(object, metaclass=ABCMeta): def __init__(self, d, progress_reporter=None, logcatcher=None): self.d = d self.pm = None - self.image_rootfs = self.d.getVar('IMAGE_ROOTFS', True) - self.deploydir = self.d.getVar('IMGDEPLOYDIR', True) + self.image_rootfs = self.d.getVar('IMAGE_ROOTFS') + self.deploydir = self.d.getVar('IMGDEPLOYDIR') self.progress_reporter = progress_reporter self.logcatcher = logcatcher @@ -72,7 +72,7 @@ class Rootfs(object, metaclass=ABCMeta): else: msg = '%d %s messages' % (len(messages), type) msg = '[log_check] %s: found %s in the logfile:\n%s' % \ - (self.d.getVar('PN', True), msg, ''.join(messages)) + (self.d.getVar('PN'), msg, ''.join(messages)) if type == 'error': bb.fatal(msg) else: @@ -103,7 +103,7 @@ class Rootfs(object, metaclass=ABCMeta): pass def _setup_dbg_rootfs(self, dirs): - gen_debugfs = self.d.getVar('IMAGE_GEN_DEBUGFS', True) or '0' + gen_debugfs = self.d.getVar('IMAGE_GEN_DEBUGFS') or '0' if gen_debugfs != '1': return @@ -156,7 +156,7 @@ class Rootfs(object, metaclass=ABCMeta): os.rename(self.image_rootfs + '-orig', self.image_rootfs) def _exec_shell_cmd(self, cmd): - fakerootcmd = self.d.getVar('FAKEROOT', True) + fakerootcmd = self.d.getVar('FAKEROOT') if fakerootcmd is not None: exec_cmd = [fakerootcmd, cmd] else: @@ -171,14 +171,14 @@ class Rootfs(object, metaclass=ABCMeta): def create(self): bb.note("###### Generate rootfs #######") - pre_process_cmds = self.d.getVar("ROOTFS_PREPROCESS_COMMAND", True) - post_process_cmds = self.d.getVar("ROOTFS_POSTPROCESS_COMMAND", True) - rootfs_post_install_cmds = self.d.getVar('ROOTFS_POSTINSTALL_COMMAND', True) + pre_process_cmds = self.d.getVar("ROOTFS_PREPROCESS_COMMAND") + post_process_cmds = self.d.getVar("ROOTFS_POSTPROCESS_COMMAND") + rootfs_post_install_cmds = self.d.getVar('ROOTFS_POSTINSTALL_COMMAND') - postinst_intercepts_dir = self.d.getVar("POSTINST_INTERCEPTS_DIR", True) + postinst_intercepts_dir = self.d.getVar("POSTINST_INTERCEPTS_DIR") if not postinst_intercepts_dir: postinst_intercepts_dir = self.d.expand("${COREBASE}/scripts/postinst-intercepts") - intercepts_dir = os.path.join(self.d.getVar('WORKDIR', True), + intercepts_dir = os.path.join(self.d.getVar('WORKDIR'), "intercept_scripts") bb.utils.remove(intercepts_dir, True) @@ -201,10 +201,10 @@ class Rootfs(object, metaclass=ABCMeta): # call the package manager dependent create method self._create() - sysconfdir = self.image_rootfs + self.d.getVar('sysconfdir', True) + sysconfdir = self.image_rootfs + self.d.getVar('sysconfdir') bb.utils.mkdirhier(sysconfdir) with open(sysconfdir + "/version", "w+") as ver: - ver.write(self.d.getVar('BUILDNAME', True) + "\n") + ver.write(self.d.getVar('BUILDNAME') + "\n") execute_pre_post_process(self.d, rootfs_post_install_cmds) @@ -223,7 +223,7 @@ class Rootfs(object, metaclass=ABCMeta): "offline and rootfs is read-only: %s" % delayed_postinsts) - if self.d.getVar('USE_DEVFS', True) != "1": + if self.d.getVar('USE_DEVFS') != "1": self._create_devfs() self._uninstall_unneeded() @@ -235,7 +235,7 @@ class Rootfs(object, metaclass=ABCMeta): self._run_ldconfig() - if 
self.d.getVar('USE_DEPMOD', True) != "0": + if self.d.getVar('USE_DEPMOD') != "0": self._generate_kernel_module_deps() self._cleanup() @@ -251,16 +251,16 @@ class Rootfs(object, metaclass=ABCMeta): if delayed_postinsts is None: if os.path.exists(self.d.expand("${IMAGE_ROOTFS}${sysconfdir}/init.d/run-postinsts")): self._exec_shell_cmd(["update-rc.d", "-f", "-r", - self.d.getVar('IMAGE_ROOTFS', True), + self.d.getVar('IMAGE_ROOTFS'), "run-postinsts", "remove"]) image_rorfs = bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs", True, False, self.d) - image_rorfs_force = self.d.getVar('FORCE_RO_REMOVE', True) + image_rorfs_force = self.d.getVar('FORCE_RO_REMOVE') if image_rorfs or image_rorfs_force == "1": # Remove components that we don't need if it's a read-only rootfs - unneeded_pkgs = self.d.getVar("ROOTFS_RO_UNNEEDED", True).split() + unneeded_pkgs = self.d.getVar("ROOTFS_RO_UNNEEDED").split() pkgs_installed = image_list_installed_packages(self.d) pkgs_to_remove = [pkg for pkg in pkgs_installed if pkg in unneeded_pkgs] @@ -273,7 +273,7 @@ class Rootfs(object, metaclass=ABCMeta): bb.warn("There are post install scripts " "in a read-only rootfs") - post_uninstall_cmds = self.d.getVar("ROOTFS_POSTUNINSTALL_COMMAND", True) + post_uninstall_cmds = self.d.getVar("ROOTFS_POSTUNINSTALL_COMMAND") execute_pre_post_process(self.d, post_uninstall_cmds) runtime_pkgmanage = bb.utils.contains("IMAGE_FEATURES", "package-management", @@ -283,12 +283,12 @@ class Rootfs(object, metaclass=ABCMeta): self.pm.remove_packaging_data() def _run_intercepts(self): - intercepts_dir = os.path.join(self.d.getVar('WORKDIR', True), + intercepts_dir = os.path.join(self.d.getVar('WORKDIR'), "intercept_scripts") bb.note("Running intercept scripts:") os.environ['D'] = self.image_rootfs - os.environ['STAGING_DIR_NATIVE'] = self.d.getVar('STAGING_DIR_NATIVE', True) + os.environ['STAGING_DIR_NATIVE'] = self.d.getVar('STAGING_DIR_NATIVE') for script in os.listdir(intercepts_dir): script_full = os.path.join(intercepts_dir, script) @@ -320,7 +320,7 @@ class Rootfs(object, metaclass=ABCMeta): self._handle_intercept_failure(registered_pkgs) def _run_ldconfig(self): - if self.d.getVar('LDCONFIGDEPEND', True): + if self.d.getVar('LDCONFIGDEPEND'): bb.note("Executing: ldconfig -r" + self.image_rootfs + "-c new -v") self._exec_shell_cmd(['ldconfig', '-r', self.image_rootfs, '-c', 'new', '-v']) @@ -340,7 +340,7 @@ class Rootfs(object, metaclass=ABCMeta): bb.note("No Kernel Modules found, not running depmod") return - kernel_abi_ver_file = oe.path.join(self.d.getVar('PKGDATA_DIR', True), "kernel-depmod", + kernel_abi_ver_file = oe.path.join(self.d.getVar('PKGDATA_DIR'), "kernel-depmod", 'kernel-abiversion') if not os.path.exists(kernel_abi_ver_file): bb.fatal("No kernel-abiversion file found (%s), cannot run depmod, aborting" % kernel_abi_ver_file) @@ -362,15 +362,15 @@ class Rootfs(object, metaclass=ABCMeta): """ def _create_devfs(self): devtable_list = [] - devtable = self.d.getVar('IMAGE_DEVICE_TABLE', True) + devtable = self.d.getVar('IMAGE_DEVICE_TABLE') if devtable is not None: devtable_list.append(devtable) else: - devtables = self.d.getVar('IMAGE_DEVICE_TABLES', True) + devtables = self.d.getVar('IMAGE_DEVICE_TABLES') if devtables is None: devtables = 'files/device_table-minimal.txt' for devtable in devtables.split(): - devtable_list.append("%s" % bb.utils.which(self.d.getVar('BBPATH', True), devtable)) + devtable_list.append("%s" % bb.utils.which(self.d.getVar('BBPATH'), devtable)) for devtable in devtable_list: 
self._exec_shell_cmd(["makedevs", "-r", @@ -386,16 +386,16 @@ class RpmRootfs(Rootfs): self.manifest = RpmManifest(d, manifest_dir) self.pm = RpmPM(d, - d.getVar('IMAGE_ROOTFS', True), - self.d.getVar('TARGET_VENDOR', True) + d.getVar('IMAGE_ROOTFS'), + self.d.getVar('TARGET_VENDOR') ) - self.inc_rpm_image_gen = self.d.getVar('INC_RPM_IMAGE_GEN', True) + self.inc_rpm_image_gen = self.d.getVar('INC_RPM_IMAGE_GEN') if self.inc_rpm_image_gen != "1": bb.utils.remove(self.image_rootfs, True) else: self.pm.recovery_packaging_data() - bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS', True), True) + bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS'), True) self.pm.create_configs() @@ -429,8 +429,8 @@ class RpmRootfs(Rootfs): def _create(self): pkgs_to_install = self.manifest.parse_initial_manifest() - rpm_pre_process_cmds = self.d.getVar('RPM_PREPROCESS_COMMANDS', True) - rpm_post_process_cmds = self.d.getVar('RPM_POSTPROCESS_COMMANDS', True) + rpm_pre_process_cmds = self.d.getVar('RPM_PREPROCESS_COMMANDS') + rpm_post_process_cmds = self.d.getVar('RPM_POSTPROCESS_COMMANDS') # update PM index files self.pm.write_index() @@ -601,7 +601,7 @@ class DpkgOpkgRootfs(Rootfs): pkg_list = [] pkgs = None - if not self.d.getVar('PACKAGE_INSTALL', True).strip(): + if not self.d.getVar('PACKAGE_INSTALL').strip(): bb.note("Building empty image") else: pkgs = self._get_pkgs_postinsts(status_file) @@ -637,17 +637,17 @@ class DpkgRootfs(DpkgOpkgRootfs): ] bb.utils.remove(self.image_rootfs, True) - bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS', True), True) + bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS'), True) self.manifest = DpkgManifest(d, manifest_dir) - self.pm = DpkgPM(d, d.getVar('IMAGE_ROOTFS', True), - d.getVar('PACKAGE_ARCHS', True), - d.getVar('DPKG_ARCH', True)) + self.pm = DpkgPM(d, d.getVar('IMAGE_ROOTFS'), + d.getVar('PACKAGE_ARCHS'), + d.getVar('DPKG_ARCH')) def _create(self): pkgs_to_install = self.manifest.parse_initial_manifest() - deb_pre_process_cmds = self.d.getVar('DEB_PREPROCESS_COMMANDS', True) - deb_post_process_cmds = self.d.getVar('DEB_POSTPROCESS_COMMANDS', True) + deb_pre_process_cmds = self.d.getVar('DEB_PREPROCESS_COMMANDS') + deb_post_process_cmds = self.d.getVar('DEB_POSTPROCESS_COMMANDS') alt_dir = self.d.expand("${IMAGE_ROOTFS}/var/lib/dpkg/alternatives") bb.utils.mkdirhier(alt_dir) @@ -725,10 +725,10 @@ class OpkgRootfs(DpkgOpkgRootfs): self.log_check_regex = '(exit 1|Collected errors)' self.manifest = OpkgManifest(d, manifest_dir) - self.opkg_conf = self.d.getVar("IPKGCONF_TARGET", True) - self.pkg_archs = self.d.getVar("ALL_MULTILIB_PACKAGE_ARCHS", True) + self.opkg_conf = self.d.getVar("IPKGCONF_TARGET") + self.pkg_archs = self.d.getVar("ALL_MULTILIB_PACKAGE_ARCHS") - self.inc_opkg_image_gen = self.d.getVar('INC_IPK_IMAGE_GEN', True) or "" + self.inc_opkg_image_gen = self.d.getVar('INC_IPK_IMAGE_GEN') or "" if self._remove_old_rootfs(): bb.utils.remove(self.image_rootfs, True) self.pm = OpkgPM(d, @@ -742,7 +742,7 @@ class OpkgRootfs(DpkgOpkgRootfs): self.pkg_archs) self.pm.recover_packaging_data() - bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS', True), True) + bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS'), True) def _prelink_file(self, root_dir, filename): bb.note('prelink %s in %s' % (filename, root_dir)) @@ -797,7 +797,7 @@ class OpkgRootfs(DpkgOpkgRootfs): """ def _multilib_sanity_test(self, dirs): - allow_replace = self.d.getVar("MULTILIBRE_ALLOW_REP", True) + allow_replace = self.d.getVar("MULTILIBRE_ALLOW_REP") if allow_replace is 
None: allow_replace = "" @@ -829,12 +829,12 @@ class OpkgRootfs(DpkgOpkgRootfs): files[key] = item def _multilib_test_install(self, pkgs): - ml_temp = self.d.getVar("MULTILIB_TEMP_ROOTFS", True) + ml_temp = self.d.getVar("MULTILIB_TEMP_ROOTFS") bb.utils.mkdirhier(ml_temp) dirs = [self.image_rootfs] - for variant in self.d.getVar("MULTILIB_VARIANTS", True).split(): + for variant in self.d.getVar("MULTILIB_VARIANTS").split(): ml_target_rootfs = os.path.join(ml_temp, variant) bb.utils.remove(ml_target_rootfs, True) @@ -894,9 +894,9 @@ class OpkgRootfs(DpkgOpkgRootfs): old_vars_list = open(vars_list_file, 'r+').read() new_vars_list = '%s:%s:%s\n' % \ - ((self.d.getVar('BAD_RECOMMENDATIONS', True) or '').strip(), - (self.d.getVar('NO_RECOMMENDATIONS', True) or '').strip(), - (self.d.getVar('PACKAGE_EXCLUDE', True) or '').strip()) + ((self.d.getVar('BAD_RECOMMENDATIONS') or '').strip(), + (self.d.getVar('NO_RECOMMENDATIONS') or '').strip(), + (self.d.getVar('PACKAGE_EXCLUDE') or '').strip()) open(vars_list_file, 'w+').write(new_vars_list) if old_vars_list != new_vars_list: @@ -906,11 +906,11 @@ class OpkgRootfs(DpkgOpkgRootfs): def _create(self): pkgs_to_install = self.manifest.parse_initial_manifest() - opkg_pre_process_cmds = self.d.getVar('OPKG_PREPROCESS_COMMANDS', True) - opkg_post_process_cmds = self.d.getVar('OPKG_POSTPROCESS_COMMANDS', True) + opkg_pre_process_cmds = self.d.getVar('OPKG_PREPROCESS_COMMANDS') + opkg_post_process_cmds = self.d.getVar('OPKG_POSTPROCESS_COMMANDS') # update PM index files, unless users provide their own feeds - if (self.d.getVar('BUILD_IMAGES_FROM_FEEDS', True) or "") != "1": + if (self.d.getVar('BUILD_IMAGES_FROM_FEEDS') or "") != "1": self.pm.write_index() execute_pre_post_process(self.d, opkg_pre_process_cmds) @@ -968,7 +968,7 @@ class OpkgRootfs(DpkgOpkgRootfs): def _get_delayed_postinsts(self): status_file = os.path.join(self.image_rootfs, - self.d.getVar('OPKGLIBDIR', True).strip('/'), + self.d.getVar('OPKGLIBDIR').strip('/'), "opkg", "status") return self._get_delayed_postinsts_common(status_file) @@ -993,14 +993,14 @@ def get_class_for_type(imgtype): "deb": DpkgRootfs}[imgtype] def variable_depends(d, manifest_dir=None): - img_type = d.getVar('IMAGE_PKGTYPE', True) + img_type = d.getVar('IMAGE_PKGTYPE') cls = get_class_for_type(img_type) return cls._depends_list() def create_rootfs(d, manifest_dir=None, progress_reporter=None, logcatcher=None): env_bkp = os.environ.copy() - img_type = d.getVar('IMAGE_PKGTYPE', True) + img_type = d.getVar('IMAGE_PKGTYPE') if img_type == "rpm": RpmRootfs(d, manifest_dir, progress_reporter, logcatcher).create() elif img_type == "ipk": @@ -1014,13 +1014,13 @@ def create_rootfs(d, manifest_dir=None, progress_reporter=None, logcatcher=None) def image_list_installed_packages(d, rootfs_dir=None): if not rootfs_dir: - rootfs_dir = d.getVar('IMAGE_ROOTFS', True) + rootfs_dir = d.getVar('IMAGE_ROOTFS') - img_type = d.getVar('IMAGE_PKGTYPE', True) + img_type = d.getVar('IMAGE_PKGTYPE') if img_type == "rpm": return RpmPkgsList(d, rootfs_dir).list_pkgs() elif img_type == "ipk": - return OpkgPkgsList(d, rootfs_dir, d.getVar("IPKGCONF_TARGET", True)).list_pkgs() + return OpkgPkgsList(d, rootfs_dir, d.getVar("IPKGCONF_TARGET")).list_pkgs() elif img_type == "deb": return DpkgPkgsList(d, rootfs_dir).list_pkgs() diff --git a/meta/lib/oe/sdk.py b/meta/lib/oe/sdk.py index c74525f929..fef02d0777 100644 --- a/meta/lib/oe/sdk.py +++ b/meta/lib/oe/sdk.py @@ -11,16 +11,16 @@ import traceback class Sdk(object, metaclass=ABCMeta): def 
__init__(self, d, manifest_dir): self.d = d - self.sdk_output = self.d.getVar('SDK_OUTPUT', True) - self.sdk_native_path = self.d.getVar('SDKPATHNATIVE', True).strip('/') - self.target_path = self.d.getVar('SDKTARGETSYSROOT', True).strip('/') - self.sysconfdir = self.d.getVar('sysconfdir', True).strip('/') + self.sdk_output = self.d.getVar('SDK_OUTPUT') + self.sdk_native_path = self.d.getVar('SDKPATHNATIVE').strip('/') + self.target_path = self.d.getVar('SDKTARGETSYSROOT').strip('/') + self.sysconfdir = self.d.getVar('sysconfdir').strip('/') self.sdk_target_sysroot = os.path.join(self.sdk_output, self.target_path) self.sdk_host_sysroot = self.sdk_output if manifest_dir is None: - self.manifest_dir = self.d.getVar("SDK_DIR", True) + self.manifest_dir = self.d.getVar("SDK_DIR") else: self.manifest_dir = manifest_dir @@ -40,12 +40,12 @@ class Sdk(object, metaclass=ABCMeta): # Don't ship any libGL in the SDK self.remove(os.path.join(self.sdk_output, self.sdk_native_path, - self.d.getVar('libdir_nativesdk', True).strip('/'), + self.d.getVar('libdir_nativesdk').strip('/'), "libGL*")) # Fix or remove broken .la files self.remove(os.path.join(self.sdk_output, self.sdk_native_path, - self.d.getVar('libdir_nativesdk', True).strip('/'), + self.d.getVar('libdir_nativesdk').strip('/'), "*.la")) # Link the ld.so.cache file into the hosts filesystem @@ -54,7 +54,7 @@ class Sdk(object, metaclass=ABCMeta): self.mkdirhier(os.path.dirname(link_name)) os.symlink("/etc/ld.so.cache", link_name) - execute_pre_post_process(self.d, self.d.getVar('SDK_POSTPROCESS_COMMAND', True)) + execute_pre_post_process(self.d, self.d.getVar('SDK_POSTPROCESS_COMMAND')) def movefile(self, sourcefile, destdir): try: @@ -102,7 +102,7 @@ class RpmSdk(Sdk): self.target_pm = RpmPM(d, self.sdk_target_sysroot, - self.d.getVar('TARGET_VENDOR', True), + self.d.getVar('TARGET_VENDOR'), 'target', target_providename ) @@ -118,7 +118,7 @@ class RpmSdk(Sdk): self.host_pm = RpmPM(d, self.sdk_host_sysroot, - self.d.getVar('SDK_VENDOR', True), + self.d.getVar('SDK_VENDOR'), 'host', sdk_providename, "SDK_PACKAGE_ARCHS", @@ -149,9 +149,9 @@ class RpmSdk(Sdk): bb.note("Installing TARGET packages") self._populate_sysroot(self.target_pm, self.target_manifest) - self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY', True)) + self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY')) - execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND", True)) + execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND")) if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): self.target_pm.remove_packaging_data() @@ -159,7 +159,7 @@ class RpmSdk(Sdk): bb.note("Installing NATIVESDK packages") self._populate_sysroot(self.host_pm, self.host_manifest) - execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND", True)) + execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND")) if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): self.host_pm.remove_packaging_data() @@ -167,7 +167,7 @@ class RpmSdk(Sdk): # Move host RPM library data native_rpm_state_dir = os.path.join(self.sdk_output, self.sdk_native_path, - self.d.getVar('localstatedir_nativesdk', True).strip('/'), + self.d.getVar('localstatedir_nativesdk').strip('/'), "lib", "rpm" ) @@ -197,8 +197,8 @@ class OpkgSdk(Sdk): def __init__(self, d, manifest_dir=None): super(OpkgSdk, self).__init__(d, 
manifest_dir) - self.target_conf = self.d.getVar("IPKGCONF_TARGET", True) - self.host_conf = self.d.getVar("IPKGCONF_SDK", True) + self.target_conf = self.d.getVar("IPKGCONF_TARGET") + self.host_conf = self.d.getVar("IPKGCONF_SDK") self.target_manifest = OpkgManifest(d, self.manifest_dir, Manifest.MANIFEST_TYPE_SDK_TARGET) @@ -206,15 +206,15 @@ class OpkgSdk(Sdk): Manifest.MANIFEST_TYPE_SDK_HOST) self.target_pm = OpkgPM(d, self.sdk_target_sysroot, self.target_conf, - self.d.getVar("ALL_MULTILIB_PACKAGE_ARCHS", True)) + self.d.getVar("ALL_MULTILIB_PACKAGE_ARCHS")) self.host_pm = OpkgPM(d, self.sdk_host_sysroot, self.host_conf, - self.d.getVar("SDK_PACKAGE_ARCHS", True)) + self.d.getVar("SDK_PACKAGE_ARCHS")) def _populate_sysroot(self, pm, manifest): pkgs_to_install = manifest.parse_initial_manifest() - if (self.d.getVar('BUILD_IMAGES_FROM_FEEDS', True) or "") != "1": + if (self.d.getVar('BUILD_IMAGES_FROM_FEEDS') or "") != "1": pm.write_index() pm.update() @@ -228,9 +228,9 @@ class OpkgSdk(Sdk): bb.note("Installing TARGET packages") self._populate_sysroot(self.target_pm, self.target_manifest) - self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY', True)) + self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY')) - execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND", True)) + execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND")) if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): self.target_pm.remove_packaging_data() @@ -238,7 +238,7 @@ class OpkgSdk(Sdk): bb.note("Installing NATIVESDK packages") self._populate_sysroot(self.host_pm, self.host_manifest) - execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND", True)) + execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND")) if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): self.host_pm.remove_packaging_data() @@ -257,7 +257,7 @@ class OpkgSdk(Sdk): os.path.basename(self.host_conf)), 0o644) native_opkg_state_dir = os.path.join(self.sdk_output, self.sdk_native_path, - self.d.getVar('localstatedir_nativesdk', True).strip('/'), + self.d.getVar('localstatedir_nativesdk').strip('/'), "lib", "opkg") self.mkdirhier(native_opkg_state_dir) for f in glob.glob(os.path.join(self.sdk_output, "var", "lib", "opkg", "*")): @@ -270,8 +270,8 @@ class DpkgSdk(Sdk): def __init__(self, d, manifest_dir=None): super(DpkgSdk, self).__init__(d, manifest_dir) - self.target_conf_dir = os.path.join(self.d.getVar("APTCONF_TARGET", True), "apt") - self.host_conf_dir = os.path.join(self.d.getVar("APTCONF_TARGET", True), "apt-sdk") + self.target_conf_dir = os.path.join(self.d.getVar("APTCONF_TARGET"), "apt") + self.host_conf_dir = os.path.join(self.d.getVar("APTCONF_TARGET"), "apt-sdk") self.target_manifest = DpkgManifest(d, self.manifest_dir, Manifest.MANIFEST_TYPE_SDK_TARGET) @@ -279,17 +279,17 @@ class DpkgSdk(Sdk): Manifest.MANIFEST_TYPE_SDK_HOST) self.target_pm = DpkgPM(d, self.sdk_target_sysroot, - self.d.getVar("PACKAGE_ARCHS", True), - self.d.getVar("DPKG_ARCH", True), + self.d.getVar("PACKAGE_ARCHS"), + self.d.getVar("DPKG_ARCH"), self.target_conf_dir) self.host_pm = DpkgPM(d, self.sdk_host_sysroot, - self.d.getVar("SDK_PACKAGE_ARCHS", True), - self.d.getVar("DEB_SDK_ARCH", True), + self.d.getVar("SDK_PACKAGE_ARCHS"), + self.d.getVar("DEB_SDK_ARCH"), self.host_conf_dir) def _copy_apt_dir_to(self, dst_dir): - 
staging_etcdir_native = self.d.getVar("STAGING_ETCDIR_NATIVE", True) + staging_etcdir_native = self.d.getVar("STAGING_ETCDIR_NATIVE") self.remove(dst_dir, True) @@ -310,9 +310,9 @@ class DpkgSdk(Sdk): bb.note("Installing TARGET packages") self._populate_sysroot(self.target_pm, self.target_manifest) - self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY', True)) + self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY')) - execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND", True)) + execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND")) self._copy_apt_dir_to(os.path.join(self.sdk_target_sysroot, "etc", "apt")) @@ -322,7 +322,7 @@ class DpkgSdk(Sdk): bb.note("Installing NATIVESDK packages") self._populate_sysroot(self.host_pm, self.host_manifest) - execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND", True)) + execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND")) self._copy_apt_dir_to(os.path.join(self.sdk_output, self.sdk_native_path, "etc", "apt")) @@ -341,26 +341,26 @@ class DpkgSdk(Sdk): def sdk_list_installed_packages(d, target, rootfs_dir=None): if rootfs_dir is None: - sdk_output = d.getVar('SDK_OUTPUT', True) - target_path = d.getVar('SDKTARGETSYSROOT', True).strip('/') + sdk_output = d.getVar('SDK_OUTPUT') + target_path = d.getVar('SDKTARGETSYSROOT').strip('/') rootfs_dir = [sdk_output, os.path.join(sdk_output, target_path)][target is True] - img_type = d.getVar('IMAGE_PKGTYPE', True) + img_type = d.getVar('IMAGE_PKGTYPE') if img_type == "rpm": arch_var = ["SDK_PACKAGE_ARCHS", None][target is True] os_var = ["SDK_OS", None][target is True] return RpmPkgsList(d, rootfs_dir, arch_var, os_var).list_pkgs() elif img_type == "ipk": conf_file_var = ["IPKGCONF_SDK", "IPKGCONF_TARGET"][target is True] - return OpkgPkgsList(d, rootfs_dir, d.getVar(conf_file_var, True)).list_pkgs() + return OpkgPkgsList(d, rootfs_dir, d.getVar(conf_file_var)).list_pkgs() elif img_type == "deb": return DpkgPkgsList(d, rootfs_dir).list_pkgs() def populate_sdk(d, manifest_dir=None): env_bkp = os.environ.copy() - img_type = d.getVar('IMAGE_PKGTYPE', True) + img_type = d.getVar('IMAGE_PKGTYPE') if img_type == "rpm": RpmSdk(d, manifest_dir).populate() elif img_type == "ipk": diff --git a/meta/lib/oe/sstatesig.py b/meta/lib/oe/sstatesig.py index 8224e3a12e..e053c37e96 100644 --- a/meta/lib/oe/sstatesig.py +++ b/meta/lib/oe/sstatesig.py @@ -63,10 +63,10 @@ def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCache): def sstate_lockedsigs(d): sigs = {} - types = (d.getVar("SIGGEN_LOCKEDSIGS_TYPES", True) or "").split() + types = (d.getVar("SIGGEN_LOCKEDSIGS_TYPES") or "").split() for t in types: siggen_lockedsigs_var = "SIGGEN_LOCKEDSIGS_%s" % t - lockedsigs = (d.getVar(siggen_lockedsigs_var, True) or "").split() + lockedsigs = (d.getVar(siggen_lockedsigs_var) or "").split() for ls in lockedsigs: pn, task, h = ls.split(":", 2) if pn not in sigs: @@ -77,8 +77,8 @@ def sstate_lockedsigs(d): class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic): name = "OEBasic" def init_rundepcheck(self, data): - self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE", True) or "").split() - self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", True) or "").split() + self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split() + self.saferecipedeps = 
(data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split() pass def rundep_check(self, fn, recipename, task, dep, depname, dataCache = None): return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCache) @@ -86,15 +86,15 @@ class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic): class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash): name = "OEBasicHash" def init_rundepcheck(self, data): - self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE", True) or "").split() - self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", True) or "").split() + self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split() + self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split() self.lockedsigs = sstate_lockedsigs(data) self.lockedhashes = {} self.lockedpnmap = {} self.lockedhashfn = {} - self.machine = data.getVar("MACHINE", True) + self.machine = data.getVar("MACHINE") self.mismatch_msgs = [] - self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES", True) or + self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or "").split() self.unlockedrecipes = { k: "" for k in self.unlockedrecipes } pass @@ -224,13 +224,13 @@ class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash): sstate_missing_msgs.append("Locked sig is set for %s:%s (%s) yet not in sstate cache?" % (pn, sq_task[task], sq_hash[task])) - checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK", True) + checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK") if checklevel == 'warn': warn_msgs += self.mismatch_msgs elif checklevel == 'error': error_msgs += self.mismatch_msgs - checklevel = d.getVar("SIGGEN_LOCKEDSIGS_SSTATE_EXISTS_CHECK", True) + checklevel = d.getVar("SIGGEN_LOCKEDSIGS_SSTATE_EXISTS_CHECK") if checklevel == 'warn': warn_msgs += sstate_missing_msgs elif checklevel == 'error': @@ -274,7 +274,7 @@ def find_siginfo(pn, taskname, taskhashlist, d): localdata.setVar('PV', '*') localdata.setVar('PR', '*') localdata.setVar('EXTENDPE', '') - stamp = localdata.getVar('STAMP', True) + stamp = localdata.getVar('STAMP') if pn.startswith("gcc-source"): # gcc-source shared workdir is a special case :( stamp = localdata.expand("${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}") @@ -309,18 +309,18 @@ def find_siginfo(pn, taskname, taskhashlist, d): localdata.setVar('PV', '*') localdata.setVar('PR', '*') localdata.setVar('BB_TASKHASH', hashval) - swspec = localdata.getVar('SSTATE_SWSPEC', True) + swspec = localdata.getVar('SSTATE_SWSPEC') if taskname in ['do_fetch', 'do_unpack', 'do_patch', 'do_populate_lic', 'do_preconfigure'] and swspec: localdata.setVar('SSTATE_PKGSPEC', '${SSTATE_SWSPEC}') elif pn.endswith('-native') or "-cross-" in pn or "-crosssdk-" in pn: localdata.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/") sstatename = taskname[3:] - filespec = '%s_%s.*.siginfo' % (localdata.getVar('SSTATE_PKG', True), sstatename) + filespec = '%s_%s.*.siginfo' % (localdata.getVar('SSTATE_PKG'), sstatename) if hashval != '*': - sstatedir = "%s/%s" % (d.getVar('SSTATE_DIR', True), hashval[:2]) + sstatedir = "%s/%s" % (d.getVar('SSTATE_DIR'), hashval[:2]) else: - sstatedir = d.getVar('SSTATE_DIR', True) + sstatedir = d.getVar('SSTATE_DIR') for root, dirs, files in os.walk(sstatedir): for fn in files: diff --git a/meta/lib/oe/terminal.py b/meta/lib/oe/terminal.py index a89fa45691..0426e15834 100644 --- a/meta/lib/oe/terminal.py +++ b/meta/lib/oe/terminal.py @@ -196,7 +196,7 @@ class 
Custom(Terminal): priority = 3 def __init__(self, sh_cmd, title=None, env=None, d=None): - self.command = d and d.getVar('OE_TERMINAL_CUSTOMCMD', True) + self.command = d and d.getVar('OE_TERMINAL_CUSTOMCMD') if self.command: if not '{command}' in self.command: self.command += ' {command}' diff --git a/meta/lib/oe/utils.py b/meta/lib/oe/utils.py index 2b095f1f0a..bb3f0e5d75 100644 --- a/meta/lib/oe/utils.py +++ b/meta/lib/oe/utils.py @@ -23,13 +23,13 @@ def ifelse(condition, iftrue = True, iffalse = False): return iffalse def conditional(variable, checkvalue, truevalue, falsevalue, d): - if d.getVar(variable, True) == checkvalue: + if d.getVar(variable) == checkvalue: return truevalue else: return falsevalue def less_or_equal(variable, checkvalue, truevalue, falsevalue, d): - if float(d.getVar(variable, True)) <= float(checkvalue): + if float(d.getVar(variable)) <= float(checkvalue): return truevalue else: return falsevalue @@ -42,8 +42,8 @@ def version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d): return falsevalue def both_contain(variable1, variable2, checkvalue, d): - val1 = d.getVar(variable1, True) - val2 = d.getVar(variable2, True) + val1 = d.getVar(variable1) + val2 = d.getVar(variable2) val1 = set(val1.split()) val2 = set(val2.split()) if isinstance(checkvalue, str): @@ -66,8 +66,8 @@ def set_intersect(variable1, variable2, d): s3 = set_intersect(s1, s2) => s3 = "b c" """ - val1 = set(d.getVar(variable1, True).split()) - val2 = set(d.getVar(variable2, True).split()) + val1 = set(d.getVar(variable1).split()) + val2 = set(d.getVar(variable2).split()) return " ".join(val1 & val2) def prune_suffix(var, suffixes, d): @@ -77,7 +77,7 @@ def prune_suffix(var, suffixes, d): if var.endswith(suffix): var = var.replace(suffix, "") - prefix = d.getVar("MLPREFIX", True) + prefix = d.getVar("MLPREFIX") if prefix and var.startswith(prefix): var = var.replace(prefix, "") @@ -115,9 +115,9 @@ def features_backfill(var,d): # disturbing distributions that have already set DISTRO_FEATURES. # Distributions wanting to elide a value in DISTRO_FEATURES_BACKFILL should # add the feature to DISTRO_FEATURES_BACKFILL_CONSIDERED - features = (d.getVar(var, True) or "").split() - backfill = (d.getVar(var+"_BACKFILL", True) or "").split() - considered = (d.getVar(var+"_BACKFILL_CONSIDERED", True) or "").split() + features = (d.getVar(var) or "").split() + backfill = (d.getVar(var+"_BACKFILL") or "").split() + considered = (d.getVar(var+"_BACKFILL_CONSIDERED") or "").split() addfeatures = [] for feature in backfill: @@ -133,12 +133,12 @@ def packages_filter_out_system(d): Return a list of packages from PACKAGES with the "system" packages such as PN-dbg PN-doc PN-locale-eb-gb removed. 
""" - pn = d.getVar('PN', True) + pn = d.getVar('PN') blacklist = [pn + suffix for suffix in ('', '-dbg', '-dev', '-doc', '-locale', '-staticdev')] localepkg = pn + "-locale-" pkgs = [] - for pkg in d.getVar('PACKAGES', True).split(): + for pkg in d.getVar('PACKAGES').split(): if pkg not in blacklist and localepkg not in pkg: pkgs.append(pkg) return pkgs @@ -231,7 +231,7 @@ def format_pkg_list(pkg_dict, ret_format=None): return '\n'.join(output) def host_gcc_version(d): - compiler = d.getVar("BUILD_CC", True) + compiler = d.getVar("BUILD_CC") retval, output = getstatusoutput("%s --version" % compiler) if retval: bb.fatal("Error running %s --version: %s" % (compiler, output)) @@ -316,8 +316,8 @@ def write_ld_so_conf(d): bb.utils.remove(ldsoconf) bb.utils.mkdirhier(os.path.dirname(ldsoconf)) with open(ldsoconf, "w") as f: - f.write(d.getVar("base_libdir", True) + '\n') - f.write(d.getVar("libdir", True) + '\n') + f.write(d.getVar("base_libdir") + '\n') + f.write(d.getVar("libdir") + '\n') class ImageQAFailed(bb.build.FuncFailed): def __init__(self, description, name=None, logfile=None): diff --git a/meta/lib/oeqa/controllers/masterimage.py b/meta/lib/oeqa/controllers/masterimage.py index 9ce3bf803d..d796fc3c30 100644 --- a/meta/lib/oeqa/controllers/masterimage.py +++ b/meta/lib/oeqa/controllers/masterimage.py @@ -32,14 +32,14 @@ class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta super(MasterImageHardwareTarget, self).__init__(d) # target ip - addr = d.getVar("TEST_TARGET_IP", True) or bb.fatal('Please set TEST_TARGET_IP with the IP address of the machine you want to run the tests on.') + addr = d.getVar("TEST_TARGET_IP") or bb.fatal('Please set TEST_TARGET_IP with the IP address of the machine you want to run the tests on.') self.ip = addr.split(":")[0] try: self.port = addr.split(":")[1] except IndexError: self.port = None bb.note("Target IP: %s" % self.ip) - self.server_ip = d.getVar("TEST_SERVER_IP", True) + self.server_ip = d.getVar("TEST_SERVER_IP") if not self.server_ip: try: self.server_ip = subprocess.check_output(['ip', 'route', 'get', self.ip ]).split("\n")[0].split()[-1] @@ -49,8 +49,8 @@ class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta # test rootfs + kernel self.image_fstype = self.get_image_fstype(d) - self.rootfs = os.path.join(d.getVar("DEPLOY_DIR_IMAGE", True), d.getVar("IMAGE_LINK_NAME", True) + '.' + self.image_fstype) - self.kernel = os.path.join(d.getVar("DEPLOY_DIR_IMAGE", True), d.getVar("KERNEL_IMAGETYPE", False) + '-' + d.getVar('MACHINE', False) + '.bin') + self.rootfs = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), d.getVar("IMAGE_LINK_NAME") + '.' + self.image_fstype) + self.kernel = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), d.getVar("KERNEL_IMAGETYPE", False) + '-' + d.getVar('MACHINE', False) + '.bin') if not os.path.isfile(self.rootfs): # we could've checked that IMAGE_FSTYPES contains tar.gz but the config for running testimage might not be # the same as the config with which the image was build, ie @@ -64,16 +64,16 @@ class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta # master ssh connection self.master = None # if the user knows what they are doing, then by all means... 
- self.user_cmds = d.getVar("TEST_DEPLOY_CMDS", True) + self.user_cmds = d.getVar("TEST_DEPLOY_CMDS") self.deploy_cmds = None # this is the name of the command that controls the power for a board # e.g: TEST_POWERCONTROL_CMD = "/home/user/myscripts/powercontrol.py ${MACHINE} what-ever-other-args-the-script-wants" # the command should take as the last argument "off" and "on" and "cycle" (off, on) - self.powercontrol_cmd = d.getVar("TEST_POWERCONTROL_CMD", True) or None + self.powercontrol_cmd = d.getVar("TEST_POWERCONTROL_CMD") or None self.powercontrol_args = d.getVar("TEST_POWERCONTROL_EXTRA_ARGS", False) or "" - self.serialcontrol_cmd = d.getVar("TEST_SERIALCONTROL_CMD", True) or None + self.serialcontrol_cmd = d.getVar("TEST_SERIALCONTROL_CMD") or None self.serialcontrol_args = d.getVar("TEST_SERIALCONTROL_EXTRA_ARGS", False) or "" self.origenv = os.environ @@ -82,7 +82,7 @@ class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta # ssh + keys means we need the original user env bborigenv = d.getVar("BB_ORIGENV", False) or {} for key in bborigenv: - val = bborigenv.getVar(key, True) + val = bborigenv.getVar(key) if val is not None: self.origenv[key] = str(val) diff --git a/meta/lib/oeqa/oetest.py b/meta/lib/oeqa/oetest.py index 95d3bf72fc..d1aef967e4 100644 --- a/meta/lib/oeqa/oetest.py +++ b/meta/lib/oeqa/oetest.py @@ -221,15 +221,15 @@ class TestContext(object): path = [os.path.dirname(os.path.abspath(__file__))] extrapath = "" else: - path = d.getVar("BBPATH", True).split(':') + path = d.getVar("BBPATH").split(':') extrapath = "lib/oeqa" self.testslist = self._get_tests_list(path, extrapath) self.testsrequired = self._get_test_suites_required() self.filesdir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "runtime/files") - self.imagefeatures = d.getVar("IMAGE_FEATURES", True).split() - self.distrofeatures = d.getVar("DISTRO_FEATURES", True).split() + self.imagefeatures = d.getVar("IMAGE_FEATURES").split() + self.distrofeatures = d.getVar("DISTRO_FEATURES").split() # get testcase list from specified file # if path is a relative path, then relative to build/conf/ @@ -406,9 +406,9 @@ class RuntimeTestContext(TestContext): self.target = target self.pkgmanifest = {} - manifest = os.path.join(d.getVar("DEPLOY_DIR_IMAGE", True), - d.getVar("IMAGE_LINK_NAME", True) + ".manifest") - nomanifest = d.getVar("IMAGE_NO_MANIFEST", True) + manifest = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), + d.getVar("IMAGE_LINK_NAME") + ".manifest") + nomanifest = d.getVar("IMAGE_NO_MANIFEST") if nomanifest is None or nomanifest != "1": try: with open(manifest) as f: @@ -424,19 +424,19 @@ class RuntimeTestContext(TestContext): def _get_test_suites(self): testsuites = [] - manifests = (self.d.getVar("TEST_SUITES_MANIFEST", True) or '').split() + manifests = (self.d.getVar("TEST_SUITES_MANIFEST") or '').split() if manifests: for manifest in manifests: testsuites.extend(self._read_testlist(manifest, - self.d.getVar("TOPDIR", True)).split()) + self.d.getVar("TOPDIR")).split()) else: - testsuites = self.d.getVar("TEST_SUITES", True).split() + testsuites = self.d.getVar("TEST_SUITES").split() return testsuites def _get_test_suites_required(self): - return [t for t in self.d.getVar("TEST_SUITES", True).split() if t != "auto"] + return [t for t in self.d.getVar("TEST_SUITES").split() if t != "auto"] def loadTests(self): super(RuntimeTestContext, self).loadTests() @@ -449,10 +449,10 @@ class RuntimeTestContext(TestContext): """ modules = self.getTestModules() - bbpaths = 
self.d.getVar("BBPATH", True).split(":") + bbpaths = self.d.getVar("BBPATH").split(":") - shutil.rmtree(self.d.getVar("TEST_EXTRACTED_DIR", True)) - shutil.rmtree(self.d.getVar("TEST_PACKAGED_DIR", True)) + shutil.rmtree(self.d.getVar("TEST_EXTRACTED_DIR")) + shutil.rmtree(self.d.getVar("TEST_PACKAGED_DIR")) for module in modules: json_file = self._getJsonFile(module) if json_file: @@ -466,8 +466,8 @@ class RuntimeTestContext(TestContext): import oe.path - extracted_path = self.d.getVar("TEST_EXTRACTED_DIR", True) - packaged_path = self.d.getVar("TEST_PACKAGED_DIR", True) + extracted_path = self.d.getVar("TEST_EXTRACTED_DIR") + packaged_path = self.d.getVar("TEST_PACKAGED_DIR") for key,value in needed_packages.items(): packages = () @@ -548,7 +548,7 @@ class RuntimeTestContext(TestContext): from oeqa.utils.package_manager import get_package_manager - pkg_path = os.path.join(self.d.getVar("TEST_INSTALL_TMP_DIR", True), pkg) + pkg_path = os.path.join(self.d.getVar("TEST_INSTALL_TMP_DIR"), pkg) pm = get_package_manager(self.d, pkg_path) extract_dir = pm.extract(pkg) shutil.rmtree(pkg_path) @@ -562,8 +562,8 @@ class RuntimeTestContext(TestContext): from oeqa.utils.package_manager import get_package_manager - pkg_path = os.path.join(self.d.getVar("TEST_INSTALL_TMP_DIR", True), pkg) - dst_dir = self.d.getVar("TEST_PACKAGED_DIR", True) + pkg_path = os.path.join(self.d.getVar("TEST_INSTALL_TMP_DIR"), pkg) + dst_dir = self.d.getVar("TEST_PACKAGED_DIR") pm = get_package_manager(self.d, pkg_path) pkg_info = pm.package_info(pkg) file_path = pkg_info[pkg]["filepath"] @@ -611,7 +611,7 @@ class ImageTestContext(RuntimeTestContext): def __init__(self, d, target, host_dumper): super(ImageTestContext, self).__init__(d, target) - self.tagexp = d.getVar("TEST_SUITES_TAGS", True) + self.tagexp = d.getVar("TEST_SUITES_TAGS") self.host_dumper = host_dumper @@ -629,7 +629,7 @@ class ImageTestContext(RuntimeTestContext): Check if the test requires a package and Install/Unistall it in the DUT """ - pkg_dir = self.d.getVar("TEST_EXTRACTED_DIR", True) + pkg_dir = self.d.getVar("TEST_EXTRACTED_DIR") super(ImageTestContext, self).install_uninstall_packages(test_id, pkg_dir, install) class ExportTestContext(RuntimeTestContext): @@ -643,7 +643,7 @@ class ExportTestContext(RuntimeTestContext): super(ExportTestContext, self).__init__(d, target, exported) tag = parsedArgs.get("tag", None) - self.tagexp = tag if tag != None else d.getVar("TEST_SUITES_TAGS", True) + self.tagexp = tag if tag != None else d.getVar("TEST_SUITES_TAGS") self.sigterm = None @@ -653,7 +653,7 @@ class ExportTestContext(RuntimeTestContext): """ export_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) - extracted_dir = self.d.getVar("TEST_EXPORT_EXTRACTED_DIR", True) + extracted_dir = self.d.getVar("TEST_EXPORT_EXTRACTED_DIR") pkg_dir = os.path.join(export_dir, extracted_dir) super(ExportTestContext, self).install_uninstall_packages(test_id, pkg_dir, install) @@ -666,7 +666,7 @@ class SDKTestContext(TestContext): self.tcname = tcname if not hasattr(self, 'target_manifest'): - self.target_manifest = d.getVar("SDK_TARGET_MANIFEST", True) + self.target_manifest = d.getVar("SDK_TARGET_MANIFEST") try: self.pkgmanifest = {} with open(self.target_manifest) as f: @@ -677,7 +677,7 @@ class SDKTestContext(TestContext): bb.fatal("No package manifest file found. 
Did you build the sdk image?\n%s" % e) if not hasattr(self, 'host_manifest'): - self.host_manifest = d.getVar("SDK_HOST_MANIFEST", True) + self.host_manifest = d.getVar("SDK_HOST_MANIFEST") try: with open(self.host_manifest) as f: self.hostpkgmanifest = f.read() @@ -688,16 +688,16 @@ class SDKTestContext(TestContext): return "sdk" def _get_test_suites(self): - return (self.d.getVar("TEST_SUITES_SDK", True) or "auto").split() + return (self.d.getVar("TEST_SUITES_SDK") or "auto").split() def _get_test_suites_required(self): - return [t for t in (self.d.getVar("TEST_SUITES_SDK", True) or \ + return [t for t in (self.d.getVar("TEST_SUITES_SDK") or \ "auto").split() if t != "auto"] class SDKExtTestContext(SDKTestContext): def __init__(self, d, sdktestdir, sdkenv, tcname, *args): - self.target_manifest = d.getVar("SDK_EXT_TARGET_MANIFEST", True) - self.host_manifest = d.getVar("SDK_EXT_HOST_MANIFEST", True) + self.target_manifest = d.getVar("SDK_EXT_TARGET_MANIFEST") + self.host_manifest = d.getVar("SDK_EXT_HOST_MANIFEST") if args: self.cm = args[0] # Compatibility mode for run SDK tests else: @@ -715,8 +715,8 @@ class SDKExtTestContext(SDKTestContext): return "sdkext" def _get_test_suites(self): - return (self.d.getVar("TEST_SUITES_SDK_EXT", True) or "auto").split() + return (self.d.getVar("TEST_SUITES_SDK_EXT") or "auto").split() def _get_test_suites_required(self): - return [t for t in (self.d.getVar("TEST_SUITES_SDK_EXT", True) or \ + return [t for t in (self.d.getVar("TEST_SUITES_SDK_EXT") or \ "auto").split() if t != "auto"] diff --git a/meta/lib/oeqa/runexported.py b/meta/lib/oeqa/runexported.py index 7e245c4120..9cfea0f7ab 100755 --- a/meta/lib/oeqa/runexported.py +++ b/meta/lib/oeqa/runexported.py @@ -43,8 +43,8 @@ class FakeTarget(object): self.ip = None self.server_ip = None self.datetime = time.strftime('%Y%m%d%H%M%S',time.gmtime()) - self.testdir = d.getVar("TEST_LOG_DIR", True) - self.pn = d.getVar("PN", True) + self.testdir = d.getVar("TEST_LOG_DIR") + self.pn = d.getVar("PN") def exportStart(self): self.sshlog = os.path.join(self.testdir, "ssh_target_log.%s" % self.datetime) @@ -130,8 +130,8 @@ def extract_sdk(d): """ export_dir = os.path.dirname(os.path.realpath(__file__)) - tools_dir = d.getVar("TEST_EXPORT_SDK_DIR", True) - tarball_name = "%s.sh" % d.getVar("TEST_EXPORT_SDK_NAME", True) + tools_dir = d.getVar("TEST_EXPORT_SDK_DIR") + tarball_name = "%s.sh" % d.getVar("TEST_EXPORT_SDK_NAME") tarball_path = os.path.join(export_dir, tools_dir, tarball_name) extract_path = os.path.join(export_dir, "sysroot") if os.path.isfile(tarball_path): diff --git a/meta/lib/oeqa/runtime/_ptest.py b/meta/lib/oeqa/runtime/_ptest.py index 71324d3da2..cfb4041f18 100644 --- a/meta/lib/oeqa/runtime/_ptest.py +++ b/meta/lib/oeqa/runtime/_ptest.py @@ -13,7 +13,7 @@ def setUpModule(): skipModule("Image doesn't have package management feature") if not oeRuntimeTest.hasPackage("smartpm"): skipModule("Image doesn't have smart installed") - if "package_rpm" != oeRuntimeTest.tc.d.getVar("PACKAGE_CLASSES", True).split()[0]: + if "package_rpm" != oeRuntimeTest.tc.d.getVar("PACKAGE_CLASSES").split()[0]: skipModule("Rpm is not the primary package manager") class PtestRunnerTest(oeRuntimeTest): @@ -57,7 +57,7 @@ class PtestRunnerTest(oeRuntimeTest): # (status, result) = oeRuntimeTest.tc.target.run('smart channel --show | grep "\["', 0) # for x in result.split("\n"): # self.existingchannels.add(x) - self.repo_server = HTTPService(oeRuntimeTest.tc.d.getVar('DEPLOY_DIR', True), oeRuntimeTest.tc.target.server_ip) + 
self.repo_server = HTTPService(oeRuntimeTest.tc.d.getVar('DEPLOY_DIR'), oeRuntimeTest.tc.target.server_ip) self.repo_server.start() @classmethod @@ -70,23 +70,23 @@ class PtestRunnerTest(oeRuntimeTest): # oeRuntimeTest.tc.target.run('smart channel --remove '+x[1:-1]+' -y', 0) def add_smart_channel(self): - image_pkgtype = self.tc.d.getVar('IMAGE_PKGTYPE', True) + image_pkgtype = self.tc.d.getVar('IMAGE_PKGTYPE') deploy_url = 'http://%s:%s/%s' %(self.target.server_ip, self.repo_server.port, image_pkgtype) - pkgarchs = self.tc.d.getVar('PACKAGE_ARCHS', True).replace("-","_").split() + pkgarchs = self.tc.d.getVar('PACKAGE_ARCHS').replace("-","_").split() for arch in os.listdir('%s/%s' % (self.repo_server.root_dir, image_pkgtype)): if arch in pkgarchs: self.target.run('smart channel -y --add {a} type=rpm-md baseurl={u}/{a}'.format(a=arch, u=deploy_url), 0) self.target.run('smart update', 0) def install_complementary(self, globs=None): - installed_pkgs_file = os.path.join(oeRuntimeTest.tc.d.getVar('WORKDIR', True), + installed_pkgs_file = os.path.join(oeRuntimeTest.tc.d.getVar('WORKDIR'), "installed_pkgs.txt") - self.pkgs_list = RpmPkgsList(oeRuntimeTest.tc.d, oeRuntimeTest.tc.d.getVar('IMAGE_ROOTFS', True), oeRuntimeTest.tc.d.getVar('arch_var', True), oeRuntimeTest.tc.d.getVar('os_var', True)) + self.pkgs_list = RpmPkgsList(oeRuntimeTest.tc.d, oeRuntimeTest.tc.d.getVar('IMAGE_ROOTFS'), oeRuntimeTest.tc.d.getVar('arch_var'), oeRuntimeTest.tc.d.getVar('os_var')) with open(installed_pkgs_file, "w+") as installed_pkgs: installed_pkgs.write(self.pkgs_list.list("arch")) cmd = [bb.utils.which(os.getenv('PATH'), "oe-pkgdata-util"), - "-p", oeRuntimeTest.tc.d.getVar('PKGDATA_DIR', True), "glob", installed_pkgs_file, + "-p", oeRuntimeTest.tc.d.getVar('PKGDATA_DIR'), "glob", installed_pkgs_file, globs] try: bb.note("Installing complementary packages ...") @@ -99,7 +99,7 @@ class PtestRunnerTest(oeRuntimeTest): return complementary_pkgs.split() def setUpLocal(self): - self.ptest_log = os.path.join(oeRuntimeTest.tc.d.getVar("TEST_LOG_DIR",True), "ptest-%s.log" % oeRuntimeTest.tc.d.getVar('DATETIME', True)) + self.ptest_log = os.path.join(oeRuntimeTest.tc.d.getVar("TEST_LOG_DIR",True), "ptest-%s.log" % oeRuntimeTest.tc.d.getVar('DATETIME')) @skipUnlessPassed('test_ssh') def test_ptestrunner(self): diff --git a/meta/lib/oeqa/runtime/date.py b/meta/lib/oeqa/runtime/date.py index 447987e075..6f3516a92f 100644 --- a/meta/lib/oeqa/runtime/date.py +++ b/meta/lib/oeqa/runtime/date.py @@ -5,11 +5,11 @@ import re class DateTest(oeRuntimeTest): def setUpLocal(self): - if oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager", True) == "systemd": + if oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager") == "systemd": self.target.run('systemctl stop systemd-timesyncd') def tearDownLocal(self): - if oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager", True) == "systemd": + if oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager") == "systemd": self.target.run('systemctl start systemd-timesyncd') @testcase(211) diff --git a/meta/lib/oeqa/runtime/multilib.py b/meta/lib/oeqa/runtime/multilib.py index 593d385021..5cce24f5f4 100644 --- a/meta/lib/oeqa/runtime/multilib.py +++ b/meta/lib/oeqa/runtime/multilib.py @@ -3,7 +3,7 @@ from oeqa.oetest import oeRuntimeTest, skipModule from oeqa.utils.decorators import * def setUpModule(): - multilibs = oeRuntimeTest.tc.d.getVar("MULTILIBS", True) or "" + multilibs = oeRuntimeTest.tc.d.getVar("MULTILIBS") or "" if "multilib:lib32" not in multilibs: skipModule("this isn't 
a multilib:lib32 image") diff --git a/meta/lib/oeqa/runtime/parselogs.py b/meta/lib/oeqa/runtime/parselogs.py index 3e1c7d0c30..cc2d0617f5 100644 --- a/meta/lib/oeqa/runtime/parselogs.py +++ b/meta/lib/oeqa/runtime/parselogs.py @@ -193,10 +193,10 @@ class ParseLogsTest(oeRuntimeTest): self.ignore_errors[machine] = self.ignore_errors[machine] + video_related def getMachine(self): - return oeRuntimeTest.tc.d.getVar("MACHINE", True) + return oeRuntimeTest.tc.d.getVar("MACHINE") def getWorkdir(self): - return oeRuntimeTest.tc.d.getVar("WORKDIR", True) + return oeRuntimeTest.tc.d.getVar("WORKDIR") #get some information on the CPU of the machine to display at the beginning of the output. This info might be useful in some cases. def getHardwareInfo(self): diff --git a/meta/lib/oeqa/runtime/rpm.py b/meta/lib/oeqa/runtime/rpm.py index 7f514ca00c..f1c4763fc0 100644 --- a/meta/lib/oeqa/runtime/rpm.py +++ b/meta/lib/oeqa/runtime/rpm.py @@ -7,7 +7,7 @@ from oeqa.utils.decorators import * def setUpModule(): if not oeRuntimeTest.hasFeature("package-management"): skipModule("rpm module skipped: target doesn't have package-management in IMAGE_FEATURES") - if "package_rpm" != oeRuntimeTest.tc.d.getVar("PACKAGE_CLASSES", True).split()[0]: + if "package_rpm" != oeRuntimeTest.tc.d.getVar("PACKAGE_CLASSES").split()[0]: skipModule("rpm module skipped: target doesn't have rpm as primary package manager") @@ -29,8 +29,8 @@ class RpmInstallRemoveTest(oeRuntimeTest): @classmethod def setUpClass(self): - pkgarch = oeRuntimeTest.tc.d.getVar('TUNE_PKGARCH', True).replace("-", "_") - rpmdir = os.path.join(oeRuntimeTest.tc.d.getVar('DEPLOY_DIR', True), "rpm", pkgarch) + pkgarch = oeRuntimeTest.tc.d.getVar('TUNE_PKGARCH').replace("-", "_") + rpmdir = os.path.join(oeRuntimeTest.tc.d.getVar('DEPLOY_DIR'), "rpm", pkgarch) # pick rpm-doc as a test file to get installed, because it's small and it will always be built for standard targets for f in fnmatch.filter(os.listdir(rpmdir), "rpm-doc-*.%s.rpm" % pkgarch): testrpmfile = f diff --git a/meta/lib/oeqa/runtime/scp.py b/meta/lib/oeqa/runtime/scp.py index 48e87d2d0b..cf36cfa5d5 100644 --- a/meta/lib/oeqa/runtime/scp.py +++ b/meta/lib/oeqa/runtime/scp.py @@ -11,7 +11,7 @@ class ScpTest(oeRuntimeTest): @testcase(220) @skipUnlessPassed('test_ssh') def test_scp_file(self): - test_log_dir = oeRuntimeTest.tc.d.getVar("TEST_LOG_DIR", True) + test_log_dir = oeRuntimeTest.tc.d.getVar("TEST_LOG_DIR") test_file_path = os.path.join(test_log_dir, 'test_scp_file') with open(test_file_path, 'w') as test_scp_file: test_scp_file.seek(2 ** 22 - 1) diff --git a/meta/lib/oeqa/runtime/smart.py b/meta/lib/oeqa/runtime/smart.py index 6cdb10d631..dde1c4d792 100644 --- a/meta/lib/oeqa/runtime/smart.py +++ b/meta/lib/oeqa/runtime/smart.py @@ -11,7 +11,7 @@ def setUpModule(): skipModule("Image doesn't have package management feature") if not oeRuntimeTest.hasPackage("smartpm"): skipModule("Image doesn't have smart installed") - if "package_rpm" != oeRuntimeTest.tc.d.getVar("PACKAGE_CLASSES", True).split()[0]: + if "package_rpm" != oeRuntimeTest.tc.d.getVar("PACKAGE_CLASSES").split()[0]: skipModule("Rpm is not the primary package manager") class SmartTest(oeRuntimeTest): @@ -75,16 +75,16 @@ class SmartRepoTest(SmartTest): rpm_createrepo = bb.utils.which(os.getenv('PATH'), "createrepo") index_cmds = [] rpm_dirs_found = False - archs = (oeRuntimeTest.tc.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").replace('-', '_').split() + archs = (oeRuntimeTest.tc.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or 
"").replace('-', '_').split() for arch in archs: - rpm_dir = os.path.join(oeRuntimeTest.tc.d.getVar('DEPLOY_DIR_RPM', True), arch) - idx_path = os.path.join(oeRuntimeTest.tc.d.getVar('WORKDIR', True), 'rpm', arch) - db_path = os.path.join(oeRuntimeTest.tc.d.getVar('WORKDIR', True), 'rpmdb', arch) + rpm_dir = os.path.join(oeRuntimeTest.tc.d.getVar('DEPLOY_DIR_RPM'), arch) + idx_path = os.path.join(oeRuntimeTest.tc.d.getVar('WORKDIR'), 'rpm', arch) + db_path = os.path.join(oeRuntimeTest.tc.d.getVar('WORKDIR'), 'rpmdb', arch) if not os.path.isdir(rpm_dir): continue if os.path.exists(db_path): bb.utils.remove(dbpath, True) - lockfilename = oeRuntimeTest.tc.d.getVar('DEPLOY_DIR_RPM', True) + "/rpm.lock" + lockfilename = oeRuntimeTest.tc.d.getVar('DEPLOY_DIR_RPM') + "/rpm.lock" lf = bb.utils.lockfile(lockfilename, False) oe.path.copyhardlinktree(rpm_dir, idx_path) # Full indexes overload a 256MB image so reduce the number of rpms @@ -98,7 +98,7 @@ class SmartRepoTest(SmartTest): result = oe.utils.multiprocess_exec(index_cmds, self.create_index) if result: bb.fatal('%s' % ('\n'.join(result))) - self.repo_server = HTTPService(oeRuntimeTest.tc.d.getVar('WORKDIR', True), oeRuntimeTest.tc.target.server_ip) + self.repo_server = HTTPService(oeRuntimeTest.tc.d.getVar('WORKDIR'), oeRuntimeTest.tc.target.server_ip) self.repo_server.start() @classmethod @@ -113,9 +113,9 @@ class SmartRepoTest(SmartTest): @testcase(719) def test_smart_channel_add(self): - image_pkgtype = self.tc.d.getVar('IMAGE_PKGTYPE', True) + image_pkgtype = self.tc.d.getVar('IMAGE_PKGTYPE') deploy_url = 'http://%s:%s/%s' %(self.target.server_ip, self.repo_server.port, image_pkgtype) - pkgarchs = self.tc.d.getVar('PACKAGE_ARCHS', True).replace("-","_").split() + pkgarchs = self.tc.d.getVar('PACKAGE_ARCHS').replace("-","_").split() for arch in os.listdir('%s/%s' % (self.repo_server.root_dir, image_pkgtype)): if arch in pkgarchs: self.smart('channel -y --add {a} type=rpm-md baseurl={u}/{a}'.format(a=arch, u=deploy_url)) diff --git a/meta/lib/oeqa/runtime/systemd.py b/meta/lib/oeqa/runtime/systemd.py index 8de799cd63..52feb1b31e 100644 --- a/meta/lib/oeqa/runtime/systemd.py +++ b/meta/lib/oeqa/runtime/systemd.py @@ -6,7 +6,7 @@ from oeqa.utils.decorators import * def setUpModule(): if not oeRuntimeTest.hasFeature("systemd"): skipModule("target doesn't have systemd in DISTRO_FEATURES") - if "systemd" != oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager", True): + if "systemd" != oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager"): skipModule("systemd is not the init manager for this image") diff --git a/meta/lib/oeqa/runtime/x32lib.py b/meta/lib/oeqa/runtime/x32lib.py index ce5e214035..2f98dbf71e 100644 --- a/meta/lib/oeqa/runtime/x32lib.py +++ b/meta/lib/oeqa/runtime/x32lib.py @@ -4,7 +4,7 @@ from oeqa.utils.decorators import * def setUpModule(): #check if DEFAULTTUNE is set and it's value is: x86-64-x32 - defaulttune = oeRuntimeTest.tc.d.getVar("DEFAULTTUNE", True) + defaulttune = oeRuntimeTest.tc.d.getVar("DEFAULTTUNE") if "x86-64-x32" not in defaulttune: skipModule("DEFAULTTUNE is not set to x86-64-x32") diff --git a/meta/lib/oeqa/sdk/gcc.py b/meta/lib/oeqa/sdk/gcc.py index 8395b9b908..f3f4341a20 100644 --- a/meta/lib/oeqa/sdk/gcc.py +++ b/meta/lib/oeqa/sdk/gcc.py @@ -5,7 +5,7 @@ from oeqa.oetest import oeSDKTest, skipModule from oeqa.utils.decorators import * def setUpModule(): - machine = oeSDKTest.tc.d.getVar("MACHINE", True) + machine = oeSDKTest.tc.d.getVar("MACHINE") if not 
oeSDKTest.hasHostPackage("packagegroup-cross-canadian-" + machine):
         skipModule("SDK doesn't contain a cross-canadian toolchain")
diff --git a/meta/lib/oeqa/selftest/tinfoil.py b/meta/lib/oeqa/selftest/tinfoil.py
index 4f70e0d2f7..c8d635cd05 100644
--- a/meta/lib/oeqa/selftest/tinfoil.py
+++ b/meta/lib/oeqa/selftest/tinfoil.py
@@ -13,7 +13,7 @@ class TinfoilTests(oeSelfTest):
     def test_getvar(self):
         with bb.tinfoil.Tinfoil() as tinfoil:
             tinfoil.prepare(True)
-            machine = tinfoil.config_data.getVar('MACHINE', True)
+            machine = tinfoil.config_data.getVar('MACHINE')
             if not machine:
                 self.fail('Unable to get MACHINE value - returned %s' % machine)

@@ -41,7 +41,7 @@ class TinfoilTests(oeSelfTest):
             if not best:
                 self.fail('Unable to find recipe providing %s' % testrecipe)
             rd = tinfoil.parse_recipe_file(best[3])
-            self.assertEqual(testrecipe, rd.getVar('PN', True))
+            self.assertEqual(testrecipe, rd.getVar('PN'))

     def test_parse_recipe_copy_expand(self):
         with bb.tinfoil.Tinfoil() as tinfoil:
@@ -52,14 +52,14 @@ class TinfoilTests(oeSelfTest):
                 self.fail('Unable to find recipe providing %s' % testrecipe)
             rd = tinfoil.parse_recipe_file(best[3])
             # Check we can get variable values
-            self.assertEqual(testrecipe, rd.getVar('PN', True))
+            self.assertEqual(testrecipe, rd.getVar('PN'))
             # Check that expanding a value that includes a variable reference works
-            self.assertEqual(testrecipe, rd.getVar('BPN', True))
+            self.assertEqual(testrecipe, rd.getVar('BPN'))
             # Now check that changing the referenced variable's value in a copy gives that
             # value when expanding
             localdata = bb.data.createCopy(rd)
             localdata.setVar('PN', 'hello')
-            self.assertEqual('hello', localdata.getVar('BPN', True))
+            self.assertEqual('hello', localdata.getVar('BPN'))

     def test_parse_recipe_initial_datastore(self):
         with bb.tinfoil.Tinfoil() as tinfoil:
@@ -72,7 +72,7 @@ class TinfoilTests(oeSelfTest):
             dcopy.setVar('MYVARIABLE', 'somevalue')
             rd = tinfoil.parse_recipe_file(best[3], config_data=dcopy)
             # Check we can get variable values
-            self.assertEqual('somevalue', rd.getVar('MYVARIABLE', True))
+            self.assertEqual('somevalue', rd.getVar('MYVARIABLE'))

     def test_list_recipes(self):
         with bb.tinfoil.Tinfoil() as tinfoil:
@@ -127,7 +127,7 @@ class TinfoilTests(oeSelfTest):
         with bb.tinfoil.Tinfoil() as tinfoil:
             tinfoil.prepare(config_only=True)
             tinfoil.run_command('setVariable', 'TESTVAR', 'specialvalue')
-            self.assertEqual(tinfoil.config_data.getVar('TESTVAR', True), 'specialvalue', 'Value set using setVariable is not reflected in client-side getVar()')
+            self.assertEqual(tinfoil.config_data.getVar('TESTVAR'), 'specialvalue', 'Value set using setVariable is not reflected in client-side getVar()')

         # Now check that the setVariable's effects are no longer present
         # (this may legitimately break in future if we stop reinitialising
         # setVariable entirely)
         with bb.tinfoil.Tinfoil() as tinfoil:
             tinfoil.prepare(config_only=True)
-            self.assertNotEqual(tinfoil.config_data.getVar('TESTVAR', True), 'specialvalue', 'Value set using setVariable is still present!')
+            self.assertNotEqual(tinfoil.config_data.getVar('TESTVAR'), 'specialvalue', 'Value set using setVariable is still present!')

         # Now check that setVar on the main datastore works (uses setVariable internally)
         with bb.tinfoil.Tinfoil() as tinfoil:
diff --git a/meta/lib/oeqa/targetcontrol.py b/meta/lib/oeqa/targetcontrol.py
index 24669f461d..d1f441f841 100644
--- a/meta/lib/oeqa/targetcontrol.py
+++ b/meta/lib/oeqa/targetcontrol.py
@@ -19,7 +19,7 @@ from oeqa.controllers.testtargetloader import TestTargetLoader
 from abc import ABCMeta, abstractmethod

 def get_target_controller(d):
-    testtarget = d.getVar("TEST_TARGET", True)
+    testtarget = d.getVar("TEST_TARGET")
     # old, simple names
     if testtarget == "qemu":
         return QemuTarget(d)
@@ -33,7 +33,7 @@ def get_target_controller(d):
     except AttributeError:
         # nope, perhaps a layer defined one
         try:
-            bbpath = d.getVar("BBPATH", True).split(':')
+            bbpath = d.getVar("BBPATH").split(':')
             testtargetloader = TestTargetLoader()
             controller = testtargetloader.get_controller_module(testtarget, bbpath)
         except ImportError as e:
@@ -51,9 +51,9 @@ class BaseTarget(object, metaclass=ABCMeta):
         self.connection = None
         self.ip = None
         self.server_ip = None
-        self.datetime = d.getVar('DATETIME', True)
-        self.testdir = d.getVar("TEST_LOG_DIR", True)
-        self.pn = d.getVar("PN", True)
+        self.datetime = d.getVar('DATETIME')
+        self.testdir = d.getVar("TEST_LOG_DIR")
+        self.pn = d.getVar("PN")

     @abstractmethod
     def deploy(self):
@@ -80,7 +80,7 @@ class BaseTarget(object, metaclass=ABCMeta):
     @classmethod
     def match_image_fstype(self, d, image_fstypes=None):
         if not image_fstypes:
-            image_fstypes = d.getVar('IMAGE_FSTYPES', True).split(' ')
+            image_fstypes = d.getVar('IMAGE_FSTYPES').split(' ')
         possible_image_fstypes = [fstype for fstype in self.supported_image_fstypes if fstype in image_fstypes]
         if possible_image_fstypes:
             return possible_image_fstypes[0]
@@ -119,14 +119,14 @@ class QemuTarget(BaseTarget):

         self.image_fstype = self.get_image_fstype(d)
         self.qemulog = os.path.join(self.testdir, "qemu_boot_log.%s" % self.datetime)
-        self.rootfs = os.path.join(d.getVar("DEPLOY_DIR_IMAGE", True), d.getVar("IMAGE_LINK_NAME", True) + '.' + self.image_fstype)
-        self.kernel = os.path.join(d.getVar("DEPLOY_DIR_IMAGE", True), d.getVar("KERNEL_IMAGETYPE", False) + '-' + d.getVar('MACHINE', False) + '.bin')
-        dump_target_cmds = d.getVar("testimage_dump_target", True)
-        dump_host_cmds = d.getVar("testimage_dump_host", True)
-        dump_dir = d.getVar("TESTIMAGE_DUMP_DIR", True)
+        self.rootfs = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), d.getVar("IMAGE_LINK_NAME") + '.' + self.image_fstype)
+        self.kernel = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), d.getVar("KERNEL_IMAGETYPE", False) + '-' + d.getVar('MACHINE', False) + '.bin')
+        dump_target_cmds = d.getVar("testimage_dump_target")
+        dump_host_cmds = d.getVar("testimage_dump_host")
+        dump_dir = d.getVar("TESTIMAGE_DUMP_DIR")
         if d.getVar("QEMU_USE_KVM", False) is not None \
            and d.getVar("QEMU_USE_KVM", False) == "True" \
-           and "x86" in d.getVar("MACHINE", True):
+           and "x86" in d.getVar("MACHINE"):
             use_kvm = True
         else:
             use_kvm = False
@@ -141,26 +141,26 @@ class QemuTarget(BaseTarget):
         logger.addHandler(loggerhandler)
         oe.path.symlink(os.path.basename(self.qemurunnerlog), os.path.join(self.testdir, 'qemurunner_log'), force=True)

-        if d.getVar("DISTRO", True) == "poky-tiny":
-            self.runner = QemuTinyRunner(machine=d.getVar("MACHINE", True),
+        if d.getVar("DISTRO") == "poky-tiny":
+            self.runner = QemuTinyRunner(machine=d.getVar("MACHINE"),
                             rootfs=self.rootfs,
-                            tmpdir = d.getVar("TMPDIR", True),
-                            deploy_dir_image = d.getVar("DEPLOY_DIR_IMAGE", True),
-                            display = d.getVar("BB_ORIGENV", False).getVar("DISPLAY", True),
+                            tmpdir = d.getVar("TMPDIR"),
+                            deploy_dir_image = d.getVar("DEPLOY_DIR_IMAGE"),
+                            display = d.getVar("BB_ORIGENV", False).getVar("DISPLAY"),
                             logfile = self.qemulog,
                             kernel = self.kernel,
-                            boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT", True)))
+                            boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT")))
         else:
-            self.runner = QemuRunner(machine=d.getVar("MACHINE", True),
+            self.runner = QemuRunner(machine=d.getVar("MACHINE"),
                             rootfs=self.rootfs,
-                            tmpdir = d.getVar("TMPDIR", True),
-                            deploy_dir_image = d.getVar("DEPLOY_DIR_IMAGE", True),
-                            display = d.getVar("BB_ORIGENV", False).getVar("DISPLAY", True),
+                            tmpdir = d.getVar("TMPDIR"),
+                            deploy_dir_image = d.getVar("DEPLOY_DIR_IMAGE"),
+                            display = d.getVar("BB_ORIGENV", False).getVar("DISPLAY"),
                             logfile = self.qemulog,
-                            boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT", True)),
+                            boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT")),
                             use_kvm = use_kvm,
                             dump_dir = dump_dir,
-                            dump_host_cmds = d.getVar("testimage_dump_host", True))
+                            dump_host_cmds = d.getVar("testimage_dump_host"))

         self.target_dumper = TargetDumper(dump_target_cmds, dump_dir, self.runner)
@@ -214,14 +214,14 @@ class SimpleRemoteTarget(BaseTarget):

     def __init__(self, d):
         super(SimpleRemoteTarget, self).__init__(d)
-        addr = d.getVar("TEST_TARGET_IP", True) or bb.fatal('Please set TEST_TARGET_IP with the IP address of the machine you want to run the tests on.')
+        addr = d.getVar("TEST_TARGET_IP") or bb.fatal('Please set TEST_TARGET_IP with the IP address of the machine you want to run the tests on.')
         self.ip = addr.split(":")[0]
         try:
             self.port = addr.split(":")[1]
         except IndexError:
             self.port = None
         bb.note("Target IP: %s" % self.ip)
-        self.server_ip = d.getVar("TEST_SERVER_IP", True)
+        self.server_ip = d.getVar("TEST_SERVER_IP")
         if not self.server_ip:
             try:
                 self.server_ip = subprocess.check_output(['ip', 'route', 'get', self.ip ]).split("\n")[0].split()[-1]
diff --git a/meta/lib/oeqa/utils/commands.py b/meta/lib/oeqa/utils/commands.py
index aecf8cf5a8..3a68b001b7 100644
--- a/meta/lib/oeqa/utils/commands.py
+++ b/meta/lib/oeqa/utils/commands.py
@@ -231,7 +231,7 @@ def runqemu(pn, ssh=True):
         logger = logging.getLogger('BitBake.QemuRunner')
         logger.setLevel(logging.DEBUG)
         logger.propagate = False
-        logdir = recipedata.getVar("TEST_LOG_DIR", True)
+        logdir = recipedata.getVar("TEST_LOG_DIR")

         qemu = oeqa.targetcontrol.QemuTarget(recipedata)
     finally:
diff --git a/meta/lib/oeqa/utils/dump.py b/meta/lib/oeqa/utils/dump.py
index 71422a9aea..44037a989d 100644
--- a/meta/lib/oeqa/utils/dump.py
+++ b/meta/lib/oeqa/utils/dump.py
@@ -6,8 +6,8 @@ import itertools
 from .commands import runCmd

 def get_host_dumper(d):
-    cmds = d.getVar("testimage_dump_host", True)
-    parent_dir = d.getVar("TESTIMAGE_DUMP_DIR", True)
+    cmds = d.getVar("testimage_dump_host")
+    parent_dir = d.getVar("TESTIMAGE_DUMP_DIR")
     return HostDumper(cmds, parent_dir)
diff --git a/meta/lib/oeqa/utils/package_manager.py b/meta/lib/oeqa/utils/package_manager.py
index 099ecc9728..0f6bdbc542 100644
--- a/meta/lib/oeqa/utils/package_manager.py
+++ b/meta/lib/oeqa/utils/package_manager.py
@@ -4,24 +4,24 @@ def get_package_manager(d, root_path):
     """
     from oe.package_manager import RpmPM, OpkgPM, DpkgPM

-    pkg_class = d.getVar("IMAGE_PKGTYPE", True)
+    pkg_class = d.getVar("IMAGE_PKGTYPE")
     if pkg_class == "rpm":
         pm = RpmPM(d, root_path,
-                   d.getVar('TARGET_VENDOR', True))
+                   d.getVar('TARGET_VENDOR'))
         pm.create_configs()

     elif pkg_class == "ipk":
         pm = OpkgPM(d, root_path,
-                    d.getVar("IPKGCONF_TARGET", True),
-                    d.getVar("ALL_MULTILIB_PACKAGE_ARCHS", True))
+                    d.getVar("IPKGCONF_TARGET"),
+                    d.getVar("ALL_MULTILIB_PACKAGE_ARCHS"))

     elif pkg_class == "deb":
         pm = DpkgPM(d, root_path,
-                    d.getVar('PACKAGE_ARCHS', True),
-                    d.getVar('DPKG_ARCH', True))
+                    d.getVar('PACKAGE_ARCHS'),
+                    d.getVar('DPKG_ARCH'))

     pm.write_index()
     pm.update()
diff --git a/meta/lib/oeqa/utils/targetbuild.py b/meta/lib/oeqa/utils/targetbuild.py
index 59593f5ef3..c001602b54 100644
--- a/meta/lib/oeqa/utils/targetbuild.py
+++ b/meta/lib/oeqa/utils/targetbuild.py
@@ -25,7 +25,7 @@ class BuildProject(metaclass=ABCMeta):

     # Download self.archive to self.localarchive
     def _download_archive(self):
-        dl_dir = self.d.getVar("DL_DIR", True)
+        dl_dir = self.d.getVar("DL_DIR")
         if dl_dir and os.path.exists(os.path.join(dl_dir, self.archive)):
             bb.utils.copyfile(os.path.join(dl_dir, self.archive), self.localarchive)
             return
@@ -40,7 +40,7 @@ class BuildProject(metaclass=ABCMeta):

         cmd = ''
         for var in exportvars:
-            val = self.d.getVar(var, True)
+            val = self.d.getVar(var)
             if val:
                 cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)

@@ -103,8 +103,8 @@ class SDKBuildProject(BuildProject):
         self.testdir = testpath
         self.targetdir = testpath
         bb.utils.mkdirhier(testpath)
-        self.datetime = d.getVar('DATETIME', True)
-        self.testlogdir = d.getVar("TEST_LOG_DIR", True)
+        self.datetime = d.getVar('DATETIME')
+        self.testlogdir = d.getVar("TEST_LOG_DIR")
         bb.utils.mkdirhier(self.testlogdir)
         self.logfile = os.path.join(self.testlogdir, "sdk_target_log.%s" % self.datetime)
         BuildProject.__init__(self, d, uri, foldername, tmpdir=testpath)
diff --git a/meta/lib/oeqa/utils/testexport.py b/meta/lib/oeqa/utils/testexport.py
index 57be2ca449..be2a2110fc 100644
--- a/meta/lib/oeqa/utils/testexport.py
+++ b/meta/lib/oeqa/utils/testexport.py
@@ -72,9 +72,9 @@ def process_binaries(d, params):
             return extract_bin_command

     if determine_if_poky_env(): # machine with poky environment
-        exportpath = d.getVar("TEST_EXPORT_DIR", True) if export_env else d.getVar("DEPLOY_DIR", True)
-        rpm_deploy_dir = d.getVar("DEPLOY_DIR_RPM", True)
-        arch = get_dest_folder(d.getVar("TUNE_FEATURES", True), os.listdir(rpm_deploy_dir))
+        exportpath = d.getVar("TEST_EXPORT_DIR") if export_env else d.getVar("DEPLOY_DIR")
+        rpm_deploy_dir = d.getVar("DEPLOY_DIR_RPM")
+        arch = get_dest_folder(d.getVar("TUNE_FEATURES"), os.listdir(rpm_deploy_dir))
         arch_rpm_dir = os.path.join(rpm_deploy_dir, arch)
         extracted_bin_dir = os.path.join(exportpath,"binaries", arch, "extracted_binaries")
         packaged_bin_dir = os.path.join(exportpath,"binaries", arch, "packaged_binaries")
@@ -92,7 +92,7 @@ def process_binaries(d, params):
                     return ""
                 for item in native_rpm_file_list:# will copy all versions of package. Used version will be selected on remote machine
                     bb.plain("Copying native package file: %s" % item)
-                    sh.copy(os.path.join(rpm_deploy_dir, native_rpm_dir, item), os.path.join(d.getVar("TEST_EXPORT_DIR", True), "binaries", "native"))
+                    sh.copy(os.path.join(rpm_deploy_dir, native_rpm_dir, item), os.path.join(d.getVar("TEST_EXPORT_DIR"), "binaries", "native"))
         else: # nothing to do here; running tests under bitbake, so we asume native binaries are in sysroots dir.
             if param_list[1] or param_list[4]:
                 bb.warn("Native binary %s %s%s. Running tests under bitbake environment. Version can't be checked except when the test itself does it"
@@ -148,7 +148,7 @@ def process_binaries(d, params):
     else: # this is for target device
         if param_list[2] == "rpm":
             return "No need to extract, this is an .rpm file"
-        arch = get_dest_folder(d.getVar("TUNE_FEATURES", True), os.listdir(binaries_path))
+        arch = get_dest_folder(d.getVar("TUNE_FEATURES"), os.listdir(binaries_path))
         extracted_bin_path = os.path.join(binaries_path, arch, "extracted_binaries")
         extracted_bin_list = [item for item in os.listdir(extracted_bin_path)]
         packaged_bin_path = os.path.join(binaries_path, arch, "packaged_binaries")
@@ -206,9 +206,9 @@ def send_bin_to_DUT(d,params):
     from oeqa.oetest import oeRuntimeTest
     param_list = params
     cleanup_list = list()
-    bins_dir = os.path.join(d.getVar("TEST_EXPORT_DIR", True), "binaries") if determine_if_poky_env() \
+    bins_dir = os.path.join(d.getVar("TEST_EXPORT_DIR"), "binaries") if determine_if_poky_env() \
                     else os.getenv("bin_dir")
-    arch = get_dest_folder(d.getVar("TUNE_FEATURES", True), os.listdir(bins_dir))
+    arch = get_dest_folder(d.getVar("TUNE_FEATURES"), os.listdir(bins_dir))
     arch_rpms_dir = os.path.join(bins_dir, arch, "packaged_binaries")
     extracted_bin_dir = os.path.join(bins_dir, arch, "extracted_binaries", param_list[0])
--
cgit 1.2.3-korg