-rw-r--r--  meta-demoapps/recipes-gnome/abiword/abiword-2.5.inc  2
-rw-r--r--  meta-demoapps/recipes-gnome/abiword/abiword.inc  2
-rw-r--r--  meta-demoapps/recipes-support/poppler/poppler-fpu.inc  2
-rw-r--r--  meta/classes/autotools.bbclass  6
-rw-r--r--  meta/classes/base.bbclass  114
-rw-r--r--  meta/classes/bugzilla.bbclass  30
-rw-r--r--  meta/classes/buildstats.bbclass  36
-rw-r--r--  meta/classes/cpan-base.bbclass  2
-rw-r--r--  meta/classes/cpan_build.bbclass  4
-rw-r--r--  meta/classes/cross-canadian.bbclass  2
-rw-r--r--  meta/classes/debian.bbclass  18
-rw-r--r--  meta/classes/distrodata.bbclass  162
-rw-r--r--  meta/classes/distutils-base.bbclass  2
-rw-r--r--  meta/classes/distutils-native-base.bbclass  2
-rw-r--r--  meta/classes/gconf.bbclass  16
-rw-r--r--  meta/classes/gtk-icon-cache.bbclass  24
-rw-r--r--  meta/classes/icecc.bbclass  16
-rw-r--r--  meta/classes/image-swab.bbclass  4
-rw-r--r--  meta/classes/image.bbclass  20
-rw-r--r--  meta/classes/image_types.bbclass  4
-rw-r--r--  meta/classes/imagetest-qemu.bbclass  56
-rw-r--r--  meta/classes/insane.bbclass  78
-rw-r--r--  meta/classes/kernel-arch.bbclass  6
-rw-r--r--  meta/classes/kernel-yocto.bbclass  2
-rw-r--r--  meta/classes/kernel.bbclass  68
-rw-r--r--  meta/classes/libc-common.bbclass  10
-rw-r--r--  meta/classes/libc-package.bbclass  66
-rw-r--r--  meta/classes/license.bbclass  18
-rw-r--r--  meta/classes/metadata_scm.bbclass  2
-rw-r--r--  meta/classes/native.bbclass  12
-rw-r--r--  meta/classes/nativesdk.bbclass  14
-rw-r--r--  meta/classes/package.bbclass  232
-rw-r--r--  meta/classes/package_deb.bbclass  82
-rw-r--r--  meta/classes/package_ipk.bbclass  82
-rw-r--r--  meta/classes/package_rpm.bbclass  136
-rw-r--r--  meta/classes/package_tar.bbclass  38
-rw-r--r--  meta/classes/packagedata.bbclass  8
-rw-r--r--  meta/classes/packagehistory.bbclass  8
-rw-r--r--  meta/classes/patch.bbclass  28
-rw-r--r--  meta/classes/pkg_distribute.bbclass  2
-rw-r--r--  meta/classes/pkg_metainfo.bbclass  12
-rw-r--r--  meta/classes/populate_sdk_deb.bbclass  4
-rw-r--r--  meta/classes/populate_sdk_rpm.bbclass  2
-rw-r--r--  meta/classes/qemu.bbclass  2
-rw-r--r--  meta/classes/qt4e.bbclass  2
-rw-r--r--  meta/classes/qt4x11.bbclass  2
-rw-r--r--  meta/classes/relocatable.bbclass  2
-rw-r--r--  meta/classes/rootfs_ipk.bbclass  10
-rw-r--r--  meta/classes/rootfs_rpm.bbclass  14
-rw-r--r--  meta/classes/sanity.bbclass  10
-rw-r--r--  meta/classes/siteconfig.bbclass  2
-rw-r--r--  meta/classes/siteinfo.bbclass  2
-rw-r--r--  meta/classes/sourcepkg.bbclass  12
-rw-r--r--  meta/classes/src_distribute.bbclass  16
-rw-r--r--  meta/classes/sstate.bbclass  94
-rw-r--r--  meta/classes/staging.bbclass  6
-rw-r--r--  meta/classes/syslinux.bbclass  34
-rw-r--r--  meta/classes/task.bbclass  2
-rw-r--r--  meta/classes/toolchain-scripts.bbclass  6
-rw-r--r--  meta/classes/update-alternatives.bbclass  38
-rw-r--r--  meta/classes/update-rc.d.bbclass  36
-rw-r--r--  meta/classes/useradd.bbclass  8
-rw-r--r--  meta/classes/utility-tasks.bbclass  8
-rw-r--r--  meta/classes/utils.bbclass  4
-rw-r--r--  meta/conf/bitbake.conf  36
-rw-r--r--  meta/conf/distro/defaultsetup.conf  2
-rw-r--r--  meta/conf/machine/include/arm/feature-arm-thumb.inc  2
-rw-r--r--  meta/conf/machine/include/tune-thumb.inc  8
-rw-r--r--  meta/lib/oe/distro_check.py  22
-rw-r--r--  meta/lib/oe/patch.py  4
-rw-r--r--  meta/lib/oe/utils.py  8
-rw-r--r--  meta/recipes-bsp/grub/grub_0.97.bb  2
-rw-r--r--  meta/recipes-bsp/grub/grub_1.99.bb  2
-rw-r--r--  meta/recipes-bsp/uboot/u-boot_2011.03.bb  2
-rw-r--r--  meta/recipes-bsp/uboot/u-boot_2011.06.bb  2
-rw-r--r--  meta/recipes-bsp/x-load/x-load_git.bb  2
-rw-r--r--  meta/recipes-connectivity/connman/connman.inc  2
-rw-r--r--  meta/recipes-core/busybox/busybox.inc  6
-rw-r--r--  meta/recipes-core/eglibc/eglibc-options.inc  6
-rw-r--r--  meta/recipes-core/eglibc/eglibc-package.inc  4
-rw-r--r--  meta/recipes-core/eglibc/eglibc_2.13.bb  4
-rw-r--r--  meta/recipes-core/eglibc/eglibc_2.14.bb  4
-rw-r--r--  meta/recipes-core/glib-2.0/glib-2.0_2.30.0.bb  2
-rw-r--r--  meta/recipes-core/libxml/libxml2.inc  4
-rw-r--r--  meta/recipes-core/tasks/task-base.bb  10
-rw-r--r--  meta/recipes-core/tasks/task-core-sdk.bb  6
-rw-r--r--  meta/recipes-core/uclibc/uclibc-config.inc  16
-rw-r--r--  meta/recipes-core/uclibc/uclibc.inc  10
-rw-r--r--  meta/recipes-devtools/apt/apt-native.inc  4
-rw-r--r--  meta/recipes-devtools/apt/apt-package.inc  8
-rw-r--r--  meta/recipes-devtools/automake/automake.inc  2
-rw-r--r--  meta/recipes-devtools/cmake/cmake.inc  2
-rw-r--r--  meta/recipes-devtools/cmake/cmake_2.8.5.bb  6
-rw-r--r--  meta/recipes-devtools/gcc/gcc-common.inc  8
-rw-r--r--  meta/recipes-devtools/gcc/gcc-configure-common.inc  2
-rw-r--r--  meta/recipes-devtools/gnu-config/gnu-config_20080123.bb  2
-rw-r--r--  meta/recipes-devtools/intltool/intltool.inc  2
-rw-r--r--  meta/recipes-devtools/opkg/opkg.inc  2
-rw-r--r--  meta/recipes-devtools/perl/perl_5.14.2.bb  2
-rw-r--r--  meta/recipes-devtools/python/python-pygobject_2.27.91.bb  2
-rw-r--r--  meta/recipes-devtools/qemu/qemu-targets.inc  2
-rw-r--r--  meta/recipes-devtools/unfs-server/unfs-server_2.1+2.2beta47.bb  2
-rw-r--r--  meta/recipes-extended/cups/cups14.inc  2
-rw-r--r--  meta/recipes-extended/lsof/lsof_4.85.bb  6
-rw-r--r--  meta/recipes-extended/pam/libpam_1.1.4.bb  4
-rw-r--r--  meta/recipes-extended/zip/zip.inc  2
-rw-r--r--  meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.24.0.bb  2
-rw-r--r--  meta/recipes-gnome/gtk+/gtk+_2.12.7.bb  8
-rw-r--r--  meta/recipes-gnome/gtk+/gtk+_2.16.6.bb  8
-rw-r--r--  meta/recipes-gnome/gtk+/gtk+_2.24.6.bb  6
-rw-r--r--  meta/recipes-gnome/gtk-engines/gtk-engines_2.20.2.bb  4
-rw-r--r--  meta/recipes-graphics/cairo/cairo-fpu.inc  2
-rw-r--r--  meta/recipes-graphics/clutter/clutter-fpu.inc  2
-rw-r--r--  meta/recipes-graphics/fontconfig/fontconfig-native_2.8.0.bb  2
-rw-r--r--  meta/recipes-graphics/mesa/mesa-dri.inc  2
-rw-r--r--  meta/recipes-graphics/pango/pango.inc  2
-rw-r--r--  meta/recipes-graphics/xorg-lib/libxft_2.2.0.bb  4
-rw-r--r--  meta/recipes-kernel/linux/linux-dtb.inc  10
-rw-r--r--  meta/recipes-kernel/linux/linux-yocto-rt_2.6.34.bb  2
-rw-r--r--  meta/recipes-kernel/linux/linux-yocto_2.6.34.bb  2
-rw-r--r--  meta/recipes-multimedia/alsa/alsa-fpu.inc  2
-rw-r--r--  meta/recipes-multimedia/gstreamer/gst-plugins-package.inc  12
-rw-r--r--  meta/recipes-multimedia/pulseaudio/pulseaudio.inc  2
-rw-r--r--  meta/recipes-qt/qt4/qt4.inc  20
-rw-r--r--  meta/recipes-qt/qt4/qt4_arch.inc  6
-rw-r--r--  meta/recipes-sato/puzzles/oh-puzzles_git.bb  4
-rw-r--r--  meta/recipes-sato/puzzles/puzzles_r9306.bb  2
-rw-r--r--  meta/recipes-support/attr/ea-acl.inc  4
-rw-r--r--  meta/recipes-support/boost/boost-36.inc  10
-rwxr-xr-x  scripts/jhbuild/jhbuild2oe.py  28
130 files changed, 1058 insertions(+), 1058 deletions(-)
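The change below is mechanical throughout: every module-level bb.data.getVar(var, d, expand) / bb.data.setVar(var, value, d) call becomes a method call on the datastore itself, d.getVar(var, expand) / d.setVar(var, value), and likewise for getVarFlag/setVarFlag. A minimal sketch of the equivalence, using a stand-in class rather than BitBake's real DataSmart object (which also handles ${...} expansion, overrides and variable flags):

    # Stand-in datastore; only illustrates the old-vs-new call shape
    # converted by this diff, not BitBake's actual implementation.
    class FakeDataStore:
        def __init__(self):
            self._vars = {}
        def getVar(self, var, expand=False):
            # 'expand' is accepted but ignored in this mock
            return self._vars.get(var)
        def setVar(self, var, value):
            self._vars[var] = value

    d = FakeDataStore()
    d.setVar('PV', '2.5.1')
    # old style, removed:    bb.data.getVar('PV', d, True)
    # new style, introduced: d.getVar('PV', True)
    print(d.getVar('PV', True))   # -> 2.5.1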
diff --git a/meta-demoapps/recipes-gnome/abiword/abiword-2.5.inc b/meta-demoapps/recipes-gnome/abiword/abiword-2.5.inc
index ce537df7ffe..511ae97befb 100644
--- a/meta-demoapps/recipes-gnome/abiword/abiword-2.5.inc
+++ b/meta-demoapps/recipes-gnome/abiword/abiword-2.5.inc
@@ -9,7 +9,7 @@ RDEPENDS_${PN} = "glibc-gconv-ibm850 glibc-gconv-cp1252 \
SRC_URI = "http://www.abiword.org/downloads/abiword/${PV}/source/abiword-${PV}.tar.gz"
#want 2.x from 2.x.y for the installation directory
-SHRT_VER = "${@bb.data.getVar('PV',d,1).split('.')[0]}.${@bb.data.getVar('PV',d,1).split('.')[1]}"
+SHRT_VER = "${@d.getVar('PV',1).split('.')[0]}.${@d.getVar('PV',1).split('.')[1]}"
FILES_${PN} += " \
${datadir}/icons/* \
diff --git a/meta-demoapps/recipes-gnome/abiword/abiword.inc b/meta-demoapps/recipes-gnome/abiword/abiword.inc
index 4ec0ba976a7..036f98f348f 100644
--- a/meta-demoapps/recipes-gnome/abiword/abiword.inc
+++ b/meta-demoapps/recipes-gnome/abiword/abiword.inc
@@ -17,7 +17,7 @@ SVNURI = "svn://svn.abisource.com/abiword/trunk;module=abiword;proto=http"
SVNSRC = "${WORKDIR}/abi"
#want 2.x from 2.x.y for the installation directory
-SHRT_VER = "${@bb.data.getVar('PV',d,1).split('.')[0]}.${@bb.data.getVar('PV',d,1).split('.')[1]}"
+SHRT_VER = "${@d.getVar('PV',1).split('.')[0]}.${@d.getVar('PV',1).split('.')[1]}"
FILES_${PN} += " \
${datadir}/icons/* \
diff --git a/meta-demoapps/recipes-support/poppler/poppler-fpu.inc b/meta-demoapps/recipes-support/poppler/poppler-fpu.inc
index a26273020aa..2fbee13b512 100644
--- a/meta-demoapps/recipes-support/poppler/poppler-fpu.inc
+++ b/meta-demoapps/recipes-support/poppler/poppler-fpu.inc
@@ -1,6 +1,6 @@
def get_poppler_fpu_setting(bb, d):
- if bb.data.getVar('TARGET_FPU', d, 1) in [ 'soft' ]:
+ if d.getVar('TARGET_FPU', 1) in [ 'soft' ]:
return "--enable-fixedpoint"
return ""
diff --git a/meta/classes/autotools.bbclass b/meta/classes/autotools.bbclass
index 451c7fcc1ed..37e7d4b482b 100644
--- a/meta/classes/autotools.bbclass
+++ b/meta/classes/autotools.bbclass
@@ -1,8 +1,8 @@
def autotools_dep_prepend(d):
- if bb.data.getVar('INHIBIT_AUTOTOOLS_DEPS', d, 1):
+ if d.getVar('INHIBIT_AUTOTOOLS_DEPS', 1):
return ''
- pn = bb.data.getVar('PN', d, 1)
+ pn = d.getVar('PN', 1)
deps = ''
if pn in ['autoconf-native', 'automake-native', 'help2man-native']:
@@ -13,7 +13,7 @@ def autotools_dep_prepend(d):
deps += 'libtool-native '
if not bb.data.inherits_class('native', d) \
and not bb.data.inherits_class('cross', d) \
- and not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d, 1):
+ and not d.getVar('INHIBIT_DEFAULT_DEPS', 1):
deps += 'libtool-cross '
return deps + 'gnu-config-native '
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass
index f5397446dd9..f3f798f9bf9 100644
--- a/meta/classes/base.bbclass
+++ b/meta/classes/base.bbclass
@@ -60,9 +60,9 @@ def base_dep_prepend(d):
# INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
# we need that built is the responsibility of the patch function / class, not
# the application.
- if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d):
- if (bb.data.getVar('HOST_SYS', d, 1) !=
- bb.data.getVar('BUILD_SYS', d, 1)):
+ if not d.getVar('INHIBIT_DEFAULT_DEPS'):
+ if (d.getVar('HOST_SYS', 1) !=
+ d.getVar('BUILD_SYS', 1)):
deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc "
return deps
@@ -73,13 +73,13 @@ DEPENDS_prepend="${BASEDEPENDS} "
FILESPATH = "${@base_set_filespath([ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ], d)}"
# THISDIR only works properly with imediate expansion as it has to run
# in the context of the location its used (:=)
-THISDIR = "${@os.path.dirname(bb.data.getVar('FILE', d, True))}"
+THISDIR = "${@os.path.dirname(d.getVar('FILE', True))}"
addtask fetch
do_fetch[dirs] = "${DL_DIR}"
python base_do_fetch() {
- src_uri = (bb.data.getVar('SRC_URI', d, True) or "").split()
+ src_uri = (d.getVar('SRC_URI', True) or "").split()
if len(src_uri) == 0:
return
@@ -96,14 +96,14 @@ python base_do_fetch() {
addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"
python base_do_unpack() {
- src_uri = (bb.data.getVar('SRC_URI', d, True) or "").split()
+ src_uri = (d.getVar('SRC_URI', True) or "").split()
if len(src_uri) == 0:
return
localdata = bb.data.createCopy(d)
bb.data.update_data(localdata)
- rootdir = bb.data.getVar('WORKDIR', localdata, True)
+ rootdir = localdata.getVar('WORKDIR', True)
try:
fetcher = bb.fetch2.Fetch(src_uri, localdata)
@@ -118,7 +118,7 @@ def generate_git_config(e):
from bb import data
if data.getVar('GIT_CORE_CONFIG', e.data, True):
- gitconfig_path = bb.data.getVar('GIT_CONFIG', e.data, True)
+ gitconfig_path = e.data.getVar('GIT_CONFIG', True)
proxy_command = " gitproxy = %s\n" % data.getVar('GIT_PROXY_COMMAND', e.data, True)
bb.mkdirhier(bb.data.expand("${STAGING_DIR_NATIVE}/usr/etc/", e.data))
@@ -207,11 +207,11 @@ python base_eventhandler() {
name = getName(e)
if name.startswith("BuildStarted"):
- bb.data.setVar( 'BB_VERSION', bb.__version__, e.data )
+ e.data.setVar( 'BB_VERSION', bb.__version__)
statusvars = ['BB_VERSION', 'TARGET_ARCH', 'TARGET_OS', 'MACHINE', 'DISTRO', 'DISTRO_VERSION','TUNE_FEATURES', 'TARGET_FPU']
- statuslines = ["%-17s = \"%s\"" % (i, bb.data.getVar(i, e.data, 1) or '') for i in statusvars]
+ statuslines = ["%-17s = \"%s\"" % (i, e.data.getVar(i, 1) or '') for i in statusvars]
- layers = (bb.data.getVar("BBLAYERS", e.data, 1) or "").split()
+ layers = (e.data.getVar("BBLAYERS", 1) or "").split()
layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \
base_get_metadata_git_branch(i, None).strip(), \
base_get_metadata_git_revision(i, None)) \
@@ -237,7 +237,7 @@ python base_eventhandler() {
needed_vars = [ "TARGET_ARCH", "TARGET_OS" ]
pesteruser = []
for v in needed_vars:
- val = bb.data.getVar(v, e.data, 1)
+ val = e.data.getVar(v, 1)
if not val or val == 'INVALID':
pesteruser.append(v)
if pesteruser:
@@ -330,23 +330,23 @@ python () {
appendVar('EXTRA_OECONF', extraconf)
# If PRINC is set, try and increase the PR value by the amount specified
- princ = bb.data.getVar('PRINC', d, True)
+ princ = d.getVar('PRINC', True)
if princ:
- pr = bb.data.getVar('PR', d, True)
+ pr = d.getVar('PR', True)
pr_prefix = re.search("\D+",pr)
prval = re.search("\d+",pr)
if pr_prefix is None or prval is None:
bb.error("Unable to analyse format of PR variable: %s" % pr)
nval = int(prval.group(0)) + int(princ)
pr = pr_prefix.group(0) + str(nval) + pr[prval.end():]
- bb.data.setVar('PR', pr, d)
+ d.setVar('PR', pr)
- pn = bb.data.getVar('PN', d, 1)
- license = bb.data.getVar('LICENSE', d, True)
+ pn = d.getVar('PN', 1)
+ license = d.getVar('LICENSE', True)
if license == "INVALID":
bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)
- commercial_license = " %s " % bb.data.getVar('COMMERCIAL_LICENSE', d, 1)
+ commercial_license = " %s " % d.getVar('COMMERCIAL_LICENSE', 1)
import re
pnr = "[ \t]%s[ \t]" % pn.replace('+', "\+")
if commercial_license and re.search(pnr, commercial_license):
@@ -356,86 +356,86 @@ python () {
# If we're building a target package we need to use fakeroot (pseudo)
# in order to capture permissions, owners, groups and special files
if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
- bb.data.setVarFlag('do_configure', 'umask', 022, d)
- bb.data.setVarFlag('do_compile', 'umask', 022, d)
- deps = (bb.data.getVarFlag('do_install', 'depends', d) or "").split()
+ d.setVarFlag('do_configure', 'umask', 022)
+ d.setVarFlag('do_compile', 'umask', 022)
+ deps = (d.getVarFlag('do_install', 'depends') or "").split()
deps.append('virtual/fakeroot-native:do_populate_sysroot')
bb.data.setVarFlag('do_install', 'depends', " ".join(deps),d)
- bb.data.setVarFlag('do_install', 'fakeroot', 1, d)
- bb.data.setVarFlag('do_install', 'umask', 022, d)
- deps = (bb.data.getVarFlag('do_package', 'depends', d) or "").split()
+ d.setVarFlag('do_install', 'fakeroot', 1)
+ d.setVarFlag('do_install', 'umask', 022)
+ deps = (d.getVarFlag('do_package', 'depends') or "").split()
deps.append('virtual/fakeroot-native:do_populate_sysroot')
bb.data.setVarFlag('do_package', 'depends', " ".join(deps),d)
- bb.data.setVarFlag('do_package', 'fakeroot', 1, d)
- bb.data.setVarFlag('do_package', 'umask', 022, d)
- bb.data.setVarFlag('do_package_setscene', 'fakeroot', 1, d)
- source_mirror_fetch = bb.data.getVar('SOURCE_MIRROR_FETCH', d, 0)
+ d.setVarFlag('do_package', 'fakeroot', 1)
+ d.setVarFlag('do_package', 'umask', 022)
+ d.setVarFlag('do_package_setscene', 'fakeroot', 1)
+ source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', 0)
if not source_mirror_fetch:
- need_host = bb.data.getVar('COMPATIBLE_HOST', d, 1)
+ need_host = d.getVar('COMPATIBLE_HOST', 1)
if need_host:
import re
- this_host = bb.data.getVar('HOST_SYS', d, 1)
+ this_host = d.getVar('HOST_SYS', 1)
if not re.match(need_host, this_host):
raise bb.parse.SkipPackage("incompatible with host %s" % this_host)
- need_machine = bb.data.getVar('COMPATIBLE_MACHINE', d, 1)
+ need_machine = d.getVar('COMPATIBLE_MACHINE', 1)
if need_machine:
import re
- this_machine = bb.data.getVar('MACHINE', d, 1)
+ this_machine = d.getVar('MACHINE', 1)
if this_machine and not re.match(need_machine, this_machine):
- this_soc_family = bb.data.getVar('SOC_FAMILY', d, 1)
+ this_soc_family = d.getVar('SOC_FAMILY', 1)
if (this_soc_family and not re.match(need_machine, this_soc_family)) or not this_soc_family:
raise bb.parse.SkipPackage("incompatible with machine %s" % this_machine)
- dont_want_license = bb.data.getVar('INCOMPATIBLE_LICENSE', d, 1)
+ dont_want_license = d.getVar('INCOMPATIBLE_LICENSE', 1)
if dont_want_license and not pn.endswith("-native") and not pn.endswith("-cross") and not pn.endswith("-cross-initial") and not pn.endswith("-cross-intermediate") and not pn.endswith("-crosssdk-intermediate") and not pn.endswith("-crosssdk") and not pn.endswith("-crosssdk-initial"):
- hosttools_whitelist = (bb.data.getVar('HOSTTOOLS_WHITELIST_%s' % dont_want_license, d, 1) or "").split()
- lgplv2_whitelist = (bb.data.getVar('LGPLv2_WHITELIST_%s' % dont_want_license, d, 1) or "").split()
- dont_want_whitelist = (bb.data.getVar('WHITELIST_%s' % dont_want_license, d, 1) or "").split()
+ hosttools_whitelist = (d.getVar('HOSTTOOLS_WHITELIST_%s' % dont_want_license, 1) or "").split()
+ lgplv2_whitelist = (d.getVar('LGPLv2_WHITELIST_%s' % dont_want_license, 1) or "").split()
+ dont_want_whitelist = (d.getVar('WHITELIST_%s' % dont_want_license, 1) or "").split()
if pn not in hosttools_whitelist and pn not in lgplv2_whitelist and pn not in dont_want_whitelist:
import re
- this_license = bb.data.getVar('LICENSE', d, 1)
+ this_license = d.getVar('LICENSE', 1)
if this_license and re.search(dont_want_license, this_license):
bb.note("SKIPPING %s because it's %s" % (pn, this_license))
raise bb.parse.SkipPackage("incompatible with license %s" % this_license)
# Git packages should DEPEND on git-native
- srcuri = bb.data.getVar('SRC_URI', d, 1)
+ srcuri = d.getVar('SRC_URI', 1)
if "git://" in srcuri:
- depends = bb.data.getVarFlag('do_fetch', 'depends', d) or ""
+ depends = d.getVarFlag('do_fetch', 'depends') or ""
depends = depends + " git-native:do_populate_sysroot"
- bb.data.setVarFlag('do_fetch', 'depends', depends, d)
+ d.setVarFlag('do_fetch', 'depends', depends)
# Mercurial packages should DEPEND on mercurial-native
elif "hg://" in srcuri:
- depends = bb.data.getVarFlag('do_fetch', 'depends', d) or ""
+ depends = d.getVarFlag('do_fetch', 'depends') or ""
depends = depends + " mercurial-native:do_populate_sysroot"
- bb.data.setVarFlag('do_fetch', 'depends', depends, d)
+ d.setVarFlag('do_fetch', 'depends', depends)
# OSC packages should DEPEND on osc-native
elif "osc://" in srcuri:
- depends = bb.data.getVarFlag('do_fetch', 'depends', d) or ""
+ depends = d.getVarFlag('do_fetch', 'depends') or ""
depends = depends + " osc-native:do_populate_sysroot"
- bb.data.setVarFlag('do_fetch', 'depends', depends, d)
+ d.setVarFlag('do_fetch', 'depends', depends)
# *.xz should depends on xz-native for unpacking
# Not endswith because of "*.patch.xz;patch=1". Need bb.decodeurl in future
if '.xz' in srcuri:
- depends = bb.data.getVarFlag('do_unpack', 'depends', d) or ""
+ depends = d.getVarFlag('do_unpack', 'depends') or ""
depends = depends + " xz-native:do_populate_sysroot"
- bb.data.setVarFlag('do_unpack', 'depends', depends, d)
+ d.setVarFlag('do_unpack', 'depends', depends)
# unzip-native should already be staged before unpacking ZIP recipes
if ".zip" in srcuri:
- depends = bb.data.getVarFlag('do_unpack', 'depends', d) or ""
+ depends = d.getVarFlag('do_unpack', 'depends') or ""
depends = depends + " unzip-native:do_populate_sysroot"
- bb.data.setVarFlag('do_unpack', 'depends', depends, d)
+ d.setVarFlag('do_unpack', 'depends', depends)
# 'multimachine' handling
- mach_arch = bb.data.getVar('MACHINE_ARCH', d, 1)
- pkg_arch = bb.data.getVar('PACKAGE_ARCH', d, 1)
+ mach_arch = d.getVar('MACHINE_ARCH', 1)
+ pkg_arch = d.getVar('PACKAGE_ARCH', 1)
if (pkg_arch == mach_arch):
# Already machine specific - nothing further to do
@@ -445,7 +445,7 @@ python () {
# We always try to scan SRC_URI for urls with machine overrides
# unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
#
- override = bb.data.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', d, 1)
+ override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', 1)
if override != '0':
paths = []
for p in [ "${PF}", "${P}", "${PN}", "files", "" ]:
@@ -461,18 +461,18 @@ python () {
for mp in paths:
if local.startswith(mp):
#bb.note("overriding PACKAGE_ARCH from %s to %s" % (pkg_arch, mach_arch))
- bb.data.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}", d)
+ d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
return
- packages = bb.data.getVar('PACKAGES', d, 1).split()
+ packages = d.getVar('PACKAGES', 1).split()
for pkg in packages:
- pkgarch = bb.data.getVar("PACKAGE_ARCH_%s" % pkg, d, 1)
+ pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg, 1)
# We could look for != PACKAGE_ARCH here but how to choose
# if multiple differences are present?
# Look through PACKAGE_ARCHS for the priority order?
if pkgarch and pkgarch == mach_arch:
- bb.data.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}", d)
+ d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN", True))
}
@@ -483,7 +483,7 @@ python do_cleansstate() {
addtask cleanall after do_cleansstate
python do_cleanall() {
- src_uri = (bb.data.getVar('SRC_URI', d, True) or "").split()
+ src_uri = (d.getVar('SRC_URI', True) or "").split()
if len(src_uri) == 0:
return
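The PRINC handling in the hunk above splits PR into a non-digit prefix and a numeric part before adding the increment. The same logic as a standalone function, exercised on a hypothetical PR value:

    import re

    def bump_pr(pr, princ):
        # mirrors base.bbclass: prefix + (number + PRINC) + remainder
        pr_prefix = re.search(r"\D+", pr)
        prval = re.search(r"\d+", pr)
        if pr_prefix is None or prval is None:
            raise ValueError("Unable to analyse format of PR variable: %s" % pr)
        nval = int(prval.group(0)) + int(princ)
        return pr_prefix.group(0) + str(nval) + pr[prval.end():]

    print(bump_pr("r4", "2"))   # -> r6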
diff --git a/meta/classes/bugzilla.bbclass b/meta/classes/bugzilla.bbclass
index 801bd66d2f6..4028d261c86 100644
--- a/meta/classes/bugzilla.bbclass
+++ b/meta/classes/bugzilla.bbclass
@@ -109,14 +109,14 @@ python bugzilla_eventhandler() {
return
if name == "TaskFailed":
- xmlrpc = bb.data.getVar("BUGZILLA_XMLRPC", data, True)
- user = bb.data.getVar("BUGZILLA_USER", data, True)
- passw = bb.data.getVar("BUGZILLA_PASS", data, True)
- product = bb.data.getVar("BUGZILLA_PRODUCT", data, True)
- compon = bb.data.getVar("BUGZILLA_COMPONENT", data, True)
- version = bb.data.getVar("BUGZILLA_VERSION", data, True)
-
- proxy = bb.data.getVar('http_proxy', data, True )
+ xmlrpc = data.getVar("BUGZILLA_XMLRPC", True)
+ user = data.getVar("BUGZILLA_USER", True)
+ passw = data.getVar("BUGZILLA_PASS", True)
+ product = data.getVar("BUGZILLA_PRODUCT", True)
+ compon = data.getVar("BUGZILLA_COMPONENT", True)
+ version = data.getVar("BUGZILLA_VERSION", True)
+
+ proxy = data.getVar('http_proxy', True )
if (proxy):
import urllib2
s, u, p, hostport = urllib2._parse_proxy(proxy)
@@ -132,14 +132,14 @@ python bugzilla_eventhandler() {
'component': compon}
# evil hack to figure out what is going on
- debug_file = open(os.path.join(bb.data.getVar("TMPDIR", data, True),"..","bugzilla-log"),"a")
+ debug_file = open(os.path.join(data.getVar("TMPDIR", True),"..","bugzilla-log"),"a")
file = None
- bugname = "%(package)s-%(pv)s-autobuild" % { "package" : bb.data.getVar("PN", data, True),
- "pv" : bb.data.getVar("PV", data, True),
+ bugname = "%(package)s-%(pv)s-autobuild" % { "package" : data.getVar("PN", True),
+ "pv" : data.getVar("PV", True),
}
- log_file = glob.glob("%s/log.%s.*" % (bb.data.getVar('T', event.data, True), event.task))
- text = "The %s step in %s failed at %s for machine %s" % (e.task, bb.data.getVar("PN", data, True), bb.data.getVar('DATETIME', data, True), bb.data.getVar( 'MACHINE', data, True ) )
+ log_file = glob.glob("%s/log.%s.*" % (event.data.getVar('T', True), event.task))
+ text = "The %s step in %s failed at %s for machine %s" % (e.task, data.getVar("PN", True), data.getVar('DATETIME', True), data.getVar( 'MACHINE', True ) )
if len(log_file) != 0:
print >> debug_file, "Adding log file %s" % log_file[0]
file = open(log_file[0], 'r')
@@ -167,7 +167,7 @@ python bugzilla_eventhandler() {
if bug_number and log:
print >> debug_file, "The bug is known as '%s'" % bug_number
- desc = "Build log for machine %s" % (bb.data.getVar('MACHINE', data, True))
+ desc = "Build log for machine %s" % (data.getVar('MACHINE', True))
if not bugzilla_create_attachment(debug_file, server, args.copy(), bug_number, text, log_file[0], log, desc):
print >> debug_file, "Failed to attach the build log for bug #%s" % bug_number
else:
@@ -181,6 +181,6 @@ python bugzilla_eventhandler() {
# store bug number for oestats-client
if bug_number:
- bb.data.setVar('OESTATS_BUG_NUMBER', bug_number, data)
+ data.setVar('OESTATS_BUG_NUMBER', bug_number)
}
diff --git a/meta/classes/buildstats.bbclass b/meta/classes/buildstats.bbclass
index 96c98d409f0..4cd8fe6936c 100644
--- a/meta/classes/buildstats.bbclass
+++ b/meta/classes/buildstats.bbclass
@@ -21,25 +21,25 @@ def get_cputime():
return sum(int(field) for field in fields)
def set_bn(e):
- bn = e.getPkgs()[0] + "-" + bb.data.getVar('MACHINE',e.data, True)
+ bn = e.getPkgs()[0] + "-" + e.data.getVar('MACHINE', True)
try:
- os.remove(bb.data.getVar('BNFILE', e.data, True))
+ os.remove(e.data.getVar('BNFILE', True))
except:
pass
- file = open(bb.data.getVar('BNFILE', e.data, True), "w")
- file.write(os.path.join(bn, bb.data.getVar('BUILDNAME', e.data, True)))
+ file = open(e.data.getVar('BNFILE', True), "w")
+ file.write(os.path.join(bn, e.data.getVar('BUILDNAME', True)))
file.close()
def get_bn(e):
- file = open(bb.data.getVar('BNFILE', e.data, True))
+ file = open(e.data.getVar('BNFILE', True))
bn = file.readline()
file.close()
return bn
def set_device(e):
- tmpdir = bb.data.getVar('TMPDIR', e.data, True)
+ tmpdir = e.data.getVar('TMPDIR', True)
try:
- os.remove(bb.data.getVar('DEVFILE', e.data, True))
+ os.remove(e.data.getVar('DEVFILE', True))
except:
pass
############################################################################
@@ -66,12 +66,12 @@ def set_device(e):
rdev=line.split()[2]
else:
rdev="NoLogicalDevice"
- file = open(bb.data.getVar('DEVFILE', e.data, True), "w")
+ file = open(e.data.getVar('DEVFILE', True), "w")
file.write(rdev)
file.close()
def get_device(e):
- file = open(bb.data.getVar('DEVFILE', e.data, True))
+ file = open(e.data.getVar('DEVFILE', True))
device = file.readline()
file.close()
return device
@@ -126,7 +126,7 @@ def get_timedata(var, data):
def write_task_data(status, logfile, dev, e):
bn = get_bn(e)
- bsdir = os.path.join(bb.data.getVar('BUILDSTATS_BASE', e.data, True), bn)
+ bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn)
taskdir = os.path.join(bsdir, bb.data.expand("${PF}", e.data))
file = open(os.path.join(logfile), "a")
timedata = get_timedata("__timedata_task", e.data)
@@ -168,7 +168,7 @@ python run_buildstats () {
# set the buildname
########################################################################
try:
- bb.mkdirhier(bb.data.getVar('BUILDSTATS_BASE', e.data, True))
+ bb.mkdirhier(e.data.getVar('BUILDSTATS_BASE', True))
except:
pass
set_bn(e)
@@ -176,7 +176,7 @@ python run_buildstats () {
set_device(e)
device = get_device(e)
- bsdir = os.path.join(bb.data.getVar('BUILDSTATS_BASE', e.data, True), bn)
+ bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn)
try:
bb.mkdirhier(bsdir)
except:
@@ -199,7 +199,7 @@ python run_buildstats () {
elif isinstance(e, bb.event.BuildCompleted):
bn = get_bn(e)
device = get_device(e)
- bsdir = os.path.join(bb.data.getVar('BUILDSTATS_BASE', e.data, True), bn)
+ bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn)
taskdir = os.path.join(bsdir, bb.data.expand("${PF}", e.data))
build_time = os.path.join(bsdir, "build_stats")
file = open(build_time, "a")
@@ -224,7 +224,7 @@ python run_buildstats () {
if isinstance(e, bb.build.TaskStarted):
bn = get_bn(e)
device = get_device(e)
- bsdir = os.path.join(bb.data.getVar('BUILDSTATS_BASE', e.data, True), bn)
+ bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn)
taskdir = os.path.join(bsdir, bb.data.expand("${PF}", e.data))
if device != "NoLogicalDevice":
set_diskdata("__diskdata_task", device, e.data)
@@ -242,14 +242,14 @@ python run_buildstats () {
elif isinstance(e, bb.build.TaskSucceeded):
bn = get_bn(e)
device = get_device(e)
- bsdir = os.path.join(bb.data.getVar('BUILDSTATS_BASE', e.data, True), bn)
+ bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn)
taskdir = os.path.join(bsdir, bb.data.expand("${PF}", e.data))
write_task_data("passed", os.path.join(taskdir, e.task), device, e)
if e.task == "do_rootfs":
- bsdir = os.path.join(bb.data.getVar('BUILDSTATS_BASE', e.data, True), bn)
+ bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn)
bs=os.path.join(bsdir, "build_stats")
file = open(bs,"a")
- rootfs = bb.data.getVar('IMAGE_ROOTFS', e.data, True)
+ rootfs = e.data.getVar('IMAGE_ROOTFS', True)
rootfs_size = subprocess.Popen(["du", "-sh", rootfs], stdout=subprocess.PIPE).stdout.read()
file.write("Uncompressed Rootfs size: %s" % rootfs_size)
file.close()
@@ -257,7 +257,7 @@ python run_buildstats () {
elif isinstance(e, bb.build.TaskFailed):
bn = get_bn(e)
device = get_device(e)
- bsdir = os.path.join(bb.data.getVar('BUILDSTATS_BASE', e.data, True), bn)
+ bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn)
taskdir = os.path.join(bsdir, bb.data.expand("${PF}", e.data))
write_task_data("failed", os.path.join(taskdir, e.task), device, e)
########################################################################
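The hunk header above shows only the tail of get_cputime(); for orientation, a sketch of such a jiffies counter under the assumption that it reads the aggregate "cpu" line of /proc/stat (the function's actual body is not visible in this diff):

    def get_cputime():
        # assumed implementation: sum all jiffy fields of the first
        # /proc/stat line, dropping the leading "cpu" label
        with open("/proc/stat") as f:
            fields = f.readline().rstrip().split()[1:]
        return sum(int(field) for field in fields)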
diff --git a/meta/classes/cpan-base.bbclass b/meta/classes/cpan-base.bbclass
index b5dbdaea818..79582ca76cb 100644
--- a/meta/classes/cpan-base.bbclass
+++ b/meta/classes/cpan-base.bbclass
@@ -28,7 +28,7 @@ def get_perl_version(d):
# Determine where the library directories are
def perl_get_libdirs(d):
- libdir = bb.data.getVar('libdir', d, 1)
+ libdir = d.getVar('libdir', 1)
if is_target(d) == "no":
libdir += '/perl-native'
libdir += '/perl'
diff --git a/meta/classes/cpan_build.bbclass b/meta/classes/cpan_build.bbclass
index cc503a424ea..981332c4fa6 100644
--- a/meta/classes/cpan_build.bbclass
+++ b/meta/classes/cpan_build.bbclass
@@ -10,9 +10,9 @@ inherit cpan-base
# libmodule-build-perl)
#
def cpan_build_dep_prepend(d):
- if bb.data.getVar('CPAN_BUILD_DEPS', d, 1):
+ if d.getVar('CPAN_BUILD_DEPS', 1):
return ''
- pn = bb.data.getVar('PN', d, 1)
+ pn = d.getVar('PN', 1)
if pn in ['libmodule-build-perl', 'libmodule-build-perl-native']:
return ''
return 'libmodule-build-perl-native '
diff --git a/meta/classes/cross-canadian.bbclass b/meta/classes/cross-canadian.bbclass
index 601175db45a..6f5bcd0ad40 100644
--- a/meta/classes/cross-canadian.bbclass
+++ b/meta/classes/cross-canadian.bbclass
@@ -16,7 +16,7 @@ STAGING_BINDIR_TOOLCHAIN = "${STAGING_DIR_NATIVE}${bindir_native}/${SDK_ARCH}${S
#
PACKAGE_ARCH = "${SDK_ARCH}-nativesdk"
python () {
- archs = bb.data.getVar('PACKAGE_ARCHS', d, True).split()
+ archs = d.getVar('PACKAGE_ARCHS', True).split()
sdkarchs = []
for arch in archs:
sdkarchs.append(arch + '-nativesdk')
diff --git a/meta/classes/debian.bbclass b/meta/classes/debian.bbclass
index 554525dc84d..025abcfad0b 100644
--- a/meta/classes/debian.bbclass
+++ b/meta/classes/debian.bbclass
@@ -22,8 +22,8 @@ python () {
python debian_package_name_hook () {
import glob, copy, stat, errno, re
- pkgdest = bb.data.getVar('PKGDEST', d, 1)
- packages = bb.data.getVar('PACKAGES', d, 1)
+ pkgdest = d.getVar('PKGDEST', 1)
+ packages = d.getVar('PACKAGES', 1)
bin_re = re.compile(".*/s?" + os.path.basename(d.getVar("bindir", True)) + "$")
lib_re = re.compile(".*/" + os.path.basename(d.getVar("libdir", True)) + "$")
so_re = re.compile("lib.*\.so")
@@ -60,7 +60,7 @@ python debian_package_name_hook () {
for f in files:
if so_re.match(f):
fp = os.path.join(root, f)
- cmd = (bb.data.getVar('BUILD_PREFIX', d, 1) or "") + "objdump -p " + fp + " 2>/dev/null"
+ cmd = (d.getVar('BUILD_PREFIX', 1) or "") + "objdump -p " + fp + " 2>/dev/null"
fd = os.popen(cmd)
lines = fd.readlines()
fd.close()
@@ -74,7 +74,7 @@ python debian_package_name_hook () {
if len(sonames) == 1:
soname = sonames[0]
elif len(sonames) > 1:
- lead = bb.data.getVar('LEAD_SONAME', d, 1)
+ lead = d.getVar('LEAD_SONAME', 1)
if lead:
r = re.compile(lead)
filtered = []
@@ -95,21 +95,21 @@ python debian_package_name_hook () {
if soname_result:
(pkgname, devname) = soname_result
for pkg in packages.split():
- if (bb.data.getVar('PKG_' + pkg, d) or bb.data.getVar('DEBIAN_NOAUTONAME_' + pkg, d)):
+ if (d.getVar('PKG_' + pkg) or d.getVar('DEBIAN_NOAUTONAME_' + pkg)):
continue
- debian_pn = bb.data.getVar('DEBIANNAME_' + pkg, d)
+ debian_pn = d.getVar('DEBIANNAME_' + pkg)
if debian_pn:
newpkg = debian_pn
elif pkg == orig_pkg:
newpkg = pkgname
else:
newpkg = pkg.replace(orig_pkg, devname, 1)
- mlpre=bb.data.getVar('MLPREFIX', d, True)
+ mlpre=d.getVar('MLPREFIX', True)
if mlpre:
if not newpkg.find(mlpre) == 0:
newpkg = mlpre + newpkg
if newpkg != pkg:
- bb.data.setVar('PKG_' + pkg, newpkg, d)
+ d.setVar('PKG_' + pkg, newpkg)
# reversed sort is needed when some package is substring of another
# ie in ncurses we get without reverse sort:
@@ -117,7 +117,7 @@ python debian_package_name_hook () {
# and later
# DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libticw orig_pkg ncurses-libtic debian_pn None newpkg libticw
# so we need to handle ncurses-libticw->libticw5 before ncurses-libtic->libtic5
- for pkg in sorted((bb.data.getVar('AUTO_LIBNAME_PKGS', d, 1) or "").split(), reverse=True):
+ for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS', 1) or "").split(), reverse=True):
auto_libname(packages, pkg)
}
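The ncurses comment above the final loop is the whole reason for the reverse sort; a two-package illustration:

    # With plain ordering, renaming ncurses-libtic -> libtic5 first would
    # also clobber ncurses-libticw via substring replacement, so the
    # longer name must be processed first.
    pkgs = ["ncurses-libtic", "ncurses-libticw"]
    for pkg in sorted(pkgs, reverse=True):
        print(pkg)   # ncurses-libticw comes out before ncurses-libtic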
diff --git a/meta/classes/distrodata.bbclass b/meta/classes/distrodata.bbclass
index ce7b931b139..687247a6491 100644
--- a/meta/classes/distrodata.bbclass
+++ b/meta/classes/distrodata.bbclass
@@ -19,87 +19,87 @@ addtask distrodata_np
do_distrodata_np[nostamp] = "1"
python do_distrodata_np() {
localdata = bb.data.createCopy(d)
- pn = bb.data.getVar("PN", d, True)
+ pn = d.getVar("PN", True)
bb.note("Package Name: %s" % pn)
import oe.distro_check as dist_check
- tmpdir = bb.data.getVar('TMPDIR', d, True)
+ tmpdir = d.getVar('TMPDIR', True)
distro_check_dir = os.path.join(tmpdir, "distro_check")
- datetime = bb.data.getVar('DATETIME', localdata, True)
+ datetime = localdata.getVar('DATETIME', True)
dist_check.update_distro_data(distro_check_dir, datetime)
if pn.find("-native") != -1:
pnstripped = pn.split("-native")
bb.note("Native Split: %s" % pnstripped)
- bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+ bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
bb.data.update_data(localdata)
if pn.find("-nativesdk") != -1:
pnstripped = pn.split("-nativesdk")
bb.note("Native Split: %s" % pnstripped)
- bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+ bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
bb.data.update_data(localdata)
if pn.find("-cross") != -1:
pnstripped = pn.split("-cross")
bb.note("cross Split: %s" % pnstripped)
- bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+ bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
bb.data.update_data(localdata)
if pn.find("-crosssdk") != -1:
pnstripped = pn.split("-crosssdk")
bb.note("cross Split: %s" % pnstripped)
- bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+ bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
bb.data.update_data(localdata)
if pn.find("-initial") != -1:
pnstripped = pn.split("-initial")
bb.note("initial Split: %s" % pnstripped)
- bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+ bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
bb.data.update_data(localdata)
"""generate package information from .bb file"""
- pname = bb.data.getVar('PN', localdata, True)
- pcurver = bb.data.getVar('PV', localdata, True)
- pdesc = bb.data.getVar('DESCRIPTION', localdata, True)
+ pname = localdata.getVar('PN', True)
+ pcurver = localdata.getVar('PV', True)
+ pdesc = localdata.getVar('DESCRIPTION', True)
if pdesc is not None:
pdesc = pdesc.replace(',','')
pdesc = pdesc.replace('\n','')
- pgrp = bb.data.getVar('SECTION', localdata, True)
- plicense = bb.data.getVar('LICENSE', localdata, True).replace(',','_')
- if bb.data.getVar('LIC_FILES_CHKSUM', localdata, True):
+ pgrp = localdata.getVar('SECTION', True)
+ plicense = localdata.getVar('LICENSE', True).replace(',','_')
+ if localdata.getVar('LIC_FILES_CHKSUM', True):
pchksum="1"
else:
pchksum="0"
- if bb.data.getVar('RECIPE_STATUS', localdata, True):
+ if localdata.getVar('RECIPE_STATUS', True):
hasrstatus="1"
else:
hasrstatus="0"
- rstatus = bb.data.getVar('RECIPE_STATUS', localdata, True)
+ rstatus = localdata.getVar('RECIPE_STATUS', True)
if rstatus is not None:
rstatus = rstatus.replace(',','')
- pupver = bb.data.getVar('RECIPE_LATEST_VERSION', localdata, True)
+ pupver = localdata.getVar('RECIPE_LATEST_VERSION', True)
if pcurver == pupver:
vermatch="1"
else:
vermatch="0"
- noupdate_reason = bb.data.getVar('RECIPE_NO_UPDATE_REASON', localdata, True)
+ noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON', True)
if noupdate_reason is None:
noupdate="0"
else:
noupdate="1"
noupdate_reason = noupdate_reason.replace(',','')
- ris = bb.data.getVar('RECIPE_INTEL_SECTION', localdata, True)
- maintainer = bb.data.getVar('RECIPE_MAINTAINER', localdata, True)
- rttr = bb.data.getVar('RECIPE_TIME_BETWEEN_LAST_TWO_RELEASES', localdata, True)
- rlrd = bb.data.getVar('RECIPE_LATEST_RELEASE_DATE', localdata, True)
- dc = bb.data.getVar('DEPENDENCY_CHECK', localdata, True)
- rc = bb.data.getVar('RECIPE_COMMENTS', localdata, True)
+ ris = localdata.getVar('RECIPE_INTEL_SECTION', True)
+ maintainer = localdata.getVar('RECIPE_MAINTAINER', True)
+ rttr = localdata.getVar('RECIPE_TIME_BETWEEN_LAST_TWO_RELEASES', True)
+ rlrd = localdata.getVar('RECIPE_LATEST_RELEASE_DATE', True)
+ dc = localdata.getVar('DEPENDENCY_CHECK', True)
+ rc = localdata.getVar('RECIPE_COMMENTS', True)
result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata)
bb.note("DISTRO: %s,%s,%s,%s,%s,%s,%s,%s,%s, %s, %s, %s\n" % \
@@ -113,81 +113,81 @@ python do_distrodata_np() {
addtask distrodata
do_distrodata[nostamp] = "1"
python do_distrodata() {
- logpath = bb.data.getVar('LOG_DIR', d, True)
+ logpath = d.getVar('LOG_DIR', True)
bb.utils.mkdirhier(logpath)
logfile = os.path.join(logpath, "distrodata.csv")
import oe.distro_check as dist_check
localdata = bb.data.createCopy(d)
- tmpdir = bb.data.getVar('TMPDIR', d, True)
+ tmpdir = d.getVar('TMPDIR', True)
distro_check_dir = os.path.join(tmpdir, "distro_check")
- datetime = bb.data.getVar('DATETIME', localdata, True)
+ datetime = localdata.getVar('DATETIME', True)
dist_check.update_distro_data(distro_check_dir, datetime)
- pn = bb.data.getVar("PN", d, True)
+ pn = d.getVar("PN", True)
bb.note("Package Name: %s" % pn)
if pn.find("-native") != -1:
pnstripped = pn.split("-native")
bb.note("Native Split: %s" % pnstripped)
- bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+ bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
bb.data.update_data(localdata)
if pn.find("-cross") != -1:
pnstripped = pn.split("-cross")
bb.note("cross Split: %s" % pnstripped)
- bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+ bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
bb.data.update_data(localdata)
if pn.find("-initial") != -1:
pnstripped = pn.split("-initial")
bb.note("initial Split: %s" % pnstripped)
- bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+ bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
bb.data.update_data(localdata)
"""generate package information from .bb file"""
- pname = bb.data.getVar('PN', localdata, True)
- pcurver = bb.data.getVar('PV', localdata, True)
- pdesc = bb.data.getVar('DESCRIPTION', localdata, True)
+ pname = localdata.getVar('PN', True)
+ pcurver = localdata.getVar('PV', True)
+ pdesc = localdata.getVar('DESCRIPTION', True)
if pdesc is not None:
pdesc = pdesc.replace(',','')
pdesc = pdesc.replace('\n','')
- pgrp = bb.data.getVar('SECTION', localdata, True)
- plicense = bb.data.getVar('LICENSE', localdata, True).replace(',','_')
- if bb.data.getVar('LIC_FILES_CHKSUM', localdata, True):
+ pgrp = localdata.getVar('SECTION', True)
+ plicense = localdata.getVar('LICENSE', True).replace(',','_')
+ if localdata.getVar('LIC_FILES_CHKSUM', True):
pchksum="1"
else:
pchksum="0"
- if bb.data.getVar('RECIPE_STATUS', localdata, True):
+ if localdata.getVar('RECIPE_STATUS', True):
hasrstatus="1"
else:
hasrstatus="0"
- rstatus = bb.data.getVar('RECIPE_STATUS', localdata, True)
+ rstatus = localdata.getVar('RECIPE_STATUS', True)
if rstatus is not None:
rstatus = rstatus.replace(',','')
- pupver = bb.data.getVar('RECIPE_LATEST_VERSION', localdata, True)
+ pupver = localdata.getVar('RECIPE_LATEST_VERSION', True)
if pcurver == pupver:
vermatch="1"
else:
vermatch="0"
- noupdate_reason = bb.data.getVar('RECIPE_NO_UPDATE_REASON', localdata, True)
+ noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON', True)
if noupdate_reason is None:
noupdate="0"
else:
noupdate="1"
noupdate_reason = noupdate_reason.replace(',','')
- ris = bb.data.getVar('RECIPE_INTEL_SECTION', localdata, True)
- maintainer = bb.data.getVar('RECIPE_MAINTAINER', localdata, True)
- rttr = bb.data.getVar('RECIPE_TIME_BETWEEN_LAST_TWO_RELEASES', localdata, True)
- rlrd = bb.data.getVar('RECIPE_LATEST_RELEASE_DATE', localdata, True)
- dc = bb.data.getVar('DEPENDENCY_CHECK', localdata, True)
- rc = bb.data.getVar('RECIPE_COMMENTS', localdata, True)
+ ris = localdata.getVar('RECIPE_INTEL_SECTION', True)
+ maintainer = localdata.getVar('RECIPE_MAINTAINER', True)
+ rttr = localdata.getVar('RECIPE_TIME_BETWEEN_LAST_TWO_RELEASES', True)
+ rlrd = localdata.getVar('RECIPE_LATEST_RELEASE_DATE', True)
+ dc = localdata.getVar('DEPENDENCY_CHECK', True)
+ rc = localdata.getVar('RECIPE_COMMENTS', True)
# do the comparison
result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata)
@@ -298,7 +298,7 @@ python do_checkpkg() {
Clear internal url cache as it's a temporary check. Not doing so will have
bitbake check url multiple times when looping through a single url
"""
- fn = bb.data.getVar('FILE', d, True)
+ fn = d.getVar('FILE', True)
bb.fetch2.urldata_cache[fn] = {}
"""
@@ -329,7 +329,7 @@ python do_checkpkg() {
Return new version if success, or else error in "Errxxxx" style
"""
def check_new_dir(url, curver, d):
- pn = bb.data.getVar('PN', d, True)
+ pn = d.getVar('PN', True)
f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-1-" % pn)
status = internal_fetch_wget(url, d, f)
fhtml = f.read()
@@ -372,7 +372,7 @@ python do_checkpkg() {
f.close()
if status != "ErrHostNoDir" and re.match("Err", status):
- logpath = bb.data.getVar('LOG_DIR', d, 1)
+ logpath = d.getVar('LOG_DIR', 1)
os.system("cp %s %s/" % (f.name, logpath))
os.unlink(f.name)
return status
@@ -388,7 +388,7 @@ python do_checkpkg() {
"""possible to have no version in pkg name, such as spectrum-fw"""
if not re.search("\d+", curname):
return pcurver
- pn = bb.data.getVar('PN', d, True)
+ pn = d.getVar('PN', True)
f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-2-" % pn)
status = internal_fetch_wget(url, d, f)
fhtml = f.read()
@@ -431,55 +431,55 @@ python do_checkpkg() {
f.close()
"""if host hasn't directory information, no need to save tmp file"""
if status != "ErrHostNoDir" and re.match("Err", status):
- logpath = bb.data.getVar('LOG_DIR', d, True)
+ logpath = d.getVar('LOG_DIR', True)
os.system("cp %s %s/" % (f.name, logpath))
os.unlink(f.name)
return status
"""first check whether a uri is provided"""
- src_uri = bb.data.getVar('SRC_URI', d, True)
+ src_uri = d.getVar('SRC_URI', True)
if not src_uri:
return
"""initialize log files."""
- logpath = bb.data.getVar('LOG_DIR', d, True)
+ logpath = d.getVar('LOG_DIR', True)
bb.utils.mkdirhier(logpath)
logfile = os.path.join(logpath, "checkpkg.csv")
"""generate package information from .bb file"""
- pname = bb.data.getVar('PN', d, True)
+ pname = d.getVar('PN', True)
if pname.find("-native") != -1:
pnstripped = pname.split("-native")
bb.note("Native Split: %s" % pnstripped)
- bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+ bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
bb.data.update_data(localdata)
if pname.find("-cross") != -1:
pnstripped = pname.split("-cross")
bb.note("cross Split: %s" % pnstripped)
- bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+ bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
bb.data.update_data(localdata)
if pname.find("-initial") != -1:
pnstripped = pname.split("-initial")
bb.note("initial Split: %s" % pnstripped)
- bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+ bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
bb.data.update_data(localdata)
- pdesc = bb.data.getVar('DESCRIPTION', localdata, True)
- pgrp = bb.data.getVar('SECTION', localdata, True)
- pversion = bb.data.getVar('PV', localdata, True)
- plicense = bb.data.getVar('LICENSE', localdata, True)
- psection = bb.data.getVar('SECTION', localdata, True)
- phome = bb.data.getVar('HOMEPAGE', localdata, True)
- prelease = bb.data.getVar('PR', localdata, True)
- ppriority = bb.data.getVar('PRIORITY', localdata, True)
- pdepends = bb.data.getVar('DEPENDS', localdata, True)
- pbugtracker = bb.data.getVar('BUGTRACKER', localdata, True)
- ppe = bb.data.getVar('PE', localdata, True)
- psrcuri = bb.data.getVar('SRC_URI', localdata, True)
- maintainer = bb.data.getVar('RECIPE_MAINTAINER', localdata, True)
+ pdesc = localdata.getVar('DESCRIPTION', True)
+ pgrp = localdata.getVar('SECTION', True)
+ pversion = localdata.getVar('PV', True)
+ plicense = localdata.getVar('LICENSE', True)
+ psection = localdata.getVar('SECTION', True)
+ phome = localdata.getVar('HOMEPAGE', True)
+ prelease = localdata.getVar('PR', True)
+ ppriority = localdata.getVar('PRIORITY', True)
+ pdepends = localdata.getVar('DEPENDS', True)
+ pbugtracker = localdata.getVar('BUGTRACKER', True)
+ ppe = localdata.getVar('PE', True)
+ psrcuri = localdata.getVar('SRC_URI', True)
+ maintainer = localdata.getVar('RECIPE_MAINTAINER', True)
found = 0
for uri in src_uri.split():
@@ -497,9 +497,9 @@ python do_checkpkg() {
(type, host, path, user, pswd, parm) = bb.decodeurl(uri)
if type in ['http', 'https', 'ftp']:
- pcurver = bb.data.getVar('PV', d, True)
+ pcurver = d.getVar('PV', True)
else:
- pcurver = bb.data.getVar("SRCREV", d, True)
+ pcurver = d.getVar("SRCREV", True)
if type in ['http', 'https', 'ftp']:
newver = pcurver
@@ -639,7 +639,7 @@ python do_checkpkg() {
pstatus += ":%s%s" % (host, path)
"""Read from manual distro tracking fields as alternative"""
- pmver = bb.data.getVar("RECIPE_LATEST_VERSION", d, True)
+ pmver = d.getVar("RECIPE_LATEST_VERSION", True)
if not pmver:
pmver = "N/A"
pmstatus = "ErrNoRecipeData"
@@ -688,12 +688,12 @@ python do_distro_check() {
localdata = bb.data.createCopy(d)
bb.data.update_data(localdata)
- tmpdir = bb.data.getVar('TMPDIR', d, True)
+ tmpdir = d.getVar('TMPDIR', True)
distro_check_dir = os.path.join(tmpdir, "distro_check")
- logpath = bb.data.getVar('LOG_DIR', d, True)
+ logpath = d.getVar('LOG_DIR', True)
bb.utils.mkdirhier(logpath)
result_file = os.path.join(logpath, "distrocheck.csv")
- datetime = bb.data.getVar('DATETIME', localdata, True)
+ datetime = localdata.getVar('DATETIME', True)
dc.update_distro_data(distro_check_dir, datetime)
# do the comparison
@@ -734,12 +734,12 @@ python do_checklicense() {
import os
import bb
import shutil
- logpath = bb.data.getVar('LOG_DIR', d, True)
+ logpath = d.getVar('LOG_DIR', True)
bb.utils.mkdirhier(logpath)
- pn = bb.data.getVar('PN', d, True)
+ pn = d.getVar('PN', True)
logfile = os.path.join(logpath, "missinglicense.csv")
- generic_directory = bb.data.getVar('COMMON_LICENSE_DIR', d, True)
- license_types = bb.data.getVar('LICENSE', d, True)
+ generic_directory = d.getVar('COMMON_LICENSE_DIR', True)
+ license_types = d.getVar('LICENSE', True)
for license_type in ((license_types.replace('+', '').replace('|', '&')
.replace('(', '').replace(')', '').replace(';', '')
.replace(',', '').replace(" ", "").split("&"))):
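do_checklicense's loop normalizes the LICENSE expression before iterating: '|' becomes '&', grouping and spacing are stripped, and the result is split on '&'. Worked on a hypothetical value:

    license_types = "GPLv2+ | (LGPLv2.1 & MIT)"
    cleaned = (license_types.replace('+', '').replace('|', '&')
               .replace('(', '').replace(')', '').replace(';', '')
               .replace(',', '').replace(" ", "").split("&"))
    print(cleaned)   # -> ['GPLv2', 'LGPLv2.1', 'MIT']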
diff --git a/meta/classes/distutils-base.bbclass b/meta/classes/distutils-base.bbclass
index e84b0fcc5ab..e7d0bb8071e 100644
--- a/meta/classes/distutils-base.bbclass
+++ b/meta/classes/distutils-base.bbclass
@@ -1,4 +1,4 @@
-DEPENDS += "${@["python-native python", ""][(bb.data.getVar('PACKAGES', d, 1) == '')]}"
+DEPENDS += "${@["python-native python", ""][(d.getVar('PACKAGES', 1) == '')]}"
RDEPENDS_${PN} += "${@['', 'python-core']['${PN}' == '${BPN}']}"
inherit distutils-common-base
diff --git a/meta/classes/distutils-native-base.bbclass b/meta/classes/distutils-native-base.bbclass
index 2703fe07400..47367d796b2 100644
--- a/meta/classes/distutils-native-base.bbclass
+++ b/meta/classes/distutils-native-base.bbclass
@@ -1,3 +1,3 @@
-DEPENDS += "${@["python-native", ""][(bb.data.getVar('PACKAGES', d, 1) == '')]}"
+DEPENDS += "${@["python-native", ""][(d.getVar('PACKAGES', 1) == '')]}"
inherit distutils-common-base
diff --git a/meta/classes/gconf.bbclass b/meta/classes/gconf.bbclass
index 67986787d7f..bffc92ea7a1 100644
--- a/meta/classes/gconf.bbclass
+++ b/meta/classes/gconf.bbclass
@@ -27,8 +27,8 @@ done
python populate_packages_append () {
import re
- packages = bb.data.getVar('PACKAGES', d, 1).split()
- pkgdest = bb.data.getVar('PKGDEST', d, 1)
+ packages = d.getVar('PACKAGES', 1).split()
+ pkgdest = d.getVar('PKGDEST', 1)
for pkg in packages:
schema_dir = '%s/%s/etc/gconf/schemas' % (pkgdest, pkg)
@@ -41,15 +41,15 @@ python populate_packages_append () {
if schemas != []:
bb.note("adding gconf postinst and prerm scripts to %s" % pkg)
bb.data.setVar('SCHEMA_FILES', " ".join(schemas), d)
- postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) or bb.data.getVar('pkg_postinst', d, 1)
+ postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1)
if not postinst:
postinst = '#!/bin/sh\n'
- postinst += bb.data.getVar('gconf_postinst', d, 1)
- bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)
- prerm = bb.data.getVar('pkg_prerm_%s' % pkg, d, 1) or bb.data.getVar('pkg_prerm', d, 1)
+ postinst += d.getVar('gconf_postinst', 1)
+ d.setVar('pkg_postinst_%s' % pkg, postinst)
+ prerm = d.getVar('pkg_prerm_%s' % pkg, 1) or d.getVar('pkg_prerm', 1)
if not prerm:
prerm = '#!/bin/sh\n'
- prerm += bb.data.getVar('gconf_prerm', d, 1)
- bb.data.setVar('pkg_prerm_%s' % pkg, prerm, d)
+ prerm += d.getVar('gconf_prerm', 1)
+ d.setVar('pkg_prerm_%s' % pkg, prerm)
}
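gconf.bbclass above and gtk-icon-cache.bbclass below share the same maintainer-script pattern: prefer a package-specific pkg_postinst_<pkg>, fall back to the generic pkg_postinst, default to a bare shebang, then append the class snippet. Reduced to string handling (the snippet body here is a placeholder, not the real gconf script):

    def append_script(existing, snippet):
        # existing: current pkg_postinst value for the package, or None
        postinst = existing or '#!/bin/sh\n'
        return postinst + snippet

    # hypothetical snippet standing in for gconf_postinst's body
    print(append_script(None, 'echo "register schemas here"\n'))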
diff --git a/meta/classes/gtk-icon-cache.bbclass b/meta/classes/gtk-icon-cache.bbclass
index d0840d59b60..eac3061b0a3 100644
--- a/meta/classes/gtk-icon-cache.bbclass
+++ b/meta/classes/gtk-icon-cache.bbclass
@@ -28,31 +28,31 @@ done
}
python populate_packages_append () {
- packages = bb.data.getVar('PACKAGES', d, 1).split()
- pkgdest = bb.data.getVar('PKGDEST', d, 1)
+ packages = d.getVar('PACKAGES', 1).split()
+ pkgdest = d.getVar('PKGDEST', 1)
for pkg in packages:
- icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, bb.data.getVar('datadir', d, 1))
+ icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, d.getVar('datadir', 1))
if not os.path.exists(icon_dir):
continue
bb.note("adding hicolor-icon-theme dependency to %s" % pkg)
- rdepends = bb.data.getVar('RDEPENDS_%s' % pkg, d, 1)
- rdepends = rdepends + ' ' + bb.data.getVar('MLPREFIX', d) + "hicolor-icon-theme"
- bb.data.setVar('RDEPENDS_%s' % pkg, rdepends, d)
+ rdepends = d.getVar('RDEPENDS_%s' % pkg, 1)
+ rdepends = rdepends + ' ' + d.getVar('MLPREFIX') + "hicolor-icon-theme"
+ d.setVar('RDEPENDS_%s' % pkg, rdepends)
bb.note("adding gtk-icon-cache postinst and postrm scripts to %s" % pkg)
- postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) or bb.data.getVar('pkg_postinst', d, 1)
+ postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1)
if not postinst:
postinst = '#!/bin/sh\n'
- postinst += bb.data.getVar('gtk_icon_cache_postinst', d, 1)
- bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)
+ postinst += d.getVar('gtk_icon_cache_postinst', 1)
+ d.setVar('pkg_postinst_%s' % pkg, postinst)
- postrm = bb.data.getVar('pkg_postrm_%s' % pkg, d, 1) or bb.data.getVar('pkg_postrm', d, 1)
+ postrm = d.getVar('pkg_postrm_%s' % pkg, 1) or d.getVar('pkg_postrm', 1)
if not postrm:
postrm = '#!/bin/sh\n'
- postrm += bb.data.getVar('gtk_icon_cache_postrm', d, 1)
- bb.data.setVar('pkg_postrm_%s' % pkg, postrm, d)
+ postrm += d.getVar('gtk_icon_cache_postrm', 1)
+ d.setVar('pkg_postrm_%s' % pkg, postrm)
}
diff --git a/meta/classes/icecc.bbclass b/meta/classes/icecc.bbclass
index f8e9d8859b0..7e3676af356 100644
--- a/meta/classes/icecc.bbclass
+++ b/meta/classes/icecc.bbclass
@@ -32,7 +32,7 @@ def icecc_dep_prepend(d):
# INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
# we need that built is the responsibility of the patch function / class, not
# the application.
- if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d):
+ if not d.getVar('INHIBIT_DEFAULT_DEPS'):
return "icecc-create-env-native"
return ""
@@ -54,7 +54,7 @@ def create_path(compilers, bb, d):
staging += "-kernel"
#check if the icecc path is set by the user
- icecc = bb.data.getVar('ICECC_PATH', d) or os.popen("which icecc").read()[:-1]
+ icecc = d.getVar('ICECC_PATH') or os.popen("which icecc").read()[:-1]
# Create the dir if necessary
try:
@@ -81,7 +81,7 @@ def use_icc(bb,d):
package_tmp = bb.data.expand('${PN}', d)
system_class_blacklist = [ "none" ]
- user_class_blacklist = (bb.data.getVar('ICECC_USER_CLASS_BL', d) or "none").split()
+ user_class_blacklist = (d.getVar('ICECC_USER_CLASS_BL') or "none").split()
package_class_blacklist = system_class_blacklist + user_class_blacklist
for black in package_class_blacklist:
@@ -92,7 +92,7 @@ def use_icc(bb,d):
#"system" package blacklist contains a list of packages that can not distribute compile tasks
#for one reason or the other
system_package_blacklist = [ "uclibc", "glibc", "gcc", "bind", "u-boot", "dhcp-forwarder", "enchant", "connman", "orbit2" ]
- user_package_blacklist = (bb.data.getVar('ICECC_USER_PACKAGE_BL', d) or "").split()
+ user_package_blacklist = (d.getVar('ICECC_USER_PACKAGE_BL') or "").split()
package_blacklist = system_package_blacklist + user_package_blacklist
for black in package_blacklist:
@@ -100,7 +100,7 @@ def use_icc(bb,d):
#bb.note(package_tmp, ' found in blacklist, disable icecc')
return "no"
- if bb.data.getVar('PARALLEL_MAKE', d) == "":
+ if d.getVar('PARALLEL_MAKE') == "":
bb.note(package_tmp, " ", bb.data.expand('${PV}', d), " has empty PARALLEL_MAKE, disable icecc")
return "no"
@@ -119,8 +119,8 @@ def icc_version(bb, d):
if use_icc(bb, d) == "no":
return ""
- parallel = bb.data.getVar('ICECC_PARALLEL_MAKE', d) or ""
- bb.data.setVar("PARALLEL_MAKE", parallel, d)
+ parallel = d.getVar('ICECC_PARALLEL_MAKE') or ""
+ d.setVar("PARALLEL_MAKE", parallel)
if icc_is_native(bb, d):
archive_name = "local-host-env"
@@ -130,7 +130,7 @@ def icc_version(bb, d):
prefix = bb.data.expand('${HOST_PREFIX}' , d)
distro = bb.data.expand('${DISTRO}', d)
target_sys = bb.data.expand('${TARGET_SYS}', d)
- float = bb.data.getVar('TARGET_FPU', d) or "hard"
+ float = d.getVar('TARGET_FPU') or "hard"
archive_name = prefix + distro + "-" + target_sys + "-" + float
if icc_is_kernel(bb, d):
archive_name += "-kernel"
diff --git a/meta/classes/image-swab.bbclass b/meta/classes/image-swab.bbclass
index b939ec4b45f..23183b3ec3b 100644
--- a/meta/classes/image-swab.bbclass
+++ b/meta/classes/image-swab.bbclass
@@ -51,13 +51,13 @@ python() {
# and cross packages which aren't swabber-native or one of its dependencies
# I have ignored them for now...
if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('nativesdk', d) and not bb.data.inherits_class('cross', d):
- deps = (bb.data.getVarFlag('do_setscene', 'depends', d) or "").split()
+ deps = (d.getVarFlag('do_setscene', 'depends') or "").split()
deps.append('strace-native:do_populate_sysroot')
bb.data.setVarFlag('do_setscene', 'depends', " ".join(deps), d)
logdir = bb.data.expand("${TRACE_LOGDIR}", d)
bb.utils.mkdirhier(logdir)
else:
- bb.data.setVar('STRACEFUNC', '', d)
+ d.setVar('STRACEFUNC', '')
}
STRACEPID = "${@os.getpid()}"
diff --git a/meta/classes/image.bbclass b/meta/classes/image.bbclass
index 14726d25374..4642fa63e21 100644
--- a/meta/classes/image.bbclass
+++ b/meta/classes/image.bbclass
@@ -74,17 +74,17 @@ IMAGE_TYPE = ${@base_contains("IMAGE_FSTYPES", "live", "live", "empty", d)}
inherit image-${IMAGE_TYPE}
python () {
- deps = bb.data.getVarFlag('do_rootfs', 'depends', d) or ""
- for type in (bb.data.getVar('IMAGE_FSTYPES', d, True) or "").split():
- for dep in ((bb.data.getVar('IMAGE_DEPENDS_%s' % type, d) or "").split() or []):
+ deps = d.getVarFlag('do_rootfs', 'depends') or ""
+ for type in (d.getVar('IMAGE_FSTYPES', True) or "").split():
+ for dep in ((d.getVar('IMAGE_DEPENDS_%s' % type) or "").split() or []):
deps += " %s:do_populate_sysroot" % dep
- for dep in (bb.data.getVar('EXTRA_IMAGEDEPENDS', d, True) or "").split():
+ for dep in (d.getVar('EXTRA_IMAGEDEPENDS', True) or "").split():
deps += " %s:do_populate_sysroot" % dep
- bb.data.setVarFlag('do_rootfs', 'depends', deps, d)
+ d.setVarFlag('do_rootfs', 'depends', deps)
# If we don't do this we try to run the mapping hooks while parsing, which is slow
# bitbake should really provide something to let us know this...
- if bb.data.getVar('BB_WORKERCONTEXT', d, True) is not None:
+ if d.getVar('BB_WORKERCONTEXT', True) is not None:
runtime_mapping_rename("PACKAGE_INSTALL", d)
runtime_mapping_rename("PACKAGE_INSTALL_ATTEMPTONLY", d)
}
@@ -98,15 +98,15 @@ python () {
# is searched for in the BBPATH (same as the old version.)
#
def get_devtable_list(d):
- devtable = bb.data.getVar('IMAGE_DEVICE_TABLE', d, 1)
+ devtable = d.getVar('IMAGE_DEVICE_TABLE', 1)
if devtable != None:
return devtable
str = ""
- devtables = bb.data.getVar('IMAGE_DEVICE_TABLES', d, 1)
+ devtables = d.getVar('IMAGE_DEVICE_TABLES', 1)
if devtables == None:
devtables = 'files/device_table-minimal.txt'
for devtable in devtables.split():
- str += " %s" % bb.which(bb.data.getVar('BBPATH', d, 1), devtable)
+ str += " %s" % bb.which(d.getVar('BBPATH', 1), devtable)
return str
IMAGE_CLASSES ?= "image_types"
@@ -119,7 +119,7 @@ ROOTFS_POSTPROCESS_COMMAND ?= ""
# some default locales
IMAGE_LINGUAS ?= "de-de fr-fr en-gb"
-LINGUAS_INSTALL = "${@" ".join(map(lambda s: "locale-base-%s" % s, bb.data.getVar('IMAGE_LINGUAS', d, 1).split()))}"
+LINGUAS_INSTALL = "${@" ".join(map(lambda s: "locale-base-%s" % s, d.getVar('IMAGE_LINGUAS', 1).split()))}"
do_rootfs[nostamp] = "1"
do_rootfs[dirs] = "${TOPDIR}"
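Inline ${@...} expressions such as LINGUAS_INSTALL above are touched by the same rename, because the datastore is in scope as d inside them. A rough, simplified sketch of how such an expression gets evaluated (BitBake's real expansion code additionally handles nesting, caching and error reporting):

    import re

    class MiniStore(dict):
        def getVar(self, name, expand=True):
            return self.get(name)

    def expand_inline(value, d):
        # evaluate each ${@python-expression} with the datastore bound to 'd'
        return re.sub(r"\$\{@(.*?)\}",
                      lambda m: str(eval(m.group(1), {}, {"d": d})), value)

    d = MiniStore(IMAGE_LINGUAS="de-de fr-fr en-gb")
    tmpl = '${@" ".join("locale-base-%s" % s for s in d.getVar("IMAGE_LINGUAS", 1).split())}'
    print(expand_inline(tmpl, d))
    # locale-base-de-de locale-base-fr-fr locale-base-en-gb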
diff --git a/meta/classes/image_types.bbclass b/meta/classes/image_types.bbclass
index 9549a9e3e05..ea0d9a56d70 100644
--- a/meta/classes/image_types.bbclass
+++ b/meta/classes/image_types.bbclass
@@ -1,8 +1,8 @@
def get_imagecmds(d):
cmds = "\n"
- old_overrides = bb.data.getVar('OVERRIDES', d, 0)
+ old_overrides = d.getVar('OVERRIDES', 0)
- types = bb.data.getVar('IMAGE_FSTYPES', d, True).split()
+ types = d.getVar('IMAGE_FSTYPES', True).split()
# Live images will be processed via the inheriting bbclass and
# do not get processed here.
# live images also depend on ext3 so ensure it's present
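The get_imagecmds() hunk is a reminder of what the numeric second argument to getVar() means: it is the expand flag. Passing 0 returns the raw value with ${...} references intact, so the saved OVERRIDES keeps its references for later re-expansion during the per-type processing. A stand-in illustration with a toy expansion step:

    import re

    # Hypothetical store, only to show getVar(name, 0) versus getVar(name, 1).
    class ExpandingStore:
        def __init__(self):
            self._vars = {}

        def setVar(self, name, value):
            self._vars[name] = value

        def getVar(self, name, expand=False):
            value = self._vars.get(name)
            if value is None or not expand:
                return value
            return re.sub(r"\$\{(\w+)\}",
                          lambda m: self.getVar(m.group(1), 1) or "", value)

    d = ExpandingStore()
    d.setVar('MACHINE', 'qemux86')
    d.setVar('OVERRIDES', 'linux:${MACHINE}')
    print(d.getVar('OVERRIDES', 0))   # linux:${MACHINE}  (raw, restorable)
    print(d.getVar('OVERRIDES', 1))   # linux:qemux86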
diff --git a/meta/classes/imagetest-qemu.bbclass b/meta/classes/imagetest-qemu.bbclass
index 4ea86c04fe5..de142bc0fbd 100644
--- a/meta/classes/imagetest-qemu.bbclass
+++ b/meta/classes/imagetest-qemu.bbclass
@@ -35,12 +35,12 @@ def qemuimagetest_main(d):
casestr = re.compile(r'(?P<scen>\w+\b):(?P<case>\S+$)')
resultstr = re.compile(r'\s*(?P<case>\w+)\s*(?P<pass>\d+)\s*(?P<fail>\d+)\s*(?P<noresult>\d+)')
- machine = bb.data.getVar('MACHINE', d, 1)
- pname = bb.data.getVar('PN', d, 1)
+ machine = d.getVar('MACHINE', 1)
+ pname = d.getVar('PN', 1)
"""function to save test cases running status"""
def teststatus(test, status, index, length):
- test_status = bb.data.getVar('TEST_STATUS', d, 1)
+ test_status = d.getVar('TEST_STATUS', 1)
if not os.path.exists(test_status):
raise bb.build.FuncFailed("No test status file existing under TEST_TMP")
@@ -51,30 +51,30 @@ def qemuimagetest_main(d):
"""funtion to run each case under scenario"""
def runtest(scen, case, fulltestpath):
- resultpath = bb.data.getVar('TEST_RESULT', d, 1)
- tmppath = bb.data.getVar('TEST_TMP', d, 1)
+ resultpath = d.getVar('TEST_RESULT', 1)
+ tmppath = d.getVar('TEST_TMP', 1)
"""initialize log file for testcase"""
- logpath = bb.data.getVar('TEST_LOG', d, 1)
+ logpath = d.getVar('TEST_LOG', 1)
bb.utils.mkdirhier("%s/%s" % (logpath, scen))
- caselog = os.path.join(logpath, "%s/log_%s.%s" % (scen, case, bb.data.getVar('DATETIME', d, 1)))
+ caselog = os.path.join(logpath, "%s/log_%s.%s" % (scen, case, d.getVar('DATETIME', 1)))
os.system("touch %s" % caselog)
"""export TEST_TMP, TEST_RESULT, DEPLOY_DIR and QEMUARCH"""
- os.environ["PATH"] = bb.data.getVar("PATH", d, True)
+ os.environ["PATH"] = d.getVar("PATH", True)
os.environ["TEST_TMP"] = tmppath
os.environ["TEST_RESULT"] = resultpath
- os.environ["DEPLOY_DIR"] = bb.data.getVar("DEPLOY_DIR", d, True)
+ os.environ["DEPLOY_DIR"] = d.getVar("DEPLOY_DIR", True)
os.environ["QEMUARCH"] = machine
os.environ["QEMUTARGET"] = pname
- os.environ["DISPLAY"] = bb.data.getVar("DISPLAY", d, True)
- os.environ["COREBASE"] = bb.data.getVar("COREBASE", d, True)
- os.environ["TOPDIR"] = bb.data.getVar("TOPDIR", d, True)
- os.environ["OE_TMPDIR"] = bb.data.getVar("TMPDIR", d, True)
- os.environ["TEST_STATUS"] = bb.data.getVar("TEST_STATUS", d, True)
- os.environ["TARGET_IPSAVE"] = bb.data.getVar("TARGET_IPSAVE", d, True)
- os.environ["TEST_SERIALIZE"] = bb.data.getVar("TEST_SERIALIZE", d, True)
- os.environ["SDK_NAME"] = bb.data.getVar("SDK_NAME", d, True)
+ os.environ["DISPLAY"] = d.getVar("DISPLAY", True)
+ os.environ["COREBASE"] = d.getVar("COREBASE", True)
+ os.environ["TOPDIR"] = d.getVar("TOPDIR", True)
+ os.environ["OE_TMPDIR"] = d.getVar("TMPDIR", True)
+ os.environ["TEST_STATUS"] = d.getVar("TEST_STATUS", True)
+ os.environ["TARGET_IPSAVE"] = d.getVar("TARGET_IPSAVE", True)
+ os.environ["TEST_SERIALIZE"] = d.getVar("TEST_SERIALIZE", True)
+ os.environ["SDK_NAME"] = d.getVar("SDK_NAME", True)
"""run Test Case"""
bb.note("Run %s test in scenario %s" % (case, scen))
@@ -92,13 +92,13 @@ def qemuimagetest_main(d):
if n:
item = n.group('scen')
casefile = n.group('case')
- for dir in bb.data.getVar("QEMUIMAGETESTS", d, True).split():
+ for dir in d.getVar("QEMUIMAGETESTS", True).split():
fulltestcase = os.path.join(dir, item, casefile)
if not os.path.isfile(fulltestcase):
raise bb.build.FuncFailed("Testcase %s not found" % fulltestcase)
list.append((item, casefile, fulltestcase))
else:
- for dir in bb.data.getVar("QEMUIMAGETESTS", d, True).split():
+ for dir in d.getVar("QEMUIMAGETESTS", True).split():
scenlist = os.path.join(dir, "scenario", machine, pname)
if not os.path.isfile(scenlist):
raise bb.build.FuncFailed("No scenario list file named %s found" % scenlist)
@@ -118,7 +118,7 @@ def qemuimagetest_main(d):
"""Clean tmp folder for testing"""
def clean_tmp():
- tmppath = bb.data.getVar('TEST_TMP', d, 1)
+ tmppath = d.getVar('TEST_TMP', 1)
if os.path.isdir(tmppath):
for f in os.listdir(tmppath):
@@ -132,28 +132,28 @@ def qemuimagetest_main(d):
clean_tmp()
"""check testcase folder and create test log folder"""
- testpath = bb.data.getVar('TEST_DIR', d, 1)
+ testpath = d.getVar('TEST_DIR', 1)
bb.utils.mkdirhier(testpath)
- logpath = bb.data.getVar('TEST_LOG', d, 1)
+ logpath = d.getVar('TEST_LOG', 1)
bb.utils.mkdirhier(logpath)
- tmppath = bb.data.getVar('TEST_TMP', d, 1)
+ tmppath = d.getVar('TEST_TMP', 1)
bb.utils.mkdirhier(tmppath)
"""initialize test status file"""
- test_status = bb.data.getVar('TEST_STATUS', d, 1)
+ test_status = d.getVar('TEST_STATUS', 1)
if os.path.exists(test_status):
os.remove(test_status)
os.system("touch %s" % test_status)
"""initialize result file"""
- resultpath = bb.data.getVar('TEST_RESULT', d, 1)
+ resultpath = d.getVar('TEST_RESULT', 1)
bb.utils.mkdirhier(resultpath)
- resultfile = os.path.join(resultpath, "testresult.%s" % bb.data.getVar('DATETIME', d, 1))
+ resultfile = os.path.join(resultpath, "testresult.%s" % d.getVar('DATETIME', 1))
sresultfile = os.path.join(resultpath, "testresult.log")
- machine = bb.data.getVar('MACHINE', d, 1)
+ machine = d.getVar('MACHINE', 1)
if os.path.exists(sresultfile):
os.remove(sresultfile)
@@ -165,7 +165,7 @@ def qemuimagetest_main(d):
f.close()
"""generate pre-defined testcase list"""
- testlist = bb.data.getVar('TEST_SCEN', d, 1)
+ testlist = d.getVar('TEST_SCEN', 1)
fulllist = generate_list(testlist)
"""Begin testing"""
diff --git a/meta/classes/insane.bbclass b/meta/classes/insane.bbclass
index 017f7bedc68..a65f3ee865c 100644
--- a/meta/classes/insane.bbclass
+++ b/meta/classes/insane.bbclass
@@ -105,7 +105,7 @@ ERROR_QA ?= "dev-so debug-deps dev-deps debug-files arch la2 pkgconfig la perms"
def package_qa_clean_path(path,d):
""" Remove the common prefix from the path. In this case it is the TMPDIR"""
- return path.replace(bb.data.getVar('TMPDIR',d,True),"")
+ return path.replace(d.getVar('TMPDIR',True),"")
def package_qa_write_error(error, d):
logfile = d.getVar('QA_LOGFILE', True)
@@ -132,13 +132,13 @@ def package_qa_check_rpath(file,name, d, elf, messages):
if not elf:
return
- scanelf = os.path.join(bb.data.getVar('STAGING_BINDIR_NATIVE',d,True),'scanelf')
- bad_dirs = [bb.data.getVar('TMPDIR', d, True) + "/work", bb.data.getVar('STAGING_DIR_TARGET', d, True)]
- bad_dir_test = bb.data.getVar('TMPDIR', d, True)
+ scanelf = os.path.join(d.getVar('STAGING_BINDIR_NATIVE',True),'scanelf')
+ bad_dirs = [d.getVar('TMPDIR', True) + "/work", d.getVar('STAGING_DIR_TARGET', True)]
+ bad_dir_test = d.getVar('TMPDIR', True)
if not os.path.exists(scanelf):
bb.fatal("Can not check RPATH, scanelf (part of pax-utils-native) not found")
- if not bad_dirs[0] in bb.data.getVar('WORKDIR', d, True):
+ if not bad_dirs[0] in d.getVar('WORKDIR', True):
bb.fatal("This class assumed that WORKDIR is ${TMPDIR}/work... Not doing any check")
output = os.popen("%s -B -F%%r#F '%s'" % (scanelf,file))
@@ -156,11 +156,11 @@ def package_qa_check_useless_rpaths(file, name, d, elf, messages):
if not elf:
return
- objdump = bb.data.getVar('OBJDUMP', d, True)
- env_path = bb.data.getVar('PATH', d, True)
+ objdump = d.getVar('OBJDUMP', True)
+ env_path = d.getVar('PATH', True)
- libdir = bb.data.getVar("libdir", d, True)
- base_libdir = bb.data.getVar("base_libdir", d, True)
+ libdir = d.getVar("libdir", True)
+ base_libdir = d.getVar("base_libdir", True)
import re
rpath_re = re.compile("\s+RPATH\s+(.*)")
@@ -209,8 +209,8 @@ def package_qa_check_arch(path,name,d, elf, messages):
if not elf:
return
- target_os = bb.data.getVar('TARGET_OS', d, True)
- target_arch = bb.data.getVar('TARGET_ARCH', d, True)
+ target_os = d.getVar('TARGET_OS', True)
+ target_arch = d.getVar('TARGET_ARCH', True)
# FIXME: Cross packages confuse this check, so just skip them
for s in ['cross', 'nativesdk', 'cross-canadian']:
@@ -243,7 +243,7 @@ def package_qa_check_desktop(path, name, d, elf, messages):
Run all desktop files through desktop-file-validate.
"""
if path.endswith(".desktop"):
- desktop_file_validate = os.path.join(bb.data.getVar('STAGING_BINDIR_NATIVE',d,True),'desktop-file-validate')
+ desktop_file_validate = os.path.join(d.getVar('STAGING_BINDIR_NATIVE',True),'desktop-file-validate')
output = os.popen("%s %s" % (desktop_file_validate, path))
# This only produces output on errors
for l in output:
@@ -261,14 +261,14 @@ def package_qa_hash_style(path, name, d, elf, messages):
if os.path.islink(path):
return
- gnu_hash = "--hash-style=gnu" in bb.data.getVar('LDFLAGS', d, True)
+ gnu_hash = "--hash-style=gnu" in d.getVar('LDFLAGS', True)
if not gnu_hash:
- gnu_hash = "--hash-style=both" in bb.data.getVar('LDFLAGS', d, True)
+ gnu_hash = "--hash-style=both" in d.getVar('LDFLAGS', True)
if not gnu_hash:
return
- objdump = bb.data.getVar('OBJDUMP', d, True)
- env_path = bb.data.getVar('PATH', d, True)
+ objdump = d.getVar('OBJDUMP', True)
+ env_path = d.getVar('PATH', True)
sane = False
has_syms = False
@@ -299,7 +299,7 @@ def package_qa_check_buildpaths(path, name, d, elf, messages):
if os.path.islink(path):
return
- tmpdir = bb.data.getVar('TMPDIR', d, True)
+ tmpdir = d.getVar('TMPDIR', True)
file_content = open(path).read()
if tmpdir in file_content:
messages.append("File %s in package contained reference to tmpdir" % package_qa_clean_path(path,d))
@@ -311,9 +311,9 @@ def package_qa_check_license(workdir, d):
import tempfile
sane = True
- lic_files = bb.data.getVar('LIC_FILES_CHKSUM', d, True)
- lic = bb.data.getVar('LICENSE', d, True)
- pn = bb.data.getVar('PN', d, True)
+ lic_files = d.getVar('LIC_FILES_CHKSUM', True)
+ lic = d.getVar('LICENSE', True)
+ pn = d.getVar('PN', True)
if lic == "CLOSED":
return True
@@ -324,7 +324,7 @@ def package_qa_check_license(workdir, d):
bb.error(pn + ": Recipe file does not have license file information (LIC_FILES_CHKSUM)")
return False
- srcdir = bb.data.getVar('S', d, True)
+ srcdir = d.getVar('S', True)
for url in lic_files.split():
(type, host, path, user, pswd, parm) = bb.decodeurl(url)
@@ -384,7 +384,7 @@ def package_qa_check_staged(path,d):
"""
sane = True
- tmpdir = bb.data.getVar('TMPDIR', d, True)
+ tmpdir = d.getVar('TMPDIR', True)
workdir = os.path.join(tmpdir, "work")
installed = "installed=yes"
@@ -417,8 +417,8 @@ def package_qa_walk(path, warnfuncs, errorfuncs, skip, package, d):
import oe.qa
#if this will throw an exception, then fix the dict above
- target_os = bb.data.getVar('TARGET_OS', d, True)
- target_arch = bb.data.getVar('TARGET_ARCH', d, True)
+ target_os = d.getVar('TARGET_OS', True)
+ target_arch = d.getVar('TARGET_ARCH', True)
warnings = []
errors = []
@@ -457,19 +457,19 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, d):
localdata = bb.data.createCopy(d)
root = "%s/%s" % (pkgdest, pkg)
- bb.data.setVar('ROOT', '', localdata)
- bb.data.setVar('ROOT_%s' % pkg, root, localdata)
- pkgname = bb.data.getVar('PKG_%s' % pkg, localdata, True)
+ localdata.setVar('ROOT', '')
+ localdata.setVar('ROOT_%s' % pkg, root)
+ pkgname = localdata.getVar('PKG_%s' % pkg, True)
if not pkgname:
pkgname = pkg
- bb.data.setVar('PKG', pkgname, localdata)
+ localdata.setVar('PKG', pkgname)
- bb.data.setVar('OVERRIDES', pkg, localdata)
+ localdata.setVar('OVERRIDES', pkg)
bb.data.update_data(localdata)
# Now check the RDEPENDS
- rdepends = bb.utils.explode_deps(bb.data.getVar('RDEPENDS', localdata, True) or "")
+ rdepends = bb.utils.explode_deps(localdata.getVar('RDEPENDS', True) or "")
# Now do the sanity check!!!
@@ -487,8 +487,8 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, d):
python do_package_qa () {
bb.note("DO PACKAGE QA")
- logdir = bb.data.getVar('T', d, True)
- pkg = bb.data.getVar('PN', d, True)
+ logdir = d.getVar('T', True)
+ pkg = d.getVar('PN', True)
# Check the compile log for host contamination
compilelog = os.path.join(logdir,"log.do_compile")
@@ -508,8 +508,8 @@ python do_package_qa () {
(pkg, installlog))
# Scan the packages...
- pkgdest = bb.data.getVar('PKGDEST', d, True)
- packages = bb.data.getVar('PACKAGES',d, True)
+ pkgdest = d.getVar('PKGDEST', True)
+ packages = d.getVar('PACKAGES', True)
# no packages should be scanned
if not packages:
@@ -521,7 +521,7 @@ python do_package_qa () {
walk_sane = True
rdepends_sane = True
for package in packages.split():
- skip = (bb.data.getVar('INSANE_SKIP_' + package, d, True) or "").split()
+ skip = (d.getVar('INSANE_SKIP_' + package, True) or "").split()
if skip:
bb.note("Package %s skipping QA tests: %s" % (package, str(skip)))
warnchecks = []
@@ -560,7 +560,7 @@ python do_qa_staging() {
python do_qa_configure() {
configs = []
- workdir = bb.data.getVar('WORKDIR', d, True)
+ workdir = d.getVar('WORKDIR', True)
bb.note("Checking autotools environment for common misconfiguration")
for root, dirs, files in os.walk(workdir):
statement = "grep -e 'CROSS COMPILE Badness:' -e 'is unsafe for cross-compilation' %s > /dev/null" % \
@@ -575,8 +575,8 @@ Rerun configure task after fixing this. The path was '%s'""" % root)
if "configure.in" in files:
configs.append(os.path.join(root, "configure.in"))
- cnf = bb.data.getVar('EXTRA_OECONF', d, True) or ""
- if "gettext" not in bb.data.getVar('P', d, True) and "gcc-runtime" not in bb.data.getVar('P', d, True) and "--disable-nls" not in cnf:
+ cnf = d.getVar('EXTRA_OECONF', True) or ""
+ if "gettext" not in d.getVar('P', True) and "gcc-runtime" not in d.getVar('P', True) and "--disable-nls" not in cnf:
ml = d.getVar("MLPREFIX", True) or ""
if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('nativesdk', d):
gt = "gettext-native"
@@ -584,7 +584,7 @@ Rerun configure task after fixing this. The path was '%s'""" % root)
gt = "gettext-nativesdk"
else:
gt = "virtual/" + ml + "gettext"
- deps = bb.utils.explode_deps(bb.data.getVar('DEPENDS', d, True) or "")
+ deps = bb.utils.explode_deps(d.getVar('DEPENDS', True) or "")
if gt not in deps:
for config in configs:
gnu = "grep \"^[[:space:]]*AM_GNU_GETTEXT\" %s >/dev/null" % config
diff --git a/meta/classes/kernel-arch.bbclass b/meta/classes/kernel-arch.bbclass
index 225d5994faa..86933951116 100644
--- a/meta/classes/kernel-arch.bbclass
+++ b/meta/classes/kernel-arch.bbclass
@@ -18,7 +18,7 @@ valid_archs = "alpha cris ia64 \
def map_kernel_arch(a, d):
import re
- valid_archs = bb.data.getVar('valid_archs', d, 1).split()
+ valid_archs = d.getVar('valid_archs', 1).split()
if re.match('(i.86|athlon|x86.64)$', a): return 'x86'
elif re.match('arm26$', a): return 'arm26'
@@ -32,7 +32,7 @@ def map_kernel_arch(a, d):
else:
bb.error("cannot map '%s' to a linux kernel architecture" % a)
-export ARCH = "${@map_kernel_arch(bb.data.getVar('TARGET_ARCH', d, 1), d)}"
+export ARCH = "${@map_kernel_arch(d.getVar('TARGET_ARCH', 1), d)}"
def map_uboot_arch(a, d):
import re
@@ -41,5 +41,5 @@ def map_uboot_arch(a, d):
elif re.match('i.86$', a): return 'x86'
return a
-export UBOOT_ARCH = "${@map_uboot_arch(bb.data.getVar('ARCH', d, 1), d)}"
+export UBOOT_ARCH = "${@map_uboot_arch(d.getVar('ARCH', 1), d)}"
diff --git a/meta/classes/kernel-yocto.bbclass b/meta/classes/kernel-yocto.bbclass
index 2205686ddd9..d555fc04b9d 100644
--- a/meta/classes/kernel-yocto.bbclass
+++ b/meta/classes/kernel-yocto.bbclass
@@ -104,7 +104,7 @@ python do_kernel_configcheck() {
bb.plain("NOTE: validating kernel configuration")
- pathprefix = "export PATH=%s; " % bb.data.getVar('PATH', d, True)
+ pathprefix = "export PATH=%s; " % d.getVar('PATH', True)
cmd = bb.data.expand("cd ${B}/..; kconf_check -config- ${B} ${S} ${B} ${KBRANCH}",d )
ret, result = commands.getstatusoutput("%s%s" % (pathprefix, cmd))
diff --git a/meta/classes/kernel.bbclass b/meta/classes/kernel.bbclass
index dc711f287af..12e0b83b947 100644
--- a/meta/classes/kernel.bbclass
+++ b/meta/classes/kernel.bbclass
@@ -11,15 +11,15 @@ INITRAMFS_IMAGE ?= ""
INITRAMFS_TASK ?= ""
python __anonymous () {
- kerneltype = bb.data.getVar('KERNEL_IMAGETYPE', d, 1) or ''
+ kerneltype = d.getVar('KERNEL_IMAGETYPE', 1) or ''
if kerneltype == 'uImage':
- depends = bb.data.getVar("DEPENDS", d, 1)
+ depends = d.getVar("DEPENDS", 1)
depends = "%s u-boot-mkimage-native" % depends
- bb.data.setVar("DEPENDS", depends, d)
+ d.setVar("DEPENDS", depends)
- image = bb.data.getVar('INITRAMFS_IMAGE', d, True)
+ image = d.getVar('INITRAMFS_IMAGE', True)
if image:
- bb.data.setVar('INITRAMFS_TASK', '${INITRAMFS_IMAGE}:do_rootfs', d)
+ d.setVar('INITRAMFS_TASK', '${INITRAMFS_IMAGE}:do_rootfs')
}
inherit kernel-arch deploy
@@ -31,7 +31,7 @@ PACKAGES_DYNAMIC += "kernel-firmware-*"
export OS = "${TARGET_OS}"
export CROSS_COMPILE = "${TARGET_PREFIX}"
-KERNEL_PRIORITY = "${@bb.data.getVar('PV',d,1).split('-')[0].split('.')[-1]}"
+KERNEL_PRIORITY = "${@d.getVar('PV',1).split('-')[0].split('.')[-1]}"
KERNEL_RELEASE ?= "${KERNEL_VERSION}"
@@ -56,7 +56,7 @@ KERNEL_IMAGEDEST = "boot"
#
# configuration
#
-export CMDLINE_CONSOLE = "console=${@bb.data.getVar("KERNEL_CONSOLE",d,1) or "ttyS0"}"
+export CMDLINE_CONSOLE = "console=${@d.getVar("KERNEL_CONSOLE",1) or "ttyS0"}"
KERNEL_VERSION = "${@get_kernelversion('${B}')}"
@@ -75,7 +75,7 @@ EXTRA_OEMAKE = ""
KERNEL_ALT_IMAGETYPE ??= ""
-KERNEL_IMAGETYPE_FOR_MAKE = "${@(lambda s: s[:-3] if s[-3:] == ".gz" else s)(bb.data.getVar('KERNEL_IMAGETYPE', d, 1))}"
+KERNEL_IMAGETYPE_FOR_MAKE = "${@(lambda s: s[:-3] if s[-3:] == ".gz" else s)(d.getVar('KERNEL_IMAGETYPE', 1))}"
kernel_do_compile() {
unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS MACHINE
@@ -306,10 +306,10 @@ module_conf_rfcomm = "alias bt-proto-3 rfcomm"
python populate_packages_prepend () {
def extract_modinfo(file):
import tempfile, re
- tempfile.tempdir = bb.data.getVar("WORKDIR", d, 1)
+ tempfile.tempdir = d.getVar("WORKDIR", 1)
tf = tempfile.mkstemp()
tmpfile = tf[1]
- cmd = "PATH=\"%s\" %sobjcopy -j .modinfo -O binary %s %s" % (bb.data.getVar("PATH", d, 1), bb.data.getVar("HOST_PREFIX", d, 1) or "", file, tmpfile)
+ cmd = "PATH=\"%s\" %sobjcopy -j .modinfo -O binary %s %s" % (d.getVar("PATH", 1), d.getVar("HOST_PREFIX", 1) or "", file, tmpfile)
os.system(cmd)
f = open(tmpfile)
l = f.read().split("\000")
@@ -328,18 +328,18 @@ python populate_packages_prepend () {
def parse_depmod():
import re
- dvar = bb.data.getVar('PKGD', d, 1)
+ dvar = d.getVar('PKGD', 1)
if not dvar:
bb.error("PKGD not defined")
return
- kernelver = bb.data.getVar('KERNEL_VERSION', d, 1)
+ kernelver = d.getVar('KERNEL_VERSION', 1)
kernelver_stripped = kernelver
m = re.match('^(.*-hh.*)[\.\+].*$', kernelver)
if m:
kernelver_stripped = m.group(1)
- path = bb.data.getVar("PATH", d, 1)
- host_prefix = bb.data.getVar("HOST_PREFIX", d, 1) or ""
+ path = d.getVar("PATH", 1)
+ host_prefix = d.getVar("HOST_PREFIX", 1) or ""
cmd = "PATH=\"%s\" %sdepmod -n -a -r -b %s -F %s/boot/System.map-%s %s" % (path, host_prefix, dvar, dvar, kernelver, kernelver_stripped)
f = os.popen(cmd, 'r')
@@ -377,9 +377,9 @@ python populate_packages_prepend () {
def get_dependencies(file, pattern, format):
# file no longer includes PKGD
- file = file.replace(bb.data.getVar('PKGD', d, 1) or '', '', 1)
+ file = file.replace(d.getVar('PKGD', 1) or '', '', 1)
# instead is prefixed with /lib/modules/${KERNEL_VERSION}
- file = file.replace("/lib/modules/%s/" % bb.data.getVar('KERNEL_VERSION', d, 1) or '', '', 1)
+ file = file.replace("/lib/modules/%s/" % d.getVar('KERNEL_VERSION', 1) or '', '', 1)
if module_deps.has_key(file):
import re
@@ -398,40 +398,40 @@ python populate_packages_prepend () {
import re
vals = extract_modinfo(file)
- dvar = bb.data.getVar('PKGD', d, 1)
+ dvar = d.getVar('PKGD', 1)
# If autoloading is requested, output /etc/modutils/<name> and append
# appropriate modprobe commands to the postinst
- autoload = bb.data.getVar('module_autoload_%s' % basename, d, 1)
+ autoload = d.getVar('module_autoload_%s' % basename, 1)
if autoload:
name = '%s/etc/modutils/%s' % (dvar, basename)
f = open(name, 'w')
for m in autoload.split():
f.write('%s\n' % m)
f.close()
- postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1)
+ postinst = d.getVar('pkg_postinst_%s' % pkg, 1)
if not postinst:
bb.fatal("pkg_postinst_%s not defined" % pkg)
- postinst += bb.data.getVar('autoload_postinst_fragment', d, 1) % autoload
- bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)
+ postinst += d.getVar('autoload_postinst_fragment', 1) % autoload
+ d.setVar('pkg_postinst_%s' % pkg, postinst)
# Write out any modconf fragment
- modconf = bb.data.getVar('module_conf_%s' % basename, d, 1)
+ modconf = d.getVar('module_conf_%s' % basename, 1)
if modconf:
name = '%s/etc/modprobe.d/%s.conf' % (dvar, basename)
f = open(name, 'w')
f.write("%s\n" % modconf)
f.close()
- files = bb.data.getVar('FILES_%s' % pkg, d, 1)
+ files = d.getVar('FILES_%s' % pkg, 1)
files = "%s /etc/modutils/%s /etc/modutils/%s.conf /etc/modprobe.d/%s.conf" % (files, basename, basename, basename)
- bb.data.setVar('FILES_%s' % pkg, files, d)
+ d.setVar('FILES_%s' % pkg, files)
if vals.has_key("description"):
- old_desc = bb.data.getVar('DESCRIPTION_' + pkg, d, 1) or ""
- bb.data.setVar('DESCRIPTION_' + pkg, old_desc + "; " + vals["description"], d)
+ old_desc = d.getVar('DESCRIPTION_' + pkg, 1) or ""
+ d.setVar('DESCRIPTION_' + pkg, old_desc + "; " + vals["description"])
- rdepends_str = bb.data.getVar('RDEPENDS_' + pkg, d, 1)
+ rdepends_str = d.getVar('RDEPENDS_' + pkg, 1)
if rdepends_str:
rdepends = rdepends_str.split()
else:
@@ -443,29 +443,29 @@ python populate_packages_prepend () {
module_regex = '^(.*)\.k?o$'
module_pattern = 'kernel-module-%s'
- postinst = bb.data.getVar('pkg_postinst_modules', d, 1)
- postrm = bb.data.getVar('pkg_postrm_modules', d, 1)
+ postinst = d.getVar('pkg_postinst_modules', 1)
+ postrm = d.getVar('pkg_postrm_modules', 1)
do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.bin$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.fw$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.cis$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
- do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='update-modules kernel-%s' % bb.data.getVar("KERNEL_VERSION", d, 1))
+ do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='update-modules kernel-%s' % d.getVar("KERNEL_VERSION", 1))
import re
metapkg = "kernel-modules"
- bb.data.setVar('ALLOW_EMPTY_' + metapkg, "1", d)
- bb.data.setVar('FILES_' + metapkg, "", d)
+ d.setVar('ALLOW_EMPTY_' + metapkg, "1")
+ d.setVar('FILES_' + metapkg, "")
blacklist = [ 'kernel-dev', 'kernel-image', 'kernel-base', 'kernel-vmlinux', 'perf', 'perf-dbg', 'kernel-misc' ]
for l in module_deps.values():
for i in l:
pkg = module_pattern % legitimize_package_name(re.match(module_regex, os.path.basename(i)).group(1))
blacklist.append(pkg)
metapkg_rdepends = []
- packages = bb.data.getVar('PACKAGES', d, 1).split()
+ packages = d.getVar('PACKAGES', 1).split()
for pkg in packages[1:]:
if not pkg in blacklist and not pkg in metapkg_rdepends:
metapkg_rdepends.append(pkg)
bb.data.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends), d)
- bb.data.setVar('DESCRIPTION_' + metapkg, 'Kernel modules meta package', d)
+ d.setVar('DESCRIPTION_' + metapkg, 'Kernel modules meta package')
packages.append(metapkg)
bb.data.setVar('PACKAGES', ' '.join(packages), d)
}
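Two of the expressions converted in this class are worth working through. KERNEL_PRIORITY above derives a number from PV, and KERNEL_IMAGETYPE_FOR_MAKE strips a .gz suffix; both are plain string manipulation once d.getVar() has produced the value (the PV below is a made-up sample):

    # what the KERNEL_PRIORITY expression computes for a sample PV
    pv = "2.6.37-rc5+gitr${SRCPV}"
    print(pv.split('-')[0].split('.')[-1])      # 37

    # what the KERNEL_IMAGETYPE_FOR_MAKE lambda does to a compressed type
    strip_gz = lambda s: s[:-3] if s[-3:] == ".gz" else s
    print(strip_gz("vmlinux.gz"))               # vmlinux
    print(strip_gz("uImage"))                   # uImage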
diff --git a/meta/classes/libc-common.bbclass b/meta/classes/libc-common.bbclass
index 0ee9a55b095..ec33762a207 100644
--- a/meta/classes/libc-common.bbclass
+++ b/meta/classes/libc-common.bbclass
@@ -18,13 +18,13 @@ do_install() {
}
def get_libc_fpu_setting(bb, d):
- if bb.data.getVar('TARGET_FPU', d, 1) in [ 'soft' ]:
+ if d.getVar('TARGET_FPU', 1) in [ 'soft' ]:
return "--without-fp"
return ""
python populate_packages_prepend () {
- if bb.data.getVar('DEBIAN_NAMES', d, 1):
- bpn = bb.data.getVar('BPN', d, 1)
- bb.data.setVar('PKG_'+bpn, 'libc6', d)
- bb.data.setVar('PKG_'+bpn+'-dev', 'libc6-dev', d)
+ if d.getVar('DEBIAN_NAMES', 1):
+ bpn = d.getVar('BPN', 1)
+ d.setVar('PKG_'+bpn, 'libc6')
+ d.setVar('PKG_'+bpn+'-dev', 'libc6-dev')
}
diff --git a/meta/classes/libc-package.bbclass b/meta/classes/libc-package.bbclass
index 6ef2f972542..fc1a5794d7b 100644
--- a/meta/classes/libc-package.bbclass
+++ b/meta/classes/libc-package.bbclass
@@ -10,7 +10,7 @@
GLIBC_INTERNAL_USE_BINARY_LOCALE ?= "ondevice"
python __anonymous () {
- enabled = bb.data.getVar("ENABLE_BINARY_LOCALE_GENERATION", d, 1)
+ enabled = d.getVar("ENABLE_BINARY_LOCALE_GENERATION", 1)
pn = d.getVar("PN", True)
if pn.endswith("-initial"):
@@ -19,21 +19,21 @@ python __anonymous () {
if enabled and int(enabled):
import re
- target_arch = bb.data.getVar("TARGET_ARCH", d, 1)
- binary_arches = bb.data.getVar("BINARY_LOCALE_ARCHES", d, 1) or ""
- use_cross_localedef = bb.data.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", d, 1) or ""
+ target_arch = d.getVar("TARGET_ARCH", 1)
+ binary_arches = d.getVar("BINARY_LOCALE_ARCHES", 1) or ""
+ use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", 1) or ""
for regexp in binary_arches.split(" "):
r = re.compile(regexp)
if r.match(target_arch):
- depends = bb.data.getVar("DEPENDS", d, 1)
+ depends = d.getVar("DEPENDS", 1)
if use_cross_localedef == "1" :
depends = "%s cross-localedef-native" % depends
else:
depends = "%s qemu-native" % depends
- bb.data.setVar("DEPENDS", depends, d)
- bb.data.setVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", "compile", d)
+ d.setVar("DEPENDS", depends)
+ d.setVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", "compile")
break
}
@@ -109,19 +109,19 @@ inherit qemu
python package_do_split_gconvs () {
import os, re
- if (bb.data.getVar('PACKAGE_NO_GCONV', d, 1) == '1'):
+ if (d.getVar('PACKAGE_NO_GCONV', 1) == '1'):
bb.note("package requested not splitting gconvs")
return
- if not bb.data.getVar('PACKAGES', d, 1):
+ if not d.getVar('PACKAGES', 1):
return
- bpn = bb.data.getVar('BPN', d, 1)
- libdir = bb.data.getVar('libdir', d, 1)
+ bpn = d.getVar('BPN', 1)
+ libdir = d.getVar('libdir', 1)
if not libdir:
bb.error("libdir not defined")
return
- datadir = bb.data.getVar('datadir', d, 1)
+ datadir = d.getVar('datadir', 1)
if not datadir:
bb.error("datadir not defined")
return
@@ -191,17 +191,17 @@ python package_do_split_gconvs () {
do_split_packages(d, locales_dir, file_regex='(.*)', output_pattern=bpn+'-localedata-%s', \
description='locale definition for %s', hook=calc_locale_deps, extra_depends='')
- bb.data.setVar('PACKAGES', bb.data.getVar('PACKAGES', d) + ' ' + bb.data.getVar('MLPREFIX', d) + bpn + '-gconv', d)
+ bb.data.setVar('PACKAGES', d.getVar('PACKAGES') + ' ' + d.getVar('MLPREFIX') + bpn + '-gconv', d)
- use_bin = bb.data.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", d, 1)
+ use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", 1)
dot_re = re.compile("(.*)\.(.*)")
#GLIBC_GENERATE_LOCALES var specifies which locales are to be supported; empty or "all" means all locales
if use_bin != "precompiled":
- supported = bb.data.getVar('GLIBC_GENERATE_LOCALES', d, 1)
+ supported = d.getVar('GLIBC_GENERATE_LOCALES', 1)
if not supported or supported == "all":
- f = open(base_path_join(bb.data.getVar('WORKDIR', d, 1), "SUPPORTED"), "r")
+ f = open(base_path_join(d.getVar('WORKDIR', 1), "SUPPORTED"), "r")
supported = f.readlines()
f.close()
else:
@@ -209,7 +209,7 @@ python package_do_split_gconvs () {
supported = map(lambda s:s.replace(".", " ") + "\n", supported)
else:
supported = []
- full_bin_path = bb.data.getVar('PKGD', d, True) + binary_locales_dir
+ full_bin_path = d.getVar('PKGD', True) + binary_locales_dir
for dir in os.listdir(full_bin_path):
dbase = dir.split(".")
d2 = " "
@@ -218,7 +218,7 @@ python package_do_split_gconvs () {
supported.append(dbase[0] + d2)
# Collate the locales by base and encoding
- utf8_only = int(bb.data.getVar('LOCALE_UTF8_ONLY', d, 1) or 0)
+ utf8_only = int(d.getVar('LOCALE_UTF8_ONLY', 1) or 0)
encodings = {}
for l in supported:
l = l[:-1]
@@ -235,9 +235,9 @@ python package_do_split_gconvs () {
def output_locale_source(name, pkgname, locale, encoding):
bb.data.setVar('RDEPENDS_%s' % pkgname, 'localedef %s-localedata-%s %s-charmap-%s' % \
(bpn, legitimize_package_name(locale), bpn, legitimize_package_name(encoding)), d)
- bb.data.setVar('pkg_postinst_%s' % pkgname, bb.data.getVar('locale_base_postinst', d, 1) \
+ bb.data.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst', 1) \
% (locale, encoding, locale), d)
- bb.data.setVar('pkg_postrm_%s' % pkgname, bb.data.getVar('locale_base_postrm', d, 1) % \
+ bb.data.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm', 1) % \
(locale, encoding, locale), d)
def output_locale_binary_rdepends(name, pkgname, locale, encoding):
@@ -248,23 +248,23 @@ python package_do_split_gconvs () {
libc_name = name
bb.data.setVar('RDEPENDS_%s' % pkgname, legitimize_package_name('%s-binary-localedata-%s' \
% (bpn, libc_name)), d)
- rprovides = (bb.data.getVar('RPROVIDES_%s' % pkgname, d, True) or "").split()
+ rprovides = (d.getVar('RPROVIDES_%s' % pkgname, True) or "").split()
rprovides.append(legitimize_package_name('%s-binary-localedata-%s' % (bpn, libc_name)))
bb.data.setVar('RPROVIDES_%s' % pkgname, " ".join(rprovides), d)
commands = {}
def output_locale_binary(name, pkgname, locale, encoding):
- treedir = base_path_join(bb.data.getVar("WORKDIR", d, 1), "locale-tree")
- ldlibdir = base_path_join(treedir, bb.data.getVar("base_libdir", d, 1))
- path = bb.data.getVar("PATH", d, 1)
+ treedir = base_path_join(d.getVar("WORKDIR", 1), "locale-tree")
+ ldlibdir = base_path_join(treedir, d.getVar("base_libdir", 1))
+ path = d.getVar("PATH", 1)
i18npath = base_path_join(treedir, datadir, "i18n")
gconvpath = base_path_join(treedir, "iconvdata")
outputpath = base_path_join(treedir, libdir, "locale")
- use_cross_localedef = bb.data.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", d, 1) or "0"
+ use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", 1) or "0"
if use_cross_localedef == "1":
- target_arch = bb.data.getVar('TARGET_ARCH', d, True)
+ target_arch = d.getVar('TARGET_ARCH', True)
locale_arch_options = { \
"arm": " --uint32-align=4 --little-endian ", \
"powerpc": " --uint32-align=4 --big-endian ", \
@@ -292,9 +292,9 @@ python package_do_split_gconvs () {
--inputfile=%s/i18n/locales/%s --charmap=%s %s" \
% (treedir, datadir, locale, encoding, name)
- qemu_options = bb.data.getVar("QEMU_OPTIONS_%s" % bb.data.getVar('PACKAGE_ARCH', d, 1), d, 1)
+ qemu_options = bb.data.getVar("QEMU_OPTIONS_%s" % d.getVar('PACKAGE_ARCH', 1), d, 1)
if not qemu_options:
- qemu_options = bb.data.getVar('QEMU_OPTIONS', d, 1)
+ qemu_options = d.getVar('QEMU_OPTIONS', 1)
cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \
-E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \
@@ -305,14 +305,14 @@ python package_do_split_gconvs () {
bb.note("generating locale %s (%s)" % (locale, encoding))
def output_locale(name, locale, encoding):
- pkgname = bb.data.getVar('MLPREFIX', d) + 'locale-base-' + legitimize_package_name(name)
- bb.data.setVar('ALLOW_EMPTY_%s' % pkgname, '1', d)
- bb.data.setVar('PACKAGES', '%s %s' % (pkgname, bb.data.getVar('PACKAGES', d, 1)), d)
+ pkgname = d.getVar('MLPREFIX') + 'locale-base-' + legitimize_package_name(name)
+ d.setVar('ALLOW_EMPTY_%s' % pkgname, '1')
+ bb.data.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES', 1)), d)
rprovides = ' virtual-locale-%s' % legitimize_package_name(name)
m = re.match("(.*)_(.*)", name)
if m:
rprovides += ' virtual-locale-%s' % m.group(1)
- bb.data.setVar('RPROVIDES_%s' % pkgname, rprovides, d)
+ d.setVar('RPROVIDES_%s' % pkgname, rprovides)
if use_bin == "compile":
output_locale_binary_rdepends(name, pkgname, locale, encoding)
@@ -347,7 +347,7 @@ python package_do_split_gconvs () {
bb.note(" " + " ".join(non_utf8))
if use_bin == "compile":
- makefile = base_path_join(bb.data.getVar("WORKDIR", d, 1), "locale-tree", "Makefile")
+ makefile = base_path_join(d.getVar("WORKDIR", 1), "locale-tree", "Makefile")
m = open(makefile, "w")
m.write("all: %s\n\n" % " ".join(commands.keys()))
for cmd in commands:
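A consistency note on this hunk: several of the added lines (the PACKAGES updates above) convert only the inner read and keep the outer bb.data.setVar(..., d) wrapper. Both spellings store the same value, since the module helper just operates on the passed datastore; the fully converted form of the gconv line, reusing the d and bpn already in scope in package_do_split_gconvs(), would be:

    # fragment, not standalone: d and bpn come from package_do_split_gconvs()
    d.setVar('PACKAGES',
             d.getVar('PACKAGES') + ' ' + d.getVar('MLPREFIX') + bpn + '-gconv')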
diff --git a/meta/classes/license.bbclass b/meta/classes/license.bbclass
index baf35f00cc4..4d036b171ed 100644
--- a/meta/classes/license.bbclass
+++ b/meta/classes/license.bbclass
@@ -104,10 +104,10 @@ python do_populate_lic() {
# If the generic does not exist we need to check to see if there is an SPDX mapping to it
if not os.path.isfile(os.path.join(generic_directory, license_type)):
- if bb.data.getVarFlag('SPDXLICENSEMAP', license_type, d) != None:
+ if d.getVarFlag('SPDXLICENSEMAP', license_type) != None:
# Great, there is an SPDXLICENSEMAP. We can copy!
bb.note("We need to use a SPDXLICENSEMAP for %s" % (license_type))
- spdx_generic = bb.data.getVarFlag('SPDXLICENSEMAP', license_type, d)
+ spdx_generic = d.getVarFlag('SPDXLICENSEMAP', license_type)
copy_license(generic_directory, gen_lic_dest, spdx_generic)
link_license(gen_lic_dest, destdir, spdx_generic)
else:
@@ -120,16 +120,16 @@ python do_populate_lic() {
link_license(gen_lic_dest, destdir, license_type)
# All the license types for the package
- license_types = bb.data.getVar('LICENSE', d, True)
+ license_types = d.getVar('LICENSE', True)
# All the license files for the package
- lic_files = bb.data.getVar('LIC_FILES_CHKSUM', d, True)
- pn = bb.data.getVar('PN', d, True)
+ lic_files = d.getVar('LIC_FILES_CHKSUM', True)
+ pn = d.getVar('PN', True)
# The base directory we wrangle licenses to
- destdir = os.path.join(bb.data.getVar('LICSSTATEDIR', d, True), pn)
+ destdir = os.path.join(d.getVar('LICSSTATEDIR', True), pn)
# The license files are located in S/LIC_FILE_CHECKSUM.
- srcdir = bb.data.getVar('S', d, True)
+ srcdir = d.getVar('S', True)
# Directory we store the generic licenses as set in the distro configuration
- generic_directory = bb.data.getVar('COMMON_LICENSE_DIR', d, True)
+ generic_directory = d.getVar('COMMON_LICENSE_DIR', True)
try:
bb.mkdirhier(destdir)
@@ -154,7 +154,7 @@ python do_populate_lic() {
if ret is False or ret == 0:
bb.warn("%s could not be copied for some reason. It may not exist. WARN for now." % srclicfile)
- gen_lic_dest = os.path.join(bb.data.getVar('LICENSE_DIRECTORY', d, True), "common-licenses")
+ gen_lic_dest = os.path.join(d.getVar('LICENSE_DIRECTORY', True), "common-licenses")
clean_licenses = ""
diff --git a/meta/classes/metadata_scm.bbclass b/meta/classes/metadata_scm.bbclass
index 99ac05c18d6..57609ef8cdf 100644
--- a/meta/classes/metadata_scm.bbclass
+++ b/meta/classes/metadata_scm.bbclass
@@ -27,7 +27,7 @@ def base_detect_branch(d):
return "<unknown>"
def base_get_scmbasepath(d):
- return bb.data.getVar( 'COREBASE', d, 1 )
+ return d.getVar( 'COREBASE', 1 )
def base_get_metadata_monotone_branch(path, d):
monotone_branch = "<unknown>"
diff --git a/meta/classes/native.bbclass b/meta/classes/native.bbclass
index ba8b0bf25e5..5f25bdd2c27 100644
--- a/meta/classes/native.bbclass
+++ b/meta/classes/native.bbclass
@@ -98,18 +98,18 @@ python native_virtclass_handler () {
if not isinstance(e, bb.event.RecipePreFinalise):
return
- classextend = bb.data.getVar('BBCLASSEXTEND', e.data, True) or ""
+ classextend = e.data.getVar('BBCLASSEXTEND', True) or ""
if "native" not in classextend:
return
- pn = bb.data.getVar("PN", e.data, True)
+ pn = e.data.getVar("PN", True)
if not pn.endswith("-native"):
return
def map_dependencies(varname, d, suffix = ""):
if suffix:
varname = varname + "_" + suffix
- deps = bb.data.getVar(varname, d, True)
+ deps = d.getVar(varname, True)
if not deps:
return
deps = bb.utils.explode_deps(deps)
@@ -131,15 +131,15 @@ python native_virtclass_handler () {
map_dependencies("RPROVIDES", e.data, pkg)
map_dependencies("RREPLACES", e.data, pkg)
- provides = bb.data.getVar("PROVIDES", e.data, True)
+ provides = e.data.getVar("PROVIDES", True)
for prov in provides.split():
if prov.find(pn) != -1:
continue
if not prov.endswith("-native"):
provides = provides.replace(prov, prov + "-native")
- bb.data.setVar("PROVIDES", provides, e.data)
+ e.data.setVar("PROVIDES", provides)
- bb.data.setVar("OVERRIDES", bb.data.getVar("OVERRIDES", e.data, False) + ":virtclass-native", e.data)
+ bb.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-native", e.data)
}
addhandler native_virtclass_handler
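In event handlers such as native_virtclass_handler the datastore arrives on the event object, so the conversion targets e.data rather than a bare d. A runnable stand-in (hypothetical classes; the real event types live in bb.event):

    class RecipePreFinalise:
        def __init__(self, data):
            self.data = data

    def native_virtclass_handler(e):
        pn = e.data.getVar("PN", True)
        if pn and pn.endswith("-native"):
            overrides = e.data.getVar("OVERRIDES", False) or ""
            e.data.setVar("OVERRIDES", overrides + ":virtclass-native")

    class MiniStore(dict):
        def getVar(self, name, expand=True):
            return self.get(name)

        def setVar(self, name, value):
            self[name] = value

    store = MiniStore(PN="zlib-native", OVERRIDES="linux:arm")
    native_virtclass_handler(RecipePreFinalise(store))
    print(store.getVar("OVERRIDES"))   # linux:arm:virtclass-native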
diff --git a/meta/classes/nativesdk.bbclass b/meta/classes/nativesdk.bbclass
index bb59ac57a6a..ca24efaa7c0 100644
--- a/meta/classes/nativesdk.bbclass
+++ b/meta/classes/nativesdk.bbclass
@@ -11,7 +11,7 @@ STAGING_BINDIR_TOOLCHAIN = "${STAGING_DIR_NATIVE}${bindir_native}/${SDK_ARCH}${S
#
PACKAGE_ARCH = "${SDK_ARCH}-nativesdk"
python () {
- archs = bb.data.getVar('PACKAGE_ARCHS', d, True).split()
+ archs = d.getVar('PACKAGE_ARCHS', True).split()
sdkarchs = []
for arch in archs:
sdkarchs.append(arch + '-nativesdk')
@@ -62,22 +62,22 @@ python nativesdk_virtclass_handler () {
if not isinstance(e, bb.event.RecipePreFinalise):
return
- pn = bb.data.getVar("PN", e.data, True)
+ pn = e.data.getVar("PN", True)
if not pn.endswith("-nativesdk"):
return
- bb.data.setVar("OVERRIDES", bb.data.getVar("OVERRIDES", e.data, False) + ":virtclass-nativesdk", e.data)
+ bb.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-nativesdk", e.data)
}
python () {
- pn = bb.data.getVar("PN", d, True)
+ pn = d.getVar("PN", True)
if not pn.endswith("-nativesdk"):
return
def map_dependencies(varname, d, suffix = ""):
if suffix:
varname = varname + "_" + suffix
- deps = bb.data.getVar(varname, d, True)
+ deps = d.getVar(varname, True)
if not deps:
return
deps = bb.utils.explode_deps(deps)
@@ -101,13 +101,13 @@ python () {
# map_dependencies("RPROVIDES", d, pkg)
# map_dependencies("RREPLACES", d, pkg)
- provides = bb.data.getVar("PROVIDES", d, True)
+ provides = d.getVar("PROVIDES", True)
for prov in provides.split():
if prov.find(pn) != -1:
continue
if not prov.endswith("-nativesdk"):
provides = provides.replace(prov, prov + "-nativesdk")
- bb.data.setVar("PROVIDES", provides, d)
+ d.setVar("PROVIDES", provides)
}
addhandler nativesdk_virtclass_handler
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
index 0e1d8dbfc46..d01275f33a3 100644
--- a/meta/classes/package.bbclass
+++ b/meta/classes/package.bbclass
@@ -88,9 +88,9 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
if newdeps:
extra_depends = " ".join(newdeps)
- dvar = bb.data.getVar('PKGD', d, True)
+ dvar = d.getVar('PKGD', True)
- packages = bb.data.getVar('PACKAGES', d, True).split()
+ packages = d.getVar('PACKAGES', True).split()
if postinst:
postinst = '#!/bin/sh\n' + postinst + '\n'
@@ -136,7 +136,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
packages = [pkg] + packages
else:
packages.append(pkg)
- oldfiles = bb.data.getVar('FILES_' + pkg, d, True)
+ oldfiles = d.getVar('FILES_' + pkg, True)
if not oldfiles:
the_files = [os.path.join(root, o)]
if aux_files_pattern:
@@ -153,17 +153,17 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
the_files.append(aux_files_pattern_verbatim % m.group(1))
bb.data.setVar('FILES_' + pkg, " ".join(the_files), d)
if extra_depends != '':
- the_depends = bb.data.getVar('RDEPENDS_' + pkg, d, True)
+ the_depends = d.getVar('RDEPENDS_' + pkg, True)
if the_depends:
the_depends = '%s %s' % (the_depends, extra_depends)
else:
the_depends = extra_depends
- bb.data.setVar('RDEPENDS_' + pkg, the_depends, d)
- bb.data.setVar('DESCRIPTION_' + pkg, description % on, d)
+ d.setVar('RDEPENDS_' + pkg, the_depends)
+ d.setVar('DESCRIPTION_' + pkg, description % on)
if postinst:
- bb.data.setVar('pkg_postinst_' + pkg, postinst, d)
+ d.setVar('pkg_postinst_' + pkg, postinst)
if postrm:
- bb.data.setVar('pkg_postrm_' + pkg, postrm, d)
+ d.setVar('pkg_postrm_' + pkg, postrm)
else:
bb.data.setVar('FILES_' + pkg, oldfiles + " " + os.path.join(root, o), d)
if callable(hook):
@@ -174,13 +174,13 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
PACKAGE_DEPENDS += "file-native"
python () {
- if bb.data.getVar('PACKAGES', d, True) != '':
- deps = bb.data.getVarFlag('do_package', 'depends', d) or ""
- for dep in (bb.data.getVar('PACKAGE_DEPENDS', d, True) or "").split():
+ if d.getVar('PACKAGES', True) != '':
+ deps = d.getVarFlag('do_package', 'depends') or ""
+ for dep in (d.getVar('PACKAGE_DEPENDS', True) or "").split():
deps += " %s:do_populate_sysroot" % dep
- bb.data.setVarFlag('do_package', 'depends', deps, d)
+ d.setVarFlag('do_package', 'depends', deps)
- deps = (bb.data.getVarFlag('do_package', 'deptask', d) or "").split()
+ deps = (d.getVarFlag('do_package', 'deptask') or "").split()
# shlibs requires any DEPENDS to have already been packaged for the *.list files
deps.append("do_package")
bb.data.setVarFlag('do_package', 'deptask', " ".join(deps), d)
@@ -198,9 +198,9 @@ def splitfile(file, debugfile, debugsrcdir, d):
import commands, stat
- dvar = bb.data.getVar('PKGD', d, True)
- pathprefix = "export PATH=%s; " % bb.data.getVar('PATH', d, True)
- objcopy = bb.data.getVar("OBJCOPY", d, True)
+ dvar = d.getVar('PKGD', True)
+ pathprefix = "export PATH=%s; " % d.getVar('PATH', True)
+ objcopy = d.getVar("OBJCOPY", True)
debugedit = bb.data.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit", d)
workdir = bb.data.expand("${WORKDIR}", d)
workparentdir = os.path.dirname(workdir)
@@ -240,10 +240,10 @@ def splitfile2(debugsrcdir, d):
import commands, stat
- dvar = bb.data.getVar('PKGD', d, True)
- pathprefix = "export PATH=%s; " % bb.data.getVar('PATH', d, True)
- strip = bb.data.getVar("STRIP", d, True)
- objcopy = bb.data.getVar("OBJCOPY", d, True)
+ dvar = d.getVar('PKGD', True)
+ pathprefix = "export PATH=%s; " % d.getVar('PATH', True)
+ strip = d.getVar("STRIP", True)
+ objcopy = d.getVar("OBJCOPY", True)
debugedit = bb.data.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit", d)
workdir = bb.data.expand("${WORKDIR}", d)
workparentdir = os.path.dirname(workdir)
@@ -279,8 +279,8 @@ def runstrip(file, elftype, d):
import commands, stat
- pathprefix = "export PATH=%s; " % bb.data.getVar('PATH', d, True)
- strip = bb.data.getVar("STRIP", d, True)
+ pathprefix = "export PATH=%s; " % d.getVar('PATH', True)
+ strip = d.getVar("STRIP", True)
# Handle kernel modules specifically - .debug directories here are pointless
if file.find("/lib/modules/") != -1 and file.endswith(".ko"):
@@ -329,10 +329,10 @@ def get_package_mapping (pkg, d):
return pkg
def runtime_mapping_rename (varname, d):
- #bb.note("%s before: %s" % (varname, bb.data.getVar(varname, d, True)))
+ #bb.note("%s before: %s" % (varname, d.getVar(varname, True)))
new_depends = []
- deps = bb.utils.explode_dep_versions(bb.data.getVar(varname, d, True) or "")
+ deps = bb.utils.explode_dep_versions(d.getVar(varname, True) or "")
for depend in deps:
# Have to be careful with any version component of the depend
new_depend = get_package_mapping(depend, d)
@@ -343,7 +343,7 @@ def runtime_mapping_rename (varname, d):
bb.data.setVar(varname, " ".join(new_depends) or None, d)
- #bb.note("%s after: %s" % (varname, bb.data.getVar(varname, d, True)))
+ #bb.note("%s after: %s" % (varname, d.getVar(varname, True)))
#
# Package functions suitable for inclusion in PACKAGEFUNCS
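runtime_mapping_rename() above leans on bb.utils.explode_dep_versions() to take a dependency string apart before renaming the packages in it. A simplified stand-in parser (the real one accepts more syntax) showing the explode, rename and re-join round trip:

    import re

    def explode_dep_versions(s):
        # toy parser for strings like "libfoo1 (>= 1.0) bar"
        deps = {}
        for name, ver in re.findall(r"(\S+)(?:\s+\((.*?)\))?", s):
            deps[name] = ver or None
        return deps

    def rename_deps(s, mapping):
        out = []
        for dep, ver in explode_dep_versions(s).items():
            new = mapping.get(dep, dep)
            out.append("%s (%s)" % (new, ver) if ver else new)
        return " ".join(out)

    print(rename_deps("libfoo1 (>= 1.0) bar", {"libfoo1": "libfoo"}))
    # libfoo (>= 1.0) bar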
@@ -359,19 +359,19 @@ python package_get_auto_pr() {
}
python package_do_split_locales() {
- if (bb.data.getVar('PACKAGE_NO_LOCALE', d, True) == '1'):
+ if (d.getVar('PACKAGE_NO_LOCALE', True) == '1'):
bb.debug(1, "package requested not splitting locales")
return
- packages = (bb.data.getVar('PACKAGES', d, True) or "").split()
+ packages = (d.getVar('PACKAGES', True) or "").split()
- datadir = bb.data.getVar('datadir', d, True)
+ datadir = d.getVar('datadir', True)
if not datadir:
bb.note("datadir not defined")
return
- dvar = bb.data.getVar('PKGD', d, True)
- pn = bb.data.getVar('PN', d, True)
+ dvar = d.getVar('PKGD', True)
+ pn = d.getVar('PN', True)
if pn + '-locale' in packages:
packages.remove(pn + '-locale')
@@ -392,9 +392,9 @@ python package_do_split_locales() {
if mainpkg.find('-dev'):
mainpkg = mainpkg.replace('-dev', '')
- summary = bb.data.getVar('SUMMARY', d, True) or pn
- description = bb.data.getVar('DESCRIPTION', d, True) or ""
- locale_section = bb.data.getVar('LOCALE_SECTION', d, True)
+ summary = d.getVar('SUMMARY', True) or pn
+ description = d.getVar('DESCRIPTION', True) or ""
+ locale_section = d.getVar('LOCALE_SECTION', True)
for l in locales:
ln = legitimize_package_name(l)
pkg = pn + '-locale-' + ln
@@ -405,7 +405,7 @@ python package_do_split_locales() {
bb.data.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l), d)
bb.data.setVar('DESCRIPTION_' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l), d)
if locale_section:
- bb.data.setVar('SECTION_' + pkg, locale_section, d)
+ d.setVar('SECTION_' + pkg, locale_section)
bb.data.setVar('PACKAGES', ' '.join(packages), d)
@@ -415,14 +415,14 @@ python package_do_split_locales() {
# glibc-localedata-translit* won't install as a dependency
# for some other package which breaks meta-toolchain
# Probably breaks since virtual-locale- isn't provided anywhere
- #rdep = (bb.data.getVar('RDEPENDS_%s' % mainpkg, d, True) or bb.data.getVar('RDEPENDS', d, True) or "").split()
+ #rdep = (d.getVar('RDEPENDS_%s' % mainpkg, True) or d.getVar('RDEPENDS', True) or "").split()
#rdep.append('%s-locale*' % pn)
#bb.data.setVar('RDEPENDS_%s' % mainpkg, ' '.join(rdep), d)
}
python perform_packagecopy () {
- dest = bb.data.getVar('D', d, True)
- dvar = bb.data.getVar('PKGD', d, True)
+ dest = d.getVar('D', True)
+ dvar = d.getVar('PKGD', True)
bb.mkdirhier(dvar)
@@ -549,16 +549,16 @@ python fixup_perms () {
# paths are resolved via BBPATH
def get_fs_perms_list(d):
str = ""
- fs_perms_tables = bb.data.getVar('FILESYSTEM_PERMS_TABLES', d, True)
+ fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES', True)
if not fs_perms_tables:
fs_perms_tables = 'files/fs-perms.txt'
for conf_file in fs_perms_tables.split():
- str += " %s" % bb.which(bb.data.getVar('BBPATH', d, True), conf_file)
+ str += " %s" % bb.which(d.getVar('BBPATH', True), conf_file)
return str
- dvar = bb.data.getVar('PKGD', d, True)
+ dvar = d.getVar('PKGD', True)
fs_perms_table = {}
@@ -586,7 +586,7 @@ python fixup_perms () {
'oldincludedir' ]
for path in target_path_vars:
- dir = bb.data.getVar(path, d, True) or ""
+ dir = d.getVar(path, True) or ""
if dir == "":
continue
fs_perms_table[dir] = fs_perms_entry(bb.data.expand("%s 0755 root root false - - -" % (dir), d))
@@ -664,11 +664,11 @@ python fixup_perms () {
python split_and_strip_files () {
import commands, stat, errno
- dvar = bb.data.getVar('PKGD', d, True)
- pn = bb.data.getVar('PN', d, True)
+ dvar = d.getVar('PKGD', True)
+ pn = d.getVar('PN', True)
# We default to '.debug' style
- if bb.data.getVar('PACKAGE_DEBUG_SPLIT_STYLE', d, True) == 'debug-file-directory':
+ if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory':
# Single debug-file-directory style debug info
debugappend = ".debug"
debugdir = ""
@@ -691,7 +691,7 @@ python split_and_strip_files () {
# 8 - shared library
def isELF(path):
type = 0
- pathprefix = "export PATH=%s; " % bb.data.getVar('PATH', d, True)
+ pathprefix = "export PATH=%s; " % d.getVar('PATH', True)
ret, result = commands.getstatusoutput("%sfile '%s'" % (pathprefix, path))
if ret:
@@ -715,8 +715,8 @@ python split_and_strip_files () {
#
file_list = {}
file_links = {}
- if (bb.data.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', d, True) != '1') and \
- (bb.data.getVar('INHIBIT_PACKAGE_STRIP', d, True) != '1'):
+ if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1') and \
+ (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
for root, dirs, files in os.walk(dvar):
for f in files:
file = os.path.join(root, f)
@@ -764,7 +764,7 @@ python split_and_strip_files () {
#
# First lets process debug splitting
#
- if (bb.data.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', d, True) != '1'):
+ if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1'):
for file in file_list:
src = file[len(dvar):]
dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
@@ -842,7 +842,7 @@ python split_and_strip_files () {
#
# Now lets go back over things and strip them
#
- if (bb.data.getVar('INHIBIT_PACKAGE_STRIP', d, True) != '1'):
+ if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
for file in file_list:
if file_list[file].startswith("ELF: "):
elf_file = int(file_list[file][5:])
@@ -856,11 +856,11 @@ python split_and_strip_files () {
python populate_packages () {
import glob, stat, errno, re
- workdir = bb.data.getVar('WORKDIR', d, True)
- outdir = bb.data.getVar('DEPLOY_DIR', d, True)
- dvar = bb.data.getVar('PKGD', d, True)
- packages = bb.data.getVar('PACKAGES', d, True)
- pn = bb.data.getVar('PN', d, True)
+ workdir = d.getVar('WORKDIR', True)
+ outdir = d.getVar('DEPLOY_DIR', True)
+ dvar = d.getVar('PKGD', True)
+ packages = d.getVar('PACKAGES', True)
+ pn = d.getVar('PN', True)
bb.mkdirhier(outdir)
os.chdir(dvar)
@@ -877,7 +877,7 @@ python populate_packages () {
else:
package_list.append(pkg)
- pkgdest = bb.data.getVar('PKGDEST', d, True)
+ pkgdest = d.getVar('PKGDEST', True)
os.system('rm -rf %s' % pkgdest)
seen = []
@@ -887,14 +887,14 @@ python populate_packages () {
root = os.path.join(pkgdest, pkg)
bb.mkdirhier(root)
- bb.data.setVar('PKG', pkg, localdata)
- overrides = bb.data.getVar('OVERRIDES', localdata, True)
+ localdata.setVar('PKG', pkg)
+ overrides = localdata.getVar('OVERRIDES', True)
if not overrides:
raise bb.build.FuncFailed('OVERRIDES not defined')
- bb.data.setVar('OVERRIDES', overrides + ':' + pkg, localdata)
+ localdata.setVar('OVERRIDES', overrides + ':' + pkg)
bb.data.update_data(localdata)
- filesvar = bb.data.getVar('FILES', localdata, True) or ""
+ filesvar = localdata.getVar('FILES', True) or ""
files = filesvar.split()
file_links = {}
for file in files:
@@ -973,9 +973,9 @@ python populate_packages () {
bb.build.exec_func("package_name_hook", d)
for pkg in package_list:
- pkgname = bb.data.getVar('PKG_%s' % pkg, d, True)
+ pkgname = d.getVar('PKG_%s' % pkg, True)
if pkgname is None:
- bb.data.setVar('PKG_%s' % pkg, pkg, d)
+ d.setVar('PKG_%s' % pkg, pkg)
dangling_links = {}
pkg_files = {}
@@ -999,7 +999,7 @@ python populate_packages () {
dangling_links[pkg].append(os.path.normpath(target))
for pkg in package_list:
- rdepends = bb.utils.explode_dep_versions(bb.data.getVar('RDEPENDS_' + pkg, d, True) or bb.data.getVar('RDEPENDS', d, True) or "")
+ rdepends = bb.utils.explode_dep_versions(d.getVar('RDEPENDS_' + pkg, True) or d.getVar('RDEPENDS', True) or "")
for l in dangling_links[pkg]:
found = False
@@ -1040,9 +1040,9 @@ python emit_pkgdata() {
f.write('%s: %s\n' % (var, encode(val)))
return
- packages = bb.data.getVar('PACKAGES', d, True)
- pkgdest = bb.data.getVar('PKGDEST', d, 1)
- pkgdatadir = bb.data.getVar('PKGDESTWORK', d, True)
+ packages = d.getVar('PACKAGES', True)
+ pkgdest = d.getVar('PKGDEST', 1)
+ pkgdatadir = d.getVar('PKGDESTWORK', True)
# Take shared lock since we're only reading, not writing
lf = bb.utils.lockfile(bb.data.expand("${PACKAGELOCK}", d), True)
@@ -1052,7 +1052,7 @@ python emit_pkgdata() {
f.write("PACKAGES: %s\n" % packages)
f.close()
- workdir = bb.data.getVar('WORKDIR', d, True)
+ workdir = d.getVar('WORKDIR', True)
for pkg in packages.split():
subdata_file = pkgdatadir + "/runtime/%s" % pkg
@@ -1080,19 +1080,19 @@ python emit_pkgdata() {
write_if_exists(sf, pkg, 'pkg_preinst')
write_if_exists(sf, pkg, 'pkg_prerm')
write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
- for dfile in (bb.data.getVar('FILERPROVIDESFLIST_' + pkg, d, True) or "").split():
+ for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg, True) or "").split():
write_if_exists(sf, pkg, 'FILERPROVIDES_' + dfile)
write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
- for dfile in (bb.data.getVar('FILERDEPENDSFLIST_' + pkg, d, True) or "").split():
+ for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg, True) or "").split():
write_if_exists(sf, pkg, 'FILERDEPENDS_' + dfile)
sf.close()
- allow_empty = bb.data.getVar('ALLOW_EMPTY_%s' % pkg, d, True)
+ allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg, True)
if not allow_empty:
- allow_empty = bb.data.getVar('ALLOW_EMPTY', d, True)
+ allow_empty = d.getVar('ALLOW_EMPTY', True)
root = "%s/%s" % (pkgdest, pkg)
os.chdir(root)
g = glob('*')
@@ -1123,8 +1123,8 @@ RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/bin/rpmdeps"
python package_do_filedeps() {
import os, re
- pkgdest = bb.data.getVar('PKGDEST', d, True)
- packages = bb.data.getVar('PACKAGES', d, True)
+ pkgdest = d.getVar('PKGDEST', True)
+ packages = d.getVar('PACKAGES', True)
rpmdeps = bb.data.expand("${RPMDEPS}", d)
r = re.compile(r'[<>=]+ +[^ ]*')
@@ -1189,7 +1189,7 @@ SHLIBSWORKDIR = "${WORKDIR}/shlibs"
python package_do_shlibs() {
import re
- exclude_shlibs = bb.data.getVar('EXCLUDE_FROM_SHLIBS', d, 0)
+ exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', 0)
if exclude_shlibs:
bb.note("not generating shlibs")
return
@@ -1197,27 +1197,27 @@ python package_do_shlibs() {
lib_re = re.compile("^.*\.so")
libdir_re = re.compile(".*/lib$")
- packages = bb.data.getVar('PACKAGES', d, True)
- targetos = bb.data.getVar('TARGET_OS', d, True)
+ packages = d.getVar('PACKAGES', True)
+ targetos = d.getVar('TARGET_OS', True)
- workdir = bb.data.getVar('WORKDIR', d, True)
+ workdir = d.getVar('WORKDIR', True)
- ver = bb.data.getVar('PKGV', d, True)
+ ver = d.getVar('PKGV', True)
if not ver:
bb.error("PKGV not defined")
return
- pkgdest = bb.data.getVar('PKGDEST', d, True)
+ pkgdest = d.getVar('PKGDEST', True)
- shlibs_dir = bb.data.getVar('SHLIBSDIR', d, True)
- shlibswork_dir = bb.data.getVar('SHLIBSWORKDIR', d, True)
+ shlibs_dir = d.getVar('SHLIBSDIR', True)
+ shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)
# Take shared lock since we're only reading, not writing
lf = bb.utils.lockfile(bb.data.expand("${PACKAGELOCK}", d))
def linux_so(root, path, file):
- cmd = bb.data.getVar('OBJDUMP', d, True) + " -p " + os.path.join(root, file) + " 2>/dev/null"
- cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', d, True), cmd)
+ cmd = d.getVar('OBJDUMP', True) + " -p " + os.path.join(root, file) + " 2>/dev/null"
+ cmd = "PATH=\"%s\" %s" % (d.getVar('PATH', True), cmd)
fd = os.popen(cmd)
lines = fd.readlines()
fd.close()
@@ -1264,7 +1264,7 @@ python package_do_shlibs() {
if not combo in sonames:
sonames.append(combo)
if file.endswith('.dylib') or file.endswith('.so'):
- lafile = fullpath.replace(os.path.join(pkgdest, pkg), bb.data.getVar('PKGD', d, True))
+ lafile = fullpath.replace(os.path.join(pkgdest, pkg), d.getVar('PKGD', True))
# Drop suffix
lafile = lafile.rsplit(".",1)[0]
lapath = os.path.dirname(lafile)
@@ -1299,26 +1299,26 @@ python package_do_shlibs() {
needed[pkg].append(name)
#bb.note("Adding %s for %s" % (name, pkg))
- if bb.data.getVar('PACKAGE_SNAP_LIB_SYMLINKS', d, True) == "1":
+ if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS', True) == "1":
snap_symlinks = True
else:
snap_symlinks = False
- if (bb.data.getVar('USE_LDCONFIG', d, True) or "1") == "1":
+ if (d.getVar('USE_LDCONFIG', True) or "1") == "1":
use_ldconfig = True
else:
use_ldconfig = False
needed = {}
shlib_provider = {}
- private_libs = bb.data.getVar('PRIVATE_LIBS', d, True)
+ private_libs = d.getVar('PRIVATE_LIBS', True)
for pkg in packages.split():
needs_ldconfig = False
bb.debug(2, "calculating shlib provides for %s" % pkg)
- pkgver = bb.data.getVar('PKGV_' + pkg, d, True)
+ pkgver = d.getVar('PKGV_' + pkg, True)
if not pkgver:
- pkgver = bb.data.getVar('PV_' + pkg, d, True)
+ pkgver = d.getVar('PV_' + pkg, True)
if not pkgver:
pkgver = ver
@@ -1352,11 +1352,11 @@ python package_do_shlibs() {
fd.close()
if needs_ldconfig and use_ldconfig:
bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
- postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, True) or bb.data.getVar('pkg_postinst', d, True)
+ postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
if not postinst:
postinst = '#!/bin/sh\n'
- postinst += bb.data.getVar('ldconfig_postinst_fragment', d, True)
- bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)
+ postinst += d.getVar('ldconfig_postinst_fragment', True)
+ d.setVar('pkg_postinst_%s' % pkg, postinst)
list_re = re.compile('^(.*)\.list$')
for dir in [shlibs_dir]:
@@ -1380,7 +1380,7 @@ python package_do_shlibs() {
bb.utils.unlockfile(lf)
- assumed_libs = bb.data.getVar('ASSUME_SHLIBS', d, True)
+ assumed_libs = d.getVar('ASSUME_SHLIBS', True)
if assumed_libs:
for e in assumed_libs.split():
l, dep_pkg = e.split(":")
@@ -1424,12 +1424,12 @@ python package_do_shlibs() {
python package_do_pkgconfig () {
import re
- packages = bb.data.getVar('PACKAGES', d, True)
- workdir = bb.data.getVar('WORKDIR', d, True)
- pkgdest = bb.data.getVar('PKGDEST', d, True)
+ packages = d.getVar('PACKAGES', True)
+ workdir = d.getVar('WORKDIR', True)
+ pkgdest = d.getVar('PKGDEST', True)
- shlibs_dir = bb.data.getVar('SHLIBSDIR', d, True)
- shlibswork_dir = bb.data.getVar('SHLIBSWORKDIR', d, True)
+ shlibs_dir = d.getVar('SHLIBSDIR', True)
+ shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)
pc_re = re.compile('(.*)\.pc$')
var_re = re.compile('(.*)=(.*)')
@@ -1515,9 +1515,9 @@ python package_do_pkgconfig () {
}
python read_shlibdeps () {
- packages = bb.data.getVar('PACKAGES', d, True).split()
+ packages = d.getVar('PACKAGES', True).split()
for pkg in packages:
- rdepends = bb.utils.explode_dep_versions(bb.data.getVar('RDEPENDS_' + pkg, d, 0) or bb.data.getVar('RDEPENDS', d, 0) or "")
+ rdepends = bb.utils.explode_dep_versions(d.getVar('RDEPENDS_' + pkg, 0) or d.getVar('RDEPENDS', 0) or "")
for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
depsfile = bb.data.expand("${PKGDEST}/" + pkg + extension, d)
@@ -1544,14 +1544,14 @@ python package_depchains() {
package.
"""
- packages = bb.data.getVar('PACKAGES', d, True)
- postfixes = (bb.data.getVar('DEPCHAIN_POST', d, True) or '').split()
- prefixes = (bb.data.getVar('DEPCHAIN_PRE', d, True) or '').split()
+ packages = d.getVar('PACKAGES', True)
+ postfixes = (d.getVar('DEPCHAIN_POST', True) or '').split()
+ prefixes = (d.getVar('DEPCHAIN_PRE', True) or '').split()
def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
#bb.note('depends for %s is %s' % (base, depends))
- rreclist = bb.utils.explode_dep_versions(bb.data.getVar('RRECOMMENDS_' + pkg, d, True) or bb.data.getVar('RRECOMMENDS', d, True) or "")
+ rreclist = bb.utils.explode_dep_versions(d.getVar('RRECOMMENDS_' + pkg, True) or d.getVar('RRECOMMENDS', True) or "")
for depend in depends:
if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
@@ -1572,7 +1572,7 @@ python package_depchains() {
def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
#bb.note('rdepends for %s is %s' % (base, rdepends))
- rreclist = bb.utils.explode_dep_versions(bb.data.getVar('RRECOMMENDS_' + pkg, d, True) or bb.data.getVar('RRECOMMENDS', d, True) or "")
+ rreclist = bb.utils.explode_dep_versions(d.getVar('RRECOMMENDS_' + pkg, True) or d.getVar('RRECOMMENDS', True) or "")
for depend in rdepends:
if depend.find('virtual-locale-') != -1:
@@ -1596,15 +1596,15 @@ python package_depchains() {
list.append(dep)
depends = []
- for dep in bb.utils.explode_deps(bb.data.getVar('DEPENDS', d, True) or ""):
+ for dep in bb.utils.explode_deps(d.getVar('DEPENDS', True) or ""):
add_dep(depends, dep)
rdepends = []
- for dep in bb.utils.explode_deps(bb.data.getVar('RDEPENDS', d, True) or ""):
+ for dep in bb.utils.explode_deps(d.getVar('RDEPENDS', True) or ""):
add_dep(rdepends, dep)
for pkg in packages.split():
- for dep in bb.utils.explode_deps(bb.data.getVar('RDEPENDS_' + pkg, d, True) or ""):
+ for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg, True) or ""):
add_dep(rdepends, dep)
#bb.note('rdepends is %s' % rdepends)
@@ -1630,7 +1630,7 @@ python package_depchains() {
for suffix in pkgs:
for pkg in pkgs[suffix]:
- if bb.data.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs', d):
+ if d.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs'):
continue
(base, func) = pkgs[suffix][pkg]
if suffix == "-dev":
@@ -1639,7 +1639,7 @@ python package_depchains() {
pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
else:
rdeps = []
- for dep in bb.utils.explode_deps(bb.data.getVar('RDEPENDS_' + base, d, True) or bb.data.getVar('RDEPENDS', d, True) or ""):
+ for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base, True) or d.getVar('RDEPENDS', True) or ""):
add_dep(rdeps, dep)
pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
}
@@ -1679,22 +1679,22 @@ python do_package () {
# as any change to rpmdeps requires this to be rerun.
# PACKAGE_BBCLASS_VERSION = "1"
- packages = (bb.data.getVar('PACKAGES', d, True) or "").split()
+ packages = (d.getVar('PACKAGES', True) or "").split()
if len(packages) < 1:
bb.debug(1, "No packages to build, skipping do_package")
return
- workdir = bb.data.getVar('WORKDIR', d, True)
- outdir = bb.data.getVar('DEPLOY_DIR', d, True)
- dest = bb.data.getVar('D', d, True)
- dvar = bb.data.getVar('PKGD', d, True)
- pn = bb.data.getVar('PN', d, True)
+ workdir = d.getVar('WORKDIR', True)
+ outdir = d.getVar('DEPLOY_DIR', True)
+ dest = d.getVar('D', True)
+ dvar = d.getVar('PKGD', True)
+ pn = d.getVar('PN', True)
if not workdir or not outdir or not dest or not dvar or not pn or not packages:
bb.error("WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package")
return
- for f in (bb.data.getVar('PACKAGEFUNCS', d, True) or '').split():
+ for f in (d.getVar('PACKAGEFUNCS', True) or '').split():
bb.build.exec_func(f, d)
}
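The substitution this commit applies, hunk after hunk, is mechanical: the module-level bb.data accessors that take the datastore as an argument become equivalent methods on the datastore object itself. A minimal before/after sketch of the correspondence (illustrative only, not lifted from any single hunk; specfile is just a placeholder value):

    # Old style: module functions receiving the datastore 'd' as an argument.
    pn = bb.data.getVar('PN', d, True)         # expanded value
    raw = bb.data.getVar('SRC_URI', d, 0)      # unexpanded value
    bb.data.setVar('OUTSPECFILE', specfile, d)

    # New style: the same operations as methods on 'd'.
    pn = d.getVar('PN', True)
    raw = d.getVar('SRC_URI', 0)
    d.setVar('OUTSPECFILE', specfile)

    # A truthy second argument (True or 1; both spellings survive in this
    # commit and behave identically) requests ${...} expansion of the value.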
diff --git a/meta/classes/package_deb.bbclass b/meta/classes/package_deb.bbclass
index 6733e645349..71e46a8c8e0 100644
--- a/meta/classes/package_deb.bbclass
+++ b/meta/classes/package_deb.bbclass
@@ -11,18 +11,18 @@ DPKG_ARCH ?= "${TARGET_ARCH}"
PKGWRITEDIRDEB = "${WORKDIR}/deploy-debs"
python package_deb_fn () {
- bb.data.setVar('PKGFN', bb.data.getVar('PKG',d), d)
+ bb.data.setVar('PKGFN', d.getVar('PKG'), d)
}
addtask package_deb_install
python do_package_deb_install () {
- pkg = bb.data.getVar('PKG', d, True)
- pkgfn = bb.data.getVar('PKGFN', d, True)
- rootfs = bb.data.getVar('IMAGE_ROOTFS', d, True)
- debdir = bb.data.getVar('DEPLOY_DIR_DEB', d, True)
+ pkg = d.getVar('PKG', True)
+ pkgfn = d.getVar('PKGFN', True)
+ rootfs = d.getVar('IMAGE_ROOTFS', True)
+ debdir = d.getVar('DEPLOY_DIR_DEB', True)
apt_config = bb.data.expand('${STAGING_ETCDIR_NATIVE}/apt/apt.conf', d)
- stagingbindir = bb.data.getVar('STAGING_BINDIR_NATIVE', d, True)
- tmpdir = bb.data.getVar('TMPDIR', d, True)
+ stagingbindir = d.getVar('STAGING_BINDIR_NATIVE', True)
+ tmpdir = d.getVar('TMPDIR', True)
if None in (pkg,pkgfn,rootfs):
raise bb.build.FuncFailed("missing variables (one or more of PKG, PKGFN, IMAGE_ROOTFS)")
@@ -206,22 +206,22 @@ python do_package_deb () {
import re, copy
import textwrap
- workdir = bb.data.getVar('WORKDIR', d, True)
+ workdir = d.getVar('WORKDIR', True)
if not workdir:
bb.error("WORKDIR not defined, unable to package")
return
- outdir = bb.data.getVar('PKGWRITEDIRDEB', d, True)
+ outdir = d.getVar('PKGWRITEDIRDEB', True)
if not outdir:
bb.error("PKGWRITEDIRDEB not defined, unable to package")
return
- packages = bb.data.getVar('PACKAGES', d, True)
+ packages = d.getVar('PACKAGES', True)
if not packages:
bb.debug(1, "PACKAGES not defined, nothing to package")
return
- tmpdir = bb.data.getVar('TMPDIR', d, True)
+ tmpdir = d.getVar('TMPDIR', True)
if os.access(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"),os.R_OK):
os.unlink(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"))
@@ -230,7 +230,7 @@ python do_package_deb () {
bb.debug(1, "No packages; nothing to do")
return
- pkgdest = bb.data.getVar('PKGDEST', d, True)
+ pkgdest = d.getVar('PKGDEST', True)
for pkg in packages.split():
localdata = bb.data.createCopy(d)
@@ -238,19 +238,19 @@ python do_package_deb () {
lf = bb.utils.lockfile(root + ".lock")
- bb.data.setVar('ROOT', '', localdata)
- bb.data.setVar('ROOT_%s' % pkg, root, localdata)
- pkgname = bb.data.getVar('PKG_%s' % pkg, localdata, True)
+ localdata.setVar('ROOT', '')
+ localdata.setVar('ROOT_%s' % pkg, root)
+ pkgname = localdata.getVar('PKG_%s' % pkg, True)
if not pkgname:
pkgname = pkg
- bb.data.setVar('PKG', pkgname, localdata)
+ localdata.setVar('PKG', pkgname)
- bb.data.setVar('OVERRIDES', pkg, localdata)
+ localdata.setVar('OVERRIDES', pkg)
bb.data.update_data(localdata)
basedir = os.path.join(os.path.dirname(root))
- pkgoutdir = os.path.join(outdir, bb.data.getVar('PACKAGE_ARCH', localdata, True))
+ pkgoutdir = os.path.join(outdir, localdata.getVar('PACKAGE_ARCH', True))
bb.mkdirhier(pkgoutdir)
os.chdir(root)
@@ -261,8 +261,8 @@ python do_package_deb () {
del g[g.index('./DEBIAN')]
except ValueError:
pass
- if not g and bb.data.getVar('ALLOW_EMPTY', localdata) != "1":
- bb.note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PKGV', localdata, True), bb.data.getVar('PKGR', localdata, True)))
+ if not g and localdata.getVar('ALLOW_EMPTY') != "1":
+ bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', True), localdata.getVar('PKGR', True)))
bb.utils.unlockfile(lf)
continue
@@ -278,7 +278,7 @@ python do_package_deb () {
raise bb.build.FuncFailed("unable to open control file for writing.")
fields = []
- pe = bb.data.getVar('PKGE', d, True)
+ pe = d.getVar('PKGE', True)
if pe and int(pe) > 0:
fields.append(["Version: %s:%s-%s\n", ['PKGE', 'PKGV', 'PKGR']])
else:
@@ -298,10 +298,10 @@ python do_package_deb () {
def pullData(l, d):
l2 = []
for i in l:
- data = bb.data.getVar(i, d, True)
+ data = d.getVar(i, True)
if data is None:
raise KeyError(f)
- if i == 'DPKG_ARCH' and bb.data.getVar('PACKAGE_ARCH', d, True) == 'all':
+ if i == 'DPKG_ARCH' and d.getVar('PACKAGE_ARCH', True) == 'all':
data = 'all'
l2.append(data)
return l2
@@ -311,12 +311,12 @@ python do_package_deb () {
try:
for (c, fs) in fields:
for f in fs:
- if bb.data.getVar(f, localdata) is None:
+ if localdata.getVar(f) is None:
raise KeyError(f)
# Special behavior for description...
if 'DESCRIPTION' in fs:
- summary = bb.data.getVar('SUMMARY', localdata, True) or bb.data.getVar('DESCRIPTION', localdata, True) or "."
- description = bb.data.getVar('DESCRIPTION', localdata, True) or "."
+ summary = localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or "."
+ description = localdata.getVar('DESCRIPTION', True) or "."
description = textwrap.dedent(description).strip()
ctrlfile.write('Description: %s\n' % unicode(summary))
ctrlfile.write('%s\n' % unicode(textwrap.fill(description, width=74, initial_indent=' ', subsequent_indent=' ')))
@@ -332,18 +332,18 @@ python do_package_deb () {
bb.build.exec_func("mapping_rename_hook", localdata)
- rdepends = bb.utils.explode_dep_versions(bb.data.getVar("RDEPENDS", localdata, True) or "")
+ rdepends = bb.utils.explode_dep_versions(localdata.getVar("RDEPENDS", True) or "")
for dep in rdepends:
if '*' in dep:
del rdepends[dep]
- rrecommends = bb.utils.explode_dep_versions(bb.data.getVar("RRECOMMENDS", localdata, True) or "")
+ rrecommends = bb.utils.explode_dep_versions(localdata.getVar("RRECOMMENDS", True) or "")
for dep in rrecommends:
if '*' in dep:
del rrecommends[dep]
- rsuggests = bb.utils.explode_dep_versions(bb.data.getVar("RSUGGESTS", localdata, True) or "")
- rprovides = bb.utils.explode_dep_versions(bb.data.getVar("RPROVIDES", localdata, True) or "")
- rreplaces = bb.utils.explode_dep_versions(bb.data.getVar("RREPLACES", localdata, True) or "")
- rconflicts = bb.utils.explode_dep_versions(bb.data.getVar("RCONFLICTS", localdata, True) or "")
+ rsuggests = bb.utils.explode_dep_versions(localdata.getVar("RSUGGESTS", True) or "")
+ rprovides = bb.utils.explode_dep_versions(localdata.getVar("RPROVIDES", True) or "")
+ rreplaces = bb.utils.explode_dep_versions(localdata.getVar("RREPLACES", True) or "")
+ rconflicts = bb.utils.explode_dep_versions(localdata.getVar("RCONFLICTS", True) or "")
if rdepends:
ctrlfile.write("Depends: %s\n" % unicode(bb.utils.join_deps(rdepends)))
if rsuggests:
@@ -359,7 +359,7 @@ python do_package_deb () {
ctrlfile.close()
for script in ["preinst", "postinst", "prerm", "postrm"]:
- scriptvar = bb.data.getVar('pkg_%s' % script, localdata, True)
+ scriptvar = localdata.getVar('pkg_%s' % script, True)
if not scriptvar:
continue
try:
@@ -372,7 +372,7 @@ python do_package_deb () {
scriptfile.close()
os.chmod(os.path.join(controldir, script), 0755)
- conffiles_str = bb.data.getVar("CONFFILES", localdata, True)
+ conffiles_str = localdata.getVar("CONFFILES", True)
if conffiles_str:
try:
conffiles = file(os.path.join(controldir, 'conffiles'), 'w')
@@ -384,7 +384,7 @@ python do_package_deb () {
conffiles.close()
os.chdir(basedir)
- ret = os.system("PATH=\"%s\" dpkg-deb -b %s %s" % (bb.data.getVar("PATH", localdata, True), root, pkgoutdir))
+ ret = os.system("PATH=\"%s\" dpkg-deb -b %s %s" % (localdata.getVar("PATH", True), root, pkgoutdir))
if ret != 0:
bb.utils.prunedir(controldir)
bb.utils.unlockfile(lf)
@@ -405,17 +405,17 @@ python do_package_write_deb_setscene () {
addtask do_package_write_deb_setscene
python () {
- if bb.data.getVar('PACKAGES', d, True) != '':
- deps = (bb.data.getVarFlag('do_package_write_deb', 'depends', d) or "").split()
+ if d.getVar('PACKAGES', True) != '':
+ deps = (d.getVarFlag('do_package_write_deb', 'depends') or "").split()
deps.append('dpkg-native:do_populate_sysroot')
deps.append('virtual/fakeroot-native:do_populate_sysroot')
bb.data.setVarFlag('do_package_write_deb', 'depends', " ".join(deps), d)
- bb.data.setVarFlag('do_package_write_deb', 'fakeroot', "1", d)
- bb.data.setVarFlag('do_package_write_deb_setscene', 'fakeroot', "1", d)
+ d.setVarFlag('do_package_write_deb', 'fakeroot', "1")
+ d.setVarFlag('do_package_write_deb_setscene', 'fakeroot', "1")
# Map TARGET_ARCH to Debian's ideas about architectures
- if bb.data.getVar('DPKG_ARCH', d, True) in ["x86", "i486", "i586", "i686", "pentium"]:
- bb.data.setVar('DPKG_ARCH', 'i386', d)
+ if d.getVar('DPKG_ARCH', True) in ["x86", "i486", "i586", "i686", "pentium"]:
+ d.setVar('DPKG_ARCH', 'i386')
}
python do_package_write_deb () {
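For orientation, the per-package loop in do_package_deb, like its ipk and rpm siblings below, relies on a copied datastore so that package-specific overrides take effect before control fields are read. Condensed to the calls visible in the hunks above, the pattern is roughly:

    for pkg in (d.getVar('PACKAGES', True) or "").split():
        localdata = bb.data.createCopy(d)    # per-package view of the metadata
        localdata.setVar('PKG', pkg)
        localdata.setVar('OVERRIDES', pkg)   # let FOO_<pkg> overrides win
        bb.data.update_data(localdata)       # re-resolve overrides on the copy
        summary = localdata.getVar('SUMMARY', True) or "."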
diff --git a/meta/classes/package_ipk.bbclass b/meta/classes/package_ipk.bbclass
index d41b40d2c5b..df608fc0e3a 100644
--- a/meta/classes/package_ipk.bbclass
+++ b/meta/classes/package_ipk.bbclass
@@ -11,16 +11,16 @@ PKGWRITEDIRIPK = "${WORKDIR}/deploy-ipks"
OPKGBUILDCMD ??= "opkg-build"
python package_ipk_fn () {
- bb.data.setVar('PKGFN', bb.data.getVar('PKG',d), d)
+ bb.data.setVar('PKGFN', d.getVar('PKG'), d)
}
python package_ipk_install () {
- pkg = bb.data.getVar('PKG', d, 1)
- pkgfn = bb.data.getVar('PKGFN', d, 1)
- rootfs = bb.data.getVar('IMAGE_ROOTFS', d, 1)
- ipkdir = bb.data.getVar('DEPLOY_DIR_IPK', d, 1)
- stagingdir = bb.data.getVar('STAGING_DIR', d, 1)
- tmpdir = bb.data.getVar('TMPDIR', d, 1)
+ pkg = d.getVar('PKG', 1)
+ pkgfn = d.getVar('PKGFN', 1)
+ rootfs = d.getVar('IMAGE_ROOTFS', 1)
+ ipkdir = d.getVar('DEPLOY_DIR_IPK', 1)
+ stagingdir = d.getVar('STAGING_DIR', 1)
+ tmpdir = d.getVar('TMPDIR', 1)
if None in (pkg,pkgfn,rootfs):
raise bb.build.FuncFailed("missing variables (one or more of PKG, PKGFN, IMAGEROOTFS)")
@@ -36,7 +36,7 @@ python package_ipk_install () {
	# Generate ipk.conf if it or the stamp doesn't exist
conffile = os.path.join(stagingdir,"ipkg.conf")
if not os.access(conffile, os.R_OK):
- ipkg_archs = bb.data.getVar('PACKAGE_ARCHS',d)
+ ipkg_archs = d.getVar('PACKAGE_ARCHS')
if ipkg_archs is None:
bb.error("PACKAGE_ARCHS missing")
raise FuncFailed
@@ -259,15 +259,15 @@ python do_package_ipk () {
import re, copy
import textwrap
- workdir = bb.data.getVar('WORKDIR', d, True)
- outdir = bb.data.getVar('PKGWRITEDIRIPK', d, True)
- tmpdir = bb.data.getVar('TMPDIR', d, True)
- pkgdest = bb.data.getVar('PKGDEST', d, True)
+ workdir = d.getVar('WORKDIR', True)
+ outdir = d.getVar('PKGWRITEDIRIPK', True)
+ tmpdir = d.getVar('TMPDIR', True)
+ pkgdest = d.getVar('PKGDEST', True)
if not workdir or not outdir or not tmpdir:
bb.error("Variables incorrectly set, unable to package")
return
- packages = bb.data.getVar('PACKAGES', d, True)
+ packages = d.getVar('PACKAGES', True)
if not packages or packages == '':
bb.debug(1, "No packages; nothing to do")
return
@@ -283,18 +283,18 @@ python do_package_ipk () {
lf = bb.utils.lockfile(root + ".lock")
- bb.data.setVar('ROOT', '', localdata)
- bb.data.setVar('ROOT_%s' % pkg, root, localdata)
- pkgname = bb.data.getVar('PKG_%s' % pkg, localdata, 1)
+ localdata.setVar('ROOT', '')
+ localdata.setVar('ROOT_%s' % pkg, root)
+ pkgname = localdata.getVar('PKG_%s' % pkg, 1)
if not pkgname:
pkgname = pkg
- bb.data.setVar('PKG', pkgname, localdata)
+ localdata.setVar('PKG', pkgname)
- bb.data.setVar('OVERRIDES', pkg, localdata)
+ localdata.setVar('OVERRIDES', pkg)
bb.data.update_data(localdata)
basedir = os.path.join(os.path.dirname(root))
- arch = bb.data.getVar('PACKAGE_ARCH', localdata, 1)
+ arch = localdata.getVar('PACKAGE_ARCH', 1)
pkgoutdir = "%s/%s" % (outdir, arch)
bb.mkdirhier(pkgoutdir)
os.chdir(root)
@@ -305,8 +305,8 @@ python do_package_ipk () {
del g[g.index('./CONTROL')]
except ValueError:
pass
- if not g and bb.data.getVar('ALLOW_EMPTY', localdata) != "1":
- bb.note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PKGV', localdata, 1), bb.data.getVar('PKGR', localdata, 1)))
+ if not g and localdata.getVar('ALLOW_EMPTY') != "1":
+ bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', 1), localdata.getVar('PKGR', 1)))
bb.utils.unlockfile(lf)
continue
@@ -319,7 +319,7 @@ python do_package_ipk () {
raise bb.build.FuncFailed("unable to open control file for writing.")
fields = []
- pe = bb.data.getVar('PKGE', d, 1)
+ pe = d.getVar('PKGE', 1)
if pe and int(pe) > 0:
fields.append(["Version: %s:%s-%s\n", ['PKGE', 'PKGV', 'PKGR']])
else:
@@ -336,7 +336,7 @@ python do_package_ipk () {
def pullData(l, d):
l2 = []
for i in l:
- l2.append(bb.data.getVar(i, d, 1))
+ l2.append(d.getVar(i, 1))
return l2
ctrlfile.write("Package: %s\n" % pkgname)
@@ -344,12 +344,12 @@ python do_package_ipk () {
try:
for (c, fs) in fields:
for f in fs:
- if bb.data.getVar(f, localdata) is None:
+ if localdata.getVar(f) is None:
raise KeyError(f)
# Special behavior for description...
if 'DESCRIPTION' in fs:
- summary = bb.data.getVar('SUMMARY', localdata, True) or bb.data.getVar('DESCRIPTION', localdata, True) or "."
- description = bb.data.getVar('DESCRIPTION', localdata, True) or "."
+ summary = localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or "."
+ description = localdata.getVar('DESCRIPTION', True) or "."
description = textwrap.dedent(description).strip()
ctrlfile.write('Description: %s\n' % summary)
ctrlfile.write('%s\n' % textwrap.fill(description, width=74, initial_indent=' ', subsequent_indent=' '))
@@ -365,12 +365,12 @@ python do_package_ipk () {
bb.build.exec_func("mapping_rename_hook", localdata)
- rdepends = bb.utils.explode_dep_versions(bb.data.getVar("RDEPENDS", localdata, 1) or "")
- rrecommends = bb.utils.explode_dep_versions(bb.data.getVar("RRECOMMENDS", localdata, 1) or "")
- rsuggests = bb.utils.explode_dep_versions(bb.data.getVar("RSUGGESTS", localdata, 1) or "")
- rprovides = bb.utils.explode_dep_versions(bb.data.getVar("RPROVIDES", localdata, 1) or "")
- rreplaces = bb.utils.explode_dep_versions(bb.data.getVar("RREPLACES", localdata, 1) or "")
- rconflicts = bb.utils.explode_dep_versions(bb.data.getVar("RCONFLICTS", localdata, 1) or "")
+ rdepends = bb.utils.explode_dep_versions(localdata.getVar("RDEPENDS", 1) or "")
+ rrecommends = bb.utils.explode_dep_versions(localdata.getVar("RRECOMMENDS", 1) or "")
+ rsuggests = bb.utils.explode_dep_versions(localdata.getVar("RSUGGESTS", 1) or "")
+ rprovides = bb.utils.explode_dep_versions(localdata.getVar("RPROVIDES", 1) or "")
+ rreplaces = bb.utils.explode_dep_versions(localdata.getVar("RREPLACES", 1) or "")
+ rconflicts = bb.utils.explode_dep_versions(localdata.getVar("RCONFLICTS", 1) or "")
if rdepends:
ctrlfile.write("Depends: %s\n" % bb.utils.join_deps(rdepends))
@@ -384,14 +384,14 @@ python do_package_ipk () {
ctrlfile.write("Replaces: %s\n" % bb.utils.join_deps(rreplaces))
if rconflicts:
ctrlfile.write("Conflicts: %s\n" % bb.utils.join_deps(rconflicts))
- src_uri = bb.data.getVar("SRC_URI", localdata, 1)
+ src_uri = localdata.getVar("SRC_URI", 1)
if src_uri:
src_uri = re.sub("\s+", " ", src_uri)
ctrlfile.write("Source: %s\n" % " ".join(src_uri.split()))
ctrlfile.close()
for script in ["preinst", "postinst", "prerm", "postrm"]:
- scriptvar = bb.data.getVar('pkg_%s' % script, localdata, 1)
+ scriptvar = localdata.getVar('pkg_%s' % script, 1)
if not scriptvar:
continue
try:
@@ -403,7 +403,7 @@ python do_package_ipk () {
scriptfile.close()
os.chmod(os.path.join(controldir, script), 0755)
- conffiles_str = bb.data.getVar("CONFFILES", localdata, 1)
+ conffiles_str = localdata.getVar("CONFFILES", 1)
if conffiles_str:
try:
conffiles = file(os.path.join(controldir, 'conffiles'), 'w')
@@ -415,8 +415,8 @@ python do_package_ipk () {
conffiles.close()
os.chdir(basedir)
- ret = os.system("PATH=\"%s\" %s %s %s" % (bb.data.getVar("PATH", localdata, 1),
- bb.data.getVar("OPKGBUILDCMD",d,1), pkg, pkgoutdir))
+ ret = os.system("PATH=\"%s\" %s %s %s" % (localdata.getVar("PATH", 1),
+ d.getVar("OPKGBUILDCMD",1), pkg, pkgoutdir))
if ret != 0:
bb.utils.unlockfile(lf)
raise bb.build.FuncFailed("opkg-build execution failed")
@@ -437,13 +437,13 @@ python do_package_write_ipk_setscene () {
addtask do_package_write_ipk_setscene
python () {
- if bb.data.getVar('PACKAGES', d, True) != '':
- deps = (bb.data.getVarFlag('do_package_write_ipk', 'depends', d) or "").split()
+ if d.getVar('PACKAGES', True) != '':
+ deps = (d.getVarFlag('do_package_write_ipk', 'depends') or "").split()
deps.append('opkg-utils-native:do_populate_sysroot')
deps.append('virtual/fakeroot-native:do_populate_sysroot')
bb.data.setVarFlag('do_package_write_ipk', 'depends', " ".join(deps), d)
- bb.data.setVarFlag('do_package_write_ipk', 'fakeroot', "1", d)
- bb.data.setVarFlag('do_package_write_ipk_setscene', 'fakeroot', "1", d)
+ d.setVarFlag('do_package_write_ipk', 'fakeroot', "1")
+ d.setVarFlag('do_package_write_ipk_setscene', 'fakeroot', "1")
}
python do_package_write_ipk () {
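Both control-file writers round-trip their dependency fields through bb.utils: explode_dep_versions parses a string such as "pkg (>= 1.0)" into a dict keyed by package name, and join_deps reassembles it. A sketch of the wildcard filtering done for Depends; the deb hunk deletes keys while iterating the dict directly, which Python can reject at runtime, so the key snapshot below is the safer spelling:

    rdepends = bb.utils.explode_dep_versions(localdata.getVar("RDEPENDS", True) or "")
    for dep in list(rdepends):     # snapshot the keys before deleting entries
        if '*' in dep:
            del rdepends[dep]
    if rdepends:
        ctrlfile.write("Depends: %s\n" % bb.utils.join_deps(rdepends))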
diff --git a/meta/classes/package_rpm.bbclass b/meta/classes/package_rpm.bbclass
index f804a0fc177..2c5545c11d7 100644
--- a/meta/classes/package_rpm.bbclass
+++ b/meta/classes/package_rpm.bbclass
@@ -8,7 +8,7 @@ RPMBUILD="rpmbuild"
PKGWRITEDIRRPM = "${WORKDIR}/deploy-rpms"
python package_rpm_fn () {
- bb.data.setVar('PKGFN', bb.data.getVar('PKG',d), d)
+ bb.data.setVar('PKGFN', d.getVar('PKG'), d)
}
python package_rpm_install () {
@@ -406,7 +406,7 @@ python write_specfile () {
name = "".join(name.split(eext[1] + '-'))
return name
-# ml = bb.data.getVar("MLPREFIX", d, True)
+# ml = d.getVar("MLPREFIX", True)
# if ml and name and len(ml) != 0 and name.find(ml) == 0:
# return ml.join(name.split(ml, 1)[1:])
# return name
@@ -426,7 +426,7 @@ python write_specfile () {
# after renaming we cannot look up the dependencies in the packagedata
# store.
def translate_vers(varname, d):
- depends = bb.data.getVar(varname, d, True)
+ depends = d.getVar(varname, True)
if depends:
depends_dict = bb.utils.explode_dep_versions(depends)
newdeps_dict = {}
@@ -481,34 +481,34 @@ python write_specfile () {
scr = scr[:pos] + 'if [ "$1" = "0" ] ; then\n' + scr[pos:] + '\nfi'
return scr
- packages = bb.data.getVar('PACKAGES', d, True)
+ packages = d.getVar('PACKAGES', True)
if not packages or packages == '':
bb.debug(1, "No packages; nothing to do")
return
- pkgdest = bb.data.getVar('PKGDEST', d, True)
+ pkgdest = d.getVar('PKGDEST', True)
if not pkgdest:
bb.fatal("No PKGDEST")
return
- outspecfile = bb.data.getVar('OUTSPECFILE', d, True)
+ outspecfile = d.getVar('OUTSPECFILE', True)
if not outspecfile:
bb.fatal("No OUTSPECFILE")
return
# Construct the SPEC file...
- srcname = strip_multilib(bb.data.getVar('PN', d, True), d)
- srcsummary = (bb.data.getVar('SUMMARY', d, True) or bb.data.getVar('DESCRIPTION', d, True) or ".")
- srcversion = bb.data.getVar('PKGV', d, True).replace('-', '+')
- srcrelease = bb.data.getVar('PKGR', d, True)
- srcepoch = (bb.data.getVar('PKGE', d, True) or "")
- srclicense = bb.data.getVar('LICENSE', d, True)
- srcsection = bb.data.getVar('SECTION', d, True)
- srcmaintainer = bb.data.getVar('MAINTAINER', d, True)
- srchomepage = bb.data.getVar('HOMEPAGE', d, True)
- srcdescription = bb.data.getVar('DESCRIPTION', d, True) or "."
-
- srcdepends = strip_multilib(bb.data.getVar('DEPENDS', d, True), d)
+ srcname = strip_multilib(d.getVar('PN', True), d)
+ srcsummary = (d.getVar('SUMMARY', True) or d.getVar('DESCRIPTION', True) or ".")
+ srcversion = d.getVar('PKGV', True).replace('-', '+')
+ srcrelease = d.getVar('PKGR', True)
+ srcepoch = (d.getVar('PKGE', True) or "")
+ srclicense = d.getVar('LICENSE', True)
+ srcsection = d.getVar('SECTION', True)
+ srcmaintainer = d.getVar('MAINTAINER', True)
+ srchomepage = d.getVar('HOMEPAGE', True)
+ srcdescription = d.getVar('DESCRIPTION', True) or "."
+
+ srcdepends = strip_multilib(d.getVar('DEPENDS', True), d)
srcrdepends = []
srcrrecommends = []
srcrsuggests = []
@@ -538,28 +538,28 @@ python write_specfile () {
lf = bb.utils.lockfile(root + ".lock")
- bb.data.setVar('ROOT', '', localdata)
- bb.data.setVar('ROOT_%s' % pkg, root, localdata)
- pkgname = bb.data.getVar('PKG_%s' % pkg, localdata, 1)
+ localdata.setVar('ROOT', '')
+ localdata.setVar('ROOT_%s' % pkg, root)
+ pkgname = localdata.getVar('PKG_%s' % pkg, 1)
if not pkgname:
pkgname = pkg
- bb.data.setVar('PKG', pkgname, localdata)
+ localdata.setVar('PKG', pkgname)
- bb.data.setVar('OVERRIDES', pkg, localdata)
+ localdata.setVar('OVERRIDES', pkg)
bb.data.update_data(localdata)
- conffiles = (bb.data.getVar('CONFFILES', localdata, True) or "").split()
+ conffiles = (localdata.getVar('CONFFILES', True) or "").split()
splitname = strip_multilib(pkgname, d)
- splitsummary = (bb.data.getVar('SUMMARY', localdata, True) or bb.data.getVar('DESCRIPTION', localdata, True) or ".")
- splitversion = (bb.data.getVar('PKGV', localdata, True) or "").replace('-', '+')
- splitrelease = (bb.data.getVar('PKGR', localdata, True) or "")
- splitepoch = (bb.data.getVar('PKGE', localdata, True) or "")
- splitlicense = (bb.data.getVar('LICENSE', localdata, True) or "")
- splitsection = (bb.data.getVar('SECTION', localdata, True) or "")
- splitdescription = (bb.data.getVar('DESCRIPTION', localdata, True) or ".")
+ splitsummary = (localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or ".")
+ splitversion = (localdata.getVar('PKGV', True) or "").replace('-', '+')
+ splitrelease = (localdata.getVar('PKGR', True) or "")
+ splitepoch = (localdata.getVar('PKGE', True) or "")
+ splitlicense = (localdata.getVar('LICENSE', True) or "")
+ splitsection = (localdata.getVar('SECTION', True) or "")
+ splitdescription = (localdata.getVar('DESCRIPTION', True) or ".")
translate_vers('RDEPENDS', localdata)
translate_vers('RRECOMMENDS', localdata)
@@ -571,12 +571,12 @@ python write_specfile () {
# Map the dependencies into their final form
bb.build.exec_func("mapping_rename_hook", localdata)
- splitrdepends = strip_multilib(bb.data.getVar('RDEPENDS', localdata, True), d) or ""
- splitrrecommends = strip_multilib(bb.data.getVar('RRECOMMENDS', localdata, True), d) or ""
- splitrsuggests = strip_multilib(bb.data.getVar('RSUGGESTS', localdata, True), d) or ""
- splitrprovides = strip_multilib(bb.data.getVar('RPROVIDES', localdata, True), d) or ""
- splitrreplaces = strip_multilib(bb.data.getVar('RREPLACES', localdata, True), d) or ""
- splitrconflicts = strip_multilib(bb.data.getVar('RCONFLICTS', localdata, True), d) or ""
+ splitrdepends = strip_multilib(localdata.getVar('RDEPENDS', True), d) or ""
+ splitrrecommends = strip_multilib(localdata.getVar('RRECOMMENDS', True), d) or ""
+ splitrsuggests = strip_multilib(localdata.getVar('RSUGGESTS', True), d) or ""
+ splitrprovides = strip_multilib(localdata.getVar('RPROVIDES', True), d) or ""
+ splitrreplaces = strip_multilib(localdata.getVar('RREPLACES', True), d) or ""
+ splitrconflicts = strip_multilib(localdata.getVar('RCONFLICTS', True), d) or ""
splitrobsoletes = []
# For now we need to manually supplement RPROVIDES with any update-alternatives links
@@ -592,14 +592,14 @@ python write_specfile () {
srcrreplaces = splitrreplaces
srcrconflicts = splitrconflicts
- srcpreinst = bb.data.getVar('pkg_preinst', localdata, True)
- srcpostinst = bb.data.getVar('pkg_postinst', localdata, True)
- srcprerm = bb.data.getVar('pkg_prerm', localdata, True)
- srcpostrm = bb.data.getVar('pkg_postrm', localdata, True)
+ srcpreinst = localdata.getVar('pkg_preinst', True)
+ srcpostinst = localdata.getVar('pkg_postinst', True)
+ srcprerm = localdata.getVar('pkg_prerm', True)
+ srcpostrm = localdata.getVar('pkg_postrm', True)
file_list = []
walk_files(root, file_list, conffiles)
- if not file_list and bb.data.getVar('ALLOW_EMPTY', localdata) != "1":
+ if not file_list and localdata.getVar('ALLOW_EMPTY') != "1":
bb.note("Not creating empty RPM package for %s" % splitname)
else:
bb.note("Creating RPM package for %s" % splitname)
@@ -672,7 +672,7 @@ python write_specfile () {
# Now process scriptlets
for script in ["preinst", "postinst", "prerm", "postrm"]:
- scriptvar = bb.data.getVar('pkg_%s' % script, localdata, True)
+ scriptvar = localdata.getVar('pkg_%s' % script, True)
if not scriptvar:
continue
if script == 'preinst':
@@ -691,7 +691,7 @@ python write_specfile () {
# Now process files
file_list = []
walk_files(root, file_list, conffiles)
- if not file_list and bb.data.getVar('ALLOW_EMPTY', localdata) != "1":
+ if not file_list and localdata.getVar('ALLOW_EMPTY') != "1":
bb.note("Not creating empty RPM package for %s" % splitname)
else:
spec_files_bottom.append('%%files -n %s' % splitname)
@@ -813,29 +813,29 @@ python do_package_rpm () {
# We need a simple way to remove the MLPREFIX from the package name,
# and dependency information...
def strip_multilib(name, d):
- ml = bb.data.getVar("MLPREFIX", d, True)
+ ml = d.getVar("MLPREFIX", True)
if ml and name and len(ml) != 0 and name.find(ml) >= 0:
return "".join(name.split(ml))
return name
- workdir = bb.data.getVar('WORKDIR', d, True)
- outdir = bb.data.getVar('DEPLOY_DIR_IPK', d, True)
- tmpdir = bb.data.getVar('TMPDIR', d, True)
- pkgd = bb.data.getVar('PKGD', d, True)
- pkgdest = bb.data.getVar('PKGDEST', d, True)
+ workdir = d.getVar('WORKDIR', True)
+ outdir = d.getVar('DEPLOY_DIR_IPK', True)
+ tmpdir = d.getVar('TMPDIR', True)
+ pkgd = d.getVar('PKGD', True)
+ pkgdest = d.getVar('PKGDEST', True)
if not workdir or not outdir or not pkgd or not tmpdir:
bb.error("Variables incorrectly set, unable to package")
return
- packages = bb.data.getVar('PACKAGES', d, True)
+ packages = d.getVar('PACKAGES', True)
if not packages or packages == '':
bb.debug(1, "No packages; nothing to do")
return
# Construct the spec file...
- srcname = strip_multilib(bb.data.getVar('PN', d, True), d)
+ srcname = strip_multilib(d.getVar('PN', True), d)
outspecfile = workdir + "/" + srcname + ".spec"
- bb.data.setVar('OUTSPECFILE', outspecfile, d)
+ d.setVar('OUTSPECFILE', outspecfile)
bb.build.exec_func('write_specfile', d)
# Construct per file dependencies file
@@ -844,10 +844,10 @@ python do_package_rpm () {
outfile.write("\n# Dependency table\n")
for pkg in packages.split():
dependsflist_key = 'FILE' + varname + 'FLIST' + "_" + pkg
- dependsflist = (bb.data.getVar(dependsflist_key, d, True) or "")
+ dependsflist = (d.getVar(dependsflist_key, True) or "")
for dfile in dependsflist.split():
key = "FILE" + varname + "_" + dfile + "_" + pkg
- depends_dict = bb.utils.explode_dep_versions(bb.data.getVar(key, d, True) or "")
+ depends_dict = bb.utils.explode_dep_versions(d.getVar(key, True) or "")
file = dfile.replace("@underscore@", "_")
file = file.replace("@closebrace@", "]")
file = file.replace("@openbrace@", "[")
@@ -899,15 +899,15 @@ python do_package_rpm () {
os.chmod(outprovides, 0755)
# Setup the rpmbuild arguments...
- rpmbuild = bb.data.getVar('RPMBUILD', d, True)
- targetsys = bb.data.getVar('TARGET_SYS', d, True)
- targetvendor = bb.data.getVar('TARGET_VENDOR', d, True)
- package_arch = bb.data.getVar('PACKAGE_ARCH', d, True) or ""
+ rpmbuild = d.getVar('RPMBUILD', True)
+ targetsys = d.getVar('TARGET_SYS', True)
+ targetvendor = d.getVar('TARGET_VENDOR', True)
+ package_arch = d.getVar('PACKAGE_ARCH', True) or ""
if package_arch not in "all any noarch".split():
- ml_prefix = (bb.data.getVar('MLPREFIX', d, True) or "").replace("-", "_")
- bb.data.setVar('PACKAGE_ARCH_EXTEND', ml_prefix + package_arch, d)
+ ml_prefix = (d.getVar('MLPREFIX', True) or "").replace("-", "_")
+ d.setVar('PACKAGE_ARCH_EXTEND', ml_prefix + package_arch)
else:
- bb.data.setVar('PACKAGE_ARCH_EXTEND', package_arch, d)
+ d.setVar('PACKAGE_ARCH_EXTEND', package_arch)
pkgwritedir = bb.data.expand('${PKGWRITEDIRRPM}/${PACKAGE_ARCH_EXTEND}', d)
pkgarch = bb.data.expand('${PACKAGE_ARCH_EXTEND}${TARGET_VENDOR}-${TARGET_OS}', d)
magicfile = bb.data.expand('${STAGING_DIR_NATIVE}/usr/share/misc/magic.mgc', d)
@@ -927,19 +927,19 @@ python do_package_rpm () {
cmd = cmd + " -bb " + outspecfile
# Build the rpm package!
- bb.data.setVar('BUILDSPEC', cmd + "\n", d)
- bb.data.setVarFlag('BUILDSPEC', 'func', '1', d)
+ d.setVar('BUILDSPEC', cmd + "\n")
+ d.setVarFlag('BUILDSPEC', 'func', '1')
bb.build.exec_func('BUILDSPEC', d)
}
python () {
- if bb.data.getVar('PACKAGES', d, True) != '':
- deps = (bb.data.getVarFlag('do_package_write_rpm', 'depends', d) or "").split()
+ if d.getVar('PACKAGES', True) != '':
+ deps = (d.getVarFlag('do_package_write_rpm', 'depends') or "").split()
deps.append('rpm-native:do_populate_sysroot')
deps.append('virtual/fakeroot-native:do_populate_sysroot')
bb.data.setVarFlag('do_package_write_rpm', 'depends', " ".join(deps), d)
- bb.data.setVarFlag('do_package_write_rpm', 'fakeroot', 1, d)
- bb.data.setVarFlag('do_package_write_rpm_setscene', 'fakeroot', 1, d)
+ d.setVarFlag('do_package_write_rpm', 'fakeroot', 1)
+ d.setVarFlag('do_package_write_rpm_setscene', 'fakeroot', 1)
}
SSTATETASKS += "do_package_write_rpm"
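Each backend ends with the same anonymous Python block: when PACKAGES is non-empty it appends the native packaging tool to the write task's dependencies and marks the task fakeroot. Reassembled from the hunk above, the converted rpm variant reads:

    python () {
        if d.getVar('PACKAGES', True) != '':
            deps = (d.getVarFlag('do_package_write_rpm', 'depends') or "").split()
            deps.append('rpm-native:do_populate_sysroot')
            deps.append('virtual/fakeroot-native:do_populate_sysroot')
            bb.data.setVarFlag('do_package_write_rpm', 'depends', " ".join(deps), d)
            d.setVarFlag('do_package_write_rpm', 'fakeroot', 1)
            d.setVarFlag('do_package_write_rpm_setscene', 'fakeroot', 1)
    }

Note that the setVarFlag call writing the joined dependency list stays in the old bb.data style here and in the deb, ipk, and tar variants; the conversion is incremental rather than exhaustive.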
diff --git a/meta/classes/package_tar.bbclass b/meta/classes/package_tar.bbclass
index a806e4514be..f26a2c00085 100644
--- a/meta/classes/package_tar.bbclass
+++ b/meta/classes/package_tar.bbclass
@@ -3,15 +3,15 @@ inherit package
IMAGE_PKGTYPE ?= "tar"
python package_tar_fn () {
- fn = os.path.join(bb.data.getVar('DEPLOY_DIR_TAR', d), "%s-%s-%s.tar.gz" % (bb.data.getVar('PKG', d), bb.data.getVar('PKGV', d), bb.data.getVar('PKGR', d)))
+ fn = os.path.join(d.getVar('DEPLOY_DIR_TAR'), "%s-%s-%s.tar.gz" % (d.getVar('PKG'), d.getVar('PKGV'), d.getVar('PKGR')))
fn = bb.data.expand(fn, d)
- bb.data.setVar('PKGFN', fn, d)
+ d.setVar('PKGFN', fn)
}
python package_tar_install () {
- pkg = bb.data.getVar('PKG', d, 1)
- pkgfn = bb.data.getVar('PKGFN', d, 1)
- rootfs = bb.data.getVar('IMAGE_ROOTFS', d, 1)
+ pkg = d.getVar('PKG', 1)
+ pkgfn = d.getVar('PKGFN', 1)
+ rootfs = d.getVar('IMAGE_ROOTFS', 1)
if None in (pkg,pkgfn,rootfs):
bb.error("missing variables (one or more of PKG, PKGFN, IMAGEROOTFS)")
@@ -35,24 +35,24 @@ python package_tar_install () {
}
python do_package_tar () {
- workdir = bb.data.getVar('WORKDIR', d, 1)
+ workdir = d.getVar('WORKDIR', 1)
if not workdir:
bb.error("WORKDIR not defined, unable to package")
return
- outdir = bb.data.getVar('DEPLOY_DIR_TAR', d, 1)
+ outdir = d.getVar('DEPLOY_DIR_TAR', 1)
if not outdir:
bb.error("DEPLOY_DIR_TAR not defined, unable to package")
return
bb.mkdirhier(outdir)
- dvar = bb.data.getVar('D', d, 1)
+ dvar = d.getVar('D', 1)
if not dvar:
bb.error("D not defined, unable to package")
return
bb.mkdirhier(dvar)
- packages = bb.data.getVar('PACKAGES', d, 1)
+ packages = d.getVar('PACKAGES', 1)
if not packages:
bb.debug(1, "PACKAGES not defined, nothing to package")
return
@@ -61,11 +61,11 @@ python do_package_tar () {
localdata = bb.data.createCopy(d)
root = "%s/install/%s" % (workdir, pkg)
- bb.data.setVar('ROOT', '', localdata)
- bb.data.setVar('ROOT_%s' % pkg, root, localdata)
- bb.data.setVar('PKG', pkg, localdata)
+ localdata.setVar('ROOT', '')
+ localdata.setVar('ROOT_%s' % pkg, root)
+ localdata.setVar('PKG', pkg)
- overrides = bb.data.getVar('OVERRIDES', localdata)
+ overrides = localdata.getVar('OVERRIDES')
if not overrides:
raise bb.build.FuncFailed('OVERRIDES not defined')
overrides = bb.data.expand(overrides, localdata)
@@ -73,17 +73,17 @@ python do_package_tar () {
bb.data.update_data(localdata)
- root = bb.data.getVar('ROOT', localdata)
+ root = localdata.getVar('ROOT')
bb.mkdirhier(root)
basedir = os.path.dirname(root)
pkgoutdir = outdir
bb.mkdirhier(pkgoutdir)
bb.build.exec_func('package_tar_fn', localdata)
- tarfn = bb.data.getVar('PKGFN', localdata, 1)
+ tarfn = localdata.getVar('PKGFN', 1)
os.chdir(root)
from glob import glob
if not glob('*'):
- bb.note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PKGV', localdata, 1), bb.data.getVar('PKGR', localdata, 1)))
+ bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', 1), localdata.getVar('PKGR', 1)))
continue
ret = os.system("tar -czf %s %s" % (tarfn, '.'))
if ret != 0:
@@ -91,12 +91,12 @@ python do_package_tar () {
}
python () {
- if bb.data.getVar('PACKAGES', d, True) != '':
- deps = (bb.data.getVarFlag('do_package_write_tar', 'depends', d) or "").split()
+ if d.getVar('PACKAGES', True) != '':
+ deps = (d.getVarFlag('do_package_write_tar', 'depends') or "").split()
deps.append('tar-native:do_populate_sysroot')
deps.append('virtual/fakeroot-native:do_populate_sysroot')
bb.data.setVarFlag('do_package_write_tar', 'depends', " ".join(deps), d)
- bb.data.setVarFlag('do_package_write_ipk', 'fakeroot', "1", d)
+ d.setVarFlag('do_package_write_ipk', 'fakeroot', "1")
}
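One quirk worth flagging in the tar variant above: the fakeroot flag is set on do_package_write_ipk rather than do_package_write_tar. The target name predates this commit, which only rewrites the call style, so the mismatch is inherited, not introduced. A corrected line would presumably be:

    d.setVarFlag('do_package_write_tar', 'fakeroot', "1")   # hypothetical fix, not part of this commit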
diff --git a/meta/classes/packagedata.bbclass b/meta/classes/packagedata.bbclass
index bf051feea80..9c7aede3bbb 100644
--- a/meta/classes/packagedata.bbclass
+++ b/meta/classes/packagedata.bbclass
@@ -1,13 +1,13 @@
python read_subpackage_metadata () {
import oe.packagedata
- data = oe.packagedata.read_pkgdata(bb.data.getVar('PN', d, 1), d)
+ data = oe.packagedata.read_pkgdata(d.getVar('PN', 1), d)
for key in data.keys():
- bb.data.setVar(key, data[key], d)
+ d.setVar(key, data[key])
- for pkg in bb.data.getVar('PACKAGES', d, 1).split():
+ for pkg in d.getVar('PACKAGES', 1).split():
sdata = oe.packagedata.read_subpkgdata(pkg, d)
for key in sdata.keys():
- bb.data.setVar(key, sdata[key], d)
+ d.setVar(key, sdata[key])
}
diff --git a/meta/classes/packagehistory.bbclass b/meta/classes/packagehistory.bbclass
index 492bbac218a..2cdf9d8a7c6 100644
--- a/meta/classes/packagehistory.bbclass
+++ b/meta/classes/packagehistory.bbclass
@@ -10,8 +10,8 @@ PKGHIST_DIR = "${TMPDIR}/pkghistory/${BASEPKG_TARGET_SYS}/"
# for comparison when writing future packages
#
python emit_pkghistory() {
- packages = bb.data.getVar('PACKAGES', d, True)
- pkghistdir = bb.data.getVar('PKGHIST_DIR', d, True)
+ packages = d.getVar('PACKAGES', True)
+ pkghistdir = d.getVar('PKGHIST_DIR', True)
# Should check PACKAGES here to see if anything removed
@@ -72,14 +72,14 @@ def check_pkghistory(pkg, pe, pv, pr, lastversion):
def write_pkghistory(pkg, pe, pv, pr, d):
bb.debug(2, "Writing package history")
- pkghistdir = bb.data.getVar('PKGHIST_DIR', d, True)
+ pkghistdir = d.getVar('PKGHIST_DIR', True)
verpath = os.path.join(pkghistdir, pkg, pe, pv, pr)
if not os.path.exists(verpath):
os.makedirs(verpath)
def write_latestlink(pkg, pe, pv, pr, d):
- pkghistdir = bb.data.getVar('PKGHIST_DIR', d, True)
+ pkghistdir = d.getVar('PKGHIST_DIR', True)
def rm_link(path):
try:
diff --git a/meta/classes/patch.bbclass b/meta/classes/patch.bbclass
index 86046e1ff88..b2b6d976c53 100644
--- a/meta/classes/patch.bbclass
+++ b/meta/classes/patch.bbclass
@@ -10,7 +10,7 @@ inherit terminal
python patch_do_patch() {
import oe.patch
- src_uri = (bb.data.getVar('SRC_URI', d, 1) or '').split()
+ src_uri = (d.getVar('SRC_URI', 1) or '').split()
if not src_uri:
return
@@ -20,23 +20,23 @@ python patch_do_patch() {
"git": oe.patch.GitApplyTree,
}
- cls = patchsetmap[bb.data.getVar('PATCHTOOL', d, 1) or 'quilt']
+ cls = patchsetmap[d.getVar('PATCHTOOL', 1) or 'quilt']
resolvermap = {
"noop": oe.patch.NOOPResolver,
"user": oe.patch.UserResolver,
}
- rcls = resolvermap[bb.data.getVar('PATCHRESOLVE', d, 1) or 'user']
+ rcls = resolvermap[d.getVar('PATCHRESOLVE', 1) or 'user']
- s = bb.data.getVar('S', d, 1)
+ s = d.getVar('S', 1)
path = os.getenv('PATH')
- os.putenv('PATH', bb.data.getVar('PATH', d, 1))
+ os.putenv('PATH', d.getVar('PATH', 1))
classes = {}
- workdir = bb.data.getVar('WORKDIR', d, 1)
+ workdir = d.getVar('WORKDIR', 1)
for url in src_uri:
(type, host, path, user, pswd, parm) = bb.decodeurl(url)
@@ -76,13 +76,13 @@ python patch_do_patch() {
pname = os.path.basename(local)
if "mindate" in parm or "maxdate" in parm:
- pn = bb.data.getVar('PN', d, 1)
- srcdate = bb.data.getVar('SRCDATE_%s' % pn, d, 1)
+ pn = d.getVar('PN', 1)
+ srcdate = d.getVar('SRCDATE_%s' % pn, 1)
if not srcdate:
- srcdate = bb.data.getVar('SRCDATE', d, 1)
+ srcdate = d.getVar('SRCDATE', 1)
if srcdate == "now":
- srcdate = bb.data.getVar('DATE', d, 1)
+ srcdate = d.getVar('DATE', 1)
if "maxdate" in parm and parm["maxdate"] < srcdate:
bb.note("Patch '%s' is outdated" % pname)
@@ -94,25 +94,25 @@ python patch_do_patch() {
if "minrev" in parm:
- srcrev = bb.data.getVar('SRCREV', d, 1)
+ srcrev = d.getVar('SRCREV', 1)
if srcrev and srcrev < parm["minrev"]:
bb.note("Patch '%s' applies to later revisions" % pname)
continue
if "maxrev" in parm:
- srcrev = bb.data.getVar('SRCREV', d, 1)
+ srcrev = d.getVar('SRCREV', 1)
if srcrev and srcrev > parm["maxrev"]:
bb.note("Patch '%s' applies to earlier revisions" % pname)
continue
if "rev" in parm:
- srcrev = bb.data.getVar('SRCREV', d, 1)
+ srcrev = d.getVar('SRCREV', 1)
if srcrev and parm["rev"] not in srcrev:
bb.note("Patch '%s' doesn't apply to revision" % pname)
continue
if "notrev" in parm:
- srcrev = bb.data.getVar('SRCREV', d, 1)
+ srcrev = d.getVar('SRCREV', 1)
if srcrev and parm["notrev"] in srcrev:
bb.note("Patch '%s' doesn't apply to revision" % pname)
continue
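patch_do_patch filters each SRC_URI entry on its URL parameters before applying it, comparing mindate/maxdate against SRCDATE and the rev-style parameters against SRCREV. Condensed from the hunks above (parm is the parameter dict returned by bb.decodeurl):

    for url in src_uri:
        (type, host, path, user, pswd, parm) = bb.decodeurl(url)
        srcrev = d.getVar('SRCREV', 1)
        if "minrev" in parm and srcrev and srcrev < parm["minrev"]:
            continue    # patch only applies to later revisions
        if "rev" in parm and srcrev and parm["rev"] not in srcrev:
            continue    # patch targets a different revision
        # otherwise the patch is queued for application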
diff --git a/meta/classes/pkg_distribute.bbclass b/meta/classes/pkg_distribute.bbclass
index 81978e3e3b6..52643a2f904 100644
--- a/meta/classes/pkg_distribute.bbclass
+++ b/meta/classes/pkg_distribute.bbclass
@@ -1,6 +1,6 @@
PKG_DISTRIBUTECOMMAND[func] = "1"
python do_distribute_packages () {
- cmd = bb.data.getVar('PKG_DISTRIBUTECOMMAND', d, 1)
+ cmd = d.getVar('PKG_DISTRIBUTECOMMAND', 1)
if not cmd:
raise bb.build.FuncFailed("Unable to distribute packages, PKG_DISTRIBUTECOMMAND not defined")
bb.build.exec_func('PKG_DISTRIBUTECOMMAND', d)
diff --git a/meta/classes/pkg_metainfo.bbclass b/meta/classes/pkg_metainfo.bbclass
index ac4f73c77b5..1714a535c2d 100644
--- a/meta/classes/pkg_metainfo.bbclass
+++ b/meta/classes/pkg_metainfo.bbclass
@@ -1,5 +1,5 @@
python do_pkg_write_metainfo () {
- deploydir = bb.data.getVar('DEPLOY_DIR', d, 1)
+ deploydir = d.getVar('DEPLOY_DIR', 1)
if not deploydir:
bb.error("DEPLOY_DIR not defined, unable to write package info")
return
@@ -9,11 +9,11 @@ python do_pkg_write_metainfo () {
except OSError:
raise bb.build.FuncFailed("unable to open package-info file for writing.")
- name = bb.data.getVar('PN', d, 1)
- version = bb.data.getVar('PV', d, 1)
- desc = bb.data.getVar('DESCRIPTION', d, 1)
- page = bb.data.getVar('HOMEPAGE', d, 1)
- lic = bb.data.getVar('LICENSE', d, 1)
+ name = d.getVar('PN', 1)
+ version = d.getVar('PV', 1)
+ desc = d.getVar('DESCRIPTION', 1)
+ page = d.getVar('HOMEPAGE', 1)
+ lic = d.getVar('LICENSE', 1)
infofile.write("|| "+ name +" || "+ version + " || "+ desc +" || "+ page +" || "+ lic + " ||\n" )
infofile.close()
diff --git a/meta/classes/populate_sdk_deb.bbclass b/meta/classes/populate_sdk_deb.bbclass
index be7b5520c49..eedc80b26b2 100644
--- a/meta/classes/populate_sdk_deb.bbclass
+++ b/meta/classes/populate_sdk_deb.bbclass
@@ -2,8 +2,8 @@ do_populate_sdk[depends] += "dpkg-native:do_populate_sysroot apt-native:do_popul
do_populate_sdk[recrdeptask] += "do_package_write_deb"
-DEB_SDK_ARCH = "${@[bb.data.getVar('SDK_ARCH', d, 1), "i386"]\
- [bb.data.getVar('SDK_ARCH', d, 1) in \
+DEB_SDK_ARCH = "${@[d.getVar('SDK_ARCH', 1), "i386"]\
+ [d.getVar('SDK_ARCH', 1) in \
["x86", "i486", "i586", "i686", "pentium"]]}"
populate_sdk_post_deb () {
diff --git a/meta/classes/populate_sdk_rpm.bbclass b/meta/classes/populate_sdk_rpm.bbclass
index 9989d0abfdd..829d83a8c5a 100644
--- a/meta/classes/populate_sdk_rpm.bbclass
+++ b/meta/classes/populate_sdk_rpm.bbclass
@@ -127,6 +127,6 @@ python () {
localdata.setVar("DEFAULTTUNE", localdata.getVar("DEFAULTTUNE_virtclass-multilib-" + eext[1], False) or "")
ml_package_archs += localdata.getVar("PACKAGE_ARCHS", True) or ""
#bb.note("ML_PACKAGE_ARCHS %s %s %s" % (eext[1], localdata.getVar("PACKAGE_ARCHS", True) or "(none)", overrides))
- bb.data.setVar('MULTILIB_PACKAGE_ARCHS', ml_package_archs, d)
+ d.setVar('MULTILIB_PACKAGE_ARCHS', ml_package_archs)
}
diff --git a/meta/classes/qemu.bbclass b/meta/classes/qemu.bbclass
index 66dfb2b0d21..1bdd209afe7 100644
--- a/meta/classes/qemu.bbclass
+++ b/meta/classes/qemu.bbclass
@@ -6,7 +6,7 @@
def qemu_target_binary(data):
import bb
- target_arch = bb.data.getVar("TARGET_ARCH", data, 1)
+ target_arch = data.getVar("TARGET_ARCH", 1)
if target_arch in ("i486", "i586", "i686"):
target_arch = "i386"
elif target_arch == "powerpc":
diff --git a/meta/classes/qt4e.bbclass b/meta/classes/qt4e.bbclass
index 670605ba4b3..d955aca5d8e 100644
--- a/meta/classes/qt4e.bbclass
+++ b/meta/classes/qt4e.bbclass
@@ -1,4 +1,4 @@
-DEPENDS_prepend = "${@["qt4-embedded ", ""][(bb.data.getVar('PN', d, 1)[:12] == 'qt4-embedded')]}"
+DEPENDS_prepend = "${@["qt4-embedded ", ""][(d.getVar('PN', 1)[:12] == 'qt4-embedded')]}"
inherit qmake2
diff --git a/meta/classes/qt4x11.bbclass b/meta/classes/qt4x11.bbclass
index ee2cdca3ad3..3f955d48df8 100644
--- a/meta/classes/qt4x11.bbclass
+++ b/meta/classes/qt4x11.bbclass
@@ -1,4 +1,4 @@
-DEPENDS_prepend = "${@["qt4-x11-free ", ""][(bb.data.getVar('BPN', d, True)[:12] == 'qt4-x11-free')]}"
+DEPENDS_prepend = "${@["qt4-x11-free ", ""][(d.getVar('BPN', True)[:12] == 'qt4-x11-free')]}"
inherit qmake2
diff --git a/meta/classes/relocatable.bbclass b/meta/classes/relocatable.bbclass
index e665e317c01..54227a91ca3 100644
--- a/meta/classes/relocatable.bbclass
+++ b/meta/classes/relocatable.bbclass
@@ -8,7 +8,7 @@ def process_dir (directory, d):
import stat
cmd = bb.data.expand('${CHRPATH_BIN}', d)
- tmpdir = bb.data.getVar('TMPDIR', d)
+ tmpdir = d.getVar('TMPDIR')
basedir = bb.data.expand('${base_prefix}', d)
#bb.debug("Checking %s for binaries to process" % directory)
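The relocatable hunk also shows the expansion subtlety the conversion preserves: getVar with no flag, as in the TMPDIR lookup above, returns the value as stored with ${...} references untouched, while a truthy flag resolves them; bb.data.expand remains the way to expand an arbitrary string. In sketch form:

    raw = d.getVar('TMPDIR')                    # as stored, unexpanded
    path = d.getVar('TMPDIR', True)             # ${...} references resolved
    cmd = bb.data.expand('${CHRPATH_BIN}', d)   # expand a literal string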
diff --git a/meta/classes/rootfs_ipk.bbclass b/meta/classes/rootfs_ipk.bbclass
index ffc4a72c06a..3b4c392cecc 100644
--- a/meta/classes/rootfs_ipk.bbclass
+++ b/meta/classes/rootfs_ipk.bbclass
@@ -185,13 +185,13 @@ ipk_insert_feed_uris () {
python () {
- if bb.data.getVar('BUILD_IMAGES_FROM_FEEDS', d, True):
- flags = bb.data.getVarFlag('do_rootfs', 'recrdeptask', d)
+ if d.getVar('BUILD_IMAGES_FROM_FEEDS', True):
+ flags = d.getVarFlag('do_rootfs', 'recrdeptask')
flags = flags.replace("do_package_write_ipk", "")
flags = flags.replace("do_deploy", "")
flags = flags.replace("do_populate_sysroot", "")
- bb.data.setVarFlag('do_rootfs', 'recrdeptask', flags, d)
- bb.data.setVar('OPKG_PREPROCESS_COMMANDS', "package_generate_archlist\nipk_insert_feed_uris", d)
- bb.data.setVar('OPKG_POSTPROCESS_COMMANDS', '', d)
+ d.setVarFlag('do_rootfs', 'recrdeptask', flags)
+ d.setVar('OPKG_PREPROCESS_COMMANDS', "package_generate_archlist\nipk_insert_feed_uris")
+ d.setVar('OPKG_POSTPROCESS_COMMANDS', '')
}
diff --git a/meta/classes/rootfs_rpm.bbclass b/meta/classes/rootfs_rpm.bbclass
index 9f507648867..95e9455e5ce 100644
--- a/meta/classes/rootfs_rpm.bbclass
+++ b/meta/classes/rootfs_rpm.bbclass
@@ -200,14 +200,14 @@ install_all_locales() {
}
python () {
- if bb.data.getVar('BUILD_IMAGES_FROM_FEEDS', d, True):
- flags = bb.data.getVarFlag('do_rootfs', 'recrdeptask', d)
+ if d.getVar('BUILD_IMAGES_FROM_FEEDS', True):
+ flags = d.getVarFlag('do_rootfs', 'recrdeptask')
flags = flags.replace("do_package_write_rpm", "")
flags = flags.replace("do_deploy", "")
flags = flags.replace("do_populate_sysroot", "")
- bb.data.setVarFlag('do_rootfs', 'recrdeptask', flags, d)
- bb.data.setVar('RPM_PREPROCESS_COMMANDS', '', d)
- bb.data.setVar('RPM_POSTPROCESS_COMMANDS', '', d)
+ d.setVarFlag('do_rootfs', 'recrdeptask', flags)
+ d.setVar('RPM_PREPROCESS_COMMANDS', '')
+ d.setVar('RPM_POSTPROCESS_COMMANDS', '')
ml_package_archs = ""
ml_prefix_list = ""
@@ -224,6 +224,6 @@ python () {
ml_package_archs += " " + package_archs
ml_prefix_list += " " + eext[1]
#bb.note("ML_PACKAGE_ARCHS %s %s %s" % (eext[1], localdata.getVar("PACKAGE_ARCHS", True) or "(none)", overrides))
- bb.data.setVar('MULTILIB_PACKAGE_ARCHS', ml_package_archs, d)
- bb.data.setVar('MULTILIB_PREFIX_LIST', ml_prefix_list, d)
+ d.setVar('MULTILIB_PACKAGE_ARCHS', ml_package_archs)
+ d.setVar('MULTILIB_PREFIX_LIST', ml_prefix_list)
}
diff --git a/meta/classes/sanity.bbclass b/meta/classes/sanity.bbclass
index 838448f33ca..53b82d73e3f 100644
--- a/meta/classes/sanity.bbclass
+++ b/meta/classes/sanity.bbclass
@@ -14,7 +14,7 @@ def raise_sanity_error(msg):
def check_conf_exists(fn, data):
bbpath = []
fn = bb.data.expand(fn, data)
- vbbpath = bb.data.getVar("BBPATH", data)
+ vbbpath = data.getVar("BBPATH")
if vbbpath:
bbpath += vbbpath.split(":")
for p in bbpath:
@@ -87,12 +87,12 @@ def check_connectivity(d):
# URI's to check can be set in the CONNECTIVITY_CHECK_URIS variable
# using the same syntax as for SRC_URI. If the variable is not set
# the check is skipped
- test_uris = (bb.data.getVar('CONNECTIVITY_CHECK_URIS', d, True) or "").split()
+ test_uris = (d.getVar('CONNECTIVITY_CHECK_URIS', True) or "").split()
retval = ""
# Only check connectivity if network enabled and the
# CONNECTIVITY_CHECK_URIS are set
- network_enabled = not bb.data.getVar('BB_NO_NETWORK', d, True)
+ network_enabled = not d.getVar('BB_NO_NETWORK', True)
check_enabled = len(test_uris)
    # Take a copy of the data store and unset MIRRORS and PREMIRRORS
data = bb.data.createCopy(d)
@@ -105,7 +105,7 @@ def check_connectivity(d):
except Exception:
# Allow the message to be configured so that users can be
# pointed to a support mechanism.
- msg = bb.data.getVar('CONNECTIVITY_CHECK_MSG', data, True) or ""
+ msg = data.getVar('CONNECTIVITY_CHECK_MSG', True) or ""
if len(msg) == 0:
msg = "Failed to fetch test data from the network. Please ensure your network is configured correctly.\n"
retval = msg
@@ -450,7 +450,7 @@ def check_sanity(e):
addhandler check_sanity_eventhandler
python check_sanity_eventhandler() {
- if bb.event.getName(e) == "ConfigParsed" and bb.data.getVar("BB_WORKERCONTEXT", e.data, True) != "1":
+ if bb.event.getName(e) == "ConfigParsed" and e.data.getVar("BB_WORKERCONTEXT", True) != "1":
check_sanity(e)
return
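Event handlers are the one place the datastore is not a bare d: it travels on the event object, so the sanity handler reads e.data. The converted shape, restated whole from the hunk above:

    python check_sanity_eventhandler() {
        if bb.event.getName(e) == "ConfigParsed" and \
           e.data.getVar("BB_WORKERCONTEXT", True) != "1":
            check_sanity(e)
        return
    }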
diff --git a/meta/classes/siteconfig.bbclass b/meta/classes/siteconfig.bbclass
index 0813c2543ec..880dcad1f38 100644
--- a/meta/classes/siteconfig.bbclass
+++ b/meta/classes/siteconfig.bbclass
@@ -2,7 +2,7 @@ python siteconfig_do_siteconfig () {
shared_state = sstate_state_fromvars(d)
if shared_state['name'] != 'populate-sysroot':
return
- if not os.path.isdir(os.path.join(bb.data.getVar('FILE_DIRNAME', d, 1), 'site_config')):
+ if not os.path.isdir(os.path.join(d.getVar('FILE_DIRNAME', 1), 'site_config')):
bb.debug(1, "No site_config directory, skipping do_siteconfig")
return
bb.build.exec_func('do_siteconfig_gencache', d)
diff --git a/meta/classes/siteinfo.bbclass b/meta/classes/siteinfo.bbclass
index 02294c4d2ef..604c6ba6b03 100644
--- a/meta/classes/siteinfo.bbclass
+++ b/meta/classes/siteinfo.bbclass
@@ -130,7 +130,7 @@ def siteinfo_get_files(d, no_cache = False):
if no_cache: return sitefiles
# Now check for siteconfig cache files
- path_siteconfig = bb.data.getVar('SITECONFIG_SYSROOTCACHE', d, 1)
+ path_siteconfig = d.getVar('SITECONFIG_SYSROOTCACHE', 1)
if os.path.isdir(path_siteconfig):
for i in os.listdir(path_siteconfig):
filename = os.path.join(path_siteconfig, i)
diff --git a/meta/classes/sourcepkg.bbclass b/meta/classes/sourcepkg.bbclass
index 64e6d7c8600..38edfe4e2ed 100644
--- a/meta/classes/sourcepkg.bbclass
+++ b/meta/classes/sourcepkg.bbclass
@@ -6,12 +6,12 @@ DISTRO ?= "openembedded"
def get_src_tree(d):
- workdir = bb.data.getVar('WORKDIR', d, 1)
+ workdir = d.getVar('WORKDIR', 1)
if not workdir:
bb.error("WORKDIR not defined, unable to find source tree.")
return
- s = bb.data.getVar('S', d, 0)
+ s = d.getVar('S', 0)
if not s:
bb.error("S not defined, unable to find source tree.")
return
@@ -55,8 +55,8 @@ sourcepkg_do_archive_bb() {
python sourcepkg_do_dumpdata() {
- workdir = bb.data.getVar('WORKDIR', d, 1)
- distro = bb.data.getVar('DISTRO', d, 1)
+ workdir = d.getVar('WORKDIR', 1)
+ distro = d.getVar('DISTRO', 1)
s_tree = get_src_tree(d)
openembeddeddir = os.path.join(workdir, s_tree, distro)
dumpfile = os.path.join(openembeddeddir, bb.data.expand("${P}-${PR}.showdata.dump",d))
@@ -73,8 +73,8 @@ python sourcepkg_do_dumpdata() {
bb.data.emit_env(f, d, True)
    # emit the metadata which isn't valid shell
for e in d.keys():
- if bb.data.getVarFlag(e, 'python', d):
- f.write("\npython %s () {\n%s}\n" % (e, bb.data.getVar(e, d, 1)))
+ if d.getVarFlag(e, 'python'):
+ f.write("\npython %s () {\n%s}\n" % (e, d.getVar(e, 1)))
f.close()
}
diff --git a/meta/classes/src_distribute.bbclass b/meta/classes/src_distribute.bbclass
index fbfbdf0094e..2069d652a39 100644
--- a/meta/classes/src_distribute.bbclass
+++ b/meta/classes/src_distribute.bbclass
@@ -3,12 +3,12 @@ python do_distribute_sources () {
l = bb.data.createCopy(d)
bb.data.update_data(l)
- sources_dir = bb.data.getVar('SRC_DISTRIBUTEDIR', d, 1)
- src_uri = bb.data.getVar('SRC_URI', d, 1).split()
+ sources_dir = d.getVar('SRC_DISTRIBUTEDIR', 1)
+ src_uri = d.getVar('SRC_URI', 1).split()
fetcher = bb.fetch2.Fetch(src_uri, d)
ud = fetcher.ud
- licenses = bb.data.getVar('LICENSE', d, 1).replace('&', '|')
+ licenses = d.getVar('LICENSE', 1).replace('&', '|')
licenses = licenses.replace('(', '').replace(')', '')
clean_licenses = ""
for x in licenses.split():
@@ -20,20 +20,20 @@ python do_distribute_sources () {
for license in clean_licenses.split('|'):
for url in ud.values():
- cmd = bb.data.getVar('SRC_DISTRIBUTECOMMAND', d, 1)
+ cmd = d.getVar('SRC_DISTRIBUTECOMMAND', 1)
if not cmd:
raise bb.build.FuncFailed("Unable to distribute sources, SRC_DISTRIBUTECOMMAND not defined")
url.setup_localpath(d)
- bb.data.setVar('SRC', url.localpath, d)
+ d.setVar('SRC', url.localpath)
if url.type == 'file':
if url.basename == '*':
import os.path
dest_dir = os.path.basename(os.path.dirname(os.path.abspath(url.localpath)))
- bb.data.setVar('DEST', "%s_%s/" % (bb.data.getVar('PF', d, 1), dest_dir), d)
+ d.setVar('DEST', "%s_%s/" % (d.getVar('PF', 1), dest_dir))
else:
- bb.data.setVar('DEST', "%s_%s" % (bb.data.getVar('PF', d, 1), url.basename), d)
+ d.setVar('DEST', "%s_%s" % (d.getVar('PF', 1), url.basename))
else:
- bb.data.setVar('DEST', '', d)
+ d.setVar('DEST', '')
bb.data.setVar('SRC_DISTRIBUTEDIR', "%s/%s" % (sources_dir, license), d)
bb.build.exec_func('SRC_DISTRIBUTECOMMAND', d)
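
The function above also relies on the copy-and-override pattern that recurs in sstate.bbclass below: copy the datastore, re-apply overrides on the copy, then set variables on the copy only. A sketch with placeholder values (the path and URI are invented for illustration):

    localdata = bb.data.createCopy(d)      # copy helpers stay module-level
    bb.data.update_data(localdata)         # re-apply overrides on the copy
    localdata.setVar('DL_DIR', '/example/downloads')    # placeholder path
    localdata.setVar('SRC_URI', 'file://example.tgz')   # placeholder URI
    # the original datastore d is left untouched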
diff --git a/meta/classes/sstate.bbclass b/meta/classes/sstate.bbclass
index 6abf55bb140..a777c79bb38 100644
--- a/meta/classes/sstate.bbclass
+++ b/meta/classes/sstate.bbclass
@@ -20,7 +20,7 @@ SSTATEPOSTINSTFUNCS ?= ""
python () {
if bb.data.inherits_class('native', d):
- bb.data.setVar('SSTATE_PKGARCH', bb.data.getVar('BUILD_ARCH', d), d)
+ d.setVar('SSTATE_PKGARCH', d.getVar('BUILD_ARCH'))
elif bb.data.inherits_class('cross', d):
bb.data.setVar('SSTATE_PKGARCH', bb.data.expand("${BUILD_ARCH}_${TUNE_PKGARCH}", d), d)
bb.data.setVar('SSTATE_MANMACH', bb.data.expand("${BUILD_ARCH}_${MACHINE}", d), d)
@@ -37,19 +37,19 @@ python () {
# reused if we manipulate the paths
if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('sdk', d) or bb.data.inherits_class('crosssdk', d):
scan_cmd = "grep -Irl ${STAGING_DIR} ${SSTATE_BUILDDIR}"
- bb.data.setVar('SSTATE_SCAN_CMD', scan_cmd, d)
+ d.setVar('SSTATE_SCAN_CMD', scan_cmd)
- unique_tasks = set((bb.data.getVar('SSTATETASKS', d, True) or "").split())
+ unique_tasks = set((d.getVar('SSTATETASKS', True) or "").split())
d.setVar('SSTATETASKS', " ".join(unique_tasks))
namemap = []
for task in unique_tasks:
- namemap.append(bb.data.getVarFlag(task, 'sstate-name', d))
- funcs = bb.data.getVarFlag(task, 'prefuncs', d) or ""
+ namemap.append(d.getVarFlag(task, 'sstate-name'))
+ funcs = d.getVarFlag(task, 'prefuncs') or ""
funcs = "sstate_task_prefunc " + funcs
- bb.data.setVarFlag(task, 'prefuncs', funcs, d)
- funcs = bb.data.getVarFlag(task, 'postfuncs', d) or ""
+ d.setVarFlag(task, 'prefuncs', funcs)
+ funcs = d.getVarFlag(task, 'postfuncs') or ""
funcs = funcs + " sstate_task_postfunc"
- bb.data.setVarFlag(task, 'postfuncs', funcs, d)
+ d.setVarFlag(task, 'postfuncs', funcs)
d.setVar('SSTATETASKNAMES', " ".join(namemap))
}
@@ -65,18 +65,18 @@ def sstate_init(name, task, d):
def sstate_state_fromvars(d, task = None):
if task is None:
- task = bb.data.getVar('BB_CURRENTTASK', d, True)
+ task = d.getVar('BB_CURRENTTASK', True)
if not task:
bb.fatal("sstate code running without task context?!")
task = task.replace("_setscene", "")
- name = bb.data.expand(bb.data.getVarFlag("do_" + task, 'sstate-name', d), d)
- inputs = (bb.data.expand(bb.data.getVarFlag("do_" + task, 'sstate-inputdirs', d) or "", d)).split()
- outputs = (bb.data.expand(bb.data.getVarFlag("do_" + task, 'sstate-outputdirs', d) or "", d)).split()
- plaindirs = (bb.data.expand(bb.data.getVarFlag("do_" + task, 'sstate-plaindirs', d) or "", d)).split()
- lockfiles = (bb.data.expand(bb.data.getVarFlag("do_" + task, 'sstate-lockfile', d) or "", d)).split()
- lockfilesshared = (bb.data.expand(bb.data.getVarFlag("do_" + task, 'sstate-lockfile-shared', d) or "", d)).split()
- interceptfuncs = (bb.data.expand(bb.data.getVarFlag("do_" + task, 'sstate-interceptfuncs', d) or "", d)).split()
+ name = bb.data.expand(d.getVarFlag("do_" + task, 'sstate-name'), d)
+ inputs = (bb.data.expand(d.getVarFlag("do_" + task, 'sstate-inputdirs') or "", d)).split()
+ outputs = (bb.data.expand(d.getVarFlag("do_" + task, 'sstate-outputdirs') or "", d)).split()
+ plaindirs = (bb.data.expand(d.getVarFlag("do_" + task, 'sstate-plaindirs') or "", d)).split()
+ lockfiles = (bb.data.expand(d.getVarFlag("do_" + task, 'sstate-lockfile') or "", d)).split()
+ lockfilesshared = (bb.data.expand(d.getVarFlag("do_" + task, 'sstate-lockfile-shared') or "", d)).split()
+ interceptfuncs = (bb.data.expand(d.getVarFlag("do_" + task, 'sstate-interceptfuncs') or "", d)).split()
if not name or len(inputs) != len(outputs):
bb.fatal("sstate variables not setup correctly?!")
@@ -139,7 +139,7 @@ def sstate_install(ss, d):
f.write(di + "\n")
f.close()
- for postinst in (bb.data.getVar('SSTATEPOSTINSTFUNCS', d, True) or '').split():
+ for postinst in (d.getVar('SSTATEPOSTINSTFUNCS', True) or '').split():
bb.build.exec_func(postinst, d)
for lock in locks:
@@ -156,7 +156,7 @@ def sstate_installpkg(ss, d):
oe.path.remove(dir)
sstateinst = bb.data.expand("${WORKDIR}/sstate-install-%s/" % ss['name'], d)
- sstatepkg = bb.data.getVar('SSTATE_PKG', d, True) + '_' + ss['name'] + ".tgz"
+ sstatepkg = d.getVar('SSTATE_PKG', True) + '_' + ss['name'] + ".tgz"
if not os.path.exists(sstatepkg):
pstaging_fetch(sstatepkg, d)
@@ -167,16 +167,16 @@ def sstate_installpkg(ss, d):
sstate_clean(ss, d)
- bb.data.setVar('SSTATE_INSTDIR', sstateinst, d)
- bb.data.setVar('SSTATE_PKG', sstatepkg, d)
+ d.setVar('SSTATE_INSTDIR', sstateinst)
+ d.setVar('SSTATE_PKG', sstatepkg)
bb.build.exec_func('sstate_unpack_package', d)
# Fixup hardcoded paths
fixmefn = sstateinst + "fixmepath"
if os.path.isfile(fixmefn):
- staging = bb.data.getVar('STAGING_DIR', d, True)
- staging_target = bb.data.getVar('STAGING_DIR_TARGET', d, True)
- staging_host = bb.data.getVar('STAGING_DIR_HOST', d, True)
+ staging = d.getVar('STAGING_DIR', True)
+ staging_target = d.getVar('STAGING_DIR_TARGET', True)
+ staging_host = d.getVar('STAGING_DIR_HOST', True)
fixmefd = open(fixmefn, "r")
fixmefiles = fixmefd.readlines()
fixmefd.close()
@@ -206,13 +206,13 @@ def sstate_installpkg(ss, d):
def sstate_clean_cachefile(ss, d):
import oe.path
- sstatepkgdir = bb.data.getVar('SSTATE_DIR', d, True)
- sstatepkgfile = sstatepkgdir + '/' + bb.data.getVar('SSTATE_PKGSPEC', d, True) + "*_" + ss['name'] + ".tgz*"
+ sstatepkgdir = d.getVar('SSTATE_DIR', True)
+ sstatepkgfile = sstatepkgdir + '/' + d.getVar('SSTATE_PKGSPEC', True) + "*_" + ss['name'] + ".tgz*"
bb.note("Removing %s" % sstatepkgfile)
oe.path.remove(sstatepkgfile)
def sstate_clean_cachefiles(d):
- for task in (bb.data.getVar('SSTATETASKS', d, True) or "").split():
+ for task in (d.getVar('SSTATETASKS', True) or "").split():
ss = sstate_state_fromvars(d, task[3:])
sstate_clean_cachefile(ss, d)
@@ -274,10 +274,10 @@ CLEANFUNCS += "sstate_cleanall"
python sstate_cleanall() {
import fnmatch
- bb.note("Removing shared state for package %s" % bb.data.getVar('PN', d, True))
+ bb.note("Removing shared state for package %s" % d.getVar('PN', True))
- manifest_dir = bb.data.getVar('SSTATE_MANIFESTS', d, True)
- manifest_prefix = bb.data.getVar("SSTATE_MANFILEPREFIX", d, True)
+ manifest_dir = d.getVar('SSTATE_MANIFESTS', True)
+ manifest_prefix = d.getVar("SSTATE_MANFILEPREFIX", True)
manifest_pattern = os.path.basename(manifest_prefix) + ".*"
if not os.path.exists(manifest_dir):
@@ -298,7 +298,7 @@ python sstate_cleanall() {
def sstate_hardcode_path(d):
# Need to remove hardcoded paths and fix these when we install the
# staging packages.
- sstate_scan_cmd = bb.data.getVar('SSTATE_SCAN_CMD', d, True)
+ sstate_scan_cmd = d.getVar('SSTATE_SCAN_CMD', True)
p = os.popen("%s" % sstate_scan_cmd)
file_list = p.read()
@@ -306,10 +306,10 @@ def sstate_hardcode_path(d):
p.close()
return
- staging = bb.data.getVar('STAGING_DIR', d, True)
- staging_target = bb.data.getVar('STAGING_DIR_TARGET', d, True)
- staging_host = bb.data.getVar('STAGING_DIR_HOST', d, True)
- sstate_builddir = bb.data.getVar('SSTATE_BUILDDIR', d, True)
+ staging = d.getVar('STAGING_DIR', True)
+ staging_target = d.getVar('STAGING_DIR_TARGET', True)
+ staging_host = d.getVar('STAGING_DIR_HOST', True)
+ sstate_builddir = d.getVar('SSTATE_BUILDDIR', True)
for i in file_list.split('\n'):
if not i:
@@ -349,10 +349,10 @@ def sstate_package(ss, d):
os.remove(path)
os.symlink(base, path)
- tmpdir = bb.data.getVar('TMPDIR', d, True)
+ tmpdir = d.getVar('TMPDIR', True)
sstatebuild = bb.data.expand("${WORKDIR}/sstate-build-%s/" % ss['name'], d)
- sstatepkg = bb.data.getVar('SSTATE_PKG', d, True) + '_'+ ss['name'] + ".tgz"
+ sstatepkg = d.getVar('SSTATE_PKG', True) + '_'+ ss['name'] + ".tgz"
bb.mkdirhier(sstatebuild)
bb.mkdirhier(os.path.dirname(sstatepkg))
for state in ss['dirs']:
@@ -369,15 +369,15 @@ def sstate_package(ss, d):
bb.debug(2, "Preparing tree %s for packaging at %s" % (state[1], sstatebuild + state[0]))
oe.path.copytree(state[1], sstatebuild + state[0])
- workdir = bb.data.getVar('WORKDIR', d, True)
+ workdir = d.getVar('WORKDIR', True)
for plain in ss['plaindirs']:
pdir = plain.replace(workdir, sstatebuild)
bb.mkdirhier(plain)
bb.mkdirhier(pdir)
oe.path.copytree(plain, pdir)
- bb.data.setVar('SSTATE_BUILDDIR', sstatebuild, d)
- bb.data.setVar('SSTATE_PKG', sstatepkg, d)
+ d.setVar('SSTATE_BUILDDIR', sstatebuild)
+ d.setVar('SSTATE_PKG', sstatepkg)
sstate_hardcode_path(d)
bb.build.exec_func('sstate_create_package', d)
@@ -389,7 +389,7 @@ def pstaging_fetch(sstatepkg, d):
import bb.fetch2
# Only try and fetch if the user has configured a mirror
- mirrors = bb.data.getVar('SSTATE_MIRRORS', d, True)
+ mirrors = d.getVar('SSTATE_MIRRORS', True)
if not mirrors:
return
@@ -402,9 +402,9 @@ def pstaging_fetch(sstatepkg, d):
bb.mkdirhier(dldir)
- bb.data.setVar('DL_DIR', dldir, localdata)
- bb.data.setVar('PREMIRRORS', mirrors, localdata)
- bb.data.setVar('SRC_URI', srcuri, localdata)
+ localdata.setVar('DL_DIR', dldir)
+ localdata.setVar('PREMIRRORS', mirrors)
+ localdata.setVar('SRC_URI', srcuri)
# Try a fetch from the sstate mirror, if it fails just return and
# we will build the package
@@ -493,15 +493,15 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d):
else:
bb.debug(2, "SState: Looked for but didn't find file %s" % sstatefile)
- mirrors = bb.data.getVar("SSTATE_MIRRORS", d, True)
+ mirrors = d.getVar("SSTATE_MIRRORS", True)
if mirrors:
# Copy the data object and override DL_DIR and SRC_URI
localdata = bb.data.createCopy(d)
bb.data.update_data(localdata)
dldir = bb.data.expand("${SSTATE_DIR}", localdata)
- bb.data.setVar('DL_DIR', dldir, localdata)
- bb.data.setVar('PREMIRRORS', mirrors, localdata)
+ localdata.setVar('DL_DIR', dldir)
+ localdata.setVar('PREMIRRORS', mirrors)
bb.debug(2, "SState using premirror of: %s" % mirrors)
@@ -513,7 +513,7 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d):
sstatefile = sstatefile.replace("${BB_TASKHASH}", sq_hash[task])
srcuri = "file://" + os.path.basename(sstatefile)
- bb.data.setVar('SRC_URI', srcuri, localdata)
+ localdata.setVar('SRC_URI', srcuri)
bb.debug(2, "SState: Attempting to fetch %s" % srcuri)
try:
diff --git a/meta/classes/staging.bbclass b/meta/classes/staging.bbclass
index 04d51ede428..831840456b6 100644
--- a/meta/classes/staging.bbclass
+++ b/meta/classes/staging.bbclass
@@ -84,7 +84,7 @@ python do_populate_sysroot () {
#
bb.build.exec_func("sysroot_stage_all", d)
- for f in (bb.data.getVar('SYSROOT_PREPROCESS_FUNCS', d, True) or '').split():
+ for f in (d.getVar('SYSROOT_PREPROCESS_FUNCS', True) or '').split():
bb.build.exec_func(f, d)
}
@@ -100,8 +100,8 @@ python do_populate_sysroot_setscene () {
addtask do_populate_sysroot_setscene
python () {
- if bb.data.getVar('do_stage', d, True) is not None:
- bb.fatal("Legacy staging found for %s as it has a do_stage function. This will need conversion to a do_install or often simply removal to work with Poky" % bb.data.getVar("FILE", d, True))
+ if d.getVar('do_stage', True) is not None:
+ bb.fatal("Legacy staging found for %s as it has a do_stage function. This will need conversion to a do_install or often simply removal to work with Poky" % d.getVar("FILE", True))
}
diff --git a/meta/classes/syslinux.bbclass b/meta/classes/syslinux.bbclass
index fb7597470be..0cc6b851bca 100644
--- a/meta/classes/syslinux.bbclass
+++ b/meta/classes/syslinux.bbclass
@@ -8,12 +8,12 @@ python build_syslinux_menu () {
import copy
import sys
- workdir = bb.data.getVar('WORKDIR', d, 1)
+ workdir = d.getVar('WORKDIR', 1)
if not workdir:
bb.error("WORKDIR is not defined")
return
- labels = bb.data.getVar('LABELS', d, 1)
+ labels = d.getVar('LABELS', 1)
if not labels:
bb.debug(1, "LABELS not defined, nothing to do")
return
@@ -22,7 +22,7 @@ python build_syslinux_menu () {
bb.debug(1, "No labels, nothing to do")
return
- cfile = bb.data.getVar('SYSLINUXMENU', d, 1)
+ cfile = d.getVar('SYSLINUXMENU', 1)
if not cfile:
raise bb.build.FuncFailed('Unable to read SYSLINUXMENU')
@@ -45,15 +45,15 @@ python build_syslinux_menu () {
from copy import deepcopy
localdata = deepcopy(d)
- overrides = bb.data.getVar('OVERRIDES', localdata)
+ overrides = localdata.getVar('OVERRIDES')
if not overrides:
raise bb.build.FuncFailed('OVERRIDES not defined')
overrides = bb.data.expand(overrides, localdata)
- bb.data.setVar('OVERRIDES', label + ':' + overrides, localdata)
+ localdata.setVar('OVERRIDES', label + ':' + overrides)
bb.data.update_data(localdata)
- usage = bb.data.getVar('USAGE', localdata, 1)
+ usage = localdata.getVar('USAGE', 1)
cfgfile.write(' \x0F\x30\x3E%16s\x0F\x30\x37: ' % (label))
cfgfile.write('%s\n' % (usage))
@@ -67,12 +67,12 @@ python build_syslinux_cfg () {
import copy
import sys
- workdir = bb.data.getVar('WORKDIR', d, 1)
+ workdir = d.getVar('WORKDIR', 1)
if not workdir:
bb.error("WORKDIR not defined, unable to package")
return
- labels = bb.data.getVar('LABELS', d, 1)
+ labels = d.getVar('LABELS', 1)
if not labels:
bb.debug(1, "LABELS not defined, nothing to do")
return
@@ -81,7 +81,7 @@ python build_syslinux_cfg () {
bb.debug(1, "No labels, nothing to do")
return
- cfile = bb.data.getVar('SYSLINUXCFG', d, 1)
+ cfile = d.getVar('SYSLINUXCFG', 1)
if not cfile:
raise bb.build.FuncFailed('Unable to read SYSLINUXCFG')
@@ -98,7 +98,7 @@ python build_syslinux_cfg () {
cfgfile.write('# Automatically created by OE\n')
- opts = bb.data.getVar('SYSLINUX_OPTS', d, 1)
+ opts = d.getVar('SYSLINUX_OPTS', 1)
if opts:
for opt in opts.split(';'):
@@ -107,7 +107,7 @@ python build_syslinux_cfg () {
cfgfile.write('ALLOWOPTIONS 1\n');
cfgfile.write('DEFAULT %s\n' % (labels.split()[0]))
- timeout = bb.data.getVar('SYSLINUX_TIMEOUT', d, 1)
+ timeout = d.getVar('SYSLINUX_TIMEOUT', 1)
if timeout:
cfgfile.write('TIMEOUT %s\n' % timeout)
@@ -116,29 +116,29 @@ python build_syslinux_cfg () {
cfgfile.write('PROMPT 1\n')
- menu = bb.data.getVar('AUTO_SYSLINUXMENU', d, 1)
+ menu = d.getVar('AUTO_SYSLINUXMENU', 1)
# This is ugly. My bad.
if menu:
bb.build.exec_func('build_syslinux_menu', d)
- mfile = bb.data.getVar('SYSLINUXMENU', d, 1)
+ mfile = d.getVar('SYSLINUXMENU', 1)
cfgfile.write('DISPLAY %s\n' % (mfile.split('/')[-1]) )
for label in labels.split():
localdata = bb.data.createCopy(d)
- overrides = bb.data.getVar('OVERRIDES', localdata, True)
+ overrides = localdata.getVar('OVERRIDES', True)
if not overrides:
raise bb.build.FuncFailed('OVERRIDES not defined')
- bb.data.setVar('OVERRIDES', label + ':' + overrides, localdata)
+ localdata.setVar('OVERRIDES', label + ':' + overrides)
bb.data.update_data(localdata)
cfgfile.write('LABEL %s\nKERNEL vmlinuz\n' % (label))
- append = bb.data.getVar('APPEND', localdata, 1)
- initrd = bb.data.getVar('INITRD', localdata, 1)
+ append = localdata.getVar('APPEND', 1)
+ initrd = localdata.getVar('INITRD', 1)
if append:
cfgfile.write('APPEND ')
diff --git a/meta/classes/task.bbclass b/meta/classes/task.bbclass
index 4edd7048297..516d1a1c209 100644
--- a/meta/classes/task.bbclass
+++ b/meta/classes/task.bbclass
@@ -17,7 +17,7 @@ PACKAGE_ARCH = "all"
# to the list. Their dependencies (RRECOMMENDS) are handled as usual
# by package_depchains in a following step.
python () {
- packages = bb.data.getVar('PACKAGES', d, 1).split()
+ packages = d.getVar('PACKAGES', 1).split()
genpackages = []
for pkg in packages:
for postfix in ['-dbg', '-dev']:
diff --git a/meta/classes/toolchain-scripts.bbclass b/meta/classes/toolchain-scripts.bbclass
index 875efed0202..c936a27a499 100644
--- a/meta/classes/toolchain-scripts.bbclass
+++ b/meta/classes/toolchain-scripts.bbclass
@@ -137,8 +137,8 @@ toolchain_create_sdk_version () {
}
python __anonymous () {
- deps = bb.data.getVarFlag('do_configure', 'depends', d) or ""
- for dep in (bb.data.getVar('TOOLCHAIN_NEED_CONFIGSITE_CACHE', d, True) or "").split():
+ deps = d.getVarFlag('do_configure', 'depends') or ""
+ for dep in (d.getVar('TOOLCHAIN_NEED_CONFIGSITE_CACHE', True) or "").split():
deps += " %s:do_populate_sysroot" % dep
- bb.data.setVarFlag('do_configure', 'depends', deps, d)
+ d.setVarFlag('do_configure', 'depends', deps)
}
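
Varflags migrate the same way as variables; a sketch assembled from the hunk above (the appended dependency is a hypothetical example, not taken from the patch):

    deps = d.getVarFlag('do_configure', 'depends') or ""
    deps += " example-recipe:do_populate_sysroot"   # hypothetical dependency
    d.setVarFlag('do_configure', 'depends', deps)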
diff --git a/meta/classes/update-alternatives.bbclass b/meta/classes/update-alternatives.bbclass
index ba812210a74..e5ba6550d7d 100644
--- a/meta/classes/update-alternatives.bbclass
+++ b/meta/classes/update-alternatives.bbclass
@@ -78,38 +78,38 @@ fi
}
def update_alternatives_after_parse(d):
- if bb.data.getVar('ALTERNATIVE_LINKS', d) != None:
- doinstall = bb.data.getVar('do_install', d, 0)
- doinstall += bb.data.getVar('update_alternatives_batch_doinstall', d, 0)
- bb.data.setVar('do_install', doinstall, d)
+ if d.getVar('ALTERNATIVE_LINKS') != None:
+ doinstall = d.getVar('do_install', 0)
+ doinstall += d.getVar('update_alternatives_batch_doinstall', 0)
+ d.setVar('do_install', doinstall)
return
- if bb.data.getVar('ALTERNATIVE_NAME', d) == None:
- raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_NAME" % bb.data.getVar('FILE', d)
- if bb.data.getVar('ALTERNATIVE_PATH', d) == None:
- raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_PATH" % bb.data.getVar('FILE', d)
+ if d.getVar('ALTERNATIVE_NAME') == None:
+ raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_NAME" % d.getVar('FILE')
+ if d.getVar('ALTERNATIVE_PATH') == None:
+ raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_PATH" % d.getVar('FILE')
python __anonymous() {
update_alternatives_after_parse(d)
}
python populate_packages_prepend () {
- pkg = bb.data.getVar('PN', d, 1)
+ pkg = d.getVar('PN', 1)
bb.note('adding update-alternatives calls to postinst/postrm for %s' % pkg)
- postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) or bb.data.getVar('pkg_postinst', d, 1)
+ postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1)
if not postinst:
postinst = '#!/bin/sh\n'
- if bb.data.getVar('ALTERNATIVE_LINKS', d) != None:
- postinst += bb.data.getVar('update_alternatives_batch_postinst', d, 1)
+ if d.getVar('ALTERNATIVE_LINKS') != None:
+ postinst += d.getVar('update_alternatives_batch_postinst', 1)
else:
- postinst += bb.data.getVar('update_alternatives_postinst', d, 1)
- bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)
- postrm = bb.data.getVar('pkg_postrm_%s' % pkg, d, 1) or bb.data.getVar('pkg_postrm', d, 1)
+ postinst += d.getVar('update_alternatives_postinst', 1)
+ d.setVar('pkg_postinst_%s' % pkg, postinst)
+ postrm = d.getVar('pkg_postrm_%s' % pkg, 1) or d.getVar('pkg_postrm', 1)
if not postrm:
postrm = '#!/bin/sh\n'
- if bb.data.getVar('ALTERNATIVE_LINKS', d) != None:
- postrm += bb.data.getVar('update_alternatives_batch_postrm', d, 1)
+ if d.getVar('ALTERNATIVE_LINKS') != None:
+ postrm += d.getVar('update_alternatives_batch_postrm', 1)
else:
- postrm += bb.data.getVar('update_alternatives_postrm', d, 1)
- bb.data.setVar('pkg_postrm_%s' % pkg, postrm, d)
+ postrm += d.getVar('update_alternatives_postrm', 1)
+ d.setVar('pkg_postrm_%s' % pkg, postrm)
}
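
Reassembled from the + lines above, the converted postinst handling reads as one piece:

    pkg = d.getVar('PN', 1)
    postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1)
    if not postinst:
        postinst = '#!/bin/sh\n'        # start a fresh script body
    if d.getVar('ALTERNATIVE_LINKS') != None:
        postinst += d.getVar('update_alternatives_batch_postinst', 1)
    else:
        postinst += d.getVar('update_alternatives_postinst', 1)
    d.setVar('pkg_postinst_%s' % pkg, postinst)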
diff --git a/meta/classes/update-rc.d.bbclass b/meta/classes/update-rc.d.bbclass
index 7e4dda7cbb7..492c5fba2d4 100644
--- a/meta/classes/update-rc.d.bbclass
+++ b/meta/classes/update-rc.d.bbclass
@@ -30,11 +30,11 @@ update-rc.d $D ${INITSCRIPT_NAME} remove
def update_rc_after_parse(d):
- if bb.data.getVar('INITSCRIPT_PACKAGES', d) == None:
- if bb.data.getVar('INITSCRIPT_NAME', d) == None:
- raise bb.build.FuncFailed, "%s inherits update-rc.d but doesn't set INITSCRIPT_NAME" % bb.data.getVar('FILE', d)
- if bb.data.getVar('INITSCRIPT_PARAMS', d) == None:
- raise bb.build.FuncFailed, "%s inherits update-rc.d but doesn't set INITSCRIPT_PARAMS" % bb.data.getVar('FILE', d)
+ if d.getVar('INITSCRIPT_PACKAGES') == None:
+ if d.getVar('INITSCRIPT_NAME') == None:
+ raise bb.build.FuncFailed, "%s inherits update-rc.d but doesn't set INITSCRIPT_NAME" % d.getVar('FILE')
+ if d.getVar('INITSCRIPT_PARAMS') == None:
+ raise bb.build.FuncFailed, "%s inherits update-rc.d but doesn't set INITSCRIPT_PARAMS" % d.getVar('FILE')
python __anonymous() {
update_rc_after_parse(d)
@@ -44,7 +44,7 @@ python populate_packages_prepend () {
def update_rcd_package(pkg):
bb.debug(1, 'adding update-rc.d calls to postinst/postrm for %s' % pkg)
localdata = bb.data.createCopy(d)
- overrides = bb.data.getVar("OVERRIDES", localdata, 1)
+ overrides = localdata.getVar("OVERRIDES", 1)
bb.data.setVar("OVERRIDES", "%s:%s" % (pkg, overrides), localdata)
bb.data.update_data(localdata)
@@ -53,28 +53,28 @@ python populate_packages_prepend () {
execute on the target. Not doing so may cause update_rc.d postinst invoked
twice to cause unwanted warnings.
"""
- postinst = bb.data.getVar('pkg_postinst', localdata, 1)
+ postinst = localdata.getVar('pkg_postinst', 1)
if not postinst:
postinst = '#!/bin/sh\n'
- postinst += bb.data.getVar('updatercd_postinst', localdata, 1)
- bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)
+ postinst += localdata.getVar('updatercd_postinst', 1)
+ d.setVar('pkg_postinst_%s' % pkg, postinst)
- prerm = bb.data.getVar('pkg_prerm', localdata, 1)
+ prerm = localdata.getVar('pkg_prerm', 1)
if not prerm:
prerm = '#!/bin/sh\n'
- prerm += bb.data.getVar('updatercd_prerm', localdata, 1)
- bb.data.setVar('pkg_prerm_%s' % pkg, prerm, d)
+ prerm += localdata.getVar('updatercd_prerm', 1)
+ d.setVar('pkg_prerm_%s' % pkg, prerm)
- postrm = bb.data.getVar('pkg_postrm', localdata, 1)
+ postrm = localdata.getVar('pkg_postrm', 1)
if not postrm:
postrm = '#!/bin/sh\n'
- postrm += bb.data.getVar('updatercd_postrm', localdata, 1)
- bb.data.setVar('pkg_postrm_%s' % pkg, postrm, d)
+ postrm += localdata.getVar('updatercd_postrm', 1)
+ d.setVar('pkg_postrm_%s' % pkg, postrm)
- pkgs = bb.data.getVar('INITSCRIPT_PACKAGES', d, 1)
+ pkgs = d.getVar('INITSCRIPT_PACKAGES', 1)
if pkgs == None:
- pkgs = bb.data.getVar('UPDATERCPN', d, 1)
- packages = (bb.data.getVar('PACKAGES', d, 1) or "").split()
+ pkgs = d.getVar('UPDATERCPN', 1)
+ packages = (d.getVar('PACKAGES', 1) or "").split()
if not pkgs in packages and packages != []:
pkgs = packages[0]
for pkg in pkgs.split():
diff --git a/meta/classes/useradd.bbclass b/meta/classes/useradd.bbclass
index f3dd8236eef..928f619b7c9 100644
--- a/meta/classes/useradd.bbclass
+++ b/meta/classes/useradd.bbclass
@@ -107,11 +107,11 @@ def update_useradd_after_parse(d):
useradd_packages = d.getVar('USERADD_PACKAGES', True)
if not useradd_packages:
- raise bb.build.FuncFailed, "%s inherits useradd but doesn't set USERADD_PACKAGES" % bb.data.getVar('FILE', d)
+ raise bb.build.FuncFailed, "%s inherits useradd but doesn't set USERADD_PACKAGES" % d.getVar('FILE')
for pkg in useradd_packages.split():
if not d.getVar('USERADD_PARAM_%s' % pkg, True) and not d.getVar('GROUPADD_PARAM_%s' % pkg, True):
- raise bb.build.FuncFailed, "%s inherits useradd but doesn't set USERADD_PARAM or GROUPADD_PARAM for package %s" % (bb.data.getVar('FILE', d), pkg)
+ raise bb.build.FuncFailed, "%s inherits useradd but doesn't set USERADD_PARAM or GROUPADD_PARAM for package %s" % (d.getVar('FILE'), pkg)
python __anonymous() {
update_useradd_after_parse(d)
@@ -147,12 +147,12 @@ fakeroot python populate_packages_prepend () {
if not preinst:
preinst = '#!/bin/sh\n'
preinst += d.getVar('useradd_preinst', True)
- bb.data.setVar('pkg_preinst_%s' % pkg, preinst, d)
+ d.setVar('pkg_preinst_%s' % pkg, preinst)
# RDEPENDS setup
rdepends = d.getVar("RDEPENDS_%s" % pkg, True) or ""
rdepends += " base-passwd shadow"
- bb.data.setVar("RDEPENDS_%s" % pkg, rdepends, d)
+ d.setVar("RDEPENDS_%s" % pkg, rdepends)
# Add the user/group preinstall scripts and RDEPENDS requirements
# to packages specified by USERADD_PACKAGES
diff --git a/meta/classes/utility-tasks.bbclass b/meta/classes/utility-tasks.bbclass
index 64bd84a5d3b..009ef1fd048 100644
--- a/meta/classes/utility-tasks.bbclass
+++ b/meta/classes/utility-tasks.bbclass
@@ -6,7 +6,7 @@ python do_listtasks() {
#bb.data.emit_env(sys.__stdout__, d)
# emit the metadata which isnt valid shell
for e in d.keys():
- if bb.data.getVarFlag(e, 'task', d):
+ if d.getVarFlag(e, 'task'):
bb.plain("%s" % e)
}
@@ -20,18 +20,18 @@ python do_clean() {
bb.note("Removing " + dir)
oe.path.remove(dir)
- dir = "%s.*" % bb.data.expand(bb.data.getVar('STAMP', d), d)
+ dir = "%s.*" % bb.data.expand(d.getVar('STAMP'), d)
bb.note("Removing " + dir)
oe.path.remove(dir)
- for f in (bb.data.getVar('CLEANFUNCS', d, 1) or '').split():
+ for f in (d.getVar('CLEANFUNCS', 1) or '').split():
bb.build.exec_func(f, d)
}
addtask checkuri
do_checkuri[nostamp] = "1"
python do_checkuri() {
- src_uri = (bb.data.getVar('SRC_URI', d, True) or "").split()
+ src_uri = (d.getVar('SRC_URI', True) or "").split()
if len(src_uri) == 0:
return
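
The "(d.getVar(...) or '').split()" construction above is the usual guard against unset variables; a one-line sketch of the idiom:

    src_uri = (d.getVar('SRC_URI', True) or "").split()   # [] when SRC_URI is unset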
diff --git a/meta/classes/utils.bbclass b/meta/classes/utils.bbclass
index 3c2e342f91c..103fa9a546e 100644
--- a/meta/classes/utils.bbclass
+++ b/meta/classes/utils.bbclass
@@ -331,12 +331,12 @@ def explode_deps(s):
def base_set_filespath(path, d):
filespath = []
- extrapaths = (bb.data.getVar("FILESEXTRAPATHS", d, True) or "")
+ extrapaths = (d.getVar("FILESEXTRAPATHS", True) or "")
# Don't prepend empty strings to the path list
if extrapaths != "":
path = extrapaths.split(":") + path
# The ":" ensures we have an 'empty' override
- overrides = (bb.data.getVar("OVERRIDES", d, 1) or "") + ":"
+ overrides = (d.getVar("OVERRIDES", 1) or "") + ":"
for p in path:
if p != "":
for o in overrides.split(":"):
diff --git a/meta/conf/bitbake.conf b/meta/conf/bitbake.conf
index d405b6a7ff4..7e75be2d78d 100644
--- a/meta/conf/bitbake.conf
+++ b/meta/conf/bitbake.conf
@@ -99,7 +99,7 @@ ABIEXTENSION ??= ""
TARGET_ARCH = "${TUNE_ARCH}"
TARGET_OS = "linux${LIBCEXTENSION}${ABIEXTENSION}"
TARGET_VENDOR = "-oe"
-TARGET_SYS = "${TARGET_ARCH}${TARGET_VENDOR}${@['-' + bb.data.getVar('TARGET_OS', d, 1), ''][bb.data.getVar('TARGET_OS', d, 1) == ('' or 'custom')]}"
+TARGET_SYS = "${TARGET_ARCH}${TARGET_VENDOR}${@['-' + d.getVar('TARGET_OS', 1), ''][d.getVar('TARGET_OS', 1) == ('' or 'custom')]}"
TARGET_PREFIX = "${TARGET_SYS}-"
TARGET_CC_ARCH = "${TUNE_CCARGS}"
TARGET_LD_ARCH = "${TUNE_LDARGS}"
@@ -108,7 +108,7 @@ TARGET_AS_ARCH = "${TUNE_ASARGS}"
SDK_ARCH = "${BUILD_ARCH}"
SDK_OS = "${BUILD_OS}"
SDK_VENDOR = "-oesdk"
-SDK_SYS = "${SDK_ARCH}${SDK_VENDOR}${@['-' + bb.data.getVar('SDK_OS', d, 1), ''][bb.data.getVar('SDK_OS', d, 1) == ('' or 'custom')]}"
+SDK_SYS = "${SDK_ARCH}${SDK_VENDOR}${@['-' + d.getVar('SDK_OS', 1), ''][d.getVar('SDK_OS', 1) == ('' or 'custom')]}"
SDK_PREFIX = "${SDK_SYS}-"
SDK_CC_ARCH = "${BUILD_CC_ARCH}"
SDK_PACKAGE_ARCHS = "all any noarch ${SDK_ARCH}-nativesdk"
@@ -116,7 +116,7 @@ SDK_LD_ARCH = "${BUILD_LD_ARCH}"
SDK_AS_ARCH = "${BUILD_AS_ARCH}"
PACKAGE_ARCH = "${TUNE_PKGARCH}"
-MACHINE_ARCH = "${@[bb.data.getVar('TUNE_PKGARCH', d, 1), bb.data.getVar('MACHINE', d, 1)][bool(bb.data.getVar('MACHINE', d, 1))].replace('-', '_')}"
+MACHINE_ARCH = "${@[d.getVar('TUNE_PKGARCH', 1), d.getVar('MACHINE', 1)][bool(d.getVar('MACHINE', 1))].replace('-', '_')}"
PACKAGE_EXTRA_ARCHS ??= "${PACKAGE_EXTRA_ARCHS_tune-${DEFAULTTUNE}}"
PACKAGE_ARCHS = "all any noarch ${PACKAGE_EXTRA_ARCHS} ${MACHINE_ARCH}"
# MACHINE_ARCH shouldn't be included here as a variable dependency
@@ -167,33 +167,33 @@ ASSUME_PROVIDED = "\
# Package default variables.
##################################################################
-PN = "${@bb.parse.BBHandler.vars_from_file(bb.data.getVar('FILE',d),d)[0] or 'defaultpkgname'}"
-PV = "${@bb.parse.BBHandler.vars_from_file(bb.data.getVar('FILE',d),d)[1] or '1.0'}"
-PR = "${@bb.parse.BBHandler.vars_from_file(bb.data.getVar('FILE',d),d)[2] or 'r0'}"
+PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[0] or 'defaultpkgname'}"
+PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[1] or '1.0'}"
+PR = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[2] or 'r0'}"
PF = "${PN}-${EXTENDPE}${PV}-${PR}"
-EXTENDPE = "${@['','${PE\x7d_'][bb.data.getVar('PE',d,1) > 0]}"
+EXTENDPE = "${@['','${PE\x7d_'][d.getVar('PE',1) > 0]}"
P = "${PN}-${PV}"
-EXTENDPRAUTO = "${@['.${PRAUTO\x7d',''][bb.data.getVar('PRAUTO',d,1) is None]}"
+EXTENDPRAUTO = "${@['.${PRAUTO\x7d',''][d.getVar('PRAUTO',1) is None]}"
PRAUTOINX = "${PF}"
PKGV ?= "${PV}"
PKGR ?= "${PR}${EXTENDPRAUTO}"
-PKGE ?= "${@['','${PE\x7d'][bb.data.getVar('PE',d,1) > 0]}"
-EXTENDPKGEVER = "${@['','${PKGE\x7d:'][bb.data.getVar('PKGE',d,1).strip() != '']}"
+PKGE ?= "${@['','${PE\x7d'][d.getVar('PE',1) > 0]}"
+EXTENDPKGEVER = "${@['','${PKGE\x7d:'][d.getVar('PKGE',1).strip() != '']}"
EXTENDPKGV ?= "${EXTENDPKGEVER}${PKGV}-${PKGR}"
# Base package name
# Automatically derives "foo" from "foo-native", "foo-cross" or "foo-initial"
# otherwise it is the same as PN and P
SPECIAL_PKGSUFFIX = "-native -cross -initial -intermediate -nativesdk -crosssdk -cross-canadian"
-BPN = "${@base_prune_suffix(bb.data.getVar('PN', d, True), bb.data.getVar('SPECIAL_PKGSUFFIX', d, True).split(), d)}"
+BPN = "${@base_prune_suffix(d.getVar('PN', True), d.getVar('SPECIAL_PKGSUFFIX', True).split(), d)}"
BP = "${BPN}-${PV}"
#
# network based PR service
#
-USE_PR_SERV = "${@[1,0][(bb.data.getVar('PRSERV_HOST',d,1) is None) or (bb.data.getVar('PRSERV_PORT',d,1) is None)]}"
+USE_PR_SERV = "${@[1,0][(d.getVar('PRSERV_HOST',1) is None) or (d.getVar('PRSERV_PORT',1) is None)]}"
# Package info.
@@ -288,7 +288,7 @@ DOTDEBUG-dbg = "${bindir}/.debug ${sbindir}/.debug ${libexecdir}/.debug ${libdir
DEBUGFILEDIRECTORY-dbg = "/usr/lib/debug /usr/src/debug"
-FILES_${PN}-dbg = "${@bb.data.getVar(['DOTDEBUG-dbg', 'DEBUGFILEDIRECTORY-dbg'][bb.data.getVar('PACKAGE_DEBUG_SPLIT_STYLE', d, 1) == 'debug-file-directory'], d, 1)}"
+FILES_${PN}-dbg = "${@d.getVar(['DOTDEBUG-dbg', 'DEBUGFILEDIRECTORY-dbg'][d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', 1) == 'debug-file-directory'], 1)}"
SECTION_${PN}-dbg = "devel"
ALLOW_EMPTY_${PN}-dbg = "1"
@@ -298,17 +298,17 @@ FILES_${PN}-locale = "${datadir}/locale"
# File manifest
-FILE_DIRNAME = "${@os.path.dirname(bb.data.getVar('FILE', d))}"
+FILE_DIRNAME = "${@os.path.dirname(d.getVar('FILE'))}"
# FILESPATH is set in base.bbclass
#FILESPATH = "${FILE_DIRNAME}/${PF}:${FILE_DIRNAME}/${P}:${FILE_DIRNAME}/${PN}:${FILE_DIRNAME}/${BP}:${FILE_DIRNAME}/${BPN}:${FILE_DIRNAME}/files:${FILE_DIRNAME}"
-FILESDIR = "${@bb.which(bb.data.getVar('FILESPATH', d, 1), '.')}"
+FILESDIR = "${@bb.which(d.getVar('FILESPATH', 1), '.')}"
##################################################################
# General work and output directories for the build system.
##################################################################
TMPDIR ?= "${TOPDIR}/tmp"
-CACHE = "${TMPDIR}/cache${@['', '/' + str(bb.data.getVar('MACHINE', d, 1))][bool(bb.data.getVar('MACHINE', d, 1))]}${@['', '/' + str(bb.data.getVar('SDKMACHINE', d, 1))][bool(bb.data.getVar('SDKMACHINE', d, 1))]}"
+CACHE = "${TMPDIR}/cache${@['', '/' + str(d.getVar('MACHINE', 1))][bool(d.getVar('MACHINE', 1))]}${@['', '/' + str(d.getVar('SDKMACHINE', 1))][bool(d.getVar('SDKMACHINE', 1))]}"
# The persistent cache should be shared by all builds
PERSISTENT_DIR = "${TMPDIR}/cache"
LOG_DIR = "${TMPDIR}/log"
@@ -403,7 +403,7 @@ export PATH
# Build utility info.
##################################################################
-CCACHE = "${@bb.which(bb.data.getVar('PATH', d, 1), 'ccache') and 'ccache '}"
+CCACHE = "${@bb.which(d.getVar('PATH', 1), 'ccache') and 'ccache '}"
TOOLCHAIN_OPTIONS = " --sysroot=${STAGING_DIR_TARGET}"
export CCACHE_DIR = "${TMPDIR}/ccache/${MULTIMACH_HOST_SYS}/${PN}"
@@ -505,7 +505,7 @@ DEBUG_FLAGS ?= "-g -feliminate-unused-debug-types"
# Disabled until the option works properly -feliminate-dwarf2-dups
FULL_OPTIMIZATION = "-O2 -pipe ${DEBUG_FLAGS}"
DEBUG_OPTIMIZATION = "-O -fno-omit-frame-pointer ${DEBUG_FLAGS} -pipe"
-SELECTED_OPTIMIZATION = "${@bb.data.getVar(['FULL_OPTIMIZATION', 'DEBUG_OPTIMIZATION'][bb.data.getVar('DEBUG_BUILD', d, 1) == '1'], d, 1)}"
+SELECTED_OPTIMIZATION = "${@d.getVar(['FULL_OPTIMIZATION', 'DEBUG_OPTIMIZATION'][d.getVar('DEBUG_BUILD', 1) == '1'], 1)}"
BUILD_OPTIMIZATION = "-O2 -pipe"
##################################################################
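
The ${@...} expressions above use a pre-ternary Python idiom: indexing a two-element list with a boolean. A sketch with a placeholder value:

    debug_build = '1'                    # placeholder for d.getVar('DEBUG_BUILD', 1)
    name = ['FULL_OPTIMIZATION', 'DEBUG_OPTIMIZATION'][debug_build == '1']
    # True indexes as 1, so this picks DEBUG_OPTIMIZATION when the test
    # holds and FULL_OPTIMIZATION otherwise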
diff --git a/meta/conf/distro/defaultsetup.conf b/meta/conf/distro/defaultsetup.conf
index 1a297785fd7..064c1e05376 100644
--- a/meta/conf/distro/defaultsetup.conf
+++ b/meta/conf/distro/defaultsetup.conf
@@ -13,7 +13,7 @@ require conf/distro/include/tclibc-${TCLIBC}.inc
TCLIBCAPPEND ?= "-${TCLIBC}"
TMPDIR .= "${TCLIBCAPPEND}"
-CACHE = "${TMPDIR}/cache/${TCMODE}-${TCLIBC}${@['', '/' + str(bb.data.getVar('MACHINE', d, 1))][bool(bb.data.getVar('MACHINE', d, 1))]}${@['', '/' + str(bb.data.getVar('SDKMACHINE', d, 1))][bool(bb.data.getVar('SDKMACHINE', d, 1))]}"
+CACHE = "${TMPDIR}/cache/${TCMODE}-${TCLIBC}${@['', '/' + str(d.getVar('MACHINE', 1))][bool(d.getVar('MACHINE', 1))]}${@['', '/' + str(d.getVar('SDKMACHINE', 1))][bool(d.getVar('SDKMACHINE', 1))]}"
USER_CLASSES ?= ""
PACKAGE_CLASSES ?= "package_ipk"
diff --git a/meta/conf/machine/include/arm/feature-arm-thumb.inc b/meta/conf/machine/include/arm/feature-arm-thumb.inc
index d606a35ca9a..cd34199b047 100644
--- a/meta/conf/machine/include/arm/feature-arm-thumb.inc
+++ b/meta/conf/machine/include/arm/feature-arm-thumb.inc
@@ -5,7 +5,7 @@
# but requires more instructions (140% for 70% smaller code) so may be
# slower.
TUNEVALID[thumb] = "Use thumb instructions instead of ARM"
-ARM_THUMB_M_OPT = "${@['-mno-thumb', '-mthumb'][bb.data.getVar('ARM_INSTRUCTION_SET', d, 1) == 'thumb']}"
+ARM_THUMB_M_OPT = "${@['-mno-thumb', '-mthumb'][d.getVar('ARM_INSTRUCTION_SET', 1) == 'thumb']}"
TUNE_CCARGS += "${@bb.utils.contains("TUNE_FEATURES", "thumb", "${ARM_THUMB_M_OPT}", "", d)}"
OVERRIDES .= "${@bb.utils.contains("TUNE_FEATURES", "thumb", ":thumb", "", d)}"
diff --git a/meta/conf/machine/include/tune-thumb.inc b/meta/conf/machine/include/tune-thumb.inc
index 9f6ce95a474..85473ce741a 100644
--- a/meta/conf/machine/include/tune-thumb.inc
+++ b/meta/conf/machine/include/tune-thumb.inc
@@ -16,15 +16,15 @@ THUMB_INTERWORK ?= "yes"
# arm system and vice versa. It is strongly recommended that DISTROs not
# turn this off - the actual cost is very small.
-OVERRIDE_THUMB = "${@['', ':thumb'][bb.data.getVar('ARM_INSTRUCTION_SET', d, 1) == 'thumb']}"
-OVERRIDE_INTERWORK = "${@['', ':thumb-interwork'][bb.data.getVar('THUMB_INTERWORK', d, 1) == 'yes']}"
+OVERRIDE_THUMB = "${@['', ':thumb'][d.getVar('ARM_INSTRUCTION_SET', 1) == 'thumb']}"
+OVERRIDE_INTERWORK = "${@['', ':thumb-interwork'][d.getVar('THUMB_INTERWORK', 1) == 'yes']}"
OVERRIDES .= "${OVERRIDE_THUMB}${OVERRIDE_INTERWORK}"
# Compiler and linker options for application code and kernel code. These
# options ensure that the compiler has the correct settings for the selected
# instruction set and interworking.
-ARM_INTERWORK_M_OPT = "${@['-mno-thumb-interwork', '-mthumb-interwork'][bb.data.getVar('THUMB_INTERWORK', d, 1) == 'yes']}"
-ARM_THUMB_M_OPT = "${@['-mno-thumb', '-mthumb'][bb.data.getVar('ARM_INSTRUCTION_SET', d, 1) == 'thumb']}"
+ARM_INTERWORK_M_OPT = "${@['-mno-thumb-interwork', '-mthumb-interwork'][d.getVar('THUMB_INTERWORK', 1) == 'yes']}"
+ARM_THUMB_M_OPT = "${@['-mno-thumb', '-mthumb'][d.getVar('ARM_INSTRUCTION_SET', 1) == 'thumb']}"
#
TUNE_CCARGS += "${ARM_INTERWORK_M_OPT} ${ARM_THUMB_M_OPT}"
diff --git a/meta/lib/oe/distro_check.py b/meta/lib/oe/distro_check.py
index 4721355bd85..ad391e3b013 100644
--- a/meta/lib/oe/distro_check.py
+++ b/meta/lib/oe/distro_check.py
@@ -276,32 +276,32 @@ def compare_in_distro_packages_list(distro_check_dir, d):
localdata = bb.data.createCopy(d)
pkglst_dir = os.path.join(distro_check_dir, "package_lists")
matching_distros = []
- pn = bb.data.getVar('PN', d, True)
- recipe_name = bb.data.getVar('PN', d, True)
+ pn = d.getVar('PN', True)
+ recipe_name = d.getVar('PN', True)
bb.note("Checking: %s" % pn)
trim_dict = dict({"-native":"-native", "-cross":"-cross", "-initial":"-initial"})
if pn.find("-native") != -1:
pnstripped = pn.split("-native")
- bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+ localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
bb.data.update_data(localdata)
recipe_name = pnstripped[0]
if pn.find("-cross") != -1:
pnstripped = pn.split("-cross")
- bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+ localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
bb.data.update_data(localdata)
recipe_name = pnstripped[0]
if pn.find("-initial") != -1:
pnstripped = pn.split("-initial")
- bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+ localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
bb.data.update_data(localdata)
recipe_name = pnstripped[0]
bb.note("Recipe: %s" % recipe_name)
- tmp = bb.data.getVar('DISTRO_PN_ALIAS', localdata, True)
+ tmp = localdata.getVar('DISTRO_PN_ALIAS', True)
distro_exceptions = dict({"OE-Core":'OE-Core', "OpenedHand":'OpenedHand', "Intel":'Intel', "Upstream":'Upstream', "Windriver":'Windriver', "OSPDT":'OSPDT Approved', "Poky":'poky'})
@@ -343,23 +343,23 @@ def compare_in_distro_packages_list(distro_check_dir, d):
return matching_distros
def create_log_file(d, logname):
- logpath = bb.data.getVar('LOG_DIR', d, True)
+ logpath = d.getVar('LOG_DIR', True)
bb.utils.mkdirhier(logpath)
logfn, logsuffix = os.path.splitext(logname)
- logfile = os.path.join(logpath, "%s.%s%s" % (logfn, bb.data.getVar('DATETIME', d, True), logsuffix))
+ logfile = os.path.join(logpath, "%s.%s%s" % (logfn, d.getVar('DATETIME', True), logsuffix))
if not os.path.exists(logfile):
slogfile = os.path.join(logpath, logname)
if os.path.exists(slogfile):
os.remove(slogfile)
os.system("touch %s" % logfile)
os.symlink(logfile, slogfile)
- bb.data.setVar('LOG_FILE', logfile, d)
+ d.setVar('LOG_FILE', logfile)
return logfile
def save_distro_check_result(result, datetime, result_file, d):
- pn = bb.data.getVar('PN', d, True)
- logdir = bb.data.getVar('LOG_DIR', d, True)
+ pn = d.getVar('PN', True)
+ logdir = d.getVar('LOG_DIR', True)
if not logdir:
bb.error("LOG_DIR variable is not defined, can't write the distro_check results")
return
diff --git a/meta/lib/oe/patch.py b/meta/lib/oe/patch.py
index 75fb91e0fb2..f4ccb3e1835 100644
--- a/meta/lib/oe/patch.py
+++ b/meta/lib/oe/patch.py
@@ -179,7 +179,7 @@ class GitApplyTree(PatchTree):
class QuiltTree(PatchSet):
def _runcmd(self, args, run = True):
- quiltrc = bb.data.getVar('QUILTRCFILE', self.d, 1)
+ quiltrc = self.d.getVar('QUILTRCFILE', 1)
if not run:
return ["quilt"] + ["--quiltrc"] + [quiltrc] + args
runcmd(["quilt"] + ["--quiltrc"] + [quiltrc] + args, self.dir)
@@ -357,7 +357,7 @@ class UserResolver(Resolver):
# Patch application failed
patchcmd = self.patchset.Push(True, False, False)
- t = bb.data.getVar('T', self.patchset.d, 1)
+ t = self.patchset.d.getVar('T', 1)
if not t:
bb.msg.fatal("Build", "T not set")
bb.utils.mkdirhier(t)
diff --git a/meta/lib/oe/utils.py b/meta/lib/oe/utils.py
index b3473d34762..95daace6c61 100644
--- a/meta/lib/oe/utils.py
+++ b/meta/lib/oe/utils.py
@@ -16,19 +16,19 @@ def ifelse(condition, iftrue = True, iffalse = False):
return iffalse
def conditional(variable, checkvalue, truevalue, falsevalue, d):
- if bb.data.getVar(variable,d,1) == checkvalue:
+ if d.getVar(variable,1) == checkvalue:
return truevalue
else:
return falsevalue
def less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
- if float(bb.data.getVar(variable,d,1)) <= float(checkvalue):
+ if float(d.getVar(variable,1)) <= float(checkvalue):
return truevalue
else:
return falsevalue
def version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
- result = bb.vercmp(bb.data.getVar(variable,d,True), checkvalue)
+ result = bb.vercmp(d.getVar(variable,True), checkvalue)
if result <= 0:
return truevalue
else:
@@ -48,7 +48,7 @@ def contains(variable, checkvalues, truevalue, falsevalue, d):
return falsevalue
def both_contain(variable1, variable2, checkvalue, d):
- if bb.data.getVar(variable1,d,1).find(checkvalue) != -1 and bb.data.getVar(variable2,d,1).find(checkvalue) != -1:
+ if d.getVar(variable1,1).find(checkvalue) != -1 and d.getVar(variable2,1).find(checkvalue) != -1:
return checkvalue
else:
return ""
diff --git a/meta/recipes-bsp/grub/grub_0.97.bb b/meta/recipes-bsp/grub/grub_0.97.bb
index 0f297ff3ff4..6ec66e3b0fc 100644
--- a/meta/recipes-bsp/grub/grub_0.97.bb
+++ b/meta/recipes-bsp/grub/grub_0.97.bb
@@ -23,7 +23,7 @@ inherit autotools
python __anonymous () {
import re
- host = bb.data.getVar('HOST_SYS', d, 1)
+ host = d.getVar('HOST_SYS', 1)
if not re.match('i.86.*-linux', host):
raise bb.parse.SkipPackage("incompatible with host %s" % host)
}
diff --git a/meta/recipes-bsp/grub/grub_1.99.bb b/meta/recipes-bsp/grub/grub_1.99.bb
index afd10f473fc..b6aa827a5b1 100644
--- a/meta/recipes-bsp/grub/grub_1.99.bb
+++ b/meta/recipes-bsp/grub/grub_1.99.bb
@@ -33,7 +33,7 @@ do_configure() {
python __anonymous () {
import re
- host = bb.data.getVar('HOST_SYS', d, 1)
+ host = d.getVar('HOST_SYS', 1)
if not re.match('x86.64.*-linux', host) and not re.match('i.86.*-linux', host):
raise bb.parse.SkipPackage("incompatible with host %s" % host)
}
diff --git a/meta/recipes-bsp/uboot/u-boot_2011.03.bb b/meta/recipes-bsp/uboot/u-boot_2011.03.bb
index 34655ead595..1ebdbeabca1 100644
--- a/meta/recipes-bsp/uboot/u-boot_2011.03.bb
+++ b/meta/recipes-bsp/uboot/u-boot_2011.03.bb
@@ -10,7 +10,7 @@ LICENSE = "GPLv2+"
LIC_FILES_CHKSUM = "file://COPYING;md5=1707d6db1d42237583f50183a5651ecb \
file://README;beginline=1;endline=22;md5=3a00ef51d3fc96e9d6c1bc4708ccd3b5"
-FILESDIR = "${@os.path.dirname(bb.data.getVar('FILE',d,1))}/u-boot-git/${MACHINE}"
+FILESDIR = "${@os.path.dirname(d.getVar('FILE',1))}/u-boot-git/${MACHINE}"
# This revision corresponds to the tag "v2011.03"
# We use the revision in order to avoid having to fetch it from the repo during parse
diff --git a/meta/recipes-bsp/uboot/u-boot_2011.06.bb b/meta/recipes-bsp/uboot/u-boot_2011.06.bb
index acd0944a8a8..8ebdbff5ea0 100644
--- a/meta/recipes-bsp/uboot/u-boot_2011.06.bb
+++ b/meta/recipes-bsp/uboot/u-boot_2011.06.bb
@@ -10,7 +10,7 @@ LICENSE = "GPLv2+"
LIC_FILES_CHKSUM = "file://COPYING;md5=1707d6db1d42237583f50183a5651ecb \
file://README;beginline=1;endline=22;md5=5ba4218ac89af7846802d0348df3fb90"
-FILESDIR = "${@os.path.dirname(bb.data.getVar('FILE',d,1))}/u-boot-git/${MACHINE}"
+FILESDIR = "${@os.path.dirname(d.getVar('FILE',1))}/u-boot-git/${MACHINE}"
# This revision corresponds to the tag "v2011.06"
# We use the revision in order to avoid having to fetch it from the repo during parse
diff --git a/meta/recipes-bsp/x-load/x-load_git.bb b/meta/recipes-bsp/x-load/x-load_git.bb
index 8114fd23890..87fc16f0a50 100644
--- a/meta/recipes-bsp/x-load/x-load_git.bb
+++ b/meta/recipes-bsp/x-load/x-load_git.bb
@@ -1,6 +1,6 @@
require x-load.inc
-FILESDIR = "${@os.path.dirname(bb.data.getVar('FILE',d,1))}/x-load-git/${MACHINE}"
+FILESDIR = "${@os.path.dirname(d.getVar('FILE',1))}/x-load-git/${MACHINE}"
LICENSE = "GPLv2+"
LIC_FILES_CHKSUM = "file://README;beginline=1;endline=25;md5=ef08d08cb99057bbb5b9d6d0c5a4396f"
diff --git a/meta/recipes-connectivity/connman/connman.inc b/meta/recipes-connectivity/connman/connman.inc
index e86bd9fa38d..7ee460ec8b2 100644
--- a/meta/recipes-connectivity/connman/connman.inc
+++ b/meta/recipes-connectivity/connman/connman.inc
@@ -64,5 +64,5 @@ python populate_packages_prepend() {
plugintype = package.split( '-' )[-1]
if plugintype in depmap:
bb.note( "Adding rdependency on %s to package %s" % ( depmap[plugintype], package ) )
- bb.data.setVar("RDEPENDS_%s" % package, depmap[plugintype], d)
+ d.setVar("RDEPENDS_%s" % package, depmap[plugintype])
}
diff --git a/meta/recipes-core/busybox/busybox.inc b/meta/recipes-core/busybox/busybox.inc
index f8fee51725e..0b74ea6780d 100644
--- a/meta/recipes-core/busybox/busybox.inc
+++ b/meta/recipes-core/busybox/busybox.inc
@@ -49,8 +49,8 @@ def busybox_cfg(feature, features, tokens, cnf, rem):
# Map distro and machine features to config settings
def features_to_busybox_settings(d):
cnf, rem = ([], [])
- distro_features = bb.data.getVar('DISTRO_FEATURES', d, True).split()
- machine_features = bb.data.getVar('MACHINE_FEATURES', d, True).split()
+ distro_features = d.getVar('DISTRO_FEATURES', True).split()
+ machine_features = d.getVar('MACHINE_FEATURES', True).split()
busybox_cfg('ipv6', distro_features, 'CONFIG_FEATURE_IPV6', cnf, rem)
busybox_cfg('largefile', distro_features, 'CONFIG_LFS', cnf, rem)
busybox_cfg('largefile', distro_features, 'CONFIG_FDISK_SUPPORT_LARGE_DISKS', cnf, rem)
@@ -79,7 +79,7 @@ DO_IPv6 := ${@base_contains('DISTRO_FEATURES', 'ipv6', 1, 0, d)}
python () {
if "${OE_DEL}":
- bb.data.setVar('configmangle_append', "${OE_DEL}" + "\n", d)
+ d.setVar('configmangle_append', "${OE_DEL}" + "\n")
if "${OE_FEATURES}":
bb.data.setVar('configmangle_append',
"/^### DISTRO FEATURES$/a\\\n%s\n\n" %
diff --git a/meta/recipes-core/eglibc/eglibc-options.inc b/meta/recipes-core/eglibc/eglibc-options.inc
index 112029dab84..baf4f4b06ca 100644
--- a/meta/recipes-core/eglibc/eglibc-options.inc
+++ b/meta/recipes-core/eglibc/eglibc-options.inc
@@ -83,7 +83,7 @@ def distro_features_check_deps(distro_features):
# Map distro features to eglibc options settings
def features_to_eglibc_settings(d):
cnf = ([])
- distro_features = (bb.data.getVar('DISTRO_FEATURES', d, True) or '').split()
+ distro_features = (d.getVar('DISTRO_FEATURES', True) or '').split()
distro_features_check_deps(distro_features)
@@ -128,8 +128,8 @@ def features_to_eglibc_settings(d):
# try to fix disable charsets/locales/locale-code compile fail
if 'libc-charsets' in distro_features and 'libc-locales' in distro_features and 'libc-locale-code' in distro_features:
- bb.data.setVar('PACKAGE_NO_GCONV', '0', d)
+ d.setVar('PACKAGE_NO_GCONV', '0')
else:
- bb.data.setVar('PACKAGE_NO_GCONV', '1', d)
+ d.setVar('PACKAGE_NO_GCONV', '1')
return "\n".join(cnf)
diff --git a/meta/recipes-core/eglibc/eglibc-package.inc b/meta/recipes-core/eglibc/eglibc-package.inc
index 519a49c75fd..020f55876b0 100644
--- a/meta/recipes-core/eglibc/eglibc-package.inc
+++ b/meta/recipes-core/eglibc/eglibc-package.inc
@@ -8,10 +8,10 @@
python __anonymous () {
import bb, re
- uc_os = (re.match('.*uclibc*', bb.data.getVar('TARGET_OS', d, 1)) != None)
+ uc_os = (re.match('.*uclibc*', d.getVar('TARGET_OS', 1)) != None)
if uc_os:
raise bb.parse.SkipPackage("incompatible with target %s" %
- bb.data.getVar('TARGET_OS', d, 1))
+ d.getVar('TARGET_OS', 1))
}
# Set this to zero if you don't want ldconfig in the output package
diff --git a/meta/recipes-core/eglibc/eglibc_2.13.bb b/meta/recipes-core/eglibc/eglibc_2.13.bb
index fc8ac64a1c3..f076ae7c7fe 100644
--- a/meta/recipes-core/eglibc/eglibc_2.13.bb
+++ b/meta/recipes-core/eglibc/eglibc_2.13.bb
@@ -53,10 +53,10 @@ FILESPATH = "${@base_set_filespath([ '${FILE_DIRNAME}/eglibc-${PV}', '${FILE_DIR
python __anonymous () {
import bb, re
- uc_os = (re.match('.*uclibc$', bb.data.getVar('TARGET_OS', d, 1)) != None)
+ uc_os = (re.match('.*uclibc$', d.getVar('TARGET_OS', 1)) != None)
if uc_os:
raise bb.parse.SkipPackage("incompatible with target %s" %
- bb.data.getVar('TARGET_OS', d, 1))
+ d.getVar('TARGET_OS', 1))
}
export libc_cv_slibdir = "${base_libdir}"
diff --git a/meta/recipes-core/eglibc/eglibc_2.14.bb b/meta/recipes-core/eglibc/eglibc_2.14.bb
index 571d39d26ee..501987525ef 100644
--- a/meta/recipes-core/eglibc/eglibc_2.14.bb
+++ b/meta/recipes-core/eglibc/eglibc_2.14.bb
@@ -54,10 +54,10 @@ FILESPATH = "${@base_set_filespath([ '${FILE_DIRNAME}/eglibc-${PV}', '${FILE_DIR
python __anonymous () {
import bb, re
- uc_os = (re.match('.*uclibc$', bb.data.getVar('TARGET_OS', d, 1)) != None)
+ uc_os = (re.match('.*uclibc$', d.getVar('TARGET_OS', 1)) != None)
if uc_os:
raise bb.parse.SkipPackage("incompatible with target %s" %
- bb.data.getVar('TARGET_OS', d, 1))
+ d.getVar('TARGET_OS', 1))
}
export libc_cv_slibdir = "${base_libdir}"
diff --git a/meta/recipes-core/glib-2.0/glib-2.0_2.30.0.bb b/meta/recipes-core/glib-2.0/glib-2.0_2.30.0.bb
index 0efce406e0a..634a4e4f4a8 100644
--- a/meta/recipes-core/glib-2.0/glib-2.0_2.30.0.bb
+++ b/meta/recipes-core/glib-2.0/glib-2.0_2.30.0.bb
@@ -7,7 +7,7 @@ DEPENDS += "libffi python-argparse-native"
DEPENDS_virtclass-native += "libffi-native python-argparse-native"
DEPENDS_virtclass-nativesdk += "libffi-nativesdk python-argparse-native zlib-nativesdk"
-SHRT_VER = "${@bb.data.getVar('PV',d,1).split('.')[0]}.${@bb.data.getVar('PV',d,1).split('.')[1]}"
+SHRT_VER = "${@d.getVar('PV',1).split('.')[0]}.${@d.getVar('PV',1).split('.')[1]}"
QSORT_PATCH = "file://remove.test.for.qsort_r.patch"
QSORT_PATCH_virtclass-native = ""
diff --git a/meta/recipes-core/libxml/libxml2.inc b/meta/recipes-core/libxml/libxml2.inc
index 6f79333b911..1f7a4e6ec8e 100644
--- a/meta/recipes-core/libxml/libxml2.inc
+++ b/meta/recipes-core/libxml/libxml2.inc
@@ -33,8 +33,8 @@ export LDFLAGS += "-ldl"
python populate_packages_prepend () {
# autonamer would call this libxml2-2, but we don't want that
- if bb.data.getVar('DEBIAN_NAMES', d, 1):
- bb.data.setVar('PKG_libxml2', '${MLPREFIX}libxml2', d)
+ if d.getVar('DEBIAN_NAMES', 1):
+ d.setVar('PKG_libxml2', '${MLPREFIX}libxml2')
}
PACKAGES += "${PN}-utils"
diff --git a/meta/recipes-core/tasks/task-base.bb b/meta/recipes-core/tasks/task-base.bb
index 99b7e17a692..e0960b5e0ed 100644
--- a/meta/recipes-core/tasks/task-base.bb
+++ b/meta/recipes-core/tasks/task-base.bb
@@ -126,17 +126,17 @@ python __anonymous () {
import bb
- distro_features = set(bb.data.getVar("DISTRO_FEATURES", d, 1).split())
- machine_features= set(bb.data.getVar("MACHINE_FEATURES", d, 1).split())
+ distro_features = set(d.getVar("DISTRO_FEATURES", 1).split())
+ machine_features= set(d.getVar("MACHINE_FEATURES", 1).split())
if "bluetooth" in distro_features and not "bluetooth" in machine_features and ("pcmcia" in machine_features or "pci" in machine_features or "usbhost" in machine_features):
- bb.data.setVar("ADD_BT", "task-base-bluetooth", d)
+ d.setVar("ADD_BT", "task-base-bluetooth")
if "wifi" in distro_features and not "wifi" in machine_features and ("pcmcia" in machine_features or "pci" in machine_features or "usbhost" in machine_features):
- bb.data.setVar("ADD_WIFI", "task-base-wifi", d)
+ d.setVar("ADD_WIFI", "task-base-wifi")
if "3g" in distro_features and not "3g" in machine_features and ("pcmcia" in machine_features or "pci" in machine_features or "usbhost" in machine_features):
- bb.data.setVar("ADD_3G", "task-base-3g", d)
+ d.setVar("ADD_3G", "task-base-3g")
}
#
diff --git a/meta/recipes-core/tasks/task-core-sdk.bb b/meta/recipes-core/tasks/task-core-sdk.bb
index 5743631787c..a74de01b07c 100644
--- a/meta/recipes-core/tasks/task-core-sdk.bb
+++ b/meta/recipes-core/tasks/task-core-sdk.bb
@@ -50,7 +50,7 @@ RDEPENDS_task-core-sdk = "\
#python generate_sdk_pkgs () {
# poky_pkgs = read_pkgdata('task-core', d)['PACKAGES']
-# pkgs = bb.data.getVar('PACKAGES', d, 1).split()
+# pkgs = d.getVar('PACKAGES', 1).split()
# for pkg in poky_pkgs.split():
# newpkg = pkg.replace('task-core', 'task-core-sdk')
#
@@ -79,9 +79,9 @@ RDEPENDS_task-core-sdk = "\
# if packaged('%s-dev' % name, d):
# rreclist.append('%s-dev' % name)
#
-# oldrrec = bb.data.getVar('RRECOMMENDS_%s' % newpkg, d) or ''
+# oldrrec = d.getVar('RRECOMMENDS_%s' % newpkg) or ''
# bb.data.setVar('RRECOMMENDS_%s' % newpkg, oldrrec + ' ' + ' '.join(rreclist), d)
-# # bb.note('RRECOMMENDS_%s = "%s"' % (newpkg, bb.data.getVar('RRECOMMENDS_%s' % newpkg, d)))
+# # bb.note('RRECOMMENDS_%s = "%s"' % (newpkg, d.getVar('RRECOMMENDS_%s' % newpkg)))
#
# # bb.note('pkgs is %s' % pkgs)
# bb.data.setVar('PACKAGES', ' '.join(pkgs), d)
diff --git a/meta/recipes-core/uclibc/uclibc-config.inc b/meta/recipes-core/uclibc/uclibc-config.inc
index 697164c0089..a30188d209d 100644
--- a/meta/recipes-core/uclibc/uclibc-config.inc
+++ b/meta/recipes-core/uclibc/uclibc-config.inc
@@ -35,7 +35,7 @@ def map_uclibc_arch(a, d):
"""Return the uClibc architecture for the given TARGET_ARCH."""
import re
- valid_archs = bb.data.getVar('valid_archs', d, 1).split()
+ valid_archs = d.getVar('valid_archs', 1).split()
if re.match('^(arm|sa110).*', a): return 'arm'
elif re.match('^(i.86|athlon)$', a): return 'i386'
@@ -50,14 +50,14 @@ def map_uclibc_arch(a, d):
else:
bb.error("cannot map '%s' to a uClibc architecture" % a)
-export UCLIBC_ARCH = "${@map_uclibc_arch(bb.data.getVar('TARGET_ARCH', d, 1), d)}"
+export UCLIBC_ARCH = "${@map_uclibc_arch(d.getVar('TARGET_ARCH', 1), d)}"
def map_uclibc_abi(o, d):
"""Return the uClibc ABI for the given TARGET_OS."""
import re
- arch = bb.data.getVar('TARGET_ARCH', d, 1)
- if map_uclibc_arch(bb.data.getVar('TARGET_ARCH', d, 1), d) == "arm":
+ arch = d.getVar('TARGET_ARCH', 1)
+ if map_uclibc_arch(d.getVar('TARGET_ARCH', 1), d) == "arm":
if re.match('.*eabi$', o): return 'ARM_EABI'
else: return 'ARM_OABI'
# FIXME: This is inaccurate! Handle o32, n32, n64
@@ -65,7 +65,7 @@ def map_uclibc_abi(o, d):
elif re.match('^mips.*', arch): return 'MIPS_O32_ABI'
return ""
-export UCLIBC_ABI = "${@map_uclibc_abi(bb.data.getVar('TARGET_OS', d, 1), d)}"
+export UCLIBC_ABI = "${@map_uclibc_abi(d.getVar('TARGET_OS', 1), d)}"
def map_uclibc_endian(a, d):
"""Return the uClibc endianess for the given TARGET_ARCH."""
@@ -79,7 +79,7 @@ def map_uclibc_endian(a, d):
return 'BIG'
return 'LITTLE'
-export UCLIBC_ENDIAN = "${@map_uclibc_endian(bb.data.getVar('TARGET_ARCH', d, 1), d)}"
+export UCLIBC_ENDIAN = "${@map_uclibc_endian(d.getVar('TARGET_ARCH', 1), d)}"
# internal helper
def uclibc_cfg(feature, features, tokens, cnf, rem):
@@ -94,8 +94,8 @@ def uclibc_cfg(feature, features, tokens, cnf, rem):
# Map distro and machine features to config settings
def features_to_uclibc_settings(d):
cnf, rem = ([], [])
- distro_features = bb.data.getVar('DISTRO_FEATURES', d, True).split()
- machine_features = bb.data.getVar('MACHINE_FEATURES', d, True).split()
+ distro_features = d.getVar('DISTRO_FEATURES', True).split()
+ machine_features = d.getVar('MACHINE_FEATURES', True).split()
uclibc_cfg('ipv4', distro_features, 'UCLIBC_HAS_IPV4', cnf, rem)
uclibc_cfg('ipv6', distro_features, 'UCLIBC_HAS_IPV6', cnf, rem)
uclibc_cfg('largefile', distro_features, 'UCLIBC_HAS_LFS', cnf, rem)
diff --git a/meta/recipes-core/uclibc/uclibc.inc b/meta/recipes-core/uclibc/uclibc.inc
index 222c34f383c..8438f254505 100644
--- a/meta/recipes-core/uclibc/uclibc.inc
+++ b/meta/recipes-core/uclibc/uclibc.inc
@@ -125,9 +125,9 @@ configmangle = '/^KERNEL_HEADERS/d; \
/^SHARED_LIB_LOADER_PREFIX/d; \
/^UCLIBC_EXTRA_CFLAGS/d; \
s,.*UCLIBC_HAS_WCHAR.*,UCLIBC_HAS_WCHAR=y,g; \
- ${@["","s,.*COMPILE_IN_THUMB_MODE.*,COMPILE_IN_THUMB_MODE=y,;"][bb.data.getVar("ARM_INSTRUCTION_SET", d, 1) != "arm"]} \
- ${@["","s,.*UCLIBC_HAS_LOCALE.*,UCLIBC_HAS_LOCALE=y,;"][bb.data.getVar("USE_NLS", d, 1) == "yes"]} \
- ${@["","s,.*LDSO_GNU_HASH_SUPPORT.*,# LDSO_GNU_HASH_SUPPORT is not set,;"][bb.data.getVar("TARGET_ARCH", d, 1) in ['mips', 'mipsel', 'mips64', 'mips64el', 'avr32']]} \
+ ${@["","s,.*COMPILE_IN_THUMB_MODE.*,COMPILE_IN_THUMB_MODE=y,;"][d.getVar("ARM_INSTRUCTION_SET", 1) != "arm"]} \
+ ${@["","s,.*UCLIBC_HAS_LOCALE.*,UCLIBC_HAS_LOCALE=y,;"][d.getVar("USE_NLS", 1) == "yes"]} \
+ ${@["","s,.*LDSO_GNU_HASH_SUPPORT.*,# LDSO_GNU_HASH_SUPPORT is not set,;"][d.getVar("TARGET_ARCH", 1) in ['mips', 'mipsel', 'mips64', 'mips64el', 'avr32']]} \
/^CROSS/d; \
/^TARGET_ARCH=/d; \
/^TARGET_/s,^\([^=]*\).*,# \1 is not set,g; \
@@ -139,7 +139,7 @@ OE_FEATURES := "${@features_to_uclibc_conf(d)}"
OE_DEL := "${@features_to_uclibc_del(d)}"
python () {
if "${OE_DEL}":
- bb.data.setVar('configmangle_append', "${OE_DEL}" + "\n", d)
+ d.setVar('configmangle_append', "${OE_DEL}" + "\n")
if "${OE_FEATURES}":
bb.data.setVar('configmangle_append',
"/^### DISTRO FEATURES$/a\\\n%s\n\n" %
@@ -161,7 +161,7 @@ python () {
("${UCLIBC_ARCH}", "${UCLIBC_ARCH}"),
d)
bb.data.setVar('configmangle_append',
- "/^### FPU$/a\\\n%s\n\n" % (["UCLIBC_HAS_FPU=y","# UCLIBC_HAS_FPU is not set"][bb.data.getVar('TARGET_FPU', d, 1) in [ 'soft' ]]), d)
+ "/^### FPU$/a\\\n%s\n\n" % (["UCLIBC_HAS_FPU=y","# UCLIBC_HAS_FPU is not set"][d.getVar('TARGET_FPU', 1) in [ 'soft' ]]), d)
if "${UCLIBC_ENDIAN}":
bb.data.setVar('configmangle_append',
"/^### ABI$/a\\\nARCH_WANTS_%s_ENDIAN=y\n\n" % ("${UCLIBC_ENDIAN}"),
diff --git a/meta/recipes-devtools/apt/apt-native.inc b/meta/recipes-devtools/apt/apt-native.inc
index b16f99e93c5..d4b207b9862 100644
--- a/meta/recipes-devtools/apt/apt-native.inc
+++ b/meta/recipes-devtools/apt/apt-native.inc
@@ -13,14 +13,14 @@ python do_install () {
}
python do_install_config () {
- indir = os.path.dirname(bb.data.getVar('FILE',d,1))
+ indir = os.path.dirname(d.getVar('FILE',1))
infile = file(os.path.join(indir, 'files', 'apt.conf'), 'r')
data = infile.read()
infile.close()
data = bb.data.expand(data, d)
- outdir = os.path.join(bb.data.getVar('D', d, 1), bb.data.getVar('sysconfdir', d, 1), 'apt')
+ outdir = os.path.join(d.getVar('D', 1), d.getVar('sysconfdir', 1), 'apt')
if not os.path.exists(outdir):
os.makedirs(outdir)
outpath = os.path.join(outdir, 'apt.conf.sample')
diff --git a/meta/recipes-devtools/apt/apt-package.inc b/meta/recipes-devtools/apt/apt-package.inc
index 2e3be3885bb..dde916e3dac 100644
--- a/meta/recipes-devtools/apt/apt-package.inc
+++ b/meta/recipes-devtools/apt/apt-package.inc
@@ -59,15 +59,15 @@ FILES_${PN} = "${bindir}/apt-cdrom ${bindir}/apt-get \
${localstatedir} ${sysconfdir} \
${libdir}/dpkg"
FILES_${PN}-utils = "${bindir}/apt-sortpkgs ${bindir}/apt-extracttemplates"
-FILES_${PN}-doc = "${@get_files_apt_doc(d, bb, bb.data.getVar('apt-manpages', d, 1))} \
+FILES_${PN}-doc = "${@get_files_apt_doc(d, bb, d.getVar('apt-manpages', 1))} \
${docdir}/apt"
-FILES_${PN}-utils-doc = "${@get_files_apt_doc(d, bb, bb.data.getVar('apt-utils-manpages', d, 1))}"
+FILES_${PN}-utils-doc = "${@get_files_apt_doc(d, bb, d.getVar('apt-utils-manpages', 1))}"
FILES_${PN}-dev = "${libdir}/libapt*.so ${includedir}"
do_install () {
set -x
- ${@get_commands_apt_doc(d, bb, bb.data.getVar('apt-manpages', d, 1))}
- ${@get_commands_apt_doc(d, bb, bb.data.getVar('apt-utils-manpages', d, 1))}
+ ${@get_commands_apt_doc(d, bb, d.getVar('apt-manpages', 1))}
+ ${@get_commands_apt_doc(d, bb, d.getVar('apt-utils-manpages', 1))}
install -d ${D}${bindir}
install -m 0755 bin/apt-cdrom ${D}${bindir}/
install -m 0755 bin/apt-get ${D}${bindir}/
diff --git a/meta/recipes-devtools/automake/automake.inc b/meta/recipes-devtools/automake/automake.inc
index f217e1432b1..370a9725663 100644
--- a/meta/recipes-devtools/automake/automake.inc
+++ b/meta/recipes-devtools/automake/automake.inc
@@ -10,6 +10,6 @@ SRC_URI = "${GNU_MIRROR}/automake/automake-${PV}.tar.bz2 "
inherit autotools
-export AUTOMAKE = "${@bb.which('automake', bb.data.getVar('PATH', d, 1))}"
+export AUTOMAKE = "${@bb.which('automake', d.getVar('PATH', 1))}"
FILES_${PN} += "${datadir}/automake* ${datadir}/aclocal*"
diff --git a/meta/recipes-devtools/cmake/cmake.inc b/meta/recipes-devtools/cmake/cmake.inc
index ec37a101a38..8433c533b74 100644
--- a/meta/recipes-devtools/cmake/cmake.inc
+++ b/meta/recipes-devtools/cmake/cmake.inc
@@ -11,7 +11,7 @@ LIC_FILES_CHKSUM = "file://Copyright.txt;md5=f372516292ff7c33337bf16a74a5f9a8 \
INC_PR = "r1"
-CMAKE_MAJOR_VERSION = "${@'.'.join(bb.data.getVar('PV',d,1).split('.')[0:2])}"
+CMAKE_MAJOR_VERSION = "${@'.'.join(d.getVar('PV',1).split('.')[0:2])}"
SRC_URI = "http://www.cmake.org/files/v${CMAKE_MAJOR_VERSION}/cmake-${PV}.tar.gz \
file://support-oe-qt4-tools-names.patch"
diff --git a/meta/recipes-devtools/cmake/cmake_2.8.5.bb b/meta/recipes-devtools/cmake/cmake_2.8.5.bb
index 8e040728c9a..3e2a218385a 100644
--- a/meta/recipes-devtools/cmake/cmake_2.8.5.bb
+++ b/meta/recipes-devtools/cmake/cmake_2.8.5.bb
@@ -13,8 +13,8 @@ SRC_URI[sha256sum] = "5e18bff75f01656c64f553412a8905527e1b85efaf3163c6fb81ea5aac
# Strip ${prefix} from ${docdir}, set result into docdir_stripped
python () {
- prefix=bb.data.getVar("prefix", d, 1)
- docdir=bb.data.getVar("docdir", d, 1)
+ prefix=d.getVar("prefix", 1)
+ docdir=d.getVar("docdir", 1)
if not docdir.startswith(prefix):
raise bb.build.FuncFailed('docdir must contain prefix as its prefix')
@@ -23,7 +23,7 @@ python () {
if len(docdir_stripped) > 0 and docdir_stripped[0] == '/':
docdir_stripped = docdir_stripped[1:]
- bb.data.setVar("docdir_stripped", docdir_stripped, d)
+ d.setVar("docdir_stripped", docdir_stripped)
}
EXTRA_OECMAKE=" \
diff --git a/meta/recipes-devtools/gcc/gcc-common.inc b/meta/recipes-devtools/gcc/gcc-common.inc
index f83f4da7980..69e0213e9f5 100644
--- a/meta/recipes-devtools/gcc/gcc-common.inc
+++ b/meta/recipes-devtools/gcc/gcc-common.inc
@@ -7,17 +7,17 @@ NATIVEDEPS = ""
inherit autotools gettext
-FILESDIR = "${@os.path.dirname(bb.data.getVar('FILE',d,1))}/gcc-${PV}"
+FILESDIR = "${@os.path.dirname(d.getVar('FILE',1))}/gcc-${PV}"
def get_gcc_fpu_setting(bb, d):
- if bb.data.getVar('TARGET_FPU', d, 1) in [ 'soft' ]:
+ if d.getVar('TARGET_FPU', 1) in [ 'soft' ]:
return "--with-float=soft"
- if bb.data.getVar('TARGET_FPU', d, 1) in [ 'ppc-efd' ]:
+ if d.getVar('TARGET_FPU', 1) in [ 'ppc-efd' ]:
return "--enable-e500_double"
return ""
def get_gcc_mips_plt_setting(bb, d):
- if bb.data.getVar('TARGET_ARCH', d, 1) in [ 'mips', 'mipsel' ] and 'mplt' in bb.data.getVar('DISTRO_FEATURES',d,1).split() :
+ if d.getVar('TARGET_ARCH', 1) in [ 'mips', 'mipsel' ] and 'mplt' in d.getVar('DISTRO_FEATURES',1).split() :
return "--with-mips-plt"
return ""
diff --git a/meta/recipes-devtools/gcc/gcc-configure-common.inc b/meta/recipes-devtools/gcc/gcc-configure-common.inc
index 2ddc3d7c9f2..ae23e8ee49c 100644
--- a/meta/recipes-devtools/gcc/gcc-configure-common.inc
+++ b/meta/recipes-devtools/gcc/gcc-configure-common.inc
@@ -27,7 +27,7 @@ EXTRA_OECONF_INTERMEDIATE ?= ""
GCCMULTILIB = "--disable-multilib"
-EXTRA_OECONF = "${@['--enable-clocale=generic', ''][bb.data.getVar('USE_NLS', d, 1) != 'no']} \
+EXTRA_OECONF = "${@['--enable-clocale=generic', ''][d.getVar('USE_NLS', 1) != 'no']} \
--with-gnu-ld \
--enable-shared \
--enable-languages=${LANGUAGES} \
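
Several of the expressions converted in this patch, such as the EXTRA_OECONF assignment above, rely on an inline-Python idiom: a two-element list indexed by a boolean, which Python coerces to 0 (False) or 1 (True). A standalone sketch of the same trick, with use_nls standing in for the expanded USE_NLS value:

    # picks element 0 when the condition is False, element 1 when True
    use_nls = 'no'
    flag = ['--enable-clocale=generic', ''][use_nls != 'no']
    # use_nls == 'no' -> False -> index 0 -> '--enable-clocale=generic'
    # otherwise       -> True  -> index 1 -> '' (no extra configure flag)
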
diff --git a/meta/recipes-devtools/gnu-config/gnu-config_20080123.bb b/meta/recipes-devtools/gnu-config/gnu-config_20080123.bb
index de07f434b8f..f93d3421984 100644
--- a/meta/recipes-devtools/gnu-config/gnu-config_20080123.bb
+++ b/meta/recipes-devtools/gnu-config/gnu-config_20080123.bb
@@ -8,7 +8,7 @@ DEPENDS_virtclass-native = "perl-native-runtime"
INHIBIT_DEFAULT_DEPS = "1"
-FIXEDSRCDATE = "${@bb.data.getVar('FILE', d, 1).split('_')[-1].split('.')[0]}"
+FIXEDSRCDATE = "${@d.getVar('FILE', 1).split('_')[-1].split('.')[0]}"
PV = "0.1+cvs${FIXEDSRCDATE}"
PR = "r4"
diff --git a/meta/recipes-devtools/intltool/intltool.inc b/meta/recipes-devtools/intltool/intltool.inc
index 23a1ee97748..8d983dfc6c9 100644
--- a/meta/recipes-devtools/intltool/intltool.inc
+++ b/meta/recipes-devtools/intltool/intltool.inc
@@ -2,7 +2,7 @@ DESCRIPTION = "Utility scripts for internationalizing XML"
SECTION = "devel"
LICENSE = "GPLv2"
-URLV="${@'.'.join(bb.data.getVar('PV',d,1).split('.')[0:2])}"
+URLV="${@'.'.join(d.getVar('PV',1).split('.')[0:2])}"
SRC_URI = "${GNOME_MIRROR}/intltool/${URLV}/intltool-${PV}.tar.bz2"
S = "${WORKDIR}/intltool-${PV}"
diff --git a/meta/recipes-devtools/opkg/opkg.inc b/meta/recipes-devtools/opkg/opkg.inc
index a6492139166..3e350e8197d 100644
--- a/meta/recipes-devtools/opkg/opkg.inc
+++ b/meta/recipes-devtools/opkg/opkg.inc
@@ -12,7 +12,7 @@ DEPENDS_virtclass-nativesdk = "curl-nativesdk"
PE = "1"
-FILESDIR = "${@os.path.dirname(bb.data.getVar('FILE',d,1))}/opkg"
+FILESDIR = "${@os.path.dirname(d.getVar('FILE',1))}/opkg"
# Werror gives all kinds of bounds issues with gcc 4.3.3
do_configure_prepend() {
diff --git a/meta/recipes-devtools/perl/perl_5.14.2.bb b/meta/recipes-devtools/perl/perl_5.14.2.bb
index 788962638d3..e864f1e4366 100644
--- a/meta/recipes-devtools/perl/perl_5.14.2.bb
+++ b/meta/recipes-devtools/perl/perl_5.14.2.bb
@@ -285,7 +285,7 @@ FILES_perl-module-unicore += "${libdir}/perl/${PV}/unicore"
# packages (actually the non modules packages and not created too)
ALLOW_EMPTY_perl-modules = "1"
PACKAGES_append = " perl-modules "
-RRECOMMENDS_perl-modules = "${@bb.data.getVar('PACKAGES', d, 1).replace('${PN}-modules ', '').replace('${PN}-dbg ', '').replace('${PN}-misc ', '').replace('${PN}-dev ', '').replace('${PN}-pod ', '').replace('${PN}-doc ', '')}"
+RRECOMMENDS_perl-modules = "${@d.getVar('PACKAGES', 1).replace('${PN}-modules ', '').replace('${PN}-dbg ', '').replace('${PN}-misc ', '').replace('${PN}-dev ', '').replace('${PN}-pod ', '').replace('${PN}-doc ', '')}"
python populate_packages_prepend () {
libdir = bb.data.expand('${libdir}/perl/${PV}', d)
diff --git a/meta/recipes-devtools/python/python-pygobject_2.27.91.bb b/meta/recipes-devtools/python/python-pygobject_2.27.91.bb
index da4faec1cad..efc06b45c85 100644
--- a/meta/recipes-devtools/python/python-pygobject_2.27.91.bb
+++ b/meta/recipes-devtools/python/python-pygobject_2.27.91.bb
@@ -7,7 +7,7 @@ DEPENDS_virtclass-native = "glib-2.0-native"
RDEPENDS_virtclass-native = ""
PR = "r3"
-MAJ_VER = "${@bb.data.getVar('PV',d,1).split('.')[0]}.${@bb.data.getVar('PV',d,1).split('.')[1]}"
+MAJ_VER = "${@d.getVar('PV',1).split('.')[0]}.${@d.getVar('PV',1).split('.')[1]}"
SRC_URI = "${GNOME_MIRROR}/pygobject/${MAJ_VER}/pygobject-${PV}.tar.bz2"
diff --git a/meta/recipes-devtools/qemu/qemu-targets.inc b/meta/recipes-devtools/qemu/qemu-targets.inc
index 550a7fe1b5a..1970dda55ba 100644
--- a/meta/recipes-devtools/qemu/qemu-targets.inc
+++ b/meta/recipes-devtools/qemu/qemu-targets.inc
@@ -4,7 +4,7 @@
def get_qemu_target_list(d):
import bb
- archs = bb.data.getVar('QEMU_TARGETS', d, True).split()
+ archs = d.getVar('QEMU_TARGETS', True).split()
targets = ""
for arch in ['mips64', 'mips64el', 'ppcemb']:
if arch in archs:
diff --git a/meta/recipes-devtools/unfs-server/unfs-server_2.1+2.2beta47.bb b/meta/recipes-devtools/unfs-server/unfs-server_2.1+2.2beta47.bb
index 29c7052056b..eb80b54fc4d 100644
--- a/meta/recipes-devtools/unfs-server/unfs-server_2.1+2.2beta47.bb
+++ b/meta/recipes-devtools/unfs-server/unfs-server_2.1+2.2beta47.bb
@@ -70,7 +70,7 @@ do_configure_prepend () {
python __anonymous () {
import re
- pn = bb.data.getVar("PN", d, 1)
+ pn = d.getVar("PN", 1)
if not pn.endswith('-native') and not pn.endswith('-nativesdk'):
raise bb.parse.SkipPackage("unfs-server is intended for native/nativesdk builds only")
}
diff --git a/meta/recipes-extended/cups/cups14.inc b/meta/recipes-extended/cups/cups14.inc
index 48f493d6c73..8c01caf55e9 100644
--- a/meta/recipes-extended/cups/cups14.inc
+++ b/meta/recipes-extended/cups/cups14.inc
@@ -58,7 +58,7 @@ fakeroot do_install () {
python do_package_append() {
# Change permissions back to the way they were; they probably had a reason...
- workdir = bb.data.getVar('WORKDIR', d, 1)
+ workdir = d.getVar('WORKDIR', 1)
os.system('chmod 0511 %s/install/cups/var/run/cups/certs' % workdir)
}
diff --git a/meta/recipes-extended/lsof/lsof_4.85.bb b/meta/recipes-extended/lsof/lsof_4.85.bb
index 96bc0c54bc1..8f25e24a506 100644
--- a/meta/recipes-extended/lsof/lsof_4.85.bb
+++ b/meta/recipes-extended/lsof/lsof_4.85.bb
@@ -17,10 +17,10 @@ LIC_FILES_CHKSUM = "file://${S}/00README;beginline=645;endline=679;md5=e0108f781
python do_unpack () {
bb.build.exec_func('base_do_unpack', d)
- src_uri = bb.data.getVar('SRC_URI', d)
- bb.data.setVar('SRC_URI', '${LOCALSRC}', d)
+ src_uri = d.getVar('SRC_URI')
+ d.setVar('SRC_URI', '${LOCALSRC}')
bb.build.exec_func('base_do_unpack', d)
- bb.data.setVar('SRC_URI', src_uri, d)
+ d.setVar('SRC_URI', src_uri)
}
export LSOF_OS = "${TARGET_OS}"
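
The do_unpack override above is worth a note: it runs the stock unpack twice, temporarily swapping SRC_URI so a second, local source is also fetched into the work directory. A sketch of the flow, assuming a BitBake python task where bb and d are available and LOCALSRC is defined elsewhere in the recipe:

    bb.build.exec_func('base_do_unpack', d)   # unpack the normal SRC_URI
    saved = d.getVar('SRC_URI')               # keep the unexpanded value
    d.setVar('SRC_URI', '${LOCALSRC}')        # point at the extra source
    bb.build.exec_func('base_do_unpack', d)   # unpack that one too
    d.setVar('SRC_URI', saved)                # restore for later tasks
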
diff --git a/meta/recipes-extended/pam/libpam_1.1.4.bb b/meta/recipes-extended/pam/libpam_1.1.4.bb
index 868bffd9607..afe17cc932b 100644
--- a/meta/recipes-extended/pam/libpam_1.1.4.bb
+++ b/meta/recipes-extended/pam/libpam_1.1.4.bb
@@ -54,10 +54,10 @@ python populate_packages_prepend () {
def pam_plugin_append_file(pn, dir, file):
nf = os.path.join(dir, file)
- of = bb.data.getVar('FILES_' + pn, d, True)
+ of = d.getVar('FILES_' + pn, True)
if of:
nf = of + " " + nf
- bb.data.setVar('FILES_' + pn, nf, d)
+ d.setVar('FILES_' + pn, nf)
dvar = bb.data.expand('${WORKDIR}/package', d, True)
pam_libdir = bb.data.expand('${base_libdir}/security', d)
diff --git a/meta/recipes-extended/zip/zip.inc b/meta/recipes-extended/zip/zip.inc
index 9550447264b..8cce146b18d 100644
--- a/meta/recipes-extended/zip/zip.inc
+++ b/meta/recipes-extended/zip/zip.inc
@@ -5,7 +5,7 @@ SECTION = "console/utils"
LICENSE = "Info-ZIP"
LIC_FILES_CHKSUM = "file://LICENSE;md5=04d43c5d70b496c032308106e26ae17d"
-SRC_URI = "ftp://ftp.info-zip.org/pub/infozip/src/zip${@bb.data.getVar('PV',d,1).replace('.', '')}.tgz"
+SRC_URI = "ftp://ftp.info-zip.org/pub/infozip/src/zip${@d.getVar('PV',1).replace('.', '')}.tgz"
EXTRA_OEMAKE = "'CC=${CC}' 'BIND=${CC}' 'AS=${CC} -c' 'CPP=${CPP}' \
'CFLAGS=-I. -DUNIX ${CFLAGS}' 'INSTALL=install' \
diff --git a/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.24.0.bb b/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.24.0.bb
index 9fabbe2c46a..b33eca96825 100644
--- a/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.24.0.bb
+++ b/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.24.0.bb
@@ -56,7 +56,7 @@ PACKAGES_DYNAMIC += "gdk-pixbuf-loader-*"
PACKAGES_DYNAMIC_virtclass-native = ""
python populate_packages_prepend () {
- postinst_pixbufloader = bb.data.getVar("postinst_pixbufloader", d, 1)
+ postinst_pixbufloader = d.getVar("postinst_pixbufloader", 1)
loaders_root = bb.data.expand('${libdir}/gdk-pixbuf-2.0/${LIBV}/loaders', d)
diff --git a/meta/recipes-gnome/gtk+/gtk+_2.12.7.bb b/meta/recipes-gnome/gtk+/gtk+_2.12.7.bb
index 3b4acd9ce91..c6077ecb3b5 100644
--- a/meta/recipes-gnome/gtk+/gtk+_2.12.7.bb
+++ b/meta/recipes-gnome/gtk+/gtk+_2.12.7.bb
@@ -34,8 +34,8 @@ PACKAGES_DYNAMIC += "gdk-pixbuf-loader-* gtk-immodule-* gtk-printbackend-*"
python populate_packages_prepend () {
import os.path
- prologue = bb.data.getVar("postinst_prologue", d, 1)
- postinst_pixbufloader = bb.data.getVar("postinst_pixbufloader", d, 1)
+ prologue = d.getVar("postinst_prologue", 1)
+ postinst_pixbufloader = d.getVar("postinst_pixbufloader", 1)
gtk_libdir = bb.data.expand('${libdir}/gtk-2.0/${LIBV}', d)
loaders_root = os.path.join(gtk_libdir, 'loaders')
@@ -46,6 +46,6 @@ python populate_packages_prepend () {
do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules')
do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s')
- if (bb.data.getVar('DEBIAN_NAMES', d, 1)):
- bb.data.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0', d)
+ if (d.getVar('DEBIAN_NAMES', 1)):
+ d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0')
}
diff --git a/meta/recipes-gnome/gtk+/gtk+_2.16.6.bb b/meta/recipes-gnome/gtk+/gtk+_2.16.6.bb
index 371eda38145..5fcb576bbe4 100644
--- a/meta/recipes-gnome/gtk+/gtk+_2.16.6.bb
+++ b/meta/recipes-gnome/gtk+/gtk+_2.16.6.bb
@@ -34,8 +34,8 @@ PACKAGES_DYNAMIC += "gdk-pixbuf-loader-* gtk-immodule-* gtk-printbackend-*"
python populate_packages_prepend () {
import os.path
- prologue = bb.data.getVar("postinst_prologue", d, 1)
- postinst_pixbufloader = bb.data.getVar("postinst_pixbufloader", d, 1)
+ prologue = d.getVar("postinst_prologue", 1)
+ postinst_pixbufloader = d.getVar("postinst_pixbufloader", 1)
gtk_libdir = bb.data.expand('${libdir}/gtk-2.0/${LIBV}', d)
loaders_root = os.path.join(gtk_libdir, 'loaders')
@@ -46,6 +46,6 @@ python populate_packages_prepend () {
do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules')
do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s')
- if (bb.data.getVar('DEBIAN_NAMES', d, 1)):
- bb.data.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0', d)
+ if (d.getVar('DEBIAN_NAMES', 1)):
+ d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0')
}
diff --git a/meta/recipes-gnome/gtk+/gtk+_2.24.6.bb b/meta/recipes-gnome/gtk+/gtk+_2.24.6.bb
index 01f4a11bcad..cd5c8cb1161 100644
--- a/meta/recipes-gnome/gtk+/gtk+_2.24.6.bb
+++ b/meta/recipes-gnome/gtk+/gtk+_2.24.6.bb
@@ -40,7 +40,7 @@ PACKAGES_DYNAMIC += "gtk-immodule-* gtk-printbackend-*"
python populate_packages_prepend () {
import os.path
- prologue = bb.data.getVar("postinst_prologue", d, 1)
+ prologue = d.getVar("postinst_prologue", 1)
gtk_libdir = bb.data.expand('${libdir}/gtk-2.0/${LIBV}', d)
immodules_root = os.path.join(gtk_libdir, 'immodules')
@@ -49,6 +49,6 @@ python populate_packages_prepend () {
do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules')
do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s')
- if (bb.data.getVar('DEBIAN_NAMES', d, 1)):
- bb.data.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0', d)
+ if (d.getVar('DEBIAN_NAMES', 1)):
+ d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0')
}
diff --git a/meta/recipes-gnome/gtk-engines/gtk-engines_2.20.2.bb b/meta/recipes-gnome/gtk-engines/gtk-engines_2.20.2.bb
index bbf52be266e..233a6ee7736 100644
--- a/meta/recipes-gnome/gtk-engines/gtk-engines_2.20.2.bb
+++ b/meta/recipes-gnome/gtk-engines/gtk-engines_2.20.2.bb
@@ -31,8 +31,8 @@ inherit gnome
python populate_packages_prepend() {
import os.path
- engines_root = os.path.join(bb.data.getVar('libdir', d, 1), "gtk-2.0/2.10.0/engines")
- themes_root = os.path.join(bb.data.getVar('datadir', d, 1), "themes")
+ engines_root = os.path.join(d.getVar('libdir', 1), "gtk-2.0/2.10.0/engines")
+ themes_root = os.path.join(d.getVar('datadir', 1), "themes")
do_split_packages(d, engines_root, '^lib(.*)\.so$', 'gtk-engine-%s', 'GTK %s theme engine', extra_depends='')
do_split_packages(d, themes_root, '(.*)', 'gtk-theme-%s', 'GTK theme %s', allow_dirs=True, extra_depends='')
diff --git a/meta/recipes-graphics/cairo/cairo-fpu.inc b/meta/recipes-graphics/cairo/cairo-fpu.inc
index bdaf789799f..8c0ecfde941 100644
--- a/meta/recipes-graphics/cairo/cairo-fpu.inc
+++ b/meta/recipes-graphics/cairo/cairo-fpu.inc
@@ -1,6 +1,6 @@
def get_cairo_fpu_setting(bb, d):
- if bb.data.getVar('TARGET_FPU', d, 1) in [ 'soft' ]:
+ if d.getVar('TARGET_FPU', 1) in [ 'soft' ]:
return "--disable-some-floating-point"
return ""
diff --git a/meta/recipes-graphics/clutter/clutter-fpu.inc b/meta/recipes-graphics/clutter/clutter-fpu.inc
index 21a16feac21..dfa933de5c7 100644
--- a/meta/recipes-graphics/clutter/clutter-fpu.inc
+++ b/meta/recipes-graphics/clutter/clutter-fpu.inc
@@ -1,6 +1,6 @@
def get_clutter_fpu_setting(bb, d):
- if bb.data.getVar('TARGET_FPU', d, 1) in [ 'soft' ]:
+ if d.getVar('TARGET_FPU', 1) in [ 'soft' ]:
return "--without-fpu"
return ""
diff --git a/meta/recipes-graphics/fontconfig/fontconfig-native_2.8.0.bb b/meta/recipes-graphics/fontconfig/fontconfig-native_2.8.0.bb
index 03a1bc86c22..0e213909e35 100644
--- a/meta/recipes-graphics/fontconfig/fontconfig-native_2.8.0.bb
+++ b/meta/recipes-graphics/fontconfig/fontconfig-native_2.8.0.bb
@@ -3,7 +3,7 @@ inherit native
DEPENDS = "freetype-native expat-native zlib-native"
EXTRA_OEMAKE = ""
-EXTRA_OECONF = "${@[' --disable-docs',' --disable-docs --with-freetype-config=%s/freetype-config' % bb.data.getVar('STAGING_BINDIR', d, 1)][os.path.isfile('%s/freetype-config' % bb.data.getVar('STAGING_BINDIR', d, 1))]}"
+EXTRA_OECONF = "${@[' --disable-docs',' --disable-docs --with-freetype-config=%s/freetype-config' % d.getVar('STAGING_BINDIR', 1)][os.path.isfile('%s/freetype-config' % d.getVar('STAGING_BINDIR', 1))]}"
do_install_append () {
install -d ${D}${bindir}/
diff --git a/meta/recipes-graphics/mesa/mesa-dri.inc b/meta/recipes-graphics/mesa/mesa-dri.inc
index fcce25996d5..3687648999b 100644
--- a/meta/recipes-graphics/mesa/mesa-dri.inc
+++ b/meta/recipes-graphics/mesa/mesa-dri.inc
@@ -13,7 +13,7 @@ EXTRA_OECONF += "--with-driver=dri --disable-egl --disable-gallium --without-gal
python populate_packages_prepend() {
import os.path
- dri_drivers_root = os.path.join(bb.data.getVar('libdir', d, 1), "dri")
+ dri_drivers_root = os.path.join(d.getVar('libdir', 1), "dri")
do_split_packages(d, dri_drivers_root, '^(.*)_dri\.so$', 'mesa-dri-driver-%s', 'Mesa %s DRI driver', extra_depends='')
}
diff --git a/meta/recipes-graphics/pango/pango.inc b/meta/recipes-graphics/pango/pango.inc
index a8e99e2f6dd..6d94e028782 100644
--- a/meta/recipes-graphics/pango/pango.inc
+++ b/meta/recipes-graphics/pango/pango.inc
@@ -39,7 +39,7 @@ fi
}
python populate_packages_prepend () {
- prologue = bb.data.getVar("postinst_prologue", d, 1)
+ prologue = d.getVar("postinst_prologue", 1)
modules_root = bb.data.expand('${libdir}/pango/${LIBV}/modules', d)
diff --git a/meta/recipes-graphics/xorg-lib/libxft_2.2.0.bb b/meta/recipes-graphics/xorg-lib/libxft_2.2.0.bb
index 0fb10e6af04..0f9a1b3c9b5 100644
--- a/meta/recipes-graphics/xorg-lib/libxft_2.2.0.bb
+++ b/meta/recipes-graphics/xorg-lib/libxft_2.2.0.bb
@@ -26,8 +26,8 @@ XORG_PN = "libXft"
BBCLASSEXTEND = "native nativesdk"
python () {
- if bb.data.getVar('DEBIAN_NAMES', d, 1):
- bb.data.setVar('PKG_${PN}', '${MLPREFIX}libxft2', d)
+ if d.getVar('DEBIAN_NAMES', 1):
+ d.setVar('PKG_${PN}', '${MLPREFIX}libxft2')
}
FILES_${PN} = "${libdir}/lib*${SOLIBS}"
diff --git a/meta/recipes-kernel/linux/linux-dtb.inc b/meta/recipes-kernel/linux/linux-dtb.inc
index eb894562b39..7ec75848dd3 100644
--- a/meta/recipes-kernel/linux/linux-dtb.inc
+++ b/meta/recipes-kernel/linux/linux-dtb.inc
@@ -5,12 +5,12 @@ KERNEL_DEVICETREE_FLAGS = "-R 8 -p 0x3000"
python __anonymous () {
import bb
- devicetree = bb.data.getVar("KERNEL_DEVICETREE", d, 1) or ''
+ devicetree = d.getVar("KERNEL_DEVICETREE", 1) or ''
if devicetree:
- depends = bb.data.getVar("DEPENDS", d, 1)
- bb.data.setVar("DEPENDS", "%s dtc-native" % depends, d)
- packages = bb.data.getVar("PACKAGES", d, 1)
- bb.data.setVar("PACKAGES", "%s kernel-devicetree" % packages, d)
+ depends = d.getVar("DEPENDS", 1)
+ d.setVar("DEPENDS", "%s dtc-native" % depends)
+ packages = d.getVar("PACKAGES", 1)
+ d.setVar("PACKAGES", "%s kernel-devicetree" % packages)
}
do_install_append() {
diff --git a/meta/recipes-kernel/linux/linux-yocto-rt_2.6.34.bb b/meta/recipes-kernel/linux/linux-yocto-rt_2.6.34.bb
index d92a902a9a7..26423255c72 100644
--- a/meta/recipes-kernel/linux/linux-yocto-rt_2.6.34.bb
+++ b/meta/recipes-kernel/linux/linux-yocto-rt_2.6.34.bb
@@ -27,7 +27,7 @@ python __anonymous () {
import bb, re, string
kerntype = string.replace(bb.data.expand("${LINUX_KERNEL_TYPE}", d), "_", "-")
- bb.data.setVar("LINUX_KERNEL_TYPE_EXTENSION", kerntype, d)
+ d.setVar("LINUX_KERNEL_TYPE_EXTENSION", kerntype)
}
SRC_URI = "git://git.yoctoproject.org/linux-yocto-2.6.34.git;protocol=git;nocheckout=1;branch=${KBRANCH},wrs_meta;name=machine,meta"
diff --git a/meta/recipes-kernel/linux/linux-yocto_2.6.34.bb b/meta/recipes-kernel/linux/linux-yocto_2.6.34.bb
index 4891463c03f..cbcfa56fec3 100644
--- a/meta/recipes-kernel/linux/linux-yocto_2.6.34.bb
+++ b/meta/recipes-kernel/linux/linux-yocto_2.6.34.bb
@@ -33,7 +33,7 @@ python __anonymous () {
import bb, re, string
kerntype = string.replace(bb.data.expand("${LINUX_KERNEL_TYPE}", d), "_", "-")
- bb.data.setVar("LINUX_KERNEL_TYPE_EXTENSION", kerntype, d)
+ d.setVar("LINUX_KERNEL_TYPE_EXTENSION", kerntype)
}
SRC_URI = "git://git.yoctoproject.org/linux-yocto-2.6.34.git;protocol=git;nocheckout=1;branch=${KBRANCH},wrs_meta;name=machine,meta"
diff --git a/meta/recipes-multimedia/alsa/alsa-fpu.inc b/meta/recipes-multimedia/alsa/alsa-fpu.inc
index ebd3493e6ca..2a0c6b0194a 100644
--- a/meta/recipes-multimedia/alsa/alsa-fpu.inc
+++ b/meta/recipes-multimedia/alsa/alsa-fpu.inc
@@ -1,6 +1,6 @@
def get_alsa_fpu_setting(bb, d):
- if bb.data.getVar('TARGET_FPU', d, 1) in [ 'soft' ]:
+ if d.getVar('TARGET_FPU', 1) in [ 'soft' ]:
return "--with-softfloat"
return ""
diff --git a/meta/recipes-multimedia/gstreamer/gst-plugins-package.inc b/meta/recipes-multimedia/gstreamer/gst-plugins-package.inc
index 796df14a9e0..7949058b136 100644
--- a/meta/recipes-multimedia/gstreamer/gst-plugins-package.inc
+++ b/meta/recipes-multimedia/gstreamer/gst-plugins-package.inc
@@ -2,25 +2,25 @@ LIBV = "0.10"
python populate_packages_prepend () {
gst_libdir = bb.data.expand('${libdir}/gstreamer-${LIBV}', d)
- postinst = bb.data.getVar('plugin_postinst', d, 1)
+ postinst = d.getVar('plugin_postinst', 1)
glibdir = bb.data.expand('${libdir}', d)
do_split_packages(d, glibdir, '^lib(.*)\.so\.*', 'lib%s', 'gstreamer %s library', extra_depends='', allow_links=True)
do_split_packages(d, gst_libdir, 'libgst(.*)\.so$', bb.data.expand('${PN}-%s', d), 'GStreamer plugin for %s', postinst=postinst, extra_depends=bb.data.expand('${PN}',d))
do_split_packages(d, gst_libdir, 'libgst(.*)\.l?a$', bb.data.expand('${PN}-%s-dev', d), 'GStreamer plugin for %s (development files)', extra_depends=bb.data.expand('${PN}-dev',d))
- pn = bb.data.getVar('PN', d, 1)
+ pn = d.getVar('PN', 1)
metapkg = pn + '-meta'
- bb.data.setVar('ALLOW_EMPTY_' + metapkg, "1", d)
- bb.data.setVar('FILES_' + metapkg, "", d)
+ d.setVar('ALLOW_EMPTY_' + metapkg, "1")
+ d.setVar('FILES_' + metapkg, "")
blacklist = [ pn + '-locale', pn + '-dev', pn + '-dbg', pn + '-doc' ]
metapkg_rdepends = []
- packages = bb.data.getVar('PACKAGES', d, 1).split()
+ packages = d.getVar('PACKAGES', 1).split()
for pkg in packages[1:]:
if not pkg in blacklist and not pkg in metapkg_rdepends and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.count('locale') and not pkg.count('-static'):
metapkg_rdepends.append(pkg)
bb.data.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends), d)
- bb.data.setVar('DESCRIPTION_' + metapkg, pn + ' meta package', d)
+ d.setVar('DESCRIPTION_' + metapkg, pn + ' meta package')
}
ALLOW_EMPTY = "1"
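
The populate_packages_prepend above builds the common "-meta" pattern: an intentionally empty package whose only job is to pull in the real runtime packages through RDEPENDS. A condensed sketch under the same assumptions (datastore d, PACKAGES already filled in by do_split_packages); the real code above also blacklists the -doc and -locale packages:

    pn = d.getVar('PN', 1)
    metapkg = pn + '-meta'
    d.setVar('ALLOW_EMPTY_' + metapkg, '1')   # ships no files of its own
    d.setVar('FILES_' + metapkg, '')
    rdeps = [p for p in d.getVar('PACKAGES', 1).split()
             if not (p.endswith('-dev') or p.endswith('-dbg')
                     or 'locale' in p or '-static' in p)]
    d.setVar('RDEPENDS_' + metapkg, ' '.join(rdeps))
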
diff --git a/meta/recipes-multimedia/pulseaudio/pulseaudio.inc b/meta/recipes-multimedia/pulseaudio/pulseaudio.inc
index c581eae4c64..747b650e7ba 100644
--- a/meta/recipes-multimedia/pulseaudio/pulseaudio.inc
+++ b/meta/recipes-multimedia/pulseaudio/pulseaudio.inc
@@ -103,7 +103,7 @@ pkg_postrm_${PN}-server() {
}
python populate_packages_prepend() {
- #bb.data.setVar('PKG_pulseaudio', 'pulseaudio', d)
+ #d.setVar('PKG_pulseaudio', 'pulseaudio')
plugindir = bb.data.expand('${libdir}/pulse-${PV}/modules/', d)
do_split_packages(d, plugindir, '^module-(.*)\.so$', 'pulseaudio-module-%s', 'PulseAudio module for %s', extra_depends='' )
diff --git a/meta/recipes-qt/qt4/qt4.inc b/meta/recipes-qt/qt4/qt4.inc
index 1406a874ece..82ba6377a0f 100644
--- a/meta/recipes-qt/qt4/qt4.inc
+++ b/meta/recipes-qt/qt4/qt4.inc
@@ -39,8 +39,8 @@ python __anonymous () {
lib_packages = []
dev_packages = []
dbg_packages = []
- for name in bb.data.getVar("QT_LIB_NAMES", d, 1).split():
- pkg = bb.data.getVar("QT_BASE_LIB",d, True) + name.lower().replace("qt", "").replace("_", "-") + "4"
+ for name in d.getVar("QT_LIB_NAMES", 1).split():
+ pkg = d.getVar("QT_BASE_LIB", True) + name.lower().replace("qt", "").replace("_", "-") + "4"
# NOTE: the headers for QtAssistantClient are different
incname = name.replace("QtAssistantClient", "QtAssistant")
bb.data.setVar("FILES_%s" % pkg, "${libdir}/lib%(name)s${QT_LIBINFIX}.so.*" % locals(), d)
@@ -51,15 +51,15 @@ python __anonymous () {
${includedir}/${QT_DIR_NAME}/%(incname)s
${libdir}/pkgconfig/%(name)s${QT_LIBINFIX}.pc""" % locals(), d)
bb.data.setVar("FILES_%s-dbg" % pkg, "${libdir}/.debug/lib%(name)s${QT_LIBINFIX}.so*" % locals(), d)
- bb.data.setVar("RRECOMMENDS_%s-dbg" % pkg, "${PN}-dbg", d)
+ d.setVar("RRECOMMENDS_%s-dbg" % pkg, "${PN}-dbg")
lib_packages.append(pkg)
dev_packages.append("%s-dev" % pkg)
dbg_packages.append("%s-dbg" % pkg)
- for name in bb.data.getVar("OTHER_PACKAGES", d, 1).split():
+ for name in d.getVar("OTHER_PACKAGES", 1).split():
dbg_packages.append("%s-dbg" % name)
- for name in bb.data.getVar("QT_EXTRA_LIBS", d, 1).split():
- pkg = bb.data.getVar("QT_BASE_LIB",d, True) + name.lower().replace("qt", "").replace("_", "-") + "4"
+ for name in d.getVar("QT_EXTRA_LIBS", 1).split():
+ pkg = d.getVar("QT_BASE_LIB", True) + name.lower().replace("qt", "").replace("_", "-") + "4"
bb.data.setVar("FILES_%s" % pkg, "${libdir}/lib%(name)s.so.*" % locals(), d)
bb.data.setVar("FILES_%s-dev" % pkg, """${libdir}/lib%(name)s.prl
${libdir}/lib%(name)s.a
@@ -68,7 +68,7 @@ python __anonymous () {
${includedir}/${QT_DIR_NAME}/%(incname)s
${libdir}/pkgconfig/%(name)s.pc""" % locals(), d)
bb.data.setVar("FILES_%s-dbg" % pkg, "${libdir}/.debug/lib%(name)s.so*" % locals(), d)
- bb.data.setVar("RRECOMMENDS_%s-dbg" % pkg, "${PN}-dbg", d)
+ d.setVar("RRECOMMENDS_%s-dbg" % pkg, "${PN}-dbg")
lib_packages.append(pkg)
dev_packages.append("%s-dev" % pkg)
dbg_packages.append("%s-dbg" % pkg)
@@ -256,14 +256,14 @@ python populate_packages_prepend() {
do_split_packages(d, plugin_dir, glob, plugin_name, '${PN} %s for %%s' % name, extra_depends='', hook=dev_hook)
# Create a -dbg package as well
plugin_dir_dbg = bb.data.expand('${libdir}/${QT_DIR_NAME}/plugins/%s/.debug' % path, d)
- packages = bb.data.getVar('PACKAGES',d)
+ packages = d.getVar('PACKAGES')
for (file,package) in dev_packages:
packages = "%s %s-dbg" % (packages, package)
file_name = os.path.join(plugin_dir_dbg, os.path.basename(file))
- bb.data.setVar("FILES_%s-dbg" % package, file_name, d)
+ d.setVar("FILES_%s-dbg" % package, file_name)
bb.data.setVar("DESCRIPTION_%s-dbg" % package, "${PN} %s for %s" % (name, package), d)
- bb.data.setVar('PACKAGES', packages, d)
+ d.setVar('PACKAGES', packages)
qtopia_split('accessible', 'accessible', '^libq(.*)\.so$')
qtopia_split('codecs', 'codec', '^libq(.*)\.so$')
diff --git a/meta/recipes-qt/qt4/qt4_arch.inc b/meta/recipes-qt/qt4/qt4_arch.inc
index e8c8fabe686..46d65a20d7b 100644
--- a/meta/recipes-qt/qt4/qt4_arch.inc
+++ b/meta/recipes-qt/qt4/qt4_arch.inc
@@ -4,7 +4,7 @@ ARM_INSTRUCTION_SET = "arm"
def qt_arch(d):
import bb, re
- arch = bb.data.getVar('TARGET_ARCH', d, 1)
+ arch = d.getVar('TARGET_ARCH', 1)
if re.match("^i.86$", arch):
arch = "i386"
elif re.match("^arm.*", arch):
@@ -17,9 +17,9 @@ def qt_arch(d):
def qt_endian(d):
import bb
- if bb.data.getVar('SITEINFO_ENDIANNESS', d, True) == "le":
+ if d.getVar('SITEINFO_ENDIANNESS', True) == "le":
return "-little-endian"
- elif bb.data.getVar('SITEINFO_ENDIANNESS', d, True) == "be":
+ elif d.getVar('SITEINFO_ENDIANNESS', True) == "be":
return "-big-endian"
else:
assert False
diff --git a/meta/recipes-sato/puzzles/oh-puzzles_git.bb b/meta/recipes-sato/puzzles/oh-puzzles_git.bb
index 6d95d79e8cb..c2e76222452 100644
--- a/meta/recipes-sato/puzzles/oh-puzzles_git.bb
+++ b/meta/recipes-sato/puzzles/oh-puzzles_git.bb
@@ -61,10 +61,10 @@ FILES_${PN}-extra = "/usr/games/ /usr/share/applications /etc/gconf/schemas"
python __anonymous () {
import bb
var = bb.data.expand("FILES_${PN}", d, 1)
- data = bb.data.getVar(var, d, 1)
+ data = d.getVar(var, 1)
for name in ("bridges", "fifteen", "inertia", "map", "samegame", "slant"):
data = data + " /usr/games/%s" % name
data = data + " /usr/share/applications/%s.desktop" % name
data = data + " /etc/gconf/schemas/%s.schemas" % name
- bb.data.setVar(var, data, d)
+ d.setVar(var, data)
}
diff --git a/meta/recipes-sato/puzzles/puzzles_r9306.bb b/meta/recipes-sato/puzzles/puzzles_r9306.bb
index ee0c02537a5..c3849309ad2 100644
--- a/meta/recipes-sato/puzzles/puzzles_r9306.bb
+++ b/meta/recipes-sato/puzzles/puzzles_r9306.bb
@@ -3,7 +3,7 @@ HOMEPAGE="http://www.chiark.greenend.org.uk/~sgtatham/puzzles/"
DEPENDS = "gtk+ libxt"
PR = "r0"
-MOD_PV = "${@bb.data.getVar('PV',d,1)[1:]}"
+MOD_PV = "${@d.getVar('PV',1)[1:]}"
LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://LICENCE;md5=9928b60f3b78be315b7ab699c1b03ff5"
diff --git a/meta/recipes-support/attr/ea-acl.inc b/meta/recipes-support/attr/ea-acl.inc
index ce98a95cdb2..1fda792b77a 100644
--- a/meta/recipes-support/attr/ea-acl.inc
+++ b/meta/recipes-support/attr/ea-acl.inc
@@ -34,8 +34,8 @@ FILES_lib${BPN}-doc = "${mandir}/man2 \
BBCLASSEXTEND = "native"
# Only append ldflags for target recipe and if USE_NLS is enabled
-LDFLAGS_append_libc-uclibc = "${@['', ' -lintl '][(bb.data.getVar('PN', d, True) == bb.data.getVar('BPN', d , True)) and (bb.data.getVar('USE_NLS', d, True) == 'yes')]}"
-EXTRA_OECONF_append_libc-uclibc = "${@['', ' --disable-gettext '][(bb.data.getVar('PN', d, True) == bb.data.getVar('BPN', d , True)) and (bb.data.getVar('USE_NLS', d, True) == 'no')]}"
+LDFLAGS_append_libc-uclibc = "${@['', ' -lintl '][(d.getVar('PN', True) == d.getVar('BPN', True)) and (d.getVar('USE_NLS', True) == 'yes')]}"
+EXTRA_OECONF_append_libc-uclibc = "${@['', ' --disable-gettext '][(d.getVar('PN', True) == d.getVar('BPN', True)) and (d.getVar('USE_NLS', True) == 'no')]}"
fix_symlink () {
if test "${libdir}" = "${base_libdir}" ; then
diff --git a/meta/recipes-support/boost/boost-36.inc b/meta/recipes-support/boost/boost-36.inc
index bb267d791c1..8b0622f6ba6 100644
--- a/meta/recipes-support/boost/boost-36.inc
+++ b/meta/recipes-support/boost/boost-36.inc
@@ -11,8 +11,8 @@ LICENSE = "Boost"
PR = "r4"
ARM_INSTRUCTION_SET = "arm"
-BOOST_VER = "${@"_".join(bb.data.getVar("PV",d,1).split("."))}"
-BOOST_MAJ = "${@"_".join(bb.data.getVar("PV",d,1).split(".")[0:2])}"
+BOOST_VER = "${@"_".join(d.getVar("PV",1).split("."))}"
+BOOST_MAJ = "${@"_".join(d.getVar("PV",1).split(".")[0:2])}"
BOOST_P = "boost_${BOOST_VER}"
BOOST_LIBS = "\
@@ -46,12 +46,12 @@ python __anonymous () {
packages = []
extras = []
- for lib in bb.data.getVar('BOOST_LIBS', d, 1).split( ):
+ for lib in d.getVar('BOOST_LIBS', 1).split( ):
pkg = "boost-%s" % lib.replace("_", "-")
extras.append("--with-%s" % lib)
packages.append(pkg)
- if not bb.data.getVar("FILES_%s" % pkg, d, 1):
- bb.data.setVar("FILES_%s" % pkg, "${libdir}/libboost_%s*.so.*" % lib, d)
+ if not d.getVar("FILES_%s" % pkg, 1):
+ d.setVar("FILES_%s" % pkg, "${libdir}/libboost_%s*.so.*" % lib)
bb.data.setVar("BOOST_PACKAGES", " ".join(packages), d)
bb.data.setVar("BJAM_EXTRA", " ".join(extras), d)
}
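
The boost __anonymous function above shows another recurring pattern in this patch: set a packaging default only when nothing has provided one already. A minimal sketch, assuming the same anonymous-python context and a purely illustrative package name:

    pkg = 'boost-thread'                      # hypothetical package name
    if not d.getVar('FILES_%s' % pkg, 1):     # respect an existing override
        d.setVar('FILES_%s' % pkg, '${libdir}/libboost_thread*.so.*')
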
diff --git a/scripts/jhbuild/jhbuild2oe.py b/scripts/jhbuild/jhbuild2oe.py
index ef292763de7..9b31cafb699 100755
--- a/scripts/jhbuild/jhbuild2oe.py
+++ b/scripts/jhbuild/jhbuild2oe.py
@@ -161,9 +161,9 @@ class Handlers(object):
# create the package
d = bb.data.init()
pn = self.packagename(element.attrib.get('id'))
- bb.data.setVar('PN', pn, d)
+ d.setVar('PN', pn)
bb.data.setVar('DEPENDS', ' '.join(deps), d)
- bb.data.setVar('_handler', 'metamodule', d)
+ d.setVar('_handler', 'metamodule')
self.packages.append(d)
def autotools(self, element, parent):
@@ -181,23 +181,23 @@ class Handlers(object):
if id is None:
raise Exception('Error: autotools element has no id attribute.')
pn = self.packagename(id)
- bb.data.setVar('PN', pn, d)
+ d.setVar('PN', pn)
if deps is not None:
bb.data.setVar('DEPENDS', ' '.join(deps), d)
if branch is not None:
# <branch repo="git.freedesktop.org" module="xorg/xserver"/>
repo = os.path.join(self.repositories[branch.attrib.get('repo')], branch.attrib.get('module'))
- bb.data.setVar('SRC_URI', repo, d)
+ d.setVar('SRC_URI', repo)
checkoutdir = branch.attrib.get('checkoutdir')
if checkoutdir is not None:
bb.data.setVar('S', os.path.join('${WORKDIR}', checkoutdir), d)
# build class
- bb.data.setVar('INHERITS', 'autotools', d)
- bb.data.setVarFlag('INHERITS', 'operator', '+=', d)
- bb.data.setVar('_handler', 'autotools', d)
+ d.setVar('INHERITS', 'autotools')
+ d.setVarFlag('INHERITS', 'operator', '+=')
+ d.setVar('_handler', 'autotools')
self.packages.append(d)
class Emitter(object):
@@ -209,7 +209,7 @@ class Emitter(object):
def __init__(self, filefunc = None, basedir = None):
def _defaultfilefunc(package):
# return a relative path to the bitbake .bb which will be written
- return bb.data.getVar('PN', package, 1) + '.bb'
+ return package.getVar('PN', 1) + '.bb'
self.filefunc = filefunc or _defaultfilefunc
self.basedir = basedir or os.path.abspath(os.curdir)
@@ -226,16 +226,16 @@ class Emitter(object):
f.close()
for key in bb.data.keys(package):
- fdata = fdata.replace('@@'+key+'@@', bb.data.getVar(key, package))
+ fdata = fdata.replace('@@'+key+'@@', package.getVar(key))
else:
for key in bb.data.keys(package):
if key == '_handler':
continue
elif key == 'INHERITS':
- fdata += 'inherit %s\n' % bb.data.getVar('INHERITS', package)
+ fdata += 'inherit %s\n' % package.getVar('INHERITS')
else:
- oper = bb.data.getVarFlag(key, 'operator', package) or '='
- fdata += '%s %s "%s"\n' % (key, oper, bb.data.getVar(key, package))
+ oper = package.getVarFlag(key, 'operator') or '='
+ fdata += '%s %s "%s"\n' % (key, oper, package.getVar(key))
if not os.path.exists(os.path.join(self.basedir, os.path.dirname(self.filefunc(package)))):
os.makedirs(os.path.join(self.basedir, os.path.dirname(self.filefunc(package))))
@@ -254,8 +254,8 @@ def _test():
def filefunc(package):
# return a relative path to the bitbake .bb which will be written
- src_uri = bb.data.getVar('SRC_URI', package, 1)
- filename = bb.data.getVar('PN', package, 1) + '.bb'
+ src_uri = package.getVar('SRC_URI', 1)
+ filename = package.getVar('PN', 1) + '.bb'
if not src_uri:
return filename
else: