Diffstat (limited to 'meta/classes')
-rw-r--r--  meta/classes/autotools.bbclass              | 128
-rw-r--r--  meta/classes/base.bbclass                   |   2
-rw-r--r--  meta/classes/ccache.bbclass                 |   3
-rw-r--r--  meta/classes/cross-canadian.bbclass         |   5
-rw-r--r--  meta/classes/cross.bbclass                  |   2
-rw-r--r--  meta/classes/crosssdk.bbclass               |   1
-rw-r--r--  meta/classes/gobject-introspection.bbclass  |   2
-rw-r--r--  meta/classes/image.bbclass                  |   6
-rw-r--r--  meta/classes/insane.bbclass                 |   3
-rw-r--r--  meta/classes/module.bbclass                 |   2
-rw-r--r--  meta/classes/native.bbclass                 |   2
-rw-r--r--  meta/classes/nativesdk.bbclass              |   4
-rw-r--r--  meta/classes/package.bbclass                |   3
-rw-r--r--  meta/classes/pixbufcache.bbclass            |  21
-rw-r--r--  meta/classes/populate_sdk_ext.bbclass       |   2
-rw-r--r--  meta/classes/siteconfig.bbclass             |   2
-rw-r--r--  meta/classes/sstate.bbclass                 | 126
-rw-r--r--  meta/classes/staging.bbclass                | 349
-rw-r--r--  meta/classes/toolchain-scripts.bbclass      |   1
-rw-r--r--  meta/classes/uninative.bbclass              |   3
-rw-r--r--  meta/classes/useradd.bbclass                |  34
21 files changed, 493 insertions, 208 deletions
diff --git a/meta/classes/autotools.bbclass b/meta/classes/autotools.bbclass
index c43531b050..3741d09706 100644
--- a/meta/classes/autotools.bbclass
+++ b/meta/classes/autotools.bbclass
@@ -131,129 +131,15 @@ EXTRACONFFUNCS ??= ""
EXTRA_OECONF_append = " ${PACKAGECONFIG_CONFARGS}"
-do_configure[prefuncs] += "autotools_preconfigure autotools_copy_aclocals ${EXTRACONFFUNCS}"
+do_configure[prefuncs] += "autotools_preconfigure autotools_aclocals ${EXTRACONFFUNCS}"
do_configure[postfuncs] += "autotools_postconfigure"
-ACLOCALDIR = "${WORKDIR}/aclocal-copy"
-
-python autotools_copy_aclocals () {
- import copy
-
- s = d.getVar("AUTOTOOLS_SCRIPT_PATH")
- if not os.path.exists(s + "/configure.in") and not os.path.exists(s + "/configure.ac"):
- if not d.getVar("AUTOTOOLS_COPYACLOCAL", False):
- return
-
- taskdepdata = d.getVar("BB_TASKDEPDATA", False)
- #bb.warn(str(taskdepdata))
- pn = d.getVar("PN")
- aclocaldir = d.getVar("ACLOCALDIR")
- oe.path.remove(aclocaldir)
- bb.utils.mkdirhier(aclocaldir)
- start = None
- configuredeps = []
- # Detect bitbake -b usage
- # Everything but quilt-native would have dependencies
- nodeps = (pn != "quilt-native")
-
- for dep in taskdepdata:
- data = taskdepdata[dep]
- if data[1] == "do_configure" and data[0] == pn:
- start = dep
- if not nodeps and start:
- break
- if nodeps and data[0] != pn:
- nodeps = False
- if start is None:
- bb.fatal("Couldn't find ourself in BB_TASKDEPDATA?")
-
- # We need to figure out which m4 files we need to expose to this do_configure task.
- # This needs to match what would get restored from sstate, which is controlled
- # ultimately by calls from bitbake to setscene_depvalid().
- # That function expects a setscene dependency tree. We build a dependency tree
- # condensed to do_populate_sysroot -> do_populate_sysroot dependencies, similar to
- # that used by setscene tasks. We can then call into setscene_depvalid() and decide
- # which dependencies we can "see" and should expose the m4 files for.
- setscenedeps = copy.deepcopy(taskdepdata)
-
- start = set([start])
-
- # Create collapsed do_populate_sysroot -> do_populate_sysroot tree
- for dep in taskdepdata:
- data = setscenedeps[dep]
- if data[1] != "do_populate_sysroot":
- for dep2 in setscenedeps:
- data2 = setscenedeps[dep2]
- if dep in data2[3]:
- data2[3].update(setscenedeps[dep][3])
- data2[3].remove(dep)
- if dep in start:
- start.update(setscenedeps[dep][3])
- start.remove(dep)
- del setscenedeps[dep]
-
- # Remove circular references
- for dep in setscenedeps:
- if dep in setscenedeps[dep][3]:
- setscenedeps[dep][3].remove(dep)
-
- # Direct dependencies should be present and can be depended upon
- for dep in start:
- configuredeps.append(setscenedeps[dep][0])
-
- # Call into setscene_depvalid for each sub-dependency and only copy m4 files
- # for ones that would be restored from sstate.
- done = list(start)
- next = list(start)
- while next:
- new = []
- for dep in next:
- data = setscenedeps[dep]
- for datadep in data[3]:
- if datadep in done:
- continue
- taskdeps = {}
- taskdeps[dep] = setscenedeps[dep][:2]
- taskdeps[datadep] = setscenedeps[datadep][:2]
- retval = setscene_depvalid(datadep, taskdeps, [], d)
- if retval:
- bb.note("Skipping setscene dependency %s for m4 macro copying" % datadep)
- continue
- done.append(datadep)
- new.append(datadep)
- configuredeps.append(setscenedeps[datadep][0])
- next = new
-
- cp = []
- if nodeps:
- bb.warn("autotools: Unable to find task dependencies, -b being used? Pulling in all m4 files")
- for l in [d.expand("${STAGING_DATADIR_NATIVE}/aclocal/"), d.expand("${STAGING_DATADIR}/aclocal/")]:
- cp.extend(os.path.join(l, f) for f in os.listdir(l))
-
- for c in configuredeps:
- if c.endswith("-native"):
- manifest = d.expand("${SSTATE_MANIFESTS}/manifest-${BUILD_ARCH}-%s.populate_sysroot" % c)
- elif c.startswith("nativesdk-"):
- manifest = d.expand("${SSTATE_MANIFESTS}/manifest-${SDK_ARCH}_${SDK_OS}-%s.populate_sysroot" % c)
- elif "-cross-" in c or "-crosssdk" in c:
- continue
- else:
- manifest = d.expand("${SSTATE_MANIFESTS}/manifest-${MACHINE}-%s.populate_sysroot" % c)
- try:
- f = open(manifest, "r")
- for l in f:
- if "/aclocal/" in l and l.strip().endswith(".m4"):
- cp.append(l.strip())
- elif "config_site.d/" in l:
- cp.append(l.strip())
- except:
- bb.warn("%s not found" % manifest)
-
- for c in cp:
- t = os.path.join(aclocaldir, os.path.basename(c))
- if not os.path.exists(t):
- os.symlink(c, t)
+ACLOCALDIR = "${STAGING_DATADIR}/aclocal"
+ACLOCALEXTRAPATH = ""
+ACLOCALEXTRAPATH_class-target = " -I ${STAGING_DATADIR_NATIVE}/aclocal/"
+ACLOCALEXTRAPATH_class-nativesdk = " -I ${STAGING_DATADIR_NATIVE}/aclocal/"
+python autotools_aclocals () {
# Refresh variable with cache files
d.setVar("CONFIG_SITE", siteinfo_get_files(d, aclocalcache=True))
}
@@ -279,6 +165,7 @@ autotools_do_configure() {
if [ -e ${AUTOTOOLS_SCRIPT_PATH}/configure.in -o -e ${AUTOTOOLS_SCRIPT_PATH}/configure.ac ]; then
olddir=`pwd`
cd ${AUTOTOOLS_SCRIPT_PATH}
+ mkdir -p ${ACLOCALDIR}
ACLOCAL="aclocal --system-acdir=${ACLOCALDIR}/"
if [ x"${acpaths}" = xdefault ]; then
acpaths=
@@ -289,6 +176,7 @@ autotools_do_configure() {
else
acpaths="${acpaths}"
fi
+ acpaths="$acpaths ${ACLOCALEXTRAPATH}"
AUTOV=`automake --version | sed -e '1{s/.* //;s/\.[0-9]\+$//};q'`
automake --version
echo "AUTOV is $AUTOV"
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass
index 2765ebf61b..965518c733 100644
--- a/meta/classes/base.bbclass
+++ b/meta/classes/base.bbclass
@@ -284,7 +284,7 @@ CLEANBROKEN = "0"
addtask configure after do_patch
do_configure[dirs] = "${B}"
-do_configure[deptask] = "do_populate_sysroot"
+do_prepare_recipe_sysroot[deptask] = "do_populate_sysroot"
base_do_configure() {
if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then
if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then
diff --git a/meta/classes/ccache.bbclass b/meta/classes/ccache.bbclass
index 93fcacaf1a..76c8828619 100644
--- a/meta/classes/ccache.bbclass
+++ b/meta/classes/ccache.bbclass
@@ -2,5 +2,8 @@ CCACHE = "${@bb.utils.which(d.getVar('PATH'), 'ccache') and 'ccache '}"
export CCACHE_DIR ?= "${TMPDIR}/ccache/${MULTIMACH_HOST_SYS}/${PN}"
CCACHE_DISABLE[unexport] = "1"
+DEPENDS_append_class-target = " ccache-native"
+DEPENDS[vardepvalueexclude] = " ccache-native"
+
do_configure[dirs] =+ "${CCACHE_DIR}"
do_kernel_configme[dirs] =+ "${CCACHE_DIR}"
diff --git a/meta/classes/cross-canadian.bbclass b/meta/classes/cross-canadian.bbclass
index 64db1134f5..3120f46a14 100644
--- a/meta/classes/cross-canadian.bbclass
+++ b/meta/classes/cross-canadian.bbclass
@@ -100,9 +100,9 @@ MULTIMACH_TARGET_SYS = "${PACKAGE_ARCH}${HOST_VENDOR}-${HOST_OS}"
INHIBIT_DEFAULT_DEPS = "1"
-STAGING_DIR_HOST = "${STAGING_DIR}/${HOST_ARCH}-${SDKPKGSUFFIX}${HOST_VENDOR}-${HOST_OS}"
+STAGING_DIR_HOST = "${RECIPE_SYSROOT}"
-TOOLCHAIN_OPTIONS = " --sysroot=${STAGING_DIR}/${HOST_ARCH}-${SDKPKGSUFFIX}${HOST_VENDOR}-${HOST_OS}"
+TOOLCHAIN_OPTIONS = " --sysroot=${RECIPE_SYSROOT}"
PATH_append = ":${TMPDIR}/sysroots/${HOST_ARCH}/${bindir_cross}"
PKGHIST_DIR = "${TMPDIR}/pkghistory/${HOST_ARCH}-${SDKPKGSUFFIX}${HOST_VENDOR}-${HOST_OS}/"
@@ -169,6 +169,7 @@ USE_NLS = "${SDKUSE_NLS}"
# and not any particular tune that is enabled.
TARGET_ARCH[vardepsexclude] = "TUNE_ARCH"
+PKGDATA_DIR = "${TMPDIR}/pkgdata/${SDK_SYS}"
# If MLPREFIX is set by multilib code, shlibs
# points to the wrong place so force it
SHLIBSDIRS = "${PKGDATA_DIR}/nativesdk-shlibs2"
diff --git a/meta/classes/cross.bbclass b/meta/classes/cross.bbclass
index 01b09337ae..d125f10d01 100644
--- a/meta/classes/cross.bbclass
+++ b/meta/classes/cross.bbclass
@@ -19,7 +19,7 @@ HOST_AS_ARCH = "${BUILD_AS_ARCH}"
export lt_cv_sys_lib_dlsearch_path_spec = "${libdir} ${base_libdir} /lib /lib64 /usr/lib /usr/lib64"
-STAGING_DIR_HOST = "${STAGING_DIR}/${HOST_ARCH}${HOST_VENDOR}-${HOST_OS}"
+STAGING_DIR_HOST = "${RECIPE_SYSROOT_NATIVE}"
PACKAGE_ARCH = "${BUILD_ARCH}"
diff --git a/meta/classes/crosssdk.bbclass b/meta/classes/crosssdk.bbclass
index eaf2beb94d..c5c29ea3da 100644
--- a/meta/classes/crosssdk.bbclass
+++ b/meta/classes/crosssdk.bbclass
@@ -8,7 +8,6 @@ python () {
d.setVar('TUNE_PKGARCH', d.getVar('SDK_ARCH'))
}
-STAGING_DIR_TARGET = "${STAGING_DIR}/${SDK_ARCH}-${SDKPKGSUFFIX}${SDK_VENDOR}-${SDK_OS}"
STAGING_BINDIR_TOOLCHAIN = "${STAGING_DIR_NATIVE}${bindir_native}/${TARGET_ARCH}${TARGET_VENDOR}-${TARGET_OS}"
TARGET_ARCH = "${SDK_ARCH}"
diff --git a/meta/classes/gobject-introspection.bbclass b/meta/classes/gobject-introspection.bbclass
index 37389cbc8b..b6160b88b6 100644
--- a/meta/classes/gobject-introspection.bbclass
+++ b/meta/classes/gobject-introspection.bbclass
@@ -17,7 +17,7 @@ UNKNOWN_CONFIGURE_WHITELIST_append = " --enable-introspection --disable-introspe
# Generating introspection data depends on a combination of native and target
# introspection tools, and qemu to run the target tools.
-DEPENDS_append_class-target = " gobject-introspection gobject-introspection-native qemu-native"
+DEPENDS_append_class-target = " gobject-introspection gobject-introspection-native qemu-native prelink-native"
# Even though introspection is disabled on -native, gobject-introspection package is still
# needed for m4 macros.
diff --git a/meta/classes/image.bbclass b/meta/classes/image.bbclass
index 6b0864913b..5fc7385d79 100644
--- a/meta/classes/image.bbclass
+++ b/meta/classes/image.bbclass
@@ -13,7 +13,7 @@ inherit gzipnative
LICENSE = "MIT"
PACKAGES = ""
-DEPENDS += "${MLPREFIX}qemuwrapper-cross ${MLPREFIX}depmodwrapper-cross"
+DEPENDS += "${MLPREFIX}qemuwrapper-cross depmodwrapper-cross"
RDEPENDS += "${PACKAGE_INSTALL} ${LINGUAS_INSTALL}"
RRECOMMENDS += "${PACKAGE_INSTALL_ATTEMPTONLY}"
@@ -164,7 +164,7 @@ python () {
deps = ""
for dep in (d.getVar('EXTRA_IMAGEDEPENDS') or "").split():
deps += " %s:do_populate_sysroot" % dep
- d.appendVarFlag('do_build', 'depends', deps)
+ d.appendVarFlag('do_image_complete', 'depends', deps)
#process IMAGE_FEATURES, we must do this before runtime_mapping_rename
#Check for replaces image features
@@ -274,7 +274,7 @@ fakeroot python do_rootfs () {
do_rootfs[dirs] = "${TOPDIR}"
do_rootfs[cleandirs] += "${S} ${IMGDEPLOYDIR}"
do_rootfs[umask] = "022"
-addtask rootfs before do_build
+addtask rootfs before do_build after do_prepare_recipe_sysroot
fakeroot python do_image () {
from oe.utils import execute_pre_post_process
diff --git a/meta/classes/insane.bbclass b/meta/classes/insane.bbclass
index 7332e45453..3cf86c12c3 100644
--- a/meta/classes/insane.bbclass
+++ b/meta/classes/insane.bbclass
@@ -827,6 +827,7 @@ def package_qa_check_staged(path,d):
sane = True
tmpdir = d.getVar('TMPDIR')
workdir = os.path.join(tmpdir, "work")
+ recipesysroot = d.getVar("RECIPE_SYSROOT")
if bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d):
pkgconfigcheck = workdir
@@ -842,12 +843,14 @@ def package_qa_check_staged(path,d):
if file.endswith(".la"):
with open(path) as f:
file_content = f.read()
+ file_content = file_content.replace(recipesysroot, "")
if workdir in file_content:
error_msg = "%s failed sanity test (workdir) in path %s" % (file,root)
sane = package_qa_handle_error("la", error_msg, d)
elif file.endswith(".pc"):
with open(path) as f:
file_content = f.read()
+ file_content = file_content.replace(recipesysroot, "")
if pkgconfigcheck in file_content:
error_msg = "%s failed sanity test (tmpdir) in path %s" % (file,root)
sane = package_qa_handle_error("pkgconfig", error_msg, d)
diff --git a/meta/classes/module.bbclass b/meta/classes/module.bbclass
index 58470474cc..a5888736f2 100644
--- a/meta/classes/module.bbclass
+++ b/meta/classes/module.bbclass
@@ -1,6 +1,6 @@
inherit module-base kernel-module-split
-addtask make_scripts after do_patch before do_compile
+addtask make_scripts after do_prepare_recipe_sysroot before do_compile
do_make_scripts[lockfiles] = "${TMPDIR}/kernel-scripts.lock"
do_make_scripts[depends] += "virtual/kernel:do_shared_workdir"
diff --git a/meta/classes/native.bbclass b/meta/classes/native.bbclass
index 5592e1d1c1..4d4aaadc02 100644
--- a/meta/classes/native.bbclass
+++ b/meta/classes/native.bbclass
@@ -95,7 +95,7 @@ libdir .= "${NATIVE_PACKAGE_PATH_SUFFIX}"
libexecdir .= "${NATIVE_PACKAGE_PATH_SUFFIX}"
do_populate_sysroot[sstate-inputdirs] = "${SYSROOT_DESTDIR}/${STAGING_DIR_NATIVE}/"
-do_populate_sysroot[sstate-outputdirs] = "${STAGING_DIR_NATIVE}/"
+do_populate_sysroot[sstate-outputdirs] = "${STAGING_DIR}-components/${PACKAGE_ARCH}/${PN}"
# Since we actually install these into situ there is no staging prefix
STAGING_DIR_HOST = ""
diff --git a/meta/classes/nativesdk.bbclass b/meta/classes/nativesdk.bbclass
index 2ac8fd82ed..a97f5a7114 100644
--- a/meta/classes/nativesdk.bbclass
+++ b/meta/classes/nativesdk.bbclass
@@ -25,9 +25,7 @@ PACKAGE_ARCHS = "${SDK_PACKAGE_ARCHS}"
DEPENDS_append = " chrpath-replacement-native"
EXTRANATIVEPATH += "chrpath-native"
-STAGING_DIR_HOST = "${STAGING_DIR}/${MULTIMACH_HOST_SYS}"
-STAGING_DIR_TARGET = "${STAGING_DIR}/${MULTIMACH_TARGET_SYS}"
-PKGDATA_DIR = "${STAGING_DIR_HOST}/pkgdata"
+PKGDATA_DIR = "${TMPDIR}/pkgdata/${SDK_SYS}"
HOST_ARCH = "${SDK_ARCH}"
HOST_VENDOR = "${SDK_VENDOR}"
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
index 0068a50258..ac2d71cfa4 100644
--- a/meta/classes/package.bbclass
+++ b/meta/classes/package.bbclass
@@ -414,7 +414,8 @@ def copydebugsources(debugsrcdir, d):
bb.utils.mkdirhier(basepath)
cpath.updatecache(basepath)
- processdebugsrc = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '(<internal>|<built-in>)$' | "
+ # Ignore files from the recipe sysroots (target and native)
+ processdebugsrc = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | "
# We need to ignore files that are not actually ours
# we do this by only paying attention to items from this package
processdebugsrc += "fgrep -zw '%s' | "
diff --git a/meta/classes/pixbufcache.bbclass b/meta/classes/pixbufcache.bbclass
index 633a8757ed..aa9815cc6f 100644
--- a/meta/classes/pixbufcache.bbclass
+++ b/meta/classes/pixbufcache.bbclass
@@ -48,24 +48,17 @@ python populate_packages_append() {
}
gdkpixbuf_complete() {
- GDK_PIXBUF_FATAL_LOADER=1 ${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders --update-cache || exit 1
+GDK_PIXBUF_FATAL_LOADER=1 ${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders --update-cache || exit 1
}
-#
-# Add an sstate postinst hook to update the cache for native packages.
-# An error exit during populate_sysroot_setscene allows bitbake to
-# try to recover by re-building the package.
-#
DEPENDS_append_class-native = " gdk-pixbuf-native"
-SSTATEPOSTINSTFUNCS_append_class-native = " pixbufcache_sstate_postinst"
+SYSROOT_PREPROCESS_FUNCS_append_class-native = " pixbufcache_sstate_postinst"
# See base.bbclass for the other half of this
pixbufcache_sstate_postinst() {
- if [ "${BB_CURRENTTASK}" = "populate_sysroot" ]; then
- ${gdkpixbuf_complete}
- elif [ "${BB_CURRENTTASK}" = "populate_sysroot_setscene" ]; then
- if [ -x ${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders ]; then
- echo "${gdkpixbuf_complete}" >> ${STAGING_DIR}/sstatecompletions
- fi
- fi
+ mkdir -p ${SYSROOT_DESTDIR}${bindir}
+ dest=${SYSROOT_DESTDIR}${bindir}/postinst-useradd-${PN}
+ echo '#!/bin/sh' > $dest
+ echo "${gdkpixbuf_complete}" >> $dest
+ chmod 0755 $dest
}
diff --git a/meta/classes/populate_sdk_ext.bbclass b/meta/classes/populate_sdk_ext.bbclass
index 7c9666c162..39e0c83fe4 100644
--- a/meta/classes/populate_sdk_ext.bbclass
+++ b/meta/classes/populate_sdk_ext.bbclass
@@ -404,7 +404,7 @@ python copy_buildsystem () {
if sdk_include_toolchain:
lockedsigs_base = d.getVar('WORKDIR') + '/locked-sigs-base2.inc'
- lockedsigs_toolchain = d.getVar('STAGING_DIR_HOST') + '/locked-sigs/locked-sigs-extsdk-toolchain.inc'
+ lockedsigs_toolchain = d.expand("${STAGING_DIR}/${TUNE_PKGARCH}/meta-extsdk-toolchain/locked-sigs/locked-sigs-extsdk-toolchain.inc")
shutil.move(lockedsigs_pruned, lockedsigs_base)
oe.copy_buildsystem.merge_lockedsigs([],
lockedsigs_base,
diff --git a/meta/classes/siteconfig.bbclass b/meta/classes/siteconfig.bbclass
index 9d51a0266a..bb491d2994 100644
--- a/meta/classes/siteconfig.bbclass
+++ b/meta/classes/siteconfig.bbclass
@@ -5,9 +5,9 @@ python siteconfig_do_siteconfig () {
if not os.path.isdir(os.path.join(d.getVar('FILE_DIRNAME'), 'site_config')):
bb.debug(1, "No site_config directory, skipping do_siteconfig")
return
+ sstate_install(shared_state, d)
bb.build.exec_func('do_siteconfig_gencache', d)
sstate_clean(shared_state, d)
- sstate_install(shared_state, d)
}
EXTRASITECONFIG ?= ""
diff --git a/meta/classes/sstate.bbclass b/meta/classes/sstate.bbclass
index f1faf4848e..a767a0203b 100644
--- a/meta/classes/sstate.bbclass
+++ b/meta/classes/sstate.bbclass
@@ -97,7 +97,7 @@ python () {
# These classes encode staging paths into their scripts data so can only be
# reused if we manipulate the paths
if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('sdk', d) or bb.data.inherits_class('crosssdk', d):
- scan_cmd = "grep -Irl ${STAGING_DIR} ${SSTATE_BUILDDIR}"
+ scan_cmd = "grep -Irl -e ${RECIPE_SYSROOT} -e ${RECIPE_SYSROOT_NATIVE} ${SSTATE_BUILDDIR}"
d.setVar('SSTATE_SCAN_CMD', scan_cmd)
unique_tasks = sorted(set((d.getVar('SSTATETASKS') or "").split()))
@@ -131,6 +131,7 @@ def sstate_state_fromvars(d, task = None):
lockfiles = (d.getVarFlag("do_" + task, 'sstate-lockfile') or "").split()
lockfilesshared = (d.getVarFlag("do_" + task, 'sstate-lockfile-shared') or "").split()
interceptfuncs = (d.getVarFlag("do_" + task, 'sstate-interceptfuncs') or "").split()
+ fixmedir = d.getVarFlag("do_" + task, 'sstate-fixmedir') or ""
if not task or len(inputs) != len(outputs):
bb.fatal("sstate variables not setup correctly?!")
@@ -146,6 +147,7 @@ def sstate_state_fromvars(d, task = None):
ss['lockfiles-shared'] = lockfilesshared
ss['plaindirs'] = plaindirs
ss['interceptfuncs'] = interceptfuncs
+ ss['fixmedir'] = fixmedir
return ss
def sstate_add(ss, source, dest, d):
@@ -195,6 +197,9 @@ def sstate_install(ss, d):
srcdir = os.path.join(walkroot, dir)
dstdir = srcdir.replace(state[1], state[2])
#bb.debug(2, "Staging %s to %s" % (srcdir, dstdir))
+ if os.path.islink(srcdir):
+ sharedfiles.append(dstdir)
+ continue
if not dstdir.endswith("/"):
dstdir = dstdir + "/"
shareddirs.append(dstdir)
@@ -203,7 +208,7 @@ def sstate_install(ss, d):
whitelist = (d.getVar("SSTATE_DUPWHITELIST") or "").split()
match = []
for f in sharedfiles:
- if os.path.exists(f):
+ if os.path.exists(f) and not os.path.islink(f):
f = os.path.normpath(f)
realmatch = True
for w in whitelist:
@@ -244,6 +249,10 @@ def sstate_install(ss, d):
(d.getVar('PN'), "\n ".join(match)))
bb.fatal("If the above message is too much, the simpler version is you're advised to wipe out tmp and rebuild (reusing sstate is fine). That will likely fix things in most (but not all) cases.")
+ if ss['fixmedir'] and os.path.exists(ss['fixmedir'] + "/fixmepath.cmd"):
+ sharedfiles.append(ss['fixmedir'] + "/fixmepath.cmd")
+ sharedfiles.append(ss['fixmedir'] + "/fixmepath")
+
# Write out the manifest
f = open(manifest, "w")
for file in sharedfiles:
@@ -288,17 +297,8 @@ sstate_install[vardepsexclude] += "SSTATE_DUPWHITELIST STATE_MANMACH SSTATE_MANF
sstate_install[vardeps] += "${SSTATEPOSTINSTFUNCS}"
def sstate_installpkg(ss, d):
- import oe.path
- import subprocess
from oe.gpg_sign import get_signer
- def prepdir(dir):
- # remove dir if it exists, ensure any parent directories do exist
- if os.path.exists(dir):
- oe.path.remove(dir)
- bb.utils.mkdirhier(dir)
- oe.path.remove(dir)
-
sstateinst = d.expand("${WORKDIR}/sstate-install-%s/" % ss['task'])
sstatefetch = d.getVar('SSTATE_PKGNAME') + '_' + ss['task'] + ".tgz"
sstatepkg = d.getVar('SSTATE_PKG') + '_' + ss['task'] + ".tgz"
@@ -320,11 +320,36 @@ def sstate_installpkg(ss, d):
if not signer.verify(sstatepkg + '.sig'):
bb.warn("Cannot verify signature on sstate package %s" % sstatepkg)
- for f in (d.getVar('SSTATEPREINSTFUNCS') or '').split() + ['sstate_unpack_package'] + (d.getVar('SSTATEPOSTUNPACKFUNCS') or '').split():
+ sstateinst = d.getVar("SSTATE_INSTDIR")
+ d.setVar('SSTATE_FIXMEDIR', ss['fixmedir'])
+
+ for f in (d.getVar('SSTATEPREINSTFUNCS') or '').split() + ['sstate_unpack_package']:
# All hooks should run in the SSTATE_INSTDIR
bb.build.exec_func(f, d, (sstateinst,))
+ return sstate_installpkgdir(ss, d)
+
+def sstate_installpkgdir(ss, d):
+ import oe.path
+ import subprocess
+
+ sstateinst = d.getVar("SSTATE_INSTDIR")
+ d.setVar('SSTATE_FIXMEDIR', ss['fixmedir'])
+
+ for f in (d.getVar('SSTATEPOSTUNPACKFUNCS') or '').split():
+ # All hooks should run in the SSTATE_INSTDIR
+ bb.build.exec_func(f, d, (sstateinst,))
+
+ def prepdir(dir):
+ # remove dir if it exists, ensure any parent directories do exist
+ if os.path.exists(dir):
+ oe.path.remove(dir)
+ bb.utils.mkdirhier(dir)
+ oe.path.remove(dir)
+
for state in ss['dirs']:
+ if d.getVar('SSTATE_SKIP_CREATION') == '1':
+ continue
prepdir(state[1])
os.rename(sstateinst + state[0], state[1])
sstate_install(ss, d)
@@ -347,18 +372,18 @@ python sstate_hardcode_path_unpack () {
import subprocess
sstateinst = d.getVar('SSTATE_INSTDIR')
- fixmefn = sstateinst + "fixmepath"
+ sstatefixmedir = d.getVar('SSTATE_FIXMEDIR')
+ fixmefn = sstateinst + "fixmepath"
if os.path.isfile(fixmefn):
- staging = d.getVar('STAGING_DIR')
- staging_target = d.getVar('STAGING_DIR_TARGET')
- staging_host = d.getVar('STAGING_DIR_HOST')
-
- if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d):
- sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIR:%s:g'" % (staging)
- elif bb.data.inherits_class('cross', d):
- sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRTARGET:%s:g; s:FIXMESTAGINGDIR:%s:g'" % (staging_target, staging)
- else:
+ staging_target = d.getVar('RECIPE_SYSROOT')
+ staging_host = d.getVar('RECIPE_SYSROOT_NATIVE')
+
+ if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross-canadian', d):
sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRHOST:%s:g'" % (staging_host)
+ elif bb.data.inherits_class('cross', d) or bb.data.inherits_class('crosssdk', d):
+ sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRTARGET:%s:g; s:FIXMESTAGINGDIRHOST:%s:g'" % (staging_target, staging_host)
+ else:
+ sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRTARGET:%s:g'" % (staging_target)
extra_staging_fixmes = d.getVar('EXTRA_STAGING_FIXMES') or ''
for fixmevar in extra_staging_fixmes.split():
@@ -368,6 +393,18 @@ python sstate_hardcode_path_unpack () {
# Add sstateinst to each filename in fixmepath, use xargs to efficiently call sed
sstate_hardcode_cmd = "sed -e 's:^:%s:g' %s | xargs %s" % (sstateinst, fixmefn, sstate_sed_cmd)
+ # Defer do_populate_sysroot relocation command
+ if sstatefixmedir:
+ bb.utils.mkdirhier(sstatefixmedir)
+ with open(sstatefixmedir + "/fixmepath.cmd", "w") as f:
+ sstate_hardcode_cmd = sstate_hardcode_cmd.replace(fixmefn, sstatefixmedir + "/fixmepath")
+ sstate_hardcode_cmd = sstate_hardcode_cmd.replace(sstateinst, "FIXMEFINALSSTATEINST")
+ sstate_hardcode_cmd = sstate_hardcode_cmd.replace(staging_host, "FIXMEFINALSSTATEHOST")
+ sstate_hardcode_cmd = sstate_hardcode_cmd.replace(staging_target, "FIXMEFINALSSTATETARGET")
+ f.write(sstate_hardcode_cmd)
+ bb.utils.copyfile(fixmefn, sstatefixmedir + "/fixmepath")
+ return
+
bb.note("Replacing fixme paths in sstate package: %s" % (sstate_hardcode_cmd))
subprocess.call(sstate_hardcode_cmd, shell=True)
@@ -499,20 +536,19 @@ python sstate_hardcode_path () {
# Note: the logic in this function needs to match the reverse logic
# in sstate_installpkg(ss, d)
- staging = d.getVar('STAGING_DIR')
- staging_target = d.getVar('STAGING_DIR_TARGET')
- staging_host = d.getVar('STAGING_DIR_HOST')
+ staging_target = d.getVar('RECIPE_SYSROOT')
+ staging_host = d.getVar('RECIPE_SYSROOT_NATIVE')
sstate_builddir = d.getVar('SSTATE_BUILDDIR')
- if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d):
- sstate_grep_cmd = "grep -l -e '%s'" % (staging)
- sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIR:g'" % (staging)
- elif bb.data.inherits_class('cross', d):
- sstate_grep_cmd = "grep -l -e '%s' -e '%s'" % (staging_target, staging)
- sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRTARGET:g; s:%s:FIXMESTAGINGDIR:g'" % (staging_target, staging)
- else:
+ if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross-canadian', d):
sstate_grep_cmd = "grep -l -e '%s'" % (staging_host)
sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRHOST:g'" % (staging_host)
+ elif bb.data.inherits_class('cross', d) or bb.data.inherits_class('crosssdk', d):
+ sstate_grep_cmd = "grep -l -e '%s' -e '%s'" % (staging_target, staging_host)
+ sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRTARGET:g; s:%s:FIXMESTAGINGDIRHOST:g'" % (staging_target, staging_host)
+ else:
+ sstate_grep_cmd = "grep -l -e '%s'" % (staging_target)
+ sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRTARGET:g'" % (staging_target)
extra_staging_fixmes = d.getVar('EXTRA_STAGING_FIXMES') or ''
for fixmevar in extra_staging_fixmes.split():
@@ -537,14 +573,14 @@ python sstate_hardcode_path () {
sstate_hardcode_cmd = "%s | xargs %s | %s | xargs %s %s" % (sstate_scan_cmd, sstate_grep_cmd, sstate_filelist_cmd, xargs_no_empty_run_cmd, sstate_sed_cmd)
bb.note("Removing hardcoded paths from sstate package: '%s'" % (sstate_hardcode_cmd))
- subprocess.call(sstate_hardcode_cmd, shell=True)
+ subprocess.check_call(sstate_hardcode_cmd, shell=True, cwd=sstate_builddir)
# If the fixmefn is empty, remove it..
if os.stat(fixmefn).st_size == 0:
os.remove(fixmefn)
else:
bb.note("Replacing absolute paths in fixmepath file: '%s'" % (sstate_filelist_relative_cmd))
- subprocess.call(sstate_filelist_relative_cmd, shell=True)
+ subprocess.check_call(sstate_filelist_relative_cmd, shell=True)
}
def sstate_package(ss, d):
@@ -560,6 +596,8 @@ def sstate_package(ss, d):
if not link.startswith(tmpdir):
return
+ #base = os.path.relpath(link, os.path.dirname(path))
+
depth = outputpath.rpartition(tmpdir)[2].count('/')
base = link.partition(tmpdir)[2].strip()
while depth > 1:
@@ -594,14 +632,14 @@ def sstate_package(ss, d):
dstpath = srcpath.replace(state[1], state[2])
make_relative_symlink(srcpath, dstpath, d)
bb.debug(2, "Preparing tree %s for packaging at %s" % (state[1], sstatebuild + state[0]))
- oe.path.copyhardlinktree(state[1], sstatebuild + state[0])
+ os.rename(state[1], sstatebuild + state[0])
workdir = d.getVar('WORKDIR')
for plain in ss['plaindirs']:
pdir = plain.replace(workdir, sstatebuild)
bb.utils.mkdirhier(plain)
bb.utils.mkdirhier(pdir)
- oe.path.copyhardlinktree(plain, pdir)
+ os.rename(plain, pdir)
d.setVar('SSTATE_BUILDDIR', sstatebuild)
d.setVar('SSTATE_PKG', sstatepkg)
@@ -614,6 +652,8 @@ def sstate_package(ss, d):
bb.siggen.dump_this_task(sstatepkg + ".siginfo", d)
+ d.setVar('SSTATE_INSTDIR', sstatebuild)
+
return
def pstaging_fetch(sstatefetch, sstatepkg, d):
@@ -672,14 +712,21 @@ sstate_task_prefunc[dirs] = "${WORKDIR}"
python sstate_task_postfunc () {
shared_state = sstate_state_fromvars(d)
- sstate_install(shared_state, d)
for intercept in shared_state['interceptfuncs']:
bb.build.exec_func(intercept, d, (d.getVar("WORKDIR"),))
+
omask = os.umask(0o002)
if omask != 0o002:
bb.note("Using umask 0o002 (not %0o) for sstate packaging" % omask)
sstate_package(shared_state, d)
os.umask(omask)
+
+ sstateinst = d.getVar("SSTATE_INSTDIR")
+ d.setVar('SSTATE_FIXMEDIR', shared_state['fixmedir'])
+
+ sstate_installpkgdir(shared_state, d)
+
+ bb.utils.remove(d.getVar("SSTATE_BUILDDIR"), recurse=True)
}
sstate_task_postfunc[dirs] = "${WORKDIR}"
@@ -704,9 +751,6 @@ sstate_create_package () {
fi
chmod 0664 $TFILE
mv -f $TFILE ${SSTATE_PKG}
-
- cd ${WORKDIR}
- rm -rf ${SSTATE_BUILDDIR}
}
python sstate_sign_package () {
@@ -877,7 +921,7 @@ def setscene_depvalid(task, taskdependees, notneeded, d):
bb.debug(2, "Considering setscene task: %s" % (str(taskdependees[task])))
def isNativeCross(x):
- return x.endswith("-native") or "-cross-" in x or "-crosssdk" in x
+ return x.endswith("-native") or "-cross-" in x or "-crosssdk" in x or x.endswith("-cross")
# We only need to trigger populate_lic through direct dependencies
if taskdependees[task][1] == "do_populate_lic":
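
For readers unfamiliar with the fixmepath mechanism reworked in the sstate.bbclass hunks above: at do_populate_sysroot packaging time, sstate_hardcode_path rewrites absolute sysroot paths inside staged files to FIXMESTAGINGDIRTARGET/FIXMESTAGINGDIRHOST placeholders, and at unpack time sstate_hardcode_path_unpack (or the deferred fixmepath.cmd written under the new sstate-fixmedir) substitutes the consuming recipe's RECIPE_SYSROOT and RECIPE_SYSROOT_NATIVE back in. The following is a minimal, standalone Python sketch of that string round-trip only; the paths are invented examples and the real classes do the rewriting with grep/sed over the file list in fixmepath.

# Minimal sketch (not part of the patch) of the placeholder round-trip
# performed by sstate_hardcode_path (packaging) and
# sstate_hardcode_path_unpack (install). Paths below are invented.

# Invented values standing in for RECIPE_SYSROOT / RECIPE_SYSROOT_NATIVE.
recipe_sysroot = "/build/tmp/work/core2-64-poky-linux/zlib/1.2.11-r0/recipe-sysroot"
recipe_sysroot_native = "/build/tmp/work/core2-64-poky-linux/zlib/1.2.11-r0/recipe-sysroot-native"

# Packaging time: absolute sysroot paths become placeholders so the
# sstate object is relocatable between recipe sysroots.
staged_line = "prefix=" + recipe_sysroot + "/usr"
packed_line = staged_line.replace(recipe_sysroot, "FIXMESTAGINGDIRTARGET")
assert packed_line == "prefix=FIXMESTAGINGDIRTARGET/usr"

# Unpack time (possibly deferred via fixmepath.cmd): placeholders are
# replaced with the consuming recipe's own sysroot locations.
restored_line = (packed_line
                 .replace("FIXMESTAGINGDIRTARGET", recipe_sysroot)
                 .replace("FIXMESTAGINGDIRHOST", recipe_sysroot_native))
assert restored_line == staged_line
print(restored_line)
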
diff --git a/meta/classes/staging.bbclass b/meta/classes/staging.bbclass
index 2512ae6f5d..1a4668e5d3 100644
--- a/meta/classes/staging.bbclass
+++ b/meta/classes/staging.bbclass
@@ -235,12 +235,357 @@ do_populate_sysroot[depends] += "${POPULATESYSROOTDEPS}"
SSTATETASKS += "do_populate_sysroot"
do_populate_sysroot[cleandirs] = "${SYSROOT_DESTDIR}"
do_populate_sysroot[sstate-inputdirs] = "${SYSROOT_DESTDIR}"
-do_populate_sysroot[sstate-outputdirs] = "${STAGING_DIR_HOST}/"
-do_populate_sysroot[stamp-extra-info] = "${MACHINE}"
+do_populate_sysroot[sstate-outputdirs] = "${STAGING_DIR}-components/${PACKAGE_ARCH}/${PN}"
+do_populate_sysroot[sstate-fixmedir] = "${STAGING_DIR}-components/${PACKAGE_ARCH}/${PN}"
python do_populate_sysroot_setscene () {
sstate_setscene(d)
}
addtask do_populate_sysroot_setscene
+def staging_copyfile(c, target, fixme, postinsts, stagingdir):
+ import errno
+
+ if c.endswith("/fixmepath"):
+ fixme.append(c)
+ return None
+ if c.endswith("/fixmepath.cmd"):
+ return None
+ #bb.warn(c)
+ dest = c.replace(stagingdir, "")
+ dest = target + "/" + "/".join(dest.split("/")[3:])
+ bb.utils.mkdirhier(os.path.dirname(dest))
+ if "/usr/bin/postinst-" in c:
+ postinsts.append(dest)
+ if os.path.islink(c):
+ linkto = os.readlink(c)
+ if os.path.lexists(dest):
+ if os.readlink(dest) == linkto:
+ return dest
+ bb.fatal("Link %s already exists to a different location?" % dest)
+ os.symlink(linkto, dest)
+ #bb.warn(c)
+ else:
+ try:
+ os.link(c, dest)
+ except OSError as err:
+ if err.errno == errno.EXDEV:
+ bb.utils.copyfile(c, dest)
+ else:
+ raise
+ return dest
+
+def staging_copydir(c, target, stagingdir):
+ dest = c.replace(stagingdir, "")
+ dest = target + "/" + "/".join(dest.split("/")[3:])
+ bb.utils.mkdirhier(dest)
+
+def staging_processfixme(fixme, target, recipesysroot, recipesysrootnative, d):
+ import subprocess
+
+ if not fixme:
+ return
+ cmd = "sed -e 's:^[^/]*/:%s/:g' %s | xargs sed -i -e 's:FIXMESTAGINGDIRTARGET:%s:g; s:FIXMESTAGINGDIRHOST:%s:g'" % (target, " ".join(fixme), recipesysroot, recipesysrootnative)
+ for fixmevar in ['PKGDATA_DIR']:
+ fixme_path = d.getVar(fixmevar)
+ cmd += " -e 's:FIXME_%s:%s:g'" % (fixmevar, fixme_path)
+ bb.note(cmd)
+ subprocess.check_call(cmd, shell=True)
+
+
+def staging_populate_sysroot_dir(targetsysroot, nativesysroot, native, d):
+ import glob
+    import subprocess
+    import errno
+
+ fixme = []
+ postinsts = []
+ stagingdir = d.getVar("STAGING_DIR")
+ if native:
+ pkgarchs = ['${BUILD_ARCH}', '${BUILD_ARCH}_*']
+ targetdir = nativesysroot
+ else:
+ pkgarchs = ['${MACHINE_ARCH}', '${TUNE_PKGARCH}', 'allarch']
+ targetdir = targetsysroot
+
+ bb.utils.mkdirhier(targetdir)
+ for pkgarch in pkgarchs:
+ for manifest in glob.glob(d.expand("${SSTATE_MANIFESTS}/manifest-%s-*.populate_sysroot" % pkgarch)):
+ if manifest.endswith("-initial.populate_sysroot"):
+ # skip glibc-initial and libgcc-initial due to file overlap
+ continue
+ tmanifest = targetdir + "/" + os.path.basename(manifest)
+ if os.path.exists(tmanifest):
+ continue
+ try:
+ os.link(manifest, tmanifest)
+ except OSError as err:
+ if err.errno == errno.EXDEV:
+ bb.utils.copyfile(manifest, tmanifest)
+ else:
+ raise
+ with open(manifest, "r") as f:
+ for l in f:
+ l = l.strip()
+ if l.endswith("/"):
+ staging_copydir(l, targetdir, stagingdir)
+ continue
+ staging_copyfile(l, targetdir, fixme, postinsts, stagingdir)
+
+ staging_processfixme(fixme, targetdir, targetsysroot, nativesysroot, d)
+ for p in postinsts:
+ subprocess.check_call(p, shell=True)
+
+#
+# Manifests here are complicated. The main sysroot area has the unpacked sstate
+# which is unrelocated and tracked by the main sstate manifests. Each recipe
+# specific sysroot has manifests for each dependency that is installed there.
+# The task hash is used to tell whether the data needs to be reinstalled. We
+# use a symlink to point to the currently installed hash. There is also a
+# "complete" stamp file which is used to mark if installation completed. If
+# something fails (e.g. a postinst), this won't get written and we would
+# remove and reinstall the dependency. This also means partially installed
+# dependencies should get cleaned up correctly.
+#
+
+python extend_recipe_sysroot() {
+ import copy
+ import subprocess
+
+ taskdepdata = d.getVar("BB_TASKDEPDATA", False)
+ mytaskname = d.getVar("BB_RUNTASK")
+ #bb.warn(str(taskdepdata))
+ pn = d.getVar("PN")
+
+ if mytaskname.endswith("_setscene"):
+ mytaskname = mytaskname.replace("_setscene", "")
+
+ start = None
+ configuredeps = []
+ for dep in taskdepdata:
+ data = taskdepdata[dep]
+ if data[1] == mytaskname and data[0] == pn:
+ start = dep
+ break
+ if start is None:
+ bb.fatal("Couldn't find ourself in BB_TASKDEPDATA?")
+
+ # We need to figure out which sysroot files we need to expose to this task.
+ # This needs to match what would get restored from sstate, which is controlled
+ # ultimately by calls from bitbake to setscene_depvalid().
+ # That function expects a setscene dependency tree. We build a dependency tree
+ # condensed to inter-sstate task dependencies, similar to that used by setscene
+ # tasks. We can then call into setscene_depvalid() and decide
+ # which dependencies we can "see" and should expose in the recipe specific sysroot.
+ setscenedeps = copy.deepcopy(taskdepdata)
+
+ start = set([start])
+
+ sstatetasks = d.getVar("SSTATETASKS").split()
+
+ def print_dep_tree(deptree):
+ data = ""
+ for dep in deptree:
+ deps = " " + "\n ".join(deptree[dep][3]) + "\n"
+ data = "%s:\n %s\n %s\n%s %s\n %s\n" % (deptree[dep][0], deptree[dep][1], deptree[dep][2], deps, deptree[dep][4], deptree[dep][5])
+ return data
+
+ #bb.note("Full dep tree is:\n%s" % print_dep_tree(taskdepdata))
+
+ #bb.note(" start2 is %s" % str(start))
+
+ # If start is an sstate task (like do_package) we need to add in its direct dependencies
+ # else the code below won't recurse into them.
+ for dep in set(start):
+ for dep2 in setscenedeps[dep][3]:
+ start.add(dep2)
+ start.remove(dep)
+
+ #bb.note(" start3 is %s" % str(start))
+
+ # Create collapsed do_populate_sysroot -> do_populate_sysroot tree
+ for dep in taskdepdata:
+ data = setscenedeps[dep]
+ if data[1] not in sstatetasks:
+ for dep2 in setscenedeps:
+ data2 = setscenedeps[dep2]
+ if dep in data2[3]:
+ data2[3].update(setscenedeps[dep][3])
+ data2[3].remove(dep)
+ if dep in start:
+ start.update(setscenedeps[dep][3])
+ start.remove(dep)
+ del setscenedeps[dep]
+
+ # Remove circular references
+ for dep in setscenedeps:
+ if dep in setscenedeps[dep][3]:
+ setscenedeps[dep][3].remove(dep)
+
+ #bb.note("Computed dep tree is:\n%s" % print_dep_tree(setscenedeps))
+ #bb.note(" start is %s" % str(start))
+
+ # Direct dependencies should be present and can be depended upon
+ for dep in set(start):
+ if setscenedeps[dep][1] == "do_populate_sysroot":
+ if dep not in configuredeps:
+ configuredeps.append(dep)
+ bb.note("Direct dependencies are %s" % str(configuredeps))
+ #bb.note(" or %s" % str(start))
+
+ # Call into setscene_depvalid for each sub-dependency and only copy sysroot files
+ # for ones that would be restored from sstate.
+ done = list(start)
+ next = list(start)
+ while next:
+ new = []
+ for dep in next:
+ data = setscenedeps[dep]
+ for datadep in data[3]:
+ if datadep in done:
+ continue
+ taskdeps = {}
+ taskdeps[dep] = setscenedeps[dep][:2]
+ taskdeps[datadep] = setscenedeps[datadep][:2]
+ retval = setscene_depvalid(datadep, taskdeps, [], d)
+ if retval:
+ bb.note("Skipping setscene dependency %s for installation into the sysroot" % datadep)
+ continue
+ done.append(datadep)
+ new.append(datadep)
+ if datadep not in configuredeps and setscenedeps[datadep][1] == "do_populate_sysroot":
+ configuredeps.append(datadep)
+ bb.note("Adding dependency on %s" % setscenedeps[datadep][0])
+ else:
+ bb.note("Following dependency on %s" % setscenedeps[datadep][0])
+ next = new
+
+ stagingdir = d.getVar("STAGING_DIR")
+ recipesysroot = d.getVar("RECIPE_SYSROOT")
+ recipesysrootnative = d.getVar("RECIPE_SYSROOT_NATIVE")
+ current_variant = d.getVar("BBEXTENDVARIANT")
+
+ # Detect bitbake -b usage
+ nodeps = d.getVar("BB_LIMITEDDEPS") or False
+ if nodeps:
+ lock = bb.utils.lockfile(recipesysroot + "/sysroot.lock")
+ staging_populate_sysroot_dir(recipesysroot, recipesysrootnative, True, d)
+ staging_populate_sysroot_dir(recipesysroot, recipesysrootnative, False, d)
+ bb.utils.unlockfile(lock)
+
+ depdir = recipesysrootnative + "/installeddeps"
+ bb.utils.mkdirhier(depdir)
+
+ lock = bb.utils.lockfile(recipesysroot + "/sysroot.lock")
+
+ fixme = {}
+ fixme[''] = []
+ fixme['native'] = []
+ postinsts = []
+ multilibs = {}
+
+ for dep in configuredeps:
+ c = setscenedeps[dep][0]
+ taskhash = setscenedeps[dep][5]
+ taskmanifest = depdir + "/" + c + "." + taskhash
+ if mytaskname in ["do_sdk_depends", "do_populate_sdk_ext"] and c.endswith("-initial"):
+ bb.note("Skipping initial setscene dependency %s for installation into the sysroot" % c)
+ continue
+ if os.path.exists(depdir + "/" + c):
+ lnk = os.readlink(depdir + "/" + c)
+ if lnk == c + "." + taskhash and os.path.exists(depdir + "/" + c + ".complete"):
+ bb.note("%s exists in sysroot, skipping" % c)
+ continue
+ else:
+ bb.note("%s exists in sysroot, but is stale (%s vs. %s), removing." % (c, lnk, c + "." + taskhash))
+ sstate_clean_manifest(depdir + "/" + lnk, d)
+ os.unlink(depdir + "/" + c)
+ elif os.path.lexists(depdir + "/" + c):
+ os.unlink(depdir + "/" + c)
+
+ os.symlink(c + "." + taskhash, depdir + "/" + c)
+
+ d2 = d
+ destsysroot = recipesysroot
+ variant = ''
+ if setscenedeps[dep][2].startswith("virtual:multilib"):
+ variant = setscenedeps[dep][2].split(":")[2]
+ if variant != current_variant:
+ if variant not in multilibs:
+ multilibs[variant] = get_multilib_datastore(variant, d)
+ d2 = multilibs[variant]
+ destsysroot = d2.getVar("RECIPE_SYSROOT")
+
+ native = False
+ if c.endswith("-native"):
+ manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-${BUILD_ARCH}-%s.populate_sysroot" % c)
+ native = True
+ elif c.startswith("nativesdk-"):
+ manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-${SDK_ARCH}_${SDK_OS}-%s.populate_sysroot" % c)
+ elif "-cross-" in c:
+ manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-${BUILD_ARCH}_${TARGET_ARCH}-%s.populate_sysroot" % c)
+ native = True
+ elif "-crosssdk" in c:
+ manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-${BUILD_ARCH}_${SDK_ARCH}_${SDK_OS}-%s.populate_sysroot" % c)
+ native = True
+ else:
+ manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-${MACHINE_ARCH}-%s.populate_sysroot" % c)
+ if not os.path.exists(manifest):
+ manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-${TUNE_PKGARCH}-%s.populate_sysroot" % c)
+ if not os.path.exists(manifest):
+ manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-allarch-%s.populate_sysroot" % c)
+ if not os.path.exists(manifest):
+ bb.warn("Manifest %s not found?" % manifest)
+ else:
+ with open(manifest, "r") as f, open(taskmanifest, 'w') as m:
+ for l in f:
+ l = l.strip()
+ if l.endswith("/"):
+ if native:
+ dest = staging_copydir(l, recipesysrootnative, stagingdir)
+ else:
+ dest = staging_copydir(l, destsysroot, stagingdir)
+ continue
+ if native:
+ dest = staging_copyfile(l, recipesysrootnative, fixme['native'], postinsts, stagingdir)
+ else:
+ dest = staging_copyfile(l, destsysroot, fixme[''], postinsts, stagingdir)
+ if dest:
+ m.write(dest + "\n")
+
+ for f in fixme:
+ if f == '':
+ staging_processfixme(fixme[f], recipesysroot, recipesysroot, recipesysrootnative, d)
+ elif f == 'native':
+ staging_processfixme(fixme[f], recipesysrootnative, recipesysroot, recipesysrootnative, d)
+ else:
+ staging_processfixme(fixme[f], multilibs[f].getVar("RECIPE_SYSROOT"), recipesysroot, recipesysrootnative, d)
+
+ for p in postinsts:
+ subprocess.check_call(p, shell=True)
+
+ for dep in configuredeps:
+ c = setscenedeps[dep][0]
+ open(depdir + "/" + c + ".complete", "w").close()
+
+ bb.utils.unlockfile(lock)
+}
+extend_recipe_sysroot[vardepsexclude] += "MACHINE SDK_ARCH BUILD_ARCH SDK_OS BB_TASKDEPDATA"
+
+python do_prepare_recipe_sysroot () {
+ bb.build.exec_func("extend_recipe_sysroot", d)
+}
+addtask do_prepare_recipe_sysroot before do_configure after do_fetch
+
+# Clean out the recipe specific sysroots before do_fetch
+do_fetch[cleandirs] += "${RECIPE_SYSROOT} ${RECIPE_SYSROOT_NATIVE}"
+
+python staging_taskhandler() {
+ bbtasks = e.tasklist
+ for task in bbtasks:
+ deps = d.getVarFlag(task, "depends")
+ if deps and "populate_sysroot" in deps:
+ d.appendVarFlag(task, "prefuncs", " extend_recipe_sysroot")
+}
+staging_taskhandler[eventmask] = "bb.event.RecipeTaskPreProcess"
+addhandler staging_taskhandler
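
One detail of extend_recipe_sysroot() above that is easy to miss is the path mapping inside staging_copyfile()/staging_copydir(): manifest entries point into the shared "${STAGING_DIR}-components/<package arch>/<recipe>" area, and the three leading components are stripped before each file is hard-linked into the recipe sysroot. Below is a small standalone sketch of just that mapping; all paths are invented examples, and the real code additionally handles symlinks, postinst scripts and the fixmepath files.

# Standalone sketch (not part of the patch) of the manifest-entry ->
# recipe-sysroot path mapping used by staging_copyfile()/staging_copydir().

def map_manifest_entry(entry, target, stagingdir):
    # Drop the ${STAGING_DIR} prefix, then the three leading path components
    # ("-components", "<package arch>", "<recipe>"), leaving the
    # sysroot-relative portion to recreate under the recipe sysroot.
    rel = entry.replace(stagingdir, "")
    return target + "/" + "/".join(rel.split("/")[3:])

stagingdir = "/build/tmp/sysroots"  # invented ${STAGING_DIR}
recipe_sysroot_native = "/build/tmp/work/x86_64-linux/m4-native/1.4.18-r0/recipe-sysroot-native"

entry = stagingdir + "-components/x86_64/automake-native/usr/share/aclocal-1.15/init.m4"
print(map_manifest_entry(entry, recipe_sysroot_native, stagingdir))
# -> .../m4-native/1.4.18-r0/recipe-sysroot-native/usr/share/aclocal-1.15/init.m4
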
diff --git a/meta/classes/toolchain-scripts.bbclass b/meta/classes/toolchain-scripts.bbclass
index 44b4e24255..7b68d20a15 100644
--- a/meta/classes/toolchain-scripts.bbclass
+++ b/meta/classes/toolchain-scripts.bbclass
@@ -108,6 +108,7 @@ EOF
TOOLCHAIN_CONFIGSITE_NOCACHE = "${@siteinfo_get_files(d)}"
TOOLCHAIN_CONFIGSITE_SYSROOTCACHE = "${STAGING_DIR}/${MLPREFIX}${MACHINE}/${target_datadir}/${TARGET_SYS}_config_site.d"
TOOLCHAIN_NEED_CONFIGSITE_CACHE ??= "virtual/${MLPREFIX}libc ncurses"
+DEPENDS += "${TOOLCHAIN_NEED_CONFIGSITE_CACHE}"
#This function create a site config file
toolchain_create_sdk_siteconfig () {
diff --git a/meta/classes/uninative.bbclass b/meta/classes/uninative.bbclass
index 54f63afad1..ba7ca63b8f 100644
--- a/meta/classes/uninative.bbclass
+++ b/meta/classes/uninative.bbclass
@@ -91,7 +91,8 @@ def enable_uninative(d):
bb.debug(2, "Enabling uninative")
d.setVar("NATIVELSBSTRING", "universal%s" % oe.utils.host_gcc_version(d))
d.appendVar("SSTATEPOSTUNPACKFUNCS", " uninative_changeinterp")
- d.prependVar("PATH", "${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux${bindir_native}:")
+ d.setVarFlag("SSTATEPOSTUNPACKFUNCS", "vardepvalueexclude", " uninative_changeinterp")
+ d.prependVar("PATH", "${STAGING_DIR}-uninative/${BUILD_ARCH}-linux${bindir_native}:")
python uninative_changeinterp () {
import subprocess
diff --git a/meta/classes/useradd.bbclass b/meta/classes/useradd.bbclass
index 82f4d52758..326c04d0b1 100644
--- a/meta/classes/useradd.bbclass
+++ b/meta/classes/useradd.bbclass
@@ -3,7 +3,7 @@ inherit useradd_base
# base-passwd-cross provides the default passwd and group files in the
# target sysroot, and shadow -native and -sysroot provide the utilities
# and support files needed to add and modify user and group accounts
-DEPENDS_append_class-target = " base-files shadow-native shadow-sysroot shadow"
+DEPENDS_append_class-target = " base-files shadow-native shadow-sysroot shadow base-passwd"
PACKAGE_WRITE_DEPS += "shadow-native"
# This preinstall function can be run in four different contexts:
@@ -97,13 +97,13 @@ fi
}
useradd_sysroot () {
- # Pseudo may (do_install) or may not (do_populate_sysroot_setscene) be running
+ # Pseudo may (do_prepare_recipe_sysroot) or may not (do_populate_sysroot_setscene) be running
# at this point so we're explicit about the environment so pseudo can load if
# not already present.
- export PSEUDO="${FAKEROOTENV} PSEUDO_LOCALSTATEDIR=${STAGING_DIR_TARGET}${localstatedir}/pseudo ${STAGING_DIR_NATIVE}${bindir_native}/pseudo"
+ export PSEUDO="${FAKEROOTENV} PSEUDO_LOCALSTATEDIR=${STAGING_DIR_TARGET}${localstatedir}/pseudo ${PSEUDO_SYSROOT}${bindir_native}/pseudo"
# Explicitly set $D since it isn't set to anything
- # before do_install
+ # before do_prepare_recipe_sysroot
D=${STAGING_DIR_TARGET}
# Add groups and users defined for all recipe packages
@@ -117,17 +117,25 @@ useradd_sysroot () {
useradd_preinst
}
-useradd_sysroot_sstate () {
- if [ "${BB_CURRENTTASK}" = "package_setscene" -o "${BB_CURRENTTASK}" = "populate_sysroot_setscene" ]
- then
- useradd_sysroot
- fi
+python useradd_sysroot_sstate () {
+ task = d.getVar("BB_CURRENTTASK")
+ if task == "package_setscene":
+ bb.build.exec_func("useradd_sysroot", d)
+ elif task == "prepare_recipe_sysroot":
+ scriptfile = d.expand("${RECIPE_SYSROOT}${bindir}/postinst-useradd-${PN}")
+ bb.utils.mkdirhier(os.path.dirname(scriptfile))
+ with open(scriptfile, 'w') as script:
+ script.write("#!/bin/sh\n")
+ bb.data.emit_func("useradd_sysroot", script, d)
+ script.write("useradd_sysroot\n")
+ os.chmod(scriptfile, 0o755)
+ bb.build.exec_func("useradd_sysroot", d)
}
userdel_sysroot_sstate () {
if test "x${STAGING_DIR_TARGET}" != "x"; then
if [ "${BB_CURRENTTASK}" = "clean" ]; then
- export PSEUDO="${FAKEROOTENV} PSEUDO_LOCALSTATEDIR=${STAGING_DIR_TARGET}${localstatedir}/pseudo ${STAGING_DIR_NATIVE}${bindir_native}/pseudo"
+ export PSEUDO="${FAKEROOTENV} PSEUDO_LOCALSTATEDIR=${STAGING_DIR_TARGET}${localstatedir}/pseudo ${PSEUDO_SYSROOT}${bindir_native}/pseudo"
OPT="--root ${STAGING_DIR_TARGET}"
# Remove groups and users defined for package
@@ -154,10 +162,10 @@ if test "x${STAGING_DIR_TARGET}" != "x"; then
fi
}
-SSTATECLEANFUNCS_append_class-target = " userdel_sysroot_sstate"
+#SSTATECLEANFUNCS_append_class-target = " userdel_sysroot_sstate"
-do_install[prefuncs] += "${SYSROOTFUNC}"
-SYSROOTFUNC_class-target = "useradd_sysroot"
+do_prepare_recipe_sysroot[postfuncs] += "${SYSROOTFUNC}"
+SYSROOTFUNC_class-target = "useradd_sysroot_sstate"
SYSROOTFUNC = ""
SSTATEPREINSTFUNCS_append_class-target = " useradd_sysroot_sstate"