Diffstat (limited to 'meta/classes/buildhistory.bbclass')
-rw-r--r--  meta/classes/buildhistory.bbclass  618
1 file changed, 452 insertions, 166 deletions
diff --git a/meta/classes/buildhistory.bbclass b/meta/classes/buildhistory.bbclass
index cad5116ed5..7d5e3eb8fd 100644
--- a/meta/classes/buildhistory.bbclass
+++ b/meta/classes/buildhistory.bbclass
@@ -3,41 +3,107 @@
#
# Based in part on testlab.bbclass and packagehistory.bbclass
#
-# Copyright (C) 2011-2014 Intel Corporation
+# Copyright (C) 2011-2016 Intel Corporation
# Copyright (C) 2007-2011 Koen Kooi <koen@openembedded.org>
#
+inherit image-artifact-names
+
BUILDHISTORY_FEATURES ?= "image package sdk"
BUILDHISTORY_DIR ?= "${TOPDIR}/buildhistory"
BUILDHISTORY_DIR_IMAGE = "${BUILDHISTORY_DIR}/images/${MACHINE_ARCH}/${TCLIBC}/${IMAGE_BASENAME}"
BUILDHISTORY_DIR_PACKAGE = "${BUILDHISTORY_DIR}/packages/${MULTIMACH_TARGET_SYS}/${PN}"
-BUILDHISTORY_DIR_SDK = "${BUILDHISTORY_DIR}/sdk/${SDK_NAME}/${IMAGE_BASENAME}"
+
+# Setting this to non-empty will remove the old content of the buildhistory as part of
+# the current bitbake invocation and replace it with information about what was built
+# during the build.
+#
+# This is meant to be used in continuous integration (CI) systems when invoking bitbake
+# for full world builds. The effect in that case is that information about packages
+# that no longer get built also gets removed from the buildhistory, which is not
+# the case otherwise.
+#
+# The advantage over manually cleaning the buildhistory outside of bitbake is that
+# the "version-going-backwards" check still works. When relying on that, be careful
+# about failed world builds: they will lead to incomplete information in the
+# buildhistory because information about packages that could not be built will
+# also get removed. A CI system should handle that by discarding the buildhistory
+# of failed builds.
+#
+# The expected usage is via auto.conf, but passing via the command line also works
+# with: BB_ENV_EXTRAWHITE=BUILDHISTORY_RESET BUILDHISTORY_RESET=1
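+#
+# An illustrative CI setup (a sketch only, using the variables defined in this
+# class) could therefore place the following in auto.conf:
+#   BUILDHISTORY_RESET = "1"
+#   BUILDHISTORY_COMMIT = "1"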
+BUILDHISTORY_RESET ?= ""
+
+BUILDHISTORY_OLD_DIR = "${BUILDHISTORY_DIR}/${@ "old" if "${BUILDHISTORY_RESET}" else ""}"
+BUILDHISTORY_OLD_DIR_PACKAGE = "${BUILDHISTORY_OLD_DIR}/packages/${MULTIMACH_TARGET_SYS}/${PN}"
+BUILDHISTORY_DIR_SDK = "${BUILDHISTORY_DIR}/sdk/${SDK_NAME}${SDK_EXT}/${IMAGE_BASENAME}"
BUILDHISTORY_IMAGE_FILES ?= "/etc/passwd /etc/group"
-BUILDHISTORY_COMMIT ?= "0"
+BUILDHISTORY_SDK_FILES ?= "conf/local.conf conf/bblayers.conf conf/auto.conf conf/locked-sigs.inc conf/devtool.conf"
+BUILDHISTORY_COMMIT ?= "1"
BUILDHISTORY_COMMIT_AUTHOR ?= "buildhistory <buildhistory@${DISTRO}>"
BUILDHISTORY_PUSH_REPO ?= ""
+BUILDHISTORY_TAG ?= "build"
SSTATEPOSTINSTFUNCS_append = " buildhistory_emit_pkghistory"
-# We want to avoid influence the signatures of sstate tasks - first the function itself:
+# We want to avoid influencing the signatures of sstate tasks - first the function itself:
sstate_install[vardepsexclude] += "buildhistory_emit_pkghistory"
# then the value added to SSTATEPOSTINSTFUNCS:
SSTATEPOSTINSTFUNCS[vardepvalueexclude] .= "| buildhistory_emit_pkghistory"
+# Similarly for our function that gets the output signatures
+SSTATEPOSTUNPACKFUNCS_append = " buildhistory_emit_outputsigs"
+sstate_installpkgdir[vardepsexclude] += "buildhistory_emit_outputsigs"
+SSTATEPOSTUNPACKFUNCS[vardepvalueexclude] .= "| buildhistory_emit_outputsigs"
+
+# All items except those listed here will be removed from a recipe's
+# build history directory by buildhistory_emit_pkghistory(). This is
+# necessary because some of these items (package directories, files that
+# we no longer emit) might be obsolete.
#
-# Write out metadata about this package for comparision when writing future packages
+# When extending build history, derive your class from buildhistory.bbclass
+# and extend this list here with the additional files created by the derived
+# class.
+BUILDHISTORY_PRESERVE = "latest latest_srcrev sysroot"
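+#
+# For example, a derived class that emits an extra per-recipe file (the name
+# "my-report" below is purely illustrative) would keep it with:
+#   BUILDHISTORY_PRESERVE += "my-report"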
+
+PATCH_GIT_USER_EMAIL ?= "buildhistory@oe"
+PATCH_GIT_USER_NAME ?= "OpenEmbedded"
+
+#
+# Write out the contents of the sysroot
+#
+buildhistory_emit_sysroot() {
+ mkdir --parents ${BUILDHISTORY_DIR_PACKAGE}
+ case ${CLASSOVERRIDE} in
+ class-native|class-cross|class-crosssdk)
+ BASE=${SYSROOT_DESTDIR}/${STAGING_DIR_NATIVE}
+ ;;
+ *)
+ BASE=${SYSROOT_DESTDIR}
+ ;;
+ esac
+ buildhistory_list_files_no_owners $BASE ${BUILDHISTORY_DIR_PACKAGE}/sysroot
+}
+
+#
+# Write out metadata about this package for comparison when writing future packages
#
python buildhistory_emit_pkghistory() {
- if not d.getVar('BB_CURRENTTASK', True) in ['packagedata', 'packagedata_setscene']:
+ if d.getVar('BB_CURRENTTASK') in ['populate_sysroot', 'populate_sysroot_setscene']:
+ bb.build.exec_func("buildhistory_emit_sysroot", d)
+
+ if not d.getVar('BB_CURRENTTASK') in ['packagedata', 'packagedata_setscene']:
return 0
- if not "package" in (d.getVar('BUILDHISTORY_FEATURES', True) or "").split():
+ if not "package" in (d.getVar('BUILDHISTORY_FEATURES') or "").split():
return 0
import re
import json
+ import shlex
import errno
- pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)
+ pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')
+ oldpkghistdir = d.getVar('BUILDHISTORY_OLD_DIR_PACKAGE')
class RecipeInfo:
def __init__(self, name):
@@ -48,6 +114,9 @@ python buildhistory_emit_pkghistory() {
self.depends = ""
self.packages = ""
self.srcrev = ""
+ self.layer = ""
+ self.config = ""
+ self.src_uri = ""
class PackageInfo:
@@ -80,7 +149,7 @@ python buildhistory_emit_pkghistory() {
pkginfo = PackageInfo(pkg)
with open(histfile, "r") as f:
for line in f:
- lns = line.split('=')
+ lns = line.split('=', 1)
name = lns[0].strip()
value = lns[1].strip(" \t\r\n").strip('"')
if name == "PE":
@@ -110,7 +179,7 @@ python buildhistory_emit_pkghistory() {
elif name == "RCONFLICTS":
pkginfo.rconflicts = value
elif name == "PKGSIZE":
- pkginfo.size = long(value)
+ pkginfo.size = int(value)
elif name == "FILES":
pkginfo.files = value
elif name == "FILELIST":
@@ -128,13 +197,13 @@ python buildhistory_emit_pkghistory() {
def getlastpkgversion(pkg):
try:
- histfile = os.path.join(pkghistdir, pkg, "latest")
+ histfile = os.path.join(oldpkghistdir, pkg, "latest")
return readPackageInfo(pkg, histfile)
except EnvironmentError:
return None
def sortpkglist(string):
- pkgiter = re.finditer(r'[a-zA-Z0-9.+-]+( \([><=]+ [^ )]+\))?', string, 0)
+ pkgiter = re.finditer(r'[a-zA-Z0-9.+-]+( \([><=]+[^)]+\))?', string, 0)
pkglist = [p.group(0) for p in pkgiter]
pkglist.sort()
return ' '.join(pkglist)
@@ -144,12 +213,13 @@ python buildhistory_emit_pkghistory() {
items.sort()
return ' '.join(items)
- pn = d.getVar('PN', True)
- pe = d.getVar('PE', True) or "0"
- pv = d.getVar('PV', True)
- pr = d.getVar('PR', True)
+ pn = d.getVar('PN')
+ pe = d.getVar('PE') or "0"
+ pv = d.getVar('PV')
+ pr = d.getVar('PR')
+ layer = bb.utils.get_file_layer(d.getVar('FILE'), d)
- pkgdata_dir = d.getVar('PKGDATA_DIR', True)
+ pkgdata_dir = d.getVar('PKGDATA_DIR')
packages = ""
try:
with open(os.path.join(pkgdata_dir, pn)) as f:
@@ -165,40 +235,42 @@ python buildhistory_emit_pkghistory() {
raise
packagelist = packages.split()
+ preserve = d.getVar('BUILDHISTORY_PRESERVE').split()
if not os.path.exists(pkghistdir):
bb.utils.mkdirhier(pkghistdir)
else:
# Remove files for packages that no longer exist
for item in os.listdir(pkghistdir):
- if item != "latest" and item != "latest_srcrev":
+ if item not in preserve:
if item not in packagelist:
- subdir = os.path.join(pkghistdir, item)
- for subfile in os.listdir(subdir):
- os.unlink(os.path.join(subdir, subfile))
- os.rmdir(subdir)
+ itempath = os.path.join(pkghistdir, item)
+ if os.path.isdir(itempath):
+ for subfile in os.listdir(itempath):
+ os.unlink(os.path.join(itempath, subfile))
+ os.rmdir(itempath)
+ else:
+ os.unlink(itempath)
rcpinfo = RecipeInfo(pn)
rcpinfo.pe = pe
rcpinfo.pv = pv
rcpinfo.pr = pr
- rcpinfo.depends = sortlist(oe.utils.squashspaces(d.getVar('DEPENDS', True) or ""))
+ rcpinfo.depends = sortlist(oe.utils.squashspaces(d.getVar('DEPENDS') or ""))
rcpinfo.packages = packages
+ rcpinfo.layer = layer
+ rcpinfo.config = sortlist(oe.utils.squashspaces(d.getVar('PACKAGECONFIG') or ""))
+ rcpinfo.src_uri = oe.utils.squashspaces(d.getVar('SRC_URI') or "")
write_recipehistory(rcpinfo, d)
- pkgdest = d.getVar('PKGDEST', True)
+ bb.build.exec_func("read_subpackage_metadata", d)
+
for pkg in packagelist:
- pkgdata = {}
- with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f:
- for line in f.readlines():
- item = line.rstrip('\n').split(': ', 1)
- key = item[0]
- if key.endswith('_' + pkg):
- key = key[:-len(pkg)-1]
- pkgdata[key] = item[1].decode('utf-8').decode('string_escape')
-
- pkge = pkgdata.get('PKGE', '0')
- pkgv = pkgdata['PKGV']
- pkgr = pkgdata['PKGR']
+ localdata = d.createCopy()
+ localdata.setVar('OVERRIDES', d.getVar("OVERRIDES", False) + ":" + pkg)
+
+ pkge = localdata.getVar("PKGE") or '0'
+ pkgv = localdata.getVar("PKGV")
+ pkgr = localdata.getVar("PKGR")
#
# Find out what the last version was
# Make sure the version did not decrease
@@ -210,36 +282,36 @@ python buildhistory_emit_pkghistory() {
last_pkgr = lastversion.pkgr
r = bb.utils.vercmp((pkge, pkgv, pkgr), (last_pkge, last_pkgv, last_pkgr))
if r < 0:
- msg = "Package version for package %s went backwards which would break package feeds from (%s:%s-%s to %s:%s-%s)" % (pkg, last_pkge, last_pkgv, last_pkgr, pkge, pkgv, pkgr)
+ msg = "Package version for package %s went backwards which would break package feeds (from %s:%s-%s to %s:%s-%s)" % (pkg, last_pkge, last_pkgv, last_pkgr, pkge, pkgv, pkgr)
package_qa_handle_error("version-going-backwards", msg, d)
pkginfo = PackageInfo(pkg)
# Apparently the version can be different on a per-package basis (see Python)
- pkginfo.pe = pkgdata.get('PE', '0')
- pkginfo.pv = pkgdata['PV']
- pkginfo.pr = pkgdata['PR']
- pkginfo.pkg = pkgdata['PKG']
+ pkginfo.pe = localdata.getVar("PE") or '0'
+ pkginfo.pv = localdata.getVar("PV")
+ pkginfo.pr = localdata.getVar("PR")
+ pkginfo.pkg = localdata.getVar("PKG")
pkginfo.pkge = pkge
pkginfo.pkgv = pkgv
pkginfo.pkgr = pkgr
- pkginfo.rprovides = sortpkglist(oe.utils.squashspaces(pkgdata.get('RPROVIDES', "")))
- pkginfo.rdepends = sortpkglist(oe.utils.squashspaces(pkgdata.get('RDEPENDS', "")))
- pkginfo.rrecommends = sortpkglist(oe.utils.squashspaces(pkgdata.get('RRECOMMENDS', "")))
- pkginfo.rsuggests = sortpkglist(oe.utils.squashspaces(pkgdata.get('RSUGGESTS', "")))
- pkginfo.rreplaces = sortpkglist(oe.utils.squashspaces(pkgdata.get('RREPLACES', "")))
- pkginfo.rconflicts = sortpkglist(oe.utils.squashspaces(pkgdata.get('RCONFLICTS', "")))
- pkginfo.files = oe.utils.squashspaces(pkgdata.get('FILES', ""))
+ pkginfo.rprovides = sortpkglist(oe.utils.squashspaces(localdata.getVar("RPROVIDES") or ""))
+ pkginfo.rdepends = sortpkglist(oe.utils.squashspaces(localdata.getVar("RDEPENDS") or ""))
+ pkginfo.rrecommends = sortpkglist(oe.utils.squashspaces(localdata.getVar("RRECOMMENDS") or ""))
+ pkginfo.rsuggests = sortpkglist(oe.utils.squashspaces(localdata.getVar("RSUGGESTS") or ""))
+ pkginfo.rreplaces = sortpkglist(oe.utils.squashspaces(localdata.getVar("RREPLACES") or ""))
+ pkginfo.rconflicts = sortpkglist(oe.utils.squashspaces(localdata.getVar("RCONFLICTS") or ""))
+ pkginfo.files = oe.utils.squashspaces(localdata.getVar("FILES") or "")
for filevar in pkginfo.filevars:
- pkginfo.filevars[filevar] = pkgdata.get(filevar, "")
+ pkginfo.filevars[filevar] = localdata.getVar(filevar) or ""
# Gather information about packaged files
- val = pkgdata.get('FILES_INFO', '')
+ val = localdata.getVar('FILES_INFO') or ''
dictval = json.loads(val)
- filelist = dictval.keys()
+ filelist = list(dictval.keys())
filelist.sort()
- pkginfo.filelist = " ".join(filelist)
+ pkginfo.filelist = " ".join([shlex.quote(x) for x in filelist])
- pkginfo.size = int(pkgdata['PKGSIZE'])
+ pkginfo.size = int(localdata.getVar('PKGSIZE') or '0')
write_pkghistory(pkginfo, d)
@@ -247,52 +319,85 @@ python buildhistory_emit_pkghistory() {
bb.build.exec_func("buildhistory_list_pkg_files", d)
}
+python buildhistory_emit_outputsigs() {
+ if not "task" in (d.getVar('BUILDHISTORY_FEATURES') or "").split():
+ return
+
+ import hashlib
+
+ taskoutdir = os.path.join(d.getVar('BUILDHISTORY_DIR'), 'task', 'output')
+ bb.utils.mkdirhier(taskoutdir)
+ currenttask = d.getVar('BB_CURRENTTASK')
+ pn = d.getVar('PN')
+ taskfile = os.path.join(taskoutdir, '%s.%s' % (pn, currenttask))
+
+ cwd = os.getcwd()
+ filesigs = {}
+ for root, _, files in os.walk(cwd):
+ for fname in files:
+ if fname == 'fixmepath':
+ continue
+ fullpath = os.path.join(root, fname)
+ try:
+ if os.path.islink(fullpath):
+ sha256 = hashlib.sha256(os.readlink(fullpath).encode('utf-8')).hexdigest()
+ elif os.path.isfile(fullpath):
+ sha256 = bb.utils.sha256_file(fullpath)
+ else:
+ continue
+ except OSError:
+ bb.warn('buildhistory: unable to read %s to get output signature' % fullpath)
+ continue
+ filesigs[os.path.relpath(fullpath, cwd)] = sha256
+ with open(taskfile, 'w') as f:
+ for fpath, fsig in sorted(filesigs.items(), key=lambda item: item[0]):
+ f.write('%s %s\n' % (fpath, fsig))
+}
-def write_recipehistory(rcpinfo, d):
- import codecs
+def write_recipehistory(rcpinfo, d):
bb.debug(2, "Writing recipe history")
- pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)
+ pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')
infofile = os.path.join(pkghistdir, "latest")
- with codecs.open(infofile, "w", encoding='utf8') as f:
+ with open(infofile, "w") as f:
if rcpinfo.pe != "0":
f.write(u"PE = %s\n" % rcpinfo.pe)
f.write(u"PV = %s\n" % rcpinfo.pv)
f.write(u"PR = %s\n" % rcpinfo.pr)
f.write(u"DEPENDS = %s\n" % rcpinfo.depends)
f.write(u"PACKAGES = %s\n" % rcpinfo.packages)
+ f.write(u"LAYER = %s\n" % rcpinfo.layer)
+ f.write(u"CONFIG = %s\n" % rcpinfo.config)
+ f.write(u"SRC_URI = %s\n" % rcpinfo.src_uri)
+ write_latest_srcrev(d, pkghistdir)
def write_pkghistory(pkginfo, d):
- import codecs
-
bb.debug(2, "Writing package history for package %s" % pkginfo.name)
- pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)
+ pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')
pkgpath = os.path.join(pkghistdir, pkginfo.name)
if not os.path.exists(pkgpath):
bb.utils.mkdirhier(pkgpath)
infofile = os.path.join(pkgpath, "latest")
- with codecs.open(infofile, "w", encoding='utf8') as f:
+ with open(infofile, "w") as f:
if pkginfo.pe != "0":
f.write(u"PE = %s\n" % pkginfo.pe)
f.write(u"PV = %s\n" % pkginfo.pv)
f.write(u"PR = %s\n" % pkginfo.pr)
- pkgvars = {}
- pkgvars['PKG'] = pkginfo.pkg if pkginfo.pkg != pkginfo.name else ''
- pkgvars['PKGE'] = pkginfo.pkge if pkginfo.pkge != pkginfo.pe else ''
- pkgvars['PKGV'] = pkginfo.pkgv if pkginfo.pkgv != pkginfo.pv else ''
- pkgvars['PKGR'] = pkginfo.pkgr if pkginfo.pkgr != pkginfo.pr else ''
- for pkgvar in pkgvars:
- val = pkgvars[pkgvar]
- if val:
- f.write(u"%s = %s\n" % (pkgvar, val))
-
+ if pkginfo.pkg != pkginfo.name:
+ f.write(u"PKG = %s\n" % pkginfo.pkg)
+ if pkginfo.pkge != pkginfo.pe:
+ f.write(u"PKGE = %s\n" % pkginfo.pkge)
+ if pkginfo.pkgv != pkginfo.pv:
+ f.write(u"PKGV = %s\n" % pkginfo.pkgv)
+ if pkginfo.pkgr != pkginfo.pr:
+ f.write(u"PKGR = %s\n" % pkginfo.pkgr)
f.write(u"RPROVIDES = %s\n" % pkginfo.rprovides)
f.write(u"RDEPENDS = %s\n" % pkginfo.rdepends)
f.write(u"RRECOMMENDS = %s\n" % pkginfo.rrecommends)
@@ -310,7 +415,7 @@ def write_pkghistory(pkginfo, d):
filevarpath = os.path.join(pkgpath, "latest.%s" % filevar)
val = pkginfo.filevars[filevar]
if val:
- with codecs.open(filevarpath, "w", encoding='utf8') as f:
+ with open(filevarpath, "w") as f:
f.write(val)
else:
if os.path.exists(filevarpath):
@@ -322,18 +427,21 @@ def write_pkghistory(pkginfo, d):
def buildhistory_list_installed(d, rootfs_type="image"):
from oe.rootfs import image_list_installed_packages
from oe.sdk import sdk_list_installed_packages
+ from oe.utils import format_pkg_list
- process_list = [('file', 'bh_installed_pkgs.txt'),\
- ('deps', 'bh_installed_pkgs_deps.txt')]
+ process_list = [('file', 'bh_installed_pkgs_%s.txt' % os.getpid()),\
+ ('deps', 'bh_installed_pkgs_deps_%s.txt' % os.getpid())]
+
+ if rootfs_type == "image":
+ pkgs = image_list_installed_packages(d)
+ else:
+ pkgs = sdk_list_installed_packages(d, rootfs_type == "sdk_target")
for output_type, output_file in process_list:
- output_file_full = os.path.join(d.getVar('WORKDIR', True), output_file)
+ output_file_full = os.path.join(d.getVar('WORKDIR'), output_file)
with open(output_file_full, 'w') as output:
- if rootfs_type == "image":
- output.write(image_list_installed_packages(d, output_type))
- else:
- output.write(sdk_list_installed_packages(d, rootfs_type == "sdk_target", output_type))
+ output.write(format_pkg_list(pkgs, output_type))
python buildhistory_list_installed_image() {
buildhistory_list_installed(d)
@@ -352,9 +460,10 @@ buildhistory_get_installed() {
# Get list of installed packages
pkgcache="$1/installed-packages.tmp"
- cat ${WORKDIR}/bh_installed_pkgs.txt | sort > $pkgcache && rm ${WORKDIR}/bh_installed_pkgs.txt
+ cat ${WORKDIR}/bh_installed_pkgs_${PID}.txt | sort > $pkgcache && rm ${WORKDIR}/bh_installed_pkgs_${PID}.txt
cat $pkgcache | awk '{ print $1 }' > $1/installed-package-names.txt
+
if [ -s $pkgcache ] ; then
cat $pkgcache | awk '{ print $2 }' | xargs -n1 basename > $1/installed-packages.txt
else
@@ -363,26 +472,26 @@ buildhistory_get_installed() {
# Produce dependency graph
# First, quote each name to handle characters that cause issues for dot
- sed 's:\([^| ]*\):"\1":g' ${WORKDIR}/bh_installed_pkgs_deps.txt > $1/depends.tmp && \
- rm ${WORKDIR}/bh_installed_pkgs_deps.txt
- # Change delimiter from pipe to -> and set style for recommend lines
- sed -i -e 's:|: -> :' -e 's:"\[REC\]":[style=dotted]:' -e 's:$:;:' $1/depends.tmp
+ sed 's:\([^| ]*\):"\1":g' ${WORKDIR}/bh_installed_pkgs_deps_${PID}.txt > $1/depends.tmp &&
+ rm ${WORKDIR}/bh_installed_pkgs_deps_${PID}.txt
+ # Remove lines with rpmlib(...) and config(...) dependencies, change the
+ # delimiter from pipe to "->", set the style for recommend lines and
+ # turn versioned dependencies into edge labels.
+ sed -i -e '/rpmlib(/d' \
+ -e '/config(/d' \
+ -e 's:|: -> :' \
+ -e 's:"\[REC\]":[style=dotted]:' \
+ -e 's:"\([<>=]\+\)" "\([^"]*\)":[label="\1 \2"]:' \
+ $1/depends.tmp
# Add header, sorted and de-duped contents and footer and then delete the temp file
printf "digraph depends {\n node [shape=plaintext]\n" > $1/depends.dot
- cat $1/depends.tmp | sort | uniq >> $1/depends.dot
+ cat $1/depends.tmp | sort -u >> $1/depends.dot
echo "}" >> $1/depends.dot
rm $1/depends.tmp
# Produce installed package sizes list
- printf "" > $1/installed-package-sizes.tmp
- cat $pkgcache | while read pkg pkgfile pkgarch
- do
- size=`oe-pkgdata-util -p ${PKGDATA_DIR} read-value "PKGSIZE" ${pkg}_${pkgarch}`
- if [ "$size" != "" ] ; then
- echo "$size $pkg" >> $1/installed-package-sizes.tmp
- fi
- done
- cat $1/installed-package-sizes.tmp | sort -n -r | awk '{print $1 "\tKiB " $2}' > $1/installed-package-sizes.txt
+ oe-pkgdata-util -p ${PKGDATA_DIR} read-value "PKGSIZE" -n -f $pkgcache > $1/installed-package-sizes.tmp
+ cat $1/installed-package-sizes.tmp | awk '{print $2 "\tKiB\t" $1}' | sort -n -r > $1/installed-package-sizes.txt
rm $1/installed-package-sizes.tmp
# We're now done with the cache, delete it
@@ -390,7 +499,7 @@ buildhistory_get_installed() {
if [ "$2" != "sdk" ] ; then
# Produce some cut-down graphs (for readability)
- grep -v kernel_image $1/depends.dot | grep -v kernel-2 | grep -v kernel-3 > $1/depends-nokernel.dot
+ grep -v kernel-image $1/depends.dot | grep -v kernel-3 | grep -v kernel-4 > $1/depends-nokernel.dot
grep -v libc6 $1/depends-nokernel.dot | grep -v libgcc > $1/depends-nokernel-nolibc.dot
grep -v update- $1/depends-nokernel-nolibc.dot > $1/depends-nokernel-nolibc-noupdate.dot
grep -v kernel-module $1/depends-nokernel-nolibc-noupdate.dot > $1/depends-nokernel-nolibc-noupdate-nomodules.dot
@@ -434,25 +543,43 @@ buildhistory_get_sdk_installed_target() {
buildhistory_list_files() {
# List the files in the specified directory, but exclude date/time etc.
- # This awk script is somewhat messy, but handles where the size is not printed for device files under pseudo
- ( cd $1 && find . -printf "%M %-10u %-10g %10s %p -> %l\n" | sort -k5 | sed 's/ * -> $//' > $2 )
+ # This is somewhat messy, but handles the case where the size is not printed for device files under pseudo
+ ( cd $1
+ find_cmd='find . ! -path . -printf "%M %-10u %-10g %10s %p -> %l\n"'
+ if [ "$3" = "fakeroot" ] ; then
+ eval ${FAKEROOTENV} ${FAKEROOTCMD} $find_cmd
+ else
+ eval $find_cmd
+ fi | sort -k5 | sed 's/ * -> $//' > $2 )
+}
+
+buildhistory_list_files_no_owners() {
+ # List the files in the specified directory, but exclude date/time etc.
+ # Also don't output the ownership data, but instead output just - - so
+ # that the same parsing code as for _list_files works.
+ # This is somewhat messy, but handles the case where the size is not printed for device files under pseudo
+ ( cd $1
+ find_cmd='find . ! -path . -printf "%M - - %10s %p -> %l\n"'
+ if [ "$3" = "fakeroot" ] ; then
+ eval ${FAKEROOTENV} ${FAKEROOTCMD} "$find_cmd"
+ else
+ eval "$find_cmd"
+ fi | sort -k5 | sed 's/ * -> $//' > $2 )
}
buildhistory_list_pkg_files() {
- file_prefix="files-in-"
-
- # Create individual files-in-package for each recipe's package
- for pkgdir in $(find ${PKGDEST}/* -maxdepth 0 -type d); do
- pkgname=$(basename ${pkgdir})
- outfolder="${BUILDHISTORY_DIR_PACKAGE}/${pkgname}"
- outfile="${outfolder}/${file_prefix}${pkgname}.txt"
- # Make sure the output folder, exist so we can create the files-in-$pkgname.txt file
- if [ ! -d ${outfolder} ] ; then
- bbdebug 2 "Folder ${outfolder} does not exist, file ${outfile} not created"
- continue
- fi
- buildhistory_list_files ${pkgdir} ${outfile}
- done
+ # Create individual files-in-package for each recipe's package
+ for pkgdir in $(find ${PKGDEST}/* -maxdepth 0 -type d); do
+ pkgname=$(basename $pkgdir)
+ outfolder="${BUILDHISTORY_DIR_PACKAGE}/$pkgname"
+ outfile="$outfolder/files-in-package.txt"
+ # Make sure the output folder exists so we can create the file
+ if [ ! -d $outfolder ] ; then
+ bbdebug 2 "Folder $outfolder does not exist, file $outfile not created"
+ continue
+ fi
+ buildhistory_list_files $pkgdir $outfile fakeroot
+ done
}
buildhistory_get_imageinfo() {
@@ -460,6 +587,7 @@ buildhistory_get_imageinfo() {
return
fi
+ mkdir -p ${BUILDHISTORY_DIR_IMAGE}
buildhistory_list_files ${IMAGE_ROOTFS} ${BUILDHISTORY_DIR_IMAGE}/files-in-image.txt
# Collect files requested in BUILDHISTORY_IMAGE_FILES
@@ -494,6 +622,15 @@ buildhistory_get_sdkinfo() {
buildhistory_list_files ${SDK_OUTPUT} ${BUILDHISTORY_DIR_SDK}/files-in-sdk.txt
+ # Collect files requested in BUILDHISTORY_SDK_FILES
+ rm -rf ${BUILDHISTORY_DIR_SDK}/sdk-files
+ for f in ${BUILDHISTORY_SDK_FILES}; do
+ if [ -f ${SDK_OUTPUT}/${SDKPATH}/$f ] ; then
+ mkdir -p ${BUILDHISTORY_DIR_SDK}/sdk-files/`dirname $f`
+ cp ${SDK_OUTPUT}/${SDKPATH}/$f ${BUILDHISTORY_DIR_SDK}/sdk-files/$f
+ fi
+ done
+
# Record some machine-readable meta-information about the SDK
printf "" > ${BUILDHISTORY_DIR_SDK}/sdk-info.txt
cat >> ${BUILDHISTORY_DIR_SDK}/sdk-info.txt <<END
@@ -503,26 +640,63 @@ END
echo "SDKSIZE = $sdksize" >> ${BUILDHISTORY_DIR_SDK}/sdk-info.txt
}
+python buildhistory_get_extra_sdkinfo() {
+ import operator
+ from oe.sdk import get_extra_sdkinfo
+
+ sstate_dir = d.expand('${SDK_OUTPUT}/${SDKPATH}/sstate-cache')
+ extra_info = get_extra_sdkinfo(sstate_dir)
+
+ if d.getVar('BB_CURRENTTASK') == 'populate_sdk_ext' and \
+ "sdk" in (d.getVar('BUILDHISTORY_FEATURES') or "").split():
+ with open(d.expand('${BUILDHISTORY_DIR_SDK}/sstate-package-sizes.txt'), 'w') as f:
+ filesizes_sorted = sorted(extra_info['filesizes'].items(), key=operator.itemgetter(1, 0), reverse=True)
+ for fn, size in filesizes_sorted:
+ f.write('%10d KiB %s\n' % (size, fn))
+ with open(d.expand('${BUILDHISTORY_DIR_SDK}/sstate-task-sizes.txt'), 'w') as f:
+ tasksizes_sorted = sorted(extra_info['tasksizes'].items(), key=operator.itemgetter(1, 0), reverse=True)
+ for task, size in tasksizes_sorted:
+ f.write('%10d KiB %s\n' % (size, task))
+}
+
# By using ROOTFS_POSTUNINSTALL_COMMAND we get in after uninstallation of
# unneeded packages but before the removal of packaging files
-ROOTFS_POSTUNINSTALL_COMMAND += " buildhistory_list_installed_image ;\
- buildhistory_get_image_installed ; "
+ROOTFS_POSTUNINSTALL_COMMAND += "buildhistory_list_installed_image ;"
+ROOTFS_POSTUNINSTALL_COMMAND += "buildhistory_get_image_installed ;"
+ROOTFS_POSTUNINSTALL_COMMAND[vardepvalueexclude] .= "| buildhistory_list_installed_image ;| buildhistory_get_image_installed ;"
+ROOTFS_POSTUNINSTALL_COMMAND[vardepsexclude] += "buildhistory_list_installed_image buildhistory_get_image_installed"
-IMAGE_POSTPROCESS_COMMAND += " buildhistory_get_imageinfo ; "
+IMAGE_POSTPROCESS_COMMAND += "buildhistory_get_imageinfo ;"
+IMAGE_POSTPROCESS_COMMAND[vardepvalueexclude] .= "| buildhistory_get_imageinfo ;"
+IMAGE_POSTPROCESS_COMMAND[vardepsexclude] += "buildhistory_get_imageinfo"
# We want these to be the last run so that we get called after complementary package installation
-POPULATE_SDK_POST_TARGET_COMMAND_append = " buildhistory_list_installed_sdk_target ;\
- buildhistory_get_sdk_installed_target ; "
-POPULATE_SDK_POST_HOST_COMMAND_append = " buildhistory_list_installed_sdk_host ;\
- buildhistory_get_sdk_installed_host ; "
-
-SDK_POSTPROCESS_COMMAND += "buildhistory_get_sdkinfo ; "
+POPULATE_SDK_POST_TARGET_COMMAND_append = " buildhistory_list_installed_sdk_target;"
+POPULATE_SDK_POST_TARGET_COMMAND_append = " buildhistory_get_sdk_installed_target;"
+POPULATE_SDK_POST_TARGET_COMMAND[vardepvalueexclude] .= "| buildhistory_list_installed_sdk_target;| buildhistory_get_sdk_installed_target;"
+
+POPULATE_SDK_POST_HOST_COMMAND_append = " buildhistory_list_installed_sdk_host;"
+POPULATE_SDK_POST_HOST_COMMAND_append = " buildhistory_get_sdk_installed_host;"
+POPULATE_SDK_POST_HOST_COMMAND[vardepvalueexclude] .= "| buildhistory_list_installed_sdk_host;| buildhistory_get_sdk_installed_host;"
+
+SDK_POSTPROCESS_COMMAND_append = " buildhistory_get_sdkinfo ; buildhistory_get_extra_sdkinfo; "
+SDK_POSTPROCESS_COMMAND[vardepvalueexclude] .= "| buildhistory_get_sdkinfo ; buildhistory_get_extra_sdkinfo; "
+
+python buildhistory_write_sigs() {
+ if not "task" in (d.getVar('BUILDHISTORY_FEATURES') or "").split():
+ return
+
+ # Create sigs file
+ if hasattr(bb.parse.siggen, 'dump_siglist'):
+ taskoutdir = os.path.join(d.getVar('BUILDHISTORY_DIR'), 'task')
+ bb.utils.mkdirhier(taskoutdir)
+ bb.parse.siggen.dump_siglist(os.path.join(taskoutdir, 'tasksigs.txt'))
+}
def buildhistory_get_build_id(d):
- if d.getVar('BB_WORKERCONTEXT', True) != '1':
+ if d.getVar('BB_WORKERCONTEXT') != '1':
return ""
localdata = bb.data.createCopy(d)
- bb.data.update_data(localdata)
statuslines = []
for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata):
g = globals()
@@ -533,15 +707,32 @@ def buildhistory_get_build_id(d):
if flines:
statuslines.extend(flines)
- statusheader = d.getVar('BUILDCFG_HEADER', True)
+ statusheader = d.getVar('BUILDCFG_HEADER')
return('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))
+def buildhistory_get_modified(path):
+ # copied from get_layer_git_status() in image-buildinfo.bbclass
+ import subprocess
+ try:
+ subprocess.check_output("""cd %s; export PSEUDO_UNLOAD=1; set -e;
+ git diff --quiet --no-ext-diff
+ git diff --quiet --no-ext-diff --cached""" % path,
+ shell=True,
+ stderr=subprocess.STDOUT)
+ return ""
+ except subprocess.CalledProcessError as ex:
+ # Silently treat errors as "modified", without checking for the
+ # (expected) return code 1 in a modified git repo. For example, we get
+ # output and a 129 return code when a layer isn't a git repo at all.
+ return " -- modified"
+
def buildhistory_get_metadata_revs(d):
# We want an easily machine-readable format here, so get_layers_branch_rev isn't quite what we want
- layers = (d.getVar("BBLAYERS", True) or "").split()
- medadata_revs = ["%-17s = %s:%s" % (os.path.basename(i), \
+ layers = (d.getVar("BBLAYERS") or "").split()
+ medadata_revs = ["%-17s = %s:%s%s" % (os.path.basename(i), \
base_get_metadata_git_branch(i, None).strip(), \
- base_get_metadata_git_revision(i, None)) \
+ base_get_metadata_git_revision(i, None), \
+ buildhistory_get_modified(i)) \
for i in layers]
return '\n'.join(medadata_revs)
@@ -550,7 +741,7 @@ def outputvars(vars, listvars, d):
listvars = listvars.split()
ret = ""
for var in vars:
- value = d.getVar(var, True) or ""
+ value = d.getVar(var) or ""
if var in listvars:
# Squash out spaces
value = oe.utils.squashspaces(value)
@@ -558,28 +749,70 @@ def outputvars(vars, listvars, d):
return ret.rstrip('\n')
def buildhistory_get_imagevars(d):
- if d.getVar('BB_WORKERCONTEXT', True) != '1':
+ if d.getVar('BB_WORKERCONTEXT') != '1':
return ""
imagevars = "DISTRO DISTRO_VERSION USER_CLASSES IMAGE_CLASSES IMAGE_FEATURES IMAGE_LINGUAS IMAGE_INSTALL BAD_RECOMMENDATIONS NO_RECOMMENDATIONS PACKAGE_EXCLUDE ROOTFS_POSTPROCESS_COMMAND IMAGE_POSTPROCESS_COMMAND"
listvars = "USER_CLASSES IMAGE_CLASSES IMAGE_FEATURES IMAGE_LINGUAS IMAGE_INSTALL BAD_RECOMMENDATIONS PACKAGE_EXCLUDE"
return outputvars(imagevars, listvars, d)
def buildhistory_get_sdkvars(d):
- if d.getVar('BB_WORKERCONTEXT', True) != '1':
+ if d.getVar('BB_WORKERCONTEXT') != '1':
return ""
sdkvars = "DISTRO DISTRO_VERSION SDK_NAME SDK_VERSION SDKMACHINE SDKIMAGE_FEATURES BAD_RECOMMENDATIONS NO_RECOMMENDATIONS PACKAGE_EXCLUDE"
- listvars = "SDKIMAGE_FEATURES BAD_RECOMMENDATIONS PACKAGE_EXCLUDE"
+ if d.getVar('BB_CURRENTTASK') == 'populate_sdk_ext':
+ # Extensible SDK uses some additional variables
+ sdkvars += " SDK_LOCAL_CONF_WHITELIST SDK_LOCAL_CONF_BLACKLIST SDK_INHERIT_BLACKLIST SDK_UPDATE_URL SDK_EXT_TYPE SDK_RECRDEP_TASKS SDK_INCLUDE_PKGDATA SDK_INCLUDE_TOOLCHAIN"
+ listvars = "SDKIMAGE_FEATURES BAD_RECOMMENDATIONS PACKAGE_EXCLUDE SDK_LOCAL_CONF_WHITELIST SDK_LOCAL_CONF_BLACKLIST SDK_INHERIT_BLACKLIST"
return outputvars(sdkvars, listvars, d)
def buildhistory_get_cmdline(d):
- if sys.argv[0].endswith('bin/bitbake'):
- bincmd = 'bitbake'
- else:
- bincmd = sys.argv[0]
- return '%s %s' % (bincmd, ' '.join(sys.argv[1:]))
+ argv = d.getVar('BB_CMDLINE', False)
+ if argv:
+ if argv[0].endswith('bin/bitbake'):
+ bincmd = 'bitbake'
+ else:
+ bincmd = argv[0]
+ return '%s %s' % (bincmd, ' '.join(argv[1:]))
+ return ''
+buildhistory_single_commit() {
+ if [ "$3" = "" ] ; then
+ commitopts="${BUILDHISTORY_DIR}/ --allow-empty"
+ shortlogprefix="No changes: "
+ else
+ commitopts=""
+ shortlogprefix=""
+ fi
+ if [ "${BUILDHISTORY_BUILD_FAILURES}" = "0" ] ; then
+ result="succeeded"
+ else
+ result="failed"
+ fi
+ case ${BUILDHISTORY_BUILD_INTERRUPTED} in
+ 1)
+ result="$result (interrupted)"
+ ;;
+ 2)
+ result="$result (force interrupted)"
+ ;;
+ esac
+ commitmsgfile=`mktemp`
+ cat > $commitmsgfile << END
+${shortlogprefix}Build ${BUILDNAME} of ${DISTRO} ${DISTRO_VERSION} for machine ${MACHINE} on $2
+
+cmd: $1
+
+result: $result
+
+metadata revisions:
+END
+ cat ${BUILDHISTORY_DIR}/metadata-revs >> $commitmsgfile
+ git commit $commitopts -F $commitmsgfile --author "${BUILDHISTORY_COMMIT_AUTHOR}" > /dev/null
+ rm $commitmsgfile
+}
+
buildhistory_commit() {
if [ ! -d ${BUILDHISTORY_DIR} ] ; then
# Code above that creates this dir never executed, so there can't be anything to commit
@@ -593,22 +826,16 @@ END
( cd ${BUILDHISTORY_DIR}/
# Initialise the repo if necessary
- if [ ! -d .git ] ; then
+ if [ ! -e .git ] ; then
git init -q
else
- git tag -f build-minus-3 build-minus-2 > /dev/null 2>&1 || true
- git tag -f build-minus-2 build-minus-1 > /dev/null 2>&1 || true
- git tag -f build-minus-1 > /dev/null 2>&1 || true
- fi
- # If the user hasn't set up their name/email, set some defaults
- # just for this repo (otherwise the commit will fail with older
- # versions of git)
- if ! git config user.email > /dev/null ; then
- git config --local user.email "buildhistory@${DISTRO}"
- fi
- if ! git config user.name > /dev/null ; then
- git config --local user.name "buildhistory"
+ git tag -f ${BUILDHISTORY_TAG}-minus-3 ${BUILDHISTORY_TAG}-minus-2 > /dev/null 2>&1 || true
+ git tag -f ${BUILDHISTORY_TAG}-minus-2 ${BUILDHISTORY_TAG}-minus-1 > /dev/null 2>&1 || true
+ git tag -f ${BUILDHISTORY_TAG}-minus-1 > /dev/null 2>&1 || true
fi
+
+ check_git_config
+
# Check if there are new/changed files to commit (other than metadata-revs)
repostatus=`git status --porcelain | grep -v " metadata-revs$"`
HOSTNAME=`hostname 2>/dev/null || echo unknown`
@@ -617,27 +844,54 @@ END
git add -A .
# porcelain output looks like "?? packages/foo/bar"
# Ensure we commit metadata-revs with the first commit
- for entry in `echo "$repostatus" | awk '{print $2}' | awk -F/ '{print $1}' | sort | uniq` ; do
- git commit $entry metadata-revs -m "$entry: Build ${BUILDNAME} of ${DISTRO} ${DISTRO_VERSION} for machine ${MACHINE} on $HOSTNAME" -m "cmd: $CMDLINE" --author "${BUILDHISTORY_COMMIT_AUTHOR}" > /dev/null
- done
+ buildhistory_single_commit "$CMDLINE" "$HOSTNAME" dummy
git gc --auto --quiet
- if [ "${BUILDHISTORY_PUSH_REPO}" != "" ] ; then
- git push -q ${BUILDHISTORY_PUSH_REPO}
- fi
else
- git commit ${BUILDHISTORY_DIR}/ --allow-empty -m "No changes: Build ${BUILDNAME} of ${DISTRO} ${DISTRO_VERSION} for machine ${MACHINE} on $HOSTNAME" -m "cmd: $CMDLINE" --author "${BUILDHISTORY_COMMIT_AUTHOR}" > /dev/null
+ buildhistory_single_commit "$CMDLINE" "$HOSTNAME"
+ fi
+ if [ "${BUILDHISTORY_PUSH_REPO}" != "" ] ; then
+ git push -q ${BUILDHISTORY_PUSH_REPO}
fi) || true
}
python buildhistory_eventhandler() {
- if e.data.getVar('BUILDHISTORY_FEATURES', True).strip():
- if e.data.getVar("BUILDHISTORY_COMMIT", True) == "1":
- bb.note("Writing buildhistory")
- bb.build.exec_func("buildhistory_commit", e.data)
+ if e.data.getVar('BUILDHISTORY_FEATURES').strip():
+ reset = e.data.getVar("BUILDHISTORY_RESET")
+ olddir = e.data.getVar("BUILDHISTORY_OLD_DIR")
+ if isinstance(e, bb.event.BuildStarted):
+ if reset:
+ import shutil
+ # Clean up after potentially interrupted build.
+ if os.path.isdir(olddir):
+ shutil.rmtree(olddir)
+ rootdir = e.data.getVar("BUILDHISTORY_DIR")
+ entries = [ x for x in os.listdir(rootdir) if not x.startswith('.') ]
+ bb.utils.mkdirhier(olddir)
+ for entry in entries:
+ os.rename(os.path.join(rootdir, entry),
+ os.path.join(olddir, entry))
+ elif isinstance(e, bb.event.BuildCompleted):
+ if reset:
+ import shutil
+ shutil.rmtree(olddir)
+ if e.data.getVar("BUILDHISTORY_COMMIT") == "1":
+ bb.note("Writing buildhistory")
+ bb.build.exec_func("buildhistory_write_sigs", d)
+ import time
+ start=time.time()
+ localdata = bb.data.createCopy(e.data)
+ localdata.setVar('BUILDHISTORY_BUILD_FAILURES', str(e._failures))
+ interrupted = getattr(e, '_interrupted', 0)
+ localdata.setVar('BUILDHISTORY_BUILD_INTERRUPTED', str(interrupted))
+ bb.build.exec_func("buildhistory_commit", localdata)
+ stop=time.time()
+ bb.note("Writing buildhistory took: %s seconds" % round(stop-start))
+ else:
+ bb.note("No commit since BUILDHISTORY_COMMIT != '1'")
}
addhandler buildhistory_eventhandler
-buildhistory_eventhandler[eventmask] = "bb.event.BuildCompleted"
+buildhistory_eventhandler[eventmask] = "bb.event.BuildCompleted bb.event.BuildStarted"
# FIXME this ought to be moved into the fetcher
@@ -647,7 +901,7 @@ def _get_srcrev_values(d):
"""
scms = []
- fetcher = bb.fetch.Fetch(d.getVar('SRC_URI', True).split(), d)
+ fetcher = bb.fetch.Fetch(d.getVar('SRC_URI').split(), d)
urldata = fetcher.ud
for u in urldata:
if urldata[u].method.supports_srcrev():
@@ -679,7 +933,10 @@ def _get_srcrev_values(d):
do_fetch[postfuncs] += "write_srcrev"
do_fetch[vardepsexclude] += "write_srcrev"
python write_srcrev() {
- pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)
+ write_latest_srcrev(d, d.getVar('BUILDHISTORY_DIR_PACKAGE'))
+}
+
+def write_latest_srcrev(d, pkghistdir):
srcrevfile = os.path.join(pkghistdir, 'latest_srcrev')
srcrevs, tag_srcrevs = _get_srcrev_values(d)
@@ -700,21 +957,50 @@ python write_srcrev() {
if orig_srcrev != 'INVALID':
f.write('# SRCREV = "%s"\n' % orig_srcrev)
if len(srcrevs) > 1:
- for name, srcrev in srcrevs.items():
+ for name, srcrev in sorted(srcrevs.items()):
orig_srcrev = d.getVar('SRCREV_%s' % name, False)
if orig_srcrev:
f.write('# SRCREV_%s = "%s"\n' % (name, orig_srcrev))
f.write('SRCREV_%s = "%s"\n' % (name, srcrev))
else:
- f.write('SRCREV = "%s"\n' % srcrevs.itervalues().next())
+ f.write('SRCREV = "%s"\n' % next(iter(srcrevs.values())))
if len(tag_srcrevs) > 0:
- for name, srcrev in tag_srcrevs.items():
+ for name, srcrev in sorted(tag_srcrevs.items()):
f.write('# tag_%s = "%s"\n' % (name, srcrev))
if name in old_tag_srcrevs and old_tag_srcrevs[name] != srcrev:
- pkg = d.getVar('PN', True)
+ pkg = d.getVar('PN')
bb.warn("Revision for tag %s in package %s was changed since last build (from %s to %s)" % (name, pkg, old_tag_srcrevs[name], srcrev))
else:
if os.path.exists(srcrevfile):
os.remove(srcrevfile)
+
+do_testimage[postfuncs] += "write_ptest_result"
+do_testimage[vardepsexclude] += "write_ptest_result"
+
+python write_ptest_result() {
+ write_latest_ptest_result(d, d.getVar('BUILDHISTORY_DIR'))
}
+
+def write_latest_ptest_result(d, histdir):
+ import glob
+ import subprocess
+ test_log_dir = d.getVar('TEST_LOG_DIR')
+ input_ptest = os.path.join(test_log_dir, 'ptest_log')
+ output_ptest = os.path.join(histdir, 'ptest')
+ if os.path.exists(input_ptest):
+ try:
+ # Lock it to avoid race issues
+ lock = bb.utils.lockfile(output_ptest + "/ptest.lock")
+ bb.utils.mkdirhier(output_ptest)
+ oe.path.copytree(input_ptest, output_ptest)
+ # Sort test result
+ for result in glob.glob('%s/pass.fail.*' % output_ptest):
+ bb.debug(1, 'Processing %s' % result)
+ cmd = ['sort', result, '-o', result]
+ bb.debug(1, 'Running %s' % cmd)
+ ret = subprocess.call(cmd)
+ if ret != 0:
+ bb.error('Failed to run %s!' % cmd)
+ finally:
+ bb.utils.unlockfile(lock)