Diffstat (limited to 'scripts/contrib')
-rwxr-xr-x  scripts/contrib/bb-perf/bb-matrix-plot.sh        |  15
-rwxr-xr-x  scripts/contrib/bb-perf/bb-matrix.sh             |  15
-rwxr-xr-x  scripts/contrib/bb-perf/buildstats-plot.sh       | 160
-rwxr-xr-x  scripts/contrib/bb-perf/buildstats.sh            | 167
-rwxr-xr-x  scripts/contrib/bbvars.py                        | 186
-rwxr-xr-x  scripts/contrib/build-perf-test-wrapper.sh       | 258
-rwxr-xr-x  scripts/contrib/build-perf-test.sh               | 369
-rwxr-xr-x  scripts/contrib/convert-overrides.py             | 144
-rwxr-xr-x  scripts/contrib/convert-srcuri.py                |  63
-rwxr-xr-x  scripts/contrib/ddimage                          |  98
-rwxr-xr-x  scripts/contrib/devtool-stress.py                | 245
-rwxr-xr-x  scripts/contrib/dialog-power-control             |   2
-rwxr-xr-x  scripts/contrib/documentation-audit.sh           |   5
-rwxr-xr-x  scripts/contrib/graph-tool                       | 122
-rwxr-xr-x  scripts/contrib/image-manifest                   | 523
-rwxr-xr-x  scripts/contrib/list-packageconfig-flags.py      |  76
-rwxr-xr-x  scripts/contrib/mkefidisk.sh                     | 403
-rwxr-xr-x  scripts/contrib/oe-build-perf-report-email.py    | 276
-rwxr-xr-x  scripts/contrib/patchreview.py                   | 238
-rwxr-xr-x  scripts/contrib/patchtest.sh                     | 104
-rwxr-xr-x  scripts/contrib/python/generate-manifest-2.7.py  | 398
-rwxr-xr-x  scripts/contrib/python/generate-manifest-3.4.py  | 388
-rwxr-xr-x  scripts/contrib/serdevtry                        |   3
-rwxr-xr-x  scripts/contrib/test_build_time.sh               |  16
-rwxr-xr-x  scripts/contrib/test_build_time_worker.sh        |   4
-rwxr-xr-x  scripts/contrib/uncovered                        |  26
-rwxr-xr-x  scripts/contrib/verify-homepage.py               |  81
27 files changed, 2530 insertions, 1855 deletions
diff --git a/scripts/contrib/bb-perf/bb-matrix-plot.sh b/scripts/contrib/bb-perf/bb-matrix-plot.sh
index 136a25570d..e7bd129e9e 100755
--- a/scripts/contrib/bb-perf/bb-matrix-plot.sh
+++ b/scripts/contrib/bb-perf/bb-matrix-plot.sh
@@ -1,21 +1,8 @@
#!/bin/bash
#
# Copyright (c) 2011, Intel Corporation.
-# All rights reserved.
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+# SPDX-License-Identifier: GPL-2.0-or-later
#
# DESCRIPTION
# This script operates on the .dat file generated by bb-matrix.sh. It tolerates
diff --git a/scripts/contrib/bb-perf/bb-matrix.sh b/scripts/contrib/bb-perf/bb-matrix.sh
index 106456584d..b1fff0f344 100755
--- a/scripts/contrib/bb-perf/bb-matrix.sh
+++ b/scripts/contrib/bb-perf/bb-matrix.sh
@@ -1,21 +1,8 @@
#!/bin/bash
#
# Copyright (c) 2011, Intel Corporation.
-# All rights reserved.
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+# SPDX-License-Identifier: GPL-2.0-or-later
#
# DESCRIPTION
# This script runs BB_CMD (typically building core-image-sato) for all
diff --git a/scripts/contrib/bb-perf/buildstats-plot.sh b/scripts/contrib/bb-perf/buildstats-plot.sh
new file mode 100755
index 0000000000..45c27d0b97
--- /dev/null
+++ b/scripts/contrib/bb-perf/buildstats-plot.sh
@@ -0,0 +1,160 @@
+#!/usr/bin/env bash
+#
+# Copyright (c) 2011, Intel Corporation.
+#
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# DESCRIPTION
+#
+# Produces script data to be consumed by gnuplot. There are two possible plots,
+# depending on whether the -S parameter is present:
+#
+# * without -S: Produces a histogram listing the top N recipes/tasks versus
+# stats. The first stat defined in the -s parameter is the one used
+# for ranking
+# * with -S: Produces a histogram listing tasks versus stats. In this case,
+# the value of each stat is the sum of that particular stat across all recipes found.
+# Stat values are sorted in descending order by the first stat defined in -s
+#
+# EXAMPLES
+#
+# 1. Top recipes' tasks taking into account utime
+#
+# $ buildstats-plot.sh -s utime | gnuplot -p
+#
+# 2. Tasks versus utime:stime
+#
+# $ buildstats-plot.sh -s utime:stime -S | gnuplot -p
+#
+# 3. Tasks versus IO write_bytes:IO read_bytes
+#
+# $ buildstats-plot.sh -s 'IO write_bytes:IO read_bytes' -S | gnuplot -p
+#
+# AUTHORS
+# Leonardo Sandoval <leonardo.sandoval.gonzalez@linux.intel.com>
+#
+
+set -o nounset
+set -o errexit
+
+BS_DIR="tmp/buildstats"
+N=10
+RECIPE=""
+TASKS="compile:configure:fetch:install:patch:populate_lic:populate_sysroot:unpack"
+STATS="utime"
+ACCUMULATE=""
+SUM=""
+OUTDATA_FILE="$PWD/buildstats-plot.out"
+
+function usage {
+ CMD=$(basename $0)
+ cat <<EOM
+Usage: $CMD [-b buildstats_dir] [-n N] [-r recipe] [-t tasks] [-s stats] [-a] [-S] [-o out_file] [-h]
+ -b buildstats The path where the buildstats folder resides
+ (default: "$BS_DIR")
+ -n N Top N recipes to display. Ignored if -S is present
+ (default: "$N")
+ -r recipe The recipe mask to be searched
+ -t tasks The tasks to be computed
+ (default: "$TASKS")
+ -s stats The stats to be matched, either one of the predefined sets
+ (see buildstats.sh -h for all options) or any other defined
+ (build)stat, separated by colons, e.g. stime:utime. If more
+ than one stat is given, units should be the same because
+ data is plotted as a histogram
+ (default: "$STATS")
+ -a Accumulate all stats values for found recipes
+ -S Sum values for a particular stat for found recipes
+ -o Output data file.
+ (default: "$OUTDATA_FILE")
+ -h Display this help message
+EOM
+}
+
+# Parse and validate arguments
+while getopts "b:n:r:t:s:o:aSh" OPT; do
+ case $OPT in
+ b)
+ BS_DIR="$OPTARG"
+ ;;
+ n)
+ N="$OPTARG"
+ ;;
+ r)
+ RECIPE="-r $OPTARG"
+ ;;
+ t)
+ TASKS="$OPTARG"
+ ;;
+ s)
+ STATS="$OPTARG"
+ ;;
+ a)
+ ACCUMULATE="-a"
+ ;;
+ S)
+ SUM="y"
+ ;;
+ o)
+ OUTDATA_FILE="$OPTARG"
+ ;;
+ h)
+ usage
+ exit 0
+ ;;
+ *)
+ usage
+ exit 1
+ ;;
+ esac
+done
+
+# Get number of stats
+IFS=':'; statsarray=(${STATS}); unset IFS
+nstats=${#statsarray[@]}
+
+# Get the script folder, used to run buildstats.sh
+CD=$(dirname $0)
+
+# Parse buildstats recipes to produce a single table
+OUTBUILDSTATS="$PWD/buildstats.log"
+$CD/buildstats.sh -b "$BS_DIR" -s "$STATS" -t "$TASKS" $RECIPE $ACCUMULATE -H > $OUTBUILDSTATS
+
+# Get headers
+HEADERS=$(cat $OUTBUILDSTATS | sed -n -e 's/\(.*\)/"\1"/' -e '1s/ /\\\\\\\\ /g' -e 's/_/\\\\\\\\_/g' -e '1s/:/" "/gp')
+
+echo -e "set boxwidth 0.9 relative"
+echo -e "set style data histograms"
+echo -e "set style fill solid 1.0 border lt -1"
+echo -e "set xtics rotate by 45 right"
+
+# Get output data
+if [ -z "$SUM" ]; then
+ cat $OUTBUILDSTATS | sed -e '1d' -e 's/_/\\\\_/g' | sort -k3 -n -r | head -$N > $OUTDATA_FILE
+ # include task at recipe column
+ sed -i -e "1i\
+${HEADERS}" $OUTDATA_FILE
+ echo -e "set title \"Top task/recipes\""
+ echo -e "plot for [COL=3:`expr 3 + ${nstats} - 1`] '${OUTDATA_FILE}' using COL:xtic(stringcolumn(1).' '.stringcolumn(2)) title columnheader(COL)"
+else
+
+ # Construct the datamash sum arguments (sum 3 sum 4 ...)
+ declare -a sumargs
+ j=0
+ for i in `seq $nstats`; do
+ sumargs[j]=sum; j=$(( $j + 1 ))
+ sumargs[j]=`expr 3 + $i - 1`; j=$(( $j + 1 ))
+ done
+
+ # Do the processing with datamash
+ cat $OUTBUILDSTATS | sed -e '1d' | datamash -t ' ' -g1 ${sumargs[*]} | sort -k2 -n -r > $OUTDATA_FILE
+
+ # Include headers in the resulting file so we can use gnuplot xtics
+ HEADERS=$(echo $HEADERS | sed -e 's/recipe//1')
+ sed -i -e "1i\
+${HEADERS}" $OUTDATA_FILE
+
+ # Plot
+ echo -e "set title \"Sum stats values per task for all recipes\""
+ echo -e "plot for [COL=2:`expr 2 + ${nstats} - 1`] '${OUTDATA_FILE}' using COL:xtic(1) title columnheader(COL)"
+fi
+
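Since the script writes its gnuplot commands to stdout, the output can also be rendered to an image file rather than an interactive window. A minimal sketch, assuming an initialized build directory containing tmp/buildstats and a gnuplot build that provides the pngcairo terminal (plain 'png' works as a fallback):

    cd build                                          # build dir with tmp/buildstats
    ../scripts/contrib/bb-perf/buildstats-plot.sh -s utime > plot.gp
    {
        echo 'set terminal pngcairo size 1024,768'    # illustrative terminal settings
        echo 'set output "utime.png"'
        cat plot.gp
    } | gnuplot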
diff --git a/scripts/contrib/bb-perf/buildstats.sh b/scripts/contrib/bb-perf/buildstats.sh
new file mode 100755
index 0000000000..e45cfc146d
--- /dev/null
+++ b/scripts/contrib/bb-perf/buildstats.sh
@@ -0,0 +1,167 @@
+#!/bin/bash
+#
+# Copyright (c) 2011, Intel Corporation.
+#
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# DESCRIPTION
+# Given 'buildstats' data (generated by bitbake when setting
+# USER_CLASSES ?= "buildstats" in local.conf), task names and stat values
+# (those present in the buildstats files), outputs
+# '<task> <recipe> <value_1> <value_2> ... <value_n>'. The units are the ones
+# defined by buildstats, which in turn takes its data from /proc/[pid] files
+#
+# Some useful pipelines
+#
+# 1. Tasks with largest stime (Amount of time that this process has been scheduled
+# in kernel mode) values
+# $ buildstats.sh -b <buildstats> -s stime | sort -k3 -n -r | head
+#
+# 2. Min, max, sum utime (Amount of time that this process has been scheduled
+# in user mode) per task (needs GNU datamash)
+# $ buildstats.sh -b <buildstats> -s utime | datamash -t' ' -g1 min 3 max 3 sum 3 | sort -k4 -n -r
+#
+# AUTHORS
+# Leonardo Sandoval <leonardo.sandoval.gonzalez@linux.intel.com>
+#
+
+# Stats, by type
+TIME="utime:stime:cutime:cstime"
+IO="IO wchar:IO write_bytes:IO syscr:IO read_bytes:IO rchar:IO syscw:IO cancelled_write_bytes"
+RUSAGE="rusage ru_utime:rusage ru_stime:rusage ru_maxrss:rusage ru_minflt:rusage ru_majflt:\
+rusage ru_inblock:rusage ru_oublock:rusage ru_nvcsw:rusage ru_nivcsw"
+
+CHILD_RUSAGE="Child rusage ru_utime:Child rusage ru_stime:Child rusage ru_maxrss:Child rusage ru_minflt:\
+Child rusage ru_majflt:Child rusage ru_inblock:Child rusage ru_oublock:Child rusage ru_nvcsw:\
+Child rusage ru_nivcsw"
+
+BS_DIR="tmp/buildstats"
+RECIPE=""
+TASKS="compile:configure:fetch:install:patch:populate_lic:populate_sysroot:unpack"
+STATS="$TIME"
+ACCUMULATE=""
+HEADER="" # No header by default
+
+function usage {
+CMD=$(basename $0)
+cat <<EOM
+Usage: $CMD [-b buildstats_dir] [-r recipe] [-t tasks] [-s stats] [-a] [-H] [-h]
+ -b buildstats The path where the buildstats folder resides
+ (default: "$BS_DIR")
+ -r recipe The recipe to be computed
+ -t tasks The tasks to be computed
+ (default: "$TASKS")
+ -s stats The stats to be matched. Options: TIME, IO, RUSAGE, CHILD_RUSAGE
+ or any other defined buildstat separated by colons, i.e. stime:utime
+ (default: "$STATS")
+ Default stat sets:
+ TIME=$TIME
+ IO=$IO
+ RUSAGE=$RUSAGE
+ CHILD_RUSAGE=$CHILD_RUSAGE
+ -a Accumulate all stats values for found recipes
+ -H Print a header row with the stat names
+ -h Display this help message
+EOM
+}
+
+# Parse and validate arguments
+while getopts "b:r:t:s:aHh" OPT; do
+ case $OPT in
+ b)
+ BS_DIR="$OPTARG"
+ ;;
+ r)
+ RECIPE="$OPTARG"
+ ;;
+ t)
+ TASKS="$OPTARG"
+ ;;
+ s)
+ STATS="$OPTARG"
+ ;;
+ a)
+ ACCUMULATE="y"
+ ;;
+ H)
+ HEADER="y"
+ ;;
+ h)
+ usage
+ exit 0
+ ;;
+ *)
+ usage
+ exit 1
+ ;;
+ esac
+done
+
+# Ensure the buildstats folder exists
+if [ ! -d "$BS_DIR" ]; then
+ echo "ERROR: $BS_DIR does not exist"
+ usage
+ exit 1
+fi
+
+stats=""
+IFS=":"
+for stat in ${STATS}; do
+ case $stat in
+ TIME)
+ stats="${stats}:${TIME}"
+ ;;
+ IO)
+ stats="${stats}:${IO}"
+ ;;
+ RUSAGE)
+ stats="${stats}:${RUSAGE}"
+ ;;
+ CHILD_RUSAGE)
+ stats="${stats}:${CHILD_RUSAGE}"
+ ;;
+ *)
+ stats="${STATS}"
+ ;;
+ esac
+done
+
+# remove possible colon at the beginning
+stats="$(echo "$stats" | sed -e 's/^://1')"
+
+# Provide a header if required by the user
+if [ -n "$HEADER" ] ; then
+ if [ -n "$ACCUMULATE" ]; then
+ echo "task:recipe:accumulated(${stats//:/;})"
+ else
+ echo "task:recipe:$stats"
+ fi
+fi
+
+for task in ${TASKS}; do
+ task="do_${task}"
+ for file in $(find ${BS_DIR} -type f -path "*${RECIPE}*/${task}" | awk 'BEGIN{ ORS=""; OFS=":" } { print $0,"" }'); do
+ recipe="$(basename $(dirname $file))"
+ times=""
+ for stat in ${stats}; do
+ [ -z "$stat" ] && { echo "empty stats"; }
+ time=$(sed -n -e "s/^\($stat\): \\(.*\\)/\\2/p" $file)
+ # in case the stat is not present, set the value as NA
+ [ -z "$time" ] && { time="NA"; }
+ # Append it to times
+ if [ -z "$times" ]; then
+ times="${time}"
+ else
+ times="${times} ${time}"
+ fi
+ done
+ if [ -n "$ACCUMULATE" ]; then
+ IFS=' '; valuesarray=(${times}); IFS=':'
+ times=0
+ for value in "${valuesarray[@]}"; do
+ [ "$value" == "NA" ] && { echo "ERROR: stat is not present."; usage; exit 1; }
+ times=$(( $times + $value ))
+ done
+ fi
+ echo "${task} ${recipe} ${times}"
+ done
+done
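The pipelines from the header, in context. A usage sketch, assuming the script is run from an initialized build directory (tmp/buildstats is the default path):

    # Top tasks ranked by utime
    ../scripts/contrib/bb-perf/buildstats.sh -b tmp/buildstats -s utime \
        | sort -k3 -n -r | head

    # Min, max and sum of utime per task (needs GNU datamash)
    ../scripts/contrib/bb-perf/buildstats.sh -b tmp/buildstats -s utime \
        | datamash -t ' ' -g 1 min 3 max 3 sum 3 | sort -k4 -n -r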
diff --git a/scripts/contrib/bbvars.py b/scripts/contrib/bbvars.py
index 0896d64445..090133600b 100755
--- a/scripts/contrib/bbvars.py
+++ b/scripts/contrib/bbvars.py
@@ -1,18 +1,6 @@
-#!/usr/bin/env python
-
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
+#!/usr/bin/env python3
#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+# SPDX-License-Identifier: GPL-2.0-or-later
#
# Copyright (C) Darren Hart <dvhart@linux.intel.com>, 2010
@@ -23,62 +11,38 @@ import os
import os.path
import re
+# Set up sys.path to let us import tinfoil
+scripts_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+lib_path = scripts_path + '/lib'
+sys.path.insert(0, lib_path)
+import scriptpath
+scriptpath.add_bitbake_lib_path()
+import bb.tinfoil
+
def usage():
- print 'Usage: %s -d FILENAME [-d FILENAME]* -m METADIR [-m MATADIR]*' % os.path.basename(sys.argv[0])
- print ' -d FILENAME documentation file to search'
- print ' -h, --help display this help and exit'
- print ' -m METADIR meta directory to search for recipes'
- print ' -t FILENAME documentation config file (for doc tags)'
- print ' -T Only display variables with doc tags (requires -t)'
-
-def recipe_bbvars(recipe):
- ''' Return a unique set of every bbvar encountered in the recipe '''
- prog = re.compile("[A-Z_]+")
- vset = set()
- try:
- r = open(recipe)
- except IOError as (errno, strerror):
- print 'WARNING: Failed to open recipe ', recipe
- print strerror
-
- for line in r:
- # Strip any comments from the line
- line = line.rsplit('#')[0]
- vset = vset.union(set(prog.findall(line)))
- r.close()
-
- bbvars = {}
- for v in vset:
- bbvars[v] = 1
-
- return bbvars
-
-def collect_bbvars(metadir):
- ''' Walk the metadir and collect the bbvars from each recipe found '''
- bbvars = {}
- for root,dirs,files in os.walk(metadir):
- for name in files:
- if name.find(".bb") >= 0:
- for key in recipe_bbvars(os.path.join(root,name)).iterkeys():
- if bbvars.has_key(key):
- bbvars[key] = bbvars[key] + 1
- else:
- bbvars[key] = 1
- return bbvars
-
-def bbvar_is_documented(var, docfiles):
- prog = re.compile(".*($|[^A-Z_])%s([^A-Z_]|$)" % (var))
- for doc in docfiles:
- try:
- f = open(doc)
- except IOError as (errno, strerror):
- print 'WARNING: Failed to open doc ', doc
- print strerror
- for line in f:
- if prog.match(line):
- return True
- f.close()
- return False
+ print('Usage: %s -d FILENAME [-d FILENAME]*' % os.path.basename(sys.argv[0]))
+ print(' -d FILENAME documentation file to search')
+ print(' -h, --help display this help and exit')
+ print(' -t FILENAME documentation config file (for doc tags)')
+ print(' -T Only display variables with doc tags (requires -t)')
+
+def bbvar_is_documented(var, documented_vars):
+ ''' Check if variable (var) is in the list of documented variables (documented_vars) '''
+ if var in documented_vars:
+ return True
+ else:
+ return False
+
+def collect_documented_vars(docfiles):
+ ''' Walk the docfiles and collect the documented variables '''
+ documented_vars = []
+ prog = re.compile(".*($|[^A-Z_])<glossentry id=\'var-")
+ var_prog = re.compile('<glossentry id=\'var-(.*)\'>')
+ for d in docfiles:
+ with open(d) as f:
+ documented_vars += var_prog.findall(f.read())
+
+ return documented_vars
def bbvar_doctag(var, docconf):
prog = re.compile('^%s\[doc\] *= *"(.*)"' % (var))
@@ -87,8 +51,8 @@ def bbvar_doctag(var, docconf):
try:
f = open(docconf)
- except IOError as (errno, strerror):
- return strerror
+ except IOError as err:
+ return err.args[1]
for line in f:
m = prog.search(line)
@@ -100,8 +64,7 @@ def bbvar_doctag(var, docconf):
def main():
docfiles = []
- metadirs = []
- bbvars = {}
+ bbvars = set()
undocumented = []
docconf = ""
onlydoctags = False
@@ -109,8 +72,8 @@ def main():
# Collect and validate input
try:
opts, args = getopt.getopt(sys.argv[1:], "d:hm:t:T", ["help"])
- except getopt.GetoptError, err:
- print '%s' % str(err)
+ except getopt.GetoptError as err:
+ print('%s' % str(err))
usage()
sys.exit(2)
@@ -122,14 +85,8 @@ def main():
if os.path.isfile(a):
docfiles.append(a)
else:
- print 'ERROR: documentation file %s is not a regular file' % (a)
+ print('ERROR: documentation file %s is not a regular file' % a)
sys.exit(3)
- elif o == '-m':
- if os.path.isdir(a):
- metadirs.append(a)
- else:
- print 'ERROR: meta directory %s is not a directory' % (a)
- sys.exit(4)
elif o == "-t":
if os.path.isfile(a):
docconf = a
@@ -139,47 +96,72 @@ def main():
assert False, "unhandled option"
if len(docfiles) == 0:
- print 'ERROR: no docfile specified'
+ print('ERROR: no docfile specified')
usage()
sys.exit(5)
- if len(metadirs) == 0:
- print 'ERROR: no metadir specified'
- usage()
- sys.exit(6)
-
if onlydoctags and docconf == "":
- print 'ERROR: no docconf specified'
+ print('ERROR: no docconf specified')
usage()
sys.exit(7)
- # Collect all the variable names from the recipes in the metadirs
- for m in metadirs:
- for key,cnt in collect_bbvars(m).iteritems():
- if bbvars.has_key(key):
- bbvars[key] = bbvars[key] + cnt
+ prog = re.compile("^[^a-z]*$")
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=False)
+ parser = bb.codeparser.PythonParser('parser', None)
+ datastore = tinfoil.config_data
+
+ def bbvars_update(data):
+ if prog.match(data):
+ bbvars.add(data)
+ if tinfoil.config_data.getVarFlag(data, 'python'):
+ try:
+ parser.parse_python(tinfoil.config_data.getVar(data))
+ except bb.data_smart.ExpansionError:
+ pass
+ for var in parser.references:
+ if prog.match(var):
+ bbvars.add(var)
else:
- bbvars[key] = cnt
+ try:
+ expandedVar = datastore.expandWithRefs(datastore.getVar(data, False), data)
+ for var in expandedVar.references:
+ if prog.match(var):
+ bbvars.add(var)
+ except bb.data_smart.ExpansionError:
+ pass
+
+ # Use tinfoil to collect all the variable names globally
+ for data in datastore:
+ bbvars_update(data)
+
+ # Collect variables from all recipes
+ for recipe in tinfoil.all_recipe_files(variants=False):
+ print("Checking %s" % recipe)
+ for data in tinfoil.parse_recipe_file(recipe):
+ bbvars_update(data)
+
+ documented_vars = collect_documented_vars(docfiles)
# Check each var for documentation
varlen = 0
- for v in bbvars.iterkeys():
+ for v in bbvars:
if len(v) > varlen:
varlen = len(v)
- if not bbvar_is_documented(v, docfiles):
+ if not bbvar_is_documented(v, documented_vars):
undocumented.append(v)
undocumented.sort()
varlen = varlen + 1
# Report all undocumented variables
- print 'Found %d undocumented bb variables (out of %d):' % (len(undocumented), len(bbvars))
- header = '%s%s%s' % (str("VARIABLE").ljust(varlen), str("COUNT").ljust(6), str("DOCTAG").ljust(7))
- print header
- print str("").ljust(len(header), '=')
+ print('Found %d undocumented bb variables (out of %d):' % (len(undocumented), len(bbvars)))
+ header = '%s%s' % (str("VARIABLE").ljust(varlen), str("DOCTAG").ljust(7))
+ print(header)
+ print(str("").ljust(len(header), '='))
for v in undocumented:
doctag = bbvar_doctag(v, docconf)
if not onlydoctags or not doctag == "":
- print '%s%s%s' % (v.ljust(varlen), str(bbvars[v]).ljust(6), doctag)
+ print('%s%s' % (v.ljust(varlen), doctag))
if __name__ == "__main__":
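With the -m option gone, the script now collects variable names through tinfoil, so it must be run from an initialized build environment. A sketch; the documentation file paths are illustrative and depend on your checkout:

    cd poky
    source oe-init-build-env build
    ../scripts/contrib/bbvars.py \
        -d ../documentation/ref-manual/ref-variables.xml \
        -t ../documentation/conf/documentation.conf -T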
diff --git a/scripts/contrib/build-perf-test-wrapper.sh b/scripts/contrib/build-perf-test-wrapper.sh
new file mode 100755
index 0000000000..fa71d4a2e9
--- /dev/null
+++ b/scripts/contrib/build-perf-test-wrapper.sh
@@ -0,0 +1,258 @@
+#!/bin/bash
+#
+# Build performance test script wrapper
+#
+# Copyright (c) 2016, Intel Corporation.
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# This script is a simple wrapper around the actual build performance tester
+# script. This script initializes the build environment, runs
+# oe-build-perf-test and archives the results.
+
+script=`basename $0`
+script_dir=$(realpath $(dirname $0))
+archive_dir=~/perf-results/archives
+
+usage () {
+cat << EOF
+Usage: $script [-h] [-c COMMITISH] [-C GIT_REPO]
+
+Optional arguments:
+ -h show this help and exit.
+ -a ARCHIVE_DIR archive results tarball here, give an empty string to
+ disable tarball archiving (default: $archive_dir)
+ -c COMMITISH test (checkout) this commit; <branch>:<commit> can be
+ specified to test a specific commit of a certain branch
+ -C GIT_REPO commit results into Git
+ -d DOWNLOAD_DIR directory to store downloaded sources in
+ -E EMAIL_ADDR send email report
+ -g GLOBALRES_DIR where to place the globalres file
+ -P GIT_REMOTE push results to a remote Git repository
+ -R DEST rsync reports to a remote destination
+ -w WORK_DIR work dir for this script
+ (default: GIT_TOP_DIR/build-perf-test)
+ -x create xml report (instead of json)
+EOF
+}
+
+get_os_release_var () {
+ ( source /etc/os-release; eval echo '$'$1 )
+}
+
+
+# Parse command line arguments
+commitish=""
+oe_build_perf_test_extra_opts=()
+oe_git_archive_extra_opts=()
+while getopts "ha:c:C:d:E:g:P:R:w:x" opt; do
+ case $opt in
+ h) usage
+ exit 0
+ ;;
+ a) mkdir -p "$OPTARG"
+ archive_dir=`realpath -s "$OPTARG"`
+ ;;
+ c) commitish=$OPTARG
+ ;;
+ C) mkdir -p "$OPTARG"
+ results_repo=`realpath -s "$OPTARG"`
+ ;;
+ d) download_dir=`realpath -s "$OPTARG"`
+ ;;
+ E) email_to="$OPTARG"
+ ;;
+ g) mkdir -p "$OPTARG"
+ globalres_dir=`realpath -s "$OPTARG"`
+ ;;
+ P) oe_git_archive_extra_opts+=("--push" "$OPTARG")
+ ;;
+ R) rsync_dst="$OPTARG"
+ ;;
+ w) base_dir=`realpath -s "$OPTARG"`
+ ;;
+ x) oe_build_perf_test_extra_opts+=("--xml")
+ ;;
+ *) usage
+ exit 1
+ ;;
+ esac
+done
+
+# Check positional args
+shift "$((OPTIND - 1))"
+if [ $# -ne 0 ]; then
+ echo "ERROR: No positional args are accepted."
+ usage
+ exit 1
+fi
+
+if [ -n "$email_to" ]; then
+ if ! [ -x "$(command -v phantomjs)" ]; then
+ echo "ERROR: Sending email needs phantomjs."
+ exit 1
+ fi
+ if ! [ -x "$(command -v optipng)" ]; then
+ echo "ERROR: Sending email needs optipng."
+ exit 1
+ fi
+fi
+
+# Open a file descriptor for flock and acquire lock
+LOCK_FILE="/tmp/oe-build-perf-test-wrapper.lock"
+if ! exec 3> "$LOCK_FILE"; then
+ echo "ERROR: Unable to open lock file"
+ exit 1
+fi
+if ! flock -n 3; then
+ echo "ERROR: Another instance of this script is running"
+ exit 1
+fi
+
+echo "Running on `uname -n`"
+if ! git_topdir=$(git rev-parse --show-toplevel); then
+ echo "The current working dir doesn't seem to be a git clone. Please cd there before running `basename $0`"
+ exit 1
+fi
+
+cd "$git_topdir"
+
+if [ -n "$commitish" ]; then
+ echo "Running git fetch"
+ git fetch &> /dev/null
+ git checkout HEAD^0 &> /dev/null
+
+ # Handle <branch>:<commit> format
+ if echo "$commitish" | grep -q ":"; then
+ commit=`echo "$commitish" | cut -d":" -f2`
+ branch=`echo "$commitish" | cut -d":" -f1`
+ else
+ commit="$commitish"
+ branch="$commitish"
+ fi
+
+ echo "Checking out $commitish"
+ git branch -D $branch &> /dev/null
+ if ! git checkout -f $branch &> /dev/null; then
+ echo "ERROR: Git checkout failed"
+ exit 1
+ fi
+
+ # Check that the specified branch really contains the commit
+ commit_hash=`git rev-parse --revs-only $commit --`
+ if [ -z "$commit_hash" -o "`git merge-base $branch $commit`" != "$commit_hash" ]; then
+ echo "ERROR: branch $branch does not contain commit $commit"
+ exit 1
+ fi
+ git reset --hard $commit > /dev/null
+fi
+
+# Determine name of the current branch
+branch=`git symbolic-ref HEAD 2> /dev/null`
+# Strip refs/heads/
+branch=${branch:11}
+
+# Setup build environment
+if [ -z "$base_dir" ]; then
+ base_dir="$git_topdir/build-perf-test"
+fi
+echo "Using working dir $base_dir"
+
+if [ -z "$download_dir" ]; then
+ download_dir="$base_dir/downloads"
+fi
+if [ -z "$globalres_dir" ]; then
+ globalres_dir="$base_dir"
+fi
+
+timestamp=`date "+%Y%m%d%H%M%S"`
+git_rev=$(git rev-parse --short HEAD) || exit 1
+build_dir="$base_dir/build-$git_rev-$timestamp"
+results_dir="$base_dir/results-$git_rev-$timestamp"
+globalres_log="$globalres_dir/globalres.log"
+machine="qemux86"
+
+mkdir -p "$base_dir"
+source ./oe-init-build-env $build_dir >/dev/null || exit 1
+
+# Additional config
+auto_conf="$build_dir/conf/auto.conf"
+echo "MACHINE = \"$machine\"" > "$auto_conf"
+echo 'BB_NUMBER_THREADS = "8"' >> "$auto_conf"
+echo 'PARALLEL_MAKE = "-j 8"' >> "$auto_conf"
+echo "DL_DIR = \"$download_dir\"" >> "$auto_conf"
+# Disabling network sanity check slightly reduces the variance of timing results
+echo 'CONNECTIVITY_CHECK_URIS = ""' >> "$auto_conf"
+# Possibility to define extra settings
+if [ -f "$base_dir/auto.conf.extra" ]; then
+ cat "$base_dir/auto.conf.extra" >> "$auto_conf"
+fi
+
+# Run actual test script
+oe-build-perf-test --out-dir "$results_dir" \
+ --globalres-file "$globalres_log" \
+ "${oe_build_perf_test_extra_opts[@]}" \
+ --lock-file "$base_dir/oe-build-perf.lock"
+
+case $? in
+ 1) echo "ERROR: oe-build-perf-test script failed!"
+ exit 1
+ ;;
+ 2) echo "NOTE: some tests failed!"
+ ;;
+esac
+
+# Commit results to git
+if [ -n "$results_repo" ]; then
+ echo -e "\nArchiving results in $results_repo"
+ oe-git-archive \
+ --git-dir "$results_repo" \
+ --branch-name "{hostname}/{branch}/{machine}" \
+ --tag-name "{hostname}/{branch}/{machine}/{commit_count}-g{commit}/{tag_number}" \
+ --exclude "buildstats.json" \
+ --notes "buildstats/{branch_name}" "$results_dir/buildstats.json" \
+ "${oe_git_archive_extra_opts[@]}" \
+ "$results_dir"
+
+ # Generate test reports
+ sanitized_branch=`echo $branch | tr / _`
+ report_txt=`hostname`_${sanitized_branch}_${machine}.txt
+ report_html=`hostname`_${sanitized_branch}_${machine}.html
+ echo -e "\nGenerating test report"
+ oe-build-perf-report -r "$results_repo" > $report_txt
+ oe-build-perf-report -r "$results_repo" --html > $report_html
+
+ # Send email report
+ if [ -n "$email_to" ]; then
+ echo "Emailing test report"
+ os_name=`get_os_release_var PRETTY_NAME`
+ "$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text $report_txt --html $report_html "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}"
+ fi
+
+ # Upload report files, unless we're on detached head
+ if [ -n "$rsync_dst" -a -n "$branch" ]; then
+ echo "Uploading test report"
+ rsync $report_txt $report_html $rsync_dst
+ fi
+fi
+
+
+echo -ne "\n\n-----------------\n"
+echo "Global results file:"
+echo -ne "\n"
+
+cat "$globalres_log"
+
+if [ -n "$archive_dir" ]; then
+ echo -ne "\n\n-----------------\n"
+ echo "Archiving results in $archive_dir"
+ mkdir -p "$archive_dir"
+ results_basename=`basename "$results_dir"`
+ results_dirname=`dirname "$results_dir"`
+ tar -czf "$archive_dir/`uname -n`-${results_basename}.tar.gz" -C "$results_dirname" "$results_basename"
+fi
+
+rm -rf "$build_dir"
+rm -rf "$results_dir"
+
+echo "DONE"
diff --git a/scripts/contrib/build-perf-test.sh b/scripts/contrib/build-perf-test.sh
deleted file mode 100755
index cdd7885dca..0000000000
--- a/scripts/contrib/build-perf-test.sh
+++ /dev/null
@@ -1,369 +0,0 @@
-#!/bin/bash
-#
-# This script runs a series of tests (with and without sstate) and reports build time (and tmp/ size)
-#
-# Build performance test script
-#
-# Copyright 2013 Intel Corporation
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-#
-#
-# AUTHORS:
-# Stefan Stanacar <stefanx.stanacar@intel.com>
-
-
-ME=$(basename $0)
-
-#
-# usage and setup
-#
-
-usage () {
-cat << EOT
-Usage: $ME [-h]
- $ME [-c <commit>] [-v] [-m <val>] [-j <val>] [-t <val>] [-i <image-name>] [-d <path>]
-Options:
- -h
- Display this help and exit.
- -c <commit>
- git checkout <commit> before anything else
- -v
- Show bitbake output, don't redirect it to a log.
- -m <machine>
- Value for MACHINE. Default is qemux86.
- -j <val>
- Value for PARALLEL_MAKE. Default is 8.
- -t <val>
- Value for BB_NUMBER_THREADS. Default is 8.
- -i <image-name>
- Instead of timing against core-image-sato, use <image-name>
- -d <path>
- Use <path> as DL_DIR
- -p <githash>
- Cherry pick githash onto the commit
-
-Note: current working directory must be inside a poky git clone.
-
-EOT
-}
-
-
-if clonedir=$(git rev-parse --show-toplevel); then
- cd $clonedir
-else
- echo "The current working dir doesn't seem to be a poky git clone. Please cd there before running $ME"
- exit 1
-fi
-
-IMAGE="core-image-sato"
-verbose=0
-dldir=
-commit=
-pmake=
-cherrypicks=
-while getopts "hvc:m:j:t:i:d:p:" opt; do
- case $opt in
- h) usage
- exit 0
- ;;
- v) verbose=1
- ;;
- c) commit=$OPTARG
- ;;
- m) export MACHINE=$OPTARG
- ;;
- j) pmake=$OPTARG
- ;;
- t) export BB_NUMBER_THREADS=$OPTARG
- ;;
- i) IMAGE=$OPTARG
- ;;
- d) dldir=$OPTARG
- ;;
- p) cherrypicks="$cherrypicks $OPTARG"
- ;;
- *) usage
- exit 1
- ;;
- esac
-done
-
-
-#drop cached credentials and test for sudo access without a password
-sudo -k -n ls > /dev/null 2>&1
-reqpass=$?
-if [ $reqpass -ne 0 ]; then
- echo "The script requires sudo access to drop caches between builds (echo 3 > /proc/sys/vm/drop_caches)"
- read -s -p "Please enter your sudo password: " pass
- echo
-fi
-
-if [ -n "$commit" ]; then
- echo "git checkout -f $commit"
- git pull > /dev/null 2>&1
- git checkout -f $commit || exit 1
- git pull > /dev/null 2>&1
-fi
-
-if [ -n "$cherrypicks" ]; then
- for c in $cherrypicks; do
- git cherry-pick $c
- done
-fi
-
-rev=$(git rev-parse --short HEAD) || exit 1
-OUTDIR="$clonedir/build-perf-test/results-$rev-`date "+%Y%m%d%H%M%S"`"
-BUILDDIR="$OUTDIR/build"
-resultsfile="$OUTDIR/results.log"
-bboutput="$OUTDIR/bitbake.log"
-myoutput="$OUTDIR/output.log"
-globalres="$clonedir/build-perf-test/globalres.log"
-
-mkdir -p $OUTDIR || exit 1
-
-log () {
- local msg="$1"
- echo "`date`: $msg" | tee -a $myoutput
-}
-
-
-#
-# Config stuff
-#
-
-branch=`git branch 2>&1 | grep "^* " | tr -d "* "`
-gitcommit=$(git rev-parse HEAD) || exit 1
-log "Running on $branch:$gitcommit"
-
-source ./oe-init-build-env $OUTDIR/build >/dev/null || exit 1
-cd $OUTDIR/build
-
-[ -n "$MACHINE" ] || export MACHINE="qemux86"
-[ -n "$BB_NUMBER_THREADS" ] || export BB_NUMBER_THREADS="8"
-
-if [ -n "$pmake" ]; then
- export PARALLEL_MAKE="-j $pmake"
-else
- export PARALLEL_MAKE="-j 8"
-fi
-
-if [ -n "$dldir" ]; then
- echo "DL_DIR = \"$dldir\"" >> conf/local.conf
-else
- echo "DL_DIR = \"$clonedir/build-perf-test/downloads\"" >> conf/local.conf
-fi
-
-# Sometimes I've noticed big differences in timings for the same commit, on the same machine
-# Disabling the network sanity check helps a bit (because of my crappy network connection and/or proxy)
-echo "CONNECTIVITY_CHECK_URIS =\"\"" >> conf/local.conf
-
-
-#
-# Functions
-#
-
-declare -a TIMES
-time_count=0
-declare -a SIZES
-size_count=0
-
-bbtime () {
- local arg="$@"
- log " Timing: bitbake ${arg}"
-
- if [ $verbose -eq 0 ]; then
- /usr/bin/time -v -o $resultsfile bitbake ${arg} >> $bboutput
- else
- /usr/bin/time -v -o $resultsfile bitbake ${arg}
- fi
- ret=$?
- if [ $ret -eq 0 ]; then
- t=`grep wall $resultsfile | sed 's/.*m:ss): //'`
- log " TIME: $t"
- TIMES[(( time_count++ ))]="$t"
- else
- log "ERROR: exit status was non-zero, will report time as 0."
- TIMES[(( time_count++ ))]="0"
- fi
-
- #time by default overwrites the output file and we want to keep the results
- #it has an append option but I don't want to clobber the results in the same file
- i=`ls $OUTDIR/results.log* |wc -l`
- mv $resultsfile "${resultsfile}.${i}"
- log "More stats can be found in ${resultsfile}.${i}"
-}
-
-#we don't time bitbake here
-bbnotime () {
- local arg="$@"
- log " Running: bitbake ${arg}"
- if [ $verbose -eq 0 ]; then
- bitbake ${arg} >> $bboutput
- else
- bitbake ${arg}
- fi
- ret=$?
- if [ $ret -eq 0 ]; then
- log " Finished bitbake ${arg}"
- else
- log "ERROR: exit status was non-zero. Exit.."
- exit $ret
- fi
-
-}
-
-do_rmtmp() {
- log " Removing tmp"
- rm -rf bitbake.lock pseudodone conf/sanity_info cache tmp
-}
-do_rmsstate () {
- log " Removing sstate-cache"
- rm -rf sstate-cache
-}
-do_sync () {
- log " Syncing and dropping caches"
- sync; sync
- if [ $reqpass -eq 0 ]; then
- sudo sh -c "echo 3 > /proc/sys/vm/drop_caches"
- else
- echo "$pass" | sudo -S sh -c "echo 3 > /proc/sys/vm/drop_caches"
- echo
- fi
- sleep 3
-}
-
-write_results() {
- echo -n "`uname -n`,$branch:$gitcommit,`git describe`," >> $globalres
- for i in "${TIMES[@]}"; do
- echo -n "$i," >> $globalres
- done
- for i in "${SIZES[@]}"; do
- echo -n "$i," >> $globalres
- done
- echo >> $globalres
- sed -i '$ s/,$//' $globalres
-}
-
-####
-
-#
-# Test 1
-# Measure: Wall clock of "bitbake core-image-sato" and size of tmp/dir (w/o rm_work and w/ rm_work)
-# Pre: Downloaded sources, no sstate
-# Steps:
-# Part1:
-# - fetchall
-# - clean build dir
-# - time bitbake core-image-sato
-# - collect data
-# Part2:
-# - bitbake virtual/kernel -c cleansstate
-# - time bitbake virtual/kernel
-# Part3:
-# - add INHERIT to local.conf
-# - clean build dir
-# - build
-# - report size, remove INHERIT
-
-test1_p1 () {
- log "Running Test 1, part 1/3: Measure wall clock of bitbake $IMAGE and size of tmp/ dir"
- bbnotime $IMAGE -c fetchall
- do_rmtmp
- do_rmsstate
- do_sync
- bbtime $IMAGE
- s=`du -s tmp | sed 's/tmp//' | sed 's/[ \t]*$//'`
- SIZES[(( size_count++ ))]="$s"
- log "SIZE of tmp dir is: $s"
- log "Buildstats are saved in $OUTDIR/buildstats-test1"
- mv tmp/buildstats $OUTDIR/buildstats-test1
-}
-
-
-test1_p2 () {
- log "Running Test 1, part 2/3: bitbake virtual/kernel -c cleansstate and time bitbake virtual/kernel"
- bbnotime virtual/kernel -c cleansstate
- do_sync
- bbtime virtual/kernel
-}
-
-test1_p3 () {
- log "Running Test 1, part 3/3: Build $IMAGE w/o sstate and report size of tmp/dir with rm_work enabled"
- echo "INHERIT += \"rm_work\"" >> conf/local.conf
- do_rmtmp
- do_rmsstate
- do_sync
- bbtime $IMAGE
- sed -i 's/INHERIT += \"rm_work\"//' conf/local.conf
- s=`du -s tmp | sed 's/tmp//' | sed 's/[ \t]*$//'`
- SIZES[(( size_count++ ))]="$s"
- log "SIZE of tmp dir is: $s"
- log "Buildstats are saved in $OUTDIR/buildstats-test13"
- mv tmp/buildstats $OUTDIR/buildstats-test13
-}
-
-
-#
-# Test 2
-# Measure: Wall clock of "bitbake core-image-sato" and size of tmp/dir
-# Pre: populated sstate cache
-
-test2 () {
- # Assuming test 1 has run
- log "Running Test 2: Measure wall clock of bitbake $IMAGE -c rootfs with sstate"
- do_rmtmp
- do_sync
- bbtime $IMAGE -c rootfs
-}
-
-
-# Test 3
-# parsing time metrics
-#
-# Start with
-# i) "rm -rf tmp/cache; time bitbake -p"
-# ii) "rm -rf tmp/cache/default-glibc/; time bitbake -p"
-# iii) "time bitbake -p"
-
-
-test3 () {
- log "Running Test 3: Parsing time metrics (bitbake -p)"
- log " Removing tmp/cache && cache"
- rm -rf tmp/cache cache
- bbtime -p
- log " Removing tmp/cache/default-glibc/"
- rm -rf tmp/cache/default-glibc/
- bbtime -p
- bbtime -p
-}
-
-
-
-# RUN!
-
-test1_p1
-test1_p2
-test1_p3
-test2
-test3
-
-# if we got til here write to global results
-write_results
-
-log "All done, cleaning up..."
-
-do_rmtmp
-do_rmsstate
diff --git a/scripts/contrib/convert-overrides.py b/scripts/contrib/convert-overrides.py
new file mode 100755
index 0000000000..4d41a4c475
--- /dev/null
+++ b/scripts/contrib/convert-overrides.py
@@ -0,0 +1,144 @@
+#!/usr/bin/env python3
+#
+# Conversion script to add new override syntax to existing bitbake metadata
+#
+# Copyright (C) 2021 Richard Purdie
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+#
+# To use this script on a new layer you need to list the overrides the
+# layer is known to use in the list below.
+#
+# Known constraint: Matching is 'loose' and in particular will find variable
+# and function names with "_append" and "_remove" in them. Those need to be
+# filtered out manually or in the skip list below.
+#
+
+import re
+import os
+import sys
+import tempfile
+import shutil
+import mimetypes
+
+if len(sys.argv) < 2:
+ print("Please specify a directory to run the conversion script against.")
+ sys.exit(1)
+
+# List of strings to treat as overrides
+vars = ["append", "prepend", "remove"]
+vars = vars + ["qemuarm", "qemux86", "qemumips", "qemuppc", "qemuriscv", "qemuall"]
+vars = vars + ["genericx86", "edgerouter", "beaglebone-yocto"]
+vars = vars + ["armeb", "arm", "armv5", "armv6", "armv4", "powerpc64", "aarch64", "riscv32", "riscv64", "x86", "mips64", "powerpc"]
+vars = vars + ["mipsarch", "x86-x32", "mips16e", "microblaze", "e5500-64b", "mipsisa32", "mipsisa64"]
+vars = vars + ["class-native", "class-target", "class-cross-canadian", "class-cross", "class-devupstream"]
+vars = vars + ["tune-", "pn-", "forcevariable"]
+vars = vars + ["libc-musl", "libc-glibc", "libc-newlib","libc-baremetal"]
+vars = vars + ["task-configure", "task-compile", "task-install", "task-clean", "task-image-qa", "task-rm_work", "task-image-complete", "task-populate-sdk"]
+vars = vars + ["toolchain-clang", "mydistro", "nios2", "sdkmingw32", "overrideone", "overridetwo"]
+vars = vars + ["linux-gnux32", "linux-muslx32", "linux-gnun32", "mingw32", "poky", "darwin", "linuxstdbase"]
+vars = vars + ["linux-gnueabi", "eabi"]
+vars = vars + ["virtclass-multilib", "virtclass-mcextend"]
+
+# List of strings to treat as overrides but only with whitespace following or another override (more restricted matching).
+# Handles issues with arc matching arch.
+shortvars = ["arc", "mips", "mipsel", "sh4"]
+
+# Variables which take packagenames as an override
+packagevars = ["FILES", "RDEPENDS", "RRECOMMENDS", "SUMMARY", "DESCRIPTION", "RSUGGESTS", "RPROVIDES", "RCONFLICTS", "PKG", "ALLOW_EMPTY",
+ "pkg_postrm", "pkg_postinst_ontarget", "pkg_postinst", "INITSCRIPT_NAME", "INITSCRIPT_PARAMS", "DEBIAN_NOAUTONAME", "ALTERNATIVE",
+ "PKGE", "PKGV", "PKGR", "USERADD_PARAM", "GROUPADD_PARAM", "CONFFILES", "SYSTEMD_SERVICE", "LICENSE", "SECTION", "pkg_preinst",
+ "pkg_prerm", "RREPLACES", "GROUPMEMS_PARAM", "SYSTEMD_AUTO_ENABLE", "SKIP_FILEDEPS", "PRIVATE_LIBS", "PACKAGE_ADD_METADATA",
+ "INSANE_SKIP", "DEBIANNAME", "SYSTEMD_SERVICE_ESCAPED"]
+
+# Expressions to skip if encountered, these are not overrides
+skips = ["parser_append", "recipe_to_append", "extra_append", "to_remove", "show_appends", "applied_appends", "file_appends", "handle_remove"]
+skips = skips + ["expanded_removes", "color_remove", "test_remove", "empty_remove", "toaster_prepend", "num_removed", "licfiles_append", "_write_append"]
+skips = skips + ["no_report_remove", "test_prepend", "test_append", "multiple_append", "test_remove", "shallow_remove", "do_remove_layer", "first_append"]
+skips = skips + ["parser_remove", "to_append", "no_remove", "bblayers_add_remove", "bblayers_remove", "apply_append", "is_x86", "base_dep_prepend"]
+skips = skips + ["autotools_dep_prepend", "go_map_arm", "alt_remove_links", "systemd_append_file", "file_append", "process_file_darwin"]
+skips = skips + ["run_loaddata_poky", "determine_if_poky_env", "do_populate_poky_src", "libc_cv_include_x86_isa_level", "test_rpm_remove", "do_install_armmultilib"]
+skips = skips + ["get_appends_for_files", "test_doubleref_remove", "test_bitbakelayers_add_remove", "elf32_x86_64", "colour_remove", "revmap_remove"]
+skips = skips + ["test_rpm_remove", "test_bitbakelayers_add_remove", "recipe_append_file", "log_data_removed", "recipe_append", "systemd_machine_unit_append"]
+skips = skips + ["recipetool_append", "changetype_remove", "try_appendfile_wc", "test_qemux86_directdisk", "test_layer_appends", "tgz_removed"]
+
+imagevars = ["IMAGE_CMD", "EXTRA_IMAGECMD", "IMAGE_TYPEDEP", "CONVERSION_CMD", "COMPRESS_CMD"]
+packagevars = packagevars + imagevars
+
+vars_re = {}
+for exp in vars:
+ vars_re[exp] = (re.compile('((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp), r"\1:" + exp)
+
+shortvars_re = {}
+for exp in shortvars:
+ shortvars_re[exp] = (re.compile('((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp + '([\(\'"\s:])'), r"\1:" + exp + r"\3")
+
+package_re = {}
+for exp in packagevars:
+ package_re[exp] = (re.compile('(^|[#\'"\s\-\+]+)' + exp + '_' + '([$a-z"\'\s%\[<{\\\*].)'), r"\1" + exp + r":\2")
+
+# Other substitutions to make
+subs = {
+ 'r = re.compile("([^:]+):\s*(.*)")' : 'r = re.compile("(^.+?):\s+(.*)")',
+ "val = d.getVar('%s_%s' % (var, pkg))" : "val = d.getVar('%s:%s' % (var, pkg))",
+ "f.write('%s_%s: %s\\n' % (var, pkg, encode(val)))" : "f.write('%s:%s: %s\\n' % (var, pkg, encode(val)))",
+ "d.getVar('%s_%s' % (scriptlet_name, pkg))" : "d.getVar('%s:%s' % (scriptlet_name, pkg))",
+ 'ret.append(v + "_" + p)' : 'ret.append(v + ":" + p)',
+}
+
+def processfile(fn):
+ print("processing file '%s'" % fn)
+ try:
+ fh, abs_path = tempfile.mkstemp()
+ with os.fdopen(fh, 'w') as new_file:
+ with open(fn, "r") as old_file:
+ for line in old_file:
+ skip = False
+ for s in skips:
+ if s in line:
+ skip = True
+ if "ptest_append" in line or "ptest_remove" in line or "ptest_prepend" in line:
+ skip = False
+ for sub in subs:
+ if sub in line:
+ line = line.replace(sub, subs[sub])
+ skip = True
+ if not skip:
+ for pvar in packagevars:
+ line = package_re[pvar][0].sub(package_re[pvar][1], line)
+ for var in vars:
+ line = vars_re[var][0].sub(vars_re[var][1], line)
+ for shortvar in shortvars:
+ line = shortvars_re[shortvar][0].sub(shortvars_re[shortvar][1], line)
+ if "pkg_postinst:ontarget" in line:
+ line = line.replace("pkg_postinst:ontarget", "pkg_postinst_ontarget")
+ new_file.write(line)
+ shutil.copymode(fn, abs_path)
+ os.remove(fn)
+ shutil.move(abs_path, fn)
+ except UnicodeDecodeError:
+ pass
+
+ourname = os.path.basename(sys.argv[0])
+ourversion = "0.9.3"
+
+if os.path.isfile(sys.argv[1]):
+ processfile(sys.argv[1])
+ sys.exit(0)
+
+for targetdir in sys.argv[1:]:
+ print("processing directory '%s'" % targetdir)
+ for root, dirs, files in os.walk(targetdir):
+ for name in files:
+ if name == ourname:
+ continue
+ fn = os.path.join(root, name)
+ if os.path.islink(fn):
+ continue
+ if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff"):
+ continue
+ processfile(fn)
+
+print("All files processed with version %s" % ourversion)
diff --git a/scripts/contrib/convert-srcuri.py b/scripts/contrib/convert-srcuri.py
new file mode 100755
index 0000000000..4bf9e3013d
--- /dev/null
+++ b/scripts/contrib/convert-srcuri.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python3
+#
+# Conversion script to update SRC_URI to add branch to git urls
+#
+# Copyright (C) 2021 Richard Purdie
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import re
+import os
+import sys
+import tempfile
+import shutil
+import mimetypes
+
+if len(sys.argv) < 2:
+ print("Please specify a directory to run the conversion script against.")
+ sys.exit(1)
+
+def processfile(fn):
+ print("processing file '%s'" % fn)
+ try:
+ fh, abs_path = tempfile.mkstemp()
+ modified = False
+ with os.fdopen(fh, 'w') as new_file:
+ with open(fn, "r") as old_file:
+ for line in old_file:
+ if ("git://" in line or "gitsm://" in line) and "branch=" not in line and "MIRROR" not in line and ".*" not in line:
+ if line.endswith('"\n'):
+ line = line.replace('"\n', ';branch=master"\n')
+ elif line.endswith(" \\\n"):
+ line = line.replace(' \\\n', ';branch=master \\\n')
+ modified = True
+ new_file.write(line)
+ if modified:
+ shutil.copymode(fn, abs_path)
+ os.remove(fn)
+ shutil.move(abs_path, fn)
+ except UnicodeDecodeError:
+ pass
+
+ourname = os.path.basename(sys.argv[0])
+ourversion = "0.1"
+
+if os.path.isfile(sys.argv[1]):
+ processfile(sys.argv[1])
+ sys.exit(0)
+
+for targetdir in sys.argv[1:]:
+ print("processing directory '%s'" % targetdir)
+ for root, dirs, files in os.walk(targetdir):
+ for name in files:
+ if name == ourname:
+ continue
+ fn = os.path.join(root, name)
+ if os.path.islink(fn):
+ continue
+ if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff"):
+ continue
+ processfile(fn)
+
+print("All files processed with version %s" % ourversion)
diff --git a/scripts/contrib/ddimage b/scripts/contrib/ddimage
index a503f11d0d..7f2ad112a6 100755
--- a/scripts/contrib/ddimage
+++ b/scripts/contrib/ddimage
@@ -1,8 +1,7 @@
#!/bin/sh
-
-# Default to avoiding the first two disks on typical Linux and Mac OS installs
-# Better safe than sorry :-)
-BLACKLIST_DEVICES="/dev/sda /dev/sdb /dev/disk1 /dev/disk2"
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
# 1MB blocksize
BLOCKSIZE=1048576
@@ -29,7 +28,6 @@ image_details() {
}
device_details() {
- DEV=$1
BLOCK_SIZE=512
echo "Device details"
@@ -42,11 +40,17 @@ device_details() {
fi
# Default / Linux information collection
- echo " device: $DEVICE"
+ ACTUAL_DEVICE=`readlink -f $DEVICE`
+ DEV=`basename $ACTUAL_DEVICE`
+ if [ "$ACTUAL_DEVICE" != "$DEVICE" ] ; then
+ echo " device: $DEVICE -> $ACTUAL_DEVICE"
+ else
+ echo " device: $DEVICE"
+ fi
if [ -f "/sys/class/block/$DEV/device/vendor" ]; then
echo " vendor: $(cat /sys/class/block/$DEV/device/vendor)"
else
- echo " vendor: UNKOWN"
+ echo " vendor: UNKNOWN"
fi
if [ -f "/sys/class/block/$DEV/device/model" ]; then
echo " model: $(cat /sys/class/block/$DEV/device/model)"
@@ -61,6 +65,49 @@ device_details() {
echo ""
}
+check_mount_device() {
+ if cat /proc/self/mounts | awk '{ print $1 }' | grep /dev/ | grep -q -E "^$1$" ; then
+ return 0
+ fi
+ return 1
+}
+
+is_mounted() {
+ if [ "$(uname)" = "Darwin" ]; then
+ if df | awk '{ print $1 }' | grep /dev/ | grep -q -E "^$1(s[0-9]+)?$" ; then
+ return 0
+ fi
+ else
+ if check_mount_device $1 ; then
+ return 0
+ fi
+ DEV=`basename $1`
+ if [ -d /sys/class/block/$DEV/ ] ; then
+ PARENT_BLKDEV=`basename $(readlink -f "/sys/class/block/$DEV/..")`
+ if [ "$PARENT_BLKDEV" != "block" ] ; then
+ if check_mount_device $PARENT_BLKDEV ; then
+ return 0
+ fi
+ fi
+ for CHILD_BLKDEV in `find /sys/class/block/$DEV/ -mindepth 1 -maxdepth 1 -name "$DEV*" -type d`
+ do
+ if check_mount_device /dev/`basename $CHILD_BLKDEV` ; then
+ return 0
+ fi
+ done
+ fi
+ fi
+ return 1
+}
+
+is_inuse() {
+ HOLDERS_DIR="/sys/class/block/`basename $1`/holders"
+ if [ -d $HOLDERS_DIR ] && [ `ls -A $HOLDERS_DIR` ] ; then
+ return 0
+ fi
+ return 1
+}
+
if [ $# -ne 2 ]; then
usage
exit 1
@@ -75,22 +122,37 @@ if [ ! -e "$IMAGE" ]; then
exit 1
fi
+if [ ! -e "$DEVICE" ]; then
+ echo "ERROR: Device $DEVICE does not exist"
+ usage
+ exit 1
+fi
-for i in ${BLACKLIST_DEVICES}; do
- if [ "$i" = "$DEVICE" ]; then
- echo "ERROR: Device $DEVICE is blacklisted"
- exit 1
- fi
-done
+if [ "$(uname)" = "Darwin" ]; then
+ # readlink doesn't support -f on MacOS, just assume it isn't a symlink
+ ACTUAL_DEVICE=$DEVICE
+else
+ ACTUAL_DEVICE=`readlink -f $DEVICE`
+fi
+if is_mounted $ACTUAL_DEVICE ; then
+ echo "ERROR: Device $DEVICE is currently mounted - check if this is the right device, and unmount it first if so"
+ device_details
+ exit 1
+fi
+if is_inuse $ACTUAL_DEVICE ; then
+ echo "ERROR: Device $DEVICE is currently in use (possibly part of LVM) - check if this is the right device!"
+ device_details
+ exit 1
+fi
if [ ! -w "$DEVICE" ]; then
- echo "ERROR: Device $DEVICE does not exist or is not writable"
+ echo "ERROR: Device $DEVICE is not writable - possibly use sudo?"
usage
exit 1
fi
image_details $IMAGE
-device_details $(basename $DEVICE)
+device_details
printf "Write $IMAGE to $DEVICE [y/N]? "
read RESPONSE
@@ -100,5 +162,9 @@ if [ "$RESPONSE" != "y" ]; then
fi
echo "Writing image..."
-dd if="$IMAGE" of="$DEVICE" bs="$BLOCKSIZE"
+if which pv >/dev/null 2>&1; then
+ pv "$IMAGE" | dd of="$DEVICE" bs="$BLOCKSIZE"
+else
+ dd if="$IMAGE" of="$DEVICE" bs="$BLOCKSIZE"
+fi
sync
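A usage sketch; /dev/sdX is a placeholder for the actual target device, and installing pv gives a progress bar during the write (per the fallback above, plain dd is used otherwise):

    sudo ./scripts/contrib/ddimage \
        tmp/deploy/images/qemux86/core-image-minimal-qemux86.wic /dev/sdX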
diff --git a/scripts/contrib/devtool-stress.py b/scripts/contrib/devtool-stress.py
new file mode 100755
index 0000000000..81046ecf49
--- /dev/null
+++ b/scripts/contrib/devtool-stress.py
@@ -0,0 +1,245 @@
+#!/usr/bin/env python3
+
+# devtool stress tester
+#
+# Written by: Paul Eggleton <paul.eggleton@linux.intel.com>
+#
+# Copyright 2015 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import sys
+import os
+import os.path
+import subprocess
+import re
+import argparse
+import logging
+import tempfile
+import shutil
+import signal
+import fnmatch
+
+scripts_lib_path = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'lib'))
+sys.path.insert(0, scripts_lib_path)
+import scriptutils
+import argparse_oe
+logger = scriptutils.logger_create('devtool-stress')
+
+def select_recipes(args):
+ import bb.tinfoil
+ tinfoil = bb.tinfoil.Tinfoil()
+ tinfoil.prepare(False)
+
+ pkg_pn = tinfoil.cooker.recipecaches[''].pkg_pn
+ (latest_versions, preferred_versions) = bb.providers.findProviders(tinfoil.config_data, tinfoil.cooker.recipecaches[''], pkg_pn)
+
+ skip_classes = args.skip_classes.split(',')
+
+ recipelist = []
+ for pn in sorted(pkg_pn):
+ pref = preferred_versions[pn]
+ inherits = [os.path.splitext(os.path.basename(f))[0] for f in tinfoil.cooker.recipecaches[''].inherits[pref[1]]]
+ for cls in skip_classes:
+ if cls in inherits:
+ break
+ else:
+ recipelist.append(pn)
+
+ tinfoil.shutdown()
+
+ resume_from = args.resume_from
+ if resume_from:
+ if resume_from not in recipelist:
+ print('%s is not a testable recipe' % resume_from)
+ sys.exit(1)
+ if args.only:
+ only = args.only.split(',')
+ for onlyitem in only:
+ for pn in recipelist:
+ if fnmatch.fnmatch(pn, onlyitem):
+ break
+ else:
+ print('%s does not match any testable recipe' % onlyitem)
+ sys.exit(1)
+ else:
+ only = None
+ if args.skip:
+ skip = args.skip.split(',')
+ else:
+ skip = []
+
+ recipes = []
+ for pn in recipelist:
+ if resume_from:
+ if pn == resume_from:
+ resume_from = None
+ else:
+ continue
+
+ if args.only:
+ for item in only:
+ if fnmatch.fnmatch(pn, item):
+ break
+ else:
+ continue
+
+ skipit = False
+ for item in skip:
+ if fnmatch.fnmatch(pn, item):
+ skipit = True
+ if skipit:
+ continue
+
+ recipes.append(pn)
+
+ return recipes
+
+
+def stress_extract(args):
+ import bb.process
+
+ recipes = select_recipes(args)
+
+ failures = 0
+ tmpdir = tempfile.mkdtemp()
+ os.setpgrp()
+ try:
+ for pn in recipes:
+ sys.stdout.write('Testing %s ' % (pn + ' ').ljust(40, '.'))
+ sys.stdout.flush()
+ failed = False
+ skipped = None
+
+ srctree = os.path.join(tmpdir, pn)
+ try:
+ bb.process.run('devtool extract %s %s' % (pn, srctree))
+ except bb.process.ExecutionError as exc:
+ if exc.exitcode == 4:
+ skipped = 'incompatible'
+ else:
+ failed = True
+ with open('stress_%s_extract.log' % pn, 'w') as f:
+ f.write(str(exc))
+
+ if os.path.exists(srctree):
+ shutil.rmtree(srctree)
+
+ if failed:
+ print('failed')
+ failures += 1
+ elif skipped:
+ print('skipped (%s)' % skipped)
+ else:
+ print('ok')
+ except KeyboardInterrupt:
+ # We want any child processes killed. This is crude, but effective.
+ os.killpg(0, signal.SIGTERM)
+
+ if failures:
+ return 1
+ else:
+ return 0
+
+
+def stress_modify(args):
+ import bb.process
+
+ recipes = select_recipes(args)
+
+ failures = 0
+ tmpdir = tempfile.mkdtemp()
+ os.setpgrp()
+ try:
+ for pn in recipes:
+ sys.stdout.write('Testing %s ' % (pn + ' ').ljust(40, '.'))
+ sys.stdout.flush()
+ failed = False
+ reset = True
+ skipped = None
+
+ srctree = os.path.join(tmpdir, pn)
+ try:
+ bb.process.run('devtool modify -x %s %s' % (pn, srctree))
+ except bb.process.ExecutionError as exc:
+ if exc.exitcode == 4:
+ skipped = 'incompatible'
+ else:
+ with open('stress_%s_modify.log' % pn, 'w') as f:
+ f.write(str(exc))
+ failed = 'modify'
+ reset = False
+
+ if not skipped:
+ if not failed:
+ try:
+ bb.process.run('bitbake -c install %s' % pn)
+ except bb.process.CmdError as exc:
+ with open('stress_%s_install.log' % pn, 'w') as f:
+ f.write(str(exc))
+ failed = 'build'
+ if reset:
+ try:
+ bb.process.run('devtool reset %s' % pn)
+ except bb.process.CmdError as exc:
+ print('devtool reset failed: %s' % str(exc))
+ break
+
+ if os.path.exists(srctree):
+ shutil.rmtree(srctree)
+
+ if failed:
+ print('failed (%s)' % failed)
+ failures += 1
+ elif skipped:
+ print('skipped (%s)' % skipped)
+ else:
+ print('ok')
+ except KeyboardInterrupt:
+ # We want any child processes killed. This is crude, but effective.
+ os.killpg(0, signal.SIGTERM)
+
+ if failures:
+ return 1
+ else:
+ return 0
+
+
+def main():
+ parser = argparse_oe.ArgumentParser(description="devtool stress tester",
+ epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
+ parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
+ parser.add_argument('-r', '--resume-from', help='Resume from specified recipe', metavar='PN')
+ parser.add_argument('-o', '--only', help='Only test specified recipes (comma-separated without spaces, wildcards allowed)', metavar='PNLIST')
+ parser.add_argument('-s', '--skip', help='Skip specified recipes (comma-separated without spaces, wildcards allowed)', metavar='PNLIST', default='gcc-source-*,kernel-devsrc,package-index,perf,meta-world-pkgdata,glibc-locale,glibc-mtrace,glibc-scripts,os-release')
+ parser.add_argument('-c', '--skip-classes', help='Skip recipes inheriting specified classes (comma-separated) - default %(default)s', metavar='CLASSLIST', default='native,nativesdk,cross,cross-canadian,image,populate_sdk,meta,packagegroup')
+ subparsers = parser.add_subparsers(title='subcommands', metavar='<subcommand>')
+ subparsers.required = True
+
+ parser_modify = subparsers.add_parser('modify',
+ help='Run "devtool modify" followed by a build with bitbake on matching recipes',
+ description='Runs "devtool modify" followed by a build with bitbake on matching recipes')
+ parser_modify.set_defaults(func=stress_modify)
+
+ parser_extract = subparsers.add_parser('extract',
+ help='Run "devtool extract" on matching recipes',
+ description='Runs "devtool extract" on matching recipes')
+ parser_extract.set_defaults(func=stress_extract)
+
+ args = parser.parse_args()
+
+ if args.debug:
+ logger.setLevel(logging.DEBUG)
+
+ import scriptpath
+ bitbakepath = scriptpath.add_bitbake_lib_path()
+ if not bitbakepath:
+ logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
+ return 1
+ logger.debug('Found bitbake path: %s' % bitbakepath)
+
+ return args.func(args)
+
+if __name__ == "__main__":
+ sys.exit(main())
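A sketch of running the stress tester from an initialized build environment; the recipe names and patterns are arbitrary examples:

    cd poky
    source oe-init-build-env build
    ../scripts/contrib/devtool-stress.py extract --only 'zlib,libpng'
    ../scripts/contrib/devtool-stress.py modify --resume-from bash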
diff --git a/scripts/contrib/dialog-power-control b/scripts/contrib/dialog-power-control
index 7550ea53be..ad6070c369 100755
--- a/scripts/contrib/dialog-power-control
+++ b/scripts/contrib/dialog-power-control
@@ -1,5 +1,7 @@
#!/bin/sh
#
+# SPDX-License-Identifier: GPL-2.0-only
+#
# Simple script to show a manual power prompt for when you want to use
# automated hardware testing with testimage.bbclass but you don't have a
# web-enabled power strip or similar to do the power on/off/cycle.
diff --git a/scripts/contrib/documentation-audit.sh b/scripts/contrib/documentation-audit.sh
index 2144aac936..f436f9bae0 100755
--- a/scripts/contrib/documentation-audit.sh
+++ b/scripts/contrib/documentation-audit.sh
@@ -1,5 +1,7 @@
#!/bin/bash
#
+# SPDX-License-Identifier: GPL-2.0-only
+#
# Perform an audit of which packages provide documentation and which
# are missing -doc packages.
#
@@ -7,7 +9,6 @@
# this script after source'ing the build environment script, so you're
# running it from build/ directory.
#
-# Maintainer: Scott Garman <scott.a.garman@intel.com>
REPORT_DOC_SIMPLE="documentation_exists.txt"
REPORT_DOC_DETAIL="documentation_exists_detail.txt"
@@ -26,7 +27,7 @@ fi
echo "REMINDER: you need to build for MACHINE=qemux86 or you won't get useful results"
echo "REMINDER: you need to set LICENSE_FLAGS_WHITELIST appropriately in local.conf or "
-echo " you'll get false positives. For example, LICENSE_FLAGS_WHITELIST = \"Commercial\""
+echo " you'll get false positives. For example, LICENSE_FLAGS_WHITELIST = \"commercial\""
for pkg in `bitbake -s | awk '{ print \$1 }'`; do
if [[ "$pkg" == "Loading" || "$pkg" == "Loaded" ||
diff --git a/scripts/contrib/graph-tool b/scripts/contrib/graph-tool
index 6dc7d337f8..26488930e0 100755
--- a/scripts/contrib/graph-tool
+++ b/scripts/contrib/graph-tool
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Simple graph query utility
# useful for getting answers from .dot files produced by bitbake -g
@@ -7,21 +7,17 @@
#
# Copyright 2013 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
#
import sys
+import os
+import argparse
+
+scripts_lib_path = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'lib'))
+sys.path.insert(0, scripts_lib_path)
+import argparse_oe
+
def get_path_networkx(dotfile, fromnode, tonode):
try:
@@ -30,8 +26,7 @@ def get_path_networkx(dotfile, fromnode, tonode):
print('ERROR: Please install the networkx python module')
sys.exit(1)
- graph = networkx.DiGraph(networkx.read_dot(dotfile))
-
+ graph = networkx.DiGraph(networkx.nx_pydot.read_dot(dotfile))
def node_missing(node):
import difflib
close_matches = difflib.get_close_matches(node, graph.nodes(), cutoff=0.7)
@@ -46,47 +41,78 @@ def get_path_networkx(dotfile, fromnode, tonode):
return networkx.all_simple_paths(graph, source=fromnode, target=tonode)
-def find_paths(args, usage):
- if len(args) < 3:
- usage()
- sys.exit(1)
+def find_paths(args):
+ path = None
+ for path in get_path_networkx(args.dotfile, args.fromnode, args.tonode):
+ print(" -> ".join(map(str, path)))
+ if not path:
+ print("ERROR: no path from %s to %s in graph" % (args.fromnode, args.tonode))
+ return 1
+
+
+def filter_graph(args):
+ import fnmatch
+
+ exclude_tasks = []
+ if args.exclude_tasks:
+ for task in args.exclude_tasks.split(','):
+ if not task.startswith('do_'):
+ task = 'do_%s' % task
+ exclude_tasks.append(task)
+
+ def checkref(strval):
+ strval = strval.strip().strip('"')
+ target, taskname = strval.rsplit('.', 1)
+ if exclude_tasks:
+ for extask in exclude_tasks:
+ if fnmatch.fnmatch(taskname, extask):
+ return False
+ if strval in args.ref or target in args.ref:
+ return True
+ return False
+
+ with open(args.infile, 'r') as f:
+ for line in f:
+ line = line.rstrip()
+ if line.startswith(('digraph', '}')):
+ print(line)
+ elif '->' in line:
+ linesplit = line.split('->')
+ if checkref(linesplit[0]) and checkref(linesplit[1]):
+ print(line)
+ elif (not args.no_nodes) and checkref(line.split()[0]):
+ print(line)
- fromnode = args[1]
- tonode = args[2]
- paths = list(get_path_networkx(args[0], fromnode, tonode))
- if paths:
- for path in paths:
- print ' -> '.join(path)
- else:
- print("ERROR: no path from %s to %s in graph" % (fromnode, tonode))
- sys.exit(1)
def main():
- import optparse
- parser = optparse.OptionParser(
- usage = '''%prog [options] <command> <arguments>
+ parser = argparse_oe.ArgumentParser(description='Small utility for working with .dot graph files')
-Available commands:
- find-paths <dotfile> <from> <to>
- Find all of the paths between two nodes in a dot graph''')
+ subparsers = parser.add_subparsers(title='subcommands', metavar='<subcommand>')
+ subparsers.required = True
- #parser.add_option("-d", "--debug",
- # help = "Report all SRCREV values, not just ones where AUTOREV has been used",
- # action="store_true", dest="debug", default=False)
+ parser_find_paths = subparsers.add_parser('find-paths',
+ help='Find all of the paths between two nodes in a dot graph',
+ description='Finds all of the paths between two nodes in a dot graph')
+ parser_find_paths.add_argument('dotfile', help='.dot graph to search in')
+ parser_find_paths.add_argument('fromnode', help='starting node name')
+ parser_find_paths.add_argument('tonode', help='ending node name')
+ parser_find_paths.set_defaults(func=find_paths)
- options, args = parser.parse_args(sys.argv)
- args = args[1:]
+ parser_filter = subparsers.add_parser('filter',
+ help='Pare down a task graph to contain only the specified references',
+ description='Pares down a task-depends.dot graph produced by bitbake -g to contain only the specified references')
+ parser_filter.add_argument('infile', help='Input file')
+ parser_filter.add_argument('ref', nargs='+', help='Reference to include (either recipe/target name or full target.taskname specification)')
+ parser_filter.add_argument('-n', '--no-nodes', action='store_true', help='Skip node formatting lines')
+ parser_filter.add_argument('-x', '--exclude-tasks', help='Comma-separated list of tasks to exclude (do_ prefix optional, wildcards allowed)')
+ parser_filter.set_defaults(func=filter_graph)
- if len(args) < 1:
- parser.print_help()
- sys.exit(1)
+ args = parser.parse_args()
- if args[0] == "find-paths":
- find_paths(args[1:], parser.print_help)
- else:
- parser.print_help()
- sys.exit(1)
+ ret = args.func(args)
+ return ret
if __name__ == "__main__":
- main()
+ ret = main()
+ sys.exit(ret)
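+
+# Example usage (a sketch; file and node names are illustrative):
+#   graph-tool find-paths task-depends.dot busybox.do_install busybox.do_compile
+#   graph-tool filter task-depends.dot busybox -x fetch,unpack > filtered.dot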
diff --git a/scripts/contrib/image-manifest b/scripts/contrib/image-manifest
new file mode 100755
index 0000000000..3c07a73a4e
--- /dev/null
+++ b/scripts/contrib/image-manifest
@@ -0,0 +1,523 @@
+#!/usr/bin/env python3
+
+# Script to extract information from image manifests
+#
+# Copyright (C) 2018 Intel Corporation
+# Copyright (C) 2021 Wind River Systems, Inc.
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import sys
+import os
+import argparse
+import logging
+import json
+import shutil
+import tempfile
+import tarfile
+from collections import OrderedDict
+
+scripts_path = os.path.dirname(__file__)
+lib_path = scripts_path + '/../lib'
+sys.path = sys.path + [lib_path]
+
+import scriptutils
+logger = scriptutils.logger_create(os.path.basename(__file__))
+
+import argparse_oe
+import scriptpath
+bitbakepath = scriptpath.add_bitbake_lib_path()
+if not bitbakepath:
+ logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
+ sys.exit(1)
+logger.debug('Using standard bitbake path %s' % bitbakepath)
+scriptpath.add_oe_lib_path()
+
+import bb.tinfoil
+import bb.utils
+import oe.utils
+import oe.recipeutils
+
+def get_pkg_list(manifest):
+ pkglist = []
+ with open(manifest, 'r') as f:
+ for line in f:
+ linesplit = line.split()
+ if len(linesplit) == 3:
+ # manifest file
+ pkglist.append(linesplit[0])
+ elif len(linesplit) == 1:
+ # build dependency file
+ pkglist.append(linesplit[0])
+ return sorted(pkglist)
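+
+# The two input formats handled above, sketched with illustrative values:
+#   image manifest line:   "busybox core2-64 1.31.1"   (name arch version)
+#   dependency file line:  "busybox"                   (name only)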
+
+def list_packages(args):
+ pkglist = get_pkg_list(args.manifest)
+ for pkg in pkglist:
+ print('%s' % pkg)
+
+def pkg2recipe(tinfoil, pkg):
+ if "-native" in pkg:
+ logger.info('skipping %s' % pkg)
+ return None
+
+ pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
+ pkgdatafile = os.path.join(pkgdata_dir, 'runtime-reverse', pkg)
+ logger.debug('pkgdatafile %s' % pkgdatafile)
+    try:
+        with open(pkgdatafile, 'r') as f:
+            for line in f:
+                if line.startswith('PN:'):
+                    return line.split(':', 1)[1].strip()
+    except Exception:
+        logger.warning('%s is missing' % pkgdatafile)
+    return None
+
+def get_recipe_list(manifest, tinfoil):
+ pkglist = get_pkg_list(manifest)
+ recipelist = []
+ for pkg in pkglist:
+        recipe = pkg2recipe(tinfoil, pkg)
+        if recipe and recipe not in recipelist:
+            recipelist.append(recipe)
+
+ return sorted(recipelist)
+
+def list_recipes(args):
+ import bb.tinfoil
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.logger.setLevel(logger.getEffectiveLevel())
+ tinfoil.prepare(config_only=True)
+ recipelist = get_recipe_list(args.manifest, tinfoil)
+ for recipe in sorted(recipelist):
+ print('%s' % recipe)
+
+def list_layers(args):
+
+ def find_git_repo(pth):
+ checkpth = pth
+ while checkpth != os.sep:
+ if os.path.exists(os.path.join(checkpth, '.git')):
+ return checkpth
+ checkpth = os.path.dirname(checkpth)
+ return None
+
+ def get_git_remote_branch(repodir):
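+        # e.g. returns "origin/master" (illustrative); None when no upstream is configured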
+ try:
+ stdout, _ = bb.process.run(['git', 'rev-parse', '--abbrev-ref', '--symbolic-full-name', '@{u}'], cwd=repodir)
+ except bb.process.ExecutionError as e:
+ stdout = None
+ if stdout:
+ return stdout.strip()
+ else:
+ return None
+
+ def get_git_head_commit(repodir):
+ try:
+ stdout, _ = bb.process.run(['git', 'rev-parse', 'HEAD'], cwd=repodir)
+ except bb.process.ExecutionError as e:
+ stdout = None
+ if stdout:
+ return stdout.strip()
+ else:
+ return None
+
+ def get_git_repo_url(repodir, remote='origin'):
+ import bb.process
+ # Try to get upstream repo location from origin remote
+ try:
+ stdout, _ = bb.process.run(['git', 'remote', '-v'], cwd=repodir)
+ except bb.process.ExecutionError as e:
+ stdout = None
+ if stdout:
+ for line in stdout.splitlines():
+ splitline = line.split()
+ if len(splitline) > 1:
+ if splitline[0] == remote and scriptutils.is_src_url(splitline[1]):
+ return splitline[1]
+ return None
+
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.logger.setLevel(logger.getEffectiveLevel())
+ tinfoil.prepare(config_only=False)
+ layers = OrderedDict()
+ for layerdir in tinfoil.config_data.getVar('BBLAYERS').split():
+ layerdata = OrderedDict()
+ layername = os.path.basename(layerdir)
+ logger.debug('layername %s, layerdir %s' % (layername, layerdir))
+ if layername in layers:
+ logger.warning('layername %s is not unique in configuration' % layername)
+ layername = os.path.basename(os.path.dirname(layerdir)) + '_' + os.path.basename(layerdir)
+ logger.debug('trying layername %s' % layername)
+ if layername in layers:
+ logger.error('Layer name %s is not unique in configuration' % layername)
+ sys.exit(2)
+ repodir = find_git_repo(layerdir)
+ if repodir:
+ remotebranch = get_git_remote_branch(repodir)
+ remote = 'origin'
+ if remotebranch and '/' in remotebranch:
+ rbsplit = remotebranch.split('/', 1)
+ layerdata['actual_branch'] = rbsplit[1]
+ remote = rbsplit[0]
+ layerdata['vcs_url'] = get_git_repo_url(repodir, remote)
+ if os.path.abspath(repodir) != os.path.abspath(layerdir):
+ layerdata['vcs_subdir'] = os.path.relpath(layerdir, repodir)
+ commit = get_git_head_commit(repodir)
+ if commit:
+ layerdata['vcs_commit'] = commit
+ layers[layername] = layerdata
+
+ json.dump(layers, args.output, indent=2)
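+
+# Sketch of the JSON emitted above (values illustrative):
+#   {
+#     "meta": {
+#       "actual_branch": "master",
+#       "vcs_url": "git://git.openembedded.org/openembedded-core",
+#       "vcs_subdir": "meta",
+#       "vcs_commit": "<sha1>"
+#     }
+#   }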
+
+def get_recipe(args):
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.logger.setLevel(logger.getEffectiveLevel())
+ tinfoil.prepare(config_only=True)
+
+ recipe = pkg2recipe(tinfoil, args.package)
+ print(' %s package provided by %s' % (args.package, recipe))
+
+def pkg_dependencies(args):
+ def get_recipe_info(tinfoil, recipe):
+ try:
+ info = tinfoil.get_recipe_info(recipe)
+ except Exception:
+ logger.error('Failed to get recipe info for: %s' % recipe)
+ sys.exit(1)
+ if not info:
+ logger.warning('No recipe info found for: %s' % recipe)
+ sys.exit(1)
+ append_files = tinfoil.get_file_appends(info.fn)
+ appends = True
+ data = tinfoil.parse_recipe_file(info.fn, appends, append_files)
+ data.pn = info.pn
+ data.pv = info.pv
+ return data
+
+ def find_dependencies(tinfoil, assume_provided, recipe_info, packages, rn, order):
+ spaces = ' ' * order
+ data = recipe_info[rn]
+ if args.native:
+ logger.debug('%s- %s' % (spaces, data.pn))
+ elif "-native" not in data.pn:
+ if "cross" not in data.pn:
+ logger.debug('%s- %s' % (spaces, data.pn))
+
+ depends = []
+ for dep in data.depends:
+ if dep not in assume_provided:
+ depends.append(dep)
+
+ # First find all dependencies not in package list.
+ for dep in depends:
+ if dep not in packages:
+ packages.append(dep)
+ dep_data = get_recipe_info(tinfoil, dep)
+ # Do this once now to reduce the number of bitbake calls.
+ dep_data.depends = dep_data.getVar('DEPENDS').split()
+ recipe_info[dep] = dep_data
+
+ # Then recursively analyze all of the dependencies for the current recipe.
+ for dep in depends:
+ find_dependencies(tinfoil, assume_provided, recipe_info, packages, dep, order + 1)
+
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.logger.setLevel(logger.getEffectiveLevel())
+ tinfoil.prepare()
+
+ assume_provided = tinfoil.config_data.getVar('ASSUME_PROVIDED').split()
+ logger.debug('assumed provided:')
+ for ap in sorted(assume_provided):
+ logger.debug(' - %s' % ap)
+
+ recipe = pkg2recipe(tinfoil, args.package)
+ data = get_recipe_info(tinfoil, recipe)
+ data.depends = []
+ depends = data.getVar('DEPENDS').split()
+ for dep in depends:
+ if dep not in assume_provided:
+ data.depends.append(dep)
+
+ recipe_info = dict([(recipe, data)])
+ packages = []
+ find_dependencies(tinfoil, assume_provided, recipe_info, packages, recipe, order=1)
+
+ print('\nThe following packages are required to build %s' % recipe)
+ for p in sorted(packages):
+ data = recipe_info[p]
+ if "-native" not in data.pn:
+ if "cross" not in data.pn:
+ print(" %s (%s)" % (data.pn,p))
+
+ if args.native:
+ print('\nThe following native packages are required to build %s' % recipe)
+ for p in sorted(packages):
+ data = recipe_info[p]
+ if "-native" in data.pn:
+ print(" %s(%s)" % (data.pn,p))
+ if "cross" in data.pn:
+ print(" %s(%s)" % (data.pn,p))
+
+def default_config():
+ vlist = OrderedDict()
+ vlist['PV'] = 'yes'
+ vlist['SUMMARY'] = 'no'
+ vlist['DESCRIPTION'] = 'no'
+ vlist['SECTION'] = 'no'
+ vlist['LICENSE'] = 'yes'
+ vlist['HOMEPAGE'] = 'no'
+ vlist['BUGTRACKER'] = 'no'
+ vlist['PROVIDES'] = 'no'
+ vlist['BBCLASSEXTEND'] = 'no'
+ vlist['DEPENDS'] = 'no'
+ vlist['PACKAGECONFIG'] = 'no'
+ vlist['SRC_URI'] = 'yes'
+ vlist['SRCREV'] = 'yes'
+ vlist['EXTRA_OECONF'] = 'no'
+ vlist['EXTRA_OESCONS'] = 'no'
+ vlist['EXTRA_OECMAKE'] = 'no'
+ vlist['EXTRA_OEMESON'] = 'no'
+
+ clist = OrderedDict()
+ clist['variables'] = vlist
+ clist['filepath'] = 'no'
+ clist['sha256sum'] = 'no'
+ clist['layerdir'] = 'no'
+ clist['layer'] = 'no'
+ clist['inherits'] = 'no'
+ clist['source_urls'] = 'no'
+ clist['packageconfig_opts'] = 'no'
+ clist['patches'] = 'no'
+ clist['packagedir'] = 'no'
+ return clist
+
+def dump_config(args):
+ config = default_config()
+    with open('default_config.json', 'w') as f:
+        json.dump(config, f, indent=2)
+ logger.info('Default config list dumped to default_config.json')
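+    # The dumped file can be edited and passed back in via 'manifest-info -c default_config.json'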
+
+def export_manifest_info(args):
+
+ def handle_value(value):
+ if value:
+ return oe.utils.squashspaces(value)
+ else:
+ return value
+
+ if args.config:
+ logger.debug('config: %s' % args.config)
+        with open(args.config, 'r') as f:
+            config = json.load(f, object_pairs_hook=OrderedDict)
+ else:
+ config = default_config()
+ if logger.isEnabledFor(logging.DEBUG):
+ print('Configuration:')
+ json.dump(config, sys.stdout, indent=2)
+ print('')
+
+ tmpoutdir = tempfile.mkdtemp(prefix=os.path.basename(__file__)+'-')
+ logger.debug('tmp dir: %s' % tmpoutdir)
+
+ # export manifest
+    shutil.copy2(args.manifest, os.path.join(tmpoutdir, "manifest"))
+
+ with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
+ tinfoil.logger.setLevel(logger.getEffectiveLevel())
+ tinfoil.prepare(config_only=False)
+
+ pkglist = get_pkg_list(args.manifest)
+ # export pkg list
+ f = open(os.path.join(tmpoutdir, "pkgs"), 'w')
+ for pkg in pkglist:
+ f.write('%s\n' % pkg)
+ f.close()
+
+ recipelist = []
+ for pkg in pkglist:
+            recipe = pkg2recipe(tinfoil, pkg)
+            if recipe and recipe not in recipelist:
+                recipelist.append(recipe)
+ recipelist.sort()
+ # export recipe list
+ f = open(os.path.join(tmpoutdir, "recipes"), 'w')
+ for recipe in recipelist:
+ f.write('%s\n' % recipe)
+ f.close()
+
+ try:
+ rvalues = OrderedDict()
+ for pn in sorted(recipelist):
+ logger.debug('Package: %s' % pn)
+ rd = tinfoil.parse_recipe(pn)
+
+ rvalues[pn] = OrderedDict()
+
+ for varname in config['variables']:
+ if config['variables'][varname] == 'yes':
+ rvalues[pn][varname] = handle_value(rd.getVar(varname))
+
+ fpth = rd.getVar('FILE')
+ layerdir = oe.recipeutils.find_layerdir(fpth)
+ if config['filepath'] == 'yes':
+ rvalues[pn]['filepath'] = os.path.relpath(fpth, layerdir)
+ if config['sha256sum'] == 'yes':
+ rvalues[pn]['sha256sum'] = bb.utils.sha256_file(fpth)
+
+ if config['layerdir'] == 'yes':
+ rvalues[pn]['layerdir'] = layerdir
+
+ if config['layer'] == 'yes':
+ rvalues[pn]['layer'] = os.path.basename(layerdir)
+
+ if config['inherits'] == 'yes':
+ gr = set(tinfoil.config_data.getVar("__inherit_cache") or [])
+ lr = set(rd.getVar("__inherit_cache") or [])
+ rvalues[pn]['inherits'] = sorted({os.path.splitext(os.path.basename(r))[0] for r in lr if r not in gr})
+
+ if config['source_urls'] == 'yes':
+ rvalues[pn]['source_urls'] = []
+ for url in (rd.getVar('SRC_URI') or '').split():
+ if not url.startswith('file://'):
+ url = url.split(';')[0]
+ rvalues[pn]['source_urls'].append(url)
+
+ if config['packageconfig_opts'] == 'yes':
+ rvalues[pn]['packageconfig_opts'] = OrderedDict()
+ for key in rd.getVarFlags('PACKAGECONFIG').keys():
+ if key == 'doc':
+ continue
+ rvalues[pn]['packageconfig_opts'][key] = rd.getVarFlag('PACKAGECONFIG', key, True)
+
+ if config['patches'] == 'yes':
+ patches = oe.recipeutils.get_recipe_patches(rd)
+ rvalues[pn]['patches'] = []
+ if patches:
+ recipeoutdir = os.path.join(tmpoutdir, pn, 'patches')
+ bb.utils.mkdirhier(recipeoutdir)
+ for patch in patches:
+ # Patches may be in other layers too
+ patchlayerdir = oe.recipeutils.find_layerdir(patch)
+ # patchlayerdir will be None for remote patches, which we ignore
+ # (since currently they are considered as part of sources)
+ if patchlayerdir:
+ rvalues[pn]['patches'].append((os.path.basename(patchlayerdir), os.path.relpath(patch, patchlayerdir)))
+ shutil.copy(patch, recipeoutdir)
+
+ if config['packagedir'] == 'yes':
+ pn_dir = os.path.join(tmpoutdir, pn)
+ bb.utils.mkdirhier(pn_dir)
+                with open(os.path.join(pn_dir, 'recipe.json'), 'w') as f:
+                    json.dump(rvalues[pn], f, indent=2)
+
+ with open(os.path.join(tmpoutdir, 'recipes.json'), 'w') as f:
+ json.dump(rvalues, f, indent=2)
+
+ if args.output:
+ outname = os.path.basename(args.output)
+ else:
+ outname = os.path.splitext(os.path.basename(args.manifest))[0]
+ if outname.endswith('.tar.gz'):
+ outname = outname[:-7]
+ elif outname.endswith('.tgz'):
+ outname = outname[:-4]
+
+ tarfn = outname
+ if tarfn.endswith(os.sep):
+ tarfn = tarfn[:-1]
+ if not tarfn.endswith(('.tar.gz', '.tgz')):
+ tarfn += '.tar.gz'
+ with open(tarfn, 'wb') as f:
+ with tarfile.open(None, "w:gz", f) as tar:
+ tar.add(tmpoutdir, outname)
+ finally:
+ shutil.rmtree(tmpoutdir)
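+
+# Sketch of the resulting tarball layout (names depend on the manifest):
+#   <outname>/manifest       copy of the input manifest
+#   <outname>/pkgs           one package per line
+#   <outname>/recipes        one recipe per line
+#   <outname>/recipes.json   per-recipe variable dump
+#   <outname>/<pn>/          patches and recipe.json, when enabled in the config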
+
+
+def main():
+ parser = argparse_oe.ArgumentParser(description="Image manifest utility",
+ epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
+ parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
+ parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
+ subparsers = parser.add_subparsers(dest="subparser_name", title='subcommands', metavar='<subcommand>')
+ subparsers.required = True
+
+ # get recipe info
+ parser_get_recipes = subparsers.add_parser('recipe-info',
+ help='Get recipe info',
+ description='Get recipe information for a package')
+ parser_get_recipes.add_argument('package', help='Package name')
+ parser_get_recipes.set_defaults(func=get_recipe)
+
+ # list runtime dependencies
+ parser_pkg_dep = subparsers.add_parser('list-depends',
+ help='List dependencies',
+ description='List dependencies required to build the package')
+ parser_pkg_dep.add_argument('--native', help='also print native and cross packages', action='store_true')
+ parser_pkg_dep.add_argument('package', help='Package name')
+ parser_pkg_dep.set_defaults(func=pkg_dependencies)
+
+ # list recipes
+ parser_recipes = subparsers.add_parser('list-recipes',
+ help='List recipes producing packages within an image',
+ description='Lists recipes producing the packages that went into an image, using the manifest and pkgdata')
+ parser_recipes.add_argument('manifest', help='Manifest file')
+ parser_recipes.set_defaults(func=list_recipes)
+
+ # list packages
+ parser_packages = subparsers.add_parser('list-packages',
+ help='List packages within an image',
+ description='Lists packages that went into an image, using the manifest')
+ parser_packages.add_argument('manifest', help='Manifest file')
+ parser_packages.set_defaults(func=list_packages)
+
+ # list layers
+ parser_layers = subparsers.add_parser('list-layers',
+ help='List included layers',
+ description='Lists included layers')
+ parser_layers.add_argument('-o', '--output', help='Output file - defaults to stdout if not specified',
+ default=sys.stdout, type=argparse.FileType('w'))
+ parser_layers.set_defaults(func=list_layers)
+
+ # dump default configuration file
+ parser_dconfig = subparsers.add_parser('dump-config',
+ help='Dump default config',
+ description='Dump default config to default_config.json')
+ parser_dconfig.set_defaults(func=dump_config)
+
+ # export recipe info for packages in manifest
+ parser_export = subparsers.add_parser('manifest-info',
+ help='Export recipe info for a manifest',
+ description='Export recipe information using the manifest')
+ parser_export.add_argument('-c', '--config', help='load config from json file')
+ parser_export.add_argument('-o', '--output', help='Output file (tarball) - defaults to manifest name if not specified')
+ parser_export.add_argument('manifest', help='Manifest file')
+ parser_export.set_defaults(func=export_manifest_info)
+
+ args = parser.parse_args()
+
+ if args.debug:
+ logger.setLevel(logging.DEBUG)
+ logger.debug("Debug Enabled")
+ elif args.quiet:
+ logger.setLevel(logging.ERROR)
+
+ ret = args.func(args)
+
+ return ret
+
+
+if __name__ == "__main__":
+ try:
+ ret = main()
+ except Exception:
+ ret = 1
+ import traceback
+ traceback.print_exc()
+ sys.exit(ret)
diff --git a/scripts/contrib/list-packageconfig-flags.py b/scripts/contrib/list-packageconfig-flags.py
index 2f3b8b06a6..bb288e9099 100755
--- a/scripts/contrib/list-packageconfig-flags.py
+++ b/scripts/contrib/list-packageconfig-flags.py
@@ -1,21 +1,10 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software Foundation.
-#
# Copyright (C) 2013 Wind River Systems, Inc.
# Copyright (C) 2014 Intel Corporation
#
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
# - list available recipes which have PACKAGECONFIG flags
# - list available PACKAGECONFIG flags and all affected recipes
# - list all recipes and PACKAGECONFIG information
@@ -37,7 +26,6 @@ if not bitbakepath:
sys.stderr.write("Unable to find bitbake by searching parent directory of this script or PATH\n")
sys.exit(1)
-import bb.cache
import bb.cooker
import bb.providers
import bb.tinfoil
@@ -45,7 +33,7 @@ import bb.tinfoil
def get_fnlist(bbhandler, pkg_pn, preferred):
''' Get all recipe file names '''
if preferred:
- (latest_versions, preferred_versions) = bb.providers.findProviders(bbhandler.config_data, bbhandler.cooker.recipecache, pkg_pn)
+ (latest_versions, preferred_versions, required_versions) = bb.providers.findProviders(bbhandler.config_data, bbhandler.cooker.recipecaches[''], pkg_pn)
fn_list = []
for pn in sorted(pkg_pn):
@@ -58,11 +46,11 @@ def get_fnlist(bbhandler, pkg_pn, preferred):
def get_recipesdata(bbhandler, preferred):
''' Get data of all available recipes which have PACKAGECONFIG flags '''
- pkg_pn = bbhandler.cooker.recipecache.pkg_pn
+ pkg_pn = bbhandler.cooker.recipecaches[''].pkg_pn
data_dict = {}
for fn in get_fnlist(bbhandler, pkg_pn, preferred):
- data = bb.cache.Cache.loadDataFull(fn, bbhandler.cooker.collection.get_file_appends(fn), bbhandler.config_data)
+ data = bbhandler.parse_recipe_file(fn)
flags = data.getVarFlags("PACKAGECONFIG")
flags.pop('doc', None)
if flags:
@@ -77,7 +65,7 @@ def collect_pkgs(data_dict):
for fn in data_dict:
pkgconfigflags = data_dict[fn].getVarFlags("PACKAGECONFIG")
pkgconfigflags.pop('doc', None)
- pkgname = data_dict[fn].getVar("P", True)
+ pkgname = data_dict[fn].getVar("PN")
pkg_dict[pkgname] = sorted(pkgconfigflags.keys())
return pkg_dict
@@ -86,7 +74,7 @@ def collect_flags(pkg_dict):
''' Collect available PACKAGECONFIG flags and all affected pkgs '''
# flag_dict = {'flag': ['pkg1', 'pkg2',...]}
flag_dict = {}
- for pkgname, flaglist in pkg_dict.iteritems():
+ for pkgname, flaglist in pkg_dict.items():
for flag in flaglist:
if flag in flag_dict:
flag_dict[flag].append(pkgname)
@@ -104,8 +92,8 @@ def display_pkgs(pkg_dict):
pkgname_len += 1
header = '%-*s%s' % (pkgname_len, str("RECIPE NAME"), str("PACKAGECONFIG FLAGS"))
- print header
- print str("").ljust(len(header), '=')
+ print(header)
+ print(str("").ljust(len(header), '='))
for pkgname in sorted(pkg_dict):
print('%-*s%s' % (pkgname_len, pkgname, ' '.join(pkg_dict[pkgname])))
@@ -115,28 +103,28 @@ def display_flags(flag_dict):
flag_len = len("PACKAGECONFIG FLAG") + 5
header = '%-*s%s' % (flag_len, str("PACKAGECONFIG FLAG"), str("RECIPE NAMES"))
- print header
- print str("").ljust(len(header), '=')
+ print(header)
+ print(str("").ljust(len(header), '='))
for flag in sorted(flag_dict):
print('%-*s%s' % (flag_len, flag, ' '.join(sorted(flag_dict[flag]))))
def display_all(data_dict):
''' Display all pkgs and PACKAGECONFIG information '''
- print str("").ljust(50, '=')
+ print(str("").ljust(50, '='))
for fn in data_dict:
- print('%s' % data_dict[fn].getVar("P", True))
- print fn
- packageconfig = data_dict[fn].getVar("PACKAGECONFIG", True) or ''
+ print('%s' % data_dict[fn].getVar("P"))
+ print(fn)
+ packageconfig = data_dict[fn].getVar("PACKAGECONFIG") or ''
if packageconfig.strip() == '':
packageconfig = 'None'
print('PACKAGECONFIG %s' % packageconfig)
- for flag,flag_val in data_dict[fn].getVarFlags("PACKAGECONFIG").iteritems():
+ for flag,flag_val in data_dict[fn].getVarFlags("PACKAGECONFIG").items():
if flag == "doc":
continue
print('PACKAGECONFIG[%s] %s' % (flag, flag_val))
- print ''
+ print('')
def main():
pkg_dict = {}
@@ -160,20 +148,20 @@ def main():
options, args = parser.parse_args(sys.argv)
- bbhandler = bb.tinfoil.Tinfoil()
- bbhandler.prepare()
- print("Gathering recipe data...")
- data_dict = get_recipesdata(bbhandler, options.preferred)
-
- if options.listtype == 'flags':
- pkg_dict = collect_pkgs(data_dict)
- flag_dict = collect_flags(pkg_dict)
- display_flags(flag_dict)
- elif options.listtype == 'recipes':
- pkg_dict = collect_pkgs(data_dict)
- display_pkgs(pkg_dict)
- elif options.listtype == 'all':
- display_all(data_dict)
+ with bb.tinfoil.Tinfoil() as bbhandler:
+ bbhandler.prepare()
+ print("Gathering recipe data...")
+ data_dict = get_recipesdata(bbhandler, options.preferred)
+
+ if options.listtype == 'flags':
+ pkg_dict = collect_pkgs(data_dict)
+ flag_dict = collect_flags(pkg_dict)
+ display_flags(flag_dict)
+ elif options.listtype == 'recipes':
+ pkg_dict = collect_pkgs(data_dict)
+ display_pkgs(pkg_dict)
+ elif options.listtype == 'all':
+ display_all(data_dict)
if __name__ == "__main__":
main()
diff --git a/scripts/contrib/mkefidisk.sh b/scripts/contrib/mkefidisk.sh
deleted file mode 100755
index 55f72b0f54..0000000000
--- a/scripts/contrib/mkefidisk.sh
+++ /dev/null
@@ -1,403 +0,0 @@
-#!/bin/sh
-#
-# Copyright (c) 2012, Intel Corporation.
-# All rights reserved.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
-# the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-#
-
-LANG=C
-
-# Set to 1 to enable additional output
-DEBUG=0
-OUT="/dev/null"
-
-#
-# Defaults
-#
-# 20 Mb for the boot partition
-BOOT_SIZE=20
-# 5% for swap
-SWAP_RATIO=5
-
-# Cleanup after die()
-cleanup() {
- debug "Syncing and unmounting devices"
- # Unmount anything we mounted
- unmount $ROOTFS_MNT || error "Failed to unmount $ROOTFS_MNT"
- unmount $BOOTFS_MNT || error "Failed to unmount $BOOTFS_MNT"
- unmount $HDDIMG_ROOTFS_MNT || error "Failed to unmount $HDDIMG_ROOTFS_MNT"
- unmount $HDDIMG_MNT || error "Failed to unmount $HDDIMG_MNT"
-
- # Remove the TMPDIR
- debug "Removing temporary files"
- if [ -d "$TMPDIR" ]; then
- rm -rf $TMPDIR || error "Failed to remove $TMPDIR"
- fi
-}
-
-trap 'die "Signal Received, Aborting..."' HUP INT TERM
-
-# Logging routines
-WARNINGS=0
-ERRORS=0
-CLEAR="$(tput sgr0)"
-INFO="$(tput bold)"
-RED="$(tput setaf 1)$(tput bold)"
-GREEN="$(tput setaf 2)$(tput bold)"
-YELLOW="$(tput setaf 3)$(tput bold)"
-info() {
- echo "${INFO}$1${CLEAR}"
-}
-error() {
- ERRORS=$((ERRORS+1))
- echo "${RED}$1${CLEAR}"
-}
-warn() {
- WARNINGS=$((WARNINGS+1))
- echo "${YELLOW}$1${CLEAR}"
-}
-success() {
- echo "${GREEN}$1${CLEAR}"
-}
-die() {
- error "$1"
- cleanup
- exit 1
-}
-debug() {
- if [ $DEBUG -eq 1 ]; then
- echo "$1"
- fi
-}
-
-usage() {
- echo "Usage: $(basename $0) [-v] DEVICE HDDIMG TARGET_DEVICE"
- echo " -v: Verbose debug"
- echo " DEVICE: The device to write the image to, e.g. /dev/sdh"
- echo " HDDIMG: The hddimg file to generate the efi disk from"
- echo " TARGET_DEVICE: The device the target will boot from, e.g. /dev/mmcblk0"
-}
-
-image_details() {
- IMG=$1
- info "Image details"
- echo " image: $(stat --printf '%N\n' $IMG)"
- echo " size: $(stat -L --printf '%s bytes\n' $IMG)"
- echo " modified: $(stat -L --printf '%y\n' $IMG)"
- echo " type: $(file -L -b $IMG)"
- echo ""
-}
-
-device_details() {
- DEV=$1
- BLOCK_SIZE=512
-
- info "Device details"
- echo " device: $DEVICE"
- if [ -f "/sys/class/block/$DEV/device/vendor" ]; then
- echo " vendor: $(cat /sys/class/block/$DEV/device/vendor)"
- else
- echo " vendor: UNKOWN"
- fi
- if [ -f "/sys/class/block/$DEV/device/model" ]; then
- echo " model: $(cat /sys/class/block/$DEV/device/model)"
- else
- echo " model: UNKNOWN"
- fi
- if [ -f "/sys/class/block/$DEV/size" ]; then
- echo " size: $(($(cat /sys/class/block/$DEV/size) * $BLOCK_SIZE)) bytes"
- else
- echo " size: UNKNOWN"
- fi
- echo ""
-}
-
-unmount_device() {
- grep -q $DEVICE /proc/mounts
- if [ $? -eq 0 ]; then
- warn "$DEVICE listed in /proc/mounts, attempting to unmount"
- umount $DEVICE* 2>/dev/null
- return $?
- fi
- return 0
-}
-
-unmount() {
- if [ "$1" = "" ] ; then
- return 0
- fi
- grep -q $1 /proc/mounts
- if [ $? -eq 0 ]; then
- debug "Unmounting $1"
- umount $1
- return $?
- fi
- return 0
-}
-
-#
-# Parse and validate arguments
-#
-if [ $# -lt 3 ] || [ $# -gt 4 ]; then
- usage
- exit 1
-fi
-
-if [ "$1" = "-v" ]; then
- DEBUG=1
- OUT="1"
- shift
-fi
-
-DEVICE=$1
-HDDIMG=$2
-TARGET_DEVICE=$3
-
-LINK=$(readlink $DEVICE)
-if [ $? -eq 0 ]; then
- DEVICE="$LINK"
-fi
-
-if [ ! -w "$DEVICE" ]; then
- usage
- if [ ! -e "${DEVICE}" ] ; then
- die "Device $DEVICE cannot be found"
- else
- die "Device $DEVICE is not writable (need to run under sudo?)"
- fi
-fi
-
-if [ ! -e "$HDDIMG" ]; then
- usage
- die "HDDIMG $HDDIMG does not exist"
-fi
-
-#
-# Ensure the hddimg is not mounted
-#
-unmount "$HDDIMG" || die "Failed to unmount $HDDIMG"
-
-#
-# Check if any $DEVICE partitions are mounted
-#
-unmount_device || die "Failed to unmount $DEVICE"
-
-#
-# Confirm device with user
-#
-image_details $HDDIMG
-device_details $(basename $DEVICE)
-echo -n "${INFO}Prepare EFI image on $DEVICE [y/N]?${CLEAR} "
-read RESPONSE
-if [ "$RESPONSE" != "y" ]; then
- echo "Image creation aborted"
- exit 0
-fi
-
-
-#
-# Prepare the temporary working space
-#
-TMPDIR=$(mktemp -d mkefidisk-XXX) || die "Failed to create temporary mounting directory."
-HDDIMG_MNT=$TMPDIR/hddimg
-HDDIMG_ROOTFS_MNT=$TMPDIR/hddimg_rootfs
-ROOTFS_MNT=$TMPDIR/rootfs
-BOOTFS_MNT=$TMPDIR/bootfs
-mkdir $HDDIMG_MNT || die "Failed to create $HDDIMG_MNT"
-mkdir $HDDIMG_ROOTFS_MNT || die "Failed to create $HDDIMG_ROOTFS_MNT"
-mkdir $ROOTFS_MNT || die "Failed to create $ROOTFS_MNT"
-mkdir $BOOTFS_MNT || die "Failed to create $BOOTFS_MNT"
-
-
-#
-# Partition $DEVICE
-#
-DEVICE_SIZE=$(parted -s $DEVICE unit mb print | grep ^Disk | cut -d" " -f 3 | sed -e "s/MB//")
-# If the device size is not reported there may not be a valid label
-if [ "$DEVICE_SIZE" = "" ] ; then
- parted -s $DEVICE mklabel msdos || die "Failed to create MSDOS partition table"
- DEVICE_SIZE=$(parted -s $DEVICE unit mb print | grep ^Disk | cut -d" " -f 3 | sed -e "s/MB//")
-fi
-SWAP_SIZE=$((DEVICE_SIZE*SWAP_RATIO/100))
-ROOTFS_SIZE=$((DEVICE_SIZE-BOOT_SIZE-SWAP_SIZE))
-ROOTFS_START=$((BOOT_SIZE))
-ROOTFS_END=$((ROOTFS_START+ROOTFS_SIZE))
-SWAP_START=$((ROOTFS_END))
-
-# MMC devices use a partition prefix character 'p'
-PART_PREFIX=""
-if [ ! "${DEVICE#/dev/mmcblk}" = "${DEVICE}" ] || [ ! "${DEVICE#/dev/loop}" = "${DEVICE}" ]; then
- PART_PREFIX="p"
-fi
-BOOTFS=$DEVICE${PART_PREFIX}1
-ROOTFS=$DEVICE${PART_PREFIX}2
-SWAP=$DEVICE${PART_PREFIX}3
-
-TARGET_PART_PREFIX=""
-if [ ! "${TARGET_DEVICE#/dev/mmcblk}" = "${TARGET_DEVICE}" ]; then
- TARGET_PART_PREFIX="p"
-fi
-TARGET_ROOTFS=$TARGET_DEVICE${TARGET_PART_PREFIX}2
-TARGET_SWAP=$TARGET_DEVICE${TARGET_PART_PREFIX}3
-
-echo ""
-info "Boot partition size: $BOOT_SIZE MB ($BOOTFS)"
-info "ROOTFS partition size: $ROOTFS_SIZE MB ($ROOTFS)"
-info "Swap partition size: $SWAP_SIZE MB ($SWAP)"
-echo ""
-
-# Use MSDOS by default as GPT cannot be reliably distributed in disk image form
-# as it requires the backup table to be on the last block of the device, which
-# of course varies from device to device.
-
-info "Partitioning installation media ($DEVICE)"
-
-debug "Deleting partition table on $DEVICE"
-dd if=/dev/zero of=$DEVICE bs=512 count=2 >$OUT 2>&1 || die "Failed to zero beginning of $DEVICE"
-
-debug "Creating new partition table (MSDOS) on $DEVICE"
-parted -s $DEVICE mklabel msdos >$OUT 2>&1 || die "Failed to create MSDOS partition table"
-
-debug "Creating boot partition on $BOOTFS"
-parted -s $DEVICE mkpart primary 0% $BOOT_SIZE >$OUT 2>&1 || die "Failed to create BOOT partition"
-
-debug "Enabling boot flag on $BOOTFS"
-parted -s $DEVICE set 1 boot on >$OUT 2>&1 || die "Failed to enable boot flag"
-
-debug "Creating ROOTFS partition on $ROOTFS"
-parted -s $DEVICE mkpart primary $ROOTFS_START $ROOTFS_END >$OUT 2>&1 || die "Failed to create ROOTFS partition"
-
-debug "Creating swap partition on $SWAP"
-parted -s $DEVICE mkpart primary $SWAP_START 100% >$OUT 2>&1 || die "Failed to create SWAP partition"
-
-if [ $DEBUG -eq 1 ]; then
- parted -s $DEVICE print
-fi
-
-
-#
-# Check if any $DEVICE partitions are mounted after partitioning
-#
-unmount_device || die "Failed to unmount $DEVICE partitions"
-
-
-#
-# Format $DEVICE partitions
-#
-info "Formatting partitions"
-debug "Formatting $BOOTFS as vfat"
-if [ ! "${DEVICE#/dev/loop}" = "${DEVICE}" ]; then
- mkfs.vfat -I $BOOTFS -n "EFI" >$OUT 2>&1 || die "Failed to format $BOOTFS"
-else
- mkfs.vfat $BOOTFS -n "EFI" >$OUT 2>&1 || die "Failed to format $BOOTFS"
-fi
-
-debug "Formatting $ROOTFS as ext3"
-mkfs.ext3 -F $ROOTFS -L "ROOT" >$OUT 2>&1 || die "Failed to format $ROOTFS"
-
-debug "Formatting swap partition ($SWAP)"
-mkswap $SWAP >$OUT 2>&1 || die "Failed to prepare swap"
-
-
-#
-# Installing to $DEVICE
-#
-debug "Mounting images and device in preparation for installation"
-mount -o loop $HDDIMG $HDDIMG_MNT >$OUT 2>&1 || error "Failed to mount $HDDIMG"
-mount -o loop $HDDIMG_MNT/rootfs.img $HDDIMG_ROOTFS_MNT >$OUT 2>&1 || error "Failed to mount rootfs.img"
-mount $ROOTFS $ROOTFS_MNT >$OUT 2>&1 || error "Failed to mount $ROOTFS on $ROOTFS_MNT"
-mount $BOOTFS $BOOTFS_MNT >$OUT 2>&1 || error "Failed to mount $BOOTFS on $BOOTFS_MNT"
-
-info "Preparing boot partition"
-EFIDIR="$BOOTFS_MNT/EFI/BOOT"
-cp $HDDIMG_MNT/vmlinuz $BOOTFS_MNT >$OUT 2>&1 || error "Failed to copy vmlinuz"
-# Copy the efi loader and configs (booti*.efi and grub.cfg if it exists)
-cp -r $HDDIMG_MNT/EFI $BOOTFS_MNT >$OUT 2>&1 || error "Failed to copy EFI dir"
-# Silently ignore a missing gummiboot loader dir (we might just be a GRUB image)
-cp -r $HDDIMG_MNT/loader $BOOTFS_MNT >$OUT 2>&1
-
-# Update the boot loaders configurations for an installed image
-# Remove any existing root= kernel parameters and:
-# o Add a root= parameter with the target rootfs
-# o Specify ro so fsck can be run during boot
-# o Specify rootwait in case the target media is an asyncronous block device
-# such as MMC or USB disks
-# o Specify "quiet" to minimize boot time when using slow serial consoles
-
-# Look for a GRUB installation
-GRUB_CFG="$EFIDIR/grub.cfg"
-if [ -e "$GRUB_CFG" ]; then
- info "Configuring GRUB"
- # Delete the install entry
- sed -i "/menuentry 'install'/,/^}/d" $GRUB_CFG
- # Delete the initrd lines
- sed -i "/initrd /d" $GRUB_CFG
- # Delete any LABEL= strings
- sed -i "s/ LABEL=[^ ]*/ /" $GRUB_CFG
-
- sed -i "s@ root=[^ ]*@ @" $GRUB_CFG
- sed -i "s@vmlinuz @vmlinuz root=$TARGET_ROOTFS ro rootwait quiet @" $GRUB_CFG
-fi
-
-# Look for a gummiboot installation
-GUMMI_ENTRIES="$BOOTFS_MNT/loader/entries"
-GUMMI_CFG="$GUMMI_ENTRIES/boot.conf"
-if [ -d "$GUMMI_ENTRIES" ]; then
- info "Configuring Gummiboot"
- # remove the install target if it exists
- rm $GUMMI_ENTRIES/install.conf >$OUT 2>&1
-
- if [ ! -e "$GUMMI_CFG" ]; then
- echo "ERROR: $GUMMI_CFG not found"
- fi
-
- sed -i "/initrd /d" $GUMMI_CFG
- sed -i "s@ root=[^ ]*@ @" $GUMMI_CFG
- sed -i "s@options *LABEL=boot @options LABEL=Boot root=$TARGET_ROOTFS ro rootwait quiet @" $GUMMI_CFG
-fi
-
-# Ensure we have at least one EFI bootloader configured
-if [ ! -e $GRUB_CFG ] && [ ! -e $GUMMI_CFG ]; then
- die "No EFI bootloader configuration found"
-fi
-
-
-info "Copying ROOTFS files (this may take a while)"
-cp -a $HDDIMG_ROOTFS_MNT/* $ROOTFS_MNT >$OUT 2>&1 || die "Root FS copy failed"
-
-echo "$TARGET_SWAP swap swap defaults 0 0" >> $ROOTFS_MNT/etc/fstab
-
-# We dont want udev to mount our root device while we're booting...
-if [ -d $ROOTFS_MNT/etc/udev/ ] ; then
- echo "$TARGET_DEVICE" >> $ROOTFS_MNT/etc/udev/mount.blacklist
-fi
-
-
-# Call cleanup to unmount devices and images and remove the TMPDIR
-cleanup
-
-echo ""
-if [ $WARNINGS -ne 0 ] && [ $ERRORS -eq 0 ]; then
- echo "${YELLOW}Installation completed with warnings${CLEAR}"
- echo "${YELLOW}Warnings: $WARNINGS${CLEAR}"
-elif [ $ERRORS -ne 0 ]; then
- echo "${RED}Installation encountered errors${CLEAR}"
- echo "${RED}Errors: $ERRORS${CLEAR}"
- echo "${YELLOW}Warnings: $WARNINGS${CLEAR}"
-else
- success "Installation completed successfully"
-fi
-echo ""
diff --git a/scripts/contrib/oe-build-perf-report-email.py b/scripts/contrib/oe-build-perf-report-email.py
new file mode 100755
index 0000000000..de3862c897
--- /dev/null
+++ b/scripts/contrib/oe-build-perf-report-email.py
@@ -0,0 +1,276 @@
+#!/usr/bin/python3
+#
+# Send build performance test report emails
+#
+# Copyright (c) 2017, Intel Corporation.
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import argparse
+import base64
+import logging
+import os
+import pwd
+import re
+import shutil
+import smtplib
+import socket
+import subprocess
+import sys
+import tempfile
+from email.mime.image import MIMEImage
+from email.mime.multipart import MIMEMultipart
+from email.mime.text import MIMEText
+
+
+# Setup logging
+logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
+log = logging.getLogger('oe-build-perf-report')
+
+
+# Find the JS scraper script
+SCRAPE_JS = os.path.join(os.path.dirname(__file__), '..', 'lib', 'build_perf',
+ 'scrape-html-report.js')
+if not os.path.isfile(SCRAPE_JS):
+ log.error("Unableto find oe-build-perf-report-scrape.js")
+ sys.exit(1)
+
+
+class ReportError(Exception):
+ """Local errors"""
+ pass
+
+
+def check_utils():
+ """Check that all needed utils are installed in the system"""
+ missing = []
+ for cmd in ('phantomjs', 'optipng'):
+ if not shutil.which(cmd):
+ missing.append(cmd)
+ if missing:
+ log.error("The following tools are missing: %s", ' '.join(missing))
+ sys.exit(1)
+
+
+def parse_args(argv):
+ """Parse command line arguments"""
+ description = """Email build perf test report"""
+ parser = argparse.ArgumentParser(
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+ description=description)
+
+ parser.add_argument('--debug', '-d', action='store_true',
+ help="Verbose logging")
+ parser.add_argument('--quiet', '-q', action='store_true',
+ help="Only print errors")
+ parser.add_argument('--to', action='append',
+ help="Recipients of the email")
+ parser.add_argument('--cc', action='append',
+ help="Carbon copy recipients of the email")
+ parser.add_argument('--bcc', action='append',
+ help="Blind carbon copy recipients of the email")
+ parser.add_argument('--subject', default="Yocto build perf test report",
+ help="Email subject")
+ parser.add_argument('--outdir', '-o',
+ help="Store files in OUTDIR. Can be used to preserve "
+ "the email parts")
+ parser.add_argument('--text',
+ help="Plain text message")
+ parser.add_argument('--html',
+ help="HTML peport generated by oe-build-perf-report")
+ parser.add_argument('--phantomjs-args', action='append',
+ help="Extra command line arguments passed to PhantomJS")
+
+ args = parser.parse_args(argv)
+
+ if not args.html and not args.text:
+ parser.error("Please specify --html and/or --text")
+
+ return args
+
+
+def decode_png(infile, outfile):
+ """Parse/decode/optimize png data from a html element"""
+ with open(infile) as f:
+ raw_data = f.read()
+
+ # Grab raw base64 data
+ b64_data = re.sub('^.*href="data:image/png;base64,', '', raw_data, 1)
+ b64_data = re.sub('">.+$', '', b64_data, 1)
+
+ # Replace file with proper decoded png
+ with open(outfile, 'wb') as f:
+ f.write(base64.b64decode(b64_data))
+
+ subprocess.check_output(['optipng', outfile], stderr=subprocess.STDOUT)
+
+
+def mangle_html_report(infile, outfile, pngs):
+ """Mangle html file into a email compatible format"""
+ paste = True
+ png_dir = os.path.dirname(outfile)
+ with open(infile) as f_in:
+ with open(outfile, 'w') as f_out:
+ for line in f_in.readlines():
+ stripped = line.strip()
+ # Strip out scripts
+ if stripped == '<!--START-OF-SCRIPTS-->':
+ paste = False
+ elif stripped == '<!--END-OF-SCRIPTS-->':
+ paste = True
+ elif paste:
+ if re.match('^.+href="data:image/png;base64', stripped):
+ # Strip out encoded pngs (as they're huge in size)
+ continue
+ elif 'www.gstatic.com' in stripped:
+ # HACK: drop references to external static pages
+ continue
+
+ # Replace charts with <img> elements
+                    match = re.match(r'<div id="(?P<id>\w+)"', stripped)
+ if match and match.group('id') in pngs:
+ f_out.write('<img src="cid:{}"\n'.format(match.group('id')))
+ else:
+ f_out.write(line)
+
+
+def scrape_html_report(report, outdir, phantomjs_extra_args=None):
+ """Scrape html report into a format sendable by email"""
+ tmpdir = tempfile.mkdtemp(dir='.')
+ log.debug("Using tmpdir %s for phantomjs output", tmpdir)
+
+ if not os.path.isdir(outdir):
+ os.mkdir(outdir)
+ if os.path.splitext(report)[1] not in ('.html', '.htm'):
+ raise ReportError("Invalid file extension for report, needs to be "
+ "'.html' or '.htm'")
+
+ try:
+ log.info("Scraping HTML report with PhangomJS")
+ extra_args = phantomjs_extra_args if phantomjs_extra_args else []
+ subprocess.check_output(['phantomjs', '--debug=true'] + extra_args +
+ [SCRAPE_JS, report, tmpdir],
+ stderr=subprocess.STDOUT)
+
+ pngs = []
+ images = []
+ for fname in os.listdir(tmpdir):
+ base, ext = os.path.splitext(fname)
+ if ext == '.png':
+ log.debug("Decoding %s", fname)
+ decode_png(os.path.join(tmpdir, fname),
+ os.path.join(outdir, fname))
+ pngs.append(base)
+ images.append(fname)
+ elif ext in ('.html', '.htm'):
+ report_file = fname
+ else:
+ log.warning("Unknown file extension: '%s'", ext)
+ #shutil.move(os.path.join(tmpdir, fname), outdir)
+
+ log.debug("Mangling html report file %s", report_file)
+ mangle_html_report(os.path.join(tmpdir, report_file),
+ os.path.join(outdir, report_file), pngs)
+ return (os.path.join(outdir, report_file),
+ [os.path.join(outdir, i) for i in images])
+ finally:
+ shutil.rmtree(tmpdir)
+
+def send_email(text_fn, html_fn, image_fns, subject, recipients, copy=[],
+ blind_copy=[]):
+ """Send email"""
+ # Generate email message
+ text_msg = html_msg = None
+ if text_fn:
+ with open(text_fn) as f:
+ text_msg = MIMEText("Yocto build performance test report.\n" +
+ f.read(), 'plain')
+ if html_fn:
+        html_msg = MIMEMultipart('related')
+ with open(html_fn) as f:
+ html_msg.attach(MIMEText(f.read(), 'html'))
+ for img_fn in image_fns:
+ # Expect that content id is same as the filename
+ cid = os.path.splitext(os.path.basename(img_fn))[0]
+ with open(img_fn, 'rb') as f:
+ image_msg = MIMEImage(f.read())
+ image_msg['Content-ID'] = '<{}>'.format(cid)
+ html_msg.attach(image_msg)
+
+ if text_msg and html_msg:
+ msg = MIMEMultipart('alternative')
+ msg.attach(text_msg)
+ msg.attach(html_msg)
+ elif text_msg:
+ msg = text_msg
+ elif html_msg:
+ msg = html_msg
+ else:
+ raise ReportError("Neither plain text nor html body specified")
+
+ pw_data = pwd.getpwuid(os.getuid())
+ full_name = pw_data.pw_gecos.split(',')[0]
+ email = os.environ.get('EMAIL',
+ '{}@{}'.format(pw_data.pw_name, socket.getfqdn()))
+ msg['From'] = "{} <{}>".format(full_name, email)
+ msg['To'] = ', '.join(recipients)
+ if copy:
+ msg['Cc'] = ', '.join(copy)
+ if blind_copy:
+ msg['Bcc'] = ', '.join(blind_copy)
+ msg['Subject'] = subject
+
+ # Send email
+ with smtplib.SMTP('localhost') as smtp:
+ smtp.send_message(msg)
+
+
+def main(argv=None):
+ """Script entry point"""
+ args = parse_args(argv)
+ if args.quiet:
+ log.setLevel(logging.ERROR)
+ if args.debug:
+ log.setLevel(logging.DEBUG)
+
+ check_utils()
+
+ if args.outdir:
+ outdir = args.outdir
+ if not os.path.exists(outdir):
+ os.mkdir(outdir)
+ else:
+ outdir = tempfile.mkdtemp(dir='.')
+
+ try:
+ log.debug("Storing email parts in %s", outdir)
+ html_report = images = None
+ if args.html:
+ html_report, images = scrape_html_report(args.html, outdir,
+ args.phantomjs_args)
+
+ if args.to:
+ log.info("Sending email to %s", ', '.join(args.to))
+ if args.cc:
+ log.info("Copying to %s", ', '.join(args.cc))
+ if args.bcc:
+ log.info("Blind copying to %s", ', '.join(args.bcc))
+ send_email(args.text, html_report, images, args.subject,
+ args.to, args.cc, args.bcc)
+ except subprocess.CalledProcessError as err:
+ log.error("%s, with output:\n%s", str(err), err.output.decode())
+ return 1
+ except ReportError as err:
+ log.error(err)
+ return 1
+ finally:
+ if not args.outdir:
+ log.debug("Wiping %s", outdir)
+ shutil.rmtree(outdir)
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
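+
+# Example invocation (a sketch; the report file and address are illustrative):
+#   oe-build-perf-report-email.py --html report.html --to dev@example.com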
diff --git a/scripts/contrib/patchreview.py b/scripts/contrib/patchreview.py
new file mode 100755
index 0000000000..62c509f51c
--- /dev/null
+++ b/scripts/contrib/patchreview.py
@@ -0,0 +1,238 @@
+#! /usr/bin/env python3
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+# TODO
+# - option to just list all broken files
+# - test suite
+# - validate signed-off-by
+
+status_values = ("accepted", "pending", "inappropriate", "backport", "submitted", "denied")
+
+class Result:
+ # Whether the patch has an Upstream-Status or not
+ missing_upstream_status = False
+ # If the Upstream-Status tag is malformed in some way (string for bad bit)
+ malformed_upstream_status = None
+ # If the Upstream-Status value is unknown (boolean)
+ unknown_upstream_status = False
+ # The upstream status value (Pending, etc)
+ upstream_status = None
+ # Whether the patch has a Signed-off-by or not
+ missing_sob = False
+    # The malformed Signed-off-by value, if the tag is malformed in some way
+    malformed_sob = False
+ # The Signed-off-by tag value
+ sob = None
+ # Whether a patch looks like a CVE but doesn't have a CVE tag
+ missing_cve = False
+
+def blame_patch(patch):
+ """
+ From a patch filename, return a list of "commit summary (author name <author
+ email>)" strings representing the history.
+ """
+ import subprocess
+ return subprocess.check_output(("git", "log",
+ "--follow", "--find-renames", "--diff-filter=A",
+ "--format=%s (%aN <%aE>)",
+ "--", patch)).decode("utf-8").splitlines()
+
+def patchreview(path, patches):
+ import re, os.path
+
+ # General pattern: start of line, optional whitespace, tag with optional
+ # hyphen or spaces, maybe a colon, some whitespace, then the value, all case
+ # insensitive.
+ sob_re = re.compile(r"^[\t ]*(Signed[-_ ]off[-_ ]by:?)[\t ]*(.+)", re.IGNORECASE | re.MULTILINE)
+ status_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*(\w*)", re.IGNORECASE | re.MULTILINE)
+ cve_tag_re = re.compile(r"^[\t ]*(CVE:)[\t ]*(.*)", re.IGNORECASE | re.MULTILINE)
+ cve_re = re.compile(r"cve-[0-9]{4}-[0-9]{4,6}", re.IGNORECASE)
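+    # Example tag lines these match (illustrative):
+    #   Signed-off-by: Jane Doe <jane@example.com>
+    #   Upstream-Status: Pending
+    #   CVE: CVE-2017-1000117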
+
+ results = {}
+
+ for patch in patches:
+
+ fullpath = os.path.join(path, patch)
+ result = Result()
+ results[fullpath] = result
+
+        with open(fullpath, encoding='ascii', errors='ignore') as f:
+            content = f.read()
+
+ # Find the Signed-off-by tag
+ match = sob_re.search(content)
+ if match:
+ value = match.group(1)
+ if value != "Signed-off-by:":
+ result.malformed_sob = value
+ result.sob = match.group(2)
+ else:
+ result.missing_sob = True
+
+
+ # Find the Upstream-Status tag
+ match = status_re.search(content)
+ if match:
+ value = match.group(1)
+ if value != "Upstream-Status:":
+ result.malformed_upstream_status = value
+
+ value = match.group(2).lower()
+ # TODO: check case
+ if value not in status_values:
+ result.unknown_upstream_status = True
+ result.upstream_status = value
+ else:
+ result.missing_upstream_status = True
+
+ # Check that patches which looks like CVEs have CVE tags
+ if cve_re.search(patch) or cve_re.search(content):
+ if not cve_tag_re.search(content):
+ result.missing_cve = True
+ # TODO: extract CVE list
+
+ return results
+
+
+def analyse(results, want_blame=False, verbose=True):
+ """
+ want_blame: display blame data for each malformed patch
+ verbose: display per-file results instead of just summary
+ """
+
+ # want_blame requires verbose, so disable blame if we're not verbose
+ if want_blame and not verbose:
+ want_blame = False
+
+ total_patches = 0
+ missing_sob = 0
+ malformed_sob = 0
+ missing_status = 0
+ malformed_status = 0
+ missing_cve = 0
+ pending_patches = 0
+
+ for patch in sorted(results):
+ r = results[patch]
+ total_patches += 1
+ need_blame = False
+
+ # Build statistics
+ if r.missing_sob:
+ missing_sob += 1
+ if r.malformed_sob:
+ malformed_sob += 1
+ if r.missing_upstream_status:
+ missing_status += 1
+ if r.malformed_upstream_status or r.unknown_upstream_status:
+ malformed_status += 1
+ # Count patches with no status as pending
+            pending_patches += 1
+ if r.missing_cve:
+ missing_cve += 1
+ if r.upstream_status == "pending":
+ pending_patches += 1
+
+ # Output warnings
+ if r.missing_sob:
+ need_blame = True
+ if verbose:
+ print("Missing Signed-off-by tag (%s)" % patch)
+ if r.malformed_sob:
+ need_blame = True
+ if verbose:
+ print("Malformed Signed-off-by '%s' (%s)" % (r.malformed_sob, patch))
+ if r.missing_cve:
+ need_blame = True
+ if verbose:
+ print("Missing CVE tag (%s)" % patch)
+ if r.missing_upstream_status:
+ need_blame = True
+ if verbose:
+ print("Missing Upstream-Status tag (%s)" % patch)
+ if r.malformed_upstream_status:
+ need_blame = True
+ if verbose:
+ print("Malformed Upstream-Status '%s' (%s)" % (r.malformed_upstream_status, patch))
+ if r.unknown_upstream_status:
+ need_blame = True
+ if verbose:
+ print("Unknown Upstream-Status value '%s' (%s)" % (r.upstream_status, patch))
+
+ if want_blame and need_blame:
+ print("\n".join(blame_patch(patch)) + "\n")
+
+ def percent(num):
+ try:
+ return "%d (%d%%)" % (num, round(num * 100.0 / total_patches))
+ except ZeroDivisionError:
+ return "N/A"
+
+ if verbose:
+ print()
+
+ print("""Total patches found: %d
+Patches missing Signed-off-by: %s
+Patches with malformed Signed-off-by: %s
+Patches missing CVE: %s
+Patches missing Upstream-Status: %s
+Patches with malformed Upstream-Status: %s
+Patches in Pending state: %s""" % (total_patches,
+ percent(missing_sob),
+ percent(malformed_sob),
+ percent(missing_cve),
+ percent(missing_status),
+ percent(malformed_status),
+ percent(pending_patches)))
+
+
+
+def histogram(results):
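+    # Requires the third-party 'toolz' package (e.g. 'pip install toolz')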
+ from toolz import recipes, dicttoolz
+ import math
+ counts = recipes.countby(lambda r: r.upstream_status, results.values())
+ bars = dicttoolz.valmap(lambda v: "#" * int(math.ceil(float(v) / len(results) * 100)), counts)
+ for k in bars:
+ print("%-20s %s (%d)" % (k.capitalize() if k else "No status", bars[k], counts[k]))
+
+
+if __name__ == "__main__":
+ import argparse, subprocess, os
+
+ args = argparse.ArgumentParser(description="Patch Review Tool")
+ args.add_argument("-b", "--blame", action="store_true", help="show blame for malformed patches")
+ args.add_argument("-v", "--verbose", action="store_true", help="show per-patch results")
+ args.add_argument("-g", "--histogram", action="store_true", help="show patch histogram")
+ args.add_argument("-j", "--json", help="update JSON")
+ args.add_argument("directory", help="directory to scan")
+ args = args.parse_args()
+
+ patches = subprocess.check_output(("git", "-C", args.directory, "ls-files", "recipes-*/**/*.patch", "recipes-*/**/*.diff")).decode("utf-8").split()
+ results = patchreview(args.directory, patches)
+ analyse(results, want_blame=args.blame, verbose=args.verbose)
+
+ if args.json:
+ import json, os.path, collections
+        if os.path.isfile(args.json):
+            with open(args.json) as f:
+                data = json.load(f)
+        else:
+            data = []
+
+ row = collections.Counter()
+ row["total"] = len(results)
+ row["date"] = subprocess.check_output(["git", "-C", args.directory, "show", "-s", "--pretty=format:%cd", "--date=format:%s"]).decode("utf-8").strip()
+ for r in results.values():
+ if r.upstream_status in status_values:
+ row[r.upstream_status] += 1
+ if r.malformed_upstream_status or r.missing_upstream_status:
+ row['malformed-upstream-status'] += 1
+ if r.malformed_sob or r.missing_sob:
+ row['malformed-sob'] += 1
+
+ data.append(row)
+        with open(args.json, "w") as f:
+            json.dump(data, f)
+
+ if args.histogram:
+ print()
+ histogram(results)
diff --git a/scripts/contrib/patchtest.sh b/scripts/contrib/patchtest.sh
new file mode 100755
index 0000000000..b1e1ea334b
--- /dev/null
+++ b/scripts/contrib/patchtest.sh
@@ -0,0 +1,104 @@
+#!/bin/bash
+#
+# patchtest: Run patchtest on commits starting at master
+#
+# Copyright (c) 2017, Intel Corporation.
+#
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+
+set -o errexit
+
+# Default values
+pokydir=''
+
+usage() {
+CMD=$(basename $0)
+cat <<EOM
+Usage: $CMD [-h] [-p pokydir]
+ -p pokydir Defaults to current directory
+EOM
+>&2
+ exit 1
+}
+
+function clone() {
+ local REPOREMOTE=$1
+ local REPODIR=$2
+ if [ ! -d $REPODIR ]; then
+ git clone $REPOREMOTE $REPODIR --quiet
+ else
+ ( cd $REPODIR; git pull --quiet )
+ fi
+}
+
+while getopts ":p:h" opt; do
+ case $opt in
+ p)
+ pokydir=$OPTARG
+ ;;
+ h)
+ usage
+ ;;
+ \?)
+ echo "Invalid option: -$OPTARG" >&2
+ usage
+ ;;
+ :)
+ echo "Option -$OPTARG requires an argument." >&2
+ usage
+ ;;
+ esac
+done
+shift $((OPTIND-1))
+
+CDIR="$PWD"
+
+# default pokydir to current directory if user did not specify one
+if [ -z "$pokydir" ]; then
+ pokydir="$CDIR"
+fi
+
+PTENV="$PWD/patchtest"
+PT="$PTENV/patchtest"
+PTOE="$PTENV/patchtest-oe"
+
+if ! which virtualenv > /dev/null; then
+ echo "Install virtualenv before proceeding"
+ exit 1;
+fi
+
+# activate the virtual env
+virtualenv $PTENV --quiet
+source $PTENV/bin/activate
+
+cd $PTENV
+
+# clone or pull
+clone git://git.yoctoproject.org/patchtest $PT
+clone git://git.yoctoproject.org/patchtest-oe $PTOE
+
+# install requirements
+pip install -r $PT/requirements.txt --quiet
+pip install -r $PTOE/requirements.txt --quiet
+
+PATH="$PT:$PT/scripts:$PATH"
+
+# loop through parent to HEAD and execute patchtest on each commit
+for commit in $(git rev-list master..HEAD --reverse)
+do
+ shortlog="$(git log "$commit^1..$commit" --pretty='%h: %aN: %cd: %s')"
+ log="$(git format-patch "$commit^1..$commit" --stdout | patchtest - -r $pokydir -s $PTOE/tests --base-commit $commit^1 --json 2>/dev/null | create-summary --fail --only-results)"
+ if [ -z "$log" ]; then
+ shortlog="$shortlog: OK"
+ else
+ shortlog="$shortlog: FAIL"
+ fi
+ echo "$shortlog"
+ echo "$log" | sed -n -e '/Issue/p' -e '/Suggested fix/p'
+ echo ""
+done
+
+deactivate
+
+cd $CDIR
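
A rough Python equivalent of the commit loop above, for illustration only; it assumes a "patchtest" executable on PATH and the current directory being a git repository, and it omits the -r/-s/--base-commit flags the shell script passes:

    import subprocess

    def test_branch(base="master"):
        # Commits on the current branch that are not on the base branch,
        # oldest first, mirroring "git rev-list master..HEAD --reverse"
        revs = subprocess.check_output(
            ["git", "rev-list", "%s..HEAD" % base, "--reverse"]).decode().split()
        for commit in revs:
            # Feed each commit as a mailbox patch to patchtest on stdin
            patch = subprocess.check_output(
                ["git", "format-patch", "-1", commit, "--stdout"])
            proc = subprocess.run(["patchtest", "-"], input=patch,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.DEVNULL)
            verdict = "OK" if not proc.stdout.strip() else "FAIL"
            print("%s: %s" % (commit[:10], verdict))
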
diff --git a/scripts/contrib/python/generate-manifest-2.7.py b/scripts/contrib/python/generate-manifest-2.7.py
deleted file mode 100755
index 936522efc5..0000000000
--- a/scripts/contrib/python/generate-manifest-2.7.py
+++ /dev/null
@@ -1,398 +0,0 @@
-#!/usr/bin/env python
-
-# generate Python Manifest for the OpenEmbedded build system
-# (C) 2002-2010 Michael 'Mickey' Lauer <mlauer@vanille-media.de>
-# (C) 2007 Jeremy Laine
-# licensed under MIT, see COPYING.MIT
-#
-# June 22, 2011 -- Mark Hatle <mark.hatle@windriver.com>
-# * Updated to no longer generate special -dbg package, instead use the
-# single system -dbg
-# * Update version with ".1" to indicate this change
-
-import os
-import sys
-import time
-
-VERSION = "2.7.2"
-
-__author__ = "Michael 'Mickey' Lauer <mlauer@vanille-media.de>"
-__version__ = "20110222.2"
-
-class MakefileMaker:
-
- def __init__( self, outfile ):
- """initialize"""
- self.packages = {}
- self.targetPrefix = "${libdir}/python%s/" % VERSION[:3]
- self.output = outfile
- self.out( """
-# WARNING: This file is AUTO GENERATED: Manual edits will be lost next time I regenerate the file.
-# Generator: '%s' Version %s (C) 2002-2010 Michael 'Mickey' Lauer <mlauer@vanille-media.de>
-# Visit the Python for Embedded Systems Site => http://www.Vanille.de/projects/python.spy
-""" % ( sys.argv[0], __version__ ) )
-
- #
- # helper functions
- #
-
- def out( self, data ):
- """print a line to the output file"""
- self.output.write( "%s\n" % data )
-
- def setPrefix( self, targetPrefix ):
- """set a file prefix for addPackage files"""
- self.targetPrefix = targetPrefix
-
- def doProlog( self ):
- self.out( """ """ )
- self.out( "" )
-
- def addPackage( self, name, description, dependencies, filenames ):
- """add a package to the Makefile"""
- if type( filenames ) == type( "" ):
- filenames = filenames.split()
- fullFilenames = []
- for filename in filenames:
- if filename[0] != "$":
- fullFilenames.append( "%s%s" % ( self.targetPrefix, filename ) )
- else:
- fullFilenames.append( filename )
- self.packages[name] = description, dependencies, fullFilenames
-
- def doBody( self ):
- """generate body of Makefile"""
-
- global VERSION
-
- #
- # generate provides line
- #
-
- provideLine = 'PROVIDES+="'
- for name in sorted(self.packages):
- provideLine += "%s " % name
- provideLine += '"'
-
- self.out( provideLine )
- self.out( "" )
-
- #
- # generate package line
- #
-
- packageLine = 'PACKAGES="${PN}-dbg '
- for name in sorted(self.packages):
- if name.startswith("${PN}-distutils"):
- if name == "${PN}-distutils":
- packageLine += "%s-staticdev %s " % (name, name)
- elif name != '${PN}-dbg':
- packageLine += "%s " % name
- packageLine += '${PN}-modules"'
-
- self.out( packageLine )
- self.out( "" )
-
- #
- # generate package variables
- #
-
- for name, data in sorted(self.packages.iteritems()):
- desc, deps, files = data
-
- #
- # write out the description, revision and dependencies
- #
- self.out( 'SUMMARY_%s="%s"' % ( name, desc ) )
- self.out( 'RDEPENDS_%s="%s"' % ( name, deps ) )
-
- line = 'FILES_%s="' % name
-
- #
- # check which directories to make in the temporary directory
- #
-
- dirset = {} # if python had a set-datatype this would be sufficient. for now, we're using a dict instead.
- for target in files:
- dirset[os.path.dirname( target )] = True
-
- #
- # generate which files to copy for the target (-dfR because whole directories are also allowed)
- #
-
- for target in files:
- line += "%s " % target
-
- line += '"'
- self.out( line )
- self.out( "" )
-
- self.out( 'SUMMARY_${PN}-modules="All Python modules"' )
- line = 'RDEPENDS_${PN}-modules="'
-
- for name, data in sorted(self.packages.iteritems()):
- if name not in ['${PN}-dev', '${PN}-distutils-staticdev']:
- line += "%s " % name
-
- self.out( "%s \"" % line )
- self.out( 'ALLOW_EMPTY_${PN}-modules = "1"' )
-
- def doEpilog( self ):
- self.out( """""" )
- self.out( "" )
-
- def make( self ):
- self.doProlog()
- self.doBody()
- self.doEpilog()
-
-if __name__ == "__main__":
-
- if len( sys.argv ) > 1:
- try:
- os.unlink(sys.argv[1])
- except Exception:
- sys.exc_clear()
- outfile = file( sys.argv[1], "w" )
- else:
- outfile = sys.stdout
-
- m = MakefileMaker( outfile )
-
- # Add packages here. Only specify dlopen-style library dependencies here, no ldd-style dependencies!
- # Parameters: revision, name, description, dependencies, filenames
- #
-
- m.addPackage( "${PN}-core", "Python interpreter and core modules", "${PN}-lang ${PN}-re",
- "__future__.* _abcoll.* abc.* copy.* copy_reg.* ConfigParser.* " +
- "genericpath.* getopt.* linecache.* new.* " +
- "os.* posixpath.* struct.* " +
- "warnings.* site.* stat.* " +
- "UserDict.* UserList.* UserString.* " +
- "lib-dynload/binascii.so lib-dynload/_struct.so lib-dynload/time.so " +
- "lib-dynload/xreadlines.so types.* platform.* ${bindir}/python* " +
- "_weakrefset.* sysconfig.* _sysconfigdata.* config/Makefile " +
- "${includedir}/python${PYTHON_MAJMIN}/pyconfig*.h " +
- "${libdir}/python${PYTHON_MAJMIN}/sitecustomize.py ")
-
- m.addPackage( "${PN}-dev", "Python development package", "${PN}-core",
- "${includedir} " +
- "${libdir}/lib*${SOLIBSDEV} " +
- "${libdir}/*.la " +
- "${libdir}/*.a " +
- "${libdir}/*.o " +
- "${libdir}/pkgconfig " +
- "${base_libdir}/*.a " +
- "${base_libdir}/*.o " +
- "${datadir}/aclocal " +
- "${datadir}/pkgconfig " )
-
- m.addPackage( "${PN}-2to3", "Python automated Python 2 to 3 code translator", "${PN}-core",
- "${bindir}/2to3 lib2to3" ) # package
-
- m.addPackage( "${PN}-idle", "Python Integrated Development Environment", "${PN}-core ${PN}-tkinter",
- "${bindir}/idle idlelib" ) # package
-
- m.addPackage( "${PN}-pydoc", "Python interactive help support", "${PN}-core ${PN}-lang ${PN}-stringold ${PN}-re",
- "${bindir}/pydoc pydoc.* pydoc_data" )
-
- m.addPackage( "${PN}-smtpd", "Python Simple Mail Transport Daemon", "${PN}-core ${PN}-netserver ${PN}-email ${PN}-mime",
- "${bindir}/smtpd.* smtpd.*" )
-
- m.addPackage( "${PN}-audio", "Python Audio Handling", "${PN}-core",
- "wave.* chunk.* sndhdr.* lib-dynload/ossaudiodev.so lib-dynload/audioop.so audiodev.* sunaudio.* sunau.* toaiff.*" )
-
- m.addPackage( "${PN}-bsddb", "Python bindings for the Berkeley Database", "${PN}-core",
- "bsddb lib-dynload/_bsddb.so" ) # package
-
- m.addPackage( "${PN}-codecs", "Python codecs, encodings & i18n support", "${PN}-core ${PN}-lang",
- "codecs.* encodings gettext.* locale.* lib-dynload/_locale.so lib-dynload/_codecs* lib-dynload/_multibytecodec.so lib-dynload/unicodedata.so stringprep.* xdrlib.*" )
-
- m.addPackage( "${PN}-compile", "Python bytecode compilation support", "${PN}-core",
- "py_compile.* compileall.*" )
-
- m.addPackage( "${PN}-compiler", "Python compiler support", "${PN}-core",
- "compiler" ) # package
-
- m.addPackage( "${PN}-compression", "Python high-level compression support", "${PN}-core ${PN}-zlib",
- "gzip.* zipfile.* tarfile.* lib-dynload/bz2.so" )
-
- m.addPackage( "${PN}-crypt", "Python basic cryptographic and hashing support", "${PN}-core",
- "hashlib.* md5.* sha.* lib-dynload/crypt.so lib-dynload/_hashlib.so lib-dynload/_sha256.so lib-dynload/_sha512.so" )
-
- m.addPackage( "${PN}-textutils", "Python option parsing, text wrapping and CSV support", "${PN}-core ${PN}-io ${PN}-re ${PN}-stringold",
- "lib-dynload/_csv.so csv.* optparse.* textwrap.*" )
-
- m.addPackage( "${PN}-curses", "Python curses support", "${PN}-core",
- "curses lib-dynload/_curses.so lib-dynload/_curses_panel.so" ) # directory + low level module
-
- m.addPackage( "${PN}-ctypes", "Python C types support", "${PN}-core",
- "ctypes lib-dynload/_ctypes.so lib-dynload/_ctypes_test.so" ) # directory + low level module
-
- m.addPackage( "${PN}-datetime", "Python calendar and time support", "${PN}-core ${PN}-codecs",
- "_strptime.* calendar.* lib-dynload/datetime.so" )
-
- m.addPackage( "${PN}-db", "Python file-based database support", "${PN}-core",
- "anydbm.* dumbdbm.* whichdb.* " )
-
- m.addPackage( "${PN}-debugger", "Python debugger", "${PN}-core ${PN}-io ${PN}-lang ${PN}-re ${PN}-stringold ${PN}-shell ${PN}-pprint",
- "bdb.* pdb.*" )
-
- m.addPackage( "${PN}-difflib", "Python helpers for computing deltas between objects", "${PN}-lang ${PN}-re",
- "difflib.*" )
-
- m.addPackage( "${PN}-distutils-staticdev", "Python distribution utilities (static libraries)", "${PN}-distutils",
- "config/lib*.a" ) # package
-
- m.addPackage( "${PN}-distutils", "Python Distribution Utilities", "${PN}-core",
- "config distutils" ) # package
-
- m.addPackage( "${PN}-doctest", "Python framework for running examples in docstrings", "${PN}-core ${PN}-lang ${PN}-io ${PN}-re ${PN}-unittest ${PN}-debugger ${PN}-difflib",
- "doctest.*" )
-
- # FIXME consider adding to some higher level package
- m.addPackage( "${PN}-elementtree", "Python elementree", "${PN}-core",
- "lib-dynload/_elementtree.so" )
-
- m.addPackage( "${PN}-email", "Python email support", "${PN}-core ${PN}-io ${PN}-re ${PN}-mime ${PN}-audio ${PN}-image ${PN}-netclient",
- "imaplib.* email" ) # package
-
- m.addPackage( "${PN}-fcntl", "Python's fcntl interface", "${PN}-core",
- "lib-dynload/fcntl.so" )
-
- m.addPackage( "${PN}-hotshot", "Python hotshot performance profiler", "${PN}-core",
- "hotshot lib-dynload/_hotshot.so" )
-
- m.addPackage( "${PN}-html", "Python HTML processing support", "${PN}-core",
- "formatter.* htmlentitydefs.* htmllib.* markupbase.* sgmllib.* HTMLParser.* " )
-
- m.addPackage( "${PN}-importlib", "Python import implementation library", "${PN}-core",
- "importlib" )
-
- m.addPackage( "${PN}-gdbm", "Python GNU database support", "${PN}-core",
- "lib-dynload/gdbm.so" )
-
- m.addPackage( "${PN}-image", "Python graphical image handling", "${PN}-core",
- "colorsys.* imghdr.* lib-dynload/imageop.so lib-dynload/rgbimg.so" )
-
- m.addPackage( "${PN}-io", "Python low-level I/O", "${PN}-core ${PN}-math ${PN}-textutils ${PN}-netclient ${PN}-contextlib",
- "lib-dynload/_socket.so lib-dynload/_io.so lib-dynload/_ssl.so lib-dynload/select.so lib-dynload/termios.so lib-dynload/cStringIO.so " +
- "pipes.* socket.* ssl.* tempfile.* StringIO.* io.* _pyio.*" )
-
- m.addPackage( "${PN}-json", "Python JSON support", "${PN}-core ${PN}-math ${PN}-re ${PN}-codecs",
- "json lib-dynload/_json.so" ) # package
-
- m.addPackage( "${PN}-lang", "Python low-level language support", "${PN}-core",
- "lib-dynload/_bisect.so lib-dynload/_collections.so lib-dynload/_heapq.so lib-dynload/_weakref.so lib-dynload/_functools.so " +
- "lib-dynload/array.so lib-dynload/itertools.so lib-dynload/operator.so lib-dynload/parser.so " +
- "atexit.* bisect.* code.* codeop.* collections.* dis.* functools.* heapq.* inspect.* keyword.* opcode.* symbol.* repr.* token.* " +
- "tokenize.* traceback.* weakref.*" )
-
- m.addPackage( "${PN}-logging", "Python logging support", "${PN}-core ${PN}-io ${PN}-lang ${PN}-pickle ${PN}-stringold",
- "logging" ) # package
-
- m.addPackage( "${PN}-mailbox", "Python mailbox format support", "${PN}-core ${PN}-mime",
- "mailbox.*" )
-
- m.addPackage( "${PN}-math", "Python math support", "${PN}-core ${PN}-crypt",
- "lib-dynload/cmath.so lib-dynload/math.so lib-dynload/_random.so random.* sets.*" )
-
- m.addPackage( "${PN}-mime", "Python MIME handling APIs", "${PN}-core ${PN}-io",
- "mimetools.* uu.* quopri.* rfc822.* MimeWriter.*" )
-
- m.addPackage( "${PN}-mmap", "Python memory-mapped file support", "${PN}-core ${PN}-io",
- "lib-dynload/mmap.so " )
-
- m.addPackage( "${PN}-multiprocessing", "Python multiprocessing support", "${PN}-core ${PN}-io ${PN}-lang ${PN}-pickle ${PN}-threading ${PN}-ctypes ${PN}-mmap",
- "lib-dynload/_multiprocessing.so multiprocessing" ) # package
-
- m.addPackage( "${PN}-netclient", "Python Internet Protocol clients", "${PN}-core ${PN}-crypt ${PN}-datetime ${PN}-io ${PN}-lang ${PN}-logging ${PN}-mime",
- "*Cookie*.* " +
- "base64.* cookielib.* ftplib.* gopherlib.* hmac.* httplib.* mimetypes.* nntplib.* poplib.* smtplib.* telnetlib.* urllib.* urllib2.* urlparse.* uuid.* rfc822.* mimetools.*" )
-
- m.addPackage( "${PN}-netserver", "Python Internet Protocol servers", "${PN}-core ${PN}-netclient ${PN}-shell ${PN}-threading",
- "cgi.* *HTTPServer.* SocketServer.*" )
-
- m.addPackage( "${PN}-numbers", "Python number APIs", "${PN}-core ${PN}-lang ${PN}-re",
- "decimal.* fractions.* numbers.*" )
-
- m.addPackage( "${PN}-pickle", "Python serialisation/persistence support", "${PN}-core ${PN}-codecs ${PN}-io ${PN}-re",
- "pickle.* shelve.* lib-dynload/cPickle.so pickletools.*" )
-
- m.addPackage( "${PN}-pkgutil", "Python package extension utility support", "${PN}-core",
- "pkgutil.*")
-
- m.addPackage( "${PN}-pprint", "Python pretty-print support", "${PN}-core ${PN}-io",
- "pprint.*" )
-
- m.addPackage( "${PN}-profile", "Python basic performance profiling support", "${PN}-core ${PN}-textutils",
- "profile.* pstats.* cProfile.* lib-dynload/_lsprof.so" )
-
- m.addPackage( "${PN}-re", "Python Regular Expression APIs", "${PN}-core",
- "re.* sre.* sre_compile.* sre_constants* sre_parse.*" ) # _sre is builtin
-
- m.addPackage( "${PN}-readline", "Python readline support", "${PN}-core",
- "lib-dynload/readline.so rlcompleter.*" )
-
- m.addPackage( "${PN}-resource", "Python resource control interface", "${PN}-core",
- "lib-dynload/resource.so" )
-
- m.addPackage( "${PN}-shell", "Python shell-like functionality", "${PN}-core ${PN}-re",
- "cmd.* commands.* dircache.* fnmatch.* glob.* popen2.* shlex.* shutil.*" )
-
- m.addPackage( "${PN}-robotparser", "Python robots.txt parser", "${PN}-core ${PN}-netclient",
- "robotparser.*")
-
- m.addPackage( "${PN}-subprocess", "Python subprocess support", "${PN}-core ${PN}-io ${PN}-re ${PN}-fcntl ${PN}-pickle",
- "subprocess.*" )
-
- m.addPackage( "${PN}-sqlite3", "Python Sqlite3 database support", "${PN}-core ${PN}-datetime ${PN}-lang ${PN}-crypt ${PN}-io ${PN}-threading ${PN}-zlib",
- "lib-dynload/_sqlite3.so sqlite3/dbapi2.* sqlite3/__init__.* sqlite3/dump.*" )
-
- m.addPackage( "${PN}-sqlite3-tests", "Python Sqlite3 database support tests", "${PN}-core ${PN}-sqlite3",
- "sqlite3/test" )
-
- m.addPackage( "${PN}-stringold", "Python string APIs [deprecated]", "${PN}-core ${PN}-re",
- "lib-dynload/strop.so string.* stringold.*" )
-
- m.addPackage( "${PN}-syslog", "Python syslog interface", "${PN}-core",
- "lib-dynload/syslog.so" )
-
- m.addPackage( "${PN}-terminal", "Python terminal controlling support", "${PN}-core ${PN}-io",
- "pty.* tty.*" )
-
- m.addPackage( "${PN}-tests", "Python tests", "${PN}-core",
- "test" ) # package
-
- m.addPackage( "${PN}-threading", "Python threading & synchronization support", "${PN}-core ${PN}-lang",
- "_threading_local.* dummy_thread.* dummy_threading.* mutex.* threading.* Queue.*" )
-
- m.addPackage( "${PN}-tkinter", "Python Tcl/Tk bindings", "${PN}-core",
- "lib-dynload/_tkinter.so lib-tk" ) # package
-
- m.addPackage( "${PN}-unittest", "Python unit testing framework", "${PN}-core ${PN}-stringold ${PN}-lang ${PN}-io ${PN}-difflib ${PN}-pprint ${PN}-shell",
- "unittest/" )
-
- m.addPackage( "${PN}-unixadmin", "Python Unix administration support", "${PN}-core",
- "lib-dynload/nis.so lib-dynload/grp.so lib-dynload/pwd.so getpass.*" )
-
- m.addPackage( "${PN}-xml", "Python basic XML support", "${PN}-core ${PN}-elementtree ${PN}-re",
- "lib-dynload/pyexpat.so xml xmllib.*" ) # package
-
- m.addPackage( "${PN}-xmlrpc", "Python XML-RPC support", "${PN}-core ${PN}-xml ${PN}-netserver ${PN}-lang",
- "xmlrpclib.* SimpleXMLRPCServer.* DocXMLRPCServer.*" )
-
- m.addPackage( "${PN}-zlib", "Python zlib compression support", "${PN}-core",
- "lib-dynload/zlib.so" )
-
- m.addPackage( "${PN}-mailbox", "Python mailbox format support", "${PN}-core ${PN}-mime",
- "mailbox.*" )
-
- m.addPackage( "${PN}-argparse", "Python command line argument parser", "${PN}-core ${PN}-codecs ${PN}-textutils",
- "argparse.*" )
-
- m.addPackage( "${PN}-contextlib", "Python utilities for with-statement" +
- "contexts.", "${PN}-core",
- "${libdir}/python${PYTHON_MAJMIN}/contextlib.*" )
-
- m.make()
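
The deleted generator assembled plain BitBake variable assignments; a toy sketch of the per-package lines its doBody() wrote (the package entry here is illustrative, not taken from a real manifest):

    # One (name, description, dependencies, files) entry as addPackage stored it
    name = "${PN}-io"
    desc, deps = "Python low-level I/O", "${PN}-core"
    files = ["${libdir}/python2.7/socket.py", "${libdir}/python2.7/ssl.py"]

    # The three variable lines emitted for each package
    print('SUMMARY_%s="%s"' % (name, desc))
    print('RDEPENDS_%s="%s"' % (name, deps))
    print('FILES_%s="%s"' % (name, "".join("%s " % f for f in files)))
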
diff --git a/scripts/contrib/python/generate-manifest-3.4.py b/scripts/contrib/python/generate-manifest-3.4.py
deleted file mode 100755
index 06eecdc5d0..0000000000
--- a/scripts/contrib/python/generate-manifest-3.4.py
+++ /dev/null
@@ -1,388 +0,0 @@
-#!/usr/bin/env python
-
-# generate Python Manifest for the OpenEmbedded build system
-# (C) 2002-2010 Michael 'Mickey' Lauer <mlauer@vanille-media.de>
-# (C) 2007 Jeremy Laine
-# licensed under MIT, see COPYING.MIT
-#
-# June 22, 2011 -- Mark Hatle <mark.hatle@windriver.com>
-# * Updated to no longer generate special -dbg package, instead use the
-# single system -dbg
-# * Update version with ".1" to indicate this change
-#
-# 2014 Khem Raj <raj.khem@gmail.com>
-# Added python3 support
-#
-import os
-import sys
-import time
-
-VERSION = "3.4.2"
-
-__author__ = "Michael 'Mickey' Lauer <mlauer@vanille-media.de>"
-__version__ = "20140131"
-
-class MakefileMaker:
-
- def __init__( self, outfile ):
- """initialize"""
- self.packages = {}
- self.targetPrefix = "${libdir}/python%s/" % VERSION[:3]
- self.output = outfile
- self.out( """
-# WARNING: This file is AUTO GENERATED: Manual edits will be lost next time I regenerate the file.
-# Generator: '%s' Version %s (C) 2002-2010 Michael 'Mickey' Lauer <mlauer@vanille-media.de>
-# Visit the Python for Embedded Systems Site => http://www.Vanille.de/projects/python.spy
-""" % ( sys.argv[0], __version__ ) )
-
- #
- # helper functions
- #
-
- def out( self, data ):
- """print a line to the output file"""
- self.output.write( "%s\n" % data )
-
- def setPrefix( self, targetPrefix ):
- """set a file prefix for addPackage files"""
- self.targetPrefix = targetPrefix
-
- def doProlog( self ):
- self.out( """ """ )
- self.out( "" )
-
- def addPackage( self, name, description, dependencies, filenames ):
- """add a package to the Makefile"""
- if type( filenames ) == type( "" ):
- filenames = filenames.split()
- fullFilenames = []
- for filename in filenames:
- if filename[0] != "$":
- fullFilenames.append( "%s%s" % ( self.targetPrefix, filename ) )
- else:
- fullFilenames.append( filename )
- self.packages[name] = description, dependencies, fullFilenames
-
- def doBody( self ):
- """generate body of Makefile"""
-
- global VERSION
-
- #
- # generate provides line
- #
-
- provideLine = 'PROVIDES+="'
- for name in sorted(self.packages):
- provideLine += "%s " % name
- provideLine += '"'
-
- self.out( provideLine )
- self.out( "" )
-
- #
- # generate package line
- #
-
- packageLine = 'PACKAGES="${PN}-dbg '
- for name in sorted(self.packages):
- if name.startswith("${PN}-distutils"):
- if name == "${PN}-distutils":
- packageLine += "%s-staticdev %s " % (name, name)
- elif name != '${PN}-dbg':
- packageLine += "%s " % name
- packageLine += '${PN}-modules"'
-
- self.out( packageLine )
- self.out( "" )
-
- #
- # generate package variables
- #
-
- for name, data in sorted(self.packages.iteritems()):
- desc, deps, files = data
-
- #
- # write out the description, revision and dependencies
- #
- self.out( 'SUMMARY_%s="%s"' % ( name, desc ) )
- self.out( 'RDEPENDS_%s="%s"' % ( name, deps ) )
-
- line = 'FILES_%s="' % name
-
- #
- # check which directories to make in the temporary directory
- #
-
- dirset = {} # if python had a set-datatype this would be sufficient. for now, we're using a dict instead.
- for target in files:
- dirset[os.path.dirname( target )] = True
-
- #
- # generate which files to copy for the target (-dfR because whole directories are also allowed)
- #
-
- for target in files:
- line += "%s " % target
-
- line += '"'
- self.out( line )
- self.out( "" )
-
- self.out( 'SUMMARY_${PN}-modules="All Python modules"' )
- line = 'RDEPENDS_${PN}-modules="'
-
- for name, data in sorted(self.packages.iteritems()):
- if name not in ['${PN}-dev', '${PN}-distutils-staticdev']:
- line += "%s " % name
-
- self.out( "%s \"" % line )
- self.out( 'ALLOW_EMPTY_${PN}-modules = "1"' )
-
- def doEpilog( self ):
- self.out( """""" )
- self.out( "" )
-
- def make( self ):
- self.doProlog()
- self.doBody()
- self.doEpilog()
-
-if __name__ == "__main__":
-
- if len( sys.argv ) > 1:
- try:
- os.unlink(sys.argv[1])
- except Exception:
- sys.exc_clear()
- outfile = file( sys.argv[1], "w" )
- else:
- outfile = sys.stdout
-
- m = MakefileMaker( outfile )
-
- # Add packages here. Only specify dlopen-style library dependencies here, no ldd-style dependencies!
- # Parameters: revision, name, description, dependencies, filenames
- #
-
- m.addPackage( "${PN}-core", "Python interpreter and core modules", "${PN}-lang ${PN}-re ${PN}-reprlib ${PN}-codecs ${PN}-io ${PN}-math",
- "__future__.* _abcoll.* abc.* copy.* copyreg.* ConfigParser.* " +
- "genericpath.* getopt.* linecache.* new.* " +
- "os.* posixpath.* struct.* " +
- "warnings.* site.* stat.* " +
- "UserDict.* UserList.* UserString.* " +
- "lib-dynload/binascii.*.so lib-dynload/_struct.*.so lib-dynload/time.*.so " +
- "lib-dynload/xreadlines.*.so types.* platform.* ${bindir}/python* " +
- "_weakrefset.* sysconfig.* _sysconfigdata.* config/Makefile " +
- "${includedir}/python${PYTHON_BINABI}/pyconfig*.h " +
- "${libdir}/python${PYTHON_MAJMIN}/collections " +
- "${libdir}/python${PYTHON_MAJMIN}/_collections_abc.* " +
- "${libdir}/python${PYTHON_MAJMIN}/_sitebuiltins.* " +
- "${libdir}/python${PYTHON_MAJMIN}/sitecustomize.py ")
-
- m.addPackage( "${PN}-dev", "Python development package", "${PN}-core",
- "${includedir} " +
- "${libdir}/lib*${SOLIBSDEV} " +
- "${libdir}/*.la " +
- "${libdir}/*.a " +
- "${libdir}/*.o " +
- "${libdir}/pkgconfig " +
- "${base_libdir}/*.a " +
- "${base_libdir}/*.o " +
- "${datadir}/aclocal " +
- "${datadir}/pkgconfig " )
-
- m.addPackage( "${PN}-2to3", "Python automated Python 2 to 3 code translator", "${PN}-core",
- "lib2to3" ) # package
-
- m.addPackage( "${PN}-idle", "Python Integrated Development Environment", "${PN}-core ${PN}-tkinter",
- "${bindir}/idle idlelib" ) # package
-
- m.addPackage( "${PN}-pydoc", "Python interactive help support", "${PN}-core ${PN}-lang ${PN}-stringold ${PN}-re",
- "${bindir}/pydoc pydoc.* pydoc_data" )
-
- m.addPackage( "${PN}-smtpd", "Python Simple Mail Transport Daemon", "${PN}-core ${PN}-netserver ${PN}-email ${PN}-mime",
- "${bindir}/smtpd.* smtpd.*" )
-
- m.addPackage( "${PN}-audio", "Python Audio Handling", "${PN}-core",
- "wave.* chunk.* sndhdr.* lib-dynload/ossaudiodev.*.so lib-dynload/audioop.*.so audiodev.* sunaudio.* sunau.* toaiff.*" )
-
- m.addPackage( "${PN}-asyncio", "Python Asynchronous I/O, event loop, coroutines and tasks", "${PN}-core",
- "asyncio" )
-
- m.addPackage( "${PN}-codecs", "Python codecs, encodings & i18n support", "${PN}-core ${PN}-lang",
- "codecs.* encodings gettext.* locale.* lib-dynload/_locale.*.so lib-dynload/_codecs* lib-dynload/_multibytecodec.*.so lib-dynload/unicodedata.*.so stringprep.* xdrlib.*" )
-
- m.addPackage( "${PN}-compile", "Python bytecode compilation support", "${PN}-core",
- "py_compile.* compileall.*" )
-
- m.addPackage( "${PN}-compression", "Python high-level compression support", "${PN}-core ${PN}-codecs",
- "gzip.* zipfile.* tarfile.* lib-dynload/bz2.*.so" )
-
- m.addPackage( "${PN}-crypt", "Python basic cryptographic and hashing support", "${PN}-core",
- "hashlib.* md5.* sha.* lib-dynload/crypt.*.so lib-dynload/_hashlib.*.so lib-dynload/_sha256.*.so lib-dynload/_sha512.*.so" )
-
- m.addPackage( "${PN}-textutils", "Python option parsing, text wrapping and CSV support", "${PN}-core ${PN}-io ${PN}-re ${PN}-stringold",
- "lib-dynload/_csv.*.so csv.* optparse.* textwrap.*" )
-
- m.addPackage( "${PN}-curses", "Python curses support", "${PN}-core",
- "curses lib-dynload/_curses.*.so lib-dynload/_curses_panel.*.so" ) # directory + low level module
-
- m.addPackage( "${PN}-ctypes", "Python C types support", "${PN}-core",
- "ctypes lib-dynload/_ctypes.*.so lib-dynload/_ctypes_test.*.so" ) # directory + low level module
-
- m.addPackage( "${PN}-datetime", "Python calendar and time support", "${PN}-core ${PN}-codecs",
- "_strptime.* calendar.* lib-dynload/datetime.*.so" )
-
- m.addPackage( "${PN}-db", "Python file-based database support", "${PN}-core",
- "anydbm.* dumbdbm.* whichdb.* dbm lib-dynload/_dbm.*.so" )
-
- m.addPackage( "${PN}-debugger", "Python debugger", "${PN}-core ${PN}-io ${PN}-lang ${PN}-re ${PN}-stringold ${PN}-shell ${PN}-pprint",
- "bdb.* pdb.*" )
-
- m.addPackage( "${PN}-difflib", "Python helpers for computing deltas between objects", "${PN}-lang ${PN}-re",
- "difflib.*" )
-
- m.addPackage( "${PN}-distutils-staticdev", "Python distribution utilities (static libraries)", "${PN}-distutils",
- "config/lib*.a" ) # package
-
- m.addPackage( "${PN}-distutils", "Python Distribution Utilities", "${PN}-core",
- "config distutils" ) # package
-
- m.addPackage( "${PN}-doctest", "Python framework for running examples in docstrings", "${PN}-core ${PN}-lang ${PN}-io ${PN}-re ${PN}-unittest ${PN}-debugger ${PN}-difflib",
- "doctest.*" )
-
- # FIXME consider adding to some higher level package
- m.addPackage( "${PN}-elementtree", "Python elementree", "${PN}-core",
- "lib-dynload/_elementtree.*.so" )
-
- m.addPackage( "${PN}-email", "Python email support", "${PN}-core ${PN}-io ${PN}-re ${PN}-mime ${PN}-audio ${PN}-image ${PN}-netclient",
- "imaplib.* email" ) # package
-
- m.addPackage( "${PN}-fcntl", "Python's fcntl interface", "${PN}-core",
- "lib-dynload/fcntl.*.so" )
-
- m.addPackage( "${PN}-html", "Python HTML processing support", "${PN}-core",
- "formatter.* htmlentitydefs.* htmllib.* markupbase.* sgmllib.* HTMLParser.* " )
-
- m.addPackage( "${PN}-importlib", "Python import implementation library", "${PN}-core",
- "importlib" )
-
- m.addPackage( "${PN}-gdbm", "Python GNU database support", "${PN}-core",
- "lib-dynload/_gdbm.*.so" )
-
- m.addPackage( "${PN}-image", "Python graphical image handling", "${PN}-core",
- "colorsys.* imghdr.* lib-dynload/imageop.*.so lib-dynload/rgbimg.*.so" )
-
- m.addPackage( "${PN}-io", "Python low-level I/O", "${PN}-core ${PN}-math",
- "lib-dynload/_socket.*.so lib-dynload/_io.*.so lib-dynload/_ssl.*.so lib-dynload/select.*.so lib-dynload/termios.*.so lib-dynload/cStringIO.*.so " +
- "pipes.* socket.* ssl.* tempfile.* StringIO.* io.* _pyio.*" )
-
- m.addPackage( "${PN}-json", "Python JSON support", "${PN}-core ${PN}-math ${PN}-re",
- "json lib-dynload/_json.*.so" ) # package
-
- m.addPackage( "${PN}-lang", "Python low-level language support", "${PN}-core",
- "lib-dynload/_bisect.*.so lib-dynload/_collections.*.so lib-dynload/_heapq.*.so lib-dynload/_weakref.*.so lib-dynload/_functools.*.so " +
- "lib-dynload/array.*.so lib-dynload/itertools.*.so lib-dynload/operator.*.so lib-dynload/parser.*.so " +
- "atexit.* bisect.* code.* codeop.* collections.* _collections_abc.* dis.* functools.* heapq.* inspect.* keyword.* opcode.* symbol.* repr.* token.* " +
- "tokenize.* traceback.* weakref.*" )
-
- m.addPackage( "${PN}-logging", "Python logging support", "${PN}-core ${PN}-io ${PN}-lang ${PN}-pickle ${PN}-stringold",
- "logging" ) # package
-
- m.addPackage( "${PN}-mailbox", "Python mailbox format support", "${PN}-core ${PN}-mime",
- "mailbox.*" )
-
- m.addPackage( "${PN}-math", "Python math support", "${PN}-core",
- "lib-dynload/cmath.*.so lib-dynload/math.*.so lib-dynload/_random.*.so random.* sets.*" )
-
- m.addPackage( "${PN}-mime", "Python MIME handling APIs", "${PN}-core ${PN}-io",
- "mimetools.* uu.* quopri.* rfc822.* MimeWriter.*" )
-
- m.addPackage( "${PN}-mmap", "Python memory-mapped file support", "${PN}-core ${PN}-io",
- "lib-dynload/mmap.*.so " )
-
- m.addPackage( "${PN}-multiprocessing", "Python multiprocessing support", "${PN}-core ${PN}-io ${PN}-lang ${PN}-pickle ${PN}-threading ${PN}-ctypes ${PN}-mmap",
- "lib-dynload/_multiprocessing.*.so multiprocessing" ) # package
-
- m.addPackage( "${PN}-netclient", "Python Internet Protocol clients", "${PN}-core ${PN}-crypt ${PN}-datetime ${PN}-io ${PN}-lang ${PN}-logging ${PN}-mime",
- "*Cookie*.* " +
- "base64.* cookielib.* ftplib.* gopherlib.* hmac.* httplib.* mimetypes.* nntplib.* poplib.* smtplib.* telnetlib.* urllib uuid.* rfc822.* mimetools.*" )
-
- m.addPackage( "${PN}-netserver", "Python Internet Protocol servers", "${PN}-core ${PN}-netclient ${PN}-shell ${PN}-threading",
- "cgi.* *HTTPServer.* SocketServer.*" )
-
- m.addPackage( "${PN}-numbers", "Python number APIs", "${PN}-core ${PN}-lang ${PN}-re",
- "decimal.* fractions.* numbers.*" )
-
- m.addPackage( "${PN}-pickle", "Python serialisation/persistence support", "${PN}-core ${PN}-codecs ${PN}-io ${PN}-re",
- "pickle.* shelve.* lib-dynload/cPickle.*.so pickletools.*" )
-
- m.addPackage( "${PN}-pkgutil", "Python package extension utility support", "${PN}-core",
- "pkgutil.*")
-
- m.addPackage( "${PN}-pprint", "Python pretty-print support", "${PN}-core ${PN}-io",
- "pprint.*" )
-
- m.addPackage( "${PN}-profile", "Python basic performance profiling support", "${PN}-core ${PN}-textutils",
- "profile.* pstats.* cProfile.* lib-dynload/_lsprof.*.so" )
-
- m.addPackage( "${PN}-re", "Python Regular Expression APIs", "${PN}-core",
- "re.* sre.* sre_compile.* sre_constants* sre_parse.*" ) # _sre is builtin
-
- m.addPackage( "${PN}-readline", "Python readline support", "${PN}-core",
- "lib-dynload/readline.*.so rlcompleter.*" )
-
- m.addPackage( "${PN}-reprlib", "Python alternate repr() implementation", "${PN}-core",
- "reprlib.py" )
-
- m.addPackage( "${PN}-resource", "Python resource control interface", "${PN}-core",
- "lib-dynload/resource.*.so" )
-
- m.addPackage( "${PN}-shell", "Python shell-like functionality", "${PN}-core ${PN}-re",
- "cmd.* commands.* dircache.* fnmatch.* glob.* popen2.* shlex.* shutil.*" )
-
- m.addPackage( "${PN}-subprocess", "Python subprocess support", "${PN}-core ${PN}-io ${PN}-re ${PN}-fcntl ${PN}-pickle",
- "subprocess.*" )
-
- m.addPackage( "${PN}-sqlite3", "Python Sqlite3 database support", "${PN}-core ${PN}-datetime ${PN}-lang ${PN}-crypt ${PN}-io ${PN}-threading",
- "lib-dynload/_sqlite3.*.so sqlite3/dbapi2.* sqlite3/__init__.* sqlite3/dump.*" )
-
- m.addPackage( "${PN}-sqlite3-tests", "Python Sqlite3 database support tests", "${PN}-core ${PN}-sqlite3",
- "sqlite3/test" )
-
- m.addPackage( "${PN}-stringold", "Python string APIs [deprecated]", "${PN}-core ${PN}-re",
- "lib-dynload/strop.*.so string.* stringold.*" )
-
- m.addPackage( "${PN}-syslog", "Python syslog interface", "${PN}-core",
- "lib-dynload/syslog.*.so" )
-
- m.addPackage( "${PN}-terminal", "Python terminal controlling support", "${PN}-core ${PN}-io",
- "pty.* tty.*" )
-
- m.addPackage( "${PN}-tests", "Python tests", "${PN}-core",
- "test" ) # package
-
- m.addPackage( "${PN}-threading", "Python threading & synchronization support", "${PN}-core ${PN}-lang",
- "_threading_local.* dummy_thread.* dummy_threading.* mutex.* threading.* Queue.*" )
-
- m.addPackage( "${PN}-tkinter", "Python Tcl/Tk bindings", "${PN}-core",
- "lib-dynload/_tkinter.*.so lib-tk tkinter" ) # package
-
- m.addPackage( "${PN}-unittest", "Python unit testing framework", "${PN}-core ${PN}-stringold ${PN}-lang ${PN}-io ${PN}-difflib ${PN}-pprint ${PN}-shell",
- "unittest/" )
-
- m.addPackage( "${PN}-unixadmin", "Python Unix administration support", "${PN}-core",
- "lib-dynload/nis.*.so lib-dynload/grp.*.so lib-dynload/pwd.*.so getpass.*" )
-
- m.addPackage( "${PN}-xml", "Python basic XML support", "${PN}-core ${PN}-elementtree ${PN}-re",
- "lib-dynload/pyexpat.*.so xml xmllib.*" ) # package
-
- m.addPackage( "${PN}-xmlrpc", "Python XML-RPC support", "${PN}-core ${PN}-xml ${PN}-netserver ${PN}-lang",
- "xmlrpclib.* SimpleXMLRPCServer.* DocXMLRPCServer.* xmlrpc" )
-
- m.addPackage( "${PN}-mailbox", "Python mailbox format support", "${PN}-core ${PN}-mime",
- "mailbox.*" )
-
- m.make()
diff --git a/scripts/contrib/serdevtry b/scripts/contrib/serdevtry
index 74bd7b7161..9144730e7e 100755
--- a/scripts/contrib/serdevtry
+++ b/scripts/contrib/serdevtry
@@ -2,7 +2,8 @@
# Copyright (C) 2014 Intel Corporation
#
-# Released under the MIT license (see COPYING.MIT)
+# SPDX-License-Identifier: MIT
+#
if [ "$1" = "" -o "$1" = "--help" ] ; then
echo "Usage: $0 <serial terminal command>"
diff --git a/scripts/contrib/test_build_time.sh b/scripts/contrib/test_build_time.sh
index 9e5725ae54..23f238adf6 100755
--- a/scripts/contrib/test_build_time.sh
+++ b/scripts/contrib/test_build_time.sh
@@ -3,22 +3,8 @@
# Build performance regression test script
#
# Copyright 2011 Intel Corporation
-# All rights reserved.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
+# SPDX-License-Identifier: GPL-2.0-or-later
#
# DESCRIPTION
# This script is intended to be used in conjunction with "git bisect run"
diff --git a/scripts/contrib/test_build_time_worker.sh b/scripts/contrib/test_build_time_worker.sh
index 8e20a9ea7d..478e8b0d03 100755
--- a/scripts/contrib/test_build_time_worker.sh
+++ b/scripts/contrib/test_build_time_worker.sh
@@ -1,5 +1,7 @@
#!/bin/bash
-
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
# This is an example script to be used in conjunction with test_build_time.sh
if [ "$TEST_BUILDDIR" = "" ] ; then
diff --git a/scripts/contrib/uncovered b/scripts/contrib/uncovered
new file mode 100755
index 0000000000..f16128cb7a
--- /dev/null
+++ b/scripts/contrib/uncovered
@@ -0,0 +1,26 @@
+#!/bin/bash -eur
+#
+# Find Python modules uncovered by oe-selftest
+#
+# Copyright (c) 2016, Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# Author: Ed Bartosh <ed.bartosh@linux.intel.com>
+#
+
+if [ ! "$#" -eq 1 -o -t 0 ] ; then
+ echo 'Usage: coverage report | ./scripts/contrib/uncovered <dir>' 1>&2
+ exit 1
+fi
+
+path=$(readlink -ev $1)
+
+if [ ! -d "$path" ] ; then
+ echo "directory $1 doesn't exist" 1>&2
+ exit 1
+fi
+
+diff -u <(grep "$path" | grep -v ' 0%$' | cut -f1 -d: | sort) \
+ <(find $path | xargs file | grep 'Python script' | cut -f1 -d:| sort) | \
+ grep "^+$path" | cut -c2-
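
A set-difference sketch of what the pipeline above computes, in Python for illustration (the coverage-report line format here is an assumption):

    def uncovered(report_lines, py_files):
        # Files that the "coverage report" dump lists with non-zero coverage
        covered = set()
        for line in report_lines:
            line = line.strip()
            if line and not line.endswith(" 0%"):
                covered.add(line.split()[0])
        # Everything on disk but absent from the covered set
        return sorted(set(py_files) - covered)

    print(uncovered(["lib/a.py 10 2 80%", "lib/b.py 10 10 0%"],
                    ["lib/a.py", "lib/b.py", "lib/c.py"]))
    # -> ['lib/b.py', 'lib/c.py']
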
diff --git a/scripts/contrib/verify-homepage.py b/scripts/contrib/verify-homepage.py
index 86cc82bca3..7bffa78e23 100755
--- a/scripts/contrib/verify-homepage.py
+++ b/scripts/contrib/verify-homepage.py
@@ -1,63 +1,64 @@
-#!/usr/bin/env python
-
-# This script is used for verify HOMEPAGE.
+#!/usr/bin/env python3
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# This script can be used to verify HOMEPAGE values for all recipes in
+# the current configuration.
# The result is influenced by the network environment, since the default timeout for connecting to a URL is 5 seconds.
import sys
import os
import subprocess
-import urllib2
+import urllib.request
-def search_bitbakepath():
- bitbakepath = ""
- # Search path to bitbake lib dir in order to load bb modules
- if os.path.exists(os.path.join(os.path.dirname(sys.argv[0]), '../../bitbake/lib/bb')):
- bitbakepath = os.path.join(os.path.dirname(sys.argv[0]), '../../bitbake/lib')
- bitbakepath = os.path.abspath(bitbakepath)
- else:
- # Look for bitbake/bin dir in PATH
- for pth in os.environ['PATH'].split(':'):
- if os.path.exists(os.path.join(pth, '../lib/bb')):
- bitbakepath = os.path.abspath(os.path.join(pth, '../lib'))
- break
- if not bitbakepath:
- sys.stderr.write("Unable to find bitbake by searching parent directory of this script or PATH\n")
- sys.exit(1)
- return bitbakepath
-
-# For importing the following modules
-sys.path.insert(0, search_bitbakepath())
+# Allow importing scripts/lib modules
+scripts_path = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + '/..')
+lib_path = scripts_path + '/lib'
+sys.path = sys.path + [lib_path]
+import scriptpath
+import scriptutils
+
+# Allow importing bitbake modules
+bitbakepath = scriptpath.add_bitbake_lib_path()
+
import bb.tinfoil
+logger = scriptutils.logger_create('verify_homepage')
+
def wgetHomepage(pn, homepage):
result = subprocess.call('wget ' + '-q -T 5 -t 1 --spider ' + homepage, shell = True)
if result:
- bb.warn("Failed to verify HOMEPAGE (%s) of %s" % (homepage, pn))
+ logger.warning("%s: failed to verify HOMEPAGE: %s" % (pn, homepage))
return 1
else:
return 0
def verifyHomepage(bbhandler):
- pkg_pn = bbhandler.cooker.recipecache.pkg_pn
+ pkg_pn = bbhandler.cooker.recipecaches[''].pkg_pn
pnlist = sorted(pkg_pn)
count = 0
+ checked = []
for pn in pnlist:
- fn = pkg_pn[pn].pop()
- data = bb.cache.Cache.loadDataFull(fn, bbhandler.cooker.collection.get_file_appends(fn), bbhandler.config_data)
- homepage = data.getVar("HOMEPAGE")
- if homepage:
- try:
- urllib2.urlopen(homepage, timeout=5)
- except Exception:
- count = count + wgetHomepage(pn, homepage)
+ for fn in pkg_pn[pn]:
+ # There's no point checking multiple BBCLASSEXTENDed variants of the same recipe
+ realfn, _, _ = bb.cache.virtualfn2realfn(fn)
+ if realfn in checked:
+ continue
+ data = bbhandler.parse_recipe_file(realfn)
+ homepage = data.getVar("HOMEPAGE")
+ if homepage:
+ try:
+ urllib.request.urlopen(homepage, timeout=5)
+ except Exception:
+ count = count + wgetHomepage(os.path.basename(realfn), homepage)
+ checked.append(realfn)
return count
if __name__=='__main__':
- failcount = 0
- bbhandler = bb.tinfoil.Tinfoil()
- bbhandler.prepare()
- print "Start to verify HOMEPAGE:"
- failcount = verifyHomepage(bbhandler)
- print "finish to verify HOMEPAGE."
- print "Summary: %s failed" % failcount
+ with bb.tinfoil.Tinfoil() as bbhandler:
+ bbhandler.prepare()
+ logger.info("Start verifying HOMEPAGE:")
+ failcount = verifyHomepage(bbhandler)
+ logger.info("Finished verifying HOMEPAGE.")
+ logger.info("Summary: %s failed" % failcount)