author     Nathan Rossi <nathan@nathanrossi.com>                  2019-09-11 14:13:07 +0000
committer  Richard Purdie <richard.purdie@linuxfoundation.org>    2019-09-11 15:28:55 +0100
commit     47edd51970ed0c33edbe04fd72abd1cfc6ecd3d1 (patch)
tree       11360f5f7a8730fd8138037dec75f652c5a707db /scripts
parent     86b8a1d534bfcd70775c6e2b59eabe10de29f526 (diff)
download   openembedded-core-contrib-47edd51970ed0c33edbe04fd72abd1cfc6ecd3d1.tar.gz
resulttool: Handle multiple series containing ptestresults
Handle multiple results series having ptestresults content. The contents are
merged on a per-result basis where duplicates are ignored (with a warning
message printed). The 'ptestresults.sections' collection is also merged on a
per-suite basis.

Signed-off-by: Nathan Rossi <nathan@nathanrossi.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
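For readers skimming the change, here is a minimal sketch of the de-duplication idea described above, assuming a simplified per-suite record and a hypothetical merge_ptest_result() helper; the actual logic lives in handle_ptest_result() in report.py, shown in the diff below, and the result_types mapping here only approximates the script's own.

    # Sketch only; 'suites' stands in for self.ptests[machine] and the status
    # buckets approximate the script's result_types mapping (assumed here).
    result_types = {'passed': ['PASSED', 'passed'],
                    'failed': ['FAILED', 'failed', 'ERROR', 'error', 'UNKNOWN'],
                    'skipped': ['SKIPPED', 'skipped']}

    def merge_ptest_result(suites, machine, suite, test, status):
        # Create the per-suite record on first sight, including a 'testcases'
        # set that remembers which tests have already been counted.
        record = suites.setdefault(suite, {
            'passed': 0, 'failed': 0, 'skipped': 0, 'duration': '-',
            'failed_testcases': [], 'testcases': set(),
        })
        # A later series reporting an already-seen testcase is ignored with a warning.
        if test in record['testcases']:
            print("Warning duplicate ptest result '{}.{}' for {}".format(suite, test, machine))
            return False
        for bucket in result_types:
            if status in result_types[bucket]:
                record[bucket] += 1
        record['testcases'].add(test)
        return True

The first series to report a testcase is counted; any later series that repeats it only produces the warning and returns False so the caller can skip it.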
Diffstat (limited to 'scripts')
-rw-r--r--  scripts/lib/resulttool/report.py  44
1 file changed, 32 insertions(+), 12 deletions(-)
diff --git a/scripts/lib/resulttool/report.py b/scripts/lib/resulttool/report.py
index 8b03717d29d..883b52517b9 100644
--- a/scripts/lib/resulttool/report.py
+++ b/scripts/lib/resulttool/report.py
@@ -32,16 +32,22 @@ class ResultsTextReport(object):
# Ensure tests without any test results still show up on the report
for suite in result['ptestresult.sections']:
if suite not in self.ptests[machine]:
- self.ptests[machine][suite] = {'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-', 'failed_testcases': []}
+ self.ptests[machine][suite] = {
+ 'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-',
+ 'failed_testcases': [], "testcases": set(),
+ }
if 'duration' in result['ptestresult.sections'][suite]:
self.ptests[machine][suite]['duration'] = result['ptestresult.sections'][suite]['duration']
if 'timeout' in result['ptestresult.sections'][suite]:
self.ptests[machine][suite]['duration'] += " T"
- return
+ return True
+
+ # process test result
try:
_, suite, test = k.split(".", 2)
except ValueError:
- return
+ return True
+
# Handle 'glib-2.0'
if 'ptestresult.sections' in result and suite not in result['ptestresult.sections']:
try:
@@ -50,11 +56,23 @@ class ResultsTextReport(object):
suite = suite + "." + suite1
except ValueError:
pass
+
if suite not in self.ptests[machine]:
- self.ptests[machine][suite] = {'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-', 'failed_testcases': []}
+ self.ptests[machine][suite] = {
+ 'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-',
+ 'failed_testcases': [], "testcases": set(),
+ }
+
+ # do not process duplicate results
+ if test in self.ptests[machine][suite]["testcases"]:
+ print("Warning duplicate ptest result '{}.{}' for {}".format(suite, test, machine))
+ return False
+
for tk in self.result_types:
if status in self.result_types[tk]:
self.ptests[machine][suite][tk] += 1
+ self.ptests[machine][suite]["testcases"].add(test)
+ return True
def handle_ltptest_result(self, k, status, result, machine):
if machine not in self.ltptests:
@@ -124,17 +142,20 @@ class ResultsTextReport(object):
result = testresult.get('result', [])
for k in result:
test_status = result[k].get('status', [])
+ if k.startswith("ptestresult."):
+ if not self.handle_ptest_result(k, test_status, result, machine):
+ continue
+ elif k.startswith("ltpresult."):
+ self.handle_ltptest_result(k, test_status, result, machine)
+ elif k.startswith("ltpposixresult."):
+ self.handle_ltpposixtest_result(k, test_status, result, machine)
+
+ # process result if it was not skipped by a handler
for tk in self.result_types:
if test_status in self.result_types[tk]:
test_count_report[tk] += 1
if test_status in self.result_types['failed']:
test_count_report['failed_testcases'].append(k)
- if k.startswith("ptestresult."):
- self.handle_ptest_result(k, test_status, result, machine)
- if k.startswith("ltpresult."):
- self.handle_ltptest_result(k, test_status, result, machine)
- if k.startswith("ltpposixresult."):
- self.handle_ltpposixtest_result(k, test_status, result, machine)
return test_count_report
def print_test_report(self, template_file_name, test_count_reports):
@@ -210,8 +231,7 @@ class ResultsTextReport(object):
# Check to see if there is already results for these kinds of tests for the machine
for key in result['result'].keys():
testtype = str(key).split('.')[0]
- if ((machine in self.ptests and testtype == "ptestresult" and self.ptests[machine]) or
- (machine in self.ltptests and testtype == "ltpiresult" and self.ltptests[machine]) or
+ if ((machine in self.ltptests and testtype == "ltpiresult" and self.ltptests[machine]) or
(machine in self.ltpposixtests and testtype == "ltpposixresult" and self.ltpposixtests[machine])):
print("Already have test results for %s on %s, skipping %s" %(str(key).split('.')[0], machine, resultid))
skip = True
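As a reading aid for the second hunk above, a rough, self-contained sketch of the caller pattern this patch introduces; count_results is a placeholder name, not the actual get_aggregated_test_result() implementation. The point is that a False return from the ptest handler (a duplicate result) skips the generic status counting for that key, so duplicates are not double counted.

    # Sketch only (hypothetical names); mirrors the control flow of the second hunk.
    def count_results(result, machine, handle_ptest_result):
        result_types = {'passed': ['PASSED', 'passed'],
                        'failed': ['FAILED', 'failed', 'ERROR', 'error', 'UNKNOWN'],
                        'skipped': ['SKIPPED', 'skipped']}
        test_count_report = {'passed': 0, 'failed': 0, 'skipped': 0, 'failed_testcases': []}
        for k in result:
            test_status = result[k].get('status', [])
            if k.startswith("ptestresult."):
                # Duplicate ptest results make the handler return False; skip them here.
                if not handle_ptest_result(k, test_status, result, machine):
                    continue
            for tk in result_types:
                if test_status in result_types[tk]:
                    test_count_report[tk] += 1
            if test_status in result_types['failed']:
                test_count_report['failed_testcases'].append(k)
        return test_count_report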