Add a script to parse the make test output into an XML file so it can be consumed by CITE/Sponge.
Change-Id: I37fe7a31cbd921cee54656a09cd67108f8a14eff
diff --git a/Makefile b/Makefile
index e3d52a3..add2bfd 100644
--- a/Makefile
+++ b/Makefile
@@ -13,6 +13,10 @@
test:
wvtest/wvtestrun $(MAKE) runtests
+test_with_output:
+ $(MAKE) runtests > result.txt
+ result_parser.py result.txt result.xml
+
#TODO(apenwarr): use a smarter allocator.
# We could enable parallelism by depending on $(addsuffix ...) instead of
# looping through them one by one. But then we end up running multiple
@@ -77,6 +81,7 @@
ip addr show eth0; \
ip addr show eth1; \
ip addr show eth2; \
+ ip addr show eth4; \
cat /proc/cpuinfo
configs/nmap.%.tmp:
diff --git a/result_parser.py b/result_parser.py
new file mode 100755
index 0000000..01c6c0a
--- /dev/null
+++ b/result_parser.py
@@ -0,0 +1,183 @@
+#!/usr/bin/python
+"""Parse the wvtest results from the output of 'make test_with_output'.
+
+^Testing ".+" in .+:$ marks starts of a test file
+^! .+$ marks a test case status
+
+The test result will be saved to a .xml file that can be used by our dashboard
+system to stream the data into Sponge easily.
+"""
+
+__author__ = 'brucefan@google.com (Chun Fan)'
+
+
+import os
+import re
+import sys
+
+
+SUITES_TEMPLATE = """
+<testsuites disabled="0" errors="{num_of_errors}"
+ failures="{num_of_failures}" tests="{num_of_tests}"
+ name="{suites_name}" time="0.0">
+ {test_suites}
+</testsuites>
+"""
+
+SUITE_TEMPLATE = """
+<testsuite disabled="0" errors="{suite_errors}"
+ failures="{suite_failures}" name="{suite_name}"
+ tests="{suite_tests}" time="0.0">
+ {test_cases}
+</testsuite>
+"""
+
+TEST_CASE_PASS_TEMPLATE = """
+<testcase classname="{class_name}" name="{test_case_name}"
+ status="run" time="0.0"/>
+"""
+
+TEST_CASE_FAILURE_TEMPLATE = """
+<testcase classname="{class_name}" name="{test_case_name}"
+ status="run" time="0.0">
+ <failure>
+ {result_msg}
+ </failure>
+</testcase>
+"""
+
+TEST_CASE_ERROR_TEMPLATE = """
+<testcase classname="%(class_name)s" name="%(test_case_name)s"
+ status="run" time="0.0">
+ <error>
+ {result_msg}
+ </error>
+</testcase>
+"""
+
+class TestSuitesResult(object):
+
+ def __init__(self, name):
+ self.name = name
+ self.test_suite_results = []
+
+ def AddTestSuiteResult(self, test_suite_result):
+ self.test_suite_results.append(test_suite_result)
+
+ def ToXml(self):
+ self.test_suites_xml = '\n'.join(
+ [ts.ToXml() for ts in self.test_suite_results])
+ self.total_tests = sum(
+ [ts.total_tests for ts in self.test_suite_results])
+ self.failures = sum(
+ [ts.failures for ts in self.test_suite_results])
+ return SUITES_TEMPLATE.format(
+ suites_name=self.name,
+ num_of_errors=0,
+ num_of_failures=self.failures,
+ num_of_tests=self.total_tests,
+ test_suites=self.test_suites_xml)
+
+
+class TestSuiteResult(object):
+
+ def __init__(self, name):
+ self.name = name
+ self.test_case_results = []
+
+ def AddTestCaseResult(self, test_case_result):
+ self.test_case_results.append(test_case_result)
+
+ def ToXml(self):
+ self.test_cases_xml = '\n'.join(
+ [tc.ToXml() for tc in self.test_case_results])
+ self.total_tests = len(self.test_case_results)
+ self.failures = len([tc for tc in self.test_case_results if tc.result])
+ return SUITE_TEMPLATE.format(
+ suite_name=self.name,
+ suite_errors=0,
+ suite_failures=self.failures,
+ suite_tests=self.total_tests,
+ test_cases=self.test_cases_xml)
+
+
+class TestCaseResult(object):
+
+ PASS = 0
+ FAILED = 1
+ ERROR = 2
+
+ def __init__(self, name, class_name, result, result_msg=None):
+ self.name = name
+ self.class_name = class_name
+ self.result = result
+ self.result_msg = result_msg
+ self.result_template_map = {
+ TestCaseResult.PASS: TEST_CASE_PASS_TEMPLATE,
+ TestCaseResult.FAILED: TEST_CASE_FAILURE_TEMPLATE,
+ TestCaseResult.ERROR: TEST_CASE_ERROR_TEMPLATE}
+
+ def ToXml(self):
+ template = self.result_template_map.get(
+ self.result, TEST_CASE_FAILURE_TEMPLATE)
+ if self.result:
+ print 'Test case: ', self.name, 'FAILED with result code', self.result
+ return template.format(class_name=self.class_name,
+ test_case_name=self.name,
+ result_msg=self.result_msg)
+ else:
+ print 'Test case: ', self.name, 'PASSED with result code', self.result
+ return template.format(class_name=self.class_name,
+ test_case_name=self.name)
+
+
+def ParseTestResult(result_file, output_xml_file):
+ """Parse the given result file and dump out parsed data to given output."""
+ if not os.path.exists(result_file):
+ print 'Error: Given result file does not exist: ', result_file
+ return
+ test_suites = TestSuitesResult('Bruno Release Smoke')
+ current_test_suite = None
+ with open(result_file, 'r') as f:
+ for line in f:
+ m = re.search(r'^Testing \"(.+)\" in (.+):$', line)
+ if m:
+ # a new start for a test file
+ suite_name = m.group(1)
+ file_name = m.group(2)
+ print 'Processing suite', suite_name, 'in file', file_name
+ current_test_suite = TestSuiteResult(suite_name)
+ test_suites.AddTestSuiteResult(current_test_suite)
+ continue
+ if line.startswith('! '):
+ if not current_test_suite:
+ print 'WARNING: Encounter a test case before a test suite is seen.'
+ continue
+ # Found a test case result
+ parts = line.strip().split(' ')
+ test_case_name = ' '.join(parts[2:-2]).strip()
+ test_case_class_name = '%s-%s' % (
+ current_test_suite.name, test_case_name)
+ test_case_result = TestCaseResult.PASS
+ test_case_result_msg = ''
+ if parts[-1] != 'ok':
+ test_case_result = TestCaseResult.FAILED
+ test_case_result_msg = line
+ print 'Processing test case: ', test_case_name, test_case_result
+ current_test_suite.AddTestCaseResult(
+ TestCaseResult(
+ name=test_case_name,
+ class_name=test_case_class_name,
+ result=test_case_result,
+ result_msg=test_case_result_msg))
+ # Now it is time to write the xml output
+ with open(output_xml_file, 'w') as f:
+ f.write(test_suites.ToXml())
+
+
+def main(argv):
+ ParseTestResult(sys.argv[1], sys.argv[2])
+
+
+if __name__ == '__main__':
+ main(sys.argv)