Generate reports in JUnit and JSON formats // Resolve #2891

Author: Ivan Kravets
Date: 2022-04-23 19:19:25 +03:00
Parent: 9b221a06c8
Commit: c9026a1b9c
14 changed files with 319 additions and 58 deletions

View File

@@ -44,6 +44,7 @@ Please check `Migration guide from 5.x to 6.0 <https://docs.platformio.org/en/la
 * **Unit Testing**
+  - Generate reports in JUnit and JSON formats using the `pio test --output-format <https://docs.platformio.org/en/latest/core/userguide/cmd_test.html#cmdoption-pio-test-output-format>`__ option (`issue #2891 <https://github.com/platformio/platformio-core/issues/2891>`_)
   - Added support for test hierarchies (nested test suites) (`issue #4135 <https://github.com/platformio/platformio-core/issues/4135>`_)
   - Provide more information when the native program crashed on a host (errored with a negative return code) (`issue #3429 <https://github.com/platformio/platformio-core/issues/3429>`_)
   - Fixed an issue when command line parameters ("--ignore", "--filter") do not override values defined in the |PIOCONF| (`issue #3845 <https://github.com/platformio/platformio-core/issues/3845>`_)
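
Note: the new option can be exercised end to end the same way this commit's functional test does. A minimal sketch, with an illustrative project directory and environment name (placeholders, not part of this commit):

# Minimal sketch of the new reporting options; the project directory and
# environment name are placeholders.
from platformio import proc

result = proc.exec_command(
    [
        "platformio",
        "test",
        "-d", "path/to/project",     # illustrative project directory
        "-e", "native",              # illustrative environment
        "--output-format=junit",     # new in this commit: "json" or "junit"
        "--output-path", "reports",  # an existing directory yields a timestamped file name
    ]
)
print(result["returncode"])  # non-zero when a suite errored or failed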

docs

Submodule docs updated: 4e24a04fd1...ad5788644c

View File

@@ -76,6 +76,12 @@ from platformio.unittest.runners.factory import TestRunnerFactory
     type=click.IntRange(0, 1),
     help="Set initial DTR line state for Serial Monitor",
 )
+@click.option("--output-format", type=click.Choice(["json", "junit"]))
+@click.option(
+    "--output-path",
+    default=os.getcwd,
+    type=click.Path(dir_okay=True, resolve_path=True),
+)
 @click.option("--verbose", "-v", is_flag=True)
 @click.pass_context
 def unittest_cmd(  # pylint: disable=too-many-arguments,too-many-locals,redefined-builtin
@@ -93,6 +99,8 @@ def unittest_cmd(  # pylint: disable=too-many-arguments,too-many-locals,redefined-builtin
     no_reset,
     monitor_rts,
     monitor_dtr,
+    output_format,
+    output_path,
     verbose,
 ):
     app.set_session_var("custom_project_conf", project_conf)
@@ -104,7 +112,8 @@ def unittest_cmd(  # pylint: disable=too-many-arguments,too-many-locals,redefined-builtin
     if not verbose:
         click.echo("Verbose mode can be enabled via `-v, --verbose` option")
-    click.secho("Collected %d test suites" % len(test_names), bold=True)
+    click.secho("Collected %d tests" % len(test_names), bold=True, nl=False)
+    click.echo(" (%s)" % ", ".join(test_names))

     test_summary = TestSummary(os.path.basename(project_dir))
     default_envs = config.default_envs()
@@ -156,12 +165,23 @@ def unittest_cmd(  # pylint: disable=too-many-arguments,too-many-locals,redefined-builtin
             runner.start(ctx)
             print_suite_footer(test_suite)

+    # automatically generate JSON report for PIO IDE
+    TestReportFactory.new("json", test_summary).generate(
+        os.path.join(
+            config.get("platformio", "build_dir"), "pio-test-report-latest.json"
+        )
+    )
+
     # Reset custom project config
     app.set_session_var("custom_project_conf", None)

     stdout_report = TestReportFactory.new("stdout", test_summary)
     stdout_report.generate(verbose=verbose)

+    if output_format:
+        custom_report = TestReportFactory.new(output_format, test_summary)
+        custom_report.generate(output_path=output_path, verbose=True)
+
     if test_summary.is_errored or test_summary.get_status_nums(TestStatus.FAILED):
         raise exception.ReturnErrorCode(1)
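
Note: TestReportFactory is used above but not defined anywhere in the visible diff. A plausible sketch of such a factory, assuming a simple format-name-to-class mapping; the json/junit/stdout module paths are inferred from the imports in the new report files and may differ:

# Hypothetical sketch of the report factory; only the class names appear in
# this commit, the dispatch logic and module paths are assumed.
from platformio.unittest.reports.json import JsonTestReport
from platformio.unittest.reports.junit import JunitTestReport
from platformio.unittest.reports.stdout import StdoutTestReport


class TestReportFactory:
    @staticmethod
    def new(format_, test_summary):
        cls = {
            "json": JsonTestReport,
            "junit": JunitTestReport,
            "stdout": StdoutTestReport,
        }[format_]
        return cls(test_summary)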

View File

@@ -21,7 +21,7 @@ class TestReportBase:
     def __init__(self, test_summary):
         self.test_summary = test_summary

-    def generate(self):
+    def generate(self, output_path, verbose):
         raise NotImplementedError()
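
Note: the concrete JSON and JUnit reports below implement this generate(output_path, verbose) contract. As a hedged illustration, a custom report would follow the same shape; MarkdownTestReport is hypothetical, not part of this commit:

# Hypothetical subclass, for illustration only: demonstrates the generate()
# contract defined above using attributes the new result classes expose.
class MarkdownTestReport(TestReportBase):
    def generate(self, output_path, verbose=False):
        lines = ["# %s" % self.test_summary.name]
        for suite in self.test_summary.suites:
            lines.append(
                "- %s:%s => %s" % (suite.env_name, suite.test_name, suite.status.name)
            )
        with open(output_path, mode="w", encoding="utf8") as fp:
            fp.write("\n".join(lines))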

View File

@@ -0,0 +1,99 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import datetime
import json
import os

import click

from platformio.unittest.reports.base import TestReportBase
from platformio.unittest.result import TestStatus


class JsonTestReport(TestReportBase):
    def generate(self, output_path, verbose=False):
        if os.path.isdir(output_path):
            output_path = os.path.join(
                output_path,
                "pio-test-report-%s-%s.json"
                % (
                    self.test_summary.name,
                    datetime.datetime.now().strftime("%Y%m%d%H%M%S"),
                ),
            )
        with open(output_path, mode="w", encoding="utf8") as fp:
            json.dump(self.to_json(), fp)
        if verbose:
            click.secho(f"Saved JSON report to {output_path}", fg="green")

    def to_json(self):
        result = dict(
            version="1.0",
            name=self.test_summary.name,
            duration=self.test_summary.duration,
            testcase_nums=self.test_summary.case_nums,
            error_nums=self.test_summary.get_status_nums(TestStatus.ERRORED),
            failure_nums=self.test_summary.get_status_nums(TestStatus.FAILED),
            skipped_nums=self.test_summary.get_status_nums(TestStatus.SKIPPED),
            test_suites=[],
        )
        for test_suite in self.test_summary.suites:
            result["test_suites"].append(self.test_suite_to_json(test_suite))
        return result

    def test_suite_to_json(self, test_suite):
        result = dict(
            env_name=test_suite.env_name,
            test_name=test_suite.test_name,
            status=test_suite.status.name,
            duration=test_suite.duration,
            timestamp=datetime.datetime.fromtimestamp(test_suite.timestamp).strftime(
                "%Y-%m-%dT%H:%M:%S"
            )
            if test_suite.timestamp
            else None,
            testcase_nums=len(test_suite.cases),
            error_nums=test_suite.get_status_nums(TestStatus.ERRORED),
            failure_nums=test_suite.get_status_nums(TestStatus.FAILED),
            skipped_nums=test_suite.get_status_nums(TestStatus.SKIPPED),
            test_cases=[],
        )
        for test_case in test_suite.cases:
            result["test_cases"].append(self.test_case_to_json(test_case))
        return result

    @staticmethod
    def test_case_to_json(test_case):
        result = dict(
            name=test_case.name,
            status=test_case.status.name,
            message=test_case.message,
            stdout=test_case.stdout,
            duration=test_case.duration,
            exception=None,
            source=None,
        )
        if test_case.exception:
            result["exception"] = "%s: %s" % (
                test_case.exception.__class__.__name__,
                test_case.exception,
            )
        if test_case.source:
            result["source"] = dict(
                file=test_case.source.file, line=test_case.source.line
            )
        return result
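
Note: a runnable sketch of the serializer above. The real TestSummary's populating API is not shown in this diff, so a minimal stand-in object is used; the JsonTestReport import path is assumed:

# Illustrative: a minimal stand-in for TestSummary, since its populating
# API is not part of this diff. The import path for JsonTestReport is assumed.
import json

from platformio.unittest.reports.json import JsonTestReport
from platformio.unittest.result import TestCase, TestStatus, TestSuite


class _FakeSummary:
    name = "calculator"
    duration = 1.23
    case_nums = 2
    suites = []

    def get_status_nums(self, status):
        return sum(s.get_status_nums(status) for s in self.suites)


summary = _FakeSummary()
suite = TestSuite("native", "test_calculator")
suite.on_start()
suite.add_case(TestCase("addition", TestStatus.PASSED))
suite.add_case(TestCase("division", TestStatus.FAILED, message="Expected 32 Was 33"))
suite.on_finish()
summary.suites.append(suite)

print(json.dumps(JsonTestReport(summary).to_json(), indent=2))
# Top-level keys, per to_json() above: version, name, duration, testcase_nums,
# error_nums, failure_nums, skipped_nums, test_suites[...]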

View File

@@ -0,0 +1,102 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import datetime
import os
import xml.etree.ElementTree as ET

import click

from platformio.unittest.reports.base import TestReportBase
from platformio.unittest.result import TestStatus


class JunitTestReport(TestReportBase):
    def generate(self, output_path, verbose=False):
        if os.path.isdir(output_path):
            output_path = os.path.join(
                output_path,
                "pio-test-report-%s-%s-junit.xml"
                % (
                    self.test_summary.name,
                    datetime.datetime.now().strftime("%Y%m%d%H%M%S"),
                ),
            )
        with open(output_path, mode="wb") as fp:
            self.build_xml_tree().write(fp, encoding="utf8")
        if verbose:
            click.secho(f"Saved JUnit report to {output_path}", fg="green")

    def build_xml_tree(self):
        root = ET.Element("testsuites")
        root.set("name", self.test_summary.name)
        root.set("tests", str(self.test_summary.case_nums))
        root.set("errors", str(self.test_summary.get_status_nums(TestStatus.ERRORED)))
        root.set("failures", str(self.test_summary.get_status_nums(TestStatus.FAILED)))
        root.set("time", str(self.test_summary.duration))
        for suite in self.test_summary.suites:
            root.append(self.build_testsuite_node(suite))
        return ET.ElementTree(root)

    def build_testsuite_node(self, test_suite):
        element = ET.Element("testsuite")
        element.set("name", f"{test_suite.env_name}:{test_suite.test_name}")
        element.set("tests", str(len(test_suite.cases)))
        element.set("errors", str(test_suite.get_status_nums(TestStatus.ERRORED)))
        element.set("failures", str(test_suite.get_status_nums(TestStatus.FAILED)))
        element.set("skipped", str(test_suite.get_status_nums(TestStatus.SKIPPED)))
        element.set("time", str(test_suite.duration))
        if test_suite.timestamp:
            element.set(
                "timestamp",
                datetime.datetime.fromtimestamp(test_suite.timestamp).strftime(
                    "%Y-%m-%dT%H:%M:%S"
                ),
            )
        for test_case in test_suite.cases:
            element.append(self.build_testcase_node(test_case))
        return element

    def build_testcase_node(self, test_case):
        element = ET.Element("testcase")
        element.set("name", str(test_case.name))
        element.set("time", str(test_case.duration))
        element.set("status", str(test_case.status.name))
        if test_case.status == TestStatus.SKIPPED:
            element.append(ET.Element("skipped"))
        elif test_case.status == TestStatus.ERRORED:
            element.append(self.build_testcase_error_node(test_case))
        elif test_case.status == TestStatus.FAILED:
            element.append(self.build_testcase_failure_node(test_case))
        return element

    @staticmethod
    def build_testcase_error_node(test_case):
        element = ET.Element("error")
        element.set("type", test_case.exception.__class__.__name__)
        element.set("message", str(test_case.exception))
        if test_case.stdout:
            element.text = test_case.stdout
        return element

    @staticmethod
    def build_testcase_failure_node(test_case):
        element = ET.Element("failure")
        if test_case.message:
            element.set("message", test_case.message)
        if test_case.stdout:
            element.text = test_case.stdout
        return element
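
Note: a quick way to inspect the tree built above, reusing the populated stand-in summary from the JSON report sketch earlier; the import path is assumed:

# Illustrative: inspect the root attributes written by build_xml_tree(),
# using the "summary" stand-in built in the JSON sketch above.
from platformio.unittest.reports.junit import JunitTestReport  # path assumed

root = JunitTestReport(summary).build_xml_tree().getroot()
print(root.tag, root.get("tests"), root.get("errors"), root.get("failures"))
# => "testsuites 2 0 1" for the stand-in summary; the functional test at the
# end of this commit expects tests=11, errors=2, failures=1 for the real
# calculator example.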

View File

@@ -21,7 +21,7 @@ from platformio.unittest.result import TestStatus
 class StdoutTestReport(TestReportBase):
-    def generate(self, verbose=False):
+    def generate(self, verbose=False):  # pylint: disable=arguments-differ
         click.echo()
         tabular_data = []

View File

@@ -44,43 +44,52 @@ class TestCaseSource:
 class TestCase:
     def __init__(  # pylint: disable=too-many-arguments
-        self, name, status, message=None, stdout=None, source=None
+        self,
+        name,
+        status,
+        message=None,
+        stdout=None,
+        source=None,
+        duration=0,
+        exception=None,
     ):
         assert isinstance(status, TestStatus)
+        if status == TestStatus.ERRORED:
+            assert isinstance(exception, Exception)
         self.name = name.strip()
         self.status = status
-        self.message = message.strip() if message else None
-        self.stdout = stdout.strip() if stdout else None
+        self.message = message
+        self.stdout = stdout
         self.source = source
+        self.duration = duration
+        self.exception = exception


 class TestSuite:
     def __init__(self, env_name, test_name):
         self.env_name = env_name
         self.test_name = test_name
+        self.timestamp = 0
         self.duration = 0
         self._cases = []
-        self._start_timestamp = 0
         self._finished = False
-        self._error = None

     @property
     def cases(self):
         return self._cases

+    def get_status_nums(self, status):
+        return len([True for c in self._cases if c.status == status])
+
     @property
     def status(self):
-        if self._error:
-            return TestStatus.ERRORED
-        if self.get_status_nums(TestStatus.FAILED):
-            return TestStatus.FAILED
+        for s in (TestStatus.ERRORED, TestStatus.FAILED):
+            if self.get_status_nums(s):
+                return s
         if self._cases and any(c.status == TestStatus.PASSED for c in self._cases):
             return TestStatus.PASSED
         return TestStatus.SKIPPED

-    def get_status_nums(self, status):
-        return len([True for c in self._cases if c.status == status])
-
     def add_case(self, case: TestCase):
         assert isinstance(case, TestCase)
         self._cases.append(case)
@@ -89,16 +98,13 @@ class TestSuite:
         return self._finished

     def on_start(self):
-        self._start_timestamp = time.time()
+        self.timestamp = time.time()

-    def on_error(self, exc):
-        self._error = exc
-
     def on_finish(self):
         if self.is_finished():
             return
         self._finished = True
-        self.duration = time.time() - self._start_timestamp
+        self.duration = time.time() - self.timestamp


 class TestSummary:
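
Note: the reworked status property gives ERRORED/FAILED precedence over PASSED. A small, runnable illustration; the case names are made up, and the failure message mirrors the Unity sample used elsewhere in this commit:

# Illustrative: a suite with one passed and one failed case reports FAILED,
# because ERRORED/FAILED take precedence in the status property above.
from platformio.unittest.result import TestCase, TestStatus, TestSuite

suite = TestSuite("native", "test_calculator")
suite.add_case(TestCase("addition", TestStatus.PASSED))
suite.add_case(TestCase("division", TestStatus.FAILED, message="Expected 32 Was 33"))
assert suite.status == TestStatus.FAILED
assert suite.get_status_nums(TestStatus.FAILED) == 1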

View File

@@ -14,6 +14,7 @@
 import click

+from platformio.exception import ReturnErrorCode
 from platformio.platform.factory import PlatformFactory
 from platformio.unittest.exception import UnitTestSuiteError
 from platformio.unittest.result import TestCase, TestCaseSource, TestStatus
@@ -85,12 +86,17 @@ class TestRunnerBase(TestRunnerNativeMixin, TestRunnerEmbeddedMixin):
         self.test_suite.on_start()
         try:
             self.setup()
-            for stage in ("build", "upload", "run"):
+            for stage in ("building", "uploading", "testing"):
                 getattr(self, f"stage_{stage}")()
         except Exception as exc:  # pylint: disable=broad-except
-            if str(exc) != "1":  # from returncode
-                click.secho(str(exc), fg="red", err=True)
-            self.test_suite.on_error(exc)
+            click.secho(str(exc), fg="red", err=True)
+            self.test_suite.add_case(
+                TestCase(
+                    name=f"{self.test_suite.env_name}:{self.test_suite.test_name}",
+                    status=TestStatus.ERRORED,
+                    exception=exc,
+                )
+            )
         finally:
             self.test_suite.on_finish()
             self.teardown()
@@ -98,7 +104,7 @@ class TestRunnerBase(TestRunnerNativeMixin, TestRunnerEmbeddedMixin):
     def setup(self):
         pass

-    def stage_build(self):
+    def stage_building(self):
         if self.options.without_building:
             return None
         click.secho("Building...", bold=self.options.verbose)
@@ -107,9 +113,12 @@ class TestRunnerBase(TestRunnerNativeMixin, TestRunnerEmbeddedMixin):
             targets.append("__debug")
         if self.platform.is_embedded():
             targets.append("checkprogsize")
-        return self.run_project_targets(targets)
+        try:
+            return self.run_project_targets(targets)
+        except ReturnErrorCode:
+            raise UnitTestSuiteError("Building stage has failed, see errors above.")

-    def stage_upload(self):
+    def stage_uploading(self):
         if self.options.without_uploading or not self.platform.is_embedded():
             return None
         click.secho("Uploading...", bold=self.options.verbose)
@@ -120,15 +129,18 @@ class TestRunnerBase(TestRunnerNativeMixin, TestRunnerEmbeddedMixin):
         targets.append("__test")
         if not self.options.without_debugging:
             targets.append("__debug")
-        return self.run_project_targets(targets)
+        try:
+            return self.run_project_targets(targets)
+        except ReturnErrorCode:
+            raise UnitTestSuiteError("Uploading stage has failed, see errors above.")

-    def stage_run(self):
+    def stage_testing(self):
         if self.options.without_testing:
             return None
-        click.secho("Running...", bold=self.options.verbose)
+        click.secho("Testing...", bold=self.options.verbose)
         if self.platform.is_embedded():
-            return self.stage_run_on_target()
-        return self.stage_run_on_host()
+            return self.stage_testing_on_target()
+        return self.stage_testing_on_host()

     def teardown(self):
         pass
@@ -138,19 +150,16 @@ class TestRunnerBase(TestRunnerNativeMixin, TestRunnerEmbeddedMixin):
         from platformio.commands.run.command import cli as run_cmd

         assert self.cmd_ctx
-        try:
-            return self.cmd_ctx.invoke(
-                run_cmd,
-                project_conf=self.project_config.path,
-                upload_port=self.options.upload_port,
-                verbose=self.options.verbose,
-                silent=not self.options.verbose,
-                environment=[self.test_suite.env_name],
-                disable_auto_clean="nobuild" in targets,
-                target=targets,
-            )
-        except Exception as exc:
-            raise UnitTestSuiteError(exc)
+        return self.cmd_ctx.invoke(
+            run_cmd,
+            project_conf=self.project_config.path,
+            upload_port=self.options.upload_port,
+            verbose=self.options.verbose,
+            silent=not self.options.verbose,
+            environment=[self.test_suite.env_name],
+            disable_auto_clean="nobuild" in targets,
+            target=targets,
+        )

     def configure_build_env(self, env):  # pylint: disable=no-self-use
         """
@@ -159,7 +168,7 @@ class TestRunnerBase(TestRunnerNativeMixin, TestRunnerEmbeddedMixin):
         """
         return env

-    def on_run_output(self, data):
+    def on_test_output(self, data):
         click.echo(data, nl=False)
         self.parse_testcases(data)

@@ -178,7 +187,7 @@ class TestRunnerBase(TestRunnerNativeMixin, TestRunnerEmbeddedMixin):
             source = None
             if "source_file" in data:
                 source = TestCaseSource(
                     file=data["source_file"], line=int(data.get("source_line"))
                 )
             self.test_suite.add_case(
                 TestCase(

View File

@@ -25,7 +25,7 @@ class TestRunnerEmbeddedMixin:
     SERIAL_TIMEOUT = 600

-    def stage_run_on_target(self):
+    def stage_testing_on_target(self):
         click.echo(
             "If you don't see any output for the first 10 secs, "
             "please reset board (press reset button)"
@@ -68,7 +68,7 @@ class TestRunnerEmbeddedMixin:
                 continue
             if isinstance(line, bytes):
                 line = line.decode("utf8", "ignore")
-            self.on_run_output(line)
+            self.on_test_output(line)
         ser.close()

     def get_test_port(self):

View File

@@ -20,12 +20,12 @@ from platformio.unittest.exception import UnitTestError
 class TestRunnerNativeMixin:
-    def stage_run_on_host(self):
+    def stage_testing_on_host(self):
         build_dir = self.project_config.get("platformio", "build_dir")
         result = proc.exec_command(
             [os.path.join(build_dir, self.test_suite.env_name, "program")],
-            stdout=proc.LineBufferedAsyncPipe(self.on_run_output),
-            stderr=proc.LineBufferedAsyncPipe(self.on_run_output),
+            stdout=proc.LineBufferedAsyncPipe(self.on_test_output),
+            stderr=proc.LineBufferedAsyncPipe(self.on_test_output),
         )
         if result["returncode"] == 0:
             return True
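
Note: this host path is where the changelog's issue #3429 entry applies: on POSIX, a negative return code means the program was killed by a signal. A standalone illustration, not taken from this commit:

# Standalone illustration: decoding a negative returncode such as
# proc.exec_command(...)["returncode"] would produce for a crashed program.
import signal

returncode = -11  # example value for a crashed native test program
if returncode < 0:
    print("Program crashed with signal:", signal.Signals(-returncode).name)
    # => Program crashed with signal: SIGSEGV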

View File

@@ -31,7 +31,7 @@ class UnityTestRunner(TestRunnerBase):
     # test/test_foo.cpp:44:test_function_foo:FAIL: Expected 32 Was 33
     TESTCASE_PARSE_RE = re.compile(
         r"(?P<source_file>[^:]+):(?P<source_line>\d+):(?P<name>[^:]+):"
-        r"(?P<status>PASS|IGNORE|FAIL)(?:(?P<message>.+)$)?"
+        r"(?P<status>PASS|IGNORE|FAIL)(:\s*(?P<message>.+)$)?"
     )

     UNITY_CONFIG_H = """
@@ -238,7 +238,7 @@ void unityOutputComplete(void) { unittest_uart_end(); }
             encoding="utf8",
         )

-    def on_run_output(self, data):
+    def on_test_output(self, data):
         if not data.strip():
             return click.echo(data, nl=False)
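
Note: the tightened pattern now consumes the colon before the optional message, so the captured message no longer carries a leading ":". A standalone check against the sample line quoted in the source comment:

# Standalone check of the updated Unity output pattern.
import re

TESTCASE_PARSE_RE = re.compile(
    r"(?P<source_file>[^:]+):(?P<source_line>\d+):(?P<name>[^:]+):"
    r"(?P<status>PASS|IGNORE|FAIL)(:\s*(?P<message>.+)$)?"
)

line = "test/test_foo.cpp:44:test_function_foo:FAIL: Expected 32 Was 33"
match = TESTCASE_PARSE_RE.match(line)
assert match.group("source_file") == "test/test_foo.cpp"
assert match.group("source_line") == "44"
assert match.group("status") == "FAIL"
assert match.group("message") == "Expected 32 Was 33"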

View File

@@ -13,13 +13,15 @@
 # limitations under the License.

 import os
+import xml.etree.ElementTree as ET
 from pathlib import Path

 from platformio import proc
 from platformio.unittest.command import unittest_cmd


-def test_calculator_example():
+def test_calculator_example(tmp_path: Path):
+    junit_output_path = tmp_path / "junit.xml"
     result = proc.exec_command(
         [
             "platformio",
@@ -27,15 +29,37 @@ def test_calculator_example():
             "-d",
             os.path.join("examples", "unit-testing", "calculator"),
             "-e",
+            "uno",
+            "-e",
             "native",
+            "--output-format=junit",
+            "--output-path",
+            str(junit_output_path),
         ]
     )
     assert result["returncode"] != 0
     # pylint: disable=unsupported-membership-test
     assert all(
-        s in (result["err"] + result["out"]) for s in ("PASSED", "FAILED")
+        s in (result["err"] + result["out"]) for s in ("ERRORED", "PASSED", "FAILED")
     ), result["out"]

+    # test JUnit output
+    junit_testsuites = ET.parse(junit_output_path).getroot()
+    assert int(junit_testsuites.get("tests")) == 11
+    assert int(junit_testsuites.get("errors")) == 2
+    assert int(junit_testsuites.get("failures")) == 1
+    assert len(junit_testsuites.findall("testsuite")) == 9
+    junit_errored_testcase = junit_testsuites.find(
+        ".//testcase[@name='uno:test_embedded']"
+    )
+    assert junit_errored_testcase.get("status") == "ERRORED"
+    assert junit_errored_testcase.find("error").get("type") == "UnitTestSuiteError"
+    junit_failed_testcase = junit_testsuites.find(
+        ".//testcase[@name='test_calculator_division']"
+    )
+    assert junit_failed_testcase.get("status") == "FAILED"
+    assert junit_failed_testcase.find("failure").get("message") == "Expected 32 Was 33"
+

 def test_nested_suites(clirunner, validate_cliresult, tmp_path: Path):
     project_dir = tmp_path / "project"