Generate reports in JUnit and JSON formats // Resolve #2891
@@ -44,6 +44,7 @@ Please check `Migration guide from 5.x to 6.0 <https://docs.platformio.org/en/la

* **Unit Testing**

+  - Generate reports in JUnit and JSON formats using the `pio test --output-format <https://docs.platformio.org/en/latest/core/userguide/cmd_test.html#cmdoption-pio-test-output-format>`__ option (`issue #2891 <https://github.com/platformio/platformio-core/issues/2891>`_)
  - Added support for test hierarchies (nested test suites) (`issue #4135 <https://github.com/platformio/platformio-core/issues/4135>`_)
  - Provide more information when a native program crashes on the host (errors with a negative return code) (`issue #3429 <https://github.com/platformio/platformio-core/issues/3429>`_)
  - Fixed an issue where command line parameters ("--ignore", "--filter") did not override values defined in the |PIOCONF| (`issue #3845 <https://github.com/platformio/platformio-core/issues/3845>`_)
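The new reporting options appear in the command diff below; a typical invocation (illustrative, environment names assumed) might be:

    pio test -e native --output-format=junit --output-path=report-junit.xml

Per the option definitions below, ``--output-path`` defaults to the current working directory; when it points to a directory, the report classes generate a timestamped file name inside it.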
Submodule docs updated: 4e24a04fd1...ad5788644c
Submodule examples updated: 383d740230...18c0d44404
@@ -76,6 +76,12 @@ from platformio.unittest.runners.factory import TestRunnerFactory
    type=click.IntRange(0, 1),
    help="Set initial DTR line state for Serial Monitor",
)
+@click.option("--output-format", type=click.Choice(["json", "junit"]))
+@click.option(
+    "--output-path",
+    default=os.getcwd,
+    type=click.Path(dir_okay=True, resolve_path=True),
+)
@click.option("--verbose", "-v", is_flag=True)
@click.pass_context
def unittest_cmd(  # pylint: disable=too-many-arguments,too-many-locals,redefined-builtin
@@ -93,6 +99,8 @@ def unittest_cmd(  # pylint: disable=too-many-arguments,too-many-locals,redefine
    no_reset,
    monitor_rts,
    monitor_dtr,
+    output_format,
+    output_path,
    verbose,
):
    app.set_session_var("custom_project_conf", project_conf)
@@ -104,7 +112,8 @@ def unittest_cmd(  # pylint: disable=too-many-arguments,too-many-locals,redefine

    if not verbose:
        click.echo("Verbose mode can be enabled via `-v, --verbose` option")
-    click.secho("Collected %d test suites" % len(test_names), bold=True)
+    click.secho("Collected %d tests" % len(test_names), bold=True, nl=False)
+    click.echo(" (%s)" % ", ".join(test_names))

    test_summary = TestSummary(os.path.basename(project_dir))
    default_envs = config.default_envs()
@@ -156,12 +165,23 @@ def unittest_cmd(  # pylint: disable=too-many-arguments,too-many-locals,redefine
        runner.start(ctx)
        print_suite_footer(test_suite)

+    # automatically generate JSON report for PIO IDE
+    TestReportFactory.new("json", test_summary).generate(
+        os.path.join(
+            config.get("platformio", "build_dir"), "pio-test-report-latest.json"
+        )
+    )
+
    # Reset custom project config
    app.set_session_var("custom_project_conf", None)

    stdout_report = TestReportFactory.new("stdout", test_summary)
    stdout_report.generate(verbose=verbose)

+    if output_format:
+        custom_report = TestReportFactory.new(output_format, test_summary)
+        custom_report.generate(output_path=output_path, verbose=True)
+
    if test_summary.is_errored or test_summary.get_status_nums(TestStatus.FAILED):
        raise exception.ReturnErrorCode(1)
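``TestReportFactory`` is imported and used above but its implementation is not part of this diff; a minimal sketch of what its ``new()`` method plausibly does (implementation assumed, only the call signature comes from the diff):

    # Hypothetical sketch -- the real factory is not shown in this commit.
    import importlib


    class TestReportFactory:
        @staticmethod
        def new(format_, test_summary):
            # "json" -> reports.json.JsonTestReport, "junit" -> reports.junit.JunitTestReport,
            # "stdout" -> reports.stdout.StdoutTestReport
            mod = importlib.import_module(f"platformio.unittest.reports.{format_}")
            return getattr(mod, f"{format_.capitalize()}TestReport")(test_summary)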
@@ -21,7 +21,7 @@ class TestReportBase:
    def __init__(self, test_summary):
        self.test_summary = test_summary

-    def generate(self):
+    def generate(self, output_path, verbose):
        raise NotImplementedError()
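Any additional format would plug in by subclassing ``TestReportBase`` and implementing the new ``generate(output_path, verbose)`` contract; a hypothetical CSV report, just to show the shape:

    import csv


    class CsvTestReport(TestReportBase):  # hypothetical, not part of this commit
        def generate(self, output_path, verbose=False):
            with open(output_path, mode="w", encoding="utf8", newline="") as fp:
                writer = csv.writer(fp)
                writer.writerow(["suite", "case", "status", "duration"])
                for suite in self.test_summary.suites:
                    for case in suite.cases:
                        writer.writerow(
                            [
                                f"{suite.env_name}:{suite.test_name}",
                                case.name,
                                case.status.name,
                                case.duration,
                            ]
                        )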
platformio/unittest/reports/json.py (new file)
@@ -0,0 +1,99 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import datetime
import json
import os

import click

from platformio.unittest.reports.base import TestReportBase
from platformio.unittest.result import TestStatus


class JsonTestReport(TestReportBase):
    def generate(self, output_path, verbose=False):
        if os.path.isdir(output_path):
            output_path = os.path.join(
                output_path,
                "pio-test-report-%s-%s.json"
                % (
                    self.test_summary.name,
                    datetime.datetime.now().strftime("%Y%m%d%H%M%S"),
                ),
            )

        with open(output_path, mode="w", encoding="utf8") as fp:
            json.dump(self.to_json(), fp)

        if verbose:
            click.secho(f"Saved JSON report to the {output_path}", fg="green")

    def to_json(self):
        result = dict(
            version="1.0",
            name=self.test_summary.name,
            duration=self.test_summary.duration,
            testcase_nums=self.test_summary.case_nums,
            error_nums=self.test_summary.get_status_nums(TestStatus.ERRORED),
            failure_nums=self.test_summary.get_status_nums(TestStatus.FAILED),
            skipped_nums=self.test_summary.get_status_nums(TestStatus.SKIPPED),
            test_suites=[],
        )
        for test_suite in self.test_summary.suites:
            result["test_suites"].append(self.test_suite_to_json(test_suite))
        return result

    def test_suite_to_json(self, test_suite):
        result = dict(
            env_name=test_suite.env_name,
            test_name=test_suite.test_name,
            status=test_suite.status.name,
            duration=test_suite.duration,
            timestamp=datetime.datetime.fromtimestamp(test_suite.timestamp).strftime(
                "%Y-%m-%dT%H:%M:%S"
            )
            if test_suite.timestamp
            else None,
            testcase_nums=len(test_suite.cases),
            error_nums=test_suite.get_status_nums(TestStatus.ERRORED),
            failure_nums=test_suite.get_status_nums(TestStatus.FAILED),
            skipped_nums=test_suite.get_status_nums(TestStatus.SKIPPED),
            test_cases=[],
        )
        for test_case in test_suite.cases:
            result["test_cases"].append(self.test_case_to_json(test_case))
        return result

    @staticmethod
    def test_case_to_json(test_case):
        result = dict(
            name=test_case.name,
            status=test_case.status.name,
            message=test_case.message,
            stdout=test_case.stdout,
            duration=test_case.duration,
            exception=None,
            source=None,
        )
        if test_case.exception:
            result["exception"] = "%s: %s" % (
                test_case.exception.__class__.__name__,
                test_case.exception,
            )
        if test_case.source:
            result["source"] = dict(
                file=test_case.source.file, line=test_case.source.line
            )
        return result
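For reference, ``to_json()`` above produces a document shaped roughly like this (abridged, values illustrative):

    {
      "version": "1.0",
      "name": "calculator",
      "duration": 12.3,
      "testcase_nums": 11,
      "error_nums": 2,
      "failure_nums": 1,
      "skipped_nums": 0,
      "test_suites": [
        {
          "env_name": "native",
          "test_name": "test_calculator",
          "status": "FAILED",
          "duration": 1.2,
          "timestamp": "2022-01-01T12:00:00",
          "testcase_nums": 5,
          "error_nums": 0,
          "failure_nums": 1,
          "skipped_nums": 0,
          "test_cases": [
            {
              "name": "test_calculator_division",
              "status": "FAILED",
              "message": "Expected 32 Was 33",
              "stdout": "...",
              "duration": 0.01,
              "exception": null,
              "source": {"file": "test/test_calculator.cpp", "line": 44}
            }
          ]
        }
      ]
    }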
platformio/unittest/reports/junit.py (new file)
@@ -0,0 +1,102 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import datetime
import os
import xml.etree.ElementTree as ET

import click

from platformio.unittest.reports.base import TestReportBase
from platformio.unittest.result import TestStatus


class JunitTestReport(TestReportBase):
    def generate(self, output_path, verbose=False):
        if os.path.isdir(output_path):
            output_path = os.path.join(
                output_path,
                "pio-test-report-%s-%s-junit.xml"
                % (
                    self.test_summary.name,
                    datetime.datetime.now().strftime("%Y%m%d%H%M%S"),
                ),
            )

        with open(output_path, mode="wb") as fp:
            self.build_xml_tree().write(fp, encoding="utf8")

        if verbose:
            click.secho(f"Saved JUnit report to the {output_path}", fg="green")

    def build_xml_tree(self):
        root = ET.Element("testsuites")
        root.set("name", self.test_summary.name)
        root.set("tests", str(self.test_summary.case_nums))
        root.set("errors", str(self.test_summary.get_status_nums(TestStatus.ERRORED)))
        root.set("failures", str(self.test_summary.get_status_nums(TestStatus.FAILED)))
        root.set("time", str(self.test_summary.duration))
        for suite in self.test_summary.suites:
            root.append(self.build_testsuite_node(suite))
        return ET.ElementTree(root)

    def build_testsuite_node(self, test_suite):
        element = ET.Element("testsuite")
        element.set("name", f"{test_suite.env_name}:{test_suite.test_name}")
        element.set("tests", str(len(test_suite.cases)))
        element.set("errors", str(test_suite.get_status_nums(TestStatus.ERRORED)))
        element.set("failures", str(test_suite.get_status_nums(TestStatus.FAILED)))
        element.set("skipped", str(test_suite.get_status_nums(TestStatus.SKIPPED)))
        element.set("time", str(test_suite.duration))
        if test_suite.timestamp:
            element.set(
                "timestamp",
                datetime.datetime.fromtimestamp(test_suite.timestamp).strftime(
                    "%Y-%m-%dT%H:%M:%S"
                ),
            )
        for test_case in test_suite.cases:
            element.append(self.build_testcase_node(test_case))
        return element

    def build_testcase_node(self, test_case):
        element = ET.Element("testcase")
        element.set("name", str(test_case.name))
        element.set("time", str(test_case.duration))
        element.set("status", str(test_case.status.name))
        if test_case.status == TestStatus.SKIPPED:
            element.append(ET.Element("skipped"))
        elif test_case.status == TestStatus.ERRORED:
            element.append(self.build_testcase_error_node(test_case))
        elif test_case.status == TestStatus.FAILED:
            element.append(self.build_testcase_failure_node(test_case))
        return element

    @staticmethod
    def build_testcase_error_node(test_case):
        element = ET.Element("error")
        element.set("type", test_case.exception.__class__.__name__)
        element.set("message", str(test_case.exception))
        if test_case.stdout:
            element.text = test_case.stdout
        return element

    @staticmethod
    def build_testcase_failure_node(test_case):
        element = ET.Element("failure")
        if test_case.message:
            element.set("message", test_case.message)
        if test_case.stdout:
            element.text = test_case.stdout
        return element
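Likewise, ``build_xml_tree()`` yields standard JUnit XML, roughly like this (abridged; the counts and case names are taken from the functional test at the end of this diff, other values illustrative):

    <testsuites name="calculator" tests="11" errors="2" failures="1" time="...">
      <testsuite name="native:test_calculator" tests="5" errors="0" failures="1" skipped="0" time="..." timestamp="...">
        <testcase name="test_calculator_division" time="..." status="FAILED">
          <failure message="Expected 32 Was 33">...</failure>
        </testcase>
      </testsuite>
      <testsuite name="uno:test_embedded" tests="1" errors="1" failures="0" skipped="0" time="...">
        <testcase name="uno:test_embedded" time="..." status="ERRORED">
          <error type="UnitTestSuiteError" message="Uploading stage has failed, see errors above."/>
        </testcase>
      </testsuite>
    </testsuites>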
@@ -21,7 +21,7 @@ from platformio.unittest.result import TestStatus


class StdoutTestReport(TestReportBase):
-    def generate(self, verbose=False):
+    def generate(self, verbose=False):  # pylint: disable=arguments-differ
        click.echo()

        tabular_data = []
@@ -44,43 +44,52 @@ class TestCaseSource:


class TestCase:
    def __init__(  # pylint: disable=too-many-arguments
-        self, name, status, message=None, stdout=None, source=None
+        self,
+        name,
+        status,
+        message=None,
+        stdout=None,
+        source=None,
+        duration=0,
+        exception=None,
    ):
        assert isinstance(status, TestStatus)
+        if status == TestStatus.ERRORED:
+            assert isinstance(exception, Exception)
        self.name = name.strip()
        self.status = status
-        self.message = message.strip() if message else None
-        self.stdout = stdout.strip() if stdout else None
+        self.message = message
+        self.stdout = stdout
        self.source = source
+        self.duration = duration
+        self.exception = exception


class TestSuite:
    def __init__(self, env_name, test_name):
        self.env_name = env_name
        self.test_name = test_name
        self.timestamp = 0
        self.duration = 0
        self._cases = []
        self._start_timestamp = 0
        self._finished = False
        self._error = None

    @property
    def cases(self):
        return self._cases

+    def get_status_nums(self, status):
+        return len([True for c in self._cases if c.status == status])
+
    @property
    def status(self):
        if self._error:
            return TestStatus.ERRORED
-        if self.get_status_nums(TestStatus.FAILED):
-            return TestStatus.FAILED
+        for s in (TestStatus.ERRORED, TestStatus.FAILED):
+            if self.get_status_nums(s):
+                return s
        if self._cases and any(c.status == TestStatus.PASSED for c in self._cases):
            return TestStatus.PASSED
        return TestStatus.SKIPPED

-    def get_status_nums(self, status):
-        return len([True for c in self._cases if c.status == status])
-
    def add_case(self, case: TestCase):
        assert isinstance(case, TestCase)
        self._cases.append(case)
@@ -89,16 +98,13 @@ class TestSuite:
        return self._finished

    def on_start(self):
        self._start_timestamp = time.time()

    def on_error(self, exc):
        self._error = exc
        self.timestamp = time.time()

    def on_finish(self):
        if self.is_finished():
            return
        self._finished = True
-        self.duration = time.time() - self._start_timestamp
+        self.duration = time.time() - self.timestamp


class TestSummary:
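A short illustration of the data model above (class names and behavior from this diff; the scenario itself is made up):

    from platformio.unittest.result import TestCase, TestStatus, TestSuite

    suite = TestSuite(env_name="native", test_name="test_calculator")
    suite.add_case(TestCase(name="test_addition", status=TestStatus.PASSED))
    suite.add_case(
        TestCase(
            name="test_division",
            status=TestStatus.FAILED,
            message="Expected 32 Was 33",
        )
    )
    assert suite.get_status_nums(TestStatus.FAILED) == 1
    assert suite.status == TestStatus.FAILED  # any failure outranks PASSED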
@@ -14,6 +14,7 @@

import click

+from platformio.exception import ReturnErrorCode
from platformio.platform.factory import PlatformFactory
from platformio.unittest.exception import UnitTestSuiteError
from platformio.unittest.result import TestCase, TestCaseSource, TestStatus
@@ -85,12 +86,17 @@ class TestRunnerBase(TestRunnerNativeMixin, TestRunnerEmbeddedMixin):
        self.test_suite.on_start()
        try:
            self.setup()
-            for stage in ("build", "upload", "run"):
+            for stage in ("building", "uploading", "testing"):
                getattr(self, f"stage_{stage}")()
        except Exception as exc:  # pylint: disable=broad-except
-            if str(exc) != "1":  # from returncode
-                click.secho(str(exc), fg="red", err=True)
            self.test_suite.on_error(exc)
+            click.secho(str(exc), fg="red", err=True)
+            self.test_suite.add_case(
+                TestCase(
+                    name=f"{self.test_suite.env_name}:{self.test_suite.test_name}",
+                    status=TestStatus.ERRORED,
+                    exception=exc,
+                )
+            )
        finally:
            self.test_suite.on_finish()
            self.teardown()
@@ -98,7 +104,7 @@ class TestRunnerBase(TestRunnerNativeMixin, TestRunnerEmbeddedMixin):
    def setup(self):
        pass

-    def stage_build(self):
+    def stage_building(self):
        if self.options.without_building:
            return None
        click.secho("Building...", bold=self.options.verbose)
@@ -107,9 +113,12 @@ class TestRunnerBase(TestRunnerNativeMixin, TestRunnerEmbeddedMixin):
            targets.append("__debug")
        if self.platform.is_embedded():
            targets.append("checkprogsize")
-        return self.run_project_targets(targets)
+        try:
+            return self.run_project_targets(targets)
+        except ReturnErrorCode:
+            raise UnitTestSuiteError("Building stage has failed, see errors above.")

-    def stage_upload(self):
+    def stage_uploading(self):
        if self.options.without_uploading or not self.platform.is_embedded():
            return None
        click.secho("Uploading...", bold=self.options.verbose)
@@ -120,15 +129,18 @@ class TestRunnerBase(TestRunnerNativeMixin, TestRunnerEmbeddedMixin):
        targets.append("__test")
        if not self.options.without_debugging:
            targets.append("__debug")
-        return self.run_project_targets(targets)
+        try:
+            return self.run_project_targets(targets)
+        except ReturnErrorCode:
+            raise UnitTestSuiteError("Uploading stage has failed, see errors above.")

-    def stage_run(self):
+    def stage_testing(self):
        if self.options.without_testing:
            return None
-        click.secho("Running...", bold=self.options.verbose)
+        click.secho("Testing...", bold=self.options.verbose)
        if self.platform.is_embedded():
-            return self.stage_run_on_target()
-        return self.stage_run_on_host()
+            return self.stage_testing_on_target()
+        return self.stage_testing_on_host()

    def teardown(self):
        pass
@@ -138,19 +150,16 @@ class TestRunnerBase(TestRunnerNativeMixin, TestRunnerEmbeddedMixin):
        from platformio.commands.run.command import cli as run_cmd

        assert self.cmd_ctx
-        try:
-            return self.cmd_ctx.invoke(
-                run_cmd,
-                project_conf=self.project_config.path,
-                upload_port=self.options.upload_port,
-                verbose=self.options.verbose,
-                silent=not self.options.verbose,
-                environment=[self.test_suite.env_name],
-                disable_auto_clean="nobuild" in targets,
-                target=targets,
-            )
-        except Exception as exc:
-            raise UnitTestSuiteError(exc)
+        return self.cmd_ctx.invoke(
+            run_cmd,
+            project_conf=self.project_config.path,
+            upload_port=self.options.upload_port,
+            verbose=self.options.verbose,
+            silent=not self.options.verbose,
+            environment=[self.test_suite.env_name],
+            disable_auto_clean="nobuild" in targets,
+            target=targets,
+        )

    def configure_build_env(self, env):  # pylint: disable=no-self-use
        """
@@ -159,7 +168,7 @@ class TestRunnerBase(TestRunnerNativeMixin, TestRunnerEmbeddedMixin):
        """
        return env

-    def on_run_output(self, data):
+    def on_test_output(self, data):
        click.echo(data, nl=False)
        self.parse_testcases(data)

@@ -178,7 +187,7 @@ class TestRunnerBase(TestRunnerNativeMixin, TestRunnerEmbeddedMixin):
        source = None
        if "source_file" in data:
            source = TestCaseSource(
-                file=data["source_file"], line=data.get("source_line")
+                file=data["source_file"], line=int(data.get("source_line"))
            )
        self.test_suite.add_case(
            TestCase(
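The renamed stages are dispatched dynamically via ``getattr(self, f"stage_{stage}")()``, so a runner customizes behavior by overriding the ``stage_*`` hooks; a hypothetical sketch (only the hook names, ``options``, and ``on_test_output`` come from this diff):

    class MyTransportRunner(TestRunnerBase):  # hypothetical
        def stage_testing(self):
            if self.options.without_testing:
                return None
            # Feed raw output lines into the shared test-case parser.
            for line in fetch_output_lines():  # hypothetical helper
                self.on_test_output(line)
            return True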
@@ -25,7 +25,7 @@ class TestRunnerEmbeddedMixin:

    SERIAL_TIMEOUT = 600

-    def stage_run_on_target(self):
+    def stage_testing_on_target(self):
        click.echo(
            "If you don't see any output for the first 10 secs, "
            "please reset board (press reset button)"
@@ -68,7 +68,7 @@ class TestRunnerEmbeddedMixin:
                continue
            if isinstance(line, bytes):
                line = line.decode("utf8", "ignore")
-            self.on_run_output(line)
+            self.on_test_output(line)
        ser.close()

    def get_test_port(self):
@@ -20,12 +20,12 @@ from platformio.unittest.exception import UnitTestError


class TestRunnerNativeMixin:
-    def stage_run_on_host(self):
+    def stage_testing_on_host(self):
        build_dir = self.project_config.get("platformio", "build_dir")
        result = proc.exec_command(
            [os.path.join(build_dir, self.test_suite.env_name, "program")],
-            stdout=proc.LineBufferedAsyncPipe(self.on_run_output),
-            stderr=proc.LineBufferedAsyncPipe(self.on_run_output),
+            stdout=proc.LineBufferedAsyncPipe(self.on_test_output),
+            stderr=proc.LineBufferedAsyncPipe(self.on_test_output),
        )
        if result["returncode"] == 0:
            return True
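The hunk truncates right after the zero-return-code check; the richer diagnostics for crashed native programs (issue #3429, negative return codes) presumably follow just below. For context, a standalone sketch of how a negative POSIX return code maps to a signal (illustrative helper, not from this commit):

    import signal

    def describe_returncode(returncode):  # illustrative helper
        # A negative subprocess return code on POSIX means the program
        # was killed by a signal, e.g. -11 -> SIGSEGV, -6 -> SIGABRT.
        if returncode < 0:
            try:
                return "Program received signal %s" % signal.Signals(-returncode).name
            except ValueError:
                return "Program errored with return code %d" % returncode
        return None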
@@ -31,7 +31,7 @@ class UnityTestRunner(TestRunnerBase):
    # test/test_foo.cpp:44:test_function_foo:FAIL: Expected 32 Was 33
    TESTCASE_PARSE_RE = re.compile(
        r"(?P<source_file>[^:]+):(?P<source_line>\d+):(?P<name>[^:]+):"
-        r"(?P<status>PASS|IGNORE|FAIL)(?:(?P<message>.+)$)?"
+        r"(?P<status>PASS|IGNORE|FAIL)(:\s*(?P<message>.+)$)?"
    )

    UNITY_CONFIG_H = """
@@ -238,7 +238,7 @@ void unityOutputComplete(void) { unittest_uart_end(); }
            encoding="utf8",
        )

-    def on_run_output(self, data):
+    def on_test_output(self, data):
        if not data.strip():
            return click.echo(data, nl=False)
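The tightened pattern now requires an explicit colon separator before the optional message; checking the new regex against the sample line from the comment above (standalone, runnable as-is):

    import re

    TESTCASE_PARSE_RE = re.compile(
        r"(?P<source_file>[^:]+):(?P<source_line>\d+):(?P<name>[^:]+):"
        r"(?P<status>PASS|IGNORE|FAIL)(:\s*(?P<message>.+)$)?"
    )

    match = TESTCASE_PARSE_RE.match(
        "test/test_foo.cpp:44:test_function_foo:FAIL: Expected 32 Was 33"
    )
    assert match.group("status") == "FAIL"
    assert match.group("message") == "Expected 32 Was 33"
    assert match.group("source_line") == "44"  # the runner casts this with int()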
@@ -13,13 +13,15 @@
# limitations under the License.

import os
+import xml.etree.ElementTree as ET
+from pathlib import Path

from platformio import proc
from platformio.unittest.command import unittest_cmd


-def test_calculator_example():
+def test_calculator_example(tmp_path: Path):
+    junit_output_path = tmp_path / "junit.xml"
    result = proc.exec_command(
        [
            "platformio",
@@ -27,15 +29,37 @@ def test_calculator_example():
            "-d",
            os.path.join("examples", "unit-testing", "calculator"),
            "-e",
            "uno",
            "-e",
            "native",
+            "--output-format=junit",
+            "--output-path",
+            str(junit_output_path),
        ]
    )
    assert result["returncode"] != 0
    # pylint: disable=unsupported-membership-test
    assert all(
-        s in (result["err"] + result["out"]) for s in ("PASSED", "FAILED")
+        s in (result["err"] + result["out"]) for s in ("ERRORED", "PASSED", "FAILED")
    ), result["out"]

+    # test JUnit output
+    junit_testsuites = ET.parse(junit_output_path).getroot()
+    assert int(junit_testsuites.get("tests")) == 11
+    assert int(junit_testsuites.get("errors")) == 2
+    assert int(junit_testsuites.get("failures")) == 1
+    assert len(junit_testsuites.findall("testsuite")) == 9
+    junit_errored_testcase = junit_testsuites.find(
+        ".//testcase[@name='uno:test_embedded']"
+    )
+    assert junit_errored_testcase.get("status") == "ERRORED"
+    assert junit_errored_testcase.find("error").get("type") == "UnitTestSuiteError"
+    junit_failed_testcase = junit_testsuites.find(
+        ".//testcase[@name='test_calculator_division']"
+    )
+    assert junit_failed_testcase.get("status") == "FAILED"
+    assert junit_failed_testcase.find("failure").get("message") == "Expected 32 Was 33"


def test_nested_suites(clirunner, validate_cliresult, tmp_path: Path):
    project_dir = tmp_path / "project"