forked from espressif/esp-idf
Merge branch 'ci/improve_dynamic_pipeline_report' into 'master'
feat(ci): Add known failure cases to the target test report Closes IDFCI-1989 and IDFCI-1992 See merge request espressif/esp-idf!30925
This commit is contained in:
@@ -394,5 +394,7 @@ test_idf_pytest_plugin:
|
|||||||
reports:
|
reports:
|
||||||
junit: XUNIT_RESULT.xml
|
junit: XUNIT_RESULT.xml
|
||||||
script:
|
script:
|
||||||
- cd tools/ci/idf_pytest
|
- cd ${IDF_PATH}/tools/ci/dynamic_pipelines/tests/test_report_generator
|
||||||
|
- python -m unittest test_target_test_report_generator.py
|
||||||
|
- cd ${IDF_PATH}/tools/ci/idf_pytest
|
||||||
- pytest --junitxml=${CI_PROJECT_DIR}/XUNIT_RESULT.xml
|
- pytest --junitxml=${CI_PROJECT_DIR}/XUNIT_RESULT.xml
|
||||||
|
@@ -148,6 +148,7 @@
|
|||||||
|
|
||||||
.patterns-idf-pytest-plugin: &patterns-idf-pytest-plugin
|
.patterns-idf-pytest-plugin: &patterns-idf-pytest-plugin
|
||||||
- "tools/ci/idf_pytest/**/*"
|
- "tools/ci/idf_pytest/**/*"
|
||||||
|
- "tools/ci/dynamic_pipelines/tests/**/*"
|
||||||
|
|
||||||
##############
|
##############
|
||||||
# if anchors #
|
# if anchors #
|
||||||
|
@@ -1,7 +1,9 @@
|
|||||||
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
|
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
|
||||||
# SPDX-License-Identifier: Apache-2.0
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
import inspect
|
import inspect
|
||||||
|
import os
|
||||||
import typing as t
|
import typing as t
|
||||||
|
import urllib.parse
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from xml.etree.ElementTree import Element
|
from xml.etree.ElementTree import Element
|
||||||
|
|
||||||
@@ -17,7 +19,7 @@ class Job:
|
|||||||
tags: t.Optional[t.List[str]] = None,
|
tags: t.Optional[t.List[str]] = None,
|
||||||
stage: t.Optional[str] = None,
|
stage: t.Optional[str] = None,
|
||||||
parallel: int = 1,
|
parallel: int = 1,
|
||||||
variables: t.Dict[str, str] = None,
|
variables: t.Optional[t.Dict[str, str]] = None,
|
||||||
script: t.Optional[t.List[str]] = None,
|
script: t.Optional[t.List[str]] = None,
|
||||||
before_script: t.Optional[t.List[str]] = None,
|
before_script: t.Optional[t.List[str]] = None,
|
||||||
after_script: t.Optional[t.List[str]] = None,
|
after_script: t.Optional[t.List[str]] = None,
|
||||||
@@ -131,6 +133,8 @@ class TestCase:
|
|||||||
failure: t.Optional[str] = None
|
failure: t.Optional[str] = None
|
||||||
skipped: t.Optional[str] = None
|
skipped: t.Optional[str] = None
|
||||||
ci_job_url: t.Optional[str] = None
|
ci_job_url: t.Optional[str] = None
|
||||||
|
ci_dashboard_url: t.Optional[str] = None
|
||||||
|
dut_log_url: t.Optional[str] = None
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def is_failure(self) -> bool:
|
def is_failure(self) -> bool:
|
||||||
@@ -150,16 +154,25 @@ class TestCase:
|
|||||||
print('WARNING: Node Invalid: ', node)
|
print('WARNING: Node Invalid: ', node)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
# url to test cases dashboard
|
||||||
|
grafana_base_url = urllib.parse.urljoin(os.getenv('CI_DASHBOARD_HOST', ''), '/d/Ucg477Fnz/case-list')
|
||||||
|
encoded_params = urllib.parse.urlencode({'var-case_id': node.attrib['name']}, quote_via=urllib.parse.quote)
|
||||||
|
|
||||||
kwargs = {
|
kwargs = {
|
||||||
'name': node.attrib['name'],
|
'name': node.attrib['name'],
|
||||||
'file': node.attrib.get('file'),
|
'file': node.attrib.get('file'),
|
||||||
'time': float(node.attrib.get('time') or 0),
|
'time': float(node.attrib.get('time') or 0),
|
||||||
'ci_job_url': node.attrib.get('ci_job_url') or '',
|
'ci_job_url': node.attrib.get('ci_job_url') or '',
|
||||||
|
'ci_dashboard_url': f'{grafana_base_url}?{encoded_params}',
|
||||||
}
|
}
|
||||||
|
|
||||||
failure_node = node.find('failure')
|
failure_node = node.find('failure')
|
||||||
|
# bool(failure_node) is False, so compare with None
|
||||||
|
if failure_node is None:
|
||||||
|
failure_node = node.find('error')
|
||||||
if failure_node is not None:
|
if failure_node is not None:
|
||||||
kwargs['failure'] = failure_node.attrib['message']
|
message = failure_node.attrib.get('message', '')
|
||||||
|
kwargs['failure'] = message
|
||||||
|
|
||||||
skipped_node = node.find('skipped')
|
skipped_node = node.find('skipped')
|
||||||
if skipped_node is not None:
|
if skipped_node is not None:
|
||||||
|
@@ -1,7 +1,7 @@
|
|||||||
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
|
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
|
||||||
# SPDX-License-Identifier: Apache-2.0
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
import abc
|
import abc
|
||||||
|
import fnmatch
|
||||||
import html
|
import html
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
@@ -15,8 +15,12 @@ from idf_build_apps.constants import BuildStatus
|
|||||||
from idf_ci.uploader import AppUploader
|
from idf_ci.uploader import AppUploader
|
||||||
from prettytable import PrettyTable
|
from prettytable import PrettyTable
|
||||||
|
|
||||||
from .constants import COMMENT_START_MARKER, REPORT_TEMPLATE_FILEPATH, TEST_RELATED_APPS_DOWNLOAD_URLS_FILENAME
|
from .constants import COMMENT_START_MARKER
|
||||||
|
from .constants import REPORT_TEMPLATE_FILEPATH
|
||||||
|
from .constants import TEST_RELATED_APPS_DOWNLOAD_URLS_FILENAME
|
||||||
from .models import TestCase
|
from .models import TestCase
|
||||||
|
from .utils import is_url
|
||||||
|
from .utils import load_known_failure_cases
|
||||||
|
|
||||||
|
|
||||||
class ReportGenerator:
|
class ReportGenerator:
|
||||||
@@ -32,6 +36,7 @@ class ReportGenerator:
|
|||||||
|
|
||||||
self.title = title
|
self.title = title
|
||||||
self.output_filepath = self.title.lower().replace(' ', '_') + '.html'
|
self.output_filepath = self.title.lower().replace(' ', '_') + '.html'
|
||||||
|
self.additional_info = ''
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_download_link_for_url(url: str) -> str:
|
def get_download_link_for_url(url: str) -> str:
|
||||||
@@ -66,11 +71,14 @@ class ReportGenerator:
|
|||||||
# CI_PAGES_URL is {URL}/esp-idf, which missed one `-`
|
# CI_PAGES_URL is {URL}/esp-idf, which missed one `-`
|
||||||
url = os.getenv('CI_PAGES_URL', '').replace('esp-idf', '-/esp-idf')
|
url = os.getenv('CI_PAGES_URL', '').replace('esp-idf', '-/esp-idf')
|
||||||
|
|
||||||
comment = f'''#### {self.title}
|
comment = f'#### {self.title}\n'
|
||||||
|
if self.additional_info:
|
||||||
|
comment += f'{self.additional_info}\n'
|
||||||
|
|
||||||
|
comment += f"""
|
||||||
Full {self.title} here: {url}/-/jobs/{job_id}/artifacts/{self.output_filepath} (with commit {commit_id})
|
Full {self.title} here: {url}/-/jobs/{job_id}/artifacts/{self.output_filepath} (with commit {commit_id})
|
||||||
|
|
||||||
'''
|
"""
|
||||||
print(comment)
|
print(comment)
|
||||||
|
|
||||||
if self.mr is None:
|
if self.mr is None:
|
||||||
@@ -87,9 +95,9 @@ Full {self.title} here: {url}/-/jobs/{job_id}/artifacts/{self.output_filepath} (
|
|||||||
note.save()
|
note.save()
|
||||||
break
|
break
|
||||||
else:
|
else:
|
||||||
new_comment = f'''{COMMENT_START_MARKER}
|
new_comment = f"""{COMMENT_START_MARKER}
|
||||||
|
|
||||||
{comment}'''
|
{comment}"""
|
||||||
self.mr.notes.create({'body': new_comment})
|
self.mr.notes.create({'body': new_comment})
|
||||||
|
|
||||||
|
|
||||||
@@ -237,41 +245,114 @@ class TargetTestReportGenerator(ReportGenerator):
|
|||||||
super().__init__(project_id, mr_iid, pipeline_id, title=title)
|
super().__init__(project_id, mr_iid, pipeline_id, title=title)
|
||||||
|
|
||||||
self.test_cases = test_cases
|
self.test_cases = test_cases
|
||||||
|
self._known_failure_cases_set = None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def known_failure_cases_set(self) -> t.Optional[t.Set[str]]:
|
||||||
|
if self._known_failure_cases_set is None:
|
||||||
|
self._known_failure_cases_set = load_known_failure_cases()
|
||||||
|
|
||||||
|
return self._known_failure_cases_set
|
||||||
|
|
||||||
|
def get_known_failure_cases(self) -> t.List[TestCase]:
|
||||||
|
if self.known_failure_cases_set is None:
|
||||||
|
return []
|
||||||
|
matched_cases = [
|
||||||
|
testcase
|
||||||
|
for testcase in self.test_cases
|
||||||
|
if any(fnmatch.fnmatch(testcase.name, pattern) for pattern in self.known_failure_cases_set)
|
||||||
|
and testcase.is_failure
|
||||||
|
]
|
||||||
|
return matched_cases
|
||||||
|
|
||||||
|
def _filter_test_cases(self, condition: t.Callable[[TestCase], bool]) -> t.List[TestCase]:
|
||||||
|
"""
|
||||||
|
Filter test cases based on a given condition. In this scenario, we filter by status,
|
||||||
|
however it is possible to filter by other criteria.
|
||||||
|
|
||||||
|
:param condition: A function that evaluates to True or False for each test case.
|
||||||
|
:return: List of filtered TestCase instances.
|
||||||
|
"""
|
||||||
|
return [tc for tc in self.test_cases if condition(tc)]
|
||||||
|
|
||||||
|
def _create_table_for_test_cases(
|
||||||
|
self, test_cases: t.List[TestCase], headers: t.List[str], row_attrs: t.List[str]
|
||||||
|
) -> str:
|
||||||
|
"""
|
||||||
|
Create a PrettyTable and convert it to an HTML string for the provided test cases.
|
||||||
|
:param test_cases: List of TestCase objects to include in the table.
|
||||||
|
:param headers: List of strings for the table headers.
|
||||||
|
:param row_attrs: List of attributes to include in each row.
|
||||||
|
:return: HTML table string.
|
||||||
|
"""
|
||||||
|
table = PrettyTable()
|
||||||
|
table.field_names = headers
|
||||||
|
for tc in test_cases:
|
||||||
|
row = []
|
||||||
|
for attr in row_attrs:
|
||||||
|
value = getattr(tc, attr, '')
|
||||||
|
if is_url(value):
|
||||||
|
link = f'<a href="{value}">link</a>'
|
||||||
|
row.append(link)
|
||||||
|
else:
|
||||||
|
row.append(value)
|
||||||
|
table.add_row(row)
|
||||||
|
|
||||||
|
return self.table_to_html_str(table)
|
||||||
|
|
||||||
def _get_report_str(self) -> str:
|
def _get_report_str(self) -> str:
|
||||||
|
"""
|
||||||
|
Generate a complete HTML report string by processing test cases.
|
||||||
|
:return: Complete HTML report string.
|
||||||
|
"""
|
||||||
table_str = ''
|
table_str = ''
|
||||||
|
|
||||||
failed_test_cases = [tc for tc in self.test_cases if tc.is_failure]
|
known_failures = self.get_known_failure_cases()
|
||||||
|
known_failure_case_names = {case.name for case in known_failures}
|
||||||
|
failed_test_cases = self._filter_test_cases(
|
||||||
|
lambda tc: tc.is_failure and tc.name not in known_failure_case_names
|
||||||
|
)
|
||||||
|
skipped_test_cases = self._filter_test_cases(lambda tc: tc.is_skipped)
|
||||||
|
successful_test_cases = self._filter_test_cases(lambda tc: tc.is_success)
|
||||||
|
|
||||||
if failed_test_cases:
|
if failed_test_cases:
|
||||||
table_str += '<h2>Failed Test Cases</h2>'
|
table_str += '<h2>Failed Test Cases (Excludes Known Failure Cases)</h2>'
|
||||||
|
table_str += self._create_table_for_test_cases(
|
||||||
|
test_cases=failed_test_cases,
|
||||||
|
headers=['Test Case', 'Test Script File Path', 'Failure Reason', 'Job URL', 'Grafana URL'],
|
||||||
|
row_attrs=['name', 'file', 'failure', 'ci_job_url', 'ci_dashboard_url'],
|
||||||
|
)
|
||||||
|
|
||||||
failed_test_cases_table = PrettyTable()
|
if known_failures:
|
||||||
failed_test_cases_table.field_names = ['Test Case', 'Test Script File Path', 'Failure Reason', 'Job URL']
|
table_str += '<h2>Known Failure Cases</h2>'
|
||||||
for tc in failed_test_cases:
|
table_str += self._create_table_for_test_cases(
|
||||||
failed_test_cases_table.add_row([tc.name, tc.file, tc.failure, tc.ci_job_url])
|
test_cases=known_failures,
|
||||||
|
headers=['Test Case', 'Test Script File Path', 'Failure Reason', 'Job URL', 'Grafana URL'],
|
||||||
|
row_attrs=['name', 'file', 'failure', 'ci_job_url', 'ci_dashboard_url'],
|
||||||
|
)
|
||||||
|
|
||||||
table_str += self.table_to_html_str(failed_test_cases_table)
|
|
||||||
|
|
||||||
skipped_test_cases = [tc for tc in self.test_cases if tc.is_skipped]
|
|
||||||
if skipped_test_cases:
|
if skipped_test_cases:
|
||||||
table_str += '<h2>Skipped Test Cases</h2>'
|
table_str += '<h2>Skipped Test Cases</h2>'
|
||||||
|
table_str += self._create_table_for_test_cases(
|
||||||
|
test_cases=skipped_test_cases,
|
||||||
|
headers=['Test Case', 'Test Script File Path', 'Skipped Reason', 'Grafana URL'],
|
||||||
|
row_attrs=['name', 'file', 'skipped', 'ci_dashboard_url'],
|
||||||
|
)
|
||||||
|
|
||||||
skipped_test_cases_table = PrettyTable()
|
|
||||||
skipped_test_cases_table.field_names = ['Test Case', 'Test Script File Path', 'Skipped Reason']
|
|
||||||
for tc in skipped_test_cases:
|
|
||||||
skipped_test_cases_table.add_row([tc.name, tc.file, tc.skipped])
|
|
||||||
|
|
||||||
table_str += self.table_to_html_str(skipped_test_cases_table)
|
|
||||||
|
|
||||||
successful_test_cases = [tc for tc in self.test_cases if tc.is_success]
|
|
||||||
if successful_test_cases:
|
if successful_test_cases:
|
||||||
table_str += '<h2>Succeeded Test Cases</h2>'
|
table_str += '<h2>Succeeded Test Cases</h2>'
|
||||||
|
table_str += self._create_table_for_test_cases(
|
||||||
successful_test_cases_table = PrettyTable()
|
test_cases=successful_test_cases,
|
||||||
successful_test_cases_table.field_names = ['Test Case', 'Test Script File Path', 'Job URL']
|
headers=['Test Case', 'Test Script File Path', 'Job URL', 'Grafana URL'],
|
||||||
for tc in successful_test_cases:
|
row_attrs=['name', 'file', 'ci_job_url', 'ci_dashboard_url'],
|
||||||
successful_test_cases_table.add_row([tc.name, tc.file, tc.ci_job_url])
|
)
|
||||||
|
self.additional_info = (
|
||||||
table_str += self.table_to_html_str(successful_test_cases_table)
|
'**Test Case Summary:**\n'
|
||||||
|
f'- **Failed Test Cases (Excludes Known Failure Cases):** {len(failed_test_cases)}\n'
|
||||||
|
f'- **Known Failures:** {len(known_failures)}\n'
|
||||||
|
f'- **Skipped Test Cases:** {len(skipped_test_cases)}\n'
|
||||||
|
f'- **Succeeded Test Cases:** {len(successful_test_cases)}\n\n'
|
||||||
|
f'Please check report below for more information.\n\n'
|
||||||
|
)
|
||||||
|
|
||||||
return self.generate_html_report(table_str)
|
return self.generate_html_report(table_str)
|
||||||
|
@@ -1,14 +1,12 @@
|
|||||||
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
|
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
|
||||||
# SPDX-License-Identifier: Apache-2.0
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import glob
|
|
||||||
import os
|
import os
|
||||||
import xml.etree.ElementTree as ET
|
|
||||||
|
|
||||||
import __init__ # noqa: F401 # inject the system path
|
import __init__ # noqa: F401 # inject the system path
|
||||||
from dynamic_pipelines.models import TestCase
|
|
||||||
from dynamic_pipelines.report import TargetTestReportGenerator
|
from dynamic_pipelines.report import TargetTestReportGenerator
|
||||||
|
from dynamic_pipelines.utils import parse_testcases_from_filepattern
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
parser = argparse.ArgumentParser(
|
parser = argparse.ArgumentParser(
|
||||||
@@ -52,11 +50,6 @@ if __name__ == '__main__':
|
|||||||
|
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
test_cases = []
|
test_cases = parse_testcases_from_filepattern(args.junit_report_filepattern)
|
||||||
for f in glob.glob(args.junit_report_filepattern):
|
|
||||||
root = ET.parse(f).getroot()
|
|
||||||
for tc in root.findall('.//testcase'):
|
|
||||||
test_cases.append(TestCase.from_test_case_node(tc))
|
|
||||||
|
|
||||||
report_generator = TargetTestReportGenerator(args.project_id, args.mr_iid, args.pipeline_id, test_cases=test_cases)
|
report_generator = TargetTestReportGenerator(args.project_id, args.mr_iid, args.pipeline_id, test_cases=test_cases)
|
||||||
report_generator.post_report(args.job_id, args.commit_id)
|
report_generator.post_report(args.job_id, args.commit_id)
|
||||||
|
@@ -7,4 +7,5 @@ generate_pytest_report:
|
|||||||
paths:
|
paths:
|
||||||
- target_test_report.html
|
- target_test_report.html
|
||||||
script:
|
script:
|
||||||
|
- python tools/ci/get_known_failure_cases_file.py
|
||||||
- python tools/ci/dynamic_pipelines/scripts/generate_target_test_report.py
|
- python tools/ci/dynamic_pipelines/scripts/generate_target_test_report.py
|
||||||
|
0
tools/ci/dynamic_pipelines/tests/__init__.py
Normal file
0
tools/ci/dynamic_pipelines/tests/__init__.py
Normal file
@@ -0,0 +1,200 @@
|
|||||||
|
<?xml version="1.0" encoding="utf-8"?>
|
||||||
|
<testsuites>
|
||||||
|
<testsuite errors="2" failures="0" hostname="FA002598" name="pytest" skipped="0" tests="2" time="22.981" timestamp="2024-05-17T17:51:26.669364">
|
||||||
|
<testcase classname="components.driver.test_apps.i2c_test_apps.pytest_i2c" file="components/driver/test_apps/i2c_test_apps/pytest_i2c.py" line="21" name="('esp32h2', 'esp32h2').('defaults', 'defaults').test_i2c_multi_device" time="11.910">
|
||||||
|
<error message="failed on setup with "EOFError"">conftest.py:74: in case_tester
|
||||||
|
yield CaseTester(dut, **kwargs)
|
||||||
|
tools/ci/idf_unity_tester.py:202: in __init__
|
||||||
|
self._manager = Manager()
|
||||||
|
/usr/lib/python3.9/multiprocessing/context.py:57: in Manager
|
||||||
|
m.start()
|
||||||
|
/usr/lib/python3.9/multiprocessing/managers.py:557: in start
|
||||||
|
self._address = reader.recv()
|
||||||
|
/usr/lib/python3.9/multiprocessing/connection.py:255: in recv
|
||||||
|
buf = self._recv_bytes()
|
||||||
|
/usr/lib/python3.9/multiprocessing/connection.py:419: in _recv_bytes
|
||||||
|
buf = self._recv(4)
|
||||||
|
/usr/lib/python3.9/multiprocessing/connection.py:388: in _recv
|
||||||
|
raise EOFError
|
||||||
|
E EOFError</error>
|
||||||
|
</testcase>
|
||||||
|
<testcase classname="components.driver.test_apps.i2s_test_apps.i2s_multi_dev.pytest_i2s_multi_dev" file="components/driver/test_apps/i2s_test_apps/i2s_multi_dev/pytest_i2s_multi_dev.py" line="5" name="('esp32h2', 'esp32h2').('default', 'default').test_i2s_multi_dev" time="11.071">
|
||||||
|
<error message="failed on setup with "EOFError"">conftest.py:74: in case_tester
|
||||||
|
yield CaseTester(dut, **kwargs)
|
||||||
|
tools/ci/idf_unity_tester.py:202: in __init__
|
||||||
|
self._manager = Manager()
|
||||||
|
/usr/lib/python3.9/multiprocessing/context.py:57: in Manager
|
||||||
|
m.start()
|
||||||
|
/usr/lib/python3.9/multiprocessing/managers.py:557: in start
|
||||||
|
self._address = reader.recv()
|
||||||
|
/usr/lib/python3.9/multiprocessing/connection.py:255: in recv
|
||||||
|
buf = self._recv_bytes()
|
||||||
|
/usr/lib/python3.9/multiprocessing/connection.py:419: in _recv_bytes
|
||||||
|
buf = self._recv(4)
|
||||||
|
/usr/lib/python3.9/multiprocessing/connection.py:388: in _recv
|
||||||
|
raise EOFError
|
||||||
|
E EOFError</error>
|
||||||
|
</testcase>
|
||||||
|
</testsuite>
|
||||||
|
<testsuite errors="0" failures="1" hostname="GX64-C2-SH-1-ITS1N4" name="pytest" skipped="0" tests="3" time="101.163" timestamp="2024-05-17T17:52:04.061589">
|
||||||
|
<testcase classname="components.vfs.test_apps.pytest_vfs" file="components/vfs/test_apps/pytest_vfs.py" line="7" name="esp32c2.default.test_vfs_default" time="30.044"/>
|
||||||
|
<testcase classname="components.vfs.test_apps.pytest_vfs" file="components/vfs/test_apps/pytest_vfs.py" line="7" name="esp32c2.iram.test_vfs_default" time="28.323"/>
|
||||||
|
<testcase classname="components.wpa_supplicant.test_apps.pytest_wpa_supplicant_ut" file="components/wpa_supplicant/test_apps/pytest_wpa_supplicant_ut.py" line="8" name="esp32c2.default.test_wpa_supplicant_ut" time="42.796">
|
||||||
|
<failure message="AssertionError: Unity test failed">/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/plugin.py:1272: in pytest_runtest_call
|
||||||
|
self._raise_dut_failed_cases_if_exists(duts) # type: ignore
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/plugin.py:1207: in _raise_dut_failed_cases_if_exists
|
||||||
|
raise AssertionError('Unity test failed')
|
||||||
|
E AssertionError: Unity test failed</failure>
|
||||||
|
</testcase>
|
||||||
|
</testsuite>
|
||||||
|
<testsuite errors="0" failures="0" hostname="runner-zmdq2hnf-project-103-concurrent-3" name="pytest" skipped="1" tests="8" time="123.596" timestamp="2024-05-17T03:04:11.412971">
|
||||||
|
<testcase classname="test_common" file="test_common.py" line="114" name="test_python_interpreter_unix" time="7.523"/>
|
||||||
|
<testcase classname="test_common" file="test_common.py" line="133" name="test_python_interpreter_win" time="0.000">
|
||||||
|
<skipped message="Linux does not support executing .exe files" type="pytest.skip">/builds/espressif/esp-idf/tools/test_build_system/test_common.py:134: Linux does not support executing .exe files</skipped>
|
||||||
|
</testcase>
|
||||||
|
<testcase classname="test_common" file="test_common.py" line="147" name="test_invoke_confserver" time="10.179"/>
|
||||||
|
<testcase classname="test_common" file="test_common.py" line="153" name="test_ccache_used_to_build" time="23.713"/>
|
||||||
|
<testcase classname="test_common" file="test_common.py" line="171" name="test_toolchain_prefix_in_description_file" time="8.390"/>
|
||||||
|
<testcase classname="test_common" file="test_common.py" line="178" name="test_subcommands_with_options" time="28.118"/>
|
||||||
|
<testcase classname="test_common" file="test_common.py" line="194" name="test_fallback_to_build_system_target" time="11.687"/>
|
||||||
|
<testcase classname="test_common" file="test_common.py" line="203" name="test_create_component_project" time="33.986"/>
|
||||||
|
</testsuite>
|
||||||
|
<testsuite errors="0" failures="4" hostname="FA002285" name="pytest" skipped="0" tests="4" time="231.048" timestamp="2024-05-17T17:50:02.291973">
|
||||||
|
<testcase classname="components.esp_timer.test_apps.pytest_esp_timer_ut" file="components/esp_timer/test_apps/pytest_esp_timer_ut.py" line="20" name="esp32c3.release.test_esp_timer" time="39.686">
|
||||||
|
<failure message="pexpect.exceptions.TIMEOUT: Not found "Press ENTER to see the list of tests"
|
||||||
|
Bytes in current buffer (color code eliminated): ce710,len:0x2afc entry 0x403cc710
|
||||||
|
Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.release.test_esp_timer/dut.txt">/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:76: in wrapper
|
||||||
|
index = func(self, pattern, *args, **kwargs)
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:153: in expect_exact
|
||||||
|
return self.pexpect_proc.expect_exact(pattern, **kwargs)
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/spawnbase.py:432: in expect_exact
|
||||||
|
return exp.expect_loop(timeout)
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/expect.py:181: in expect_loop
|
||||||
|
return self.timeout(e)
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/expect.py:144: in timeout
|
||||||
|
raise exc
|
||||||
|
E pexpect.exceptions.TIMEOUT: <pytest_embedded.log.PexpectProcess object at 0x7f47b1ab46a0>
|
||||||
|
E searcher: searcher_string:
|
||||||
|
E 0: b'Press ENTER to see the list of tests'
|
||||||
|
E <pytest_embedded.log.PexpectProcess object at 0x7f47b1ab46a0>
|
||||||
|
E searcher: searcher_string:
|
||||||
|
E 0: b'Press ENTER to see the list of tests'
|
||||||
|
|
||||||
|
The above exception was the direct cause of the following exception:
|
||||||
|
components/esp_timer/test_apps/pytest_esp_timer_ut.py:24: in test_esp_timer
|
||||||
|
dut.run_all_single_board_cases(timeout=120)
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded_idf/unity_tester.py:435: in run_all_single_board_cases
|
||||||
|
for case in self.test_menu:
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded_idf/unity_tester.py:243: in test_menu
|
||||||
|
self._test_menu = self._parse_test_menu()
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded_idf/unity_tester.py:124: in _parse_test_menu
|
||||||
|
self.expect_exact(ready_line)
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:83: in wrapper
|
||||||
|
raise e.__class__(debug_str) from e
|
||||||
|
E pexpect.exceptions.TIMEOUT: Not found "Press ENTER to see the list of tests"
|
||||||
|
E Bytes in current buffer (color code eliminated): ce710,len:0x2afc entry 0x403cc710
|
||||||
|
E Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.release.test_esp_timer/dut.txt</failure>
|
||||||
|
</testcase>
|
||||||
|
<testcase classname="components.wear_levelling.test_apps.pytest_wear_levelling" file="components/wear_levelling/test_apps/pytest_wear_levelling.py" line="7" name="esp32c3.512safe.test_wear_levelling" time="69.850">
|
||||||
|
<failure message="pexpect.exceptions.TIMEOUT: Not found "re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)', re.MULTILINE)"
|
||||||
|
Bytes in current buffer (color code eliminated): Serial port /dev/ttyUSB16 Connecting.... Connecting.... esptool.py v4.7.0 Found 1 serial ports Chip is ESP32-C3 (QFN32) (revision v0.3) Features: WiFi, BLE, Embedded Flash 4MB... (total 6673 bytes)
|
||||||
|
Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.512safe.test_wear_levelling/dut.txt">/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:76: in wrapper
|
||||||
|
index = func(self, pattern, *args, **kwargs)
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:131: in expect
|
||||||
|
return self.pexpect_proc.expect(pattern, **kwargs)
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/spawnbase.py:354: in expect
|
||||||
|
return self.expect_list(compiled_pattern_list,
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/spawnbase.py:383: in expect_list
|
||||||
|
return exp.expect_loop(timeout)
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/expect.py:181: in expect_loop
|
||||||
|
return self.timeout(e)
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/expect.py:144: in timeout
|
||||||
|
raise exc
|
||||||
|
E pexpect.exceptions.TIMEOUT: <pytest_embedded.log.PexpectProcess object at 0x7f47af653b80>
|
||||||
|
E searcher: searcher_re:
|
||||||
|
E 0: re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)')
|
||||||
|
E <pytest_embedded.log.PexpectProcess object at 0x7f47af653b80>
|
||||||
|
E searcher: searcher_re:
|
||||||
|
E 0: re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)')
|
||||||
|
|
||||||
|
The above exception was the direct cause of the following exception:
|
||||||
|
components/wear_levelling/test_apps/pytest_wear_levelling.py:18: in test_wear_levelling
|
||||||
|
dut.expect_unity_test_output()
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:180: in expect_unity_test_output
|
||||||
|
self.expect(UNITY_SUMMARY_LINE_REGEX, timeout=timeout)
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:83: in wrapper
|
||||||
|
raise e.__class__(debug_str) from e
|
||||||
|
E pexpect.exceptions.TIMEOUT: Not found "re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)', re.MULTILINE)"
|
||||||
|
E Bytes in current buffer (color code eliminated): Serial port /dev/ttyUSB16 Connecting.... Connecting.... esptool.py v4.7.0 Found 1 serial ports Chip is ESP32-C3 (QFN32) (revision v0.3) Features: WiFi, BLE, Embedded Flash 4MB... (total 6673 bytes)
|
||||||
|
E Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.512safe.test_wear_levelling/dut.txt</failure>
|
||||||
|
</testcase>
|
||||||
|
<testcase classname="components.wear_levelling.test_apps.pytest_wear_levelling" file="components/wear_levelling/test_apps/pytest_wear_levelling.py" line="7" name="esp32c3.release.test_wear_levelling" time="70.304">
|
||||||
|
<failure message="pexpect.exceptions.TIMEOUT: Not found "re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)', re.MULTILINE)"
|
||||||
|
Bytes in current buffer (color code eliminated): Serial port /dev/ttyUSB16 Connecting.... Connecting.... esptool.py v4.7.0 Found 1 serial ports Chip is ESP32-C3 (QFN32) (revision v0.3) Features: WiFi, BLE, Embedded Flash 4MB... (total 24528 bytes)
|
||||||
|
Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.release.test_wear_levelling/dut.txt">/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:76: in wrapper
|
||||||
|
index = func(self, pattern, *args, **kwargs)
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:131: in expect
|
||||||
|
return self.pexpect_proc.expect(pattern, **kwargs)
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/spawnbase.py:354: in expect
|
||||||
|
return self.expect_list(compiled_pattern_list,
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/spawnbase.py:383: in expect_list
|
||||||
|
return exp.expect_loop(timeout)
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/expect.py:181: in expect_loop
|
||||||
|
return self.timeout(e)
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/expect.py:144: in timeout
|
||||||
|
raise exc
|
||||||
|
E pexpect.exceptions.TIMEOUT: <pytest_embedded.log.PexpectProcess object at 0x7f47afc540d0>
|
||||||
|
E searcher: searcher_re:
|
||||||
|
E 0: re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)')
|
||||||
|
E <pytest_embedded.log.PexpectProcess object at 0x7f47afc540d0>
|
||||||
|
E searcher: searcher_re:
|
||||||
|
E 0: re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)')
|
||||||
|
|
||||||
|
The above exception was the direct cause of the following exception:
|
||||||
|
components/wear_levelling/test_apps/pytest_wear_levelling.py:18: in test_wear_levelling
|
||||||
|
dut.expect_unity_test_output()
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:180: in expect_unity_test_output
|
||||||
|
self.expect(UNITY_SUMMARY_LINE_REGEX, timeout=timeout)
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:83: in wrapper
|
||||||
|
raise e.__class__(debug_str) from e
|
||||||
|
E pexpect.exceptions.TIMEOUT: Not found "re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)', re.MULTILINE)"
|
||||||
|
E Bytes in current buffer (color code eliminated): Serial port /dev/ttyUSB16 Connecting.... Connecting.... esptool.py v4.7.0 Found 1 serial ports Chip is ESP32-C3 (QFN32) (revision v0.3) Features: WiFi, BLE, Embedded Flash 4MB... (total 24528 bytes)
|
||||||
|
E Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.release.test_wear_levelling/dut.txt</failure>
|
||||||
|
</testcase>
|
||||||
|
<testcase classname="components.wpa_supplicant.test_apps.pytest_wpa_supplicant_ut" file="components/wpa_supplicant/test_apps/pytest_wpa_supplicant_ut.py" line="8" name="esp32c3.default.test_wpa_supplicant_ut" time="51.208">
|
||||||
|
<failure message="pexpect.exceptions.TIMEOUT: Not found "Press ENTER to see the list of tests"
|
||||||
|
Bytes in current buffer (color code eliminated): 0 d4 000 00x0000 x0000x00 000000 0
|
||||||
|
Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.default.test_wpa_supplicant_ut/dut.txt">/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:76: in wrapper
|
||||||
|
index = func(self, pattern, *args, **kwargs)
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:153: in expect_exact
|
||||||
|
return self.pexpect_proc.expect_exact(pattern, **kwargs)
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/spawnbase.py:432: in expect_exact
|
||||||
|
return exp.expect_loop(timeout)
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/expect.py:181: in expect_loop
|
||||||
|
return self.timeout(e)
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/expect.py:144: in timeout
|
||||||
|
raise exc
|
||||||
|
E pexpect.exceptions.TIMEOUT: <pytest_embedded.log.PexpectProcess object at 0x7f47afc54df0>
|
||||||
|
E searcher: searcher_string:
|
||||||
|
E 0: b'Press ENTER to see the list of tests'
|
||||||
|
E <pytest_embedded.log.PexpectProcess object at 0x7f47afc54df0>
|
||||||
|
E searcher: searcher_string:
|
||||||
|
E 0: b'Press ENTER to see the list of tests'
|
||||||
|
|
||||||
|
The above exception was the direct cause of the following exception:
|
||||||
|
components/wpa_supplicant/test_apps/pytest_wpa_supplicant_ut.py:17: in test_wpa_supplicant_ut
|
||||||
|
dut.run_all_single_board_cases()
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded_idf/unity_tester.py:435: in run_all_single_board_cases
|
||||||
|
for case in self.test_menu:
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded_idf/unity_tester.py:243: in test_menu
|
||||||
|
self._test_menu = self._parse_test_menu()
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded_idf/unity_tester.py:124: in _parse_test_menu
|
||||||
|
self.expect_exact(ready_line)
|
||||||
|
/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:83: in wrapper
|
||||||
|
raise e.__class__(debug_str) from e
|
||||||
|
E pexpect.exceptions.TIMEOUT: Not found "Press ENTER to see the list of tests"
|
||||||
|
E Bytes in current buffer (color code eliminated): 0 d4 000 00x0000 x0000x00 000000 0
|
||||||
|
E Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.default.test_wpa_supplicant_ut/dut.txt</failure>
|
||||||
|
</testcase>
|
||||||
|
</testsuite>
|
||||||
|
</testsuites>
|
@@ -0,0 +1,178 @@
|
|||||||
|
<!doctype html>
|
||||||
|
<html>
|
||||||
|
<head>
|
||||||
|
<meta charset="utf-8" />
|
||||||
|
<title>Test Report</title>
|
||||||
|
|
||||||
|
<link
|
||||||
|
href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css"
|
||||||
|
rel="stylesheet"
|
||||||
|
/>
|
||||||
|
<link
|
||||||
|
href="https://unpkg.com/bootstrap-table@1.22.1/dist/bootstrap-table.min.css"
|
||||||
|
rel="stylesheet"
|
||||||
|
/>
|
||||||
|
</head>
|
||||||
|
|
||||||
|
<body>
|
||||||
|
<div class="container-fluid"><h2>Failed Test Cases (Excludes Known Failure Cases)</h2><table data-toggle="table" data-search="true">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th>Test Case</th>
|
||||||
|
<th>Test Script File Path</th>
|
||||||
|
<th>Failure Reason</th>
|
||||||
|
<th>Job URL</th>
|
||||||
|
<th>Grafana URL</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
<tr>
|
||||||
|
<td>('esp32h2', 'esp32h2').('defaults', 'defaults').test_i2c_multi_device</td>
|
||||||
|
<td>components/driver/test_apps/i2c_test_apps/pytest_i2c.py</td>
|
||||||
|
<td>failed on setup with "EOFError"</td>
|
||||||
|
<td></td>
|
||||||
|
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=%28%27esp32h2%27%2C%20%27esp32h2%27%29.%28%27defaults%27%2C%20%27defaults%27%29.test_i2c_multi_device">link</a></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>('esp32h2', 'esp32h2').('default', 'default').test_i2s_multi_dev</td>
|
||||||
|
<td>components/driver/test_apps/i2s_test_apps/i2s_multi_dev/pytest_i2s_multi_dev.py</td>
|
||||||
|
<td>failed on setup with "EOFError"</td>
|
||||||
|
<td></td>
|
||||||
|
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=%28%27esp32h2%27%2C%20%27esp32h2%27%29.%28%27default%27%2C%20%27default%27%29.test_i2s_multi_dev">link</a></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>esp32c3.release.test_wear_levelling</td>
|
||||||
|
<td>components/wear_levelling/test_apps/pytest_wear_levelling.py</td>
|
||||||
|
<td>pexpect.exceptions.TIMEOUT: Not found "re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)', re.MULTILINE)" Bytes in current buffer (color code eliminated): Serial port /dev/ttyUSB16 Connecting.... Connecting.... esptool.py v4.7.0 Found 1 serial ports Chip is ESP32-C3 (QFN32) (revision v0.3) Features: WiFi, BLE, Embedded Flash 4MB... (total 24528 bytes) Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.release.test_wear_levelling/dut.txt</td>
|
||||||
|
<td></td>
|
||||||
|
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=esp32c3.release.test_wear_levelling">link</a></td>
|
||||||
|
</tr>
|
||||||
|
</tbody>
|
||||||
|
</table><h2>Known Failure Cases</h2><table data-toggle="table" data-search="true">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th>Test Case</th>
|
||||||
|
<th>Test Script File Path</th>
|
||||||
|
<th>Failure Reason</th>
|
||||||
|
<th>Job URL</th>
|
||||||
|
<th>Grafana URL</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
<tr>
|
||||||
|
<td>esp32c2.default.test_wpa_supplicant_ut</td>
|
||||||
|
<td>components/wpa_supplicant/test_apps/pytest_wpa_supplicant_ut.py</td>
|
||||||
|
<td>AssertionError: Unity test failed</td>
|
||||||
|
<td></td>
|
||||||
|
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=esp32c2.default.test_wpa_supplicant_ut">link</a></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>esp32c3.release.test_esp_timer</td>
|
||||||
|
<td>components/esp_timer/test_apps/pytest_esp_timer_ut.py</td>
|
||||||
|
<td>pexpect.exceptions.TIMEOUT: Not found "Press ENTER to see the list of tests" Bytes in current buffer (color code eliminated): ce710,len:0x2afc entry 0x403cc710 Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.release.test_esp_timer/dut.txt</td>
|
||||||
|
<td></td>
|
||||||
|
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=esp32c3.release.test_esp_timer">link</a></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>esp32c3.512safe.test_wear_levelling</td>
|
||||||
|
<td>components/wear_levelling/test_apps/pytest_wear_levelling.py</td>
|
||||||
|
<td>pexpect.exceptions.TIMEOUT: Not found "re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)', re.MULTILINE)" Bytes in current buffer (color code eliminated): Serial port /dev/ttyUSB16 Connecting.... Connecting.... esptool.py v4.7.0 Found 1 serial ports Chip is ESP32-C3 (QFN32) (revision v0.3) Features: WiFi, BLE, Embedded Flash 4MB... (total 6673 bytes) Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.512safe.test_wear_levelling/dut.txt</td>
|
||||||
|
<td></td>
|
||||||
|
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=esp32c3.512safe.test_wear_levelling">link</a></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>esp32c3.default.test_wpa_supplicant_ut</td>
|
||||||
|
<td>components/wpa_supplicant/test_apps/pytest_wpa_supplicant_ut.py</td>
|
||||||
|
<td>pexpect.exceptions.TIMEOUT: Not found "Press ENTER to see the list of tests" Bytes in current buffer (color code eliminated): 0 d4 000 00x0000 x0000x00 000000 0 Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.default.test_wpa_supplicant_ut/dut.txt</td>
|
||||||
|
<td></td>
|
||||||
|
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=esp32c3.default.test_wpa_supplicant_ut">link</a></td>
|
||||||
|
</tr>
|
||||||
|
</tbody>
|
||||||
|
</table><h2>Skipped Test Cases</h2><table data-toggle="table" data-search="true">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th>Test Case</th>
|
||||||
|
<th>Test Script File Path</th>
|
||||||
|
<th>Skipped Reason</th>
|
||||||
|
<th>Grafana URL</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
<tr>
|
||||||
|
<td>test_python_interpreter_win</td>
|
||||||
|
<td>test_common.py</td>
|
||||||
|
<td>Linux does not support executing .exe files</td>
|
||||||
|
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=test_python_interpreter_win">link</a></td>
|
||||||
|
</tr>
|
||||||
|
</tbody>
|
||||||
|
</table><h2>Succeeded Test Cases</h2><table data-toggle="table" data-search="true">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th>Test Case</th>
|
||||||
|
<th>Test Script File Path</th>
|
||||||
|
<th>Job URL</th>
|
||||||
|
<th>Grafana URL</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
<tr>
|
||||||
|
<td>esp32c2.default.test_vfs_default</td>
|
||||||
|
<td>components/vfs/test_apps/pytest_vfs.py</td>
|
||||||
|
<td></td>
|
||||||
|
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=esp32c2.default.test_vfs_default">link</a></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>esp32c2.iram.test_vfs_default</td>
|
||||||
|
<td>components/vfs/test_apps/pytest_vfs.py</td>
|
||||||
|
<td></td>
|
||||||
|
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=esp32c2.iram.test_vfs_default">link</a></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>test_python_interpreter_unix</td>
|
||||||
|
<td>test_common.py</td>
|
||||||
|
<td></td>
|
||||||
|
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=test_python_interpreter_unix">link</a></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>test_invoke_confserver</td>
|
||||||
|
<td>test_common.py</td>
|
||||||
|
<td></td>
|
||||||
|
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=test_invoke_confserver">link</a></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>test_ccache_used_to_build</td>
|
||||||
|
<td>test_common.py</td>
|
||||||
|
<td></td>
|
||||||
|
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=test_ccache_used_to_build">link</a></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>test_toolchain_prefix_in_description_file</td>
|
||||||
|
<td>test_common.py</td>
|
||||||
|
<td></td>
|
||||||
|
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=test_toolchain_prefix_in_description_file">link</a></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>test_subcommands_with_options</td>
|
||||||
|
<td>test_common.py</td>
|
||||||
|
<td></td>
|
||||||
|
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=test_subcommands_with_options">link</a></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>test_fallback_to_build_system_target</td>
|
||||||
|
<td>test_common.py</td>
|
||||||
|
<td></td>
|
||||||
|
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=test_fallback_to_build_system_target">link</a></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>test_create_component_project</td>
|
||||||
|
<td>test_common.py</td>
|
||||||
|
<td></td>
|
||||||
|
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=test_create_component_project">link</a></td>
|
||||||
|
</tr>
|
||||||
|
</tbody>
|
||||||
|
</table></div>
|
||||||
|
<script src="https://cdn.jsdelivr.net/npm/jquery/dist/jquery.min.js"></script>
|
||||||
|
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js"></script>
|
||||||
|
<script src="https://unpkg.com/bootstrap-table@1.22.1/dist/bootstrap-table.min.js"></script>
|
||||||
|
</body>
|
||||||
|
</html>
|
@@ -0,0 +1,96 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
import os.path
|
||||||
|
import sys
|
||||||
|
import unittest
|
||||||
|
from unittest.mock import MagicMock
|
||||||
|
from unittest.mock import patch
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.join(f'{os.environ.get("IDF_PATH")}', 'tools', 'ci', 'python_packages'))
|
||||||
|
sys.path.insert(0, os.path.join(f'{os.environ.get("IDF_PATH")}', 'tools', 'ci'))
|
||||||
|
|
||||||
|
|
||||||
|
from dynamic_pipelines.report import TargetTestReportGenerator # noqa: E402
|
||||||
|
from dynamic_pipelines.utils import parse_testcases_from_filepattern # noqa: E402
|
||||||
|
|
||||||
|
|
||||||
|
class TestReportGeneration(unittest.TestCase):
    """Exercise TargetTestReportGenerator against the sample XUNIT reports on disk."""

    @classmethod
    def load_expected_report(cls, file_path: str) -> str:
        """
        Loads the content of an expected report HTML file.

        :param file_path: The path to the file containing the expected HTML report.
        :return: The content of the file as a string.
        """
        with open(file_path, 'r') as expected_file:
            return expected_file.read()

    def setUp(self) -> None:
        # Replace the real GitLab client and the CI environment variables for
        # the duration of each test; both are undone via addCleanup.
        gitlab_patcher = patch('dynamic_pipelines.report.Gitlab')
        ci_env_patcher = patch.dict('os.environ', {
            'CI_DASHBOARD_HOST': 'https://test_dashboard_host',
            'CI_PAGES_URL': 'https://artifacts_path',
            'CI_JOB_ID': '1',
        })
        ci_env_patcher.start()
        self.MockGitlab = gitlab_patcher.start()
        self.addCleanup(gitlab_patcher.stop)
        self.addCleanup(ci_env_patcher.stop)

        self.reports_sample_data_path = os.path.join(
            os.environ.get('IDF_PATH', ''),  # type: ignore
            'tools',
            'ci',
            'dynamic_pipelines',
            'tests',
            'test_report_generator',
            'reports_sample_data',
        )

        # Wire a fake project / merge-request pair onto the patched client.
        self.mock_project = MagicMock()
        self.mock_mr = MagicMock()
        self.MockGitlab.return_value.project = self.mock_project
        self.mock_project.mergerequests.get.return_value = self.mock_mr

        self.expected_report_html = self.load_expected_report(
            os.path.join(self.reports_sample_data_path, 'expected_target_test_report.html')
        )

        parsed_cases = parse_testcases_from_filepattern(
            os.path.join(self.reports_sample_data_path, 'XUNIT_*.xml')
        )
        self.report_generator = TargetTestReportGenerator(
            project_id=123, mr_iid=1, pipeline_id=456, title='Test Report', test_cases=parsed_cases
        )
        # Patterns are matched against case names; the fixture data contains
        # four cases covered by these three patterns.
        self.report_generator._known_failure_cases_set = {
            '*.test_wpa_supplicant_ut',
            'esp32c3.release.test_esp_timer',
            '*.512safe.test_wear_levelling',
        }

    def test_known_failure_cases(self) -> None:
        matched = self.report_generator.get_known_failure_cases()
        self.assertEqual(len(matched), 4)

    def test_failed_cases_in_report(self) -> None:
        # Failures already covered by the known-failure patterns must be
        # excluded from the "failed" section of the report.
        known_names = {case.name for case in self.report_generator.get_known_failure_cases()}
        unexpected_failures = self.report_generator._filter_test_cases(
            lambda case: case.is_failure and case.name not in known_names
        )
        self.assertEqual(len(unexpected_failures), 3)

    def test_skipped_cases_in_report(self) -> None:
        skipped = self.report_generator._filter_test_cases(lambda case: case.is_skipped)
        self.assertEqual(len(skipped), 1)

    def test_successful_cases_in_report(self) -> None:
        succeeded = self.report_generator._filter_test_cases(lambda case: case.is_success)
        self.assertEqual(len(succeeded), 9)

    def test_complete_html_structure(self) -> None:
        # The rendered report must match the checked-in expected HTML byte-for-byte.
        generated_html = self.report_generator._get_report_str()
        self.assertEqual(generated_html, self.expected_report_html)
|
||||||
|
|
||||||
|
|
||||||
|
# Allow running this module directly: `python -m unittest test_target_test_report_generator.py`
# or `python test_target_test_report_generator.py`.
if __name__ == '__main__':
    unittest.main()
|
@@ -1,10 +1,16 @@
|
|||||||
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
|
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
|
||||||
# SPDX-License-Identifier: Apache-2.0
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
import glob
|
||||||
|
import os
|
||||||
|
import re
|
||||||
import typing as t
|
import typing as t
|
||||||
|
import xml.etree.ElementTree as ET
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
import yaml
|
import yaml
|
||||||
|
|
||||||
from .models import Job
|
from .models import Job
|
||||||
|
from .models import TestCase
|
||||||
|
|
||||||
|
|
||||||
def dump_jobs_to_yaml(
|
def dump_jobs_to_yaml(
|
||||||
@@ -33,3 +39,45 @@ def dump_jobs_to_yaml(
|
|||||||
|
|
||||||
with open(output_filepath, 'w') as fw:
|
with open(output_filepath, 'w') as fw:
|
||||||
yaml.dump(yaml_dict, fw, indent=2)
|
yaml.dump(yaml_dict, fw, indent=2)
|
||||||
|
|
||||||
|
|
||||||
|
def parse_testcases_from_filepattern(junit_report_filepattern: str) -> t.List[TestCase]:
    """
    Parses test cases from XML files matching the provided file pattern.

    >>> test_cases = parse_testcases_from_filepattern("path/to/your/junit/reports/*.xml")

    :param junit_report_filepattern: The file pattern to match XML files containing JUnit test reports.
    :return: List[TestCase]: A list of TestCase objects parsed from the XML files.
    """
    # One TestCase per <testcase> node, across every report file the pattern matches.
    return [
        TestCase.from_test_case_node(case_node)
        for report_file in glob.glob(junit_report_filepattern)
        for case_node in ET.parse(report_file).getroot().findall('.//testcase')
    ]
|
||||||
|
|
||||||
|
|
||||||
|
def load_known_failure_cases() -> t.Optional[t.Set[str]]:
    """
    Load the known-failure test case patterns from the file named by the
    ``KNOWN_FAILURE_CASES_FILE_NAME`` environment variable.

    The file is a whitespace-separated list of case patterns; ``#`` starts a
    comment that runs to the end of the line.

    :return: The set of patterns, or ``None`` when the environment variable is
        unset/empty or the file cannot be found.
    """
    known_failures_file = os.getenv('KNOWN_FAILURE_CASES_FILE_NAME', '')
    if not known_failures_file:
        return None
    try:
        # Keep the try-block minimal: only the file access can raise here.
        with open(known_failures_file) as f:
            file_content = f.read()
    except FileNotFoundError:
        return None
    # Strip comments up to end-of-line. Unlike the pattern '#.*\n', this also
    # removes a comment on the last line when the file lacks a trailing newline.
    without_comments = re.sub(r'#.*', '', file_content)
    # str.split() with no argument already discards surrounding whitespace.
    return set(without_comments.split())
|
||||||
|
|
||||||
|
|
||||||
|
def is_url(string: str) -> bool:
    """
    Check if the string is a valid URL by parsing it and verifying if it contains both a scheme and a network location.

    :param string: The string to check if it is a URL.
    :return: True if the string is a valid URL, False otherwise.
    """
    parsed = urlparse(string)
    has_scheme = bool(parsed.scheme)
    has_netloc = bool(parsed.netloc)
    return has_scheme and has_netloc
|
||||||
|
Reference in New Issue
Block a user