From 40c8a16ad4e5ae6f1c39b2b3637c4a055a282fb5 Mon Sep 17 00:00:00 2001 From: Aleksei Apaseev Date: Sat, 18 May 2024 19:00:08 +0800 Subject: [PATCH] ci: add known failure cases to the target test report target test report: - refactor the methods to ensure reusability - cover report generation with tests - introduce a known failure tests block in report - add grafana link to testcases dashboard --- .gitlab/ci/host-test.yml | 4 +- .gitlab/ci/rules.yml | 1 + tools/ci/dynamic_pipelines/models.py | 17 +- tools/ci/dynamic_pipelines/report.py | 141 +++++++++--- .../scripts/generate_target_test_report.py | 13 +- .../templates/generate_target_test_report.yml | 1 + tools/ci/dynamic_pipelines/tests/__init__.py | 0 .../tests/test_report_generator/__init__.py | 0 .../reports_sample_data/XUNIT_REPORT.xml | 200 ++++++++++++++++++ .../expected_target_test_report.html | 178 ++++++++++++++++ .../test_target_test_report_generator.py | 96 +++++++++ tools/ci/dynamic_pipelines/utils.py | 48 +++++ 12 files changed, 656 insertions(+), 43 deletions(-) create mode 100644 tools/ci/dynamic_pipelines/tests/__init__.py create mode 100644 tools/ci/dynamic_pipelines/tests/test_report_generator/__init__.py create mode 100644 tools/ci/dynamic_pipelines/tests/test_report_generator/reports_sample_data/XUNIT_REPORT.xml create mode 100644 tools/ci/dynamic_pipelines/tests/test_report_generator/reports_sample_data/expected_target_test_report.html create mode 100644 tools/ci/dynamic_pipelines/tests/test_report_generator/test_target_test_report_generator.py diff --git a/.gitlab/ci/host-test.yml b/.gitlab/ci/host-test.yml index f2ab6b89d2..04e2612f40 100644 --- a/.gitlab/ci/host-test.yml +++ b/.gitlab/ci/host-test.yml @@ -394,5 +394,7 @@ test_idf_pytest_plugin: reports: junit: XUNIT_RESULT.xml script: - - cd tools/ci/idf_pytest + - cd ${IDF_PATH}/tools/ci/dynamic_pipelines/tests/test_report_generator + - python -m unittest test_target_test_report_generator.py + - cd ${IDF_PATH}/tools/ci/idf_pytest - pytest --junitxml=${CI_PROJECT_DIR}/XUNIT_RESULT.xml diff --git a/.gitlab/ci/rules.yml b/.gitlab/ci/rules.yml index b492bdc60c..9fab5014d9 100644 --- a/.gitlab/ci/rules.yml +++ b/.gitlab/ci/rules.yml @@ -148,6 +148,7 @@ .patterns-idf-pytest-plugin: &patterns-idf-pytest-plugin - "tools/ci/idf_pytest/**/*" + - "tools/ci/dynamic_pipelines/tests/**/*" ############## # if anchors # diff --git a/tools/ci/dynamic_pipelines/models.py b/tools/ci/dynamic_pipelines/models.py index 2e8dc234a4..5fff31c3f5 100644 --- a/tools/ci/dynamic_pipelines/models.py +++ b/tools/ci/dynamic_pipelines/models.py @@ -1,7 +1,9 @@ # SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD # SPDX-License-Identifier: Apache-2.0 import inspect +import os import typing as t +import urllib.parse from dataclasses import dataclass from xml.etree.ElementTree import Element @@ -17,7 +19,7 @@ class Job: tags: t.Optional[t.List[str]] = None, stage: t.Optional[str] = None, parallel: int = 1, - variables: t.Dict[str, str] = None, + variables: t.Optional[t.Dict[str, str]] = None, script: t.Optional[t.List[str]] = None, before_script: t.Optional[t.List[str]] = None, after_script: t.Optional[t.List[str]] = None, @@ -131,6 +133,8 @@ class TestCase: failure: t.Optional[str] = None skipped: t.Optional[str] = None ci_job_url: t.Optional[str] = None + ci_dashboard_url: t.Optional[str] = None + dut_log_url: t.Optional[str] = None @property def is_failure(self) -> bool: @@ -150,16 +154,25 @@ class TestCase: print('WARNING: Node Invalid: ', node) return None + # url to test cases 
dashboard + grafana_base_url = urllib.parse.urljoin(os.getenv('CI_DASHBOARD_HOST', ''), '/d/Ucg477Fnz/case-list') + encoded_params = urllib.parse.urlencode({'var-case_id': node.attrib['name']}, quote_via=urllib.parse.quote) + kwargs = { 'name': node.attrib['name'], 'file': node.attrib.get('file'), 'time': float(node.attrib.get('time') or 0), 'ci_job_url': node.attrib.get('ci_job_url') or '', + 'ci_dashboard_url': f'{grafana_base_url}?{encoded_params}', } failure_node = node.find('failure') + # bool(failure_node) is False, so compare with None + if failure_node is None: + failure_node = node.find('error') if failure_node is not None: - kwargs['failure'] = failure_node.attrib['message'] + message = failure_node.attrib.get('message', '') + kwargs['failure'] = message skipped_node = node.find('skipped') if skipped_node is not None: diff --git a/tools/ci/dynamic_pipelines/report.py b/tools/ci/dynamic_pipelines/report.py index a070c125c1..1f0de199aa 100644 --- a/tools/ci/dynamic_pipelines/report.py +++ b/tools/ci/dynamic_pipelines/report.py @@ -1,7 +1,7 @@ # SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD # SPDX-License-Identifier: Apache-2.0 - import abc +import fnmatch import html import os import re @@ -15,8 +15,12 @@ from idf_build_apps.constants import BuildStatus from idf_ci.uploader import AppUploader from prettytable import PrettyTable -from .constants import COMMENT_START_MARKER, REPORT_TEMPLATE_FILEPATH, TEST_RELATED_APPS_DOWNLOAD_URLS_FILENAME +from .constants import COMMENT_START_MARKER +from .constants import REPORT_TEMPLATE_FILEPATH +from .constants import TEST_RELATED_APPS_DOWNLOAD_URLS_FILENAME from .models import TestCase +from .utils import is_url +from .utils import load_known_failure_cases class ReportGenerator: @@ -32,6 +36,7 @@ class ReportGenerator: self.title = title self.output_filepath = self.title.lower().replace(' ', '_') + '.html' + self.additional_info = '' @staticmethod def get_download_link_for_url(url: str) -> str: @@ -66,11 +71,14 @@ class ReportGenerator: # CI_PAGES_URL is {URL}/esp-idf, which missed one `-` url = os.getenv('CI_PAGES_URL', '').replace('esp-idf', '-/esp-idf') - comment = f'''#### {self.title} + comment = f'#### {self.title}\n' + if self.additional_info: + comment += f'{self.additional_info}\n' + comment += f""" Full {self.title} here: {url}/-/jobs/{job_id}/artifacts/{self.output_filepath} (with commit {commit_id}) -''' +""" print(comment) if self.mr is None: @@ -87,9 +95,9 @@ Full {self.title} here: {url}/-/jobs/{job_id}/artifacts/{self.output_filepath} ( note.save() break else: - new_comment = f'''{COMMENT_START_MARKER} + new_comment = f"""{COMMENT_START_MARKER} -{comment}''' +{comment}""" self.mr.notes.create({'body': new_comment}) @@ -237,41 +245,114 @@ class TargetTestReportGenerator(ReportGenerator): super().__init__(project_id, mr_iid, pipeline_id, title=title) self.test_cases = test_cases + self._known_failure_cases_set = None + + @property + def known_failure_cases_set(self) -> t.Optional[t.Set[str]]: + if self._known_failure_cases_set is None: + self._known_failure_cases_set = load_known_failure_cases() + + return self._known_failure_cases_set + + def get_known_failure_cases(self) -> t.List[TestCase]: + if self.known_failure_cases_set is None: + return [] + matched_cases = [ + testcase + for testcase in self.test_cases + if any(fnmatch.fnmatch(testcase.name, pattern) for pattern in self.known_failure_cases_set) + and testcase.is_failure + ] + return matched_cases + + def _filter_test_cases(self, condition: 
t.Callable[[TestCase], bool]) -> t.List[TestCase]:
+        """
+        Filter test cases based on a given condition. In this scenario, we filter by status;
+        however, it is possible to filter by other criteria.
+
+        :param condition: A function that evaluates to True or False for each test case.
+        :return: List of filtered TestCase instances.
+        """
+        return [tc for tc in self.test_cases if condition(tc)]
+
+    def _create_table_for_test_cases(
+        self, test_cases: t.List[TestCase], headers: t.List[str], row_attrs: t.List[str]
+    ) -> str:
+        """
+        Create a PrettyTable and convert it to an HTML string for the provided test cases.
+
+        :param test_cases: List of TestCase objects to include in the table.
+        :param headers: List of strings for the table headers.
+        :param row_attrs: List of attributes to include in each row.
+        :return: HTML table string.
+        """
+        table = PrettyTable()
+        table.field_names = headers
+        for tc in test_cases:
+            row = []
+            for attr in row_attrs:
+                value = getattr(tc, attr, '')
+                if is_url(value):
+                    link = f'<a href="{value}">link</a>'
+                    row.append(link)
+                else:
+                    row.append(value)
+            table.add_row(row)
+
+        return self.table_to_html_str(table)
 
     def _get_report_str(self) -> str:
+        """
+        Generate a complete HTML report string by processing test cases.
+
+        :return: Complete HTML report string.
+        """
         table_str = ''
 
-        failed_test_cases = [tc for tc in self.test_cases if tc.is_failure]
+        known_failures = self.get_known_failure_cases()
+        known_failure_case_names = {case.name for case in known_failures}
+        failed_test_cases = self._filter_test_cases(
+            lambda tc: tc.is_failure and tc.name not in known_failure_case_names
+        )
+        skipped_test_cases = self._filter_test_cases(lambda tc: tc.is_skipped)
+        successful_test_cases = self._filter_test_cases(lambda tc: tc.is_success)
+
         if failed_test_cases:
-            table_str += '<h2>Failed Test Cases</h2>'
+            table_str += '<h2>Failed Test Cases (Excludes Known Failure Cases)</h2>'
+            table_str += self._create_table_for_test_cases(
+                test_cases=failed_test_cases,
+                headers=['Test Case', 'Test Script File Path', 'Failure Reason', 'Job URL', 'Grafana URL'],
+                row_attrs=['name', 'file', 'failure', 'ci_job_url', 'ci_dashboard_url'],
+            )
 
-            failed_test_cases_table = PrettyTable()
-            failed_test_cases_table.field_names = ['Test Case', 'Test Script File Path', 'Failure Reason', 'Job URL']
-            for tc in failed_test_cases:
-                failed_test_cases_table.add_row([tc.name, tc.file, tc.failure, tc.ci_job_url])
+        if known_failures:
+            table_str += '<h2>Known Failure Cases</h2>'
+            table_str += self._create_table_for_test_cases(
+                test_cases=known_failures,
+                headers=['Test Case', 'Test Script File Path', 'Failure Reason', 'Job URL', 'Grafana URL'],
+                row_attrs=['name', 'file', 'failure', 'ci_job_url', 'ci_dashboard_url'],
+            )
 
-            table_str += self.table_to_html_str(failed_test_cases_table)
-
-        skipped_test_cases = [tc for tc in self.test_cases if tc.is_skipped]
         if skipped_test_cases:
             table_str += '<h2>Skipped Test Cases</h2>'
+            table_str += self._create_table_for_test_cases(
+                test_cases=skipped_test_cases,
+                headers=['Test Case', 'Test Script File Path', 'Skipped Reason', 'Grafana URL'],
+                row_attrs=['name', 'file', 'skipped', 'ci_dashboard_url'],
+            )
 
-            skipped_test_cases_table = PrettyTable()
-            skipped_test_cases_table.field_names = ['Test Case', 'Test Script File Path', 'Skipped Reason']
-            for tc in skipped_test_cases:
-                skipped_test_cases_table.add_row([tc.name, tc.file, tc.skipped])
-
-            table_str += self.table_to_html_str(skipped_test_cases_table)
-
-        successful_test_cases = [tc for tc in self.test_cases if tc.is_success]
         if successful_test_cases:
             table_str += '<h2>Succeeded Test Cases</h2>'
+            table_str += self._create_table_for_test_cases(
+                test_cases=successful_test_cases,
+                headers=['Test Case', 'Test Script File Path', 'Job URL', 'Grafana URL'],
+                row_attrs=['name', 'file', 'ci_job_url', 'ci_dashboard_url'],
+            )
 
-            successful_test_cases_table = PrettyTable()
-            successful_test_cases_table.field_names = ['Test Case', 'Test Script File Path', 'Job URL']
-            for tc in successful_test_cases:
-                successful_test_cases_table.add_row([tc.name, tc.file, tc.ci_job_url])
-
-            table_str += self.table_to_html_str(successful_test_cases_table)
+        self.additional_info = (
+            '**Test Case Summary:**\n'
+            f'- **Failed Test Cases (Excludes Known Failure Cases):** {len(failed_test_cases)}\n'
+            f'- **Known Failures:** {len(known_failures)}\n'
+            f'- **Skipped Test Cases:** {len(skipped_test_cases)}\n'
+            f'- **Succeeded Test Cases:** {len(successful_test_cases)}\n\n'
+            f'Please check the report below for more information.\n\n'
+        )
 
         return self.generate_html_report(table_str)
diff --git a/tools/ci/dynamic_pipelines/scripts/generate_target_test_report.py b/tools/ci/dynamic_pipelines/scripts/generate_target_test_report.py
index cd736ad9f1..07a8952bcf 100644
--- a/tools/ci/dynamic_pipelines/scripts/generate_target_test_report.py
+++ b/tools/ci/dynamic_pipelines/scripts/generate_target_test_report.py
@@ -1,14 +1,12 @@
 # SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
 # SPDX-License-Identifier: Apache-2.0
-
 import argparse
-import glob
 import os
-import xml.etree.ElementTree as ET
 
 import __init__  # noqa: F401  # inject the system path
-from dynamic_pipelines.models import TestCase
 from dynamic_pipelines.report import TargetTestReportGenerator
+from dynamic_pipelines.utils import parse_testcases_from_filepattern
+
 
 if __name__ == '__main__':
     parser = argparse.ArgumentParser(
@@ -52,11 +50,6 @@ if __name__ == '__main__':
 
     args = parser.parse_args()
 
-    test_cases = []
-    for f in glob.glob(args.junit_report_filepattern):
-        root = ET.parse(f).getroot()
-        for tc in root.findall('.//testcase'):
-            test_cases.append(TestCase.from_test_case_node(tc))
-
+    test_cases = parse_testcases_from_filepattern(args.junit_report_filepattern)
     report_generator = TargetTestReportGenerator(args.project_id, args.mr_iid, args.pipeline_id, test_cases=test_cases)
     report_generator.post_report(args.job_id, args.commit_id)
diff --git a/tools/ci/dynamic_pipelines/templates/generate_target_test_report.yml b/tools/ci/dynamic_pipelines/templates/generate_target_test_report.yml
index 62cba4d77f..6559bc3502 100644
--- a/tools/ci/dynamic_pipelines/templates/generate_target_test_report.yml
+++ b/tools/ci/dynamic_pipelines/templates/generate_target_test_report.yml
@@ -7,4 +7,5 @@ generate_pytest_report:
     paths:
       - target_test_report.html
   script:
+    - python tools/ci/get_known_failure_cases_file.py
     - python tools/ci/dynamic_pipelines/scripts/generate_target_test_report.py
diff --git a/tools/ci/dynamic_pipelines/tests/__init__.py b/tools/ci/dynamic_pipelines/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tools/ci/dynamic_pipelines/tests/test_report_generator/__init__.py b/tools/ci/dynamic_pipelines/tests/test_report_generator/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tools/ci/dynamic_pipelines/tests/test_report_generator/reports_sample_data/XUNIT_REPORT.xml b/tools/ci/dynamic_pipelines/tests/test_report_generator/reports_sample_data/XUNIT_REPORT.xml
new file mode 100644
index 0000000000..8f3737b75d
--- /dev/null
+++ 
b/tools/ci/dynamic_pipelines/tests/test_report_generator/reports_sample_data/XUNIT_REPORT.xml @@ -0,0 +1,200 @@ + + + + + conftest.py:74: in case_tester + yield CaseTester(dut, **kwargs) +tools/ci/idf_unity_tester.py:202: in __init__ + self._manager = Manager() +/usr/lib/python3.9/multiprocessing/context.py:57: in Manager + m.start() +/usr/lib/python3.9/multiprocessing/managers.py:557: in start + self._address = reader.recv() +/usr/lib/python3.9/multiprocessing/connection.py:255: in recv + buf = self._recv_bytes() +/usr/lib/python3.9/multiprocessing/connection.py:419: in _recv_bytes + buf = self._recv(4) +/usr/lib/python3.9/multiprocessing/connection.py:388: in _recv + raise EOFError +E EOFError + + + conftest.py:74: in case_tester + yield CaseTester(dut, **kwargs) +tools/ci/idf_unity_tester.py:202: in __init__ + self._manager = Manager() +/usr/lib/python3.9/multiprocessing/context.py:57: in Manager + m.start() +/usr/lib/python3.9/multiprocessing/managers.py:557: in start + self._address = reader.recv() +/usr/lib/python3.9/multiprocessing/connection.py:255: in recv + buf = self._recv_bytes() +/usr/lib/python3.9/multiprocessing/connection.py:419: in _recv_bytes + buf = self._recv(4) +/usr/lib/python3.9/multiprocessing/connection.py:388: in _recv + raise EOFError +E EOFError + + + + + + + /root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/plugin.py:1272: in pytest_runtest_call + self._raise_dut_failed_cases_if_exists(duts) # type: ignore +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/plugin.py:1207: in _raise_dut_failed_cases_if_exists + raise AssertionError('Unity test failed') +E AssertionError: Unity test failed + + + + + + /builds/espressif/esp-idf/tools/test_build_system/test_common.py:134: Linux does not support executing .exe files + + + + + + + + + + + /root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:76: in wrapper + index = func(self, pattern, *args, **kwargs) +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:153: in expect_exact + return self.pexpect_proc.expect_exact(pattern, **kwargs) +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/spawnbase.py:432: in expect_exact + return exp.expect_loop(timeout) +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/expect.py:181: in expect_loop + return self.timeout(e) +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/expect.py:144: in timeout + raise exc +E pexpect.exceptions.TIMEOUT: <pytest_embedded.log.PexpectProcess object at 0x7f47b1ab46a0> +E searcher: searcher_string: +E 0: b'Press ENTER to see the list of tests' +E <pytest_embedded.log.PexpectProcess object at 0x7f47b1ab46a0> +E searcher: searcher_string: +E 0: b'Press ENTER to see the list of tests' + +The above exception was the direct cause of the following exception: +components/esp_timer/test_apps/pytest_esp_timer_ut.py:24: in test_esp_timer + dut.run_all_single_board_cases(timeout=120) +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded_idf/unity_tester.py:435: in run_all_single_board_cases + for case in self.test_menu: +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded_idf/unity_tester.py:243: in test_menu + self._test_menu = self._parse_test_menu() 
+/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded_idf/unity_tester.py:124: in _parse_test_menu + self.expect_exact(ready_line) +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:83: in wrapper + raise e.__class__(debug_str) from e +E pexpect.exceptions.TIMEOUT: Not found "Press ENTER to see the list of tests" +E Bytes in current buffer (color code eliminated): ce710,len:0x2afc entry 0x403cc710 +E Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.release.test_esp_timer/dut.txt + + + /root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:76: in wrapper + index = func(self, pattern, *args, **kwargs) +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:131: in expect + return self.pexpect_proc.expect(pattern, **kwargs) +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/spawnbase.py:354: in expect + return self.expect_list(compiled_pattern_list, +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/spawnbase.py:383: in expect_list + return exp.expect_loop(timeout) +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/expect.py:181: in expect_loop + return self.timeout(e) +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/expect.py:144: in timeout + raise exc +E pexpect.exceptions.TIMEOUT: <pytest_embedded.log.PexpectProcess object at 0x7f47af653b80> +E searcher: searcher_re: +E 0: re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)') +E <pytest_embedded.log.PexpectProcess object at 0x7f47af653b80> +E searcher: searcher_re: +E 0: re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)') + +The above exception was the direct cause of the following exception: +components/wear_levelling/test_apps/pytest_wear_levelling.py:18: in test_wear_levelling + dut.expect_unity_test_output() +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:180: in expect_unity_test_output + self.expect(UNITY_SUMMARY_LINE_REGEX, timeout=timeout) +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:83: in wrapper + raise e.__class__(debug_str) from e +E pexpect.exceptions.TIMEOUT: Not found "re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)', re.MULTILINE)" +E Bytes in current buffer (color code eliminated): Serial port /dev/ttyUSB16 Connecting.... Connecting.... esptool.py v4.7.0 Found 1 serial ports Chip is ESP32-C3 (QFN32) (revision v0.3) Features: WiFi, BLE, Embedded Flash 4MB... 
(total 6673 bytes) +E Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.512safe.test_wear_levelling/dut.txt + + + /root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:76: in wrapper + index = func(self, pattern, *args, **kwargs) +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:131: in expect + return self.pexpect_proc.expect(pattern, **kwargs) +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/spawnbase.py:354: in expect + return self.expect_list(compiled_pattern_list, +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/spawnbase.py:383: in expect_list + return exp.expect_loop(timeout) +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/expect.py:181: in expect_loop + return self.timeout(e) +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/expect.py:144: in timeout + raise exc +E pexpect.exceptions.TIMEOUT: <pytest_embedded.log.PexpectProcess object at 0x7f47afc540d0> +E searcher: searcher_re: +E 0: re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)') +E <pytest_embedded.log.PexpectProcess object at 0x7f47afc540d0> +E searcher: searcher_re: +E 0: re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)') + +The above exception was the direct cause of the following exception: +components/wear_levelling/test_apps/pytest_wear_levelling.py:18: in test_wear_levelling + dut.expect_unity_test_output() +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:180: in expect_unity_test_output + self.expect(UNITY_SUMMARY_LINE_REGEX, timeout=timeout) +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:83: in wrapper + raise e.__class__(debug_str) from e +E pexpect.exceptions.TIMEOUT: Not found "re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)', re.MULTILINE)" +E Bytes in current buffer (color code eliminated): Serial port /dev/ttyUSB16 Connecting.... Connecting.... esptool.py v4.7.0 Found 1 serial ports Chip is ESP32-C3 (QFN32) (revision v0.3) Features: WiFi, BLE, Embedded Flash 4MB... 
(total 24528 bytes) +E Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.release.test_wear_levelling/dut.txt + + + /root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:76: in wrapper + index = func(self, pattern, *args, **kwargs) +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:153: in expect_exact + return self.pexpect_proc.expect_exact(pattern, **kwargs) +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/spawnbase.py:432: in expect_exact + return exp.expect_loop(timeout) +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/expect.py:181: in expect_loop + return self.timeout(e) +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/expect.py:144: in timeout + raise exc +E pexpect.exceptions.TIMEOUT: <pytest_embedded.log.PexpectProcess object at 0x7f47afc54df0> +E searcher: searcher_string: +E 0: b'Press ENTER to see the list of tests' +E <pytest_embedded.log.PexpectProcess object at 0x7f47afc54df0> +E searcher: searcher_string: +E 0: b'Press ENTER to see the list of tests' + +The above exception was the direct cause of the following exception: +components/wpa_supplicant/test_apps/pytest_wpa_supplicant_ut.py:17: in test_wpa_supplicant_ut + dut.run_all_single_board_cases() +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded_idf/unity_tester.py:435: in run_all_single_board_cases + for case in self.test_menu: +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded_idf/unity_tester.py:243: in test_menu + self._test_menu = self._parse_test_menu() +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded_idf/unity_tester.py:124: in _parse_test_menu + self.expect_exact(ready_line) +/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:83: in wrapper + raise e.__class__(debug_str) from e +E pexpect.exceptions.TIMEOUT: Not found "Press ENTER to see the list of tests" +E Bytes in current buffer (color code eliminated): 0 d4 000 00x0000 x0000x00 000000 0 +E Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.default.test_wpa_supplicant_ut/dut.txt + + + diff --git a/tools/ci/dynamic_pipelines/tests/test_report_generator/reports_sample_data/expected_target_test_report.html b/tools/ci/dynamic_pipelines/tests/test_report_generator/reports_sample_data/expected_target_test_report.html new file mode 100644 index 0000000000..b4c143879b --- /dev/null +++ b/tools/ci/dynamic_pipelines/tests/test_report_generator/reports_sample_data/expected_target_test_report.html @@ -0,0 +1,178 @@ + + + + + Test Report + + + + + + +

+<h2>Failed Test Cases (Excludes Known Failure Cases)</h2>
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Test CaseTest Script File PathFailure ReasonJob URLGrafana URL
('esp32h2', 'esp32h2').('defaults', 'defaults').test_i2c_multi_devicecomponents/driver/test_apps/i2c_test_apps/pytest_i2c.pyfailed on setup with "EOFError"link
('esp32h2', 'esp32h2').('default', 'default').test_i2s_multi_devcomponents/driver/test_apps/i2s_test_apps/i2s_multi_dev/pytest_i2s_multi_dev.pyfailed on setup with "EOFError"link
esp32c3.release.test_wear_levellingcomponents/wear_levelling/test_apps/pytest_wear_levelling.pypexpect.exceptions.TIMEOUT: Not found "re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?POK|FAIL)', re.MULTILINE)" Bytes in current buffer (color code eliminated): Serial port /dev/ttyUSB16 Connecting.... Connecting.... esptool.py v4.7.0 Found 1 serial ports Chip is ESP32-C3 (QFN32) (revision v0.3) Features: WiFi, BLE, Embedded Flash 4MB... (total 24528 bytes) Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.release.test_wear_levelling/dut.txtlink

+<h2>Known Failure Cases</h2>
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Test CaseTest Script File PathFailure ReasonJob URLGrafana URL
esp32c2.default.test_wpa_supplicant_utcomponents/wpa_supplicant/test_apps/pytest_wpa_supplicant_ut.pyAssertionError: Unity test failedlink
esp32c3.release.test_esp_timercomponents/esp_timer/test_apps/pytest_esp_timer_ut.pypexpect.exceptions.TIMEOUT: Not found "Press ENTER to see the list of tests" Bytes in current buffer (color code eliminated): ce710,len:0x2afc entry 0x403cc710 Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.release.test_esp_timer/dut.txtlink
esp32c3.512safe.test_wear_levellingcomponents/wear_levelling/test_apps/pytest_wear_levelling.pypexpect.exceptions.TIMEOUT: Not found "re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?POK|FAIL)', re.MULTILINE)" Bytes in current buffer (color code eliminated): Serial port /dev/ttyUSB16 Connecting.... Connecting.... esptool.py v4.7.0 Found 1 serial ports Chip is ESP32-C3 (QFN32) (revision v0.3) Features: WiFi, BLE, Embedded Flash 4MB... (total 6673 bytes) Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.512safe.test_wear_levelling/dut.txtlink
esp32c3.default.test_wpa_supplicant_utcomponents/wpa_supplicant/test_apps/pytest_wpa_supplicant_ut.pypexpect.exceptions.TIMEOUT: Not found "Press ENTER to see the list of tests" Bytes in current buffer (color code eliminated): 0 d4 000 00x0000 x0000x00 000000 0 Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.default.test_wpa_supplicant_ut/dut.txtlink

+<h2>Skipped Test Cases</h2>
+ + + + + + + + + + + + + + + + +
Test CaseTest Script File PathSkipped ReasonGrafana URL
test_python_interpreter_wintest_common.pyLinux does not support executing .exe fileslink

+<h2>Succeeded Test Cases</h2>
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Test CaseTest Script File PathJob URLGrafana URL
esp32c2.default.test_vfs_defaultcomponents/vfs/test_apps/pytest_vfs.pylink
esp32c2.iram.test_vfs_defaultcomponents/vfs/test_apps/pytest_vfs.pylink
test_python_interpreter_unixtest_common.pylink
test_invoke_confservertest_common.pylink
test_ccache_used_to_buildtest_common.pylink
test_toolchain_prefix_in_description_filetest_common.pylink
test_subcommands_with_optionstest_common.pylink
test_fallback_to_build_system_targettest_common.pylink
test_create_component_projecttest_common.pylink
+ + + + + diff --git a/tools/ci/dynamic_pipelines/tests/test_report_generator/test_target_test_report_generator.py b/tools/ci/dynamic_pipelines/tests/test_report_generator/test_target_test_report_generator.py new file mode 100644 index 0000000000..a3d517e3dc --- /dev/null +++ b/tools/ci/dynamic_pipelines/tests/test_report_generator/test_target_test_report_generator.py @@ -0,0 +1,96 @@ +#!/usr/bin/env python +# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD +# SPDX-License-Identifier: Apache-2.0 +import os.path +import sys +import unittest +from unittest.mock import MagicMock +from unittest.mock import patch + +sys.path.insert(0, os.path.join(f'{os.environ.get("IDF_PATH")}', 'tools', 'ci', 'python_packages')) +sys.path.insert(0, os.path.join(f'{os.environ.get("IDF_PATH")}', 'tools', 'ci')) + + +from dynamic_pipelines.report import TargetTestReportGenerator # noqa: E402 +from dynamic_pipelines.utils import parse_testcases_from_filepattern # noqa: E402 + + +class TestReportGeneration(unittest.TestCase): + @classmethod + def load_expected_report(cls, file_path: str) -> str: + """ + Loads the content of an expected report HTML file. + + :param file_path: The path to the file containing the expected HTML report. + :return: The content of the file as a string. + """ + with open(file_path, 'r') as file: + return file.read() + + def setUp(self) -> None: + patcher = patch('dynamic_pipelines.report.Gitlab') + env_patcher = patch.dict('os.environ', { + 'CI_DASHBOARD_HOST': 'https://test_dashboard_host', + 'CI_PAGES_URL': 'https://artifacts_path', + 'CI_JOB_ID': '1', + }) + env_patcher.start() + self.MockGitlab = patcher.start() + self.addCleanup(patcher.stop) + self.addCleanup(env_patcher.stop) + self.reports_sample_data_path = os.path.join( + os.environ.get('IDF_PATH', ''), # type: ignore + 'tools', + 'ci', + 'dynamic_pipelines', + 'tests', + 'test_report_generator', + 'reports_sample_data' + ) + self.mock_project = MagicMock() + self.mock_mr = MagicMock() + + self.MockGitlab.return_value.project = self.mock_project + self.mock_project.mergerequests.get.return_value = self.mock_mr + + self.expected_report_html = self.load_expected_report( + os.path.join(self.reports_sample_data_path, 'expected_target_test_report.html') + ) + + test_cases = parse_testcases_from_filepattern(os.path.join(self.reports_sample_data_path, 'XUNIT_*.xml')) + self.report_generator = TargetTestReportGenerator( + project_id=123, mr_iid=1, pipeline_id=456, title='Test Report', test_cases=test_cases + ) + self.report_generator._known_failure_cases_set = { + '*.test_wpa_supplicant_ut', + 'esp32c3.release.test_esp_timer', + '*.512safe.test_wear_levelling', + } + + def test_known_failure_cases(self) -> None: + known_failure_cases = self.report_generator.get_known_failure_cases() + self.assertEqual(len(known_failure_cases), 4) + + def test_failed_cases_in_report(self) -> None: + known_failures = self.report_generator.get_known_failure_cases() + known_failure_case_names = {case.name for case in known_failures} + failed_testcases = self.report_generator._filter_test_cases( + lambda tc: tc.is_failure and tc.name not in known_failure_case_names + ) + self.assertEqual(len(failed_testcases), 3) + + def test_skipped_cases_in_report(self) -> None: + skipped_testcases = self.report_generator._filter_test_cases(lambda tc: tc.is_skipped) + self.assertEqual(len(skipped_testcases), 1) + + def test_successful_cases_in_report(self) -> None: + succeeded_testcases = self.report_generator._filter_test_cases(lambda tc: tc.is_success) + 
self.assertEqual(len(succeeded_testcases), 9) + + def test_complete_html_structure(self) -> None: + report = self.report_generator._get_report_str() + self.assertEqual(report, self.expected_report_html) + + +if __name__ == '__main__': + unittest.main() diff --git a/tools/ci/dynamic_pipelines/utils.py b/tools/ci/dynamic_pipelines/utils.py index 63e7645479..d10bfce50a 100644 --- a/tools/ci/dynamic_pipelines/utils.py +++ b/tools/ci/dynamic_pipelines/utils.py @@ -1,10 +1,16 @@ # SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD # SPDX-License-Identifier: Apache-2.0 +import glob +import os +import re import typing as t +import xml.etree.ElementTree as ET +from urllib.parse import urlparse import yaml from .models import Job +from .models import TestCase def dump_jobs_to_yaml( @@ -33,3 +39,45 @@ def dump_jobs_to_yaml( with open(output_filepath, 'w') as fw: yaml.dump(yaml_dict, fw, indent=2) + + +def parse_testcases_from_filepattern(junit_report_filepattern: str) -> t.List[TestCase]: + """ + Parses test cases from XML files matching the provided file pattern. + + >>> test_cases = parse_testcases_from_filepattern("path/to/your/junit/reports/*.xml") + + :param junit_report_filepattern: The file pattern to match XML files containing JUnit test reports. + :return: List[TestCase]: A list of TestCase objects parsed from the XML files. + """ + + test_cases = [] + for f in glob.glob(junit_report_filepattern): + root = ET.parse(f).getroot() + for tc in root.findall('.//testcase'): + test_cases.append(TestCase.from_test_case_node(tc)) + return test_cases + + +def load_known_failure_cases() -> t.Optional[t.Set[str]]: + known_failures_file = os.getenv('KNOWN_FAILURE_CASES_FILE_NAME', '') + if not known_failures_file: + return None + try: + with open(known_failures_file) as f: + file_content = f.read() + known_cases_list = re.sub(re.compile('#.*\n'), '', file_content).split() + return {case.strip() for case in known_cases_list} + except FileNotFoundError: + return None + + +def is_url(string: str) -> bool: + """ + Check if the string is a valid URL by parsing it and verifying if it contains both a scheme and a network location. + + :param string: The string to check if it is a URL. + :return: True if the string is a valid URL, False otherwise. + """ + parsed = urlparse(string) + return bool(parsed.scheme) and bool(parsed.netloc)
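
Notes for reviewers — three small, self-contained Python sketches follow. They only illustrate the behavior the patch introduces; the patterns, case names, and values in them are illustrative stand-ins, not real CI data.

The known-failure filter (get_known_failure_cases) treats each entry of the downloaded known-failures file as a shell-style wildcard and keeps only the test cases that both failed and match some pattern, so a single pattern can cover a whole family of target/config variants. A minimal sketch of that matching:

    import fnmatch

    # Hypothetical patterns, one per line in the known-failures file
    # ('#' comments are stripped by load_known_failure_cases()).
    known_failure_patterns = {
        '*.test_wpa_supplicant_ut',        # any target/config variant of the case
        'esp32c3.release.test_esp_timer',  # one exact case id
    }

    # Hypothetical (case name, failed?) pairs standing in for parsed TestCase objects.
    results = [
        ('esp32c3.default.test_wpa_supplicant_ut', True),
        ('esp32c3.release.test_esp_timer', True),
        ('esp32c3.release.test_wear_levelling', True),
        ('esp32c2.default.test_vfs_default', False),
    ]

    # Mirrors get_known_failure_cases(): a failure is "known" when its name matches
    # any pattern; all other failures stay in the regular failed-cases table.
    known = [
        name
        for name, failed in results
        if failed and any(fnmatch.fnmatch(name, pattern) for pattern in known_failure_patterns)
    ]
    print(known)
    # ['esp32c3.default.test_wpa_supplicant_ut', 'esp32c3.release.test_esp_timer']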
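The Grafana link added in models.py is assembled with urllib.parse only. quote_via=urllib.parse.quote is used instead of urlencode's default quote_plus, so spaces inside tuple-style case names become %20 rather than '+'. A sketch, assuming the same CI_DASHBOARD_HOST value the unit test uses and one of the tuple-style case names from the sample report:

    import os
    import urllib.parse

    os.environ['CI_DASHBOARD_HOST'] = 'https://test_dashboard_host'  # assumed, as in setUp() above

    # Mirrors TestCase.from_test_case_node(): build the per-case dashboard link.
    base = urllib.parse.urljoin(os.getenv('CI_DASHBOARD_HOST', ''), '/d/Ucg477Fnz/case-list')
    params = urllib.parse.urlencode(
        {'var-case_id': "('esp32h2', 'esp32h2').('defaults', 'defaults').test_i2c_multi_device"},
        quote_via=urllib.parse.quote,  # '%20' for spaces; quote_plus would emit '+'
    )
    print(f'{base}?{params}')
    # https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=%28%27esp32h2%27%2C%20%27esp32h2%27%29.%28%27defaults%27%2C%20%27defaults%27%29.test_i2c_multi_device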
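The `failure_node is None` comparison in TestCase.from_test_case_node is deliberate: an ElementTree Element with no children is falsy, so a plain truthiness check would misread an empty <failure/> tag, which is what the in-code comment warns about. The same block also falls back to <error>, which JUnit emits for setup/teardown crashes as opposed to assertion failures. A sketch with a made-up testcase node:

    import xml.etree.ElementTree as ET

    # Hypothetical JUnit fragment: an <error> (setup crash), no <failure>.
    node = ET.fromstring(
        '<testcase name="demo"><error message="failed on setup with EOFError" /></testcase>'
    )

    failure_node = node.find('failure')
    # A truthiness check ('if not failure_node:') would also trigger for an empty
    # <failure/> element, because childless Elements are falsy; compare with None.
    if failure_node is None:
        failure_node = node.find('error')
    if failure_node is not None:
        print(failure_node.attrib.get('message', ''))  # failed on setup with EOFError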