ci: apply idf-ci on generating child pipelines

Fu Hanxi
2025-06-13 14:30:39 +02:00
parent a5257dcc39
commit 15b411270e
16 changed files with 134 additions and 557 deletions

View File

@@ -259,11 +259,17 @@ variables:
.after_script:build:ccache-show-stats:
  after_script:
    - source tools/ci/utils.sh
    - section_start "ccache_show_stats" "Show ccache statistics"
    - *show_ccache_statistics
    - section_end "ccache_show_stats"

.after_script:build:ccache-show-stats:upload-failed-job-logs:
  after_script:
    - source tools/ci/utils.sh
    - section_start "ccache_show_stats" "Show ccache statistics"
    - *show_ccache_statistics
    - section_end "ccache_show_stats"
    - *upload_failed_job_log_artifacts
##############################

View File

@@ -1,5 +1,40 @@
preserve_non_test_related_apps = false

[local_runtime_envs]
EXTRA_CFLAGS = "-Werror -Werror=deprecated-declarations -Werror=unused-variable -Werror=unused-but-set-variable -Werror=unused-function -Wstrict-prototypes"
EXTRA_CXXFLAGS = "-Werror -Werror=deprecated-declarations -Werror=unused-variable -Werror=unused-but-set-variable -Werror=unused-function"
LDGEN_CHECK_MAPPING = "1"
IDF_CI_BUILD = "1"

[gitlab]

[gitlab.build_pipeline]
job_tags = ['build', 'shiny']
job_template_name = '.dynamic_build_template'
job_template_jinja = ''  # the template itself is defined in tools/ci/dynamic_pipelines/templates/.dynamic_jobs.yml
pre_yaml_jinja = """
include:
  - .gitlab/ci/common.yml
  - tools/ci/dynamic_pipelines/templates/.dynamic_jobs.yml
  - tools/ci/dynamic_pipelines/templates/test_child_pipeline.yml
"""
yaml_jinja = """
{{ settings.gitlab.build_pipeline.pre_yaml_jinja }}
workflow:
  name: {{ settings.gitlab.build_pipeline.workflow_name }}
  rules:
    - when: always
{{ jobs }}
"""  # simplified, since tools/ci/dynamic_pipelines/templates/test_child_pipeline.yml is already included above

[gitlab.test_pipeline]
job_template_name = '.dynamic_target_test_template'
job_template_jinja = ''  # the template itself is defined in tools/ci/dynamic_pipelines/templates/.dynamic_jobs.yml
pre_yaml_jinja = """
include:
  - .gitlab/ci/common.yml
  - tools/ci/dynamic_pipelines/templates/.dynamic_jobs.yml
  - tools/ci/dynamic_pipelines/templates/generate_target_test_report.yml
"""

View File

@@ -42,12 +42,10 @@ from idf_ci_local.uploader import AppUploader
from idf_ci_utils import IDF_PATH
from idf_ci_utils import idf_relpath
from idf_pytest.constants import DEFAULT_LOGDIR
from idf_pytest.constants import DEFAULT_SDKCONFIG
from idf_pytest.plugin import IDF_LOCAL_PLUGIN_KEY
from idf_pytest.plugin import IdfLocalPlugin
from idf_pytest.plugin import requires_elf_or_map
from idf_pytest.utils import format_case_id
from pytest_embedded.plugin import multi_dut_argument
from pytest_embedded.plugin import multi_dut_fixture
from pytest_embedded_idf.dut import IdfDut
from pytest_embedded_idf.unity_tester import CaseTester
@@ -72,23 +70,6 @@ def case_tester(unity_tester: CaseTester) -> CaseTester:
return unity_tester
@pytest.fixture
@multi_dut_argument
def config(request: FixtureRequest) -> str:
return getattr(request, 'param', None) or DEFAULT_SDKCONFIG # type: ignore
@pytest.fixture
@multi_dut_fixture
def target(request: FixtureRequest, dut_total: int, dut_index: int) -> str:
plugin = request.config.stash[IDF_LOCAL_PLUGIN_KEY]
if dut_total == 1:
return plugin.target[0] # type: ignore
return plugin.target[dut_index] # type: ignore
@pytest.fixture
def test_func_name(request: FixtureRequest) -> str:
return request.node.function.__name__ # type: ignore

View File

@@ -4,31 +4,7 @@ import os
from idf_ci_utils import IDF_PATH
# use relative path to avoid absolute path in pipeline
DEFAULT_TEST_PATHS = [
'examples',
os.path.join('tools', 'test_apps'),
'components',
]
DEFAULT_APPS_BUILD_PER_JOB = 60
DEFAULT_CASES_TEST_PER_JOB = 30
DEFAULT_BUILD_CHILD_PIPELINE_FILEPATH = os.path.join(IDF_PATH, 'build_child_pipeline.yml')
DEFAULT_TARGET_TEST_CHILD_PIPELINE_FILEPATH = os.path.join(IDF_PATH, 'target_test_child_pipeline.yml')
DEFAULT_BUILD_CHILD_PIPELINE_NAME = 'Build Child Pipeline'
DEFAULT_TARGET_TEST_CHILD_PIPELINE_NAME = 'Target Test Child Pipeline'
DEFAULT_TARGET_TEST_JOB_TEMPLATE_NAME = '.dynamic_target_test_template'
TIMEOUT_4H_TEMPLATE_NAME = '.timeout_4h_template'
TEST_RELATED_BUILD_JOB_NAME = 'build_test_related_apps'
NON_TEST_RELATED_BUILD_JOB_NAME = 'build_non_test_related_apps'
COMMENT_START_MARKER = '### Dynamic Pipeline Report'
TEST_RELATED_APPS_FILENAME = 'test_related_apps.txt'
NON_TEST_RELATED_APPS_FILENAME = 'non_test_related_apps.txt'
TEST_RELATED_APPS_DOWNLOAD_URLS_FILENAME = 'test_related_apps_download_urls.yml'
REPORT_TEMPLATE_FILEPATH = os.path.join(
@@ -44,8 +20,6 @@ RETRY_JOB_PICTURE_PATH = 'tools/ci/dynamic_pipelines/templates/retry-jobs.png'
RETRY_JOB_TITLE = '\n\nRetry failed jobs with the help of the "retry_failed_jobs" stage of the pipeline:'
RETRY_JOB_PICTURE_LINK = '![Retry Jobs Image]({pic_url})'
BUILD_ONLY_LABEL = 'For Maintainers: Only Build Tests'
KNOWN_GENERATE_TEST_CHILD_PIPELINE_WARNINGS_FILEPATH = os.path.join(
IDF_PATH, 'tools', 'ci', 'dynamic_pipelines', 'templates', 'known_generate_test_child_pipeline_warnings.yml'
)
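
The two *_PER_JOB constants above drive the parallel width of the generated jobs via the len(apps) // N + 1 idiom that appears later in this commit; a quick self-contained check of that arithmetic:

DEFAULT_APPS_BUILD_PER_JOB = 60

def parallel_count(n_apps: int) -> int:
    # mirrors `len(test_related_apps) // DEFAULT_APPS_BUILD_PER_JOB + 1`
    return n_apps // DEFAULT_APPS_BUILD_PER_JOB + 1

assert parallel_count(0) == 1    # an empty list still yields one job
assert parallel_count(60) == 2   # exact multiples round up to an extra job
assert parallel_count(121) == 3  # 121 apps -> 3 parallel build jobs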

View File

@@ -1,131 +1,14 @@
# SPDX-FileCopyrightText: 2024-2025 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import inspect
import os
import typing as t
import urllib.parse
from dataclasses import dataclass
from xml.etree.ElementTree import Element
import yaml
from idf_ci_utils import IDF_PATH
class Job:
def __init__(
self,
*,
name: str,
extends: t.Optional[t.List[str]] = None,
tags: t.Optional[t.List[str]] = None,
stage: t.Optional[str] = None,
parallel: int = 1,
variables: t.Optional[t.Dict[str, str]] = None,
script: t.Optional[t.List[str]] = None,
before_script: t.Optional[t.List[str]] = None,
after_script: t.Optional[t.List[str]] = None,
needs: t.Optional[t.List[str]] = None,
**kwargs: t.Any,
) -> None:
self.name = name
self.extends = extends
self.tags = tags
self.stage = stage
self.parallel = parallel
self.variables = variables or {}
self.script = script
self.before_script = before_script
self.after_script = after_script
self.needs = needs
for k, v in kwargs.items():
setattr(self, k, v)
def __str__(self) -> str:
return yaml.dump(self.to_dict()) # type: ignore
def set_variable(self, key: str, value: str) -> None:
self.variables[key] = value
def to_dict(self) -> t.Dict[str, t.Any]:
res = {}
for k, v in inspect.getmembers(self):
if k.startswith('_'):
continue
# name is the dict key
if k == 'name':
continue
# parallel 1 is not allowed
if k == 'parallel' and v == 1:
continue
if v is None:
continue
if inspect.ismethod(v) or inspect.isfunction(v):
continue
res[k] = v
return {self.name: res}
class EmptyJob(Job):
def __init__(
self,
*,
name: t.Optional[str] = None,
tags: t.Optional[t.List[str]] = None,
stage: t.Optional[str] = None,
before_script: t.Optional[t.List[str]] = None,
after_script: t.Optional[t.List[str]] = None,
**kwargs: t.Any,
) -> None:
super().__init__(
name=name or 'fake_pass_job',
tags=tags or ['fast_run', 'shiny'],
stage=stage or 'build',
script=['echo "This is a fake job to pass the pipeline"'],
before_script=before_script or [],
after_script=after_script or [],
**kwargs,
)
class BuildJob(Job):
def __init__(
self,
*,
extends: t.Optional[t.List[str]] = None,
tags: t.Optional[t.List[str]] = None,
stage: t.Optional[str] = None,
**kwargs: t.Any,
) -> None:
super().__init__(
extends=extends or ['.dynamic_build_template'],
tags=tags or ['build', 'shiny'],
stage=stage or 'build',
**kwargs,
)
class TargetTestJob(Job):
def __init__(
self,
*,
extends: t.Optional[t.List[str]] = None,
stage: t.Optional[str] = None,
**kwargs: t.Any,
) -> None:
super().__init__(
extends=extends or ['.dynamic_target_test_template'],
stage=stage or 'target_test',
**kwargs,
)
@dataclass
class TestCase:
name: str
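
For reference, a short usage example of the Job serialization removed above: to_dict() keeps every public, non-None attribute, drops parallel when it equals 1, and keys the mapping by the job name. This relies on the Job and BuildJob classes exactly as defined above:

job = BuildJob(
    name='build_test_related_apps',
    parallel=3,
    variables={'APP_LIST_FILE': 'test_related_apps.txt'},
)
d = job.to_dict()
assert 'build_test_related_apps' in d
assert d['build_test_related_apps']['parallel'] == 3
assert d['build_test_related_apps']['extends'] == ['.dynamic_build_template']
print(job)  # YAML rendering via yaml.dump(job.to_dict())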

View File

@@ -1,4 +1,4 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-FileCopyrightText: 2024-2025 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""This file is used for generating the child pipeline for build jobs."""
@@ -8,26 +8,13 @@ import typing as t
import __init__ # noqa: F401 # inject the system path
import yaml
from idf_build_apps.manifest import FolderRule
from idf_build_apps.utils import semicolon_separated_str_to_list
from idf_ci_local.app import dump_apps_to_txt
from idf_ci.idf_gitlab import build_child_pipeline
from idf_ci_utils import IDF_PATH
from idf_pytest.constants import DEFAULT_CONFIG_RULES_STR
from idf_pytest.constants import DEFAULT_FULL_BUILD_TEST_COMPONENTS
from idf_pytest.constants import DEFAULT_FULL_BUILD_TEST_FILEPATTERNS
from idf_pytest.constants import CollectMode
from idf_pytest.script import get_all_apps
from dynamic_pipelines.constants import DEFAULT_APPS_BUILD_PER_JOB
from dynamic_pipelines.constants import DEFAULT_BUILD_CHILD_PIPELINE_FILEPATH
from dynamic_pipelines.constants import DEFAULT_BUILD_CHILD_PIPELINE_NAME
from dynamic_pipelines.constants import DEFAULT_TEST_PATHS
from dynamic_pipelines.constants import NON_TEST_RELATED_APPS_FILENAME
from dynamic_pipelines.constants import NON_TEST_RELATED_BUILD_JOB_NAME
from dynamic_pipelines.constants import TEST_RELATED_APPS_FILENAME
from dynamic_pipelines.constants import TEST_RELATED_BUILD_JOB_NAME
from dynamic_pipelines.models import BuildJob
from dynamic_pipelines.models import EmptyJob
from dynamic_pipelines.utils import dump_jobs_to_yaml
BUILD_CHILD_PIPELINE_FILEPATH = os.path.join(IDF_PATH, 'build_child_pipeline.yml')
TEST_PATHS = ['examples', os.path.join('tools', 'test_apps'), 'components']
def _separate_str_to_list(s: str) -> t.List[str]:
@@ -56,90 +43,15 @@ def main(arguments: argparse.Namespace) -> None:
if configs:
extra_default_build_targets = configs.get('extra_default_build_targets') or []
build_jobs = []
###########################################
# special case with -k, ignore other args #
###########################################
if arguments.filter_expr:
# build only test related apps
test_related_apps, _ = get_all_apps(
arguments.paths,
target=CollectMode.ALL,
config_rules_str=DEFAULT_CONFIG_RULES_STR,
filter_expr=arguments.filter_expr,
marker_expr='not host_test',
extra_default_build_targets=extra_default_build_targets,
)
dump_apps_to_txt(sorted(test_related_apps), TEST_RELATED_APPS_FILENAME)
print(f'Generate test related apps file {TEST_RELATED_APPS_FILENAME} with {len(test_related_apps)} apps')
if extra_default_build_targets:
FolderRule.DEFAULT_BUILD_TARGETS.extend(extra_default_build_targets)
test_apps_build_job = BuildJob(
name=TEST_RELATED_BUILD_JOB_NAME,
parallel=len(test_related_apps) // DEFAULT_APPS_BUILD_PER_JOB + 1,
variables={
'APP_LIST_FILE': TEST_RELATED_APPS_FILENAME,
},
)
build_jobs.append(test_apps_build_job)
else:
#############
# all cases #
#############
test_related_apps, non_test_related_apps = get_all_apps(
arguments.paths,
CollectMode.ALL,
marker_expr='not host_test',
config_rules_str=DEFAULT_CONFIG_RULES_STR,
extra_default_build_targets=extra_default_build_targets,
compare_manifest_sha_filepath=arguments.compare_manifest_sha_filepath,
modified_components=arguments.modified_components,
modified_files=arguments.modified_files,
ignore_app_dependencies_components=arguments.ignore_app_dependencies_components,
ignore_app_dependencies_filepatterns=arguments.ignore_app_dependencies_filepatterns,
)
dump_apps_to_txt(sorted(test_related_apps), TEST_RELATED_APPS_FILENAME)
print(f'Generate test related apps file {TEST_RELATED_APPS_FILENAME} with {len(test_related_apps)} apps')
dump_apps_to_txt(sorted(non_test_related_apps), NON_TEST_RELATED_APPS_FILENAME)
print(
f'Generate non-test related apps file {NON_TEST_RELATED_APPS_FILENAME} '
f'with {len(non_test_related_apps)} apps'
)
if test_related_apps:
test_apps_build_job = BuildJob(
name=TEST_RELATED_BUILD_JOB_NAME,
parallel=len(test_related_apps) // DEFAULT_APPS_BUILD_PER_JOB + 1,
variables={
'APP_LIST_FILE': TEST_RELATED_APPS_FILENAME,
},
)
build_jobs.append(test_apps_build_job)
if non_test_related_apps:
non_test_apps_build_job = BuildJob(
name=NON_TEST_RELATED_BUILD_JOB_NAME,
parallel=len(non_test_related_apps) // DEFAULT_APPS_BUILD_PER_JOB + 1,
variables={
'APP_LIST_FILE': NON_TEST_RELATED_APPS_FILENAME,
},
)
build_jobs.append(non_test_apps_build_job)
if mr_labels := os.getenv('CI_MERGE_REQUEST_LABELS'):
print(f'MR labels: {mr_labels}')
# check if there's no jobs
if not build_jobs:
print('No apps need to be built. Create one empty job instead')
build_jobs.append(EmptyJob())
extra_include_yml = []
else:
extra_include_yml = ['tools/ci/dynamic_pipelines/templates/test_child_pipeline.yml']
dump_jobs_to_yaml(build_jobs, arguments.yaml_output, DEFAULT_BUILD_CHILD_PIPELINE_NAME, extra_include_yml)
print(f'Generate child pipeline yaml file {arguments.yaml_output} with {sum(j.parallel for j in build_jobs)} jobs')
build_child_pipeline(
paths=args.paths,
modified_files=args.modified_files,
compare_manifest_sha_filepath=args.compare_manifest_sha_filepath,
yaml_output=args.yaml_output,
)
if __name__ == '__main__':
@@ -150,22 +62,17 @@ if __name__ == '__main__':
parser.add_argument(
'-o',
'--yaml-output',
default=DEFAULT_BUILD_CHILD_PIPELINE_FILEPATH,
default=BUILD_CHILD_PIPELINE_FILEPATH,
help='Output YAML path',
)
# use relative path to avoid absolute path in pipeline
parser.add_argument(
'-p',
'--paths',
nargs='+',
default=DEFAULT_TEST_PATHS,
default=TEST_PATHS,
help='Paths to the apps to build.',
)
parser.add_argument(
'-k',
'--filter-expr',
help='only build tests matching the given filter expression. For example: -k "test_hello_world". Works only '
'for pytest',
)
parser.add_argument(
'--default-build-test-rules',
default=os.path.join(IDF_PATH, '.gitlab', 'ci', 'default-build-test-rules.yml'),
@@ -176,16 +83,6 @@ if __name__ == '__main__':
default=os.path.join(IDF_PATH, '.manifest_sha'),
help='Path to the recorded manifest sha file generated by `idf-build-apps dump-manifest-sha`',
)
parser.add_argument(
'--modified-components',
type=_separate_str_to_list,
default=os.getenv('MR_MODIFIED_COMPONENTS'),
help='semicolon-separated string which specifies the modified components. '
'apps with `depends_components` set in the corresponding manifest files would only be built '
'if they depend on any of the specified components. '
'If set to "", the value would be considered as None. '
'If set to ";", the value would be considered as an empty list',
)
parser.add_argument(
'--modified-files',
type=_separate_str_to_list,
@@ -196,65 +93,7 @@ if __name__ == '__main__':
'If set to "", the value would be considered as None. '
'If set to ";", the value would be considered as an empty list',
)
parser.add_argument(
'-ic',
'--ignore-app-dependencies-components',
type=_separate_str_to_list,
help='semicolon-separated string which specifies the modified components used to '
'skip checking the app dependencies. '
'The `depends_components` and `depends_filepatterns` set in the manifest files will be ignored '
'when any of the specified components matches any of the modified components. '
'Must be used together with --modified-components. '
'If set to "", the value would be considered as None. '
'If set to ";", the value would be considered as an empty list',
)
parser.add_argument(
'-if',
'--ignore-app-dependencies-filepatterns',
type=_separate_str_to_list,
help='semicolon-separated string which specifies the file patterns used to '
'skip checking the app dependencies. '
'The `depends_components` and `depends_filepatterns` set in the manifest files will be ignored '
'when any of the specified file patterns matches any of the modified files. '
'Must be used together with --modified-files. '
'If set to "", the value would be considered as None. '
'If set to ";", the value would be considered as an empty list',
)
args = parser.parse_args()
if test_case_filters := os.getenv('IDF_CI_SELECT_BY_FILTER_EXPR', None):
args.filter_expr = test_case_filters
if not os.getenv('CI_MERGE_REQUEST_IID') or os.getenv('IDF_CI_SELECT_ALL_PYTEST_CASES') == '1':
print('Build and run all test cases, and compile all cmake apps')
args.modified_components = None
args.modified_files = None
args.ignore_app_dependencies_components = None
args.ignore_app_dependencies_filepatterns = None
elif args.filter_expr is not None:
print('Build and run only test cases matching "%s"' % args.filter_expr)
args.modified_components = None
args.modified_files = None
args.ignore_app_dependencies_components = None
args.ignore_app_dependencies_filepatterns = None
else:
print(
f'Build and run only test cases matching:\n'
f'- modified components: {args.modified_components}\n'
f'- modified files: {args.modified_files}'
)
if args.modified_components is not None and not args.ignore_app_dependencies_components:
# setting default values
args.ignore_app_dependencies_components = DEFAULT_FULL_BUILD_TEST_COMPONENTS
if args.modified_files is not None and not args.ignore_app_dependencies_filepatterns:
# setting default values
args.ignore_app_dependencies_filepatterns = DEFAULT_FULL_BUILD_TEST_FILEPATTERNS
if not os.path.isfile(args.compare_manifest_sha_filepath):
# ignore if the file does not exist
args.compare_manifest_sha_filepath = None
main(args)
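
The body of _separate_str_to_list is not shown in this diff; the sketch below is an assumed implementation matching the semantics documented in the help texts above ('' maps to None, ';' maps to an empty list, otherwise split on semicolons):

import typing as t

def _separate_str_to_list(s: str) -> t.Optional[t.List[str]]:
    if not s:
        return None
    return [part.strip() for part in s.split(';') if part.strip()]

assert _separate_str_to_list('') is None
assert _separate_str_to_list(';') == []
assert _separate_str_to_list('esp_wifi;esp_netif') == ['esp_wifi', 'esp_netif']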

View File

@@ -1,4 +1,4 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-FileCopyrightText: 2024-2025 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""This file is used for generating the child pipeline for target test jobs.
@@ -8,119 +8,39 @@
"""
import argparse
import glob
import os
import typing as t
from collections import Counter
from collections import defaultdict
import __init__ # noqa: F401 # inject the system path
import yaml
from idf_build_apps import App
from idf_ci_local.app import import_apps_from_txt
from idf_ci import get_pytest_cases
from idf_ci.idf_gitlab import test_child_pipeline
from idf_ci.idf_pytest import GroupedPytestCases
from idf_ci.idf_pytest.models import GroupKey
from idf_ci_utils import IDF_PATH
from idf_pytest.constants import TIMEOUT_4H_MARKERS
from idf_pytest.script import get_pytest_cases
from dynamic_pipelines.constants import BUILD_ONLY_LABEL
from dynamic_pipelines.constants import DEFAULT_CASES_TEST_PER_JOB
from dynamic_pipelines.constants import DEFAULT_TARGET_TEST_CHILD_PIPELINE_FILEPATH
from dynamic_pipelines.constants import DEFAULT_TARGET_TEST_CHILD_PIPELINE_NAME
from dynamic_pipelines.constants import DEFAULT_TARGET_TEST_JOB_TEMPLATE_NAME
from dynamic_pipelines.constants import DEFAULT_TEST_PATHS
from dynamic_pipelines.constants import KNOWN_GENERATE_TEST_CHILD_PIPELINE_WARNINGS_FILEPATH
from dynamic_pipelines.constants import TIMEOUT_4H_TEMPLATE_NAME
from dynamic_pipelines.models import EmptyJob
from dynamic_pipelines.models import Job
from dynamic_pipelines.models import TargetTestJob
from dynamic_pipelines.utils import dump_jobs_to_yaml
BUILD_ONLY_LABEL = 'For Maintainers: Only Build Tests'
TIMEOUT_4H_TEMPLATE_NAME = '.timeout_4h_template'
TARGET_TEST_CHILD_PIPELINE_FILEPATH = os.path.join(IDF_PATH, 'target_test_child_pipeline.yml')
def get_tags_with_amount(s: str) -> t.List[str]:
c: Counter = Counter()
for _t in s.split(','):
c[_t] += 1
res = set()
for target, amount in c.items():
if amount > 1:
res.add(f'{target}_{amount}')
else:
res.add(target)
return sorted(res)
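
A few worked examples of the grouping above, using get_tags_with_amount exactly as defined: duplicated targets in a multi-DUT selector collapse into a single counted runner tag.

assert get_tags_with_amount('esp32') == ['esp32']
assert get_tags_with_amount('esp32,esp32') == ['esp32_2']  # 2-DUT, same target
assert get_tags_with_amount('esp32,esp32s2') == ['esp32', 'esp32s2']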
def get_target_test_jobs(
paths: str, apps: t.List[App], exclude_runner_tags: t.Set[str]
) -> t.Tuple[t.List[Job], t.List[str], t.List[str]]:
"""
Return the target test jobs and the extra yaml files to include
"""
def main(output_filepath: str) -> None:
if mr_labels := os.getenv('CI_MERGE_REQUEST_LABELS'):
print(f'MR labels: {mr_labels}')
if BUILD_ONLY_LABEL in mr_labels.split(','):
print('MR has build only label, skip generating target test child pipeline')
return [EmptyJob()], [], []
test_child_pipeline(
output_filepath,
cases=GroupedPytestCases([]),
)
return
pytest_cases = get_pytest_cases(
paths,
apps=apps,
marker_expr='not host_test', # since it's generating target-test child pipeline
)
cases = GroupedPytestCases(get_pytest_cases())
no_env_marker_test_cases: t.List[str] = []
res = defaultdict(list)
for case in pytest_cases:
if not case.env_markers:
no_env_marker_test_cases.append(case.item.nodeid)
continue
res[(case.target_selector, tuple(sorted(case.env_markers)))].append(case)
target_test_jobs: t.List[Job] = []
for (target_selector, env_markers), cases in res.items():
runner_tags = get_tags_with_amount(target_selector) + list(env_markers)
if ','.join(runner_tags) in exclude_runner_tags:
print('WARNING: excluding test cases with runner tags:', runner_tags)
continue
_extends = [DEFAULT_TARGET_TEST_JOB_TEMPLATE_NAME]
for timeout_4h_marker in TIMEOUT_4H_MARKERS:
if timeout_4h_marker in env_markers:
_extends.append(TIMEOUT_4H_TEMPLATE_NAME)
target_test_job = TargetTestJob(
extends=_extends,
name=f'{target_selector} - {",".join(env_markers)}',
tags=runner_tags,
parallel=len(cases) // DEFAULT_CASES_TEST_PER_JOB + 1,
)
target_test_job.set_variable('TARGET_SELECTOR', f"'{target_selector}'")
target_test_job.set_variable('ENV_MARKERS', "'" + ' and '.join(env_markers) + "'")
target_test_job.set_variable('PYTEST_NODES', ' '.join([f"'{case.item.nodeid}'" for case in cases]))
target_test_jobs.append(target_test_job)
extra_include_yml: t.List[str] = []
if not target_test_jobs:
print('No target test cases required, create one empty job instead')
target_test_jobs.append(EmptyJob())
else:
extra_include_yml = ['tools/ci/dynamic_pipelines/templates/generate_target_test_report.yml']
if os.getenv('IDF_CI_IS_DEBUG_PIPELINE') == '1':
extra_include_yml = ['tools/ci/dynamic_pipelines/templates/fast_pipeline.yml']
no_env_marker_test_cases.sort()
return target_test_jobs, extra_include_yml, no_env_marker_test_cases
def generate_target_test_child_pipeline(
paths: str,
apps: t.List[App],
output_filepath: str,
) -> None:
with open(KNOWN_GENERATE_TEST_CHILD_PIPELINE_WARNINGS_FILEPATH) as fr:
known_warnings_dict = yaml.safe_load(fr) or dict()
@@ -130,37 +50,34 @@ def generate_target_test_child_pipeline(
if exclude_runner_tags := os.getenv('EXCLUDE_RUNNER_TAGS'):
exclude_runner_tags_set.update(exclude_runner_tags.split(';'))
target_test_jobs, extra_include_yml, no_env_marker_test_cases = get_target_test_jobs(
paths=paths,
apps=apps,
exclude_runner_tags=exclude_runner_tags_set,
)
flattened_cases = []
additional_dict: t.Dict[GroupKey, t.Dict[str, t.Any]] = {}
for key, grouped_cases in cases.grouped_cases.items():
# skip test cases with no runner tags
if ','.join(sorted(key.runner_tags)) in exclude_runner_tags_set:
print(f'WARNING: excluding test cases with runner tags: {key.runner_tags}')
continue
known_no_env_marker_test_cases = set(known_warnings_dict.get('no_env_marker_test_cases', []))
no_env_marker_test_cases_set = set(no_env_marker_test_cases)
flattened_cases.extend(grouped_cases)
no_env_marker_test_cases_fail = False
if no_env_marker_test_cases_set - known_no_env_marker_test_cases:
print('ERROR: NEW "no_env_marker_test_cases" detected:')
for case in no_env_marker_test_cases_set - known_no_env_marker_test_cases:
print(f' - {case}')
no_env_marker_test_cases_fail = True
for case in grouped_cases:
for timeout_4h_marker in TIMEOUT_4H_MARKERS:
if timeout_4h_marker in case.env_markers:
if key not in additional_dict:
additional_dict[key] = {
'extra_extends': [],
}
print(
'Please add at least one environment marker to the test cases listed above. '
'You may check all the env markers here: tools/ci/idf_pytest/constants.py'
)
if TIMEOUT_4H_TEMPLATE_NAME not in additional_dict[key]['extra_extends']:
additional_dict[key]['extra_extends'].append(TIMEOUT_4H_TEMPLATE_NAME)
if no_env_marker_test_cases_fail:
raise SystemExit('Failed to generate target test child pipeline.')
dump_jobs_to_yaml(
target_test_jobs,
test_child_pipeline(
output_filepath,
DEFAULT_TARGET_TEST_CHILD_PIPELINE_NAME,
extra_include_yml,
cases=GroupedPytestCases(
cases=flattened_cases,
additional_dict=additional_dict,
),
)
print(f'Generate child pipeline yaml file {output_filepath} with {sum(j.parallel for j in target_test_jobs)} jobs')
if __name__ == '__main__':
@@ -168,35 +85,13 @@ if __name__ == '__main__':
description='Generate Target Test Child Pipeline. Update Build Report in MR pipelines',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument(
'-p',
'--paths',
nargs='+',
default=DEFAULT_TEST_PATHS,
help='Paths to the apps to build.',
)
parser.add_argument(
'-o',
'--output',
default=DEFAULT_TARGET_TEST_CHILD_PIPELINE_FILEPATH,
default=TARGET_TEST_CHILD_PIPELINE_FILEPATH,
help='Output child pipeline file path',
)
parser.add_argument(
'--app-info-filepattern',
default='app_info_*.txt',
help='glob pattern to specify the files that include built app info generated by '
'`idf-build-apps --collect-app-info ...`. Will not raise ValueError for binary '
'paths that do not exist in the local file system if they are not recorded in the app info.',
)
args = parser.parse_args()
apps = []
for f in glob.glob(args.app_info_filepattern):
apps.extend(import_apps_from_txt(f))
generate_target_test_child_pipeline(
paths=args.paths,
apps=apps,
output_filepath=args.output,
)
main(args.output)
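
Stripped of argument parsing, the generation flow now reduces to the sketch below. It uses only the idf-ci calls visible in this diff and assumes it runs inside an ESP-IDF checkout with idf-ci installed:

from idf_ci import get_pytest_cases
from idf_ci.idf_gitlab import test_child_pipeline
from idf_ci.idf_pytest import GroupedPytestCases

# Collect all pytest cases, group them by runner requirements, and let
# idf-ci write the target-test child pipeline YAML.
cases = GroupedPytestCases(get_pytest_cases())
test_child_pipeline('target_test_child_pipeline.yml', cases=cases)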

View File

@@ -9,6 +9,7 @@
- .before_script:build
- .after_script:build:ccache-show-stats:upload-failed-job-logs
image: $ESP_ENV_IMAGE
tags: [build, shiny]
stage: build
timeout: 1 hour
variables:
@@ -55,9 +56,7 @@
variables:
SUBMODULES_TO_FETCH: "none"
# set while generating the pipeline
PYTEST_NODES: ""
TARGET_SELECTOR: ""
ENV_MARKERS: ""
nodes: ""
INSTALL_EXTRA_TOOLS: "xtensa-esp-elf-gdb riscv32-esp-elf-gdb openocd-esp32 esp-rom-elfs"
PYTEST_EXTRA_FLAGS: "--dev-passwd ${ETHERNET_TEST_PASSWORD} --dev-user ${ETHERNET_TEST_USER} --capture=fd --verbosity=0 --unity-test-report-mode merge"
needs:
@@ -86,9 +85,7 @@
- retry_failed git clone $TEST_ENV_CONFIG_REPO
- run_cmd python $CHECKOUT_REF_SCRIPT ci-test-runner-configs ci-test-runner-configs
# CI-specific options start from "--known-failure-cases-file xxx" and can be ignored when running locally
- run_cmd pytest ${PYTEST_NODES}
--target ${TARGET_SELECTOR}
-m ${ENV_MARKERS}
- run_cmd pytest ${nodes}
--pipeline-id $PARENT_PIPELINE_ID
--junitxml=XUNIT_RESULT_${CI_JOB_NAME_SLUG}.xml
--ignore-result-files ${KNOWN_FAILURE_CASES_FILE_NAME}

View File

@@ -1,16 +0,0 @@
.generate_pytest_report_base:
  stage: .post
  tags: [build, shiny]
  image: $ESP_ENV_IMAGE
  artifacts:
    paths:
      - target_test_report.html
    expire_in: 2 week
    when: always

fast_pipeline:pipeline_ended:always_failed:
  when: on_success
  extends: .generate_pytest_report_base
  script:
    - python tools/ci/dynamic_pipelines/scripts/generate_report.py --report-type target_test
    - exit 30

View File

@@ -11,7 +11,6 @@ generate_pytest_report:
- succeeded_cases.html
expire_in: 2 week
when: always
script:
- python tools/ci/get_known_failure_cases_file.py
- python tools/ci/dynamic_pipelines/scripts/generate_report.py --report-type target_test

View File

@@ -5,6 +5,10 @@ generate_pytest_build_report:
- build
- shiny
when: always
needs:
  - pipeline: $PARENT_PIPELINE_ID
    job: pipeline_variables
  - build_apps
artifacts:
paths:
- failed_apps.html
@@ -14,7 +18,6 @@ generate_pytest_build_report:
- test_related_apps_download_urls.yml
expire_in: 2 week
when: always
script:
- env
- python tools/ci/dynamic_pipelines/scripts/generate_report.py --report-type build
@@ -27,6 +30,10 @@ generate_pytest_child_pipeline:
tags:
- build
- shiny
needs:
  - pipeline: $PARENT_PIPELINE_ID
    job: pipeline_variables
  - build_apps
artifacts:
paths:
- target_test_child_pipeline.yml

View File

@@ -10,7 +10,6 @@ from urllib.parse import urlencode
from urllib.parse import urlparse
import requests
import yaml
from .constants import CI_DASHBOARD_API
from .constants import CI_JOB_TOKEN
@@ -18,42 +17,9 @@ from .constants import CI_MERGE_REQUEST_SOURCE_BRANCH_SHA
from .constants import CI_PAGES_URL
from .constants import CI_PROJECT_URL
from .models import GitlabJob
from .models import Job
from .models import TestCase
def dump_jobs_to_yaml(
jobs: t.List[Job],
output_filepath: str,
pipeline_name: str,
extra_include_yml: t.Optional[t.List[str]] = None,
) -> None:
yaml_dict = {}
for job in jobs:
yaml_dict.update(job.to_dict())
# global stuffs
yaml_dict.update(
{
'include': [
'tools/ci/dynamic_pipelines/templates/.dynamic_jobs.yml',
'.gitlab/ci/common.yml',
],
'workflow': {
'name': pipeline_name,
'rules': [
# always run the child pipeline, if they are created
{'when': 'always'},
],
},
}
)
yaml_dict['include'].extend(extra_include_yml or [])
with open(output_filepath, 'w') as fw:
yaml.dump(yaml_dict, fw, indent=2)
def parse_testcases_from_filepattern(junit_report_filepattern: str) -> t.List[TestCase]:
"""
Parses test cases from XML files matching the provided file pattern.

View File

@@ -19,7 +19,7 @@ SUPPORTED_TARGETS = [
'esp32c61',
]
PREVIEW_TARGETS: t.List[str] = [] # this PREVIEW_TARGETS excludes 'linux' target
DEFAULT_SDKCONFIG = 'default'
DEFAULT_LOGDIR = 'pytest-embedded'
# by default the timeout is 1h, for some special cases we need to extend it

View File

@@ -6,9 +6,11 @@ from pathlib import Path
from xml.etree import ElementTree as ET
import pytest
import yaml
from _pytest.config import Config
from _pytest.python import Function
from _pytest.runner import CallInfo
from dynamic_pipelines.constants import KNOWN_GENERATE_TEST_CHILD_PIPELINE_WARNINGS_FILEPATH
from idf_ci import IdfPytestPlugin
from idf_ci import PytestCase
from idf_ci.idf_pytest.plugin import IDF_CI_PYTEST_DEBUG_INFO_KEY
@@ -18,7 +20,6 @@ from pytest_embedded.utils import find_by_suffix
from pytest_ignore_test_results.ignore_results import ChildCase
from pytest_ignore_test_results.ignore_results import ChildCasesStashKey
from .constants import DEFAULT_SDKCONFIG
from .utils import format_case_id
from .utils import merge_junit_files
from .utils import normalize_testcase_file_path
@@ -83,6 +84,12 @@ class IdfLocalPlugin:
'IGNORE': 'skipped',
}
def __init__(self) -> None:
with open(KNOWN_GENERATE_TEST_CHILD_PIPELINE_WARNINGS_FILEPATH) as fr:
known_warnings_dict = yaml.safe_load(fr) or dict()
self.exclude_no_env_markers_test_cases: t.Set[str] = set(known_warnings_dict['no_env_marker_test_cases'])
@staticmethod
def get_param(item: Function, key: str, default: t.Any = None) -> t.Any:
# funcargs is not calculated while collection
@@ -111,6 +118,17 @@ class IdfLocalPlugin:
item.stash[IDF_CI_PYTEST_DEBUG_INFO_KEY] = 'skipped by temp_skip markers'
continue
if not case.env_markers and 'host_test' not in case.all_markers:
if case.name in self.exclude_no_env_markers_test_cases:
deselected_items.append(item)
continue
raise ValueError(
f'Test case {case.name} does not have any env markers. '
f'Please add env markers to the test case or add it to the '
f'`no_env_marker_test_cases` list in {KNOWN_GENERATE_TEST_CHILD_PIPELINE_WARNINGS_FILEPATH}'
)
filtered_items.append(item)
items[:] = filtered_items
@@ -119,12 +137,11 @@ class IdfLocalPlugin:
config.hook.pytest_deselected(items=deselected_items)
# OKAY!!! All left ones will be executed, sort it and add more markers
items[:] = sorted(
items, key=lambda x: (os.path.dirname(x.path), self.get_param(x, 'config', DEFAULT_SDKCONFIG))
)
items[:] = sorted(items, key=lambda x: (os.path.dirname(x.path), self.get_param(x, 'config', 'default')))
for item in items:
case = IdfPytestPlugin.get_case_by_item(item)
# set default timeout 10 minutes for each case
if 'timeout' not in item.keywords:
item.add_marker(pytest.mark.timeout(10 * 60))
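
For completeness, a sketch of the known-warnings file the plugin reads in __init__. The layout is inferred from the no_env_marker_test_cases key used above, and the case names are hypothetical:

import yaml

sample = """\
no_env_marker_test_cases:
  - test_hello_world    # hypothetical entries
  - test_console_basic
"""

known = yaml.safe_load(sample) or {}
exclude = set(known['no_env_marker_test_cases'])
assert 'test_hello_world' in exclude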

View File

@@ -1,4 +1,4 @@
# SPDX-FileCopyrightText: 2023-2024 Espressif Systems (Shanghai) CO LTD
# SPDX-FileCopyrightText: 2023-2025 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import logging
import os
@@ -62,10 +62,6 @@ def merge_junit_files(junit_files: t.List[str], target_path: str) -> None:
fw.write(ET.tostring(merged_testsuite))
def comma_sep_str_to_list(s: str) -> t.List[str]:
return [s.strip() for s in s.split(',') if s.strip()]
def normalize_testcase_file_path(file: str, app_path: t.Union[str, tuple]) -> str:
"""
Normalize file paths to a consistent format, resolving relative paths based on the `app_path`.
@@ -82,9 +78,7 @@ def normalize_testcase_file_path(file: str, app_path: t.Union[str, tuple]) -> st
def normalize_path(file_path: str, app_path: str) -> str:
"""Helper function to normalize a single path."""
if not os.path.isabs(file_path):
resolved_path = os.path.normpath(
os.path.join(app_path, file_path.removeprefix('./').removeprefix('../'))
)
resolved_path = os.path.normpath(os.path.join(app_path, file_path.removeprefix('./').removeprefix('../')))
else:
resolved_path = os.path.normpath(file_path)

View File

@@ -6,7 +6,7 @@
# https://docs.espressif.com/projects/esp-idf/en/latest/api-guides/tools/idf-tools.html
# ci
idf-ci==0.1.18
idf-ci==0.1.20
coverage
jsonschema