Merge branch 'ci/nested_dynamic_pipeline' into 'master'

CI: Dynamic Pipeline!!!

Closes IDFCI-1211, IDFCI-1729, IDF-8500, IDF-8456, IDFCI-1828, RDT-610, and RDT-615

See merge request espressif/esp-idf!26662
This commit is contained in:
Fu Hanxi
2024-01-11 13:08:28 +08:00
74 changed files with 3415 additions and 5657 deletions

41
.gitignore vendored
View File

@@ -24,18 +24,6 @@ GPATH
# cache dir
.cache/
# Components Unit Test Apps files
components/**/build/
components/**/build_*_*/
components/**/sdkconfig
components/**/sdkconfig.old
# Example project files
examples/**/build/
examples/**/build_esp*_*/
examples/**/sdkconfig
examples/**/sdkconfig.old
# Doc build artifacts
docs/_build/
docs/doxygen_sqlite3.db
@@ -44,16 +32,23 @@ docs/doxygen_sqlite3.db
docs/_static/DejaVuSans.ttf
docs/_static/NotoSansSC-Regular.otf
# Components Unit Test Apps files
components/**/build/
components/**/build_*_*/
components/**/sdkconfig
components/**/sdkconfig.old
# Example project files
examples/**/build/
examples/**/build_*_*/
examples/**/sdkconfig
examples/**/sdkconfig.old
# Unit test app files
tools/unit-test-app/sdkconfig
tools/unit-test-app/sdkconfig.old
tools/unit-test-app/build
tools/unit-test-app/build_*_*/
tools/unit-test-app/output
tools/unit-test-app/test_configs
# Unit Test CMake compile log folder
log_ut_cmake
tools/unit-test-app/sdkconfig
tools/unit-test-app/sdkconfig.old
# test application build files
tools/test_apps/**/build/
@@ -61,7 +56,8 @@ tools/test_apps/**/build_*_*/
tools/test_apps/**/sdkconfig
tools/test_apps/**/sdkconfig.old
TEST_LOGS
TEST_LOGS/
build_summary_*.xml
# gcov coverage reports
*.gcda
@@ -101,8 +97,9 @@ managed_components
# pytest log
pytest_embedded_log/
list_job_*.txt
size_info.txt
list_job*.txt
size_info*.txt
XUNIT_RESULT*.xml
# clang config (for LSP)
.clangd

View File

@@ -28,5 +28,4 @@ include:
- '.gitlab/ci/build.yml'
- '.gitlab/ci/integration_test.yml'
- '.gitlab/ci/host-test.yml'
- '.gitlab/ci/target-test.yml'
- '.gitlab/ci/deploy.yml'

View File

@@ -1,7 +1,7 @@
.build_template:
stage: build
extends:
- .after_script:build:ccache
- .after_script:build:ccache:upload-when-fail
image: $ESP_ENV_IMAGE
tags:
- build
@@ -32,8 +32,6 @@
# keep the size info to help track the binary size
- size_info.txt
- "**/build*/size.json"
when: always
expire_in: 4 days
script:
# CI specific options start from "--parallel-count xxx". could ignore when running locally
- run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v
@@ -45,8 +43,8 @@
examples/bluetooth/esp_ble_mesh/ble_mesh_console
examples/bluetooth/hci/controller_hci_uart_esp32
examples/wifi/iperf
--modified-components ${MODIFIED_COMPONENTS}
--modified-files ${MODIFIED_FILES}
--modified-components ${MR_MODIFIED_COMPONENTS}
--modified-files ${MR_MODIFIED_FILES}
# for detailed documents, please refer to .gitlab/ci/README.md#uploaddownload-artifacts-to-internal-minio-server
- python tools/ci/artifacts_handler.py upload
@@ -64,307 +62,14 @@
--copy-sdkconfig
--parallel-count ${CI_NODE_TOTAL:-1}
--parallel-index ${CI_NODE_INDEX:-1}
--modified-components ${MODIFIED_COMPONENTS}
--modified-files ${MODIFIED_FILES}
--modified-components ${MR_MODIFIED_COMPONENTS}
--modified-files ${MR_MODIFIED_FILES}
$TEST_BUILD_OPTS_EXTRA
- python tools/ci/artifacts_handler.py upload
.build_pytest_template:
extends:
- .build_cmake_template
script:
# CI specific options start from "--parallel-count xxx". could ignore when running locally
- run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v
-t $IDF_TARGET
-m \"not host_test\"
--pytest-apps
--parallel-count ${CI_NODE_TOTAL:-1}
--parallel-index ${CI_NODE_INDEX:-1}
--collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
--modified-components ${MODIFIED_COMPONENTS}
--modified-files ${MODIFIED_FILES}
- python tools/ci/artifacts_handler.py upload
.build_pytest_no_jtag_template:
extends:
- .build_cmake_template
script:
# CI specific options start from "--parallel-count xxx". could ignore when running locally
- run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v
-t $IDF_TARGET
-m \"not host_test and not jtag\"
--pytest-apps
--parallel-count ${CI_NODE_TOTAL:-1}
--parallel-index ${CI_NODE_INDEX:-1}
--collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
--modified-components ${MODIFIED_COMPONENTS}
--modified-files ${MODIFIED_FILES}
- python tools/ci/artifacts_handler.py upload
.build_pytest_jtag_template:
extends:
- .build_cmake_template
script:
# CI specific options start from "--parallel-count xxx". could ignore when running locally
- run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v
-t $IDF_TARGET
-m \"not host_test and jtag\"
--pytest-apps
--parallel-count ${CI_NODE_TOTAL:-1}
--parallel-index ${CI_NODE_INDEX:-1}
--collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
--modified-components ${MODIFIED_COMPONENTS}
--modified-files ${MODIFIED_FILES}
- python tools/ci/artifacts_handler.py upload
build_pytest_examples_esp32:
extends:
- .build_pytest_no_jtag_template
- .rules:build:example_test-esp32
parallel: 6
variables:
IDF_TARGET: esp32
TEST_DIR: examples
build_pytest_examples_esp32s2:
extends:
- .build_pytest_no_jtag_template
- .rules:build:example_test-esp32s2
parallel: 3
variables:
IDF_TARGET: esp32s2
TEST_DIR: examples
build_pytest_examples_esp32s3:
extends:
- .build_pytest_no_jtag_template
- .rules:build:example_test-esp32s3
parallel: 4
variables:
IDF_TARGET: esp32s3
TEST_DIR: examples
build_pytest_examples_esp32c3:
extends:
- .build_pytest_no_jtag_template
- .rules:build:example_test-esp32c3
parallel: 4
variables:
IDF_TARGET: esp32c3
TEST_DIR: examples
build_pytest_examples_esp32c2:
extends:
- .build_pytest_no_jtag_template
- .rules:build:example_test-esp32c2
parallel: 2
variables:
IDF_TARGET: esp32c2
TEST_DIR: examples
build_pytest_examples_esp32c6:
extends:
- .build_pytest_no_jtag_template
- .rules:build:example_test-esp32c6
parallel: 2
variables:
IDF_TARGET: esp32c6
TEST_DIR: examples
build_pytest_examples_esp32h2:
extends:
- .build_pytest_no_jtag_template
- .rules:build:example_test-esp32h2
parallel: 2
variables:
IDF_TARGET: esp32h2
TEST_DIR: examples
build_pytest_examples_esp32p4:
extends:
- .build_pytest_no_jtag_template
- .rules:build:example_test-esp32p4
parallel: 2
variables:
IDF_TARGET: esp32p4
TEST_DIR: examples
build_pytest_examples_jtag: # for all targets
extends:
- .build_pytest_jtag_template
- .rules:build:example_test
variables:
IDF_TARGET: all
TEST_DIR: examples
build_pytest_components_esp32:
extends:
- .build_pytest_template
- .rules:build:component_ut-esp32
parallel: 5
variables:
IDF_TARGET: esp32
TEST_DIR: components
build_pytest_components_esp32s2:
extends:
- .build_pytest_template
- .rules:build:component_ut-esp32s2
parallel: 4
variables:
IDF_TARGET: esp32s2
TEST_DIR: components
build_pytest_components_esp32s3:
extends:
- .build_pytest_template
- .rules:build:component_ut-esp32s3
parallel: 4
variables:
IDF_TARGET: esp32s3
TEST_DIR: components
build_pytest_components_esp32c3:
extends:
- .build_pytest_template
- .rules:build:component_ut-esp32c3
parallel: 4
variables:
IDF_TARGET: esp32c3
TEST_DIR: components
build_pytest_components_esp32c2:
extends:
- .build_pytest_template
- .rules:build:component_ut-esp32c2
parallel: 3
variables:
IDF_TARGET: esp32c2
TEST_DIR: components
build_pytest_components_esp32c6:
extends:
- .build_pytest_template
- .rules:build:component_ut-esp32c6
parallel: 3
variables:
IDF_TARGET: esp32c6
TEST_DIR: components
build_pytest_components_esp32h2:
extends:
- .build_pytest_template
- .rules:build:component_ut-esp32h2
parallel: 4
variables:
IDF_TARGET: esp32h2
TEST_DIR: components
build_pytest_components_esp32p4:
extends:
- .build_pytest_template
- .rules:build:component_ut-esp32p4
parallel: 4
variables:
IDF_TARGET: esp32p4
TEST_DIR: components
build_only_components_apps:
extends:
- .build_cmake_template
- .rules:build:component_ut
parallel: 5
script:
- set_component_ut_vars
# CI specific options start from "--parallel-count xxx". could ignore when running locally
- run_cmd python tools/ci/ci_build_apps.py $COMPONENT_UT_DIRS -v
-t all
--parallel-count ${CI_NODE_TOTAL:-1}
--parallel-index ${CI_NODE_INDEX:-1}
--modified-components ${MODIFIED_COMPONENTS}
--modified-files ${MODIFIED_FILES}
- python tools/ci/artifacts_handler.py upload
build_pytest_test_apps_esp32:
extends:
- .build_pytest_template
- .rules:build:custom_test-esp32
variables:
IDF_TARGET: esp32
TEST_DIR: tools/test_apps
build_pytest_test_apps_esp32s2:
extends:
- .build_pytest_template
- .rules:build:custom_test-esp32s2
variables:
IDF_TARGET: esp32s2
TEST_DIR: tools/test_apps
build_pytest_test_apps_esp32s3:
extends:
- .build_pytest_template
- .rules:build:custom_test-esp32s3
parallel: 2
variables:
IDF_TARGET: esp32s3
TEST_DIR: tools/test_apps
build_pytest_test_apps_esp32c3:
extends:
- .build_pytest_template
- .rules:build:custom_test-esp32c3
variables:
IDF_TARGET: esp32c3
TEST_DIR: tools/test_apps
build_pytest_test_apps_esp32c2:
extends:
- .build_pytest_template
- .rules:build:custom_test-esp32c2
variables:
IDF_TARGET: esp32c2
TEST_DIR: tools/test_apps
build_pytest_test_apps_esp32c6:
extends:
- .build_pytest_template
- .rules:build:custom_test-esp32c6
variables:
IDF_TARGET: esp32c6
TEST_DIR: tools/test_apps
build_pytest_test_apps_esp32h2:
extends:
- .build_pytest_template
- .rules:build:custom_test-esp32h2
variables:
IDF_TARGET: esp32h2
TEST_DIR: tools/test_apps
build_pytest_test_apps_esp32p4:
extends:
- .build_pytest_template
- .rules:build:custom_test-esp32p4
variables:
IDF_TARGET: esp32p4
TEST_DIR: tools/test_apps
build_only_tools_test_apps:
extends:
- .build_cmake_template
- .rules:build:custom_test
parallel: 9
script:
# CI specific options start from "--parallel-count xxx". could ignore when running locally
- run_cmd python tools/ci/ci_build_apps.py tools/test_apps -v
-t all
--parallel-count ${CI_NODE_TOTAL:-1}
--parallel-index ${CI_NODE_INDEX:-1}
--modified-components ${MODIFIED_COMPONENTS}
--modified-files ${MODIFIED_FILES}
- python tools/ci/artifacts_handler.py upload
######################
# build_template_app #
######################
.build_template_app_template:
extends:
- .build_template
@@ -376,12 +81,10 @@ build_only_tools_test_apps:
BUILD_LOG_CMAKE: "${LOG_PATH}/cmake_@t_@w.txt"
BUILD_COMMAND_ARGS: ""
artifacts:
when: always
paths:
- log_template_app/*
- size_info.txt
- build_template_app/**/size.json
expire_in: 1 week
script:
# Set the variable for 'esp-idf-template' testing
- ESP_IDF_TEMPLATE_GIT=${ESP_IDF_TEMPLATE_GIT:-"https://github.com/espressif/esp-idf-template.git"}
@@ -404,96 +107,27 @@ fast_template_app:
BUILD_COMMAND_ARGS: "-p"
#------------------------------------------------------------------------------
build_examples_cmake_esp32:
extends:
- .build_cmake_template
- .rules:build:example_test-esp32
parallel: 8
variables:
IDF_TARGET: esp32
TEST_DIR: examples
build_examples_cmake_esp32s2:
extends:
- .build_cmake_template
- .rules:build:example_test-esp32s2
parallel: 7
variables:
IDF_TARGET: esp32s2
TEST_DIR: examples
build_examples_cmake_esp32s3:
extends:
- .build_cmake_template
- .rules:build:example_test-esp32s3
parallel: 11
variables:
IDF_TARGET: esp32s3
TEST_DIR: examples
build_examples_cmake_esp32c2:
extends:
- .build_cmake_template
- .rules:build:example_test-esp32c2
parallel: 7
variables:
IDF_TARGET: esp32c2
TEST_DIR: examples
build_examples_cmake_esp32c3:
extends:
- .build_cmake_template
- .rules:build:example_test-esp32c3
parallel: 9
variables:
IDF_TARGET: esp32c3
TEST_DIR: examples
build_examples_cmake_esp32c6:
extends:
- .build_cmake_template
- .rules:build:example_test-esp32c6
parallel: 11
variables:
IDF_TARGET: esp32c6
TEST_DIR: examples
build_examples_cmake_esp32h2:
extends:
- .build_cmake_template
- .rules:build:example_test-esp32h2
parallel: 9
variables:
IDF_TARGET: esp32h2
TEST_DIR: examples
build_examples_cmake_esp32p4:
extends:
- .build_cmake_template
- .rules:build:example_test-esp32p4
parallel: 4
variables:
IDF_TARGET: esp32p4
TEST_DIR: examples
########################################
# Clang Build Apps Without Tests Cases #
########################################
build_clang_test_apps_esp32:
extends:
- .build_cmake_clang_template
- .rules:build:custom_test-esp32
- .rules:build
variables:
IDF_TARGET: esp32
build_clang_test_apps_esp32s2:
extends:
- .build_cmake_clang_template
- .rules:build:custom_test-esp32s2
- .rules:build
variables:
IDF_TARGET: esp32s2
build_clang_test_apps_esp32s3:
extends:
- .build_cmake_clang_template
- .rules:build:custom_test-esp32s3
- .rules:build
variables:
IDF_TARGET: esp32s3
@@ -510,26 +144,29 @@ build_clang_test_apps_esp32s3:
build_clang_test_apps_esp32c3:
extends:
- .build_clang_test_apps_riscv
- .rules:build:custom_test-esp32c3
- .rules:build
variables:
IDF_TARGET: esp32c3
build_clang_test_apps_esp32c2:
extends:
- .build_clang_test_apps_riscv
- .rules:build:custom_test-esp32c2
- .rules:build
variables:
IDF_TARGET: esp32c2
build_clang_test_apps_esp32c6:
extends:
- .build_clang_test_apps_riscv
- .rules:build:custom_test-esp32c6
- .rules:build
# TODO: c6 builds fail in master due to missing headers
allow_failure: true
variables:
IDF_TARGET: esp32c6
######################
# Build System Tests #
######################
.test_build_system_template:
stage: host_test
extends:
@@ -554,7 +191,6 @@ pytest_build_system:
paths:
- XUNIT_RESULT.xml
- test_build_system
when: always
expire_in: 2 days
reports:
junit: XUNIT_RESULT.xml
@@ -571,7 +207,6 @@ pytest_build_system_macos:
paths:
- XUNIT_RESULT.xml
- test_build_system
when: always
expire_in: 2 days
reports:
junit: XUNIT_RESULT.xml
@@ -603,7 +238,6 @@ pytest_build_system_win:
paths:
- XUNIT_RESULT.xml
- test_build_system
when: always
expire_in: 2 days
reports:
junit: XUNIT_RESULT.xml
@@ -641,3 +275,44 @@ build_template_app:
needs:
- job: fast_template_app
artifacts: false
####################
# Dynamic Pipeline #
####################
generate_build_child_pipeline:
extends:
- .build_template
dependencies: # set dependencies to null to avoid missing artifacts issue
needs:
- pipeline_variables
artifacts:
paths:
- build_child_pipeline.yml
- test_related_apps.txt
- non_test_related_apps.txt
script:
- run_cmd python tools/ci/dynamic_pipelines/scripts/generate_build_child_pipeline.py
--modified-components ${MR_MODIFIED_COMPONENTS}
--modified-files ${MR_MODIFIED_FILES}
build_child_pipeline:
stage: build
needs:
- job: fast_template_app
artifacts: false
- pipeline_variables
- generate_build_child_pipeline
variables:
IS_MR_PIPELINE: $IS_MR_PIPELINE
MR_MODIFIED_COMPONENTS: $MR_MODIFIED_COMPONENTS
MR_MODIFIED_FILES: $MR_MODIFIED_FILES
PARENT_PIPELINE_ID: $CI_PIPELINE_ID
BUILD_AND_TEST_ALL_APPS: $BUILD_AND_TEST_ALL_APPS
# https://gitlab.com/gitlab-org/gitlab/-/issues/214340
inherit:
variables: false
trigger:
include:
- artifact: build_child_pipeline.yml
job: generate_build_child_pipeline
strategy: depend

View File

@@ -85,6 +85,7 @@ variables:
################################################
.common_before_scripts: &common-before_scripts |
source tools/ci/utils.sh
is_based_on_commits $REQUIRED_ANCESTOR_COMMITS
if [[ -n "$IDF_DONT_USE_MIRRORS" ]]; then
@@ -208,6 +209,10 @@ variables:
- export EXTRA_CXXFLAGS=${PEDANTIC_CXXFLAGS}
.after_script:build:ccache:
after_script:
- *show_ccache_statistics
.after_script:build:ccache:upload-when-fail:
after_script:
- *show_ccache_statistics
- *upload_failed_job_log_artifacts
@@ -345,6 +350,9 @@ default:
- *setup_tools_and_idf_python_venv
- add_gitlab_ssh_keys
- fetch_submodules
artifacts:
expire_in: 1 week
when: always
retry:
max: 2
when:

View File

@@ -66,104 +66,6 @@
included_in:
- build:check
# ---------------
# Build Test Jobs
# ---------------
"build:{0}-{1}":
matrix:
- *target_test
- *all_targets
labels:
- build
patterns:
- build_components
- build_system
- build_target_test
- downloadable-tools
included_in:
- "build:{0}"
- build:target_test
####################
# Target Test Jobs #
####################
"test:{0}-{1}":
matrix:
- *target_test
- *all_targets
labels: # For each rule, use labels <test_type> and <test_type>-<target>
- "{0}"
- "{0}_{1}"
- target_test
patterns: # For each rule, use patterns <test_type> and build-<test_type>
- "{0}"
- "build-{0}"
included_in: # Parent rules
- "build:{0}"
- "build:{0}-{1}"
- build:target_test
# -------------
# Special Cases
# -------------
# To reduce the specific runners' usage.
# Do not create these jobs by default patterns on development branches
# Can be triggered by labels or related changes
"test:{0}-{1}-{2}":
matrix:
- *target_test
- *all_targets
- - wifi # pytest*wifi*
- ethernet # pytest*ethernet*
- sdio # pytest*sdio*
- usb # USB Device & Host tests
- adc # pytest*adc*
- i154
- flash_multi
- ecdsa
- nvs_encr_hmac
patterns:
- "{0}-{1}-{2}"
- "{0}-{2}"
- "target_test-{2}"
labels:
- "{0}_{1}"
- "{0}"
- target_test
included_in:
- "build:{0}-{1}"
- "build:{0}"
- build:target_test
# For example_test*flash_encryption_wifi_high_traffic jobs
# set `INCLUDE_NIGHTLY_RUN` variable when triggered on development branches
"test:example_test-{0}-include_nightly_run-rule":
matrix:
- - esp32
- esp32c3
specific_rules:
- "if-example_test-ota-include_nightly_run-rule"
included_in:
- "build:example_test-{0}"
- "build:example_test"
- build:target_test
# For i154 runners
"test:example_test-i154":
patterns:
- "example_test-i154"
- "target_test-i154"
labels:
- target_test
- example_test
included_in:
- "build:example_test-esp32s3"
- "build:example_test-esp32c6"
- "build:example_test-esp32h2"
- "build:example_test"
- build:target_test
"test:host_test":
labels:
- host_test

View File

@@ -121,7 +121,6 @@ build_docs_html_full:
artifacts: false
optional: true
artifacts:
when: always
paths:
- docs/_build/*/*/*.txt
- docs/_build/*/*/html/*
@@ -135,7 +134,6 @@ build_docs_html_full_prod:
- .doc-rules:build:docs-full-prod
dependencies: [] # Stop build_docs jobs from downloading all previous job's artifacts
artifacts:
when: always
paths:
- docs/_build/*/*/*.txt
- docs/_build/*/*/html/*
@@ -152,7 +150,6 @@ build_docs_html_partial:
artifacts: false
optional: true
artifacts:
when: always
paths:
- docs/_build/*/*/*.txt
- docs/_build/*/*/html/*
@@ -175,7 +172,6 @@ build_docs_pdf:
artifacts: false
optional: true
artifacts:
when: always
paths:
- docs/_build/*/*/latex/*
expire_in: 4 days
@@ -188,7 +184,6 @@ build_docs_pdf_prod:
- .doc-rules:build:docs-full-prod
dependencies: [] # Stop build_docs jobs from downloading all previous job's artifacts
artifacts:
when: always
paths:
- docs/_build/*/*/latex/*
expire_in: 4 days
@@ -266,11 +261,9 @@ check_doc_links:
artifacts: false
tags: ["build", "amd64", "internet"]
artifacts:
when: always
paths:
- docs/_build/*/*/*.txt
- docs/_build/*/*/linkcheck/*.txt
expire_in: 1 week
allow_failure: true
script:
- cd docs

View File

@@ -28,7 +28,6 @@ test_nvs_coverage:
artifacts:
paths:
- components/nvs_flash/test_nvs_host/coverage_report
expire_in: 1 week
script:
- cd components/nvs_flash/test_nvs_host
- make coverage_report
@@ -65,7 +64,6 @@ test_reproducible_build:
- "**/build*/*.bin"
- "**/build*/bootloader/*.bin"
- "**/build*/partition_table/*.bin"
expire_in: 1 week
test_spiffs_on_host:
extends: .host_test_template
@@ -110,7 +108,6 @@ test_cli_installer:
paths:
- tools/tools.new.json
- tools/test_idf_tools/test_python_env_logs.txt
expire_in: 1 week
image:
name: $ESP_ENV_IMAGE
entrypoint: [""] # use system python3. no extra pip package installed
@@ -130,7 +127,6 @@ test_cli_installer:
when: on_failure
paths:
- components/efuse/${IDF_TARGET}/esp_efuse_table.c
expire_in: 1 week
script:
- cd ${IDF_PATH}/components/efuse/
- ./efuse_table_gen.py -t "${IDF_TARGET}" ${IDF_PATH}/components/efuse/${IDF_TARGET}/esp_efuse_table.csv
@@ -173,7 +169,6 @@ test_logtrace_proc:
paths:
- tools/esp_app_trace/test/logtrace/output
- tools/esp_app_trace/test/logtrace/.coverage
expire_in: 1 week
script:
- cd ${IDF_PATH}/tools/esp_app_trace/test/logtrace
- ./test.sh
@@ -185,7 +180,6 @@ test_sysviewtrace_proc:
paths:
- tools/esp_app_trace/test/sysview/output
- tools/esp_app_trace/test/sysview/.coverage
expire_in: 1 week
script:
- cd ${IDF_PATH}/tools/esp_app_trace/test/sysview
- ./test.sh
@@ -194,13 +188,11 @@ test_tools:
extends:
- .host_test_template
artifacts:
when: always
paths:
- ${IDF_PATH}/*.out
- ${IDF_PATH}/XUNIT_*.xml
reports:
junit: ${IDF_PATH}/XUNIT_*.xml
expire_in: 1 week
variables:
LC_ALL: C.UTF-8
INSTALL_QEMU: 1 # for test_idf_qemu.py
@@ -280,13 +272,11 @@ test_pytest_qemu:
- .host_test_template
- .before_script:build
artifacts:
when: always
paths:
- XUNIT_RESULT.xml
- pytest_embedded_log/
reports:
junit: XUNIT_RESULT.xml
expire_in: 1 week
allow_failure: true # IDFCI-1752
parallel:
matrix:
@@ -299,8 +289,8 @@ test_pytest_qemu:
--pytest-apps
-m qemu
--collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
--modified-components ${MODIFIED_COMPONENTS}
--modified-files ${MODIFIED_FILES}
--modified-components ${MR_MODIFIED_COMPONENTS}
--modified-files ${MR_MODIFIED_FILES}
- retry_failed git clone $KNOWN_FAILURE_CASES_REPO known_failure_cases
- run_cmd pytest
--target $IDF_TARGET
@@ -316,22 +306,20 @@ test_pytest_linux:
- .host_test_template
- .before_script:build
artifacts:
when: always
paths:
- XUNIT_RESULT.xml
- pytest_embedded_log/
- "**/build*/build_log.txt"
reports:
junit: XUNIT_RESULT.xml
expire_in: 1 week
script:
- run_cmd python tools/ci/ci_build_apps.py components examples tools/test_apps -vv
--target linux
--pytest-apps
-m host_test
--collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
--modified-components ${MODIFIED_COMPONENTS}
--modified-files ${MODIFIED_FILES}
--modified-components ${MR_MODIFIED_COMPONENTS}
--modified-files ${MR_MODIFIED_FILES}
- retry_failed git clone $KNOWN_FAILURE_CASES_REPO known_failure_cases
- run_cmd pytest
--target linux
@@ -339,3 +327,16 @@ test_pytest_linux:
--junitxml=XUNIT_RESULT.xml
--ignore-result-files known_failure_cases/known_failure_cases.txt
--app-info-filepattern \"list_job_*.txt\"
test_idf_pytest_plugin:
extends:
- .host_test_template
- .rules:patterns:idf-pytest-plugin
variables:
SUBMODULES_TO_FETCH: "none"
artifacts:
reports:
junit: XUNIT_RESULT.xml
script:
- cd tools/ci/idf_pytest
- pytest --junitxml=${CI_PROJECT_DIR}/XUNIT_RESULT.xml

View File

@@ -84,7 +84,6 @@ check_chip_support_components:
paths:
- esp_hw_support_part.h
- bootloader_support_part.h
expire_in: 1 week
script:
- python tools/ci/check_soc_headers_leak.py
- find ${IDF_PATH}/components/soc/*/include/soc/ -name "*_struct.h" -print0 | xargs -0 -n1 ./tools/ci/check_soc_struct_headers.py
@@ -98,7 +97,6 @@ check_esp_err_to_name:
when: on_failure
paths:
- components/esp_common/esp_err_to_name.c
expire_in: 1 week
script:
- cd ${IDF_PATH}/tools/
- ./gen_esp_err_to_name.py
@@ -122,12 +120,6 @@ check_version_tag:
script:
- (git cat-file -t $CI_COMMIT_REF_NAME | grep tag) || (echo "ESP-IDF versions must be annotated tags." && exit 1)
check_artifacts_expire_time:
extends: .pre_check_template
script:
# check if we have set expire time for all artifacts
- python tools/ci/check_artifacts_expire_time.py
check_test_scripts_build_test_rules:
extends:
- .pre_check_template
@@ -153,9 +145,22 @@ pipeline_variables:
tags:
- build
script:
- MODIFIED_FILES=$(echo $GIT_DIFF_OUTPUT | xargs)
# MODIFIED_FILES is a list of files that changed, could be used everywhere
- MODIFIED_FILES=$(echo "$GIT_DIFF_OUTPUT" | xargs)
- echo "MODIFIED_FILES=$MODIFIED_FILES" >> pipeline.env
- echo "MODIFIED_COMPONENTS=$(run_cmd python tools/ci/ci_get_mr_info.py components --modified-files $MODIFIED_FILES | xargs)" >> pipeline.env
# MR_MODIFIED_FILES and MR_MODIFIED_COMPONENTS are semicolon separated lists that is used in MR only
# for non MR pipeline, these are empty lists
- |
if [ $IS_MR_PIPELINE == "0" ]; then
echo "MR_MODIFIED_FILES=" >> pipeline.env
echo "MR_MODIFIED_COMPONENTS=" >> pipeline.env
else
MR_MODIFIED_FILES=$(echo "$GIT_DIFF_OUTPUT" | tr '\n' ';')
echo "MR_MODIFIED_FILES=\"$MR_MODIFIED_FILES\"" >> pipeline.env
MR_MODIFIED_COMPONENTS=$(run_cmd python tools/ci/ci_get_mr_info.py components --modified-files $MODIFIED_FILES | tr '\n' ';')
echo "MR_MODIFIED_COMPONENTS=\"$MR_MODIFIED_COMPONENTS\"" >> pipeline.env
fi
- |
if echo "$CI_MERGE_REQUEST_LABELS" | egrep "(^|,)BUILD_AND_TEST_ALL_APPS(,|$)"; then
echo "BUILD_AND_TEST_ALL_APPS=1" >> pipeline.env
@@ -165,4 +170,3 @@ pipeline_variables:
artifacts:
reports:
dotenv: pipeline.env
expire_in: 4 days

File diff suppressed because it is too large Load Diff

View File

@@ -6,8 +6,6 @@ clang_tidy_check:
artifacts:
paths:
- clang_tidy_reports/
when: always
expire_in: 1 day
variables:
IDF_TOOLCHAIN: clang
script:
@@ -23,10 +21,8 @@ check_pylint:
needs:
- pipeline_variables
artifacts:
when: always
reports:
codequality: pylint.json
expire_in: 1 week
script:
- |
if [ -n "$CI_MERGE_REQUEST_IID" ]; then
@@ -72,10 +68,8 @@ check_pylint:
GIT_DEPTH: 0
REPORT_PATTERN: clang_tidy_reports/**/*.txt
artifacts:
when: always
paths:
- $REPORT_PATTERN
expire_in: 1 week
dependencies: # Here is not a hard dependency relationship, could be skipped when only python files changed. so we do not use "needs" here.
- clang_tidy_check

File diff suppressed because it is too large Load Diff

View File

@@ -91,20 +91,13 @@ repos:
always_run: true
files: '\.gitlab/CODEOWNERS'
pass_filenames: false
- id: check-rules-yml
name: Check rules.yml all rules have at lease one job applied, all rules needed exist
entry: tools/ci/check_rules_yml.py
language: python
files: '\.gitlab/ci/.+\.yml|\.gitlab-ci.yml|\.gitmodules'
pass_filenames: false
additional_dependencies:
- PyYAML == 5.3.1
- id: check-generated-rules
name: Check rules are generated (based on .gitlab/ci/dependencies/dependencies.yml)
entry: tools/ci/generate_rules.py
language: python
files: '\.gitlab/ci/dependencies/.+|\.gitlab/ci/.*\.yml'
files: '\.gitlab/ci/dependencies/.+|\.gitlab/ci/.*\.yml|.gitlab-ci.yml'
pass_filenames: false
require_serial: true
additional_dependencies:
- PyYAML == 5.3.1
- id: mypy-check
@@ -115,6 +108,7 @@ repos:
- 'mypy-extensions==0.4.3'
- 'types-setuptools==57.4.14'
- 'types-PyYAML==0.1.9'
- 'types-requests'
exclude: >
(?x)^(
.*_pb2.py
@@ -161,7 +155,7 @@ repos:
require_serial: true
additional_dependencies:
- PyYAML == 5.3.1
- idf_build_apps~=1.0
- idf-build-apps~=2.0.0rc1
- id: sort-build-test-rules-ymls
name: sort .build-test-rules.yml files
entry: tools/ci/check_build_test_rules.py sort-yaml
@@ -185,6 +179,14 @@ repos:
language: python
always_run: true
require_serial: true
- id: gitlab-yaml-linter
name: Check gitlab yaml files
entry: tools/ci/gitlab_yaml_linter.py
language: python
files: '\.gitlab-ci\.yml|\.gitlab/ci/.+\.yml'
pass_filenames: false
additional_dependencies:
- PyYAML == 5.3.1
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.0.1
hooks:

View File

@@ -21,6 +21,12 @@ components/driver/test_apps/i2s_test_apps/legacy_i2s_driver:
components/driver/test_apps/legacy_adc_driver:
disable:
- if: SOC_ADC_SUPPORTED != 1
depends_components:
- efuse
- esp_driver_i2s
- esp_driver_spi
depends_filepatterns:
- components/driver/deprecated/**/*adc*
components/driver/test_apps/legacy_i2c_driver:
disable_test:

View File

@@ -204,6 +204,7 @@ def test_esp_emac_hal(dut: IdfDut) -> None:
@pytest.mark.esp32
@pytest.mark.ip101
@pytest.mark.temp_skip_ci(targets=['esp32'], reason='runner under maintenance')
@pytest.mark.parametrize('config', [
'default_ip101',
], indirect=True)

View File

@@ -15,25 +15,22 @@ components/fatfs/test_apps/flash_wl:
disable_test:
- if: IDF_TARGET not in ["esp32", "esp32c3"]
reason: only one target per arch needed
depends_components:
- esp_partition
- spi_flash
- fatfs
- vfs
- wear_leveling
components/fatfs/test_apps/sdcard:
disable:
- if: IDF_TARGET == "esp32p4"
temporary: true
reason: target esp32p4 is not supported yet # TODO: IDF-7501
disable_test:
- if: IDF_TARGET not in ["esp32", "esp32c3"]
temporary: true
reason: lack of runners
depends_components:
- esp_driver_sdmmc
- esp_driver_spi

View File

@@ -5,3 +5,7 @@ components/mbedtls/test_apps:
- if: CONFIG_NAME == "psram" and SOC_SPIRAM_SUPPORTED != 1
- if: CONFIG_NAME == "psram_all_ext" and SOC_SPIRAM_SUPPORTED != 1
- if: CONFIG_NAME == "ecdsa_sign" and SOC_ECDSA_SUPPORTED != 1
depends_components:
- efuse
depends_filepatterns:
- components/mbedtls/port/ecdsa/*

View File

@@ -1,2 +1,2 @@
| Supported Targets | ESP32 | ESP32-C2 | ESP32-C3 | ESP32-C6 | ESP32-H2 | ESP32-P4 | ESP32-S2 | ESP32-S3 |
| ----------------- | ----- | -------- | -------- | -------- | -------- | -------- | -------- | -------- |
| Supported Targets | ESP32 | ESP32-C2 | ESP32-C3 | ESP32-C6 | ESP32-H2 | ESP32-S2 | ESP32-S3 |
| ----------------- | ----- | -------- | -------- | -------- | -------- | -------- | -------- |

View File

@@ -3,3 +3,8 @@
components/usb/test_apps:
enable:
- if: SOC_USB_OTG_SUPPORTED == 1
depends_components:
- usb
depends_filepatterns:
- components/hal/usb*.c
- components/hal/esp32*/include/hal/usb*.h

View File

@@ -1,53 +1,54 @@
# SPDX-FileCopyrightText: 2021-2023 Espressif Systems (Shanghai) CO LTD
# SPDX-FileCopyrightText: 2021-2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
# pylint: disable=W0621 # redefined-outer-name
# This file is a pytest root configuration file and provide the following functionalities:
# 1. Defines a few fixtures that could be used under the whole project.
# 2. Defines a few hook functions.
#
# IDF is using [pytest](https://github.com/pytest-dev/pytest) and
# [pytest-embedded plugin](https://github.com/espressif/pytest-embedded) as its example test framework.
#
# This is an experimental feature, and if you found any bug or have any question, please report to
# https://github.com/espressif/pytest-embedded/issues
# [pytest-embedded plugin](https://github.com/espressif/pytest-embedded) as its test framework.
# if you found any bug or have any question,
# please report to https://github.com/espressif/pytest-embedded/issues
# or discuss at https://github.com/espressif/pytest-embedded/discussions
import os
import sys
import gitlab
if os.path.join(os.path.dirname(__file__), 'tools', 'ci') not in sys.path:
sys.path.append(os.path.join(os.path.dirname(__file__), 'tools', 'ci'))
if os.path.join(os.path.dirname(__file__), 'tools', 'ci', 'python_packages') not in sys.path:
sys.path.append(os.path.join(os.path.dirname(__file__), 'tools', 'ci', 'python_packages'))
import glob
import json
import io
import logging
import os
import re
import sys
import typing as t
import zipfile
from copy import deepcopy
from datetime import datetime
from typing import Callable, Optional
import common_test_methods # noqa: F401
import gitlab_api
import pytest
import requests
import yaml
from _pytest.config import Config
from _pytest.fixtures import FixtureRequest
from artifacts_handler import ArtifactType
from dynamic_pipelines.constants import TEST_RELATED_APPS_DOWNLOAD_URLS_FILENAME
from idf_ci.app import import_apps_from_txt
from idf_ci.uploader import AppUploader
from idf_ci_utils import IDF_PATH
from idf_pytest.constants import DEFAULT_SDKCONFIG, ENV_MARKERS, SPECIAL_MARKERS, TARGET_MARKERS, PytestCase
from idf_pytest.plugin import IDF_PYTEST_EMBEDDED_KEY, ITEM_PYTEST_CASE_KEY, IdfPytestEmbedded
from idf_pytest.utils import format_case_id
from pytest_embedded.plugin import multi_dut_argument, multi_dut_fixture
from pytest_embedded_idf.dut import IdfDut
from pytest_embedded_idf.unity_tester import CaseTester
try:
from idf_ci_utils import IDF_PATH
from idf_pytest.constants import DEFAULT_SDKCONFIG, ENV_MARKERS, SPECIAL_MARKERS, TARGET_MARKERS
from idf_pytest.plugin import IDF_PYTEST_EMBEDDED_KEY, IdfPytestEmbedded
from idf_pytest.utils import format_case_id, get_target_marker_from_expr
except ImportError:
sys.path.append(os.path.join(os.path.dirname(__file__), 'tools', 'ci'))
from idf_ci_utils import IDF_PATH
from idf_pytest.constants import DEFAULT_SDKCONFIG, ENV_MARKERS, SPECIAL_MARKERS, TARGET_MARKERS
from idf_pytest.plugin import IDF_PYTEST_EMBEDDED_KEY, IdfPytestEmbedded
from idf_pytest.utils import format_case_id, get_target_marker_from_expr
try:
import common_test_methods # noqa: F401
except ImportError:
sys.path.append(os.path.join(os.path.dirname(__file__), 'tools', 'ci', 'python_packages'))
import common_test_methods # noqa: F401
############
# Fixtures #
@@ -100,9 +101,91 @@ def test_case_name(request: FixtureRequest, target: str, config: str) -> str:
return format_case_id(target, config, request.node.originalname, is_qemu=is_qemu, params=filtered_params) # type: ignore
@pytest.fixture(scope='session')
def pipeline_id(request: FixtureRequest) -> t.Optional[str]:
    """Main (parent) pipeline id: the ``--pipeline-id`` CLI option wins over the
    ``PARENT_PIPELINE_ID`` environment variable; ``None`` when neither is set."""
    cli_value = request.config.getoption('pipeline_id', None)
    if cli_value:
        return cli_value  # type: ignore
    return os.getenv('PARENT_PIPELINE_ID', None)
class BuildReportDownloader:
    """Download app build artifacts via the presigned URLs listed in a build report.

    ``presigned_url_yaml`` is a YAML document mapping an app build path to a
    mapping of artifact-type value -> presigned download URL.
    """

    def __init__(self, presigned_url_yaml: str) -> None:
        # {app_build_path: {artifact_type_value: presigned_url}}
        self.app_presigned_urls_dict: t.Dict[str, t.Dict[str, str]] = yaml.safe_load(presigned_url_yaml)

    def download_app(
        self, app_build_path: str, artifact_type: ArtifactType = ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES
    ) -> None:
        """Download the zip archive for ``app_build_path`` and extract it into the CWD.

        Raises:
            KeyError: if the app / artifact type is not listed in the build report.
            requests.HTTPError: if the presigned URL does not answer 2xx.
                (The original unzipped whatever body came back, turning an
                expired/invalid URL into a confusing ``BadZipFile`` later.)
        """
        url = self.app_presigned_urls_dict[app_build_path][artifact_type.value]
        logging.debug('Downloading app from %s', url)

        # stream=True avoids buffering the whole body inside requests before we copy it
        resp = requests.get(url, stream=True)
        resp.raise_for_status()

        with io.BytesIO() as f:
            for chunk in resp.iter_content(chunk_size=1024 * 1024):
                if chunk:
                    f.write(chunk)
            f.seek(0)
            with zipfile.ZipFile(f) as zip_ref:
                zip_ref.extractall()
@pytest.fixture(scope='session')
def app_downloader(pipeline_id: t.Optional[str]) -> t.Union[AppUploader, BuildReportDownloader, None]:
    """Session-scoped downloader for prebuilt test apps.

    Returns, in order of preference:
      * ``None`` when no pipeline id is given (local builds are used),
      * an :class:`AppUploader` when full S3/minio credentials are in the environment,
      * a :class:`BuildReportDownloader` built from the build report of the
        ``build_child_pipeline`` of the given main pipeline,
      * ``None`` when the build report cannot be located.
    """
    if not pipeline_id:
        return None

    # All four credentials are needed to talk to minio directly.
    # (The original listed IDF_S3_BUCKET twice; deduplicated here.)
    if all(
        var in os.environ
        for var in ('IDF_S3_BUCKET', 'IDF_S3_ACCESS_KEY', 'IDF_S3_SECRET_KEY', 'IDF_S3_SERVER')
    ):
        return AppUploader(pipeline_id)

    logging.info('Downloading build report from the build pipeline %s', pipeline_id)
    test_app_presigned_urls_file = None

    try:
        gl = gitlab_api.Gitlab(os.getenv('CI_PROJECT_ID', 'espressif/esp-idf'))
    # NOTE(review): `gitlab` is not in the visible import list (only `gitlab_api`) —
    # confirm `import gitlab` exists at module level or this except clause raises NameError.
    except gitlab.exceptions.GitlabAuthenticationError:
        msg = """To download artifacts from gitlab, please create ~/.python-gitlab.cfg with the following content:

[global]
default = internal
ssl_verify = true
timeout = 5

[internal]
url = <OUR INTERNAL HTTPS SERVER URL>
private_token = <YOUR PERSONAL ACCESS TOKEN>
api_version = 4
"""
        raise SystemExit(msg)

    # Locate the build child pipeline, then the job that published the build report.
    for child_pipeline in gl.project.pipelines.get(pipeline_id, lazy=True).bridges.list(iterator=True):
        if child_pipeline.name == 'build_child_pipeline':
            for job in gl.project.pipelines.get(child_pipeline.downstream_pipeline['id'], lazy=True).jobs.list(
                iterator=True
            ):
                if job.name == 'generate_pytest_build_report':
                    test_app_presigned_urls_file = gl.download_artifact(
                        job.id, [TEST_RELATED_APPS_DOWNLOAD_URLS_FILENAME]
                    )[0]
                    break

    if test_app_presigned_urls_file:
        return BuildReportDownloader(test_app_presigned_urls_file)

    return None
@pytest.fixture
@multi_dut_fixture
def build_dir(app_path: str, target: Optional[str], config: Optional[str]) -> str:
def build_dir(
request: FixtureRequest,
app_path: str,
target: t.Optional[str],
config: t.Optional[str],
app_downloader: t.Optional[AppUploader],
) -> str:
"""
Check local build dir with the following priority:
@@ -114,14 +197,25 @@ def build_dir(app_path: str, target: Optional[str], config: Optional[str]) -> st
Returns:
valid build directory
"""
check_dirs = []
if target is not None and config is not None:
check_dirs.append(f'build_{target}_{config}')
if target is not None:
check_dirs.append(f'build_{target}')
if config is not None:
check_dirs.append(f'build_{config}')
check_dirs.append('build')
# download from minio on CI
case: PytestCase = request._pyfuncitem.stash[ITEM_PYTEST_CASE_KEY]
if app_downloader:
# somehow hardcoded...
app_build_path = os.path.join(os.path.relpath(app_path, IDF_PATH), f'build_{target}_{config}')
if case.requires_elf_or_map:
app_downloader.download_app(app_build_path)
else:
app_downloader.download_app(app_build_path, ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES)
check_dirs = [f'build_{target}_{config}']
else:
check_dirs = []
if target is not None and config is not None:
check_dirs.append(f'build_{target}_{config}')
if target is not None:
check_dirs.append(f'build_{target}')
if config is not None:
check_dirs.append(f'build_{config}')
check_dirs.append('build')
for check_dir in check_dirs:
binary_path = os.path.join(app_path, check_dir)
@@ -138,13 +232,20 @@ def build_dir(app_path: str, target: Optional[str], config: Optional[str]) -> st
@pytest.fixture(autouse=True)
@multi_dut_fixture
def junit_properties(test_case_name: str, record_xml_attribute: Callable[[str, object], None]) -> None:
def junit_properties(test_case_name: str, record_xml_attribute: t.Callable[[str, object], None]) -> None:
"""
This fixture is autoused and will modify the junit report test case name to <target>.<config>.<case_name>
"""
record_xml_attribute('name', test_case_name)
@pytest.fixture(autouse=True)
@multi_dut_fixture
def ci_job_url(record_xml_attribute: t.Callable[[str, object], None]) -> None:
    """Autouse fixture: record the CI job URL as a junit XML attribute when running in CI."""
    url = os.getenv('CI_JOB_URL')
    if url:
        record_xml_attribute('ci_job_url', url)
@pytest.fixture(autouse=True)
def set_test_case_name(request: FixtureRequest, test_case_name: str) -> None:
    # Expose the formatted test case name through ``request.node.funcargs`` so
    # later hooks/plugins (e.g. junit reporting) can read it from the item.
    request.node.funcargs['test_case_name'] = test_case_name
@@ -154,7 +255,7 @@ def set_test_case_name(request: FixtureRequest, test_case_name: str) -> None:
# Log Util Functions #
######################
@pytest.fixture
def log_performance(record_property: Callable[[str, object], None]) -> Callable[[str, str], None]:
def log_performance(record_property: t.Callable[[str, object], None]) -> t.Callable[[str, str], None]:
"""
log performance item with pre-defined format to the console
and record it under the ``properties`` tag in the junit report if available.
@@ -172,7 +273,7 @@ def log_performance(record_property: Callable[[str, object], None]) -> Callable[
@pytest.fixture
def check_performance(idf_path: str) -> Callable[[str, float, str], None]:
def check_performance(idf_path: str) -> t.Callable[[str, float, str], None]:
"""
check if the given performance item meets the passing standard or not
"""
@@ -186,9 +287,9 @@ def check_performance(idf_path: str) -> Callable[[str, float, str], None]:
"""
def _find_perf_item(operator: str, path: str) -> float:
with open(path, 'r') as f:
with open(path) as f:
data = f.read()
match = re.search(r'#define\s+IDF_PERFORMANCE_{}_{}\s+([\d.]+)'.format(operator, item.upper()), data)
match = re.search(fr'#define\s+IDF_PERFORMANCE_{operator}_{item.upper()}\s+([\d.]+)', data)
return float(match.group(1)) # type: ignore
def _check_perf(operator: str, standard_value: float) -> None:
@@ -198,7 +299,7 @@ def check_performance(idf_path: str) -> Callable[[str, float, str], None]:
ret = value >= standard_value
if not ret:
raise AssertionError(
"[Performance] {} value is {}, doesn't meet pass standard {}".format(item, value, standard_value)
f"[Performance] {item} value is {value}, doesn't meet pass standard {standard_value}"
)
path_prefix = os.path.join(idf_path, 'components', 'idf_test', 'include')
@@ -212,7 +313,7 @@ def check_performance(idf_path: str) -> Callable[[str, float, str], None]:
for performance_file in performance_files:
try:
standard = _find_perf_item(op, performance_file)
except (IOError, AttributeError):
except (OSError, AttributeError):
# performance file doesn't exist or match is not found in it
continue
@@ -221,13 +322,13 @@ def check_performance(idf_path: str) -> Callable[[str, float, str], None]:
break
if not found_item:
raise AssertionError('Failed to get performance standard for {}'.format(item))
raise AssertionError(f'Failed to get performance standard for {item}')
return real_func
@pytest.fixture
def log_minimum_free_heap_size(dut: IdfDut, config: str) -> Callable[..., None]:
def log_minimum_free_heap_size(dut: IdfDut, config: str) -> t.Callable[..., None]:
def real_func() -> None:
res = dut.expect(r'Minimum free heap size: (\d+) bytes')
logging.info(
@@ -247,12 +348,12 @@ def log_minimum_free_heap_size(dut: IdfDut, config: str) -> Callable[..., None]:
return real_func
@pytest.fixture
@pytest.fixture(scope='session')
def dev_password(request: FixtureRequest) -> str:
return request.config.getoption('dev_passwd') or ''
@pytest.fixture
@pytest.fixture(scope='session')
def dev_user(request: FixtureRequest) -> str:
return request.config.getoption('dev_user') or ''
@@ -274,66 +375,69 @@ def pytest_addoption(parser: pytest.Parser) -> None:
'--dev-passwd',
help='password associated with some specific device/service used during the test execution',
)
idf_group.addoption(
'--app-info-basedir',
default=IDF_PATH,
help='app info base directory. specify this value when you\'re building under a '
'different IDF_PATH. (Default: $IDF_PATH)',
)
idf_group.addoption(
'--app-info-filepattern',
help='glob pattern to specify the files that include built app info generated by '
'`idf-build-apps --collect-app-info ...`. will not raise ValueError when binary '
'paths not exist in local file system if not listed recorded in the app info.',
'`idf-build-apps --collect-app-info ...`. will not raise ValueError when binary '
'paths not exist in local file system if not listed recorded in the app info.',
)
idf_group.addoption(
'--pipeline-id',
help='main pipeline id, not the child pipeline id. Specify this option to download the artifacts '
'from the minio server for debugging purpose.',
)
def pytest_configure(config: Config) -> None:
# cli option "--target"
target = config.getoption('target') or ''
target = [_t.strip().lower() for _t in (config.getoption('target', '') or '').split(',') if _t.strip()]
# add markers based on idf_pytest/constants.py
for name, description in {
**TARGET_MARKERS,
**ENV_MARKERS,
**SPECIAL_MARKERS,
}.items():
config.addinivalue_line('markers', f'{name}: {description}')
help_commands = ['--help', '--fixtures', '--markers', '--version']
for cmd in help_commands:
if cmd in config.invocation_params.args:
target = 'unneeded'
target = ['unneeded']
break
if not target: # also could specify through markexpr via "-m"
target = get_target_marker_from_expr(config.getoption('markexpr') or '')
markexpr = config.getoption('markexpr') or ''
# check marker expr set via "pytest -m"
if not target and markexpr:
# we use `-m "esp32 and generic"` in our CI to filter the test cases
# this doesn't cover all use cases, but fit what we do in CI.
for marker in markexpr.split('and'):
marker = marker.strip()
if marker in TARGET_MARKERS:
target.append(marker)
apps_list = None
app_info_basedir = config.getoption('app_info_basedir')
# "--target" must be set
if not target:
raise SystemExit(
"""Pass `--target TARGET[,TARGET...]` to specify all targets the test cases are using.
- for single DUT, we run with `pytest --target esp32`
- for multi DUT, we run with `pytest --target esp32,esp32,esp32s2` to indicate all DUTs
"""
)
apps = None
app_info_filepattern = config.getoption('app_info_filepattern')
if app_info_filepattern:
apps_list = []
for file in glob.glob(os.path.join(IDF_PATH, app_info_filepattern)):
with open(file) as fr:
for line in fr.readlines():
if not line.strip():
continue
# each line is a valid json
app_info = json.loads(line.strip())
if app_info_basedir and app_info['app_dir'].startswith(app_info_basedir):
relative_app_dir = os.path.relpath(app_info['app_dir'], app_info_basedir)
apps_list.append(os.path.join(IDF_PATH, os.path.join(relative_app_dir, app_info['build_dir'])))
print('Detected app: ', apps_list[-1])
else:
print(
f'WARNING: app_info base dir {app_info_basedir} not recognizable in {app_info["app_dir"]}, skipping...'
)
continue
apps = []
for f in glob.glob(os.path.join(IDF_PATH, app_info_filepattern)):
apps.extend(import_apps_from_txt(f))
config.stash[IDF_PYTEST_EMBEDDED_KEY] = IdfPytestEmbedded(
target=target,
sdkconfig=config.getoption('sdkconfig'),
apps_list=apps_list,
apps=apps,
)
config.pluginmanager.register(config.stash[IDF_PYTEST_EMBEDDED_KEY])
for name, description in {**TARGET_MARKERS, **ENV_MARKERS, **SPECIAL_MARKERS}.items():
config.addinivalue_line('markers', f'{name}: {description}')
def pytest_unconfigure(config: Config) -> None:
_pytest_embedded = config.stash.get(IDF_PYTEST_EMBEDDED_KEY, None)

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,14 @@
# For users checking this example, ignore the following code. This is so that
# the prebuilt project is built automatically in ESP-IDF CI.
if("$ENV{CI}")
    # Remove stale artifacts first — otherwise these files won't be rebuilt
    # when switching the built target within the same job.
    file(REMOVE
        ${CMAKE_SOURCE_DIR}/prebuilt/sdkconfig
        ${CMAKE_SOURCE_DIR}/main/libprebuilt.a
        ${CMAKE_SOURCE_DIR}/main/prebuilt.h
    )
    file(REMOVE_RECURSE ${CMAKE_SOURCE_DIR}/prebuilt/build)
    # Build the prebuilt sub-project up front so the main example can consume its outputs.
    execute_process(COMMAND ${IDFTOOL} build
        WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/prebuilt)
endif()

View File

@@ -18,11 +18,33 @@ examples/network/simple_sniffer:
- if: IDF_TARGET not in ["esp32", "esp32c3", "esp32s3"]
temporary: true
reason: lack of runners
depends_filepatterns:
- tools/ci/python_packages/common_test_methods.py
- examples/common_components/protocol_examples_common/**/*
- examples/protocols/**/*
- examples/wifi/**/*
- examples/network/simple_sniffer/**/*
- components/mbedtls/port/dynamic/*
- examples/system/ota/**/*
depends_components:
- app_update
- esp_https_ota
examples/network/sta2eth:
disable:
- if: SOC_WIFI_SUPPORTED != 1
examples/network/vlan_support:
disable_test:
- if: IDF_TARGET not in ["esp32"]
reason: Runner uses esp32 ethernet kit
depends_components:
- esp_eth
depends_filepatterns:
- tools/ci/python_packages/common_test_methods.py
- examples/common_components/**/*
- examples/protocols/**/*
- examples/system/ota/**/*
- examples/ethernet/iperf/**/*
- examples/network/vlan_support/**/*
- components/esp_netif/esp_netif_handlers.c

View File

@@ -1,5 +1,12 @@
# Documentation: .gitlab/ci/README.md#manifest-file-to-control-the-buildtest-apps
.adc_dependencies: &adc_dependencies
depends_components:
- esp_adc
- efuse
- esp_driver_i2s
- esp_driver_spi
.i2c_dependencies: &i2c_dependencies
depends_filepatterns:
# components
@@ -10,10 +17,12 @@
examples/peripherals/adc/continuous_read:
disable:
- if: SOC_ADC_DMA_SUPPORTED != 1
<<: *adc_dependencies
examples/peripherals/adc/oneshot_read:
disable:
- if: SOC_ADC_SUPPORTED != 1
<<: *adc_dependencies
examples/peripherals/analog_comparator:
disable:
@@ -30,6 +39,16 @@ examples/peripherals/dac:
disable:
- if: SOC_DAC_SUPPORTED != 1
examples/peripherals/dac/dac_cosine_wave:
disable:
- if: SOC_DAC_SUPPORTED != 1
depends_components:
- esp_adc
- efuse
- esp_driver_i2s
- esp_driver_spi
- esp_driver_dac
examples/peripherals/gpio:
depends_components:
- esp_driver_gpio
@@ -446,6 +465,21 @@ examples/peripherals/usb:
disable:
- if: SOC_USB_OTG_SUPPORTED != 1
examples/peripherals/usb/device:
enable:
- if: SOC_USB_OTG_SUPPORTED == 1
disable_test:
- if: IDF_TARGET == "esp32s3"
temporary: true
reason: lack of runners
depends_components:
- usb
depends_filepatterns:
- components/hal/usb*.c
- components/hal/esp32*/include/hal/usb*.h
- examples/peripherals/usb/host/**/*
- examples/peripherals/usb/device/**/*
examples/peripherals/usb_serial_jtag/usb_serial_jtag_echo:
disable:
- if: SOC_USB_SERIAL_JTAG_SUPPORTED != 1

View File

@@ -1,9 +0,0 @@
# Documentation: .gitlab/ci/README.md#manifest-file-to-control-the-buildtest-apps
examples/peripherals/usb/device:
enable:
- if: SOC_USB_OTG_SUPPORTED == 1
disable_test:
- if: IDF_TARGET == "esp32s3"
temporary: true
reason: lack of runners

View File

@@ -1,5 +1,24 @@
# Documentation: .gitlab/ci/README.md#manifest-file-to-control-the-buildtest-apps
.ethernet_dependencies: &ethernet_dependencies
# TODO: IDFCI-1821
depends_filepatterns:
- tools/ci/python_packages/common_test_methods.py
- components/esp_netif/esp_netif_handlers.c
.wifi_dependencies: &wifi_dependencies
depends_filepatterns:
- tools/ci/python_packages/common_test_methods.py
- examples/common_components/protocol_examples_common/**/*
- examples/protocols/**/*
- examples/wifi/**/*
- examples/network/simple_sniffer/**/*
- components/mbedtls/port/dynamic/*
- examples/system/ota/**/*
depends_components:
- app_update
- esp_https_ota
examples/protocols/esp_http_client:
enable:
- if: INCLUDE_DEFAULT == 1 or IDF_TARGET == "linux"
@@ -11,6 +30,7 @@ examples/protocols/esp_http_client:
- if: IDF_TARGET == "esp32p4"
temporary: true
reason: not supported on p4
<<: *ethernet_dependencies
examples/protocols/esp_local_ctrl:
disable:
@@ -22,6 +42,7 @@ examples/protocols/esp_local_ctrl:
- if: IDF_TARGET not in ["esp32", "esp32c3", "esp32s3"]
temporary: true
reason: lack of runners
<<: *wifi_dependencies
examples/protocols/http_request:
disable:
@@ -32,6 +53,7 @@ examples/protocols/http_request:
- if: IDF_TARGET != "esp32"
temporary: true
reason: only test on esp32
<<: *ethernet_dependencies
examples/protocols/http_server:
disable:
@@ -42,6 +64,7 @@ examples/protocols/http_server:
- if: IDF_TARGET not in ["esp32", "esp32c3", "esp32s3"]
temporary: true
reason: lack of runners
<<: *wifi_dependencies
examples/protocols/http_server/captive_portal:
disable:
@@ -52,12 +75,14 @@ examples/protocols/http_server/captive_portal:
- if: IDF_TARGET != "esp32"
temporary: true
reason: only test on esp32
<<: *wifi_dependencies
examples/protocols/http_server/restful_server:
disable:
- if: IDF_TARGET in ["esp32h2", "esp32p4"]
temporary: true
reason: not supported on p4 # TODO: IDF-8076
<<: *wifi_dependencies
examples/protocols/http_server/ws_echo_server:
disable:
@@ -68,6 +93,7 @@ examples/protocols/http_server/ws_echo_server:
- if: IDF_TARGET != "esp32"
temporary: true
reason: only test on esp32
<<: *wifi_dependencies
examples/protocols/https_mbedtls:
disable:
@@ -78,6 +104,7 @@ examples/protocols/https_mbedtls:
- if: IDF_TARGET != "esp32"
temporary: true
reason: lack of runners
<<: *ethernet_dependencies
examples/protocols/https_request:
disable:
@@ -88,6 +115,7 @@ examples/protocols/https_request:
- if: IDF_TARGET != "esp32"
temporary: true
reason: lack of runners
<<: *ethernet_dependencies
examples/protocols/https_server/simple:
disable:
@@ -98,6 +126,7 @@ examples/protocols/https_server/simple:
- if: IDF_TARGET not in ["esp32", "esp32c3", "esp32s3"]
temporary: true
reason: lack of runners
<<: *wifi_dependencies
examples/protocols/https_server/wss_server:
disable:
@@ -108,6 +137,7 @@ examples/protocols/https_server/wss_server:
- if: IDF_TARGET != "esp32"
temporary: true
reason: only test on esp32
<<: *wifi_dependencies
examples/protocols/https_x509_bundle:
disable:
@@ -118,6 +148,7 @@ examples/protocols/https_x509_bundle:
- if: IDF_TARGET != "esp32"
temporary: true
reason: lack of runners
<<: *ethernet_dependencies
examples/protocols/icmp_echo:
disable:
@@ -126,6 +157,7 @@ examples/protocols/icmp_echo:
reason: not supported on p4
disable_test:
- if: SOC_WIFI_SUPPORTED != 1
<<: *wifi_dependencies
examples/protocols/l2tap:
disable:
@@ -158,6 +190,7 @@ examples/protocols/mqtt/ssl:
- if: IDF_TARGET != "esp32"
temporary: true
reason: lack of runners
<<: *ethernet_dependencies
examples/protocols/mqtt/ssl_ds:
disable:
@@ -186,6 +219,7 @@ examples/protocols/mqtt/tcp:
- if: IDF_TARGET != "esp32"
temporary: true
reason: lack of runners
<<: *ethernet_dependencies
examples/protocols/mqtt/ws:
disable:
@@ -196,6 +230,7 @@ examples/protocols/mqtt/ws:
- if: IDF_TARGET != "esp32"
temporary: true
reason: lack of runners
<<: *ethernet_dependencies
examples/protocols/mqtt/wss:
disable:
@@ -206,6 +241,7 @@ examples/protocols/mqtt/wss:
- if: IDF_TARGET != "esp32"
temporary: true
reason: lack of runners
<<: *ethernet_dependencies
examples/protocols/mqtt5:
disable:
@@ -216,6 +252,7 @@ examples/protocols/mqtt5:
- if: IDF_TARGET != "esp32"
temporary: true
reason: lack of runners
<<: *ethernet_dependencies
examples/protocols/smtp_client:
disable:
@@ -228,6 +265,7 @@ examples/protocols/sntp:
- if: IDF_TARGET == "esp32"
temporary: true
reason: the other targets are not tested yet
<<: *wifi_dependencies
examples/protocols/sockets:
disable:
@@ -244,12 +282,12 @@ examples/protocols/sockets/non_blocking:
examples/protocols/sockets/tcp_client:
disable_test:
- if: SOC_WIFI_SUPPORTED != 1
enable:
- if: INCLUDE_DEFAULT == 1 or IDF_TARGET == "linux"
# linux target won't work with CONFIG_EXAMPLE_SOCKET_IP_INPUT_STDIN=y
disable:
- if: IDF_TARGET == "esp32p4"
temporary: true
reason: not supported on p4
<<: *wifi_dependencies
examples/protocols/sockets/tcp_server:
disable:
@@ -258,6 +296,7 @@ examples/protocols/sockets/tcp_server:
reason: not supported on p4
disable_test:
- if: SOC_WIFI_SUPPORTED != 1
<<: *wifi_dependencies
examples/protocols/sockets/udp_client:
disable:
@@ -266,6 +305,7 @@ examples/protocols/sockets/udp_client:
reason: not supported on p4
disable_test:
- if: SOC_WIFI_SUPPORTED != 1
<<: *wifi_dependencies
examples/protocols/sockets/udp_server:
disable:
@@ -274,6 +314,7 @@ examples/protocols/sockets/udp_server:
reason: not supported on p4
disable_test:
- if: SOC_WIFI_SUPPORTED != 1
<<: *wifi_dependencies
examples/protocols/static_ip:
disable:

View File

@@ -101,6 +101,7 @@ def actual_test(dut: Dut) -> None:
@pytest.mark.esp32 # internally tested using ESP32 with IP101 but may support all targets with SPI Ethernet
@pytest.mark.ip101
@pytest.mark.temp_skip_ci(targets=['esp32'], reason='runner under maintenance')
@pytest.mark.flaky(reruns=3, reruns_delay=5)
def test_esp_netif_l2tap_example(dut: Dut) -> None:
actual_test(dut)

View File

@@ -1,5 +1,5 @@
| Supported Targets | ESP32 | ESP32-C2 | ESP32-C3 | ESP32-C6 | ESP32-H2 | ESP32-S2 | ESP32-S3 | Linux |
| ----------------- | ----- | -------- | -------- | -------- | -------- | -------- | -------- | ----- |
| Supported Targets | ESP32 | ESP32-C2 | ESP32-C3 | ESP32-C6 | ESP32-H2 | ESP32-S2 | ESP32-S3 |
| ----------------- | ----- | -------- | -------- | -------- | -------- | -------- | -------- |
# TCP Client example

View File

@@ -13,3 +13,8 @@ examples/security/nvs_encryption_hmac:
- if: IDF_TARGET not in ["esp32c3"]
temporary: true
reason: lack of runners
depends_components:
- nvs_flash
- nvs_sec_provider
depends_filepatterns:
- examples/security/nvs_encryption_hmac/**/*

View File

@@ -1,6 +1,3 @@
## IDF Component Manager Manifest File
dependencies:
joltwallet/littlefs: "==1.5.5"
## Required IDF version
idf:
version: ">=5.2.0"
joltwallet/littlefs: "~=1.10.0"

View File

@@ -1,5 +1,11 @@
# Documentation: .gitlab/ci/README.md#manifest-file-to-control-the-buildtest-apps
.ethernet_dependencies: &ethernet_dependencies
# TODO: IDFCI-1821
depends_filepatterns:
- tools/ci/python_packages/common_test_methods.py
- components/esp_netif/esp_netif_handlers.c
examples/system/app_trace_basic:
disable:
- if: IDF_TARGET in ["esp32c6", "esp32h2", "esp32p4"]
@@ -151,6 +157,21 @@ examples/system/ota/advanced_https_ota:
- if: IDF_TARGET == "esp32c2" or IDF_TARGET == "esp32c6"
temporary: true
reason: lack of runners
depends_filepatterns:
- components/esp_netif/esp_netif_handlers.c
- components/mbedtls/port/dynamic/*
- examples/common_components/**/*
- examples/ethernet/iperf/**/*
- examples/network/simple_sniffer/**/*
- examples/network/vlan_support/**/*
- examples/protocols/**/*
- examples/system/ota/**/*
- examples/wifi/**/*
- tools/ci/python_packages/common_test_methods.py
depends_components:
- app_update
- esp_https_ota
- esp_eth
examples/system/ota/native_ota_example:
disable:
@@ -161,6 +182,7 @@ examples/system/ota/native_ota_example:
- if: IDF_TARGET == "esp32c6"
temporary: true
reason: lack of runners
<<: *ethernet_dependencies
examples/system/ota/otatool:
disable:
@@ -173,10 +195,14 @@ examples/system/ota/pre_encrypted_ota:
- if: IDF_TARGET in ["esp32h2", "esp32p4"]
temporary: true
reason: target esp32h2, esp32p4 is not supported yet
- if: CONFIG_NAME == "partial_download" and IDF_TARGET == "esp32c3"
temporary: true
reason: build failed
disable_test:
- if: IDF_TARGET == "esp32c2" or IDF_TARGET == "esp32c6"
temporary: true
reason: lack of runners
<<: *ethernet_dependencies
examples/system/ota/simple_ota_example:
disable:
@@ -188,6 +214,9 @@ examples/system/ota/simple_ota_example:
- if: IDF_TARGET == "esp32c2" or IDF_TARGET == "esp32c6"
temporary: true
reason: lack of runners
depends_components:
- app_update
- esp_https_ota
examples/system/perfmon:
enable:

View File

@@ -1,8 +1,9 @@
# Documentation: .gitlab/ci/README.md#manifest-file-to-control-the-buildtest-apps
.zigbee_dependencies: &zigbee_dependencies
depends_components:
- ieee802154
depends_filepatterns:
- components/ieee802154/**/*
- examples/zigbee/light_sample/**/*
examples/zigbee/esp_zigbee_gateway:

View File

@@ -12,6 +12,7 @@ addopts =
--skip-check-coredump y
--logfile-extension ".txt"
--check-duplicates y
--ignore-glob */managed_components/*
# ignore DeprecationWarning
filterwarnings =

View File

@@ -12,6 +12,7 @@ from pathlib import Path
from zipfile import ZipFile
import urllib3
from idf_pytest.constants import DEFAULT_BUILD_LOG_FILENAME
from minio import Minio
@@ -33,7 +34,7 @@ TYPE_PATTERNS_DICT = {
'**/build*/*.elf',
],
ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES: [
'**/build*/build_log.txt',
f'**/build*/{DEFAULT_BUILD_LOG_FILENAME}',
'**/build*/*.bin',
'**/build*/bootloader/*.bin',
'**/build*/partition_table/*.bin',
@@ -41,17 +42,17 @@ TYPE_PATTERNS_DICT = {
'**/build*/flash_project_args',
'**/build*/config/sdkconfig.json',
'**/build*/project_description.json',
'list_job_*.txt',
'list_job*.txt',
],
ArtifactType.LOGS: [
'**/build*/build_log.txt',
f'**/build*/{DEFAULT_BUILD_LOG_FILENAME}',
],
ArtifactType.SIZE_REPORTS: [
'**/build*/size.json',
'size_info.txt',
],
ArtifactType.JUNIT_REPORTS: [
'XUNIT_RESULT.xml',
'XUNIT_RESULT*.xml',
],
ArtifactType.MODIFIED_FILES_AND_COMPONENTS_REPORT: [
'pipeline.env',
@@ -66,6 +67,23 @@ def getenv(env_var: str) -> str:
raise Exception(f'Environment variable {env_var} not set') from e
def get_minio_client() -> Minio:
    """Create a minio client configured from the IDF_S3_* environment variables.

    Raises (via ``getenv``) if any required variable is unset. Transient 5xx
    responses are retried up to 5 times with backoff.
    """
    return Minio(
        getenv('IDF_S3_SERVER').replace('https://', ''),  # Minio expects host[:port], not a URL
        access_key=getenv('IDF_S3_ACCESS_KEY'),
        secret_key=getenv('IDF_S3_SECRET_KEY'),
        http_client=urllib3.PoolManager(
            num_pools=10,
            timeout=urllib3.Timeout.DEFAULT_TIMEOUT,
            retries=urllib3.Retry(
                total=5,
                backoff_factor=0.2,
                status_forcelist=[500, 502, 503, 504],
            ),
        ),
    )
def _download_files(
pipeline_id: int,
*,
@@ -131,7 +149,7 @@ def _upload_files(
try:
if has_file:
obj_name = f'{pipeline_id}/{artifact_type.value}/{job_name.split(" ")[0]}/{job_id}.zip'
obj_name = f'{pipeline_id}/{artifact_type.value}/{job_name.rsplit(" ", maxsplit=1)[0]}/{job_id}.zip'
print(f'Created archive file: {job_id}.zip, uploading as {obj_name}')
client.fput_object(getenv('IDF_S3_BUCKET'), obj_name, f'{job_id}.zip')
@@ -168,19 +186,7 @@ if __name__ == '__main__':
args = parser.parse_args()
client = Minio(
getenv('IDF_S3_SERVER').replace('https://', ''),
access_key=getenv('IDF_S3_ACCESS_KEY'),
secret_key=getenv('IDF_S3_SECRET_KEY'),
http_client=urllib3.PoolManager(
timeout=urllib3.Timeout.DEFAULT_TIMEOUT,
retries=urllib3.Retry(
total=5,
backoff_factor=0.2,
status_forcelist=[500, 502, 503, 504],
),
),
)
client = get_minio_client()
ci_pipeline_id = args.pipeline_id or getenv('CI_PIPELINE_ID') # required
if args.action == 'download':

View File

@@ -60,6 +60,7 @@ build_stage2() {
--build-dir ${BUILD_DIR} \
--build-log ${BUILD_LOG_CMAKE} \
--size-file size.json \
--keep-going \
--collect-size-info size_info.txt \
--default-build-targets esp32 esp32s2 esp32s3 esp32c2 esp32c3 esp32c6 esp32h2 esp32p4
}

View File

@@ -1,54 +0,0 @@
#!/usr/bin/env python
# SPDX-FileCopyrightText: 2022 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
# internal use only
# check if expire time is set for all artifacts
import os

import yaml

# The script inspects the gitlab CI config of an ESP-IDF checkout, so IDF_PATH
# must point at one.
IDF_PATH = os.getenv('IDF_PATH')
if not IDF_PATH:
    print('Please set IDF_PATH before running this script')
    raise SystemExit(-1)

# Root gitlab CI config; its `include` list pulls in the remaining yaml files.
GITLAB_CONFIG_FILE = os.path.join(IDF_PATH, '.gitlab-ci.yml')
def check_artifacts_expire_time() -> None:
    """Verify every CI job that defines artifacts also sets ``expire_in``.

    Loads the root gitlab config plus all files from its ``include`` list,
    prints each job's expire time, and exits with code -2 (after listing the
    offenders) when any job's artifacts lack an expire time.
    """
    with open(GITLAB_CONFIG_FILE) as f:
        config = yaml.load(f, Loader=yaml.FullLoader)

    # Merge in every file listed under `include` of the root config.
    if 'include' in config:
        for included_file in config['include']:
            with open(os.path.join(IDF_PATH or '', included_file)) as f:
                config.update(yaml.load(f, Loader=yaml.FullLoader))

    print('expire time for jobs:')

    offenders = []
    for job_name in sorted(config.keys()):
        try:
            if 'expire_in' not in config[job_name]['artifacts']:
                offenders.append(job_name)
            else:
                print('{}: {}'.format(job_name, config[job_name]['artifacts']['expire_in']))
        except (KeyError, TypeError):
            # this is not a job, or the job does not have artifacts
            pass

    if offenders:
        print('\n\nThe following jobs did not set expire time for its artifacts')
        for job_name in offenders:
            print(job_name)
        raise SystemExit(-2)
if __name__ == '__main__':
    # Entry point: raises SystemExit(-2) when any job's artifacts miss `expire_in`.
    check_artifacts_expire_time()

View File

@@ -13,7 +13,7 @@ from pathlib import Path
from typing import Dict, List, Optional, Tuple
import yaml
from idf_ci_utils import IDF_PATH
from idf_ci_utils import IDF_PATH, get_all_manifest_files
YES = u'\u2713'
NO = u'\u2717'
@@ -148,9 +148,7 @@ def check_readme(
'all',
recursive=True,
exclude_list=exclude_dirs or [],
manifest_files=[
str(p) for p in Path(IDF_PATH).glob('**/.build-test-rules.yml')
],
manifest_files=get_all_manifest_files(),
default_build_targets=SUPPORTED_TARGETS + extra_default_build_targets,
)
)
@@ -304,9 +302,7 @@ def check_test_scripts(
'all',
recursive=True,
exclude_list=exclude_dirs or [],
manifest_files=[
str(p) for p in Path(IDF_PATH).glob('**/.build-test-rules.yml')
],
manifest_files=get_all_manifest_files(),
default_build_targets=SUPPORTED_TARGETS + extra_default_build_targets,
)
)
@@ -382,7 +378,7 @@ def sort_yaml(files: List[str]) -> None:
def check_exist() -> None:
exit_code = 0
config_files = [str(p) for p in Path(IDF_PATH).glob('**/.build-test-rules.yml')]
config_files = get_all_manifest_files()
for file in config_files:
if 'managed_components' in Path(file).parts:
continue

View File

@@ -1,157 +0,0 @@
#!/usr/bin/env python
#
# SPDX-FileCopyrightText: 2021-2022 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""
Check if all rules in rules.yml used or not in CI yaml files.
"""
import argparse
import os
import re
import sys
from copy import deepcopy
from typing import Any, Dict, List, Optional, Set, Union
import yaml
from idf_ci_utils import IDF_PATH
ROOT_YML_FP = os.path.join(IDF_PATH, '.gitlab-ci.yml')
def load_yaml(file_path: str) -> Any:
    """Parse a YAML file with the full loader and return the resulting object.

    Uses a context manager so the file handle is closed deterministically —
    the original passed a bare ``open()`` into ``yaml.load`` and leaked the
    handle until garbage collection.
    """
    with open(file_path) as f:
        return yaml.load(f, Loader=yaml.FullLoader)
class YMLConfig:
    """Aggregated view of the root gitlab CI yaml plus all of its included files."""

    def __init__(self, root_yml_file_path: str) -> None:
        self._config: Optional[Dict] = None  # lazy cache for `config`
        self._all_extends: Optional[Set] = None  # lazy cache for `all_extends`

        self.root_yml = load_yaml(root_yml_file_path)
        assert self.root_yml

    @staticmethod
    def _list(str_or_list: Union[str, List]) -> List:
        # Normalize a scalar-or-list yaml value into a list.
        if isinstance(str_or_list, str):
            return [str_or_list]
        if isinstance(str_or_list, list):
            return str_or_list

        raise ValueError(
            'Wrong type: {}. Only supports str or list.'.format(type(str_or_list))
        )

    @property
    def config(self) -> Dict:
        """All jobs/anchors from the files listed under `include` of the root yaml."""
        if self._config:
            return self._config

        merged = dict()
        for included in self.root_yml['include']:
            merged.update(load_yaml(os.path.join(IDF_PATH, included)))

        self._config = merged
        return self._config

    @property
    def all_extends(self) -> Set:
        """Every '.rules:*' anchor referenced via `extends` anywhere in the config."""
        if self._all_extends:
            return self._all_extends

        self._all_extends = {
            rule
            for job in self.config.values()
            if 'extends' in job
            for rule in self._list(job['extends'])
            if rule.startswith('.rules:')
        }
        return self._all_extends

    def exists(self, key: str) -> bool:
        """True when `key` is one of the rule anchors actually used by the config."""
        return key in self.all_extends
YML_CONFIG = YMLConfig(ROOT_YML_FP)
def get_needed_rules() -> Set[str]:
    """Return a deep copy of every '.rules:*' anchor referenced via ``extends`` in the CI config."""
    return deepcopy(YML_CONFIG.all_extends)
def validate_needed_rules(rules_yml: 'os.PathLike[str]') -> int:
    """Cross-check the rule anchors defined in ``rules_yml`` against those used in CI.

    Prints a warning for each anchor that is defined but never referenced via
    ``extends`` and an error for each referenced anchor missing from the file.
    Returns 0 on success, 1 when any referenced anchor is missing.
    """
    res = 0
    needed_rules = deepcopy(YML_CONFIG.all_extends)

    with open(rules_yml) as fr:
        for index, line in enumerate(fr):
            if not line.startswith('.rules:'):
                continue

            key = line.strip().rsplit(':', 1)[0]
            if YML_CONFIG.exists(key):
                needed_rules.remove(key)
            else:
                print(
                    '{}:{}:WARNING:rule "{}" unused'.format(rules_yml, index, key)
                )

    if needed_rules:
        for item in needed_rules:
            print('ERROR: missing rule: "{}"'.format(item))
        res = 1

    if res == 0:
        print('Pass')

    return res
def parse_submodule_paths(
    gitsubmodules: str = os.path.join(IDF_PATH, '.gitmodules')
) -> List[str]:
    """Return the ``path = ...`` value of every submodule entry in .gitmodules."""
    path_regex = re.compile(r'^\s+path = (.+)$', re.MULTILINE)
    with open(gitsubmodules) as f:
        content = f.read()

    # The pattern has exactly one capture group, so findall() yields the paths directly.
    return path_regex.findall(content)
def validate_submodule_patterns() -> int:
    """Verify that the ``.patterns-submodule`` list matches the real submodules.

    :return: 0 when the pattern list is up to date, 1 otherwise
    """
    actual = sorted(['.gitmodules'] + parse_submodule_paths())
    declared = sorted(
        YML_CONFIG.config.get('.patterns-submodule', [])
    )

    if actual == declared:
        return 0

    # report the exact delta so the pattern list can be fixed by hand
    print('please update the pattern ".patterns-submodule"')
    should_remove = set(declared) - set(actual)
    if should_remove:
        print(f'- should remove: {should_remove}')
    should_add = set(actual) - set(declared)
    if should_add:
        print(f'- should add: {should_add}')

    return 1
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        'rules_yml',
        nargs='?',
        default=os.path.join(IDF_PATH, '.gitlab', 'ci', 'rules.yml'),
        help='rules.yml file path',
    )
    args = parser.parse_args()

    # Run both validators; the script fails if either reports a problem.
    exit_code = 0
    if validate_needed_rules(args.rules_yml):
        exit_code = 1
    if validate_submodule_patterns():
        exit_code = 1

    sys.exit(exit_code)

View File

@@ -39,7 +39,7 @@ def check(pattern_yml: str, exclude_list: str) -> Tuple[Set, Set]:
git_files = get_git_files(os.path.join(IDF_PATH, 'tools'), full_path=True)
for f in git_files:
f = Path(f)
if f in rules_files_set or f in exclude_files_set:
if f in rules_files_set or f in exclude_files_set or str(f).startswith(os.path.join(IDF_PATH, 'tools', 'test_apps')):
continue
missing_files.add(os.path.relpath(f, IDF_PATH))

View File

@@ -10,13 +10,15 @@ import os
import sys
import typing as t
import unittest
from collections import defaultdict
from pathlib import Path
import yaml
from idf_build_apps import LOGGER, App, build_apps, find_apps, setup_logging
from idf_build_apps.constants import SUPPORTED_TARGETS
from idf_ci_utils import IDF_PATH
from dynamic_pipelines.constants import DEFAULT_TEST_PATHS
from idf_build_apps import build_apps, setup_logging
from idf_build_apps.utils import semicolon_separated_str_to_list
from idf_pytest.constants import (DEFAULT_BUILD_TEST_RULES_FILEPATH, DEFAULT_CONFIG_RULES_STR,
DEFAULT_FULL_BUILD_TEST_FILEPATTERNS, DEFAULT_IGNORE_WARNING_FILEPATH)
from idf_pytest.script import get_all_apps
CI_ENV_VARS = {
'EXTRA_CFLAGS': '-Werror -Werror=deprecated-declarations -Werror=unused-variable '
@@ -27,118 +29,6 @@ CI_ENV_VARS = {
}
def get_pytest_apps(
    paths: t.List[str],
    target: str,
    config_rules_str: t.List[str],
    marker_expr: str,
    filter_expr: str,
    preserve_all: bool = False,
    extra_default_build_targets: t.Optional[t.List[str]] = None,
    modified_components: t.Optional[t.List[str]] = None,
    modified_files: t.Optional[t.List[str]] = None,
    ignore_app_dependencies_filepatterns: t.Optional[t.List[str]] = None,
) -> t.List[App]:
    """Collect the apps that are referenced by pytest cases under ``paths``.

    Finds all pytest cases for ``target`` (filtered by ``marker_expr`` and
    ``filter_expr``), then runs ``find_apps`` over the app directories those
    cases use. Apps whose (path, config) pair is not referenced by any pytest
    case get ``preserve = False`` unless ``preserve_all`` is set.

    :raises RuntimeError: when no pytest case references any app
    """
    from idf_pytest.script import get_pytest_cases

    pytest_cases = get_pytest_cases(paths, target, marker_expr, filter_expr)

    _paths: t.Set[str] = set()
    # app path -> set of sdkconfig names actually used by the test cases
    test_related_app_configs = defaultdict(set)
    for case in pytest_cases:
        for app in case.apps:
            _paths.add(app.path)
            test_related_app_configs[app.path].add(app.config)

    if not extra_default_build_targets:
        extra_default_build_targets = []

    app_dirs = list(_paths)
    if not app_dirs:
        raise RuntimeError('No apps found')

    LOGGER.info(f'Found {len(app_dirs)} apps')
    app_dirs.sort()

    apps = find_apps(
        app_dirs,
        target=target,
        build_dir='build_@t_@w',
        config_rules_str=config_rules_str,
        build_log_path='build_log.txt',
        size_json_path='size.json',
        check_warnings=True,
        manifest_rootpath=IDF_PATH,
        manifest_files=[str(p) for p in Path(IDF_PATH).glob('**/.build-test-rules.yml')],
        default_build_targets=SUPPORTED_TARGETS + extra_default_build_targets,
        modified_components=modified_components,
        modified_files=modified_files,
        ignore_app_dependencies_filepatterns=ignore_app_dependencies_filepatterns,
    )

    for app in apps:
        # keep binaries only for apps that a pytest case will actually run
        is_test_related = app.config_name in test_related_app_configs[app.app_dir]
        if not preserve_all and not is_test_related:
            app.preserve = False

        if app.target == 'linux':
            app._size_json_path = None  # no esp_idf_size for linux target

    return apps  # type: ignore
def get_cmake_apps(
    paths: t.List[str],
    target: str,
    config_rules_str: t.List[str],
    preserve_all: bool = False,
    extra_default_build_targets: t.Optional[t.List[str]] = None,
    modified_components: t.Optional[t.List[str]] = None,
    modified_files: t.Optional[t.List[str]] = None,
    ignore_app_dependencies_filepatterns: t.Optional[t.List[str]] = None,
) -> t.List[App]:
    """Collect build candidates for apps that are NOT driven by pytest scripts.

    Runs ``find_apps`` recursively over ``paths`` and drops every app that is
    already covered by a pytest case (those are built by ``get_pytest_apps``).

    :return: list of apps to build
    """
    from idf_pytest.constants import PytestApp
    from idf_pytest.script import get_pytest_cases

    # ``find_apps`` concatenates this with SUPPORTED_TARGETS below and
    # ``list + None`` raises TypeError; get_pytest_apps has the same guard
    if not extra_default_build_targets:
        extra_default_build_targets = []

    apps = find_apps(
        paths,
        recursive=True,
        target=target,
        build_dir='build_@t_@w',
        config_rules_str=config_rules_str,
        build_log_path='build_log.txt',
        size_json_path='size.json',
        check_warnings=True,
        preserve=False,
        manifest_rootpath=IDF_PATH,
        manifest_files=[str(p) for p in Path(IDF_PATH).glob('**/.build-test-rules.yml')],
        default_build_targets=SUPPORTED_TARGETS + extra_default_build_targets,
        modified_components=modified_components,
        modified_files=modified_files,
        ignore_app_dependencies_filepatterns=ignore_app_dependencies_filepatterns,
    )

    apps_for_build = []
    pytest_cases_apps = [app for case in get_pytest_cases(paths, target) for app in case.apps]
    for app in apps:
        if preserve_all:  # relpath
            app.preserve = True

        # skip apps that already belong to a pytest case
        if PytestApp(os.path.realpath(app.app_dir), app.target, app.config_name) in pytest_cases_apps:
            LOGGER.debug('Skipping build app with pytest scripts: %s', app)
            continue

        if app.target == 'linux':
            app._size_json_path = None  # no esp_idf_size for linux target

        apps_for_build.append(app)

    return apps_for_build
APPS_BUILD_PER_JOB = 30
def main(args: argparse.Namespace) -> None:
extra_default_build_targets: t.List[str] = []
if args.default_build_test_rules:
@@ -148,39 +38,24 @@ def main(args: argparse.Namespace) -> None:
if configs:
extra_default_build_targets = configs.get('extra_default_build_targets') or []
if args.pytest_apps:
LOGGER.info('Only build apps with pytest scripts')
apps = get_pytest_apps(
args.paths,
args.target,
args.config,
args.marker_expr,
args.filter_expr,
args.preserve_all,
extra_default_build_targets,
args.modified_components,
args.modified_files,
args.ignore_app_dependencies_filepatterns,
)
else:
LOGGER.info('build apps. will skip pytest apps with pytest scripts')
apps = get_cmake_apps(
args.paths,
args.target,
args.config,
args.preserve_all,
extra_default_build_targets,
args.modified_components,
args.modified_files,
args.ignore_app_dependencies_filepatterns,
)
LOGGER.info('Found %d apps after filtering', len(apps))
LOGGER.info(
'Suggest setting the parallel count to %d for this build job',
len(apps) // APPS_BUILD_PER_JOB + 1,
test_related_apps, non_test_related_apps = get_all_apps(
args.paths,
args.target,
config_rules_str=args.config,
marker_expr=args.marker_expr,
filter_expr=args.filter_expr,
preserve_all=args.preserve_all,
extra_default_build_targets=extra_default_build_targets,
modified_files=args.modified_components,
modified_components=args.modified_files,
ignore_app_dependencies_filepatterns=args.ignore_app_dependencies_filepatterns,
)
if args.pytest_apps:
apps = test_related_apps
else:
apps = non_test_related_apps
if args.extra_preserve_dirs:
for app in apps:
if app.preserve:
@@ -192,7 +67,7 @@ def main(args: argparse.Namespace) -> None:
app.preserve = True
res = build_apps(
apps,
sorted(apps),
parallel_count=args.parallel_count,
parallel_index=args.parallel_index,
dry_run=False,
@@ -206,12 +81,10 @@ def main(args: argparse.Namespace) -> None:
modified_components=args.modified_components,
modified_files=args.modified_files,
ignore_app_dependencies_filepatterns=args.ignore_app_dependencies_filepatterns,
junitxml=args.junitxml,
)
if isinstance(res, tuple):
sys.exit(res[0])
else:
sys.exit(res)
sys.exit(res)
if __name__ == '__main__':
@@ -219,7 +92,7 @@ if __name__ == '__main__':
description='Build all the apps for different test types. Will auto remove those non-test apps binaries',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument('paths', nargs='+', help='Paths to the apps to build.')
parser.add_argument('paths', nargs='*', help='Paths to the apps to build.')
parser.add_argument(
'-t',
'--target',
@@ -228,7 +101,7 @@ if __name__ == '__main__':
)
parser.add_argument(
'--config',
default=['sdkconfig.ci=default', 'sdkconfig.ci.*=', '=default'],
default=DEFAULT_CONFIG_RULES_STR,
nargs='+',
help='Adds configurations (sdkconfig file names) to build. This can either be '
'FILENAME[=NAME] or FILEPATTERN. FILENAME is the name of the sdkconfig file, '
@@ -272,7 +145,7 @@ if __name__ == '__main__':
)
parser.add_argument(
'--ignore-warning-file',
default=os.path.join(IDF_PATH, 'tools', 'ci', 'ignore_build_warnings.txt'),
default=DEFAULT_IGNORE_WARNING_FILEPATH,
type=argparse.FileType('r'),
help='Ignore the warning strings in the specified file. Each line should be a regex string.',
)
@@ -290,7 +163,8 @@ if __name__ == '__main__':
parser.add_argument(
'--pytest-apps',
action='store_true',
help='Only build apps with pytest scripts. Will build apps without pytest scripts if this flag is unspecified.',
help='Only build apps required by pytest scripts. '
'Will build non-test-related apps if this flag is unspecified.',
)
parser.add_argument(
'-m',
@@ -307,7 +181,7 @@ if __name__ == '__main__':
)
parser.add_argument(
'--default-build-test-rules',
default=os.path.join(IDF_PATH, '.gitlab', 'ci', 'default-build-test-rules.yml'),
default=DEFAULT_BUILD_TEST_RULES_FILEPATH,
help='default build test rules config file',
)
parser.add_argument(
@@ -318,69 +192,64 @@ if __name__ == '__main__':
)
parser.add_argument(
'--modified-components',
nargs='*',
default=None,
help='space-separated list which specifies the modified components. app with `depends_components` set in the '
'corresponding manifest files would only be built if depends on any of the specified components.',
type=semicolon_separated_str_to_list,
help='semicolon-separated string which specifies the modified components. '
'app with `depends_components` set in the corresponding manifest files would only be built '
'if depends on any of the specified components. '
'If set to "", the value would be considered as None. '
'If set to ";", the value would be considered as an empty list',
)
parser.add_argument(
'--modified-files',
nargs='*',
default=None,
help='space-separated list which specifies the modified files. app with `depends_filepatterns` set in the '
'corresponding manifest files would only be built if any of the specified file pattern matches any of the '
'specified modified files.',
type=semicolon_separated_str_to_list,
help='semicolon-separated string which specifies the modified files. '
'app with `depends_filepatterns` set in the corresponding manifest files would only be built '
'if any of the specified file pattern matches any of the specified modified files. '
'If set to "", the value would be considered as None. '
'If set to ";", the value would be considered as an empty list',
)
parser.add_argument(
'-if',
'--ignore-app-dependencies-filepatterns',
nargs='*',
default=None,
help='space-separated list which specifies the file patterns used for ignoring checking the app dependencies. '
'The `depends_components` and `depends_filepatterns` set in the manifest files will be ignored when any of the '
'specified file patterns matches any of the modified files. Must be used together with --modified-files',
type=semicolon_separated_str_to_list,
help='semicolon-separated string which specifies the file patterns used for '
'ignoring checking the app dependencies. '
'The `depends_components` and `depends_filepatterns` set in the manifest files '
'will be ignored when any of the specified file patterns matches any of the modified files. '
'Must be used together with --modified-files. '
'If set to "", the value would be considered as None. '
'If set to ";", the value would be considered as an empty list',
)
parser.add_argument(
'--junitxml',
default='build_summary_@p.xml',
help='Path to the junitxml file. If specified, the junitxml file will be generated',
)
arguments = parser.parse_args()
setup_logging(arguments.verbose)
# set default paths
if not arguments.paths:
arguments.paths = DEFAULT_TEST_PATHS
# skip setting flags in CI
if not arguments.skip_setting_flags and not os.getenv('CI_JOB_ID'):
for _k, _v in CI_ENV_VARS.items():
os.environ[_k] = _v
LOGGER.info(f'env var {_k} set to "{_v}"')
print(f'env var {_k} set to "{_v}"')
if os.getenv('IS_MR_PIPELINE') == '0' or os.getenv('BUILD_AND_TEST_ALL_APPS') == '1':
# if it's not MR pipeline or env var BUILD_AND_TEST_ALL_APPS=1,
# remove component dependency related arguments
if 'modified_components' in arguments:
arguments.modified_components = None
if 'modified_files' in arguments:
arguments.modified_files = None
arguments.modified_components = None
arguments.modified_files = None
arguments.ignore_app_dependencies_filepatterns = None
    # file patterns to trigger full build
if 'modified_components' in arguments and not arguments.ignore_app_dependencies_filepatterns:
arguments.ignore_app_dependencies_filepatterns = [
# tools
'tools/cmake/**/*',
'tools/tools.json',
# components
'components/cxx/**/*',
'components/esp_common/**/*',
'components/esp_hw_support/**/*',
'components/esp_rom/**/*',
'components/esp_system/**/*',
'components/esp_timer/**/*',
'components/freertos/**/*',
'components/hal/**/*',
'components/heap/**/*',
'components/log/**/*',
'components/newlib/**/*',
'components/riscv/**/*',
'components/soc/**/*',
'components/xtensa/**/*',
]
    # default file patterns to trigger full build
if arguments.modified_files is not None and arguments.ignore_app_dependencies_filepatterns is None:
arguments.ignore_app_dependencies_filepatterns = DEFAULT_FULL_BUILD_TEST_FILEPATTERNS
main(arguments)

View File

@@ -0,0 +1,9 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import os
import sys
# Make the ``tools`` directory importable so sibling helper modules
# (two levels above this file) can be imported by scripts in this package.
tools_dir = os.path.realpath(os.path.join(os.path.dirname(__file__), '..', '..'))
if tools_dir not in sys.path:
    sys.path.append(tools_dir)

View File

@@ -0,0 +1,31 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import os
from idf_ci_utils import IDF_PATH
# use relative path to avoid absolute path in pipeline
DEFAULT_TEST_PATHS = [
    'examples',
    os.path.join('tools', 'test_apps'),
    'components',
]

# chunk sizes used to decide the ``parallel`` count of generated child jobs
DEFAULT_APPS_BUILD_PER_JOB = 60
DEFAULT_CASES_TEST_PER_JOB = 60

# output locations of the generated child pipeline yaml files
DEFAULT_BUILD_CHILD_PIPELINE_FILEPATH = os.path.join(IDF_PATH, 'build_child_pipeline.yml')
DEFAULT_TARGET_TEST_CHILD_PIPELINE_FILEPATH = os.path.join(IDF_PATH, 'target_test_child_pipeline.yml')

# job names emitted into the generated build child pipeline
TEST_RELATED_BUILD_JOB_NAME = 'build_test_related_apps'
NON_TEST_RELATED_BUILD_JOB_NAME = 'build_non_test_related_apps'

# first line of the MR note that carries the dynamic pipeline reports
COMMENT_START_MARKER = '### Dynamic Pipeline Report'

# intermediate files exchanged between pipeline stages
TEST_RELATED_APPS_FILENAME = 'test_related_apps.txt'
NON_TEST_RELATED_APPS_FILENAME = 'non_test_related_apps.txt'
TEST_RELATED_APPS_DOWNLOAD_URLS_FILENAME = 'test_related_apps_download_urls.yml'

# html template used by the report generators
REPORT_TEMPLATE_FILEPATH = os.path.join(
    IDF_PATH, 'tools', 'ci', 'dynamic_pipelines', 'templates', 'report.template.html'
)

View File

@@ -0,0 +1,169 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import inspect
import typing as t
from dataclasses import dataclass
from xml.etree.ElementTree import Element
import yaml
class Job:
    """One GitLab CI job description that can be serialized into yaml.

    Any extra keyword arguments are attached as attributes and therefore end
    up as additional keys in the dict produced by :meth:`to_dict`.
    """

    def __init__(
        self,
        *,
        name: str,
        extends: t.Optional[t.List[str]] = None,
        tags: t.Optional[t.List[str]] = None,
        stage: t.Optional[str] = None,
        parallel: int = 1,
        # fixed implicit-Optional annotation: the default is None, so the
        # declared type must be Optional (PEP 484)
        variables: t.Optional[t.Dict[str, str]] = None,
        script: t.Optional[t.List[str]] = None,
        before_script: t.Optional[t.List[str]] = None,
        after_script: t.Optional[t.List[str]] = None,
        needs: t.Optional[t.List[str]] = None,
        **kwargs: t.Any,
    ) -> None:
        self.name = name
        self.extends = extends
        self.tags = tags
        self.stage = stage
        self.parallel = parallel
        self.variables = variables or {}
        self.script = script
        self.before_script = before_script
        self.after_script = after_script
        self.needs = needs

        # attach any extra fields verbatim (e.g. ``artifacts``, ``rules``)
        for k, v in kwargs.items():
            setattr(self, k, v)

    def __str__(self) -> str:
        return yaml.dump(self.to_dict())  # type: ignore

    def set_variable(self, key: str, value: str) -> None:
        """Set one entry in the job's ``variables`` mapping."""
        self.variables[key] = value

    def to_dict(self) -> t.Dict[str, t.Any]:
        """Serialize to ``{job_name: {key: value, ...}}``.

        Drops private attributes, methods, ``None`` values, the job name
        (it becomes the outer dict key) and ``parallel`` when it is 1.
        """
        res = {}
        for k, v in inspect.getmembers(self):
            if k.startswith('_'):
                continue

            # name is the dict key
            if k == 'name':
                continue

            # parallel 1 is not allowed
            if k == 'parallel' and v == 1:
                continue

            if v is None:
                continue

            if inspect.ismethod(v) or inspect.isfunction(v):
                continue

            res[k] = v

        return {self.name: res}
class EmptyJob(Job):
    """Placeholder job that keeps a generated child pipeline valid when no
    real jobs exist."""

    def __init__(
        self,
        *,
        name: t.Optional[str] = None,
        tags: t.Optional[t.List[str]] = None,
        stage: t.Optional[str] = None,
        before_script: t.Optional[t.List[str]] = None,
        after_script: t.Optional[t.List[str]] = None,
        **kwargs: t.Any,
    ) -> None:
        # fall back to fixed defaults for anything not provided
        resolved_name = name if name else 'fake_pass_job'
        resolved_tags = tags if tags else ['build', 'shiny']
        resolved_stage = stage if stage else 'build'
        super().__init__(
            name=resolved_name,
            tags=resolved_tags,
            stage=resolved_stage,
            script=['echo "This is a fake job to pass the pipeline"'],
            before_script=before_script if before_script else [],
            after_script=after_script if after_script else [],
            **kwargs,
        )
class BuildJob(Job):
    """Job based on ``.dynamic_build_template``, running in the build stage."""

    def __init__(
        self,
        *,
        extends: t.Optional[t.List[str]] = None,
        tags: t.Optional[t.List[str]] = None,
        stage: t.Optional[str] = None,
        **kwargs: t.Any,
    ) -> None:
        resolved_extends = extends if extends else ['.dynamic_build_template']
        resolved_tags = tags if tags else ['build', 'shiny']
        super().__init__(
            extends=resolved_extends,
            tags=resolved_tags,
            stage=stage if stage else 'build',
            **kwargs,
        )
class TargetTestJob(Job):
    """Job based on ``.dynamic_target_test_template``, running in target_test."""

    def __init__(
        self,
        *,
        extends: t.Optional[t.List[str]] = None,
        stage: t.Optional[str] = None,
        **kwargs: t.Any,
    ) -> None:
        resolved_extends = extends if extends else ['.dynamic_target_test_template']
        super().__init__(
            extends=resolved_extends,
            stage=stage if stage else 'target_test',
            **kwargs,
        )
@dataclass
class TestCase:
    """One junit test case parsed from a junitxml report."""

    name: str
    file: str
    time: float
    failure: t.Optional[str] = None
    skipped: t.Optional[str] = None
    ci_job_url: t.Optional[str] = None

    @property
    def is_failure(self) -> bool:
        return self.failure is not None

    @property
    def is_skipped(self) -> bool:
        return self.skipped is not None

    @property
    def is_success(self) -> bool:
        return not (self.is_failure or self.is_skipped)

    @classmethod
    def from_test_case_node(cls, node: Element) -> t.Optional['TestCase']:
        """Build a TestCase from a ``<testcase>`` XML node.

        :return: the parsed case, or None when the node has no ``name`` attribute
        """
        attrs = node.attrib
        if 'name' not in attrs:
            print('WARNING: Node Invalid: ', node)
            return None

        failure_node = node.find('failure')
        skipped_node = node.find('skipped')
        return cls(
            name=attrs['name'],
            file=attrs.get('file'),
            time=float(attrs.get('time') or 0),
            ci_job_url=attrs.get('ci_job_url') or '',
            failure=failure_node.attrib['message'] if failure_node is not None else None,
            skipped=skipped_node.attrib['message'] if skipped_node is not None else None,
        )  # type: ignore

View File

@@ -0,0 +1,276 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import abc
import html
import os
import re
import typing as t
import yaml
from artifacts_handler import ArtifactType
from gitlab_api import Gitlab
from idf_build_apps import App
from idf_build_apps.constants import BuildStatus
from idf_ci.uploader import AppUploader
from prettytable import PrettyTable
from .constants import COMMENT_START_MARKER, REPORT_TEMPLATE_FILEPATH, TEST_RELATED_APPS_DOWNLOAD_URLS_FILENAME
from .models import TestCase
class ReportGenerator:
    """Base class for posting HTML reports as GitLab MR comments.

    Subclasses implement :meth:`_get_report_str`; :meth:`post_report` writes
    the report file and creates/updates a single MR note linking to it.
    """

    # NOTE(review): '[^####]+' is a character class, equivalent to '[^#]+' —
    # the match stops at the first '#', not at the next '#### ' heading.
    # Confirm this is the intended section-replacement boundary.
    REGEX_PATTERN = '#### {}[^####]+'

    def __init__(self, project_id: int, mr_iid: int, pipeline_id: int, *, title: str):
        gl_project = Gitlab(project_id).project
        if mr_iid is not None:
            self.mr = gl_project.mergerequests.get(mr_iid)
        else:
            self.mr = None
        self.pipeline_id = pipeline_id
        self.title = title
        # e.g. 'Build Report' -> 'build_report.html'
        self.output_filepath = self.title.lower().replace(' ', '_') + '.html'

    @staticmethod
    def get_download_link_for_url(url: str) -> str:
        # empty string for apps without an uploaded artifact
        if url:
            return f'<a href="{url}">Download</a>'
        return ''

    def generate_html_report(self, table_str: str) -> str:
        """Embed ``table_str`` into the report HTML template."""
        # we're using bootstrap table
        table_str = table_str.replace('<table>', '<table data-toggle="table" data-search="true">')
        with open(REPORT_TEMPLATE_FILEPATH) as fr:
            template = fr.read()
        return template.replace('{{title}}', self.title).replace('{{table}}', table_str)

    @staticmethod
    def table_to_html_str(table: PrettyTable) -> str:
        # unescape so embedded <a> download links render as links
        return html.unescape(table.get_html_string())  # type: ignore

    @abc.abstractmethod
    def _get_report_str(self) -> str:
        """Return the full report as an HTML string. Implemented by subclasses."""
        raise NotImplementedError

    def post_report(self, job_id: int, commit_id: str) -> None:
        """Write the report file and post/update the MR note linking to it.

        :param job_id: CI job id whose artifacts will host the report file
        :param commit_id: commit the report was generated for (shown in the note)
        """
        # report in html format, otherwise will exceed the limit
        with open(self.output_filepath, 'w') as fw:
            fw.write(self._get_report_str())

        # for example, {URL}/-/esp-idf/-/jobs/{id}/artifacts/list_job_84.txt
        # CI_PAGES_URL is {URL}/esp-idf, which missed one `-`
        url = os.getenv('CI_PAGES_URL', '').replace('esp-idf', '-/esp-idf')
        comment = f'''#### {self.title}
Full {self.title} here: {url}/-/jobs/{job_id}/artifacts/{self.output_filepath} (with commit {commit_id})
'''
        if self.mr is None:
            print('No MR found, skip posting comment')
            return

        # update the existing dynamic-pipeline note in place when one exists,
        # otherwise create a new note
        for note in self.mr.notes.list(iterator=True):
            if note.body.startswith(COMMENT_START_MARKER):
                updated_str = re.sub(self.REGEX_PATTERN.format(self.title), comment, note.body)
                if updated_str == note.body:  # not updated
                    updated_str = f'{note.body.strip()}\n\n{comment}'
                note.body = updated_str
                note.save()
                break
        else:
            new_comment = f'''{COMMENT_START_MARKER}
{comment}'''
            self.mr.notes.create({'body': new_comment})
class BuildReportGenerator(ReportGenerator):
    """Generates the build report: failed / built / skipped apps with artifact links."""

    def __init__(
        self,
        project_id: int,
        mr_iid: int,
        pipeline_id: int,
        *,
        title: str = 'Build Report',
        apps: t.List[App],
    ):
        super().__init__(project_id, mr_iid, pipeline_id, title=title)
        self.apps = apps
        # yaml file mapping each built test-related app to its presigned urls
        self.apps_presigned_url_filepath = TEST_RELATED_APPS_DOWNLOAD_URLS_FILENAME

    def _get_report_str(self) -> str:
        """Build the HTML report body from the app build results."""
        if not self.apps:
            print('No apps found, skip generating build report')
            return 'No Apps Built'

        uploader = AppUploader(self.pipeline_id)
        table_str = ''

        # section 1: apps that failed to build, with their build logs
        failed_apps = [app for app in self.apps if app.build_status == BuildStatus.FAILED]
        if failed_apps:
            table_str += '<h2>Failed Apps</h2>'
            failed_apps_table = PrettyTable()
            failed_apps_table.field_names = [
                'App Dir',
                'Build Dir',
                'Failed Reason',
                'Build Log',
            ]
            for app in failed_apps:
                failed_apps_table.add_row(
                    [
                        app.app_dir,
                        app.build_dir,
                        app.build_comment or '',
                        self.get_download_link_for_url(uploader.get_app_presigned_url(app, ArtifactType.LOGS)),
                    ]
                )
            table_str += self.table_to_html_str(failed_apps_table)

        # section 2: successfully built apps that tests will run (preserve=True)
        built_test_related_apps = [app for app in self.apps if app.build_status == BuildStatus.SUCCESS and app.preserve]
        if built_test_related_apps:
            table_str += '<h2>Built Apps (Test Related)</h2>'
            built_apps_table = PrettyTable()
            built_apps_table.field_names = [
                'App Dir',
                'Build Dir',
                'Bin Files with Build Log (without map and elf)',
                'Map and Elf Files',
            ]
            app_presigned_urls_dict: t.Dict[str, t.Dict[str, str]] = {}
            for app in built_test_related_apps:
                # NOTE(review): keys are set with ``.value`` strings but looked
                # up below with enum members — relies on ArtifactType being a
                # str-based enum; confirm.
                _d = {
                    ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES.value: uploader.get_app_presigned_url(
                        app, ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES
                    ),
                    ArtifactType.MAP_AND_ELF_FILES.value: uploader.get_app_presigned_url(
                        app, ArtifactType.MAP_AND_ELF_FILES
                    ),
                }
                built_apps_table.add_row(
                    [
                        app.app_dir,
                        app.build_dir,
                        self.get_download_link_for_url(_d[ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES]),
                        self.get_download_link_for_url(_d[ArtifactType.MAP_AND_ELF_FILES]),
                    ]
                )
                app_presigned_urls_dict[app.build_path] = _d

            # also generate a yaml file that includes the apps and the presigned urls
            # for helping debugging locally
            with open(self.apps_presigned_url_filepath, 'w') as fw:
                yaml.dump(app_presigned_urls_dict, fw)

            table_str += self.table_to_html_str(built_apps_table)

        # section 3: successfully built apps not needed by any test (preserve=False)
        built_non_test_related_apps = [
            app for app in self.apps if app.build_status == BuildStatus.SUCCESS and not app.preserve
        ]
        if built_non_test_related_apps:
            table_str += '<h2>Built Apps (Non Test Related)</h2>'
            built_apps_table = PrettyTable()
            built_apps_table.field_names = [
                'App Dir',
                'Build Dir',
                'Build Log',
            ]
            for app in built_non_test_related_apps:
                built_apps_table.add_row(
                    [
                        app.app_dir,
                        app.build_dir,
                        self.get_download_link_for_url(uploader.get_app_presigned_url(app, ArtifactType.LOGS)),
                    ]
                )
            table_str += self.table_to_html_str(built_apps_table)

        # section 4: apps skipped by the build system (e.g. dependency filters)
        skipped_apps = [app for app in self.apps if app.build_status == BuildStatus.SKIPPED]
        if skipped_apps:
            table_str += '<h2>Skipped Apps</h2>'
            skipped_apps_table = PrettyTable()
            skipped_apps_table.field_names = ['App Dir', 'Build Dir', 'Skipped Reason', 'Build Log']
            for app in skipped_apps:
                skipped_apps_table.add_row(
                    [
                        app.app_dir,
                        app.build_dir,
                        app.build_comment or '',
                        self.get_download_link_for_url(uploader.get_app_presigned_url(app, ArtifactType.LOGS)),
                    ]
                )
            table_str += self.table_to_html_str(skipped_apps_table)

        return self.generate_html_report(table_str)
class TargetTestReportGenerator(ReportGenerator):
    """Generates the target test report from parsed junit test cases."""

    def __init__(
        self,
        project_id: int,
        mr_iid: int,
        pipeline_id: int,
        *,
        title: str = 'Target Test Report',
        test_cases: t.List[TestCase],
    ):
        super().__init__(project_id, mr_iid, pipeline_id, title=title)
        self.test_cases = test_cases

    def _get_report_str(self) -> str:
        """Render one HTML table per outcome: failed, skipped, succeeded."""
        report_parts: t.List[str] = []

        failed = [tc for tc in self.test_cases if tc.is_failure]
        if failed:
            report_parts.append('<h2>Failed Test Cases</h2>')
            table = PrettyTable()
            table.field_names = ['Test Case', 'Test Script File Path', 'Failure Reason', 'Job URL']
            for tc in failed:
                table.add_row([tc.name, tc.file, tc.failure, tc.ci_job_url])
            report_parts.append(self.table_to_html_str(table))

        skipped = [tc for tc in self.test_cases if tc.is_skipped]
        if skipped:
            report_parts.append('<h2>Skipped Test Cases</h2>')
            table = PrettyTable()
            table.field_names = ['Test Case', 'Test Script File Path', 'Skipped Reason']
            for tc in skipped:
                table.add_row([tc.name, tc.file, tc.skipped])
            report_parts.append(self.table_to_html_str(table))

        succeeded = [tc for tc in self.test_cases if tc.is_success]
        if succeeded:
            report_parts.append('<h2>Succeeded Test Cases</h2>')
            table = PrettyTable()
            table.field_names = ['Test Case', 'Test Script File Path', 'Job URL']
            for tc in succeeded:
                table.add_row([tc.name, tc.file, tc.ci_job_url])
            report_parts.append(self.table_to_html_str(table))

        return self.generate_html_report(''.join(report_parts))

View File

@@ -0,0 +1,25 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import os
import sys
# Root of the esp-idf checkout, four directory levels above this file.
IDF_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..', '..'))

# run this scripts only in idf path, to ensure the relative path is the same
os.chdir(IDF_PATH)

if 'IDF_PATH' not in os.environ:
    os.environ['IDF_PATH'] = IDF_PATH

# Make tools/, tools/ci/ and tools/ci/python_packages/ importable so these
# scripts can import project helpers without installing them.
tools_path = os.path.join(os.path.dirname(__file__), '..', '..', '..')
if tools_path not in sys.path:
    sys.path.append(tools_path)

tools_ci_path = os.path.join(os.path.dirname(__file__), '..', '..')
if tools_ci_path not in sys.path:
    sys.path.append(tools_ci_path)

tools_ci_python_packages_path = os.path.join(os.path.dirname(__file__), '..', '..', 'python_packages')
if tools_ci_python_packages_path not in sys.path:
    sys.path.append(tools_ci_python_packages_path)

View File

@@ -0,0 +1,73 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import argparse
import sys
import __init__ # noqa: F401 # inject the system path
from dynamic_pipelines.constants import TEST_RELATED_APPS_FILENAME
from idf_build_apps import build_apps, setup_logging
from idf_build_apps.utils import semicolon_separated_str_to_list
from idf_ci.app import import_apps_from_txt
from idf_pytest.constants import DEFAULT_IGNORE_WARNING_FILEPATH
if __name__ == '__main__':
    # Build the apps listed in a text file produced by the pipeline generator.
    # '@p' placeholders in output file names are expanded by idf-build-apps.
    parser = argparse.ArgumentParser(description='Build Apps for Dynamic Pipeline')
    parser.add_argument('app_list_file', default=TEST_RELATED_APPS_FILENAME, help='List of apps to build')
    parser.add_argument(
        '--build-verbose',
        action='store_true',
        help='Enable verbose output from build system.',
    )
    parser.add_argument('--parallel-count', default=1, type=int, help='Number of parallel build jobs.')
    parser.add_argument(
        '--parallel-index',
        default=1,
        type=int,
        help='Index (1-based) of the job, out of the number specified by --parallel-count.',
    )
    parser.add_argument(
        '--ignore-warning-file',
        default=DEFAULT_IGNORE_WARNING_FILEPATH,
        type=argparse.FileType('r'),
        help='Ignore the warning strings in the specified file. Each line should be a regex string.',
    )
    parser.add_argument(
        '--modified-components',
        type=semicolon_separated_str_to_list,
        help='semicolon-separated string which specifies the modified components. '
        'app with `depends_components` set in the corresponding manifest files would only be built '
        'if depends on any of the specified components. '
        'If set to "", the value would be considered as None. '
        'If set to ";", the value would be considered as an empty list',
    )
    parser.add_argument(
        '--collect-app-info',
        default='list_job_@p.txt',
        help='If specified, the test case name and app info json will be written to this file',
    )
    parser.add_argument(
        '--junitxml',
        default='build_summary_@p.xml',
        help='Path to the junitxml file. If specified, the junitxml file will be generated',
    )
    args = parser.parse_args()

    setup_logging(verbose=1)

    # exit with the build result so the CI job fails when any app fails
    sys.exit(
        build_apps(
            import_apps_from_txt(args.app_list_file),
            build_verbose=args.build_verbose,
            keep_going=True,
            ignore_warning_file=args.ignore_warning_file,
            modified_components=args.modified_components,
            check_app_dependencies=True,
            parallel_count=args.parallel_count,
            parallel_index=args.parallel_index,
            collect_size_info='size_info_@p.txt',
            collect_app_info=args.collect_app_info,
            junitxml=args.junitxml,
        )
    )

View File

@@ -0,0 +1,193 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""This file is used for generating the child pipeline for build jobs."""
import argparse
import os
import typing as t
import __init__ # noqa: F401 # inject the system path
import yaml
from dynamic_pipelines.constants import (DEFAULT_APPS_BUILD_PER_JOB, DEFAULT_BUILD_CHILD_PIPELINE_FILEPATH,
DEFAULT_TEST_PATHS, NON_TEST_RELATED_APPS_FILENAME,
NON_TEST_RELATED_BUILD_JOB_NAME, TEST_RELATED_APPS_FILENAME,
TEST_RELATED_BUILD_JOB_NAME)
from dynamic_pipelines.models import BuildJob, EmptyJob
from dynamic_pipelines.utils import dump_jobs_to_yaml
from idf_build_apps.utils import semicolon_separated_str_to_list
from idf_ci.app import dump_apps_to_txt
from idf_ci_utils import IDF_PATH
from idf_pytest.constants import DEFAULT_CONFIG_RULES_STR, DEFAULT_FULL_BUILD_TEST_FILEPATTERNS, CollectMode
from idf_pytest.script import get_all_apps
def _parallel_count(n_apps: int) -> int:
    """Number of parallel build jobs needed to build ``n_apps`` apps.

    Ceiling division by ``DEFAULT_APPS_BUILD_PER_JOB`` with a minimum of 1, so
    the generated job is always valid. (The previous ``n // per_job + 1``
    formula spawned one extra no-op job whenever ``n`` was an exact multiple of
    the batch size.)
    """
    return max(1, -(-n_apps // DEFAULT_APPS_BUILD_PER_JOB))


def main(arguments: argparse.Namespace) -> None:
    """Generate the build child pipeline YAML file.

    Collects the apps to build (either everything matching ``-k``, or the
    test-related and non-test-related apps derived from the modified
    components/files), dumps the app lists to text files, and writes a child
    pipeline YAML with one parallel build job per app list.

    :param arguments: parsed CLI arguments (see the ``__main__`` block)
    """
    # load extra default build targets from the build test rules config file
    extra_default_build_targets: t.List[str] = []
    if arguments.default_build_test_rules:
        with open(arguments.default_build_test_rules) as fr:
            configs = yaml.safe_load(fr)

        if configs:
            extra_default_build_targets = configs.get('extra_default_build_targets') or []

    build_jobs = []
    ###########################################
    # special case with -k, ignore other args #
    ###########################################
    if arguments.filter_expr:
        # build only test related apps
        test_related_apps, _ = get_all_apps(
            arguments.paths,
            target=CollectMode.ALL,
            config_rules_str=DEFAULT_CONFIG_RULES_STR,
            filter_expr=arguments.filter_expr,
            marker_expr='not host_test',
            extra_default_build_targets=extra_default_build_targets,
        )
        dump_apps_to_txt(sorted(test_related_apps), TEST_RELATED_APPS_FILENAME)
        print(f'Generate test related apps file {TEST_RELATED_APPS_FILENAME} with {len(test_related_apps)} apps')

        test_apps_build_job = BuildJob(
            name=TEST_RELATED_BUILD_JOB_NAME,
            parallel=_parallel_count(len(test_related_apps)),
            variables={
                'APP_LIST_FILE': TEST_RELATED_APPS_FILENAME,
            },
        )
        build_jobs.append(test_apps_build_job)
    else:
        #############
        # all cases #
        #############
        test_related_apps, non_test_related_apps = get_all_apps(
            arguments.paths,
            CollectMode.ALL,
            marker_expr='not host_test',
            config_rules_str=DEFAULT_CONFIG_RULES_STR,
            extra_default_build_targets=extra_default_build_targets,
            modified_components=arguments.modified_components,
            modified_files=arguments.modified_files,
            ignore_app_dependencies_filepatterns=arguments.ignore_app_dependencies_filepatterns,
        )
        dump_apps_to_txt(sorted(test_related_apps), TEST_RELATED_APPS_FILENAME)
        print(f'Generate test related apps file {TEST_RELATED_APPS_FILENAME} with {len(test_related_apps)} apps')
        dump_apps_to_txt(sorted(non_test_related_apps), NON_TEST_RELATED_APPS_FILENAME)
        print(
            f'Generate non-test related apps file {NON_TEST_RELATED_APPS_FILENAME} with {len(non_test_related_apps)} apps'
        )

        if test_related_apps:
            test_apps_build_job = BuildJob(
                name=TEST_RELATED_BUILD_JOB_NAME,
                parallel=_parallel_count(len(test_related_apps)),
                variables={
                    'APP_LIST_FILE': TEST_RELATED_APPS_FILENAME,
                },
            )
            build_jobs.append(test_apps_build_job)

        if non_test_related_apps:
            non_test_apps_build_job = BuildJob(
                name=NON_TEST_RELATED_BUILD_JOB_NAME,
                parallel=_parallel_count(len(non_test_related_apps)),
                variables={
                    'APP_LIST_FILE': NON_TEST_RELATED_APPS_FILENAME,
                },
            )
            build_jobs.append(non_test_apps_build_job)

    # check if there's no jobs
    if not build_jobs:
        print('No apps need to be built. Create one empty job instead')
        build_jobs.append(EmptyJob())
        extra_include_yml = []
    else:
        extra_include_yml = ['tools/ci/dynamic_pipelines/templates/test_child_pipeline.yml']

    dump_jobs_to_yaml(build_jobs, arguments.yaml_output, extra_include_yml)
    print(f'Generate child pipeline yaml file {arguments.yaml_output} with {sum(j.parallel for j in build_jobs)} jobs')
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Generate build child pipeline',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument(
        '-o',
        '--yaml-output',
        default=DEFAULT_BUILD_CHILD_PIPELINE_FILEPATH,
        help='Output YAML path',
    )
    parser.add_argument(
        '-p',
        '--paths',
        nargs='+',
        default=DEFAULT_TEST_PATHS,
        help='Paths to the apps to build.',
    )
    parser.add_argument(
        '-k',
        '--filter-expr',
        help='only build tests matching given filter expression. For example: -k "test_hello_world". Works only'
        'for pytest',
    )
    parser.add_argument(
        '--default-build-test-rules',
        default=os.path.join(IDF_PATH, '.gitlab', 'ci', 'default-build-test-rules.yml'),
        help='default build test rules config file',
    )
    parser.add_argument(
        '--modified-components',
        type=semicolon_separated_str_to_list,
        help='semicolon-separated string which specifies the modified components. '
        'app with `depends_components` set in the corresponding manifest files would only be built '
        'if depends on any of the specified components. '
        'If set to "", the value would be considered as None. '
        'If set to ";", the value would be considered as an empty list',
    )
    parser.add_argument(
        '--modified-files',
        type=semicolon_separated_str_to_list,
        help='semicolon-separated string which specifies the modified files. '
        'app with `depends_filepatterns` set in the corresponding manifest files would only be built '
        'if any of the specified file pattern matches any of the specified modified files. '
        'If set to "", the value would be considered as None. '
        'If set to ";", the value would be considered as an empty list',
    )
    parser.add_argument(
        '-if',
        '--ignore-app-dependencies-filepatterns',
        type=semicolon_separated_str_to_list,
        help='semicolon-separated string which specifies the file patterns used for '
        'ignoring checking the app dependencies. '
        'The `depends_components` and `depends_filepatterns` set in the manifest files will be ignored '
        'when any of the specified file patterns matches any of the modified files. '
        'Must be used together with --modified-files. '
        'If set to "", the value would be considered as None. '
        'If set to ";", the value would be considered as an empty list',
    )

    args = parser.parse_args()

    # Decide the build scope:
    # - full pipelines (non-MR, or BUILD_AND_TEST_ALL_APPS=1) build everything
    # - -k pipelines build only the matching test apps
    # - otherwise build only what the modified components/files require
    if os.getenv('IS_MR_PIPELINE') == '0' or os.getenv('BUILD_AND_TEST_ALL_APPS') == '1':
        print('Build and run all test cases, and compile all cmake apps')
        args.modified_components = None
        args.modified_files = None
        args.ignore_app_dependencies_filepatterns = None
    elif args.filter_expr is not None:
        print('Build and run only test cases matching "%s"' % args.filter_expr)
        args.modified_components = None
        args.modified_files = None
        args.ignore_app_dependencies_filepatterns = None
    else:
        print('Build and run only test cases matching the modified components and files')

    if args.modified_files and not args.ignore_app_dependencies_filepatterns:
        # setting default values
        args.ignore_app_dependencies_filepatterns = DEFAULT_FULL_BUILD_TEST_FILEPATTERNS

    main(args)

View File

@@ -0,0 +1,59 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import argparse
import glob
import os
import __init__ # noqa: F401 # inject the system path
from dynamic_pipelines.report import BuildReportGenerator
from idf_ci.app import import_apps_from_txt
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Update Build Report in MR pipelines',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument(
        '--project-id',
        type=int,
        default=os.getenv('CI_PROJECT_ID'),
        help='Project ID',
    )
    parser.add_argument(
        '--mr-iid',
        type=int,
        default=os.getenv('CI_MERGE_REQUEST_IID'),
        help='Merge Request IID',
    )
    parser.add_argument(
        '--pipeline-id',
        type=int,
        default=os.getenv('PARENT_PIPELINE_ID'),
        help='Pipeline ID',
    )
    parser.add_argument(
        '--job-id',
        type=int,
        default=os.getenv('CI_JOB_ID'),
        help='Job ID',
    )
    parser.add_argument(
        '--commit-id',
        default=os.getenv('CI_COMMIT_SHORT_SHA'),
        help='MR commit ID',
    )
    parser.add_argument(
        '--app-list-filepattern',
        default='list_job_*.txt',
        help='App list file pattern',
    )

    args = parser.parse_args()

    # merge the app lists produced by all parallel build jobs of this pipeline
    apps = []
    for f in glob.glob(args.app_list_filepattern):
        apps.extend(import_apps_from_txt(f))

    # post (or update) the build report comment on the merge request
    report_generator = BuildReportGenerator(args.project_id, args.mr_iid, args.pipeline_id, apps=apps)
    report_generator.post_report(args.job_id, args.commit_id)

View File

@@ -0,0 +1,131 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""This file is used for generating the child pipeline for target test jobs.
1. Check the build jobs' artifacts to get the built apps' information.
2. Post the Build Report if it's running in an MR pipeline.
3. Generate the child pipeline for target test jobs.
"""
import argparse
import glob
import os
import typing as t
from collections import Counter, defaultdict
import __init__ # noqa: F401 # inject the system path
from dynamic_pipelines.constants import (DEFAULT_CASES_TEST_PER_JOB, DEFAULT_TARGET_TEST_CHILD_PIPELINE_FILEPATH,
DEFAULT_TEST_PATHS)
from dynamic_pipelines.models import EmptyJob, Job, TargetTestJob
from dynamic_pipelines.utils import dump_jobs_to_yaml
from gitlab.v4.objects import Project
from gitlab_api import Gitlab
from idf_build_apps import App
from idf_ci.app import import_apps_from_txt
from idf_pytest.script import get_pytest_cases
def get_tags_with_amount(s: str) -> t.List[str]:
    """Convert a comma-separated target selector into a sorted list of runner tags.

    A target appearing ``n > 1`` times becomes ``'<target>_<n>'``; a target
    appearing once is kept unchanged.

    :param s: comma-separated target names, e.g. ``'esp32,esp32'``
    :return: sorted, de-duplicated runner tags, e.g. ``['esp32_2']``
    """
    occurrences: Counter = Counter(s.split(','))
    tags = {
        target if amount == 1 else f'{target}_{amount}'
        for target, amount in occurrences.items()
    }
    return sorted(tags)
def generate_target_test_child_pipeline(project: Project, paths: str, apps: t.List[App], output_filepath: str) -> None:
    """Generate the target-test child pipeline YAML file.

    Groups the collected pytest cases by (target selector, env markers), checks
    that at least one online runner exists for each group's tag set, and emits
    one parallel target-test job per group.

    :param project: gitlab project, used to query online runners
    :param paths: paths to collect pytest cases from
    :param apps: built apps; only cases whose apps were built are collected
    :param output_filepath: where to write the child pipeline YAML
    """
    pytest_cases = get_pytest_cases(
        paths,
        apps=apps,
        marker_expr='not host_test',  # since it's generating target-test child pipeline
    )

    # group cases by (target selector, sorted env markers)
    res = defaultdict(list)
    for case in pytest_cases:
        if not case.env_markers:
            print(f'No env markers found for {case.item.originalname} in {case.path}. Ignoring...')
            continue

        res[(case.target_selector, tuple(sorted(case.env_markers)))].append(case)

    target_test_jobs: t.List[Job] = []
    for (target_selector, env_markers), cases in res.items():
        runner_tags = get_tags_with_amount(target_selector) + list(env_markers)
        # we don't need to get all runners; finding a single online one is enough
        runner_list = project.runners.list(status='online', tag_list=','.join(runner_tags), get_all=False)
        if not runner_list:
            print(f'WARNING: No runner found with tag {",".join(runner_tags)}, ignoring the following test cases:')
            for case in cases:
                print(f'  - {case.name}')
            continue

        target_test_job = TargetTestJob(
            name=f'{target_selector} - {",".join(env_markers)}',
            tags=runner_tags,
            # ceiling division: the previous ``len(cases) // per_job + 1`` formula
            # spawned one extra no-op job when the case count was an exact multiple
            parallel=-(-len(cases) // DEFAULT_CASES_TEST_PER_JOB),
        )
        target_test_job.set_variable('TARGET_SELECTOR', f"'{target_selector}'")
        target_test_job.set_variable('ENV_MARKERS', "'" + ' and '.join(env_markers) + "'")
        target_test_job.set_variable('PYTEST_NODES', ' '.join([f"'{case.item.nodeid}'" for case in cases]))

        target_test_jobs.append(target_test_job)

    if not target_test_jobs:
        print('No target test cases required, create one empty job instead')
        target_test_jobs.append(EmptyJob())
        extra_include_yml = []
    else:
        extra_include_yml = ['tools/ci/dynamic_pipelines/templates/generate_target_test_report.yml']

    dump_jobs_to_yaml(target_test_jobs, output_filepath, extra_include_yml)
    print(f'Generate child pipeline yaml file {output_filepath} with {sum(j.parallel for j in target_test_jobs)} jobs')
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Generate Target Test Child Pipeline. Update Build Report in MR pipelines',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument(
        '-p',
        '--paths',
        nargs='+',
        default=DEFAULT_TEST_PATHS,
        help='Paths to the apps to build.',
    )
    parser.add_argument(
        '--project-id',
        type=int,
        default=os.getenv('CI_PROJECT_ID'),
        help='Project ID',
    )
    parser.add_argument(
        '--pipeline-id',
        type=int,
        default=os.getenv('PARENT_PIPELINE_ID'),
        help='Pipeline ID',
    )
    parser.add_argument(
        '-o',
        '--output',
        default=DEFAULT_TARGET_TEST_CHILD_PIPELINE_FILEPATH,
        help='Output child pipeline file path',
    )

    args = parser.parse_args()

    # merge the app lists produced by all parallel build jobs of this pipeline
    app_list_filepattern = 'list_job_*.txt'
    apps = []
    for f in glob.glob(app_list_filepattern):
        apps.extend(import_apps_from_txt(f))

    gl_project = Gitlab(args.project_id).project
    generate_target_test_child_pipeline(gl_project, args.paths, apps, args.output)

View File

@@ -0,0 +1,62 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import argparse
import glob
import os
import xml.etree.ElementTree as ET
import __init__ # noqa: F401 # inject the system path
from dynamic_pipelines.models import TestCase
from dynamic_pipelines.report import TargetTestReportGenerator
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Update Build Report in MR pipelines',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument(
        '--project-id',
        type=int,
        default=os.getenv('CI_PROJECT_ID'),
        help='Project ID',
    )
    parser.add_argument(
        '--mr-iid',
        type=int,
        default=os.getenv('CI_MERGE_REQUEST_IID'),
        help='Merge Request IID',
    )
    parser.add_argument(
        '--pipeline-id',
        type=int,
        default=os.getenv('PARENT_PIPELINE_ID'),
        help='Pipeline ID',
    )
    parser.add_argument(
        '--job-id',
        type=int,
        default=os.getenv('CI_JOB_ID'),
        help='Job ID',
    )
    parser.add_argument(
        '--commit-id',
        default=os.getenv('CI_COMMIT_SHORT_SHA'),
        help='MR commit ID',
    )
    parser.add_argument(
        '--junit-report-filepattern',
        default='XUNIT_RESULT*.xml',
        help='Junit Report file pattern',
    )

    args = parser.parse_args()

    # collect every test case from the junit XML reports of all target test jobs
    test_cases = []
    for f in glob.glob(args.junit_report_filepattern):
        root = ET.parse(f).getroot()
        for tc in root.findall('.//testcase'):
            test_cases.append(TestCase.from_test_case_node(tc))

    # post (or update) the target test report comment on the merge request
    report_generator = TargetTestReportGenerator(args.project_id, args.mr_iid, args.pipeline_id, test_cases=test_cases)
    report_generator.post_report(args.job_id, args.commit_id)

View File

@@ -0,0 +1,85 @@
# This file is used to generate build jobs for pytest case dynamic pipeline
# don't add real jobs in this file
########################
# Build Jobs Templates #
########################
# Template extended by the dynamically generated build jobs in the build child pipeline.
.dynamic_build_template:
  extends:
    - .before_script:build
    - .after_script:build:ccache:upload-when-fail
  image: $ESP_ENV_IMAGE
  stage: build
  variables:
    # Enable ccache for all build jobs. See configure_ci_environment.sh for more ccache related settings.
    IDF_CCACHE_ENABLE: "1"
  needs:
    # wait for the parent-pipeline job that generated this child pipeline
    - pipeline: $PARENT_PIPELINE_ID
      job: generate_build_child_pipeline
  artifacts:
    paths:
      # The other artifacts patterns are defined under tools/ci/artifacts_handler.py
      # Now we're uploading/downloading the binary files from our internal storage server
      #
      # keep the log file to help debug
      - "**/build*/build_log.txt"
      # build spec files
      - build_summary_*.xml
      # list of built apps
      - list_job_*.txt
    when: always
    expire_in: 1 week
  script:
    # CI specific options start from "--parallel-count xxx". could ignore when running locally
    - run_cmd python tools/ci/dynamic_pipelines/scripts/child_pipeline_build_apps.py $APP_LIST_FILE
      --parallel-count ${CI_NODE_TOTAL:-1}
      --parallel-index ${CI_NODE_INDEX:-1}
      --collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
      --modified-components ${MR_MODIFIED_COMPONENTS}
      --junitxml "build_summary_${CI_JOB_NAME_SLUG}.xml"
# Template extended by the dynamically generated target test jobs in the
# target-test child pipeline.
.dynamic_target_test_template:
  extends:
    - .before_script:fetch:target_test
  image: $TARGET_TEST_ENV_IMAGE
  stage: target_test
  timeout: 1 hour
  variables:
    SUBMODULES_TO_FETCH: "none"
    # set while generating the pipeline
    PYTEST_NODES: ""
    TARGET_SELECTOR: ""
    ENV_MARKERS: ""
  cache:
    # Usually do not need submodule-cache in target_test
    - key: pip-cache-${LATEST_GIT_TAG}
      paths:
        - .cache/pip
      policy: pull
  artifacts:
    paths:
      - XUNIT_RESULT*.xml
      - pytest_embedded_log/
    # Child pipeline reports won't be collected in the main one
    # https://gitlab.com/groups/gitlab-org/-/epics/8205
    # reports:
    #   junit: XUNIT_RESULT.xml
  script:
    # get known failure cases
    - retry_failed git clone $KNOWN_FAILURE_CASES_REPO known_failure_cases
    # get runner env config file
    - retry_failed git clone $TEST_ENV_CONFIG_REPO
    - python $CHECKOUT_REF_SCRIPT ci-test-runner-configs ci-test-runner-configs
    # CI specific options start from "--known-failure-cases-file xxx". could ignore when running locally
    - run_cmd pytest ${PYTEST_NODES}
      --target ${TARGET_SELECTOR}
      -m ${ENV_MARKERS}
      --pipeline-id $PARENT_PIPELINE_ID
      --junitxml=XUNIT_RESULT_${CI_JOB_NAME_SLUG}.xml
      --ignore-result-files known_failure_cases/known_failure_cases.txt
      --parallel-count ${CI_NODE_TOTAL:-1}
      --parallel-index ${CI_NODE_INDEX:-1}
      ${PYTEST_EXTRA_FLAGS}
      --app-info-filepattern "list_job_*.txt"
  after_script:
    # upload the junit reports to the internal storage server
    - python tools/ci/artifacts_handler.py upload --type logs junit_reports

View File

@@ -0,0 +1,10 @@
# Generates and publishes the target-test HTML report; `when: always` so the
# report is produced even if some target test jobs failed.
generate_pytest_report:
  stage: .post
  tags: [build, shiny]
  image: $ESP_ENV_IMAGE
  when: always
  artifacts:
    paths:
      - target_test_report.html
  script:
    - python tools/ci/dynamic_pipelines/scripts/generate_target_test_report.py

View File

@@ -0,0 +1,23 @@
<!doctype html>
<!-- Report page template. The {{title}} and {{table}} placeholders are
     presumably substituted by the dynamic-pipelines report generator before
     the page is published as a CI artifact — confirm against the generator
     scripts under tools/ci/dynamic_pipelines. -->
<html>
  <head>
    <meta charset="utf-8" />
    <title>{{title}}</title>
    <!-- Bootstrap + bootstrap-table from public CDNs for table styling/sorting -->
    <link
      href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css"
      rel="stylesheet"
    />
    <link
      href="https://unpkg.com/bootstrap-table@1.22.1/dist/bootstrap-table.min.css"
      rel="stylesheet"
    />
  </head>
  <body>
    <div class="container-fluid">{{table}}</div>
    <script src="https://cdn.jsdelivr.net/npm/jquery/dist/jquery.min.js"></script>
    <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js"></script>
    <script src="https://unpkg.com/bootstrap-table@1.22.1/dist/bootstrap-table.min.js"></script>
  </body>
</html>

View File

@@ -0,0 +1,41 @@
# Posts the build report (MR comment + HTML artifact); runs even when some
# build jobs failed so the report reflects the failures.
generate_pytest_build_report:
  stage: assign_test
  image: $ESP_ENV_IMAGE
  tags:
    - build
    - shiny
  when: always
  artifacts:
    paths:
      - build_report.html
      - test_related_apps_download_urls.yml
  script:
    - python tools/ci/dynamic_pipelines/scripts/generate_build_report.py

generate_pytest_child_pipeline:
  # finally, we can get some use out of the default behavior that downloads all artifacts from the previous stage
  stage: assign_test
  image: $ESP_ENV_IMAGE
  tags:
    - build
    - shiny
  artifacts:
    paths:
      - target_test_child_pipeline.yml
  script:
    - python tools/ci/dynamic_pipelines/scripts/generate_target_test_child_pipeline.py

# Triggers the generated target-test child pipeline; `strategy: depend` mirrors
# the child pipeline's status back to this job.
Pytest Target Test Jobs:
  stage: target_test
  needs:
    - generate_pytest_child_pipeline
  variables:
    PARENT_PIPELINE_ID: $PARENT_PIPELINE_ID
  # https://gitlab.com/gitlab-org/gitlab/-/issues/214340
  inherit:
    variables: false
  trigger:
    include:
      - artifact: target_test_child_pipeline.yml
        job: generate_pytest_child_pipeline
    strategy: depend

View File

@@ -0,0 +1,37 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import typing as t
import yaml
from .models import Job
def dump_jobs_to_yaml(
jobs: t.List[Job], output_filepath: str, extra_include_yml: t.Optional[t.List[str]] = None
) -> None:
yaml_dict = {}
for job in jobs:
yaml_dict.update(job.to_dict())
# global stuffs
yaml_dict.update(
{
'include': [
'tools/ci/dynamic_pipelines/templates/.dynamic_jobs.yml',
'.gitlab/ci/common.yml',
],
# https://gitlab.com/gitlab-org/gitlab/-/issues/222370#note_662695503
'workflow': {
'rules': [
{'if': '$CI_MERGE_REQUEST_IID'},
{'if': '$CI_COMMIT_BRANCH'},
],
},
}
)
yaml_dict['include'].extend(extra_include_yml or [])
with open(output_filepath, 'w') as fw:
yaml.dump(yaml_dict, fw, indent=2)

View File

@@ -40,3 +40,12 @@ tools/templates/sample_component/main.c
tools/ci/cleanup_ignore_lists.py
tools/ci/artifacts_handler.py
tools/unit-test-app/**/*
tools/ci/gitlab_yaml_linter.py
tools/ci/dynamic_pipelines/**/*
tools/ci/idf_ci/**/*
tools/ci/get_supported_examples.sh
tools/ci/python_packages/common_test_methods.py
tools/ci/python_packages/gitlab_api.py
tools/ci/python_packages/idf_http_server_test/**/*
tools/ci/python_packages/idf_iperf_test_util/**/*
tools/esp_prov/**/*

View File

@@ -62,7 +62,6 @@ tools/ci/check_kconfigs.py
tools/ci/check_readme_links.py
tools/ci/check_requirement_files.py
tools/ci/check_rules_components_patterns.py
tools/ci/check_rules_yml.py
tools/ci/check_soc_struct_headers.py
tools/ci/check_tools_files_patterns.py
tools/ci/check_type_comments.py
@@ -74,6 +73,7 @@ tools/ci/fix_empty_prototypes.sh
tools/ci/generate_rules.py
tools/ci/get-full-sources.sh
tools/ci/get_supported_examples.sh
tools/ci/gitlab_yaml_linter.py
tools/ci/mirror-submodule-update.sh
tools/ci/multirun_with_pyenv.sh
tools/ci/push_to_github.sh

View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python
#
# SPDX-FileCopyrightText: 2021-2022 Espressif Systems (Shanghai) CO LTD
# SPDX-FileCopyrightText: 2021-2023 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import argparse
@@ -11,8 +11,7 @@ from collections import defaultdict
from itertools import product
import yaml
from check_rules_yml import get_needed_rules
from idf_ci_utils import IDF_PATH
from idf_ci_utils import IDF_PATH, GitlabYmlConfig
try:
import pygraphviz as pgv
@@ -100,6 +99,7 @@ class RulesWriter:
self.cfg = self.expand_matrices()
self.rules = self.expand_rules()
self.yml_config = GitlabYmlConfig()
self.graph = None
def expand_matrices(self): # type: () -> dict
@@ -201,7 +201,7 @@ class RulesWriter:
def new_rules_str(self): # type: () -> str
res = []
for k, v in sorted(self.rules.items()):
if '.rules:' + k not in get_needed_rules():
if '.rules:' + k not in self.yml_config.used_rules:
print(f'WARNING: unused rule: {k}, skipping...')
continue
res.append(self.RULES_TEMPLATE.format(k, self._format_rule(k, v)))

100
tools/ci/gitlab_yaml_linter.py Executable file
View File

@@ -0,0 +1,100 @@
#!/usr/bin/env python
# SPDX-FileCopyrightText: 2023 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""
Check gitlab ci yaml files
"""
import argparse
import os
import typing as t
from functools import cached_property
from idf_ci_utils import IDF_PATH, GitlabYmlConfig, get_submodule_dirs
class YmlLinter:
    """Runs every ``_lint_*`` method against a parsed GitLab CI YAML config.

    Each check appends human-readable messages to ``self._errors``; ``lint()``
    prints the messages per check and exits non-zero if any check failed.
    """

    def __init__(self, yml_config: GitlabYmlConfig) -> None:
        self.yml_config = yml_config
        self._errors: t.List[str] = []  # errors reported by the currently running check

    @cached_property
    def lint_functions(self) -> t.List[str]:
        """Names of all ``_lint_*`` methods, in ``dir()`` (sorted) order."""
        funcs = []
        for func in dir(self):
            if func.startswith('_lint_'):
                funcs.append(func)

        return funcs

    def lint(self) -> None:
        """Run all checks, print the collected errors, and exit with status 0/1."""
        exit_code = 0
        for func in self.lint_functions:
            getattr(self, func)()

            if self._errors:
                print(f'Errors found while running {func}:')
                exit_code = 1
                print('\t- ' + '\n\t- '.join(self._errors))
                self._errors = []  # reset

        # raise SystemExit (consistent with the checks below) rather than calling
        # the site-injected exit() builtin, which may be absent in some setups
        raise SystemExit(exit_code)

    # name it like _1_ to make it run first
    def _lint_1_yml_parser(self) -> None:
        """Sanity check: every top-level key must be a global key, an anchor, or a job."""
        for k in self.yml_config.config:  # values unused; iterate keys only
            if (
                k not in self.yml_config.global_keys
                and k not in self.yml_config.anchors
                and k not in self.yml_config.jobs
            ):
                raise SystemExit(f'Parser incorrect. Key {k} not in global keys, rules or jobs')

    def _lint_default_values_artifacts(self) -> None:
        """Flag job-level artifacts settings that merely repeat the global defaults."""
        defaults_artifacts = self.yml_config.default.get('artifacts', {})

        for job_name, d in self.yml_config.jobs.items():
            for k, v in d.get('artifacts', {}).items():
                if k not in defaults_artifacts:
                    continue

                if v == defaults_artifacts[k]:
                    self._errors.append(f'job {job_name} key {k} has same value as default value {v}')

    def _lint_submodule_patterns(self) -> None:
        """Keep ``.patterns-submodule`` in sync with the actual git submodules."""
        submodule_paths = sorted(['.gitmodules'] + get_submodule_dirs())
        submodule_paths_in_patterns = sorted(self.yml_config.config.get('.patterns-submodule', []))

        if submodule_paths != submodule_paths_in_patterns:
            unused_patterns = set(submodule_paths_in_patterns) - set(submodule_paths)
            if unused_patterns:
                for item in unused_patterns:
                    self._errors.append(f'non-exist pattern {item}. Please remove {item} from .patterns-submodule')
            undefined_patterns = set(submodule_paths) - set(submodule_paths_in_patterns)
            if undefined_patterns:
                for item in undefined_patterns:
                    self._errors.append(f'undefined pattern {item}. Please add {item} to .patterns-submodule')

    def _lint_gitlab_yml_rules(self) -> None:
        """Every defined ``.rules:``/template anchor must be used, and vice versa."""
        unused_rules = self.yml_config.rules - self.yml_config.used_rules
        for item in unused_rules:
            self._errors.append(f'Unused rule: {item}, please remove it')

        undefined_rules = self.yml_config.used_rules - self.yml_config.rules
        for item in undefined_rules:
            self._errors.append(f'Undefined rule: {item}')
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--root-yml-filepath', help='root yml file path', default=os.path.join(IDF_PATH, '.gitlab-ci.yml')
    )

    args = parser.parse_args()

    # parse the root yaml plus everything it includes, then run all _lint_* checks
    config = GitlabYmlConfig(args.root_yml_filepath)
    linter = YmlLinter(config)
    linter.lint()

View File

40
tools/ci/idf_ci/app.py Normal file
View File

@@ -0,0 +1,40 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import sys
import typing as t
from typing import Literal
from idf_build_apps import App, CMakeApp, json_to_app
from idf_ci.uploader import AppUploader, get_app_uploader
class IdfCMakeApp(CMakeApp):
    """CMakeApp that uploads its build artifacts to the internal storage server after each build."""

    # shared uploader; None when PARENT_PIPELINE_ID is unset (e.g. local runs)
    uploader: t.ClassVar[t.Optional['AppUploader']] = get_app_uploader()
    build_system: Literal['idf_cmake'] = 'idf_cmake'

    def _post_build(self) -> None:
        # run the normal post-build steps first, then push artifacts if in CI
        super()._post_build()

        if self.uploader:
            self.uploader.upload_app(self.build_path)
def dump_apps_to_txt(apps: t.List[App], output_filepath: str) -> None:
    """Serialize ``apps`` to a text file, one JSON document per line.

    The file can be read back with :func:`import_apps_from_txt`.
    """
    serialized_lines = (f'{app.model_dump_json()}\n' for app in apps)
    with open(output_filepath, 'w') as fw:
        fw.writelines(serialized_lines)
def import_apps_from_txt(input_filepath: str) -> t.List[App]:
    """Deserialize apps from a text file written by :func:`dump_apps_to_txt`.

    Blank lines are skipped. The process exits with status 1 on the first line
    that cannot be deserialized.
    """
    deserialized: t.List[App] = []
    with open(input_filepath) as fr:
        for raw_line in fr:
            line = raw_line.strip()
            if not line:
                continue

            try:
                deserialized.append(json_to_app(line, extra_classes=[IdfCMakeApp]))
            except Exception:  # noqa
                print('Failed to deserialize app from line: %s' % line)
                sys.exit(1)

    return deserialized

150
tools/ci/idf_ci/uploader.py Normal file
View File

@@ -0,0 +1,150 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import glob
import os
import typing as t
from datetime import timedelta
from zipfile import ZIP_DEFLATED, ZipFile
import minio
from artifacts_handler import ArtifactType, get_minio_client, getenv
from idf_build_apps import App
from idf_build_apps.utils import rmdir
from idf_ci_utils import IDF_PATH
from idf_pytest.constants import DEFAULT_BUILD_LOG_FILENAME
class AppUploader:
    """Uploads/downloads zipped app build artifacts to/from the internal MinIO storage server.

    Object keys embed the pipeline id, the artifact type and the app path, so
    artifacts of different pipelines and apps never collide.
    """

    # glob patterns (relative to the app build dir) collected per artifact type
    TYPE_PATTERNS_DICT = {
        ArtifactType.MAP_AND_ELF_FILES: [
            'bootloader/*.map',
            'bootloader/*.elf',
            '*.map',
            '*.elf',
        ],
        ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES: [
            '*.bin',
            'bootloader/*.bin',
            'partition_table/*.bin',
            'flasher_args.json',
            'flash_project_args',
            'config/sdkconfig.json',
            'project_description.json',
        ],
        ArtifactType.LOGS: [
            DEFAULT_BUILD_LOG_FILENAME,
        ],
    }

    def __init__(self, pipeline_id: t.Union[str, int, None] = None) -> None:
        # '1' is the fallback key prefix when no pipeline id is given
        # NOTE(review): presumably for local/dev runs — confirm with callers
        self.pipeline_id = str(pipeline_id or '1')
        self._client = get_minio_client()

    def get_app_object_name(self, app_path: str, zip_name: str, artifact_type: ArtifactType) -> str:
        """Object key on the storage server for one app's zipped artifacts."""
        return f'{self.pipeline_id}/{artifact_type.value}/{app_path}/{zip_name}'

    def _upload_app(self, app_build_path: str, artifact_type: ArtifactType) -> bool:
        """Zip the files matching this artifact type's patterns and upload the archive.

        :return: True if at least one file matched and the archive was uploaded
        """
        app_path, build_dir = os.path.split(app_build_path)
        zip_filename = f'{build_dir}.zip'
        has_file = False
        with ZipFile(
            zip_filename,
            'w',
            compression=ZIP_DEFLATED,
            # 1 is the fastest compression level
            # the size differs not much between 1 and 9
            compresslevel=1,
        ) as zw:
            for pattern in self.TYPE_PATTERNS_DICT[artifact_type]:
                for file in glob.glob(os.path.join(app_build_path, pattern), recursive=True):
                    zw.write(file)
                    has_file = True

        uploaded = False
        try:
            if has_file:
                obj_name = self.get_app_object_name(app_path, zip_filename, artifact_type)
                print(f'Created archive file: {zip_filename}, uploading as {obj_name}')
                self._client.fput_object(getenv('IDF_S3_BUCKET'), obj_name, zip_filename)
                uploaded = True
        finally:
            # the local archive is only a transfer vehicle; always remove it
            os.remove(zip_filename)

        return uploaded

    def upload_app(self, app_build_path: str, artifact_type: t.Optional[ArtifactType] = None) -> None:
        """Upload one app's artifacts; with no explicit type, upload all three types.

        When anything was uploaded, the local build dir is removed (build log
        kept) to free disk space on the runner.
        """
        uploaded = False
        if not artifact_type:
            for _artifact_type in [
                ArtifactType.MAP_AND_ELF_FILES,
                ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES,
                ArtifactType.LOGS,
            ]:
                uploaded |= self._upload_app(app_build_path, _artifact_type)
        else:
            uploaded = self._upload_app(app_build_path, artifact_type)

        if uploaded:
            rmdir(app_build_path, exclude_file_patterns=DEFAULT_BUILD_LOG_FILENAME)

    def _download_app(self, app_build_path: str, artifact_type: ArtifactType) -> None:
        """Download and extract one artifact archive for the given app build path."""
        app_path, build_dir = os.path.split(app_build_path)
        zip_filename = f'{build_dir}.zip'

        # paths are relative to IDF_PATH
        current_dir = os.getcwd()
        os.chdir(IDF_PATH)
        try:
            obj_name = self.get_app_object_name(app_path, zip_filename, artifact_type)
            print(f'Downloading {obj_name}')
            try:
                try:
                    # check existence first to give a clear "missing artifact" error
                    self._client.stat_object(getenv('IDF_S3_BUCKET'), obj_name)
                except minio.error.S3Error as e:
                    raise SystemExit(
                        f'No such file on minio server: {obj_name}. '
                        f'Probably the build failed or the artifacts got expired. '
                        f'Full error message: {str(e)}'
                    )
                else:
                    self._client.fget_object(getenv('IDF_S3_BUCKET'), obj_name, zip_filename)
                    print(f'Downloaded to {zip_filename}')
            except minio.error.S3Error as e:
                raise SystemExit('Shouldn\'t happen, please report this bug in the CI channel' + str(e))

            with ZipFile(zip_filename, 'r') as zr:
                zr.extractall()
            os.remove(zip_filename)
        finally:
            # always restore the caller's working directory
            os.chdir(current_dir)

    def download_app(self, app_build_path: str, artifact_type: t.Optional[ArtifactType] = None) -> None:
        """Download one app's artifacts; with no explicit type, download elf/map and binaries (not logs)."""
        if not artifact_type:
            for _artifact_type in [ArtifactType.MAP_AND_ELF_FILES, ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES]:
                self._download_app(app_build_path, _artifact_type)
        else:
            self._download_app(app_build_path, artifact_type)

    def get_app_presigned_url(self, app: App, artifact_type: ArtifactType) -> str:
        """Presigned GET URL (valid 4 days) for one app's artifact archive, or '' if it does not exist."""
        obj_name = self.get_app_object_name(app.app_dir, f'{app.build_dir}.zip', artifact_type)
        try:
            self._client.stat_object(
                getenv('IDF_S3_BUCKET'),
                obj_name,
            )
        except minio.error.S3Error:
            return ''
        else:
            return self._client.get_presigned_url(  # type: ignore
                'GET', getenv('IDF_S3_BUCKET'), obj_name, expires=timedelta(days=4)
            )
def get_app_uploader() -> t.Optional['AppUploader']:
    """Return an :class:`AppUploader` bound to the parent pipeline, or ``None``
    when ``PARENT_PIPELINE_ID`` is unset/empty (i.e. not running in CI)."""
    parent_pipeline_id = os.getenv('PARENT_PIPELINE_ID')
    if not parent_pipeline_id:
        return None
    return AppUploader(parent_pipeline_id)

View File

@@ -8,12 +8,14 @@ import logging
import os
import subprocess
import sys
from typing import Any, List
import typing as t
from functools import cached_property
from pathlib import Path
IDF_PATH = os.path.abspath(os.getenv('IDF_PATH', os.path.join(os.path.dirname(__file__), '..', '..')))
def get_submodule_dirs(full_path: bool = False) -> List[str]:
def get_submodule_dirs(full_path: bool = False) -> t.List[str]:
"""
To avoid issue could be introduced by multi-os or additional dependency,
we use python and git to get this output
@@ -71,7 +73,7 @@ def is_executable(full_path: str) -> bool:
return os.access(full_path, os.X_OK)
def get_git_files(path: str = IDF_PATH, full_path: bool = False) -> List[str]:
def get_git_files(path: str = IDF_PATH, full_path: bool = False) -> t.List[str]:
"""
Get the result of git ls-files
:param path: path to run git ls-files
@@ -98,11 +100,10 @@ def get_git_files(path: str = IDF_PATH, full_path: bool = False) -> List[str]:
return [os.path.join(path, f) for f in files] if full_path else files
def is_in_directory(file_path: str, folder: str) -> bool:
return os.path.realpath(file_path).startswith(os.path.realpath(folder) + os.sep)
def to_list(s: t.Any) -> t.List[t.Any]:
if not s:
return []
def to_list(s: Any) -> List[Any]:
if isinstance(s, (set, tuple)):
return list(s)
@@ -110,3 +111,83 @@ def to_list(s: Any) -> List[Any]:
return s
return [s]
class GitlabYmlConfig:
    """Aggregated view of the root ``.gitlab-ci.yml`` plus every file it includes."""

    def __init__(self, root_yml_filepath: str = os.path.join(IDF_PATH, '.gitlab-ci.yml')) -> None:
        self._config: t.Dict[str, t.Any] = {}
        self._defaults: t.Dict[str, t.Any] = {}

        self._load(root_yml_filepath)

    def _load(self, root_yml_filepath: str) -> None:
        # avoid unused import in other pre-commit hooks
        import yaml

        all_config = dict()
        # open files via context managers so handles are closed deterministically
        # (previously bare open() results were passed to yaml.load and leaked)
        with open(root_yml_filepath) as fr:
            root_yml = yaml.load(fr, Loader=yaml.FullLoader)
        for item in root_yml['include']:
            with open(os.path.join(IDF_PATH, item)) as fr:
                all_config.update(yaml.load(fr, Loader=yaml.FullLoader))

        if 'default' in all_config:
            self._defaults = all_config.pop('default')

        self._config = all_config

    @property
    def default(self) -> t.Dict[str, t.Any]:
        """The global ``default`` section (empty dict when absent)."""
        return self._defaults

    @property
    def config(self) -> t.Dict[str, t.Any]:
        """All top-level entries except ``default``."""
        return self._config

    @cached_property
    def global_keys(self) -> t.List[str]:
        """Top-level keys that are GitLab CI keywords, not anchors or jobs."""
        return ['default', 'include', 'workflow', 'variables', 'stages']

    @cached_property
    def anchors(self) -> t.Dict[str, t.Any]:
        """Hidden (dot-prefixed) entries: rule anchors and job templates."""
        return {k: v for k, v in self.config.items() if k.startswith('.')}

    @cached_property
    def jobs(self) -> t.Dict[str, t.Any]:
        """Real job definitions: neither hidden entries nor global keywords."""
        return {k: v for k, v in self.config.items() if not k.startswith('.') and k not in self.global_keys}

    @cached_property
    def rules(self) -> t.Set[str]:
        """All defined rule/template anchor names."""
        return {k for k, _ in self.anchors.items() if self._is_rule_key(k)}

    @cached_property
    def used_rules(self) -> t.Set[str]:
        """Rule/template anchor names referenced via ``extends`` anywhere in the config."""
        res = set()
        for v in self.config.values():
            if not isinstance(v, dict):
                continue

            for item in to_list(v.get('extends')):
                if self._is_rule_key(item):
                    res.add(item)

        return res

    @staticmethod
    def _is_rule_key(key: str) -> bool:
        # ``.rules:*`` anchors and ``*template`` anchors are both treated as rules
        return key.startswith('.rules:') or key.endswith('template')
def get_all_manifest_files(root_path: t.Optional[str] = None) -> t.List[str]:
    """Collect all ``.build-test-rules.yml`` manifest files under a root directory.

    Manifests located under any ``managed_components`` directory are skipped,
    since those belong to downloaded components rather than the repository
    itself.

    :param root_path: directory to search recursively; defaults to ``IDF_PATH``
    :return: list of manifest file paths as strings
    """
    # Backward-compatible default: callers with no argument keep searching IDF_PATH.
    root = root_path if root_path is not None else IDF_PATH
    return [
        str(p)
        for p in Path(root).glob('**/.build-test-rules.yml')
        if 'managed_components' not in p.parts
    ]

View File

@@ -1,4 +1,4 @@
# SPDX-FileCopyrightText: 2023 Espressif Systems (Shanghai) CO LTD
# SPDX-FileCopyrightText: 2023-2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""
@@ -7,8 +7,12 @@ Pytest Related Constants. Don't import third-party packages here.
import os
import typing as t
from dataclasses import dataclass
from enum import Enum
from functools import cached_property
from pathlib import Path
from _pytest.python import Function
from idf_ci_utils import IDF_PATH
from pytest_embedded.utils import to_list
SUPPORTED_TARGETS = ['esp32', 'esp32s2', 'esp32c3', 'esp32s3', 'esp32c2', 'esp32c6', 'esp32h2', 'esp32p4']
@@ -35,10 +39,11 @@ SPECIAL_MARKERS = {
'temp_skip': 'temp skip tests for specified targets both in ci and locally',
'nightly_run': 'tests should be executed as part of the nightly trigger pipeline',
'host_test': 'tests which should not be built at the build stage, and instead built in host_test stage',
'qemu': 'build and test using qemu-system-xtensa, not real target',
}
ENV_MARKERS = {
# special markers
'qemu': 'build and test using qemu, not real target',
# single-dut markers
'generic': 'tests should be run on generic runners',
'flash_suspend': 'support flash suspend feature',
@@ -89,7 +94,6 @@ ENV_MARKERS = {
'adc': 'ADC related tests should run on adc runners',
'xtal32k': 'Runner with external 32k crystal connected',
'no32kXtal': 'Runner with no external 32k crystal connected',
'multi_dut_modbus_rs485': 'a pair of runners connected by RS485 bus',
'psramv0': 'Runner with PSRAM version 0',
'esp32eco3': 'Runner with esp32 eco3 connected',
'ecdsa_efuse': 'Runner with test ECDSA private keys programmed in efuse',
@@ -98,6 +102,7 @@ ENV_MARKERS = {
'i2c_oled': 'Runner with ssd1306 I2C oled connected',
'httpbin': 'runner for tests that need to access the httpbin service',
# multi-dut markers
'multi_dut_modbus_rs485': 'a pair of runners connected by RS485 bus',
'ieee802154': 'ieee802154 related tests should run on ieee802154 runners.',
'openthread_br': 'tests should be used for openthread border router.',
'openthread_bbr': 'tests should be used for openthread border router linked to Internet.',
@@ -109,9 +114,41 @@ ENV_MARKERS = {
'sdio_master_slave': 'Test sdio multi board, esp32+esp32',
'sdio_multidev_32_c6': 'Test sdio multi board, esp32+esp32c6',
'usj_device': 'Test usb_serial_jtag and usb_serial_jtag is used as serial only (not console)',
'twai_std': 'twai runner with all twai supported targets connect to usb-can adapter'
'twai_std': 'twai runner with all twai supported targets connect to usb-can adapter',
}
DEFAULT_CONFIG_RULES_STR = ['sdkconfig.ci=default', 'sdkconfig.ci.*=', '=default']
DEFAULT_IGNORE_WARNING_FILEPATH = os.path.join(IDF_PATH, 'tools', 'ci', 'ignore_build_warnings.txt')
DEFAULT_BUILD_TEST_RULES_FILEPATH = os.path.join(IDF_PATH, '.gitlab', 'ci', 'default-build-test-rules.yml')
DEFAULT_FULL_BUILD_TEST_FILEPATTERNS = [
# tools
'tools/cmake/**/*',
'tools/tools.json',
# components
'components/cxx/**/*',
'components/esp_common/**/*',
'components/esp_hw_support/**/*',
'components/esp_rom/**/*',
'components/esp_system/**/*',
'components/esp_timer/**/*',
'components/freertos/**/*',
'components/hal/**/*',
'components/heap/**/*',
'components/log/**/*',
'components/newlib/**/*',
'components/riscv/**/*',
'components/soc/**/*',
'components/xtensa/**/*',
]
DEFAULT_BUILD_LOG_FILENAME = 'build_log.txt'
# How a `--target` CLI value is interpreted while collecting pytest cases.
# Inheriting from `str` lets members compare equal to their plain string
# values (e.g. CollectMode.ALL == 'all').
class CollectMode(str, Enum):
# one concrete target, e.g. `esp32` -> single-dut cases for that target
SINGLE_SPECIFIC = 'single_specific'
# comma-separated targets, e.g. `esp32,esp32s2` -> matching multi-dut cases
MULTI_SPECIFIC = 'multi_specific'
# all multi-dut cases that parametrize `target` explicitly
MULTI_ALL_WITH_PARAM = 'multi_all_with_param'
# everything: each supported/preview target plus multi_all_with_param
ALL = 'all'
@dataclass
class PytestApp:
@@ -122,38 +159,47 @@ class PytestApp:
def __hash__(self) -> int:
return hash((self.path, self.target, self.config))
@cached_property
def build_dir(self) -> str:
return os.path.join(self.path, f'build_{self.target}_{self.config}')
@dataclass
class PytestCase:
path: str
name: str
apps: t.Set[PytestApp]
target: str
apps: t.List[PytestApp]
item: Function
def __hash__(self) -> int:
return hash((self.path, self.name, self.apps, self.all_markers))
@cached_property
def path(self) -> str:
return str(self.item.path)
@cached_property
def name(self) -> str:
return self.item.originalname # type: ignore
@cached_property
def targets(self) -> t.List[str]:
return [app.target for app in self.apps]
@cached_property
def is_single_dut_test_case(self) -> bool:
return True if len(self.apps) == 1 else False
@cached_property
def is_host_test(self) -> bool:
return 'host_test' in self.all_markers or 'linux' in self.targets
# the following markers could be changed dynamically, don't use cached_property
@property
def all_markers(self) -> t.Set[str]:
return {marker.name for marker in self.item.iter_markers()}
@property
def is_nightly_run(self) -> bool:
return 'nightly_run' in self.all_markers
@property
def target_markers(self) -> t.Set[str]:
return {marker for marker in self.all_markers if marker in TARGET_MARKERS}
@property
def env_markers(self) -> t.Set[str]:
return {marker for marker in self.all_markers if marker in ENV_MARKERS}
@property
def skipped_targets(self) -> t.Set[str]:
def _get_temp_markers_disabled_targets(marker_name: str) -> t.Set[str]:
temp_marker = self.item.get_closest_marker(marker_name)
@@ -179,4 +225,65 @@ class PytestCase:
else: # we use `temp_skip` locally
skip_targets = temp_skip_targets
return skip_targets
return {marker for marker in self.all_markers if marker in TARGET_MARKERS} - skip_targets
@property
def env_markers(self) -> t.Set[str]:
return {marker for marker in self.all_markers if marker in ENV_MARKERS}
@property
def target_selector(self) -> str:
return ','.join(app.target for app in self.apps)
@property
def requires_elf_or_map(self) -> bool:
"""
This property determines whether the test case requires elf or map file. By default, one app in the test case
only requires .bin files.
:return: True if the test case requires elf or map file, False otherwise
"""
if 'jtag' in self.env_markers or 'usb_serial_jtag' in self.env_markers:
return True
if any('panic' in Path(app.path).parts for app in self.apps):
return True
return False
def all_built_in_app_lists(self, app_lists: t.Optional[t.List[str]] = None) -> t.Optional[str]:
"""
Check if all binaries of the test case are built in the app lists.
:param app_lists: app lists to check
:return: debug string if not all binaries are built in the app lists, None otherwise
"""
if app_lists is None:
# ignore this feature
return None
bin_found = [0] * len(self.apps)
for i, app in enumerate(self.apps):
if app.build_dir in app_lists:
bin_found[i] = 1
if sum(bin_found) == 0:
msg = f'Skip test case {self.name} because all following binaries are not listed in the app lists: '
for app in self.apps:
msg += f'\n - {app.build_dir}'
print(msg)
return msg
if sum(bin_found) == len(self.apps):
return None
# some found, some not, looks suspicious
msg = f'Found some binaries of test case {self.name} are not listed in the app lists.'
for i, app in enumerate(self.apps):
if bin_found[i] == 0:
msg += f'\n - {app.build_dir}'
msg += '\nMight be a issue of .build-test-rules.yml files'
print(msg)
return msg

View File

@@ -1,9 +1,10 @@
# SPDX-FileCopyrightText: 2023 Espressif Systems (Shanghai) CO LTD
# SPDX-FileCopyrightText: 2023-2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import logging
import os
import typing as t
from collections import defaultdict
from functools import cached_property
from xml.etree import ElementTree as ET
import pytest
@@ -11,17 +12,20 @@ from _pytest.config import ExitCode
from _pytest.main import Session
from _pytest.python import Function
from _pytest.runner import CallInfo
from idf_build_apps import App
from idf_build_apps.constants import BuildStatus
from pytest_embedded import Dut
from pytest_embedded.plugin import parse_multi_dut_args
from pytest_embedded.utils import find_by_suffix, to_list
from pytest_ignore_test_results.ignore_results import ChildCase, ChildCasesStashKey
from .constants import DEFAULT_SDKCONFIG, PREVIEW_TARGETS, SUPPORTED_TARGETS, PytestApp, PytestCase
from .utils import format_case_id, merge_junit_files
from .constants import DEFAULT_SDKCONFIG, PREVIEW_TARGETS, SUPPORTED_TARGETS, CollectMode, PytestApp, PytestCase
from .utils import comma_sep_str_to_list, format_case_id, merge_junit_files
IDF_PYTEST_EMBEDDED_KEY = pytest.StashKey['IdfPytestEmbedded']()
ITEM_FAILED_CASES_KEY = pytest.StashKey[list]()
ITEM_FAILED_KEY = pytest.StashKey[bool]()
ITEM_PYTEST_CASE_KEY = pytest.StashKey[PytestCase]()
class IdfPytestEmbedded:
@@ -33,80 +37,133 @@ class IdfPytestEmbedded:
def __init__(
self,
target: str,
sdkconfig: t.Optional[str] = None,
apps_list: t.Optional[t.List[str]] = None,
target: t.Union[t.List[str], str],
*,
single_target_duplicate_mode: bool = False,
apps: t.Optional[t.List[App]] = None,
):
# CLI options to filter the test cases
self.target = target.lower()
self.sdkconfig = sdkconfig
self.apps_list = apps_list
if isinstance(target, str):
self.target = sorted(comma_sep_str_to_list(target))
else:
self.target = sorted(target)
if not self.target:
raise ValueError('`target` should not be empty')
# these are useful while gathering all the multi-dut test cases
# when this mode is activated,
#
# pytest.mark.esp32
# pytest.mark.parametrize('count', [2], indirect=True)
# def test_foo(dut):
# pass
#
# should be collected when running `pytest --target esp32`
#
# otherwise, it should be collected when running `pytest --target esp32,esp32`
self._single_target_duplicate_mode = single_target_duplicate_mode
self.apps_list = (
[os.path.join(app.app_dir, app.build_dir) for app in apps if app.build_status == BuildStatus.SUCCESS]
if apps
else None
)
self.cases: t.List[PytestCase] = []
# record the additional info
# test case id: {key: value}
self.additional_info: t.Dict[str, t.Dict[str, t.Any]] = defaultdict(dict)
@cached_property
def collect_mode(self) -> CollectMode:
if len(self.target) == 1:
if self.target[0] == CollectMode.MULTI_ALL_WITH_PARAM:
return CollectMode.MULTI_ALL_WITH_PARAM
else:
return CollectMode.SINGLE_SPECIFIC
else:
return CollectMode.MULTI_SPECIFIC
@staticmethod
def get_param(item: Function, key: str, default: t.Any = None) -> t.Any:
# implement like this since this is a limitation of pytest, couldn't get fixture values while collecting
# https://github.com/pytest-dev/pytest/discussions/9689
# funcargs is not calculated while collection
# callspec is something defined in parametrize
if not hasattr(item, 'callspec'):
return default
return item.callspec.params.get(key, default) or default
def item_to_pytest_case(self, item: Function) -> PytestCase:
count = 1
case_path = str(item.path)
case_name = item.originalname
target = self.target
"""
Turn pytest item to PytestCase
"""
count = self.get_param(item, 'count', 1)
# funcargs is not calculated while collection
if hasattr(item, 'callspec'):
count = item.callspec.params.get('count', 1)
app_paths = to_list(
parse_multi_dut_args(
count,
self.get_param(item, 'app_path', os.path.dirname(case_path)),
)
)
configs = to_list(parse_multi_dut_args(count, self.get_param(item, 'config', 'default')))
targets = to_list(parse_multi_dut_args(count, self.get_param(item, 'target', target)))
else:
app_paths = [os.path.dirname(case_path)]
configs = ['default']
targets = [target]
# default app_path is where the test script locates
app_paths = to_list(parse_multi_dut_args(count, self.get_param(item, 'app_path', os.path.dirname(item.path))))
configs = to_list(parse_multi_dut_args(count, self.get_param(item, 'config', DEFAULT_SDKCONFIG)))
targets = to_list(parse_multi_dut_args(count, self.get_param(item, 'target', self.target[0])))
case_apps = set()
for i in range(count):
case_apps.add(PytestApp(app_paths[i], targets[i], configs[i]))
def abspath_or_relpath(s: str) -> str:
if os.path.abspath(s) and s.startswith(os.getcwd()):
return os.path.relpath(s)
return s
return PytestCase(
case_path,
case_name,
case_apps,
self.target,
item,
[PytestApp(abspath_or_relpath(app_paths[i]), targets[i], configs[i]) for i in range(count)], item
)
@pytest.hookimpl(tryfirst=True)
def pytest_sessionstart(self, session: Session) -> None:
# same behavior for vanilla pytest-embedded '--target'
session.config.option.target = self.target
@pytest.hookimpl(tryfirst=True)
def pytest_collection_modifyitems(self, items: t.List[Function]) -> None:
item_to_case: t.Dict[Function, PytestCase] = {}
"""
Background info:
# Add Markers to the test cases
We're using `pytest.mark.[TARGET]` as a syntactic sugar to indicate that they are actually supported by all
the listed targets. For example,
>>> @pytest.mark.esp32
>>> @pytest.mark.esp32s2
should be treated as
>>> @pytest.mark.parametrize('target', [
>>> 'esp32',
>>> 'esp32s2',
>>> ], indirect=True)
All single-dut test cases, and some of the multi-dut test cases with the same targets, are using this
way to indicate the supported targets.
To avoid ambiguity,
- when we're collecting single-dut test cases with esp32, we call
`pytest --collect-only --target esp32`
- when we're collecting multi-dut test cases, we list all the targets, even when they're the same
`pytest --collect-only --target esp32,esp32` for two esp32 connected
`pytest --collect-only --target esp32,esp32s2` for esp32 and esp32s2 connected
therefore, we have two different logic for searching test cases, explained in 2.1 and 2.2
"""
# 1. Filter according to nighty_run related markers
if os.getenv('INCLUDE_NIGHTLY_RUN') == '1':
# nightly_run and non-nightly_run cases are both included
pass
elif os.getenv('NIGHTLY_RUN') == '1':
# only nightly_run cases are included
items[:] = [_item for _item in items if _item.get_closest_marker('nightly_run') is not None]
else:
# only non-nightly_run cases are included
items[:] = [_item for _item in items if _item.get_closest_marker('nightly_run') is None]
# 2. Add markers according to special markers
item_to_case_dict: t.Dict[Function, PytestCase] = {}
for item in items:
# generate PytestCase for each item
case = self.item_to_pytest_case(item)
item_to_case[item] = case
# set default timeout 10 minutes for each case
if 'timeout' not in item.keywords:
item.add_marker(pytest.mark.timeout(10 * 60))
# add markers for special markers
item.stash[ITEM_PYTEST_CASE_KEY] = item_to_case_dict[item] = self.item_to_pytest_case(item)
if 'supported_targets' in item.keywords:
for _target in SUPPORTED_TARGETS:
item.add_marker(_target)
@@ -117,75 +174,63 @@ class IdfPytestEmbedded:
for _target in [*SUPPORTED_TARGETS, *PREVIEW_TARGETS]:
item.add_marker(_target)
# 3.1. CollectMode.SINGLE_SPECIFIC, like `pytest --target esp32`
if self.collect_mode == CollectMode.SINGLE_SPECIFIC:
filtered_items = []
for item in items:
case = item_to_case_dict[item]
# single-dut one
if case.is_single_dut_test_case and self.target[0] in case.target_markers:
filtered_items.append(item)
# multi-dut ones and in single_target_duplicate_mode
elif self._single_target_duplicate_mode and not case.is_single_dut_test_case:
# ignore those test cases with `target` defined in parametrize, since these will be covered in 3.3
if self.get_param(item, 'target', None) is None and self.target[0] in case.target_markers:
filtered_items.append(item)
items[:] = filtered_items
# 3.2. CollectMode.MULTI_SPECIFIC, like `pytest --target esp32,esp32`
elif self.collect_mode == CollectMode.MULTI_SPECIFIC:
items[:] = [_item for _item in items if item_to_case_dict[_item].targets == self.target]
# 3.3. CollectMode.MULTI_ALL_WITH_PARAM, intended to be used by `get_pytest_cases`
else:
items[:] = [
_item
for _item in items
if not item_to_case_dict[_item].is_single_dut_test_case
and self.get_param(_item, 'target', None) is not None
]
# 4. filter by `self.apps_list`, skip the test case if not listed
# should only be used in CI
_items = []
for item in items:
case = item_to_case_dict[item]
if msg := case.all_built_in_app_lists(self.apps_list):
self.additional_info[case.name]['skip_reason'] = msg
else:
_items.append(item)
# OKAY!!! All left ones will be executed, sort it and add more markers
items[:] = sorted(
_items, key=lambda x: (os.path.dirname(x.path), self.get_param(x, 'config', DEFAULT_SDKCONFIG))
)
for item in items:
case = item_to_case_dict[item]
# set default timeout 10 minutes for each case
if 'timeout' not in item.keywords:
item.add_marker(pytest.mark.timeout(10 * 60))
# add 'xtal_40mhz' tag as a default tag for esp32c2 target
# only add this marker for esp32c2 cases
if self.target == 'esp32c2' and 'esp32c2' in case.target_markers and 'xtal_26mhz' not in case.all_markers:
if 'esp32c2' in self.target and 'esp32c2' in case.targets and 'xtal_26mhz' not in case.all_markers:
item.add_marker('xtal_40mhz')
# Filter the test cases
filtered_items = []
for item in items:
case = item_to_case[item]
# filter by "nightly_run" marker
if os.getenv('INCLUDE_NIGHTLY_RUN') == '1':
# Do not filter nightly_run cases
pass
elif os.getenv('NIGHTLY_RUN') == '1':
if not case.is_nightly_run:
logging.debug(
'Skipping test case %s because of this test case is not a nightly run test case', item.name
)
continue
else:
if case.is_nightly_run:
logging.debug(
'Skipping test case %s because of this test case is a nightly run test case', item.name
)
continue
# filter by target
if self.target not in case.target_markers:
continue
if self.target in case.skipped_targets:
continue
# filter by sdkconfig
if self.sdkconfig:
if self.get_param(item, 'config', DEFAULT_SDKCONFIG) != self.sdkconfig:
continue
# filter by apps_list, skip the test case if not listed
# should only be used in CI
if self.apps_list is not None:
bin_not_found = False
for case_app in case.apps:
# in ci, always use build_<target>_<config> as build dir
binary_path = os.path.join(case_app.path, f'build_{case_app.target}_{case_app.config}')
if binary_path not in self.apps_list:
logging.info(
'Skipping test case %s because binary path %s is not listed in app info list files',
item.name,
binary_path,
)
bin_not_found = True
break
if bin_not_found:
continue
# finally!
filtered_items.append(item)
# sort the test cases with (app folder, config)
items[:] = sorted(
filtered_items,
key=lambda x: (os.path.dirname(x.path), self.get_param(x, 'config', DEFAULT_SDKCONFIG))
)
def pytest_report_collectionfinish(self, items: t.List[Function]) -> None:
for item in items:
self.cases.append(self.item_to_pytest_case(item))
self.cases = [item.stash[ITEM_PYTEST_CASE_KEY] for item in items]
def pytest_custom_test_case_name(self, item: Function) -> str:
return item.funcargs.get('test_case_name', item.nodeid) # type: ignore
@@ -252,6 +297,9 @@ class IdfPytestEmbedded:
if 'file' in case.attrib:
case.attrib['file'] = case.attrib['file'].replace('/IDF/', '') # our unity test framework
if ci_job_url := os.getenv('CI_JOB_URL'):
case.attrib['ci_job_url'] = ci_job_url
xml.write(junit)
def pytest_sessionfinish(self, session: Session, exitstatus: int) -> None:

View File

@@ -0,0 +1,2 @@
[pytest]
python_files = test_*.py

View File

@@ -1,18 +1,24 @@
# SPDX-FileCopyrightText: 2023 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import fnmatch
import io
import logging
import os.path
import typing as t
from contextlib import redirect_stdout
from pathlib import Path
import pytest
from _pytest.config import ExitCode
from idf_build_apps import App, find_apps
from idf_build_apps.constants import SUPPORTED_TARGETS, BuildStatus
from idf_ci.app import IdfCMakeApp
from idf_ci_utils import IDF_PATH, get_all_manifest_files, to_list
from idf_py_actions.constants import PREVIEW_TARGETS as TOOLS_PREVIEW_TARGETS
from idf_py_actions.constants import SUPPORTED_TARGETS as TOOLS_SUPPORTED_TARGETS
from pytest_embedded.utils import to_list
from .constants import PytestCase
from .constants import DEFAULT_BUILD_LOG_FILENAME, DEFAULT_CONFIG_RULES_STR, CollectMode, PytestCase
from .plugin import IdfPytestEmbedded
@@ -35,15 +41,28 @@ def get_pytest_files(paths: t.List[str]) -> t.List[str]:
def get_pytest_cases(
paths: t.Union[str, t.List[str]],
target: str = 'all',
target: str = CollectMode.ALL,
*,
marker_expr: t.Optional[str] = None,
filter_expr: t.Optional[str] = None,
apps: t.Optional[t.List[App]] = None,
) -> t.List[PytestCase]:
if target == 'all':
targets = TOOLS_SUPPORTED_TARGETS + TOOLS_PREVIEW_TARGETS
else:
targets = [target]
"""
For single-dut test cases, `target` could be
- [TARGET], e.g. `esp32`, to get the test cases for the given target
- or `single_all`, to get all single-dut test cases
For multi-dut test cases, `target` could be
- [TARGET,[TARGET...]], e.g. `esp32,esp32s2`, to get the test cases for the given targets
- or `multi_all`, to get all multi-dut test cases
:param paths: paths to search for pytest scripts
:param target: target or keywords to get test cases for, detailed above
:param marker_expr: pytest marker expression, `-m`
:param filter_expr: pytest filter expression, `-k`
:param apps: built app list, skip the tests required by apps not in the list
:return: list of test cases
"""
paths = to_list(paths)
cases: t.List[PytestCase] = []
@@ -52,12 +71,12 @@ def get_pytest_cases(
print(f'WARNING: no pytest scripts found for target {target} under paths {", ".join(paths)}')
return cases
for target in targets:
collector = IdfPytestEmbedded(target)
def _get_pytest_cases(_target: str, _single_target_duplicate_mode: bool = False) -> t.List[PytestCase]:
collector = IdfPytestEmbedded(_target, single_target_duplicate_mode=_single_target_duplicate_mode, apps=apps)
with io.StringIO() as buf:
with redirect_stdout(buf):
cmd = ['--collect-only', *pytest_scripts, '--target', target, '-q']
cmd = ['--collect-only', *pytest_scripts, '--target', _target, '-q']
if marker_expr:
cmd.extend(['-m', marker_expr])
if filter_expr:
@@ -66,11 +85,129 @@ def get_pytest_cases(
if res.value != ExitCode.OK:
if res.value == ExitCode.NO_TESTS_COLLECTED:
print(f'WARNING: no pytest app found for target {target} under paths {", ".join(paths)}')
print(f'WARNING: no pytest app found for target {_target} under paths {", ".join(paths)}')
else:
print(buf.getvalue())
raise RuntimeError(f'pytest collection failed at {", ".join(paths)} with command \"{" ".join(cmd)}\"')
raise RuntimeError(
f'pytest collection failed at {", ".join(paths)} with command \"{" ".join(cmd)}\"'
)
cases.extend(collector.cases)
return collector.cases # type: ignore
return cases
if target == CollectMode.ALL:
targets = TOOLS_SUPPORTED_TARGETS + TOOLS_PREVIEW_TARGETS + [CollectMode.MULTI_ALL_WITH_PARAM]
else:
targets = [target]
for _target in targets:
if target == CollectMode.ALL:
cases.extend(_get_pytest_cases(_target, _single_target_duplicate_mode=True))
else:
cases.extend(_get_pytest_cases(_target))
return sorted(cases, key=lambda x: (x.path, x.name, str(x.targets)))
def get_all_apps(
paths: t.List[str],
target: str = CollectMode.ALL,
*,
marker_expr: t.Optional[str] = None,
filter_expr: t.Optional[str] = None,
config_rules_str: t.Optional[t.List[str]] = None,
preserve_all: bool = False,
extra_default_build_targets: t.Optional[t.List[str]] = None,
modified_components: t.Optional[t.List[str]] = None,
modified_files: t.Optional[t.List[str]] = None,
ignore_app_dependencies_filepatterns: t.Optional[t.List[str]] = None,
) -> t.Tuple[t.Set[App], t.Set[App]]:
"""
Return the tuple of test-required apps and non-test-related apps
:param paths: paths to search for pytest scripts
:param target: target or keywords to get test cases for, explained in `get_pytest_cases`
:param marker_expr: pytest marker expression, `-m`
:param filter_expr: pytest filter expression, `-k`
:param config_rules_str: config rules string
:param preserve_all: preserve all apps
:param extra_default_build_targets: extra default build targets
:param modified_components: modified components
:param modified_files: modified files
:param ignore_app_dependencies_filepatterns: ignore app dependencies filepatterns
:return: tuple of test-required apps and non-test-related apps
"""
# Discover every buildable app under `paths`, including skipped ones, so
# that test-case requirements can be matched against the full app set.
all_apps = find_apps(
paths,
target,
build_system=IdfCMakeApp,
recursive=True,
build_dir='build_@t_@w',
config_rules_str=config_rules_str or DEFAULT_CONFIG_RULES_STR,
build_log_filename=DEFAULT_BUILD_LOG_FILENAME,
size_json_filename='size.json',
check_warnings=True,
manifest_rootpath=IDF_PATH,
manifest_files=get_all_manifest_files(),
default_build_targets=SUPPORTED_TARGETS + (extra_default_build_targets or []),
modified_components=modified_components,
modified_files=modified_files,
ignore_app_dependencies_filepatterns=ignore_app_dependencies_filepatterns,
include_skipped_apps=True,
)
# All test cases collected under `paths` for the requested target(s).
pytest_cases = get_pytest_cases(
paths,
target,
marker_expr=marker_expr,
filter_expr=filter_expr,
)
# Test cases whose pytest scripts were touched in this change set; their
# apps must be (re)built even if dependency analysis would skip them.
modified_pytest_cases = []
if modified_files:
modified_pytest_scripts = [
os.path.dirname(f) for f in modified_files if fnmatch.fnmatch(os.path.basename(f), 'pytest_*.py')
]
if modified_pytest_scripts:
modified_pytest_cases = get_pytest_cases(
modified_pytest_scripts,
target,
marker_expr=marker_expr,
filter_expr=filter_expr,
)
# Index cases by (app_path, target, config) for O(1) lookup per app below.
# app_path, target, config
pytest_app_path_tuple_dict: t.Dict[t.Tuple[Path, str, str], PytestCase] = {}
for case in pytest_cases:
for app in case.apps:
pytest_app_path_tuple_dict[(Path(app.path), app.target, app.config)] = case
modified_pytest_app_path_tuple_dict: t.Dict[t.Tuple[Path, str, str], PytestCase] = {}
for case in modified_pytest_cases:
for app in case.apps:
modified_pytest_app_path_tuple_dict[(Path(app.path), app.target, app.config)] = case
test_related_apps: t.Set[App] = set()
non_test_related_apps: t.Set[App] = set()
for app in all_apps:
# override build_status if test script got modified
if case := modified_pytest_app_path_tuple_dict.get((Path(app.app_dir), app.target, app.config_name)):
test_related_apps.add(app)
app.build_status = BuildStatus.SHOULD_BE_BUILT
app.preserve = True
logging.debug('Found app: %s - required by modified test case %s', app, case.path)
elif app.build_status != BuildStatus.SKIPPED:
if case := pytest_app_path_tuple_dict.get((Path(app.app_dir), app.target, app.config_name)):
test_related_apps.add(app)
# should be built, since a collected test case requires this app
app.build_status = BuildStatus.SHOULD_BE_BUILT
app.preserve = True
logging.debug('Found test-related app: %s - required by %s', app, case.path)
else:
# no test case needs it; only preserved when preserve_all is set
non_test_related_apps.add(app)
app.preserve = preserve_all
logging.debug('Found non-test-related app: %s', app)
print(f'Found {len(test_related_apps)} test-related apps')
print(f'Found {len(non_test_related_apps)} non-test-related apps')
return test_related_apps, non_test_related_apps

View File

@@ -0,0 +1,48 @@
# SPDX-FileCopyrightText: 2023 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import os
import sys
from pathlib import Path
# Make `tools/ci` importable so tests can `import idf_ci_utils` etc.
tools_ci_dir = os.path.join(os.path.dirname(__file__), '..', '..')
if tools_ci_dir not in sys.path:
sys.path.append(tools_ci_dir)
# Make `tools` importable so tests can reach `idf_py_actions` and friends.
tools_dir = os.path.join(os.path.dirname(__file__), '..', '..', '..')
if tools_dir not in sys.path:
sys.path.append(tools_dir)
def create_project(name: str, folder: Path) -> Path:
"""Create a minimal buildable IDF project named ``name`` under ``folder``.

Generates the top-level CMakeLists.txt, a ``main`` component with its own
CMakeLists.txt, and a stub ``<name>.c`` defining an empty ``app_main``.

:return: path of the newly created project directory
"""
p = folder / name
p.mkdir(parents=True, exist_ok=True)
(p / 'main').mkdir(parents=True, exist_ok=True)
# Top-level CMakeLists: `{{`/`}}` are escapes for str.format, emitting $ENV{IDF_PATH}.
with open(p / 'CMakeLists.txt', 'w') as fw:
fw.write(
"""cmake_minimum_required(VERSION 3.16)
include($ENV{{IDF_PATH}}/tools/cmake/project.cmake)
project({})
""".format(
name
)
)
# `main` component registering the single source file.
with open(p / 'main' / 'CMakeLists.txt', 'w') as fw:
fw.write(
"""idf_component_register(SRCS "{}.c"
INCLUDE_DIRS ".")
""".format(
name
)
)
# Stub source with an empty app_main so the project links.
with open(p / 'main' / f'{name}.c', 'w') as fw:
fw.write(
"""#include <stdio.h>
void app_main(void) {}
"""
)
return p

View File

@@ -0,0 +1,100 @@
# SPDX-FileCopyrightText: 2023-2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
from pathlib import Path
from idf_pytest.script import SUPPORTED_TARGETS, get_all_apps
from conftest import create_project
# Two projects without any pytest script: nothing is test-related, and each
# project yields one app per supported target.
def test_get_all_apps_non(tmp_path: Path) -> None:
create_project('foo', tmp_path)
create_project('bar', tmp_path)
test_related_apps, non_test_related_apps = get_all_apps([str(tmp_path)])
assert test_related_apps == set()
assert len(non_test_related_apps) == 2 * len(SUPPORTED_TARGETS)
# A single-dut test script marked esp32 + esp32s2 makes exactly those two
# builds of `foo` test-related; everything else stays non-test-related.
def test_get_all_apps_single_dut_test_script(tmp_path: Path) -> None:
create_project('foo', tmp_path)
with open(tmp_path / 'foo' / 'pytest_get_all_apps_single_dut_test_script.py', 'w') as fw:
fw.write(
"""import pytest
@pytest.mark.esp32
@pytest.mark.esp32s2
def test_foo(dut):
pass
"""
)
create_project('bar', tmp_path)
test_related_apps, non_test_related_apps = get_all_apps([str(tmp_path)], target='all')
# foo-esp32 and foo-esp32s2 move from non-test-related to test-related
assert len(test_related_apps) == 2
assert len(non_test_related_apps) == 2 * len(SUPPORTED_TARGETS) - 2
# A multi-dut script with explicit `target` parametrization: the union of
# targets across both parametrizations (esp32, esp32s2, esp32s3) becomes
# test-related.
def test_get_all_apps_multi_dut_test_script(tmp_path: Path) -> None:
create_project('foo', tmp_path)
with open(tmp_path / 'foo' / 'pytest_get_all_apps_multi_dut_test_script.py', 'w') as fw:
fw.write(
"""import pytest
@pytest.mark.parametrize(
'count, target', [
(2, 'esp32s2|esp32s3'),
(3, 'esp32|esp32s3|esp32'),
], indirect=True
)
def test_foo(dut):
pass
"""
)
test_related_apps, non_test_related_apps = get_all_apps([str(tmp_path)], target='all')
assert len(test_related_apps) == 3 # 32, s2, s3
assert len(non_test_related_apps) == len(SUPPORTED_TARGETS) - 3
# When only `modified_files` contains a pytest script, the apps that script
# requires are force-included even though no component was modified.
def test_get_all_apps_modified_pytest_script(tmp_path: Path) -> None:
create_project('foo', tmp_path)
create_project('bar', tmp_path)
(tmp_path / 'pytest_get_all_apps_modified_pytest_script.py').write_text(
"""import pytest
import os
@pytest.mark.parametrize('count, target', [(2, 'esp32')], indirect=True)
@pytest.mark.parametrize('app_path', [
'{}|{}'.format(os.path.join(os.path.dirname(__file__), 'foo'), os.path.join(os.path.dirname(__file__), 'bar')),
], indirect=True
)
def test_multi_foo_bar(dut):
pass
""",
encoding='utf-8',
)
# baseline: no modified-file filtering at all
test_related_apps, non_test_related_apps = get_all_apps([str(tmp_path)], target='all')
assert len(test_related_apps) == 2 # foo-esp32, bar-esp32
assert len(non_test_related_apps) == 2 * len(SUPPORTED_TARGETS) - 2
# empty modification sets: nothing needs to be built
test_related_apps, non_test_related_apps = get_all_apps(
[str(tmp_path)], target='all', modified_files=[], modified_components=[]
)
assert len(test_related_apps) == 0
assert len(non_test_related_apps) == 0
# the pytest script itself modified: its two required apps come back
test_related_apps, non_test_related_apps = get_all_apps(
[str(tmp_path)],
target='all',
modified_files=[str(tmp_path / 'pytest_get_all_apps_modified_pytest_script.py')],
modified_components=[],
)
assert len(test_related_apps) == 2
assert len(non_test_related_apps) == 0

View File

@@ -0,0 +1,86 @@
# SPDX-FileCopyrightText: 2023 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
from pathlib import Path
from idf_pytest.constants import CollectMode
from idf_pytest.script import get_pytest_cases
# Pytest script content shared by the collection tests below: one single-dut
# case (esp32/esp32s2 markers), one multi-dut case with explicit `target`
# parametrization, and one multi-dut case driven only by target markers.
TEMPLATE_SCRIPT = '''
import pytest
@pytest.mark.esp32
@pytest.mark.esp32s2
def test_foo_single(dut):
pass
@pytest.mark.parametrize(
'count, target', [
(2, 'esp32|esp32s2'),
(3, 'esp32s2|esp32s2|esp32s3'),
], indirect=True
)
def test_foo_multi(dut):
pass
@pytest.mark.esp32
@pytest.mark.esp32s2
@pytest.mark.parametrize(
'count', [2], indirect=True
)
def test_foo_multi_with_marker(dut):
pass
'''
# `--target esp32` collects only the single-dut case supporting esp32.
def test_get_pytest_cases_single_specific(tmp_path: Path) -> None:
script = tmp_path / 'pytest_get_pytest_cases_single_specific.py'
script.write_text(TEMPLATE_SCRIPT)
cases = get_pytest_cases([str(tmp_path)], 'esp32')
assert len(cases) == 1
assert cases[0].targets == ['esp32']
# Comma-separated targets collect the matching multi-dut case; the order and
# surrounding whitespace of the CLI value do not matter (targets are sorted).
def test_get_pytest_cases_multi_specific(tmp_path: Path) -> None:
script = tmp_path / 'pytest_get_pytest_cases_multi_specific.py'
script.write_text(TEMPLATE_SCRIPT)
cases = get_pytest_cases([str(tmp_path)], 'esp32s3,esp32s2, esp32s2')
assert len(cases) == 1
assert cases[0].targets == ['esp32s2', 'esp32s2', 'esp32s3']
# MULTI_ALL_WITH_PARAM collects only multi-dut cases that parametrize
# `target` explicitly — the marker-driven multi-dut case is excluded.
def test_get_pytest_cases_multi_all(tmp_path: Path) -> None:
script = tmp_path / 'pytest_get_pytest_cases_multi_all.py'
script.write_text(TEMPLATE_SCRIPT)
cases = get_pytest_cases([str(tmp_path)], CollectMode.MULTI_ALL_WITH_PARAM)
assert len(cases) == 2
assert cases[0].targets == ['esp32', 'esp32s2']
assert cases[1].targets == ['esp32s2', 'esp32s2', 'esp32s3']
def test_get_pytest_cases_all(tmp_path: Path) -> None:
    """CollectMode.ALL expands every marker/parametrization combination in the
    template: two parametrized multi cases, two marker-derived multi cases
    (count=2 per target marker), and two single-DUT cases."""
    (tmp_path / 'pytest_get_pytest_cases_all.py').write_text(TEMPLATE_SCRIPT)

    collected = get_pytest_cases([str(tmp_path)], CollectMode.ALL)

    # (targets, name) pairs in collection order.
    expected = [
        (['esp32', 'esp32s2'], 'test_foo_multi'),
        (['esp32s2', 'esp32s2', 'esp32s3'], 'test_foo_multi'),
        (['esp32', 'esp32'], 'test_foo_multi_with_marker'),
        (['esp32s2', 'esp32s2'], 'test_foo_multi_with_marker'),
        (['esp32'], 'test_foo_single'),
        (['esp32s2'], 'test_foo_single'),
    ]
    assert [(case.targets, case.name) for case in collected] == expected

View File

@@ -6,10 +6,10 @@ import os
import typing as t
from xml.etree import ElementTree as ET
from .constants import TARGET_MARKERS
def format_case_id(target: t.Optional[str], config: t.Optional[str], case: str, is_qemu: bool = False, params: t.Optional[dict] = None) -> str:
def format_case_id(
target: t.Optional[str], config: t.Optional[str], case: str, is_qemu: bool = False, params: t.Optional[dict] = None
) -> str:
parts = []
if target:
parts.append((str(target) + '_qemu') if is_qemu else str(target))
@@ -23,23 +23,6 @@ def format_case_id(target: t.Optional[str], config: t.Optional[str], case: str,
return '.'.join(parts)
def get_target_marker_from_expr(markexpr: str) -> str:
    """Extract the single target marker from a pytest ``-m`` mark expression.

    CI passes expressions like ``-m "esp32 and generic"``; splitting on the
    literal word ``and`` is sufficient for that usage, even though it is not a
    full boolean-expression parser.

    :param markexpr: pytest mark expression, e.g. ``'esp32 and generic'``
    :return: the one token that is a known target marker
    :raises ValueError: if zero or more than one target markers are found
    """
    candidates = {
        token.strip()
        for token in markexpr.split('and')
        if token.strip() in TARGET_MARKERS
    }

    if len(candidates) > 1:
        raise ValueError(f'Specified more than one target markers: {candidates}. Please specify no more than one.')
    elif len(candidates) == 1:
        return candidates.pop()
    else:
        raise ValueError('Please specify one target marker via "--target [TARGET]" or via "-m [TARGET]"')
def merge_junit_files(junit_files: t.List[str], target_path: str) -> None:
if len(junit_files) <= 1:
return
@@ -78,3 +61,7 @@ def merge_junit_files(junit_files: t.List[str], target_path: str) -> None:
with open(target_path, 'wb') as fw:
fw.write(ET.tostring(merged_testsuite))
def comma_sep_str_to_list(s: str) -> t.List[str]:
    """Split a comma-separated string into a list of stripped, non-empty items.

    :param s: comma-separated string, e.g. ``'esp32, esp32s2,'``
    :return: stripped non-empty parts, e.g. ``['esp32', 'esp32s2']``
    """
    # Fix: the original comprehension reused ``s`` as its loop variable,
    # shadowing the parameter; use a distinct name for clarity.
    return [part.strip() for part in s.split(',') if part.strip()]

View File

@@ -1,5 +1,6 @@
# SPDX-FileCopyrightText: 2022-2023 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import argparse
import logging
import os
@@ -10,7 +11,7 @@ import tempfile
import time
import zipfile
from functools import wraps
from typing import Any, Callable, Dict, List, Optional
from typing import Any, Callable, Dict, List, Optional, Union
import gitlab
@@ -63,7 +64,7 @@ class Gitlab(object):
DOWNLOAD_ERROR_MAX_RETRIES = 3
def __init__(self, project_id: Optional[int] = None):
def __init__(self, project_id: Union[int, str, None] = None):
config_data_from_env = os.getenv('PYTHON_GITLAB_CONFIG')
if config_data_from_env:
# prefer to load config from env variable
@@ -129,7 +130,7 @@ class Gitlab(object):
archive_file.extractall(destination)
@retry
def download_artifact(self, job_id: int, artifact_path: str, destination: Optional[str] = None) -> List[bytes]:
def download_artifact(self, job_id: int, artifact_path: List[str], destination: Optional[str] = None) -> List[bytes]:
"""
download specific path of job artifacts and extract to destination.

View File

@@ -3,7 +3,7 @@
# ci
coverage
idf-build-apps
idf-build-apps~=2.0.0rc1
jsonschema
junit_xml
python-gitlab

View File

@@ -10,7 +10,7 @@ pytest-timeout
pytest-ignore-test-results
# build
idf-build-apps
idf-build-apps~=2.0.0rc1
# dependencies in pytest test scripts
scapy

View File

@@ -52,6 +52,16 @@ tools/test_apps/protocols/mqtt/publish_connect_test:
- if: IDF_TARGET == "esp32s2" or IDF_TARGET == "esp32c3"
temporary: true
reason: lack of runners
depends_components:
- esp_eth
depends_filepatterns:
- tools/ci/python_packages/common_test_methods.py
- examples/common_components/**/*
- examples/protocols/**/*
- examples/system/ota/**/*
- examples/ethernet/iperf/**/*
- examples/network/vlan_support/**/*
- components/esp_netif/esp_netif_handlers.c
tools/test_apps/protocols/netif_components:
enable: