Merge branch 'ci/improve_overall_logging_v5.4' into 'release/v5.4'

ci: target-test job skip installing toolchain, only install python env (v5.4)

See merge request espressif/esp-idf!34905

@@ -12,7 +12,7 @@
     IDF_CCACHE_ENABLE: "1"
   dependencies: []
 
-.build_cmake_template:
+.build_cmake_clang_template:
   extends:
     - .build_template
     - .before_script:build
@@ -34,29 +34,11 @@
       - "**/build*/size.json"
     expire_in: 1 week
     when: always
-  script:
-    # CI specific options start from "--parallel-count xxx". could ignore when running locally
-    - run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v
-      -t $IDF_TARGET
-      --copy-sdkconfig
-      --parallel-count ${CI_NODE_TOTAL:-1}
-      --parallel-index ${CI_NODE_INDEX:-1}
-      --extra-preserve-dirs
-      examples/bluetooth/esp_ble_mesh/ble_mesh_console
-      examples/bluetooth/hci/controller_hci_uart_esp32
-      examples/wifi/iperf
-      --modified-components ${MR_MODIFIED_COMPONENTS}
-      --modified-files ${MR_MODIFIED_FILES}
-    # for detailed documents, please refer to .gitlab/ci/README.md#uploaddownload-artifacts-to-internal-minio-server
-    - python tools/ci/artifacts_handler.py upload
-
-.build_cmake_clang_template:
-  extends:
-    - .build_cmake_template
   variables:
     IDF_TOOLCHAIN: clang
     TEST_BUILD_OPTS_EXTRA: ""
     TEST_DIR: tools/test_apps/system/clang_build_test
+    PYTEST_IGNORE_COLLECT_IMPORT_ERROR: "1"
   script:
     # CI specific options start from "--parallel-count xxx". could ignore when running locally
     - run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v
@@ -126,7 +108,7 @@ gcc_static_analyzer:
     ANALYZING_APP: "examples/get-started/hello_world"
   script:
     - echo "CONFIG_COMPILER_STATIC_ANALYZER=y" >> ${ANALYZING_APP}/sdkconfig.defaults
-    - python -m idf_build_apps build -vv -p ${ANALYZING_APP} -t all
+    - python -m idf_build_apps build -v -p ${ANALYZING_APP} -t all
 
 ########################################
 # Clang Build Apps Without Tests Cases #
@@ -57,7 +57,7 @@ variables:
   # Docker images
   ESP_ENV_IMAGE: "${CI_DOCKER_REGISTRY}/esp-env-v5.4:1"
   ESP_IDF_DOC_ENV_IMAGE: "${CI_DOCKER_REGISTRY}/esp-idf-doc-env-v5.4:1-1"
-  TARGET_TEST_ENV_IMAGE: "${CI_DOCKER_REGISTRY}/target-test-env-v5.4:1"
+  TARGET_TEST_ENV_IMAGE: "${CI_DOCKER_REGISTRY}/target-test-env-v5.4:2"
   SONARQUBE_SCANNER_IMAGE: "${CI_DOCKER_REGISTRY}/sonarqube-scanner:5"
   PRE_COMMIT_IMAGE: "${CI_DOCKER_REGISTRY}/esp-idf-pre-commit:1"
 
@@ -141,40 +141,48 @@ variables:
     export IDF_MIRROR_PREFIX_MAP=
   fi
 
-  # install latest python packages
-  # target test jobs
-  if [[ "${CI_JOB_STAGE}" == "target_test" ]]; then
-    run_cmd bash install.sh --enable-ci --enable-pytest --enable-test-specific
-  elif [[ "${CI_JOB_STAGE}" == "build_doc" ]]; then
-    run_cmd bash install.sh --enable-ci --enable-docs
-  elif [[ "${CI_JOB_STAGE}" == "build" ]]; then
-    run_cmd bash install.sh --enable-ci
-  else
-    if ! echo "${CI_JOB_NAME}" | egrep ".*pytest.*"; then
-      run_cmd bash install.sh --enable-ci
-    else
-      run_cmd bash install.sh --enable-ci --enable-pytest --enable-test-specific
-    fi
-  fi
+  if [[ "${CI_JOB_STAGE}" != "target_test" ]]; then
+    section_start "running_install_sh" "Running install.sh"
+    if [[ "${CI_JOB_STAGE}" == "build_doc" ]]; then
+      run_cmd bash install.sh --enable-ci --enable-docs
+    elif [[ "${CI_JOB_STAGE}" == "build" ]]; then
+      run_cmd bash install.sh --enable-ci
+    else
+      if ! echo "${CI_JOB_NAME}" | egrep ".*pytest.*"; then
+        run_cmd bash install.sh --enable-ci
+      else
+        run_cmd bash install.sh --enable-ci --enable-pytest --enable-test-specific
+      fi
+    fi
+    section_end "running_install_sh"
+  else
+    section_start "install_python_env" "Install Python environment"
+    run_cmd python tools/idf_tools.py install-python-env --features ci,pytest,test-specific
+    section_end "install_python_env"
+  fi
 
-  # Install esp-clang if necessary
+  if [[ ! -z "$INSTALL_EXTRA_TOOLS" ]]; then
+    section_start "installing_optional_tools" "Install optional tools ${INSTALL_EXTRA_TOOLS}"
+    $IDF_PATH/tools/idf_tools.py --non-interactive install $INSTALL_EXTRA_TOOLS
+    section_end "installing_optional_tools"
+  fi
 
+  # Install esp-clang if necessary (esp-clang is separately installed)
   if [[ "$IDF_TOOLCHAIN" == "clang" && -z "$CI_CLANG_DISTRO_URL" ]]; then
     $IDF_PATH/tools/idf_tools.py --non-interactive install esp-clang
   fi
 
-  # Install QEMU if necessary
-  if [[ ! -z "$INSTALL_QEMU" ]]; then
-    $IDF_PATH/tools/idf_tools.py --non-interactive install qemu-xtensa qemu-riscv32
+  if [[ "${CI_JOB_STAGE}" == "target_test" ]]; then
+    section_start "IDF_SKIP_TOOLS_CHECK" "Skip required tools check"
+    export IDF_SKIP_TOOLS_CHECK=1
+    section_end "IDF_SKIP_TOOLS_CHECK"
   fi
-  # Since the version 3.21 CMake passes source files and include dirs to ninja using absolute paths.
-  # Needed for pytest junit reports.
-  $IDF_PATH/tools/idf_tools.py --non-interactive install cmake
-
+  section_start "source_export" "Source export.sh"
   source ./export.sh
+  section_end "source_export"
 
   # Custom clang toolchain
-  if [[ ! -z "$CI_CLANG_DISTRO_URL" ]]; then
+  if [[ "$IDF_TOOLCHAIN" == "clang" && ! -z "$CI_CLANG_DISTRO_URL" ]]; then
     echo "Using custom clang from ${CI_CLANG_DISTRO_URL}"
     wget $CI_CLANG_DISTRO_URL
     ARCH_NAME=$(basename $CI_CLANG_DISTRO_URL)
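
For a target_test job, the block above boils down to: skip install.sh (and with it the cross-toolchain download), provision only the IDF Python environment, then source export.sh with the required-tools check disabled. A condensed sketch of that path, assuming tools/ci/utils.sh has already been sourced so run_cmd and the section_* helpers exist:

    # target_test stage only: Python env instead of a full toolchain install
    run_cmd python tools/idf_tools.py install-python-env --features ci,pytest,test-specific
    # debug/test tools still come from the job's INSTALL_EXTRA_TOOLS list, if it sets one
    if [[ ! -z "$INSTALL_EXTRA_TOOLS" ]]; then
        $IDF_PATH/tools/idf_tools.py --non-interactive install $INSTALL_EXTRA_TOOLS
    fi
    # export.sh must not abort because the compilers are absent
    export IDF_SKIP_TOOLS_CHECK=1
    source ./export.sh
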
@@ -198,6 +206,8 @@ variables:
     rm -rf ${CI_PYTHON_TOOL_REPO}
   fi
 
+  info "setup tools and python venv done"
+
 .show_ccache_statistics: &show_ccache_statistics |
   # Show ccache statistics if enabled globally
   test "$CI_CCACHE_STATS" == 1 && test -n "$(which ccache)" && ccache --show-stats -vv || true
@@ -222,10 +232,11 @@ variables:
     - export IDF_TOOLS_PATH="${HOME}/.espressif_runner_${CI_RUNNER_ID}_${CI_CONCURRENT_ID}"
     # remove idf-env.json, since it may contains enabled "features"
     - rm -f $IDF_TOOLS_PATH/idf-env.json
-    - $IDF_PATH/tools/idf_tools.py --non-interactive install cmake ninja
     # This adds tools (compilers) and the version-specific Python environment to PATH
     - *setup_tools_and_idf_python_venv
     - fetch_submodules
+  variables:
+    INSTALL_EXTRA_TOOLS: cmake ninja
 
 .after_script:build:macos:upload-failed-job-logs:ccache-show-stats:
   after_script:
@@ -195,7 +195,7 @@ test_tools:
       junit: ${IDF_PATH}/XUNIT_*.xml
   variables:
     LC_ALL: C.UTF-8
-    INSTALL_QEMU: 1 # for test_idf_qemu.py
+    INSTALL_EXTRA_TOOLS: "qemu-xtensa qemu-riscv32" # for test_idf_qemu.py
   script:
     - stat=0
     - cd ${IDF_PATH}/tools/ci/test_autocomplete
@@ -282,9 +282,10 @@ test_pytest_qemu:
       junit: XUNIT_RESULT.xml
   parallel:
     matrix:
-      - IDF_TARGET: [esp32, esp32c3]
-  variables:
-    INSTALL_QEMU: 1
+      - IDF_TARGET: "esp32"
+        INSTALL_EXTRA_TOOLS: "qemu-xtensa"
+      - IDF_TARGET: "esp32c3"
+        INSTALL_EXTRA_TOOLS: "qemu-riscv32"
   script:
     - run_cmd python tools/ci/ci_build_apps.py . -v
       --target $IDF_TARGET
@@ -348,7 +349,7 @@ test_pytest_macos:
   variables:
     PYTEST_IGNORE_COLLECT_IMPORT_ERROR: "1"
   script:
-    - run_cmd python tools/ci/ci_build_apps.py components examples tools/test_apps -vv
+    - run_cmd python tools/ci/ci_build_apps.py components examples tools/test_apps -v
       --target linux
       --pytest-apps
       -m \"host_test and macos_shell\"
@@ -1,35 +1,9 @@
-# SPDX-FileCopyrightText: 2022-2023 Espressif Systems (Shanghai) CO LTD
+# SPDX-FileCopyrightText: 2022-2024 Espressif Systems (Shanghai) CO LTD
 # SPDX-License-Identifier: CC0-1.0
-import subprocess
-from os import path
-
 import pytest
-import yaml
 from pytest_embedded import Dut
-
-
-def validate_sbom(dut: Dut) -> None:
-    dirname = path.dirname(path.abspath(__file__))
-    sbom_file = path.join(path.dirname(path.dirname(dirname)), 'sbom.yml')
-    gcc_input_file = path.join(dirname, 'test_sbom', 'newlib_version.c')
-    gcc = 'riscv32-esp-elf-gcc'
-    if dut.target in dut.XTENSA_TARGETS:
-        gcc = f'xtensa-{dut.target}-elf-gcc'
-    gcc_process = subprocess.run(f'{gcc} -E {gcc_input_file}',
-                                 shell=True,
-                                 stdout=subprocess.PIPE,
-                                 stderr=subprocess.PIPE,
-                                 text=True,
-                                 check=True)
-    output_lines = gcc_process.stdout.splitlines()
-    assert output_lines, 'Can not get newlib version'
-    toolchain_newlib_version = output_lines[-1].replace(' ', '.')
-    with open(sbom_file, 'r', encoding='utf-8') as yaml_file:
-        sbom_newlib_version = yaml.safe_load(yaml_file).get('version')
-    assert sbom_newlib_version, 'Can not get newlib version from sbom.yml'
-    assert toolchain_newlib_version == sbom_newlib_version, 'toolchain_newlib_version != sbom_newlib_version'
 
 
 @pytest.mark.generic
 @pytest.mark.parametrize(
     'config',
@@ -44,5 +18,4 @@ def validate_sbom(dut: Dut) -> None:
     indirect=True
 )
 def test_newlib(dut: Dut) -> None:
-    validate_sbom(dut)
     dut.run_all_single_board_cases()
@@ -152,11 +152,8 @@ def _upload_files(
     try:
         if has_file:
             obj_name = f'{pipeline_id}/{artifact_type.value}/{sanitize_job_name(job_name)}/{job_id}.zip'
-            print(f'Created archive file: {job_id}.zip, uploading as {obj_name}')
-
             client.fput_object(getenv('IDF_S3_BUCKET'), obj_name, f'{job_id}.zip')
-            url = client.get_presigned_url('GET', getenv('IDF_S3_BUCKET'), obj_name)
-            print(f'Please download the archive file which includes {artifact_type.value} from {url}')
+            print(f'Created archive file: {job_id}.zip, uploaded as {obj_name}')
     finally:
         os.remove(f'{job_id}.zip')
 
@@ -54,7 +54,7 @@ build_stage2() {
     # Override EXTRA_CFLAGS and EXTRA_CXXFLAGS in the environment
     export EXTRA_CFLAGS=${PEDANTIC_CFLAGS/-Werror=unused-variable -Werror=unused-but-set-variable -Werror=unused-function/}
     export EXTRA_CXXFLAGS=${PEDANTIC_CXXFLAGS/-Werror=unused-variable -Werror=unused-but-set-variable -Werror=unused-function/}
-    python -m idf_build_apps build -vv \
+    python -m idf_build_apps build -v \
         -p ${TEMPLATE_APP_PATH} \
         -t all \
         ${CONFIG_STR} \
@@ -69,7 +69,7 @@ build_stage2() {
 
 build_stage1() {
     CONFIG_STR=$(get_config_str sdkconfig.ci2.*=)
-    python -m idf_build_apps build -vv \
+    python -m idf_build_apps build -v \
         -p ${TEMPLATE_APP_PATH} \
         -t all \
         ${CONFIG_STR} \
@@ -10,9 +10,13 @@
     - .after_script:build:ccache-show-stats:upload-failed-job-logs
   image: $ESP_ENV_IMAGE
   stage: build
+  timeout: 1 hour
   variables:
     # Enable ccache for all build jobs. See configure_ci_environment.sh for more ccache related settings.
     IDF_CCACHE_ENABLE: "1"
+    # Since the version 3.21 CMake passes source files and include dirs to ninja using absolute paths.
+    # Needed for pytest junit reports.
+    INSTALL_EXTRA_TOOLS: cmake
   needs:
     - pipeline: $PARENT_PIPELINE_ID
       job: generate_build_child_pipeline
@@ -51,6 +55,7 @@
     PYTEST_NODES: ""
     TARGET_SELECTOR: ""
     ENV_MARKERS: ""
+    INSTALL_EXTRA_TOOLS: "xtensa-esp-elf-gdb riscv32-esp-elf-gdb openocd-esp32 esp-rom-elfs"
     PYTEST_EXTRA_FLAGS: "--dev-passwd ${ETHERNET_TEST_PASSWORD} --dev-user ${ETHERNET_TEST_USER} --capture=fd --verbosity=0"
   cache:
     # Usually do not need submodule-cache in target_test
@@ -70,10 +75,10 @@
       expire_in: 1 week
   script:
     # get known failure cases
-    - python tools/ci/get_known_failure_cases_file.py
+    - run_cmd python tools/ci/get_known_failure_cases_file.py
     # get runner env config file
     - retry_failed git clone $TEST_ENV_CONFIG_REPO
-    - python $CHECKOUT_REF_SCRIPT ci-test-runner-configs ci-test-runner-configs
+    - run_cmd python $CHECKOUT_REF_SCRIPT ci-test-runner-configs ci-test-runner-configs
     # CI specific options start from "--known-failure-cases-file xxx". could ignore when running locally
     - run_cmd pytest ${PYTEST_NODES}
       --target ${TARGET_SELECTOR}
@@ -85,4 +90,7 @@
       --parallel-index ${CI_NODE_INDEX:-1}
       ${PYTEST_EXTRA_FLAGS}
   after_script:
-    - python tools/ci/artifacts_handler.py upload --type logs junit_reports
+    - source tools/ci/utils.sh
+    - section_start "upload_junit_reports"
+    - run_cmd python tools/ci/artifacts_handler.py upload --type logs junit_reports
+    - section_end "upload_junit_reports"
@@ -35,7 +35,9 @@ function add_doc_server_ssh_keys() {
 }
 
 function fetch_submodules() {
+    section_start "fetch_submodules" "Fetching submodules..."
     python "${SUBMODULE_FETCH_TOOL}" -s "${SUBMODULES_TO_FETCH}"
+    section_end "fetch_submodules"
 }
 
 function get_all_submodules() {
@@ -49,6 +51,19 @@ function set_component_ut_vars() {
     echo "exported variables COMPONENT_UT_DIRS, COMPONENT_UT_EXCLUDES"
 }
 
+# https://docs.gitlab.com/ee/ci/yaml/script.html#use-a-script-to-improve-display-of-collapsible-sections
+function section_start() {
+    local section_title="${1}"
+    local section_description="${2:-$section_title}"
+
+    echo -e "section_start:`date +%s`:${section_title}[collapsed=true]\r\e[0K${section_description}"
+}
+
+function section_end() {
+    local section_title="${1}"
+
+    echo -e "section_end:`date +%s`:${section_title}\r\e[0K"
+}
+
 function error() {
     printf "\033[0;31m%s\n\033[0m" "${1}" >&2
 }
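
The section_start/section_end helpers added above emit the markers GitLab folds into collapsible sections in the job log (the echoed "section_start:<unix-timestamp>:<name>[collapsed=true]" line opens a section, the matching "section_end" line closes it). A minimal usage sketch; the section name and the wrapped command are made up for illustration:

    source tools/ci/utils.sh
    section_start "build_hello_world" "Building hello_world"    # collapsed by default in the log
    idf.py -C examples/get-started/hello_world build             # any commands whose output should fold
    section_end "build_hello_world"
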
@@ -2122,8 +2122,12 @@ def process_tool(
 
     if not tool.versions_installed:
         if tool.get_install_type() == IDFTool.INSTALL_ALWAYS:
-            handle_missing_versions(tool, tool_name, install_cmd, prefer_system_hint)
-            tool_found = False
+            if os.getenv('IDF_SKIP_TOOLS_CHECK', '0') == '1':
+                warn(f'Tool {tool_name} is not installed and IDF_SKIP_TOOLS_CHECK is set. '
+                     'This may cause build failures.')
+            else:
+                handle_missing_versions(tool, tool_name, install_cmd, prefer_system_hint)
+                tool_found = False
         # If a tool found, but it is optional and does not have versions installed, use whatever is in PATH.
     return tool_export_paths, tool_export_vars, tool_found
 
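
With this change, a required tool that is not installed only triggers a warning when IDF_SKIP_TOOLS_CHECK=1 is set, which is what lets target-test runners source export.sh without the cross-compilers present. A rough local equivalent of what such a runner now does (run from an ESP-IDF checkout; a sketch, not the verbatim CI script):

    python tools/idf_tools.py install-python-env --features ci,pytest,test-specific
    export IDF_SKIP_TOOLS_CHECK=1
    . ./export.sh   # warns about uninstalled tools (e.g. the compilers) instead of failing
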
@@ -151,9 +151,15 @@ class PanicTestDut(IdfDut):
         logging.info('espcoredump output is written to %s', self.coredump_output.name)
 
         self.serial.close()
-        subprocess.check_call(espcoredump_args, stdout=self.coredump_output)
-        self.coredump_output.flush()
-        self.coredump_output.seek(0)
+        try:
+            subprocess.check_call(espcoredump_args, stdout=self.coredump_output, stderr=self.coredump_output)
+        except subprocess.CalledProcessError:
+            self.coredump_output.flush()
+            with open(output_file_name, 'r') as file:
+                logging.error('espcoredump failed with output: %s', file.read())
+            raise
+        finally:
+            self.coredump_output.seek(0)
 
     def process_coredump_uart(
         self, expected: Optional[List[Union[str, re.Pattern]]] = None, wait_reboot: bool = True
@@ -312,6 +312,19 @@ class TestUsage(TestUsageBase):
 
         self.assertNotIn(tool_to_test, output)
 
+    def test_export_with_required_tools_check_skipped(self):
+        self.run_idf_tools_with_error(['export'], assertError=True)
+
+        new_os_environ = os.environ.copy()
+        new_os_environ['IDF_SKIP_TOOLS_CHECK'] = '1'
+        with patch('os.environ', new_os_environ):
+            self.run_idf_tools_with_action(['export'])
+
+        self.run_idf_tools_with_action(['install', OPENOCD])
+        output = self.run_idf_tools_with_action(['export'])
+        self.assertIn('%s/tools/openocd-esp32/%s/openocd-esp32/bin' %
+                      (self.temp_tools_dir, OPENOCD_VERSION), output)
+
 
 # TestUsageUnix tests installed tools on UNIX platforms
 @unittest.skipIf(sys.platform == 'win32', reason='Tools for UNIX differ')