diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 061a6f75b3..640b238ff7 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,15 +1,3 @@
-stages:
-  - upload_cache
-  - pre_check
-  - build
-  - assign_test
-  - build_doc
-  - target_test
-  - host_test
-  - test_deploy
-  - deploy
-  - post_deploy
-
 workflow:
   rules:
     # Disable those non-protected push triggered pipelines
@@ -27,216 +15,10 @@ workflow:
         IS_MR_PIPELINE: 0
     - when: always
 
-variables:
-# System environment
-
-  # Common parameters for the 'make' during CI tests
-  MAKEFLAGS: "-j5 --no-keep-going"
-
-# GitLab-CI environment
-
-  # XXX_ATTEMPTS variables (https://docs.gitlab.com/ce/ci/yaml/README.html#job-stages-attempts) are not defined here.
-  # Use values from "CI / CD Settings" - "Variables".
-
-  # GIT_STRATEGY is not defined here.
-  # Use an option from "CI / CD Settings" - "General pipelines".
-
-  # we will download archive for each submodule instead of clone.
-  # we don't do "recursive" when fetch submodule as they're not used in CI now.
-  GIT_SUBMODULE_STRATEGY: none
-  SUBMODULE_FETCH_TOOL: "tools/ci/ci_fetch_submodule.py"
-  # by default we will fetch all submodules
-  # jobs can overwrite this variable to only fetch submodules they required
-  # set to "none" if don't need to fetch submodules
-  SUBMODULES_TO_FETCH: "all"
-  # tell build system do not check submodule update as we download archive instead of clone
-  IDF_SKIP_CHECK_SUBMODULES: 1
-
-  IDF_PATH: "$CI_PROJECT_DIR"
-  BATCH_BUILD: "1"
-  V: "0"
-  CHECKOUT_REF_SCRIPT: "$CI_PROJECT_DIR/tools/ci/checkout_project_ref.py"
-  PYTHON_VER: 3.8.17
-
-  # Docker images
-  BOT_DOCKER_IMAGE_TAG: ":latest"
-
-  ESP_ENV_IMAGE: "$CI_DOCKER_REGISTRY/esp-env-v5.2:2"
-  ESP_IDF_DOC_ENV_IMAGE: "$CI_DOCKER_REGISTRY/esp-idf-doc-env-v5.2:2-1"
-  QEMU_IMAGE: "${CI_DOCKER_REGISTRY}/qemu-v5.2:2-20230522"
-  TARGET_TEST_ENV_IMAGE: "$CI_DOCKER_REGISTRY/target-test-env-v5.2:2"
-
-  SONARQUBE_SCANNER_IMAGE: "${CI_DOCKER_REGISTRY}/sonarqube-scanner:5"
-
-  PRE_COMMIT_IMAGE: "$CI_DOCKER_REGISTRY/esp-idf-pre-commit:1"
-
-  # target test config file, used by assign test job
-  CI_TARGET_TEST_CONFIG_FILE: "$CI_PROJECT_DIR/.gitlab/ci/target-test.yml"
-
-  # target test repo parameters
-  TEST_ENV_CONFIG_REPO: "https://gitlab-ci-token:${BOT_TOKEN}@${CI_SERVER_HOST}:${CI_SERVER_PORT}/qa/ci-test-runner-configs.git"
-
-  # cache python dependencies
-  PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
-
-  # Set this variable to the branch of idf-constraints repo in order to test a custom Python constraint file. The
-  # branch name must be without the remote part ("origin/"). Keep the variable empty in order to use the constraint
-  # file from https://dl.espressif.com/dl/esp-idf.
-  CI_PYTHON_CONSTRAINT_BRANCH: ""
-
-  # Update the filename for a specific ESP-IDF release. It is used only with CI_PYTHON_CONSTRAINT_BRANCH.
-  CI_PYTHON_CONSTRAINT_FILE: "espidf.constraints.v5.2.txt"
-
-  # Set this variable to repository name of a Python tool you wish to install and test in the context of ESP-IDF CI.
-  # Keep the variable empty when not used.
-  CI_PYTHON_TOOL_REPO: ""
-
-  # Set this variable to the branch of a Python tool repo specified in CI_PYTHON_TOOL_REPO. The
-  # branch name must be without the remote part ("origin/"). Keep the variable empty when not used.
-  # This is used only if CI_PYTHON_TOOL_REPO is not empty.
-  CI_PYTHON_TOOL_BRANCH: ""
-
-  IDF_CI_BUILD: 1
-
-cache:
-  # pull only for most of the use cases since it's cache dir.
-  # Only set "push" policy for "upload_cache" stage jobs
-  - key: pip-cache
-    paths:
-      - .cache/pip
-    policy: pull
-  - key: submodule-cache
-    paths:
-      - .cache/submodule_archives
-    policy: pull
-
-
-.common_before_scripts: &common-before_scripts |
-  source tools/ci/utils.sh
-  is_based_on_commits $REQUIRED_ANCESTOR_COMMITS
-
-  if [[ -n "$IDF_DONT_USE_MIRRORS" ]]; then
-    export IDF_MIRROR_PREFIX_MAP=
-  fi
-
-  if echo "$CI_MERGE_REQUEST_LABELS" | egrep "(^|,)include_nightly_run(,|$)"; then
-    export INCLUDE_NIGHTLY_RUN="1"
-  fi
-
-  # configure cmake related flags
-  source tools/ci/configure_ci_environment.sh
-
-  # add extra python packages
-  export PYTHONPATH="$IDF_PATH/tools:$IDF_PATH/tools/esp_app_trace:$IDF_PATH/components/partition_table:$IDF_PATH/tools/ci/python_packages:$PYTHONPATH"
-
-.setup_tools_and_idf_python_venv: &setup_tools_and_idf_python_venv |
-  # must use after setup_tools_except_target_test
-  # otherwise the export.sh won't work properly
-
-  # download constraint file for dev
-  if [[ -n "$CI_PYTHON_CONSTRAINT_BRANCH" ]]; then
-    wget -O /tmp/constraint.txt --header="Authorization:Bearer ${ESPCI_TOKEN}" ${GITLAB_HTTP_SERVER}/api/v4/projects/2581/repository/files/${CI_PYTHON_CONSTRAINT_FILE}/raw?ref=${CI_PYTHON_CONSTRAINT_BRANCH}
-    mkdir -p ~/.espressif
-    mv /tmp/constraint.txt ~/.espressif/${CI_PYTHON_CONSTRAINT_FILE}
-  fi
-
-  # Mirror
-  if [[ -n "$IDF_DONT_USE_MIRRORS" ]]; then
-    export IDF_MIRROR_PREFIX_MAP=
-  fi
-
-  # install latest python packages
-  # target test jobs
-  if [[ "${CI_JOB_STAGE}" == "target_test" ]]; then
-    # ttfw jobs
-    if ! echo "${CI_JOB_NAME}" | egrep ".*pytest.*"; then
-      run_cmd bash install.sh --enable-ci --enable-ttfw
-    else
-      run_cmd bash install.sh --enable-ci --enable-pytest
-    fi
-  elif [[ "${CI_JOB_STAGE}" == "build_doc" ]]; then
-    run_cmd bash install.sh --enable-ci --enable-docs
-  elif [[ "${CI_JOB_STAGE}" == "build" ]]; then
-    run_cmd bash install.sh --enable-ci --enable-pytest
-  else
-    if ! echo "${CI_JOB_NAME}" | egrep ".*pytest.*"; then
-      run_cmd bash install.sh --enable-ci
-    else
-      run_cmd bash install.sh --enable-ci --enable-pytest
-    fi
-  fi
-
-  # Install esp-clang if necessary
-  if [[ "$IDF_TOOLCHAIN" == "clang" ]]; then
-    $IDF_PATH/tools/idf_tools.py --non-interactive install esp-clang
-  fi
-
-  source ./export.sh
-
-  # Custom clang
-  if [[ ! -z "$CI_CLANG_DISTRO_URL" ]]; then
-    echo "Using custom clang from ${CI_CLANG_DISTRO_URL}"
-    wget $CI_CLANG_DISTRO_URL
-    ARCH_NAME=$(basename $CI_CLANG_DISTRO_URL)
-    tar -x -f $ARCH_NAME
-    export PATH=$PWD/esp-clang/bin:$PATH
-  fi
-
-  # Custom OpenOCD
-  if [[ ! -z "$OOCD_DISTRO_URL" && "$CI_JOB_STAGE" == "target_test" ]]; then
-    echo "Using custom OpenOCD from ${OOCD_DISTRO_URL}"
-    wget $OOCD_DISTRO_URL
-    ARCH_NAME=$(basename $OOCD_DISTRO_URL)
-    tar -x -f $ARCH_NAME
-    export OPENOCD_SCRIPTS=$PWD/openocd-esp32/share/openocd/scripts
-    export PATH=$PWD/openocd-esp32/bin:$PATH
-  fi
-
-  if [[ -n "$CI_PYTHON_TOOL_REPO" ]]; then
-    git clone --quiet --depth=1 -b ${CI_PYTHON_TOOL_BRANCH} https://gitlab-ci-token:${ESPCI_TOKEN}@${GITLAB_HTTPS_HOST}/espressif/${CI_PYTHON_TOOL_REPO}.git
-    pip install ./${CI_PYTHON_TOOL_REPO}
-    rm -rf ${CI_PYTHON_TOOL_REPO}
-  fi
-
-before_script:
-  - *common-before_scripts
-  - *setup_tools_and_idf_python_venv
-  - add_gitlab_ssh_keys
-  - fetch_submodules
-
-.before_script_minimal:
-  before_script:
-    - *common-before_scripts
-
-.before_script_macos:
-  before_script:
-    - *common-before_scripts
-    # On macOS, these tools need to be installed
-    - export IDF_TOOLS_PATH="${HOME}/.espressif_runner_${CI_RUNNER_ID}_${CI_CONCURRENT_ID}"
-    - $IDF_PATH/tools/idf_tools.py --non-interactive install cmake ninja
-    # This adds tools (compilers) and the version-specific Python environment to PATH
-    - *setup_tools_and_idf_python_venv
-    - fetch_submodules
-
-.before_script_build_jobs:
-  before_script:
-    - *common-before_scripts
-    - *setup_tools_and_idf_python_venv
-    - add_gitlab_ssh_keys
-    - fetch_submodules
-    - export EXTRA_CFLAGS=${PEDANTIC_CFLAGS}
-    - export EXTRA_CXXFLAGS=${PEDANTIC_CXXFLAGS}
-
-default:
-  retry:
-    max: 2
-    when:
-      # In case of a runner failure we could hop to another one, or a network error could go away.
-      - runner_system_failure
-      # Job execution timeout may be caused by a network issue.
-      - job_execution_timeout
 
+# Place the default settings in `.gitlab/ci/common.yml` instead
 include:
+  - '.gitlab/ci/common.yml'
   - '.gitlab/ci/rules.yml'
   - '.gitlab/ci/upload_cache.yml'
   - '.gitlab/ci/docs.yml'
diff --git a/.gitlab/ci/assign-test.yml b/.gitlab/ci/assign-test.yml
index 663d084e57..e0ce8f8b34 100644
--- a/.gitlab/ci/assign-test.yml
+++ b/.gitlab/ci/assign-test.yml
@@ -41,7 +41,7 @@ assign_integration_test:
   extends:
     - .assign_test_template
     - .rules:test:integration_test
-    - .before_script_minimal
+    - .before_script:minimal
   image: ${CI_INTEGRATION_TEST_ENV_IMAGE}
   needs:
     - build_ssc_esp32
diff --git a/.gitlab/ci/build.yml b/.gitlab/ci/build.yml
index 7bf31e0893..8fbe9a4991 100644
--- a/.gitlab/ci/build.yml
+++ b/.gitlab/ci/build.yml
@@ -1,5 +1,7 @@
 .build_template:
   stage: build
+  extends:
+    - .after_script:build:ccache
   image: $ESP_ENV_IMAGE
   tags:
     - build
@@ -8,15 +10,13 @@
   variables:
     # Enable ccache for all build jobs. See configure_ci_environment.sh for more ccache related settings.
     IDF_CCACHE_ENABLE: "1"
-  after_script:
-    # Show ccache statistics if enabled globally
-    - test "$CI_CCACHE_STATS" == 1 && test -n "$(which ccache)" && ccache --show-stats || true
   dependencies: []
 
 .build_cmake_template:
   extends:
     - .build_template
-    - .before_script_build_jobs
+    - .before_script:build
+    - .after_script:build:ccache-upload
   dependencies: # set dependencies to null to avoid missing artifacts issue
   needs:
     - job: fast_template_app
@@ -45,28 +45,6 @@
       - components/idf_test/unit_test/*.yml
     when: always
     expire_in: 4 days
-  after_script:
-    # Show ccache statistics if enabled globally
-    - test "$CI_CCACHE_STATS" == 1 && test -n "$(which ccache)" && ccache --show-stats || true
-    # upload the binary files to s3 server
-    - echo -e "\e[0Ksection_start:`date +%s`:upload_binaries_to_s3_server[collapsed=true]\r\e[0KUploading binaries to s3 Server"
-    - shopt -s globstar
-    # use || true to bypass the no-file error
-    - zip ${CI_JOB_ID}.zip **/build*/*.bin || true
-    - zip ${CI_JOB_ID}.zip **/build*/*.elf || true
-    - zip ${CI_JOB_ID}.zip **/build*/*.map || true
-    - zip ${CI_JOB_ID}.zip **/build*/flasher_args.json || true
-    - zip ${CI_JOB_ID}.zip **/build*/flash_project_args || true
-    - zip ${CI_JOB_ID}.zip **/build*/config/sdkconfig.json || true
-    - zip ${CI_JOB_ID}.zip **/build*/sdkconfig || true
-    - zip ${CI_JOB_ID}.zip **/build*/bootloader/*.bin || true
-    - zip ${CI_JOB_ID}.zip **/build*/partition_table/*.bin || true
-    - mc alias set shiny-s3 ${SHINY_S3_SERVER} ${SHINY_S3_ACCESS_KEY} ${SHINY_S3_SECRET_KEY}
-    - mc cp ${CI_JOB_ID}.zip shiny-s3/idf-artifacts/${CI_PIPELINE_ID}/${CI_JOB_ID}.zip
-    - echo -e "\e[0Ksection_end:`date +%s`:upload_binaries_to_s3_server\r\e[0K"
-    - echo "Please download the full binary files (including *.elf and *.map files) from the following share link"
-    # would be clean up after 4 days
-    - mc share download shiny-s3/idf-artifacts/${CI_PIPELINE_ID}/${CI_JOB_ID}.zip --expire=96h
   script:
     # CI specific options start from "--parallel-count xxx". could ignore when running locally
     - run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v
@@ -102,7 +80,7 @@
 .build_pytest_template:
   extends:
     - .build_cmake_template
-    - .before_script_build_jobs
+    - .before_script:build
   artifacts:
     paths:
       - "**/build*/size.json"
@@ -148,7 +126,7 @@
 .build_pytest_jtag_template:
   extends:
     - .build_cmake_template
-    - .before_script_build_jobs
+    - .before_script:build
   artifacts:
     paths:
       - "**/build*/size.json"
@@ -424,7 +402,7 @@ build_only_tools_test_apps:
 .build_template_app_template:
   extends:
     - .build_template
-    - .before_script_build_jobs
+    - .before_script:build
   variables:
     LOG_PATH: "${CI_PROJECT_DIR}/log_template_app"
     BUILD_PATH: "${CI_PROJECT_DIR}/build_template_app"
@@ -526,7 +504,7 @@ build_ssc_esp32h2:
 .build_esp_idf_tests_cmake_template:
   extends:
     - .build_cmake_template
-    - .before_script_build_jobs
+    - .before_script:build
   artifacts:
     paths:
       - "**/build*/size.json"
@@ -770,7 +748,7 @@ pytest_build_system:
 pytest_build_system_macos:
   extends:
     - .test_build_system_template
-    - .before_script_macos
+    - .before_script:build:macos
     - .rules:build:macos
   tags:
     - macos_shell
@@ -786,7 +764,7 @@ pytest_build_system_macos:
 
 build_docker:
   extends:
-    - .before_script_minimal
+    - .before_script:minimal
     - .rules:build:docker
   stage: host_test
   needs: []
diff --git a/.gitlab/ci/common.yml b/.gitlab/ci/common.yml
new file mode 100644
index 0000000000..a2f4364a9d
--- /dev/null
+++ b/.gitlab/ci/common.yml
@@ -0,0 +1,260 @@
+#####################
+# Default Variables #
+#####################
+stages:
+  - upload_cache
+  - pre_check
+  - build
+  - assign_test
+  - build_doc
+  - target_test
+  - host_test
+  - test_deploy
+  - deploy
+  - post_deploy
+
+variables:
+# System environment
+
+  # Common parameters for the 'make' during CI tests
+  MAKEFLAGS: "-j5 --no-keep-going"
+
+# GitLab-CI environment
+
+  # XXX_ATTEMPTS variables (https://docs.gitlab.com/ce/ci/yaml/README.html#job-stages-attempts) are not defined here.
+  # Use values from "CI / CD Settings" - "Variables".
+
+  # GIT_STRATEGY is not defined here.
+  # Use an option from "CI / CD Settings" - "General pipelines".
+
+  # we will download archive for each submodule instead of clone.
+  # we don't do "recursive" when fetch submodule as they're not used in CI now.
+  GIT_SUBMODULE_STRATEGY: none
+  SUBMODULE_FETCH_TOOL: "tools/ci/ci_fetch_submodule.py"
+  # by default we will fetch all submodules
+  # jobs can overwrite this variable to only fetch submodules they required
+  # set to "none" if don't need to fetch submodules
+  SUBMODULES_TO_FETCH: "all"
+  # tell build system do not check submodule update as we download archive instead of clone
+  IDF_SKIP_CHECK_SUBMODULES: 1
+
+  IDF_PATH: "$CI_PROJECT_DIR"
+  BATCH_BUILD: "1"
+  V: "0"
+  CHECKOUT_REF_SCRIPT: "$CI_PROJECT_DIR/tools/ci/checkout_project_ref.py"
+  PYTHON_VER: 3.8.17
+
+  # Docker images
+  BOT_DOCKER_IMAGE_TAG: ":latest"
+
+  ESP_ENV_IMAGE: "$CI_DOCKER_REGISTRY/esp-env-v5.2:2"
+  ESP_IDF_DOC_ENV_IMAGE: "$CI_DOCKER_REGISTRY/esp-idf-doc-env-v5.2:2-1"
+  QEMU_IMAGE: "${CI_DOCKER_REGISTRY}/qemu-v5.2:2-20230522"
+  TARGET_TEST_ENV_IMAGE: "$CI_DOCKER_REGISTRY/target-test-env-v5.2:2"
+
+  SONARQUBE_SCANNER_IMAGE: "${CI_DOCKER_REGISTRY}/sonarqube-scanner:5"
+
+  PRE_COMMIT_IMAGE: "$CI_DOCKER_REGISTRY/esp-idf-pre-commit:1"
+
+  # target test config file, used by assign test job
+  CI_TARGET_TEST_CONFIG_FILE: "$CI_PROJECT_DIR/.gitlab/ci/target-test.yml"
+
+  # target test repo parameters
+  TEST_ENV_CONFIG_REPO: "https://gitlab-ci-token:${BOT_TOKEN}@${CI_SERVER_HOST}:${CI_SERVER_PORT}/qa/ci-test-runner-configs.git"
+
+  # cache python dependencies
+  PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
+
+  # Set this variable to the branch of idf-constraints repo in order to test a custom Python constraint file. The
+  # branch name must be without the remote part ("origin/"). Keep the variable empty in order to use the constraint
+  # file from https://dl.espressif.com/dl/esp-idf.
+  CI_PYTHON_CONSTRAINT_BRANCH: ""
+
+  # Update the filename for a specific ESP-IDF release. It is used only with CI_PYTHON_CONSTRAINT_BRANCH.
+  CI_PYTHON_CONSTRAINT_FILE: "espidf.constraints.v5.2.txt"
+
+  # Set this variable to repository name of a Python tool you wish to install and test in the context of ESP-IDF CI.
+  # Keep the variable empty when not used.
+  CI_PYTHON_TOOL_REPO: ""
+
+  # Set this variable to the branch of a Python tool repo specified in CI_PYTHON_TOOL_REPO. The
+  # branch name must be without the remote part ("origin/"). Keep the variable empty when not used.
+  # This is used only if CI_PYTHON_TOOL_REPO is not empty.
+  CI_PYTHON_TOOL_BRANCH: ""
+
+  IDF_CI_BUILD: 1
+
+################################################
+# `before_script` and `after_script` Templates #
+################################################
+.common_before_scripts: &common-before_scripts |
+  source tools/ci/utils.sh
+  is_based_on_commits $REQUIRED_ANCESTOR_COMMITS
+
+  if [[ -n "$IDF_DONT_USE_MIRRORS" ]]; then
+    export IDF_MIRROR_PREFIX_MAP=
+  fi
+
+  if echo "$CI_MERGE_REQUEST_LABELS" | egrep "(^|,)include_nightly_run(,|$)"; then
+    export INCLUDE_NIGHTLY_RUN="1"
+  fi
+
+  # configure cmake related flags
+  source tools/ci/configure_ci_environment.sh
+
+  # add extra python packages
+  export PYTHONPATH="$IDF_PATH/tools:$IDF_PATH/tools/esp_app_trace:$IDF_PATH/components/partition_table:$IDF_PATH/tools/ci/python_packages:$PYTHONPATH"
+
+.setup_tools_and_idf_python_venv: &setup_tools_and_idf_python_venv |
+  # must use after setup_tools_except_target_test
+  # otherwise the export.sh won't work properly
+
+  # download constraint file for dev
+  if [[ -n "$CI_PYTHON_CONSTRAINT_BRANCH" ]]; then
+    wget -O /tmp/constraint.txt --header="Authorization:Bearer ${ESPCI_TOKEN}" ${GITLAB_HTTP_SERVER}/api/v4/projects/2581/repository/files/${CI_PYTHON_CONSTRAINT_FILE}/raw?ref=${CI_PYTHON_CONSTRAINT_BRANCH}
+    mkdir -p ~/.espressif
+    mv /tmp/constraint.txt ~/.espressif/${CI_PYTHON_CONSTRAINT_FILE}
+  fi
+
+  # Mirror
+  if [[ -n "$IDF_DONT_USE_MIRRORS" ]]; then
+    export IDF_MIRROR_PREFIX_MAP=
+  fi
+
+  # install latest python packages
+  # target test jobs
+  if [[ "${CI_JOB_STAGE}" == "target_test" ]]; then
+    # ttfw jobs
+    if ! echo "${CI_JOB_NAME}" | egrep ".*pytest.*"; then
+      run_cmd bash install.sh --enable-ci --enable-ttfw
+    else
+      run_cmd bash install.sh --enable-ci --enable-pytest
+    fi
+  elif [[ "${CI_JOB_STAGE}" == "build_doc" ]]; then
+    run_cmd bash install.sh --enable-ci --enable-docs
+  elif [[ "${CI_JOB_STAGE}" == "build" ]]; then
+    run_cmd bash install.sh --enable-ci --enable-pytest
+  else
+    if ! echo "${CI_JOB_NAME}" | egrep ".*pytest.*"; then
+      run_cmd bash install.sh --enable-ci
+    else
+      run_cmd bash install.sh --enable-ci --enable-pytest
+    fi
+  fi
+
+  # Install esp-clang if necessary
+  if [[ "$IDF_TOOLCHAIN" == "clang" ]]; then
+    $IDF_PATH/tools/idf_tools.py --non-interactive install esp-clang
+  fi
+
+  source ./export.sh
+
+  # Custom clang
+  if [[ ! -z "$CI_CLANG_DISTRO_URL" ]]; then
+    echo "Using custom clang from ${CI_CLANG_DISTRO_URL}"
+    wget $CI_CLANG_DISTRO_URL
+    ARCH_NAME=$(basename $CI_CLANG_DISTRO_URL)
+    tar -x -f $ARCH_NAME
+    export PATH=$PWD/esp-clang/bin:$PATH
+  fi
+
+  # Custom OpenOCD
+  if [[ ! -z "$OOCD_DISTRO_URL" && "$CI_JOB_STAGE" == "target_test" ]]; then
+    echo "Using custom OpenOCD from ${OOCD_DISTRO_URL}"
+    wget $OOCD_DISTRO_URL
+    ARCH_NAME=$(basename $OOCD_DISTRO_URL)
+    tar -x -f $ARCH_NAME
+    export OPENOCD_SCRIPTS=$PWD/openocd-esp32/share/openocd/scripts
+    export PATH=$PWD/openocd-esp32/bin:$PATH
+  fi
+
+  if [[ -n "$CI_PYTHON_TOOL_REPO" ]]; then
+    git clone --quiet --depth=1 -b ${CI_PYTHON_TOOL_BRANCH} https://gitlab-ci-token:${ESPCI_TOKEN}@${GITLAB_HTTPS_HOST}/espressif/${CI_PYTHON_TOOL_REPO}.git
+    pip install ./${CI_PYTHON_TOOL_REPO}
+    rm -rf ${CI_PYTHON_TOOL_REPO}
+  fi
+
+.show_ccache_statistics: &show_ccache_statistics |
+  # Show ccache statistics if enabled globally
+  test "$CI_CCACHE_STATS" == 1 && test -n "$(which ccache)" && ccache --show-stats || true
+
+.upload_built_binaries_to_s3: &upload_built_binaries_to_s3 |
+  # upload the binary files to s3 server
+  echo -e "\e[0Ksection_start:`date +%s`:upload_binaries_to_s3_server[collapsed=true]\r\e[0KUploading binaries to s3 Server"
+  shopt -s globstar
+  # use || true to bypass the no-file error
+  zip ${CI_JOB_ID}.zip **/build*/*.bin || true
+  zip ${CI_JOB_ID}.zip **/build*/*.elf || true
+  zip ${CI_JOB_ID}.zip **/build*/*.map || true
+  zip ${CI_JOB_ID}.zip **/build*/flasher_args.json || true
+  zip ${CI_JOB_ID}.zip **/build*/flash_project_args || true
+  zip ${CI_JOB_ID}.zip **/build*/config/sdkconfig.json || true
+  zip ${CI_JOB_ID}.zip **/build*/sdkconfig || true
+  zip ${CI_JOB_ID}.zip **/build*/bootloader/*.bin || true
+  zip ${CI_JOB_ID}.zip **/build*/partition_table/*.bin || true
+  shopt -u globstar
+  mc cp ${CI_JOB_ID}.zip shiny-s3/idf-artifacts/${CI_PIPELINE_ID}/${CI_JOB_ID}.zip || true
+  echo -e "\e[0Ksection_end:`date +%s`:upload_binaries_to_s3_server\r\e[0K"
+  echo "Please download the full binary files (including *.elf and *.map files) from the following share link"
+  # would be clean up after 4 days
+  mc share download shiny-s3/idf-artifacts/${CI_PIPELINE_ID}/${CI_JOB_ID}.zip --expire=96h
+
+.before_script:minimal:
+  before_script:
+    - *common-before_scripts
+
+.before_script:build:macos:
+  before_script:
+    - *common-before_scripts
+    # On macOS, these tools need to be installed
+    - export IDF_TOOLS_PATH="${HOME}/.espressif_runner_${CI_RUNNER_ID}_${CI_CONCURRENT_ID}"
+    - $IDF_PATH/tools/idf_tools.py --non-interactive install cmake ninja
+    # This adds tools (compilers) and the version-specific Python environment to PATH
+    - *setup_tools_and_idf_python_venv
+    - fetch_submodules
+
+.before_script:build:
+  before_script:
+    - *common-before_scripts
+    - *setup_tools_and_idf_python_venv
+    - add_gitlab_ssh_keys
+    - fetch_submodules
+    - export EXTRA_CFLAGS=${PEDANTIC_CFLAGS}
+    - export EXTRA_CXXFLAGS=${PEDANTIC_CXXFLAGS}
+
+.after_script:build:ccache:
+  after_script:
+    - *show_ccache_statistics
+
+.after_script:build:ccache-upload:
+  after_script:
+    - *show_ccache_statistics
+    - *upload_built_binaries_to_s3
+
+#############
+# `default` #
+#############
+default:
+  cache:
+    # pull only for most of the use cases since it's cache dir.
+    # Only set "push" policy for "upload_cache" stage jobs
+    - key: pip-cache
+      paths:
+        - .cache/pip
+      policy: pull
+    - key: submodule-cache
+      paths:
+        - .cache/submodule_archives
+      policy: pull
+  before_script:
+    - *common-before_scripts
+    - *setup_tools_and_idf_python_venv
+    - add_gitlab_ssh_keys
+    - fetch_submodules
+  retry:
+    max: 2
+    when:
+      # In case of a runner failure we could hop to another one, or a network error could go away.
+      - runner_system_failure
+      # Job execution timeout may be caused by a network issue.
+      - job_execution_timeout
diff --git a/.gitlab/ci/deploy.yml b/.gitlab/ci/deploy.yml
index e2bd864bc7..e5a58f230b 100644
--- a/.gitlab/ci/deploy.yml
+++ b/.gitlab/ci/deploy.yml
@@ -31,7 +31,7 @@ check_submodule_sync:
 push_to_github:
   extends:
     - .deploy_job_template
-    - .before_script_minimal
+    - .before_script:minimal
     - .rules:protected-no_label
   dependencies: []
   script:
@@ -52,7 +52,7 @@ deploy_update_SHA_in_esp-dockerfiles:
 deploy_test_result:
   extends:
     - .deploy_job_template
-    - .before_script_minimal
+    - .before_script:minimal
     - .rules:ref:master-always
   image: $CI_DOCKER_REGISTRY/bot-env:1
   dependencies: []
diff --git a/.gitlab/ci/host-test.yml b/.gitlab/ci/host-test.yml
index a11a9a0609..7276836bd2 100644
--- a/.gitlab/ci/host-test.yml
+++ b/.gitlab/ci/host-test.yml
@@ -114,7 +114,7 @@ test_idf_py:
 test_idf_tools:
   extends:
     - .host_test_template
-    - .before_script_minimal
+    - .before_script:minimal
   artifacts:
     when: on_failure
     paths:
@@ -294,7 +294,7 @@ test_gen_soc_caps_kconfig:
 test_pytest_qemu:
   extends:
     - .host_test_template
-    - .before_script_build_jobs
+    - .before_script:build
   image: $QEMU_IMAGE
   artifacts:
     when: always
@@ -328,7 +328,7 @@ test_pytest_qemu:
 test_pytest_linux:
   extends:
     - .host_test_template
-    - .before_script_build_jobs
+    - .before_script:build
   artifacts:
     when: always
     paths:
diff --git a/.gitlab/ci/pre_check.yml b/.gitlab/ci/pre_check.yml
index ba85e65333..024da3c59f 100644
--- a/.gitlab/ci/pre_check.yml
+++ b/.gitlab/ci/pre_check.yml
@@ -8,7 +8,7 @@
 .check_pre_commit_template:
   extends:
     - .pre_check_template
-    - .before_script_minimal
+    - .before_script:minimal
   image: $PRE_COMMIT_IMAGE
 
 check_pre_commit_master_release:
@@ -164,7 +164,7 @@ check_artifacts_expire_time:
 check_test_scripts_build_test_rules:
   extends:
     - .pre_check_template
-    - .before_script_build_jobs
+    - .before_script:build
   script:
     # required pytest related packages
     - run_cmd bash install.sh --enable-pytest
@@ -173,7 +173,7 @@
 check_configure_ci_environment_parsing:
   extends:
     - .pre_check_template
-    - .before_script_build_jobs
+    - .before_script:build
     - .rules:build
   script:
     - cd tools/ci
@@ -183,7 +183,7 @@ mr_variables:
   extends:
     - .pre_check_template
     - .rules:mr
-    - .before_script_minimal
+    - .before_script:minimal
   tags:
     - build
   script:
diff --git a/.gitlab/ci/target-test.yml b/.gitlab/ci/target-test.yml
index 718f0bf2ec..af1a3f847c 100644
--- a/.gitlab/ci/target-test.yml
+++ b/.gitlab/ci/target-test.yml
@@ -1527,7 +1527,7 @@ UT_S3:
   extends:
     - .target_test_job_template
     - .rules:test:integration_test
-    - .before_script_minimal
+    - .before_script:minimal
   image: ${CI_INTEGRATION_TEST_ENV_IMAGE}
   cache: []
   needs: # the assign already needs all the build jobs
diff --git a/.gitlab/ci/upload_cache.yml b/.gitlab/ci/upload_cache.yml
index 375194249d..c523078be9 100644
--- a/.gitlab/ci/upload_cache.yml
+++ b/.gitlab/ci/upload_cache.yml
@@ -9,7 +9,7 @@
 upload-pip-cache:
   extends:
     - .upload_cache_template
-    - .before_script_minimal
+    - .before_script:minimal
     - .rules:patterns:python-cache
   tags:
     - $GEO
@@ -29,7 +29,7 @@ upload-pip-cache:
 upload-submodules-cache:
   extends:
     - .upload_cache_template
-    - .before_script_minimal
+    - .before_script:minimal
     - .rules:patterns:submodule
   tags:
     - $GEO
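
For reference, a job now opts into the relocated defaults by extending the renamed templates from `.gitlab/ci/common.yml`. This is a minimal sketch; the job name and script line are illustrative, only the template names come from the patch above:

# hypothetical_build_job is not part of this patch; it only illustrates the renamed templates
hypothetical_build_job:
  extends:
    - .build_template          # stage/image/tags; now also chains .after_script:build:ccache
    - .before_script:build     # replaces the old .before_script_build_jobs
  script:
    - echo "build steps go here"  # placeholder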