Merge branch 'ci/remove-submodule-fetch-script' into 'master'

CI: remove submodule fetch script

Closes IDFCI-2980

See merge request espressif/esp-idf!39992
This commit is contained in:
Fu Hanxi
2025-06-23 19:22:19 +02:00
14 changed files with 55 additions and 163 deletions

View File

@@ -3,10 +3,7 @@
extends: extends:
- .after_script:build:ccache-show-stats:upload-failed-job-logs - .after_script:build:ccache-show-stats:upload-failed-job-logs
image: $ESP_ENV_IMAGE image: $ESP_ENV_IMAGE
tags: tags: [build, shiny]
- build
# build only on shiny servers since shiny storage server is at the same location
- shiny
variables: variables:
# Enable ccache for all build jobs. See configure_ci_environment.sh for more ccache related settings. # Enable ccache for all build jobs. See configure_ci_environment.sh for more ccache related settings.
IDF_CCACHE_ENABLE: "1" IDF_CCACHE_ENABLE: "1"

View File

@@ -30,26 +30,15 @@ variables:
# - set GIT_STRATEGY: "clone" to shiny runners # - set GIT_STRATEGY: "clone" to shiny runners
# - set GIT_STRATEGY: "fetch" to brew runners # - set GIT_STRATEGY: "fetch" to brew runners
GIT_STRATEGY: clone GIT_STRATEGY: clone
# we will download archive for each submodule instead of clone.
# we don't do "recursive" when fetch submodule as they're not used in CI now.
GIT_SUBMODULE_STRATEGY: none
# since we're using merged-result pipelines, the last commit should work for most cases
GIT_DEPTH: 1 GIT_DEPTH: 1
# --no-recurse-submodules: we use cache for submodules GIT_SUBMODULE_STRATEGY: none # here we use cache for submodules, so we don't need to fetch them every time
# since we're using merged-result pipelines, the last commit should work for most cases
# --prune --prune-tags: in case remote branch or tag is force pushed # --prune --prune-tags: in case remote branch or tag is force pushed
GIT_FETCH_EXTRA_FLAGS: "--no-recurse-submodules --prune --prune-tags" GIT_FETCH_EXTRA_FLAGS: "--no-recurse-submodules --prune --prune-tags"
# we're using .cache folder for caches # we're using .cache folder for caches
GIT_CLEAN_FLAGS: -ffdx -e .cache/ GIT_CLEAN_FLAGS: -ffdx -e .cache/
LATEST_GIT_TAG: v6.0-dev LATEST_GIT_TAG: v6.0-dev
SUBMODULE_FETCH_TOOL: "tools/ci/ci_fetch_submodule.py"
# by default we will fetch all submodules
# jobs can overwrite this variable to only fetch submodules they required
# set to "none" if don't need to fetch submodules
SUBMODULES_TO_FETCH: "all"
# tell build system do not check submodule update as we download archive instead of clone
IDF_SKIP_CHECK_SUBMODULES: 1
IDF_PATH: "$CI_PROJECT_DIR" IDF_PATH: "$CI_PROJECT_DIR"
V: "0" V: "0"
CHECKOUT_REF_SCRIPT: "$CI_PROJECT_DIR/tools/ci/checkout_project_ref.py" CHECKOUT_REF_SCRIPT: "$CI_PROJECT_DIR/tools/ci/checkout_project_ref.py"
@@ -392,11 +381,9 @@ default:
paths: paths:
- .cache/pip - .cache/pip
policy: pull policy: pull
- key: submodule-cache-${LATEST_GIT_TAG} - key: git-submodule-cache-${LATEST_GIT_TAG}
fallback_keys:
- submodule-cache
paths: paths:
- .cache/submodule_archives - .git/modules
policy: pull policy: pull
before_script: before_script:
- *common-before_scripts - *common-before_scripts

View File

@@ -11,9 +11,7 @@ check_submodule_sync:
tags: [ brew, github_sync ] tags: [ brew, github_sync ]
retry: 2 retry: 2
variables: variables:
# for brew runners, we always set GIT_STRATEGY to fetch GIT_STRATEGY: fetch # use brew local mirror first
GIT_STRATEGY: fetch
SUBMODULES_TO_FETCH: "none"
PUBLIC_IDF_URL: "https://github.com/espressif/esp-idf.git" PUBLIC_IDF_URL: "https://github.com/espressif/esp-idf.git"
dependencies: [] dependencies: []
script: script:
@@ -36,10 +34,8 @@ push_to_github:
- check_submodule_sync - check_submodule_sync
tags: [ brew, github_sync ] tags: [ brew, github_sync ]
variables: variables:
# for brew runners, we always set GIT_STRATEGY to fetch GIT_STRATEGY: fetch # use brew local mirror first
GIT_STRATEGY: fetch GIT_DEPTH: 0 # github needs full record of commits
# github also need full record of commits
GIT_DEPTH: 0
script: script:
- add_github_ssh_keys - add_github_ssh_keys
- git remote remove github &>/dev/null || true - git remote remove github &>/dev/null || true
@@ -58,11 +54,11 @@ deploy_update_SHA_in_esp-dockerfiles:
dependencies: [] dependencies: []
variables: variables:
GIT_DEPTH: 2 GIT_DEPTH: 2
tags: [ shiny, build ] tags: [build, shiny]
script: script:
- 'curl --header "PRIVATE-TOKEN: ${ESPCI_SCRIPTS_TOKEN}" -o create_MR_in_esp_dockerfile.sh $GITLAB_HTTP_SERVER/api/v4/projects/1260/repository/files/create_MR_in_esp_dockerfile%2Fcreate_MR_in_esp_dockerfile.sh/raw\?ref\=master' - 'curl --header "PRIVATE-TOKEN: ${ESPCI_SCRIPTS_TOKEN}" -o create_MR_in_esp_dockerfile.sh $GITLAB_HTTP_SERVER/api/v4/projects/1260/repository/files/create_MR_in_esp_dockerfile%2Fcreate_MR_in_esp_dockerfile.sh/raw\?ref\=master'
- chmod +x create_MR_in_esp_dockerfile.sh - chmod +x create_MR_in_esp_dockerfile.sh
- ./create_MR_in_esp_dockerfile.sh - ./create_MR_in_esp_dockerfile.sh
environment: environment:
name: eploy_update_SHA_in_esp-dockerfiles_production name: deploy_update_SHA_in_esp-dockerfiles_production
deployment_tier: production deployment_tier: production

View File

@@ -58,7 +58,7 @@
check_readme_links: check_readme_links:
extends: extends:
- .pre_check_template - .pre_check_template
tags: ["build", "amd64", "internet"] tags: ["amd64", "brew"]
allow_failure: true allow_failure: true
rules: rules:
- <<: *if-protected-check - <<: *if-protected-check

View File

@@ -2,8 +2,7 @@
extends: .rules:test:host_test extends: .rules:test:host_test
stage: host_test stage: host_test
image: $ESP_ENV_IMAGE image: $ESP_ENV_IMAGE
tags: tags: [build, shiny]
- host_test
dependencies: # set dependencies to null to avoid missing artifacts issue dependencies: # set dependencies to null to avoid missing artifacts issue
# run host_test jobs immediately, only after upload cache # run host_test jobs immediately, only after upload cache
needs: needs:
@@ -384,8 +383,6 @@ test_idf_pytest_plugin:
extends: extends:
- .host_test_template - .host_test_template
- .rules:patterns:idf-pytest-plugin - .rules:patterns:idf-pytest-plugin
variables:
SUBMODULES_TO_FETCH: "none"
artifacts: artifacts:
reports: reports:
junit: XUNIT_RESULT.xml junit: XUNIT_RESULT.xml

View File

@@ -36,7 +36,6 @@ gen_integration_pipeline:
cache: [] cache: []
tags: [fast_run, shiny] tags: [fast_run, shiny]
variables: variables:
SUBMODULES_TO_FETCH: "none"
GIT_LFS_SKIP_SMUDGE: 1 GIT_LFS_SKIP_SMUDGE: 1
needs: needs:
- job: fast_template_app - job: fast_template_app

View File

@@ -1,8 +1,7 @@
.pre_check_template: .pre_check_template:
stage: pre_check stage: pre_check
image: $ESP_ENV_IMAGE image: $ESP_ENV_IMAGE
tags: tags: [build, shiny]
- host_test
dependencies: # set dependencies to null to avoid missing artifacts issue dependencies: # set dependencies to null to avoid missing artifacts issue
check_version: check_version:
@@ -16,7 +15,6 @@ check_version:
# need a full clone to get the latest tag # need a full clone to get the latest tag
# the --shallow-since=$(git log -1 --format=%as $LATEST_GIT_TAG) option is not accurate # the --shallow-since=$(git log -1 --format=%as $LATEST_GIT_TAG) option is not accurate
GIT_STRATEGY: fetch GIT_STRATEGY: fetch
SUBMODULES_TO_FETCH: "none"
GIT_DEPTH: 0 GIT_DEPTH: 0
script: script:
- export IDF_PATH=$PWD - export IDF_PATH=$PWD
@@ -34,8 +32,6 @@ check_blobs:
extends: extends:
- .pre_check_template - .pre_check_template
- .rules:build:check - .rules:build:check
variables:
SUBMODULES_TO_FETCH: "components/esp_wifi/lib;components/esp_phy/lib;components/esp_coex/lib"
script: script:
# Check if Wi-Fi library header files match between IDF and the version used when compiling the libraries # Check if Wi-Fi library header files match between IDF and the version used when compiling the libraries
- IDF_TARGET=esp32 $IDF_PATH/components/esp_wifi/test_md5/test_md5.sh - IDF_TARGET=esp32 $IDF_PATH/components/esp_wifi/test_md5/test_md5.sh
@@ -108,7 +104,6 @@ check_version_tag:
# need a full clone to get the latest tag # need a full clone to get the latest tag
# the --shallow-since=$(git log -1 --format=%as $LATEST_GIT_TAG) option is not accurate # the --shallow-since=$(git log -1 --format=%as $LATEST_GIT_TAG) option is not accurate
GIT_STRATEGY: fetch GIT_STRATEGY: fetch
SUBMODULES_TO_FETCH: "none"
GIT_DEPTH: 0 GIT_DEPTH: 0
script: script:
- (git cat-file -t $CI_COMMIT_REF_NAME | grep tag) || (echo "ESP-IDF versions must be annotated tags." && exit 1) - (git cat-file -t $CI_COMMIT_REF_NAME | grep tag) || (echo "ESP-IDF versions must be annotated tags." && exit 1)
@@ -211,13 +206,12 @@ baseline_manifest_sha:
when: always when: always
redundant_pass_job: redundant_pass_job:
stage: pre_check extends:
- .pre_check_template
tags: [shiny, fast_run] tags: [shiny, fast_run]
image: $ESP_ENV_IMAGE
dependencies: null
before_script: []
cache: [] cache: []
extends: [] variables:
GIT_STRATEGY: none
before_script: []
script: script:
- echo "This job is redundant to ensure the 'retry_failed_jobs' job can exist and not be skipped" - echo "This job is redundant to ensure the 'retry_failed_jobs' job can exist and not be skipped"
when: always

View File

@@ -24,9 +24,9 @@ check_pre_commit_upload_cache:
paths: paths:
- .cache/pre-commit - .cache/pre-commit
policy: pull-push policy: pull-push
- key: submodule-cache-${LATEST_GIT_TAG} - key: git-submodule-cache-${LATEST_GIT_TAG}
paths: paths:
- .cache/submodule_archives - .git/modules
policy: pull policy: pull
check_pre_commit: check_pre_commit:
@@ -41,9 +41,9 @@ check_pre_commit:
paths: paths:
- .cache/pre-commit - .cache/pre-commit
policy: pull policy: pull
- key: submodule-cache-${LATEST_GIT_TAG} - key: git-submodule-cache-${LATEST_GIT_TAG}
paths: paths:
- .cache/submodule_archives - .git/modules
policy: pull policy: pull
check_powershell: check_powershell:
@@ -74,7 +74,7 @@ check_powershell:
paths: paths:
- .cache/pre-commit - .cache/pre-commit
policy: pull policy: pull
- key: submodule-cache-${LATEST_GIT_TAG} - key: git-submodule-cache-${LATEST_GIT_TAG}
paths: paths:
- .cache/submodule_archives - .git/modules
policy: pull policy: pull

View File

@@ -249,6 +249,9 @@
changes: *patterns-submodule changes: *patterns-submodule
- <<: *if-label-upload_cache - <<: *if-label-upload_cache
when: manual when: manual
- <<: *if-dev-push
changes:
- .gitlab/ci/upload_cache.yml
### Patterns ### ### Patterns ###
.rules:patterns:clang_tidy: .rules:patterns:clang_tidy:

View File

@@ -3,8 +3,7 @@
extends: .rules:test:host_test extends: .rules:test:host_test
stage: host_test stage: host_test
image: $ESP_ENV_IMAGE image: $ESP_ENV_IMAGE
tags: tags: [windows-build, brew]
- windows-build
dependencies: # set dependencies to null to avoid missing artifacts issue dependencies: # set dependencies to null to avoid missing artifacts issue
# run host_test jobs immediately, only after upload cache # run host_test jobs immediately, only after upload cache
needs: needs:
@@ -14,6 +13,8 @@
- job: upload-submodules-cache - job: upload-submodules-cache
optional: true optional: true
artifacts: false artifacts: false
variables:
GIT_STRATEGY: fetch # use brew local mirror first
before_script: [] before_script: []
after_script: [] after_script: []
@@ -61,7 +62,7 @@ test_tools_win:
- python -m pip install jsonschema - python -m pip install jsonschema
- .\install.ps1 --enable-ci --enable-pytest - .\install.ps1 --enable-ci --enable-pytest
- .\export.ps1 - .\export.ps1
- python "${SUBMODULE_FETCH_TOOL}" -s "all" - git submodule update --init
- cd ${IDF_PATH}/tools/test_idf_py - cd ${IDF_PATH}/tools/test_idf_py
- pytest --noconftest test_idf_py.py --junitxml=${IDF_PATH}/XUNIT_IDF_PY.xml - pytest --noconftest test_idf_py.py --junitxml=${IDF_PATH}/XUNIT_IDF_PY.xml
- pytest --noconftest test_hints.py --junitxml=${IDF_PATH}/XUNIT_HINTS.xml - pytest --noconftest test_hints.py --junitxml=${IDF_PATH}/XUNIT_HINTS.xml
@@ -79,7 +80,7 @@ test_tools_win:
script: script:
- .\install.ps1 --enable-ci --enable-pytest - .\install.ps1 --enable-ci --enable-pytest
- . .\export.ps1 - . .\export.ps1
- python "${SUBMODULE_FETCH_TOOL}" -s "all" - git submodule update --init
- cd ${IDF_PATH}\tools\test_build_system - cd ${IDF_PATH}\tools\test_build_system
- pytest --parallel-count ${CI_NODE_TOTAL} --parallel-index ${CI_NODE_INDEX} --junitxml=${CI_PROJECT_DIR}\XUNIT_RESULT.xml - pytest --parallel-count ${CI_NODE_TOTAL} --parallel-index ${CI_NODE_INDEX} --junitxml=${CI_PROJECT_DIR}\XUNIT_RESULT.xml
@@ -89,8 +90,7 @@ pytest_build_system_win:
- .rules:labels:windows_pytest_build_system - .rules:labels:windows_pytest_build_system
parallel: 2 parallel: 2
needs: [] needs: []
tags: tags: [windows-build, brew]
- windows-build
artifacts: artifacts:
paths: paths:
- XUNIT_RESULT.xml - XUNIT_RESULT.xml

View File

@@ -35,14 +35,14 @@ upload-submodules-cache:
- $GEO - $GEO
- cache - cache
cache: cache:
key: submodule-cache-${LATEST_GIT_TAG} key: git-submodule-cache-${LATEST_GIT_TAG}
paths: paths:
- .cache/submodule_archives - .git/modules
policy: push policy: push
script: script:
# use the default gitlab server # use the default gitlab server
- unset LOCAL_GITLAB_HTTPS_HOST - unset LOCAL_GITLAB_HTTPS_HOST
- rm -rf .cache/submodule_archives # clear old submodule archives - rm -rf .git/modules # clear old submodules
- add_gitlab_ssh_keys - add_gitlab_ssh_keys
- fetch_submodules - fetch_submodules
parallel: parallel:

View File

@@ -1,113 +1,33 @@
#!/usr/bin/env python #!/usr/bin/env python
# SPDX-FileCopyrightText: 2022 Espressif Systems (Shanghai) CO LTD # SPDX-FileCopyrightText: 2022-2025 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0 # SPDX-License-Identifier: Apache-2.0
# internal use only for CI # internal use only for CI
# download archive of one commit instead of cloning entire submodule repo # download archive of one commit instead of cloning entire submodule repo
import argparse import argparse
import os
import re
import shutil
import subprocess import subprocess
import sys
import time import time
from typing import Any, List
import gitlab_api
SUBMODULE_PATTERN = re.compile(r"\[submodule \"([^\"]+)\"]")
PATH_PATTERN = re.compile(r'path\s+=\s+(\S+)')
URL_PATTERN = re.compile(r'url\s+=\s+(\S+)')
SUBMODULE_ARCHIVE_TEMP_FOLDER = 'submodule_archive'
# need to match the one defined in CI yaml files for caching purpose
SUBMODULE_ARCHIVE_CACHE_DIR = '.cache/submodule_archives'
class SubModule(object):
# We don't need to support recursive submodule clone now
GIT_LS_TREE_OUTPUT_PATTERN = re.compile(r'\d+\s+commit\s+([0-9a-f]+)\s+')
def __init__(self, gitlab_inst: gitlab_api.Gitlab, path: str, url: str) -> None:
self.path = path
self.url = url
self.gitlab_inst = gitlab_inst
self.project_id = self._get_project_id(url)
self.commit_id = self._get_commit_id(path)
def _get_commit_id(self, path: str) -> str:
output = subprocess.check_output(['git', 'ls-tree', 'HEAD', path]).decode()
# example output: 160000 commit d88a262fbdf35e5abb372280eb08008749c3faa0 components/esp_wifi/lib
match = self.GIT_LS_TREE_OUTPUT_PATTERN.search(output)
return match.group(1) if match is not None else ''
def _get_project_id(self, url: str) -> Any:
base_name = os.path.basename(url)
project_id = self.gitlab_inst.get_project_id(os.path.splitext(base_name)[0], # remove .git
namespace='espressif')
return project_id
def download_archive(self) -> None:
print('Update submodule: {}: {}'.format(self.path, self.commit_id))
path_name = self.gitlab_inst.download_archive(self.commit_id, SUBMODULE_ARCHIVE_TEMP_FOLDER,
self.project_id, SUBMODULE_ARCHIVE_CACHE_DIR)
renamed_path = os.path.join(os.path.dirname(path_name), os.path.basename(self.path))
os.rename(path_name, renamed_path)
shutil.rmtree(self.path, ignore_errors=True)
shutil.move(renamed_path, os.path.dirname(self.path))
def update_submodule(git_module_file: str, submodules_to_update: List) -> None:
gitlab_inst = gitlab_api.Gitlab()
submodules = []
with open(git_module_file, 'r') as f:
data = f.read()
match = SUBMODULE_PATTERN.search(data)
if match is not None:
while True:
next_match = SUBMODULE_PATTERN.search(data, pos=match.end())
if next_match:
end_pos = next_match.start()
else:
end_pos = len(data)
path_match = PATH_PATTERN.search(data, pos=match.end(), endpos=end_pos)
url_match = URL_PATTERN.search(data, pos=match.end(), endpos=end_pos)
path = path_match.group(1) if path_match is not None else ''
url = url_match.group(1) if url_match is not None else ''
filter_result = True
if submodules_to_update:
if path not in submodules_to_update:
filter_result = False
if filter_result:
submodules.append(SubModule(gitlab_inst, path, url))
match = next_match
if not match:
break
shutil.rmtree(SUBMODULE_ARCHIVE_TEMP_FOLDER, ignore_errors=True)
for submodule in submodules:
submodule.download_archive()
if __name__ == '__main__': if __name__ == '__main__':
start_time = time.time()
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument('--repo_path', '-p', default='.', help='repo path') parser.add_argument('--repo_path', '-p', default='.', help='repo path')
parser.add_argument('--submodule', '-s', default='all', parser.add_argument(
help='Submodules to update. By default update all submodules. ' '--submodule',
'For multiple submodules, separate them with `;`. ' '-s',
'`all` and `none` are special values that indicates we fetch all / none submodules') default='all',
help='Submodules to update. By default update all submodules. '
'For multiple submodules, separate them with `;`. '
'`all` and `none` are special values that indicates we fetch all / none submodules',
)
args = parser.parse_args() args = parser.parse_args()
if args.submodule == 'none':
print("don't need to update submodules") print('This script is deprecated, please use the following git command with gitlab cache `.git/modules` instead.')
exit(0) print('Calling `git submodule update --init --depth=1` ...')
if args.submodule == 'all':
_submodules = [] start_time = time.time()
else: subprocess.check_call(
_submodules = args.submodule.split(';') ['git', 'submodule', 'update', '--init', '--depth=1'], stdout=sys.stdout, stderr=sys.stderr, cwd=args.repo_path
update_submodule(os.path.join(args.repo_path, '.gitmodules'), _submodules) )
print('total time spent on update submodule: {:.02f}s'.format(time.time() - start_time)) print('total time spent on update submodule: {:.02f}s'.format(time.time() - start_time))

View File

@@ -50,7 +50,6 @@
stage: target_test stage: target_test
timeout: 1 hour timeout: 1 hour
variables: variables:
SUBMODULES_TO_FETCH: "none"
# set while generating the pipeline # set while generating the pipeline
PYTEST_NODES: "" PYTEST_NODES: ""
TARGET_SELECTOR: "" TARGET_SELECTOR: ""

View File

@@ -36,7 +36,7 @@ function add_doc_server_ssh_keys() {
function fetch_submodules() { function fetch_submodules() {
section_start "fetch_submodules" "Fetching submodules..." section_start "fetch_submodules" "Fetching submodules..."
python "${SUBMODULE_FETCH_TOOL}" -s "${SUBMODULES_TO_FETCH}" git submodule update --init --depth 1 --force
section_end "fetch_submodules" section_end "fetch_submodules"
} }