mirror of https://github.com/espressif/esp-idf.git

ci: cache submodules
@@ -97,12 +97,16 @@ variables:
   CI_PYTHON_TOOL_BRANCH: ""
 
 cache:
-  key: pip-cache
-  paths:
-    - .cache/pip
   # pull only for most of the use cases since it's cache dir.
-  # Only set "push" policy for "upload_cache" stage jobs since it would install all pypi packages
-  policy: pull
+  # Only set "push" policy for "upload_cache" stage jobs
+  - key: pip-cache
+    paths:
+      - .cache/pip
+    policy: pull
+  - key: submodule-cache
+    paths:
+      - .cache/submodule_archives
+    policy: pull
 
 .setup_tools_unless_target_test: &setup_tools_unless_target_test |
   if [[ -n "$IDF_DONT_USE_MIRRORS" ]]; then
@@ -7,11 +7,14 @@
   tags:
     - host_test
   dependencies: []
+  # run host_test jobs immediately, only after upload cache
   needs:
-    - job: upload-pip-cache-shiny
-      optional: true  # run host_test jobs immediately, only after upload cache
-    - job: upload-pip-cache-brew
-      optional: true  # run host_test jobs immediately, only after upload cache
+    - job: upload-pip-cache
+      optional: true
+      artifacts: false
+    - job: upload-submodules-cache
+      optional: true
+      artifacts: false
 
 test_nvs_on_host:
   extends: .host_test_template
@@ -194,6 +194,7 @@
   - "components/tinyusb/tinyusb"
   - "components/unity/unity"
   - "examples/peripherals/secure_element/atecc608_ecdsa/components/esp-cryptoauthlib"
+  - ".gitmodules"
 
 .patterns-example_test-related_changes-ota: &patterns-example_test-related_changes-ota
   - "examples/system/ota/**/*"
@@ -270,6 +271,11 @@
     - <<: *if-dev-push
       changes: *patterns-python-cache
 
+.rules:patterns:submodule:
+  rules:
+    - <<: *if-dev-push
+      changes: *patterns-submodule
+
 .rules:dev:
   rules:
     - <<: *if-trigger
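An illustration, not part of the commit: the new .rules:patterns:submodule rule re-runs the submodule cache upload only when a push touches one of the *patterns-submodule paths, which now include .gitmodules. A rough Python sketch of that gating; GitLab's own "changes:" matching is glob-based, and touches_submodules plus the trimmed pattern list below are purely illustrative:

from fnmatch import fnmatch

# Trimmed illustration of the *patterns-submodule list above.
SUBMODULE_PATTERNS = [
    'components/tinyusb/tinyusb',
    'components/unity/unity',
    '.gitmodules',
]

def touches_submodules(changed_files):
    """Approximate the rule's `changes:` matching: any touched path that hits a pattern."""
    return any(fnmatch(path, pattern)
               for path in changed_files
               for pattern in SUBMODULE_PATTERNS)

# touches_submodules(['.gitmodules'])     -> True: upload-submodules-cache runs and pushes a fresh cache
# touches_submodules(['docs/README.md'])  -> False: consumer jobs keep pulling the existing cache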
@@ -1,17 +1,22 @@
+# pull only for most of the use cases for cache
+# only set "push" policy for the jobs under this file.
+# The cache would be updated when files matched specified patterns changes.
+
 .upload_cache_template:
   stage: upload_cache
   image: $ESP_ENV_IMAGE
 
-.upload_pip_cache_template:
+upload-pip-cache:
   extends:
     - .upload_cache_template
     - .rules:patterns:python-cache
+  tags:
+    - $GEO
+    - build
   cache:
     key: pip-cache
     paths:
       - .cache/pip
-    # pull only for most of the use cases since it's cache dir.
-    # Only set "push" policy for "upload_cache" stage jobs since it would install all pypi packages
     policy: push
   before_script: []
   script:
@@ -22,15 +27,29 @@
     - $IDF_PATH/tools/idf_tools.py install-python-env --features pytest
     # TODO: remove this, IDFCI-1207
     - pip install esptool -c ~/.espressif/${CI_PYTHON_CONSTRAINT_FILE}
+  parallel:
+    matrix:
+      - GEO: [ 'shiny', 'brew' ]
 
-upload-pip-cache-shiny:
-  extends: .upload_pip_cache_template
+upload-submodules-cache:
+  extends:
+    - .upload_cache_template
+    - .rules:patterns:submodule
   tags:
-    - shiny
-    - build
-
-upload-pip-cache-brew:
-  extends: .upload_pip_cache_template
-  tags:
-    - brew
+    - $GEO
     - build
+  cache:
+    key: submodule-cache
+    paths:
+      - .cache/submodule_archives
+    policy: push
+  before_script: []
+  script:
+    - source tools/ci/utils.sh
+    - is_based_on_commits $REQUIRED_ANCESTOR_COMMITS
+    - source tools/ci/setup_python.sh
+    - rm -rf .cache/submodule_archives  # clear old submodule archives
+    - fetch_submodules
+  parallel:
+    matrix:
+      - GEO: [ 'shiny', 'brew' ]
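An illustration, not part of the commit: "parallel: matrix:" with GEO: [ 'shiny', 'brew' ] expands each upload job into one instance per GEO value, replacing the hand-written upload-pip-cache-shiny / upload-pip-cache-brew jobs removed above. Roughly, the expansion works like this (a sketch, not GitLab's implementation):

from itertools import product

def expand_matrix(matrix):
    """Expand a GitLab-style parallel:matrix list into per-job variable sets."""
    jobs = []
    for entry in matrix:
        keys = list(entry)
        for values in product(*(entry[key] for key in keys)):
            jobs.append(dict(zip(keys, values)))
    return jobs

# expand_matrix([{'GEO': ['shiny', 'brew']}])
# -> [{'GEO': 'shiny'}, {'GEO': 'brew'}]
# i.e. one upload-pip-cache / upload-submodules-cache instance per runner location,
# each tagged with its own $GEO value.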
.gitmodules (vendored, 2 changes)
@@ -5,7 +5,7 @@
 
 [submodule "components/bt/controller/lib_esp32"]
 	path = components/bt/controller/lib_esp32
 	url = ../../espressif/esp32-bt-lib.git
 
 [submodule "components/bootloader/subproject/components/micro-ecc/micro-ecc"]
 	path = components/bootloader/subproject/components/micro-ecc/micro-ecc
@@ -19,6 +19,8 @@ PATH_PATTERN = re.compile(r'path\s+=\s+(\S+)')
 URL_PATTERN = re.compile(r'url\s+=\s+(\S+)')
 
 SUBMODULE_ARCHIVE_TEMP_FOLDER = 'submodule_archive'
+# need to match the one defined in CI yaml files for caching purpose
+SUBMODULE_ARCHIVE_CACHE_DIR = '.cache/submodule_archives'
 
 
 class SubModule(object):
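An illustration, not part of the commit: PATH_PATTERN and URL_PATTERN above pull the "path = ..." and "url = ..." entries out of .gitmodules, and the archives fetched for those paths are what lands under SUBMODULE_ARCHIVE_CACHE_DIR. A minimal parsing sketch reusing the same regexes (the real script's handling may differ):

import re

PATH_PATTERN = re.compile(r'path\s+=\s+(\S+)')
URL_PATTERN = re.compile(r'url\s+=\s+(\S+)')

def parse_gitmodules(text):
    """Pair up consecutive path/url entries from a .gitmodules file."""
    return list(zip(PATH_PATTERN.findall(text), URL_PATTERN.findall(text)))

# with open('.gitmodules') as f:
#     print(parse_gitmodules(f.read()))
# -> [('components/bt/controller/lib_esp32', '../../espressif/esp32-bt-lib.git'), ...]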
@@ -28,6 +30,7 @@ class SubModule(object):
 
     def __init__(self, gitlab_inst, path, url):
         self.path = path
+        self.url = url
         self.gitlab_inst = gitlab_inst
         self.project_id = self._get_project_id(url)
         self.commit_id = self._get_commit_id(path)
@@ -48,7 +51,7 @@ class SubModule(object):
     def download_archive(self):
         print('Update submodule: {}: {}'.format(self.path, self.commit_id))
         path_name = self.gitlab_inst.download_archive(self.commit_id, SUBMODULE_ARCHIVE_TEMP_FOLDER,
-                                                      self.project_id)
+                                                      self.project_id, SUBMODULE_ARCHIVE_CACHE_DIR)
         renamed_path = os.path.join(os.path.dirname(path_name), os.path.basename(self.path))
         os.rename(path_name, renamed_path)
         shutil.rmtree(self.path, ignore_errors=True)
@@ -177,7 +177,7 @@ class Gitlab(object):
         return job_id_list
 
     @retry
-    def download_archive(self, ref: str, destination: str, project_id: Optional[int] = None) -> str:
+    def download_archive(self, ref: str, destination: str, project_id: Optional[int] = None, cache_dir: Optional[str] = None) -> str:
         """
         Download archive of certain commit of a repository and extract to destination path
 
@@ -191,6 +191,23 @@ class Gitlab(object):
         else:
             project = self.gitlab_inst.projects.get(project_id)
 
+        if cache_dir:
+            local_archive_file = os.path.join(cache_dir, f'{ref}.tar.gz')
+            os.makedirs(os.path.dirname(local_archive_file), exist_ok=True)
+            if os.path.isfile(local_archive_file):
+                print('Use cached archive file. Skipping download...')
+            else:
+                with open(local_archive_file, 'wb') as fw:
+                    try:
+                        project.repository_archive(sha=ref, streamed=True, action=fw.write)
+                    except gitlab.GitlabGetError as e:
+                        print('Failed to archive from project {}'.format(project_id))
+                        raise e
+                print('Downloaded archive size: {:.03f}MB'.format(float(os.path.getsize(local_archive_file)) / (1024 * 1024)))
+
+            return self.decompress_archive(local_archive_file, destination)
+
+        # no cache
         with tempfile.NamedTemporaryFile(delete=False) as temp_file:
             try:
                 project.repository_archive(sha=ref, streamed=True, action=temp_file.write)
@@ -198,9 +215,13 @@ class Gitlab(object):
                 print('Failed to archive from project {}'.format(project_id))
                 raise e
 
-        print('archive size: {:.03f}MB'.format(float(os.path.getsize(temp_file.name)) / (1024 * 1024)))
+        print('Downloaded archive size: {:.03f}MB'.format(float(os.path.getsize(temp_file.name)) / (1024 * 1024)))
 
-        with tarfile.open(temp_file.name, 'r') as archive_file:
+        return self.decompress_archive(temp_file.name, destination)
+
+    @staticmethod
+    def decompress_archive(path: str, destination: str) -> str:
+        with tarfile.open(path, 'r') as archive_file:
             root_name = archive_file.getnames()[0]
             archive_file.extractall(destination)
 
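An illustration, not part of the commit: the cache-aware path added to download_archive() reduces to "reuse <cache_dir>/<ref>.tar.gz when it exists, otherwise stream it from the GitLab API and keep it for next time". A condensed standalone sketch of that flow, assuming python-gitlab and a hypothetical fetch_archive_cached helper:

import os
import tarfile

import gitlab  # python-gitlab, only needed for the usage example below


def fetch_archive_cached(project, ref, cache_dir, destination):
    """Reuse a commit-keyed tarball from cache_dir, downloading it only on a cache miss."""
    local_archive_file = os.path.join(cache_dir, f'{ref}.tar.gz')
    os.makedirs(cache_dir, exist_ok=True)
    if not os.path.isfile(local_archive_file):
        # Cold cache: stream the archive from GitLab and keep it for later pipelines.
        with open(local_archive_file, 'wb') as fw:
            project.repository_archive(sha=ref, streamed=True, action=fw.write)
    with tarfile.open(local_archive_file, 'r') as archive_file:
        root_name = archive_file.getnames()[0]
        archive_file.extractall(destination)
    return os.path.join(destination, root_name)


# Hypothetical usage:
# gl = gitlab.Gitlab('https://gitlab.example.com', private_token='...')
# project = gl.projects.get(1234)
# fetch_archive_cached(project, 'a1b2c3d', '.cache/submodule_archives', 'submodule_archive')

Because the submodule-cache CI cache persists .cache/submodule_archives between pipelines, only submodules whose commit ids changed trigger a new API download.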