forked from espressif/esp-idf
ci: upload to minio server instead of gitlab artifacts for pytest jobs
@@ -22,6 +22,11 @@
- [Manifest File to Control the Build/Test apps](#manifest-file-to-control-the-buildtest-apps)
- [Grammar](#grammar)
- [Special Rules](#special-rules)
- [Upload/Download Artifacts to Internal Minio Server](#uploaddownload-artifacts-to-internal-minio-server)
- [Env Vars](#env-vars)
- [Artifacts Types and File Patterns](#artifacts-types-and-file-patterns)
- [Upload](#upload)
- [Download](#download)

## General Workflow

@@ -245,3 +250,75 @@ In ESP-IDF CI, there's a few more special rules are additionally supported to di

- Add MR labels `BUILD_AND_TEST_ALL_APPS`
- Run in protected branches

## Upload/Download Artifacts to Internal Minio Server

### Users Without Access to Minio

If you don't have access to the internal Minio server, you can still download the artifacts from the shared link in the job log.

The log will look like this:

```shell
Pipeline ID    : 587355
Job name       : build_clang_test_apps_esp32
Job ID         : 40272275
Created archive file: 40272275.zip, uploading as 587355/build_dir_without_map_and_elf_files/build_clang_test_apps_esp32/40272275.zip
Please download the archive file which includes build_dir_without_map_and_elf_files from [INTERNAL_URL]
```

### Users With Access to Minio

#### Env Vars for Minio

Minio takes these env vars to connect to the server:

- `IDF_S3_SERVER`
- `IDF_S3_ACCESS_KEY`
- `IDF_S3_SECRET_KEY`
- `IDF_S3_BUCKET`
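
To run the artifacts handler from your own machine instead of from a CI job, export these variables first. The endpoint and credentials below are placeholders (the real values are internal), so treat this as a minimal local-setup sketch:

```shell
# Placeholder values -- substitute the internal Minio endpoint and your own credentials
export IDF_S3_SERVER="https://<internal-minio-server>"
export IDF_S3_ACCESS_KEY="<access-key>"
export IDF_S3_SECRET_KEY="<secret-key>"
export IDF_S3_BUCKET="<bucket-name>"

# With the variables exported, the handler can reach the server, e.g.
python tools/ci/artifacts_handler.py download --pipeline-id 587355
```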

#### Artifacts Types and File Patterns

The artifact types and their corresponding file patterns are defined in `tools/ci/artifacts_handler.py`, inside `ArtifactType` and `TYPE_PATTERNS_DICT`.

#### Upload

```shell
python tools/ci/artifacts_handler.py upload
```

will upload the files that match the file patterns to the Minio object storage, using the object name:

`<pipeline_id>/<artifact_type>/<job_name>/<job_id>.zip`

For example, job 39043328 will upload these four files:

- `575500/map_and_elf_files/build_pytest_examples_esp32/39043328.zip`
- `575500/build_dir_without_map_and_elf_files/build_pytest_examples_esp32/39043328.zip`
- `575500/logs/build_pytest_examples_esp32/39043328.zip`
- `575500/size_reports/build_pytest_examples_esp32/39043328.zip`

#### Download

You may run

```shell
python tools/ci/artifacts_handler.py download --pipeline-id <pipeline_id>
```

to download all files of the pipeline, or

```shell
python tools/ci/artifacts_handler.py download --pipeline-id <pipeline_id> --job-name <job_name_or_pattern>
```

to download all files with the specified job name or pattern, or

```shell
python tools/ci/artifacts_handler.py download --pipeline-id <pipeline_id> --job-name <job_name_or_pattern> --type <artifact_type> <artifact_type> ...
```

to download all files with the specified job name or pattern and artifact type(s).

You can see the full set of options with `python tools/ci/artifacts_handler.py download -h`.
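
The filters can also be combined with `--job-id`, which the `download` sub-command accepts even though it is not shown above. A sketch, reusing the pipeline and job IDs from the log excerpt earlier:

```shell
# Download only the build directory archive produced by job 40272275 in pipeline 587355
python tools/ci/artifacts_handler.py download --pipeline-id 587355 --job-id 40272275 --type build_dir_without_map_and_elf_files
```
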
@@ -3,16 +3,9 @@
  stage: assign_test
  tags:
    - assign_test
  dependencies: []
  variables:
    SUBMODULES_TO_FETCH: "none"
  artifacts:
    paths:
      - ${TEST_DIR}/test_configs/
      - artifact_index.json
    when: always
    expire_in: 1 week
  script:
    - run_cmd python tools/ci/python_packages/ttfw_idf/IDFAssignTest.py $TEST_TYPE $TEST_DIR -c $CI_TARGET_TEST_CONFIG_FILE -o $TEST_DIR/test_configs

assign_integration_test:
  extends:

@@ -16,7 +16,7 @@
  extends:
    - .build_template
    - .before_script:build
    - .after_script:build:ccache-upload
    - .after_script:build:ccache
  dependencies: # set dependencies to null to avoid missing artifacts issue
  needs:
    - job: fast_template_app

@@ -25,22 +25,14 @@
      optional: true # only MR pipelines would have this
  artifacts:
    paths:
      - "**/build*/size.json"
      # The other artifacts patterns are defined under tools/ci/artifacts_handler.py
      # Now we're uploading/downloading the binary files from our internal storage server
      #
      # keep the log file to help debug
      - "**/build*/build_log.txt"
      - "**/build*/*.bin"
      # upload to s3 server to save the artifacts size
      # - "**/build*/*.map"
      # ttfw tests require elf files
      - "**/build*/*.elf"
      - "**/build*/flasher_args.json"
      - "**/build*/flash_project_args"
      - "**/build*/config/sdkconfig.json"
      # ttfw tests require sdkconfig file
      - "**/build*/sdkconfig"
      - "**/build*/bootloader/*.bin"
      - "**/build*/partition_table/*.bin"
      - list_job_*.txt
      # keep the size info to help track the binary size
      - size_info.txt
      - "**/build*/size.json"
    when: always
    expire_in: 4 days
  script:

@@ -56,6 +48,7 @@
      examples/wifi/iperf
      --modified-components ${MR_MODIFIED_COMPONENTS}
      --modified-files ${MR_MODIFIED_FILES}
    - upload_artifacts_to_s3

.build_cmake_clang_template:
  extends:

@@ -74,41 +67,27 @@
      --modified-components ${MR_MODIFIED_COMPONENTS}
      --modified-files ${MR_MODIFIED_FILES}
      $TEST_BUILD_OPTS_EXTRA
    - upload_artifacts_to_s3

.build_pytest_template:
  extends:
    - .build_cmake_template
    - .before_script:build
  artifacts:
    paths:
      - "**/build*/size.json"
      - "**/build*/build_log.txt"
      - "**/build*/*.bin"
      # upload to s3 server to save the artifacts size
      # - "**/build*/*.map"
      # - "**/build*/*.elf"
      - "**/build*/flasher_args.json"
      - "**/build*/flash_project_args"
      - "**/build*/config/sdkconfig.json"
      - "**/build*/bootloader/*.bin"
      - "**/build*/partition_table/*.bin"
      - list_job_*.txt
      - size_info.txt
    when: always
    expire_in: 4 days
  script:
    # CI specific options start from "--parallel-count xxx". could ignore when running locally
    - run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v
      -t $IDF_TARGET
      -m \"not host_test\"
      --pytest-apps
      --parallel-count ${CI_NODE_TOTAL:-1}
      --parallel-index ${CI_NODE_INDEX:-1}
      --collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
      --modified-components ${MR_MODIFIED_COMPONENTS}
      --modified-files ${MR_MODIFIED_FILES}
    - upload_artifacts_to_s3

.build_pytest_no_jtag_template:
  extends: .build_pytest_template
  extends:
    - .build_cmake_template
  script:
    # CI specific options start from "--parallel-count xxx". could ignore when running locally
    - run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v

@@ -120,28 +99,11 @@
      --collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
      --modified-components ${MR_MODIFIED_COMPONENTS}
      --modified-files ${MR_MODIFIED_FILES}
    - upload_artifacts_to_s3

.build_pytest_jtag_template:
  extends:
    - .build_cmake_template
    - .before_script:build
  artifacts:
    paths:
      - "**/build*/size.json"
      - "**/build*/build_log.txt"
      - "**/build*/*.bin"
      # upload to s3 server to save the artifacts size
      # - "**/build*/*.map"
      - "**/build*/*.elf" # need elf for gdb
      - "**/build*/flasher_args.json"
      - "**/build*/flash_project_args"
      - "**/build*/config/sdkconfig.json"
      - "**/build*/bootloader/*.bin"
      - "**/build*/partition_table/*.bin"
      - list_job_*.txt
      - size_info.txt
    when: always
    expire_in: 4 days
  script:
    # CI specific options start from "--parallel-count xxx". could ignore when running locally
    - run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v

@@ -153,6 +115,7 @@
      --collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
      --modified-components ${MR_MODIFIED_COMPONENTS}
      --modified-files ${MR_MODIFIED_FILES}
    - upload_artifacts_to_s3

build_pytest_examples_esp32:
  extends:

@@ -199,17 +162,9 @@ build_pytest_examples_esp32c2:
    IDF_TARGET: esp32c2
    TEST_DIR: examples

build_pytest_examples_jtag: # for all targets
  extends:
    - .build_pytest_jtag_template
    - .rules:build:example_test-esp32
  variables:
    IDF_TARGET: all
    TEST_DIR: examples

build_pytest_examples_esp32c6:
  extends:
    - .build_pytest_template
    - .build_pytest_no_jtag_template
    - .rules:build:example_test-esp32c6
  parallel: 2
  variables:

@@ -218,13 +173,21 @@ build_pytest_examples_esp32c6:

build_pytest_examples_esp32h2:
  extends:
    - .build_pytest_template
    - .build_pytest_no_jtag_template
    - .rules:build:example_test-esp32h2
  parallel: 2
  variables:
    IDF_TARGET: esp32h2
    TEST_DIR: examples

build_pytest_examples_jtag: # for all targets
  extends:
    - .build_pytest_jtag_template
    - .rules:build:example_test
  variables:
    IDF_TARGET: all
    TEST_DIR: examples

build_pytest_components_esp32:
  extends:
    - .build_pytest_template

@@ -302,33 +265,11 @@ build_only_components_apps:
      --parallel-index ${CI_NODE_INDEX:-1}
      --modified-components ${MR_MODIFIED_COMPONENTS}
      --modified-files ${MR_MODIFIED_FILES}

.build_pytest_test_apps_template:
  extends: .build_pytest_template
  artifacts:
    paths:
      - "**/build*/size.json"
      - "**/build*/build_log.txt"
      - "**/build*/*.bin"
      # upload to s3 server to save the artifacts size
      # - "**/build*/*.map"
      # pytest test apps requires elf files for coredump tests
      - "**/build*/*.elf"
      - "**/build*/flasher_args.json"
      - "**/build*/flash_project_args"
      - "**/build*/config/sdkconfig.json"
      - "**/build*/bootloader/*.elf"
      - "**/build*/bootloader/*.bin"
      - "**/build*/partition_table/*.bin"
      - "**/build*/project_description.json"
      - list_job_*.txt
      - size_info.txt
    when: always
    expire_in: 4 days
    - upload_artifacts_to_s3

build_pytest_test_apps_esp32:
  extends:
    - .build_pytest_test_apps_template
    - .build_pytest_template
    - .rules:build:custom_test-esp32
  variables:
    IDF_TARGET: esp32

@@ -336,7 +277,7 @@ build_pytest_test_apps_esp32:

build_pytest_test_apps_esp32s2:
  extends:
    - .build_pytest_test_apps_template
    - .build_pytest_template
    - .rules:build:custom_test-esp32s2
  variables:
    IDF_TARGET: esp32s2

@@ -344,7 +285,7 @@ build_pytest_test_apps_esp32s2:

build_pytest_test_apps_esp32s3:
  extends:
    - .build_pytest_test_apps_template
    - .build_pytest_template
    - .rules:build:custom_test-esp32s3
  parallel: 2
  variables:

@@ -353,7 +294,7 @@ build_pytest_test_apps_esp32s3:

build_pytest_test_apps_esp32c3:
  extends:
    - .build_pytest_test_apps_template
    - .build_pytest_template
    - .rules:build:custom_test-esp32c3
  variables:
    IDF_TARGET: esp32c3

@@ -361,7 +302,7 @@ build_pytest_test_apps_esp32c3:

build_pytest_test_apps_esp32c2:
  extends:
    - .build_pytest_test_apps_template
    - .build_pytest_template
    - .rules:build:custom_test-esp32c2
  variables:
    IDF_TARGET: esp32c2

@@ -369,7 +310,7 @@ build_pytest_test_apps_esp32c2:

build_pytest_test_apps_esp32c6:
  extends:
    - .build_pytest_test_apps_template
    - .build_pytest_template
    - .rules:build:custom_test-esp32c6
  variables:
    IDF_TARGET: esp32c6

@@ -377,7 +318,7 @@ build_pytest_test_apps_esp32c6:

build_pytest_test_apps_esp32h2:
  extends:
    - .build_pytest_test_apps_template
    - .build_pytest_template
    - .rules:build:custom_test-esp32h2
  variables:
    IDF_TARGET: esp32h2

@@ -396,6 +337,7 @@ build_only_tools_test_apps:
      --parallel-index ${CI_NODE_INDEX:-1}
      --modified-components ${MR_MODIFIED_COMPONENTS}
      --modified-files ${MR_MODIFIED_FILES}
    - upload_artifacts_to_s3

.build_template_app_template:
  extends:

@@ -21,7 +21,7 @@ variables:

  # GitLab-CI environment

  # XXX_ATTEMPTS variables (https://docs.gitlab.com/ce/ci/yaml/README.html#job-stages-attempts) are not defined here.
  # XXX_ATTEMPTS variables (https://docs.gitlab.com/ee/ci/runners/configure_runners.html#job-stages-attempts) are not defined here.
  # Use values from "CI / CD Settings" - "Variables".

  # GIT_STRATEGY is not defined here.

@@ -178,27 +178,6 @@ variables:
  # Show ccache statistics if enabled globally
  test "$CI_CCACHE_STATS" == 1 && test -n "$(which ccache)" && ccache --show-stats || true

.upload_built_binaries_to_s3: &upload_built_binaries_to_s3 |
  # upload the binary files to s3 server
  echo -e "\e[0Ksection_start:`date +%s`:upload_binaries_to_s3_server[collapsed=true]\r\e[0KUploading binaries to s3 Server"
  shopt -s globstar
  # use || true to bypass the no-file error
  zip ${CI_JOB_ID}.zip **/build*/*.bin || true
  zip ${CI_JOB_ID}.zip **/build*/*.elf || true
  zip ${CI_JOB_ID}.zip **/build*/*.map || true
  zip ${CI_JOB_ID}.zip **/build*/flasher_args.json || true
  zip ${CI_JOB_ID}.zip **/build*/flash_project_args || true
  zip ${CI_JOB_ID}.zip **/build*/config/sdkconfig.json || true
  zip ${CI_JOB_ID}.zip **/build*/sdkconfig || true
  zip ${CI_JOB_ID}.zip **/build*/bootloader/*.bin || true
  zip ${CI_JOB_ID}.zip **/build*/partition_table/*.bin || true
  shopt -u globstar
  mc cp ${CI_JOB_ID}.zip shiny-s3/idf-artifacts/${CI_PIPELINE_ID}/${CI_JOB_ID}.zip || true
  echo -e "\e[0Ksection_end:`date +%s`:upload_binaries_to_s3_server\r\e[0K"
  echo "Please download the full binary files (including *.elf and *.map files) from the following share link"
  # would be clean up after 4 days
  mc share download shiny-s3/idf-artifacts/${CI_PIPELINE_ID}/${CI_JOB_ID}.zip --expire=96h

.before_script:minimal:
  before_script:
    - *common-before_scripts

@@ -226,11 +205,6 @@ variables:
  after_script:
    - *show_ccache_statistics

.after_script:build:ccache-upload:
  after_script:
    - *show_ccache_statistics
    - *upload_built_binaries_to_s3

#############
# `default` #
#############

@@ -1190,6 +1190,54 @@
    - <<: *if-dev-push
      changes: *patterns-submodule

.rules:build:example_test:
  rules:
    - <<: *if-revert-branch
      when: never
    - <<: *if-protected
    - <<: *if-example_test-ota-include_nightly_run-rule
    - <<: *if-label-build
    - <<: *if-label-example_test
    - <<: *if-label-example_test_esp32
    - <<: *if-label-example_test_esp32c2
    - <<: *if-label-example_test_esp32c3
    - <<: *if-label-example_test_esp32c6
    - <<: *if-label-example_test_esp32h2
    - <<: *if-label-example_test_esp32p4
    - <<: *if-label-example_test_esp32s2
    - <<: *if-label-example_test_esp32s3
    - <<: *if-label-target_test
    - <<: *if-dev-push
      changes: *patterns-build-example_test
    - <<: *if-dev-push
      changes: *patterns-build_components
    - <<: *if-dev-push
      changes: *patterns-build_system
    - <<: *if-dev-push
      changes: *patterns-downloadable-tools
    - <<: *if-dev-push
      changes: *patterns-example_test
    - <<: *if-dev-push
      changes: *patterns-example_test-adc
    - <<: *if-dev-push
      changes: *patterns-example_test-ethernet
    - <<: *if-dev-push
      changes: *patterns-example_test-i154
    - <<: *if-dev-push
      changes: *patterns-example_test-nvs_encr_hmac
    - <<: *if-dev-push
      changes: *patterns-example_test-sdio
    - <<: *if-dev-push
      changes: *patterns-example_test-usb
    - <<: *if-dev-push
      changes: *patterns-example_test-wifi
    - <<: *if-dev-push
      changes: *patterns-target_test-adc
    - <<: *if-dev-push
      changes: *patterns-target_test-ecdsa
    - <<: *if-dev-push
      changes: *patterns-target_test-wifi

.rules:build:example_test-esp32:
  rules:
    - <<: *if-revert-branch

File diff suppressed because it is too large

tools/ci/artifacts_handler.py (new file, 187 lines added)

@@ -0,0 +1,187 @@
# SPDX-FileCopyrightText: 2023 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0

import argparse
import fnmatch
import glob
import os
import typing as t
from enum import Enum
from pathlib import Path
from zipfile import ZipFile

import urllib3
from minio import Minio


class ArtifactType(str, Enum):
    MAP_AND_ELF_FILES = 'map_and_elf_files'
    BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES = 'build_dir_without_map_and_elf_files'

    LOGS = 'logs'
    SIZE_REPORTS = 'size_reports'


TYPE_PATTERNS_DICT = {
    ArtifactType.MAP_AND_ELF_FILES: [
        '**/build*/**/*.map',
        '**/build*/**/*.elf',
    ],
    ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES: [
        '**/build*/build_log.txt',
        '**/build*/**/*.bin',
        '**/build*/flasher_args.json',
        '**/build*/flash_project_args',
        '**/build*/config/sdkconfig.json',
        '**/build*/project_description.json',
        'list_job_*.txt',
    ],
    ArtifactType.LOGS: [
        '**/build*/build_log.txt',
    ],
    ArtifactType.SIZE_REPORTS: [
        '**/build*/size.json',
        'size_info.txt',
    ],
}


def getenv(env_var: str) -> str:
    try:
        return os.environ[env_var]
    except KeyError as e:
        raise Exception(f'Environment variable {env_var} not set') from e


def _download_files(
    pipeline_id: int,
    *,
    artifact_type: t.Optional[ArtifactType] = None,
    job_name: t.Optional[str] = None,
    job_id: t.Optional[int] = None,
) -> None:
    if artifact_type:
        prefix = f'{pipeline_id}/{artifact_type.value}/'
    else:
        prefix = f'{pipeline_id}/'

    for obj in client.list_objects(getenv('IDF_S3_BUCKET'), prefix=prefix, recursive=True):
        obj_name = obj.object_name
        obj_p = Path(obj_name)
        # <pipeline_id>/<action_type>/<job_name>/<job_id>.zip
        if len(obj_p.parts) != 4:
            print(f'Invalid object name: {obj_name}')
            continue

        if job_name:
            # could be a pattern
            if not fnmatch.fnmatch(obj_p.parts[2], job_name):
                print(f'Job name {job_name} does not match {obj_p.parts[2]}')
                continue

        if job_id:
            if obj_p.parts[3] != f'{job_id}.zip':
                print(f'Job ID {job_id} does not match {obj_p.parts[3]}')
                continue

        client.fget_object(getenv('IDF_S3_BUCKET'), obj_name, obj_name)
        print(f'Downloaded {obj_name}')

        if obj_name.endswith('.zip'):
            with ZipFile(obj_name, 'r') as zr:
                zr.extractall()
                print(f'Extracted {obj_name}')

            os.remove(obj_name)


def _upload_files(
    pipeline_id: int,
    *,
    artifact_type: ArtifactType,
    job_name: str,
    job_id: str,
) -> None:
    has_file = False
    with ZipFile(f'{job_id}.zip', 'w') as zw:
        for pattern in TYPE_PATTERNS_DICT[artifact_type]:
            for file in glob.glob(pattern, recursive=True):
                zw.write(file)
                has_file = True

    try:
        if has_file:
            obj_name = f'{pipeline_id}/{artifact_type.value}/{job_name.split(" ")[0]}/{job_id}.zip'
            print(f'Created archive file: {job_id}.zip, uploading as {obj_name}')

            client.fput_object(getenv('IDF_S3_BUCKET'), obj_name, f'{job_id}.zip')
            url = client.get_presigned_url('GET', getenv('IDF_S3_BUCKET'), obj_name)
            print(f'Please download the archive file which includes {artifact_type.value} from {url}')
    finally:
        os.remove(f'{job_id}.zip')


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Download or upload files from/to S3, the object name would be '
        '[PIPELINE_ID]/[ACTION_TYPE]/[JOB_NAME]/[JOB_ID].zip.'
        '\n'
        'For example: 123456/binaries/build_pytest_examples_esp32/123456789.zip',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )

    common_args = argparse.ArgumentParser(add_help=False, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    common_args.add_argument('--pipeline-id', type=int, help='Pipeline ID')
    common_args.add_argument(
        '--type', type=str, nargs='+', choices=[a.value for a in ArtifactType], help='Types of files to download'
    )

    action = parser.add_subparsers(dest='action', help='Download or Upload')
    download = action.add_parser('download', help='Download files from S3', parents=[common_args])
    upload = action.add_parser('upload', help='Upload files to S3', parents=[common_args])

    download.add_argument('--job-name', type=str, help='Job name pattern')
    download.add_argument('--job-id', type=int, help='Job ID')

    upload.add_argument('--job-name', type=str, help='Job name')
    upload.add_argument('--job-id', type=int, help='Job ID')

    args = parser.parse_args()

    client = Minio(
        getenv('IDF_S3_SERVER').replace('https://', ''),
        access_key=getenv('IDF_S3_ACCESS_KEY'),
        secret_key=getenv('IDF_S3_SECRET_KEY'),
        http_client=urllib3.PoolManager(
            timeout=urllib3.Timeout.DEFAULT_TIMEOUT,
            retries=urllib3.Retry(
                total=5,
                backoff_factor=0.2,
                status_forcelist=[500, 502, 503, 504],
            ),
        ),
    )

    ci_pipeline_id = args.pipeline_id or getenv('CI_PIPELINE_ID') # required
    if args.action == 'download':
        method = _download_files
        ci_job_name = args.job_name # optional
        ci_job_id = args.job_id # optional
    else:
        method = _upload_files # type: ignore
        ci_job_name = args.job_name or getenv('CI_JOB_NAME') # required
        ci_job_id = args.job_id or getenv('CI_JOB_ID') # required

    if args.type:
        types = [ArtifactType(t) for t in args.type]
    else:
        types = list(ArtifactType)

    print(f'{"Pipeline ID":15}: {ci_pipeline_id}')
    if ci_job_name:
        print(f'{"Job name":15}: {ci_job_name}')
    if ci_job_id:
        print(f'{"Job ID":15}: {ci_job_id}')

    for _t in types:
        method(ci_pipeline_id, artifact_type=_t, job_name=ci_job_name, job_id=ci_job_id) # type: ignore
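
Because `--job-name` is matched with `fnmatch`, shell-style wildcards work when pulling artifacts back. A sketch that reuses the pipeline ID and job names from the README examples above and fetches only the size reports:

```shell
# Wildcard job-name match: grabs the size_reports archives of every build_pytest_examples_* job in pipeline 575500
python tools/ci/artifacts_handler.py download --pipeline-id 575500 --job-name 'build_pytest_examples_*' --type size_reports
```
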
@@ -3,7 +3,7 @@
# internal use only for CI
# get latest MR information by source branch
#
# SPDX-FileCopyrightText: 2020-2022 Espressif Systems (Shanghai) CO LTD
# SPDX-FileCopyrightText: 2020-2023 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
#

@@ -73,6 +73,16 @@ def get_mr_components(source_branch: str) -> t.List[str]:
    return list(components)


def get_target_in_tags(tags: str) -> str:
    from idf_pytest.constants import TARGET_MARKERS

    for x in tags.split(','):
        if x in TARGET_MARKERS:
            return x

    raise RuntimeError(f'No target marker found in {tags}')


def _print_list(_list: t.List[str], separator: str = '\n') -> None:
    print(separator.join(_list))

@@ -88,6 +98,8 @@ if __name__ == '__main__':
    actions.add_parser('files', parents=[common_args])
    actions.add_parser('commits', parents=[common_args])
    actions.add_parser('components', parents=[common_args])
    target = actions.add_parser('target_in_tags')
    target.add_argument('tags', help='comma separated tags, e.g., esp32,generic')

    args = parser.parse_args()

@@ -99,5 +111,7 @@ if __name__ == '__main__':
        _print_list([commit.id for commit in get_mr_commits(args.src_branch)])
    elif args.action == 'components':
        _print_list(get_mr_components(args.src_branch))
    elif args.action == 'target_in_tags':
        print(get_target_in_tags(args.tags))
    else:
        raise NotImplementedError('not possible to get here')

@@ -40,3 +40,4 @@ tools/templates/sample_component/include/main.h
tools/templates/sample_component/main.c
tools/ci/cleanup_ignore_lists.py
tools/ci/idf_pytest/**/*
tools/ci/artifacts_handler.py

@@ -49,6 +49,11 @@ function set_component_ut_vars() {
    echo "exported variables COMPONENT_UT_DIRS, COMPONENT_UT_EXCLUDES"
}

function upload_artifacts_to_s3() {
    # for detailed documents, please refer to .gitlab/ci/README.md#uploaddownload-artifacts-to-internal-minio-server
    python tools/ci/artifacts_handler.py upload
}

function error() {
    printf "\033[0;31m%s\n\033[0m" "${1}" >&2
}

@@ -10,3 +10,4 @@ python-gitlab
pyyaml
SimpleWebSocketServer
pylint-gitlab
minio