forked from espressif/esp-idf

Merge branch 'bugfix/force_sync_only_esptool_for_target_test_jobs' into 'release/v3.3'

ci: force sync only esptool for target test jobs (v3.3)

See merge request espressif/esp-idf!11904
@@ -22,9 +22,16 @@ variables:
   # GIT_STRATEGY is not defined here.
   # Use an option from "CI / CD Settings" - "General pipelines".

-  # "normal" strategy for fetching only top-level submodules since nothing requires the sub-submodules code for building IDF.
-  # If the "recursive" strategy is used we have a problem with using relative URLs for sub-submodules.
-  GIT_SUBMODULE_STRATEGY: normal
+  # we will download archive for each submodule instead of clone.
+  # we don't do "recursive" when fetch submodule as they're not used in CI now.
+  GIT_SUBMODULE_STRATEGY: none
+  SUBMODULE_FETCH_TOOL: "tools/ci/ci_fetch_submodule.py"
+  # by default we will fetch all submodules
+  # jobs can overwrite this variable to only fetch submodules they required
+  # set to "none" if don't need to fetch submodules
+  SUBMODULES_TO_FETCH: "all"
+  # tell build system do not check submodule update as we download archive instead of clone
+  IDF_SKIP_CHECK_SUBMODULES: 1

   UNIT_TEST_BUILD_SYSTEM: make
   # IDF environment
@@ -57,9 +64,10 @@ variables:
 .show_submodule_urls: &show_submodule_urls |
   git config --get-regexp '^submodule\..*\.url$' || true

+.fetch_submodules: &fetch_submodules |
+  python $SUBMODULE_FETCH_TOOL -s $SUBMODULES_TO_FETCH
+
 before_script:
-  - echo "Running common script"
-  - *show_submodule_urls
   - source tools/ci/setup_python.sh
   # apply bot filter in before script
   - *apply_bot_filter
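
When a job's before_script reaches the `*fetch_submodules` anchor above, the expanded command is just the fetch tool plus the job's submodule filter. A minimal sketch (illustrative, not part of the commit) of the equivalent invocation, with the defaults from the first hunk:

    import os
    import subprocess

    # Equivalent of the `*fetch_submodules` anchor, with the two CI variables
    # expanded. The default values below come from the variables block above;
    # jobs override SUBMODULES_TO_FETCH to narrow the fetch.
    tool = os.environ.get("SUBMODULE_FETCH_TOOL", "tools/ci/ci_fetch_submodule.py")
    to_fetch = os.environ.get("SUBMODULES_TO_FETCH", "all")
    subprocess.check_call(["python", tool, "-s", to_fetch])
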
@@ -75,6 +83,8 @@ before_script:
   - *setup_tools_unless_target_test
   # Set some options and environment for CI
   - source tools/ci/configure_ci_environment.sh
+  - export PYTHONPATH="${PYTHONPATH}:${CI_PROJECT_DIR}/tools/ci/python_packages"
+  - *fetch_submodules

 # used for check scripts which we want to run unconditionally
 .before_script_lesser_nofilter: &before_script_lesser_nofilter
@@ -874,6 +884,7 @@ assign_test:
     reports:
       junit: $LOG_PATH/*/XUNIT_RESULT.xml
   variables:
+    SUBMODULES_TO_FETCH: "components/esptool_py/esptool"
     TEST_FW_PATH: "$CI_PROJECT_DIR/tools/tiny-test-fw"
     TEST_CASE_PATH: "$CI_PROJECT_DIR/examples"
     CONFIG_FILE_PATH: "${CI_PROJECT_DIR}/examples/test_configs"
@@ -936,7 +947,7 @@ assign_test:
       - $LOG_PATH
     expire_in: 1 week
   variables:
-    GIT_SUBMODULE_STRATEGY: none
+    SUBMODULES_TO_FETCH: "none"
    LOCAL_ENV_CONFIG_PATH: "$CI_PROJECT_DIR/ci-test-runner-configs/$CI_RUNNER_DESCRIPTION/ESP32_IDF"
    LOG_PATH: "$CI_PROJECT_DIR/$CI_COMMIT_SHA"
    TEST_CASE_FILE_PATH: "$CI_PROJECT_DIR/auto_test_script/TestCaseFiles"
@@ -1002,6 +1013,8 @@ example_test_002:
   tags:
     - ESP32
     - Example_ShieldBox_Basic
+  variables:
+    SUBMODULES_TO_FETCH: "components/esptool_py/esptool;components/micro-ecc/micro-ecc"

 .example_test_003:
   <<: *example_test_template
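
Taken together, these job-level variables let each target test job fetch only the submodules it actually needs (hence the commit title: target test jobs sync only esptool). As an illustrative sketch (not part of the commit), this mirrors how the `-s` value passed to tools/ci/ci_fetch_submodule.py is interpreted by the new script shown further below:

    def parse_submodules_to_fetch(value):
        # "none": skip fetching entirely; "all": no filter (fetch everything);
        # otherwise a ';'-separated list of submodule paths, as in the jobs above.
        if value == "none":
            return None          # caller should skip the fetch step
        if value == "all":
            return []            # empty filter list means "no filtering"
        return value.split(";")


    assert parse_submodules_to_fetch("all") == []
    assert parse_submodules_to_fetch(
        "components/esptool_py/esptool;components/micro-ecc/micro-ecc"
    ) == ["components/esptool_py/esptool", "components/micro-ecc/micro-ecc"]
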
@@ -144,7 +144,7 @@ EXTRA_COMPONENT_DIRS ?=
 COMPONENT_DIRS := $(PROJECT_PATH)/components $(EXTRA_COMPONENT_DIRS) $(IDF_PATH)/components $(PROJECT_PATH)/main
 endif
 # Make sure that every directory in the list is an absolute path without trailing slash.
 # This is necessary to split COMPONENT_DIRS into SINGLE_COMPONENT_DIRS and MULTI_COMPONENT_DIRS below.
 COMPONENT_DIRS := $(foreach cd,$(COMPONENT_DIRS),$(abspath $(cd)))
 export COMPONENT_DIRS

@@ -153,11 +153,11 @@ $(warning SRCDIRS variable is deprecated. These paths can be added to EXTRA_COMP
 COMPONENT_DIRS += $(abspath $(SRCDIRS))
 endif

 # List of component directories, i.e. directories which contain a component.mk file
 SINGLE_COMPONENT_DIRS := $(abspath $(dir $(dir $(foreach cd,$(COMPONENT_DIRS),\
 	$(wildcard $(cd)/component.mk)))))

 # List of components directories, i.e. directories which may contain components
 MULTI_COMPONENT_DIRS := $(filter-out $(SINGLE_COMPONENT_DIRS),$(COMPONENT_DIRS))

 # The project Makefile can define a list of components, but if it does not do this
@@ -582,7 +582,16 @@ clean: app-clean bootloader-clean config-clean ldgen-clean
 # or out of date, and exit if so. Components can add paths to this variable.
 #
 # This only works for components inside IDF_PATH
+#
+# For internal use:
+# IDF_SKIP_CHECK_SUBMODULES may be set in the environment to skip the submodule check.
+# This can be used e.g. in CI when submodules are checked out by different means.
+IDF_SKIP_CHECK_SUBMODULES ?= 0
+
 check-submodules:
+ifeq ($(IDF_SKIP_CHECK_SUBMODULES),1)
+	@echo "skip submodule check on internal CI"
+else
 # Check if .gitmodules exists, otherwise skip submodule check, assuming flattened structure
 ifneq ("$(wildcard ${IDF_PATH}/.gitmodules)","")

@@ -610,7 +619,7 @@ endef
 # so the argument is suitable for use with 'git submodule' commands
 $(foreach submodule,$(subst $(IDF_PATH)/,,$(filter $(IDF_PATH)/%,$(COMPONENT_SUBMODULES))),$(eval $(call GenerateSubmoduleCheckTarget,$(submodule))))
 endif # End check for .gitmodules existence
+endif # End check for IDF_SKIP_CHECK_SUBMODULES

 # PHONY target to list components in the build and their paths
 list-components:
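
The new IDF_SKIP_CHECK_SUBMODULES flag is read from the environment, so CI can disable the submodule check without touching the Makefile. A minimal sketch (hypothetical wrapper, not from the commit) of exercising that gate:

    import os
    import subprocess

    # With IDF_SKIP_CHECK_SUBMODULES=1 (as the CI variables block above sets),
    # the check-submodules target prints a notice and skips the git checks;
    # with the default of 0 the usual .gitmodules-based check runs.
    env = dict(os.environ, IDF_SKIP_CHECK_SUBMODULES="1")
    subprocess.check_call(["make", "check-submodules"], env=env)
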
tools/ci/ci_fetch_submodule.py (new file, 107 lines)
@@ -0,0 +1,107 @@
+#!/usr/bin/env python
+
+# internal use only for CI
+# download archive of one commit instead of cloning entire submodule repo
+
+import re
+import os
+import subprocess
+import argparse
+import shutil
+import time
+
+import gitlab_api
+
+SUBMODULE_PATTERN = re.compile(r"\[submodule \"([^\"]+)\"]")
+PATH_PATTERN = re.compile(r"path\s+=\s+(\S+)")
+URL_PATTERN = re.compile(r"url\s+=\s+(\S+)")
+
+SUBMODULE_ARCHIVE_TEMP_FOLDER = "submodule_archive"
+
+
+class SubModule(object):
+    # We don't need to support recursive submodule clone now
+
+    GIT_LS_TREE_OUTPUT_PATTERN = re.compile(r"\d+\s+commit\s+([0-9a-f]+)\s+")
+
+    def __init__(self, gitlab_inst, path, url):
+        self.path = path
+        self.gitlab_inst = gitlab_inst
+        self.project_id = self._get_project_id(url)
+        self.commit_id = self._get_commit_id(path)
+
+    def _get_commit_id(self, path):
+        output = subprocess.check_output(["git", "ls-tree", "HEAD", path])
+        output = output.decode()
+        # example output: 160000 commit d88a262fbdf35e5abb372280eb08008749c3faa0 components/esp_wifi/lib
+        match = self.GIT_LS_TREE_OUTPUT_PATTERN.search(output)
+        return match.group(1)
+
+    def _get_project_id(self, url):
+        base_name = os.path.basename(url)
+        project_id = self.gitlab_inst.get_project_id(os.path.splitext(base_name)[0],  # remove .git
+                                                     namespace="espressif")
+        return project_id
+
+    def download_archive(self):
+        print("Update submodule: {}: {}".format(self.path, self.commit_id))
+        path_name = self.gitlab_inst.download_archive(self.commit_id, SUBMODULE_ARCHIVE_TEMP_FOLDER,
+                                                      self.project_id)
+        renamed_path = os.path.join(os.path.dirname(path_name), os.path.basename(self.path))
+        os.rename(path_name, renamed_path)
+        shutil.rmtree(self.path, ignore_errors=True)
+        shutil.move(renamed_path, os.path.dirname(self.path))
+
+
+def update_submodule(git_module_file, submodules_to_update):
+    gitlab_inst = gitlab_api.Gitlab()
+    submodules = []
+    with open(git_module_file, "r") as f:
+        data = f.read()
+    match = SUBMODULE_PATTERN.search(data)
+    while True:
+        next_match = SUBMODULE_PATTERN.search(data, pos=match.end())
+        if next_match:
+            end_pos = next_match.start()
+        else:
+            end_pos = len(data)
+        path_match = PATH_PATTERN.search(data, pos=match.end(), endpos=end_pos)
+        url_match = URL_PATTERN.search(data, pos=match.end(), endpos=end_pos)
+        path = path_match.group(1)
+        url = url_match.group(1)
+
+        filter_result = True
+        if submodules_to_update:
+            if path not in submodules_to_update:
+                filter_result = False
+        if filter_result:
+            submodules.append(SubModule(gitlab_inst, path, url))
+
+        match = next_match
+        if not match:
+            break
+
+    shutil.rmtree(SUBMODULE_ARCHIVE_TEMP_FOLDER, ignore_errors=True)
+
+    for submodule in submodules:
+        submodule.download_archive()
+
+
+if __name__ == '__main__':
+    start_time = time.time()
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--repo_path", "-p", default=".", help="repo path")
+    parser.add_argument("--submodule", "-s", default="all",
+                        help="Submodules to update. By default update all submodules. "
+                             "For multiple submodules, separate them with `;`. "
+                             "`all` and `none` are special values that indicate we fetch all / none of the submodules")
+    args = parser.parse_args()
+    if args.submodule == "none":
+        print("don't need to update submodules")
+        exit(0)
+    if args.submodule == "all":
+        _submodules = []
+    else:
+        _submodules = args.submodule.split(";")
+    update_submodule(os.path.join(args.repo_path, ".gitmodules"), _submodules)
+    print("total time spent on update submodule: {:.02f}s".format(time.time() - start_time))
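
The section-by-section scan of .gitmodules in update_submodule() can be hard to follow, so here is a small, self-contained sketch of the same technique on sample input (the submodule names and relative URLs below are illustrative, not quoted from the repository):

    import re

    SUBMODULE_PATTERN = re.compile(r"\[submodule \"([^\"]+)\"]")
    PATH_PATTERN = re.compile(r"path\s+=\s+(\S+)")
    URL_PATTERN = re.compile(r"url\s+=\s+(\S+)")

    SAMPLE = """
    [submodule "components/esptool_py/esptool"]
        path = components/esptool_py/esptool
        url = ../../espressif/esptool.git
    [submodule "components/micro-ecc/micro-ecc"]
        path = components/micro-ecc/micro-ecc
        url = ../../kmackay/micro-ecc.git
    """

    # Walk the [submodule "..."] headers one by one; each section runs from the
    # end of its header to the start of the next header (or end of input), and
    # the path/url entries are searched only within that window -- the same
    # approach update_submodule() uses.
    match = SUBMODULE_PATTERN.search(SAMPLE)
    while match:
        next_match = SUBMODULE_PATTERN.search(SAMPLE, pos=match.end())
        end_pos = next_match.start() if next_match else len(SAMPLE)
        path = PATH_PATTERN.search(SAMPLE, pos=match.end(), endpos=end_pos).group(1)
        url = URL_PATTERN.search(SAMPLE, pos=match.end(), endpos=end_pos).group(1)
        print(path, url)
        match = next_match
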
tools/ci/python_packages/gitlab_api.py (new file, 174 lines)
@@ -0,0 +1,174 @@
+import os
+import re
+import argparse
+import tempfile
+import tarfile
+import zipfile
+
+import gitlab
+
+
+class Gitlab(object):
+    JOB_NAME_PATTERN = re.compile(r"(\w+)(\s+(\d+)/(\d+))?")
+
+    def __init__(self, project_id=None):
+        config_data_from_env = os.getenv("PYTHON_GITLAB_CONFIG")
+        if config_data_from_env:
+            # prefer to load config from env variable
+            with tempfile.NamedTemporaryFile("w", delete=False) as temp_file:
+                temp_file.write(config_data_from_env)
+            config_files = [temp_file.name]
+        else:
+            # otherwise try to use config file at local filesystem
+            config_files = None
+        self.gitlab_inst = gitlab.Gitlab.from_config(config_files=config_files)
+        self.gitlab_inst.auth()
+        if project_id:
+            self.project = self.gitlab_inst.projects.get(project_id)
+        else:
+            self.project = None
+
+    def get_project_id(self, name, namespace=None):
+        """
+        search project ID by name
+
+        :param name: project name
+        :param namespace: namespace to match when we have multiple projects with the same name
+        :return: project ID
+        """
+        projects = self.gitlab_inst.projects.list(search=name)
+        for project in projects:
+            if namespace is None:
+                if len(projects) == 1:
+                    project_id = project.id
+                    break
+            if project.namespace["path"] == namespace:
+                project_id = project.id
+                break
+        else:
+            raise ValueError("Can't find project")
+        return project_id
+
+    def download_artifacts(self, job_id, destination):
+        """
+        download full job artifacts and extract to destination.
+
+        :param job_id: Gitlab CI job ID
+        :param destination: extract artifacts to path.
+        """
+        job = self.project.jobs.get(job_id)
+
+        with tempfile.NamedTemporaryFile(delete=False) as temp_file:
+            job.artifacts(streamed=True, action=temp_file.write)
+
+        with zipfile.ZipFile(temp_file.name, "r") as archive_file:
+            archive_file.extractall(destination)
+
+    def download_artifact(self, job_id, artifact_path, destination=None):
+        """
+        download specific path of job artifacts and extract to destination.
+
+        :param job_id: Gitlab CI job ID
+        :param artifact_path: list of paths in artifacts (relative to the artifact root path)
+        :param destination: destination of artifact. Do not save to file if destination is None
+        :return: A list of artifact file raw data.
+        """
+        job = self.project.jobs.get(job_id)
+
+        raw_data_list = []
+
+        for a_path in artifact_path:
+            try:
+                data = job.artifact(a_path)
+            except gitlab.GitlabGetError as e:
+                print("Failed to download '{}' from job {}".format(a_path, job_id))
+                raise e
+            raw_data_list.append(data)
+            if destination:
+                file_path = os.path.join(destination, a_path)
+                try:
+                    os.makedirs(os.path.dirname(file_path))
+                except OSError:
+                    # already exists
+                    pass
+                with open(file_path, "wb") as f:
+                    f.write(data)
+
+        return raw_data_list
+
+    def find_job_id(self, job_name, pipeline_id=None):
+        """
+        Get Job ID from job name of specific pipeline
+
+        :param job_name: job name
+        :param pipeline_id: If None, will get pipeline id from CI pre-defined variable.
+        :return: a list of job IDs (parallel job will generate multiple jobs)
+        """
+        job_id_list = []
+        if pipeline_id is None:
+            pipeline_id = os.getenv("CI_PIPELINE_ID")
+        pipeline = self.project.pipelines.get(pipeline_id)
+        jobs = pipeline.jobs.list(all=True)
+        for job in jobs:
+            match = self.JOB_NAME_PATTERN.match(job.name)
+            if match:
+                if match.group(1) == job_name:
+                    job_id_list.append({"id": job.id, "parallel_num": match.group(3)})
+        return job_id_list
+
+    def download_archive(self, ref, destination, project_id=None):
+        """
+        Download archive of certain commit of a repository and extract to destination path
+
+        :param ref: commit or branch name
+        :param destination: destination path of extracted archive file
+        :param project_id: download project of current instance if project_id is None
+        :return: root path name of archive file
+        """
+        if project_id is None:
+            project = self.project
+        else:
+            project = self.gitlab_inst.projects.get(project_id)
+
+        with tempfile.NamedTemporaryFile(delete=False) as temp_file:
+            try:
+                project.repository_archive(sha=ref, streamed=True, action=temp_file.write)
+            except gitlab.GitlabGetError as e:
+                print("Failed to archive from project {}".format(project_id))
+                raise e
+
+        print("archive size: {:.03f}MB".format(float(os.path.getsize(temp_file.name)) / (1024 * 1024)))
+
+        with tarfile.open(temp_file.name, "r") as archive_file:
+            root_name = archive_file.getnames()[0]
+            archive_file.extractall(destination)
+
+        return os.path.join(os.path.realpath(destination), root_name)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument("action")
+    parser.add_argument("project_id", type=int)
+    parser.add_argument("--pipeline_id", "-i", type=int, default=None)
+    parser.add_argument("--ref", "-r", default="master")
+    parser.add_argument("--job_id", "-j", type=int, default=None)
+    parser.add_argument("--job_name", "-n", default=None)
+    parser.add_argument("--project_name", "-m", default=None)
+    parser.add_argument("--destination", "-d", default=None)
+    parser.add_argument("--artifact_path", "-a", nargs="*", default=None)
+    args = parser.parse_args()
+
+    gitlab_inst = Gitlab(args.project_id)
+    if args.action == "download_artifacts":
+        gitlab_inst.download_artifacts(args.job_id, args.destination)
+    if args.action == "download_artifact":
+        gitlab_inst.download_artifact(args.job_id, args.artifact_path, args.destination)
+    elif args.action == "find_job_id":
+        job_ids = gitlab_inst.find_job_id(args.job_name, args.pipeline_id)
+        print(";".join([",".join([str(j["id"]), j["parallel_num"]]) for j in job_ids]))
+    elif args.action == "download_archive":
+        gitlab_inst.download_archive(args.ref, args.destination)
+    elif args.action == "get_project_id":
+        ret = gitlab_inst.get_project_id(args.project_name)
+        print("project id: {}".format(ret))
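
As a usage sketch of this wrapper (assuming valid python-gitlab credentials are available, either via the PYTHON_GITLAB_CONFIG environment variable or a local config file; the project name and ref below are illustrative):

    from gitlab_api import Gitlab

    gl = Gitlab()
    # Resolve a project by name within a namespace, then fetch an archive of
    # one ref. Extracts under ./submodule_archive and returns the root
    # directory of the unpacked tree. In ci_fetch_submodule.py the ref is the
    # submodule commit id taken from `git ls-tree HEAD <path>`.
    project_id = gl.get_project_id("esptool", namespace="espressif")
    root = gl.download_archive("master", "submodule_archive", project_id)
    print(root)
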
@@ -11,6 +11,14 @@ if(NOT GIT_FOUND)
 else()

     function(git_submodule_check root_path)
+        # for internal use:
+        # skip submodule check if running on Gitlab CI and job is configured as not clone submodules
+        if($ENV{IDF_SKIP_CHECK_SUBMODULES})
+            if($ENV{IDF_SKIP_CHECK_SUBMODULES} EQUAL 1)
+                message("skip submodule check on internal CI")
+                return()
+            endif()
+        endif()

         execute_process(
             COMMAND ${GIT_EXECUTABLE} submodule status