mirror of
https://github.com/espressif/esp-idf.git
synced 2025-10-02 10:00:57 +02:00
Merge branch 'feat/load_idf_ext_from_components' into 'master'
feat(tools): Load idf.py extensions from project's component directories and python packages Closes IDF-5736 and IDF-4121 See merge request espressif/esp-idf!39875
This commit is contained in:
@@ -301,7 +301,112 @@ Arguments from a file can be combined with additional command line arguments, an
|
|||||||
|
|
||||||
A further example of how this argument file can be used, e.g., creating configuration profile files via @filename, is in the :example_file:`Multiple Build Configurations Example <build_system/cmake/multi_config/README.md>`.
|
A further example of how this argument file can be used, e.g., creating configuration profile files via @filename, is in the :example_file:`Multiple Build Configurations Example <build_system/cmake/multi_config/README.md>`.
|
||||||
|
|
||||||
|
Extending ``idf.py``
|
||||||
|
====================
|
||||||
|
|
||||||
|
``idf.py`` can be extended with additional subcommands, global options, and callbacks provided by extension files in your project and components which participate in the build, as well as by external Python packages exposing entry points.
|
||||||
|
|
||||||
|
- **From components participating in the build**: Place a file named ``idf_ext.py`` in the project root or in a component's root directory that is registered in the project's ``CMakeLists.txt``. Component extensions are discovered after the project is configured - run ``idf.py build`` or ``idf.py reconfigure`` to make newly added commands available.
|
||||||
|
- **From Python entry points**: Any installed Python package may contribute extensions by defining an entry point in the ``idf_extension`` group. Package installation is sufficient, no project build is required.
|
||||||
|
|
||||||
|
.. important::
|
||||||
|
|
||||||
|
   Extensions must not define subcommands or options that have the same names as the core ``idf.py`` commands. Custom actions and options are checked for name collisions; overriding defaults is not possible, and a warning is printed. For Python entry points, use unique identifiers, as duplicate entry point names will be ignored with a warning.
|
||||||
|
|
||||||
|
Extension File Example
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
An extension file defines an ``action_extensions`` function which returns additional actions/options. The same structure is used for component-based extensions (``idf_ext.py``) and for package-based extensions (e.g., ``<package_name>_ext.py``):
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
from typing import Any
|
||||||
|
import click
|
||||||
|
|
||||||
|
def action_extensions(base_actions: dict, project_path: str) -> dict:
|
||||||
|
def hello_test(subcommand_name: str, ctx: click.Context, global_args: dict, **action_args: Any) -> None:
|
||||||
|
message = action_args.get('message')
|
||||||
|
print(f"Running action: {subcommand_name}. Message: {message}")
|
||||||
|
|
||||||
|
def global_callback_detail(ctx: click.Context, global_args: dict, tasks: list) -> None:
|
||||||
|
if getattr(global_args, 'detail', False):
|
||||||
|
print(f"About to execute {len(tasks)} task(s): {[t.name for t in tasks]}")
|
||||||
|
|
||||||
|
return {
|
||||||
|
"version": "1",
|
||||||
|
"global_options": [
|
||||||
|
{
|
||||||
|
"names": ["--detail", "-d"],
|
||||||
|
"is_flag": True,
|
||||||
|
"help": "Enable detailed output",
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"global_action_callbacks": [global_callback_detail],
|
||||||
|
"actions": {
|
||||||
|
"hello": {
|
||||||
|
"callback": hello_test,
|
||||||
|
"short_help": "Hello from component",
|
||||||
|
"help": "Test command from component extension",
|
||||||
|
"options": [
|
||||||
|
{
|
||||||
|
"names": ["--message", "-m"],
|
||||||
|
"help": "Custom message to display",
|
||||||
|
"default": "Hi there!",
|
||||||
|
"type": str,
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
Extension API Reference
|
||||||
|
-----------------------
|
||||||
|
|
||||||
|
The ``action_extensions`` function takes arguments ``base_actions`` (all currently registered commands) and ``project_path`` (absolute project directory) and returns a dictionary with up to four keys:
|
||||||
|
|
||||||
|
- ``version``: A string representing the interface version of the extension. Currently, the API version is ``1``. **This key is mandatory** and must be provided.
|
||||||
|
- ``global_options``: A list of options available globally for all commands. Each option is a dictionary with fields such as ``names``, ``help``, ``type``, ``is_flag``, ``scope``, etc.
|
||||||
|
- ``global_action_callbacks``: A list of functions called once before any task execution. Each global action callback function accepts three arguments:
|
||||||
|
|
||||||
|
- ``ctx`` — The `click context`_
|
||||||
|
- ``global_args`` — All available global arguments
|
||||||
|
    - ``tasks`` — The list of tasks to be executed. A task refers to the action / sub-command used with ``idf.py``
|
||||||
|
|
||||||
|
- ``actions``: A dictionary of new subcommands. Each action has a ``callback`` function and may also include ``options``, ``arguments``, ``dependencies``, etc. Each action callback function accepts three to four arguments:
|
||||||
|
|
||||||
|
- ``subcommand_name`` — the name of the command (useful if multiple commands share the same callback)
|
||||||
|
- ``ctx`` — the `click context`_
|
||||||
|
    - ``global_args`` — all available global arguments
|
||||||
|
- ``**action_args`` — (optional) arguments passed to the action
|
||||||
|
|
||||||
|
Basic Usage Examples
|
||||||
|
--------------------
|
||||||
|
|
||||||
|
1) Provide an extension from a component in your project
|
||||||
|
|
||||||
|
Create ``idf_ext.py`` in the project root or in a registered component (for example ``components/my_component/idf_ext.py``). Use the extension file example above as your ``idf_ext.py`` implementation.
|
||||||
|
|
||||||
|
Run ``idf.py build`` or ``idf.py reconfigure`` to load the new command, then ``idf.py --help`` will show the new extension.
|
||||||
|
|
||||||
|
2) Provide an extension via a Python package entry point
|
||||||
|
|
||||||
|
Implement your extension in a module named ``<package_name>_ext.py`` using the extension file example above, and expose the ``action_extensions`` function via the ``idf_extension`` entry-point group. For example, with ``pyproject.toml``:
|
||||||
|
|
||||||
|
.. code-block:: TOML
|
||||||
|
|
||||||
|
[project]
|
||||||
|
name = "my_comp"
|
||||||
|
version = "0.1.0"
|
||||||
|
|
||||||
|
[project.entry-points.idf_extension]
|
||||||
|
my_pkg_ext = "my_component.my_ext:action_extensions"
|
||||||
|
|
||||||
|
|
||||||
|
Install the package into the same Python environment as ``idf.py`` (for example with ``pip install -e .`` in the package directory). It is recommended to use a unique module name (e.g., ``<package_name>_ext.py``) to avoid name conflicts. After successful installation, ``idf.py --help`` will show the new extension.
|
||||||
|
|
||||||
.. _cmake: https://cmake.org
|
.. _cmake: https://cmake.org
|
||||||
.. _ninja: https://ninja-build.org
|
.. _ninja: https://ninja-build.org
|
||||||
.. _esptool.py: https://github.com/espressif/esptool/#readme
|
.. _esptool.py: https://github.com/espressif/esptool/#readme
|
||||||
.. _CCache: https://ccache.dev/
|
.. _CCache: https://ccache.dev/
|
||||||
|
.. _click context: https://click.palletsprojects.com/en/stable/api/#context
|
||||||
|
@@ -301,7 +301,112 @@ uf2 二进制文件也可以通过 :ref:`idf.py uf2 <generate-uf2-binary>` 生
|
|||||||
|
|
||||||
关于参数文件的更多示例,如通过 @filename 创建配置文件概要,请参阅 :example_file:`多个构建配置示例 <build_system/cmake/multi_config/README.md>`。
|
关于参数文件的更多示例,如通过 @filename 创建配置文件概要,请参阅 :example_file:`多个构建配置示例 <build_system/cmake/multi_config/README.md>`。
|
||||||
|
|
||||||
|
扩展 ``idf.py``
|
||||||
|
====================
|
||||||
|
|
||||||
|
``idf.py`` 支持扩展功能。通过项目中的扩展文件以及参与构建的组件中的扩展文件,可以增加额外的子命令、全局选项和回调函数;通过暴露入口点的外部 Python 包,可以提供新的扩展功能。
|
||||||
|
|
||||||
|
- **参与构建的组件**:在项目根目录,或注册在项目 ``CMakeLists.txt`` 中的组件根目录,放置名为 ``idf_ext.py`` 的文件,该文件会在项目配置完成后得到识别。运行 ``idf.py build`` 或 ``idf.py reconfigure``,新添加的命令即可生效。
|
||||||
|
- **Python 入口点**:对于任何已安装的 Python 包,在 ``idf_extension`` 组中定义入口点后,就可以提供扩展功能。只要安装了 Python 包就可以使用扩展功能,无需重新构建项目。
|
||||||
|
|
||||||
|
.. important::
|
||||||
|
|
||||||
|
扩展不能定义与 ``idf.py`` 命令同名的子命令或选项。系统会检查自定义的动作和选项名称是否存在冲突,不允许覆盖默认命令,如有冲突会打印警告。对于 Python 入口点,必须使用唯一标识符,否则会忽略重复的入口点名称并发出警告。
|
||||||
|
|
||||||
|
扩展文件示例
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
扩展文件需要定义一个 ``action_extensions`` 函数,用于返回扩展的动作或选项。组件扩展 ``idf_ext.py`` 和基于包的扩展(例如 ``<package_name>_ext.py``)使用相同的结构,如下所示:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
from typing import Any
|
||||||
|
import click
|
||||||
|
|
||||||
|
def action_extensions(base_actions: dict, project_path: str) -> dict:
|
||||||
|
def hello_test(subcommand_name: str, ctx: click.Context, global_args: dict, **action_args: Any) -> None:
|
||||||
|
message = action_args.get('message')
|
||||||
|
print(f"Running action: {subcommand_name}. Message: {message}")
|
||||||
|
|
||||||
|
def global_callback_detail(ctx: click.Context, global_args: dict, tasks: list) -> None:
|
||||||
|
if getattr(global_args, 'detail', False):
|
||||||
|
print(f"About to execute {len(tasks)} task(s): {[t.name for t in tasks]}")
|
||||||
|
|
||||||
|
return {
|
||||||
|
"version": "1",
|
||||||
|
"global_options": [
|
||||||
|
{
|
||||||
|
"names": ["--detail", "-d"],
|
||||||
|
"is_flag": True,
|
||||||
|
"help": "Enable detailed output",
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"global_action_callbacks": [global_callback_detail],
|
||||||
|
"actions": {
|
||||||
|
"hello": {
|
||||||
|
"callback": hello_test,
|
||||||
|
"short_help": "Hello from component",
|
||||||
|
"help": "Test command from component extension",
|
||||||
|
"options": [
|
||||||
|
{
|
||||||
|
"names": ["--message", "-m"],
|
||||||
|
"help": "Custom message to display",
|
||||||
|
"default": "Hi there!",
|
||||||
|
"type": str,
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
扩展 API 参考
|
||||||
|
-----------------------
|
||||||
|
|
||||||
|
``action_extensions`` 函数接收两个参数: ``base_actions`` 表示当前已注册的所有命令, ``project_path`` 表示项目的绝对路径。该函数返回一个包含最多四个键的字典:
|
||||||
|
|
||||||
|
- ``version``:表示扩展接口版本。当前 API 版本为 ``1``。此键为必填项。
|
||||||
|
- ``global_options``:一组全局选项,适用于所有命令。每个选项都是一个字典,包含 ``names``、 ``help``、 ``type``、 ``is_flag``、 ``scope`` 等字段。
|
||||||
|
- ``global_action_callbacks``:表示一组全局回调函数,在执行任何任务之前都会调用一次。每个全局回调函数接受三个参数:
|
||||||
|
|
||||||
|
- ``ctx``:即 `click context`_
|
||||||
|
- ``global_args``:所有可用的全局参数
|
||||||
|
- ``tasks``:将要执行的任务列表。任务指的是运行 ``idf.py`` 时所调用的具体动作或子命令
|
||||||
|
|
||||||
|
- ``actions``:子命令字典,用于定义新的子命令。每个子命令都有一个 ``callback`` 函数,并且可以包含 ``options``、 ``arguments``、 ``dependencies`` 等。每个回调函数接受三到四个参数:
|
||||||
|
|
||||||
|
- ``subcommand_name``:命令的名称(在多个命令共享同一回调时很有用)
|
||||||
|
- ``ctx``:即 `click context`_
|
||||||
|
- ``global_args``:所有可用的全局参数
|
||||||
|
- ``**action_args``:传递给该子命令的具体参数,可选
|
||||||
|
|
||||||
|
基本用法示例
|
||||||
|
--------------------
|
||||||
|
|
||||||
|
1) **通过项目组件提供扩展**
|
||||||
|
|
||||||
|
在项目根目录或某个已注册的组件目录下创建 ``idf_ext.py`` (例如 ``components/my_component/idf_ext.py`` )。实现内容可参考上面的扩展文件示例。
|
||||||
|
|
||||||
|
运行 ``idf.py build`` 或 ``idf.py reconfigure`` 加载新命令,然后执行 ``idf.py --help`` 即可看到新扩展。
|
||||||
|
|
||||||
|
2) **通过 Python 包入口点提供扩展**
|
||||||
|
|
||||||
|
使用上述扩展文件示例,在名为 ``<package_name>_ext.py`` 的模块中实现扩展,并通过 ``idf_extension`` 入口点组暴露 ``action_extensions`` 函数。例如,在 ``pyproject.toml`` 中配置:
|
||||||
|
|
||||||
|
.. code-block:: TOML
|
||||||
|
|
||||||
|
[project]
|
||||||
|
name = "my_comp"
|
||||||
|
version = "0.1.0"
|
||||||
|
|
||||||
|
[project.entry-points.idf_extension]
|
||||||
|
my_pkg_ext = "my_component.my_ext:action_extensions"
|
||||||
|
|
||||||
|
|
||||||
|
将该包安装到与 ``idf.py`` 相同的 Python 环境中(例如在包目录下执行 ``pip install -e .``)。建议使用唯一的模块名(例如 ``<package_name>_ext.py``)避免命名冲突。安装成功后,运行 ``idf.py --help`` 就可以看到新扩展命令。
|
||||||
|
|
||||||
.. _cmake: https://cmake.org
|
.. _cmake: https://cmake.org
|
||||||
.. _ninja: https://ninja-build.org
|
.. _ninja: https://ninja-build.org
|
||||||
.. _esptool.py: https://github.com/espressif/esptool/#readme
|
.. _esptool.py: https://github.com/espressif/esptool/#readme
|
||||||
.. _CCache: https://ccache.dev/
|
.. _CCache: https://ccache.dev/
|
||||||
|
.. _click context: https://click.palletsprojects.com/en/stable/api/#context
|
||||||
|
131
tools/idf.py
131
tools/idf.py
@@ -14,6 +14,8 @@
|
|||||||
# their specific function instead.
|
# their specific function instead.
|
||||||
import codecs
|
import codecs
|
||||||
import glob
|
import glob
|
||||||
|
import importlib.metadata
|
||||||
|
import importlib.util
|
||||||
import json
|
import json
|
||||||
import locale
|
import locale
|
||||||
import os.path
|
import os.path
|
||||||
@@ -246,6 +248,8 @@ def init_cli(verbose_output: list | None = None) -> Any:
|
|||||||
self.callback(self.name, context, global_args, **action_args)
|
self.callback(self.name, context, global_args, **action_args)
|
||||||
|
|
||||||
class Action(click.Command):
|
class Action(click.Command):
|
||||||
|
callback: Callable
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
name: str | None = None,
|
name: str | None = None,
|
||||||
@@ -721,6 +725,90 @@ def init_cli(verbose_output: list | None = None) -> Any:
|
|||||||
|
|
||||||
return tasks_to_run
|
return tasks_to_run
|
||||||
|
|
||||||
|
def load_cli_extension_from_dir(ext_dir: str) -> Any | None:
|
||||||
|
"""Load extension 'idf_ext.py' from directory and return the action_extensions function"""
|
||||||
|
ext_file = os.path.join(ext_dir, 'idf_ext.py')
|
||||||
|
if not os.path.exists(ext_file):
|
||||||
|
return None
|
||||||
|
|
||||||
|
try:
|
||||||
|
module_name = f'idf_ext_{os.path.basename(ext_dir)}'
|
||||||
|
spec = importlib.util.spec_from_file_location(module_name, ext_file)
|
||||||
|
if spec is None or spec.loader is None:
|
||||||
|
raise ImportError('Failed to load python module')
|
||||||
|
ext_module = importlib.util.module_from_spec(spec)
|
||||||
|
sys.modules[module_name] = ext_module
|
||||||
|
spec.loader.exec_module(ext_module)
|
||||||
|
|
||||||
|
if hasattr(ext_module, 'action_extensions'):
|
||||||
|
return ext_module.action_extensions
|
||||||
|
else:
|
||||||
|
print_warning(f"Warning: Extension {ext_file} has no attribute 'action_extensions'")
|
||||||
|
|
||||||
|
except (ImportError, SyntaxError) as e:
|
||||||
|
print_warning(f'Warning: Failed to import extension {ext_file}: {e}')
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
def load_cli_extensions_from_entry_points() -> list[tuple[str, Any]]:
|
||||||
|
"""Load extensions from Python entry points"""
|
||||||
|
extensions: list[tuple[str, Any]] = []
|
||||||
|
eps = importlib.metadata.entry_points(group='idf_extension')
|
||||||
|
|
||||||
|
# declarative value is the path-like identifier of entry point defined in the components config file
|
||||||
|
# having same declarative value for multiple entry points results in loading only one of them (undeterministic)
|
||||||
|
eps_declarative_values: list[str] = []
|
||||||
|
for ep in eps:
|
||||||
|
if ep.value in eps_declarative_values:
|
||||||
|
conflicting_names = [e.name for e in eps if e.value == ep.value]
|
||||||
|
print_warning(
|
||||||
|
f"Warning: Entry point's declarative value [extension_file_name:method_name] "
|
||||||
|
f'name collision detected for - {ep.value}. The same {ep.value} is used by '
|
||||||
|
f'{conflicting_names} entry points. To ensure successful loading, please use'
|
||||||
|
' a different extension file name or method name for the entry point.'
|
||||||
|
)
|
||||||
|
# Remove any already loaded extensions with conflicting names
|
||||||
|
extensions[:] = [ext for ext in extensions if ext[0] not in conflicting_names]
|
||||||
|
continue
|
||||||
|
|
||||||
|
if ep.value == 'idf_ext:action_extensions':
|
||||||
|
print_warning(
|
||||||
|
f'Entry point "{ep.name}" has declarative value "{ep.value}". For external components, '
|
||||||
|
'it is recommended to use name like <<COMPONENT_NAME>>_ext:action_extensions, '
|
||||||
|
"so it does not interfere with the project's idf_ext.py file."
|
||||||
|
)
|
||||||
|
|
||||||
|
eps_declarative_values.append(ep.value)
|
||||||
|
try:
|
||||||
|
extension_func = ep.load()
|
||||||
|
extensions.append((ep.name, extension_func))
|
||||||
|
except Exception as e:
|
||||||
|
print_warning(f'Warning: Failed to load entry point extension "{ep.name}": {e}')
|
||||||
|
|
||||||
|
return extensions
|
||||||
|
|
||||||
|
def resolve_build_dir() -> str:
    """Resolve build directory from command line arguments

    return build path if explicitly set, otherwise default build path"""
    import argparse

    # Minimal pre-parser: only -B/--build-dir matters here, everything else is ignored
    default_build_dir = os.path.join(project_dir, 'build')
    mini_parser = argparse.ArgumentParser(add_help=False)
    mini_parser.add_argument('-B', '--build-dir', default=default_build_dir)
    known_args, _ = mini_parser.parse_known_args()
    return os.path.abspath(known_args.build_dir)
|
||||||
|
|
||||||
|
def _extract_relevant_path(path: str) -> str:
|
||||||
|
"""
|
||||||
|
Returns part of the path starting from 'components' or 'managed_components'.
|
||||||
|
If neither is found, returns the full path.
|
||||||
|
"""
|
||||||
|
for keyword in ('components', 'managed_components'):
|
||||||
|
# arg path is loaded from project_description.json, where paths are always defined with '/'
|
||||||
|
if keyword in path.split('/'):
|
||||||
|
return keyword + path.split(keyword, 1)[1]
|
||||||
|
return path
|
||||||
|
|
||||||
# That's a tiny parser that parse project-dir even before constructing
|
# That's a tiny parser that parse project-dir even before constructing
|
||||||
# fully featured click parser to be sure that extensions are loaded from the right place
|
# fully featured click parser to be sure that extensions are loaded from the right place
|
||||||
@click.command(
|
@click.command(
|
||||||
@@ -774,21 +862,40 @@ def init_cli(verbose_output: list | None = None) -> Any:
|
|||||||
except AttributeError:
|
except AttributeError:
|
||||||
print_warning(f'WARNING: Cannot load idf.py extension "{name}"')
|
print_warning(f'WARNING: Cannot load idf.py extension "{name}"')
|
||||||
|
|
||||||
# Load extensions from project dir
|
component_idf_ext_dirs = []
|
||||||
if os.path.exists(os.path.join(project_dir, 'idf_ext.py')):
|
# Get component directories with idf extensions that participate in the build
|
||||||
sys.path.append(project_dir)
|
build_dir_path = resolve_build_dir()
|
||||||
|
project_description_json_file = os.path.join(build_dir_path, 'project_description.json')
|
||||||
|
if os.path.exists(project_description_json_file):
|
||||||
try:
|
try:
|
||||||
from idf_ext import action_extensions
|
with open(project_description_json_file, encoding='utf-8') as f:
|
||||||
except ImportError:
|
project_desc = json.load(f)
|
||||||
print_warning('Error importing extension file idf_ext.py. Skipping.')
|
all_component_info = project_desc.get('build_component_info', {})
|
||||||
print_warning(
|
for _, comp_info in all_component_info.items():
|
||||||
"Please make sure that it contains implementation (even if it's empty) of add_action_extensions"
|
comp_dir = comp_info.get('dir')
|
||||||
)
|
if comp_dir and os.path.isdir(comp_dir) and os.path.exists(os.path.join(comp_dir, 'idf_ext.py')):
|
||||||
|
component_idf_ext_dirs.append(comp_dir)
|
||||||
|
except (OSError, json.JSONDecodeError) as e:
|
||||||
|
print_warning(f'Warning: Failed to read component info from project_description.json: {e}')
|
||||||
|
# Load extensions from directories that participate in the build (components and project)
|
||||||
|
for ext_dir in component_idf_ext_dirs + [project_dir]:
|
||||||
|
extension_func = load_cli_extension_from_dir(ext_dir)
|
||||||
|
if extension_func:
|
||||||
|
try:
|
||||||
|
all_actions = merge_action_lists(all_actions, custom_actions=extension_func(all_actions, project_dir))
|
||||||
|
except Exception as e:
|
||||||
|
print_warning(f'WARNING: Cannot load directory extension from "{ext_dir}": {e}')
|
||||||
|
else:
|
||||||
|
if ext_dir != project_dir:
|
||||||
|
print(f'INFO: Loaded component extension from "{_extract_relevant_path(ext_dir)}"')
|
||||||
|
|
||||||
|
# Load extensions from Python entry points
|
||||||
|
entry_point_extensions = load_cli_extensions_from_entry_points()
|
||||||
|
for name, extension_func in entry_point_extensions:
|
||||||
try:
|
try:
|
||||||
all_actions = merge_action_lists(all_actions, action_extensions(all_actions, project_dir))
|
all_actions = merge_action_lists(all_actions, custom_actions=extension_func(all_actions, project_dir))
|
||||||
except NameError:
|
except Exception as e:
|
||||||
pass
|
print_warning(f'WARNING: Cannot load entry point extension "{name}": {e}')
|
||||||
|
|
||||||
cli_help = (
|
cli_help = (
|
||||||
'ESP-IDF CLI build management tool. '
|
'ESP-IDF CLI build management tool. '
|
||||||
|
@@ -1,4 +1,4 @@
|
|||||||
# SPDX-FileCopyrightText: 2022-2024 Espressif Systems (Shanghai) CO LTD
|
# SPDX-FileCopyrightText: 2022-2025 Espressif Systems (Shanghai) CO LTD
|
||||||
# SPDX-License-Identifier: Apache-2.0
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
import asyncio
|
import asyncio
|
||||||
import importlib
|
import importlib
|
||||||
@@ -8,21 +8,17 @@ import re
|
|||||||
import subprocess
|
import subprocess
|
||||||
import sys
|
import sys
|
||||||
from asyncio.subprocess import Process
|
from asyncio.subprocess import Process
|
||||||
|
from collections.abc import Generator
|
||||||
from pkgutil import iter_modules
|
from pkgutil import iter_modules
|
||||||
|
from re import Match
|
||||||
from types import FunctionType
|
from types import FunctionType
|
||||||
from typing import Any
|
from typing import Any
|
||||||
from typing import Dict
|
|
||||||
from typing import Generator
|
|
||||||
from typing import List
|
|
||||||
from typing import Match
|
|
||||||
from typing import Optional
|
|
||||||
from typing import TextIO
|
from typing import TextIO
|
||||||
from typing import Tuple
|
|
||||||
from typing import Union
|
|
||||||
|
|
||||||
import click
|
import click
|
||||||
import yaml
|
import yaml
|
||||||
from esp_idf_monitor import get_ansi_converter
|
from esp_idf_monitor import get_ansi_converter
|
||||||
|
|
||||||
from idf_py_actions.errors import NoSerialPortFoundError
|
from idf_py_actions.errors import NoSerialPortFoundError
|
||||||
|
|
||||||
from .constants import GENERATORS
|
from .constants import GENERATORS
|
||||||
@@ -43,7 +39,7 @@ SHELL_COMPLETE_RUN = SHELL_COMPLETE_VAR in os.environ
|
|||||||
# https://docs.python.org/3/reference/compound_stmts.html#function-definitions
|
# https://docs.python.org/3/reference/compound_stmts.html#function-definitions
|
||||||
# Default parameter values are evaluated from left to right
|
# Default parameter values are evaluated from left to right
|
||||||
# when the function definition is executed
|
# when the function definition is executed
|
||||||
def get_build_context(ctx: Dict={}) -> Dict:
|
def get_build_context(ctx: dict = {}) -> dict:
|
||||||
"""
|
"""
|
||||||
The build context is set in the ensure_build_directory function. It can be used
|
The build context is set in the ensure_build_directory function. It can be used
|
||||||
in modules or other code, which don't have direct access to such information.
|
in modules or other code, which don't have direct access to such information.
|
||||||
@@ -64,13 +60,13 @@ def _set_build_context(args: 'PropertyDict') -> None:
|
|||||||
|
|
||||||
proj_desc_fn = f'{args.build_dir}/project_description.json'
|
proj_desc_fn = f'{args.build_dir}/project_description.json'
|
||||||
try:
|
try:
|
||||||
with open(proj_desc_fn, 'r', encoding='utf-8') as f:
|
with open(proj_desc_fn, encoding='utf-8') as f:
|
||||||
ctx['proj_desc'] = json.load(f)
|
ctx['proj_desc'] = json.load(f)
|
||||||
except (OSError, ValueError) as e:
|
except (OSError, ValueError) as e:
|
||||||
raise FatalError(f'Cannot load {proj_desc_fn}: {e}')
|
raise FatalError(f'Cannot load {proj_desc_fn}: {e}')
|
||||||
|
|
||||||
|
|
||||||
def executable_exists(args: List) -> bool:
|
def executable_exists(args: list) -> bool:
|
||||||
try:
|
try:
|
||||||
subprocess.check_output(args)
|
subprocess.check_output(args)
|
||||||
return True
|
return True
|
||||||
@@ -79,7 +75,7 @@ def executable_exists(args: List) -> bool:
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
def _idf_version_from_cmake() -> Optional[str]:
|
def _idf_version_from_cmake() -> str | None:
|
||||||
"""Acquires version of ESP-IDF from version.cmake"""
|
"""Acquires version of ESP-IDF from version.cmake"""
|
||||||
version_path = os.path.join(os.environ['IDF_PATH'], 'tools/cmake/version.cmake')
|
version_path = os.path.join(os.environ['IDF_PATH'], 'tools/cmake/version.cmake')
|
||||||
regex = re.compile(r'^\s*set\s*\(\s*IDF_VERSION_([A-Z]{5})\s+(\d+)')
|
regex = re.compile(r'^\s*set\s*\(\s*IDF_VERSION_([A-Z]{5})\s+(\d+)')
|
||||||
@@ -92,28 +88,38 @@ def _idf_version_from_cmake() -> Optional[str]:
|
|||||||
if m:
|
if m:
|
||||||
ver[m.group(1)] = m.group(2)
|
ver[m.group(1)] = m.group(2)
|
||||||
|
|
||||||
return 'v%s.%s.%s' % (ver['MAJOR'], ver['MINOR'], ver['PATCH'])
|
return f'v{ver["MAJOR"]}.{ver["MINOR"]}.{ver["PATCH"]}'
|
||||||
except (KeyError, OSError):
|
except (KeyError, OSError):
|
||||||
sys.stderr.write('WARNING: Cannot find ESP-IDF version in version.cmake\n')
|
sys.stderr.write('WARNING: Cannot find ESP-IDF version in version.cmake\n')
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def get_target(path: str, sdkconfig_filename: str='sdkconfig') -> Optional[str]:
|
def get_target(path: str, sdkconfig_filename: str = 'sdkconfig') -> str | None:
|
||||||
path = os.path.join(path, sdkconfig_filename)
|
path = os.path.join(path, sdkconfig_filename)
|
||||||
return get_sdkconfig_value(path, 'CONFIG_IDF_TARGET')
|
return get_sdkconfig_value(path, 'CONFIG_IDF_TARGET')
|
||||||
|
|
||||||
|
|
||||||
def idf_version() -> Optional[str]:
|
def idf_version() -> str | None:
|
||||||
"""Print version of ESP-IDF"""
|
"""Print version of ESP-IDF"""
|
||||||
|
|
||||||
# Try to get version from git:
|
# Try to get version from git:
|
||||||
try:
|
try:
|
||||||
version: Optional[str] = subprocess.check_output([
|
version: str | None = (
|
||||||
|
subprocess.check_output(
|
||||||
|
[
|
||||||
'git',
|
'git',
|
||||||
'--git-dir=%s' % os.path.join(os.environ['IDF_PATH'], '.git'),
|
f'--git-dir={os.path.join(os.environ["IDF_PATH"], ".git")}',
|
||||||
'--work-tree=%s' % os.environ['IDF_PATH'],
|
f'--work-tree={os.environ["IDF_PATH"]}',
|
||||||
'describe', '--tags', '--dirty', '--match', 'v*.*',
|
'describe',
|
||||||
]).decode('utf-8', 'ignore').strip()
|
'--tags',
|
||||||
|
'--dirty',
|
||||||
|
'--match',
|
||||||
|
'v*.*',
|
||||||
|
]
|
||||||
|
)
|
||||||
|
.decode('utf-8', 'ignore')
|
||||||
|
.strip()
|
||||||
|
)
|
||||||
except Exception:
|
except Exception:
|
||||||
# if failed, then try to parse cmake.version file
|
# if failed, then try to parse cmake.version file
|
||||||
sys.stderr.write('WARNING: Git version unavailable, reading from source\n')
|
sys.stderr.write('WARNING: Git version unavailable, reading from source\n')
|
||||||
@@ -128,19 +134,18 @@ def get_default_serial_port() -> Any:
|
|||||||
try:
|
try:
|
||||||
import esptool
|
import esptool
|
||||||
import serial.tools.list_ports
|
import serial.tools.list_ports
|
||||||
|
|
||||||
ports = list(sorted(p.device for p in serial.tools.list_ports.comports()))
|
ports = list(sorted(p.device for p in serial.tools.list_ports.comports()))
|
||||||
if sys.platform == 'darwin':
|
if sys.platform == 'darwin':
|
||||||
ports = [
|
ports = [port for port in ports if not port.endswith(('Bluetooth-Incoming-Port', 'wlan-debug'))]
|
||||||
port
|
|
||||||
for port in ports
|
|
||||||
if not port.endswith(('Bluetooth-Incoming-Port', 'wlan-debug'))
|
|
||||||
]
|
|
||||||
# high baud rate could cause the failure of creation of the connection
|
# high baud rate could cause the failure of creation of the connection
|
||||||
esp = esptool.get_default_connected_device(serial_list=ports, port=None, connect_attempts=4,
|
esp = esptool.get_default_connected_device(
|
||||||
initial_baud=115200)
|
serial_list=ports, port=None, connect_attempts=4, initial_baud=115200
|
||||||
|
)
|
||||||
if esp is None:
|
if esp is None:
|
||||||
raise NoSerialPortFoundError(
|
raise NoSerialPortFoundError(
|
||||||
"No serial ports found. Connect a device, or use '-p PORT' option to set a specific port.")
|
"No serial ports found. Connect a device, or use '-p PORT' option to set a specific port."
|
||||||
|
)
|
||||||
|
|
||||||
serial_port = esp.serial_port
|
serial_port = esp.serial_port
|
||||||
esp._port.close()
|
esp._port.close()
|
||||||
@@ -150,29 +155,29 @@ def get_default_serial_port() -> Any:
|
|||||||
except NoSerialPortFoundError:
|
except NoSerialPortFoundError:
|
||||||
raise
|
raise
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
raise FatalError('An exception occurred during detection of the serial port: {}'.format(e))
|
raise FatalError(f'An exception occurred during detection of the serial port: {e}')
|
||||||
|
|
||||||
|
|
||||||
# function prints warning when autocompletion is not being performed
|
# function prints warning when autocompletion is not being performed
|
||||||
# set argument stream to sys.stderr for errors and exceptions
|
# set argument stream to sys.stderr for errors and exceptions
|
||||||
# function prints warning when autocompletion is not being performed
# set argument stream to sys.stderr for errors and exceptions
def print_warning(message: str, stream: TextIO | None = None) -> None:
    """Print *message* to *stream* (stderr by default), suppressed during shell completion."""
    if SHELL_COMPLETE_RUN:
        return
    print(message, file=stream or sys.stderr)
|
||||||
|
|
||||||
|
|
||||||
def color_print(message: str, color: str, newline: Optional[str]='\n') -> None:
|
def color_print(message: str, color: str, newline: str | None = '\n') -> None:
|
||||||
"""Print a message to stderr with colored highlighting"""
|
"""Print a message to stderr with colored highlighting"""
|
||||||
ansi_normal = '\033[0m'
|
ansi_normal = '\033[0m'
|
||||||
sys.stderr.write('%s%s%s%s' % (color, message, ansi_normal, newline))
|
sys.stderr.write(f'{color}{message}{ansi_normal}{newline}')
|
||||||
sys.stderr.flush()
|
sys.stderr.flush()
|
||||||
|
|
||||||
|
|
||||||
def yellow_print(message: str, newline: str | None = '\n') -> None:
    """Print *message* to stderr highlighted in yellow."""
    color_print(message, '\033[0;33m', newline)
|
||||||
|
|
||||||
|
|
||||||
def red_print(message: str, newline: str | None = '\n') -> None:
    """Print *message* to stderr highlighted in red."""
    color_print(message, '\033[1;31m', newline)
|
||||||
|
|
||||||
@@ -181,15 +186,12 @@ def debug_print_idf_version() -> None:
|
|||||||
print_warning(f'ESP-IDF {idf_version() or "version unknown"}')
|
print_warning(f'ESP-IDF {idf_version() or "version unknown"}')
|
||||||
|
|
||||||
|
|
||||||
def load_hints() -> Dict:
|
def load_hints() -> dict:
|
||||||
"""Helper function to load hints yml file"""
|
"""Helper function to load hints yml file"""
|
||||||
hints: Dict = {
|
hints: dict = {'yml': [], 'modules': []}
|
||||||
'yml': [],
|
|
||||||
'modules': []
|
|
||||||
}
|
|
||||||
|
|
||||||
current_module_dir = os.path.dirname(__file__)
|
current_module_dir = os.path.dirname(__file__)
|
||||||
with open(os.path.join(current_module_dir, 'hints.yml'), 'r', encoding='utf-8') as file:
|
with open(os.path.join(current_module_dir, 'hints.yml'), encoding='utf-8') as file:
|
||||||
hints['yml'] = yaml.safe_load(file)
|
hints['yml'] = yaml.safe_load(file)
|
||||||
|
|
||||||
hint_modules_dir = os.path.join(current_module_dir, 'hint_modules')
|
hint_modules_dir = os.path.join(current_module_dir, 'hint_modules')
|
||||||
@@ -206,13 +208,13 @@ def load_hints() -> Dict:
|
|||||||
red_print(f'Failed to import "{name}" from "{hint_modules_dir}" as a module')
|
red_print(f'Failed to import "{name}" from "{hint_modules_dir}" as a module')
|
||||||
raise SystemExit(1)
|
raise SystemExit(1)
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
red_print('Module "{}" does not have function generate_hint.'.format(name))
|
red_print(f'Module "{name}" does not have function generate_hint.')
|
||||||
raise SystemExit(1)
|
raise SystemExit(1)
|
||||||
|
|
||||||
return hints
|
return hints
|
||||||
|
|
||||||
|
|
||||||
def generate_hints_buffer(output: str, hints: Dict) -> Generator:
|
def generate_hints_buffer(output: str, hints: dict) -> Generator:
|
||||||
"""Helper function to process hints within a string buffer"""
|
"""Helper function to process hints within a string buffer"""
|
||||||
# Call modules for possible hints with unchanged output. Note that
|
# Call modules for possible hints with unchanged output. Note that
|
||||||
# hints in hints.yml expect new line trimmed, but modules should
|
# hints in hints.yml expect new line trimmed, but modules should
|
||||||
@@ -227,7 +229,7 @@ def generate_hints_buffer(output: str, hints: Dict) -> Generator:
|
|||||||
for hint in hints['yml']:
|
for hint in hints['yml']:
|
||||||
variables_list = hint.get('variables')
|
variables_list = hint.get('variables')
|
||||||
hint_list, hint_vars, re_vars = [], [], []
|
hint_list, hint_vars, re_vars = [], [], []
|
||||||
match: Optional[Match[str]] = None
|
match: Match[str] | None = None
|
||||||
try:
|
try:
|
||||||
if variables_list:
|
if variables_list:
|
||||||
for variables in variables_list:
|
for variables in variables_list:
|
||||||
@@ -238,12 +240,12 @@ def generate_hints_buffer(output: str, hints: Dict) -> Generator:
|
|||||||
try:
|
try:
|
||||||
hint_list.append(hint['hint'].format(*hint_vars))
|
hint_list.append(hint['hint'].format(*hint_vars))
|
||||||
except KeyError as e:
|
except KeyError as e:
|
||||||
red_print('Argument {} missing in {}. Check hints.yml file.'.format(e, hint))
|
red_print(f'Argument {e} missing in {hint}. Check hints.yml file.')
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
else:
|
else:
|
||||||
match = re.compile(hint['re']).search(output)
|
match = re.compile(hint['re']).search(output)
|
||||||
except KeyError as e:
|
except KeyError as e:
|
||||||
red_print('Argument {} missing in {}. Check hints.yml file.'.format(e, hint))
|
red_print(f'Argument {e} missing in {hint}. Check hints.yml file.')
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
except re.error as e:
|
except re.error as e:
|
||||||
red_print('{} from hints.yml have {} problem. Check hints.yml file.'.format(hint['re'], e))
|
red_print('{} from hints.yml have {} problem. Check hints.yml file.'.format(hint['re'], e))
|
||||||
@@ -256,14 +258,14 @@ def generate_hints_buffer(output: str, hints: Dict) -> Generator:
|
|||||||
try:
|
try:
|
||||||
yield ' '.join(['HINT:', hint['hint'].format(extra_info)])
|
yield ' '.join(['HINT:', hint['hint'].format(extra_info)])
|
||||||
except KeyError:
|
except KeyError:
|
||||||
raise KeyError("Argument 'hint' missing in {}. Check hints.yml file.".format(hint))
|
raise KeyError(f"Argument 'hint' missing in {hint}. Check hints.yml file.")
|
||||||
|
|
||||||
|
|
||||||
def generate_hints(*filenames: str) -> Generator:
|
def generate_hints(*filenames: str) -> Generator:
|
||||||
"""Getting output files and printing hints on how to resolve errors based on the output."""
|
"""Getting output files and printing hints on how to resolve errors based on the output."""
|
||||||
hints = load_hints()
|
hints = load_hints()
|
||||||
for file_name in filenames:
|
for file_name in filenames:
|
||||||
with open(file_name, 'r', encoding='utf-8') as file:
|
with open(file_name, encoding='utf-8') as file:
|
||||||
yield from generate_hints_buffer(file.read(), hints)
|
yield from generate_hints_buffer(file.read(), hints)
|
||||||
|
|
||||||
|
|
||||||
@@ -284,8 +286,18 @@ def fit_text_in_terminal(out: str) -> str:
|
|||||||
|
|
||||||
|
|
||||||
class RunTool:
|
class RunTool:
|
||||||
def __init__(self, tool_name: str, args: List, cwd: str, env: Optional[Dict]=None, custom_error_handler: Optional[FunctionType]=None,
|
def __init__(
|
||||||
build_dir: Optional[str]=None, hints: bool=True, force_progression: bool=False, interactive: bool=False, convert_output: bool=False
|
self,
|
||||||
|
tool_name: str,
|
||||||
|
args: list,
|
||||||
|
cwd: str,
|
||||||
|
env: dict | None = None,
|
||||||
|
custom_error_handler: FunctionType | None = None,
|
||||||
|
build_dir: str | None = None,
|
||||||
|
hints: bool = True,
|
||||||
|
force_progression: bool = False,
|
||||||
|
interactive: bool = False,
|
||||||
|
convert_output: bool = False,
|
||||||
) -> None:
|
) -> None:
|
||||||
self.tool_name = tool_name
|
self.tool_name = tool_name
|
||||||
self.args = args
|
self.args = args
|
||||||
@@ -301,20 +313,23 @@ class RunTool:
|
|||||||
|
|
||||||
def __call__(self) -> None:
|
def __call__(self) -> None:
|
||||||
def quote_arg(arg: str) -> str:
|
def quote_arg(arg: str) -> str:
|
||||||
""" Quote the `arg` with whitespace in them because it can cause problems when we call it from a subprocess."""
|
"""
|
||||||
if re.match(r"^(?![\'\"]).*\s.*", arg):
|
Quote the `arg` with whitespace in them because
|
||||||
|
it can cause problems when we call it from a subprocess.
|
||||||
|
"""
|
||||||
|
if re.match(r'^(?![\'\"]).*\s.*', arg):
|
||||||
return ''.join(["'", arg, "'"])
|
return ''.join(["'", arg, "'"])
|
||||||
return arg
|
return arg
|
||||||
|
|
||||||
self.args = [str(arg) for arg in self.args]
|
self.args = [str(arg) for arg in self.args]
|
||||||
display_args = ' '.join(quote_arg(arg) for arg in self.args)
|
display_args = ' '.join(quote_arg(arg) for arg in self.args)
|
||||||
print('Running %s in directory %s' % (self.tool_name, quote_arg(self.cwd)))
|
print(f'Running {self.tool_name} in directory {quote_arg(self.cwd)}')
|
||||||
print('Executing "%s"...' % str(display_args))
|
print(f'Executing "{str(display_args)}"...')
|
||||||
|
|
||||||
env_copy = dict(os.environ)
|
env_copy = dict(os.environ)
|
||||||
env_copy.update(self.env or {})
|
env_copy.update(self.env or {})
|
||||||
|
|
||||||
process: Union[Process, subprocess.CompletedProcess[bytes]]
|
process: Process | subprocess.CompletedProcess[bytes]
|
||||||
if self.hints:
|
if self.hints:
|
||||||
process, stderr_output_file, stdout_output_file = asyncio.run(self.run_command(self.args, env_copy))
|
process, stderr_output_file, stdout_output_file = asyncio.run(self.run_command(self.args, env_copy))
|
||||||
else:
|
else:
|
||||||
@@ -332,12 +347,14 @@ class RunTool:
|
|||||||
if not self.interactive:
|
if not self.interactive:
|
||||||
for hint in generate_hints(stderr_output_file, stdout_output_file):
|
for hint in generate_hints(stderr_output_file, stdout_output_file):
|
||||||
yellow_print(hint)
|
yellow_print(hint)
|
||||||
raise FatalError('{} failed with exit code {}, output of the command is in the {} and {}'.format(self.tool_name, process.returncode,
|
raise FatalError(
|
||||||
stderr_output_file, stdout_output_file))
|
f'{self.tool_name} failed with exit code {process.returncode}, '
|
||||||
|
f'output of the command is in the {stderr_output_file} and {stdout_output_file}'
|
||||||
|
)
|
||||||
|
|
||||||
raise FatalError('{} failed with exit code {}'.format(self.tool_name, process.returncode))
|
raise FatalError(f'{self.tool_name} failed with exit code {process.returncode}')
|
||||||
|
|
||||||
async def run_command(self, cmd: List, env_copy: Dict) -> Tuple[Process, Optional[str], Optional[str]]:
|
async def run_command(self, cmd: list, env_copy: dict) -> tuple[Process, str | None, str | None]:
|
||||||
"""Run the `cmd` command with capturing stderr and stdout from that function and return returncode
|
"""Run the `cmd` command with capturing stderr and stdout from that function and return returncode
|
||||||
and of the command, the id of the process, paths to captured output"""
|
and of the command, the id of the process, paths to captured output"""
|
||||||
log_dir_name = 'log'
|
log_dir_name = 'log'
|
||||||
@@ -348,13 +365,24 @@ class RunTool:
|
|||||||
# Note: we explicitly pass in os.environ here, as we may have set IDF_PATH there during startup
|
# Note: we explicitly pass in os.environ here, as we may have set IDF_PATH there during startup
|
||||||
# limit was added for avoiding error in idf.py confserver
|
# limit was added for avoiding error in idf.py confserver
|
||||||
try:
|
try:
|
||||||
p = await asyncio.create_subprocess_exec(*cmd, env=env_copy, limit=1024 * 256, cwd=self.cwd, stdout=asyncio.subprocess.PIPE,
|
p = await asyncio.create_subprocess_exec(
|
||||||
stderr=asyncio.subprocess.PIPE)
|
*cmd,
|
||||||
|
env=env_copy,
|
||||||
|
limit=1024 * 256,
|
||||||
|
cwd=self.cwd,
|
||||||
|
stdout=asyncio.subprocess.PIPE,
|
||||||
|
stderr=asyncio.subprocess.PIPE,
|
||||||
|
)
|
||||||
except NotImplementedError:
|
except NotImplementedError:
|
||||||
message = f'ERROR: {sys.executable} doesn\'t support asyncio. The issue can be worked around by re-running idf.py with the "--no-hints" argument.'
|
message = (
|
||||||
|
f"ERROR: {sys.executable} doesn't support asyncio. "
|
||||||
|
"Workaround: re-run idf.py with the '--no-hints' argument."
|
||||||
|
)
|
||||||
if sys.platform == 'win32':
|
if sys.platform == 'win32':
|
||||||
message += ' To fix the issue use the Windows Installer for setting up your python environment, ' \
|
message += (
|
||||||
|
' To fix the issue use the Windows Installer for setting up your python environment, '
|
||||||
'available from: https://dl.espressif.com/dl/esp-idf/'
|
'available from: https://dl.espressif.com/dl/esp-idf/'
|
||||||
|
)
|
||||||
sys.exit(message)
|
sys.exit(message)
|
||||||
|
|
||||||
stderr_output_file = os.path.join(self.build_dir, log_dir_name, f'idf_py_stderr_output_{p.pid}')
|
stderr_output_file = os.path.join(self.build_dir, log_dir_name, f'idf_py_stderr_output_{p.pid}')
|
||||||
@@ -363,7 +391,8 @@ class RunTool:
|
|||||||
try:
|
try:
|
||||||
await asyncio.gather(
|
await asyncio.gather(
|
||||||
self.read_and_write_stream(p.stderr, stderr_output_file, sys.stderr),
|
self.read_and_write_stream(p.stderr, stderr_output_file, sys.stderr),
|
||||||
self.read_and_write_stream(p.stdout, stdout_output_file, sys.stdout))
|
self.read_and_write_stream(p.stdout, stdout_output_file, sys.stdout),
|
||||||
|
)
|
||||||
except asyncio.CancelledError:
|
except asyncio.CancelledError:
|
||||||
# The process we are trying to read from was terminated. Print the
|
# The process we are trying to read from was terminated. Print the
|
||||||
# message here and let the asyncio to finish, because
|
# message here and let the asyncio to finish, because
|
||||||
@@ -376,9 +405,11 @@ class RunTool:
|
|||||||
await p.wait() # added for avoiding None returncode
|
await p.wait() # added for avoiding None returncode
|
||||||
return p, stderr_output_file, stdout_output_file
|
return p, stderr_output_file, stdout_output_file
|
||||||
|
|
||||||
async def read_and_write_stream(self, input_stream: asyncio.StreamReader, output_filename: str,
|
async def read_and_write_stream(
|
||||||
output_stream: TextIO) -> None:
|
self, input_stream: asyncio.StreamReader, output_filename: str, output_stream: TextIO
|
||||||
|
) -> None:
|
||||||
"""read the output of the `input_stream` and then write it into `output_filename` and `output_stream`"""
|
"""read the output of the `input_stream` and then write it into `output_filename` and `output_stream`"""
|
||||||
|
|
||||||
def delete_ansi_escape(text: str) -> str:
|
def delete_ansi_escape(text: str) -> str:
|
||||||
ansi_escape = re.compile(r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])')
|
ansi_escape = re.compile(r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])')
|
||||||
return ansi_escape.sub('', text)
|
return ansi_escape.sub('', text)
|
||||||
@@ -394,7 +425,7 @@ class RunTool:
|
|||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
async def read_stream() -> Optional[str]:
|
async def read_stream() -> str | None:
|
||||||
try:
|
try:
|
||||||
output_b = await input_stream.readline()
|
output_b = await input_stream.readline()
|
||||||
return output_b.decode(errors='ignore')
|
return output_b.decode(errors='ignore')
|
||||||
@@ -404,7 +435,7 @@ class RunTool:
|
|||||||
except AttributeError:
|
except AttributeError:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
async def read_interactive_stream() -> Optional[str]:
|
async def read_interactive_stream() -> str | None:
|
||||||
buffer = b''
|
buffer = b''
|
||||||
while True:
|
while True:
|
||||||
output_b = await input_stream.read(1)
|
output_b = await input_stream.read(1)
|
||||||
@@ -470,9 +501,11 @@ class RunTool:
|
|||||||
for hint in generate_hints_buffer(last_line, hints):
|
for hint in generate_hints_buffer(last_line, hints):
|
||||||
yellow_print(hint)
|
yellow_print(hint)
|
||||||
last_line = ''
|
last_line = ''
|
||||||
except (RuntimeError, EnvironmentError) as e:
|
except (OSError, RuntimeError) as e:
|
||||||
yellow_print('WARNING: The exception {} was raised and we can\'t capture all your {} and '
|
yellow_print(
|
||||||
'hints on how to resolve errors can be not accurate.'.format(e, output_stream.name.strip('<>')))
|
"WARNING: The exception {} was raised and we can't capture all your {} and "
|
||||||
|
'hints on how to resolve errors can be not accurate.'.format(e, output_stream.name.strip('<>'))
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def run_tool(*args: Any, **kwargs: Any) -> None:
|
def run_tool(*args: Any, **kwargs: Any) -> None:
|
||||||
@@ -480,8 +513,14 @@ def run_tool(*args: Any, **kwargs: Any) -> None:
|
|||||||
return RunTool(*args, **kwargs)()
|
return RunTool(*args, **kwargs)()
|
||||||
|
|
||||||
|
|
||||||
def run_target(target_name: str, args: 'PropertyDict', env: Optional[Dict]=None,
|
def run_target(
|
||||||
custom_error_handler: Optional[FunctionType]=None, force_progression: bool=False, interactive: bool=False) -> None:
|
target_name: str,
|
||||||
|
args: 'PropertyDict',
|
||||||
|
env: dict | None = None,
|
||||||
|
custom_error_handler: FunctionType | None = None,
|
||||||
|
force_progression: bool = False,
|
||||||
|
interactive: bool = False,
|
||||||
|
) -> None:
|
||||||
"""Run target in build directory."""
|
"""Run target in build directory."""
|
||||||
if env is None:
|
if env is None:
|
||||||
env = {}
|
env = {}
|
||||||
@@ -498,11 +537,19 @@ def run_target(target_name: str, args: 'PropertyDict', env: Optional[Dict]=None,
|
|||||||
if 'CLICOLOR_FORCE' not in env:
|
if 'CLICOLOR_FORCE' not in env:
|
||||||
env['CLICOLOR_FORCE'] = '1'
|
env['CLICOLOR_FORCE'] = '1'
|
||||||
|
|
||||||
RunTool(generator_cmd[0], generator_cmd + [target_name], args.build_dir, env, custom_error_handler, hints=not args.no_hints,
|
RunTool(
|
||||||
force_progression=force_progression, interactive=interactive)()
|
generator_cmd[0],
|
||||||
|
generator_cmd + [target_name],
|
||||||
|
args.build_dir,
|
||||||
|
env,
|
||||||
|
custom_error_handler,
|
||||||
|
hints=not args.no_hints,
|
||||||
|
force_progression=force_progression,
|
||||||
|
interactive=interactive,
|
||||||
|
)()
|
||||||
|
|
||||||
|
|
||||||
def _strip_quotes(value: str, regexp: re.Pattern=re.compile(r"^\"(.*)\"$|^'(.*)'$|^(.*)$")) -> Optional[str]:
|
def _strip_quotes(value: str, regexp: re.Pattern = re.compile(r"^\"(.*)\"$|^'(.*)'$|^(.*)$")) -> str | None:
|
||||||
"""
|
"""
|
||||||
Strip quotes like CMake does during parsing cache entries
|
Strip quotes like CMake does during parsing cache entries
|
||||||
"""
|
"""
|
||||||
@@ -510,7 +557,7 @@ def _strip_quotes(value: str, regexp: re.Pattern=re.compile(r"^\"(.*)\"$|^'(.*)'
|
|||||||
return [x for x in matching_values.groups() if x is not None][0].rstrip() if matching_values is not None else None
|
return [x for x in matching_values.groups() if x is not None][0].rstrip() if matching_values is not None else None
|
||||||
|
|
||||||
|
|
||||||
def _parse_cmakecache(path: str) -> Dict:
|
def _parse_cmakecache(path: str) -> dict:
|
||||||
"""
|
"""
|
||||||
Parse the CMakeCache file at 'path'.
|
Parse the CMakeCache file at 'path'.
|
||||||
|
|
||||||
@@ -529,13 +576,13 @@ def _parse_cmakecache(path: str) -> Dict:
|
|||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
def _parse_cmdl_cmakecache(entries: List) -> Dict[str, str]:
|
def _parse_cmdl_cmakecache(entries: list) -> dict[str, str]:
|
||||||
"""
|
"""
|
||||||
Parse list of CMake cache entries passed in via the -D option.
|
Parse list of CMake cache entries passed in via the -D option.
|
||||||
|
|
||||||
Returns a dict of name:value.
|
Returns a dict of name:value.
|
||||||
"""
|
"""
|
||||||
result: Dict = {}
|
result: dict = {}
|
||||||
for entry in entries:
|
for entry in entries:
|
||||||
key, value = entry.split('=', 1)
|
key, value = entry.split('=', 1)
|
||||||
value = _strip_quotes(value)
|
value = _strip_quotes(value)
|
||||||
@@ -544,7 +591,7 @@ def _parse_cmdl_cmakecache(entries: List) -> Dict[str, str]:
|
|||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
def _new_cmakecache_entries(cache: Dict, cache_cmdl: Dict) -> bool:
|
def _new_cmakecache_entries(cache: dict, cache_cmdl: dict) -> bool:
|
||||||
for entry in cache_cmdl:
|
for entry in cache_cmdl:
|
||||||
if entry not in cache:
|
if entry not in cache:
|
||||||
return True
|
return True
|
||||||
@@ -557,14 +604,15 @@ def _detect_cmake_generator(prog_name: str) -> Any:
|
|||||||
"""
|
"""
|
||||||
Find the default cmake generator, if none was specified. Raises an exception if no valid generator is found.
|
Find the default cmake generator, if none was specified. Raises an exception if no valid generator is found.
|
||||||
"""
|
"""
|
||||||
for (generator_name, generator) in GENERATORS.items():
|
for generator_name, generator in GENERATORS.items():
|
||||||
if executable_exists(generator['version']):
|
if executable_exists(generator['version']):
|
||||||
return generator_name
|
return generator_name
|
||||||
raise FatalError("To use %s, either the 'ninja' or 'GNU make' build tool must be available in the PATH" % prog_name)
|
raise FatalError(f"To use {prog_name}, either the 'ninja' or 'GNU make' build tool must be available in the PATH")
|
||||||
|
|
||||||
|
|
||||||
def ensure_build_directory(args: 'PropertyDict', prog_name: str, always_run_cmake: bool=False,
|
def ensure_build_directory(
|
||||||
env: Optional[Dict]=None) -> None:
|
args: 'PropertyDict', prog_name: str, always_run_cmake: bool = False, env: dict | None = None
|
||||||
|
) -> None:
|
||||||
"""Check the build directory exists and that cmake has been run there.
|
"""Check the build directory exists and that cmake has been run there.
|
||||||
|
|
||||||
If this isn't the case, create the build directory (if necessary) and
|
If this isn't the case, create the build directory (if necessary) and
|
||||||
@@ -583,11 +631,11 @@ def ensure_build_directory(args: 'PropertyDict', prog_name: str, always_run_cmak
|
|||||||
# Verify the project directory
|
# Verify the project directory
|
||||||
if not os.path.isdir(project_dir):
|
if not os.path.isdir(project_dir):
|
||||||
if not os.path.exists(project_dir):
|
if not os.path.exists(project_dir):
|
||||||
raise FatalError('Project directory %s does not exist' % project_dir)
|
raise FatalError(f'Project directory {project_dir} does not exist')
|
||||||
else:
|
else:
|
||||||
raise FatalError('%s must be a project directory' % project_dir)
|
raise FatalError(f'{project_dir} must be a project directory')
|
||||||
if not os.path.exists(os.path.join(project_dir, 'CMakeLists.txt')):
|
if not os.path.exists(os.path.join(project_dir, 'CMakeLists.txt')):
|
||||||
raise FatalError('CMakeLists.txt not found in project directory %s' % project_dir)
|
raise FatalError(f'CMakeLists.txt not found in project directory {project_dir}')
|
||||||
|
|
||||||
# Verify/create the build directory
|
# Verify/create the build directory
|
||||||
build_dir = args.build_dir
|
build_dir = args.build_dir
|
||||||
@@ -598,7 +646,7 @@ def ensure_build_directory(args: 'PropertyDict', prog_name: str, always_run_cmak
|
|||||||
cache_path = os.path.join(build_dir, 'CMakeCache.txt')
|
cache_path = os.path.join(build_dir, 'CMakeCache.txt')
|
||||||
cache = _parse_cmakecache(cache_path) if os.path.exists(cache_path) else {}
|
cache = _parse_cmakecache(cache_path) if os.path.exists(cache_path) else {}
|
||||||
|
|
||||||
args.define_cache_entry.append('CCACHE_ENABLE=%d' % args.ccache)
|
args.define_cache_entry.append(f'CCACHE_ENABLE={args.ccache:d}')
|
||||||
|
|
||||||
cache_cmdl = _parse_cmdl_cmakecache(args.define_cache_entry)
|
cache_cmdl = _parse_cmdl_cmakecache(args.define_cache_entry)
|
||||||
|
|
||||||
@@ -614,7 +662,7 @@ def ensure_build_directory(args: 'PropertyDict', prog_name: str, always_run_cmak
|
|||||||
'-G',
|
'-G',
|
||||||
args.generator,
|
args.generator,
|
||||||
'-DPYTHON_DEPS_CHECKED=1',
|
'-DPYTHON_DEPS_CHECKED=1',
|
||||||
'-DPYTHON={}'.format(sys.executable),
|
f'-DPYTHON={sys.executable}',
|
||||||
'-DESP_PLATFORM=1',
|
'-DESP_PLATFORM=1',
|
||||||
]
|
]
|
||||||
if args.cmake_warn_uninitialized:
|
if args.cmake_warn_uninitialized:
|
||||||
@@ -641,17 +689,20 @@ def ensure_build_directory(args: 'PropertyDict', prog_name: str, always_run_cmak
|
|||||||
except KeyError:
|
except KeyError:
|
||||||
generator = _detect_cmake_generator(prog_name)
|
generator = _detect_cmake_generator(prog_name)
|
||||||
if args.generator is None:
|
if args.generator is None:
|
||||||
args.generator = (generator) # reuse the previously configured generator, if none was given
|
args.generator = generator # reuse the previously configured generator, if none was given
|
||||||
if generator != args.generator:
|
if generator != args.generator:
|
||||||
raise FatalError("Build is configured for generator '%s' not '%s'. Run '%s fullclean' to start again." %
|
raise FatalError(
|
||||||
(generator, args.generator, prog_name))
|
f"Build is configured for generator '{generator}' not '{args.generator}'. "
|
||||||
|
f"Run '{prog_name} fullclean' to start again."
|
||||||
|
)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
home_dir = cache['CMAKE_HOME_DIRECTORY']
|
home_dir = cache['CMAKE_HOME_DIRECTORY']
|
||||||
if os.path.realpath(home_dir) != os.path.realpath(project_dir):
|
if os.path.realpath(home_dir) != os.path.realpath(project_dir):
|
||||||
raise FatalError(
|
raise FatalError(
|
||||||
"Build directory '%s' configured for project '%s' not '%s'. Run '%s fullclean' to start again." %
|
f"Build directory '{build_dir}' configured for project '{os.path.realpath(home_dir)}' "
|
||||||
(build_dir, os.path.realpath(home_dir), os.path.realpath(project_dir), prog_name))
|
f"not '{os.path.realpath(project_dir)}'. Run '{prog_name} fullclean' to start again."
|
||||||
|
)
|
||||||
except KeyError:
|
except KeyError:
|
||||||
pass # if cmake failed part way, CMAKE_HOME_DIRECTORY may not be set yet
|
pass # if cmake failed part way, CMAKE_HOME_DIRECTORY may not be set yet
|
||||||
|
|
||||||
@@ -659,8 +710,10 @@ def ensure_build_directory(args: 'PropertyDict', prog_name: str, always_run_cmak
|
|||||||
python = cache['PYTHON']
|
python = cache['PYTHON']
|
||||||
if os.path.normcase(python) != os.path.normcase(sys.executable):
|
if os.path.normcase(python) != os.path.normcase(sys.executable):
|
||||||
raise FatalError(
|
raise FatalError(
|
||||||
"'{}' is currently active in the environment while the project was configured with '{}'. "
|
f"'{sys.executable}' is currently active in the environment while the project was "
|
||||||
"Run '{} fullclean' to start again.".format(sys.executable, python, prog_name))
|
f"configured with '{python}'. "
|
||||||
|
f"Run '{prog_name} fullclean' to start again."
|
||||||
|
)
|
||||||
except KeyError:
|
except KeyError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@@ -668,8 +721,41 @@ def ensure_build_directory(args: 'PropertyDict', prog_name: str, always_run_cmak
|
|||||||
_set_build_context(args)
|
_set_build_context(args)
|
||||||
|
|
||||||
|
|
||||||
def merge_action_lists(*action_lists: Dict) -> Dict:
|
def merge_action_lists(*action_lists: dict, custom_actions: dict[str, Any] | None = None) -> dict:
|
||||||
merged_actions: Dict = {
|
"""
|
||||||
|
Merge multiple action lists into a single dictionary.
|
||||||
|
|
||||||
|
External action lists (via custom_actions) come from outside components or
|
||||||
|
user-defined extensions:
|
||||||
|
- Any duplicate with an existing action or option will trigger a warning,
|
||||||
|
and external definitions will not override defaults.
|
||||||
|
|
||||||
|
*action_lists: Actions that comes from official ESP-IDF development
|
||||||
|
custom_actions: Actions that comes from external extensions
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _get_all_action_identifiers(actions_dict: dict[str, Any]) -> set[str]:
|
||||||
|
"""Extract all action names and their aliases as a single set."""
|
||||||
|
return {name for name in actions_dict.keys()} | {
|
||||||
|
alias for action in actions_dict.values() for alias in action.get('aliases', [])
|
||||||
|
}
|
||||||
|
|
||||||
|
def _check_action_conflicts(name: str, action: dict[str, Any], existing_identifiers: set[str]) -> None:
|
||||||
|
"""Check if an action name or its aliases conflict with existing identifiers.
|
||||||
|
Raises UserWarning if conflicts are found.
|
||||||
|
"""
|
||||||
|
if name in existing_identifiers:
|
||||||
|
raise UserWarning(f"Action '{name}' already defined")
|
||||||
|
|
||||||
|
aliases = action.get('aliases', [])
|
||||||
|
conflicting_aliases = set(aliases) & existing_identifiers
|
||||||
|
if conflicting_aliases:
|
||||||
|
raise UserWarning(
|
||||||
|
f"Action '{name}' has aliases {list(conflicting_aliases)} "
|
||||||
|
'that conflict with existing actions or aliases'
|
||||||
|
)
|
||||||
|
|
||||||
|
merged_actions: dict = {
|
||||||
'global_options': [],
|
'global_options': [],
|
||||||
'actions': {},
|
'actions': {},
|
||||||
'global_action_callbacks': [],
|
'global_action_callbacks': [],
|
||||||
@@ -678,10 +764,43 @@ def merge_action_lists(*action_lists: Dict) -> Dict:
|
|||||||
merged_actions['global_options'].extend(action_list.get('global_options', []))
|
merged_actions['global_options'].extend(action_list.get('global_options', []))
|
||||||
merged_actions['actions'].update(action_list.get('actions', {}))
|
merged_actions['actions'].update(action_list.get('actions', {}))
|
||||||
merged_actions['global_action_callbacks'].extend(action_list.get('global_action_callbacks', []))
|
merged_actions['global_action_callbacks'].extend(action_list.get('global_action_callbacks', []))
|
||||||
|
|
||||||
|
if not custom_actions:
|
||||||
|
return merged_actions
|
||||||
|
|
||||||
|
if not custom_actions.get('version'):
|
||||||
|
raise AttributeError(
|
||||||
|
'Attribute "version" is required in custom extension. '
|
||||||
|
'Please update your extension dictionary to contain the "version" attribute.'
|
||||||
|
)
|
||||||
|
|
||||||
|
existing_identifiers = _get_all_action_identifiers(merged_actions['actions'])
|
||||||
|
for name, action in custom_actions.get('actions', {}).items():
|
||||||
|
try:
|
||||||
|
_check_action_conflicts(name, action, existing_identifiers)
|
||||||
|
merged_actions['actions'][name] = action
|
||||||
|
existing_identifiers.add(name)
|
||||||
|
existing_identifiers.update(action.get('aliases', []))
|
||||||
|
except UserWarning as e:
|
||||||
|
yellow_print(f'WARNING: {e}. External action will not be added.')
|
||||||
|
|
||||||
|
for new_opt in custom_actions.get('global_options', []):
|
||||||
|
if any(
|
||||||
|
set(new_opt.get('names', [])) & set(existing.get('names', []))
|
||||||
|
for existing in merged_actions['global_options']
|
||||||
|
):
|
||||||
|
yellow_print(
|
||||||
|
f'WARNING: Global option {new_opt["names"]} already defined. External option will not be added.'
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
merged_actions['global_options'].append(new_opt)
|
||||||
|
|
||||||
|
merged_actions['global_action_callbacks'].extend(custom_actions.get('global_action_callbacks', []))
|
||||||
|
|
||||||
return merged_actions
|
return merged_actions
|
||||||
|
|
||||||
|
|
||||||
def get_sdkconfig_filename(args: 'PropertyDict', cache_cmdl: Optional[Dict]=None) -> str:
|
def get_sdkconfig_filename(args: 'PropertyDict', cache_cmdl: dict | None = None) -> str:
|
||||||
"""
|
"""
|
||||||
Get project's sdkconfig file name.
|
Get project's sdkconfig file name.
|
||||||
"""
|
"""
|
||||||
@@ -693,7 +812,7 @@ def get_sdkconfig_filename(args: 'PropertyDict', cache_cmdl: Optional[Dict]=None
|
|||||||
|
|
||||||
proj_desc_path = os.path.join(args.build_dir, 'project_description.json')
|
proj_desc_path = os.path.join(args.build_dir, 'project_description.json')
|
||||||
try:
|
try:
|
||||||
with open(proj_desc_path, 'r', encoding='utf-8') as f:
|
with open(proj_desc_path, encoding='utf-8') as f:
|
||||||
proj_desc = json.load(f)
|
proj_desc = json.load(f)
|
||||||
return str(proj_desc['config_file'])
|
return str(proj_desc['config_file'])
|
||||||
except (OSError, KeyError):
|
except (OSError, KeyError):
|
||||||
@@ -702,7 +821,7 @@ def get_sdkconfig_filename(args: 'PropertyDict', cache_cmdl: Optional[Dict]=None
|
|||||||
return os.path.join(args.project_dir, 'sdkconfig')
|
return os.path.join(args.project_dir, 'sdkconfig')
|
||||||
|
|
||||||
|
|
||||||
def get_sdkconfig_value(sdkconfig_file: str, key: str) -> Optional[str]:
|
def get_sdkconfig_value(sdkconfig_file: str, key: str) -> str | None:
|
||||||
"""
|
"""
|
||||||
Return the value of given key from sdkconfig_file.
|
Return the value of given key from sdkconfig_file.
|
||||||
If sdkconfig_file does not exist or the option is not present, returns None.
|
If sdkconfig_file does not exist or the option is not present, returns None.
|
||||||
@@ -713,8 +832,8 @@ def get_sdkconfig_value(sdkconfig_file: str, key: str) -> Optional[str]:
|
|||||||
# keep track of the last seen value for the given key
|
# keep track of the last seen value for the given key
|
||||||
value = None
|
value = None
|
||||||
# if the value is quoted, this excludes the quotes from the value
|
# if the value is quoted, this excludes the quotes from the value
|
||||||
pattern = re.compile(r"^{}=\"?([^\"]*)\"?$".format(key))
|
pattern = re.compile(rf'^{key}=\"?([^\"]*)\"?$')
|
||||||
with open(sdkconfig_file, 'r', encoding='utf-8') as f:
|
with open(sdkconfig_file, encoding='utf-8') as f:
|
||||||
for line in f:
|
for line in f:
|
||||||
match = re.match(pattern, line)
|
match = re.match(pattern, line)
|
||||||
if match:
|
if match:
|
||||||
@@ -722,15 +841,16 @@ def get_sdkconfig_value(sdkconfig_file: str, key: str) -> Optional[str]:
|
|||||||
return value
|
return value
|
||||||
|
|
||||||
|
|
||||||
def is_target_supported(project_path: str, supported_targets: List) -> bool:
|
def is_target_supported(project_path: str, supported_targets: list) -> bool:
|
||||||
"""
|
"""
|
||||||
Returns True if the active target is supported, or False otherwise.
|
Returns True if the active target is supported, or False otherwise.
|
||||||
"""
|
"""
|
||||||
return get_target(project_path) in supported_targets
|
return get_target(project_path) in supported_targets
|
||||||
|
|
||||||
|
|
||||||
def _check_idf_target(args: 'PropertyDict', prog_name: str, cache: Dict,
|
def _check_idf_target(
|
||||||
cache_cmdl: Dict, env: Optional[Dict]=None) -> None:
|
args: 'PropertyDict', prog_name: str, cache: dict, cache_cmdl: dict, env: dict | None = None
|
||||||
|
) -> None:
|
||||||
"""
|
"""
|
||||||
Cross-check the three settings (sdkconfig, CMakeCache, environment) and if there is
|
Cross-check the three settings (sdkconfig, CMakeCache, environment) and if there is
|
||||||
mismatch, fail with instructions on how to fix this.
|
mismatch, fail with instructions on how to fix this.
|
||||||
@@ -750,34 +870,45 @@ def _check_idf_target(args: 'PropertyDict', prog_name: str, cache: Dict,
|
|||||||
if idf_target_from_env:
|
if idf_target_from_env:
|
||||||
# Let's check that IDF_TARGET values are consistent
|
# Let's check that IDF_TARGET values are consistent
|
||||||
if idf_target_from_sdkconfig and idf_target_from_sdkconfig != idf_target_from_env:
|
if idf_target_from_sdkconfig and idf_target_from_sdkconfig != idf_target_from_env:
|
||||||
raise FatalError("Project sdkconfig '{cfg}' was generated for target '{t_conf}', but environment variable IDF_TARGET "
|
raise FatalError(
|
||||||
"is set to '{t_env}'. Run '{prog} set-target {t_env}' to generate new sdkconfig file for target {t_env}."
|
f"Project sdkconfig '{sdkconfig}' was generated for target '{idf_target_from_sdkconfig}', "
|
||||||
.format(cfg=sdkconfig, t_conf=idf_target_from_sdkconfig, t_env=idf_target_from_env, prog=prog_name))
|
f"but environment variable IDF_TARGET is set to '{idf_target_from_env}'. "
|
||||||
|
f"Run '{prog_name} set-target {idf_target_from_env}' to generate new sdkconfig "
|
||||||
|
f'file for target {idf_target_from_env}.'
|
||||||
|
)
|
||||||
|
|
||||||
if idf_target_from_cache and idf_target_from_cache != idf_target_from_env:
|
if idf_target_from_cache and idf_target_from_cache != idf_target_from_env:
|
||||||
raise FatalError("Target settings are not consistent: '{t_env}' in the environment, '{t_cache}' in CMakeCache.txt. "
|
raise FatalError(
|
||||||
"Run '{prog} fullclean' to start again."
|
f"Target settings are not consistent: '{idf_target_from_env}' in the environment, "
|
||||||
.format(t_env=idf_target_from_env, t_cache=idf_target_from_cache, prog=prog_name))
|
f"'{idf_target_from_cache}' in CMakeCache.txt. "
|
||||||
|
f"Run '{prog_name} fullclean' to start again."
|
||||||
|
)
|
||||||
|
|
||||||
if idf_target_from_cache_cmdl and idf_target_from_cache_cmdl != idf_target_from_env:
|
if idf_target_from_cache_cmdl and idf_target_from_cache_cmdl != idf_target_from_env:
|
||||||
raise FatalError("Target '{t_cmdl}' specified on command line is not consistent with "
|
raise FatalError(
|
||||||
"target '{t_env}' in the environment."
|
f"Target '{idf_target_from_cache_cmdl}' specified on command line is not consistent with "
|
||||||
.format(t_cmdl=idf_target_from_cache_cmdl, t_env=idf_target_from_env))
|
f"target '{idf_target_from_env}' in the environment."
|
||||||
|
)
|
||||||
elif idf_target_from_cache_cmdl:
|
elif idf_target_from_cache_cmdl:
|
||||||
# Check if -DIDF_TARGET is consistent with target in CMakeCache.txt
|
# Check if -DIDF_TARGET is consistent with target in CMakeCache.txt
|
||||||
if idf_target_from_cache and idf_target_from_cache != idf_target_from_cache_cmdl:
|
if idf_target_from_cache and idf_target_from_cache != idf_target_from_cache_cmdl:
|
||||||
raise FatalError("Target '{t_cmdl}' specified on command line is not consistent with "
|
raise FatalError(
|
||||||
"target '{t_cache}' in CMakeCache.txt. Run '{prog} set-target {t_cmdl}' to re-generate "
|
f"Target '{idf_target_from_cache_cmdl}' specified on command line is not consistent with "
|
||||||
|
f"target '{idf_target_from_cache}' in CMakeCache.txt. "
|
||||||
|
f"Run '{prog_name} set-target {idf_target_from_cache_cmdl}' to re-generate "
|
||||||
'CMakeCache.txt.'
|
'CMakeCache.txt.'
|
||||||
.format(t_cache=idf_target_from_cache, t_cmdl=idf_target_from_cache_cmdl, prog=prog_name))
|
)
|
||||||
|
|
||||||
elif idf_target_from_cache:
|
elif idf_target_from_cache:
|
||||||
# This shouldn't happen, unless the user manually edits CMakeCache.txt or sdkconfig, but let's check anyway.
|
# This shouldn't happen, unless the user manually edits CMakeCache.txt or sdkconfig, but let's check anyway.
|
||||||
if idf_target_from_sdkconfig and idf_target_from_cache != idf_target_from_sdkconfig:
|
if idf_target_from_sdkconfig and idf_target_from_cache != idf_target_from_sdkconfig:
|
||||||
raise FatalError("Project sdkconfig '{cfg}' was generated for target '{t_conf}', but CMakeCache.txt contains '{t_cache}'. "
|
raise FatalError(
|
||||||
"To keep the setting in sdkconfig ({t_conf}) and re-generate CMakeCache.txt, run '{prog} fullclean'. "
|
f"Project sdkconfig '{sdkconfig}' was generated for target '{idf_target_from_sdkconfig}', but "
|
||||||
"To re-generate sdkconfig for '{t_cache}' target, run '{prog} set-target {t_cache}'."
|
f"CMakeCache.txt contains '{idf_target_from_cache}'. To keep the setting in sdkconfig "
|
||||||
.format(cfg=sdkconfig, t_conf=idf_target_from_sdkconfig, t_cache=idf_target_from_cache, prog=prog_name))
|
f"({idf_target_from_sdkconfig}) and re-generate CMakeCache.txt, run '{prog_name} fullclean'. To "
|
||||||
|
f"re-generate sdkconfig for '{idf_target_from_cache}' target, run '{prog_name} set-target "
|
||||||
|
f"{idf_target_from_cache}'."
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class TargetChoice(click.Choice):
|
class TargetChoice(click.Choice):
|
||||||
@@ -786,8 +917,9 @@ class TargetChoice(click.Choice):
|
|||||||
- ignores hyphens
|
- ignores hyphens
|
||||||
- not case sensitive
|
- not case sensitive
|
||||||
"""
|
"""
|
||||||
def __init__(self, choices: List) -> None:
|
|
||||||
super(TargetChoice, self).__init__(choices, case_sensitive=False)
|
def __init__(self, choices: list) -> None:
|
||||||
|
super().__init__(choices, case_sensitive=False)
|
||||||
|
|
||||||
def convert(self, value: Any, param: click.Parameter, ctx: click.Context) -> Any:
|
def convert(self, value: Any, param: click.Parameter, ctx: click.Context) -> Any:
|
||||||
def normalize(string: str) -> str:
|
def normalize(string: str) -> str:
|
||||||
@@ -797,7 +929,7 @@ class TargetChoice(click.Choice):
|
|||||||
ctx.token_normalize_func = normalize
|
ctx.token_normalize_func = normalize
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return super(TargetChoice, self).convert(value, param, ctx)
|
return super().convert(value, param, ctx)
|
||||||
finally:
|
finally:
|
||||||
ctx.token_normalize_func = saved_token_normalize_func
|
ctx.token_normalize_func = saved_token_normalize_func
|
||||||
|
|
||||||
@@ -807,7 +939,7 @@ class PropertyDict(dict):
|
|||||||
if name in self:
|
if name in self:
|
||||||
return self[name]
|
return self[name]
|
||||||
else:
|
else:
|
||||||
raise AttributeError("'PropertyDict' object has no attribute '%s'" % name)
|
raise AttributeError(f"'PropertyDict' object has no attribute '{name}'")
|
||||||
|
|
||||||
def __setattr__(self, name: str, value: Any) -> None:
|
def __setattr__(self, name: str, value: Any) -> None:
|
||||||
self[name] = value
|
self[name] = value
|
||||||
@@ -816,4 +948,4 @@ class PropertyDict(dict):
|
|||||||
if name in self:
|
if name in self:
|
||||||
del self[name]
|
del self[name]
|
||||||
else:
|
else:
|
||||||
raise AttributeError("'PropertyDict' object has no attribute '%s'" % name)
|
raise AttributeError(f"'PropertyDict' object has no attribute '{name}'")
|
||||||
|
364
tools/test_build_system/test_idf_extension.py
Normal file
364
tools/test_build_system/test_idf_extension.py
Normal file
@@ -0,0 +1,364 @@
|
|||||||
|
# SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
import textwrap
|
||||||
|
import typing
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from test_build_system_helpers import IdfPyFunc
|
||||||
|
from test_build_system_helpers import replace_in_file
|
||||||
|
|
||||||
|
from conftest import should_clean_test_dir
|
||||||
|
|
||||||
|
# Template constants for extension packages from entrypoints
|
||||||
|
TEST_EXT_TEMPLATE = """
|
||||||
|
def action_extensions(base_actions, project_path):
|
||||||
|
def test_extension_action(target_name, ctx, args):
|
||||||
|
print("Test extension action executed - {suffix}")
|
||||||
|
return 0
|
||||||
|
|
||||||
|
return {{
|
||||||
|
'version': '1',
|
||||||
|
'global_options': [{global_options}],
|
||||||
|
'actions': {{
|
||||||
|
{actions}
|
||||||
|
}}
|
||||||
|
}}
|
||||||
|
"""
|
||||||
|
|
||||||
|
PYPROJECT_TOML_TEMPLATE = """
|
||||||
|
[project]
|
||||||
|
name = "{package_name}"
|
||||||
|
version = "0.1.0"
|
||||||
|
|
||||||
|
[project.entry-points.idf_extension]
|
||||||
|
{entry_point_name} = "{declarative_value}"
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class ExtensionPackageManager:
|
||||||
|
"""
|
||||||
|
Helper class to manage multiple extension packages within a single test.
|
||||||
|
Tracks all created packages and handles cleanup automatically.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, func_work_dir: Path, request: pytest.FixtureRequest):
|
||||||
|
self.func_work_dir = func_work_dir
|
||||||
|
self.request = request
|
||||||
|
self.packages: list[tuple[Path, str]] = []
|
||||||
|
|
||||||
|
def create_package(
|
||||||
|
self,
|
||||||
|
suffix: str,
|
||||||
|
template_vars: dict | None = None,
|
||||||
|
) -> tuple[str, str]:
|
||||||
|
"""
|
||||||
|
Create and install an extension package with the given suffix.
|
||||||
|
- suffix: Package suffix for unique naming
|
||||||
|
- template_vars: Dictionary of variables to substitute in templates
|
||||||
|
"""
|
||||||
|
test_name_sanitized = self.request.node.name.replace('[', '_').replace(']', '')
|
||||||
|
|
||||||
|
# Default template variables
|
||||||
|
default_vars = {
|
||||||
|
'suffix': suffix,
|
||||||
|
'package_name': f'test-idf-extension-package-{suffix}',
|
||||||
|
'package_dir_name': f'{test_name_sanitized}_pkg_{suffix}',
|
||||||
|
'action_name': f'test-extension-action-{suffix}',
|
||||||
|
'entry_point_name': f'test_extension_{suffix}',
|
||||||
|
'declarative_value': f'test_extension_package_{suffix}.test_ext:action_extensions',
|
||||||
|
# Template placeholders - can be overridden via template_vars
|
||||||
|
'global_options': '',
|
||||||
|
'actions': f"""'{f'test-extension-action-{suffix}'}': {{
|
||||||
|
'callback': test_extension_action,
|
||||||
|
'help': 'Test action from extension package - {suffix}'
|
||||||
|
}}""",
|
||||||
|
'extension_file_name': 'test_ext.py',
|
||||||
|
}
|
||||||
|
|
||||||
|
# Merge with user-provided variables
|
||||||
|
if template_vars:
|
||||||
|
default_vars.update(template_vars)
|
||||||
|
|
||||||
|
package_path = self.func_work_dir / default_vars['package_dir_name']
|
||||||
|
package_path.mkdir(exist_ok=True)
|
||||||
|
logging.debug(f"Creating python package '{default_vars['package_name']}' in directory '{package_path}'")
|
||||||
|
test_package_dir = package_path / f'test_extension_package_{suffix}'
|
||||||
|
test_package_dir.mkdir(exist_ok=True)
|
||||||
|
(test_package_dir / '__init__.py').write_text('')
|
||||||
|
|
||||||
|
# Fill test_ext.py with template
|
||||||
|
(test_package_dir / default_vars['extension_file_name']).write_text(
|
||||||
|
textwrap.dedent(TEST_EXT_TEMPLATE.format(**default_vars))
|
||||||
|
)
|
||||||
|
|
||||||
|
# Fill pyproject.toml with template
|
||||||
|
(package_path / 'pyproject.toml').write_text(textwrap.dedent(PYPROJECT_TOML_TEMPLATE.format(**default_vars)))
|
||||||
|
|
||||||
|
# Install the package
|
||||||
|
cmd = [sys.executable, '-m', 'pip', 'install', '-e', '.']
|
||||||
|
logging.debug(f'Running command: {" ".join(cmd)} in {package_path}')
|
||||||
|
try:
|
||||||
|
subprocess.run(cmd, check=True, cwd=package_path, capture_output=True, text=True)
|
||||||
|
except subprocess.CalledProcessError as e:
|
||||||
|
logging.error(f'Failed to install package at {package_path}: {e.stderr}')
|
||||||
|
raise
|
||||||
|
|
||||||
|
# Track the package for cleanup
|
||||||
|
self.packages.append((package_path, default_vars['package_name']))
|
||||||
|
|
||||||
|
return default_vars['entry_point_name'], default_vars['action_name']
|
||||||
|
|
||||||
|
def cleanup(self) -> None:
|
||||||
|
"""
|
||||||
|
Uninstall all packages and clean up directories.
|
||||||
|
"""
|
||||||
|
for package_path, package_name in self.packages:
|
||||||
|
try:
|
||||||
|
subprocess.run([sys.executable, '-m', 'pip', 'uninstall', '-y', package_name])
|
||||||
|
logging.debug(f'Uninstalled test extension package: {package_name}')
|
||||||
|
except Exception as e:
|
||||||
|
logging.warning(f'Failed to uninstall test extension package: {e}')
|
||||||
|
|
||||||
|
if should_clean_test_dir(self.request):
|
||||||
|
try:
|
||||||
|
shutil.rmtree(package_path, ignore_errors=True)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def extension_package_manager(
|
||||||
|
func_work_dir: Path, request: pytest.FixtureRequest
|
||||||
|
) -> typing.Generator[ExtensionPackageManager, None, None]:
|
||||||
|
"""
|
||||||
|
Fixture that provides an ExtensionPackageManager to create multiple extension packages
|
||||||
|
within a single test.
|
||||||
|
"""
|
||||||
|
manager = ExtensionPackageManager(func_work_dir, request)
|
||||||
|
|
||||||
|
try:
|
||||||
|
yield manager
|
||||||
|
finally:
|
||||||
|
manager.cleanup()
|
||||||
|
|
||||||
|
|
||||||
|
# ----------- Test cases for component extension -----------
|
||||||
|
|
||||||
|
|
||||||
|
def test_extension_from_component(idf_py: IdfPyFunc, test_app_copy: Path) -> None:
|
||||||
|
logging.info('Test loading extensions from component directories')
|
||||||
|
|
||||||
|
# Create a component with a CLI extension
|
||||||
|
idf_py('create-component', '-C', 'components', 'test_component')
|
||||||
|
component_dir = test_app_copy / 'components' / 'test_component'
|
||||||
|
idf_ext_py = component_dir / 'idf_ext.py'
|
||||||
|
idf_ext_py.write_text(
|
||||||
|
textwrap.dedent(
|
||||||
|
TEST_EXT_TEMPLATE.format(
|
||||||
|
suffix='component extension',
|
||||||
|
global_options='',
|
||||||
|
actions="""'test-component-action': {
|
||||||
|
'callback': test_extension_action,
|
||||||
|
'help': 'Test action from component extension'
|
||||||
|
}""",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
replace_in_file(
|
||||||
|
test_app_copy / 'main' / 'CMakeLists.txt',
|
||||||
|
'# placeholder_inside_idf_component_register',
|
||||||
|
'\n'.join(['INCLUDE_DIRS "." ', 'REQUIRES "test_component" ']),
|
||||||
|
)
|
||||||
|
|
||||||
|
idf_py('reconfigure')
|
||||||
|
ret = idf_py('--help')
|
||||||
|
assert 'test-component-action' in ret.stdout
|
||||||
|
assert 'INFO: Loaded component extension from "components/test_component"' in ret.stdout
|
||||||
|
ret = idf_py('test-component-action')
|
||||||
|
assert 'Test extension action executed - component extension' in ret.stdout
|
||||||
|
assert 'INFO: Loaded component extension from "components/test_component"' in ret.stdout
|
||||||
|
|
||||||
|
|
||||||
|
def test_extension_from_component_invalid_syntax(idf_py: IdfPyFunc, test_app_copy: Path) -> None:
|
||||||
|
logging.info('Test handling of invalid component extensions')
|
||||||
|
|
||||||
|
idf_py('create-component', '-C', 'components', 'invalid_component')
|
||||||
|
replace_in_file(
|
||||||
|
test_app_copy / 'main' / 'CMakeLists.txt',
|
||||||
|
'# placeholder_inside_idf_component_register',
|
||||||
|
'\n'.join(['INCLUDE_DIRS "." ', 'REQUIRES "invalid_component" ']),
|
||||||
|
)
|
||||||
|
ret = idf_py('reconfigure')
|
||||||
|
assert ret.returncode == 0
|
||||||
|
|
||||||
|
component_dir = test_app_copy / 'components' / 'invalid_component'
|
||||||
|
idf_ext_py = component_dir / 'idf_ext.py'
|
||||||
|
idf_ext_py.write_text('def some_function() # no ":" at the end - INVALID SYNTAX')
|
||||||
|
ret = idf_py('--help')
|
||||||
|
assert 'Warning: Failed to import extension' in ret.stderr
|
||||||
|
|
||||||
|
idf_ext_py.write_text(
|
||||||
|
textwrap.dedent("""
|
||||||
|
def some_function():
|
||||||
|
pass
|
||||||
|
""")
|
||||||
|
)
|
||||||
|
ret = idf_py('--help')
|
||||||
|
assert "has no attribute 'action_extensions'" in ret.stderr
|
||||||
|
|
||||||
|
idf_ext_py.write_text(
|
||||||
|
textwrap.dedent(
|
||||||
|
TEST_EXT_TEMPLATE.format(
|
||||||
|
suffix='component extension',
|
||||||
|
global_options='',
|
||||||
|
actions="""'test-component-action': {
|
||||||
|
'callback': test_extension_action,
|
||||||
|
'help': 'Test action from component extension'
|
||||||
|
}""",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
replace_in_file(
|
||||||
|
idf_ext_py,
|
||||||
|
"'version': '1',",
|
||||||
|
'\n',
|
||||||
|
)
|
||||||
|
ret = idf_py('--help')
|
||||||
|
assert 'Attribute "version" is required in custom extension.' in ret.stderr
|
||||||
|
|
||||||
|
|
||||||
|
# ----------- Test cases for entry point extension -----------
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.usefixtures('test_app_copy')
|
||||||
|
def test_extension_entrypoint(idf_py: IdfPyFunc, extension_package_manager: ExtensionPackageManager) -> None:
|
||||||
|
logging.info('Test loading multiple extensions from Python entry points')
|
||||||
|
|
||||||
|
_, action1_name = extension_package_manager.create_package('alpha')
|
||||||
|
_, action2_name = extension_package_manager.create_package('beta')
|
||||||
|
|
||||||
|
ret = idf_py('--help')
|
||||||
|
assert action1_name in ret.stdout
|
||||||
|
assert action2_name in ret.stdout
|
||||||
|
|
||||||
|
ret_alpha = idf_py('test-extension-action-alpha')
|
||||||
|
assert 'Test extension action executed - alpha' in ret_alpha.stdout
|
||||||
|
|
||||||
|
ret_beta = idf_py('test-extension-action-beta')
|
||||||
|
assert 'Test extension action executed - beta' in ret_beta.stdout
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.usefixtures('test_app_copy')
|
||||||
|
def test_extension_entrypoint_declarative_value_duplicate(
|
||||||
|
idf_py: IdfPyFunc, extension_package_manager: ExtensionPackageManager
|
||||||
|
) -> None:
|
||||||
|
logging.info('Test entry point declarative value duplicate name warning')
|
||||||
|
|
||||||
|
entry_point1_name, action1_name = extension_package_manager.create_package(
|
||||||
|
'collision1',
|
||||||
|
template_vars={
|
||||||
|
'declarative_value': 'duplicate_test_ext:action_extensions' # Same declarative value
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
entry_point2_name, action2_name = extension_package_manager.create_package(
|
||||||
|
'collision2',
|
||||||
|
template_vars={
|
||||||
|
'declarative_value': 'duplicate_test_ext:action_extensions' # Same declarative value
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
ret = idf_py('--help')
|
||||||
|
assert action1_name not in ret.stdout
|
||||||
|
assert action2_name not in ret.stdout
|
||||||
|
assert 'name collision detected for - duplicate_test_ext:action_extensions' in ret.stderr
|
||||||
|
assert entry_point1_name in ret.stderr
|
||||||
|
assert entry_point2_name in ret.stderr
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.usefixtures('test_app_copy')
|
||||||
|
def test_extension_entrypoint_default_declarative_value(
|
||||||
|
idf_py: IdfPyFunc, extension_package_manager: ExtensionPackageManager
|
||||||
|
) -> None:
|
||||||
|
"""
|
||||||
|
Test recommendation warning log when entrypoint uses default idf_ext:action_extensions declarative value.
|
||||||
|
This declarative value (extension file name) is used for components participating in the build,
|
||||||
|
thus is not recommended to use it for external components - entrypoints.
|
||||||
|
"""
|
||||||
|
logging.info('Test entrypoint uses default idf_ext:action_extensions declarative value')
|
||||||
|
|
||||||
|
entry_point_name, _ = extension_package_manager.create_package(
|
||||||
|
'default_value',
|
||||||
|
template_vars={
|
||||||
|
'declarative_value': 'idf_ext:action_extensions',
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
ret = idf_py('--help')
|
||||||
|
assert f'Entry point "{entry_point_name}" has declarative value "idf_ext:action_extensions"' in ret.stderr
|
||||||
|
assert (
|
||||||
|
'For external components, it is recommended to use name like <<COMPONENT_NAME>>_ext:action_extensions'
|
||||||
|
in ret.stderr
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.usefixtures('test_app_copy')
|
||||||
|
def test_extension_entrypoint_non_existing_module(
|
||||||
|
idf_py: IdfPyFunc, extension_package_manager: ExtensionPackageManager
|
||||||
|
) -> None:
|
||||||
|
logging.info('Test entrypoint uses non-existing module')
|
||||||
|
|
||||||
|
entry_point_name, _ = extension_package_manager.create_package(
|
||||||
|
'non_existing_module',
|
||||||
|
template_vars={
|
||||||
|
'declarative_value': 'non_existing_module:action_extensions',
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
ret = idf_py('--help')
|
||||||
|
assert f'Failed to load entry point extension "{entry_point_name}"' in ret.stderr
|
||||||
|
assert "No module named 'non_existing_module'" in ret.stderr
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.usefixtures('test_app_copy')
|
||||||
|
def test_extension_entrypoint_conflicting_names(
|
||||||
|
idf_py: IdfPyFunc, extension_package_manager: ExtensionPackageManager
|
||||||
|
) -> None:
|
||||||
|
logging.info('Test action name conflict warning')
|
||||||
|
|
||||||
|
extension_package_manager.create_package(
|
||||||
|
'conflicting_action',
|
||||||
|
template_vars={
|
||||||
|
'actions': """
|
||||||
|
'bootloader': {
|
||||||
|
'callback': test_extension_action,
|
||||||
|
'help': 'This action conflicts with built-in action',
|
||||||
|
},
|
||||||
|
'my-custom-action': {
|
||||||
|
'callback': test_extension_action,
|
||||||
|
'help': 'Custom action with conflicting aliases',
|
||||||
|
'aliases': ['clean']
|
||||||
|
}
|
||||||
|
""",
|
||||||
|
'global_options': """{
|
||||||
|
'names': ['--project-dir'],
|
||||||
|
'help': 'This global option conflicts with existing one'
|
||||||
|
}""",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
ret = idf_py('--help')
|
||||||
|
assert "Action 'bootloader' already defined. External action will not be added." in ret.stderr
|
||||||
|
assert 'This action conflicts with built-in action' not in ret.stdout
|
||||||
|
assert (
|
||||||
|
"Action 'my-custom-action' has aliases ['clean'] that conflict with existing actions or aliases" in ret.stderr
|
||||||
|
)
|
||||||
|
assert 'Custom action with conflicting aliases' not in ret.stdout
|
||||||
|
assert "Global option ['--project-dir'] already defined. External option will not be added." in ret.stderr
|
||||||
|
assert 'This global option conflicts with existing one' not in ret.stdout
|
Reference in New Issue
Block a user