Mirror of https://github.com/home-assistant/core.git, synced 2025-07-29 18:28:14 +02:00
Merge config subentry feature branch to dev (#136121)
* Reapply "Add support for subentries to config entries" (#133470) (#136061)
* Reapply "Add support for subentries to config entries" (#133470)
This reverts commit ecb3bf79f3.
* Update test snapshot
* Add config subentry support to device registry (#128157)
* Add config subentry support to device registry
* Apply suggestions from code review
* Update syrupy serializer
* Update snapshots
* Address review comments
* Allow a device to be connected to no or a single subentry of a config entry
* Update snapshots
* Revert "Allow a device to be connected to no or a single subentry of a config entry"
This reverts commit ec6f613151cb4a806b7961033c004b71b76510c2.
* Update test snapshots
* Bump release version in comments
* Rename config_subentries to config_entries_subentries
* Add config subentry support to entity registry (#128155)
* Add config subentry support to entity registry
* Update syrupy serializer
* Update snapshots
* Update snapshots
* Accept suggested changes
* Clean registries when removing subentry (#136671)
* Clean up registries when removing subentry
* Update tests
* Clean up subentries from deleted devices when removing config entry (#136669)
* Clean up subentries from deleted devices when removing config entry
* Move
* Add config subentry support to entity platform (#128161)
* Add config subentry support to entity platform
* Rename subentry_id to config_subentry_id
* Store subentry type in subentry (#136687)
* Add reconfigure support to config subentries (#133353)
* Add reconfigure support to config subentries
* Update test
* Minor adjustment
* Rename supported_subentry_flows to supported_subentry_types
* Address review comments
* Add subentry support to kitchen sink (#136755)
* Add subentry support to kitchen sink
* Add subentry reconfigure support to kitchen_sink
* Update kitchen_sink tests with subentry type stored in config entry
* Update kitchen_sink
* Update kitchen_sink
* Adjust kitchen sink tests
* Fix hassfest
* Apply suggestions from code review
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
* Improve docstrings and strings.json
---------
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
* Update snapshots
* Update snapshots
* Update snapshots
* Update snapshots
* Update snapshots
* Update snapshots
* Update snapshots
---------
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
@@ -15,6 +1,7 @@ from collections.abc import (
)
from contextvars import ContextVar
from copy import deepcopy
from dataclasses import dataclass, field
from datetime import datetime
from enum import Enum, StrEnum
import functools
@@ -22,7 +23,7 @@ from functools import cache
import logging
from random import randint
from types import MappingProxyType
from typing import TYPE_CHECKING, Any, Self, cast
from typing import TYPE_CHECKING, Any, Self, TypedDict, cast

from async_interrupt import interrupt
from propcache.api import cached_property
@@ -127,7 +128,7 @@ HANDLERS: Registry[str, type[ConfigFlow]] = Registry()

STORAGE_KEY = "core.config_entries"
STORAGE_VERSION = 1
STORAGE_VERSION_MINOR = 4
STORAGE_VERSION_MINOR = 5

SAVE_DELAY = 1

@@ -253,6 +254,10 @@ class UnknownEntry(ConfigError):
    """Unknown entry specified."""


class UnknownSubEntry(ConfigError):
    """Unknown subentry specified."""


class OperationNotAllowed(ConfigError):
    """Raised when a config entry operation is not allowed."""

@@ -297,6 +302,7 @@ class ConfigFlowResult(FlowResult[ConfigFlowContext, str], total=False):

    minor_version: int
    options: Mapping[str, Any]
    subentries: Iterable[ConfigSubentryData]
    version: int

@@ -310,6 +316,61 @@ def _validate_item(*, disabled_by: ConfigEntryDisabler | Any | None = None) -> N
        )


class ConfigSubentryData(TypedDict):
    """Container for configuration subentry data.

    Returned by integrations, a subentry_id will be assigned automatically.
    """

    data: Mapping[str, Any]
    subentry_type: str
    title: str
    unique_id: str | None


class ConfigSubentryDataWithId(ConfigSubentryData):
    """Container for configuration subentry data.

    This type is used when loading existing subentries from storage.
    """

    subentry_id: str


class SubentryFlowContext(FlowContext, total=False):
    """Typed context dict for subentry flow."""

    entry_id: str
    subentry_id: str


class SubentryFlowResult(FlowResult[SubentryFlowContext, tuple[str, str]], total=False):
    """Typed result dict for subentry flow."""

    unique_id: str | None


@dataclass(frozen=True, kw_only=True)
class ConfigSubentry:
    """Container for a configuration subentry."""

    data: MappingProxyType[str, Any]
    subentry_id: str = field(default_factory=ulid_util.ulid_now)
    subentry_type: str
    title: str
    unique_id: str | None

    def as_dict(self) -> ConfigSubentryDataWithId:
        """Return dictionary version of this subentry."""
        return {
            "data": dict(self.data),
            "subentry_id": self.subentry_id,
            "subentry_type": self.subentry_type,
            "title": self.title,
            "unique_id": self.unique_id,
        }


class ConfigEntry[_DataT = Any]:
    """Hold a configuration entry."""

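To make the relationship between these types concrete, here is a small sketch (not part of the diff; the "channel" values are invented) of how subentry data supplied by an integration maps onto a stored ConfigSubentry:

# Illustrative only; assumes the names added in this diff are importable as shown.
from types import MappingProxyType

from homeassistant.config_entries import ConfigSubentry, ConfigSubentryData

subentry_data = ConfigSubentryData(
    data={"channel": 3},
    subentry_type="channel",
    title="Channel 3",
    unique_id="channel-3",
)

# ConfigEntry builds a frozen ConfigSubentry from such data; when it is not loaded
# from storage, subentry_id is generated by the field default (ulid_util.ulid_now).
subentry = ConfigSubentry(
    data=MappingProxyType(subentry_data["data"]),
    subentry_type=subentry_data["subentry_type"],
    title=subentry_data["title"],
    unique_id=subentry_data["unique_id"],
)

# as_dict() returns the storage shape, now including the generated id.
assert subentry.as_dict()["subentry_id"] == subentry.subentry_id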
@@ -319,6 +380,7 @@ class ConfigEntry[_DataT = Any]:
    data: MappingProxyType[str, Any]
    runtime_data: _DataT
    options: MappingProxyType[str, Any]
    subentries: MappingProxyType[str, ConfigSubentry]
    unique_id: str | None
    state: ConfigEntryState
    reason: str | None
@@ -334,6 +396,7 @@ class ConfigEntry[_DataT = Any]:
    supports_remove_device: bool | None
    _supports_options: bool | None
    _supports_reconfigure: bool | None
    _supported_subentry_types: dict[str, dict[str, bool]] | None
    update_listeners: list[UpdateListenerType]
    _async_cancel_retry_setup: Callable[[], Any] | None
    _on_unload: list[Callable[[], Coroutine[Any, Any, None] | None]] | None
@@ -363,6 +426,7 @@ class ConfigEntry[_DataT = Any]:
        pref_disable_polling: bool | None = None,
        source: str,
        state: ConfigEntryState = ConfigEntryState.NOT_LOADED,
        subentries_data: Iterable[ConfigSubentryData | ConfigSubentryDataWithId] | None,
        title: str,
        unique_id: str | None,
        version: int,
@@ -388,6 +452,25 @@ class ConfigEntry[_DataT = Any]:
        # Entry options
        _setter(self, "options", MappingProxyType(options or {}))

        # Subentries
        subentries_data = subentries_data or ()
        subentries = {}
        for subentry_data in subentries_data:
            subentry_kwargs = {}
            if "subentry_id" in subentry_data:
                # If subentry_data has key "subentry_id", we're loading from storage
                subentry_kwargs["subentry_id"] = subentry_data["subentry_id"]  # type: ignore[typeddict-item]
            subentry = ConfigSubentry(
                data=MappingProxyType(subentry_data["data"]),
                subentry_type=subentry_data["subentry_type"],
                title=subentry_data["title"],
                unique_id=subentry_data.get("unique_id"),
                **subentry_kwargs,
            )
            subentries[subentry.subentry_id] = subentry

        _setter(self, "subentries", MappingProxyType(subentries))

        # Entry system options
        if pref_disable_new_entities is None:
            pref_disable_new_entities = False
@@ -424,6 +507,9 @@ class ConfigEntry[_DataT = Any]:
        # Supports reconfigure
        _setter(self, "_supports_reconfigure", None)

        # Supports subentries
        _setter(self, "_supported_subentry_types", None)

        # Listeners to call on update
        _setter(self, "update_listeners", [])

@@ -496,6 +582,28 @@ class ConfigEntry[_DataT = Any]:
            )
        return self._supports_reconfigure or False

    @property
    def supported_subentry_types(self) -> dict[str, dict[str, bool]]:
        """Return supported subentry types."""
        if self._supported_subentry_types is None and (
            handler := HANDLERS.get(self.domain)
        ):
            # work out sub entries supported by the handler
            supported_flows = handler.async_get_supported_subentry_types(self)
            object.__setattr__(
                self,
                "_supported_subentry_types",
                {
                    subentry_flow_type: {
                        "supports_reconfigure": hasattr(
                            subentry_flow_handler, "async_step_reconfigure"
                        )
                    }
                    for subentry_flow_type, subentry_flow_handler in supported_flows.items()
                },
            )
        return self._supported_subentry_types or {}

    def clear_state_cache(self) -> None:
        """Clear cached properties that are included in as_json_fragment."""
        self.__dict__.pop("as_json_fragment", None)
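For orientation, the property above evaluates to a plain mapping keyed by subentry type. A hypothetical value for a handler that registers a single "location" subentry flow implementing async_step_reconfigure would look like this (the key is invented, not from the diff):

# Hypothetical shape of entry.supported_subentry_types for such a handler.
supported = {"location": {"supports_reconfigure": True}}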
@@ -515,12 +623,14 @@ class ConfigEntry[_DataT = Any]:
            "supports_remove_device": self.supports_remove_device or False,
            "supports_unload": self.supports_unload or False,
            "supports_reconfigure": self.supports_reconfigure,
            "supported_subentry_types": self.supported_subentry_types,
            "pref_disable_new_entities": self.pref_disable_new_entities,
            "pref_disable_polling": self.pref_disable_polling,
            "disabled_by": self.disabled_by,
            "reason": self.reason,
            "error_reason_translation_key": self.error_reason_translation_key,
            "error_reason_translation_placeholders": self.error_reason_translation_placeholders,
            "num_subentries": len(self.subentries),
        }
        return json_fragment(json_bytes(json_repr))

@@ -1012,6 +1122,7 @@ class ConfigEntry[_DataT = Any]:
            "pref_disable_new_entities": self.pref_disable_new_entities,
            "pref_disable_polling": self.pref_disable_polling,
            "source": self.source,
            "subentries": [subentry.as_dict() for subentry in self.subentries.values()],
            "title": self.title,
            "unique_id": self.unique_id,
            "version": self.version,
@@ -1497,6 +1608,7 @@ class ConfigEntriesFlowManager(
            minor_version=result["minor_version"],
            options=result["options"],
            source=flow.context["source"],
            subentries_data=result["subentries"],
            title=result["title"],
            unique_id=flow.unique_id,
            version=result["version"],
@@ -1787,6 +1899,11 @@ class ConfigEntryStore(storage.Store[dict[str, list[dict[str, Any]]]]):
                for entry in data["entries"]:
                    entry["discovery_keys"] = {}

            if old_minor_version < 5:
                # Version 1.5 adds config subentries
                for entry in data["entries"]:
                    entry.setdefault("subentries", entry.get("subentries", {}))

        if old_major_version > 1:
            raise NotImplementedError
        return data
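As a rough illustration of the new migration step (the sample entry dict is invented, not from the diff), it only guarantees that every stored entry carries a subentries key:

# Minimal sketch of the 1.5 migration step above applied to an invented stored entry.
stored = {"entries": [{"entry_id": "abc123", "domain": "kitchen_sink", "data": {}}]}

for entry in stored["entries"]:
    # Same call as in the migration: inserts an empty mapping when the key is missing
    # and leaves any existing "subentries" value untouched.
    entry.setdefault("subentries", entry.get("subentries", {}))

assert stored["entries"][0]["subentries"] == {}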
@@ -1803,6 +1920,7 @@ class ConfigEntries:
        self.hass = hass
        self.flow = ConfigEntriesFlowManager(hass, self, hass_config)
        self.options = OptionsFlowManager(hass)
        self.subentries = ConfigSubentryFlowManager(hass)
        self._hass_config = hass_config
        self._entries = ConfigEntryItems(hass)
        self._store = ConfigEntryStore(hass)
@@ -2005,6 +2123,7 @@ class ConfigEntries:
                pref_disable_new_entities=entry["pref_disable_new_entities"],
                pref_disable_polling=entry["pref_disable_polling"],
                source=entry["source"],
                subentries_data=entry["subentries"],
                title=entry["title"],
                unique_id=entry["unique_id"],
                version=entry["version"],
@@ -2164,6 +2283,44 @@ class ConfigEntries:
        If the entry was changed, the update_listeners are
        fired and this function returns True

        If the entry was not changed, the update_listeners are
        not fired and this function returns False
        """
        return self._async_update_entry(
            entry,
            data=data,
            discovery_keys=discovery_keys,
            minor_version=minor_version,
            options=options,
            pref_disable_new_entities=pref_disable_new_entities,
            pref_disable_polling=pref_disable_polling,
            title=title,
            unique_id=unique_id,
            version=version,
        )

    @callback
    def _async_update_entry(
        self,
        entry: ConfigEntry,
        *,
        data: Mapping[str, Any] | UndefinedType = UNDEFINED,
        discovery_keys: MappingProxyType[str, tuple[DiscoveryKey, ...]]
        | UndefinedType = UNDEFINED,
        minor_version: int | UndefinedType = UNDEFINED,
        options: Mapping[str, Any] | UndefinedType = UNDEFINED,
        pref_disable_new_entities: bool | UndefinedType = UNDEFINED,
        pref_disable_polling: bool | UndefinedType = UNDEFINED,
        subentries: dict[str, ConfigSubentry] | UndefinedType = UNDEFINED,
        title: str | UndefinedType = UNDEFINED,
        unique_id: str | None | UndefinedType = UNDEFINED,
        version: int | UndefinedType = UNDEFINED,
    ) -> bool:
        """Update a config entry.

        If the entry was changed, the update_listeners are
        fired and this function returns True

        If the entry was not changed, the update_listeners are
        not fired and this function returns False
        """
||||
@ -2226,11 +2383,21 @@ class ConfigEntries:
|
||||
changed = True
|
||||
_setter(entry, "options", MappingProxyType(options))
|
||||
|
||||
if subentries is not UNDEFINED:
|
||||
if entry.subentries != subentries:
|
||||
changed = True
|
||||
_setter(entry, "subentries", MappingProxyType(subentries))
|
||||
|
||||
if not changed:
|
||||
return False
|
||||
|
||||
_setter(entry, "modified_at", utcnow())
|
||||
|
||||
self._async_save_and_notify(entry)
|
||||
return True
|
||||
|
||||
@callback
|
||||
def _async_save_and_notify(self, entry: ConfigEntry) -> None:
|
||||
for listener in entry.update_listeners:
|
||||
self.hass.async_create_task(
|
||||
listener(self.hass, entry),
|
||||
@ -2241,8 +2408,92 @@ class ConfigEntries:
|
||||
entry.clear_state_cache()
|
||||
entry.clear_storage_cache()
|
||||
self._async_dispatch(ConfigEntryChange.UPDATED, entry)
|
||||
|
||||
@callback
|
||||
def async_add_subentry(self, entry: ConfigEntry, subentry: ConfigSubentry) -> bool:
|
||||
"""Add a subentry to a config entry."""
|
||||
self._raise_if_subentry_unique_id_exists(entry, subentry.unique_id)
|
||||
|
||||
return self._async_update_entry(
|
||||
entry,
|
||||
subentries=entry.subentries | {subentry.subentry_id: subentry},
|
||||
)
|
||||
|
||||
@callback
|
||||
def async_remove_subentry(self, entry: ConfigEntry, subentry_id: str) -> bool:
|
||||
"""Remove a subentry from a config entry."""
|
||||
subentries = dict(entry.subentries)
|
||||
try:
|
||||
subentries.pop(subentry_id)
|
||||
except KeyError as err:
|
||||
raise UnknownSubEntry from err
|
||||
|
||||
result = self._async_update_entry(entry, subentries=subentries)
|
||||
dev_reg = dr.async_get(self.hass)
|
||||
ent_reg = er.async_get(self.hass)
|
||||
|
||||
dev_reg.async_clear_config_subentry(entry.entry_id, subentry_id)
|
||||
ent_reg.async_clear_config_subentry(entry.entry_id, subentry_id)
|
||||
return result
|
||||
|
||||
@callback
|
||||
def async_update_subentry(
|
||||
self,
|
||||
entry: ConfigEntry,
|
||||
subentry: ConfigSubentry,
|
||||
*,
|
||||
data: Mapping[str, Any] | UndefinedType = UNDEFINED,
|
||||
title: str | UndefinedType = UNDEFINED,
|
||||
unique_id: str | None | UndefinedType = UNDEFINED,
|
||||
) -> bool:
|
||||
"""Update a config subentry.
|
||||
|
||||
If the subentry was changed, the update_listeners are
|
||||
fired and this function returns True
|
||||
|
||||
If the subentry was not changed, the update_listeners are
|
||||
not fired and this function returns False
|
||||
"""
|
||||
if entry.entry_id not in self._entries:
|
||||
raise UnknownEntry(entry.entry_id)
|
||||
if subentry.subentry_id not in entry.subentries:
|
||||
raise UnknownSubEntry(subentry.subentry_id)
|
||||
|
||||
self.hass.verify_event_loop_thread("hass.config_entries.async_update_subentry")
|
||||
changed = False
|
||||
_setter = object.__setattr__
|
||||
|
||||
if unique_id is not UNDEFINED and subentry.unique_id != unique_id:
|
||||
self._raise_if_subentry_unique_id_exists(entry, unique_id)
|
||||
changed = True
|
||||
_setter(subentry, "unique_id", unique_id)
|
||||
|
||||
if title is not UNDEFINED and subentry.title != title:
|
||||
changed = True
|
||||
_setter(subentry, "title", title)
|
||||
|
||||
if data is not UNDEFINED and subentry.data != data:
|
||||
changed = True
|
||||
_setter(subentry, "data", MappingProxyType(data))
|
||||
|
||||
if not changed:
|
||||
return False
|
||||
|
||||
_setter(entry, "modified_at", utcnow())
|
||||
|
||||
self._async_save_and_notify(entry)
|
||||
return True
|
||||
|
||||
def _raise_if_subentry_unique_id_exists(
|
||||
self, entry: ConfigEntry, unique_id: str | None
|
||||
) -> None:
|
||||
"""Raise if a subentry with the same unique_id exists."""
|
||||
if unique_id is None:
|
||||
return
|
||||
for existing_subentry in entry.subentries.values():
|
||||
if existing_subentry.unique_id == unique_id:
|
||||
raise data_entry_flow.AbortFlow("already_configured")
|
||||
|
||||
@callback
|
||||
def _async_dispatch(
|
||||
self, change_type: ConfigEntryChange, entry: ConfigEntry
|
||||
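A hedged usage sketch of the new ConfigEntries helpers above; hass, entry and subentry are assumed to already exist, and the title and data values are placeholders rather than part of this commit:

# Hypothetical caller of the helpers added above (e.g. an integration or a test).
changed = hass.config_entries.async_update_subentry(
    entry,
    subentry,
    title="Upstairs sensors",
    data={**subentry.data, "room": "upstairs"},
)
# changed is False when nothing differed, in which case no listeners were notified.

# Removing a subentry also clears it from the device and entity registries.
hass.config_entries.async_remove_subentry(entry, subentry.subentry_id)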
@@ -2579,6 +2830,14 @@ class ConfigFlow(ConfigEntryBaseFlow):
        """Return options flow support for this handler."""
        return cls.async_get_options_flow is not ConfigFlow.async_get_options_flow

    @classmethod
    @callback
    def async_get_supported_subentry_types(
        cls, config_entry: ConfigEntry
    ) -> dict[str, type[ConfigSubentryFlow]]:
        """Return subentries supported by this handler."""
        return {}

    @callback
    def _async_abort_entries_match(
        self, match_dict: dict[str, Any] | None = None
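To illustrate the hook above, a minimal integration-side sketch; only async_get_supported_subentry_types and ConfigSubentryFlow come from this diff, while the "location" subentry type, the handler class and its user step are invented:

# Hypothetical integration-side sketch, not part of this commit.
from homeassistant.config_entries import (
    ConfigEntry,
    ConfigFlow,
    ConfigSubentryFlow,
    SubentryFlowResult,
)
from homeassistant.core import callback


class LocationSubentryFlowHandler(ConfigSubentryFlow):
    """Handle the invented 'location' subentry type."""

    async def async_step_user(self, user_input=None) -> SubentryFlowResult:
        # A real flow would usually show a form first; this goes straight to creation.
        return self.async_create_entry(
            title="Backyard",
            data={"latitude": 0.0, "longitude": 0.0},
            unique_id="backyard",
        )


class ExampleConfigFlow(ConfigFlow, domain="example"):
    """Config flow for the invented 'example' integration."""

    @classmethod
    @callback
    def async_get_supported_subentry_types(
        cls, config_entry: ConfigEntry
    ) -> dict[str, type[ConfigSubentryFlow]]:
        # Map subentry type -> flow handler class, as the base classmethod expects.
        return {"location": LocationSubentryFlowHandler}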
@@ -2887,6 +3146,7 @@ class ConfigFlow(ConfigEntryBaseFlow):
        description: str | None = None,
        description_placeholders: Mapping[str, str] | None = None,
        options: Mapping[str, Any] | None = None,
        subentries: Iterable[ConfigSubentryData] | None = None,
    ) -> ConfigFlowResult:
        """Finish config flow and create a config entry."""
        if self.source in {SOURCE_REAUTH, SOURCE_RECONFIGURE}:
@@ -2906,6 +3166,7 @@ class ConfigFlow(ConfigEntryBaseFlow):

        result["minor_version"] = self.MINOR_VERSION
        result["options"] = options or {}
        result["subentries"] = subentries or ()
        result["version"] = self.VERSION

        return result
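A sketch of a config flow step seeding an entry with initial subentries through the new subentries argument of async_create_entry; the domain, host and channel values are made up:

# Hypothetical flow, not part of this commit.
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, ConfigSubentryData


class HubConfigFlow(ConfigFlow, domain="example_hub"):
    """Config flow for the invented 'example_hub' integration."""

    VERSION = 1

    async def async_step_user(self, user_input=None) -> ConfigFlowResult:
        return self.async_create_entry(
            title="Example hub",
            data={"host": "192.0.2.1"},
            subentries=[
                ConfigSubentryData(
                    data={"channel": 1},
                    subentry_type="channel",
                    title="Channel 1",
                    unique_id="channel-1",
                ),
                ConfigSubentryData(
                    data={"channel": 2},
                    subentry_type="channel",
                    title="Channel 2",
                    unique_id=None,
                ),
            ],
        )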
@@ -3020,17 +3281,199 @@ class ConfigFlow(ConfigEntryBaseFlow):
        )


class OptionsFlowManager(
    data_entry_flow.FlowManager[ConfigFlowContext, ConfigFlowResult]
):
    """Flow to set options for a configuration entry."""
class _ConfigSubFlowManager:
    """Mixin class for flow managers which manage flows tied to a config entry."""

    _flow_result = ConfigFlowResult
    hass: HomeAssistant

    def _async_get_config_entry(self, config_entry_id: str) -> ConfigEntry:
        """Return config entry or raise if not found."""
        return self.hass.config_entries.async_get_known_entry(config_entry_id)


class ConfigSubentryFlowManager(
    data_entry_flow.FlowManager[
        SubentryFlowContext, SubentryFlowResult, tuple[str, str]
    ],
    _ConfigSubFlowManager,
):
    """Manage all the config subentry flows that are in progress."""

    _flow_result = SubentryFlowResult

    async def async_create_flow(
        self,
        handler_key: tuple[str, str],
        *,
        context: FlowContext | None = None,
        data: dict[str, Any] | None = None,
    ) -> ConfigSubentryFlow:
        """Create a subentry flow for a config entry.

        The entry_id and flow.handler[0] is the same thing to map entry with flow.
        """
        if not context or "source" not in context:
            raise KeyError("Context not set or doesn't have a source set")

        entry_id, subentry_type = handler_key
        entry = self._async_get_config_entry(entry_id)
        handler = await _async_get_flow_handler(self.hass, entry.domain, {})
        subentry_types = handler.async_get_supported_subentry_types(entry)
        if subentry_type not in subentry_types:
            raise data_entry_flow.UnknownHandler(
                f"Config entry '{entry.domain}' does not support subentry '{subentry_type}'"
            )
        subentry_flow = subentry_types[subentry_type]()
        subentry_flow.init_step = context["source"]
        return subentry_flow

    async def async_finish_flow(
        self,
        flow: data_entry_flow.FlowHandler[
            SubentryFlowContext, SubentryFlowResult, tuple[str, str]
        ],
        result: SubentryFlowResult,
    ) -> SubentryFlowResult:
        """Finish a subentry flow and add a new subentry to the configuration entry.

        The flow.handler[0] and entry_id is the same thing to map flow with entry.
        """
        flow = cast(ConfigSubentryFlow, flow)

        if result["type"] != data_entry_flow.FlowResultType.CREATE_ENTRY:
            return result

        entry_id, subentry_type = flow.handler
        entry = self.hass.config_entries.async_get_entry(entry_id)
        if entry is None:
            raise UnknownEntry(entry_id)

        unique_id = result.get("unique_id")
        if unique_id is not None and not isinstance(unique_id, str):
            raise HomeAssistantError("unique_id must be a string")

        self.hass.config_entries.async_add_subentry(
            entry,
            ConfigSubentry(
                data=MappingProxyType(result["data"]),
                subentry_type=subentry_type,
                title=result["title"],
                unique_id=unique_id,
            ),
        )

        result["result"] = True
        return result


class ConfigSubentryFlow(
    data_entry_flow.FlowHandler[
        SubentryFlowContext, SubentryFlowResult, tuple[str, str]
    ]
):
    """Base class for config subentry flows."""

    _flow_result = SubentryFlowResult
    handler: tuple[str, str]

    @callback
    def async_create_entry(
        self,
        *,
        title: str | None = None,
        data: Mapping[str, Any],
        description: str | None = None,
        description_placeholders: Mapping[str, str] | None = None,
        unique_id: str | None = None,
    ) -> SubentryFlowResult:
        """Finish config flow and create a config entry."""
        if self.source != SOURCE_USER:
            raise ValueError(f"Source is {self.source}, expected {SOURCE_USER}")

        result = super().async_create_entry(
            title=title,
            data=data,
            description=description,
            description_placeholders=description_placeholders,
        )

        result["unique_id"] = unique_id

        return result

    @callback
    def async_update_and_abort(
        self,
        entry: ConfigEntry,
        subentry: ConfigSubentry,
        *,
        unique_id: str | None | UndefinedType = UNDEFINED,
        title: str | UndefinedType = UNDEFINED,
        data: Mapping[str, Any] | UndefinedType = UNDEFINED,
        data_updates: Mapping[str, Any] | UndefinedType = UNDEFINED,
    ) -> SubentryFlowResult:
        """Update config subentry and finish subentry flow.

        :param data: replace the subentry data with new data
        :param data_updates: add items from data_updates to subentry data - existing
          keys are overridden
        :param title: replace the title of the subentry
        :param unique_id: replace the unique_id of the subentry
        """
        if data_updates is not UNDEFINED:
            if data is not UNDEFINED:
                raise ValueError("Cannot set both data and data_updates")
            data = subentry.data | data_updates
        self.hass.config_entries.async_update_subentry(
            entry=entry,
            subentry=subentry,
            unique_id=unique_id,
            title=title,
            data=data,
        )
        return self.async_abort(reason="reconfigure_successful")

    @property
    def _reconfigure_entry_id(self) -> str:
        """Return reconfigure entry id."""
        if self.source != SOURCE_RECONFIGURE:
            raise ValueError(f"Source is {self.source}, expected {SOURCE_RECONFIGURE}")
        return self.handler[0]

    @callback
    def _get_reconfigure_entry(self) -> ConfigEntry:
        """Return the reconfigure config entry linked to the current context."""
        return self.hass.config_entries.async_get_known_entry(
            self._reconfigure_entry_id
        )

    @property
    def _reconfigure_subentry_id(self) -> str:
        """Return reconfigure subentry id."""
        if self.source != SOURCE_RECONFIGURE:
            raise ValueError(f"Source is {self.source}, expected {SOURCE_RECONFIGURE}")
        return self.context["subentry_id"]

    @callback
    def _get_reconfigure_subentry(self) -> ConfigSubentry:
        """Return the reconfigure config subentry linked to the current context."""
        entry = self.hass.config_entries.async_get_known_entry(
            self._reconfigure_entry_id
        )
        subentry_id = self._reconfigure_subentry_id
        if subentry_id not in entry.subentries:
            raise UnknownEntry
        return entry.subentries[subentry_id]


class OptionsFlowManager(
    data_entry_flow.FlowManager[ConfigFlowContext, ConfigFlowResult],
    _ConfigSubFlowManager,
):
    """Manage all the config entry option flows that are in progress."""

    _flow_result = ConfigFlowResult

    async def async_create_flow(
        self,
        handler_key: str,
@@ -3040,7 +3483,7 @@ class OptionsFlowManager(
    ) -> OptionsFlow:
        """Create an options flow for a config entry.

        Entry_id and flow.handler is the same thing to map entry with flow.
        The entry_id and the flow.handler is the same thing to map entry with flow.
        """
        entry = self._async_get_config_entry(handler_key)
        handler = await _async_get_flow_handler(self.hass, entry.domain, {})
@@ -3056,7 +3499,7 @@ class OptionsFlowManager(
        This method is called when a flow step returns FlowResultType.ABORT or
        FlowResultType.CREATE_ENTRY.

        Flow.handler and entry_id is the same thing to map flow with entry.
        The flow.handler and the entry_id is the same thing to map flow with entry.
        """
        flow = cast(OptionsFlow, flow)

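Finally, a hedged sketch of a subentry reconfigure step combining the helpers added in this commit (_get_reconfigure_entry, _get_reconfigure_subentry, async_update_and_abort); implementing async_step_reconfigure is also what makes supported_subentry_types report supports_reconfigure as True. The integration, schema and data keys are invented:

# Hypothetical subentry flow with reconfigure support, not part of this commit.
import voluptuous as vol

from homeassistant.config_entries import ConfigSubentryFlow, SubentryFlowResult


class ChannelSubentryFlowHandler(ConfigSubentryFlow):
    """Invented subentry flow that supports reconfiguration."""

    async def async_step_reconfigure(self, user_input=None) -> SubentryFlowResult:
        entry = self._get_reconfigure_entry()
        subentry = self._get_reconfigure_subentry()

        if user_input is not None:
            # Merge the submitted values into the existing subentry data and finish.
            return self.async_update_and_abort(entry, subentry, data_updates=user_input)

        return self.async_show_form(
            step_id="reconfigure",
            data_schema=vol.Schema(
                {vol.Required("channel", default=subentry.data.get("channel", 1)): int}
            ),
        )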