Mirror of https://github.com/home-assistant/core.git, synced 2026-02-04 06:15:47 +01:00

Compare commits: 2026.2.0b0 ... rvc_identi (69 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 40944f0f2d | |
| | 91a3e488b1 | |
| | 9a1f517e6e | |
| | c82c614bb9 | |
| | 20914dce67 | |
| | 5fc407d2f3 | |
| | c7444d38a1 | |
| | 81f6136bda | |
| | 862d0ea49e | |
| | f2fdfed241 | |
| | 15640049cb | |
| | 5c163434f8 | |
| | e54c2ea55e | |
| | 1ec42693ab | |
| | 672864ae4f | |
| | e54d7e42cb | |
| | 5d63fce015 | |
| | 190fe10eed | |
| | ef410c1e2a | |
| | 5a712398e7 | |
| | b1be3fe0da | |
| | 97a7ab011b | |
| | 694a3050b9 | |
| | 8164e65188 | |
| | 9af0d1eed4 | |
| | 72e6ca55ba | |
| | 0fb62a7e97 | |
| | 930eb70a8b | |
| | 462104fa68 | |
| | d0c77d8a7e | |
| | 606780b20f | |
| | 8f465cf2ca | |
| | 4e29476dd9 | |
| | b4328083be | |
| | 72ba59f559 | |
| | 826168b601 | |
| | 66f181992c | |
| | 336ef4c37b | |
| | 72e7bf7f9c | |
| | acbdbc9be7 | |
| | 3551382f8d | |
| | 95014d7e6d | |
| | dfe1990484 | |
| | 15ff5d0f74 | |
| | 1407f61a9c | |
| | 6107b794d6 | |
| | 7ab8ceab7e | |
| | a4db6a9ebc | |
| | 12a2650b6b | |
| | 23da7ecedd | |
| | 8d9e7b0b26 | |
| | 9664047345 | |
| | 804fbf9cef | |
| | e10fe074c9 | |
| | 7b0e21da74 | |
| | 29e142cf1e | |
| | 6b765ebabb | |
| | 899aa62697 | |
| | a11efba405 | |
| | 78280dfc5a | |
| | 4220bab08a | |
| | f7dcf8de15 | |
| | 7e32b50fee | |
| | c875b75272 | |
| | 7368b9ca1d | |
| | 493e8c1a22 | |
| | 1b16b24550 | |
| | 7637300632 | |
| | bdbce57217 | |

14 .github/workflows/builder.yml (vendored)

@@ -10,12 +10,12 @@ on:
env:
BUILD_TYPE: core
DEFAULT_PYTHON: "3.13"
DEFAULT_PYTHON: "3.14.2"
PIP_TIMEOUT: 60
UV_HTTP_TIMEOUT: 60
UV_SYSTEM_PYTHON: "true"
# Base image version from https://github.com/home-assistant/docker
BASE_IMAGE_VERSION: "2025.12.0"
BASE_IMAGE_VERSION: "2026.01.0"
ARCHITECTURES: '["amd64", "aarch64"]'
jobs:

@@ -184,7 +184,7 @@ jobs:
echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE
- name: Login to GitHub Container Registry
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}

@@ -287,7 +287,7 @@ jobs:
fi
- name: Login to GitHub Container Registry
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}

@@ -358,13 +358,13 @@ jobs:
- name: Login to DockerHub
if: matrix.registry == 'docker.io/homeassistant'
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}

@@ -522,7 +522,7 @@ jobs:
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Login to GitHub Container Registry
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}

12 .github/workflows/ci.yaml (vendored)

@@ -40,9 +40,9 @@ env:
CACHE_VERSION: 2
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2026.2"
DEFAULT_PYTHON: "3.13.11"
ALL_PYTHON_VERSIONS: "['3.13.11', '3.14.2']"
HA_SHORT_VERSION: "2026.3"
DEFAULT_PYTHON: "3.14.2"
ALL_PYTHON_VERSIONS: "['3.14.2']"
# 10.3 is the oldest supported version
# - 10.3.32 is the version currently shipped with Synology (as of 17 Feb 2022)
# 10.6 is the current long-term-support

@@ -310,7 +310,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore base Python virtual environment
id: cache-venv
uses: &actions-cache actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
uses: &actions-cache actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
with:
path: venv
key: &key-python-venv >-

@@ -374,7 +374,7 @@ jobs:
fi
- name: Save apt cache
if: steps.cache-apt-check.outputs.cache-hit != 'true'
uses: &actions-cache-save actions/cache/save@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
uses: &actions-cache-save actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
with:
path: *path-apt-cache
key: *key-apt-cache

@@ -425,7 +425,7 @@ jobs:
steps:
- &cache-restore-apt
name: Restore apt cache
uses: &actions-cache-restore actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
uses: &actions-cache-restore actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
with:
path: *path-apt-cache
fail-on-cache-miss: true

2 .github/workflows/translations.yml (vendored)

@@ -10,7 +10,7 @@ on:
- "**strings.json"
env:
DEFAULT_PYTHON: "3.13"
DEFAULT_PYTHON: "3.14.2"
jobs:
upload:

2 .github/workflows/wheels.yml (vendored)

@@ -17,7 +17,7 @@ on:
- "script/gen_requirements_all.py"
env:
DEFAULT_PYTHON: "3.13"
DEFAULT_PYTHON: "3.14.2"
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name}}

@@ -1 +1 @@
3.13
3.14

4 CODEOWNERS (generated)

@@ -921,6 +921,8 @@ build.json @home-assistant/supervisor
/tests/components/libre_hardware_monitor/ @Sab44
/homeassistant/components/lidarr/ @tkdrob
/tests/components/lidarr/ @tkdrob
/homeassistant/components/liebherr/ @mettolen
/tests/components/liebherr/ @mettolen
/homeassistant/components/lifx/ @Djelibeybi
/tests/components/lifx/ @Djelibeybi
/homeassistant/components/light/ @home-assistant/core

@@ -1878,6 +1880,8 @@ build.json @home-assistant/supervisor
/tests/components/worldclock/ @fabaff
/homeassistant/components/ws66i/ @ssaenger
/tests/components/ws66i/ @ssaenger
/homeassistant/components/wsdot/ @ucodery
/tests/components/wsdot/ @ucodery
/homeassistant/components/wyoming/ @synesthesiam
/tests/components/wyoming/ @synesthesiam
/homeassistant/components/xbox/ @hunterjm @tr4nt0r

@@ -52,6 +52,9 @@ RUN --mount=type=bind,source=requirements.txt,target=requirements.txt \
--mount=type=bind,source=requirements_test_pre_commit.txt,target=requirements_test_pre_commit.txt \
uv pip install -r requirements.txt -r requirements_test.txt
# Claude Code native install
RUN curl -fsSL https://claude.ai/install.sh | bash
WORKDIR /workspaces
# Set the default shell to bash instead of sh

@@ -166,7 +166,7 @@
},
"services": {
"alarm_arm_away": {
"description": "Arms the alarm in the away mode.",
"description": "Arms an alarm in the away mode.",
"fields": {
"code": {
"description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",

@@ -176,7 +176,7 @@
"name": "Arm away"
},
"alarm_arm_custom_bypass": {
"description": "Arms the alarm while allowing to bypass a custom area.",
"description": "Arms an alarm while allowing to bypass a custom area.",
"fields": {
"code": {
"description": "Code to arm the alarm.",

@@ -186,7 +186,7 @@
"name": "Arm with custom bypass"
},
"alarm_arm_home": {
"description": "Arms the alarm in the home mode.",
"description": "Arms an alarm in the home mode.",
"fields": {
"code": {
"description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",

@@ -196,7 +196,7 @@
"name": "Arm home"
},
"alarm_arm_night": {
"description": "Arms the alarm in the night mode.",
"description": "Arms an alarm in the night mode.",
"fields": {
"code": {
"description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",

@@ -206,7 +206,7 @@
"name": "Arm night"
},
"alarm_arm_vacation": {
"description": "Arms the alarm in the vacation mode.",
"description": "Arms an alarm in the vacation mode.",
"fields": {
"code": {
"description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",

@@ -216,7 +216,7 @@
"name": "Arm vacation"
},
"alarm_disarm": {
"description": "Disarms the alarm.",
"description": "Disarms an alarm.",
"fields": {
"code": {
"description": "Code to disarm the alarm.",

@@ -226,7 +226,7 @@
"name": "Disarm"
},
"alarm_trigger": {
"description": "Triggers the alarm manually.",
"description": "Triggers an alarm manually.",
"fields": {
"code": {
"description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",

@@ -600,6 +600,16 @@ class AnthropicBaseLLMEntity(Entity):
system = chat_log.content[0]
if not isinstance(system, conversation.SystemContent):
raise TypeError("First message must be a system message")
# System prompt with caching enabled
system_prompt: list[TextBlockParam] = [
TextBlockParam(
type="text",
text=system.content,
cache_control={"type": "ephemeral"},
)
]
messages = _convert_content(chat_log.content[1:])
model = options.get(CONF_CHAT_MODEL, DEFAULT[CONF_CHAT_MODEL])

@@ -608,7 +618,7 @@ class AnthropicBaseLLMEntity(Entity):
model=model,
messages=messages,
max_tokens=options.get(CONF_MAX_TOKENS, DEFAULT[CONF_MAX_TOKENS]),
system=system.content,
system=system_prompt,
stream=True,
)

@@ -695,10 +705,6 @@ class AnthropicBaseLLMEntity(Entity):
type="auto",
)
if isinstance(model_args["system"], str):
model_args["system"] = [
TextBlockParam(type="text", text=model_args["system"])
]
model_args["system"].append(  # type: ignore[union-attr]
TextBlockParam(
type="text",

@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/bang_olufsen",
"integration_type": "device",
"iot_class": "local_push",
"requirements": ["mozart-api==5.3.1.108.0"],
"requirements": ["mozart-api==5.3.1.108.2"],
"zeroconf": ["_bangolufsen._tcp.local."]
}

@@ -8,6 +8,7 @@ from datetime import timedelta
import json
import logging
from typing import TYPE_CHECKING, Any, cast
from uuid import UUID
from aiohttp import ClientConnectorError
from mozart_api import __version__ as MOZART_API_VERSION

@@ -735,7 +736,7 @@ class BeoMediaPlayer(BeoEntity, MediaPlayerEntity):
await self._client.set_active_source(source_id=key)
else:
# Video
await self._client.post_remote_trigger(id=key)
await self._client.post_remote_trigger(id=UUID(key))
async def async_select_sound_mode(self, sound_mode: str) -> None:
"""Select a sound mode."""

@@ -894,7 +895,7 @@ class BeoMediaPlayer(BeoEntity, MediaPlayerEntity):
translation_key="play_media_error",
translation_placeholders={
"media_type": media_type,
"error_message": json.loads(error.body)["message"],
"error_message": json.loads(cast(str, error.body))["message"],
},
) from error

@@ -50,11 +50,11 @@
"selector": {},
"services": {
"disable_motion_detection": {
"description": "Disables the motion detection.",
"description": "Disables the motion detection of a camera.",
"name": "Disable motion detection"
},
"enable_motion_detection": {
"description": "Enables the motion detection.",
"description": "Enables the motion detection of a camera.",
"name": "Enable motion detection"
},
"play_stream": {

@@ -100,11 +100,11 @@
"name": "Take snapshot"
},
"turn_off": {
"description": "Turns off the camera.",
"description": "Turns off a camera.",
"name": "[%key:common::action::turn_off%]"
},
"turn_on": {
"description": "Turns on the camera.",
"description": "Turns on a camera.",
"name": "[%key:common::action::turn_on%]"
}
},

@@ -12,14 +12,25 @@ from hass_nabucasa import Cloud, NabuCasaBaseError
from hass_nabucasa.llm import (
LLMAuthenticationError,
LLMRateLimitError,
LLMResponseCompletedEvent,
LLMResponseError,
LLMResponseErrorEvent,
LLMResponseFailedEvent,
LLMResponseFunctionCallArgumentsDeltaEvent,
LLMResponseFunctionCallArgumentsDoneEvent,
LLMResponseFunctionCallOutputItem,
LLMResponseImageOutputItem,
LLMResponseIncompleteEvent,
LLMResponseMessageOutputItem,
LLMResponseOutputItemAddedEvent,
LLMResponseOutputItemDoneEvent,
LLMResponseOutputTextDeltaEvent,
LLMResponseReasoningOutputItem,
LLMResponseReasoningSummaryTextDeltaEvent,
LLMResponseWebSearchCallOutputItem,
LLMResponseWebSearchCallSearchingEvent,
LLMServiceError,
)
from litellm import (
ResponseFunctionToolCall,
ResponseInputParam,
ResponsesAPIStreamEvents,
)
from openai.types.responses import (
FunctionToolParam,
ResponseInputItemParam,

@@ -60,9 +71,9 @@ class ResponseItemType(str, Enum):
def _convert_content_to_param(
chat_content: Iterable[conversation.Content],
) -> ResponseInputParam:
) -> list[ResponseInputItemParam]:
"""Convert any native chat message for this agent to the native format."""
messages: ResponseInputParam = []
messages: list[ResponseInputItemParam] = []
reasoning_summary: list[str] = []
web_search_calls: dict[str, dict[str, Any]] = {}

@@ -238,7 +249,7 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
"""Transform stream result into HA format."""
last_summary_index = None
last_role: Literal["assistant", "tool_result"] | None = None
current_tool_call: ResponseFunctionToolCall | None = None
current_tool_call: LLMResponseFunctionCallOutputItem | None = None
# Non-reasoning models don't follow our request to remove citations, so we remove
# them manually here. They always follow the same pattern: the citation is always

@@ -248,19 +259,10 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
citation_regexp = re.compile(r"\(\[([^\]]+)\]\((https?:\/\/[^\)]+)\)")
async for event in stream:
event_type = getattr(event, "type", None)
event_item = getattr(event, "item", None)
event_item_type = getattr(event_item, "type", None) if event_item else None
_LOGGER.debug("Event[%s]", getattr(event, "type", None))
_LOGGER.debug(
"Event[%s] | item: %s",
event_type,
event_item_type,
)
if event_type == ResponsesAPIStreamEvents.OUTPUT_ITEM_ADDED:
# Detect function_call even when it's a BaseLiteLLMOpenAIResponseObject
if event_item_type == ResponseItemType.FUNCTION_CALL:
if isinstance(event, LLMResponseOutputItemAddedEvent):
if isinstance(event.item, LLMResponseFunctionCallOutputItem):
# OpenAI has tool calls as individual events
# while HA puts tool calls inside the assistant message.
# We turn them into individual assistant content for HA

@@ -268,11 +270,11 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
yield {"role": "assistant"}
last_role = "assistant"
last_summary_index = None
current_tool_call = cast(ResponseFunctionToolCall, event.item)
current_tool_call = event.item
elif (
event_item_type == ResponseItemType.MESSAGE
isinstance(event.item, LLMResponseMessageOutputItem)
or (
event_item_type == ResponseItemType.REASONING
isinstance(event.item, LLMResponseReasoningOutputItem)
and last_summary_index is not None
)  # Subsequent ResponseReasoningItem
or last_role != "assistant"

@@ -281,14 +283,14 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
last_role = "assistant"
last_summary_index = None
elif event_type == ResponsesAPIStreamEvents.OUTPUT_ITEM_DONE:
if event_item_type == ResponseItemType.REASONING:
encrypted_content = getattr(event.item, "encrypted_content", None)
summary = getattr(event.item, "summary", []) or []
elif isinstance(event, LLMResponseOutputItemDoneEvent):
if isinstance(event.item, LLMResponseReasoningOutputItem):
encrypted_content = event.item.encrypted_content
summary = event.item.summary
yield {
"native": ResponseReasoningItem(
type="reasoning",
"native": LLMResponseReasoningOutputItem(
type=event.item.type,
id=event.item.id,
summary=[],
encrypted_content=encrypted_content,

@@ -296,14 +298,8 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
}
last_summary_index = len(summary) - 1 if summary else None
elif event_item_type == ResponseItemType.WEB_SEARCH_CALL:
action = getattr(event.item, "action", None)
if isinstance(action, dict):
action_dict = action
elif action is not None:
action_dict = action.to_dict()
else:
action_dict = {}
elif isinstance(event.item, LLMResponseWebSearchCallOutputItem):
action_dict = event.item.action
yield {
"tool_calls": [
llm.ToolInput(

@@ -321,11 +317,11 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
"tool_result": {"status": event.item.status},
}
last_role = "tool_result"
elif event_item_type == ResponseItemType.IMAGE:
yield {"native": event.item}
elif isinstance(event.item, LLMResponseImageOutputItem):
yield {"native": event.item.raw}
last_summary_index = -1  # Trigger new assistant message on next turn
elif event_type == ResponsesAPIStreamEvents.OUTPUT_TEXT_DELTA:
elif isinstance(event, LLMResponseOutputTextDeltaEvent):
data = event.delta
if remove_parentheses:
data = data.removeprefix(")")

@@ -344,7 +340,7 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
if data:
yield {"content": data}
elif event_type == ResponsesAPIStreamEvents.REASONING_SUMMARY_TEXT_DELTA:
elif isinstance(event, LLMResponseReasoningSummaryTextDeltaEvent):
# OpenAI can output several reasoning summaries
# in a single ResponseReasoningItem. We split them as separate
# AssistantContent messages. Only last of them will have

@@ -358,14 +354,14 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
last_summary_index = event.summary_index
yield {"thinking_content": event.delta}
elif event_type == ResponsesAPIStreamEvents.FUNCTION_CALL_ARGUMENTS_DELTA:
elif isinstance(event, LLMResponseFunctionCallArgumentsDeltaEvent):
if current_tool_call is not None:
current_tool_call.arguments += event.delta
elif event_type == ResponsesAPIStreamEvents.WEB_SEARCH_CALL_SEARCHING:
elif isinstance(event, LLMResponseWebSearchCallSearchingEvent):
yield {"role": "assistant"}
elif event_type == ResponsesAPIStreamEvents.FUNCTION_CALL_ARGUMENTS_DONE:
elif isinstance(event, LLMResponseFunctionCallArgumentsDoneEvent):
if current_tool_call is not None:
current_tool_call.status = "completed"

@@ -385,35 +381,36 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
]
}
elif event_type == ResponsesAPIStreamEvents.RESPONSE_COMPLETED:
if event.response.usage is not None:
elif isinstance(event, LLMResponseCompletedEvent):
response = event.response
if response and "usage" in response:
usage = response["usage"]
chat_log.async_trace(
{
"stats": {
"input_tokens": event.response.usage.input_tokens,
"output_tokens": event.response.usage.output_tokens,
"input_tokens": usage.get("input_tokens"),
"output_tokens": usage.get("output_tokens"),
}
}
)
elif event_type == ResponsesAPIStreamEvents.RESPONSE_INCOMPLETE:
if event.response.usage is not None:
elif isinstance(event, LLMResponseIncompleteEvent):
response = event.response
if response and "usage" in response:
usage = response["usage"]
chat_log.async_trace(
{
"stats": {
"input_tokens": event.response.usage.input_tokens,
"output_tokens": event.response.usage.output_tokens,
"input_tokens": usage.get("input_tokens"),
"output_tokens": usage.get("output_tokens"),
}
}
)
if (
event.response.incomplete_details
and event.response.incomplete_details.reason
):
reason: str = event.response.incomplete_details.reason
else:
reason = "unknown reason"
incomplete_details = response.get("incomplete_details")
reason = "unknown reason"
if incomplete_details is not None and incomplete_details.get("reason"):
reason = incomplete_details["reason"]
if reason == "max_output_tokens":
reason = "max output tokens reached"

@@ -422,22 +419,24 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
raise HomeAssistantError(f"OpenAI response incomplete: {reason}")
elif event_type == ResponsesAPIStreamEvents.RESPONSE_FAILED:
if event.response.usage is not None:
elif isinstance(event, LLMResponseFailedEvent):
response = event.response
if response and "usage" in response:
usage = response["usage"]
chat_log.async_trace(
{
"stats": {
"input_tokens": event.response.usage.input_tokens,
"output_tokens": event.response.usage.output_tokens,
"input_tokens": usage.get("input_tokens"),
"output_tokens": usage.get("output_tokens"),
}
}
)
reason = "unknown reason"
if event.response.error is not None:
reason = event.response.error.message
if isinstance(error := response.get("error"), dict):
reason = error.get("message") or reason
raise HomeAssistantError(f"OpenAI response failed: {reason}")
elif event_type == ResponsesAPIStreamEvents.ERROR:
elif isinstance(event, LLMResponseErrorEvent):
raise HomeAssistantError(f"OpenAI response error: {event.message}")

@@ -452,7 +451,7 @@ class BaseCloudLLMEntity(Entity):
async def _prepare_chat_for_generation(
self,
chat_log: conversation.ChatLog,
messages: ResponseInputParam,
messages: list[ResponseInputItemParam],
response_format: dict[str, Any] | None = None,
) -> dict[str, Any]:
"""Prepare kwargs for Cloud LLM from the chat log."""

@@ -13,6 +13,6 @@
"integration_type": "system",
"iot_class": "cloud_push",
"loggers": ["acme", "hass_nabucasa", "snitun"],
"requirements": ["hass-nabucasa==1.11.0"],
"requirements": ["hass-nabucasa==1.12.0", "openai==2.15.0"],
"single_config_entry": true
}

@@ -58,12 +58,13 @@ C4_TO_HA_HVAC_MODE = {
HA_TO_C4_HVAC_MODE = {v: k for k, v in C4_TO_HA_HVAC_MODE.items()}
# Map Control4 HVAC state to Home Assistant HVAC action
# Map the five known Control4 HVAC states to Home Assistant HVAC actions
C4_TO_HA_HVAC_ACTION = {
"heating": HVACAction.HEATING,
"cooling": HVACAction.COOLING,
"idle": HVACAction.IDLE,
"off": HVACAction.OFF,
"heat": HVACAction.HEATING,
"cool": HVACAction.COOLING,
"dry": HVACAction.DRYING,
"fan": HVACAction.FAN,
}

@@ -236,7 +237,10 @@ class Control4Climate(Control4Entity, ClimateEntity):
if c4_state is None:
return None
# Convert state to lowercase for mapping
return C4_TO_HA_HVAC_ACTION.get(str(c4_state).lower())
action = C4_TO_HA_HVAC_ACTION.get(str(c4_state).lower())
if action is None:
_LOGGER.debug("Unknown HVAC state received from Control4: %s", c4_state)
return action
@property
def target_temperature(self) -> float | None:

@@ -335,20 +335,18 @@ def _get_config_intents(config: ConfigType, hass_config_path: str) -> dict[str,
"""Return config intents."""
intents = config.get(DOMAIN, {}).get("intents", {})
return {
"intents": {
intent_name: {
"data": [
{
"sentences": sentences,
"metadata": {
METADATA_CUSTOM_SENTENCE: True,
METADATA_CUSTOM_FILE: hass_config_path,
},
}
]
}
for intent_name, sentences in intents.items()
intent_name: {
"data": [
{
"sentences": sentences,
"metadata": {
METADATA_CUSTOM_SENTENCE: True,
METADATA_CUSTOM_FILE: hass_config_path,
},
}
]
}
for intent_name, sentences in intents.items()
}

@@ -2,6 +2,7 @@
from __future__ import annotations
from collections.abc import Callable
import dataclasses
import logging
from typing import TYPE_CHECKING, Any

@@ -18,7 +19,7 @@ from homeassistant.core import (
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, intent, singleton
from .const import DATA_COMPONENT, HOME_ASSISTANT_AGENT
from .const import DATA_COMPONENT, HOME_ASSISTANT_AGENT, IntentSource
from .entity import ConversationEntity
from .models import (
AbstractConversationAgent,

@@ -34,9 +35,11 @@ from .trace import (
_LOGGER = logging.getLogger(__name__)
TRIGGER_INTENT_NAME_PREFIX = "HassSentenceTrigger"
if TYPE_CHECKING:
from .default_agent import DefaultAgent
from .trigger import TriggerDetails
from .trigger import TRIGGER_CALLBACK_TYPE
@singleton.singleton("conversation_agent")

@@ -139,6 +142,10 @@ async def async_converse(
return result
type IntentSourceConfig = dict[str, dict[str, Any]]
type IntentsCallback = Callable[[dict[IntentSource, IntentSourceConfig]], None]
class AgentManager:
"""Class to manage conversation agents."""

@@ -147,8 +154,13 @@ class AgentManager:
self.hass = hass
self._agents: dict[str, AbstractConversationAgent] = {}
self.default_agent: DefaultAgent | None = None
self.config_intents: dict[str, Any] = {}
self.triggers_details: list[TriggerDetails] = []
self._intents: dict[IntentSource, IntentSourceConfig] = {
IntentSource.CONFIG: {"intents": {}},
IntentSource.TRIGGER: {"intents": {}},
}
self._intents_subscribers: list[IntentsCallback] = []
self._trigger_callbacks: dict[int, TRIGGER_CALLBACK_TYPE] = {}
self._trigger_callback_counter: int = 0
@callback
def async_get_agent(self, agent_id: str) -> AbstractConversationAgent | None:

@@ -200,27 +212,75 @@ class AgentManager:
async def async_setup_default_agent(self, agent: DefaultAgent) -> None:
"""Set up the default agent."""
agent.update_config_intents(self.config_intents)
agent.update_triggers(self.triggers_details)
self.default_agent = agent
@callback
def subscribe_intents(self, subscriber: IntentsCallback) -> CALLBACK_TYPE:
"""Subscribe to intents updates.
The subscriber callback is called immediately with all intent sources
and whenever intents are updated (only with the changed source).
"""
subscriber(self._intents)
self._intents_subscribers.append(subscriber)
@callback
def unsubscribe() -> None:
"""Unsubscribe from intents updates."""
self._intents_subscribers.remove(subscriber)
return unsubscribe
def _notify_intents_subscribers(self, source: IntentSource) -> None:
"""Notify all intents subscribers of a change to a specific source."""
update = {source: self._intents[source]}
for subscriber in self._intents_subscribers:
subscriber(update)
def update_config_intents(self, intents: dict[str, Any]) -> None:
"""Update config intents."""
self.config_intents = intents
if self.default_agent is not None:
self.default_agent.update_config_intents(intents)
self._intents[IntentSource.CONFIG]["intents"] = intents
self._notify_intents_subscribers(IntentSource.CONFIG)
def register_trigger(self, trigger_details: TriggerDetails) -> CALLBACK_TYPE:
def register_trigger(
self, sentences: list[str], trigger_callback: TRIGGER_CALLBACK_TYPE
) -> CALLBACK_TYPE:
"""Register a trigger."""
self.triggers_details.append(trigger_details)
if self.default_agent is not None:
self.default_agent.update_triggers(self.triggers_details)
trigger_id = self._trigger_callback_counter
self._trigger_callback_counter += 1
trigger_intent_name = f"{TRIGGER_INTENT_NAME_PREFIX}{trigger_id}"
trigger_intents = self._intents[IntentSource.TRIGGER]
trigger_intents["intents"][trigger_intent_name] = {
"data": [{"sentences": sentences}]
}
self._trigger_callbacks[trigger_id] = trigger_callback
self._notify_intents_subscribers(IntentSource.TRIGGER)
@callback
def unregister_trigger() -> None:
"""Unregister the trigger."""
self.triggers_details.remove(trigger_details)
if self.default_agent is not None:
self.default_agent.update_triggers(self.triggers_details)
del trigger_intents["intents"][trigger_intent_name]
del self._trigger_callbacks[trigger_id]
self._notify_intents_subscribers(IntentSource.TRIGGER)
return unregister_trigger
@property
def trigger_sentences(self) -> list[str]:
"""Get all trigger sentences."""
sentences: list[str] = []
trigger_intents = self._intents[IntentSource.TRIGGER]
for trigger_intent in trigger_intents.get("intents", {}).values():
for data in trigger_intent.get("data", []):
sentences.extend(data.get("sentences", []))
return sentences
def get_trigger_callback(
self, trigger_intent_name: str
) -> TRIGGER_CALLBACK_TYPE | None:
"""Get the callback for a trigger from its intent name."""
if not trigger_intent_name.startswith(TRIGGER_INTENT_NAME_PREFIX):
return None
trigger_id = int(trigger_intent_name[len(TRIGGER_INTENT_NAME_PREFIX) :])
return self._trigger_callbacks.get(trigger_id)

@@ -36,6 +36,13 @@ METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
METADATA_CUSTOM_FILE = "hass_custom_file"
class IntentSource(StrEnum):
"""Source of intents."""
CONFIG = "config"
TRIGGER = "trigger"
class ChatLogEventType(StrEnum):
"""Chat log event type."""

@@ -76,18 +76,18 @@ from homeassistant.helpers.event import async_track_state_added_domain
from homeassistant.util import language as language_util
from homeassistant.util.json import JsonObjectType, json_loads_object
from .agent_manager import get_agent_manager
from .agent_manager import IntentSourceConfig, get_agent_manager
from .chat_log import AssistantContent, ChatLog, ToolResultContent
from .const import (
DOMAIN,
METADATA_CUSTOM_FILE,
METADATA_CUSTOM_SENTENCE,
ConversationEntityFeature,
IntentSource,
)
from .entity import ConversationEntity
from .models import ConversationInput, ConversationResult
from .trace import ConversationTraceEventType, async_conversation_trace_append
from .trigger import TriggerDetails
_LOGGER = logging.getLogger(__name__)

@@ -126,7 +126,7 @@ class SentenceTriggerResult:
sentence: str
sentence_template: str | None
matched_triggers: dict[int, RecognizeResult]
matched_triggers: dict[str, RecognizeResult]
class IntentMatchingStage(Enum):

@@ -236,15 +236,19 @@ class DefaultAgent(ConversationEntity):
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize the default agent."""
self.hass = hass
self._lang_intents: dict[str, LanguageIntents | object] = {}
self._load_intents_lock = asyncio.Lock()
# Intents from common conversation config
self._config_intents: dict[str, Any] = {}
self._config_intents_config: IntentSourceConfig = {}
# Sentences that will trigger a callback (skipping intent recognition)
self._triggers_details: list[TriggerDetails] = []
# Intents from conversation triggers
self._trigger_intents: Intents | None = None
self._trigger_intents_config: IntentSourceConfig = {}
# Subscription to intents updates
self._unsub_intents: Callable[[], None] | None = None
# Slot lists for entities, areas, etc.
self._slot_lists: dict[str, SlotList] | None = None

@@ -261,6 +265,33 @@ class DefaultAgent(ConversationEntity):
self.fuzzy_matching = True
self._fuzzy_config: FuzzyConfig | None = None
async def async_added_to_hass(self) -> None:
"""Subscribe to intents updates when added to hass."""
self._unsub_intents = get_agent_manager(self.hass).subscribe_intents(
self._update_intents
)
async def async_will_remove_from_hass(self) -> None:
"""Unsubscribe from intents updates when removed from hass."""
if self._unsub_intents is not None:
self._unsub_intents()
self._unsub_intents = None
@callback
def _update_intents(
self, intents_update: dict[IntentSource, IntentSourceConfig]
) -> None:
"""Handle intents update from agent_manager subscription."""
if IntentSource.CONFIG in intents_update:
self._config_intents_config = intents_update[IntentSource.CONFIG]
# Intents have changed, so we must clear the cache
self._intent_cache.clear()
if IntentSource.TRIGGER in intents_update:
self._trigger_intents_config = intents_update[IntentSource.TRIGGER]
# Force rebuild on next use
self._trigger_intents = None
@property
def supported_languages(self) -> list[str]:
"""Return a list of supported languages."""

@@ -1059,14 +1090,6 @@ class DefaultAgent(ConversationEntity):
# Intents have changed, so we must clear the cache
self._intent_cache.clear()
@callback
def update_config_intents(self, intents: dict[str, Any]) -> None:
"""Update config intents."""
self._config_intents = intents
# Intents have changed, so we must clear the cache
self._intent_cache.clear()
async def async_prepare(self, language: str | None = None) -> None:
"""Load intents for a language."""
if language is None:

@@ -1193,7 +1216,7 @@ class DefaultAgent(ConversationEntity):
merge_dict(
intents_dict,
self._config_intents,
self._config_intents_config,
)
if not intents_dict:

@@ -1461,27 +1484,12 @@ class DefaultAgent(ConversationEntity):
return response_template.async_render(response_args)
@callback
def update_triggers(self, triggers_details: list[TriggerDetails]) -> None:
"""Update triggers."""
self._triggers_details = triggers_details
# Force rebuild on next use
self._trigger_intents = None
def _rebuild_trigger_intents(self) -> None:
"""Rebuild the HassIL intents object from the current trigger sentences."""
"""Rebuild the HassIL intents object from the trigger intents dict."""
intents_dict = {
"language": self.hass.config.language,
"intents": {
# Use trigger data index as a virtual intent name for HassIL.
# This works because the intents are rebuilt on every
# register/unregister.
str(trigger_id): {"data": [{"sentences": trigger_details.sentences}]}
for trigger_id, trigger_details in enumerate(self._triggers_details)
},
**self._trigger_intents_config,
}
trigger_intents = Intents.from_dict(intents_dict)
# Assume slot list references are wildcards

@@ -1496,7 +1504,7 @@ class DefaultAgent(ConversationEntity):
self._trigger_intents = trigger_intents
_LOGGER.debug("Rebuilt trigger intents: %s", intents_dict)
_LOGGER.debug("Rebuilt trigger intents: %s", self._trigger_intents_config)
async def async_recognize_sentence_trigger(
self, user_input: ConversationInput

@@ -1506,7 +1514,7 @@ class DefaultAgent(ConversationEntity):
Calls the registered callbacks if there's a match and returns a sentence
trigger result.
"""
if not self._triggers_details:
if not self._trigger_intents_config.get("intents"):
# No triggers registered
return None

@@ -1516,18 +1524,18 @@ class DefaultAgent(ConversationEntity):
assert self._trigger_intents is not None
matched_triggers: dict[int, RecognizeResult] = {}
matched_triggers: dict[str, RecognizeResult] = {}
matched_template: str | None = None
for result in recognize_all(user_input.text, self._trigger_intents):
if result.intent_sentence is not None:
matched_template = result.intent_sentence.text
trigger_id = int(result.intent.name)
if trigger_id in matched_triggers:
trigger_intent_name = result.intent.name
if trigger_intent_name in matched_triggers:
# Already matched a sentence from this trigger
break
matched_triggers[trigger_id] = result
matched_triggers[trigger_intent_name] = result
if not matched_triggers:
# Sentence did not match any trigger sentences

@@ -1551,10 +1559,14 @@ class DefaultAgent(ConversationEntity):
chat_log: ChatLog,
) -> str:
"""Run sentence trigger callbacks and return response text."""
manager = get_agent_manager(self.hass)
# Gather callback responses in parallel
trigger_callbacks = [
self._triggers_details[trigger_id].callback(user_input, trigger_result)
for trigger_id, trigger_result in result.matched_triggers.items()
trigger_callback(user_input, trigger_result)
for trigger_intent_name, trigger_result in result.matched_triggers.items()
if (trigger_callback := manager.get_trigger_callback(trigger_intent_name))
is not None
]
tool_input = llm.ToolInput(

@@ -165,11 +165,7 @@ async def websocket_list_sentences(
"""List custom registered sentences."""
manager = get_agent_manager(hass)
sentences = []
for trigger_details in manager.triggers_details:
sentences.extend(trigger_details.sentences)
connection.send_result(msg["id"], {"trigger_sentences": sentences})
connection.send_result(msg["id"], {"trigger_sentences": manager.trigger_sentences})
@websocket_api.websocket_command(

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "entity",
"quality_scale": "internal",
"requirements": ["hassil==3.5.0", "home-assistant-intents==2026.1.6"]
"requirements": ["hassil==3.5.0", "home-assistant-intents==2026.1.28"]
}

@@ -3,7 +3,6 @@
from __future__ import annotations
from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from typing import Any
from hassil.recognize import RecognizeResult

@@ -31,14 +30,6 @@ TRIGGER_CALLBACK_TYPE = Callable[
]
@dataclass(slots=True)
class TriggerDetails:
"""List of sentences and the callback for a trigger."""
sentences: list[str]
callback: TRIGGER_CALLBACK_TYPE
def has_no_punctuation(value: list[str]) -> list[str]:
"""Validate result does not contain punctuation."""
for sentence in value:

@@ -149,5 +140,5 @@ async def async_attach_trigger(
return None
return get_agent_manager(hass).register_trigger(
TriggerDetails(sentences=sentences, callback=call_action)
sentences=sentences, trigger_callback=call_action
)

@@ -3,9 +3,8 @@
import logging
from datadog import DogStatsd, initialize
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_HOST,
CONF_PORT,

@@ -16,53 +15,15 @@ from homeassistant.const import (
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv, state as state_helper
from homeassistant.helpers.typing import ConfigType
from . import config_flow as config_flow
from .const import (
CONF_RATE,
DEFAULT_HOST,
DEFAULT_PORT,
DEFAULT_PREFIX,
DEFAULT_RATE,
DOMAIN,
)
from .const import CONF_RATE, DOMAIN
_LOGGER = logging.getLogger(__name__)
type DatadogConfigEntry = ConfigEntry[DogStatsd]
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_PREFIX, default=DEFAULT_PREFIX): cv.string,
vol.Optional(CONF_RATE, default=DEFAULT_RATE): vol.All(
vol.Coerce(int), vol.Range(min=1)
),
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Datadog integration from YAML, initiating config flow import."""
if DOMAIN not in config:
return True
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data=config[DOMAIN],
)
)
return True
CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)
async def async_setup_entry(hass: HomeAssistant, entry: DatadogConfigEntry) -> bool:

@@ -12,8 +12,7 @@ from homeassistant.config_entries import (
OptionsFlow,
)
from homeassistant.const import CONF_HOST, CONF_PORT, CONF_PREFIX
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from homeassistant.core import HomeAssistant, callback
from .const import (
CONF_RATE,

@@ -71,22 +70,6 @@ class DatadogConfigFlow(ConfigFlow, domain=DOMAIN):
errors=errors,
)
async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult:
"""Handle import from configuration.yaml."""
# Check for duplicates
self._async_abort_entries_match(
{CONF_HOST: user_input[CONF_HOST], CONF_PORT: user_input[CONF_PORT]}
)
result = await self.async_step_user(user_input)
if errors := result.get("errors"):
await deprecate_yaml_issue(self.hass, False)
return self.async_abort(reason=errors["base"])
await deprecate_yaml_issue(self.hass, True)
return result
@staticmethod
@callback
def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow:

@@ -163,41 +146,3 @@ async def validate_datadog_connection(
return False
else:
return True
async def deprecate_yaml_issue(
hass: HomeAssistant,
import_success: bool,
) -> None:
"""Create an issue to deprecate YAML config."""
if import_success:
async_create_issue(
hass,
HOMEASSISTANT_DOMAIN,
f"deprecated_yaml_{DOMAIN}",
is_fixable=False,
issue_domain=DOMAIN,
breaks_in_ha_version="2026.2.0",
severity=IssueSeverity.WARNING,
translation_key="deprecated_yaml",
translation_placeholders={
"domain": DOMAIN,
"integration_title": "Datadog",
},
)
else:
async_create_issue(
hass,
DOMAIN,
"deprecated_yaml_import_connection_error",
breaks_in_ha_version="2026.2.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=IssueSeverity.WARNING,
translation_key="deprecated_yaml_import_connection_error",
translation_placeholders={
"domain": DOMAIN,
"integration_title": "Datadog",
"url": f"/config/integrations/dashboard/add?domain={DOMAIN}",
},
)

@@ -25,12 +25,6 @@
}
}
},
"issues": {
"deprecated_yaml_import_connection_error": {
"description": "There was an error connecting to the Datadog Agent when trying to import the YAML configuration.\n\nEnsure the YAML configuration is correct and restart Home Assistant to try again or remove the {domain} configuration from your `configuration.yaml` file and continue to [set up the integration]({url}) manually.",
"title": "{domain} YAML configuration import failed"
}
},
"options": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",

@@ -7,10 +7,7 @@ import logging
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_SOURCE, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device import (
async_entity_id_to_device_id,
async_remove_stale_devices_links_keep_entity_device,
)
from homeassistant.helpers.device import async_entity_id_to_device_id
from homeassistant.helpers.helper_integration import (
async_handle_source_entity_changes,
async_remove_helper_config_entry_from_source_device,

@@ -22,11 +19,6 @@ _LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Derivative from a config entry."""
# This can be removed in HA Core 2026.2
async_remove_stale_devices_links_keep_entity_device(
hass, entry.entry_id, entry.options[CONF_SOURCE]
)
def set_source_entity_id_or_uuid(source_entity_id: str) -> None:
hass.config_entries.async_update_entry(
entry,

@@ -1,6 +1,7 @@
"""The Dexcom integration."""
from pydexcom import AccountError, Dexcom, SessionError
from pydexcom import Dexcom, Region
from pydexcom.errors import AccountError, SessionError
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant

@@ -14,10 +15,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: DexcomConfigEntry) -> bo
"""Set up Dexcom from a config entry."""
try:
dexcom = await hass.async_add_executor_job(
Dexcom,
entry.data[CONF_USERNAME],
entry.data[CONF_PASSWORD],
entry.data[CONF_SERVER] == SERVER_OUS,
lambda: Dexcom(
username=entry.data[CONF_USERNAME],
password=entry.data[CONF_PASSWORD],
region=Region.OUS
if entry.data[CONF_SERVER] == SERVER_OUS
else Region.US,
)
)
except AccountError:
return False

@@ -5,7 +5,8 @@ from __future__ import annotations
import logging
from typing import Any
from pydexcom import AccountError, Dexcom, SessionError
from pydexcom import Dexcom, Region
from pydexcom.errors import AccountError, SessionError
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult

@@ -37,10 +38,13 @@ class DexcomConfigFlow(ConfigFlow, domain=DOMAIN):
if user_input is not None:
try:
await self.hass.async_add_executor_job(
Dexcom,
user_input[CONF_USERNAME],
user_input[CONF_PASSWORD],
user_input[CONF_SERVER] == SERVER_OUS,
lambda: Dexcom(
username=user_input[CONF_USERNAME],
password=user_input[CONF_PASSWORD],
region=Region.OUS
if user_input[CONF_SERVER] == SERVER_OUS
else Region.US,
)
)
except SessionError:
errors["base"] = "cannot_connect"

@@ -18,7 +18,7 @@ _SCAN_INTERVAL = timedelta(seconds=180)
type DexcomConfigEntry = ConfigEntry[DexcomCoordinator]
class DexcomCoordinator(DataUpdateCoordinator[GlucoseReading]):
class DexcomCoordinator(DataUpdateCoordinator[GlucoseReading | None]):
"""Dexcom Coordinator."""
def __init__(

@@ -37,7 +37,7 @@ class DexcomCoordinator(DataUpdateCoordinator[GlucoseReading]):
)
self.dexcom = dexcom
async def _async_update_data(self) -> GlucoseReading:
async def _async_update_data(self) -> GlucoseReading | None:
"""Fetch data from API endpoint."""
return await self.hass.async_add_executor_job(
self.dexcom.get_current_glucose_reading

@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["pydexcom"],
"requirements": ["pydexcom==0.2.3"]
"requirements": ["pydexcom==0.5.1"]
}

@@ -4,7 +4,7 @@ from __future__ import annotations
from datetime import datetime
import logging
import urllib
import urllib.error
from pyW215.pyW215 import SmartPlug

@@ -41,13 +41,20 @@ class UKFloodsFlowHandler(ConfigFlow, domain=DOMAIN):
self.stations = {}
for station in stations:
label = station["label"]
rloId = station["RLOIid"]
# API annoyingly sometimes returns a list and some times returns a string
# E.g. L3121 has a label of ['Scurf Dyke', 'Scurf Dyke Dyke Level']
if isinstance(label, list):
label = label[-1]
self.stations[label] = station["stationReference"]
# Similar for RLOIid
# E.g. 0018 has an RLOIid of ['10427', '9154']
if isinstance(rloId, list):
rloId = rloId[-1]
fullName = label + " - " + rloId
self.stations[fullName] = station["stationReference"]
if not self.stations:
return self.async_abort(reason="no_stations")

@@ -4,6 +4,8 @@ import asyncio.exceptions
from typing import Any
from flexit_bacnet import (
OPERATION_MODE_FIREPLACE,
OPERATION_MODE_OFF,
VENTILATION_MODE_AWAY,
VENTILATION_MODE_HOME,
VENTILATION_MODE_STOP,

@@ -12,7 +14,6 @@ from flexit_bacnet.bacnet import DecodingError
from homeassistant.components.climate import (
PRESET_AWAY,
PRESET_BOOST,
PRESET_HOME,
ClimateEntity,
ClimateEntityFeature,

@@ -28,8 +29,10 @@ from .const import (
DOMAIN,
MAX_TEMP,
MIN_TEMP,
OPERATION_TO_PRESET_MODE_MAP,
PRESET_FIREPLACE,
PRESET_HIGH,
PRESET_TO_VENTILATION_MODE_MAP,
VENTILATION_TO_PRESET_MODE_MAP,
)
from .coordinator import FlexitConfigEntry, FlexitCoordinator
from .entity import FlexitEntity

@@ -51,6 +54,7 @@ class FlexitClimateEntity(FlexitEntity, ClimateEntity):
"""Flexit air handling unit."""
_attr_name = None
_attr_translation_key = "flexit_bacnet"
_attr_hvac_modes = [
HVACMode.OFF,

@@ -60,7 +64,8 @@ class FlexitClimateEntity(FlexitEntity, ClimateEntity):
_attr_preset_modes = [
PRESET_AWAY,
PRESET_HOME,
PRESET_BOOST,
PRESET_HIGH,
PRESET_FIREPLACE,
]
_attr_supported_features = (

@@ -127,20 +132,29 @@ class FlexitClimateEntity(FlexitEntity, ClimateEntity):
Requires ClimateEntityFeature.PRESET_MODE.
"""
return VENTILATION_TO_PRESET_MODE_MAP[self.device.ventilation_mode]
return OPERATION_TO_PRESET_MODE_MAP[self.device.operation_mode]
async def async_set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
ventilation_mode = PRESET_TO_VENTILATION_MODE_MAP[preset_mode]
try:
await self.device.set_ventilation_mode(ventilation_mode)
if preset_mode == PRESET_FIREPLACE:
# Use trigger method for fireplace mode
await self.device.trigger_fireplace_mode()
else:
# If currently in fireplace mode, toggle it off first
# trigger_fireplace_mode() acts as a toggle
if self.device.operation_mode == OPERATION_MODE_FIREPLACE:
await self.device.trigger_fireplace_mode()
# Set the desired ventilation mode
ventilation_mode = PRESET_TO_VENTILATION_MODE_MAP[preset_mode]
await self.device.set_ventilation_mode(ventilation_mode)
except (asyncio.exceptions.TimeoutError, ConnectionError, DecodingError) as exc:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="set_preset_mode",
translation_placeholders={
"preset": str(ventilation_mode),
"preset": preset_mode,
},
) from exc
finally:

@@ -149,7 +163,7 @@ class FlexitClimateEntity(FlexitEntity, ClimateEntity):
@property
def hvac_mode(self) -> HVACMode:
"""Return hvac operation ie. heat, cool mode."""
if self.device.ventilation_mode == VENTILATION_MODE_STOP:
if self.device.operation_mode == OPERATION_MODE_OFF:
return HVACMode.OFF
return HVACMode.FAN_ONLY

@@ -1,34 +1,40 @@
"""Constants for the Flexit Nordic (BACnet) integration."""

from flexit_bacnet import (
OPERATION_MODE_AWAY,
OPERATION_MODE_FIREPLACE,
OPERATION_MODE_HIGH,
OPERATION_MODE_HOME,
OPERATION_MODE_OFF,
VENTILATION_MODE_AWAY,
VENTILATION_MODE_HIGH,
VENTILATION_MODE_HOME,
VENTILATION_MODE_STOP,
)

from homeassistant.components.climate import (
PRESET_AWAY,
PRESET_BOOST,
PRESET_HOME,
PRESET_NONE,
)
from homeassistant.components.climate import PRESET_AWAY, PRESET_HOME, PRESET_NONE

DOMAIN = "flexit_bacnet"

MAX_TEMP = 30
MIN_TEMP = 10

VENTILATION_TO_PRESET_MODE_MAP = {
VENTILATION_MODE_STOP: PRESET_NONE,
VENTILATION_MODE_AWAY: PRESET_AWAY,
VENTILATION_MODE_HOME: PRESET_HOME,
VENTILATION_MODE_HIGH: PRESET_BOOST,
PRESET_HIGH = "high"
PRESET_FIREPLACE = "fireplace"

# Map operation mode (what device reports) to Home Assistant preset
OPERATION_TO_PRESET_MODE_MAP = {
OPERATION_MODE_OFF: PRESET_NONE,
OPERATION_MODE_AWAY: PRESET_AWAY,
OPERATION_MODE_HOME: PRESET_HOME,
OPERATION_MODE_HIGH: PRESET_HIGH,
OPERATION_MODE_FIREPLACE: PRESET_FIREPLACE,
}

# Map preset to ventilation mode (for setting standard modes)
PRESET_TO_VENTILATION_MODE_MAP = {
PRESET_NONE: VENTILATION_MODE_STOP,
PRESET_AWAY: VENTILATION_MODE_AWAY,
PRESET_HOME: VENTILATION_MODE_HOME,
PRESET_BOOST: VENTILATION_MODE_HIGH,
PRESET_HIGH: VENTILATION_MODE_HIGH,
}

@@ -1,5 +1,17 @@
{
"entity": {
"climate": {
"flexit_bacnet": {
"state_attributes": {
"preset_mode": {
"state": {
"fireplace": "mdi:fireplace",
"high": "mdi:fan-speed-3"
}
}
}
}
},
"number": {
"away_extract_fan_setpoint": {
"default": "mdi:fan-minus"

@@ -26,6 +26,18 @@
"name": "Air filter polluted"
}
},
"climate": {
"flexit_bacnet": {
"state_attributes": {
"preset_mode": {
"state": {
"fireplace": "Fireplace",
"high": "High"
}
}
}
}
},
"number": {
"away_extract_fan_setpoint": {
"name": "Away extract fan setpoint"
@@ -139,5 +151,11 @@
"switch_turn": {
"message": "Failed to turn the switch {state}."
}
},
"issues": {
"deprecated_fireplace_switch": {
"description": "The fireplace mode switch entity `{entity_id}` is deprecated and will be removed in a future version.\n\nFireplace mode has been moved to a climate preset on the climate entity to better match the device interface.\n\nPlease update your automations to use the `climate.set_preset_mode` service with preset mode `fireplace` instead of using the switch entity.\n\nAfter updating your automations, you can safely disable this switch entity.",
"title": "Fireplace mode switch is deprecated"
}
}
}

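For reference, the deprecation text above asks users to switch from the old fireplace switch to the climate preset. A minimal automation sketch using the `climate.set_preset_mode` action could look like the following; the trigger and entity IDs are placeholders, not part of this change:

automation:
  - alias: "Start fireplace ventilation"
    triggers:
      - trigger: state
        entity_id: binary_sensor.fireplace_lit  # hypothetical trigger entity
        to: "on"
    actions:
      - action: climate.set_preset_mode
        target:
          entity_id: climate.flexit_nordic  # placeholder Flexit climate entity
        data:
          preset_mode: "fireplace"
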
@@ -13,9 +13,12 @@ from homeassistant.components.switch import (
|
||||
SwitchEntity,
|
||||
SwitchEntityDescription,
|
||||
)
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import FlexitConfigEntry, FlexitCoordinator
|
||||
@@ -39,13 +42,6 @@ SWITCHES: tuple[FlexitSwitchEntityDescription, ...] = (
|
||||
turn_on_fn=lambda data: data.enable_electric_heater(),
|
||||
turn_off_fn=lambda data: data.disable_electric_heater(),
|
||||
),
|
||||
FlexitSwitchEntityDescription(
|
||||
key="fireplace_mode",
|
||||
translation_key="fireplace_mode",
|
||||
is_on_fn=lambda data: data.fireplace_ventilation_status,
|
||||
turn_on_fn=lambda data: data.trigger_fireplace_mode(),
|
||||
turn_off_fn=lambda data: data.trigger_fireplace_mode(),
|
||||
),
|
||||
FlexitSwitchEntityDescription(
|
||||
key="cooker_hood_mode",
|
||||
translation_key="cooker_hood_mode",
|
||||
@@ -53,6 +49,13 @@ SWITCHES: tuple[FlexitSwitchEntityDescription, ...] = (
|
||||
turn_on_fn=lambda data: data.activate_cooker_hood(),
|
||||
turn_off_fn=lambda data: data.deactivate_cooker_hood(),
|
||||
),
|
||||
FlexitSwitchEntityDescription(
|
||||
key="fireplace_mode",
|
||||
translation_key="fireplace_mode",
|
||||
is_on_fn=lambda data: data.fireplace_ventilation_status,
|
||||
turn_on_fn=lambda data: data.trigger_fireplace_mode(),
|
||||
turn_off_fn=lambda data: data.trigger_fireplace_mode(),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -64,9 +67,42 @@ async def async_setup_entry(
|
||||
"""Set up Flexit (bacnet) switch from a config entry."""
|
||||
coordinator = config_entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
FlexitSwitch(coordinator, description) for description in SWITCHES
|
||||
)
|
||||
entities: list[FlexitSwitch] = []
|
||||
for description in SWITCHES:
|
||||
if description.key == "fireplace_mode":
|
||||
# Check if deprecated fireplace switch is enabled and create repair issue
|
||||
entity_reg = er.async_get(hass)
|
||||
fireplace_switch_unique_id = (
|
||||
f"{coordinator.device.serial_number}-fireplace_mode"
|
||||
)
|
||||
# Look up the fireplace switch entity by unique_id
|
||||
fireplace_switch_entity_id = entity_reg.async_get_entity_id(
|
||||
Platform.SWITCH, DOMAIN, fireplace_switch_unique_id
|
||||
)
|
||||
if not fireplace_switch_entity_id:
|
||||
continue
|
||||
entity_registry_entry = entity_reg.async_get(fireplace_switch_entity_id)
|
||||
|
||||
if entity_registry_entry:
|
||||
if entity_registry_entry.disabled:
|
||||
entity_reg.async_remove(fireplace_switch_entity_id)
|
||||
else:
|
||||
async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
f"deprecated_switch_{fireplace_switch_unique_id}",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_fireplace_switch",
|
||||
translation_placeholders={
|
||||
"entity_id": fireplace_switch_entity_id,
|
||||
},
|
||||
)
|
||||
entities.append(FlexitSwitch(coordinator, description))
|
||||
else:
|
||||
entities.append(FlexitSwitch(coordinator, description))
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"quality_scale": "bronze",
"requirements": ["fressnapftracker==0.2.1"]
"requirements": ["fressnapftracker==0.2.2"]
}

@@ -9,7 +9,7 @@
"iot_class": "local_polling",
"loggers": ["fritzconnection"],
"quality_scale": "bronze",
"requirements": ["fritzconnection[qr]==1.15.0", "xmltodict==1.0.2"],
"requirements": ["fritzconnection[qr]==1.15.1", "xmltodict==1.0.2"],
"ssdp": [
{
"st": "urn:schemas-upnp-org:device:fritzbox:1"

@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["fritzconnection"],
"requirements": ["fritzconnection[qr]==1.15.0"]
"requirements": ["fritzconnection[qr]==1.15.1"]
}

@@ -19,9 +19,7 @@
],
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"preview_features": {
"winter_mode": {}
},
"preview_features": { "winter_mode": {} },
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20260128.1"]
"requirements": ["home-assistant-frontend==20260128.3"]
}

@@ -6,7 +6,7 @@
"documentation": "https://www.home-assistant.io/integrations/hdfury",
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "silver",
"quality_scale": "gold",
"requirements": ["hdfury==1.4.2"],
"zeroconf": [
{ "name": "diva-*", "type": "_http._tcp.local." },

@@ -46,24 +46,26 @@ rules:
diagnostics: done
discovery-update-info: done
discovery: done
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: todo
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices:
status: exempt
comment: Device type integration.
entity-category: done
entity-device-class: done
entity-disabled-by-default: todo
entity-disabled-by-default: done
entity-translations: done
exception-translations: done
icon-translations: done
reconfiguration-flow: done
repair-issues: todo
repair-issues:
status: exempt
comment: The integration doesn't have any repair cases.
stale-devices:
status: exempt
comment: Device type integration.

@@ -35,11 +35,11 @@
},
"services": {
"decrement": {
"description": "Decrements the current value by 1 step.",
"description": "Decrements the value of an input number by 1 step.",
"name": "Decrement"
},
"increment": {
"description": "Increments the current value by 1 step.",
"description": "Increments the value of an input number by 1 step.",
"name": "Increment"
},
"reload": {
@@ -47,7 +47,7 @@
"name": "[%key:common::action::reload%]"
},
"set_value": {
"description": "Sets the value.",
"description": "Sets the value of an input number.",
"fields": {
"value": {
"description": "The target value.",

@@ -7,10 +7,7 @@ import logging
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device import (
|
||||
async_entity_id_to_device_id,
|
||||
async_remove_stale_devices_links_keep_entity_device,
|
||||
)
|
||||
from homeassistant.helpers.device import async_entity_id_to_device_id
|
||||
from homeassistant.helpers.helper_integration import (
|
||||
async_handle_source_entity_changes,
|
||||
async_remove_helper_config_entry_from_source_device,
|
||||
@@ -24,13 +21,6 @@ _LOGGER = logging.getLogger(__name__)
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up Integration from a config entry."""
|
||||
|
||||
# This can be removed in HA Core 2026.2
|
||||
async_remove_stale_devices_links_keep_entity_device(
|
||||
hass,
|
||||
entry.entry_id,
|
||||
entry.options[CONF_SOURCE_SENSOR],
|
||||
)
|
||||
|
||||
def set_source_entity_id_or_uuid(source_entity_id: str) -> None:
|
||||
hass.config_entries.async_update_entry(
|
||||
entry,
|
||||
|
||||
@@ -10,6 +10,7 @@ import voluptuous as vol
|
||||
from homeassistant.components.script import CONF_MODE
|
||||
from homeassistant.const import CONF_DESCRIPTION, CONF_TYPE, SERVICE_RELOAD
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
intent,
|
||||
@@ -18,6 +19,7 @@ from homeassistant.helpers import (
|
||||
template,
|
||||
)
|
||||
from homeassistant.helpers.reload import async_integration_yaml_config
|
||||
from homeassistant.helpers.script import async_validate_actions_config
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -85,19 +87,29 @@ async def async_reload(hass: HomeAssistant, service_call: ServiceCall) -> None:
|
||||
|
||||
new_intents = new_config[DOMAIN]
|
||||
|
||||
async_load_intents(hass, new_intents)
|
||||
await async_load_intents(hass, new_intents)
|
||||
|
||||
|
||||
def async_load_intents(hass: HomeAssistant, intents: dict[str, ConfigType]) -> None:
|
||||
async def async_load_intents(
|
||||
hass: HomeAssistant, intents: dict[str, ConfigType]
|
||||
) -> None:
|
||||
"""Load YAML intents into the intent system."""
|
||||
hass.data[DOMAIN] = intents
|
||||
|
||||
for intent_type, conf in intents.items():
|
||||
if CONF_ACTION in conf:
|
||||
try:
|
||||
actions = await async_validate_actions_config(hass, conf[CONF_ACTION])
|
||||
except (vol.Invalid, HomeAssistantError) as exc:
|
||||
_LOGGER.error(
|
||||
"Failed to validate actions for intent %s: %s", intent_type, exc
|
||||
)
|
||||
continue # Skip this intent
|
||||
|
||||
script_mode: str = conf.get(CONF_MODE, script.DEFAULT_SCRIPT_MODE)
|
||||
conf[CONF_ACTION] = script.Script(
|
||||
hass,
|
||||
conf[CONF_ACTION],
|
||||
actions,
|
||||
f"Intent Script {intent_type}",
|
||||
DOMAIN,
|
||||
script_mode=script_mode,
|
||||
@@ -109,7 +121,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the intent script component."""
|
||||
intents = config[DOMAIN]
|
||||
|
||||
async_load_intents(hass, intents)
|
||||
await async_load_intents(hass, intents)
|
||||
|
||||
async def _handle_reload(service_call: ServiceCall) -> None:
|
||||
return await async_reload(hass, service_call)
|
||||
|
||||
@@ -76,7 +76,7 @@ async def async_migrate_entities(
|
||||
def _update_entry(entry: RegistryEntry) -> dict[str, str] | None:
|
||||
"""Fix unique_id of power binary_sensor entry."""
|
||||
if entry.domain == Platform.BINARY_SENSOR and ":" not in entry.unique_id:
|
||||
if "_power" in entry.unique_id:
|
||||
if entry.unique_id.endswith("_power"):
|
||||
return {"new_unique_id": f"{coordinator.unique_id}_power"}
|
||||
return None
|
||||
|
||||
|
||||
@@ -8,7 +8,6 @@ from homeassistant.components.binary_sensor import BinarySensorEntity
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import POWER
|
||||
from .coordinator import JVCConfigEntry, JvcProjectorDataUpdateCoordinator
|
||||
from .entity import JvcProjectorEntity
|
||||
|
||||
@@ -41,4 +40,4 @@ class JvcBinarySensor(JvcProjectorEntity, BinarySensorEntity):
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if the JVC Projector is on."""
|
||||
return self.coordinator.data[POWER] in ON_STATUS
|
||||
return self.coordinator.data[cmd.Power.name] in ON_STATUS
|
||||
|
||||
@@ -3,7 +3,3 @@
|
||||
NAME = "JVC Projector"
|
||||
DOMAIN = "jvc_projector"
|
||||
MANUFACTURER = "JVC"
|
||||
|
||||
POWER = "power"
|
||||
INPUT = "input"
|
||||
SOURCE = "source"
|
||||
|
||||
@@ -2,29 +2,40 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from jvcprojector import (
|
||||
JvcProjector,
|
||||
JvcProjectorAuthError,
|
||||
JvcProjectorTimeoutError,
|
||||
command as cmd,
|
||||
)
|
||||
from jvcprojector import JvcProjector, JvcProjectorTimeoutError, command as cmd
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import INPUT, NAME, POWER
|
||||
from .const import NAME
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from jvcprojector import Command
|
||||
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
INTERVAL_SLOW = timedelta(seconds=10)
|
||||
INTERVAL_FAST = timedelta(seconds=5)
|
||||
|
||||
CORE_COMMANDS: tuple[type[Command], ...] = (
|
||||
cmd.Power,
|
||||
cmd.Signal,
|
||||
cmd.Input,
|
||||
cmd.LightTime,
|
||||
)
|
||||
|
||||
TRANSLATIONS = str.maketrans({"+": "p", "%": "p", ":": "x"})
|
||||
|
||||
TIMEOUT_RETRIES = 12
|
||||
TIMEOUT_SLEEP = 1
|
||||
|
||||
type JVCConfigEntry = ConfigEntry[JvcProjectorDataUpdateCoordinator]
|
||||
|
||||
|
||||
@@ -51,27 +62,108 @@ class JvcProjectorDataUpdateCoordinator(DataUpdateCoordinator[dict[str, str]]):
|
||||
assert config_entry.unique_id is not None
|
||||
self.unique_id = config_entry.unique_id
|
||||
|
||||
self.capabilities = self.device.capabilities()
|
||||
|
||||
self.state: dict[type[Command], str] = {}
|
||||
|
||||
async def _async_update_data(self) -> dict[str, Any]:
|
||||
"""Get the latest state data."""
|
||||
state: dict[str, str | None] = {
|
||||
POWER: None,
|
||||
INPUT: None,
|
||||
}
|
||||
"""Update state with the current value of a command."""
|
||||
commands: set[type[Command]] = set(self.async_contexts())
|
||||
commands = commands.difference(CORE_COMMANDS)
|
||||
|
||||
try:
|
||||
state[POWER] = await self.device.get(cmd.Power)
|
||||
last_timeout: JvcProjectorTimeoutError | None = None
|
||||
|
||||
if state[POWER] == cmd.Power.ON:
|
||||
state[INPUT] = await self.device.get(cmd.Input)
|
||||
for _ in range(TIMEOUT_RETRIES):
|
||||
try:
|
||||
new_state = await self._get_device_state(commands)
|
||||
break
|
||||
except JvcProjectorTimeoutError as err:
|
||||
# Timeouts are expected when the projector loses signal and ignores commands for a brief time.
|
||||
last_timeout = err
|
||||
await asyncio.sleep(TIMEOUT_SLEEP)
|
||||
else:
|
||||
raise UpdateFailed(str(last_timeout)) from last_timeout
|
||||
|
||||
except JvcProjectorTimeoutError as err:
|
||||
raise UpdateFailed(f"Unable to connect to {self.device.host}") from err
|
||||
except JvcProjectorAuthError as err:
|
||||
raise ConfigEntryAuthFailed("Password authentication failed") from err
|
||||
# Clear state on signal loss
|
||||
if (
|
||||
new_state.get(cmd.Signal) == cmd.Signal.NONE
|
||||
and self.state.get(cmd.Signal) != cmd.Signal.NONE
|
||||
):
|
||||
self.state = {k: v for k, v in self.state.items() if k in CORE_COMMANDS}
|
||||
|
||||
if state[POWER] != cmd.Power.STANDBY:
|
||||
# Update state with new values
|
||||
for k, v in new_state.items():
|
||||
self.state[k] = v
|
||||
|
||||
if self.state[cmd.Power] != cmd.Power.STANDBY:
|
||||
self.update_interval = INTERVAL_FAST
|
||||
else:
|
||||
self.update_interval = INTERVAL_SLOW
|
||||
|
||||
return state
|
||||
return {k.name: v for k, v in self.state.items()}
|
||||
|
||||
async def _get_device_state(
|
||||
self, commands: set[type[Command]]
|
||||
) -> dict[type[Command], str]:
|
||||
"""Get the current state of the device."""
|
||||
new_state: dict[type[Command], str] = {}
|
||||
deferred_commands: list[type[Command]] = []
|
||||
|
||||
power = await self._update_command_state(cmd.Power, new_state)
|
||||
|
||||
if power == cmd.Power.ON:
|
||||
signal = await self._update_command_state(cmd.Signal, new_state)
|
||||
await self._update_command_state(cmd.Input, new_state)
|
||||
await self._update_command_state(cmd.LightTime, new_state)
|
||||
|
||||
if signal == cmd.Signal.SIGNAL:
|
||||
for command in commands:
|
||||
if command.depends:
|
||||
# Command has dependencies so defer until below
|
||||
deferred_commands.append(command)
|
||||
else:
|
||||
await self._update_command_state(command, new_state)
|
||||
|
||||
# Deferred commands should have had dependencies met above
|
||||
for command in deferred_commands:
|
||||
depend_command, depend_values = next(iter(command.depends.items()))
|
||||
value: str | None = None
|
||||
if depend_command in new_state:
|
||||
value = new_state[depend_command]
|
||||
elif depend_command in self.state:
|
||||
value = self.state[depend_command]
|
||||
if value and value in depend_values:
|
||||
await self._update_command_state(command, new_state)
|
||||
|
||||
elif self.state.get(cmd.Signal) != cmd.Signal.NONE:
|
||||
new_state[cmd.Signal] = cmd.Signal.NONE
|
||||
|
||||
return new_state
|
||||
|
||||
async def _update_command_state(
|
||||
self, command: type[Command], new_state: dict[type[Command], str]
|
||||
) -> str | None:
|
||||
"""Update state with the current value of a command."""
|
||||
value = await self.device.get(command)
|
||||
|
||||
if value != self.state.get(command):
|
||||
new_state[command] = value
|
||||
|
||||
return value
|
||||
|
||||
def get_options_map(self, command: str) -> dict[str, str]:
|
||||
"""Get the available options for a command."""
|
||||
capabilities = self.capabilities.get(command, {})
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert isinstance(capabilities, dict)
|
||||
assert isinstance(capabilities.get("parameter", {}), dict)
|
||||
assert isinstance(capabilities.get("parameter", {}).get("read", {}), dict)
|
||||
|
||||
values = list(capabilities.get("parameter", {}).get("read", {}).values())
|
||||
|
||||
return {v: v.translate(TRANSLATIONS) for v in values}
|
||||
|
||||
def supports(self, command: type[Command]) -> bool:
|
||||
"""Check if the device supports a command."""
|
||||
return self.device.supports(command)
|
||||
|
||||
@@ -4,7 +4,7 @@ from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from jvcprojector import JvcProjector
|
||||
from jvcprojector import Command, JvcProjector
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
@@ -20,9 +20,13 @@ class JvcProjectorEntity(CoordinatorEntity[JvcProjectorDataUpdateCoordinator]):
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(self, coordinator: JvcProjectorDataUpdateCoordinator) -> None:
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: JvcProjectorDataUpdateCoordinator,
|
||||
command: type[Command] | None = None,
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(coordinator)
|
||||
super().__init__(coordinator, command)
|
||||
|
||||
self._attr_unique_id = coordinator.unique_id
|
||||
self._attr_device_info = DeviceInfo(
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"jvc_power": {
|
||||
"power": {
|
||||
"default": "mdi:projector-off",
|
||||
"state": {
|
||||
"on": "mdi:projector"
|
||||
@@ -9,17 +9,47 @@
|
||||
}
|
||||
},
|
||||
"select": {
|
||||
"anamorphic": {
|
||||
"default": "mdi:fit-to-screen-outline"
|
||||
},
|
||||
"clear_motion_drive": {
|
||||
"default": "mdi:blur"
|
||||
},
|
||||
"dynamic_control": {
|
||||
"default": "mdi:lightbulb-on-outline"
|
||||
},
|
||||
"input": {
|
||||
"default": "mdi:hdmi-port"
|
||||
},
|
||||
"installation_mode": {
|
||||
"default": "mdi:aspect-ratio"
|
||||
},
|
||||
"light_power": {
|
||||
"default": "mdi:lightbulb-on-outline"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"jvc_power_status": {
|
||||
"default": "mdi:power-plug-off",
|
||||
"color_depth": {
|
||||
"default": "mdi:palette-outline"
|
||||
},
|
||||
"color_space": {
|
||||
"default": "mdi:palette-outline"
|
||||
},
|
||||
"hdr": {
|
||||
"default": "mdi:image-filter-hdr-outline"
|
||||
},
|
||||
"hdr_processing": {
|
||||
"default": "mdi:image-filter-hdr-outline"
|
||||
},
|
||||
"picture_mode": {
|
||||
"default": "mdi:movie-roll"
|
||||
},
|
||||
"power": {
|
||||
"default": "mdi:power",
|
||||
"state": {
|
||||
"cooling": "mdi:snowflake",
|
||||
"error": "mdi:alert-circle",
|
||||
"on": "mdi:power-plug",
|
||||
"on": "mdi:power",
|
||||
"warming": "mdi:heat-wave"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["jvcprojector"],
"requirements": ["pyjvcprojector==2.0.0"]
"requirements": ["pyjvcprojector==2.0.1"]
}

@@ -14,7 +14,6 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import POWER
|
||||
from .coordinator import JVCConfigEntry
|
||||
from .entity import JvcProjectorEntity
|
||||
|
||||
@@ -65,6 +64,8 @@ RENAMED_COMMANDS: dict[str, str] = {
|
||||
"hdmi2": cmd.Remote.HDMI2,
|
||||
}
|
||||
|
||||
ON_STATUS = (cmd.Power.ON, cmd.Power.WARMING)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -86,7 +87,7 @@ class JvcProjectorRemote(JvcProjectorEntity, RemoteEntity):
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return True if the entity is on."""
|
||||
return self.coordinator.data[POWER] in (cmd.Power.ON, cmd.Power.WARMING)
|
||||
return self.coordinator.data.get(cmd.Power.name) in ON_STATUS
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the device on."""
|
||||
|
||||
@@ -2,11 +2,10 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import Final
|
||||
|
||||
from jvcprojector import JvcProjector, command as cmd
|
||||
from jvcprojector import Command, command as cmd
|
||||
|
||||
from homeassistant.components.select import SelectEntity, SelectEntityDescription
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -20,17 +19,37 @@ from .entity import JvcProjectorEntity
|
||||
class JvcProjectorSelectDescription(SelectEntityDescription):
|
||||
"""Describes JVC Projector select entities."""
|
||||
|
||||
command: Callable[[JvcProjector, str], Awaitable[None]]
|
||||
command: type[Command]
|
||||
|
||||
|
||||
SELECTS: Final[list[JvcProjectorSelectDescription]] = [
|
||||
SELECTS: Final[tuple[JvcProjectorSelectDescription, ...]] = (
|
||||
JvcProjectorSelectDescription(key="input", command=cmd.Input),
|
||||
JvcProjectorSelectDescription(
|
||||
key="input",
|
||||
translation_key="input",
|
||||
options=[cmd.Input.HDMI1, cmd.Input.HDMI2],
|
||||
command=lambda device, option: device.set(cmd.Input, option),
|
||||
)
|
||||
]
|
||||
key="installation_mode",
|
||||
command=cmd.InstallationMode,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
JvcProjectorSelectDescription(
|
||||
key="light_power",
|
||||
command=cmd.LightPower,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
JvcProjectorSelectDescription(
|
||||
key="dynamic_control",
|
||||
command=cmd.DynamicControl,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
JvcProjectorSelectDescription(
|
||||
key="clear_motion_drive",
|
||||
command=cmd.ClearMotionDrive,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
JvcProjectorSelectDescription(
|
||||
key="anamorphic",
|
||||
command=cmd.Anamorphic,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -42,30 +61,45 @@ async def async_setup_entry(
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
JvcProjectorSelectEntity(coordinator, description) for description in SELECTS
|
||||
JvcProjectorSelectEntity(coordinator, description)
|
||||
for description in SELECTS
|
||||
if coordinator.supports(description.command)
|
||||
)
|
||||
|
||||
|
||||
class JvcProjectorSelectEntity(JvcProjectorEntity, SelectEntity):
|
||||
"""Representation of a JVC Projector select entity."""
|
||||
|
||||
entity_description: JvcProjectorSelectDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: JvcProjectorDataUpdateCoordinator,
|
||||
description: JvcProjectorSelectDescription,
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(coordinator)
|
||||
super().__init__(coordinator, description.command)
|
||||
self.command: type[Command] = description.command
|
||||
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{coordinator.unique_id}_{description.key}"
|
||||
self._attr_translation_key = description.key
|
||||
self._attr_unique_id = f"{self._attr_unique_id}_{description.key}"
|
||||
|
||||
self._options_map: dict[str, str] = coordinator.get_options_map(
|
||||
self.command.name
|
||||
)
|
||||
|
||||
@property
|
||||
def options(self) -> list[str]:
|
||||
"""Return a list of selectable options."""
|
||||
return list(self._options_map.values())
|
||||
|
||||
@property
|
||||
def current_option(self) -> str | None:
|
||||
"""Return the selected entity option to represent the entity state."""
|
||||
return self.coordinator.data[self.entity_description.key]
|
||||
if value := self.coordinator.data.get(self.command.name):
|
||||
return self._options_map.get(value)
|
||||
return None
|
||||
|
||||
async def async_select_option(self, option: str) -> None:
|
||||
"""Change the selected option."""
|
||||
await self.entity_description.command(self.coordinator.device, option)
|
||||
value = next((k for k, v in self._options_map.items() if v == option), None)
|
||||
await self.coordinator.device.set(self.command, value)
|
||||
|
||||
@@ -2,33 +2,77 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from jvcprojector import command as cmd
|
||||
from dataclasses import dataclass
|
||||
|
||||
from jvcprojector import Command, command as cmd
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
)
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.const import EntityCategory, UnitOfTime
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import JVCConfigEntry, JvcProjectorDataUpdateCoordinator
|
||||
from .entity import JvcProjectorEntity
|
||||
|
||||
JVC_SENSORS = (
|
||||
SensorEntityDescription(
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class JvcProjectorSensorDescription(SensorEntityDescription):
|
||||
"""Describes JVC Projector sensor entities."""
|
||||
|
||||
command: type[Command]
|
||||
|
||||
|
||||
SENSORS: tuple[JvcProjectorSensorDescription, ...] = (
|
||||
JvcProjectorSensorDescription(
|
||||
key="power",
|
||||
translation_key="jvc_power_status",
|
||||
command=cmd.Power,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
),
|
||||
JvcProjectorSensorDescription(
|
||||
key="light_time",
|
||||
command=cmd.LightTime,
|
||||
device_class=SensorDeviceClass.DURATION,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
native_unit_of_measurement=UnitOfTime.HOURS,
|
||||
),
|
||||
JvcProjectorSensorDescription(
|
||||
key="color_depth",
|
||||
command=cmd.ColorDepth,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
options=[
|
||||
cmd.Power.STANDBY,
|
||||
cmd.Power.ON,
|
||||
cmd.Power.WARMING,
|
||||
cmd.Power.COOLING,
|
||||
cmd.Power.ERROR,
|
||||
],
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
JvcProjectorSensorDescription(
|
||||
key="color_space",
|
||||
command=cmd.ColorSpace,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
JvcProjectorSensorDescription(
|
||||
key="hdr",
|
||||
command=cmd.Hdr,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
JvcProjectorSensorDescription(
|
||||
key="hdr_processing",
|
||||
command=cmd.HdrProcessing,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
JvcProjectorSensorDescription(
|
||||
key="picture_mode",
|
||||
command=cmd.PictureMode,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
)
|
||||
|
||||
@@ -42,24 +86,48 @@ async def async_setup_entry(
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
JvcSensor(coordinator, description) for description in JVC_SENSORS
|
||||
JvcProjectorSensorEntity(coordinator, description)
|
||||
for description in SENSORS
|
||||
if coordinator.supports(description.command)
|
||||
)
|
||||
|
||||
|
||||
class JvcSensor(JvcProjectorEntity, SensorEntity):
|
||||
class JvcProjectorSensorEntity(JvcProjectorEntity, SensorEntity):
|
||||
"""The entity class for JVC Projector integration."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: JvcProjectorDataUpdateCoordinator,
|
||||
description: SensorEntityDescription,
|
||||
description: JvcProjectorSensorDescription,
|
||||
) -> None:
|
||||
"""Initialize the JVC Projector sensor."""
|
||||
super().__init__(coordinator)
|
||||
super().__init__(coordinator, description.command)
|
||||
self.command: type[Command] = description.command
|
||||
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{coordinator.unique_id}_{description.key}"
|
||||
self._attr_translation_key = description.key
|
||||
self._attr_unique_id = f"{self._attr_unique_id}_{description.key}"
|
||||
|
||||
self._options_map: dict[str, str] = {}
|
||||
if self.device_class == SensorDeviceClass.ENUM:
|
||||
self._options_map = coordinator.get_options_map(self.command.name)
|
||||
|
||||
@property
|
||||
def options(self) -> list[str] | None:
|
||||
"""Return a set of possible options."""
|
||||
if self.device_class == SensorDeviceClass.ENUM:
|
||||
return list(self._options_map.values())
|
||||
return None
|
||||
|
||||
@property
|
||||
def native_value(self) -> str | None:
|
||||
"""Return the native value."""
|
||||
return self.coordinator.data[self.entity_description.key]
|
||||
value = self.coordinator.data.get(self.command.name)
|
||||
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
if self.device_class == SensorDeviceClass.ENUM:
|
||||
return self._options_map.get(value)
|
||||
|
||||
return value
|
||||
|
||||
@@ -36,20 +36,134 @@
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"power": {
|
||||
"name": "[%key:component::binary_sensor::entity_component::power::name%]"
|
||||
"name": "Power"
|
||||
}
|
||||
},
|
||||
"select": {
|
||||
"anamorphic": {
|
||||
"name": "Anamorphic",
|
||||
"state": {
|
||||
"a": "A",
|
||||
"b": "B",
|
||||
"c": "C",
|
||||
"d": "D",
|
||||
"off": "[%key:common::state::off%]"
|
||||
}
|
||||
},
|
||||
"clear_motion_drive": {
|
||||
"name": "Clear Motion Drive",
|
||||
"state": {
|
||||
"high": "[%key:common::state::high%]",
|
||||
"inverse-telecine": "Inverse Telecine",
|
||||
"low": "[%key:common::state::low%]",
|
||||
"off": "[%key:common::state::off%]"
|
||||
}
|
||||
},
|
||||
"dynamic_control": {
|
||||
"name": "Dynamic Control",
|
||||
"state": {
|
||||
"balanced": "Balanced",
|
||||
"high": "[%key:common::state::high%]",
|
||||
"low": "[%key:common::state::low%]",
|
||||
"mode-1": "Mode 1",
|
||||
"mode-2": "Mode 2",
|
||||
"mode-3": "Mode 3",
|
||||
"off": "[%key:common::state::off%]"
|
||||
}
|
||||
},
|
||||
"input": {
|
||||
"name": "Input",
|
||||
"state": {
|
||||
"hdmi1": "HDMI 1",
|
||||
"hdmi2": "HDMI 2"
|
||||
}
|
||||
},
|
||||
"installation_mode": {
|
||||
"name": "Installation Mode",
|
||||
"state": {
|
||||
"memory-1": "Memory 1",
|
||||
"memory-10": "Memory 10",
|
||||
"memory-2": "Memory 2",
|
||||
"memory-3": "Memory 3",
|
||||
"memory-4": "Memory 4",
|
||||
"memory-5": "Memory 5",
|
||||
"memory-6": "Memory 6",
|
||||
"memory-7": "Memory 7",
|
||||
"memory-8": "Memory 8",
|
||||
"memory-9": "Memory 9"
|
||||
}
|
||||
},
|
||||
"light_power": {
|
||||
"name": "Light Power",
|
||||
"state": {
|
||||
"high": "[%key:common::state::high%]",
|
||||
"low": "[%key:common::state::low%]",
|
||||
"mid": "[%key:common::state::medium%]",
|
||||
"normal": "[%key:common::state::normal%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"jvc_power_status": {
|
||||
"color_depth": {
|
||||
"name": "Color Depth",
|
||||
"state": {
|
||||
"8-bit": "8-bit",
|
||||
"10-bit": "10-bit",
|
||||
"12-bit": "12-bit"
|
||||
}
|
||||
},
|
||||
"color_space": {
|
||||
"name": "Color Space",
|
||||
"state": {
|
||||
"rgb": "RGB",
|
||||
"xv-color": "XV Color",
|
||||
"ycbcr-420": "YCbCr 4:2:0",
|
||||
"ycbcr-422": "YCbCr 4:2:2",
|
||||
"ycbcr-444": "YCbCr 4:4:4",
|
||||
"yuv": "YUV"
|
||||
}
|
||||
},
|
||||
"hdr": {
|
||||
"name": "HDR",
|
||||
"state": {
|
||||
"hdr": "HDR",
|
||||
"hdr10p": "HDR10+",
|
||||
"hybrid-log": "Hybrid Log",
|
||||
"none": "None",
|
||||
"sdr": "SDR",
|
||||
"smpte-st-2084": "SMPTE ST 2084"
|
||||
}
|
||||
},
|
||||
"hdr_processing": {
|
||||
"name": "HDR Processing",
|
||||
"state": {
|
||||
"frame-by-frame": "Frame-by-Frame",
|
||||
"hdr10p": "HDR10+",
|
||||
"scene-by-scene": "Scene-by-Scene",
|
||||
"static": "Static"
|
||||
}
|
||||
},
|
||||
"light_time": {
|
||||
"name": "Light Time"
|
||||
},
|
||||
"picture_mode": {
|
||||
"name": "Picture Mode",
|
||||
"state": {
|
||||
"frame-adapt-hdr": "Frame Adapt HDR",
|
||||
"frame-adapt-hdr2": "Frame Adapt HDR2",
|
||||
"frame-adapt-hdr3": "Frame Adapt HDR3",
|
||||
"hdr1": "HDR1",
|
||||
"hdr10": "HDR10",
|
||||
"hdr10-ll": "HDR10 LL",
|
||||
"hdr2": "HDR2",
|
||||
"last-setting": "Last Setting",
|
||||
"pana-pq": "Pana PQ",
|
||||
"user-4": "User 4",
|
||||
"user-5": "User 5",
|
||||
"user-6": "User 6"
|
||||
}
|
||||
},
|
||||
"power": {
|
||||
"name": "Status",
|
||||
"state": {
|
||||
"cooling": "Cooling",
|
||||
|
||||
67
homeassistant/components/liebherr/__init__.py
Normal file
@@ -0,0 +1,67 @@
|
||||
"""The liebherr integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
|
||||
from pyliebherrhomeapi import LiebherrClient
|
||||
from pyliebherrhomeapi.exceptions import (
|
||||
LiebherrAuthenticationError,
|
||||
LiebherrConnectionError,
|
||||
)
|
||||
|
||||
from homeassistant.const import CONF_API_KEY, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .coordinator import LiebherrConfigEntry, LiebherrCoordinator
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.SENSOR]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: LiebherrConfigEntry) -> bool:
|
||||
"""Set up Liebherr from a config entry."""
|
||||
# Create shared API client
|
||||
client = LiebherrClient(
|
||||
api_key=entry.data[CONF_API_KEY],
|
||||
session=async_get_clientsession(hass),
|
||||
)
|
||||
|
||||
# Fetch device list to create coordinators
|
||||
try:
|
||||
devices = await client.get_devices()
|
||||
except LiebherrAuthenticationError as err:
|
||||
raise ConfigEntryError("Invalid API key") from err
|
||||
except LiebherrConnectionError as err:
|
||||
raise ConfigEntryNotReady(f"Failed to connect to Liebherr API: {err}") from err
|
||||
|
||||
# Create a coordinator for each device (may be empty if no devices)
|
||||
coordinators: dict[str, LiebherrCoordinator] = {}
|
||||
for device in devices:
|
||||
coordinator = LiebherrCoordinator(
|
||||
hass=hass,
|
||||
config_entry=entry,
|
||||
client=client,
|
||||
device_id=device.device_id,
|
||||
)
|
||||
coordinators[device.device_id] = coordinator
|
||||
|
||||
await asyncio.gather(
|
||||
*(
|
||||
coordinator.async_config_entry_first_refresh()
|
||||
for coordinator in coordinators.values()
|
||||
)
|
||||
)
|
||||
|
||||
# Store coordinators in runtime data
|
||||
entry.runtime_data = coordinators
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: LiebherrConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
68
homeassistant/components/liebherr/config_flow.py
Normal file
@@ -0,0 +1,68 @@
|
||||
"""Config flow for the liebherr integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pyliebherrhomeapi import LiebherrClient
|
||||
from pyliebherrhomeapi.exceptions import (
|
||||
LiebherrAuthenticationError,
|
||||
LiebherrConnectionError,
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_API_KEY
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_API_KEY): str,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class LiebherrConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for liebherr."""
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
user_input[CONF_API_KEY] = user_input[CONF_API_KEY].strip()
|
||||
|
||||
self._async_abort_entries_match({CONF_API_KEY: user_input[CONF_API_KEY]})
|
||||
|
||||
try:
|
||||
# Create a client and test the connection
|
||||
client = LiebherrClient(
|
||||
api_key=user_input[CONF_API_KEY],
|
||||
session=async_get_clientsession(self.hass),
|
||||
)
|
||||
devices = await client.get_devices()
|
||||
except LiebherrAuthenticationError:
|
||||
errors["base"] = "invalid_auth"
|
||||
except LiebherrConnectionError:
|
||||
errors["base"] = "cannot_connect"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
if not devices:
|
||||
return self.async_abort(reason="no_devices")
|
||||
|
||||
return self.async_create_entry(
|
||||
title="Liebherr",
|
||||
data=user_input,
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||
)
|
||||
6
homeassistant/components/liebherr/const.py
Normal file
@@ -0,0 +1,6 @@
"""Constants for the liebherr integration."""

from typing import Final

DOMAIN: Final = "liebherr"
MANUFACTURER: Final = "Liebherr"
75
homeassistant/components/liebherr/coordinator.py
Normal file
@@ -0,0 +1,75 @@
|
||||
"""DataUpdateCoordinator for Liebherr integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from pyliebherrhomeapi import (
|
||||
DeviceState,
|
||||
LiebherrAuthenticationError,
|
||||
LiebherrClient,
|
||||
LiebherrConnectionError,
|
||||
LiebherrTimeoutError,
|
||||
)
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
type LiebherrConfigEntry = ConfigEntry[dict[str, LiebherrCoordinator]]
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=60)
|
||||
|
||||
|
||||
class LiebherrCoordinator(DataUpdateCoordinator[DeviceState]):
|
||||
"""Class to manage fetching Liebherr data from the API for a single device."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry: LiebherrConfigEntry,
|
||||
client: LiebherrClient,
|
||||
device_id: str,
|
||||
) -> None:
|
||||
"""Initialize coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
logger=_LOGGER,
|
||||
name=f"{DOMAIN}_{device_id}",
|
||||
update_interval=SCAN_INTERVAL,
|
||||
config_entry=config_entry,
|
||||
)
|
||||
self.client = client
|
||||
self.device_id = device_id
|
||||
|
||||
async def _async_setup(self) -> None:
|
||||
"""Set up the coordinator by validating device access."""
|
||||
try:
|
||||
await self.client.get_device(self.device_id)
|
||||
except LiebherrAuthenticationError as err:
|
||||
raise ConfigEntryError("Invalid API key") from err
|
||||
except LiebherrConnectionError as err:
|
||||
raise ConfigEntryNotReady(
|
||||
f"Failed to connect to device {self.device_id}: {err}"
|
||||
) from err
|
||||
|
||||
async def _async_update_data(self) -> DeviceState:
|
||||
"""Fetch data from API for this device."""
|
||||
try:
|
||||
return await self.client.get_device_state(self.device_id)
|
||||
except LiebherrAuthenticationError as err:
|
||||
raise ConfigEntryError("API key is no longer valid") from err
|
||||
except LiebherrTimeoutError as err:
|
||||
raise UpdateFailed(
|
||||
f"Timeout communicating with device {self.device_id}"
|
||||
) from err
|
||||
except LiebherrConnectionError as err:
|
||||
raise UpdateFailed(
|
||||
f"Error communicating with device {self.device_id}"
|
||||
) from err
|
||||
75
homeassistant/components/liebherr/entity.py
Normal file
@@ -0,0 +1,75 @@
|
||||
"""Base entity for Liebherr integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pyliebherrhomeapi import TemperatureControl, ZonePosition
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN, MANUFACTURER
|
||||
from .coordinator import LiebherrCoordinator
|
||||
|
||||
# Zone position to translation key mapping
|
||||
ZONE_POSITION_MAP = {
|
||||
ZonePosition.TOP: "top_zone",
|
||||
ZonePosition.MIDDLE: "middle_zone",
|
||||
ZonePosition.BOTTOM: "bottom_zone",
|
||||
}
|
||||
|
||||
|
||||
class LiebherrEntity(CoordinatorEntity[LiebherrCoordinator]):
|
||||
"""Base entity for Liebherr devices."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: LiebherrCoordinator,
|
||||
) -> None:
|
||||
"""Initialize the Liebherr entity."""
|
||||
super().__init__(coordinator)
|
||||
|
||||
device = coordinator.data.device
|
||||
|
||||
model = None
|
||||
if device.device_type:
|
||||
model = device.device_type.title()
|
||||
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, coordinator.device_id)},
|
||||
name=device.nickname or device.device_name,
|
||||
manufacturer=MANUFACTURER,
|
||||
model=model,
|
||||
model_id=device.device_name,
|
||||
)
|
||||
|
||||
|
||||
class LiebherrZoneEntity(LiebherrEntity):
|
||||
"""Base entity for zone-based Liebherr entities.
|
||||
|
||||
This class should be used for entities that are associated with a specific
|
||||
temperature control zone (e.g., climate, zone sensors).
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: LiebherrCoordinator,
|
||||
zone_id: int,
|
||||
) -> None:
|
||||
"""Initialize the zone entity."""
|
||||
super().__init__(coordinator)
|
||||
self._zone_id = zone_id
|
||||
|
||||
@property
|
||||
def temperature_control(self) -> TemperatureControl | None:
|
||||
"""Get the temperature control for this zone."""
|
||||
return self.coordinator.data.get_temperature_controls().get(self._zone_id)
|
||||
|
||||
def _get_zone_translation_key(self) -> str | None:
|
||||
"""Get the translation key for this zone."""
|
||||
control = self.temperature_control
|
||||
if control and isinstance(control.zone_position, ZonePosition):
|
||||
return ZONE_POSITION_MAP.get(control.zone_position)
|
||||
# Fallback to None to use device model name
|
||||
return None
|
||||
18
homeassistant/components/liebherr/manifest.json
Normal file
@@ -0,0 +1,18 @@
{
"domain": "liebherr",
"name": "Liebherr",
"codeowners": ["@mettolen"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/liebherr",
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["pyliebherrhomeapi"],
"quality_scale": "bronze",
"requirements": ["pyliebherrhomeapi==0.2.1"],
"zeroconf": [
{
"name": "liebherr*",
"type": "_http._tcp.local."
}
]
}
72
homeassistant/components/liebherr/quality_scale.yaml
Normal file
@@ -0,0 +1,72 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: Integration does not register custom actions.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: Integration does not register custom actions.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: Integration does not register custom actions.
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: exempt
|
||||
comment: Integration has no configurable parameters after initial setup.
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: todo
|
||||
parallel-updates: done
|
||||
reauthentication-flow: todo
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: Cloud API does not require updating entry data from network discovery.
|
||||
discovery: done
|
||||
docs-data-update: done
|
||||
docs-examples: todo
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: done
|
||||
dynamic-devices: todo
|
||||
entity-category: done
|
||||
entity-device-class: todo
|
||||
entity-disabled-by-default: todo
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: No repair issues to implement at this time.
|
||||
stale-devices: todo
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing: todo
|
||||
118
homeassistant/components/liebherr/sensor.py
Normal file
@@ -0,0 +1,118 @@
|
||||
"""Sensor platform for Liebherr integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
|
||||
from pyliebherrhomeapi import TemperatureControl, TemperatureUnit
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
|
||||
from .coordinator import LiebherrConfigEntry, LiebherrCoordinator
|
||||
from .entity import LiebherrZoneEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class LiebherrSensorEntityDescription(SensorEntityDescription):
|
||||
"""Describes Liebherr sensor entity."""
|
||||
|
||||
value_fn: Callable[[TemperatureControl], StateType]
|
||||
unit_fn: Callable[[TemperatureControl], str]
|
||||
|
||||
|
||||
SENSOR_TYPES: tuple[LiebherrSensorEntityDescription, ...] = (
|
||||
LiebherrSensorEntityDescription(
|
||||
key="temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda control: control.value,
|
||||
unit_fn=lambda control: (
|
||||
UnitOfTemperature.FAHRENHEIT
|
||||
if control.unit == TemperatureUnit.FAHRENHEIT
|
||||
else UnitOfTemperature.CELSIUS
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: LiebherrConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Liebherr sensor entities."""
|
||||
coordinators = entry.runtime_data
|
||||
entities: list[LiebherrSensor] = []
|
||||
|
||||
for coordinator in coordinators.values():
|
||||
# Get all temperature controls for this device
|
||||
temp_controls = coordinator.data.get_temperature_controls()
|
||||
|
||||
for temp_control in temp_controls.values():
|
||||
entities.extend(
|
||||
LiebherrSensor(
|
||||
coordinator=coordinator,
|
||||
zone_id=temp_control.zone_id,
|
||||
description=description,
|
||||
)
|
||||
for description in SENSOR_TYPES
|
||||
)
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
class LiebherrSensor(LiebherrZoneEntity, SensorEntity):
|
||||
"""Representation of a Liebherr sensor."""
|
||||
|
||||
entity_description: LiebherrSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: LiebherrCoordinator,
|
||||
zone_id: int,
|
||||
description: LiebherrSensorEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the sensor entity."""
|
||||
super().__init__(coordinator, zone_id)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{coordinator.device_id}_{description.key}_{zone_id}"
|
||||
|
||||
# If device has only one zone, use model name instead of zone name
|
||||
temp_controls = coordinator.data.get_temperature_controls()
|
||||
if len(temp_controls) == 1:
|
||||
self._attr_name = None
|
||||
else:
|
||||
# Set translation key based on zone position for multi-zone devices
|
||||
self._attr_translation_key = self._get_zone_translation_key()
|
||||
|
||||
@property
|
||||
def native_unit_of_measurement(self) -> str | None:
|
||||
"""Return the unit of measurement."""
|
||||
if (temp_control := self.temperature_control) is None:
|
||||
return None
|
||||
return self.entity_description.unit_fn(temp_control)
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Return the current value."""
|
||||
if (temp_control := self.temperature_control) is None:
|
||||
return None
|
||||
return self.entity_description.value_fn(temp_control)
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if entity is available."""
|
||||
return super().available and self.temperature_control is not None
|
||||
38
homeassistant/components/liebherr/strings.json
Normal file
@@ -0,0 +1,38 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"no_devices": "No devices found for this API key"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"no_devices": "No devices found for this API key",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]"
|
||||
},
|
||||
"data_description": {
|
||||
"api_key": "The API key from the Liebherr SmartDevice app. Note: The API key can only be copied once from the app."
|
||||
},
|
||||
"description": "Enter your Liebherr HomeAPI key. You can find it in the Liebherr SmartDevice app under Settings → Become a beta tester."
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"bottom_zone": {
|
||||
"name": "Bottom zone"
|
||||
},
|
||||
"middle_zone": {
|
||||
"name": "Middle zone"
|
||||
},
|
||||
"top_zone": {
|
||||
"name": "Top zone"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -73,6 +73,3 @@ LIFX_CEILING_PRODUCT_IDS = {176, 177, 201, 202}
|
||||
LIFX_128ZONE_CEILING_PRODUCT_IDS = {201, 202}
|
||||
|
||||
_LOGGER = logging.getLogger(__package__)
|
||||
|
||||
# _ATTR_COLOR_TEMP deprecated - to be removed in 2026.1
|
||||
_ATTR_COLOR_TEMP = "color_temp"
|
||||
|
||||
@@ -33,7 +33,7 @@ from homeassistant.helpers.target import (
|
||||
async_extract_referenced_entity_ids,
|
||||
)
|
||||
|
||||
from .const import _ATTR_COLOR_TEMP, ATTR_THEME, DOMAIN
|
||||
from .const import ATTR_THEME, DOMAIN
|
||||
from .coordinator import LIFXUpdateCoordinator
|
||||
from .util import convert_8_to_16, find_hsbk
|
||||
|
||||
@@ -135,8 +135,6 @@ LIFX_EFFECT_PULSE_SCHEMA = cv.make_entity_service_schema(
|
||||
vol.Exclusive(ATTR_COLOR_TEMP_KELVIN, COLOR_GROUP): vol.All(
|
||||
vol.Coerce(int), vol.Range(min=1500, max=9000)
|
||||
),
|
||||
# _ATTR_COLOR_TEMP deprecated - to be removed in 2026.1
|
||||
vol.Exclusive(_ATTR_COLOR_TEMP, COLOR_GROUP): cv.positive_int,
|
||||
ATTR_PERIOD: vol.All(vol.Coerce(float), vol.Range(min=0.05)),
|
||||
ATTR_CYCLES: vol.All(vol.Coerce(float), vol.Range(min=1)),
|
||||
ATTR_MODE: vol.In(PULSE_MODES),
|
||||
|
||||
@@ -26,7 +26,6 @@ from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.util import color as color_util
|
||||
|
||||
from .const import (
|
||||
_ATTR_COLOR_TEMP,
|
||||
_LOGGER,
|
||||
DEFAULT_ATTEMPTS,
|
||||
DOMAIN,
|
||||
@@ -115,17 +114,6 @@ def find_hsbk(hass: HomeAssistant, **kwargs: Any) -> list[float | int | None] |
|
||||
saturation = int(saturation / 100 * 65535)
|
||||
kelvin = 3500
|
||||
|
||||
if ATTR_COLOR_TEMP_KELVIN not in kwargs and _ATTR_COLOR_TEMP in kwargs:
|
||||
# added in 2025.1, can be removed in 2026.1
|
||||
_LOGGER.warning(
|
||||
"The 'color_temp' parameter is deprecated. Please use 'color_temp_kelvin' for"
|
||||
" all service calls"
|
||||
)
|
||||
kelvin = color_util.color_temperature_mired_to_kelvin(
|
||||
kwargs.pop(_ATTR_COLOR_TEMP)
|
||||
)
|
||||
saturation = 0
|
||||
|
||||
if ATTR_COLOR_TEMP_KELVIN in kwargs:
|
||||
kelvin = kwargs.pop(ATTR_COLOR_TEMP_KELVIN)
|
||||
saturation = 0
|
||||
|
||||
@@ -33,6 +33,7 @@ from .const import ( # noqa: F401
|
||||
CONF_ALLOW_SINGLE_WORD,
|
||||
CONF_ICON,
|
||||
CONF_REQUIRE_ADMIN,
|
||||
CONF_RESOURCE_MODE,
|
||||
CONF_SHOW_IN_SIDEBAR,
|
||||
CONF_TITLE,
|
||||
CONF_URL_PATH,
|
||||
@@ -61,7 +62,7 @@ def _validate_url_slug(value: Any) -> str:
|
||||
"""Validate value is a valid url slug."""
|
||||
if value is None:
|
||||
raise vol.Invalid("Slug should not be None")
|
||||
if "-" not in value:
|
||||
if value != "lovelace" and "-" not in value:
|
||||
raise vol.Invalid("Url path needs to contain a hyphen (-)")
|
||||
str_value = str(value)
|
||||
slg = slugify(str_value, separator="-")
|
||||
@@ -84,9 +85,13 @@ CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Optional(DOMAIN, default={}): vol.Schema(
|
||||
{
|
||||
# Deprecated - Remove in 2026.8
|
||||
vol.Optional(CONF_MODE, default=MODE_STORAGE): vol.All(
|
||||
vol.Lower, vol.In([MODE_YAML, MODE_STORAGE])
|
||||
),
|
||||
vol.Optional(CONF_RESOURCE_MODE): vol.All(
|
||||
vol.Lower, vol.In([MODE_YAML, MODE_STORAGE])
|
||||
),
|
||||
vol.Optional(CONF_DASHBOARDS): cv.schema_with_slug_keys(
|
||||
YAML_DASHBOARD_SCHEMA,
|
||||
slug_validator=_validate_url_slug,
|
||||
@@ -103,7 +108,7 @@ CONFIG_SCHEMA = vol.Schema(
|
||||
class LovelaceData:
|
||||
"""Dataclass to store information in hass.data."""
|
||||
|
||||
mode: str
|
||||
resource_mode: str # The mode used for resources (yaml or storage)
|
||||
dashboards: dict[str | None, dashboard.LovelaceConfig]
|
||||
resources: resources.ResourceYAMLCollection | resources.ResourceStorageCollection
|
||||
yaml_dashboards: dict[str | None, ConfigType]
|
||||
@@ -114,18 +119,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
mode = config[DOMAIN][CONF_MODE]
|
||||
yaml_resources = config[DOMAIN].get(CONF_RESOURCES)
|
||||
|
||||
# Deprecated - Remove in 2026.8
|
||||
# For YAML mode, register the default panel in yaml mode (temporary until user migrates)
|
||||
if mode == MODE_YAML:
|
||||
frontend.async_register_built_in_panel(
|
||||
hass,
|
||||
DOMAIN,
|
||||
config={"mode": mode},
|
||||
sidebar_title="overview",
|
||||
sidebar_icon="mdi:view-dashboard",
|
||||
sidebar_default_visible=False,
|
||||
)
|
||||
_async_create_yaml_mode_repair(hass)
|
||||
# resource_mode controls how resources are loaded (yaml vs storage)
|
||||
# Deprecated - Remove mode fallback in 2026.8
|
||||
resource_mode = config[DOMAIN].get(CONF_RESOURCE_MODE, mode)
|
||||
|
||||
async def reload_resources_service_handler(service_call: ServiceCall) -> None:
|
||||
"""Reload yaml resources."""
|
||||
@@ -149,12 +145,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
)
|
||||
hass.data[LOVELACE_DATA].resources = resource_collection
|
||||
|
||||
default_config: dashboard.LovelaceConfig
|
||||
resource_collection: (
|
||||
resources.ResourceYAMLCollection | resources.ResourceStorageCollection
|
||||
)
|
||||
if mode == MODE_YAML:
|
||||
default_config = dashboard.LovelaceYAML(hass, None, None)
|
||||
default_config = dashboard.LovelaceStorage(hass, None)
|
||||
|
||||
# Load resources based on resource_mode
|
||||
if resource_mode == MODE_YAML:
|
||||
resource_collection = await create_yaml_resource_col(hass, yaml_resources)
|
||||
|
||||
async_register_admin_service(
|
||||
@@ -177,8 +174,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
)
|
||||
|
||||
else:
|
||||
default_config = dashboard.LovelaceStorage(hass, None)
|
||||
|
||||
if yaml_resources is not None:
|
||||
_LOGGER.warning(
|
||||
"Lovelace is running in storage mode. Define resources via user"
|
||||
@@ -195,18 +190,44 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
RESOURCE_UPDATE_FIELDS,
|
||||
).async_setup(hass)
|
||||
|
||||
websocket_api.async_register_command(hass, websocket.websocket_lovelace_info)
|
||||
websocket_api.async_register_command(hass, websocket.websocket_lovelace_config)
|
||||
websocket_api.async_register_command(hass, websocket.websocket_lovelace_save_config)
|
||||
websocket_api.async_register_command(
|
||||
hass, websocket.websocket_lovelace_delete_config
|
||||
)
|
||||
|
||||
yaml_dashboards = config[DOMAIN].get(CONF_DASHBOARDS, {})
|
||||
|
||||
# Deprecated - Remove in 2026.8
|
||||
# For YAML mode, add the default "lovelace" dashboard if not already defined
|
||||
# This migrates the legacy yaml mode to a proper yaml dashboard entry
|
||||
if mode == MODE_YAML and DOMAIN not in yaml_dashboards:
|
||||
translations = await async_get_translations(
|
||||
hass, hass.config.language, "dashboard", {onboarding.DOMAIN}
|
||||
)
|
||||
title = translations.get(
|
||||
"component.onboarding.dashboard.overview.title", "Overview"
|
||||
)
|
||||
yaml_dashboards = {
|
||||
DOMAIN: {
|
||||
CONF_TITLE: title,
|
||||
CONF_ICON: DEFAULT_ICON,
|
||||
CONF_SHOW_IN_SIDEBAR: True,
|
||||
CONF_REQUIRE_ADMIN: False,
|
||||
CONF_MODE: MODE_YAML,
|
||||
CONF_FILENAME: LOVELACE_CONFIG_FILE,
|
||||
},
|
||||
**yaml_dashboards,
|
||||
}
|
||||
_async_create_yaml_mode_repair(hass)
|
||||
|
||||
hass.data[LOVELACE_DATA] = LovelaceData(
|
||||
mode=mode,
|
||||
resource_mode=resource_mode,
|
||||
# We store a dictionary mapping url_path: config. None is the default.
|
||||
dashboards={None: default_config},
|
||||
resources=resource_collection,
|
||||
yaml_dashboards=config[DOMAIN].get(CONF_DASHBOARDS, {}),
|
||||
yaml_dashboards=yaml_dashboards,
|
||||
)
|
||||
|
||||
if hass.config.recovery_mode:
|
||||
@@ -450,7 +471,7 @@ async def _async_migrate_default_config(
|
||||
# Deprecated - Remove in 2026.8
|
||||
@callback
|
||||
def _async_create_yaml_mode_repair(hass: HomeAssistant) -> None:
|
||||
"""Create repair issue for YAML mode migration."""
|
||||
"""Create repair issue for YAML mode deprecation."""
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
|
||||
@@ -158,7 +158,15 @@ async def _get_dashboard_info(
|
||||
"""Load a dashboard and return info on views."""
|
||||
if url_path == DEFAULT_DASHBOARD:
|
||||
url_path = None
|
||||
dashboard = hass.data[LOVELACE_DATA].dashboards.get(url_path)
|
||||
|
||||
# When url_path is None, prefer "lovelace" dashboard if it exists (for YAML mode)
|
||||
# Otherwise fall back to dashboards[None] (storage mode default)
|
||||
if url_path is None:
|
||||
dashboard = hass.data[LOVELACE_DATA].dashboards.get(DOMAIN) or hass.data[
|
||||
LOVELACE_DATA
|
||||
].dashboards.get(None)
|
||||
else:
|
||||
dashboard = hass.data[LOVELACE_DATA].dashboards.get(url_path)
|
||||
|
||||
if dashboard is None:
|
||||
raise ValueError("Invalid dashboard specified")
|
||||
|
||||
@@ -57,6 +57,7 @@ RESOURCE_UPDATE_FIELDS: VolDictType = {
|
||||
SERVICE_RELOAD_RESOURCES = "reload_resources"
|
||||
RESOURCE_RELOAD_SERVICE_SCHEMA = vol.Schema({})
|
||||
|
||||
CONF_RESOURCE_MODE = "resource_mode"
|
||||
CONF_TITLE = "title"
|
||||
CONF_REQUIRE_ADMIN = "require_admin"
|
||||
CONF_SHOW_IN_SIDEBAR = "show_in_sidebar"
|
||||
|
||||
@@ -6,8 +6,8 @@
|
||||
},
|
||||
"issues": {
|
||||
"yaml_mode_deprecated": {
|
||||
"description": "Starting with Home Assistant 2026.8, the default Lovelace dashboard will no longer support YAML mode. To migrate:\n\n1. Remove `mode: yaml` from `lovelace:` in your `configuration.yaml`\n2. Rename `{config_file}` to a new filename (e.g., `my-dashboard.yaml`)\n3. Add a dashboard entry in your `configuration.yaml`:\n\n```yaml\nlovelace:\n dashboards:\n lovelace:\n mode: yaml\n filename: my-dashboard.yaml\n title: Overview\n icon: mdi:view-dashboard\n show_in_sidebar: true\n```\n\n4. Restart Home Assistant",
|
||||
"title": "Lovelace YAML mode migration required"
|
||||
"description": "The `mode` option in `lovelace:` configuration is deprecated and will be removed in Home Assistant 2026.8.\n\nTo migrate:\n\n1. Remove `mode: yaml` from `lovelace:` in your `configuration.yaml`\n2. If you have `resources:` declared in your lovelace configuration, add `resource_mode: yaml` to keep loading resources from YAML\n3. Add a dashboard entry in your `configuration.yaml`:\n\n ```yaml\n lovelace:\n resource_mode: yaml # Add this if you have resources declared\n dashboards:\n lovelace:\n mode: yaml\n filename: {config_file}\n title: Overview\n icon: mdi:view-dashboard\n show_in_sidebar: true\n ```\n\n4. Restart Home Assistant",
|
||||
"title": "Lovelace YAML mode deprecated"
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
|
||||
@@ -42,9 +42,7 @@ async def system_health_info(hass: HomeAssistant) -> dict[str, Any]:
|
||||
else:
|
||||
health_info[key] = dashboard[key]
|
||||
|
||||
if hass.data[LOVELACE_DATA].mode == MODE_YAML:
|
||||
health_info[CONF_MODE] = MODE_YAML
|
||||
elif MODE_STORAGE in modes:
|
||||
if MODE_STORAGE in modes:
|
||||
health_info[CONF_MODE] = MODE_STORAGE
|
||||
elif MODE_YAML in modes:
|
||||
health_info[CONF_MODE] = MODE_YAML
|
||||
|
||||
@@ -14,7 +14,13 @@ from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.json import json_fragment
|
||||
|
||||
from .const import CONF_URL_PATH, LOVELACE_DATA, ConfigNotFound
|
||||
from .const import (
|
||||
CONF_RESOURCE_MODE,
|
||||
CONF_URL_PATH,
|
||||
DOMAIN,
|
||||
LOVELACE_DATA,
|
||||
ConfigNotFound,
|
||||
)
|
||||
from .dashboard import LovelaceConfig
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@@ -38,7 +44,15 @@ def _handle_errors[_R](
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
url_path = msg.get(CONF_URL_PATH)
|
||||
config = hass.data[LOVELACE_DATA].dashboards.get(url_path)
|
||||
|
||||
# When url_path is None, prefer "lovelace" dashboard if it exists (for YAML mode)
|
||||
# Otherwise fall back to dashboards[None] (storage mode default)
|
||||
if url_path is None:
|
||||
config = hass.data[LOVELACE_DATA].dashboards.get(DOMAIN) or hass.data[
|
||||
LOVELACE_DATA
|
||||
].dashboards.get(None)
|
||||
else:
|
||||
config = hass.data[LOVELACE_DATA].dashboards.get(url_path)
|
||||
|
||||
if config is None:
|
||||
connection.send_error(
|
||||
@@ -100,6 +114,20 @@ async def websocket_lovelace_resources_impl(
|
||||
connection.send_result(msg["id"], resources.async_items())
|
||||
|
||||
|
||||
@websocket_api.websocket_command({"type": "lovelace/info"})
|
||||
@websocket_api.async_response
|
||||
async def websocket_lovelace_info(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Send Lovelace UI info over WebSocket connection."""
|
||||
connection.send_result(
|
||||
msg["id"],
|
||||
{CONF_RESOURCE_MODE: hass.data[LOVELACE_DATA].resource_mode},
|
||||
)
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
"type": "lovelace/config",
|
||||
|
||||
@@ -24,7 +24,7 @@ from .const import DOMAIN, MEDIA_CLASS_MAP, MEDIA_MIME_TYPES, MEDIA_SOURCE_DATA
|
||||
from .error import Unresolvable
|
||||
from .models import BrowseMediaSource, MediaSource, MediaSourceItem, PlayMedia
|
||||
|
||||
MAX_UPLOAD_SIZE = 1024 * 1024 * 10
|
||||
MAX_UPLOAD_SIZE = 1024 * 1024 * 20
|
||||
LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
|
||||
@@ -11,6 +11,7 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA,
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorStateClass,
|
||||
)
|
||||
@@ -25,7 +26,9 @@ from homeassistant.const import (
|
||||
STATE_UNKNOWN,
|
||||
)
|
||||
from homeassistant.core import Event, EventStateChangedData, HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv, entity_registry as er
|
||||
from homeassistant.helpers.entity import get_device_class
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
@@ -259,6 +262,7 @@ class MinMaxSensor(SensorEntity):
|
||||
)
|
||||
self._async_min_max_sensor_state_listener(state_event, update_state=False)
|
||||
|
||||
self._update_device_class()
|
||||
self._calc_values()
|
||||
|
||||
@property
|
||||
@@ -345,6 +349,32 @@ class MinMaxSensor(SensorEntity):
|
||||
self._calc_values()
|
||||
self.async_write_ha_state()
|
||||
|
||||
@callback
|
||||
def _update_device_class(self) -> None:
|
||||
"""Update device_class based on source entities.
|
||||
|
||||
If all source entities have the same device_class, inherit it.
|
||||
Otherwise, leave device_class as None.
|
||||
"""
|
||||
device_classes: list[SensorDeviceClass | None] = []
|
||||
|
||||
for entity_id in self._entity_ids:
|
||||
try:
|
||||
device_class = get_device_class(self.hass, entity_id)
|
||||
if device_class:
|
||||
device_classes.append(SensorDeviceClass(device_class))
|
||||
else:
|
||||
device_classes.append(None)
|
||||
except (HomeAssistantError, ValueError):
|
||||
# If we can't get device class for any entity, don't set it
|
||||
device_classes.append(None)
|
||||
|
||||
# Only inherit device_class if all entities have the same non-None device_class
|
||||
if device_classes and all(
|
||||
dc is not None and dc == device_classes[0] for dc in device_classes
|
||||
):
|
||||
self._attr_device_class = device_classes[0]
|
||||
|
||||
@callback
|
||||
def _calc_values(self) -> None:
|
||||
"""Calculate the values."""
|
||||
|
||||
@@ -34,7 +34,7 @@
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"domain": "[%key:common::config_flow::data::username%]",
|
||||
"domain": "Domain",
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"password": "Dynamic DNS password"
|
||||
},
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/nibe_heatpump",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["nibe==2.21.0"]
|
||||
"requirements": ["nibe==2.22.0"]
|
||||
}
|
||||
|
||||
@@ -594,7 +594,8 @@ UNIT_CONVERTERS: dict[NumberDeviceClass, type[BaseUnitConverter]] = {
|
||||
}
|
||||
|
||||
# We translate units that were using using the legacy coding of μ \u00b5
|
||||
# to units using recommended coding of μ \u03bc
|
||||
# to units using recommended coding of μ \u03bc and
|
||||
# we convert alternative accepted units to the preferred unit.
|
||||
AMBIGUOUS_UNITS: dict[str | None, str] = {
|
||||
"\u00b5Sv/h": "μSv/h", # aranet: radiation rate
|
||||
"\u00b5S/cm": UnitOfConductivity.MICROSIEMENS_PER_CM,
|
||||
@@ -604,4 +605,9 @@ AMBIGUOUS_UNITS: dict[str | None, str] = {
|
||||
"\u00b5mol/s⋅m²": "μmol/s⋅m²", # fyta: light
|
||||
"\u00b5g": UnitOfMass.MICROGRAMS,
|
||||
"\u00b5s": UnitOfTime.MICROSECONDS,
|
||||
"mVAr": UnitOfReactivePower.MILLIVOLT_AMPERE_REACTIVE,
|
||||
"VAr": UnitOfReactivePower.VOLT_AMPERE_REACTIVE,
|
||||
"kVAr": UnitOfReactivePower.KILO_VOLT_AMPERE_REACTIVE,
|
||||
"VArh": UnitOfReactiveEnergy.VOLT_AMPERE_REACTIVE_HOUR,
|
||||
"kVArh": UnitOfReactiveEnergy.KILO_VOLT_AMPERE_REACTIVE_HOUR,
|
||||
}
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["openevsehttp"],
|
||||
"quality_scale": "legacy",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["python-openevse-http==0.2.1"],
|
||||
"zeroconf": ["_openevse._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -25,6 +25,8 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
from .const import DOMAIN
|
||||
from .coordinator import OpenEVSEConfigEntry, OpenEVSEDataUpdateCoordinator
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class OpenEVSENumberDescription(NumberEntityDescription):
|
||||
|
||||
74
homeassistant/components/openevse/quality_scale.yaml
Normal file
74
homeassistant/components/openevse/quality_scale.yaml
Normal file
@@ -0,0 +1,74 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: Integration does not register custom actions.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: Integration does not register custom actions.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: Integration does not subscribe to events.
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: todo
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: exempt
|
||||
comment: Integration has no options flow.
|
||||
docs-installation-parameters: todo
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow: todo
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery: done
|
||||
discovery-update-info: done
|
||||
docs-data-update: todo
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: todo
|
||||
docs-supported-functions: todo
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: Integration supports a single device per config entry.
|
||||
entity-category: todo
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
status: done
|
||||
comment: Integration creates repair issues for YAML deprecation.
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: Integration supports a single device per config entry.
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: todo
|
||||
strict-typing: todo
|
||||
@@ -15,8 +15,12 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_create_clientsession
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import PortainerCoordinator
|
||||
from .services import async_setup_services
|
||||
|
||||
_PLATFORMS: list[Platform] = [
|
||||
Platform.BINARY_SENSOR,
|
||||
@@ -25,6 +29,7 @@ _PLATFORMS: list[Platform] = [
|
||||
Platform.BUTTON,
|
||||
]
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
type PortainerConfigEntry = ConfigEntry[PortainerCoordinator]
|
||||
|
||||
@@ -49,6 +54,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: PortainerConfigEntry) ->
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Portainer integration."""
|
||||
await async_setup_services(hass)
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: PortainerConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
DOMAIN = "portainer"
|
||||
DEFAULT_NAME = "Portainer"
|
||||
|
||||
|
||||
ENDPOINT_STATUS_DOWN = 2
|
||||
|
||||
CONTAINER_STATE_RUNNING = "running"
|
||||
|
||||
@@ -67,5 +67,10 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"prune_images": {
|
||||
"service": "mdi:delete-sweep"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,10 +7,7 @@ rules:
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: |
|
||||
No custom actions are defined.
|
||||
docs-actions: done
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
@@ -33,10 +30,7 @@ rules:
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates:
|
||||
status: exempt
|
||||
comment: |
|
||||
No explicit parallel updates are defined.
|
||||
parallel-updates: todo
|
||||
reauthentication-flow:
|
||||
status: todo
|
||||
comment: |
|
||||
|
||||
115
homeassistant/components/portainer/services.py
Normal file
115
homeassistant/components/portainer/services.py
Normal file
@@ -0,0 +1,115 @@
|
||||
"""Services for the Portainer integration."""
|
||||
|
||||
from datetime import timedelta
|
||||
|
||||
from pyportainer import (
|
||||
PortainerAuthenticationError,
|
||||
PortainerConnectionError,
|
||||
PortainerTimeoutError,
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import ATTR_DEVICE_ID
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
from homeassistant.helpers.service import async_extract_config_entry_ids
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import PortainerConfigEntry
|
||||
|
||||
ATTR_DATE_UNTIL = "until"
|
||||
ATTR_DANGLING = "dangling"
|
||||
|
||||
SERVICE_PRUNE_IMAGES = "prune_images"
|
||||
SERVICE_PRUNE_IMAGES_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_DEVICE_ID): cv.string,
|
||||
vol.Optional(ATTR_DATE_UNTIL): vol.All(
|
||||
cv.time_period, vol.Range(min=timedelta(minutes=1))
|
||||
),
|
||||
vol.Optional(ATTR_DANGLING): cv.boolean,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
async def _extract_config_entry(service_call: ServiceCall) -> PortainerConfigEntry:
|
||||
"""Extract config entry from the service call."""
|
||||
target_entry_ids = await async_extract_config_entry_ids(service_call)
|
||||
target_entries: list[PortainerConfigEntry] = [
|
||||
loaded_entry
|
||||
for loaded_entry in service_call.hass.config_entries.async_loaded_entries(
|
||||
DOMAIN
|
||||
)
|
||||
if loaded_entry.entry_id in target_entry_ids
|
||||
]
|
||||
if not target_entries:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_target",
|
||||
)
|
||||
return target_entries[0]
|
||||
|
||||
|
||||
async def _get_endpoint_id(
|
||||
call: ServiceCall,
|
||||
config_entry: PortainerConfigEntry,
|
||||
) -> int:
|
||||
"""Get endpoint data from device ID."""
|
||||
device_reg = dr.async_get(call.hass)
|
||||
device_id = call.data[ATTR_DEVICE_ID]
|
||||
device = device_reg.async_get(device_id)
|
||||
assert device
|
||||
coordinator = config_entry.runtime_data
|
||||
|
||||
endpoint_data = None
|
||||
for data in coordinator.data.values():
|
||||
if (
|
||||
DOMAIN,
|
||||
f"{config_entry.entry_id}_{data.endpoint.id}",
|
||||
) in device.identifiers:
|
||||
endpoint_data = data
|
||||
break
|
||||
|
||||
assert endpoint_data
|
||||
return endpoint_data.endpoint.id
|
||||
|
||||
|
||||
async def prune_images(call: ServiceCall) -> None:
|
||||
"""Prune unused images in Portainer, with more controls."""
|
||||
config_entry = await _extract_config_entry(call)
|
||||
coordinator = config_entry.runtime_data
|
||||
endpoint_id = await _get_endpoint_id(call, config_entry)
|
||||
|
||||
try:
|
||||
await coordinator.portainer.images_prune(
|
||||
endpoint_id=endpoint_id,
|
||||
until=call.data.get(ATTR_DATE_UNTIL),
|
||||
dangling=call.data.get(ATTR_DANGLING, False),
|
||||
)
|
||||
except PortainerAuthenticationError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_auth_no_details",
|
||||
) from err
|
||||
except PortainerConnectionError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="cannot_connect_no_details",
|
||||
) from err
|
||||
except PortainerTimeoutError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="timeout_connect_no_details",
|
||||
) from err
|
||||
|
||||
|
||||
async def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up services."""
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_PRUNE_IMAGES,
|
||||
prune_images,
|
||||
SERVICE_PRUNE_IMAGES_SCHEMA,
|
||||
)
|
||||
18
homeassistant/components/portainer/services.yaml
Normal file
18
homeassistant/components/portainer/services.yaml
Normal file
@@ -0,0 +1,18 @@
|
||||
# Services for Portainer
|
||||
|
||||
prune_images:
|
||||
fields:
|
||||
device_id:
|
||||
required: true
|
||||
selector:
|
||||
device:
|
||||
integration: portainer
|
||||
model: Endpoint
|
||||
until:
|
||||
required: false
|
||||
selector:
|
||||
duration:
|
||||
dangling:
|
||||
required: false
|
||||
selector:
|
||||
boolean: {}
|
||||
@@ -155,11 +155,34 @@
|
||||
"invalid_auth_no_details": {
|
||||
"message": "An error occurred while trying to authenticate."
|
||||
},
|
||||
"invalid_target": {
|
||||
"message": "Invalid device targeted."
|
||||
},
|
||||
"timeout_connect": {
|
||||
"message": "A timeout occurred while trying to connect to the Portainer instance: {error}"
|
||||
},
|
||||
"timeout_connect_no_details": {
|
||||
"message": "A timeout occurred while trying to connect to the Portainer instance."
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"prune_images": {
|
||||
"description": "Prunes unused images on a Portainer endpoint.",
|
||||
"fields": {
|
||||
"dangling": {
|
||||
"description": "If true, only prune dangling images.",
|
||||
"name": "Dangling"
|
||||
},
|
||||
"device_id": {
|
||||
"description": "The endpoint to prune images on.",
|
||||
"name": "Endpoint"
|
||||
},
|
||||
"until": {
|
||||
"description": "Prune images unused for at least this time duration in the past. If not provided, all unused images will be pruned.",
|
||||
"name": "Until"
|
||||
}
|
||||
},
|
||||
"name": "Prune unused images"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -384,11 +384,7 @@ class PrometheusMetrics:
|
||||
if event.data["action"] != "update" or "area_id" not in event.data["changes"]:
|
||||
return
|
||||
|
||||
device_id = event.data.get("device_id")
|
||||
|
||||
if device_id is None:
|
||||
return
|
||||
|
||||
device_id = event.data["device_id"]
|
||||
_LOGGER.debug("Handling device update for %s", device_id)
|
||||
|
||||
device = self.device_registry.async_get(device_id)
|
||||
|
||||
@@ -4,15 +4,18 @@ from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from homeassistant.components.calendar import CalendarEntity, CalendarEvent
|
||||
from homeassistant.components.calendar import (
|
||||
CalendarEntity,
|
||||
CalendarEntityDescription,
|
||||
CalendarEvent,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import CalendarUpdateCoordinator, RadarrConfigEntry, RadarrEvent
|
||||
from .entity import RadarrEntity
|
||||
|
||||
CALENDAR_TYPE = EntityDescription(
|
||||
CALENDAR_TYPE = CalendarEntityDescription(
|
||||
key="calendar",
|
||||
name=None,
|
||||
)
|
||||
|
||||
@@ -4,7 +4,6 @@ from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from concurrent.futures.thread import _threads_queues, _worker
|
||||
import sys
|
||||
import threading
|
||||
from typing import Any
|
||||
import weakref
|
||||
@@ -54,17 +53,10 @@ class DBInterruptibleThreadPoolExecutor(InterruptibleThreadPoolExecutor):
|
||||
) -> None:
|
||||
q.put(None)
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
additional_args = (
|
||||
self._create_worker_context(),
|
||||
self._work_queue,
|
||||
)
|
||||
else:
|
||||
additional_args = (
|
||||
self._work_queue,
|
||||
self._initializer,
|
||||
self._initargs,
|
||||
)
|
||||
additional_args = (
|
||||
self._create_worker_context(),
|
||||
self._work_queue,
|
||||
)
|
||||
|
||||
num_threads = len(self._threads)
|
||||
if num_threads < self._max_workers:
|
||||
|
||||
@@ -19,7 +19,7 @@
|
||||
"data_description": {
|
||||
"calendar_name": "The name of the calendar shown in the UI.",
|
||||
"url": "The URL of the remote calendar.",
|
||||
"verify_ssl": "Enable SSL certificate verification for secure connections."
|
||||
"verify_ssl": "[%key:common::config_flow::description::verify_ssl%]"
|
||||
},
|
||||
"description": "Please choose a name for the calendar to be imported"
|
||||
}
|
||||
|
||||
@@ -12,6 +12,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import RenaultConfigEntry
|
||||
from .entity import RenaultEntity
|
||||
from .renault_vehicle import RenaultVehicleProxy
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
# but renault servers are unreliable and it's safer to queue action calls
|
||||
@@ -23,7 +24,7 @@ class RenaultButtonEntityDescription(ButtonEntityDescription):
|
||||
"""Class describing Renault button entities."""
|
||||
|
||||
async_press: Callable[[RenaultButtonEntity], Coroutine[Any, Any, Any]]
|
||||
requires_electricity: bool = False
|
||||
is_supported: Callable[[RenaultVehicleProxy], bool]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -36,7 +37,7 @@ async def async_setup_entry(
|
||||
RenaultButtonEntity(vehicle, description)
|
||||
for vehicle in config_entry.runtime_data.vehicles.values()
|
||||
for description in BUTTON_TYPES
|
||||
if not description.requires_electricity or vehicle.details.uses_electricity()
|
||||
if description.is_supported(vehicle)
|
||||
]
|
||||
async_add_entities(entities)
|
||||
|
||||
@@ -55,18 +56,27 @@ BUTTON_TYPES: tuple[RenaultButtonEntityDescription, ...] = (
|
||||
RenaultButtonEntityDescription(
|
||||
async_press=lambda x: x.vehicle.set_ac_start(21, None),
|
||||
key="start_air_conditioner",
|
||||
is_supported=lambda vehicle: (
|
||||
vehicle.details.supports_endpoint("actions/hvac-start")
|
||||
),
|
||||
translation_key="start_air_conditioner",
|
||||
),
|
||||
RenaultButtonEntityDescription(
|
||||
async_press=lambda x: x.vehicle.set_charge_start(),
|
||||
key="start_charge",
|
||||
requires_electricity=True,
|
||||
is_supported=lambda vehicle: (
|
||||
vehicle.details.supports_endpoint("actions/charge-start")
|
||||
and vehicle.details.uses_electricity()
|
||||
),
|
||||
translation_key="start_charge",
|
||||
),
|
||||
RenaultButtonEntityDescription(
|
||||
async_press=lambda x: x.vehicle.set_charge_stop(),
|
||||
key="stop_charge",
|
||||
requires_electricity=True,
|
||||
is_supported=lambda vehicle: (
|
||||
vehicle.details.supports_endpoint("actions/charge-stop")
|
||||
and vehicle.details.uses_electricity()
|
||||
),
|
||||
translation_key="stop_charge",
|
||||
),
|
||||
)
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["renault_api"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["renault-api==0.5.2"]
|
||||
"requirements": ["renault-api==0.5.3"]
|
||||
}
|
||||
|
||||
@@ -840,7 +840,8 @@ STATE_CLASS_UNITS: dict[SensorStateClass | str, set[type[StrEnum] | str | None]]
|
||||
}
|
||||
|
||||
# We translate units that were using using the legacy coding of μ \u00b5
|
||||
# to units using recommended coding of μ \u03bc
|
||||
# to units using recommended coding of μ \u03bc and
|
||||
# we convert alternative accepted units to the preferred unit.
|
||||
AMBIGUOUS_UNITS: dict[str | None, str] = {
|
||||
"\u00b5Sv/h": "μSv/h", # aranet: radiation rate
|
||||
"\u00b5S/cm": UnitOfConductivity.MICROSIEMENS_PER_CM,
|
||||
@@ -850,4 +851,9 @@ AMBIGUOUS_UNITS: dict[str | None, str] = {
|
||||
"\u00b5mol/s⋅m²": "μmol/s⋅m²", # fyta: light
|
||||
"\u00b5g": UnitOfMass.MICROGRAMS,
|
||||
"\u00b5s": UnitOfTime.MICROSECONDS,
|
||||
"mVAr": UnitOfReactivePower.MILLIVOLT_AMPERE_REACTIVE,
|
||||
"VAr": UnitOfReactivePower.VOLT_AMPERE_REACTIVE,
|
||||
"kVAr": UnitOfReactivePower.KILO_VOLT_AMPERE_REACTIVE,
|
||||
"VArh": UnitOfReactiveEnergy.VOLT_AMPERE_REACTIVE_HOUR,
|
||||
"kVArh": UnitOfReactiveEnergy.KILO_VOLT_AMPERE_REACTIVE_HOUR,
|
||||
}
|
||||
|
||||
@@ -233,7 +233,7 @@ async def _async_setup_block_entry(
|
||||
await hass.config_entries.async_forward_entry_setups(
|
||||
entry, runtime_data.platforms
|
||||
)
|
||||
async_manage_coiot_unconfigured_issue(hass, entry)
|
||||
await async_manage_coiot_unconfigured_issue(hass, entry)
|
||||
remove_empty_sub_devices(hass, entry)
|
||||
elif (
|
||||
sleep_period is None
|
||||
|
||||
@@ -162,8 +162,7 @@ def async_manage_outbound_websocket_incorrectly_enabled_issue(
|
||||
ir.async_delete_issue(hass, DOMAIN, issue_id)
|
||||
|
||||
|
||||
@callback
|
||||
def async_manage_coiot_unconfigured_issue(
|
||||
async def async_manage_coiot_unconfigured_issue(
|
||||
hass: HomeAssistant,
|
||||
entry: ShellyConfigEntry,
|
||||
) -> None:
|
||||
@@ -183,10 +182,10 @@ def async_manage_coiot_unconfigured_issue(
|
||||
coiot_config = device.settings["coiot"]
|
||||
coiot_enabled = coiot_config.get("enabled")
|
||||
|
||||
coiot_peer = f"{await get_coiot_address(hass)}:{get_coiot_port(hass)}"
|
||||
# Check if CoIoT is disabled or peer address is not correctly set
|
||||
if not coiot_enabled or (
|
||||
(peer_config := coiot_config.get("peer"))
|
||||
and peer_config != get_coiot_address(hass)
|
||||
(peer_config := coiot_config.get("peer")) and peer_config != coiot_peer
|
||||
):
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
@@ -275,7 +274,7 @@ class CoiotConfigureFlow(ShellyBlockRepairsFlow):
|
||||
self, user_input: dict[str, str] | None = None
|
||||
) -> data_entry_flow.FlowResult:
|
||||
"""Handle the confirm step of a fix flow."""
|
||||
coiot_addr = get_coiot_address(self.hass)
|
||||
coiot_addr = await get_coiot_address(self.hass)
|
||||
coiot_port = get_coiot_port(self.hass)
|
||||
if coiot_addr is None or coiot_port is None:
|
||||
return self.async_abort(reason="cannot_configure")
|
||||
|
||||
@@ -29,6 +29,7 @@ from yarl import URL
|
||||
|
||||
from homeassistant.components import network
|
||||
from homeassistant.components.http import HomeAssistantView
|
||||
from homeassistant.components.network import async_get_source_ip
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
@@ -260,7 +261,7 @@ async def get_coap_context(hass: HomeAssistant) -> COAP:
|
||||
ipv4: list[IPv4Address] = []
|
||||
if not network.async_only_default_interface_enabled(adapters):
|
||||
ipv4.extend(
|
||||
address
|
||||
cast(IPv4Address, address)
|
||||
for address in await network.async_get_enabled_source_ips(hass)
|
||||
if address.version == 4
|
||||
and not (
|
||||
@@ -732,12 +733,12 @@ def _get_homeassistant_url(hass: HomeAssistant) -> URL | None:
|
||||
return URL(raw_url)
|
||||
|
||||
|
||||
def get_coiot_address(hass: HomeAssistant) -> str | None:
|
||||
async def get_coiot_address(hass: HomeAssistant) -> str | None:
|
||||
"""Return the CoIoT ip address."""
|
||||
url = _get_homeassistant_url(hass)
|
||||
if url is None:
|
||||
if url is None or url.host is None:
|
||||
return None
|
||||
return str(url.host)
|
||||
return await async_get_source_ip(hass, url.host)
|
||||
|
||||
|
||||
def get_rpc_ws_url(hass: HomeAssistant) -> str | None:
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user