mirror of
https://github.com/home-assistant/core.git
synced 2026-04-20 08:29:39 +02:00
Compare commits
100 Commits
rainbird-6
...
gj-2025110
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
dc991eec09 | ||
|
|
08097c67eb | ||
|
|
550e53d192 | ||
|
|
09ee76c265 | ||
|
|
f7b2f5e8f1 | ||
|
|
a1414717ad | ||
|
|
2f0889ac02 | ||
|
|
323b3a4d96 | ||
|
|
8aa0e9f6c3 | ||
|
|
906475249c | ||
|
|
354b5860bb | ||
|
|
74957969f7 | ||
|
|
b52ce22ee7 | ||
|
|
920ffdb9b5 | ||
|
|
4a454dff02 | ||
|
|
481eb66bc5 | ||
|
|
b76627a442 | ||
|
|
1aa214fb61 | ||
|
|
6e30de3a1c | ||
|
|
2f0488f985 | ||
|
|
61c02c854f | ||
|
|
a10f16ce3e | ||
|
|
293db47101 | ||
|
|
c9cdbcc3db | ||
|
|
856d363ca8 | ||
|
|
cb71628ee2 | ||
|
|
f34301f236 | ||
|
|
554b906788 | ||
|
|
d5ffd7f37a | ||
|
|
17fbb9909c | ||
|
|
bd4ac7993f | ||
|
|
eaba7b0e48 | ||
|
|
2a8551138d | ||
|
|
7586fe6dec | ||
|
|
74a6f781a1 | ||
|
|
4dba27f15e | ||
|
|
298b9b962f | ||
|
|
f6fb6f40dd | ||
|
|
12ee952a97 | ||
|
|
9d6a335127 | ||
|
|
e1fa894572 | ||
|
|
43c83cc850 | ||
|
|
7f27915825 | ||
|
|
54a63d2c3e | ||
|
|
9709fbd6c6 | ||
|
|
12abff5b9e | ||
|
|
9b2abb0acc | ||
|
|
e7bc593fa8 | ||
|
|
2a4b8c88a8 | ||
|
|
a6c66a86ee | ||
|
|
6dfcc1c4f6 | ||
|
|
dfb61ee881 | ||
|
|
4776b99f5f | ||
|
|
49f5557947 | ||
|
|
4ed9113e35 | ||
|
|
e2688f909b | ||
|
|
9e1c521fed | ||
|
|
73fbc87639 | ||
|
|
8986477c96 | ||
|
|
ca40d68417 | ||
|
|
d2539ccaf2 | ||
|
|
56e7b8ddbb | ||
|
|
ca5aa215d2 | ||
|
|
d3ca5132fc | ||
|
|
f1d309779e | ||
|
|
85fa2415c1 | ||
|
|
ddc00f6924 | ||
|
|
2216fcccc7 | ||
|
|
6212d548b8 | ||
|
|
bf12323782 | ||
|
|
8b9ba690f1 | ||
|
|
3c7c0091f2 | ||
|
|
7bea4a53e2 | ||
|
|
50d9109e5f | ||
|
|
6d80b3769a | ||
|
|
9ff97ecd7b | ||
|
|
34dc52c732 | ||
|
|
67243a5044 | ||
|
|
74770f0b33 | ||
|
|
b1d81536ce | ||
|
|
b8d8b1cfa8 | ||
|
|
dfaed39a01 | ||
|
|
4849dc0eb9 | ||
|
|
05aaf8745d | ||
|
|
7c1abd993d | ||
|
|
0674de2ce4 | ||
|
|
ba26d119f7 | ||
|
|
616a0f204c | ||
|
|
0eaa8d38db | ||
|
|
4ad2f752a3 | ||
|
|
310af5a31a | ||
|
|
11fac8ee48 | ||
|
|
e70514f540 | ||
|
|
1a7465dd72 | ||
|
|
15ddce74a7 | ||
|
|
ee4c941610 | ||
|
|
459fc43625 | ||
|
|
893d9306d4 | ||
|
|
c243680113 | ||
|
|
db6d95273c |
4
.github/workflows/builder.yml
vendored
4
.github/workflows/builder.yml
vendored
@@ -47,10 +47,6 @@ jobs:
|
||||
with:
|
||||
python-version-file: ".python-version"
|
||||
|
||||
- name: Get information
|
||||
id: info
|
||||
uses: home-assistant/actions/helpers/info@5f5b077d63a1e4c53019231409a0c4d791fb74e5 # zizmor: ignore[unpinned-uses]
|
||||
|
||||
- name: Get version
|
||||
id: version
|
||||
uses: home-assistant/actions/helpers/version@master # zizmor: ignore[unpinned-uses]
|
||||
|
||||
4
.github/workflows/codeql.yml
vendored
4
.github/workflows/codeql.yml
vendored
@@ -28,11 +28,11 @@ jobs:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6
|
||||
uses: github/codeql-action/init@c10b8064de6f491fea524254123dbe5e09572f13 # v4.35.1
|
||||
with:
|
||||
languages: python
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6
|
||||
uses: github/codeql-action/analyze@c10b8064de6f491fea524254123dbe5e09572f13 # v4.35.1
|
||||
with:
|
||||
category: "/language:python"
|
||||
|
||||
6
CODEOWNERS
generated
6
CODEOWNERS
generated
@@ -1263,8 +1263,8 @@ CLAUDE.md @home-assistant/core
|
||||
/tests/components/openuv/ @bachya
|
||||
/homeassistant/components/openweathermap/ @fabaff @freekode @nzapponi @wittypluck
|
||||
/tests/components/openweathermap/ @fabaff @freekode @nzapponi @wittypluck
|
||||
/homeassistant/components/opnsense/ @mtreinish
|
||||
/tests/components/opnsense/ @mtreinish
|
||||
/homeassistant/components/opnsense/ @HarlemSquirrel @Snuffy2
|
||||
/tests/components/opnsense/ @HarlemSquirrel @Snuffy2
|
||||
/homeassistant/components/opower/ @tronikos
|
||||
/tests/components/opower/ @tronikos
|
||||
/homeassistant/components/oralb/ @bdraco @Lash-L
|
||||
@@ -1875,6 +1875,8 @@ CLAUDE.md @home-assistant/core
|
||||
/tests/components/vicare/ @CFenner
|
||||
/homeassistant/components/victron_ble/ @rajlaud
|
||||
/tests/components/victron_ble/ @rajlaud
|
||||
/homeassistant/components/victron_gx/ @tomer-w
|
||||
/tests/components/victron_gx/ @tomer-w
|
||||
/homeassistant/components/victron_remote_monitoring/ @AndyTempel
|
||||
/tests/components/victron_remote_monitoring/ @AndyTempel
|
||||
/homeassistant/components/vilfo/ @ManneW
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"domain": "victron",
|
||||
"name": "Victron",
|
||||
"integrations": ["victron_ble", "victron_remote_monitoring"]
|
||||
"integrations": ["victron_gx", "victron_ble", "victron_remote_monitoring"]
|
||||
}
|
||||
|
||||
@@ -11,12 +11,12 @@
|
||||
"user": {
|
||||
"data": {
|
||||
"tracked_apps": "Apps",
|
||||
"tracked_custom_integrations": "Custom integrations",
|
||||
"tracked_custom_integrations": "Community integrations",
|
||||
"tracked_integrations": "Integrations"
|
||||
},
|
||||
"data_description": {
|
||||
"tracked_apps": "Select the apps you want to track",
|
||||
"tracked_custom_integrations": "Select the custom integrations you want to track",
|
||||
"tracked_custom_integrations": "Select the community integrations you want to track",
|
||||
"tracked_integrations": "Select the integrations you want to track"
|
||||
}
|
||||
}
|
||||
@@ -31,7 +31,7 @@
|
||||
"unit_of_measurement": "[%key:component::analytics_insights::entity::sensor::apps::unit_of_measurement%]"
|
||||
},
|
||||
"custom_integrations": {
|
||||
"name": "{custom_integration_domain} (custom)",
|
||||
"name": "{custom_integration_domain} (community)",
|
||||
"unit_of_measurement": "[%key:component::analytics_insights::entity::sensor::apps::unit_of_measurement%]"
|
||||
},
|
||||
"total_active_installations": {
|
||||
|
||||
@@ -92,6 +92,7 @@ class AnglianWaterUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
_LOGGER.debug("Updating statistics for the first time")
|
||||
usage_sum = 0.0
|
||||
last_stats_time = None
|
||||
allow_update_last_stored_hour = False
|
||||
else:
|
||||
if not meter.readings or len(meter.readings) == 0:
|
||||
_LOGGER.debug("No recent usage statistics found, skipping update")
|
||||
@@ -107,6 +108,7 @@ class AnglianWaterUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
continue
|
||||
start = dt_util.as_local(parsed_read_at) - timedelta(hours=1)
|
||||
_LOGGER.debug("Getting statistics at %s", start)
|
||||
stats: dict[str, list[Any]] = {}
|
||||
for end in (start + timedelta(seconds=1), None):
|
||||
stats = await get_instance(self.hass).async_add_executor_job(
|
||||
statistics_during_period,
|
||||
@@ -127,15 +129,28 @@ class AnglianWaterUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
"Not found, trying to find oldest statistic after %s",
|
||||
start,
|
||||
)
|
||||
assert stats
|
||||
|
||||
def _safe_get_sum(records: list[Any]) -> float:
|
||||
if records and "sum" in records[0]:
|
||||
return float(records[0]["sum"])
|
||||
return 0.0
|
||||
if not stats or not stats.get(usage_statistic_id):
|
||||
_LOGGER.debug(
|
||||
"Could not find existing statistics during period lookup for %s, "
|
||||
"falling back to last stored statistic",
|
||||
usage_statistic_id,
|
||||
)
|
||||
allow_update_last_stored_hour = True
|
||||
last_records = last_stat[usage_statistic_id]
|
||||
usage_sum = float(last_records[0].get("sum") or 0.0)
|
||||
last_stats_time = last_records[0]["start"]
|
||||
else:
|
||||
allow_update_last_stored_hour = False
|
||||
records = stats[usage_statistic_id]
|
||||
|
||||
usage_sum = _safe_get_sum(stats.get(usage_statistic_id, []))
|
||||
last_stats_time = stats[usage_statistic_id][0]["start"]
|
||||
def _safe_get_sum(records: list[Any]) -> float:
|
||||
if records and "sum" in records[0]:
|
||||
return float(records[0]["sum"])
|
||||
return 0.0
|
||||
|
||||
usage_sum = _safe_get_sum(records)
|
||||
last_stats_time = records[0]["start"]
|
||||
|
||||
usage_statistics = []
|
||||
|
||||
@@ -148,7 +163,13 @@ class AnglianWaterUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
)
|
||||
continue
|
||||
start = dt_util.as_local(parsed_read_at) - timedelta(hours=1)
|
||||
if last_stats_time is not None and start.timestamp() <= last_stats_time:
|
||||
if last_stats_time is not None and (
|
||||
start.timestamp() < last_stats_time
|
||||
or (
|
||||
start.timestamp() == last_stats_time
|
||||
and not allow_update_last_stored_hour
|
||||
)
|
||||
):
|
||||
continue
|
||||
usage_state = max(0, read["consumption"] / 1000)
|
||||
usage_sum = max(0, read["read"])
|
||||
|
||||
@@ -53,6 +53,7 @@ from .const import (
|
||||
CONF_TEMPERATURE,
|
||||
CONF_THINKING_BUDGET,
|
||||
CONF_THINKING_EFFORT,
|
||||
CONF_TOOL_SEARCH,
|
||||
CONF_WEB_SEARCH,
|
||||
CONF_WEB_SEARCH_CITY,
|
||||
CONF_WEB_SEARCH_COUNTRY,
|
||||
@@ -66,6 +67,7 @@ from .const import (
|
||||
DOMAIN,
|
||||
NON_ADAPTIVE_THINKING_MODELS,
|
||||
NON_THINKING_MODELS,
|
||||
TOOL_SEARCH_UNSUPPORTED_MODELS,
|
||||
WEB_SEARCH_UNSUPPORTED_MODELS,
|
||||
PromptCaching,
|
||||
)
|
||||
@@ -466,6 +468,16 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
self.options.pop(CONF_WEB_SEARCH_COUNTRY, None)
|
||||
self.options.pop(CONF_WEB_SEARCH_TIMEZONE, None)
|
||||
|
||||
if not model.startswith(tuple(TOOL_SEARCH_UNSUPPORTED_MODELS)):
|
||||
step_schema[
|
||||
vol.Optional(
|
||||
CONF_TOOL_SEARCH,
|
||||
default=DEFAULT[CONF_TOOL_SEARCH],
|
||||
)
|
||||
] = bool
|
||||
else:
|
||||
self.options.pop(CONF_TOOL_SEARCH, None)
|
||||
|
||||
if not step_schema:
|
||||
user_input = {}
|
||||
|
||||
|
||||
@@ -18,6 +18,7 @@ CONF_PROMPT_CACHING = "prompt_caching"
|
||||
CONF_TEMPERATURE = "temperature"
|
||||
CONF_THINKING_BUDGET = "thinking_budget"
|
||||
CONF_THINKING_EFFORT = "thinking_effort"
|
||||
CONF_TOOL_SEARCH = "tool_search"
|
||||
CONF_WEB_SEARCH = "web_search"
|
||||
CONF_WEB_SEARCH_USER_LOCATION = "user_location"
|
||||
CONF_WEB_SEARCH_MAX_USES = "web_search_max_uses"
|
||||
@@ -35,21 +36,22 @@ class PromptCaching(StrEnum):
|
||||
AUTOMATIC = "automatic"
|
||||
|
||||
|
||||
MIN_THINKING_BUDGET = 1024
|
||||
|
||||
DEFAULT = {
|
||||
CONF_CHAT_MODEL: "claude-haiku-4-5",
|
||||
CONF_CODE_EXECUTION: False,
|
||||
CONF_MAX_TOKENS: 3000,
|
||||
CONF_PROMPT_CACHING: PromptCaching.PROMPT.value,
|
||||
CONF_TEMPERATURE: 1.0,
|
||||
CONF_THINKING_BUDGET: 0,
|
||||
CONF_THINKING_BUDGET: MIN_THINKING_BUDGET,
|
||||
CONF_THINKING_EFFORT: "low",
|
||||
CONF_TOOL_SEARCH: False,
|
||||
CONF_WEB_SEARCH: False,
|
||||
CONF_WEB_SEARCH_USER_LOCATION: False,
|
||||
CONF_WEB_SEARCH_MAX_USES: 5,
|
||||
}
|
||||
|
||||
MIN_THINKING_BUDGET = 1024
|
||||
|
||||
NON_THINKING_MODELS = [
|
||||
"claude-3-haiku",
|
||||
]
|
||||
@@ -93,6 +95,11 @@ PROGRAMMATIC_TOOL_CALLING_UNSUPPORTED_MODELS = [
|
||||
"claude-3-haiku",
|
||||
]
|
||||
|
||||
TOOL_SEARCH_UNSUPPORTED_MODELS = [
|
||||
"claude-3",
|
||||
"claude-haiku",
|
||||
]
|
||||
|
||||
DEPRECATED_MODELS = [
|
||||
"claude-3",
|
||||
]
|
||||
|
||||
@@ -58,6 +58,8 @@ from anthropic.types import (
|
||||
ToolChoiceAutoParam,
|
||||
ToolChoiceToolParam,
|
||||
ToolParam,
|
||||
ToolSearchToolBm25_20251119Param,
|
||||
ToolSearchToolResultBlock,
|
||||
ToolUnionParam,
|
||||
ToolUseBlock,
|
||||
ToolUseBlockParam,
|
||||
@@ -74,6 +76,9 @@ from anthropic.types.message_create_params import MessageCreateParamsStreaming
|
||||
from anthropic.types.text_editor_code_execution_tool_result_block_param import (
|
||||
Content as TextEditorCodeExecutionToolResultBlockParamContentParam,
|
||||
)
|
||||
from anthropic.types.tool_search_tool_result_block_param import (
|
||||
Content as ToolSearchToolResultBlockParamContentParam,
|
||||
)
|
||||
import voluptuous as vol
|
||||
from voluptuous_openapi import convert
|
||||
|
||||
@@ -95,6 +100,7 @@ from .const import (
|
||||
CONF_TEMPERATURE,
|
||||
CONF_THINKING_BUDGET,
|
||||
CONF_THINKING_EFFORT,
|
||||
CONF_TOOL_SEARCH,
|
||||
CONF_WEB_SEARCH,
|
||||
CONF_WEB_SEARCH_CITY,
|
||||
CONF_WEB_SEARCH_COUNTRY,
|
||||
@@ -204,7 +210,7 @@ class ContentDetails:
|
||||
]
|
||||
|
||||
|
||||
def _convert_content(
|
||||
def _convert_content( # noqa: C901
|
||||
chat_content: Iterable[conversation.Content],
|
||||
) -> tuple[list[MessageParam], str | None]:
|
||||
"""Transform HA chat_log content into Anthropic API format."""
|
||||
@@ -257,6 +263,15 @@ def _convert_content(
|
||||
content.tool_result,
|
||||
),
|
||||
}
|
||||
elif content.tool_name == "tool_search":
|
||||
tool_result_block = {
|
||||
"type": "tool_search_tool_result",
|
||||
"tool_use_id": content.tool_call_id,
|
||||
"content": cast(
|
||||
ToolSearchToolResultBlockParamContentParam,
|
||||
content.tool_result,
|
||||
),
|
||||
}
|
||||
else:
|
||||
tool_result_block = {
|
||||
"type": "tool_result",
|
||||
@@ -387,6 +402,7 @@ def _convert_content(
|
||||
"code_execution",
|
||||
"bash_code_execution",
|
||||
"text_editor_code_execution",
|
||||
"tool_search_tool_bm25",
|
||||
],
|
||||
tool_call.tool_name,
|
||||
),
|
||||
@@ -399,6 +415,7 @@ def _convert_content(
|
||||
"code_execution",
|
||||
"bash_code_execution",
|
||||
"text_editor_code_execution",
|
||||
"tool_search_tool_bm25",
|
||||
]
|
||||
else ToolUseBlockParam(
|
||||
type="tool_use",
|
||||
@@ -560,6 +577,7 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
CodeExecutionToolResultBlock,
|
||||
BashCodeExecutionToolResultBlock,
|
||||
TextEditorCodeExecutionToolResultBlock,
|
||||
ToolSearchToolResultBlock,
|
||||
),
|
||||
):
|
||||
if content_details:
|
||||
@@ -690,6 +708,14 @@ class AnthropicBaseLLMEntity(CoordinatorEntity[AnthropicCoordinator]):
|
||||
"""Generate an answer for the chat log."""
|
||||
options = self.subentry.data
|
||||
|
||||
preloaded_tools = [
|
||||
"HassTurnOn",
|
||||
"HassTurnOff",
|
||||
"GetLiveContext",
|
||||
"code_execution",
|
||||
"web_search",
|
||||
]
|
||||
|
||||
system = chat_log.content[0]
|
||||
if not isinstance(system, conversation.SystemContent):
|
||||
raise HomeAssistantError(
|
||||
@@ -884,8 +910,23 @@ class AnthropicBaseLLMEntity(CoordinatorEntity[AnthropicCoordinator]):
|
||||
),
|
||||
)
|
||||
)
|
||||
preloaded_tools.append(structure_name)
|
||||
|
||||
if tools:
|
||||
if (
|
||||
options.get(CONF_TOOL_SEARCH, DEFAULT[CONF_TOOL_SEARCH])
|
||||
and len(tools) > len(preloaded_tools) + 1
|
||||
):
|
||||
for tool in tools:
|
||||
if not tool["name"].endswith(tuple(preloaded_tools)):
|
||||
tool["defer_loading"] = True
|
||||
tools.append(
|
||||
ToolSearchToolBm25_20251119Param(
|
||||
type="tool_search_tool_bm25_20251119",
|
||||
name="tool_search_tool_bm25",
|
||||
)
|
||||
)
|
||||
|
||||
model_args["tools"] = tools
|
||||
|
||||
coordinator = self.entry.runtime_data
|
||||
@@ -929,6 +970,7 @@ class AnthropicBaseLLMEntity(CoordinatorEntity[AnthropicCoordinator]):
|
||||
except anthropic.AnthropicError as err:
|
||||
# Non-connection error, mark connection as healthy
|
||||
coordinator.async_set_updated_data(None)
|
||||
LOGGER.error("Error while talking to Anthropic: %s", err)
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="api_error",
|
||||
|
||||
@@ -8,6 +8,6 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/anthropic",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "bronze",
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["anthropic==0.83.0"]
|
||||
}
|
||||
|
||||
@@ -74,6 +74,7 @@
|
||||
"code_execution": "[%key:component::anthropic::config_subentries::conversation::step::model::data::code_execution%]",
|
||||
"thinking_budget": "[%key:component::anthropic::config_subentries::conversation::step::model::data::thinking_budget%]",
|
||||
"thinking_effort": "[%key:component::anthropic::config_subentries::conversation::step::model::data::thinking_effort%]",
|
||||
"tool_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data::tool_search%]",
|
||||
"user_location": "[%key:component::anthropic::config_subentries::conversation::step::model::data::user_location%]",
|
||||
"web_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data::web_search%]",
|
||||
"web_search_max_uses": "[%key:component::anthropic::config_subentries::conversation::step::model::data::web_search_max_uses%]"
|
||||
@@ -82,6 +83,7 @@
|
||||
"code_execution": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::code_execution%]",
|
||||
"thinking_budget": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::thinking_budget%]",
|
||||
"thinking_effort": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::thinking_effort%]",
|
||||
"tool_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::tool_search%]",
|
||||
"user_location": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::user_location%]",
|
||||
"web_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::web_search%]",
|
||||
"web_search_max_uses": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::web_search_max_uses%]"
|
||||
@@ -136,6 +138,7 @@
|
||||
"code_execution": "Code execution",
|
||||
"thinking_budget": "Thinking budget",
|
||||
"thinking_effort": "Thinking effort",
|
||||
"tool_search": "Enable tool search tool",
|
||||
"user_location": "Include home location",
|
||||
"web_search": "Enable web search",
|
||||
"web_search_max_uses": "Maximum web searches"
|
||||
@@ -144,6 +147,7 @@
|
||||
"code_execution": "Allow the model to execute code in a secure sandbox environment, enabling it to analyze data and perform complex calculations.",
|
||||
"thinking_budget": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking.",
|
||||
"thinking_effort": "Control how many tokens Claude uses when responding, trading off between response thoroughness and token efficiency",
|
||||
"tool_search": "Enable dynamic tool discovery instead of preloading all tools into the context",
|
||||
"user_location": "Localize search results based on home location",
|
||||
"web_search": "The web search tool gives Claude direct access to real-time web content, allowing it to answer questions with up-to-date information beyond its knowledge cutoff",
|
||||
"web_search_max_uses": "Limit the number of searches performed per response"
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["axis"],
|
||||
"requirements": ["axis==67"],
|
||||
"requirements": ["axis==68"],
|
||||
"ssdp": [
|
||||
{
|
||||
"manufacturer": "AXIS"
|
||||
|
||||
@@ -74,6 +74,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: BackblazeConfigEntry) ->
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_bucket_name",
|
||||
) from err
|
||||
except exception.BadRequest as err:
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="bad_request",
|
||||
translation_placeholders={"error_message": str(err)},
|
||||
) from err
|
||||
except (
|
||||
exception.B2ConnectionError,
|
||||
exception.B2RequestTimeout,
|
||||
|
||||
@@ -174,6 +174,14 @@ class BackblazeConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"Backblaze B2 bucket '%s' does not exist", user_input[CONF_BUCKET]
|
||||
)
|
||||
errors[CONF_BUCKET] = "invalid_bucket_name"
|
||||
except exception.BadRequest as err:
|
||||
_LOGGER.error(
|
||||
"Backblaze B2 API rejected the request for Key ID '%s': %s",
|
||||
user_input[CONF_KEY_ID],
|
||||
err,
|
||||
)
|
||||
errors["base"] = "bad_request"
|
||||
placeholders["error_message"] = str(err)
|
||||
except (
|
||||
exception.B2ConnectionError,
|
||||
exception.B2RequestTimeout,
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["b2sdk"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["b2sdk==2.10.1"]
|
||||
"requirements": ["b2sdk==2.10.4"]
|
||||
}
|
||||
|
||||
@@ -6,6 +6,7 @@
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
|
||||
},
|
||||
"error": {
|
||||
"bad_request": "The Backblaze B2 API rejected the request: {error_message}",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_bucket_name": "[%key:component::backblaze_b2::exceptions::invalid_bucket_name::message%]",
|
||||
"invalid_capability": "[%key:component::backblaze_b2::exceptions::invalid_capability::message%]",
|
||||
@@ -60,6 +61,9 @@
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"bad_request": {
|
||||
"message": "The Backblaze B2 API rejected the request: {error_message}"
|
||||
},
|
||||
"cannot_connect": {
|
||||
"message": "Cannot connect to endpoint"
|
||||
},
|
||||
|
||||
@@ -23,7 +23,7 @@ from . import util
|
||||
from .agent import BackupAgent
|
||||
from .const import DATA_MANAGER
|
||||
from .manager import BackupManager
|
||||
from .models import AgentBackup, BackupNotFound
|
||||
from .models import AgentBackup, BackupNotFound, InvalidBackupFilename
|
||||
|
||||
|
||||
@callback
|
||||
@@ -195,6 +195,11 @@ class UploadBackupView(HomeAssistantView):
|
||||
backup_id = await manager.async_receive_backup(
|
||||
contents=contents, agent_ids=agent_ids
|
||||
)
|
||||
except InvalidBackupFilename as err:
|
||||
return Response(
|
||||
body=str(err),
|
||||
status=HTTPStatus.BAD_REQUEST,
|
||||
)
|
||||
except OSError as err:
|
||||
return Response(
|
||||
body=f"Can't write backup file: {err}",
|
||||
|
||||
@@ -68,6 +68,7 @@ from .models import (
|
||||
BackupReaderWriterError,
|
||||
BaseBackup,
|
||||
Folder,
|
||||
InvalidBackupFilename,
|
||||
)
|
||||
from .store import BackupStore
|
||||
from .util import (
|
||||
@@ -1006,6 +1007,14 @@ class BackupManager:
|
||||
) -> str:
|
||||
"""Receive and store a backup file from upload."""
|
||||
contents.chunk_size = BUF_SIZE
|
||||
suggested_filename = contents.filename or "backup.tar"
|
||||
safe_filename = PureWindowsPath(suggested_filename).name
|
||||
if (
|
||||
not safe_filename
|
||||
or safe_filename != suggested_filename
|
||||
or safe_filename == ".."
|
||||
):
|
||||
raise InvalidBackupFilename(f"Invalid filename: {suggested_filename}")
|
||||
self.async_on_backup_event(
|
||||
ReceiveBackupEvent(
|
||||
reason=None,
|
||||
@@ -1016,7 +1025,7 @@ class BackupManager:
|
||||
written_backup = await self._reader_writer.async_receive_backup(
|
||||
agent_ids=agent_ids,
|
||||
stream=contents,
|
||||
suggested_filename=contents.filename or "backup.tar",
|
||||
suggested_filename=suggested_filename,
|
||||
)
|
||||
self.async_on_backup_event(
|
||||
ReceiveBackupEvent(
|
||||
@@ -1957,10 +1966,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
|
||||
suggested_filename: str,
|
||||
) -> WrittenBackup:
|
||||
"""Receive a backup."""
|
||||
safe_filename = PureWindowsPath(suggested_filename).name
|
||||
if not safe_filename or safe_filename == "..":
|
||||
safe_filename = "backup.tar"
|
||||
temp_file = Path(self.temp_backup_dir, safe_filename)
|
||||
temp_file = Path(self.temp_backup_dir, suggested_filename)
|
||||
|
||||
async_add_executor_job = self._hass.async_add_executor_job
|
||||
await async_add_executor_job(make_backup_dir, self.temp_backup_dir)
|
||||
|
||||
@@ -8,6 +8,6 @@
|
||||
"integration_type": "service",
|
||||
"iot_class": "calculated",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["cronsim==2.7", "securetar==2026.2.0"],
|
||||
"requirements": ["cronsim==2.7", "securetar==2026.4.1"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -95,6 +95,12 @@ class BackupReaderWriterError(BackupError):
|
||||
error_code = "backup_reader_writer_error"
|
||||
|
||||
|
||||
class InvalidBackupFilename(BackupManagerError):
|
||||
"""Raised when a backup filename is invalid."""
|
||||
|
||||
error_code = "invalid_backup_filename"
|
||||
|
||||
|
||||
class BackupNotFound(BackupAgentError, BackupManagerError):
|
||||
"""Raised when a backup is not found."""
|
||||
|
||||
|
||||
@@ -22,6 +22,7 @@ from securetar import (
|
||||
SecureTarFile,
|
||||
SecureTarReadError,
|
||||
SecureTarRootKeyContext,
|
||||
get_archive_max_ciphertext_size,
|
||||
)
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -383,9 +384,12 @@ def _encrypt_backup(
|
||||
if prefix not in expected_archives:
|
||||
LOGGER.debug("Unknown inner tar file %s will not be encrypted", obj.name)
|
||||
continue
|
||||
output_archive.import_tar(
|
||||
input_tar.extractfile(obj), obj, derived_key_id=inner_tar_idx
|
||||
)
|
||||
if (fileobj := input_tar.extractfile(obj)) is None:
|
||||
LOGGER.debug(
|
||||
"Non regular inner tar file %s will not be encrypted", obj.name
|
||||
)
|
||||
continue
|
||||
output_archive.import_tar(fileobj, obj, derived_key_id=inner_tar_idx)
|
||||
inner_tar_idx += 1
|
||||
|
||||
|
||||
@@ -419,7 +423,7 @@ class _CipherBackupStreamer:
|
||||
hass: HomeAssistant,
|
||||
backup: AgentBackup,
|
||||
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
|
||||
password: str | None,
|
||||
password: str,
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
self._workers: list[_CipherWorkerStatus] = []
|
||||
@@ -431,7 +435,9 @@ class _CipherBackupStreamer:
|
||||
|
||||
def size(self) -> int:
|
||||
"""Return the maximum size of the decrypted or encrypted backup."""
|
||||
return self._backup.size + self._num_tar_files() * tarfile.RECORDSIZE
|
||||
return get_archive_max_ciphertext_size(
|
||||
self._backup.size, SECURETAR_CREATE_VERSION, self._num_tar_files()
|
||||
)
|
||||
|
||||
def _num_tar_files(self) -> int:
|
||||
"""Return the number of inner tar files."""
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"issues": {
|
||||
"integration_removed": {
|
||||
"description": "The BMW Connected Drive integration has been removed from Home Assistant.\n\nIn September 2025, BMW blocked third-party access to their servers by adding additional security measures. For EU-registered cars, a community-developed [custom component]({custom_component_url}) using BMW's CarData API is available as an alternative.\n\nTo resolve this issue, please remove the (now defunct) integration entries from your Home Assistant setup. [Click here to see your existing BMW Connected Drive integration entries]({entries}).",
|
||||
"description": "The BMW Connected Drive integration has been removed from Home Assistant.\n\nIn September 2025, BMW blocked third-party access to their servers by adding additional security measures. For EU-registered cars, a [community integration]({custom_component_url}) using BMW's CarData API is available as an alternative.\n\nTo resolve this issue, please remove the (now defunct) integration entries from your Home Assistant setup. [Click here to see your existing BMW Connected Drive integration entries]({entries}).",
|
||||
"title": "The BMW Connected Drive integration has been removed"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,6 +10,7 @@ from bsblan import (
|
||||
BSBLAN,
|
||||
BSBLANAuthError,
|
||||
BSBLANConnectionError,
|
||||
BSBLANError,
|
||||
HotWaterConfig,
|
||||
HotWaterSchedule,
|
||||
HotWaterState,
|
||||
@@ -50,7 +51,7 @@ class BSBLanFastData:
|
||||
|
||||
state: State
|
||||
sensor: Sensor
|
||||
dhw: HotWaterState
|
||||
dhw: HotWaterState | None = None
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -111,7 +112,6 @@ class BSBLanFastCoordinator(BSBLanCoordinator[BSBLanFastData]):
|
||||
# This reduces response time significantly (~0.2s per parameter)
|
||||
state = await self.client.state(include=STATE_INCLUDE)
|
||||
sensor = await self.client.sensor(include=SENSOR_INCLUDE)
|
||||
dhw = await self.client.hot_water_state(include=DHW_STATE_INCLUDE)
|
||||
|
||||
except BSBLANAuthError as err:
|
||||
raise ConfigEntryAuthFailed(
|
||||
@@ -126,6 +126,19 @@ class BSBLanFastCoordinator(BSBLanCoordinator[BSBLanFastData]):
|
||||
translation_placeholders={"host": host},
|
||||
) from err
|
||||
|
||||
# Fetch DHW state separately - device may not support hot water
|
||||
dhw: HotWaterState | None = None
|
||||
try:
|
||||
dhw = await self.client.hot_water_state(include=DHW_STATE_INCLUDE)
|
||||
except BSBLANError:
|
||||
# Preserve last known DHW state if available (entity may depend on it)
|
||||
if self.data:
|
||||
dhw = self.data.dhw
|
||||
LOGGER.debug(
|
||||
"DHW (Domestic Hot Water) state not available on device at %s",
|
||||
self.config_entry.data[CONF_HOST],
|
||||
)
|
||||
|
||||
return BSBLanFastData(
|
||||
state=state,
|
||||
sensor=sensor,
|
||||
@@ -159,13 +172,6 @@ class BSBLanSlowCoordinator(BSBLanCoordinator[BSBLanSlowData]):
|
||||
dhw_config = await self.client.hot_water_config(include=DHW_CONFIG_INCLUDE)
|
||||
dhw_schedule = await self.client.hot_water_schedule()
|
||||
|
||||
except AttributeError:
|
||||
# Device does not support DHW functionality
|
||||
LOGGER.debug(
|
||||
"DHW (Domestic Hot Water) not available on device at %s",
|
||||
self.config_entry.data[CONF_HOST],
|
||||
)
|
||||
return BSBLanSlowData()
|
||||
except (BSBLANConnectionError, BSBLANAuthError) as err:
|
||||
# If config update fails, keep existing data
|
||||
LOGGER.debug(
|
||||
@@ -177,6 +183,13 @@ class BSBLanSlowCoordinator(BSBLanCoordinator[BSBLanSlowData]):
|
||||
return self.data
|
||||
# First fetch failed, return empty data
|
||||
return BSBLanSlowData()
|
||||
except BSBLANError, AttributeError:
|
||||
# Device does not support DHW functionality
|
||||
LOGGER.debug(
|
||||
"DHW (Domestic Hot Water) not available on device at %s",
|
||||
self.config_entry.data[CONF_HOST],
|
||||
)
|
||||
return BSBLanSlowData()
|
||||
|
||||
return BSBLanSlowData(
|
||||
dhw_config=dhw_config,
|
||||
|
||||
@@ -22,7 +22,9 @@ async def async_get_config_entry_diagnostics(
|
||||
"fast_coordinator_data": {
|
||||
"state": data.fast_coordinator.data.state.model_dump(),
|
||||
"sensor": data.fast_coordinator.data.sensor.model_dump(),
|
||||
"dhw": data.fast_coordinator.data.dhw.model_dump(),
|
||||
"dhw": data.fast_coordinator.data.dhw.model_dump()
|
||||
if data.fast_coordinator.data.dhw
|
||||
else None,
|
||||
},
|
||||
"static": data.static.model_dump() if data.static is not None else None,
|
||||
}
|
||||
|
||||
@@ -2,6 +2,9 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from yarl import URL
|
||||
|
||||
from homeassistant.const import CONF_HOST, CONF_PORT
|
||||
from homeassistant.helpers.device_registry import (
|
||||
CONNECTION_NETWORK_MAC,
|
||||
DeviceInfo,
|
||||
@@ -10,7 +13,7 @@ from homeassistant.helpers.device_registry import (
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from . import BSBLanData
|
||||
from .const import DOMAIN
|
||||
from .const import DEFAULT_PORT, DOMAIN
|
||||
from .coordinator import BSBLanCoordinator, BSBLanFastCoordinator, BSBLanSlowCoordinator
|
||||
|
||||
|
||||
@@ -22,7 +25,8 @@ class BSBLanEntityBase[_T: BSBLanCoordinator](CoordinatorEntity[_T]):
|
||||
def __init__(self, coordinator: _T, data: BSBLanData) -> None:
|
||||
"""Initialize BSBLan entity with device info."""
|
||||
super().__init__(coordinator)
|
||||
host = coordinator.config_entry.data["host"]
|
||||
host = coordinator.config_entry.data[CONF_HOST]
|
||||
port = coordinator.config_entry.data.get(CONF_PORT, DEFAULT_PORT)
|
||||
mac = data.device.MAC
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, mac)},
|
||||
@@ -44,7 +48,7 @@ class BSBLanEntityBase[_T: BSBLanCoordinator](CoordinatorEntity[_T]):
|
||||
else None
|
||||
),
|
||||
sw_version=data.device.version,
|
||||
configuration_url=f"http://{host}",
|
||||
configuration_url=str(URL.build(scheme="http", host=host, port=port)),
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from bsblan import BSBLANError, SetHotWaterParam
|
||||
from bsblan import BSBLANError, HotWaterState, SetHotWaterParam
|
||||
|
||||
from homeassistant.components.water_heater import (
|
||||
STATE_ECO,
|
||||
@@ -46,8 +46,10 @@ async def async_setup_entry(
|
||||
data = entry.runtime_data
|
||||
|
||||
# Only create water heater entity if DHW (Domestic Hot Water) is available
|
||||
# Check if we have any DHW-related data indicating water heater support
|
||||
dhw_data = data.fast_coordinator.data.dhw
|
||||
if dhw_data is None:
|
||||
# Device does not support DHW, skip water heater setup
|
||||
return
|
||||
if (
|
||||
dhw_data.operating_mode is None
|
||||
and dhw_data.nominal_setpoint is None
|
||||
@@ -107,11 +109,21 @@ class BSBLANWaterHeater(BSBLanDualCoordinatorEntity, WaterHeaterEntity):
|
||||
else:
|
||||
self._attr_max_temp = 65.0 # Default maximum
|
||||
|
||||
@property
|
||||
def _dhw(self) -> HotWaterState:
|
||||
"""Return DHW state data.
|
||||
|
||||
This entity is only created when DHW data is available.
|
||||
"""
|
||||
dhw = self.coordinator.data.dhw
|
||||
assert dhw is not None
|
||||
return dhw
|
||||
|
||||
@property
|
||||
def current_operation(self) -> str | None:
|
||||
"""Return current operation."""
|
||||
if (
|
||||
operating_mode := self.coordinator.data.dhw.operating_mode
|
||||
operating_mode := self._dhw.operating_mode
|
||||
) is None or operating_mode.value is None:
|
||||
return None
|
||||
return BSBLAN_TO_HA_OPERATION_MODE.get(operating_mode.value)
|
||||
@@ -119,16 +131,14 @@ class BSBLANWaterHeater(BSBLanDualCoordinatorEntity, WaterHeaterEntity):
|
||||
@property
|
||||
def current_temperature(self) -> float | None:
|
||||
"""Return the current temperature."""
|
||||
if (
|
||||
current_temp := self.coordinator.data.dhw.dhw_actual_value_top_temperature
|
||||
) is None:
|
||||
if (current_temp := self._dhw.dhw_actual_value_top_temperature) is None:
|
||||
return None
|
||||
return current_temp.value
|
||||
|
||||
@property
|
||||
def target_temperature(self) -> float | None:
|
||||
"""Return the temperature we try to reach."""
|
||||
if (target_temp := self.coordinator.data.dhw.nominal_setpoint) is None:
|
||||
if (target_temp := self._dhw.nominal_setpoint) is None:
|
||||
return None
|
||||
return target_temp.value
|
||||
|
||||
|
||||
@@ -17,6 +17,7 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.components import frontend, http, websocket_api
|
||||
from homeassistant.components.websocket_api import (
|
||||
ERR_INVALID_FORMAT,
|
||||
ERR_NOT_FOUND,
|
||||
ERR_NOT_SUPPORTED,
|
||||
ActiveConnection,
|
||||
@@ -33,6 +34,7 @@ from homeassistant.core import (
|
||||
)
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv, entity_registry as er
|
||||
from homeassistant.helpers.debounce import Debouncer
|
||||
from homeassistant.helpers.entity import Entity, EntityDescription
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
from homeassistant.helpers.event import async_track_point_in_time
|
||||
@@ -76,6 +78,7 @@ ENTITY_ID_FORMAT = DOMAIN + ".{}"
|
||||
PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA
|
||||
PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE
|
||||
SCAN_INTERVAL = datetime.timedelta(seconds=60)
|
||||
EVENT_LISTENER_DEBOUNCE_COOLDOWN = 1.0 # seconds
|
||||
|
||||
# Don't support rrules more often than daily
|
||||
VALID_FREQS = {"DAILY", "WEEKLY", "MONTHLY", "YEARLY"}
|
||||
@@ -320,6 +323,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
websocket_api.async_register_command(hass, handle_calendar_event_create)
|
||||
websocket_api.async_register_command(hass, handle_calendar_event_delete)
|
||||
websocket_api.async_register_command(hass, handle_calendar_event_update)
|
||||
websocket_api.async_register_command(hass, handle_calendar_event_subscribe)
|
||||
|
||||
component.async_register_entity_service(
|
||||
CREATE_EVENT_SERVICE,
|
||||
@@ -517,6 +521,17 @@ class CalendarEntity(Entity):
|
||||
_entity_component_unrecorded_attributes = frozenset({"description"})
|
||||
|
||||
_alarm_unsubs: list[CALLBACK_TYPE] | None = None
|
||||
_event_listeners: (
|
||||
list[
|
||||
tuple[
|
||||
datetime.datetime,
|
||||
datetime.datetime,
|
||||
Callable[[list[JsonValueType] | None], None],
|
||||
]
|
||||
]
|
||||
| None
|
||||
) = None
|
||||
_event_listener_debouncer: Debouncer[None] | None = None
|
||||
|
||||
_attr_initial_color: str | None
|
||||
|
||||
@@ -585,6 +600,10 @@ class CalendarEntity(Entity):
|
||||
the current or upcoming event.
|
||||
"""
|
||||
super()._async_write_ha_state()
|
||||
|
||||
# Notify websocket subscribers of event changes (debounced)
|
||||
if self._event_listeners and self._event_listener_debouncer:
|
||||
self._event_listener_debouncer.async_schedule_call()
|
||||
if self._alarm_unsubs is None:
|
||||
self._alarm_unsubs = []
|
||||
_LOGGER.debug(
|
||||
@@ -625,6 +644,13 @@ class CalendarEntity(Entity):
|
||||
event.end_datetime_local,
|
||||
)
|
||||
|
||||
@callback
|
||||
def _async_cancel_event_listener_debouncer(self) -> None:
|
||||
"""Cancel and clear the event listener debouncer."""
|
||||
if self._event_listener_debouncer:
|
||||
self._event_listener_debouncer.async_cancel()
|
||||
self._event_listener_debouncer = None
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity will be removed from hass.
|
||||
|
||||
@@ -633,6 +659,90 @@ class CalendarEntity(Entity):
|
||||
for unsub in self._alarm_unsubs or ():
|
||||
unsub()
|
||||
self._alarm_unsubs = None
|
||||
self._async_cancel_event_listener_debouncer()
|
||||
|
||||
@final
|
||||
@callback
|
||||
def async_subscribe_events(
|
||||
self,
|
||||
start_date: datetime.datetime,
|
||||
end_date: datetime.datetime,
|
||||
event_listener: Callable[[list[JsonValueType] | None], None],
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Subscribe to calendar event updates.
|
||||
|
||||
Called by websocket API.
|
||||
"""
|
||||
if self._event_listeners is None:
|
||||
self._event_listeners = []
|
||||
|
||||
if self._event_listener_debouncer is None:
|
||||
self._event_listener_debouncer = Debouncer(
|
||||
self.hass,
|
||||
_LOGGER,
|
||||
cooldown=EVENT_LISTENER_DEBOUNCE_COOLDOWN,
|
||||
immediate=True,
|
||||
function=self.async_update_event_listeners,
|
||||
)
|
||||
|
||||
listener_data = (start_date, end_date, event_listener)
|
||||
self._event_listeners.append(listener_data)
|
||||
|
||||
@callback
|
||||
def unsubscribe() -> None:
|
||||
if self._event_listeners:
|
||||
self._event_listeners.remove(listener_data)
|
||||
if not self._event_listeners:
|
||||
self._async_cancel_event_listener_debouncer()
|
||||
|
||||
return unsubscribe
|
||||
|
||||
@final
|
||||
@callback
|
||||
def async_update_event_listeners(self) -> None:
|
||||
"""Push updated calendar events to all listeners."""
|
||||
if not self._event_listeners:
|
||||
return
|
||||
|
||||
for start_date, end_date, listener in self._event_listeners:
|
||||
self.async_update_single_event_listener(start_date, end_date, listener)
|
||||
|
||||
@final
|
||||
@callback
|
||||
def async_update_single_event_listener(
|
||||
self,
|
||||
start_date: datetime.datetime,
|
||||
end_date: datetime.datetime,
|
||||
listener: Callable[[list[JsonValueType] | None], None],
|
||||
) -> None:
|
||||
"""Schedule an event fetch and push to a single listener."""
|
||||
self.hass.async_create_task(
|
||||
self._async_update_listener(start_date, end_date, listener)
|
||||
)
|
||||
|
||||
async def _async_update_listener(
|
||||
self,
|
||||
start_date: datetime.datetime,
|
||||
end_date: datetime.datetime,
|
||||
listener: Callable[[list[JsonValueType] | None], None],
|
||||
) -> None:
|
||||
"""Fetch events and push to a single listener."""
|
||||
try:
|
||||
events = await self.async_get_events(self.hass, start_date, end_date)
|
||||
except HomeAssistantError as err:
|
||||
_LOGGER.debug(
|
||||
"Error fetching calendar events for %s: %s",
|
||||
self.entity_id,
|
||||
err,
|
||||
)
|
||||
listener(None)
|
||||
return
|
||||
|
||||
event_list: list[JsonValueType] = [
|
||||
dataclasses.asdict(event, dict_factory=_list_events_dict_factory)
|
||||
for event in events
|
||||
]
|
||||
listener(event_list)
|
||||
|
||||
async def async_get_events(
|
||||
self,
|
||||
@@ -867,6 +977,65 @@ async def handle_calendar_event_update(
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "calendar/event/subscribe",
|
||||
vol.Required("entity_id"): cv.entity_domain(DOMAIN),
|
||||
vol.Required("start"): cv.datetime,
|
||||
vol.Required("end"): cv.datetime,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
async def handle_calendar_event_subscribe(
|
||||
hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
|
||||
) -> None:
|
||||
"""Subscribe to calendar event updates."""
|
||||
entity_id: str = msg["entity_id"]
|
||||
|
||||
if not (entity := hass.data[DATA_COMPONENT].get_entity(entity_id)):
|
||||
connection.send_error(
|
||||
msg["id"],
|
||||
ERR_NOT_FOUND,
|
||||
f"Calendar entity not found: {entity_id}",
|
||||
)
|
||||
return
|
||||
|
||||
start_date = dt_util.as_local(msg["start"])
|
||||
end_date = dt_util.as_local(msg["end"])
|
||||
|
||||
if start_date >= end_date:
|
||||
connection.send_error(
|
||||
msg["id"],
|
||||
ERR_INVALID_FORMAT,
|
||||
"Start must be before end",
|
||||
)
|
||||
return
|
||||
|
||||
subscription_id = msg["id"]
|
||||
|
||||
@callback
|
||||
def event_listener(events: list[JsonValueType] | None) -> None:
|
||||
"""Push updated calendar events to websocket."""
|
||||
if subscription_id not in connection.subscriptions:
|
||||
return
|
||||
connection.send_message(
|
||||
websocket_api.event_message(
|
||||
subscription_id,
|
||||
{
|
||||
"events": events,
|
||||
},
|
||||
)
|
||||
)
|
||||
|
||||
connection.subscriptions[subscription_id] = entity.async_subscribe_events(
|
||||
start_date, end_date, event_listener
|
||||
)
|
||||
connection.send_result(subscription_id)
|
||||
|
||||
# Push initial events only to the new subscriber
|
||||
entity.async_update_single_event_listener(start_date, end_date, event_listener)
|
||||
|
||||
|
||||
def _validate_timespan(
|
||||
values: dict[str, Any],
|
||||
) -> tuple[datetime.datetime | datetime.date, datetime.datetime | datetime.date]:
|
||||
|
||||
@@ -51,18 +51,24 @@ rules:
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: todo
|
||||
dynamic-devices: todo
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: Each config entry represents a single device.
|
||||
entity-category: done
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: todo
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations: done
|
||||
icon-translations: done
|
||||
reconfiguration-flow: todo
|
||||
reconfiguration-flow:
|
||||
status: exempt
|
||||
comment: No user-configurable settings in the configuration flow.
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: Integration does not register repair issues.
|
||||
stale-devices: todo
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: Each config entry represents a single device.
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from pycasperglow import GlowState
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
@@ -13,6 +15,8 @@ from homeassistant.const import PERCENTAGE, EntityCategory
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.util.dt import utcnow
|
||||
from homeassistant.util.variance import ignore_variance
|
||||
|
||||
from .coordinator import CasperGlowConfigEntry, CasperGlowCoordinator
|
||||
from .entity import CasperGlowEntity
|
||||
@@ -26,7 +30,12 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the sensor platform for Casper Glow."""
|
||||
async_add_entities([CasperGlowBatterySensor(entry.runtime_data)])
|
||||
async_add_entities(
|
||||
[
|
||||
CasperGlowBatterySensor(entry.runtime_data),
|
||||
CasperGlowDimmingEndTimeSensor(entry.runtime_data),
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class CasperGlowBatterySensor(CasperGlowEntity, SensorEntity):
|
||||
@@ -59,3 +68,67 @@ class CasperGlowBatterySensor(CasperGlowEntity, SensorEntity):
|
||||
if new_value != self._attr_native_value:
|
||||
self._attr_native_value = new_value
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
class CasperGlowDimmingEndTimeSensor(CasperGlowEntity, SensorEntity):
|
||||
"""Sensor entity for Casper Glow dimming end time."""
|
||||
|
||||
_attr_translation_key = "dimming_end_time"
|
||||
_attr_device_class = SensorDeviceClass.TIMESTAMP
|
||||
_attr_entity_registry_enabled_default = False
|
||||
|
||||
def __init__(self, coordinator: CasperGlowCoordinator) -> None:
|
||||
"""Initialize the dimming end time sensor."""
|
||||
super().__init__(coordinator)
|
||||
self._attr_unique_id = (
|
||||
f"{format_mac(coordinator.device.address)}_dimming_end_time"
|
||||
)
|
||||
self._is_paused = False
|
||||
self._projected_end_time = ignore_variance(
|
||||
self._calculate_end_time,
|
||||
timedelta(minutes=1, seconds=30),
|
||||
)
|
||||
self._update_from_state(coordinator.device.state)
|
||||
|
||||
@staticmethod
|
||||
def _calculate_end_time(remaining_ms: int) -> datetime:
|
||||
"""Calculate projected dimming end time from remaining milliseconds."""
|
||||
return utcnow() + timedelta(milliseconds=remaining_ms)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register state update callback when entity is added."""
|
||||
await super().async_added_to_hass()
|
||||
self.async_on_remove(
|
||||
self._device.register_callback(self._async_handle_state_update)
|
||||
)
|
||||
|
||||
def _reset_projected_end_time(self) -> None:
|
||||
"""Clear the projected end time and reset the variance filter."""
|
||||
self._attr_native_value = None
|
||||
self._projected_end_time = ignore_variance(
|
||||
self._calculate_end_time,
|
||||
timedelta(minutes=1, seconds=30),
|
||||
)
|
||||
|
||||
@callback
|
||||
def _update_from_state(self, state: GlowState) -> None:
|
||||
"""Update entity attributes from device state."""
|
||||
if state.is_paused is not None:
|
||||
self._is_paused = state.is_paused
|
||||
|
||||
if self._is_paused:
|
||||
self._reset_projected_end_time()
|
||||
return
|
||||
|
||||
remaining_ms = state.dimming_time_remaining_ms
|
||||
if not remaining_ms:
|
||||
if remaining_ms == 0 or state.is_on is False:
|
||||
self._reset_projected_end_time()
|
||||
return
|
||||
self._attr_native_value = self._projected_end_time(remaining_ms)
|
||||
|
||||
@callback
|
||||
def _async_handle_state_update(self, state: GlowState) -> None:
|
||||
"""Handle a state update from the device."""
|
||||
self._update_from_state(state)
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -44,6 +44,11 @@
|
||||
"dimming_time": {
|
||||
"name": "Dimming time"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"dimming_end_time": {
|
||||
"name": "Dimming end time"
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
|
||||
@@ -44,10 +44,10 @@
|
||||
},
|
||||
"services": {
|
||||
"show_lovelace_view": {
|
||||
"description": "Shows a dashboard view on a Chromecast device.",
|
||||
"description": "Shows a dashboard view on a Google Cast device.",
|
||||
"fields": {
|
||||
"dashboard_path": {
|
||||
"description": "The URL path of the dashboard to show, defaults to lovelace if not specified.",
|
||||
"description": "The URL path of the dashboard to show, defaults to `lovelace` if not specified.",
|
||||
"name": "Dashboard path"
|
||||
},
|
||||
"entity_id": {
|
||||
@@ -59,7 +59,7 @@
|
||||
"name": "View path"
|
||||
}
|
||||
},
|
||||
"name": "Show dashboard view"
|
||||
"name": "Show dashboard view via Google Cast"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,25 +6,17 @@ rules:
|
||||
appropriate-polling:
|
||||
status: exempt
|
||||
comment: The integration uses a push-based mechanism with a background sync task, not polling.
|
||||
brands:
|
||||
status: done
|
||||
common-modules:
|
||||
status: done
|
||||
config-flow-test-coverage:
|
||||
status: done
|
||||
config-flow:
|
||||
status: done
|
||||
dependency-transparency:
|
||||
status: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: The integration does not expose any custom service actions.
|
||||
docs-high-level-description:
|
||||
status: done
|
||||
docs-installation-instructions:
|
||||
status: done
|
||||
docs-removal-instructions:
|
||||
status: done
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: This integration does not create its own entities.
|
||||
@@ -34,40 +26,30 @@ rules:
|
||||
has-entity-name:
|
||||
status: exempt
|
||||
comment: This integration does not create its own entities.
|
||||
runtime-data:
|
||||
status: done
|
||||
test-before-configure:
|
||||
status: done
|
||||
test-before-setup:
|
||||
status: done
|
||||
unique-config-entry:
|
||||
status: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: The integration does not expose any custom service actions.
|
||||
config-entry-unloading:
|
||||
status: done
|
||||
docs-configuration-parameters:
|
||||
status: done
|
||||
docs-installation-parameters:
|
||||
status: done
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: done
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable:
|
||||
status: exempt
|
||||
comment: This integration does not create its own entities.
|
||||
integration-owner:
|
||||
status: done
|
||||
integration-owner: done
|
||||
log-when-unavailable:
|
||||
status: done
|
||||
comment: The integration logs a single message when the EnergyID service is unavailable.
|
||||
parallel-updates:
|
||||
status: exempt
|
||||
comment: This integration does not create its own entities.
|
||||
reauthentication-flow:
|
||||
status: done
|
||||
test-coverage:
|
||||
status: done
|
||||
reauthentication-flow: done
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices:
|
||||
@@ -82,21 +64,15 @@ rules:
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: No discovery mechanism is used.
|
||||
docs-data-update:
|
||||
status: done
|
||||
docs-examples:
|
||||
status: done
|
||||
docs-known-limitations:
|
||||
status: done
|
||||
docs-data-update: done
|
||||
docs-examples: done
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices:
|
||||
status: exempt
|
||||
comment: This is a service integration not tied to specific device models.
|
||||
docs-supported-functions:
|
||||
status: done
|
||||
docs-troubleshooting:
|
||||
status: done
|
||||
docs-use-cases:
|
||||
status: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: done
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: The integration creates a single device entry for the service connection.
|
||||
@@ -112,8 +88,7 @@ rules:
|
||||
entity-translations:
|
||||
status: exempt
|
||||
comment: This integration does not create its own entities.
|
||||
exception-translations:
|
||||
status: done
|
||||
exception-translations: done
|
||||
icon-translations:
|
||||
status: exempt
|
||||
comment: This integration does not create its own entities.
|
||||
@@ -128,10 +103,8 @@ rules:
|
||||
comment: Creates a single service device entry tied to the config entry.
|
||||
|
||||
# Platinum
|
||||
async-dependency:
|
||||
status: done
|
||||
inject-websession:
|
||||
status: done
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing:
|
||||
status: todo
|
||||
comment: Full strict typing compliance will be addressed in a future update.
|
||||
|
||||
@@ -19,7 +19,7 @@ from pyfirefly.models import Account, Bill, Budget, Category, Currency
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers.aiohttp_client import async_create_clientsession
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
@@ -79,13 +79,13 @@ class FireflyDataUpdateCoordinator(DataUpdateCoordinator[FireflyCoordinatorData]
|
||||
translation_placeholders={"error": repr(err)},
|
||||
) from err
|
||||
except FireflyConnectionError as err:
|
||||
raise ConfigEntryNotReady(
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="cannot_connect",
|
||||
translation_placeholders={"error": repr(err)},
|
||||
) from err
|
||||
except FireflyTimeoutError as err:
|
||||
raise ConfigEntryNotReady(
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="timeout_connect",
|
||||
translation_placeholders={"error": repr(err)},
|
||||
|
||||
@@ -7,13 +7,14 @@ from typing import Any, cast
|
||||
|
||||
from fitbit import Fitbit
|
||||
from fitbit.exceptions import HTTPException, HTTPUnauthorized
|
||||
from fitbit_web_api import ApiClient, Configuration, DevicesApi
|
||||
from fitbit_web_api import ApiClient, Configuration, DevicesApi, UserApi
|
||||
from fitbit_web_api.exceptions import (
|
||||
ApiException,
|
||||
OpenApiException,
|
||||
UnauthorizedException,
|
||||
)
|
||||
from fitbit_web_api.models.device import Device
|
||||
from fitbit_web_api.models.user import User
|
||||
from requests.exceptions import ConnectionError as RequestsConnectionError
|
||||
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN
|
||||
@@ -24,7 +25,6 @@ from homeassistant.util.unit_system import METRIC_SYSTEM
|
||||
|
||||
from .const import FitbitUnitSystem
|
||||
from .exceptions import FitbitApiException, FitbitAuthException
|
||||
from .model import FitbitProfile
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -46,7 +46,7 @@ class FitbitApi(ABC):
|
||||
) -> None:
|
||||
"""Initialize Fitbit auth."""
|
||||
self._hass = hass
|
||||
self._profile: FitbitProfile | None = None
|
||||
self._profile: User | None = None
|
||||
self._unit_system = unit_system
|
||||
|
||||
@abstractmethod
|
||||
@@ -74,18 +74,16 @@ class FitbitApi(ABC):
|
||||
configuration.access_token = token[CONF_ACCESS_TOKEN]
|
||||
return await self._hass.async_add_executor_job(ApiClient, configuration)
|
||||
|
||||
async def async_get_user_profile(self) -> FitbitProfile:
|
||||
async def async_get_user_profile(self) -> User:
|
||||
"""Return the user profile from the API."""
|
||||
if self._profile is None:
|
||||
client = await self._async_get_client()
|
||||
response: dict[str, Any] = await self._run(client.user_profile_get)
|
||||
_LOGGER.debug("user_profile_get=%s", response)
|
||||
profile = response["user"]
|
||||
self._profile = FitbitProfile(
|
||||
encoded_id=profile["encodedId"],
|
||||
display_name=profile["displayName"],
|
||||
locale=profile.get("locale"),
|
||||
)
|
||||
client = await self._async_get_fitbit_web_api()
|
||||
api = UserApi(client)
|
||||
api_response = await self._run_async(api.get_profile)
|
||||
if not api_response.user:
|
||||
raise FitbitApiException("No user profile returned from fitbit API")
|
||||
_LOGGER.debug("user_profile_get=%s", api_response.to_dict())
|
||||
self._profile = api_response.user
|
||||
return self._profile
|
||||
|
||||
async def async_get_unit_system(self) -> FitbitUnitSystem:
|
||||
|
||||
@@ -85,4 +85,6 @@ class OAuth2FlowHandler(
|
||||
)
|
||||
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(title=profile.display_name, data=data)
|
||||
return self.async_create_entry(
|
||||
title=profile.display_name or "Fitbit", data=data
|
||||
)
|
||||
|
||||
@@ -7,20 +7,6 @@ from typing import Any
|
||||
from .const import CONF_CLOCK_FORMAT, CONF_MONITORED_RESOURCES, FitbitScope
|
||||
|
||||
|
||||
@dataclass
|
||||
class FitbitProfile:
|
||||
"""User profile from the Fitbit API response."""
|
||||
|
||||
encoded_id: str
|
||||
"""The ID representing the Fitbit user."""
|
||||
|
||||
display_name: str
|
||||
"""The name shown when the user's friends look at their Fitbit profile."""
|
||||
|
||||
locale: str | None
|
||||
"""The locale defined in the user's Fitbit account settings."""
|
||||
|
||||
|
||||
@dataclass
|
||||
class FitbitConfig:
|
||||
"""Information from the fitbit ConfigEntry data."""
|
||||
|
||||
@@ -25,6 +25,7 @@ from homeassistant.const import (
|
||||
UnitOfVolume,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.icon import icon_for_battery_level
|
||||
@@ -536,6 +537,8 @@ async def async_setup_entry(
|
||||
# These are run serially to reuse the cached user profile, not gathered
|
||||
# to avoid two racing requests.
|
||||
user_profile = await api.async_get_user_profile()
|
||||
if user_profile.encoded_id is None:
|
||||
raise ConfigEntryNotReady("Could not get user profile")
|
||||
unit_system = await api.async_get_unit_system()
|
||||
|
||||
fitbit_config = config_from_entry_data(entry.data)
|
||||
|
||||
@@ -80,6 +80,5 @@ FRITZ_EXCEPTIONS = (
|
||||
|
||||
FRITZ_AUTH_EXCEPTIONS = (FritzAuthorizationError, FritzSecurityError)
|
||||
|
||||
WIFI_STANDARD = {1: "2.4Ghz", 2: "5Ghz", 3: "5Ghz", 4: "Guest"}
|
||||
|
||||
CONNECTION_TYPE_LAN = "LAN"
|
||||
|
||||
@@ -379,6 +379,8 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
|
||||
"""Return device Mac address."""
|
||||
if not self._unique_id:
|
||||
raise ClassSetupMissing
|
||||
# Unique ID is the serial number of the device
|
||||
# which is the MAC of the device without the colons
|
||||
return dr.format_mac(self._unique_id)
|
||||
|
||||
@property
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["fritzconnection"],
|
||||
"quality_scale": "silver",
|
||||
"quality_scale": "gold",
|
||||
"requirements": ["fritzconnection[qr]==1.15.1", "xmltodict==1.0.2"],
|
||||
"ssdp": [
|
||||
{
|
||||
|
||||
@@ -169,6 +169,18 @@
|
||||
"switch": {
|
||||
"internet_access": {
|
||||
"name": "Internet access"
|
||||
},
|
||||
"wi_fi_guest": {
|
||||
"name": "Guest"
|
||||
},
|
||||
"wi_fi_main_2_4ghz": {
|
||||
"name": "Main 2.4 GHz"
|
||||
},
|
||||
"wi_fi_main_5ghz": {
|
||||
"name": "Main 5 GHz"
|
||||
},
|
||||
"wi_fi_main_5ghz_high_6ghz": {
|
||||
"name": "Main 5 GHz High / 6 GHz"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -9,6 +9,7 @@ from homeassistant.components.network import async_get_source_ip
|
||||
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity import Entity
|
||||
@@ -22,8 +23,8 @@ from .const import (
|
||||
SWITCH_TYPE_PORTFORWARD,
|
||||
SWITCH_TYPE_PROFILE,
|
||||
SWITCH_TYPE_WIFINETWORK,
|
||||
WIFI_STANDARD,
|
||||
MeshRoles,
|
||||
Platform,
|
||||
)
|
||||
from .coordinator import FRITZ_DATA_KEY, AvmWrapper, FritzConfigEntry, FritzData
|
||||
from .entity import FritzBoxBaseEntity
|
||||
@@ -35,6 +36,101 @@ _LOGGER = logging.getLogger(__name__)
|
||||
# Set a sane value to avoid too many updates
|
||||
PARALLEL_UPDATES = 5
|
||||
|
||||
WIFI_STANDARD = {1: "2.4Ghz", 2: "5Ghz", 3: "5Ghz", 4: "Guest"}
|
||||
|
||||
WIFI_BAND = {
|
||||
0: {"band": "2.4Ghz"},
|
||||
1: {"band": "5Ghz"},
|
||||
3: {"band": "5Ghz High / 6Ghz"},
|
||||
}
|
||||
|
||||
|
||||
def _wifi_naming(
|
||||
network_info: dict[str, Any], wifi_index: int, wifi_count: int
|
||||
) -> str | None:
|
||||
"""Return a friendly name for a Wi-Fi network."""
|
||||
|
||||
if wifi_index == 2 and wifi_count == 4:
|
||||
# In case of 4 Wi-Fi networks, the 2nd one is used for internal communication
|
||||
# between mesh devices and should not be named like the others to avoid confusion
|
||||
return None
|
||||
|
||||
if (wifi_index + 1) == wifi_count:
|
||||
# Last Wi-Fi network in the guest network, both bands available
|
||||
return "Guest"
|
||||
|
||||
# Cast to correct type for type checker
|
||||
if (result := WIFI_BAND.get(wifi_index)) is not None:
|
||||
return f"Main {result['band']}"
|
||||
|
||||
return None
|
||||
|
||||
|
||||
async def _get_wifi_networks_list(avm_wrapper: AvmWrapper) -> dict[int, dict[str, Any]]:
|
||||
"""Get a list of wifi networks with friendly names."""
|
||||
wifi_count = len(
|
||||
[
|
||||
s
|
||||
for s in avm_wrapper.connection.services
|
||||
if s.startswith("WLANConfiguration")
|
||||
]
|
||||
)
|
||||
_LOGGER.debug("WiFi networks count: %s", wifi_count)
|
||||
networks: dict[int, dict[str, Any]] = {}
|
||||
for i in range(1, wifi_count + 1):
|
||||
network_info = await avm_wrapper.async_get_wlan_configuration(i)
|
||||
if (switch_name := _wifi_naming(network_info, i - 1, wifi_count)) is None:
|
||||
continue
|
||||
networks[i] = network_info
|
||||
networks[i]["switch_name"] = switch_name
|
||||
|
||||
_LOGGER.debug("WiFi networks list: %s", networks)
|
||||
return networks
|
||||
|
||||
|
||||
async def _migrate_to_new_unique_id(
|
||||
hass: HomeAssistant, avm_wrapper: AvmWrapper
|
||||
) -> None:
|
||||
"""Migrate old unique ids to new unique ids."""
|
||||
|
||||
_LOGGER.debug("Migrating Wi-Fi switches")
|
||||
entity_registry = er.async_get(hass)
|
||||
|
||||
networks = await _get_wifi_networks_list(avm_wrapper)
|
||||
for index, network in networks.items():
|
||||
description = f"Wi-Fi {network['NewSSID']}"
|
||||
if (
|
||||
len(
|
||||
[
|
||||
j
|
||||
for j, n in networks.items()
|
||||
if slugify(n["NewSSID"]) == slugify(network["NewSSID"])
|
||||
]
|
||||
)
|
||||
> 1
|
||||
):
|
||||
description += f" ({WIFI_STANDARD[index]})"
|
||||
|
||||
old_unique_id = f"{avm_wrapper.unique_id}-{slugify(description)}"
|
||||
new_unique_id = f"{avm_wrapper.unique_id}-wi_fi_{slugify(_wifi_naming(network, index - 1, len(networks)))}"
|
||||
|
||||
entity_id = entity_registry.async_get_entity_id(
|
||||
Platform.SWITCH, DOMAIN, old_unique_id
|
||||
)
|
||||
|
||||
if entity_id is not None:
|
||||
entity_registry.async_update_entity(
|
||||
entity_id,
|
||||
new_unique_id=new_unique_id,
|
||||
)
|
||||
_LOGGER.debug(
|
||||
"Migrating Wi-FI switch unique_id from [%s] to [%s]",
|
||||
old_unique_id,
|
||||
new_unique_id,
|
||||
)
|
||||
|
||||
_LOGGER.debug("Migration completed")
|
||||
|
||||
|
||||
async def _async_deflection_entities_list(
|
||||
avm_wrapper: AvmWrapper, device_friendly_name: str
|
||||
@@ -125,35 +221,7 @@ async def _async_wifi_entities_list(
|
||||
#
|
||||
# https://avm.de/fileadmin/user_upload/Global/Service/Schnittstellen/wlanconfigSCPD.pdf
|
||||
#
|
||||
wifi_count = len(
|
||||
[
|
||||
s
|
||||
for s in avm_wrapper.connection.services
|
||||
if s.startswith("WLANConfiguration")
|
||||
]
|
||||
)
|
||||
_LOGGER.debug("WiFi networks count: %s", wifi_count)
|
||||
networks: dict[int, dict[str, Any]] = {}
|
||||
for i in range(1, wifi_count + 1):
|
||||
network_info = await avm_wrapper.async_get_wlan_configuration(i)
|
||||
# Devices with 4 WLAN services, use the 2nd for internal communications
|
||||
if not (wifi_count == 4 and i == 2):
|
||||
networks[i] = network_info
|
||||
for i, network in networks.copy().items():
|
||||
networks[i]["switch_name"] = network["NewSSID"]
|
||||
if (
|
||||
len(
|
||||
[
|
||||
j
|
||||
for j, n in networks.items()
|
||||
if slugify(n["NewSSID"]) == slugify(network["NewSSID"])
|
||||
]
|
||||
)
|
||||
> 1
|
||||
):
|
||||
networks[i]["switch_name"] += f" ({WIFI_STANDARD[i]})"
|
||||
|
||||
_LOGGER.debug("WiFi networks list: %s", networks)
|
||||
networks = await _get_wifi_networks_list(avm_wrapper)
|
||||
return [
|
||||
FritzBoxWifiSwitch(avm_wrapper, device_friendly_name, index, data)
|
||||
for index, data in networks.items()
|
||||
@@ -225,6 +293,8 @@ async def async_setup_entry(
|
||||
|
||||
local_ip = await async_get_source_ip(avm_wrapper.hass, target_ip=avm_wrapper.host)
|
||||
|
||||
await _migrate_to_new_unique_id(hass, avm_wrapper)
|
||||
|
||||
entities_list = await async_all_entities_list(
|
||||
avm_wrapper,
|
||||
entry.title,
|
||||
@@ -554,8 +624,11 @@ class FritzBoxWifiSwitch(FritzBoxBaseSwitch):
|
||||
)
|
||||
self._network_num = network_num
|
||||
|
||||
description = f"Wi-Fi {network_data['switch_name']}"
|
||||
self._attr_translation_key = slugify(description)
|
||||
|
||||
switch_info = SwitchInfo(
|
||||
description=f"Wi-Fi {network_data['switch_name']}",
|
||||
description=description,
|
||||
friendly_name=device_friendly_name,
|
||||
icon="mdi:wifi",
|
||||
type=SWITCH_TYPE_WIFINETWORK,
|
||||
|
||||
@@ -38,6 +38,7 @@ PLATFORMS: list[Platform] = [
|
||||
Platform.SELECT,
|
||||
Platform.SENSOR,
|
||||
Platform.SWITCH,
|
||||
Platform.TEXT,
|
||||
Platform.VALVE,
|
||||
]
|
||||
LOGGER = logging.getLogger(__name__)
|
||||
|
||||
12
homeassistant/components/gardena_bluetooth/icons.json
Normal file
12
homeassistant/components/gardena_bluetooth/icons.json
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"entity": {
|
||||
"text": {
|
||||
"contour_name": {
|
||||
"default": "mdi:vector-polygon"
|
||||
},
|
||||
"position_name": {
|
||||
"default": "mdi:map-marker-radius"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -154,6 +154,14 @@
|
||||
"state": {
|
||||
"name": "[%key:common::state::open%]"
|
||||
}
|
||||
},
|
||||
"text": {
|
||||
"contour_name": {
|
||||
"name": "Contour {number}"
|
||||
},
|
||||
"position_name": {
|
||||
"name": "Position {number}"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
88
homeassistant/components/gardena_bluetooth/text.py
Normal file
88
homeassistant/components/gardena_bluetooth/text.py
Normal file
@@ -0,0 +1,88 @@
|
||||
"""Support for text entities."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from gardena_bluetooth.const import AquaContourContours, AquaContourPosition
|
||||
from gardena_bluetooth.parse import CharacteristicNullString
|
||||
|
||||
from homeassistant.components.text import TextEntity, TextEntityDescription
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import GardenaBluetoothConfigEntry
|
||||
from .entity import GardenaBluetoothDescriptorEntity
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
class GardenaBluetoothTextEntityDescription(TextEntityDescription):
    """Description of entity."""

    # Bluetooth characteristic backing this text entity (read/write target).
    char: CharacteristicNullString

    @property
    def context(self) -> set[str]:
        """Context needed for update coordinator."""
        # The coordinator polls only the characteristics listed here.
        return {self.char.uuid}
|
||||
|
||||
|
||||
# Five AquaContour "position" and five "contour" name characteristics; setup
# only creates entities for those actually present on the device.
DESCRIPTIONS = (
    *(
        GardenaBluetoothTextEntityDescription(
            key=f"position_{i}_name",
            translation_key="position_name",
            translation_placeholders={"number": str(i)},
            has_entity_name=True,
            # Resolve position_name_1 .. position_name_5 attributes by name.
            char=getattr(AquaContourPosition, f"position_name_{i}"),
            native_max=20,
            entity_category=EntityCategory.CONFIG,
        )
        for i in range(1, 6)
    ),
    *(
        GardenaBluetoothTextEntityDescription(
            key=f"contour_{i}_name",
            translation_key="contour_name",
            translation_placeholders={"number": str(i)},
            has_entity_name=True,
            # Resolve contour_name_1 .. contour_name_5 attributes by name.
            char=getattr(AquaContourContours, f"contour_name_{i}"),
            native_max=20,
            entity_category=EntityCategory.CONFIG,
        )
        for i in range(1, 6)
    ),
)
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistant,
    entry: GardenaBluetoothConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up text based on a config entry."""
    coordinator = entry.runtime_data
    entities: list[GardenaBluetoothTextEntity] = []
    for description in DESCRIPTIONS:
        # Skip descriptions whose characteristic the device does not expose.
        if description.char.unique_id not in coordinator.characteristics:
            continue
        entities.append(
            GardenaBluetoothTextEntity(coordinator, description, description.context)
        )
    async_add_entities(entities)
|
||||
|
||||
|
||||
class GardenaBluetoothTextEntity(GardenaBluetoothDescriptorEntity, TextEntity):
    """Representation of a text entity."""

    entity_description: GardenaBluetoothTextEntityDescription

    @property
    def native_value(self) -> str | None:
        """Return the value reported by the text."""
        # Served from the coordinator's cache; None until a value was read.
        char = self.entity_description.char
        return self.coordinator.get_cached(char)

    async def async_set_value(self, value: str) -> None:
        """Change the text."""
        # Write straight through to the backing Bluetooth characteristic.
        char = self.entity_description.char
        await self.coordinator.write(char, value)
|
||||
@@ -111,7 +111,7 @@
|
||||
"name": "Add event"
|
||||
},
|
||||
"create_event": {
|
||||
"description": "Adds a new calendar event.",
|
||||
"description": "Adds a new event to a Google calendar.",
|
||||
"fields": {
|
||||
"description": {
|
||||
"description": "[%key:component::google::services::add_event::fields::description::description%]",
|
||||
@@ -146,7 +146,7 @@
|
||||
"name": "Summary"
|
||||
}
|
||||
},
|
||||
"name": "Create event"
|
||||
"name": "Create event in Google Calendar"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -94,7 +94,7 @@
|
||||
"name": "Filename"
|
||||
}
|
||||
},
|
||||
"name": "Upload media"
|
||||
"name": "Upload media to Google Photos"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,8 +5,7 @@ rules:
|
||||
comment: The integration registers no actions.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules:
|
||||
status: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
@@ -47,8 +46,7 @@ rules:
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices:
|
||||
status: done
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
|
||||
@@ -4,8 +4,7 @@ rules:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration doesn't add actions.
|
||||
appropriate-polling:
|
||||
status: done
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
|
||||
@@ -5,5 +5,5 @@
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/holiday",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["holidays==0.93", "babel==2.15.0"]
|
||||
"requirements": ["holidays==0.94", "babel==2.15.0"]
|
||||
}
|
||||
|
||||
@@ -77,7 +77,12 @@ AFFECTS_TO_SELECTED_PROGRAM = "selected_program"
|
||||
TRANSLATION_KEYS_PROGRAMS_MAP = {
|
||||
bsh_key_to_translation_key(program.value): program
|
||||
for program in ProgramKey
|
||||
if program not in (ProgramKey.UNKNOWN, ProgramKey.BSH_COMMON_FAVORITE_001)
|
||||
if program
|
||||
not in (
|
||||
ProgramKey.UNKNOWN,
|
||||
ProgramKey.BSH_COMMON_FAVORITE_001,
|
||||
ProgramKey.BSH_COMMON_FAVORITE_002,
|
||||
)
|
||||
}
|
||||
|
||||
PROGRAMS_TRANSLATION_KEYS_MAP = {
|
||||
|
||||
@@ -533,7 +533,11 @@ class HomeConnectApplianceCoordinator(DataUpdateCoordinator[HomeConnectAppliance
|
||||
current_program_key = program.key
|
||||
program_options = program.options
|
||||
if (
|
||||
current_program_key == ProgramKey.BSH_COMMON_FAVORITE_001
|
||||
current_program_key
|
||||
in (
|
||||
ProgramKey.BSH_COMMON_FAVORITE_001,
|
||||
ProgramKey.BSH_COMMON_FAVORITE_002,
|
||||
)
|
||||
and program_options
|
||||
):
|
||||
# The API doesn't allow to fetch the options from the favorite program.
|
||||
@@ -616,7 +620,11 @@ class HomeConnectApplianceCoordinator(DataUpdateCoordinator[HomeConnectAppliance
|
||||
options_to_notify = options.copy()
|
||||
options.clear()
|
||||
if (
|
||||
program_key == ProgramKey.BSH_COMMON_FAVORITE_001
|
||||
program_key
|
||||
in (
|
||||
ProgramKey.BSH_COMMON_FAVORITE_001,
|
||||
ProgramKey.BSH_COMMON_FAVORITE_002,
|
||||
)
|
||||
and (event := events.get(EventKey.BSH_COMMON_OPTION_BASE_PROGRAM))
|
||||
and isinstance(event.value, str)
|
||||
):
|
||||
|
||||
@@ -23,6 +23,6 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["aiohomeconnect"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aiohomeconnect==0.33.0"],
|
||||
"requirements": ["aiohomeconnect==0.34.0"],
|
||||
"zeroconf": ["_homeconnect._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -436,7 +436,11 @@ class HomeConnectProgramSelectEntity(HomeConnectEntity, SelectEntity):
|
||||
else None
|
||||
)
|
||||
if (
|
||||
program_key == ProgramKey.BSH_COMMON_FAVORITE_001
|
||||
program_key
|
||||
in (
|
||||
ProgramKey.BSH_COMMON_FAVORITE_001,
|
||||
ProgramKey.BSH_COMMON_FAVORITE_002,
|
||||
)
|
||||
and (
|
||||
base_program_event := self.appliance.events.get(
|
||||
EventKey.BSH_COMMON_OPTION_BASE_PROGRAM
|
||||
|
||||
@@ -57,6 +57,7 @@ BSH_PROGRAM_SENSORS = (
|
||||
"CookProcessor",
|
||||
"Dishwasher",
|
||||
"Dryer",
|
||||
"Microwave",
|
||||
"Hood",
|
||||
"Oven",
|
||||
"Washer",
|
||||
@@ -198,7 +199,7 @@ EVENT_SENSORS = (
|
||||
options=EVENT_OPTIONS,
|
||||
default_value="off",
|
||||
translation_key="program_aborted",
|
||||
appliance_types=("Dishwasher", "CleaningRobot", "CookProcessor"),
|
||||
appliance_types=("Dishwasher", "Microwave", "CleaningRobot", "CookProcessor"),
|
||||
),
|
||||
HomeConnectSensorEntityDescription(
|
||||
key=EventKey.BSH_COMMON_EVENT_PROGRAM_FINISHED,
|
||||
@@ -211,6 +212,7 @@ EVENT_SENSORS = (
|
||||
"Dishwasher",
|
||||
"Washer",
|
||||
"Dryer",
|
||||
"Microwave",
|
||||
"WasherDryer",
|
||||
"CleaningRobot",
|
||||
"CookProcessor",
|
||||
@@ -599,8 +601,6 @@ class HomeConnectSensor(HomeConnectEntity, SensorEntity):
|
||||
class HomeConnectProgramSensor(HomeConnectSensor):
|
||||
"""Sensor class for Home Connect sensors that reports information related to the running program."""
|
||||
|
||||
program_running: bool = False
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register listener."""
|
||||
await super().async_added_to_hass()
|
||||
@@ -614,18 +614,21 @@ class HomeConnectProgramSensor(HomeConnectSensor):
|
||||
@callback
|
||||
def _handle_operation_state_event(self) -> None:
|
||||
"""Update status when an event for the entity is received."""
|
||||
self.program_running = (
|
||||
status := self.appliance.status.get(StatusKey.BSH_COMMON_OPERATION_STATE)
|
||||
) is not None and status.value in [
|
||||
BSH_OPERATION_STATE_RUN,
|
||||
BSH_OPERATION_STATE_PAUSE,
|
||||
BSH_OPERATION_STATE_FINISHED,
|
||||
]
|
||||
if not self.program_running:
|
||||
# reset the value when the program is not running, paused or finished
|
||||
self._attr_native_value = None
|
||||
self.async_write_ha_state()
|
||||
|
||||
@property
|
||||
def program_running(self) -> bool:
|
||||
"""Return whether a program is running, paused or finished."""
|
||||
status = self.appliance.status.get(StatusKey.BSH_COMMON_OPERATION_STATE)
|
||||
return status is not None and status.value in [
|
||||
BSH_OPERATION_STATE_RUN,
|
||||
BSH_OPERATION_STATE_PAUSE,
|
||||
BSH_OPERATION_STATE_FINISHED,
|
||||
]
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return true if the sensor is available."""
|
||||
@@ -635,13 +638,6 @@ class HomeConnectProgramSensor(HomeConnectSensor):
|
||||
|
||||
def update_native_value(self) -> None:
|
||||
"""Update the program sensor's status."""
|
||||
self.program_running = (
|
||||
status := self.appliance.status.get(StatusKey.BSH_COMMON_OPERATION_STATE)
|
||||
) is not None and status.value in [
|
||||
BSH_OPERATION_STATE_RUN,
|
||||
BSH_OPERATION_STATE_PAUSE,
|
||||
BSH_OPERATION_STATE_FINISHED,
|
||||
]
|
||||
event = self.appliance.events.get(cast(EventKey, self.bsh_key))
|
||||
if event:
|
||||
self._update_native_value(event.value)
|
||||
|
||||
@@ -148,7 +148,7 @@
|
||||
},
|
||||
"step": {
|
||||
"init": {
|
||||
"description": "The integration `{domain}` could not be found. This happens when a (custom) integration was removed from Home Assistant, but there are still configurations for this `integration`. Please use the buttons below to either remove the previous configurations for `{domain}` or ignore this.",
|
||||
"description": "The integration `{domain}` could not be found. This happens when a (community) integration was removed from Home Assistant, but there are still configurations for this `integration`. Please use the buttons below to either remove the previous configurations for `{domain}` or ignore this.",
|
||||
"menu_options": {
|
||||
"confirm": "Remove previous configurations",
|
||||
"ignore": "Ignore"
|
||||
@@ -236,7 +236,7 @@
|
||||
"description": "Restarts Home Assistant.",
|
||||
"fields": {
|
||||
"safe_mode": {
|
||||
"description": "Disable custom integrations and custom cards.",
|
||||
"description": "Disable community integrations and community cards.",
|
||||
"name": "Safe mode"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -172,6 +172,8 @@ async def async_migrate_entry(
|
||||
f"USB device {device} is missing, cannot migrate"
|
||||
)
|
||||
|
||||
assert isinstance(usb_info, USBDevice)
|
||||
|
||||
hass.config_entries.async_update_entry(
|
||||
config_entry,
|
||||
data={
|
||||
|
||||
@@ -81,5 +81,4 @@ rules:
|
||||
inject-websession:
|
||||
status: exempt
|
||||
comment: Underlying huawei-lte-api does not use aiohttp or httpx, so this does not apply.
|
||||
strict-typing:
|
||||
status: done
|
||||
strict-typing: done
|
||||
|
||||
@@ -184,10 +184,8 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[MowerDictionary]):
|
||||
)
|
||||
|
||||
def _should_poll(self) -> bool:
|
||||
"""Return True if at least one mower is connected and at least one is not OFF."""
|
||||
return any(mower.metadata.connected for mower in self.data.values()) and any(
|
||||
mower.mower.state != MowerStates.OFF for mower in self.data.values()
|
||||
)
|
||||
"""Return True if at least one mower is not OFF."""
|
||||
return any(mower.mower.state != MowerStates.OFF for mower in self.data.values())
|
||||
|
||||
async def _pong_watchdog(self) -> None:
|
||||
"""Watchdog to check for pong messages."""
|
||||
|
||||
@@ -47,11 +47,11 @@ rules:
|
||||
discovery: todo
|
||||
discovery-update-info: todo
|
||||
docs-data-update: done
|
||||
docs-examples: todo
|
||||
docs-examples: done
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: todo
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: done
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
|
||||
@@ -12,5 +12,5 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["incomfortclient"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["incomfort-client==0.6.12"]
|
||||
"requirements": ["incomfort-client==0.7.0"]
|
||||
}
|
||||
|
||||
@@ -92,11 +92,13 @@
|
||||
"central_heating": "Central heating",
|
||||
"central_heating_low": "Central heating low",
|
||||
"central_heating_rf": "Central heating rf",
|
||||
"central_heating_wait": "Central heating waiting",
|
||||
"cv_temperature_too_high_e1": "Temperature too high",
|
||||
"flame_detection_fault_e6": "Flame detection fault",
|
||||
"frost": "Frost protection",
|
||||
"gas_valve_relay_faulty_e29": "Gas valve relay faulty",
|
||||
"gas_valve_relay_faulty_e30": "[%key:component::incomfort::entity::water_heater::boiler::state::gas_valve_relay_faulty_e29%]",
|
||||
"hp_error_recovery": "Heat pump error recovery",
|
||||
"incorrect_fan_speed_e8": "Incorrect fan speed",
|
||||
"no_flame_signal_e4": "No flame signal",
|
||||
"off": "[%key:common::state::off%]",
|
||||
@@ -120,6 +122,7 @@
|
||||
"service": "Service",
|
||||
"shortcut_outside_sensor_temperature_e27": "Shortcut outside temperature sensor",
|
||||
"standby": "[%key:common::state::standby%]",
|
||||
"starting_ch": "Starting central heating",
|
||||
"tapwater": "Tap water",
|
||||
"tapwater_int": "Tap water internal",
|
||||
"unknown": "Unknown"
|
||||
|
||||
@@ -52,20 +52,13 @@ rules:
|
||||
discovery:
|
||||
status: exempt
|
||||
comment: Integration does not support network discovery
|
||||
docs-data-update:
|
||||
status: todo
|
||||
docs-examples:
|
||||
status: todo
|
||||
docs-known-limitations:
|
||||
status: todo
|
||||
docs-supported-devices:
|
||||
status: todo
|
||||
docs-supported-functions:
|
||||
status: todo
|
||||
docs-troubleshooting:
|
||||
status: todo
|
||||
docs-use-cases:
|
||||
status: todo
|
||||
docs-data-update: todo
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: todo
|
||||
docs-supported-functions: todo
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: Integration represents a single device, not a hub with multiple devices
|
||||
@@ -73,10 +66,8 @@ rules:
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations:
|
||||
status: todo
|
||||
icon-translations:
|
||||
status: todo
|
||||
exception-translations: todo
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: done
|
||||
repair-issues:
|
||||
status: exempt
|
||||
@@ -88,5 +79,4 @@ rules:
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing:
|
||||
status: todo
|
||||
strict-typing: todo
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["jvcprojector"],
|
||||
"requirements": ["pyjvcprojector==2.0.3"]
|
||||
"requirements": ["pyjvcprojector==2.0.5"]
|
||||
}
|
||||
|
||||
@@ -20,7 +20,7 @@
|
||||
"name": "[%key:common::config_flow::data::name%]"
|
||||
}
|
||||
},
|
||||
"name": "Log"
|
||||
"name": "Log activity"
|
||||
}
|
||||
},
|
||||
"title": "Activity"
|
||||
|
||||
@@ -4,11 +4,20 @@ from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
|
||||
from pylutron import Button, Keypad, Led, Lutron, OccupancyGroup, Output
|
||||
from pylutron import (
|
||||
Button,
|
||||
Keypad,
|
||||
Led,
|
||||
Lutron,
|
||||
LutronException,
|
||||
OccupancyGroup,
|
||||
Output,
|
||||
)
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
|
||||
from .const import DOMAIN
|
||||
@@ -57,8 +66,12 @@ async def async_setup_entry(
|
||||
pwd = config_entry.data[CONF_PASSWORD]
|
||||
|
||||
lutron_client = Lutron(host, uid, pwd)
|
||||
await hass.async_add_executor_job(lutron_client.load_xml_db)
|
||||
lutron_client.connect()
|
||||
try:
|
||||
await hass.async_add_executor_job(lutron_client.load_xml_db)
|
||||
lutron_client.connect()
|
||||
except LutronException as ex:
|
||||
raise ConfigEntryNotReady(f"Failed to connect to Lutron repeater: {ex}") from ex
|
||||
|
||||
_LOGGER.debug("Connected to main repeater at %s", host)
|
||||
|
||||
entity_registry = er.async_get(hass)
|
||||
|
||||
@@ -7,6 +7,6 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pylutron"],
|
||||
"requirements": ["pylutron==0.4.0"],
|
||||
"requirements": ["pylutron==0.4.1"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -10,10 +10,12 @@ See https://modelcontextprotocol.io/docs/concepts/architecture#implementation-ex
|
||||
from collections.abc import Callable, Sequence
|
||||
import json
|
||||
import logging
|
||||
from typing import Any
|
||||
from typing import Any, cast
|
||||
|
||||
from mcp import types
|
||||
from mcp.server import Server
|
||||
from mcp.server.lowlevel.helper_types import ReadResourceContents
|
||||
from pydantic import AnyUrl
|
||||
import voluptuous as vol
|
||||
from voluptuous_openapi import convert
|
||||
|
||||
@@ -25,6 +27,16 @@ from .const import STATELESS_LLM_API
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SNAPSHOT_RESOURCE_URI = "homeassistant://assist/context-snapshot"
|
||||
SNAPSHOT_RESOURCE_URL = AnyUrl(SNAPSHOT_RESOURCE_URI)
|
||||
SNAPSHOT_RESOURCE_MIME_TYPE = "text/plain"
|
||||
LIVE_CONTEXT_TOOL_NAME = "GetLiveContext"
|
||||
|
||||
|
||||
def _has_live_context_tool(llm_api: llm.APIInstance) -> bool:
    """Return if the selected API exposes the live context tool."""
    for api_tool in llm_api.tools:
        if api_tool.name == LIVE_CONTEXT_TOOL_NAME:
            return True
    return False
|
||||
|
||||
|
||||
def _format_tool(
|
||||
tool: llm.Tool, custom_serializer: Callable[[Any], Any] | None
|
||||
@@ -90,6 +102,47 @@ async def create_server(
|
||||
],
|
||||
)
|
||||
|
||||
    @server.list_resources()  # type: ignore[no-untyped-call,untyped-decorator]
    async def handle_list_resources() -> list[types.Resource]:
        """List the Assist context-snapshot resource when available."""
        llm_api = await get_api_instance()
        if not _has_live_context_tool(llm_api):
            # Without the GetLiveContext tool there is nothing to snapshot.
            return []

        return [
            types.Resource(
                uri=SNAPSHOT_RESOURCE_URL,
                name="assist_context_snapshot",
                title="Assist context snapshot",
                description=(
                    "A snapshot of the current Assist context, matching the"
                    " existing GetLiveContext tool output."
                ),
                mimeType=SNAPSHOT_RESOURCE_MIME_TYPE,
            )
        ]
||||
|
||||
    @server.read_resource()  # type: ignore[no-untyped-call,untyped-decorator]
    async def handle_read_resource(uri: AnyUrl) -> Sequence[ReadResourceContents]:
        """Serve the context snapshot by invoking the live-context tool."""
        if str(uri) != SNAPSHOT_RESOURCE_URI:
            raise ValueError(f"Unknown resource: {uri}")

        llm_api = await get_api_instance()
        if not _has_live_context_tool(llm_api):
            # The resource is only advertised when the tool exists; report it
            # as unknown otherwise so clients get a consistent error.
            raise ValueError(f"Unknown resource: {uri}")

        tool_response = await llm_api.async_call_tool(
            llm.ToolInput(tool_name=LIVE_CONTEXT_TOOL_NAME, tool_args={})
        )
        if not tool_response.get("success"):
            raise HomeAssistantError(cast(str, tool_response["error"]))

        return [
            ReadResourceContents(
                content=cast(str, tool_response["result"]),
                mime_type=SNAPSHOT_RESOURCE_MIME_TYPE,
            )
        ]
||||
|
||||
@server.list_tools() # type: ignore[no-untyped-call,untyped-decorator]
|
||||
async def list_tools() -> list[types.Tool]:
|
||||
"""List available time tools."""
|
||||
|
||||
@@ -19,9 +19,13 @@ LIGHT = "light"
|
||||
LIGHT_ON = 1
|
||||
LIGHT_OFF = 2
|
||||
|
||||
# API "no reading" sentinels. Most temperatures use centidegrees (-32768 -> -327.68 °C).
|
||||
# Some devices report the int16 minimum already in degrees after scaling (-3276800 raw -> -32768 °C).
|
||||
DISABLED_TEMP_ENTITIES = (
|
||||
-32768 / 100,
|
||||
-32766 / 100,
|
||||
-32768.0,
|
||||
-32766.0,
|
||||
)
|
||||
|
||||
|
||||
@@ -494,7 +498,7 @@ class DishWasherProgramId(MieleEnum, missing_to_none=True):
|
||||
intensive = 1, 26, 205
|
||||
maintenance = 2, 27, 214
|
||||
eco = 3, 22, 28, 200
|
||||
automatic = 6, 7, 31, 32, 202
|
||||
automatic = 6, 7, 31, 32, 201, 202
|
||||
solar_save = 9, 34
|
||||
gentle = 10, 35, 210
|
||||
extra_quiet = 11, 36, 207
|
||||
|
||||
@@ -93,7 +93,14 @@ def _convert_temperature(
|
||||
"""Convert temperature object to readable value."""
|
||||
if index >= len(value_list):
|
||||
return None
|
||||
raw_value = cast(int, value_list[index].temperature) / 100.0
|
||||
raw = value_list[index].temperature
|
||||
if raw is None:
|
||||
return None
|
||||
try:
|
||||
raw_centi = int(raw)
|
||||
except (TypeError, ValueError):
|
||||
return None
|
||||
raw_value = raw_centi / 100.0
|
||||
if raw_value in DISABLED_TEMP_ENTITIES:
|
||||
return None
|
||||
return raw_value
|
||||
@@ -639,6 +646,7 @@ SENSOR_TYPES: Final[tuple[MieleSensorDefinition[MieleDevice], ...]] = (
|
||||
MieleAppliance.OVEN,
|
||||
MieleAppliance.OVEN_MICROWAVE,
|
||||
MieleAppliance.STEAM_OVEN_COMBI,
|
||||
MieleAppliance.STEAM_OVEN_MK2,
|
||||
),
|
||||
description=MieleSensorDescription(
|
||||
key="state_core_temperature",
|
||||
@@ -840,9 +848,9 @@ async def async_setup_entry(
|
||||
and definition.description.value_fn(device) is None
|
||||
and definition.description.zone != 1
|
||||
):
|
||||
# all appliances supporting temperature have at least zone 1, for other zones
|
||||
# don't create entity if API signals that datapoint is disabled, unless the sensor
|
||||
# already appeared in the past (= it provided a valid value)
|
||||
# Optional temperature datapoints (extra fridge zones, oven food probe): only
|
||||
# create the entity after the API first reports a valid reading, then keep it
|
||||
# so state can return to unknown when the datapoint is inactive.
|
||||
return _is_entity_registered(unique_id)
|
||||
if (
|
||||
definition.description.key == "state_plate_step"
|
||||
|
||||
@@ -146,7 +146,6 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
|
||||
_entity_id_format = ENTITY_ID_FORMAT
|
||||
_attributes_extra_blocked = MQTT_LIGHT_ATTRIBUTES_BLOCKED
|
||||
|
||||
_fixed_color_mode: ColorMode | str | None = None
|
||||
_flash_times: dict[str, int | None]
|
||||
_topic: dict[str, str | None]
|
||||
_optimistic: bool
|
||||
@@ -190,6 +189,7 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
|
||||
self._attr_supported_features |= (
|
||||
config[CONF_TRANSITION] and LightEntityFeature.TRANSITION
|
||||
)
|
||||
self._attr_color_mode = ColorMode.UNKNOWN
|
||||
if supported_color_modes := self._config.get(CONF_SUPPORTED_COLOR_MODES):
|
||||
self._attr_supported_color_modes = supported_color_modes
|
||||
if self.supported_color_modes and len(self.supported_color_modes) == 1:
|
||||
|
||||
@@ -49,7 +49,11 @@ if TYPE_CHECKING:
|
||||
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
PLATFORMS = [Platform.BUTTON, Platform.MEDIA_PLAYER]
|
||||
PLATFORMS = [
|
||||
Platform.BUTTON,
|
||||
Platform.MEDIA_PLAYER,
|
||||
Platform.NUMBER,
|
||||
]
|
||||
|
||||
CONNECT_TIMEOUT = 10
|
||||
LISTEN_READY_TIMEOUT = 30
|
||||
|
||||
@@ -80,3 +80,5 @@ ATTR_FANART_IMAGE = "fanart_image"
|
||||
ATTR_CONF_EXPOSE_PLAYER_TO_HA = "expose_player_to_ha"
|
||||
|
||||
LOGGER = logging.getLogger(__package__)
|
||||
|
||||
PLAYER_OPTIONS_TRANSLATION_KEY_PREFIX = "player_options."
|
||||
|
||||
@@ -6,8 +6,9 @@ from typing import TYPE_CHECKING
|
||||
|
||||
from music_assistant_models.enums import EventType
|
||||
from music_assistant_models.event import MassEvent
|
||||
from music_assistant_models.player import Player
|
||||
from music_assistant_models.player import Player, PlayerOption
|
||||
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
@@ -84,3 +85,45 @@ class MusicAssistantEntity(Entity):
|
||||
|
||||
async def async_on_update(self) -> None:
|
||||
"""Handle player updates."""
|
||||
|
||||
|
||||
class MusicAssistantPlayerOptionEntity(MusicAssistantEntity):
    """Base entity for Music Assistant Player Options."""

    # Player options are provider configuration, not primary controls.
    _attr_entity_category = EntityCategory.CONFIG

    def __init__(
        self, mass: MusicAssistantClient, player_id: str, player_option: PlayerOption
    ) -> None:
        """Initialize MusicAssistantPlayerOptionEntity."""
        super().__init__(mass, player_id)

        # Remember which option this entity mirrors so that update events can
        # be matched back to it.
        self.mass_option_key = player_option.key
        self.mass_type = player_option.type

        # Seed entity state from the option's current value.
        self.on_player_option_update(player_option)

    async def async_added_to_hass(self) -> None:
        """Register callbacks."""
        # need callbacks of parent to catch availability
        await super().async_added_to_hass()

        # main callback for player options
        self.async_on_remove(
            self.mass.subscribe(
                self.__on_mass_player_options_update,
                EventType.PLAYER_OPTIONS_UPDATED,
                self.player_id,
            )
        )

    def __on_mass_player_options_update(self, event: MassEvent) -> None:
        """Call when we receive an event from MusicAssistant."""
        # Locate this entity's option on the player and push its new value.
        for option in self.player.options:
            if option.key == self.mass_option_key:
                self.on_player_option_update(option)
                self.async_write_ha_state()
                break

    def on_player_option_update(self, player_option: PlayerOption) -> None:
        """Callback for player option updates."""
        # Intentionally a no-op here; concrete platform entities override it.
||||
127
homeassistant/components/music_assistant/number.py
Normal file
127
homeassistant/components/music_assistant/number.py
Normal file
@@ -0,0 +1,127 @@
|
||||
"""Music Assistant Number platform."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Final
|
||||
|
||||
from music_assistant_client.client import MusicAssistantClient
|
||||
from music_assistant_models.player import PlayerOption, PlayerOptionType
|
||||
|
||||
from homeassistant.components.number import NumberEntity, NumberEntityDescription
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import MusicAssistantConfigEntry
|
||||
from .const import PLAYER_OPTIONS_TRANSLATION_KEY_PREFIX
|
||||
from .entity import MusicAssistantPlayerOptionEntity
|
||||
from .helpers import catch_musicassistant_error
|
||||
|
||||
# MA player-option translation keys (without the "player_options." prefix)
# that are exposed as Number entities; any other key is ignored at setup.
PLAYER_OPTIONS_TRANSLATION_KEYS_NUMBER: Final[list[str]] = [
    "bass",
    "dialogue_level",
    "dialogue_lift",
    "dts_dialogue_control",
    "equalizer_high",
    "equalizer_low",
    "equalizer_mid",
    "subwoofer_volume",
    "treble",
]
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistant,
    entry: MusicAssistantConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Music Assistant Number Entities (Player Options) from Config Entry."""
    mass = entry.runtime_data.mass

    def add_player(player_id: str) -> None:
        """Handle add player."""
        player = mass.players.get(player_id)
        if player is None:
            # Player disappeared between the discovery callback and lookup.
            return
        entities: list[MusicAssistantPlayerConfigNumber] = []
        for player_option in player.options:
            # Only writable integer/float options without a fixed choice list
            # become Number entities.
            if (
                not player_option.read_only
                and player_option.type
                in (
                    PlayerOptionType.INTEGER,
                    PlayerOptionType.FLOAT,
                )
                and not player_option.options  # these we map to select
            ):
                # the MA translation key must have the format player_options.<translation key>
                # we ignore entities with unknown translation keys.
                if (
                    player_option.translation_key is None
                    or not player_option.translation_key.startswith(
                        PLAYER_OPTIONS_TRANSLATION_KEY_PREFIX
                    )
                ):
                    continue
                # Strip the prefix to get the HA-side translation key.
                translation_key = player_option.translation_key[
                    len(PLAYER_OPTIONS_TRANSLATION_KEY_PREFIX) :
                ]
                if translation_key not in PLAYER_OPTIONS_TRANSLATION_KEYS_NUMBER:
                    continue

                entities.append(
                    MusicAssistantPlayerConfigNumber(
                        mass,
                        player_id,
                        player_option=player_option,
                        entity_description=NumberEntityDescription(
                            key=player_option.key,
                            translation_key=translation_key,
                        ),
                    )
                )
        async_add_entities(entities)

    # register callback to add players when they are discovered
    entry.runtime_data.platform_handlers.setdefault(Platform.NUMBER, add_player)
||||
|
||||
|
||||
class MusicAssistantPlayerConfigNumber(MusicAssistantPlayerOptionEntity, NumberEntity):
|
||||
"""Representation of a Number entity to control player provider dependent settings."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
mass: MusicAssistantClient,
|
||||
player_id: str,
|
||||
player_option: PlayerOption,
|
||||
entity_description: NumberEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize MusicAssistantPlayerConfigNumber."""
|
||||
super().__init__(mass, player_id, player_option)
|
||||
|
||||
self.entity_description = entity_description
|
||||
|
||||
@catch_musicassistant_error
|
||||
async def async_set_native_value(self, value: float) -> None:
|
||||
"""Set a new value."""
|
||||
_value = round(value) if self.mass_type == PlayerOptionType.INTEGER else value
|
||||
await self.mass.players.set_option(
|
||||
self.player_id,
|
||||
self.mass_option_key,
|
||||
_value,
|
||||
)
|
||||
|
||||
def on_player_option_update(self, player_option: PlayerOption) -> None:
|
||||
"""Update on player option update."""
|
||||
if player_option.min_value is not None:
|
||||
self._attr_native_min_value = player_option.min_value
|
||||
if player_option.max_value is not None:
|
||||
self._attr_native_max_value = player_option.max_value
|
||||
if player_option.step is not None:
|
||||
self._attr_native_step = player_option.step
|
||||
|
||||
self._attr_native_value = (
|
||||
player_option.value
|
||||
if isinstance(player_option.value, (int, float))
|
||||
else None
|
||||
)
|
||||
@@ -53,6 +53,35 @@
|
||||
"favorite_now_playing": {
|
||||
"name": "Favorite current song"
|
||||
}
|
||||
},
|
||||
"number": {
|
||||
"bass": {
|
||||
"name": "Bass"
|
||||
},
|
||||
"dialogue_level": {
|
||||
"name": "Dialogue level"
|
||||
},
|
||||
"dialogue_lift": {
|
||||
"name": "Dialogue lift"
|
||||
},
|
||||
"dts_dialogue_control": {
|
||||
"name": "DTS dialogue control"
|
||||
},
|
||||
"equalizer_high": {
|
||||
"name": "Equalizer high"
|
||||
},
|
||||
"equalizer_low": {
|
||||
"name": "Equalizer low"
|
||||
},
|
||||
"equalizer_mid": {
|
||||
"name": "Equalizer mid"
|
||||
},
|
||||
"subwoofer_volume": {
|
||||
"name": "Subwoofer volume"
|
||||
},
|
||||
"treble": {
|
||||
"name": "Treble"
|
||||
}
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
|
||||
@@ -104,12 +104,8 @@ async def async_setup_entry(
|
||||
def _create_entity(device: dict) -> MyNeoSelect:
|
||||
"""Create a select entity for a device."""
|
||||
if device["model"] == "EWS":
|
||||
# According to the MyNeomitis API, EWS "relais" devices expose a "relayMode"
|
||||
# field in their state, while "pilote" devices do not. We therefore use the
|
||||
# presence of "relayMode" as an explicit heuristic to distinguish relais
|
||||
# from pilote devices. If the upstream API changes this behavior, this
|
||||
# detection logic must be revisited.
|
||||
if "relayMode" in device.get("state", {}):
|
||||
state = device.get("state") or {}
|
||||
if state.get("deviceType") == 0:
|
||||
description = SELECT_TYPES["relais"]
|
||||
else:
|
||||
description = SELECT_TYPES["pilote"]
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["pynintendoauth", "pynintendoparental"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pynintendoauth==1.0.2", "pynintendoparental==2.3.3"]
|
||||
"requirements": ["pynintendoauth==1.0.2", "pynintendoparental==2.3.4"]
|
||||
}
|
||||
|
||||
@@ -49,6 +49,12 @@ class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodsData]):
|
||||
|
||||
def get_next_data_interval(self, now: datetime) -> datetime:
|
||||
"""Compute next time an update should occur."""
|
||||
if self.is_prices_final():
|
||||
# Prices are final for the current day, update next day
|
||||
LOGGER.debug("Prices are final for the current day")
|
||||
if tomorrow_starts_at := self.tomorrow_starts_at():
|
||||
LOGGER.debug("Next data update at %s", tomorrow_starts_at)
|
||||
return tomorrow_starts_at
|
||||
next_hour = dt_util.utcnow() + timedelta(hours=1)
|
||||
next_run = datetime(
|
||||
next_hour.year,
|
||||
@@ -92,17 +98,22 @@ class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodsData]):
|
||||
|
||||
async def fetch_data(self, now: datetime, initial: bool = False) -> None:
|
||||
"""Fetch data from Nord Pool."""
|
||||
self.data_unsub = async_track_point_in_utc_time(
|
||||
self.hass, self.fetch_data, self.get_next_data_interval(dt_util.utcnow())
|
||||
)
|
||||
if self.config_entry.pref_disable_polling and not initial:
|
||||
self.data_unsub = async_track_point_in_utc_time(
|
||||
self.hass,
|
||||
self.fetch_data,
|
||||
self.get_next_data_interval(dt_util.utcnow()),
|
||||
)
|
||||
return
|
||||
try:
|
||||
data = await self.handle_data(initial)
|
||||
except UpdateFailed as err:
|
||||
self.async_set_update_error(err)
|
||||
return
|
||||
self.async_set_updated_data(data)
|
||||
else:
|
||||
self.async_set_updated_data(data)
|
||||
self.data_unsub = async_track_point_in_utc_time(
|
||||
self.hass, self.fetch_data, self.get_next_data_interval(dt_util.utcnow())
|
||||
)
|
||||
|
||||
async def handle_data(self, initial: bool = False) -> DeliveryPeriodsData:
|
||||
"""Fetch data from Nord Pool."""
|
||||
@@ -171,3 +182,25 @@ class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodsData]):
|
||||
delivery_period = del_period
|
||||
break
|
||||
return delivery_period
|
||||
|
||||
def is_prices_final(self) -> bool:
|
||||
"""Return True if prices for the current day are final."""
|
||||
current_day = dt_util.utcnow().strftime("%Y-%m-%d")
|
||||
if not self.data:
|
||||
return False
|
||||
for del_period in self.data.entries:
|
||||
if del_period.requested_date == current_day and del_period.prices_final:
|
||||
return True
|
||||
return False
|
||||
|
||||
def tomorrow_starts_at(self) -> datetime | None:
|
||||
"""Return tomorrow's starting time."""
|
||||
# What if tomorrow file not exist?
|
||||
|
||||
tomorrow = dt_util.utcnow() + timedelta(days=1)
|
||||
tomorrow_day = tomorrow.strftime("%Y-%m-%d")
|
||||
for del_period in self.data.entries:
|
||||
if del_period.requested_date == tomorrow_day:
|
||||
sorted_tomorrow = sorted(del_period.entries, key=lambda x: x.start)
|
||||
return sorted_tomorrow[0].start
|
||||
return None
|
||||
|
||||
@@ -54,7 +54,7 @@ def get_prices(
|
||||
current_price_entries: dict[str, float] = {}
|
||||
next_price_entries: dict[str, float] = {}
|
||||
current_time = dt_util.utcnow()
|
||||
LOGGER.debug("Price data: %s", data)
|
||||
# LOGGER.debug("Price data: %s", data)
|
||||
for entry in data:
|
||||
resolution = entry.end - entry.start
|
||||
previous_time = current_time - resolution
|
||||
|
||||
@@ -168,7 +168,7 @@ class NumberDeviceClass(StrEnum):
|
||||
CURRENT = "current"
|
||||
"""Current.
|
||||
|
||||
Unit of measurement: `A`, `mA`
|
||||
Unit of measurement: `A`, `mA`, `μA`
|
||||
"""
|
||||
|
||||
DATA_RATE = "data_rate"
|
||||
|
||||
@@ -30,12 +30,12 @@ def _validate_input(data: dict[str, Any]) -> None:
|
||||
Data has the keys from DATA_SCHEMA with values provided by the user.
|
||||
"""
|
||||
nzbget_api = NZBGetAPI(
|
||||
data[CONF_HOST],
|
||||
data.get(CONF_USERNAME),
|
||||
data.get(CONF_PASSWORD),
|
||||
data[CONF_SSL],
|
||||
data[CONF_VERIFY_SSL],
|
||||
data[CONF_PORT],
|
||||
host=data[CONF_HOST],
|
||||
username=data.get(CONF_USERNAME),
|
||||
password=data.get(CONF_PASSWORD),
|
||||
secure=data[CONF_SSL],
|
||||
verify_certificate=data[CONF_VERIFY_SSL],
|
||||
port=data[CONF_PORT],
|
||||
)
|
||||
|
||||
nzbget_api.version()
|
||||
|
||||
@@ -38,12 +38,12 @@ class NZBGetDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
) -> None:
|
||||
"""Initialize global NZBGet data updater."""
|
||||
self.nzbget = NZBGetAPI(
|
||||
config_entry.data[CONF_HOST],
|
||||
config_entry.data.get(CONF_USERNAME),
|
||||
config_entry.data.get(CONF_PASSWORD),
|
||||
config_entry.data[CONF_SSL],
|
||||
config_entry.data[CONF_VERIFY_SSL],
|
||||
config_entry.data[CONF_PORT],
|
||||
host=config_entry.data[CONF_HOST],
|
||||
username=config_entry.data.get(CONF_USERNAME),
|
||||
password=config_entry.data.get(CONF_PASSWORD),
|
||||
secure=config_entry.data[CONF_SSL],
|
||||
verify_certificate=config_entry.data[CONF_VERIFY_SSL],
|
||||
port=config_entry.data[CONF_PORT],
|
||||
)
|
||||
|
||||
self._completed_downloads_init = False
|
||||
|
||||
@@ -45,7 +45,7 @@ rules:
|
||||
comment: the integration only integrates state-less entities
|
||||
parallel-updates: todo
|
||||
reauthentication-flow: todo
|
||||
test-coverage: todo
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
@@ -63,8 +63,12 @@ rules:
|
||||
docs-supported-functions: todo
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices: todo
|
||||
entity-category: todo
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: devices are created via subentries, not discovered dynamically
|
||||
entity-category:
|
||||
status: exempt
|
||||
comment: conversation and AI task entities do not use entity categories
|
||||
entity-device-class:
|
||||
status: exempt
|
||||
comment: no suitable device class for the conversation entity
|
||||
|
||||
@@ -46,6 +46,7 @@ from .const import (
|
||||
CONF_MAX_TOKENS,
|
||||
CONF_PROMPT,
|
||||
CONF_REASONING_EFFORT,
|
||||
CONF_STORE_RESPONSES,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_TOP_P,
|
||||
DEFAULT_AI_TASK_NAME,
|
||||
@@ -58,6 +59,7 @@ from .const import (
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
RECOMMENDED_REASONING_EFFORT,
|
||||
RECOMMENDED_STORE_RESPONSES,
|
||||
RECOMMENDED_STT_OPTIONS,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
RECOMMENDED_TOP_P,
|
||||
@@ -208,7 +210,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
|
||||
),
|
||||
"user": call.context.user_id,
|
||||
"store": False,
|
||||
"store": conversation_subentry.data.get(
|
||||
CONF_STORE_RESPONSES, RECOMMENDED_STORE_RESPONSES
|
||||
),
|
||||
}
|
||||
|
||||
if model.startswith("o"):
|
||||
|
||||
@@ -55,6 +55,7 @@ from .const import (
|
||||
CONF_REASONING_SUMMARY,
|
||||
CONF_RECOMMENDED,
|
||||
CONF_SERVICE_TIER,
|
||||
CONF_STORE_RESPONSES,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_TOP_P,
|
||||
CONF_TTS_SPEED,
|
||||
@@ -82,6 +83,7 @@ from .const import (
|
||||
RECOMMENDED_REASONING_EFFORT,
|
||||
RECOMMENDED_REASONING_SUMMARY,
|
||||
RECOMMENDED_SERVICE_TIER,
|
||||
RECOMMENDED_STORE_RESPONSES,
|
||||
RECOMMENDED_STT_MODEL,
|
||||
RECOMMENDED_STT_OPTIONS,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
@@ -357,6 +359,10 @@ class OpenAISubentryFlowHandler(ConfigSubentryFlow):
|
||||
CONF_TEMPERATURE,
|
||||
default=RECOMMENDED_TEMPERATURE,
|
||||
): NumberSelector(NumberSelectorConfig(min=0, max=2, step=0.05)),
|
||||
vol.Optional(
|
||||
CONF_STORE_RESPONSES,
|
||||
default=RECOMMENDED_STORE_RESPONSES,
|
||||
): bool,
|
||||
}
|
||||
|
||||
if user_input is not None:
|
||||
@@ -641,7 +647,9 @@ class OpenAISubentryFlowHandler(ConfigSubentryFlow):
|
||||
"strict": False,
|
||||
}
|
||||
},
|
||||
store=False,
|
||||
store=self.options.get(
|
||||
CONF_STORE_RESPONSES, RECOMMENDED_STORE_RESPONSES
|
||||
),
|
||||
)
|
||||
location_data = location_schema(json.loads(response.output_text) or {})
|
||||
|
||||
|
||||
@@ -24,6 +24,7 @@ CONF_PROMPT = "prompt"
|
||||
CONF_REASONING_EFFORT = "reasoning_effort"
|
||||
CONF_REASONING_SUMMARY = "reasoning_summary"
|
||||
CONF_RECOMMENDED = "recommended"
|
||||
CONF_STORE_RESPONSES = "store_responses"
|
||||
CONF_SERVICE_TIER = "service_tier"
|
||||
CONF_TEMPERATURE = "temperature"
|
||||
CONF_TOP_P = "top_p"
|
||||
@@ -42,6 +43,7 @@ RECOMMENDED_CHAT_MODEL = "gpt-4o-mini"
|
||||
RECOMMENDED_IMAGE_MODEL = "gpt-image-1.5"
|
||||
RECOMMENDED_MAX_TOKENS = 3000
|
||||
RECOMMENDED_REASONING_EFFORT = "low"
|
||||
RECOMMENDED_STORE_RESPONSES = False
|
||||
RECOMMENDED_REASONING_SUMMARY = "auto"
|
||||
RECOMMENDED_SERVICE_TIER = "auto"
|
||||
RECOMMENDED_STT_MODEL = "gpt-4o-mini-transcribe"
|
||||
|
||||
@@ -75,6 +75,7 @@ from .const import (
|
||||
CONF_REASONING_EFFORT,
|
||||
CONF_REASONING_SUMMARY,
|
||||
CONF_SERVICE_TIER,
|
||||
CONF_STORE_RESPONSES,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_TOP_P,
|
||||
CONF_VERBOSITY,
|
||||
@@ -94,6 +95,7 @@ from .const import (
|
||||
RECOMMENDED_REASONING_EFFORT,
|
||||
RECOMMENDED_REASONING_SUMMARY,
|
||||
RECOMMENDED_SERVICE_TIER,
|
||||
RECOMMENDED_STORE_RESPONSES,
|
||||
RECOMMENDED_STT_MODEL,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
RECOMMENDED_TOP_P,
|
||||
@@ -508,7 +510,7 @@ class OpenAIBaseLLMEntity(Entity):
|
||||
max_output_tokens=options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
|
||||
user=chat_log.conversation_id,
|
||||
service_tier=options.get(CONF_SERVICE_TIER, RECOMMENDED_SERVICE_TIER),
|
||||
store=False,
|
||||
store=options.get(CONF_STORE_RESPONSES, RECOMMENDED_STORE_RESPONSES),
|
||||
stream=True,
|
||||
)
|
||||
|
||||
@@ -611,8 +613,10 @@ class OpenAIBaseLLMEntity(Entity):
|
||||
if image_model != "gpt-image-1-mini":
|
||||
image_tool["input_fidelity"] = "high"
|
||||
tools.append(image_tool)
|
||||
# Keep image state on OpenAI so follow-up prompts can continue by
|
||||
# conversation ID without resending the generated image data.
|
||||
model_args["store"] = True
|
||||
model_args["tool_choice"] = ToolChoiceTypesParam(type="image_generation")
|
||||
model_args["store"] = True # Avoid sending image data back and forth
|
||||
|
||||
if tools:
|
||||
model_args["tools"] = tools
|
||||
|
||||
@@ -51,9 +51,13 @@
|
||||
"data": {
|
||||
"chat_model": "[%key:common::generic::model%]",
|
||||
"max_tokens": "[%key:component::openai_conversation::config_subentries::conversation::step::advanced::data::max_tokens%]",
|
||||
"store_responses": "[%key:component::openai_conversation::config_subentries::conversation::step::advanced::data::store_responses%]",
|
||||
"temperature": "[%key:component::openai_conversation::config_subentries::conversation::step::advanced::data::temperature%]",
|
||||
"top_p": "[%key:component::openai_conversation::config_subentries::conversation::step::advanced::data::top_p%]"
|
||||
},
|
||||
"data_description": {
|
||||
"store_responses": "[%key:component::openai_conversation::config_subentries::conversation::step::advanced::data_description::store_responses%]"
|
||||
},
|
||||
"title": "[%key:component::openai_conversation::config_subentries::conversation::step::advanced::title%]"
|
||||
},
|
||||
"init": {
|
||||
@@ -109,9 +113,13 @@
|
||||
"data": {
|
||||
"chat_model": "[%key:common::generic::model%]",
|
||||
"max_tokens": "Maximum tokens to return in response",
|
||||
"store_responses": "Store requests and responses in OpenAI",
|
||||
"temperature": "Temperature",
|
||||
"top_p": "Top P"
|
||||
},
|
||||
"data_description": {
|
||||
"store_responses": "If enabled, requests and responses are stored by OpenAI and visible in your OpenAI dashboard logs"
|
||||
},
|
||||
"title": "Advanced settings"
|
||||
},
|
||||
"init": {
|
||||
|
||||
@@ -2,13 +2,23 @@
|
||||
|
||||
import logging
|
||||
|
||||
from pyopnsense import diagnostics
|
||||
from pyopnsense.exceptions import APIException
|
||||
from aiopnsense import (
|
||||
OPNsenseBelowMinFirmware,
|
||||
OPNsenseClient,
|
||||
OPNsenseConnectionError,
|
||||
OPNsenseInvalidAuth,
|
||||
OPNsenseInvalidURL,
|
||||
OPNsensePrivilegeMissing,
|
||||
OPNsenseSSLError,
|
||||
OPNsenseTimeoutError,
|
||||
OPNsenseUnknownFirmware,
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.discovery import load_platform
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
@@ -40,7 +50,7 @@ CONFIG_SCHEMA = vol.Schema(
|
||||
)
|
||||
|
||||
|
||||
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the opnsense component."""
|
||||
|
||||
conf = config[DOMAIN]
|
||||
@@ -50,30 +60,73 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
verify_ssl = conf[CONF_VERIFY_SSL]
|
||||
tracker_interfaces = conf[CONF_TRACKER_INTERFACES]
|
||||
|
||||
interfaces_client = diagnostics.InterfaceClient(
|
||||
api_key, api_secret, url, verify_ssl, timeout=20
|
||||
session = async_get_clientsession(hass, verify_ssl=verify_ssl)
|
||||
client = OPNsenseClient(
|
||||
url,
|
||||
api_key,
|
||||
api_secret,
|
||||
session,
|
||||
opts={"verify_ssl": verify_ssl},
|
||||
)
|
||||
try:
|
||||
interfaces_client.get_arp()
|
||||
except APIException:
|
||||
_LOGGER.exception("Failure while connecting to OPNsense API endpoint")
|
||||
await client.validate()
|
||||
if tracker_interfaces:
|
||||
interfaces_resp = await client.get_interfaces()
|
||||
except OPNsenseUnknownFirmware:
|
||||
_LOGGER.error("Error checking the OPNsense firmware version at %s", url)
|
||||
return False
|
||||
except OPNsenseBelowMinFirmware:
|
||||
_LOGGER.error(
|
||||
"OPNsense Firmware is below the minimum supported version at %s", url
|
||||
)
|
||||
return False
|
||||
except OPNsenseInvalidURL:
|
||||
_LOGGER.error(
|
||||
"Invalid URL while connecting to OPNsense API endpoint at %s", url
|
||||
)
|
||||
return False
|
||||
except OPNsenseTimeoutError:
|
||||
_LOGGER.error("Timeout while connecting to OPNsense API endpoint at %s", url)
|
||||
return False
|
||||
except OPNsenseSSLError:
|
||||
_LOGGER.error(
|
||||
"Unable to verify SSL while connecting to OPNsense API endpoint at %s", url
|
||||
)
|
||||
return False
|
||||
except OPNsenseInvalidAuth:
|
||||
_LOGGER.error(
|
||||
"Authentication failure while connecting to OPNsense API endpoint at %s",
|
||||
url,
|
||||
)
|
||||
return False
|
||||
except OPNsensePrivilegeMissing:
|
||||
_LOGGER.error(
|
||||
"Invalid Permissions while connecting to OPNsense API endpoint at %s",
|
||||
url,
|
||||
)
|
||||
return False
|
||||
except OPNsenseConnectionError:
|
||||
_LOGGER.error(
|
||||
"Connection failure while connecting to OPNsense API endpoint at %s",
|
||||
url,
|
||||
)
|
||||
return False
|
||||
|
||||
if tracker_interfaces:
|
||||
# Verify that specified tracker interfaces are valid
|
||||
netinsight_client = diagnostics.NetworkInsightClient(
|
||||
api_key, api_secret, url, verify_ssl, timeout=20
|
||||
)
|
||||
interfaces = list(netinsight_client.get_interfaces().values())
|
||||
for interface in tracker_interfaces:
|
||||
if interface not in interfaces:
|
||||
known_interfaces = [
|
||||
ifinfo.get("name", "") for ifinfo in interfaces_resp.values()
|
||||
]
|
||||
for intf_description in tracker_interfaces:
|
||||
if intf_description not in known_interfaces:
|
||||
_LOGGER.error(
|
||||
"Specified OPNsense tracker interface %s is not found", interface
|
||||
"Specified OPNsense tracker interface %s is not found",
|
||||
intf_description,
|
||||
)
|
||||
return False
|
||||
|
||||
hass.data[OPNSENSE_DATA] = {
|
||||
CONF_INTERFACE_CLIENT: interfaces_client,
|
||||
CONF_INTERFACE_CLIENT: client,
|
||||
CONF_TRACKER_INTERFACES: tracker_interfaces,
|
||||
}
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
from typing import Any, NewType
|
||||
|
||||
from pyopnsense import diagnostics
|
||||
from aiopnsense import OPNsenseClient
|
||||
|
||||
from homeassistant.components.device_tracker import DeviceScanner
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -27,9 +27,7 @@ async def async_get_scanner(
|
||||
class OPNsenseDeviceScanner(DeviceScanner):
|
||||
"""This class queries a router running OPNsense."""
|
||||
|
||||
def __init__(
|
||||
self, client: diagnostics.InterfaceClient, interfaces: list[str]
|
||||
) -> None:
|
||||
def __init__(self, client: OPNsenseClient, interfaces: list[str]) -> None:
|
||||
"""Initialize the scanner."""
|
||||
self.last_results: dict[str, Any] = {}
|
||||
self.client = client
|
||||
@@ -43,9 +41,9 @@ class OPNsenseDeviceScanner(DeviceScanner):
|
||||
out_devices[device["mac"]] = device
|
||||
return out_devices
|
||||
|
||||
def scan_devices(self) -> list[str]:
|
||||
async def async_scan_devices(self) -> list[str]:
|
||||
"""Scan for new devices and return a list with found device IDs."""
|
||||
self.update_info()
|
||||
await self._async_update_info()
|
||||
return list(self.last_results)
|
||||
|
||||
def get_device_name(self, device: str) -> str | None:
|
||||
@@ -54,12 +52,12 @@ class OPNsenseDeviceScanner(DeviceScanner):
|
||||
return None
|
||||
return self.last_results[device].get("hostname") or None
|
||||
|
||||
def update_info(self) -> bool:
|
||||
async def _async_update_info(self) -> bool:
|
||||
"""Ensure the information from the OPNsense router is up to date.
|
||||
|
||||
Return boolean if scanning successful.
|
||||
"""
|
||||
devices = self.client.get_arp()
|
||||
devices = await self.client.get_arp_table(True)
|
||||
self.last_results = self._get_mac_addrs(devices)
|
||||
return True
|
||||
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
{
|
||||
"domain": "opnsense",
|
||||
"name": "OPNsense",
|
||||
"codeowners": ["@mtreinish"],
|
||||
"codeowners": ["@HarlemSquirrel", "@Snuffy2"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/opnsense",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pbr", "pyopnsense"],
|
||||
"loggers": ["aiopnsense"],
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["pyopnsense==0.4.0"]
|
||||
"requirements": ["aiopnsense==1.0.8"]
|
||||
}
|
||||
|
||||
@@ -42,8 +42,7 @@ rules:
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices:
|
||||
status: done
|
||||
devices: done
|
||||
diagnostics: done
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
|
||||
@@ -66,8 +66,7 @@ rules:
|
||||
entity-disabled-by-default: todo
|
||||
entity-translations: done
|
||||
exception-translations: done
|
||||
icon-translations:
|
||||
status: done
|
||||
icon-translations: done
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
status: exempt
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"services": {
|
||||
"create": {
|
||||
"description": "Shows a notification on the notifications panel.",
|
||||
"description": "Shows a persistent notification on the notifications panel.",
|
||||
"fields": {
|
||||
"message": {
|
||||
"description": "Message body of the notification.",
|
||||
@@ -16,21 +16,21 @@
|
||||
"name": "Title"
|
||||
}
|
||||
},
|
||||
"name": "Create"
|
||||
"name": "Create persistent notification"
|
||||
},
|
||||
"dismiss": {
|
||||
"description": "Deletes a notification from the notifications panel.",
|
||||
"description": "Deletes a persistent notification from the notifications panel.",
|
||||
"fields": {
|
||||
"notification_id": {
|
||||
"description": "ID of the notification to be deleted.",
|
||||
"name": "[%key:component::persistent_notification::services::create::fields::notification_id::name%]"
|
||||
}
|
||||
},
|
||||
"name": "Dismiss"
|
||||
"name": "Dismiss persistent notification"
|
||||
},
|
||||
"dismiss_all": {
|
||||
"description": "Deletes all notifications from the notifications panel.",
|
||||
"name": "Dismiss all"
|
||||
"description": "Deletes all persistent notifications from the notifications panel.",
|
||||
"name": "Dismiss all persistent notifications"
|
||||
}
|
||||
},
|
||||
"title": "Persistent Notification"
|
||||
|
||||
@@ -49,8 +49,6 @@ class PicnicUpdateCoordinator(DataUpdateCoordinator):
|
||||
async def _async_update_data(self) -> dict:
|
||||
"""Fetch data from API endpoint."""
|
||||
try:
|
||||
# Note: TimeoutError and aiohttp.ClientError are already
|
||||
# handled by the data update coordinator.
|
||||
async with asyncio.timeout(10):
|
||||
data = await self.hass.async_add_executor_job(self.fetch_data)
|
||||
|
||||
@@ -60,6 +58,10 @@ class PicnicUpdateCoordinator(DataUpdateCoordinator):
|
||||
raise UpdateFailed(f"API response was malformed: {error}") from error
|
||||
except PicnicAuthError as error:
|
||||
raise ConfigEntryAuthFailed from error
|
||||
except TimeoutError as error:
|
||||
raise UpdateFailed(
|
||||
"Timeout while connecting to the Picnic API", retry_after=120
|
||||
) from error
|
||||
|
||||
# Return the fetched data
|
||||
return data
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["python_picnic_api2"],
|
||||
"requirements": ["python-picnic-api2==1.3.1"]
|
||||
"requirements": ["python-picnic-api2==1.3.4"]
|
||||
}
|
||||
|
||||
@@ -5,6 +5,8 @@ from __future__ import annotations
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
|
||||
from pyportainer import DockerContainerState, EndpointStatus, StackStatus
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
BinarySensorEntity,
|
||||
@@ -15,7 +17,6 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import PortainerConfigEntry
|
||||
from .const import ContainerState, EndpointStatus, StackStatus
|
||||
from .coordinator import PortainerContainerData
|
||||
from .entity import (
|
||||
PortainerContainerEntity,
|
||||
@@ -53,7 +54,7 @@ CONTAINER_SENSORS: tuple[PortainerContainerBinarySensorEntityDescription, ...] =
|
||||
PortainerContainerBinarySensorEntityDescription(
|
||||
key="status",
|
||||
translation_key="status",
|
||||
state_fn=lambda data: data.container.state == ContainerState.RUNNING,
|
||||
state_fn=lambda data: data.container.state == DockerContainerState.RUNNING,
|
||||
device_class=BinarySensorDeviceClass.RUNNING,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
|
||||
@@ -199,6 +199,8 @@ class PortainerBaseButton(ButtonEntity):
|
||||
translation_key="timeout_connect_no_details",
|
||||
) from err
|
||||
|
||||
await self.coordinator.async_request_refresh()
|
||||
|
||||
|
||||
class PortainerEndpointButton(PortainerEndpointEntity, PortainerBaseButton):
|
||||
"""Defines a Portainer endpoint button."""
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user