Mirror of https://github.com/home-assistant/core.git (synced 2026-02-03 22:05:35 +01:00)

Compare commits: whirlpool_...2026.2.0b3 (57 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | fdf8edf474 |  |
|  | 47e1a98bee |  |
|  | 2d8572b943 |  |
|  | 660cfdbd50 |  |
|  | 4208595da6 |  |
|  | b6b2d2fc6f |  |
|  | 6c4c632848 |  |
|  | 78cf62176f |  |
|  | df971c7a42 |  |
|  | 1fcabb7f2d |  |
|  | 9fb60c9ea2 |  |
|  | 9c11a4646f |  |
|  | b036a78776 |  |
|  | 60bb3cb704 |  |
|  | 0e770958ac |  |
|  | 2a54c71b6c |  |
|  | 50463291ab |  |
|  | 43cc34042a |  |
|  | a02244ccda |  |
|  | a739619121 |  |
|  | 5db97a5f1c |  |
|  | 804ba9c9cc |  |
|  | 5ecbcea946 |  |
|  | 11be2b6289 |  |
|  | eefae0307b |  |
|  | d397ee28ea |  |
|  | 02c821128e |  |
|  | 71dc15d45f |  |
|  | 1078387b22 |  |
|  | 35fab27d15 |  |
|  | 915dc7a908 |  |
|  | e5a9738983 |  |
|  | 2ff73219a2 |  |
|  | 5dc1270ed1 |  |
|  | 9e95ad5a85 |  |
|  | 9a5d4610f7 |  |
|  | 41c524fce4 |  |
|  | 5f9fa95554 |  |
|  | 6950be8ea9 |  |
|  | c5a8bf64d0 |  |
|  | a2b9a6e9df |  |
|  | a0c567f0da |  |
|  | c7feafdde6 |  |
|  | e1e74b0aeb |  |
|  | 673411ef97 |  |
|  | f7e5af7cb1 |  |
|  | 0ee56ce708 |  |
|  | f93a176398 |  |
|  | cd2394bc12 |  |
|  | 5c20b8eaff |  |
|  | 4bd499d3a6 |  |
|  | 8a53b94c5a |  |
|  | d5aff326e3 |  |
|  | 22f66abbe7 |  |
|  | f635228b1f |  |
|  | 4c708c143d |  |
|  | 3369459d41 |  |
homeassistant/brands/heatit.json (new file, 5 lines)

@@ -0,0 +1,5 @@
+{
+  "domain": "heatit",
+  "name": "Heatit",
+  "iot_standards": ["zwave"]
+}
homeassistant/brands/heiman.json (new file, 5 lines)

@@ -0,0 +1,5 @@
+{
+  "domain": "heiman",
+  "name": "Heiman",
+  "iot_standards": ["matter", "zigbee"]
+}
@@ -166,7 +166,7 @@
  },
  "services": {
    "alarm_arm_away": {
-      "description": "Arms the alarm in the away mode.",
+      "description": "Arms an alarm in the away mode.",
      "fields": {
        "code": {
          "description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",

@@ -176,7 +176,7 @@
      "name": "Arm away"
    },
    "alarm_arm_custom_bypass": {
-      "description": "Arms the alarm while allowing to bypass a custom area.",
+      "description": "Arms an alarm while allowing to bypass a custom area.",
      "fields": {
        "code": {
          "description": "Code to arm the alarm.",

@@ -186,7 +186,7 @@
      "name": "Arm with custom bypass"
    },
    "alarm_arm_home": {
-      "description": "Arms the alarm in the home mode.",
+      "description": "Arms an alarm in the home mode.",
      "fields": {
        "code": {
          "description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",

@@ -196,7 +196,7 @@
      "name": "Arm home"
    },
    "alarm_arm_night": {
-      "description": "Arms the alarm in the night mode.",
+      "description": "Arms an alarm in the night mode.",
      "fields": {
        "code": {
          "description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",

@@ -206,7 +206,7 @@
      "name": "Arm night"
    },
    "alarm_arm_vacation": {
-      "description": "Arms the alarm in the vacation mode.",
+      "description": "Arms an alarm in the vacation mode.",
      "fields": {
        "code": {
          "description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",

@@ -216,7 +216,7 @@
      "name": "Arm vacation"
    },
    "alarm_disarm": {
-      "description": "Disarms the alarm.",
+      "description": "Disarms an alarm.",
      "fields": {
        "code": {
          "description": "Code to disarm the alarm.",

@@ -226,7 +226,7 @@
      "name": "Disarm"
    },
    "alarm_trigger": {
-      "description": "Triggers the alarm manually.",
+      "description": "Triggers an alarm manually.",
      "fields": {
        "code": {
          "description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",
@@ -8,5 +8,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["aioamazondevices"],
  "quality_scale": "platinum",
-  "requirements": ["aioamazondevices==11.0.2"]
+  "requirements": ["aioamazondevices==11.1.1"]
}
@@ -28,6 +28,7 @@ from homeassistant.helpers.typing import StateType
from .const import CATEGORY_NOTIFICATIONS, CATEGORY_SENSORS
from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity
+from .utils import async_remove_unsupported_notification_sensors

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0

@@ -105,6 +106,9 @@ async def async_setup_entry(

    coordinator = entry.runtime_data

+    # Remove notification sensors from unsupported devices
+    await async_remove_unsupported_notification_sensors(hass, coordinator)
+
    known_devices: set[str] = set()

    def _check_device() -> None:

@@ -122,6 +126,7 @@ async def async_setup_entry(
            AmazonSensorEntity(coordinator, serial_num, notification_desc)
            for notification_desc in NOTIFICATIONS
            for serial_num in new_devices
+            if coordinator.data[serial_num].notifications_supported
        ]
        async_add_entities(sensors_list + notifications_list)
@@ -5,8 +5,14 @@ from functools import wraps
from typing import Any, Concatenate

from aioamazondevices.const.devices import SPEAKER_GROUP_FAMILY
+from aioamazondevices.const.schedules import (
+    NOTIFICATION_ALARM,
+    NOTIFICATION_REMINDER,
+    NOTIFICATION_TIMER,
+)
from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData

+from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError

@@ -81,3 +87,27 @@ async def async_remove_dnd_from_virtual_group(
    if entity_id and is_group:
        entity_registry.async_remove(entity_id)
        _LOGGER.debug("Removed DND switch from virtual group %s", entity_id)
+
+
+async def async_remove_unsupported_notification_sensors(
+    hass: HomeAssistant,
+    coordinator: AmazonDevicesCoordinator,
+) -> None:
+    """Remove notification sensors from unsupported devices."""
+    entity_registry = er.async_get(hass)
+
+    for serial_num in coordinator.data:
+        for notification_key in (
+            NOTIFICATION_ALARM,
+            NOTIFICATION_REMINDER,
+            NOTIFICATION_TIMER,
+        ):
+            unique_id = f"{serial_num}-{notification_key}"
+            entity_id = entity_registry.async_get_entity_id(
+                domain=SENSOR_DOMAIN, platform=DOMAIN, unique_id=unique_id
+            )
+            is_unsupported = not coordinator.data[serial_num].notifications_supported
+
+            if entity_id and is_unsupported:
+                entity_registry.async_remove(entity_id)
+                _LOGGER.debug("Removed unsupported notification sensor %s", entity_id)
@@ -10,6 +10,7 @@
  "preview_features": {
    "snapshots": {
      "feedback_url": "https://forms.gle/GqvRmgmghSDco8M46",
+      "learn_more_url": "https://www.home-assistant.io/blog/2026/02/02/about-device-database/",
      "report_issue_url": "https://github.com/OHF-Device-Database/device-database/issues/new"
    }
  },

@@ -1,7 +1,7 @@
{
  "preview_features": {
    "snapshots": {
-      "description": "This free, open source device database of the Open Home Foundation helps users find useful information about smart home devices used in real installations.\n\nYou can help build it by anonymously sharing data about your devices. Only device-specific details (like model or manufacturer) are shared — never personally identifying information (like the names you assign).\n\nLearn more about the device database and how we process your data in our [Data Use Statement](https://www.openhomefoundation.org/device-database-data-use-statement), which you accept by opting in.",
+      "description": "We're creating the [Open Home Foundation Device Database](https://www.home-assistant.io/blog/2026/02/02/about-device-database/): a free, open source community-powered resource to help users find practical information about how smart home devices perform in real installations.\n\nYou can help us build it by opting in to share anonymized data about your devices. This data will only ever include device-specific details (like model or manufacturer) – never personally identifying information (like the names you assign).\n\nFind out how we process your data (should you choose to contribute) in our [Data Use Statement](https://www.openhomefoundation.org/device-database-data-use-statement).",
      "disable_confirmation": "Your data will no longer be shared with the Open Home Foundation's device database.",
      "enable_confirmation": "This feature is still in development and may change. The device database is being refined based on user feedback and is not yet complete.",
      "name": "Device database"
@@ -419,7 +419,11 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
            model_alias = (
                model_info.id[:-9]
                if model_info.id
-                not in ("claude-3-haiku-20240307", "claude-3-opus-20240229")
+                not in (
+                    "claude-3-haiku-20240307",
+                    "claude-3-5-haiku-20241022",
+                    "claude-3-opus-20240229",
+                )
                else model_info.id
            )
            if short_form.search(model_alias):
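A brief aside on the `[:-9]` slice in the hunk above: Anthropic's dated model ids end in a `-YYYYMMDD` suffix, which is exactly 9 characters, so the slice yields the rolling alias; ids in the exclusion tuple are kept verbatim. A worked illustration (the model id below is a made-up example, not taken from the diff):

```python
# Illustration of the [:-9] alias derivation; model_id is hypothetical.
model_id = "claude-sonnet-4-20250514"
assert model_id[:-9] == "claude-sonnet-4"  # "-20250514" is 9 characters

excluded = (
    "claude-3-haiku-20240307",
    "claude-3-5-haiku-20241022",  # newly added to the exclusion tuple
    "claude-3-opus-20240229",
)
model_alias = model_id[:-9] if model_id not in excluded else model_id
print(model_alias)  # claude-sonnet-4
```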
@@ -23,7 +23,7 @@ CONF_WEB_SEARCH_COUNTRY = "country"
CONF_WEB_SEARCH_TIMEZONE = "timezone"

DEFAULT = {
-    CONF_CHAT_MODEL: "claude-3-5-haiku-latest",
+    CONF_CHAT_MODEL: "claude-haiku-4-5",
    CONF_MAX_TOKENS: 3000,
    CONF_TEMPERATURE: 1.0,
    CONF_THINKING_BUDGET: 0,
@@ -540,7 +540,17 @@ class APCUPSdSensor(APCUPSdEntity, SensorEntity):
        data = self.coordinator.data[key]

        if self.entity_description.device_class == SensorDeviceClass.TIMESTAMP:
-            self._attr_native_value = dateutil.parser.parse(data)
+            # The date could be "N/A" for certain fields (e.g., XOFFBATT), indicating there is no value yet.
+            if data == "N/A":
+                self._attr_native_value = None
+                return
+
+            try:
+                self._attr_native_value = dateutil.parser.parse(data)
+            except (dateutil.parser.ParserError, OverflowError):
+                # If parsing fails we should mark it as unknown, with a log for further debugging.
+                _LOGGER.warning('Failed to parse date for %s: "%s"', key, data)
+                self._attr_native_value = None
            return

        self._attr_native_value, inferred_unit = infer_unit(data)
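The same parsing flow, extracted into a minimal standalone sketch for readers who want to try it (assumes python-dateutil is installed; input values are illustrative):

```python
# Minimal sketch of the "N/A"-tolerant timestamp handling above.
import dateutil.parser


def parse_apcupsd_timestamp(data: str):
    """Return a datetime, or None for "N/A" or unparsable values."""
    if data == "N/A":  # e.g. XOFFBATT before the first transfer to battery
        return None
    try:
        return dateutil.parser.parse(data)
    except (dateutil.parser.ParserError, OverflowError):
        return None


print(parse_apcupsd_timestamp("2026-02-03 21:30:00 +0100"))  # a datetime
print(parse_apcupsd_timestamp("N/A"))                        # None
```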
@@ -12,14 +12,25 @@ from hass_nabucasa import Cloud, NabuCasaBaseError
from hass_nabucasa.llm import (
    LLMAuthenticationError,
    LLMRateLimitError,
+    LLMResponseCompletedEvent,
    LLMResponseError,
+    LLMResponseErrorEvent,
+    LLMResponseFailedEvent,
+    LLMResponseFunctionCallArgumentsDeltaEvent,
+    LLMResponseFunctionCallArgumentsDoneEvent,
+    LLMResponseFunctionCallOutputItem,
+    LLMResponseImageOutputItem,
+    LLMResponseIncompleteEvent,
+    LLMResponseMessageOutputItem,
+    LLMResponseOutputItemAddedEvent,
+    LLMResponseOutputItemDoneEvent,
+    LLMResponseOutputTextDeltaEvent,
+    LLMResponseReasoningOutputItem,
+    LLMResponseReasoningSummaryTextDeltaEvent,
+    LLMResponseWebSearchCallOutputItem,
+    LLMResponseWebSearchCallSearchingEvent,
    LLMServiceError,
)
-from litellm import (
-    ResponseFunctionToolCall,
-    ResponseInputParam,
-    ResponsesAPIStreamEvents,
-)
from openai.types.responses import (
    FunctionToolParam,
    ResponseInputItemParam,

@@ -60,9 +71,9 @@ class ResponseItemType(str, Enum):

def _convert_content_to_param(
    chat_content: Iterable[conversation.Content],
-) -> ResponseInputParam:
+) -> list[ResponseInputItemParam]:
    """Convert any native chat message for this agent to the native format."""
-    messages: ResponseInputParam = []
+    messages: list[ResponseInputItemParam] = []
    reasoning_summary: list[str] = []
    web_search_calls: dict[str, dict[str, Any]] = {}
@@ -238,7 +249,7 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
    """Transform stream result into HA format."""
    last_summary_index = None
    last_role: Literal["assistant", "tool_result"] | None = None
-    current_tool_call: ResponseFunctionToolCall | None = None
+    current_tool_call: LLMResponseFunctionCallOutputItem | None = None

    # Non-reasoning models don't follow our request to remove citations, so we remove
    # them manually here. They always follow the same pattern: the citation is always

@@ -248,19 +259,10 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
    citation_regexp = re.compile(r"\(\[([^\]]+)\]\((https?:\/\/[^\)]+)\)")

    async for event in stream:
-        event_type = getattr(event, "type", None)
-        event_item = getattr(event, "item", None)
-        event_item_type = getattr(event_item, "type", None) if event_item else None
-
-        _LOGGER.debug(
-            "Event[%s] | item: %s",
-            event_type,
-            event_item_type,
-        )
+        _LOGGER.debug("Event[%s]", getattr(event, "type", None))

-        if event_type == ResponsesAPIStreamEvents.OUTPUT_ITEM_ADDED:
-            # Detect function_call even when it's a BaseLiteLLMOpenAIResponseObject
-            if event_item_type == ResponseItemType.FUNCTION_CALL:
+        if isinstance(event, LLMResponseOutputItemAddedEvent):
+            if isinstance(event.item, LLMResponseFunctionCallOutputItem):
                # OpenAI has tool calls as individual events
                # while HA puts tool calls inside the assistant message.
                # We turn them into individual assistant content for HA

@@ -268,11 +270,11 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                yield {"role": "assistant"}
                last_role = "assistant"
                last_summary_index = None
-                current_tool_call = cast(ResponseFunctionToolCall, event.item)
+                current_tool_call = event.item
            elif (
-                event_item_type == ResponseItemType.MESSAGE
+                isinstance(event.item, LLMResponseMessageOutputItem)
                or (
-                    event_item_type == ResponseItemType.REASONING
+                    isinstance(event.item, LLMResponseReasoningOutputItem)
                    and last_summary_index is not None
                )  # Subsequent ResponseReasoningItem
                or last_role != "assistant"

@@ -281,14 +283,14 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                last_role = "assistant"
                last_summary_index = None

-        elif event_type == ResponsesAPIStreamEvents.OUTPUT_ITEM_DONE:
-            if event_item_type == ResponseItemType.REASONING:
-                encrypted_content = getattr(event.item, "encrypted_content", None)
-                summary = getattr(event.item, "summary", []) or []
+        elif isinstance(event, LLMResponseOutputItemDoneEvent):
+            if isinstance(event.item, LLMResponseReasoningOutputItem):
+                encrypted_content = event.item.encrypted_content
+                summary = event.item.summary

                yield {
-                    "native": ResponseReasoningItem(
-                        type="reasoning",
+                    "native": LLMResponseReasoningOutputItem(
+                        type=event.item.type,
                        id=event.item.id,
                        summary=[],
                        encrypted_content=encrypted_content,

@@ -296,14 +298,8 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                    }

                last_summary_index = len(summary) - 1 if summary else None
-            elif event_item_type == ResponseItemType.WEB_SEARCH_CALL:
-                action = getattr(event.item, "action", None)
-                if isinstance(action, dict):
-                    action_dict = action
-                elif action is not None:
-                    action_dict = action.to_dict()
-                else:
-                    action_dict = {}
+            elif isinstance(event.item, LLMResponseWebSearchCallOutputItem):
+                action_dict = event.item.action
                yield {
                    "tool_calls": [
                        llm.ToolInput(

@@ -321,11 +317,11 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                    "tool_result": {"status": event.item.status},
                }
                last_role = "tool_result"
-            elif event_item_type == ResponseItemType.IMAGE:
-                yield {"native": event.item}
+            elif isinstance(event.item, LLMResponseImageOutputItem):
+                yield {"native": event.item.raw}
                last_summary_index = -1  # Trigger new assistant message on next turn

-        elif event_type == ResponsesAPIStreamEvents.OUTPUT_TEXT_DELTA:
+        elif isinstance(event, LLMResponseOutputTextDeltaEvent):
            data = event.delta
            if remove_parentheses:
                data = data.removeprefix(")")

@@ -344,7 +340,7 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
            if data:
                yield {"content": data}

-        elif event_type == ResponsesAPIStreamEvents.REASONING_SUMMARY_TEXT_DELTA:
+        elif isinstance(event, LLMResponseReasoningSummaryTextDeltaEvent):
            # OpenAI can output several reasoning summaries
            # in a single ResponseReasoningItem. We split them as separate
            # AssistantContent messages. Only last of them will have

@@ -358,14 +354,14 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
            last_summary_index = event.summary_index
            yield {"thinking_content": event.delta}

-        elif event_type == ResponsesAPIStreamEvents.FUNCTION_CALL_ARGUMENTS_DELTA:
+        elif isinstance(event, LLMResponseFunctionCallArgumentsDeltaEvent):
            if current_tool_call is not None:
                current_tool_call.arguments += event.delta

-        elif event_type == ResponsesAPIStreamEvents.WEB_SEARCH_CALL_SEARCHING:
+        elif isinstance(event, LLMResponseWebSearchCallSearchingEvent):
            yield {"role": "assistant"}

-        elif event_type == ResponsesAPIStreamEvents.FUNCTION_CALL_ARGUMENTS_DONE:
+        elif isinstance(event, LLMResponseFunctionCallArgumentsDoneEvent):
            if current_tool_call is not None:
                current_tool_call.status = "completed"
@@ -385,35 +381,36 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                    ]
                }

-        elif event_type == ResponsesAPIStreamEvents.RESPONSE_COMPLETED:
-            if event.response.usage is not None:
+        elif isinstance(event, LLMResponseCompletedEvent):
+            response = event.response
+            if response and "usage" in response:
+                usage = response["usage"]
                chat_log.async_trace(
                    {
                        "stats": {
-                            "input_tokens": event.response.usage.input_tokens,
-                            "output_tokens": event.response.usage.output_tokens,
+                            "input_tokens": usage.get("input_tokens"),
+                            "output_tokens": usage.get("output_tokens"),
                        }
                    }
                )

-        elif event_type == ResponsesAPIStreamEvents.RESPONSE_INCOMPLETE:
-            if event.response.usage is not None:
+        elif isinstance(event, LLMResponseIncompleteEvent):
+            response = event.response
+            if response and "usage" in response:
+                usage = response["usage"]
                chat_log.async_trace(
                    {
                        "stats": {
-                            "input_tokens": event.response.usage.input_tokens,
-                            "output_tokens": event.response.usage.output_tokens,
+                            "input_tokens": usage.get("input_tokens"),
+                            "output_tokens": usage.get("output_tokens"),
                        }
                    }
                )

-            if (
-                event.response.incomplete_details
-                and event.response.incomplete_details.reason
-            ):
-                reason: str = event.response.incomplete_details.reason
-            else:
-                reason = "unknown reason"
+            incomplete_details = response.get("incomplete_details")
+            reason = "unknown reason"
+            if incomplete_details is not None and incomplete_details.get("reason"):
+                reason = incomplete_details["reason"]

            if reason == "max_output_tokens":
                reason = "max output tokens reached"

@@ -422,22 +419,24 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have

            raise HomeAssistantError(f"OpenAI response incomplete: {reason}")

-        elif event_type == ResponsesAPIStreamEvents.RESPONSE_FAILED:
-            if event.response.usage is not None:
+        elif isinstance(event, LLMResponseFailedEvent):
+            response = event.response
+            if response and "usage" in response:
+                usage = response["usage"]
                chat_log.async_trace(
                    {
                        "stats": {
-                            "input_tokens": event.response.usage.input_tokens,
-                            "output_tokens": event.response.usage.output_tokens,
+                            "input_tokens": usage.get("input_tokens"),
+                            "output_tokens": usage.get("output_tokens"),
                        }
                    }
                )
            reason = "unknown reason"
-            if event.response.error is not None:
-                reason = event.response.error.message
+            if isinstance(error := response.get("error"), dict):
+                reason = error.get("message") or reason
            raise HomeAssistantError(f"OpenAI response failed: {reason}")

-        elif event_type == ResponsesAPIStreamEvents.ERROR:
+        elif isinstance(event, LLMResponseErrorEvent):
            raise HomeAssistantError(f"OpenAI response error: {event.message}")
@@ -452,7 +451,7 @@ class BaseCloudLLMEntity(Entity):
    async def _prepare_chat_for_generation(
        self,
        chat_log: conversation.ChatLog,
-        messages: ResponseInputParam,
+        messages: list[ResponseInputItemParam],
        response_format: dict[str, Any] | None = None,
    ) -> dict[str, Any]:
        """Prepare kwargs for Cloud LLM from the chat log."""

@@ -460,8 +459,17 @@
        last_content: Any = chat_log.content[-1]
        if last_content.role == "user" and last_content.attachments:
            files = await self._async_prepare_files_for_prompt(last_content.attachments)
-            current_content = last_content.content
-            last_content = [*(current_content or []), *files]
+            last_message = cast(dict[str, Any], messages[-1])
+            assert (
+                last_message["type"] == "message"
+                and last_message["role"] == "user"
+                and isinstance(last_message["content"], str)
+            )
+            last_message["content"] = [
+                {"type": "input_text", "text": last_message["content"]},
+                *files,
+            ]

        tools: list[ToolParam] = []
        tool_choice: str | None = None
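The through-line of the cloud diff above is a dispatch refactor: string/enum comparisons on `event.type` are replaced with `isinstance()` checks against typed event classes from `hass_nabucasa.llm`. A generic, self-contained sketch of that pattern; the classes below are stand-ins I made up, not the real library types:

```python
# Generic sketch of isinstance-based event dispatch (stand-in classes).
from dataclasses import dataclass


@dataclass
class TextDelta:
    delta: str


@dataclass
class Completed:
    usage: dict[str, int]


def handle(event: object) -> str:
    # Type checks replace fragile string comparisons like
    # event.type == "response.output_text.delta".
    if isinstance(event, TextDelta):
        return f"content: {event.delta}"
    if isinstance(event, Completed):
        return f"input_tokens: {event.usage.get('input_tokens')}"
    return "ignored"


print(handle(TextDelta("Hi")))                  # content: Hi
print(handle(Completed({"input_tokens": 7})))   # input_tokens: 7
```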
@@ -13,6 +13,6 @@
  "integration_type": "system",
  "iot_class": "cloud_push",
  "loggers": ["acme", "hass_nabucasa", "snitun"],
-  "requirements": ["hass-nabucasa==1.11.0"],
+  "requirements": ["hass-nabucasa==1.12.0", "openai==2.15.0"],
  "single_config_entry": true
}
@@ -58,12 +58,13 @@ C4_TO_HA_HVAC_MODE = {

HA_TO_C4_HVAC_MODE = {v: k for k, v in C4_TO_HA_HVAC_MODE.items()}

-# Map Control4 HVAC state to Home Assistant HVAC action
+# Map the five known Control4 HVAC states to Home Assistant HVAC actions
C4_TO_HA_HVAC_ACTION = {
    "heating": HVACAction.HEATING,
    "cooling": HVACAction.COOLING,
    "idle": HVACAction.IDLE,
    "off": HVACAction.OFF,
+    "heat": HVACAction.HEATING,
+    "cool": HVACAction.COOLING,
+    "dry": HVACAction.DRYING,
+    "fan": HVACAction.FAN,
}

@@ -236,7 +237,10 @@ class Control4Climate(Control4Entity, ClimateEntity):
        if c4_state is None:
            return None
        # Convert state to lowercase for mapping
-        return C4_TO_HA_HVAC_ACTION.get(str(c4_state).lower())
+        action = C4_TO_HA_HVAC_ACTION.get(str(c4_state).lower())
+        if action is None:
+            _LOGGER.debug("Unknown HVAC state received from Control4: %s", c4_state)
+        return action

    @property
    def target_temperature(self) -> float | None:
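A standalone illustration of the lowercase lookup in the Control4 hunk above. The HVACAction enum values are replaced with plain strings so the snippet runs without Home Assistant installed; the input states are hypothetical:

```python
# Control4 reports states in varying case, so they are lowercased before
# the lookup; an unmapped state now gets a debug log instead of a silent None.
C4_TO_HA_HVAC_ACTION = {
    "heating": "heating",
    "cooling": "cooling",
    "idle": "idle",
    "off": "off",
    "heat": "heating",
    "cool": "cooling",
    "dry": "drying",
    "fan": "fan",
}

for c4_state in ("Heat", "COOL", "Defrost"):
    action = C4_TO_HA_HVAC_ACTION.get(str(c4_state).lower())
    print(c4_state, "->", action)  # "Defrost" -> None (logged as unknown)
```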
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/conversation",
  "integration_type": "entity",
  "quality_scale": "internal",
-  "requirements": ["hassil==3.5.0", "home-assistant-intents==2026.1.6"]
+  "requirements": ["hassil==3.5.0", "home-assistant-intents==2026.1.28"]
}
@@ -1,6 +1,7 @@
"""The Dexcom integration."""

-from pydexcom import AccountError, Dexcom, SessionError
+from pydexcom import Dexcom, Region
+from pydexcom.errors import AccountError, SessionError

from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant

@@ -14,10 +15,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: DexcomConfigEntry) -> bool:
    """Set up Dexcom from a config entry."""
    try:
        dexcom = await hass.async_add_executor_job(
-            Dexcom,
-            entry.data[CONF_USERNAME],
-            entry.data[CONF_PASSWORD],
-            entry.data[CONF_SERVER] == SERVER_OUS,
+            lambda: Dexcom(
+                username=entry.data[CONF_USERNAME],
+                password=entry.data[CONF_PASSWORD],
+                region=Region.OUS
+                if entry.data[CONF_SERVER] == SERVER_OUS
+                else Region.US,
+            )
        )
    except AccountError:
        return False
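Why the constructor call is now wrapped in a lambda: `async_add_executor_job(func, *args)` only forwards positional arguments, and the new pydexcom constructor is called with keyword arguments, so they must be captured in a closure. A minimal sketch of the general pattern using the standard library (the client function is a stand-in, not pydexcom):

```python
# Sketch: running a keyword-argument blocking call in an executor.
import asyncio
from concurrent.futures import ThreadPoolExecutor


def make_client(*, username: str, password: str, region: str) -> dict:
    # Stand-in for a blocking constructor like pydexcom.Dexcom(...).
    return {"user": username, "region": region}


async def main() -> None:
    loop = asyncio.get_running_loop()
    with ThreadPoolExecutor() as pool:
        # run_in_executor, like async_add_executor_job, takes positional
        # args only; the lambda closes over the keyword arguments.
        client = await loop.run_in_executor(
            pool, lambda: make_client(username="u", password="p", region="ous")
        )
    print(client)


asyncio.run(main())
```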
@@ -5,7 +5,8 @@ from __future__ import annotations
import logging
from typing import Any

-from pydexcom import AccountError, Dexcom, SessionError
+from pydexcom import Dexcom, Region
+from pydexcom.errors import AccountError, SessionError
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult

@@ -37,10 +38,13 @@ class DexcomConfigFlow(ConfigFlow, domain=DOMAIN):
        if user_input is not None:
            try:
                await self.hass.async_add_executor_job(
-                    Dexcom,
-                    user_input[CONF_USERNAME],
-                    user_input[CONF_PASSWORD],
-                    user_input[CONF_SERVER] == SERVER_OUS,
+                    lambda: Dexcom(
+                        username=user_input[CONF_USERNAME],
+                        password=user_input[CONF_PASSWORD],
+                        region=Region.OUS
+                        if user_input[CONF_SERVER] == SERVER_OUS
+                        else Region.US,
+                    )
                )
            except SessionError:
                errors["base"] = "cannot_connect"
@@ -18,7 +18,7 @@ _SCAN_INTERVAL = timedelta(seconds=180)
type DexcomConfigEntry = ConfigEntry[DexcomCoordinator]


-class DexcomCoordinator(DataUpdateCoordinator[GlucoseReading]):
+class DexcomCoordinator(DataUpdateCoordinator[GlucoseReading | None]):
    """Dexcom Coordinator."""

    def __init__(

@@ -37,7 +37,7 @@ class DexcomCoordinator(DataUpdateCoordinator[GlucoseReading]):
        )
        self.dexcom = dexcom

-    async def _async_update_data(self) -> GlucoseReading:
+    async def _async_update_data(self) -> GlucoseReading | None:
        """Fetch data from API endpoint."""
        return await self.hass.async_add_executor_job(
            self.dexcom.get_current_glucose_reading
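Widening the coordinator type to `GlucoseReading | None` reflects that `get_current_glucose_reading()` can return None when there is no recent reading. A hedged sketch, not the integration's actual sensor code, of how a consumer would guard against that:

```python
def native_value(reading):
    """Return the glucose value, or None when there is no recent reading."""
    if reading is None:  # get_current_glucose_reading() found no data
        return None
    # Assumption: pydexcom's GlucoseReading exposes the mg/dL value as .value.
    return reading.value
```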
@@ -7,5 +7,5 @@
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["pydexcom"],
-  "requirements": ["pydexcom==0.2.3"]
+  "requirements": ["pydexcom==0.5.1"]
}
@@ -9,7 +9,7 @@
  "iot_class": "local_polling",
  "loggers": ["fritzconnection"],
  "quality_scale": "bronze",
-  "requirements": ["fritzconnection[qr]==1.15.0", "xmltodict==1.0.2"],
+  "requirements": ["fritzconnection[qr]==1.15.1", "xmltodict==1.0.2"],
  "ssdp": [
    {
      "st": "urn:schemas-upnp-org:device:fritzbox:1"
@@ -7,5 +7,5 @@
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["fritzconnection"],
-  "requirements": ["fritzconnection[qr]==1.15.0"]
+  "requirements": ["fritzconnection[qr]==1.15.1"]
}
@@ -19,9 +19,7 @@
  ],
  "documentation": "https://www.home-assistant.io/integrations/frontend",
  "integration_type": "system",
-  "preview_features": {
-    "winter_mode": {}
-  },
+  "preview_features": { "winter_mode": {} },
  "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20260128.1"]
+  "requirements": ["home-assistant-frontend==20260128.5"]
}
@@ -8,5 +8,5 @@
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["googleapiclient"],
-  "requirements": ["gcal-sync==8.0.0", "oauth2client==4.1.3", "ical==12.1.2"]
+  "requirements": ["gcal-sync==8.0.0", "oauth2client==4.1.3", "ical==12.1.3"]
}
@@ -8,5 +8,5 @@
  "iot_class": "local_push",
  "loggers": ["pyhik"],
  "quality_scale": "legacy",
-  "requirements": ["pyHik==0.4.1"]
+  "requirements": ["pyHik==0.4.2"]
}
@@ -169,6 +169,7 @@ async def async_setup_entry(
) -> None:
    """Set up the Home Connect binary sensor."""
    setup_home_connect_entry(
+        hass,
        entry,
        _get_entities_for_appliance,
        async_add_entities,
@@ -73,6 +73,7 @@ async def async_setup_entry(
) -> None:
    """Set up the Home Connect button entities."""
    setup_home_connect_entry(
+        hass,
        entry,
        _get_entities_for_appliance,
        async_add_entities,
@@ -7,18 +7,44 @@ from typing import cast

from aiohomeconnect.model import EventKey

+from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
+from homeassistant.helpers import entity_registry as er
+from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

+from .const import DOMAIN
from .coordinator import HomeConnectApplianceData, HomeConnectConfigEntry
from .entity import HomeConnectEntity, HomeConnectOptionEntity


+def should_add_option_entity(
+    description: EntityDescription,
+    appliance: HomeConnectApplianceData,
+    entity_registry: er.EntityRegistry,
+    platform: Platform,
+) -> bool:
+    """Check if the option entity should be added for the appliance.
+
+    This function returns `True` if the option is available in the appliance options
+    or if the entity was added in previous loads of this integration.
+    """
+    description_key = description.key
+    return description_key in appliance.options or (
+        entity_registry.async_get_entity_id(
+            platform, DOMAIN, f"{appliance.info.ha_id}-{description_key}"
+        )
+        is not None
+    )
+
+
def _create_option_entities(
    entity_registry: er.EntityRegistry,
    entry: HomeConnectConfigEntry,
    appliance: HomeConnectApplianceData,
    known_entity_unique_ids: dict[str, str],
    get_option_entities_for_appliance: Callable[
-        [HomeConnectConfigEntry, HomeConnectApplianceData],
+        [HomeConnectConfigEntry, HomeConnectApplianceData, er.EntityRegistry],
        list[HomeConnectOptionEntity],
    ],
    async_add_entities: AddConfigEntryEntitiesCallback,
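The effect of `should_add_option_entity` is that an option entity survives a reload even when the currently selected program no longer exposes the option. A self-contained simulation of that rule, with the entity registry stubbed out as a plain set:

```python
# Simulation of the rule above: an option entity is added if the option is
# currently available OR its unique_id was registered in a previous load.
registered_unique_ids = {"appliance-1-StartInRelative"}  # stand-in registry


def should_add(option_key: str, available_options: set[str], ha_id: str) -> bool:
    return (
        option_key in available_options
        or f"{ha_id}-{option_key}" in registered_unique_ids
    )


assert should_add("StartInRelative", set(), "appliance-1")   # kept from last load
assert not should_add("Duration", set(), "appliance-1")      # never existed
assert should_add("Duration", {"Duration"}, "appliance-1")   # currently offered
```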
@@ -26,7 +52,9 @@ def _create_option_entities(
    """Create the required option entities for the appliances."""
    option_entities_to_add = [
        entity
-        for entity in get_option_entities_for_appliance(entry, appliance)
+        for entity in get_option_entities_for_appliance(
+            entry, appliance, entity_registry
+        )
        if entity.unique_id not in known_entity_unique_ids
    ]
    known_entity_unique_ids.update(

@@ -39,13 +67,14 @@


def _handle_paired_or_connected_appliance(
+    hass: HomeAssistant,
    entry: HomeConnectConfigEntry,
    known_entity_unique_ids: dict[str, str],
    get_entities_for_appliance: Callable[
        [HomeConnectConfigEntry, HomeConnectApplianceData], list[HomeConnectEntity]
    ],
    get_option_entities_for_appliance: Callable[
-        [HomeConnectConfigEntry, HomeConnectApplianceData],
+        [HomeConnectConfigEntry, HomeConnectApplianceData, er.EntityRegistry],
        list[HomeConnectOptionEntity],
    ]
    | None,

@@ -60,6 +89,7 @@ def _handle_paired_or_connected_appliance(
    already or it is the first time we see them when the appliance is connected.
    """
    entities: list[HomeConnectEntity] = []
+    entity_registry = er.async_get(hass)
    for appliance in entry.runtime_data.data.values():
        entities_to_add = [
            entity

@@ -69,7 +99,9 @@
        if get_option_entities_for_appliance:
            entities_to_add.extend(
                entity
-                for entity in get_option_entities_for_appliance(entry, appliance)
+                for entity in get_option_entities_for_appliance(
+                    entry, appliance, entity_registry
+                )
                if entity.unique_id not in known_entity_unique_ids
            )
        for event_key in (

@@ -80,6 +112,7 @@
            entry.runtime_data.async_add_listener(
                partial(
                    _create_option_entities,
+                    entity_registry,
                    entry,
                    appliance,
                    known_entity_unique_ids,

@@ -120,13 +153,14 @@


def setup_home_connect_entry(
+    hass: HomeAssistant,
    entry: HomeConnectConfigEntry,
    get_entities_for_appliance: Callable[
        [HomeConnectConfigEntry, HomeConnectApplianceData], list[HomeConnectEntity]
    ],
    async_add_entities: AddConfigEntryEntitiesCallback,
    get_option_entities_for_appliance: Callable[
-        [HomeConnectConfigEntry, HomeConnectApplianceData],
+        [HomeConnectConfigEntry, HomeConnectApplianceData, er.EntityRegistry],
        list[HomeConnectOptionEntity],
    ]
    | None = None,

@@ -141,6 +175,7 @@ def setup_home_connect_entry(
    entry.runtime_data.async_add_special_listener(
        partial(
            _handle_paired_or_connected_appliance,
+            hass,
            entry,
            known_entity_unique_ids,
            get_entities_for_appliance,
@@ -96,6 +96,7 @@ async def async_setup_entry(
) -> None:
    """Set up the Home Connect light."""
    setup_home_connect_entry(
+        hass,
        entry,
        _get_entities_for_appliance,
        async_add_entities,
@@ -11,12 +11,13 @@ from homeassistant.components.number import (
    NumberEntity,
    NumberEntityDescription,
)
-from homeassistant.const import PERCENTAGE
+from homeassistant.const import PERCENTAGE, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
+from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

-from .common import setup_home_connect_entry
+from .common import setup_home_connect_entry, should_add_option_entity
from .const import DOMAIN, UNIT_MAP
from .coordinator import HomeConnectApplianceData, HomeConnectConfigEntry
from .entity import HomeConnectEntity, HomeConnectOptionEntity, constraint_fetcher

@@ -136,12 +137,15 @@ def _get_entities_for_appliance(
def _get_option_entities_for_appliance(
    entry: HomeConnectConfigEntry,
    appliance: HomeConnectApplianceData,
+    entity_registry: er.EntityRegistry,
) -> list[HomeConnectOptionEntity]:
    """Get a list of currently available option entities."""
    return [
        HomeConnectOptionNumberEntity(entry.runtime_data, appliance, description)
        for description in NUMBER_OPTIONS
-        if description.key in appliance.options
+        if should_add_option_entity(
+            description, appliance, entity_registry, Platform.NUMBER
+        )
    ]

@@ -152,6 +156,7 @@ async def async_setup_entry(
) -> None:
    """Set up the Home Connect number."""
    setup_home_connect_entry(
+        hass,
        entry,
        _get_entities_for_appliance,
        async_add_entities,
@@ -11,11 +11,13 @@ from aiohomeconnect.model.error import HomeConnectError
from aiohomeconnect.model.program import Execution

from homeassistant.components.select import SelectEntity, SelectEntityDescription
+from homeassistant.const import Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
+from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

-from .common import setup_home_connect_entry
+from .common import setup_home_connect_entry, should_add_option_entity
from .const import (
    AVAILABLE_MAPS_ENUM,
    BEAN_AMOUNT_OPTIONS,

@@ -358,12 +360,13 @@ def _get_entities_for_appliance(
def _get_option_entities_for_appliance(
    entry: HomeConnectConfigEntry,
    appliance: HomeConnectApplianceData,
+    entity_registry: er.EntityRegistry,
) -> list[HomeConnectOptionEntity]:
    """Get a list of entities."""
    return [
        HomeConnectSelectOptionEntity(entry.runtime_data, appliance, desc)
        for desc in PROGRAM_SELECT_OPTION_ENTITY_DESCRIPTIONS
-        if desc.key in appliance.options
+        if should_add_option_entity(desc, appliance, entity_registry, Platform.SELECT)
    ]

@@ -374,6 +377,7 @@ async def async_setup_entry(
) -> None:
    """Set up the Home Connect select entities."""
    setup_home_connect_entry(
+        hass,
        entry,
        _get_entities_for_appliance,
        async_add_entities,
@@ -115,7 +115,6 @@ SENSORS = (
        entity_category=EntityCategory.DIAGNOSTIC,
        native_unit_of_measurement=UnitOfVolume.MILLILITERS,
        device_class=SensorDeviceClass.VOLUME,
-        state_class=SensorStateClass.TOTAL_INCREASING,
        translation_key="hot_water_counter",
    ),
    HomeConnectSensorEntityDescription(

@@ -540,6 +539,7 @@ async def async_setup_entry(
) -> None:
    """Set up the Home Connect sensor."""
    setup_home_connect_entry(
+        hass,
        entry,
        _get_entities_for_appliance,
        async_add_entities,
@@ -7,12 +7,14 @@ from aiohomeconnect.model import OptionKey, SettingKey
from aiohomeconnect.model.error import HomeConnectError

from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
+from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
+from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import UNDEFINED, UndefinedType

-from .common import setup_home_connect_entry
+from .common import setup_home_connect_entry, should_add_option_entity
from .const import BSH_POWER_OFF, BSH_POWER_ON, BSH_POWER_STANDBY, DOMAIN
from .coordinator import HomeConnectApplianceData, HomeConnectConfigEntry
from .entity import HomeConnectEntity, HomeConnectOptionEntity

@@ -190,12 +192,15 @@ def _get_entities_for_appliance(
def _get_option_entities_for_appliance(
    entry: HomeConnectConfigEntry,
    appliance: HomeConnectApplianceData,
+    entity_registry: er.EntityRegistry,
) -> list[HomeConnectOptionEntity]:
    """Get a list of currently available option entities."""
    return [
        HomeConnectSwitchOptionEntity(entry.runtime_data, appliance, description)
        for description in SWITCH_OPTIONS
-        if description.key in appliance.options
+        if should_add_option_entity(
+            description, appliance, entity_registry, Platform.SWITCH
+        )
    ]

@@ -206,6 +211,7 @@ async def async_setup_entry(
) -> None:
    """Set up the Home Connect switch."""
    setup_home_connect_entry(
+        hass,
        entry,
        _get_entities_for_appliance,
        async_add_entities,
@@ -12,5 +12,5 @@
  "iot_class": "local_polling",
  "loggers": ["incomfortclient"],
  "quality_scale": "platinum",
-  "requirements": ["incomfort-client==0.6.11"]
+  "requirements": ["incomfort-client==0.6.12"]
}
@@ -90,6 +90,7 @@
      "boiler_int": "Boiler internal",
      "buffer": "Buffer",
      "central_heating": "Central heating",
+      "central_heating_low": "Central heating low",
      "central_heating_rf": "Central heating rf",
      "cv_temperature_too_high_e1": "Temperature too high",
      "flame_detection_fault_e6": "Flame detection fault",
@@ -10,6 +10,7 @@ import voluptuous as vol
from homeassistant.components.script import CONF_MODE
from homeassistant.const import CONF_DESCRIPTION, CONF_TYPE, SERVICE_RELOAD
from homeassistant.core import HomeAssistant, ServiceCall
+from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import (
    config_validation as cv,
    intent,

@@ -18,6 +19,7 @@ from homeassistant.helpers import (
    template,
)
from homeassistant.helpers.reload import async_integration_yaml_config
+from homeassistant.helpers.script import async_validate_actions_config
from homeassistant.helpers.typing import ConfigType

_LOGGER = logging.getLogger(__name__)

@@ -85,19 +87,29 @@ async def async_reload(hass: HomeAssistant, service_call: ServiceCall) -> None:

    new_intents = new_config[DOMAIN]

-    async_load_intents(hass, new_intents)
+    await async_load_intents(hass, new_intents)


-def async_load_intents(hass: HomeAssistant, intents: dict[str, ConfigType]) -> None:
+async def async_load_intents(
+    hass: HomeAssistant, intents: dict[str, ConfigType]
+) -> None:
    """Load YAML intents into the intent system."""
    hass.data[DOMAIN] = intents

    for intent_type, conf in intents.items():
        if CONF_ACTION in conf:
+            try:
+                actions = await async_validate_actions_config(hass, conf[CONF_ACTION])
+            except (vol.Invalid, HomeAssistantError) as exc:
+                _LOGGER.error(
+                    "Failed to validate actions for intent %s: %s", intent_type, exc
+                )
+                continue  # Skip this intent
+
            script_mode: str = conf.get(CONF_MODE, script.DEFAULT_SCRIPT_MODE)
            conf[CONF_ACTION] = script.Script(
                hass,
-                conf[CONF_ACTION],
+                actions,
                f"Intent Script {intent_type}",
                DOMAIN,
                script_mode=script_mode,

@@ -109,7 +121,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the intent script component."""
    intents = config[DOMAIN]

-    async_load_intents(hass, intents)
+    await async_load_intents(hass, intents)

    async def _handle_reload(service_call: ServiceCall) -> None:
        return await async_reload(hass, service_call)
@@ -2,16 +2,19 @@

from __future__ import annotations

+import logging
import math
from typing import Any

from propcache.api import cached_property
from xknx.devices import Fan as XknxFan
+from xknx.telegram.address import parse_device_group_address

from homeassistant import config_entries
from homeassistant.components.fan import FanEntity, FanEntityFeature
from homeassistant.const import CONF_ENTITY_CATEGORY, CONF_NAME, Platform
-from homeassistant.core import HomeAssistant
+from homeassistant.core import HomeAssistant, callback
+from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import (
    AddConfigEntryEntitiesCallback,
    async_get_current_platform,

@@ -37,6 +40,58 @@ from .storage.const import (
)
from .storage.util import ConfigExtractor

+_LOGGER = logging.getLogger(__name__)
+
+
+@callback
+def async_migrate_yaml_uids(
+    hass: HomeAssistant, platform_config: list[ConfigType]
+) -> None:
+    """Migrate entities unique_id for YAML switch-only fan entities."""
+    # issue was introduced in 2026.1 - this migration in 2026.2
+    ent_reg = er.async_get(hass)
+    invalid_uid = str(None)
+    if (
+        none_entity_id := ent_reg.async_get_entity_id(Platform.FAN, DOMAIN, invalid_uid)
+    ) is None:
+        return
+    for config in platform_config:
+        if not config.get(KNX_ADDRESS) and (
+            new_uid_base := config.get(FanSchema.CONF_SWITCH_ADDRESS)
+        ):
+            break
+    else:
+        _LOGGER.info(
+            "No YAML entry found to migrate fan entity '%s' unique_id from '%s'. Removing entry",
+            none_entity_id,
+            invalid_uid,
+        )
+        ent_reg.async_remove(none_entity_id)
+        return
+    new_uid = str(
+        parse_device_group_address(
+            new_uid_base[0],  # list of group addresses - first item is sending address
+        )
+    )
+    try:
+        ent_reg.async_update_entity(none_entity_id, new_unique_id=str(new_uid))
+        _LOGGER.info(
+            "Migrating fan entity '%s' unique_id from '%s' to %s",
+            none_entity_id,
+            invalid_uid,
+            new_uid,
+        )
+    except ValueError:
+        # New unique_id already exists - remove invalid entry. User might have changed YAML
+        _LOGGER.info(
+            "Failed to migrate fan entity '%s' unique_id from '%s' to '%s'. "
+            "Removing the invalid entry",
+            none_entity_id,
+            invalid_uid,
+            new_uid,
+        )
+        ent_reg.async_remove(none_entity_id)
+
+
async def async_setup_entry(
    hass: HomeAssistant,

@@ -57,6 +112,7 @@ async def async_setup_entry(

    entities: list[_KnxFan] = []
    if yaml_platform_config := knx_module.config_yaml.get(Platform.FAN):
+        async_migrate_yaml_uids(hass, yaml_platform_config)
        entities.extend(
            KnxYamlFan(knx_module, entity_config)
            for entity_config in yaml_platform_config

@@ -177,7 +233,10 @@ class KnxYamlFan(_KnxFan, KnxYamlEntity):
        self._step_range: tuple[int, int] | None = (1, max_step) if max_step else None
        self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)

-        self._attr_unique_id = str(self._device.speed.group_address)
+        if self._device.speed.group_address:
+            self._attr_unique_id = str(self._device.speed.group_address)
+        else:
+            self._attr_unique_id = str(self._device.switch.group_address)


class KnxUiFan(_KnxFan, KnxUiEntity):
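A worked illustration of the invalid unique_id this KNX migration repairs: a switch-only fan has no speed group address, so `str(...)` over a missing address produced the literal string "None" as the entity's unique_id in 2026.1. The group addresses below are hypothetical:

```python
# Why the 2026.1 bug produced a unique_id of "None":
speed_group_address = None          # switch-only fan: no speed address
invalid_uid = str(speed_group_address)
assert invalid_uid == "None"

# The migration rewrites it to the sending (first) switch group address:
switch_addresses = ["1/2/3", "1/2/4"]  # hypothetical YAML ga list
new_uid = switch_addresses[0]
assert new_uid == "1/2/3"
```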
@@ -7,5 +7,5 @@
  "documentation": "https://www.home-assistant.io/integrations/local_calendar",
  "iot_class": "local_polling",
  "loggers": ["ical"],
-  "requirements": ["ical==12.1.2"]
+  "requirements": ["ical==12.1.3"]
}
@@ -5,5 +5,5 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/local_todo",
  "iot_class": "local_polling",
-  "requirements": ["ical==12.1.2"]
+  "requirements": ["ical==12.1.3"]
}
@@ -33,6 +33,7 @@ from .const import (  # noqa: F401
    CONF_ALLOW_SINGLE_WORD,
    CONF_ICON,
    CONF_REQUIRE_ADMIN,
+    CONF_RESOURCE_MODE,
    CONF_SHOW_IN_SIDEBAR,
    CONF_TITLE,
    CONF_URL_PATH,

@@ -61,7 +62,7 @@ def _validate_url_slug(value: Any) -> str:
    """Validate value is a valid url slug."""
    if value is None:
        raise vol.Invalid("Slug should not be None")
-    if "-" not in value:
+    if value != "lovelace" and "-" not in value:
        raise vol.Invalid("Url path needs to contain a hyphen (-)")
    str_value = str(value)
    slg = slugify(str_value, separator="-")

@@ -84,9 +85,13 @@ CONFIG_SCHEMA = vol.Schema(
    {
        vol.Optional(DOMAIN, default={}): vol.Schema(
            {
+                # Deprecated - Remove in 2026.8
                vol.Optional(CONF_MODE, default=MODE_STORAGE): vol.All(
                    vol.Lower, vol.In([MODE_YAML, MODE_STORAGE])
                ),
+                vol.Optional(CONF_RESOURCE_MODE): vol.All(
+                    vol.Lower, vol.In([MODE_YAML, MODE_STORAGE])
+                ),
                vol.Optional(CONF_DASHBOARDS): cv.schema_with_slug_keys(
                    YAML_DASHBOARD_SCHEMA,
                    slug_validator=_validate_url_slug,
@@ -103,7 +108,7 @@ CONFIG_SCHEMA = vol.Schema(
class LovelaceData:
    """Dataclass to store information in hass.data."""

    mode: str
+    resource_mode: str  # The mode used for resources (yaml or storage)
    dashboards: dict[str | None, dashboard.LovelaceConfig]
    resources: resources.ResourceYAMLCollection | resources.ResourceStorageCollection
    yaml_dashboards: dict[str | None, ConfigType]
@@ -114,18 +119,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    mode = config[DOMAIN][CONF_MODE]
    yaml_resources = config[DOMAIN].get(CONF_RESOURCES)

-    # Deprecated - Remove in 2026.8
-    # For YAML mode, register the default panel in yaml mode (temporary until user migrates)
-    if mode == MODE_YAML:
-        frontend.async_register_built_in_panel(
-            hass,
-            DOMAIN,
-            config={"mode": mode},
-            sidebar_title="overview",
-            sidebar_icon="mdi:view-dashboard",
-            sidebar_default_visible=False,
-        )
-        _async_create_yaml_mode_repair(hass)
+    # resource_mode controls how resources are loaded (yaml vs storage)
+    # Deprecated - Remove mode fallback in 2026.8
+    resource_mode = config[DOMAIN].get(CONF_RESOURCE_MODE, mode)

    async def reload_resources_service_handler(service_call: ServiceCall) -> None:
        """Reload yaml resources."""
@@ -149,12 +145,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
        )
        hass.data[LOVELACE_DATA].resources = resource_collection

    default_config: dashboard.LovelaceConfig
    resource_collection: (
        resources.ResourceYAMLCollection | resources.ResourceStorageCollection
    )
-    if mode == MODE_YAML:
-        default_config = dashboard.LovelaceYAML(hass, None, None)
+    default_config = dashboard.LovelaceStorage(hass, None)

+    # Load resources based on resource_mode
+    if resource_mode == MODE_YAML:
        resource_collection = await create_yaml_resource_col(hass, yaml_resources)

        async_register_admin_service(
@@ -177,8 +174,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
        )

-    else:
-        default_config = dashboard.LovelaceStorage(hass, None)
        if yaml_resources is not None:
            _LOGGER.warning(
                "Lovelace is running in storage mode. Define resources via user"
@@ -195,18 +190,44 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
        RESOURCE_UPDATE_FIELDS,
    ).async_setup(hass)

+    websocket_api.async_register_command(hass, websocket.websocket_lovelace_info)
    websocket_api.async_register_command(hass, websocket.websocket_lovelace_config)
    websocket_api.async_register_command(hass, websocket.websocket_lovelace_save_config)
    websocket_api.async_register_command(
        hass, websocket.websocket_lovelace_delete_config
    )

+    yaml_dashboards = config[DOMAIN].get(CONF_DASHBOARDS, {})
+
+    # Deprecated - Remove in 2026.8
+    # For YAML mode, add the default "lovelace" dashboard if not already defined
+    # This migrates the legacy yaml mode to a proper yaml dashboard entry
+    if mode == MODE_YAML and DOMAIN not in yaml_dashboards:
+        translations = await async_get_translations(
+            hass, hass.config.language, "dashboard", {onboarding.DOMAIN}
+        )
+        title = translations.get(
+            "component.onboarding.dashboard.overview.title", "Overview"
+        )
+        yaml_dashboards = {
+            DOMAIN: {
+                CONF_TITLE: title,
+                CONF_ICON: DEFAULT_ICON,
+                CONF_SHOW_IN_SIDEBAR: True,
+                CONF_REQUIRE_ADMIN: False,
+                CONF_MODE: MODE_YAML,
+                CONF_FILENAME: LOVELACE_CONFIG_FILE,
+            },
+            **yaml_dashboards,
+        }
+        _async_create_yaml_mode_repair(hass)
+
    hass.data[LOVELACE_DATA] = LovelaceData(
        mode=mode,
+        resource_mode=resource_mode,
        # We store a dictionary mapping url_path: config. None is the default.
        dashboards={None: default_config},
        resources=resource_collection,
-        yaml_dashboards=config[DOMAIN].get(CONF_DASHBOARDS, {}),
+        yaml_dashboards=yaml_dashboards,
    )

    if hass.config.recovery_mode:
@@ -450,7 +471,7 @@ async def _async_migrate_default_config(
# Deprecated - Remove in 2026.8
@callback
def _async_create_yaml_mode_repair(hass: HomeAssistant) -> None:
-    """Create repair issue for YAML mode migration."""
+    """Create repair issue for YAML mode deprecation."""
    ir.async_create_issue(
        hass,
        DOMAIN,
@@ -158,7 +158,15 @@ async def _get_dashboard_info(
    """Load a dashboard and return info on views."""
    if url_path == DEFAULT_DASHBOARD:
        url_path = None
-    dashboard = hass.data[LOVELACE_DATA].dashboards.get(url_path)
+
+    # When url_path is None, prefer "lovelace" dashboard if it exists (for YAML mode)
+    # Otherwise fall back to dashboards[None] (storage mode default)
+    if url_path is None:
+        dashboard = hass.data[LOVELACE_DATA].dashboards.get(DOMAIN) or hass.data[
+            LOVELACE_DATA
+        ].dashboards.get(None)
+    else:
+        dashboard = hass.data[LOVELACE_DATA].dashboards.get(url_path)

    if dashboard is None:
        raise ValueError("Invalid dashboard specified")
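The lookup above (and its twin in the websocket handler further down) implements a two-step fallback: the YAML-mode dashboard registered under the "lovelace" key wins when present, otherwise the storage-mode default at key None is used. A tiny self-contained simulation of that ordering:

```python
# Simulation of the default-dashboard fallback; values are stand-ins.
def default_dashboard(dashboards: dict):
    return dashboards.get("lovelace") or dashboards.get(None)


assert default_dashboard({None: "storage", "lovelace": "yaml"}) == "yaml"
assert default_dashboard({None: "storage"}) == "storage"
```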
@@ -57,6 +57,7 @@ RESOURCE_UPDATE_FIELDS: VolDictType = {
SERVICE_RELOAD_RESOURCES = "reload_resources"
RESOURCE_RELOAD_SERVICE_SCHEMA = vol.Schema({})

+CONF_RESOURCE_MODE = "resource_mode"
CONF_TITLE = "title"
CONF_REQUIRE_ADMIN = "require_admin"
CONF_SHOW_IN_SIDEBAR = "show_in_sidebar"
@@ -6,8 +6,8 @@
  },
  "issues": {
    "yaml_mode_deprecated": {
-      "description": "Starting with Home Assistant 2026.8, the default Lovelace dashboard will no longer support YAML mode. To migrate:\n\n1. Remove `mode: yaml` from `lovelace:` in your `configuration.yaml`\n2. Rename `{config_file}` to a new filename (e.g., `my-dashboard.yaml`)\n3. Add a dashboard entry in your `configuration.yaml`:\n\n```yaml\nlovelace:\n  dashboards:\n    lovelace:\n      mode: yaml\n      filename: my-dashboard.yaml\n      title: Overview\n      icon: mdi:view-dashboard\n      show_in_sidebar: true\n```\n\n4. Restart Home Assistant",
-      "title": "Lovelace YAML mode migration required"
+      "description": "Your YAML dashboard configuration uses the legacy `mode: yaml` option, which will be removed in Home Assistant 2026.8. Your YAML dashboards will continue to work, you just need to update how they are defined.\n\nTo update your configuration:\n\n1. Remove `mode: yaml` from `lovelace:` in your `configuration.yaml`\n2. Add a dashboard entry instead:\n\n   ```yaml\n   lovelace:\n     resource_mode: yaml\n     dashboards:\n       lovelace:\n         mode: yaml\n         filename: {config_file}\n         title: Overview\n         icon: mdi:view-dashboard\n         show_in_sidebar: true\n   ```\n\n3. Restart Home Assistant\n\nNote: `resource_mode: yaml` keeps loading resources from YAML. If you want to manage resources through the UI instead, you can remove this line and move your resources to Settings > Dashboards > Resources.",
+      "title": "Lovelace YAML configuration needs update"
    }
  },
  "services": {
@@ -42,9 +42,7 @@ async def system_health_info(hass: HomeAssistant) -> dict[str, Any]:
        else:
            health_info[key] = dashboard[key]

-    if hass.data[LOVELACE_DATA].mode == MODE_YAML:
-        health_info[CONF_MODE] = MODE_YAML
-    elif MODE_STORAGE in modes:
+    if MODE_STORAGE in modes:
        health_info[CONF_MODE] = MODE_STORAGE
    elif MODE_YAML in modes:
        health_info[CONF_MODE] = MODE_YAML
@@ -14,7 +14,13 @@ from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.json import json_fragment
|
||||
|
||||
from .const import CONF_URL_PATH, LOVELACE_DATA, ConfigNotFound
|
||||
from .const import (
|
||||
CONF_RESOURCE_MODE,
|
||||
CONF_URL_PATH,
|
||||
DOMAIN,
|
||||
LOVELACE_DATA,
|
||||
ConfigNotFound,
|
||||
)
|
||||
from .dashboard import LovelaceConfig
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@@ -38,7 +44,15 @@ def _handle_errors[_R](
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
url_path = msg.get(CONF_URL_PATH)
|
||||
config = hass.data[LOVELACE_DATA].dashboards.get(url_path)
|
||||
|
||||
# When url_path is None, prefer "lovelace" dashboard if it exists (for YAML mode)
|
||||
# Otherwise fall back to dashboards[None] (storage mode default)
|
||||
if url_path is None:
|
||||
config = hass.data[LOVELACE_DATA].dashboards.get(DOMAIN) or hass.data[
|
||||
LOVELACE_DATA
|
||||
].dashboards.get(None)
|
||||
else:
|
||||
config = hass.data[LOVELACE_DATA].dashboards.get(url_path)
|
||||
|
||||
if config is None:
|
||||
connection.send_error(
|
||||
@@ -100,6 +114,20 @@ async def websocket_lovelace_resources_impl(
|
||||
connection.send_result(msg["id"], resources.async_items())
|
||||
|
||||
|
||||
@websocket_api.websocket_command({"type": "lovelace/info"})
|
||||
@websocket_api.async_response
|
||||
async def websocket_lovelace_info(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Send Lovelace UI info over WebSocket connection."""
|
||||
connection.send_result(
|
||||
msg["id"],
|
||||
{CONF_RESOURCE_MODE: hass.data[LOVELACE_DATA].resource_mode},
|
||||
)
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
"type": "lovelace/config",
|
||||
|
||||
@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["meteoclimatic"],
"requirements": ["pymeteoclimatic==0.1.0"]
"requirements": ["pymeteoclimatic==0.1.1"]
}

@@ -722,7 +722,8 @@ POLLED_SENSOR_TYPES: Final[tuple[MieleSensorDefinition[MieleFillingLevel], ...]]
description=MieleSensorDescription[MieleFillingLevel](
key="power_disk_level",
translation_key="power_disk_level",
value_fn=lambda value: None,
value_fn=lambda value: value.power_disc_filling_level,
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
),

@@ -34,7 +34,7 @@
},
"user": {
"data": {
"domain": "[%key:common::config_flow::data::username%]",
"domain": "Domain",
"host": "[%key:common::config_flow::data::host%]",
"password": "Dynamic DNS password"
},

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/nibe_heatpump",
"integration_type": "device",
"iot_class": "local_polling",
"requirements": ["nibe==2.21.0"]
"requirements": ["nibe==2.22.0"]
}

@@ -594,7 +594,8 @@ UNIT_CONVERTERS: dict[NumberDeviceClass, type[BaseUnitConverter]] = {
}

# We translate units that were using the legacy coding of μ \u00b5
# to units using recommended coding of μ \u03bc
# to units using recommended coding of μ \u03bc and
# we convert alternative accepted units to the preferred unit.
AMBIGUOUS_UNITS: dict[str | None, str] = {
"\u00b5Sv/h": "μSv/h", # aranet: radiation rate
"\u00b5S/cm": UnitOfConductivity.MICROSIEMENS_PER_CM,
@@ -604,4 +605,9 @@ AMBIGUOUS_UNITS: dict[str | None, str] = {
"\u00b5mol/s⋅m²": "μmol/s⋅m²", # fyta: light
"\u00b5g": UnitOfMass.MICROGRAMS,
"\u00b5s": UnitOfTime.MICROSECONDS,
"mVAr": UnitOfReactivePower.MILLIVOLT_AMPERE_REACTIVE,
"VAr": UnitOfReactivePower.VOLT_AMPERE_REACTIVE,
"kVAr": UnitOfReactivePower.KILO_VOLT_AMPERE_REACTIVE,
"VArh": UnitOfReactiveEnergy.VOLT_AMPERE_REACTIVE_HOUR,
"kVArh": UnitOfReactiveEnergy.KILO_VOLT_AMPERE_REACTIVE_HOUR,
}

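A hedged sketch of how a normalization map like AMBIGUOUS_UNITS is typically applied; the subset below is illustrative, and the real table maps to Home Assistant unit constants rather than bare strings.

# Illustrative subset only; values are assumptions for the example.
LEGACY_UNIT_MAP = {"\u00b5g": "μg", "VAr": "var", "kVAr": "kvar"}

def normalize_unit(unit: str | None) -> str | None:
    """Return the preferred spelling for a reported unit; unknowns pass through."""
    return LEGACY_UNIT_MAP.get(unit, unit)

assert normalize_unit("\u00b5g") == "μg"  # legacy micro sign -> Greek mu
assert normalize_unit("kWh") == "kWh"     # already canonical, unchanged
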
@@ -14,7 +14,6 @@ from onedrive_personal_sdk.exceptions import (
NotFoundError,
OneDriveException,
)
from onedrive_personal_sdk.models.items import ItemUpdate

from homeassistant.const import CONF_ACCESS_TOKEN, Platform
from homeassistant.core import HomeAssistant
@@ -72,15 +71,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: OneDriveConfigEntry) ->
entry, data={**entry.data, CONF_FOLDER_ID: backup_folder.id}
)

# write instance id to description
if backup_folder.description != (instance_id := await async_get_instance_id(hass)):
await _handle_item_operation(
lambda: client.update_drive_item(
backup_folder.id, ItemUpdate(description=instance_id)
),
folder_name,
)

# update in case folder was renamed manually inside OneDrive
if backup_folder.name != entry.data[CONF_FOLDER_NAME]:
hass.config_entries.async_update_entry(
@@ -122,7 +112,11 @@ async def async_unload_entry(hass: HomeAssistant, entry: OneDriveConfigEntry) ->


async def _migrate_backup_files(client: OneDriveClient, backup_folder_id: str) -> None:
"""Migrate backup files to metadata version 2."""
"""Migrate backup files from metadata version 1 to version 2.

Version 1: Backup metadata was stored in the backup file's description field.
Version 2: Backup metadata is stored in a separate .metadata.json file.
"""
files = await client.list_drive_items(backup_folder_id)
for file in files:
if file.description and '"metadata_version": 1' in (
@@ -131,24 +125,11 @@ async def _migrate_backup_files(client: OneDriveClient, backup_folder_id: str) -
metadata = loads(metadata_json)
del metadata["metadata_version"]
metadata_filename = file.name.rsplit(".", 1)[0] + ".metadata.json"
metadata_file = await client.upload_file(
await client.upload_file(
backup_folder_id,
metadata_filename,
dumps(metadata),
)
metadata_description = {
"metadata_version": 2,
"backup_id": metadata["backup_id"],
"backup_file_id": file.id,
}
await client.update_drive_item(
path_or_id=metadata_file.id,
data=ItemUpdate(description=dumps(metadata_description)),
)
await client.update_drive_item(
path_or_id=file.id,
data=ItemUpdate(description=""),
)
_LOGGER.debug("Migrated backup file %s", file.name)

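An illustrative sketch of the two metadata layouts named in the docstring above; field names beyond metadata_version, backup_id, and backup_file_id are invented for the example.

from json import dumps

backup = {"backup_id": "abc123", "name": "Nightly core backup"}  # hypothetical

# Version 1: all metadata lived in the backup file's description field.
v1_description = dumps({**backup, "metadata_version": 1})

# Version 2: metadata moves into a sidecar .metadata.json file, and the
# sidecar's description only links the two drive items together.
sidecar_content = dumps(backup)
sidecar_description = dumps(
    {"metadata_version": 2, "backup_id": "abc123", "backup_file_id": "<drive-item-id>"}
)
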
@@ -3,10 +3,7 @@
from __future__ import annotations

from collections.abc import AsyncIterator, Callable, Coroutine
from dataclasses import dataclass
from functools import wraps
from html import unescape
from json import dumps, loads
import logging
from time import time
from typing import Any, Concatenate
@@ -18,7 +15,6 @@ from onedrive_personal_sdk.exceptions import (
HashMismatchError,
OneDriveException,
)
from onedrive_personal_sdk.models.items import ItemUpdate
from onedrive_personal_sdk.models.upload import FileInfo

from homeassistant.components.backup import (
@@ -30,6 +26,8 @@ from homeassistant.components.backup import (
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.json import json_dumps
from homeassistant.util.json import json_loads_object

from .const import CONF_DELETE_PERMANENTLY, DATA_BACKUP_AGENT_LISTENERS, DOMAIN
from .coordinator import OneDriveConfigEntry
@@ -38,7 +36,6 @@ _LOGGER = logging.getLogger(__name__)
MAX_CHUNK_SIZE = 60 * 1024 * 1024 # largest chunk possible, must be <= 60 MiB
TARGET_CHUNKS = 20
TIMEOUT = ClientTimeout(connect=10, total=43200) # 12 hours
METADATA_VERSION = 2
CACHE_TTL = 300


@@ -104,13 +101,10 @@ def handle_backup_errors[_R, **P](
return wrapper


@dataclass(kw_only=True)
class OneDriveBackup:
"""Define a OneDrive backup."""

backup: AgentBackup
backup_file_id: str
metadata_file_id: str
def suggested_filenames(backup: AgentBackup) -> tuple[str, str]:
"""Return the suggested filenames for the backup and metadata."""
base_name = suggested_filename(backup).rsplit(".", 1)[0]
return f"{base_name}.tar", f"{base_name}.metadata.json"


class OneDriveBackupAgent(BackupAgent):
@@ -129,7 +123,7 @@ class OneDriveBackupAgent(BackupAgent):
self.name = entry.title
assert entry.unique_id
self.unique_id = entry.unique_id
self._backup_cache: dict[str, OneDriveBackup] = {}
self._cache_backup_metadata: dict[str, AgentBackup] = {}
self._cache_expiration = time()

@handle_backup_errors
@@ -137,12 +131,11 @@ class OneDriveBackupAgent(BackupAgent):
self, backup_id: str, **kwargs: Any
) -> AsyncIterator[bytes]:
"""Download a backup file."""
backups = await self._list_cached_backups()
if backup_id not in backups:
raise BackupNotFound(f"Backup {backup_id} not found")
backup = await self._find_backup_by_id(backup_id)
backup_filename, _ = suggested_filenames(backup)

stream = await self._client.download_drive_item(
backups[backup_id].backup_file_id, timeout=TIMEOUT
f"{self._folder_id}:/{backup_filename}:", timeout=TIMEOUT
)
return stream.iter_chunked(1024)

@@ -155,9 +148,9 @@
**kwargs: Any,
) -> None:
"""Upload a backup."""
filename = suggested_filename(backup)
backup_filename, metadata_filename = suggested_filenames(backup)
file = FileInfo(
filename,
backup_filename,
backup.size,
self._folder_id,
await open_stream(),
@@ -173,7 +166,7 @@
upload_chunk_size = max(upload_chunk_size, 320 * 1024)

try:
backup_file = await LargeFileUploadClient.upload(
await LargeFileUploadClient.upload(
self._token_function,
file,
upload_chunk_size=upload_chunk_size,
@@ -185,35 +178,27 @@
"Hash validation failed, backup file might be corrupt"
) from err

# store metadata in metadata file
description = dumps(backup.as_dict())
_LOGGER.debug("Creating metadata: %s", description)
metadata_filename = filename.rsplit(".", 1)[0] + ".metadata.json"
_LOGGER.debug("Uploaded backup to %s", backup_filename)

# Store metadata in separate metadata file (just backup.as_dict(), no extra fields)
metadata_content = json_dumps(backup.as_dict())
try:
metadata_file = await self._client.upload_file(
await self._client.upload_file(
self._folder_id,
metadata_filename,
description,
metadata_content,
)
except OneDriveException:
await self._client.delete_drive_item(backup_file.id)
# Clean up the backup file if metadata upload fails
_LOGGER.debug(
"Uploading metadata failed, deleting backup file %s", backup_filename
)
await self._client.delete_drive_item(
f"{self._folder_id}:/{backup_filename}:"
)
raise

# add metadata to the metadata file
metadata_description = {
"metadata_version": METADATA_VERSION,
"backup_id": backup.backup_id,
"backup_file_id": backup_file.id,
}
try:
await self._client.update_drive_item(
path_or_id=metadata_file.id,
data=ItemUpdate(description=dumps(metadata_description)),
)
except OneDriveException:
await self._client.delete_drive_item(backup_file.id)
await self._client.delete_drive_item(metadata_file.id)
raise
_LOGGER.debug("Uploaded metadata file %s", metadata_filename)
self._cache_expiration = time()

@handle_backup_errors
@@ -223,66 +208,63 @@
**kwargs: Any,
) -> None:
"""Delete a backup file."""
backups = await self._list_cached_backups()
if backup_id not in backups:
raise BackupNotFound(f"Backup {backup_id} not found")

backup = backups[backup_id]
backup = await self._find_backup_by_id(backup_id)
backup_filename, metadata_filename = suggested_filenames(backup)

delete_permanently = self._entry.options.get(CONF_DELETE_PERMANENTLY, False)

await self._client.delete_drive_item(backup.backup_file_id, delete_permanently)
await self._client.delete_drive_item(
backup.metadata_file_id, delete_permanently
f"{self._folder_id}:/{backup_filename}:", delete_permanently
)
await self._client.delete_drive_item(
f"{self._folder_id}:/{metadata_filename}:", delete_permanently
)

_LOGGER.debug("Deleted backup %s", backup_filename)
self._cache_expiration = time()

@handle_backup_errors
async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
"""List backups."""
return [
backup.backup for backup in (await self._list_cached_backups()).values()
]
return list((await self._list_cached_metadata_files()).values())

@handle_backup_errors
async def async_get_backup(self, backup_id: str, **kwargs: Any) -> AgentBackup:
"""Return a backup."""
backups = await self._list_cached_backups()
if backup_id not in backups:
raise BackupNotFound(f"Backup {backup_id} not found")
return backups[backup_id].backup
return await self._find_backup_by_id(backup_id)

async def _list_cached_backups(self) -> dict[str, OneDriveBackup]:
"""List backups with a cache."""
async def _list_cached_metadata_files(self) -> dict[str, AgentBackup]:
"""List metadata files with a cache."""
if time() <= self._cache_expiration:
return self._backup_cache
return self._cache_backup_metadata

items = await self._client.list_drive_items(self._folder_id)

async def download_backup_metadata(item_id: str) -> AgentBackup | None:
async def _download_metadata(item_id: str) -> AgentBackup | None:
"""Download metadata file."""
try:
metadata_stream = await self._client.download_drive_item(item_id)
except OneDriveException as err:
_LOGGER.warning("Error downloading metadata for %s: %s", item_id, err)
return None
metadata_json = loads(await metadata_stream.read())
return AgentBackup.from_dict(metadata_json)

backups: dict[str, OneDriveBackup] = {}
return AgentBackup.from_dict(
json_loads_object(await metadata_stream.read())
)

items = await self._client.list_drive_items(self._folder_id)
metadata_files: dict[str, AgentBackup] = {}
for item in items:
if item.description and f'"metadata_version": {METADATA_VERSION}' in (
metadata_description_json := unescape(item.description)
):
backup = await download_backup_metadata(item.id)
if backup is None:
continue
metadata_description = loads(metadata_description_json)
backups[backup.backup_id] = OneDriveBackup(
backup=backup,
backup_file_id=metadata_description["backup_file_id"],
metadata_file_id=item.id,
)
if item.name and item.name.endswith(".metadata.json"):
if metadata := await _download_metadata(item.id):
metadata_files[metadata.backup_id] = metadata

self._cache_backup_metadata = metadata_files
self._cache_expiration = time() + CACHE_TTL
self._backup_cache = backups
return backups
return self._cache_backup_metadata

async def _find_backup_by_id(self, backup_id: str) -> AgentBackup:
"""Find a backup by its backup ID on remote."""
metadata_files = await self._list_cached_metadata_files()
if backup := metadata_files.get(backup_id):
return backup

raise BackupNotFound(f"Backup {backup_id} not found")

@@ -129,9 +129,6 @@ class OneDriveConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
except OneDriveException:
self.logger.debug("Failed to create folder", exc_info=True)
errors["base"] = "folder_creation_error"
else:
if folder.description and folder.description != instance_id:
errors[CONF_FOLDER_NAME] = "folder_already_in_use"
if not errors:
title = (
f"{self.approot.created_by.user.display_name}'s OneDrive"

@@ -22,7 +22,6 @@
"default": "[%key:common::config_flow::create_entry::authenticated%]"
},
"error": {
"folder_already_in_use": "Folder already used for backups from another Home Assistant instance",
"folder_creation_error": "Failed to create folder",
"folder_rename_error": "Failed to rename folder"
},

@@ -31,7 +31,6 @@ class OpenThermEntity(Entity):
"""Represent an OpenTherm entity."""

_attr_has_entity_name = True
_attr_should_poll = False
entity_description: OpenThermEntityDescription

def __init__(
@@ -61,6 +60,8 @@ class OpenThermEntity(Entity):
class OpenThermStatusEntity(OpenThermEntity):
"""Represent an OpenTherm entity that receives status updates."""

_attr_should_poll = False

async def async_added_to_hass(self) -> None:
"""Subscribe to updates from the component."""
self.async_on_remove(

@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["pyotgw"],
"requirements": ["pyotgw==2.2.2"]
"requirements": ["pyotgw==2.2.3"]
}

@@ -9,5 +9,5 @@
"iot_class": "cloud_polling",
"loggers": ["opower"],
"quality_scale": "bronze",
"requirements": ["opower==0.16.5"]
"requirements": ["opower==0.17.0"]
}

@@ -4,15 +4,18 @@ from __future__ import annotations

from datetime import datetime

from homeassistant.components.calendar import CalendarEntity, CalendarEvent
from homeassistant.components.calendar import (
CalendarEntity,
CalendarEntityDescription,
CalendarEvent,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import CalendarUpdateCoordinator, RadarrConfigEntry, RadarrEvent
from .entity import RadarrEntity

CALENDAR_TYPE = EntityDescription(
CALENDAR_TYPE = CalendarEntityDescription(
key="calendar",
name=None,
)

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["ical"],
"quality_scale": "silver",
"requirements": ["ical==12.1.2"]
"requirements": ["ical==12.1.3"]
}

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["renault_api"],
"quality_scale": "silver",
"requirements": ["renault-api==0.5.2"]
"requirements": ["renault-api==0.5.3"]
}

@@ -20,5 +20,5 @@
"iot_class": "local_push",
"loggers": ["reolink_aio"],
"quality_scale": "platinum",
"requirements": ["reolink-aio==0.18.1"]
"requirements": ["reolink-aio==0.18.2"]
}

@@ -840,7 +840,8 @@ STATE_CLASS_UNITS: dict[SensorStateClass | str, set[type[StrEnum] | str | None]]
}

# We translate units that were using the legacy coding of μ \u00b5
# to units using recommended coding of μ \u03bc
# to units using recommended coding of μ \u03bc and
# we convert alternative accepted units to the preferred unit.
AMBIGUOUS_UNITS: dict[str | None, str] = {
"\u00b5Sv/h": "μSv/h", # aranet: radiation rate
"\u00b5S/cm": UnitOfConductivity.MICROSIEMENS_PER_CM,
@@ -850,4 +851,9 @@ AMBIGUOUS_UNITS: dict[str | None, str] = {
"\u00b5mol/s⋅m²": "μmol/s⋅m²", # fyta: light
"\u00b5g": UnitOfMass.MICROGRAMS,
"\u00b5s": UnitOfTime.MICROSECONDS,
"mVAr": UnitOfReactivePower.MILLIVOLT_AMPERE_REACTIVE,
"VAr": UnitOfReactivePower.VOLT_AMPERE_REACTIVE,
"kVAr": UnitOfReactivePower.KILO_VOLT_AMPERE_REACTIVE,
"VArh": UnitOfReactiveEnergy.VOLT_AMPERE_REACTIVE_HOUR,
"kVArh": UnitOfReactiveEnergy.KILO_VOLT_AMPERE_REACTIVE_HOUR,
}

@@ -6,6 +6,7 @@ from datetime import timedelta
from http import HTTPStatus
import logging

from aiohttp import ClientResponseError
from httpx import HTTPStatusError, RequestError
import jwt
from pysenz import SENZAPI, Thermostat
@@ -70,11 +71,26 @@ async def async_setup_entry(hass: HomeAssistant, entry: SENZConfigEntry) -> bool
translation_domain=DOMAIN,
translation_key="config_entry_not_ready",
) from err
except ClientResponseError as err:
if err.status in (HTTPStatus.UNAUTHORIZED, HTTPStatus.BAD_REQUEST):
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="config_entry_auth_failed",
) from err
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="config_entry_not_ready",
) from err
except RequestError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="config_entry_not_ready",
) from err
except Exception as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="config_entry_auth_failed",
) from err

coordinator: SENZDataUpdateCoordinator = DataUpdateCoordinator(
hass,

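A standalone restatement of the error-classification rule shown above: HTTP 400/401 responses are treated as authentication failures (trigger reauth), everything else as a temporary outage (retry later).

from http import HTTPStatus

def classify_http_error(status: int) -> str:
    """Map an HTTP status from the SENZ API to a config-entry outcome."""
    if status in (HTTPStatus.UNAUTHORIZED, HTTPStatus.BAD_REQUEST):
        return "auth_failed"  # would raise ConfigEntryAuthFailed
    return "not_ready"        # would raise ConfigEntryNotReady

assert classify_http_error(401) == "auth_failed"
assert classify_http_error(503) == "not_ready"
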
@@ -14,7 +14,7 @@ from homeassistant.components.sensor import (
SensorStateClass,
)
from homeassistant.const import UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
@@ -81,6 +81,12 @@ class SENZSensor(CoordinatorEntity[SENZDataUpdateCoordinator], SensorEntity):
serial_number=thermostat.serial_number,
)

@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
self._thermostat = self.coordinator.data[self._thermostat.serial_number]
self.async_write_ha_state()

@property
def available(self) -> bool:
"""Return True if the thermostat is available."""

@@ -59,7 +59,6 @@ from .coordinator import (
)
from .repairs import (
async_manage_ble_scanner_firmware_unsupported_issue,
async_manage_coiot_unconfigured_issue,
async_manage_deprecated_firmware_issue,
async_manage_open_wifi_ap_issue,
async_manage_outbound_websocket_incorrectly_enabled_issue,
@@ -233,7 +232,6 @@ async def _async_setup_block_entry(
await hass.config_entries.async_forward_entry_setups(
entry, runtime_data.platforms
)
async_manage_coiot_unconfigured_issue(hass, entry)
remove_empty_sub_devices(hass, entry)
elif (
sleep_period is None

@@ -47,6 +47,7 @@ from .const import (
ATTR_DEVICE,
ATTR_GENERATION,
BATTERY_DEVICES_WITH_PERMANENT_CONNECTION,
COIOT_UNCONFIGURED_ISSUE_ID,
CONF_BLE_SCANNER_MODE,
CONF_SLEEP_PERIOD,
DOMAIN,
@@ -72,6 +73,7 @@ from .const import (
)
from .utils import (
async_create_issue_unsupported_firmware,
async_manage_coiot_issues_task,
get_block_device_sleep_period,
get_device_entry_gen,
get_host,
@@ -442,26 +444,19 @@ class ShellyBlockCoordinator(ShellyCoordinatorBase[BlockDevice]):
DOMAIN,
PUSH_UPDATE_ISSUE_ID.format(unique=self.mac),
)
ir.async_delete_issue(
self.hass,
DOMAIN,
COIOT_UNCONFIGURED_ISSUE_ID.format(unique=self.mac),
)
self._push_update_failures = 0
elif update_type is BlockUpdateType.COAP_REPLY:
self._push_update_failures += 1
if self._push_update_failures == MAX_PUSH_UPDATE_FAILURES:
LOGGER.debug(
"Creating issue %s", PUSH_UPDATE_ISSUE_ID.format(unique=self.mac)
)
ir.async_create_issue(
self.config_entry.async_create_background_task(
self.hass,
DOMAIN,
PUSH_UPDATE_ISSUE_ID.format(unique=self.mac),
is_fixable=False,
is_persistent=False,
severity=ir.IssueSeverity.ERROR,
learn_more_url="https://www.home-assistant.io/integrations/shelly/#shelly-device-configuration-generation-1",
translation_key="push_update_failure",
translation_placeholders={
"device_name": self.config_entry.title,
"ip_address": self.device.ip_address,
},
async_manage_coiot_issues_task(self.hass, self.config_entry),
"coiot_issues",
)
if self._push_update_failures:
LOGGER.debug(

@@ -5,12 +5,7 @@ from __future__ import annotations
from typing import TYPE_CHECKING

from aioshelly.block_device import BlockDevice
from aioshelly.const import (
MODEL_OUT_PLUG_S_G3,
MODEL_PLUG,
MODEL_PLUG_S_G3,
RPC_GENERATIONS,
)
from aioshelly.const import MODEL_OUT_PLUG_S_G3, MODEL_PLUG_S_G3, RPC_GENERATIONS
from aioshelly.exceptions import DeviceConnectionError, RpcCallError
from aioshelly.rpc_device import RpcDevice
from awesomeversion import AwesomeVersion
@@ -24,7 +19,6 @@ from homeassistant.helpers import issue_registry as ir
from .const import (
BLE_SCANNER_FIRMWARE_UNSUPPORTED_ISSUE_ID,
BLE_SCANNER_MIN_FIRMWARE,
COIOT_UNCONFIGURED_ISSUE_ID,
CONF_BLE_SCANNER_MODE,
DEPRECATED_FIRMWARE_ISSUE_ID,
DEPRECATED_FIRMWARES,
@@ -162,51 +156,6 @@ def async_manage_outbound_websocket_incorrectly_enabled_issue(
ir.async_delete_issue(hass, DOMAIN, issue_id)


@callback
def async_manage_coiot_unconfigured_issue(
hass: HomeAssistant,
entry: ShellyConfigEntry,
) -> None:
"""Manage the CoIoT unconfigured issue."""
issue_id = COIOT_UNCONFIGURED_ISSUE_ID.format(unique=entry.unique_id)

if TYPE_CHECKING:
assert entry.runtime_data.block is not None

device = entry.runtime_data.block.device

if device.model == MODEL_PLUG:
# Shelly Plug Gen 1 does not have CoIoT settings
ir.async_delete_issue(hass, DOMAIN, issue_id)
return

coiot_config = device.settings["coiot"]
coiot_enabled = coiot_config.get("enabled")

# Check if CoIoT is disabled or peer address is not correctly set
if not coiot_enabled or (
(peer_config := coiot_config.get("peer"))
and peer_config != get_coiot_address(hass)
):
ir.async_create_issue(
hass,
DOMAIN,
issue_id,
is_fixable=True,
is_persistent=False,
severity=ir.IssueSeverity.WARNING,
translation_key="coiot_unconfigured",
translation_placeholders={
"device_name": device.name,
"ip_address": device.ip_address,
},
data={"entry_id": entry.entry_id},
)
return

ir.async_delete_issue(hass, DOMAIN, issue_id)


@callback
def async_manage_open_wifi_ap_issue(
hass: HomeAssistant,
@@ -275,7 +224,7 @@ class CoiotConfigureFlow(ShellyBlockRepairsFlow):
self, user_input: dict[str, str] | None = None
) -> data_entry_flow.FlowResult:
"""Handle the confirm step of a fix flow."""
coiot_addr = get_coiot_address(self.hass)
coiot_addr = await get_coiot_address(self.hass)
coiot_port = get_coiot_port(self.hass)
if coiot_addr is None or coiot_port is None:
return self.async_abort(reason="cannot_configure")

@@ -1283,6 +1283,7 @@ RPC_SENSORS: Final = {
key="voltmeter",
sub_key="xvoltage",
translation_key="voltmeter_value",
state_class=SensorStateClass.MEASUREMENT,
removal_condition=lambda _, status, key: (status[key].get("xvoltage") is None),
unit=lambda config: config["xvoltage"]["unit"] or None,
),
@@ -1300,6 +1301,7 @@ RPC_SENSORS: Final = {
key="input",
sub_key="xpercent",
translation_key="analog_value",
state_class=SensorStateClass.MEASUREMENT,
removal_condition=lambda config, status, key: (
config[key]["type"] != "analog"
or config[key]["enable"] is False
@@ -1344,6 +1346,7 @@ RPC_SENSORS: Final = {
key="input",
sub_key="xfreq",
translation_key="pulse_counter_frequency_value",
state_class=SensorStateClass.MEASUREMENT,
removal_condition=lambda config, status, key: (
config[key]["type"] != "count"
or config[key]["enable"] is False

@@ -22,6 +22,7 @@ from aioshelly.const import (
MODEL_EM3,
MODEL_I3,
MODEL_NAMES,
MODEL_PLUG,
RPC_GENERATIONS,
)
from aioshelly.rpc_device import RpcDevice, WsServer
@@ -29,6 +30,7 @@ from yarl import URL

from homeassistant.components import network
from homeassistant.components.http import HomeAssistantView
from homeassistant.components.network import async_get_source_ip
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_HOST,
@@ -54,6 +56,7 @@ from homeassistant.util.dt import utcnow
from .const import (
API_WS_URL,
BASIC_INPUTS_EVENTS_TYPES,
COIOT_UNCONFIGURED_ISSUE_ID,
COMPONENT_ID_PATTERN,
CONF_COAP_PORT,
CONF_GEN,
@@ -66,6 +69,7 @@ from .const import (
GEN2_RELEASE_URL,
LOGGER,
MAX_SCRIPT_SIZE,
PUSH_UPDATE_ISSUE_ID,
ROLE_GENERIC,
RPC_INPUTS_EVENTS_TYPES,
SHAIR_MAX_WORK_HOURS,
@@ -732,12 +736,12 @@ def _get_homeassistant_url(hass: HomeAssistant) -> URL | None:
return URL(raw_url)


def get_coiot_address(hass: HomeAssistant) -> str | None:
async def get_coiot_address(hass: HomeAssistant) -> str | None:
"""Return the CoIoT ip address."""
url = _get_homeassistant_url(hass)
if url is None:
if url is None or url.host is None:
return None
return str(url.host)
return await async_get_source_ip(hass, url.host)


def get_rpc_ws_url(hass: HomeAssistant) -> str | None:
@@ -1003,3 +1007,86 @@ def is_rpc_ble_scanner_supported(entry: ConfigEntry) -> bool:
entry.runtime_data.rpc_supports_scripts
and not entry.runtime_data.rpc_zigbee_firmware
)


async def check_coiot_config(device: BlockDevice, hass: HomeAssistant) -> bool:
"""Check if CoIoT is correctly configured."""
if device.model == MODEL_PLUG:
# Shelly Plug Gen 1 does not have CoIoT settings
return True

coiot_config = device.settings["coiot"]

# Check if CoIoT is disabled
if not coiot_config.get("enabled"):
return False

coiot_address = await get_coiot_address(hass)
if coiot_address is None:
LOGGER.debug(
"Skipping CoIoT peer check for device %s as no local address is available",
device.name,
)
return True

coiot_peer = f"{coiot_address}:{get_coiot_port(hass)}"
# Check if CoIoT address is not correctly set
if (peer_config := coiot_config.get("peer")) and peer_config != coiot_peer:
LOGGER.debug(
"CoIoT is unconfigured for device %s, peer_config: %s, coiot_peer: %s",
device.name,
peer_config,
coiot_peer,
)
return False

return True


async def async_manage_coiot_issues_task(
hass: HomeAssistant, entry: ConfigEntry
) -> None:
"""Manage CoIoT configuration or push update issues."""
config_issue_id = COIOT_UNCONFIGURED_ISSUE_ID.format(unique=entry.unique_id)
push_updates_issue_id = PUSH_UPDATE_ISSUE_ID.format(unique=entry.unique_id)

if TYPE_CHECKING:
assert entry.runtime_data.block is not None

device = entry.runtime_data.block.device

if await check_coiot_config(device, hass):
# CoIoT is correctly configured, create push updates issue
ir.async_delete_issue(hass, DOMAIN, config_issue_id)
ir.async_create_issue(
hass,
DOMAIN,
push_updates_issue_id,
is_fixable=False,
is_persistent=False,
severity=ir.IssueSeverity.ERROR,
learn_more_url="https://www.home-assistant.io/integrations/shelly/#shelly-device-configuration-generation-1",
translation_key="push_update_failure",
translation_placeholders={
"device_name": device.name,
"ip_address": device.ip_address,
},
)
return

# CoIoT is not correctly configured, create config issue
ir.async_delete_issue(hass, DOMAIN, push_updates_issue_id)
ir.async_create_issue(
hass,
DOMAIN,
config_issue_id,
is_fixable=True,
is_persistent=False,
severity=ir.IssueSeverity.WARNING,
translation_key="coiot_unconfigured",
translation_placeholders={
"device_name": device.name,
"ip_address": device.ip_address,
},
data={"entry_id": entry.entry_id},
)

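Resolving the CoIoT source address now goes through async_get_source_ip. As a standalone illustration (stdlib only, not the helper Home Assistant uses), picking "the local IP the OS would use to reach a given host" can be done with a routed UDP socket:

import socket

def source_ip_for(host: str, port: int = 80) -> str:
    """Return the local source IP used to reach host; no packets are sent."""
    with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock:
        sock.connect((host, port))  # UDP connect only selects a route
        return sock.getsockname()[0]
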
@@ -284,6 +284,8 @@ class SystemMonitorCoordinator(TimestampDataUpdateCoordinator[SensorData]):
try:
battery = self._psutil.sensors_battery()
_LOGGER.debug("battery: %s", battery)
except (FileNotFoundError, PermissionError) as err:
_LOGGER.debug("OS error when accessing battery sensors: %s", err)
except (AttributeError, FileNotFoundError):
_LOGGER.debug("OS does not provide battery sensors")

@@ -91,6 +91,10 @@ from .const import (
CONF_CONFIG_ENTRY_ID,
DEFAULT_API_ENDPOINT,
DOMAIN,
PARSER_HTML,
PARSER_MD,
PARSER_MD2,
PARSER_PLAIN_TEXT,
PLATFORM_BROADCAST,
PLATFORM_POLLING,
PLATFORM_WEBHOOKS,
@@ -119,11 +123,16 @@ _LOGGER = logging.getLogger(__name__)

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)

ATTR_PARSER_SCHEMA = vol.All(
cv.string,
vol.In([PARSER_HTML, PARSER_MD, PARSER_MD2, PARSER_PLAIN_TEXT]),
)

BASE_SERVICE_SCHEMA = vol.Schema(
{
vol.Optional(CONF_CONFIG_ENTRY_ID): cv.string,
vol.Optional(ATTR_TARGET): vol.All(cv.ensure_list, [vol.Coerce(int)]),
vol.Optional(ATTR_PARSER): cv.string,
vol.Optional(ATTR_PARSER): ATTR_PARSER_SCHEMA,
vol.Optional(ATTR_DISABLE_NOTIF): cv.boolean,
vol.Optional(ATTR_DISABLE_WEB_PREV): cv.boolean,
vol.Optional(ATTR_RESIZE_KEYBOARD): cv.boolean,
@@ -236,7 +245,7 @@ SERVICE_SCHEMA_EDIT_MESSAGE = vol.All(
cv.positive_int, vol.All(cv.string, "last")
),
vol.Required(ATTR_CHAT_ID): vol.Coerce(int),
vol.Optional(ATTR_PARSER): cv.string,
vol.Optional(ATTR_PARSER): ATTR_PARSER_SCHEMA,
vol.Optional(ATTR_KEYBOARD_INLINE): cv.ensure_list,
vol.Optional(ATTR_DISABLE_WEB_PREV): cv.boolean,
}
@@ -253,6 +262,7 @@ SERVICE_SCHEMA_EDIT_MESSAGE_MEDIA = vol.All(
),
vol.Required(ATTR_CHAT_ID): vol.Coerce(int),
vol.Optional(ATTR_CAPTION): cv.string,
vol.Optional(ATTR_PARSER): ATTR_PARSER_SCHEMA,
vol.Required(ATTR_MEDIA_TYPE): vol.In(
(
str(InputMediaType.ANIMATION),
@@ -279,6 +289,7 @@ SERVICE_SCHEMA_EDIT_CAPTION = vol.Schema(
vol.Required(ATTR_MESSAGEID): vol.Any(
cv.positive_int, vol.All(cv.string, "last")
),
vol.Optional(ATTR_PARSER): ATTR_PARSER_SCHEMA,
vol.Required(ATTR_CHAT_ID): vol.Coerce(int),
vol.Required(ATTR_CAPTION): cv.string,
vol.Optional(ATTR_KEYBOARD_INLINE): cv.ensure_list,

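A quick demonstration of the shared validator pattern above. The literal parse-mode values are assumptions for the example and may differ from the integration's actual PARSER_* constants.

import voluptuous as vol

# Assumed values for illustration only.
ATTR_PARSER_SCHEMA = vol.All(str, vol.In(["html", "markdown", "markdownv2", "plain_text"]))

assert ATTR_PARSER_SCHEMA("html") == "html"
try:
    ATTR_PARSER_SCHEMA("bbcode")  # not an allowed parse mode
except vol.Invalid:
    pass  # rejected as expected
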
@@ -674,6 +674,8 @@ class TelegramNotificationService:
"Error editing message media",
params[ATTR_MESSAGE_TAG],
media=media,
caption=kwargs.get(ATTR_CAPTION),
parse_mode=params[ATTR_PARSER],
chat_id=chat_id,
message_id=message_id,
inline_message_id=inline_message_id,

@@ -6,7 +6,7 @@
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
},
"error": {
"bot_logout_failed": "Failed to logout Telegram bot. Please try again later.",
"bot_logout_failed": "Failed to log out Telegram bot. Please try again later.",
"invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]",
"invalid_proxy_url": "{proxy_url_error}",
"invalid_trusted_networks": "Invalid trusted network: {error_message}",
@@ -231,11 +231,9 @@
"step": {
"init": {
"data": {
"api_endpoint": "API endpoint",
"parse_mode": "Parse mode"
},
"data_description": {
"api_endpoint": "Telegram bot API server endpoint.\nThe bot will be **locked out for 10 minutes** if you switch back to the default.\nDefault: `{default_api_endpoint}`.",
"parse_mode": "Default parse mode for messages if not explicit in message data."
},
"title": "Configure Telegram bot"

@@ -16,6 +16,8 @@ from homeassistant.components.light import (
ATTR_RGBW_COLOR,
ATTR_RGBWW_COLOR,
ATTR_TRANSITION,
DEFAULT_MAX_KELVIN,
DEFAULT_MIN_KELVIN,
DOMAIN as LIGHT_DOMAIN,
ENTITY_ID_FORMAT,
PLATFORM_SCHEMA as LIGHT_PLATFORM_SCHEMA,
@@ -265,6 +267,8 @@ class AbstractTemplateLight(AbstractTemplateEntity, LightEntity):

_entity_id_format = ENTITY_ID_FORMAT
_optimistic_entity = True
_attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN
_attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN

# The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
# This ensures that the __init__ on AbstractTemplateEntity is not called twice.
@@ -856,7 +860,7 @@ class AbstractTemplateLight(AbstractTemplateEntity, LightEntity):

try:
if render in (None, "None", ""):
self._attr_min_color_temp_kelvin = None
self._attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN
return

self._attr_min_color_temp_kelvin = (
@@ -867,14 +871,14 @@ class AbstractTemplateLight(AbstractTemplateEntity, LightEntity):
"Template must supply an integer temperature within the range for"
" this light, or 'None'"
)
self._attr_min_color_temp_kelvin = None
self._attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN

@callback
def _update_min_mireds(self, render):
"""Update the min mireds from the template."""
try:
if render in (None, "None", ""):
self._attr_max_color_temp_kelvin = None
self._attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN
return

self._attr_max_color_temp_kelvin = (
@@ -885,7 +889,7 @@ class AbstractTemplateLight(AbstractTemplateEntity, LightEntity):
"Template must supply an integer temperature within the range for"
" this light, or 'None'"
)
self._attr_max_color_temp_kelvin = None
self._attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN

@callback
def _update_supports_transition(self, render):

@@ -8,9 +8,7 @@ from typing import Any
import uuid

from todoist_api_python.api_async import TodoistAPIAsync
from todoist_api_python.endpoints import get_sync_url
from todoist_api_python.headers import create_headers
from todoist_api_python.models import Due, Label, Task
from todoist_api_python.models import Label, Project, Task
import voluptuous as vol

from homeassistant.components.calendar import (
@@ -62,8 +60,9 @@ from .const import (
START,
SUMMARY,
)
from .coordinator import TodoistCoordinator
from .coordinator import TodoistCoordinator, flatten_async_pages
from .types import CalData, CustomProject, ProjectData, TodoistEvent
from .util import parse_due_date

_LOGGER = logging.getLogger(__name__)

@@ -157,18 +156,22 @@ async def async_setup_platform(

# Setup devices:
# Grab all projects.
projects = await api.get_projects()
projects_result = await api.get_projects()
all_projects: list[Project] = await flatten_async_pages(projects_result)

# Grab all labels
labels = await api.get_labels()
labels_result = await api.get_labels()
all_labels: list[Label] = await flatten_async_pages(labels_result)

# Add all Todoist-defined projects.
project_devices = []
for project in projects:
for project in all_projects:
# Project is an object, not a dict!
# Because of that, we convert what we need to a dict.
project_data: ProjectData = {CONF_NAME: project.name, CONF_ID: project.id}
project_devices.append(TodoistProjectEntity(coordinator, project_data, labels))
project_devices.append(
TodoistProjectEntity(coordinator, project_data, all_labels)
)
# Cache the names so we can easily look up name->ID.
project_id_lookup[project.name.lower()] = project.id

@@ -196,7 +199,7 @@ async def async_setup_platform(
TodoistProjectEntity(
coordinator,
{"id": None, "name": extra_project["name"]},
labels,
all_labels,
due_date_days=project_due_date,
whitelisted_labels=project_label_filter,
whitelisted_projects=project_id_filter,
@@ -218,7 +221,7 @@ def async_register_services( # noqa: C901

session = async_get_clientsession(hass)

async def handle_new_task(call: ServiceCall) -> None: # noqa: C901
async def handle_new_task(call: ServiceCall) -> None:
"""Call when a user creates a new Todoist Task from Home Assistant."""
project_name = call.data[PROJECT_NAME]
projects = await coordinator.async_get_projects()
@@ -269,9 +272,10 @@ def async_register_services( # noqa: C901
data["labels"] = task_labels

if ASSIGNEE in call.data:
collaborators = await coordinator.api.get_collaborators(project_id)
collaborators_result = await coordinator.api.get_collaborators(project_id)
all_collaborators = await flatten_async_pages(collaborators_result)
collaborator_id_lookup = {
collab.name.lower(): collab.id for collab in collaborators
collab.name.lower(): collab.id for collab in all_collaborators
}
task_assignee = call.data[ASSIGNEE].lower()
if task_assignee in collaborator_id_lookup:
@@ -297,17 +301,14 @@ def async_register_services( # noqa: C901
if due is None:
raise ValueError(f"Invalid due_date: {call.data[DUE_DATE]}")
due_date = datetime(due.year, due.month, due.day)
# Format it in the manner Todoist expects
due_date = dt_util.as_utc(due_date)
date_format = "%Y-%m-%dT%H:%M:%S"
data["due_datetime"] = datetime.strftime(due_date, date_format)
# Pass the datetime object directly - the library handles formatting
data["due_datetime"] = dt_util.as_utc(due_date)

api_task = await coordinator.api.add_task(content, **data)

# @NOTE: The rest-api doesn't support reminders, this works manually using
# the sync api, in order to keep functional parity with the component.
# https://developer.todoist.com/sync/v9/#reminders
sync_url = get_sync_url("sync")
# The REST API doesn't support reminders, so we use the Sync API directly
# to maintain functional parity with the component.
# https://developer.todoist.com/api/v1/#tag/Sync/Reminders/Add-a-reminder
_reminder_due: dict = {}
if REMINDER_DATE_STRING in call.data:
_reminder_due["string"] = call.data[REMINDER_DATE_STRING]
@@ -316,20 +317,21 @@ def async_register_services( # noqa: C901
_reminder_due["lang"] = call.data[REMINDER_DATE_LANG]

if REMINDER_DATE in call.data:
due_date = dt_util.parse_datetime(call.data[REMINDER_DATE])
if due_date is None:
due = dt_util.parse_date(call.data[REMINDER_DATE])
if due is None:
reminder_date = dt_util.parse_datetime(call.data[REMINDER_DATE])
if reminder_date is None:
reminder = dt_util.parse_date(call.data[REMINDER_DATE])
if reminder is None:
raise ValueError(
f"Invalid reminder_date: {call.data[REMINDER_DATE]}"
)
due_date = datetime(due.year, due.month, due.day)
# Format it in the manner Todoist expects
due_date = dt_util.as_utc(due_date)
date_format = "%Y-%m-%dT%H:%M:%S"
_reminder_due["date"] = datetime.strftime(due_date, date_format)
reminder_date = datetime(reminder.year, reminder.month, reminder.day)
# Format it in the manner Todoist expects (UTC with Z suffix)
reminder_date = dt_util.as_utc(reminder_date)
date_format = "%Y-%m-%dT%H:%M:%S.000000Z"
_reminder_due["date"] = datetime.strftime(reminder_date, date_format)

async def add_reminder(reminder_due: dict):
if _reminder_due:
sync_url = "https://api.todoist.com/api/v1/sync"
reminder_data = {
"commands": [
{
@@ -339,16 +341,16 @@ def async_register_services( # noqa: C901
"args": {
"item_id": api_task.id,
"type": "absolute",
"due": reminder_due,
"due": _reminder_due,
},
}
]
}
headers = create_headers(token=coordinator.token, with_content=True)
return await session.post(sync_url, headers=headers, json=reminder_data)

if _reminder_due:
await add_reminder(_reminder_due)
headers = {
"Authorization": f"Bearer {coordinator.token}",
"Content-Type": "application/json",
}
await session.post(sync_url, headers=headers, json=reminder_data)

_LOGGER.debug("Created Todoist task: %s", call.data[CONTENT])

@@ -527,7 +529,7 @@ class TodoistProjectData:
"""
task: TodoistEvent = {
ALL_DAY: False,
COMPLETED: data.is_completed,
COMPLETED: data.completed_at is not None,
DESCRIPTION: f"https://todoist.com/showTask?id={data.id}",
DUE_TODAY: False,
END: None,
@@ -561,22 +563,26 @@ class TodoistProjectData:
# complete the task.
# Generally speaking, that means right now.
if data.due is not None:
end = dt_util.parse_datetime(
data.due.datetime if data.due.datetime else data.due.date
)
task[END] = dt_util.as_local(end) if end is not None else end
if task[END] is not None:
if self._due_date_days is not None and (
task[END] > dt_util.now() + self._due_date_days
):
# This task is out of range of our due date;
# it shouldn't be counted.
return None
due_date = data.due.date
# The API returns date or datetime objects when deserialized via from_dict()
if isinstance(due_date, datetime):
task[END] = dt_util.as_local(due_date)
elif isinstance(due_date, date):
task[END] = dt_util.start_of_local_day(due_date)

task[DUE_TODAY] = task[END].date() == dt_util.now().date()
if (end_dt := task[END]) is not None:
if self._due_date_days is not None:
# For comparison with now, use datetime

if end_dt > dt_util.now() + self._due_date_days:
# This task is out of range of our due date;
# it shouldn't be counted.
return None

task[DUE_TODAY] = end_dt.date() == dt_util.now().date()

# Special case: Task is overdue.
if task[END] <= task[START]:
if end_dt <= task[START]:
task[OVERDUE] = True
# Set end time to the current time plus 1 hour.
# We're pretty much guaranteed to update within that 1 hour,
@@ -681,7 +687,7 @@ class TodoistProjectData:
for task in project_task_data:
if task.due is None:
continue
start = get_start(task.due)
start = parse_due_date(task.due)
if start is None:
continue
event = CalendarEvent(
@@ -689,9 +695,15 @@ class TodoistProjectData:
start=start,
end=start + timedelta(days=1),
)
if event.start_datetime_local >= end_date:
if (
event.start_datetime_local is not None
and event.start_datetime_local >= end_date
):
continue
if event.end_datetime_local < start_date:
if (
event.end_datetime_local is not None
and event.end_datetime_local < start_date
):
continue
events.append(event)
return events
@@ -748,15 +760,3 @@ class TodoistProjectData:
return
self.event = event
_LOGGER.debug("Updated %s", self._name)


def get_start(due: Due) -> datetime | date | None:
"""Return the task due date as a start date or date time."""
if due.datetime:
start = dt_util.parse_datetime(due.datetime)
if not start:
return None
return dt_util.as_local(start)
if due.date:
return dt_util.parse_date(due.date)
return None

@@ -1,7 +1,9 @@
"""DataUpdateCoordinator for the Todoist component."""

from collections.abc import AsyncGenerator
from datetime import timedelta
import logging
from typing import TypeVar

from todoist_api_python.api_async import TodoistAPIAsync
from todoist_api_python.models import Label, Project, Section, Task
@@ -10,6 +12,18 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

T = TypeVar("T")


async def flatten_async_pages(
pages: AsyncGenerator[list[T]],
) -> list[T]:
"""Flatten paginated results from an async generator."""
all_items: list[T] = []
async for page in pages:
all_items.extend(page)
return all_items


class TodoistCoordinator(DataUpdateCoordinator[list[Task]]):
"""Coordinator for updating task data from Todoist."""
@@ -39,22 +53,26 @@ class TodoistCoordinator(DataUpdateCoordinator[list[Task]]):
async def _async_update_data(self) -> list[Task]:
"""Fetch tasks from the Todoist API."""
try:
return await self.api.get_tasks()
tasks_async = await self.api.get_tasks()
except Exception as err:
raise UpdateFailed(f"Error communicating with API: {err}") from err
return await flatten_async_pages(tasks_async)

async def async_get_projects(self) -> list[Project]:
"""Return todoist projects fetched at most once."""
if self._projects is None:
self._projects = await self.api.get_projects()
projects_async = await self.api.get_projects()
self._projects = await flatten_async_pages(projects_async)
return self._projects

async def async_get_sections(self, project_id: str) -> list[Section]:
"""Return todoist sections for a given project ID."""
return await self.api.get_sections(project_id=project_id)
sections_async = await self.api.get_sections(project_id=project_id)
return await flatten_async_pages(sections_async)

async def async_get_labels(self) -> list[Label]:
"""Return todoist labels fetched at most once."""
if self._labels is None:
self._labels = await self.api.get_labels()
labels_async = await self.api.get_labels()
self._labels = await flatten_async_pages(labels_async)
return self._labels

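A self-contained demo of the pagination helper above: pages arrive as lists from an async generator and are concatenated in order.

import asyncio
from collections.abc import AsyncGenerator

async def fake_pages() -> AsyncGenerator[list[int], None]:
    # Stand-in for a paginated API: yields one page (a list) at a time.
    for page in ([1, 2], [3], [4, 5]):
        yield page

async def collect() -> list[int]:
    items: list[int] = []
    async for page in fake_pages():
        items.extend(page)
    return items

assert asyncio.run(collect()) == [1, 2, 3, 4, 5]
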
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/todoist",
"iot_class": "cloud_polling",
"loggers": ["todoist"],
"requirements": ["todoist-api-python==2.1.7"]
"requirements": ["todoist-api-python==3.1.0"]
}

@@ -16,10 +16,10 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.util import dt as dt_util

from .const import DOMAIN
from .coordinator import TodoistCoordinator
from .util import parse_due_date


async def async_setup_entry(
@@ -99,24 +99,16 @@ class TodoistTodoListEntity(CoordinatorEntity[TodoistCoordinator], TodoListEntit
if task.parent_id is not None:
# Filter out sub-tasks until they are supported by the UI.
continue
if task.is_completed:
if task.completed_at is not None:
status = TodoItemStatus.COMPLETED
else:
status = TodoItemStatus.NEEDS_ACTION
due: datetime.date | datetime.datetime | None = None
if task_due := task.due:
if task_due.datetime:
due = dt_util.as_local(
datetime.datetime.fromisoformat(task_due.datetime)
)
elif task_due.date:
due = datetime.date.fromisoformat(task_due.date)
items.append(
TodoItem(
summary=task.content,
uid=task.id,
status=status,
due=due,
due=parse_due_date(task.due),
description=task.description or None, # Don't use empty string
)
)
@@ -147,9 +139,9 @@ class TodoistTodoListEntity(CoordinatorEntity[TodoistCoordinator], TodoListEntit

if item.status != existing_item.status:
if item.status == TodoItemStatus.COMPLETED:
await self.coordinator.api.close_task(task_id=uid)
await self.coordinator.api.complete_task(task_id=uid)
else:
await self.coordinator.api.reopen_task(task_id=uid)
await self.coordinator.api.uncomplete_task(task_id=uid)
await self.coordinator.async_refresh()

async def async_delete_todo_items(self, uids: list[str]) -> None:

35
homeassistant/components/todoist/util.py
Normal file
@@ -0,0 +1,35 @@
|
||||
"""Utility functions for the Todoist integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import date, datetime
|
||||
|
||||
from todoist_api_python.models import Due
|
||||
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
|
||||
def parse_due_date(task_due: Due | None) -> date | datetime | None:
|
||||
"""Parse due date from Todoist task due object.
|
||||
|
||||
The due.date field contains either a date object (for date-only tasks)
|
||||
or a datetime object (for tasks with a specific time). When deserialized
|
||||
from the API via from_dict(), these are already proper Python date/datetime
|
||||
objects.
|
||||
|
||||
Args:
|
||||
task_due: The Due object from a Todoist task, or None.
|
||||
|
||||
Returns:
|
||||
A date object for date-only due dates, a localized datetime for
|
||||
datetime due dates, or None if no due date is set.
|
||||
|
||||
"""
|
||||
if task_due is None or not (due_date := task_due.date):
|
||||
return None
|
||||
|
||||
if isinstance(due_date, datetime):
|
||||
return dt_util.as_local(due_date)
|
||||
if isinstance(due_date, date):
|
||||
return due_date
|
||||
return None
|
||||
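Note that the isinstance(due_date, datetime) check must come before the date check, since datetime is a subclass of date. For illustration, a rough usage sketch; the SimpleNamespace stand-ins mimic only the .date attribute of the real Due model:

    from datetime import date, datetime
    from types import SimpleNamespace

    from homeassistant.components.todoist.util import parse_due_date

    date_only = SimpleNamespace(date=date(2026, 2, 14))
    with_time = SimpleNamespace(date=datetime(2026, 2, 14, 9, 30))

    assert parse_due_date(date_only) == date(2026, 2, 14)  # passed through as-is
    # Naive datetimes are treated as UTC by dt_util.as_local, so the result is tz-aware.
    assert isinstance(parse_due_date(with_time), datetime)
    assert parse_due_date(None) is None  # no due date set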
@@ -41,7 +41,7 @@
   "iot_class": "local_push",
   "loggers": ["uiprotect", "unifi_discovery"],
   "quality_scale": "platinum",
-  "requirements": ["uiprotect==10.0.1", "unifi-discovery==1.2.0"],
+  "requirements": ["uiprotect==10.1.0", "unifi-discovery==1.2.0"],
   "ssdp": [
     {
       "manufacturer": "Ubiquiti Networks",
@@ -11,6 +11,7 @@
   ],
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/vesync",
+  "integration_type": "hub",
   "iot_class": "cloud_polling",
   "loggers": ["pyvesync"],
   "requirements": ["pyvesync==3.4.1"]
@@ -4,5 +4,6 @@
   "codeowners": [],
   "documentation": "https://www.home-assistant.io/integrations/viaggiatreno",
   "iot_class": "cloud_polling",
-  "quality_scale": "legacy"
+  "quality_scale": "legacy",
+  "requirements": ["viaggiatreno_ha==0.2.4"]
 }
@@ -2,12 +2,17 @@

 from __future__ import annotations

-import asyncio
-from http import HTTPStatus
+from datetime import timedelta
 import logging
-import time
 from typing import Any

 import aiohttp
+from viaggiatreno_ha.trainline import (
+    TrainLine,
+    TrainLineStatus,
+    TrainState,
+    Viaggiatreno,
+)
 import voluptuous as vol

 from homeassistant.components.sensor import (
@@ -19,19 +24,12 @@ from homeassistant.core import HomeAssistant
 from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
-from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
+from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType

 _LOGGER = logging.getLogger(__name__)

-VIAGGIATRENO_ENDPOINT = (
-    "http://www.viaggiatreno.it/infomobilita/"
-    "resteasy/viaggiatreno/andamentoTreno/"
-    "{station_id}/{train_id}/{timestamp}"
-)
-
-REQUEST_TIMEOUT = 5  # seconds
 ICON = "mdi:train"
-MONITORED_INFO = [
+MONITORED_INFO = [  # Backward compatibility with older versions
     "categoria",
     "compOrarioArrivoZeroEffettivo",
     "compOrarioPartenzaZeroEffettivo",
@@ -47,7 +45,6 @@ DEFAULT_NAME = "Train {}"

 CONF_NAME = "train_name"
 CONF_STATION_ID = "station_id"
-CONF_STATION_NAME = "station_name"
 CONF_TRAIN_ID = "train_id"

 ARRIVED_STRING = "Arrived"
@@ -55,6 +52,8 @@ CANCELLED_STRING = "Cancelled"
 NOT_DEPARTED_STRING = "Not departed yet"
 NO_INFORMATION_STRING = "No information for this train now"

+SCAN_INTERVAL = timedelta(minutes=2)
+
 PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
     {
         vol.Required(CONF_TRAIN_ID): cv.string,
@@ -71,126 +70,94 @@ async def async_setup_platform(
     discovery_info: DiscoveryInfoType | None = None,
 ) -> None:
     """Set up the ViaggiaTreno platform."""
-    train_id = config.get(CONF_TRAIN_ID)
-    station_id = config.get(CONF_STATION_ID)
+    train_id = str(config.get(CONF_TRAIN_ID))
+    station_id = str(config.get(CONF_STATION_ID))
     if not (name := config.get(CONF_NAME)):
         name = DEFAULT_NAME.format(train_id)
-    async_add_entities([ViaggiaTrenoSensor(train_id, station_id, name)])
-
-
-async def async_http_request(hass, uri):
-    """Perform actual request."""
-    try:
-        session = async_get_clientsession(hass)
-        async with asyncio.timeout(REQUEST_TIMEOUT):
-            req = await session.get(uri)
-            if req.status != HTTPStatus.OK:
-                return {"error": req.status}
-            json_response = await req.json()
-    except (TimeoutError, aiohttp.ClientError) as exc:
-        _LOGGER.error("Cannot connect to ViaggiaTreno API endpoint: %s", exc)
-        return None
-    except ValueError:
-        _LOGGER.error("Received non-JSON data from ViaggiaTreno API endpoint")
-        return None
-    return json_response
+    tl = TrainLine(train_id=train_id, starting_station=station_id)
+    async_add_entities([ViaggiaTrenoSensor(tl, name)], True)


 class ViaggiaTrenoSensor(SensorEntity):
     """Implementation of a ViaggiaTreno sensor."""

     _attr_attribution = "Powered by ViaggiaTreno Data"
     _attr_should_poll = True

-    def __init__(self, train_id, station_id, name):
+    def __init__(self, train_line: TrainLine, name: str) -> None:
         """Initialize the sensor."""
-        self._state = None
-        self._attributes = {}
-        self._unit = ""
+        self._state: StateType = NO_INFORMATION_STRING
+        self._attributes: dict[str, Any] = {}
         self._icon = ICON
-        self._station_id = station_id
         self._name = name
-
-        self.uri = VIAGGIATRENO_ENDPOINT.format(
-            station_id=station_id, train_id=train_id, timestamp=int(time.time()) * 1000
-        )
+        self._line = train_line
+        self._viaggiatreno: Viaggiatreno | None = None
+        self._tstatus: TrainLineStatus | None = None

     @property
-    def name(self):
+    def name(self) -> str:
         """Return the name of the sensor."""
         return self._name

     @property
-    def native_value(self):
+    def native_value(self) -> StateType:
         """Return the state of the sensor."""
         return self._state

     @property
-    def icon(self):
+    def icon(self) -> str:
         """Icon to use in the frontend, if any."""
         return self._icon

     @property
-    def native_unit_of_measurement(self):
+    def native_unit_of_measurement(self) -> str | None:
         """Return the unit of measurement."""
-        return self._unit
+        if isinstance(self.native_value, (int, float)):
+            return UnitOfTime.MINUTES
+        return None

     @property
-    def extra_state_attributes(self):
+    def extra_state_attributes(self) -> dict[str, Any]:
         """Return extra attributes."""
         return self._attributes

-    @staticmethod
-    def has_departed(data):
-        """Check if the train has actually departed."""
-        try:
-            first_station = data["fermate"][0]
-            if data["oraUltimoRilevamento"] or first_station["effettiva"]:
-                return True
-        except ValueError:
-            _LOGGER.error("Cannot fetch first station: %s", data)
-        return False
-
-    @staticmethod
-    def has_arrived(data):
-        """Check if the train has already arrived."""
-        last_station = data["fermate"][-1]
-        if not last_station["effettiva"]:
-            return False
-        return True
-
-    @staticmethod
-    def is_cancelled(data):
-        """Check if the train is cancelled."""
-        if data["tipoTreno"] == "ST" and data["provvedimento"] == 1:
-            return True
-        return False
-
     async def async_update(self) -> None:
         """Update state."""
-        uri = self.uri
-        res = await async_http_request(self.hass, uri)
-        if res.get("error", ""):
-            if res["error"] == 204:
-                self._state = NO_INFORMATION_STRING
-                self._unit = ""
-            else:
-                self._state = f"Error: {res['error']}"
-                self._unit = ""
-        else:
-            for i in MONITORED_INFO:
-                self._attributes[i] = res[i]
-
-            if self.is_cancelled(res):
+        if self._viaggiatreno is None:
+            session = async_get_clientsession(self.hass)
+            self._viaggiatreno = Viaggiatreno(session)
+        try:
+            await self._viaggiatreno.query_if_useful(self._line)
+            self._tstatus = self._viaggiatreno.get_line_status(self._line)
+            if self._tstatus is None:
+                _LOGGER.error(
+                    "Received status for line %s: None. Check the train and station IDs",
+                    self._line,
+                )
+                return
+        except (TimeoutError, aiohttp.ClientError) as exc:
+            _LOGGER.error("Cannot connect to ViaggiaTreno API endpoint: %s", exc)
+            return
+        except ValueError:
+            _LOGGER.error("Received non-JSON data from ViaggiaTreno API endpoint")
+            return
+        if self._tstatus is not None:
+            if self._tstatus.state == TrainState.CANCELLED:
                 self._state = CANCELLED_STRING
                 self._icon = "mdi:cancel"
-                self._unit = ""
-            elif not self.has_departed(res):
+            elif self._tstatus.state == TrainState.NOT_YET_DEPARTED:
                 self._state = NOT_DEPARTED_STRING
-                self._unit = ""
-            elif self.has_arrived(res):
+            elif self._tstatus.state == TrainState.ARRIVED:
                 self._state = ARRIVED_STRING
-                self._unit = ""
-            else:
-                self._state = res.get("ritardo")
-                self._unit = UnitOfTime.MINUTES
+            elif self._tstatus.state in {
+                TrainState.RUNNING,
+                TrainState.PARTIALLY_CANCELLED,
+            }:
+                delay_minutes = self._tstatus.timetable.delay
+                self._state = delay_minutes
+                self._icon = ICON
+            else:
+                self._state = NO_INFORMATION_STRING
+            # Update attributes
+            for info in MONITORED_INFO:
+                self._attributes[info] = self._viaggiatreno.json[self._line][info]
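The sensor now delegates all HTTP and parsing to the viaggiatreno_ha library. A hedged sketch of the flow the new async_update relies on; the class and method names come from the diff above, but the exact signatures and the sample IDs are assumptions:

    # Assumed usage of viaggiatreno_ha, mirroring async_update above.
    session = async_get_clientsession(hass)
    vt = Viaggiatreno(session)
    line = TrainLine(train_id="9544", starting_station="S01700")  # sample IDs

    await vt.query_if_useful(line)     # fetches only when an update is useful
    status = vt.get_line_status(line)  # TrainLineStatus | None
    if status is not None and status.state == TrainState.RUNNING:
        delay_minutes = status.timetable.delay  # becomes the sensor's numeric state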
@@ -23,7 +23,7 @@
     "universal_silabs_flasher",
     "serialx"
   ],
-  "requirements": ["zha==0.0.87", "serialx==0.6.2"],
+  "requirements": ["zha==0.0.88", "serialx==0.6.2"],
   "usb": [
     {
       "description": "*2652*",
@@ -53,6 +53,10 @@ import zigpy.backups
 from zigpy.config import CONF_DEVICE
 from zigpy.config.validators import cv_boolean
 from zigpy.types.named import EUI64, KeyData
+from zigpy.typing import (
+    UNDEFINED as ZIGPY_UNDEFINED,
+    UndefinedType as ZigpyUndefinedType,
+)
 from zigpy.zcl.clusters.security import IasAce
 import zigpy.zdo.types as zdo_types

@@ -850,7 +854,7 @@ async def websocket_read_zigbee_cluster_attributes(
     cluster_id: int = msg[ATTR_CLUSTER_ID]
     cluster_type: str = msg[ATTR_CLUSTER_TYPE]
     attribute: int = msg[ATTR_ATTRIBUTE]
-    manufacturer: int | None = msg.get(ATTR_MANUFACTURER)
+    manufacturer: int | ZigpyUndefinedType = msg.get(ATTR_MANUFACTURER, ZIGPY_UNDEFINED)
     zha_device = zha_gateway.get_device(ieee)
     success = {}
     failure = {}
@@ -1326,7 +1330,9 @@ def async_load_api(hass: HomeAssistant) -> None:
         cluster_type: str = service.data[ATTR_CLUSTER_TYPE]
         attribute: int | str = service.data[ATTR_ATTRIBUTE]
         value: int | bool | str = service.data[ATTR_VALUE]
-        manufacturer: int | None = service.data.get(ATTR_MANUFACTURER)
+        manufacturer: int | ZigpyUndefinedType = service.data.get(
+            ATTR_MANUFACTURER, ZIGPY_UNDEFINED
+        )
         zha_device = zha_gateway.get_device(ieee)
         response = None
         if zha_device is not None:
@@ -1380,7 +1386,9 @@ def async_load_api(hass: HomeAssistant) -> None:
         command_type: str = service.data[ATTR_COMMAND_TYPE]
         args: list | None = service.data.get(ATTR_ARGS)
         params: dict | None = service.data.get(ATTR_PARAMS)
-        manufacturer: int | None = service.data.get(ATTR_MANUFACTURER)
+        manufacturer: int | ZigpyUndefinedType = service.data.get(
+            ATTR_MANUFACTURER, ZIGPY_UNDEFINED
+        )
         zha_device = zha_gateway.get_device(ieee)
         if zha_device is not None:
             if cluster_id >= MFG_CLUSTER_ID_START and manufacturer is None:
@@ -1435,7 +1443,9 @@ def async_load_api(hass: HomeAssistant) -> None:
         cluster_id: int = service.data[ATTR_CLUSTER_ID]
         command: int = service.data[ATTR_COMMAND]
         args: list = service.data[ATTR_ARGS]
-        manufacturer: int | None = service.data.get(ATTR_MANUFACTURER)
+        manufacturer: int | ZigpyUndefinedType = service.data.get(
+            ATTR_MANUFACTURER, ZIGPY_UNDEFINED
+        )
         group = zha_gateway.get_group(group_id)
         if cluster_id >= MFG_CLUSTER_ID_START and manufacturer is None:
             _LOGGER.error("Missing manufacturer attribute for cluster: %d", cluster_id)
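ZIGPY_UNDEFINED replaces None as the "not specified" default, presumably because a sentinel lets zigpy distinguish "the caller said nothing" from an explicit manufacturer value. A minimal sketch of the sentinel pattern in isolation (read_attr is a hypothetical function, not zigpy API):

    from zigpy.typing import UNDEFINED as ZIGPY_UNDEFINED, UndefinedType


    # Hypothetical illustration of why a sentinel beats None as a default:
    # a caller can still pass manufacturer=None explicitly and have it honored.
    def read_attr(manufacturer: int | None | UndefinedType = ZIGPY_UNDEFINED) -> str:
        if manufacturer is ZIGPY_UNDEFINED:
            return "fall back to the library's per-device default"
        return f"use the caller's explicit value: {manufacturer!r}"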
@@ -17,7 +17,7 @@ if TYPE_CHECKING:
 APPLICATION_NAME: Final = "HomeAssistant"
 MAJOR_VERSION: Final = 2026
 MINOR_VERSION: Final = 2
-PATCH_VERSION: Final = "0.dev0"
+PATCH_VERSION: Final = "0b3"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 2)
@@ -2701,6 +2701,12 @@
     "config_flow": false,
     "iot_class": "local_push"
   },
+  "heatit": {
+    "name": "Heatit",
+    "iot_standards": [
+      "zwave"
+    ]
+  },
   "heatmiser": {
     "name": "Heatmiser",
     "integration_type": "hub",
@@ -2712,6 +2718,13 @@
     "integration_type": "virtual",
     "supported_by": "motion_blinds"
   },
+  "heiman": {
+    "name": "Heiman",
+    "iot_standards": [
+      "matter",
+      "zigbee"
+    ]
+  },
   "heiwa": {
     "name": "Heiwa",
     "integration_type": "virtual",
homeassistant/generated/labs.py (generated, 2 lines changed)
@@ -7,7 +7,7 @@ LABS_PREVIEW_FEATURES = {
     "analytics": {
         "snapshots": {
             "feedback_url": "https://forms.gle/GqvRmgmghSDco8M46",
-            "learn_more_url": "",
+            "learn_more_url": "https://www.home-assistant.io/blog/2026/02/02/about-device-database/",
             "report_issue_url": "https://github.com/OHF-Device-Database/device-database/issues/new",
         },
     },
@@ -36,16 +36,17 @@ fnv-hash-fast==1.6.0
 go2rtc-client==0.4.0
 ha-ffmpeg==3.2.2
 habluetooth==5.8.0
-hass-nabucasa==1.11.0
+hass-nabucasa==1.12.0
 hassil==3.5.0
 home-assistant-bluetooth==1.13.1
-home-assistant-frontend==20260128.1
-home-assistant-intents==2026.1.6
+home-assistant-frontend==20260128.5
+home-assistant-intents==2026.1.28
 httpx==0.28.1
 ifaddr==0.2.0
 Jinja2==3.1.6
 lru-dict==1.3.0
 mutagen==1.47.0
+openai==2.15.0
 orjson==3.11.5
 packaging>=23.1
 paho-mqtt==2.1.0
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

 [project]
 name = "homeassistant"
-version = "2026.2.0.dev0"
+version = "2026.2.0b3"
 license = "Apache-2.0"
 license-files = ["LICENSE*", "homeassistant/backports/LICENSE*"]
 description = "Open-source home automation platform running on Python 3."
@@ -48,7 +48,7 @@ dependencies = [
     "fnv-hash-fast==1.6.0",
     # hass-nabucasa is imported by helpers which don't depend on the cloud
     # integration
-    "hass-nabucasa==1.11.0",
+    "hass-nabucasa==1.12.0",
     # When bumping httpx, please check the version pins of
     # httpcore, anyio, and h11 in gen_requirements_all
     "httpx==0.28.1",
@@ -484,8 +484,6 @@ filterwarnings = [
     "ignore:Deprecated call to `pkg_resources.declare_namespace\\('azure'\\)`:DeprecationWarning:pkg_resources",

     # -- tracked upstream / open PRs
-    # https://github.com/kbr/fritzconnection/pull/244 - v1.15.0 - 2025-05-17
-    "ignore:.*invalid escape sequence:SyntaxWarning:.*fritzconnection.core.soaper",
     # https://github.com/hacf-fr/meteofrance-api/pull/688 - v1.4.0 - 2025-03-26
     "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:meteofrance_api.model.forecast",
requirements.txt (generated, 4 lines changed)
@@ -24,10 +24,10 @@ cronsim==2.7
 cryptography==46.0.2
 fnv-hash-fast==1.6.0
 ha-ffmpeg==3.2.2
-hass-nabucasa==1.11.0
+hass-nabucasa==1.12.0
 hassil==3.5.0
 home-assistant-bluetooth==1.13.1
-home-assistant-intents==2026.1.6
+home-assistant-intents==2026.1.28
 httpx==0.28.1
 ifaddr==0.2.0
 Jinja2==3.1.6
requirements_all.txt (generated, 40 lines changed)
@@ -190,7 +190,7 @@ aioairzone-cloud==0.7.2
 aioairzone==1.0.5

 # homeassistant.components.alexa_devices
-aioamazondevices==11.0.2
+aioamazondevices==11.1.1

 # homeassistant.components.ambient_network
 # homeassistant.components.ambient_station
@@ -1015,7 +1015,7 @@ fressnapftracker==0.2.1

 # homeassistant.components.fritz
 # homeassistant.components.fritzbox_callmonitor
-fritzconnection[qr]==1.15.0
+fritzconnection[qr]==1.15.1

 # homeassistant.components.fyta
 fyta_cli==0.7.2
@@ -1175,7 +1175,7 @@ habluetooth==5.8.0
 hanna-cloud==0.0.7

 # homeassistant.components.cloud
-hass-nabucasa==1.11.0
+hass-nabucasa==1.12.0

 # homeassistant.components.splunk
 hass-splunk==0.1.1
@@ -1219,10 +1219,10 @@ hole==0.9.0
 holidays==0.84

 # homeassistant.components.frontend
-home-assistant-frontend==20260128.1
+home-assistant-frontend==20260128.5

 # homeassistant.components.conversation
-home-assistant-intents==2026.1.6
+home-assistant-intents==2026.1.28

 # homeassistant.components.gentex_homelink
 homelink-integration-api==0.0.1
@@ -1258,7 +1258,7 @@ ibeacon-ble==1.2.0
 # homeassistant.components.local_calendar
 # homeassistant.components.local_todo
 # homeassistant.components.remote_calendar
-ical==12.1.2
+ical==12.1.3

 # homeassistant.components.caldav
 icalendar==6.3.1
@@ -1288,7 +1288,7 @@ imeon_inverter_api==0.4.0
 imgw_pib==2.0.1

 # homeassistant.components.incomfort
-incomfort-client==0.6.11
+incomfort-client==0.6.12

 # homeassistant.components.influxdb
 influxdb-client==1.50.0
@@ -1584,7 +1584,7 @@ nextdns==5.0.0
 nhc==0.7.0

 # homeassistant.components.nibe_heatpump
-nibe==2.21.0
+nibe==2.22.0

 # homeassistant.components.nice_go
 nice-go==1.0.1
@@ -1664,6 +1664,7 @@ open-garage==0.2.0
 # homeassistant.components.open_meteo
 open-meteo==0.3.2

+# homeassistant.components.cloud
 # homeassistant.components.open_router
 # homeassistant.components.openai_conversation
 openai==2.15.0
@@ -1690,7 +1691,7 @@ openwrt-luci-rpc==1.1.17
 openwrt-ubus-rpc==0.0.2

 # homeassistant.components.opower
-opower==0.16.5
+opower==0.17.0

 # homeassistant.components.oralb
 oralb-ble==1.0.2
@@ -1864,7 +1865,7 @@ pyElectra==1.2.4
 pyEmby==1.10

 # homeassistant.components.hikvision
-pyHik==0.4.1
+pyHik==0.4.2

 # homeassistant.components.homee
 pyHomee==1.3.8
@@ -1988,7 +1989,7 @@ pydeconz==120
 pydelijn==1.1.0

 # homeassistant.components.dexcom
-pydexcom==0.2.3
+pydexcom==0.5.1

 # homeassistant.components.discovergy
 pydiscovergy==3.0.2
@@ -2208,7 +2209,7 @@ pymata-express==1.19
 pymediaroom==0.6.5.4

 # homeassistant.components.meteoclimatic
-pymeteoclimatic==0.1.0
+pymeteoclimatic==0.1.1

 # homeassistant.components.assist_pipeline
 pymicro-vad==1.0.1
@@ -2295,7 +2296,7 @@ pyoppleio-legacy==1.0.8
 pyosoenergyapi==1.2.4

 # homeassistant.components.opentherm_gw
-pyotgw==2.2.2
+pyotgw==2.2.3

 # homeassistant.auth.mfa_modules.notify
 # homeassistant.auth.mfa_modules.totp
@@ -2747,13 +2748,13 @@ refoss-ha==1.2.5
 regenmaschine==2024.03.0

 # homeassistant.components.renault
-renault-api==0.5.2
+renault-api==0.5.3

 # homeassistant.components.renson
 renson-endura-delta==1.7.2

 # homeassistant.components.reolink
-reolink-aio==0.18.1
+reolink-aio==0.18.2

 # homeassistant.components.idteck_prox
 rfk101py==0.0.1
@@ -3039,7 +3040,7 @@ tilt-pi==0.2.1
 tmb==0.0.4

 # homeassistant.components.todoist
-todoist-api-python==2.1.7
+todoist-api-python==3.1.0

 # homeassistant.components.togrill
 togrill-bluetooth==0.8.1
@@ -3093,7 +3094,7 @@ uasiren==0.0.1
 uhooapi==1.2.6

 # homeassistant.components.unifiprotect
-uiprotect==10.0.1
+uiprotect==10.1.0

 # homeassistant.components.landisgyr_heat_meter
 ultraheat-api==0.5.7
@@ -3142,6 +3143,9 @@ velbus-aio==2026.1.4
 # homeassistant.components.venstar
 venstarcolortouch==0.21

+# homeassistant.components.viaggiatreno
+viaggiatreno_ha==0.2.4
+
 # homeassistant.components.victron_ble
 victron-ble-ha-parser==0.4.9

@@ -3292,7 +3296,7 @@ zeroconf==0.148.0
 zeversolar==0.3.2

 # homeassistant.components.zha
-zha==0.0.87
+zha==0.0.88

 # homeassistant.components.zhong_hong
 zhong-hong-hvac==1.0.13
requirements_test_all.txt (generated, 37 lines changed)
@@ -181,7 +181,7 @@ aioairzone-cloud==0.7.2
 aioairzone==1.0.5

 # homeassistant.components.alexa_devices
-aioamazondevices==11.0.2
+aioamazondevices==11.1.1

 # homeassistant.components.ambient_network
 # homeassistant.components.ambient_station
@@ -894,7 +894,7 @@ fressnapftracker==0.2.1

 # homeassistant.components.fritz
 # homeassistant.components.fritzbox_callmonitor
-fritzconnection[qr]==1.15.0
+fritzconnection[qr]==1.15.1

 # homeassistant.components.fyta
 fyta_cli==0.7.2
@@ -1045,7 +1045,7 @@ habluetooth==5.8.0
 hanna-cloud==0.0.7

 # homeassistant.components.cloud
-hass-nabucasa==1.11.0
+hass-nabucasa==1.12.0

 # homeassistant.components.assist_satellite
 # homeassistant.components.conversation
@@ -1077,10 +1077,10 @@ hole==0.9.0
 holidays==0.84

 # homeassistant.components.frontend
-home-assistant-frontend==20260128.1
+home-assistant-frontend==20260128.5

 # homeassistant.components.conversation
-home-assistant-intents==2026.1.6
+home-assistant-intents==2026.1.28

 # homeassistant.components.gentex_homelink
 homelink-integration-api==0.0.1
@@ -1110,7 +1110,7 @@ ibeacon-ble==1.2.0
 # homeassistant.components.local_calendar
 # homeassistant.components.local_todo
 # homeassistant.components.remote_calendar
-ical==12.1.2
+ical==12.1.3

 # homeassistant.components.caldav
 icalendar==6.3.1
@@ -1134,7 +1134,7 @@ imeon_inverter_api==0.4.0
 imgw_pib==2.0.1

 # homeassistant.components.incomfort
-incomfort-client==0.6.11
+incomfort-client==0.6.12

 # homeassistant.components.influxdb
 influxdb-client==1.50.0
@@ -1379,7 +1379,7 @@ nextdns==5.0.0
 nhc==0.7.0

 # homeassistant.components.nibe_heatpump
-nibe==2.21.0
+nibe==2.22.0

 # homeassistant.components.nice_go
 nice-go==1.0.1
@@ -1447,6 +1447,7 @@ open-garage==0.2.0
 # homeassistant.components.open_meteo
 open-meteo==0.3.2

+# homeassistant.components.cloud
 # homeassistant.components.open_router
 # homeassistant.components.openai_conversation
 openai==2.15.0
@@ -1464,7 +1465,7 @@ openrgb-python==0.3.6
 openwebifpy==4.3.1

 # homeassistant.components.opower
-opower==0.16.5
+opower==0.17.0

 # homeassistant.components.oralb
 oralb-ble==1.0.2
@@ -1601,7 +1602,7 @@ pyDuotecno==2024.10.1
 pyElectra==1.2.4

 # homeassistant.components.hikvision
-pyHik==0.4.1
+pyHik==0.4.2

 # homeassistant.components.homee
 pyHomee==1.3.8
@@ -1695,7 +1696,7 @@ pydeako==0.6.0
 pydeconz==120

 # homeassistant.components.dexcom
-pydexcom==0.2.3
+pydexcom==0.5.1

 # homeassistant.components.discovergy
 pydiscovergy==3.0.2
@@ -1873,7 +1874,7 @@ pymailgunner==1.4
 pymata-express==1.19

 # homeassistant.components.meteoclimatic
-pymeteoclimatic==0.1.0
+pymeteoclimatic==0.1.1

 # homeassistant.components.assist_pipeline
 pymicro-vad==1.0.1
@@ -1945,7 +1946,7 @@ pyopnsense==0.4.0
 pyosoenergyapi==1.2.4

 # homeassistant.components.opentherm_gw
-pyotgw==2.2.2
+pyotgw==2.2.3

 # homeassistant.auth.mfa_modules.notify
 # homeassistant.auth.mfa_modules.totp
@@ -2313,13 +2314,13 @@ refoss-ha==1.2.5
 regenmaschine==2024.03.0

 # homeassistant.components.renault
-renault-api==0.5.2
+renault-api==0.5.3

 # homeassistant.components.renson
 renson-endura-delta==1.7.2

 # homeassistant.components.reolink
-reolink-aio==0.18.1
+reolink-aio==0.18.2

 # homeassistant.components.rflink
 rflink==0.0.67
@@ -2539,7 +2540,7 @@ tilt-ble==1.0.1
 tilt-pi==0.2.1

 # homeassistant.components.todoist
-todoist-api-python==2.1.7
+todoist-api-python==3.1.0

 # homeassistant.components.togrill
 togrill-bluetooth==0.8.1
@@ -2590,7 +2591,7 @@ uasiren==0.0.1
 uhooapi==1.2.6

 # homeassistant.components.unifiprotect
-uiprotect==10.0.1
+uiprotect==10.1.0

 # homeassistant.components.landisgyr_heat_meter
 ultraheat-api==0.5.7
@@ -2762,7 +2763,7 @@ zeroconf==0.148.0
 zeversolar==0.3.2

 # homeassistant.components.zha
-zha==0.0.87
+zha==0.0.88

 # homeassistant.components.zwave_js
 zwave-js-server-python==0.68.0
@@ -46,6 +46,7 @@ TEST_DEVICE_1 = AmazonDevice(
             scale="CELSIUS",
         ),
     },
+    notifications_supported=True,
     notifications={
         NOTIFICATION_ALARM: AmazonSchedule(
            type=NOTIFICATION_ALARM,
@@ -93,5 +94,6 @@ TEST_DEVICE_2 = AmazonDevice(
             scale="CELSIUS",
         )
     },
+    notifications_supported=False,
    notifications={},
 )
@@ -37,6 +37,7 @@
             'type': 'Timer',
           }),
         }),
+        'notifications_supported': True,
         'online': True,
         'sensors': dict({
           'dnd': dict({
@@ -103,6 +104,7 @@
             'type': 'Timer',
           }),
         }),
+        'notifications_supported': True,
         'online': True,
         'sensors': dict({
           'dnd': dict({
@@ -7,6 +7,7 @@ from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData
 import pytest

+from homeassistant.components.alexa_devices.const import DOMAIN
 from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
 from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN, SERVICE_TURN_ON
 from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF
 from homeassistant.core import HomeAssistant
@@ -134,3 +135,42 @@ async def test_alexa_dnd_group_removal(
     await hass.async_block_till_done()

     assert not hass.states.get(entity.entity_id)
+
+
+async def test_alexa_unsupported_notification_sensor_removal(
+    hass: HomeAssistant,
+    mock_amazon_devices_client: AsyncMock,
+    mock_config_entry: MockConfigEntry,
+    device_registry: dr.DeviceRegistry,
+    entity_registry: er.EntityRegistry,
+) -> None:
+    """Test notification sensors are removed from devices that do not support them."""
+
+    mock_config_entry.add_to_hass(hass)
+
+    device = device_registry.async_get_or_create(
+        config_entry_id=mock_config_entry.entry_id,
+        identifiers={(DOMAIN, mock_config_entry.entry_id)},
+        name=mock_config_entry.title,
+        manufacturer="Amazon",
+        model=SPEAKER_GROUP_MODEL,
+        entry_type=dr.DeviceEntryType.SERVICE,
+    )
+
+    entity = entity_registry.async_get_or_create(
+        DOMAIN,
+        SENSOR_DOMAIN,
+        unique_id=f"{TEST_DEVICE_1_SN}-Timer",
+        device_id=device.id,
+        config_entry=mock_config_entry,
+        has_entity_name=True,
+    )
+
+    mock_amazon_devices_client.get_devices_data.return_value[
+        TEST_DEVICE_1_SN
+    ].notifications_supported = False
+
+    await hass.config_entries.async_setup(mock_config_entry.entry_id)
+    await hass.async_block_till_done()
+
+    assert not hass.states.get(entity.entity_id)
@@ -387,7 +387,7 @@ async def test_model_list(
     },
     {
         "label": "Claude Haiku 3.5",
-        "value": "claude-3-5-haiku-latest",
+        "value": "claude-3-5-haiku-20241022",
     },
     {
         "label": "Claude Haiku 3",
@@ -500,7 +500,7 @@ async def test_model_list_error(
             CONF_LLM_HASS_API: [],
         },
         {
-            CONF_CHAT_MODEL: "claude-3-5-haiku-latest",
+            CONF_CHAT_MODEL: "claude-3-5-haiku-20241022",
             CONF_TEMPERATURE: 1.0,
         },
         {
@@ -513,7 +513,7 @@ async def test_model_list_error(
             CONF_RECOMMENDED: False,
             CONF_PROMPT: "Speak like a pirate",
             CONF_TEMPERATURE: 1.0,
-            CONF_CHAT_MODEL: "claude-3-5-haiku-latest",
+            CONF_CHAT_MODEL: "claude-3-5-haiku-20241022",
             CONF_MAX_TOKENS: DEFAULT[CONF_MAX_TOKENS],
             CONF_WEB_SEARCH: False,
             CONF_WEB_SEARCH_MAX_USES: 10,
@@ -581,6 +581,7 @@ async def test_model_list_error(
             CONF_TEMPERATURE: 0.3,
             CONF_CHAT_MODEL: DEFAULT[CONF_CHAT_MODEL],
             CONF_MAX_TOKENS: DEFAULT[CONF_MAX_TOKENS],
+            CONF_THINKING_BUDGET: 0,
             CONF_WEB_SEARCH: False,
             CONF_WEB_SEARCH_MAX_USES: 5,
             CONF_WEB_SEARCH_USER_LOCATION: False,
@@ -3,7 +3,6 @@

 from datetime import timedelta
 from unittest.mock import AsyncMock

-import dateutil.parser
 import pytest
 from syrupy.assertion import SnapshotAssertion

@@ -135,42 +134,64 @@ async def test_manual_update_entity(
     assert state.state == "15.0"


-@pytest.mark.parametrize("mock_request_status", [MOCK_MINIMAL_STATUS], indirect=True)
+@pytest.mark.parametrize(
+    ("mock_request_status", "entity_id", "known_status"),
+    [
+        pytest.param(
+            # Even though the "LASTSTEST" field is not available, we should still create the entity.
+            MOCK_MINIMAL_STATUS,
+            "sensor.apc_ups_last_self_test",
+            MOCK_MINIMAL_STATUS | {"LASTSTEST": "1970-01-01 00:00:00 +0000"},
+            id="last_self_test_missing",
+        ),
+        pytest.param(
+            MOCK_MINIMAL_STATUS | {"XOFFBATT": "N/A"},
+            "sensor.apc_ups_transfer_from_battery",
+            MOCK_MINIMAL_STATUS | {"XOFFBATT": "1970-01-01 00:00:00 +0000"},
+            id="xoffbatt_na",
+        ),
+        pytest.param(
+            MOCK_MINIMAL_STATUS | {"XOFFBATT": "invalid-time-string"},
+            "sensor.apc_ups_transfer_from_battery",
+            MOCK_MINIMAL_STATUS | {"XOFFBATT": "1970-01-01 00:00:00 +0000"},
+            id="xoffbatt_invalid_time_string",
+        ),
+    ],
+    indirect=["mock_request_status"],
+)
 async def test_sensor_unknown(
     hass: HomeAssistant,
     mock_request_status: AsyncMock,
+    entity_id: str,
+    known_status: dict[str, str],
 ) -> None:
-    """Test if our integration can properly mark certain sensors as unknown when it becomes so."""
-    ups_mode_id = "sensor.apc_ups_mode"
-    last_self_test_id = "sensor.apc_ups_last_self_test"
+    """Test if our integration can properly mark certain sensors as known/unknown when it becomes so."""
+    base_status = mock_request_status.return_value

-    assert hass.states.get(ups_mode_id).state == MOCK_MINIMAL_STATUS["UPSMODE"]
-    # Last self test sensor should be added even if our status does not report it initially (it is
-    # a sensor that appears only after a periodical or manual self test is performed).
-    assert hass.states.get(last_self_test_id) is not None
-    assert hass.states.get(last_self_test_id).state == STATE_UNKNOWN
+    # The state should be unknown initially.
+    state = hass.states.get(entity_id)
+    assert state
+    assert state.state == STATE_UNKNOWN

-    # Simulate an event (a self test) such that "LASTSTEST" field is being reported, the state of
-    # the sensor should be properly updated with the corresponding value.
-    last_self_test_value = "1970-01-01 00:00:00 +0000"
-    mock_request_status.return_value = MOCK_MINIMAL_STATUS | {
-        "LASTSTEST": last_self_test_value
-    }
+    # Update to a payload that should make the entity known.
+    mock_request_status.return_value = known_status
     future = utcnow() + timedelta(minutes=2)
     async_fire_time_changed(hass, future)
     await hass.async_block_till_done()
-    assert (
-        hass.states.get(last_self_test_id).state
-        == dateutil.parser.parse(last_self_test_value).isoformat()
-    )
-
-    # Simulate another event (e.g., daemon restart) such that "LASTSTEST" is no longer reported.
-    mock_request_status.return_value = MOCK_MINIMAL_STATUS
+    state = hass.states.get(entity_id)
+    assert state
+    assert state.state != STATE_UNKNOWN
+
+    # Revert back to the initial status, and the state should now be unknown again.
+    mock_request_status.return_value = base_status
     future = utcnow() + timedelta(minutes=2)
     async_fire_time_changed(hass, future)
     await hass.async_block_till_done()
-    assert hass.states.get(last_self_test_id).state == STATE_UNKNOWN
+    # The state should become unknown again.
+    state = hass.states.get(entity_id)
+    assert state
+    assert state.state == STATE_UNKNOWN


 @pytest.mark.parametrize(("entity_key", "issue_key"), DEPRECATED_SENSORS.items())
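Here indirect=["mock_request_status"] routes only that parameter through its fixture, while entity_id and known_status are passed straight into the test function. A hedged sketch of the fixture side (the real conftest fixture is not shown in this diff):

    import pytest
    from unittest.mock import AsyncMock


    @pytest.fixture
    def mock_request_status(request: pytest.FixtureRequest) -> AsyncMock:
        # With indirect parametrization, each pytest.param value arrives
        # here as request.param before the test body runs.
        return AsyncMock(return_value=request.param)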
@@ -206,6 +206,17 @@ async def test_prepare_chat_for_generation_appends_attachments(
     assert response["messages"] is messages
     mock_prepare_files_for_prompt.assert_awaited_once_with([attachment])

+    # Verify that files are actually added to the last user message
+    last_message = messages[-1]
+    assert last_message["type"] == "message"
+    assert last_message["role"] == "user"
+    assert isinstance(last_message["content"], list)
+    assert last_message["content"][0] == {
+        "type": "input_text",
+        "text": "Describe the door",
+    }
+    assert last_message["content"][1] == files[0]
+

 async def test_prepare_chat_for_generation_passes_messages_through(
     hass: HomeAssistant, cloud_entity: BaseCloudLLMEntity
@@ -121,7 +121,7 @@ def mock_climate_variables() -> dict:
     """Mock climate variable data for default thermostat state."""
     return {
         123: {
-            "HVAC_STATE": "idle",
+            "HVAC_STATE": "Off",
             "HVAC_MODE": "Heat",
             "TEMPERATURE_F": 72.5,
             "HUMIDITY": 45,
@@ -50,7 +50,7 @@
     'current_humidity': 45,
     'current_temperature': 72,
     'friendly_name': 'Test Controller Residential Thermostat V2',
-    'hvac_action': <HVACAction.IDLE: 'idle'>,
+    'hvac_action': <HVACAction.OFF: 'off'>,
     'hvac_modes': list([
       <HVACMode.OFF: 'off'>,
       <HVACMode.HEAT: 'heat'>,