Mirror of https://github.com/home-assistant/core.git, synced 2026-02-03 22:05:35 +01:00
Compare commits: rvc_identi ... 2026.2.0b1 (22 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 9a5d4610f7 | |
| | 41c524fce4 | |
| | 5f9fa95554 | |
| | 6950be8ea9 | |
| | c5a8bf64d0 | |
| | a2b9a6e9df | |
| | a0c567f0da | |
| | c7feafdde6 | |
| | e1e74b0aeb | |
| | 673411ef97 | |
| | f7e5af7cb1 | |
| | 0ee56ce708 | |
| | f93a176398 | |
| | cd2394bc12 | |
| | 5c20b8eaff | |
| | 4bd499d3a6 | |
| | 8a53b94c5a | |
| | d5aff326e3 | |
| | 22f66abbe7 | |
| | f635228b1f | |
| | 4c708c143d | |
| | 3369459d41 | |

@@ -166,7 +166,7 @@
     },
     "services": {
       "alarm_arm_away": {
-        "description": "Arms the alarm in the away mode.",
+        "description": "Arms an alarm in the away mode.",
         "fields": {
           "code": {
             "description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",
@@ -176,7 +176,7 @@
         "name": "Arm away"
       },
       "alarm_arm_custom_bypass": {
-        "description": "Arms the alarm while allowing to bypass a custom area.",
+        "description": "Arms an alarm while allowing to bypass a custom area.",
         "fields": {
           "code": {
             "description": "Code to arm the alarm.",
@@ -186,7 +186,7 @@
         "name": "Arm with custom bypass"
       },
       "alarm_arm_home": {
-        "description": "Arms the alarm in the home mode.",
+        "description": "Arms an alarm in the home mode.",
         "fields": {
           "code": {
             "description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",
@@ -196,7 +196,7 @@
         "name": "Arm home"
       },
       "alarm_arm_night": {
-        "description": "Arms the alarm in the night mode.",
+        "description": "Arms an alarm in the night mode.",
         "fields": {
           "code": {
             "description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",
@@ -206,7 +206,7 @@
         "name": "Arm night"
       },
       "alarm_arm_vacation": {
-        "description": "Arms the alarm in the vacation mode.",
+        "description": "Arms an alarm in the vacation mode.",
         "fields": {
           "code": {
             "description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",
@@ -216,7 +216,7 @@
         "name": "Arm vacation"
       },
       "alarm_disarm": {
-        "description": "Disarms the alarm.",
+        "description": "Disarms an alarm.",
         "fields": {
           "code": {
             "description": "Code to disarm the alarm.",
@@ -226,7 +226,7 @@
         "name": "Disarm"
       },
       "alarm_trigger": {
-        "description": "Triggers the alarm manually.",
+        "description": "Triggers an alarm manually.",
         "fields": {
           "code": {
             "description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",

@@ -12,14 +12,25 @@ from hass_nabucasa import Cloud, NabuCasaBaseError
 from hass_nabucasa.llm import (
     LLMAuthenticationError,
     LLMRateLimitError,
+    LLMResponseCompletedEvent,
     LLMResponseError,
+    LLMResponseErrorEvent,
+    LLMResponseFailedEvent,
+    LLMResponseFunctionCallArgumentsDeltaEvent,
+    LLMResponseFunctionCallArgumentsDoneEvent,
+    LLMResponseFunctionCallOutputItem,
+    LLMResponseImageOutputItem,
+    LLMResponseIncompleteEvent,
+    LLMResponseMessageOutputItem,
+    LLMResponseOutputItemAddedEvent,
+    LLMResponseOutputItemDoneEvent,
+    LLMResponseOutputTextDeltaEvent,
+    LLMResponseReasoningOutputItem,
+    LLMResponseReasoningSummaryTextDeltaEvent,
+    LLMResponseWebSearchCallOutputItem,
+    LLMResponseWebSearchCallSearchingEvent,
     LLMServiceError,
 )
-from litellm import (
-    ResponseFunctionToolCall,
-    ResponseInputParam,
-    ResponsesAPIStreamEvents,
-)
 from openai.types.responses import (
     FunctionToolParam,
     ResponseInputItemParam,
@@ -60,9 +71,9 @@ class ResponseItemType(str, Enum):

 def _convert_content_to_param(
     chat_content: Iterable[conversation.Content],
-) -> ResponseInputParam:
+) -> list[ResponseInputItemParam]:
     """Convert any native chat message for this agent to the native format."""
-    messages: ResponseInputParam = []
+    messages: list[ResponseInputItemParam] = []
     reasoning_summary: list[str] = []
     web_search_calls: dict[str, dict[str, Any]] = {}

@@ -238,7 +249,7 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
     """Transform stream result into HA format."""
     last_summary_index = None
     last_role: Literal["assistant", "tool_result"] | None = None
-    current_tool_call: ResponseFunctionToolCall | None = None
+    current_tool_call: LLMResponseFunctionCallOutputItem | None = None

     # Non-reasoning models don't follow our request to remove citations, so we remove
     # them manually here. They always follow the same pattern: the citation is always
@@ -248,19 +259,10 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
     citation_regexp = re.compile(r"\(\[([^\]]+)\]\((https?:\/\/[^\)]+)\)")

     async for event in stream:
-        event_type = getattr(event, "type", None)
-        event_item = getattr(event, "item", None)
-        event_item_type = getattr(event_item, "type", None) if event_item else None
+        _LOGGER.debug("Event[%s]", getattr(event, "type", None))

-        _LOGGER.debug(
-            "Event[%s] | item: %s",
-            event_type,
-            event_item_type,
-        )
-
-        if event_type == ResponsesAPIStreamEvents.OUTPUT_ITEM_ADDED:
-            # Detect function_call even when it's a BaseLiteLLMOpenAIResponseObject
-            if event_item_type == ResponseItemType.FUNCTION_CALL:
+        if isinstance(event, LLMResponseOutputItemAddedEvent):
+            if isinstance(event.item, LLMResponseFunctionCallOutputItem):
                 # OpenAI has tool calls as individual events
                 # while HA puts tool calls inside the assistant message.
                 # We turn them into individual assistant content for HA
@@ -268,11 +270,11 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                     yield {"role": "assistant"}
                     last_role = "assistant"
                     last_summary_index = None
-                current_tool_call = cast(ResponseFunctionToolCall, event.item)
+                current_tool_call = event.item
             elif (
-                event_item_type == ResponseItemType.MESSAGE
+                isinstance(event.item, LLMResponseMessageOutputItem)
                 or (
-                    event_item_type == ResponseItemType.REASONING
+                    isinstance(event.item, LLMResponseReasoningOutputItem)
                     and last_summary_index is not None
                 )  # Subsequent ResponseReasoningItem
                 or last_role != "assistant"
@@ -281,14 +283,14 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                 last_role = "assistant"
                 last_summary_index = None

-        elif event_type == ResponsesAPIStreamEvents.OUTPUT_ITEM_DONE:
-            if event_item_type == ResponseItemType.REASONING:
-                encrypted_content = getattr(event.item, "encrypted_content", None)
-                summary = getattr(event.item, "summary", []) or []
+        elif isinstance(event, LLMResponseOutputItemDoneEvent):
+            if isinstance(event.item, LLMResponseReasoningOutputItem):
+                encrypted_content = event.item.encrypted_content
+                summary = event.item.summary

                 yield {
-                    "native": ResponseReasoningItem(
-                        type="reasoning",
+                    "native": LLMResponseReasoningOutputItem(
+                        type=event.item.type,
                         id=event.item.id,
                         summary=[],
                         encrypted_content=encrypted_content,
@@ -296,14 +298,8 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                 }

                 last_summary_index = len(summary) - 1 if summary else None
-            elif event_item_type == ResponseItemType.WEB_SEARCH_CALL:
-                action = getattr(event.item, "action", None)
-                if isinstance(action, dict):
-                    action_dict = action
-                elif action is not None:
-                    action_dict = action.to_dict()
-                else:
-                    action_dict = {}
+            elif isinstance(event.item, LLMResponseWebSearchCallOutputItem):
+                action_dict = event.item.action
                 yield {
                     "tool_calls": [
                         llm.ToolInput(
@@ -321,11 +317,11 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                     "tool_result": {"status": event.item.status},
                 }
                 last_role = "tool_result"
-            elif event_item_type == ResponseItemType.IMAGE:
-                yield {"native": event.item}
+            elif isinstance(event.item, LLMResponseImageOutputItem):
+                yield {"native": event.item.raw}
                 last_summary_index = -1  # Trigger new assistant message on next turn

-        elif event_type == ResponsesAPIStreamEvents.OUTPUT_TEXT_DELTA:
+        elif isinstance(event, LLMResponseOutputTextDeltaEvent):
             data = event.delta
             if remove_parentheses:
                 data = data.removeprefix(")")
@@ -344,7 +340,7 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
             if data:
                 yield {"content": data}

-        elif event_type == ResponsesAPIStreamEvents.REASONING_SUMMARY_TEXT_DELTA:
+        elif isinstance(event, LLMResponseReasoningSummaryTextDeltaEvent):
             # OpenAI can output several reasoning summaries
             # in a single ResponseReasoningItem. We split them as separate
             # AssistantContent messages. Only last of them will have
@@ -358,14 +354,14 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
             last_summary_index = event.summary_index
             yield {"thinking_content": event.delta}

-        elif event_type == ResponsesAPIStreamEvents.FUNCTION_CALL_ARGUMENTS_DELTA:
+        elif isinstance(event, LLMResponseFunctionCallArgumentsDeltaEvent):
             if current_tool_call is not None:
                 current_tool_call.arguments += event.delta

-        elif event_type == ResponsesAPIStreamEvents.WEB_SEARCH_CALL_SEARCHING:
+        elif isinstance(event, LLMResponseWebSearchCallSearchingEvent):
             yield {"role": "assistant"}

-        elif event_type == ResponsesAPIStreamEvents.FUNCTION_CALL_ARGUMENTS_DONE:
+        elif isinstance(event, LLMResponseFunctionCallArgumentsDoneEvent):
             if current_tool_call is not None:
                 current_tool_call.status = "completed"

@@ -385,35 +381,36 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                 ]
             }

-        elif event_type == ResponsesAPIStreamEvents.RESPONSE_COMPLETED:
-            if event.response.usage is not None:
+        elif isinstance(event, LLMResponseCompletedEvent):
+            response = event.response
+            if response and "usage" in response:
+                usage = response["usage"]
                 chat_log.async_trace(
                     {
                         "stats": {
-                            "input_tokens": event.response.usage.input_tokens,
-                            "output_tokens": event.response.usage.output_tokens,
+                            "input_tokens": usage.get("input_tokens"),
+                            "output_tokens": usage.get("output_tokens"),
                         }
                     }
                 )

-        elif event_type == ResponsesAPIStreamEvents.RESPONSE_INCOMPLETE:
-            if event.response.usage is not None:
+        elif isinstance(event, LLMResponseIncompleteEvent):
+            response = event.response
+            if response and "usage" in response:
+                usage = response["usage"]
                 chat_log.async_trace(
                     {
                         "stats": {
-                            "input_tokens": event.response.usage.input_tokens,
-                            "output_tokens": event.response.usage.output_tokens,
+                            "input_tokens": usage.get("input_tokens"),
+                            "output_tokens": usage.get("output_tokens"),
                         }
                     }
                 )

-            if (
-                event.response.incomplete_details
-                and event.response.incomplete_details.reason
-            ):
-                reason: str = event.response.incomplete_details.reason
-            else:
-                reason = "unknown reason"
+            incomplete_details = response.get("incomplete_details")
+            reason = "unknown reason"
+            if incomplete_details is not None and incomplete_details.get("reason"):
+                reason = incomplete_details["reason"]

             if reason == "max_output_tokens":
                 reason = "max output tokens reached"
@@ -422,22 +419,24 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
             raise HomeAssistantError(f"OpenAI response incomplete: {reason}")

-        elif event_type == ResponsesAPIStreamEvents.RESPONSE_FAILED:
-            if event.response.usage is not None:
+        elif isinstance(event, LLMResponseFailedEvent):
+            response = event.response
+            if response and "usage" in response:
+                usage = response["usage"]
                 chat_log.async_trace(
                     {
                         "stats": {
-                            "input_tokens": event.response.usage.input_tokens,
-                            "output_tokens": event.response.usage.output_tokens,
+                            "input_tokens": usage.get("input_tokens"),
+                            "output_tokens": usage.get("output_tokens"),
                         }
                     }
                 )
             reason = "unknown reason"
-            if event.response.error is not None:
-                reason = event.response.error.message
+            if isinstance(error := response.get("error"), dict):
+                reason = error.get("message") or reason
             raise HomeAssistantError(f"OpenAI response failed: {reason}")

-        elif event_type == ResponsesAPIStreamEvents.ERROR:
+        elif isinstance(event, LLMResponseErrorEvent):
             raise HomeAssistantError(f"OpenAI response error: {event.message}")


@@ -452,7 +451,7 @@ class BaseCloudLLMEntity(Entity):
     async def _prepare_chat_for_generation(
         self,
         chat_log: conversation.ChatLog,
-        messages: ResponseInputParam,
+        messages: list[ResponseInputItemParam],
         response_format: dict[str, Any] | None = None,
     ) -> dict[str, Any]:
         """Prepare kwargs for Cloud LLM from the chat log."""

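Note: the citation-stripping regex in the hunk above can be exercised on its own. A minimal sketch; the sample text and variable usage are illustrative, not part of the change:

```python
import re

# Same pattern as in the diff: matches a citation fragment of the form
# "([title](https://url" up to and including the URL's closing parenthesis.
citation_regexp = re.compile(r"\(\[([^\]]+)\]\((https?:\/\/[^\)]+)\)")

delta = "The sky appears blue ([source](https://example.com))."
cleaned = citation_regexp.sub("", delta)
# The pattern consumes "([source](https://example.com)"; the leftover ")"
# is what the streaming code strips with data.removeprefix(")").
print(cleaned)  # -> "The sky appears blue )."
```
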
@@ -13,6 +13,6 @@
   "integration_type": "system",
   "iot_class": "cloud_push",
   "loggers": ["acme", "hass_nabucasa", "snitun"],
-  "requirements": ["hass-nabucasa==1.11.0"],
+  "requirements": ["hass-nabucasa==1.12.0", "openai==2.15.0"],
   "single_config_entry": true
 }

@@ -58,12 +58,13 @@ C4_TO_HA_HVAC_MODE = {

 HA_TO_C4_HVAC_MODE = {v: k for k, v in C4_TO_HA_HVAC_MODE.items()}

-# Map Control4 HVAC state to Home Assistant HVAC action
+# Map the five known Control4 HVAC states to Home Assistant HVAC actions
 C4_TO_HA_HVAC_ACTION = {
     "heating": HVACAction.HEATING,
     "cooling": HVACAction.COOLING,
     "idle": HVACAction.IDLE,
     "off": HVACAction.OFF,
     "heat": HVACAction.HEATING,
     "cool": HVACAction.COOLING,
     "dry": HVACAction.DRYING,
     "fan": HVACAction.FAN,
 }

@@ -236,7 +237,10 @@ class Control4Climate(Control4Entity, ClimateEntity):
         if c4_state is None:
             return None
         # Convert state to lowercase for mapping
-        return C4_TO_HA_HVAC_ACTION.get(str(c4_state).lower())
+        action = C4_TO_HA_HVAC_ACTION.get(str(c4_state).lower())
+        if action is None:
+            _LOGGER.debug("Unknown HVAC state received from Control4: %s", c4_state)
+        return action

     @property
     def target_temperature(self) -> float | None:

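The new unknown-state fallback can be sketched in isolation; the `HVACAction` stub below stands in for Home Assistant's enum and the sample states are illustrative:

```python
from enum import StrEnum
import logging

_LOGGER = logging.getLogger(__name__)


class HVACAction(StrEnum):
    """Trimmed-down stand-in for homeassistant.components.climate.HVACAction."""

    COOLING = "cooling"
    DRYING = "drying"
    FAN = "fan"
    HEATING = "heating"
    IDLE = "idle"
    OFF = "off"


C4_TO_HA_HVAC_ACTION = {
    "heating": HVACAction.HEATING,
    "cooling": HVACAction.COOLING,
    "idle": HVACAction.IDLE,
    "off": HVACAction.OFF,
    "heat": HVACAction.HEATING,
    "cool": HVACAction.COOLING,
    "dry": HVACAction.DRYING,
    "fan": HVACAction.FAN,
}


def hvac_action(c4_state: str | None) -> HVACAction | None:
    """Mirror the patched property: lowercase the raw state, log unknowns."""
    if c4_state is None:
        return None
    action = C4_TO_HA_HVAC_ACTION.get(str(c4_state).lower())
    if action is None:
        _LOGGER.debug("Unknown HVAC state received from Control4: %s", c4_state)
    return action


assert hvac_action("Heat") is HVACAction.HEATING  # case-insensitive lookup
assert hvac_action("defrost") is None  # unknown state logs and returns None
```
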
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/conversation",
   "integration_type": "entity",
   "quality_scale": "internal",
-  "requirements": ["hassil==3.5.0", "home-assistant-intents==2026.1.6"]
+  "requirements": ["hassil==3.5.0", "home-assistant-intents==2026.1.28"]
 }

@@ -1,6 +1,7 @@
 """The Dexcom integration."""

-from pydexcom import AccountError, Dexcom, SessionError
+from pydexcom import Dexcom, Region
+from pydexcom.errors import AccountError, SessionError

 from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
 from homeassistant.core import HomeAssistant
@@ -14,10 +15,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: DexcomConfigEntry) -> bo
     """Set up Dexcom from a config entry."""
     try:
         dexcom = await hass.async_add_executor_job(
-            Dexcom,
-            entry.data[CONF_USERNAME],
-            entry.data[CONF_PASSWORD],
-            entry.data[CONF_SERVER] == SERVER_OUS,
+            lambda: Dexcom(
+                username=entry.data[CONF_USERNAME],
+                password=entry.data[CONF_PASSWORD],
+                region=Region.OUS
+                if entry.data[CONF_SERVER] == SERVER_OUS
+                else Region.US,
+            )
         )
     except AccountError:
         return False

@@ -5,7 +5,8 @@ from __future__ import annotations
 import logging
 from typing import Any

-from pydexcom import AccountError, Dexcom, SessionError
+from pydexcom import Dexcom, Region
+from pydexcom.errors import AccountError, SessionError
 import voluptuous as vol

 from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
@@ -37,10 +38,13 @@ class DexcomConfigFlow(ConfigFlow, domain=DOMAIN):
         if user_input is not None:
             try:
                 await self.hass.async_add_executor_job(
-                    Dexcom,
-                    user_input[CONF_USERNAME],
-                    user_input[CONF_PASSWORD],
-                    user_input[CONF_SERVER] == SERVER_OUS,
+                    lambda: Dexcom(
+                        username=user_input[CONF_USERNAME],
+                        password=user_input[CONF_PASSWORD],
+                        region=Region.OUS
+                        if user_input[CONF_SERVER] == SERVER_OUS
+                        else Region.US,
+                    )
                 )
             except SessionError:
                 errors["base"] = "cannot_connect"

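The switch from positional arguments to a lambda in both Dexcom hunks is what lets keyword arguments reach the constructor: executor-job helpers of this shape forward positional args only. A minimal standalone sketch of the pattern using plain asyncio (the `connect` function and its parameters are illustrative):

```python
import asyncio
from functools import partial


def connect(*, username: str, password: str, region: str) -> str:
    """Stand-in for a blocking, keyword-only client constructor."""
    return f"{username}@{region}"


async def main() -> None:
    loop = asyncio.get_running_loop()
    # run_in_executor (like hass.async_add_executor_job) passes positional
    # args only, so keyword arguments must be bound via lambda or partial.
    result = await loop.run_in_executor(
        None, lambda: connect(username="u", password="p", region="ous")
    )
    same = await loop.run_in_executor(
        None, partial(connect, username="u", password="p", region="us")
    )
    print(result, same)


asyncio.run(main())
```
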
@@ -18,7 +18,7 @@ _SCAN_INTERVAL = timedelta(seconds=180)
 type DexcomConfigEntry = ConfigEntry[DexcomCoordinator]


-class DexcomCoordinator(DataUpdateCoordinator[GlucoseReading]):
+class DexcomCoordinator(DataUpdateCoordinator[GlucoseReading | None]):
     """Dexcom Coordinator."""

     def __init__(
@@ -37,7 +37,7 @@ class DexcomCoordinator(DataUpdateCoordinator[GlucoseReading]):
         )
         self.dexcom = dexcom

-    async def _async_update_data(self) -> GlucoseReading:
+    async def _async_update_data(self) -> GlucoseReading | None:
         """Fetch data from API endpoint."""
         return await self.hass.async_add_executor_job(
             self.dexcom.get_current_glucose_reading

@@ -7,5 +7,5 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["pydexcom"],
-  "requirements": ["pydexcom==0.2.3"]
+  "requirements": ["pydexcom==0.5.1"]
 }

@@ -9,7 +9,7 @@
   "iot_class": "local_polling",
   "loggers": ["fritzconnection"],
   "quality_scale": "bronze",
-  "requirements": ["fritzconnection[qr]==1.15.0", "xmltodict==1.0.2"],
+  "requirements": ["fritzconnection[qr]==1.15.1", "xmltodict==1.0.2"],
   "ssdp": [
     {
       "st": "urn:schemas-upnp-org:device:fritzbox:1"

@@ -7,5 +7,5 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["fritzconnection"],
-  "requirements": ["fritzconnection[qr]==1.15.0"]
+  "requirements": ["fritzconnection[qr]==1.15.1"]
 }

@@ -19,9 +19,7 @@
   ],
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
-  "preview_features": {
-    "winter_mode": {}
-  },
+  "preview_features": { "winter_mode": {} },
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20260128.1"]
+  "requirements": ["home-assistant-frontend==20260128.3"]
 }

@@ -10,6 +10,7 @@ import voluptuous as vol
 from homeassistant.components.script import CONF_MODE
 from homeassistant.const import CONF_DESCRIPTION, CONF_TYPE, SERVICE_RELOAD
 from homeassistant.core import HomeAssistant, ServiceCall
+from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import (
     config_validation as cv,
     intent,
@@ -18,6 +19,7 @@ from homeassistant.helpers import (
     template,
 )
 from homeassistant.helpers.reload import async_integration_yaml_config
+from homeassistant.helpers.script import async_validate_actions_config
 from homeassistant.helpers.typing import ConfigType

 _LOGGER = logging.getLogger(__name__)
@@ -85,19 +87,29 @@ async def async_reload(hass: HomeAssistant, service_call: ServiceCall) -> None:

     new_intents = new_config[DOMAIN]

-    async_load_intents(hass, new_intents)
+    await async_load_intents(hass, new_intents)


-def async_load_intents(hass: HomeAssistant, intents: dict[str, ConfigType]) -> None:
+async def async_load_intents(
+    hass: HomeAssistant, intents: dict[str, ConfigType]
+) -> None:
     """Load YAML intents into the intent system."""
     hass.data[DOMAIN] = intents

     for intent_type, conf in intents.items():
         if CONF_ACTION in conf:
+            try:
+                actions = await async_validate_actions_config(hass, conf[CONF_ACTION])
+            except (vol.Invalid, HomeAssistantError) as exc:
+                _LOGGER.error(
+                    "Failed to validate actions for intent %s: %s", intent_type, exc
+                )
+                continue  # Skip this intent
+
             script_mode: str = conf.get(CONF_MODE, script.DEFAULT_SCRIPT_MODE)
             conf[CONF_ACTION] = script.Script(
                 hass,
-                conf[CONF_ACTION],
+                actions,
                 f"Intent Script {intent_type}",
                 DOMAIN,
                 script_mode=script_mode,
@@ -109,7 +121,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     """Set up the intent script component."""
     intents = config[DOMAIN]

-    async_load_intents(hass, intents)
+    await async_load_intents(hass, intents)

     async def _handle_reload(service_call: ServiceCall) -> None:
         return await async_reload(hass, service_call)

@@ -33,6 +33,7 @@ from .const import (  # noqa: F401
     CONF_ALLOW_SINGLE_WORD,
     CONF_ICON,
     CONF_REQUIRE_ADMIN,
+    CONF_RESOURCE_MODE,
     CONF_SHOW_IN_SIDEBAR,
     CONF_TITLE,
     CONF_URL_PATH,
@@ -61,7 +62,7 @@ def _validate_url_slug(value: Any) -> str:
     """Validate value is a valid url slug."""
     if value is None:
         raise vol.Invalid("Slug should not be None")
-    if "-" not in value:
+    if value != "lovelace" and "-" not in value:
         raise vol.Invalid("Url path needs to contain a hyphen (-)")
     str_value = str(value)
     slg = slugify(str_value, separator="-")
@@ -84,9 +85,13 @@ CONFIG_SCHEMA = vol.Schema(
     {
         vol.Optional(DOMAIN, default={}): vol.Schema(
             {
+                # Deprecated - Remove in 2026.8
                 vol.Optional(CONF_MODE, default=MODE_STORAGE): vol.All(
                     vol.Lower, vol.In([MODE_YAML, MODE_STORAGE])
                 ),
+                vol.Optional(CONF_RESOURCE_MODE): vol.All(
+                    vol.Lower, vol.In([MODE_YAML, MODE_STORAGE])
+                ),
                 vol.Optional(CONF_DASHBOARDS): cv.schema_with_slug_keys(
                     YAML_DASHBOARD_SCHEMA,
                     slug_validator=_validate_url_slug,
@@ -103,7 +108,7 @@ CONFIG_SCHEMA = vol.Schema(
 class LovelaceData:
     """Dataclass to store information in hass.data."""

     mode: str
+    resource_mode: str  # The mode used for resources (yaml or storage)
     dashboards: dict[str | None, dashboard.LovelaceConfig]
     resources: resources.ResourceYAMLCollection | resources.ResourceStorageCollection
     yaml_dashboards: dict[str | None, ConfigType]
@@ -114,18 +119,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     mode = config[DOMAIN][CONF_MODE]
     yaml_resources = config[DOMAIN].get(CONF_RESOURCES)

-    # Deprecated - Remove in 2026.8
-    # For YAML mode, register the default panel in yaml mode (temporary until user migrates)
-    if mode == MODE_YAML:
-        frontend.async_register_built_in_panel(
-            hass,
-            DOMAIN,
-            config={"mode": mode},
-            sidebar_title="overview",
-            sidebar_icon="mdi:view-dashboard",
-            sidebar_default_visible=False,
-        )
-        _async_create_yaml_mode_repair(hass)
+    # resource_mode controls how resources are loaded (yaml vs storage)
+    # Deprecated - Remove mode fallback in 2026.8
+    resource_mode = config[DOMAIN].get(CONF_RESOURCE_MODE, mode)

     async def reload_resources_service_handler(service_call: ServiceCall) -> None:
         """Reload yaml resources."""
@@ -149,12 +145,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
         )
         hass.data[LOVELACE_DATA].resources = resource_collection

     default_config: dashboard.LovelaceConfig
     resource_collection: (
         resources.ResourceYAMLCollection | resources.ResourceStorageCollection
     )
-    if mode == MODE_YAML:
-        default_config = dashboard.LovelaceYAML(hass, None, None)
+    default_config = dashboard.LovelaceStorage(hass, None)

+    # Load resources based on resource_mode
+    if resource_mode == MODE_YAML:
         resource_collection = await create_yaml_resource_col(hass, yaml_resources)

         async_register_admin_service(
@@ -177,8 +174,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
         )

     else:
-        default_config = dashboard.LovelaceStorage(hass, None)
-
         if yaml_resources is not None:
             _LOGGER.warning(
                 "Lovelace is running in storage mode. Define resources via user"
@@ -195,18 +190,44 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
         RESOURCE_UPDATE_FIELDS,
     ).async_setup(hass)

+    websocket_api.async_register_command(hass, websocket.websocket_lovelace_info)
     websocket_api.async_register_command(hass, websocket.websocket_lovelace_config)
     websocket_api.async_register_command(hass, websocket.websocket_lovelace_save_config)
     websocket_api.async_register_command(
         hass, websocket.websocket_lovelace_delete_config
     )

+    yaml_dashboards = config[DOMAIN].get(CONF_DASHBOARDS, {})
+
+    # Deprecated - Remove in 2026.8
+    # For YAML mode, add the default "lovelace" dashboard if not already defined
+    # This migrates the legacy yaml mode to a proper yaml dashboard entry
+    if mode == MODE_YAML and DOMAIN not in yaml_dashboards:
+        translations = await async_get_translations(
+            hass, hass.config.language, "dashboard", {onboarding.DOMAIN}
+        )
+        title = translations.get(
+            "component.onboarding.dashboard.overview.title", "Overview"
+        )
+        yaml_dashboards = {
+            DOMAIN: {
+                CONF_TITLE: title,
+                CONF_ICON: DEFAULT_ICON,
+                CONF_SHOW_IN_SIDEBAR: True,
+                CONF_REQUIRE_ADMIN: False,
+                CONF_MODE: MODE_YAML,
+                CONF_FILENAME: LOVELACE_CONFIG_FILE,
+            },
+            **yaml_dashboards,
+        }
+        _async_create_yaml_mode_repair(hass)
+
     hass.data[LOVELACE_DATA] = LovelaceData(
         mode=mode,
+        resource_mode=resource_mode,
         # We store a dictionary mapping url_path: config. None is the default.
         dashboards={None: default_config},
         resources=resource_collection,
-        yaml_dashboards=config[DOMAIN].get(CONF_DASHBOARDS, {}),
+        yaml_dashboards=yaml_dashboards,
     )

     if hass.config.recovery_mode:
@@ -450,7 +471,7 @@ async def _async_migrate_default_config(
 # Deprecated - Remove in 2026.8
 @callback
 def _async_create_yaml_mode_repair(hass: HomeAssistant) -> None:
-    """Create repair issue for YAML mode migration."""
+    """Create repair issue for YAML mode deprecation."""
     ir.async_create_issue(
         hass,
         DOMAIN,

@@ -158,7 +158,15 @@ async def _get_dashboard_info(
     """Load a dashboard and return info on views."""
     if url_path == DEFAULT_DASHBOARD:
         url_path = None
-    dashboard = hass.data[LOVELACE_DATA].dashboards.get(url_path)
+
+    # When url_path is None, prefer "lovelace" dashboard if it exists (for YAML mode)
+    # Otherwise fall back to dashboards[None] (storage mode default)
+    if url_path is None:
+        dashboard = hass.data[LOVELACE_DATA].dashboards.get(DOMAIN) or hass.data[
+            LOVELACE_DATA
+        ].dashboards.get(None)
+    else:
+        dashboard = hass.data[LOVELACE_DATA].dashboards.get(url_path)

     if dashboard is None:
         raise ValueError("Invalid dashboard specified")

@@ -57,6 +57,7 @@ RESOURCE_UPDATE_FIELDS: VolDictType = {
 SERVICE_RELOAD_RESOURCES = "reload_resources"
 RESOURCE_RELOAD_SERVICE_SCHEMA = vol.Schema({})

+CONF_RESOURCE_MODE = "resource_mode"
 CONF_TITLE = "title"
 CONF_REQUIRE_ADMIN = "require_admin"
 CONF_SHOW_IN_SIDEBAR = "show_in_sidebar"

@@ -6,8 +6,8 @@
   },
   "issues": {
     "yaml_mode_deprecated": {
-      "description": "Starting with Home Assistant 2026.8, the default Lovelace dashboard will no longer support YAML mode. To migrate:\n\n1. Remove `mode: yaml` from `lovelace:` in your `configuration.yaml`\n2. Rename `{config_file}` to a new filename (e.g., `my-dashboard.yaml`)\n3. Add a dashboard entry in your `configuration.yaml`:\n\n```yaml\nlovelace:\n  dashboards:\n    lovelace:\n      mode: yaml\n      filename: my-dashboard.yaml\n      title: Overview\n      icon: mdi:view-dashboard\n      show_in_sidebar: true\n```\n\n4. Restart Home Assistant",
-      "title": "Lovelace YAML mode migration required"
+      "description": "The `mode` option in `lovelace:` configuration is deprecated and will be removed in Home Assistant 2026.8.\n\nTo migrate:\n\n1. Remove `mode: yaml` from `lovelace:` in your `configuration.yaml`\n2. If you have `resources:` declared in your lovelace configuration, add `resource_mode: yaml` to keep loading resources from YAML\n3. Add a dashboard entry in your `configuration.yaml`:\n\n   ```yaml\n   lovelace:\n     resource_mode: yaml  # Add this if you have resources declared\n     dashboards:\n       lovelace:\n         mode: yaml\n         filename: {config_file}\n         title: Overview\n         icon: mdi:view-dashboard\n         show_in_sidebar: true\n   ```\n\n4. Restart Home Assistant",
+      "title": "Lovelace YAML mode deprecated"
     }
   },
   "services": {

@@ -42,9 +42,7 @@ async def system_health_info(hass: HomeAssistant) -> dict[str, Any]:
         else:
             health_info[key] = dashboard[key]

-    if hass.data[LOVELACE_DATA].mode == MODE_YAML:
-        health_info[CONF_MODE] = MODE_YAML
-    elif MODE_STORAGE in modes:
+    if MODE_STORAGE in modes:
         health_info[CONF_MODE] = MODE_STORAGE
     elif MODE_YAML in modes:
         health_info[CONF_MODE] = MODE_YAML

@@ -14,7 +14,13 @@ from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.json import json_fragment

-from .const import CONF_URL_PATH, LOVELACE_DATA, ConfigNotFound
+from .const import (
+    CONF_RESOURCE_MODE,
+    CONF_URL_PATH,
+    DOMAIN,
+    LOVELACE_DATA,
+    ConfigNotFound,
+)
 from .dashboard import LovelaceConfig

 if TYPE_CHECKING:
@@ -38,7 +44,15 @@ def _handle_errors[_R](
         msg: dict[str, Any],
     ) -> None:
         url_path = msg.get(CONF_URL_PATH)
-        config = hass.data[LOVELACE_DATA].dashboards.get(url_path)
+
+        # When url_path is None, prefer "lovelace" dashboard if it exists (for YAML mode)
+        # Otherwise fall back to dashboards[None] (storage mode default)
+        if url_path is None:
+            config = hass.data[LOVELACE_DATA].dashboards.get(DOMAIN) or hass.data[
+                LOVELACE_DATA
+            ].dashboards.get(None)
+        else:
+            config = hass.data[LOVELACE_DATA].dashboards.get(url_path)

         if config is None:
             connection.send_error(
@@ -100,6 +114,20 @@ async def websocket_lovelace_resources_impl(
     connection.send_result(msg["id"], resources.async_items())


+@websocket_api.websocket_command({"type": "lovelace/info"})
+@websocket_api.async_response
+async def websocket_lovelace_info(
+    hass: HomeAssistant,
+    connection: websocket_api.ActiveConnection,
+    msg: dict[str, Any],
+) -> None:
+    """Send Lovelace UI info over WebSocket connection."""
+    connection.send_result(
+        msg["id"],
+        {CONF_RESOURCE_MODE: hass.data[LOVELACE_DATA].resource_mode},
+    )
+
+
 @websocket_api.websocket_command(
     {
         "type": "lovelace/config",

|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"domain": "[%key:common::config_flow::data::username%]",
|
||||
"domain": "Domain",
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"password": "Dynamic DNS password"
|
||||
},
|
||||
|
||||
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/nibe_heatpump",
   "integration_type": "device",
   "iot_class": "local_polling",
-  "requirements": ["nibe==2.21.0"]
+  "requirements": ["nibe==2.22.0"]
 }

@@ -594,7 +594,8 @@ UNIT_CONVERTERS: dict[NumberDeviceClass, type[BaseUnitConverter]] = {
 }

 # We translate units that were using using the legacy coding of μ \u00b5
-# to units using recommended coding of μ \u03bc
+# to units using recommended coding of μ \u03bc and
+# we convert alternative accepted units to the preferred unit.
 AMBIGUOUS_UNITS: dict[str | None, str] = {
     "\u00b5Sv/h": "μSv/h",  # aranet: radiation rate
     "\u00b5S/cm": UnitOfConductivity.MICROSIEMENS_PER_CM,
@@ -604,4 +605,9 @@ AMBIGUOUS_UNITS: dict[str | None, str] = {
     "\u00b5mol/s⋅m²": "μmol/s⋅m²",  # fyta: light
     "\u00b5g": UnitOfMass.MICROGRAMS,
     "\u00b5s": UnitOfTime.MICROSECONDS,
+    "mVAr": UnitOfReactivePower.MILLIVOLT_AMPERE_REACTIVE,
+    "VAr": UnitOfReactivePower.VOLT_AMPERE_REACTIVE,
+    "kVAr": UnitOfReactivePower.KILO_VOLT_AMPERE_REACTIVE,
+    "VArh": UnitOfReactiveEnergy.VOLT_AMPERE_REACTIVE_HOUR,
+    "kVArh": UnitOfReactiveEnergy.KILO_VOLT_AMPERE_REACTIVE_HOUR,
 }

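The lookup itself is a plain dict normalization; a minimal sketch with string stand-ins for the unit enums (the constant values here are illustrative, not taken from the diff):

```python
# "\u00b5" is MICRO SIGN, "\u03bc" is GREEK SMALL LETTER MU; they render
# identically, which is why incoming units are normalized to one form.
AMBIGUOUS_UNITS: dict[str | None, str] = {
    "\u00b5S/cm": "\u03bcS/cm",
    "VAr": "var",    # stand-in for UnitOfReactivePower.VOLT_AMPERE_REACTIVE
    "kVAr": "kvar",  # stand-in for the kilo variant
}


def normalize_unit(unit: str | None) -> str | None:
    """Return the preferred spelling, leaving unknown units untouched."""
    return AMBIGUOUS_UNITS.get(unit, unit)


assert normalize_unit("VAr") == "var"
assert normalize_unit("\u00b5S/cm") == "\u03bcS/cm"
assert normalize_unit("W") == "W"  # not ambiguous, passed through
```
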
@@ -4,15 +4,18 @@ from __future__ import annotations

 from datetime import datetime

-from homeassistant.components.calendar import CalendarEntity, CalendarEvent
+from homeassistant.components.calendar import (
+    CalendarEntity,
+    CalendarEntityDescription,
+    CalendarEvent,
+)
 from homeassistant.core import HomeAssistant, callback
-from homeassistant.helpers.entity import EntityDescription
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

 from .coordinator import CalendarUpdateCoordinator, RadarrConfigEntry, RadarrEvent
 from .entity import RadarrEntity

-CALENDAR_TYPE = EntityDescription(
+CALENDAR_TYPE = CalendarEntityDescription(
     key="calendar",
     name=None,
 )

@@ -8,5 +8,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["renault_api"],
   "quality_scale": "silver",
-  "requirements": ["renault-api==0.5.2"]
+  "requirements": ["renault-api==0.5.3"]
 }

@@ -840,7 +840,8 @@ STATE_CLASS_UNITS: dict[SensorStateClass | str, set[type[StrEnum] | str | None]]
 }

 # We translate units that were using using the legacy coding of μ \u00b5
-# to units using recommended coding of μ \u03bc
+# to units using recommended coding of μ \u03bc and
+# we convert alternative accepted units to the preferred unit.
 AMBIGUOUS_UNITS: dict[str | None, str] = {
     "\u00b5Sv/h": "μSv/h",  # aranet: radiation rate
     "\u00b5S/cm": UnitOfConductivity.MICROSIEMENS_PER_CM,
@@ -850,4 +851,9 @@ AMBIGUOUS_UNITS: dict[str | None, str] = {
     "\u00b5mol/s⋅m²": "μmol/s⋅m²",  # fyta: light
     "\u00b5g": UnitOfMass.MICROGRAMS,
     "\u00b5s": UnitOfTime.MICROSECONDS,
+    "mVAr": UnitOfReactivePower.MILLIVOLT_AMPERE_REACTIVE,
+    "VAr": UnitOfReactivePower.VOLT_AMPERE_REACTIVE,
+    "kVAr": UnitOfReactivePower.KILO_VOLT_AMPERE_REACTIVE,
+    "VArh": UnitOfReactiveEnergy.VOLT_AMPERE_REACTIVE_HOUR,
+    "kVArh": UnitOfReactiveEnergy.KILO_VOLT_AMPERE_REACTIVE_HOUR,
 }

@@ -233,7 +233,7 @@ async def _async_setup_block_entry(
         await hass.config_entries.async_forward_entry_setups(
             entry, runtime_data.platforms
         )
-        async_manage_coiot_unconfigured_issue(hass, entry)
+        await async_manage_coiot_unconfigured_issue(hass, entry)
         remove_empty_sub_devices(hass, entry)
     elif (
         sleep_period is None

@@ -162,8 +162,7 @@ def async_manage_outbound_websocket_incorrectly_enabled_issue(
         ir.async_delete_issue(hass, DOMAIN, issue_id)


-@callback
-def async_manage_coiot_unconfigured_issue(
+async def async_manage_coiot_unconfigured_issue(
     hass: HomeAssistant,
     entry: ShellyConfigEntry,
 ) -> None:
@@ -183,10 +182,10 @@ def async_manage_coiot_unconfigured_issue(
     coiot_config = device.settings["coiot"]
     coiot_enabled = coiot_config.get("enabled")

+    coiot_peer = f"{await get_coiot_address(hass)}:{get_coiot_port(hass)}"
     # Check if CoIoT is disabled or peer address is not correctly set
     if not coiot_enabled or (
-        (peer_config := coiot_config.get("peer"))
-        and peer_config != get_coiot_address(hass)
+        (peer_config := coiot_config.get("peer")) and peer_config != coiot_peer
     ):
         ir.async_create_issue(
             hass,
@@ -275,7 +274,7 @@ class CoiotConfigureFlow(ShellyBlockRepairsFlow):
         self, user_input: dict[str, str] | None = None
     ) -> data_entry_flow.FlowResult:
         """Handle the confirm step of a fix flow."""
-        coiot_addr = get_coiot_address(self.hass)
+        coiot_addr = await get_coiot_address(self.hass)
         coiot_port = get_coiot_port(self.hass)
         if coiot_addr is None or coiot_port is None:
             return self.async_abort(reason="cannot_configure")

@@ -29,6 +29,7 @@ from yarl import URL

 from homeassistant.components import network
 from homeassistant.components.http import HomeAssistantView
+from homeassistant.components.network import async_get_source_ip
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import (
     CONF_HOST,
@@ -732,12 +733,12 @@ def _get_homeassistant_url(hass: HomeAssistant) -> URL | None:
     return URL(raw_url)


-def get_coiot_address(hass: HomeAssistant) -> str | None:
+async def get_coiot_address(hass: HomeAssistant) -> str | None:
     """Return the CoIoT ip address."""
     url = _get_homeassistant_url(hass)
-    if url is None:
+    if url is None or url.host is None:
         return None
-    return str(url.host)
+    return await async_get_source_ip(hass, url.host)


 def get_rpc_ws_url(hass: HomeAssistant) -> str | None:

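The `get_coiot_address` change above swaps echoing the configured URL host for asking which local interface address would actually be used to reach it. A rough standalone approximation of that source-IP trick (this is not HA's actual helper, just the underlying idea):

```python
import socket


def get_source_ip(target_host: str, port: int = 80) -> str | None:
    """Return the local IP the OS routing table would pick for target_host.

    Connecting a UDP socket sends no packets; it only asks the kernel to
    choose a route, after which the bound source address can be read back.
    """
    try:
        with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock:
            sock.connect((target_host, port))
            return sock.getsockname()[0]
    except OSError:
        return None
```
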
@@ -6,7 +6,7 @@
     "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
   },
   "error": {
-    "bot_logout_failed": "Failed to logout Telegram bot. Please try again later.",
+    "bot_logout_failed": "Failed to log out Telegram bot. Please try again later.",
     "invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]",
     "invalid_proxy_url": "{proxy_url_error}",
     "invalid_trusted_networks": "Invalid trusted network: {error_message}",
@@ -231,11 +231,9 @@
     "step": {
       "init": {
         "data": {
-          "api_endpoint": "API endpoint",
           "parse_mode": "Parse mode"
         },
         "data_description": {
-          "api_endpoint": "Telegram bot API server endpoint.\nThe bot will be **locked out for 10 minutes** if you switch back to the default.\nDefault: `{default_api_endpoint}`.",
           "parse_mode": "Default parse mode for messages if not explicit in message data."
         },
         "title": "Configure Telegram bot"

@@ -8,9 +8,7 @@ from typing import Any
 import uuid

 from todoist_api_python.api_async import TodoistAPIAsync
-from todoist_api_python.endpoints import get_sync_url
-from todoist_api_python.headers import create_headers
-from todoist_api_python.models import Due, Label, Task
+from todoist_api_python.models import Label, Project, Task
 import voluptuous as vol

 from homeassistant.components.calendar import (
@@ -62,8 +60,9 @@ from .const import (
     START,
     SUMMARY,
 )
-from .coordinator import TodoistCoordinator
+from .coordinator import TodoistCoordinator, flatten_async_pages
 from .types import CalData, CustomProject, ProjectData, TodoistEvent
+from .util import parse_due_date

 _LOGGER = logging.getLogger(__name__)
@@ -157,18 +156,22 @@ async def async_setup_platform(
     # Setup devices:
     # Grab all projects.
-    projects = await api.get_projects()
+    projects_result = await api.get_projects()
+    all_projects: list[Project] = await flatten_async_pages(projects_result)

     # Grab all labels
-    labels = await api.get_labels()
+    labels_result = await api.get_labels()
+    all_labels: list[Label] = await flatten_async_pages(labels_result)

     # Add all Todoist-defined projects.
     project_devices = []
-    for project in projects:
+    for project in all_projects:
         # Project is an object, not a dict!
         # Because of that, we convert what we need to a dict.
         project_data: ProjectData = {CONF_NAME: project.name, CONF_ID: project.id}
-        project_devices.append(TodoistProjectEntity(coordinator, project_data, labels))
+        project_devices.append(
+            TodoistProjectEntity(coordinator, project_data, all_labels)
+        )
         # Cache the names so we can easily look up name->ID.
         project_id_lookup[project.name.lower()] = project.id
@@ -196,7 +199,7 @@ async def async_setup_platform(
             TodoistProjectEntity(
                 coordinator,
                 {"id": None, "name": extra_project["name"]},
-                labels,
+                all_labels,
                 due_date_days=project_due_date,
                 whitelisted_labels=project_label_filter,
                 whitelisted_projects=project_id_filter,
@@ -218,7 +221,7 @@ def async_register_services(  # noqa: C901

     session = async_get_clientsession(hass)

-    async def handle_new_task(call: ServiceCall) -> None:  # noqa: C901
+    async def handle_new_task(call: ServiceCall) -> None:
         """Call when a user creates a new Todoist Task from Home Assistant."""
         project_name = call.data[PROJECT_NAME]
         projects = await coordinator.async_get_projects()
@@ -269,9 +272,10 @@ def async_register_services(  # noqa: C901
             data["labels"] = task_labels

         if ASSIGNEE in call.data:
-            collaborators = await coordinator.api.get_collaborators(project_id)
+            collaborators_result = await coordinator.api.get_collaborators(project_id)
+            all_collaborators = await flatten_async_pages(collaborators_result)
             collaborator_id_lookup = {
-                collab.name.lower(): collab.id for collab in collaborators
+                collab.name.lower(): collab.id for collab in all_collaborators
             }
             task_assignee = call.data[ASSIGNEE].lower()
             if task_assignee in collaborator_id_lookup:
@@ -297,17 +301,14 @@ def async_register_services(  # noqa: C901
             if due is None:
                 raise ValueError(f"Invalid due_date: {call.data[DUE_DATE]}")
             due_date = datetime(due.year, due.month, due.day)
-            # Format it in the manner Todoist expects
-            due_date = dt_util.as_utc(due_date)
-            date_format = "%Y-%m-%dT%H:%M:%S"
-            data["due_datetime"] = datetime.strftime(due_date, date_format)
+            # Pass the datetime object directly - the library handles formatting
+            data["due_datetime"] = dt_util.as_utc(due_date)

         api_task = await coordinator.api.add_task(content, **data)

-        # @NOTE: The rest-api doesn't support reminders, this works manually using
-        # the sync api, in order to keep functional parity with the component.
-        # https://developer.todoist.com/sync/v9/#reminders
-        sync_url = get_sync_url("sync")
+        # The REST API doesn't support reminders, so we use the Sync API directly
+        # to maintain functional parity with the component.
+        # https://developer.todoist.com/api/v1/#tag/Sync/Reminders/Add-a-reminder
         _reminder_due: dict = {}
         if REMINDER_DATE_STRING in call.data:
             _reminder_due["string"] = call.data[REMINDER_DATE_STRING]
@@ -316,20 +317,21 @@ def async_register_services(  # noqa: C901
             _reminder_due["lang"] = call.data[REMINDER_DATE_LANG]

         if REMINDER_DATE in call.data:
-            due_date = dt_util.parse_datetime(call.data[REMINDER_DATE])
-            if due_date is None:
-                due = dt_util.parse_date(call.data[REMINDER_DATE])
-                if due is None:
+            reminder_date = dt_util.parse_datetime(call.data[REMINDER_DATE])
+            if reminder_date is None:
+                reminder = dt_util.parse_date(call.data[REMINDER_DATE])
+                if reminder is None:
                     raise ValueError(
                         f"Invalid reminder_date: {call.data[REMINDER_DATE]}"
                     )
-                due_date = datetime(due.year, due.month, due.day)
-            # Format it in the manner Todoist expects
-            due_date = dt_util.as_utc(due_date)
-            date_format = "%Y-%m-%dT%H:%M:%S"
-            _reminder_due["date"] = datetime.strftime(due_date, date_format)
+                reminder_date = datetime(reminder.year, reminder.month, reminder.day)
+            # Format it in the manner Todoist expects (UTC with Z suffix)
+            reminder_date = dt_util.as_utc(reminder_date)
+            date_format = "%Y-%m-%dT%H:%M:%S.000000Z"
+            _reminder_due["date"] = datetime.strftime(reminder_date, date_format)

-        async def add_reminder(reminder_due: dict):
+        if _reminder_due:
+            sync_url = "https://api.todoist.com/api/v1/sync"
             reminder_data = {
                 "commands": [
                     {
@@ -339,16 +341,16 @@ def async_register_services(  # noqa: C901
                         "args": {
                             "item_id": api_task.id,
                             "type": "absolute",
-                            "due": reminder_due,
+                            "due": _reminder_due,
                         },
                     }
                 ]
             }
-            headers = create_headers(token=coordinator.token, with_content=True)
-            return await session.post(sync_url, headers=headers, json=reminder_data)
-
-        if _reminder_due:
-            await add_reminder(_reminder_due)
+            headers = {
+                "Authorization": f"Bearer {coordinator.token}",
+                "Content-Type": "application/json",
+            }
+            await session.post(sync_url, headers=headers, json=reminder_data)

         _LOGGER.debug("Created Todoist task: %s", call.data[CONTENT])
@@ -527,7 +529,7 @@ class TodoistProjectData:
         """
         task: TodoistEvent = {
             ALL_DAY: False,
-            COMPLETED: data.is_completed,
+            COMPLETED: data.completed_at is not None,
             DESCRIPTION: f"https://todoist.com/showTask?id={data.id}",
             DUE_TODAY: False,
             END: None,
@@ -561,22 +563,26 @@ class TodoistProjectData:
         # complete the task.
         # Generally speaking, that means right now.
         if data.due is not None:
-            end = dt_util.parse_datetime(
-                data.due.datetime if data.due.datetime else data.due.date
-            )
-            task[END] = dt_util.as_local(end) if end is not None else end
-            if task[END] is not None:
-                if self._due_date_days is not None and (
-                    task[END] > dt_util.now() + self._due_date_days
-                ):
-                    # This task is out of range of our due date;
-                    # it shouldn't be counted.
-                    return None
+            due_date = data.due.date
+            # The API returns date or datetime objects when deserialized via from_dict()
+            if isinstance(due_date, datetime):
+                task[END] = dt_util.as_local(due_date)
+            elif isinstance(due_date, date):
+                task[END] = dt_util.start_of_local_day(due_date)

-                task[DUE_TODAY] = task[END].date() == dt_util.now().date()
+        if (end_dt := task[END]) is not None:
+            if self._due_date_days is not None:
+                # For comparison with now, use datetime
+                if end_dt > dt_util.now() + self._due_date_days:
+                    # This task is out of range of our due date;
+                    # it shouldn't be counted.
+                    return None
+
+            task[DUE_TODAY] = end_dt.date() == dt_util.now().date()

             # Special case: Task is overdue.
-            if task[END] <= task[START]:
+            if end_dt <= task[START]:
                 task[OVERDUE] = True
                 # Set end time to the current time plus 1 hour.
                 # We're pretty much guaranteed to update within that 1 hour,
@@ -681,7 +687,7 @@ class TodoistProjectData:
         for task in project_task_data:
             if task.due is None:
                 continue
-            start = get_start(task.due)
+            start = parse_due_date(task.due)
             if start is None:
                 continue
             event = CalendarEvent(
@@ -689,9 +695,15 @@ class TodoistProjectData:
                 start=start,
                 end=start + timedelta(days=1),
             )
-            if event.start_datetime_local >= end_date:
+            if (
+                event.start_datetime_local is not None
+                and event.start_datetime_local >= end_date
+            ):
                 continue
-            if event.end_datetime_local < start_date:
+            if (
+                event.end_datetime_local is not None
+                and event.end_datetime_local < start_date
+            ):
                 continue
             events.append(event)
         return events
@@ -748,15 +760,3 @@ class TodoistProjectData:
             return
         self.event = event
         _LOGGER.debug("Updated %s", self._name)
-
-
-def get_start(due: Due) -> datetime | date | None:
-    """Return the task due date as a start date or date time."""
-    if due.datetime:
-        start = dt_util.parse_datetime(due.datetime)
-        if not start:
-            return None
-        return dt_util.as_local(start)
-    if due.date:
-        return dt_util.parse_date(due.date)
-    return None

@@ -1,7 +1,9 @@
 """DataUpdateCoordinator for the Todoist component."""

+from collections.abc import AsyncGenerator
 from datetime import timedelta
 import logging
+from typing import TypeVar

 from todoist_api_python.api_async import TodoistAPIAsync
 from todoist_api_python.models import Label, Project, Section, Task
@@ -10,6 +12,18 @@ from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

+T = TypeVar("T")
+
+
+async def flatten_async_pages(
+    pages: AsyncGenerator[list[T]],
+) -> list[T]:
+    """Flatten paginated results from an async generator."""
+    all_items: list[T] = []
+    async for page in pages:
+        all_items.extend(page)
+    return all_items
+
+
 class TodoistCoordinator(DataUpdateCoordinator[list[Task]]):
     """Coordinator for updating task data from Todoist."""
@@ -39,22 +53,26 @@ class TodoistCoordinator(DataUpdateCoordinator[list[Task]]):
     async def _async_update_data(self) -> list[Task]:
         """Fetch tasks from the Todoist API."""
         try:
-            return await self.api.get_tasks()
+            tasks_async = await self.api.get_tasks()
         except Exception as err:
             raise UpdateFailed(f"Error communicating with API: {err}") from err
+        return await flatten_async_pages(tasks_async)

     async def async_get_projects(self) -> list[Project]:
         """Return todoist projects fetched at most once."""
         if self._projects is None:
-            self._projects = await self.api.get_projects()
+            projects_async = await self.api.get_projects()
+            self._projects = await flatten_async_pages(projects_async)
         return self._projects

     async def async_get_sections(self, project_id: str) -> list[Section]:
         """Return todoist sections for a given project ID."""
-        return await self.api.get_sections(project_id=project_id)
+        sections_async = await self.api.get_sections(project_id=project_id)
+        return await flatten_async_pages(sections_async)

     async def async_get_labels(self) -> list[Label]:
         """Return todoist labels fetched at most once."""
         if self._labels is None:
-            self._labels = await self.api.get_labels()
+            labels_async = await self.api.get_labels()
+            self._labels = await flatten_async_pages(labels_async)
         return self._labels

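The new `flatten_async_pages` helper is easy to exercise against any async generator of lists, which is the paginated shape the todoist-api-python 3.x endpoints return. A minimal usage sketch; the fake pages are illustrative (the two-argument `AsyncGenerator[..., None]` form is used here for portability to Python versions before 3.13):

```python
import asyncio
from collections.abc import AsyncGenerator
from typing import TypeVar

T = TypeVar("T")


async def flatten_async_pages(pages: AsyncGenerator[list[T], None]) -> list[T]:
    """Flatten paginated results from an async generator (as in the diff)."""
    all_items: list[T] = []
    async for page in pages:
        all_items.extend(page)
    return all_items


async def fake_pages() -> AsyncGenerator[list[int], None]:
    """Stand-in for a paginated API call yielding one page at a time."""
    for page in ([1, 2], [3], [4, 5, 6]):
        yield page


async def main() -> None:
    items = await flatten_async_pages(fake_pages())
    assert items == [1, 2, 3, 4, 5, 6]


asyncio.run(main())
```
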
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/todoist",
   "iot_class": "cloud_polling",
   "loggers": ["todoist"],
-  "requirements": ["todoist-api-python==2.1.7"]
+  "requirements": ["todoist-api-python==3.1.0"]
 }

@@ -16,10 +16,10 @@ from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.update_coordinator import CoordinatorEntity
-from homeassistant.util import dt as dt_util

 from .const import DOMAIN
 from .coordinator import TodoistCoordinator
+from .util import parse_due_date


 async def async_setup_entry(
@@ -99,24 +99,16 @@ class TodoistTodoListEntity(CoordinatorEntity[TodoistCoordinator], TodoListEntit
             if task.parent_id is not None:
                 # Filter out sub-tasks until they are supported by the UI.
                 continue
-            if task.is_completed:
+            if task.completed_at is not None:
                 status = TodoItemStatus.COMPLETED
             else:
                 status = TodoItemStatus.NEEDS_ACTION
-            due: datetime.date | datetime.datetime | None = None
-            if task_due := task.due:
-                if task_due.datetime:
-                    due = dt_util.as_local(
-                        datetime.datetime.fromisoformat(task_due.datetime)
-                    )
-                elif task_due.date:
-                    due = datetime.date.fromisoformat(task_due.date)
             items.append(
                 TodoItem(
                     summary=task.content,
                     uid=task.id,
                     status=status,
-                    due=due,
+                    due=parse_due_date(task.due),
                     description=task.description or None,  # Don't use empty string
                 )
             )
@@ -147,9 +139,9 @@ class TodoistTodoListEntity(CoordinatorEntity[TodoistCoordinator], TodoListEntit

         if item.status != existing_item.status:
             if item.status == TodoItemStatus.COMPLETED:
-                await self.coordinator.api.close_task(task_id=uid)
+                await self.coordinator.api.complete_task(task_id=uid)
             else:
-                await self.coordinator.api.reopen_task(task_id=uid)
+                await self.coordinator.api.uncomplete_task(task_id=uid)
         await self.coordinator.async_refresh()

     async def async_delete_todo_items(self, uids: list[str]) -> None:

35
homeassistant/components/todoist/util.py
Normal file
35
homeassistant/components/todoist/util.py
Normal file
@@ -0,0 +1,35 @@
|
||||
"""Utility functions for the Todoist integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import date, datetime
|
||||
|
||||
from todoist_api_python.models import Due
|
||||
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
|
||||
def parse_due_date(task_due: Due | None) -> date | datetime | None:
|
||||
"""Parse due date from Todoist task due object.
|
||||
|
||||
The due.date field contains either a date object (for date-only tasks)
|
||||
or a datetime object (for tasks with a specific time). When deserialized
|
||||
from the API via from_dict(), these are already proper Python date/datetime
|
||||
objects.
|
||||
|
||||
Args:
|
||||
task_due: The Due object from a Todoist task, or None.
|
||||
|
||||
Returns:
|
||||
A date object for date-only due dates, a localized datetime for
|
||||
datetime due dates, or None if no due date is set.
|
||||
|
||||
"""
|
||||
if task_due is None or not (due_date := task_due.date):
|
||||
return None
|
||||
|
||||
if isinstance(due_date, datetime):
|
||||
return dt_util.as_local(due_date)
|
||||
if isinstance(due_date, date):
|
||||
return due_date
|
||||
return None
|
||||
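For reference, a short usage sketch of parse_due_date; the Due payloads below
are illustrative placeholders, not values taken from this diff:

# Illustrative only: the two Due shapes described in the docstring above.
from todoist_api_python.models import Due

from homeassistant.components.todoist.util import parse_due_date

# Date-only task: parse_due_date returns a plain date object.
date_only = Due.from_dict(
    {"date": "2024-05-24", "is_recurring": False, "string": "today"}
)
assert parse_due_date(date_only) is not None

# Task with a specific time: the result is a localized datetime.
timed = Due.from_dict(
    {"date": "2024-05-24T18:00:00", "is_recurring": False, "string": "today 6pm"}
)
assert parse_due_date(timed) is not None

# No due date at all.
assert parse_due_date(None) is None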
@@ -4,5 +4,6 @@
  "codeowners": [],
  "documentation": "https://www.home-assistant.io/integrations/viaggiatreno",
  "iot_class": "cloud_polling",
  "quality_scale": "legacy"
  "quality_scale": "legacy",
  "requirements": ["viaggiatreno_ha==0.2.4"]
}

@@ -2,12 +2,17 @@

from __future__ import annotations

import asyncio
from http import HTTPStatus
from datetime import timedelta
import logging
import time
from typing import Any

import aiohttp
from viaggiatreno_ha.trainline import (
    TrainLine,
    TrainLineStatus,
    TrainState,
    Viaggiatreno,
)
import voluptuous as vol

from homeassistant.components.sensor import (
@@ -19,19 +24,12 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType

_LOGGER = logging.getLogger(__name__)

VIAGGIATRENO_ENDPOINT = (
    "http://www.viaggiatreno.it/infomobilita/"
    "resteasy/viaggiatreno/andamentoTreno/"
    "{station_id}/{train_id}/{timestamp}"
)

REQUEST_TIMEOUT = 5  # seconds
ICON = "mdi:train"
MONITORED_INFO = [
MONITORED_INFO = [  # Backward compatibility with older versions
    "categoria",
    "compOrarioArrivoZeroEffettivo",
    "compOrarioPartenzaZeroEffettivo",
@@ -47,7 +45,6 @@ DEFAULT_NAME = "Train {}"

CONF_NAME = "train_name"
CONF_STATION_ID = "station_id"
CONF_STATION_NAME = "station_name"
CONF_TRAIN_ID = "train_id"

ARRIVED_STRING = "Arrived"
@@ -55,6 +52,8 @@ CANCELLED_STRING = "Cancelled"
NOT_DEPARTED_STRING = "Not departed yet"
NO_INFORMATION_STRING = "No information for this train now"

SCAN_INTERVAL = timedelta(minutes=2)

PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_TRAIN_ID): cv.string,
@@ -71,126 +70,94 @@ async def async_setup_platform(
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the ViaggiaTreno platform."""
    train_id = config.get(CONF_TRAIN_ID)
    station_id = config.get(CONF_STATION_ID)
    train_id = str(config.get(CONF_TRAIN_ID))
    station_id = str(config.get(CONF_STATION_ID))
    if not (name := config.get(CONF_NAME)):
        name = DEFAULT_NAME.format(train_id)
    async_add_entities([ViaggiaTrenoSensor(train_id, station_id, name)])


async def async_http_request(hass, uri):
    """Perform actual request."""
    try:
        session = async_get_clientsession(hass)
        async with asyncio.timeout(REQUEST_TIMEOUT):
            req = await session.get(uri)
        if req.status != HTTPStatus.OK:
            return {"error": req.status}
        json_response = await req.json()
    except (TimeoutError, aiohttp.ClientError) as exc:
        _LOGGER.error("Cannot connect to ViaggiaTreno API endpoint: %s", exc)
        return None
    except ValueError:
        _LOGGER.error("Received non-JSON data from ViaggiaTreno API endpoint")
        return None
    return json_response
    tl = TrainLine(train_id=train_id, starting_station=station_id)
    async_add_entities([ViaggiaTrenoSensor(tl, name)], True)


class ViaggiaTrenoSensor(SensorEntity):
    """Implementation of a ViaggiaTreno sensor."""

    _attr_attribution = "Powered by ViaggiaTreno Data"
    _attr_should_poll = True

    def __init__(self, train_id, station_id, name):
    def __init__(self, train_line: TrainLine, name: str) -> None:
        """Initialize the sensor."""
        self._state = None
        self._attributes = {}
        self._unit = ""
        self._state: StateType = NO_INFORMATION_STRING
        self._attributes: dict[str, Any] = {}
        self._icon = ICON
        self._station_id = station_id
        self._name = name

        self.uri = VIAGGIATRENO_ENDPOINT.format(
            station_id=station_id, train_id=train_id, timestamp=int(time.time()) * 1000
        )
        self._line = train_line
        self._viaggiatreno: Viaggiatreno | None = None
        self._tstatus: TrainLineStatus | None = None

    @property
    def name(self):
    def name(self) -> str:
        """Return the name of the sensor."""
        return self._name

    @property
    def native_value(self):
    def native_value(self) -> StateType:
        """Return the state of the sensor."""
        return self._state

    @property
    def icon(self):
    def icon(self) -> str:
        """Icon to use in the frontend, if any."""
        return self._icon

    @property
    def native_unit_of_measurement(self):
    def native_unit_of_measurement(self) -> str | None:
        """Return the unit of measurement."""
        return self._unit
        if isinstance(self.native_value, (int, float)):
            return UnitOfTime.MINUTES
        return None

    @property
    def extra_state_attributes(self):
    def extra_state_attributes(self) -> dict[str, Any]:
        """Return extra attributes."""
        return self._attributes

    @staticmethod
    def has_departed(data):
        """Check if the train has actually departed."""
        try:
            first_station = data["fermate"][0]
            if data["oraUltimoRilevamento"] or first_station["effettiva"]:
                return True
        except ValueError:
            _LOGGER.error("Cannot fetch first station: %s", data)
        return False

    @staticmethod
    def has_arrived(data):
        """Check if the train has already arrived."""
        last_station = data["fermate"][-1]
        if not last_station["effettiva"]:
            return False
        return True

    @staticmethod
    def is_cancelled(data):
        """Check if the train is cancelled."""
        if data["tipoTreno"] == "ST" and data["provvedimento"] == 1:
            return True
        return False

    async def async_update(self) -> None:
        """Update state."""
        uri = self.uri
        res = await async_http_request(self.hass, uri)
        if res.get("error", ""):
            if res["error"] == 204:
                self._state = NO_INFORMATION_STRING
                self._unit = ""
            else:
                self._state = f"Error: {res['error']}"
                self._unit = ""
        else:
            for i in MONITORED_INFO:
                self._attributes[i] = res[i]

            if self.is_cancelled(res):
        if self._viaggiatreno is None:
            session = async_get_clientsession(self.hass)
            self._viaggiatreno = Viaggiatreno(session)
        try:
            await self._viaggiatreno.query_if_useful(self._line)
            self._tstatus = self._viaggiatreno.get_line_status(self._line)
            if self._tstatus is None:
                _LOGGER.error(
                    "Received status for line %s: None. Check the train and station IDs",
                    self._line,
                )
                return
        except (TimeoutError, aiohttp.ClientError) as exc:
            _LOGGER.error("Cannot connect to ViaggiaTreno API endpoint: %s", exc)
            return
        except ValueError:
            _LOGGER.error("Received non-JSON data from ViaggiaTreno API endpoint")
            return
        if self._tstatus is not None:
            if self._tstatus.state == TrainState.CANCELLED:
                self._state = CANCELLED_STRING
                self._icon = "mdi:cancel"
                self._unit = ""
            elif not self.has_departed(res):
            elif self._tstatus.state == TrainState.NOT_YET_DEPARTED:
                self._state = NOT_DEPARTED_STRING
                self._unit = ""
            elif self.has_arrived(res):
            elif self._tstatus.state == TrainState.ARRIVED:
                self._state = ARRIVED_STRING
                self._unit = ""
            else:
                self._state = res.get("ritardo")
                self._unit = UnitOfTime.MINUTES
            elif self._tstatus.state in {
                TrainState.RUNNING,
                TrainState.PARTIALLY_CANCELLED,
            }:
                delay_minutes = self._tstatus.timetable.delay
                self._state = delay_minutes
                self._icon = ICON
            else:
                self._state = NO_INFORMATION_STRING
        # Update attributes
        for info in MONITORED_INFO:
            self._attributes[info] = self._viaggiatreno.json[self._line][info]

@@ -23,7 +23,7 @@
    "universal_silabs_flasher",
    "serialx"
  ],
  "requirements": ["zha==0.0.87", "serialx==0.6.2"],
  "requirements": ["zha==0.0.88", "serialx==0.6.2"],
  "usb": [
    {
      "description": "*2652*",

@@ -53,6 +53,10 @@ import zigpy.backups
from zigpy.config import CONF_DEVICE
from zigpy.config.validators import cv_boolean
from zigpy.types.named import EUI64, KeyData
from zigpy.typing import (
    UNDEFINED as ZIGPY_UNDEFINED,
    UndefinedType as ZigpyUndefinedType,
)
from zigpy.zcl.clusters.security import IasAce
import zigpy.zdo.types as zdo_types

@@ -850,7 +854,7 @@ async def websocket_read_zigbee_cluster_attributes(
    cluster_id: int = msg[ATTR_CLUSTER_ID]
    cluster_type: str = msg[ATTR_CLUSTER_TYPE]
    attribute: int = msg[ATTR_ATTRIBUTE]
    manufacturer: int | None = msg.get(ATTR_MANUFACTURER)
    manufacturer: int | ZigpyUndefinedType = msg.get(ATTR_MANUFACTURER, ZIGPY_UNDEFINED)
    zha_device = zha_gateway.get_device(ieee)
    success = {}
    failure = {}
@@ -1326,7 +1330,9 @@ def async_load_api(hass: HomeAssistant) -> None:
        cluster_type: str = service.data[ATTR_CLUSTER_TYPE]
        attribute: int | str = service.data[ATTR_ATTRIBUTE]
        value: int | bool | str = service.data[ATTR_VALUE]
        manufacturer: int | None = service.data.get(ATTR_MANUFACTURER)
        manufacturer: int | ZigpyUndefinedType = service.data.get(
            ATTR_MANUFACTURER, ZIGPY_UNDEFINED
        )
        zha_device = zha_gateway.get_device(ieee)
        response = None
        if zha_device is not None:
@@ -1380,7 +1386,9 @@ def async_load_api(hass: HomeAssistant) -> None:
        command_type: str = service.data[ATTR_COMMAND_TYPE]
        args: list | None = service.data.get(ATTR_ARGS)
        params: dict | None = service.data.get(ATTR_PARAMS)
        manufacturer: int | None = service.data.get(ATTR_MANUFACTURER)
        manufacturer: int | ZigpyUndefinedType = service.data.get(
            ATTR_MANUFACTURER, ZIGPY_UNDEFINED
        )
        zha_device = zha_gateway.get_device(ieee)
        if zha_device is not None:
            if cluster_id >= MFG_CLUSTER_ID_START and manufacturer is None:
@@ -1435,7 +1443,9 @@ def async_load_api(hass: HomeAssistant) -> None:
        cluster_id: int = service.data[ATTR_CLUSTER_ID]
        command: int = service.data[ATTR_COMMAND]
        args: list = service.data[ATTR_ARGS]
        manufacturer: int | None = service.data.get(ATTR_MANUFACTURER)
        manufacturer: int | ZigpyUndefinedType = service.data.get(
            ATTR_MANUFACTURER, ZIGPY_UNDEFINED
        )
        group = zha_gateway.get_group(group_id)
        if cluster_id >= MFG_CLUSTER_ID_START and manufacturer is None:
            _LOGGER.error("Missing manufacturer attribute for cluster: %d", cluster_id)

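Aside: zigpy's UNDEFINED sentinel above lets these handlers distinguish "caller
did not pass a manufacturer" from "caller explicitly passed None". A generic
sketch of the same idiom, with illustrative names rather than zigpy's own:

# Illustrative sentinel idiom; not zigpy's actual implementation.
from enum import Enum
from typing import Final


class _UndefinedType(Enum):
    UNDEFINED = "undefined"


UNDEFINED: Final = _UndefinedType.UNDEFINED


def describe(manufacturer: int | None | _UndefinedType = UNDEFINED) -> str:
    """Separate an omitted argument from an explicit None."""
    if manufacturer is UNDEFINED:
        return "manufacturer omitted, fall back to the device default"
    if manufacturer is None:
        return "explicitly no manufacturer code"
    return f"manufacturer {manufacturer:#06x}"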
@@ -17,7 +17,7 @@ if TYPE_CHECKING:
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2026
MINOR_VERSION: Final = 2
PATCH_VERSION: Final = "0.dev0"
PATCH_VERSION: Final = "0b1"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 2)

@@ -36,16 +36,17 @@ fnv-hash-fast==1.6.0
go2rtc-client==0.4.0
ha-ffmpeg==3.2.2
habluetooth==5.8.0
hass-nabucasa==1.11.0
hass-nabucasa==1.12.0
hassil==3.5.0
home-assistant-bluetooth==1.13.1
home-assistant-frontend==20260128.1
home-assistant-intents==2026.1.6
home-assistant-frontend==20260128.3
home-assistant-intents==2026.1.28
httpx==0.28.1
ifaddr==0.2.0
Jinja2==3.1.6
lru-dict==1.3.0
mutagen==1.47.0
openai==2.15.0
orjson==3.11.5
packaging>=23.1
paho-mqtt==2.1.0

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "homeassistant"
version = "2026.2.0.dev0"
version = "2026.2.0b1"
license = "Apache-2.0"
license-files = ["LICENSE*", "homeassistant/backports/LICENSE*"]
description = "Open-source home automation platform running on Python 3."
@@ -48,7 +48,7 @@ dependencies = [
    "fnv-hash-fast==1.6.0",
    # hass-nabucasa is imported by helpers which don't depend on the cloud
    # integration
    "hass-nabucasa==1.11.0",
    "hass-nabucasa==1.12.0",
    # When bumping httpx, please check the version pins of
    # httpcore, anyio, and h11 in gen_requirements_all
    "httpx==0.28.1",
@@ -484,8 +484,6 @@ filterwarnings = [
    "ignore:Deprecated call to `pkg_resources.declare_namespace\\('azure'\\)`:DeprecationWarning:pkg_resources",

    # -- tracked upstream / open PRs
    # https://github.com/kbr/fritzconnection/pull/244 - v1.15.0 - 2025-05-17
    "ignore:.*invalid escape sequence:SyntaxWarning:.*fritzconnection.core.soaper",
    # https://github.com/hacf-fr/meteofrance-api/pull/688 - v1.4.0 - 2025-03-26
    "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:meteofrance_api.model.forecast",

4  requirements.txt  generated
@@ -24,10 +24,10 @@ cronsim==2.7
cryptography==46.0.2
fnv-hash-fast==1.6.0
ha-ffmpeg==3.2.2
hass-nabucasa==1.11.0
hass-nabucasa==1.12.0
hassil==3.5.0
home-assistant-bluetooth==1.13.1
home-assistant-intents==2026.1.6
home-assistant-intents==2026.1.28
httpx==0.28.1
ifaddr==0.2.0
Jinja2==3.1.6

22  requirements_all.txt  generated
@@ -1015,7 +1015,7 @@ fressnapftracker==0.2.1

# homeassistant.components.fritz
# homeassistant.components.fritzbox_callmonitor
fritzconnection[qr]==1.15.0
fritzconnection[qr]==1.15.1

# homeassistant.components.fyta
fyta_cli==0.7.2
@@ -1175,7 +1175,7 @@ habluetooth==5.8.0
hanna-cloud==0.0.7

# homeassistant.components.cloud
hass-nabucasa==1.11.0
hass-nabucasa==1.12.0

# homeassistant.components.splunk
hass-splunk==0.1.1
@@ -1219,10 +1219,10 @@ hole==0.9.0
holidays==0.84

# homeassistant.components.frontend
home-assistant-frontend==20260128.1
home-assistant-frontend==20260128.3

# homeassistant.components.conversation
home-assistant-intents==2026.1.6
home-assistant-intents==2026.1.28

# homeassistant.components.gentex_homelink
homelink-integration-api==0.0.1
@@ -1584,7 +1584,7 @@ nextdns==5.0.0
nhc==0.7.0

# homeassistant.components.nibe_heatpump
nibe==2.21.0
nibe==2.22.0

# homeassistant.components.nice_go
nice-go==1.0.1
@@ -1664,6 +1664,7 @@ open-garage==0.2.0
# homeassistant.components.open_meteo
open-meteo==0.3.2

# homeassistant.components.cloud
# homeassistant.components.open_router
# homeassistant.components.openai_conversation
openai==2.15.0
@@ -1988,7 +1989,7 @@ pydeconz==120
pydelijn==1.1.0

# homeassistant.components.dexcom
pydexcom==0.2.3
pydexcom==0.5.1

# homeassistant.components.discovergy
pydiscovergy==3.0.2
@@ -2747,7 +2748,7 @@ refoss-ha==1.2.5
regenmaschine==2024.03.0

# homeassistant.components.renault
renault-api==0.5.2
renault-api==0.5.3

# homeassistant.components.renson
renson-endura-delta==1.7.2
@@ -3039,7 +3040,7 @@ tilt-pi==0.2.1
tmb==0.0.4

# homeassistant.components.todoist
todoist-api-python==2.1.7
todoist-api-python==3.1.0

# homeassistant.components.togrill
togrill-bluetooth==0.8.1
@@ -3142,6 +3143,9 @@ velbus-aio==2026.1.4
# homeassistant.components.venstar
venstarcolortouch==0.21

# homeassistant.components.viaggiatreno
viaggiatreno_ha==0.2.4

# homeassistant.components.victron_ble
victron-ble-ha-parser==0.4.9

@@ -3292,7 +3296,7 @@ zeroconf==0.148.0
zeversolar==0.3.2

# homeassistant.components.zha
zha==0.0.87
zha==0.0.88

# homeassistant.components.zhong_hong
zhong-hong-hvac==1.0.13

19  requirements_test_all.txt  generated
@@ -894,7 +894,7 @@ fressnapftracker==0.2.1

# homeassistant.components.fritz
# homeassistant.components.fritzbox_callmonitor
fritzconnection[qr]==1.15.0
fritzconnection[qr]==1.15.1

# homeassistant.components.fyta
fyta_cli==0.7.2
@@ -1045,7 +1045,7 @@ habluetooth==5.8.0
hanna-cloud==0.0.7

# homeassistant.components.cloud
hass-nabucasa==1.11.0
hass-nabucasa==1.12.0

# homeassistant.components.assist_satellite
# homeassistant.components.conversation
@@ -1077,10 +1077,10 @@ hole==0.9.0
holidays==0.84

# homeassistant.components.frontend
home-assistant-frontend==20260128.1
home-assistant-frontend==20260128.3

# homeassistant.components.conversation
home-assistant-intents==2026.1.6
home-assistant-intents==2026.1.28

# homeassistant.components.gentex_homelink
homelink-integration-api==0.0.1
@@ -1379,7 +1379,7 @@ nextdns==5.0.0
nhc==0.7.0

# homeassistant.components.nibe_heatpump
nibe==2.21.0
nibe==2.22.0

# homeassistant.components.nice_go
nice-go==1.0.1
@@ -1447,6 +1447,7 @@ open-garage==0.2.0
# homeassistant.components.open_meteo
open-meteo==0.3.2

# homeassistant.components.cloud
# homeassistant.components.open_router
# homeassistant.components.openai_conversation
openai==2.15.0
@@ -1695,7 +1696,7 @@ pydeako==0.6.0
pydeconz==120

# homeassistant.components.dexcom
pydexcom==0.2.3
pydexcom==0.5.1

# homeassistant.components.discovergy
pydiscovergy==3.0.2
@@ -2313,7 +2314,7 @@ refoss-ha==1.2.5
regenmaschine==2024.03.0

# homeassistant.components.renault
renault-api==0.5.2
renault-api==0.5.3

# homeassistant.components.renson
renson-endura-delta==1.7.2
@@ -2539,7 +2540,7 @@ tilt-ble==1.0.1
tilt-pi==0.2.1

# homeassistant.components.todoist
todoist-api-python==2.1.7
todoist-api-python==3.1.0

# homeassistant.components.togrill
togrill-bluetooth==0.8.1
@@ -2762,7 +2763,7 @@ zeroconf==0.148.0
zeversolar==0.3.2

# homeassistant.components.zha
zha==0.0.87
zha==0.0.88

# homeassistant.components.zwave_js
zwave-js-server-python==0.68.0

@@ -121,7 +121,7 @@ def mock_climate_variables() -> dict:
    """Mock climate variable data for default thermostat state."""
    return {
        123: {
            "HVAC_STATE": "idle",
            "HVAC_STATE": "Off",
            "HVAC_MODE": "Heat",
            "TEMPERATURE_F": 72.5,
            "HUMIDITY": 45,

@@ -50,7 +50,7 @@
      'current_humidity': 45,
      'current_temperature': 72,
      'friendly_name': 'Test Controller Residential Thermostat V2',
      'hvac_action': <HVACAction.IDLE: 'idle'>,
      'hvac_action': <HVACAction.OFF: 'off'>,
      'hvac_modes': list([
        <HVACMode.OFF: 'off'>,
        <HVACMode.HEAT: 'heat'>,

@@ -1,5 +1,6 @@
"""Test Control4 Climate."""

from typing import Any
from unittest.mock import MagicMock

import pytest
@@ -28,6 +29,27 @@ from tests.common import MockConfigEntry, snapshot_platform
ENTITY_ID = "climate.test_controller_residential_thermostat_v2"


def _make_climate_data(
    hvac_state: str = "off",
    hvac_mode: str = "Heat",
    temperature: float = 72.0,
    humidity: int = 50,
    cool_setpoint: float = 75.0,
    heat_setpoint: float = 68.0,
) -> dict[int, dict[str, Any]]:
    """Build mock climate variable data for item ID 123."""
    return {
        123: {
            "HVAC_STATE": hvac_state,
            "HVAC_MODE": hvac_mode,
            "TEMPERATURE_F": temperature,
            "HUMIDITY": humidity,
            "COOL_SETPOINT_F": cool_setpoint,
            "HEAT_SETPOINT_F": heat_setpoint,
        }
    }


@pytest.fixture
def platforms() -> list[Platform]:
    """Platforms which should be loaded during the test."""
@@ -60,6 +82,53 @@ async def test_climate_entities(
    await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)


@pytest.mark.parametrize(
    ("mock_climate_variables", "expected_action"),
    [
        pytest.param(
            _make_climate_data(hvac_state="Off", hvac_mode="Off"),
            HVACAction.OFF,
            id="off",
        ),
        pytest.param(
            _make_climate_data(hvac_state="Heat"),
            HVACAction.HEATING,
            id="heat",
        ),
        pytest.param(
            _make_climate_data(hvac_state="Cool", hvac_mode="Cool"),
            HVACAction.COOLING,
            id="cool",
        ),
        pytest.param(
            _make_climate_data(hvac_state="Dry"),
            HVACAction.DRYING,
            id="dry",
        ),
        pytest.param(
            _make_climate_data(hvac_state="Fan"),
            HVACAction.FAN,
            id="fan",
        ),
    ],
)
@pytest.mark.usefixtures(
    "mock_c4_account",
    "mock_c4_director",
    "mock_climate_update_variables",
    "init_integration",
)
async def test_hvac_action_mapping(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    expected_action: HVACAction,
) -> None:
    """Test all 5 official Control4 HVAC states map to correct HA actions."""
    state = hass.states.get(ENTITY_ID)
    assert state is not None
    assert state.attributes["hvac_action"] == expected_action


@pytest.mark.parametrize(
    (
        "mock_climate_variables",
@@ -71,16 +140,7 @@ async def test_climate_entities(
    ),
    [
        pytest.param(
            {
                123: {
                    "HVAC_STATE": "off",
                    "HVAC_MODE": "Off",
                    "TEMPERATURE_F": 72.0,
                    "HUMIDITY": 50,
                    "COOL_SETPOINT_F": 75.0,
                    "HEAT_SETPOINT_F": 68.0,
                }
            },
            _make_climate_data(hvac_state="Off", hvac_mode="Off"),
            HVACMode.OFF,
            HVACAction.OFF,
            None,
@@ -89,16 +149,13 @@ async def test_climate_entities(
            id="off",
        ),
        pytest.param(
            {
                123: {
                    "HVAC_STATE": "cooling",
                    "HVAC_MODE": "Cool",
                    "TEMPERATURE_F": 74.0,
                    "HUMIDITY": 55,
                    "COOL_SETPOINT_F": 72.0,
                    "HEAT_SETPOINT_F": 68.0,
                }
            },
            _make_climate_data(
                hvac_state="Cool",
                hvac_mode="Cool",
                temperature=74.0,
                humidity=55,
                cool_setpoint=72.0,
            ),
            HVACMode.COOL,
            HVACAction.COOLING,
            72.0,
@@ -107,16 +164,12 @@ async def test_climate_entities(
            id="cool",
        ),
        pytest.param(
            {
                123: {
                    "HVAC_STATE": "heating",
                    "HVAC_MODE": "Auto",
                    "TEMPERATURE_F": 65.0,
                    "HUMIDITY": 40,
                    "COOL_SETPOINT_F": 75.0,
                    "HEAT_SETPOINT_F": 68.0,
                }
            },
            _make_climate_data(
                hvac_state="Heat",
                hvac_mode="Auto",
                temperature=65.0,
                humidity=40,
            ),
            HVACMode.HEAT_COOL,
            HVACAction.HEATING,
            None,
@@ -143,6 +196,7 @@ async def test_climate_states(
) -> None:
    """Test climate entity in different states."""
    state = hass.states.get(ENTITY_ID)
    assert state is not None
    assert state.state == expected_hvac_mode
    assert state.attributes["hvac_action"] == expected_hvac_action

@@ -186,30 +240,21 @@ async def test_set_hvac_mode(
    ("mock_climate_variables", "method_name"),
    [
        pytest.param(
            {
                123: {
                    "HVAC_STATE": "idle",
                    "HVAC_MODE": "Heat",
                    "TEMPERATURE_F": 72.5,
                    "HUMIDITY": 45,
                    "COOL_SETPOINT_F": 75.0,
                    "HEAT_SETPOINT_F": 68.0,
                }
            },
            _make_climate_data(
                hvac_state="Off",
                temperature=72.5,
                humidity=45,
            ),
            "setHeatSetpointF",
            id="heat",
        ),
        pytest.param(
            {
                123: {
                    "HVAC_STATE": "idle",
                    "HVAC_MODE": "Cool",
                    "TEMPERATURE_F": 74.0,
                    "HUMIDITY": 50,
                    "COOL_SETPOINT_F": 72.0,
                    "HEAT_SETPOINT_F": 68.0,
                }
            },
            _make_climate_data(
                hvac_state="Cool",
                hvac_mode="Cool",
                temperature=74.0,
                cool_setpoint=72.0,
            ),
            "setCoolSetpointF",
            id="cool",
        ),
@@ -240,16 +285,7 @@ async def test_set_temperature(
@pytest.mark.parametrize(
    "mock_climate_variables",
    [
        {
            123: {
                "HVAC_STATE": "idle",
                "HVAC_MODE": "Auto",
                "TEMPERATURE_F": 70.0,
                "HUMIDITY": 50,
                "COOL_SETPOINT_F": 75.0,
                "HEAT_SETPOINT_F": 68.0,
            }
        }
        _make_climate_data(hvac_state="Off", hvac_mode="Auto"),
    ],
)
@pytest.mark.usefixtures(
@@ -300,7 +336,7 @@ async def test_climate_not_created_when_no_initial_data(
    [
        {
            123: {
                "HVAC_STATE": "idle",
                "HVAC_STATE": "Off",
                "HVAC_MODE": "Heat",
                # Missing TEMPERATURE_F and HUMIDITY
                "COOL_SETPOINT_F": 75.0,
@@ -331,16 +367,7 @@ async def test_climate_missing_variables(
@pytest.mark.parametrize(
    "mock_climate_variables",
    [
        {
            123: {
                "HVAC_STATE": "idle",
                "HVAC_MODE": "UnknownMode",
                "TEMPERATURE_F": 72.0,
                "HUMIDITY": 50,
                "COOL_SETPOINT_F": 75.0,
                "HEAT_SETPOINT_F": 68.0,
            }
        }
        _make_climate_data(hvac_state="off", hvac_mode="UnknownMode"),
    ],
)
@pytest.mark.usefixtures(
@@ -362,16 +389,7 @@ async def test_climate_unknown_hvac_mode(
@pytest.mark.parametrize(
    "mock_climate_variables",
    [
        {
            123: {
                "HVAC_STATE": "unknown_state",
                "HVAC_MODE": "Heat",
                "TEMPERATURE_F": 72.0,
                "HUMIDITY": 50,
                "COOL_SETPOINT_F": 75.0,
                "HEAT_SETPOINT_F": 68.0,
            }
        }
        _make_climate_data(hvac_state="unknown_state"),
    ],
)
@pytest.mark.usefixtures(

@@ -19,6 +19,8 @@ CONFIG = {
}

GLUCOSE_READING = GlucoseReading(json.loads(load_fixture("data.json", "dexcom")))
TEST_ACCOUNT_ID = "99999999-9999-9999-9999-999999999999"
TEST_SESSION_ID = "55555555-5555-5555-5555-555555555555"


async def init_integration(
@@ -38,8 +40,12 @@ async def init_integration(
            return_value=GLUCOSE_READING,
        ),
        patch(
            "homeassistant.components.dexcom.Dexcom.create_session",
            return_value="test_session_id",
            "homeassistant.components.dexcom.Dexcom._get_account_id",
            return_value=TEST_ACCOUNT_ID,
        ),
        patch(
            "homeassistant.components.dexcom.Dexcom._get_session_id",
            return_value=TEST_SESSION_ID,
        ),
    ):
        entry.add_to_hass(hass)

@@ -1,7 +1,7 @@
{
  "DT": "/Date(1587165223000+0000)/",
  "ST": "/Date(1587179623000)/",
  "Trend": 4,
  "Value": 110,
  "WT": "/Date(1587179623000)/"
  "WT": "Date(1745081913085)",
  "ST": "Date(1745081913085)",
  "DT": "Date(1745081913085-0400)",
  "Value": 100,
  "Trend": "Flat"
}

@@ -2,7 +2,7 @@

from unittest.mock import patch

from pydexcom import AccountError, SessionError
from pydexcom.errors import AccountError, SessionError

from homeassistant import config_entries
from homeassistant.components.dexcom.const import DOMAIN
@@ -23,10 +23,7 @@ async def test_form(hass: HomeAssistant) -> None:
    assert result["errors"] == {}

    with (
        patch(
            "homeassistant.components.dexcom.config_flow.Dexcom.create_session",
            return_value="test_session_id",
        ),
        patch("homeassistant.components.dexcom.config_flow.Dexcom"),
        patch(
            "homeassistant.components.dexcom.async_setup_entry",
            return_value=True,

@@ -2,7 +2,7 @@

from unittest.mock import patch

from pydexcom import AccountError, SessionError
from pydexcom.errors import AccountError, SessionError

from homeassistant.components.dexcom.const import DOMAIN
from homeassistant.config_entries import ConfigEntryState

@@ -2,7 +2,7 @@

from unittest.mock import patch

from pydexcom import SessionError
from pydexcom.errors import SessionError

from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN
from homeassistant.core import HomeAssistant

@@ -2,6 +2,8 @@

from unittest.mock import patch

import pytest

from homeassistant import config as hass_config
from homeassistant.components.intent_script import DOMAIN
from homeassistant.const import ATTR_FRIENDLY_NAME, SERVICE_RELOAD
@@ -294,6 +296,120 @@ async def test_intent_script_targets(
    calls.clear()


async def test_intent_script_action_validation(
    hass: HomeAssistant,
    entity_registry: er.EntityRegistry,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test action validation in intent scripts.

    This tests that async_validate_actions_config is called during setup,
    which resolves entity registry IDs to entity IDs in conditions.
    Without async_validate_actions_config, the entity registry ID would not
    be resolved and the condition would fail.
    """
    calls = async_mock_service(hass, "test", "service")

    entry = entity_registry.async_get_or_create(
        "binary_sensor", "test", "1234", suggested_object_id="test_sensor"
    )
    assert entry.entity_id == "binary_sensor.test_sensor"

    # Use a non-existent entity registry ID to trigger validation error
    non_existent_registry_id = "abcd1234abcd1234abcd1234abcd1234"

    await async_setup_component(
        hass,
        "intent_script",
        {
            "intent_script": {
                "ChooseWithRegistryIdIntent": {
                    "action": [
                        {
                            "choose": [
                                {
                                    "conditions": [
                                        {
                                            "condition": "state",
                                            # Use entity registry ID instead of entity_id
                                            # This requires async_validate_actions_config
                                            # to resolve to the actual entity_id
                                            "entity_id": entry.id,
                                            "state": "on",
                                        }
                                    ],
                                    "sequence": [
                                        {
                                            "action": "test.service",
                                            "data": {"result": "sensor_on"},
                                        }
                                    ],
                                }
                            ],
                            "default": [
                                {
                                    "action": "test.service",
                                    "data": {"result": "sensor_off"},
                                }
                            ],
                        }
                    ],
                    "speech": {"text": "Done"},
                },
                # This intent has an invalid entity registry ID and should fail validation
                "InvalidIntent": {
                    "action": [
                        {
                            "choose": [
                                {
                                    "conditions": [
                                        {
                                            "condition": "state",
                                            "entity_id": non_existent_registry_id,
                                            "state": "on",
                                        }
                                    ],
                                    "sequence": [
                                        {"action": "test.service"},
                                    ],
                                }
                            ],
                        }
                    ],
                    "speech": {"text": "Invalid"},
                },
            }
        },
    )

    # Verify that the invalid intent logged an error
    assert "Failed to validate actions for intent InvalidIntent" in caplog.text

    # The invalid intent should not be registered
    with pytest.raises(intent.UnknownIntent):
        await intent.async_handle(hass, "test", "InvalidIntent")

    # Test when condition is true (sensor is "on")
    hass.states.async_set("binary_sensor.test_sensor", "on")

    response = await intent.async_handle(hass, "test", "ChooseWithRegistryIdIntent")

    assert len(calls) == 1
    assert calls[0].data["result"] == "sensor_on"
    assert response.speech["plain"]["speech"] == "Done"

    calls.clear()

    # Test when condition is false (sensor is "off")
    hass.states.async_set("binary_sensor.test_sensor", "off")

    response = await intent.async_handle(hass, "test", "ChooseWithRegistryIdIntent")

    assert len(calls) == 1
    assert calls[0].data["result"] == "sensor_off"
    assert response.speech["plain"]["speech"] == "Done"


async def test_reload(hass: HomeAssistant) -> None:
    """Verify we can reload intent config."""

@@ -368,7 +368,7 @@ async def test_lovelace_from_yaml_creates_repair_issue(
    """Test YAML mode creates a repair issue."""
    assert await async_setup_component(hass, "lovelace", {"lovelace": {"mode": "YAML"}})

    # Panel should still be registered for backwards compatibility
    # Panel should be registered as a YAML dashboard
    assert hass.data[frontend.DATA_PANELS]["lovelace"].config == {"mode": "yaml"}

    # Repair issue should be created
@@ -803,3 +803,47 @@ async def test_lovelace_no_migration_no_default_panel_set(
    response = await client.receive_json()
    assert response["success"]
    assert response["result"]["value"] is None


async def test_lovelace_info_default(
    hass: HomeAssistant, hass_ws_client: WebSocketGenerator
) -> None:
    """Test lovelace/info returns default resource_mode."""
    assert await async_setup_component(hass, "lovelace", {})

    client = await hass_ws_client(hass)

    await client.send_json({"id": 5, "type": "lovelace/info"})
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] == {"resource_mode": "storage"}


async def test_lovelace_info_yaml_resource_mode(
    hass: HomeAssistant, hass_ws_client: WebSocketGenerator
) -> None:
    """Test lovelace/info returns yaml resource_mode."""
    assert await async_setup_component(
        hass, "lovelace", {"lovelace": {"resource_mode": "yaml"}}
    )

    client = await hass_ws_client(hass)

    await client.send_json({"id": 5, "type": "lovelace/info"})
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] == {"resource_mode": "yaml"}


async def test_lovelace_info_yaml_mode_fallback(
    hass: HomeAssistant, hass_ws_client: WebSocketGenerator
) -> None:
    """Test lovelace/info returns yaml resource_mode when mode is yaml."""
    assert await async_setup_component(hass, "lovelace", {"lovelace": {"mode": "yaml"}})

    client = await hass_ws_client(hass)

    await client.send_json({"id": 5, "type": "lovelace/info"})
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] == {"resource_mode": "yaml"}

@@ -5,7 +5,9 @@ from typing import Any
from unittest.mock import MagicMock, patch

import pytest
import voluptuous as vol

from homeassistant.components.lovelace import _validate_url_slug
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component

@@ -96,3 +98,30 @@ async def test_create_dashboards_when_not_onboarded(
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] == {"strategy": {"type": "map"}}


@pytest.mark.parametrize(
    ("value", "expected"),
    [
        ("lovelace", "lovelace"),
        ("my-dashboard", "my-dashboard"),
        ("my-cool-dashboard", "my-cool-dashboard"),
    ],
)
def test_validate_url_slug_valid(value: str, expected: str) -> None:
    """Test _validate_url_slug with valid values."""
    assert _validate_url_slug(value) == expected


@pytest.mark.parametrize(
    ("value", "error_message"),
    [
        (None, r"Slug should not be None"),
        ("nodash", r"Url path needs to contain a hyphen \(-\)"),
        ("my-dash board", r"invalid slug my-dash board \(try my-dash-board\)"),
    ],
)
def test_validate_url_slug_invalid(value: Any, error_message: str) -> None:
    """Test _validate_url_slug with invalid values."""
    with pytest.raises(vol.Invalid, match=error_message):
        _validate_url_slug(value)

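From these expectations one can infer the rough shape of _validate_url_slug; a
minimal sketch consistent with the tests (the real implementation in the
lovelace component may differ):

# Sketch only, inferred from the test cases above.
import voluptuous as vol

from homeassistant.util import slugify


def _validate_url_slug(value: str | None) -> str:
    """Validate a dashboard url_path: non-None, hyphenated, slug-safe."""
    if value is None:
        raise vol.Invalid("Slug should not be None")
    if "-" not in value:
        raise vol.Invalid("Url path needs to contain a hyphen (-)")
    slg = slugify(value, separator="-")
    if value == slg:
        return value
    raise vol.Invalid(f"invalid slug {value} (try {slg})")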
@@ -62,7 +62,8 @@ async def test_system_health_info_yaml(hass: HomeAssistant) -> None:
        return_value={"views": [{"cards": []}]},
    ):
        info = await get_system_health_info(hass, "lovelace")
    assert info == {"dashboards": 1, "mode": "yaml", "resources": 0, "views": 1}
    # 2 dashboards: default storage (None) + yaml "lovelace" dashboard
    assert info == {"dashboards": 2, "mode": "yaml", "resources": 0, "views": 1}


async def test_system_health_info_yaml_not_found(hass: HomeAssistant) -> None:
@@ -71,8 +72,9 @@ async def test_system_health_info_yaml_not_found(hass: HomeAssistant) -> None:
    assert await async_setup_component(hass, "lovelace", {"lovelace": {"mode": "YAML"}})
    await hass.async_block_till_done()
    info = await get_system_health_info(hass, "lovelace")
    # 2 dashboards: default storage (None) + yaml "lovelace" dashboard
    assert info == {
        "dashboards": 1,
        "dashboards": 2,
        "mode": "yaml",
        "error": f"{hass.config.path('ui-lovelace.yaml')} not found",
        "resources": 0,

@@ -2,6 +2,7 @@

import copy
from datetime import datetime, timedelta
from enum import StrEnum
import json
import logging
from pathlib import Path
@@ -20,6 +21,8 @@ from homeassistant.const import (
    STATE_UNAVAILABLE,
    STATE_UNKNOWN,
    UnitOfElectricPotential,
    UnitOfReactiveEnergy,
    UnitOfReactivePower,
    UnitOfTemperature,
)
from homeassistant.core import Event, HomeAssistant, State, callback
@@ -909,24 +912,113 @@ async def test_invalid_unit_of_measurement(


@pytest.mark.parametrize(
    "hass_config",
    ("hass_config", "device_class", "unit", "equivalent_unit"),
    [
        {
            mqtt.DOMAIN: {
                sensor.DOMAIN: {
                    "name": "test",
                    "state_topic": "test-topic",
                    "device_class": "voltage",
                    "unit_of_measurement": "\u00b5V",  # microVolt
        pytest.param(
            {
                mqtt.DOMAIN: {
                    sensor.DOMAIN: {
                        "name": "test",
                        "state_topic": "test-topic",
                        "device_class": "voltage",
                        "unit_of_measurement": "\u00b5V",  # microVolt
                    }
                }
            }
        }
            },
            "voltage",
            UnitOfElectricPotential.MICROVOLT,
            "\u00b5V",
            id="microvolt",
        ),
        pytest.param(
            {
                mqtt.DOMAIN: {
                    sensor.DOMAIN: {
                        "name": "test",
                        "state_topic": "test-topic",
                        "device_class": "reactive_power",
                        "unit_of_measurement": "mVAr",
                    }
                }
            },
            "reactive_power",
            UnitOfReactivePower.MILLIVOLT_AMPERE_REACTIVE,
            "mVAr",
            id="mvar",
        ),
        pytest.param(
            {
                mqtt.DOMAIN: {
                    sensor.DOMAIN: {
                        "name": "test",
                        "state_topic": "test-topic",
                        "device_class": "reactive_power",
                        "unit_of_measurement": "VAr",
                    }
                }
            },
            "reactive_power",
            UnitOfReactivePower.VOLT_AMPERE_REACTIVE,
            "VAr",
            id="var",
        ),
        pytest.param(
            {
                mqtt.DOMAIN: {
                    sensor.DOMAIN: {
                        "name": "test",
                        "state_topic": "test-topic",
                        "device_class": "reactive_power",
                        "unit_of_measurement": "kVAr",
                    }
                }
            },
            "reactive_power",
            UnitOfReactivePower.KILO_VOLT_AMPERE_REACTIVE,
            "kVAr",
            id="kvar",
        ),
        pytest.param(
            {
                mqtt.DOMAIN: {
                    sensor.DOMAIN: {
                        "name": "test",
                        "state_topic": "test-topic",
                        "device_class": "reactive_energy",
                        "unit_of_measurement": "VArh",
                    }
                }
            },
            "reactive_energy",
            UnitOfReactiveEnergy.VOLT_AMPERE_REACTIVE_HOUR,
            "VArh",
            id="varh",
        ),
        pytest.param(
            {
                mqtt.DOMAIN: {
                    sensor.DOMAIN: {
                        "name": "test",
                        "state_topic": "test-topic",
                        "device_class": "reactive_energy",
                        "unit_of_measurement": "kVArh",
                    }
                }
            },
            "reactive_energy",
            UnitOfReactiveEnergy.KILO_VOLT_AMPERE_REACTIVE_HOUR,
            "kVArh",
            id="kvarh",
        ),
    ],
)
async def test_device_class_with_equivalent_unit_of_measurement_received(
    hass: HomeAssistant,
    mqtt_mock_entry: MqttMockHAClientGenerator,
    caplog: pytest.LogCaptureFixture,
    device_class: str,
    unit: StrEnum,
    equivalent_unit: str,
) -> None:
    """Test device_class with equivalent unit of measurement."""
    assert await mqtt_mock_entry()
@@ -935,20 +1027,17 @@ async def test_device_class_with_equivalent_unit_of_measurement_received(
    state = hass.states.get("sensor.test")
    assert state is not None
    assert state.state == "100"
    assert (
        state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
        is UnitOfElectricPotential.MICROVOLT
    )
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is unit

    caplog.clear()

    discovery_payload = {
        "name": "bla",
        "state_topic": "test-topic2",
        "device_class": "voltage",
        "unit_of_measurement": "\u00b5V",
        "device_class": device_class,
        "unit_of_measurement": equivalent_unit,
    }
    # Now discover a sensor with an altarantive mu char
    # Now discover a sensor with an ambiguous unit
    async_fire_mqtt_message(
        hass, "homeassistant/sensor/bla/config", json.dumps(discovery_payload)
    )
@@ -958,10 +1047,7 @@ async def test_device_class_with_equivalent_unit_of_measurement_received(
    state = hass.states.get("sensor.bla")
    assert state is not None
    assert state.state == "21"
    assert (
        state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
        is UnitOfElectricPotential.MICROVOLT
    )
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is unit


@pytest.mark.parametrize(

@@ -3892,6 +3892,21 @@ async def test_compile_hourly_statistics_convert_units_1(
        ),
        (None, "\u00b5g", "\u03bcg", None, "mass", 13.050847, 13.333333, -10, 30),
        (None, "\u00b5s", "\u03bcs", None, "duration", 13.050847, 13.333333, -10, 30),
        (None, "mVAr", "mvar", None, "reactive_power", 13.050847, 13.333333, -10, 30),
        (None, "VAr", "var", None, "reactive_power", 13.050847, 13.333333, -10, 30),
        (None, "kVAr", "kvar", None, "reactive_power", 13.050847, 13.333333, -10, 30),
        (None, "VArh", "varh", None, "reactive_energy", 13.050847, 13.333333, -10, 30),
        (
            None,
            "kVArh",
            "kvarh",
            None,
            "reactive_energy",
            13.050847,
            13.333333,
            -10,
            30,
        ),
    ],
)
async def test_compile_hourly_statistics_equivalent_units_1(
@@ -4032,6 +4047,16 @@ async def test_compile_hourly_statistics_equivalent_units_1(
        (SensorDeviceClass.WEIGHT, "\u00b5g", "\u03bcg", None, 13.333333, -10, 30),
        (None, "\u00b5s", "\u03bcs", None, 13.333333, -10, 30),
        (SensorDeviceClass.DURATION, "\u00b5s", "\u03bcs", None, 13.333333, -10, 30),
        (None, "mVAr", "mvar", None, 13.333333, -10, 30),
        (None, "VAr", "var", None, 13.333333, -10, 30),
        (None, "kVAr", "kvar", None, 13.333333, -10, 30),
        (None, "VArh", "varh", None, 13.333333, -10, 30),
        (None, "kVArh", "kvarh", None, 13.333333, -10, 30),
        (SensorDeviceClass.REACTIVE_POWER, "mVAr", "mvar", None, 13.333333, -10, 30),
        (SensorDeviceClass.REACTIVE_POWER, "VAr", "var", None, 13.333333, -10, 30),
        (SensorDeviceClass.REACTIVE_POWER, "kVAr", "kvar", None, 13.333333, -10, 30),
        (SensorDeviceClass.REACTIVE_ENERGY, "VArh", "varh", None, 13.333333, -10, 30),
        (SensorDeviceClass.REACTIVE_ENERGY, "kVArh", "kvarh", None, 13.333333, -10, 30),
    ],
)
async def test_compile_hourly_statistics_equivalent_units_2(
@@ -6008,6 +6033,11 @@ async def test_validate_statistics_unit_change_no_conversion(
        (NONE_SENSOR_ATTRIBUTES, "\u00b5mol/s⋅m²", "\u03bcmol/s⋅m²"),
        (NONE_SENSOR_ATTRIBUTES, "\u00b5g", "\u03bcg"),
        (NONE_SENSOR_ATTRIBUTES, "\u00b5s", "\u03bcs"),
        (NONE_SENSOR_ATTRIBUTES, "mVAr", "mvar"),
        (NONE_SENSOR_ATTRIBUTES, "VAr", "var"),
        (NONE_SENSOR_ATTRIBUTES, "kVAr", "kvar"),
        (NONE_SENSOR_ATTRIBUTES, "VArh", "varh"),
        (NONE_SENSOR_ATTRIBUTES, "kVArh", "kvarh"),
    ],
)
async def test_validate_statistics_unit_change_equivalent_units(
@@ -6105,6 +6135,11 @@ async def test_validate_statistics_unit_change_equivalent_units(
            "\u00b5s",
            "d, h, min, ms, s, w, \u03bcs",
        ),
        (NONE_SENSOR_ATTRIBUTES, "reactive_power", "mvar", "mVAr", "kvar, mvar, var"),
        (NONE_SENSOR_ATTRIBUTES, "reactive_power", "var", "VAr", "kvar, mvar, var"),
        (NONE_SENSOR_ATTRIBUTES, "reactive_power", "kvar", "kVAr", "kvar, mvar, var"),
        (NONE_SENSOR_ATTRIBUTES, "reactive_energy", "varh", "VArh", "kvarh, varh"),
        (NONE_SENSOR_ATTRIBUTES, "reactive_energy", "kvarh", "kVArh", "kvarh, varh"),
    ],
)
async def test_validate_statistics_unit_change_equivalent_units_2(

@@ -516,7 +516,7 @@ async def test_coiot_missing_or_wrong_peer_issue(
    monkeypatch.setitem(
        mock_block_device.settings,
        "coiot",
        {"enabled": False, "update_period": 15, "peer": "wrong.peer.address"},
        {"enabled": False, "update_period": 15, "peer": "wrong.peer.address:5683"},
    )
    issue_id = COIOT_UNCONFIGURED_ISSUE_ID.format(unique=MOCK_MAC)
    assert await async_setup_component(hass, "repairs", {})
@@ -555,7 +555,7 @@ async def test_coiot_exception(
    monkeypatch.setitem(
        mock_block_device.settings,
        "coiot",
        {"enabled": True, "update_period": 15, "peer": "correct.peer.address"},
        {"enabled": True, "update_period": 15, "peer": "correct.peer.address:5683"},
    )
    issue_id = COIOT_UNCONFIGURED_ISSUE_ID.format(unique=MOCK_MAC)
    assert await async_setup_component(hass, "repairs", {})
@@ -584,22 +584,34 @@ async def test_coiot_exception(
    assert len(issue_registry.issues) == 1


@pytest.mark.parametrize(
    "raw_url",
    [
        "http://10.10.10.10:8123",
        "https://homeassistant.local:443",
    ],
)
async def test_coiot_configured_no_issue_created(
    hass: HomeAssistant,
    mock_block_device: Mock,
    issue_registry: ir.IssueRegistry,
    monkeypatch: pytest.MonkeyPatch,
    raw_url: str,
) -> None:
    """Test no repair issues when CoIoT configuration is missing."""
    monkeypatch.setitem(
        mock_block_device.settings,
        "coiot",
        {"enabled": True, "update_period": 15, "peer": "10.10.10.10"},
        {"enabled": True, "update_period": 15, "peer": "10.10.10.10:5683"},
    )
    issue_id = COIOT_UNCONFIGURED_ISSUE_ID.format(unique=MOCK_MAC)
    assert await async_setup_component(hass, "repairs", {})
    await hass.async_block_till_done()
    await init_integration(hass, 1)
    with patch(
        "homeassistant.components.shelly.utils.get_url",
        return_value=raw_url,
    ):
        await hass.async_block_till_done()
        await init_integration(hass, 1)

    assert issue_registry.async_get_issue(DOMAIN, issue_id) is None

@@ -634,7 +646,7 @@ async def test_coiot_no_hass_url(
    monkeypatch.setitem(
        mock_block_device.settings,
        "coiot",
        {"enabled": True, "update_period": 15, "peer": "correct.peer.address"},
        {"enabled": True, "update_period": 15, "peer": "correct.peer.address:5683"},
    )
    issue_id = COIOT_UNCONFIGURED_ISSUE_ID.format(unique=MOCK_MAC)
    assert await async_setup_component(hass, "repairs", {})

@@ -1,7 +1,8 @@
"""Common fixtures for the todoist tests."""

from collections.abc import Generator
from collections.abc import AsyncGenerator, Callable, Generator
from http import HTTPStatus
from typing import TypeVar
from unittest.mock import AsyncMock, patch

import pytest
@@ -13,15 +14,55 @@ from homeassistant.components.todoist import DOMAIN
from homeassistant.const import CONF_TOKEN, Platform
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
from homeassistant.util import dt as dt_util

from tests.common import MockConfigEntry

T = TypeVar("T")

PROJECT_ID = "project-id-1"
SECTION_ID = "section-id-1"
SUMMARY = "A task"
TOKEN = "some-token"
TODAY = dt_util.now().strftime("%Y-%m-%d")


async def _async_generator(items: list[T]) -> AsyncGenerator[list[T]]:
    """Create an async generator that yields items as a single page."""
    yield items


def make_api_response(items: list[T]) -> Callable[[], AsyncGenerator[list[T]]]:
    """Create a callable that returns a fresh async generator each time.

    This is needed because async generators can only be iterated once,
    but mocks may be called multiple times.
    """

    async def _generator(*args, **kwargs) -> AsyncGenerator[list[T]]:
        async for page in _async_generator(items):
            yield page

    return _generator

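A short usage sketch of make_api_response (illustrative payloads): assigning the
returned callable to a mock's side_effect gives every call a fresh async
generator, mirroring how the mock_api helper below wires up the API:

# Illustrative only; mirrors the side_effect wiring used further down.
from unittest.mock import AsyncMock

api = AsyncMock()
api.get_tasks.side_effect = make_api_response(["task-a", "task-b"])

# Each call yields a new single-page generator, so repeated calls both work:
# async for page in api.get_tasks():
#     assert page == ["task-a", "task-b"]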
def make_api_due(
    date: str,
    is_recurring: bool = False,
    string: str = "",
    timezone: str | None = None,
) -> Due:
    """Create a Due object using from_dict to match API deserialization behavior.

    This ensures the date field is properly converted to date/datetime objects
    just like the real API response deserialization does.
    """
    data: dict = {
        "date": date,
        "is_recurring": is_recurring,
        "string": string,
    }
    if timezone is not None:
        data["timezone"] = timezone
    return Due.from_dict(data)


@pytest.fixture
@@ -35,16 +76,18 @@ def mock_setup_entry() -> Generator[AsyncMock]:

@pytest.fixture(name="due")
def mock_due() -> Due:
    """Mock a todoist Task Due date/time."""
    return Due(
        is_recurring=False, date=dt_util.now().strftime("%Y-%m-%d"), string="today"
    )
    """Mock a todoist Task Due date/time.

    Uses a fixed date matching the frozen test time in test_calendar.py
    and test_todo.py (2024-05-24 12:00:00).
    """
    return make_api_due(date="2024-05-24", string="today")


def make_api_task(
    id: str | None = None,
    content: str | None = None,
    is_completed: bool = False,
    completed_at: str | None = None,
    due: Due | None = None,
    project_id: str | None = None,
    description: str | None = None,
@@ -54,12 +97,11 @@ def make_api_task(
    return Task(
        assignee_id="1",
        assigner_id="1",
        comment_count=0,
        is_completed=is_completed,
        completed_at=completed_at,
        content=content or SUMMARY,
        created_at="2021-10-01T00:00:00",
        creator_id="1",
        description=description,
        description=description or "",
        due=due,
        id=id or "1",
        labels=["Label1"],
@@ -68,9 +110,10 @@ def make_api_task(
        priority=1,
        project_id=project_id or PROJECT_ID,
        section_id=None,
        url="https://todoist.com",
        sync_id=None,
        duration=None,
        deadline=None,
        is_collapsed=False,
        updated_at="2021-10-01T00:00:00",
    )


@@ -84,38 +127,45 @@ def mock_tasks(due: Due) -> list[Task]:
def mock_api(tasks: list[Task]) -> AsyncMock:
    """Mock the api state."""
    api = AsyncMock()
    api.get_projects.return_value = [
        Project(
            id=PROJECT_ID,
            color="blue",
            comment_count=0,
            is_favorite=False,
            name="Name",
            is_shared=False,
            url="",
            is_inbox_project=False,
            is_team_inbox=False,
            can_assign_tasks=False,
            order=1,
            parent_id=None,
            view_style="list",
        )
    ]
    api.get_sections.return_value = [
        Section(
            id=SECTION_ID,
            project_id=PROJECT_ID,
            name="Section Name",
            order=1,
        )
    ]
    api.get_labels.return_value = [
        Label(id="1", name="Label1", color="1", order=1, is_favorite=False)
    ]
    api.get_collaborators.return_value = [
        Collaborator(email="user@gmail.com", id="1", name="user")
    ]
    api.get_tasks.return_value = tasks
    api.get_projects.side_effect = make_api_response(
        [
            Project(
                id=PROJECT_ID,
                color="blue",
                is_favorite=False,
                name="Name",
                is_shared=False,
                is_archived=False,
                is_collapsed=False,
                is_inbox_project=False,
                can_assign_tasks=False,
                order=1,
                parent_id=None,
                view_style="list",
                description="",
                created_at="2021-01-01",
                updated_at="2021-01-01",
            )
        ]
    )
    api.get_sections.side_effect = make_api_response(
        [
            Section(
                id=SECTION_ID,
                project_id=PROJECT_ID,
                name="Section Name",
                order=1,
                is_collapsed=False,
            )
        ]
    )
    api.get_labels.side_effect = make_api_response(
        [Label(id="1", name="Label1", color="1", order=1, is_favorite=False)]
    )
    api.get_collaborators.side_effect = make_api_response(
        [Collaborator(email="user@gmail.com", id="1", name="user")]
    )
    api.get_tasks.side_effect = make_api_response(tasks)
    return api

@@ -29,7 +29,7 @@ from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_component import async_update_entity
from homeassistant.util import dt as dt_util

from .conftest import PROJECT_ID, SECTION_ID, SUMMARY
from .conftest import PROJECT_ID, SECTION_ID, SUMMARY, make_api_due

from tests.typing import ClientSessionGenerator
@@ -147,7 +147,7 @@ async def test_update_entity_for_custom_project_no_due_date_on(
@pytest.mark.parametrize(
    "due",
    [
        Due(
        make_api_due(
            # Note: This runs before the test fixture that sets the timezone
            date=(
                datetime(
@@ -164,6 +164,7 @@ async def test_update_entity_for_calendar_with_due_date_in_the_future(
    hass: HomeAssistant,
    freezer: FrozenDateTimeFactory,
    api: AsyncMock,
    due: Due,
) -> None:
    """Test that a task with a due date in the future has on state and correct end_time."""
    await async_update_entity(hass, "calendar.name")
@@ -216,37 +217,37 @@ async def test_calendar_custom_project_unique_id(
    ("due", "start", "end", "expected_response"),
    [
        (
            Due(date="2023-03-30", is_recurring=False, string="Mar 30"),
            make_api_due(date="2023-03-30", is_recurring=False, string="Mar 30"),
            "2023-03-28T00:00:00.000Z",
            "2023-04-01T00:00:00.000Z",
            [get_events_response({"date": "2023-03-30"}, {"date": "2023-03-31"})],
        ),
        (
            Due(date="2023-03-30", is_recurring=False, string="Mar 30"),
            make_api_due(date="2023-03-30", is_recurring=False, string="Mar 30"),
            "2023-03-30T06:00:00.000Z",
            "2023-03-31T06:00:00.000Z",
            [get_events_response({"date": "2023-03-30"}, {"date": "2023-03-31"})],
        ),
        (
            Due(date="2023-03-30", is_recurring=False, string="Mar 30"),
            make_api_due(date="2023-03-30", is_recurring=False, string="Mar 30"),
            "2023-03-29T08:00:00.000Z",
            "2023-03-30T08:00:00.000Z",
            [get_events_response({"date": "2023-03-30"}, {"date": "2023-03-31"})],
        ),
        (
            Due(date="2023-03-30", is_recurring=False, string="Mar 30"),
            make_api_due(date="2023-03-30", is_recurring=False, string="Mar 30"),
            "2023-03-30T08:00:00.000Z",
            "2023-03-31T08:00:00.000Z",
            [get_events_response({"date": "2023-03-30"}, {"date": "2023-03-31"})],
        ),
        (
            Due(date="2023-03-30", is_recurring=False, string="Mar 30"),
            make_api_due(date="2023-03-30", is_recurring=False, string="Mar 30"),
            "2023-03-31T08:00:00.000Z",
            "2023-04-01T08:00:00.000Z",
            [],
        ),
        (
            Due(date="2023-03-30", is_recurring=False, string="Mar 30"),
            make_api_due(date="2023-03-30", is_recurring=False, string="Mar 30"),
            "2023-03-29T06:00:00.000Z",
            "2023-03-30T06:00:00.000Z",
            [],
@@ -334,25 +335,22 @@ async def test_create_task_service_call_with_section(
    [
        # These are all equivalent due dates for the same time in different
        # timezone formats.
        Due(
            date="2023-03-30",
        make_api_due(
            date="2023-03-31T00:00:00Z",
            is_recurring=False,
            string="Mar 30 6:00 PM",
            datetime="2023-03-31T00:00:00Z",
            timezone="America/Regina",
        ),
        Due(
            date="2023-03-30",
        make_api_due(
            date="2023-03-31T00:00:00Z",
            is_recurring=False,
            string="Mar 30 7:00 PM",
            datetime="2023-03-31T00:00:00Z",
            timezone="America/Los_Angeles",
        ),
        Due(
            date="2023-03-30",
        make_api_due(
            date="2023-03-30T18:00:00",
            is_recurring=False,
            string="Mar 30 6:00 PM",
            datetime="2023-03-30T18:00:00",
        ),
    ],
    ids=("in_local_timezone", "in_other_timezone", "floating"),
@@ -431,35 +429,35 @@ async def test_task_due_datetime(
    [
        (
            {"custom_projects": [{"name": "Test", "labels": ["Label1"]}]},
            Due(date="2023-03-30", is_recurring=False, string="Mar 30"),
            make_api_due(date="2023-03-30", is_recurring=False, string="Mar 30"),
            "2023-03-28T00:00:00.000Z",
            "2023-04-01T00:00:00.000Z",
            [get_events_response({"date": "2023-03-30"}, {"date": "2023-03-31"})],
        ),
        (
            {"custom_projects": [{"name": "Test", "labels": ["custom"]}]},
            Due(date="2023-03-30", is_recurring=False, string="Mar 30"),
            make_api_due(date="2023-03-30", is_recurring=False, string="Mar 30"),
            "2023-03-28T00:00:00.000Z",
            "2023-04-01T00:00:00.000Z",
            [],
        ),
        (
            {"custom_projects": [{"name": "Test", "include_projects": ["Name"]}]},
            Due(date="2023-03-30", is_recurring=False, string="Mar 30"),
            make_api_due(date="2023-03-30", is_recurring=False, string="Mar 30"),
            "2023-03-28T00:00:00.000Z",
            "2023-04-01T00:00:00.000Z",
            [get_events_response({"date": "2023-03-30"}, {"date": "2023-03-31"})],
        ),
        (
            {"custom_projects": [{"name": "Test", "due_date_days": 1}]},
            Due(date="2023-03-30", is_recurring=False, string="Mar 30"),
            make_api_due(date="2023-03-30", is_recurring=False, string="Mar 30"),
            "2023-03-28T00:00:00.000Z",
            "2023-04-01T00:00:00.000Z",
            [get_events_response({"date": "2023-03-30"}, {"date": "2023-03-31"})],
        ),
        (
            {"custom_projects": [{"name": "Test", "due_date_days": 1}]},
            Due(
            make_api_due(
                date=(dt_util.now() + timedelta(days=2)).strftime("%Y-%m-%d"),
                is_recurring=False,
                string="Mar 30",
@@ -497,11 +495,10 @@ async def test_events_filtered_for_custom_projects(
    ("due", "setup_platform"),
    [
        (
            Due(
                date="2023-03-30",
            make_api_due(
                date="2023-03-31T00:00:00Z",
                is_recurring=False,
                string="Mar 30 6:00 PM",
                datetime="2023-03-31T00:00:00Z",
                timezone="America/Regina",
            ),
            None,
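
The "equivalent due dates" cases above can be checked by hand for the Regina entries: America/Regina is UTC-6 year-round (no DST), so Mar 30 6:00 PM local is exactly 2023-03-31T00:00:00Z. A quick verification:

    from datetime import datetime, timezone
    from zoneinfo import ZoneInfo

    # 6:00 PM on 2023-03-30 in Regina (UTC-6, no DST) ...
    local = datetime(2023, 3, 30, 18, 0, tzinfo=ZoneInfo("America/Regina"))
    # ... is midnight UTC on 2023-03-31.
    assert local.astimezone(timezone.utc).isoformat() == "2023-03-31T00:00:00+00:00"
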
@@ -4,7 +4,7 @@ from typing import Any
from unittest.mock import AsyncMock

import pytest
from todoist_api_python.models import Due, Task
from todoist_api_python.models import Task

from homeassistant.components.todo import (
    ATTR_DESCRIPTION,
@@ -20,7 +20,7 @@ from homeassistant.const import ATTR_ENTITY_ID, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_component import async_update_entity

from .conftest import PROJECT_ID, make_api_task
from .conftest import PROJECT_ID, make_api_due, make_api_response, make_api_task

from tests.typing import WebSocketGenerator
@@ -41,12 +41,19 @@ async def set_time_zone(hass: HomeAssistant) -> None:
    ("tasks", "expected_state"),
    [
        ([], "0"),
        ([make_api_task(id="12345", content="Soda", is_completed=False)], "1"),
        ([make_api_task(id="12345", content="Soda", is_completed=True)], "0"),
        ([make_api_task(id="12345", content="Soda", completed_at=None)], "1"),
        (
            [
                make_api_task(id="12345", content="Milk", is_completed=False),
                make_api_task(id="54321", content="Soda", is_completed=False),
                make_api_task(
                    id="12345", content="Soda", completed_at="2021-10-01T00:00:00"
                )
            ],
            "0",
        ),
        (
            [
                make_api_task(id="12345", content="Milk", completed_at=None),
                make_api_task(id="54321", content="Soda", completed_at=None),
            ],
            "2",
        ),
@@ -55,7 +62,7 @@ async def set_time_zone(hass: HomeAssistant) -> None:
            make_api_task(
                id="12345",
                content="Soda",
                is_completed=False,
                completed_at=None,
                project_id="other-project-id",
            )
        ],
@@ -64,7 +71,7 @@ async def set_time_zone(hass: HomeAssistant) -> None:
        (
            [
                make_api_task(
                    id="12345", content="sub-task", is_completed=False, parent_id="1"
                    id="12345", content="sub-task", completed_at=None, parent_id="1"
                )
            ],
            "0",
@@ -89,7 +96,7 @@ async def test_todo_item_state(
    (
        [],
        {},
        [make_api_task(id="task-id-1", content="Soda", is_completed=False)],
        [make_api_task(id="task-id-1", content="Soda", completed_at=None)],
        {"content": "Soda", "due_string": "no date", "description": ""},
        {"uid": "task-id-1", "summary": "Soda", "status": "needs_action"},
    ),
@@ -100,8 +107,10 @@ async def test_todo_item_state(
            make_api_task(
                id="task-id-1",
                content="Soda",
                is_completed=False,
                due=Due(is_recurring=False, date="2023-11-18", string="today"),
                completed_at=None,
                due=make_api_due(
                    date="2023-11-18", is_recurring=False, string="today"
                ),
            )
        ],
        {"description": "", "due_date": "2023-11-18"},
@@ -119,11 +128,10 @@ async def test_todo_item_state(
            make_api_task(
                id="task-id-1",
                content="Soda",
                is_completed=False,
                due=Due(
                    date="2023-11-18",
                completed_at=None,
                due=make_api_due(
                    date="2023-11-18T12:30:00.000000Z",
                    is_recurring=False,
                    datetime="2023-11-18T12:30:00.000000Z",
                    string="today",
                ),
            )
@@ -147,7 +155,7 @@ async def test_todo_item_state(
                id="task-id-1",
                content="Soda",
                description="6-pack",
                is_completed=False,
                completed_at=None,
            )
        ],
        {"description": "6-pack", "due_string": "no date"},
@@ -178,7 +186,7 @@ async def test_add_todo_list_item(

    api.add_task = AsyncMock()
    # Fake API response when state is refreshed after create
    api.get_tasks.return_value = tasks_after_update
    api.get_tasks.side_effect = make_api_response(tasks_after_update)

    await hass.services.async_call(
        TODO_DOMAIN,
@@ -209,7 +217,7 @@ async def test_add_todo_list_item(


@pytest.mark.parametrize(
    ("tasks"), [[make_api_task(id="task-id-1", content="Soda", is_completed=False)]]
    ("tasks"), [[make_api_task(id="task-id-1", content="Soda", completed_at=None)]]
)
async def test_update_todo_item_status(
    hass: HomeAssistant,
@@ -222,13 +230,17 @@ async def test_update_todo_item_status(
    assert state
    assert state.state == "1"

    api.close_task = AsyncMock()
    api.reopen_task = AsyncMock()
    api.complete_task = AsyncMock()
    api.uncomplete_task = AsyncMock()

    # Fake API response when state is refreshed after close
    api.get_tasks.return_value = [
        make_api_task(id="task-id-1", content="Soda", is_completed=True)
    ]
    # Fake API response when state is refreshed after complete
    api.get_tasks.side_effect = make_api_response(
        [
            make_api_task(
                id="task-id-1", content="Soda", completed_at="2021-10-01T00:00:00"
            )
        ]
    )

    await hass.services.async_call(
        TODO_DOMAIN,
@@ -237,21 +249,21 @@ async def test_update_todo_item_status(
        target={ATTR_ENTITY_ID: "todo.name"},
        blocking=True,
    )
    assert api.close_task.called
    args = api.close_task.call_args
    assert api.complete_task.called
    args = api.complete_task.call_args
    assert args
    assert args.kwargs.get("task_id") == "task-id-1"
    assert not api.reopen_task.called
    assert not api.uncomplete_task.called

    # Verify state is refreshed
    state = hass.states.get("todo.name")
    assert state
    assert state.state == "0"

    # Fake API response when state is refreshed after reopen
    api.get_tasks.return_value = [
        make_api_task(id="task-id-1", content="Soda", is_completed=False)
    ]
    # Fake API response when state is refreshed after reopening task
    api.get_tasks.side_effect = make_api_response(
        [make_api_task(id="task-id-1", content="Soda", completed_at=None)]
    )

    await hass.services.async_call(
        TODO_DOMAIN,
@@ -260,8 +272,8 @@ async def test_update_todo_item_status(
        target={ATTR_ENTITY_ID: "todo.name"},
        blocking=True,
    )
    assert api.reopen_task.called
    args = api.reopen_task.call_args
    assert api.uncomplete_task.called
    args = api.uncomplete_task.call_args
    assert args
    assert args.kwargs.get("task_id") == "task-id-1"

@@ -279,7 +291,7 @@ async def test_update_todo_item_status(
            make_api_task(
                id="task-id-1",
                content="Soda",
                is_completed=False,
                completed_at=None,
                description="desc",
            )
        ],
@@ -288,7 +300,7 @@ async def test_update_todo_item_status(
            make_api_task(
                id="task-id-1",
                content="Milk",
                is_completed=False,
                completed_at=None,
                description="desc",
            )
        ],
@@ -306,14 +318,16 @@ async def test_update_todo_item_status(
        },
    ),
    (
        [make_api_task(id="task-id-1", content="Soda", is_completed=False)],
        [make_api_task(id="task-id-1", content="Soda", completed_at=None)],
        {ATTR_DUE_DATE: "2023-11-18"},
        [
            make_api_task(
                id="task-id-1",
                content="Soda",
                is_completed=False,
                due=Due(is_recurring=False, date="2023-11-18", string="today"),
                completed_at=None,
                due=make_api_due(
                    date="2023-11-18", is_recurring=False, string="today"
                ),
            )
        ],
        {
@@ -330,17 +344,16 @@ async def test_update_todo_item_status(
        },
    ),
    (
        [make_api_task(id="task-id-1", content="Soda", is_completed=False)],
        [make_api_task(id="task-id-1", content="Soda", completed_at=None)],
        {ATTR_DUE_DATETIME: "2023-11-18T06:30:00"},
        [
            make_api_task(
                id="task-id-1",
                content="Soda",
                is_completed=False,
                due=Due(
                    date="2023-11-18",
                completed_at=None,
                due=make_api_due(
                    date="2023-11-18T12:30:00.000000Z",
                    is_recurring=False,
                    datetime="2023-11-18T12:30:00.000000Z",
                    string="today",
                ),
            )
@@ -359,14 +372,14 @@ async def test_update_todo_item_status(
        },
    ),
    (
        [make_api_task(id="task-id-1", content="Soda", is_completed=False)],
        [make_api_task(id="task-id-1", content="Soda", completed_at=None)],
        {ATTR_DESCRIPTION: "6-pack"},
        [
            make_api_task(
                id="task-id-1",
                content="Soda",
                description="6-pack",
                is_completed=False,
                completed_at=None,
            )
        ],
        {
@@ -388,7 +401,7 @@ async def test_update_todo_item_status(
                id="task-id-1",
                content="Soda",
                description="6-pack",
                is_completed=False,
                completed_at=None,
            )
        ],
        {ATTR_DESCRIPTION: None},
@@ -396,7 +409,7 @@ async def test_update_todo_item_status(
            make_api_task(
                id="task-id-1",
                content="Soda",
                is_completed=False,
                completed_at=None,
                description="",
            )
        ],
@@ -418,10 +431,12 @@ async def test_update_todo_item_status(
                id="task-id-1",
                content="Soda",
                description="6-pack",
                is_completed=False,
                # Create a mock task with a string value in the Due object and verify it
                completed_at=None,
                # Create a mock task with a Due object and verify the due string
                # gets preserved when verifying the kwargs to update below
                due=Due(date="2024-01-01", is_recurring=True, string="every day"),
                due=make_api_due(
                    date="2024-01-01", is_recurring=True, string="every day"
                ),
            )
        ],
        {ATTR_DUE_DATE: "2024-02-01"},
@@ -430,8 +445,10 @@ async def test_update_todo_item_status(
                id="task-id-1",
                content="Soda",
                description="6-pack",
                is_completed=False,
                due=Due(date="2024-02-01", is_recurring=True, string="every day"),
                completed_at=None,
                due=make_api_due(
                    date="2024-02-01", is_recurring=True, string="every day"
                ),
            )
        ],
        {
@@ -477,7 +494,7 @@ async def test_update_todo_items(
    api.update_task = AsyncMock()

    # Fake API response when state is refreshed after close
    api.get_tasks.return_value = tasks_after_update
    api.get_tasks.side_effect = make_api_response(tasks_after_update)

    await hass.services.async_call(
        TODO_DOMAIN,
@@ -506,8 +523,8 @@ async def test_update_todo_items(
    ("tasks"),
    [
        [
            make_api_task(id="task-id-1", content="Soda", is_completed=False),
            make_api_task(id="task-id-2", content="Milk", is_completed=False),
            make_api_task(id="task-id-1", content="Soda", completed_at=None),
            make_api_task(id="task-id-2", content="Milk", completed_at=None),
        ]
    ],
)
@@ -524,7 +541,7 @@ async def test_remove_todo_item(

    api.delete_task = AsyncMock()
    # Fake API response when state is refreshed after close
    api.get_tasks.return_value = []
    api.get_tasks.side_effect = make_api_response([])

    await hass.services.async_call(
        TODO_DOMAIN,
@@ -545,7 +562,7 @@ async def test_remove_todo_item(


@pytest.mark.parametrize(
    ("tasks"), [[make_api_task(id="task-id-1", content="Cheese", is_completed=False)]]
    ("tasks"), [[make_api_task(id="task-id-1", content="Cheese", completed_at=None)]]
)
async def test_subscribe(
    hass: HomeAssistant,
@@ -579,9 +596,9 @@ async def test_subscribe(
    assert items[0]["uid"]

    # Fake API response when state is refreshed
    api.get_tasks.return_value = [
        make_api_task(id="test-id-1", content="Wine", is_completed=False)
    ]
    api.get_tasks.side_effect = make_api_response(
        [make_api_task(id="test-id-1", content="Wine", completed_at=None)]
    )
    await hass.services.async_call(
        TODO_DOMAIN,
        TodoServices.UPDATE_ITEM,

@@ -7,6 +7,7 @@ import pytest
from zha.application.platforms.fan.const import PRESET_MODE_ON
from zigpy.device import Device
from zigpy.profiles import zha
from zigpy.typing import UNDEFINED
from zigpy.zcl.clusters import general, hvac

from homeassistant.components.fan import (
@@ -113,28 +114,28 @@ async def test_fan(
    cluster.write_attributes.reset_mock()
    await async_turn_on(hass, entity_id)
    assert cluster.write_attributes.mock_calls == [
        call({"fan_mode": 2}, manufacturer=None)
        call({"fan_mode": 2}, manufacturer=UNDEFINED)
    ]

    # turn off from HA
    cluster.write_attributes.reset_mock()
    await async_turn_off(hass, entity_id)
    assert cluster.write_attributes.mock_calls == [
        call({"fan_mode": 0}, manufacturer=None)
        call({"fan_mode": 0}, manufacturer=UNDEFINED)
    ]

    # change speed from HA
    cluster.write_attributes.reset_mock()
    await async_set_percentage(hass, entity_id, percentage=100)
    assert cluster.write_attributes.mock_calls == [
        call({"fan_mode": 3}, manufacturer=None)
        call({"fan_mode": 3}, manufacturer=UNDEFINED)
    ]

    # change preset_mode from HA
    cluster.write_attributes.reset_mock()
    await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_ON)
    assert cluster.write_attributes.mock_calls == [
        call({"fan_mode": 4}, manufacturer=None)
        call({"fan_mode": 4}, manufacturer=UNDEFINED)
    ]

    # set invalid preset_mode from HA

@@ -6,6 +6,7 @@ from unittest.mock import call, patch
import pytest
from zigpy.device import Device
from zigpy.profiles import zha
from zigpy.typing import UNDEFINED
from zigpy.zcl.clusters import general
import zigpy.zcl.foundation as zcl_f

@@ -122,7 +123,7 @@ async def test_number(
        blocking=True,
    )
    assert cluster.write_attributes.mock_calls == [
        call({"present_value": 30.0}, manufacturer=None)
        call({"present_value": 30.0}, manufacturer=UNDEFINED)
    ]
    cluster.PLUGGED_ATTR_READS["present_value"] = 30.0

@@ -6,6 +6,7 @@ from unittest.mock import call, patch
import pytest
from zigpy.device import Device
from zigpy.profiles import zha
from zigpy.typing import UNDEFINED
from zigpy.zcl.clusters import general
import zigpy.zcl.foundation as zcl_f

@@ -143,5 +144,5 @@ async def test_switch(
    )
    assert len(cluster.read_attributes.mock_calls) == 1
    assert cluster.read_attributes.call_args == call(
        ["on_off"], allow_cache=False, only_cache=False, manufacturer=None
        ["on_off"], allow_cache=False, only_cache=False, manufacturer=UNDEFINED
    )
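
The three zha test updates above all track the same upstream change: zigpy now defaults the manufacturer argument to the UNDEFINED sentinel from zigpy.typing instead of None, so mock-call assertions must compare against the sentinel. A minimal illustration, assuming the sentinel compares by identity:

    from unittest.mock import MagicMock, call

    from zigpy.typing import UNDEFINED

    cluster = MagicMock()
    # Simulate zigpy forwarding its new default manufacturer sentinel.
    cluster.write_attributes({"fan_mode": 2}, manufacturer=UNDEFINED)

    # The old expectation no longer matches ...
    assert cluster.write_attributes.mock_calls != [call({"fan_mode": 2}, manufacturer=None)]
    # ... while the updated one does.
    assert cluster.write_attributes.mock_calls == [call({"fan_mode": 2}, manufacturer=UNDEFINED)]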