forked from home-assistant/core
Compare commits
55 Commits
2024.3.0b0...2024.3.0b4
| Author | SHA1 | Date |
|---|---|---|
| | a4353cf39d | |
| | 63192f2291 | |
| | 675b7ca7ba | |
| | df5eb552a0 | |
| | 5017f4a2c7 | |
| | 92d3dccb94 | |
| | 2c38b5ee7b | |
| | 435bb50d29 | |
| | 005493bb5a | |
| | 838a4e4f7b | |
| | 04bf68e661 | |
| | 8ddec745ed | |
| | 3896afbb69 | |
| | c9227d3c3d | |
| | 88d2772fa9 | |
| | f89fddb92b | |
| | c04e68b9b2 | |
| | f711411d18 | |
| | c9aea57326 | |
| | 63740d2a6d | |
| | 7a9e5354ee | |
| | 4f50c7217b | |
| | a9410ded11 | |
| | b6503f53bc | |
| | 51716290bb | |
| | 10cc0e6b2b | |
| | 640de7dbc9 | |
| | 868f19e846 | |
| | 52ea1a9deb | |
| | 5c2fc8d7a0 | |
| | fba331fd7e | |
| | dd85a97a48 | |
| | b19b5dc451 | |
| | 3d4291fc59 | |
| | 3cd07aacad | |
| | 19837055bf | |
| | 209e61f734 | |
| | 39deab74b3 | |
| | 77fd02da1d | |
| | 7e80eb551e | |
| | 536addc5ff | |
| | ff0fbcb309 | |
| | c9ea72ba7d | |
| | 10a1a450a3 | |
| | 7ff6627e07 | |
| | 4a66727bff | |
| | db584857c8 | |
| | cb7116126c | |
| | 99a70ba959 | |
| | a9fd113a80 | |
| | acfd1c2755 | |
| | feea4071d0 | |
| | e1be109947 | |
| | 812afc1bd0 | |
| | 27b5a79fa5 | |
@@ -14,13 +14,28 @@ import threading
from time import monotonic
from typing import TYPE_CHECKING, Any

# Import cryptography early since import openssl is not thread-safe
# _frozen_importlib._DeadlockError: deadlock detected by _ModuleLock('cryptography.hazmat.backends.openssl.backend')
import cryptography.hazmat.backends.openssl.backend  # noqa: F401
import voluptuous as vol
import yarl

from . import config as conf_util, config_entries, core, loader, requirements
from .components import http

# Pre-import config and lovelace which have no requirements here to avoid
# loading them at run time and blocking the event loop. We do this ahead
# of time so that we do not have to flag frontends deps with `import_executor`
# as it would create a thundering heard of executor jobs trying to import
# frontend deps at the same time.
from .components import (
    api as api_pre_import,  # noqa: F401
    config as config_pre_import,  # noqa: F401
    http,
    lovelace as lovelace_pre_import,  # noqa: F401
)
from .const import (
    FORMAT_DATETIME,
    KEY_DATA_LOGGING as DATA_LOGGING,
    REQUIRED_NEXT_PYTHON_HA_RELEASE,
    REQUIRED_NEXT_PYTHON_VER,
    SIGNAL_BOOTSTRAP_INTEGRATIONS,
@@ -62,7 +77,6 @@ _LOGGER = logging.getLogger(__name__)
ERROR_LOG_FILENAME = "home-assistant.log"

# hass.data key for logging information.
DATA_LOGGING = "logging"
DATA_REGISTRIES_LOADED = "bootstrap_registries_loaded"

LOG_SLOW_STARTUP_INTERVAL = 60
@@ -11,5 +11,5 @@
  "documentation": "https://www.home-assistant.io/integrations/airzone",
  "iot_class": "local_polling",
  "loggers": ["aioairzone"],
  "requirements": ["aioairzone==0.7.4"]
  "requirements": ["aioairzone==0.7.5"]
}
@@ -24,6 +24,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    options = ConnectionOptions(
        entry.data[CONF_USERNAME],
        entry.data[CONF_PASSWORD],
        True,
    )

    airzone = AirzoneCloudApi(aiohttp_client.async_get_clientsession(hass), options)
@@ -94,6 +94,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
            ConnectionOptions(
                user_input[CONF_USERNAME],
                user_input[CONF_PASSWORD],
                False,
            ),
        )
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
  "iot_class": "cloud_polling",
  "loggers": ["aioairzone_cloud"],
  "requirements": ["aioairzone-cloud==0.3.8"]
  "requirements": ["aioairzone-cloud==0.4.5"]
}
@@ -4,6 +4,7 @@
  "codeowners": ["@joostlek"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/analytics_insights",
  "import_executor": true,
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["python_homeassistant_analytics"],
@@ -4,6 +4,7 @@
  "codeowners": ["@tronikos", "@Drafteed"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/androidtv_remote",
  "import_executor": true,
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["androidtvremote2"],
@@ -12,7 +12,6 @@ import voluptuous as vol

from homeassistant.auth.models import User
from homeassistant.auth.permissions.const import POLICY_READ
from homeassistant.bootstrap import DATA_LOGGING
from homeassistant.components.http import (
    KEY_HASS,
    KEY_HASS_USER,
@@ -23,6 +22,7 @@ from homeassistant.const import (
    CONTENT_TYPE_JSON,
    EVENT_HOMEASSISTANT_STOP,
    EVENT_STATE_CHANGED,
    KEY_DATA_LOGGING as DATA_LOGGING,
    MATCH_ALL,
    URL_API,
    URL_API_COMPONENTS,
@@ -4,8 +4,9 @@
  "codeowners": ["@home-assistant/core"],
  "dependencies": ["http", "websocket_api"],
  "documentation": "https://www.home-assistant.io/integrations/backup",
  "import_executor": true,
  "integration_type": "system",
  "iot_class": "calculated",
  "quality_scale": "internal",
  "requirements": ["securetar==2024.2.0"]
  "requirements": ["securetar==2024.2.1"]
}
@@ -18,6 +18,7 @@
    }
  ],
  "documentation": "https://www.home-assistant.io/integrations/blink",
  "import_executor": true,
  "iot_class": "cloud_polling",
  "loggers": ["blinkpy"],
  "requirements": ["blinkpy==0.22.6"]
@@ -21,6 +21,6 @@
    "bluetooth-auto-recovery==1.3.0",
    "bluetooth-data-tools==1.19.0",
    "dbus-fast==2.21.1",
    "habluetooth==2.4.1"
    "habluetooth==2.4.2"
  ]
}
@@ -91,11 +91,24 @@ EventFetcher = Callable[[Timespan], Awaitable[list[CalendarEvent]]]
QueuedEventFetcher = Callable[[Timespan], Awaitable[list[QueuedCalendarEvent]]]


def event_fetcher(hass: HomeAssistant, entity: CalendarEntity) -> EventFetcher:
def get_entity(hass: HomeAssistant, entity_id: str) -> CalendarEntity:
    """Get the calendar entity for the provided entity_id."""
    component: EntityComponent[CalendarEntity] = hass.data[DOMAIN]
    if not (entity := component.get_entity(entity_id)) or not isinstance(
        entity, CalendarEntity
    ):
        raise HomeAssistantError(
            f"Entity does not exist {entity_id} or is not a calendar entity"
        )
    return entity


def event_fetcher(hass: HomeAssistant, entity_id: str) -> EventFetcher:
    """Build an async_get_events wrapper to fetch events during a time span."""

    async def async_get_events(timespan: Timespan) -> list[CalendarEvent]:
        """Return events active in the specified time span."""
        entity = get_entity(hass, entity_id)
        # Expand by one second to make the end time exclusive
        end_time = timespan.end + datetime.timedelta(seconds=1)
        return await entity.async_get_events(hass, timespan.start, end_time)
@@ -237,7 +250,10 @@ class CalendarEventListener:
        self._dispatch_events(now)
        self._clear_event_listener()
        self._timespan = self._timespan.next_upcoming(now, UPDATE_INTERVAL)
        self._events.extend(await self._fetcher(self._timespan))
        try:
            self._events.extend(await self._fetcher(self._timespan))
        except HomeAssistantError as ex:
            _LOGGER.error("Calendar trigger failed to fetch events: %s", ex)
        self._listen_next_calendar_event()


@@ -252,13 +268,8 @@ async def async_attach_trigger(
    event_type = config[CONF_EVENT]
    offset = config[CONF_OFFSET]

    component: EntityComponent[CalendarEntity] = hass.data[DOMAIN]
    if not (entity := component.get_entity(entity_id)) or not isinstance(
        entity, CalendarEntity
    ):
        raise HomeAssistantError(
            f"Entity does not exist {entity_id} or is not a calendar entity"
        )
    # Validate the entity id is valid
    get_entity(hass, entity_id)

    trigger_data = {
        **trigger_info["trigger_data"],
@@ -270,7 +281,7 @@ async def async_attach_trigger(
        hass,
        HassJob(action),
        trigger_data,
        queued_event_fetcher(event_fetcher(hass, entity), event_type, offset),
        queued_event_fetcher(event_fetcher(hass, entity_id), event_type, offset),
    )
    await listener.async_attach()
    return listener.async_detach
@@ -4,6 +4,7 @@
  "codeowners": ["@tombrien"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/coinbase",
  "import_executor": true,
  "iot_class": "cloud_polling",
  "loggers": ["coinbase"],
  "requirements": ["coinbase==2.1.0"]
@@ -7,5 +7,5 @@
  "integration_type": "system",
  "iot_class": "local_push",
  "quality_scale": "internal",
  "requirements": ["hassil==1.6.1", "home-assistant-intents==2024.2.2"]
  "requirements": ["hassil==1.6.1", "home-assistant-intents==2024.2.28"]
}
@@ -1,16 +1,11 @@
"""Intents for the cover integration."""

import voluptuous as vol

from homeassistant.const import (
    SERVICE_CLOSE_COVER,
    SERVICE_OPEN_COVER,
    SERVICE_SET_COVER_POSITION,
)
from homeassistant.const import SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER
from homeassistant.core import HomeAssistant
from homeassistant.helpers import intent

from . import ATTR_POSITION, DOMAIN
from . import DOMAIN

INTENT_OPEN_COVER = "HassOpenCover"
INTENT_CLOSE_COVER = "HassCloseCover"
@@ -30,12 +25,3 @@ async def async_setup_intents(hass: HomeAssistant) -> None:
            INTENT_CLOSE_COVER, DOMAIN, SERVICE_CLOSE_COVER, "Closed {}"
        ),
    )
    intent.async_register(
        hass,
        intent.ServiceIntentHandler(
            intent.INTENT_SET_POSITION,
            DOMAIN,
            SERVICE_SET_COVER_POSITION,
            extra_slots={ATTR_POSITION: vol.All(vol.Range(min=0, max=100))},
        ),
    )
@@ -63,6 +63,7 @@ FLASH_TO_DECONZ = {FLASH_SHORT: LightAlert.SHORT, FLASH_LONG: LightAlert.LONG}

DECONZ_TO_COLOR_MODE = {
    LightColorMode.CT: ColorMode.COLOR_TEMP,
    LightColorMode.GRADIENT: ColorMode.XY,
    LightColorMode.HS: ColorMode.HS,
    LightColorMode.XY: ColorMode.XY,
}
@@ -8,7 +8,7 @@
  "iot_class": "local_push",
  "loggers": ["pydeconz"],
  "quality_scale": "platinum",
  "requirements": ["pydeconz==114"],
  "requirements": ["pydeconz==115"],
  "ssdp": [
    {
      "manufacturer": "Royal Philips Electronics",
@@ -4,6 +4,7 @@
  "codeowners": ["@tkdrob"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/discord",
  "import_executor": true,
  "integration_type": "service",
  "iot_class": "cloud_push",
  "loggers": ["discord"],
@@ -11,7 +11,7 @@ from flux_led.const import ATTR_ID, WhiteChannelType
from flux_led.scanner import FluxLEDDiscovery

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_STARTED, Platform
from homeassistant.const import CONF_HOST, Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import (
@@ -37,7 +37,6 @@ from .const import (
    FLUX_LED_DISCOVERY_SIGNAL,
    FLUX_LED_EXCEPTIONS,
    SIGNAL_STATE_UPDATED,
    STARTUP_SCAN_TIMEOUT,
)
from .coordinator import FluxLedUpdateCoordinator
from .discovery import (
@@ -89,24 +88,21 @@ def async_wifi_bulb_for_host(
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the flux_led component."""
    domain_data = hass.data.setdefault(DOMAIN, {})
    domain_data[FLUX_LED_DISCOVERY] = await async_discover_devices(
        hass, STARTUP_SCAN_TIMEOUT
    )
    domain_data[FLUX_LED_DISCOVERY] = []

    @callback
    def _async_start_background_discovery(*_: Any) -> None:
        """Run discovery in the background."""
        hass.async_create_background_task(_async_discovery(), "flux_led-discovery")
        hass.async_create_background_task(
            _async_discovery(), "flux_led-discovery", eager_start=True
        )

    async def _async_discovery(*_: Any) -> None:
        async_trigger_discovery(
            hass, await async_discover_devices(hass, DISCOVER_SCAN_TIMEOUT)
        )

    async_trigger_discovery(hass, domain_data[FLUX_LED_DISCOVERY])
    hass.bus.async_listen_once(
        EVENT_HOMEASSISTANT_STARTED, _async_start_background_discovery
    )
    _async_start_background_discovery()
    async_track_time_interval(
        hass,
        _async_start_background_discovery,
@@ -20,5 +20,5 @@
  "documentation": "https://www.home-assistant.io/integrations/frontend",
  "integration_type": "system",
  "quality_scale": "internal",
  "requirements": ["home-assistant-frontend==20240228.0"]
  "requirements": ["home-assistant-frontend==20240301.0"]
}
@@ -32,6 +32,11 @@ from homeassistant.core import Event, HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import storage
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.http import (
    KEY_AUTHENTICATED,  # noqa: F401
    HomeAssistantView,
    current_request,
)
from homeassistant.helpers.network import NoURLAvailableError, get_url
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import bind_hass
@@ -41,20 +46,14 @@ from homeassistant.util.json import json_loads

from .auth import async_setup_auth
from .ban import setup_bans
from .const import (  # noqa: F401
    KEY_AUTHENTICATED,
    KEY_HASS,
    KEY_HASS_REFRESH_TOKEN_ID,
    KEY_HASS_USER,
)
from .const import KEY_HASS, KEY_HASS_REFRESH_TOKEN_ID, KEY_HASS_USER  # noqa: F401
from .cors import setup_cors
from .decorators import require_admin  # noqa: F401
from .forwarded import async_setup_forwarded
from .headers import setup_headers
from .request_context import current_request, setup_request_context
from .request_context import setup_request_context
from .security_filter import setup_security_filter
from .static import CACHE_HEADERS, CachingStaticResource
from .view import HomeAssistantView
from .web_runner import HomeAssistantTCPSite

DOMAIN: Final = "http"
@@ -20,13 +20,13 @@ from homeassistant.auth.const import GROUP_ID_READ_ONLY
from homeassistant.auth.models import User
from homeassistant.components import websocket_api
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.http import current_request
from homeassistant.helpers.json import json_bytes
from homeassistant.helpers.network import is_cloud_connection
from homeassistant.helpers.storage import Store
from homeassistant.util.network import is_local

from .const import KEY_AUTHENTICATED, KEY_HASS_REFRESH_TOKEN_ID, KEY_HASS_USER
from .request_context import current_request

_LOGGER = logging.getLogger(__name__)
@@ -15,7 +15,6 @@ from aiohttp.web import Application, Request, Response, StreamResponse, middlewa
from aiohttp.web_exceptions import HTTPForbidden, HTTPUnauthorized
import voluptuous as vol

from homeassistant.components import persistent_notification
from homeassistant.config import load_yaml_config_file
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
@@ -128,6 +127,10 @@ async def process_wrong_login(request: Request) -> None:

    _LOGGER.warning(log_msg)

    # Circular import with websocket_api
    # pylint: disable=import-outside-toplevel
    from homeassistant.components import persistent_notification

    persistent_notification.async_create(
        hass, notification_msg, "Login attempt failed", NOTIFICATION_ID_LOGIN
    )
@@ -1,7 +1,8 @@
"""HTTP specific constants."""
from typing import Final

KEY_AUTHENTICATED: Final = "ha_authenticated"
from homeassistant.helpers.http import KEY_AUTHENTICATED  # noqa: F401

KEY_HASS: Final = "hass"
KEY_HASS_USER: Final = "hass_user"
KEY_HASS_REFRESH_TOKEN_ID: Final = "hass_refresh_token_id"
@@ -7,10 +7,7 @@ from contextvars import ContextVar
from aiohttp.web import Application, Request, StreamResponse, middleware

from homeassistant.core import callback

current_request: ContextVar[Request | None] = ContextVar(
    "current_request", default=None
)
from homeassistant.helpers.http import current_request  # noqa: F401


@callback
@@ -1,180 +1,7 @@
|
||||
"""Support for views."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Awaitable, Callable
|
||||
from http import HTTPStatus
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import web
|
||||
from aiohttp.typedefs import LooseHeaders
|
||||
from aiohttp.web_exceptions import (
|
||||
HTTPBadRequest,
|
||||
HTTPInternalServerError,
|
||||
HTTPUnauthorized,
|
||||
from homeassistant.helpers.http import ( # noqa: F401
|
||||
HomeAssistantView,
|
||||
request_handler_factory,
|
||||
)
|
||||
from aiohttp.web_urldispatcher import AbstractRoute
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import exceptions
|
||||
from homeassistant.const import CONTENT_TYPE_JSON
|
||||
from homeassistant.core import Context, HomeAssistant, is_callback
|
||||
from homeassistant.helpers.json import (
|
||||
find_paths_unserializable_data,
|
||||
json_bytes,
|
||||
json_dumps,
|
||||
)
|
||||
from homeassistant.util.json import JSON_ENCODE_EXCEPTIONS, format_unserializable_data
|
||||
|
||||
from .const import KEY_AUTHENTICATED
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class HomeAssistantView:
|
||||
"""Base view for all views."""
|
||||
|
||||
url: str | None = None
|
||||
extra_urls: list[str] = []
|
||||
# Views inheriting from this class can override this
|
||||
requires_auth = True
|
||||
cors_allowed = False
|
||||
|
||||
@staticmethod
|
||||
def context(request: web.Request) -> Context:
|
||||
"""Generate a context from a request."""
|
||||
if (user := request.get("hass_user")) is None:
|
||||
return Context()
|
||||
|
||||
return Context(user_id=user.id)
|
||||
|
||||
@staticmethod
|
||||
def json(
|
||||
result: Any,
|
||||
status_code: HTTPStatus | int = HTTPStatus.OK,
|
||||
headers: LooseHeaders | None = None,
|
||||
) -> web.Response:
|
||||
"""Return a JSON response."""
|
||||
try:
|
||||
msg = json_bytes(result)
|
||||
except JSON_ENCODE_EXCEPTIONS as err:
|
||||
_LOGGER.error(
|
||||
"Unable to serialize to JSON. Bad data found at %s",
|
||||
format_unserializable_data(
|
||||
find_paths_unserializable_data(result, dump=json_dumps)
|
||||
),
|
||||
)
|
||||
raise HTTPInternalServerError from err
|
||||
response = web.Response(
|
||||
body=msg,
|
||||
content_type=CONTENT_TYPE_JSON,
|
||||
status=int(status_code),
|
||||
headers=headers,
|
||||
zlib_executor_size=32768,
|
||||
)
|
||||
response.enable_compression()
|
||||
return response
|
||||
|
||||
def json_message(
|
||||
self,
|
||||
message: str,
|
||||
status_code: HTTPStatus | int = HTTPStatus.OK,
|
||||
message_code: str | None = None,
|
||||
headers: LooseHeaders | None = None,
|
||||
) -> web.Response:
|
||||
"""Return a JSON message response."""
|
||||
data = {"message": message}
|
||||
if message_code is not None:
|
||||
data["code"] = message_code
|
||||
return self.json(data, status_code, headers=headers)
|
||||
|
||||
def register(
|
||||
self, hass: HomeAssistant, app: web.Application, router: web.UrlDispatcher
|
||||
) -> None:
|
||||
"""Register the view with a router."""
|
||||
assert self.url is not None, "No url set for view"
|
||||
urls = [self.url] + self.extra_urls
|
||||
routes: list[AbstractRoute] = []
|
||||
|
||||
for method in ("get", "post", "delete", "put", "patch", "head", "options"):
|
||||
if not (handler := getattr(self, method, None)):
|
||||
continue
|
||||
|
||||
handler = request_handler_factory(hass, self, handler)
|
||||
|
||||
for url in urls:
|
||||
routes.append(router.add_route(method, url, handler))
|
||||
|
||||
# Use `get` because CORS middleware is not be loaded in emulated_hue
|
||||
if self.cors_allowed:
|
||||
allow_cors = app.get("allow_all_cors")
|
||||
else:
|
||||
allow_cors = app.get("allow_configured_cors")
|
||||
|
||||
if allow_cors:
|
||||
for route in routes:
|
||||
allow_cors(route)
|
||||
|
||||
|
||||
def request_handler_factory(
|
||||
hass: HomeAssistant, view: HomeAssistantView, handler: Callable
|
||||
) -> Callable[[web.Request], Awaitable[web.StreamResponse]]:
|
||||
"""Wrap the handler classes."""
|
||||
is_coroutinefunction = asyncio.iscoroutinefunction(handler)
|
||||
assert is_coroutinefunction or is_callback(
|
||||
handler
|
||||
), "Handler should be a coroutine or a callback."
|
||||
|
||||
async def handle(request: web.Request) -> web.StreamResponse:
|
||||
"""Handle incoming request."""
|
||||
if hass.is_stopping:
|
||||
return web.Response(status=HTTPStatus.SERVICE_UNAVAILABLE)
|
||||
|
||||
authenticated = request.get(KEY_AUTHENTICATED, False)
|
||||
|
||||
if view.requires_auth and not authenticated:
|
||||
raise HTTPUnauthorized()
|
||||
|
||||
if _LOGGER.isEnabledFor(logging.DEBUG):
|
||||
_LOGGER.debug(
|
||||
"Serving %s to %s (auth: %s)",
|
||||
request.path,
|
||||
request.remote,
|
||||
authenticated,
|
||||
)
|
||||
|
||||
try:
|
||||
if is_coroutinefunction:
|
||||
result = await handler(request, **request.match_info)
|
||||
else:
|
||||
result = handler(request, **request.match_info)
|
||||
except vol.Invalid as err:
|
||||
raise HTTPBadRequest() from err
|
||||
except exceptions.ServiceNotFound as err:
|
||||
raise HTTPInternalServerError() from err
|
||||
except exceptions.Unauthorized as err:
|
||||
raise HTTPUnauthorized() from err
|
||||
|
||||
if isinstance(result, web.StreamResponse):
|
||||
# The method handler returned a ready-made Response, how nice of it
|
||||
return result
|
||||
|
||||
status_code = HTTPStatus.OK
|
||||
if isinstance(result, tuple):
|
||||
result, status_code = result
|
||||
|
||||
if isinstance(result, bytes):
|
||||
return web.Response(body=result, status=status_code)
|
||||
|
||||
if isinstance(result, str):
|
||||
return web.Response(text=result, status=status_code)
|
||||
|
||||
if result is None:
|
||||
return web.Response(body=b"", status=status_code)
|
||||
|
||||
raise TypeError(
|
||||
f"Result should be None, string, bytes or StreamResponse. Got: {result}"
|
||||
)
|
||||
|
||||
return handle
|
||||
|
||||
@@ -11,6 +11,6 @@
  "iot_class": "local_push",
  "loggers": ["aiohue"],
  "quality_scale": "platinum",
  "requirements": ["aiohue==4.7.0"],
  "requirements": ["aiohue==4.7.1"],
  "zeroconf": ["_hue._tcp.local."]
}
@@ -10,9 +10,11 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.components import http
|
||||
from homeassistant.components.cover import (
|
||||
ATTR_POSITION,
|
||||
DOMAIN as COVER_DOMAIN,
|
||||
SERVICE_CLOSE_COVER,
|
||||
SERVICE_OPEN_COVER,
|
||||
SERVICE_SET_COVER_POSITION,
|
||||
)
|
||||
from homeassistant.components.http.data_validator import RequestDataValidator
|
||||
from homeassistant.components.lock import (
|
||||
@@ -24,6 +26,7 @@ from homeassistant.components.valve import (
|
||||
DOMAIN as VALVE_DOMAIN,
|
||||
SERVICE_CLOSE_VALVE,
|
||||
SERVICE_OPEN_VALVE,
|
||||
SERVICE_SET_VALVE_POSITION,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
ATTR_ENTITY_ID,
|
||||
@@ -75,6 +78,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
hass,
|
||||
NevermindIntentHandler(),
|
||||
)
|
||||
intent.async_register(hass, SetPositionIntentHandler())
|
||||
|
||||
return True
|
||||
|
||||
@@ -89,14 +93,16 @@ class IntentPlatformProtocol(Protocol):
|
||||
class OnOffIntentHandler(intent.ServiceIntentHandler):
|
||||
"""Intent handler for on/off that also supports covers, valves, locks, etc."""
|
||||
|
||||
async def async_call_service(self, intent_obj: intent.Intent, state: State) -> None:
|
||||
async def async_call_service(
|
||||
self, domain: str, service: str, intent_obj: intent.Intent, state: State
|
||||
) -> None:
|
||||
"""Call service on entity with handling for special cases."""
|
||||
hass = intent_obj.hass
|
||||
|
||||
if state.domain == COVER_DOMAIN:
|
||||
# on = open
|
||||
# off = close
|
||||
if self.service == SERVICE_TURN_ON:
|
||||
if service == SERVICE_TURN_ON:
|
||||
service_name = SERVICE_OPEN_COVER
|
||||
else:
|
||||
service_name = SERVICE_CLOSE_COVER
|
||||
@@ -117,7 +123,7 @@ class OnOffIntentHandler(intent.ServiceIntentHandler):
|
||||
if state.domain == LOCK_DOMAIN:
|
||||
# on = lock
|
||||
# off = unlock
|
||||
if self.service == SERVICE_TURN_ON:
|
||||
if service == SERVICE_TURN_ON:
|
||||
service_name = SERVICE_LOCK
|
||||
else:
|
||||
service_name = SERVICE_UNLOCK
|
||||
@@ -138,7 +144,7 @@ class OnOffIntentHandler(intent.ServiceIntentHandler):
|
||||
if state.domain == VALVE_DOMAIN:
|
||||
# on = opened
|
||||
# off = closed
|
||||
if self.service == SERVICE_TURN_ON:
|
||||
if service == SERVICE_TURN_ON:
|
||||
service_name = SERVICE_OPEN_VALVE
|
||||
else:
|
||||
service_name = SERVICE_CLOSE_VALVE
|
||||
@@ -156,13 +162,13 @@ class OnOffIntentHandler(intent.ServiceIntentHandler):
|
||||
)
|
||||
return
|
||||
|
||||
if not hass.services.has_service(state.domain, self.service):
|
||||
if not hass.services.has_service(state.domain, service):
|
||||
raise intent.IntentHandleError(
|
||||
f"Service {self.service} does not support entity {state.entity_id}"
|
||||
f"Service {service} does not support entity {state.entity_id}"
|
||||
)
|
||||
|
||||
# Fall back to homeassistant.turn_on/off
|
||||
await super().async_call_service(intent_obj, state)
|
||||
await super().async_call_service(domain, service, intent_obj, state)
|
||||
|
||||
|
||||
class GetStateIntentHandler(intent.IntentHandler):
|
||||
@@ -296,6 +302,29 @@ class NevermindIntentHandler(intent.IntentHandler):
|
||||
return intent_obj.create_response()
|
||||
|
||||
|
||||
class SetPositionIntentHandler(intent.DynamicServiceIntentHandler):
|
||||
"""Intent handler for setting positions."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Create set position handler."""
|
||||
super().__init__(
|
||||
intent.INTENT_SET_POSITION,
|
||||
extra_slots={ATTR_POSITION: vol.All(vol.Range(min=0, max=100))},
|
||||
)
|
||||
|
||||
def get_domain_and_service(
|
||||
self, intent_obj: intent.Intent, state: State
|
||||
) -> tuple[str, str]:
|
||||
"""Get the domain and service name to call."""
|
||||
if state.domain == COVER_DOMAIN:
|
||||
return (COVER_DOMAIN, SERVICE_SET_COVER_POSITION)
|
||||
|
||||
if state.domain == VALVE_DOMAIN:
|
||||
return (VALVE_DOMAIN, SERVICE_SET_VALVE_POSITION)
|
||||
|
||||
raise intent.IntentHandleError(f"Domain not supported: {state.domain}")
|
||||
|
||||
|
||||
async def _async_process_intent(
|
||||
hass: HomeAssistant, domain: str, platform: IntentPlatformProtocol
|
||||
) -> None:
|
||||
|
||||
@@ -21,6 +21,7 @@
    }
  ],
  "documentation": "https://www.home-assistant.io/integrations/isy994",
  "import_executor": true,
  "integration_type": "hub",
  "iot_class": "local_push",
  "loggers": ["pyisy"],
@@ -1336,5 +1336,5 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
        """Return if light color mode issues should be reported."""
        if not self.platform:
            return True
        # philips_js, tuya and zha have known issues, we don't need users to open issues
        return self.platform.platform_name not in {"philips_js", "tuya", "zha"}
        # philips_js and tuya have known issues, we don't need users to open issues
        return self.platform.platform_name not in {"philips_js", "tuya"}
@@ -1,4 +1,5 @@
"""The Matter integration."""

from __future__ import annotations

import asyncio
@@ -45,7 +46,10 @@ def get_matter_device_info(
    hass: HomeAssistant, device_id: str
) -> MatterDeviceInfo | None:
    """Return Matter device info or None if device does not exist."""
    if not (node := node_from_ha_device_id(hass, device_id)):
    # Test hass.data[DOMAIN] to ensure config entry is set up
    if not hass.data.get(DOMAIN, False) or not (
        node := node_from_ha_device_id(hass, device_id)
    ):
        return None

    return MatterDeviceInfo(
@@ -7,5 +7,5 @@
  "documentation": "https://www.home-assistant.io/integrations/matter",
  "import_executor": true,
  "iot_class": "local_push",
  "requirements": ["python-matter-server==5.5.0"]
  "requirements": ["python-matter-server==5.7.0"]
}
@@ -148,7 +148,10 @@ async def async_resolve_media(
        raise Unresolvable("Media Source not loaded")

    if target_media_player is UNDEFINED:
        report("calls media_source.async_resolve_media without passing an entity_id")
        report(
            "calls media_source.async_resolve_media without passing an entity_id",
            {DOMAIN},
        )
        target_media_player = None

    try:
@@ -5,6 +5,7 @@
  "config_flow": true,
  "dependencies": ["recorder"],
  "documentation": "https://www.home-assistant.io/integrations/opower",
  "import_executor": true,
  "iot_class": "cloud_polling",
  "loggers": ["opower"],
  "requirements": ["opower==0.3.1"]
@@ -1,6 +1,8 @@
|
||||
"""Support for Overkiz climate devices."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import cast
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -8,8 +10,10 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from . import HomeAssistantOverkizData
|
||||
from .climate_entities import (
|
||||
WIDGET_AND_CONTROLLABLE_TO_CLIMATE_ENTITY,
|
||||
WIDGET_AND_PROTOCOL_TO_CLIMATE_ENTITY,
|
||||
WIDGET_TO_CLIMATE_ENTITY,
|
||||
Controllable,
|
||||
)
|
||||
from .const import DOMAIN
|
||||
|
||||
@@ -28,6 +32,18 @@ async def async_setup_entry(
|
||||
if device.widget in WIDGET_TO_CLIMATE_ENTITY
|
||||
)
|
||||
|
||||
# Match devices based on the widget and controllableName
|
||||
# This is for example used for Atlantic APC, where devices with different functionality share the same uiClass and widget.
|
||||
async_add_entities(
|
||||
WIDGET_AND_CONTROLLABLE_TO_CLIMATE_ENTITY[device.widget][
|
||||
cast(Controllable, device.controllable_name)
|
||||
](device.device_url, data.coordinator)
|
||||
for device in data.platforms[Platform.CLIMATE]
|
||||
if device.widget in WIDGET_AND_CONTROLLABLE_TO_CLIMATE_ENTITY
|
||||
and device.controllable_name
|
||||
in WIDGET_AND_CONTROLLABLE_TO_CLIMATE_ENTITY[device.widget]
|
||||
)
|
||||
|
||||
# Hitachi Air To Air Heat Pumps
|
||||
async_add_entities(
|
||||
WIDGET_AND_PROTOCOL_TO_CLIMATE_ENTITY[device.widget][device.protocol](
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
"""Climate entities for the Overkiz (by Somfy) integration."""
|
||||
from enum import StrEnum, unique
|
||||
|
||||
from pyoverkiz.enums import Protocol
|
||||
from pyoverkiz.enums.ui import UIWidget
|
||||
|
||||
@@ -10,18 +12,31 @@ from .atlantic_electrical_towel_dryer import AtlanticElectricalTowelDryer
|
||||
from .atlantic_heat_recovery_ventilation import AtlanticHeatRecoveryVentilation
|
||||
from .atlantic_pass_apc_heating_zone import AtlanticPassAPCHeatingZone
|
||||
from .atlantic_pass_apc_zone_control import AtlanticPassAPCZoneControl
|
||||
from .atlantic_pass_apc_zone_control_zone import AtlanticPassAPCZoneControlZone
|
||||
from .hitachi_air_to_air_heat_pump_hlrrwifi import HitachiAirToAirHeatPumpHLRRWIFI
|
||||
from .hitachi_air_to_air_heat_pump_ovp import HitachiAirToAirHeatPumpOVP
|
||||
from .somfy_heating_temperature_interface import SomfyHeatingTemperatureInterface
|
||||
from .somfy_thermostat import SomfyThermostat
|
||||
from .valve_heating_temperature_interface import ValveHeatingTemperatureInterface
|
||||
|
||||
|
||||
@unique
|
||||
class Controllable(StrEnum):
|
||||
"""Enum for widget controllables."""
|
||||
|
||||
IO_ATLANTIC_PASS_APC_HEATING_AND_COOLING_ZONE = (
|
||||
"io:AtlanticPassAPCHeatingAndCoolingZoneComponent"
|
||||
)
|
||||
IO_ATLANTIC_PASS_APC_ZONE_CONTROL_ZONE = (
|
||||
"io:AtlanticPassAPCZoneControlZoneComponent"
|
||||
)
|
||||
|
||||
|
||||
WIDGET_TO_CLIMATE_ENTITY = {
|
||||
UIWidget.ATLANTIC_ELECTRICAL_HEATER: AtlanticElectricalHeater,
|
||||
UIWidget.ATLANTIC_ELECTRICAL_HEATER_WITH_ADJUSTABLE_TEMPERATURE_SETPOINT: AtlanticElectricalHeaterWithAdjustableTemperatureSetpoint,
|
||||
UIWidget.ATLANTIC_ELECTRICAL_TOWEL_DRYER: AtlanticElectricalTowelDryer,
|
||||
UIWidget.ATLANTIC_HEAT_RECOVERY_VENTILATION: AtlanticHeatRecoveryVentilation,
|
||||
# ATLANTIC_PASS_APC_HEATING_AND_COOLING_ZONE works exactly the same as ATLANTIC_PASS_APC_HEATING_ZONE
|
||||
UIWidget.ATLANTIC_PASS_APC_HEATING_AND_COOLING_ZONE: AtlanticPassAPCHeatingZone,
|
||||
UIWidget.ATLANTIC_PASS_APC_HEATING_ZONE: AtlanticPassAPCHeatingZone,
|
||||
UIWidget.ATLANTIC_PASS_APC_ZONE_CONTROL: AtlanticPassAPCZoneControl,
|
||||
UIWidget.SOMFY_HEATING_TEMPERATURE_INTERFACE: SomfyHeatingTemperatureInterface,
|
||||
@@ -29,9 +44,19 @@ WIDGET_TO_CLIMATE_ENTITY = {
|
||||
UIWidget.VALVE_HEATING_TEMPERATURE_INTERFACE: ValveHeatingTemperatureInterface,
|
||||
}
|
||||
|
||||
# For Atlantic APC, some devices are standalone and control themselves, some others needs to be
|
||||
# managed by a ZoneControl device. Widget name is the same in the two cases.
|
||||
WIDGET_AND_CONTROLLABLE_TO_CLIMATE_ENTITY = {
|
||||
UIWidget.ATLANTIC_PASS_APC_HEATING_AND_COOLING_ZONE: {
|
||||
Controllable.IO_ATLANTIC_PASS_APC_HEATING_AND_COOLING_ZONE: AtlanticPassAPCHeatingZone,
|
||||
Controllable.IO_ATLANTIC_PASS_APC_ZONE_CONTROL_ZONE: AtlanticPassAPCZoneControlZone,
|
||||
}
|
||||
}
|
||||
|
||||
# Hitachi air-to-air heatpumps come in 2 flavors (HLRRWIFI and OVP) that are separated in 2 classes
|
||||
WIDGET_AND_PROTOCOL_TO_CLIMATE_ENTITY = {
|
||||
UIWidget.HITACHI_AIR_TO_AIR_HEAT_PUMP: {
|
||||
Protocol.HLRR_WIFI: HitachiAirToAirHeatPumpHLRRWIFI,
|
||||
Protocol.OVP: HitachiAirToAirHeatPumpOVP,
|
||||
},
|
||||
}
|
||||
|
||||
@@ -49,7 +49,15 @@ OVERKIZ_TO_PRESET_MODES: dict[str, str] = {
|
||||
OverkizCommandParam.INTERNAL_SCHEDULING: PRESET_HOME,
|
||||
}
|
||||
|
||||
PRESET_MODES_TO_OVERKIZ = {v: k for k, v in OVERKIZ_TO_PRESET_MODES.items()}
|
||||
PRESET_MODES_TO_OVERKIZ: dict[str, str] = {
|
||||
PRESET_COMFORT: OverkizCommandParam.COMFORT,
|
||||
PRESET_AWAY: OverkizCommandParam.ABSENCE,
|
||||
PRESET_ECO: OverkizCommandParam.ECO,
|
||||
PRESET_FROST_PROTECTION: OverkizCommandParam.FROSTPROTECTION,
|
||||
PRESET_EXTERNAL: OverkizCommandParam.EXTERNAL_SCHEDULING,
|
||||
PRESET_HOME: OverkizCommandParam.INTERNAL_SCHEDULING,
|
||||
}
|
||||
|
||||
|
||||
OVERKIZ_TO_PROFILE_MODES: dict[str, str] = {
|
||||
OverkizCommandParam.OFF: PRESET_SLEEP,
|
||||
|
||||
@@ -10,6 +10,7 @@ from homeassistant.components.climate import (
|
||||
)
|
||||
from homeassistant.const import UnitOfTemperature
|
||||
|
||||
from ..coordinator import OverkizDataUpdateCoordinator
|
||||
from ..entity import OverkizEntity
|
||||
|
||||
OVERKIZ_TO_HVAC_MODE: dict[str, HVACMode] = {
|
||||
@@ -25,16 +26,48 @@ HVAC_MODE_TO_OVERKIZ = {v: k for k, v in OVERKIZ_TO_HVAC_MODE.items()}
|
||||
class AtlanticPassAPCZoneControl(OverkizEntity, ClimateEntity):
|
||||
"""Representation of Atlantic Pass APC Zone Control."""
|
||||
|
||||
_attr_hvac_modes = [*HVAC_MODE_TO_OVERKIZ]
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
_attr_supported_features = (
|
||||
ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON
|
||||
)
|
||||
_enable_turn_on_off_backwards_compatibility = False
|
||||
|
||||
def __init__(
|
||||
self, device_url: str, coordinator: OverkizDataUpdateCoordinator
|
||||
) -> None:
|
||||
"""Init method."""
|
||||
super().__init__(device_url, coordinator)
|
||||
|
||||
self._attr_hvac_modes = [*HVAC_MODE_TO_OVERKIZ]
|
||||
|
||||
# Cooling is supported by a separate command
|
||||
if self.is_auto_hvac_mode_available:
|
||||
self._attr_hvac_modes.append(HVACMode.AUTO)
|
||||
|
||||
@property
|
||||
def is_auto_hvac_mode_available(self) -> bool:
|
||||
"""Check if auto mode is available on the ZoneControl."""
|
||||
|
||||
return self.executor.has_command(
|
||||
OverkizCommand.SET_HEATING_COOLING_AUTO_SWITCH
|
||||
) and self.executor.has_state(OverkizState.CORE_HEATING_COOLING_AUTO_SWITCH)
|
||||
|
||||
@property
|
||||
def hvac_mode(self) -> HVACMode:
|
||||
"""Return hvac operation ie. heat, cool mode."""
|
||||
|
||||
if (
|
||||
self.is_auto_hvac_mode_available
|
||||
and cast(
|
||||
str,
|
||||
self.executor.select_state(
|
||||
OverkizState.CORE_HEATING_COOLING_AUTO_SWITCH
|
||||
),
|
||||
)
|
||||
== OverkizCommandParam.ON
|
||||
):
|
||||
return HVACMode.AUTO
|
||||
|
||||
return OVERKIZ_TO_HVAC_MODE[
|
||||
cast(
|
||||
str, self.executor.select_state(OverkizState.IO_PASS_APC_OPERATING_MODE)
|
||||
@@ -43,6 +76,18 @@ class AtlanticPassAPCZoneControl(OverkizEntity, ClimateEntity):
|
||||
|
||||
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
"""Set new target hvac mode."""
|
||||
|
||||
if self.is_auto_hvac_mode_available:
|
||||
await self.executor.async_execute_command(
|
||||
OverkizCommand.SET_HEATING_COOLING_AUTO_SWITCH,
|
||||
OverkizCommandParam.ON
|
||||
if hvac_mode == HVACMode.AUTO
|
||||
else OverkizCommandParam.OFF,
|
||||
)
|
||||
|
||||
if hvac_mode == HVACMode.AUTO:
|
||||
return
|
||||
|
||||
await self.executor.async_execute_command(
|
||||
OverkizCommand.SET_PASS_APC_OPERATING_MODE, HVAC_MODE_TO_OVERKIZ[hvac_mode]
|
||||
)
|
||||
|
||||
@@ -0,0 +1,252 @@
|
||||
"""Support for Atlantic Pass APC Heating Control."""
|
||||
from __future__ import annotations
|
||||
|
||||
from asyncio import sleep
|
||||
from typing import Any, cast
|
||||
|
||||
from pyoverkiz.enums import OverkizCommand, OverkizCommandParam, OverkizState
|
||||
|
||||
from homeassistant.components.climate import PRESET_NONE, HVACMode
|
||||
from homeassistant.const import ATTR_TEMPERATURE
|
||||
|
||||
from ..coordinator import OverkizDataUpdateCoordinator
|
||||
from .atlantic_pass_apc_heating_zone import AtlanticPassAPCHeatingZone
|
||||
from .atlantic_pass_apc_zone_control import OVERKIZ_TO_HVAC_MODE
|
||||
|
||||
PRESET_SCHEDULE = "schedule"
|
||||
PRESET_MANUAL = "manual"
|
||||
|
||||
OVERKIZ_MODE_TO_PRESET_MODES: dict[str, str] = {
|
||||
OverkizCommandParam.MANU: PRESET_MANUAL,
|
||||
OverkizCommandParam.INTERNAL_SCHEDULING: PRESET_SCHEDULE,
|
||||
}
|
||||
|
||||
PRESET_MODES_TO_OVERKIZ = {v: k for k, v in OVERKIZ_MODE_TO_PRESET_MODES.items()}
|
||||
|
||||
TEMPERATURE_ZONECONTROL_DEVICE_INDEX = 1
|
||||
|
||||
|
||||
# Those device depends on a main probe that choose the operating mode (heating, cooling, ...)
|
||||
class AtlanticPassAPCZoneControlZone(AtlanticPassAPCHeatingZone):
|
||||
"""Representation of Atlantic Pass APC Heating And Cooling Zone Control."""
|
||||
|
||||
def __init__(
|
||||
self, device_url: str, coordinator: OverkizDataUpdateCoordinator
|
||||
) -> None:
|
||||
"""Init method."""
|
||||
super().__init__(device_url, coordinator)
|
||||
|
||||
# There is less supported functions, because they depend on the ZoneControl.
|
||||
if not self.is_using_derogated_temperature_fallback:
|
||||
# Modes are not configurable, they will follow current HVAC Mode of Zone Control.
|
||||
self._attr_hvac_modes = []
|
||||
|
||||
# Those are available and tested presets on Shogun.
|
||||
self._attr_preset_modes = [*PRESET_MODES_TO_OVERKIZ]
|
||||
|
||||
# Those APC Heating and Cooling probes depends on the zone control device (main probe).
|
||||
# Only the base device (#1) can be used to get/set some states.
|
||||
# Like to retrieve and set the current operating mode (heating, cooling, drying, off).
|
||||
self.zone_control_device = self.executor.linked_device(
|
||||
TEMPERATURE_ZONECONTROL_DEVICE_INDEX
|
||||
)
|
||||
|
||||
@property
|
||||
def is_using_derogated_temperature_fallback(self) -> bool:
|
||||
"""Check if the device behave like the Pass APC Heating Zone."""
|
||||
|
||||
return self.executor.has_command(
|
||||
OverkizCommand.SET_DEROGATED_TARGET_TEMPERATURE
|
||||
)
|
||||
|
||||
@property
|
||||
def zone_control_hvac_mode(self) -> HVACMode:
|
||||
"""Return hvac operation ie. heat, cool, dry, off mode."""
|
||||
|
||||
if (
|
||||
state := self.zone_control_device.states[
|
||||
OverkizState.IO_PASS_APC_OPERATING_MODE
|
||||
]
|
||||
) is not None and (value := state.value_as_str) is not None:
|
||||
return OVERKIZ_TO_HVAC_MODE[value]
|
||||
return HVACMode.OFF
|
||||
|
||||
@property
|
||||
def hvac_mode(self) -> HVACMode:
|
||||
"""Return hvac operation ie. heat, cool, dry, off mode."""
|
||||
|
||||
if self.is_using_derogated_temperature_fallback:
|
||||
return super().hvac_mode
|
||||
|
||||
zone_control_hvac_mode = self.zone_control_hvac_mode
|
||||
|
||||
# Should be same, because either thermostat or this integration change both.
|
||||
on_off_state = cast(
|
||||
str,
|
||||
self.executor.select_state(
|
||||
OverkizState.CORE_COOLING_ON_OFF
|
||||
if zone_control_hvac_mode == HVACMode.COOL
|
||||
else OverkizState.CORE_HEATING_ON_OFF
|
||||
),
|
||||
)
|
||||
|
||||
# Device is Stopped, it means the air flux is flowing but its venting door is closed.
|
||||
if on_off_state == OverkizCommandParam.OFF:
|
||||
hvac_mode = HVACMode.OFF
|
||||
else:
|
||||
hvac_mode = zone_control_hvac_mode
|
||||
|
||||
# It helps keep it consistent with the Zone Control, within the interface.
|
||||
if self._attr_hvac_modes != [zone_control_hvac_mode, HVACMode.OFF]:
|
||||
self._attr_hvac_modes = [zone_control_hvac_mode, HVACMode.OFF]
|
||||
self.async_write_ha_state()
|
||||
|
||||
return hvac_mode
|
||||
|
||||
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
"""Set new target hvac mode."""
|
||||
|
||||
if self.is_using_derogated_temperature_fallback:
|
||||
return await super().async_set_hvac_mode(hvac_mode)
|
||||
|
||||
# They are mainly managed by the Zone Control device
|
||||
# However, it make sense to map the OFF Mode to the Overkiz STOP Preset
|
||||
|
||||
if hvac_mode == HVACMode.OFF:
|
||||
await self.executor.async_execute_command(
|
||||
OverkizCommand.SET_COOLING_ON_OFF,
|
||||
OverkizCommandParam.OFF,
|
||||
)
|
||||
await self.executor.async_execute_command(
|
||||
OverkizCommand.SET_HEATING_ON_OFF,
|
||||
OverkizCommandParam.OFF,
|
||||
)
|
||||
else:
|
||||
await self.executor.async_execute_command(
|
||||
OverkizCommand.SET_COOLING_ON_OFF,
|
||||
OverkizCommandParam.ON,
|
||||
)
|
||||
await self.executor.async_execute_command(
|
||||
OverkizCommand.SET_HEATING_ON_OFF,
|
||||
OverkizCommandParam.ON,
|
||||
)
|
||||
|
||||
await self.async_refresh_modes()
|
||||
|
||||
@property
|
||||
def preset_mode(self) -> str:
|
||||
"""Return the current preset mode, e.g., schedule, manual."""
|
||||
|
||||
if self.is_using_derogated_temperature_fallback:
|
||||
return super().preset_mode
|
||||
|
||||
mode = OVERKIZ_MODE_TO_PRESET_MODES[
|
||||
cast(
|
||||
str,
|
||||
self.executor.select_state(
|
||||
OverkizState.IO_PASS_APC_COOLING_MODE
|
||||
if self.zone_control_hvac_mode == HVACMode.COOL
|
||||
else OverkizState.IO_PASS_APC_HEATING_MODE
|
||||
),
|
||||
)
|
||||
]
|
||||
|
||||
return mode if mode is not None else PRESET_NONE
|
||||
|
||||
async def async_set_preset_mode(self, preset_mode: str) -> None:
|
||||
"""Set new preset mode."""
|
||||
|
||||
if self.is_using_derogated_temperature_fallback:
|
||||
return await super().async_set_preset_mode(preset_mode)
|
||||
|
||||
mode = PRESET_MODES_TO_OVERKIZ[preset_mode]
|
||||
|
||||
# For consistency, it is better both are synced like on the Thermostat.
|
||||
await self.executor.async_execute_command(
|
||||
OverkizCommand.SET_PASS_APC_HEATING_MODE, mode
|
||||
)
|
||||
await self.executor.async_execute_command(
|
||||
OverkizCommand.SET_PASS_APC_COOLING_MODE, mode
|
||||
)
|
||||
|
||||
await self.async_refresh_modes()
|
||||
|
||||
@property
|
||||
def target_temperature(self) -> float:
|
||||
"""Return hvac target temperature."""
|
||||
|
||||
if self.is_using_derogated_temperature_fallback:
|
||||
return super().target_temperature
|
||||
|
||||
if self.zone_control_hvac_mode == HVACMode.COOL:
|
||||
return cast(
|
||||
float,
|
||||
self.executor.select_state(
|
||||
OverkizState.CORE_COOLING_TARGET_TEMPERATURE
|
||||
),
|
||||
)
|
||||
|
||||
if self.zone_control_hvac_mode == HVACMode.HEAT:
|
||||
return cast(
|
||||
float,
|
||||
self.executor.select_state(
|
||||
OverkizState.CORE_HEATING_TARGET_TEMPERATURE
|
||||
),
|
||||
)
|
||||
|
||||
return cast(
|
||||
float, self.executor.select_state(OverkizState.CORE_TARGET_TEMPERATURE)
|
||||
)
|
||||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set new temperature."""
|
||||
|
||||
if self.is_using_derogated_temperature_fallback:
|
||||
return await super().async_set_temperature(**kwargs)
|
||||
|
||||
temperature = kwargs[ATTR_TEMPERATURE]
|
||||
|
||||
# Change both (heating/cooling) temperature is a good way to have consistency
|
||||
await self.executor.async_execute_command(
|
||||
OverkizCommand.SET_HEATING_TARGET_TEMPERATURE,
|
||||
temperature,
|
||||
)
|
||||
await self.executor.async_execute_command(
|
||||
OverkizCommand.SET_COOLING_TARGET_TEMPERATURE,
|
||||
temperature,
|
||||
)
|
||||
await self.executor.async_execute_command(
|
||||
OverkizCommand.SET_DEROGATION_ON_OFF_STATE,
|
||||
OverkizCommandParam.OFF,
|
||||
)
|
||||
|
||||
# Target temperature may take up to 1 minute to get refreshed.
|
||||
await self.executor.async_execute_command(
|
||||
OverkizCommand.REFRESH_TARGET_TEMPERATURE
|
||||
)
|
||||
|
||||
async def async_refresh_modes(self) -> None:
|
||||
"""Refresh the device modes to have new states."""
|
||||
|
||||
# The device needs a bit of time to update everything before a refresh.
|
||||
await sleep(2)
|
||||
|
||||
await self.executor.async_execute_command(
|
||||
OverkizCommand.REFRESH_PASS_APC_HEATING_MODE
|
||||
)
|
||||
|
||||
await self.executor.async_execute_command(
|
||||
OverkizCommand.REFRESH_PASS_APC_HEATING_PROFILE
|
||||
)
|
||||
|
||||
await self.executor.async_execute_command(
|
||||
OverkizCommand.REFRESH_PASS_APC_COOLING_MODE
|
||||
)
|
||||
|
||||
await self.executor.async_execute_command(
|
||||
OverkizCommand.REFRESH_PASS_APC_COOLING_PROFILE
|
||||
)
|
||||
|
||||
await self.executor.async_execute_command(
|
||||
OverkizCommand.REFRESH_TARGET_TEMPERATURE
|
||||
)
|
||||
@@ -0,0 +1,357 @@
|
||||
"""Support for HitachiAirToAirHeatPump."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from pyoverkiz.enums import OverkizCommand, OverkizCommandParam, OverkizState
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
FAN_AUTO,
|
||||
FAN_HIGH,
|
||||
FAN_LOW,
|
||||
FAN_MEDIUM,
|
||||
PRESET_NONE,
|
||||
SWING_BOTH,
|
||||
SWING_HORIZONTAL,
|
||||
SWING_OFF,
|
||||
SWING_VERTICAL,
|
||||
ClimateEntity,
|
||||
ClimateEntityFeature,
|
||||
HVACMode,
|
||||
)
|
||||
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
|
||||
|
||||
from ..const import DOMAIN
|
||||
from ..coordinator import OverkizDataUpdateCoordinator
|
||||
from ..entity import OverkizEntity
|
||||
|
||||
PRESET_HOLIDAY_MODE = "holiday_mode"
|
||||
FAN_SILENT = "silent"
|
||||
TEMP_MIN = 16
|
||||
TEMP_MAX = 32
|
||||
TEMP_AUTO_MIN = 22
|
||||
TEMP_AUTO_MAX = 28
|
||||
AUTO_PIVOT_TEMPERATURE = 25
|
||||
AUTO_TEMPERATURE_CHANGE_MIN = TEMP_AUTO_MIN - AUTO_PIVOT_TEMPERATURE
|
||||
AUTO_TEMPERATURE_CHANGE_MAX = TEMP_AUTO_MAX - AUTO_PIVOT_TEMPERATURE
|
||||
|
||||
OVERKIZ_TO_HVAC_MODES: dict[str, HVACMode] = {
|
||||
OverkizCommandParam.AUTOHEATING: HVACMode.AUTO,
|
||||
OverkizCommandParam.AUTOCOOLING: HVACMode.AUTO,
|
||||
OverkizCommandParam.ON: HVACMode.HEAT,
|
||||
OverkizCommandParam.OFF: HVACMode.OFF,
|
||||
OverkizCommandParam.HEATING: HVACMode.HEAT,
|
||||
OverkizCommandParam.FAN: HVACMode.FAN_ONLY,
|
||||
OverkizCommandParam.DEHUMIDIFY: HVACMode.DRY,
|
||||
OverkizCommandParam.COOLING: HVACMode.COOL,
|
||||
}
|
||||
|
||||
HVAC_MODES_TO_OVERKIZ: dict[HVACMode, str] = {
|
||||
HVACMode.AUTO: OverkizCommandParam.AUTO,
|
||||
HVACMode.HEAT: OverkizCommandParam.HEATING,
|
||||
HVACMode.OFF: OverkizCommandParam.HEATING,
|
||||
HVACMode.FAN_ONLY: OverkizCommandParam.FAN,
|
||||
HVACMode.DRY: OverkizCommandParam.DEHUMIDIFY,
|
||||
HVACMode.COOL: OverkizCommandParam.COOLING,
|
||||
}
|
||||
|
||||
OVERKIZ_TO_SWING_MODES: dict[str, str] = {
|
||||
OverkizCommandParam.BOTH: SWING_BOTH,
|
||||
OverkizCommandParam.HORIZONTAL: SWING_HORIZONTAL,
|
||||
OverkizCommandParam.STOP: SWING_OFF,
|
||||
OverkizCommandParam.VERTICAL: SWING_VERTICAL,
|
||||
}
|
||||
|
||||
SWING_MODES_TO_OVERKIZ = {v: k for k, v in OVERKIZ_TO_SWING_MODES.items()}
|
||||
|
||||
OVERKIZ_TO_FAN_MODES: dict[str, str] = {
|
||||
OverkizCommandParam.AUTO: FAN_AUTO,
|
||||
OverkizCommandParam.HIGH: FAN_HIGH, # fallback, state can be exposed as HIGH, new state = hi
|
||||
OverkizCommandParam.HI: FAN_HIGH,
|
||||
OverkizCommandParam.LOW: FAN_LOW,
|
||||
OverkizCommandParam.LO: FAN_LOW,
|
||||
OverkizCommandParam.MEDIUM: FAN_MEDIUM, # fallback, state can be exposed as MEDIUM, new state = med
|
||||
OverkizCommandParam.MED: FAN_MEDIUM,
|
||||
OverkizCommandParam.SILENT: OverkizCommandParam.SILENT,
|
||||
}
|
||||
|
||||
FAN_MODES_TO_OVERKIZ: dict[str, str] = {
|
||||
FAN_AUTO: OverkizCommandParam.AUTO,
|
||||
FAN_HIGH: OverkizCommandParam.HI,
|
||||
FAN_LOW: OverkizCommandParam.LO,
|
||||
FAN_MEDIUM: OverkizCommandParam.MED,
|
||||
FAN_SILENT: OverkizCommandParam.SILENT,
|
||||
}
|
||||
|
||||
|
||||
class HitachiAirToAirHeatPumpOVP(OverkizEntity, ClimateEntity):
|
||||
"""Representation of Hitachi Air To Air HeatPump."""
|
||||
|
||||
_attr_hvac_modes = [*HVAC_MODES_TO_OVERKIZ]
|
||||
_attr_fan_modes = [*FAN_MODES_TO_OVERKIZ]
|
||||
_attr_preset_modes = [PRESET_NONE, PRESET_HOLIDAY_MODE]
|
||||
_attr_swing_modes = [*SWING_MODES_TO_OVERKIZ]
|
||||
_attr_target_temperature_step = 1.0
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
_attr_translation_key = DOMAIN
|
||||
|
||||
def __init__(
|
||||
self, device_url: str, coordinator: OverkizDataUpdateCoordinator
|
||||
) -> None:
|
||||
"""Init method."""
|
||||
super().__init__(device_url, coordinator)
|
||||
|
||||
self._attr_supported_features = (
|
||||
ClimateEntityFeature.TARGET_TEMPERATURE
|
||||
| ClimateEntityFeature.FAN_MODE
|
||||
| ClimateEntityFeature.PRESET_MODE
|
||||
)
|
||||
|
||||
if self.device.states.get(OverkizState.OVP_SWING):
|
||||
self._attr_supported_features |= ClimateEntityFeature.SWING_MODE
|
||||
|
||||
if self._attr_device_info:
|
||||
self._attr_device_info["manufacturer"] = "Hitachi"
|
||||
|
||||
@property
|
||||
def hvac_mode(self) -> HVACMode:
|
||||
"""Return hvac operation ie. heat, cool mode."""
|
||||
if (
|
||||
main_op_state := self.device.states[OverkizState.OVP_MAIN_OPERATION]
|
||||
) and main_op_state.value_as_str:
|
||||
if main_op_state.value_as_str.lower() == OverkizCommandParam.OFF:
|
||||
return HVACMode.OFF
|
||||
|
||||
if (
|
||||
mode_change_state := self.device.states[OverkizState.OVP_MODE_CHANGE]
|
||||
) and mode_change_state.value_as_str:
|
||||
# The OVP protocol has 'auto cooling' and 'auto heating' values
|
||||
# that are equivalent to the HLRRWIFI protocol without spaces
|
||||
sanitized_value = mode_change_state.value_as_str.replace(" ", "").lower()
|
            return OVERKIZ_TO_HVAC_MODES[sanitized_value]

        return HVACMode.OFF

    async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
        """Set new target hvac mode."""
        if hvac_mode == HVACMode.OFF:
            await self._global_control(main_operation=OverkizCommandParam.OFF)
        else:
            await self._global_control(
                main_operation=OverkizCommandParam.ON,
                hvac_mode=HVAC_MODES_TO_OVERKIZ[hvac_mode],
            )

    @property
    def fan_mode(self) -> str | None:
        """Return the fan setting."""
        if (
            state := self.device.states[OverkizState.OVP_FAN_SPEED]
        ) and state.value_as_str:
            return OVERKIZ_TO_FAN_MODES[state.value_as_str]

        return None

    async def async_set_fan_mode(self, fan_mode: str) -> None:
        """Set new target fan mode."""
        await self._global_control(fan_mode=FAN_MODES_TO_OVERKIZ[fan_mode])

    @property
    def swing_mode(self) -> str | None:
        """Return the swing setting."""
        if (state := self.device.states[OverkizState.OVP_SWING]) and state.value_as_str:
            return OVERKIZ_TO_SWING_MODES[state.value_as_str]

        return None

    async def async_set_swing_mode(self, swing_mode: str) -> None:
        """Set new target swing operation."""
        await self._global_control(swing_mode=SWING_MODES_TO_OVERKIZ[swing_mode])

    @property
    def target_temperature(self) -> int | None:
        """Return the target temperature."""
        if (
            temperature := self.device.states[OverkizState.CORE_TARGET_TEMPERATURE]
        ) and temperature.value_as_int:
            return temperature.value_as_int

        return None

    @property
    def current_temperature(self) -> int | None:
        """Return current temperature."""
        if (
            state := self.device.states[OverkizState.OVP_ROOM_TEMPERATURE]
        ) and state.value_as_int:
            return state.value_as_int

        return None

    async def async_set_temperature(self, **kwargs: Any) -> None:
        """Set new temperature."""
        await self._global_control(target_temperature=int(kwargs[ATTR_TEMPERATURE]))

    @property
    def preset_mode(self) -> str | None:
        """Return the current preset mode, e.g., home, away, temp."""
        if (
            state := self.device.states[OverkizState.CORE_HOLIDAYS_MODE]
        ) and state.value_as_str:
            if state.value_as_str == OverkizCommandParam.ON:
                return PRESET_HOLIDAY_MODE

            if state.value_as_str == OverkizCommandParam.OFF:
                return PRESET_NONE

        return None

    async def async_set_preset_mode(self, preset_mode: str) -> None:
        """Set new preset mode."""
        if preset_mode == PRESET_HOLIDAY_MODE:
            await self.executor.async_execute_command(
                OverkizCommand.SET_HOLIDAYS,
                OverkizCommandParam.ON,
            )
        if preset_mode == PRESET_NONE:
            await self.executor.async_execute_command(
                OverkizCommand.SET_HOLIDAYS,
                OverkizCommandParam.OFF,
            )

    # OVP has this property to control the unit's timer mode
    @property
    def auto_manu_mode(self) -> str | None:
        """Return auto/manu mode."""
        if (
            state := self.device.states[OverkizState.CORE_AUTO_MANU_MODE]
        ) and state.value_as_str:
            return state.value_as_str
        return None

    # OVP has this property to control the target temperature delta in auto mode
    @property
    def temperature_change(self) -> int | None:
        """Return temperature change state."""
        if (
            state := self.device.states[OverkizState.OVP_TEMPERATURE_CHANGE]
        ) and state.value_as_int:
            return state.value_as_int

        return None

    @property
    def min_temp(self) -> float:
        """Return the minimum temperature."""
        if self.hvac_mode == HVACMode.AUTO:
            return TEMP_AUTO_MIN
        return TEMP_MIN

    @property
    def max_temp(self) -> float:
        """Return the maximum temperature."""
        if self.hvac_mode == HVACMode.AUTO:
            return TEMP_AUTO_MAX
        return TEMP_MAX

    def _control_backfill(
        self, value: str | None, state_name: str, fallback_value: str
    ) -> str:
        """Return a parameter value which will be accepted in a command by Overkiz.

        Overkiz doesn't accept commands with undefined parameters. This function
        is guaranteed to return a `str` which is the provided `value` if set, or
        the current device state if set, or the provided `fallback_value` otherwise.
        """
        if value:
            return value
        if (state := self.device.states[state_name]) is not None and (
            value := state.value_as_str
        ) is not None:
            return value
        return fallback_value

    async def _global_control(
        self,
        main_operation: str | None = None,
        target_temperature: int | None = None,
        fan_mode: str | None = None,
        hvac_mode: str | None = None,
        swing_mode: str | None = None,
        leave_home: str | None = None,
    ) -> None:
        """Execute globalControl command with all parameters.

        There is no option to only set a single parameter, without passing
        all other values.
        """

        main_operation = self._control_backfill(
            main_operation, OverkizState.OVP_MAIN_OPERATION, OverkizCommandParam.ON
        )
        fan_mode = self._control_backfill(
            fan_mode,
            OverkizState.OVP_FAN_SPEED,
            OverkizCommandParam.AUTO,
        )
        hvac_mode = self._control_backfill(
            hvac_mode,
            OverkizState.OVP_MODE_CHANGE,
            OverkizCommandParam.AUTO,
        ).lower()  # Overkiz returns uppercase states that are not acceptable commands
        if hvac_mode.replace(" ", "") in [
            # Overkiz returns compound states like 'auto cooling' or 'autoHeating'
            # that are not valid commands and need to be mapped to 'auto'
            OverkizCommandParam.AUTOCOOLING,
            OverkizCommandParam.AUTOHEATING,
        ]:
            hvac_mode = OverkizCommandParam.AUTO

        swing_mode = self._control_backfill(
            swing_mode,
            OverkizState.OVP_SWING,
            OverkizCommandParam.STOP,
        )

        # AUTO_MANU parameter is not controlled by HA and is turned "off" when the device is on Holiday mode
        auto_manu_mode = self._control_backfill(
            None, OverkizState.CORE_AUTO_MANU_MODE, OverkizCommandParam.MANU
        )
        if self.preset_mode == PRESET_HOLIDAY_MODE:
            auto_manu_mode = OverkizCommandParam.OFF

        # In all the hvac modes except AUTO, the temperature command parameter is the target temperature
        temperature_command = None
        target_temperature = target_temperature or self.target_temperature
        if hvac_mode == OverkizCommandParam.AUTO:
            # In hvac mode AUTO, the temperature command parameter is a temperature_change
            # which is the delta between a pivot temperature (25) and the target temperature
            temperature_change = 0

            if target_temperature:
                temperature_change = target_temperature - AUTO_PIVOT_TEMPERATURE
            elif self.temperature_change:
                temperature_change = self.temperature_change

            # Keep temperature_change in the API accepted range
            temperature_change = min(
                max(temperature_change, AUTO_TEMPERATURE_CHANGE_MIN),
                AUTO_TEMPERATURE_CHANGE_MAX,
            )

            temperature_command = temperature_change
        else:
            # In other modes, the temperature command is the target temperature
            temperature_command = target_temperature

        command_data = [
            main_operation,  # Main Operation
            temperature_command,  # Temperature Command
            fan_mode,  # Fan Mode
            hvac_mode,  # Mode
            auto_manu_mode,  # Auto Manu Mode
        ]

        await self.executor.async_execute_command(
            OverkizCommand.GLOBAL_CONTROL, command_data
        )

@@ -19,7 +19,7 @@
  "integration_type": "hub",
  "iot_class": "local_polling",
  "loggers": ["boto3", "botocore", "pyhumps", "pyoverkiz", "s3transfer"],
  "requirements": ["pyoverkiz==1.13.7"],
  "requirements": ["pyoverkiz==1.13.8"],
  "zeroconf": [
    {
      "type": "_kizbox._tcp.local.",

@@ -20,6 +20,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import HomeAssistantOverkizData
from .const import DOMAIN, IGNORED_OVERKIZ_DEVICES
from .coordinator import OverkizDataUpdateCoordinator
from .entity import OverkizDescriptiveEntity

BOOST_MODE_DURATION_DELAY = 1
@@ -37,6 +38,8 @@ class OverkizNumberDescriptionMixin:
class OverkizNumberDescription(NumberEntityDescription, OverkizNumberDescriptionMixin):
    """Class to describe an Overkiz number."""

    min_value_state_name: str | None = None
    max_value_state_name: str | None = None
    inverted: bool = False
    set_native_value: Callable[
        [float, Callable[..., Awaitable[None]]], Awaitable[None]
@@ -94,6 +97,8 @@ NUMBER_DESCRIPTIONS: list[OverkizNumberDescription] = [
        command=OverkizCommand.SET_EXPECTED_NUMBER_OF_SHOWER,
        native_min_value=2,
        native_max_value=4,
        min_value_state_name=OverkizState.CORE_MINIMAL_SHOWER_MANUAL_MODE,
        max_value_state_name=OverkizState.CORE_MAXIMAL_SHOWER_MANUAL_MODE,
        entity_category=EntityCategory.CONFIG,
    ),
    # SomfyHeatingTemperatureInterface
@@ -200,6 +205,29 @@ class OverkizNumber(OverkizDescriptiveEntity, NumberEntity):

    entity_description: OverkizNumberDescription

    def __init__(
        self,
        device_url: str,
        coordinator: OverkizDataUpdateCoordinator,
        description: OverkizNumberDescription,
    ) -> None:
        """Initialize a device."""
        super().__init__(device_url, coordinator, description)

        if self.entity_description.min_value_state_name and (
            state := self.device.states.get(
                self.entity_description.min_value_state_name
            )
        ):
            self._attr_native_min_value = cast(float, state.value)

        if self.entity_description.max_value_state_name and (
            state := self.device.states.get(
                self.entity_description.max_value_state_name
            )
        ):
            self._attr_native_max_value = cast(float, state.value)

    @property
    def native_value(self) -> float | None:
        """Return the entity value to represent the entity state."""

@@ -9,7 +9,7 @@ from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOM
from homeassistant.components.person import DOMAIN as PERSON_DOMAIN
from homeassistant.components.zone import DOMAIN as ZONE_DOMAIN
from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow
from homeassistant.const import CONF_ZONE
from homeassistant.const import CONF_ZONE, UnitOfLength
from homeassistant.core import State, callback
from homeassistant.data_entry_flow import FlowResult
from homeassistant.helpers.selector import (
@@ -50,7 +50,9 @@ def _base_schema(user_input: dict[str, Any]) -> vol.Schema:
                CONF_TOLERANCE,
                default=user_input.get(CONF_TOLERANCE, DEFAULT_TOLERANCE),
            ): NumberSelector(
                NumberSelectorConfig(min=1, max=100, step=1),
                NumberSelectorConfig(
                    min=1, max=100, step=1, unit_of_measurement=UnitOfLength.METERS
                ),
            ),
        }

@@ -19,7 +19,7 @@
      "title": "Random sensor"
    },
    "user": {
      "description": "This helper allow you to create a helper that emits a random value.",
      "description": "This helper allows you to create a helper that emits a random value.",
      "menu_options": {
        "binary_sensor": "Random binary sensor",
        "sensor": "Random sensor"

@@ -14,7 +14,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.typing import ConfigType

from .const import DISCOVER_SCAN_TIMEOUT, DISCOVERY, DOMAIN, STARTUP_SCAN_TIMEOUT
from .const import DISCOVER_SCAN_TIMEOUT, DISCOVERY, DOMAIN
from .coordinator import SteamistDataUpdateCoordinator
from .discovery import (
    async_discover_device,
@@ -32,14 +32,16 @@ CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the steamist component."""
    domain_data = hass.data.setdefault(DOMAIN, {})
    domain_data[DISCOVERY] = await async_discover_devices(hass, STARTUP_SCAN_TIMEOUT)
    domain_data[DISCOVERY] = []

    async def _async_discovery(*_: Any) -> None:
        async_trigger_discovery(
            hass, await async_discover_devices(hass, DISCOVER_SCAN_TIMEOUT)
        )

    async_trigger_discovery(hass, domain_data[DISCOVERY])
    hass.async_create_background_task(
        _async_discovery(), "steamist-discovery", eager_start=True
    )
    async_track_time_interval(hass, _async_discovery, DISCOVERY_INTERVAL)
    return True

@@ -20,7 +20,7 @@
      "title": "Template sensor"
    },
    "user": {
      "description": "This helper allow you to create helper entities that define their state using a template.",
      "description": "This helper allows you to create helper entities that define their state using a template.",
      "menu_options": {
        "binary_sensor": "Template a binary sensor",
        "sensor": "Template a sensor"

@@ -60,6 +60,35 @@ MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=5)

PARALLEL_UPDATES = 0


RT_SENSORS_UNIQUE_ID_MIGRATION = {
    "accumulated_consumption_last_hour": "accumulated consumption current hour",
    "accumulated_production_last_hour": "accumulated production current hour",
    "current_l1": "current L1",
    "current_l2": "current L2",
    "current_l3": "current L3",
    "estimated_hour_consumption": "Estimated consumption current hour",
}

RT_SENSORS_UNIQUE_ID_MIGRATION_SIMPLE = {
    # simple migration can be done by replacing " " with "_"
    "accumulated_consumption",
    "accumulated_cost",
    "accumulated_production",
    "accumulated_reward",
    "average_power",
    "last_meter_consumption",
    "last_meter_production",
    "max_power",
    "min_power",
    "power_factor",
    "power_production",
    "signal_strength",
    "voltage_phase1",
    "voltage_phase2",
    "voltage_phase3",
}


RT_SENSORS: tuple[SensorEntityDescription, ...] = (
    SensorEntityDescription(
        key="averagePower",
@@ -454,7 +483,7 @@ class TibberSensorRT(TibberSensor, CoordinatorEntity["TibberRtDataCoordinator"])
        self._device_name = f"{self._model} {self._home_name}"

        self._attr_native_value = initial_state
        self._attr_unique_id = f"{self._tibber_home.home_id}_rt_{description.name}"
        self._attr_unique_id = f"{self._tibber_home.home_id}_rt_{description.key}"

        if description.key in ("accumulatedCost", "accumulatedReward"):
            self._attr_native_unit_of_measurement = tibber_home.currency
@@ -523,6 +552,7 @@ class TibberRtDataCoordinator(DataUpdateCoordinator):  # pylint: disable=hass-en
        self._async_remove_device_updates_handler = self.async_add_listener(
            self._add_sensors
        )
        self.entity_registry = async_get_entity_reg(hass)
        hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, self._handle_ha_stop)

    @callback
@@ -530,6 +560,49 @@ class TibberRtDataCoordinator(DataUpdateCoordinator):  # pylint: disable=hass-en
        """Handle Home Assistant stopping."""
        self._async_remove_device_updates_handler()

    @callback
    def _migrate_unique_id(self, sensor_description: SensorEntityDescription) -> None:
        """Migrate unique id if needed."""
        home_id = self._tibber_home.home_id
        translation_key = sensor_description.translation_key
        description_key = sensor_description.key
        entity_id: str | None = None
        if translation_key in RT_SENSORS_UNIQUE_ID_MIGRATION_SIMPLE:
            entity_id = self.entity_registry.async_get_entity_id(
                "sensor",
                TIBBER_DOMAIN,
                f"{home_id}_rt_{translation_key.replace('_', ' ')}",
            )
        elif translation_key in RT_SENSORS_UNIQUE_ID_MIGRATION:
            entity_id = self.entity_registry.async_get_entity_id(
                "sensor",
                TIBBER_DOMAIN,
                f"{home_id}_rt_{RT_SENSORS_UNIQUE_ID_MIGRATION[translation_key]}",
            )
        elif translation_key != description_key:
            entity_id = self.entity_registry.async_get_entity_id(
                "sensor",
                TIBBER_DOMAIN,
                f"{home_id}_rt_{translation_key}",
            )

        if entity_id is None:
            return

        new_unique_id = f"{home_id}_rt_{description_key}"

        _LOGGER.debug(
            "Migrating unique id for %s to %s",
            entity_id,
            new_unique_id,
        )
        try:
            self.entity_registry.async_update_entity(
                entity_id, new_unique_id=new_unique_id
            )
        except ValueError as err:
            _LOGGER.error(err)

    @callback
    def _add_sensors(self) -> None:
        """Add sensor."""
@@ -543,6 +616,8 @@ class TibberRtDataCoordinator(DataUpdateCoordinator):  # pylint: disable=hass-en
            state = live_measurement.get(sensor_description.key)
            if state is None:
                continue

            self._migrate_unique_id(sensor_description)
            entity = TibberSensorRT(
                self._tibber_home,
                sensor_description,

@@ -28,7 +28,6 @@ from homeassistant.const import (
    CONF_MODEL,
    CONF_PASSWORD,
    CONF_USERNAME,
    EVENT_HOMEASSISTANT_STARTED,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
@@ -112,14 +111,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the TP-Link component."""
    hass.data.setdefault(DOMAIN, {})

    if discovered_devices := await async_discover_devices(hass):
        async_trigger_discovery(hass, discovered_devices)

    async def _async_discovery(*_: Any) -> None:
        if discovered := await async_discover_devices(hass):
            async_trigger_discovery(hass, discovered)

    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, _async_discovery)
    hass.async_create_background_task(
        _async_discovery(), "tplink first discovery", eager_start=True
    )
    async_track_time_interval(
        hass, _async_discovery, DISCOVERY_INTERVAL, cancel_on_shutdown=True
    )

@@ -1,22 +0,0 @@
"""Intents for the valve integration."""

import voluptuous as vol

from homeassistant.const import SERVICE_SET_VALVE_POSITION
from homeassistant.core import HomeAssistant
from homeassistant.helpers import intent

from . import ATTR_POSITION, DOMAIN


async def async_setup_intents(hass: HomeAssistant) -> None:
    """Set up the valve intents."""
    intent.async_register(
        hass,
        intent.ServiceIntentHandler(
            intent.INTENT_SET_POSITION,
            DOMAIN,
            SERVICE_SET_VALVE_POSITION,
            extra_slots={ATTR_POSITION: vol.All(vol.Range(min=0, max=100))},
        ),
    )
@@ -22,8 +22,8 @@ CONFIG_SCHEMA = vol.Schema(
                }
            )
        },
        extra=vol.ALLOW_EXTRA,
    )
        ),
        extra=vol.ALLOW_EXTRA,
    )

@@ -5,5 +5,5 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/weatherflow_cloud",
  "iot_class": "cloud_polling",
  "requirements": ["weatherflow4py==0.1.11"]
  "requirements": ["weatherflow4py==0.1.12"]
}

@@ -9,9 +9,9 @@ from aiohttp import web
import voluptuous as vol

from homeassistant.auth.models import RefreshToken, User
from homeassistant.components.http import current_request
from homeassistant.core import Context, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError, Unauthorized
from homeassistant.helpers.http import current_request
from homeassistant.util.json import JsonValueType

from . import const, messages

@@ -1,10 +1,11 @@
"""Base class for Wyoming providers."""

from __future__ import annotations

import asyncio

from wyoming.client import AsyncTcpClient
from wyoming.info import Describe, Info, Satellite
from wyoming.info import Describe, Info

from homeassistant.const import Platform

@@ -23,14 +24,19 @@ class WyomingService:
        self.host = host
        self.port = port
        self.info = info
        platforms = []
        self.platforms = []

        if (self.info.satellite is not None) and self.info.satellite.installed:
            # Don't load platforms for satellite services, such as local wake
            # word detection.
            return

        if any(asr.installed for asr in info.asr):
            platforms.append(Platform.STT)
            self.platforms.append(Platform.STT)
        if any(tts.installed for tts in info.tts):
            platforms.append(Platform.TTS)
            self.platforms.append(Platform.TTS)
        if any(wake.installed for wake in info.wake):
            platforms.append(Platform.WAKE_WORD)
        self.platforms = platforms
            self.platforms.append(Platform.WAKE_WORD)

    def has_services(self) -> bool:
        """Return True if services are installed that Home Assistant can use."""
@@ -43,6 +49,12 @@ class WyomingService:

    def get_name(self) -> str | None:
        """Return name of first installed usable service."""

        # Wyoming satellite
        # Must be checked first because satellites may contain wake services, etc.
        if (self.info.satellite is not None) and self.info.satellite.installed:
            return self.info.satellite.name

        # ASR = automated speech recognition (speech-to-text)
        asr_installed = [asr for asr in self.info.asr if asr.installed]
        if asr_installed:
@@ -58,15 +70,6 @@ class WyomingService:
        if wake_installed:
            return wake_installed[0].name

        # satellite
        satellite_installed: Satellite | None = None

        if (self.info.satellite is not None) and self.info.satellite.installed:
            satellite_installed = self.info.satellite

        if satellite_installed:
            return satellite_installed.name

        return None

    @classmethod

@@ -4,6 +4,7 @@ from __future__ import annotations
import functools
from typing import Any

from zigpy.quirks.v2 import BinarySensorMetadata, EntityMetadata
import zigpy.types as t
from zigpy.zcl.clusters.general import OnOff
from zigpy.zcl.clusters.security import IasZone
@@ -26,6 +27,7 @@ from .core.const import (
    CLUSTER_HANDLER_OCCUPANCY,
    CLUSTER_HANDLER_ON_OFF,
    CLUSTER_HANDLER_ZONE,
    QUIRK_METADATA,
    SIGNAL_ADD_ENTITIES,
    SIGNAL_ATTR_UPDATED,
)
@@ -76,8 +78,16 @@ class BinarySensor(ZhaEntity, BinarySensorEntity):

    def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs) -> None:
        """Initialize the ZHA binary sensor."""
        super().__init__(unique_id, zha_device, cluster_handlers, **kwargs)
        self._cluster_handler = cluster_handlers[0]
        if QUIRK_METADATA in kwargs:
            self._init_from_quirks_metadata(kwargs[QUIRK_METADATA])
        super().__init__(unique_id, zha_device, cluster_handlers, **kwargs)

    def _init_from_quirks_metadata(self, entity_metadata: EntityMetadata) -> None:
        """Init this entity from the quirks metadata."""
        super()._init_from_quirks_metadata(entity_metadata)
        binary_sensor_metadata: BinarySensorMetadata = entity_metadata.entity_metadata
        self._attribute_name = binary_sensor_metadata.attribute_name

    async def async_added_to_hass(self) -> None:
        """Run when about to be added to hass."""

@@ -1,11 +1,16 @@
"""Support for ZHA button."""
from __future__ import annotations

import abc
import functools
import logging
from typing import TYPE_CHECKING, Any, Self

from zigpy.quirks.v2 import (
    EntityMetadata,
    WriteAttributeButtonMetadata,
    ZCLCommandButtonMetadata,
)

from homeassistant.components.button import ButtonDeviceClass, ButtonEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EntityCategory, Platform
@@ -14,7 +19,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .core import discovery
from .core.const import CLUSTER_HANDLER_IDENTIFY, SIGNAL_ADD_ENTITIES
from .core.const import CLUSTER_HANDLER_IDENTIFY, QUIRK_METADATA, SIGNAL_ADD_ENTITIES
from .core.helpers import get_zha_data
from .core.registries import ZHA_ENTITIES
from .entity import ZhaEntity
@@ -58,6 +63,8 @@ class ZHAButton(ZhaEntity, ButtonEntity):
    """Defines a ZHA button."""

    _command_name: str
    _args: list[Any]
    _kwargs: dict[str, Any]

    def __init__(
        self,
@@ -67,18 +74,33 @@ class ZHAButton(ZhaEntity, ButtonEntity):
        **kwargs: Any,
    ) -> None:
        """Init this button."""
        super().__init__(unique_id, zha_device, cluster_handlers, **kwargs)
        self._cluster_handler: ClusterHandler = cluster_handlers[0]
        if QUIRK_METADATA in kwargs:
            self._init_from_quirks_metadata(kwargs[QUIRK_METADATA])
        super().__init__(unique_id, zha_device, cluster_handlers, **kwargs)

    def _init_from_quirks_metadata(self, entity_metadata: EntityMetadata) -> None:
        """Init this entity from the quirks metadata."""
        super()._init_from_quirks_metadata(entity_metadata)
        button_metadata: ZCLCommandButtonMetadata = entity_metadata.entity_metadata
        self._command_name = button_metadata.command_name
        self._args = button_metadata.args
        self._kwargs = button_metadata.kwargs

    @abc.abstractmethod
    def get_args(self) -> list[Any]:
        """Return the arguments to use in the command."""
        return list(self._args) if self._args else []

    def get_kwargs(self) -> dict[str, Any]:
        """Return the keyword arguments to use in the command."""
        return self._kwargs

    async def async_press(self) -> None:
        """Send out a update command."""
        command = getattr(self._cluster_handler, self._command_name)
        arguments = self.get_args()
        await command(*arguments)
        arguments = self.get_args() or []
        kwargs = self.get_kwargs() or {}
        await command(*arguments, **kwargs)


@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_IDENTIFY)
@@ -106,11 +128,8 @@ class ZHAIdentifyButton(ZHAButton):
    _attr_device_class = ButtonDeviceClass.IDENTIFY
    _attr_entity_category = EntityCategory.DIAGNOSTIC
    _command_name = "identify"

    def get_args(self) -> list[Any]:
        """Return the arguments to use in the command."""

        return [DEFAULT_DURATION]
    _kwargs = {}
    _args = [DEFAULT_DURATION]


class ZHAAttributeButton(ZhaEntity, ButtonEntity):
@@ -127,8 +146,17 @@ class ZHAAttributeButton(ZhaEntity, ButtonEntity):
        **kwargs: Any,
    ) -> None:
        """Init this button."""
        super().__init__(unique_id, zha_device, cluster_handlers, **kwargs)
        self._cluster_handler: ClusterHandler = cluster_handlers[0]
        if QUIRK_METADATA in kwargs:
            self._init_from_quirks_metadata(kwargs[QUIRK_METADATA])
        super().__init__(unique_id, zha_device, cluster_handlers, **kwargs)

    def _init_from_quirks_metadata(self, entity_metadata: EntityMetadata) -> None:
        """Init this entity from the quirks metadata."""
        super()._init_from_quirks_metadata(entity_metadata)
        button_metadata: WriteAttributeButtonMetadata = entity_metadata.entity_metadata
        self._attribute_name = button_metadata.attribute_name
        self._attribute_value = button_metadata.attribute_value

    async def async_press(self) -> None:
        """Write attribute with defined value."""

@@ -64,6 +64,8 @@ ATTR_WARNING_DEVICE_STROBE_INTENSITY = "intensity"
BAUD_RATES = [2400, 4800, 9600, 14400, 19200, 38400, 57600, 115200, 128000, 256000]
BINDINGS = "bindings"

CLUSTER_DETAILS = "cluster_details"

CLUSTER_HANDLER_ACCELEROMETER = "accelerometer"
CLUSTER_HANDLER_BINARY_INPUT = "binary_input"
CLUSTER_HANDLER_ANALOG_INPUT = "analog_input"
@@ -230,6 +232,10 @@ PRESET_SCHEDULE = "Schedule"
PRESET_COMPLEX = "Complex"
PRESET_TEMP_MANUAL = "Temporary manual"

QUIRK_METADATA = "quirk_metadata"

ZCL_INIT_ATTRS = "ZCL_INIT_ATTRS"

ZHA_ALARM_OPTIONS = "zha_alarm_options"
ZHA_OPTIONS = "zha_options"

@@ -15,6 +15,7 @@ from zigpy.device import Device as ZigpyDevice
import zigpy.exceptions
from zigpy.profiles import PROFILES
import zigpy.quirks
from zigpy.quirks.v2 import CustomDeviceV2
from zigpy.types.named import EUI64, NWK
from zigpy.zcl.clusters import Cluster
from zigpy.zcl.clusters.general import Groups, Identify
@@ -582,6 +583,9 @@ class ZHADevice(LogMixin):
        await asyncio.gather(
            *(endpoint.async_configure() for endpoint in self._endpoints.values())
        )
        if isinstance(self._zigpy_device, CustomDeviceV2):
            self.debug("applying quirks v2 custom device configuration")
            await self._zigpy_device.apply_custom_configuration()
        async_dispatcher_send(
            self.hass,
            const.ZHA_CLUSTER_HANDLER_MSG,

@@ -4,10 +4,22 @@ from __future__ import annotations
from collections import Counter
from collections.abc import Callable
import logging
from typing import TYPE_CHECKING, cast
from typing import TYPE_CHECKING, Any, cast

from slugify import slugify
from zigpy.quirks.v2 import (
    BinarySensorMetadata,
    CustomDeviceV2,
    EntityType,
    NumberMetadata,
    SwitchMetadata,
    WriteAttributeButtonMetadata,
    ZCLCommandButtonMetadata,
    ZCLEnumMetadata,
    ZCLSensorMetadata,
)
from zigpy.state import State
from zigpy.zcl import ClusterType
from zigpy.zcl.clusters.general import Ota

from homeassistant.const import CONF_TYPE, Platform
@@ -66,6 +78,59 @@ if TYPE_CHECKING:
_LOGGER = logging.getLogger(__name__)


QUIRKS_ENTITY_META_TO_ENTITY_CLASS = {
    (
        Platform.BUTTON,
        WriteAttributeButtonMetadata,
        EntityType.CONFIG,
    ): button.ZHAAttributeButton,
    (Platform.BUTTON, ZCLCommandButtonMetadata, EntityType.CONFIG): button.ZHAButton,
    (
        Platform.BUTTON,
        ZCLCommandButtonMetadata,
        EntityType.DIAGNOSTIC,
    ): button.ZHAButton,
    (
        Platform.BINARY_SENSOR,
        BinarySensorMetadata,
        EntityType.CONFIG,
    ): binary_sensor.BinarySensor,
    (
        Platform.BINARY_SENSOR,
        BinarySensorMetadata,
        EntityType.DIAGNOSTIC,
    ): binary_sensor.BinarySensor,
    (
        Platform.BINARY_SENSOR,
        BinarySensorMetadata,
        EntityType.STANDARD,
    ): binary_sensor.BinarySensor,
    (Platform.SENSOR, ZCLEnumMetadata, EntityType.DIAGNOSTIC): sensor.EnumSensor,
    (Platform.SENSOR, ZCLEnumMetadata, EntityType.STANDARD): sensor.EnumSensor,
    (Platform.SENSOR, ZCLSensorMetadata, EntityType.DIAGNOSTIC): sensor.Sensor,
    (Platform.SENSOR, ZCLSensorMetadata, EntityType.STANDARD): sensor.Sensor,
    (Platform.SELECT, ZCLEnumMetadata, EntityType.CONFIG): select.ZCLEnumSelectEntity,
    (
        Platform.SELECT,
        ZCLEnumMetadata,
        EntityType.DIAGNOSTIC,
    ): select.ZCLEnumSelectEntity,
    (
        Platform.NUMBER,
        NumberMetadata,
        EntityType.CONFIG,
    ): number.ZHANumberConfigurationEntity,
    (Platform.NUMBER, NumberMetadata, EntityType.DIAGNOSTIC): number.ZhaNumber,
    (Platform.NUMBER, NumberMetadata, EntityType.STANDARD): number.ZhaNumber,
    (
        Platform.SWITCH,
        SwitchMetadata,
        EntityType.CONFIG,
    ): switch.ZHASwitchConfigurationEntity,
    (Platform.SWITCH, SwitchMetadata, EntityType.STANDARD): switch.Switch,
}


@callback
async def async_add_entities(
    _async_add_entities: AddEntitiesCallback,
@@ -73,6 +138,7 @@ async def async_add_entities(
        tuple[
            type[ZhaEntity],
            tuple[str, ZHADevice, list[ClusterHandler]],
            dict[str, Any],
        ]
    ],
    **kwargs,
@@ -80,7 +146,11 @@ async def async_add_entities(
    """Add entities helper."""
    if not entities:
        return
    to_add = [ent_cls.create_entity(*args, **kwargs) for ent_cls, args in entities]

    to_add = [
        ent_cls.create_entity(*args, **{**kwargs, **kw_args})
        for ent_cls, args, kw_args in entities
    ]
    entities_to_add = [entity for entity in to_add if entity is not None]
    _async_add_entities(entities_to_add, update_before_add=False)
    entities.clear()
@@ -118,6 +188,129 @@ class ProbeEndpoint:

        if device.is_coordinator:
            self.discover_coordinator_device_entities(device)
            return

        self.discover_quirks_v2_entities(device)
        zha_regs.ZHA_ENTITIES.clean_up()

    @callback
    def discover_quirks_v2_entities(self, device: ZHADevice) -> None:
        """Discover entities for a ZHA device exposed by quirks v2."""
        _LOGGER.debug(
            "Attempting to discover quirks v2 entities for device: %s-%s",
            str(device.ieee),
            device.name,
        )

        if not isinstance(device.device, CustomDeviceV2):
            _LOGGER.debug(
                "Device: %s-%s is not a quirks v2 device - skipping "
                "discover_quirks_v2_entities",
                str(device.ieee),
                device.name,
            )
            return

        zigpy_device: CustomDeviceV2 = device.device

        if not zigpy_device.exposes_metadata:
            _LOGGER.debug(
                "Device: %s-%s does not expose any quirks v2 entities",
                str(device.ieee),
                device.name,
            )
            return

        for (
            cluster_details,
            quirk_metadata_list,
        ) in zigpy_device.exposes_metadata.items():
            endpoint_id, cluster_id, cluster_type = cluster_details

            if endpoint_id not in device.endpoints:
                _LOGGER.warning(
                    "Device: %s-%s does not have an endpoint with id: %s - unable to "
                    "create entity with cluster details: %s",
                    str(device.ieee),
                    device.name,
                    endpoint_id,
                    cluster_details,
                )
                continue

            endpoint: Endpoint = device.endpoints[endpoint_id]
            cluster = (
                endpoint.zigpy_endpoint.in_clusters.get(cluster_id)
                if cluster_type is ClusterType.Server
                else endpoint.zigpy_endpoint.out_clusters.get(cluster_id)
            )

            if cluster is None:
                _LOGGER.warning(
                    "Device: %s-%s does not have a cluster with id: %s - "
                    "unable to create entity with cluster details: %s",
                    str(device.ieee),
                    device.name,
                    cluster_id,
                    cluster_details,
                )
                continue

            cluster_handler_id = f"{endpoint.id}:0x{cluster.cluster_id:04x}"
            cluster_handler = (
                endpoint.all_cluster_handlers.get(cluster_handler_id)
                if cluster_type is ClusterType.Server
                else endpoint.client_cluster_handlers.get(cluster_handler_id)
            )
            assert cluster_handler

            for quirk_metadata in quirk_metadata_list:
                platform = Platform(quirk_metadata.entity_platform.value)
                metadata_type = type(quirk_metadata.entity_metadata)
                entity_class = QUIRKS_ENTITY_META_TO_ENTITY_CLASS.get(
                    (platform, metadata_type, quirk_metadata.entity_type)
                )

                if entity_class is None:
                    _LOGGER.warning(
                        "Device: %s-%s has an entity with details: %s that does not"
                        " have an entity class mapping - unable to create entity",
                        str(device.ieee),
                        device.name,
                        {
                            zha_const.CLUSTER_DETAILS: cluster_details,
                            zha_const.QUIRK_METADATA: quirk_metadata,
                        },
                    )
                    continue

                # automatically add the attribute to ZCL_INIT_ATTRS for the cluster
                # handler if it is not already in the list
                if (
                    hasattr(quirk_metadata.entity_metadata, "attribute_name")
                    and quirk_metadata.entity_metadata.attribute_name
                    not in cluster_handler.ZCL_INIT_ATTRS
                ):
                    init_attrs = cluster_handler.ZCL_INIT_ATTRS.copy()
                    init_attrs[
                        quirk_metadata.entity_metadata.attribute_name
                    ] = quirk_metadata.attribute_initialized_from_cache
                    cluster_handler.__dict__[zha_const.ZCL_INIT_ATTRS] = init_attrs

                endpoint.async_new_entity(
                    platform,
                    entity_class,
                    endpoint.unique_id,
                    [cluster_handler],
                    quirk_metadata=quirk_metadata,
                )

                _LOGGER.debug(
                    "'%s' platform -> '%s' using %s",
                    platform,
                    entity_class.__name__,
                    [cluster_handler.name],
                )

    @callback
    def discover_coordinator_device_entities(self, device: ZHADevice) -> None:
@@ -144,14 +337,20 @@ class ProbeEndpoint:
                        counter_group,
                        counter,
                    ),
                    {},
                )
            )
            _LOGGER.debug(
                "'%s' platform -> '%s' using %s",
                Platform.SENSOR,
                sensor.DeviceCounterSensor.__name__,
                f"counter groups[{counter_groups}] counter group[{counter_group}] counter[{counter}]",
            )

        process_counters("counters")
        process_counters("broadcast_counters")
        process_counters("device_counters")
        process_counters("group_counters")
        zha_regs.ZHA_ENTITIES.clean_up()

    @callback
    def discover_by_device_type(self, endpoint: Endpoint) -> None:
@@ -309,7 +508,7 @@ class ProbeEndpoint:
        for platform, ent_n_handler_list in matches.items():
            for entity_and_handler in ent_n_handler_list:
                _LOGGER.debug(
                    "'%s' component -> '%s' using %s",
                    "'%s' platform -> '%s' using %s",
                    platform,
                    entity_and_handler.entity_class.__name__,
                    [ch.name for ch in entity_and_handler.claimed_cluster_handlers],
@@ -317,7 +516,8 @@ class ProbeEndpoint:
        for platform, ent_n_handler_list in matches.items():
            for entity_and_handler in ent_n_handler_list:
                if platform == cmpt_by_dev_type:
                    # for well known device types, like thermostats we'll take only 1st class
                    # for well known device types,
                    # like thermostats we'll take only 1st class
                    endpoint.async_new_entity(
                        platform,
                        entity_and_handler.entity_class,
@@ -405,6 +605,7 @@ class GroupProbe:
                    group.group_id,
                    zha_gateway.coordinator_zha_device,
                ),
                {},
            )
        )
        async_dispatcher_send(self._hass, zha_const.SIGNAL_ADD_ENTITIES)

@@ -7,8 +7,6 @@ import functools
import logging
from typing import TYPE_CHECKING, Any, Final, TypeVar

from zigpy.typing import EndpointType as ZigpyEndpointType

from homeassistant.const import Platform
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_send
@@ -19,6 +17,8 @@ from .cluster_handlers import ClusterHandler
from .helpers import get_zha_data

if TYPE_CHECKING:
    from zigpy import Endpoint as ZigpyEndpoint

    from .cluster_handlers import ClientClusterHandler
    from .device import ZHADevice

@@ -34,11 +34,11 @@ CALLABLE_T = TypeVar("CALLABLE_T", bound=Callable)
class Endpoint:
    """Endpoint for a zha device."""

    def __init__(self, zigpy_endpoint: ZigpyEndpointType, device: ZHADevice) -> None:
    def __init__(self, zigpy_endpoint: ZigpyEndpoint, device: ZHADevice) -> None:
        """Initialize instance."""
        assert zigpy_endpoint is not None
        assert device is not None
        self._zigpy_endpoint: ZigpyEndpointType = zigpy_endpoint
        self._zigpy_endpoint: ZigpyEndpoint = zigpy_endpoint
        self._device: ZHADevice = device
        self._all_cluster_handlers: dict[str, ClusterHandler] = {}
        self._claimed_cluster_handlers: dict[str, ClusterHandler] = {}
@@ -66,7 +66,7 @@ class Endpoint:
        return self._client_cluster_handlers

    @property
    def zigpy_endpoint(self) -> ZigpyEndpointType:
    def zigpy_endpoint(self) -> ZigpyEndpoint:
        """Return endpoint of zigpy device."""
        return self._zigpy_endpoint

@@ -104,7 +104,7 @@ class Endpoint:
        )

    @classmethod
    def new(cls, zigpy_endpoint: ZigpyEndpointType, device: ZHADevice) -> Endpoint:
    def new(cls, zigpy_endpoint: ZigpyEndpoint, device: ZHADevice) -> Endpoint:
        """Create new endpoint and populate cluster handlers."""
        endpoint = cls(zigpy_endpoint, device)
        endpoint.add_all_cluster_handlers()
@@ -211,6 +211,7 @@ class Endpoint:
        entity_class: CALLABLE_T,
        unique_id: str,
        cluster_handlers: list[ClusterHandler],
        **kwargs: Any,
    ) -> None:
        """Create a new entity."""
        from .device import DeviceStatus  # pylint: disable=import-outside-toplevel
@@ -220,7 +221,7 @@ class Endpoint:

        zha_data = get_zha_data(self.device.hass)
        zha_data.platforms[platform].append(
            (entity_class, (unique_id, self.device, cluster_handlers))
            (entity_class, (unique_id, self.device, cluster_handlers), kwargs or {})
        )

    @callback

@@ -7,7 +7,9 @@ import functools
import logging
from typing import TYPE_CHECKING, Any, Self

from homeassistant.const import ATTR_NAME
from zigpy.quirks.v2 import EntityMetadata, EntityType

from homeassistant.const import ATTR_NAME, EntityCategory
from homeassistant.core import CALLBACK_TYPE, callback
from homeassistant.helpers import entity
from homeassistant.helpers.debounce import Debouncer
@@ -175,6 +177,31 @@ class ZhaEntity(BaseZhaEntity, RestoreEntity):
        """
        return cls(unique_id, zha_device, cluster_handlers, **kwargs)

    def _init_from_quirks_metadata(self, entity_metadata: EntityMetadata) -> None:
        """Init this entity from the quirks metadata."""
        if entity_metadata.initially_disabled:
            self._attr_entity_registry_enabled_default = False

        if entity_metadata.translation_key:
            self._attr_translation_key = entity_metadata.translation_key

        if hasattr(entity_metadata.entity_metadata, "attribute_name"):
            if not entity_metadata.translation_key:
                self._attr_translation_key = (
                    entity_metadata.entity_metadata.attribute_name
                )
            self._unique_id_suffix = entity_metadata.entity_metadata.attribute_name
        elif hasattr(entity_metadata.entity_metadata, "command_name"):
            if not entity_metadata.translation_key:
                self._attr_translation_key = (
                    entity_metadata.entity_metadata.command_name
                )
            self._unique_id_suffix = entity_metadata.entity_metadata.command_name
        if entity_metadata.entity_type is EntityType.CONFIG:
            self._attr_entity_category = EntityCategory.CONFIG
        elif entity_metadata.entity_type is EntityType.DIAGNOSTIC:
            self._attr_entity_category = EntityCategory.DIAGNOSTIC

    @property
    def available(self) -> bool:
        """Return entity availability."""

@@ -1185,7 +1185,7 @@ class LightGroup(BaseLight, ZhaGroupEntity):
        self._zha_config_enhanced_light_transition = False

        self._attr_color_mode = ColorMode.UNKNOWN
        self._attr_supported_color_modes = set()
        self._attr_supported_color_modes = {ColorMode.ONOFF}

    # remove this when all ZHA platforms and base entities are updated
    @property
@@ -1285,6 +1285,19 @@ class LightGroup(BaseLight, ZhaGroupEntity):
            effects_count = Counter(itertools.chain(all_effects))
            self._attr_effect = effects_count.most_common(1)[0][0]

        supported_color_modes = {ColorMode.ONOFF}
        all_supported_color_modes: list[set[ColorMode]] = list(
            helpers.find_state_attributes(states, light.ATTR_SUPPORTED_COLOR_MODES)
        )
        if all_supported_color_modes:
            # Merge all color modes.
            supported_color_modes = filter_supported_color_modes(
                set().union(*all_supported_color_modes)
            )

        self._attr_supported_color_modes = supported_color_modes

        self._attr_color_mode = ColorMode.UNKNOWN
        all_color_modes = list(
            helpers.find_state_attributes(on_states, light.ATTR_COLOR_MODE)
        )
@@ -1292,25 +1305,26 @@ class LightGroup(BaseLight, ZhaGroupEntity):
            # Report the most common color mode, select brightness and onoff last
            color_mode_count = Counter(itertools.chain(all_color_modes))
            if ColorMode.ONOFF in color_mode_count:
                color_mode_count[ColorMode.ONOFF] = -1
                if ColorMode.ONOFF in supported_color_modes:
                    color_mode_count[ColorMode.ONOFF] = -1
                else:
                    color_mode_count.pop(ColorMode.ONOFF)
            if ColorMode.BRIGHTNESS in color_mode_count:
                color_mode_count[ColorMode.BRIGHTNESS] = 0
            self._attr_color_mode = color_mode_count.most_common(1)[0][0]
                if ColorMode.BRIGHTNESS in supported_color_modes:
                    color_mode_count[ColorMode.BRIGHTNESS] = 0
                else:
                    color_mode_count.pop(ColorMode.BRIGHTNESS)
            if color_mode_count:
                self._attr_color_mode = color_mode_count.most_common(1)[0][0]
            else:
                self._attr_color_mode = next(iter(supported_color_modes))

            if self._attr_color_mode == ColorMode.HS and (
                color_mode_count[ColorMode.HS] != len(self._group.members)
                or self._zha_config_always_prefer_xy_color_mode
            ):  # switch to XY if all members do not support HS
                self._attr_color_mode = ColorMode.XY

        all_supported_color_modes: list[set[ColorMode]] = list(
            helpers.find_state_attributes(states, light.ATTR_SUPPORTED_COLOR_MODES)
        )
        if all_supported_color_modes:
            # Merge all color modes.
            self._attr_supported_color_modes = filter_supported_color_modes(
                set().union(*all_supported_color_modes)
            )

        self._attr_supported_features = LightEntityFeature(0)
        for support in helpers.find_state_attributes(states, ATTR_SUPPORTED_FEATURES):
            # Merge supported features by emulating support for every feature

@@ -27,7 +27,7 @@
    "pyserial-asyncio==0.6",
    "zha-quirks==0.0.112",
    "zigpy-deconz==0.23.1",
    "zigpy==0.63.2",
    "zigpy==0.63.3",
    "zigpy-xbee==0.20.1",
    "zigpy-zigate==0.12.0",
    "zigpy-znp==0.12.1",

@@ -5,6 +5,7 @@ import functools
import logging
from typing import TYPE_CHECKING, Any, Self

from zigpy.quirks.v2 import EntityMetadata, NumberMetadata
from zigpy.zcl.clusters.hvac import Thermostat

from homeassistant.components.number import NumberEntity, NumberMode
@@ -24,6 +25,7 @@ from .core.const import (
    CLUSTER_HANDLER_LEVEL,
    CLUSTER_HANDLER_OCCUPANCY,
    CLUSTER_HANDLER_THERMOSTAT,
    QUIRK_METADATA,
    SIGNAL_ADD_ENTITIES,
    SIGNAL_ATTR_UPDATED,
)
@@ -400,7 +402,7 @@ class ZHANumberConfigurationEntity(ZhaEntity, NumberEntity):
        Return entity if it is a supported configuration, otherwise return None
        """
        cluster_handler = cluster_handlers[0]
        if (
        if QUIRK_METADATA not in kwargs and (
            cls._attribute_name in cluster_handler.cluster.unsupported_attributes
            or cls._attribute_name not in cluster_handler.cluster.attributes_by_name
            or cluster_handler.cluster.get(cls._attribute_name) is None
@@ -423,8 +425,27 @@ class ZHANumberConfigurationEntity(ZhaEntity, NumberEntity):
    ) -> None:
        """Init this number configuration entity."""
        self._cluster_handler: ClusterHandler = cluster_handlers[0]
        if QUIRK_METADATA in kwargs:
            self._init_from_quirks_metadata(kwargs[QUIRK_METADATA])
        super().__init__(unique_id, zha_device, cluster_handlers, **kwargs)

    def _init_from_quirks_metadata(self, entity_metadata: EntityMetadata) -> None:
        """Init this entity from the quirks metadata."""
        super()._init_from_quirks_metadata(entity_metadata)
        number_metadata: NumberMetadata = entity_metadata.entity_metadata
        self._attribute_name = number_metadata.attribute_name

        if number_metadata.min is not None:
            self._attr_native_min_value = number_metadata.min
        if number_metadata.max is not None:
            self._attr_native_max_value = number_metadata.max
        if number_metadata.step is not None:
            self._attr_native_step = number_metadata.step
        if number_metadata.unit is not None:
            self._attr_native_unit_of_measurement = number_metadata.unit
        if number_metadata.multiplier is not None:
            self._attr_multiplier = number_metadata.multiplier

    @property
    def native_value(self) -> float:
        """Return the current value."""

@@ -10,6 +10,7 @@ from zhaquirks.quirk_ids import TUYA_PLUG_MANUFACTURER, TUYA_PLUG_ONOFF
from zhaquirks.xiaomi.aqara.magnet_ac01 import OppleCluster as MagnetAC01OppleCluster
from zhaquirks.xiaomi.aqara.switch_acn047 import OppleCluster as T2RelayOppleCluster
from zigpy import types
from zigpy.quirks.v2 import EntityMetadata, ZCLEnumMetadata
from zigpy.zcl.clusters.general import OnOff
from zigpy.zcl.clusters.security import IasWd

@@ -27,6 +28,7 @@ from .core.const import (
    CLUSTER_HANDLER_INOVELLI,
    CLUSTER_HANDLER_OCCUPANCY,
    CLUSTER_HANDLER_ON_OFF,
    QUIRK_METADATA,
    SIGNAL_ADD_ENTITIES,
    SIGNAL_ATTR_UPDATED,
    Strobe,
@@ -82,9 +84,9 @@ class ZHAEnumSelectEntity(ZhaEntity, SelectEntity):
        **kwargs: Any,
    ) -> None:
        """Init this select entity."""
        self._cluster_handler: ClusterHandler = cluster_handlers[0]
        self._attribute_name = self._enum.__name__
        self._attr_options = [entry.name.replace("_", " ") for entry in self._enum]
        self._cluster_handler: ClusterHandler = cluster_handlers[0]
        super().__init__(unique_id, zha_device, cluster_handlers, **kwargs)

    @property
@@ -176,7 +178,7 @@ class ZCLEnumSelectEntity(ZhaEntity, SelectEntity):
        Return entity if it is a supported configuration, otherwise return None
        """
        cluster_handler = cluster_handlers[0]
        if (
        if QUIRK_METADATA not in kwargs and (
            cls._attribute_name in cluster_handler.cluster.unsupported_attributes
            or cls._attribute_name not in cluster_handler.cluster.attributes_by_name
            or cluster_handler.cluster.get(cls._attribute_name) is None
@@ -198,10 +200,19 @@ class ZCLEnumSelectEntity(ZhaEntity, SelectEntity):
        **kwargs: Any,
    ) -> None:
        """Init this select entity."""
        self._attr_options = [entry.name.replace("_", " ") for entry in self._enum]
        self._cluster_handler: ClusterHandler = cluster_handlers[0]
        if QUIRK_METADATA in kwargs:
            self._init_from_quirks_metadata(kwargs[QUIRK_METADATA])
        self._attr_options = [entry.name.replace("_", " ") for entry in self._enum]
        super().__init__(unique_id, zha_device, cluster_handlers, **kwargs)

    def _init_from_quirks_metadata(self, entity_metadata: EntityMetadata) -> None:
        """Init this entity from the quirks metadata."""
        super()._init_from_quirks_metadata(entity_metadata)
        zcl_enum_metadata: ZCLEnumMetadata = entity_metadata.entity_metadata
        self._attribute_name = zcl_enum_metadata.attribute_name
        self._enum = zcl_enum_metadata.enum

    @property
    def current_option(self) -> str | None:
        """Return the selected entity option to represent the entity state."""

@@ -6,11 +6,13 @@ from dataclasses import dataclass
from datetime import timedelta
import enum
import functools
import logging
import numbers
import random
from typing import TYPE_CHECKING, Any, Self

from zigpy import types
from zigpy.quirks.v2 import EntityMetadata, ZCLEnumMetadata, ZCLSensorMetadata
from zigpy.state import Counter, State
from zigpy.zcl.clusters.closures import WindowCovering
from zigpy.zcl.clusters.general import Basic
@@ -68,6 +70,7 @@ from .core.const import (
    CLUSTER_HANDLER_TEMPERATURE,
    CLUSTER_HANDLER_THERMOSTAT,
    DATA_ZHA,
    QUIRK_METADATA,
    SIGNAL_ADD_ENTITIES,
    SIGNAL_ATTR_UPDATED,
)
@@ -95,6 +98,8 @@ BATTERY_SIZES = {
    255: "Unknown",
}

_LOGGER = logging.getLogger(__name__)

CLUSTER_HANDLER_ST_HUMIDITY_CLUSTER = (
    f"cluster_handler_0x{SMARTTHINGS_HUMIDITY_CLUSTER:04x}"
)
@@ -135,17 +140,6 @@ class Sensor(ZhaEntity, SensorEntity):
    _divisor: int = 1
    _multiplier: int | float = 1

    def __init__(
        self,
        unique_id: str,
        zha_device: ZHADevice,
        cluster_handlers: list[ClusterHandler],
        **kwargs: Any,
    ) -> None:
        """Init this sensor."""
        super().__init__(unique_id, zha_device, cluster_handlers, **kwargs)
        self._cluster_handler: ClusterHandler = cluster_handlers[0]

    @classmethod
    def create_entity(
        cls,
@@ -159,14 +153,44 @@ class Sensor(ZhaEntity, SensorEntity):
        Return entity if it is a supported configuration, otherwise return None
        """
        cluster_handler = cluster_handlers[0]
        if (
        if QUIRK_METADATA not in kwargs and (
            cls._attribute_name in cluster_handler.cluster.unsupported_attributes
            or cls._attribute_name not in cluster_handler.cluster.attributes_by_name
        ):
            _LOGGER.debug(
                "%s is not supported - skipping %s entity creation",
                cls._attribute_name,
                cls.__name__,
            )
            return None

        return cls(unique_id, zha_device, cluster_handlers, **kwargs)

    def __init__(
        self,
        unique_id: str,
        zha_device: ZHADevice,
        cluster_handlers: list[ClusterHandler],
        **kwargs: Any,
    ) -> None:
        """Init this sensor."""
        self._cluster_handler: ClusterHandler = cluster_handlers[0]
        if QUIRK_METADATA in kwargs:
            self._init_from_quirks_metadata(kwargs[QUIRK_METADATA])
        super().__init__(unique_id, zha_device, cluster_handlers, **kwargs)

    def _init_from_quirks_metadata(self, entity_metadata: EntityMetadata) -> None:
        """Init this entity from the quirks metadata."""
        super()._init_from_quirks_metadata(entity_metadata)
        sensor_metadata: ZCLSensorMetadata = entity_metadata.entity_metadata
        self._attribute_name = sensor_metadata.attribute_name
        if sensor_metadata.divisor is not None:
            self._divisor = sensor_metadata.divisor
        if sensor_metadata.multiplier is not None:
            self._multiplier = sensor_metadata.multiplier
        if sensor_metadata.unit is not None:
            self._attr_native_unit_of_measurement = sensor_metadata.unit

    async def async_added_to_hass(self) -> None:
        """Run when about to be added to hass."""
        await super().async_added_to_hass()
@@ -330,6 +354,13 @@ class EnumSensor(Sensor):
    _attr_device_class: SensorDeviceClass = SensorDeviceClass.ENUM
    _enum: type[enum.Enum]

    def _init_from_quirks_metadata(self, entity_metadata: EntityMetadata) -> None:
        """Init this entity from the quirks metadata."""
        ZhaEntity._init_from_quirks_metadata(self, entity_metadata)  # pylint: disable=protected-access
        sensor_metadata: ZCLEnumMetadata = entity_metadata.entity_metadata
        self._attribute_name = sensor_metadata.attribute_name
        self._enum = sensor_metadata.enum

    def formatter(self, value: int) -> str | None:
        """Use name of enum."""
        assert self._enum is not None

@@ -6,6 +6,7 @@ import logging
|
||||
from typing import TYPE_CHECKING, Any, Self
|
||||
|
||||
from zhaquirks.quirk_ids import TUYA_PLUG_ONOFF
|
||||
from zigpy.quirks.v2 import EntityMetadata, SwitchMetadata
|
||||
from zigpy.zcl.clusters.closures import ConfigStatus, WindowCovering, WindowCoveringMode
|
||||
from zigpy.zcl.clusters.general import OnOff
|
||||
from zigpy.zcl.foundation import Status
|
||||
@@ -23,6 +24,7 @@ from .core.const import (
|
||||
CLUSTER_HANDLER_COVER,
|
||||
CLUSTER_HANDLER_INOVELLI,
|
||||
CLUSTER_HANDLER_ON_OFF,
|
||||
QUIRK_METADATA,
|
||||
SIGNAL_ADD_ENTITIES,
|
||||
SIGNAL_ATTR_UPDATED,
|
||||
)
|
||||
@@ -173,6 +175,8 @@ class ZHASwitchConfigurationEntity(ZhaEntity, SwitchEntity):
    _attribute_name: str
    _inverter_attribute_name: str | None = None
    _force_inverted: bool = False
    _off_value: int = 0
    _on_value: int = 1

    @classmethod
    def create_entity(
@@ -187,7 +191,7 @@ class ZHASwitchConfigurationEntity(ZhaEntity, SwitchEntity):
        Return entity if it is a supported configuration, otherwise return None
        """
        cluster_handler = cluster_handlers[0]
        if (
        if QUIRK_METADATA not in kwargs and (
            cls._attribute_name in cluster_handler.cluster.unsupported_attributes
            or cls._attribute_name not in cluster_handler.cluster.attributes_by_name
            or cluster_handler.cluster.get(cls._attribute_name) is None
@@ -210,8 +214,22 @@ class ZHASwitchConfigurationEntity(ZhaEntity, SwitchEntity):
    ) -> None:
        """Init this number configuration entity."""
        self._cluster_handler: ClusterHandler = cluster_handlers[0]
        if QUIRK_METADATA in kwargs:
            self._init_from_quirks_metadata(kwargs[QUIRK_METADATA])
        super().__init__(unique_id, zha_device, cluster_handlers, **kwargs)

    def _init_from_quirks_metadata(self, entity_metadata: EntityMetadata) -> None:
        """Init this entity from the quirks metadata."""
        super()._init_from_quirks_metadata(entity_metadata)
        switch_metadata: SwitchMetadata = entity_metadata.entity_metadata
        self._attribute_name = switch_metadata.attribute_name
        if switch_metadata.invert_attribute_name:
            self._inverter_attribute_name = switch_metadata.invert_attribute_name
        if switch_metadata.force_inverted:
            self._force_inverted = switch_metadata.force_inverted
        self._off_value = switch_metadata.off_value
        self._on_value = switch_metadata.on_value

    async def async_added_to_hass(self) -> None:
        """Run when about to be added to hass."""
        await super().async_added_to_hass()
@@ -236,14 +254,25 @@ class ZHASwitchConfigurationEntity(ZhaEntity, SwitchEntity):
    @property
    def is_on(self) -> bool:
        """Return if the switch is on based on the statemachine."""
        val = bool(self._cluster_handler.cluster.get(self._attribute_name))
        if self._on_value != 1:
            val = self._cluster_handler.cluster.get(self._attribute_name)
            val = val == self._on_value
        else:
            val = bool(self._cluster_handler.cluster.get(self._attribute_name))
        return (not val) if self.inverted else val

    async def async_turn_on_off(self, state: bool) -> None:
        """Turn the entity on or off."""
        await self._cluster_handler.write_attributes_safe(
            {self._attribute_name: not state if self.inverted else state}
        )
        if self.inverted:
            state = not state
        if state:
            await self._cluster_handler.write_attributes_safe(
                {self._attribute_name: self._on_value}
            )
        else:
            await self._cluster_handler.write_attributes_safe(
                {self._attribute_name: self._off_value}
            )
        self.async_write_ha_state()

    async def async_turn_on(self, **kwargs: Any) -> None:
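The `is_on` and `async_turn_on_off` changes above let quirks define custom on/off attribute values plus an optional inversion. A small self-contained sketch of the same mapping logic, outside of ZHA (function and parameter names are illustrative only):

```python
def resolve_is_on(raw: int, on_value: int, inverted: bool) -> bool:
    """Translate a raw attribute value into an on/off state."""
    if on_value != 1:
        val = raw == on_value
    else:
        val = bool(raw)
    return (not val) if inverted else val


def resolve_write_value(state: bool, on_value: int, off_value: int, inverted: bool) -> int:
    """Pick the attribute value to write for the requested state."""
    if inverted:
        state = not state
    return on_value if state else off_value


# A hypothetical device that reports 0x00 for "on" and 0xFF for "off".
assert resolve_is_on(0x00, on_value=0x00, inverted=False) is True
assert resolve_write_value(True, on_value=0x00, off_value=0xFF, inverted=False) == 0x00
```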
@@ -33,6 +33,7 @@ from .core import (
    CoreState,
    Event,
    HassJob,
    HassJobType,
    HomeAssistant,
    callback,
)
@@ -363,7 +364,6 @@ class ConfigEntry:

        self._integration_for_domain: loader.Integration | None = None
        self._tries = 0
        self._setup_again_job: HassJob | None = None

    def __repr__(self) -> str:
        """Representation of ConfigEntry."""
@@ -555,12 +555,18 @@ class ConfigEntry:

            if hass.state is CoreState.running:
                self._async_cancel_retry_setup = async_call_later(
                    hass, wait_time, self._async_get_setup_again_job(hass)
                    hass,
                    wait_time,
                    HassJob(
                        functools.partial(self._async_setup_again, hass),
                        job_type=HassJobType.Callback,
                    ),
                )
            else:
                self._async_cancel_retry_setup = hass.bus.async_listen_once(
                self._async_cancel_retry_setup = hass.bus.async_listen(
                    EVENT_HOMEASSISTANT_STARTED,
                    functools.partial(self._async_setup_again, hass),
                    run_immediately=True,
                )

        await self._async_process_on_unload(hass)
@@ -585,28 +591,25 @@ class ConfigEntry:
        if not domain_is_integration:
            return

        self.async_cancel_retry_setup()

        if result:
            self._async_set_state(hass, ConfigEntryState.LOADED, None)
        else:
            self._async_set_state(hass, ConfigEntryState.SETUP_ERROR, error_reason)

    async def _async_setup_again(self, hass: HomeAssistant, *_: Any) -> None:
        """Run setup again."""
    @callback
    def _async_setup_again(self, hass: HomeAssistant, *_: Any) -> None:
        """Schedule setup again.

        This method is a callback to ensure that _async_cancel_retry_setup
        is unset as soon as its callback is called.
        """
        self._async_cancel_retry_setup = None
        # Check again when we fire in case shutdown
        # has started so we do not block shutdown
        if not hass.is_stopping:
            self._async_cancel_retry_setup = None
            await self.async_setup(hass)

    @callback
    def _async_get_setup_again_job(self, hass: HomeAssistant) -> HassJob:
        """Get a job that will call setup again."""
        if not self._setup_again_job:
            self._setup_again_job = HassJob(
                functools.partial(self._async_setup_again, hass),
                cancel_on_shutdown=True,
            )
        return self._setup_again_job
            hass.async_create_task(self.async_setup(hass), eager_start=True)

    @callback
    def async_shutdown(self) -> None:
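The config-entry change above replaces the cached retry HassJob with a plain callback that clears its own cancel handle as soon as it fires, skips the retry when shutdown has started, and eagerly creates the setup task. A rough asyncio-only sketch of that scheduling pattern, assuming nothing about the Home Assistant API beyond what the diff shows:

```python
import asyncio


class RetryingSetup:
    """Minimal retry scheduler mirroring the callback-based approach."""

    def __init__(self) -> None:
        self.stopping = False
        self._cancel_retry: asyncio.TimerHandle | None = None

    async def async_setup(self) -> None:
        print("running setup")

    def schedule_retry(self, wait_time: float) -> None:
        loop = asyncio.get_running_loop()
        self._cancel_retry = loop.call_later(wait_time, self._setup_again)

    def _setup_again(self) -> None:
        # Unset the handle as soon as the callback fires so a later
        # cancel() cannot touch a retry that is already under way.
        self._cancel_retry = None
        if not self.stopping:
            asyncio.get_running_loop().create_task(self.async_setup())


async def main() -> None:
    entry = RetryingSetup()
    entry.schedule_retry(0.01)
    await asyncio.sleep(0.05)


asyncio.run(main())
```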
@@ -16,7 +16,7 @@ from .helpers.deprecation import (
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2024
MINOR_VERSION: Final = 3
PATCH_VERSION: Final = "0b0"
PATCH_VERSION: Final = "0b4"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 11, 0)
@@ -1602,6 +1602,11 @@ HASSIO_USER_NAME = "Supervisor"

SIGNAL_BOOTSTRAP_INTEGRATIONS = "bootstrap_integrations"


# hass.data key for logging information.
KEY_DATA_LOGGING = "logging"


# Date/Time formats
FORMAT_DATE: Final = "%Y-%m-%d"
FORMAT_TIME: Final = "%H:%M:%S"
@@ -95,6 +95,7 @@ from .util.async_ import (
    run_callback_threadsafe,
    shutdown_run_callback_threadsafe,
)
from .util.executor import InterruptibleThreadPoolExecutor
from .util.json import JsonObjectType
from .util.read_only_dict import ReadOnlyDict
from .util.timeout import TimeoutManager
@@ -394,6 +395,9 @@ class HomeAssistant:
        self.timeout: TimeoutManager = TimeoutManager()
        self._stop_future: concurrent.futures.Future[None] | None = None
        self._shutdown_jobs: list[HassJobWithArgs] = []
        self.import_executor = InterruptibleThreadPoolExecutor(
            max_workers=1, thread_name_prefix="ImportExecutor"
        )

    @cached_property
    def is_running(self) -> bool:
@@ -678,6 +682,16 @@ class HomeAssistant:

        return task

    @callback
    def async_add_import_executor_job(
        self, target: Callable[..., _T], *args: Any
    ) -> asyncio.Future[_T]:
        """Add an import executor job from within the event loop."""
        task = self.loop.run_in_executor(self.import_executor, target, *args)
        self._tasks.add(task)
        task.add_done_callback(self._tasks.remove)
        return task

    @overload
    @callback
    def async_run_hass_job(
@@ -992,6 +1006,7 @@ class HomeAssistant:
        self._async_log_running_tasks("close")

        self.set_state(CoreState.stopped)
        self.import_executor.shutdown()

        if self._stopped is not None:
            self._stopped.set()
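`async_add_import_executor_job` above funnels blocking module imports through a dedicated single-worker executor so they neither stall the event loop nor compete with ordinary executor jobs. A standalone sketch of the same idea using a plain `ThreadPoolExecutor` (not the `InterruptibleThreadPoolExecutor` used in core):

```python
import asyncio
import importlib
from concurrent.futures import ThreadPoolExecutor
from types import ModuleType

# One worker keeps imports serialized, avoiding many threads contending
# for Python's import machinery at the same time.
_IMPORT_EXECUTOR = ThreadPoolExecutor(max_workers=1, thread_name_prefix="ImportExecutor")


async def import_module_async(name: str) -> ModuleType:
    """Import a module without blocking the event loop."""
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(_IMPORT_EXECUTOR, importlib.import_module, name)


async def main() -> None:
    module = await import_module_async("json")
    print(module.dumps({"loaded": True}))


asyncio.run(main())
```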
@@ -86,6 +86,7 @@ def report(
    exclude_integrations: set | None = None,
    error_if_core: bool = True,
    level: int = logging.WARNING,
    log_custom_component_only: bool = False,
) -> None:
    """Report incorrect usage.

@@ -99,10 +100,12 @@ def report(
        msg = f"Detected code that {what}. Please report this issue."
        if error_if_core:
            raise RuntimeError(msg) from err
        _LOGGER.warning(msg, stack_info=True)
        if not log_custom_component_only:
            _LOGGER.warning(msg, stack_info=True)
        return

    _report_integration(what, integration_frame, level)
    if not log_custom_component_only or integration_frame.custom_integration:
        _report_integration(what, integration_frame, level)


def _report_integration(
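The new `log_custom_component_only` flag above narrows when a deprecation report is emitted: when set, core-originated usage stays silent and integration usage is only reported for custom integrations. A hedged standalone sketch of that gating decision (this is an illustration of the logic, not the helper itself):

```python
import logging

_LOGGER = logging.getLogger(__name__)


def report_usage(
    msg: str,
    *,
    from_core_code: bool,
    from_custom_integration: bool,
    error_if_core: bool = True,
    log_custom_component_only: bool = False,
) -> None:
    """Decide whether a deprecation report raises, logs, or stays silent."""
    if from_core_code:
        if error_if_core:
            raise RuntimeError(msg)
        if not log_custom_component_only:
            _LOGGER.warning(msg)
        return

    if not log_custom_component_only or from_custom_integration:
        _LOGGER.warning(msg)


# Core-originated usage with the new flag set: nothing is logged.
report_usage(
    "accesses a deprecated helper",
    from_core_code=True,
    from_custom_integration=False,
    error_if_core=False,
    log_custom_component_only=True,
)
```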
homeassistant/helpers/http.py (new file, 184 lines)
@@ -0,0 +1,184 @@
|
||||
"""Helper to track the current http request."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Awaitable, Callable
|
||||
from contextvars import ContextVar
|
||||
from http import HTTPStatus
|
||||
import logging
|
||||
from typing import Any, Final
|
||||
|
||||
from aiohttp import web
|
||||
from aiohttp.typedefs import LooseHeaders
|
||||
from aiohttp.web import Request
|
||||
from aiohttp.web_exceptions import (
|
||||
HTTPBadRequest,
|
||||
HTTPInternalServerError,
|
||||
HTTPUnauthorized,
|
||||
)
|
||||
from aiohttp.web_urldispatcher import AbstractRoute
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import exceptions
|
||||
from homeassistant.const import CONTENT_TYPE_JSON
|
||||
from homeassistant.core import Context, HomeAssistant, is_callback
|
||||
from homeassistant.util.json import JSON_ENCODE_EXCEPTIONS, format_unserializable_data
|
||||
|
||||
from .json import find_paths_unserializable_data, json_bytes, json_dumps
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
KEY_AUTHENTICATED: Final = "ha_authenticated"
|
||||
|
||||
current_request: ContextVar[Request | None] = ContextVar(
|
||||
"current_request", default=None
|
||||
)
|
||||
|
||||
|
||||
def request_handler_factory(
|
||||
hass: HomeAssistant, view: HomeAssistantView, handler: Callable
|
||||
) -> Callable[[web.Request], Awaitable[web.StreamResponse]]:
|
||||
"""Wrap the handler classes."""
|
||||
is_coroutinefunction = asyncio.iscoroutinefunction(handler)
|
||||
assert is_coroutinefunction or is_callback(
|
||||
handler
|
||||
), "Handler should be a coroutine or a callback."
|
||||
|
||||
async def handle(request: web.Request) -> web.StreamResponse:
|
||||
"""Handle incoming request."""
|
||||
if hass.is_stopping:
|
||||
return web.Response(status=HTTPStatus.SERVICE_UNAVAILABLE)
|
||||
|
||||
authenticated = request.get(KEY_AUTHENTICATED, False)
|
||||
|
||||
if view.requires_auth and not authenticated:
|
||||
raise HTTPUnauthorized()
|
||||
|
||||
if _LOGGER.isEnabledFor(logging.DEBUG):
|
||||
_LOGGER.debug(
|
||||
"Serving %s to %s (auth: %s)",
|
||||
request.path,
|
||||
request.remote,
|
||||
authenticated,
|
||||
)
|
||||
|
||||
try:
|
||||
if is_coroutinefunction:
|
||||
result = await handler(request, **request.match_info)
|
||||
else:
|
||||
result = handler(request, **request.match_info)
|
||||
except vol.Invalid as err:
|
||||
raise HTTPBadRequest() from err
|
||||
except exceptions.ServiceNotFound as err:
|
||||
raise HTTPInternalServerError() from err
|
||||
except exceptions.Unauthorized as err:
|
||||
raise HTTPUnauthorized() from err
|
||||
|
||||
if isinstance(result, web.StreamResponse):
|
||||
# The method handler returned a ready-made Response, how nice of it
|
||||
return result
|
||||
|
||||
status_code = HTTPStatus.OK
|
||||
if isinstance(result, tuple):
|
||||
result, status_code = result
|
||||
|
||||
if isinstance(result, bytes):
|
||||
return web.Response(body=result, status=status_code)
|
||||
|
||||
if isinstance(result, str):
|
||||
return web.Response(text=result, status=status_code)
|
||||
|
||||
if result is None:
|
||||
return web.Response(body=b"", status=status_code)
|
||||
|
||||
raise TypeError(
|
||||
f"Result should be None, string, bytes or StreamResponse. Got: {result}"
|
||||
)
|
||||
|
||||
return handle
|
||||
|
||||
|
||||
class HomeAssistantView:
|
||||
"""Base view for all views."""
|
||||
|
||||
url: str | None = None
|
||||
extra_urls: list[str] = []
|
||||
# Views inheriting from this class can override this
|
||||
requires_auth = True
|
||||
cors_allowed = False
|
||||
|
||||
@staticmethod
|
||||
def context(request: web.Request) -> Context:
|
||||
"""Generate a context from a request."""
|
||||
if (user := request.get("hass_user")) is None:
|
||||
return Context()
|
||||
|
||||
return Context(user_id=user.id)
|
||||
|
||||
@staticmethod
|
||||
def json(
|
||||
result: Any,
|
||||
status_code: HTTPStatus | int = HTTPStatus.OK,
|
||||
headers: LooseHeaders | None = None,
|
||||
) -> web.Response:
|
||||
"""Return a JSON response."""
|
||||
try:
|
||||
msg = json_bytes(result)
|
||||
except JSON_ENCODE_EXCEPTIONS as err:
|
||||
_LOGGER.error(
|
||||
"Unable to serialize to JSON. Bad data found at %s",
|
||||
format_unserializable_data(
|
||||
find_paths_unserializable_data(result, dump=json_dumps)
|
||||
),
|
||||
)
|
||||
raise HTTPInternalServerError from err
|
||||
response = web.Response(
|
||||
body=msg,
|
||||
content_type=CONTENT_TYPE_JSON,
|
||||
status=int(status_code),
|
||||
headers=headers,
|
||||
zlib_executor_size=32768,
|
||||
)
|
||||
response.enable_compression()
|
||||
return response
|
||||
|
||||
def json_message(
|
||||
self,
|
||||
message: str,
|
||||
status_code: HTTPStatus | int = HTTPStatus.OK,
|
||||
message_code: str | None = None,
|
||||
headers: LooseHeaders | None = None,
|
||||
) -> web.Response:
|
||||
"""Return a JSON message response."""
|
||||
data = {"message": message}
|
||||
if message_code is not None:
|
||||
data["code"] = message_code
|
||||
return self.json(data, status_code, headers=headers)
|
||||
|
||||
def register(
|
||||
self, hass: HomeAssistant, app: web.Application, router: web.UrlDispatcher
|
||||
) -> None:
|
||||
"""Register the view with a router."""
|
||||
assert self.url is not None, "No url set for view"
|
||||
urls = [self.url] + self.extra_urls
|
||||
routes: list[AbstractRoute] = []
|
||||
|
||||
for method in ("get", "post", "delete", "put", "patch", "head", "options"):
|
||||
if not (handler := getattr(self, method, None)):
|
||||
continue
|
||||
|
||||
handler = request_handler_factory(hass, self, handler)
|
||||
|
||||
for url in urls:
|
||||
routes.append(router.add_route(method, url, handler))
|
||||
|
||||
# Use `get` because CORS middleware is not loaded in emulated_hue
|
||||
if self.cors_allowed:
|
||||
allow_cors = app.get("allow_all_cors")
|
||||
else:
|
||||
allow_cors = app.get("allow_configured_cors")
|
||||
|
||||
if allow_cors:
|
||||
for route in routes:
|
||||
allow_cors(route)
|
||||
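The new helpers/http.py above dispatches on the view handler's return type: a ready `StreamResponse` passes straight through, while `bytes`, `str`, and `None` (optionally paired with a status code in a tuple) are wrapped into plain responses. A minimal aiohttp-only sketch of that dispatch, assuming nothing beyond what the diff shows:

```python
from http import HTTPStatus

from aiohttp import web


def result_to_response(result: object) -> web.StreamResponse:
    """Convert a view handler result into an aiohttp response."""
    if isinstance(result, web.StreamResponse):
        # The handler returned a ready-made response.
        return result

    status_code: HTTPStatus | int = HTTPStatus.OK
    if isinstance(result, tuple):
        result, status_code = result

    if isinstance(result, bytes):
        return web.Response(body=result, status=status_code)
    if isinstance(result, str):
        return web.Response(text=result, status=status_code)
    if result is None:
        return web.Response(body=b"", status=status_code)

    raise TypeError(
        f"Result should be None, string, bytes or StreamResponse. Got: {result}"
    )


assert result_to_response(("ok", HTTPStatus.CREATED)).status == HTTPStatus.CREATED
```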
@@ -2,6 +2,7 @@

from __future__ import annotations

from abc import abstractmethod
import asyncio
from collections.abc import Collection, Coroutine, Iterable
import dataclasses
@@ -385,8 +386,8 @@ class IntentHandler:
        return f"<{self.__class__.__name__} - {self.intent_type}>"


class ServiceIntentHandler(IntentHandler):
    """Service Intent handler registration.
class DynamicServiceIntentHandler(IntentHandler):
    """Service Intent handler registration (dynamic).

    Service specific intent handler that calls a service by name/entity_id.
    """
@@ -404,15 +405,11 @@ class ServiceIntentHandler(IntentHandler):
    def __init__(
        self,
        intent_type: str,
        domain: str,
        service: str,
        speech: str | None = None,
        extra_slots: dict[str, vol.Schema] | None = None,
    ) -> None:
        """Create Service Intent Handler."""
        self.intent_type = intent_type
        self.domain = domain
        self.service = service
        self.speech = speech
        self.extra_slots = extra_slots
@@ -441,6 +438,13 @@ class ServiceIntentHandler(IntentHandler):
            extra=vol.ALLOW_EXTRA,
        )

    @abstractmethod
    def get_domain_and_service(
        self, intent_obj: Intent, state: State
    ) -> tuple[str, str]:
        """Get the domain and service name to call."""
        raise NotImplementedError()

    async def async_handle(self, intent_obj: Intent) -> IntentResponse:
        """Handle the hass intent."""
        hass = intent_obj.hass
@@ -536,7 +540,10 @@ class ServiceIntentHandler(IntentHandler):

        service_coros: list[Coroutine[Any, Any, None]] = []
        for state in states:
            service_coros.append(self.async_call_service(intent_obj, state))
            domain, service = self.get_domain_and_service(intent_obj, state)
            service_coros.append(
                self.async_call_service(domain, service, intent_obj, state)
            )

        # Handle service calls in parallel, noting failures as they occur.
        failed_results: list[IntentResponseTarget] = []
@@ -558,7 +565,7 @@ class ServiceIntentHandler(IntentHandler):
            # If no entities succeeded, raise an error.
            failed_entity_ids = [target.id for target in failed_results]
            raise IntentHandleError(
                f"Failed to call {self.service} for: {failed_entity_ids}"
                f"Failed to call {service} for: {failed_entity_ids}"
            )

        response.async_set_results(
@@ -574,7 +581,9 @@ class ServiceIntentHandler(IntentHandler):

        return response

    async def async_call_service(self, intent_obj: Intent, state: State) -> None:
    async def async_call_service(
        self, domain: str, service: str, intent_obj: Intent, state: State
    ) -> None:
        """Call service on entity."""
        hass = intent_obj.hass

@@ -587,13 +596,13 @@ class ServiceIntentHandler(IntentHandler):
        await self._run_then_background(
            hass.async_create_task(
                hass.services.async_call(
                    self.domain,
                    self.service,
                    domain,
                    service,
                    service_data,
                    context=intent_obj.context,
                    blocking=True,
                ),
                f"intent_call_service_{self.domain}_{self.service}",
                f"intent_call_service_{domain}_{service}",
            )
        )
@@ -615,6 +624,32 @@ class ServiceIntentHandler(IntentHandler):
            raise


class ServiceIntentHandler(DynamicServiceIntentHandler):
    """Service Intent handler registration.

    Service specific intent handler that calls a service by name/entity_id.
    """

    def __init__(
        self,
        intent_type: str,
        domain: str,
        service: str,
        speech: str | None = None,
        extra_slots: dict[str, vol.Schema] | None = None,
    ) -> None:
        """Create service handler."""
        super().__init__(intent_type, speech=speech, extra_slots=extra_slots)
        self.domain = domain
        self.service = service

    def get_domain_and_service(
        self, intent_obj: Intent, state: State
    ) -> tuple[str, str]:
        """Get the domain and service name to call."""
        return (self.domain, self.service)


class IntentCategory(Enum):
    """Category of an intent."""
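`DynamicServiceIntentHandler` lets a subclass choose the domain and service per target state instead of fixing them at registration time, which is what the new `get_domain_and_service` hook is for. A hedged sketch of what such a subclass could look like; the intent name, state values, and services below are invented for illustration and are not part of this change:

```python
from homeassistant.core import State
from homeassistant.helpers.intent import DynamicServiceIntentHandler, Intent


class TogglePositionIntentHandler(DynamicServiceIntentHandler):
    """Hypothetical handler that opens closed covers and closes open ones."""

    def __init__(self) -> None:
        super().__init__("HassTogglePosition", speech="Toggled")

    def get_domain_and_service(
        self, intent_obj: Intent, state: State
    ) -> tuple[str, str]:
        """Pick the service to call based on the entity's current state."""
        if state.state == "closed":
            return ("cover", "open_cover")
        return ("cover", "close_cover")
```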
@@ -852,7 +852,14 @@ class Integration:
        # Some integrations fail on import because they call functions incorrectly.
        # So we do it before validating config to catch these errors.
        if load_executor:
            comp = await self.hass.async_add_executor_job(self.get_component)
            try:
                comp = await self.hass.async_add_import_executor_job(self.get_component)
            except ImportError as ex:
                load_executor = False
                _LOGGER.debug("Failed to import %s in executor", domain, exc_info=ex)
                # If importing in the executor deadlocks because there is a circular
                # dependency, we fall back to the event loop.
                comp = self.get_component()
        else:
            comp = self.get_component()

@@ -885,6 +892,9 @@ class Integration:
            )
        except ImportError:
            raise
        except RuntimeError as err:
            # _DeadlockError inherits from RuntimeError
            raise ImportError(f"RuntimeError importing {self.pkg_path}: {err}") from err
        except Exception as err:
            _LOGGER.exception(
                "Unexpected exception importing component %s", self.pkg_path
@@ -913,9 +923,18 @@ class Integration:
            )
            try:
                if load_executor:
                    platform = await self.hass.async_add_executor_job(
                        self._load_platform, platform_name
                    )
                    try:
                        platform = await self.hass.async_add_import_executor_job(
                            self._load_platform, platform_name
                        )
                    except ImportError as ex:
                        _LOGGER.debug(
                            "Failed to import %s in executor", domain, exc_info=ex
                        )
                        load_executor = False
                        # If importing in the executor deadlocks because there is a circular
                        # dependency, we fall back to the event loop.
                        platform = self._load_platform(platform_name)
                else:
                    platform = self._load_platform(platform_name)
                import_future.set_result(platform)
@@ -983,6 +1002,11 @@ class Integration:
            ]
            missing_platforms_cache[full_name] = ex
            raise
        except RuntimeError as err:
            # _DeadlockError inherits from RuntimeError
            raise ImportError(
                f"RuntimeError importing {self.pkg_path}.{platform_name}: {err}"
            ) from err
        except Exception as err:
            _LOGGER.exception(
                "Unexpected exception importing platform %s.%s",
@@ -1247,6 +1271,19 @@ class Components:
        if component is None:
            raise ImportError(f"Unable to load {comp_name}")

        # Local import to avoid circular dependencies
        from .helpers.frame import report  # pylint: disable=import-outside-toplevel

        report(
            (
                f"accesses hass.components.{comp_name}."
                " This is deprecated and will stop working in Home Assistant 2024.9, it"
                f" should be updated to import functions used from {comp_name} directly"
            ),
            error_if_core=False,
            log_custom_component_only=True,
        )

        wrapped = ModuleWrapper(self._hass, component)
        setattr(self, comp_name, wrapped)
        return wrapped
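The loader changes above try the dedicated import executor first and fall back to importing in the event loop when the executor import raises ImportError, which is how a circular-import deadlock surfaces after the RuntimeError-to-ImportError translation. A standalone sketch of that try-executor-then-fallback shape, using a plain ThreadPoolExecutor and a hypothetical loader function:

```python
import asyncio
import importlib
from concurrent.futures import ThreadPoolExecutor
from types import ModuleType

_EXECUTOR = ThreadPoolExecutor(max_workers=1, thread_name_prefix="ImportExecutor")


async def load_component(name: str, load_executor: bool = True) -> ModuleType:
    """Import a component, preferring the executor but surviving a failure there."""
    loop = asyncio.get_running_loop()
    if load_executor:
        try:
            return await loop.run_in_executor(_EXECUTOR, importlib.import_module, name)
        except ImportError:
            # A circular dependency can deadlock the executor import; retrying
            # inline in the event loop is the fallback taken above.
            pass
    return importlib.import_module(name)


print(asyncio.run(load_component("json")))
```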
@@ -26,12 +26,12 @@ dbus-fast==2.21.1
fnv-hash-fast==0.5.0
ha-av==10.1.1
ha-ffmpeg==3.1.0
habluetooth==2.4.1
habluetooth==2.4.2
hass-nabucasa==0.78.0
hassil==1.6.1
home-assistant-bluetooth==1.12.0
home-assistant-frontend==20240228.0
home-assistant-intents==2024.2.2
home-assistant-frontend==20240301.0
home-assistant-intents==2024.2.28
httpx==0.27.0
ifaddr==0.2.0
janus==1.0.0
@@ -79,12 +79,12 @@ def load_json(
    except FileNotFoundError:
        # This is not a fatal error
        _LOGGER.debug("JSON file not found: %s", filename)
    except ValueError as error:
    except JSON_DECODE_EXCEPTIONS as error:
        _LOGGER.exception("Could not parse JSON content: %s", filename)
        raise HomeAssistantError(error) from error
        raise HomeAssistantError(f"Error while loading {filename}: {error}") from error
    except OSError as error:
        _LOGGER.exception("JSON file reading failed: %s", filename)
        raise HomeAssistantError(error) from error
        raise HomeAssistantError(f"Error while loading {filename}: {error}") from error
    return {} if default is _SENTINEL else default
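The util.json change above widens the caught decode errors and includes the offending filename in the raised error message. A minimal standalone equivalent using only the standard library (with HomeAssistantError swapped for a plain RuntimeError):

```python
import json
from pathlib import Path
from typing import Any


def load_json(filename: str, default: Any = None) -> Any:
    """Load JSON from a file, wrapping failures with the filename for context."""
    try:
        return json.loads(Path(filename).read_text(encoding="utf-8"))
    except FileNotFoundError:
        # Not fatal: fall through to the default below.
        pass
    except json.JSONDecodeError as error:
        raise RuntimeError(f"Error while loading {filename}: {error}") from error
    except OSError as error:
        raise RuntimeError(f"Error while loading {filename}: {error}") from error
    return default


assert load_json("does_not_exist.json", default={}) == {}
```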
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "homeassistant"
version = "2024.3.0b0"
version = "2024.3.0b4"
license = {text = "Apache-2.0"}
description = "Open-source home automation platform running on Python 3."
readme = "README.rst"
@@ -188,10 +188,10 @@ aio-georss-gdacs==0.9
|
||||
aioairq==0.3.2
|
||||
|
||||
# homeassistant.components.airzone_cloud
|
||||
aioairzone-cloud==0.3.8
|
||||
aioairzone-cloud==0.4.5
|
||||
|
||||
# homeassistant.components.airzone
|
||||
aioairzone==0.7.4
|
||||
aioairzone==0.7.5
|
||||
|
||||
# homeassistant.components.ambient_station
|
||||
aioambient==2024.01.0
|
||||
@@ -276,7 +276,7 @@ aiohttp-zlib-ng==0.3.1
|
||||
aiohttp_cors==0.7.0
|
||||
|
||||
# homeassistant.components.hue
|
||||
aiohue==4.7.0
|
||||
aiohue==4.7.1
|
||||
|
||||
# homeassistant.components.imap
|
||||
aioimaplib==1.0.1
|
||||
@@ -1031,7 +1031,7 @@ ha-philipsjs==3.1.1
|
||||
habitipy==0.2.0
|
||||
|
||||
# homeassistant.components.bluetooth
|
||||
habluetooth==2.4.1
|
||||
habluetooth==2.4.2
|
||||
|
||||
# homeassistant.components.cloud
|
||||
hass-nabucasa==0.78.0
|
||||
@@ -1074,10 +1074,10 @@ hole==0.8.0
|
||||
holidays==0.43
|
||||
|
||||
# homeassistant.components.frontend
|
||||
home-assistant-frontend==20240228.0
|
||||
home-assistant-frontend==20240301.0
|
||||
|
||||
# homeassistant.components.conversation
|
||||
home-assistant-intents==2024.2.2
|
||||
home-assistant-intents==2024.2.28
|
||||
|
||||
# homeassistant.components.home_connect
|
||||
homeconnect==0.7.2
|
||||
@@ -1758,7 +1758,7 @@ pydaikin==2.11.1
|
||||
pydanfossair==0.1.0
|
||||
|
||||
# homeassistant.components.deconz
|
||||
pydeconz==114
|
||||
pydeconz==115
|
||||
|
||||
# homeassistant.components.delijn
|
||||
pydelijn==1.1.0
|
||||
@@ -2036,7 +2036,7 @@ pyotgw==2.1.3
|
||||
pyotp==2.8.0
|
||||
|
||||
# homeassistant.components.overkiz
|
||||
pyoverkiz==1.13.7
|
||||
pyoverkiz==1.13.8
|
||||
|
||||
# homeassistant.components.openweathermap
|
||||
pyowm==3.2.0
|
||||
@@ -2251,7 +2251,7 @@ python-kasa[speedups]==0.6.2.1
|
||||
# python-lirc==1.2.3
|
||||
|
||||
# homeassistant.components.matter
|
||||
python-matter-server==5.5.0
|
||||
python-matter-server==5.7.0
|
||||
|
||||
# homeassistant.components.xiaomi_miio
|
||||
python-miio==0.5.12
|
||||
@@ -2502,7 +2502,7 @@ screenlogicpy==0.10.0
|
||||
scsgate==0.1.0
|
||||
|
||||
# homeassistant.components.backup
|
||||
securetar==2024.2.0
|
||||
securetar==2024.2.1
|
||||
|
||||
# homeassistant.components.sendgrid
|
||||
sendgrid==6.8.2
|
||||
@@ -2836,7 +2836,7 @@ watchdog==2.3.1
|
||||
waterfurnace==1.1.0
|
||||
|
||||
# homeassistant.components.weatherflow_cloud
|
||||
weatherflow4py==0.1.11
|
||||
weatherflow4py==0.1.12
|
||||
|
||||
# homeassistant.components.webmin
|
||||
webmin-xmlrpc==0.0.1
|
||||
@@ -2950,7 +2950,7 @@ zigpy-zigate==0.12.0
|
||||
zigpy-znp==0.12.1
|
||||
|
||||
# homeassistant.components.zha
|
||||
zigpy==0.63.2
|
||||
zigpy==0.63.3
|
||||
|
||||
# homeassistant.components.zoneminder
|
||||
zm-py==0.5.4
|
||||
|
||||
@@ -167,10 +167,10 @@ aio-georss-gdacs==0.9
|
||||
aioairq==0.3.2
|
||||
|
||||
# homeassistant.components.airzone_cloud
|
||||
aioairzone-cloud==0.3.8
|
||||
aioairzone-cloud==0.4.5
|
||||
|
||||
# homeassistant.components.airzone
|
||||
aioairzone==0.7.4
|
||||
aioairzone==0.7.5
|
||||
|
||||
# homeassistant.components.ambient_station
|
||||
aioambient==2024.01.0
|
||||
@@ -252,7 +252,7 @@ aiohttp-zlib-ng==0.3.1
|
||||
aiohttp_cors==0.7.0
|
||||
|
||||
# homeassistant.components.hue
|
||||
aiohue==4.7.0
|
||||
aiohue==4.7.1
|
||||
|
||||
# homeassistant.components.imap
|
||||
aioimaplib==1.0.1
|
||||
@@ -842,7 +842,7 @@ ha-philipsjs==3.1.1
|
||||
habitipy==0.2.0
|
||||
|
||||
# homeassistant.components.bluetooth
|
||||
habluetooth==2.4.1
|
||||
habluetooth==2.4.2
|
||||
|
||||
# homeassistant.components.cloud
|
||||
hass-nabucasa==0.78.0
|
||||
@@ -873,10 +873,10 @@ hole==0.8.0
|
||||
holidays==0.43
|
||||
|
||||
# homeassistant.components.frontend
|
||||
home-assistant-frontend==20240228.0
|
||||
home-assistant-frontend==20240301.0
|
||||
|
||||
# homeassistant.components.conversation
|
||||
home-assistant-intents==2024.2.2
|
||||
home-assistant-intents==2024.2.28
|
||||
|
||||
# homeassistant.components.home_connect
|
||||
homeconnect==0.7.2
|
||||
@@ -1366,7 +1366,7 @@ pycsspeechtts==1.0.8
|
||||
pydaikin==2.11.1
|
||||
|
||||
# homeassistant.components.deconz
|
||||
pydeconz==114
|
||||
pydeconz==115
|
||||
|
||||
# homeassistant.components.dexcom
|
||||
pydexcom==0.2.3
|
||||
@@ -1578,7 +1578,7 @@ pyotgw==2.1.3
|
||||
pyotp==2.8.0
|
||||
|
||||
# homeassistant.components.overkiz
|
||||
pyoverkiz==1.13.7
|
||||
pyoverkiz==1.13.8
|
||||
|
||||
# homeassistant.components.openweathermap
|
||||
pyowm==3.2.0
|
||||
@@ -1727,7 +1727,7 @@ python-izone==1.2.9
|
||||
python-kasa[speedups]==0.6.2.1
|
||||
|
||||
# homeassistant.components.matter
|
||||
python-matter-server==5.5.0
|
||||
python-matter-server==5.7.0
|
||||
|
||||
# homeassistant.components.xiaomi_miio
|
||||
python-miio==0.5.12
|
||||
@@ -1915,7 +1915,7 @@ samsungtvws[async,encrypted]==2.6.0
|
||||
screenlogicpy==0.10.0
|
||||
|
||||
# homeassistant.components.backup
|
||||
securetar==2024.2.0
|
||||
securetar==2024.2.1
|
||||
|
||||
# homeassistant.components.emulated_kasa
|
||||
# homeassistant.components.sense
|
||||
@@ -2174,7 +2174,7 @@ wallbox==0.6.0
|
||||
watchdog==2.3.1
|
||||
|
||||
# homeassistant.components.weatherflow_cloud
|
||||
weatherflow4py==0.1.11
|
||||
weatherflow4py==0.1.12
|
||||
|
||||
# homeassistant.components.webmin
|
||||
webmin-xmlrpc==0.0.1
|
||||
@@ -2270,7 +2270,7 @@ zigpy-zigate==0.12.0
|
||||
zigpy-znp==0.12.1
|
||||
|
||||
# homeassistant.components.zha
|
||||
zigpy==0.63.2
|
||||
zigpy==0.63.3
|
||||
|
||||
# homeassistant.components.zwave_js
|
||||
zwave-js-server-python==0.55.3
|
||||
|
||||
@@ -46,6 +46,9 @@ async def test_coordinator_client_connector_error(hass: HomeAssistant) -> None:
|
||||
) as mock_webserver, patch(
|
||||
"homeassistant.components.airzone_cloud.AirzoneCloudApi.login",
|
||||
return_value=None,
|
||||
), patch(
|
||||
"homeassistant.components.airzone_cloud.AirzoneCloudApi._update_websockets",
|
||||
return_value=False,
|
||||
):
|
||||
await hass.config_entries.async_setup(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
@@ -99,8 +99,20 @@ def config_flow_fixture(hass: HomeAssistant) -> Generator[None, None, None]:
|
||||
yield
|
||||
|
||||
|
||||
@pytest.fixture(name="config_entry")
|
||||
async def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry:
|
||||
"""Create a mock config entry."""
|
||||
config_entry = MockConfigEntry(domain=TEST_DOMAIN)
|
||||
config_entry.add_to_hass(hass)
|
||||
return config_entry
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_setup_integration(hass: HomeAssistant, config_flow_fixture: None) -> None:
|
||||
def mock_setup_integration(
|
||||
hass: HomeAssistant,
|
||||
config_flow_fixture: None,
|
||||
test_entities: list[CalendarEntity],
|
||||
) -> None:
|
||||
"""Fixture to set up a mock integration."""
|
||||
|
||||
async def async_setup_entry_init(
|
||||
@@ -129,20 +141,16 @@ def mock_setup_integration(hass: HomeAssistant, config_flow_fixture: None) -> No
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def create_mock_platform(
|
||||
hass: HomeAssistant,
|
||||
entities: list[CalendarEntity],
|
||||
) -> MockConfigEntry:
|
||||
"""Create a calendar platform with the specified entities."""
|
||||
|
||||
async def async_setup_entry_platform(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up test event platform via config entry."""
|
||||
async_add_entities(entities)
|
||||
new_entities = create_test_entities()
|
||||
test_entities.clear()
|
||||
test_entities.extend(new_entities)
|
||||
async_add_entities(test_entities)
|
||||
|
||||
mock_platform(
|
||||
hass,
|
||||
@@ -150,17 +158,15 @@ async def create_mock_platform(
|
||||
MockPlatform(async_setup_entry=async_setup_entry_platform),
|
||||
)
|
||||
|
||||
config_entry = MockConfigEntry(domain=TEST_DOMAIN)
|
||||
config_entry.add_to_hass(hass)
|
||||
assert await hass.config_entries.async_setup(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
return config_entry
|
||||
|
||||
|
||||
@pytest.fixture(name="test_entities")
|
||||
def mock_test_entities() -> list[MockCalendarEntity]:
|
||||
"""Fixture to create fake entities used in the test."""
|
||||
"""Fixture that holdes the fake entities created during the test."""
|
||||
return []
|
||||
|
||||
|
||||
def create_test_entities() -> list[MockCalendarEntity]:
|
||||
"""Create test entities used during the test."""
|
||||
half_hour_from_now = dt_util.now() + datetime.timedelta(minutes=30)
|
||||
entity1 = MockCalendarEntity(
|
||||
"Calendar 1",
|
||||
|
||||
@@ -21,7 +21,7 @@ from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.issue_registry import IssueRegistry
|
||||
import homeassistant.util.dt as dt_util
|
||||
|
||||
from .conftest import TEST_DOMAIN, MockCalendarEntity, create_mock_platform
|
||||
from .conftest import TEST_DOMAIN, MockCalendarEntity, MockConfigEntry
|
||||
|
||||
from tests.typing import ClientSessionGenerator, WebSocketGenerator
|
||||
|
||||
@@ -51,10 +51,11 @@ async def mock_setup_platform(
|
||||
set_time_zone: Any,
|
||||
frozen_time: Any,
|
||||
mock_setup_integration: Any,
|
||||
test_entities: list[MockCalendarEntity],
|
||||
config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Fixture to setup platforms used in the test and fixtures are set up in the right order."""
|
||||
await create_mock_platform(hass, test_entities)
|
||||
assert await hass.config_entries.async_setup(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
|
||||
async def test_events_http_api(
|
||||
|
||||
@@ -10,9 +10,7 @@ from homeassistant.const import ATTR_FRIENDLY_NAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .conftest import MockCalendarEntity, create_mock_platform
|
||||
|
||||
from tests.common import async_fire_time_changed
|
||||
from tests.common import MockConfigEntry, async_fire_time_changed
|
||||
from tests.components.recorder.common import async_wait_recording_done
|
||||
|
||||
|
||||
@@ -22,10 +20,11 @@ async def mock_setup_dependencies(
|
||||
hass: HomeAssistant,
|
||||
set_time_zone: Any,
|
||||
mock_setup_integration: None,
|
||||
test_entities: list[MockCalendarEntity],
|
||||
config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Fixture that ensures the recorder is setup in the right order."""
|
||||
await create_mock_platform(hass, test_entities)
|
||||
assert await hass.config_entries.async_setup(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
|
||||
async def test_exclude_attributes(hass: HomeAssistant) -> None:
|
||||
|
||||
@@ -27,9 +27,9 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.setup import async_setup_component
|
||||
import homeassistant.util.dt as dt_util
|
||||
|
||||
from .conftest import MockCalendarEntity, create_mock_platform
|
||||
from .conftest import MockCalendarEntity
|
||||
|
||||
from tests.common import async_fire_time_changed, async_mock_service
|
||||
from tests.common import MockConfigEntry, async_fire_time_changed, async_mock_service
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -105,10 +105,11 @@ def mock_test_entity(test_entities: list[MockCalendarEntity]) -> MockCalendarEnt
|
||||
async def mock_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
mock_setup_integration: Any,
|
||||
test_entities: list[MockCalendarEntity],
|
||||
config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Fixture to setup platforms used in the test."""
|
||||
await create_mock_platform(hass, test_entities)
|
||||
assert await hass.config_entries.async_setup(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
@@ -745,3 +746,65 @@ async def test_event_start_trigger_dst(
|
||||
"calendar_event": event3_data,
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
async def test_config_entry_reload(
|
||||
hass: HomeAssistant,
|
||||
calls: Callable[[], list[dict[str, Any]]],
|
||||
fake_schedule: FakeSchedule,
|
||||
test_entities: list[MockCalendarEntity],
|
||||
setup_platform: None,
|
||||
config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test the a calendar trigger after a config entry reload.
|
||||
|
||||
This sets up a config entry, sets up an automation for an entity in that
|
||||
config entry, then reloads the config entry. This reproduces a bug where
|
||||
the automation kept a reference to the specific entity which would be
|
||||
invalid after a config entry was reloaded.
|
||||
"""
|
||||
async with create_automation(hass, EVENT_START):
|
||||
assert len(calls()) == 0
|
||||
|
||||
assert await hass.config_entries.async_reload(config_entry.entry_id)
|
||||
|
||||
# Ensure the reloaded entity has events upcoming.
|
||||
test_entity = test_entities[1]
|
||||
event_data = test_entity.create_event(
|
||||
start=datetime.datetime.fromisoformat("2022-04-19 11:00:00+00:00"),
|
||||
end=datetime.datetime.fromisoformat("2022-04-19 11:30:00+00:00"),
|
||||
)
|
||||
|
||||
await fake_schedule.fire_until(
|
||||
datetime.datetime.fromisoformat("2022-04-19 11:15:00+00:00"),
|
||||
)
|
||||
|
||||
assert calls() == [
|
||||
{
|
||||
"platform": "calendar",
|
||||
"event": EVENT_START,
|
||||
"calendar_event": event_data,
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
async def test_config_entry_unload(
|
||||
hass: HomeAssistant,
|
||||
calls: Callable[[], list[dict[str, Any]]],
|
||||
fake_schedule: FakeSchedule,
|
||||
test_entities: list[MockCalendarEntity],
|
||||
setup_platform: None,
|
||||
config_entry: MockConfigEntry,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""Test an automation that references a calendar entity that is unloaded."""
|
||||
async with create_automation(hass, EVENT_START):
|
||||
assert len(calls()) == 0
|
||||
|
||||
assert await hass.config_entries.async_unload(config_entry.entry_id)
|
||||
|
||||
await fake_schedule.fire_until(
|
||||
datetime.datetime.fromisoformat("2022-04-19 11:15:00+00:00"),
|
||||
)
|
||||
|
||||
assert "Entity does not exist calendar.calendar_2" in caplog.text
|
||||
|
||||
tests/components/conversation/test_default_agent_intents.py (new file, 247 lines)
@@ -0,0 +1,247 @@
|
||||
"""Test intents for the default agent."""
|
||||
|
||||
|
||||
import pytest
|
||||
|
||||
from homeassistant.components import conversation, cover, media_player, vacuum, valve
|
||||
from homeassistant.components.cover import intent as cover_intent
|
||||
from homeassistant.components.homeassistant.exposed_entities import async_expose_entity
|
||||
from homeassistant.components.media_player import intent as media_player_intent
|
||||
from homeassistant.components.vacuum import intent as vaccum_intent
|
||||
from homeassistant.const import STATE_CLOSED
|
||||
from homeassistant.core import Context, HomeAssistant
|
||||
from homeassistant.helpers import intent
|
||||
from homeassistant.setup import async_setup_component
|
||||
|
||||
from tests.common import async_mock_service
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def init_components(hass: HomeAssistant):
|
||||
"""Initialize relevant components with empty configs."""
|
||||
assert await async_setup_component(hass, "homeassistant", {})
|
||||
assert await async_setup_component(hass, "conversation", {})
|
||||
assert await async_setup_component(hass, "intent", {})
|
||||
|
||||
|
||||
async def test_cover_set_position(
|
||||
hass: HomeAssistant,
|
||||
init_components,
|
||||
) -> None:
|
||||
"""Test the open/close/set position for covers."""
|
||||
await cover_intent.async_setup_intents(hass)
|
||||
|
||||
entity_id = f"{cover.DOMAIN}.garage_door"
|
||||
hass.states.async_set(entity_id, STATE_CLOSED)
|
||||
async_expose_entity(hass, conversation.DOMAIN, entity_id, True)
|
||||
|
||||
# open
|
||||
calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER)
|
||||
result = await conversation.async_converse(
|
||||
hass, "open the garage door", None, Context(), None
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
response = result.response
|
||||
assert response.response_type == intent.IntentResponseType.ACTION_DONE
|
||||
assert response.speech["plain"]["speech"] == "Opened"
|
||||
assert len(calls) == 1
|
||||
call = calls[0]
|
||||
assert call.data == {"entity_id": entity_id}
|
||||
|
||||
# close
|
||||
calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER)
|
||||
result = await conversation.async_converse(
|
||||
hass, "close garage door", None, Context(), None
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
response = result.response
|
||||
assert response.response_type == intent.IntentResponseType.ACTION_DONE
|
||||
assert response.speech["plain"]["speech"] == "Closed"
|
||||
assert len(calls) == 1
|
||||
call = calls[0]
|
||||
assert call.data == {"entity_id": entity_id}
|
||||
|
||||
# set position
|
||||
calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION)
|
||||
result = await conversation.async_converse(
|
||||
hass, "set garage door to 50%", None, Context(), None
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
response = result.response
|
||||
assert response.response_type == intent.IntentResponseType.ACTION_DONE
|
||||
assert response.speech["plain"]["speech"] == "Position set"
|
||||
assert len(calls) == 1
|
||||
call = calls[0]
|
||||
assert call.data == {"entity_id": entity_id, cover.ATTR_POSITION: 50}
|
||||
|
||||
|
||||
async def test_valve_intents(
|
||||
hass: HomeAssistant,
|
||||
init_components,
|
||||
) -> None:
|
||||
"""Test open/close/set position for valves."""
|
||||
entity_id = f"{valve.DOMAIN}.main_valve"
|
||||
hass.states.async_set(entity_id, STATE_CLOSED)
|
||||
async_expose_entity(hass, conversation.DOMAIN, entity_id, True)
|
||||
|
||||
# open
|
||||
calls = async_mock_service(hass, valve.DOMAIN, valve.SERVICE_OPEN_VALVE)
|
||||
result = await conversation.async_converse(
|
||||
hass, "open the main valve", None, Context(), None
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
response = result.response
|
||||
assert response.response_type == intent.IntentResponseType.ACTION_DONE
|
||||
assert response.speech["plain"]["speech"] == "Opened"
|
||||
assert len(calls) == 1
|
||||
call = calls[0]
|
||||
assert call.data == {"entity_id": entity_id}
|
||||
|
||||
# close
|
||||
calls = async_mock_service(hass, valve.DOMAIN, valve.SERVICE_CLOSE_VALVE)
|
||||
result = await conversation.async_converse(
|
||||
hass, "close main valve", None, Context(), None
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
response = result.response
|
||||
assert response.response_type == intent.IntentResponseType.ACTION_DONE
|
||||
assert response.speech["plain"]["speech"] == "Closed"
|
||||
assert len(calls) == 1
|
||||
call = calls[0]
|
||||
assert call.data == {"entity_id": entity_id}
|
||||
|
||||
# set position
|
||||
calls = async_mock_service(hass, valve.DOMAIN, valve.SERVICE_SET_VALVE_POSITION)
|
||||
result = await conversation.async_converse(
|
||||
hass, "set main valve position to 25", None, Context(), None
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
response = result.response
|
||||
assert response.response_type == intent.IntentResponseType.ACTION_DONE
|
||||
assert response.speech["plain"]["speech"] == "Position set"
|
||||
assert len(calls) == 1
|
||||
call = calls[0]
|
||||
assert call.data == {"entity_id": entity_id, valve.ATTR_POSITION: 25}
|
||||
|
||||
|
||||
async def test_vacuum_intents(
|
||||
hass: HomeAssistant,
|
||||
init_components,
|
||||
) -> None:
|
||||
"""Test start/return to base for vacuums."""
|
||||
await vaccum_intent.async_setup_intents(hass)
|
||||
|
||||
entity_id = f"{vacuum.DOMAIN}.rover"
|
||||
hass.states.async_set(entity_id, STATE_CLOSED)
|
||||
async_expose_entity(hass, conversation.DOMAIN, entity_id, True)
|
||||
|
||||
# start
|
||||
calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START)
|
||||
result = await conversation.async_converse(
|
||||
hass, "start rover", None, Context(), None
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
response = result.response
|
||||
assert response.response_type == intent.IntentResponseType.ACTION_DONE
|
||||
assert response.speech["plain"]["speech"] == "Started"
|
||||
assert len(calls) == 1
|
||||
call = calls[0]
|
||||
assert call.data == {"entity_id": entity_id}
|
||||
|
||||
# return to base
|
||||
calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE)
|
||||
result = await conversation.async_converse(
|
||||
hass, "return rover to base", None, Context(), None
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
response = result.response
|
||||
assert response.response_type == intent.IntentResponseType.ACTION_DONE
|
||||
assert response.speech["plain"]["speech"] == "Returning"
|
||||
assert len(calls) == 1
|
||||
call = calls[0]
|
||||
assert call.data == {"entity_id": entity_id}
|
||||
|
||||
|
||||
async def test_media_player_intents(
|
||||
hass: HomeAssistant,
|
||||
init_components,
|
||||
) -> None:
|
||||
"""Test pause/unpause/next/set volume for media players."""
|
||||
await media_player_intent.async_setup_intents(hass)
|
||||
|
||||
entity_id = f"{media_player.DOMAIN}.tv"
|
||||
hass.states.async_set(entity_id, media_player.STATE_PLAYING)
|
||||
async_expose_entity(hass, conversation.DOMAIN, entity_id, True)
|
||||
|
||||
# pause
|
||||
calls = async_mock_service(
|
||||
hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE
|
||||
)
|
||||
result = await conversation.async_converse(hass, "pause tv", None, Context(), None)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
response = result.response
|
||||
assert response.response_type == intent.IntentResponseType.ACTION_DONE
|
||||
assert response.speech["plain"]["speech"] == "Paused"
|
||||
assert len(calls) == 1
|
||||
call = calls[0]
|
||||
assert call.data == {"entity_id": entity_id}
|
||||
|
||||
# unpause
|
||||
calls = async_mock_service(
|
||||
hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY
|
||||
)
|
||||
result = await conversation.async_converse(
|
||||
hass, "unpause tv", None, Context(), None
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
response = result.response
|
||||
assert response.response_type == intent.IntentResponseType.ACTION_DONE
|
||||
assert response.speech["plain"]["speech"] == "Unpaused"
|
||||
assert len(calls) == 1
|
||||
call = calls[0]
|
||||
assert call.data == {"entity_id": entity_id}
|
||||
|
||||
# next
|
||||
calls = async_mock_service(
|
||||
hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK
|
||||
)
|
||||
result = await conversation.async_converse(
|
||||
hass, "next item on tv", None, Context(), None
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
response = result.response
|
||||
assert response.response_type == intent.IntentResponseType.ACTION_DONE
|
||||
assert response.speech["plain"]["speech"] == "Playing next"
|
||||
assert len(calls) == 1
|
||||
call = calls[0]
|
||||
assert call.data == {"entity_id": entity_id}
|
||||
|
||||
# volume
|
||||
calls = async_mock_service(
|
||||
hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET
|
||||
)
|
||||
result = await conversation.async_converse(
|
||||
hass, "set tv volume to 75 percent", None, Context(), None
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
response = result.response
|
||||
assert response.response_type == intent.IntentResponseType.ACTION_DONE
|
||||
assert response.speech["plain"]["speech"] == "Volume set"
|
||||
assert len(calls) == 1
|
||||
call = calls[0]
|
||||
assert call.data == {
|
||||
"entity_id": entity_id,
|
||||
media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.75,
|
||||
}
|
||||
@@ -11,6 +11,7 @@ from homeassistant.components.cover import (
|
||||
from homeassistant.const import STATE_CLOSED, STATE_OPEN
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import intent
|
||||
from homeassistant.setup import async_setup_component
|
||||
|
||||
from tests.common import async_mock_service
|
||||
|
||||
@@ -60,7 +61,7 @@ async def test_close_cover_intent(hass: HomeAssistant) -> None:
|
||||
|
||||
async def test_set_cover_position(hass: HomeAssistant) -> None:
|
||||
"""Test HassSetPosition intent for covers."""
|
||||
await cover_intent.async_setup_intents(hass)
|
||||
assert await async_setup_component(hass, "intent", {})
|
||||
|
||||
entity_id = f"{DOMAIN}.test_cover"
|
||||
hass.states.async_set(
|
||||
|
||||
@@ -308,6 +308,117 @@ async def test_no_lights_or_groups(
|
||||
},
|
||||
},
|
||||
),
|
||||
( # Gradient light
|
||||
{
|
||||
"capabilities": {
|
||||
"alerts": [
|
||||
"none",
|
||||
"select",
|
||||
"lselect",
|
||||
"blink",
|
||||
"breathe",
|
||||
"okay",
|
||||
"channelchange",
|
||||
"finish",
|
||||
"stop",
|
||||
],
|
||||
"bri": {"min_dim_level": 0.01},
|
||||
"color": {
|
||||
"ct": {"computes_xy": True, "max": 500, "min": 153},
|
||||
"effects": [
|
||||
"none",
|
||||
"colorloop",
|
||||
"candle",
|
||||
"fireplace",
|
||||
"prism",
|
||||
"sunrise",
|
||||
],
|
||||
"gamut_type": "C",
|
||||
"gradient": {
|
||||
"max_segments": 9,
|
||||
"pixel_count": 16,
|
||||
"pixel_length": 1250,
|
||||
"styles": ["linear", "mirrored"],
|
||||
},
|
||||
"modes": ["ct", "effect", "gradient", "hs", "xy"],
|
||||
"xy": {
|
||||
"blue": [0.1532, 0.0475],
|
||||
"green": [0.17, 0.7],
|
||||
"red": [0.6915, 0.3083],
|
||||
},
|
||||
},
|
||||
},
|
||||
"colorcapabilities": 31,
|
||||
"config": {
|
||||
"bri": {
|
||||
"couple_ct": False,
|
||||
"execute_if_off": True,
|
||||
"startup": "previous",
|
||||
},
|
||||
"color": {
|
||||
"ct": {"startup": "previous"},
|
||||
"execute_if_off": True,
|
||||
"gradient": {"reversed": False},
|
||||
"xy": {"startup": "previous"},
|
||||
},
|
||||
"groups": ["36", "39", "45", "46", "47", "51", "57", "59"],
|
||||
"on": {"startup": "previous"},
|
||||
},
|
||||
"ctmax": 500,
|
||||
"ctmin": 153,
|
||||
"etag": "077fb97dd6145f10a3c190f0a1ade499",
|
||||
"hascolor": True,
|
||||
"lastannounced": None,
|
||||
"lastseen": "2024-02-29T18:36Z",
|
||||
"manufacturername": "Signify Netherlands B.V.",
|
||||
"modelid": "LCX004",
|
||||
"name": "Gradient light",
|
||||
"productid": "Philips-LCX004-1-GALSECLv1",
|
||||
"productname": "Hue gradient lightstrip",
|
||||
"state": {
|
||||
"alert": "none",
|
||||
"bri": 184,
|
||||
"colormode": "gradient",
|
||||
"ct": 396,
|
||||
"effect": "none",
|
||||
"gradient": {
|
||||
"color_adjustment": 0,
|
||||
"offset": 0,
|
||||
"offset_adjustment": 0,
|
||||
"points": [
|
||||
[0.2728, 0.6226],
|
||||
[0.163, 0.4262],
|
||||
[0.1563, 0.1699],
|
||||
[0.1551, 0.1147],
|
||||
[0.1534, 0.0579],
|
||||
],
|
||||
"segments": 5,
|
||||
"style": "linear",
|
||||
},
|
||||
"hue": 20566,
|
||||
"on": True,
|
||||
"reachable": True,
|
||||
"sat": 254,
|
||||
"xy": [0.2727, 0.6226],
|
||||
},
|
||||
"swconfigid": "F03CAF4D",
|
||||
"swversion": "1.104.2",
|
||||
"type": "Extended color light",
|
||||
"uniqueid": "00:17:88:01:0b:0c:0d:0e-0f",
|
||||
},
|
||||
{
|
||||
"entity_id": "light.gradient_light",
|
||||
"state": STATE_ON,
|
||||
"attributes": {
|
||||
ATTR_SUPPORTED_COLOR_MODES: [
|
||||
ColorMode.COLOR_TEMP,
|
||||
ColorMode.HS,
|
||||
ColorMode.XY,
|
||||
],
|
||||
ATTR_COLOR_MODE: ColorMode.XY,
|
||||
},
|
||||
},
|
||||
),
|
||||
],
|
||||
)
|
||||
async def test_lights(
|
||||
|
||||
@@ -19,7 +19,6 @@ from homeassistant.const import (
|
||||
ATTR_FRIENDLY_NAME,
|
||||
CONF_HOST,
|
||||
CONF_NAME,
|
||||
EVENT_HOMEASSISTANT_STARTED,
|
||||
STATE_ON,
|
||||
STATE_UNAVAILABLE,
|
||||
)
|
||||
@@ -57,13 +56,10 @@ async def test_configuring_flux_led_causes_discovery(hass: HomeAssistant) -> Non
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert len(scan.mock_calls) == 1
|
||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
|
||||
await hass.async_block_till_done()
|
||||
assert len(scan.mock_calls) == 2
|
||||
|
||||
async_fire_time_changed(hass, utcnow() + flux_led.DISCOVERY_INTERVAL)
|
||||
await hass.async_block_till_done()
|
||||
assert len(scan.mock_calls) == 3
|
||||
assert len(scan.mock_calls) == 2
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("mock_multiple_broadcast_addresses")
|
||||
@@ -79,15 +75,11 @@ async def test_configuring_flux_led_causes_discovery_multiple_addresses(
|
||||
discover.return_value = [FLUX_DISCOVERY]
|
||||
await async_setup_component(hass, flux_led.DOMAIN, {flux_led.DOMAIN: {}})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert len(scan.mock_calls) == 2
|
||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
|
||||
await hass.async_block_till_done()
|
||||
assert len(scan.mock_calls) == 4
|
||||
|
||||
async_fire_time_changed(hass, utcnow() + flux_led.DISCOVERY_INTERVAL)
|
||||
await hass.async_block_till_done()
|
||||
assert len(scan.mock_calls) == 6
|
||||
assert len(scan.mock_calls) == 4
|
||||
|
||||
|
||||
async def test_config_entry_reload(hass: HomeAssistant) -> None:
|
||||
|
||||
@@ -210,7 +210,7 @@ async def test_if_not_fires_using_wrong_at(
|
||||
now = dt_util.utcnow()
|
||||
|
||||
time_that_will_not_match_right_away = now.replace(
|
||||
year=now.year + 1, hour=1, minute=0, second=0
|
||||
year=now.year + 1, day=1, hour=1, minute=0, second=0
|
||||
)
|
||||
|
||||
freezer.move_to(time_that_will_not_match_right_away)
|
||||
@@ -233,7 +233,7 @@ async def test_if_not_fires_using_wrong_at(
|
||||
assert hass.states.get("automation.automation_0").state == STATE_UNAVAILABLE
|
||||
|
||||
async_fire_time_changed(
|
||||
hass, now.replace(year=now.year + 1, hour=1, minute=0, second=5)
|
||||
hass, now.replace(year=now.year + 1, day=1, hour=1, minute=0, second=5)
|
||||
)
|
||||
|
||||
await hass.async_block_till_done()
|
||||
|
||||
@@ -33,7 +33,7 @@ async def test_if_fires_when_hour_matches(
|
||||
"""Test for firing if hour is matching."""
|
||||
now = dt_util.utcnow()
|
||||
time_that_will_not_match_right_away = dt_util.utcnow().replace(
|
||||
year=now.year + 1, hour=3
|
||||
year=now.year + 1, day=1, hour=3
|
||||
)
|
||||
freezer.move_to(time_that_will_not_match_right_away)
|
||||
assert await async_setup_component(
|
||||
@@ -55,7 +55,7 @@ async def test_if_fires_when_hour_matches(
|
||||
},
|
||||
)
|
||||
|
||||
async_fire_time_changed(hass, now.replace(year=now.year + 2, hour=0))
|
||||
async_fire_time_changed(hass, now.replace(year=now.year + 2, day=1, hour=0))
|
||||
await hass.async_block_till_done()
|
||||
assert len(calls) == 1
|
||||
|
||||
@@ -66,7 +66,7 @@ async def test_if_fires_when_hour_matches(
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
async_fire_time_changed(hass, now.replace(year=now.year + 1, hour=0))
|
||||
async_fire_time_changed(hass, now.replace(year=now.year + 1, day=1, hour=0))
|
||||
await hass.async_block_till_done()
|
||||
assert len(calls) == 1
|
||||
assert calls[0].data["id"] == 0
|
||||
@@ -78,7 +78,7 @@ async def test_if_fires_when_minute_matches(
|
||||
"""Test for firing if minutes are matching."""
|
||||
now = dt_util.utcnow()
|
||||
time_that_will_not_match_right_away = dt_util.utcnow().replace(
|
||||
year=now.year + 1, minute=30
|
||||
year=now.year + 1, day=1, minute=30
|
||||
)
|
||||
freezer.move_to(time_that_will_not_match_right_away)
|
||||
assert await async_setup_component(
|
||||
@@ -97,7 +97,7 @@ async def test_if_fires_when_minute_matches(
|
||||
},
|
||||
)
|
||||
|
||||
async_fire_time_changed(hass, now.replace(year=now.year + 2, minute=0))
|
||||
async_fire_time_changed(hass, now.replace(year=now.year + 2, day=1, minute=0))
|
||||
|
||||
await hass.async_block_till_done()
|
||||
assert len(calls) == 1
|
||||
@@ -109,7 +109,7 @@ async def test_if_fires_when_second_matches(
|
||||
"""Test for firing if seconds are matching."""
|
||||
now = dt_util.utcnow()
|
||||
time_that_will_not_match_right_away = dt_util.utcnow().replace(
|
||||
year=now.year + 1, second=30
|
||||
year=now.year + 1, day=1, second=30
|
||||
)
|
||||
freezer.move_to(time_that_will_not_match_right_away)
|
||||
assert await async_setup_component(
|
||||
@@ -128,7 +128,7 @@ async def test_if_fires_when_second_matches(
|
||||
},
|
||||
)
|
||||
|
||||
async_fire_time_changed(hass, now.replace(year=now.year + 2, second=0))
|
||||
async_fire_time_changed(hass, now.replace(year=now.year + 2, day=1, second=0))
|
||||
|
||||
await hass.async_block_till_done()
|
||||
assert len(calls) == 1
|
||||
@@ -140,7 +140,7 @@ async def test_if_fires_when_second_as_string_matches(
|
||||
"""Test for firing if seconds are matching."""
|
||||
now = dt_util.utcnow()
|
||||
time_that_will_not_match_right_away = dt_util.utcnow().replace(
|
||||
year=now.year + 1, second=15
|
||||
year=now.year + 1, day=1, second=15
|
||||
)
|
||||
freezer.move_to(time_that_will_not_match_right_away)
|
||||
assert await async_setup_component(
|
||||
@@ -173,7 +173,7 @@ async def test_if_fires_when_all_matches(
|
||||
"""Test for firing if everything matches."""
|
||||
now = dt_util.utcnow()
|
||||
time_that_will_not_match_right_away = dt_util.utcnow().replace(
|
||||
year=now.year + 1, hour=4
|
||||
year=now.year + 1, day=1, hour=4
|
||||
)
|
||||
freezer.move_to(time_that_will_not_match_right_away)
|
||||
assert await async_setup_component(
|
||||
@@ -193,7 +193,7 @@ async def test_if_fires_when_all_matches(
|
||||
)
|
||||
|
||||
async_fire_time_changed(
|
||||
hass, now.replace(year=now.year + 2, hour=1, minute=2, second=3)
|
||||
hass, now.replace(year=now.year + 2, day=1, hour=1, minute=2, second=3)
|
||||
)
|
||||
|
||||
await hass.async_block_till_done()
|
||||
@@ -206,7 +206,7 @@ async def test_if_fires_periodic_seconds(
|
||||
"""Test for firing periodically every second."""
|
||||
now = dt_util.utcnow()
|
||||
time_that_will_not_match_right_away = dt_util.utcnow().replace(
|
||||
year=now.year + 1, second=1
|
||||
year=now.year + 1, day=1, second=1
|
||||
)
|
||||
freezer.move_to(time_that_will_not_match_right_away)
|
||||
assert await async_setup_component(
|
||||
@@ -226,7 +226,7 @@ async def test_if_fires_periodic_seconds(
|
||||
)
|
||||
|
||||
async_fire_time_changed(
|
||||
-        hass, now.replace(year=now.year + 2, hour=0, minute=0, second=10)
+        hass, now.replace(year=now.year + 2, day=1, hour=0, minute=0, second=10)
|
||||
)
|
||||
|
||||
await hass.async_block_till_done()
|
||||
@@ -240,7 +240,7 @@ async def test_if_fires_periodic_minutes(
|
||||
|
||||
now = dt_util.utcnow()
|
||||
time_that_will_not_match_right_away = dt_util.utcnow().replace(
|
||||
-        year=now.year + 1, minute=1
+        year=now.year + 1, day=1, minute=1
|
||||
)
|
||||
freezer.move_to(time_that_will_not_match_right_away)
|
||||
assert await async_setup_component(
|
||||
@@ -260,7 +260,7 @@ async def test_if_fires_periodic_minutes(
|
||||
)
|
||||
|
||||
async_fire_time_changed(
|
||||
-        hass, now.replace(year=now.year + 2, hour=0, minute=2, second=0)
+        hass, now.replace(year=now.year + 2, day=1, hour=0, minute=2, second=0)
|
||||
)
|
||||
|
||||
await hass.async_block_till_done()
|
||||
@@ -273,7 +273,7 @@ async def test_if_fires_periodic_hours(
|
||||
"""Test for firing periodically every hour."""
|
||||
now = dt_util.utcnow()
|
||||
time_that_will_not_match_right_away = dt_util.utcnow().replace(
|
||||
-        year=now.year + 1, hour=1
+        year=now.year + 1, day=1, hour=1
|
||||
)
|
||||
freezer.move_to(time_that_will_not_match_right_away)
|
||||
assert await async_setup_component(
|
||||
@@ -293,7 +293,7 @@ async def test_if_fires_periodic_hours(
|
||||
)
|
||||
|
||||
async_fire_time_changed(
|
||||
-        hass, now.replace(year=now.year + 2, hour=2, minute=0, second=0)
+        hass, now.replace(year=now.year + 2, day=1, hour=2, minute=0, second=0)
|
||||
)
|
||||
|
||||
await hass.async_block_till_done()
|
||||
@@ -306,7 +306,7 @@ async def test_default_values(
|
||||
"""Test for firing at 2 minutes every hour."""
|
||||
now = dt_util.utcnow()
|
||||
time_that_will_not_match_right_away = dt_util.utcnow().replace(
|
||||
-        year=now.year + 1, minute=1
+        year=now.year + 1, day=1, minute=1
|
||||
)
|
||||
freezer.move_to(time_that_will_not_match_right_away)
|
||||
assert await async_setup_component(
|
||||
@@ -321,21 +321,21 @@ async def test_default_values(
|
||||
)
|
||||
|
||||
async_fire_time_changed(
|
||||
-        hass, now.replace(year=now.year + 2, hour=1, minute=2, second=0)
+        hass, now.replace(year=now.year + 2, day=1, hour=1, minute=2, second=0)
|
||||
)
|
||||
|
||||
await hass.async_block_till_done()
|
||||
assert len(calls) == 1
|
||||
|
||||
async_fire_time_changed(
|
||||
-        hass, now.replace(year=now.year + 2, hour=1, minute=2, second=1)
+        hass, now.replace(year=now.year + 2, day=1, hour=1, minute=2, second=1)
|
||||
)
|
||||
|
||||
await hass.async_block_till_done()
|
||||
assert len(calls) == 1
|
||||
|
||||
async_fire_time_changed(
|
||||
-        hass, now.replace(year=now.year + 2, hour=2, minute=2, second=0)
+        hass, now.replace(year=now.year + 2, day=1, hour=2, minute=2, second=0)
|
||||
)
|
||||
|
||||
await hass.async_block_till_done()
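Note on the `day=1` additions in the hunks above: these tests shift a timestamp derived from `dt_util.utcnow()` by one or two years while keeping the current day of month, which can produce a date that does not exist (most obviously when the suite runs on a leap day). A minimal sketch of the failure mode with plain `datetime`, illustrative only and not part of the diff:

    from datetime import datetime

    leap_day = datetime(2024, 2, 29, 12, 0, 0)
    try:
        leap_day.replace(year=leap_day.year + 1)  # 2025-02-29 does not exist
    except ValueError as err:
        print(err)  # "day is out of range for month"

    # Pinning day=1 keeps the shifted timestamp valid on any calendar day.
    print(leap_day.replace(year=leap_day.year + 1, day=1))  # 2025-02-01 12:00:00

Pinning `day=1` makes the `year + 1` / `year + 2` arithmetic safe regardless of the date on which the tests run.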
|
||||
|
||||
@@ -432,3 +432,20 @@ async def test_get_state_intent(
|
||||
"domain": {"value": "light"},
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
async def test_set_position_intent_unsupported_domain(hass: HomeAssistant) -> None:
|
||||
"""Test that HassSetPosition intent fails with unsupported domain."""
|
||||
assert await async_setup_component(hass, "homeassistant", {})
|
||||
assert await async_setup_component(hass, "intent", {})
|
||||
|
||||
# Can't set position of lights
|
||||
hass.states.async_set("light.test_light", "off")
|
||||
|
||||
with pytest.raises(intent.IntentHandleError):
|
||||
await intent.async_handle(
|
||||
hass,
|
||||
"test",
|
||||
"HassSetPosition",
|
||||
{"name": {"value": "test light"}, "position": {"value": 100}},
|
||||
)
|
||||
|
||||
@@ -40,7 +40,9 @@ async def test_resolve(
|
||||
mock_api.get_item.side_effect = None
|
||||
mock_api.get_item.return_value = load_json_fixture("track.json")
|
||||
|
||||
play_media = await async_resolve_media(hass, f"{URI_SCHEME}{DOMAIN}/TRACK-UUID")
|
||||
play_media = await async_resolve_media(
|
||||
hass, f"{URI_SCHEME}{DOMAIN}/TRACK-UUID", "media_player.jellyfin_device"
|
||||
)
|
||||
|
||||
assert play_media.mime_type == "audio/flac"
|
||||
assert play_media.url == snapshot
|
||||
@@ -49,7 +51,9 @@ async def test_resolve(
|
||||
mock_api.get_item.side_effect = None
|
||||
mock_api.get_item.return_value = load_json_fixture("movie.json")
|
||||
|
||||
play_media = await async_resolve_media(hass, f"{URI_SCHEME}{DOMAIN}/MOVIE-UUID")
|
||||
play_media = await async_resolve_media(
|
||||
hass, f"{URI_SCHEME}{DOMAIN}/MOVIE-UUID", "media_player.jellyfin_device"
|
||||
)
|
||||
|
||||
assert play_media.mime_type == "video/mp4"
|
||||
assert play_media.url == snapshot
|
||||
@@ -59,7 +63,11 @@ async def test_resolve(
|
||||
mock_api.get_item.return_value = load_json_fixture("unsupported-item.json")
|
||||
|
||||
with pytest.raises(BrowseError):
|
||||
await async_resolve_media(hass, f"{URI_SCHEME}{DOMAIN}/UNSUPPORTED-ITEM-UUID")
|
||||
await async_resolve_media(
|
||||
hass,
|
||||
f"{URI_SCHEME}{DOMAIN}/UNSUPPORTED-ITEM-UUID",
|
||||
"media_player.jellyfin_device",
|
||||
)
|
||||
|
||||
|
||||
async def test_root(
|
||||
|
||||
@@ -2791,7 +2791,7 @@ def test_report_invalid_color_mode(
|
||||
(
|
||||
light.ColorMode.ONOFF,
|
||||
{light.ColorMode.ONOFF, light.ColorMode.BRIGHTNESS},
|
||||
"zha", # We don't log issues for zha
|
||||
"tuya", # We don't log issues for tuya
|
||||
False,
|
||||
),
|
||||
],
|
||||
|
||||
@@ -121,17 +121,13 @@ async def test_async_resolve_media_no_entity(
|
||||
assert await async_setup_component(hass, media_source.DOMAIN, {})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
-    media = await media_source.async_resolve_media(
-        hass,
-        media_source.generate_media_source_id(media_source.DOMAIN, "local/test.mp3"),
-    )
-    assert isinstance(media, media_source.models.PlayMedia)
-    assert media.url == "/media/local/test.mp3"
-    assert media.mime_type == "audio/mpeg"
-    assert (
-        "calls media_source.async_resolve_media without passing an entity_id"
-        in caplog.text
-    )
+    with pytest.raises(RuntimeError):
+        await media_source.async_resolve_media(
+            hass,
+            media_source.generate_media_source_id(
+                media_source.DOMAIN, "local/test.mp3"
+            ),
+        )
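Taken together with the jellyfin changes above, these test updates reflect that `async_resolve_media` now takes the target entity id as a third argument (it may be `None` when there is no media player context), and that omitting it raises `RuntimeError` instead of only logging a warning. A minimal usage sketch, assuming the same helper the tests import and an illustrative entity id:

    from homeassistant.components.media_source import async_resolve_media

    play_media = await async_resolve_media(
        hass,
        "media-source://media_source/local/test.mp3",
        "media_player.living_room",  # assumed entity_id, purely illustrative
    )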
|
||||
|
||||
|
||||
async def test_async_unresolve_media(hass: HomeAssistant) -> None:
|
||||
|
||||
@@ -81,7 +81,9 @@ async def test_resolve(
|
||||
f"FILE|{config_entry.entry_id}|{TEST_CHANNEL}|{TEST_STREAM}|{TEST_FILE_NAME}"
|
||||
)
|
||||
|
||||
play_media = await async_resolve_media(hass, f"{URI_SCHEME}{DOMAIN}/{file_id}")
|
||||
play_media = await async_resolve_media(
|
||||
hass, f"{URI_SCHEME}{DOMAIN}/{file_id}", None
|
||||
)
|
||||
|
||||
assert play_media.mime_type == TEST_MIME_TYPE
|
||||
|
||||
@@ -245,7 +247,7 @@ async def test_browsing_errors(
|
||||
with pytest.raises(Unresolvable):
|
||||
await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/UNKNOWN")
|
||||
with pytest.raises(Unresolvable):
|
||||
await async_resolve_media(hass, f"{URI_SCHEME}{DOMAIN}/UNKNOWN")
|
||||
await async_resolve_media(hass, f"{URI_SCHEME}{DOMAIN}/UNKNOWN", None)
|
||||
|
||||
|
||||
async def test_browsing_not_loaded(
|
||||
|
||||
@@ -17,7 +17,6 @@ from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
CONF_PASSWORD,
|
||||
CONF_USERNAME,
|
||||
EVENT_HOMEASSISTANT_STARTED,
|
||||
STATE_ON,
|
||||
STATE_UNAVAILABLE,
|
||||
)
|
||||
@@ -52,17 +51,13 @@ async def test_configuring_tplink_causes_discovery(hass: HomeAssistant) -> None:
|
||||
call_count = len(discover.mock_calls)
|
||||
assert discover.mock_calls
|
||||
|
||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
|
||||
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=15))
|
||||
await hass.async_block_till_done()
|
||||
assert len(discover.mock_calls) == call_count * 2
|
||||
|
||||
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=15))
|
||||
await hass.async_block_till_done()
|
||||
assert len(discover.mock_calls) == call_count * 3
|
||||
|
||||
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=30))
|
||||
await hass.async_block_till_done()
|
||||
-    assert len(discover.mock_calls) == call_count * 4
+    assert len(discover.mock_calls) == call_count * 3
|
||||
|
||||
|
||||
async def test_config_entry_reload(hass: HomeAssistant) -> None:
|
||||
|
||||
@@ -6,7 +6,6 @@ from homeassistant.components.valve import (
|
||||
SERVICE_CLOSE_VALVE,
|
||||
SERVICE_OPEN_VALVE,
|
||||
SERVICE_SET_VALVE_POSITION,
|
||||
intent as valve_intent,
|
||||
)
|
||||
from homeassistant.const import STATE_CLOSED, STATE_OPEN
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -60,7 +59,7 @@ async def test_close_valve_intent(hass: HomeAssistant) -> None:
|
||||
|
||||
async def test_set_valve_position(hass: HomeAssistant) -> None:
|
||||
"""Test HassSetPosition intent for valves."""
|
||||
-    await valve_intent.async_setup_intents(hass)
+    assert await async_setup_component(hass, "intent", {})
|
||||
|
||||
entity_id = f"{DOMAIN}.test_valve"
|
||||
hass.states.async_set(
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
"""Test tts."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
from syrupy.assertion import SnapshotAssertion
|
||||
from wyoming.info import Info
|
||||
|
||||
from homeassistant.components.wyoming.data import WyomingService, load_wyoming_info
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -27,10 +29,13 @@ async def test_load_info_oserror(hass: HomeAssistant) -> None:
|
||||
"""Test loading info and error raising."""
|
||||
mock_client = MockAsyncTcpClient([STT_INFO.event()])
|
||||
|
||||
-    with patch(
-        "homeassistant.components.wyoming.data.AsyncTcpClient",
-        mock_client,
-    ), patch.object(mock_client, "read_event", side_effect=OSError("Boom!")):
+    with (
+        patch(
+            "homeassistant.components.wyoming.data.AsyncTcpClient",
+            mock_client,
+        ),
+        patch.object(mock_client, "read_event", side_effect=OSError("Boom!")),
+    ):
|
||||
info = await load_wyoming_info(
|
||||
"localhost",
|
||||
1234,
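The hunk above only reformats the mocks: the two stacked `patch(...)` context managers are wrapped in a single parenthesized `with (...)` block, a spelling Python supports natively from 3.10 onward. A minimal sketch of the two equivalent forms, with placeholder patch targets rather than the Wyoming test targets:

    from unittest.mock import patch

    # chained managers on one `with` statement (works on any Python 3)
    with patch("pkg.module.ClientA"), patch("pkg.module.read", side_effect=OSError("Boom!")):
        ...

    # parenthesized form (Python 3.10+): one manager per line, trailing comma allowed
    with (
        patch("pkg.module.ClientA"),
        patch("pkg.module.read", side_effect=OSError("Boom!")),
    ):
        ...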
|
||||
@@ -75,3 +80,21 @@ async def test_service_name(hass: HomeAssistant) -> None:
|
||||
service = await WyomingService.create("localhost", 1234)
|
||||
assert service is not None
|
||||
assert service.get_name() == SATELLITE_INFO.satellite.name
|
||||
|
||||
|
||||
async def test_satellite_with_wake_word(hass: HomeAssistant) -> None:
|
||||
"""Test that wake word info with satellite doesn't overwrite the service name."""
|
||||
# Info for local wake word detection
|
||||
satellite_info = Info(
|
||||
satellite=SATELLITE_INFO.satellite,
|
||||
wake=WAKE_WORD_INFO.wake,
|
||||
)
|
||||
|
||||
with patch(
|
||||
"homeassistant.components.wyoming.data.AsyncTcpClient",
|
||||
MockAsyncTcpClient([satellite_info.event()]),
|
||||
):
|
||||
service = await WyomingService.create("localhost", 1234)
|
||||
assert service is not None
|
||||
assert service.get_name() == satellite_info.satellite.name
|
||||
assert not service.platforms
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Test ZHA button."""
|
||||
from typing import Final
|
||||
from unittest.mock import call, patch
|
||||
|
||||
from freezegun import freeze_time
|
||||
@@ -15,6 +16,7 @@ from zigpy.const import SIG_EP_PROFILE
|
||||
from zigpy.exceptions import ZigbeeException
|
||||
import zigpy.profiles.zha as zha
|
||||
from zigpy.quirks import CustomCluster, CustomDevice
|
||||
from zigpy.quirks.v2 import add_to_registry_v2
|
||||
import zigpy.types as t
|
||||
import zigpy.zcl.clusters.general as general
|
||||
from zigpy.zcl.clusters.manufacturer_specific import ManufacturerSpecificCluster
|
||||
@@ -33,7 +35,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
-from .common import find_entity_id
+from .common import find_entity_id, update_attribute_cache
|
||||
from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE
|
||||
|
||||
|
||||
@@ -56,7 +58,9 @@ def button_platform_only():
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
-async def contact_sensor(hass, zigpy_device_mock, zha_device_joined_restored):
+async def contact_sensor(
+    hass: HomeAssistant, zigpy_device_mock, zha_device_joined_restored
+):
|
||||
"""Contact sensor fixture."""
|
||||
|
||||
zigpy_device = zigpy_device_mock(
|
||||
@@ -102,7 +106,9 @@ class FrostLockQuirk(CustomDevice):
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
-async def tuya_water_valve(hass, zigpy_device_mock, zha_device_joined_restored):
+async def tuya_water_valve(
+    hass: HomeAssistant, zigpy_device_mock, zha_device_joined_restored
+):
|
||||
"""Tuya Water Valve fixture."""
|
||||
|
||||
zigpy_device = zigpy_device_mock(
|
||||
@@ -224,3 +230,141 @@ async def test_frost_unlock(hass: HomeAssistant, tuya_water_valve) -> None:
|
||||
call({"frost_lock_reset": 0}, manufacturer=None),
|
||||
call({"frost_lock_reset": 0}, manufacturer=None),
|
||||
]
|
||||
|
||||
|
||||
class FakeManufacturerCluster(CustomCluster, ManufacturerSpecificCluster):
|
||||
"""Fake manufacturer cluster."""
|
||||
|
||||
cluster_id: Final = 0xFFF3
|
||||
ep_attribute: Final = "mfg_identify"
|
||||
|
||||
class AttributeDefs(zcl_f.BaseAttributeDefs):
|
||||
"""Attribute definitions."""
|
||||
|
||||
feed: Final = zcl_f.ZCLAttributeDef(
|
||||
id=0x0000, type=t.uint8_t, access="rw", is_manufacturer_specific=True
|
||||
)
|
||||
|
||||
class ServerCommandDefs(zcl_f.BaseCommandDefs):
|
||||
"""Server command definitions."""
|
||||
|
||||
self_test: Final = zcl_f.ZCLCommandDef(
|
||||
id=0x00, schema={"identify_time": t.uint16_t}, direction=False
|
||||
)
|
||||
|
||||
|
||||
(
|
||||
add_to_registry_v2("Fake_Model", "Fake_Manufacturer")
|
||||
.replaces(FakeManufacturerCluster)
|
||||
.command_button(
|
||||
FakeManufacturerCluster.ServerCommandDefs.self_test.name,
|
||||
FakeManufacturerCluster.cluster_id,
|
||||
command_args=(5,),
|
||||
)
|
||||
.write_attr_button(
|
||||
FakeManufacturerCluster.AttributeDefs.feed.name,
|
||||
2,
|
||||
FakeManufacturerCluster.cluster_id,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def custom_button_device(
|
||||
hass: HomeAssistant, zigpy_device_mock, zha_device_joined_restored
|
||||
):
|
||||
"""Button device fixture for quirks button tests."""
|
||||
|
||||
zigpy_device = zigpy_device_mock(
|
||||
{
|
||||
1: {
|
||||
SIG_EP_INPUT: [
|
||||
general.Basic.cluster_id,
|
||||
FakeManufacturerCluster.cluster_id,
|
||||
],
|
||||
SIG_EP_OUTPUT: [],
|
||||
SIG_EP_TYPE: zha.DeviceType.REMOTE_CONTROL,
|
||||
SIG_EP_PROFILE: zha.PROFILE_ID,
|
||||
}
|
||||
},
|
||||
manufacturer="Fake_Model",
|
||||
model="Fake_Manufacturer",
|
||||
)
|
||||
|
||||
zigpy_device.endpoints[1].mfg_identify.PLUGGED_ATTR_READS = {
|
||||
FakeManufacturerCluster.AttributeDefs.feed.name: 0,
|
||||
}
|
||||
update_attribute_cache(zigpy_device.endpoints[1].mfg_identify)
|
||||
zha_device = await zha_device_joined_restored(zigpy_device)
|
||||
return zha_device, zigpy_device.endpoints[1].mfg_identify
|
||||
|
||||
|
||||
@freeze_time("2021-11-04 17:37:00", tz_offset=-1)
|
||||
async def test_quirks_command_button(hass: HomeAssistant, custom_button_device) -> None:
|
||||
"""Test ZHA button platform."""
|
||||
|
||||
zha_device, cluster = custom_button_device
|
||||
assert cluster is not None
|
||||
entity_id = find_entity_id(DOMAIN, zha_device, hass, qualifier="self_test")
|
||||
assert entity_id is not None
|
||||
|
||||
state = hass.states.get(entity_id)
|
||||
assert state
|
||||
assert state.state == STATE_UNKNOWN
|
||||
|
||||
with patch(
|
||||
"zigpy.zcl.Cluster.request",
|
||||
return_value=[0x00, zcl_f.Status.SUCCESS],
|
||||
):
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
SERVICE_PRESS,
|
||||
{ATTR_ENTITY_ID: entity_id},
|
||||
blocking=True,
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
assert len(cluster.request.mock_calls) == 1
|
||||
assert cluster.request.call_args[0][0] is False
|
||||
assert cluster.request.call_args[0][1] == 0
|
||||
assert cluster.request.call_args[0][3] == 5 # duration in seconds
|
||||
|
||||
state = hass.states.get(entity_id)
|
||||
assert state
|
||||
assert state.state == "2021-11-04T16:37:00+00:00"
|
||||
|
||||
|
||||
@freeze_time("2021-11-04 17:37:00", tz_offset=-1)
|
||||
async def test_quirks_write_attr_button(
|
||||
hass: HomeAssistant, custom_button_device
|
||||
) -> None:
|
||||
"""Test ZHA button platform."""
|
||||
|
||||
zha_device, cluster = custom_button_device
|
||||
assert cluster is not None
|
||||
entity_id = find_entity_id(DOMAIN, zha_device, hass, qualifier="feed")
|
||||
assert entity_id is not None
|
||||
|
||||
state = hass.states.get(entity_id)
|
||||
assert state
|
||||
assert state.state == STATE_UNKNOWN
|
||||
assert cluster.get(cluster.AttributeDefs.feed.name) == 0
|
||||
|
||||
with patch(
|
||||
"zigpy.zcl.Cluster.request",
|
||||
return_value=[0x00, zcl_f.Status.SUCCESS],
|
||||
):
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
SERVICE_PRESS,
|
||||
{ATTR_ENTITY_ID: entity_id},
|
||||
blocking=True,
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
assert cluster.write_attributes.mock_calls == [
|
||||
call({cluster.AttributeDefs.feed.name: 2}, manufacturer=None)
|
||||
]
|
||||
|
||||
state = hass.states.get(entity_id)
|
||||
assert state
|
||||
assert state.state == "2021-11-04T16:37:00+00:00"
|
||||
assert cluster.get(cluster.AttributeDefs.feed.name) == 2
|
||||
|
||||
@@ -6,10 +6,23 @@ from unittest import mock
|
||||
from unittest.mock import AsyncMock, Mock, patch
|
||||
|
||||
import pytest
|
||||
from zhaquirks.ikea import PowerConfig1CRCluster, ScenesCluster
|
||||
from zhaquirks.xiaomi import (
|
||||
BasicCluster,
|
||||
LocalIlluminanceMeasurementCluster,
|
||||
XiaomiPowerConfigurationPercent,
|
||||
)
|
||||
from zhaquirks.xiaomi.aqara.driver_curtain_e1 import (
|
||||
WindowCoveringE1,
|
||||
XiaomiAqaraDriverE1,
|
||||
)
|
||||
from zigpy.const import SIG_ENDPOINTS, SIG_MANUFACTURER, SIG_MODEL, SIG_NODE_DESC
|
||||
import zigpy.profiles.zha
|
||||
import zigpy.quirks
|
||||
from zigpy.quirks.v2 import EntityType, add_to_registry_v2
|
||||
from zigpy.quirks.v2.homeassistant import UnitOfTime
|
||||
import zigpy.types
|
||||
from zigpy.zcl import ClusterType
|
||||
import zigpy.zcl.clusters.closures
|
||||
import zigpy.zcl.clusters.general
|
||||
import zigpy.zcl.clusters.security
|
||||
@@ -22,11 +35,12 @@ import homeassistant.components.zha.core.discovery as disc
|
||||
from homeassistant.components.zha.core.endpoint import Endpoint
|
||||
from homeassistant.components.zha.core.helpers import get_zha_gateway
|
||||
import homeassistant.components.zha.core.registries as zha_regs
|
||||
-from homeassistant.const import Platform
+from homeassistant.const import STATE_OFF, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.entity_platform import EntityPlatform
|
||||
|
||||
from .common import find_entity_id, update_attribute_cache
|
||||
from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE
|
||||
from .zha_devices_list import (
|
||||
DEV_SIG_ATTRIBUTES,
|
||||
@@ -147,7 +161,9 @@ async def test_devices(
|
||||
for (platform, unique_id), ent_info in device[DEV_SIG_ENT_MAP].items():
|
||||
no_tail_id = NO_TAIL_ID.sub("", ent_info[DEV_SIG_ENT_MAP_ID])
|
||||
ha_entity_id = entity_registry.async_get_entity_id(platform, "zha", unique_id)
|
||||
-        assert ha_entity_id is not None
+        message1 = f"No entity found for platform[{platform}] unique_id[{unique_id}]"
+        message2 = f"no_tail_id[{no_tail_id}] with entity_id[{ha_entity_id}]"
+        assert ha_entity_id is not None, f"{message1} {message2}"
|
||||
assert ha_entity_id.startswith(no_tail_id)
|
||||
|
||||
entity = created_entities[ha_entity_id]
|
||||
@@ -461,3 +477,332 @@ async def test_group_probe_cleanup_called(
|
||||
await config_entry.async_unload(hass_disable_services)
|
||||
await hass_disable_services.async_block_till_done()
|
||||
disc.GROUP_PROBE.cleanup.assert_called()
|
||||
|
||||
|
||||
async def test_quirks_v2_entity_discovery(
|
||||
hass,
|
||||
zigpy_device_mock,
|
||||
zha_device_joined,
|
||||
) -> None:
|
||||
"""Test quirks v2 discovery."""
|
||||
|
||||
zigpy_device = zigpy_device_mock(
|
||||
{
|
||||
1: {
|
||||
SIG_EP_INPUT: [
|
||||
zigpy.zcl.clusters.general.PowerConfiguration.cluster_id,
|
||||
zigpy.zcl.clusters.general.Groups.cluster_id,
|
||||
zigpy.zcl.clusters.general.OnOff.cluster_id,
|
||||
],
|
||||
SIG_EP_OUTPUT: [
|
||||
zigpy.zcl.clusters.general.Scenes.cluster_id,
|
||||
],
|
||||
SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.NON_COLOR_CONTROLLER,
|
||||
}
|
||||
},
|
||||
ieee="01:2d:6f:00:0a:90:69:e8",
|
||||
manufacturer="Ikea of Sweden",
|
||||
model="TRADFRI remote control",
|
||||
)
|
||||
|
||||
(
|
||||
add_to_registry_v2(
|
||||
"Ikea of Sweden", "TRADFRI remote control", zigpy.quirks._DEVICE_REGISTRY
|
||||
)
|
||||
.replaces(PowerConfig1CRCluster)
|
||||
.replaces(ScenesCluster, cluster_type=ClusterType.Client)
|
||||
.number(
|
||||
zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name,
|
||||
zigpy.zcl.clusters.general.OnOff.cluster_id,
|
||||
min_value=1,
|
||||
max_value=100,
|
||||
step=1,
|
||||
unit=UnitOfTime.SECONDS,
|
||||
multiplier=1,
|
||||
)
|
||||
)
|
||||
|
||||
zigpy_device = zigpy.quirks._DEVICE_REGISTRY.get_device(zigpy_device)
|
||||
zigpy_device.endpoints[1].power.PLUGGED_ATTR_READS = {
|
||||
"battery_voltage": 3,
|
||||
"battery_percentage_remaining": 100,
|
||||
}
|
||||
update_attribute_cache(zigpy_device.endpoints[1].power)
|
||||
zigpy_device.endpoints[1].on_off.PLUGGED_ATTR_READS = {
|
||||
zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name: 3,
|
||||
}
|
||||
update_attribute_cache(zigpy_device.endpoints[1].on_off)
|
||||
|
||||
zha_device = await zha_device_joined(zigpy_device)
|
||||
|
||||
entity_id = find_entity_id(
|
||||
Platform.NUMBER,
|
||||
zha_device,
|
||||
hass,
|
||||
)
|
||||
assert entity_id is not None
|
||||
|
||||
state = hass.states.get(entity_id)
|
||||
assert state is not None
|
||||
|
||||
|
||||
async def test_quirks_v2_entity_discovery_e1_curtain(
|
||||
hass,
|
||||
zigpy_device_mock,
|
||||
zha_device_joined,
|
||||
) -> None:
|
||||
"""Test quirks v2 discovery for e1 curtain motor."""
|
||||
aqara_E1_device = zigpy_device_mock(
|
||||
{
|
||||
1: {
|
||||
SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.WINDOW_COVERING_DEVICE,
|
||||
SIG_EP_INPUT: [
|
||||
zigpy.zcl.clusters.general.Basic.cluster_id,
|
||||
zigpy.zcl.clusters.general.PowerConfiguration.cluster_id,
|
||||
zigpy.zcl.clusters.general.Identify.cluster_id,
|
||||
zigpy.zcl.clusters.general.Time.cluster_id,
|
||||
WindowCoveringE1.cluster_id,
|
||||
XiaomiAqaraDriverE1.cluster_id,
|
||||
],
|
||||
SIG_EP_OUTPUT: [
|
||||
zigpy.zcl.clusters.general.Identify.cluster_id,
|
||||
zigpy.zcl.clusters.general.Time.cluster_id,
|
||||
zigpy.zcl.clusters.general.Ota.cluster_id,
|
||||
XiaomiAqaraDriverE1.cluster_id,
|
||||
],
|
||||
}
|
||||
},
|
||||
ieee="01:2d:6f:00:0a:90:69:e8",
|
||||
manufacturer="LUMI",
|
||||
model="lumi.curtain.agl006",
|
||||
)
|
||||
|
||||
class AqaraE1HookState(zigpy.types.enum8):
|
||||
"""Aqara hook state."""
|
||||
|
||||
Unlocked = 0x00
|
||||
Locked = 0x01
|
||||
Locking = 0x02
|
||||
Unlocking = 0x03
|
||||
|
||||
class FakeXiaomiAqaraDriverE1(XiaomiAqaraDriverE1):
|
||||
"""Fake XiaomiAqaraDriverE1 cluster."""
|
||||
|
||||
attributes = XiaomiAqaraDriverE1.attributes.copy()
|
||||
attributes.update(
|
||||
{
|
||||
0x9999: ("error_detected", zigpy.types.Bool, True),
|
||||
}
|
||||
)
|
||||
|
||||
(
|
||||
add_to_registry_v2("LUMI", "lumi.curtain.agl006")
|
||||
.adds(LocalIlluminanceMeasurementCluster)
|
||||
.replaces(BasicCluster)
|
||||
.replaces(XiaomiPowerConfigurationPercent)
|
||||
.replaces(WindowCoveringE1)
|
||||
.replaces(FakeXiaomiAqaraDriverE1)
|
||||
.removes(FakeXiaomiAqaraDriverE1, cluster_type=ClusterType.Client)
|
||||
.enum(
|
||||
BasicCluster.AttributeDefs.power_source.name,
|
||||
BasicCluster.PowerSource,
|
||||
BasicCluster.cluster_id,
|
||||
entity_platform=Platform.SENSOR,
|
||||
entity_type=EntityType.DIAGNOSTIC,
|
||||
)
|
||||
.enum(
|
||||
"hooks_state",
|
||||
AqaraE1HookState,
|
||||
FakeXiaomiAqaraDriverE1.cluster_id,
|
||||
entity_platform=Platform.SENSOR,
|
||||
entity_type=EntityType.DIAGNOSTIC,
|
||||
)
|
||||
.binary_sensor("error_detected", FakeXiaomiAqaraDriverE1.cluster_id)
|
||||
)
|
||||
|
||||
aqara_E1_device = zigpy.quirks._DEVICE_REGISTRY.get_device(aqara_E1_device)
|
||||
|
||||
aqara_E1_device.endpoints[1].opple_cluster.PLUGGED_ATTR_READS = {
|
||||
"hand_open": 0,
|
||||
"positions_stored": 0,
|
||||
"hooks_lock": 0,
|
||||
"hooks_state": AqaraE1HookState.Unlocked,
|
||||
"light_level": 0,
|
||||
"error_detected": 0,
|
||||
}
|
||||
update_attribute_cache(aqara_E1_device.endpoints[1].opple_cluster)
|
||||
|
||||
aqara_E1_device.endpoints[1].basic.PLUGGED_ATTR_READS = {
|
||||
BasicCluster.AttributeDefs.power_source.name: BasicCluster.PowerSource.Mains_single_phase,
|
||||
}
|
||||
update_attribute_cache(aqara_E1_device.endpoints[1].basic)
|
||||
|
||||
WCAttrs = zigpy.zcl.clusters.closures.WindowCovering.AttributeDefs
|
||||
WCT = zigpy.zcl.clusters.closures.WindowCovering.WindowCoveringType
|
||||
WCCS = zigpy.zcl.clusters.closures.WindowCovering.ConfigStatus
|
||||
aqara_E1_device.endpoints[1].window_covering.PLUGGED_ATTR_READS = {
|
||||
WCAttrs.current_position_lift_percentage.name: 0,
|
||||
WCAttrs.window_covering_type.name: WCT.Drapery,
|
||||
WCAttrs.config_status.name: WCCS(~WCCS.Open_up_commands_reversed),
|
||||
}
|
||||
update_attribute_cache(aqara_E1_device.endpoints[1].window_covering)
|
||||
|
||||
zha_device = await zha_device_joined(aqara_E1_device)
|
||||
|
||||
power_source_entity_id = find_entity_id(
|
||||
Platform.SENSOR,
|
||||
zha_device,
|
||||
hass,
|
||||
qualifier=BasicCluster.AttributeDefs.power_source.name,
|
||||
)
|
||||
assert power_source_entity_id is not None
|
||||
state = hass.states.get(power_source_entity_id)
|
||||
assert state is not None
|
||||
assert state.state == BasicCluster.PowerSource.Mains_single_phase.name
|
||||
|
||||
hook_state_entity_id = find_entity_id(
|
||||
Platform.SENSOR,
|
||||
zha_device,
|
||||
hass,
|
||||
qualifier="hooks_state",
|
||||
)
|
||||
assert hook_state_entity_id is not None
|
||||
state = hass.states.get(hook_state_entity_id)
|
||||
assert state is not None
|
||||
assert state.state == AqaraE1HookState.Unlocked.name
|
||||
|
||||
error_detected_entity_id = find_entity_id(
|
||||
Platform.BINARY_SENSOR,
|
||||
zha_device,
|
||||
hass,
|
||||
)
|
||||
assert error_detected_entity_id is not None
|
||||
state = hass.states.get(error_detected_entity_id)
|
||||
assert state is not None
|
||||
assert state.state == STATE_OFF
|
||||
|
||||
|
||||
def _get_test_device(zigpy_device_mock, manufacturer: str, model: str):
|
||||
zigpy_device = zigpy_device_mock(
|
||||
{
|
||||
1: {
|
||||
SIG_EP_INPUT: [
|
||||
zigpy.zcl.clusters.general.PowerConfiguration.cluster_id,
|
||||
zigpy.zcl.clusters.general.Groups.cluster_id,
|
||||
zigpy.zcl.clusters.general.OnOff.cluster_id,
|
||||
],
|
||||
SIG_EP_OUTPUT: [
|
||||
zigpy.zcl.clusters.general.Scenes.cluster_id,
|
||||
],
|
||||
SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.NON_COLOR_CONTROLLER,
|
||||
}
|
||||
},
|
||||
ieee="01:2d:6f:00:0a:90:69:e8",
|
||||
manufacturer=manufacturer,
|
||||
model=model,
|
||||
)
|
||||
|
||||
(
|
||||
add_to_registry_v2(manufacturer, model, zigpy.quirks._DEVICE_REGISTRY)
|
||||
.replaces(PowerConfig1CRCluster)
|
||||
.replaces(ScenesCluster, cluster_type=ClusterType.Client)
|
||||
.number(
|
||||
zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name,
|
||||
zigpy.zcl.clusters.general.OnOff.cluster_id,
|
||||
endpoint_id=3,
|
||||
min_value=1,
|
||||
max_value=100,
|
||||
step=1,
|
||||
unit=UnitOfTime.SECONDS,
|
||||
multiplier=1,
|
||||
)
|
||||
.number(
|
||||
zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name,
|
||||
zigpy.zcl.clusters.general.Time.cluster_id,
|
||||
min_value=1,
|
||||
max_value=100,
|
||||
step=1,
|
||||
unit=UnitOfTime.SECONDS,
|
||||
multiplier=1,
|
||||
)
|
||||
.sensor(
|
||||
zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name,
|
||||
zigpy.zcl.clusters.general.OnOff.cluster_id,
|
||||
entity_type=EntityType.CONFIG,
|
||||
)
|
||||
)
|
||||
|
||||
zigpy_device = zigpy.quirks._DEVICE_REGISTRY.get_device(zigpy_device)
|
||||
zigpy_device.endpoints[1].power.PLUGGED_ATTR_READS = {
|
||||
"battery_voltage": 3,
|
||||
"battery_percentage_remaining": 100,
|
||||
}
|
||||
update_attribute_cache(zigpy_device.endpoints[1].power)
|
||||
zigpy_device.endpoints[1].on_off.PLUGGED_ATTR_READS = {
|
||||
zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name: 3,
|
||||
}
|
||||
update_attribute_cache(zigpy_device.endpoints[1].on_off)
|
||||
return zigpy_device
|
||||
|
||||
|
||||
async def test_quirks_v2_entity_no_metadata(
|
||||
hass: HomeAssistant,
|
||||
zigpy_device_mock,
|
||||
zha_device_joined,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""Test quirks v2 discovery skipped - no metadata."""
|
||||
|
||||
zigpy_device = _get_test_device(
|
||||
zigpy_device_mock, "Ikea of Sweden2", "TRADFRI remote control2"
|
||||
)
|
||||
setattr(zigpy_device, "_exposes_metadata", {})
|
||||
zha_device = await zha_device_joined(zigpy_device)
|
||||
assert (
|
||||
f"Device: {str(zigpy_device.ieee)}-{zha_device.name} does not expose any quirks v2 entities"
|
||||
in caplog.text
|
||||
)
|
||||
|
||||
|
||||
async def test_quirks_v2_entity_discovery_errors(
|
||||
hass: HomeAssistant,
|
||||
zigpy_device_mock,
|
||||
zha_device_joined,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""Test quirks v2 discovery skipped - errors."""
|
||||
|
||||
zigpy_device = _get_test_device(
|
||||
zigpy_device_mock, "Ikea of Sweden3", "TRADFRI remote control3"
|
||||
)
|
||||
zha_device = await zha_device_joined(zigpy_device)
|
||||
|
||||
m1 = f"Device: {str(zigpy_device.ieee)}-{zha_device.name} does not have an"
|
||||
m2 = " endpoint with id: 3 - unable to create entity with cluster"
|
||||
m3 = " details: (3, 6, <ClusterType.Server: 0>)"
|
||||
assert f"{m1}{m2}{m3}" in caplog.text
|
||||
|
||||
time_cluster_id = zigpy.zcl.clusters.general.Time.cluster_id
|
||||
|
||||
m1 = f"Device: {str(zigpy_device.ieee)}-{zha_device.name} does not have a"
|
||||
m2 = f" cluster with id: {time_cluster_id} - unable to create entity with "
|
||||
m3 = f"cluster details: (1, {time_cluster_id}, <ClusterType.Server: 0>)"
|
||||
assert f"{m1}{m2}{m3}" in caplog.text
|
||||
|
||||
# fmt: off
|
||||
entity_details = (
|
||||
"{'cluster_details': (1, 6, <ClusterType.Server: 0>), "
|
||||
"'quirk_metadata': EntityMetadata(entity_metadata=ZCLSensorMetadata("
|
||||
"attribute_name='off_wait_time', divisor=1, multiplier=1, unit=None, "
|
||||
"device_class=None, state_class=None), entity_platform=<EntityPlatform."
|
||||
"SENSOR: 'sensor'>, entity_type=<EntityType.CONFIG: 'config'>, "
|
||||
"cluster_id=6, endpoint_id=1, cluster_type=<ClusterType.Server: 0>, "
|
||||
"initially_disabled=False, attribute_initialized_from_cache=True, "
|
||||
"translation_key=None)}"
|
||||
)
|
||||
# fmt: on
|
||||
|
||||
m1 = f"Device: {str(zigpy_device.ieee)}-{zha_device.name} has an entity with "
|
||||
m2 = f"details: {entity_details} that does not have an entity class mapping - "
|
||||
m3 = "unable to create entity"
|
||||
assert f"{m1}{m2}{m3}" in caplog.text
|
||||
|
||||
@@ -12,6 +12,7 @@ from zhaquirks import (
|
||||
from zigpy.const import SIG_EP_PROFILE
|
||||
import zigpy.profiles.zha as zha
|
||||
from zigpy.quirks import CustomCluster, CustomDevice
|
||||
from zigpy.quirks.v2 import CustomDeviceV2, add_to_registry_v2
|
||||
import zigpy.types as t
|
||||
import zigpy.zcl.clusters.general as general
|
||||
from zigpy.zcl.clusters.manufacturer_specific import ManufacturerSpecificCluster
|
||||
@@ -349,13 +350,19 @@ class MotionSensitivityQuirk(CustomDevice):
|
||||
ep_attribute = "opple_cluster"
|
||||
attributes = {
|
||||
0x010C: ("motion_sensitivity", t.uint8_t, True),
|
||||
0x020C: ("motion_sensitivity_disabled", t.uint8_t, True),
|
||||
}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""Initialize."""
|
||||
super().__init__(*args, **kwargs)
|
||||
# populate cache to create config entity
|
||||
-        self._attr_cache.update({0x010C: AqaraMotionSensitivities.Medium})
+        self._attr_cache.update(
+            {
+                0x010C: AqaraMotionSensitivities.Medium,
+                0x020C: AqaraMotionSensitivities.Medium,
+            }
+        )
|
||||
|
||||
replacement = {
|
||||
ENDPOINTS: {
|
||||
@@ -413,3 +420,79 @@ async def test_on_off_select_attribute_report(
|
||||
hass, cluster, {"motion_sensitivity": AqaraMotionSensitivities.Low}
|
||||
)
|
||||
assert hass.states.get(entity_id).state == AqaraMotionSensitivities.Low.name
|
||||
|
||||
|
||||
(
|
||||
add_to_registry_v2("Fake_Manufacturer", "Fake_Model")
|
||||
.replaces(MotionSensitivityQuirk.OppleCluster)
|
||||
.enum(
|
||||
"motion_sensitivity",
|
||||
AqaraMotionSensitivities,
|
||||
MotionSensitivityQuirk.OppleCluster.cluster_id,
|
||||
)
|
||||
.enum(
|
||||
"motion_sensitivity_disabled",
|
||||
AqaraMotionSensitivities,
|
||||
MotionSensitivityQuirk.OppleCluster.cluster_id,
|
||||
translation_key="motion_sensitivity_translation_key",
|
||||
initially_disabled=True,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def zigpy_device_aqara_sensor_v2(
|
||||
hass: HomeAssistant, zigpy_device_mock, zha_device_joined_restored
|
||||
):
|
||||
"""Device tracker zigpy Aqara motion sensor device."""
|
||||
|
||||
zigpy_device = zigpy_device_mock(
|
||||
{
|
||||
1: {
|
||||
SIG_EP_INPUT: [
|
||||
general.Basic.cluster_id,
|
||||
MotionSensitivityQuirk.OppleCluster.cluster_id,
|
||||
],
|
||||
SIG_EP_OUTPUT: [],
|
||||
SIG_EP_TYPE: zha.DeviceType.OCCUPANCY_SENSOR,
|
||||
}
|
||||
},
|
||||
manufacturer="Fake_Manufacturer",
|
||||
model="Fake_Model",
|
||||
)
|
||||
|
||||
zha_device = await zha_device_joined_restored(zigpy_device)
|
||||
return zha_device, zigpy_device.endpoints[1].opple_cluster
|
||||
|
||||
|
||||
async def test_on_off_select_attribute_report_v2(
|
||||
hass: HomeAssistant, zigpy_device_aqara_sensor_v2
|
||||
) -> None:
|
||||
"""Test ZHA attribute report parsing for select platform."""
|
||||
|
||||
zha_device, cluster = zigpy_device_aqara_sensor_v2
|
||||
assert isinstance(zha_device.device, CustomDeviceV2)
|
||||
entity_id = find_entity_id(
|
||||
Platform.SELECT, zha_device, hass, qualifier="motion_sensitivity"
|
||||
)
|
||||
assert entity_id is not None
|
||||
|
||||
# allow traffic to flow through the gateway and device
|
||||
await async_enable_traffic(hass, [zha_device])
|
||||
|
||||
# test that the state is in default medium state
|
||||
assert hass.states.get(entity_id).state == AqaraMotionSensitivities.Medium.name
|
||||
|
||||
# send attribute report from device
|
||||
await send_attributes_report(
|
||||
hass, cluster, {"motion_sensitivity": AqaraMotionSensitivities.Low}
|
||||
)
|
||||
assert hass.states.get(entity_id).state == AqaraMotionSensitivities.Low.name
|
||||
|
||||
entity_registry = er.async_get(hass)
|
||||
# none in id because the translation key does not exist
|
||||
entity_entry = entity_registry.async_get("select.fake_manufacturer_fake_model_none")
|
||||
assert entity_entry
|
||||
assert entity_entry.entity_category == EntityCategory.CONFIG
|
||||
assert entity_entry.disabled is True
|
||||
assert entity_entry.translation_key == "motion_sensitivity_translation_key"
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.