mirror of
https://github.com/home-assistant/core.git
synced 2026-05-08 02:46:39 +02:00
Compare commits
90 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| b01e56582a | |||
| 9be078475d | |||
| 9174ae4e00 | |||
| d4aa1b53f2 | |||
| ba29f210c2 | |||
| 845572927c | |||
| 9cd7ac2722 | |||
| a7fd763570 | |||
| 65491372c2 | |||
| de96ee44e5 | |||
| 6edcf5722e | |||
| e6acebb322 | |||
| 277daf2dba | |||
| 1b935314f8 | |||
| cad5c9e8fa | |||
| f7201f1910 | |||
| c406e1aeed | |||
| 946a3bcf11 | |||
| 2c8d9c7207 | |||
| db25f1911e | |||
| 7e2fa90773 | |||
| ef83ccc423 | |||
| 046b48df43 | |||
| 66cd719f85 | |||
| b0c2e57649 | |||
| cb92fa27ba | |||
| c3f8f6f310 | |||
| a82205fed7 | |||
| 776fd69e39 | |||
| 2863b59be4 | |||
| 676e9c7f29 | |||
| 05c3c058d6 | |||
| fd93f24208 | |||
| 544b21f014 | |||
| 8d30abab9e | |||
| ee19c11565 | |||
| c26eb2374d | |||
| 59bc46a9d2 | |||
| ab668ac576 | |||
| c4836600c4 | |||
| f4e0349825 | |||
| 4d578b6c98 | |||
| 741779efd7 | |||
| eb1babedfd | |||
| de0d24e91c | |||
| 0de23f2636 | |||
| ff69557b17 | |||
| 3b93ccc7ba | |||
| f886b60e2c | |||
| d0f126f945 | |||
| ce5f2330eb | |||
| 427758ef15 | |||
| c2ce313ec8 | |||
| b8ba1c123d | |||
| 10f1cbb51e | |||
| e3bcce06bf | |||
| 4e0472feb5 | |||
| 046298f2ca | |||
| c92128b282 | |||
| 886e66e7e3 | |||
| 7da49570b5 | |||
| b8baa3271b | |||
| 65bc4bf1d0 | |||
| 27a8d185c9 | |||
| 1e5992f2b5 | |||
| ac84a14846 | |||
| fa265b18ce | |||
| 38634ddd55 | |||
| 13dd831874 | |||
| 3be5906398 | |||
| cef918d6f8 | |||
| 19aa1b6578 | |||
| b0eb69936e | |||
| b6096a71d1 | |||
| 059d7011ba | |||
| bbe00ef79e | |||
| 7f447abc3a | |||
| 923e099467 | |||
| 26714c6d9f | |||
| 5f1201dbbe | |||
| 52e1d9443c | |||
| 824f5205e9 | |||
| cf8bc55add | |||
| 1e9244f4fc | |||
| be4f4928d5 | |||
| 80f6f8ee31 | |||
| 267d52491a | |||
| ee84d625cd | |||
| 5d091d25d5 | |||
| 97b5f1cf64 |
@@ -323,7 +323,7 @@ jobs:
|
||||
exclude-list: '["odroid-xu","qemuarm","qemux86","raspberrypi","raspberrypi2","raspberrypi3","raspberrypi4","tinker"]'
|
||||
|
||||
publish_container:
|
||||
name: Publish meta container for ${{ matrix.registry }}
|
||||
name: Publish to ${{ matrix.registry }}
|
||||
environment: ${{ needs.init.outputs.channel }}
|
||||
if: github.repository_owner == 'home-assistant'
|
||||
needs: ["init", "build_base"]
|
||||
|
||||
@@ -281,7 +281,7 @@ jobs:
|
||||
echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json"
|
||||
echo "::add-matcher::.github/workflows/matchers/codespell.json"
|
||||
- name: Run prek
|
||||
uses: j178/prek-action@cbc2f23eb5539cf20d82d1aabd0d0ecbcc56f4e3 # v2.0.2
|
||||
uses: j178/prek-action@6ad80277337ad479fe43bd70701c3f7f8aa74db3 # v2.0.3
|
||||
env:
|
||||
PREK_SKIP: no-commit-to-branch,mypy,pylint,gen_requirements_all,hassfest,hassfest-metadata,hassfest-mypy-config,zizmor
|
||||
RUFF_OUTPUT_FORMAT: github
|
||||
@@ -302,7 +302,7 @@ jobs:
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Run zizmor
|
||||
uses: j178/prek-action@cbc2f23eb5539cf20d82d1aabd0d0ecbcc56f4e3 # v2.0.2
|
||||
uses: j178/prek-action@6ad80277337ad479fe43bd70701c3f7f8aa74db3 # v2.0.3
|
||||
with:
|
||||
extra-args: --all-files zizmor
|
||||
|
||||
|
||||
@@ -155,6 +155,7 @@ homeassistant.components.counter.*
|
||||
homeassistant.components.cover.*
|
||||
homeassistant.components.cpuspeed.*
|
||||
homeassistant.components.crownstone.*
|
||||
homeassistant.components.data_grand_lyon.*
|
||||
homeassistant.components.date.*
|
||||
homeassistant.components.datetime.*
|
||||
homeassistant.components.deako.*
|
||||
@@ -423,6 +424,7 @@ homeassistant.components.otp.*
|
||||
homeassistant.components.overkiz.*
|
||||
homeassistant.components.overseerr.*
|
||||
homeassistant.components.p1_monitor.*
|
||||
homeassistant.components.paj_gps.*
|
||||
homeassistant.components.panel_custom.*
|
||||
homeassistant.components.paperless_ngx.*
|
||||
homeassistant.components.peblar.*
|
||||
|
||||
Generated
+8
-2
@@ -294,6 +294,8 @@ CLAUDE.md @home-assistant/core
|
||||
/tests/components/chacon_dio/ @cnico
|
||||
/homeassistant/components/chess_com/ @joostlek
|
||||
/tests/components/chess_com/ @joostlek
|
||||
/homeassistant/components/cielo_home/ @ihsan-cielo @mudasar-cielo
|
||||
/tests/components/cielo_home/ @ihsan-cielo @mudasar-cielo
|
||||
/homeassistant/components/cisco_ios/ @fbradyirl
|
||||
/homeassistant/components/cisco_mobility_express/ @fbradyirl
|
||||
/homeassistant/components/cisco_webex_teams/ @fbradyirl
|
||||
@@ -345,6 +347,8 @@ CLAUDE.md @home-assistant/core
|
||||
/tests/components/cync/ @Kinachi249
|
||||
/homeassistant/components/daikin/ @fredrike
|
||||
/tests/components/daikin/ @fredrike
|
||||
/homeassistant/components/data_grand_lyon/ @Crocmagnon
|
||||
/tests/components/data_grand_lyon/ @Crocmagnon
|
||||
/homeassistant/components/date/ @home-assistant/core
|
||||
/tests/components/date/ @home-assistant/core
|
||||
/homeassistant/components/datetime/ @home-assistant/core
|
||||
@@ -1308,6 +1312,8 @@ CLAUDE.md @home-assistant/core
|
||||
/tests/components/ovo_energy/ @timmo001
|
||||
/homeassistant/components/p1_monitor/ @klaasnicolaas
|
||||
/tests/components/p1_monitor/ @klaasnicolaas
|
||||
/homeassistant/components/paj_gps/ @skipperro
|
||||
/tests/components/paj_gps/ @skipperro
|
||||
/homeassistant/components/palazzetti/ @dotvav
|
||||
/tests/components/palazzetti/ @dotvav
|
||||
/homeassistant/components/panel_custom/ @home-assistant/frontend
|
||||
@@ -1495,8 +1501,8 @@ CLAUDE.md @home-assistant/core
|
||||
/tests/components/roku/ @ctalkington
|
||||
/homeassistant/components/romy/ @xeniter
|
||||
/tests/components/romy/ @xeniter
|
||||
/homeassistant/components/roomba/ @pschmitt @cyr-ius @shenxn @Orhideous
|
||||
/tests/components/roomba/ @pschmitt @cyr-ius @shenxn @Orhideous
|
||||
/homeassistant/components/roomba/ @pschmitt @cyr-ius @shenxn
|
||||
/tests/components/roomba/ @pschmitt @cyr-ius @shenxn
|
||||
/homeassistant/components/roon/ @pavoni
|
||||
/tests/components/roon/ @pavoni
|
||||
/homeassistant/components/route_b_smart_meter/ @SeraphicRav
|
||||
|
||||
@@ -5,5 +5,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/acer_projector",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["serialx==1.7.0"]
|
||||
"requirements": ["serialx==1.7.1"]
|
||||
}
|
||||
|
||||
@@ -30,7 +30,6 @@ from homeassistant.helpers.schema_config_entry_flow import (
|
||||
SchemaOptionsFlowHandler,
|
||||
)
|
||||
from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig
|
||||
from homeassistant.helpers.typing import VolDictType
|
||||
|
||||
from .bridge import AsusWrtBridge
|
||||
from .const import (
|
||||
@@ -142,20 +141,12 @@ class AsusWrtFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
user_input = self._config_data
|
||||
|
||||
add_schema: VolDictType
|
||||
if self.show_advanced_options:
|
||||
add_schema = {
|
||||
vol.Exclusive(CONF_PASSWORD, PASS_KEY, PASS_KEY_MSG): str,
|
||||
vol.Optional(CONF_PORT): cv.port,
|
||||
vol.Exclusive(CONF_SSH_KEY, PASS_KEY, PASS_KEY_MSG): str,
|
||||
}
|
||||
else:
|
||||
add_schema = {vol.Required(CONF_PASSWORD): str}
|
||||
|
||||
schema = {
|
||||
vol.Required(CONF_HOST, default=user_input.get(CONF_HOST, "")): str,
|
||||
vol.Required(CONF_USERNAME, default=user_input.get(CONF_USERNAME, "")): str,
|
||||
**add_schema,
|
||||
vol.Exclusive(CONF_PASSWORD, PASS_KEY, PASS_KEY_MSG): str,
|
||||
vol.Optional(CONF_PORT): cv.port,
|
||||
vol.Exclusive(CONF_SSH_KEY, PASS_KEY, PASS_KEY_MSG): str,
|
||||
vol.Required(
|
||||
CONF_PROTOCOL,
|
||||
default=user_input.get(CONF_PROTOCOL, PROTOCOL_HTTPS),
|
||||
|
||||
@@ -1,36 +1,16 @@
|
||||
"""Provides triggers for buttons."""
|
||||
|
||||
from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN
|
||||
from homeassistant.core import HomeAssistant, State
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.automation import DomainSpec
|
||||
from homeassistant.helpers.trigger import (
|
||||
ENTITY_STATE_TRIGGER_SCHEMA,
|
||||
EntityTriggerBase,
|
||||
Trigger,
|
||||
)
|
||||
from homeassistant.helpers.trigger import StatelessEntityTriggerBase, Trigger
|
||||
|
||||
from . import DOMAIN
|
||||
|
||||
|
||||
class ButtonPressedTrigger(EntityTriggerBase):
|
||||
class ButtonPressedTrigger(StatelessEntityTriggerBase):
|
||||
"""Trigger for button entity presses."""
|
||||
|
||||
_domain_specs = {DOMAIN: DomainSpec()}
|
||||
_schema = ENTITY_STATE_TRIGGER_SCHEMA
|
||||
|
||||
def is_valid_transition(self, from_state: State, to_state: State) -> bool:
|
||||
"""Check if the origin state is valid and different from the current state."""
|
||||
|
||||
# UNKNOWN is a valid from_state, otherwise the first time the button is pressed
|
||||
# would not trigger
|
||||
if from_state.state == STATE_UNAVAILABLE:
|
||||
return False
|
||||
|
||||
return from_state.state != to_state.state
|
||||
|
||||
def is_valid_state(self, state: State) -> bool:
|
||||
"""Check if the new state is not invalid."""
|
||||
return state.state not in (STATE_UNAVAILABLE, STATE_UNKNOWN)
|
||||
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
"""Component to embed Google Cast."""
|
||||
# pylint: disable=hass-use-runtime-data # Uses legacy hass.data[DOMAIN] pattern
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Protocol
|
||||
from uuid import UUID
|
||||
|
||||
from pychromecast import Chromecast
|
||||
from pychromecast.controllers.multizone import MultizoneManager
|
||||
from pychromecast.discovery import CastBrowser
|
||||
|
||||
from homeassistant.components.media_player import BrowseMedia, MediaType
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -20,12 +23,41 @@ from .const import DOMAIN
|
||||
|
||||
PLATFORMS = [Platform.MEDIA_PLAYER]
|
||||
|
||||
type CastConfigEntry = ConfigEntry[CastRuntimeData]
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
|
||||
@dataclass
|
||||
class CastRuntimeData:
|
||||
"""Runtime data for the Cast integration."""
|
||||
|
||||
cast_platforms: dict[str, CastProtocol] = field(default_factory=dict)
|
||||
unknown_models: dict[str | None, tuple[str | None, str | None]] = field(
|
||||
default_factory=dict
|
||||
)
|
||||
added_cast_devices: set[UUID] = field(default_factory=set)
|
||||
browser: CastBrowser | None = None
|
||||
multizone_manager: MultizoneManager | None = None
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: CastConfigEntry) -> bool:
|
||||
"""Set up Cast from a config entry."""
|
||||
hass.data[DOMAIN] = {"cast_platform": {}, "unknown_models": {}}
|
||||
entry.runtime_data = CastRuntimeData()
|
||||
await home_assistant_cast.async_setup_ha_cast(hass, entry)
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
@callback
|
||||
def _register_cast_platform(
|
||||
hass: HomeAssistant, integration_domain: str, platform: CastProtocol
|
||||
) -> None:
|
||||
"""Register a cast platform."""
|
||||
if (
|
||||
not hasattr(platform, "async_get_media_browser_root_object")
|
||||
or not hasattr(platform, "async_browse_media")
|
||||
or not hasattr(platform, "async_play_media")
|
||||
):
|
||||
raise HomeAssistantError(f"Invalid cast platform {platform}")
|
||||
entry.runtime_data.cast_platforms[integration_domain] = platform
|
||||
|
||||
await async_process_integration_platforms(hass, DOMAIN, _register_cast_platform)
|
||||
return True
|
||||
|
||||
@@ -65,27 +97,13 @@ class CastProtocol(Protocol):
|
||||
"""
|
||||
|
||||
|
||||
@callback
|
||||
def _register_cast_platform(
|
||||
hass: HomeAssistant, integration_domain: str, platform: CastProtocol
|
||||
):
|
||||
"""Register a cast platform."""
|
||||
if (
|
||||
not hasattr(platform, "async_get_media_browser_root_object")
|
||||
or not hasattr(platform, "async_browse_media")
|
||||
or not hasattr(platform, "async_play_media")
|
||||
):
|
||||
raise HomeAssistantError(f"Invalid cast platform {platform}")
|
||||
hass.data[DOMAIN]["cast_platform"][integration_domain] = platform
|
||||
|
||||
|
||||
async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
async def async_remove_entry(hass: HomeAssistant, entry: CastConfigEntry) -> None:
|
||||
"""Remove Home Assistant Cast user."""
|
||||
await home_assistant_cast.async_remove_user(hass, entry)
|
||||
|
||||
|
||||
async def async_remove_config_entry_device(
|
||||
hass: HomeAssistant, config_entry: ConfigEntry, device_entry: dr.DeviceEntry
|
||||
hass: HomeAssistant, config_entry: CastConfigEntry, device_entry: dr.DeviceEntry
|
||||
) -> bool:
|
||||
"""Remove cast config entry from a device.
|
||||
|
||||
|
||||
@@ -1,16 +1,11 @@
|
||||
"""Config flow for Cast."""
|
||||
|
||||
from typing import Any
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import onboarding
|
||||
from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow
|
||||
from homeassistant.const import CONF_UUID
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
@@ -19,6 +14,9 @@ from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
|
||||
from .const import CONF_IGNORE_CEC, CONF_KNOWN_HOSTS, DOMAIN
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from . import CastConfigEntry
|
||||
|
||||
IGNORE_CEC_SCHEMA = vol.Schema(vol.All(cv.ensure_list, [cv.string]))
|
||||
KNOWN_HOSTS_SCHEMA = vol.Schema(
|
||||
{
|
||||
@@ -40,7 +38,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(
|
||||
config_entry: ConfigEntry,
|
||||
config_entry: CastConfigEntry,
|
||||
) -> CastOptionsFlowHandler:
|
||||
"""Get the options flow for this handler."""
|
||||
return CastOptionsFlowHandler()
|
||||
|
||||
@@ -12,13 +12,6 @@ DOMAIN = "cast"
|
||||
|
||||
# Stores a threading.Lock that is held by the internal pychromecast discovery.
|
||||
INTERNAL_DISCOVERY_RUNNING_KEY = "cast_discovery_running"
|
||||
# Stores UUIDs of cast devices that were added as entities. Doesn't store
|
||||
# None UUIDs.
|
||||
ADDED_CAST_DEVICES_KEY = "cast_added_cast_devices"
|
||||
# Stores an audio group manager.
|
||||
CAST_MULTIZONE_MANAGER_KEY = "cast_multizone_manager"
|
||||
# Store a CastBrowser
|
||||
CAST_BROWSER_KEY = "cast_browser"
|
||||
|
||||
# Dispatcher signal fired with a ChromecastInfo every time we discover a new
|
||||
# Chromecast or receive it through configuration
|
||||
|
||||
@@ -2,17 +2,16 @@
|
||||
|
||||
import logging
|
||||
import threading
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import pychromecast.discovery
|
||||
import pychromecast.models
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.dispatcher import dispatcher_send
|
||||
|
||||
from .const import (
|
||||
CAST_BROWSER_KEY,
|
||||
CONF_KNOWN_HOSTS,
|
||||
INTERNAL_DISCOVERY_RUNNING_KEY,
|
||||
SIGNAL_CAST_DISCOVERED,
|
||||
@@ -20,11 +19,16 @@ from .const import (
|
||||
)
|
||||
from .helpers import ChromecastInfo, ChromeCastZeroconf
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from . import CastConfigEntry
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def discover_chromecast(
|
||||
hass: HomeAssistant, cast_info: pychromecast.models.CastInfo
|
||||
hass: HomeAssistant,
|
||||
cast_info: pychromecast.models.CastInfo,
|
||||
config_entry: CastConfigEntry,
|
||||
) -> None:
|
||||
"""Discover a Chromecast."""
|
||||
|
||||
@@ -36,7 +40,7 @@ def discover_chromecast(
|
||||
_LOGGER.error("Discovered chromecast without uuid %s", info)
|
||||
return
|
||||
|
||||
info = info.fill_out_missing_chromecast_info(hass)
|
||||
info = info.fill_out_missing_chromecast_info(hass, config_entry)
|
||||
_LOGGER.debug("Discovered new or updated chromecast %s", info)
|
||||
|
||||
dispatcher_send(hass, SIGNAL_CAST_DISCOVERED, info)
|
||||
@@ -49,7 +53,9 @@ def _remove_chromecast(hass: HomeAssistant, info: ChromecastInfo) -> None:
|
||||
dispatcher_send(hass, SIGNAL_CAST_REMOVED, info)
|
||||
|
||||
|
||||
def setup_internal_discovery(hass: HomeAssistant, config_entry: ConfigEntry) -> None:
|
||||
def setup_internal_discovery(
|
||||
hass: HomeAssistant, config_entry: CastConfigEntry
|
||||
) -> None:
|
||||
"""Set up the pychromecast internal discovery."""
|
||||
if INTERNAL_DISCOVERY_RUNNING_KEY not in hass.data:
|
||||
hass.data[INTERNAL_DISCOVERY_RUNNING_KEY] = threading.Lock()
|
||||
@@ -63,11 +69,11 @@ def setup_internal_discovery(hass: HomeAssistant, config_entry: ConfigEntry) ->
|
||||
|
||||
def add_cast(self, uuid, _):
|
||||
"""Handle zeroconf discovery of a new chromecast."""
|
||||
discover_chromecast(hass, browser.devices[uuid])
|
||||
discover_chromecast(hass, browser.devices[uuid], config_entry)
|
||||
|
||||
def update_cast(self, uuid, _):
|
||||
"""Handle zeroconf discovery of an updated chromecast."""
|
||||
discover_chromecast(hass, browser.devices[uuid])
|
||||
discover_chromecast(hass, browser.devices[uuid], config_entry)
|
||||
|
||||
def remove_cast(self, uuid, service, cast_info):
|
||||
"""Handle zeroconf discovery of a removed chromecast."""
|
||||
@@ -84,7 +90,7 @@ def setup_internal_discovery(hass: HomeAssistant, config_entry: ConfigEntry) ->
|
||||
ChromeCastZeroconf.get_zeroconf(),
|
||||
config_entry.data.get(CONF_KNOWN_HOSTS),
|
||||
)
|
||||
hass.data[CAST_BROWSER_KEY] = browser
|
||||
config_entry.runtime_data.browser = browser
|
||||
browser.start_discovery()
|
||||
|
||||
def stop_discovery(event):
|
||||
@@ -98,7 +104,9 @@ def setup_internal_discovery(hass: HomeAssistant, config_entry: ConfigEntry) ->
|
||||
config_entry.add_update_listener(config_entry_updated)
|
||||
|
||||
|
||||
async def config_entry_updated(hass: HomeAssistant, config_entry: ConfigEntry) -> None:
|
||||
async def config_entry_updated(
|
||||
hass: HomeAssistant, config_entry: CastConfigEntry
|
||||
) -> None:
|
||||
"""Handle config entry being updated."""
|
||||
browser = hass.data[CAST_BROWSER_KEY]
|
||||
browser.host_browser.update_hosts(config_entry.data.get(CONF_KNOWN_HOSTS))
|
||||
if browser := config_entry.runtime_data.browser:
|
||||
browser.host_browser.update_hosts(config_entry.data.get(CONF_KNOWN_HOSTS))
|
||||
|
||||
@@ -20,11 +20,11 @@ import pychromecast.socket_client
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import aiohttp_client
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from homeassistant.components import zeroconf
|
||||
|
||||
from . import CastConfigEntry
|
||||
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -56,16 +56,16 @@ class ChromecastInfo:
|
||||
"""Return the UUID."""
|
||||
return self.cast_info.uuid
|
||||
|
||||
def fill_out_missing_chromecast_info(self, hass: HomeAssistant) -> ChromecastInfo:
|
||||
def fill_out_missing_chromecast_info(
|
||||
self, hass: HomeAssistant, config_entry: CastConfigEntry
|
||||
) -> ChromecastInfo:
|
||||
"""Return a new ChromecastInfo object with missing attributes filled in.
|
||||
|
||||
Uses blocking HTTP / HTTPS.
|
||||
"""
|
||||
cast_info = self.cast_info
|
||||
if self.cast_info.cast_type is None or self.cast_info.manufacturer is None:
|
||||
# Uses legacy hass.data[DOMAIN] pattern
|
||||
# pylint: disable-next=hass-use-runtime-data
|
||||
unknown_models = hass.data[DOMAIN]["unknown_models"]
|
||||
unknown_models = config_entry.runtime_data.unknown_models
|
||||
if self.cast_info.model_name not in unknown_models:
|
||||
# Manufacturer and cast type is not available in mDNS data,
|
||||
# get it over HTTP
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
"""Home Assistant Cast integration for Cast."""
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import auth, config_entries, core
|
||||
from homeassistant import auth, core
|
||||
from homeassistant.const import ATTR_ENTITY_ID
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv, dispatcher, instance_id
|
||||
@@ -11,6 +13,9 @@ from homeassistant.helpers.service import async_register_admin_service
|
||||
|
||||
from .const import DOMAIN, SIGNAL_HASS_CAST_SHOW_VIEW, HomeAssistantControllerData
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from . import CastConfigEntry
|
||||
|
||||
SERVICE_SHOW_VIEW = "show_lovelace_view"
|
||||
ATTR_VIEW_PATH = "view_path"
|
||||
ATTR_URL_PATH = "dashboard_path"
|
||||
@@ -21,9 +26,7 @@ NO_URL_AVAILABLE_ERROR = (
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_ha_cast(
|
||||
hass: core.HomeAssistant, entry: config_entries.ConfigEntry
|
||||
):
|
||||
async def async_setup_ha_cast(hass: core.HomeAssistant, entry: CastConfigEntry) -> None:
|
||||
"""Set up Home Assistant Cast."""
|
||||
user_id: str | None = entry.data.get("user_id")
|
||||
user: auth.models.User | None = None
|
||||
@@ -87,9 +90,7 @@ async def async_setup_ha_cast(
|
||||
)
|
||||
|
||||
|
||||
async def async_remove_user(
|
||||
hass: core.HomeAssistant, entry: config_entries.ConfigEntry
|
||||
):
|
||||
async def async_remove_user(hass: core.HomeAssistant, entry: CastConfigEntry) -> None:
|
||||
"""Remove Home Assistant Cast user."""
|
||||
user_id: str | None = entry.data.get("user_id")
|
||||
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
"""Provide functionality to interact with Cast devices on the network."""
|
||||
# pylint: disable=hass-use-runtime-data # Uses legacy hass.data[DOMAIN] pattern
|
||||
|
||||
from collections.abc import Callable
|
||||
from contextlib import suppress
|
||||
@@ -42,7 +41,6 @@ from homeassistant.components.media_player import (
|
||||
MediaType,
|
||||
async_process_play_media_url,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CAST_APP_ID_HOMEASSISTANT_LOVELACE,
|
||||
CONF_UUID,
|
||||
@@ -58,8 +56,6 @@ from homeassistant.util import dt as dt_util
|
||||
from homeassistant.util.logging import async_create_catching_coro
|
||||
|
||||
from .const import (
|
||||
ADDED_CAST_DEVICES_KEY,
|
||||
CAST_MULTIZONE_MANAGER_KEY,
|
||||
CONF_IGNORE_CEC,
|
||||
DOMAIN,
|
||||
SIGNAL_CAST_DISCOVERED,
|
||||
@@ -78,7 +74,7 @@ from .helpers import (
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from . import CastProtocol
|
||||
from . import CastConfigEntry, CastProtocol
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -110,7 +106,9 @@ def api_error[_CastDeviceT: CastDevice, **_P, _R](
|
||||
|
||||
|
||||
@callback
|
||||
def _async_create_cast_device(hass: HomeAssistant, info: ChromecastInfo):
|
||||
def _async_create_cast_device(
|
||||
hass: HomeAssistant, config_entry: CastConfigEntry, info: ChromecastInfo
|
||||
):
|
||||
"""Create a CastDevice entity or dynamic group from the chromecast object.
|
||||
|
||||
Returns None if the cast device has already been added.
|
||||
@@ -121,7 +119,7 @@ def _async_create_cast_device(hass: HomeAssistant, info: ChromecastInfo):
|
||||
return None
|
||||
|
||||
# Found a cast with UUID
|
||||
added_casts = hass.data[ADDED_CAST_DEVICES_KEY]
|
||||
added_casts = config_entry.runtime_data.added_cast_devices
|
||||
if info.uuid in added_casts:
|
||||
# Already added this one, the entity will take care of moved hosts
|
||||
# itself
|
||||
@@ -131,21 +129,19 @@ def _async_create_cast_device(hass: HomeAssistant, info: ChromecastInfo):
|
||||
|
||||
if info.is_dynamic_group:
|
||||
# This is a dynamic group, do not add it but connect to the service.
|
||||
group = DynamicCastGroup(hass, info)
|
||||
group = DynamicCastGroup(hass, config_entry, info)
|
||||
group.async_setup()
|
||||
return None
|
||||
|
||||
return CastMediaPlayerEntity(hass, info)
|
||||
return CastMediaPlayerEntity(hass, config_entry, info)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
config_entry: CastConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Cast from a config entry."""
|
||||
hass.data.setdefault(ADDED_CAST_DEVICES_KEY, set())
|
||||
|
||||
# Import CEC IGNORE attributes
|
||||
pychromecast.IGNORE_CEC += config_entry.data.get(CONF_IGNORE_CEC) or []
|
||||
|
||||
@@ -160,7 +156,7 @@ async def async_setup_entry(
|
||||
# UUID not matching, ignore.
|
||||
return
|
||||
|
||||
cast_device = _async_create_cast_device(hass, discover)
|
||||
cast_device = _async_create_cast_device(hass, config_entry, discover)
|
||||
if cast_device is not None:
|
||||
async_add_entities([cast_device])
|
||||
|
||||
@@ -179,13 +175,19 @@ class CastDevice:
|
||||
|
||||
_mz_only: bool
|
||||
|
||||
def __init__(self, hass: HomeAssistant, cast_info: ChromecastInfo) -> None:
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry: CastConfigEntry,
|
||||
cast_info: ChromecastInfo,
|
||||
) -> None:
|
||||
"""Initialize the cast device."""
|
||||
|
||||
self.hass: HomeAssistant = hass
|
||||
self._config_entry = config_entry
|
||||
self._cast_info = cast_info
|
||||
self._chromecast: pychromecast.Chromecast | None = None
|
||||
self.mz_mgr = None
|
||||
self.mz_mgr: MultizoneManager | None = None
|
||||
self._status_listener: CastStatusListener | None = None
|
||||
self._add_remove_handler: Callable[[], None] | None = None
|
||||
self._del_remove_handler: Callable[[], None] | None = None
|
||||
@@ -214,7 +216,9 @@ class CastDevice:
|
||||
if self._cast_info.uuid is not None:
|
||||
# Remove the entity from the added casts so that it can dynamically
|
||||
# be re-added again.
|
||||
self.hass.data[ADDED_CAST_DEVICES_KEY].remove(self._cast_info.uuid)
|
||||
self._config_entry.runtime_data.added_cast_devices.remove(
|
||||
self._cast_info.uuid
|
||||
)
|
||||
if self._add_remove_handler:
|
||||
self._add_remove_handler()
|
||||
self._add_remove_handler = None
|
||||
@@ -237,10 +241,10 @@ class CastDevice:
|
||||
)
|
||||
self._chromecast = chromecast
|
||||
|
||||
if CAST_MULTIZONE_MANAGER_KEY not in self.hass.data:
|
||||
self.hass.data[CAST_MULTIZONE_MANAGER_KEY] = MultizoneManager()
|
||||
|
||||
self.mz_mgr = self.hass.data[CAST_MULTIZONE_MANAGER_KEY]
|
||||
runtime_data = self._config_entry.runtime_data
|
||||
if runtime_data.multizone_manager is None:
|
||||
runtime_data.multizone_manager = MultizoneManager()
|
||||
self.mz_mgr = runtime_data.multizone_manager
|
||||
|
||||
self._status_listener = CastStatusListener(
|
||||
self, chromecast, self.mz_mgr, self._mz_only
|
||||
@@ -300,10 +304,15 @@ class CastMediaPlayerEntity(CastDevice, MediaPlayerEntity):
|
||||
_attr_media_image_remotely_accessible = True
|
||||
_mz_only = False
|
||||
|
||||
def __init__(self, hass: HomeAssistant, cast_info: ChromecastInfo) -> None:
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry: CastConfigEntry,
|
||||
cast_info: ChromecastInfo,
|
||||
) -> None:
|
||||
"""Initialize the cast device."""
|
||||
|
||||
CastDevice.__init__(self, hass, cast_info)
|
||||
CastDevice.__init__(self, hass, config_entry, cast_info)
|
||||
|
||||
self.cast_status = None
|
||||
self.media_status = None
|
||||
@@ -592,7 +601,7 @@ class CastMediaPlayerEntity(CastDevice, MediaPlayerEntity):
|
||||
"""Generate root node."""
|
||||
children = []
|
||||
# Add media browsers
|
||||
for platform in self.hass.data[DOMAIN]["cast_platform"].values():
|
||||
for platform in self._config_entry.runtime_data.cast_platforms.values():
|
||||
children.extend(
|
||||
await platform.async_get_media_browser_root_object(
|
||||
self.hass, self._chromecast.cast_type
|
||||
@@ -651,7 +660,7 @@ class CastMediaPlayerEntity(CastDevice, MediaPlayerEntity):
|
||||
|
||||
platform: CastProtocol
|
||||
assert media_content_type is not None
|
||||
for platform in self.hass.data[DOMAIN]["cast_platform"].values():
|
||||
for platform in self._config_entry.runtime_data.cast_platforms.values():
|
||||
browse_media = await platform.async_browse_media(
|
||||
self.hass,
|
||||
media_content_type,
|
||||
@@ -713,7 +722,7 @@ class CastMediaPlayerEntity(CastDevice, MediaPlayerEntity):
|
||||
return
|
||||
|
||||
# Try the cast platforms
|
||||
for platform in self.hass.data[DOMAIN]["cast_platform"].values():
|
||||
for platform in self._config_entry.runtime_data.cast_platforms.values():
|
||||
result = await platform.async_play_media(
|
||||
self.hass, self.entity_id, chromecast, media_type, media_id
|
||||
)
|
||||
|
||||
@@ -0,0 +1,24 @@
|
||||
"""Integration for Cielo Home."""
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import PLATFORMS
|
||||
from .coordinator import CieloDataUpdateCoordinator, CieloHomeConfigEntry
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: CieloHomeConfigEntry) -> bool:
|
||||
"""Set up Cielo Home from a config entry."""
|
||||
coordinator = CieloDataUpdateCoordinator(hass, entry)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: CieloHomeConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
coordinator = entry.runtime_data
|
||||
await coordinator.async_shutdown()
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
@@ -0,0 +1,311 @@
|
||||
"""Support for Cielo home thermostats and Smart AC Controllers."""
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Callable, Coroutine
|
||||
from typing import Any, Concatenate, ParamSpec, TypeVar
|
||||
|
||||
from cieloconnectapi.exceptions import AuthenticationError
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
ATTR_TARGET_TEMP_HIGH,
|
||||
ATTR_TARGET_TEMP_LOW,
|
||||
ClimateEntity,
|
||||
ClimateEntityFeature,
|
||||
HVACMode,
|
||||
)
|
||||
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import CIELO_ERRORS, LOGGER, TIMEOUT
|
||||
from .coordinator import CieloDataUpdateCoordinator, CieloHomeConfigEntry
|
||||
from .entity import CieloDeviceEntity
|
||||
|
||||
_T = TypeVar("_T", bound="CieloDeviceEntity")
|
||||
_P = ParamSpec("_P")
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
CIELO_TO_HA_HVAC: dict[str, HVACMode] = {
|
||||
"cool": HVACMode.COOL,
|
||||
"heat": HVACMode.HEAT,
|
||||
"fan": HVACMode.FAN_ONLY,
|
||||
"dry": HVACMode.DRY,
|
||||
"auto": HVACMode.AUTO,
|
||||
"heat_cool": HVACMode.HEAT_COOL,
|
||||
"off": HVACMode.OFF,
|
||||
}
|
||||
HA_TO_CIELO_HVAC: dict[HVACMode, str] = {v: k for k, v in CIELO_TO_HA_HVAC.items()}
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: CieloHomeConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Cielo climate platform."""
|
||||
coordinator = entry.runtime_data
|
||||
devices = coordinator.data.parsed
|
||||
async_add_entities([CieloClimate(coordinator, dev_id) for dev_id in devices])
|
||||
|
||||
|
||||
def async_handle_api_call(
    function: Callable[Concatenate[_T, _P], Coroutine[Any, Any, Any]],
) -> Callable[Concatenate[_T, _P], Coroutine[Any, Any, Any]]:
    """Decorate api calls to handle exceptions and update state.

    The wrapped coroutine must return the raw API response as a dict with a
    "data" payload; the payload is applied optimistically to the coordinator
    so the UI reflects the change without waiting for a full refresh.
    """

    async def wrap_api_call(*args: Any, **kwargs: Any) -> None:
        """Wrap services for api calls."""
        # Decorated callables are entity methods, so the first positional
        # argument is the entity instance itself.
        entity: _T = args[0]
        res: Any = None

        try:
            async with asyncio.timeout(TIMEOUT):
                res = await function(*args, **kwargs)
        except AuthenticationError as err:
            # Credential failure: trigger Home Assistant's reauth flow.
            raise ConfigEntryAuthFailed from err

        except CIELO_ERRORS as err:
            # TimeoutError is part of CIELO_ERRORS; give it a clearer message.
            if isinstance(err, TimeoutError):
                raise HomeAssistantError("API call timed out") from err
            raise HomeAssistantError("Unable to perform API call") from err

        LOGGER.debug(
            "API call result for entity %s: type=%s keys=%s",
            entity.entity_id,
            type(res),
            list(res.keys()) if isinstance(res, dict) else None,
        )

        # Guard against unexpected response shapes before reading "data".
        if not isinstance(res, dict):
            LOGGER.error(
                "API function did not return a dictionary for entity %s, got %s",
                entity.entity_id,
                type(res),
            )
            raise HomeAssistantError("Invalid API response format")

        data: dict[str, Any] | None = res.get("data")

        if not data:
            raise HomeAssistantError("API response contained no data payload")

        # Apply the result locally; the coordinator later schedules a
        # debounced refresh to reconcile with the backend state.
        await entity.coordinator.async_apply_action_result(entity.device_id, data)

    return wrap_api_call
|
||||
|
||||
|
||||
class CieloClimate(CieloDeviceEntity, ClimateEntity):
    """Representation of a Cielo Smart AC Controller."""

    # Use the device name as the entity name.
    _attr_name = None
    _attr_translation_key = "climate_device"

    def __init__(self, coordinator: CieloDataUpdateCoordinator, device_id: str) -> None:
        """Initialize the climate device."""
        super().__init__(coordinator, device_id)
        self._attr_unique_id = device_id

    @property
    def temperature_unit(self) -> str:
        """Return the unit of temperature in Home Assistant format.

        It can change over time based on the device settings, so we fetch it dynamically from the client.
        """
        unit = self.client.temperature_unit()

        if not unit:
            return UnitOfTemperature.CELSIUS

        normalized = unit.strip().lower()

        if normalized in {"c", "°c", "celsius"}:
            return UnitOfTemperature.CELSIUS
        if normalized in {"f", "°f", "fahrenheit"}:
            return UnitOfTemperature.FAHRENHEIT

        # Fall back to Celsius for unrecognized unit strings.
        return UnitOfTemperature.CELSIUS

    @property
    def supported_features(self) -> ClimateEntityFeature:
        """Return dynamic feature flags based on the current mode."""
        flags = ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON

        # HEAT_COOL uses a low/high range; other modes use a single setpoint
        # when the current mode supports a temperature at all.
        if self.hvac_mode == HVACMode.HEAT_COOL:
            flags |= ClimateEntityFeature.TARGET_TEMPERATURE_RANGE
        elif self.client.mode_supports_temperature():
            flags |= ClimateEntityFeature.TARGET_TEMPERATURE

        caps = self.client.mode_caps()

        if caps.get("fan_levels"):
            flags |= ClimateEntityFeature.FAN_MODE

        if caps.get("swing"):
            flags |= ClimateEntityFeature.SWING_MODE

        if self.device_data and self.device_data.preset_modes:
            flags |= ClimateEntityFeature.PRESET_MODE

        return flags

    @property
    def current_humidity(self) -> int | None:
        """Return the current humidity, if available."""
        if self.device_data:
            return self.device_data.humidity
        return None

    @property
    def target_temperature_low(self) -> float | None:
        """Return the low target temperature for HEAT_COOL mode."""
        return self.client.target_temperature_low(self.temperature_unit)

    @property
    def target_temperature_high(self) -> float | None:
        """Return the high target temperature for HEAT_COOL mode."""
        return self.client.target_temperature_high(self.temperature_unit)

    @property
    def hvac_mode(self) -> HVACMode | None:
        """Return the current HVAC mode."""
        mode = self.client.hvac_mode()
        # Unknown API modes are passed through unmapped.
        return CIELO_TO_HA_HVAC.get(mode, mode)

    @property
    def hvac_modes(self) -> list[HVACMode]:
        """Return the list of available HVAC modes."""
        modes = self.client.hvac_modes() or []
        return [CIELO_TO_HA_HVAC.get(m, m) for m in modes]

    @property
    def current_temperature(self) -> float | None:
        """Return the current indoor temperature."""
        return self.client.current_temperature()

    @property
    def target_temperature(self) -> float | None:
        """Return the target temperature."""
        return self.client.target_temperature()

    @property
    def min_temp(self) -> float:
        """Return the minimum possible target temperature."""
        return self.client.min_temp()

    @property
    def max_temp(self) -> float:
        """Return the maximum possible target temperature."""
        return self.client.max_temp()

    @property
    def target_temperature_step(self) -> float | None:
        """Return the precision of the thermostat."""
        return self.client.target_temperature_step(self.temperature_unit)

    @property
    def fan_mode(self) -> str | None:
        """Return the current fan mode."""
        return self.client.fan_mode()

    @property
    def fan_modes(self) -> list[str] | None:
        """Return the list of available fan modes.

        Fan modes are normalized in the backend to snake_case values that
        match Home Assistant expectations (e.g. "low", "medium", "high", "auto").
        This allows HA to translate and display icons correctly using the
        integration strings definitions.
        """
        return self.client.fan_modes()

    @property
    def swing_modes(self) -> list[str] | None:
        """Return the list of available swing modes.

        Swing modes are normalized in the backend to snake_case values
        compatible with Home Assistant (e.g. "auto", "swing").
        These values align with the integration translations so HA can display
        proper labels and icons.
        """
        return self.client.swing_modes()

    @property
    def preset_mode(self) -> str | None:
        """Return the current preset mode."""
        return self.client.preset_mode()

    @property
    def preset_modes(self) -> list[str] | None:
        """Return the list of available preset modes.

        Preset modes are normalized in the backend to snake_case values that
        match Home Assistant expectations (e.g. "home", "away", "sleep", "pets").
        This allows HA to translate and display icons correctly using the
        integration strings definitions.
        """
        return self.client.preset_modes()

    @property
    def swing_mode(self) -> str | None:
        """Return the current swing mode."""
        return self.device_data.swing_mode if self.device_data else None

    @property
    def precision(self) -> float:
        """Return the precision of the thermostat."""
        return self.client.precision(self.temperature_unit)

    @async_handle_api_call
    async def async_set_temperature(self, **kwargs: Any) -> None:
        """Set new target temperature."""
        # HEAT_COOL expects a low/high pair; other modes take a single value.
        if self.hvac_mode == HVACMode.HEAT_COOL:
            return await self.client.async_set_temperature(
                self.temperature_unit,
                **{
                    ATTR_TARGET_TEMP_LOW: kwargs.get(ATTR_TARGET_TEMP_LOW),
                    ATTR_TARGET_TEMP_HIGH: kwargs.get(ATTR_TARGET_TEMP_HIGH),
                },
            )
        return await self.client.async_set_temperature(
            self.temperature_unit,
            **{ATTR_TEMPERATURE: kwargs.get(ATTR_TEMPERATURE)},
        )

    @async_handle_api_call
    async def async_set_fan_mode(self, fan_mode: str) -> None:
        """Set new fan mode."""
        return await self.client.async_set_fan_mode(fan_mode)

    @async_handle_api_call
    async def async_set_preset_mode(self, preset_mode: str) -> None:
        """Set new preset mode."""
        return await self.client.async_set_preset_mode(preset_mode)

    @async_handle_api_call
    async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
        """Set new HVAC mode."""
        cielo_mode = HA_TO_CIELO_HVAC.get(hvac_mode)
        return await self.client.async_set_hvac_mode(cielo_mode)

    @async_handle_api_call
    async def async_set_swing_mode(self, swing_mode: str) -> None:
        """Set new swing mode."""
        return await self.client.async_set_swing_mode(swing_mode)

    async def async_turn_on(self) -> None:
        """Turn the climate device on."""
        modes = self.hvac_modes or []

        # Select the first supported non-off mode when turning on
        for mode in modes:
            if mode != HVACMode.OFF:
                await self.async_set_hvac_mode(mode)
                return

        raise HomeAssistantError("No non-off HVAC modes available to turn on device")

    async def async_turn_off(self) -> None:
        """Turn the climate device off."""
        await self.async_set_hvac_mode(HVACMode.OFF)
|
||||
@@ -0,0 +1,99 @@
|
||||
"""Config Flow for Cielo integration."""
|
||||
|
||||
from typing import Any, Final
|
||||
|
||||
from aiohttp import ClientError
|
||||
from cieloconnectapi import CieloClient
|
||||
from cieloconnectapi.exceptions import AuthenticationError, CieloError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.config_entries import ConfigFlowResult
|
||||
from homeassistant.const import CONF_API_KEY, CONF_TOKEN
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.selector import (
|
||||
TextSelector,
|
||||
TextSelectorConfig,
|
||||
TextSelectorType,
|
||||
)
|
||||
|
||||
from .const import DEFAULT_NAME, DOMAIN, LOGGER, TIMEOUT
|
||||
|
||||
# Schema for the user step: a single API-key field rendered as a password
# input so the key is masked in the UI.
DATA_SCHEMA: Final = vol.Schema(
    {
        vol.Required(CONF_API_KEY): TextSelector(
            TextSelectorConfig(type=TextSelectorType.PASSWORD)
        ),
    }
)
|
||||
|
||||
|
||||
class CieloConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Cielo integration."""

    VERSION = 1
    MINOR_VERSION = 1

    async def _async_validate_api_key(
        self, api_key: str
    ) -> tuple[str | None, dict[str, str]]:
        """Validate the API key, initialize the client, and return errors or token.

        Returns a ``(user_id, result)`` tuple: on success ``result`` maps
        ``CONF_TOKEN`` to the obtained token; on failure ``user_id`` is None
        and ``result`` maps ``"base"`` to a form error key.
        """
        client = CieloClient(
            api_key=api_key,
            timeout=TIMEOUT,
            session=async_get_clientsession(self.hass),
        )

        try:
            token = await client.get_or_refresh_token()

            devices = await client.get_devices_data()
            if not devices.parsed:
                return None, {"base": "no_devices"}

        except AuthenticationError:
            return None, {"base": "invalid_auth"}
        # Fixed: `except A, B, C:` is Python 2 syntax and a SyntaxError in
        # Python 3 — multiple exception types must be a parenthesized tuple.
        except (ConnectionError, TimeoutError, ClientError, CieloError):
            return None, {"base": "cannot_connect"}
        except Exception:  # noqa: BLE001
            LOGGER.exception("Unexpected exception during config flow validation")
            return None, {"base": "unknown"}

        return client.user_id, {CONF_TOKEN: token}

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        errors: dict[str, str] = {}

        if user_input:
            api_key = user_input[CONF_API_KEY].strip()

            user_id, validation_result = await self._async_validate_api_key(api_key)

            if "base" in validation_result:
                errors = validation_result
            else:
                token: str = validation_result[CONF_TOKEN]

                # Store the trimmed key and the obtained token in the entry.
                user_input[CONF_API_KEY] = api_key
                user_input[CONF_TOKEN] = token

                # One entry per Cielo account: abort if already configured.
                await self.async_set_unique_id(user_id)
                self._abort_if_unique_id_configured()

                return self.async_create_entry(
                    title=DEFAULT_NAME,
                    data=user_input,
                )

        # Show the user form
        return self.async_show_form(
            step_id="user",
            data_schema=DATA_SCHEMA,
            errors=errors,
            description_placeholders={
                "url": "https://www.home-assistant.io/integrations/cielo_home"
            },
        )
|
||||
@@ -0,0 +1,24 @@
|
||||
"""Constants for the Cielo Home integration."""
|
||||
|
||||
import logging
|
||||
from typing import Final
|
||||
|
||||
from aiohttp import ClientError
|
||||
from cieloconnectapi.exceptions import CieloError
|
||||
|
||||
from homeassistant.const import Platform
|
||||
|
||||
DOMAIN: Final = "cielo_home"
# Platforms set up for each config entry.
PLATFORMS: Final[list[Platform]] = [
    Platform.CLIMATE,
]
DEFAULT_NAME: Final = "Cielo Home"
# Polling interval for the data update coordinator, in seconds.
DEFAULT_SCAN_INTERVAL: Final[int] = 2 * 60
# Timeout applied to individual API calls, in seconds.
TIMEOUT: Final[int] = 20
LOGGER: Final = logging.getLogger(__package__)

# Exceptions treated as transient API/connection failures by the integration.
CIELO_ERRORS: Final[tuple[type[Exception], ...]] = (
    ClientError,
    TimeoutError,
    CieloError,
)
|
||||
@@ -0,0 +1,107 @@
|
||||
"""Coordinator for Cielo integration."""
|
||||
|
||||
from copy import copy
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
from typing import Any, Final
|
||||
|
||||
from aiohttp import ClientError
|
||||
from cieloconnectapi import CieloClient
|
||||
from cieloconnectapi.exceptions import AuthenticationError, CieloError
|
||||
from cieloconnectapi.model import CieloDevice
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_API_KEY, CONF_TOKEN
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.debounce import Debouncer
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import DEFAULT_SCAN_INTERVAL, DOMAIN, LOGGER, TIMEOUT
|
||||
|
||||
# Cooldown (seconds) for the request-refresh debouncer below.
REQUEST_REFRESH_DELAY: Final[int] = 2 * 60


@dataclass(slots=True)
class CieloData:
    """Data structure for the coordinator."""

    # Raw API response payload, kept as returned by the backend.
    raw: dict[str, Any]
    # Devices parsed from the raw payload, keyed by device id.
    parsed: dict[str, CieloDevice]
|
||||
|
||||
|
||||
class CieloDataUpdateCoordinator(DataUpdateCoordinator[CieloData]):
    """Cielo Data Update Coordinator."""

    config_entry: CieloHomeConfigEntry

    def __init__(self, hass: HomeAssistant, entry: CieloHomeConfigEntry) -> None:
        """Initialize the coordinator."""
        self.client = CieloClient(
            api_key=entry.data[CONF_API_KEY],
            timeout=TIMEOUT,
            token=entry.data[CONF_TOKEN],
            session=async_get_clientsession(hass),
        )

        super().__init__(
            hass,
            LOGGER,
            name=DOMAIN,
            config_entry=entry,
            update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL),
            # The debouncer prevents multiple rapid refresh requests from triggering repeated full data fetches from the backend.
            request_refresh_debouncer=Debouncer(
                hass, LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=False
            ),
        )

    async def _async_update_data(self) -> CieloData:
        """Fetch data from the API."""
        try:
            data = await self.client.get_devices_data()
        except AuthenticationError as err:
            # Credential failure: trigger Home Assistant's reauth flow.
            raise ConfigEntryAuthFailed from err
        except (TimeoutError, ConnectionError, CieloError, ClientError) as err:
            raise UpdateFailed(err) from err

        return CieloData(raw=data.raw, parsed=data.parsed)

    async def async_apply_action_result(
        self, device_id: str, data: dict[str, Any]
    ) -> None:
        """Apply an optimistic update from an API action response.

        This updates the affected device locally in the coordinator state so the
        UI reflects the change immediately without requiring a full backend refresh.

        Performing a coordinator refresh after every action would fetch all devices
        for the account, even when only a single device was updated. This is not
        optimal from an API usage/cost perspective.

        Instead, the coordinator applies the action result locally for the affected
        device and schedules a later refresh to reconcile with the backend state.
        """
        # Unknown device or no data yet: fall back to a full refresh.
        if not self.data or not self.data.parsed or device_id not in self.data.parsed:
            await self.async_request_refresh()
            return

        new_parsed = dict(self.data.parsed)
        # Shallow-copy the device so existing references to the old
        # coordinator data are not mutated in place.
        dev = copy(new_parsed[device_id])

        try:
            dev.apply_update(data)
        # Fixed: `except KeyError, ValueError, TypeError:` is Python 2 syntax
        # and a SyntaxError in Python 3 — exception types must be a tuple.
        except (KeyError, ValueError, TypeError):
            # Malformed action payload — reconcile via a full refresh instead.
            await self.async_request_refresh()
            return

        new_parsed[device_id] = dev
        self.async_set_updated_data(CieloData(raw=self.data.raw, parsed=new_parsed))

        # Request a debounced refresh to reconcile with the backend state.
        await self.async_request_refresh()
|
||||
|
||||
|
||||
# Define the ConfigEntry type here to avoid circular imports
# (platform modules import this alias instead of the integration __init__).
type CieloHomeConfigEntry = ConfigEntry[CieloDataUpdateCoordinator]
|
||||
@@ -0,0 +1,76 @@
|
||||
"""Base entity for Cielo integration."""
|
||||
|
||||
from cieloconnectapi.device import CieloDeviceAPI
|
||||
from cieloconnectapi.model import CieloDevice
|
||||
|
||||
from homeassistant.helpers.device_registry import (
|
||||
CONNECTION_NETWORK_MAC,
|
||||
DeviceInfo,
|
||||
format_mac,
|
||||
)
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import CieloDataUpdateCoordinator
|
||||
|
||||
|
||||
class CieloBaseEntity(CoordinatorEntity[CieloDataUpdateCoordinator]):
    """Representation of a Cielo base entity."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: CieloDataUpdateCoordinator,
        device_id: str,
    ) -> None:
        """Initialize the Cielo base entity."""
        super().__init__(coordinator)
        self._device_id = device_id
        # Per-device API helper bound to this device's current data snapshot.
        self.client = CieloDeviceAPI(
            coordinator.client, coordinator.data.parsed[device_id]
        )

    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        # Keep the device API helper pointing at the freshest device snapshot.
        if (dev := self.device_data) is not None:
            self.client.device_data = dev
        super()._handle_coordinator_update()

    @property
    def device_data(self) -> CieloDevice | None:
        """Return the device data from the coordinator."""
        return self.coordinator.data.parsed.get(self._device_id)

    @property
    def available(self) -> bool:
        """Return if the device is available and online."""
        # Unavailable when the coordinator failed or the device disappeared
        # from the account's device list.
        if not (super().available and self._device_id in self.coordinator.data.parsed):
            return False

        dev = self.device_data
        return bool(dev and dev.device_status)
|
||||
|
||||
|
||||
class CieloDeviceEntity(CieloBaseEntity):
    """Representation of a Cielo Device."""

    def __init__(
        self,
        coordinator: CieloDataUpdateCoordinator,
        device_id: str,
    ) -> None:
        """Initialize the device entity."""
        super().__init__(coordinator, device_id)
        self.device_id = device_id

        device = coordinator.data.parsed[device_id]

        # Register the physical device with identifying metadata so all of
        # its entities group under one device in the registry.
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, device.id)},
            name=device.name,
            connections={(CONNECTION_NETWORK_MAC, format_mac(device.mac_address))},
            manufacturer="Cielo",
            configuration_url="https://home.cielowigle.com/",
            suggested_area=device.name,
        )
|
||||
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"domain": "cielo_home",
|
||||
"name": "Cielo Home",
|
||||
"codeowners": ["@ihsan-cielo", "@mudasar-cielo"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/cielo_home",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["cieloconnectapi"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["cielo-connect-api==1.0.6"]
|
||||
}
|
||||
@@ -0,0 +1,60 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup: done
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions: done
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: done
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: todo
|
||||
docs-installation-parameters: todo
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: todo
|
||||
parallel-updates: done
|
||||
reauthentication-flow: todo
|
||||
test-coverage: todo
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info: todo
|
||||
discovery: todo
|
||||
docs-data-update: todo
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: todo
|
||||
docs-supported-functions: todo
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices: todo
|
||||
entity-category: todo
|
||||
entity-device-class: todo
|
||||
entity-disabled-by-default: todo
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: todo
|
||||
repair-issues: todo
|
||||
stale-devices: todo
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing: todo
|
||||
@@ -0,0 +1,69 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
|
||||
"single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "Invalid or expired API key; generate a new one",
|
||||
"no_devices": "No devices found; make sure devices are set up in the Cielo Home app",
|
||||
"no_user_id": "No valid user information found for the API key",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]"
|
||||
},
|
||||
"data_description": {
|
||||
"api_key": "The API key from your Cielo Home account"
|
||||
},
|
||||
"description": "Sign in with your Cielo Home API key. Follow the [documentation]({url}) to learn how to get your API key.",
|
||||
"title": "Connect to Cielo Home"
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"climate": {
|
||||
"climate_device": {
|
||||
"state_attributes": {
|
||||
"fan_mode": {
|
||||
"state": {
|
||||
"auto": "[%key:common::state::auto%]",
|
||||
"high": "[%key:common::state::high%]",
|
||||
"low": "[%key:common::state::low%]",
|
||||
"medium": "[%key:common::state::medium%]",
|
||||
"quiet": "Quiet",
|
||||
"super_high": "Super high",
|
||||
"ultra_high": "Ultra high"
|
||||
}
|
||||
},
|
||||
"swing_mode": {
|
||||
"state": {
|
||||
"adjust": "Adjust",
|
||||
"auto": "[%key:common::state::auto%]",
|
||||
"auto_stop": "Auto Stop",
|
||||
"pos1": "Position 1",
|
||||
"pos10": "Position 10",
|
||||
"pos11": "Position 11",
|
||||
"pos12": "Position 12",
|
||||
"pos13": "Position 13",
|
||||
"pos14": "Position 14",
|
||||
"pos15": "Position 15",
|
||||
"pos2": "Position 2",
|
||||
"pos3": "Position 3",
|
||||
"pos4": "Position 4",
|
||||
"pos5": "Position 5",
|
||||
"pos6": "Position 6",
|
||||
"pos7": "Position 7",
|
||||
"pos8": "Position 8",
|
||||
"pos9": "Position 9",
|
||||
"swing": "Swing"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -13,8 +13,8 @@ from homeassistant.helpers.condition import (
|
||||
Condition,
|
||||
ConditionConfig,
|
||||
EntityConditionBase,
|
||||
EntityNumericalConditionBase,
|
||||
EntityNumericalConditionWithUnitBase,
|
||||
make_entity_numerical_condition,
|
||||
make_entity_state_condition,
|
||||
)
|
||||
from homeassistant.util.unit_conversion import TemperatureConverter
|
||||
@@ -59,12 +59,33 @@ class ClimateTargetTemperatureCondition(EntityNumericalConditionWithUnitBase):
|
||||
_domain_specs = {DOMAIN: DomainSpec(value_source=ATTR_TEMPERATURE)}
|
||||
_unit_converter = TemperatureConverter
|
||||
|
||||
def _should_include(self, state: State) -> bool:
|
||||
"""Skip climate entities that do not expose a target temperature."""
|
||||
return (
|
||||
super()._should_include(state)
|
||||
and state.attributes.get(ATTR_TEMPERATURE) is not None
|
||||
)
|
||||
|
||||
def _get_entity_unit(self, entity_state: State) -> str | None:
|
||||
"""Get the temperature unit of a climate entity from its state."""
|
||||
# Climate entities convert temperatures to the system unit via show_temp
|
||||
return self._hass.config.units.temperature_unit
|
||||
|
||||
|
||||
class ClimateTargetHumidityCondition(EntityNumericalConditionBase):
|
||||
"""Condition for climate target humidity."""
|
||||
|
||||
_domain_specs = {DOMAIN: DomainSpec(value_source=ATTR_HUMIDITY)}
|
||||
_valid_unit = "%"
|
||||
|
||||
def _should_include(self, state: State) -> bool:
|
||||
"""Skip climate entities that do not expose a target humidity."""
|
||||
return (
|
||||
super()._should_include(state)
|
||||
and state.attributes.get(ATTR_HUMIDITY) is not None
|
||||
)
|
||||
|
||||
|
||||
CONDITIONS: dict[str, type[Condition]] = {
|
||||
"is_hvac_mode": ClimateHVACModeCondition,
|
||||
"is_off": make_entity_state_condition(DOMAIN, HVACMode.OFF),
|
||||
@@ -88,10 +109,7 @@ CONDITIONS: dict[str, type[Condition]] = {
|
||||
"is_heating": make_entity_state_condition(
|
||||
{DOMAIN: DomainSpec(value_source=ATTR_HVAC_ACTION)}, HVACAction.HEATING
|
||||
),
|
||||
"target_humidity": make_entity_numerical_condition(
|
||||
{DOMAIN: DomainSpec(value_source=ATTR_HUMIDITY)},
|
||||
valid_unit="%",
|
||||
),
|
||||
"target_humidity": ClimateTargetHumidityCondition,
|
||||
"target_temperature": ClimateTargetTemperatureCondition,
|
||||
}
|
||||
|
||||
|
||||
@@ -56,6 +56,13 @@ class _ClimateTargetTemperatureTriggerMixin(EntityNumericalStateTriggerWithUnitB
|
||||
_domain_specs = {DOMAIN: DomainSpec(value_source=ATTR_TEMPERATURE)}
|
||||
_unit_converter = TemperatureConverter
|
||||
|
||||
def _should_include(self, state: State) -> bool:
|
||||
"""Skip climate entities that do not expose a target temperature."""
|
||||
return (
|
||||
super()._should_include(state)
|
||||
and state.attributes.get(ATTR_TEMPERATURE) is not None
|
||||
)
|
||||
|
||||
def _get_entity_unit(self, state: State) -> str | None:
|
||||
"""Get the temperature unit of a climate entity from its state."""
|
||||
# Climate entities convert temperatures to the system unit via show_temp
|
||||
|
||||
@@ -1,11 +1,6 @@
|
||||
"""Provides triggers for counters."""
|
||||
|
||||
from homeassistant.const import (
|
||||
CONF_MAXIMUM,
|
||||
CONF_MINIMUM,
|
||||
STATE_UNAVAILABLE,
|
||||
STATE_UNKNOWN,
|
||||
)
|
||||
from homeassistant.const import CONF_MAXIMUM, CONF_MINIMUM
|
||||
from homeassistant.core import HomeAssistant, State
|
||||
from homeassistant.helpers.automation import DomainSpec
|
||||
from homeassistant.helpers.trigger import (
|
||||
@@ -41,9 +36,7 @@ class CounterDecrementedTrigger(CounterBaseIntegerTrigger):
|
||||
"""Trigger for when a counter is decremented."""
|
||||
|
||||
def is_valid_transition(self, from_state: State, to_state: State) -> bool:
|
||||
"""Check if the origin state is valid and the state has changed."""
|
||||
if from_state.state in (STATE_UNAVAILABLE, STATE_UNKNOWN):
|
||||
return False
|
||||
"""Check that the counter value decreased."""
|
||||
return int(from_state.state) > int(to_state.state)
|
||||
|
||||
|
||||
@@ -51,9 +44,7 @@ class CounterIncrementedTrigger(CounterBaseIntegerTrigger):
|
||||
"""Trigger for when a counter is incremented."""
|
||||
|
||||
def is_valid_transition(self, from_state: State, to_state: State) -> bool:
|
||||
"""Check if the origin state is valid and the state has changed."""
|
||||
if from_state.state in (STATE_UNAVAILABLE, STATE_UNKNOWN):
|
||||
return False
|
||||
"""Check that the counter value increased."""
|
||||
return int(from_state.state) < int(to_state.state)
|
||||
|
||||
|
||||
@@ -62,12 +53,6 @@ class CounterValueBaseTrigger(EntityTriggerBase):
|
||||
|
||||
_domain_specs = {DOMAIN: DomainSpec()}
|
||||
|
||||
def is_valid_transition(self, from_state: State, to_state: State) -> bool:
|
||||
"""Check if the origin state is valid and the state has changed."""
|
||||
if from_state.state in (STATE_UNAVAILABLE, STATE_UNKNOWN):
|
||||
return False
|
||||
return from_state.state != to_state.state
|
||||
|
||||
|
||||
class CounterMaxReachedTrigger(CounterValueBaseTrigger):
|
||||
"""Trigger for when a counter reaches its maximum value."""
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
from collections.abc import Mapping
|
||||
|
||||
from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN
|
||||
from homeassistant.const import STATE_OFF, STATE_ON
|
||||
from homeassistant.core import HomeAssistant, State
|
||||
from homeassistant.helpers.trigger import EntityTriggerBase, Trigger
|
||||
|
||||
@@ -28,9 +28,7 @@ class CoverTriggerBase(EntityTriggerBase):
|
||||
return self._get_value(state) == domain_spec.target_value
|
||||
|
||||
def is_valid_transition(self, from_state: State, to_state: State) -> bool:
|
||||
"""Check if the transition is valid for a cover state change."""
|
||||
if from_state.state in (STATE_UNAVAILABLE, STATE_UNKNOWN):
|
||||
return False
|
||||
"""Check that the relevant cover value changed."""
|
||||
if (from_value := self._get_value(from_state)) is None:
|
||||
return False
|
||||
return from_value != self._get_value(to_state)
|
||||
|
||||
@@ -0,0 +1,48 @@
|
||||
"""The Data Grand Lyon integration."""
|
||||
|
||||
from data_grand_lyon_ha import DataGrandLyonClient
|
||||
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .coordinator import DataGrandLyonConfigEntry, DataGrandLyonCoordinator
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.SENSOR]
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistant, entry: DataGrandLyonConfigEntry
) -> bool:
    """Set up Data Grand Lyon from a config entry."""
    client = DataGrandLyonClient(
        session=async_get_clientsession(hass),
        username=entry.data[CONF_USERNAME],
        password=entry.data[CONF_PASSWORD],
    )

    coordinator = DataGrandLyonCoordinator(hass, entry, client)
    await coordinator.async_config_entry_first_refresh()
    entry.runtime_data = coordinator

    # Reload the entry whenever its data changes (e.g. subentry updates).
    entry.async_on_unload(entry.add_update_listener(async_update_entry))

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True
|
||||
|
||||
|
||||
async def async_update_entry(
    hass: HomeAssistant, entry: DataGrandLyonConfigEntry
) -> None:
    """Handle config entry update (e.g., subentry changes)."""
    # Reloading re-runs setup, so added/removed stops are picked up.
    await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
|
||||
async def async_unload_entry(
    hass: HomeAssistant, entry: DataGrandLyonConfigEntry
) -> bool:
    """Unload a config entry."""
    # Unloading the platforms is all the cleanup this integration needs.
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
@@ -0,0 +1,132 @@
|
||||
"""Config flow for the Data Grand Lyon integration."""
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import ClientError, ClientResponseError
|
||||
from data_grand_lyon_ha import DataGrandLyonClient, TclPassageType
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
ConfigSubentryFlow,
|
||||
SubentryFlowResult,
|
||||
)
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import CONF_LINE, CONF_STOP_ID, DOMAIN, SUBENTRY_TYPE_STOP
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_USERNAME): str,
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
}
|
||||
)
|
||||
|
||||
STEP_STOP_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_LINE): str,
|
||||
vol.Required(CONF_STOP_ID): vol.Coerce(int),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class DataGrandLyonConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Data Grand Lyon."""

    VERSION = 1

    @classmethod
    @callback
    def async_get_supported_subentry_types(
        cls, config_entry: ConfigEntry
    ) -> dict[str, type[ConfigSubentryFlow]]:
        """Return subentry types supported by this integration."""
        return {
            SUBENTRY_TYPE_STOP: StopSubentryFlowHandler,
        }

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        errors: dict[str, str] = {}

        if user_input is not None:
            # One entry per account: abort if this username is already set up.
            self._async_abort_entries_match({CONF_USERNAME: user_input[CONF_USERNAME]})

            if error := await self._test_connection(user_input):
                errors["base"] = error
            else:
                return self.async_create_entry(title="Data Grand Lyon", data=user_input)

        return self.async_show_form(
            step_id="user",
            data_schema=STEP_USER_DATA_SCHEMA,
            errors=errors,
        )

    async def _test_connection(self, user_input: dict[str, Any]) -> str | None:
        """Test connectivity by making a dummy API call.

        Returns None on success, or an error key for the errors dict.
        """
        session = async_get_clientsession(self.hass)
        client = DataGrandLyonClient(
            session=session,
            username=user_input[CONF_USERNAME],
            password=user_input[CONF_PASSWORD],
        )
        try:
            # The upstream library filters in memory, so these placeholder
            # values won't trigger an exception; the returned list is empty.
            await client.get_tcl_passages(
                ligne="__test__", stop_id=0, passage_type=TclPassageType.ESTIMATED
            )
        except ClientResponseError as err:
            # 401/403 indicate bad credentials; any other HTTP error is
            # treated as a connectivity problem.
            if err.status in (401, 403):
                return "invalid_auth"
            return "cannot_connect"
        # Fix: `except ClientError, TimeoutError:` is Python 2 syntax and a
        # SyntaxError in Python 3 — multiple exception types need a tuple.
        except (ClientError, TimeoutError):
            return "cannot_connect"
        except Exception:
            _LOGGER.exception("Unexpected error testing Data Grand Lyon connection")
            return "unknown"
        return None
|
||||
|
||||
|
||||
class StopSubentryFlowHandler(ConfigSubentryFlow):
    """Handle a subentry flow for adding a Data Grand Lyon stop."""

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> SubentryFlowResult:
        """Handle the user step to add a new stop."""
        entry = self._get_entry()

        if user_input is None:
            return self.async_show_form(
                step_id="user",
                data_schema=STEP_STOP_DATA_SCHEMA,
            )

        line = user_input[CONF_LINE]
        stop_id = user_input[CONF_STOP_ID]
        unique_id = f"{line}_{stop_id}"

        # Reject duplicates: each (line, stop) pair may only be added once.
        if any(
            subentry.unique_id == unique_id
            for subentry in entry.subentries.values()
        ):
            return self.async_abort(reason="already_configured")

        return self.async_create_entry(
            title=f"{line} - Stop {stop_id}",
            data={CONF_LINE: line, CONF_STOP_ID: stop_id},
            unique_id=unique_id,
        )
|
||||
@@ -0,0 +1,11 @@
|
||||
"""Constants for the Data Grand Lyon integration."""
|
||||
|
||||
import logging
|
||||
|
||||
DOMAIN = "data_grand_lyon"
|
||||
LOGGER = logging.getLogger(__package__)
|
||||
|
||||
SUBENTRY_TYPE_STOP = "stop"
|
||||
|
||||
CONF_LINE = "line"
|
||||
CONF_STOP_ID = "stop_id"
|
||||
@@ -0,0 +1,70 @@
|
||||
"""DataUpdateCoordinator for the Data Grand Lyon integration."""
|
||||
|
||||
import asyncio
|
||||
from datetime import timedelta
|
||||
|
||||
from data_grand_lyon_ha import DataGrandLyonClient, TclPassage
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import CONF_LINE, CONF_STOP_ID, DOMAIN, LOGGER, SUBENTRY_TYPE_STOP
|
||||
|
||||
type DataGrandLyonConfigEntry = ConfigEntry[DataGrandLyonCoordinator]
|
||||
|
||||
|
||||
class DataGrandLyonCoordinator(DataUpdateCoordinator[dict[str, list[TclPassage]]]):
    """Coordinator for the Data Grand Lyon integration."""

    config_entry: DataGrandLyonConfigEntry

    def __init__(
        self,
        hass: HomeAssistant,
        entry: DataGrandLyonConfigEntry,
        client: DataGrandLyonClient,
    ) -> None:
        """Initialize the coordinator."""
        self.client = client
        super().__init__(
            hass,
            LOGGER,
            config_entry=entry,
            name=DOMAIN,
            update_interval=timedelta(minutes=5),
        )

    async def _async_update_data(self) -> dict[str, list[TclPassage]]:
        """Fetch data for all monitored stops."""
        subentries = list(
            self.config_entry.get_subentries_of_type(SUBENTRY_TYPE_STOP)
        )

        # Query every stop concurrently; capture per-stop failures instead of
        # letting a single bad stop abort the whole refresh.
        results = await asyncio.gather(
            *(
                self.client.get_tcl_passages(
                    ligne=subentry.data[CONF_LINE],
                    stop_id=subentry.data[CONF_STOP_ID],
                )
                for subentry in subentries
            ),
            return_exceptions=True,
        )

        passages_by_stop: dict[str, list[TclPassage]] = {}
        for subentry, result in zip(subentries, results):
            if isinstance(result, BaseException):
                LOGGER.warning(
                    "Error fetching departures for stop %s: %s",
                    subentry.subentry_id,
                    result,
                )
                continue
            passages_by_stop[subentry.subentry_id] = result

        # Only raise when every request failed; partial data is still useful.
        if subentries and not passages_by_stop:
            raise UpdateFailed("Error fetching DataGrandLyon data: all requests failed")
        return passages_by_stop
|
||||
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"next_departure_1_direction": {
|
||||
"default": "mdi:directions"
|
||||
},
|
||||
"next_departure_2_direction": {
|
||||
"default": "mdi:directions"
|
||||
},
|
||||
"next_departure_3_direction": {
|
||||
"default": "mdi:directions"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"domain": "data_grand_lyon",
|
||||
"name": "Data Grand Lyon",
|
||||
"codeowners": ["@Crocmagnon"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/data_grand_lyon",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["data-grand-lyon-ha==0.5.0"]
|
||||
}
|
||||
@@ -0,0 +1,72 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: This integration does not register custom actions.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: This integration does not register custom actions.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: Entities use the coordinator pattern and do not subscribe to events.
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: This integration does not register custom actions.
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: done
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow: todo
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: This is a service integration; there are no discoverable devices.
|
||||
discovery:
|
||||
status: exempt
|
||||
comment: This is a service integration; there are no discoverable devices.
|
||||
docs-data-update: done
|
||||
docs-examples: todo
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: done
|
||||
dynamic-devices: done
|
||||
entity-category: done
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: todo
|
||||
exception-translations: todo
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: todo
|
||||
repair-issues: todo
|
||||
stale-devices: done
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing: done
|
||||
@@ -0,0 +1,201 @@
|
||||
"""Sensor platform for the Data Grand Lyon integration."""
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
from data_grand_lyon_ha import TclPassage, TclPassageType
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigSubentry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN, SUBENTRY_TYPE_STOP
|
||||
from .coordinator import DataGrandLyonConfigEntry, DataGrandLyonCoordinator
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
_TZ_PARIS = ZoneInfo("Europe/Paris")
|
||||
|
||||
_DEPARTURE_TYPE_OPTIONS = [t.name.lower() for t in TclPassageType]
|
||||
|
||||
|
||||
def _departure_time(departure: TclPassage) -> datetime:
    """Return the departure time, localized to Europe/Paris if naive."""
    when = departure.heure_passage
    # Naive datetimes from the API are interpreted as local (Lyon) times.
    return when if when.tzinfo is not None else when.replace(tzinfo=_TZ_PARIS)
|
||||
|
||||
|
||||
def _departure_icon(departure: TclPassage) -> str:
    """Return icon based on departure type."""
    # Real-time (estimated) departures get a "checked" clock icon.
    is_estimated = departure.type == TclPassageType.ESTIMATED
    return "mdi:clock-check-outline" if is_estimated else "mdi:clock-outline"
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
class DataGrandLyonStopSensorEntityDescription(SensorEntityDescription):
    """Describes a Data Grand Lyon stop departure sensor entity."""

    # Index into the stop's departure list (0 = next departure).
    departure_index: int
    # Extracts the sensor's native value from the selected departure.
    value_fn: Callable[[TclPassage], StateType | datetime]
    # Optional dynamic icon for the departure; None keeps the static icon.
    icon_fn: Callable[[TclPassage], str] | None = None
|
||||
|
||||
|
||||
STOP_SENSOR_DESCRIPTIONS: tuple[DataGrandLyonStopSensorEntityDescription, ...] = (
|
||||
DataGrandLyonStopSensorEntityDescription(
|
||||
key="next_departure_1",
|
||||
translation_key="next_departure_1",
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
departure_index=0,
|
||||
value_fn=_departure_time,
|
||||
icon_fn=_departure_icon,
|
||||
),
|
||||
DataGrandLyonStopSensorEntityDescription(
|
||||
key="next_departure_1_direction",
|
||||
translation_key="next_departure_1_direction",
|
||||
departure_index=0,
|
||||
value_fn=lambda p: p.direction,
|
||||
),
|
||||
DataGrandLyonStopSensorEntityDescription(
|
||||
key="next_departure_1_type",
|
||||
translation_key="next_departure_1_type",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=_DEPARTURE_TYPE_OPTIONS,
|
||||
departure_index=0,
|
||||
value_fn=lambda p: p.type.name.lower(),
|
||||
),
|
||||
DataGrandLyonStopSensorEntityDescription(
|
||||
key="next_departure_2",
|
||||
translation_key="next_departure_2",
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
departure_index=1,
|
||||
value_fn=_departure_time,
|
||||
icon_fn=_departure_icon,
|
||||
),
|
||||
DataGrandLyonStopSensorEntityDescription(
|
||||
key="next_departure_2_direction",
|
||||
translation_key="next_departure_2_direction",
|
||||
departure_index=1,
|
||||
value_fn=lambda p: p.direction,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
DataGrandLyonStopSensorEntityDescription(
|
||||
key="next_departure_2_type",
|
||||
translation_key="next_departure_2_type",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=_DEPARTURE_TYPE_OPTIONS,
|
||||
departure_index=1,
|
||||
value_fn=lambda p: p.type.name.lower(),
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
DataGrandLyonStopSensorEntityDescription(
|
||||
key="next_departure_3",
|
||||
translation_key="next_departure_3",
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
departure_index=2,
|
||||
value_fn=_departure_time,
|
||||
icon_fn=_departure_icon,
|
||||
),
|
||||
DataGrandLyonStopSensorEntityDescription(
|
||||
key="next_departure_3_direction",
|
||||
translation_key="next_departure_3_direction",
|
||||
departure_index=2,
|
||||
value_fn=lambda p: p.direction,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
DataGrandLyonStopSensorEntityDescription(
|
||||
key="next_departure_3_type",
|
||||
translation_key="next_departure_3_type",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=_DEPARTURE_TYPE_OPTIONS,
|
||||
departure_index=2,
|
||||
value_fn=lambda p: p.type.name.lower(),
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistant,
    entry: DataGrandLyonConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Data Grand Lyon sensor entities."""
    coordinator = entry.runtime_data

    # One batch of sensors per configured stop, attached to its subentry so
    # that removing the subentry also removes its entities.
    for stop_subentry in entry.get_subentries_of_type(SUBENTRY_TYPE_STOP):
        async_add_entities(
            (
                DataGrandLyonStopSensor(coordinator, stop_subentry, desc)
                for desc in STOP_SENSOR_DESCRIPTIONS
            ),
            config_subentry_id=stop_subentry.subentry_id,
        )
|
||||
|
||||
|
||||
class DataGrandLyonStopSensor(
    CoordinatorEntity[DataGrandLyonCoordinator], SensorEntity
):
    """Sensor for Data Grand Lyon stop departures."""

    _attr_has_entity_name = True
    entity_description: DataGrandLyonStopSensorEntityDescription

    def __init__(
        self,
        coordinator: DataGrandLyonCoordinator,
        subentry: ConfigSubentry,
        description: DataGrandLyonStopSensorEntityDescription,
    ) -> None:
        """Initialize the sensor."""
        super().__init__(coordinator)
        self.entity_description = description
        self._subentry_id = subentry.subentry_id
        assert subentry.unique_id is not None

        self._attr_unique_id = f"{subentry.unique_id}-{description.key}"
        # Every sensor of one stop hangs off a single service device.
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, subentry.unique_id)},
            name=subentry.title,
            manufacturer="TCL",
            model="Stop",
            entry_type=DeviceEntryType.SERVICE,
        )

    def _get_departure(self) -> TclPassage | None:
        """Return the departure for this sensor's index, or None."""
        upcoming = self.coordinator.data.get(self._subentry_id, [])
        idx = self.entity_description.departure_index
        return upcoming[idx] if idx < len(upcoming) else None

    @property
    def native_value(self) -> StateType | datetime:
        """Return the sensor value."""
        if (departure := self._get_departure()) is None:
            return None
        return self.entity_description.value_fn(departure)

    @property
    def icon(self) -> str | None:
        """Return a dynamic icon when the description provides one."""
        icon_fn = self.entity_description.icon_fn
        if icon_fn is None:
            return None
        if (departure := self._get_departure()) is None:
            return None
        return icon_fn(departure)
|
||||
@@ -0,0 +1,86 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"username": "[%key:common::config_flow::data::username%]"
|
||||
},
|
||||
"data_description": {
|
||||
"password": "Your password on data.grandlyon.com.",
|
||||
"username": "Your username on data.grandlyon.com."
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"config_subentries": {
|
||||
"stop": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
|
||||
},
|
||||
"entry_type": "Transit stop",
|
||||
"initiate_flow": {
|
||||
"user": "Add transit stop"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"line": "Line",
|
||||
"stop_id": "Stop ID"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"next_departure_1": {
|
||||
"name": "Next departure 1"
|
||||
},
|
||||
"next_departure_1_direction": {
|
||||
"name": "Next departure 1 direction"
|
||||
},
|
||||
"next_departure_1_type": {
|
||||
"name": "Next departure 1 type",
|
||||
"state": {
|
||||
"estimated": "Estimated",
|
||||
"theoretical": "Theoretical"
|
||||
}
|
||||
},
|
||||
"next_departure_2": {
|
||||
"name": "Next departure 2"
|
||||
},
|
||||
"next_departure_2_direction": {
|
||||
"name": "Next departure 2 direction"
|
||||
},
|
||||
"next_departure_2_type": {
|
||||
"name": "Next departure 2 type",
|
||||
"state": {
|
||||
"estimated": "[%key:component::data_grand_lyon::entity::sensor::next_departure_1_type::state::estimated%]",
|
||||
"theoretical": "[%key:component::data_grand_lyon::entity::sensor::next_departure_1_type::state::theoretical%]"
|
||||
}
|
||||
},
|
||||
"next_departure_3": {
|
||||
"name": "Next departure 3"
|
||||
},
|
||||
"next_departure_3_direction": {
|
||||
"name": "Next departure 3 direction"
|
||||
},
|
||||
"next_departure_3_type": {
|
||||
"name": "Next departure 3 type",
|
||||
"state": {
|
||||
"estimated": "[%key:component::data_grand_lyon::entity::sensor::next_departure_1_type::state::estimated%]",
|
||||
"theoretical": "[%key:component::data_grand_lyon::entity::sensor::next_departure_1_type::state::theoretical%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -16,9 +16,11 @@ from homeassistant.config_entries import (
|
||||
)
|
||||
from homeassistant.const import CONF_NAME, CONF_PORT
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.data_entry_flow import SectionConfig, section
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
|
||||
from .const import (
|
||||
CONF_ADVANCED_OPTIONS,
|
||||
CONF_HOSTNAME,
|
||||
CONF_IPV4,
|
||||
CONF_IPV6,
|
||||
@@ -37,15 +39,17 @@ from .const import (
|
||||
DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOSTNAME, default=DEFAULT_HOSTNAME): cv.string,
|
||||
}
|
||||
)
|
||||
DATA_SCHEMA_ADV = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOSTNAME, default=DEFAULT_HOSTNAME): cv.string,
|
||||
vol.Optional(CONF_RESOLVER): cv.string,
|
||||
vol.Optional(CONF_PORT): cv.port,
|
||||
vol.Optional(CONF_RESOLVER_IPV6): cv.string,
|
||||
vol.Optional(CONF_PORT_IPV6): cv.port,
|
||||
vol.Required(CONF_ADVANCED_OPTIONS): section(
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_RESOLVER): cv.string,
|
||||
vol.Optional(CONF_PORT): cv.port,
|
||||
vol.Optional(CONF_RESOLVER_IPV6): cv.string,
|
||||
vol.Optional(CONF_PORT_IPV6): cv.port,
|
||||
}
|
||||
),
|
||||
SectionConfig(collapsed=True),
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
@@ -111,10 +115,13 @@ class DnsIPConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if user_input:
|
||||
hostname = user_input[CONF_HOSTNAME]
|
||||
name = DEFAULT_NAME if hostname == DEFAULT_HOSTNAME else hostname
|
||||
resolver = user_input.get(CONF_RESOLVER, DEFAULT_RESOLVER)
|
||||
resolver_ipv6 = user_input.get(CONF_RESOLVER_IPV6, DEFAULT_RESOLVER_IPV6)
|
||||
port = user_input.get(CONF_PORT, DEFAULT_PORT)
|
||||
port_ipv6 = user_input.get(CONF_PORT_IPV6, DEFAULT_PORT)
|
||||
advanced_options = user_input[CONF_ADVANCED_OPTIONS]
|
||||
resolver = advanced_options.get(CONF_RESOLVER, DEFAULT_RESOLVER)
|
||||
resolver_ipv6 = advanced_options.get(
|
||||
CONF_RESOLVER_IPV6, DEFAULT_RESOLVER_IPV6
|
||||
)
|
||||
port = advanced_options.get(CONF_PORT, DEFAULT_PORT)
|
||||
port_ipv6 = advanced_options.get(CONF_PORT_IPV6, DEFAULT_PORT)
|
||||
|
||||
validate = await async_validate_hostname(
|
||||
hostname, resolver, resolver_ipv6, port, port_ipv6
|
||||
@@ -149,12 +156,6 @@ class DnsIPConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
},
|
||||
)
|
||||
|
||||
if self.show_advanced_options is True:
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=DATA_SCHEMA_ADV,
|
||||
errors=errors,
|
||||
)
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=DATA_SCHEMA,
|
||||
|
||||
@@ -12,6 +12,7 @@ CONF_PORT_IPV6 = "port_ipv6"
|
||||
CONF_IPV4 = "ipv4"
|
||||
CONF_IPV6 = "ipv6"
|
||||
CONF_IPV6_V4 = "ipv6_v4"
|
||||
CONF_ADVANCED_OPTIONS = "advanced_options"
|
||||
|
||||
DEFAULT_HOSTNAME = "myip.opendns.com"
|
||||
DEFAULT_IPV6 = False
|
||||
|
||||
@@ -9,18 +9,28 @@
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"hostname": "Hostname",
|
||||
"port": "IPv4 port",
|
||||
"port_ipv6": "IPv6 port",
|
||||
"resolver": "IPv4 resolver",
|
||||
"resolver_ipv6": "IPv6 resolver"
|
||||
"hostname": "Hostname"
|
||||
},
|
||||
"data_description": {
|
||||
"hostname": "The hostname for which to perform the DNS query.",
|
||||
"port": "Port used for the IPv4 lookup.",
|
||||
"port_ipv6": "Port used for the IPv6 lookup.",
|
||||
"resolver": "Resolver used for the IPv4 lookup.",
|
||||
"resolver_ipv6": "Resolver used for the IPv6 lookup."
|
||||
"hostname": "The hostname for which to perform the DNS query."
|
||||
},
|
||||
"sections": {
|
||||
"advanced_options": {
|
||||
"data": {
|
||||
"port": "IPv4 port",
|
||||
"port_ipv6": "IPv6 port",
|
||||
"resolver": "IPv4 resolver",
|
||||
"resolver_ipv6": "IPv6 resolver"
|
||||
},
|
||||
"data_description": {
|
||||
"port": "Port used for the IPv4 lookup.",
|
||||
"port_ipv6": "Port used for the IPv6 lookup.",
|
||||
"resolver": "Resolver used for the IPv4 lookup.",
|
||||
"resolver_ipv6": "Resolver used for the IPv6 lookup."
|
||||
},
|
||||
"description": "Optionally change resolvers and ports.",
|
||||
"name": "Advanced options"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -53,17 +63,18 @@
|
||||
"step": {
|
||||
"init": {
|
||||
"data": {
|
||||
"port": "[%key:component::dnsip::config::step::user::data::port%]",
|
||||
"port_ipv6": "[%key:component::dnsip::config::step::user::data::port_ipv6%]",
|
||||
"resolver": "[%key:component::dnsip::config::step::user::data::resolver%]",
|
||||
"resolver_ipv6": "[%key:component::dnsip::config::step::user::data::resolver_ipv6%]"
|
||||
"port": "[%key:component::dnsip::config::step::user::sections::advanced_options::data::port%]",
|
||||
"port_ipv6": "[%key:component::dnsip::config::step::user::sections::advanced_options::data::port_ipv6%]",
|
||||
"resolver": "[%key:component::dnsip::config::step::user::sections::advanced_options::data::resolver%]",
|
||||
"resolver_ipv6": "[%key:component::dnsip::config::step::user::sections::advanced_options::data::resolver_ipv6%]"
|
||||
},
|
||||
"data_description": {
|
||||
"port": "[%key:component::dnsip::config::step::user::data_description::port%]",
|
||||
"port_ipv6": "[%key:component::dnsip::config::step::user::data_description::port_ipv6%]",
|
||||
"resolver": "[%key:component::dnsip::config::step::user::data_description::resolver%]",
|
||||
"resolver_ipv6": "[%key:component::dnsip::config::step::user::data_description::resolver_ipv6%]"
|
||||
}
|
||||
"port": "[%key:component::dnsip::config::step::user::sections::advanced_options::data_description::port%]",
|
||||
"port_ipv6": "[%key:component::dnsip::config::step::user::sections::advanced_options::data_description::port_ipv6%]",
|
||||
"resolver": "[%key:component::dnsip::config::step::user::sections::advanced_options::data_description::resolver%]",
|
||||
"resolver_ipv6": "[%key:component::dnsip::config::step::user::sections::advanced_options::data_description::resolver_ipv6%]"
|
||||
},
|
||||
"description": "Optionally change resolvers and ports."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,38 +6,19 @@ from homeassistant.components.event import (
|
||||
DoorbellEventType,
|
||||
EventDeviceClass,
|
||||
)
|
||||
from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN
|
||||
from homeassistant.core import HomeAssistant, State
|
||||
from homeassistant.helpers.automation import DomainSpec
|
||||
from homeassistant.helpers.trigger import (
|
||||
ENTITY_STATE_TRIGGER_SCHEMA,
|
||||
EntityTriggerBase,
|
||||
Trigger,
|
||||
)
|
||||
from homeassistant.helpers.trigger import StatelessEntityTriggerBase, Trigger
|
||||
|
||||
|
||||
class DoorbellRangTrigger(EntityTriggerBase):
|
||||
class DoorbellRangTrigger(StatelessEntityTriggerBase):
|
||||
"""Trigger for doorbell event entity when a ring event is received."""
|
||||
|
||||
_domain_specs = {EVENT_DOMAIN: DomainSpec(device_class=EventDeviceClass.DOORBELL)}
|
||||
_schema = ENTITY_STATE_TRIGGER_SCHEMA
|
||||
|
||||
def is_valid_state(self, state: State) -> bool:
|
||||
"""Check if the entity is available and the event type is ring."""
|
||||
return (
|
||||
state.state not in (STATE_UNAVAILABLE, STATE_UNKNOWN)
|
||||
and state.attributes.get(ATTR_EVENT_TYPE) == DoorbellEventType.RING
|
||||
)
|
||||
|
||||
def is_valid_transition(self, from_state: State, to_state: State) -> bool:
|
||||
"""Check if the origin state is valid and different from the current state."""
|
||||
|
||||
# UNKNOWN is a valid from_state, otherwise the first time the event is received
|
||||
# would not trigger
|
||||
if from_state.state == STATE_UNAVAILABLE:
|
||||
return False
|
||||
|
||||
return from_state.state != to_state.state
|
||||
"""Check if the event type is ring."""
|
||||
return state.attributes.get(ATTR_EVENT_TYPE) == DoorbellEventType.RING
|
||||
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["duco"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["python-duco-client==0.4.0"],
|
||||
"requirements": ["python-duco-client==0.5.0"],
|
||||
"zeroconf": [
|
||||
{
|
||||
"name": "duco [[][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][]].*",
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
|
||||
"requirements": ["py-sucks==0.9.11", "deebot-client==18.2.0"]
|
||||
"requirements": ["py-sucks==0.9.11", "deebot-client==18.3.0"]
|
||||
}
|
||||
|
||||
@@ -72,9 +72,11 @@ PLATFORMS = [
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.CLIMATE,
|
||||
Platform.LIGHT,
|
||||
Platform.NUMBER,
|
||||
Platform.SCENE,
|
||||
Platform.SENSOR,
|
||||
Platform.SWITCH,
|
||||
Platform.TIME,
|
||||
]
|
||||
|
||||
|
||||
|
||||
@@ -0,0 +1,77 @@
|
||||
"""Support for ElkM1 number entities."""
|
||||
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
|
||||
from elkm1_lib.const import SettingFormat
|
||||
from elkm1_lib.elements import Element
|
||||
from elkm1_lib.settings import Setting
|
||||
|
||||
from homeassistant.components.number import NumberDeviceClass, NumberEntity
|
||||
from homeassistant.const import UnitOfTime
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import ElkM1ConfigEntry
|
||||
from .entity import ElkAttachedEntity, ElkEntity, create_elk_entities
|
||||
from .models import ELKM1Data
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ElkM1ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the Elk-M1 number platform."""
    elk_data = config_entry.runtime_data
    # Only NUMBER and TIMER formatted settings become number entities.
    numeric_formats = (SettingFormat.NUMBER, SettingFormat.TIMER)
    number_settings = [
        setting
        for setting in cast(list[Setting], elk_data.elk.settings)
        if setting.value_format in numeric_formats
    ]

    entities: list[ElkEntity] = []
    create_elk_entities(
        elk_data,
        number_settings,
        "setting",
        ElkNumberSetting,
        entities,
    )
    async_add_entities(entities)
|
||||
|
||||
|
||||
class ElkNumberSetting(ElkAttachedEntity, NumberEntity):
    """Representation of an Elk-M1 Number Setting."""

    _element: Setting

    # Elk settings are unsigned 16-bit values.
    _attr_native_min_value = 0
    _attr_native_max_value = 65535
    _attr_native_step = 1

    def __init__(self, element: Setting, elk: Any, elk_data: ELKM1Data) -> None:
        """Initialize the number setting."""
        super().__init__(element, elk, elk_data)
        # Timer settings are durations in seconds; plain numbers are unitless.
        if element.value_format == SettingFormat.TIMER:
            self._attr_device_class = NumberDeviceClass.DURATION
            self._attr_native_unit_of_measurement = UnitOfTime.SECONDS

    def _element_changed(self, element: Element, changeset: dict[str, Any]) -> None:
        """Update the entity state from the panel's reported value."""
        # Guard against the panel possibly changing the underlying
        # type without us knowing about the change.
        value = self._element.value
        if isinstance(value, int):
            self._attr_native_value = value
        else:
            self._attr_available = False
            _LOGGER.warning(
                "Setting type for '%s' differs between the ElkM1 and the entity. Restart the integration to fix",
                self.entity_id,
            )

    async def async_set_native_value(self, value: float) -> None:
        """Set the value of the setting."""
        self._element.set(int(value))
|
||||
@@ -0,0 +1,66 @@
|
||||
"""Support for ElkM1 time entities."""
|
||||
|
||||
from datetime import time as dt_time
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
|
||||
from elkm1_lib.const import SettingFormat
|
||||
from elkm1_lib.elements import Element
|
||||
from elkm1_lib.settings import Setting
|
||||
|
||||
from homeassistant.components.time import TimeEntity
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import ElkM1ConfigEntry
|
||||
from .entity import ElkAttachedEntity, ElkEntity, create_elk_entities
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ElkM1ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Elk-M1 time platform."""
|
||||
elk_data = config_entry.runtime_data
|
||||
elk = elk_data.elk
|
||||
entities: list[ElkEntity] = []
|
||||
time_settings = [
|
||||
setting
|
||||
for setting in cast(list[Setting], elk.settings)
|
||||
if setting.value_format == SettingFormat.TIME_OF_DAY
|
||||
]
|
||||
|
||||
create_elk_entities(
|
||||
elk_data,
|
||||
time_settings,
|
||||
"setting",
|
||||
ElkTimeSetting,
|
||||
entities,
|
||||
)
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
class ElkTimeSetting(ElkAttachedEntity, TimeEntity):
|
||||
"""Representation of an Elk-M1 Time Setting."""
|
||||
|
||||
_element: Setting
|
||||
|
||||
def _element_changed(self, element: Element, changeset: dict[str, Any]) -> None:
|
||||
value = self._element.value
|
||||
# Guard against the panel possibly changing the underlying
|
||||
# type without us knowing about the change
|
||||
if isinstance(value, tuple):
|
||||
self._attr_native_value = dt_time(hour=value[0], minute=value[1])
|
||||
else:
|
||||
self._attr_available = False
|
||||
_LOGGER.warning(
|
||||
"Setting type for '%s' differs between the ElkM1 and the entity. Restart the integration to fix",
|
||||
self.entity_id,
|
||||
)
|
||||
|
||||
async def async_set_value(self, value: dt_time) -> None:
|
||||
"""Set the time of the setting."""
|
||||
self._element.set((value.hour, value.minute))
|
||||
@@ -2,13 +2,13 @@
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_OPTIONS, STATE_UNAVAILABLE, STATE_UNKNOWN
|
||||
from homeassistant.const import CONF_OPTIONS
|
||||
from homeassistant.core import HomeAssistant, State
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.automation import DomainSpec
|
||||
from homeassistant.helpers.trigger import (
|
||||
ENTITY_STATE_TRIGGER_SCHEMA,
|
||||
EntityTriggerBase,
|
||||
StatelessEntityTriggerBase,
|
||||
Trigger,
|
||||
TriggerConfig,
|
||||
)
|
||||
@@ -28,7 +28,7 @@ EVENT_RECEIVED_TRIGGER_SCHEMA = ENTITY_STATE_TRIGGER_SCHEMA.extend(
|
||||
)
|
||||
|
||||
|
||||
class EventReceivedTrigger(EntityTriggerBase):
|
||||
class EventReceivedTrigger(StatelessEntityTriggerBase):
|
||||
"""Trigger for event entity when it receives a matching event."""
|
||||
|
||||
_domain_specs = {DOMAIN: DomainSpec()}
|
||||
@@ -39,22 +39,9 @@ class EventReceivedTrigger(EntityTriggerBase):
|
||||
super().__init__(hass, config)
|
||||
self._event_types = set(self._options[CONF_EVENT_TYPE])
|
||||
|
||||
def is_valid_transition(self, from_state: State, to_state: State) -> bool:
|
||||
"""Check if the origin state is valid and different from the current state."""
|
||||
|
||||
# UNKNOWN is a valid from_state, otherwise the first time the event is received
|
||||
# would not trigger
|
||||
if from_state.state == STATE_UNAVAILABLE:
|
||||
return False
|
||||
|
||||
return from_state.state != to_state.state
|
||||
|
||||
def is_valid_state(self, state: State) -> bool:
|
||||
"""Check if the event type is valid and matches one of the configured types."""
|
||||
return (
|
||||
state.state not in (STATE_UNAVAILABLE, STATE_UNKNOWN)
|
||||
and state.attributes.get(ATTR_EVENT_TYPE) in self._event_types
|
||||
)
|
||||
"""Check if the event type matches one of the configured types."""
|
||||
return state.attributes.get(ATTR_EVENT_TYPE) in self._event_types
|
||||
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
|
||||
@@ -8,6 +8,7 @@ from functools import partial
|
||||
import logging
|
||||
import re
|
||||
from typing import Any, TypedDict, cast
|
||||
from xml.etree.ElementTree import ParseError
|
||||
|
||||
from fritzconnection import FritzConnection
|
||||
from fritzconnection.core.exceptions import FritzActionError
|
||||
@@ -24,7 +25,7 @@ from homeassistant.components.device_tracker import (
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
@@ -226,7 +227,13 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
|
||||
self.fritz_guest_wifi = FritzGuestWLAN(fc=self.connection)
|
||||
self.fritz_status = FritzStatus(fc=self.connection)
|
||||
self.fritz_call = FritzCall(fc=self.connection)
|
||||
info = self.fritz_status.get_device_info()
|
||||
try:
|
||||
info = self.fritz_status.get_device_info()
|
||||
except ParseError as ex:
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="error_parse_device_info",
|
||||
) from ex
|
||||
|
||||
_LOGGER.debug(
|
||||
"gathered device info of %s %s",
|
||||
|
||||
@@ -185,6 +185,9 @@
|
||||
"config_entry_not_found": {
|
||||
"message": "Failed to perform action \"{service}\". Config entry for target not found"
|
||||
},
|
||||
"error_parse_device_info": {
|
||||
"message": "Error parsing device info. Please check the system event log of your FRITZ!Box for malformed data and clear the event list."
|
||||
},
|
||||
"error_refresh_hosts_info": {
|
||||
"message": "Error refreshing hosts info"
|
||||
},
|
||||
|
||||
@@ -21,5 +21,5 @@
|
||||
"integration_type": "system",
|
||||
"preview_features": { "winter_mode": {} },
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["home-assistant-frontend==20260429.2"]
|
||||
"requirements": ["home-assistant-frontend==20260429.3"]
|
||||
}
|
||||
|
||||
@@ -15,5 +15,5 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["bleak", "bleak_esphome", "gardena_bluetooth"],
|
||||
"requirements": ["gardena-bluetooth==2.4.0"]
|
||||
"requirements": ["gardena-bluetooth==2.8.1"]
|
||||
}
|
||||
|
||||
@@ -12,6 +12,7 @@ import voluptuous as vol
|
||||
from homeassistant.auth.models import User
|
||||
from homeassistant.auth.providers import homeassistant as auth_ha
|
||||
from homeassistant.components.http import KEY_HASS, KEY_HASS_USER, HomeAssistantView
|
||||
from homeassistant.components.http.const import is_supervisor_unix_socket_request
|
||||
from homeassistant.components.http.data_validator import RequestDataValidator
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
@@ -41,14 +42,18 @@ class HassIOBaseAuth(HomeAssistantView):
|
||||
|
||||
def _check_access(self, request: web.Request) -> None:
|
||||
"""Check if this call is from Supervisor."""
|
||||
# Check caller IP
|
||||
hassio_ip = os.environ["SUPERVISOR"].split(":")[0]
|
||||
assert request.transport
|
||||
if ip_address(request.transport.get_extra_info("peername")[0]) != ip_address(
|
||||
hassio_ip
|
||||
):
|
||||
_LOGGER.error("Invalid auth request from %s", request.remote)
|
||||
raise HTTPUnauthorized
|
||||
# Requests over the Supervisor Unix socket are authenticated by the
|
||||
# http auth middleware as the Supervisor user, so the caller-IP check
|
||||
# below does not apply (and would crash, since `peername` is empty for
|
||||
# Unix sockets). The user-ID check still runs to ensure only the
|
||||
# Supervisor user can reach this endpoint.
|
||||
if not is_supervisor_unix_socket_request(request):
|
||||
hassio_ip = os.environ["SUPERVISOR"].split(":")[0]
|
||||
assert request.transport
|
||||
peername = request.transport.get_extra_info("peername")
|
||||
if not peername or ip_address(peername[0]) != ip_address(hassio_ip):
|
||||
_LOGGER.error("Invalid auth request from %s", request.remote)
|
||||
raise HTTPUnauthorized
|
||||
|
||||
# Check caller token
|
||||
if request[KEY_HASS_USER].id != self.user.id:
|
||||
|
||||
@@ -44,14 +44,20 @@ async def async_setup_entry(hass: HomeAssistant, entry: HiveConfigEntry) -> bool
|
||||
except HiveReauthRequired as err:
|
||||
raise ConfigEntryAuthFailed from err
|
||||
|
||||
hub_data = devices["parent"][0]
|
||||
connections: set[tuple[str, str]] = set()
|
||||
if mac := hub_data.get("macAddress"):
|
||||
connections.add((dr.CONNECTION_NETWORK_MAC, dr.format_mac(mac)))
|
||||
|
||||
device_registry = dr.async_get(hass)
|
||||
device_registry.async_get_or_create(
|
||||
config_entry_id=entry.entry_id,
|
||||
identifiers={(DOMAIN, devices["parent"][0]["device_id"])},
|
||||
name=devices["parent"][0]["hiveName"],
|
||||
model=devices["parent"][0]["deviceData"]["model"],
|
||||
sw_version=devices["parent"][0]["deviceData"]["version"],
|
||||
manufacturer=devices["parent"][0]["deviceData"]["manufacturer"],
|
||||
identifiers={(DOMAIN, hub_data["device_id"])},
|
||||
connections=connections,
|
||||
name=hub_data["hiveName"],
|
||||
model=hub_data["deviceData"]["model"],
|
||||
sw_version=hub_data["deviceData"]["version"],
|
||||
manufacturer=hub_data["deviceData"]["manufacturer"],
|
||||
)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(
|
||||
|
||||
@@ -5,5 +5,5 @@
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/holiday",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["holidays==0.95", "babel==2.15.0"]
|
||||
"requirements": ["holidays==0.96", "babel==2.15.0"]
|
||||
}
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["homematicip"],
|
||||
"requirements": ["homematicip==2.9.0"]
|
||||
"requirements": ["homematicip==2.11.0"]
|
||||
}
|
||||
|
||||
@@ -4,8 +4,6 @@ from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from python_qube_heatpump.models import QubeState
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
BinarySensorEntity,
|
||||
@@ -13,6 +11,7 @@ from homeassistant.components.binary_sensor import (
|
||||
)
|
||||
from homeassistant.const import EntityCategory
|
||||
|
||||
from .coordinator import QubeData
|
||||
from .entity import QubeEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
@@ -29,7 +28,7 @@ if TYPE_CHECKING:
|
||||
class QubeBinarySensorEntityDescription(BinarySensorEntityDescription):
|
||||
"""Binary sensor entity description for Qube Heat Pump."""
|
||||
|
||||
value_fn: Callable[[QubeState], bool | None]
|
||||
value_fn: Callable[[QubeData], bool | None]
|
||||
|
||||
|
||||
BINARY_SENSOR_TYPES: tuple[QubeBinarySensorEntityDescription, ...] = (
|
||||
@@ -37,58 +36,58 @@ BINARY_SENSOR_TYPES: tuple[QubeBinarySensorEntityDescription, ...] = (
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="source_pump",
|
||||
translation_key="source_pump",
|
||||
value_fn=lambda data: data.dout_srcpmp_val,
|
||||
value_fn=lambda data: data.state.dout_srcpmp_val,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="user_pump",
|
||||
translation_key="user_pump",
|
||||
value_fn=lambda data: data.dout_usrpmp_val,
|
||||
value_fn=lambda data: data.state.dout_usrpmp_val,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="four_way_valve",
|
||||
translation_key="four_way_valve",
|
||||
value_fn=lambda data: data.dout_fourwayvlv_val,
|
||||
value_fn=lambda data: data.state.dout_fourwayvlv_val,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="cooling_output",
|
||||
translation_key="cooling_output",
|
||||
value_fn=lambda data: data.dout_cooling_val,
|
||||
value_fn=lambda data: data.state.dout_cooling_val,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="three_way_valve",
|
||||
translation_key="three_way_valve",
|
||||
value_fn=lambda data: data.dout_threewayvlv_val,
|
||||
value_fn=lambda data: data.state.dout_threewayvlv_val,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="buffer_pump",
|
||||
translation_key="buffer_pump",
|
||||
value_fn=lambda data: data.dout_bufferpmp_val,
|
||||
value_fn=lambda data: data.state.dout_bufferpmp_val,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="heater_step_1",
|
||||
translation_key="heater_step_1",
|
||||
value_fn=lambda data: data.dout_heaterstep1_val,
|
||||
value_fn=lambda data: data.state.dout_heaterstep1_val,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="heater_step_2",
|
||||
translation_key="heater_step_2",
|
||||
value_fn=lambda data: data.dout_heaterstep2_val,
|
||||
value_fn=lambda data: data.state.dout_heaterstep2_val,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="heater_step_3",
|
||||
translation_key="heater_step_3",
|
||||
value_fn=lambda data: data.dout_heaterstep3_val,
|
||||
value_fn=lambda data: data.state.dout_heaterstep3_val,
|
||||
),
|
||||
# System status
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="keypad",
|
||||
translation_key="keypad",
|
||||
value_fn=lambda data: data.keybonoff,
|
||||
value_fn=lambda data: data.state.keybonoff,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="day_mode",
|
||||
translation_key="day_mode",
|
||||
value_fn=lambda data: data.daynightmode,
|
||||
value_fn=lambda data: data.state.daynightmode,
|
||||
),
|
||||
# Alarms
|
||||
QubeBinarySensorEntityDescription(
|
||||
@@ -96,84 +95,84 @@ BINARY_SENSOR_TYPES: tuple[QubeBinarySensorEntityDescription, ...] = (
|
||||
translation_key="alarm_antilegionella_timeout",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: data.al_maxtime_antileg_active,
|
||||
value_fn=lambda data: data.state.al_maxtime_antileg_active,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="alarm_dhw_timeout",
|
||||
translation_key="alarm_dhw_timeout",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: data.al_maxtime_dhw_active,
|
||||
value_fn=lambda data: data.state.al_maxtime_dhw_active,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="alarm_dewpoint",
|
||||
translation_key="alarm_dewpoint",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: data.al_dewpoint_active,
|
||||
value_fn=lambda data: data.state.al_dewpoint_active,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="alarm_supply_too_hot",
|
||||
translation_key="alarm_supply_too_hot",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: data.al_underfloorsafety_active,
|
||||
value_fn=lambda data: data.state.al_underfloorsafety_active,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="alarm_flow",
|
||||
translation_key="alarm_flow",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: data.alrm_flw,
|
||||
value_fn=lambda data: data.state.alrm_flw,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="alarm_central_heating",
|
||||
translation_key="alarm_central_heating",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: data.usralrms,
|
||||
value_fn=lambda data: data.state.usralrms,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="alarm_cooling",
|
||||
translation_key="alarm_cooling",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: data.coolingalrms,
|
||||
value_fn=lambda data: data.state.coolingalrms,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="alarm_heating",
|
||||
translation_key="alarm_heating",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: data.heatingalrms,
|
||||
value_fn=lambda data: data.state.heatingalrms,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="alarm_working_hours",
|
||||
translation_key="alarm_working_hours",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: data.alarmmng_al_workinghour,
|
||||
value_fn=lambda data: data.state.alarmmng_al_workinghour,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="alarm_source",
|
||||
translation_key="alarm_source",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: data.srsalrm,
|
||||
value_fn=lambda data: data.state.srsalrm,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="alarm_global",
|
||||
translation_key="alarm_global",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: data.glbal,
|
||||
value_fn=lambda data: data.state.glbal,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="alarm_compressor",
|
||||
translation_key="alarm_compressor",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: data.alarmmng_al_pwrplus,
|
||||
value_fn=lambda data: data.state.alarmmng_al_pwrplus,
|
||||
),
|
||||
# Sensor/controller status
|
||||
QubeBinarySensorEntityDescription(
|
||||
@@ -181,76 +180,76 @@ BINARY_SENSOR_TYPES: tuple[QubeBinarySensorEntityDescription, ...] = (
|
||||
translation_key="room_sensor_enabled",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: data.roomprb_en,
|
||||
value_fn=lambda data: data.state.roomprb_en,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="plant_sensor_enabled",
|
||||
translation_key="plant_sensor_enabled",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: data.plantprb_en,
|
||||
value_fn=lambda data: data.state.plantprb_en,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="buffer_sensor_enabled",
|
||||
translation_key="buffer_sensor_enabled",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: data.bufferprb_en,
|
||||
value_fn=lambda data: data.state.bufferprb_en,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="dhw_controller_enabled",
|
||||
translation_key="dhw_controller_enabled",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: data.en_dhwpid,
|
||||
value_fn=lambda data: data.state.en_dhwpid,
|
||||
),
|
||||
# Demand signals
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="plant_demand",
|
||||
translation_key="plant_demand",
|
||||
value_fn=lambda data: data.plantdemand,
|
||||
value_fn=lambda data: data.state.plantdemand,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="external_demand",
|
||||
translation_key="external_demand",
|
||||
value_fn=lambda data: data.id_demand,
|
||||
value_fn=lambda data: data.state.id_demand,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="thermostat_demand",
|
||||
translation_key="thermostat_demand",
|
||||
value_fn=lambda data: data.thermostatdemand,
|
||||
value_fn=lambda data: data.state.thermostatdemand,
|
||||
),
|
||||
# Digital inputs
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="summer_mode",
|
||||
translation_key="summer_mode",
|
||||
value_fn=lambda data: data.id_summerwinter,
|
||||
value_fn=lambda data: data.state.id_summerwinter,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="dewpoint",
|
||||
translation_key="dewpoint",
|
||||
value_fn=lambda data: data.dewpoint,
|
||||
value_fn=lambda data: data.state.dewpoint,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="booster_security",
|
||||
translation_key="booster_security",
|
||||
value_fn=lambda data: data.boostersecurity,
|
||||
value_fn=lambda data: data.state.boostersecurity,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="source_flow",
|
||||
translation_key="source_flow",
|
||||
value_fn=lambda data: data.srcflw,
|
||||
value_fn=lambda data: data.state.srcflw,
|
||||
),
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="anti_legionella",
|
||||
translation_key="anti_legionella",
|
||||
value_fn=lambda data: data.req_antileg_1,
|
||||
value_fn=lambda data: data.state.req_antileg_1,
|
||||
),
|
||||
# Energy
|
||||
QubeBinarySensorEntityDescription(
|
||||
key="pv_surplus",
|
||||
translation_key="pv_surplus",
|
||||
value_fn=lambda data: data.surplus_pv,
|
||||
value_fn=lambda data: data.state.surplus_pv,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
from homeassistant.const import Platform
|
||||
|
||||
DOMAIN = "hr_energy_qube"
|
||||
PLATFORMS = (Platform.BINARY_SENSOR, Platform.SENSOR)
|
||||
PLATFORMS = (Platform.BINARY_SENSOR, Platform.SENSOR, Platform.SWITCH)
|
||||
|
||||
DEFAULT_PORT = 502
|
||||
DEFAULT_SCAN_INTERVAL = 15
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"""DataUpdateCoordinator for Qube Heat Pump."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
@@ -18,7 +19,15 @@ if TYPE_CHECKING:
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class QubeCoordinator(DataUpdateCoordinator[QubeState]):
|
||||
@dataclass
|
||||
class QubeData:
|
||||
"""Data from the Qube coordinator."""
|
||||
|
||||
state: QubeState
|
||||
switches: dict[str, bool | None]
|
||||
|
||||
|
||||
class QubeCoordinator(DataUpdateCoordinator[QubeData]):
|
||||
"""Qube Heat Pump data coordinator."""
|
||||
|
||||
def __init__(
|
||||
@@ -34,16 +43,17 @@ class QubeCoordinator(DataUpdateCoordinator[QubeState]):
|
||||
config_entry=entry,
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> QubeState:
|
||||
async def _async_update_data(self) -> QubeData:
|
||||
"""Fetch data from the device."""
|
||||
try:
|
||||
data = await self.client.get_all_data()
|
||||
state = await self.client.get_all_data()
|
||||
switches = await self.client.read_all_switches()
|
||||
except (ConnectionError, TimeoutError, OSError) as exc:
|
||||
raise UpdateFailed(
|
||||
f"Error communicating with Qube heat pump: {exc}"
|
||||
) from exc
|
||||
|
||||
if data is None:
|
||||
if state is None:
|
||||
raise UpdateFailed("No data received from Qube heat pump")
|
||||
|
||||
return data
|
||||
return QubeData(state=state, switches=switches)
|
||||
|
||||
@@ -4,8 +4,6 @@ from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from python_qube_heatpump.models import QubeState
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
@@ -21,6 +19,7 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.helpers.typing import StateType
|
||||
|
||||
from .coordinator import QubeData
|
||||
from .entity import QubeEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
@@ -52,12 +51,12 @@ STATUS_MAP: dict[int, str] = {
|
||||
class QubeSensorEntityDescription(SensorEntityDescription):
|
||||
"""Sensor entity description for Qube Heat Pump."""
|
||||
|
||||
value_fn: Callable[[QubeState], StateType]
|
||||
value_fn: Callable[[QubeData], StateType]
|
||||
|
||||
|
||||
def _status_value(data: QubeState) -> StateType:
|
||||
def _status_value(data: QubeData) -> StateType:
|
||||
"""Return status string from status code."""
|
||||
code = data.status_code
|
||||
code = data.state.status_code
|
||||
if code is None:
|
||||
return None
|
||||
return STATUS_MAP.get(code)
|
||||
@@ -71,7 +70,7 @@ SENSOR_TYPES: tuple[QubeSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=1,
|
||||
value_fn=lambda data: data.temp_supply,
|
||||
value_fn=lambda data: data.state.temp_supply,
|
||||
),
|
||||
QubeSensorEntityDescription(
|
||||
key="temp_return",
|
||||
@@ -80,7 +79,7 @@ SENSOR_TYPES: tuple[QubeSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=1,
|
||||
value_fn=lambda data: data.temp_return,
|
||||
value_fn=lambda data: data.state.temp_return,
|
||||
),
|
||||
QubeSensorEntityDescription(
|
||||
key="temp_source_in",
|
||||
@@ -89,7 +88,7 @@ SENSOR_TYPES: tuple[QubeSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=1,
|
||||
value_fn=lambda data: data.temp_source_in,
|
||||
value_fn=lambda data: data.state.temp_source_in,
|
||||
),
|
||||
QubeSensorEntityDescription(
|
||||
key="temp_source_out",
|
||||
@@ -98,7 +97,7 @@ SENSOR_TYPES: tuple[QubeSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=1,
|
||||
value_fn=lambda data: data.temp_source_out,
|
||||
value_fn=lambda data: data.state.temp_source_out,
|
||||
),
|
||||
QubeSensorEntityDescription(
|
||||
key="temp_room",
|
||||
@@ -107,7 +106,7 @@ SENSOR_TYPES: tuple[QubeSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=1,
|
||||
value_fn=lambda data: data.temp_room,
|
||||
value_fn=lambda data: data.state.temp_room,
|
||||
),
|
||||
QubeSensorEntityDescription(
|
||||
key="temp_dhw",
|
||||
@@ -116,7 +115,7 @@ SENSOR_TYPES: tuple[QubeSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=1,
|
||||
value_fn=lambda data: data.temp_dhw,
|
||||
value_fn=lambda data: data.state.temp_dhw,
|
||||
),
|
||||
QubeSensorEntityDescription(
|
||||
key="temp_outside",
|
||||
@@ -125,7 +124,7 @@ SENSOR_TYPES: tuple[QubeSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=1,
|
||||
value_fn=lambda data: data.temp_outside,
|
||||
value_fn=lambda data: data.state.temp_outside,
|
||||
),
|
||||
QubeSensorEntityDescription(
|
||||
key="power_thermic",
|
||||
@@ -134,7 +133,7 @@ SENSOR_TYPES: tuple[QubeSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.power_thermic,
|
||||
value_fn=lambda data: data.state.power_thermic,
|
||||
),
|
||||
QubeSensorEntityDescription(
|
||||
key="power_electric",
|
||||
@@ -143,7 +142,7 @@ SENSOR_TYPES: tuple[QubeSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.power_electric,
|
||||
value_fn=lambda data: data.state.power_electric,
|
||||
),
|
||||
QubeSensorEntityDescription(
|
||||
key="energy_total_electric",
|
||||
@@ -152,7 +151,7 @@ SENSOR_TYPES: tuple[QubeSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
suggested_display_precision=3,
|
||||
value_fn=lambda data: data.energy_total_electric,
|
||||
value_fn=lambda data: data.state.energy_total_electric,
|
||||
),
|
||||
QubeSensorEntityDescription(
|
||||
key="energy_total_thermic",
|
||||
@@ -161,14 +160,14 @@ SENSOR_TYPES: tuple[QubeSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
suggested_display_precision=3,
|
||||
value_fn=lambda data: data.energy_total_thermic,
|
||||
value_fn=lambda data: data.state.energy_total_thermic,
|
||||
),
|
||||
QubeSensorEntityDescription(
|
||||
key="cop_calc",
|
||||
translation_key="cop_calc",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=1,
|
||||
value_fn=lambda data: data.cop_calc,
|
||||
value_fn=lambda data: data.state.cop_calc,
|
||||
),
|
||||
QubeSensorEntityDescription(
|
||||
key="compressor_speed",
|
||||
@@ -176,7 +175,7 @@ SENSOR_TYPES: tuple[QubeSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=REVOLUTIONS_PER_MINUTE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.compressor_speed,
|
||||
value_fn=lambda data: data.state.compressor_speed,
|
||||
),
|
||||
QubeSensorEntityDescription(
|
||||
key="flow_rate",
|
||||
@@ -185,7 +184,7 @@ SENSOR_TYPES: tuple[QubeSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_MINUTE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.flow_rate,
|
||||
value_fn=lambda data: data.state.flow_rate,
|
||||
),
|
||||
QubeSensorEntityDescription(
|
||||
key="setpoint_room_heat_day",
|
||||
@@ -194,7 +193,7 @@ SENSOR_TYPES: tuple[QubeSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=1,
|
||||
value_fn=lambda data: data.setpoint_room_heat_day,
|
||||
value_fn=lambda data: data.state.setpoint_room_heat_day,
|
||||
),
|
||||
QubeSensorEntityDescription(
|
||||
key="setpoint_room_heat_night",
|
||||
@@ -203,7 +202,7 @@ SENSOR_TYPES: tuple[QubeSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=1,
|
||||
value_fn=lambda data: data.setpoint_room_heat_night,
|
||||
value_fn=lambda data: data.state.setpoint_room_heat_night,
|
||||
),
|
||||
QubeSensorEntityDescription(
|
||||
key="setpoint_room_cool_day",
|
||||
@@ -212,7 +211,7 @@ SENSOR_TYPES: tuple[QubeSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=1,
|
||||
value_fn=lambda data: data.setpoint_room_cool_day,
|
||||
value_fn=lambda data: data.state.setpoint_room_cool_day,
|
||||
),
|
||||
QubeSensorEntityDescription(
|
||||
key="setpoint_room_cool_night",
|
||||
@@ -221,7 +220,7 @@ SENSOR_TYPES: tuple[QubeSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=1,
|
||||
value_fn=lambda data: data.setpoint_room_cool_night,
|
||||
value_fn=lambda data: data.state.setpoint_room_cool_night,
|
||||
),
|
||||
QubeSensorEntityDescription(
|
||||
key="status_heatpump",
|
||||
|
||||
@@ -199,6 +199,25 @@
|
||||
"temp_supply": {
|
||||
"name": "Supply temperature CH"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"anti_legionella_cycle": {
|
||||
"name": "Anti-legionella cycle"
|
||||
},
|
||||
"heating_curve": {
|
||||
"name": "Heating curve"
|
||||
},
|
||||
"heating_demand": {
|
||||
"name": "Heating demand"
|
||||
},
|
||||
"summer_mode": {
|
||||
"name": "Summer mode"
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"switch_command_failed": {
|
||||
"message": "Failed to send command to the heat pump."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,117 @@
|
||||
"""Switch platform for Qube Heat Pump."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import QubeConfigEntry
|
||||
from .const import DOMAIN
|
||||
from .coordinator import QubeCoordinator
|
||||
from .entity import QubeEntity
|
||||
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class QubeSwitchEntityDescription(SwitchEntityDescription):
|
||||
"""Switch entity description for Qube Heat Pump."""
|
||||
|
||||
register_key: str
|
||||
|
||||
|
||||
SWITCH_TYPES: tuple[QubeSwitchEntityDescription, ...] = (
|
||||
QubeSwitchEntityDescription(
|
||||
key="summer_mode",
|
||||
translation_key="summer_mode",
|
||||
register_key="bms_summerwinter",
|
||||
),
|
||||
QubeSwitchEntityDescription(
|
||||
key="anti_legionella_cycle",
|
||||
translation_key="anti_legionella_cycle",
|
||||
register_key="antilegionella_frcstart_ant",
|
||||
),
|
||||
QubeSwitchEntityDescription(
|
||||
key="heating_curve",
|
||||
translation_key="heating_curve",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
register_key="en_plantsetp_compens",
|
||||
),
|
||||
QubeSwitchEntityDescription(
|
||||
key="heating_demand",
|
||||
translation_key="heating_demand",
|
||||
register_key="modbus_demand",
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: QubeConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Qube switches."""
|
||||
coordinator = entry.runtime_data.coordinator
|
||||
|
||||
async_add_entities(
|
||||
QubeSwitch(coordinator, entry, description) for description in SWITCH_TYPES
|
||||
)
|
||||
|
||||
|
||||
class QubeSwitch(QubeEntity, SwitchEntity):
|
||||
"""Qube switch entity."""
|
||||
|
||||
entity_description: QubeSwitchEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: QubeCoordinator,
|
||||
entry: QubeConfigEntry,
|
||||
description: QubeSwitchEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the switch."""
|
||||
super().__init__(coordinator, entry)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{entry.entry_id}-{description.key}"
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if entity is available."""
|
||||
return (
|
||||
super().available
|
||||
and self.entity_description.register_key in self.coordinator.data.switches
|
||||
)
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return true if the switch is on."""
|
||||
return self.coordinator.data.switches.get(self.entity_description.register_key)
|
||||
|
||||
async def _async_write_switch(self, value: bool) -> None:
|
||||
"""Write switch value to the device."""
|
||||
register_key = self.entity_description.register_key
|
||||
try:
|
||||
success = await self.coordinator.client.write_switch(register_key, value)
|
||||
except (ConnectionError, TimeoutError, OSError) as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="switch_command_failed",
|
||||
) from err
|
||||
if not success:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="switch_command_failed",
|
||||
)
|
||||
await self.coordinator.async_request_refresh()
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the switch on."""
|
||||
await self._async_write_switch(True)
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the switch off."""
|
||||
await self._async_write_switch(False)
|
||||
@@ -5,7 +5,7 @@ from typing import TYPE_CHECKING
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import ATTR_MODE, CONF_OPTIONS, PERCENTAGE, STATE_OFF, STATE_ON
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.core import HomeAssistant, State
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.automation import DomainSpec
|
||||
@@ -13,8 +13,8 @@ from homeassistant.helpers.condition import (
|
||||
ENTITY_STATE_CONDITION_SCHEMA_ANY_ALL,
|
||||
Condition,
|
||||
ConditionConfig,
|
||||
EntityNumericalConditionBase,
|
||||
EntityStateConditionBase,
|
||||
make_entity_numerical_condition,
|
||||
make_entity_state_condition,
|
||||
)
|
||||
from homeassistant.helpers.entity import get_supported_features
|
||||
@@ -46,6 +46,20 @@ def _supports_feature(hass: HomeAssistant, entity_id: str, features: int) -> boo
|
||||
return False
|
||||
|
||||
|
||||
class IsTargetHumidityCondition(EntityNumericalConditionBase):
|
||||
"""Condition for humidifier target humidity."""
|
||||
|
||||
_domain_specs = {DOMAIN: DomainSpec(value_source=ATTR_HUMIDITY)}
|
||||
_valid_unit = PERCENTAGE
|
||||
|
||||
def _should_include(self, state: State) -> bool:
|
||||
"""Skip humidifier entities that do not expose a target humidity."""
|
||||
return (
|
||||
super()._should_include(state)
|
||||
and state.attributes.get(ATTR_HUMIDITY) is not None
|
||||
)
|
||||
|
||||
|
||||
class IsModeCondition(EntityStateConditionBase):
|
||||
"""Condition for humidifier mode."""
|
||||
|
||||
@@ -79,10 +93,7 @@ CONDITIONS: dict[str, type[Condition]] = {
|
||||
{DOMAIN: DomainSpec(value_source=ATTR_ACTION)}, HumidifierAction.HUMIDIFYING
|
||||
),
|
||||
"is_mode": IsModeCondition,
|
||||
"is_target_humidity": make_entity_numerical_condition(
|
||||
{DOMAIN: DomainSpec(value_source=ATTR_HUMIDITY)},
|
||||
valid_unit=PERCENTAGE,
|
||||
),
|
||||
"is_target_humidity": IsTargetHumidityCondition,
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -14,9 +14,9 @@ from homeassistant.components.weather import (
|
||||
DOMAIN as WEATHER_DOMAIN,
|
||||
)
|
||||
from homeassistant.const import PERCENTAGE
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.core import HomeAssistant, State
|
||||
from homeassistant.helpers.automation import DomainSpec
|
||||
from homeassistant.helpers.condition import Condition, make_entity_numerical_condition
|
||||
from homeassistant.helpers.condition import Condition, EntityNumericalConditionBase
|
||||
|
||||
HUMIDITY_DOMAIN_SPECS = {
|
||||
CLIMATE_DOMAIN: DomainSpec(
|
||||
@@ -31,8 +31,31 @@ HUMIDITY_DOMAIN_SPECS = {
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
class HumidityCondition(EntityNumericalConditionBase):
|
||||
"""Condition for humidity value across multiple domains."""
|
||||
|
||||
_domain_specs = HUMIDITY_DOMAIN_SPECS
|
||||
_valid_unit = PERCENTAGE
|
||||
|
||||
def _should_include(self, state: State) -> bool:
|
||||
"""Skip attribute-source entities that lack the humidity attribute.
|
||||
|
||||
Mirrors the humidity trigger: for climate / humidifier / weather
|
||||
(attribute-based), the entity is filtered when the source attribute
|
||||
is absent; sensor entities (state-value-based) fall through to the
|
||||
base impl.
|
||||
"""
|
||||
if not super()._should_include(state):
|
||||
return False
|
||||
domain_spec = self._domain_specs[state.domain]
|
||||
if domain_spec.value_source is None:
|
||||
return True
|
||||
return state.attributes.get(domain_spec.value_source) is not None
|
||||
|
||||
|
||||
CONDITIONS: dict[str, type[Condition]] = {
|
||||
"is_value": make_entity_numerical_condition(HUMIDITY_DOMAIN_SPECS, PERCENTAGE),
|
||||
"is_value": HumidityCondition,
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -13,12 +13,13 @@ from homeassistant.components.weather import (
|
||||
ATTR_WEATHER_HUMIDITY,
|
||||
DOMAIN as WEATHER_DOMAIN,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.core import HomeAssistant, State
|
||||
from homeassistant.helpers.automation import DomainSpec
|
||||
from homeassistant.helpers.trigger import (
|
||||
EntityNumericalStateChangedTriggerBase,
|
||||
EntityNumericalStateCrossedThresholdTriggerBase,
|
||||
EntityNumericalStateTriggerBase,
|
||||
Trigger,
|
||||
make_entity_numerical_state_changed_trigger,
|
||||
make_entity_numerical_state_crossed_threshold_trigger,
|
||||
)
|
||||
|
||||
HUMIDITY_DOMAIN_SPECS: dict[str, DomainSpec] = {
|
||||
@@ -36,13 +37,46 @@ HUMIDITY_DOMAIN_SPECS: dict[str, DomainSpec] = {
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
class _HumidityTriggerMixin(EntityNumericalStateTriggerBase):
|
||||
"""Mixin for humidity triggers providing entity filtering."""
|
||||
|
||||
_domain_specs = HUMIDITY_DOMAIN_SPECS
|
||||
_valid_unit = "%"
|
||||
|
||||
def _should_include(self, state: State) -> bool:
|
||||
"""Skip attribute-source entities that lack the humidity attribute.
|
||||
|
||||
For domains whose tracked value comes from an attribute
|
||||
(climate / humidifier / weather), require the attribute to be
|
||||
present; otherwise the all/count check would treat an entity that
|
||||
cannot report a humidity as a non-match and block behavior=last.
|
||||
Sensor entities source their value from `state.state`, so they
|
||||
fall through to the base impl.
|
||||
"""
|
||||
if not super()._should_include(state):
|
||||
return False
|
||||
domain_spec = self._domain_specs[state.domain]
|
||||
if domain_spec.value_source is None:
|
||||
return True
|
||||
return state.attributes.get(domain_spec.value_source) is not None
|
||||
|
||||
|
||||
class HumidityChangedTrigger(
|
||||
_HumidityTriggerMixin, EntityNumericalStateChangedTriggerBase
|
||||
):
|
||||
"""Trigger for humidity value changes across multiple domains."""
|
||||
|
||||
|
||||
class HumidityCrossedThresholdTrigger(
|
||||
_HumidityTriggerMixin, EntityNumericalStateCrossedThresholdTriggerBase
|
||||
):
|
||||
"""Trigger for humidity value crossing a threshold across multiple domains."""
|
||||
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"changed": make_entity_numerical_state_changed_trigger(
|
||||
HUMIDITY_DOMAIN_SPECS, valid_unit="%"
|
||||
),
|
||||
"crossed_threshold": make_entity_numerical_state_crossed_threshold_trigger(
|
||||
HUMIDITY_DOMAIN_SPECS, valid_unit="%"
|
||||
),
|
||||
"changed": HumidityChangedTrigger,
|
||||
"crossed_threshold": HumidityCrossedThresholdTrigger,
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -13,5 +13,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/husqvarna_automower_ble",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["automower-ble==0.2.8", "gardena-bluetooth==2.4.0"]
|
||||
"requirements": ["automower-ble==0.2.8", "gardena-bluetooth==2.8.1"]
|
||||
}
|
||||
|
||||
@@ -46,6 +46,11 @@ async def async_setup_entry(
|
||||
water_use_coordinator = HydrawiseWaterUseDataUpdateCoordinator(
|
||||
hass, config_entry, hydrawise, main_coordinator
|
||||
)
|
||||
# async_track_zones is registered first on water_use_coordinator,
|
||||
# so the water-use coordinator's data is in sync before
|
||||
# callbacks below construct entities for newly added zones.
|
||||
water_use_coordinator.async_track_zones()
|
||||
main_coordinator.async_track_zones()
|
||||
await water_use_coordinator.async_config_entry_first_refresh()
|
||||
config_entry.runtime_data = HydrawiseUpdateCoordinators(
|
||||
main=main_coordinator,
|
||||
|
||||
@@ -82,6 +82,10 @@ class HydrawiseMainDataUpdateCoordinator(HydrawiseDataUpdateCoordinator):
|
||||
self.new_zones_callbacks: list[
|
||||
Callable[[Iterable[tuple[Zone, Controller]]], None]
|
||||
] = []
|
||||
|
||||
@callback
|
||||
def async_track_zones(self) -> None:
|
||||
"""Begin tracking zone and controller add/remove on updates."""
|
||||
self.async_add_listener(self._add_remove_zones)
|
||||
|
||||
async def _async_update_data(self) -> HydrawiseData:
|
||||
@@ -198,6 +202,23 @@ class HydrawiseWaterUseDataUpdateCoordinator(HydrawiseDataUpdateCoordinator):
|
||||
self.api = api
|
||||
self._main_coordinator = main_coordinator
|
||||
|
||||
@callback
|
||||
def async_track_zones(self) -> None:
|
||||
"""Begin tracking zone and controller add/remove on updates."""
|
||||
self._main_coordinator.async_add_listener(self._sync_data_from_main)
|
||||
|
||||
@callback
|
||||
def _sync_data_from_main(self) -> None:
|
||||
"""Sync data references from the main coordinator after it updates."""
|
||||
if self.data is None or self._main_coordinator.data is None:
|
||||
return # type: ignore[unreachable]
|
||||
main_data = self._main_coordinator.data
|
||||
self.data.user = main_data.user
|
||||
self.data.controllers = main_data.controllers
|
||||
self.data.zones = main_data.zones
|
||||
self.data.zone_id_to_controller = main_data.zone_id_to_controller
|
||||
self.data.sensors = main_data.sensors
|
||||
|
||||
async def _async_update_data(self) -> HydrawiseData:
|
||||
"""Fetch the latest data from Hydrawise."""
|
||||
daily_water_summary: dict[int, ControllerWaterUseSummary] = {}
|
||||
|
||||
@@ -76,14 +76,12 @@ CONFIG_SCHEMA = vol.Schema(
|
||||
vol.Optional(CONF_SEARCH, default="UnSeen UnDeleted"): str,
|
||||
# The default for new entries is to not include text and headers
|
||||
vol.Optional(CONF_EVENT_MESSAGE_DATA, default=[]): EVENT_MESSAGE_DATA_SELECTOR,
|
||||
vol.Optional(
|
||||
CONF_SSL_CIPHER_LIST, default=SSLCipherList.PYTHON_DEFAULT
|
||||
): CIPHER_SELECTOR,
|
||||
vol.Optional(CONF_VERIFY_SSL, default=True): BOOLEAN_SELECTOR,
|
||||
}
|
||||
)
|
||||
CONFIG_SCHEMA_ADVANCED = {
|
||||
vol.Optional(
|
||||
CONF_SSL_CIPHER_LIST, default=SSLCipherList.PYTHON_DEFAULT
|
||||
): CIPHER_SELECTOR,
|
||||
vol.Optional(CONF_VERIFY_SSL, default=True): BOOLEAN_SELECTOR,
|
||||
}
|
||||
|
||||
OPTIONS_SCHEMA = vol.Schema(
|
||||
{
|
||||
@@ -93,18 +91,15 @@ OPTIONS_SCHEMA = vol.Schema(
|
||||
vol.Optional(
|
||||
CONF_EVENT_MESSAGE_DATA, default=MESSAGE_DATA_OPTIONS
|
||||
): EVENT_MESSAGE_DATA_SELECTOR,
|
||||
vol.Optional(CONF_CUSTOM_EVENT_DATA_TEMPLATE): TEMPLATE_SELECTOR,
|
||||
vol.Optional(CONF_MAX_MESSAGE_SIZE, default=DEFAULT_MAX_MESSAGE_SIZE): vol.All(
|
||||
cv.positive_int,
|
||||
vol.Range(min=DEFAULT_MAX_MESSAGE_SIZE, max=MAX_MESSAGE_SIZE_LIMIT),
|
||||
),
|
||||
vol.Optional(CONF_ENABLE_PUSH, default=True): BOOLEAN_SELECTOR,
|
||||
}
|
||||
)
|
||||
|
||||
OPTIONS_SCHEMA_ADVANCED = {
|
||||
vol.Optional(CONF_CUSTOM_EVENT_DATA_TEMPLATE): TEMPLATE_SELECTOR,
|
||||
vol.Optional(CONF_MAX_MESSAGE_SIZE, default=DEFAULT_MAX_MESSAGE_SIZE): vol.All(
|
||||
cv.positive_int,
|
||||
vol.Range(min=DEFAULT_MAX_MESSAGE_SIZE, max=MAX_MESSAGE_SIZE_LIMIT),
|
||||
),
|
||||
vol.Optional(CONF_ENABLE_PUSH, default=True): BOOLEAN_SELECTOR,
|
||||
}
|
||||
|
||||
|
||||
async def validate_input(
|
||||
hass: HomeAssistant, user_input: dict[str, Any]
|
||||
@@ -151,8 +146,6 @@ class IMAPConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle the initial step."""
|
||||
|
||||
schema = CONFIG_SCHEMA
|
||||
if self.show_advanced_options:
|
||||
schema = schema.extend(CONFIG_SCHEMA_ADVANCED)
|
||||
|
||||
if user_input is None:
|
||||
return self.async_show_form(step_id="user", data_schema=schema)
|
||||
@@ -250,8 +243,6 @@ class ImapOptionsFlow(OptionsFlow):
|
||||
return self.async_create_entry(data={})
|
||||
|
||||
schema = OPTIONS_SCHEMA
|
||||
if self.show_advanced_options:
|
||||
schema = schema.extend(OPTIONS_SCHEMA_ADVANCED)
|
||||
schema = self.add_suggested_values_to_schema(schema, entry_data)
|
||||
|
||||
return self.async_show_form(step_id="init", data_schema=schema, errors=errors)
|
||||
|
||||
@@ -5,7 +5,7 @@ from datetime import timedelta
|
||||
import logging
|
||||
from typing import final
|
||||
|
||||
from infrared_protocols import Command as InfraredCommand
|
||||
from infrared_protocols.commands import Command as InfraredCommand
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import STATE_UNAVAILABLE
|
||||
|
||||
@@ -5,5 +5,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/infrared",
|
||||
"integration_type": "entity",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["infrared-protocols==2.1.0"]
|
||||
"requirements": ["infrared-protocols==3.1.0"]
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
import asyncio
|
||||
|
||||
import aiohttp
|
||||
from intellifire4py import UnifiedFireplace
|
||||
from intellifire4py.cloud_interface import IntelliFireCloudInterface
|
||||
from intellifire4py.const import IntelliFireApiMode
|
||||
@@ -153,6 +154,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: IntellifireConfigEntry)
|
||||
raise ConfigEntryNotReady(
|
||||
"Initialization of fireplace timed out after 10 minutes"
|
||||
) from err
|
||||
except (aiohttp.ClientConnectionError, ConnectionError) as err:
|
||||
raise ConfigEntryNotReady(
|
||||
"Error communicating with fireplace during initialization"
|
||||
) from err
|
||||
|
||||
# Construct coordinator
|
||||
data_update_coordinator = IntellifireDataUpdateCoordinator(hass, entry, fireplace)
|
||||
|
||||
@@ -13,6 +13,7 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import (
|
||||
SOURCE_REAUTH,
|
||||
ConfigEntryState,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlow,
|
||||
@@ -287,10 +288,8 @@ class IntelliFireOptionsFlowHandler(OptionsFlow):
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
# Validate connectivity for requested modes if runtime data is available
|
||||
coordinator = self.config_entry.runtime_data
|
||||
if coordinator is not None:
|
||||
fireplace = coordinator.fireplace
|
||||
if self.config_entry.state is ConfigEntryState.LOADED:
|
||||
fireplace = self.config_entry.runtime_data.fireplace
|
||||
|
||||
# Refresh connectivity status before validating
|
||||
await fireplace.async_validate_connectivity()
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
from typing import Any
|
||||
|
||||
import infrared_protocols
|
||||
from infrared_protocols.commands.nec import NECCommand
|
||||
|
||||
from homeassistant.components.fan import FanEntity, FanEntityFeature
|
||||
from homeassistant.components.infrared import async_send_command
|
||||
@@ -102,7 +102,7 @@ class DemoInfraredFan(FanEntity):
|
||||
|
||||
async def _send_command(self, command_code: int) -> None:
|
||||
"""Send an IR command using the NEC protocol."""
|
||||
command = infrared_protocols.NECCommand(
|
||||
command = NECCommand(
|
||||
address=DUMMY_FAN_ADDRESS,
|
||||
command=command_code,
|
||||
modulation=38000,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"""Demo platform that offers a fake infrared entity."""
|
||||
|
||||
import infrared_protocols
|
||||
from infrared_protocols.commands import Command as InfraredCommand
|
||||
|
||||
from homeassistant.components import persistent_notification
|
||||
from homeassistant.components.infrared import InfraredEntity
|
||||
@@ -51,7 +51,7 @@ class DemoInfrared(InfraredEntity):
|
||||
)
|
||||
self._attr_name = entity_name
|
||||
|
||||
async def async_send_command(self, command: infrared_protocols.Command) -> None:
|
||||
async def async_send_command(self, command: InfraredCommand) -> None:
|
||||
"""Send an IR command."""
|
||||
persistent_notification.async_create(
|
||||
self.hass, str(command.get_raw_timings()), title="Infrared Command"
|
||||
|
||||
@@ -0,0 +1,90 @@
|
||||
{
|
||||
"entity": {
|
||||
"button": {
|
||||
"back": {
|
||||
"default": "mdi:keyboard-backspace"
|
||||
},
|
||||
"down": {
|
||||
"default": "mdi:arrow-down"
|
||||
},
|
||||
"exit": {
|
||||
"default": "mdi:exit-to-app"
|
||||
},
|
||||
"guide": {
|
||||
"default": "mdi:television-guide"
|
||||
},
|
||||
"hdmi_1": {
|
||||
"default": "mdi:video-input-hdmi"
|
||||
},
|
||||
"hdmi_2": {
|
||||
"default": "mdi:video-input-hdmi"
|
||||
},
|
||||
"hdmi_3": {
|
||||
"default": "mdi:video-input-hdmi"
|
||||
},
|
||||
"hdmi_4": {
|
||||
"default": "mdi:video-input-hdmi"
|
||||
},
|
||||
"home": {
|
||||
"default": "mdi:home"
|
||||
},
|
||||
"info": {
|
||||
"default": "mdi:information-outline"
|
||||
},
|
||||
"input": {
|
||||
"default": "mdi:import"
|
||||
},
|
||||
"left": {
|
||||
"default": "mdi:arrow-left"
|
||||
},
|
||||
"menu": {
|
||||
"default": "mdi:menu"
|
||||
},
|
||||
"num_0": {
|
||||
"default": "mdi:numeric-0"
|
||||
},
|
||||
"num_1": {
|
||||
"default": "mdi:numeric-1"
|
||||
},
|
||||
"num_2": {
|
||||
"default": "mdi:numeric-2"
|
||||
},
|
||||
"num_3": {
|
||||
"default": "mdi:numeric-3"
|
||||
},
|
||||
"num_4": {
|
||||
"default": "mdi:numeric-4"
|
||||
},
|
||||
"num_5": {
|
||||
"default": "mdi:numeric-5"
|
||||
},
|
||||
"num_6": {
|
||||
"default": "mdi:numeric-6"
|
||||
},
|
||||
"num_7": {
|
||||
"default": "mdi:numeric-7"
|
||||
},
|
||||
"num_8": {
|
||||
"default": "mdi:numeric-8"
|
||||
},
|
||||
"num_9": {
|
||||
"default": "mdi:numeric-9"
|
||||
},
|
||||
"ok": {
|
||||
"default": "mdi:check"
|
||||
},
|
||||
"power_off": {
|
||||
"default": "mdi:power-off"
|
||||
},
|
||||
"power_on": {
|
||||
"default": "mdi:power-on"
|
||||
},
|
||||
"right": {
|
||||
"default": "mdi:arrow-right"
|
||||
},
|
||||
"up": {
|
||||
"default": "mdi:arrow-up"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -18,6 +18,8 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.util.percentage import (
|
||||
ordered_list_item_to_percentage,
|
||||
percentage_to_ordered_list_item,
|
||||
percentage_to_ranged_value,
|
||||
ranged_value_to_percentage,
|
||||
)
|
||||
|
||||
from . import ThinqConfigEntry
|
||||
@@ -33,6 +35,11 @@ class ThinQFanEntityDescription(FanEntityDescription):
|
||||
preset_modes: list[str] | None = None
|
||||
|
||||
|
||||
HOOD_FAN_DESC = FanEntityDescription(
|
||||
key=ThinQProperty.FAN_SPEED,
|
||||
translation_key=ThinQProperty.FAN_SPEED,
|
||||
)
|
||||
|
||||
DEVICE_TYPE_FAN_MAP: dict[DeviceType, tuple[ThinQFanEntityDescription, ...]] = {
|
||||
DeviceType.CEILING_FAN: (
|
||||
ThinQFanEntityDescription(
|
||||
@@ -52,6 +59,8 @@ DEVICE_TYPE_FAN_MAP: dict[DeviceType, tuple[ThinQFanEntityDescription, ...]] = {
|
||||
),
|
||||
}
|
||||
|
||||
HOOD_DEVICE_TYPES: set[DeviceType] = {DeviceType.HOOD, DeviceType.MICROWAVE_OVEN}
|
||||
|
||||
ORDERED_NAMED_FAN_SPEEDS = ["low", "mid", "high", "turbo", "power"]
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -63,11 +72,20 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up an entry for fan platform."""
|
||||
entities: list[ThinQFanEntity] = []
|
||||
entities: list[ThinQFanEntity | ThinQHoodFanEntity] = []
|
||||
for coordinator in entry.runtime_data.coordinators.values():
|
||||
if (
|
||||
descriptions := DEVICE_TYPE_FAN_MAP.get(coordinator.api.device.device_type)
|
||||
) is not None:
|
||||
device_type = coordinator.api.device.device_type
|
||||
|
||||
# Handle hood-type devices with numeric fan speed
|
||||
if device_type in HOOD_DEVICE_TYPES:
|
||||
entities.extend(
|
||||
ThinQHoodFanEntity(coordinator, HOOD_FAN_DESC, property_id)
|
||||
for property_id in coordinator.api.get_active_idx(
|
||||
HOOD_FAN_DESC.key, ActiveMode.READ_WRITE
|
||||
)
|
||||
)
|
||||
# Handle other fan devices with named speeds
|
||||
elif (descriptions := DEVICE_TYPE_FAN_MAP.get(device_type)) is not None:
|
||||
for description in descriptions:
|
||||
entities.extend(
|
||||
ThinQFanEntity(coordinator, description, property_id)
|
||||
@@ -210,3 +228,112 @@ class ThinQFanEntity(ThinQEntity, FanEntity):
|
||||
await self.async_call_api(
|
||||
self.coordinator.api.async_turn_off(self._operation_id)
|
||||
)
|
||||
|
||||
|
||||
class ThinQHoodFanEntity(ThinQEntity, FanEntity):
|
||||
"""Represent a thinq hood fan platform.
|
||||
|
||||
Hood fans use numeric speed values (e.g., 0=off, 1=low, 2=high)
|
||||
rather than named speed presets.
|
||||
"""
|
||||
|
||||
_attr_supported_features = (
|
||||
FanEntityFeature.SET_SPEED
|
||||
| FanEntityFeature.TURN_ON
|
||||
| FanEntityFeature.TURN_OFF
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: DeviceDataUpdateCoordinator,
|
||||
entity_description: FanEntityDescription,
|
||||
property_id: str,
|
||||
) -> None:
|
||||
"""Initialize hood fan platform."""
|
||||
super().__init__(coordinator, entity_description, property_id)
|
||||
|
||||
self._min_speed: int = int(self.data.min)
|
||||
self._max_speed: int = int(self.data.max)
|
||||
|
||||
# Speed count is the number of non-zero speeds
|
||||
self._attr_speed_count = self._max_speed - self._min_speed
|
||||
|
||||
@property
|
||||
def _speed_range(self) -> tuple[int, int]:
|
||||
"""Return the speed range excluding off (0)."""
|
||||
return (self._min_speed + 1, self._max_speed)
|
||||
|
||||
def _update_status(self) -> None:
|
||||
"""Update status itself."""
|
||||
super()._update_status()
|
||||
|
||||
# Get current speed value
|
||||
current_speed = self.data.value
|
||||
if current_speed is None or current_speed == self._min_speed:
|
||||
self._attr_is_on = False
|
||||
self._attr_percentage = 0
|
||||
else:
|
||||
self._attr_is_on = True
|
||||
self._attr_percentage = ranged_value_to_percentage(
|
||||
self._speed_range, current_speed
|
||||
)
|
||||
|
||||
_LOGGER.debug(
|
||||
"[%s:%s] update status: is_on=%s, percentage=%s, speed=%s, min=%s, max=%s",
|
||||
self.coordinator.device_name,
|
||||
self.property_id,
|
||||
self.is_on,
|
||||
self.percentage,
|
||||
current_speed,
|
||||
self._min_speed,
|
||||
self._max_speed,
|
||||
)
|
||||
|
||||
async def async_set_percentage(self, percentage: int) -> None:
|
||||
"""Set the speed percentage of the fan."""
|
||||
if percentage == 0:
|
||||
await self.async_turn_off()
|
||||
return
|
||||
|
||||
speed = round(percentage_to_ranged_value(self._speed_range, percentage))
|
||||
|
||||
_LOGGER.debug(
|
||||
"[%s:%s] async_set_percentage: percentage=%s -> speed=%s",
|
||||
self.coordinator.device_name,
|
||||
self.property_id,
|
||||
percentage,
|
||||
speed,
|
||||
)
|
||||
await self.async_call_api(self.coordinator.api.post(self.property_id, speed))
|
||||
|
||||
async def async_turn_on(
|
||||
self,
|
||||
percentage: int | None = None,
|
||||
preset_mode: str | None = None,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
"""Turn on the fan."""
|
||||
if percentage is not None:
|
||||
await self.async_set_percentage(percentage)
|
||||
return
|
||||
|
||||
# Default to lowest non-zero speed
|
||||
speed = self._min_speed + 1
|
||||
_LOGGER.debug(
|
||||
"[%s:%s] async_turn_on: speed=%s",
|
||||
self.coordinator.device_name,
|
||||
self.property_id,
|
||||
speed,
|
||||
)
|
||||
await self.async_call_api(self.coordinator.api.post(self.property_id, speed))
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the fan off."""
|
||||
_LOGGER.debug(
|
||||
"[%s:%s] async_turn_off",
|
||||
self.coordinator.device_name,
|
||||
self.property_id,
|
||||
)
|
||||
await self.async_call_api(
|
||||
self.coordinator.api.post(self.property_id, self._min_speed)
|
||||
)
|
||||
|
||||
@@ -6,23 +6,33 @@ from thinqconnect import DeviceType
|
||||
from thinqconnect.devices.const import Property as ThinQProperty
|
||||
from thinqconnect.integration import ActiveMode, TimerProperty
|
||||
|
||||
from homeassistant.components.automation import automations_with_entity
|
||||
from homeassistant.components.number import (
|
||||
NumberDeviceClass,
|
||||
NumberEntity,
|
||||
NumberEntityDescription,
|
||||
NumberMode,
|
||||
)
|
||||
from homeassistant.components.script import scripts_with_entity
|
||||
from homeassistant.const import PERCENTAGE, UnitOfTemperature, UnitOfTime
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.issue_registry import (
|
||||
IssueSeverity,
|
||||
async_create_issue,
|
||||
async_delete_issue,
|
||||
)
|
||||
|
||||
from . import ThinqConfigEntry
|
||||
from .const import DOMAIN
|
||||
from .entity import ThinQEntity
|
||||
|
||||
NUMBER_DESC: dict[ThinQProperty, NumberEntityDescription] = {
|
||||
ThinQProperty.FAN_SPEED: NumberEntityDescription(
|
||||
key=ThinQProperty.FAN_SPEED,
|
||||
translation_key=ThinQProperty.FAN_SPEED,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
ThinQProperty.LAMP_BRIGHTNESS: NumberEntityDescription(
|
||||
key=ThinQProperty.LAMP_BRIGHTNESS,
|
||||
@@ -126,9 +136,71 @@ DEVICE_TYPE_NUMBER_MAP: dict[DeviceType, tuple[NumberEntityDescription, ...]] =
|
||||
),
|
||||
}
|
||||
|
||||
DEPRECATED_FAN_SPEED_DEVICE_TYPES: set[DeviceType] = {
|
||||
DeviceType.HOOD,
|
||||
DeviceType.MICROWAVE_OVEN,
|
||||
}
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _check_deprecated_fan_speed_entity(
|
||||
hass: HomeAssistant,
|
||||
entity_registry: er.EntityRegistry,
|
||||
unique_id: str,
|
||||
) -> bool:
|
||||
"""Check if a deprecated fan speed number entity should be created.
|
||||
|
||||
Returns True if the entity exists and is enabled (should still be created).
|
||||
"""
|
||||
if not (
|
||||
entity_id := entity_registry.async_get_entity_id("number", DOMAIN, unique_id)
|
||||
):
|
||||
return False
|
||||
|
||||
entity_entry = entity_registry.async_get(entity_id)
|
||||
if not entity_entry:
|
||||
return False
|
||||
|
||||
if entity_entry.disabled:
|
||||
entity_registry.async_remove(entity_id)
|
||||
async_delete_issue(hass, DOMAIN, f"deprecated_fan_speed_number_{entity_id}")
|
||||
return False
|
||||
|
||||
translation_key = "deprecated_fan_speed_number"
|
||||
placeholders: dict[str, str] = {
|
||||
"entity_id": entity_id,
|
||||
"entity_name": entity_entry.name or entity_entry.original_name or "Unknown",
|
||||
}
|
||||
|
||||
automation_entities = automations_with_entity(hass, entity_id)
|
||||
script_entities = scripts_with_entity(hass, entity_id)
|
||||
if automation_entities or script_entities:
|
||||
translation_key = f"{translation_key}_scripts"
|
||||
placeholders["items"] = "\n".join(
|
||||
f"- [{item.original_name}](/config/{integration}/edit/{item.unique_id})"
|
||||
for integration, entities in (
|
||||
("automation", automation_entities),
|
||||
("script", script_entities),
|
||||
)
|
||||
for eid in entities
|
||||
if (item := entity_registry.async_get(eid))
|
||||
)
|
||||
|
||||
async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
f"deprecated_fan_speed_number_{entity_id}",
|
||||
breaks_in_ha_version="2026.12.0",
|
||||
is_fixable=True,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key=translation_key,
|
||||
translation_placeholders=placeholders,
|
||||
data={"entity_id": entity_id, **placeholders},
|
||||
)
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: ThinqConfigEntry,
|
||||
@@ -136,18 +208,27 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up an entry for number platform."""
|
||||
entities: list[ThinQNumberEntity] = []
|
||||
entity_registry = er.async_get(hass)
|
||||
for coordinator in entry.runtime_data.coordinators.values():
|
||||
if (
|
||||
descriptions := DEVICE_TYPE_NUMBER_MAP.get(
|
||||
coordinator.api.device.device_type
|
||||
)
|
||||
) is not None:
|
||||
for description in descriptions:
|
||||
entities.extend(
|
||||
descriptions = DEVICE_TYPE_NUMBER_MAP.get(coordinator.api.device.device_type)
|
||||
if descriptions is None:
|
||||
continue
|
||||
for description in descriptions:
|
||||
for property_id in coordinator.api.get_active_idx(
|
||||
description.key, ActiveMode.READ_WRITE
|
||||
):
|
||||
if (
|
||||
description.key == ThinQProperty.FAN_SPEED
|
||||
and coordinator.api.device.device_type
|
||||
in DEPRECATED_FAN_SPEED_DEVICE_TYPES
|
||||
):
|
||||
unique_id = f"{coordinator.unique_id}_{property_id}"
|
||||
if not _check_deprecated_fan_speed_entity(
|
||||
hass, entity_registry, unique_id
|
||||
):
|
||||
continue
|
||||
entities.append(
|
||||
ThinQNumberEntity(coordinator, description, property_id)
|
||||
for property_id in coordinator.api.get_active_idx(
|
||||
description.key, ActiveMode.READ_WRITE
|
||||
)
|
||||
)
|
||||
|
||||
if entities:
|
||||
|
||||
@@ -0,0 +1,53 @@
|
||||
"""Repairs for LG ThinQ integration."""
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import data_entry_flow
|
||||
from homeassistant.components.repairs import ConfirmRepairFlow, RepairsFlow
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
|
||||
class DeprecatedFanSpeedRepairFlow(RepairsFlow):
|
||||
"""Handler for deprecated fan speed number entity fixing flow."""
|
||||
|
||||
def __init__(self, data: dict[str, str]) -> None:
|
||||
"""Initialize."""
|
||||
self.entity_id = data["entity_id"]
|
||||
self._placeholders = data
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, str] | None = None
|
||||
) -> data_entry_flow.FlowResult:
|
||||
"""Handle the first step of a fix flow."""
|
||||
return await self.async_step_confirm()
|
||||
|
||||
async def async_step_confirm(
|
||||
self, user_input: dict[str, str] | None = None
|
||||
) -> data_entry_flow.FlowResult:
|
||||
"""Handle the confirm step of a fix flow."""
|
||||
if user_input is not None:
|
||||
entity_registry = er.async_get(self.hass)
|
||||
if entity_registry.async_get(self.entity_id):
|
||||
entity_registry.async_update_entity(
|
||||
self.entity_id,
|
||||
disabled_by=er.RegistryEntryDisabler.USER,
|
||||
)
|
||||
return self.async_create_entry(data={})
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="confirm",
|
||||
data_schema=vol.Schema({}),
|
||||
description_placeholders=self._placeholders,
|
||||
)
|
||||
|
||||
|
||||
async def async_create_fix_flow(
|
||||
hass: HomeAssistant,
|
||||
issue_id: str,
|
||||
data: dict[str, str],
|
||||
) -> RepairsFlow:
|
||||
"""Create flow."""
|
||||
if issue_id.startswith("deprecated_fan_speed_number_"):
|
||||
return DeprecatedFanSpeedRepairFlow(data)
|
||||
return ConfirmRepairFlow()
|
||||
@@ -199,6 +199,11 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"fan": {
|
||||
"fan_speed": {
|
||||
"name": "Hood"
|
||||
}
|
||||
},
|
||||
"humidifier": {
|
||||
"dehumidifier": {
|
||||
"state_attributes": {
|
||||
@@ -1154,5 +1159,29 @@
|
||||
"failed_to_connect_mqtt": {
|
||||
"message": "Failed to connect MQTT: {error}"
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"deprecated_fan_speed_number": {
|
||||
"fix_flow": {
|
||||
"step": {
|
||||
"confirm": {
|
||||
"description": "The number entity {entity_name} (`{entity_id}`) is deprecated because it has been replaced with a fan entity.\n\nPlease update your dashboards and templates to use the new fan entity.\n\nAfter updating, click **Submit** to disable the number entity and fix this issue.",
|
||||
"title": "Fan speed number entity deprecated"
|
||||
}
|
||||
}
|
||||
},
|
||||
"title": "[%key:component::lg_thinq::issues::deprecated_fan_speed_number::fix_flow::step::confirm::title%]"
|
||||
},
|
||||
"deprecated_fan_speed_number_scripts": {
|
||||
"fix_flow": {
|
||||
"step": {
|
||||
"confirm": {
|
||||
"description": "The number entity {entity_name} (`{entity_id}`) is deprecated because it has been replaced with a fan entity.\n\nThe entity was used in the following automations or scripts:\n{items}\n\nPlease update the above automations or scripts to use the new fan entity.\n\nAfter updating, click **Submit** to disable the number entity and fix this issue.",
|
||||
"title": "[%key:component::lg_thinq::issues::deprecated_fan_speed_number::fix_flow::step::confirm::title%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"title": "[%key:component::lg_thinq::issues::deprecated_fan_speed_number::fix_flow::step::confirm::title%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
import logging
|
||||
from typing import Final
|
||||
|
||||
from lunatone_rest_api_client import Auth, DALIBroadcast, Devices, Info
|
||||
from lunatone_rest_api_client import Auth, DALIBroadcast, Devices, Info, Sensors
|
||||
|
||||
from homeassistant.const import CONF_URL, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -18,10 +18,11 @@ from .coordinator import (
|
||||
LunatoneData,
|
||||
LunatoneDevicesDataUpdateCoordinator,
|
||||
LunatoneInfoDataUpdateCoordinator,
|
||||
LunatoneSensorsDataUpdateCoordinator,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
PLATFORMS: Final[list[Platform]] = [Platform.LIGHT]
|
||||
PLATFORMS: Final[list[Platform]] = [Platform.LIGHT, Platform.SENSOR]
|
||||
|
||||
|
||||
async def _update_unique_id(
|
||||
@@ -70,6 +71,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: LunatoneConfigEntry) ->
|
||||
auth_api = Auth(async_get_clientsession(hass), entry.data[CONF_URL])
|
||||
info_api = Info(auth_api)
|
||||
devices_api = Devices(info_api)
|
||||
sensors_api = Sensors(auth_api)
|
||||
|
||||
coordinator_info = LunatoneInfoDataUpdateCoordinator(hass, entry, info_api)
|
||||
await coordinator_info.async_config_entry_first_refresh()
|
||||
@@ -105,6 +107,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: LunatoneConfigEntry) ->
|
||||
coordinator_devices = LunatoneDevicesDataUpdateCoordinator(hass, entry, devices_api)
|
||||
await coordinator_devices.async_config_entry_first_refresh()
|
||||
|
||||
coordinator_sensors = LunatoneSensorsDataUpdateCoordinator(hass, entry, sensors_api)
|
||||
await coordinator_sensors.async_config_entry_first_refresh()
|
||||
|
||||
dali_line_broadcasts = [
|
||||
DALIBroadcast(auth_api, int(line)) for line in coordinator_info.data.lines
|
||||
]
|
||||
@@ -112,6 +117,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: LunatoneConfigEntry) ->
|
||||
entry.runtime_data = LunatoneData(
|
||||
coordinator_info,
|
||||
coordinator_devices,
|
||||
coordinator_sensors,
|
||||
dali_line_broadcasts,
|
||||
)
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
@@ -5,7 +5,14 @@ from datetime import timedelta
|
||||
import logging
|
||||
|
||||
import aiohttp
|
||||
from lunatone_rest_api_client import DALIBroadcast, Device, Devices, Info
|
||||
from lunatone_rest_api_client import (
|
||||
DALIBroadcast,
|
||||
Device,
|
||||
Devices,
|
||||
Info,
|
||||
Sensor,
|
||||
Sensors,
|
||||
)
|
||||
from lunatone_rest_api_client.models import InfoData
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -18,6 +25,7 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_INFO_SCAN_INTERVAL = timedelta(seconds=60)
|
||||
DEFAULT_DEVICES_SCAN_INTERVAL = timedelta(seconds=10)
|
||||
DEFAULT_SENSORS_SCAN_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -26,6 +34,7 @@ class LunatoneData:
|
||||
|
||||
coordinator_info: LunatoneInfoDataUpdateCoordinator
|
||||
coordinator_devices: LunatoneDevicesDataUpdateCoordinator
|
||||
coordinator_sensors: LunatoneSensorsDataUpdateCoordinator
|
||||
dali_line_broadcasts: list[DALIBroadcast]
|
||||
|
||||
|
||||
@@ -98,5 +107,40 @@ class LunatoneDevicesDataUpdateCoordinator(DataUpdateCoordinator[dict[int, Devic
|
||||
|
||||
if self.devices_api.data is None:
|
||||
raise UpdateFailed("Did not receive devices data from Lunatone REST API")
|
||||
|
||||
return {device.id: device for device in self.devices_api.devices}
|
||||
|
||||
|
||||
class LunatoneSensorsDataUpdateCoordinator(DataUpdateCoordinator[dict[int, Sensor]]):
|
||||
"""Data update coordinator for Lunatone sensors."""
|
||||
|
||||
config_entry: LunatoneConfigEntry
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry: LunatoneConfigEntry,
|
||||
sensors_api: Sensors,
|
||||
) -> None:
|
||||
"""Initialize the coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=config_entry,
|
||||
name=f"{DOMAIN}-sensors",
|
||||
always_update=False,
|
||||
update_interval=DEFAULT_SENSORS_SCAN_INTERVAL,
|
||||
)
|
||||
self.sensors_api = sensors_api
|
||||
|
||||
async def _async_update_data(self) -> dict[int, Sensor]:
|
||||
"""Update sensor data."""
|
||||
try:
|
||||
await self.sensors_api.async_update()
|
||||
except aiohttp.ClientConnectionError as ex:
|
||||
raise UpdateFailed(
|
||||
"Unable to retrieve sensors data from Lunatone REST API"
|
||||
) from ex
|
||||
|
||||
if self.sensors_api.data is None:
|
||||
raise UpdateFailed("Did not receive sensors data from Lunatone REST API")
|
||||
return {sensor.id: sensor for sensor in self.sensors_api.sensors}
|
||||
|
||||
@@ -0,0 +1,157 @@
|
||||
"""Platform for Lunatone sensor integration."""
|
||||
|
||||
from typing import Final
|
||||
|
||||
from lunatone_rest_api_client import Sensor
|
||||
from lunatone_rest_api_client.models import SensorAddressType, SensorType
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
CONCENTRATION_PARTS_PER_BILLION,
|
||||
CONCENTRATION_PARTS_PER_MILLION,
|
||||
LIGHT_LUX,
|
||||
PERCENTAGE,
|
||||
UnitOfPressure,
|
||||
UnitOfTemperature,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import LunatoneConfigEntry, LunatoneSensorsDataUpdateCoordinator
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
SENSOR_TYPES: Final[dict[str, SensorEntityDescription]] = {
|
||||
SensorType.AIR_HUMIDITY: SensorEntityDescription(
|
||||
key="air_humidity",
|
||||
device_class=SensorDeviceClass.HUMIDITY,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorType.AIR_PRESSURE: SensorEntityDescription(
|
||||
key="air_pressure",
|
||||
device_class=SensorDeviceClass.PRESSURE,
|
||||
native_unit_of_measurement=UnitOfPressure.HPA,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorType.AIR_QUALITY: SensorEntityDescription(
|
||||
key="air_quality",
|
||||
device_class=SensorDeviceClass.AQI,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorType.ECO2: SensorEntityDescription(
|
||||
key="eco2",
|
||||
device_class=SensorDeviceClass.CO2,
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorType.LIGHT: SensorEntityDescription(
|
||||
key="light",
|
||||
device_class=SensorDeviceClass.ILLUMINANCE,
|
||||
native_unit_of_measurement=LIGHT_LUX,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorType.TEMPERATURE: SensorEntityDescription(
|
||||
key="temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorType.VOC: SensorEntityDescription(
|
||||
key="voc",
|
||||
device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS_PARTS,
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: LunatoneConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Lunatone sensors from the config entry."""
|
||||
coordinator_sensors = config_entry.runtime_data.coordinator_sensors
|
||||
|
||||
assert config_entry.unique_id is not None
|
||||
|
||||
async_add_entities(
|
||||
LunatoneSensor(
|
||||
coordinator_sensors, description, sensor_id, config_entry.unique_id
|
||||
)
|
||||
for sensor_id, sensor_data in coordinator_sensors.data.items()
|
||||
if (description := SENSOR_TYPES.get(sensor_data.data.type))
|
||||
)
|
||||
|
||||
|
||||
class LunatoneSensor(
|
||||
CoordinatorEntity[LunatoneSensorsDataUpdateCoordinator], SensorEntity
|
||||
):
|
||||
"""Representation of a Lunatone Sensor."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: LunatoneSensorsDataUpdateCoordinator,
|
||||
description: SensorEntityDescription,
|
||||
sensor_id: int,
|
||||
config_entry_unique_id: str,
|
||||
) -> None:
|
||||
"""Initialize a Lunatone Sensor."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = description
|
||||
|
||||
self._config_entry_unique_id = config_entry_unique_id
|
||||
self._sensor_id = sensor_id
|
||||
|
||||
self._attr_name = self.sensor.name
|
||||
self._attr_unique_id = (
|
||||
f"{config_entry_unique_id}-sensor{sensor_id}-{description.key}"
|
||||
)
|
||||
device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, self._config_entry_unique_id)},
|
||||
)
|
||||
if (
|
||||
self.sensor.data.address_type == SensorAddressType.DALI
|
||||
and self.sensor.data.dali_sensor_address
|
||||
):
|
||||
device_info = DeviceInfo(
|
||||
identifiers={
|
||||
(
|
||||
DOMAIN,
|
||||
f"{self._config_entry_unique_id}"
|
||||
f"-line{self.sensor.data.dali_sensor_address.line}"
|
||||
f"-d24-address{self.sensor.data.dali_sensor_address.address}",
|
||||
)
|
||||
},
|
||||
name=(
|
||||
f"DALI Line {self.sensor.data.dali_sensor_address.line}"
|
||||
f" - A{self.sensor.data.dali_sensor_address.address}\u00b2"
|
||||
),
|
||||
via_device=(DOMAIN, str(self._config_entry_unique_id)),
|
||||
)
|
||||
self._attr_device_info = device_info
|
||||
|
||||
@property
|
||||
def sensor(self) -> Sensor:
|
||||
"""Return the sensor data."""
|
||||
return self.coordinator.data[self._sensor_id]
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if entity is available."""
|
||||
return super().available and self._sensor_id in self.coordinator.data
|
||||
|
||||
@property
|
||||
def native_value(self) -> float | None:
|
||||
"""Return the measurement value of the sensor."""
|
||||
return self.sensor.data.value
|
||||
@@ -251,8 +251,10 @@ class MatterFan(MatterEntity, FanEntity):
|
||||
return
|
||||
self._feature_map = feature_map
|
||||
self._attr_supported_features = FanEntityFeature(0)
|
||||
# Reset to default so a featuremap change from MultiSpeed -> non-MultiSpeed
|
||||
# does not leave a stale speed_count / percentage_step.
|
||||
self._attr_speed_count = 100
|
||||
if feature_map & FanControlFeature.kMultiSpeed:
|
||||
self._attr_supported_features |= FanEntityFeature.SET_SPEED
|
||||
self._attr_speed_count = int(
|
||||
self.get_matter_attribute_value(clusters.FanControl.Attributes.SpeedMax)
|
||||
)
|
||||
@@ -302,8 +304,12 @@ class MatterFan(MatterEntity, FanEntity):
|
||||
if feature_map & FanControlFeature.kAirflowDirection:
|
||||
self._attr_supported_features |= FanEntityFeature.DIRECTION
|
||||
|
||||
# PercentSetting is always a mandatory attribute of the FanControl cluster,
|
||||
# so percentage-based speed control is always available.
|
||||
self._attr_supported_features |= (
|
||||
FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON
|
||||
FanEntityFeature.SET_SPEED
|
||||
| FanEntityFeature.TURN_OFF
|
||||
| FanEntityFeature.TURN_ON
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -1,11 +1,108 @@
|
||||
"""Provides conditions for media players."""
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.condition import Condition, make_entity_state_condition
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.core import HomeAssistant, State
|
||||
from homeassistant.helpers.automation import DomainSpec
|
||||
from homeassistant.helpers.condition import (
|
||||
Condition,
|
||||
EntityConditionBase,
|
||||
EntityNumericalConditionBase,
|
||||
make_entity_state_condition,
|
||||
)
|
||||
|
||||
from . import ATTR_MEDIA_VOLUME_LEVEL, ATTR_MEDIA_VOLUME_MUTED
|
||||
from .const import DOMAIN, MediaPlayerState
|
||||
|
||||
|
||||
class _MediaPlayerMutedConditionBase(EntityConditionBase):
|
||||
"""Base class for media player is_muted/is_unmuted conditions."""
|
||||
|
||||
_domain_specs = {DOMAIN: DomainSpec()}
|
||||
_target_muted: bool
|
||||
|
||||
def _state_valid_since(self, state: State) -> datetime:
|
||||
"""Anchor `for:` durations to `last_updated` for the muted attribute.
|
||||
|
||||
Needed because the domain spec does not reflect that the condition
|
||||
reads from the muted and volume attributes.
|
||||
"""
|
||||
return state.last_updated
|
||||
|
||||
def _has_volume_attributes(self, state: State) -> bool:
|
||||
"""Check if the state has volume muted or volume level attributes."""
|
||||
return (
|
||||
state.attributes.get(ATTR_MEDIA_VOLUME_MUTED) is not None
|
||||
or state.attributes.get(ATTR_MEDIA_VOLUME_LEVEL) is not None
|
||||
)
|
||||
|
||||
def _should_include(self, state: State) -> bool:
|
||||
"""Skip entities without volume attributes from the all/count check."""
|
||||
return super()._should_include(state) and self._has_volume_attributes(state)
|
||||
|
||||
def _is_muted(self, state: State) -> bool:
|
||||
"""Check if the media player is muted."""
|
||||
return (
|
||||
state.attributes.get(ATTR_MEDIA_VOLUME_MUTED) is True
|
||||
or state.attributes.get(ATTR_MEDIA_VOLUME_LEVEL) == 0
|
||||
)
|
||||
|
||||
def is_valid_state(self, entity_state: State) -> bool:
|
||||
"""Check if the entity state matches the targeted muted state."""
|
||||
if not self._has_volume_attributes(entity_state):
|
||||
return False
|
||||
return self._is_muted(entity_state) is self._target_muted
|
||||
|
||||
|
||||
class MediaPlayerIsMutedCondition(_MediaPlayerMutedConditionBase):
|
||||
"""Condition that passes when the media player is muted."""
|
||||
|
||||
_target_muted = True
|
||||
|
||||
|
||||
class MediaPlayerIsUnmutedCondition(_MediaPlayerMutedConditionBase):
|
||||
"""Condition that passes when the media player is not muted."""
|
||||
|
||||
_target_muted = False
|
||||
|
||||
|
||||
class MediaPlayerIsVolumeCondition(EntityNumericalConditionBase):
|
||||
"""Condition for media player volume level with 0.0-1.0 to percentage conversion."""
|
||||
|
||||
_domain_specs = {DOMAIN: DomainSpec(value_source=ATTR_MEDIA_VOLUME_LEVEL)}
|
||||
_valid_unit = "%"
|
||||
|
||||
def _get_tracked_value(self, entity_state: State) -> Any:
|
||||
"""Get the volume value converted from 0.0-1.0 to percentage (0-100)."""
|
||||
raw = super()._get_tracked_value(entity_state)
|
||||
if raw is None:
|
||||
return None
|
||||
try:
|
||||
return float(raw) * 100.0
|
||||
except TypeError, ValueError:
|
||||
return None
|
||||
|
||||
def _should_include(self, state: State) -> bool:
|
||||
"""Skip media players that do not expose a volume_level attribute."""
|
||||
return (
|
||||
super()._should_include(state)
|
||||
and state.attributes.get(ATTR_MEDIA_VOLUME_LEVEL) is not None
|
||||
)
|
||||
|
||||
|
||||
CONDITIONS: dict[str, type[Condition]] = {
|
||||
"is_muted": MediaPlayerIsMutedCondition,
|
||||
"is_not_playing": make_entity_state_condition(
|
||||
DOMAIN,
|
||||
{
|
||||
MediaPlayerState.BUFFERING,
|
||||
MediaPlayerState.IDLE,
|
||||
MediaPlayerState.OFF,
|
||||
MediaPlayerState.ON,
|
||||
MediaPlayerState.PAUSED,
|
||||
},
|
||||
),
|
||||
"is_off": make_entity_state_condition(DOMAIN, MediaPlayerState.OFF),
|
||||
"is_on": make_entity_state_condition(
|
||||
DOMAIN,
|
||||
@@ -17,18 +114,10 @@ CONDITIONS: dict[str, type[Condition]] = {
|
||||
MediaPlayerState.PLAYING,
|
||||
},
|
||||
),
|
||||
"is_not_playing": make_entity_state_condition(
|
||||
DOMAIN,
|
||||
{
|
||||
MediaPlayerState.BUFFERING,
|
||||
MediaPlayerState.IDLE,
|
||||
MediaPlayerState.OFF,
|
||||
MediaPlayerState.ON,
|
||||
MediaPlayerState.PAUSED,
|
||||
},
|
||||
),
|
||||
"is_paused": make_entity_state_condition(DOMAIN, MediaPlayerState.PAUSED),
|
||||
"is_playing": make_entity_state_condition(DOMAIN, MediaPlayerState.PLAYING),
|
||||
"is_unmuted": MediaPlayerIsUnmutedCondition,
|
||||
"is_volume": MediaPlayerIsVolumeCondition,
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -1,22 +1,51 @@
|
||||
.condition_common: &condition_common
|
||||
target:
|
||||
target: &condition_media_player_target
|
||||
entity:
|
||||
domain: media_player
|
||||
fields:
|
||||
behavior:
|
||||
behavior: &condition_behavior
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
automation_behavior:
|
||||
mode: condition
|
||||
for:
|
||||
for: &condition_for
|
||||
required: true
|
||||
default: 00:00:00
|
||||
selector:
|
||||
duration:
|
||||
|
||||
.volume_threshold_entity: &volume_threshold_entity
|
||||
- domain: input_number
|
||||
unit_of_measurement: "%"
|
||||
- domain: number
|
||||
unit_of_measurement: "%"
|
||||
- domain: sensor
|
||||
unit_of_measurement: "%"
|
||||
|
||||
.volume_threshold_number: &volume_threshold_number
|
||||
min: 0
|
||||
max: 100
|
||||
mode: box
|
||||
unit_of_measurement: "%"
|
||||
|
||||
is_muted: *condition_common
|
||||
is_off: *condition_common
|
||||
is_on: *condition_common
|
||||
is_not_playing: *condition_common
|
||||
is_paused: *condition_common
|
||||
is_playing: *condition_common
|
||||
is_unmuted: *condition_common
|
||||
|
||||
is_volume:
|
||||
target: *condition_media_player_target
|
||||
fields:
|
||||
behavior: *condition_behavior
|
||||
for: *condition_for
|
||||
threshold:
|
||||
required: true
|
||||
selector:
|
||||
numeric_threshold:
|
||||
entity: *volume_threshold_entity
|
||||
mode: is
|
||||
number: *volume_threshold_number
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
{
|
||||
"conditions": {
|
||||
"is_muted": {
|
||||
"condition": "mdi:volume-mute"
|
||||
},
|
||||
"is_not_playing": {
|
||||
"condition": "mdi:stop"
|
||||
},
|
||||
@@ -14,6 +17,12 @@
|
||||
},
|
||||
"is_playing": {
|
||||
"condition": "mdi:play"
|
||||
},
|
||||
"is_unmuted": {
|
||||
"condition": "mdi:volume-high"
|
||||
},
|
||||
"is_volume": {
|
||||
"condition": "mdi:volume-medium"
|
||||
}
|
||||
},
|
||||
"entity_component": {
|
||||
@@ -143,6 +152,12 @@
|
||||
},
|
||||
"unmuted": {
|
||||
"trigger": "mdi:volume-high"
|
||||
},
|
||||
"volume_changed": {
|
||||
"trigger": "mdi:volume-medium"
|
||||
},
|
||||
"volume_crossed_threshold": {
|
||||
"trigger": "mdi:volume-medium"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,10 +2,24 @@
|
||||
"common": {
|
||||
"condition_behavior_name": "Condition passes if",
|
||||
"condition_for_name": "For at least",
|
||||
"condition_threshold_name": "Threshold",
|
||||
"trigger_behavior_name": "Trigger when",
|
||||
"trigger_for_name": "For at least"
|
||||
"trigger_for_name": "For at least",
|
||||
"trigger_threshold_name": "Threshold"
|
||||
},
|
||||
"conditions": {
|
||||
"is_muted": {
|
||||
"description": "Tests if one or more media players are muted.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::media_player::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::media_player::common::condition_for_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Media player is muted"
|
||||
},
|
||||
"is_not_playing": {
|
||||
"description": "Tests if one or more media players are not playing.",
|
||||
"fields": {
|
||||
@@ -65,6 +79,33 @@
|
||||
}
|
||||
},
|
||||
"name": "Media player is playing"
|
||||
},
|
||||
"is_unmuted": {
|
||||
"description": "Tests if one or more media players are not muted.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::media_player::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::media_player::common::condition_for_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Media player is not muted"
|
||||
},
|
||||
"is_volume": {
|
||||
"description": "Tests the volume of one or more media players.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::media_player::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::media_player::common::condition_for_name%]"
|
||||
},
|
||||
"threshold": {
|
||||
"name": "[%key:component::media_player::common::condition_threshold_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Volume"
|
||||
}
|
||||
},
|
||||
"device_automation": {
|
||||
@@ -520,6 +561,30 @@
|
||||
}
|
||||
},
|
||||
"name": "Media player unmuted"
|
||||
},
|
||||
"volume_changed": {
|
||||
"description": "Triggers after the volume of one or more media players changes.",
|
||||
"fields": {
|
||||
"threshold": {
|
||||
"name": "[%key:component::media_player::common::trigger_threshold_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Media player volume changed"
|
||||
},
|
||||
"volume_crossed_threshold": {
|
||||
"description": "Triggers after the volume of one or more media players crosses a threshold.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::media_player::common::trigger_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::media_player::common::trigger_for_name%]"
|
||||
},
|
||||
"threshold": {
|
||||
"name": "[%key:component::media_player::common::trigger_threshold_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Media player volume crossed threshold"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
"""Provides triggers for media players."""
|
||||
|
||||
from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN
|
||||
from homeassistant.core import HomeAssistant, State
|
||||
from homeassistant.helpers.automation import DomainSpec
|
||||
from homeassistant.helpers.trigger import (
|
||||
EntityNumericalStateChangedTriggerBase,
|
||||
EntityNumericalStateCrossedThresholdTriggerBase,
|
||||
EntityNumericalStateTriggerBase,
|
||||
EntityTriggerBase,
|
||||
Trigger,
|
||||
make_entity_transition_trigger,
|
||||
@@ -12,6 +14,10 @@ from homeassistant.helpers.trigger import (
|
||||
from . import ATTR_MEDIA_VOLUME_LEVEL, ATTR_MEDIA_VOLUME_MUTED, MediaPlayerState
|
||||
from .const import DOMAIN
|
||||
|
||||
VOLUME_DOMAIN_SPECS = {
|
||||
DOMAIN: DomainSpec(value_source=ATTR_MEDIA_VOLUME_LEVEL),
|
||||
}
|
||||
|
||||
|
||||
class _MediaPlayerMutedStateTriggerBase(EntityTriggerBase):
|
||||
"""Base class for media player muted/unmuted triggers."""
|
||||
@@ -43,10 +49,7 @@ class _MediaPlayerMutedStateTriggerBase(EntityTriggerBase):
|
||||
)
|
||||
|
||||
def is_valid_transition(self, from_state: State, to_state: State) -> bool:
|
||||
"""Check if the origin state is valid and the state has changed."""
|
||||
if from_state.state in (STATE_UNAVAILABLE, STATE_UNKNOWN):
|
||||
return False
|
||||
|
||||
"""Check that the muted-state changed."""
|
||||
if not self._has_volume_attributes(to_state):
|
||||
return False
|
||||
|
||||
@@ -71,9 +74,48 @@ class MediaPlayerUnmutedTrigger(_MediaPlayerMutedStateTriggerBase):
|
||||
_target_muted = False
|
||||
|
||||
|
||||
class VolumeTriggerMixin(EntityNumericalStateTriggerBase):
|
||||
"""Mixin for volume triggers."""
|
||||
|
||||
_domain_specs = VOLUME_DOMAIN_SPECS
|
||||
_valid_unit = "%"
|
||||
|
||||
def _get_tracked_value(self, state: State) -> float | None:
|
||||
"""Get tracked volume as a percentage."""
|
||||
value = super()._get_tracked_value(state)
|
||||
if value is None:
|
||||
return None
|
||||
# Convert 0.0-1.0 range to percentage (0-100)
|
||||
return value * 100.0
|
||||
|
||||
def _should_include(self, state: State) -> bool:
|
||||
"""Check if an entity should participate in all/count checks.
|
||||
|
||||
Entities without a volume level cannot have their volume tracked,
|
||||
so they are excluded - otherwise an "all" check would never pass
|
||||
when there are media players without volume support.
|
||||
"""
|
||||
return (
|
||||
super()._should_include(state)
|
||||
and state.attributes.get(ATTR_MEDIA_VOLUME_LEVEL) is not None
|
||||
)
|
||||
|
||||
|
||||
class VolumeChangedTrigger(EntityNumericalStateChangedTriggerBase, VolumeTriggerMixin):
|
||||
"""Trigger for media player volume changes."""
|
||||
|
||||
|
||||
class VolumeCrossedThresholdTrigger(
|
||||
EntityNumericalStateCrossedThresholdTriggerBase, VolumeTriggerMixin
|
||||
):
|
||||
"""Trigger for media player volume crossing a threshold."""
|
||||
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"muted": MediaPlayerMutedTrigger,
|
||||
"unmuted": MediaPlayerUnmutedTrigger,
|
||||
"volume_changed": VolumeChangedTrigger,
|
||||
"volume_crossed_threshold": VolumeCrossedThresholdTrigger,
|
||||
"paused_playing": make_entity_transition_trigger(
|
||||
DOMAIN,
|
||||
from_states={
|
||||
|
||||
@@ -1,20 +1,34 @@
|
||||
.trigger_common: &trigger_common
|
||||
target:
|
||||
target: &trigger_media_player_target
|
||||
entity:
|
||||
domain: media_player
|
||||
fields:
|
||||
behavior:
|
||||
behavior: &trigger_behavior
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
automation_behavior:
|
||||
mode: trigger
|
||||
for:
|
||||
for: &trigger_for
|
||||
required: true
|
||||
default: 00:00:00
|
||||
selector:
|
||||
duration:
|
||||
|
||||
.volume_threshold_entity: &volume_threshold_entity
|
||||
- domain: input_number
|
||||
unit_of_measurement: "%"
|
||||
- domain: number
|
||||
unit_of_measurement: "%"
|
||||
- domain: sensor
|
||||
unit_of_measurement: "%"
|
||||
|
||||
.volume_threshold_number: &volume_threshold_number
|
||||
min: 0
|
||||
max: 100
|
||||
mode: box
|
||||
unit_of_measurement: "%"
|
||||
|
||||
muted: *trigger_common
|
||||
unmuted: *trigger_common
|
||||
paused_playing: *trigger_common
|
||||
@@ -22,3 +36,27 @@ started_playing: *trigger_common
|
||||
stopped_playing: *trigger_common
|
||||
turned_off: *trigger_common
|
||||
turned_on: *trigger_common
|
||||
|
||||
volume_changed:
|
||||
target: *trigger_media_player_target
|
||||
fields:
|
||||
threshold:
|
||||
required: true
|
||||
selector:
|
||||
numeric_threshold:
|
||||
entity: *volume_threshold_entity
|
||||
mode: changed
|
||||
number: *volume_threshold_number
|
||||
|
||||
volume_crossed_threshold:
|
||||
target: *trigger_media_player_target
|
||||
fields:
|
||||
behavior: *trigger_behavior
|
||||
for: *trigger_for
|
||||
threshold:
|
||||
required: true
|
||||
selector:
|
||||
numeric_threshold:
|
||||
entity: *volume_threshold_entity
|
||||
mode: crossed
|
||||
number: *volume_threshold_number
|
||||
|
||||
@@ -5,30 +5,31 @@ from datetime import timedelta
|
||||
from mill import Mill
|
||||
from mill_local import Mill as MillLocal
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD, CONF_USERNAME, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import CLOUD, CONNECTION_TYPE, DOMAIN, LOCAL
|
||||
from .coordinator import MillDataUpdateCoordinator, MillHistoricDataUpdateCoordinator
|
||||
from .coordinator import (
|
||||
MillConfigEntry,
|
||||
MillDataUpdateCoordinator,
|
||||
MillHistoricDataUpdateCoordinator,
|
||||
)
|
||||
|
||||
PLATFORMS = [Platform.CLIMATE, Platform.NUMBER, Platform.SENSOR]
|
||||
|
||||
__all__ = ["CLOUD", "CONNECTION_TYPE", "DOMAIN", "LOCAL"]
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: MillConfigEntry) -> bool:
|
||||
"""Set up the Mill heater."""
|
||||
hass.data.setdefault(DOMAIN, {LOCAL: {}, CLOUD: {}})
|
||||
|
||||
if entry.data.get(CONNECTION_TYPE) == LOCAL:
|
||||
mill_data_connection = MillLocal(
|
||||
entry.data[CONF_IP_ADDRESS],
|
||||
websession=async_get_clientsession(hass),
|
||||
)
|
||||
update_interval = timedelta(seconds=15)
|
||||
key = entry.data[CONF_IP_ADDRESS]
|
||||
conn_type = LOCAL
|
||||
else:
|
||||
mill_data_connection = Mill(
|
||||
entry.data[CONF_USERNAME],
|
||||
@@ -36,8 +37,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
websession=async_get_clientsession(hass),
|
||||
)
|
||||
update_interval = timedelta(seconds=30)
|
||||
key = entry.data[CONF_USERNAME]
|
||||
conn_type = CLOUD
|
||||
|
||||
historic_data_coordinator = MillHistoricDataUpdateCoordinator(
|
||||
hass,
|
||||
@@ -56,14 +55,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
)
|
||||
|
||||
await data_coordinator.async_config_entry_first_refresh()
|
||||
# Uses legacy hass.data[DOMAIN] pattern
|
||||
# pylint: disable-next=hass-use-runtime-data
|
||||
hass.data[DOMAIN][conn_type][key] = data_coordinator
|
||||
entry.runtime_data = data_coordinator
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: MillConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
"""Support for mill wifi-enabled home heaters."""
|
||||
# pylint: disable=hass-use-runtime-data # Uses legacy hass.data[DOMAIN] pattern
|
||||
|
||||
from typing import Any
|
||||
|
||||
@@ -14,14 +13,7 @@ from homeassistant.components.climate import (
|
||||
HVACAction,
|
||||
HVACMode,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
ATTR_TEMPERATURE,
|
||||
CONF_IP_ADDRESS,
|
||||
CONF_USERNAME,
|
||||
PRECISION_TENTHS,
|
||||
UnitOfTemperature,
|
||||
)
|
||||
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_TENTHS, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
|
||||
@@ -33,7 +25,6 @@ from .const import (
|
||||
ATTR_COMFORT_TEMP,
|
||||
ATTR_ROOM_NAME,
|
||||
ATTR_SLEEP_TEMP,
|
||||
CLOUD,
|
||||
CONNECTION_TYPE,
|
||||
DOMAIN,
|
||||
LOCAL,
|
||||
@@ -42,7 +33,7 @@ from .const import (
|
||||
MIN_TEMP,
|
||||
SERVICE_SET_ROOM_TEMP,
|
||||
)
|
||||
from .coordinator import MillDataUpdateCoordinator
|
||||
from .coordinator import MillConfigEntry, MillDataUpdateCoordinator
|
||||
from .entity import MillBaseEntity
|
||||
|
||||
SET_ROOM_TEMP_SCHEMA = vol.Schema(
|
||||
@@ -57,17 +48,16 @@ SET_ROOM_TEMP_SCHEMA = vol.Schema(
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: ConfigEntry,
|
||||
entry: MillConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Mill climate."""
|
||||
mill_data_coordinator = entry.runtime_data
|
||||
|
||||
if entry.data.get(CONNECTION_TYPE) == LOCAL:
|
||||
mill_data_coordinator = hass.data[DOMAIN][LOCAL][entry.data[CONF_IP_ADDRESS]]
|
||||
async_add_entities([LocalMillHeater(mill_data_coordinator)])
|
||||
return
|
||||
|
||||
mill_data_coordinator = hass.data[DOMAIN][CLOUD][entry.data[CONF_USERNAME]]
|
||||
|
||||
entities = [
|
||||
MillHeater(mill_data_coordinator, mill_device)
|
||||
for mill_device in mill_data_coordinator.data.values()
|
||||
|
||||
@@ -57,6 +57,9 @@ class MillDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
)
|
||||
|
||||
|
||||
type MillConfigEntry = ConfigEntry[MillDataUpdateCoordinator]
|
||||
|
||||
|
||||
class MillHistoricDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
"""Class to manage fetching Mill historic data."""
|
||||
|
||||
|
||||
@@ -3,28 +3,23 @@
|
||||
from mill import Heater, MillDevice
|
||||
|
||||
from homeassistant.components.number import NumberDeviceClass, NumberEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_USERNAME, UnitOfPower
|
||||
from homeassistant.const import UnitOfPower
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import CLOUD, CONNECTION_TYPE, DOMAIN
|
||||
from .coordinator import MillDataUpdateCoordinator
|
||||
from .const import CLOUD, CONNECTION_TYPE
|
||||
from .coordinator import MillConfigEntry, MillDataUpdateCoordinator
|
||||
from .entity import MillBaseEntity
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: ConfigEntry,
|
||||
entry: MillConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Mill Number."""
|
||||
if entry.data.get(CONNECTION_TYPE) == CLOUD:
|
||||
# Uses legacy hass.data[DOMAIN] pattern
|
||||
# pylint: disable-next=hass-use-runtime-data
|
||||
mill_data_coordinator: MillDataUpdateCoordinator = hass.data[DOMAIN][CLOUD][
|
||||
entry.data[CONF_USERNAME]
|
||||
]
|
||||
mill_data_coordinator = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
MillNumber(mill_data_coordinator, mill_device)
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
"""Support for mill wifi-enabled home heaters."""
|
||||
# pylint: disable=hass-use-runtime-data # Uses legacy hass.data[DOMAIN] pattern
|
||||
|
||||
import mill
|
||||
|
||||
@@ -9,12 +8,9 @@ from homeassistant.components.sensor import (
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CONCENTRATION_PARTS_PER_BILLION,
|
||||
CONCENTRATION_PARTS_PER_MILLION,
|
||||
CONF_IP_ADDRESS,
|
||||
CONF_USERNAME,
|
||||
PERCENTAGE,
|
||||
EntityCategory,
|
||||
UnitOfEnergy,
|
||||
@@ -29,11 +25,9 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import (
|
||||
BATTERY,
|
||||
CLOUD,
|
||||
CONNECTION_TYPE,
|
||||
CONSUMPTION_TODAY,
|
||||
CONSUMPTION_YEAR,
|
||||
DOMAIN,
|
||||
ECO2,
|
||||
HUMIDITY,
|
||||
LOCAL,
|
||||
@@ -41,7 +35,7 @@ from .const import (
|
||||
TEMPERATURE,
|
||||
TVOC,
|
||||
)
|
||||
from .coordinator import MillDataUpdateCoordinator
|
||||
from .coordinator import MillConfigEntry, MillDataUpdateCoordinator
|
||||
from .entity import MillBaseEntity
|
||||
|
||||
HEATER_SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
|
||||
@@ -146,13 +140,13 @@ SOCKET_SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: ConfigEntry,
|
||||
entry: MillConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Mill sensor."""
|
||||
if entry.data.get(CONNECTION_TYPE) == LOCAL:
|
||||
mill_data_coordinator = hass.data[DOMAIN][LOCAL][entry.data[CONF_IP_ADDRESS]]
|
||||
mill_data_coordinator = entry.runtime_data
|
||||
|
||||
if entry.data.get(CONNECTION_TYPE) == LOCAL:
|
||||
async_add_entities(
|
||||
LocalMillSensor(
|
||||
mill_data_coordinator,
|
||||
@@ -162,8 +156,6 @@ async def async_setup_entry(
|
||||
)
|
||||
return
|
||||
|
||||
mill_data_coordinator = hass.data[DOMAIN][CLOUD][entry.data[CONF_USERNAME]]
|
||||
|
||||
entities = [
|
||||
MillSensor(
|
||||
mill_data_coordinator,
|
||||
|
||||
@@ -82,6 +82,7 @@ ATTR_SENSOR_UOM = "unit_of_measurement"
|
||||
|
||||
SIGNAL_SENSOR_UPDATE = f"{DOMAIN}_sensor_update"
|
||||
SIGNAL_LOCATION_UPDATE = DOMAIN + "_location_update_{}"
|
||||
SIGNAL_RECORD_NOTIFICATION = f"{DOMAIN}_record_notification"
|
||||
|
||||
ATTR_CAMERA_ENTITY_ID = "camera_entity_id"
|
||||
|
||||
|
||||
@@ -21,9 +21,13 @@ from homeassistant.components.notify import (
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import ATTR_DEVICE_ID
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.dispatcher import (
|
||||
async_dispatcher_connect,
|
||||
async_dispatcher_send,
|
||||
)
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from homeassistant.util import dt as dt_util
|
||||
@@ -46,6 +50,7 @@ from .const import (
|
||||
DATA_NOTIFY,
|
||||
DATA_PUSH_CHANNEL,
|
||||
DOMAIN,
|
||||
SIGNAL_RECORD_NOTIFICATION,
|
||||
)
|
||||
from .helpers import device_info
|
||||
from .push_notification import PushChannel
|
||||
@@ -111,6 +116,21 @@ class MobileAppNotifyEntity(NotifyEntity):
|
||||
translation_placeholders={"device_name": self._config_entry.title},
|
||||
)
|
||||
|
||||
@callback
|
||||
def _async_handle_notification(self, webhook_id: str) -> None:
|
||||
"""Handle notifications triggered externally."""
|
||||
if webhook_id == self._config_entry.data[ATTR_WEBHOOK_ID]:
|
||||
self._async_record_notification()
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register callback."""
|
||||
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass, SIGNAL_RECORD_NOTIFICATION, self._async_handle_notification
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def push_registrations(hass: HomeAssistant) -> dict[str, str]:
|
||||
"""Return a dictionary of push enabled registrations."""
|
||||
@@ -195,6 +215,7 @@ class MobileAppNotificationService(BaseNotificationService):
|
||||
data,
|
||||
partial(self._async_send_remote_message_target, entry),
|
||||
)
|
||||
async_dispatcher_send(self.hass, SIGNAL_RECORD_NOTIFICATION, target)
|
||||
continue
|
||||
|
||||
# Test if local push only.
|
||||
@@ -203,6 +224,7 @@ class MobileAppNotificationService(BaseNotificationService):
|
||||
continue
|
||||
|
||||
await self._async_send_remote_message_target(entry, data)
|
||||
async_dispatcher_send(self.hass, SIGNAL_RECORD_NOTIFICATION, target)
|
||||
|
||||
if failed_targets:
|
||||
raise HomeAssistantError(
|
||||
|
||||
@@ -16,6 +16,8 @@ from typing import TYPE_CHECKING, Any
|
||||
from uuid import uuid4
|
||||
|
||||
import certifi
|
||||
import paho.mqtt.client as mqtt
|
||||
from paho.mqtt.matcher import MQTTMatcher
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
@@ -40,6 +42,7 @@ from homeassistant.helpers.dispatcher import (
|
||||
async_dispatcher_connect,
|
||||
async_dispatcher_send,
|
||||
)
|
||||
from homeassistant.helpers.frame import ReportBehavior, report_usage
|
||||
from homeassistant.helpers.importlib import async_import_module
|
||||
from homeassistant.helpers.start import async_at_started
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
@@ -47,6 +50,7 @@ from homeassistant.setup import SetupPhases, async_pause_setup
|
||||
from homeassistant.util.collection import chunked_or_all
|
||||
from homeassistant.util.logging import catch_log_exception, log_exception
|
||||
|
||||
from .async_client import AsyncMQTTClient
|
||||
from .const import (
|
||||
CONF_BIRTH_MESSAGE,
|
||||
CONF_BROKER,
|
||||
@@ -86,13 +90,6 @@ from .models import (
|
||||
)
|
||||
from .util import EnsureJobAfterCooldown, get_file_path, mqtt_config_entry_enabled
|
||||
|
||||
if TYPE_CHECKING:
|
||||
# Only import for paho-mqtt type checking here, imports are done locally
|
||||
# because integrations should be able to optionally rely on MQTT.
|
||||
import paho.mqtt.client as mqtt
|
||||
|
||||
from .async_client import AsyncMQTTClient
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
MIN_BUFFER_SIZE = 131072 # Minimum buffer size to use if preferred size fails
|
||||
@@ -128,8 +125,8 @@ def publish(
|
||||
hass: HomeAssistant,
|
||||
topic: str,
|
||||
payload: PublishPayloadType,
|
||||
qos: int | None = 0,
|
||||
retain: bool | None = False,
|
||||
qos: int = 0,
|
||||
retain: bool = False,
|
||||
encoding: str | None = DEFAULT_ENCODING,
|
||||
) -> None:
|
||||
"""Publish message to a MQTT topic."""
|
||||
@@ -140,8 +137,8 @@ async def async_publish(
|
||||
hass: HomeAssistant,
|
||||
topic: str,
|
||||
payload: PublishPayloadType,
|
||||
qos: int | None = 0,
|
||||
retain: bool | None = False,
|
||||
qos: int = 0,
|
||||
retain: bool = False,
|
||||
encoding: str | None = DEFAULT_ENCODING,
|
||||
) -> None:
|
||||
"""Publish message to a MQTT topic."""
|
||||
@@ -181,9 +178,22 @@ async def async_publish(
|
||||
)
|
||||
return
|
||||
|
||||
await mqtt_data.client.async_publish(
|
||||
topic, outgoing_payload, qos or 0, retain or False
|
||||
)
|
||||
# Passing None for qos or retain args was deprecated.
|
||||
# Custom integrations should update there code.
|
||||
# Check for fallback to `None` values can be removed with HA Core 2027.6
|
||||
if qos is None or retain is None:
|
||||
report_usage( # type: ignore[unreachable]
|
||||
"that calls the MQTT publish API with `None` for qos or retain. "
|
||||
"The `qos` argument must be an `int`, "
|
||||
"and the `retain` argument must be a `bool`",
|
||||
breaks_in_ha_version="2027.6.0",
|
||||
core_behavior=ReportBehavior.LOG,
|
||||
exclude_integrations={DOMAIN},
|
||||
)
|
||||
qos = qos or 0
|
||||
retain = retain or False
|
||||
|
||||
await mqtt_data.client.async_publish(topic, outgoing_payload, qos, retain)
|
||||
|
||||
|
||||
@callback
|
||||
@@ -323,12 +333,6 @@ class MqttClientSetup:
|
||||
The setup of the MQTT client should be run in an executor job,
|
||||
because it accesses files, so it does IO.
|
||||
"""
|
||||
# We don't import on the top because some integrations
|
||||
# should be able to optionally rely on MQTT.
|
||||
from paho.mqtt import client as mqtt # noqa: PLC0415
|
||||
|
||||
from .async_client import AsyncMQTTClient # noqa: PLC0415
|
||||
|
||||
config = self._config
|
||||
clean_session: bool | None = None
|
||||
# If no protocol setting is set in the config entry data
|
||||
@@ -561,7 +565,6 @@ class MQTT:
|
||||
"""Start the misc periodic."""
|
||||
assert self._misc_timer is None, "Misc periodic already started"
|
||||
_LOGGER.debug("%s: Starting client misc loop", self.config_entry.title)
|
||||
import paho.mqtt.client as mqtt # noqa: PLC0415
|
||||
|
||||
# Inner function to avoid having to check late import
|
||||
# each time the function is called.
|
||||
@@ -705,7 +708,6 @@ class MQTT:
|
||||
|
||||
async def async_connect(self, client_available: asyncio.Future[bool]) -> None:
|
||||
"""Connect to the host. Does not process messages yet."""
|
||||
import paho.mqtt.client as mqtt # noqa: PLC0415
|
||||
|
||||
result: int | None = None
|
||||
self._available_future = client_available
|
||||
@@ -763,7 +765,6 @@ class MQTT:
|
||||
|
||||
async def _reconnect_loop(self) -> None:
|
||||
"""Reconnect to the MQTT server."""
|
||||
import paho.mqtt.client as mqtt # noqa: PLC0415
|
||||
|
||||
while True:
|
||||
if not self.connected:
|
||||
@@ -1265,9 +1266,6 @@ class MQTT:
|
||||
@callback
|
||||
def _async_handle_callback_exception(self, status: mqtt.MQTTErrorCode) -> None:
|
||||
"""Handle a callback exception."""
|
||||
# We don't import on the top because some integrations
|
||||
# should be able to optionally rely on MQTT.
|
||||
import paho.mqtt.client as mqtt # noqa: PLC0415
|
||||
|
||||
_LOGGER.warning(
|
||||
"Error returned from MQTT server: %s",
|
||||
@@ -1312,8 +1310,6 @@ class MQTT:
|
||||
) -> None:
|
||||
"""Wait for ACK from broker or raise on error."""
|
||||
if result_code != 0:
|
||||
import paho.mqtt.client as mqtt # noqa: PLC0415
|
||||
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="mqtt_broker_error",
|
||||
@@ -1360,8 +1356,6 @@ class MQTT:
|
||||
|
||||
|
||||
def _matcher_for_topic(subscription: str) -> Callable[[str], bool]:
|
||||
from paho.mqtt.matcher import MQTTMatcher # noqa: PLC0415
|
||||
|
||||
matcher = MQTTMatcher() # type: ignore[no-untyped-call]
|
||||
matcher[subscription] = True
|
||||
|
||||
|
||||
@@ -22,6 +22,7 @@ from cryptography.hazmat.primitives.serialization import (
|
||||
load_pem_private_key,
|
||||
)
|
||||
from cryptography.x509 import load_der_x509_certificate, load_pem_x509_certificate
|
||||
import paho.mqtt.client as mqtt
|
||||
import voluptuous as vol
|
||||
import yaml
|
||||
|
||||
@@ -5371,12 +5372,9 @@ async def async_get_broker_settings( # noqa: C901
|
||||
description={"suggested_value": current_pass},
|
||||
)
|
||||
] = PASSWORD_SELECTOR
|
||||
# show advanced options checkbox if requested and
|
||||
# advanced options are enabled
|
||||
# or when the defaults of advanced options are overridden
|
||||
# show advanced options checkbox if no defaults
|
||||
# of the advanced options are overridden
|
||||
if not advanced_broker_options:
|
||||
if not flow.show_advanced_options:
|
||||
return False
|
||||
fields[
|
||||
vol.Optional(
|
||||
ADVANCED_OPTIONS,
|
||||
@@ -5482,10 +5480,6 @@ def try_connection(
|
||||
user_input: dict[str, Any],
|
||||
) -> bool:
|
||||
"""Test if we can connect to an MQTT broker."""
|
||||
# We don't import on the top because some integrations
|
||||
# should be able to optionally rely on MQTT.
|
||||
import paho.mqtt.client as mqtt # noqa: PLC0415
|
||||
|
||||
mqtt_client_setup = MqttClientSetup(user_input)
|
||||
mqtt_client_setup.setup()
|
||||
client = mqtt_client_setup.client
|
||||
|
||||
@@ -9,6 +9,8 @@ from enum import StrEnum
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any, TypedDict
|
||||
|
||||
from paho.mqtt.client import MQTTMessage
|
||||
|
||||
from homeassistant.const import ATTR_ENTITY_ID, ATTR_NAME, Platform
|
||||
from homeassistant.core import CALLBACK_TYPE, callback
|
||||
from homeassistant.exceptions import ServiceValidationError, TemplateError
|
||||
@@ -24,8 +26,6 @@ from homeassistant.helpers.typing import (
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from paho.mqtt.client import MQTTMessage
|
||||
|
||||
from .client import MQTT, Subscription
|
||||
from .debug_info import TimestampedPublishMessage
|
||||
from .device_trigger import Trigger
|
||||
|
||||
@@ -67,25 +67,11 @@ OPENING_CATEGORY_TO_DEVICE_CLASS: Final[dict[str | None, BinarySensorDeviceClass
|
||||
|
||||
|
||||
def get_opening_category(netatmo_device: NetatmoDevice) -> str:
|
||||
"""Helper function to get opening category from Netatmo API raw data."""
|
||||
"""Helper function to get opening category for doortag."""
|
||||
|
||||
# Iterate through each home in the raw data.
|
||||
for home in netatmo_device.data_handler.account.raw_data["homes"]:
|
||||
# Check if the modules list exists for the current home.
|
||||
if "modules" in home:
|
||||
# Iterate through each module to find a matching ID.
|
||||
for module in home["modules"]:
|
||||
if module["id"] == netatmo_device.device.entity_id:
|
||||
# We found the matching device. Get its category.
|
||||
if module.get("category") is not None:
|
||||
return cast(str, module["category"])
|
||||
raise ValueError(
|
||||
f"Device {netatmo_device.device.entity_id} found, "
|
||||
"but 'category' is missing in raw data."
|
||||
)
|
||||
|
||||
raise ValueError(
|
||||
f"Device {netatmo_device.device.entity_id} not found in Netatmo raw data."
|
||||
return (
|
||||
getattr(netatmo_device.device, "doortag_category", None)
|
||||
or DOORTAG_CATEGORY_OTHER
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -36,6 +36,22 @@ if TYPE_CHECKING:
|
||||
from . import NordPoolConfigEntry
|
||||
from .const import ATTR_RESOLUTION, DOMAIN
|
||||
|
||||
|
||||
def _validate_areas(areas: list[str]) -> list[str]:
|
||||
"""Validate the areas."""
|
||||
validated_areas: list[str] = []
|
||||
|
||||
for area in areas:
|
||||
validated_area = cv.string(area)
|
||||
validated_area = validated_area.upper()
|
||||
if validated_area not in AREAS:
|
||||
raise vol.Invalid(f"Area {area} is not valid")
|
||||
|
||||
validated_areas.append(validated_area)
|
||||
|
||||
return validated_areas
|
||||
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
ATTR_CONFIG_ENTRY = "config_entry"
|
||||
ATTR_AREAS = "areas"
|
||||
@@ -47,9 +63,11 @@ SERVICE_GET_PRICES_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
|
||||
vol.Required(ATTR_DATE): cv.date,
|
||||
vol.Optional(ATTR_AREAS): vol.All(vol.In(list(AREAS)), cv.ensure_list, [str]),
|
||||
vol.Optional(ATTR_AREAS, default=[]): vol.All(cv.ensure_list, _validate_areas),
|
||||
vol.Optional(ATTR_CURRENCY): vol.All(
|
||||
cv.string, vol.In([currency.value for currency in Currency])
|
||||
cv.string,
|
||||
vol.Upper,
|
||||
vol.In([currency.value for currency in Currency]),
|
||||
),
|
||||
}
|
||||
)
|
||||
@@ -76,20 +94,14 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
client = entry.runtime_data.client
|
||||
asked_date: date = call.data[ATTR_DATE]
|
||||
|
||||
areas: list[str] = entry.data[ATTR_AREAS]
|
||||
if _areas := call.data.get(ATTR_AREAS):
|
||||
areas = _areas
|
||||
areas = call.data.get(ATTR_AREAS)
|
||||
areas = areas or entry.data[ATTR_AREAS]
|
||||
|
||||
currency: str = entry.data[ATTR_CURRENCY]
|
||||
if _currency := call.data.get(ATTR_CURRENCY):
|
||||
currency = _currency
|
||||
currency = call.data.get(ATTR_CURRENCY)
|
||||
currency = currency or entry.data[ATTR_CURRENCY]
|
||||
|
||||
resolution: int = 60
|
||||
if _resolution := call.data.get(ATTR_RESOLUTION):
|
||||
resolution = _resolution
|
||||
|
||||
areas = [area.upper() for area in areas]
|
||||
currency = currency.upper()
|
||||
resolution = call.data.get(ATTR_RESOLUTION)
|
||||
resolution = resolution or 60
|
||||
|
||||
return (client, asked_date, currency, areas, resolution)
|
||||
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user