mirror of
https://github.com/home-assistant/core.git
synced 2026-03-04 14:57:07 +01:00
Compare commits
1 Commits
fix_test_s
...
gen-dashbo
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
6170b47332 |
2
.github/workflows/ci.yaml
vendored
2
.github/workflows/ci.yaml
vendored
@@ -254,7 +254,7 @@ jobs:
|
||||
echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json"
|
||||
echo "::add-matcher::.github/workflows/matchers/codespell.json"
|
||||
- name: Run prek
|
||||
uses: j178/prek-action@0bb87d7f00b0c99306c8bcb8b8beba1eb581c037 # v1.1.1
|
||||
uses: j178/prek-action@564dda4cfa5e96aafdc4a5696c4bf7b46baae5ac # v1.1.0
|
||||
env:
|
||||
PREK_SKIP: no-commit-to-branch,mypy,pylint,gen_requirements_all,hassfest,hassfest-metadata,hassfest-mypy-config
|
||||
RUFF_OUTPUT_FORMAT: github
|
||||
|
||||
@@ -231,7 +231,7 @@ jobs:
|
||||
- name: Detect duplicates using AI
|
||||
id: ai_detection
|
||||
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
|
||||
uses: actions/ai-inference@a380166897b5408b8fb7dddd148142794cb5624a # v2.0.6
|
||||
uses: actions/ai-inference@a6101c89c6feaecc585efdd8d461f18bb7896f20 # v2.0.5
|
||||
with:
|
||||
model: openai/gpt-4o
|
||||
system-prompt: |
|
||||
|
||||
@@ -57,7 +57,7 @@ jobs:
|
||||
- name: Detect language using AI
|
||||
id: ai_language_detection
|
||||
if: steps.detect_language.outputs.should_continue == 'true'
|
||||
uses: actions/ai-inference@a380166897b5408b8fb7dddd148142794cb5624a # v2.0.6
|
||||
uses: actions/ai-inference@a6101c89c6feaecc585efdd8d461f18bb7896f20 # v2.0.5
|
||||
with:
|
||||
model: openai/gpt-4o-mini
|
||||
system-prompt: |
|
||||
|
||||
8
CODEOWNERS
generated
8
CODEOWNERS
generated
@@ -15,7 +15,7 @@
|
||||
.yamllint @home-assistant/core
|
||||
pyproject.toml @home-assistant/core
|
||||
requirements_test.txt @home-assistant/core
|
||||
/.devcontainer/ @home-assistant/core @edenhaus
|
||||
/.devcontainer/ @home-assistant/core
|
||||
/.github/ @home-assistant/core
|
||||
/.vscode/ @home-assistant/core
|
||||
/homeassistant/*.py @home-assistant/core
|
||||
@@ -672,8 +672,6 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/hdmi_cec/ @inytar
|
||||
/tests/components/hdmi_cec/ @inytar
|
||||
/homeassistant/components/heatmiser/ @andylockran
|
||||
/homeassistant/components/hegel/ @boazca
|
||||
/tests/components/hegel/ @boazca
|
||||
/homeassistant/components/heos/ @andrewsayre
|
||||
/tests/components/heos/ @andrewsayre
|
||||
/homeassistant/components/here_travel_time/ @eifinger
|
||||
@@ -717,8 +715,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/homekit_controller/ @Jc2k @bdraco
|
||||
/homeassistant/components/homematic/ @pvizeli
|
||||
/tests/components/homematic/ @pvizeli
|
||||
/homeassistant/components/homematicip_cloud/ @hahn-th @lackas
|
||||
/tests/components/homematicip_cloud/ @hahn-th @lackas
|
||||
/homeassistant/components/homematicip_cloud/ @hahn-th
|
||||
/tests/components/homematicip_cloud/ @hahn-th
|
||||
/homeassistant/components/homewizard/ @DCSBL
|
||||
/tests/components/homewizard/ @DCSBL
|
||||
/homeassistant/components/honeywell/ @rdfurman @mkmer
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aioamazondevices"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aioamazondevices==12.0.0"]
|
||||
"requirements": ["aioamazondevices==11.1.3"]
|
||||
}
|
||||
|
||||
@@ -73,21 +73,31 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
started = False
|
||||
|
||||
async def _async_handle_labs_update(
|
||||
event_data: labs.EventLabsUpdatedData,
|
||||
event: Event[labs.EventLabsUpdatedData],
|
||||
) -> None:
|
||||
"""Handle labs feature toggle."""
|
||||
await analytics.save_preferences({ATTR_SNAPSHOTS: event_data["enabled"]})
|
||||
await analytics.save_preferences({ATTR_SNAPSHOTS: event.data["enabled"]})
|
||||
if started:
|
||||
await analytics.async_schedule()
|
||||
|
||||
@callback
|
||||
def _async_labs_event_filter(event_data: labs.EventLabsUpdatedData) -> bool:
|
||||
"""Filter labs events for this integration's snapshot feature."""
|
||||
return (
|
||||
event_data["domain"] == DOMAIN
|
||||
and event_data["preview_feature"] == LABS_SNAPSHOT_FEATURE
|
||||
)
|
||||
|
||||
async def start_schedule(_event: Event) -> None:
|
||||
"""Start the send schedule after the started event."""
|
||||
nonlocal started
|
||||
started = True
|
||||
await analytics.async_schedule()
|
||||
|
||||
labs.async_subscribe_preview_feature(
|
||||
hass, DOMAIN, LABS_SNAPSHOT_FEATURE, _async_handle_labs_update
|
||||
hass.bus.async_listen(
|
||||
labs.EVENT_LABS_UPDATED,
|
||||
_async_handle_labs_update,
|
||||
event_filter=_async_labs_event_filter,
|
||||
)
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, start_schedule)
|
||||
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/aosmith",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["py-aosmith==1.0.17"]
|
||||
"requirements": ["py-aosmith==1.0.16"]
|
||||
}
|
||||
|
||||
@@ -19,5 +19,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/aranet",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["aranet4==2.6.0"]
|
||||
"requirements": ["aranet4==2.5.1"]
|
||||
}
|
||||
|
||||
@@ -30,9 +30,6 @@
|
||||
"title": "Set up one-time password delivered by notify component"
|
||||
},
|
||||
"setup": {
|
||||
"data": {
|
||||
"code": "Code"
|
||||
},
|
||||
"description": "A one-time password has been sent via **notify.{notify_service}**. Please enter it below:",
|
||||
"title": "Verify setup"
|
||||
}
|
||||
@@ -45,9 +42,6 @@
|
||||
},
|
||||
"step": {
|
||||
"init": {
|
||||
"data": {
|
||||
"code": "Code"
|
||||
},
|
||||
"description": "To activate two-factor authentication using time-based one-time passwords, scan the QR code with your authentication app. If you don't have one, we recommend either [Google Authenticator]({google_authenticator_url}) or [Authy]({authy_url}).\n\n{qr_code}\n\nAfter scanning the code, enter the six-digit code from your app to verify the setup. If you have problems scanning the QR code, do a manual setup with code **`{code}`**.",
|
||||
"title": "Set up two-factor authentication using TOTP"
|
||||
}
|
||||
|
||||
@@ -14,7 +14,7 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.components import labs, websocket_api
|
||||
from homeassistant.components.blueprint import CONF_USE_BLUEPRINT
|
||||
from homeassistant.components.labs import async_subscribe_preview_feature
|
||||
from homeassistant.components.labs import async_listen as async_labs_listen
|
||||
from homeassistant.const import (
|
||||
ATTR_AREA_ID,
|
||||
ATTR_ENTITY_ID,
|
||||
@@ -386,13 +386,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
schema=vol.Schema({vol.Optional(CONF_ID): str}),
|
||||
)
|
||||
|
||||
async def new_triggers_conditions_listener(
|
||||
_event_data: labs.EventLabsUpdatedData,
|
||||
) -> None:
|
||||
@callback
|
||||
def new_triggers_conditions_listener() -> None:
|
||||
"""Handle new_triggers_conditions flag change."""
|
||||
await reload_helper.execute_service(ServiceCall(hass, DOMAIN, SERVICE_RELOAD))
|
||||
hass.async_create_task(
|
||||
reload_helper.execute_service(ServiceCall(hass, DOMAIN, SERVICE_RELOAD))
|
||||
)
|
||||
|
||||
async_subscribe_preview_feature(
|
||||
async_labs_listen(
|
||||
hass,
|
||||
DOMAIN,
|
||||
NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG,
|
||||
|
||||
@@ -8,6 +8,6 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["aiostreammagic"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aiostreammagic==2.12.1"],
|
||||
"requirements": ["aiostreammagic==2.11.0"],
|
||||
"zeroconf": ["_stream-magic._tcp.local.", "_smoip._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -19,11 +19,11 @@
|
||||
"secret_access_key": "Secret access key"
|
||||
},
|
||||
"data_description": {
|
||||
"access_key_id": "Access key ID to connect to Cloudflare R2",
|
||||
"access_key_id": "Access key ID to connect to Cloudflare R2 (this is your Account ID)",
|
||||
"bucket": "Bucket must already exist and be writable by the provided credentials.",
|
||||
"endpoint_url": "Cloudflare R2 S3-compatible endpoint.",
|
||||
"prefix": "Optional folder path inside the bucket. Example: backups/homeassistant",
|
||||
"secret_access_key": "Secret access key to connect to Cloudflare R2. See [Cloudflare documentation]({auth_docs_url})"
|
||||
"secret_access_key": "Secret access key to connect to Cloudflare R2. See [Docs]({auth_docs_url})"
|
||||
},
|
||||
"title": "Add Cloudflare R2 bucket"
|
||||
}
|
||||
|
||||
@@ -70,10 +70,6 @@ class CoolmasterClimate(CoolmasterEntity, ClimateEntity):
|
||||
|
||||
_attr_name = None
|
||||
|
||||
# TODO(2026.7.0): When support for unknown fan speeds is removed, delete this variable.
|
||||
# Holds unknown fan speeds we have already warned about.
|
||||
warned_unknown_fan_speeds: set[str] = set()
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: CoolmasterDataUpdateCoordinator,
|
||||
@@ -129,20 +125,8 @@ class CoolmasterClimate(CoolmasterEntity, ClimateEntity):
|
||||
def fan_mode(self):
|
||||
"""Return the fan setting."""
|
||||
|
||||
# Normalize to lowercase for lookup, and pass unknown lowercase values through.
|
||||
fan_speed_lower = self._unit.fan_speed.lower()
|
||||
if fan_speed_lower not in CM_TO_HA_FAN:
|
||||
# TODO(2026.7.0): Stop supporting unknown fan speeds.
|
||||
if fan_speed_lower not in CoolmasterClimate.warned_unknown_fan_speeds:
|
||||
CoolmasterClimate.warned_unknown_fan_speeds.add(fan_speed_lower)
|
||||
_LOGGER.warning(
|
||||
"Detected unknown fan speed value from HVAC unit: %s. "
|
||||
"Support for unknown fan speeds will be removed in 2026.7.0",
|
||||
fan_speed_lower,
|
||||
)
|
||||
return fan_speed_lower
|
||||
|
||||
return CM_TO_HA_FAN[fan_speed_lower]
|
||||
# Normalize to lowercase for lookup, and pass unknown values through.
|
||||
return CM_TO_HA_FAN.get(self._unit.fan_speed.lower(), self._unit.fan_speed)
|
||||
|
||||
@property
|
||||
def fan_modes(self):
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
"mqtt": ["esphome/discover/#"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": [
|
||||
"aioesphomeapi==44.0.0",
|
||||
"aioesphomeapi==43.14.0",
|
||||
"esphome-dashboard-api==1.3.0",
|
||||
"bleak-esphome==3.6.0"
|
||||
],
|
||||
|
||||
@@ -11,7 +11,6 @@ from homeassistant.components.sensor import (
|
||||
RestoreSensor,
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN, UnitOfMass
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
@@ -48,7 +47,6 @@ class EufyLifeSensorEntity(SensorEntity):
|
||||
"""Representation of an EufyLife sensor."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_state_class = SensorStateClass.MEASUREMENT
|
||||
|
||||
def __init__(self, data: EufyLifeData) -> None:
|
||||
"""Initialize the weight sensor entity."""
|
||||
|
||||
@@ -65,10 +65,10 @@ class FritzGuestWifiQRImage(FritzBoxBaseEntity, ImageEntity):
|
||||
super().__init__(avm_wrapper, device_friendly_name)
|
||||
ImageEntity.__init__(self, hass)
|
||||
|
||||
def _fetch_image(self) -> bytes:
|
||||
async def _fetch_image(self) -> bytes:
|
||||
"""Fetch the QR code from the Fritz!Box."""
|
||||
qr_stream: BytesIO = self._avm_wrapper.fritz_guest_wifi.get_wifi_qr_code(
|
||||
"png", border=2
|
||||
qr_stream: BytesIO = await self.hass.async_add_executor_job(
|
||||
self._avm_wrapper.fritz_guest_wifi.get_wifi_qr_code, "png"
|
||||
)
|
||||
qr_bytes = qr_stream.getvalue()
|
||||
_LOGGER.debug("fetched %s bytes", len(qr_bytes))
|
||||
@@ -77,15 +77,13 @@ class FritzGuestWifiQRImage(FritzBoxBaseEntity, ImageEntity):
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Fetch and set initial data and state."""
|
||||
self._current_qr_bytes = await self.hass.async_add_executor_job(
|
||||
self._fetch_image
|
||||
)
|
||||
self._current_qr_bytes = await self._fetch_image()
|
||||
self._attr_image_last_updated = dt_util.utcnow()
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update the image entity data."""
|
||||
try:
|
||||
qr_bytes = await self.hass.async_add_executor_job(self._fetch_image)
|
||||
qr_bytes = await self._fetch_image()
|
||||
except RequestException:
|
||||
self._current_qr_bytes = None
|
||||
self._attr_image_last_updated = None
|
||||
|
||||
@@ -23,7 +23,7 @@
|
||||
"pitch": "Default pitch of the voice",
|
||||
"profiles": "Default audio profiles",
|
||||
"speed": "Default rate/speed of the voice",
|
||||
"stt_model": "Speech-to-text model",
|
||||
"stt_model": "Speech-to-Text model",
|
||||
"text_type": "Default text type",
|
||||
"voice": "Default voice name (overrides language and gender)"
|
||||
}
|
||||
|
||||
@@ -43,11 +43,7 @@ SENSOR_DESCRIPTIONS: list[GreenPlanetEnergySensorEntityDescription] = [
|
||||
translation_key="highest_price_today",
|
||||
native_unit_of_measurement=f"{CURRENCY_EURO}/{UnitOfEnergy.KILO_WATT_HOUR}",
|
||||
suggested_display_precision=4,
|
||||
value_fn=lambda api, data: (
|
||||
price / 100
|
||||
if (price := api.get_highest_price_today(data)) is not None
|
||||
else None
|
||||
),
|
||||
value_fn=lambda api, data: api.get_highest_price_today(data),
|
||||
),
|
||||
GreenPlanetEnergySensorEntityDescription(
|
||||
key="gpe_highest_price_time",
|
||||
@@ -65,11 +61,7 @@ SENSOR_DESCRIPTIONS: list[GreenPlanetEnergySensorEntityDescription] = [
|
||||
native_unit_of_measurement=f"{CURRENCY_EURO}/{UnitOfEnergy.KILO_WATT_HOUR}",
|
||||
suggested_display_precision=4,
|
||||
translation_placeholders={"time_range": "(06:00-18:00)"},
|
||||
value_fn=lambda api, data: (
|
||||
price / 100
|
||||
if (price := api.get_lowest_price_day(data)) is not None
|
||||
else None
|
||||
),
|
||||
value_fn=lambda api, data: api.get_lowest_price_day(data),
|
||||
),
|
||||
GreenPlanetEnergySensorEntityDescription(
|
||||
key="gpe_lowest_price_day_time",
|
||||
@@ -88,11 +80,7 @@ SENSOR_DESCRIPTIONS: list[GreenPlanetEnergySensorEntityDescription] = [
|
||||
native_unit_of_measurement=f"{CURRENCY_EURO}/{UnitOfEnergy.KILO_WATT_HOUR}",
|
||||
suggested_display_precision=4,
|
||||
translation_placeholders={"time_range": "(18:00-06:00)"},
|
||||
value_fn=lambda api, data: (
|
||||
price / 100
|
||||
if (price := api.get_lowest_price_night(data)) is not None
|
||||
else None
|
||||
),
|
||||
value_fn=lambda api, data: api.get_lowest_price_night(data),
|
||||
),
|
||||
GreenPlanetEnergySensorEntityDescription(
|
||||
key="gpe_lowest_price_night_time",
|
||||
@@ -110,11 +98,7 @@ SENSOR_DESCRIPTIONS: list[GreenPlanetEnergySensorEntityDescription] = [
|
||||
translation_key="current_price",
|
||||
native_unit_of_measurement=f"{CURRENCY_EURO}/{UnitOfEnergy.KILO_WATT_HOUR}",
|
||||
suggested_display_precision=4,
|
||||
value_fn=lambda api, data: (
|
||||
price / 100
|
||||
if (price := api.get_current_price(data, dt_util.now().hour)) is not None
|
||||
else None
|
||||
),
|
||||
value_fn=lambda api, data: api.get_current_price(data, dt_util.now().hour),
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
@@ -1,72 +0,0 @@
|
||||
"""The Hegel integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from hegel_ip_client import HegelClient
|
||||
from hegel_ip_client.exceptions import HegelConnectionError
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_STOP, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
|
||||
from .const import DEFAULT_PORT
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.MEDIA_PLAYER]
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type HegelConfigEntry = ConfigEntry[HegelClient]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: HegelConfigEntry) -> bool:
|
||||
"""Set up the Hegel integration."""
|
||||
host = entry.data[CONF_HOST]
|
||||
|
||||
# Create and test client connection
|
||||
client = HegelClient(host, DEFAULT_PORT)
|
||||
|
||||
try:
|
||||
# Test connection before proceeding with setup
|
||||
await client.start()
|
||||
await client.ensure_connected(timeout=10.0)
|
||||
_LOGGER.debug("Successfully connected to Hegel at %s:%s", host, DEFAULT_PORT)
|
||||
except (HegelConnectionError, TimeoutError, OSError) as err:
|
||||
_LOGGER.error(
|
||||
"Failed to connect to Hegel at %s:%s: %s", host, DEFAULT_PORT, err
|
||||
)
|
||||
await client.stop() # Clean up
|
||||
raise ConfigEntryNotReady(
|
||||
f"Unable to connect to Hegel amplifier at {host}:{DEFAULT_PORT}"
|
||||
) from err
|
||||
|
||||
# Store client in runtime_data
|
||||
entry.runtime_data = client
|
||||
|
||||
async def _async_close_client(event):
|
||||
await client.stop()
|
||||
|
||||
entry.async_on_unload(
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _async_close_client)
|
||||
)
|
||||
|
||||
# Forward setup to supported platforms
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: HegelConfigEntry) -> bool:
|
||||
"""Unload a Hegel config entry and stop active client connection."""
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
if unload_ok:
|
||||
client = entry.runtime_data
|
||||
_LOGGER.debug("Stopping Hegel client for %s", entry.title)
|
||||
try:
|
||||
await client.stop()
|
||||
except (HegelConnectionError, OSError) as err:
|
||||
_LOGGER.warning("Error while stopping Hegel client: %s", err)
|
||||
|
||||
return unload_ok
|
||||
@@ -1,154 +0,0 @@
|
||||
"""Config flow for Hegel integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from hegel_ip_client import HegelClient
|
||||
from hegel_ip_client.exceptions import HegelConnectionError
|
||||
import voluptuous as vol
|
||||
from yarl import URL
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_HOST
|
||||
from homeassistant.helpers.service_info.ssdp import SsdpServiceInfo
|
||||
|
||||
from .const import CONF_MODEL, DEFAULT_PORT, DOMAIN, MODEL_INPUTS
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class HegelConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Config flow for Hegel amplifiers."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the config flow."""
|
||||
self._host: str | None = None
|
||||
self._name: str | None = None
|
||||
self._model: str | None = None
|
||||
|
||||
async def _async_try_connect(self, host: str) -> bool:
|
||||
"""Try to connect to the Hegel amplifier using the library."""
|
||||
client = HegelClient(host, DEFAULT_PORT)
|
||||
try:
|
||||
await client.start()
|
||||
await client.ensure_connected(timeout=5.0)
|
||||
except HegelConnectionError, TimeoutError, OSError:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
finally:
|
||||
await client.stop()
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle manual setup by the user."""
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
host = user_input[CONF_HOST]
|
||||
|
||||
# Prevent duplicate entries by host
|
||||
self._async_abort_entries_match({CONF_HOST: host})
|
||||
|
||||
if not await self._async_try_connect(host):
|
||||
errors["base"] = "cannot_connect"
|
||||
else:
|
||||
return self.async_create_entry(
|
||||
title=f"Hegel {user_input[CONF_MODEL]}",
|
||||
data=user_input,
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST): str,
|
||||
vol.Required(CONF_MODEL): vol.In(list(MODEL_INPUTS.keys())),
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_ssdp(
|
||||
self, discovery_info: SsdpServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle SSDP discovery."""
|
||||
upnp = discovery_info.upnp or {}
|
||||
|
||||
# Get host from presentationURL or ssdp_location
|
||||
url = upnp.get("presentationURL") or discovery_info.ssdp_location
|
||||
if not url:
|
||||
return self.async_abort(reason="no_host_found")
|
||||
|
||||
host = URL(url).host
|
||||
if not host:
|
||||
return self.async_abort(reason="no_host_found")
|
||||
|
||||
# Use UDN as unique id (device UUID)
|
||||
unique_id = discovery_info.ssdp_udn
|
||||
if not unique_id:
|
||||
return self.async_abort(reason="no_host_found")
|
||||
|
||||
await self.async_set_unique_id(unique_id)
|
||||
self._abort_if_unique_id_configured(updates={CONF_HOST: host})
|
||||
|
||||
# Test connection before showing confirmation
|
||||
if not await self._async_try_connect(host):
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
|
||||
# Get device info
|
||||
friendly_name = upnp.get("friendlyName", f"Hegel {host}")
|
||||
suggested_model = upnp.get("modelName") or ""
|
||||
model_default = next(
|
||||
(m for m in MODEL_INPUTS if suggested_model.upper().startswith(m.upper())),
|
||||
None,
|
||||
)
|
||||
|
||||
self._host = host
|
||||
self._name = friendly_name
|
||||
self._model = model_default
|
||||
|
||||
self.context.update(
|
||||
{
|
||||
"title_placeholders": {"name": friendly_name},
|
||||
}
|
||||
)
|
||||
|
||||
return await self.async_step_discovery_confirm()
|
||||
|
||||
async def async_step_discovery_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle discovery confirmation - user can change model if needed."""
|
||||
assert self._host is not None
|
||||
assert self._name is not None
|
||||
|
||||
if user_input is not None:
|
||||
return self.async_create_entry(
|
||||
title=self._name,
|
||||
data={
|
||||
CONF_HOST: self._host,
|
||||
CONF_MODEL: user_input[CONF_MODEL],
|
||||
},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="discovery_confirm",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(
|
||||
CONF_MODEL,
|
||||
default=self._model or list(MODEL_INPUTS.keys())[0],
|
||||
): vol.In(list(MODEL_INPUTS.keys())),
|
||||
}
|
||||
),
|
||||
description_placeholders={
|
||||
"host": self._host,
|
||||
"name": self._name,
|
||||
},
|
||||
)
|
||||
@@ -1,92 +0,0 @@
|
||||
"""Constants for the Hegel integration."""
|
||||
|
||||
DOMAIN = "hegel"
|
||||
DEFAULT_PORT = 50001
|
||||
|
||||
CONF_MODEL = "model"
|
||||
CONF_MAX_VOLUME = "max_volume" # 1.0 means amp's internal max
|
||||
|
||||
HEARTBEAT_TIMEOUT_MINUTES = 3
|
||||
|
||||
MODEL_INPUTS = {
|
||||
"Röst": [
|
||||
"Balanced",
|
||||
"Analog 1",
|
||||
"Analog 2",
|
||||
"Coaxial",
|
||||
"Optical 1",
|
||||
"Optical 2",
|
||||
"Optical 3",
|
||||
"USB",
|
||||
"Network",
|
||||
],
|
||||
"H95": [
|
||||
"Analog 1",
|
||||
"Analog 2",
|
||||
"Coaxial",
|
||||
"Optical 1",
|
||||
"Optical 2",
|
||||
"Optical 3",
|
||||
"USB",
|
||||
"Network",
|
||||
],
|
||||
"H120": [
|
||||
"Balanced",
|
||||
"Analog 1",
|
||||
"Analog 2",
|
||||
"Coaxial",
|
||||
"Optical 1",
|
||||
"Optical 2",
|
||||
"Optical 3",
|
||||
"USB",
|
||||
"Network",
|
||||
],
|
||||
"H190": [
|
||||
"Balanced",
|
||||
"Analog 1",
|
||||
"Analog 2",
|
||||
"Coaxial",
|
||||
"Optical 1",
|
||||
"Optical 2",
|
||||
"Optical 3",
|
||||
"USB",
|
||||
"Network",
|
||||
],
|
||||
"H190V": [
|
||||
"XLR",
|
||||
"Analog 1",
|
||||
"Analog 2",
|
||||
"Coaxial",
|
||||
"Optical 1",
|
||||
"Optical 2",
|
||||
"Optical 3",
|
||||
"USB",
|
||||
"Network",
|
||||
"Phono",
|
||||
],
|
||||
"H390": [
|
||||
"XLR",
|
||||
"Analog 1",
|
||||
"Analog 2",
|
||||
"BNC",
|
||||
"Coaxial",
|
||||
"Optical 1",
|
||||
"Optical 2",
|
||||
"Optical 3",
|
||||
"USB",
|
||||
"Network",
|
||||
],
|
||||
"H590": [
|
||||
"XLR 1",
|
||||
"XLR 2",
|
||||
"Analog 1",
|
||||
"Analog 2",
|
||||
"BNC",
|
||||
"Coaxial",
|
||||
"Optical 1",
|
||||
"Optical 2",
|
||||
"Optical 3",
|
||||
"USB",
|
||||
"Network",
|
||||
],
|
||||
}
|
||||
@@ -1,18 +0,0 @@
|
||||
{
|
||||
"domain": "hegel",
|
||||
"name": "Hegel Amplifier",
|
||||
"codeowners": ["@boazca"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/hegel/",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["hegel_ip_client"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["hegel-ip-client==0.1.4"],
|
||||
"ssdp": [
|
||||
{
|
||||
"deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1",
|
||||
"manufacturer": "Hegel"
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,343 +0,0 @@
|
||||
"""Hegel media player platform."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Callable
|
||||
import contextlib
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from hegel_ip_client import (
|
||||
COMMANDS,
|
||||
HegelClient,
|
||||
apply_state_changes,
|
||||
parse_reply_message,
|
||||
)
|
||||
from hegel_ip_client.exceptions import HegelConnectionError
|
||||
|
||||
from homeassistant.components.media_player import (
|
||||
MediaPlayerEntity,
|
||||
MediaPlayerEntityFeature,
|
||||
MediaPlayerState,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.event import async_track_time_interval
|
||||
|
||||
from . import HegelConfigEntry
|
||||
from .const import CONF_MODEL, DOMAIN, HEARTBEAT_TIMEOUT_MINUTES, MODEL_INPUTS
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: HegelConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Hegel media player from a config entry."""
|
||||
model = entry.data[CONF_MODEL]
|
||||
unique_id = entry.unique_id or entry.entry_id
|
||||
|
||||
# map inputs (source_map)
|
||||
source_map: dict[int, str] = (
|
||||
dict(enumerate(MODEL_INPUTS[model], start=1)) if model in MODEL_INPUTS else {}
|
||||
)
|
||||
|
||||
# Use the client from the config entry's runtime_data (already connected)
|
||||
client = entry.runtime_data
|
||||
|
||||
# Create entity
|
||||
media = HegelMediaPlayer(
|
||||
entry,
|
||||
client,
|
||||
source_map,
|
||||
unique_id,
|
||||
)
|
||||
|
||||
async_add_entities([media])
|
||||
|
||||
|
||||
class HegelMediaPlayer(MediaPlayerEntity):
|
||||
"""Hegel amplifier entity."""
|
||||
|
||||
_attr_should_poll = False
|
||||
_attr_name = None
|
||||
_attr_has_entity_name = True
|
||||
_attr_supported_features = (
|
||||
MediaPlayerEntityFeature.VOLUME_SET
|
||||
| MediaPlayerEntityFeature.VOLUME_MUTE
|
||||
| MediaPlayerEntityFeature.VOLUME_STEP
|
||||
| MediaPlayerEntityFeature.SELECT_SOURCE
|
||||
| MediaPlayerEntityFeature.TURN_ON
|
||||
| MediaPlayerEntityFeature.TURN_OFF
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
config_entry: HegelConfigEntry,
|
||||
client: HegelClient,
|
||||
source_map: dict[int, str],
|
||||
unique_id: str,
|
||||
) -> None:
|
||||
"""Initialize the Hegel media player entity."""
|
||||
self._entry = config_entry
|
||||
self._client = client
|
||||
self._source_map = source_map
|
||||
|
||||
# Set unique_id from config entry
|
||||
self._attr_unique_id = unique_id
|
||||
|
||||
# Set device info
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, unique_id)},
|
||||
name=config_entry.title,
|
||||
manufacturer="Hegel",
|
||||
model=config_entry.data[CONF_MODEL],
|
||||
)
|
||||
|
||||
# State will be populated by async_update on first connection
|
||||
self._state: dict[str, Any] = {}
|
||||
|
||||
# Background tasks
|
||||
self._connected_watcher_task: asyncio.Task[None] | None = None
|
||||
self._push_task: asyncio.Task[None] | None = None
|
||||
self._push_handler: Callable[[str], None] | None = None
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Handle entity added to Home Assistant."""
|
||||
await super().async_added_to_hass()
|
||||
_LOGGER.debug("Hegel media player added to hass: %s", self.entity_id)
|
||||
|
||||
# Register push handler for real-time updates from the amplifier
|
||||
# The client expects a synchronous callable; schedule a coroutine safely
|
||||
def push_handler(msg: str) -> None:
|
||||
self._push_task = self.hass.async_create_task(self._async_handle_push(msg))
|
||||
|
||||
self._push_handler = push_handler
|
||||
self._client.add_push_callback(push_handler)
|
||||
|
||||
# Register cleanup for push handler using async_on_remove
|
||||
def cleanup_push_handler() -> None:
|
||||
if self._push_handler:
|
||||
self._client.remove_push_callback(self._push_handler)
|
||||
_LOGGER.debug("Push callback removed")
|
||||
self._push_handler = None
|
||||
|
||||
self.async_on_remove(cleanup_push_handler)
|
||||
|
||||
# Perform initial state fetch if already connected
|
||||
# The watcher handles reconnections, but we need to fetch state on first setup
|
||||
if self._client.is_connected():
|
||||
_LOGGER.debug("Client already connected, performing initial state fetch")
|
||||
await self.async_update()
|
||||
|
||||
# Start a watcher task
|
||||
# Use config_entry.async_create_background_task for automatic cleanup on unload
|
||||
self._connected_watcher_task = self._entry.async_create_background_task(
|
||||
self.hass,
|
||||
self._connected_watcher(),
|
||||
name=f"hegel_{self.entity_id}_connected_watcher",
|
||||
)
|
||||
# Note: No need for async_on_remove - entry.async_create_background_task
|
||||
# automatically cancels the task when the config entry is unloaded
|
||||
|
||||
# Schedule the heartbeat every 2 minutes while the reset timeout is 3 minutes
|
||||
self.async_on_remove(
|
||||
async_track_time_interval(
|
||||
self.hass,
|
||||
self._send_heartbeat,
|
||||
timedelta(minutes=HEARTBEAT_TIMEOUT_MINUTES - 1),
|
||||
)
|
||||
)
|
||||
# Send the first heartbeat immediately
|
||||
self.hass.async_create_task(self._send_heartbeat())
|
||||
|
||||
async def _send_heartbeat(self, now=None) -> None:
|
||||
if not self.available:
|
||||
return
|
||||
try:
|
||||
await self._client.send(
|
||||
f"-r.{HEARTBEAT_TIMEOUT_MINUTES}", expect_reply=False
|
||||
)
|
||||
except (HegelConnectionError, TimeoutError, OSError) as err:
|
||||
_LOGGER.debug("Heartbeat failed: %s", err)
|
||||
|
||||
async def _async_handle_push(self, msg: str) -> None:
|
||||
"""Handle incoming push message from client (runs in event loop)."""
|
||||
try:
|
||||
update = parse_reply_message(msg)
|
||||
if update.has_changes():
|
||||
apply_state_changes(self._state, update, logger=_LOGGER, source="push")
|
||||
# notify HA
|
||||
self.async_write_ha_state()
|
||||
except ValueError, KeyError, AttributeError:
|
||||
_LOGGER.exception("Failed to handle push message")
|
||||
|
||||
async def _connected_watcher(self) -> None:
    """Mirror the client's connect/disconnect events into HA state.

    Runs until cancelled: on each (re)connect the entity is marked
    available and a refresh is scheduled; on each disconnect it is
    marked unavailable. Entirely event-driven — no polling.
    """
    connected = self._client.connected_event
    disconnected = self._client.disconnected_event
    _LOGGER.debug("Connected watcher started")

    try:
        while True:
            # Block until the client reports a connection.
            _LOGGER.debug("Watcher: waiting for connection")
            await connected.wait()
            _LOGGER.debug("Watcher: connected, refreshing state")

            # Flip availability immediately...
            self.async_write_ha_state()

            # ...then ask HA to pull fresh values from the device.
            self.async_schedule_update_ha_state(force_refresh=True)

            # Block until the client reports a disconnect.
            _LOGGER.debug("Watcher: waiting for disconnection")
            await disconnected.wait()
            _LOGGER.debug("Watcher: disconnected")

            # Availability flips via the `available` property.
            self.async_write_ha_state()

    except asyncio.CancelledError:
        _LOGGER.debug("Connected watcher cancelled")
    except (HegelConnectionError, OSError) as exc:
        _LOGGER.warning("Connected watcher failed: %s", exc)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
    """Handle entity removal from Home Assistant.

    Push-callback cleanup is registered via async_on_remove, and the
    connection-watcher task is cancelled automatically by
    entry.async_create_background_task on config-entry unload; only
    the short-lived push task needs defensive cleanup here.
    """
    await super().async_will_remove_from_hass()

    push_task = self._push_task
    if push_task is not None and not push_task.done():
        push_task.cancel()
        with contextlib.suppress(asyncio.CancelledError):
            await push_task
|
||||
|
||||
async def async_update(self) -> None:
    """Query the amplifier for power, volume, mute and input state."""
    queries = (
        COMMANDS["power_query"],
        COMMANDS["volume_query"],
        COMMANDS["mute_query"],
        COMMANDS["input_query"],
    )
    for query in queries:
        try:
            reply = await self._client.send(query, expect_reply=True, timeout=3.0)
            if reply and reply.has_changes():
                apply_state_changes(
                    self._state, reply, logger=_LOGGER, source="update"
                )
        except (HegelConnectionError, TimeoutError, OSError) as exc:
            # One failed query must not abort the remaining ones.
            _LOGGER.debug("Refresh command %s failed: %s", query, exc)
    # Push the (possibly updated) state to HA.
    self.async_write_ha_state()
|
||||
|
||||
@property
def available(self) -> bool:
    """Entity availability mirrors the TCP client's connection state."""
    connected: bool = self._client.is_connected()
    return connected
|
||||
|
||||
@property
def state(self) -> MediaPlayerState | None:
    """Return ON/OFF from the last known power state, None if unknown."""
    if (power := self._state.get("power")) is None:
        return None
    return MediaPlayerState.ON if power else MediaPlayerState.OFF
|
||||
|
||||
@property
|
||||
def volume_level(self) -> float | None:
|
||||
"""Return the volume level."""
|
||||
volume = self._state.get("volume")
|
||||
if volume is None:
|
||||
return None
|
||||
return float(volume)
|
||||
|
||||
@property
|
||||
def is_volume_muted(self) -> bool | None:
|
||||
"""Return whether volume is muted."""
|
||||
return bool(self._state.get("mute", False))
|
||||
|
||||
@property
|
||||
def source(self) -> str | None:
|
||||
"""Return the current input source."""
|
||||
idx = self._state.get("input")
|
||||
return self._source_map.get(idx, f"Input {idx}") if idx else None
|
||||
|
||||
@property
|
||||
def source_list(self) -> list[str] | None:
|
||||
"""Return the list of available input sources."""
|
||||
return [self._source_map[k] for k in sorted(self._source_map.keys())] or None
|
||||
|
||||
async def async_turn_on(self) -> None:
    """Power the amplifier on.

    Raises HomeAssistantError if the command cannot be delivered.
    """
    command = COMMANDS["power_on"]
    try:
        await self._client.send(command, expect_reply=False)
    except (HegelConnectionError, TimeoutError, OSError) as exc:
        raise HomeAssistantError(f"Failed to turn on: {exc}") from exc
|
||||
|
||||
async def async_turn_off(self) -> None:
    """Power the amplifier off.

    Raises HomeAssistantError if the command cannot be delivered.
    """
    command = COMMANDS["power_off"]
    try:
        await self._client.send(command, expect_reply=False)
    except (HegelConnectionError, TimeoutError, OSError) as exc:
        raise HomeAssistantError(f"Failed to turn off: {exc}") from exc
|
||||
|
||||
async def async_set_volume_level(self, volume: float) -> None:
    """Set the volume; ``volume`` is clamped to HA's 0..1 range."""
    clamped = min(max(volume, 0.0), 1.0)
    # The amplifier expects an integer percentage.
    device_volume = int(round(clamped * 100))
    try:
        await self._client.send(
            COMMANDS["volume_set"](device_volume), expect_reply=False
        )
    except (HegelConnectionError, TimeoutError, OSError) as exc:
        raise HomeAssistantError(f"Failed to set volume: {exc}") from exc
|
||||
|
||||
async def async_mute_volume(self, mute: bool) -> None:
    """Mute or unmute the volume."""
    command = COMMANDS["mute_on" if mute else "mute_off"]
    try:
        await self._client.send(command, expect_reply=False)
    except (HegelConnectionError, TimeoutError, OSError) as exc:
        raise HomeAssistantError(f"Failed to set mute: {exc}") from exc
|
||||
|
||||
async def async_volume_up(self) -> None:
    """Step the volume up by one increment."""
    command = COMMANDS["volume_up"]
    try:
        await self._client.send(command, expect_reply=False)
    except (HegelConnectionError, TimeoutError, OSError) as exc:
        raise HomeAssistantError(f"Failed to increase volume: {exc}") from exc
|
||||
|
||||
async def async_volume_down(self) -> None:
    """Step the volume down by one increment."""
    command = COMMANDS["volume_down"]
    try:
        await self._client.send(command, expect_reply=False)
    except (HegelConnectionError, TimeoutError, OSError) as exc:
        raise HomeAssistantError(f"Failed to decrease volume: {exc}") from exc
|
||||
|
||||
async def async_select_source(self, source: str) -> None:
    """Switch the amplifier to the named input source.

    Raises ServiceValidationError for an unknown source name and
    HomeAssistantError when the command cannot be delivered.
    """
    # Reverse-map friendly name -> input index.
    name_to_index = {name: index for index, name in self._source_map.items()}
    index = name_to_index.get(source)
    if index is None:
        raise ServiceValidationError(f"Unknown source: {source}")
    try:
        await self._client.send(COMMANDS["input_set"](index), expect_reply=False)
    except (HegelConnectionError, TimeoutError, OSError) as exc:
        raise HomeAssistantError(
            f"Failed to select source {source}: {exc}"
        ) from exc
|
||||
@@ -1,95 +0,0 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide additional actions.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide additional actions.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup:
|
||||
status: done
|
||||
comment: |
|
||||
Entities subscribe to push events from hegel-ip-client library.
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide additional actions.
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide an options flow.
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow:
|
||||
status: exempt
|
||||
comment: |
|
||||
Device uses local IP control without authentication.
|
||||
test-coverage: done
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info: todo
|
||||
discovery: done
|
||||
docs-data-update: done
|
||||
docs-examples: done
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: done
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: |
|
||||
Device type integration.
|
||||
entity-category:
|
||||
status: exempt
|
||||
comment: |
|
||||
Single media_player entity, no categories needed.
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default:
|
||||
status: exempt
|
||||
comment: |
|
||||
Single main entity, should be enabled by default.
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
icon-translations: done
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: |
|
||||
No repair issues needed for this integration.
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: |
|
||||
Device type integration.
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession:
|
||||
status: exempt
|
||||
comment: |
|
||||
Uses raw TCP connection, not HTTP.
|
||||
strict-typing: todo
|
||||
@@ -1,35 +0,0 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"no_host_found": "[%key:common::config_flow::abort::no_devices_found%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
|
||||
},
|
||||
"step": {
|
||||
"discovery_confirm": {
|
||||
"data": {
|
||||
"model": "Model"
|
||||
},
|
||||
"data_description": {
|
||||
"model": "Select your Hegel amplifier model for proper input mapping"
|
||||
},
|
||||
"description": "Discovered Hegel amplifier **{name}** at `{host}`. Confirm the model to complete setup.",
|
||||
"title": "Confirm Hegel amplifier"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"model": "Model"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "Hostname or IP address of your Hegel amplifier",
|
||||
"model": "Select your Hegel amplifier model for proper input mapping"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,11 +1,11 @@
|
||||
{
|
||||
"domain": "homematicip_cloud",
|
||||
"name": "HomematicIP Cloud",
|
||||
"codeowners": ["@hahn-th", "@lackas"],
|
||||
"codeowners": ["@hahn-th"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/homematicip_cloud",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["homematicip"],
|
||||
"requirements": ["homematicip==2.6.0"]
|
||||
"requirements": ["homematicip==2.5.0"]
|
||||
}
|
||||
|
||||
@@ -28,7 +28,7 @@ from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
MAX_WS_RECONNECT_TIME = 600
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
SCAN_INTERVAL = timedelta(minutes=8)
|
||||
DEFAULT_RECONNECT_TIME = 2 # Define a default reconnect time
|
||||
PING_INTERVAL = 60
|
||||
|
||||
|
||||
@@ -9,5 +9,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["aioautomower"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["aioautomower==2.7.3"]
|
||||
"requirements": ["aioautomower==2.7.1"]
|
||||
}
|
||||
|
||||
@@ -9,5 +9,5 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aioimmich"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["aioimmich==0.12.0"]
|
||||
"requirements": ["aioimmich==0.11.1"]
|
||||
}
|
||||
|
||||
@@ -12,5 +12,5 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["intellifire4py"],
|
||||
"requirements": ["intellifire4py==4.3.1"]
|
||||
"requirements": ["intellifire4py==4.2.1"]
|
||||
}
|
||||
|
||||
@@ -30,7 +30,6 @@ _LOGGER = logging.getLogger(__name__)
|
||||
TIMER_NOT_FOUND_RESPONSE = "timer_not_found"
|
||||
MULTIPLE_TIMERS_MATCHED_RESPONSE = "multiple_timers_matched"
|
||||
NO_TIMER_SUPPORT_RESPONSE = "no_timer_support"
|
||||
NO_TIMER_COMMAND_RESPONSE = "no_timer_command"
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -193,17 +192,6 @@ class MultipleTimersMatchedError(intent.IntentHandleError):
|
||||
super().__init__("Multiple timers matched", MULTIPLE_TIMERS_MATCHED_RESPONSE)
|
||||
|
||||
|
||||
class NoTimerCommandError(intent.IntentHandleError):
|
||||
"""Error when a conversation command does not match any intent."""
|
||||
|
||||
def __init__(self, command: str) -> None:
|
||||
"""Initialize error."""
|
||||
super().__init__(
|
||||
f"Intent not recognized: {command}",
|
||||
NO_TIMER_COMMAND_RESPONSE,
|
||||
)
|
||||
|
||||
|
||||
class TimersNotSupportedError(intent.IntentHandleError):
|
||||
"""Error when a timer intent is used from a device that isn't registered to handle timer events."""
|
||||
|
||||
@@ -848,12 +836,6 @@ class StartTimerIntentHandler(intent.IntentHandler):
|
||||
# Fail early if this is not a delayed command
|
||||
raise TimersNotSupportedError(intent_obj.device_id)
|
||||
|
||||
# Validate conversation command if provided
|
||||
if conversation_command and not await self._validate_conversation_command(
|
||||
intent_obj, conversation_command
|
||||
):
|
||||
raise NoTimerCommandError(conversation_command)
|
||||
|
||||
name: str | None = None
|
||||
if "name" in slots:
|
||||
name = slots["name"]["value"]
|
||||
@@ -883,48 +865,6 @@ class StartTimerIntentHandler(intent.IntentHandler):
|
||||
|
||||
return intent_obj.create_response()
|
||||
|
||||
async def _validate_conversation_command(
|
||||
self, intent_obj: intent.Intent, conversation_command: str
|
||||
) -> bool:
|
||||
"""Validate that a conversation command can be executed."""
|
||||
from homeassistant.components.conversation import ( # noqa: PLC0415
|
||||
ConversationInput,
|
||||
async_get_agent,
|
||||
default_agent,
|
||||
)
|
||||
|
||||
# Only validate if using the default agent
|
||||
conversation_agent = async_get_agent(
|
||||
intent_obj.hass, intent_obj.conversation_agent_id
|
||||
)
|
||||
|
||||
if conversation_agent is None or not isinstance(
|
||||
conversation_agent, default_agent.DefaultAgent
|
||||
):
|
||||
return True # Skip validation
|
||||
|
||||
test_input = ConversationInput(
|
||||
text=conversation_command,
|
||||
context=intent_obj.context,
|
||||
conversation_id=None,
|
||||
device_id=intent_obj.device_id,
|
||||
satellite_id=intent_obj.satellite_id,
|
||||
language=intent_obj.language,
|
||||
agent_id=conversation_agent.entity_id,
|
||||
)
|
||||
|
||||
# check for sentence trigger
|
||||
if (
|
||||
await conversation_agent.async_recognize_sentence_trigger(test_input)
|
||||
) is not None:
|
||||
return True
|
||||
|
||||
# check for intent
|
||||
if (await conversation_agent.async_recognize_intent(test_input)) is not None:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
class CancelTimerIntentHandler(intent.IntentHandler):
|
||||
"""Intent handler for cancelling a timer."""
|
||||
|
||||
@@ -21,7 +21,6 @@ from .const import DOMAIN, LABS_DATA, STORAGE_KEY, STORAGE_VERSION
|
||||
from .helpers import (
|
||||
async_is_preview_feature_enabled,
|
||||
async_listen,
|
||||
async_subscribe_preview_feature,
|
||||
async_update_preview_feature,
|
||||
)
|
||||
from .models import (
|
||||
@@ -42,7 +41,6 @@ __all__ = [
|
||||
"EventLabsUpdatedData",
|
||||
"async_is_preview_feature_enabled",
|
||||
"async_listen",
|
||||
"async_subscribe_preview_feature",
|
||||
"async_update_preview_feature",
|
||||
]
|
||||
|
||||
|
||||
@@ -2,8 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable, Coroutine
|
||||
from typing import Any
|
||||
from collections.abc import Callable
|
||||
|
||||
from homeassistant.const import EVENT_LABS_UPDATED
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
@@ -33,43 +32,6 @@ def async_is_preview_feature_enabled(
|
||||
return (domain, preview_feature) in labs_data.data.preview_feature_status
|
||||
|
||||
|
||||
@callback
|
||||
def async_subscribe_preview_feature(
|
||||
hass: HomeAssistant,
|
||||
domain: str,
|
||||
preview_feature: str,
|
||||
listener: Callable[[EventLabsUpdatedData], Coroutine[Any, Any, None]],
|
||||
) -> Callable[[], None]:
|
||||
"""Listen for changes to a specific preview feature.
|
||||
|
||||
Args:
|
||||
hass: HomeAssistant instance
|
||||
domain: Integration domain
|
||||
preview_feature: Preview feature name
|
||||
listener: Coroutine function to invoke when the preview feature
|
||||
is toggled. Receives the event data as argument. Runs eagerly.
|
||||
|
||||
Returns:
|
||||
Callable to unsubscribe from the listener
|
||||
"""
|
||||
|
||||
@callback
|
||||
def _async_event_filter(event_data: EventLabsUpdatedData) -> bool:
|
||||
"""Filter labs events for this integration's preview feature."""
|
||||
return (
|
||||
event_data["domain"] == domain
|
||||
and event_data["preview_feature"] == preview_feature
|
||||
)
|
||||
|
||||
async def _handler(event: Event[EventLabsUpdatedData]) -> None:
|
||||
"""Handle labs feature update event."""
|
||||
await listener(event.data)
|
||||
|
||||
return hass.bus.async_listen(
|
||||
EVENT_LABS_UPDATED, _handler, event_filter=_async_event_filter
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
def async_listen(
|
||||
hass: HomeAssistant,
|
||||
@@ -89,10 +51,16 @@ def async_listen(
|
||||
Callable to unsubscribe from the listener
|
||||
"""
|
||||
|
||||
async def _listener(_event_data: EventLabsUpdatedData) -> None:
|
||||
listener()
|
||||
@callback
|
||||
def _async_feature_updated(event: Event[EventLabsUpdatedData]) -> None:
|
||||
"""Handle labs feature update event."""
|
||||
if (
|
||||
event.data["domain"] == domain
|
||||
and event.data["preview_feature"] == preview_feature
|
||||
):
|
||||
listener()
|
||||
|
||||
return async_subscribe_preview_feature(hass, domain, preview_feature, _listener)
|
||||
return hass.bus.async_listen(EVENT_LABS_UPDATED, _async_feature_updated)
|
||||
|
||||
|
||||
async def async_update_preview_feature(
|
||||
|
||||
@@ -13,10 +13,9 @@ from homeassistant.core import HomeAssistant, callback
|
||||
from .const import LABS_DATA
|
||||
from .helpers import (
|
||||
async_is_preview_feature_enabled,
|
||||
async_subscribe_preview_feature,
|
||||
async_listen,
|
||||
async_update_preview_feature,
|
||||
)
|
||||
from .models import EventLabsUpdatedData
|
||||
|
||||
|
||||
@callback
|
||||
@@ -103,6 +102,7 @@ async def websocket_update_preview_feature(
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
|
||||
@callback
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "labs/subscribe",
|
||||
@@ -110,8 +110,7 @@ async def websocket_update_preview_feature(
|
||||
vol.Required("preview_feature"): str,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
async def websocket_subscribe_feature(
|
||||
def websocket_subscribe_feature(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
@@ -133,13 +132,10 @@ async def websocket_subscribe_feature(
|
||||
|
||||
preview_feature = labs_data.preview_features[preview_feature_id]
|
||||
|
||||
async def send_event(event_data: EventLabsUpdatedData | None = None) -> None:
|
||||
@callback
|
||||
def send_event() -> None:
|
||||
"""Send feature state to client."""
|
||||
enabled = (
|
||||
event_data["enabled"]
|
||||
if event_data is not None
|
||||
else async_is_preview_feature_enabled(hass, domain, preview_feature_key)
|
||||
)
|
||||
enabled = async_is_preview_feature_enabled(hass, domain, preview_feature_key)
|
||||
connection.send_message(
|
||||
websocket_api.event_message(
|
||||
msg["id"],
|
||||
@@ -147,9 +143,9 @@ async def websocket_subscribe_feature(
|
||||
)
|
||||
)
|
||||
|
||||
connection.subscriptions[msg["id"]] = async_subscribe_preview_feature(
|
||||
connection.subscriptions[msg["id"]] = async_listen(
|
||||
hass, domain, preview_feature_key, send_event
|
||||
)
|
||||
|
||||
connection.send_result(msg["id"])
|
||||
await send_event()
|
||||
send_event()
|
||||
|
||||
@@ -1,34 +0,0 @@
|
||||
"""Diagnostics support for Liebherr."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import asdict
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.const import CONF_API_KEY
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import LiebherrConfigEntry
|
||||
|
||||
TO_REDACT = {CONF_API_KEY}
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, entry: LiebherrConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
return {
|
||||
"devices": {
|
||||
device_id: {
|
||||
"coordinator": {
|
||||
"last_update_success": coordinator.last_update_success,
|
||||
"update_interval": str(coordinator.update_interval),
|
||||
"last_exception": str(coordinator.last_exception)
|
||||
if coordinator.last_exception
|
||||
else None,
|
||||
},
|
||||
"data": asdict(coordinator.data),
|
||||
}
|
||||
for device_id, coordinator in entry.runtime_data.items()
|
||||
},
|
||||
}
|
||||
@@ -41,7 +41,7 @@ rules:
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: done
|
||||
diagnostics: todo
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: Cloud API does not require updating entry data from network discovery.
|
||||
|
||||
@@ -805,6 +805,39 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
"""Return the color mode of the light."""
|
||||
return self._attr_color_mode
|
||||
|
||||
@property
|
||||
def _light_internal_color_mode(self) -> str:
|
||||
"""Return the color mode of the light with backwards compatibility."""
|
||||
if (color_mode := self.color_mode) is None:
|
||||
# Backwards compatibility for color_mode added in 2021.4
|
||||
# Warning added in 2024.3, break in 2025.3
|
||||
if not self.__color_mode_reported and self.__should_report_light_issue():
|
||||
self.__color_mode_reported = True
|
||||
report_issue = self._suggest_report_issue()
|
||||
_LOGGER.warning(
|
||||
(
|
||||
"%s (%s) does not report a color mode, this will stop working "
|
||||
"in Home Assistant Core 2025.3, please %s"
|
||||
),
|
||||
self.entity_id,
|
||||
type(self),
|
||||
report_issue,
|
||||
)
|
||||
|
||||
supported = self._light_internal_supported_color_modes
|
||||
|
||||
if ColorMode.HS in supported and self.hs_color is not None:
|
||||
return ColorMode.HS
|
||||
if ColorMode.COLOR_TEMP in supported and self.color_temp_kelvin is not None:
|
||||
return ColorMode.COLOR_TEMP
|
||||
if ColorMode.BRIGHTNESS in supported and self.brightness is not None:
|
||||
return ColorMode.BRIGHTNESS
|
||||
if ColorMode.ONOFF in supported:
|
||||
return ColorMode.ONOFF
|
||||
return ColorMode.UNKNOWN
|
||||
|
||||
return color_mode
|
||||
|
||||
@cached_property
|
||||
def hs_color(self) -> tuple[float, float] | None:
|
||||
"""Return the hue and saturation color value [float, float]."""
|
||||
@@ -952,8 +985,8 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
|
||||
def __validate_color_mode(
|
||||
self,
|
||||
color_mode: ColorMode | None,
|
||||
supported_color_modes: set[ColorMode],
|
||||
color_mode: ColorMode | str | None,
|
||||
supported_color_modes: set[ColorMode] | set[str],
|
||||
effect: str | None,
|
||||
) -> None:
|
||||
"""Validate the color mode."""
|
||||
@@ -966,10 +999,23 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
# color modes
|
||||
if color_mode in supported_color_modes:
|
||||
return
|
||||
raise HomeAssistantError(
|
||||
f"{self.entity_id} ({type(self)}) set to unsupported color mode "
|
||||
f"{color_mode}, expected one of {supported_color_modes}"
|
||||
)
|
||||
# Warning added in 2024.3, reject in 2025.3
|
||||
if not self.__color_mode_reported and self.__should_report_light_issue():
|
||||
self.__color_mode_reported = True
|
||||
report_issue = self._suggest_report_issue()
|
||||
_LOGGER.warning(
|
||||
(
|
||||
"%s (%s) set to unsupported color mode %s, expected one of %s, "
|
||||
"this will stop working in Home Assistant Core 2025.3, "
|
||||
"please %s"
|
||||
),
|
||||
self.entity_id,
|
||||
type(self),
|
||||
color_mode,
|
||||
supported_color_modes,
|
||||
report_issue,
|
||||
)
|
||||
return
|
||||
|
||||
# When an effect is active, the color mode should indicate what adjustments are
|
||||
# supported by the effect. To make this possible, we allow the light to set its
|
||||
@@ -982,24 +1028,49 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
if color_mode in effect_color_modes:
|
||||
return
|
||||
|
||||
raise HomeAssistantError(
|
||||
f"{self.entity_id} ({type(self)}) set to unsupported color mode "
|
||||
f"{color_mode} when rendering an effect, expected one "
|
||||
f"of {effect_color_modes}"
|
||||
)
|
||||
# Warning added in 2024.3, reject in 2025.3
|
||||
if not self.__color_mode_reported and self.__should_report_light_issue():
|
||||
self.__color_mode_reported = True
|
||||
report_issue = self._suggest_report_issue()
|
||||
_LOGGER.warning(
|
||||
(
|
||||
"%s (%s) set to unsupported color mode %s when rendering an effect,"
|
||||
" expected one of %s, this will stop working in Home Assistant "
|
||||
"Core 2025.3, please %s"
|
||||
),
|
||||
self.entity_id,
|
||||
type(self),
|
||||
color_mode,
|
||||
effect_color_modes,
|
||||
report_issue,
|
||||
)
|
||||
return
|
||||
|
||||
def __validate_supported_color_modes(
|
||||
self,
|
||||
supported_color_modes: set[ColorMode],
|
||||
) -> None:
|
||||
"""Validate the supported color modes."""
|
||||
if self.__color_mode_reported:
|
||||
return
|
||||
|
||||
try:
|
||||
valid_supported_color_modes(supported_color_modes)
|
||||
except vol.Error as err:
|
||||
raise HomeAssistantError(
|
||||
f"{self.entity_id} ({type(self)}) sets invalid supported color modes "
|
||||
f"{supported_color_modes}"
|
||||
) from err
|
||||
except vol.Error:
|
||||
# Warning added in 2024.3, reject in 2025.3
|
||||
if not self.__color_mode_reported and self.__should_report_light_issue():
|
||||
self.__color_mode_reported = True
|
||||
report_issue = self._suggest_report_issue()
|
||||
_LOGGER.warning(
|
||||
(
|
||||
"%s (%s) sets invalid supported color modes %s, this will stop "
|
||||
"working in Home Assistant Core 2025.3, please %s"
|
||||
),
|
||||
self.entity_id,
|
||||
type(self),
|
||||
supported_color_modes,
|
||||
report_issue,
|
||||
)
|
||||
|
||||
@final
|
||||
@property
|
||||
@@ -1013,17 +1084,13 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
)
|
||||
|
||||
_is_on = self.is_on
|
||||
color_mode = self.color_mode if _is_on else None
|
||||
if _is_on and color_mode is None:
|
||||
raise HomeAssistantError(
|
||||
f"{self.entity_id} ({type(self)}) does not report a color mode"
|
||||
)
|
||||
color_mode = self._light_internal_color_mode if _is_on else None
|
||||
|
||||
effect: str | None = None
|
||||
effect: str | None
|
||||
if LightEntityFeature.EFFECT in supported_features:
|
||||
if _is_on:
|
||||
effect = self.effect
|
||||
data[ATTR_EFFECT] = effect
|
||||
data[ATTR_EFFECT] = effect = self.effect if _is_on else None
|
||||
else:
|
||||
effect = None
|
||||
|
||||
self.__validate_color_mode(color_mode, legacy_supported_color_modes, effect)
|
||||
|
||||
|
||||
165
homeassistant/components/lovelace/GUIDE.md
Normal file
165
homeassistant/components/lovelace/GUIDE.md
Normal file
@@ -0,0 +1,165 @@
|
||||
# Dashboard Creation Guide
|
||||
|
||||
This guide provides best practices for building effective Home Assistant dashboards.
|
||||
|
||||
## Basic Structure of a Dashboard
|
||||
|
||||
A dashboard is a collection of views, and each view contains sections with cards. The basic structure looks like this:
|
||||
|
||||
```yaml
|
||||
views:
|
||||
- title: Living Room
|
||||
path: living-room
|
||||
icon: mdi:sofa
|
||||
badges:
|
||||
- type: entity
|
||||
entity: sensor.living_room_temperature
|
||||
- type: entity
|
||||
entity: sensor.living_room_humidity
|
||||
sections:
|
||||
- type: grid
|
||||
title: Lights
|
||||
cards:
|
||||
- type: tile
|
||||
entity: light.living_room_ceiling
|
||||
features:
|
||||
- type: light-brightness
|
||||
- type: tile
|
||||
entity: light.floor_lamp
|
||||
- type: tile
|
||||
entity: light.reading_lamp
|
||||
- type: grid
|
||||
title: Climate
|
||||
cards:
|
||||
- type: thermostat
|
||||
entity: climate.living_room
|
||||
- type: tile
|
||||
entity: sensor.living_room_temperature
|
||||
- type: tile
|
||||
entity: sensor.living_room_humidity
|
||||
```
|
||||
|
||||
## Registry Listing Strategy
|
||||
|
||||
Use the list tools first to discover available data before building cards:
|
||||
|
||||
- `area_list`: list areas and filter with `area-id` and `floor`
|
||||
- `device_list`: list devices and filter with `device-id`, `area`, and `floor`
|
||||
- `entity_list`: list entities and filter with `entity-id`, `domain`, `area`, `floor`, `label`, `device`, and `device-class`
|
||||
|
||||
When needed, use `count`, `brief`, and `limit` flags to narrow output and then run a second call with the exact IDs you want to include in the dashboard.
|
||||
|
||||
## Task-Focused Dashboards
|
||||
|
||||
When creating a dashboard focused on a specific task that involves a few devices (e.g., "Home Office", "Coffee Station", "Media Center"), include a **Maintenance section** alongside the primary controls. This section should contain:
|
||||
|
||||
- Battery levels for wireless devices
|
||||
- Signal strength indicators
|
||||
- Firmware update status
|
||||
- Device connectivity states
|
||||
- Any diagnostic entities relevant to the devices
|
||||
|
||||
This approach keeps users informed about the health of the devices supporting their task without cluttering the main interface. When something stops working, the maintenance section provides immediate visibility into potential issues.
|
||||
|
||||
## Respect Entity Categories
|
||||
|
||||
Entities have categories that indicate their intended purpose:
|
||||
|
||||
- **No category (primary)**: Main controls and states meant for regular user interaction
|
||||
- **Diagnostic**: Entities for maintenance and troubleshooting (e.g., signal strength, battery level, firmware version)
|
||||
- **Config**: Configuration entities for device settings (e.g., sensitivity levels, LED brightness)
|
||||
|
||||
When building dashboards:
|
||||
- Group primary entities together for the main user interface
|
||||
- Place diagnostic entities in a separate "Maintenance" or "Diagnostics" section
|
||||
- Config entities typically belong in a dedicated settings area, not the main dashboard
|
||||
|
||||
This separation keeps dashboards clean and prevents users from accidentally changing configuration settings.
|
||||
|
||||
## Tile Card Features for Enhanced Control
|
||||
|
||||
Tile cards support features that provide additional control directly on the card. Consider using tile card features for:
|
||||
|
||||
- **Primary controls**: Light brightness slider, cover position, fan speed
|
||||
- **Frequently used actions**: Toggle switches, quick actions
|
||||
|
||||
Avoid adding features to:
|
||||
- Diagnostic entities
|
||||
- Configuration entities
|
||||
- Entities where simple state display is sufficient
|
||||
|
||||
Tile card features make important controls more accessible and visually prominent.
|
||||
|
||||
```yaml
|
||||
type: tile
|
||||
entity: light.ceiling_lights
|
||||
features:
|
||||
- type: light-brightness
|
||||
```
|
||||
|
||||
Available features: `cover-open-close`, `cover-position`, `cover-tilt`, `cover-tilt-position`, `light-brightness`, `light-color-temp`, `lock-commands`, `lock-open-door`, `media-player-playback`, `media-player-volume-slider`, `media-player-volume-buttons`, `fan-direction`, `fan-oscillate`, `fan-preset-modes`, `fan-speed`, `alarm-modes`, `climate-fan-modes`, `climate-swing-modes`, `climate-swing-horizontal-modes`, `climate-hvac-modes`, `climate-preset-modes`, `counter-actions`, `date-set`, `select-options`, `numeric-input`, `target-humidity`, `target-temperature`, `toggle`, `water-heater-operation-modes`, `humidifier-modes`, `humidifier-toggle`, `vacuum-commands`, `valve-open-close`, `valve-position`, `lawn-mower-commands`, `update-actions`, `trend-graph`, `area-controls`, and `bar-gauge`.
|
||||
|
||||
## Specialized Cards for Specific Domains
|
||||
|
||||
### Climate Entities
|
||||
Use the **thermostat card** for climate entities. It provides:
|
||||
- Current and target temperature display
|
||||
- HVAC mode selection
|
||||
- Temperature adjustment controls
|
||||
- A visual representation that users intuitively understand
|
||||
|
||||
```yaml
|
||||
type: thermostat
|
||||
entity: climate.heatpump
|
||||
```
|
||||
|
||||
### Camera and Image Entities
|
||||
Use **picture-entity cards** for camera and image entities:
|
||||
- Hide the state (the image itself is the state)
|
||||
- Hide the name unless the image context is ambiguous (most cameras and images are self-explanatory when viewed)
|
||||
- Let the visual content speak for itself
|
||||
|
||||
```yaml
|
||||
type: picture-entity
|
||||
entity: camera.demo_camera
|
||||
show_state: false
|
||||
show_name: false
|
||||
camera_view: auto
|
||||
fit_mode: cover
|
||||
```
|
||||
|
||||
### Graph Cards
|
||||
|
||||
Sometimes you want to show historical data for an entity. The choice of graph card depends on the type of entity:
|
||||
|
||||
#### Statistics Graph (for sensor entities)
|
||||
Use **statistics-graph** cards when displaying sensor data over time:
|
||||
- Automatically calculates and displays statistics (mean, min, max)
|
||||
- Optimized for numerical sensor data
|
||||
- Better performance for long time ranges
|
||||
|
||||
#### History Graph (for other entity types)
|
||||
Use **history-graph** cards for:
|
||||
- Climate entity history (showing temperature changes alongside HVAC states)
|
||||
- Binary sensor timelines
|
||||
- State-based entities where you want to see state changes over time
|
||||
- Any non-sensor entity where historical data is valuable
|
||||
|
||||
The history graph shows actual state changes as they occurred, which is more appropriate for non-numerical entities.
|
||||
|
||||
## Using Badges for Global Information
|
||||
|
||||
Badges are ideal for displaying global data points that apply to an entire dashboard view. Good candidates include:
|
||||
|
||||
- Area temperature and humidity
|
||||
- Security system status
|
||||
- Weather conditions
|
||||
- Presence/occupancy indicators
|
||||
- General alerts or warnings
|
||||
|
||||
If the information is more specific to a subset of the dashboard, consider adding it to a section header instead of a badge. Badges work best for truly dashboard-wide context.
|
||||
|
||||
```yaml
|
||||
type: entity
|
||||
entity: sensor.temperature
|
||||
```
|
||||
@@ -196,6 +196,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
websocket_api.async_register_command(
|
||||
hass, websocket.websocket_lovelace_delete_config
|
||||
)
|
||||
websocket_api.async_register_command(
|
||||
hass, websocket.websocket_lovelace_generate_dashboard
|
||||
)
|
||||
|
||||
yaml_dashboards = config[DOMAIN].get(CONF_DASHBOARDS, {})
|
||||
|
||||
|
||||
379
homeassistant/components/lovelace/llm.py
Normal file
379
homeassistant/components/lovelace/llm.py
Normal file
@@ -0,0 +1,379 @@
|
||||
"""LLM tools for generating Lovelace dashboards."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Any, cast
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import (
|
||||
area_registry as ar,
|
||||
device_registry as dr,
|
||||
entity_registry as er,
|
||||
llm,
|
||||
)
|
||||
from homeassistant.util.json import JsonObjectType
|
||||
|
||||
API_ID = "lovelace_dashboard_generation"
|
||||
API_NAME = "Lovelace Dashboard Generation"
|
||||
API_PROMPT = """Use the list tools to discover available areas, devices and entities.
|
||||
Always reference real entity_ids from tool results when building dashboard cards.
|
||||
Return dashboard data that includes a top-level `views` array."""
|
||||
|
||||
GENERATE_GUIDELINES = Path(__file__).parent / "GUIDE.md"
|
||||
|
||||
_AREA_LIST_PARAMETERS = vol.Schema(
|
||||
{
|
||||
vol.Optional("area_id"): str,
|
||||
vol.Optional("area-id"): str,
|
||||
vol.Optional("floor"): str,
|
||||
vol.Optional("count", default=False): bool,
|
||||
vol.Optional("brief", default=False): bool,
|
||||
vol.Optional("limit", default=0): vol.All(vol.Coerce(int), vol.Range(min=0)),
|
||||
}
|
||||
)
|
||||
|
||||
_DEVICE_LIST_PARAMETERS = vol.Schema(
|
||||
{
|
||||
vol.Optional("device_id"): str,
|
||||
vol.Optional("device-id"): str,
|
||||
vol.Optional("area"): str,
|
||||
vol.Optional("floor"): str,
|
||||
vol.Optional("count", default=False): bool,
|
||||
vol.Optional("brief", default=False): bool,
|
||||
vol.Optional("limit", default=0): vol.All(vol.Coerce(int), vol.Range(min=0)),
|
||||
}
|
||||
)
|
||||
|
||||
_ENTITY_LIST_PARAMETERS = vol.Schema(
|
||||
{
|
||||
vol.Optional("entity_id"): str,
|
||||
vol.Optional("entity-id"): str,
|
||||
vol.Optional("domain"): str,
|
||||
vol.Optional("area"): str,
|
||||
vol.Optional("floor"): str,
|
||||
vol.Optional("label"): str,
|
||||
vol.Optional("device"): str,
|
||||
vol.Optional("device_class"): str,
|
||||
vol.Optional("device-class"): str,
|
||||
vol.Optional("count", default=False): bool,
|
||||
vol.Optional("brief", default=False): bool,
|
||||
vol.Optional("limit", default=0): vol.All(vol.Coerce(int), vol.Range(min=0)),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def _tool_str(data: dict[str, Any], *keys: str) -> str | None:
|
||||
"""Extract a string value from alternate parameter names."""
|
||||
for key in keys:
|
||||
value = data.get(key)
|
||||
if isinstance(value, str):
|
||||
return value
|
||||
return None
|
||||
|
||||
|
||||
def _entity_device_class(
|
||||
reg_entry: er.RegistryEntry | None, attributes: dict[str, Any]
|
||||
) -> str:
|
||||
"""Resolve device class with the same precedence as hab entity list."""
|
||||
if reg_entry and reg_entry.original_device_class:
|
||||
return reg_entry.original_device_class
|
||||
if reg_entry and reg_entry.device_class:
|
||||
return reg_entry.device_class
|
||||
device_class = attributes.get("device_class")
|
||||
if isinstance(device_class, str):
|
||||
return device_class
|
||||
return ""
|
||||
|
||||
|
||||
def _apply_limit(items: list[dict[str, Any]], limit: int) -> list[dict[str, Any]]:
|
||||
"""Apply list limit the same way as hab list commands."""
|
||||
if limit > 0 and len(items) > limit:
|
||||
return items[:limit]
|
||||
return items
|
||||
|
||||
|
||||
async def build_generation_instructions(hass: HomeAssistant, prompt: str) -> str:
|
||||
"""Build instructions used for Lovelace dashboard generation."""
|
||||
guide = await hass.async_add_executor_job(GENERATE_GUIDELINES.read_text)
|
||||
|
||||
return (
|
||||
"Generate a Home Assistant Lovelace dashboard configuration.\n"
|
||||
"Return only valid JSON (no markdown and no explanation).\n"
|
||||
"Return a complete dashboard object with a top-level `views` array.\n"
|
||||
"Each view should include useful cards for the user request.\n"
|
||||
"Use the list tools to discover real area, device and entity IDs.\n"
|
||||
"Use real entity IDs discovered from available tools.\n"
|
||||
"Prioritize readable, practical dashboards over decorative layouts.\n\n"
|
||||
f"User request:\n{prompt.strip()}\n\n"
|
||||
f"{guide}"
|
||||
)
|
||||
|
||||
|
||||
class AreaListTool(llm.Tool):
|
||||
"""Tool mirroring `hab area list`."""
|
||||
|
||||
name = "area_list"
|
||||
description = (
|
||||
"List areas with hab-compatible filters: area-id, floor, count, brief, limit."
|
||||
)
|
||||
parameters = _AREA_LIST_PARAMETERS
|
||||
|
||||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
"""Initialize the tool."""
|
||||
self._hass = hass
|
||||
|
||||
async def async_call(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
tool_input: llm.ToolInput,
|
||||
llm_context: llm.LLMContext,
|
||||
) -> JsonObjectType:
|
||||
"""List areas with hab-compatible output fields."""
|
||||
del hass, llm_context
|
||||
data = cast(dict[str, Any], self.parameters(tool_input.tool_args))
|
||||
area_id_filter = _tool_str(data, "area_id", "area-id")
|
||||
floor_filter = _tool_str(data, "floor")
|
||||
count = cast(bool, data["count"])
|
||||
brief = cast(bool, data["brief"])
|
||||
limit = cast(int, data["limit"])
|
||||
|
||||
area_registry = ar.async_get(self._hass)
|
||||
|
||||
result: list[dict[str, Any]] = []
|
||||
for area in area_registry.areas.values():
|
||||
if area_id_filter and area.id != area_id_filter:
|
||||
continue
|
||||
if floor_filter and area.floor_id != floor_filter:
|
||||
continue
|
||||
result.append(
|
||||
{
|
||||
"area_id": area.id,
|
||||
"name": area.name,
|
||||
"floor_id": area.floor_id,
|
||||
"icon": area.icon,
|
||||
"labels": sorted(area.labels),
|
||||
}
|
||||
)
|
||||
|
||||
if count:
|
||||
return {"count": len(result)}
|
||||
|
||||
result = _apply_limit(result, limit)
|
||||
if brief:
|
||||
return {
|
||||
"areas": [
|
||||
{"area_id": area["area_id"], "name": area["name"]}
|
||||
for area in result
|
||||
]
|
||||
}
|
||||
return {"areas": result}
|
||||
|
||||
|
||||
class DeviceListTool(llm.Tool):
|
||||
"""Tool mirroring `hab device list`."""
|
||||
|
||||
name = "device_list"
|
||||
description = (
|
||||
"List devices with hab-compatible filters: device-id, area, floor, count,"
|
||||
" brief, limit."
|
||||
)
|
||||
parameters = _DEVICE_LIST_PARAMETERS
|
||||
|
||||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
"""Initialize the tool."""
|
||||
self._hass = hass
|
||||
|
||||
async def async_call(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
tool_input: llm.ToolInput,
|
||||
llm_context: llm.LLMContext,
|
||||
) -> JsonObjectType:
|
||||
"""List devices with hab-compatible output fields."""
|
||||
del hass, llm_context
|
||||
data = cast(dict[str, Any], self.parameters(tool_input.tool_args))
|
||||
device_id_filter = _tool_str(data, "device_id", "device-id")
|
||||
area_filter = _tool_str(data, "area")
|
||||
floor_filter = _tool_str(data, "floor")
|
||||
count = cast(bool, data["count"])
|
||||
brief = cast(bool, data["brief"])
|
||||
limit = cast(int, data["limit"])
|
||||
|
||||
area_floor_map: dict[str, str] = {}
|
||||
if floor_filter:
|
||||
area_registry = ar.async_get(self._hass)
|
||||
area_floor_map = {
|
||||
area.id: area.floor_id or ""
|
||||
for area in area_registry.areas.values()
|
||||
if area.id
|
||||
}
|
||||
|
||||
device_registry = dr.async_get(self._hass)
|
||||
result: list[dict[str, Any]] = []
|
||||
for device in device_registry.devices.values():
|
||||
if device_id_filter and device.id != device_id_filter:
|
||||
continue
|
||||
if area_filter and device.area_id != area_filter:
|
||||
continue
|
||||
if floor_filter:
|
||||
if not device.area_id:
|
||||
continue
|
||||
if area_floor_map.get(device.area_id) != floor_filter:
|
||||
continue
|
||||
result.append(
|
||||
{
|
||||
"id": device.id,
|
||||
"name": device.name,
|
||||
"manufacturer": device.manufacturer,
|
||||
"model": device.model,
|
||||
"area_id": device.area_id,
|
||||
}
|
||||
)
|
||||
|
||||
if count:
|
||||
return {"count": len(result)}
|
||||
|
||||
result = _apply_limit(result, limit)
|
||||
if brief:
|
||||
return {
|
||||
"devices": [{"id": item["id"], "name": item["name"]} for item in result]
|
||||
}
|
||||
return {"devices": result}
|
||||
|
||||
|
||||
class EntityListTool(llm.Tool):
|
||||
"""Tool mirroring `hab entity list`."""
|
||||
|
||||
name = "entity_list"
|
||||
description = (
|
||||
"List entities with hab-compatible filters: entity-id, domain, area, floor,"
|
||||
" label, device, device-class, count, brief, limit."
|
||||
)
|
||||
parameters = _ENTITY_LIST_PARAMETERS
|
||||
|
||||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
"""Initialize the tool."""
|
||||
self._hass = hass
|
||||
|
||||
async def async_call(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
tool_input: llm.ToolInput,
|
||||
llm_context: llm.LLMContext,
|
||||
) -> JsonObjectType:
|
||||
"""List entities with hab-compatible output fields."""
|
||||
del hass, llm_context
|
||||
data = cast(dict[str, Any], self.parameters(tool_input.tool_args))
|
||||
entity_id_filter = _tool_str(data, "entity_id", "entity-id")
|
||||
domain_filter = _tool_str(data, "domain")
|
||||
area_filter = _tool_str(data, "area")
|
||||
floor_filter = _tool_str(data, "floor")
|
||||
label_filter = _tool_str(data, "label")
|
||||
device_filter = _tool_str(data, "device")
|
||||
device_class_filter = _tool_str(data, "device_class", "device-class")
|
||||
count = cast(bool, data["count"])
|
||||
brief = cast(bool, data["brief"])
|
||||
limit = cast(int, data["limit"])
|
||||
|
||||
area_floor_map: dict[str, str] = {}
|
||||
if floor_filter:
|
||||
area_registry = ar.async_get(self._hass)
|
||||
area_floor_map = {
|
||||
area.id: area.floor_id or ""
|
||||
for area in area_registry.areas.values()
|
||||
if area.id
|
||||
}
|
||||
|
||||
entity_registry = er.async_get(self._hass)
|
||||
|
||||
result: list[dict[str, Any]] = []
|
||||
for state in self._hass.states.async_all():
|
||||
entity_id = state.entity_id
|
||||
|
||||
if entity_id_filter and entity_id != entity_id_filter:
|
||||
continue
|
||||
|
||||
if domain_filter and state.domain != domain_filter:
|
||||
continue
|
||||
|
||||
reg_entry = entity_registry.async_get(entity_id)
|
||||
|
||||
if device_filter:
|
||||
if reg_entry is None or reg_entry.device_id != device_filter:
|
||||
continue
|
||||
|
||||
if area_filter:
|
||||
if reg_entry is None or reg_entry.area_id != area_filter:
|
||||
continue
|
||||
|
||||
if floor_filter:
|
||||
if reg_entry is None or not reg_entry.area_id:
|
||||
continue
|
||||
if area_floor_map.get(reg_entry.area_id) != floor_filter:
|
||||
continue
|
||||
|
||||
if label_filter:
|
||||
if reg_entry is None or label_filter not in reg_entry.labels:
|
||||
continue
|
||||
|
||||
friendly_name = state.attributes.get("friendly_name")
|
||||
if not isinstance(friendly_name, str):
|
||||
friendly_name = ""
|
||||
|
||||
device_class = _entity_device_class(reg_entry, state.attributes)
|
||||
if device_class_filter and device_class != device_class_filter:
|
||||
continue
|
||||
|
||||
result.append(
|
||||
{
|
||||
"entity_id": entity_id,
|
||||
"state": state.state,
|
||||
"name": friendly_name,
|
||||
"area_id": reg_entry.area_id if reg_entry else "",
|
||||
"device_id": reg_entry.device_id if reg_entry else "",
|
||||
"device_class": device_class,
|
||||
"labels": sorted(reg_entry.labels) if reg_entry else [],
|
||||
"disabled": reg_entry.disabled_by is not None
|
||||
if reg_entry
|
||||
else False,
|
||||
}
|
||||
)
|
||||
|
||||
if count:
|
||||
return {"count": len(result)}
|
||||
|
||||
result = _apply_limit(result, limit)
|
||||
if brief:
|
||||
return {
|
||||
"entities": [
|
||||
{"entity_id": item["entity_id"], "name": item["name"]}
|
||||
for item in result
|
||||
]
|
||||
}
|
||||
return {"entities": result}
|
||||
|
||||
|
||||
class LovelaceDashboardGenerationAPI(llm.API):
|
||||
"""LLM API for Lovelace dashboard generation."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
"""Initialize the API."""
|
||||
super().__init__(hass=hass, id=API_ID, name=API_NAME)
|
||||
|
||||
async def async_get_api_instance(
|
||||
self, llm_context: llm.LLMContext
|
||||
) -> llm.APIInstance:
|
||||
"""Return the API instance."""
|
||||
return llm.APIInstance(
|
||||
api=self,
|
||||
api_prompt=API_PROMPT,
|
||||
llm_context=llm_context,
|
||||
tools=[
|
||||
AreaListTool(self.hass),
|
||||
DeviceListTool(self.hass),
|
||||
EntityListTool(self.hass),
|
||||
],
|
||||
)
|
||||
@@ -8,11 +8,12 @@ from typing import TYPE_CHECKING, Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.components import ai_task, websocket_api
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.json import json_fragment
|
||||
from homeassistant.util.json import json_loads
|
||||
|
||||
from .const import (
|
||||
CONF_RESOURCE_MODE,
|
||||
@@ -22,6 +23,7 @@ from .const import (
|
||||
ConfigNotFound,
|
||||
)
|
||||
from .dashboard import LovelaceConfig
|
||||
from .llm import LovelaceDashboardGenerationAPI, build_generation_instructions
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .resources import ResourceStorageCollection
|
||||
@@ -184,3 +186,93 @@ async def websocket_lovelace_delete_config(
|
||||
) -> None:
|
||||
"""Delete Lovelace UI configuration."""
|
||||
await config.async_delete()
|
||||
|
||||
|
||||
def _coerce_generated_dashboard(data: Any) -> dict[str, Any]:
|
||||
"""Coerce AI output into a dashboard config object."""
|
||||
if isinstance(data, dict):
|
||||
return data
|
||||
|
||||
if not isinstance(data, str):
|
||||
raise HomeAssistantError("Generated dashboard must be a valid JSON object")
|
||||
|
||||
candidates = [data.strip()]
|
||||
|
||||
if "```" in data:
|
||||
for block in data.split("```"):
|
||||
candidate = block.strip()
|
||||
if not candidate:
|
||||
continue
|
||||
if candidate.casefold().startswith("json"):
|
||||
candidate = candidate[4:].strip()
|
||||
candidates.append(candidate)
|
||||
|
||||
for candidate in candidates:
|
||||
try:
|
||||
parsed = json_loads(candidate)
|
||||
except ValueError:
|
||||
continue
|
||||
if isinstance(parsed, dict):
|
||||
return parsed
|
||||
|
||||
raise HomeAssistantError("Generated dashboard must be a valid JSON object")
|
||||
|
||||
|
||||
def _validate_generated_dashboard(data: Any) -> dict[str, Any]:
|
||||
"""Validate generated dashboard response."""
|
||||
if not isinstance(data, dict):
|
||||
raise HomeAssistantError("Generated dashboard must be an object")
|
||||
|
||||
views = data.get("views")
|
||||
if not isinstance(views, list) or not views:
|
||||
raise HomeAssistantError(
|
||||
"Generated dashboard must include at least one view in `views`"
|
||||
)
|
||||
|
||||
if not all(isinstance(view, dict) for view in views):
|
||||
raise HomeAssistantError("Each dashboard view must be an object")
|
||||
|
||||
return data
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
"type": "lovelace/config/generate",
|
||||
vol.Required("prompt"): cv.string,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
async def websocket_lovelace_generate_dashboard(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Generate a Lovelace dashboard configuration from a prompt."""
|
||||
if ai_task.DOMAIN not in hass.config.components:
|
||||
connection.send_error(
|
||||
msg["id"],
|
||||
"error",
|
||||
"AI Task integration is not available. Configure AI Task first.",
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
result = await ai_task.async_generate_data(
|
||||
hass,
|
||||
task_name="lovelace_dashboard_generation",
|
||||
instructions=await build_generation_instructions(hass, msg["prompt"]),
|
||||
llm_api=LovelaceDashboardGenerationAPI(hass),
|
||||
)
|
||||
config = _validate_generated_dashboard(_coerce_generated_dashboard(result.data))
|
||||
except HomeAssistantError as err:
|
||||
connection.send_error(msg["id"], "error", str(err))
|
||||
return
|
||||
|
||||
connection.send_result(
|
||||
msg["id"],
|
||||
{
|
||||
"conversation_id": result.conversation_id,
|
||||
"config": config,
|
||||
},
|
||||
)
|
||||
|
||||
@@ -1,20 +0,0 @@
|
||||
"""Diagnostics support for Lunatone integration."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import LunatoneConfigEntry
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, entry: LunatoneConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
return {
|
||||
"info": entry.runtime_data.coordinator_info.data.model_dump(),
|
||||
"devices": [
|
||||
v.data.model_dump()
|
||||
for v in entry.runtime_data.coordinator_devices.data.values()
|
||||
],
|
||||
}
|
||||
@@ -51,7 +51,7 @@ rules:
|
||||
test-coverage: done
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: done
|
||||
diagnostics: todo
|
||||
discovery-update-info:
|
||||
status: todo
|
||||
comment: Discovery not yet supported
|
||||
|
||||
@@ -251,18 +251,6 @@ DISCOVERY_SCHEMAS = [
|
||||
entity_class=MatterBinarySensor,
|
||||
required_attributes=(clusters.SmokeCoAlarm.Attributes.SmokeState,),
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.BINARY_SENSOR,
|
||||
entity_description=MatterBinarySensorEntityDescription(
|
||||
key="SmokeCoAlarmCOStateSensor",
|
||||
device_class=BinarySensorDeviceClass.CO,
|
||||
device_to_ha=lambda x: (
|
||||
x != clusters.SmokeCoAlarm.Enums.AlarmStateEnum.kNormal
|
||||
),
|
||||
),
|
||||
entity_class=MatterBinarySensor,
|
||||
required_attributes=(clusters.SmokeCoAlarm.Attributes.COState,),
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.BINARY_SENSOR,
|
||||
entity_description=MatterBinarySensorEntityDescription(
|
||||
|
||||
@@ -435,9 +435,9 @@ class MatterLight(MatterEntity, LightEntity):
|
||||
and color_mode == ColorMode.XY
|
||||
):
|
||||
self._attr_xy_color = self._get_xy_color()
|
||||
elif self._supports_color_temperature:
|
||||
elif self._attr_color_temp_kelvin is not None:
|
||||
self._attr_color_mode = ColorMode.COLOR_TEMP
|
||||
elif self._supports_brightness:
|
||||
elif self._attr_brightness is not None:
|
||||
self._attr_color_mode = ColorMode.BRIGHTNESS
|
||||
else:
|
||||
self._attr_color_mode = ColorMode.ONOFF
|
||||
|
||||
@@ -284,7 +284,6 @@ DISCOVERY_SCHEMAS = [
|
||||
),
|
||||
featuremap_contains=(clusters.Thermostat.Bitmaps.Feature.kSetback),
|
||||
),
|
||||
# Eve temperature offset with higher min/max
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.NUMBER,
|
||||
entity_description=MatterNumberEntityDescription(
|
||||
@@ -304,27 +303,7 @@ DISCOVERY_SCHEMAS = [
|
||||
required_attributes=(
|
||||
clusters.Thermostat.Attributes.LocalTemperatureCalibration,
|
||||
),
|
||||
vendor_id=(4874,), # Eve Systems
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.NUMBER,
|
||||
entity_description=MatterNumberEntityDescription(
|
||||
key="TemperatureOffset",
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
translation_key="temperature_offset",
|
||||
native_max_value=25, # Matter 1.3 limit
|
||||
native_min_value=-25, # Matter 1.3 limit
|
||||
native_step=0.5,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_to_ha=lambda x: None if x is None else x / 10,
|
||||
ha_to_device=lambda x: round(x * 10),
|
||||
mode=NumberMode.BOX,
|
||||
),
|
||||
entity_class=MatterNumber,
|
||||
required_attributes=(
|
||||
clusters.Thermostat.Attributes.LocalTemperatureCalibration,
|
||||
),
|
||||
vendor_id=(4874,),
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.NUMBER,
|
||||
|
||||
@@ -265,8 +265,6 @@ class ProgramPhaseOven(MieleEnum, missing_to_none=True):
|
||||
heating_up = 3073
|
||||
process_running = 3074
|
||||
process_finished = 3078
|
||||
searing = 3080
|
||||
roasting = 3081
|
||||
energy_save = 3084
|
||||
pre_heating = 3099
|
||||
|
||||
@@ -359,8 +357,6 @@ class ProgramPhaseSteamOvenCombi(MieleEnum, missing_to_none=True):
|
||||
heating_up = 3073
|
||||
process_running = 3074, 7938
|
||||
process_finished = 3078, 7942
|
||||
searing = 3080
|
||||
roasting = 3081
|
||||
energy_save = 3084
|
||||
pre_heating = 3099
|
||||
|
||||
|
||||
@@ -61,7 +61,6 @@ PLATE_COUNT = {
|
||||
"KM7575": 6,
|
||||
"KM7678": 6,
|
||||
"KM7697": 6,
|
||||
"KM7699": 5,
|
||||
"KM7878": 6,
|
||||
"KM7897": 6,
|
||||
"KMDA7633": 5,
|
||||
|
||||
@@ -1018,9 +1018,7 @@
|
||||
"rinse_hold": "Rinse hold",
|
||||
"rinse_out_lint": "Rinse out lint",
|
||||
"rinses": "Rinses",
|
||||
"roasting": "Roasting",
|
||||
"safety_cooling": "Safety cooling",
|
||||
"searing": "Searing",
|
||||
"slightly_dry": "Slightly dry",
|
||||
"slow_roasting": "Slow roasting",
|
||||
"smoothing": "Smoothing",
|
||||
|
||||
@@ -187,7 +187,6 @@ class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity):
|
||||
color_modes.add(ColorMode.HS)
|
||||
self._attr_supported_color_modes = filter_supported_color_modes(color_modes)
|
||||
self._fixed_color_mode = None
|
||||
self._attr_color_mode = ColorMode.UNKNOWN
|
||||
if self.supported_color_modes and len(self.supported_color_modes) == 1:
|
||||
self._fixed_color_mode = next(iter(self.supported_color_modes))
|
||||
self._attr_color_mode = self._fixed_color_mode
|
||||
|
||||
@@ -7,6 +7,6 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["nrgkick-api==1.7.1"],
|
||||
"requirements": ["nrgkick-api==1.6.0"],
|
||||
"zeroconf": ["_nrgkick._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -15,8 +15,6 @@ from aiontfy.exceptions import (
|
||||
import voluptuous as vol
|
||||
from yarl import URL
|
||||
|
||||
from homeassistant.components import camera, image
|
||||
from homeassistant.components.media_source import async_resolve_media
|
||||
from homeassistant.components.notify import (
|
||||
ATTR_MESSAGE,
|
||||
ATTR_TITLE,
|
||||
@@ -28,7 +26,6 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv, entity_platform
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.selector import MediaSelector
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import NtfyConfigEntry
|
||||
@@ -52,48 +49,25 @@ ATTR_MARKDOWN = "markdown"
|
||||
ATTR_PRIORITY = "priority"
|
||||
ATTR_TAGS = "tags"
|
||||
ATTR_SEQUENCE_ID = "sequence_id"
|
||||
ATTR_ATTACH_FILE = "attach_file"
|
||||
ATTR_FILENAME = "filename"
|
||||
GRP_ATTACHMENT = "attachment"
|
||||
MSG_ATTACHMENT = "Only one attachment source is allowed: URL or local file"
|
||||
|
||||
|
||||
def validate_filename(params: dict[str, Any]) -> dict[str, Any]:
|
||||
"""Validate filename."""
|
||||
if ATTR_FILENAME in params and not (
|
||||
ATTR_ATTACH_FILE in params or ATTR_ATTACH in params
|
||||
):
|
||||
raise vol.Invalid("Filename only allowed when attachment is provided")
|
||||
return params
|
||||
|
||||
|
||||
SERVICE_PUBLISH_SCHEMA = vol.All(
|
||||
cv.make_entity_service_schema(
|
||||
{
|
||||
vol.Optional(ATTR_TITLE): cv.string,
|
||||
vol.Optional(ATTR_MESSAGE): cv.string,
|
||||
vol.Optional(ATTR_MARKDOWN): cv.boolean,
|
||||
vol.Optional(ATTR_TAGS): vol.All(cv.ensure_list, [str]),
|
||||
vol.Optional(ATTR_PRIORITY): vol.All(vol.Coerce(int), vol.Range(1, 5)),
|
||||
vol.Optional(ATTR_CLICK): vol.All(vol.Url(), vol.Coerce(URL)),
|
||||
vol.Optional(ATTR_DELAY): vol.All(
|
||||
cv.time_period,
|
||||
vol.Range(min=timedelta(seconds=10), max=timedelta(days=3)),
|
||||
),
|
||||
vol.Optional(ATTR_EMAIL): vol.Email(),
|
||||
vol.Optional(ATTR_CALL): cv.string,
|
||||
vol.Optional(ATTR_ICON): vol.All(vol.Url(), vol.Coerce(URL)),
|
||||
vol.Optional(ATTR_SEQUENCE_ID): cv.string,
|
||||
vol.Exclusive(ATTR_ATTACH, GRP_ATTACHMENT, MSG_ATTACHMENT): vol.All(
|
||||
vol.Url(), vol.Coerce(URL)
|
||||
),
|
||||
vol.Exclusive(
|
||||
ATTR_ATTACH_FILE, GRP_ATTACHMENT, MSG_ATTACHMENT
|
||||
): MediaSelector({"accept": ["*/*"]}),
|
||||
vol.Optional(ATTR_FILENAME): cv.string,
|
||||
}
|
||||
),
|
||||
validate_filename,
|
||||
SERVICE_PUBLISH_SCHEMA = cv.make_entity_service_schema(
|
||||
{
|
||||
vol.Optional(ATTR_TITLE): cv.string,
|
||||
vol.Optional(ATTR_MESSAGE): cv.string,
|
||||
vol.Optional(ATTR_MARKDOWN): cv.boolean,
|
||||
vol.Optional(ATTR_TAGS): vol.All(cv.ensure_list, [str]),
|
||||
vol.Optional(ATTR_PRIORITY): vol.All(vol.Coerce(int), vol.Range(1, 5)),
|
||||
vol.Optional(ATTR_CLICK): vol.All(vol.Url(), vol.Coerce(URL)),
|
||||
vol.Optional(ATTR_DELAY): vol.All(
|
||||
cv.time_period,
|
||||
vol.Range(min=timedelta(seconds=10), max=timedelta(days=3)),
|
||||
),
|
||||
vol.Optional(ATTR_ATTACH): vol.All(vol.Url(), vol.Coerce(URL)),
|
||||
vol.Optional(ATTR_EMAIL): vol.Email(),
|
||||
vol.Optional(ATTR_CALL): cv.string,
|
||||
vol.Optional(ATTR_ICON): vol.All(vol.Url(), vol.Coerce(URL)),
|
||||
vol.Optional(ATTR_SEQUENCE_ID): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
SERVICE_CLEAR_DELETE_SCHEMA = cv.make_entity_service_schema(
|
||||
@@ -155,7 +129,7 @@ class NtfyNotifyEntity(NtfyBaseEntity, NotifyEntity):
|
||||
|
||||
async def publish(self, **kwargs: Any) -> None:
|
||||
"""Publish a message to a topic."""
|
||||
attachment = None
|
||||
|
||||
params: dict[str, Any] = kwargs
|
||||
delay: timedelta | None = params.get("delay")
|
||||
if delay:
|
||||
@@ -170,36 +144,10 @@ class NtfyNotifyEntity(NtfyBaseEntity, NotifyEntity):
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="delay_no_call",
|
||||
)
|
||||
if file := params.pop(ATTR_ATTACH_FILE, None):
|
||||
media_content_id: str = file["media_content_id"]
|
||||
if media_content_id.startswith("media-source://camera/"):
|
||||
entity_id = media_content_id.removeprefix("media-source://camera/")
|
||||
attachment = (
|
||||
await camera.async_get_image(self.hass, entity_id)
|
||||
).content
|
||||
elif media_content_id.startswith("media-source://image/"):
|
||||
entity_id = media_content_id.removeprefix("media-source://image/")
|
||||
attachment = (await image.async_get_image(self.hass, entity_id)).content
|
||||
else:
|
||||
media = await async_resolve_media(
|
||||
self.hass, file["media_content_id"], None
|
||||
)
|
||||
|
||||
if media.path is None:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="media_source_not_supported",
|
||||
)
|
||||
|
||||
attachment = await self.hass.async_add_executor_job(
|
||||
media.path.read_bytes
|
||||
)
|
||||
|
||||
params.setdefault(ATTR_FILENAME, media.path.name)
|
||||
|
||||
msg = Message(topic=self.topic, **params)
|
||||
try:
|
||||
await self.ntfy.publish(msg, attachment)
|
||||
await self.ntfy.publish(msg)
|
||||
except NtfyUnauthorizedAuthenticationError as e:
|
||||
self.config_entry.async_start_reauth(self.hass)
|
||||
raise HomeAssistantError(
|
||||
|
||||
@@ -67,17 +67,6 @@ publish:
|
||||
type: url
|
||||
autocomplete: url
|
||||
example: https://example.org/download.zip
|
||||
attach_file:
|
||||
required: false
|
||||
selector:
|
||||
media:
|
||||
accept:
|
||||
- "*"
|
||||
filename:
|
||||
required: false
|
||||
selector:
|
||||
text:
|
||||
example: attachment.jpg
|
||||
email:
|
||||
required: false
|
||||
selector:
|
||||
|
||||
@@ -288,9 +288,6 @@
|
||||
"entry_not_loaded": {
|
||||
"message": "The selected ntfy service is currently not loaded or disabled in Home Assistant."
|
||||
},
|
||||
"media_source_not_supported": {
|
||||
"message": "Media source currently not supported"
|
||||
},
|
||||
"publish_failed_exception": {
|
||||
"message": "Failed to publish notification due to a connection error"
|
||||
},
|
||||
@@ -356,10 +353,6 @@
|
||||
"description": "Attach images or other files by URL.",
|
||||
"name": "Attachment URL"
|
||||
},
|
||||
"attach_file": {
|
||||
"description": "Attach images or other files by uploading from a local file, camera, or image media source. When selecting a camera entity, a snapshot of the current view will be captured and attached to the notification.",
|
||||
"name": "Attach local file"
|
||||
},
|
||||
"call": {
|
||||
"description": "Phone number to call and read the message out loud using text-to-speech. Requires ntfy Pro and prior phone number verification.",
|
||||
"name": "Phone call"
|
||||
@@ -376,10 +369,6 @@
|
||||
"description": "Specify the address to forward the notification to, for example mail@example.com",
|
||||
"name": "Forward to email"
|
||||
},
|
||||
"filename": {
|
||||
"description": "Specify a custom filename for the attachment, including the file extension (for example, attachment.jpg). If not provided, the original filename will be used.",
|
||||
"name": "Attachment filename"
|
||||
},
|
||||
"icon": {
|
||||
"description": "Include an icon that will appear next to the text of the notification. Only JPEG and PNG images are supported.",
|
||||
"name": "Icon URL"
|
||||
|
||||
@@ -10,5 +10,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["onedrive_personal_sdk"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["onedrive-personal-sdk==0.1.2"]
|
||||
"requirements": ["onedrive-personal-sdk==0.1.1"]
|
||||
}
|
||||
|
||||
@@ -49,7 +49,6 @@ from .const import (
|
||||
CONF_MAX_TOKENS,
|
||||
CONF_PROMPT,
|
||||
CONF_REASONING_EFFORT,
|
||||
CONF_REASONING_SUMMARY,
|
||||
CONF_RECOMMENDED,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_TOP_P,
|
||||
@@ -72,7 +71,6 @@ from .const import (
|
||||
RECOMMENDED_IMAGE_MODEL,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
RECOMMENDED_REASONING_EFFORT,
|
||||
RECOMMENDED_REASONING_SUMMARY,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
RECOMMENDED_TOP_P,
|
||||
RECOMMENDED_VERBOSITY,
|
||||
@@ -401,23 +399,10 @@ class OpenAISubentryFlowHandler(ConfigSubentryFlow):
|
||||
mode=SelectSelectorMode.DROPDOWN,
|
||||
)
|
||||
),
|
||||
vol.Optional(
|
||||
CONF_REASONING_SUMMARY,
|
||||
default=RECOMMENDED_REASONING_SUMMARY,
|
||||
): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=["off", "auto", "short", "detailed"],
|
||||
translation_key=CONF_REASONING_SUMMARY,
|
||||
mode=SelectSelectorMode.DROPDOWN,
|
||||
)
|
||||
),
|
||||
}
|
||||
)
|
||||
elif CONF_VERBOSITY in options:
|
||||
options.pop(CONF_VERBOSITY)
|
||||
if CONF_REASONING_SUMMARY in options:
|
||||
if not model.startswith("gpt-5"):
|
||||
options.pop(CONF_REASONING_SUMMARY)
|
||||
|
||||
if self._subentry_type == "conversation" and not model.startswith(
|
||||
tuple(UNSUPPORTED_WEB_SEARCH_MODELS)
|
||||
|
||||
@@ -19,7 +19,6 @@ CONF_FILENAMES = "filenames"
|
||||
CONF_MAX_TOKENS = "max_tokens"
|
||||
CONF_PROMPT = "prompt"
|
||||
CONF_REASONING_EFFORT = "reasoning_effort"
|
||||
CONF_REASONING_SUMMARY = "reasoning_summary"
|
||||
CONF_RECOMMENDED = "recommended"
|
||||
CONF_TEMPERATURE = "temperature"
|
||||
CONF_TOP_P = "top_p"
|
||||
@@ -37,7 +36,6 @@ RECOMMENDED_CHAT_MODEL = "gpt-4o-mini"
|
||||
RECOMMENDED_IMAGE_MODEL = "gpt-image-1.5"
|
||||
RECOMMENDED_MAX_TOKENS = 3000
|
||||
RECOMMENDED_REASONING_EFFORT = "low"
|
||||
RECOMMENDED_REASONING_SUMMARY = "auto"
|
||||
RECOMMENDED_TEMPERATURE = 1.0
|
||||
RECOMMENDED_TOP_P = 1.0
|
||||
RECOMMENDED_VERBOSITY = "medium"
|
||||
|
||||
@@ -73,7 +73,6 @@ from .const import (
|
||||
CONF_IMAGE_MODEL,
|
||||
CONF_MAX_TOKENS,
|
||||
CONF_REASONING_EFFORT,
|
||||
CONF_REASONING_SUMMARY,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_TOP_P,
|
||||
CONF_VERBOSITY,
|
||||
@@ -91,7 +90,6 @@ from .const import (
|
||||
RECOMMENDED_IMAGE_MODEL,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
RECOMMENDED_REASONING_EFFORT,
|
||||
RECOMMENDED_REASONING_SUMMARY,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
RECOMMENDED_TOP_P,
|
||||
RECOMMENDED_VERBOSITY,
|
||||
@@ -503,9 +501,7 @@ class OpenAIBaseLLMEntity(Entity):
|
||||
)
|
||||
if not model_args["model"].startswith("gpt-5-pro")
|
||||
else "high", # GPT-5 pro only supports reasoning.effort: high
|
||||
"summary": options.get(
|
||||
CONF_REASONING_SUMMARY, RECOMMENDED_REASONING_SUMMARY
|
||||
),
|
||||
"summary": "auto",
|
||||
}
|
||||
model_args["include"] = ["reasoning.encrypted_content"]
|
||||
|
||||
|
||||
@@ -67,7 +67,6 @@
|
||||
"image_model": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data::image_model%]",
|
||||
"inline_citations": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data::inline_citations%]",
|
||||
"reasoning_effort": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data::reasoning_effort%]",
|
||||
"reasoning_summary": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data::reasoning_summary%]",
|
||||
"search_context_size": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data::search_context_size%]",
|
||||
"user_location": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data::user_location%]",
|
||||
"web_search": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data::web_search%]"
|
||||
@@ -77,7 +76,6 @@
|
||||
"image_model": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data_description::image_model%]",
|
||||
"inline_citations": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data_description::inline_citations%]",
|
||||
"reasoning_effort": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data_description::reasoning_effort%]",
|
||||
"reasoning_summary": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data_description::reasoning_summary%]",
|
||||
"search_context_size": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data_description::search_context_size%]",
|
||||
"user_location": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data_description::user_location%]",
|
||||
"web_search": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data_description::web_search%]"
|
||||
@@ -127,7 +125,6 @@
|
||||
"image_model": "Image generation model",
|
||||
"inline_citations": "Include links in web search results",
|
||||
"reasoning_effort": "Reasoning effort",
|
||||
"reasoning_summary": "Reasoning summary",
|
||||
"search_context_size": "Search context size",
|
||||
"user_location": "Include home location",
|
||||
"web_search": "Enable web search"
|
||||
@@ -137,7 +134,6 @@
|
||||
"image_model": "The model to use when generating images",
|
||||
"inline_citations": "If disabled, additional prompt is added to ask the model to not include source citations",
|
||||
"reasoning_effort": "How many reasoning tokens the model should generate before creating a response to the prompt",
|
||||
"reasoning_summary": "Controls the length and detail of reasoning summaries provided by the model",
|
||||
"search_context_size": "High level guidance for the amount of context window space to use for the search",
|
||||
"user_location": "Refine search results based on geography",
|
||||
"web_search": "Allow the model to search the web for the latest information before generating a response"
|
||||
@@ -169,14 +165,6 @@
|
||||
"xhigh": "X-High"
|
||||
}
|
||||
},
|
||||
"reasoning_summary": {
|
||||
"options": {
|
||||
"auto": "[%key:common::state::auto%]",
|
||||
"detailed": "Detailed",
|
||||
"off": "[%key:common::state::off%]",
|
||||
"short": "Short"
|
||||
}
|
||||
},
|
||||
"search_context_size": {
|
||||
"options": {
|
||||
"high": "[%key:common::state::high%]",
|
||||
|
||||
@@ -195,7 +195,7 @@ class OpenRGBLight(CoordinatorEntity[OpenRGBCoordinator], LightEntity):
|
||||
color_mode = self._attr_color_mode
|
||||
|
||||
if color_mode is None:
|
||||
# If color mode is still unknown, default to RGB
|
||||
# If color mode is still None, default to RGB
|
||||
color_mode = ColorMode.RGB
|
||||
|
||||
if self._attr_brightness is not None and self._attr_brightness != brightness:
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
"""Coordinator to handle Opower connections."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
@@ -45,17 +44,7 @@ _LOGGER = logging.getLogger(__name__)
|
||||
type OpowerConfigEntry = ConfigEntry[OpowerCoordinator]
|
||||
|
||||
|
||||
@dataclass
|
||||
class OpowerData:
|
||||
"""Class to hold Opower data."""
|
||||
|
||||
account: Account
|
||||
forecast: Forecast | None
|
||||
last_changed: datetime | None
|
||||
last_updated: datetime
|
||||
|
||||
|
||||
class OpowerCoordinator(DataUpdateCoordinator[dict[str, OpowerData]]):
|
||||
class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]):
|
||||
"""Handle fetching Opower data, updating sensors and inserting statistics."""
|
||||
|
||||
config_entry: OpowerConfigEntry
|
||||
@@ -96,7 +85,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, OpowerData]]):
|
||||
|
||||
async def _async_update_data(
|
||||
self,
|
||||
) -> dict[str, OpowerData]:
|
||||
) -> dict[str, Forecast]:
|
||||
"""Fetch data from API endpoint."""
|
||||
try:
|
||||
# Login expires after a few minutes.
|
||||
@@ -109,38 +98,24 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, OpowerData]]):
|
||||
except CannotConnect as err:
|
||||
_LOGGER.error("Error during login: %s", err)
|
||||
raise UpdateFailed(f"Error during login: {err}") from err
|
||||
try:
|
||||
forecasts: list[Forecast] = await self.api.async_get_forecast()
|
||||
except ApiException as err:
|
||||
_LOGGER.error("Error getting forecasts: %s", err)
|
||||
raise
|
||||
_LOGGER.debug("Updating sensor data with: %s", forecasts)
|
||||
# Because Opower provides historical usage/cost with a delay of a couple of days
|
||||
# we need to insert data into statistics.
|
||||
await self._insert_statistics()
|
||||
return {forecast.account.utility_account_id: forecast for forecast in forecasts}
|
||||
|
||||
async def _insert_statistics(self) -> None:
|
||||
"""Insert Opower statistics."""
|
||||
try:
|
||||
accounts = await self.api.async_get_accounts()
|
||||
except ApiException as err:
|
||||
_LOGGER.error("Error getting accounts: %s", err)
|
||||
raise
|
||||
|
||||
try:
|
||||
forecasts_list = await self.api.async_get_forecast()
|
||||
except ApiException as err:
|
||||
_LOGGER.error("Error getting forecasts: %s", err)
|
||||
raise
|
||||
|
||||
forecasts = {f.account.utility_account_id: f for f in forecasts_list}
|
||||
_LOGGER.debug("Updating sensor data with: %s", forecasts)
|
||||
|
||||
# Because Opower provides historical usage/cost with a delay of a couple of days
|
||||
# we need to insert data into statistics.
|
||||
last_changed_per_account = await self._insert_statistics(accounts)
|
||||
return {
|
||||
account.utility_account_id: OpowerData(
|
||||
account=account,
|
||||
forecast=forecasts.get(account.utility_account_id),
|
||||
last_changed=last_changed_per_account.get(account.utility_account_id),
|
||||
last_updated=dt_util.utcnow(),
|
||||
)
|
||||
for account in accounts
|
||||
}
|
||||
|
||||
async def _insert_statistics(self, accounts: list[Account]) -> dict[str, datetime]:
|
||||
"""Insert Opower statistics."""
|
||||
last_changed_per_account: dict[str, datetime] = {}
|
||||
for account in accounts:
|
||||
id_prefix = (
|
||||
(
|
||||
@@ -302,15 +277,6 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, OpowerData]]):
|
||||
return_sum = _safe_get_sum(stats.get(return_statistic_id, []))
|
||||
last_stats_time = stats[consumption_statistic_id][0]["start"]
|
||||
|
||||
if cost_reads:
|
||||
last_changed_per_account[account.utility_account_id] = cost_reads[
|
||||
-1
|
||||
].start_time
|
||||
elif last_stats_time is not None:
|
||||
last_changed_per_account[account.utility_account_id] = (
|
||||
dt_util.utc_from_timestamp(last_stats_time)
|
||||
)
|
||||
|
||||
cost_statistics = []
|
||||
compensation_statistics = []
|
||||
consumption_statistics = []
|
||||
@@ -377,8 +343,6 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, OpowerData]]):
|
||||
)
|
||||
async_add_external_statistics(self.hass, return_metadata, return_statistics)
|
||||
|
||||
return last_changed_per_account
|
||||
|
||||
async def _async_maybe_migrate_statistics(
|
||||
self,
|
||||
utility_account_id: str,
|
||||
|
||||
@@ -4,9 +4,9 @@ from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from datetime import date, datetime
|
||||
from datetime import date
|
||||
|
||||
from opower import MeterType, UnitOfMeasure
|
||||
from opower import Forecast, MeterType, UnitOfMeasure
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
@@ -22,7 +22,7 @@ from homeassistant.helpers.typing import StateType
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import OpowerConfigEntry, OpowerCoordinator, OpowerData
|
||||
from .coordinator import OpowerConfigEntry, OpowerCoordinator
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
@@ -31,26 +31,9 @@ PARALLEL_UPDATES = 0
|
||||
class OpowerEntityDescription(SensorEntityDescription):
|
||||
"""Class describing Opower sensors entities."""
|
||||
|
||||
value_fn: Callable[[OpowerData], str | float | date | datetime | None]
|
||||
value_fn: Callable[[Forecast], str | float | date]
|
||||
|
||||
|
||||
COMMON_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
OpowerEntityDescription(
|
||||
key="last_changed",
|
||||
translation_key="last_changed",
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: data.last_changed,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="last_updated",
|
||||
translation_key="last_updated",
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: data.last_updated,
|
||||
),
|
||||
)
|
||||
|
||||
# suggested_display_precision=0 for all sensors since
|
||||
# Opower provides 0 decimal points for all these.
|
||||
# (for the statistics in the energy dashboard Opower does provide decimal points)
|
||||
@@ -63,7 +46,7 @@ ELEC_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
# Not TOTAL_INCREASING because it can decrease for accounts with solar
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.forecast.usage_to_date if data.forecast else None,
|
||||
value_fn=lambda data: data.usage_to_date,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="elec_forecasted_usage",
|
||||
@@ -72,7 +55,7 @@ ELEC_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.forecast.forecasted_usage if data.forecast else None,
|
||||
value_fn=lambda data: data.forecasted_usage,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="elec_typical_usage",
|
||||
@@ -81,7 +64,7 @@ ELEC_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.forecast.typical_usage if data.forecast else None,
|
||||
value_fn=lambda data: data.typical_usage,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="elec_cost_to_date",
|
||||
@@ -90,7 +73,7 @@ ELEC_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
native_unit_of_measurement="USD",
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.forecast.cost_to_date if data.forecast else None,
|
||||
value_fn=lambda data: data.cost_to_date,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="elec_forecasted_cost",
|
||||
@@ -99,7 +82,7 @@ ELEC_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
native_unit_of_measurement="USD",
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.forecast.forecasted_cost if data.forecast else None,
|
||||
value_fn=lambda data: data.forecasted_cost,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="elec_typical_cost",
|
||||
@@ -108,7 +91,7 @@ ELEC_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
native_unit_of_measurement="USD",
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.forecast.typical_cost if data.forecast else None,
|
||||
value_fn=lambda data: data.typical_cost,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="elec_start_date",
|
||||
@@ -116,7 +99,7 @@ ELEC_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
device_class=SensorDeviceClass.DATE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: data.forecast.start_date if data.forecast else None,
|
||||
value_fn=lambda data: data.start_date,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="elec_end_date",
|
||||
@@ -124,7 +107,7 @@ ELEC_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
device_class=SensorDeviceClass.DATE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: data.forecast.end_date if data.forecast else None,
|
||||
value_fn=lambda data: data.end_date,
|
||||
),
|
||||
)
|
||||
GAS_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
@@ -135,7 +118,7 @@ GAS_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfVolume.CENTUM_CUBIC_FEET,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.forecast.usage_to_date if data.forecast else None,
|
||||
value_fn=lambda data: data.usage_to_date,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="gas_forecasted_usage",
|
||||
@@ -144,7 +127,7 @@ GAS_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfVolume.CENTUM_CUBIC_FEET,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.forecast.forecasted_usage if data.forecast else None,
|
||||
value_fn=lambda data: data.forecasted_usage,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="gas_typical_usage",
|
||||
@@ -153,7 +136,7 @@ GAS_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfVolume.CENTUM_CUBIC_FEET,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.forecast.typical_usage if data.forecast else None,
|
||||
value_fn=lambda data: data.typical_usage,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="gas_cost_to_date",
|
||||
@@ -162,7 +145,7 @@ GAS_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
native_unit_of_measurement="USD",
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.forecast.cost_to_date if data.forecast else None,
|
||||
value_fn=lambda data: data.cost_to_date,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="gas_forecasted_cost",
|
||||
@@ -171,7 +154,7 @@ GAS_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
native_unit_of_measurement="USD",
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.forecast.forecasted_cost if data.forecast else None,
|
||||
value_fn=lambda data: data.forecasted_cost,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="gas_typical_cost",
|
||||
@@ -180,7 +163,7 @@ GAS_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
native_unit_of_measurement="USD",
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.forecast.typical_cost if data.forecast else None,
|
||||
value_fn=lambda data: data.typical_cost,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="gas_start_date",
|
||||
@@ -188,7 +171,7 @@ GAS_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
device_class=SensorDeviceClass.DATE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: data.forecast.start_date if data.forecast else None,
|
||||
value_fn=lambda data: data.start_date,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="gas_end_date",
|
||||
@@ -196,7 +179,7 @@ GAS_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
device_class=SensorDeviceClass.DATE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: data.forecast.end_date if data.forecast else None,
|
||||
value_fn=lambda data: data.end_date,
|
||||
),
|
||||
)
|
||||
|
||||
@@ -210,38 +193,32 @@ async def async_setup_entry(
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
entities: list[OpowerSensor] = []
|
||||
opower_data_list = coordinator.data.values()
|
||||
for opower_data in opower_data_list:
|
||||
account = opower_data.account
|
||||
forecast = opower_data.forecast
|
||||
device_id = (
|
||||
f"{coordinator.api.utility.subdomain()}_{account.utility_account_id}"
|
||||
)
|
||||
forecasts = coordinator.data.values()
|
||||
for forecast in forecasts:
|
||||
device_id = f"{coordinator.api.utility.subdomain()}_{forecast.account.utility_account_id}"
|
||||
device = DeviceInfo(
|
||||
identifiers={(DOMAIN, device_id)},
|
||||
name=f"{account.meter_type.name} account {account.utility_account_id}",
|
||||
name=f"{forecast.account.meter_type.name} account {forecast.account.utility_account_id}",
|
||||
manufacturer="Opower",
|
||||
model=coordinator.api.utility.name(),
|
||||
entry_type=DeviceEntryType.SERVICE,
|
||||
)
|
||||
sensors: tuple[OpowerEntityDescription, ...] = COMMON_SENSORS
|
||||
sensors: tuple[OpowerEntityDescription, ...] = ()
|
||||
if (
|
||||
account.meter_type == MeterType.ELEC
|
||||
and forecast is not None
|
||||
forecast.account.meter_type == MeterType.ELEC
|
||||
and forecast.unit_of_measure == UnitOfMeasure.KWH
|
||||
):
|
||||
sensors += ELEC_SENSORS
|
||||
sensors = ELEC_SENSORS
|
||||
elif (
|
||||
account.meter_type == MeterType.GAS
|
||||
and forecast is not None
|
||||
forecast.account.meter_type == MeterType.GAS
|
||||
and forecast.unit_of_measure in [UnitOfMeasure.THERM, UnitOfMeasure.CCF]
|
||||
):
|
||||
sensors += GAS_SENSORS
|
||||
sensors = GAS_SENSORS
|
||||
entities.extend(
|
||||
OpowerSensor(
|
||||
coordinator,
|
||||
sensor,
|
||||
account.utility_account_id,
|
||||
forecast.account.utility_account_id,
|
||||
device,
|
||||
device_id,
|
||||
)
|
||||
@@ -273,7 +250,7 @@ class OpowerSensor(CoordinatorEntity[OpowerCoordinator], SensorEntity):
|
||||
self.utility_account_id = utility_account_id
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType | date | datetime:
|
||||
def native_value(self) -> StateType | date:
|
||||
"""Return the state."""
|
||||
return self.entity_description.value_fn(
|
||||
self.coordinator.data[self.utility_account_id]
|
||||
|
||||
@@ -115,12 +115,6 @@
|
||||
},
|
||||
"gas_usage_to_date": {
|
||||
"name": "Current bill gas usage to date"
|
||||
},
|
||||
"last_changed": {
|
||||
"name": "Last changed"
|
||||
},
|
||||
"last_updated": {
|
||||
"name": "Last updated"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
from yarl import URL
|
||||
|
||||
from homeassistant.const import CONF_URL
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DEFAULT_NAME, DOMAIN
|
||||
@@ -42,7 +42,6 @@ class PortainerEndpointEntity(PortainerCoordinatorEntity):
|
||||
manufacturer=DEFAULT_NAME,
|
||||
model="Endpoint",
|
||||
name=device_info.endpoint.name,
|
||||
entry_type=DeviceEntryType.SERVICE,
|
||||
)
|
||||
|
||||
@property
|
||||
@@ -88,7 +87,6 @@ class PortainerContainerEntity(PortainerCoordinatorEntity):
|
||||
f"{self.coordinator.config_entry.entry_id}_{self.endpoint_id}",
|
||||
),
|
||||
translation_key=None if self.device_name else "unknown_container",
|
||||
entry_type=DeviceEntryType.SERVICE,
|
||||
)
|
||||
|
||||
@property
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
"codeowners": ["@erwindouna"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/portainer",
|
||||
"integration_type": "service",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pyportainer==1.0.23"]
|
||||
|
||||
@@ -20,5 +20,5 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["reolink_aio"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["reolink-aio==0.19.0"]
|
||||
"requirements": ["reolink-aio==0.18.2"]
|
||||
}
|
||||
|
||||
@@ -87,12 +87,11 @@ NUMBER_ENTITIES = (
|
||||
ReolinkNumberEntityDescription(
|
||||
key="zoom",
|
||||
cmd_key="GetZoomFocus",
|
||||
cmd_id=294,
|
||||
translation_key="zoom",
|
||||
mode=NumberMode.SLIDER,
|
||||
native_step=1,
|
||||
get_min_value=lambda api, ch: api.zoom_range(ch)["zoom"]["min"],
|
||||
get_max_value=lambda api, ch: api.zoom_range(ch)["zoom"]["max"],
|
||||
get_min_value=lambda api, ch: api.zoom_range(ch)["zoom"]["pos"]["min"],
|
||||
get_max_value=lambda api, ch: api.zoom_range(ch)["zoom"]["pos"]["max"],
|
||||
supported=lambda api, ch: api.supported(ch, "zoom"),
|
||||
value=lambda api, ch: api.get_zoom(ch),
|
||||
method=lambda api, ch, value: api.set_zoom(ch, int(value)),
|
||||
@@ -100,12 +99,11 @@ NUMBER_ENTITIES = (
|
||||
ReolinkNumberEntityDescription(
|
||||
key="focus",
|
||||
cmd_key="GetZoomFocus",
|
||||
cmd_id=294,
|
||||
translation_key="focus",
|
||||
mode=NumberMode.SLIDER,
|
||||
native_step=1,
|
||||
get_min_value=lambda api, ch: api.zoom_range(ch)["focus"]["min"],
|
||||
get_max_value=lambda api, ch: api.zoom_range(ch)["focus"]["max"],
|
||||
get_min_value=lambda api, ch: api.zoom_range(ch)["focus"]["pos"]["min"],
|
||||
get_max_value=lambda api, ch: api.zoom_range(ch)["focus"]["pos"]["max"],
|
||||
supported=lambda api, ch: api.supported(ch, "focus"),
|
||||
value=lambda api, ch: api.get_focus(ch),
|
||||
method=lambda api, ch, value: api.set_focus(ch, int(value)),
|
||||
|
||||
@@ -61,7 +61,6 @@ class ReolinkHostSensorEntityDescription(
|
||||
SENSORS = (
|
||||
ReolinkSensorEntityDescription(
|
||||
key="ptz_pan_position",
|
||||
cmd_id=433,
|
||||
cmd_key="GetPtzCurPos",
|
||||
translation_key="ptz_pan_position",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
@@ -71,7 +70,6 @@ SENSORS = (
|
||||
),
|
||||
ReolinkSensorEntityDescription(
|
||||
key="ptz_tilt_position",
|
||||
cmd_id=433,
|
||||
cmd_key="GetPtzCurPos",
|
||||
translation_key="ptz_tilt_position",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
|
||||
@@ -13,10 +13,7 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.components import automation, websocket_api
|
||||
from homeassistant.components.blueprint import CONF_USE_BLUEPRINT
|
||||
from homeassistant.components.labs import (
|
||||
EventLabsUpdatedData,
|
||||
async_subscribe_preview_feature,
|
||||
)
|
||||
from homeassistant.components.labs import async_listen as async_labs_listen
|
||||
from homeassistant.const import (
|
||||
ATTR_ENTITY_ID,
|
||||
ATTR_MODE,
|
||||
@@ -285,13 +282,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
DOMAIN, SERVICE_TOGGLE, toggle_service, schema=SCRIPT_TURN_ONOFF_SCHEMA
|
||||
)
|
||||
|
||||
async def new_triggers_conditions_listener(
|
||||
_event_data: EventLabsUpdatedData,
|
||||
) -> None:
|
||||
@callback
|
||||
def new_triggers_conditions_listener() -> None:
|
||||
"""Handle new_triggers_conditions flag change."""
|
||||
await reload_service(ServiceCall(hass, DOMAIN, SERVICE_RELOAD))
|
||||
hass.async_create_task(
|
||||
reload_service(ServiceCall(hass, DOMAIN, SERVICE_RELOAD))
|
||||
)
|
||||
|
||||
async_subscribe_preview_feature(
|
||||
async_labs_listen(
|
||||
hass,
|
||||
automation.DOMAIN,
|
||||
automation.NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG,
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
"""Support for Switchbot devices."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import switchbot
|
||||
|
||||
@@ -23,12 +20,10 @@ from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
|
||||
from .const import (
|
||||
CONF_CURTAIN_SPEED,
|
||||
CONF_ENCRYPTION_KEY,
|
||||
CONF_KEY_ID,
|
||||
CONF_RETRY_COUNT,
|
||||
CONNECTABLE_SUPPORTED_MODEL_TYPES,
|
||||
DEFAULT_CURTAIN_SPEED,
|
||||
DEFAULT_RETRY_COUNT,
|
||||
DOMAIN,
|
||||
ENCRYPTED_MODELS,
|
||||
@@ -190,6 +185,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: SwitchbotConfigEntry) ->
|
||||
data={**entry.data, CONF_ADDRESS: mac},
|
||||
)
|
||||
|
||||
if not entry.options:
|
||||
hass.config_entries.async_update_entry(
|
||||
entry,
|
||||
options={CONF_RETRY_COUNT: DEFAULT_RETRY_COUNT},
|
||||
)
|
||||
|
||||
sensor_type: str = entry.data[CONF_SENSOR_TYPE]
|
||||
switchbot_model = HASS_SENSOR_TYPE_TO_SWITCHBOT_MODEL[sensor_type]
|
||||
# connectable means we can make connections to the device
|
||||
@@ -240,7 +241,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: SwitchbotConfigEntry) ->
|
||||
entry.data.get(CONF_NAME, entry.title),
|
||||
connectable,
|
||||
switchbot_model,
|
||||
entry,
|
||||
)
|
||||
entry.async_on_unload(coordinator.async_start())
|
||||
if not await coordinator.async_wait_ready():
|
||||
@@ -258,38 +258,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: SwitchbotConfigEntry) ->
|
||||
return True
|
||||
|
||||
|
||||
async def async_migrate_entry(hass: HomeAssistant, entry: SwitchbotConfigEntry) -> bool:
|
||||
"""Migrate old entry."""
|
||||
version = entry.version
|
||||
minor_version = entry.minor_version
|
||||
_LOGGER.debug("Migrating from version %s.%s", version, minor_version)
|
||||
|
||||
if version > 1:
|
||||
return False
|
||||
|
||||
if version == 1 and minor_version < 2:
|
||||
new_options: dict[str, Any] = {**entry.options}
|
||||
|
||||
if CONF_RETRY_COUNT not in new_options:
|
||||
new_options[CONF_RETRY_COUNT] = DEFAULT_RETRY_COUNT
|
||||
|
||||
sensor_type = entry.data.get(CONF_SENSOR_TYPE)
|
||||
if (
|
||||
sensor_type == SupportedModels.CURTAIN
|
||||
and CONF_CURTAIN_SPEED not in new_options
|
||||
):
|
||||
new_options[CONF_CURTAIN_SPEED] = DEFAULT_CURTAIN_SPEED
|
||||
|
||||
hass.config_entries.async_update_entry(
|
||||
entry,
|
||||
options=new_options,
|
||||
minor_version=2,
|
||||
)
|
||||
_LOGGER.debug("Migration to version %s.2 successful", version)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
"""Handle options update."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
@@ -34,19 +34,14 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.data_entry_flow import AbortFlow
|
||||
from homeassistant.helpers import selector
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import (
|
||||
CONF_CURTAIN_SPEED,
|
||||
CONF_ENCRYPTION_KEY,
|
||||
CONF_KEY_ID,
|
||||
CONF_LOCK_NIGHTLATCH,
|
||||
CONF_RETRY_COUNT,
|
||||
CONNECTABLE_SUPPORTED_MODEL_TYPES,
|
||||
CURTAIN_SPEED_MAX,
|
||||
CURTAIN_SPEED_MIN,
|
||||
DEFAULT_CURTAIN_SPEED,
|
||||
DEFAULT_LOCK_NIGHTLATCH,
|
||||
DEFAULT_RETRY_COUNT,
|
||||
DOMAIN,
|
||||
@@ -80,7 +75,6 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Switchbot."""
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 2
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
@@ -136,20 +130,13 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
discovery = self._discovered_adv
|
||||
name = name_from_discovery(discovery)
|
||||
model_name = discovery.data["modelName"]
|
||||
sensor_type = SUPPORTED_MODEL_TYPES[model_name]
|
||||
|
||||
options: dict[str, Any] = {CONF_RETRY_COUNT: DEFAULT_RETRY_COUNT}
|
||||
if sensor_type == SupportedModels.CURTAIN:
|
||||
options[CONF_CURTAIN_SPEED] = DEFAULT_CURTAIN_SPEED
|
||||
|
||||
return self.async_create_entry(
|
||||
title=name,
|
||||
data={
|
||||
**user_input,
|
||||
CONF_ADDRESS: discovery.address,
|
||||
CONF_SENSOR_TYPE: str(sensor_type),
|
||||
CONF_SENSOR_TYPE: str(SUPPORTED_MODEL_TYPES[model_name]),
|
||||
},
|
||||
options=options,
|
||||
)
|
||||
|
||||
async def async_step_confirm(
|
||||
@@ -468,26 +455,5 @@ class SwitchbotOptionsFlowHandler(OptionsFlow):
|
||||
): bool
|
||||
}
|
||||
)
|
||||
if (
|
||||
CONF_SENSOR_TYPE in self.config_entry.data
|
||||
and self.config_entry.data[CONF_SENSOR_TYPE] == SupportedModels.CURTAIN
|
||||
):
|
||||
options.update(
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_CURTAIN_SPEED,
|
||||
default=self.config_entry.options.get(
|
||||
CONF_CURTAIN_SPEED, DEFAULT_CURTAIN_SPEED
|
||||
),
|
||||
): selector.NumberSelector(
|
||||
selector.NumberSelectorConfig(
|
||||
min=CURTAIN_SPEED_MIN,
|
||||
max=CURTAIN_SPEED_MAX,
|
||||
step=1,
|
||||
mode=selector.NumberSelectorMode.SLIDER,
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
return self.async_show_form(step_id="init", data_schema=vol.Schema(options))
|
||||
|
||||
@@ -182,13 +182,9 @@ HASS_SENSOR_TYPE_TO_SWITCHBOT_MODEL = {
|
||||
# Config Defaults
|
||||
DEFAULT_RETRY_COUNT = 3
|
||||
DEFAULT_LOCK_NIGHTLATCH = False
|
||||
DEFAULT_CURTAIN_SPEED = 255
|
||||
CURTAIN_SPEED_MIN = 0
|
||||
CURTAIN_SPEED_MAX = 255
|
||||
|
||||
# Config Options
|
||||
CONF_RETRY_COUNT = "retry_count"
|
||||
CONF_KEY_ID = "key_id"
|
||||
CONF_ENCRYPTION_KEY = "encryption_key"
|
||||
CONF_LOCK_NIGHTLATCH = "lock_force_nightlatch"
|
||||
CONF_CURTAIN_SPEED = "curtain_speed"
|
||||
|
||||
@@ -41,7 +41,6 @@ class SwitchbotDataUpdateCoordinator(ActiveBluetoothDataUpdateCoordinator[None])
|
||||
device_name: str,
|
||||
connectable: bool,
|
||||
model: SwitchbotModel,
|
||||
config_entry: ConfigEntry,
|
||||
) -> None:
|
||||
"""Initialize global switchbot data updater."""
|
||||
super().__init__(
|
||||
@@ -58,7 +57,6 @@ class SwitchbotDataUpdateCoordinator(ActiveBluetoothDataUpdateCoordinator[None])
|
||||
self.device_name = device_name
|
||||
self.base_unique_id = base_unique_id
|
||||
self.model = model
|
||||
self.config_entry = config_entry
|
||||
self._ready_event = asyncio.Event()
|
||||
self._was_unavailable = True
|
||||
|
||||
|
||||
@@ -20,7 +20,6 @@ from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
|
||||
from .const import CONF_CURTAIN_SPEED, DEFAULT_CURTAIN_SPEED
|
||||
from .coordinator import SwitchbotConfigEntry, SwitchbotDataUpdateCoordinator
|
||||
from .entity import SwitchbotEntity, exception_handler
|
||||
|
||||
@@ -65,15 +64,6 @@ class SwitchBotCurtainEntity(SwitchbotEntity, CoverEntity, RestoreEntity):
|
||||
super().__init__(coordinator)
|
||||
self._attr_is_closed = None
|
||||
|
||||
@callback
|
||||
def _get_curtain_speed(self) -> int:
|
||||
"""Return the configured curtain speed."""
|
||||
return int(
|
||||
self.coordinator.config_entry.options.get(
|
||||
CONF_CURTAIN_SPEED, DEFAULT_CURTAIN_SPEED
|
||||
)
|
||||
)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added."""
|
||||
await super().async_added_to_hass()
|
||||
@@ -93,8 +83,7 @@ class SwitchBotCurtainEntity(SwitchbotEntity, CoverEntity, RestoreEntity):
|
||||
"""Open the curtain."""
|
||||
|
||||
_LOGGER.debug("Switchbot to open curtain %s", self._address)
|
||||
speed = self._get_curtain_speed()
|
||||
self._last_run_success = bool(await self._device.open(speed))
|
||||
self._last_run_success = bool(await self._device.open())
|
||||
self._attr_is_opening = self._device.is_opening()
|
||||
self._attr_is_closing = self._device.is_closing()
|
||||
self.async_write_ha_state()
|
||||
@@ -104,8 +93,7 @@ class SwitchBotCurtainEntity(SwitchbotEntity, CoverEntity, RestoreEntity):
|
||||
"""Close the curtain."""
|
||||
|
||||
_LOGGER.debug("Switchbot to close the curtain %s", self._address)
|
||||
speed = self._get_curtain_speed()
|
||||
self._last_run_success = bool(await self._device.close(speed))
|
||||
self._last_run_success = bool(await self._device.close())
|
||||
self._attr_is_opening = self._device.is_opening()
|
||||
self._attr_is_closing = self._device.is_closing()
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -358,12 +358,10 @@
|
||||
"step": {
|
||||
"init": {
|
||||
"data": {
|
||||
"curtain_speed": "Curtain movement speed",
|
||||
"lock_force_nightlatch": "Force Nightlatch operation mode",
|
||||
"retry_count": "Retry count"
|
||||
},
|
||||
"data_description": {
|
||||
"curtain_speed": "Speed for curtain open and close operations (1-255, where 1 is slowest and 255 is fastest)",
|
||||
"lock_force_nightlatch": "Force Nightlatch operation mode even if Nightlatch is not detected",
|
||||
"retry_count": "How many times to retry sending commands to your SwitchBot devices"
|
||||
}
|
||||
|
||||
@@ -237,9 +237,9 @@ class TelgramBotConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
# validate connection to Telegram API
|
||||
errors: dict[str, str] = {}
|
||||
user_input[CONF_API_ENDPOINT] = user_input[SECTION_ADVANCED_SETTINGS][
|
||||
CONF_API_ENDPOINT
|
||||
]
|
||||
user_input[CONF_API_ENDPOINT] = (
|
||||
user_input[SECTION_ADVANCED_SETTINGS][CONF_API_ENDPOINT],
|
||||
)
|
||||
user_input[CONF_PROXY_URL] = user_input[SECTION_ADVANCED_SETTINGS].get(
|
||||
CONF_PROXY_URL
|
||||
)
|
||||
|
||||
@@ -103,8 +103,6 @@ class AbstractTemplateEntity(Entity):
|
||||
attribute: str,
|
||||
validator: Callable[[Any], Any] | None = None,
|
||||
on_update: Callable[[Any], None] | None = None,
|
||||
render_complex: bool = False,
|
||||
**kwargs,
|
||||
) -> None:
|
||||
"""Set up a template that manages any property or attribute of the entity.
|
||||
|
||||
@@ -120,10 +118,6 @@ class AbstractTemplateEntity(Entity):
|
||||
on_update:
|
||||
Called to store the template result rather than storing it
|
||||
the supplied attribute. Passed the result of the validator.
|
||||
render_complex (default=False):
|
||||
This signals trigger based template entities to render the template
|
||||
as a complex result. State based template entities always render
|
||||
complex results.
|
||||
"""
|
||||
|
||||
def add_template(
|
||||
|
||||
@@ -124,24 +124,14 @@ class AbstractTemplateEvent(AbstractTemplateEntity, EventEntity):
|
||||
# This ensures that the __init__ on AbstractTemplateEntity is not called twice.
|
||||
def __init__(self, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
"""Initialize the features."""
|
||||
self._event_type_template = config[CONF_EVENT_TYPE]
|
||||
self._event_types_template = config[CONF_EVENT_TYPES]
|
||||
|
||||
self._attr_device_class = config.get(CONF_DEVICE_CLASS)
|
||||
|
||||
self._event_type = None
|
||||
self._attr_event_types = []
|
||||
|
||||
self.setup_template(
|
||||
CONF_EVENT_TYPES,
|
||||
"_attr_event_types",
|
||||
None,
|
||||
self._update_event_types,
|
||||
)
|
||||
self.setup_template(
|
||||
CONF_EVENT_TYPE,
|
||||
"_event_type",
|
||||
None,
|
||||
self._update_event_type,
|
||||
)
|
||||
|
||||
@callback
|
||||
def _update_event_types(self, event_types: Any) -> None:
|
||||
"""Update the event types from the template."""
|
||||
@@ -189,6 +179,25 @@ class StateEventEntity(TemplateEntity, AbstractTemplateEvent):
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
AbstractTemplateEvent.__init__(self, config)
|
||||
|
||||
@callback
|
||||
def _async_setup_templates(self) -> None:
|
||||
"""Set up templates."""
|
||||
self.add_template_attribute(
|
||||
"_attr_event_types",
|
||||
self._event_types_template,
|
||||
None,
|
||||
self._update_event_types,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
self.add_template_attribute(
|
||||
"_event_type",
|
||||
self._event_type_template,
|
||||
None,
|
||||
self._update_event_type,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
super()._async_setup_templates()
|
||||
|
||||
|
||||
class TriggerEventEntity(TriggerEntity, AbstractTemplateEvent, RestoreEntity):
|
||||
"""Event entity based on trigger data."""
|
||||
@@ -208,3 +217,20 @@ class TriggerEventEntity(TriggerEntity, AbstractTemplateEvent, RestoreEntity):
|
||||
"""Initialize the entity."""
|
||||
TriggerEntity.__init__(self, hass, coordinator, config)
|
||||
AbstractTemplateEvent.__init__(self, config)
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle update of the data."""
|
||||
self._process_data()
|
||||
|
||||
if not self.available:
|
||||
return
|
||||
|
||||
for key, updater in (
|
||||
(CONF_EVENT_TYPES, self._update_event_types),
|
||||
(CONF_EVENT_TYPE, self._update_event_type),
|
||||
):
|
||||
updater(self._rendered[key])
|
||||
|
||||
self.async_set_context(self.coordinator.data["context"])
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Generator, Sequence
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
@@ -27,16 +28,19 @@ from homeassistant.const import (
|
||||
CONF_STATE,
|
||||
CONF_UNIQUE_ID,
|
||||
CONF_VALUE_TEMPLATE,
|
||||
STATE_ON,
|
||||
STATE_UNAVAILABLE,
|
||||
STATE_UNKNOWN,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers import config_validation as cv, template
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import validators as template_validators
|
||||
from .const import DOMAIN
|
||||
from .coordinator import TriggerUpdateCoordinator
|
||||
from .entity import AbstractTemplateEntity
|
||||
@@ -199,53 +203,30 @@ class AbstractTemplateFan(AbstractTemplateEntity, FanEntity):
|
||||
|
||||
# The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
|
||||
# This ensures that the __init__ on AbstractTemplateEntity is not called twice.
|
||||
def __init__(self, name: str, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
def __init__(self, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
"""Initialize the features."""
|
||||
self.setup_state_template(
|
||||
CONF_STATE,
|
||||
"_attr_is_on",
|
||||
template_validators.boolean(self, CONF_STATE),
|
||||
)
|
||||
self._percentage_template = config.get(CONF_PERCENTAGE)
|
||||
self._preset_mode_template = config.get(CONF_PRESET_MODE)
|
||||
self._oscillating_template = config.get(CONF_OSCILLATING)
|
||||
self._direction_template = config.get(CONF_DIRECTION)
|
||||
|
||||
# Ensure legacy template entity functionality by setting percentage to None instead
|
||||
# of the FanEntity default of 0.
|
||||
# Required for legacy functionality.
|
||||
self._attr_is_on = False
|
||||
self._attr_percentage = None
|
||||
self.setup_template(
|
||||
CONF_PERCENTAGE,
|
||||
"_attr_percentage",
|
||||
template_validators.number(self, CONF_PERCENTAGE, 0, 100),
|
||||
)
|
||||
|
||||
# List of valid preset modes
|
||||
self._attr_preset_modes: list[str] | None = config.get(CONF_PRESET_MODES)
|
||||
self.setup_template(
|
||||
CONF_PRESET_MODE,
|
||||
"_attr_preset_mode",
|
||||
template_validators.item_in_list(
|
||||
self, CONF_PRESET_MODE, self._attr_preset_modes
|
||||
),
|
||||
)
|
||||
|
||||
# Oscillating boolean
|
||||
self.setup_template(
|
||||
CONF_OSCILLATING,
|
||||
"_attr_oscillating",
|
||||
template_validators.boolean(self, CONF_OSCILLATING),
|
||||
)
|
||||
|
||||
# Forward/Reverse Directions
|
||||
self.setup_template(
|
||||
CONF_DIRECTION,
|
||||
"_attr_current_direction",
|
||||
template_validators.item_in_list(self, CONF_DIRECTION, _VALID_DIRECTIONS),
|
||||
)
|
||||
|
||||
# Number of valid speeds
|
||||
self._attr_speed_count = config.get(CONF_SPEED_COUNT) or 100
|
||||
|
||||
# List of valid preset modes
|
||||
self._attr_preset_modes: list[str] | None = config.get(CONF_PRESET_MODES)
|
||||
|
||||
self._attr_supported_features |= (
|
||||
FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON
|
||||
)
|
||||
|
||||
def _iterate_scripts(
|
||||
self, config: dict[str, Any]
|
||||
) -> Generator[tuple[str, Sequence[dict[str, Any]], FanEntityFeature | int]]:
|
||||
for action_id, supported_feature in (
|
||||
(CONF_ON_ACTION, 0),
|
||||
(CONF_OFF_ACTION, 0),
|
||||
@@ -255,14 +236,99 @@ class AbstractTemplateFan(AbstractTemplateEntity, FanEntity):
|
||||
(CONF_SET_DIRECTION_ACTION, FanEntityFeature.DIRECTION),
|
||||
):
|
||||
if (action_config := config.get(action_id)) is not None:
|
||||
self.add_script(action_id, action_config, name, DOMAIN)
|
||||
self._attr_supported_features |= supported_feature
|
||||
yield (action_id, action_config, supported_feature)
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return true if device is on."""
|
||||
return self._attr_is_on
|
||||
|
||||
def _handle_state(self, result) -> None:
|
||||
if isinstance(result, bool):
|
||||
self._attr_is_on = result
|
||||
return
|
||||
|
||||
if isinstance(result, str):
|
||||
self._attr_is_on = result.lower() in ("true", STATE_ON)
|
||||
return
|
||||
|
||||
self._attr_is_on = False
|
||||
|
||||
@callback
|
||||
def _update_percentage(self, percentage):
|
||||
# Validate percentage
|
||||
try:
|
||||
percentage = int(float(percentage))
|
||||
except ValueError, TypeError:
|
||||
_LOGGER.error(
|
||||
"Received invalid percentage: %s for entity %s",
|
||||
percentage,
|
||||
self.entity_id,
|
||||
)
|
||||
self._attr_percentage = 0
|
||||
return
|
||||
|
||||
if 0 <= percentage <= 100:
|
||||
self._attr_percentage = percentage
|
||||
else:
|
||||
_LOGGER.error(
|
||||
"Received invalid percentage: %s for entity %s",
|
||||
percentage,
|
||||
self.entity_id,
|
||||
)
|
||||
self._attr_percentage = 0
|
||||
|
||||
@callback
|
||||
def _update_preset_mode(self, preset_mode):
|
||||
# Validate preset mode
|
||||
preset_mode = str(preset_mode)
|
||||
|
||||
if self.preset_modes and preset_mode in self.preset_modes:
|
||||
self._attr_preset_mode = preset_mode
|
||||
elif preset_mode in (STATE_UNAVAILABLE, STATE_UNKNOWN):
|
||||
self._attr_preset_mode = None
|
||||
else:
|
||||
_LOGGER.error(
|
||||
"Received invalid preset_mode: %s for entity %s. Expected: %s",
|
||||
preset_mode,
|
||||
self.entity_id,
|
||||
self.preset_mode,
|
||||
)
|
||||
self._attr_preset_mode = None
|
||||
|
||||
@callback
|
||||
def _update_oscillating(self, oscillating):
|
||||
# Validate osc
|
||||
if oscillating == "True" or oscillating is True:
|
||||
self._attr_oscillating = True
|
||||
elif oscillating == "False" or oscillating is False:
|
||||
self._attr_oscillating = False
|
||||
elif oscillating in (STATE_UNAVAILABLE, STATE_UNKNOWN):
|
||||
self._attr_oscillating = None
|
||||
else:
|
||||
_LOGGER.error(
|
||||
"Received invalid oscillating: %s for entity %s. Expected: True/False",
|
||||
oscillating,
|
||||
self.entity_id,
|
||||
)
|
||||
self._attr_oscillating = None
|
||||
|
||||
@callback
|
||||
def _update_direction(self, direction):
|
||||
# Validate direction
|
||||
if direction in _VALID_DIRECTIONS:
|
||||
self._attr_current_direction = direction
|
||||
elif direction in (STATE_UNAVAILABLE, STATE_UNKNOWN):
|
||||
self._attr_current_direction = None
|
||||
else:
|
||||
_LOGGER.error(
|
||||
"Received invalid direction: %s for entity %s. Expected: %s",
|
||||
direction,
|
||||
self.entity_id,
|
||||
", ".join(_VALID_DIRECTIONS),
|
||||
)
|
||||
self._attr_current_direction = None
|
||||
|
||||
async def async_turn_on(
|
||||
self,
|
||||
percentage: int | None = None,
|
||||
@@ -312,7 +378,7 @@ class AbstractTemplateFan(AbstractTemplateEntity, FanEntity):
|
||||
if self._attr_assumed_state:
|
||||
self._attr_is_on = percentage != 0
|
||||
|
||||
if self._attr_assumed_state or CONF_PERCENTAGE not in self._templates:
|
||||
if self._attr_assumed_state or self._percentage_template is None:
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_set_preset_mode(self, preset_mode: str) -> None:
|
||||
@@ -329,7 +395,7 @@ class AbstractTemplateFan(AbstractTemplateEntity, FanEntity):
|
||||
if self._attr_assumed_state:
|
||||
self._attr_is_on = True
|
||||
|
||||
if self._attr_assumed_state or CONF_PRESET_MODE not in self._templates:
|
||||
if self._attr_assumed_state or self._preset_mode_template is None:
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_oscillate(self, oscillating: bool) -> None:
|
||||
@@ -344,7 +410,7 @@ class AbstractTemplateFan(AbstractTemplateEntity, FanEntity):
|
||||
context=self._context,
|
||||
)
|
||||
|
||||
if CONF_OSCILLATING not in self._templates:
|
||||
if self._oscillating_template is None:
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_set_direction(self, direction: str) -> None:
|
||||
@@ -359,7 +425,7 @@ class AbstractTemplateFan(AbstractTemplateEntity, FanEntity):
|
||||
run_variables={ATTR_DIRECTION: direction},
|
||||
context=self._context,
|
||||
)
|
||||
if CONF_DIRECTION not in self._templates:
|
||||
if self._direction_template is None:
|
||||
self.async_write_ha_state()
|
||||
else:
|
||||
_LOGGER.error(
|
||||
@@ -383,10 +449,67 @@ class StateFanEntity(TemplateEntity, AbstractTemplateFan):
|
||||
) -> None:
|
||||
"""Initialize the fan."""
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
AbstractTemplateFan.__init__(self, config)
|
||||
name = self._attr_name
|
||||
if TYPE_CHECKING:
|
||||
assert name is not None
|
||||
AbstractTemplateFan.__init__(self, name, config)
|
||||
|
||||
for action_id, action_config, supported_feature in self._iterate_scripts(
|
||||
config
|
||||
):
|
||||
self.add_script(action_id, action_config, name, DOMAIN)
|
||||
self._attr_supported_features |= supported_feature
|
||||
|
||||
@callback
|
||||
def _update_state(self, result):
|
||||
super()._update_state(result)
|
||||
if isinstance(result, TemplateError):
|
||||
self._attr_is_on = None
|
||||
return
|
||||
|
||||
self._handle_state(result)
|
||||
|
||||
@callback
|
||||
def _async_setup_templates(self) -> None:
|
||||
"""Set up templates."""
|
||||
if self._template:
|
||||
self.add_template_attribute(
|
||||
"_attr_is_on", self._template, None, self._update_state
|
||||
)
|
||||
|
||||
if self._preset_mode_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_preset_mode",
|
||||
self._preset_mode_template,
|
||||
None,
|
||||
self._update_preset_mode,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._percentage_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_percentage",
|
||||
self._percentage_template,
|
||||
None,
|
||||
self._update_percentage,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._oscillating_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_oscillating",
|
||||
self._oscillating_template,
|
||||
None,
|
||||
self._update_oscillating,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._direction_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_current_direction",
|
||||
self._direction_template,
|
||||
None,
|
||||
self._update_direction,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
super()._async_setup_templates()
|
||||
|
||||
|
||||
class TriggerFanEntity(TriggerEntity, AbstractTemplateFan):
|
||||
@@ -402,5 +525,50 @@ class TriggerFanEntity(TriggerEntity, AbstractTemplateFan):
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
TriggerEntity.__init__(self, hass, coordinator, config)
|
||||
AbstractTemplateFan.__init__(self, config)
|
||||
|
||||
self._attr_name = name = self._rendered.get(CONF_NAME, DEFAULT_NAME)
|
||||
AbstractTemplateFan.__init__(self, name, config)
|
||||
|
||||
for action_id, action_config, supported_feature in self._iterate_scripts(
|
||||
config
|
||||
):
|
||||
self.add_script(action_id, action_config, name, DOMAIN)
|
||||
self._attr_supported_features |= supported_feature
|
||||
|
||||
for key in (
|
||||
CONF_STATE,
|
||||
CONF_PRESET_MODE,
|
||||
CONF_PERCENTAGE,
|
||||
CONF_OSCILLATING,
|
||||
CONF_DIRECTION,
|
||||
):
|
||||
if isinstance(config.get(key), template.Template):
|
||||
self._to_render_simple.append(key)
|
||||
self._parse_result.add(key)
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle update of the data."""
|
||||
self._process_data()
|
||||
|
||||
if not self.available:
|
||||
return
|
||||
|
||||
write_ha_state = False
|
||||
for key, updater in (
|
||||
(CONF_STATE, self._handle_state),
|
||||
(CONF_PRESET_MODE, self._update_preset_mode),
|
||||
(CONF_PERCENTAGE, self._update_percentage),
|
||||
(CONF_OSCILLATING, self._update_oscillating),
|
||||
(CONF_DIRECTION, self._update_direction),
|
||||
):
|
||||
if (rendered := self._rendered.get(key)) is not None:
|
||||
updater(rendered)
|
||||
write_ha_state = True
|
||||
|
||||
if len(self._rendered) > 0:
|
||||
# In case any non optimistic template
|
||||
write_ha_state = True
|
||||
|
||||
if write_ha_state:
|
||||
self.async_write_ha_state()
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Generator, Sequence
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
import voluptuous as vol
|
||||
@@ -25,14 +26,13 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import ServiceValidationError, TemplateError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers import config_validation as cv, template
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import validators as template_validators
|
||||
from .const import DOMAIN
|
||||
from .coordinator import TriggerUpdateCoordinator
|
||||
from .entity import AbstractTemplateEntity
|
||||
@@ -152,41 +152,26 @@ class AbstractTemplateLock(AbstractTemplateEntity, LockEntity):
|
||||
|
||||
# The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
|
||||
# This ensures that the __init__ on AbstractTemplateEntity is not called twice.
|
||||
def __init__(self, name: str, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
def __init__(self, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
"""Initialize the features."""
|
||||
self._code_format_template = config.get(CONF_CODE_FORMAT)
|
||||
self._code_format_template_error: TemplateError | None = None
|
||||
|
||||
self.setup_state_template(
|
||||
CONF_STATE,
|
||||
"_lock_state",
|
||||
template_validators.strenum(
|
||||
self, CONF_STATE, LockState, LockState.LOCKED, LockState.UNLOCKED
|
||||
),
|
||||
self._set_state,
|
||||
)
|
||||
|
||||
self.setup_template(
|
||||
CONF_CODE_FORMAT,
|
||||
"_attr_code_format",
|
||||
None,
|
||||
self._update_code_format,
|
||||
none_on_template_error=False,
|
||||
)
|
||||
# Legacy behavior, create all locks as Unlocked.
|
||||
self._set_state(LockState.UNLOCKED)
|
||||
|
||||
def _iterate_scripts(
|
||||
self, config: dict[str, Any]
|
||||
) -> Generator[tuple[str, Sequence[dict[str, Any]], LockEntityFeature | int]]:
|
||||
for action_id, supported_feature in (
|
||||
(CONF_LOCK, 0),
|
||||
(CONF_UNLOCK, 0),
|
||||
(CONF_OPEN, LockEntityFeature.OPEN),
|
||||
):
|
||||
if (action_config := config.get(action_id)) is not None:
|
||||
self.add_script(action_id, action_config, name, DOMAIN)
|
||||
self._attr_supported_features |= supported_feature
|
||||
yield (action_id, action_config, supported_feature)
|
||||
|
||||
def _set_state(self, state: LockState | None) -> None:
|
||||
if state is None:
|
||||
self._attr_is_locked = None
|
||||
return
|
||||
|
||||
self._attr_is_jammed = state == LockState.JAMMED
|
||||
self._attr_is_opening = state == LockState.OPENING
|
||||
self._attr_is_locking = state == LockState.LOCKING
|
||||
@@ -194,6 +179,33 @@ class AbstractTemplateLock(AbstractTemplateEntity, LockEntity):
|
||||
self._attr_is_unlocking = state == LockState.UNLOCKING
|
||||
self._attr_is_locked = state == LockState.LOCKED
|
||||
|
||||
def _handle_state(self, result: Any) -> None:
|
||||
if isinstance(result, bool):
|
||||
self._set_state(LockState.LOCKED if result else LockState.UNLOCKED)
|
||||
return
|
||||
|
||||
if isinstance(result, str):
|
||||
if result.lower() in (
|
||||
"true",
|
||||
"on",
|
||||
"locked",
|
||||
):
|
||||
self._set_state(LockState.LOCKED)
|
||||
elif result.lower() in (
|
||||
"false",
|
||||
"off",
|
||||
"unlocked",
|
||||
):
|
||||
self._set_state(LockState.UNLOCKED)
|
||||
else:
|
||||
try:
|
||||
self._set_state(LockState(result.lower()))
|
||||
except ValueError:
|
||||
self._set_state(None)
|
||||
return
|
||||
|
||||
self._set_state(None)
|
||||
|
||||
@callback
|
||||
def _update_code_format(self, render: str | TemplateError | None):
|
||||
"""Update code format from the template."""
|
||||
@@ -269,7 +281,7 @@ class AbstractTemplateLock(AbstractTemplateEntity, LockEntity):
|
||||
translation_key="code_format_template_error",
|
||||
translation_placeholders={
|
||||
"entity_id": self.entity_id,
|
||||
"code_format_template": self._templates[CONF_CODE_FORMAT].template,
|
||||
"code_format_template": self._code_format_template.template,
|
||||
"cause": str(self._code_format_template_error),
|
||||
},
|
||||
)
|
||||
@@ -288,10 +300,45 @@ class StateLockEntity(TemplateEntity, AbstractTemplateLock):
|
||||
) -> None:
|
||||
"""Initialize the lock."""
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
AbstractTemplateLock.__init__(self, config)
|
||||
name = self._attr_name
|
||||
if TYPE_CHECKING:
|
||||
assert name is not None
|
||||
AbstractTemplateLock.__init__(self, name, config)
|
||||
|
||||
for action_id, action_config, supported_feature in self._iterate_scripts(
|
||||
config
|
||||
):
|
||||
self.add_script(action_id, action_config, name, DOMAIN)
|
||||
self._attr_supported_features |= supported_feature
|
||||
|
||||
@callback
|
||||
def _update_state(self, result: str | TemplateError) -> None:
|
||||
"""Update the state from the template."""
|
||||
super()._update_state(result)
|
||||
if isinstance(result, TemplateError):
|
||||
self._attr_is_locked = None
|
||||
return
|
||||
|
||||
self._handle_state(result)
|
||||
|
||||
@callback
|
||||
def _async_setup_templates(self) -> None:
|
||||
"""Set up templates."""
|
||||
if self._template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_is_locked",
|
||||
self._template,
|
||||
None,
|
||||
self._update_state,
|
||||
)
|
||||
if self._code_format_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_code_format",
|
||||
self._code_format_template,
|
||||
None,
|
||||
self._update_code_format,
|
||||
)
|
||||
super()._async_setup_templates()
|
||||
|
||||
|
||||
class TriggerLockEntity(TriggerEntity, AbstractTemplateLock):
|
||||
@@ -307,5 +354,45 @@ class TriggerLockEntity(TriggerEntity, AbstractTemplateLock):
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
TriggerEntity.__init__(self, hass, coordinator, config)
|
||||
AbstractTemplateLock.__init__(self, config)
|
||||
|
||||
self._attr_name = name = self._rendered.get(CONF_NAME, DEFAULT_NAME)
|
||||
AbstractTemplateLock.__init__(self, name, config)
|
||||
|
||||
if CONF_STATE in config:
|
||||
self._to_render_simple.append(CONF_STATE)
|
||||
|
||||
if isinstance(config.get(CONF_CODE_FORMAT), template.Template):
|
||||
self._to_render_simple.append(CONF_CODE_FORMAT)
|
||||
self._parse_result.add(CONF_CODE_FORMAT)
|
||||
|
||||
for action_id, action_config, supported_feature in self._iterate_scripts(
|
||||
config
|
||||
):
|
||||
self.add_script(action_id, action_config, name, DOMAIN)
|
||||
self._attr_supported_features |= supported_feature
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle update of the data."""
|
||||
self._process_data()
|
||||
|
||||
if not self.available:
|
||||
return
|
||||
|
||||
write_ha_state = False
|
||||
for key, updater in (
|
||||
(CONF_STATE, self._handle_state),
|
||||
(CONF_CODE_FORMAT, self._update_code_format),
|
||||
):
|
||||
if (rendered := self._rendered.get(key)) is not None:
|
||||
updater(rendered)
|
||||
write_ha_state = True
|
||||
|
||||
if not self._attr_assumed_state:
|
||||
write_ha_state = True
|
||||
elif self._attr_assumed_state and len(self._rendered) > 0:
|
||||
# In case any non optimistic template
|
||||
write_ha_state = True
|
||||
|
||||
if write_ha_state:
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
import voluptuous as vol
|
||||
@@ -18,14 +19,14 @@ from homeassistant.components.number import (
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_NAME, CONF_STATE, CONF_UNIT_OF_MEASUREMENT
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers import config_validation as cv, template
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import TriggerUpdateCoordinator, validators as template_validators
|
||||
from . import TriggerUpdateCoordinator
|
||||
from .const import CONF_MAX, CONF_MIN, CONF_STEP, DOMAIN
|
||||
from .entity import AbstractTemplateEntity
|
||||
from .helpers import (
|
||||
@@ -41,6 +42,8 @@ from .schemas import (
|
||||
from .template_entity import TemplateEntity
|
||||
from .trigger_entity import TriggerEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONF_SET_VALUE = "set_value"
|
||||
|
||||
DEFAULT_NAME = "Template Number"
|
||||
@@ -117,29 +120,17 @@ class AbstractTemplateNumber(AbstractTemplateEntity, NumberEntity):
|
||||
|
||||
# The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
|
||||
# This ensures that the __init__ on AbstractTemplateEntity is not called twice.
|
||||
def __init__(self, name: str, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
def __init__(self, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
"""Initialize the features."""
|
||||
self._step_template = config[CONF_STEP]
|
||||
self._min_template = config[CONF_MIN]
|
||||
self._max_template = config[CONF_MAX]
|
||||
|
||||
self._attr_native_unit_of_measurement = config.get(CONF_UNIT_OF_MEASUREMENT)
|
||||
self._attr_native_step = DEFAULT_STEP
|
||||
self._attr_native_min_value = DEFAULT_MIN_VALUE
|
||||
self._attr_native_max_value = DEFAULT_MAX_VALUE
|
||||
|
||||
self.setup_state_template(
|
||||
CONF_STATE,
|
||||
"_attr_native_value",
|
||||
template_validators.number(self, CONF_STATE),
|
||||
)
|
||||
for option, attribute in (
|
||||
(CONF_STEP, "_attr_native_step"),
|
||||
(CONF_MIN, "_attr_native_min_value"),
|
||||
(CONF_MAX, "_attr_native_max_value"),
|
||||
):
|
||||
self.setup_template(
|
||||
option, attribute, template_validators.number(self, option)
|
||||
)
|
||||
|
||||
self.add_script(CONF_SET_VALUE, config[CONF_SET_VALUE], name, DOMAIN)
|
||||
|
||||
async def async_set_native_value(self, value: float) -> None:
|
||||
"""Set value of the number."""
|
||||
if self._attr_assumed_state:
|
||||
@@ -166,10 +157,46 @@ class StateNumberEntity(TemplateEntity, AbstractTemplateNumber):
|
||||
) -> None:
|
||||
"""Initialize the number."""
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
AbstractTemplateNumber.__init__(self, config)
|
||||
|
||||
name = self._attr_name
|
||||
if TYPE_CHECKING:
|
||||
assert name is not None
|
||||
AbstractTemplateNumber.__init__(self, name, config)
|
||||
|
||||
self.add_script(CONF_SET_VALUE, config[CONF_SET_VALUE], name, DOMAIN)
|
||||
|
||||
@callback
|
||||
def _async_setup_templates(self) -> None:
|
||||
"""Set up templates."""
|
||||
if self._template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_native_value",
|
||||
self._template,
|
||||
vol.Coerce(float),
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._step_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_native_step",
|
||||
self._step_template,
|
||||
vol.Coerce(float),
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._min_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_native_min_value",
|
||||
self._min_template,
|
||||
validator=vol.Coerce(float),
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._max_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_native_max_value",
|
||||
self._max_template,
|
||||
validator=vol.Coerce(float),
|
||||
none_on_template_error=True,
|
||||
)
|
||||
super()._async_setup_templates()
|
||||
|
||||
|
||||
class TriggerNumberEntity(TriggerEntity, AbstractTemplateNumber):
|
||||
@@ -185,5 +212,47 @@ class TriggerNumberEntity(TriggerEntity, AbstractTemplateNumber):
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
TriggerEntity.__init__(self, hass, coordinator, config)
|
||||
name = self._rendered.get(CONF_NAME, DEFAULT_NAME)
|
||||
AbstractTemplateNumber.__init__(self, name, config)
|
||||
AbstractTemplateNumber.__init__(self, config)
|
||||
|
||||
for key in (
|
||||
CONF_STATE,
|
||||
CONF_STEP,
|
||||
CONF_MIN,
|
||||
CONF_MAX,
|
||||
):
|
||||
if isinstance(config.get(key), template.Template):
|
||||
self._to_render_simple.append(key)
|
||||
self._parse_result.add(key)
|
||||
|
||||
self.add_script(
|
||||
CONF_SET_VALUE,
|
||||
config[CONF_SET_VALUE],
|
||||
self._rendered.get(CONF_NAME, DEFAULT_NAME),
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
def _handle_coordinator_update(self):
|
||||
"""Handle updated data from the coordinator."""
|
||||
self._process_data()
|
||||
|
||||
if not self.available:
|
||||
return
|
||||
|
||||
write_ha_state = False
|
||||
for key, attr in (
|
||||
(CONF_STATE, "_attr_native_value"),
|
||||
(CONF_STEP, "_attr_native_step"),
|
||||
(CONF_MIN, "_attr_native_min_value"),
|
||||
(CONF_MAX, "_attr_native_max_value"),
|
||||
):
|
||||
if (rendered := self._rendered.get(key)) is not None:
|
||||
setattr(self, attr, vol.Any(vol.Coerce(float), None)(rendered))
|
||||
write_ha_state = True
|
||||
|
||||
if len(self._rendered) > 0:
|
||||
# In case any non optimistic template
|
||||
write_ha_state = True
|
||||
|
||||
if write_ha_state:
|
||||
self.async_set_context(self.coordinator.data["context"])
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -24,7 +24,7 @@ from homeassistant.helpers.entity_platform import (
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import TriggerUpdateCoordinator, validators as template_validators
|
||||
from . import TriggerUpdateCoordinator
|
||||
from .const import DOMAIN
|
||||
from .entity import AbstractTemplateEntity
|
||||
from .helpers import (
|
||||
@@ -50,7 +50,7 @@ DEFAULT_NAME = "Template Select"
|
||||
SELECT_COMMON_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_OPTIONS): cv.template,
|
||||
vol.Optional(CONF_SELECT_OPTION): cv.SCRIPT_SCHEMA,
|
||||
vol.Required(CONF_SELECT_OPTION): cv.SCRIPT_SCHEMA,
|
||||
vol.Optional(CONF_STATE): cv.template,
|
||||
}
|
||||
)
|
||||
@@ -115,26 +115,13 @@ class AbstractTemplateSelect(AbstractTemplateEntity, SelectEntity):
|
||||
|
||||
# The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
|
||||
# This ensures that the __init__ on AbstractTemplateEntity is not called twice.
|
||||
def __init__(self, name: str, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
def __init__(self, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
"""Initialize the features."""
|
||||
self._options_template = config[ATTR_OPTIONS]
|
||||
|
||||
self._attr_options = []
|
||||
|
||||
self.setup_state_template(
|
||||
CONF_STATE,
|
||||
"_attr_current_option",
|
||||
cv.string,
|
||||
)
|
||||
self.setup_template(
|
||||
CONF_OPTIONS,
|
||||
"_attr_options",
|
||||
template_validators.list_of_strings(self, CONF_OPTIONS),
|
||||
)
|
||||
|
||||
self._attr_current_option = None
|
||||
|
||||
if (select_option := config.get(CONF_SELECT_OPTION)) is not None:
|
||||
self.add_script(CONF_SELECT_OPTION, select_option, name, DOMAIN)
|
||||
|
||||
async def async_select_option(self, option: str) -> None:
|
||||
"""Change the selected option."""
|
||||
if self._attr_assumed_state:
|
||||
@@ -161,10 +148,32 @@ class TemplateSelect(TemplateEntity, AbstractTemplateSelect):
|
||||
) -> None:
|
||||
"""Initialize the select."""
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
AbstractTemplateSelect.__init__(self, config)
|
||||
|
||||
name = self._attr_name
|
||||
if TYPE_CHECKING:
|
||||
assert name is not None
|
||||
AbstractTemplateSelect.__init__(self, name, config)
|
||||
|
||||
if (select_option := config.get(CONF_SELECT_OPTION)) is not None:
|
||||
self.add_script(CONF_SELECT_OPTION, select_option, name, DOMAIN)
|
||||
|
||||
@callback
|
||||
def _async_setup_templates(self) -> None:
|
||||
"""Set up templates."""
|
||||
if self._template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_current_option",
|
||||
self._template,
|
||||
validator=cv.string,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
self.add_template_attribute(
|
||||
"_attr_options",
|
||||
self._options_template,
|
||||
validator=vol.All(cv.ensure_list, [cv.string]),
|
||||
none_on_template_error=True,
|
||||
)
|
||||
super()._async_setup_templates()
|
||||
|
||||
|
||||
class TriggerSelectEntity(TriggerEntity, AbstractTemplateSelect):
|
||||
@@ -181,5 +190,39 @@ class TriggerSelectEntity(TriggerEntity, AbstractTemplateSelect):
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
TriggerEntity.__init__(self, hass, coordinator, config)
|
||||
name = self._rendered.get(CONF_NAME, DEFAULT_NAME)
|
||||
AbstractTemplateSelect.__init__(self, name, config)
|
||||
AbstractTemplateSelect.__init__(self, config)
|
||||
|
||||
if CONF_STATE in config:
|
||||
self._to_render_simple.append(CONF_STATE)
|
||||
|
||||
# Scripts can be an empty list, therefore we need to check for None
|
||||
if (select_option := config.get(CONF_SELECT_OPTION)) is not None:
|
||||
self.add_script(
|
||||
CONF_SELECT_OPTION,
|
||||
select_option,
|
||||
self._rendered.get(CONF_NAME, DEFAULT_NAME),
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
def _handle_coordinator_update(self):
|
||||
"""Handle updated data from the coordinator."""
|
||||
self._process_data()
|
||||
|
||||
if not self.available:
|
||||
return
|
||||
|
||||
write_ha_state = False
|
||||
if (options := self._rendered.get(ATTR_OPTIONS)) is not None:
|
||||
self._attr_options = vol.All(cv.ensure_list, [cv.string])(options)
|
||||
write_ha_state = True
|
||||
|
||||
if (state := self._rendered.get(CONF_STATE)) is not None:
|
||||
self._attr_current_option = cv.string(state)
|
||||
write_ha_state = True
|
||||
|
||||
if len(self._rendered) > 0:
|
||||
# In case any non optimistic template
|
||||
write_ha_state = True
|
||||
|
||||
if write_ha_state:
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -2,9 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from datetime import date, datetime
|
||||
from decimal import Decimal
|
||||
from datetime import datetime
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
@@ -22,6 +20,9 @@ from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.components.sensor.helpers import ( # pylint: disable=hass-component-root-import
|
||||
async_parse_date_datetime,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
ATTR_ENTITY_ID,
|
||||
@@ -40,15 +41,16 @@ from homeassistant.const import (
|
||||
STATE_UNKNOWN,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers import config_validation as cv, template
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from . import TriggerUpdateCoordinator, validators as template_validators
|
||||
from . import TriggerUpdateCoordinator
|
||||
from .entity import AbstractTemplateEntity
|
||||
from .helpers import (
|
||||
async_setup_template_entry,
|
||||
@@ -184,47 +186,6 @@ def async_create_preview_sensor(
|
||||
)
|
||||
|
||||
|
||||
def validate_datetime(
|
||||
entity: AbstractTemplateSensor,
|
||||
attribute: str,
|
||||
resolve_as: SensorDeviceClass,
|
||||
**kwargs,
|
||||
) -> Callable[[Any], datetime | date | None]:
|
||||
"""Converts the template result into a datetime or date."""
|
||||
|
||||
def convert(result: Any) -> datetime | date | None:
|
||||
if resolve_as == SensorDeviceClass.TIMESTAMP:
|
||||
if isinstance(result, datetime):
|
||||
return result
|
||||
|
||||
if (parsed_timestamp := dt_util.parse_datetime(result)) is None:
|
||||
template_validators.log_validation_result_error(
|
||||
entity, attribute, result, "expected a valid timestamp"
|
||||
)
|
||||
return None
|
||||
|
||||
if kwargs.get("require_tzinfo", True) and parsed_timestamp.tzinfo is None:
|
||||
template_validators.log_validation_result_error(
|
||||
entity,
|
||||
attribute,
|
||||
result,
|
||||
"expected a valid timestamp with a timezone",
|
||||
)
|
||||
return None
|
||||
|
||||
return parsed_timestamp
|
||||
|
||||
if (parsed_date := dt_util.parse_date(result)) is not None:
|
||||
return parsed_date
|
||||
|
||||
template_validators.log_validation_result_error(
|
||||
entity, attribute, result, "expected a valid date"
|
||||
)
|
||||
return None
|
||||
|
||||
return convert
|
||||
|
||||
|
||||
class AbstractTemplateSensor(AbstractTemplateEntity, RestoreSensor):
|
||||
"""Representation of a template sensor features."""
|
||||
|
||||
@@ -237,32 +198,38 @@ class AbstractTemplateSensor(AbstractTemplateEntity, RestoreSensor):
|
||||
self._attr_native_unit_of_measurement = config.get(CONF_UNIT_OF_MEASUREMENT)
|
||||
self._attr_device_class = config.get(CONF_DEVICE_CLASS)
|
||||
self._attr_state_class = config.get(CONF_STATE_CLASS)
|
||||
self._attr_last_reset = None
|
||||
|
||||
self.setup_state_template(
|
||||
CONF_STATE,
|
||||
"_attr_native_value",
|
||||
self._validate_state,
|
||||
)
|
||||
self.setup_template(
|
||||
ATTR_LAST_RESET,
|
||||
"_attr_last_reset",
|
||||
validate_datetime(
|
||||
self, ATTR_LAST_RESET, SensorDeviceClass.TIMESTAMP, require_tzinfo=False
|
||||
),
|
||||
self._template: template.Template = config[CONF_STATE]
|
||||
self._attr_last_reset_template: template.Template | None = config.get(
|
||||
ATTR_LAST_RESET
|
||||
)
|
||||
|
||||
def _validate_state(
|
||||
self, result: Any
|
||||
) -> StateType | date | datetime | Decimal | None:
|
||||
"""Validate the state."""
|
||||
@callback
|
||||
def _update_last_reset(self, result: Any) -> None:
|
||||
if isinstance(result, datetime):
|
||||
self._attr_last_reset = result
|
||||
return
|
||||
|
||||
parsed_timestamp = dt_util.parse_datetime(result)
|
||||
if parsed_timestamp is None:
|
||||
_LOGGER.warning(
|
||||
"%s rendered invalid timestamp for last_reset attribute: %s",
|
||||
self.entity_id,
|
||||
result,
|
||||
)
|
||||
else:
|
||||
self._attr_last_reset = parsed_timestamp
|
||||
|
||||
def _handle_state(self, result: Any) -> None:
|
||||
if result is None or self.device_class not in (
|
||||
SensorDeviceClass.DATE,
|
||||
SensorDeviceClass.TIMESTAMP,
|
||||
):
|
||||
return result
|
||||
self._attr_native_value = result
|
||||
return
|
||||
|
||||
return validate_datetime(self, CONF_STATE, self.device_class)(result)
|
||||
self._attr_native_value = async_parse_date_datetime(
|
||||
result, self.entity_id, self.device_class
|
||||
)
|
||||
|
||||
|
||||
class StateSensorEntity(TemplateEntity, AbstractTemplateSensor):
|
||||
@@ -281,6 +248,31 @@ class StateSensorEntity(TemplateEntity, AbstractTemplateSensor):
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
AbstractTemplateSensor.__init__(self, config)
|
||||
|
||||
@callback
|
||||
def _async_setup_templates(self) -> None:
|
||||
"""Set up templates."""
|
||||
self.add_template_attribute(
|
||||
"_attr_native_value", self._template, None, self._update_state
|
||||
)
|
||||
if self._attr_last_reset_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_last_reset",
|
||||
self._attr_last_reset_template,
|
||||
cv.datetime,
|
||||
self._update_last_reset,
|
||||
)
|
||||
|
||||
super()._async_setup_templates()
|
||||
|
||||
@callback
|
||||
def _update_state(self, result):
|
||||
super()._update_state(result)
|
||||
if isinstance(result, TemplateError):
|
||||
self._attr_native_value = None
|
||||
return
|
||||
|
||||
self._handle_state(result)
|
||||
|
||||
|
||||
class TriggerSensorEntity(TriggerEntity, AbstractTemplateSensor):
|
||||
"""Sensor entity based on trigger data."""
|
||||
@@ -297,6 +289,15 @@ class TriggerSensorEntity(TriggerEntity, AbstractTemplateSensor):
|
||||
TriggerEntity.__init__(self, hass, coordinator, config)
|
||||
AbstractTemplateSensor.__init__(self, config)
|
||||
|
||||
self._to_render_simple.append(CONF_STATE)
|
||||
self._parse_result.add(CONF_STATE)
|
||||
|
||||
if last_reset_template := self._attr_last_reset_template:
|
||||
if last_reset_template.is_static:
|
||||
self._static_rendered[ATTR_LAST_RESET] = last_reset_template.template
|
||||
else:
|
||||
self._to_render_simple.append(ATTR_LAST_RESET)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Restore last state."""
|
||||
await super().async_added_to_hass()
|
||||
@@ -310,3 +311,16 @@ class TriggerSensorEntity(TriggerEntity, AbstractTemplateSensor):
|
||||
):
|
||||
self._attr_native_value = extra_data.native_value
|
||||
self.restore_attributes(last_state)
|
||||
|
||||
@callback
|
||||
def _process_data(self) -> None:
|
||||
"""Process new data."""
|
||||
super()._process_data()
|
||||
|
||||
# Update last_reset
|
||||
if (last_reset := self._rendered.get(ATTR_LAST_RESET)) is not None:
|
||||
self._update_last_reset(last_reset)
|
||||
|
||||
rendered = self._rendered.get(CONF_STATE)
|
||||
self._handle_state(rendered)
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -21,12 +21,14 @@ from homeassistant.const import (
|
||||
CONF_SWITCHES,
|
||||
CONF_UNIQUE_ID,
|
||||
CONF_VALUE_TEMPLATE,
|
||||
STATE_OFF,
|
||||
STATE_ON,
|
||||
STATE_UNAVAILABLE,
|
||||
STATE_UNKNOWN,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers import config_validation as cv, template
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
@@ -34,7 +36,7 @@ from homeassistant.helpers.entity_platform import (
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import TriggerUpdateCoordinator, validators as template_validators
|
||||
from . import TriggerUpdateCoordinator
|
||||
from .const import CONF_TURN_OFF, CONF_TURN_ON, DOMAIN
|
||||
from .entity import AbstractTemplateEntity
|
||||
from .helpers import (
|
||||
@@ -51,6 +53,8 @@ from .schemas import (
|
||||
from .template_entity import TemplateEntity
|
||||
from .trigger_entity import TriggerEntity
|
||||
|
||||
_VALID_STATES = [STATE_ON, STATE_OFF, "true", "false"]
|
||||
|
||||
LEGACY_FIELDS = {
|
||||
CONF_VALUE_TEMPLATE: CONF_STATE,
|
||||
}
|
||||
@@ -151,21 +155,9 @@ class AbstractTemplateSwitch(AbstractTemplateEntity, SwitchEntity, RestoreEntity
|
||||
|
||||
# The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
|
||||
# This ensures that the __init__ on AbstractTemplateEntity is not called twice.
|
||||
def __init__(self, name: str, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
def __init__(self, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
"""Initialize the features."""
|
||||
|
||||
self.setup_state_template(
|
||||
CONF_STATE,
|
||||
"_attr_is_on",
|
||||
template_validators.boolean(self, CONF_STATE),
|
||||
)
|
||||
|
||||
# Scripts can be an empty list, therefore we need to check for None
|
||||
if (on_action := config.get(CONF_TURN_ON)) is not None:
|
||||
self.add_script(CONF_TURN_ON, on_action, name, DOMAIN)
|
||||
if (off_action := config.get(CONF_TURN_OFF)) is not None:
|
||||
self.add_script(CONF_TURN_OFF, off_action, name, DOMAIN)
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Fire the on action."""
|
||||
if on_script := self._action_scripts.get(CONF_TURN_ON):
|
||||
@@ -196,20 +188,54 @@ class StateSwitchEntity(TemplateEntity, AbstractTemplateSwitch):
|
||||
) -> None:
|
||||
"""Initialize the Template switch."""
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
AbstractTemplateSwitch.__init__(self, config)
|
||||
|
||||
name = self._attr_name
|
||||
if TYPE_CHECKING:
|
||||
assert name is not None
|
||||
AbstractTemplateSwitch.__init__(self, name, config)
|
||||
|
||||
# Scripts can be an empty list, therefore we need to check for None
|
||||
if (on_action := config.get(CONF_TURN_ON)) is not None:
|
||||
self.add_script(CONF_TURN_ON, on_action, name, DOMAIN)
|
||||
if (off_action := config.get(CONF_TURN_OFF)) is not None:
|
||||
self.add_script(CONF_TURN_OFF, off_action, name, DOMAIN)
|
||||
|
||||
@callback
|
||||
def _update_state(self, result):
|
||||
super()._update_state(result)
|
||||
if isinstance(result, TemplateError):
|
||||
self._attr_is_on = None
|
||||
return
|
||||
|
||||
if isinstance(result, bool):
|
||||
self._attr_is_on = result
|
||||
return
|
||||
|
||||
if isinstance(result, str):
|
||||
self._attr_is_on = result.lower() in ("true", STATE_ON)
|
||||
return
|
||||
|
||||
self._attr_is_on = False
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register callbacks."""
|
||||
if CONF_STATE not in self._templates:
|
||||
if self._template is None:
|
||||
# restore state after startup
|
||||
await super().async_added_to_hass()
|
||||
if state := await self.async_get_last_state():
|
||||
self._attr_is_on = state.state == STATE_ON
|
||||
await super().async_added_to_hass()
|
||||
|
||||
@callback
|
||||
def _async_setup_templates(self) -> None:
|
||||
"""Set up templates."""
|
||||
if self._template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_is_on", self._template, None, self._update_state
|
||||
)
|
||||
|
||||
super()._async_setup_templates()
|
||||
|
||||
|
||||
class TriggerSwitchEntity(TriggerEntity, AbstractTemplateSwitch):
|
||||
"""Switch entity based on trigger data."""
|
||||
@@ -224,8 +250,17 @@ class TriggerSwitchEntity(TriggerEntity, AbstractTemplateSwitch):
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
TriggerEntity.__init__(self, hass, coordinator, config)
|
||||
AbstractTemplateSwitch.__init__(self, config)
|
||||
|
||||
name = self._rendered.get(CONF_NAME, DEFAULT_NAME)
|
||||
AbstractTemplateSwitch.__init__(self, name, config)
|
||||
if on_action := config.get(CONF_TURN_ON):
|
||||
self.add_script(CONF_TURN_ON, on_action, name, DOMAIN)
|
||||
if off_action := config.get(CONF_TURN_OFF):
|
||||
self.add_script(CONF_TURN_OFF, off_action, name, DOMAIN)
|
||||
|
||||
if CONF_STATE in config:
|
||||
self._to_render_simple.append(CONF_STATE)
|
||||
self._parse_result.add(CONF_STATE)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Restore last state."""
|
||||
@@ -239,3 +274,24 @@ class TriggerSwitchEntity(TriggerEntity, AbstractTemplateSwitch):
|
||||
):
|
||||
self._attr_is_on = last_state.state == STATE_ON
|
||||
self.restore_attributes(last_state)
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle update of the data."""
|
||||
self._process_data()
|
||||
|
||||
if not self.available:
|
||||
return
|
||||
|
||||
write_ha_state = False
|
||||
if (state := self._rendered.get(CONF_STATE)) is not None:
|
||||
self._attr_is_on = template.result_as_boolean(state)
|
||||
write_ha_state = True
|
||||
|
||||
elif len(self._rendered) > 0:
|
||||
# In case name, icon, or friendly name have a template but
|
||||
# states does not
|
||||
write_ha_state = True
|
||||
|
||||
if write_ha_state:
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -295,10 +295,6 @@ class TemplateEntity(AbstractTemplateEntity):
|
||||
self._attr_available = False
|
||||
return
|
||||
|
||||
# Recover from template errors if they happened before.
|
||||
if not self._availability_template and not self._attr_available:
|
||||
self._attr_available = True
|
||||
|
||||
state = validator(result) if validator else result
|
||||
if on_update:
|
||||
on_update(state)
|
||||
@@ -313,8 +309,6 @@ class TemplateEntity(AbstractTemplateEntity):
|
||||
attribute: str,
|
||||
validator: Callable[[Any], Any] | None = None,
|
||||
on_update: Callable[[Any], None] | None = None,
|
||||
render_complex: bool = False,
|
||||
**kwargs,
|
||||
):
|
||||
"""Set up a template that manages any property or attribute of the entity.
|
||||
|
||||
@@ -330,15 +324,8 @@ class TemplateEntity(AbstractTemplateEntity):
|
||||
on_update:
|
||||
Called to store the template result rather than storing it
|
||||
the supplied attribute. Passed the result of the validator.
|
||||
render_complex (default=False):
|
||||
This signals trigger based template entities to render the template
|
||||
as a complex result. State based template entities always render
|
||||
complex results.
|
||||
"""
|
||||
none_on_template_error = kwargs.get("none_on_template_error", True)
|
||||
self.add_template(
|
||||
option, attribute, validator, on_update, none_on_template_error
|
||||
)
|
||||
self.add_template(option, attribute, validator, on_update, True)
|
||||
|
||||
def add_template_attribute(
|
||||
self,
|
||||
|
||||
@@ -69,8 +69,6 @@ class TriggerEntity( # pylint: disable=hass-enforce-class-module
|
||||
attribute: str,
|
||||
validator: Callable[[Any], Any] | None = None,
|
||||
on_update: Callable[[Any], None] | None = None,
|
||||
render_complex: bool = False,
|
||||
**kwargs,
|
||||
) -> None:
|
||||
"""Set up a template that manages any property or attribute of the entity.
|
||||
|
||||
@@ -86,17 +84,8 @@ class TriggerEntity( # pylint: disable=hass-enforce-class-module
|
||||
on_update:
|
||||
Called to store the template result rather than storing it
|
||||
the supplied attribute. Passed the result of the validator.
|
||||
render_complex (default=False):
|
||||
This signals trigger based template entities to render the template
|
||||
as a complex result. State based template entities always render
|
||||
complex results.
|
||||
"""
|
||||
if self.add_template(option, attribute, validator, on_update):
|
||||
if render_complex:
|
||||
self._to_render_complex.append(option)
|
||||
else:
|
||||
self._to_render_simple.append(option)
|
||||
self._parse_result.add(option)
|
||||
self.setup_state_template(option, attribute, validator, on_update)
|
||||
|
||||
@property
|
||||
def referenced_blueprint(self) -> str | None:
|
||||
@@ -142,18 +131,19 @@ class TriggerEntity( # pylint: disable=hass-enforce-class-module
|
||||
# Handle any templates.
|
||||
write_state = False
|
||||
for option, entity_template in self._templates.items():
|
||||
value = _SENTINEL
|
||||
if (rendered := self._rendered.get(option)) is not None:
|
||||
value = rendered
|
||||
|
||||
# Capture templates that did not render a result due to an exception and
|
||||
# ensure the state object updates. _SENTINEL is used to differentiate
|
||||
# templates that render None.
|
||||
if (rendered := self._rendered.get(option, _SENTINEL)) is _SENTINEL:
|
||||
if value is _SENTINEL:
|
||||
write_state = True
|
||||
continue
|
||||
|
||||
value = (
|
||||
entity_template.validator(rendered)
|
||||
if entity_template.validator
|
||||
else rendered
|
||||
)
|
||||
if entity_template.validator:
|
||||
value = entity_template.validator(rendered)
|
||||
|
||||
if entity_template.on_update:
|
||||
entity_template.on_update(value)
|
||||
|
||||
@@ -24,15 +24,16 @@ from homeassistant.const import (
|
||||
STATE_UNKNOWN,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers import config_validation as cv, template
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
)
|
||||
from homeassistant.helpers.template import _SENTINEL
|
||||
from homeassistant.helpers.trigger_template_entity import CONF_PICTURE
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import TriggerUpdateCoordinator, validators as template_validators
|
||||
from . import TriggerUpdateCoordinator
|
||||
from .const import DOMAIN
|
||||
from .entity import AbstractTemplateEntity
|
||||
from .helpers import (
|
||||
@@ -144,49 +145,19 @@ class AbstractTemplateUpdate(AbstractTemplateEntity, UpdateEntity):
|
||||
|
||||
# The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
|
||||
# This ensures that the __init__ on AbstractTemplateEntity is not called twice.
|
||||
def __init__(self, name: str, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
def __init__(self, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
"""Initialize the features."""
|
||||
|
||||
self._installed_version_template = config[CONF_INSTALLED_VERSION]
|
||||
self._latest_version_template = config[CONF_LATEST_VERSION]
|
||||
|
||||
self._attr_device_class = config.get(CONF_DEVICE_CLASS)
|
||||
|
||||
# Setup templates.
|
||||
self.setup_template(
|
||||
CONF_INSTALLED_VERSION,
|
||||
"_attr_installed_version",
|
||||
template_validators.string(self, CONF_INSTALLED_VERSION),
|
||||
)
|
||||
self.setup_template(
|
||||
CONF_LATEST_VERSION,
|
||||
"_attr_latest_version",
|
||||
template_validators.string(self, CONF_LATEST_VERSION),
|
||||
)
|
||||
self.setup_template(
|
||||
CONF_IN_PROGRESS,
|
||||
"_attr_in_progress",
|
||||
template_validators.boolean(self, CONF_IN_PROGRESS),
|
||||
self._update_in_progress,
|
||||
)
|
||||
self.setup_template(
|
||||
CONF_RELEASE_SUMMARY,
|
||||
"_attr_release_summary",
|
||||
template_validators.string(self, CONF_RELEASE_SUMMARY),
|
||||
)
|
||||
self.setup_template(
|
||||
CONF_RELEASE_URL,
|
||||
"_attr_release_url",
|
||||
template_validators.url(self, CONF_RELEASE_URL),
|
||||
)
|
||||
self.setup_template(
|
||||
CONF_TITLE,
|
||||
"_attr_title",
|
||||
template_validators.string(self, CONF_TITLE),
|
||||
)
|
||||
self.setup_template(
|
||||
CONF_UPDATE_PERCENTAGE,
|
||||
"_attr_update_percentage",
|
||||
template_validators.number(self, CONF_UPDATE_PERCENTAGE, 0.0, 100.0),
|
||||
self._update_update_percentage,
|
||||
)
|
||||
self._in_progress_template = config.get(CONF_IN_PROGRESS)
|
||||
self._release_summary_template = config.get(CONF_RELEASE_SUMMARY)
|
||||
self._release_url_template = config.get(CONF_RELEASE_URL)
|
||||
self._title_template = config.get(CONF_TITLE)
|
||||
self._update_percentage_template = config.get(CONF_UPDATE_PERCENTAGE)
|
||||
|
||||
self._attr_supported_features = UpdateEntityFeature(0)
|
||||
if config[CONF_BACKUP]:
|
||||
@@ -194,40 +165,99 @@ class AbstractTemplateUpdate(AbstractTemplateEntity, UpdateEntity):
|
||||
if config[CONF_SPECIFIC_VERSION]:
|
||||
self._attr_supported_features |= UpdateEntityFeature.SPECIFIC_VERSION
|
||||
if (
|
||||
CONF_IN_PROGRESS in self._templates
|
||||
or CONF_UPDATE_PERCENTAGE in self._templates
|
||||
self._in_progress_template is not None
|
||||
or self._update_percentage_template is not None
|
||||
):
|
||||
self._attr_supported_features |= UpdateEntityFeature.PROGRESS
|
||||
|
||||
self._optimistic_in_process = (
|
||||
CONF_IN_PROGRESS not in self._templates
|
||||
and CONF_UPDATE_PERCENTAGE in self._templates
|
||||
self._in_progress_template is None
|
||||
and self._update_percentage_template is not None
|
||||
)
|
||||
|
||||
# Scripts can be an empty list, therefore we need to check for None
|
||||
if (install_action := config.get(CONF_INSTALL)) is not None:
|
||||
self.add_script(CONF_INSTALL, install_action, name, DOMAIN)
|
||||
self._attr_supported_features |= UpdateEntityFeature.INSTALL
|
||||
|
||||
@callback
|
||||
def _update_in_progress(self, result: bool | None) -> None:
|
||||
def _update_installed_version(self, result: Any) -> None:
|
||||
if result is None:
|
||||
template_validators.log_validation_result_error(
|
||||
self, CONF_IN_PROGRESS, result, "expected a boolean"
|
||||
)
|
||||
self._attr_in_progress = result or False
|
||||
self._attr_installed_version = None
|
||||
return
|
||||
|
||||
self._attr_installed_version = cv.string(result)
|
||||
|
||||
@callback
|
||||
def _update_update_percentage(self, result: float | None) -> None:
|
||||
def _update_latest_version(self, result: Any) -> None:
|
||||
if result is None:
|
||||
self._attr_latest_version = None
|
||||
return
|
||||
|
||||
self._attr_latest_version = cv.string(result)
|
||||
|
||||
@callback
|
||||
def _update_in_process(self, result: Any) -> None:
|
||||
try:
|
||||
self._attr_in_progress = cv.boolean(result)
|
||||
except vol.Invalid:
|
||||
_LOGGER.error(
|
||||
"Received invalid in_process value: %s for entity %s. Expected: True, False",
|
||||
result,
|
||||
self.entity_id,
|
||||
)
|
||||
self._attr_in_progress = False
|
||||
|
||||
@callback
|
||||
def _update_release_summary(self, result: Any) -> None:
|
||||
if result is None:
|
||||
self._attr_release_summary = None
|
||||
return
|
||||
|
||||
self._attr_release_summary = cv.string(result)
|
||||
|
||||
@callback
|
||||
def _update_release_url(self, result: Any) -> None:
|
||||
if result is None:
|
||||
self._attr_release_url = None
|
||||
return
|
||||
|
||||
try:
|
||||
self._attr_release_url = cv.url(result)
|
||||
except vol.Invalid:
|
||||
_LOGGER.error(
|
||||
"Received invalid release_url: %s for entity %s",
|
||||
result,
|
||||
self.entity_id,
|
||||
)
|
||||
self._attr_release_url = None
|
||||
|
||||
@callback
|
||||
def _update_title(self, result: Any) -> None:
|
||||
if result is None:
|
||||
self._attr_title = None
|
||||
return
|
||||
|
||||
self._attr_title = cv.string(result)
|
||||
|
||||
@callback
|
||||
def _update_update_percentage(self, result: Any) -> None:
|
||||
if result is None:
|
||||
if self._optimistic_in_process:
|
||||
self._attr_in_progress = False
|
||||
self._attr_update_percentage = None
|
||||
return
|
||||
|
||||
if self._optimistic_in_process:
|
||||
self._attr_in_progress = True
|
||||
self._attr_update_percentage = result
|
||||
try:
|
||||
percentage = vol.All(
|
||||
vol.Coerce(float),
|
||||
vol.Range(0, 100, min_included=True, max_included=True),
|
||||
)(result)
|
||||
if self._optimistic_in_process:
|
||||
self._attr_in_progress = True
|
||||
self._attr_update_percentage = percentage
|
||||
except vol.Invalid:
|
||||
_LOGGER.error(
|
||||
"Received invalid update_percentage: %s for entity %s",
|
||||
result,
|
||||
self.entity_id,
|
||||
)
|
||||
self._attr_update_percentage = None
|
||||
|
||||
async def async_install(
|
||||
self, version: str | None, backup: bool, **kwargs: Any
|
||||
@@ -253,10 +283,16 @@ class StateUpdateEntity(TemplateEntity, AbstractTemplateUpdate):
|
||||
) -> None:
|
||||
"""Initialize the Template update."""
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
AbstractTemplateUpdate.__init__(self, config)
|
||||
|
||||
name = self._attr_name
|
||||
if TYPE_CHECKING:
|
||||
assert name is not None
|
||||
AbstractTemplateUpdate.__init__(self, name, config)
|
||||
|
||||
# Scripts can be an empty list, therefore we need to check for None
|
||||
if (install_action := config.get(CONF_INSTALL)) is not None:
|
||||
self.add_script(CONF_INSTALL, install_action, name, DOMAIN)
|
||||
self._attr_supported_features |= UpdateEntityFeature.INSTALL
|
||||
|
||||
@property
|
||||
def entity_picture(self) -> str | None:
|
||||
@@ -269,6 +305,65 @@ class StateUpdateEntity(TemplateEntity, AbstractTemplateUpdate):
|
||||
return "https://brands.home-assistant.io/_/template/icon.png"
|
||||
return self._attr_entity_picture
|
||||
|
||||
@callback
|
||||
def _async_setup_templates(self) -> None:
|
||||
"""Set up templates."""
|
||||
self.add_template_attribute(
|
||||
"_attr_installed_version",
|
||||
self._installed_version_template,
|
||||
None,
|
||||
self._update_installed_version,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
self.add_template_attribute(
|
||||
"_attr_latest_version",
|
||||
self._latest_version_template,
|
||||
None,
|
||||
self._update_latest_version,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._in_progress_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_in_progress",
|
||||
self._in_progress_template,
|
||||
None,
|
||||
self._update_in_process,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._release_summary_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_release_summary",
|
||||
self._release_summary_template,
|
||||
None,
|
||||
self._update_release_summary,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._release_url_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_release_url",
|
||||
self._release_url_template,
|
||||
None,
|
||||
self._update_release_url,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._title_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_title",
|
||||
self._title_template,
|
||||
None,
|
||||
self._update_title,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._update_percentage_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_update_percentage",
|
||||
self._update_percentage_template,
|
||||
None,
|
||||
self._update_update_percentage,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
super()._async_setup_templates()
|
||||
|
||||
|
||||
class TriggerUpdateEntity(TriggerEntity, AbstractTemplateUpdate):
|
||||
"""Update entity based on trigger data."""
|
||||
@@ -283,8 +378,35 @@ class TriggerUpdateEntity(TriggerEntity, AbstractTemplateUpdate):
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
TriggerEntity.__init__(self, hass, coordinator, config)
|
||||
name = self._rendered.get(CONF_NAME, DEFAULT_NAME)
|
||||
AbstractTemplateUpdate.__init__(self, name, config)
|
||||
AbstractTemplateUpdate.__init__(self, config)
|
||||
|
||||
for key in (
|
||||
CONF_INSTALLED_VERSION,
|
||||
CONF_LATEST_VERSION,
|
||||
):
|
||||
self._to_render_simple.append(key)
|
||||
self._parse_result.add(key)
|
||||
|
||||
# Scripts can be an empty list, therefore we need to check for None
|
||||
if (install_action := config.get(CONF_INSTALL)) is not None:
|
||||
self.add_script(
|
||||
CONF_INSTALL,
|
||||
install_action,
|
||||
self._rendered.get(CONF_NAME, DEFAULT_NAME),
|
||||
DOMAIN,
|
||||
)
|
||||
self._attr_supported_features |= UpdateEntityFeature.INSTALL
|
||||
|
||||
for key in (
|
||||
CONF_IN_PROGRESS,
|
||||
CONF_RELEASE_SUMMARY,
|
||||
CONF_RELEASE_URL,
|
||||
CONF_TITLE,
|
||||
CONF_UPDATE_PERCENTAGE,
|
||||
):
|
||||
if isinstance(config.get(key), template.Template):
|
||||
self._to_render_simple.append(key)
|
||||
self._parse_result.add(key)
|
||||
|
||||
# Ensure the entity picture can resolve None to produce the default picture.
|
||||
if CONF_PICTURE in config:
|
||||
@@ -309,3 +431,32 @@ class TriggerUpdateEntity(TriggerEntity, AbstractTemplateUpdate):
|
||||
if (picture := self._rendered.get(CONF_PICTURE)) is None:
|
||||
return UpdateEntity.entity_picture.fget(self) # type: ignore[attr-defined]
|
||||
return picture
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle update of the data."""
|
||||
self._process_data()
|
||||
|
||||
if not self.available:
|
||||
return
|
||||
|
||||
write_ha_state = False
|
||||
for key, updater in (
|
||||
(CONF_INSTALLED_VERSION, self._update_installed_version),
|
||||
(CONF_LATEST_VERSION, self._update_latest_version),
|
||||
(CONF_IN_PROGRESS, self._update_in_process),
|
||||
(CONF_RELEASE_SUMMARY, self._update_release_summary),
|
||||
(CONF_RELEASE_URL, self._update_release_url),
|
||||
(CONF_TITLE, self._update_title),
|
||||
(CONF_UPDATE_PERCENTAGE, self._update_update_percentage),
|
||||
):
|
||||
if (rendered := self._rendered.get(key, _SENTINEL)) is not _SENTINEL:
|
||||
updater(rendered)
|
||||
write_ha_state = True
|
||||
|
||||
if len(self._rendered) > 0:
|
||||
# In case any non optimistic template
|
||||
write_ha_state = True
|
||||
|
||||
if write_ha_state:
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Generator, Sequence
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
@@ -29,9 +30,15 @@ from homeassistant.const import (
|
||||
CONF_STATE,
|
||||
CONF_UNIQUE_ID,
|
||||
CONF_VALUE_TEMPLATE,
|
||||
STATE_UNKNOWN,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv, issue_registry as ir
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
issue_registry as ir,
|
||||
template,
|
||||
)
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
@@ -39,8 +46,8 @@ from homeassistant.helpers.entity_platform import (
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import TriggerUpdateCoordinator, validators as template_validators
|
||||
from .const import DOMAIN
|
||||
from .coordinator import TriggerUpdateCoordinator
|
||||
from .entity import AbstractTemplateEntity
|
||||
from .helpers import (
|
||||
async_setup_template_entry,
|
||||
@@ -69,6 +76,14 @@ CONF_FAN_SPEED_TEMPLATE = "fan_speed_template"
|
||||
DEFAULT_NAME = "Template Vacuum"
|
||||
|
||||
ENTITY_ID_FORMAT = VACUUM_DOMAIN + ".{}"
|
||||
_VALID_STATES = [
|
||||
VacuumActivity.CLEANING,
|
||||
VacuumActivity.DOCKED,
|
||||
VacuumActivity.PAUSED,
|
||||
VacuumActivity.IDLE,
|
||||
VacuumActivity.RETURNING,
|
||||
VacuumActivity.ERROR,
|
||||
]
|
||||
|
||||
LEGACY_FIELDS = {
|
||||
CONF_BATTERY_LEVEL_TEMPLATE: CONF_BATTERY_LEVEL,
|
||||
@@ -210,36 +225,27 @@ class AbstractTemplateVacuum(AbstractTemplateEntity, StateVacuumEntity):
|
||||
|
||||
# The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
|
||||
# This ensures that the __init__ on AbstractTemplateEntity is not called twice.
|
||||
def __init__(self, name: str, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
def __init__(self, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
"""Initialize the features."""
|
||||
self._battery_level_template = config.get(CONF_BATTERY_LEVEL)
|
||||
self._fan_speed_template = config.get(CONF_FAN_SPEED)
|
||||
|
||||
self._battery_level = None
|
||||
self._attr_fan_speed = None
|
||||
|
||||
# List of valid fan speeds
|
||||
self._attr_fan_speed_list = config[CONF_FAN_SPEED_LIST]
|
||||
self.setup_state_template(
|
||||
CONF_STATE,
|
||||
"_attr_activity",
|
||||
template_validators.strenum(self, CONF_STATE, VacuumActivity),
|
||||
)
|
||||
self.setup_template(
|
||||
CONF_FAN_SPEED,
|
||||
"_attr_fan_speed",
|
||||
template_validators.item_in_list(
|
||||
self, CONF_FAN_SPEED, self._attr_fan_speed_list
|
||||
),
|
||||
)
|
||||
self.setup_template(
|
||||
CONF_BATTERY_LEVEL,
|
||||
"_attr_battery_level",
|
||||
template_validators.number(self, CONF_BATTERY_LEVEL, 0.0, 100.0),
|
||||
)
|
||||
|
||||
self._attr_supported_features = (
|
||||
VacuumEntityFeature.START | VacuumEntityFeature.STATE
|
||||
)
|
||||
|
||||
if CONF_BATTERY_LEVEL in self._templates:
|
||||
if self._battery_level_template:
|
||||
self._attr_supported_features |= VacuumEntityFeature.BATTERY
|
||||
|
||||
def _iterate_scripts(
|
||||
self, config: dict[str, Any]
|
||||
) -> Generator[tuple[str, Sequence[dict[str, Any]], VacuumEntityFeature | int]]:
|
||||
for action_id, supported_feature in (
|
||||
(SERVICE_START, 0),
|
||||
(SERVICE_PAUSE, VacuumEntityFeature.PAUSE),
|
||||
@@ -250,8 +256,22 @@ class AbstractTemplateVacuum(AbstractTemplateEntity, StateVacuumEntity):
|
||||
(SERVICE_SET_FAN_SPEED, VacuumEntityFeature.FAN_SPEED),
|
||||
):
|
||||
if (action_config := config.get(action_id)) is not None:
|
||||
self.add_script(action_id, action_config, name, DOMAIN)
|
||||
self._attr_supported_features |= supported_feature
|
||||
yield (action_id, action_config, supported_feature)
|
||||
|
||||
def _handle_state(self, result: Any) -> None:
|
||||
# Validate state
|
||||
if result in _VALID_STATES:
|
||||
self._attr_activity = result
|
||||
elif result == STATE_UNKNOWN:
|
||||
self._attr_activity = None
|
||||
else:
|
||||
_LOGGER.error(
|
||||
"Received invalid vacuum state: %s for entity %s. Expected: %s",
|
||||
result,
|
||||
self.entity_id,
|
||||
", ".join(_VALID_STATES),
|
||||
)
|
||||
self._attr_activity = None
|
||||
|
||||
async def async_start(self) -> None:
|
||||
"""Start or resume the cleaning task."""
|
||||
@@ -315,6 +335,44 @@ class AbstractTemplateVacuum(AbstractTemplateEntity, StateVacuumEntity):
|
||||
script, run_variables={ATTR_FAN_SPEED: fan_speed}, context=self._context
|
||||
)
|
||||
|
||||
@callback
|
||||
def _update_battery_level(self, battery_level):
|
||||
try:
|
||||
battery_level_int = int(battery_level)
|
||||
if not 0 <= battery_level_int <= 100:
|
||||
raise ValueError # noqa: TRY301
|
||||
except ValueError:
|
||||
_LOGGER.error(
|
||||
"Received invalid battery level: %s for entity %s. Expected: 0-100",
|
||||
battery_level,
|
||||
self.entity_id,
|
||||
)
|
||||
self._attr_battery_level = None
|
||||
return
|
||||
|
||||
self._attr_battery_level = battery_level_int
|
||||
|
||||
@callback
|
||||
def _update_fan_speed(self, fan_speed):
|
||||
if isinstance(fan_speed, TemplateError):
|
||||
# This is legacy behavior
|
||||
self._attr_fan_speed = None
|
||||
self._attr_activity = None
|
||||
return
|
||||
|
||||
if fan_speed in self._attr_fan_speed_list:
|
||||
self._attr_fan_speed = fan_speed
|
||||
elif fan_speed == STATE_UNKNOWN:
|
||||
self._attr_fan_speed = None
|
||||
else:
|
||||
_LOGGER.error(
|
||||
"Received invalid fan speed: %s for entity %s. Expected: %s",
|
||||
fan_speed,
|
||||
self.entity_id,
|
||||
self._attr_fan_speed_list,
|
||||
)
|
||||
self._attr_fan_speed = None
|
||||
|
||||
|
||||
class TemplateStateVacuumEntity(TemplateEntity, AbstractTemplateVacuum):
|
||||
"""A template vacuum component."""
|
||||
@@ -329,10 +387,16 @@ class TemplateStateVacuumEntity(TemplateEntity, AbstractTemplateVacuum):
|
||||
) -> None:
|
||||
"""Initialize the vacuum."""
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
AbstractTemplateVacuum.__init__(self, config)
|
||||
name = self._attr_name
|
||||
if TYPE_CHECKING:
|
||||
assert name is not None
|
||||
AbstractTemplateVacuum.__init__(self, name, config)
|
||||
|
||||
for action_id, action_config, supported_feature in self._iterate_scripts(
|
||||
config
|
||||
):
|
||||
self.add_script(action_id, action_config, name, DOMAIN)
|
||||
self._attr_supported_features |= supported_feature
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
@@ -344,6 +408,42 @@ class TemplateStateVacuumEntity(TemplateEntity, AbstractTemplateVacuum):
|
||||
self.entity_id,
|
||||
)
|
||||
|
||||
@callback
|
||||
def _async_setup_templates(self) -> None:
|
||||
"""Set up templates."""
|
||||
if self._template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_activity", self._template, None, self._update_state
|
||||
)
|
||||
if self._fan_speed_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_fan_speed",
|
||||
self._fan_speed_template,
|
||||
None,
|
||||
self._update_fan_speed,
|
||||
)
|
||||
if self._battery_level_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_battery_level",
|
||||
self._battery_level_template,
|
||||
None,
|
||||
self._update_battery_level,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
super()._async_setup_templates()
|
||||
|
||||
@callback
|
||||
def _update_state(self, result):
|
||||
super()._update_state(result)
|
||||
if isinstance(result, TemplateError):
|
||||
# This is legacy behavior
|
||||
self._attr_activity = None
|
||||
if not self._availability_template:
|
||||
self._attr_available = True
|
||||
return
|
||||
|
||||
self._handle_state(result)
|
||||
|
||||
|
||||
class TriggerVacuumEntity(TriggerEntity, AbstractTemplateVacuum):
|
||||
"""Vacuum entity based on trigger data."""
|
||||
@@ -358,8 +458,20 @@ class TriggerVacuumEntity(TriggerEntity, AbstractTemplateVacuum):
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
TriggerEntity.__init__(self, hass, coordinator, config)
|
||||
AbstractTemplateVacuum.__init__(self, config)
|
||||
|
||||
self._attr_name = name = self._rendered.get(CONF_NAME, DEFAULT_NAME)
|
||||
AbstractTemplateVacuum.__init__(self, name, config)
|
||||
|
||||
for action_id, action_config, supported_feature in self._iterate_scripts(
|
||||
config
|
||||
):
|
||||
self.add_script(action_id, action_config, name, DOMAIN)
|
||||
self._attr_supported_features |= supported_feature
|
||||
|
||||
for key in (CONF_STATE, CONF_FAN_SPEED, CONF_BATTERY_LEVEL):
|
||||
if isinstance(config.get(key), template.Template):
|
||||
self._to_render_simple.append(key)
|
||||
self._parse_result.add(key)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
@@ -370,3 +482,28 @@ class TriggerVacuumEntity(TriggerEntity, AbstractTemplateVacuum):
|
||||
self._attr_name or DEFAULT_NAME,
|
||||
self.entity_id,
|
||||
)
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle update of the data."""
|
||||
self._process_data()
|
||||
|
||||
if not self.available:
|
||||
return
|
||||
|
||||
write_ha_state = False
|
||||
for key, updater in (
|
||||
(CONF_STATE, self._handle_state),
|
||||
(CONF_FAN_SPEED, self._update_fan_speed),
|
||||
(CONF_BATTERY_LEVEL, self._update_battery_level),
|
||||
):
|
||||
if (rendered := self._rendered.get(key)) is not None:
|
||||
updater(rendered)
|
||||
write_ha_state = True
|
||||
|
||||
if len(self._rendered) > 0:
|
||||
# In case any non optimistic template
|
||||
write_ha_state = True
|
||||
|
||||
if write_ha_state:
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -19,7 +19,7 @@ RESULT_ON = ("1", "true", "yes", "on", "enable")
|
||||
RESULT_OFF = ("0", "false", "no", "off", "disable")
|
||||
|
||||
|
||||
def log_validation_result_error(
|
||||
def _log_validation_result_error(
|
||||
entity: Entity,
|
||||
attribute: str,
|
||||
value: Any,
|
||||
@@ -44,7 +44,7 @@ def log_validation_result_error(
|
||||
)
|
||||
|
||||
|
||||
def check_result_for_none(result: Any, **kwargs: Any) -> bool:
|
||||
def _check_result_for_none(result: Any, **kwargs: Any) -> bool:
|
||||
"""Checks the result for none, unknown, unavailable."""
|
||||
if result is None:
|
||||
return True
|
||||
@@ -74,7 +74,7 @@ def strenum[T: StrEnum](
|
||||
"""
|
||||
|
||||
def convert(result: Any) -> T | None:
|
||||
if check_result_for_none(result, **kwargs):
|
||||
if _check_result_for_none(result, **kwargs):
|
||||
return None
|
||||
|
||||
if isinstance(result, str):
|
||||
@@ -102,7 +102,7 @@ def strenum[T: StrEnum](
|
||||
if state_off:
|
||||
expected += RESULT_OFF
|
||||
|
||||
log_validation_result_error(
|
||||
_log_validation_result_error(
|
||||
entity,
|
||||
attribute,
|
||||
result,
|
||||
@@ -130,7 +130,7 @@ def boolean(
|
||||
"""
|
||||
|
||||
def convert(result: Any) -> bool | None:
|
||||
if check_result_for_none(result, **kwargs):
|
||||
if _check_result_for_none(result, **kwargs):
|
||||
return None
|
||||
|
||||
if isinstance(result, bool):
|
||||
@@ -154,7 +154,7 @@ def boolean(
|
||||
if as_false:
|
||||
items += as_false
|
||||
|
||||
log_validation_result_error(entity, attribute, result, items)
|
||||
_log_validation_result_error(entity, attribute, result, items)
|
||||
return None
|
||||
|
||||
return convert
|
||||
@@ -182,11 +182,11 @@ def number(
|
||||
message = f"{message} less than or equal to {maximum:0.1f}"
|
||||
|
||||
def convert(result: Any) -> float | int | None:
|
||||
if check_result_for_none(result, **kwargs):
|
||||
if _check_result_for_none(result, **kwargs):
|
||||
return None
|
||||
|
||||
if (result_type := type(result)) is bool:
|
||||
log_validation_result_error(entity, attribute, result, message)
|
||||
_log_validation_result_error(entity, attribute, result, message)
|
||||
return None
|
||||
|
||||
if isinstance(result, (float, int)):
|
||||
@@ -201,7 +201,7 @@ def number(
|
||||
if return_type is int:
|
||||
value = int(value)
|
||||
except vol.Invalid:
|
||||
log_validation_result_error(entity, attribute, result, message)
|
||||
_log_validation_result_error(entity, attribute, result, message)
|
||||
return None
|
||||
|
||||
if minimum is None and maximum is None:
|
||||
@@ -218,7 +218,7 @@ def number(
|
||||
):
|
||||
return value
|
||||
|
||||
log_validation_result_error(entity, attribute, result, message)
|
||||
_log_validation_result_error(entity, attribute, result, message)
|
||||
return None
|
||||
|
||||
return convert
|
||||
@@ -239,11 +239,11 @@ def list_of_strings(
|
||||
"""
|
||||
|
||||
def convert(result: Any) -> list[str] | None:
|
||||
if check_result_for_none(result, **kwargs):
|
||||
if _check_result_for_none(result, **kwargs):
|
||||
return None
|
||||
|
||||
if not isinstance(result, list):
|
||||
log_validation_result_error(
|
||||
_log_validation_result_error(
|
||||
entity,
|
||||
attribute,
|
||||
result,
|
||||
@@ -263,7 +263,7 @@ def list_of_strings(
|
||||
def item_in_list[T](
|
||||
entity: Entity,
|
||||
attribute: str,
|
||||
items: list[Any] | str | None,
|
||||
items: list[Any] | None,
|
||||
items_attribute: str | None = None,
|
||||
**kwargs: Any,
|
||||
) -> Callable[[Any], Any | None]:
|
||||
@@ -274,20 +274,15 @@ def item_in_list[T](
|
||||
"""
|
||||
|
||||
def convert(result: Any) -> Any | None:
|
||||
if check_result_for_none(result, **kwargs):
|
||||
if _check_result_for_none(result, **kwargs):
|
||||
return None
|
||||
|
||||
# items may be mutable based on another template field. Always
|
||||
# perform this check when the items come from an configured
|
||||
# attribute.
|
||||
if isinstance(items, str):
|
||||
_items = getattr(entity, items)
|
||||
else:
|
||||
_items = items
|
||||
|
||||
if _items is None or (len(_items) == 0):
|
||||
if items is None or (len(items) == 0):
|
||||
if items_attribute:
|
||||
log_validation_result_error(
|
||||
_log_validation_result_error(
|
||||
entity,
|
||||
attribute,
|
||||
result,
|
||||
@@ -296,68 +291,15 @@ def item_in_list[T](
|
||||
|
||||
return None
|
||||
|
||||
if result not in _items:
|
||||
log_validation_result_error(
|
||||
if result not in items:
|
||||
_log_validation_result_error(
|
||||
entity,
|
||||
attribute,
|
||||
result,
|
||||
tuple(str(v) for v in _items),
|
||||
tuple(str(v) for v in items),
|
||||
)
|
||||
return None
|
||||
|
||||
return result
|
||||
|
||||
return convert
|
||||
|
||||
|
||||
def url(
|
||||
entity: Entity,
|
||||
attribute: str,
|
||||
**kwargs: Any,
|
||||
) -> Callable[[Any], str | None]:
|
||||
"""Convert the result to a string url or None."""
|
||||
|
||||
def convert(result: Any) -> str | None:
|
||||
if check_result_for_none(result, **kwargs):
|
||||
return None
|
||||
|
||||
try:
|
||||
return cv.url(result)
|
||||
except vol.Invalid:
|
||||
log_validation_result_error(
|
||||
entity,
|
||||
attribute,
|
||||
result,
|
||||
"expected a url",
|
||||
)
|
||||
return None
|
||||
|
||||
return convert
|
||||
|
||||
|
||||
def string(
|
||||
entity: Entity,
|
||||
attribute: str,
|
||||
**kwargs: Any,
|
||||
) -> Callable[[Any], str | None]:
|
||||
"""Convert the result to a string or None."""
|
||||
|
||||
def convert(result: Any) -> str | None:
|
||||
if check_result_for_none(result, **kwargs):
|
||||
return None
|
||||
|
||||
if isinstance(result, str):
|
||||
return result
|
||||
|
||||
try:
|
||||
return cv.string(result)
|
||||
except vol.Invalid:
|
||||
log_validation_result_error(
|
||||
entity,
|
||||
attribute,
|
||||
result,
|
||||
"expected a string",
|
||||
)
|
||||
return None
|
||||
|
||||
return convert
|
||||
|
||||
@@ -4,8 +4,9 @@ from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import asdict, dataclass
|
||||
from functools import partial
|
||||
import logging
|
||||
from typing import Any, Literal, Self
|
||||
from typing import TYPE_CHECKING, Any, Literal, Self
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -41,7 +42,8 @@ from homeassistant.const import (
|
||||
STATE_UNKNOWN,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers import config_validation as cv, template
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
@@ -55,7 +57,7 @@ from homeassistant.util.unit_conversion import (
|
||||
TemperatureConverter,
|
||||
)
|
||||
|
||||
from . import TriggerUpdateCoordinator, validators as template_validators
|
||||
from .coordinator import TriggerUpdateCoordinator
|
||||
from .entity import AbstractTemplateEntity
|
||||
from .helpers import (
|
||||
async_setup_template_entry,
|
||||
@@ -80,23 +82,23 @@ CHECK_FORECAST_KEYS = (
|
||||
.union(("apparent_temperature", "wind_gust_speed", "dew_point"))
|
||||
)
|
||||
|
||||
CONDITION_CLASSES = [
|
||||
CONDITION_CLASSES = {
|
||||
ATTR_CONDITION_CLEAR_NIGHT,
|
||||
ATTR_CONDITION_CLOUDY,
|
||||
ATTR_CONDITION_EXCEPTIONAL,
|
||||
ATTR_CONDITION_FOG,
|
||||
ATTR_CONDITION_HAIL,
|
||||
ATTR_CONDITION_LIGHTNING_RAINY,
|
||||
ATTR_CONDITION_LIGHTNING,
|
||||
ATTR_CONDITION_LIGHTNING_RAINY,
|
||||
ATTR_CONDITION_PARTLYCLOUDY,
|
||||
ATTR_CONDITION_POURING,
|
||||
ATTR_CONDITION_RAINY,
|
||||
ATTR_CONDITION_SNOWY_RAINY,
|
||||
ATTR_CONDITION_SNOWY,
|
||||
ATTR_CONDITION_SNOWY_RAINY,
|
||||
ATTR_CONDITION_SUNNY,
|
||||
ATTR_CONDITION_WINDY_VARIANT,
|
||||
ATTR_CONDITION_WINDY,
|
||||
]
|
||||
ATTR_CONDITION_WINDY_VARIANT,
|
||||
ATTR_CONDITION_EXCEPTIONAL,
|
||||
}
|
||||
|
||||
CONF_APPARENT_TEMPERATURE = "apparent_temperature"
|
||||
CONF_APPARENT_TEMPERATURE_TEMPLATE = "apparent_temperature_template"
|
||||
@@ -316,75 +318,6 @@ def async_create_preview_weather(
|
||||
)
|
||||
|
||||
|
||||
def validate_forecast(
|
||||
entity: AbstractTemplateWeather,
|
||||
option: str,
|
||||
forecast_type: Literal["daily", "hourly", "twice_daily"],
|
||||
) -> Callable[[Any], list[Forecast] | None]:
|
||||
"""Validate a forecast."""
|
||||
|
||||
weather_message = (
|
||||
"see Weather documentation https://www.home-assistant.io/integrations/weather/"
|
||||
)
|
||||
|
||||
def validate(result: Any) -> list[Forecast] | None:
|
||||
if template_validators.check_result_for_none(result):
|
||||
return None
|
||||
|
||||
if not isinstance(result, list):
|
||||
template_validators.log_validation_result_error(
|
||||
entity,
|
||||
option,
|
||||
result,
|
||||
f"expected a list, {weather_message}",
|
||||
)
|
||||
|
||||
raised = False
|
||||
for forecast in result:
|
||||
if not isinstance(forecast, dict):
|
||||
raised = True
|
||||
template_validators.log_validation_result_error(
|
||||
entity,
|
||||
option,
|
||||
result,
|
||||
f"expected a list of forecast dictionaries, got {forecast}, {weather_message}",
|
||||
)
|
||||
continue
|
||||
|
||||
diff_result = set().union(forecast.keys()).difference(CHECK_FORECAST_KEYS)
|
||||
if diff_result:
|
||||
raised = True
|
||||
template_validators.log_validation_result_error(
|
||||
entity,
|
||||
option,
|
||||
result,
|
||||
f"expected valid forecast keys, unallowed keys: ({diff_result}) for {forecast}, {weather_message}",
|
||||
)
|
||||
if forecast_type == "twice_daily" and "is_daytime" not in forecast:
|
||||
raised = True
|
||||
template_validators.log_validation_result_error(
|
||||
entity,
|
||||
option,
|
||||
result,
|
||||
f"`is_daytime` is missing in twice_daily forecast {forecast}, {weather_message}",
|
||||
)
|
||||
if "datetime" not in forecast:
|
||||
raised = True
|
||||
template_validators.log_validation_result_error(
|
||||
entity,
|
||||
option,
|
||||
result,
|
||||
f"`datetime` is missing in forecast, got {forecast}, {weather_message}",
|
||||
)
|
||||
|
||||
if raised:
|
||||
return None
|
||||
|
||||
return result
|
||||
|
||||
return validate
|
||||
|
||||
|
||||
class AbstractTemplateWeather(AbstractTemplateEntity, WeatherEntity):
|
||||
"""Representation of a template weathers features."""
|
||||
|
||||
@@ -394,79 +327,28 @@ class AbstractTemplateWeather(AbstractTemplateEntity, WeatherEntity):
|
||||
# The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
|
||||
# This ensures that the __init__ on AbstractTemplateEntity is not called twice.
|
||||
def __init__( # pylint: disable=super-init-not-called
|
||||
self, config: dict[str, Any]
|
||||
self, config: dict[str, Any], initial_state: bool | None = False
|
||||
) -> None:
|
||||
"""Initialize the features."""
|
||||
|
||||
# Required options
|
||||
self.setup_template(
|
||||
CONF_CONDITION,
|
||||
"_attr_condition",
|
||||
template_validators.item_in_list(self, CONF_CONDITION, CONDITION_CLASSES),
|
||||
)
|
||||
self.setup_template(
|
||||
CONF_HUMIDITY,
|
||||
"_attr_humidity",
|
||||
template_validators.number(self, CONF_HUMIDITY, 0.0, 100.0),
|
||||
)
|
||||
self.setup_template(
|
||||
CONF_TEMPERATURE,
|
||||
"_attr_native_temperature",
|
||||
template_validators.number(self, CONF_TEMPERATURE),
|
||||
)
|
||||
|
||||
# Optional options
|
||||
|
||||
self.setup_template(
|
||||
CONF_ATTRIBUTION,
|
||||
"_attribution",
|
||||
vol.Coerce(str),
|
||||
)
|
||||
self.setup_template(
|
||||
CONF_WIND_BEARING, "_attr_wind_bearing", None, self._update_wind_bearing
|
||||
)
|
||||
|
||||
# Optional numeric options
|
||||
for option, attribute in (
|
||||
(CONF_APPARENT_TEMPERATURE, "_attr_native_apparent_temperature"),
|
||||
(CONF_CLOUD_COVERAGE, "_attr_cloud_coverage"),
|
||||
(CONF_DEW_POINT, "_attr_native_dew_point"),
|
||||
(CONF_OZONE, "_attr_ozone"),
|
||||
(CONF_PRESSURE, "_attr_native_pressure"),
|
||||
(CONF_UV_INDEX, "_attr_uv_index"),
|
||||
(CONF_VISIBILITY, "_attr_native_visibility"),
|
||||
(CONF_WIND_GUST_SPEED, "_attr_native_wind_gust_speed"),
|
||||
(CONF_WIND_SPEED, "_attr_native_wind_speed"),
|
||||
):
|
||||
self.setup_template(
|
||||
option, attribute, template_validators.number(self, option)
|
||||
)
|
||||
|
||||
# Forecasts
|
||||
|
||||
self._forecast_daily: list[Forecast] | None = []
|
||||
self.setup_template(
|
||||
CONF_FORECAST_DAILY,
|
||||
"_forecast_daily",
|
||||
validate_forecast(self, CONF_FORECAST_DAILY, "daily"),
|
||||
self._update_forecast("daily"),
|
||||
)
|
||||
|
||||
self._forecast_hourly: list[Forecast] | None = []
|
||||
self.setup_template(
|
||||
CONF_FORECAST_HOURLY,
|
||||
"_forecast_hourly",
|
||||
validate_forecast(self, CONF_FORECAST_HOURLY, "hourly"),
|
||||
self._update_forecast("hourly"),
|
||||
)
|
||||
|
||||
self._forecast_twice_daily: list[Forecast] | None = []
|
||||
self.setup_template(
|
||||
CONF_FORECAST_TWICE_DAILY,
|
||||
"_forecast_twice_daily",
|
||||
validate_forecast(self, CONF_FORECAST_TWICE_DAILY, "twice_daily"),
|
||||
self._update_forecast("twice_daily"),
|
||||
)
|
||||
# Templates
|
||||
self._apparent_temperature_template = config.get(CONF_APPARENT_TEMPERATURE)
|
||||
self._attribution_template = config.get(CONF_ATTRIBUTION)
|
||||
self._cloud_coverage_template = config.get(CONF_CLOUD_COVERAGE)
|
||||
self._condition_template = config[CONF_CONDITION]
|
||||
self._dew_point_template = config.get(CONF_DEW_POINT)
|
||||
self._forecast_daily_template = config.get(CONF_FORECAST_DAILY)
|
||||
self._forecast_hourly_template = config.get(CONF_FORECAST_HOURLY)
|
||||
self._forecast_twice_daily_template = config.get(CONF_FORECAST_TWICE_DAILY)
|
||||
self._humidity_template = config[CONF_HUMIDITY]
|
||||
self._ozone_template = config.get(CONF_OZONE)
|
||||
self._pressure_template = config.get(CONF_PRESSURE)
|
||||
self._temperature_template = config[CONF_TEMPERATURE]
|
||||
self._uv_index_template = config.get(CONF_UV_INDEX)
|
||||
self._visibility_template = config.get(CONF_VISIBILITY)
|
||||
self._wind_bearing_template = config.get(CONF_WIND_BEARING)
|
||||
self._wind_gust_speed_template = config.get(CONF_WIND_GUST_SPEED)
|
||||
self._wind_speed_template = config.get(CONF_WIND_SPEED)
|
||||
|
||||
# Legacy support
|
||||
self._attribution: str | None = None
|
||||
@@ -480,11 +362,11 @@ class AbstractTemplateWeather(AbstractTemplateEntity, WeatherEntity):
|
||||
|
||||
# Supported Features
|
||||
self._attr_supported_features = 0
|
||||
if CONF_FORECAST_DAILY in self._templates:
|
||||
if self._forecast_daily_template:
|
||||
self._attr_supported_features |= WeatherEntityFeature.FORECAST_DAILY
|
||||
if CONF_FORECAST_HOURLY in self._templates:
|
||||
if self._forecast_hourly_template:
|
||||
self._attr_supported_features |= WeatherEntityFeature.FORECAST_HOURLY
|
||||
if CONF_FORECAST_TWICE_DAILY in self._templates:
|
||||
if self._forecast_twice_daily_template:
|
||||
self._attr_supported_features |= WeatherEntityFeature.FORECAST_TWICE_DAILY
|
||||
|
||||
@property
|
||||
@@ -494,6 +376,62 @@ class AbstractTemplateWeather(AbstractTemplateEntity, WeatherEntity):
|
||||
return "Powered by Home Assistant"
|
||||
return self._attribution
|
||||
|
||||
def _validate[T](
|
||||
self,
|
||||
validator: Callable[[Any], T],
|
||||
result: Any,
|
||||
) -> T | None:
|
||||
try:
|
||||
return validator(result)
|
||||
except vol.Invalid:
|
||||
return None
|
||||
|
||||
@callback
|
||||
def _update_apparent_temperature(self, result: Any) -> None:
|
||||
self._attr_native_apparent_temperature = self._validate(
|
||||
vol.Coerce(float), result
|
||||
)
|
||||
|
||||
@callback
|
||||
def _update_attribution(self, result: Any) -> None:
|
||||
self._attribution = vol.Coerce(str)(result)
|
||||
|
||||
@callback
|
||||
def _update_condition(self, result: Any) -> None:
|
||||
self._attr_condition = result if result in CONDITION_CLASSES else None
|
||||
|
||||
@callback
|
||||
def _update_coverage(self, result: Any) -> None:
|
||||
self._attr_cloud_coverage = self._validate(vol.Coerce(float), result)
|
||||
|
||||
@callback
|
||||
def _update_dew_point(self, result: Any) -> None:
|
||||
self._attr_native_dew_point = self._validate(vol.Coerce(float), result)
|
||||
|
||||
@callback
|
||||
def _update_humidity(self, result: Any) -> None:
|
||||
self._attr_humidity = self._validate(vol.Coerce(float), result)
|
||||
|
||||
@callback
|
||||
def _update_ozone(self, result: Any) -> None:
|
||||
self._attr_ozone = self._validate(vol.Coerce(float), result)
|
||||
|
||||
@callback
|
||||
def _update_pressure(self, result: Any) -> None:
|
||||
self._attr_native_pressure = self._validate(vol.Coerce(float), result)
|
||||
|
||||
@callback
|
||||
def _update_temperature(self, result: Any) -> None:
|
||||
self._attr_native_temperature = self._validate(vol.Coerce(float), result)
|
||||
|
||||
@callback
|
||||
def _update_uv_index(self, result: Any) -> None:
|
||||
self._attr_uv_index = self._validate(vol.Coerce(float), result)
|
||||
|
||||
@callback
|
||||
def _update_visibility(self, result: Any) -> None:
|
||||
self._attr_native_visibility = self._validate(vol.Coerce(float), result)
|
||||
|
||||
@callback
|
||||
def _update_wind_bearing(self, result: Any) -> None:
|
||||
try:
|
||||
@@ -502,31 +440,48 @@ class AbstractTemplateWeather(AbstractTemplateEntity, WeatherEntity):
|
||||
self._attr_wind_bearing = vol.Coerce(str)(result)
|
||||
|
||||
@callback
|
||||
def _update_forecast(
|
||||
def _update_wind_gust_speed(self, result: Any) -> None:
|
||||
self._attr_native_wind_gust_speed = self._validate(vol.Coerce(float), result)
|
||||
|
||||
@callback
|
||||
def _update_wind_speed(self, result: Any) -> None:
|
||||
self._attr_native_wind_speed = self._validate(vol.Coerce(float), result)
|
||||
|
||||
@callback
|
||||
def _validate_forecast(
|
||||
self,
|
||||
forecast_type: Literal["daily", "hourly", "twice_daily"],
|
||||
) -> Callable[[list[Forecast] | None], None]:
|
||||
"""Save template result and trigger forecast listener."""
|
||||
result: Any,
|
||||
) -> list[Forecast] | None:
|
||||
"""Validate the forecasts."""
|
||||
if result is None:
|
||||
return None
|
||||
|
||||
def update(result: list[Forecast] | None) -> None:
|
||||
setattr(self, f"_forecast_{forecast_type}", result)
|
||||
self.hass.async_create_task(
|
||||
self.async_update_listeners([forecast_type]), eager_start=True
|
||||
if not isinstance(result, list):
|
||||
raise vol.Invalid(
|
||||
"Forecasts is not a list, see Weather documentation https://www.home-assistant.io/integrations/weather/"
|
||||
)
|
||||
|
||||
return update
|
||||
|
||||
async def async_forecast_daily(self) -> list[Forecast]:
|
||||
"""Return the daily forecast in native units."""
|
||||
return self._forecast_daily or []
|
||||
|
||||
async def async_forecast_hourly(self) -> list[Forecast]:
|
||||
"""Return the daily forecast in native units."""
|
||||
return self._forecast_hourly or []
|
||||
|
||||
async def async_forecast_twice_daily(self) -> list[Forecast]:
|
||||
"""Return the daily forecast in native units."""
|
||||
return self._forecast_twice_daily or []
|
||||
for forecast in result:
|
||||
if not isinstance(forecast, dict):
|
||||
raise vol.Invalid(
|
||||
"Forecast in list is not a dict, see Weather documentation https://www.home-assistant.io/integrations/weather/"
|
||||
)
|
||||
diff_result = set().union(forecast.keys()).difference(CHECK_FORECAST_KEYS)
|
||||
if diff_result:
|
||||
raise vol.Invalid(
|
||||
f"Only valid keys in Forecast are allowed, unallowed keys: ({diff_result}), "
|
||||
"see Weather documentation https://www.home-assistant.io/integrations/weather/"
|
||||
)
|
||||
if forecast_type == "twice_daily" and "is_daytime" not in forecast:
|
||||
raise vol.Invalid(
|
||||
"`is_daytime` is missing in twice_daily forecast, see Weather documentation https://www.home-assistant.io/integrations/weather/"
|
||||
)
|
||||
if "datetime" not in forecast:
|
||||
raise vol.Invalid(
|
||||
"`datetime` is required in forecasts, see Weather documentation https://www.home-assistant.io/integrations/weather/"
|
||||
)
|
||||
continue
|
||||
return result
|
||||
|
||||
|
||||
class StateWeatherEntity(TemplateEntity, AbstractTemplateWeather):
|
||||
@@ -544,6 +499,152 @@ class StateWeatherEntity(TemplateEntity, AbstractTemplateWeather):
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
AbstractTemplateWeather.__init__(self, config)
|
||||
|
||||
name = self._attr_name
|
||||
if TYPE_CHECKING:
|
||||
assert name is not None
|
||||
|
||||
# Forecasts
|
||||
self._forecast_daily: list[Forecast] | None = []
|
||||
self._forecast_hourly: list[Forecast] | None = []
|
||||
self._forecast_twice_daily: list[Forecast] | None = []
|
||||
|
||||
@callback
|
||||
def _async_setup_templates(self) -> None:
|
||||
"""Set up templates."""
|
||||
|
||||
if self._apparent_temperature_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_native_apparent_temperature",
|
||||
self._apparent_temperature_template,
|
||||
on_update=self._update_apparent_temperature,
|
||||
)
|
||||
if self._attribution_template:
|
||||
self.add_template_attribute(
|
||||
"_attribution",
|
||||
self._attribution_template,
|
||||
on_update=self._update_attribution,
|
||||
)
|
||||
if self._cloud_coverage_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_cloud_coverage",
|
||||
self._cloud_coverage_template,
|
||||
on_update=self._update_coverage,
|
||||
)
|
||||
if self._condition_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_condition",
|
||||
self._condition_template,
|
||||
on_update=self._update_condition,
|
||||
)
|
||||
if self._dew_point_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_native_dew_point",
|
||||
self._dew_point_template,
|
||||
on_update=self._update_dew_point,
|
||||
)
|
||||
if self._forecast_daily_template:
|
||||
self.add_template_attribute(
|
||||
"_forecast_daily",
|
||||
self._forecast_daily_template,
|
||||
on_update=partial(self._update_forecast, "daily"),
|
||||
validator=partial(self._validate_forecast, "daily"),
|
||||
)
|
||||
if self._forecast_hourly_template:
|
||||
self.add_template_attribute(
|
||||
"_forecast_hourly",
|
||||
self._forecast_hourly_template,
|
||||
on_update=partial(self._update_forecast, "hourly"),
|
||||
validator=partial(self._validate_forecast, "hourly"),
|
||||
)
|
||||
if self._forecast_twice_daily_template:
|
||||
self.add_template_attribute(
|
||||
"_forecast_twice_daily",
|
||||
self._forecast_twice_daily_template,
|
||||
on_update=partial(self._update_forecast, "twice_daily"),
|
||||
validator=partial(self._validate_forecast, "twice_daily"),
|
||||
)
|
||||
if self._humidity_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_humidity",
|
||||
self._humidity_template,
|
||||
on_update=self._update_humidity,
|
||||
)
|
||||
if self._ozone_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_ozone",
|
||||
self._ozone_template,
|
||||
on_update=self._update_ozone,
|
||||
)
|
||||
if self._pressure_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_native_pressure",
|
||||
self._pressure_template,
|
||||
on_update=self._update_pressure,
|
||||
)
|
||||
if self._temperature_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_native_temperature",
|
||||
self._temperature_template,
|
||||
on_update=self._update_temperature,
|
||||
)
|
||||
if self._uv_index_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_uv_index",
|
||||
self._uv_index_template,
|
||||
on_update=self._update_uv_index,
|
||||
)
|
||||
if self._visibility_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_native_visibility",
|
||||
self._visibility_template,
|
||||
on_update=self._update_visibility,
|
||||
)
|
||||
if self._wind_bearing_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_wind_bearing",
|
||||
self._wind_bearing_template,
|
||||
on_update=self._update_wind_bearing,
|
||||
)
|
||||
if self._wind_gust_speed_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_native_wind_gust_speed",
|
||||
self._wind_gust_speed_template,
|
||||
on_update=self._update_wind_gust_speed,
|
||||
)
|
||||
if self._wind_speed_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_native_wind_speed",
|
||||
self._wind_speed_template,
|
||||
on_update=self._update_wind_speed,
|
||||
)
|
||||
|
||||
super()._async_setup_templates()
|
||||
|
||||
async def async_forecast_daily(self) -> list[Forecast]:
|
||||
"""Return the daily forecast in native units."""
|
||||
return self._forecast_daily or []
|
||||
|
||||
async def async_forecast_hourly(self) -> list[Forecast]:
|
||||
"""Return the daily forecast in native units."""
|
||||
return self._forecast_hourly or []
|
||||
|
||||
async def async_forecast_twice_daily(self) -> list[Forecast]:
|
||||
"""Return the daily forecast in native units."""
|
||||
return self._forecast_twice_daily or []
|
||||
|
||||
@callback
|
||||
def _update_forecast(
|
||||
self,
|
||||
forecast_type: Literal["daily", "hourly", "twice_daily"],
|
||||
result: list[Forecast] | TemplateError,
|
||||
) -> None:
|
||||
"""Save template result and trigger forecast listener."""
|
||||
attr_result = None if isinstance(result, TemplateError) else result
|
||||
setattr(self, f"_forecast_{forecast_type}", attr_result)
|
||||
self.hass.async_create_task(
|
||||
self.async_update_listeners([forecast_type]), eager_start=True
|
||||
)
|
||||
|
||||
|
||||
@dataclass(kw_only=True)
|
||||
class WeatherExtraStoredData(ExtraStoredData):
|
||||
@@ -610,6 +711,11 @@ class TriggerWeatherEntity(TriggerEntity, AbstractTemplateWeather, RestoreEntity
|
||||
"""Weather entity based on trigger data."""
|
||||
|
||||
domain = WEATHER_DOMAIN
|
||||
extra_template_keys = (
|
||||
CONF_CONDITION,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_HUMIDITY,
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@@ -619,7 +725,27 @@ class TriggerWeatherEntity(TriggerEntity, AbstractTemplateWeather, RestoreEntity
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
TriggerEntity.__init__(self, hass, coordinator, config)
|
||||
AbstractTemplateWeather.__init__(self, config)
|
||||
AbstractTemplateWeather.__init__(self, config, None)
|
||||
|
||||
for key in (
|
||||
CONF_APPARENT_TEMPERATURE,
|
||||
CONF_ATTRIBUTION,
|
||||
CONF_CLOUD_COVERAGE,
|
||||
CONF_DEW_POINT,
|
||||
CONF_FORECAST_DAILY,
|
||||
CONF_FORECAST_HOURLY,
|
||||
CONF_FORECAST_TWICE_DAILY,
|
||||
CONF_OZONE,
|
||||
CONF_PRESSURE,
|
||||
CONF_UV_INDEX,
|
||||
CONF_VISIBILITY,
|
||||
CONF_WIND_BEARING,
|
||||
CONF_WIND_GUST_SPEED,
|
||||
CONF_WIND_SPEED,
|
||||
):
|
||||
if isinstance(config.get(key), template.Template):
|
||||
self._to_render_simple.append(key)
|
||||
self._parse_result.add(key)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Restore last state."""
|
||||
@@ -646,6 +772,72 @@ class TriggerWeatherEntity(TriggerEntity, AbstractTemplateWeather, RestoreEntity
|
||||
self._attr_native_wind_gust_speed = weather_data.last_wind_gust_speed
|
||||
self._attr_native_wind_speed = weather_data.last_wind_speed
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle update of the data."""
|
||||
self._process_data()
|
||||
|
||||
if not self.available:
|
||||
return
|
||||
|
||||
write_ha_state = False
|
||||
for key, updater in (
|
||||
(CONF_APPARENT_TEMPERATURE, self._update_apparent_temperature),
|
||||
(CONF_ATTRIBUTION, self._update_attribution),
|
||||
(CONF_CLOUD_COVERAGE, self._update_coverage),
|
||||
(CONF_CONDITION, self._update_condition),
|
||||
(CONF_DEW_POINT, self._update_dew_point),
|
||||
(CONF_HUMIDITY, self._update_humidity),
|
||||
(CONF_OZONE, self._update_ozone),
|
||||
(CONF_PRESSURE, self._update_pressure),
|
||||
(CONF_TEMPERATURE, self._update_temperature),
|
||||
(CONF_UV_INDEX, self._update_uv_index),
|
||||
(CONF_VISIBILITY, self._update_visibility),
|
||||
(CONF_WIND_BEARING, self._update_wind_bearing),
|
||||
(CONF_WIND_GUST_SPEED, self._update_wind_gust_speed),
|
||||
(CONF_WIND_SPEED, self._update_wind_speed),
|
||||
):
|
||||
if (rendered := self._rendered.get(key)) is not None:
|
||||
updater(rendered)
|
||||
write_ha_state = True
|
||||
|
||||
if write_ha_state:
|
||||
self.async_write_ha_state()
|
||||
|
||||
def _check_forecast(
|
||||
self,
|
||||
forecast_type: Literal["daily", "hourly", "twice_daily"],
|
||||
key: str,
|
||||
) -> list[Forecast]:
|
||||
result = self._rendered.get(key)
|
||||
try:
|
||||
return self._validate_forecast(forecast_type, result) or []
|
||||
except vol.Invalid as err:
|
||||
_LOGGER.error(
|
||||
(
|
||||
"Error validating template result '%s' "
|
||||
"for attribute '%s' in entity %s "
|
||||
"validation message '%s'"
|
||||
),
|
||||
result,
|
||||
key,
|
||||
self.entity_id,
|
||||
err.msg,
|
||||
)
|
||||
return []
|
||||
|
||||
async def async_forecast_daily(self) -> list[Forecast]:
|
||||
"""Return the daily forecast in native units."""
|
||||
return self._check_forecast("daily", CONF_FORECAST_DAILY)
|
||||
|
||||
async def async_forecast_hourly(self) -> list[Forecast]:
|
||||
"""Return the daily forecast in native units."""
|
||||
return self._check_forecast("hourly", CONF_FORECAST_HOURLY)
|
||||
|
||||
async def async_forecast_twice_daily(self) -> list[Forecast]:
|
||||
"""Return the daily forecast in native units."""
|
||||
return self._check_forecast("twice_daily", CONF_FORECAST_TWICE_DAILY)
|
||||
|
||||
@property
|
||||
def extra_restore_state_data(self) -> WeatherExtraStoredData:
|
||||
"""Return weather specific state data to be restored."""
|
||||
|
||||
@@ -10,7 +10,11 @@ from typing import Any, cast
|
||||
import jwt
|
||||
from tesla_fleet_api import TeslaFleetApi
|
||||
from tesla_fleet_api.const import SERVERS
|
||||
from tesla_fleet_api.exceptions import PreconditionFailed, TeslaFleetError
|
||||
from tesla_fleet_api.exceptions import (
|
||||
InvalidResponse,
|
||||
PreconditionFailed,
|
||||
TeslaFleetError,
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
|
||||
@@ -37,9 +41,12 @@ class OAuth2FlowHandler(
|
||||
"""Initialize config flow."""
|
||||
super().__init__()
|
||||
self.domain: str | None = None
|
||||
self.registration_status: dict[str, bool] = {}
|
||||
self.tesla_apis: dict[str, TeslaFleetApi] = {}
|
||||
self.failed_regions: list[str] = []
|
||||
self.data: dict[str, Any] = {}
|
||||
self.uid: str | None = None
|
||||
self.apis: list[TeslaFleetApi] = []
|
||||
self.api: TeslaFleetApi | None = None
|
||||
|
||||
@property
|
||||
def logger(self) -> logging.Logger:
|
||||
@@ -57,6 +64,7 @@ class OAuth2FlowHandler(
|
||||
|
||||
self.data = data
|
||||
self.uid = token["sub"]
|
||||
server = SERVERS[token["ou_code"].lower()]
|
||||
|
||||
await self.async_set_unique_id(self.uid)
|
||||
if self.source == SOURCE_REAUTH:
|
||||
@@ -66,28 +74,24 @@ class OAuth2FlowHandler(
|
||||
)
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
# OAuth done, setup Partner API connections for all regions
|
||||
# OAuth done, setup a Partner API connection
|
||||
implementation = cast(TeslaUserImplementation, self.flow_impl)
|
||||
session = async_get_clientsession(self.hass)
|
||||
|
||||
for region, server_url in SERVERS.items():
|
||||
if region == "cn":
|
||||
continue
|
||||
api = TeslaFleetApi(
|
||||
session=session,
|
||||
access_token="",
|
||||
server=server_url,
|
||||
partner_scope=True,
|
||||
charging_scope=False,
|
||||
energy_scope=False,
|
||||
user_scope=False,
|
||||
vehicle_scope=False,
|
||||
)
|
||||
await api.get_private_key(self.hass.config.path("tesla_fleet.key"))
|
||||
await api.partner_login(
|
||||
implementation.client_id, implementation.client_secret
|
||||
)
|
||||
self.apis.append(api)
|
||||
session = async_get_clientsession(self.hass)
|
||||
self.api = TeslaFleetApi(
|
||||
access_token="",
|
||||
session=session,
|
||||
server=server,
|
||||
partner_scope=True,
|
||||
charging_scope=False,
|
||||
energy_scope=False,
|
||||
user_scope=False,
|
||||
vehicle_scope=False,
|
||||
)
|
||||
await self.api.get_private_key(self.hass.config.path("tesla_fleet.key"))
|
||||
await self.api.partner_login(
|
||||
implementation.client_id, implementation.client_secret
|
||||
)
|
||||
|
||||
return await self.async_step_domain_input()
|
||||
|
||||
@@ -126,67 +130,44 @@ class OAuth2FlowHandler(
|
||||
async def async_step_domain_registration(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle domain registration for all regions."""
|
||||
"""Handle domain registration for both regions."""
|
||||
|
||||
assert self.apis
|
||||
assert self.apis[0].private_key
|
||||
assert self.api
|
||||
assert self.api.private_key
|
||||
assert self.domain
|
||||
|
||||
errors: dict[str, str] = {}
|
||||
errors = {}
|
||||
description_placeholders = {
|
||||
"public_key_url": f"https://{self.domain}/.well-known/appspecific/com.tesla.3p.public-key.pem",
|
||||
"pem": self.apis[0].public_pem,
|
||||
"pem": self.api.public_pem,
|
||||
}
|
||||
|
||||
successful_response: dict[str, Any] | None = None
|
||||
failed_regions: list[str] = []
|
||||
|
||||
for api in self.apis:
|
||||
try:
|
||||
register_response = await api.partner.register(self.domain)
|
||||
except PreconditionFailed:
|
||||
return await self.async_step_domain_input(
|
||||
errors={CONF_DOMAIN: "precondition_failed"}
|
||||
)
|
||||
except TeslaFleetError as e:
|
||||
LOGGER.warning(
|
||||
"Partner registration failed for %s: %s",
|
||||
api.server,
|
||||
e.message,
|
||||
)
|
||||
failed_regions.append(api.server or "unknown")
|
||||
else:
|
||||
if successful_response is None:
|
||||
successful_response = register_response
|
||||
|
||||
if successful_response is None:
|
||||
try:
|
||||
register_response = await self.api.partner.register(self.domain)
|
||||
except PreconditionFailed:
|
||||
return await self.async_step_domain_input(
|
||||
errors={CONF_DOMAIN: "precondition_failed"}
|
||||
)
|
||||
except InvalidResponse:
|
||||
errors["base"] = "invalid_response"
|
||||
return self.async_show_form(
|
||||
step_id="domain_registration",
|
||||
description_placeholders=description_placeholders,
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
if failed_regions:
|
||||
LOGGER.warning(
|
||||
"Partner registration succeeded on some regions but failed on: %s",
|
||||
", ".join(failed_regions),
|
||||
)
|
||||
|
||||
# Verify public key from the successful response
|
||||
registered_public_key = successful_response.get("response", {}).get(
|
||||
"public_key"
|
||||
)
|
||||
|
||||
if not registered_public_key:
|
||||
errors["base"] = "public_key_not_found"
|
||||
elif (
|
||||
registered_public_key.lower()
|
||||
!= self.apis[0].public_uncompressed_point.lower()
|
||||
):
|
||||
errors["base"] = "public_key_mismatch"
|
||||
except TeslaFleetError as e:
|
||||
errors["base"] = "unknown_error"
|
||||
description_placeholders["error"] = e.message
|
||||
else:
|
||||
return await self.async_step_registration_complete()
|
||||
# Get public key from response
|
||||
registered_public_key = register_response.get("response", {}).get(
|
||||
"public_key"
|
||||
)
|
||||
|
||||
if not registered_public_key:
|
||||
errors["base"] = "public_key_not_found"
|
||||
elif (
|
||||
registered_public_key.lower()
|
||||
!= self.api.public_uncompressed_point.lower()
|
||||
):
|
||||
errors["base"] = "public_key_mismatch"
|
||||
else:
|
||||
return await self.async_step_registration_complete()
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="domain_registration",
|
||||
|
||||
@@ -87,24 +87,15 @@ async def _get_access_token(oauth_session: OAuth2Session) -> str:
|
||||
await oauth_session.async_ensure_token_valid()
|
||||
except ClientResponseError as err:
|
||||
if err.status == 401:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="auth_failed",
|
||||
) from err
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="not_ready_connection_error",
|
||||
) from err
|
||||
raise ConfigEntryAuthFailed from err
|
||||
raise ConfigEntryNotReady from err
|
||||
except (KeyError, TypeError) as err:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="token_data_malformed",
|
||||
) from err
|
||||
except ClientError as err:
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="not_ready_connection_error",
|
||||
) from err
|
||||
raise ConfigEntryNotReady from err
|
||||
return oauth_session.token[CONF_ACCESS_TOKEN]
|
||||
|
||||
|
||||
@@ -140,20 +131,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) -
|
||||
teslemetry.products(),
|
||||
)
|
||||
except InvalidToken as e:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="auth_failed_invalid_token",
|
||||
) from e
|
||||
raise ConfigEntryAuthFailed from e
|
||||
except SubscriptionRequired as e:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="auth_failed_subscription_required",
|
||||
) from e
|
||||
raise ConfigEntryAuthFailed from e
|
||||
except TeslaFleetError as e:
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="not_ready_api_error",
|
||||
) from e
|
||||
raise ConfigEntryNotReady from e
|
||||
|
||||
scopes = calls[0]["scopes"]
|
||||
region = calls[0]["region"]
|
||||
@@ -260,26 +242,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) -
|
||||
# Check live status endpoint works before creating its coordinator
|
||||
try:
|
||||
live_status = (await energy_site.live_status())["response"]
|
||||
except InvalidToken as e:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="auth_failed_invalid_token",
|
||||
) from e
|
||||
except SubscriptionRequired as e:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="auth_failed_subscription_required",
|
||||
) from e
|
||||
except Forbidden as e:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="auth_failed_invalid_token",
|
||||
) from e
|
||||
except (InvalidToken, Forbidden, SubscriptionRequired) as e:
|
||||
raise ConfigEntryAuthFailed from e
|
||||
except TeslaFleetError as e:
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="not_ready_api_error",
|
||||
) from e
|
||||
raise ConfigEntryNotReady(e.message) from e
|
||||
|
||||
energysites.append(
|
||||
TeslemetryEnergyData(
|
||||
@@ -379,10 +345,7 @@ async def async_migrate_entry(
|
||||
CLIENT_ID, hass.config.location_name
|
||||
)
|
||||
except (ClientError, TypeError) as e:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="auth_failed_migration",
|
||||
) from e
|
||||
raise ConfigEntryAuthFailed from e
|
||||
|
||||
# Add auth_implementation for OAuth2 flow compatibility
|
||||
data["auth_implementation"] = DOMAIN
|
||||
|
||||
@@ -24,7 +24,7 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
|
||||
if TYPE_CHECKING:
|
||||
from . import TeslemetryConfigEntry
|
||||
|
||||
from .const import DOMAIN, ENERGY_HISTORY_FIELDS, LOGGER
|
||||
from .const import ENERGY_HISTORY_FIELDS, LOGGER
|
||||
from .helpers import flatten
|
||||
|
||||
RETRY_EXCEPTIONS = (
|
||||
@@ -94,16 +94,9 @@ class TeslemetryVehicleDataCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
except (InvalidToken, SubscriptionRequired) as e:
|
||||
raise ConfigEntryAuthFailed from e
|
||||
except RETRY_EXCEPTIONS as e:
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_failed",
|
||||
retry_after=_get_retry_after(e),
|
||||
) from e
|
||||
raise UpdateFailed(e.message, retry_after=_get_retry_after(e)) from e
|
||||
except TeslaFleetError as e:
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_failed",
|
||||
) from e
|
||||
raise UpdateFailed(e.message) from e
|
||||
return flatten(data)
|
||||
|
||||
|
||||
@@ -143,16 +136,9 @@ class TeslemetryEnergySiteLiveCoordinator(DataUpdateCoordinator[dict[str, Any]])
|
||||
except (InvalidToken, SubscriptionRequired) as e:
|
||||
raise ConfigEntryAuthFailed from e
|
||||
except RETRY_EXCEPTIONS as e:
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_failed",
|
||||
retry_after=_get_retry_after(e),
|
||||
) from e
|
||||
raise UpdateFailed(e.message, retry_after=_get_retry_after(e)) from e
|
||||
except TeslaFleetError as e:
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_failed",
|
||||
) from e
|
||||
raise UpdateFailed(e.message) from e
|
||||
# Convert Wall Connectors from array to dict
|
||||
data["wall_connectors"] = {
|
||||
wc["din"]: wc for wc in (data.get("wall_connectors") or [])
|
||||
@@ -190,16 +176,9 @@ class TeslemetryEnergySiteInfoCoordinator(DataUpdateCoordinator[dict[str, Any]])
|
||||
except (InvalidToken, SubscriptionRequired) as e:
|
||||
raise ConfigEntryAuthFailed from e
|
||||
except RETRY_EXCEPTIONS as e:
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_failed",
|
||||
retry_after=_get_retry_after(e),
|
||||
) from e
|
||||
raise UpdateFailed(e.message, retry_after=_get_retry_after(e)) from e
|
||||
except TeslaFleetError as e:
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_failed",
|
||||
) from e
|
||||
raise UpdateFailed(e.message) from e
|
||||
return flatten(data)
|
||||
|
||||
|
||||
@@ -232,22 +211,12 @@ class TeslemetryEnergyHistoryCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
except (InvalidToken, SubscriptionRequired) as e:
|
||||
raise ConfigEntryAuthFailed from e
|
||||
except RETRY_EXCEPTIONS as e:
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_failed",
|
||||
retry_after=_get_retry_after(e),
|
||||
) from e
|
||||
raise UpdateFailed(e.message, retry_after=_get_retry_after(e)) from e
|
||||
except TeslaFleetError as e:
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_failed",
|
||||
) from e
|
||||
raise UpdateFailed(e.message) from e
|
||||
|
||||
if not data or not isinstance(data.get("time_series"), list):
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_failed_invalid_data",
|
||||
)
|
||||
raise UpdateFailed("Received invalid data")
|
||||
|
||||
# Add all time periods together
|
||||
output = dict.fromkeys(ENERGY_HISTORY_FIELDS, None)
|
||||
|
||||
@@ -173,6 +173,7 @@
|
||||
"default": "mdi:ev-plug-ccs2"
|
||||
}
|
||||
},
|
||||
|
||||
"device_tracker": {
|
||||
"location": {
|
||||
"default": "mdi:map-marker"
|
||||
@@ -195,38 +196,6 @@
|
||||
"default": "mdi:car-speed-limiter"
|
||||
}
|
||||
},
|
||||
"number": {
|
||||
"backup_reserve_percent": {
|
||||
"default": "mdi:battery-outline",
|
||||
"range": {
|
||||
"10": "mdi:battery-10",
|
||||
"20": "mdi:battery-20",
|
||||
"30": "mdi:battery-30",
|
||||
"40": "mdi:battery-40",
|
||||
"50": "mdi:battery-50",
|
||||
"60": "mdi:battery-60",
|
||||
"70": "mdi:battery-70",
|
||||
"80": "mdi:battery-80",
|
||||
"90": "mdi:battery-90",
|
||||
"100": "mdi:battery"
|
||||
}
|
||||
},
|
||||
"off_grid_vehicle_charging_reserve_percent": {
|
||||
"default": "mdi:battery-outline",
|
||||
"range": {
|
||||
"10": "mdi:battery-10",
|
||||
"20": "mdi:battery-20",
|
||||
"30": "mdi:battery-30",
|
||||
"40": "mdi:battery-40",
|
||||
"50": "mdi:battery-50",
|
||||
"60": "mdi:battery-60",
|
||||
"70": "mdi:battery-70",
|
||||
"80": "mdi:battery-80",
|
||||
"90": "mdi:battery-90",
|
||||
"100": "mdi:battery"
|
||||
}
|
||||
}
|
||||
},
|
||||
"select": {
|
||||
"climate_state_seat_heater_left": {
|
||||
"default": "mdi:car-seat-heater",
|
||||
|
||||
@@ -27,6 +27,7 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.icon import icon_for_battery_level
|
||||
|
||||
from . import TeslemetryConfigEntry
|
||||
from .entity import (
|
||||
@@ -295,6 +296,7 @@ class TeslemetryEnergyInfoNumberSensorEntity(TeslemetryEnergyInfoEntity, NumberE
|
||||
def _async_update_attrs(self) -> None:
|
||||
"""Update the attributes of the entity."""
|
||||
self._attr_native_value = self._value
|
||||
self._attr_icon = icon_for_battery_level(self.native_value)
|
||||
|
||||
async def async_set_native_value(self, value: float) -> None:
|
||||
"""Set new value."""
|
||||
|
||||
@@ -66,8 +66,18 @@ rules:
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations: done
|
||||
icon-translations: done
|
||||
exception-translations:
|
||||
status: todo
|
||||
comment: |
|
||||
ConfigEntryAuthFailed and UpdateFailed exceptions can have translated messages.
|
||||
Also one "unknown error" that cannot be translated.
|
||||
icon-translations:
|
||||
status: todo
|
||||
comment: |
|
||||
number.py:299 uses _attr_icon = icon_for_battery_level() instead of
|
||||
range-based icons in icons.json. Affects backup_reserve_percent and
|
||||
off_grid_vehicle_charging_reserve_percent entities. Remove the dynamic
|
||||
icon assignment and add range-based icon entries to icons.json.
|
||||
reconfiguration-flow: done
|
||||
repair-issues:
|
||||
status: exempt
|
||||
|
||||
@@ -1090,18 +1090,6 @@
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"auth_failed": {
|
||||
"message": "Authentication failed, please reauthenticate"
|
||||
},
|
||||
"auth_failed_invalid_token": {
|
||||
"message": "Access token is invalid, please reauthenticate"
|
||||
},
|
||||
"auth_failed_migration": {
|
||||
"message": "Failed to migrate to OAuth, please reauthenticate"
|
||||
},
|
||||
"auth_failed_subscription_required": {
|
||||
"message": "Teslemetry subscription required"
|
||||
},
|
||||
"command_error": {
|
||||
"message": "Command returned error: {error}"
|
||||
},
|
||||
@@ -1138,12 +1126,6 @@
|
||||
"no_vehicle_data_for_device": {
|
||||
"message": "No vehicle data for device ID: {device_id}"
|
||||
},
|
||||
"not_ready_api_error": {
|
||||
"message": "Error communicating with Teslemetry API"
|
||||
},
|
||||
"not_ready_connection_error": {
|
||||
"message": "Unable to connect to Teslemetry"
|
||||
},
|
||||
"oauth_implementation_not_available": {
|
||||
"message": "OAuth implementation not available, try reauthenticating"
|
||||
},
|
||||
@@ -1159,12 +1141,6 @@
|
||||
"token_data_malformed": {
|
||||
"message": "Token data malformed, try reauthenticating"
|
||||
},
|
||||
"update_failed": {
|
||||
"message": "Error fetching data from Teslemetry API"
|
||||
},
|
||||
"update_failed_invalid_data": {
|
||||
"message": "Received invalid data from API"
|
||||
},
|
||||
"wake_up_failed": {
|
||||
"message": "Failed to wake up vehicle: {message}"
|
||||
},
|
||||
|
||||
@@ -93,7 +93,4 @@ COLLABORATORS: Final = "collaborators"
|
||||
|
||||
DOMAIN: Final = "todoist"
|
||||
|
||||
# Maximum number of items per page for Todoist API requests
|
||||
MAX_PAGE_SIZE: Final = 200
|
||||
|
||||
SERVICE_NEW_TASK: Final = "new_task"
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
"""DataUpdateCoordinator for the Todoist component."""
|
||||
|
||||
import asyncio
|
||||
from collections.abc import AsyncGenerator
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
@@ -13,8 +12,6 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import MAX_PAGE_SIZE
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
|
||||
@@ -56,30 +53,26 @@ class TodoistCoordinator(DataUpdateCoordinator[list[Task]]):
|
||||
async def _async_update_data(self) -> list[Task]:
|
||||
"""Fetch tasks from the Todoist API."""
|
||||
try:
|
||||
tasks_async = await self.api.get_tasks(limit=MAX_PAGE_SIZE)
|
||||
return await flatten_async_pages(tasks_async)
|
||||
except asyncio.CancelledError:
|
||||
raise
|
||||
tasks_async = await self.api.get_tasks()
|
||||
except Exception as err:
|
||||
raise UpdateFailed(f"Error communicating with API: {err}") from err
|
||||
return await flatten_async_pages(tasks_async)
|
||||
|
||||
async def async_get_projects(self) -> list[Project]:
|
||||
"""Return todoist projects fetched at most once."""
|
||||
if self._projects is None:
|
||||
projects_async = await self.api.get_projects(limit=MAX_PAGE_SIZE)
|
||||
projects_async = await self.api.get_projects()
|
||||
self._projects = await flatten_async_pages(projects_async)
|
||||
return self._projects
|
||||
|
||||
async def async_get_sections(self, project_id: str) -> list[Section]:
|
||||
"""Return todoist sections for a given project ID."""
|
||||
sections_async = await self.api.get_sections(
|
||||
project_id=project_id, limit=MAX_PAGE_SIZE
|
||||
)
|
||||
sections_async = await self.api.get_sections(project_id=project_id)
|
||||
return await flatten_async_pages(sections_async)
|
||||
|
||||
async def async_get_labels(self) -> list[Label]:
|
||||
"""Return todoist labels fetched at most once."""
|
||||
if self._labels is None:
|
||||
labels_async = await self.api.get_labels(limit=MAX_PAGE_SIZE)
|
||||
labels_async = await self.api.get_labels()
|
||||
self._labels = await flatten_async_pages(labels_async)
|
||||
return self._labels
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user