mirror of
https://github.com/home-assistant/core.git
synced 2026-02-28 04:51:41 +01:00
Compare commits
83 Commits
gen-dashbo
...
fix_test_s
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
998971260f | ||
|
|
eab80f78d9 | ||
|
|
aa9fdd56ec | ||
|
|
c727261f67 | ||
|
|
703c62aa74 | ||
|
|
6e1f90228b | ||
|
|
3be089d2a5 | ||
|
|
692d3d35cc | ||
|
|
c52cb8362e | ||
|
|
93ac215ab4 | ||
|
|
f9eb86b50a | ||
|
|
a7f9992a4e | ||
|
|
13fde0d135 | ||
|
|
5105c6c50f | ||
|
|
af152ebe50 | ||
|
|
dea4452e42 | ||
|
|
af07631d83 | ||
|
|
d2ca00ca53 | ||
|
|
bb2f7bdfc4 | ||
|
|
b1379d9153 | ||
|
|
ea4b286659 | ||
|
|
2d00cb9a29 | ||
|
|
2ef1a20ae4 | ||
|
|
95defddfff | ||
|
|
009bdd91cc | ||
|
|
63bbead41e | ||
|
|
2c9a96b62a | ||
|
|
ace7fad62a | ||
|
|
3c73cc8bad | ||
|
|
83c41c265d | ||
|
|
c8bc5618dc | ||
|
|
60d770f265 | ||
|
|
6f4b9dcad7 | ||
|
|
1bba31f7af | ||
|
|
4705e584b0 | ||
|
|
80bbe5df6a | ||
|
|
88c4d88e06 | ||
|
|
718f459026 | ||
|
|
5c3ddcff3e | ||
|
|
08acececb2 | ||
|
|
27d6ae2881 | ||
|
|
5c4d9f4ca4 | ||
|
|
9ece327881 | ||
|
|
1b0ef3f358 | ||
|
|
a5eca0614a | ||
|
|
7b2509fadb | ||
|
|
f6e0bc28f4 | ||
|
|
e87056408e | ||
|
|
c945f32989 | ||
|
|
8d37917d8b | ||
|
|
68cc2dff53 | ||
|
|
45babbca92 | ||
|
|
b56dcfb7e9 | ||
|
|
a56114d84a | ||
|
|
de8a26c5b0 | ||
|
|
48f39524c4 | ||
|
|
2b4ef312c3 | ||
|
|
b4d175b811 | ||
|
|
7ff6c2a421 | ||
|
|
cf0a438f32 | ||
|
|
9e1bfa3564 | ||
|
|
3c266183e1 | ||
|
|
5c5f5d064a | ||
|
|
fc18ec4588 | ||
|
|
3fd2fa27e7 | ||
|
|
cf637f8c2f | ||
|
|
228fca9f0c | ||
|
|
c5ce8998e2 | ||
|
|
a4204bf11e | ||
|
|
3e44d15fc1 | ||
|
|
4f07d8688c | ||
|
|
89fda1a4ae | ||
|
|
f678e7ef34 | ||
|
|
24e8208deb | ||
|
|
3c66a1b35d | ||
|
|
5a2299e8b6 | ||
|
|
8087953b90 | ||
|
|
77a15b44c9 | ||
|
|
2177b494b9 | ||
|
|
10497c2bf4 | ||
|
|
e7fd744941 | ||
|
|
b9bfbc9e98 | ||
|
|
ba6f1343cc |
2
.github/workflows/ci.yaml
vendored
2
.github/workflows/ci.yaml
vendored
@@ -254,7 +254,7 @@ jobs:
|
||||
echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json"
|
||||
echo "::add-matcher::.github/workflows/matchers/codespell.json"
|
||||
- name: Run prek
|
||||
uses: j178/prek-action@564dda4cfa5e96aafdc4a5696c4bf7b46baae5ac # v1.1.0
|
||||
uses: j178/prek-action@0bb87d7f00b0c99306c8bcb8b8beba1eb581c037 # v1.1.1
|
||||
env:
|
||||
PREK_SKIP: no-commit-to-branch,mypy,pylint,gen_requirements_all,hassfest,hassfest-metadata,hassfest-mypy-config
|
||||
RUFF_OUTPUT_FORMAT: github
|
||||
|
||||
@@ -231,7 +231,7 @@ jobs:
|
||||
- name: Detect duplicates using AI
|
||||
id: ai_detection
|
||||
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
|
||||
uses: actions/ai-inference@a6101c89c6feaecc585efdd8d461f18bb7896f20 # v2.0.5
|
||||
uses: actions/ai-inference@a380166897b5408b8fb7dddd148142794cb5624a # v2.0.6
|
||||
with:
|
||||
model: openai/gpt-4o
|
||||
system-prompt: |
|
||||
|
||||
@@ -57,7 +57,7 @@ jobs:
|
||||
- name: Detect language using AI
|
||||
id: ai_language_detection
|
||||
if: steps.detect_language.outputs.should_continue == 'true'
|
||||
uses: actions/ai-inference@a6101c89c6feaecc585efdd8d461f18bb7896f20 # v2.0.5
|
||||
uses: actions/ai-inference@a380166897b5408b8fb7dddd148142794cb5624a # v2.0.6
|
||||
with:
|
||||
model: openai/gpt-4o-mini
|
||||
system-prompt: |
|
||||
|
||||
8
CODEOWNERS
generated
8
CODEOWNERS
generated
@@ -15,7 +15,7 @@
|
||||
.yamllint @home-assistant/core
|
||||
pyproject.toml @home-assistant/core
|
||||
requirements_test.txt @home-assistant/core
|
||||
/.devcontainer/ @home-assistant/core
|
||||
/.devcontainer/ @home-assistant/core @edenhaus
|
||||
/.github/ @home-assistant/core
|
||||
/.vscode/ @home-assistant/core
|
||||
/homeassistant/*.py @home-assistant/core
|
||||
@@ -672,6 +672,8 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/hdmi_cec/ @inytar
|
||||
/tests/components/hdmi_cec/ @inytar
|
||||
/homeassistant/components/heatmiser/ @andylockran
|
||||
/homeassistant/components/hegel/ @boazca
|
||||
/tests/components/hegel/ @boazca
|
||||
/homeassistant/components/heos/ @andrewsayre
|
||||
/tests/components/heos/ @andrewsayre
|
||||
/homeassistant/components/here_travel_time/ @eifinger
|
||||
@@ -715,8 +717,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/homekit_controller/ @Jc2k @bdraco
|
||||
/homeassistant/components/homematic/ @pvizeli
|
||||
/tests/components/homematic/ @pvizeli
|
||||
/homeassistant/components/homematicip_cloud/ @hahn-th
|
||||
/tests/components/homematicip_cloud/ @hahn-th
|
||||
/homeassistant/components/homematicip_cloud/ @hahn-th @lackas
|
||||
/tests/components/homematicip_cloud/ @hahn-th @lackas
|
||||
/homeassistant/components/homewizard/ @DCSBL
|
||||
/tests/components/homewizard/ @DCSBL
|
||||
/homeassistant/components/honeywell/ @rdfurman @mkmer
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aioamazondevices"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aioamazondevices==11.1.3"]
|
||||
"requirements": ["aioamazondevices==12.0.0"]
|
||||
}
|
||||
|
||||
@@ -73,31 +73,21 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
started = False
|
||||
|
||||
async def _async_handle_labs_update(
|
||||
event: Event[labs.EventLabsUpdatedData],
|
||||
event_data: labs.EventLabsUpdatedData,
|
||||
) -> None:
|
||||
"""Handle labs feature toggle."""
|
||||
await analytics.save_preferences({ATTR_SNAPSHOTS: event.data["enabled"]})
|
||||
await analytics.save_preferences({ATTR_SNAPSHOTS: event_data["enabled"]})
|
||||
if started:
|
||||
await analytics.async_schedule()
|
||||
|
||||
@callback
|
||||
def _async_labs_event_filter(event_data: labs.EventLabsUpdatedData) -> bool:
|
||||
"""Filter labs events for this integration's snapshot feature."""
|
||||
return (
|
||||
event_data["domain"] == DOMAIN
|
||||
and event_data["preview_feature"] == LABS_SNAPSHOT_FEATURE
|
||||
)
|
||||
|
||||
async def start_schedule(_event: Event) -> None:
|
||||
"""Start the send schedule after the started event."""
|
||||
nonlocal started
|
||||
started = True
|
||||
await analytics.async_schedule()
|
||||
|
||||
hass.bus.async_listen(
|
||||
labs.EVENT_LABS_UPDATED,
|
||||
_async_handle_labs_update,
|
||||
event_filter=_async_labs_event_filter,
|
||||
labs.async_subscribe_preview_feature(
|
||||
hass, DOMAIN, LABS_SNAPSHOT_FEATURE, _async_handle_labs_update
|
||||
)
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, start_schedule)
|
||||
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/aosmith",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["py-aosmith==1.0.16"]
|
||||
"requirements": ["py-aosmith==1.0.17"]
|
||||
}
|
||||
|
||||
@@ -19,5 +19,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/aranet",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["aranet4==2.5.1"]
|
||||
"requirements": ["aranet4==2.6.0"]
|
||||
}
|
||||
|
||||
@@ -30,6 +30,9 @@
|
||||
"title": "Set up one-time password delivered by notify component"
|
||||
},
|
||||
"setup": {
|
||||
"data": {
|
||||
"code": "Code"
|
||||
},
|
||||
"description": "A one-time password has been sent via **notify.{notify_service}**. Please enter it below:",
|
||||
"title": "Verify setup"
|
||||
}
|
||||
@@ -42,6 +45,9 @@
|
||||
},
|
||||
"step": {
|
||||
"init": {
|
||||
"data": {
|
||||
"code": "Code"
|
||||
},
|
||||
"description": "To activate two-factor authentication using time-based one-time passwords, scan the QR code with your authentication app. If you don't have one, we recommend either [Google Authenticator]({google_authenticator_url}) or [Authy]({authy_url}).\n\n{qr_code}\n\nAfter scanning the code, enter the six-digit code from your app to verify the setup. If you have problems scanning the QR code, do a manual setup with code **`{code}`**.",
|
||||
"title": "Set up two-factor authentication using TOTP"
|
||||
}
|
||||
|
||||
@@ -14,7 +14,7 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.components import labs, websocket_api
|
||||
from homeassistant.components.blueprint import CONF_USE_BLUEPRINT
|
||||
from homeassistant.components.labs import async_listen as async_labs_listen
|
||||
from homeassistant.components.labs import async_subscribe_preview_feature
|
||||
from homeassistant.const import (
|
||||
ATTR_AREA_ID,
|
||||
ATTR_ENTITY_ID,
|
||||
@@ -386,14 +386,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
schema=vol.Schema({vol.Optional(CONF_ID): str}),
|
||||
)
|
||||
|
||||
@callback
|
||||
def new_triggers_conditions_listener() -> None:
|
||||
async def new_triggers_conditions_listener(
|
||||
_event_data: labs.EventLabsUpdatedData,
|
||||
) -> None:
|
||||
"""Handle new_triggers_conditions flag change."""
|
||||
hass.async_create_task(
|
||||
reload_helper.execute_service(ServiceCall(hass, DOMAIN, SERVICE_RELOAD))
|
||||
)
|
||||
await reload_helper.execute_service(ServiceCall(hass, DOMAIN, SERVICE_RELOAD))
|
||||
|
||||
async_labs_listen(
|
||||
async_subscribe_preview_feature(
|
||||
hass,
|
||||
DOMAIN,
|
||||
NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG,
|
||||
|
||||
@@ -8,6 +8,6 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["aiostreammagic"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aiostreammagic==2.11.0"],
|
||||
"requirements": ["aiostreammagic==2.12.1"],
|
||||
"zeroconf": ["_stream-magic._tcp.local.", "_smoip._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -19,11 +19,11 @@
|
||||
"secret_access_key": "Secret access key"
|
||||
},
|
||||
"data_description": {
|
||||
"access_key_id": "Access key ID to connect to Cloudflare R2 (this is your Account ID)",
|
||||
"access_key_id": "Access key ID to connect to Cloudflare R2",
|
||||
"bucket": "Bucket must already exist and be writable by the provided credentials.",
|
||||
"endpoint_url": "Cloudflare R2 S3-compatible endpoint.",
|
||||
"prefix": "Optional folder path inside the bucket. Example: backups/homeassistant",
|
||||
"secret_access_key": "Secret access key to connect to Cloudflare R2. See [Docs]({auth_docs_url})"
|
||||
"secret_access_key": "Secret access key to connect to Cloudflare R2. See [Cloudflare documentation]({auth_docs_url})"
|
||||
},
|
||||
"title": "Add Cloudflare R2 bucket"
|
||||
}
|
||||
|
||||
@@ -70,6 +70,10 @@ class CoolmasterClimate(CoolmasterEntity, ClimateEntity):
|
||||
|
||||
_attr_name = None
|
||||
|
||||
# TODO(2026.7.0): When support for unknown fan speeds is removed, delete this variable.
|
||||
# Holds unknown fan speeds we have already warned about.
|
||||
warned_unknown_fan_speeds: set[str] = set()
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: CoolmasterDataUpdateCoordinator,
|
||||
@@ -125,8 +129,20 @@ class CoolmasterClimate(CoolmasterEntity, ClimateEntity):
|
||||
def fan_mode(self):
|
||||
"""Return the fan setting."""
|
||||
|
||||
# Normalize to lowercase for lookup, and pass unknown values through.
|
||||
return CM_TO_HA_FAN.get(self._unit.fan_speed.lower(), self._unit.fan_speed)
|
||||
# Normalize to lowercase for lookup, and pass unknown lowercase values through.
|
||||
fan_speed_lower = self._unit.fan_speed.lower()
|
||||
if fan_speed_lower not in CM_TO_HA_FAN:
|
||||
# TODO(2026.7.0): Stop supporting unknown fan speeds.
|
||||
if fan_speed_lower not in CoolmasterClimate.warned_unknown_fan_speeds:
|
||||
CoolmasterClimate.warned_unknown_fan_speeds.add(fan_speed_lower)
|
||||
_LOGGER.warning(
|
||||
"Detected unknown fan speed value from HVAC unit: %s. "
|
||||
"Support for unknown fan speeds will be removed in 2026.7.0",
|
||||
fan_speed_lower,
|
||||
)
|
||||
return fan_speed_lower
|
||||
|
||||
return CM_TO_HA_FAN[fan_speed_lower]
|
||||
|
||||
@property
|
||||
def fan_modes(self):
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
"mqtt": ["esphome/discover/#"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": [
|
||||
"aioesphomeapi==43.14.0",
|
||||
"aioesphomeapi==44.0.0",
|
||||
"esphome-dashboard-api==1.3.0",
|
||||
"bleak-esphome==3.6.0"
|
||||
],
|
||||
|
||||
@@ -11,6 +11,7 @@ from homeassistant.components.sensor import (
|
||||
RestoreSensor,
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN, UnitOfMass
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
@@ -47,6 +48,7 @@ class EufyLifeSensorEntity(SensorEntity):
|
||||
"""Representation of an EufyLife sensor."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_state_class = SensorStateClass.MEASUREMENT
|
||||
|
||||
def __init__(self, data: EufyLifeData) -> None:
|
||||
"""Initialize the weight sensor entity."""
|
||||
|
||||
@@ -65,10 +65,10 @@ class FritzGuestWifiQRImage(FritzBoxBaseEntity, ImageEntity):
|
||||
super().__init__(avm_wrapper, device_friendly_name)
|
||||
ImageEntity.__init__(self, hass)
|
||||
|
||||
async def _fetch_image(self) -> bytes:
|
||||
def _fetch_image(self) -> bytes:
|
||||
"""Fetch the QR code from the Fritz!Box."""
|
||||
qr_stream: BytesIO = await self.hass.async_add_executor_job(
|
||||
self._avm_wrapper.fritz_guest_wifi.get_wifi_qr_code, "png"
|
||||
qr_stream: BytesIO = self._avm_wrapper.fritz_guest_wifi.get_wifi_qr_code(
|
||||
"png", border=2
|
||||
)
|
||||
qr_bytes = qr_stream.getvalue()
|
||||
_LOGGER.debug("fetched %s bytes", len(qr_bytes))
|
||||
@@ -77,13 +77,15 @@ class FritzGuestWifiQRImage(FritzBoxBaseEntity, ImageEntity):
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Fetch and set initial data and state."""
|
||||
self._current_qr_bytes = await self._fetch_image()
|
||||
self._current_qr_bytes = await self.hass.async_add_executor_job(
|
||||
self._fetch_image
|
||||
)
|
||||
self._attr_image_last_updated = dt_util.utcnow()
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update the image entity data."""
|
||||
try:
|
||||
qr_bytes = await self._fetch_image()
|
||||
qr_bytes = await self.hass.async_add_executor_job(self._fetch_image)
|
||||
except RequestException:
|
||||
self._current_qr_bytes = None
|
||||
self._attr_image_last_updated = None
|
||||
|
||||
@@ -23,7 +23,7 @@
|
||||
"pitch": "Default pitch of the voice",
|
||||
"profiles": "Default audio profiles",
|
||||
"speed": "Default rate/speed of the voice",
|
||||
"stt_model": "Speech-to-Text model",
|
||||
"stt_model": "Speech-to-text model",
|
||||
"text_type": "Default text type",
|
||||
"voice": "Default voice name (overrides language and gender)"
|
||||
}
|
||||
|
||||
@@ -43,7 +43,11 @@ SENSOR_DESCRIPTIONS: list[GreenPlanetEnergySensorEntityDescription] = [
|
||||
translation_key="highest_price_today",
|
||||
native_unit_of_measurement=f"{CURRENCY_EURO}/{UnitOfEnergy.KILO_WATT_HOUR}",
|
||||
suggested_display_precision=4,
|
||||
value_fn=lambda api, data: api.get_highest_price_today(data),
|
||||
value_fn=lambda api, data: (
|
||||
price / 100
|
||||
if (price := api.get_highest_price_today(data)) is not None
|
||||
else None
|
||||
),
|
||||
),
|
||||
GreenPlanetEnergySensorEntityDescription(
|
||||
key="gpe_highest_price_time",
|
||||
@@ -61,7 +65,11 @@ SENSOR_DESCRIPTIONS: list[GreenPlanetEnergySensorEntityDescription] = [
|
||||
native_unit_of_measurement=f"{CURRENCY_EURO}/{UnitOfEnergy.KILO_WATT_HOUR}",
|
||||
suggested_display_precision=4,
|
||||
translation_placeholders={"time_range": "(06:00-18:00)"},
|
||||
value_fn=lambda api, data: api.get_lowest_price_day(data),
|
||||
value_fn=lambda api, data: (
|
||||
price / 100
|
||||
if (price := api.get_lowest_price_day(data)) is not None
|
||||
else None
|
||||
),
|
||||
),
|
||||
GreenPlanetEnergySensorEntityDescription(
|
||||
key="gpe_lowest_price_day_time",
|
||||
@@ -80,7 +88,11 @@ SENSOR_DESCRIPTIONS: list[GreenPlanetEnergySensorEntityDescription] = [
|
||||
native_unit_of_measurement=f"{CURRENCY_EURO}/{UnitOfEnergy.KILO_WATT_HOUR}",
|
||||
suggested_display_precision=4,
|
||||
translation_placeholders={"time_range": "(18:00-06:00)"},
|
||||
value_fn=lambda api, data: api.get_lowest_price_night(data),
|
||||
value_fn=lambda api, data: (
|
||||
price / 100
|
||||
if (price := api.get_lowest_price_night(data)) is not None
|
||||
else None
|
||||
),
|
||||
),
|
||||
GreenPlanetEnergySensorEntityDescription(
|
||||
key="gpe_lowest_price_night_time",
|
||||
@@ -98,7 +110,11 @@ SENSOR_DESCRIPTIONS: list[GreenPlanetEnergySensorEntityDescription] = [
|
||||
translation_key="current_price",
|
||||
native_unit_of_measurement=f"{CURRENCY_EURO}/{UnitOfEnergy.KILO_WATT_HOUR}",
|
||||
suggested_display_precision=4,
|
||||
value_fn=lambda api, data: api.get_current_price(data, dt_util.now().hour),
|
||||
value_fn=lambda api, data: (
|
||||
price / 100
|
||||
if (price := api.get_current_price(data, dt_util.now().hour)) is not None
|
||||
else None
|
||||
),
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
72
homeassistant/components/hegel/__init__.py
Normal file
72
homeassistant/components/hegel/__init__.py
Normal file
@@ -0,0 +1,72 @@
|
||||
"""The Hegel integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from hegel_ip_client import HegelClient
|
||||
from hegel_ip_client.exceptions import HegelConnectionError
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_STOP, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
|
||||
from .const import DEFAULT_PORT
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.MEDIA_PLAYER]
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type HegelConfigEntry = ConfigEntry[HegelClient]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: HegelConfigEntry) -> bool:
|
||||
"""Set up the Hegel integration."""
|
||||
host = entry.data[CONF_HOST]
|
||||
|
||||
# Create and test client connection
|
||||
client = HegelClient(host, DEFAULT_PORT)
|
||||
|
||||
try:
|
||||
# Test connection before proceeding with setup
|
||||
await client.start()
|
||||
await client.ensure_connected(timeout=10.0)
|
||||
_LOGGER.debug("Successfully connected to Hegel at %s:%s", host, DEFAULT_PORT)
|
||||
except (HegelConnectionError, TimeoutError, OSError) as err:
|
||||
_LOGGER.error(
|
||||
"Failed to connect to Hegel at %s:%s: %s", host, DEFAULT_PORT, err
|
||||
)
|
||||
await client.stop() # Clean up
|
||||
raise ConfigEntryNotReady(
|
||||
f"Unable to connect to Hegel amplifier at {host}:{DEFAULT_PORT}"
|
||||
) from err
|
||||
|
||||
# Store client in runtime_data
|
||||
entry.runtime_data = client
|
||||
|
||||
async def _async_close_client(event):
|
||||
await client.stop()
|
||||
|
||||
entry.async_on_unload(
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _async_close_client)
|
||||
)
|
||||
|
||||
# Forward setup to supported platforms
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: HegelConfigEntry) -> bool:
|
||||
"""Unload a Hegel config entry and stop active client connection."""
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
if unload_ok:
|
||||
client = entry.runtime_data
|
||||
_LOGGER.debug("Stopping Hegel client for %s", entry.title)
|
||||
try:
|
||||
await client.stop()
|
||||
except (HegelConnectionError, OSError) as err:
|
||||
_LOGGER.warning("Error while stopping Hegel client: %s", err)
|
||||
|
||||
return unload_ok
|
||||
154
homeassistant/components/hegel/config_flow.py
Normal file
154
homeassistant/components/hegel/config_flow.py
Normal file
@@ -0,0 +1,154 @@
|
||||
"""Config flow for Hegel integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from hegel_ip_client import HegelClient
|
||||
from hegel_ip_client.exceptions import HegelConnectionError
|
||||
import voluptuous as vol
|
||||
from yarl import URL
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_HOST
|
||||
from homeassistant.helpers.service_info.ssdp import SsdpServiceInfo
|
||||
|
||||
from .const import CONF_MODEL, DEFAULT_PORT, DOMAIN, MODEL_INPUTS
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class HegelConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Config flow for Hegel amplifiers."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the config flow."""
|
||||
self._host: str | None = None
|
||||
self._name: str | None = None
|
||||
self._model: str | None = None
|
||||
|
||||
async def _async_try_connect(self, host: str) -> bool:
|
||||
"""Try to connect to the Hegel amplifier using the library."""
|
||||
client = HegelClient(host, DEFAULT_PORT)
|
||||
try:
|
||||
await client.start()
|
||||
await client.ensure_connected(timeout=5.0)
|
||||
except HegelConnectionError, TimeoutError, OSError:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
finally:
|
||||
await client.stop()
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle manual setup by the user."""
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
host = user_input[CONF_HOST]
|
||||
|
||||
# Prevent duplicate entries by host
|
||||
self._async_abort_entries_match({CONF_HOST: host})
|
||||
|
||||
if not await self._async_try_connect(host):
|
||||
errors["base"] = "cannot_connect"
|
||||
else:
|
||||
return self.async_create_entry(
|
||||
title=f"Hegel {user_input[CONF_MODEL]}",
|
||||
data=user_input,
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST): str,
|
||||
vol.Required(CONF_MODEL): vol.In(list(MODEL_INPUTS.keys())),
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_ssdp(
|
||||
self, discovery_info: SsdpServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle SSDP discovery."""
|
||||
upnp = discovery_info.upnp or {}
|
||||
|
||||
# Get host from presentationURL or ssdp_location
|
||||
url = upnp.get("presentationURL") or discovery_info.ssdp_location
|
||||
if not url:
|
||||
return self.async_abort(reason="no_host_found")
|
||||
|
||||
host = URL(url).host
|
||||
if not host:
|
||||
return self.async_abort(reason="no_host_found")
|
||||
|
||||
# Use UDN as unique id (device UUID)
|
||||
unique_id = discovery_info.ssdp_udn
|
||||
if not unique_id:
|
||||
return self.async_abort(reason="no_host_found")
|
||||
|
||||
await self.async_set_unique_id(unique_id)
|
||||
self._abort_if_unique_id_configured(updates={CONF_HOST: host})
|
||||
|
||||
# Test connection before showing confirmation
|
||||
if not await self._async_try_connect(host):
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
|
||||
# Get device info
|
||||
friendly_name = upnp.get("friendlyName", f"Hegel {host}")
|
||||
suggested_model = upnp.get("modelName") or ""
|
||||
model_default = next(
|
||||
(m for m in MODEL_INPUTS if suggested_model.upper().startswith(m.upper())),
|
||||
None,
|
||||
)
|
||||
|
||||
self._host = host
|
||||
self._name = friendly_name
|
||||
self._model = model_default
|
||||
|
||||
self.context.update(
|
||||
{
|
||||
"title_placeholders": {"name": friendly_name},
|
||||
}
|
||||
)
|
||||
|
||||
return await self.async_step_discovery_confirm()
|
||||
|
||||
async def async_step_discovery_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle discovery confirmation - user can change model if needed."""
|
||||
assert self._host is not None
|
||||
assert self._name is not None
|
||||
|
||||
if user_input is not None:
|
||||
return self.async_create_entry(
|
||||
title=self._name,
|
||||
data={
|
||||
CONF_HOST: self._host,
|
||||
CONF_MODEL: user_input[CONF_MODEL],
|
||||
},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="discovery_confirm",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(
|
||||
CONF_MODEL,
|
||||
default=self._model or list(MODEL_INPUTS.keys())[0],
|
||||
): vol.In(list(MODEL_INPUTS.keys())),
|
||||
}
|
||||
),
|
||||
description_placeholders={
|
||||
"host": self._host,
|
||||
"name": self._name,
|
||||
},
|
||||
)
|
||||
92
homeassistant/components/hegel/const.py
Normal file
92
homeassistant/components/hegel/const.py
Normal file
@@ -0,0 +1,92 @@
|
||||
"""Constants for the Hegel integration."""
|
||||
|
||||
DOMAIN = "hegel"
|
||||
DEFAULT_PORT = 50001
|
||||
|
||||
CONF_MODEL = "model"
|
||||
CONF_MAX_VOLUME = "max_volume" # 1.0 means amp's internal max
|
||||
|
||||
HEARTBEAT_TIMEOUT_MINUTES = 3
|
||||
|
||||
MODEL_INPUTS = {
|
||||
"Röst": [
|
||||
"Balanced",
|
||||
"Analog 1",
|
||||
"Analog 2",
|
||||
"Coaxial",
|
||||
"Optical 1",
|
||||
"Optical 2",
|
||||
"Optical 3",
|
||||
"USB",
|
||||
"Network",
|
||||
],
|
||||
"H95": [
|
||||
"Analog 1",
|
||||
"Analog 2",
|
||||
"Coaxial",
|
||||
"Optical 1",
|
||||
"Optical 2",
|
||||
"Optical 3",
|
||||
"USB",
|
||||
"Network",
|
||||
],
|
||||
"H120": [
|
||||
"Balanced",
|
||||
"Analog 1",
|
||||
"Analog 2",
|
||||
"Coaxial",
|
||||
"Optical 1",
|
||||
"Optical 2",
|
||||
"Optical 3",
|
||||
"USB",
|
||||
"Network",
|
||||
],
|
||||
"H190": [
|
||||
"Balanced",
|
||||
"Analog 1",
|
||||
"Analog 2",
|
||||
"Coaxial",
|
||||
"Optical 1",
|
||||
"Optical 2",
|
||||
"Optical 3",
|
||||
"USB",
|
||||
"Network",
|
||||
],
|
||||
"H190V": [
|
||||
"XLR",
|
||||
"Analog 1",
|
||||
"Analog 2",
|
||||
"Coaxial",
|
||||
"Optical 1",
|
||||
"Optical 2",
|
||||
"Optical 3",
|
||||
"USB",
|
||||
"Network",
|
||||
"Phono",
|
||||
],
|
||||
"H390": [
|
||||
"XLR",
|
||||
"Analog 1",
|
||||
"Analog 2",
|
||||
"BNC",
|
||||
"Coaxial",
|
||||
"Optical 1",
|
||||
"Optical 2",
|
||||
"Optical 3",
|
||||
"USB",
|
||||
"Network",
|
||||
],
|
||||
"H590": [
|
||||
"XLR 1",
|
||||
"XLR 2",
|
||||
"Analog 1",
|
||||
"Analog 2",
|
||||
"BNC",
|
||||
"Coaxial",
|
||||
"Optical 1",
|
||||
"Optical 2",
|
||||
"Optical 3",
|
||||
"USB",
|
||||
"Network",
|
||||
],
|
||||
}
|
||||
18
homeassistant/components/hegel/manifest.json
Normal file
18
homeassistant/components/hegel/manifest.json
Normal file
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"domain": "hegel",
|
||||
"name": "Hegel Amplifier",
|
||||
"codeowners": ["@boazca"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/hegel/",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["hegel_ip_client"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["hegel-ip-client==0.1.4"],
|
||||
"ssdp": [
|
||||
{
|
||||
"deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1",
|
||||
"manufacturer": "Hegel"
|
||||
}
|
||||
]
|
||||
}
|
||||
343
homeassistant/components/hegel/media_player.py
Normal file
343
homeassistant/components/hegel/media_player.py
Normal file
@@ -0,0 +1,343 @@
|
||||
"""Hegel media player platform."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Callable
|
||||
import contextlib
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from hegel_ip_client import (
|
||||
COMMANDS,
|
||||
HegelClient,
|
||||
apply_state_changes,
|
||||
parse_reply_message,
|
||||
)
|
||||
from hegel_ip_client.exceptions import HegelConnectionError
|
||||
|
||||
from homeassistant.components.media_player import (
|
||||
MediaPlayerEntity,
|
||||
MediaPlayerEntityFeature,
|
||||
MediaPlayerState,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.event import async_track_time_interval
|
||||
|
||||
from . import HegelConfigEntry
|
||||
from .const import CONF_MODEL, DOMAIN, HEARTBEAT_TIMEOUT_MINUTES, MODEL_INPUTS
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: HegelConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Hegel media player from a config entry."""
|
||||
model = entry.data[CONF_MODEL]
|
||||
unique_id = entry.unique_id or entry.entry_id
|
||||
|
||||
# map inputs (source_map)
|
||||
source_map: dict[int, str] = (
|
||||
dict(enumerate(MODEL_INPUTS[model], start=1)) if model in MODEL_INPUTS else {}
|
||||
)
|
||||
|
||||
# Use the client from the config entry's runtime_data (already connected)
|
||||
client = entry.runtime_data
|
||||
|
||||
# Create entity
|
||||
media = HegelMediaPlayer(
|
||||
entry,
|
||||
client,
|
||||
source_map,
|
||||
unique_id,
|
||||
)
|
||||
|
||||
async_add_entities([media])
|
||||
|
||||
|
||||
class HegelMediaPlayer(MediaPlayerEntity):
|
||||
"""Hegel amplifier entity."""
|
||||
|
||||
_attr_should_poll = False
|
||||
_attr_name = None
|
||||
_attr_has_entity_name = True
|
||||
_attr_supported_features = (
|
||||
MediaPlayerEntityFeature.VOLUME_SET
|
||||
| MediaPlayerEntityFeature.VOLUME_MUTE
|
||||
| MediaPlayerEntityFeature.VOLUME_STEP
|
||||
| MediaPlayerEntityFeature.SELECT_SOURCE
|
||||
| MediaPlayerEntityFeature.TURN_ON
|
||||
| MediaPlayerEntityFeature.TURN_OFF
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
config_entry: HegelConfigEntry,
|
||||
client: HegelClient,
|
||||
source_map: dict[int, str],
|
||||
unique_id: str,
|
||||
) -> None:
|
||||
"""Initialize the Hegel media player entity."""
|
||||
self._entry = config_entry
|
||||
self._client = client
|
||||
self._source_map = source_map
|
||||
|
||||
# Set unique_id from config entry
|
||||
self._attr_unique_id = unique_id
|
||||
|
||||
# Set device info
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, unique_id)},
|
||||
name=config_entry.title,
|
||||
manufacturer="Hegel",
|
||||
model=config_entry.data[CONF_MODEL],
|
||||
)
|
||||
|
||||
# State will be populated by async_update on first connection
|
||||
self._state: dict[str, Any] = {}
|
||||
|
||||
# Background tasks
|
||||
self._connected_watcher_task: asyncio.Task[None] | None = None
|
||||
self._push_task: asyncio.Task[None] | None = None
|
||||
self._push_handler: Callable[[str], None] | None = None
|
||||
|
||||
    async def async_added_to_hass(self) -> None:
        """Handle entity added to Home Assistant.

        Wires up push updates, the connection watcher and the periodic
        heartbeat, registering cleanup callbacks for everything that must
        be undone when the entity is removed.
        """
        await super().async_added_to_hass()
        _LOGGER.debug("Hegel media player added to hass: %s", self.entity_id)

        # Register push handler for real-time updates from the amplifier
        # The client expects a synchronous callable; schedule a coroutine safely
        def push_handler(msg: str) -> None:
            # NOTE(review): only the most recent push task is tracked here,
            # so earlier in-flight tasks are not awaited/cancelled on
            # removal — confirm that is acceptable.
            self._push_task = self.hass.async_create_task(self._async_handle_push(msg))

        self._push_handler = push_handler
        self._client.add_push_callback(push_handler)

        # Register cleanup for push handler using async_on_remove
        def cleanup_push_handler() -> None:
            if self._push_handler:
                self._client.remove_push_callback(self._push_handler)
                _LOGGER.debug("Push callback removed")
                self._push_handler = None

        self.async_on_remove(cleanup_push_handler)

        # Perform initial state fetch if already connected
        # The watcher handles reconnections, but we need to fetch state on first setup
        if self._client.is_connected():
            _LOGGER.debug("Client already connected, performing initial state fetch")
            await self.async_update()

        # Start a watcher task
        # Use config_entry.async_create_background_task for automatic cleanup on unload
        self._connected_watcher_task = self._entry.async_create_background_task(
            self.hass,
            self._connected_watcher(),
            name=f"hegel_{self.entity_id}_connected_watcher",
        )
        # Note: No need for async_on_remove - entry.async_create_background_task
        # automatically cancels the task when the config entry is unloaded

        # Schedule the heartbeat every 2 minutes while the reset timeout is 3 minutes
        self.async_on_remove(
            async_track_time_interval(
                self.hass,
                self._send_heartbeat,
                timedelta(minutes=HEARTBEAT_TIMEOUT_MINUTES - 1),
            )
        )
        # Send the first heartbeat immediately
        self.hass.async_create_task(self._send_heartbeat())
|
||||
|
||||
    async def _send_heartbeat(self, now=None) -> None:
        """Send the periodic reset command that keeps the connection alive.

        Called by the interval tracker (which passes the fire time as
        ``now``) and directly on setup (``now`` is None). Skipped while the
        client is disconnected; failures are only logged at debug level
        because the connected watcher handles recovery.
        """
        if not self.available:
            return
        try:
            await self._client.send(
                f"-r.{HEARTBEAT_TIMEOUT_MINUTES}", expect_reply=False
            )
        except (HegelConnectionError, TimeoutError, OSError) as err:
            _LOGGER.debug("Heartbeat failed: %s", err)
|
||||
|
||||
async def _async_handle_push(self, msg: str) -> None:
|
||||
"""Handle incoming push message from client (runs in event loop)."""
|
||||
try:
|
||||
update = parse_reply_message(msg)
|
||||
if update.has_changes():
|
||||
apply_state_changes(self._state, update, logger=_LOGGER, source="push")
|
||||
# notify HA
|
||||
self.async_write_ha_state()
|
||||
except ValueError, KeyError, AttributeError:
|
||||
_LOGGER.exception("Failed to handle push message")
|
||||
|
||||
    async def _connected_watcher(self) -> None:
        """Watch the client's connection events and update state accordingly.

        Runs for the lifetime of the config entry: alternately waits on the
        client's connected/disconnected events, pushing availability changes
        to Home Assistant and scheduling a full refresh after each
        reconnect.

        NOTE(review): assumes the client clears each event before setting
        the opposite one, otherwise the loop could spin — confirm against
        the hegel-ip-client implementation.
        """
        conn_event = self._client.connected_event
        disconn_event = self._client.disconnected_event
        _LOGGER.debug("Connected watcher started")

        try:
            while True:
                # Wait for connection
                _LOGGER.debug("Watcher: waiting for connection")
                await conn_event.wait()
                _LOGGER.debug("Watcher: connected, refreshing state")

                # Immediately notify HA that we're available again
                self.async_write_ha_state()

                # Schedule a state refresh through HA
                self.async_schedule_update_ha_state(force_refresh=True)

                # Wait for disconnection using event (no polling!)
                _LOGGER.debug("Watcher: waiting for disconnection")
                await disconn_event.wait()
                _LOGGER.debug("Watcher: disconnected")

                # Notify HA that we're unavailable
                self.async_write_ha_state()

        except asyncio.CancelledError:
            _LOGGER.debug("Connected watcher cancelled")
        except (HegelConnectionError, OSError) as err:
            _LOGGER.warning("Connected watcher failed: %s", err)
|
||||
|
||||
    async def async_will_remove_from_hass(self) -> None:
        """Handle entity removal from Home Assistant.

        Note: Push callback cleanup is handled by async_on_remove.
        _connected_watcher_task cleanup is handled automatically by
        entry.async_create_background_task when the config entry is unloaded.
        """
        await super().async_will_remove_from_hass()

        # Cancel push task if running (short-lived task, defensive cleanup)
        if self._push_task and not self._push_task.done():
            self._push_task.cancel()
            # Awaiting the cancelled task lets it unwind before we return.
            with contextlib.suppress(asyncio.CancelledError):
                await self._push_task
|
||||
|
||||
    async def async_update(self) -> None:
        """Query the amplifier for the main values and update state dict.

        Sends the power/volume/mute/input query commands one at a time;
        an individual failure is logged at debug level and skipped, so a
        partial refresh still applies whatever it could fetch.
        """
        for cmd in (
            COMMANDS["power_query"],
            COMMANDS["volume_query"],
            COMMANDS["mute_query"],
            COMMANDS["input_query"],
        ):
            try:
                update = await self._client.send(cmd, expect_reply=True, timeout=3.0)
                if update and update.has_changes():
                    apply_state_changes(
                        self._state, update, logger=_LOGGER, source="update"
                    )
            except (HegelConnectionError, TimeoutError, OSError) as err:
                _LOGGER.debug("Refresh command %s failed: %s", cmd, err)
        # update entity state
        self.async_write_ha_state()
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if the client is connected."""
|
||||
return self._client.is_connected()
|
||||
|
||||
@property
|
||||
def state(self) -> MediaPlayerState | None:
|
||||
"""Return the current state of the media player."""
|
||||
power = self._state.get("power")
|
||||
if power is None:
|
||||
return None
|
||||
return MediaPlayerState.ON if power else MediaPlayerState.OFF
|
||||
|
||||
@property
|
||||
def volume_level(self) -> float | None:
|
||||
"""Return the volume level."""
|
||||
volume = self._state.get("volume")
|
||||
if volume is None:
|
||||
return None
|
||||
return float(volume)
|
||||
|
||||
@property
|
||||
def is_volume_muted(self) -> bool | None:
|
||||
"""Return whether volume is muted."""
|
||||
return bool(self._state.get("mute", False))
|
||||
|
||||
@property
|
||||
def source(self) -> str | None:
|
||||
"""Return the current input source."""
|
||||
idx = self._state.get("input")
|
||||
return self._source_map.get(idx, f"Input {idx}") if idx else None
|
||||
|
||||
@property
|
||||
def source_list(self) -> list[str] | None:
|
||||
"""Return the list of available input sources."""
|
||||
return [self._source_map[k] for k in sorted(self._source_map.keys())] or None
|
||||
|
||||
async def async_turn_on(self) -> None:
|
||||
"""Turn on the media player."""
|
||||
try:
|
||||
await self._client.send(COMMANDS["power_on"], expect_reply=False)
|
||||
except (HegelConnectionError, TimeoutError, OSError) as err:
|
||||
raise HomeAssistantError(f"Failed to turn on: {err}") from err
|
||||
|
||||
async def async_turn_off(self) -> None:
|
||||
"""Turn off the media player."""
|
||||
try:
|
||||
await self._client.send(COMMANDS["power_off"], expect_reply=False)
|
||||
except (HegelConnectionError, TimeoutError, OSError) as err:
|
||||
raise HomeAssistantError(f"Failed to turn off: {err}") from err
|
||||
|
||||
async def async_set_volume_level(self, volume: float) -> None:
|
||||
"""Set volume level, range 0..1."""
|
||||
vol = max(0.0, min(volume, 1.0))
|
||||
amp_vol = int(round(vol * 100))
|
||||
try:
|
||||
await self._client.send(COMMANDS["volume_set"](amp_vol), expect_reply=False)
|
||||
except (HegelConnectionError, TimeoutError, OSError) as err:
|
||||
raise HomeAssistantError(f"Failed to set volume: {err}") from err
|
||||
|
||||
async def async_mute_volume(self, mute: bool) -> None:
|
||||
"""Mute or unmute the volume."""
|
||||
try:
|
||||
await self._client.send(
|
||||
COMMANDS["mute_on" if mute else "mute_off"], expect_reply=False
|
||||
)
|
||||
except (HegelConnectionError, TimeoutError, OSError) as err:
|
||||
raise HomeAssistantError(f"Failed to set mute: {err}") from err
|
||||
|
||||
async def async_volume_up(self) -> None:
|
||||
"""Increase volume."""
|
||||
try:
|
||||
await self._client.send(COMMANDS["volume_up"], expect_reply=False)
|
||||
except (HegelConnectionError, TimeoutError, OSError) as err:
|
||||
raise HomeAssistantError(f"Failed to increase volume: {err}") from err
|
||||
|
||||
async def async_volume_down(self) -> None:
|
||||
"""Decrease volume."""
|
||||
try:
|
||||
await self._client.send(COMMANDS["volume_down"], expect_reply=False)
|
||||
except (HegelConnectionError, TimeoutError, OSError) as err:
|
||||
raise HomeAssistantError(f"Failed to decrease volume: {err}") from err
|
||||
|
||||
async def async_select_source(self, source: str) -> None:
|
||||
"""Select input source."""
|
||||
inv = {v: k for k, v in self._source_map.items()}
|
||||
idx = inv.get(source)
|
||||
if idx is None:
|
||||
raise ServiceValidationError(f"Unknown source: {source}")
|
||||
try:
|
||||
await self._client.send(COMMANDS["input_set"](idx), expect_reply=False)
|
||||
except (HegelConnectionError, TimeoutError, OSError) as err:
|
||||
raise HomeAssistantError(
|
||||
f"Failed to select source {source}: {err}"
|
||||
) from err
|
||||
95
homeassistant/components/hegel/quality_scale.yaml
Normal file
95
homeassistant/components/hegel/quality_scale.yaml
Normal file
@@ -0,0 +1,95 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide additional actions.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide additional actions.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup:
|
||||
status: done
|
||||
comment: |
|
||||
Entities subscribe to push events from hegel-ip-client library.
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide additional actions.
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide an options flow.
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow:
|
||||
status: exempt
|
||||
comment: |
|
||||
Device uses local IP control without authentication.
|
||||
test-coverage: done
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info: todo
|
||||
discovery: done
|
||||
docs-data-update: done
|
||||
docs-examples: done
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: done
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: |
|
||||
Device type integration.
|
||||
entity-category:
|
||||
status: exempt
|
||||
comment: |
|
||||
Single media_player entity, no categories needed.
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default:
|
||||
status: exempt
|
||||
comment: |
|
||||
Single main entity, should be enabled by default.
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
icon-translations: done
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: |
|
||||
No repair issues needed for this integration.
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: |
|
||||
Device type integration.
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession:
|
||||
status: exempt
|
||||
comment: |
|
||||
Uses raw TCP connection, not HTTP.
|
||||
strict-typing: todo
|
||||
35
homeassistant/components/hegel/strings.json
Normal file
35
homeassistant/components/hegel/strings.json
Normal file
@@ -0,0 +1,35 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"no_host_found": "[%key:common::config_flow::abort::no_devices_found%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
|
||||
},
|
||||
"step": {
|
||||
"discovery_confirm": {
|
||||
"data": {
|
||||
"model": "Model"
|
||||
},
|
||||
"data_description": {
|
||||
"model": "Select your Hegel amplifier model for proper input mapping"
|
||||
},
|
||||
"description": "Discovered Hegel amplifier **{name}** at `{host}`. Confirm the model to complete setup.",
|
||||
"title": "Confirm Hegel amplifier"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"model": "Model"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "Hostname or IP address of your Hegel amplifier",
|
||||
"model": "Select your Hegel amplifier model for proper input mapping"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,11 +1,11 @@
|
||||
{
|
||||
"domain": "homematicip_cloud",
|
||||
"name": "HomematicIP Cloud",
|
||||
"codeowners": ["@hahn-th"],
|
||||
"codeowners": ["@hahn-th", "@lackas"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/homematicip_cloud",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["homematicip"],
|
||||
"requirements": ["homematicip==2.5.0"]
|
||||
"requirements": ["homematicip==2.6.0"]
|
||||
}
|
||||
|
||||
@@ -28,7 +28,7 @@ from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
MAX_WS_RECONNECT_TIME = 600
|
||||
SCAN_INTERVAL = timedelta(minutes=8)
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
DEFAULT_RECONNECT_TIME = 2 # Define a default reconnect time
|
||||
PING_INTERVAL = 60
|
||||
|
||||
|
||||
@@ -9,5 +9,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["aioautomower"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["aioautomower==2.7.1"]
|
||||
"requirements": ["aioautomower==2.7.3"]
|
||||
}
|
||||
|
||||
@@ -9,5 +9,5 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aioimmich"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["aioimmich==0.11.1"]
|
||||
"requirements": ["aioimmich==0.12.0"]
|
||||
}
|
||||
|
||||
@@ -12,5 +12,5 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["intellifire4py"],
|
||||
"requirements": ["intellifire4py==4.2.1"]
|
||||
"requirements": ["intellifire4py==4.3.1"]
|
||||
}
|
||||
|
||||
@@ -30,6 +30,7 @@ _LOGGER = logging.getLogger(__name__)
|
||||
TIMER_NOT_FOUND_RESPONSE = "timer_not_found"
|
||||
MULTIPLE_TIMERS_MATCHED_RESPONSE = "multiple_timers_matched"
|
||||
NO_TIMER_SUPPORT_RESPONSE = "no_timer_support"
|
||||
NO_TIMER_COMMAND_RESPONSE = "no_timer_command"
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -192,6 +193,17 @@ class MultipleTimersMatchedError(intent.IntentHandleError):
|
||||
super().__init__("Multiple timers matched", MULTIPLE_TIMERS_MATCHED_RESPONSE)
|
||||
|
||||
|
||||
class NoTimerCommandError(intent.IntentHandleError):
|
||||
"""Error when a conversation command does not match any intent."""
|
||||
|
||||
def __init__(self, command: str) -> None:
|
||||
"""Initialize error."""
|
||||
super().__init__(
|
||||
f"Intent not recognized: {command}",
|
||||
NO_TIMER_COMMAND_RESPONSE,
|
||||
)
|
||||
|
||||
|
||||
class TimersNotSupportedError(intent.IntentHandleError):
|
||||
"""Error when a timer intent is used from a device that isn't registered to handle timer events."""
|
||||
|
||||
@@ -836,6 +848,12 @@ class StartTimerIntentHandler(intent.IntentHandler):
|
||||
# Fail early if this is not a delayed command
|
||||
raise TimersNotSupportedError(intent_obj.device_id)
|
||||
|
||||
# Validate conversation command if provided
|
||||
if conversation_command and not await self._validate_conversation_command(
|
||||
intent_obj, conversation_command
|
||||
):
|
||||
raise NoTimerCommandError(conversation_command)
|
||||
|
||||
name: str | None = None
|
||||
if "name" in slots:
|
||||
name = slots["name"]["value"]
|
||||
@@ -865,6 +883,48 @@ class StartTimerIntentHandler(intent.IntentHandler):
|
||||
|
||||
return intent_obj.create_response()
|
||||
|
||||
async def _validate_conversation_command(
|
||||
self, intent_obj: intent.Intent, conversation_command: str
|
||||
) -> bool:
|
||||
"""Validate that a conversation command can be executed."""
|
||||
from homeassistant.components.conversation import ( # noqa: PLC0415
|
||||
ConversationInput,
|
||||
async_get_agent,
|
||||
default_agent,
|
||||
)
|
||||
|
||||
# Only validate if using the default agent
|
||||
conversation_agent = async_get_agent(
|
||||
intent_obj.hass, intent_obj.conversation_agent_id
|
||||
)
|
||||
|
||||
if conversation_agent is None or not isinstance(
|
||||
conversation_agent, default_agent.DefaultAgent
|
||||
):
|
||||
return True # Skip validation
|
||||
|
||||
test_input = ConversationInput(
|
||||
text=conversation_command,
|
||||
context=intent_obj.context,
|
||||
conversation_id=None,
|
||||
device_id=intent_obj.device_id,
|
||||
satellite_id=intent_obj.satellite_id,
|
||||
language=intent_obj.language,
|
||||
agent_id=conversation_agent.entity_id,
|
||||
)
|
||||
|
||||
# check for sentence trigger
|
||||
if (
|
||||
await conversation_agent.async_recognize_sentence_trigger(test_input)
|
||||
) is not None:
|
||||
return True
|
||||
|
||||
# check for intent
|
||||
if (await conversation_agent.async_recognize_intent(test_input)) is not None:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
class CancelTimerIntentHandler(intent.IntentHandler):
|
||||
"""Intent handler for cancelling a timer."""
|
||||
|
||||
@@ -21,6 +21,7 @@ from .const import DOMAIN, LABS_DATA, STORAGE_KEY, STORAGE_VERSION
|
||||
from .helpers import (
|
||||
async_is_preview_feature_enabled,
|
||||
async_listen,
|
||||
async_subscribe_preview_feature,
|
||||
async_update_preview_feature,
|
||||
)
|
||||
from .models import (
|
||||
@@ -41,6 +42,7 @@ __all__ = [
|
||||
"EventLabsUpdatedData",
|
||||
"async_is_preview_feature_enabled",
|
||||
"async_listen",
|
||||
"async_subscribe_preview_feature",
|
||||
"async_update_preview_feature",
|
||||
]
|
||||
|
||||
|
||||
@@ -2,7 +2,8 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from collections.abc import Callable, Coroutine
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.const import EVENT_LABS_UPDATED
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
@@ -32,6 +33,43 @@ def async_is_preview_feature_enabled(
|
||||
return (domain, preview_feature) in labs_data.data.preview_feature_status
|
||||
|
||||
|
||||
@callback
|
||||
def async_subscribe_preview_feature(
|
||||
hass: HomeAssistant,
|
||||
domain: str,
|
||||
preview_feature: str,
|
||||
listener: Callable[[EventLabsUpdatedData], Coroutine[Any, Any, None]],
|
||||
) -> Callable[[], None]:
|
||||
"""Listen for changes to a specific preview feature.
|
||||
|
||||
Args:
|
||||
hass: HomeAssistant instance
|
||||
domain: Integration domain
|
||||
preview_feature: Preview feature name
|
||||
listener: Coroutine function to invoke when the preview feature
|
||||
is toggled. Receives the event data as argument. Runs eagerly.
|
||||
|
||||
Returns:
|
||||
Callable to unsubscribe from the listener
|
||||
"""
|
||||
|
||||
@callback
|
||||
def _async_event_filter(event_data: EventLabsUpdatedData) -> bool:
|
||||
"""Filter labs events for this integration's preview feature."""
|
||||
return (
|
||||
event_data["domain"] == domain
|
||||
and event_data["preview_feature"] == preview_feature
|
||||
)
|
||||
|
||||
async def _handler(event: Event[EventLabsUpdatedData]) -> None:
|
||||
"""Handle labs feature update event."""
|
||||
await listener(event.data)
|
||||
|
||||
return hass.bus.async_listen(
|
||||
EVENT_LABS_UPDATED, _handler, event_filter=_async_event_filter
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
def async_listen(
|
||||
hass: HomeAssistant,
|
||||
@@ -51,16 +89,10 @@ def async_listen(
|
||||
Callable to unsubscribe from the listener
|
||||
"""
|
||||
|
||||
@callback
|
||||
def _async_feature_updated(event: Event[EventLabsUpdatedData]) -> None:
|
||||
"""Handle labs feature update event."""
|
||||
if (
|
||||
event.data["domain"] == domain
|
||||
and event.data["preview_feature"] == preview_feature
|
||||
):
|
||||
listener()
|
||||
async def _listener(_event_data: EventLabsUpdatedData) -> None:
|
||||
listener()
|
||||
|
||||
return hass.bus.async_listen(EVENT_LABS_UPDATED, _async_feature_updated)
|
||||
return async_subscribe_preview_feature(hass, domain, preview_feature, _listener)
|
||||
|
||||
|
||||
async def async_update_preview_feature(
|
||||
|
||||
@@ -13,9 +13,10 @@ from homeassistant.core import HomeAssistant, callback
|
||||
from .const import LABS_DATA
|
||||
from .helpers import (
|
||||
async_is_preview_feature_enabled,
|
||||
async_listen,
|
||||
async_subscribe_preview_feature,
|
||||
async_update_preview_feature,
|
||||
)
|
||||
from .models import EventLabsUpdatedData
|
||||
|
||||
|
||||
@callback
|
||||
@@ -102,7 +103,6 @@ async def websocket_update_preview_feature(
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
|
||||
@callback
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "labs/subscribe",
|
||||
@@ -110,7 +110,8 @@ async def websocket_update_preview_feature(
|
||||
vol.Required("preview_feature"): str,
|
||||
}
|
||||
)
|
||||
def websocket_subscribe_feature(
|
||||
@websocket_api.async_response
|
||||
async def websocket_subscribe_feature(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
@@ -132,10 +133,13 @@ def websocket_subscribe_feature(
|
||||
|
||||
preview_feature = labs_data.preview_features[preview_feature_id]
|
||||
|
||||
@callback
|
||||
def send_event() -> None:
|
||||
async def send_event(event_data: EventLabsUpdatedData | None = None) -> None:
|
||||
"""Send feature state to client."""
|
||||
enabled = async_is_preview_feature_enabled(hass, domain, preview_feature_key)
|
||||
enabled = (
|
||||
event_data["enabled"]
|
||||
if event_data is not None
|
||||
else async_is_preview_feature_enabled(hass, domain, preview_feature_key)
|
||||
)
|
||||
connection.send_message(
|
||||
websocket_api.event_message(
|
||||
msg["id"],
|
||||
@@ -143,9 +147,9 @@ def websocket_subscribe_feature(
|
||||
)
|
||||
)
|
||||
|
||||
connection.subscriptions[msg["id"]] = async_listen(
|
||||
connection.subscriptions[msg["id"]] = async_subscribe_preview_feature(
|
||||
hass, domain, preview_feature_key, send_event
|
||||
)
|
||||
|
||||
connection.send_result(msg["id"])
|
||||
send_event()
|
||||
await send_event()
|
||||
|
||||
34
homeassistant/components/liebherr/diagnostics.py
Normal file
34
homeassistant/components/liebherr/diagnostics.py
Normal file
@@ -0,0 +1,34 @@
|
||||
"""Diagnostics support for Liebherr."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import asdict
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.const import CONF_API_KEY
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import LiebherrConfigEntry
|
||||
|
||||
TO_REDACT = {CONF_API_KEY}
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, entry: LiebherrConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
return {
|
||||
"devices": {
|
||||
device_id: {
|
||||
"coordinator": {
|
||||
"last_update_success": coordinator.last_update_success,
|
||||
"update_interval": str(coordinator.update_interval),
|
||||
"last_exception": str(coordinator.last_exception)
|
||||
if coordinator.last_exception
|
||||
else None,
|
||||
},
|
||||
"data": asdict(coordinator.data),
|
||||
}
|
||||
for device_id, coordinator in entry.runtime_data.items()
|
||||
},
|
||||
}
|
||||
@@ -41,7 +41,7 @@ rules:
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
diagnostics: done
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: Cloud API does not require updating entry data from network discovery.
|
||||
|
||||
@@ -805,39 +805,6 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
"""Return the color mode of the light."""
|
||||
return self._attr_color_mode
|
||||
|
||||
@property
|
||||
def _light_internal_color_mode(self) -> str:
|
||||
"""Return the color mode of the light with backwards compatibility."""
|
||||
if (color_mode := self.color_mode) is None:
|
||||
# Backwards compatibility for color_mode added in 2021.4
|
||||
# Warning added in 2024.3, break in 2025.3
|
||||
if not self.__color_mode_reported and self.__should_report_light_issue():
|
||||
self.__color_mode_reported = True
|
||||
report_issue = self._suggest_report_issue()
|
||||
_LOGGER.warning(
|
||||
(
|
||||
"%s (%s) does not report a color mode, this will stop working "
|
||||
"in Home Assistant Core 2025.3, please %s"
|
||||
),
|
||||
self.entity_id,
|
||||
type(self),
|
||||
report_issue,
|
||||
)
|
||||
|
||||
supported = self._light_internal_supported_color_modes
|
||||
|
||||
if ColorMode.HS in supported and self.hs_color is not None:
|
||||
return ColorMode.HS
|
||||
if ColorMode.COLOR_TEMP in supported and self.color_temp_kelvin is not None:
|
||||
return ColorMode.COLOR_TEMP
|
||||
if ColorMode.BRIGHTNESS in supported and self.brightness is not None:
|
||||
return ColorMode.BRIGHTNESS
|
||||
if ColorMode.ONOFF in supported:
|
||||
return ColorMode.ONOFF
|
||||
return ColorMode.UNKNOWN
|
||||
|
||||
return color_mode
|
||||
|
||||
@cached_property
|
||||
def hs_color(self) -> tuple[float, float] | None:
|
||||
"""Return the hue and saturation color value [float, float]."""
|
||||
@@ -985,8 +952,8 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
|
||||
def __validate_color_mode(
|
||||
self,
|
||||
color_mode: ColorMode | str | None,
|
||||
supported_color_modes: set[ColorMode] | set[str],
|
||||
color_mode: ColorMode | None,
|
||||
supported_color_modes: set[ColorMode],
|
||||
effect: str | None,
|
||||
) -> None:
|
||||
"""Validate the color mode."""
|
||||
@@ -999,23 +966,10 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
# color modes
|
||||
if color_mode in supported_color_modes:
|
||||
return
|
||||
# Warning added in 2024.3, reject in 2025.3
|
||||
if not self.__color_mode_reported and self.__should_report_light_issue():
|
||||
self.__color_mode_reported = True
|
||||
report_issue = self._suggest_report_issue()
|
||||
_LOGGER.warning(
|
||||
(
|
||||
"%s (%s) set to unsupported color mode %s, expected one of %s, "
|
||||
"this will stop working in Home Assistant Core 2025.3, "
|
||||
"please %s"
|
||||
),
|
||||
self.entity_id,
|
||||
type(self),
|
||||
color_mode,
|
||||
supported_color_modes,
|
||||
report_issue,
|
||||
)
|
||||
return
|
||||
raise HomeAssistantError(
|
||||
f"{self.entity_id} ({type(self)}) set to unsupported color mode "
|
||||
f"{color_mode}, expected one of {supported_color_modes}"
|
||||
)
|
||||
|
||||
# When an effect is active, the color mode should indicate what adjustments are
|
||||
# supported by the effect. To make this possible, we allow the light to set its
|
||||
@@ -1028,49 +982,24 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
if color_mode in effect_color_modes:
|
||||
return
|
||||
|
||||
# Warning added in 2024.3, reject in 2025.3
|
||||
if not self.__color_mode_reported and self.__should_report_light_issue():
|
||||
self.__color_mode_reported = True
|
||||
report_issue = self._suggest_report_issue()
|
||||
_LOGGER.warning(
|
||||
(
|
||||
"%s (%s) set to unsupported color mode %s when rendering an effect,"
|
||||
" expected one of %s, this will stop working in Home Assistant "
|
||||
"Core 2025.3, please %s"
|
||||
),
|
||||
self.entity_id,
|
||||
type(self),
|
||||
color_mode,
|
||||
effect_color_modes,
|
||||
report_issue,
|
||||
)
|
||||
return
|
||||
raise HomeAssistantError(
|
||||
f"{self.entity_id} ({type(self)}) set to unsupported color mode "
|
||||
f"{color_mode} when rendering an effect, expected one "
|
||||
f"of {effect_color_modes}"
|
||||
)
|
||||
|
||||
def __validate_supported_color_modes(
|
||||
self,
|
||||
supported_color_modes: set[ColorMode],
|
||||
) -> None:
|
||||
"""Validate the supported color modes."""
|
||||
if self.__color_mode_reported:
|
||||
return
|
||||
|
||||
try:
|
||||
valid_supported_color_modes(supported_color_modes)
|
||||
except vol.Error:
|
||||
# Warning added in 2024.3, reject in 2025.3
|
||||
if not self.__color_mode_reported and self.__should_report_light_issue():
|
||||
self.__color_mode_reported = True
|
||||
report_issue = self._suggest_report_issue()
|
||||
_LOGGER.warning(
|
||||
(
|
||||
"%s (%s) sets invalid supported color modes %s, this will stop "
|
||||
"working in Home Assistant Core 2025.3, please %s"
|
||||
),
|
||||
self.entity_id,
|
||||
type(self),
|
||||
supported_color_modes,
|
||||
report_issue,
|
||||
)
|
||||
except vol.Error as err:
|
||||
raise HomeAssistantError(
|
||||
f"{self.entity_id} ({type(self)}) sets invalid supported color modes "
|
||||
f"{supported_color_modes}"
|
||||
) from err
|
||||
|
||||
@final
|
||||
@property
|
||||
@@ -1084,13 +1013,17 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
)
|
||||
|
||||
_is_on = self.is_on
|
||||
color_mode = self._light_internal_color_mode if _is_on else None
|
||||
color_mode = self.color_mode if _is_on else None
|
||||
if _is_on and color_mode is None:
|
||||
raise HomeAssistantError(
|
||||
f"{self.entity_id} ({type(self)}) does not report a color mode"
|
||||
)
|
||||
|
||||
effect: str | None
|
||||
effect: str | None = None
|
||||
if LightEntityFeature.EFFECT in supported_features:
|
||||
data[ATTR_EFFECT] = effect = self.effect if _is_on else None
|
||||
else:
|
||||
effect = None
|
||||
if _is_on:
|
||||
effect = self.effect
|
||||
data[ATTR_EFFECT] = effect
|
||||
|
||||
self.__validate_color_mode(color_mode, legacy_supported_color_modes, effect)
|
||||
|
||||
|
||||
20
homeassistant/components/lunatone/diagnostics.py
Normal file
20
homeassistant/components/lunatone/diagnostics.py
Normal file
@@ -0,0 +1,20 @@
|
||||
"""Diagnostics support for Lunatone integration."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import LunatoneConfigEntry
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, entry: LunatoneConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
return {
|
||||
"info": entry.runtime_data.coordinator_info.data.model_dump(),
|
||||
"devices": [
|
||||
v.data.model_dump()
|
||||
for v in entry.runtime_data.coordinator_devices.data.values()
|
||||
],
|
||||
}
|
||||
@@ -51,7 +51,7 @@ rules:
|
||||
test-coverage: done
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
diagnostics: done
|
||||
discovery-update-info:
|
||||
status: todo
|
||||
comment: Discovery not yet supported
|
||||
|
||||
@@ -251,6 +251,18 @@ DISCOVERY_SCHEMAS = [
|
||||
entity_class=MatterBinarySensor,
|
||||
required_attributes=(clusters.SmokeCoAlarm.Attributes.SmokeState,),
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.BINARY_SENSOR,
|
||||
entity_description=MatterBinarySensorEntityDescription(
|
||||
key="SmokeCoAlarmCOStateSensor",
|
||||
device_class=BinarySensorDeviceClass.CO,
|
||||
device_to_ha=lambda x: (
|
||||
x != clusters.SmokeCoAlarm.Enums.AlarmStateEnum.kNormal
|
||||
),
|
||||
),
|
||||
entity_class=MatterBinarySensor,
|
||||
required_attributes=(clusters.SmokeCoAlarm.Attributes.COState,),
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.BINARY_SENSOR,
|
||||
entity_description=MatterBinarySensorEntityDescription(
|
||||
|
||||
@@ -435,9 +435,9 @@ class MatterLight(MatterEntity, LightEntity):
|
||||
and color_mode == ColorMode.XY
|
||||
):
|
||||
self._attr_xy_color = self._get_xy_color()
|
||||
elif self._attr_color_temp_kelvin is not None:
|
||||
elif self._supports_color_temperature:
|
||||
self._attr_color_mode = ColorMode.COLOR_TEMP
|
||||
elif self._attr_brightness is not None:
|
||||
elif self._supports_brightness:
|
||||
self._attr_color_mode = ColorMode.BRIGHTNESS
|
||||
else:
|
||||
self._attr_color_mode = ColorMode.ONOFF
|
||||
|
||||
@@ -284,6 +284,7 @@ DISCOVERY_SCHEMAS = [
|
||||
),
|
||||
featuremap_contains=(clusters.Thermostat.Bitmaps.Feature.kSetback),
|
||||
),
|
||||
# Eve temperature offset with higher min/max
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.NUMBER,
|
||||
entity_description=MatterNumberEntityDescription(
|
||||
@@ -303,7 +304,27 @@ DISCOVERY_SCHEMAS = [
|
||||
required_attributes=(
|
||||
clusters.Thermostat.Attributes.LocalTemperatureCalibration,
|
||||
),
|
||||
vendor_id=(4874,),
|
||||
vendor_id=(4874,), # Eve Systems
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.NUMBER,
|
||||
entity_description=MatterNumberEntityDescription(
|
||||
key="TemperatureOffset",
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
translation_key="temperature_offset",
|
||||
native_max_value=25, # Matter 1.3 limit
|
||||
native_min_value=-25, # Matter 1.3 limit
|
||||
native_step=0.5,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_to_ha=lambda x: None if x is None else x / 10,
|
||||
ha_to_device=lambda x: round(x * 10),
|
||||
mode=NumberMode.BOX,
|
||||
),
|
||||
entity_class=MatterNumber,
|
||||
required_attributes=(
|
||||
clusters.Thermostat.Attributes.LocalTemperatureCalibration,
|
||||
),
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.NUMBER,
|
||||
|
||||
@@ -265,6 +265,8 @@ class ProgramPhaseOven(MieleEnum, missing_to_none=True):
|
||||
heating_up = 3073
|
||||
process_running = 3074
|
||||
process_finished = 3078
|
||||
searing = 3080
|
||||
roasting = 3081
|
||||
energy_save = 3084
|
||||
pre_heating = 3099
|
||||
|
||||
@@ -357,6 +359,8 @@ class ProgramPhaseSteamOvenCombi(MieleEnum, missing_to_none=True):
|
||||
heating_up = 3073
|
||||
process_running = 3074, 7938
|
||||
process_finished = 3078, 7942
|
||||
searing = 3080
|
||||
roasting = 3081
|
||||
energy_save = 3084
|
||||
pre_heating = 3099
|
||||
|
||||
|
||||
@@ -61,6 +61,7 @@ PLATE_COUNT = {
|
||||
"KM7575": 6,
|
||||
"KM7678": 6,
|
||||
"KM7697": 6,
|
||||
"KM7699": 5,
|
||||
"KM7878": 6,
|
||||
"KM7897": 6,
|
||||
"KMDA7633": 5,
|
||||
|
||||
@@ -1018,7 +1018,9 @@
|
||||
"rinse_hold": "Rinse hold",
|
||||
"rinse_out_lint": "Rinse out lint",
|
||||
"rinses": "Rinses",
|
||||
"roasting": "Roasting",
|
||||
"safety_cooling": "Safety cooling",
|
||||
"searing": "Searing",
|
||||
"slightly_dry": "Slightly dry",
|
||||
"slow_roasting": "Slow roasting",
|
||||
"smoothing": "Smoothing",
|
||||
|
||||
@@ -187,6 +187,7 @@ class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity):
|
||||
color_modes.add(ColorMode.HS)
|
||||
self._attr_supported_color_modes = filter_supported_color_modes(color_modes)
|
||||
self._fixed_color_mode = None
|
||||
self._attr_color_mode = ColorMode.UNKNOWN
|
||||
if self.supported_color_modes and len(self.supported_color_modes) == 1:
|
||||
self._fixed_color_mode = next(iter(self.supported_color_modes))
|
||||
self._attr_color_mode = self._fixed_color_mode
|
||||
|
||||
@@ -7,6 +7,6 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["nrgkick-api==1.6.0"],
|
||||
"requirements": ["nrgkick-api==1.7.1"],
|
||||
"zeroconf": ["_nrgkick._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -15,6 +15,8 @@ from aiontfy.exceptions import (
|
||||
import voluptuous as vol
|
||||
from yarl import URL
|
||||
|
||||
from homeassistant.components import camera, image
|
||||
from homeassistant.components.media_source import async_resolve_media
|
||||
from homeassistant.components.notify import (
|
||||
ATTR_MESSAGE,
|
||||
ATTR_TITLE,
|
||||
@@ -26,6 +28,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv, entity_platform
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.selector import MediaSelector
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import NtfyConfigEntry
|
||||
@@ -49,25 +52,48 @@ ATTR_MARKDOWN = "markdown"
|
||||
ATTR_PRIORITY = "priority"
|
||||
ATTR_TAGS = "tags"
|
||||
ATTR_SEQUENCE_ID = "sequence_id"
|
||||
ATTR_ATTACH_FILE = "attach_file"
|
||||
ATTR_FILENAME = "filename"
|
||||
GRP_ATTACHMENT = "attachment"
|
||||
MSG_ATTACHMENT = "Only one attachment source is allowed: URL or local file"
|
||||
|
||||
SERVICE_PUBLISH_SCHEMA = cv.make_entity_service_schema(
|
||||
{
|
||||
vol.Optional(ATTR_TITLE): cv.string,
|
||||
vol.Optional(ATTR_MESSAGE): cv.string,
|
||||
vol.Optional(ATTR_MARKDOWN): cv.boolean,
|
||||
vol.Optional(ATTR_TAGS): vol.All(cv.ensure_list, [str]),
|
||||
vol.Optional(ATTR_PRIORITY): vol.All(vol.Coerce(int), vol.Range(1, 5)),
|
||||
vol.Optional(ATTR_CLICK): vol.All(vol.Url(), vol.Coerce(URL)),
|
||||
vol.Optional(ATTR_DELAY): vol.All(
|
||||
cv.time_period,
|
||||
vol.Range(min=timedelta(seconds=10), max=timedelta(days=3)),
|
||||
),
|
||||
vol.Optional(ATTR_ATTACH): vol.All(vol.Url(), vol.Coerce(URL)),
|
||||
vol.Optional(ATTR_EMAIL): vol.Email(),
|
||||
vol.Optional(ATTR_CALL): cv.string,
|
||||
vol.Optional(ATTR_ICON): vol.All(vol.Url(), vol.Coerce(URL)),
|
||||
vol.Optional(ATTR_SEQUENCE_ID): cv.string,
|
||||
}
|
||||
|
||||
def validate_filename(params: dict[str, Any]) -> dict[str, Any]:
|
||||
"""Validate filename."""
|
||||
if ATTR_FILENAME in params and not (
|
||||
ATTR_ATTACH_FILE in params or ATTR_ATTACH in params
|
||||
):
|
||||
raise vol.Invalid("Filename only allowed when attachment is provided")
|
||||
return params
|
||||
|
||||
|
||||
SERVICE_PUBLISH_SCHEMA = vol.All(
|
||||
cv.make_entity_service_schema(
|
||||
{
|
||||
vol.Optional(ATTR_TITLE): cv.string,
|
||||
vol.Optional(ATTR_MESSAGE): cv.string,
|
||||
vol.Optional(ATTR_MARKDOWN): cv.boolean,
|
||||
vol.Optional(ATTR_TAGS): vol.All(cv.ensure_list, [str]),
|
||||
vol.Optional(ATTR_PRIORITY): vol.All(vol.Coerce(int), vol.Range(1, 5)),
|
||||
vol.Optional(ATTR_CLICK): vol.All(vol.Url(), vol.Coerce(URL)),
|
||||
vol.Optional(ATTR_DELAY): vol.All(
|
||||
cv.time_period,
|
||||
vol.Range(min=timedelta(seconds=10), max=timedelta(days=3)),
|
||||
),
|
||||
vol.Optional(ATTR_EMAIL): vol.Email(),
|
||||
vol.Optional(ATTR_CALL): cv.string,
|
||||
vol.Optional(ATTR_ICON): vol.All(vol.Url(), vol.Coerce(URL)),
|
||||
vol.Optional(ATTR_SEQUENCE_ID): cv.string,
|
||||
vol.Exclusive(ATTR_ATTACH, GRP_ATTACHMENT, MSG_ATTACHMENT): vol.All(
|
||||
vol.Url(), vol.Coerce(URL)
|
||||
),
|
||||
vol.Exclusive(
|
||||
ATTR_ATTACH_FILE, GRP_ATTACHMENT, MSG_ATTACHMENT
|
||||
): MediaSelector({"accept": ["*/*"]}),
|
||||
vol.Optional(ATTR_FILENAME): cv.string,
|
||||
}
|
||||
),
|
||||
validate_filename,
|
||||
)
|
||||
|
||||
SERVICE_CLEAR_DELETE_SCHEMA = cv.make_entity_service_schema(
|
||||
@@ -129,7 +155,7 @@ class NtfyNotifyEntity(NtfyBaseEntity, NotifyEntity):
|
||||
|
||||
async def publish(self, **kwargs: Any) -> None:
|
||||
"""Publish a message to a topic."""
|
||||
|
||||
attachment = None
|
||||
params: dict[str, Any] = kwargs
|
||||
delay: timedelta | None = params.get("delay")
|
||||
if delay:
|
||||
@@ -144,10 +170,36 @@ class NtfyNotifyEntity(NtfyBaseEntity, NotifyEntity):
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="delay_no_call",
|
||||
)
|
||||
if file := params.pop(ATTR_ATTACH_FILE, None):
|
||||
media_content_id: str = file["media_content_id"]
|
||||
if media_content_id.startswith("media-source://camera/"):
|
||||
entity_id = media_content_id.removeprefix("media-source://camera/")
|
||||
attachment = (
|
||||
await camera.async_get_image(self.hass, entity_id)
|
||||
).content
|
||||
elif media_content_id.startswith("media-source://image/"):
|
||||
entity_id = media_content_id.removeprefix("media-source://image/")
|
||||
attachment = (await image.async_get_image(self.hass, entity_id)).content
|
||||
else:
|
||||
media = await async_resolve_media(
|
||||
self.hass, file["media_content_id"], None
|
||||
)
|
||||
|
||||
if media.path is None:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="media_source_not_supported",
|
||||
)
|
||||
|
||||
attachment = await self.hass.async_add_executor_job(
|
||||
media.path.read_bytes
|
||||
)
|
||||
|
||||
params.setdefault(ATTR_FILENAME, media.path.name)
|
||||
|
||||
msg = Message(topic=self.topic, **params)
|
||||
try:
|
||||
await self.ntfy.publish(msg)
|
||||
await self.ntfy.publish(msg, attachment)
|
||||
except NtfyUnauthorizedAuthenticationError as e:
|
||||
self.config_entry.async_start_reauth(self.hass)
|
||||
raise HomeAssistantError(
|
||||
|
||||
@@ -67,6 +67,17 @@ publish:
|
||||
type: url
|
||||
autocomplete: url
|
||||
example: https://example.org/download.zip
|
||||
attach_file:
|
||||
required: false
|
||||
selector:
|
||||
media:
|
||||
accept:
|
||||
- "*"
|
||||
filename:
|
||||
required: false
|
||||
selector:
|
||||
text:
|
||||
example: attachment.jpg
|
||||
email:
|
||||
required: false
|
||||
selector:
|
||||
|
||||
@@ -288,6 +288,9 @@
|
||||
"entry_not_loaded": {
|
||||
"message": "The selected ntfy service is currently not loaded or disabled in Home Assistant."
|
||||
},
|
||||
"media_source_not_supported": {
|
||||
"message": "Media source currently not supported"
|
||||
},
|
||||
"publish_failed_exception": {
|
||||
"message": "Failed to publish notification due to a connection error"
|
||||
},
|
||||
@@ -353,6 +356,10 @@
|
||||
"description": "Attach images or other files by URL.",
|
||||
"name": "Attachment URL"
|
||||
},
|
||||
"attach_file": {
|
||||
"description": "Attach images or other files by uploading from a local file, camera, or image media source. When selecting a camera entity, a snapshot of the current view will be captured and attached to the notification.",
|
||||
"name": "Attach local file"
|
||||
},
|
||||
"call": {
|
||||
"description": "Phone number to call and read the message out loud using text-to-speech. Requires ntfy Pro and prior phone number verification.",
|
||||
"name": "Phone call"
|
||||
@@ -369,6 +376,10 @@
|
||||
"description": "Specify the address to forward the notification to, for example mail@example.com",
|
||||
"name": "Forward to email"
|
||||
},
|
||||
"filename": {
|
||||
"description": "Specify a custom filename for the attachment, including the file extension (for example, attachment.jpg). If not provided, the original filename will be used.",
|
||||
"name": "Attachment filename"
|
||||
},
|
||||
"icon": {
|
||||
"description": "Include an icon that will appear next to the text of the notification. Only JPEG and PNG images are supported.",
|
||||
"name": "Icon URL"
|
||||
|
||||
@@ -10,5 +10,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["onedrive_personal_sdk"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["onedrive-personal-sdk==0.1.1"]
|
||||
"requirements": ["onedrive-personal-sdk==0.1.2"]
|
||||
}
|
||||
|
||||
@@ -49,6 +49,7 @@ from .const import (
|
||||
CONF_MAX_TOKENS,
|
||||
CONF_PROMPT,
|
||||
CONF_REASONING_EFFORT,
|
||||
CONF_REASONING_SUMMARY,
|
||||
CONF_RECOMMENDED,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_TOP_P,
|
||||
@@ -71,6 +72,7 @@ from .const import (
|
||||
RECOMMENDED_IMAGE_MODEL,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
RECOMMENDED_REASONING_EFFORT,
|
||||
RECOMMENDED_REASONING_SUMMARY,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
RECOMMENDED_TOP_P,
|
||||
RECOMMENDED_VERBOSITY,
|
||||
@@ -399,10 +401,23 @@ class OpenAISubentryFlowHandler(ConfigSubentryFlow):
|
||||
mode=SelectSelectorMode.DROPDOWN,
|
||||
)
|
||||
),
|
||||
vol.Optional(
|
||||
CONF_REASONING_SUMMARY,
|
||||
default=RECOMMENDED_REASONING_SUMMARY,
|
||||
): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=["off", "auto", "short", "detailed"],
|
||||
translation_key=CONF_REASONING_SUMMARY,
|
||||
mode=SelectSelectorMode.DROPDOWN,
|
||||
)
|
||||
),
|
||||
}
|
||||
)
|
||||
elif CONF_VERBOSITY in options:
|
||||
options.pop(CONF_VERBOSITY)
|
||||
if CONF_REASONING_SUMMARY in options:
|
||||
if not model.startswith("gpt-5"):
|
||||
options.pop(CONF_REASONING_SUMMARY)
|
||||
|
||||
if self._subentry_type == "conversation" and not model.startswith(
|
||||
tuple(UNSUPPORTED_WEB_SEARCH_MODELS)
|
||||
|
||||
@@ -19,6 +19,7 @@ CONF_FILENAMES = "filenames"
|
||||
CONF_MAX_TOKENS = "max_tokens"
|
||||
CONF_PROMPT = "prompt"
|
||||
CONF_REASONING_EFFORT = "reasoning_effort"
|
||||
CONF_REASONING_SUMMARY = "reasoning_summary"
|
||||
CONF_RECOMMENDED = "recommended"
|
||||
CONF_TEMPERATURE = "temperature"
|
||||
CONF_TOP_P = "top_p"
|
||||
@@ -36,6 +37,7 @@ RECOMMENDED_CHAT_MODEL = "gpt-4o-mini"
|
||||
RECOMMENDED_IMAGE_MODEL = "gpt-image-1.5"
|
||||
RECOMMENDED_MAX_TOKENS = 3000
|
||||
RECOMMENDED_REASONING_EFFORT = "low"
|
||||
RECOMMENDED_REASONING_SUMMARY = "auto"
|
||||
RECOMMENDED_TEMPERATURE = 1.0
|
||||
RECOMMENDED_TOP_P = 1.0
|
||||
RECOMMENDED_VERBOSITY = "medium"
|
||||
|
||||
@@ -73,6 +73,7 @@ from .const import (
|
||||
CONF_IMAGE_MODEL,
|
||||
CONF_MAX_TOKENS,
|
||||
CONF_REASONING_EFFORT,
|
||||
CONF_REASONING_SUMMARY,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_TOP_P,
|
||||
CONF_VERBOSITY,
|
||||
@@ -90,6 +91,7 @@ from .const import (
|
||||
RECOMMENDED_IMAGE_MODEL,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
RECOMMENDED_REASONING_EFFORT,
|
||||
RECOMMENDED_REASONING_SUMMARY,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
RECOMMENDED_TOP_P,
|
||||
RECOMMENDED_VERBOSITY,
|
||||
@@ -501,7 +503,9 @@ class OpenAIBaseLLMEntity(Entity):
|
||||
)
|
||||
if not model_args["model"].startswith("gpt-5-pro")
|
||||
else "high", # GPT-5 pro only supports reasoning.effort: high
|
||||
"summary": "auto",
|
||||
"summary": options.get(
|
||||
CONF_REASONING_SUMMARY, RECOMMENDED_REASONING_SUMMARY
|
||||
),
|
||||
}
|
||||
model_args["include"] = ["reasoning.encrypted_content"]
|
||||
|
||||
|
||||
@@ -67,6 +67,7 @@
|
||||
"image_model": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data::image_model%]",
|
||||
"inline_citations": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data::inline_citations%]",
|
||||
"reasoning_effort": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data::reasoning_effort%]",
|
||||
"reasoning_summary": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data::reasoning_summary%]",
|
||||
"search_context_size": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data::search_context_size%]",
|
||||
"user_location": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data::user_location%]",
|
||||
"web_search": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data::web_search%]"
|
||||
@@ -76,6 +77,7 @@
|
||||
"image_model": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data_description::image_model%]",
|
||||
"inline_citations": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data_description::inline_citations%]",
|
||||
"reasoning_effort": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data_description::reasoning_effort%]",
|
||||
"reasoning_summary": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data_description::reasoning_summary%]",
|
||||
"search_context_size": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data_description::search_context_size%]",
|
||||
"user_location": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data_description::user_location%]",
|
||||
"web_search": "[%key:component::openai_conversation::config_subentries::conversation::step::model::data_description::web_search%]"
|
||||
@@ -125,6 +127,7 @@
|
||||
"image_model": "Image generation model",
|
||||
"inline_citations": "Include links in web search results",
|
||||
"reasoning_effort": "Reasoning effort",
|
||||
"reasoning_summary": "Reasoning summary",
|
||||
"search_context_size": "Search context size",
|
||||
"user_location": "Include home location",
|
||||
"web_search": "Enable web search"
|
||||
@@ -134,6 +137,7 @@
|
||||
"image_model": "The model to use when generating images",
|
||||
"inline_citations": "If disabled, additional prompt is added to ask the model to not include source citations",
|
||||
"reasoning_effort": "How many reasoning tokens the model should generate before creating a response to the prompt",
|
||||
"reasoning_summary": "Controls the length and detail of reasoning summaries provided by the model",
|
||||
"search_context_size": "High level guidance for the amount of context window space to use for the search",
|
||||
"user_location": "Refine search results based on geography",
|
||||
"web_search": "Allow the model to search the web for the latest information before generating a response"
|
||||
@@ -165,6 +169,14 @@
|
||||
"xhigh": "X-High"
|
||||
}
|
||||
},
|
||||
"reasoning_summary": {
|
||||
"options": {
|
||||
"auto": "[%key:common::state::auto%]",
|
||||
"detailed": "Detailed",
|
||||
"off": "[%key:common::state::off%]",
|
||||
"short": "Short"
|
||||
}
|
||||
},
|
||||
"search_context_size": {
|
||||
"options": {
|
||||
"high": "[%key:common::state::high%]",
|
||||
|
||||
@@ -195,7 +195,7 @@ class OpenRGBLight(CoordinatorEntity[OpenRGBCoordinator], LightEntity):
|
||||
color_mode = self._attr_color_mode
|
||||
|
||||
if color_mode is None:
|
||||
# If color mode is still None, default to RGB
|
||||
# If color mode is still unknown, default to RGB
|
||||
color_mode = ColorMode.RGB
|
||||
|
||||
if self._attr_brightness is not None and self._attr_brightness != brightness:
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"""Coordinator to handle Opower connections."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
@@ -44,7 +45,17 @@ _LOGGER = logging.getLogger(__name__)
|
||||
type OpowerConfigEntry = ConfigEntry[OpowerCoordinator]
|
||||
|
||||
|
||||
class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]):
|
||||
@dataclass
|
||||
class OpowerData:
|
||||
"""Class to hold Opower data."""
|
||||
|
||||
account: Account
|
||||
forecast: Forecast | None
|
||||
last_changed: datetime | None
|
||||
last_updated: datetime
|
||||
|
||||
|
||||
class OpowerCoordinator(DataUpdateCoordinator[dict[str, OpowerData]]):
|
||||
"""Handle fetching Opower data, updating sensors and inserting statistics."""
|
||||
|
||||
config_entry: OpowerConfigEntry
|
||||
@@ -85,7 +96,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]):
|
||||
|
||||
async def _async_update_data(
|
||||
self,
|
||||
) -> dict[str, Forecast]:
|
||||
) -> dict[str, OpowerData]:
|
||||
"""Fetch data from API endpoint."""
|
||||
try:
|
||||
# Login expires after a few minutes.
|
||||
@@ -98,24 +109,38 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]):
|
||||
except CannotConnect as err:
|
||||
_LOGGER.error("Error during login: %s", err)
|
||||
raise UpdateFailed(f"Error during login: {err}") from err
|
||||
try:
|
||||
forecasts: list[Forecast] = await self.api.async_get_forecast()
|
||||
except ApiException as err:
|
||||
_LOGGER.error("Error getting forecasts: %s", err)
|
||||
raise
|
||||
_LOGGER.debug("Updating sensor data with: %s", forecasts)
|
||||
# Because Opower provides historical usage/cost with a delay of a couple of days
|
||||
# we need to insert data into statistics.
|
||||
await self._insert_statistics()
|
||||
return {forecast.account.utility_account_id: forecast for forecast in forecasts}
|
||||
|
||||
async def _insert_statistics(self) -> None:
|
||||
"""Insert Opower statistics."""
|
||||
try:
|
||||
accounts = await self.api.async_get_accounts()
|
||||
except ApiException as err:
|
||||
_LOGGER.error("Error getting accounts: %s", err)
|
||||
raise
|
||||
|
||||
try:
|
||||
forecasts_list = await self.api.async_get_forecast()
|
||||
except ApiException as err:
|
||||
_LOGGER.error("Error getting forecasts: %s", err)
|
||||
raise
|
||||
|
||||
forecasts = {f.account.utility_account_id: f for f in forecasts_list}
|
||||
_LOGGER.debug("Updating sensor data with: %s", forecasts)
|
||||
|
||||
# Because Opower provides historical usage/cost with a delay of a couple of days
|
||||
# we need to insert data into statistics.
|
||||
last_changed_per_account = await self._insert_statistics(accounts)
|
||||
return {
|
||||
account.utility_account_id: OpowerData(
|
||||
account=account,
|
||||
forecast=forecasts.get(account.utility_account_id),
|
||||
last_changed=last_changed_per_account.get(account.utility_account_id),
|
||||
last_updated=dt_util.utcnow(),
|
||||
)
|
||||
for account in accounts
|
||||
}
|
||||
|
||||
async def _insert_statistics(self, accounts: list[Account]) -> dict[str, datetime]:
|
||||
"""Insert Opower statistics."""
|
||||
last_changed_per_account: dict[str, datetime] = {}
|
||||
for account in accounts:
|
||||
id_prefix = (
|
||||
(
|
||||
@@ -277,6 +302,15 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]):
|
||||
return_sum = _safe_get_sum(stats.get(return_statistic_id, []))
|
||||
last_stats_time = stats[consumption_statistic_id][0]["start"]
|
||||
|
||||
if cost_reads:
|
||||
last_changed_per_account[account.utility_account_id] = cost_reads[
|
||||
-1
|
||||
].start_time
|
||||
elif last_stats_time is not None:
|
||||
last_changed_per_account[account.utility_account_id] = (
|
||||
dt_util.utc_from_timestamp(last_stats_time)
|
||||
)
|
||||
|
||||
cost_statistics = []
|
||||
compensation_statistics = []
|
||||
consumption_statistics = []
|
||||
@@ -343,6 +377,8 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]):
|
||||
)
|
||||
async_add_external_statistics(self.hass, return_metadata, return_statistics)
|
||||
|
||||
return last_changed_per_account
|
||||
|
||||
async def _async_maybe_migrate_statistics(
|
||||
self,
|
||||
utility_account_id: str,
|
||||
|
||||
@@ -4,9 +4,9 @@ from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from datetime import date
|
||||
from datetime import date, datetime
|
||||
|
||||
from opower import Forecast, MeterType, UnitOfMeasure
|
||||
from opower import MeterType, UnitOfMeasure
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
@@ -22,7 +22,7 @@ from homeassistant.helpers.typing import StateType
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import OpowerConfigEntry, OpowerCoordinator
|
||||
from .coordinator import OpowerConfigEntry, OpowerCoordinator, OpowerData
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
@@ -31,9 +31,26 @@ PARALLEL_UPDATES = 0
|
||||
class OpowerEntityDescription(SensorEntityDescription):
|
||||
"""Class describing Opower sensors entities."""
|
||||
|
||||
value_fn: Callable[[Forecast], str | float | date]
|
||||
value_fn: Callable[[OpowerData], str | float | date | datetime | None]
|
||||
|
||||
|
||||
COMMON_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
OpowerEntityDescription(
|
||||
key="last_changed",
|
||||
translation_key="last_changed",
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: data.last_changed,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="last_updated",
|
||||
translation_key="last_updated",
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: data.last_updated,
|
||||
),
|
||||
)
|
||||
|
||||
# suggested_display_precision=0 for all sensors since
|
||||
# Opower provides 0 decimal points for all these.
|
||||
# (for the statistics in the energy dashboard Opower does provide decimal points)
|
||||
@@ -46,7 +63,7 @@ ELEC_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
# Not TOTAL_INCREASING because it can decrease for accounts with solar
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.usage_to_date,
|
||||
value_fn=lambda data: data.forecast.usage_to_date if data.forecast else None,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="elec_forecasted_usage",
|
||||
@@ -55,7 +72,7 @@ ELEC_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.forecasted_usage,
|
||||
value_fn=lambda data: data.forecast.forecasted_usage if data.forecast else None,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="elec_typical_usage",
|
||||
@@ -64,7 +81,7 @@ ELEC_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.typical_usage,
|
||||
value_fn=lambda data: data.forecast.typical_usage if data.forecast else None,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="elec_cost_to_date",
|
||||
@@ -73,7 +90,7 @@ ELEC_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
native_unit_of_measurement="USD",
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.cost_to_date,
|
||||
value_fn=lambda data: data.forecast.cost_to_date if data.forecast else None,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="elec_forecasted_cost",
|
||||
@@ -82,7 +99,7 @@ ELEC_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
native_unit_of_measurement="USD",
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.forecasted_cost,
|
||||
value_fn=lambda data: data.forecast.forecasted_cost if data.forecast else None,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="elec_typical_cost",
|
||||
@@ -91,7 +108,7 @@ ELEC_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
native_unit_of_measurement="USD",
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.typical_cost,
|
||||
value_fn=lambda data: data.forecast.typical_cost if data.forecast else None,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="elec_start_date",
|
||||
@@ -99,7 +116,7 @@ ELEC_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
device_class=SensorDeviceClass.DATE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: data.start_date,
|
||||
value_fn=lambda data: data.forecast.start_date if data.forecast else None,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="elec_end_date",
|
||||
@@ -107,7 +124,7 @@ ELEC_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
device_class=SensorDeviceClass.DATE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: data.end_date,
|
||||
value_fn=lambda data: data.forecast.end_date if data.forecast else None,
|
||||
),
|
||||
)
|
||||
GAS_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
@@ -118,7 +135,7 @@ GAS_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfVolume.CENTUM_CUBIC_FEET,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.usage_to_date,
|
||||
value_fn=lambda data: data.forecast.usage_to_date if data.forecast else None,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="gas_forecasted_usage",
|
||||
@@ -127,7 +144,7 @@ GAS_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfVolume.CENTUM_CUBIC_FEET,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.forecasted_usage,
|
||||
value_fn=lambda data: data.forecast.forecasted_usage if data.forecast else None,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="gas_typical_usage",
|
||||
@@ -136,7 +153,7 @@ GAS_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfVolume.CENTUM_CUBIC_FEET,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.typical_usage,
|
||||
value_fn=lambda data: data.forecast.typical_usage if data.forecast else None,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="gas_cost_to_date",
|
||||
@@ -145,7 +162,7 @@ GAS_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
native_unit_of_measurement="USD",
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.cost_to_date,
|
||||
value_fn=lambda data: data.forecast.cost_to_date if data.forecast else None,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="gas_forecasted_cost",
|
||||
@@ -154,7 +171,7 @@ GAS_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
native_unit_of_measurement="USD",
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.forecasted_cost,
|
||||
value_fn=lambda data: data.forecast.forecasted_cost if data.forecast else None,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="gas_typical_cost",
|
||||
@@ -163,7 +180,7 @@ GAS_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
native_unit_of_measurement="USD",
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda data: data.typical_cost,
|
||||
value_fn=lambda data: data.forecast.typical_cost if data.forecast else None,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="gas_start_date",
|
||||
@@ -171,7 +188,7 @@ GAS_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
device_class=SensorDeviceClass.DATE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: data.start_date,
|
||||
value_fn=lambda data: data.forecast.start_date if data.forecast else None,
|
||||
),
|
||||
OpowerEntityDescription(
|
||||
key="gas_end_date",
|
||||
@@ -179,7 +196,7 @@ GAS_SENSORS: tuple[OpowerEntityDescription, ...] = (
|
||||
device_class=SensorDeviceClass.DATE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: data.end_date,
|
||||
value_fn=lambda data: data.forecast.end_date if data.forecast else None,
|
||||
),
|
||||
)
|
||||
|
||||
@@ -193,32 +210,38 @@ async def async_setup_entry(
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
entities: list[OpowerSensor] = []
|
||||
forecasts = coordinator.data.values()
|
||||
for forecast in forecasts:
|
||||
device_id = f"{coordinator.api.utility.subdomain()}_{forecast.account.utility_account_id}"
|
||||
opower_data_list = coordinator.data.values()
|
||||
for opower_data in opower_data_list:
|
||||
account = opower_data.account
|
||||
forecast = opower_data.forecast
|
||||
device_id = (
|
||||
f"{coordinator.api.utility.subdomain()}_{account.utility_account_id}"
|
||||
)
|
||||
device = DeviceInfo(
|
||||
identifiers={(DOMAIN, device_id)},
|
||||
name=f"{forecast.account.meter_type.name} account {forecast.account.utility_account_id}",
|
||||
name=f"{account.meter_type.name} account {account.utility_account_id}",
|
||||
manufacturer="Opower",
|
||||
model=coordinator.api.utility.name(),
|
||||
entry_type=DeviceEntryType.SERVICE,
|
||||
)
|
||||
sensors: tuple[OpowerEntityDescription, ...] = ()
|
||||
sensors: tuple[OpowerEntityDescription, ...] = COMMON_SENSORS
|
||||
if (
|
||||
forecast.account.meter_type == MeterType.ELEC
|
||||
account.meter_type == MeterType.ELEC
|
||||
and forecast is not None
|
||||
and forecast.unit_of_measure == UnitOfMeasure.KWH
|
||||
):
|
||||
sensors = ELEC_SENSORS
|
||||
sensors += ELEC_SENSORS
|
||||
elif (
|
||||
forecast.account.meter_type == MeterType.GAS
|
||||
account.meter_type == MeterType.GAS
|
||||
and forecast is not None
|
||||
and forecast.unit_of_measure in [UnitOfMeasure.THERM, UnitOfMeasure.CCF]
|
||||
):
|
||||
sensors = GAS_SENSORS
|
||||
sensors += GAS_SENSORS
|
||||
entities.extend(
|
||||
OpowerSensor(
|
||||
coordinator,
|
||||
sensor,
|
||||
forecast.account.utility_account_id,
|
||||
account.utility_account_id,
|
||||
device,
|
||||
device_id,
|
||||
)
|
||||
@@ -250,7 +273,7 @@ class OpowerSensor(CoordinatorEntity[OpowerCoordinator], SensorEntity):
|
||||
self.utility_account_id = utility_account_id
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType | date:
|
||||
def native_value(self) -> StateType | date | datetime:
|
||||
"""Return the state."""
|
||||
return self.entity_description.value_fn(
|
||||
self.coordinator.data[self.utility_account_id]
|
||||
|
||||
@@ -115,6 +115,12 @@
|
||||
},
|
||||
"gas_usage_to_date": {
|
||||
"name": "Current bill gas usage to date"
|
||||
},
|
||||
"last_changed": {
|
||||
"name": "Last changed"
|
||||
},
|
||||
"last_updated": {
|
||||
"name": "Last updated"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
from yarl import URL
|
||||
|
||||
from homeassistant.const import CONF_URL
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DEFAULT_NAME, DOMAIN
|
||||
@@ -42,6 +42,7 @@ class PortainerEndpointEntity(PortainerCoordinatorEntity):
|
||||
manufacturer=DEFAULT_NAME,
|
||||
model="Endpoint",
|
||||
name=device_info.endpoint.name,
|
||||
entry_type=DeviceEntryType.SERVICE,
|
||||
)
|
||||
|
||||
@property
|
||||
@@ -87,6 +88,7 @@ class PortainerContainerEntity(PortainerCoordinatorEntity):
|
||||
f"{self.coordinator.config_entry.entry_id}_{self.endpoint_id}",
|
||||
),
|
||||
translation_key=None if self.device_name else "unknown_container",
|
||||
entry_type=DeviceEntryType.SERVICE,
|
||||
)
|
||||
|
||||
@property
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
"codeowners": ["@erwindouna"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/portainer",
|
||||
"integration_type": "hub",
|
||||
"integration_type": "service",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pyportainer==1.0.23"]
|
||||
|
||||
@@ -20,5 +20,5 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["reolink_aio"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["reolink-aio==0.18.2"]
|
||||
"requirements": ["reolink-aio==0.19.0"]
|
||||
}
|
||||
|
||||
@@ -87,11 +87,12 @@ NUMBER_ENTITIES = (
|
||||
ReolinkNumberEntityDescription(
|
||||
key="zoom",
|
||||
cmd_key="GetZoomFocus",
|
||||
cmd_id=294,
|
||||
translation_key="zoom",
|
||||
mode=NumberMode.SLIDER,
|
||||
native_step=1,
|
||||
get_min_value=lambda api, ch: api.zoom_range(ch)["zoom"]["pos"]["min"],
|
||||
get_max_value=lambda api, ch: api.zoom_range(ch)["zoom"]["pos"]["max"],
|
||||
get_min_value=lambda api, ch: api.zoom_range(ch)["zoom"]["min"],
|
||||
get_max_value=lambda api, ch: api.zoom_range(ch)["zoom"]["max"],
|
||||
supported=lambda api, ch: api.supported(ch, "zoom"),
|
||||
value=lambda api, ch: api.get_zoom(ch),
|
||||
method=lambda api, ch, value: api.set_zoom(ch, int(value)),
|
||||
@@ -99,11 +100,12 @@ NUMBER_ENTITIES = (
|
||||
ReolinkNumberEntityDescription(
|
||||
key="focus",
|
||||
cmd_key="GetZoomFocus",
|
||||
cmd_id=294,
|
||||
translation_key="focus",
|
||||
mode=NumberMode.SLIDER,
|
||||
native_step=1,
|
||||
get_min_value=lambda api, ch: api.zoom_range(ch)["focus"]["pos"]["min"],
|
||||
get_max_value=lambda api, ch: api.zoom_range(ch)["focus"]["pos"]["max"],
|
||||
get_min_value=lambda api, ch: api.zoom_range(ch)["focus"]["min"],
|
||||
get_max_value=lambda api, ch: api.zoom_range(ch)["focus"]["max"],
|
||||
supported=lambda api, ch: api.supported(ch, "focus"),
|
||||
value=lambda api, ch: api.get_focus(ch),
|
||||
method=lambda api, ch, value: api.set_focus(ch, int(value)),
|
||||
|
||||
@@ -61,6 +61,7 @@ class ReolinkHostSensorEntityDescription(
|
||||
SENSORS = (
|
||||
ReolinkSensorEntityDescription(
|
||||
key="ptz_pan_position",
|
||||
cmd_id=433,
|
||||
cmd_key="GetPtzCurPos",
|
||||
translation_key="ptz_pan_position",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
@@ -70,6 +71,7 @@ SENSORS = (
|
||||
),
|
||||
ReolinkSensorEntityDescription(
|
||||
key="ptz_tilt_position",
|
||||
cmd_id=433,
|
||||
cmd_key="GetPtzCurPos",
|
||||
translation_key="ptz_tilt_position",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
|
||||
@@ -13,7 +13,10 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.components import automation, websocket_api
|
||||
from homeassistant.components.blueprint import CONF_USE_BLUEPRINT
|
||||
from homeassistant.components.labs import async_listen as async_labs_listen
|
||||
from homeassistant.components.labs import (
|
||||
EventLabsUpdatedData,
|
||||
async_subscribe_preview_feature,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
ATTR_ENTITY_ID,
|
||||
ATTR_MODE,
|
||||
@@ -282,14 +285,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
DOMAIN, SERVICE_TOGGLE, toggle_service, schema=SCRIPT_TURN_ONOFF_SCHEMA
|
||||
)
|
||||
|
||||
@callback
|
||||
def new_triggers_conditions_listener() -> None:
|
||||
async def new_triggers_conditions_listener(
|
||||
_event_data: EventLabsUpdatedData,
|
||||
) -> None:
|
||||
"""Handle new_triggers_conditions flag change."""
|
||||
hass.async_create_task(
|
||||
reload_service(ServiceCall(hass, DOMAIN, SERVICE_RELOAD))
|
||||
)
|
||||
await reload_service(ServiceCall(hass, DOMAIN, SERVICE_RELOAD))
|
||||
|
||||
async_labs_listen(
|
||||
async_subscribe_preview_feature(
|
||||
hass,
|
||||
automation.DOMAIN,
|
||||
automation.NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG,
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
"""Support for Switchbot devices."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import switchbot
|
||||
|
||||
@@ -20,10 +23,12 @@ from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
|
||||
from .const import (
|
||||
CONF_CURTAIN_SPEED,
|
||||
CONF_ENCRYPTION_KEY,
|
||||
CONF_KEY_ID,
|
||||
CONF_RETRY_COUNT,
|
||||
CONNECTABLE_SUPPORTED_MODEL_TYPES,
|
||||
DEFAULT_CURTAIN_SPEED,
|
||||
DEFAULT_RETRY_COUNT,
|
||||
DOMAIN,
|
||||
ENCRYPTED_MODELS,
|
||||
@@ -185,12 +190,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: SwitchbotConfigEntry) ->
|
||||
data={**entry.data, CONF_ADDRESS: mac},
|
||||
)
|
||||
|
||||
if not entry.options:
|
||||
hass.config_entries.async_update_entry(
|
||||
entry,
|
||||
options={CONF_RETRY_COUNT: DEFAULT_RETRY_COUNT},
|
||||
)
|
||||
|
||||
sensor_type: str = entry.data[CONF_SENSOR_TYPE]
|
||||
switchbot_model = HASS_SENSOR_TYPE_TO_SWITCHBOT_MODEL[sensor_type]
|
||||
# connectable means we can make connections to the device
|
||||
@@ -241,6 +240,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: SwitchbotConfigEntry) ->
|
||||
entry.data.get(CONF_NAME, entry.title),
|
||||
connectable,
|
||||
switchbot_model,
|
||||
entry,
|
||||
)
|
||||
entry.async_on_unload(coordinator.async_start())
|
||||
if not await coordinator.async_wait_ready():
|
||||
@@ -258,6 +258,38 @@ async def async_setup_entry(hass: HomeAssistant, entry: SwitchbotConfigEntry) ->
|
||||
return True
|
||||
|
||||
|
||||
async def async_migrate_entry(hass: HomeAssistant, entry: SwitchbotConfigEntry) -> bool:
|
||||
"""Migrate old entry."""
|
||||
version = entry.version
|
||||
minor_version = entry.minor_version
|
||||
_LOGGER.debug("Migrating from version %s.%s", version, minor_version)
|
||||
|
||||
if version > 1:
|
||||
return False
|
||||
|
||||
if version == 1 and minor_version < 2:
|
||||
new_options: dict[str, Any] = {**entry.options}
|
||||
|
||||
if CONF_RETRY_COUNT not in new_options:
|
||||
new_options[CONF_RETRY_COUNT] = DEFAULT_RETRY_COUNT
|
||||
|
||||
sensor_type = entry.data.get(CONF_SENSOR_TYPE)
|
||||
if (
|
||||
sensor_type == SupportedModels.CURTAIN
|
||||
and CONF_CURTAIN_SPEED not in new_options
|
||||
):
|
||||
new_options[CONF_CURTAIN_SPEED] = DEFAULT_CURTAIN_SPEED
|
||||
|
||||
hass.config_entries.async_update_entry(
|
||||
entry,
|
||||
options=new_options,
|
||||
minor_version=2,
|
||||
)
|
||||
_LOGGER.debug("Migration to version %s.2 successful", version)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
"""Handle options update."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
@@ -34,14 +34,19 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.data_entry_flow import AbortFlow
|
||||
from homeassistant.helpers import selector
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import (
|
||||
CONF_CURTAIN_SPEED,
|
||||
CONF_ENCRYPTION_KEY,
|
||||
CONF_KEY_ID,
|
||||
CONF_LOCK_NIGHTLATCH,
|
||||
CONF_RETRY_COUNT,
|
||||
CONNECTABLE_SUPPORTED_MODEL_TYPES,
|
||||
CURTAIN_SPEED_MAX,
|
||||
CURTAIN_SPEED_MIN,
|
||||
DEFAULT_CURTAIN_SPEED,
|
||||
DEFAULT_LOCK_NIGHTLATCH,
|
||||
DEFAULT_RETRY_COUNT,
|
||||
DOMAIN,
|
||||
@@ -75,6 +80,7 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Switchbot."""
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 2
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
@@ -130,13 +136,20 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
discovery = self._discovered_adv
|
||||
name = name_from_discovery(discovery)
|
||||
model_name = discovery.data["modelName"]
|
||||
sensor_type = SUPPORTED_MODEL_TYPES[model_name]
|
||||
|
||||
options: dict[str, Any] = {CONF_RETRY_COUNT: DEFAULT_RETRY_COUNT}
|
||||
if sensor_type == SupportedModels.CURTAIN:
|
||||
options[CONF_CURTAIN_SPEED] = DEFAULT_CURTAIN_SPEED
|
||||
|
||||
return self.async_create_entry(
|
||||
title=name,
|
||||
data={
|
||||
**user_input,
|
||||
CONF_ADDRESS: discovery.address,
|
||||
CONF_SENSOR_TYPE: str(SUPPORTED_MODEL_TYPES[model_name]),
|
||||
CONF_SENSOR_TYPE: str(sensor_type),
|
||||
},
|
||||
options=options,
|
||||
)
|
||||
|
||||
async def async_step_confirm(
|
||||
@@ -455,5 +468,26 @@ class SwitchbotOptionsFlowHandler(OptionsFlow):
|
||||
): bool
|
||||
}
|
||||
)
|
||||
if (
|
||||
CONF_SENSOR_TYPE in self.config_entry.data
|
||||
and self.config_entry.data[CONF_SENSOR_TYPE] == SupportedModels.CURTAIN
|
||||
):
|
||||
options.update(
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_CURTAIN_SPEED,
|
||||
default=self.config_entry.options.get(
|
||||
CONF_CURTAIN_SPEED, DEFAULT_CURTAIN_SPEED
|
||||
),
|
||||
): selector.NumberSelector(
|
||||
selector.NumberSelectorConfig(
|
||||
min=CURTAIN_SPEED_MIN,
|
||||
max=CURTAIN_SPEED_MAX,
|
||||
step=1,
|
||||
mode=selector.NumberSelectorMode.SLIDER,
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
return self.async_show_form(step_id="init", data_schema=vol.Schema(options))
|
||||
|
||||
@@ -182,9 +182,13 @@ HASS_SENSOR_TYPE_TO_SWITCHBOT_MODEL = {
|
||||
# Config Defaults
|
||||
DEFAULT_RETRY_COUNT = 3
|
||||
DEFAULT_LOCK_NIGHTLATCH = False
|
||||
DEFAULT_CURTAIN_SPEED = 255
|
||||
CURTAIN_SPEED_MIN = 0
|
||||
CURTAIN_SPEED_MAX = 255
|
||||
|
||||
# Config Options
|
||||
CONF_RETRY_COUNT = "retry_count"
|
||||
CONF_KEY_ID = "key_id"
|
||||
CONF_ENCRYPTION_KEY = "encryption_key"
|
||||
CONF_LOCK_NIGHTLATCH = "lock_force_nightlatch"
|
||||
CONF_CURTAIN_SPEED = "curtain_speed"
|
||||
|
||||
@@ -41,6 +41,7 @@ class SwitchbotDataUpdateCoordinator(ActiveBluetoothDataUpdateCoordinator[None])
|
||||
device_name: str,
|
||||
connectable: bool,
|
||||
model: SwitchbotModel,
|
||||
config_entry: ConfigEntry,
|
||||
) -> None:
|
||||
"""Initialize global switchbot data updater."""
|
||||
super().__init__(
|
||||
@@ -57,6 +58,7 @@ class SwitchbotDataUpdateCoordinator(ActiveBluetoothDataUpdateCoordinator[None])
|
||||
self.device_name = device_name
|
||||
self.base_unique_id = base_unique_id
|
||||
self.model = model
|
||||
self.config_entry = config_entry
|
||||
self._ready_event = asyncio.Event()
|
||||
self._was_unavailable = True
|
||||
|
||||
|
||||
@@ -20,6 +20,7 @@ from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
|
||||
from .const import CONF_CURTAIN_SPEED, DEFAULT_CURTAIN_SPEED
|
||||
from .coordinator import SwitchbotConfigEntry, SwitchbotDataUpdateCoordinator
|
||||
from .entity import SwitchbotEntity, exception_handler
|
||||
|
||||
@@ -64,6 +65,15 @@ class SwitchBotCurtainEntity(SwitchbotEntity, CoverEntity, RestoreEntity):
|
||||
super().__init__(coordinator)
|
||||
self._attr_is_closed = None
|
||||
|
||||
@callback
|
||||
def _get_curtain_speed(self) -> int:
|
||||
"""Return the configured curtain speed."""
|
||||
return int(
|
||||
self.coordinator.config_entry.options.get(
|
||||
CONF_CURTAIN_SPEED, DEFAULT_CURTAIN_SPEED
|
||||
)
|
||||
)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added."""
|
||||
await super().async_added_to_hass()
|
||||
@@ -83,7 +93,8 @@ class SwitchBotCurtainEntity(SwitchbotEntity, CoverEntity, RestoreEntity):
|
||||
"""Open the curtain."""
|
||||
|
||||
_LOGGER.debug("Switchbot to open curtain %s", self._address)
|
||||
self._last_run_success = bool(await self._device.open())
|
||||
speed = self._get_curtain_speed()
|
||||
self._last_run_success = bool(await self._device.open(speed))
|
||||
self._attr_is_opening = self._device.is_opening()
|
||||
self._attr_is_closing = self._device.is_closing()
|
||||
self.async_write_ha_state()
|
||||
@@ -93,7 +104,8 @@ class SwitchBotCurtainEntity(SwitchbotEntity, CoverEntity, RestoreEntity):
|
||||
"""Close the curtain."""
|
||||
|
||||
_LOGGER.debug("Switchbot to close the curtain %s", self._address)
|
||||
self._last_run_success = bool(await self._device.close())
|
||||
speed = self._get_curtain_speed()
|
||||
self._last_run_success = bool(await self._device.close(speed))
|
||||
self._attr_is_opening = self._device.is_opening()
|
||||
self._attr_is_closing = self._device.is_closing()
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -358,10 +358,12 @@
|
||||
"step": {
|
||||
"init": {
|
||||
"data": {
|
||||
"curtain_speed": "Curtain movement speed",
|
||||
"lock_force_nightlatch": "Force Nightlatch operation mode",
|
||||
"retry_count": "Retry count"
|
||||
},
|
||||
"data_description": {
|
||||
"curtain_speed": "Speed for curtain open and close operations (1-255, where 1 is slowest and 255 is fastest)",
|
||||
"lock_force_nightlatch": "Force Nightlatch operation mode even if Nightlatch is not detected",
|
||||
"retry_count": "How many times to retry sending commands to your SwitchBot devices"
|
||||
}
|
||||
|
||||
@@ -237,9 +237,9 @@ class TelgramBotConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
# validate connection to Telegram API
|
||||
errors: dict[str, str] = {}
|
||||
user_input[CONF_API_ENDPOINT] = (
|
||||
user_input[SECTION_ADVANCED_SETTINGS][CONF_API_ENDPOINT],
|
||||
)
|
||||
user_input[CONF_API_ENDPOINT] = user_input[SECTION_ADVANCED_SETTINGS][
|
||||
CONF_API_ENDPOINT
|
||||
]
|
||||
user_input[CONF_PROXY_URL] = user_input[SECTION_ADVANCED_SETTINGS].get(
|
||||
CONF_PROXY_URL
|
||||
)
|
||||
|
||||
@@ -103,6 +103,8 @@ class AbstractTemplateEntity(Entity):
|
||||
attribute: str,
|
||||
validator: Callable[[Any], Any] | None = None,
|
||||
on_update: Callable[[Any], None] | None = None,
|
||||
render_complex: bool = False,
|
||||
**kwargs,
|
||||
) -> None:
|
||||
"""Set up a template that manages any property or attribute of the entity.
|
||||
|
||||
@@ -118,6 +120,10 @@ class AbstractTemplateEntity(Entity):
|
||||
on_update:
|
||||
Called to store the template result rather than storing it
|
||||
the supplied attribute. Passed the result of the validator.
|
||||
render_complex (default=False):
|
||||
This signals trigger based template entities to render the template
|
||||
as a complex result. State based template entities always render
|
||||
complex results.
|
||||
"""
|
||||
|
||||
def add_template(
|
||||
|
||||
@@ -124,14 +124,24 @@ class AbstractTemplateEvent(AbstractTemplateEntity, EventEntity):
|
||||
# This ensures that the __init__ on AbstractTemplateEntity is not called twice.
|
||||
def __init__(self, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
"""Initialize the features."""
|
||||
self._event_type_template = config[CONF_EVENT_TYPE]
|
||||
self._event_types_template = config[CONF_EVENT_TYPES]
|
||||
|
||||
self._attr_device_class = config.get(CONF_DEVICE_CLASS)
|
||||
|
||||
self._event_type = None
|
||||
self._attr_event_types = []
|
||||
|
||||
self.setup_template(
|
||||
CONF_EVENT_TYPES,
|
||||
"_attr_event_types",
|
||||
None,
|
||||
self._update_event_types,
|
||||
)
|
||||
self.setup_template(
|
||||
CONF_EVENT_TYPE,
|
||||
"_event_type",
|
||||
None,
|
||||
self._update_event_type,
|
||||
)
|
||||
|
||||
@callback
|
||||
def _update_event_types(self, event_types: Any) -> None:
|
||||
"""Update the event types from the template."""
|
||||
@@ -179,25 +189,6 @@ class StateEventEntity(TemplateEntity, AbstractTemplateEvent):
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
AbstractTemplateEvent.__init__(self, config)
|
||||
|
||||
@callback
|
||||
def _async_setup_templates(self) -> None:
|
||||
"""Set up templates."""
|
||||
self.add_template_attribute(
|
||||
"_attr_event_types",
|
||||
self._event_types_template,
|
||||
None,
|
||||
self._update_event_types,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
self.add_template_attribute(
|
||||
"_event_type",
|
||||
self._event_type_template,
|
||||
None,
|
||||
self._update_event_type,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
super()._async_setup_templates()
|
||||
|
||||
|
||||
class TriggerEventEntity(TriggerEntity, AbstractTemplateEvent, RestoreEntity):
|
||||
"""Event entity based on trigger data."""
|
||||
@@ -217,20 +208,3 @@ class TriggerEventEntity(TriggerEntity, AbstractTemplateEvent, RestoreEntity):
|
||||
"""Initialize the entity."""
|
||||
TriggerEntity.__init__(self, hass, coordinator, config)
|
||||
AbstractTemplateEvent.__init__(self, config)
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle update of the data."""
|
||||
self._process_data()
|
||||
|
||||
if not self.available:
|
||||
return
|
||||
|
||||
for key, updater in (
|
||||
(CONF_EVENT_TYPES, self._update_event_types),
|
||||
(CONF_EVENT_TYPE, self._update_event_type),
|
||||
):
|
||||
updater(self._rendered[key])
|
||||
|
||||
self.async_set_context(self.coordinator.data["context"])
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Generator, Sequence
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
@@ -28,19 +27,16 @@ from homeassistant.const import (
|
||||
CONF_STATE,
|
||||
CONF_UNIQUE_ID,
|
||||
CONF_VALUE_TEMPLATE,
|
||||
STATE_ON,
|
||||
STATE_UNAVAILABLE,
|
||||
STATE_UNKNOWN,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers import config_validation as cv, template
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import validators as template_validators
|
||||
from .const import DOMAIN
|
||||
from .coordinator import TriggerUpdateCoordinator
|
||||
from .entity import AbstractTemplateEntity
|
||||
@@ -203,30 +199,53 @@ class AbstractTemplateFan(AbstractTemplateEntity, FanEntity):
|
||||
|
||||
# The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
|
||||
# This ensures that the __init__ on AbstractTemplateEntity is not called twice.
|
||||
def __init__(self, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
def __init__(self, name: str, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
"""Initialize the features."""
|
||||
self._percentage_template = config.get(CONF_PERCENTAGE)
|
||||
self._preset_mode_template = config.get(CONF_PRESET_MODE)
|
||||
self._oscillating_template = config.get(CONF_OSCILLATING)
|
||||
self._direction_template = config.get(CONF_DIRECTION)
|
||||
self.setup_state_template(
|
||||
CONF_STATE,
|
||||
"_attr_is_on",
|
||||
template_validators.boolean(self, CONF_STATE),
|
||||
)
|
||||
|
||||
# Required for legacy functionality.
|
||||
self._attr_is_on = False
|
||||
# Ensure legacy template entity functionality by setting percentage to None instead
|
||||
# of the FanEntity default of 0.
|
||||
self._attr_percentage = None
|
||||
self.setup_template(
|
||||
CONF_PERCENTAGE,
|
||||
"_attr_percentage",
|
||||
template_validators.number(self, CONF_PERCENTAGE, 0, 100),
|
||||
)
|
||||
|
||||
# List of valid preset modes
|
||||
self._attr_preset_modes: list[str] | None = config.get(CONF_PRESET_MODES)
|
||||
self.setup_template(
|
||||
CONF_PRESET_MODE,
|
||||
"_attr_preset_mode",
|
||||
template_validators.item_in_list(
|
||||
self, CONF_PRESET_MODE, self._attr_preset_modes
|
||||
),
|
||||
)
|
||||
|
||||
# Oscillating boolean
|
||||
self.setup_template(
|
||||
CONF_OSCILLATING,
|
||||
"_attr_oscillating",
|
||||
template_validators.boolean(self, CONF_OSCILLATING),
|
||||
)
|
||||
|
||||
# Forward/Reverse Directions
|
||||
self.setup_template(
|
||||
CONF_DIRECTION,
|
||||
"_attr_current_direction",
|
||||
template_validators.item_in_list(self, CONF_DIRECTION, _VALID_DIRECTIONS),
|
||||
)
|
||||
|
||||
# Number of valid speeds
|
||||
self._attr_speed_count = config.get(CONF_SPEED_COUNT) or 100
|
||||
|
||||
# List of valid preset modes
|
||||
self._attr_preset_modes: list[str] | None = config.get(CONF_PRESET_MODES)
|
||||
|
||||
self._attr_supported_features |= (
|
||||
FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON
|
||||
)
|
||||
|
||||
def _iterate_scripts(
|
||||
self, config: dict[str, Any]
|
||||
) -> Generator[tuple[str, Sequence[dict[str, Any]], FanEntityFeature | int]]:
|
||||
for action_id, supported_feature in (
|
||||
(CONF_ON_ACTION, 0),
|
||||
(CONF_OFF_ACTION, 0),
|
||||
@@ -236,99 +255,14 @@ class AbstractTemplateFan(AbstractTemplateEntity, FanEntity):
|
||||
(CONF_SET_DIRECTION_ACTION, FanEntityFeature.DIRECTION),
|
||||
):
|
||||
if (action_config := config.get(action_id)) is not None:
|
||||
yield (action_id, action_config, supported_feature)
|
||||
self.add_script(action_id, action_config, name, DOMAIN)
|
||||
self._attr_supported_features |= supported_feature
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return true if device is on."""
|
||||
return self._attr_is_on
|
||||
|
||||
def _handle_state(self, result) -> None:
|
||||
if isinstance(result, bool):
|
||||
self._attr_is_on = result
|
||||
return
|
||||
|
||||
if isinstance(result, str):
|
||||
self._attr_is_on = result.lower() in ("true", STATE_ON)
|
||||
return
|
||||
|
||||
self._attr_is_on = False
|
||||
|
||||
@callback
|
||||
def _update_percentage(self, percentage):
|
||||
# Validate percentage
|
||||
try:
|
||||
percentage = int(float(percentage))
|
||||
except ValueError, TypeError:
|
||||
_LOGGER.error(
|
||||
"Received invalid percentage: %s for entity %s",
|
||||
percentage,
|
||||
self.entity_id,
|
||||
)
|
||||
self._attr_percentage = 0
|
||||
return
|
||||
|
||||
if 0 <= percentage <= 100:
|
||||
self._attr_percentage = percentage
|
||||
else:
|
||||
_LOGGER.error(
|
||||
"Received invalid percentage: %s for entity %s",
|
||||
percentage,
|
||||
self.entity_id,
|
||||
)
|
||||
self._attr_percentage = 0
|
||||
|
||||
@callback
|
||||
def _update_preset_mode(self, preset_mode):
|
||||
# Validate preset mode
|
||||
preset_mode = str(preset_mode)
|
||||
|
||||
if self.preset_modes and preset_mode in self.preset_modes:
|
||||
self._attr_preset_mode = preset_mode
|
||||
elif preset_mode in (STATE_UNAVAILABLE, STATE_UNKNOWN):
|
||||
self._attr_preset_mode = None
|
||||
else:
|
||||
_LOGGER.error(
|
||||
"Received invalid preset_mode: %s for entity %s. Expected: %s",
|
||||
preset_mode,
|
||||
self.entity_id,
|
||||
self.preset_mode,
|
||||
)
|
||||
self._attr_preset_mode = None
|
||||
|
||||
@callback
|
||||
def _update_oscillating(self, oscillating):
|
||||
# Validate osc
|
||||
if oscillating == "True" or oscillating is True:
|
||||
self._attr_oscillating = True
|
||||
elif oscillating == "False" or oscillating is False:
|
||||
self._attr_oscillating = False
|
||||
elif oscillating in (STATE_UNAVAILABLE, STATE_UNKNOWN):
|
||||
self._attr_oscillating = None
|
||||
else:
|
||||
_LOGGER.error(
|
||||
"Received invalid oscillating: %s for entity %s. Expected: True/False",
|
||||
oscillating,
|
||||
self.entity_id,
|
||||
)
|
||||
self._attr_oscillating = None
|
||||
|
||||
@callback
|
||||
def _update_direction(self, direction):
|
||||
# Validate direction
|
||||
if direction in _VALID_DIRECTIONS:
|
||||
self._attr_current_direction = direction
|
||||
elif direction in (STATE_UNAVAILABLE, STATE_UNKNOWN):
|
||||
self._attr_current_direction = None
|
||||
else:
|
||||
_LOGGER.error(
|
||||
"Received invalid direction: %s for entity %s. Expected: %s",
|
||||
direction,
|
||||
self.entity_id,
|
||||
", ".join(_VALID_DIRECTIONS),
|
||||
)
|
||||
self._attr_current_direction = None
|
||||
|
||||
async def async_turn_on(
|
||||
self,
|
||||
percentage: int | None = None,
|
||||
@@ -378,7 +312,7 @@ class AbstractTemplateFan(AbstractTemplateEntity, FanEntity):
|
||||
if self._attr_assumed_state:
|
||||
self._attr_is_on = percentage != 0
|
||||
|
||||
if self._attr_assumed_state or self._percentage_template is None:
|
||||
if self._attr_assumed_state or CONF_PERCENTAGE not in self._templates:
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_set_preset_mode(self, preset_mode: str) -> None:
|
||||
@@ -395,7 +329,7 @@ class AbstractTemplateFan(AbstractTemplateEntity, FanEntity):
|
||||
if self._attr_assumed_state:
|
||||
self._attr_is_on = True
|
||||
|
||||
if self._attr_assumed_state or self._preset_mode_template is None:
|
||||
if self._attr_assumed_state or CONF_PRESET_MODE not in self._templates:
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_oscillate(self, oscillating: bool) -> None:
|
||||
@@ -410,7 +344,7 @@ class AbstractTemplateFan(AbstractTemplateEntity, FanEntity):
|
||||
context=self._context,
|
||||
)
|
||||
|
||||
if self._oscillating_template is None:
|
||||
if CONF_OSCILLATING not in self._templates:
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_set_direction(self, direction: str) -> None:
|
||||
@@ -425,7 +359,7 @@ class AbstractTemplateFan(AbstractTemplateEntity, FanEntity):
|
||||
run_variables={ATTR_DIRECTION: direction},
|
||||
context=self._context,
|
||||
)
|
||||
if self._direction_template is None:
|
||||
if CONF_DIRECTION not in self._templates:
|
||||
self.async_write_ha_state()
|
||||
else:
|
||||
_LOGGER.error(
|
||||
@@ -449,67 +383,10 @@ class StateFanEntity(TemplateEntity, AbstractTemplateFan):
|
||||
) -> None:
|
||||
"""Initialize the fan."""
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
AbstractTemplateFan.__init__(self, config)
|
||||
name = self._attr_name
|
||||
if TYPE_CHECKING:
|
||||
assert name is not None
|
||||
|
||||
for action_id, action_config, supported_feature in self._iterate_scripts(
|
||||
config
|
||||
):
|
||||
self.add_script(action_id, action_config, name, DOMAIN)
|
||||
self._attr_supported_features |= supported_feature
|
||||
|
||||
@callback
|
||||
def _update_state(self, result):
|
||||
super()._update_state(result)
|
||||
if isinstance(result, TemplateError):
|
||||
self._attr_is_on = None
|
||||
return
|
||||
|
||||
self._handle_state(result)
|
||||
|
||||
@callback
|
||||
def _async_setup_templates(self) -> None:
|
||||
"""Set up templates."""
|
||||
if self._template:
|
||||
self.add_template_attribute(
|
||||
"_attr_is_on", self._template, None, self._update_state
|
||||
)
|
||||
|
||||
if self._preset_mode_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_preset_mode",
|
||||
self._preset_mode_template,
|
||||
None,
|
||||
self._update_preset_mode,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._percentage_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_percentage",
|
||||
self._percentage_template,
|
||||
None,
|
||||
self._update_percentage,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._oscillating_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_oscillating",
|
||||
self._oscillating_template,
|
||||
None,
|
||||
self._update_oscillating,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._direction_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_current_direction",
|
||||
self._direction_template,
|
||||
None,
|
||||
self._update_direction,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
super()._async_setup_templates()
|
||||
AbstractTemplateFan.__init__(self, name, config)
|
||||
|
||||
|
||||
class TriggerFanEntity(TriggerEntity, AbstractTemplateFan):
|
||||
@@ -525,50 +402,5 @@ class TriggerFanEntity(TriggerEntity, AbstractTemplateFan):
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
TriggerEntity.__init__(self, hass, coordinator, config)
|
||||
AbstractTemplateFan.__init__(self, config)
|
||||
|
||||
self._attr_name = name = self._rendered.get(CONF_NAME, DEFAULT_NAME)
|
||||
|
||||
for action_id, action_config, supported_feature in self._iterate_scripts(
|
||||
config
|
||||
):
|
||||
self.add_script(action_id, action_config, name, DOMAIN)
|
||||
self._attr_supported_features |= supported_feature
|
||||
|
||||
for key in (
|
||||
CONF_STATE,
|
||||
CONF_PRESET_MODE,
|
||||
CONF_PERCENTAGE,
|
||||
CONF_OSCILLATING,
|
||||
CONF_DIRECTION,
|
||||
):
|
||||
if isinstance(config.get(key), template.Template):
|
||||
self._to_render_simple.append(key)
|
||||
self._parse_result.add(key)
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle update of the data."""
|
||||
self._process_data()
|
||||
|
||||
if not self.available:
|
||||
return
|
||||
|
||||
write_ha_state = False
|
||||
for key, updater in (
|
||||
(CONF_STATE, self._handle_state),
|
||||
(CONF_PRESET_MODE, self._update_preset_mode),
|
||||
(CONF_PERCENTAGE, self._update_percentage),
|
||||
(CONF_OSCILLATING, self._update_oscillating),
|
||||
(CONF_DIRECTION, self._update_direction),
|
||||
):
|
||||
if (rendered := self._rendered.get(key)) is not None:
|
||||
updater(rendered)
|
||||
write_ha_state = True
|
||||
|
||||
if len(self._rendered) > 0:
|
||||
# In case any non optimistic template
|
||||
write_ha_state = True
|
||||
|
||||
if write_ha_state:
|
||||
self.async_write_ha_state()
|
||||
AbstractTemplateFan.__init__(self, name, config)
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -2,7 +2,6 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Generator, Sequence
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
import voluptuous as vol
|
||||
@@ -26,13 +25,14 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import ServiceValidationError, TemplateError
|
||||
from homeassistant.helpers import config_validation as cv, template
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import validators as template_validators
|
||||
from .const import DOMAIN
|
||||
from .coordinator import TriggerUpdateCoordinator
|
||||
from .entity import AbstractTemplateEntity
|
||||
@@ -152,26 +152,41 @@ class AbstractTemplateLock(AbstractTemplateEntity, LockEntity):
|
||||
|
||||
# The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
|
||||
# This ensures that the __init__ on AbstractTemplateEntity is not called twice.
|
||||
def __init__(self, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
def __init__(self, name: str, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
"""Initialize the features."""
|
||||
self._code_format_template = config.get(CONF_CODE_FORMAT)
|
||||
self._code_format_template_error: TemplateError | None = None
|
||||
|
||||
# Legacy behavior, create all locks as Unlocked.
|
||||
self._set_state(LockState.UNLOCKED)
|
||||
self.setup_state_template(
|
||||
CONF_STATE,
|
||||
"_lock_state",
|
||||
template_validators.strenum(
|
||||
self, CONF_STATE, LockState, LockState.LOCKED, LockState.UNLOCKED
|
||||
),
|
||||
self._set_state,
|
||||
)
|
||||
|
||||
self.setup_template(
|
||||
CONF_CODE_FORMAT,
|
||||
"_attr_code_format",
|
||||
None,
|
||||
self._update_code_format,
|
||||
none_on_template_error=False,
|
||||
)
|
||||
|
||||
def _iterate_scripts(
|
||||
self, config: dict[str, Any]
|
||||
) -> Generator[tuple[str, Sequence[dict[str, Any]], LockEntityFeature | int]]:
|
||||
for action_id, supported_feature in (
|
||||
(CONF_LOCK, 0),
|
||||
(CONF_UNLOCK, 0),
|
||||
(CONF_OPEN, LockEntityFeature.OPEN),
|
||||
):
|
||||
if (action_config := config.get(action_id)) is not None:
|
||||
yield (action_id, action_config, supported_feature)
|
||||
self.add_script(action_id, action_config, name, DOMAIN)
|
||||
self._attr_supported_features |= supported_feature
|
||||
|
||||
def _set_state(self, state: LockState | None) -> None:
|
||||
if state is None:
|
||||
self._attr_is_locked = None
|
||||
return
|
||||
|
||||
self._attr_is_jammed = state == LockState.JAMMED
|
||||
self._attr_is_opening = state == LockState.OPENING
|
||||
self._attr_is_locking = state == LockState.LOCKING
|
||||
@@ -179,33 +194,6 @@ class AbstractTemplateLock(AbstractTemplateEntity, LockEntity):
|
||||
self._attr_is_unlocking = state == LockState.UNLOCKING
|
||||
self._attr_is_locked = state == LockState.LOCKED
|
||||
|
||||
def _handle_state(self, result: Any) -> None:
|
||||
if isinstance(result, bool):
|
||||
self._set_state(LockState.LOCKED if result else LockState.UNLOCKED)
|
||||
return
|
||||
|
||||
if isinstance(result, str):
|
||||
if result.lower() in (
|
||||
"true",
|
||||
"on",
|
||||
"locked",
|
||||
):
|
||||
self._set_state(LockState.LOCKED)
|
||||
elif result.lower() in (
|
||||
"false",
|
||||
"off",
|
||||
"unlocked",
|
||||
):
|
||||
self._set_state(LockState.UNLOCKED)
|
||||
else:
|
||||
try:
|
||||
self._set_state(LockState(result.lower()))
|
||||
except ValueError:
|
||||
self._set_state(None)
|
||||
return
|
||||
|
||||
self._set_state(None)
|
||||
|
||||
@callback
|
||||
def _update_code_format(self, render: str | TemplateError | None):
|
||||
"""Update code format from the template."""
|
||||
@@ -281,7 +269,7 @@ class AbstractTemplateLock(AbstractTemplateEntity, LockEntity):
|
||||
translation_key="code_format_template_error",
|
||||
translation_placeholders={
|
||||
"entity_id": self.entity_id,
|
||||
"code_format_template": self._code_format_template.template,
|
||||
"code_format_template": self._templates[CONF_CODE_FORMAT].template,
|
||||
"cause": str(self._code_format_template_error),
|
||||
},
|
||||
)
|
||||
@@ -300,45 +288,10 @@ class StateLockEntity(TemplateEntity, AbstractTemplateLock):
|
||||
) -> None:
|
||||
"""Initialize the lock."""
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
AbstractTemplateLock.__init__(self, config)
|
||||
name = self._attr_name
|
||||
if TYPE_CHECKING:
|
||||
assert name is not None
|
||||
|
||||
for action_id, action_config, supported_feature in self._iterate_scripts(
|
||||
config
|
||||
):
|
||||
self.add_script(action_id, action_config, name, DOMAIN)
|
||||
self._attr_supported_features |= supported_feature
|
||||
|
||||
@callback
|
||||
def _update_state(self, result: str | TemplateError) -> None:
|
||||
"""Update the state from the template."""
|
||||
super()._update_state(result)
|
||||
if isinstance(result, TemplateError):
|
||||
self._attr_is_locked = None
|
||||
return
|
||||
|
||||
self._handle_state(result)
|
||||
|
||||
@callback
|
||||
def _async_setup_templates(self) -> None:
|
||||
"""Set up templates."""
|
||||
if self._template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_is_locked",
|
||||
self._template,
|
||||
None,
|
||||
self._update_state,
|
||||
)
|
||||
if self._code_format_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_code_format",
|
||||
self._code_format_template,
|
||||
None,
|
||||
self._update_code_format,
|
||||
)
|
||||
super()._async_setup_templates()
|
||||
AbstractTemplateLock.__init__(self, name, config)
|
||||
|
||||
|
||||
class TriggerLockEntity(TriggerEntity, AbstractTemplateLock):
|
||||
@@ -354,45 +307,5 @@ class TriggerLockEntity(TriggerEntity, AbstractTemplateLock):
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
TriggerEntity.__init__(self, hass, coordinator, config)
|
||||
AbstractTemplateLock.__init__(self, config)
|
||||
|
||||
self._attr_name = name = self._rendered.get(CONF_NAME, DEFAULT_NAME)
|
||||
|
||||
if CONF_STATE in config:
|
||||
self._to_render_simple.append(CONF_STATE)
|
||||
|
||||
if isinstance(config.get(CONF_CODE_FORMAT), template.Template):
|
||||
self._to_render_simple.append(CONF_CODE_FORMAT)
|
||||
self._parse_result.add(CONF_CODE_FORMAT)
|
||||
|
||||
for action_id, action_config, supported_feature in self._iterate_scripts(
|
||||
config
|
||||
):
|
||||
self.add_script(action_id, action_config, name, DOMAIN)
|
||||
self._attr_supported_features |= supported_feature
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle update of the data."""
|
||||
self._process_data()
|
||||
|
||||
if not self.available:
|
||||
return
|
||||
|
||||
write_ha_state = False
|
||||
for key, updater in (
|
||||
(CONF_STATE, self._handle_state),
|
||||
(CONF_CODE_FORMAT, self._update_code_format),
|
||||
):
|
||||
if (rendered := self._rendered.get(key)) is not None:
|
||||
updater(rendered)
|
||||
write_ha_state = True
|
||||
|
||||
if not self._attr_assumed_state:
|
||||
write_ha_state = True
|
||||
elif self._attr_assumed_state and len(self._rendered) > 0:
|
||||
# In case any non optimistic template
|
||||
write_ha_state = True
|
||||
|
||||
if write_ha_state:
|
||||
self.async_write_ha_state()
|
||||
AbstractTemplateLock.__init__(self, name, config)
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
import voluptuous as vol
|
||||
@@ -19,14 +18,14 @@ from homeassistant.components.number import (
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_NAME, CONF_STATE, CONF_UNIT_OF_MEASUREMENT
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv, template
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import TriggerUpdateCoordinator
|
||||
from . import TriggerUpdateCoordinator, validators as template_validators
|
||||
from .const import CONF_MAX, CONF_MIN, CONF_STEP, DOMAIN
|
||||
from .entity import AbstractTemplateEntity
|
||||
from .helpers import (
|
||||
@@ -42,8 +41,6 @@ from .schemas import (
|
||||
from .template_entity import TemplateEntity
|
||||
from .trigger_entity import TriggerEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONF_SET_VALUE = "set_value"
|
||||
|
||||
DEFAULT_NAME = "Template Number"
|
||||
@@ -120,17 +117,29 @@ class AbstractTemplateNumber(AbstractTemplateEntity, NumberEntity):
|
||||
|
||||
# The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
|
||||
# This ensures that the __init__ on AbstractTemplateEntity is not called twice.
|
||||
def __init__(self, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
def __init__(self, name: str, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
"""Initialize the features."""
|
||||
self._step_template = config[CONF_STEP]
|
||||
self._min_template = config[CONF_MIN]
|
||||
self._max_template = config[CONF_MAX]
|
||||
|
||||
self._attr_native_unit_of_measurement = config.get(CONF_UNIT_OF_MEASUREMENT)
|
||||
self._attr_native_step = DEFAULT_STEP
|
||||
self._attr_native_min_value = DEFAULT_MIN_VALUE
|
||||
self._attr_native_max_value = DEFAULT_MAX_VALUE
|
||||
|
||||
self.setup_state_template(
|
||||
CONF_STATE,
|
||||
"_attr_native_value",
|
||||
template_validators.number(self, CONF_STATE),
|
||||
)
|
||||
for option, attribute in (
|
||||
(CONF_STEP, "_attr_native_step"),
|
||||
(CONF_MIN, "_attr_native_min_value"),
|
||||
(CONF_MAX, "_attr_native_max_value"),
|
||||
):
|
||||
self.setup_template(
|
||||
option, attribute, template_validators.number(self, option)
|
||||
)
|
||||
|
||||
self.add_script(CONF_SET_VALUE, config[CONF_SET_VALUE], name, DOMAIN)
|
||||
|
||||
async def async_set_native_value(self, value: float) -> None:
|
||||
"""Set value of the number."""
|
||||
if self._attr_assumed_state:
|
||||
@@ -157,46 +166,10 @@ class StateNumberEntity(TemplateEntity, AbstractTemplateNumber):
|
||||
) -> None:
|
||||
"""Initialize the number."""
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
AbstractTemplateNumber.__init__(self, config)
|
||||
|
||||
name = self._attr_name
|
||||
if TYPE_CHECKING:
|
||||
assert name is not None
|
||||
|
||||
self.add_script(CONF_SET_VALUE, config[CONF_SET_VALUE], name, DOMAIN)
|
||||
|
||||
@callback
|
||||
def _async_setup_templates(self) -> None:
|
||||
"""Set up templates."""
|
||||
if self._template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_native_value",
|
||||
self._template,
|
||||
vol.Coerce(float),
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._step_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_native_step",
|
||||
self._step_template,
|
||||
vol.Coerce(float),
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._min_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_native_min_value",
|
||||
self._min_template,
|
||||
validator=vol.Coerce(float),
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._max_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_native_max_value",
|
||||
self._max_template,
|
||||
validator=vol.Coerce(float),
|
||||
none_on_template_error=True,
|
||||
)
|
||||
super()._async_setup_templates()
|
||||
AbstractTemplateNumber.__init__(self, name, config)
|
||||
|
||||
|
||||
class TriggerNumberEntity(TriggerEntity, AbstractTemplateNumber):
|
||||
@@ -212,47 +185,5 @@ class TriggerNumberEntity(TriggerEntity, AbstractTemplateNumber):
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
TriggerEntity.__init__(self, hass, coordinator, config)
|
||||
AbstractTemplateNumber.__init__(self, config)
|
||||
|
||||
for key in (
|
||||
CONF_STATE,
|
||||
CONF_STEP,
|
||||
CONF_MIN,
|
||||
CONF_MAX,
|
||||
):
|
||||
if isinstance(config.get(key), template.Template):
|
||||
self._to_render_simple.append(key)
|
||||
self._parse_result.add(key)
|
||||
|
||||
self.add_script(
|
||||
CONF_SET_VALUE,
|
||||
config[CONF_SET_VALUE],
|
||||
self._rendered.get(CONF_NAME, DEFAULT_NAME),
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
def _handle_coordinator_update(self):
|
||||
"""Handle updated data from the coordinator."""
|
||||
self._process_data()
|
||||
|
||||
if not self.available:
|
||||
return
|
||||
|
||||
write_ha_state = False
|
||||
for key, attr in (
|
||||
(CONF_STATE, "_attr_native_value"),
|
||||
(CONF_STEP, "_attr_native_step"),
|
||||
(CONF_MIN, "_attr_native_min_value"),
|
||||
(CONF_MAX, "_attr_native_max_value"),
|
||||
):
|
||||
if (rendered := self._rendered.get(key)) is not None:
|
||||
setattr(self, attr, vol.Any(vol.Coerce(float), None)(rendered))
|
||||
write_ha_state = True
|
||||
|
||||
if len(self._rendered) > 0:
|
||||
# In case any non optimistic template
|
||||
write_ha_state = True
|
||||
|
||||
if write_ha_state:
|
||||
self.async_set_context(self.coordinator.data["context"])
|
||||
self.async_write_ha_state()
|
||||
name = self._rendered.get(CONF_NAME, DEFAULT_NAME)
|
||||
AbstractTemplateNumber.__init__(self, name, config)
|
||||
|
||||
@@ -24,7 +24,7 @@ from homeassistant.helpers.entity_platform import (
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import TriggerUpdateCoordinator
|
||||
from . import TriggerUpdateCoordinator, validators as template_validators
|
||||
from .const import DOMAIN
|
||||
from .entity import AbstractTemplateEntity
|
||||
from .helpers import (
|
||||
@@ -50,7 +50,7 @@ DEFAULT_NAME = "Template Select"
|
||||
SELECT_COMMON_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_OPTIONS): cv.template,
|
||||
vol.Required(CONF_SELECT_OPTION): cv.SCRIPT_SCHEMA,
|
||||
vol.Optional(CONF_SELECT_OPTION): cv.SCRIPT_SCHEMA,
|
||||
vol.Optional(CONF_STATE): cv.template,
|
||||
}
|
||||
)
|
||||
@@ -115,13 +115,26 @@ class AbstractTemplateSelect(AbstractTemplateEntity, SelectEntity):
|
||||
|
||||
# The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
|
||||
# This ensures that the __init__ on AbstractTemplateEntity is not called twice.
|
||||
def __init__(self, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
def __init__(self, name: str, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
"""Initialize the features."""
|
||||
self._options_template = config[ATTR_OPTIONS]
|
||||
|
||||
self._attr_options = []
|
||||
|
||||
self.setup_state_template(
|
||||
CONF_STATE,
|
||||
"_attr_current_option",
|
||||
cv.string,
|
||||
)
|
||||
self.setup_template(
|
||||
CONF_OPTIONS,
|
||||
"_attr_options",
|
||||
template_validators.list_of_strings(self, CONF_OPTIONS),
|
||||
)
|
||||
|
||||
self._attr_current_option = None
|
||||
|
||||
if (select_option := config.get(CONF_SELECT_OPTION)) is not None:
|
||||
self.add_script(CONF_SELECT_OPTION, select_option, name, DOMAIN)
|
||||
|
||||
async def async_select_option(self, option: str) -> None:
|
||||
"""Change the selected option."""
|
||||
if self._attr_assumed_state:
|
||||
@@ -148,32 +161,10 @@ class TemplateSelect(TemplateEntity, AbstractTemplateSelect):
|
||||
) -> None:
|
||||
"""Initialize the select."""
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
AbstractTemplateSelect.__init__(self, config)
|
||||
|
||||
name = self._attr_name
|
||||
if TYPE_CHECKING:
|
||||
assert name is not None
|
||||
|
||||
if (select_option := config.get(CONF_SELECT_OPTION)) is not None:
|
||||
self.add_script(CONF_SELECT_OPTION, select_option, name, DOMAIN)
|
||||
|
||||
@callback
|
||||
def _async_setup_templates(self) -> None:
|
||||
"""Set up templates."""
|
||||
if self._template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_current_option",
|
||||
self._template,
|
||||
validator=cv.string,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
self.add_template_attribute(
|
||||
"_attr_options",
|
||||
self._options_template,
|
||||
validator=vol.All(cv.ensure_list, [cv.string]),
|
||||
none_on_template_error=True,
|
||||
)
|
||||
super()._async_setup_templates()
|
||||
AbstractTemplateSelect.__init__(self, name, config)
|
||||
|
||||
|
||||
class TriggerSelectEntity(TriggerEntity, AbstractTemplateSelect):
|
||||
@@ -190,39 +181,5 @@ class TriggerSelectEntity(TriggerEntity, AbstractTemplateSelect):
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
TriggerEntity.__init__(self, hass, coordinator, config)
|
||||
AbstractTemplateSelect.__init__(self, config)
|
||||
|
||||
if CONF_STATE in config:
|
||||
self._to_render_simple.append(CONF_STATE)
|
||||
|
||||
# Scripts can be an empty list, therefore we need to check for None
|
||||
if (select_option := config.get(CONF_SELECT_OPTION)) is not None:
|
||||
self.add_script(
|
||||
CONF_SELECT_OPTION,
|
||||
select_option,
|
||||
self._rendered.get(CONF_NAME, DEFAULT_NAME),
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
def _handle_coordinator_update(self):
|
||||
"""Handle updated data from the coordinator."""
|
||||
self._process_data()
|
||||
|
||||
if not self.available:
|
||||
return
|
||||
|
||||
write_ha_state = False
|
||||
if (options := self._rendered.get(ATTR_OPTIONS)) is not None:
|
||||
self._attr_options = vol.All(cv.ensure_list, [cv.string])(options)
|
||||
write_ha_state = True
|
||||
|
||||
if (state := self._rendered.get(CONF_STATE)) is not None:
|
||||
self._attr_current_option = cv.string(state)
|
||||
write_ha_state = True
|
||||
|
||||
if len(self._rendered) > 0:
|
||||
# In case any non optimistic template
|
||||
write_ha_state = True
|
||||
|
||||
if write_ha_state:
|
||||
self.async_write_ha_state()
|
||||
name = self._rendered.get(CONF_NAME, DEFAULT_NAME)
|
||||
AbstractTemplateSelect.__init__(self, name, config)
|
||||
|
||||
@@ -2,7 +2,9 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
from collections.abc import Callable
|
||||
from datetime import date, datetime
|
||||
from decimal import Decimal
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
@@ -20,9 +22,6 @@ from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.components.sensor.helpers import ( # pylint: disable=hass-component-root-import
|
||||
async_parse_date_datetime,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
ATTR_ENTITY_ID,
|
||||
@@ -41,16 +40,15 @@ from homeassistant.const import (
|
||||
STATE_UNKNOWN,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers import config_validation as cv, template
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from . import TriggerUpdateCoordinator
|
||||
from . import TriggerUpdateCoordinator, validators as template_validators
|
||||
from .entity import AbstractTemplateEntity
|
||||
from .helpers import (
|
||||
async_setup_template_entry,
|
||||
@@ -186,6 +184,47 @@ def async_create_preview_sensor(
|
||||
)
|
||||
|
||||
|
||||
def validate_datetime(
|
||||
entity: AbstractTemplateSensor,
|
||||
attribute: str,
|
||||
resolve_as: SensorDeviceClass,
|
||||
**kwargs,
|
||||
) -> Callable[[Any], datetime | date | None]:
|
||||
"""Converts the template result into a datetime or date."""
|
||||
|
||||
def convert(result: Any) -> datetime | date | None:
|
||||
if resolve_as == SensorDeviceClass.TIMESTAMP:
|
||||
if isinstance(result, datetime):
|
||||
return result
|
||||
|
||||
if (parsed_timestamp := dt_util.parse_datetime(result)) is None:
|
||||
template_validators.log_validation_result_error(
|
||||
entity, attribute, result, "expected a valid timestamp"
|
||||
)
|
||||
return None
|
||||
|
||||
if kwargs.get("require_tzinfo", True) and parsed_timestamp.tzinfo is None:
|
||||
template_validators.log_validation_result_error(
|
||||
entity,
|
||||
attribute,
|
||||
result,
|
||||
"expected a valid timestamp with a timezone",
|
||||
)
|
||||
return None
|
||||
|
||||
return parsed_timestamp
|
||||
|
||||
if (parsed_date := dt_util.parse_date(result)) is not None:
|
||||
return parsed_date
|
||||
|
||||
template_validators.log_validation_result_error(
|
||||
entity, attribute, result, "expected a valid date"
|
||||
)
|
||||
return None
|
||||
|
||||
return convert
|
||||
|
||||
|
||||
class AbstractTemplateSensor(AbstractTemplateEntity, RestoreSensor):
|
||||
"""Representation of a template sensor features."""
|
||||
|
||||
@@ -198,38 +237,32 @@ class AbstractTemplateSensor(AbstractTemplateEntity, RestoreSensor):
|
||||
self._attr_native_unit_of_measurement = config.get(CONF_UNIT_OF_MEASUREMENT)
|
||||
self._attr_device_class = config.get(CONF_DEVICE_CLASS)
|
||||
self._attr_state_class = config.get(CONF_STATE_CLASS)
|
||||
self._template: template.Template = config[CONF_STATE]
|
||||
self._attr_last_reset_template: template.Template | None = config.get(
|
||||
ATTR_LAST_RESET
|
||||
self._attr_last_reset = None
|
||||
|
||||
self.setup_state_template(
|
||||
CONF_STATE,
|
||||
"_attr_native_value",
|
||||
self._validate_state,
|
||||
)
|
||||
self.setup_template(
|
||||
ATTR_LAST_RESET,
|
||||
"_attr_last_reset",
|
||||
validate_datetime(
|
||||
self, ATTR_LAST_RESET, SensorDeviceClass.TIMESTAMP, require_tzinfo=False
|
||||
),
|
||||
)
|
||||
|
||||
@callback
|
||||
def _update_last_reset(self, result: Any) -> None:
|
||||
if isinstance(result, datetime):
|
||||
self._attr_last_reset = result
|
||||
return
|
||||
|
||||
parsed_timestamp = dt_util.parse_datetime(result)
|
||||
if parsed_timestamp is None:
|
||||
_LOGGER.warning(
|
||||
"%s rendered invalid timestamp for last_reset attribute: %s",
|
||||
self.entity_id,
|
||||
result,
|
||||
)
|
||||
else:
|
||||
self._attr_last_reset = parsed_timestamp
|
||||
|
||||
def _handle_state(self, result: Any) -> None:
|
||||
def _validate_state(
|
||||
self, result: Any
|
||||
) -> StateType | date | datetime | Decimal | None:
|
||||
"""Validate the state."""
|
||||
if result is None or self.device_class not in (
|
||||
SensorDeviceClass.DATE,
|
||||
SensorDeviceClass.TIMESTAMP,
|
||||
):
|
||||
self._attr_native_value = result
|
||||
return
|
||||
return result
|
||||
|
||||
self._attr_native_value = async_parse_date_datetime(
|
||||
result, self.entity_id, self.device_class
|
||||
)
|
||||
return validate_datetime(self, CONF_STATE, self.device_class)(result)
|
||||
|
||||
|
||||
class StateSensorEntity(TemplateEntity, AbstractTemplateSensor):
|
||||
@@ -248,31 +281,6 @@ class StateSensorEntity(TemplateEntity, AbstractTemplateSensor):
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
AbstractTemplateSensor.__init__(self, config)
|
||||
|
||||
@callback
|
||||
def _async_setup_templates(self) -> None:
|
||||
"""Set up templates."""
|
||||
self.add_template_attribute(
|
||||
"_attr_native_value", self._template, None, self._update_state
|
||||
)
|
||||
if self._attr_last_reset_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_last_reset",
|
||||
self._attr_last_reset_template,
|
||||
cv.datetime,
|
||||
self._update_last_reset,
|
||||
)
|
||||
|
||||
super()._async_setup_templates()
|
||||
|
||||
@callback
|
||||
def _update_state(self, result):
|
||||
super()._update_state(result)
|
||||
if isinstance(result, TemplateError):
|
||||
self._attr_native_value = None
|
||||
return
|
||||
|
||||
self._handle_state(result)
|
||||
|
||||
|
||||
class TriggerSensorEntity(TriggerEntity, AbstractTemplateSensor):
|
||||
"""Sensor entity based on trigger data."""
|
||||
@@ -289,15 +297,6 @@ class TriggerSensorEntity(TriggerEntity, AbstractTemplateSensor):
|
||||
TriggerEntity.__init__(self, hass, coordinator, config)
|
||||
AbstractTemplateSensor.__init__(self, config)
|
||||
|
||||
self._to_render_simple.append(CONF_STATE)
|
||||
self._parse_result.add(CONF_STATE)
|
||||
|
||||
if last_reset_template := self._attr_last_reset_template:
|
||||
if last_reset_template.is_static:
|
||||
self._static_rendered[ATTR_LAST_RESET] = last_reset_template.template
|
||||
else:
|
||||
self._to_render_simple.append(ATTR_LAST_RESET)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Restore last state."""
|
||||
await super().async_added_to_hass()
|
||||
@@ -311,16 +310,3 @@ class TriggerSensorEntity(TriggerEntity, AbstractTemplateSensor):
|
||||
):
|
||||
self._attr_native_value = extra_data.native_value
|
||||
self.restore_attributes(last_state)
|
||||
|
||||
@callback
|
||||
def _process_data(self) -> None:
|
||||
"""Process new data."""
|
||||
super()._process_data()
|
||||
|
||||
# Update last_reset
|
||||
if (last_reset := self._rendered.get(ATTR_LAST_RESET)) is not None:
|
||||
self._update_last_reset(last_reset)
|
||||
|
||||
rendered = self._rendered.get(CONF_STATE)
|
||||
self._handle_state(rendered)
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -21,14 +21,12 @@ from homeassistant.const import (
|
||||
CONF_SWITCHES,
|
||||
CONF_UNIQUE_ID,
|
||||
CONF_VALUE_TEMPLATE,
|
||||
STATE_OFF,
|
||||
STATE_ON,
|
||||
STATE_UNAVAILABLE,
|
||||
STATE_UNKNOWN,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers import config_validation as cv, template
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
@@ -36,7 +34,7 @@ from homeassistant.helpers.entity_platform import (
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import TriggerUpdateCoordinator
|
||||
from . import TriggerUpdateCoordinator, validators as template_validators
|
||||
from .const import CONF_TURN_OFF, CONF_TURN_ON, DOMAIN
|
||||
from .entity import AbstractTemplateEntity
|
||||
from .helpers import (
|
||||
@@ -53,8 +51,6 @@ from .schemas import (
|
||||
from .template_entity import TemplateEntity
|
||||
from .trigger_entity import TriggerEntity
|
||||
|
||||
_VALID_STATES = [STATE_ON, STATE_OFF, "true", "false"]
|
||||
|
||||
LEGACY_FIELDS = {
|
||||
CONF_VALUE_TEMPLATE: CONF_STATE,
|
||||
}
|
||||
@@ -155,9 +151,21 @@ class AbstractTemplateSwitch(AbstractTemplateEntity, SwitchEntity, RestoreEntity
|
||||
|
||||
# The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
|
||||
# This ensures that the __init__ on AbstractTemplateEntity is not called twice.
|
||||
def __init__(self, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
def __init__(self, name: str, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
"""Initialize the features."""
|
||||
|
||||
self.setup_state_template(
|
||||
CONF_STATE,
|
||||
"_attr_is_on",
|
||||
template_validators.boolean(self, CONF_STATE),
|
||||
)
|
||||
|
||||
# Scripts can be an empty list, therefore we need to check for None
|
||||
if (on_action := config.get(CONF_TURN_ON)) is not None:
|
||||
self.add_script(CONF_TURN_ON, on_action, name, DOMAIN)
|
||||
if (off_action := config.get(CONF_TURN_OFF)) is not None:
|
||||
self.add_script(CONF_TURN_OFF, off_action, name, DOMAIN)
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Fire the on action."""
|
||||
if on_script := self._action_scripts.get(CONF_TURN_ON):
|
||||
@@ -188,54 +196,20 @@ class StateSwitchEntity(TemplateEntity, AbstractTemplateSwitch):
|
||||
) -> None:
|
||||
"""Initialize the Template switch."""
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
AbstractTemplateSwitch.__init__(self, config)
|
||||
|
||||
name = self._attr_name
|
||||
if TYPE_CHECKING:
|
||||
assert name is not None
|
||||
|
||||
# Scripts can be an empty list, therefore we need to check for None
|
||||
if (on_action := config.get(CONF_TURN_ON)) is not None:
|
||||
self.add_script(CONF_TURN_ON, on_action, name, DOMAIN)
|
||||
if (off_action := config.get(CONF_TURN_OFF)) is not None:
|
||||
self.add_script(CONF_TURN_OFF, off_action, name, DOMAIN)
|
||||
|
||||
@callback
|
||||
def _update_state(self, result):
|
||||
super()._update_state(result)
|
||||
if isinstance(result, TemplateError):
|
||||
self._attr_is_on = None
|
||||
return
|
||||
|
||||
if isinstance(result, bool):
|
||||
self._attr_is_on = result
|
||||
return
|
||||
|
||||
if isinstance(result, str):
|
||||
self._attr_is_on = result.lower() in ("true", STATE_ON)
|
||||
return
|
||||
|
||||
self._attr_is_on = False
|
||||
AbstractTemplateSwitch.__init__(self, name, config)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register callbacks."""
|
||||
if self._template is None:
|
||||
if CONF_STATE not in self._templates:
|
||||
# restore state after startup
|
||||
await super().async_added_to_hass()
|
||||
if state := await self.async_get_last_state():
|
||||
self._attr_is_on = state.state == STATE_ON
|
||||
await super().async_added_to_hass()
|
||||
|
||||
@callback
|
||||
def _async_setup_templates(self) -> None:
|
||||
"""Set up templates."""
|
||||
if self._template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_is_on", self._template, None, self._update_state
|
||||
)
|
||||
|
||||
super()._async_setup_templates()
|
||||
|
||||
|
||||
class TriggerSwitchEntity(TriggerEntity, AbstractTemplateSwitch):
|
||||
"""Switch entity based on trigger data."""
|
||||
@@ -250,17 +224,8 @@ class TriggerSwitchEntity(TriggerEntity, AbstractTemplateSwitch):
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
TriggerEntity.__init__(self, hass, coordinator, config)
|
||||
AbstractTemplateSwitch.__init__(self, config)
|
||||
|
||||
name = self._rendered.get(CONF_NAME, DEFAULT_NAME)
|
||||
if on_action := config.get(CONF_TURN_ON):
|
||||
self.add_script(CONF_TURN_ON, on_action, name, DOMAIN)
|
||||
if off_action := config.get(CONF_TURN_OFF):
|
||||
self.add_script(CONF_TURN_OFF, off_action, name, DOMAIN)
|
||||
|
||||
if CONF_STATE in config:
|
||||
self._to_render_simple.append(CONF_STATE)
|
||||
self._parse_result.add(CONF_STATE)
|
||||
AbstractTemplateSwitch.__init__(self, name, config)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Restore last state."""
|
||||
@@ -274,24 +239,3 @@ class TriggerSwitchEntity(TriggerEntity, AbstractTemplateSwitch):
|
||||
):
|
||||
self._attr_is_on = last_state.state == STATE_ON
|
||||
self.restore_attributes(last_state)
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle update of the data."""
|
||||
self._process_data()
|
||||
|
||||
if not self.available:
|
||||
return
|
||||
|
||||
write_ha_state = False
|
||||
if (state := self._rendered.get(CONF_STATE)) is not None:
|
||||
self._attr_is_on = template.result_as_boolean(state)
|
||||
write_ha_state = True
|
||||
|
||||
elif len(self._rendered) > 0:
|
||||
# In case name, icon, or friendly name have a template but
|
||||
# states does not
|
||||
write_ha_state = True
|
||||
|
||||
if write_ha_state:
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -295,6 +295,10 @@ class TemplateEntity(AbstractTemplateEntity):
|
||||
self._attr_available = False
|
||||
return
|
||||
|
||||
# Recover from template errors if they happened before.
|
||||
if not self._availability_template and not self._attr_available:
|
||||
self._attr_available = True
|
||||
|
||||
state = validator(result) if validator else result
|
||||
if on_update:
|
||||
on_update(state)
|
||||
@@ -309,6 +313,8 @@ class TemplateEntity(AbstractTemplateEntity):
|
||||
attribute: str,
|
||||
validator: Callable[[Any], Any] | None = None,
|
||||
on_update: Callable[[Any], None] | None = None,
|
||||
render_complex: bool = False,
|
||||
**kwargs,
|
||||
):
|
||||
"""Set up a template that manages any property or attribute of the entity.
|
||||
|
||||
@@ -324,8 +330,15 @@ class TemplateEntity(AbstractTemplateEntity):
|
||||
on_update:
|
||||
Called to store the template result rather than storing it
|
||||
the supplied attribute. Passed the result of the validator.
|
||||
render_complex (default=False):
|
||||
This signals trigger based template entities to render the template
|
||||
as a complex result. State based template entities always render
|
||||
complex results.
|
||||
"""
|
||||
self.add_template(option, attribute, validator, on_update, True)
|
||||
none_on_template_error = kwargs.get("none_on_template_error", True)
|
||||
self.add_template(
|
||||
option, attribute, validator, on_update, none_on_template_error
|
||||
)
|
||||
|
||||
def add_template_attribute(
|
||||
self,
|
||||
|
||||
@@ -69,6 +69,8 @@ class TriggerEntity( # pylint: disable=hass-enforce-class-module
|
||||
attribute: str,
|
||||
validator: Callable[[Any], Any] | None = None,
|
||||
on_update: Callable[[Any], None] | None = None,
|
||||
render_complex: bool = False,
|
||||
**kwargs,
|
||||
) -> None:
|
||||
"""Set up a template that manages any property or attribute of the entity.
|
||||
|
||||
@@ -84,8 +86,17 @@ class TriggerEntity( # pylint: disable=hass-enforce-class-module
|
||||
on_update:
|
||||
Called to store the template result rather than storing it
|
||||
the supplied attribute. Passed the result of the validator.
|
||||
render_complex (default=False):
|
||||
This signals trigger based template entities to render the template
|
||||
as a complex result. State based template entities always render
|
||||
complex results.
|
||||
"""
|
||||
self.setup_state_template(option, attribute, validator, on_update)
|
||||
if self.add_template(option, attribute, validator, on_update):
|
||||
if render_complex:
|
||||
self._to_render_complex.append(option)
|
||||
else:
|
||||
self._to_render_simple.append(option)
|
||||
self._parse_result.add(option)
|
||||
|
||||
@property
|
||||
def referenced_blueprint(self) -> str | None:
|
||||
@@ -131,19 +142,18 @@ class TriggerEntity( # pylint: disable=hass-enforce-class-module
|
||||
# Handle any templates.
|
||||
write_state = False
|
||||
for option, entity_template in self._templates.items():
|
||||
value = _SENTINEL
|
||||
if (rendered := self._rendered.get(option)) is not None:
|
||||
value = rendered
|
||||
|
||||
# Capture templates that did not render a result due to an exception and
|
||||
# ensure the state object updates. _SENTINEL is used to differentiate
|
||||
# templates that render None.
|
||||
if value is _SENTINEL:
|
||||
if (rendered := self._rendered.get(option, _SENTINEL)) is _SENTINEL:
|
||||
write_state = True
|
||||
continue
|
||||
|
||||
if entity_template.validator:
|
||||
value = entity_template.validator(rendered)
|
||||
value = (
|
||||
entity_template.validator(rendered)
|
||||
if entity_template.validator
|
||||
else rendered
|
||||
)
|
||||
|
||||
if entity_template.on_update:
|
||||
entity_template.on_update(value)
|
||||
|
||||
@@ -24,16 +24,15 @@ from homeassistant.const import (
|
||||
STATE_UNKNOWN,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv, template
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
)
|
||||
from homeassistant.helpers.template import _SENTINEL
|
||||
from homeassistant.helpers.trigger_template_entity import CONF_PICTURE
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import TriggerUpdateCoordinator
|
||||
from . import TriggerUpdateCoordinator, validators as template_validators
|
||||
from .const import DOMAIN
|
||||
from .entity import AbstractTemplateEntity
|
||||
from .helpers import (
|
||||
@@ -145,19 +144,49 @@ class AbstractTemplateUpdate(AbstractTemplateEntity, UpdateEntity):
|
||||
|
||||
# The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
|
||||
# This ensures that the __init__ on AbstractTemplateEntity is not called twice.
|
||||
def __init__(self, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
def __init__(self, name: str, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
"""Initialize the features."""
|
||||
|
||||
self._installed_version_template = config[CONF_INSTALLED_VERSION]
|
||||
self._latest_version_template = config[CONF_LATEST_VERSION]
|
||||
|
||||
self._attr_device_class = config.get(CONF_DEVICE_CLASS)
|
||||
|
||||
self._in_progress_template = config.get(CONF_IN_PROGRESS)
|
||||
self._release_summary_template = config.get(CONF_RELEASE_SUMMARY)
|
||||
self._release_url_template = config.get(CONF_RELEASE_URL)
|
||||
self._title_template = config.get(CONF_TITLE)
|
||||
self._update_percentage_template = config.get(CONF_UPDATE_PERCENTAGE)
|
||||
# Setup templates.
|
||||
self.setup_template(
|
||||
CONF_INSTALLED_VERSION,
|
||||
"_attr_installed_version",
|
||||
template_validators.string(self, CONF_INSTALLED_VERSION),
|
||||
)
|
||||
self.setup_template(
|
||||
CONF_LATEST_VERSION,
|
||||
"_attr_latest_version",
|
||||
template_validators.string(self, CONF_LATEST_VERSION),
|
||||
)
|
||||
self.setup_template(
|
||||
CONF_IN_PROGRESS,
|
||||
"_attr_in_progress",
|
||||
template_validators.boolean(self, CONF_IN_PROGRESS),
|
||||
self._update_in_progress,
|
||||
)
|
||||
self.setup_template(
|
||||
CONF_RELEASE_SUMMARY,
|
||||
"_attr_release_summary",
|
||||
template_validators.string(self, CONF_RELEASE_SUMMARY),
|
||||
)
|
||||
self.setup_template(
|
||||
CONF_RELEASE_URL,
|
||||
"_attr_release_url",
|
||||
template_validators.url(self, CONF_RELEASE_URL),
|
||||
)
|
||||
self.setup_template(
|
||||
CONF_TITLE,
|
||||
"_attr_title",
|
||||
template_validators.string(self, CONF_TITLE),
|
||||
)
|
||||
self.setup_template(
|
||||
CONF_UPDATE_PERCENTAGE,
|
||||
"_attr_update_percentage",
|
||||
template_validators.number(self, CONF_UPDATE_PERCENTAGE, 0.0, 100.0),
|
||||
self._update_update_percentage,
|
||||
)
|
||||
|
||||
self._attr_supported_features = UpdateEntityFeature(0)
|
||||
if config[CONF_BACKUP]:
|
||||
@@ -165,99 +194,40 @@ class AbstractTemplateUpdate(AbstractTemplateEntity, UpdateEntity):
|
||||
if config[CONF_SPECIFIC_VERSION]:
|
||||
self._attr_supported_features |= UpdateEntityFeature.SPECIFIC_VERSION
|
||||
if (
|
||||
self._in_progress_template is not None
|
||||
or self._update_percentage_template is not None
|
||||
CONF_IN_PROGRESS in self._templates
|
||||
or CONF_UPDATE_PERCENTAGE in self._templates
|
||||
):
|
||||
self._attr_supported_features |= UpdateEntityFeature.PROGRESS
|
||||
|
||||
self._optimistic_in_process = (
|
||||
self._in_progress_template is None
|
||||
and self._update_percentage_template is not None
|
||||
CONF_IN_PROGRESS not in self._templates
|
||||
and CONF_UPDATE_PERCENTAGE in self._templates
|
||||
)
|
||||
|
||||
# Scripts can be an empty list, therefore we need to check for None
|
||||
if (install_action := config.get(CONF_INSTALL)) is not None:
|
||||
self.add_script(CONF_INSTALL, install_action, name, DOMAIN)
|
||||
self._attr_supported_features |= UpdateEntityFeature.INSTALL
|
||||
|
||||
@callback
|
||||
def _update_installed_version(self, result: Any) -> None:
|
||||
def _update_in_progress(self, result: bool | None) -> None:
|
||||
if result is None:
|
||||
self._attr_installed_version = None
|
||||
return
|
||||
|
||||
self._attr_installed_version = cv.string(result)
|
||||
|
||||
@callback
|
||||
def _update_latest_version(self, result: Any) -> None:
|
||||
if result is None:
|
||||
self._attr_latest_version = None
|
||||
return
|
||||
|
||||
self._attr_latest_version = cv.string(result)
|
||||
|
||||
@callback
|
||||
def _update_in_process(self, result: Any) -> None:
|
||||
try:
|
||||
self._attr_in_progress = cv.boolean(result)
|
||||
except vol.Invalid:
|
||||
_LOGGER.error(
|
||||
"Received invalid in_process value: %s for entity %s. Expected: True, False",
|
||||
result,
|
||||
self.entity_id,
|
||||
template_validators.log_validation_result_error(
|
||||
self, CONF_IN_PROGRESS, result, "expected a boolean"
|
||||
)
|
||||
self._attr_in_progress = False
|
||||
self._attr_in_progress = result or False
|
||||
|
||||
@callback
|
||||
def _update_release_summary(self, result: Any) -> None:
|
||||
if result is None:
|
||||
self._attr_release_summary = None
|
||||
return
|
||||
|
||||
self._attr_release_summary = cv.string(result)
|
||||
|
||||
@callback
|
||||
def _update_release_url(self, result: Any) -> None:
|
||||
if result is None:
|
||||
self._attr_release_url = None
|
||||
return
|
||||
|
||||
try:
|
||||
self._attr_release_url = cv.url(result)
|
||||
except vol.Invalid:
|
||||
_LOGGER.error(
|
||||
"Received invalid release_url: %s for entity %s",
|
||||
result,
|
||||
self.entity_id,
|
||||
)
|
||||
self._attr_release_url = None
|
||||
|
||||
@callback
|
||||
def _update_title(self, result: Any) -> None:
|
||||
if result is None:
|
||||
self._attr_title = None
|
||||
return
|
||||
|
||||
self._attr_title = cv.string(result)
|
||||
|
||||
@callback
|
||||
def _update_update_percentage(self, result: Any) -> None:
|
||||
def _update_update_percentage(self, result: float | None) -> None:
|
||||
if result is None:
|
||||
if self._optimistic_in_process:
|
||||
self._attr_in_progress = False
|
||||
self._attr_update_percentage = None
|
||||
return
|
||||
|
||||
try:
|
||||
percentage = vol.All(
|
||||
vol.Coerce(float),
|
||||
vol.Range(0, 100, min_included=True, max_included=True),
|
||||
)(result)
|
||||
if self._optimistic_in_process:
|
||||
self._attr_in_progress = True
|
||||
self._attr_update_percentage = percentage
|
||||
except vol.Invalid:
|
||||
_LOGGER.error(
|
||||
"Received invalid update_percentage: %s for entity %s",
|
||||
result,
|
||||
self.entity_id,
|
||||
)
|
||||
self._attr_update_percentage = None
|
||||
if self._optimistic_in_process:
|
||||
self._attr_in_progress = True
|
||||
self._attr_update_percentage = result
|
||||
|
||||
async def async_install(
|
||||
self, version: str | None, backup: bool, **kwargs: Any
|
||||
@@ -283,16 +253,10 @@ class StateUpdateEntity(TemplateEntity, AbstractTemplateUpdate):
|
||||
) -> None:
|
||||
"""Initialize the Template update."""
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
AbstractTemplateUpdate.__init__(self, config)
|
||||
|
||||
name = self._attr_name
|
||||
if TYPE_CHECKING:
|
||||
assert name is not None
|
||||
|
||||
# Scripts can be an empty list, therefore we need to check for None
|
||||
if (install_action := config.get(CONF_INSTALL)) is not None:
|
||||
self.add_script(CONF_INSTALL, install_action, name, DOMAIN)
|
||||
self._attr_supported_features |= UpdateEntityFeature.INSTALL
|
||||
AbstractTemplateUpdate.__init__(self, name, config)
|
||||
|
||||
@property
|
||||
def entity_picture(self) -> str | None:
|
||||
@@ -305,65 +269,6 @@ class StateUpdateEntity(TemplateEntity, AbstractTemplateUpdate):
|
||||
return "https://brands.home-assistant.io/_/template/icon.png"
|
||||
return self._attr_entity_picture
|
||||
|
||||
@callback
|
||||
def _async_setup_templates(self) -> None:
|
||||
"""Set up templates."""
|
||||
self.add_template_attribute(
|
||||
"_attr_installed_version",
|
||||
self._installed_version_template,
|
||||
None,
|
||||
self._update_installed_version,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
self.add_template_attribute(
|
||||
"_attr_latest_version",
|
||||
self._latest_version_template,
|
||||
None,
|
||||
self._update_latest_version,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._in_progress_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_in_progress",
|
||||
self._in_progress_template,
|
||||
None,
|
||||
self._update_in_process,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._release_summary_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_release_summary",
|
||||
self._release_summary_template,
|
||||
None,
|
||||
self._update_release_summary,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._release_url_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_release_url",
|
||||
self._release_url_template,
|
||||
None,
|
||||
self._update_release_url,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._title_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_title",
|
||||
self._title_template,
|
||||
None,
|
||||
self._update_title,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
if self._update_percentage_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_update_percentage",
|
||||
self._update_percentage_template,
|
||||
None,
|
||||
self._update_update_percentage,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
super()._async_setup_templates()
|
||||
|
||||
|
||||
class TriggerUpdateEntity(TriggerEntity, AbstractTemplateUpdate):
|
||||
"""Update entity based on trigger data."""
|
||||
@@ -378,35 +283,8 @@ class TriggerUpdateEntity(TriggerEntity, AbstractTemplateUpdate):
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
TriggerEntity.__init__(self, hass, coordinator, config)
|
||||
AbstractTemplateUpdate.__init__(self, config)
|
||||
|
||||
for key in (
|
||||
CONF_INSTALLED_VERSION,
|
||||
CONF_LATEST_VERSION,
|
||||
):
|
||||
self._to_render_simple.append(key)
|
||||
self._parse_result.add(key)
|
||||
|
||||
# Scripts can be an empty list, therefore we need to check for None
|
||||
if (install_action := config.get(CONF_INSTALL)) is not None:
|
||||
self.add_script(
|
||||
CONF_INSTALL,
|
||||
install_action,
|
||||
self._rendered.get(CONF_NAME, DEFAULT_NAME),
|
||||
DOMAIN,
|
||||
)
|
||||
self._attr_supported_features |= UpdateEntityFeature.INSTALL
|
||||
|
||||
for key in (
|
||||
CONF_IN_PROGRESS,
|
||||
CONF_RELEASE_SUMMARY,
|
||||
CONF_RELEASE_URL,
|
||||
CONF_TITLE,
|
||||
CONF_UPDATE_PERCENTAGE,
|
||||
):
|
||||
if isinstance(config.get(key), template.Template):
|
||||
self._to_render_simple.append(key)
|
||||
self._parse_result.add(key)
|
||||
name = self._rendered.get(CONF_NAME, DEFAULT_NAME)
|
||||
AbstractTemplateUpdate.__init__(self, name, config)
|
||||
|
||||
# Ensure the entity picture can resolve None to produce the default picture.
|
||||
if CONF_PICTURE in config:
|
||||
@@ -431,32 +309,3 @@ class TriggerUpdateEntity(TriggerEntity, AbstractTemplateUpdate):
|
||||
if (picture := self._rendered.get(CONF_PICTURE)) is None:
|
||||
return UpdateEntity.entity_picture.fget(self) # type: ignore[attr-defined]
|
||||
return picture
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle update of the data."""
|
||||
self._process_data()
|
||||
|
||||
if not self.available:
|
||||
return
|
||||
|
||||
write_ha_state = False
|
||||
for key, updater in (
|
||||
(CONF_INSTALLED_VERSION, self._update_installed_version),
|
||||
(CONF_LATEST_VERSION, self._update_latest_version),
|
||||
(CONF_IN_PROGRESS, self._update_in_process),
|
||||
(CONF_RELEASE_SUMMARY, self._update_release_summary),
|
||||
(CONF_RELEASE_URL, self._update_release_url),
|
||||
(CONF_TITLE, self._update_title),
|
||||
(CONF_UPDATE_PERCENTAGE, self._update_update_percentage),
|
||||
):
|
||||
if (rendered := self._rendered.get(key, _SENTINEL)) is not _SENTINEL:
|
||||
updater(rendered)
|
||||
write_ha_state = True
|
||||
|
||||
if len(self._rendered) > 0:
|
||||
# In case any non optimistic template
|
||||
write_ha_state = True
|
||||
|
||||
if write_ha_state:
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Generator, Sequence
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
@@ -30,15 +29,9 @@ from homeassistant.const import (
|
||||
CONF_STATE,
|
||||
CONF_UNIQUE_ID,
|
||||
CONF_VALUE_TEMPLATE,
|
||||
STATE_UNKNOWN,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
issue_registry as ir,
|
||||
template,
|
||||
)
|
||||
from homeassistant.helpers import config_validation as cv, issue_registry as ir
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
@@ -46,8 +39,8 @@ from homeassistant.helpers.entity_platform import (
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import TriggerUpdateCoordinator, validators as template_validators
|
||||
from .const import DOMAIN
|
||||
from .coordinator import TriggerUpdateCoordinator
|
||||
from .entity import AbstractTemplateEntity
|
||||
from .helpers import (
|
||||
async_setup_template_entry,
|
||||
@@ -76,14 +69,6 @@ CONF_FAN_SPEED_TEMPLATE = "fan_speed_template"
|
||||
DEFAULT_NAME = "Template Vacuum"
|
||||
|
||||
ENTITY_ID_FORMAT = VACUUM_DOMAIN + ".{}"
|
||||
_VALID_STATES = [
|
||||
VacuumActivity.CLEANING,
|
||||
VacuumActivity.DOCKED,
|
||||
VacuumActivity.PAUSED,
|
||||
VacuumActivity.IDLE,
|
||||
VacuumActivity.RETURNING,
|
||||
VacuumActivity.ERROR,
|
||||
]
|
||||
|
||||
LEGACY_FIELDS = {
|
||||
CONF_BATTERY_LEVEL_TEMPLATE: CONF_BATTERY_LEVEL,
|
||||
@@ -225,27 +210,36 @@ class AbstractTemplateVacuum(AbstractTemplateEntity, StateVacuumEntity):
|
||||
|
||||
# The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
|
||||
# This ensures that the __init__ on AbstractTemplateEntity is not called twice.
|
||||
def __init__(self, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
def __init__(self, name: str, config: dict[str, Any]) -> None: # pylint: disable=super-init-not-called
|
||||
"""Initialize the features."""
|
||||
self._battery_level_template = config.get(CONF_BATTERY_LEVEL)
|
||||
self._fan_speed_template = config.get(CONF_FAN_SPEED)
|
||||
|
||||
self._battery_level = None
|
||||
self._attr_fan_speed = None
|
||||
|
||||
# List of valid fan speeds
|
||||
self._attr_fan_speed_list = config[CONF_FAN_SPEED_LIST]
|
||||
self.setup_state_template(
|
||||
CONF_STATE,
|
||||
"_attr_activity",
|
||||
template_validators.strenum(self, CONF_STATE, VacuumActivity),
|
||||
)
|
||||
self.setup_template(
|
||||
CONF_FAN_SPEED,
|
||||
"_attr_fan_speed",
|
||||
template_validators.item_in_list(
|
||||
self, CONF_FAN_SPEED, self._attr_fan_speed_list
|
||||
),
|
||||
)
|
||||
self.setup_template(
|
||||
CONF_BATTERY_LEVEL,
|
||||
"_attr_battery_level",
|
||||
template_validators.number(self, CONF_BATTERY_LEVEL, 0.0, 100.0),
|
||||
)
|
||||
|
||||
self._attr_supported_features = (
|
||||
VacuumEntityFeature.START | VacuumEntityFeature.STATE
|
||||
)
|
||||
|
||||
if self._battery_level_template:
|
||||
if CONF_BATTERY_LEVEL in self._templates:
|
||||
self._attr_supported_features |= VacuumEntityFeature.BATTERY
|
||||
|
||||
def _iterate_scripts(
|
||||
self, config: dict[str, Any]
|
||||
) -> Generator[tuple[str, Sequence[dict[str, Any]], VacuumEntityFeature | int]]:
|
||||
for action_id, supported_feature in (
|
||||
(SERVICE_START, 0),
|
||||
(SERVICE_PAUSE, VacuumEntityFeature.PAUSE),
|
||||
@@ -256,22 +250,8 @@ class AbstractTemplateVacuum(AbstractTemplateEntity, StateVacuumEntity):
|
||||
(SERVICE_SET_FAN_SPEED, VacuumEntityFeature.FAN_SPEED),
|
||||
):
|
||||
if (action_config := config.get(action_id)) is not None:
|
||||
yield (action_id, action_config, supported_feature)
|
||||
|
||||
def _handle_state(self, result: Any) -> None:
|
||||
# Validate state
|
||||
if result in _VALID_STATES:
|
||||
self._attr_activity = result
|
||||
elif result == STATE_UNKNOWN:
|
||||
self._attr_activity = None
|
||||
else:
|
||||
_LOGGER.error(
|
||||
"Received invalid vacuum state: %s for entity %s. Expected: %s",
|
||||
result,
|
||||
self.entity_id,
|
||||
", ".join(_VALID_STATES),
|
||||
)
|
||||
self._attr_activity = None
|
||||
self.add_script(action_id, action_config, name, DOMAIN)
|
||||
self._attr_supported_features |= supported_feature
|
||||
|
||||
async def async_start(self) -> None:
|
||||
"""Start or resume the cleaning task."""
|
||||
@@ -335,44 +315,6 @@ class AbstractTemplateVacuum(AbstractTemplateEntity, StateVacuumEntity):
|
||||
script, run_variables={ATTR_FAN_SPEED: fan_speed}, context=self._context
|
||||
)
|
||||
|
||||
@callback
|
||||
def _update_battery_level(self, battery_level):
|
||||
try:
|
||||
battery_level_int = int(battery_level)
|
||||
if not 0 <= battery_level_int <= 100:
|
||||
raise ValueError # noqa: TRY301
|
||||
except ValueError:
|
||||
_LOGGER.error(
|
||||
"Received invalid battery level: %s for entity %s. Expected: 0-100",
|
||||
battery_level,
|
||||
self.entity_id,
|
||||
)
|
||||
self._attr_battery_level = None
|
||||
return
|
||||
|
||||
self._attr_battery_level = battery_level_int
|
||||
|
||||
@callback
|
||||
def _update_fan_speed(self, fan_speed):
|
||||
if isinstance(fan_speed, TemplateError):
|
||||
# This is legacy behavior
|
||||
self._attr_fan_speed = None
|
||||
self._attr_activity = None
|
||||
return
|
||||
|
||||
if fan_speed in self._attr_fan_speed_list:
|
||||
self._attr_fan_speed = fan_speed
|
||||
elif fan_speed == STATE_UNKNOWN:
|
||||
self._attr_fan_speed = None
|
||||
else:
|
||||
_LOGGER.error(
|
||||
"Received invalid fan speed: %s for entity %s. Expected: %s",
|
||||
fan_speed,
|
||||
self.entity_id,
|
||||
self._attr_fan_speed_list,
|
||||
)
|
||||
self._attr_fan_speed = None
|
||||
|
||||
|
||||
class TemplateStateVacuumEntity(TemplateEntity, AbstractTemplateVacuum):
|
||||
"""A template vacuum component."""
|
||||
@@ -387,16 +329,10 @@ class TemplateStateVacuumEntity(TemplateEntity, AbstractTemplateVacuum):
|
||||
) -> None:
|
||||
"""Initialize the vacuum."""
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
AbstractTemplateVacuum.__init__(self, config)
|
||||
name = self._attr_name
|
||||
if TYPE_CHECKING:
|
||||
assert name is not None
|
||||
|
||||
for action_id, action_config, supported_feature in self._iterate_scripts(
|
||||
config
|
||||
):
|
||||
self.add_script(action_id, action_config, name, DOMAIN)
|
||||
self._attr_supported_features |= supported_feature
|
||||
AbstractTemplateVacuum.__init__(self, name, config)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
@@ -408,42 +344,6 @@ class TemplateStateVacuumEntity(TemplateEntity, AbstractTemplateVacuum):
|
||||
self.entity_id,
|
||||
)
|
||||
|
||||
@callback
|
||||
def _async_setup_templates(self) -> None:
|
||||
"""Set up templates."""
|
||||
if self._template is not None:
|
||||
self.add_template_attribute(
|
||||
"_attr_activity", self._template, None, self._update_state
|
||||
)
|
||||
if self._fan_speed_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_fan_speed",
|
||||
self._fan_speed_template,
|
||||
None,
|
||||
self._update_fan_speed,
|
||||
)
|
||||
if self._battery_level_template is not None:
|
||||
self.add_template_attribute(
|
||||
"_battery_level",
|
||||
self._battery_level_template,
|
||||
None,
|
||||
self._update_battery_level,
|
||||
none_on_template_error=True,
|
||||
)
|
||||
super()._async_setup_templates()
|
||||
|
||||
@callback
|
||||
def _update_state(self, result):
|
||||
super()._update_state(result)
|
||||
if isinstance(result, TemplateError):
|
||||
# This is legacy behavior
|
||||
self._attr_activity = None
|
||||
if not self._availability_template:
|
||||
self._attr_available = True
|
||||
return
|
||||
|
||||
self._handle_state(result)
|
||||
|
||||
|
||||
class TriggerVacuumEntity(TriggerEntity, AbstractTemplateVacuum):
|
||||
"""Vacuum entity based on trigger data."""
|
||||
@@ -458,20 +358,8 @@ class TriggerVacuumEntity(TriggerEntity, AbstractTemplateVacuum):
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
TriggerEntity.__init__(self, hass, coordinator, config)
|
||||
AbstractTemplateVacuum.__init__(self, config)
|
||||
|
||||
self._attr_name = name = self._rendered.get(CONF_NAME, DEFAULT_NAME)
|
||||
|
||||
for action_id, action_config, supported_feature in self._iterate_scripts(
|
||||
config
|
||||
):
|
||||
self.add_script(action_id, action_config, name, DOMAIN)
|
||||
self._attr_supported_features |= supported_feature
|
||||
|
||||
for key in (CONF_STATE, CONF_FAN_SPEED, CONF_BATTERY_LEVEL):
|
||||
if isinstance(config.get(key), template.Template):
|
||||
self._to_render_simple.append(key)
|
||||
self._parse_result.add(key)
|
||||
AbstractTemplateVacuum.__init__(self, name, config)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
@@ -482,28 +370,3 @@ class TriggerVacuumEntity(TriggerEntity, AbstractTemplateVacuum):
|
||||
self._attr_name or DEFAULT_NAME,
|
||||
self.entity_id,
|
||||
)
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle update of the data."""
|
||||
self._process_data()
|
||||
|
||||
if not self.available:
|
||||
return
|
||||
|
||||
write_ha_state = False
|
||||
for key, updater in (
|
||||
(CONF_STATE, self._handle_state),
|
||||
(CONF_FAN_SPEED, self._update_fan_speed),
|
||||
(CONF_BATTERY_LEVEL, self._update_battery_level),
|
||||
):
|
||||
if (rendered := self._rendered.get(key)) is not None:
|
||||
updater(rendered)
|
||||
write_ha_state = True
|
||||
|
||||
if len(self._rendered) > 0:
|
||||
# In case any non optimistic template
|
||||
write_ha_state = True
|
||||
|
||||
if write_ha_state:
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -19,7 +19,7 @@ RESULT_ON = ("1", "true", "yes", "on", "enable")
|
||||
RESULT_OFF = ("0", "false", "no", "off", "disable")
|
||||
|
||||
|
||||
def _log_validation_result_error(
|
||||
def log_validation_result_error(
|
||||
entity: Entity,
|
||||
attribute: str,
|
||||
value: Any,
|
||||
@@ -44,7 +44,7 @@ def _log_validation_result_error(
|
||||
)
|
||||
|
||||
|
||||
def _check_result_for_none(result: Any, **kwargs: Any) -> bool:
|
||||
def check_result_for_none(result: Any, **kwargs: Any) -> bool:
|
||||
"""Checks the result for none, unknown, unavailable."""
|
||||
if result is None:
|
||||
return True
|
||||
@@ -74,7 +74,7 @@ def strenum[T: StrEnum](
|
||||
"""
|
||||
|
||||
def convert(result: Any) -> T | None:
|
||||
if _check_result_for_none(result, **kwargs):
|
||||
if check_result_for_none(result, **kwargs):
|
||||
return None
|
||||
|
||||
if isinstance(result, str):
|
||||
@@ -102,7 +102,7 @@ def strenum[T: StrEnum](
|
||||
if state_off:
|
||||
expected += RESULT_OFF
|
||||
|
||||
_log_validation_result_error(
|
||||
log_validation_result_error(
|
||||
entity,
|
||||
attribute,
|
||||
result,
|
||||
@@ -130,7 +130,7 @@ def boolean(
|
||||
"""
|
||||
|
||||
def convert(result: Any) -> bool | None:
|
||||
if _check_result_for_none(result, **kwargs):
|
||||
if check_result_for_none(result, **kwargs):
|
||||
return None
|
||||
|
||||
if isinstance(result, bool):
|
||||
@@ -154,7 +154,7 @@ def boolean(
|
||||
if as_false:
|
||||
items += as_false
|
||||
|
||||
_log_validation_result_error(entity, attribute, result, items)
|
||||
log_validation_result_error(entity, attribute, result, items)
|
||||
return None
|
||||
|
||||
return convert
|
||||
@@ -182,11 +182,11 @@ def number(
|
||||
message = f"{message} less than or equal to {maximum:0.1f}"
|
||||
|
||||
def convert(result: Any) -> float | int | None:
|
||||
if _check_result_for_none(result, **kwargs):
|
||||
if check_result_for_none(result, **kwargs):
|
||||
return None
|
||||
|
||||
if (result_type := type(result)) is bool:
|
||||
_log_validation_result_error(entity, attribute, result, message)
|
||||
log_validation_result_error(entity, attribute, result, message)
|
||||
return None
|
||||
|
||||
if isinstance(result, (float, int)):
|
||||
@@ -201,7 +201,7 @@ def number(
|
||||
if return_type is int:
|
||||
value = int(value)
|
||||
except vol.Invalid:
|
||||
_log_validation_result_error(entity, attribute, result, message)
|
||||
log_validation_result_error(entity, attribute, result, message)
|
||||
return None
|
||||
|
||||
if minimum is None and maximum is None:
|
||||
@@ -218,7 +218,7 @@ def number(
|
||||
):
|
||||
return value
|
||||
|
||||
_log_validation_result_error(entity, attribute, result, message)
|
||||
log_validation_result_error(entity, attribute, result, message)
|
||||
return None
|
||||
|
||||
return convert
|
||||
@@ -239,11 +239,11 @@ def list_of_strings(
|
||||
"""
|
||||
|
||||
def convert(result: Any) -> list[str] | None:
|
||||
if _check_result_for_none(result, **kwargs):
|
||||
if check_result_for_none(result, **kwargs):
|
||||
return None
|
||||
|
||||
if not isinstance(result, list):
|
||||
_log_validation_result_error(
|
||||
log_validation_result_error(
|
||||
entity,
|
||||
attribute,
|
||||
result,
|
||||
@@ -263,7 +263,7 @@ def list_of_strings(
|
||||
def item_in_list[T](
|
||||
entity: Entity,
|
||||
attribute: str,
|
||||
items: list[Any] | None,
|
||||
items: list[Any] | str | None,
|
||||
items_attribute: str | None = None,
|
||||
**kwargs: Any,
|
||||
) -> Callable[[Any], Any | None]:
|
||||
@@ -274,15 +274,20 @@ def item_in_list[T](
|
||||
"""
|
||||
|
||||
def convert(result: Any) -> Any | None:
|
||||
if _check_result_for_none(result, **kwargs):
|
||||
if check_result_for_none(result, **kwargs):
|
||||
return None
|
||||
|
||||
# items may be mutable based on another template field. Always
|
||||
# perform this check when the items come from an configured
|
||||
# attribute.
|
||||
if items is None or (len(items) == 0):
|
||||
if isinstance(items, str):
|
||||
_items = getattr(entity, items)
|
||||
else:
|
||||
_items = items
|
||||
|
||||
if _items is None or (len(_items) == 0):
|
||||
if items_attribute:
|
||||
_log_validation_result_error(
|
||||
log_validation_result_error(
|
||||
entity,
|
||||
attribute,
|
||||
result,
|
||||
@@ -291,15 +296,68 @@ def item_in_list[T](
|
||||
|
||||
return None
|
||||
|
||||
if result not in items:
|
||||
_log_validation_result_error(
|
||||
if result not in _items:
|
||||
log_validation_result_error(
|
||||
entity,
|
||||
attribute,
|
||||
result,
|
||||
tuple(str(v) for v in items),
|
||||
tuple(str(v) for v in _items),
|
||||
)
|
||||
return None
|
||||
|
||||
return result
|
||||
|
||||
return convert
|
||||
|
||||
|
||||
def url(
|
||||
entity: Entity,
|
||||
attribute: str,
|
||||
**kwargs: Any,
|
||||
) -> Callable[[Any], str | None]:
|
||||
"""Convert the result to a string url or None."""
|
||||
|
||||
def convert(result: Any) -> str | None:
|
||||
if check_result_for_none(result, **kwargs):
|
||||
return None
|
||||
|
||||
try:
|
||||
return cv.url(result)
|
||||
except vol.Invalid:
|
||||
log_validation_result_error(
|
||||
entity,
|
||||
attribute,
|
||||
result,
|
||||
"expected a url",
|
||||
)
|
||||
return None
|
||||
|
||||
return convert
|
||||
|
||||
|
||||
def string(
|
||||
entity: Entity,
|
||||
attribute: str,
|
||||
**kwargs: Any,
|
||||
) -> Callable[[Any], str | None]:
|
||||
"""Convert the result to a string or None."""
|
||||
|
||||
def convert(result: Any) -> str | None:
|
||||
if check_result_for_none(result, **kwargs):
|
||||
return None
|
||||
|
||||
if isinstance(result, str):
|
||||
return result
|
||||
|
||||
try:
|
||||
return cv.string(result)
|
||||
except vol.Invalid:
|
||||
log_validation_result_error(
|
||||
entity,
|
||||
attribute,
|
||||
result,
|
||||
"expected a string",
|
||||
)
|
||||
return None
|
||||
|
||||
return convert
|
||||
|
||||
@@ -4,9 +4,8 @@ from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import asdict, dataclass
|
||||
from functools import partial
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any, Literal, Self
|
||||
from typing import Any, Literal, Self
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -42,8 +41,7 @@ from homeassistant.const import (
|
||||
STATE_UNKNOWN,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers import config_validation as cv, template
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
@@ -57,7 +55,7 @@ from homeassistant.util.unit_conversion import (
|
||||
TemperatureConverter,
|
||||
)
|
||||
|
||||
from .coordinator import TriggerUpdateCoordinator
|
||||
from . import TriggerUpdateCoordinator, validators as template_validators
|
||||
from .entity import AbstractTemplateEntity
|
||||
from .helpers import (
|
||||
async_setup_template_entry,
|
||||
@@ -82,23 +80,23 @@ CHECK_FORECAST_KEYS = (
|
||||
.union(("apparent_temperature", "wind_gust_speed", "dew_point"))
|
||||
)
|
||||
|
||||
CONDITION_CLASSES = {
|
||||
CONDITION_CLASSES = [
|
||||
ATTR_CONDITION_CLEAR_NIGHT,
|
||||
ATTR_CONDITION_CLOUDY,
|
||||
ATTR_CONDITION_EXCEPTIONAL,
|
||||
ATTR_CONDITION_FOG,
|
||||
ATTR_CONDITION_HAIL,
|
||||
ATTR_CONDITION_LIGHTNING,
|
||||
ATTR_CONDITION_LIGHTNING_RAINY,
|
||||
ATTR_CONDITION_LIGHTNING,
|
||||
ATTR_CONDITION_PARTLYCLOUDY,
|
||||
ATTR_CONDITION_POURING,
|
||||
ATTR_CONDITION_RAINY,
|
||||
ATTR_CONDITION_SNOWY,
|
||||
ATTR_CONDITION_SNOWY_RAINY,
|
||||
ATTR_CONDITION_SNOWY,
|
||||
ATTR_CONDITION_SUNNY,
|
||||
ATTR_CONDITION_WINDY,
|
||||
ATTR_CONDITION_WINDY_VARIANT,
|
||||
ATTR_CONDITION_EXCEPTIONAL,
|
||||
}
|
||||
ATTR_CONDITION_WINDY,
|
||||
]
|
||||
|
||||
CONF_APPARENT_TEMPERATURE = "apparent_temperature"
|
||||
CONF_APPARENT_TEMPERATURE_TEMPLATE = "apparent_temperature_template"
|
||||
@@ -318,6 +316,75 @@ def async_create_preview_weather(
|
||||
)
|
||||
|
||||
|
||||
def validate_forecast(
|
||||
entity: AbstractTemplateWeather,
|
||||
option: str,
|
||||
forecast_type: Literal["daily", "hourly", "twice_daily"],
|
||||
) -> Callable[[Any], list[Forecast] | None]:
|
||||
"""Validate a forecast."""
|
||||
|
||||
weather_message = (
|
||||
"see Weather documentation https://www.home-assistant.io/integrations/weather/"
|
||||
)
|
||||
|
||||
def validate(result: Any) -> list[Forecast] | None:
|
||||
if template_validators.check_result_for_none(result):
|
||||
return None
|
||||
|
||||
if not isinstance(result, list):
|
||||
template_validators.log_validation_result_error(
|
||||
entity,
|
||||
option,
|
||||
result,
|
||||
f"expected a list, {weather_message}",
|
||||
)
|
||||
|
||||
raised = False
|
||||
for forecast in result:
|
||||
if not isinstance(forecast, dict):
|
||||
raised = True
|
||||
template_validators.log_validation_result_error(
|
||||
entity,
|
||||
option,
|
||||
result,
|
||||
f"expected a list of forecast dictionaries, got {forecast}, {weather_message}",
|
||||
)
|
||||
continue
|
||||
|
||||
diff_result = set().union(forecast.keys()).difference(CHECK_FORECAST_KEYS)
|
||||
if diff_result:
|
||||
raised = True
|
||||
template_validators.log_validation_result_error(
|
||||
entity,
|
||||
option,
|
||||
result,
|
||||
f"expected valid forecast keys, unallowed keys: ({diff_result}) for {forecast}, {weather_message}",
|
||||
)
|
||||
if forecast_type == "twice_daily" and "is_daytime" not in forecast:
|
||||
raised = True
|
||||
template_validators.log_validation_result_error(
|
||||
entity,
|
||||
option,
|
||||
result,
|
||||
f"`is_daytime` is missing in twice_daily forecast {forecast}, {weather_message}",
|
||||
)
|
||||
if "datetime" not in forecast:
|
||||
raised = True
|
||||
template_validators.log_validation_result_error(
|
||||
entity,
|
||||
option,
|
||||
result,
|
||||
f"`datetime` is missing in forecast, got {forecast}, {weather_message}",
|
||||
)
|
||||
|
||||
if raised:
|
||||
return None
|
||||
|
||||
return result
|
||||
|
||||
return validate
|
||||
|
||||
|
||||
class AbstractTemplateWeather(AbstractTemplateEntity, WeatherEntity):
|
||||
"""Representation of a template weathers features."""
|
||||
|
||||
@@ -327,28 +394,79 @@ class AbstractTemplateWeather(AbstractTemplateEntity, WeatherEntity):
|
||||
# The super init is not called because TemplateEntity and TriggerEntity will call AbstractTemplateEntity.__init__.
|
||||
# This ensures that the __init__ on AbstractTemplateEntity is not called twice.
|
||||
def __init__( # pylint: disable=super-init-not-called
|
||||
self, config: dict[str, Any], initial_state: bool | None = False
|
||||
self, config: dict[str, Any]
|
||||
) -> None:
|
||||
"""Initialize the features."""
|
||||
|
||||
# Templates
|
||||
self._apparent_temperature_template = config.get(CONF_APPARENT_TEMPERATURE)
|
||||
self._attribution_template = config.get(CONF_ATTRIBUTION)
|
||||
self._cloud_coverage_template = config.get(CONF_CLOUD_COVERAGE)
|
||||
self._condition_template = config[CONF_CONDITION]
|
||||
self._dew_point_template = config.get(CONF_DEW_POINT)
|
||||
self._forecast_daily_template = config.get(CONF_FORECAST_DAILY)
|
||||
self._forecast_hourly_template = config.get(CONF_FORECAST_HOURLY)
|
||||
self._forecast_twice_daily_template = config.get(CONF_FORECAST_TWICE_DAILY)
|
||||
self._humidity_template = config[CONF_HUMIDITY]
|
||||
self._ozone_template = config.get(CONF_OZONE)
|
||||
self._pressure_template = config.get(CONF_PRESSURE)
|
||||
self._temperature_template = config[CONF_TEMPERATURE]
|
||||
self._uv_index_template = config.get(CONF_UV_INDEX)
|
||||
self._visibility_template = config.get(CONF_VISIBILITY)
|
||||
self._wind_bearing_template = config.get(CONF_WIND_BEARING)
|
||||
self._wind_gust_speed_template = config.get(CONF_WIND_GUST_SPEED)
|
||||
self._wind_speed_template = config.get(CONF_WIND_SPEED)
|
||||
# Required options
|
||||
self.setup_template(
|
||||
CONF_CONDITION,
|
||||
"_attr_condition",
|
||||
template_validators.item_in_list(self, CONF_CONDITION, CONDITION_CLASSES),
|
||||
)
|
||||
self.setup_template(
|
||||
CONF_HUMIDITY,
|
||||
"_attr_humidity",
|
||||
template_validators.number(self, CONF_HUMIDITY, 0.0, 100.0),
|
||||
)
|
||||
self.setup_template(
|
||||
CONF_TEMPERATURE,
|
||||
"_attr_native_temperature",
|
||||
template_validators.number(self, CONF_TEMPERATURE),
|
||||
)
|
||||
|
||||
# Optional options
|
||||
|
||||
self.setup_template(
|
||||
CONF_ATTRIBUTION,
|
||||
"_attribution",
|
||||
vol.Coerce(str),
|
||||
)
|
||||
self.setup_template(
|
||||
CONF_WIND_BEARING, "_attr_wind_bearing", None, self._update_wind_bearing
|
||||
)
|
||||
|
||||
# Optional numeric options
|
||||
for option, attribute in (
|
||||
(CONF_APPARENT_TEMPERATURE, "_attr_native_apparent_temperature"),
|
||||
(CONF_CLOUD_COVERAGE, "_attr_cloud_coverage"),
|
||||
(CONF_DEW_POINT, "_attr_native_dew_point"),
|
||||
(CONF_OZONE, "_attr_ozone"),
|
||||
(CONF_PRESSURE, "_attr_native_pressure"),
|
||||
(CONF_UV_INDEX, "_attr_uv_index"),
|
||||
(CONF_VISIBILITY, "_attr_native_visibility"),
|
||||
(CONF_WIND_GUST_SPEED, "_attr_native_wind_gust_speed"),
|
||||
(CONF_WIND_SPEED, "_attr_native_wind_speed"),
|
||||
):
|
||||
self.setup_template(
|
||||
option, attribute, template_validators.number(self, option)
|
||||
)
|
||||
|
||||
# Forecasts
|
||||
|
||||
self._forecast_daily: list[Forecast] | None = []
|
||||
self.setup_template(
|
||||
CONF_FORECAST_DAILY,
|
||||
"_forecast_daily",
|
||||
validate_forecast(self, CONF_FORECAST_DAILY, "daily"),
|
||||
self._update_forecast("daily"),
|
||||
)
|
||||
|
||||
self._forecast_hourly: list[Forecast] | None = []
|
||||
self.setup_template(
|
||||
CONF_FORECAST_HOURLY,
|
||||
"_forecast_hourly",
|
||||
validate_forecast(self, CONF_FORECAST_HOURLY, "hourly"),
|
||||
self._update_forecast("hourly"),
|
||||
)
|
||||
|
||||
self._forecast_twice_daily: list[Forecast] | None = []
|
||||
self.setup_template(
|
||||
CONF_FORECAST_TWICE_DAILY,
|
||||
"_forecast_twice_daily",
|
||||
validate_forecast(self, CONF_FORECAST_TWICE_DAILY, "twice_daily"),
|
||||
self._update_forecast("twice_daily"),
|
||||
)
|
||||
|
||||
# Legacy support
|
||||
self._attribution: str | None = None
|
||||
@@ -362,11 +480,11 @@ class AbstractTemplateWeather(AbstractTemplateEntity, WeatherEntity):
|
||||
|
||||
# Supported Features
|
||||
self._attr_supported_features = 0
|
||||
if self._forecast_daily_template:
|
||||
if CONF_FORECAST_DAILY in self._templates:
|
||||
self._attr_supported_features |= WeatherEntityFeature.FORECAST_DAILY
|
||||
if self._forecast_hourly_template:
|
||||
if CONF_FORECAST_HOURLY in self._templates:
|
||||
self._attr_supported_features |= WeatherEntityFeature.FORECAST_HOURLY
|
||||
if self._forecast_twice_daily_template:
|
||||
if CONF_FORECAST_TWICE_DAILY in self._templates:
|
||||
self._attr_supported_features |= WeatherEntityFeature.FORECAST_TWICE_DAILY
|
||||
|
||||
@property
|
||||
@@ -376,62 +494,6 @@ class AbstractTemplateWeather(AbstractTemplateEntity, WeatherEntity):
|
||||
return "Powered by Home Assistant"
|
||||
return self._attribution
|
||||
|
||||
def _validate[T](
|
||||
self,
|
||||
validator: Callable[[Any], T],
|
||||
result: Any,
|
||||
) -> T | None:
|
||||
try:
|
||||
return validator(result)
|
||||
except vol.Invalid:
|
||||
return None
|
||||
|
||||
@callback
|
||||
def _update_apparent_temperature(self, result: Any) -> None:
|
||||
self._attr_native_apparent_temperature = self._validate(
|
||||
vol.Coerce(float), result
|
||||
)
|
||||
|
||||
@callback
|
||||
def _update_attribution(self, result: Any) -> None:
|
||||
self._attribution = vol.Coerce(str)(result)
|
||||
|
||||
@callback
|
||||
def _update_condition(self, result: Any) -> None:
|
||||
self._attr_condition = result if result in CONDITION_CLASSES else None
|
||||
|
||||
@callback
|
||||
def _update_coverage(self, result: Any) -> None:
|
||||
self._attr_cloud_coverage = self._validate(vol.Coerce(float), result)
|
||||
|
||||
@callback
|
||||
def _update_dew_point(self, result: Any) -> None:
|
||||
self._attr_native_dew_point = self._validate(vol.Coerce(float), result)
|
||||
|
||||
@callback
|
||||
def _update_humidity(self, result: Any) -> None:
|
||||
self._attr_humidity = self._validate(vol.Coerce(float), result)
|
||||
|
||||
@callback
|
||||
def _update_ozone(self, result: Any) -> None:
|
||||
self._attr_ozone = self._validate(vol.Coerce(float), result)
|
||||
|
||||
@callback
|
||||
def _update_pressure(self, result: Any) -> None:
|
||||
self._attr_native_pressure = self._validate(vol.Coerce(float), result)
|
||||
|
||||
@callback
|
||||
def _update_temperature(self, result: Any) -> None:
|
||||
self._attr_native_temperature = self._validate(vol.Coerce(float), result)
|
||||
|
||||
@callback
|
||||
def _update_uv_index(self, result: Any) -> None:
|
||||
self._attr_uv_index = self._validate(vol.Coerce(float), result)
|
||||
|
||||
@callback
|
||||
def _update_visibility(self, result: Any) -> None:
|
||||
self._attr_native_visibility = self._validate(vol.Coerce(float), result)
|
||||
|
||||
@callback
|
||||
def _update_wind_bearing(self, result: Any) -> None:
|
||||
try:
|
||||
@@ -440,48 +502,31 @@ class AbstractTemplateWeather(AbstractTemplateEntity, WeatherEntity):
|
||||
self._attr_wind_bearing = vol.Coerce(str)(result)
|
||||
|
||||
@callback
|
||||
def _update_wind_gust_speed(self, result: Any) -> None:
|
||||
self._attr_native_wind_gust_speed = self._validate(vol.Coerce(float), result)
|
||||
|
||||
@callback
|
||||
def _update_wind_speed(self, result: Any) -> None:
|
||||
self._attr_native_wind_speed = self._validate(vol.Coerce(float), result)
|
||||
|
||||
@callback
|
||||
def _validate_forecast(
|
||||
def _update_forecast(
|
||||
self,
|
||||
forecast_type: Literal["daily", "hourly", "twice_daily"],
|
||||
result: Any,
|
||||
) -> list[Forecast] | None:
|
||||
"""Validate the forecasts."""
|
||||
if result is None:
|
||||
return None
|
||||
) -> Callable[[list[Forecast] | None], None]:
|
||||
"""Save template result and trigger forecast listener."""
|
||||
|
||||
if not isinstance(result, list):
|
||||
raise vol.Invalid(
|
||||
"Forecasts is not a list, see Weather documentation https://www.home-assistant.io/integrations/weather/"
|
||||
def update(result: list[Forecast] | None) -> None:
|
||||
setattr(self, f"_forecast_{forecast_type}", result)
|
||||
self.hass.async_create_task(
|
||||
self.async_update_listeners([forecast_type]), eager_start=True
|
||||
)
|
||||
for forecast in result:
|
||||
if not isinstance(forecast, dict):
|
||||
raise vol.Invalid(
|
||||
"Forecast in list is not a dict, see Weather documentation https://www.home-assistant.io/integrations/weather/"
|
||||
)
|
||||
diff_result = set().union(forecast.keys()).difference(CHECK_FORECAST_KEYS)
|
||||
if diff_result:
|
||||
raise vol.Invalid(
|
||||
f"Only valid keys in Forecast are allowed, unallowed keys: ({diff_result}), "
|
||||
"see Weather documentation https://www.home-assistant.io/integrations/weather/"
|
||||
)
|
||||
if forecast_type == "twice_daily" and "is_daytime" not in forecast:
|
||||
raise vol.Invalid(
|
||||
"`is_daytime` is missing in twice_daily forecast, see Weather documentation https://www.home-assistant.io/integrations/weather/"
|
||||
)
|
||||
if "datetime" not in forecast:
|
||||
raise vol.Invalid(
|
||||
"`datetime` is required in forecasts, see Weather documentation https://www.home-assistant.io/integrations/weather/"
|
||||
)
|
||||
continue
|
||||
return result
|
||||
|
||||
return update
|
||||
|
||||
async def async_forecast_daily(self) -> list[Forecast]:
|
||||
"""Return the daily forecast in native units."""
|
||||
return self._forecast_daily or []
|
||||
|
||||
async def async_forecast_hourly(self) -> list[Forecast]:
|
||||
"""Return the daily forecast in native units."""
|
||||
return self._forecast_hourly or []
|
||||
|
||||
async def async_forecast_twice_daily(self) -> list[Forecast]:
|
||||
"""Return the daily forecast in native units."""
|
||||
return self._forecast_twice_daily or []
|
||||
|
||||
|
||||
class StateWeatherEntity(TemplateEntity, AbstractTemplateWeather):
|
||||
@@ -499,152 +544,6 @@ class StateWeatherEntity(TemplateEntity, AbstractTemplateWeather):
|
||||
TemplateEntity.__init__(self, hass, config, unique_id)
|
||||
AbstractTemplateWeather.__init__(self, config)
|
||||
|
||||
name = self._attr_name
|
||||
if TYPE_CHECKING:
|
||||
assert name is not None
|
||||
|
||||
# Forecasts
|
||||
self._forecast_daily: list[Forecast] | None = []
|
||||
self._forecast_hourly: list[Forecast] | None = []
|
||||
self._forecast_twice_daily: list[Forecast] | None = []
|
||||
|
||||
@callback
|
||||
def _async_setup_templates(self) -> None:
|
||||
"""Set up templates."""
|
||||
|
||||
if self._apparent_temperature_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_native_apparent_temperature",
|
||||
self._apparent_temperature_template,
|
||||
on_update=self._update_apparent_temperature,
|
||||
)
|
||||
if self._attribution_template:
|
||||
self.add_template_attribute(
|
||||
"_attribution",
|
||||
self._attribution_template,
|
||||
on_update=self._update_attribution,
|
||||
)
|
||||
if self._cloud_coverage_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_cloud_coverage",
|
||||
self._cloud_coverage_template,
|
||||
on_update=self._update_coverage,
|
||||
)
|
||||
if self._condition_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_condition",
|
||||
self._condition_template,
|
||||
on_update=self._update_condition,
|
||||
)
|
||||
if self._dew_point_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_native_dew_point",
|
||||
self._dew_point_template,
|
||||
on_update=self._update_dew_point,
|
||||
)
|
||||
if self._forecast_daily_template:
|
||||
self.add_template_attribute(
|
||||
"_forecast_daily",
|
||||
self._forecast_daily_template,
|
||||
on_update=partial(self._update_forecast, "daily"),
|
||||
validator=partial(self._validate_forecast, "daily"),
|
||||
)
|
||||
if self._forecast_hourly_template:
|
||||
self.add_template_attribute(
|
||||
"_forecast_hourly",
|
||||
self._forecast_hourly_template,
|
||||
on_update=partial(self._update_forecast, "hourly"),
|
||||
validator=partial(self._validate_forecast, "hourly"),
|
||||
)
|
||||
if self._forecast_twice_daily_template:
|
||||
self.add_template_attribute(
|
||||
"_forecast_twice_daily",
|
||||
self._forecast_twice_daily_template,
|
||||
on_update=partial(self._update_forecast, "twice_daily"),
|
||||
validator=partial(self._validate_forecast, "twice_daily"),
|
||||
)
|
||||
if self._humidity_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_humidity",
|
||||
self._humidity_template,
|
||||
on_update=self._update_humidity,
|
||||
)
|
||||
if self._ozone_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_ozone",
|
||||
self._ozone_template,
|
||||
on_update=self._update_ozone,
|
||||
)
|
||||
if self._pressure_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_native_pressure",
|
||||
self._pressure_template,
|
||||
on_update=self._update_pressure,
|
||||
)
|
||||
if self._temperature_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_native_temperature",
|
||||
self._temperature_template,
|
||||
on_update=self._update_temperature,
|
||||
)
|
||||
if self._uv_index_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_uv_index",
|
||||
self._uv_index_template,
|
||||
on_update=self._update_uv_index,
|
||||
)
|
||||
if self._visibility_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_native_visibility",
|
||||
self._visibility_template,
|
||||
on_update=self._update_visibility,
|
||||
)
|
||||
if self._wind_bearing_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_wind_bearing",
|
||||
self._wind_bearing_template,
|
||||
on_update=self._update_wind_bearing,
|
||||
)
|
||||
if self._wind_gust_speed_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_native_wind_gust_speed",
|
||||
self._wind_gust_speed_template,
|
||||
on_update=self._update_wind_gust_speed,
|
||||
)
|
||||
if self._wind_speed_template:
|
||||
self.add_template_attribute(
|
||||
"_attr_native_wind_speed",
|
||||
self._wind_speed_template,
|
||||
on_update=self._update_wind_speed,
|
||||
)
|
||||
|
||||
super()._async_setup_templates()
|
||||
|
||||
async def async_forecast_daily(self) -> list[Forecast]:
|
||||
"""Return the daily forecast in native units."""
|
||||
return self._forecast_daily or []
|
||||
|
||||
async def async_forecast_hourly(self) -> list[Forecast]:
|
||||
"""Return the daily forecast in native units."""
|
||||
return self._forecast_hourly or []
|
||||
|
||||
async def async_forecast_twice_daily(self) -> list[Forecast]:
|
||||
"""Return the daily forecast in native units."""
|
||||
return self._forecast_twice_daily or []
|
||||
|
||||
@callback
|
||||
def _update_forecast(
|
||||
self,
|
||||
forecast_type: Literal["daily", "hourly", "twice_daily"],
|
||||
result: list[Forecast] | TemplateError,
|
||||
) -> None:
|
||||
"""Save template result and trigger forecast listener."""
|
||||
attr_result = None if isinstance(result, TemplateError) else result
|
||||
setattr(self, f"_forecast_{forecast_type}", attr_result)
|
||||
self.hass.async_create_task(
|
||||
self.async_update_listeners([forecast_type]), eager_start=True
|
||||
)
|
||||
|
||||
|
||||
@dataclass(kw_only=True)
|
||||
class WeatherExtraStoredData(ExtraStoredData):
|
||||
@@ -711,11 +610,6 @@ class TriggerWeatherEntity(TriggerEntity, AbstractTemplateWeather, RestoreEntity
|
||||
"""Weather entity based on trigger data."""
|
||||
|
||||
domain = WEATHER_DOMAIN
|
||||
extra_template_keys = (
|
||||
CONF_CONDITION,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_HUMIDITY,
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@@ -725,27 +619,7 @@ class TriggerWeatherEntity(TriggerEntity, AbstractTemplateWeather, RestoreEntity
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
TriggerEntity.__init__(self, hass, coordinator, config)
|
||||
AbstractTemplateWeather.__init__(self, config, None)
|
||||
|
||||
for key in (
|
||||
CONF_APPARENT_TEMPERATURE,
|
||||
CONF_ATTRIBUTION,
|
||||
CONF_CLOUD_COVERAGE,
|
||||
CONF_DEW_POINT,
|
||||
CONF_FORECAST_DAILY,
|
||||
CONF_FORECAST_HOURLY,
|
||||
CONF_FORECAST_TWICE_DAILY,
|
||||
CONF_OZONE,
|
||||
CONF_PRESSURE,
|
||||
CONF_UV_INDEX,
|
||||
CONF_VISIBILITY,
|
||||
CONF_WIND_BEARING,
|
||||
CONF_WIND_GUST_SPEED,
|
||||
CONF_WIND_SPEED,
|
||||
):
|
||||
if isinstance(config.get(key), template.Template):
|
||||
self._to_render_simple.append(key)
|
||||
self._parse_result.add(key)
|
||||
AbstractTemplateWeather.__init__(self, config)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Restore last state."""
|
||||
@@ -772,72 +646,6 @@ class TriggerWeatherEntity(TriggerEntity, AbstractTemplateWeather, RestoreEntity
|
||||
self._attr_native_wind_gust_speed = weather_data.last_wind_gust_speed
|
||||
self._attr_native_wind_speed = weather_data.last_wind_speed
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle update of the data."""
|
||||
self._process_data()
|
||||
|
||||
if not self.available:
|
||||
return
|
||||
|
||||
write_ha_state = False
|
||||
for key, updater in (
|
||||
(CONF_APPARENT_TEMPERATURE, self._update_apparent_temperature),
|
||||
(CONF_ATTRIBUTION, self._update_attribution),
|
||||
(CONF_CLOUD_COVERAGE, self._update_coverage),
|
||||
(CONF_CONDITION, self._update_condition),
|
||||
(CONF_DEW_POINT, self._update_dew_point),
|
||||
(CONF_HUMIDITY, self._update_humidity),
|
||||
(CONF_OZONE, self._update_ozone),
|
||||
(CONF_PRESSURE, self._update_pressure),
|
||||
(CONF_TEMPERATURE, self._update_temperature),
|
||||
(CONF_UV_INDEX, self._update_uv_index),
|
||||
(CONF_VISIBILITY, self._update_visibility),
|
||||
(CONF_WIND_BEARING, self._update_wind_bearing),
|
||||
(CONF_WIND_GUST_SPEED, self._update_wind_gust_speed),
|
||||
(CONF_WIND_SPEED, self._update_wind_speed),
|
||||
):
|
||||
if (rendered := self._rendered.get(key)) is not None:
|
||||
updater(rendered)
|
||||
write_ha_state = True
|
||||
|
||||
if write_ha_state:
|
||||
self.async_write_ha_state()
|
||||
|
||||
def _check_forecast(
|
||||
self,
|
||||
forecast_type: Literal["daily", "hourly", "twice_daily"],
|
||||
key: str,
|
||||
) -> list[Forecast]:
|
||||
result = self._rendered.get(key)
|
||||
try:
|
||||
return self._validate_forecast(forecast_type, result) or []
|
||||
except vol.Invalid as err:
|
||||
_LOGGER.error(
|
||||
(
|
||||
"Error validating template result '%s' "
|
||||
"for attribute '%s' in entity %s "
|
||||
"validation message '%s'"
|
||||
),
|
||||
result,
|
||||
key,
|
||||
self.entity_id,
|
||||
err.msg,
|
||||
)
|
||||
return []
|
||||
|
||||
async def async_forecast_daily(self) -> list[Forecast]:
|
||||
"""Return the daily forecast in native units."""
|
||||
return self._check_forecast("daily", CONF_FORECAST_DAILY)
|
||||
|
||||
async def async_forecast_hourly(self) -> list[Forecast]:
|
||||
"""Return the daily forecast in native units."""
|
||||
return self._check_forecast("hourly", CONF_FORECAST_HOURLY)
|
||||
|
||||
async def async_forecast_twice_daily(self) -> list[Forecast]:
|
||||
"""Return the daily forecast in native units."""
|
||||
return self._check_forecast("twice_daily", CONF_FORECAST_TWICE_DAILY)
|
||||
|
||||
@property
|
||||
def extra_restore_state_data(self) -> WeatherExtraStoredData:
|
||||
"""Return weather specific state data to be restored."""
|
||||
|
||||
@@ -10,11 +10,7 @@ from typing import Any, cast
|
||||
import jwt
|
||||
from tesla_fleet_api import TeslaFleetApi
|
||||
from tesla_fleet_api.const import SERVERS
|
||||
from tesla_fleet_api.exceptions import (
|
||||
InvalidResponse,
|
||||
PreconditionFailed,
|
||||
TeslaFleetError,
|
||||
)
|
||||
from tesla_fleet_api.exceptions import PreconditionFailed, TeslaFleetError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
|
||||
@@ -41,12 +37,9 @@ class OAuth2FlowHandler(
|
||||
"""Initialize config flow."""
|
||||
super().__init__()
|
||||
self.domain: str | None = None
|
||||
self.registration_status: dict[str, bool] = {}
|
||||
self.tesla_apis: dict[str, TeslaFleetApi] = {}
|
||||
self.failed_regions: list[str] = []
|
||||
self.data: dict[str, Any] = {}
|
||||
self.uid: str | None = None
|
||||
self.api: TeslaFleetApi | None = None
|
||||
self.apis: list[TeslaFleetApi] = []
|
||||
|
||||
@property
|
||||
def logger(self) -> logging.Logger:
|
||||
@@ -64,7 +57,6 @@ class OAuth2FlowHandler(
|
||||
|
||||
self.data = data
|
||||
self.uid = token["sub"]
|
||||
server = SERVERS[token["ou_code"].lower()]
|
||||
|
||||
await self.async_set_unique_id(self.uid)
|
||||
if self.source == SOURCE_REAUTH:
|
||||
@@ -74,24 +66,28 @@ class OAuth2FlowHandler(
|
||||
)
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
# OAuth done, setup a Partner API connection
|
||||
# OAuth done, setup Partner API connections for all regions
|
||||
implementation = cast(TeslaUserImplementation, self.flow_impl)
|
||||
|
||||
session = async_get_clientsession(self.hass)
|
||||
self.api = TeslaFleetApi(
|
||||
access_token="",
|
||||
session=session,
|
||||
server=server,
|
||||
partner_scope=True,
|
||||
charging_scope=False,
|
||||
energy_scope=False,
|
||||
user_scope=False,
|
||||
vehicle_scope=False,
|
||||
)
|
||||
await self.api.get_private_key(self.hass.config.path("tesla_fleet.key"))
|
||||
await self.api.partner_login(
|
||||
implementation.client_id, implementation.client_secret
|
||||
)
|
||||
|
||||
for region, server_url in SERVERS.items():
|
||||
if region == "cn":
|
||||
continue
|
||||
api = TeslaFleetApi(
|
||||
session=session,
|
||||
access_token="",
|
||||
server=server_url,
|
||||
partner_scope=True,
|
||||
charging_scope=False,
|
||||
energy_scope=False,
|
||||
user_scope=False,
|
||||
vehicle_scope=False,
|
||||
)
|
||||
await api.get_private_key(self.hass.config.path("tesla_fleet.key"))
|
||||
await api.partner_login(
|
||||
implementation.client_id, implementation.client_secret
|
||||
)
|
||||
self.apis.append(api)
|
||||
|
||||
return await self.async_step_domain_input()
|
||||
|
||||
@@ -130,44 +126,67 @@ class OAuth2FlowHandler(
|
||||
async def async_step_domain_registration(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle domain registration for both regions."""
|
||||
"""Handle domain registration for all regions."""
|
||||
|
||||
assert self.api
|
||||
assert self.api.private_key
|
||||
assert self.apis
|
||||
assert self.apis[0].private_key
|
||||
assert self.domain
|
||||
|
||||
errors = {}
|
||||
errors: dict[str, str] = {}
|
||||
description_placeholders = {
|
||||
"public_key_url": f"https://{self.domain}/.well-known/appspecific/com.tesla.3p.public-key.pem",
|
||||
"pem": self.api.public_pem,
|
||||
"pem": self.apis[0].public_pem,
|
||||
}
|
||||
|
||||
try:
|
||||
register_response = await self.api.partner.register(self.domain)
|
||||
except PreconditionFailed:
|
||||
return await self.async_step_domain_input(
|
||||
errors={CONF_DOMAIN: "precondition_failed"}
|
||||
)
|
||||
except InvalidResponse:
|
||||
successful_response: dict[str, Any] | None = None
|
||||
failed_regions: list[str] = []
|
||||
|
||||
for api in self.apis:
|
||||
try:
|
||||
register_response = await api.partner.register(self.domain)
|
||||
except PreconditionFailed:
|
||||
return await self.async_step_domain_input(
|
||||
errors={CONF_DOMAIN: "precondition_failed"}
|
||||
)
|
||||
except TeslaFleetError as e:
|
||||
LOGGER.warning(
|
||||
"Partner registration failed for %s: %s",
|
||||
api.server,
|
||||
e.message,
|
||||
)
|
||||
failed_regions.append(api.server or "unknown")
|
||||
else:
|
||||
if successful_response is None:
|
||||
successful_response = register_response
|
||||
|
||||
if successful_response is None:
|
||||
errors["base"] = "invalid_response"
|
||||
except TeslaFleetError as e:
|
||||
errors["base"] = "unknown_error"
|
||||
description_placeholders["error"] = e.message
|
||||
else:
|
||||
# Get public key from response
|
||||
registered_public_key = register_response.get("response", {}).get(
|
||||
"public_key"
|
||||
return self.async_show_form(
|
||||
step_id="domain_registration",
|
||||
description_placeholders=description_placeholders,
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
if not registered_public_key:
|
||||
errors["base"] = "public_key_not_found"
|
||||
elif (
|
||||
registered_public_key.lower()
|
||||
!= self.api.public_uncompressed_point.lower()
|
||||
):
|
||||
errors["base"] = "public_key_mismatch"
|
||||
else:
|
||||
return await self.async_step_registration_complete()
|
||||
if failed_regions:
|
||||
LOGGER.warning(
|
||||
"Partner registration succeeded on some regions but failed on: %s",
|
||||
", ".join(failed_regions),
|
||||
)
|
||||
|
||||
# Verify public key from the successful response
|
||||
registered_public_key = successful_response.get("response", {}).get(
|
||||
"public_key"
|
||||
)
|
||||
|
||||
if not registered_public_key:
|
||||
errors["base"] = "public_key_not_found"
|
||||
elif (
|
||||
registered_public_key.lower()
|
||||
!= self.apis[0].public_uncompressed_point.lower()
|
||||
):
|
||||
errors["base"] = "public_key_mismatch"
|
||||
else:
|
||||
return await self.async_step_registration_complete()
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="domain_registration",
|
||||
|
||||
@@ -87,15 +87,24 @@ async def _get_access_token(oauth_session: OAuth2Session) -> str:
|
||||
await oauth_session.async_ensure_token_valid()
|
||||
except ClientResponseError as err:
|
||||
if err.status == 401:
|
||||
raise ConfigEntryAuthFailed from err
|
||||
raise ConfigEntryNotReady from err
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="auth_failed",
|
||||
) from err
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="not_ready_connection_error",
|
||||
) from err
|
||||
except (KeyError, TypeError) as err:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="token_data_malformed",
|
||||
) from err
|
||||
except ClientError as err:
|
||||
raise ConfigEntryNotReady from err
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="not_ready_connection_error",
|
||||
) from err
|
||||
return oauth_session.token[CONF_ACCESS_TOKEN]
|
||||
|
||||
|
||||
@@ -131,11 +140,20 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) -
|
||||
teslemetry.products(),
|
||||
)
|
||||
except InvalidToken as e:
|
||||
raise ConfigEntryAuthFailed from e
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="auth_failed_invalid_token",
|
||||
) from e
|
||||
except SubscriptionRequired as e:
|
||||
raise ConfigEntryAuthFailed from e
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="auth_failed_subscription_required",
|
||||
) from e
|
||||
except TeslaFleetError as e:
|
||||
raise ConfigEntryNotReady from e
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="not_ready_api_error",
|
||||
) from e
|
||||
|
||||
scopes = calls[0]["scopes"]
|
||||
region = calls[0]["region"]
|
||||
@@ -242,10 +260,26 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) -
|
||||
# Check live status endpoint works before creating its coordinator
|
||||
try:
|
||||
live_status = (await energy_site.live_status())["response"]
|
||||
except (InvalidToken, Forbidden, SubscriptionRequired) as e:
|
||||
raise ConfigEntryAuthFailed from e
|
||||
except InvalidToken as e:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="auth_failed_invalid_token",
|
||||
) from e
|
||||
except SubscriptionRequired as e:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="auth_failed_subscription_required",
|
||||
) from e
|
||||
except Forbidden as e:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="auth_failed_invalid_token",
|
||||
) from e
|
||||
except TeslaFleetError as e:
|
||||
raise ConfigEntryNotReady(e.message) from e
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="not_ready_api_error",
|
||||
) from e
|
||||
|
||||
energysites.append(
|
||||
TeslemetryEnergyData(
|
||||
@@ -345,7 +379,10 @@ async def async_migrate_entry(
|
||||
CLIENT_ID, hass.config.location_name
|
||||
)
|
||||
except (ClientError, TypeError) as e:
|
||||
raise ConfigEntryAuthFailed from e
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="auth_failed_migration",
|
||||
) from e
|
||||
|
||||
# Add auth_implementation for OAuth2 flow compatibility
|
||||
data["auth_implementation"] = DOMAIN
|
||||
|
||||
@@ -24,7 +24,7 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
|
||||
if TYPE_CHECKING:
|
||||
from . import TeslemetryConfigEntry
|
||||
|
||||
from .const import ENERGY_HISTORY_FIELDS, LOGGER
|
||||
from .const import DOMAIN, ENERGY_HISTORY_FIELDS, LOGGER
|
||||
from .helpers import flatten
|
||||
|
||||
RETRY_EXCEPTIONS = (
|
||||
@@ -94,9 +94,16 @@ class TeslemetryVehicleDataCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
except (InvalidToken, SubscriptionRequired) as e:
|
||||
raise ConfigEntryAuthFailed from e
|
||||
except RETRY_EXCEPTIONS as e:
|
||||
raise UpdateFailed(e.message, retry_after=_get_retry_after(e)) from e
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_failed",
|
||||
retry_after=_get_retry_after(e),
|
||||
) from e
|
||||
except TeslaFleetError as e:
|
||||
raise UpdateFailed(e.message) from e
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_failed",
|
||||
) from e
|
||||
return flatten(data)
|
||||
|
||||
|
||||
@@ -136,9 +143,16 @@ class TeslemetryEnergySiteLiveCoordinator(DataUpdateCoordinator[dict[str, Any]])
|
||||
except (InvalidToken, SubscriptionRequired) as e:
|
||||
raise ConfigEntryAuthFailed from e
|
||||
except RETRY_EXCEPTIONS as e:
|
||||
raise UpdateFailed(e.message, retry_after=_get_retry_after(e)) from e
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_failed",
|
||||
retry_after=_get_retry_after(e),
|
||||
) from e
|
||||
except TeslaFleetError as e:
|
||||
raise UpdateFailed(e.message) from e
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_failed",
|
||||
) from e
|
||||
# Convert Wall Connectors from array to dict
|
||||
data["wall_connectors"] = {
|
||||
wc["din"]: wc for wc in (data.get("wall_connectors") or [])
|
||||
@@ -176,9 +190,16 @@ class TeslemetryEnergySiteInfoCoordinator(DataUpdateCoordinator[dict[str, Any]])
|
||||
except (InvalidToken, SubscriptionRequired) as e:
|
||||
raise ConfigEntryAuthFailed from e
|
||||
except RETRY_EXCEPTIONS as e:
|
||||
raise UpdateFailed(e.message, retry_after=_get_retry_after(e)) from e
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_failed",
|
||||
retry_after=_get_retry_after(e),
|
||||
) from e
|
||||
except TeslaFleetError as e:
|
||||
raise UpdateFailed(e.message) from e
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_failed",
|
||||
) from e
|
||||
return flatten(data)
|
||||
|
||||
|
||||
@@ -211,12 +232,22 @@ class TeslemetryEnergyHistoryCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
except (InvalidToken, SubscriptionRequired) as e:
|
||||
raise ConfigEntryAuthFailed from e
|
||||
except RETRY_EXCEPTIONS as e:
|
||||
raise UpdateFailed(e.message, retry_after=_get_retry_after(e)) from e
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_failed",
|
||||
retry_after=_get_retry_after(e),
|
||||
) from e
|
||||
except TeslaFleetError as e:
|
||||
raise UpdateFailed(e.message) from e
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_failed",
|
||||
) from e
|
||||
|
||||
if not data or not isinstance(data.get("time_series"), list):
|
||||
raise UpdateFailed("Received invalid data")
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_failed_invalid_data",
|
||||
)
|
||||
|
||||
# Add all time periods together
|
||||
output = dict.fromkeys(ENERGY_HISTORY_FIELDS, None)
|
||||
|
||||
@@ -173,7 +173,6 @@
|
||||
"default": "mdi:ev-plug-ccs2"
|
||||
}
|
||||
},
|
||||
|
||||
"device_tracker": {
|
||||
"location": {
|
||||
"default": "mdi:map-marker"
|
||||
@@ -196,6 +195,38 @@
|
||||
"default": "mdi:car-speed-limiter"
|
||||
}
|
||||
},
|
||||
"number": {
|
||||
"backup_reserve_percent": {
|
||||
"default": "mdi:battery-outline",
|
||||
"range": {
|
||||
"10": "mdi:battery-10",
|
||||
"20": "mdi:battery-20",
|
||||
"30": "mdi:battery-30",
|
||||
"40": "mdi:battery-40",
|
||||
"50": "mdi:battery-50",
|
||||
"60": "mdi:battery-60",
|
||||
"70": "mdi:battery-70",
|
||||
"80": "mdi:battery-80",
|
||||
"90": "mdi:battery-90",
|
||||
"100": "mdi:battery"
|
||||
}
|
||||
},
|
||||
"off_grid_vehicle_charging_reserve_percent": {
|
||||
"default": "mdi:battery-outline",
|
||||
"range": {
|
||||
"10": "mdi:battery-10",
|
||||
"20": "mdi:battery-20",
|
||||
"30": "mdi:battery-30",
|
||||
"40": "mdi:battery-40",
|
||||
"50": "mdi:battery-50",
|
||||
"60": "mdi:battery-60",
|
||||
"70": "mdi:battery-70",
|
||||
"80": "mdi:battery-80",
|
||||
"90": "mdi:battery-90",
|
||||
"100": "mdi:battery"
|
||||
}
|
||||
}
|
||||
},
|
||||
"select": {
|
||||
"climate_state_seat_heater_left": {
|
||||
"default": "mdi:car-seat-heater",
|
||||
|
||||
@@ -27,7 +27,6 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.icon import icon_for_battery_level
|
||||
|
||||
from . import TeslemetryConfigEntry
|
||||
from .entity import (
|
||||
@@ -296,7 +295,6 @@ class TeslemetryEnergyInfoNumberSensorEntity(TeslemetryEnergyInfoEntity, NumberE
|
||||
def _async_update_attrs(self) -> None:
|
||||
"""Update the attributes of the entity."""
|
||||
self._attr_native_value = self._value
|
||||
self._attr_icon = icon_for_battery_level(self.native_value)
|
||||
|
||||
async def async_set_native_value(self, value: float) -> None:
|
||||
"""Set new value."""
|
||||
|
||||
@@ -66,18 +66,8 @@ rules:
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations:
|
||||
status: todo
|
||||
comment: |
|
||||
ConfigEntryAuthFailed and UpdateFailed exceptions can have translated messages.
|
||||
Also one "unknown error" that cannot be translated.
|
||||
icon-translations:
|
||||
status: todo
|
||||
comment: |
|
||||
number.py:299 uses _attr_icon = icon_for_battery_level() instead of
|
||||
range-based icons in icons.json. Affects backup_reserve_percent and
|
||||
off_grid_vehicle_charging_reserve_percent entities. Remove the dynamic
|
||||
icon assignment and add range-based icon entries to icons.json.
|
||||
exception-translations: done
|
||||
icon-translations: done
|
||||
reconfiguration-flow: done
|
||||
repair-issues:
|
||||
status: exempt
|
||||
|
||||
@@ -1090,6 +1090,18 @@
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"auth_failed": {
|
||||
"message": "Authentication failed, please reauthenticate"
|
||||
},
|
||||
"auth_failed_invalid_token": {
|
||||
"message": "Access token is invalid, please reauthenticate"
|
||||
},
|
||||
"auth_failed_migration": {
|
||||
"message": "Failed to migrate to OAuth, please reauthenticate"
|
||||
},
|
||||
"auth_failed_subscription_required": {
|
||||
"message": "Teslemetry subscription required"
|
||||
},
|
||||
"command_error": {
|
||||
"message": "Command returned error: {error}"
|
||||
},
|
||||
@@ -1126,6 +1138,12 @@
|
||||
"no_vehicle_data_for_device": {
|
||||
"message": "No vehicle data for device ID: {device_id}"
|
||||
},
|
||||
"not_ready_api_error": {
|
||||
"message": "Error communicating with Teslemetry API"
|
||||
},
|
||||
"not_ready_connection_error": {
|
||||
"message": "Unable to connect to Teslemetry"
|
||||
},
|
||||
"oauth_implementation_not_available": {
|
||||
"message": "OAuth implementation not available, try reauthenticating"
|
||||
},
|
||||
@@ -1141,6 +1159,12 @@
|
||||
"token_data_malformed": {
|
||||
"message": "Token data malformed, try reauthenticating"
|
||||
},
|
||||
"update_failed": {
|
||||
"message": "Error fetching data from Teslemetry API"
|
||||
},
|
||||
"update_failed_invalid_data": {
|
||||
"message": "Received invalid data from API"
|
||||
},
|
||||
"wake_up_failed": {
|
||||
"message": "Failed to wake up vehicle: {message}"
|
||||
},
|
||||
|
||||
@@ -93,4 +93,7 @@ COLLABORATORS: Final = "collaborators"
|
||||
|
||||
DOMAIN: Final = "todoist"
|
||||
|
||||
# Maximum number of items per page for Todoist API requests
|
||||
MAX_PAGE_SIZE: Final = 200
|
||||
|
||||
SERVICE_NEW_TASK: Final = "new_task"
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"""DataUpdateCoordinator for the Todoist component."""
|
||||
|
||||
import asyncio
|
||||
from collections.abc import AsyncGenerator
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
@@ -12,6 +13,8 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import MAX_PAGE_SIZE
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
|
||||
@@ -53,26 +56,30 @@ class TodoistCoordinator(DataUpdateCoordinator[list[Task]]):
|
||||
async def _async_update_data(self) -> list[Task]:
|
||||
"""Fetch tasks from the Todoist API."""
|
||||
try:
|
||||
tasks_async = await self.api.get_tasks()
|
||||
tasks_async = await self.api.get_tasks(limit=MAX_PAGE_SIZE)
|
||||
return await flatten_async_pages(tasks_async)
|
||||
except asyncio.CancelledError:
|
||||
raise
|
||||
except Exception as err:
|
||||
raise UpdateFailed(f"Error communicating with API: {err}") from err
|
||||
return await flatten_async_pages(tasks_async)
|
||||
|
||||
async def async_get_projects(self) -> list[Project]:
|
||||
"""Return todoist projects fetched at most once."""
|
||||
if self._projects is None:
|
||||
projects_async = await self.api.get_projects()
|
||||
projects_async = await self.api.get_projects(limit=MAX_PAGE_SIZE)
|
||||
self._projects = await flatten_async_pages(projects_async)
|
||||
return self._projects
|
||||
|
||||
async def async_get_sections(self, project_id: str) -> list[Section]:
|
||||
"""Return todoist sections for a given project ID."""
|
||||
sections_async = await self.api.get_sections(project_id=project_id)
|
||||
sections_async = await self.api.get_sections(
|
||||
project_id=project_id, limit=MAX_PAGE_SIZE
|
||||
)
|
||||
return await flatten_async_pages(sections_async)
|
||||
|
||||
async def async_get_labels(self) -> list[Label]:
|
||||
"""Return todoist labels fetched at most once."""
|
||||
if self._labels is None:
|
||||
labels_async = await self.api.get_labels()
|
||||
labels_async = await self.api.get_labels(limit=MAX_PAGE_SIZE)
|
||||
self._labels = await flatten_async_pages(labels_async)
|
||||
return self._labels
|
||||
|
||||
@@ -32,6 +32,8 @@ from . import OmadaConfigEntry
|
||||
from .controller import OmadaGatewayCoordinator
|
||||
from .entity import OmadaDeviceEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
"""Connected Wi-Fi device scanners for TP-Link Omada access points."""
|
||||
|
||||
import logging
|
||||
|
||||
from tplink_omada_client.clients import OmadaWirelessClient
|
||||
|
||||
from homeassistant.components.device_tracker import ScannerEntity
|
||||
@@ -13,7 +11,7 @@ from . import OmadaConfigEntry
|
||||
from .config_flow import CONF_SITE
|
||||
from .controller import OmadaClientsCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
|
||||
@@ -37,7 +37,7 @@ rules:
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: todo
|
||||
parallel-updates: done
|
||||
reauthentication-flow: done
|
||||
test-coverage: todo
|
||||
|
||||
|
||||
@@ -24,6 +24,8 @@ from .const import OmadaDeviceStatus
|
||||
from .coordinator import OmadaDevicesCoordinator
|
||||
from .entity import OmadaDeviceEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
# Useful low level status categories, mapped to a more descriptive status.
|
||||
DEVICE_STATUS_MAP = {
|
||||
DeviceStatus.PROVISIONING: OmadaDeviceStatus.PENDING,
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user