Mirror of https://github.com/home-assistant/core.git (synced 2026-02-07 15:46:19 +01:00)

Compare commits (42 commits): epenet/202...strings/ma
| Author | SHA1 | Date |
|---|---|---|
| | b955cf6f3d | |
| | b1be3fe0da | |
| | 97a7ab011b | |
| | 694a3050b9 | |
| | 8164e65188 | |
| | 9af0d1eed4 | |
| | 72e6ca55ba | |
| | 0fb62a7e97 | |
| | 930eb70a8b | |
| | 462104fa68 | |
| | d0c77d8a7e | |
| | 606780b20f | |
| | 8f465cf2ca | |
| | 4e29476dd9 | |
| | b4328083be | |
| | 72ba59f559 | |
| | 826168b601 | |
| | 66f181992c | |
| | 336ef4c37b | |
| | 72e7bf7f9c | |
| | acbdbc9be7 | |
| | 3551382f8d | |
| | 95014d7e6d | |
| | dfe1990484 | |
| | 15ff5d0f74 | |
| | 1407f61a9c | |
| | 6107b794d6 | |
| | 7ab8ceab7e | |
| | a4db6a9ebc | |
| | 12a2650b6b | |
| | 23da7ecedd | |
| | 8d9e7b0b26 | |
| | 9664047345 | |
| | 804fbf9cef | |
| | e10fe074c9 | |
| | 7b0e21da74 | |
| | 29e142cf1e | |
| | 6b765ebabb | |
| | 899aa62697 | |
| | a7cc4e1282 | |
| | c6aed73d2b | |
| | c019331de1 | |
CODEOWNERS (generated, 4 changed lines)

@@ -921,6 +921,8 @@ build.json @home-assistant/supervisor
/tests/components/libre_hardware_monitor/ @Sab44
/homeassistant/components/lidarr/ @tkdrob
/tests/components/lidarr/ @tkdrob
/homeassistant/components/liebherr/ @mettolen
/tests/components/liebherr/ @mettolen
/homeassistant/components/lifx/ @Djelibeybi
/tests/components/lifx/ @Djelibeybi
/homeassistant/components/light/ @home-assistant/core
@@ -1878,6 +1880,8 @@ build.json @home-assistant/supervisor
/tests/components/worldclock/ @fabaff
/homeassistant/components/ws66i/ @ssaenger
/tests/components/ws66i/ @ssaenger
/homeassistant/components/wsdot/ @ucodery
/tests/components/wsdot/ @ucodery
/homeassistant/components/wyoming/ @synesthesiam
/tests/components/wyoming/ @synesthesiam
/homeassistant/components/xbox/ @hunterjm @tr4nt0r
@@ -158,15 +158,15 @@
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
"any": "[%key:common::selector::trigger_behavior::options::any%]",
|
||||
"first": "[%key:common::selector::trigger_behavior::options::first%]",
|
||||
"last": "[%key:common::selector::trigger_behavior::options::last%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"alarm_arm_away": {
|
||||
"description": "Arms the alarm in the away mode.",
|
||||
"description": "Arms an alarm in the away mode.",
|
||||
"fields": {
|
||||
"code": {
|
||||
"description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",
|
||||
@@ -176,7 +176,7 @@
|
||||
"name": "Arm away"
|
||||
},
|
||||
"alarm_arm_custom_bypass": {
|
||||
"description": "Arms the alarm while allowing to bypass a custom area.",
|
||||
"description": "Arms an alarm while allowing to bypass a custom area.",
|
||||
"fields": {
|
||||
"code": {
|
||||
"description": "Code to arm the alarm.",
|
||||
@@ -186,7 +186,7 @@
|
||||
"name": "Arm with custom bypass"
|
||||
},
|
||||
"alarm_arm_home": {
|
||||
"description": "Arms the alarm in the home mode.",
|
||||
"description": "Arms an alarm in the home mode.",
|
||||
"fields": {
|
||||
"code": {
|
||||
"description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",
|
||||
@@ -196,7 +196,7 @@
|
||||
"name": "Arm home"
|
||||
},
|
||||
"alarm_arm_night": {
|
||||
"description": "Arms the alarm in the night mode.",
|
||||
"description": "Arms an alarm in the night mode.",
|
||||
"fields": {
|
||||
"code": {
|
||||
"description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",
|
||||
@@ -206,7 +206,7 @@
|
||||
"name": "Arm night"
|
||||
},
|
||||
"alarm_arm_vacation": {
|
||||
"description": "Arms the alarm in the vacation mode.",
|
||||
"description": "Arms an alarm in the vacation mode.",
|
||||
"fields": {
|
||||
"code": {
|
||||
"description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",
|
||||
@@ -216,7 +216,7 @@
|
||||
"name": "Arm vacation"
|
||||
},
|
||||
"alarm_disarm": {
|
||||
"description": "Disarms the alarm.",
|
||||
"description": "Disarms an alarm.",
|
||||
"fields": {
|
||||
"code": {
|
||||
"description": "Code to disarm the alarm.",
|
||||
@@ -226,7 +226,7 @@
|
||||
"name": "Disarm"
|
||||
},
|
||||
"alarm_trigger": {
|
||||
"description": "Triggers the alarm manually.",
|
||||
"description": "Triggers an alarm manually.",
|
||||
"fields": {
|
||||
"code": {
|
||||
"description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",
|
||||
|
||||
@@ -73,9 +73,9 @@
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
"any": "[%key:common::selector::trigger_behavior::options::any%]",
|
||||
"first": "[%key:common::selector::trigger_behavior::options::first%]",
|
||||
"last": "[%key:common::selector::trigger_behavior::options::last%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/bang_olufsen",
"integration_type": "device",
"iot_class": "local_push",
"requirements": ["mozart-api==5.3.1.108.0"],
"requirements": ["mozart-api==5.3.1.108.2"],
"zeroconf": ["_bangolufsen._tcp.local."]
}
@@ -8,6 +8,7 @@ from datetime import timedelta
|
||||
import json
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any, cast
|
||||
from uuid import UUID
|
||||
|
||||
from aiohttp import ClientConnectorError
|
||||
from mozart_api import __version__ as MOZART_API_VERSION
|
||||
@@ -735,7 +736,7 @@ class BeoMediaPlayer(BeoEntity, MediaPlayerEntity):
|
||||
await self._client.set_active_source(source_id=key)
|
||||
else:
|
||||
# Video
|
||||
await self._client.post_remote_trigger(id=key)
|
||||
await self._client.post_remote_trigger(id=UUID(key))
|
||||
|
||||
async def async_select_sound_mode(self, sound_mode: str) -> None:
|
||||
"""Select a sound mode."""
|
||||
@@ -894,7 +895,7 @@ class BeoMediaPlayer(BeoEntity, MediaPlayerEntity):
|
||||
translation_key="play_media_error",
|
||||
translation_placeholders={
|
||||
"media_type": media_type,
|
||||
"error_message": json.loads(error.body)["message"],
|
||||
"error_message": json.loads(cast(str, error.body))["message"],
|
||||
},
|
||||
) from error
|
||||
|
||||
|
||||
@@ -324,9 +324,9 @@
|
||||
"selector": {
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
"any": "[%key:common::selector::trigger_behavior::options::any%]",
|
||||
"first": "[%key:common::selector::trigger_behavior::options::first%]",
|
||||
"last": "[%key:common::selector::trigger_behavior::options::last%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -260,9 +260,9 @@
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
"any": "[%key:common::selector::trigger_behavior::options::any%]",
|
||||
"first": "[%key:common::selector::trigger_behavior::options::first%]",
|
||||
"last": "[%key:common::selector::trigger_behavior::options::last%]"
|
||||
}
|
||||
},
|
||||
"trigger_threshold_type": {
|
||||
|
||||
@@ -3,9 +3,8 @@
|
||||
import logging
|
||||
|
||||
from datadog import DogStatsd, initialize
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
CONF_PORT,
|
||||
@@ -16,53 +15,15 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv, state as state_helper
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from . import config_flow as config_flow
|
||||
from .const import (
|
||||
CONF_RATE,
|
||||
DEFAULT_HOST,
|
||||
DEFAULT_PORT,
|
||||
DEFAULT_PREFIX,
|
||||
DEFAULT_RATE,
|
||||
DOMAIN,
|
||||
)
|
||||
from .const import CONF_RATE, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type DatadogConfigEntry = ConfigEntry[DogStatsd]
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
DOMAIN: vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string,
|
||||
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
|
||||
vol.Optional(CONF_PREFIX, default=DEFAULT_PREFIX): cv.string,
|
||||
vol.Optional(CONF_RATE, default=DEFAULT_RATE): vol.All(
|
||||
vol.Coerce(int), vol.Range(min=1)
|
||||
),
|
||||
}
|
||||
)
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Datadog integration from YAML, initiating config flow import."""
|
||||
if DOMAIN not in config:
|
||||
return True
|
||||
|
||||
hass.async_create_task(
|
||||
hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": SOURCE_IMPORT},
|
||||
data=config[DOMAIN],
|
||||
)
|
||||
)
|
||||
|
||||
return True
|
||||
CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: DatadogConfigEntry) -> bool:
|
||||
|
||||
@@ -12,8 +12,7 @@ from homeassistant.config_entries import (
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.const import CONF_HOST, CONF_PORT, CONF_PREFIX
|
||||
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
|
||||
from .const import (
|
||||
CONF_RATE,
|
||||
@@ -71,22 +70,6 @@ class DatadogConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult:
|
||||
"""Handle import from configuration.yaml."""
|
||||
# Check for duplicates
|
||||
self._async_abort_entries_match(
|
||||
{CONF_HOST: user_input[CONF_HOST], CONF_PORT: user_input[CONF_PORT]}
|
||||
)
|
||||
|
||||
result = await self.async_step_user(user_input)
|
||||
|
||||
if errors := result.get("errors"):
|
||||
await deprecate_yaml_issue(self.hass, False)
|
||||
return self.async_abort(reason=errors["base"])
|
||||
|
||||
await deprecate_yaml_issue(self.hass, True)
|
||||
return result
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow:
|
||||
@@ -163,41 +146,3 @@ async def validate_datadog_connection(
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
async def deprecate_yaml_issue(
|
||||
hass: HomeAssistant,
|
||||
import_success: bool,
|
||||
) -> None:
|
||||
"""Create an issue to deprecate YAML config."""
|
||||
if import_success:
|
||||
async_create_issue(
|
||||
hass,
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
f"deprecated_yaml_{DOMAIN}",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
breaks_in_ha_version="2026.2.0",
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_yaml",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "Datadog",
|
||||
},
|
||||
)
|
||||
else:
|
||||
async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"deprecated_yaml_import_connection_error",
|
||||
breaks_in_ha_version="2026.2.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_yaml_import_connection_error",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "Datadog",
|
||||
"url": f"/config/integrations/dashboard/add?domain={DOMAIN}",
|
||||
},
|
||||
)
|
||||
|
||||
@@ -25,12 +25,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"deprecated_yaml_import_connection_error": {
|
||||
"description": "There was an error connecting to the Datadog Agent when trying to import the YAML configuration.\n\nEnsure the YAML configuration is correct and restart Home Assistant to try again or remove the {domain} configuration from your `configuration.yaml` file and continue to [set up the integration]({url}) manually.",
|
||||
"title": "{domain} YAML configuration import failed"
|
||||
}
|
||||
},
|
||||
"options": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
|
||||
|
||||
@@ -7,10 +7,7 @@ import logging
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_SOURCE, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device import (
|
||||
async_entity_id_to_device_id,
|
||||
async_remove_stale_devices_links_keep_entity_device,
|
||||
)
|
||||
from homeassistant.helpers.device import async_entity_id_to_device_id
|
||||
from homeassistant.helpers.helper_integration import (
|
||||
async_handle_source_entity_changes,
|
||||
async_remove_helper_config_entry_from_source_device,
|
||||
@@ -22,11 +19,6 @@ _LOGGER = logging.getLogger(__name__)
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up Derivative from a config entry."""
|
||||
|
||||
# This can be removed in HA Core 2026.2
|
||||
async_remove_stale_devices_links_keep_entity_device(
|
||||
hass, entry.entry_id, entry.options[CONF_SOURCE]
|
||||
)
|
||||
|
||||
def set_source_entity_id_or_uuid(source_entity_id: str) -> None:
|
||||
hass.config_entries.async_update_entry(
|
||||
entry,
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
"""The Dexcom integration."""
|
||||
|
||||
from pydexcom import AccountError, Dexcom, SessionError
|
||||
from pydexcom import Dexcom, Region
|
||||
from pydexcom.errors import AccountError, SessionError
|
||||
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -14,10 +15,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: DexcomConfigEntry) -> bo
|
||||
"""Set up Dexcom from a config entry."""
|
||||
try:
|
||||
dexcom = await hass.async_add_executor_job(
|
||||
Dexcom,
|
||||
entry.data[CONF_USERNAME],
|
||||
entry.data[CONF_PASSWORD],
|
||||
entry.data[CONF_SERVER] == SERVER_OUS,
|
||||
lambda: Dexcom(
|
||||
username=entry.data[CONF_USERNAME],
|
||||
password=entry.data[CONF_PASSWORD],
|
||||
region=Region.OUS
|
||||
if entry.data[CONF_SERVER] == SERVER_OUS
|
||||
else Region.US,
|
||||
)
|
||||
)
|
||||
except AccountError:
|
||||
return False
|
||||
|
||||
@@ -5,7 +5,8 @@ from __future__ import annotations
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pydexcom import AccountError, Dexcom, SessionError
|
||||
from pydexcom import Dexcom, Region
|
||||
from pydexcom.errors import AccountError, SessionError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
@@ -37,10 +38,13 @@ class DexcomConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if user_input is not None:
|
||||
try:
|
||||
await self.hass.async_add_executor_job(
|
||||
Dexcom,
|
||||
user_input[CONF_USERNAME],
|
||||
user_input[CONF_PASSWORD],
|
||||
user_input[CONF_SERVER] == SERVER_OUS,
|
||||
lambda: Dexcom(
|
||||
username=user_input[CONF_USERNAME],
|
||||
password=user_input[CONF_PASSWORD],
|
||||
region=Region.OUS
|
||||
if user_input[CONF_SERVER] == SERVER_OUS
|
||||
else Region.US,
|
||||
)
|
||||
)
|
||||
except SessionError:
|
||||
errors["base"] = "cannot_connect"
|
||||
|
||||
@@ -18,7 +18,7 @@ _SCAN_INTERVAL = timedelta(seconds=180)
|
||||
type DexcomConfigEntry = ConfigEntry[DexcomCoordinator]
|
||||
|
||||
|
||||
class DexcomCoordinator(DataUpdateCoordinator[GlucoseReading]):
|
||||
class DexcomCoordinator(DataUpdateCoordinator[GlucoseReading | None]):
|
||||
"""Dexcom Coordinator."""
|
||||
|
||||
def __init__(
|
||||
@@ -37,7 +37,7 @@ class DexcomCoordinator(DataUpdateCoordinator[GlucoseReading]):
|
||||
)
|
||||
self.dexcom = dexcom
|
||||
|
||||
async def _async_update_data(self) -> GlucoseReading:
|
||||
async def _async_update_data(self) -> GlucoseReading | None:
|
||||
"""Fetch data from API endpoint."""
|
||||
return await self.hass.async_add_executor_job(
|
||||
self.dexcom.get_current_glucose_reading
|
||||
|
||||
@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["pydexcom"],
"requirements": ["pydexcom==0.2.3"]
"requirements": ["pydexcom==0.5.1"]
}
@@ -103,9 +103,9 @@
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
"any": "[%key:common::selector::trigger_behavior::options::any%]",
|
||||
"first": "[%key:common::selector::trigger_behavior::options::first%]",
|
||||
"last": "[%key:common::selector::trigger_behavior::options::last%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -9,7 +9,7 @@
"iot_class": "local_polling",
"loggers": ["fritzconnection"],
"quality_scale": "bronze",
"requirements": ["fritzconnection[qr]==1.15.0", "xmltodict==1.0.2"],
"requirements": ["fritzconnection[qr]==1.15.1", "xmltodict==1.0.2"],
"ssdp": [
{
"st": "urn:schemas-upnp-org:device:fritzbox:1"
@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["fritzconnection"],
"requirements": ["fritzconnection[qr]==1.15.0"]
"requirements": ["fritzconnection[qr]==1.15.1"]
}
@@ -35,11 +35,11 @@
|
||||
},
|
||||
"services": {
|
||||
"decrement": {
|
||||
"description": "Decrements the current value by 1 step.",
|
||||
"description": "Decrements the value of an input number by 1 step.",
|
||||
"name": "Decrement"
|
||||
},
|
||||
"increment": {
|
||||
"description": "Increments the current value by 1 step.",
|
||||
"description": "Increments the value of an input number by 1 step.",
|
||||
"name": "Increment"
|
||||
},
|
||||
"reload": {
|
||||
@@ -47,7 +47,7 @@
|
||||
"name": "[%key:common::action::reload%]"
|
||||
},
|
||||
"set_value": {
|
||||
"description": "Sets the value.",
|
||||
"description": "Sets the value of an input number.",
|
||||
"fields": {
|
||||
"value": {
|
||||
"description": "The target value.",
|
||||
|
||||
@@ -7,10 +7,7 @@ import logging
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device import (
|
||||
async_entity_id_to_device_id,
|
||||
async_remove_stale_devices_links_keep_entity_device,
|
||||
)
|
||||
from homeassistant.helpers.device import async_entity_id_to_device_id
|
||||
from homeassistant.helpers.helper_integration import (
|
||||
async_handle_source_entity_changes,
|
||||
async_remove_helper_config_entry_from_source_device,
|
||||
@@ -24,13 +21,6 @@ _LOGGER = logging.getLogger(__name__)
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up Integration from a config entry."""
|
||||
|
||||
# This can be removed in HA Core 2026.2
|
||||
async_remove_stale_devices_links_keep_entity_device(
|
||||
hass,
|
||||
entry.entry_id,
|
||||
entry.options[CONF_SOURCE_SENSOR],
|
||||
)
|
||||
|
||||
def set_source_entity_id_or_uuid(source_entity_id: str) -> None:
|
||||
hass.config_entries.async_update_entry(
|
||||
entry,
|
||||
|
||||
@@ -76,7 +76,7 @@ async def async_migrate_entities(
|
||||
def _update_entry(entry: RegistryEntry) -> dict[str, str] | None:
|
||||
"""Fix unique_id of power binary_sensor entry."""
|
||||
if entry.domain == Platform.BINARY_SENSOR and ":" not in entry.unique_id:
|
||||
if "_power" in entry.unique_id:
|
||||
if entry.unique_id.endswith("_power"):
|
||||
return {"new_unique_id": f"{coordinator.unique_id}_power"}
|
||||
return None
|
||||
|
||||
|
||||
@@ -8,7 +8,6 @@ from homeassistant.components.binary_sensor import BinarySensorEntity
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import POWER
|
||||
from .coordinator import JVCConfigEntry, JvcProjectorDataUpdateCoordinator
|
||||
from .entity import JvcProjectorEntity
|
||||
|
||||
@@ -41,4 +40,4 @@ class JvcBinarySensor(JvcProjectorEntity, BinarySensorEntity):
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if the JVC Projector is on."""
|
||||
return self.coordinator.data[POWER] in ON_STATUS
|
||||
return self.coordinator.data[cmd.Power.name] in ON_STATUS
|
||||
|
||||
@@ -3,7 +3,3 @@
NAME = "JVC Projector"
DOMAIN = "jvc_projector"
MANUFACTURER = "JVC"

POWER = "power"
INPUT = "input"
SOURCE = "source"
@@ -2,29 +2,40 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from jvcprojector import (
|
||||
JvcProjector,
|
||||
JvcProjectorAuthError,
|
||||
JvcProjectorTimeoutError,
|
||||
command as cmd,
|
||||
)
|
||||
from jvcprojector import JvcProjector, JvcProjectorTimeoutError, command as cmd
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import INPUT, NAME, POWER
|
||||
from .const import NAME
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from jvcprojector import Command
|
||||
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
INTERVAL_SLOW = timedelta(seconds=10)
|
||||
INTERVAL_FAST = timedelta(seconds=5)
|
||||
|
||||
CORE_COMMANDS: tuple[type[Command], ...] = (
|
||||
cmd.Power,
|
||||
cmd.Signal,
|
||||
cmd.Input,
|
||||
cmd.LightTime,
|
||||
)
|
||||
|
||||
TRANSLATIONS = str.maketrans({"+": "p", "%": "p", ":": "x"})
|
||||
|
||||
TIMEOUT_RETRIES = 12
|
||||
TIMEOUT_SLEEP = 1
|
||||
|
||||
type JVCConfigEntry = ConfigEntry[JvcProjectorDataUpdateCoordinator]
|
||||
|
||||
|
||||
@@ -51,27 +62,108 @@ class JvcProjectorDataUpdateCoordinator(DataUpdateCoordinator[dict[str, str]]):
|
||||
assert config_entry.unique_id is not None
|
||||
self.unique_id = config_entry.unique_id
|
||||
|
||||
self.capabilities = self.device.capabilities()
|
||||
|
||||
self.state: dict[type[Command], str] = {}
|
||||
|
||||
async def _async_update_data(self) -> dict[str, Any]:
|
||||
"""Get the latest state data."""
|
||||
state: dict[str, str | None] = {
|
||||
POWER: None,
|
||||
INPUT: None,
|
||||
}
|
||||
"""Update state with the current value of a command."""
|
||||
commands: set[type[Command]] = set(self.async_contexts())
|
||||
commands = commands.difference(CORE_COMMANDS)
|
||||
|
||||
try:
|
||||
state[POWER] = await self.device.get(cmd.Power)
|
||||
last_timeout: JvcProjectorTimeoutError | None = None
|
||||
|
||||
if state[POWER] == cmd.Power.ON:
|
||||
state[INPUT] = await self.device.get(cmd.Input)
|
||||
for _ in range(TIMEOUT_RETRIES):
|
||||
try:
|
||||
new_state = await self._get_device_state(commands)
|
||||
break
|
||||
except JvcProjectorTimeoutError as err:
|
||||
# Timeouts are expected when the projector loses signal and ignores commands for a brief time.
|
||||
last_timeout = err
|
||||
await asyncio.sleep(TIMEOUT_SLEEP)
|
||||
else:
|
||||
raise UpdateFailed(str(last_timeout)) from last_timeout
|
||||
|
||||
except JvcProjectorTimeoutError as err:
|
||||
raise UpdateFailed(f"Unable to connect to {self.device.host}") from err
|
||||
except JvcProjectorAuthError as err:
|
||||
raise ConfigEntryAuthFailed("Password authentication failed") from err
|
||||
# Clear state on signal loss
|
||||
if (
|
||||
new_state.get(cmd.Signal) == cmd.Signal.NONE
|
||||
and self.state.get(cmd.Signal) != cmd.Signal.NONE
|
||||
):
|
||||
self.state = {k: v for k, v in self.state.items() if k in CORE_COMMANDS}
|
||||
|
||||
if state[POWER] != cmd.Power.STANDBY:
|
||||
# Update state with new values
|
||||
for k, v in new_state.items():
|
||||
self.state[k] = v
|
||||
|
||||
if self.state[cmd.Power] != cmd.Power.STANDBY:
|
||||
self.update_interval = INTERVAL_FAST
|
||||
else:
|
||||
self.update_interval = INTERVAL_SLOW
|
||||
|
||||
return state
|
||||
return {k.name: v for k, v in self.state.items()}
|
||||
|
||||
async def _get_device_state(
|
||||
self, commands: set[type[Command]]
|
||||
) -> dict[type[Command], str]:
|
||||
"""Get the current state of the device."""
|
||||
new_state: dict[type[Command], str] = {}
|
||||
deferred_commands: list[type[Command]] = []
|
||||
|
||||
power = await self._update_command_state(cmd.Power, new_state)
|
||||
|
||||
if power == cmd.Power.ON:
|
||||
signal = await self._update_command_state(cmd.Signal, new_state)
|
||||
await self._update_command_state(cmd.Input, new_state)
|
||||
await self._update_command_state(cmd.LightTime, new_state)
|
||||
|
||||
if signal == cmd.Signal.SIGNAL:
|
||||
for command in commands:
|
||||
if command.depends:
|
||||
# Command has dependencies so defer until below
|
||||
deferred_commands.append(command)
|
||||
else:
|
||||
await self._update_command_state(command, new_state)
|
||||
|
||||
# Deferred commands should have had dependencies met above
|
||||
for command in deferred_commands:
|
||||
depend_command, depend_values = next(iter(command.depends.items()))
|
||||
value: str | None = None
|
||||
if depend_command in new_state:
|
||||
value = new_state[depend_command]
|
||||
elif depend_command in self.state:
|
||||
value = self.state[depend_command]
|
||||
if value and value in depend_values:
|
||||
await self._update_command_state(command, new_state)
|
||||
|
||||
elif self.state.get(cmd.Signal) != cmd.Signal.NONE:
|
||||
new_state[cmd.Signal] = cmd.Signal.NONE
|
||||
|
||||
return new_state
|
||||
|
||||
async def _update_command_state(
|
||||
self, command: type[Command], new_state: dict[type[Command], str]
|
||||
) -> str | None:
|
||||
"""Update state with the current value of a command."""
|
||||
value = await self.device.get(command)
|
||||
|
||||
if value != self.state.get(command):
|
||||
new_state[command] = value
|
||||
|
||||
return value
|
||||
|
||||
def get_options_map(self, command: str) -> dict[str, str]:
|
||||
"""Get the available options for a command."""
|
||||
capabilities = self.capabilities.get(command, {})
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert isinstance(capabilities, dict)
|
||||
assert isinstance(capabilities.get("parameter", {}), dict)
|
||||
assert isinstance(capabilities.get("parameter", {}).get("read", {}), dict)
|
||||
|
||||
values = list(capabilities.get("parameter", {}).get("read", {}).values())
|
||||
|
||||
return {v: v.translate(TRANSLATIONS) for v in values}
|
||||
|
||||
def supports(self, command: type[Command]) -> bool:
|
||||
"""Check if the device supports a command."""
|
||||
return self.device.supports(command)
|
||||
|
||||
@@ -4,7 +4,7 @@ from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from jvcprojector import JvcProjector
|
||||
from jvcprojector import Command, JvcProjector
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
@@ -20,9 +20,13 @@ class JvcProjectorEntity(CoordinatorEntity[JvcProjectorDataUpdateCoordinator]):
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(self, coordinator: JvcProjectorDataUpdateCoordinator) -> None:
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: JvcProjectorDataUpdateCoordinator,
|
||||
command: type[Command] | None = None,
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(coordinator)
|
||||
super().__init__(coordinator, command)
|
||||
|
||||
self._attr_unique_id = coordinator.unique_id
|
||||
self._attr_device_info = DeviceInfo(
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"jvc_power": {
|
||||
"power": {
|
||||
"default": "mdi:projector-off",
|
||||
"state": {
|
||||
"on": "mdi:projector"
|
||||
@@ -9,17 +9,47 @@
|
||||
}
|
||||
},
|
||||
"select": {
|
||||
"anamorphic": {
|
||||
"default": "mdi:fit-to-screen-outline"
|
||||
},
|
||||
"clear_motion_drive": {
|
||||
"default": "mdi:blur"
|
||||
},
|
||||
"dynamic_control": {
|
||||
"default": "mdi:lightbulb-on-outline"
|
||||
},
|
||||
"input": {
|
||||
"default": "mdi:hdmi-port"
|
||||
},
|
||||
"installation_mode": {
|
||||
"default": "mdi:aspect-ratio"
|
||||
},
|
||||
"light_power": {
|
||||
"default": "mdi:lightbulb-on-outline"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"jvc_power_status": {
|
||||
"default": "mdi:power-plug-off",
|
||||
"color_depth": {
|
||||
"default": "mdi:palette-outline"
|
||||
},
|
||||
"color_space": {
|
||||
"default": "mdi:palette-outline"
|
||||
},
|
||||
"hdr": {
|
||||
"default": "mdi:image-filter-hdr-outline"
|
||||
},
|
||||
"hdr_processing": {
|
||||
"default": "mdi:image-filter-hdr-outline"
|
||||
},
|
||||
"picture_mode": {
|
||||
"default": "mdi:movie-roll"
|
||||
},
|
||||
"power": {
|
||||
"default": "mdi:power",
|
||||
"state": {
|
||||
"cooling": "mdi:snowflake",
|
||||
"error": "mdi:alert-circle",
|
||||
"on": "mdi:power-plug",
|
||||
"on": "mdi:power",
|
||||
"warming": "mdi:heat-wave"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14,7 +14,6 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import POWER
|
||||
from .coordinator import JVCConfigEntry
|
||||
from .entity import JvcProjectorEntity
|
||||
|
||||
@@ -65,6 +64,8 @@ RENAMED_COMMANDS: dict[str, str] = {
|
||||
"hdmi2": cmd.Remote.HDMI2,
|
||||
}
|
||||
|
||||
ON_STATUS = (cmd.Power.ON, cmd.Power.WARMING)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -86,7 +87,7 @@ class JvcProjectorRemote(JvcProjectorEntity, RemoteEntity):
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return True if the entity is on."""
|
||||
return self.coordinator.data[POWER] in (cmd.Power.ON, cmd.Power.WARMING)
|
||||
return self.coordinator.data.get(cmd.Power.name) in ON_STATUS
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the device on."""
|
||||
|
||||
@@ -2,11 +2,10 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import Final
|
||||
|
||||
from jvcprojector import JvcProjector, command as cmd
|
||||
from jvcprojector import Command, command as cmd
|
||||
|
||||
from homeassistant.components.select import SelectEntity, SelectEntityDescription
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -20,17 +19,37 @@ from .entity import JvcProjectorEntity
|
||||
class JvcProjectorSelectDescription(SelectEntityDescription):
|
||||
"""Describes JVC Projector select entities."""
|
||||
|
||||
command: Callable[[JvcProjector, str], Awaitable[None]]
|
||||
command: type[Command]
|
||||
|
||||
|
||||
SELECTS: Final[list[JvcProjectorSelectDescription]] = [
|
||||
SELECTS: Final[tuple[JvcProjectorSelectDescription, ...]] = (
|
||||
JvcProjectorSelectDescription(key="input", command=cmd.Input),
|
||||
JvcProjectorSelectDescription(
|
||||
key="input",
|
||||
translation_key="input",
|
||||
options=[cmd.Input.HDMI1, cmd.Input.HDMI2],
|
||||
command=lambda device, option: device.set(cmd.Input, option),
|
||||
)
|
||||
]
|
||||
key="installation_mode",
|
||||
command=cmd.InstallationMode,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
JvcProjectorSelectDescription(
|
||||
key="light_power",
|
||||
command=cmd.LightPower,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
JvcProjectorSelectDescription(
|
||||
key="dynamic_control",
|
||||
command=cmd.DynamicControl,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
JvcProjectorSelectDescription(
|
||||
key="clear_motion_drive",
|
||||
command=cmd.ClearMotionDrive,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
JvcProjectorSelectDescription(
|
||||
key="anamorphic",
|
||||
command=cmd.Anamorphic,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -42,30 +61,45 @@ async def async_setup_entry(
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
JvcProjectorSelectEntity(coordinator, description) for description in SELECTS
|
||||
JvcProjectorSelectEntity(coordinator, description)
|
||||
for description in SELECTS
|
||||
if coordinator.supports(description.command)
|
||||
)
|
||||
|
||||
|
||||
class JvcProjectorSelectEntity(JvcProjectorEntity, SelectEntity):
|
||||
"""Representation of a JVC Projector select entity."""
|
||||
|
||||
entity_description: JvcProjectorSelectDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: JvcProjectorDataUpdateCoordinator,
|
||||
description: JvcProjectorSelectDescription,
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(coordinator)
|
||||
super().__init__(coordinator, description.command)
|
||||
self.command: type[Command] = description.command
|
||||
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{coordinator.unique_id}_{description.key}"
|
||||
self._attr_translation_key = description.key
|
||||
self._attr_unique_id = f"{self._attr_unique_id}_{description.key}"
|
||||
|
||||
self._options_map: dict[str, str] = coordinator.get_options_map(
|
||||
self.command.name
|
||||
)
|
||||
|
||||
@property
|
||||
def options(self) -> list[str]:
|
||||
"""Return a list of selectable options."""
|
||||
return list(self._options_map.values())
|
||||
|
||||
@property
|
||||
def current_option(self) -> str | None:
|
||||
"""Return the selected entity option to represent the entity state."""
|
||||
return self.coordinator.data[self.entity_description.key]
|
||||
if value := self.coordinator.data.get(self.command.name):
|
||||
return self._options_map.get(value)
|
||||
return None
|
||||
|
||||
async def async_select_option(self, option: str) -> None:
|
||||
"""Change the selected option."""
|
||||
await self.entity_description.command(self.coordinator.device, option)
|
||||
value = next((k for k, v in self._options_map.items() if v == option), None)
|
||||
await self.coordinator.device.set(self.command, value)
|
||||
|
||||
@@ -2,33 +2,77 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from jvcprojector import command as cmd
|
||||
from dataclasses import dataclass
|
||||
|
||||
from jvcprojector import Command, command as cmd
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
)
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.const import EntityCategory, UnitOfTime
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import JVCConfigEntry, JvcProjectorDataUpdateCoordinator
|
||||
from .entity import JvcProjectorEntity
|
||||
|
||||
JVC_SENSORS = (
|
||||
SensorEntityDescription(
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class JvcProjectorSensorDescription(SensorEntityDescription):
|
||||
"""Describes JVC Projector sensor entities."""
|
||||
|
||||
command: type[Command]
|
||||
|
||||
|
||||
SENSORS: tuple[JvcProjectorSensorDescription, ...] = (
|
||||
JvcProjectorSensorDescription(
|
||||
key="power",
|
||||
translation_key="jvc_power_status",
|
||||
command=cmd.Power,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
),
|
||||
JvcProjectorSensorDescription(
|
||||
key="light_time",
|
||||
command=cmd.LightTime,
|
||||
device_class=SensorDeviceClass.DURATION,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
native_unit_of_measurement=UnitOfTime.HOURS,
|
||||
),
|
||||
JvcProjectorSensorDescription(
|
||||
key="color_depth",
|
||||
command=cmd.ColorDepth,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
options=[
|
||||
cmd.Power.STANDBY,
|
||||
cmd.Power.ON,
|
||||
cmd.Power.WARMING,
|
||||
cmd.Power.COOLING,
|
||||
cmd.Power.ERROR,
|
||||
],
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
JvcProjectorSensorDescription(
|
||||
key="color_space",
|
||||
command=cmd.ColorSpace,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
JvcProjectorSensorDescription(
|
||||
key="hdr",
|
||||
command=cmd.Hdr,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
JvcProjectorSensorDescription(
|
||||
key="hdr_processing",
|
||||
command=cmd.HdrProcessing,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
JvcProjectorSensorDescription(
|
||||
key="picture_mode",
|
||||
command=cmd.PictureMode,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
)
|
||||
|
||||
@@ -42,24 +86,48 @@ async def async_setup_entry(
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
JvcSensor(coordinator, description) for description in JVC_SENSORS
|
||||
JvcProjectorSensorEntity(coordinator, description)
|
||||
for description in SENSORS
|
||||
if coordinator.supports(description.command)
|
||||
)
|
||||
|
||||
|
||||
class JvcSensor(JvcProjectorEntity, SensorEntity):
|
||||
class JvcProjectorSensorEntity(JvcProjectorEntity, SensorEntity):
|
||||
"""The entity class for JVC Projector integration."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: JvcProjectorDataUpdateCoordinator,
|
||||
description: SensorEntityDescription,
|
||||
description: JvcProjectorSensorDescription,
|
||||
) -> None:
|
||||
"""Initialize the JVC Projector sensor."""
|
||||
super().__init__(coordinator)
|
||||
super().__init__(coordinator, description.command)
|
||||
self.command: type[Command] = description.command
|
||||
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{coordinator.unique_id}_{description.key}"
|
||||
self._attr_translation_key = description.key
|
||||
self._attr_unique_id = f"{self._attr_unique_id}_{description.key}"
|
||||
|
||||
self._options_map: dict[str, str] = {}
|
||||
if self.device_class == SensorDeviceClass.ENUM:
|
||||
self._options_map = coordinator.get_options_map(self.command.name)
|
||||
|
||||
@property
|
||||
def options(self) -> list[str] | None:
|
||||
"""Return a set of possible options."""
|
||||
if self.device_class == SensorDeviceClass.ENUM:
|
||||
return list(self._options_map.values())
|
||||
return None
|
||||
|
||||
@property
|
||||
def native_value(self) -> str | None:
|
||||
"""Return the native value."""
|
||||
return self.coordinator.data[self.entity_description.key]
|
||||
value = self.coordinator.data.get(self.command.name)
|
||||
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
if self.device_class == SensorDeviceClass.ENUM:
|
||||
return self._options_map.get(value)
|
||||
|
||||
return value
|
||||
|
||||
@@ -36,20 +36,134 @@
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"power": {
|
||||
"name": "[%key:component::binary_sensor::entity_component::power::name%]"
|
||||
"name": "Power"
|
||||
}
|
||||
},
|
||||
"select": {
|
||||
"anamorphic": {
|
||||
"name": "Anamorphic",
|
||||
"state": {
|
||||
"a": "A",
|
||||
"b": "B",
|
||||
"c": "C",
|
||||
"d": "D",
|
||||
"off": "[%key:common::state::off%]"
|
||||
}
|
||||
},
|
||||
"clear_motion_drive": {
|
||||
"name": "Clear Motion Drive",
|
||||
"state": {
|
||||
"high": "[%key:common::state::high%]",
|
||||
"inverse-telecine": "Inverse Telecine",
|
||||
"low": "[%key:common::state::low%]",
|
||||
"off": "[%key:common::state::off%]"
|
||||
}
|
||||
},
|
||||
"dynamic_control": {
|
||||
"name": "Dynamic Control",
|
||||
"state": {
|
||||
"balanced": "Balanced",
|
||||
"high": "[%key:common::state::high%]",
|
||||
"low": "[%key:common::state::low%]",
|
||||
"mode-1": "Mode 1",
|
||||
"mode-2": "Mode 2",
|
||||
"mode-3": "Mode 3",
|
||||
"off": "[%key:common::state::off%]"
|
||||
}
|
||||
},
|
||||
"input": {
|
||||
"name": "Input",
|
||||
"state": {
|
||||
"hdmi1": "HDMI 1",
|
||||
"hdmi2": "HDMI 2"
|
||||
}
|
||||
},
|
||||
"installation_mode": {
|
||||
"name": "Installation Mode",
|
||||
"state": {
|
||||
"memory-1": "Memory 1",
|
||||
"memory-10": "Memory 10",
|
||||
"memory-2": "Memory 2",
|
||||
"memory-3": "Memory 3",
|
||||
"memory-4": "Memory 4",
|
||||
"memory-5": "Memory 5",
|
||||
"memory-6": "Memory 6",
|
||||
"memory-7": "Memory 7",
|
||||
"memory-8": "Memory 8",
|
||||
"memory-9": "Memory 9"
|
||||
}
|
||||
},
|
||||
"light_power": {
|
||||
"name": "Light Power",
|
||||
"state": {
|
||||
"high": "[%key:common::state::high%]",
|
||||
"low": "[%key:common::state::low%]",
|
||||
"mid": "[%key:common::state::medium%]",
|
||||
"normal": "[%key:common::state::normal%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"jvc_power_status": {
|
||||
"color_depth": {
|
||||
"name": "Color Depth",
|
||||
"state": {
|
||||
"8-bit": "8-bit",
|
||||
"10-bit": "10-bit",
|
||||
"12-bit": "12-bit"
|
||||
}
|
||||
},
|
||||
"color_space": {
|
||||
"name": "Color Space",
|
||||
"state": {
|
||||
"rgb": "RGB",
|
||||
"xv-color": "XV Color",
|
||||
"ycbcr-420": "YCbCr 4:2:0",
|
||||
"ycbcr-422": "YCbCr 4:2:2",
|
||||
"ycbcr-444": "YCbCr 4:4:4",
|
||||
"yuv": "YUV"
|
||||
}
|
||||
},
|
||||
"hdr": {
|
||||
"name": "HDR",
|
||||
"state": {
|
||||
"hdr": "HDR",
|
||||
"hdr10p": "HDR10+",
|
||||
"hybrid-log": "Hybrid Log",
|
||||
"none": "None",
|
||||
"sdr": "SDR",
|
||||
"smpte-st-2084": "SMPTE ST 2084"
|
||||
}
|
||||
},
|
||||
"hdr_processing": {
|
||||
"name": "HDR Processing",
|
||||
"state": {
|
||||
"frame-by-frame": "Frame-by-Frame",
|
||||
"hdr10p": "HDR10+",
|
||||
"scene-by-scene": "Scene-by-Scene",
|
||||
"static": "Static"
|
||||
}
|
||||
},
|
||||
"light_time": {
|
||||
"name": "Light Time"
|
||||
},
|
||||
"picture_mode": {
|
||||
"name": "Picture Mode",
|
||||
"state": {
|
||||
"frame-adapt-hdr": "Frame Adapt HDR",
|
||||
"frame-adapt-hdr2": "Frame Adapt HDR2",
|
||||
"frame-adapt-hdr3": "Frame Adapt HDR3",
|
||||
"hdr1": "HDR1",
|
||||
"hdr10": "HDR10",
|
||||
"hdr10-ll": "HDR10 LL",
|
||||
"hdr2": "HDR2",
|
||||
"last-setting": "Last Setting",
|
||||
"pana-pq": "Pana PQ",
|
||||
"user-4": "User 4",
|
||||
"user-5": "User 5",
|
||||
"user-6": "User 6"
|
||||
}
|
||||
},
|
||||
"power": {
|
||||
"name": "Status",
|
||||
"state": {
|
||||
"cooling": "Cooling",
|
||||
|
||||
@@ -78,9 +78,9 @@
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
"any": "[%key:common::selector::trigger_behavior::options::any%]",
|
||||
"first": "[%key:common::selector::trigger_behavior::options::first%]",
|
||||
"last": "[%key:common::selector::trigger_behavior::options::last%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
homeassistant/components/liebherr/__init__.py (new file, 67 lines)
@@ -0,0 +1,67 @@
|
||||
"""The liebherr integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
|
||||
from pyliebherrhomeapi import LiebherrClient
|
||||
from pyliebherrhomeapi.exceptions import (
|
||||
LiebherrAuthenticationError,
|
||||
LiebherrConnectionError,
|
||||
)
|
||||
|
||||
from homeassistant.const import CONF_API_KEY, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .coordinator import LiebherrConfigEntry, LiebherrCoordinator
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.SENSOR]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: LiebherrConfigEntry) -> bool:
|
||||
"""Set up Liebherr from a config entry."""
|
||||
# Create shared API client
|
||||
client = LiebherrClient(
|
||||
api_key=entry.data[CONF_API_KEY],
|
||||
session=async_get_clientsession(hass),
|
||||
)
|
||||
|
||||
# Fetch device list to create coordinators
|
||||
try:
|
||||
devices = await client.get_devices()
|
||||
except LiebherrAuthenticationError as err:
|
||||
raise ConfigEntryError("Invalid API key") from err
|
||||
except LiebherrConnectionError as err:
|
||||
raise ConfigEntryNotReady(f"Failed to connect to Liebherr API: {err}") from err
|
||||
|
||||
# Create a coordinator for each device (may be empty if no devices)
|
||||
coordinators: dict[str, LiebherrCoordinator] = {}
|
||||
for device in devices:
|
||||
coordinator = LiebherrCoordinator(
|
||||
hass=hass,
|
||||
config_entry=entry,
|
||||
client=client,
|
||||
device_id=device.device_id,
|
||||
)
|
||||
coordinators[device.device_id] = coordinator
|
||||
|
||||
await asyncio.gather(
|
||||
*(
|
||||
coordinator.async_config_entry_first_refresh()
|
||||
for coordinator in coordinators.values()
|
||||
)
|
||||
)
|
||||
|
||||
# Store coordinators in runtime data
|
||||
entry.runtime_data = coordinators
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: LiebherrConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
homeassistant/components/liebherr/config_flow.py (new file, 68 lines)
@@ -0,0 +1,68 @@
|
||||
"""Config flow for the liebherr integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pyliebherrhomeapi import LiebherrClient
|
||||
from pyliebherrhomeapi.exceptions import (
|
||||
LiebherrAuthenticationError,
|
||||
LiebherrConnectionError,
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_API_KEY
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_API_KEY): str,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class LiebherrConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for liebherr."""
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
user_input[CONF_API_KEY] = user_input[CONF_API_KEY].strip()
|
||||
|
||||
self._async_abort_entries_match({CONF_API_KEY: user_input[CONF_API_KEY]})
|
||||
|
||||
try:
|
||||
# Create a client and test the connection
|
||||
client = LiebherrClient(
|
||||
api_key=user_input[CONF_API_KEY],
|
||||
session=async_get_clientsession(self.hass),
|
||||
)
|
||||
devices = await client.get_devices()
|
||||
except LiebherrAuthenticationError:
|
||||
errors["base"] = "invalid_auth"
|
||||
except LiebherrConnectionError:
|
||||
errors["base"] = "cannot_connect"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
if not devices:
|
||||
return self.async_abort(reason="no_devices")
|
||||
|
||||
return self.async_create_entry(
|
||||
title="Liebherr",
|
||||
data=user_input,
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||
)
|
||||
homeassistant/components/liebherr/const.py (new file, 6 lines)
@@ -0,0 +1,6 @@
"""Constants for the liebherr integration."""

from typing import Final

DOMAIN: Final = "liebherr"
MANUFACTURER: Final = "Liebherr"
homeassistant/components/liebherr/coordinator.py (new file, 75 lines)
@@ -0,0 +1,75 @@
"""DataUpdateCoordinator for Liebherr integration."""

from __future__ import annotations

from datetime import timedelta
import logging

from pyliebherrhomeapi import (
    DeviceState,
    LiebherrAuthenticationError,
    LiebherrClient,
    LiebherrConnectionError,
    LiebherrTimeoutError,
)

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN

type LiebherrConfigEntry = ConfigEntry[dict[str, LiebherrCoordinator]]

_LOGGER = logging.getLogger(__name__)

SCAN_INTERVAL = timedelta(seconds=60)


class LiebherrCoordinator(DataUpdateCoordinator[DeviceState]):
    """Class to manage fetching Liebherr data from the API for a single device."""

    def __init__(
        self,
        hass: HomeAssistant,
        config_entry: LiebherrConfigEntry,
        client: LiebherrClient,
        device_id: str,
    ) -> None:
        """Initialize coordinator."""
        super().__init__(
            hass,
            logger=_LOGGER,
            name=f"{DOMAIN}_{device_id}",
            update_interval=SCAN_INTERVAL,
            config_entry=config_entry,
        )
        self.client = client
        self.device_id = device_id

    async def _async_setup(self) -> None:
        """Set up the coordinator by validating device access."""
        try:
            await self.client.get_device(self.device_id)
        except LiebherrAuthenticationError as err:
            raise ConfigEntryError("Invalid API key") from err
        except LiebherrConnectionError as err:
            raise ConfigEntryNotReady(
                f"Failed to connect to device {self.device_id}: {err}"
            ) from err

    async def _async_update_data(self) -> DeviceState:
        """Fetch data from API for this device."""
        try:
            return await self.client.get_device_state(self.device_id)
        except LiebherrAuthenticationError as err:
            raise ConfigEntryError("API key is no longer valid") from err
        except LiebherrTimeoutError as err:
            raise UpdateFailed(
                f"Timeout communicating with device {self.device_id}"
            ) from err
        except LiebherrConnectionError as err:
            raise UpdateFailed(
                f"Error communicating with device {self.device_id}"
            ) from err
homeassistant/components/liebherr/entity.py (new file, 75 lines)
@@ -0,0 +1,75 @@
|
||||
"""Base entity for Liebherr integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pyliebherrhomeapi import TemperatureControl, ZonePosition
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN, MANUFACTURER
|
||||
from .coordinator import LiebherrCoordinator
|
||||
|
||||
# Zone position to translation key mapping
|
||||
ZONE_POSITION_MAP = {
|
||||
ZonePosition.TOP: "top_zone",
|
||||
ZonePosition.MIDDLE: "middle_zone",
|
||||
ZonePosition.BOTTOM: "bottom_zone",
|
||||
}
|
||||
|
||||
|
||||
class LiebherrEntity(CoordinatorEntity[LiebherrCoordinator]):
|
||||
"""Base entity for Liebherr devices."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: LiebherrCoordinator,
|
||||
) -> None:
|
||||
"""Initialize the Liebherr entity."""
|
||||
super().__init__(coordinator)
|
||||
|
||||
device = coordinator.data.device
|
||||
|
||||
model = None
|
||||
if device.device_type:
|
||||
model = device.device_type.title()
|
||||
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, coordinator.device_id)},
|
||||
name=device.nickname or device.device_name,
|
||||
manufacturer=MANUFACTURER,
|
||||
model=model,
|
||||
model_id=device.device_name,
|
||||
)
|
||||
|
||||
|
||||
class LiebherrZoneEntity(LiebherrEntity):
|
||||
"""Base entity for zone-based Liebherr entities.
|
||||
|
||||
This class should be used for entities that are associated with a specific
|
||||
temperature control zone (e.g., climate, zone sensors).
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: LiebherrCoordinator,
|
||||
zone_id: int,
|
||||
) -> None:
|
||||
"""Initialize the zone entity."""
|
||||
super().__init__(coordinator)
|
||||
self._zone_id = zone_id
|
||||
|
||||
@property
|
||||
def temperature_control(self) -> TemperatureControl | None:
|
||||
"""Get the temperature control for this zone."""
|
||||
return self.coordinator.data.get_temperature_controls().get(self._zone_id)
|
||||
|
||||
def _get_zone_translation_key(self) -> str | None:
|
||||
"""Get the translation key for this zone."""
|
||||
control = self.temperature_control
|
||||
if control and isinstance(control.zone_position, ZonePosition):
|
||||
return ZONE_POSITION_MAP.get(control.zone_position)
|
||||
# Fallback to None to use device model name
|
||||
return None
|
||||
homeassistant/components/liebherr/manifest.json (new file, 18 lines)
@@ -0,0 +1,18 @@
{
  "domain": "liebherr",
  "name": "Liebherr",
  "codeowners": ["@mettolen"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/liebherr",
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "loggers": ["pyliebherrhomeapi"],
  "quality_scale": "bronze",
  "requirements": ["pyliebherrhomeapi==0.2.1"],
  "zeroconf": [
    {
      "name": "liebherr*",
      "type": "_http._tcp.local."
    }
  ]
}
homeassistant/components/liebherr/quality_scale.yaml (new file, 72 lines)
@@ -0,0 +1,72 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: Integration does not register custom actions.
  appropriate-polling: done
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: Integration does not register custom actions.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup: done
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions:
    status: exempt
    comment: Integration does not register custom actions.
  config-entry-unloading: done
  docs-configuration-parameters:
    status: exempt
    comment: Integration has no configurable parameters after initial setup.
  docs-installation-parameters: done
  entity-unavailable: done
  integration-owner: done
  log-when-unavailable: todo
  parallel-updates: done
  reauthentication-flow: todo
  test-coverage: done

  # Gold
  devices: done
  diagnostics: todo
  discovery-update-info:
    status: exempt
    comment: Cloud API does not require updating entry data from network discovery.
  discovery: done
  docs-data-update: done
  docs-examples: todo
  docs-known-limitations: done
  docs-supported-devices: done
  docs-supported-functions: done
  docs-troubleshooting: done
  docs-use-cases: done
  dynamic-devices: todo
  entity-category: done
  entity-device-class: todo
  entity-disabled-by-default: todo
  entity-translations: done
  exception-translations: todo
  icon-translations: todo
  reconfiguration-flow: todo
  repair-issues:
    status: exempt
    comment: No repair issues to implement at this time.
  stale-devices: todo

  # Platinum
  async-dependency: done
  inject-websession: done
  strict-typing: todo
118 homeassistant/components/liebherr/sensor.py Normal file
@@ -0,0 +1,118 @@
"""Sensor platform for Liebherr integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
|
||||
from pyliebherrhomeapi import TemperatureControl, TemperatureUnit
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
|
||||
from .coordinator import LiebherrConfigEntry, LiebherrCoordinator
|
||||
from .entity import LiebherrZoneEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class LiebherrSensorEntityDescription(SensorEntityDescription):
|
||||
"""Describes Liebherr sensor entity."""
|
||||
|
||||
value_fn: Callable[[TemperatureControl], StateType]
|
||||
unit_fn: Callable[[TemperatureControl], str]
|
||||
|
||||
|
||||
SENSOR_TYPES: tuple[LiebherrSensorEntityDescription, ...] = (
|
||||
LiebherrSensorEntityDescription(
|
||||
key="temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda control: control.value,
|
||||
unit_fn=lambda control: (
|
||||
UnitOfTemperature.FAHRENHEIT
|
||||
if control.unit == TemperatureUnit.FAHRENHEIT
|
||||
else UnitOfTemperature.CELSIUS
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: LiebherrConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Liebherr sensor entities."""
|
||||
coordinators = entry.runtime_data
|
||||
entities: list[LiebherrSensor] = []
|
||||
|
||||
for coordinator in coordinators.values():
|
||||
# Get all temperature controls for this device
|
||||
temp_controls = coordinator.data.get_temperature_controls()
|
||||
|
||||
for temp_control in temp_controls.values():
|
||||
entities.extend(
|
||||
LiebherrSensor(
|
||||
coordinator=coordinator,
|
||||
zone_id=temp_control.zone_id,
|
||||
description=description,
|
||||
)
|
||||
for description in SENSOR_TYPES
|
||||
)
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
class LiebherrSensor(LiebherrZoneEntity, SensorEntity):
|
||||
"""Representation of a Liebherr sensor."""
|
||||
|
||||
entity_description: LiebherrSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: LiebherrCoordinator,
|
||||
zone_id: int,
|
||||
description: LiebherrSensorEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the sensor entity."""
|
||||
super().__init__(coordinator, zone_id)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{coordinator.device_id}_{description.key}_{zone_id}"
|
||||
|
||||
# If device has only one zone, use model name instead of zone name
|
||||
temp_controls = coordinator.data.get_temperature_controls()
|
||||
if len(temp_controls) == 1:
|
||||
self._attr_name = None
|
||||
else:
|
||||
# Set translation key based on zone position for multi-zone devices
|
||||
self._attr_translation_key = self._get_zone_translation_key()
|
||||
|
||||
@property
|
||||
def native_unit_of_measurement(self) -> str | None:
|
||||
"""Return the unit of measurement."""
|
||||
if (temp_control := self.temperature_control) is None:
|
||||
return None
|
||||
return self.entity_description.unit_fn(temp_control)
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Return the current value."""
|
||||
if (temp_control := self.temperature_control) is None:
|
||||
return None
|
||||
return self.entity_description.value_fn(temp_control)
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if entity is available."""
|
||||
return super().available and self.temperature_control is not None
|
||||
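To show how a description's value_fn and unit_fn are applied, here is a minimal, self-contained sketch that uses a stand-in for TemperatureControl; the real class from pyliebherrhomeapi may carry more fields:

# Illustration only: a stand-in TemperatureControl and the same lambdas as in
# SENSOR_TYPES, applied outside Home Assistant.
from dataclasses import dataclass
from enum import Enum


class TemperatureUnit(Enum):
    CELSIUS = "celsius"
    FAHRENHEIT = "fahrenheit"


@dataclass
class FakeTemperatureControl:
    zone_id: int
    value: float
    unit: TemperatureUnit


control = FakeTemperatureControl(zone_id=1, value=4.0, unit=TemperatureUnit.CELSIUS)

native_value = control.value  # what the sensor reports as native_value
native_unit = "°F" if control.unit == TemperatureUnit.FAHRENHEIT else "°C"

print(native_value, native_unit)  # 4.0 °C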
38 homeassistant/components/liebherr/strings.json Normal file
@@ -0,0 +1,38 @@
{
  "config": {
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
      "no_devices": "No devices found for this API key"
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
      "no_devices": "No devices found for this API key",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "step": {
      "user": {
        "data": {
          "api_key": "[%key:common::config_flow::data::api_key%]"
        },
        "data_description": {
          "api_key": "The API key from the Liebherr SmartDevice app. Note: The API key can only be copied once from the app."
        },
        "description": "Enter your Liebherr HomeAPI key. You can find it in the Liebherr SmartDevice app under Settings → Become a beta tester."
      }
    }
  },
  "entity": {
    "sensor": {
      "bottom_zone": {
        "name": "Bottom zone"
      },
      "middle_zone": {
        "name": "Middle zone"
      },
      "top_zone": {
        "name": "Top zone"
      }
    }
  }
}
@@ -73,6 +73,3 @@ LIFX_CEILING_PRODUCT_IDS = {176, 177, 201, 202}
|
||||
LIFX_128ZONE_CEILING_PRODUCT_IDS = {201, 202}
|
||||
|
||||
_LOGGER = logging.getLogger(__package__)
|
||||
|
||||
# _ATTR_COLOR_TEMP deprecated - to be removed in 2026.1
|
||||
_ATTR_COLOR_TEMP = "color_temp"
|
||||
|
||||
@@ -33,7 +33,7 @@ from homeassistant.helpers.target import (
|
||||
async_extract_referenced_entity_ids,
|
||||
)
|
||||
|
||||
from .const import _ATTR_COLOR_TEMP, ATTR_THEME, DOMAIN
|
||||
from .const import ATTR_THEME, DOMAIN
|
||||
from .coordinator import LIFXUpdateCoordinator
|
||||
from .util import convert_8_to_16, find_hsbk
|
||||
|
||||
@@ -135,8 +135,6 @@ LIFX_EFFECT_PULSE_SCHEMA = cv.make_entity_service_schema(
|
||||
vol.Exclusive(ATTR_COLOR_TEMP_KELVIN, COLOR_GROUP): vol.All(
|
||||
vol.Coerce(int), vol.Range(min=1500, max=9000)
|
||||
),
|
||||
# _ATTR_COLOR_TEMP deprecated - to be removed in 2026.1
|
||||
vol.Exclusive(_ATTR_COLOR_TEMP, COLOR_GROUP): cv.positive_int,
|
||||
ATTR_PERIOD: vol.All(vol.Coerce(float), vol.Range(min=0.05)),
|
||||
ATTR_CYCLES: vol.All(vol.Coerce(float), vol.Range(min=1)),
|
||||
ATTR_MODE: vol.In(PULSE_MODES),
|
||||
|
||||
@@ -26,7 +26,6 @@ from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.util import color as color_util
|
||||
|
||||
from .const import (
|
||||
_ATTR_COLOR_TEMP,
|
||||
_LOGGER,
|
||||
DEFAULT_ATTEMPTS,
|
||||
DOMAIN,
|
||||
@@ -115,17 +114,6 @@ def find_hsbk(hass: HomeAssistant, **kwargs: Any) -> list[float | int | None] |
|
||||
saturation = int(saturation / 100 * 65535)
|
||||
kelvin = 3500
|
||||
|
||||
if ATTR_COLOR_TEMP_KELVIN not in kwargs and _ATTR_COLOR_TEMP in kwargs:
|
||||
# added in 2025.1, can be removed in 2026.1
|
||||
_LOGGER.warning(
|
||||
"The 'color_temp' parameter is deprecated. Please use 'color_temp_kelvin' for"
|
||||
" all service calls"
|
||||
)
|
||||
kelvin = color_util.color_temperature_mired_to_kelvin(
|
||||
kwargs.pop(_ATTR_COLOR_TEMP)
|
||||
)
|
||||
saturation = 0
|
||||
|
||||
if ATTR_COLOR_TEMP_KELVIN in kwargs:
|
||||
kelvin = kwargs.pop(ATTR_COLOR_TEMP_KELVIN)
|
||||
saturation = 0
|
||||
|
||||
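The removed fallback above converted the deprecated mired-based color_temp into kelvin before use; the conversion itself is just 1,000,000 divided by the mired value (the helper in homeassistant.util.color performs the same arithmetic):

# Mired-to-kelvin conversion behind the removed color_temp shim.
def mired_to_kelvin(mired: float) -> int:
    return int(1_000_000 / mired)


print(mired_to_kelvin(250))  # 4000 K (warm-ish white)
print(mired_to_kelvin(153))  # 6535 K (a common cold-white upper bound)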
@@ -336,9 +336,9 @@
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
"any": "[%key:common::selector::trigger_behavior::options::any%]",
|
||||
"first": "[%key:common::selector::trigger_behavior::options::first%]",
|
||||
"last": "[%key:common::selector::trigger_behavior::options::last%]"
|
||||
}
|
||||
},
|
||||
"trigger_threshold_type": {
|
||||
|
||||
@@ -244,9 +244,9 @@
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
"any": "[%key:common::selector::trigger_behavior::options::any%]",
|
||||
"first": "[%key:common::selector::trigger_behavior::options::first%]",
|
||||
"last": "[%key:common::selector::trigger_behavior::options::last%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -11,6 +11,7 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA,
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorStateClass,
|
||||
)
|
||||
@@ -25,7 +26,9 @@ from homeassistant.const import (
|
||||
STATE_UNKNOWN,
|
||||
)
|
||||
from homeassistant.core import Event, EventStateChangedData, HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv, entity_registry as er
|
||||
from homeassistant.helpers.entity import get_device_class
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
@@ -259,6 +262,7 @@ class MinMaxSensor(SensorEntity):
|
||||
)
|
||||
self._async_min_max_sensor_state_listener(state_event, update_state=False)
|
||||
|
||||
self._update_device_class()
|
||||
self._calc_values()
|
||||
|
||||
@property
|
||||
@@ -345,6 +349,32 @@ class MinMaxSensor(SensorEntity):
|
||||
self._calc_values()
|
||||
self.async_write_ha_state()
|
||||
|
||||
@callback
|
||||
def _update_device_class(self) -> None:
|
||||
"""Update device_class based on source entities.
|
||||
|
||||
If all source entities have the same device_class, inherit it.
|
||||
Otherwise, leave device_class as None.
|
||||
"""
|
||||
device_classes: list[SensorDeviceClass | None] = []
|
||||
|
||||
for entity_id in self._entity_ids:
|
||||
try:
|
||||
device_class = get_device_class(self.hass, entity_id)
|
||||
if device_class:
|
||||
device_classes.append(SensorDeviceClass(device_class))
|
||||
else:
|
||||
device_classes.append(None)
|
||||
except (HomeAssistantError, ValueError):
|
||||
# If we can't get device class for any entity, don't set it
|
||||
device_classes.append(None)
|
||||
|
||||
# Only inherit device_class if all entities have the same non-None device_class
|
||||
if device_classes and all(
|
||||
dc is not None and dc == device_classes[0] for dc in device_classes
|
||||
):
|
||||
self._attr_device_class = device_classes[0]
|
||||
|
||||
@callback
|
||||
def _calc_values(self) -> None:
|
||||
"""Calculate the values."""
|
||||
|
||||
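The _update_device_class helper added above only inherits a device class when every source entity reports the same non-None value; the core condition can be checked in isolation:

# Standalone illustration of the "all equal and non-None" rule used above.
def inherited_device_class(device_classes: list[str | None]) -> str | None:
    if device_classes and all(
        dc is not None and dc == device_classes[0] for dc in device_classes
    ):
        return device_classes[0]
    return None


print(inherited_device_class(["temperature", "temperature"]))  # temperature
print(inherited_device_class(["temperature", "humidity"]))     # None
print(inherited_device_class(["temperature", None]))           # None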
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/nibe_heatpump",
  "integration_type": "device",
  "iot_class": "local_polling",
  "requirements": ["nibe==2.21.0"]
  "requirements": ["nibe==2.22.0"]
}
@@ -594,7 +594,8 @@ UNIT_CONVERTERS: dict[NumberDeviceClass, type[BaseUnitConverter]] = {
|
||||
}
|
||||
|
||||
# We translate units that were using the legacy coding of μ \u00b5
|
||||
# to units using recommended coding of μ \u03bc
|
||||
# to units using recommended coding of μ \u03bc and
|
||||
# we convert alternative accepted units to the preferred unit.
|
||||
AMBIGUOUS_UNITS: dict[str | None, str] = {
|
||||
"\u00b5Sv/h": "μSv/h", # aranet: radiation rate
|
||||
"\u00b5S/cm": UnitOfConductivity.MICROSIEMENS_PER_CM,
|
||||
@@ -604,4 +605,9 @@ AMBIGUOUS_UNITS: dict[str | None, str] = {
|
||||
"\u00b5mol/s⋅m²": "μmol/s⋅m²", # fyta: light
|
||||
"\u00b5g": UnitOfMass.MICROGRAMS,
|
||||
"\u00b5s": UnitOfTime.MICROSECONDS,
|
||||
"mVAr": UnitOfReactivePower.MILLIVOLT_AMPERE_REACTIVE,
|
||||
"VAr": UnitOfReactivePower.VOLT_AMPERE_REACTIVE,
|
||||
"kVAr": UnitOfReactivePower.KILO_VOLT_AMPERE_REACTIVE,
|
||||
"VArh": UnitOfReactiveEnergy.VOLT_AMPERE_REACTIVE_HOUR,
|
||||
"kVArh": UnitOfReactiveEnergy.KILO_VOLT_AMPERE_REACTIVE_HOUR,
|
||||
}
|
||||
|
||||
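AMBIGUOUS_UNITS is a plain dict lookup from a reported unit string to the preferred spelling. A trimmed-down sketch of applying such a normalization (the target values here are hard-coded for illustration; the real mapping points at constants from homeassistant.const):

# Illustration only: normalize a few legacy or alternative unit strings.
AMBIGUOUS_UNITS_EXAMPLE: dict[str | None, str] = {
    "\u00b5g": "\u03bcg",  # legacy micro sign -> Greek small mu
    "\u00b5s": "\u03bcs",
    "VAr": "var",          # assumed preferred spelling of reactive power
}


def normalize_unit(unit: str | None) -> str | None:
    return AMBIGUOUS_UNITS_EXAMPLE.get(unit, unit)


print(normalize_unit("\u00b5g"))  # μg
print(normalize_unit("W"))        # unchanged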
@@ -8,7 +8,7 @@
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["openevsehttp"],
  "quality_scale": "legacy",
  "quality_scale": "bronze",
  "requirements": ["python-openevse-http==0.2.1"],
  "zeroconf": ["_openevse._tcp.local."]
}
@@ -25,6 +25,8 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
from .const import DOMAIN
|
||||
from .coordinator import OpenEVSEConfigEntry, OpenEVSEDataUpdateCoordinator
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class OpenEVSENumberDescription(NumberEntityDescription):
|
||||
|
||||
74 homeassistant/components/openevse/quality_scale.yaml Normal file
@@ -0,0 +1,74 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: Integration does not register custom actions.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: Integration does not register custom actions.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: Integration does not subscribe to events.
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: todo
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: exempt
|
||||
comment: Integration has no options flow.
|
||||
docs-installation-parameters: todo
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow: todo
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery: done
|
||||
discovery-update-info: done
|
||||
docs-data-update: todo
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: todo
|
||||
docs-supported-functions: todo
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: Integration supports a single device per config entry.
|
||||
entity-category: todo
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
status: done
|
||||
comment: Integration creates repair issues for YAML deprecation.
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: Integration supports a single device per config entry.
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: todo
|
||||
strict-typing: todo
|
||||
@@ -15,8 +15,12 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_create_clientsession
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import PortainerCoordinator
|
||||
from .services import async_setup_services
|
||||
|
||||
_PLATFORMS: list[Platform] = [
|
||||
Platform.BINARY_SENSOR,
|
||||
@@ -25,6 +29,7 @@ _PLATFORMS: list[Platform] = [
|
||||
Platform.BUTTON,
|
||||
]
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
type PortainerConfigEntry = ConfigEntry[PortainerCoordinator]
|
||||
|
||||
@@ -49,6 +54,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: PortainerConfigEntry) ->
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Portainer integration."""
|
||||
await async_setup_services(hass)
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: PortainerConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
DOMAIN = "portainer"
|
||||
DEFAULT_NAME = "Portainer"
|
||||
|
||||
|
||||
ENDPOINT_STATUS_DOWN = 2
|
||||
|
||||
CONTAINER_STATE_RUNNING = "running"
|
||||
|
||||
@@ -67,5 +67,10 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"prune_images": {
|
||||
"service": "mdi:delete-sweep"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,10 +7,7 @@ rules:
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: |
|
||||
No custom actions are defined.
|
||||
docs-actions: done
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
@@ -33,10 +30,7 @@ rules:
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates:
|
||||
status: exempt
|
||||
comment: |
|
||||
No explicit parallel updates are defined.
|
||||
parallel-updates: todo
|
||||
reauthentication-flow:
|
||||
status: todo
|
||||
comment: |
|
||||
|
||||
115 homeassistant/components/portainer/services.py Normal file
@@ -0,0 +1,115 @@
|
||||
"""Services for the Portainer integration."""
|
||||
|
||||
from datetime import timedelta
|
||||
|
||||
from pyportainer import (
|
||||
PortainerAuthenticationError,
|
||||
PortainerConnectionError,
|
||||
PortainerTimeoutError,
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import ATTR_DEVICE_ID
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
from homeassistant.helpers.service import async_extract_config_entry_ids
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import PortainerConfigEntry
|
||||
|
||||
ATTR_DATE_UNTIL = "until"
|
||||
ATTR_DANGLING = "dangling"
|
||||
|
||||
SERVICE_PRUNE_IMAGES = "prune_images"
|
||||
SERVICE_PRUNE_IMAGES_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_DEVICE_ID): cv.string,
|
||||
vol.Optional(ATTR_DATE_UNTIL): vol.All(
|
||||
cv.time_period, vol.Range(min=timedelta(minutes=1))
|
||||
),
|
||||
vol.Optional(ATTR_DANGLING): cv.boolean,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
async def _extract_config_entry(service_call: ServiceCall) -> PortainerConfigEntry:
|
||||
"""Extract config entry from the service call."""
|
||||
target_entry_ids = await async_extract_config_entry_ids(service_call)
|
||||
target_entries: list[PortainerConfigEntry] = [
|
||||
loaded_entry
|
||||
for loaded_entry in service_call.hass.config_entries.async_loaded_entries(
|
||||
DOMAIN
|
||||
)
|
||||
if loaded_entry.entry_id in target_entry_ids
|
||||
]
|
||||
if not target_entries:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_target",
|
||||
)
|
||||
return target_entries[0]
|
||||
|
||||
|
||||
async def _get_endpoint_id(
|
||||
call: ServiceCall,
|
||||
config_entry: PortainerConfigEntry,
|
||||
) -> int:
|
||||
"""Get endpoint data from device ID."""
|
||||
device_reg = dr.async_get(call.hass)
|
||||
device_id = call.data[ATTR_DEVICE_ID]
|
||||
device = device_reg.async_get(device_id)
|
||||
assert device
|
||||
coordinator = config_entry.runtime_data
|
||||
|
||||
endpoint_data = None
|
||||
for data in coordinator.data.values():
|
||||
if (
|
||||
DOMAIN,
|
||||
f"{config_entry.entry_id}_{data.endpoint.id}",
|
||||
) in device.identifiers:
|
||||
endpoint_data = data
|
||||
break
|
||||
|
||||
assert endpoint_data
|
||||
return endpoint_data.endpoint.id
|
||||
|
||||
|
||||
async def prune_images(call: ServiceCall) -> None:
|
||||
"""Prune unused images in Portainer, with more controls."""
|
||||
config_entry = await _extract_config_entry(call)
|
||||
coordinator = config_entry.runtime_data
|
||||
endpoint_id = await _get_endpoint_id(call, config_entry)
|
||||
|
||||
try:
|
||||
await coordinator.portainer.images_prune(
|
||||
endpoint_id=endpoint_id,
|
||||
until=call.data.get(ATTR_DATE_UNTIL),
|
||||
dangling=call.data.get(ATTR_DANGLING, False),
|
||||
)
|
||||
except PortainerAuthenticationError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_auth_no_details",
|
||||
) from err
|
||||
except PortainerConnectionError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="cannot_connect_no_details",
|
||||
) from err
|
||||
except PortainerTimeoutError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="timeout_connect_no_details",
|
||||
) from err
|
||||
|
||||
|
||||
async def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up services."""
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_PRUNE_IMAGES,
|
||||
prune_images,
|
||||
SERVICE_PRUNE_IMAGES_SCHEMA,
|
||||
)
|
||||
18 homeassistant/components/portainer/services.yaml Normal file
@@ -0,0 +1,18 @@
# Services for Portainer

prune_images:
  fields:
    device_id:
      required: true
      selector:
        device:
          integration: portainer
          model: Endpoint
    until:
      required: false
      selector:
        duration:
    dangling:
      required: false
      selector:
        boolean: {}
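For reference, the new portainer.prune_images action registered above would be called with a device target and the optional fields from services.yaml; a hedged sketch from async code running inside Home Assistant (the device ID is a placeholder):

from datetime import timedelta

from homeassistant.core import HomeAssistant


async def prune_old_images(hass: HomeAssistant) -> None:
    """Example: prune images unused for at least a day on one endpoint."""
    await hass.services.async_call(
        "portainer",
        "prune_images",
        {
            "device_id": "abc123deadbeef",  # placeholder endpoint device ID
            "until": timedelta(hours=24),
            "dangling": True,
        },
        blocking=True,
    )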
@@ -155,11 +155,34 @@
|
||||
"invalid_auth_no_details": {
|
||||
"message": "An error occurred while trying to authenticate."
|
||||
},
|
||||
"invalid_target": {
|
||||
"message": "Invalid device targeted."
|
||||
},
|
||||
"timeout_connect": {
|
||||
"message": "A timeout occurred while trying to connect to the Portainer instance: {error}"
|
||||
},
|
||||
"timeout_connect_no_details": {
|
||||
"message": "A timeout occurred while trying to connect to the Portainer instance."
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"prune_images": {
|
||||
"description": "Prunes unused images on a Portainer endpoint.",
|
||||
"fields": {
|
||||
"dangling": {
|
||||
"description": "If true, only prune dangling images.",
|
||||
"name": "Dangling"
|
||||
},
|
||||
"device_id": {
|
||||
"description": "The endpoint to prune images on.",
|
||||
"name": "Endpoint"
|
||||
},
|
||||
"until": {
|
||||
"description": "Prune images unused for at least this time duration in the past. If not provided, all unused images will be pruned.",
|
||||
"name": "Until"
|
||||
}
|
||||
},
|
||||
"name": "Prune unused images"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,15 +4,18 @@ from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from homeassistant.components.calendar import CalendarEntity, CalendarEvent
|
||||
from homeassistant.components.calendar import (
|
||||
CalendarEntity,
|
||||
CalendarEntityDescription,
|
||||
CalendarEvent,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import CalendarUpdateCoordinator, RadarrConfigEntry, RadarrEvent
|
||||
from .entity import RadarrEntity
|
||||
|
||||
CALENDAR_TYPE = EntityDescription(
|
||||
CALENDAR_TYPE = CalendarEntityDescription(
|
||||
key="calendar",
|
||||
name=None,
|
||||
)
|
||||
|
||||
@@ -19,7 +19,7 @@
      "data_description": {
        "calendar_name": "The name of the calendar shown in the UI.",
        "url": "The URL of the remote calendar.",
        "verify_ssl": "Enable SSL certificate verification for secure connections."
        "verify_ssl": "[%key:common::config_flow::description::verify_ssl%]"
      },
      "description": "Please choose a name for the calendar to be imported"
    }
@@ -8,5 +8,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["renault_api"],
  "quality_scale": "silver",
  "requirements": ["renault-api==0.5.2"]
  "requirements": ["renault-api==0.5.3"]
}
@@ -840,7 +840,8 @@ STATE_CLASS_UNITS: dict[SensorStateClass | str, set[type[StrEnum] | str | None]]
|
||||
}
|
||||
|
||||
# We translate units that were using using the legacy coding of μ \u00b5
|
||||
# to units using recommended coding of μ \u03bc
|
||||
# to units using recommended coding of μ \u03bc and
|
||||
# we convert alternative accepted units to the preferred unit.
|
||||
AMBIGUOUS_UNITS: dict[str | None, str] = {
|
||||
"\u00b5Sv/h": "μSv/h", # aranet: radiation rate
|
||||
"\u00b5S/cm": UnitOfConductivity.MICROSIEMENS_PER_CM,
|
||||
@@ -850,4 +851,9 @@ AMBIGUOUS_UNITS: dict[str | None, str] = {
|
||||
"\u00b5mol/s⋅m²": "μmol/s⋅m²", # fyta: light
|
||||
"\u00b5g": UnitOfMass.MICROGRAMS,
|
||||
"\u00b5s": UnitOfTime.MICROSECONDS,
|
||||
"mVAr": UnitOfReactivePower.MILLIVOLT_AMPERE_REACTIVE,
|
||||
"VAr": UnitOfReactivePower.VOLT_AMPERE_REACTIVE,
|
||||
"kVAr": UnitOfReactivePower.KILO_VOLT_AMPERE_REACTIVE,
|
||||
"VArh": UnitOfReactiveEnergy.VOLT_AMPERE_REACTIVE_HOUR,
|
||||
"kVArh": UnitOfReactiveEnergy.KILO_VOLT_AMPERE_REACTIVE_HOUR,
|
||||
}
|
||||
|
||||
@@ -4,18 +4,13 @@ from homeassistant.const import Platform
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.MEDIA_PLAYER]
|
||||
|
||||
GROUP_PREFIX = "snapcast_group_"
|
||||
GROUP_SUFFIX = "Snapcast Group"
|
||||
CLIENT_PREFIX = "snapcast_client_"
|
||||
CLIENT_SUFFIX = "Snapcast Client"
|
||||
|
||||
SERVICE_SNAPSHOT = "snapshot"
|
||||
SERVICE_RESTORE = "restore"
|
||||
SERVICE_JOIN = "join"
|
||||
SERVICE_UNJOIN = "unjoin"
|
||||
SERVICE_SET_LATENCY = "set_latency"
|
||||
|
||||
ATTR_MASTER = "master"
|
||||
ATTR_LATENCY = "latency"
|
||||
|
||||
DOMAIN = "snapcast"
|
||||
|
||||
@@ -1,8 +1,5 @@
|
||||
{
|
||||
"services": {
|
||||
"join": {
|
||||
"service": "mdi:music-note-plus"
|
||||
},
|
||||
"restore": {
|
||||
"service": "mdi:camera-retake"
|
||||
},
|
||||
@@ -11,9 +8,6 @@
|
||||
},
|
||||
"snapshot": {
|
||||
"service": "mdi:camera"
|
||||
},
|
||||
"unjoin": {
|
||||
"service": "mdi:music-note-minus"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable, Mapping
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
@@ -25,23 +25,17 @@ from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
entity_platform,
|
||||
entity_registry as er,
|
||||
issue_registry as ir,
|
||||
)
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import (
|
||||
ATTR_LATENCY,
|
||||
ATTR_MASTER,
|
||||
CLIENT_PREFIX,
|
||||
CLIENT_SUFFIX,
|
||||
DOMAIN,
|
||||
GROUP_PREFIX,
|
||||
GROUP_SUFFIX,
|
||||
SERVICE_JOIN,
|
||||
SERVICE_RESTORE,
|
||||
SERVICE_SET_LATENCY,
|
||||
SERVICE_SNAPSHOT,
|
||||
SERVICE_UNJOIN,
|
||||
)
|
||||
from .coordinator import SnapcastUpdateCoordinator
|
||||
from .entity import SnapcastCoordinatorEntity
|
||||
@@ -52,12 +46,6 @@ STREAM_STATUS = {
|
||||
"unknown": None,
|
||||
}
|
||||
|
||||
_SUPPORTED_FEATURES = (
|
||||
MediaPlayerEntityFeature.VOLUME_MUTE
|
||||
| MediaPlayerEntityFeature.VOLUME_SET
|
||||
| MediaPlayerEntityFeature.SELECT_SOURCE
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -67,10 +55,6 @@ def register_services() -> None:
|
||||
|
||||
platform.async_register_entity_service(SERVICE_SNAPSHOT, None, "async_snapshot")
|
||||
platform.async_register_entity_service(SERVICE_RESTORE, None, "async_restore")
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_JOIN, {vol.Required(ATTR_MASTER): cv.entity_id}, "async_join"
|
||||
)
|
||||
platform.async_register_entity_service(SERVICE_UNJOIN, None, "async_unjoin")
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_SET_LATENCY,
|
||||
{vol.Required(ATTR_LATENCY): cv.positive_int},
|
||||
@@ -90,45 +74,39 @@ async def async_setup_entry(
|
||||
|
||||
register_services()
|
||||
|
||||
_known_group_ids: set[str] = set()
|
||||
_known_client_ids: set[str] = set()
|
||||
|
||||
@callback
|
||||
def _update_entities(
|
||||
entity_class: type[SnapcastClientDevice | SnapcastGroupDevice],
|
||||
known_ids: set[str],
|
||||
get_device: Callable[[str], Snapclient | Snapgroup],
|
||||
get_devices: Callable[[], list[Snapclient] | list[Snapgroup]],
|
||||
) -> None:
|
||||
# Get IDs of current devices on server
|
||||
snapcast_ids = {d.identifier for d in get_devices()}
|
||||
def _update_clients() -> None:
|
||||
# Get IDs of current clients on server
|
||||
snapcast_ids = {d.identifier for d in coordinator.server.clients}
|
||||
|
||||
# Update known IDs
|
||||
ids_to_add = snapcast_ids - known_ids
|
||||
ids_to_remove = known_ids - snapcast_ids
|
||||
ids_to_add = snapcast_ids - _known_client_ids
|
||||
ids_to_remove = _known_client_ids - snapcast_ids
|
||||
|
||||
known_ids.difference_update(ids_to_remove)
|
||||
known_ids.update(ids_to_add)
|
||||
_known_client_ids.difference_update(ids_to_remove)
|
||||
_known_client_ids.update(ids_to_add)
|
||||
|
||||
# Exit early if no changes
|
||||
if not (ids_to_add | ids_to_remove):
|
||||
return
|
||||
|
||||
_LOGGER.debug(
|
||||
"New %s: %s",
|
||||
entity_class,
|
||||
str([get_device(d).friendly_name for d in ids_to_add]),
|
||||
"New snapcast client: %s",
|
||||
str([coordinator.server.client(d).friendly_name for d in ids_to_add]),
|
||||
)
|
||||
_LOGGER.debug(
|
||||
"Remove %s IDs: %s",
|
||||
entity_class,
|
||||
"Remove snapcast client IDs: %s",
|
||||
str([list(ids_to_remove)]),
|
||||
)
|
||||
|
||||
# Add new entities
|
||||
async_add_entities(
|
||||
[
|
||||
entity_class(coordinator, get_device(snapcast_id))
|
||||
SnapcastClientDevice(
|
||||
coordinator, coordinator.server.client(snapcast_id)
|
||||
)
|
||||
for snapcast_id in ids_to_add
|
||||
]
|
||||
)
|
||||
@@ -139,47 +117,33 @@ async def async_setup_entry(
|
||||
if entity_id := entity_registry.async_get_entity_id(
|
||||
MEDIA_PLAYER_DOMAIN,
|
||||
DOMAIN,
|
||||
entity_class.get_unique_id(coordinator.host_id, snapcast_id),
|
||||
SnapcastClientDevice.get_unique_id(coordinator.host_id, snapcast_id),
|
||||
):
|
||||
entity_registry.async_remove(entity_id)
|
||||
|
||||
def _update_clients() -> None:
|
||||
_update_entities(
|
||||
SnapcastClientDevice,
|
||||
_known_client_ids,
|
||||
coordinator.server.client,
|
||||
lambda: coordinator.server.clients,
|
||||
)
|
||||
|
||||
# Create client entities and add listener to update clients on server update
|
||||
_update_clients()
|
||||
coordinator.async_add_listener(_update_clients)
|
||||
|
||||
def _update_groups() -> None:
|
||||
_update_entities(
|
||||
SnapcastGroupDevice,
|
||||
_known_group_ids,
|
||||
coordinator.server.group,
|
||||
lambda: coordinator.server.groups,
|
||||
)
|
||||
|
||||
# Create group entities and add listener to update groups on server update
|
||||
_update_groups()
|
||||
coordinator.async_add_listener(_update_groups)
|
||||
|
||||
|
||||
class SnapcastBaseDevice(SnapcastCoordinatorEntity, MediaPlayerEntity):
|
||||
"""Base class representing a Snapcast device."""
|
||||
class SnapcastClientDevice(SnapcastCoordinatorEntity, MediaPlayerEntity):
|
||||
"""Representation of a Snapcast client device."""
|
||||
|
||||
_attr_should_poll = False
|
||||
_attr_supported_features = _SUPPORTED_FEATURES
|
||||
_attr_supported_features = (
|
||||
MediaPlayerEntityFeature.VOLUME_MUTE
|
||||
| MediaPlayerEntityFeature.VOLUME_SET
|
||||
| MediaPlayerEntityFeature.SELECT_SOURCE
|
||||
| MediaPlayerEntityFeature.GROUPING
|
||||
)
|
||||
_attr_media_content_type = MediaType.MUSIC
|
||||
_attr_device_class = MediaPlayerDeviceClass.SPEAKER
|
||||
_device: Snapclient
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: SnapcastUpdateCoordinator,
|
||||
device: Snapgroup | Snapclient,
|
||||
device: Snapclient,
|
||||
) -> None:
|
||||
"""Initialize the base device."""
|
||||
super().__init__(coordinator)
|
||||
@@ -191,13 +155,13 @@ class SnapcastBaseDevice(SnapcastCoordinatorEntity, MediaPlayerEntity):
|
||||
|
||||
@classmethod
|
||||
def get_unique_id(cls, host, id) -> str:
|
||||
"""Build a unique ID."""
|
||||
raise NotImplementedError
|
||||
"""Get a unique ID for a client."""
|
||||
return f"{CLIENT_PREFIX}{host}_{id}"
|
||||
|
||||
@property
|
||||
def _current_group(self) -> Snapgroup:
|
||||
"""Return the group."""
|
||||
raise NotImplementedError
|
||||
"""Return the group the client is associated with."""
|
||||
return self._device.group
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Subscribe to events."""
|
||||
@@ -213,6 +177,33 @@ class SnapcastBaseDevice(SnapcastCoordinatorEntity, MediaPlayerEntity):
|
||||
"""Return the snapcast identifier."""
|
||||
return self._device.identifier
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
"""Return the name of the device."""
|
||||
return f"{self._device.friendly_name} {CLIENT_SUFFIX}"
|
||||
|
||||
@property
|
||||
def state(self) -> MediaPlayerState | None:
|
||||
"""Return the state of the player."""
|
||||
if self._device.connected:
|
||||
if self.is_volume_muted or self._current_group.muted:
|
||||
return MediaPlayerState.IDLE
|
||||
return STREAM_STATUS.get(self._current_group.stream_status)
|
||||
return MediaPlayerState.OFF
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> Mapping[str, Any]:
|
||||
"""Return the state attributes."""
|
||||
state_attrs = {}
|
||||
if self.latency is not None:
|
||||
state_attrs["latency"] = self.latency
|
||||
return state_attrs
|
||||
|
||||
@property
|
||||
def latency(self) -> float | None:
|
||||
"""Return current latency."""
|
||||
return self._device.latency
|
||||
|
||||
@property
|
||||
def source(self) -> str | None:
|
||||
"""Return the current input source."""
|
||||
@@ -260,29 +251,54 @@ class SnapcastBaseDevice(SnapcastCoordinatorEntity, MediaPlayerEntity):
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_set_latency(self, latency) -> None:
|
||||
"""Handle the set_latency service."""
|
||||
raise NotImplementedError
|
||||
"""Set the latency of the client."""
|
||||
await self._device.set_latency(latency)
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_join(self, master) -> None:
|
||||
"""Handle the join service."""
|
||||
raise NotImplementedError
|
||||
@property
|
||||
def group_members(self) -> list[str] | None:
|
||||
"""List of player entities which are currently grouped together for synchronous playback."""
|
||||
entity_registry = er.async_get(self.hass)
|
||||
return [
|
||||
entity_id
|
||||
for client_id in self._current_group.clients
|
||||
if (
|
||||
entity_id := entity_registry.async_get_entity_id(
|
||||
MEDIA_PLAYER_DOMAIN,
|
||||
DOMAIN,
|
||||
self.get_unique_id(self.coordinator.host_id, client_id),
|
||||
)
|
||||
)
|
||||
]
|
||||
|
||||
async def async_unjoin(self) -> None:
|
||||
"""Handle the unjoin service."""
|
||||
raise NotImplementedError
|
||||
async def async_join_players(self, group_members: list[str]) -> None:
|
||||
"""Add `group_members` to this client's current group."""
|
||||
# Get the client entity for each group member excluding self
|
||||
entity_registry = er.async_get(self.hass)
|
||||
clients = [
|
||||
entity
|
||||
for entity_id in group_members
|
||||
if (entity := entity_registry.async_get(entity_id))
|
||||
and entity.unique_id != self.unique_id
|
||||
]
|
||||
|
||||
def _async_create_grouping_deprecation_issue(self) -> None:
|
||||
"""Create an issue for deprecated grouping actions."""
|
||||
ir.async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
"deprecated_grouping_actions",
|
||||
breaks_in_ha_version="2026.2.0",
|
||||
is_fixable=False,
|
||||
is_persistent=False,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="deprecated_grouping_actions",
|
||||
)
|
||||
for client in clients:
|
||||
# Valid entity is a snapcast client
|
||||
if not client.unique_id.startswith(CLIENT_PREFIX):
|
||||
raise ServiceValidationError(
|
||||
f"Entity '{client.entity_id}' is not a Snapcast client device."
|
||||
)
|
||||
|
||||
# Extract client ID and join it to the current group
|
||||
identifier = client.unique_id.split("_")[-1]
|
||||
await self._current_group.add_client(identifier)
|
||||
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_unjoin_player(self) -> None:
|
||||
"""Remove this client from it's current group."""
|
||||
await self._current_group.remove_client(self._device.identifier)
|
||||
self.async_write_ha_state()
|
||||
|
||||
@property
|
||||
def metadata(self) -> Mapping[str, Any]:
|
||||
@@ -353,222 +369,3 @@ class SnapcastBaseDevice(SnapcastCoordinatorEntity, MediaPlayerEntity):
|
||||
return int(value)
|
||||
|
||||
return None
|
||||
|
||||
|
||||
class SnapcastGroupDevice(SnapcastBaseDevice):
|
||||
"""Representation of a Snapcast group device."""
|
||||
|
||||
_device: Snapgroup
|
||||
|
||||
@classmethod
|
||||
def get_unique_id(cls, host, id) -> str:
|
||||
"""Get a unique ID for a group."""
|
||||
return f"{GROUP_PREFIX}{host}_{id}"
|
||||
|
||||
@property
|
||||
def _current_group(self) -> Snapgroup:
|
||||
"""Return the group."""
|
||||
return self._device
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
"""Return the name of the device."""
|
||||
return f"{self._device.friendly_name} {GROUP_SUFFIX}"
|
||||
|
||||
@property
|
||||
def state(self) -> MediaPlayerState | None:
|
||||
"""Return the state of the player."""
|
||||
if self.is_volume_muted:
|
||||
return MediaPlayerState.IDLE
|
||||
return STREAM_STATUS.get(self._device.stream_status)
|
||||
|
||||
async def async_set_latency(self, latency) -> None:
|
||||
"""Handle the set_latency service."""
|
||||
raise ServiceValidationError("Latency can only be set for a Snapcast client.")
|
||||
|
||||
async def async_join(self, master) -> None:
|
||||
"""Handle the join service."""
|
||||
raise ServiceValidationError("Entity is not a client. Can only join clients.")
|
||||
|
||||
async def async_unjoin(self) -> None:
|
||||
"""Handle the unjoin service."""
|
||||
raise ServiceValidationError("Entity is not a client. Can only unjoin clients.")
|
||||
|
||||
def _async_create_group_deprecation_issue(self) -> None:
|
||||
"""Create an issue for deprecated group entities."""
|
||||
ir.async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
"deprecated_group_entities",
|
||||
breaks_in_ha_version="2026.2.0",
|
||||
is_fixable=False,
|
||||
is_persistent=False,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="deprecated_group_entities",
|
||||
)
|
||||
|
||||
async def async_select_source(self, source: str) -> None:
|
||||
"""Set input source."""
|
||||
# Groups are deprecated, create an issue when used
|
||||
self._async_create_group_deprecation_issue()
|
||||
|
||||
await super().async_select_source(source)
|
||||
|
||||
async def async_mute_volume(self, mute: bool) -> None:
|
||||
"""Send the mute command."""
|
||||
# Groups are deprecated, create an issue when used
|
||||
self._async_create_group_deprecation_issue()
|
||||
|
||||
await super().async_mute_volume(mute)
|
||||
|
||||
async def async_set_volume_level(self, volume: float) -> None:
|
||||
"""Set the volume level."""
|
||||
# Groups are deprecated, create an issue when used
|
||||
self._async_create_group_deprecation_issue()
|
||||
|
||||
await super().async_set_volume_level(volume)
|
||||
|
||||
async def async_snapshot(self) -> None:
|
||||
"""Snapshot the group state."""
|
||||
# Groups are deprecated, create an issue when used
|
||||
self._async_create_group_deprecation_issue()
|
||||
|
||||
await super().async_snapshot()
|
||||
|
||||
async def async_restore(self) -> None:
|
||||
"""Restore the group state."""
|
||||
# Groups are deprecated, create an issue when used
|
||||
self._async_create_group_deprecation_issue()
|
||||
|
||||
await super().async_restore()
|
||||
|
||||
|
||||
class SnapcastClientDevice(SnapcastBaseDevice):
|
||||
"""Representation of a Snapcast client device."""
|
||||
|
||||
_device: Snapclient
|
||||
_attr_supported_features = (
|
||||
_SUPPORTED_FEATURES | MediaPlayerEntityFeature.GROUPING
|
||||
) # Clients support grouping
|
||||
|
||||
@classmethod
|
||||
def get_unique_id(cls, host, id) -> str:
|
||||
"""Get a unique ID for a client."""
|
||||
return f"{CLIENT_PREFIX}{host}_{id}"
|
||||
|
||||
@property
|
||||
def _current_group(self) -> Snapgroup:
|
||||
"""Return the group the client is associated with."""
|
||||
return self._device.group
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
"""Return the name of the device."""
|
||||
return f"{self._device.friendly_name} {CLIENT_SUFFIX}"
|
||||
|
||||
@property
|
||||
def state(self) -> MediaPlayerState | None:
|
||||
"""Return the state of the player."""
|
||||
if self._device.connected:
|
||||
if self.is_volume_muted or self._current_group.muted:
|
||||
return MediaPlayerState.IDLE
|
||||
return STREAM_STATUS.get(self._current_group.stream_status)
|
||||
return MediaPlayerState.OFF
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> Mapping[str, Any]:
|
||||
"""Return the state attributes."""
|
||||
state_attrs = {}
|
||||
if self.latency is not None:
|
||||
state_attrs["latency"] = self.latency
|
||||
return state_attrs
|
||||
|
||||
@property
|
||||
def latency(self) -> float | None:
|
||||
"""Latency for Client."""
|
||||
return self._device.latency
|
||||
|
||||
async def async_set_latency(self, latency) -> None:
|
||||
"""Set the latency of the client."""
|
||||
await self._device.set_latency(latency)
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_join(self, master) -> None:
|
||||
"""Join the group of the master player."""
|
||||
# Action is deprecated, create an issue
|
||||
self._async_create_grouping_deprecation_issue()
|
||||
|
||||
entity_registry = er.async_get(self.hass)
|
||||
master_entity = entity_registry.async_get(master)
|
||||
if master_entity is None:
|
||||
raise ServiceValidationError(f"Master entity '{master}' not found.")
|
||||
|
||||
# Validate master entity is a client
|
||||
unique_id = master_entity.unique_id
|
||||
if not unique_id.startswith(CLIENT_PREFIX):
|
||||
raise ServiceValidationError(
|
||||
"Master is not a client device. Can only join clients."
|
||||
)
|
||||
|
||||
# Extract the client ID and locate it's group
|
||||
identifier = unique_id.split("_")[-1]
|
||||
master_group = next(
|
||||
group
|
||||
for group in self._device.groups_available()
|
||||
if identifier in group.clients
|
||||
)
|
||||
await master_group.add_client(self._device.identifier)
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_unjoin(self) -> None:
|
||||
"""Unjoin the group the player is currently in."""
|
||||
# Action is deprecated, create an issue
|
||||
self._async_create_grouping_deprecation_issue()
|
||||
|
||||
await self._current_group.remove_client(self._device.identifier)
|
||||
self.async_write_ha_state()
|
||||
|
||||
@property
|
||||
def group_members(self) -> list[str] | None:
|
||||
"""List of player entities which are currently grouped together for synchronous playback."""
|
||||
entity_registry = er.async_get(self.hass)
|
||||
return [
|
||||
entity_id
|
||||
for client_id in self._current_group.clients
|
||||
if (
|
||||
entity_id := entity_registry.async_get_entity_id(
|
||||
MEDIA_PLAYER_DOMAIN,
|
||||
DOMAIN,
|
||||
self.get_unique_id(self.coordinator.host_id, client_id),
|
||||
)
|
||||
)
|
||||
]
|
||||
|
||||
async def async_join_players(self, group_members: list[str]) -> None:
|
||||
"""Add `group_members` to this client's current group."""
|
||||
# Get the client entity for each group member excluding self
|
||||
entity_registry = er.async_get(self.hass)
|
||||
clients = [
|
||||
entity
|
||||
for entity_id in group_members
|
||||
if (entity := entity_registry.async_get(entity_id))
|
||||
and entity.unique_id != self.unique_id
|
||||
]
|
||||
|
||||
for client in clients:
|
||||
# Valid entity is a snapcast client
|
||||
if not client.unique_id.startswith(CLIENT_PREFIX):
|
||||
raise ServiceValidationError(
|
||||
f"Entity '{client.entity_id}' is not a Snapcast client device."
|
||||
)
|
||||
|
||||
# Extract client ID and join it to the current group
|
||||
identifier = client.unique_id.split("_")[-1]
|
||||
await self._current_group.add_client(identifier)
|
||||
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_unjoin_player(self) -> None:
|
||||
"""Remove this client from it's current group."""
|
||||
await self._current_group.remove_client(self._device.identifier)
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -1,24 +1,3 @@
|
||||
join:
|
||||
fields:
|
||||
master:
|
||||
required: true
|
||||
selector:
|
||||
entity:
|
||||
integration: snapcast
|
||||
domain: media_player
|
||||
entity_id:
|
||||
selector:
|
||||
target:
|
||||
entity:
|
||||
integration: snapcast
|
||||
domain: media_player
|
||||
|
||||
unjoin:
|
||||
target:
|
||||
entity:
|
||||
integration: snapcast
|
||||
domain: media_player
|
||||
|
||||
snapshot:
|
||||
target:
|
||||
entity:
|
||||
|
||||
@@ -21,31 +21,7 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"deprecated_group_entities": {
|
||||
"description": "Snapcast group entities are deprecated and will be removed in 2026.2. Please use the 'media_player.join' and 'media_player.unjoin' actions instead.",
|
||||
"title": "Snapcast Groups Entities Deprecated"
|
||||
},
|
||||
"deprecated_grouping_actions": {
|
||||
"description": "Actions 'snapcast.join' and 'snapcast.unjoin' are deprecated and will be removed in 2026.2. Use the 'media_player.join' and 'media_player.unjoin' actions instead.",
|
||||
"title": "Snapcast Actions Deprecated"
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"join": {
|
||||
"description": "Groups players together in a single group.",
|
||||
"fields": {
|
||||
"entity_id": {
|
||||
"description": "The players to join to the \"master\".",
|
||||
"name": "Entity"
|
||||
},
|
||||
"master": {
|
||||
"description": "Entity ID of the player to synchronize to.",
|
||||
"name": "Master"
|
||||
}
|
||||
},
|
||||
"name": "Join"
|
||||
},
|
||||
"restore": {
|
||||
"description": "Restores a previously taken snapshot of a media player.",
|
||||
"name": "Restore"
|
||||
@@ -63,10 +39,6 @@
|
||||
"snapshot": {
|
||||
"description": "Takes a snapshot of what is currently playing on a media player.",
|
||||
"name": "Snapshot"
|
||||
},
|
||||
"unjoin": {
|
||||
"description": "Removes one or more players from a group.",
|
||||
"name": "Unjoin"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -78,9 +78,9 @@
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
"any": "[%key:common::selector::trigger_behavior::options::any%]",
|
||||
"first": "[%key:common::selector::trigger_behavior::options::first%]",
|
||||
"last": "[%key:common::selector::trigger_behavior::options::last%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -5,6 +5,7 @@ import logging
|
||||
import switchbot
|
||||
|
||||
from homeassistant.components import bluetooth
|
||||
from homeassistant.components.sensor import ConfigType
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CONF_ADDRESS,
|
||||
@@ -16,7 +17,7 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
|
||||
from .const import (
|
||||
CONF_ENCRYPTION_KEY,
|
||||
@@ -30,6 +31,10 @@ from .const import (
|
||||
SupportedModels,
|
||||
)
|
||||
from .coordinator import SwitchbotConfigEntry, SwitchbotDataUpdateCoordinator
|
||||
from .services import async_setup_services
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
|
||||
PLATFORMS_BY_TYPE = {
|
||||
SupportedModels.BULB.value: [Platform.SENSOR, Platform.LIGHT],
|
||||
@@ -113,6 +118,8 @@ PLATFORMS_BY_TYPE = {
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.BUTTON,
|
||||
],
|
||||
SupportedModels.KEYPAD_VISION.value: [Platform.SENSOR, Platform.BINARY_SENSOR],
|
||||
SupportedModels.KEYPAD_VISION_PRO.value: [Platform.SENSOR, Platform.BINARY_SENSOR],
|
||||
}
|
||||
CLASS_BY_DEVICE = {
|
||||
SupportedModels.CEILING_LIGHT.value: switchbot.SwitchbotCeilingLight,
|
||||
@@ -150,12 +157,20 @@ CLASS_BY_DEVICE = {
|
||||
SupportedModels.GARAGE_DOOR_OPENER.value: switchbot.SwitchbotGarageDoorOpener,
|
||||
SupportedModels.SMART_THERMOSTAT_RADIATOR.value: switchbot.SwitchbotSmartThermostatRadiator,
|
||||
SupportedModels.ART_FRAME.value: switchbot.SwitchbotArtFrame,
|
||||
SupportedModels.KEYPAD_VISION.value: switchbot.SwitchbotKeypadVision,
|
||||
SupportedModels.KEYPAD_VISION_PRO.value: switchbot.SwitchbotKeypadVision,
|
||||
}
|
||||
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Switchbot Devices component."""
|
||||
async_setup_services(hass)
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: SwitchbotConfigEntry) -> bool:
|
||||
"""Set up Switchbot from a config entry."""
|
||||
assert entry.unique_id is not None
|
||||
|
||||
@@ -62,6 +62,8 @@ class SupportedModels(StrEnum):
|
||||
SMART_THERMOSTAT_RADIATOR = "smart_thermostat_radiator"
|
||||
S20_VACUUM = "s20_vacuum"
|
||||
ART_FRAME = "art_frame"
|
||||
KEYPAD_VISION = "keypad_vision"
|
||||
KEYPAD_VISION_PRO = "keypad_vision_pro"
|
||||
|
||||
|
||||
CONNECTABLE_SUPPORTED_MODEL_TYPES = {
|
||||
@@ -102,6 +104,8 @@ CONNECTABLE_SUPPORTED_MODEL_TYPES = {
|
||||
SwitchbotModel.CLIMATE_PANEL: SupportedModels.CLIMATE_PANEL,
|
||||
SwitchbotModel.SMART_THERMOSTAT_RADIATOR: SupportedModels.SMART_THERMOSTAT_RADIATOR,
|
||||
SwitchbotModel.ART_FRAME: SupportedModels.ART_FRAME,
|
||||
SwitchbotModel.KEYPAD_VISION: SupportedModels.KEYPAD_VISION,
|
||||
SwitchbotModel.KEYPAD_VISION_PRO: SupportedModels.KEYPAD_VISION_PRO,
|
||||
}
|
||||
|
||||
NON_CONNECTABLE_SUPPORTED_MODEL_TYPES = {
|
||||
@@ -142,6 +146,8 @@ ENCRYPTED_MODELS = {
|
||||
SwitchbotModel.GARAGE_DOOR_OPENER,
|
||||
SwitchbotModel.SMART_THERMOSTAT_RADIATOR,
|
||||
SwitchbotModel.ART_FRAME,
|
||||
SwitchbotModel.KEYPAD_VISION,
|
||||
SwitchbotModel.KEYPAD_VISION_PRO,
|
||||
}
|
||||
|
||||
ENCRYPTED_SWITCHBOT_MODEL_TO_CLASS: dict[
|
||||
@@ -165,6 +171,8 @@ ENCRYPTED_SWITCHBOT_MODEL_TO_CLASS: dict[
|
||||
SwitchbotModel.GARAGE_DOOR_OPENER: switchbot.SwitchbotRelaySwitch,
|
||||
SwitchbotModel.SMART_THERMOSTAT_RADIATOR: switchbot.SwitchbotSmartThermostatRadiator,
|
||||
SwitchbotModel.ART_FRAME: switchbot.SwitchbotArtFrame,
|
||||
SwitchbotModel.KEYPAD_VISION: switchbot.SwitchbotKeypadVision,
|
||||
SwitchbotModel.KEYPAD_VISION_PRO: switchbot.SwitchbotKeypadVision,
|
||||
}
|
||||
|
||||
HASS_SENSOR_TYPE_TO_SWITCHBOT_MODEL = {
|
||||
|
||||
@@ -141,5 +141,10 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"add_password": {
|
||||
"service": "mdi:key-plus"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
119 homeassistant/components/switchbot/services.py Normal file
@@ -0,0 +1,119 @@
|
||||
"""Services for the SwitchBot integration."""
|
||||
|
||||
from __future__ import annotations

import voluptuous as vol

from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import ATTR_DEVICE_ID, CONF_SENSOR_TYPE
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import config_validation as cv, device_registry as dr

from .const import DOMAIN, SupportedModels
from .coordinator import SwitchbotConfigEntry, SwitchbotDataUpdateCoordinator

SERVICE_ADD_PASSWORD = "add_password"

ATTR_PASSWORD = "password"

_PASSWORD_VALIDATOR = vol.All(cv.string, cv.matches_regex(r"^\d{6,12}$"))

SCHEMA_ADD_PASSWORD_SERVICE = vol.Schema(
    {
        vol.Required(ATTR_DEVICE_ID): cv.string,
        vol.Required(ATTR_PASSWORD): _PASSWORD_VALIDATOR,
    },
    extra=vol.ALLOW_EXTRA,
)


@callback
def _async_get_switchbot_entry_for_device_id(
    hass: HomeAssistant, device_id: str
) -> SwitchbotConfigEntry:
    """Return the loaded SwitchBot config entry for a device id."""
    device_registry = dr.async_get(hass)
    if not (device_entry := device_registry.async_get(device_id)):
        raise ServiceValidationError(
            translation_domain=DOMAIN,
            translation_key="invalid_device_id",
            translation_placeholders={"device_id": device_id},
        )

    entries = [
        hass.config_entries.async_get_entry(entry_id)
        for entry_id in device_entry.config_entries
    ]
    switchbot_entries = [
        entry for entry in entries if entry is not None and entry.domain == DOMAIN
    ]
    if not switchbot_entries:
        raise ServiceValidationError(
            translation_domain=DOMAIN,
            translation_key="device_not_belonging",
            translation_placeholders={"device_id": device_id},
        )

    if not (
        loaded_entry := next(
            (
                entry
                for entry in switchbot_entries
                if entry.state is ConfigEntryState.LOADED
            ),
            None,
        )
    ):
        raise ServiceValidationError(
            translation_domain=DOMAIN,
            translation_key="device_entry_not_loaded",
            translation_placeholders={"device_id": device_id},
        )

    return loaded_entry


def _is_supported_keypad(entry: SwitchbotConfigEntry) -> bool:
    """Return if the entry is a supported keypad model."""
    allowed_sensor_types = {
        SupportedModels.KEYPAD_VISION.value,
        SupportedModels.KEYPAD_VISION_PRO.value,
    }
    return entry.data.get(CONF_SENSOR_TYPE) in allowed_sensor_types


@callback
def _async_target(
    hass: HomeAssistant, device_id: str
) -> SwitchbotDataUpdateCoordinator:
    """Return coordinator for a single target device."""
    entry = _async_get_switchbot_entry_for_device_id(hass, device_id)
    if not _is_supported_keypad(entry):
        raise ServiceValidationError(
            translation_domain=DOMAIN,
            translation_key="not_keypad_vision_device",
        )

    return entry.runtime_data


async def async_add_password(call: ServiceCall) -> None:
    """Add a password to a SwitchBot keypad device."""
    password: str = call.data[ATTR_PASSWORD]
    device_id = call.data[ATTR_DEVICE_ID]

    coordinator = _async_target(call.hass, device_id)

    await coordinator.device.add_password(password)


@callback
def async_setup_services(hass: HomeAssistant) -> None:
    """Set up the services for the SwitchBot integration."""
    hass.services.async_register(
        DOMAIN,
        SERVICE_ADD_PASSWORD,
        async_add_password,
        schema=SCHEMA_ADD_PASSWORD_SERVICE,
    )
homeassistant/components/switchbot/services.yaml (new file, 14 lines)
@@ -0,0 +1,14 @@
add_password:
  fields:
    device_id:
      required: true
      example: "c2d01328efd261f586e56d914e3af07e"
      selector:
        device:
          integration: switchbot
    password:
      required: true
      example: "123456"
      selector:
        text:
          type: password
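For context, a minimal sketch (not part of this changeset) of how the new action could be invoked programmatically; the device id and code are the placeholder examples from services.yaml above:

# Hypothetical usage sketch for the new SwitchBot action; values are the
# placeholder examples from services.yaml, not a real device registry entry.
from homeassistant.core import HomeAssistant


async def add_keypad_password(hass: HomeAssistant) -> None:
    """Call switchbot.add_password with a 6 to 12 digit code."""
    await hass.services.async_call(
        "switchbot",
        "add_password",
        {
            "device_id": "c2d01328efd261f586e56d914e3af07e",  # placeholder id
            "password": "123456",  # must match ^\d{6,12}$
        },
        blocking=True,
    )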
@@ -329,9 +329,24 @@
    "advertising_state_error": {
      "message": "{address} is not advertising state"
    },
    "device_entry_not_loaded": {
      "message": "The device ID {device_id} is not loaded."
    },
    "device_not_belonging": {
      "message": "The device ID {device_id} does not belong to SwitchBot integration."
    },
    "device_not_found_error": {
      "message": "Could not find Switchbot {sensor_type} with address {address}"
    },
    "device_without_config_entry": {
      "message": "The device ID {device_id} is not associated with a config entry."
    },
    "invalid_device_id": {
      "message": "The device ID {device_id} is not a valid device ID."
    },
    "not_keypad_vision_device": {
      "message": "This service is only supported for SwitchBot Keypad Vision devices."
    },
    "operation_error": {
      "message": "An error occurred while performing the action: {error}"
    },
@@ -352,5 +367,21 @@
        }
      }
    }
  },
  "services": {
    "add_password": {
      "description": "Add a password to your keypad vision device.",
      "fields": {
        "device_id": {
          "description": "The device ID of the keypad vision device",
          "name": "Device ID"
        },
        "password": {
          "description": "A 6 to 12 digit password",
          "name": "Password"
        }
      },
      "name": "Add password"
    }
  }
}
homeassistant/components/teslemetry/quality_scale.yaml (new file, 109 lines)
@@ -0,0 +1,109 @@
rules:
  # Bronze
  action-setup: done
  appropriate-polling: done
  brands: done
  common-modules:
    status: todo
    comment: |
      Multiline lambdas should be wrapped in parentheses for readability (e.g. streaming_listener).
      Use chained comparison: "if 1 < x < 100" instead of "if x > 1 and x < 100".
  config-flow: done
  config-flow-test-coverage:
    status: todo
    comment: Use mock_setup_entry fixture instead of inline patch
  dependency-transparency: done
  docs-actions: done
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup: done
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done
  # Silver
  action-exceptions: done
  config-entry-unloading:
    status: todo
    comment: |
      async_unload_entry must clean up: (1) close TeslemetryStream websocket via
      stream.close(), (2) call remove_listener() for each vehicle to unsubscribe
      from stream events, (3) consider using entry.async_on_unload() during setup
      to register cleanup callbacks automatically.
  docs-configuration-parameters: done
  docs-installation-parameters: done
  entity-unavailable: done
  integration-owner: done
  log-when-unavailable: done
  parallel-updates: done
  reauthentication-flow: done
  test-coverage:
    status: todo
    comment: |
      Discourage snapshot testing for state verification (e.g. test_binary_sensors_connectivity);
      use concrete assertions instead. Patch devices where they're used. Use entity_registry as
      test fixture. Clarify _alt and _noscope fixture purposes. Test error messages in
      test_service_validation_errors.
  # Gold
  devices:
    status: todo
    comment: |
      Add model id to device info. VIN sensor may be redundant (already serial number in device).
      Version sensor should be sw_version in device info instead.
  diagnostics: done
  discovery:
    status: exempt
    comment: Cloud polling integration
  discovery-update-info:
    status: exempt
    comment: Cloud polling integration
  docs-data-update: done
  docs-examples: done
  docs-known-limitations: done
  docs-supported-devices: done
  docs-supported-functions: done
  docs-troubleshooting: done
  docs-use-cases: done
  dynamic-devices:
    status: todo
    comment: |
      New vehicles/energy sites added to user's Tesla account after initial setup
      are not detected. Need to periodically poll teslemetry.products() and add
      new TeslemetryVehicleData/TeslemetryEnergyData to runtime_data, then trigger
      entity creation via coordinator listeners in each platform.
  entity-category: done
  entity-device-class:
    status: todo
    comment: |
      DRIVE_INVERTER_STATES has "unavailable" as a state value which conflicts with HA's
      unavailable state - shows duplicate in state trigger UI.
  entity-disabled-by-default: done
  entity-translations: done
  exception-translations:
    status: todo
    comment: |
      ConfigEntryAuthFailed and UpdateFailed exceptions can have translated messages.
      Also one "unknown error" that cannot be translated.
  icon-translations:
    status: todo
    comment: |
      number.py:299 uses _attr_icon = icon_for_battery_level() instead of
      range-based icons in icons.json. Affects backup_reserve_percent and
      off_grid_vehicle_charging_reserve_percent entities. Remove the dynamic
      icon assignment and add range-based icon entries to icons.json.
  reconfiguration-flow:
    status: todo
    comment: |
      Reconfiguring has value even with OAuth - allows user to trigger reauth themselves
      (e.g. after logging out of all devices).
  repair-issues:
    status: exempt
    comment: No issues to repair
  stale-devices: done
  # Platinum
  async-dependency: done
  inject-websession: done
  strict-typing: todo
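The config-entry-unloading note above suggests registering cleanup callbacks during setup; a minimal sketch of that pattern, assuming the stream and per-vehicle listeners live on runtime_data as the comment implies (attribute names are assumptions, not verified Teslemetry API):

# Sketch only: cleanup registration per the quality-scale comment above.
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    data = entry.runtime_data  # assumed to hold the stream and vehicle data
    # (1) close the TeslemetryStream websocket when the entry unloads
    entry.async_on_unload(data.stream.close)
    # (2) unsubscribe each vehicle's stream listener on unload
    for vehicle in data.vehicles:
        entry.async_on_unload(vehicle.remove_listener)
    return True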
@@ -16,10 +16,7 @@ from homeassistant.helpers import (
    discovery,
    entity_registry as er,
)
from homeassistant.helpers.device import (
    async_entity_id_to_device_id,
    async_remove_stale_devices_links_keep_entity_device,
)
from homeassistant.helpers.device import async_entity_id_to_device_id
from homeassistant.helpers.helper_integration import (
    async_handle_source_entity_changes,
    async_remove_helper_config_entry_from_source_device,
@@ -180,11 +177,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up Utility Meter from a config entry."""

    # This can be removed in HA Core 2026.2
    async_remove_stale_devices_links_keep_entity_device(
        hass, entry.entry_id, entry.options[CONF_SOURCE_SENSOR]
    )

    entity_registry = er.async_get(hass)
    hass.data[DATA_UTILITY][entry.entry_id] = {
        "source": entry.options[CONF_SOURCE_SENSOR],
@@ -98,9 +98,9 @@
    },
    "trigger_behavior": {
      "options": {
        "any": "Any",
        "first": "First",
        "last": "Last"
        "any": "[%key:common::selector::trigger_behavior::options::any%]",
        "first": "[%key:common::selector::trigger_behavior::options::first%]",
        "last": "[%key:common::selector::trigger_behavior::options::last%]"
      }
    }
  },
@@ -4,7 +4,7 @@ from __future__ import annotations

from typing import Any

from pyvlx import Intensity, LighteningDevice
from pyvlx import Intensity, Light

from homeassistant.components.light import ATTR_BRIGHTNESS, ColorMode, LightEntity
from homeassistant.core import HomeAssistant
@@ -26,7 +26,7 @@ async def async_setup_entry(
    async_add_entities(
        VeluxLight(node, config_entry.entry_id)
        for node in pyvlx.nodes
        if isinstance(node, LighteningDevice)
        if isinstance(node, Light)
    )


@@ -37,7 +37,7 @@ class VeluxLight(VeluxEntity, LightEntity):
    _attr_color_mode = ColorMode.BRIGHTNESS
    _attr_name = None

    node: LighteningDevice
    node: Light

    @property
    def brightness(self):
@@ -4,5 +4,6 @@
  "codeowners": [],
  "documentation": "https://www.home-assistant.io/integrations/viaggiatreno",
  "iot_class": "cloud_polling",
  "quality_scale": "legacy"
  "quality_scale": "legacy",
  "requirements": ["viaggiatreno_ha==0.2.4"]
}
@@ -2,12 +2,17 @@

from __future__ import annotations

import asyncio
from http import HTTPStatus
from datetime import timedelta
import logging
import time
from typing import Any

import aiohttp
from viaggiatreno_ha.trainline import (
    TrainLine,
    TrainLineStatus,
    TrainState,
    Viaggiatreno,
)
import voluptuous as vol

from homeassistant.components.sensor import (
@@ -19,19 +24,12 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType

_LOGGER = logging.getLogger(__name__)

VIAGGIATRENO_ENDPOINT = (
    "http://www.viaggiatreno.it/infomobilita/"
    "resteasy/viaggiatreno/andamentoTreno/"
    "{station_id}/{train_id}/{timestamp}"
)

REQUEST_TIMEOUT = 5  # seconds
ICON = "mdi:train"
MONITORED_INFO = [
MONITORED_INFO = [  # Backward compatibility with older versions
    "categoria",
    "compOrarioArrivoZeroEffettivo",
    "compOrarioPartenzaZeroEffettivo",
@@ -47,7 +45,6 @@ DEFAULT_NAME = "Train {}"

CONF_NAME = "train_name"
CONF_STATION_ID = "station_id"
CONF_STATION_NAME = "station_name"
CONF_TRAIN_ID = "train_id"

ARRIVED_STRING = "Arrived"
@@ -55,6 +52,8 @@ CANCELLED_STRING = "Cancelled"
NOT_DEPARTED_STRING = "Not departed yet"
NO_INFORMATION_STRING = "No information for this train now"

SCAN_INTERVAL = timedelta(minutes=2)

PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_TRAIN_ID): cv.string,
@@ -71,126 +70,94 @@ async def async_setup_platform(
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the ViaggiaTreno platform."""
    train_id = config.get(CONF_TRAIN_ID)
    station_id = config.get(CONF_STATION_ID)
    train_id = str(config.get(CONF_TRAIN_ID))
    station_id = str(config.get(CONF_STATION_ID))
    if not (name := config.get(CONF_NAME)):
        name = DEFAULT_NAME.format(train_id)
    async_add_entities([ViaggiaTrenoSensor(train_id, station_id, name)])


async def async_http_request(hass, uri):
    """Perform actual request."""
    try:
        session = async_get_clientsession(hass)
        async with asyncio.timeout(REQUEST_TIMEOUT):
            req = await session.get(uri)
        if req.status != HTTPStatus.OK:
            return {"error": req.status}
        json_response = await req.json()
    except (TimeoutError, aiohttp.ClientError) as exc:
        _LOGGER.error("Cannot connect to ViaggiaTreno API endpoint: %s", exc)
        return None
    except ValueError:
        _LOGGER.error("Received non-JSON data from ViaggiaTreno API endpoint")
        return None
    return json_response
    tl = TrainLine(train_id=train_id, starting_station=station_id)
    async_add_entities([ViaggiaTrenoSensor(tl, name)], True)


class ViaggiaTrenoSensor(SensorEntity):
    """Implementation of a ViaggiaTreno sensor."""

    _attr_attribution = "Powered by ViaggiaTreno Data"
    _attr_should_poll = True

    def __init__(self, train_id, station_id, name):
    def __init__(self, train_line: TrainLine, name: str) -> None:
        """Initialize the sensor."""
        self._state = None
        self._attributes = {}
        self._unit = ""
        self._state: StateType = NO_INFORMATION_STRING
        self._attributes: dict[str, Any] = {}
        self._icon = ICON
        self._station_id = station_id
        self._name = name

        self.uri = VIAGGIATRENO_ENDPOINT.format(
            station_id=station_id, train_id=train_id, timestamp=int(time.time()) * 1000
        )
        self._line = train_line
        self._viaggiatreno: Viaggiatreno | None = None
        self._tstatus: TrainLineStatus | None = None

    @property
    def name(self):
    def name(self) -> str:
        """Return the name of the sensor."""
        return self._name

    @property
    def native_value(self):
    def native_value(self) -> StateType:
        """Return the state of the sensor."""
        return self._state

    @property
    def icon(self):
    def icon(self) -> str:
        """Icon to use in the frontend, if any."""
        return self._icon

    @property
    def native_unit_of_measurement(self):
    def native_unit_of_measurement(self) -> str | None:
        """Return the unit of measurement."""
        return self._unit
        if isinstance(self.native_value, (int, float)):
            return UnitOfTime.MINUTES
        return None

    @property
    def extra_state_attributes(self):
    def extra_state_attributes(self) -> dict[str, Any]:
        """Return extra attributes."""
        return self._attributes

    @staticmethod
    def has_departed(data):
        """Check if the train has actually departed."""
        try:
            first_station = data["fermate"][0]
            if data["oraUltimoRilevamento"] or first_station["effettiva"]:
                return True
        except ValueError:
            _LOGGER.error("Cannot fetch first station: %s", data)
        return False

    @staticmethod
    def has_arrived(data):
        """Check if the train has already arrived."""
        last_station = data["fermate"][-1]
        if not last_station["effettiva"]:
            return False
        return True

    @staticmethod
    def is_cancelled(data):
        """Check if the train is cancelled."""
        if data["tipoTreno"] == "ST" and data["provvedimento"] == 1:
            return True
        return False

    async def async_update(self) -> None:
        """Update state."""
        uri = self.uri
        res = await async_http_request(self.hass, uri)
        if res.get("error", ""):
            if res["error"] == 204:
                self._state = NO_INFORMATION_STRING
                self._unit = ""
            else:
                self._state = f"Error: {res['error']}"
                self._unit = ""
        else:
            for i in MONITORED_INFO:
                self._attributes[i] = res[i]

            if self.is_cancelled(res):
        if self._viaggiatreno is None:
            session = async_get_clientsession(self.hass)
            self._viaggiatreno = Viaggiatreno(session)
        try:
            await self._viaggiatreno.query_if_useful(self._line)
            self._tstatus = self._viaggiatreno.get_line_status(self._line)
            if self._tstatus is None:
                _LOGGER.error(
                    "Received status for line %s: None. Check the train and station IDs",
                    self._line,
                )
                return
        except (TimeoutError, aiohttp.ClientError) as exc:
            _LOGGER.error("Cannot connect to ViaggiaTreno API endpoint: %s", exc)
            return
        except ValueError:
            _LOGGER.error("Received non-JSON data from ViaggiaTreno API endpoint")
            return
        if self._tstatus is not None:
            if self._tstatus.state == TrainState.CANCELLED:
                self._state = CANCELLED_STRING
                self._icon = "mdi:cancel"
                self._unit = ""
            elif not self.has_departed(res):
            elif self._tstatus.state == TrainState.NOT_YET_DEPARTED:
                self._state = NOT_DEPARTED_STRING
                self._unit = ""
            elif self.has_arrived(res):
            elif self._tstatus.state == TrainState.ARRIVED:
                self._state = ARRIVED_STRING
                self._unit = ""
            else:
                self._state = res.get("ritardo")
                self._unit = UnitOfTime.MINUTES
            elif self._tstatus.state in {
                TrainState.RUNNING,
                TrainState.PARTIALLY_CANCELLED,
            }:
                delay_minutes = self._tstatus.timetable.delay
                self._state = delay_minutes
                self._icon = ICON
            else:
                self._state = NO_INFORMATION_STRING
            # Update attributes
            for info in MONITORED_INFO:
                self._attributes[info] = self._viaggiatreno.json[self._line][info]
@@ -1,7 +1,7 @@
{
  "domain": "wsdot",
  "name": "Washington State Department of Transportation (WSDOT)",
  "codeowners": [],
  "codeowners": ["@ucodery"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/wsdot",
  "integration_type": "service",
homeassistant/generated/config_flows.py (generated, 1 change)
@@ -379,6 +379,7 @@ FLOWS = {
    "lg_thinq",
    "libre_hardware_monitor",
    "lidarr",
    "liebherr",
    "lifx",
    "linkplay",
    "litejet",
@@ -3604,6 +3604,12 @@
      "config_flow": true,
      "iot_class": "local_polling"
    },
    "liebherr": {
      "name": "Liebherr",
      "integration_type": "hub",
      "config_flow": true,
      "iot_class": "cloud_polling"
    },
    "lifx": {
      "name": "LIFX",
      "integration_type": "device",
homeassistant/generated/zeroconf.py (generated, 4 changes)
@@ -597,6 +597,10 @@ ZEROCONF = {
        "domain": "lektrico",
        "name": "lektrico*",
    },
    {
        "domain": "liebherr",
        "name": "liebherr*",
    },
    {
        "domain": "loqed",
        "name": "loqed*",
@@ -75,7 +75,8 @@
      },
      "description": {
        "confirm_setup": "Do you want to start setup?",
        "implementation": "The credentials you want to use to authenticate."
        "implementation": "The credentials you want to use to authenticate.",
        "verify_ssl": "Whether to verify the SSL certificate of the server. Disable this if the server uses a self-signed or otherwise invalid certificate."
      },
      "error": {
        "cannot_connect": "Failed to connect",
@@ -128,6 +129,15 @@
      "model": "Model",
      "ui_managed": "Managed via UI"
    },
    "selector": {
      "trigger_behavior": {
        "options": {
          "any": "Any",
          "first": "First",
          "last": "Last"
        }
      }
    },
    "state": {
      "active": "Active",
      "auto": "Auto",
@@ -483,8 +483,6 @@ filterwarnings = [
    "ignore:Deprecated call to `pkg_resources.declare_namespace\\('azure'\\)`:DeprecationWarning:pkg_resources",

    # -- tracked upstream / open PRs
    # https://github.com/kbr/fritzconnection/pull/244 - v1.15.0 - 2025-05-17
    "ignore:.*invalid escape sequence:SyntaxWarning:.*fritzconnection.core.soaper",
    # https://github.com/hacf-fr/meteofrance-api/pull/688 - v1.4.0 - 2025-03-26
    "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:meteofrance_api.model.forecast",
requirements_all.txt (generated, 16 changes)
@@ -1015,7 +1015,7 @@ fressnapftracker==0.2.1

# homeassistant.components.fritz
# homeassistant.components.fritzbox_callmonitor
fritzconnection[qr]==1.15.0
fritzconnection[qr]==1.15.1

# homeassistant.components.fyta
fyta_cli==0.7.2

@@ -1524,7 +1524,7 @@ motionblindsble==0.1.3

motioneye-client==0.3.14

# homeassistant.components.bang_olufsen
mozart-api==5.3.1.108.0
mozart-api==5.3.1.108.2

# homeassistant.components.mullvad
mullvad-api==1.0.0

@@ -1584,7 +1584,7 @@ nextdns==5.0.0

nhc==0.7.0

# homeassistant.components.nibe_heatpump
nibe==2.21.0
nibe==2.22.0

# homeassistant.components.nice_go
nice-go==1.0.1

@@ -1989,7 +1989,7 @@ pydeconz==120

pydelijn==1.1.0

# homeassistant.components.dexcom
pydexcom==0.2.3
pydexcom==0.5.1

# homeassistant.components.discovergy
pydiscovergy==3.0.2

@@ -2187,6 +2187,9 @@ pylgnetcast==0.3.9

# homeassistant.components.forked_daapd
pylibrespot-java==0.1.1

# homeassistant.components.liebherr
pyliebherrhomeapi==0.2.1

# homeassistant.components.litejet
pylitejet==0.6.3

@@ -2748,7 +2751,7 @@ refoss-ha==1.2.5

regenmaschine==2024.03.0

# homeassistant.components.renault
renault-api==0.5.2
renault-api==0.5.3

# homeassistant.components.renson
renson-endura-delta==1.7.2

@@ -3143,6 +3146,9 @@ velbus-aio==2026.1.4

# homeassistant.components.venstar
venstarcolortouch==0.21

# homeassistant.components.viaggiatreno
viaggiatreno_ha==0.2.4

# homeassistant.components.victron_ble
victron-ble-ha-parser==0.4.9
requirements_test_all.txt (generated, 13 changes)
@@ -894,7 +894,7 @@ fressnapftracker==0.2.1

# homeassistant.components.fritz
# homeassistant.components.fritzbox_callmonitor
fritzconnection[qr]==1.15.0
fritzconnection[qr]==1.15.1

# homeassistant.components.fyta
fyta_cli==0.7.2

@@ -1331,7 +1331,7 @@ motionblindsble==0.1.3

motioneye-client==0.3.14

# homeassistant.components.bang_olufsen
mozart-api==5.3.1.108.0
mozart-api==5.3.1.108.2

# homeassistant.components.mullvad
mullvad-api==1.0.0

@@ -1379,7 +1379,7 @@ nextdns==5.0.0

nhc==0.7.0

# homeassistant.components.nibe_heatpump
nibe==2.21.0
nibe==2.22.0

# homeassistant.components.nice_go
nice-go==1.0.1

@@ -1696,7 +1696,7 @@ pydeako==0.6.0

pydeconz==120

# homeassistant.components.dexcom
pydexcom==0.2.3
pydexcom==0.5.1

# homeassistant.components.discovergy
pydiscovergy==3.0.2

@@ -1855,6 +1855,9 @@ pylgnetcast==0.3.9

# homeassistant.components.forked_daapd
pylibrespot-java==0.1.1

# homeassistant.components.liebherr
pyliebherrhomeapi==0.2.1

# homeassistant.components.litejet
pylitejet==0.6.3

@@ -2314,7 +2317,7 @@ refoss-ha==1.2.5

regenmaschine==2024.03.0

# homeassistant.components.renault
renault-api==0.5.2
renault-api==0.5.3

# homeassistant.components.renson
renson-endura-delta==1.7.2
@@ -707,7 +707,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [
    "openai_conversation",
    "openalpr_cloud",
    "openerz",
    "openevse",
    "openexchangerates",
    "opengarage",
    "openhardwaremonitor",
@@ -951,7 +950,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [
    "template",
    "tesla_fleet",
    "tesla_wall_connector",
    "teslemetry",
    "tessie",
    "tfiac",
    "thermobeacon",
@@ -1715,7 +1713,6 @@ INTEGRATIONS_WITHOUT_SCALE = [
    "openai_conversation",
    "openalpr_cloud",
    "openerz",
    "openevse",
    "openexchangerates",
    "opengarage",
    "openhardwaremonitor",
@@ -125,7 +125,7 @@ async def test_set_color_temp(hass: HomeAssistant) -> None:
    await hass.services.async_call(
        LIGHT_DOMAIN,
        SERVICE_TURN_ON,
        {ATTR_ENTITY_ID: DEVICE_ID, "color_temp": 309},
        {ATTR_ENTITY_ID: DEVICE_ID, "color_temp_kelvin": 3236},
        blocking=True,
    )
    await hass.async_block_till_done()
@@ -12,13 +12,13 @@ from mozart_api.models import (
    ListeningMode,
    ListeningModeFeatures,
    ListeningModeRef,
    ListeningModeTrigger,
    PairedRemote,
    PairedRemoteResponse,
    PlaybackContentMetadata,
    PlaybackProgress,
    PlaybackState,
    PlayQueueSettings,
    PowerLinkTrigger,
    ProductState,
    RemoteMenuItem,
    RenderingState,
@@ -372,19 +372,19 @@ def mock_mozart_client() -> Generator[AsyncMock]:
            id=TEST_SOUND_MODE,
            name=TEST_SOUND_MODE_NAME,
            features=ListeningModeFeatures(),
            triggers=[PowerLinkTrigger()],
            triggers=[ListeningModeTrigger()],
        ),
        ListeningMode(
            id=TEST_SOUND_MODE_2,
            name=TEST_SOUND_MODE_NAME,
            features=ListeningModeFeatures(),
            triggers=[PowerLinkTrigger()],
            triggers=[ListeningModeTrigger()],
        ),
        ListeningMode(
            id=345,
            name=f"{TEST_SOUND_MODE_NAME} 2",
            features=ListeningModeFeatures(),
            triggers=[PowerLinkTrigger()],
            triggers=[ListeningModeTrigger()],
        ),
    ]
    client.get_active_listening_mode = AsyncMock()
@@ -1,7 +1,6 @@
"""Constants used for testing the bang_olufsen integration."""

from ipaddress import IPv4Address, IPv6Address
from unittest.mock import Mock

from mozart_api.exceptions import ApiException
from mozart_api.models import (
@@ -246,11 +245,7 @@ TEST_DEEZER_TRACK = PlayQueueItem(
TEST_DEEZER_INVALID_FLOW = ApiException(
    status=400,
    reason="Bad Request",
    http_resp=Mock(
        status=400,
        reason="Bad Request",
        data='{"message": "Couldn\'t start user flow for me"}',  # codespell:ignore
    ),
    body='{"message": "Couldn\'t start user flow for me"}',  # codespell:ignore
)
TEST_SOUND_MODE = 123
TEST_SOUND_MODE_2 = 234
@@ -14,14 +14,6 @@ MOCK_OPTIONS = {

MOCK_CONFIG = {**MOCK_DATA, **MOCK_OPTIONS}

MOCK_YAML_INVALID = {
    "host": "127.0.0.1",
    "port": 65535,
    "prefix": "failtest",
    "rate": 1,
}


CONNECTION_TEST_METRIC = "connection_test"
@@ -2,13 +2,12 @@

from unittest.mock import MagicMock, patch

from homeassistant.components import datadog
from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.components.datadog.const import DOMAIN
from homeassistant.config_entries import SOURCE_USER
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
import homeassistant.helpers.issue_registry as ir

from .common import MOCK_CONFIG, MOCK_DATA, MOCK_OPTIONS, MOCK_YAML_INVALID
from .common import MOCK_CONFIG, MOCK_DATA, MOCK_OPTIONS

from tests.common import MockConfigEntry

@@ -22,7 +21,7 @@ async def test_user_flow_success(hass: HomeAssistant) -> None:
        mock_dogstatsd.return_value = mock_instance

        result = await hass.config_entries.flow.async_init(
            datadog.DOMAIN, context={"source": SOURCE_USER}
            DOMAIN, context={"source": SOURCE_USER}
        )
        assert result["type"] is FlowResultType.FORM

@@ -42,7 +41,7 @@ async def test_user_flow_retry_after_connection_fail(hass: HomeAssistant) -> Non
        side_effect=OSError("Connection failed"),
    ):
        result = await hass.config_entries.flow.async_init(
            datadog.DOMAIN, context={"source": SOURCE_USER}
            DOMAIN, context={"source": SOURCE_USER}
        )

        result2 = await hass.config_entries.flow.async_configure(
@@ -67,14 +66,14 @@ async def test_user_flow_abort_already_configured_service(
) -> None:
    """Abort user-initiated config flow if the same host/port is already configured."""
    existing_entry = MockConfigEntry(
        domain=datadog.DOMAIN,
        domain=DOMAIN,
        data=MOCK_DATA,
        options=MOCK_OPTIONS,
    )
    existing_entry.add_to_hass(hass)

    result = await hass.config_entries.flow.async_init(
        datadog.DOMAIN,
        DOMAIN,
        context={"source": SOURCE_USER},
    )

@@ -93,7 +92,7 @@ async def test_user_flow_abort_already_configured_service(
async def test_options_flow_cannot_connect(hass: HomeAssistant) -> None:
    """Test that the options flow shows an error when connection fails."""
    mock_entry = MockConfigEntry(
        domain=datadog.DOMAIN,
        domain=DOMAIN,
        data=MOCK_DATA,
        options=MOCK_OPTIONS,
    )
@@ -123,67 +122,10 @@ async def test_options_flow_cannot_connect(hass: HomeAssistant) -> None:
    assert result3["data"] == MOCK_OPTIONS


async def test_import_flow(
    hass: HomeAssistant, issue_registry: ir.IssueRegistry
) -> None:
    """Test import triggers config flow and is accepted."""
    with (
        patch(
            "homeassistant.components.datadog.config_flow.DogStatsd"
        ) as mock_dogstatsd,
    ):
        mock_instance = MagicMock()
        mock_dogstatsd.return_value = mock_instance

        result = await hass.config_entries.flow.async_init(
            datadog.DOMAIN,
            context={"source": SOURCE_IMPORT},
            data=MOCK_CONFIG,
        )

        assert result["type"] is FlowResultType.CREATE_ENTRY
        assert result["data"] == MOCK_DATA
        assert result["options"] == MOCK_OPTIONS

        await hass.async_block_till_done()

        # Deprecation issue should be created
        issue = issue_registry.async_get_issue(
            HOMEASSISTANT_DOMAIN, "deprecated_yaml_datadog"
        )
        assert issue is not None
        assert issue.translation_key == "deprecated_yaml"
        assert issue.severity == ir.IssueSeverity.WARNING


async def test_import_connection_error(
    hass: HomeAssistant, issue_registry: ir.IssueRegistry
) -> None:
    """Test import triggers connection error issue."""
    with patch(
        "homeassistant.components.datadog.config_flow.DogStatsd",
        side_effect=OSError("connection refused"),
    ):
        result = await hass.config_entries.flow.async_init(
            datadog.DOMAIN,
            context={"source": SOURCE_IMPORT},
            data=MOCK_YAML_INVALID,
        )
        assert result["type"] == "abort"
        assert result["reason"] == "cannot_connect"

        issue = issue_registry.async_get_issue(
            datadog.DOMAIN, "deprecated_yaml_import_connection_error"
        )
        assert issue is not None
        assert issue.translation_key == "deprecated_yaml_import_connection_error"
        assert issue.severity == ir.IssueSeverity.WARNING


async def test_options_flow(hass: HomeAssistant) -> None:
    """Test updating options after setup."""
    mock_entry = MockConfigEntry(
        domain=datadog.DOMAIN,
        domain=DOMAIN,
        data=MOCK_DATA,
        options=MOCK_OPTIONS,
    )
@@ -234,24 +176,3 @@ async def test_options_flow(hass: HomeAssistant) -> None:
    assert result["type"] is FlowResultType.CREATE_ENTRY
    assert result["data"] == new_options
    mock_instance.increment.assert_called_once_with("connection_test")


async def test_import_flow_abort_already_configured_service(
    hass: HomeAssistant,
) -> None:
    """Abort import if the same host/port is already configured."""
    existing_entry = MockConfigEntry(
        domain=datadog.DOMAIN,
        data=MOCK_DATA,
        options=MOCK_OPTIONS,
    )
    existing_entry.add_to_hass(hass)

    result = await hass.config_entries.flow.async_init(
        datadog.DOMAIN,
        context={"source": SOURCE_IMPORT},
        data=MOCK_CONFIG,
    )

    assert result["type"] is FlowResultType.ABORT
    assert result["reason"] == "already_configured"
@@ -3,8 +3,13 @@
from unittest import mock
from unittest.mock import patch

from homeassistant.components import datadog
from homeassistant.components.datadog import async_setup_entry
from homeassistant.components.datadog.const import (
    DEFAULT_HOST,
    DEFAULT_PORT,
    DEFAULT_PREFIX,
    DEFAULT_RATE,
    DOMAIN,
)
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import EVENT_LOGBOOK_ENTRY, STATE_OFF, STATE_ON, STATE_UNKNOWN
from homeassistant.core import HomeAssistant
@@ -17,7 +22,7 @@ from tests.common import EVENT_STATE_CHANGED, MockConfigEntry
async def test_invalid_config(hass: HomeAssistant) -> None:
    """Test invalid configuration."""
    entry = MockConfigEntry(
        domain=datadog.DOMAIN,
        domain=DOMAIN,
        data={"host1": "host1"},
    )
    entry.add_to_hass(hass)
@@ -30,7 +35,7 @@ async def test_datadog_setup_full(hass: HomeAssistant) -> None:
        patch("homeassistant.components.datadog.DogStatsd") as mock_dogstatsd,
    ):
        entry = MockConfigEntry(
            domain=datadog.DOMAIN,
            domain=DOMAIN,
            data={
                "host": "host",
                "port": 123,
@@ -56,7 +61,7 @@ async def test_datadog_setup_defaults(hass: HomeAssistant) -> None:
        patch("homeassistant.components.datadog.DogStatsd") as mock_dogstatsd,
    ):
        entry = MockConfigEntry(
            domain=datadog.DOMAIN,
            domain=DOMAIN,
            data=MOCK_DATA,
            options=MOCK_OPTIONS,
        )
@@ -79,14 +84,14 @@ async def test_logbook_entry(hass: HomeAssistant) -> None:
    ):
        mock_statsd = mock_statsd_class.return_value
        entry = MockConfigEntry(
            domain=datadog.DOMAIN,
            domain=DOMAIN,
            data={
                "host": datadog.DEFAULT_HOST,
                "port": datadog.DEFAULT_PORT,
                "host": DEFAULT_HOST,
                "port": DEFAULT_PORT,
            },
            options={
                "rate": datadog.DEFAULT_RATE,
                "prefix": datadog.DEFAULT_PREFIX,
                "rate": DEFAULT_RATE,
                "prefix": DEFAULT_PREFIX,
            },
        )
        entry.add_to_hass(hass)
@@ -119,12 +124,12 @@ async def test_state_changed(hass: HomeAssistant) -> None:
    ):
        mock_statsd = mock_statsd_class.return_value
        entry = MockConfigEntry(
            domain=datadog.DOMAIN,
            domain=DOMAIN,
            data={
                "host": "host",
                "port": datadog.DEFAULT_PORT,
                "port": DEFAULT_PORT,
            },
            options={"prefix": "ha", "rate": datadog.DEFAULT_RATE},
            options={"prefix": "ha", "rate": DEFAULT_RATE},
        )
        entry.add_to_hass(hass)
        assert await hass.config_entries.async_setup(entry.entry_id)
@@ -176,7 +181,7 @@ async def test_unload_entry(hass: HomeAssistant) -> None:
        patch("homeassistant.components.datadog.initialize"),
    ):
        entry = MockConfigEntry(
            domain=datadog.DOMAIN,
            domain=DOMAIN,
            data=MOCK_DATA,
            options=MOCK_OPTIONS,
        )
@@ -203,13 +208,13 @@ async def test_state_changed_skips_unknown(hass: HomeAssistant) -> None:
        ) as mock_dogstatsd,
    ):
        entry = MockConfigEntry(
            domain=datadog.DOMAIN,
            domain=DOMAIN,
            data=MOCK_DATA,
            options=MOCK_OPTIONS,
        )
        entry.add_to_hass(hass)

        await async_setup_entry(hass, entry)
        await hass.config_entries.async_setup(entry.entry_id)

        # Test None state
        hass.bus.async_fire(EVENT_STATE_CHANGED, {"new_state": None})
@@ -84,93 +84,6 @@ async def test_setup_and_remove_config_entry(
    assert entity_registry.async_get(derivative_entity_id) is None


async def test_device_cleaning(
    hass: HomeAssistant,
    device_registry: dr.DeviceRegistry,
    entity_registry: er.EntityRegistry,
) -> None:
    """Test for source entity device for Derivative."""

    # Source entity device config entry
    source_config_entry = MockConfigEntry()
    source_config_entry.add_to_hass(hass)

    # Device entry of the source entity
    source_device1_entry = device_registry.async_get_or_create(
        config_entry_id=source_config_entry.entry_id,
        identifiers={("sensor", "identifier_test1")},
        connections={("mac", "30:31:32:33:34:01")},
    )

    # Source entity registry
    source_entity = entity_registry.async_get_or_create(
        "sensor",
        "test",
        "source",
        config_entry=source_config_entry,
        device_id=source_device1_entry.id,
    )
    await hass.async_block_till_done()
    assert entity_registry.async_get("sensor.test_source") is not None

    # Configure the configuration entry for Derivative
    derivative_config_entry = MockConfigEntry(
        data={},
        domain=DOMAIN,
        options={
            "name": "Derivative",
            "round": 1.0,
            "source": "sensor.test_source",
            "time_window": {"seconds": 0.0},
            "unit_prefix": "k",
            "unit_time": "min",
        },
        title="Derivative",
    )
    derivative_config_entry.add_to_hass(hass)
    assert await hass.config_entries.async_setup(derivative_config_entry.entry_id)
    await hass.async_block_till_done()

    # Confirm the link between the source entity device and the derivative sensor
    derivative_entity = entity_registry.async_get("sensor.derivative")
    assert derivative_entity is not None
    assert derivative_entity.device_id == source_entity.device_id

    # Device entry incorrectly linked to Derivative config entry
    device_registry.async_get_or_create(
        config_entry_id=derivative_config_entry.entry_id,
        identifiers={("sensor", "identifier_test2")},
        connections={("mac", "30:31:32:33:34:02")},
    )
    device_registry.async_get_or_create(
        config_entry_id=derivative_config_entry.entry_id,
        identifiers={("sensor", "identifier_test3")},
        connections={("mac", "30:31:32:33:34:03")},
    )
    await hass.async_block_till_done()

    # Before reloading the config entry, two devices are expected to be linked
    devices_before_reload = device_registry.devices.get_devices_for_config_entry_id(
        derivative_config_entry.entry_id
    )
    assert len(devices_before_reload) == 2

    # Config entry reload
    await hass.config_entries.async_reload(derivative_config_entry.entry_id)
    await hass.async_block_till_done()

    # Confirm the link between the source entity device and the derivative sensor after reload
    derivative_entity = entity_registry.async_get("sensor.derivative")
    assert derivative_entity is not None
    assert derivative_entity.device_id == source_entity.device_id

    # After reloading the config entry, only one linked device is expected
    devices_after_reload = device_registry.devices.get_devices_for_config_entry_id(
        derivative_config_entry.entry_id
    )
    assert len(devices_after_reload) == 0


async def test_async_handle_source_entity_changes_source_entity_removed(
    hass: HomeAssistant,
    device_registry: dr.DeviceRegistry,
@@ -19,6 +19,8 @@ CONFIG = {
}

GLUCOSE_READING = GlucoseReading(json.loads(load_fixture("data.json", "dexcom")))
TEST_ACCOUNT_ID = "99999999-9999-9999-9999-999999999999"
TEST_SESSION_ID = "55555555-5555-5555-5555-555555555555"


async def init_integration(
@@ -38,8 +40,12 @@ async def init_integration(
            return_value=GLUCOSE_READING,
        ),
        patch(
            "homeassistant.components.dexcom.Dexcom.create_session",
            return_value="test_session_id",
            "homeassistant.components.dexcom.Dexcom._get_account_id",
            return_value=TEST_ACCOUNT_ID,
        ),
        patch(
            "homeassistant.components.dexcom.Dexcom._get_session_id",
            return_value=TEST_SESSION_ID,
        ),
    ):
        entry.add_to_hass(hass)
@@ -1,7 +1,7 @@
{
  "DT": "/Date(1587165223000+0000)/",
  "ST": "/Date(1587179623000)/",
  "Trend": 4,
  "Value": 110,
  "WT": "/Date(1587179623000)/"
  "WT": "Date(1745081913085)",
  "ST": "Date(1745081913085)",
  "DT": "Date(1745081913085-0400)",
  "Value": 100,
  "Trend": "Flat"
}
@@ -2,7 +2,7 @@

from unittest.mock import patch

from pydexcom import AccountError, SessionError
from pydexcom.errors import AccountError, SessionError

from homeassistant import config_entries
from homeassistant.components.dexcom.const import DOMAIN
@@ -23,10 +23,7 @@ async def test_form(hass: HomeAssistant) -> None:
    assert result["errors"] == {}

    with (
        patch(
            "homeassistant.components.dexcom.config_flow.Dexcom.create_session",
            return_value="test_session_id",
        ),
        patch("homeassistant.components.dexcom.config_flow.Dexcom"),
        patch(
            "homeassistant.components.dexcom.async_setup_entry",
            return_value=True,
@@ -2,7 +2,7 @@

from unittest.mock import patch

from pydexcom import AccountError, SessionError
from pydexcom.errors import AccountError, SessionError

from homeassistant.components.dexcom.const import DOMAIN
from homeassistant.config_entries import ConfigEntryState
Some files were not shown because too many files have changed in this diff.