Mirror of https://github.com/home-assistant/core.git (synced 2026-02-05 14:55:35 +01:00)
Compare commits: 2026.2.0b4...local_only (1 commit)

| Author | SHA1 | Date |
|---|---|---|
| | c8d86ee454 | |
@@ -1,5 +0,0 @@
{
  "domain": "heatit",
  "name": "Heatit",
  "iot_standards": ["zwave"]
}
@@ -1,5 +0,0 @@
{
  "domain": "heiman",
  "name": "Heiman",
  "iot_standards": ["matter", "zigbee"]
}
@@ -166,7 +166,7 @@
    },
    "services": {
      "alarm_arm_away": {
-        "description": "Arms an alarm in the away mode.",
+        "description": "Arms the alarm in the away mode.",
        "fields": {
          "code": {
            "description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",
@@ -176,7 +176,7 @@
        "name": "Arm away"
      },
      "alarm_arm_custom_bypass": {
-        "description": "Arms an alarm while allowing to bypass a custom area.",
+        "description": "Arms the alarm while allowing to bypass a custom area.",
        "fields": {
          "code": {
            "description": "Code to arm the alarm.",
@@ -186,7 +186,7 @@
        "name": "Arm with custom bypass"
      },
      "alarm_arm_home": {
-        "description": "Arms an alarm in the home mode.",
+        "description": "Arms the alarm in the home mode.",
        "fields": {
          "code": {
            "description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",
@@ -196,7 +196,7 @@
        "name": "Arm home"
      },
      "alarm_arm_night": {
-        "description": "Arms an alarm in the night mode.",
+        "description": "Arms the alarm in the night mode.",
        "fields": {
          "code": {
            "description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",
@@ -206,7 +206,7 @@
        "name": "Arm night"
      },
      "alarm_arm_vacation": {
-        "description": "Arms an alarm in the vacation mode.",
+        "description": "Arms the alarm in the vacation mode.",
        "fields": {
          "code": {
            "description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",
@@ -216,7 +216,7 @@
        "name": "Arm vacation"
      },
      "alarm_disarm": {
-        "description": "Disarms an alarm.",
+        "description": "Disarms the alarm.",
        "fields": {
          "code": {
            "description": "Code to disarm the alarm.",
@@ -226,7 +226,7 @@
        "name": "Disarm"
      },
      "alarm_trigger": {
-        "description": "Triggers an alarm manually.",
+        "description": "Triggers the alarm manually.",
        "fields": {
          "code": {
            "description": "[%key:component::alarm_control_panel::services::alarm_arm_custom_bypass::fields::code::description%]",
@@ -8,5 +8,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["aioamazondevices"],
  "quality_scale": "platinum",
-  "requirements": ["aioamazondevices==11.1.1"]
+  "requirements": ["aioamazondevices==11.0.2"]
}
@@ -28,7 +28,6 @@ from homeassistant.helpers.typing import StateType
from .const import CATEGORY_NOTIFICATIONS, CATEGORY_SENSORS
from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity
-from .utils import async_remove_unsupported_notification_sensors

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@@ -106,9 +105,6 @@ async def async_setup_entry(

    coordinator = entry.runtime_data

-    # Remove notification sensors from unsupported devices
-    await async_remove_unsupported_notification_sensors(hass, coordinator)
-
    known_devices: set[str] = set()

    def _check_device() -> None:
@@ -126,7 +122,6 @@ async def async_setup_entry(
            AmazonSensorEntity(coordinator, serial_num, notification_desc)
            for notification_desc in NOTIFICATIONS
            for serial_num in new_devices
-            if coordinator.data[serial_num].notifications_supported
        ]
        async_add_entities(sensors_list + notifications_list)
@@ -5,14 +5,8 @@ from functools import wraps
from typing import Any, Concatenate

from aioamazondevices.const.devices import SPEAKER_GROUP_FAMILY
-from aioamazondevices.const.schedules import (
-    NOTIFICATION_ALARM,
-    NOTIFICATION_REMINDER,
-    NOTIFICATION_TIMER,
-)
from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData

-from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
@@ -87,27 +81,3 @@ async def async_remove_dnd_from_virtual_group(
        if entity_id and is_group:
            entity_registry.async_remove(entity_id)
            _LOGGER.debug("Removed DND switch from virtual group %s", entity_id)
-
-
-async def async_remove_unsupported_notification_sensors(
-    hass: HomeAssistant,
-    coordinator: AmazonDevicesCoordinator,
-) -> None:
-    """Remove notification sensors from unsupported devices."""
-    entity_registry = er.async_get(hass)
-
-    for serial_num in coordinator.data:
-        for notification_key in (
-            NOTIFICATION_ALARM,
-            NOTIFICATION_REMINDER,
-            NOTIFICATION_TIMER,
-        ):
-            unique_id = f"{serial_num}-{notification_key}"
-            entity_id = entity_registry.async_get_entity_id(
-                domain=SENSOR_DOMAIN, platform=DOMAIN, unique_id=unique_id
-            )
-            is_unsupported = not coordinator.data[serial_num].notifications_supported
-
-            if entity_id and is_unsupported:
-                entity_registry.async_remove(entity_id)
-                _LOGGER.debug("Removed unsupported notification sensor %s", entity_id)
@@ -10,7 +10,6 @@
  "preview_features": {
    "snapshots": {
      "feedback_url": "https://forms.gle/GqvRmgmghSDco8M46",
-      "learn_more_url": "https://www.home-assistant.io/blog/2026/02/02/about-device-database/",
      "report_issue_url": "https://github.com/OHF-Device-Database/device-database/issues/new"
    }
  },
@@ -1,7 +1,7 @@
{
  "preview_features": {
    "snapshots": {
-      "description": "We're creating the [Open Home Foundation Device Database](https://www.home-assistant.io/blog/2026/02/02/about-device-database/): a free, open source community-powered resource to help users find practical information about how smart home devices perform in real installations.\n\nYou can help us build it by opting in to share anonymized data about your devices. This data will only ever include device-specific details (like model or manufacturer) – never personally identifying information (like the names you assign).\n\nFind out how we process your data (should you choose to contribute) in our [Data Use Statement](https://www.openhomefoundation.org/device-database-data-use-statement).",
+      "description": "This free, open source device database of the Open Home Foundation helps users find useful information about smart home devices used in real installations.\n\nYou can help build it by anonymously sharing data about your devices. Only device-specific details (like model or manufacturer) are shared — never personally identifying information (like the names you assign).\n\nLearn more about the device database and how we process your data in our [Data Use Statement](https://www.openhomefoundation.org/device-database-data-use-statement), which you accept by opting in.",
      "disable_confirmation": "Your data will no longer be shared with the Open Home Foundation's device database.",
      "enable_confirmation": "This feature is still in development and may change. The device database is being refined based on user feedback and is not yet complete.",
      "name": "Device database"
@@ -13,10 +13,9 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
-from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession

-from .const import CONF_TRACKED_APPS, CONF_TRACKED_INTEGRATIONS
+from .const import CONF_TRACKED_INTEGRATIONS
from .coordinator import HomeassistantAnalyticsDataUpdateCoordinator

PLATFORMS: list[Platform] = [Platform.SENSOR]
@@ -60,30 +59,6 @@ async def async_setup_entry(
    return True


-async def async_migrate_entry(
-    hass: HomeAssistant, entry: AnalyticsInsightsConfigEntry
-) -> bool:
-    """Migrate to a new version."""
-    # Migration for switching add-ons to apps
-    if entry.version < 2:
-        ent_reg = er.async_get(hass)
-        for entity_entry in er.async_entries_for_config_entry(ent_reg, entry.entry_id):
-            if not entity_entry.unique_id.startswith("addon_"):
-                continue
-
-            ent_reg.async_update_entity(
-                entity_entry.entity_id,
-                new_unique_id=entity_entry.unique_id.replace("addon_", "app_"),
-            )
-
-        options = dict(entry.options)
-        options[CONF_TRACKED_APPS] = options.pop("tracked_addons", [])
-
-        hass.config_entries.async_update_entry(entry, version=2, options=options)
-
-    return True
-
-
async def async_unload_entry(
    hass: HomeAssistant, entry: AnalyticsInsightsConfigEntry
) -> bool:
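The `async_migrate_entry` on the removed side above rewrites entity unique IDs by prefix. A minimal sketch of the rename it performs (the slug is a made-up example); the `startswith("addon_")` guard ensures only prefixed IDs are rewritten before `str.replace` is applied:

```python
# Hypothetical unique_id, illustrating the addon_ -> app_ rename above.
old_unique_id = "addon_core_mosquitto"
new_unique_id = old_unique_id.replace("addon_", "app_")
assert new_unique_id == "app_core_mosquitto"
```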
@@ -26,7 +26,7 @@ from homeassistant.helpers.selector import (

from . import AnalyticsInsightsConfigEntry
from .const import (
-    CONF_TRACKED_APPS,
+    CONF_TRACKED_ADDONS,
    CONF_TRACKED_CUSTOM_INTEGRATIONS,
    CONF_TRACKED_INTEGRATIONS,
    DOMAIN,
@@ -43,8 +43,6 @@ INTEGRATION_TYPES_WITHOUT_ANALYTICS = (
class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Homeassistant Analytics."""

-    VERSION = 2
-
    @staticmethod
    @callback
    def async_get_options_flow(
@@ -61,7 +59,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
        if user_input is not None:
            if all(
                [
-                    not user_input.get(CONF_TRACKED_APPS),
+                    not user_input.get(CONF_TRACKED_ADDONS),
                    not user_input.get(CONF_TRACKED_INTEGRATIONS),
                    not user_input.get(CONF_TRACKED_CUSTOM_INTEGRATIONS),
                ]
@@ -72,7 +70,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
                title="Home Assistant Analytics Insights",
                data={},
                options={
-                    CONF_TRACKED_APPS: user_input.get(CONF_TRACKED_APPS, []),
+                    CONF_TRACKED_ADDONS: user_input.get(CONF_TRACKED_ADDONS, []),
                    CONF_TRACKED_INTEGRATIONS: user_input.get(
                        CONF_TRACKED_INTEGRATIONS, []
                    ),
@@ -86,7 +84,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
            session=async_get_clientsession(self.hass)
        )
        try:
-            apps = await client.get_addons()
+            addons = await client.get_addons()
            integrations = await client.get_integrations(Environment.NEXT)
            custom_integrations = await client.get_custom_integrations()
        except HomeassistantAnalyticsConnectionError:
@@ -109,9 +107,9 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
            errors=errors,
            data_schema=vol.Schema(
                {
-                    vol.Optional(CONF_TRACKED_APPS): SelectSelector(
+                    vol.Optional(CONF_TRACKED_ADDONS): SelectSelector(
                        SelectSelectorConfig(
-                            options=list(apps),
+                            options=list(addons),
                            multiple=True,
                            sort=True,
                        )
@@ -146,7 +144,7 @@ class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithReload):
        if user_input is not None:
            if all(
                [
-                    not user_input.get(CONF_TRACKED_APPS),
+                    not user_input.get(CONF_TRACKED_ADDONS),
                    not user_input.get(CONF_TRACKED_INTEGRATIONS),
                    not user_input.get(CONF_TRACKED_CUSTOM_INTEGRATIONS),
                ]
@@ -156,7 +154,7 @@ class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithReload):
            return self.async_create_entry(
                title="",
                data={
-                    CONF_TRACKED_APPS: user_input.get(CONF_TRACKED_APPS, []),
+                    CONF_TRACKED_ADDONS: user_input.get(CONF_TRACKED_ADDONS, []),
                    CONF_TRACKED_INTEGRATIONS: user_input.get(
                        CONF_TRACKED_INTEGRATIONS, []
                    ),
@@ -170,7 +168,7 @@ class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithReload):
            session=async_get_clientsession(self.hass)
        )
        try:
-            apps = await client.get_addons()
+            addons = await client.get_addons()
            integrations = await client.get_integrations(Environment.NEXT)
            custom_integrations = await client.get_custom_integrations()
        except HomeassistantAnalyticsConnectionError:
@@ -191,9 +189,9 @@ class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithReload):
        data_schema=self.add_suggested_values_to_schema(
            vol.Schema(
                {
-                    vol.Optional(CONF_TRACKED_APPS): SelectSelector(
+                    vol.Optional(CONF_TRACKED_ADDONS): SelectSelector(
                        SelectSelectorConfig(
-                            options=list(apps),
+                            options=list(addons),
                            multiple=True,
                            sort=True,
                        )
@@ -4,7 +4,7 @@ import logging

DOMAIN = "analytics_insights"

-CONF_TRACKED_APPS = "tracked_apps"
+CONF_TRACKED_ADDONS = "tracked_addons"
CONF_TRACKED_INTEGRATIONS = "tracked_integrations"
CONF_TRACKED_CUSTOM_INTEGRATIONS = "tracked_custom_integrations"
@@ -18,7 +18,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import (
-    CONF_TRACKED_APPS,
+    CONF_TRACKED_ADDONS,
    CONF_TRACKED_CUSTOM_INTEGRATIONS,
    CONF_TRACKED_INTEGRATIONS,
    DOMAIN,
@@ -35,7 +35,7 @@ class AnalyticsData:

    active_installations: int
    reports_integrations: int
-    apps: dict[str, int]
+    addons: dict[str, int]
    core_integrations: dict[str, int]
    custom_integrations: dict[str, int]
@@ -60,7 +60,7 @@ class HomeassistantAnalyticsDataUpdateCoordinator(DataUpdateCoordinator[Analytic
            update_interval=timedelta(hours=12),
        )
        self._client = client
-        self._tracked_apps = self.config_entry.options.get(CONF_TRACKED_APPS, [])
+        self._tracked_addons = self.config_entry.options.get(CONF_TRACKED_ADDONS, [])
        self._tracked_integrations = self.config_entry.options[
            CONF_TRACKED_INTEGRATIONS
        ]
@@ -70,9 +70,7 @@ class HomeassistantAnalyticsDataUpdateCoordinator(DataUpdateCoordinator[Analytic

    async def _async_update_data(self) -> AnalyticsData:
        try:
-            apps_data = (
-                await self._client.get_addons()
-            )  # Still add method name. Needs library update
+            addons_data = await self._client.get_addons()
            data = await self._client.get_current_analytics()
            custom_data = await self._client.get_custom_integrations()
        except HomeassistantAnalyticsConnectionError as err:
@@ -81,7 +79,9 @@ class HomeassistantAnalyticsDataUpdateCoordinator(DataUpdateCoordinator[Analytic
            ) from err
        except HomeassistantAnalyticsNotModifiedError:
            return self.data
-        apps = {app: get_app_value(apps_data, app) for app in self._tracked_apps}
+        addons = {
+            addon: get_addon_value(addons_data, addon) for addon in self._tracked_addons
+        }
        core_integrations = {
            integration: data.integrations.get(integration, 0)
            for integration in self._tracked_integrations
@@ -93,14 +93,14 @@ class HomeassistantAnalyticsDataUpdateCoordinator(DataUpdateCoordinator[Analytic
        return AnalyticsData(
            data.active_installations,
            data.reports_integrations,
-            apps,
+            addons,
            core_integrations,
            custom_integrations,
        )


-def get_app_value(data: dict[str, Addon], name_slug: str) -> int:
-    """Get app value."""
+def get_addon_value(data: dict[str, Addon], name_slug: str) -> int:
+    """Get addon value."""
    if name_slug in data:
        return data[name_slug].total
    return 0
@@ -29,17 +29,17 @@ class AnalyticsSensorEntityDescription(SensorEntityDescription):
    value_fn: Callable[[AnalyticsData], StateType]


-def get_app_entity_description(
+def get_addon_entity_description(
    name_slug: str,
) -> AnalyticsSensorEntityDescription:
-    """Get app entity description."""
+    """Get addon entity description."""
    return AnalyticsSensorEntityDescription(
-        key=f"app_{name_slug}_active_installations",
-        translation_key="apps",
+        key=f"addon_{name_slug}_active_installations",
+        translation_key="addons",
        name=name_slug,
        state_class=SensorStateClass.TOTAL,
        native_unit_of_measurement="active installations",
-        value_fn=lambda data: data.apps.get(name_slug),
+        value_fn=lambda data: data.addons.get(name_slug),
    )
@@ -106,9 +106,9 @@ async def async_setup_entry(
    entities.extend(
        HomeassistantAnalyticsSensor(
            coordinator,
-            get_app_entity_description(app_name_slug),
+            get_addon_entity_description(addon_name_slug),
        )
-        for app_name_slug in coordinator.data.apps
+        for addon_name_slug in coordinator.data.addons
    )
    entities.extend(
        HomeassistantAnalyticsSensor(
@@ -10,12 +10,12 @@
    "step": {
      "user": {
        "data": {
-          "tracked_apps": "Apps",
+          "tracked_addons": "Add-ons",
          "tracked_custom_integrations": "Custom integrations",
          "tracked_integrations": "Integrations"
        },
        "data_description": {
-          "tracked_apps": "Select the apps you want to track",
+          "tracked_addons": "Select the add-ons you want to track",
          "tracked_custom_integrations": "Select the custom integrations you want to track",
          "tracked_integrations": "Select the integrations you want to track"
        }
@@ -45,12 +45,12 @@
    "step": {
      "init": {
        "data": {
-          "tracked_apps": "[%key:component::analytics_insights::config::step::user::data::tracked_apps%]",
+          "tracked_addons": "[%key:component::analytics_insights::config::step::user::data::tracked_addons%]",
          "tracked_custom_integrations": "[%key:component::analytics_insights::config::step::user::data::tracked_custom_integrations%]",
          "tracked_integrations": "[%key:component::analytics_insights::config::step::user::data::tracked_integrations%]"
        },
        "data_description": {
-          "tracked_apps": "[%key:component::analytics_insights::config::step::user::data_description::tracked_apps%]",
+          "tracked_addons": "[%key:component::analytics_insights::config::step::user::data_description::tracked_addons%]",
          "tracked_custom_integrations": "[%key:component::analytics_insights::config::step::user::data_description::tracked_custom_integrations%]",
          "tracked_integrations": "[%key:component::analytics_insights::config::step::user::data_description::tracked_integrations%]"
        }
@@ -14,18 +14,10 @@ from homeassistant.helpers import (
    config_validation as cv,
    device_registry as dr,
    entity_registry as er,
-    issue_registry as ir,
)
from homeassistant.helpers.typing import ConfigType

-from .const import (
-    CONF_CHAT_MODEL,
-    DATA_REPAIR_DEFER_RELOAD,
-    DEFAULT_CONVERSATION_NAME,
-    DEPRECATED_MODELS,
-    DOMAIN,
-    LOGGER,
-)
+from .const import DEFAULT_CONVERSATION_NAME, DOMAIN, LOGGER

PLATFORMS = (Platform.AI_TASK, Platform.CONVERSATION)
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
@@ -35,7 +27,6 @@ type AnthropicConfigEntry = ConfigEntry[anthropic.AsyncClient]

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up Anthropic."""
-    hass.data.setdefault(DOMAIN, {}).setdefault(DATA_REPAIR_DEFER_RELOAD, set())
    await async_migrate_integration(hass)
    return True
@@ -59,22 +50,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) ->

    entry.async_on_unload(entry.add_update_listener(async_update_options))

-    for subentry in entry.subentries.values():
-        if (model := subentry.data.get(CONF_CHAT_MODEL)) and model.startswith(
-            tuple(DEPRECATED_MODELS)
-        ):
-            ir.async_create_issue(
-                hass,
-                DOMAIN,
-                "model_deprecated",
-                is_fixable=True,
-                is_persistent=False,
-                learn_more_url="https://platform.claude.com/docs/en/about-claude/model-deprecations",
-                severity=ir.IssueSeverity.WARNING,
-                translation_key="model_deprecated",
-            )
-            break
-
    return True
@@ -87,11 +62,6 @@ async def async_update_options(
    hass: HomeAssistant, entry: AnthropicConfigEntry
) -> None:
    """Update options."""
-    defer_reload_entries: set[str] = hass.data.setdefault(DOMAIN, {}).setdefault(
-        DATA_REPAIR_DEFER_RELOAD, set()
-    )
-    if entry.entry_id in defer_reload_entries:
-        return
    await hass.config_entries.async_reload(entry.entry_id)
@@ -92,40 +92,6 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
    await client.models.list(timeout=10.0)


-async def get_model_list(client: anthropic.AsyncAnthropic) -> list[SelectOptionDict]:
-    """Get list of available models."""
-    try:
-        models = (await client.models.list()).data
-    except anthropic.AnthropicError:
-        models = []
-    _LOGGER.debug("Available models: %s", models)
-    model_options: list[SelectOptionDict] = []
-    short_form = re.compile(r"[^\d]-\d$")
-    for model_info in models:
-        # Resolve alias from versioned model name:
-        model_alias = (
-            model_info.id[:-9]
-            if model_info.id
-            not in (
-                "claude-3-haiku-20240307",
-                "claude-3-5-haiku-20241022",
-                "claude-3-opus-20240229",
-            )
-            else model_info.id
-        )
-        if short_form.search(model_alias):
-            model_alias += "-0"
-        if model_alias.endswith(("haiku", "opus", "sonnet")):
-            model_alias += "-latest"
-        model_options.append(
-            SelectOptionDict(
-                label=model_info.display_name,
-                value=model_alias,
-            )
-        )
-    return model_options
-
-
class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Anthropic."""
@@ -435,13 +401,38 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):

    async def _get_model_list(self) -> list[SelectOptionDict]:
        """Get list of available models."""
-        client = await self.hass.async_add_executor_job(
-            partial(
-                anthropic.AsyncAnthropic,
-                api_key=self._get_entry().data[CONF_API_KEY],
+        try:
+            client = await self.hass.async_add_executor_job(
+                partial(
+                    anthropic.AsyncAnthropic,
+                    api_key=self._get_entry().data[CONF_API_KEY],
+                )
            )
-        )
-        return await get_model_list(client)
+            models = (await client.models.list()).data
+        except anthropic.AnthropicError:
+            models = []
+        _LOGGER.debug("Available models: %s", models)
+        model_options: list[SelectOptionDict] = []
+        short_form = re.compile(r"[^\d]-\d$")
+        for model_info in models:
+            # Resolve alias from versioned model name:
+            model_alias = (
+                model_info.id[:-9]
+                if model_info.id
+                not in ("claude-3-haiku-20240307", "claude-3-opus-20240229")
+                else model_info.id
+            )
+            if short_form.search(model_alias):
+                model_alias += "-0"
+            if model_alias.endswith(("haiku", "opus", "sonnet")):
+                model_alias += "-latest"
+            model_options.append(
+                SelectOptionDict(
+                    label=model_info.display_name,
+                    value=model_alias,
+                )
+            )
+        return model_options

    async def _get_location_data(self) -> dict[str, str]:
        """Get approximate location data of the user."""
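Both versions of the model-list logic above derive a display alias from a versioned model ID: strip the 9-character date suffix unless the ID is pinned, append "-0" to short forms, and append "-latest" to bare family names. A standalone sketch of that resolution, runnable outside Home Assistant (the sample IDs are illustrative, and the pinned set is taken from the removed module-level function):

```python
import re

# Pinned IDs keep their full versioned name (set copied from the diff above).
PINNED_IDS = {
    "claude-3-haiku-20240307",
    "claude-3-5-haiku-20241022",
    "claude-3-opus-20240229",
}
SHORT_FORM = re.compile(r"[^\d]-\d$")  # e.g. matches the trailing "t-4"


def resolve_alias(model_id: str) -> str:
    """Resolve a display alias from a versioned model ID."""
    alias = model_id if model_id in PINNED_IDS else model_id[:-9]
    if SHORT_FORM.search(alias):
        alias += "-0"  # short form: "claude-sonnet-4" -> "claude-sonnet-4-0"
    if alias.endswith(("haiku", "opus", "sonnet")):
        alias += "-latest"  # bare family names map to the "-latest" alias
    return alias


assert resolve_alias("claude-sonnet-4-20250514") == "claude-sonnet-4-0"
assert resolve_alias("claude-3-7-sonnet-20250219") == "claude-3-7-sonnet-latest"
assert resolve_alias("claude-3-opus-20240229") == "claude-3-opus-20240229"  # pinned
```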
@@ -22,10 +22,8 @@ CONF_WEB_SEARCH_REGION = "region"
|
||||
CONF_WEB_SEARCH_COUNTRY = "country"
|
||||
CONF_WEB_SEARCH_TIMEZONE = "timezone"
|
||||
|
||||
DATA_REPAIR_DEFER_RELOAD = "repair_defer_reload"
|
||||
|
||||
DEFAULT = {
|
||||
CONF_CHAT_MODEL: "claude-haiku-4-5",
|
||||
CONF_CHAT_MODEL: "claude-3-5-haiku-latest",
|
||||
CONF_MAX_TOKENS: 3000,
|
||||
CONF_TEMPERATURE: 1.0,
|
||||
CONF_THINKING_BUDGET: 0,
|
||||
@@ -48,10 +46,3 @@ WEB_SEARCH_UNSUPPORTED_MODELS = [
|
||||
"claude-3-5-sonnet-20240620",
|
||||
"claude-3-5-sonnet-20241022",
|
||||
]
|
||||
|
||||
DEPRECATED_MODELS = [
|
||||
"claude-3-5-haiku",
|
||||
"claude-3-7-sonnet",
|
||||
"claude-3-5-sonnet",
|
||||
"claude-3-opus",
|
||||
]
|
||||
|
||||
@@ -1,275 +0,0 @@
"""Issue repair flow for Anthropic."""

from __future__ import annotations

from collections.abc import Iterator
from typing import cast

import voluptuous as vol

from homeassistant import data_entry_flow
from homeassistant.components.repairs import RepairsFlow
from homeassistant.config_entries import ConfigEntry, ConfigEntryState, ConfigSubentry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig

from .config_flow import get_model_list
from .const import (
    CONF_CHAT_MODEL,
    DATA_REPAIR_DEFER_RELOAD,
    DEFAULT,
    DEPRECATED_MODELS,
    DOMAIN,
)


class ModelDeprecatedRepairFlow(RepairsFlow):
    """Handler for an issue fixing flow."""

    _subentry_iter: Iterator[tuple[str, str]] | None
    _current_entry_id: str | None
    _current_subentry_id: str | None
    _reload_pending: set[str]
    _pending_updates: dict[str, dict[str, str]]

    def __init__(self) -> None:
        """Initialize the flow."""
        super().__init__()
        self._subentry_iter = None
        self._current_entry_id = None
        self._current_subentry_id = None
        self._reload_pending = set()
        self._pending_updates = {}

    async def async_step_init(
        self, user_input: dict[str, str] | None = None
    ) -> data_entry_flow.FlowResult:
        """Handle the first step of a fix flow."""
        previous_entry_id: str | None = None
        if user_input is not None:
            previous_entry_id = self._async_update_current_subentry(user_input)
        self._clear_current_target()

        target = await self._async_next_target()
        next_entry_id = target[0].entry_id if target else None
        if previous_entry_id and previous_entry_id != next_entry_id:
            await self._async_apply_pending_updates(previous_entry_id)
        if target is None:
            await self._async_apply_all_pending_updates()
            return self.async_create_entry(data={})

        entry, subentry, model = target
        client = entry.runtime_data
        model_list = [
            model_option
            for model_option in await get_model_list(client)
            if not model_option["value"].startswith(tuple(DEPRECATED_MODELS))
        ]

        if "opus" in model:
            suggested_model = "claude-opus-4-5"
        elif "haiku" in model:
            suggested_model = "claude-haiku-4-5"
        elif "sonnet" in model:
            suggested_model = "claude-sonnet-4-5"
        else:
            suggested_model = cast(str, DEFAULT[CONF_CHAT_MODEL])

        schema = vol.Schema(
            {
                vol.Required(
                    CONF_CHAT_MODEL,
                    default=suggested_model,
                ): SelectSelector(
                    SelectSelectorConfig(options=model_list, custom_value=True)
                ),
            }
        )

        return self.async_show_form(
            step_id="init",
            data_schema=schema,
            description_placeholders={
                "entry_name": entry.title,
                "model": model,
                "subentry_name": subentry.title,
                "subentry_type": self._format_subentry_type(subentry.subentry_type),
            },
        )

    def _iter_deprecated_subentries(self) -> Iterator[tuple[str, str]]:
        """Yield entry/subentry pairs that use deprecated models."""
        for entry in self.hass.config_entries.async_entries(DOMAIN):
            if entry.state is not ConfigEntryState.LOADED:
                continue
            for subentry in entry.subentries.values():
                model = subentry.data.get(CONF_CHAT_MODEL)
                if model and model.startswith(tuple(DEPRECATED_MODELS)):
                    yield entry.entry_id, subentry.subentry_id

    async def _async_next_target(
        self,
    ) -> tuple[ConfigEntry, ConfigSubentry, str] | None:
        """Return the next deprecated subentry target."""
        if self._subentry_iter is None:
            self._subentry_iter = self._iter_deprecated_subentries()

        while True:
            try:
                entry_id, subentry_id = next(self._subentry_iter)
            except StopIteration:
                return None

            entry = self.hass.config_entries.async_get_entry(entry_id)
            if entry is None:
                continue

            subentry = entry.subentries.get(subentry_id)
            if subentry is None:
                continue

            model = self._pending_model(entry_id, subentry_id)
            if model is None:
                model = subentry.data.get(CONF_CHAT_MODEL)
            if not model or not model.startswith(tuple(DEPRECATED_MODELS)):
                continue

            self._current_entry_id = entry_id
            self._current_subentry_id = subentry_id
            return entry, subentry, model

    def _async_update_current_subentry(self, user_input: dict[str, str]) -> str | None:
        """Update the currently selected subentry."""
        if not self._current_entry_id or not self._current_subentry_id:
            return None

        entry = self.hass.config_entries.async_get_entry(self._current_entry_id)
        if entry is None:
            return None

        subentry = entry.subentries.get(self._current_subentry_id)
        if subentry is None:
            return None

        updated_data = {
            **subentry.data,
            CONF_CHAT_MODEL: user_input[CONF_CHAT_MODEL],
        }
        if updated_data == subentry.data:
            return entry.entry_id
        self._queue_pending_update(
            entry.entry_id,
            subentry.subentry_id,
            updated_data[CONF_CHAT_MODEL],
        )
        return entry.entry_id

    def _clear_current_target(self) -> None:
        """Clear current target tracking."""
        self._current_entry_id = None
        self._current_subentry_id = None

    def _format_subentry_type(self, subentry_type: str) -> str:
        """Return a user-friendly subentry type label."""
        if subentry_type == "conversation":
            return "Conversation agent"
        if subentry_type in ("ai_task", "ai_task_data"):
            return "AI task"
        return subentry_type

    def _queue_pending_update(
        self, entry_id: str, subentry_id: str, model: str
    ) -> None:
        """Store a pending model update for a subentry."""
        self._pending_updates.setdefault(entry_id, {})[subentry_id] = model

    def _pending_model(self, entry_id: str, subentry_id: str) -> str | None:
        """Return a pending model update if one exists."""
        return self._pending_updates.get(entry_id, {}).get(subentry_id)

    def _mark_entry_for_reload(self, entry_id: str) -> None:
        """Prevent reload until repairs are complete for the entry."""
        self._reload_pending.add(entry_id)
        defer_reload_entries: set[str] = self.hass.data.setdefault(
            DOMAIN, {}
        ).setdefault(DATA_REPAIR_DEFER_RELOAD, set())
        defer_reload_entries.add(entry_id)

    async def _async_reload_entry(self, entry_id: str) -> None:
        """Reload an entry once all repairs are completed."""
        if entry_id not in self._reload_pending:
            return

        entry = self.hass.config_entries.async_get_entry(entry_id)
        if entry is not None and entry.state is not ConfigEntryState.LOADED:
            self._clear_defer_reload(entry_id)
            self._reload_pending.discard(entry_id)
            return

        if entry is not None:
            await self.hass.config_entries.async_reload(entry_id)

        self._clear_defer_reload(entry_id)
        self._reload_pending.discard(entry_id)

    def _clear_defer_reload(self, entry_id: str) -> None:
        """Remove entry from the deferred reload set."""
        defer_reload_entries: set[str] = self.hass.data.setdefault(
            DOMAIN, {}
        ).setdefault(DATA_REPAIR_DEFER_RELOAD, set())
        defer_reload_entries.discard(entry_id)

    async def _async_apply_pending_updates(self, entry_id: str) -> None:
        """Apply pending subentry updates for a single entry."""
        updates = self._pending_updates.pop(entry_id, None)
        if not updates:
            return

        entry = self.hass.config_entries.async_get_entry(entry_id)
        if entry is None or entry.state is not ConfigEntryState.LOADED:
            return

        changed = False
        for subentry_id, model in updates.items():
            subentry = entry.subentries.get(subentry_id)
            if subentry is None:
                continue

            updated_data = {
                **subentry.data,
                CONF_CHAT_MODEL: model,
            }
            if updated_data == subentry.data:
                continue

            if not changed:
                self._mark_entry_for_reload(entry_id)
                changed = True

            self.hass.config_entries.async_update_subentry(
                entry,
                subentry,
                data=updated_data,
            )

        if not changed:
            return

        await self._async_reload_entry(entry_id)

    async def _async_apply_all_pending_updates(self) -> None:
        """Apply all pending updates across entries."""
        for entry_id in list(self._pending_updates):
            await self._async_apply_pending_updates(entry_id)


async def async_create_fix_flow(
    hass: HomeAssistant,
    issue_id: str,
    data: dict[str, str | int | float | None] | None,
) -> RepairsFlow:
    """Create flow."""
    if issue_id == "model_deprecated":
        return ModelDeprecatedRepairFlow()
    raise HomeAssistantError("Unknown issue ID")
@@ -109,21 +109,5 @@
        }
      }
    }
-  },
-  "issues": {
-    "model_deprecated": {
-      "fix_flow": {
-        "step": {
-          "init": {
-            "data": {
-              "chat_model": "[%key:common::generic::model%]"
-            },
-            "description": "You are updating {subentry_name} ({subentry_type}) in {entry_name}. The current model {model} is deprecated. Select a supported model to continue.",
-            "title": "Update model"
-          }
-        }
-      },
-      "title": "Model deprecated"
-    }
-  }
+  }
}
@@ -540,17 +540,7 @@ class APCUPSdSensor(APCUPSdEntity, SensorEntity):
        data = self.coordinator.data[key]

        if self.entity_description.device_class == SensorDeviceClass.TIMESTAMP:
-            # The date could be "N/A" for certain fields (e.g., XOFFBATT), indicating there is no value yet.
-            if data == "N/A":
-                self._attr_native_value = None
-                return
-
-            try:
-                self._attr_native_value = dateutil.parser.parse(data)
-            except (dateutil.parser.ParserError, OverflowError):
-                # If parsing fails we should mark it as unknown, with a log for further debugging.
-                _LOGGER.warning('Failed to parse date for %s: "%s"', key, data)
-                self._attr_native_value = None
+            self._attr_native_value = dateutil.parser.parse(data)
            return

        self._attr_native_value, inferred_unit = infer_unit(data)
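The removed branch above guards timestamp parsing twice: an explicit "N/A" sentinel check and a try/except around `dateutil.parser.parse`. A standalone sketch of that handling (the sample values are illustrative, not from the integration's tests):

```python
from datetime import datetime

import dateutil.parser


def parse_apc_timestamp(data: str) -> datetime | None:
    """Return None for the 'N/A' sentinel or for unparsable dates."""
    if data == "N/A":
        return None
    try:
        return dateutil.parser.parse(data)
    except (dateutil.parser.ParserError, OverflowError):
        return None


print(parse_apc_timestamp("N/A"))  # None
print(parse_apc_timestamp("2026-02-05 14:55:35 +0100"))  # datetime object
```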
@@ -52,7 +52,7 @@ from .const import (
from .errors import AuthenticationRequired, CannotConnect
from .hub import AxisHub, get_axis_api

-AXIS_OUI = {"00:40:8c", "ac:cc:8e", "b8:a4:4f", "e8:27:25"}
+AXIS_OUI = {"00:40:8c", "ac:cc:8e", "b8:a4:4f"}
DEFAULT_PORT = 443
DEFAULT_PROTOCOL = "https"
PROTOCOL_CHOICES = ["https", "http"]
@@ -12,25 +12,14 @@ from hass_nabucasa import Cloud, NabuCasaBaseError
-from hass_nabucasa.llm import (
-    LLMAuthenticationError,
-    LLMRateLimitError,
-    LLMResponseCompletedEvent,
-    LLMResponseError,
-    LLMResponseErrorEvent,
-    LLMResponseFailedEvent,
-    LLMResponseFunctionCallArgumentsDeltaEvent,
-    LLMResponseFunctionCallArgumentsDoneEvent,
-    LLMResponseFunctionCallOutputItem,
-    LLMResponseImageOutputItem,
-    LLMResponseIncompleteEvent,
-    LLMResponseMessageOutputItem,
-    LLMResponseOutputItemAddedEvent,
-    LLMResponseOutputItemDoneEvent,
-    LLMResponseOutputTextDeltaEvent,
-    LLMResponseReasoningOutputItem,
-    LLMResponseReasoningSummaryTextDeltaEvent,
-    LLMResponseWebSearchCallOutputItem,
-    LLMResponseWebSearchCallSearchingEvent,
-    LLMServiceError,
-)
+from litellm import (
+    ResponseFunctionToolCall,
+    ResponseInputParam,
+    ResponsesAPIStreamEvents,
+)
from openai.types.responses import (
    FunctionToolParam,
    ResponseInputItemParam,
@@ -71,9 +60,9 @@ class ResponseItemType(str, Enum):

def _convert_content_to_param(
    chat_content: Iterable[conversation.Content],
-) -> list[ResponseInputItemParam]:
+) -> ResponseInputParam:
    """Convert any native chat message for this agent to the native format."""
-    messages: list[ResponseInputItemParam] = []
+    messages: ResponseInputParam = []
    reasoning_summary: list[str] = []
    web_search_calls: dict[str, dict[str, Any]] = {}
@@ -249,7 +238,7 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
    """Transform stream result into HA format."""
    last_summary_index = None
    last_role: Literal["assistant", "tool_result"] | None = None
-    current_tool_call: LLMResponseFunctionCallOutputItem | None = None
+    current_tool_call: ResponseFunctionToolCall | None = None

    # Non-reasoning models don't follow our request to remove citations, so we remove
    # them manually here. They always follow the same pattern: the citation is always
@@ -259,10 +248,19 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
    citation_regexp = re.compile(r"\(\[([^\]]+)\]\((https?:\/\/[^\)]+)\)")

    async for event in stream:
-        _LOGGER.debug("Event[%s]", getattr(event, "type", None))
+        event_type = getattr(event, "type", None)
+        event_item = getattr(event, "item", None)
+        event_item_type = getattr(event_item, "type", None) if event_item else None

-        if isinstance(event, LLMResponseOutputItemAddedEvent):
-            if isinstance(event.item, LLMResponseFunctionCallOutputItem):
+        _LOGGER.debug(
+            "Event[%s] | item: %s",
+            event_type,
+            event_item_type,
+        )
+
+        if event_type == ResponsesAPIStreamEvents.OUTPUT_ITEM_ADDED:
+            # Detect function_call even when it's a BaseLiteLLMOpenAIResponseObject
+            if event_item_type == ResponseItemType.FUNCTION_CALL:
                # OpenAI has tool calls as individual events
                # while HA puts tool calls inside the assistant message.
                # We turn them into individual assistant content for HA
@@ -270,11 +268,11 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                yield {"role": "assistant"}
                last_role = "assistant"
                last_summary_index = None
-                current_tool_call = event.item
+                current_tool_call = cast(ResponseFunctionToolCall, event.item)
            elif (
-                isinstance(event.item, LLMResponseMessageOutputItem)
+                event_item_type == ResponseItemType.MESSAGE
                or (
-                    isinstance(event.item, LLMResponseReasoningOutputItem)
+                    event_item_type == ResponseItemType.REASONING
                    and last_summary_index is not None
                )  # Subsequent ResponseReasoningItem
                or last_role != "assistant"
@@ -283,14 +281,14 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                last_role = "assistant"
                last_summary_index = None

-        elif isinstance(event, LLMResponseOutputItemDoneEvent):
-            if isinstance(event.item, LLMResponseReasoningOutputItem):
-                encrypted_content = event.item.encrypted_content
-                summary = event.item.summary
+        elif event_type == ResponsesAPIStreamEvents.OUTPUT_ITEM_DONE:
+            if event_item_type == ResponseItemType.REASONING:
+                encrypted_content = getattr(event.item, "encrypted_content", None)
+                summary = getattr(event.item, "summary", []) or []

                yield {
-                    "native": LLMResponseReasoningOutputItem(
-                        type=event.item.type,
+                    "native": ResponseReasoningItem(
+                        type="reasoning",
                        id=event.item.id,
                        summary=[],
                        encrypted_content=encrypted_content,
@@ -298,8 +296,14 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                }

                last_summary_index = len(summary) - 1 if summary else None
-            elif isinstance(event.item, LLMResponseWebSearchCallOutputItem):
-                action_dict = event.item.action
+            elif event_item_type == ResponseItemType.WEB_SEARCH_CALL:
+                action = getattr(event.item, "action", None)
+                if isinstance(action, dict):
+                    action_dict = action
+                elif action is not None:
+                    action_dict = action.to_dict()
+                else:
+                    action_dict = {}
                yield {
                    "tool_calls": [
                        llm.ToolInput(
@@ -317,11 +321,11 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                    "tool_result": {"status": event.item.status},
                }
                last_role = "tool_result"
-            elif isinstance(event.item, LLMResponseImageOutputItem):
-                yield {"native": event.item.raw}
+            elif event_item_type == ResponseItemType.IMAGE:
+                yield {"native": event.item}
                last_summary_index = -1  # Trigger new assistant message on next turn

-        elif isinstance(event, LLMResponseOutputTextDeltaEvent):
+        elif event_type == ResponsesAPIStreamEvents.OUTPUT_TEXT_DELTA:
            data = event.delta
            if remove_parentheses:
                data = data.removeprefix(")")
@@ -340,7 +344,7 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
            if data:
                yield {"content": data}

-        elif isinstance(event, LLMResponseReasoningSummaryTextDeltaEvent):
+        elif event_type == ResponsesAPIStreamEvents.REASONING_SUMMARY_TEXT_DELTA:
            # OpenAI can output several reasoning summaries
            # in a single ResponseReasoningItem. We split them as separate
            # AssistantContent messages. Only last of them will have
@@ -354,14 +358,14 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                last_summary_index = event.summary_index
            yield {"thinking_content": event.delta}

-        elif isinstance(event, LLMResponseFunctionCallArgumentsDeltaEvent):
+        elif event_type == ResponsesAPIStreamEvents.FUNCTION_CALL_ARGUMENTS_DELTA:
            if current_tool_call is not None:
                current_tool_call.arguments += event.delta

-        elif isinstance(event, LLMResponseWebSearchCallSearchingEvent):
+        elif event_type == ResponsesAPIStreamEvents.WEB_SEARCH_CALL_SEARCHING:
            yield {"role": "assistant"}

-        elif isinstance(event, LLMResponseFunctionCallArgumentsDoneEvent):
+        elif event_type == ResponsesAPIStreamEvents.FUNCTION_CALL_ARGUMENTS_DONE:
            if current_tool_call is not None:
                current_tool_call.status = "completed"
@@ -381,36 +385,35 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                ]
            }

-        elif isinstance(event, LLMResponseCompletedEvent):
-            response = event.response
-            if response and "usage" in response:
-                usage = response["usage"]
+        elif event_type == ResponsesAPIStreamEvents.RESPONSE_COMPLETED:
+            if event.response.usage is not None:
                chat_log.async_trace(
                    {
                        "stats": {
-                            "input_tokens": usage.get("input_tokens"),
-                            "output_tokens": usage.get("output_tokens"),
+                            "input_tokens": event.response.usage.input_tokens,
+                            "output_tokens": event.response.usage.output_tokens,
                        }
                    }
                )

-        elif isinstance(event, LLMResponseIncompleteEvent):
-            response = event.response
-            if response and "usage" in response:
-                usage = response["usage"]
+        elif event_type == ResponsesAPIStreamEvents.RESPONSE_INCOMPLETE:
+            if event.response.usage is not None:
                chat_log.async_trace(
                    {
                        "stats": {
-                            "input_tokens": usage.get("input_tokens"),
-                            "output_tokens": usage.get("output_tokens"),
+                            "input_tokens": event.response.usage.input_tokens,
+                            "output_tokens": event.response.usage.output_tokens,
                        }
                    }
                )

-            incomplete_details = response.get("incomplete_details")
-            reason = "unknown reason"
-            if incomplete_details is not None and incomplete_details.get("reason"):
-                reason = incomplete_details["reason"]
+            if (
+                event.response.incomplete_details
+                and event.response.incomplete_details.reason
+            ):
+                reason: str = event.response.incomplete_details.reason
+            else:
+                reason = "unknown reason"

            if reason == "max_output_tokens":
                reason = "max output tokens reached"
@@ -419,24 +422,22 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have

            raise HomeAssistantError(f"OpenAI response incomplete: {reason}")

-        elif isinstance(event, LLMResponseFailedEvent):
-            response = event.response
-            if response and "usage" in response:
-                usage = response["usage"]
+        elif event_type == ResponsesAPIStreamEvents.RESPONSE_FAILED:
+            if event.response.usage is not None:
                chat_log.async_trace(
                    {
                        "stats": {
-                            "input_tokens": usage.get("input_tokens"),
-                            "output_tokens": usage.get("output_tokens"),
+                            "input_tokens": event.response.usage.input_tokens,
+                            "output_tokens": event.response.usage.output_tokens,
                        }
                    }
                )
            reason = "unknown reason"
-            if isinstance(error := response.get("error"), dict):
-                reason = error.get("message") or reason
+            if event.response.error is not None:
+                reason = event.response.error.message
            raise HomeAssistantError(f"OpenAI response failed: {reason}")

-        elif isinstance(event, LLMResponseErrorEvent):
+        elif event_type == ResponsesAPIStreamEvents.ERROR:
            raise HomeAssistantError(f"OpenAI response error: {event.message}")
@@ -451,7 +452,7 @@ class BaseCloudLLMEntity(Entity):
    async def _prepare_chat_for_generation(
        self,
        chat_log: conversation.ChatLog,
-        messages: list[ResponseInputItemParam],
+        messages: ResponseInputParam,
        response_format: dict[str, Any] | None = None,
    ) -> dict[str, Any]:
        """Prepare kwargs for Cloud LLM from the chat log."""
@@ -459,17 +460,8 @@ class BaseCloudLLMEntity(Entity):
        last_content: Any = chat_log.content[-1]
        if last_content.role == "user" and last_content.attachments:
            files = await self._async_prepare_files_for_prompt(last_content.attachments)

-            last_message = cast(dict[str, Any], messages[-1])
-            assert (
-                last_message["type"] == "message"
-                and last_message["role"] == "user"
-                and isinstance(last_message["content"], str)
-            )
-            last_message["content"] = [
-                {"type": "input_text", "text": last_message["content"]},
-                *files,
-            ]
+            current_content = last_content.content
+            last_content = [*(current_content or []), *files]

        tools: list[ToolParam] = []
        tool_choice: str | None = None
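The citation-removal comment in the stream transformer above describes a fixed pattern. This snippet applies the exact regex from the diff to a made-up response string to show what it strips:

```python
import re

# Regex copied verbatim from the diff; the sample text below is invented.
citation_regexp = re.compile(r"\(\[([^\]]+)\]\((https?:\/\/[^\)]+)\)")

text = "Paris is the capital of France ([source](https://example.com/paris))."
print(citation_regexp.sub("", text))
# -> "Paris is the capital of France )."
# The leftover ")" is handled separately in the stream handler, which calls
# data.removeprefix(")") on the next text delta when remove_parentheses is set.
```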
@@ -13,6 +13,6 @@
  "integration_type": "system",
  "iot_class": "cloud_push",
  "loggers": ["acme", "hass_nabucasa", "snitun"],
-  "requirements": ["hass-nabucasa==1.12.0", "openai==2.15.0"],
+  "requirements": ["hass-nabucasa==1.11.0"],
  "single_config_entry": true
}
@@ -8,5 +8,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["compit"],
  "quality_scale": "bronze",
-  "requirements": ["compit-inext-api==0.8.0"]
+  "requirements": ["compit-inext-api==0.6.0"]
}
@@ -58,13 +58,12 @@ C4_TO_HA_HVAC_MODE = {

HA_TO_C4_HVAC_MODE = {v: k for k, v in C4_TO_HA_HVAC_MODE.items()}

-# Map the five known Control4 HVAC states to Home Assistant HVAC actions
+# Map Control4 HVAC state to Home Assistant HVAC action
C4_TO_HA_HVAC_ACTION = {
-    "heating": HVACAction.HEATING,
-    "cooling": HVACAction.COOLING,
-    "idle": HVACAction.IDLE,
-    "off": HVACAction.OFF,
+    "heat": HVACAction.HEATING,
+    "cool": HVACAction.COOLING,
+    "dry": HVACAction.DRYING,
+    "fan": HVACAction.FAN,
}
@@ -237,10 +236,7 @@ class Control4Climate(Control4Entity, ClimateEntity):
        if c4_state is None:
            return None
-        # Convert state to lowercase for mapping
-        action = C4_TO_HA_HVAC_ACTION.get(str(c4_state).lower())
-        if action is None:
-            _LOGGER.debug("Unknown HVAC state received from Control4: %s", c4_state)
-        return action
+        return C4_TO_HA_HVAC_ACTION.get(str(c4_state).lower())

    @property
    def target_temperature(self) -> float | None:
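Both sides of the climate diff normalize the reported state with `str(c4_state).lower()` before the lookup, so mixed-case reports from the controller still map. A tiny self-contained illustration (the enum is a stand-in for Home Assistant's `HVACAction`, and the input casing is hypothetical):

```python
from enum import StrEnum


class HVACAction(StrEnum):
    """Stand-in for homeassistant.components.climate.HVACAction."""

    HEATING = "heating"
    COOLING = "cooling"
    DRYING = "drying"
    FAN = "fan"


C4_TO_HA_HVAC_ACTION = {
    "heat": HVACAction.HEATING,
    "cool": HVACAction.COOLING,
    "dry": HVACAction.DRYING,
    "fan": HVACAction.FAN,
}


def to_ha_action(c4_state) -> HVACAction | None:
    """Map a Control4-reported state to an HA action, or None if unmapped."""
    return C4_TO_HA_HVAC_ACTION.get(str(c4_state).lower())


assert to_ha_action("Heat") is HVACAction.HEATING  # mixed case still maps
assert to_ha_action("unknown") is None  # unmapped states become None
```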
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/conversation",
  "integration_type": "entity",
  "quality_scale": "internal",
-  "requirements": ["hassil==3.5.0", "home-assistant-intents==2026.2.3"]
+  "requirements": ["hassil==3.5.0", "home-assistant-intents==2026.1.6"]
}
@@ -7,7 +7,7 @@
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["denonavr"],
-  "requirements": ["denonavr==1.3.1"],
+  "requirements": ["denonavr==1.2.0"],
  "ssdp": [
    {
      "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1",
@@ -1,7 +1,6 @@
"""The Dexcom integration."""

-from pydexcom import Dexcom, Region
-from pydexcom.errors import AccountError, SessionError
+from pydexcom import AccountError, Dexcom, SessionError

from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
@@ -15,13 +14,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: DexcomConfigEntry) -> bo
    """Set up Dexcom from a config entry."""
    try:
        dexcom = await hass.async_add_executor_job(
-            lambda: Dexcom(
-                username=entry.data[CONF_USERNAME],
-                password=entry.data[CONF_PASSWORD],
-                region=Region.OUS
-                if entry.data[CONF_SERVER] == SERVER_OUS
-                else Region.US,
-            )
+            Dexcom,
+            entry.data[CONF_USERNAME],
+            entry.data[CONF_PASSWORD],
+            entry.data[CONF_SERVER] == SERVER_OUS,
        )
    except AccountError:
        return False
@@ -5,8 +5,7 @@ from __future__ import annotations
import logging
from typing import Any

-from pydexcom import Dexcom, Region
-from pydexcom.errors import AccountError, SessionError
+from pydexcom import AccountError, Dexcom, SessionError
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
@@ -38,13 +37,10 @@ class DexcomConfigFlow(ConfigFlow, domain=DOMAIN):
        if user_input is not None:
            try:
                await self.hass.async_add_executor_job(
-                    lambda: Dexcom(
-                        username=user_input[CONF_USERNAME],
-                        password=user_input[CONF_PASSWORD],
-                        region=Region.OUS
-                        if user_input[CONF_SERVER] == SERVER_OUS
-                        else Region.US,
-                    )
+                    Dexcom,
+                    user_input[CONF_USERNAME],
+                    user_input[CONF_PASSWORD],
+                    user_input[CONF_SERVER] == SERVER_OUS,
                )
            except SessionError:
                errors["base"] = "cannot_connect"
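The two pydexcom call styles in this diff differ only in construction: the side using pydexcom 0.5.x builds the client with keyword arguments and a `Region` enum, while the 0.2.x side passes positional arguments with a boolean for the out-of-US server. A minimal sketch of the keyword/Region style, taken from the removed lines above (credentials are placeholders):

```python
from pydexcom import Dexcom, Region

# Placeholder credentials; a real Dexcom Share account is required.
dexcom = Dexcom(
    username="user@example.com",
    password="correct-horse-battery",
    region=Region.OUS,  # Region.US for US accounts
)
# Same call the coordinator below makes on every refresh.
print(dexcom.get_current_glucose_reading())
```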
@@ -18,7 +18,7 @@ _SCAN_INTERVAL = timedelta(seconds=180)
type DexcomConfigEntry = ConfigEntry[DexcomCoordinator]


-class DexcomCoordinator(DataUpdateCoordinator[GlucoseReading | None]):
+class DexcomCoordinator(DataUpdateCoordinator[GlucoseReading]):
    """Dexcom Coordinator."""

    def __init__(
@@ -37,7 +37,7 @@ class DexcomCoordinator(DataUpdateCoordinator[GlucoseReading | None]):
        )
        self.dexcom = dexcom

-    async def _async_update_data(self) -> GlucoseReading | None:
+    async def _async_update_data(self) -> GlucoseReading:
        """Fetch data from API endpoint."""
        return await self.hass.async_add_executor_job(
            self.dexcom.get_current_glucose_reading
@@ -7,5 +7,5 @@
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["pydexcom"],
-  "requirements": ["pydexcom==0.5.1"]
+  "requirements": ["pydexcom==0.2.3"]
}
@@ -12,7 +12,6 @@ from doorbirdpy import DoorBird
import voluptuous as vol

from homeassistant.config_entries import (
-    SOURCE_IGNORE,
    ConfigEntry,
    ConfigFlow,
    ConfigFlowResult,
@@ -219,9 +218,6 @@ class DoorBirdConfigFlow(ConfigFlow, domain=DOMAIN):
        )

        if existing_entry:
-            if existing_entry.source == SOURCE_IGNORE:
-                return self.async_abort(reason="already_configured")
-
            # Check if the host is actually changing
            if existing_entry.data.get(CONF_HOST) != host:
                await self._async_verify_existing_device_for_discovery(
@@ -19,7 +19,7 @@
  "requirements": [
    "aioesphomeapi==43.14.0",
    "esphome-dashboard-api==1.3.0",
-    "bleak-esphome==3.6.0"
+    "bleak-esphome==3.5.0"
  ],
  "zeroconf": ["_esphomelib._tcp.local."]
}
@@ -9,7 +9,7 @@
  "iot_class": "local_polling",
  "loggers": ["fritzconnection"],
  "quality_scale": "bronze",
-  "requirements": ["fritzconnection[qr]==1.15.1", "xmltodict==1.0.2"],
+  "requirements": ["fritzconnection[qr]==1.15.0", "xmltodict==1.0.2"],
  "ssdp": [
    {
      "st": "urn:schemas-upnp-org:device:fritzbox:1"
@@ -7,5 +7,5 @@
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["fritzconnection"],
-  "requirements": ["fritzconnection[qr]==1.15.1"]
+  "requirements": ["fritzconnection[qr]==1.15.0"]
}
@@ -19,7 +19,9 @@
  ],
  "documentation": "https://www.home-assistant.io/integrations/frontend",
  "integration_type": "system",
-  "preview_features": { "winter_mode": {} },
+  "preview_features": {
+    "winter_mode": {}
+  },
  "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20260128.6"]
+  "requirements": ["home-assistant-frontend==20260128.1"]
}
@@ -8,5 +8,5 @@
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["googleapiclient"],
-  "requirements": ["gcal-sync==8.0.0", "oauth2client==4.1.3", "ical==12.1.3"]
+  "requirements": ["gcal-sync==8.0.0", "oauth2client==4.1.3", "ical==12.1.2"]
}
@@ -7,5 +7,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["growattServer"],
|
||||
"requirements": ["growattServer==1.9.0"]
|
||||
"requirements": ["growattServer==1.7.1"]
|
||||
}
|
||||
|
@@ -125,7 +125,7 @@ class AddonManager:
)

@api_error(
"Failed to get the {addon_name} app discovery info",
"Failed to get the {addon_name} add-on discovery info",
expected_error_type=SupervisorError,
)
async def async_get_addon_discovery_info(self) -> dict:
@@ -140,12 +140,12 @@ class AddonManager:
)

if not discovery_info:
raise AddonError(f"Failed to get {self.addon_name} app discovery info")
raise AddonError(f"Failed to get {self.addon_name} add-on discovery info")

return discovery_info.config

@api_error(
"Failed to get the {addon_name} app info",
"Failed to get the {addon_name} add-on info",
expected_error_type=SupervisorError,
)
async def async_get_addon_info(self) -> AddonInfo:
@@ -153,7 +153,7 @@ class AddonManager:
addon_store_info = await self._supervisor_client.store.addon_info(
self.addon_slug
)
self._logger.debug("App store info: %s", addon_store_info.to_dict())
self._logger.debug("Add-on store info: %s", addon_store_info.to_dict())
if not addon_store_info.installed:
return AddonInfo(
available=addon_store_info.available,
@@ -190,7 +190,7 @@ class AddonManager:
return addon_state

@api_error(
"Failed to set the {addon_name} app options",
"Failed to set the {addon_name} add-on options",
expected_error_type=SupervisorError,
)
async def async_set_addon_options(self, config: dict) -> None:
@@ -202,10 +202,10 @@ class AddonManager:
def _check_addon_available(self, addon_info: AddonInfo) -> None:
"""Check if the managed add-on is available."""
if not addon_info.available:
raise AddonError(f"{self.addon_name} app is not available")
raise AddonError(f"{self.addon_name} add-on is not available")

@api_error(
"Failed to install the {addon_name} app", expected_error_type=SupervisorError
"Failed to install the {addon_name} add-on", expected_error_type=SupervisorError
)
async def async_install_addon(self) -> None:
"""Install the managed add-on."""
@@ -216,14 +216,14 @@ class AddonManager:
await self._supervisor_client.store.install_addon(self.addon_slug)

@api_error(
"Failed to uninstall the {addon_name} app",
"Failed to uninstall the {addon_name} add-on",
expected_error_type=SupervisorError,
)
async def async_uninstall_addon(self) -> None:
"""Uninstall the managed add-on."""
await self._supervisor_client.addons.uninstall_addon(self.addon_slug)

@api_error("Failed to update the {addon_name} app")
@api_error("Failed to update the {addon_name} add-on")
async def async_update_addon(self) -> None:
"""Update the managed add-on if needed."""
addon_info = await self.async_get_addon_info()
@@ -231,7 +231,7 @@ class AddonManager:
self._check_addon_available(addon_info)

if addon_info.state is AddonState.NOT_INSTALLED:
raise AddonError(f"{self.addon_name} app is not installed")
raise AddonError(f"{self.addon_name} add-on is not installed")

if not addon_info.update_available:
return
@@ -242,28 +242,28 @@ class AddonManager:
)

@api_error(
"Failed to start the {addon_name} app", expected_error_type=SupervisorError
"Failed to start the {addon_name} add-on", expected_error_type=SupervisorError
)
async def async_start_addon(self) -> None:
"""Start the managed add-on."""
await self._supervisor_client.addons.start_addon(self.addon_slug)

@api_error(
"Failed to restart the {addon_name} app", expected_error_type=SupervisorError
"Failed to restart the {addon_name} add-on", expected_error_type=SupervisorError
)
async def async_restart_addon(self) -> None:
"""Restart the managed add-on."""
await self._supervisor_client.addons.restart_addon(self.addon_slug)

@api_error(
"Failed to stop the {addon_name} app", expected_error_type=SupervisorError
"Failed to stop the {addon_name} add-on", expected_error_type=SupervisorError
)
async def async_stop_addon(self) -> None:
"""Stop the managed add-on."""
await self._supervisor_client.addons.stop_addon(self.addon_slug)

@api_error(
"Failed to create a backup of the {addon_name} app",
"Failed to create a backup of the {addon_name} add-on",
expected_error_type=SupervisorError,
)
async def async_create_backup(self) -> None:
@@ -284,7 +284,7 @@ class AddonManager:
addon_info = await self.async_get_addon_info()

if addon_info.state is AddonState.NOT_INSTALLED:
raise AddonError(f"{self.addon_name} app is not installed")
raise AddonError(f"{self.addon_name} add-on is not installed")

if addon_config != addon_info.options:
await self.async_set_addon_options(addon_config)
@@ -297,7 +297,7 @@ class AddonManager:
"""
if not self._install_task or self._install_task.done():
self._logger.info(
"%s app is not installed. Installing app", self.addon_name
"%s add-on is not installed. Installing add-on", self.addon_name
)
self._install_task = self._async_schedule_addon_operation(
self.async_install_addon, catch_error=catch_error
@@ -316,7 +316,7 @@ class AddonManager:
"""
if not self._install_task or self._install_task.done():
self._logger.info(
"%s app is not installed. Installing app", self.addon_name
"%s add-on is not installed. Installing add-on", self.addon_name
)
self._install_task = self._async_schedule_addon_operation(
self.async_install_addon,
@@ -336,7 +336,7 @@ class AddonManager:
Only schedule a new update task if there's no running task.
"""
if not self._update_task or self._update_task.done():
self._logger.info("Trying to update the %s app", self.addon_name)
self._logger.info("Trying to update the %s add-on", self.addon_name)
self._update_task = self._async_schedule_addon_operation(
self.async_update_addon,
catch_error=catch_error,
@@ -350,7 +350,9 @@ class AddonManager:
Only schedule a new start task if there's no running task.
"""
if not self._start_task or self._start_task.done():
self._logger.info("%s app is not running. Starting app", self.addon_name)
self._logger.info(
"%s add-on is not running. Starting add-on", self.addon_name
)
self._start_task = self._async_schedule_addon_operation(
self.async_start_addon, catch_error=catch_error
)
@@ -363,7 +365,7 @@ class AddonManager:
Only schedule a new restart task if there's no running task.
"""
if not self._restart_task or self._restart_task.done():
self._logger.info("Restarting %s app", self.addon_name)
self._logger.info("Restarting %s add-on", self.addon_name)
self._restart_task = self._async_schedule_addon_operation(
self.async_restart_addon, catch_error=catch_error
)
@@ -380,7 +382,9 @@ class AddonManager:
Only schedule a new setup task if there's no running task.
"""
if not self._start_task or self._start_task.done():
self._logger.info("%s app is not running. Starting app", self.addon_name)
self._logger.info(
"%s add-on is not running. Starting add-on", self.addon_name
)
self._start_task = self._async_schedule_addon_operation(
partial(
self.async_configure_addon,
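The `@api_error(...)` decorator these hunks re-word takes a message template with an `{addon_name}` placeholder plus an `expected_error_type`. A hypothetical sketch of such a decorator, assuming it formats the template with the manager's `addon_name` and re-raises as `AddonError`; the real implementation in `hassio/addon_manager.py` may differ:

```python
# Hypothetical sketch, not the Supervisor code: format the message template
# with the manager's addon_name and wrap expected errors in AddonError.
from functools import wraps


class AddonError(Exception):
    """Raised when an add-on operation fails."""


def api_error(error_message: str, expected_error_type: type[Exception] = Exception):
    def decorator(func):
        @wraps(func)
        async def wrapper(self, *args, **kwargs):
            try:
                return await func(self, *args, **kwargs)
            except expected_error_type as err:
                message = error_message.format(addon_name=self.addon_name)
                raise AddonError(f"{message}: {err}") from err

        return wrapper

    return decorator
```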
@@ -176,7 +176,7 @@ EXTRA_PLACEHOLDERS = {
class SupervisorEntityModel(StrEnum):
"""Supervisor entity model."""

ADDON = "Home Assistant App"
ADDON = "Home Assistant Add-on"
OS = "Home Assistant Operating System"
CORE = "Home Assistant Core"
SUPERVISOR = "Home Assistant Supervisor"

@@ -117,7 +117,7 @@ class HassIODiscovery(HomeAssistantView):
try:
addon_info = await self._supervisor_client.addons.addon_info(data.addon)
except SupervisorError as err:
_LOGGER.error("Can't read app info: %s", err)
_LOGGER.error("Can't read add-on info: %s", err)
return

data.config[ATTR_ADDON] = addon_info.name

@@ -51,7 +51,7 @@
},
"step": {
"fix_menu": {
"description": "App {addon} is set to start at boot but failed to start. Usually this occurs when the configuration is incorrect or the same port is used in multiple apps. Check the configuration as well as logs for {addon} and Supervisor.\n\nUse Start to try again or Disable to turn off the start at boot option.",
"description": "Add-on {addon} is set to start at boot but failed to start. Usually this occurs when the configuration is incorrect or the same port is used in multiple add-ons. Check the configuration as well as logs for {addon} and Supervisor.\n\nUse Start to try again or Disable to turn off the start at boot option.",
"menu_options": {
"addon_disable_boot": "[%key:common::action::disable%]",
"addon_execute_start": "[%key:common::action::start%]"
@@ -59,41 +59,41 @@
}
}
},
"title": "App failed to start at boot"
"title": "Add-on failed to start at boot"
},
"issue_addon_deprecated_addon": {
"fix_flow": {
"abort": {
"apply_suggestion_fail": "Could not uninstall the app. Check the Supervisor logs for more details."
"apply_suggestion_fail": "Could not uninstall the add-on. Check the Supervisor logs for more details."
},
"step": {
"addon_execute_remove": {
"description": "App {addon} is marked deprecated by the developer. This means it is no longer being maintained and so may break or become a security issue over time.\n\nReview the [readme]({addon_info}) and [documentation]({addon_documentation}) of the app to see if the developer provided instructions.\n\nSelecting **Submit** will uninstall this deprecated app. Alternatively, you can check [Home Assistant help]({help_url}) and the [community forum]({community_url}) for alternatives to migrate to."
"description": "Add-on {addon} is marked deprecated by the developer. This means it is no longer being maintained and so may break or become a security issue over time.\n\nReview the [readme]({addon_info}) and [documentation]({addon_documentation}) of the add-on to see if the developer provided instructions.\n\nSelecting **Submit** will uninstall this deprecated add-on. Alternatively, you can check [Home Assistant help]({help_url}) and the [community forum]({community_url}) for alternatives to migrate to."
}
}
},
"title": "Installed app is deprecated"
"title": "Installed add-on is deprecated"
},
"issue_addon_detached_addon_missing": {
"description": "Repository for app {addon} is missing. This means it will not get updates, and backups may not be restored correctly as the Home Assistant Supervisor may not be able to build/download the resources required.\n\nPlease check the [app's documentation]({addon_url}) for installation instructions and add the repository to the store.",
"title": "Missing repository for an installed app"
"description": "Repository for add-on {addon} is missing. This means it will not get updates, and backups may not be restored correctly as the Home Assistant Supervisor may not be able to build/download the resources required.\n\nPlease check the [add-on's documentation]({addon_url}) for installation instructions and add the repository to the store.",
"title": "Missing repository for an installed add-on"
},
"issue_addon_detached_addon_removed": {
"fix_flow": {
"abort": {
"apply_suggestion_fail": "Could not uninstall the app. Check the Supervisor logs for more details."
"apply_suggestion_fail": "Could not uninstall the add-on. Check the Supervisor logs for more details."
},
"step": {
"addon_execute_remove": {
"description": "App {addon} has been removed from the repository it was installed from. This means it will not get updates, and backups may not be restored correctly as the Home Assistant Supervisor may not be able to build/download the resources required.\n\nSelecting **Submit** will uninstall this deprecated app. Alternatively, you can check [Home Assistant help]({help_url}) and the [community forum]({community_url}) for alternatives to migrate to."
"description": "Add-on {addon} has been removed from the repository it was installed from. This means it will not get updates, and backups may not be restored correctly as the Home Assistant Supervisor may not be able to build/download the resources required.\n\nSelecting **Submit** will uninstall this deprecated add-on. Alternatively, you can check [Home Assistant help]({help_url}) and the [community forum]({community_url}) for alternatives to migrate to."
}
}
},
"title": "Installed app has been removed from repository"
"title": "Installed add-on has been removed from repository"
},
"issue_addon_pwned": {
"description": "App {addon} uses secrets/passwords in its configuration which are detected as not secure. See [pwned passwords and secrets]({more_info_pwned}) for more information on this issue.",
"title": "Insecure secrets detected in app configuration"
"description": "Add-on {addon} uses secrets/passwords in its configuration which are detected as not secure. See [pwned passwords and secrets]({more_info_pwned}) for more information on this issue.",
"title": "Insecure secrets detected in add-on configuration"
},
"issue_mount_mount_failed": {
"fix_flow": {
@@ -123,7 +123,7 @@
},
"step": {
"system_execute_rebuild": {
"description": "The default configuration for apps and Home Assistant has changed. To update the configuration with the new defaults, a restart is required for the following:\n\n- {components}"
"description": "The default configuration for add-ons and Home Assistant has changed. To update the configuration with the new defaults, a restart is required for the following:\n\n- {components}"
}
}
},
@@ -160,7 +160,7 @@
},
"step": {
"system_execute_reboot": {
"description": "Settings were changed which require a system reboot to take effect.\n\nThis fix will initiate a system reboot which will make Home Assistant and all the apps inaccessible for a brief period."
"description": "Settings were changed which require a system reboot to take effect.\n\nThis fix will initiate a system reboot which will make Home Assistant and all the Add-ons inaccessible for a brief period."
}
}
},
@@ -203,7 +203,7 @@
"title": "Unsupported system - {reason}"
},
"unsupported_apparmor": {
"description": "System is unsupported because AppArmor is working incorrectly and apps are running in an unprotected and insecure way. For troubleshooting information, select Learn more.",
"description": "System is unsupported because AppArmor is working incorrectly and add-ons are running in an unprotected and insecure way. For troubleshooting information, select Learn more.",
"title": "Unsupported system - AppArmor issues"
},
"unsupported_cgroup_version": {
@@ -510,7 +510,7 @@
"docker_version": "Docker version",
"healthy": "Healthy",
"host_os": "Host operating system",
"installed_addons": "Installed apps",
"installed_addons": "Installed add-ons",
"nameservers": "Nameservers",
"supervisor_api": "Supervisor API",
"supervisor_version": "Supervisor version",

@@ -8,5 +8,5 @@
"iot_class": "local_push",
"loggers": ["pyhik"],
"quality_scale": "legacy",
"requirements": ["pyHik==0.4.2"]
"requirements": ["pyHik==0.4.1"]
}
@@ -169,7 +169,6 @@ async def async_setup_entry(
) -> None:
"""Set up the Home Connect binary sensor."""
setup_home_connect_entry(
hass,
entry,
_get_entities_for_appliance,
async_add_entities,

@@ -73,7 +73,6 @@ async def async_setup_entry(
) -> None:
"""Set up the Home Connect button entities."""
setup_home_connect_entry(
hass,
entry,
_get_entities_for_appliance,
async_add_entities,

@@ -7,44 +7,18 @@ from typing import cast

from aiohomeconnect.model import EventKey

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import DOMAIN
from .coordinator import HomeConnectApplianceData, HomeConnectConfigEntry
from .entity import HomeConnectEntity, HomeConnectOptionEntity


def should_add_option_entity(
description: EntityDescription,
appliance: HomeConnectApplianceData,
entity_registry: er.EntityRegistry,
platform: Platform,
) -> bool:
"""Check if the option entity should be added for the appliance.

This function returns `True` if the option is available in the appliance options
or if the entity was added in previous loads of this integration.
"""
description_key = description.key
return description_key in appliance.options or (
entity_registry.async_get_entity_id(
platform, DOMAIN, f"{appliance.info.ha_id}-{description_key}"
)
is not None
)


def _create_option_entities(
entity_registry: er.EntityRegistry,
entry: HomeConnectConfigEntry,
appliance: HomeConnectApplianceData,
known_entity_unique_ids: dict[str, str],
get_option_entities_for_appliance: Callable[
[HomeConnectConfigEntry, HomeConnectApplianceData, er.EntityRegistry],
[HomeConnectConfigEntry, HomeConnectApplianceData],
list[HomeConnectOptionEntity],
],
async_add_entities: AddConfigEntryEntitiesCallback,
@@ -52,9 +26,7 @@ def _create_option_entities(
"""Create the required option entities for the appliances."""
option_entities_to_add = [
entity
for entity in get_option_entities_for_appliance(
entry, appliance, entity_registry
)
for entity in get_option_entities_for_appliance(entry, appliance)
if entity.unique_id not in known_entity_unique_ids
]
known_entity_unique_ids.update(
@@ -67,14 +39,13 @@ def _create_option_entities(


def _handle_paired_or_connected_appliance(
hass: HomeAssistant,
entry: HomeConnectConfigEntry,
known_entity_unique_ids: dict[str, str],
get_entities_for_appliance: Callable[
[HomeConnectConfigEntry, HomeConnectApplianceData], list[HomeConnectEntity]
],
get_option_entities_for_appliance: Callable[
[HomeConnectConfigEntry, HomeConnectApplianceData, er.EntityRegistry],
[HomeConnectConfigEntry, HomeConnectApplianceData],
list[HomeConnectOptionEntity],
]
| None,
@@ -89,7 +60,6 @@ def _handle_paired_or_connected_appliance(
already or it is the first time we see them when the appliance is connected.
"""
entities: list[HomeConnectEntity] = []
entity_registry = er.async_get(hass)
for appliance in entry.runtime_data.data.values():
entities_to_add = [
entity
@@ -99,9 +69,7 @@ def _handle_paired_or_connected_appliance(
if get_option_entities_for_appliance:
entities_to_add.extend(
entity
for entity in get_option_entities_for_appliance(
entry, appliance, entity_registry
)
for entity in get_option_entities_for_appliance(entry, appliance)
if entity.unique_id not in known_entity_unique_ids
)
for event_key in (
@@ -112,7 +80,6 @@ def _handle_paired_or_connected_appliance(
entry.runtime_data.async_add_listener(
partial(
_create_option_entities,
entity_registry,
entry,
appliance,
known_entity_unique_ids,
@@ -153,14 +120,13 @@ def _handle_depaired_appliance(


def setup_home_connect_entry(
hass: HomeAssistant,
entry: HomeConnectConfigEntry,
get_entities_for_appliance: Callable[
[HomeConnectConfigEntry, HomeConnectApplianceData], list[HomeConnectEntity]
],
async_add_entities: AddConfigEntryEntitiesCallback,
get_option_entities_for_appliance: Callable[
[HomeConnectConfigEntry, HomeConnectApplianceData, er.EntityRegistry],
[HomeConnectConfigEntry, HomeConnectApplianceData],
list[HomeConnectOptionEntity],
]
| None = None,
@@ -175,7 +141,6 @@ def setup_home_connect_entry(
entry.runtime_data.async_add_special_listener(
partial(
_handle_paired_or_connected_appliance,
hass,
entry,
known_entity_unique_ids,
get_entities_for_appliance,
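The `should_add_option_entity` helper removed on the right-hand side keeps option entities alive across reloads: an entity is created if its option is currently offered by the appliance, or if an entity with the unique ID `{ha_id}-{key}` was already registered. A reduced, self-contained sketch of that predicate, with plain sets standing in for the entity registry:

```python
# Reduced sketch of the should_add_option_entity logic above.
def should_add_option(
    key: str,
    available_options: set[str],
    registered_unique_ids: set[str],
    ha_id: str,
) -> bool:
    """Add if the option is offered now, or its entity already exists."""
    return key in available_options or f"{ha_id}-{key}" in registered_unique_ids


assert should_add_option("spin_speed", set(), {"washer-1-spin_speed"}, "washer-1")
assert not should_add_option("spin_speed", set(), set(), "washer-1")
```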
@@ -96,7 +96,6 @@ async def async_setup_entry(
) -> None:
"""Set up the Home Connect light."""
setup_home_connect_entry(
hass,
entry,
_get_entities_for_appliance,
async_add_entities,

@@ -11,13 +11,12 @@ from homeassistant.components.number import (
NumberEntity,
NumberEntityDescription,
)
from homeassistant.const import PERCENTAGE, Platform
from homeassistant.const import PERCENTAGE
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .common import setup_home_connect_entry, should_add_option_entity
from .common import setup_home_connect_entry
from .const import DOMAIN, UNIT_MAP
from .coordinator import HomeConnectApplianceData, HomeConnectConfigEntry
from .entity import HomeConnectEntity, HomeConnectOptionEntity, constraint_fetcher
@@ -137,15 +136,12 @@ def _get_entities_for_appliance(
def _get_option_entities_for_appliance(
entry: HomeConnectConfigEntry,
appliance: HomeConnectApplianceData,
entity_registry: er.EntityRegistry,
) -> list[HomeConnectOptionEntity]:
"""Get a list of currently available option entities."""
return [
HomeConnectOptionNumberEntity(entry.runtime_data, appliance, description)
for description in NUMBER_OPTIONS
if should_add_option_entity(
description, appliance, entity_registry, Platform.NUMBER
)
if description.key in appliance.options
]


@@ -156,7 +152,6 @@ async def async_setup_entry(
) -> None:
"""Set up the Home Connect number."""
setup_home_connect_entry(
hass,
entry,
_get_entities_for_appliance,
async_add_entities,

@@ -11,13 +11,11 @@ from aiohomeconnect.model.error import HomeConnectError
from aiohomeconnect.model.program import Execution

from homeassistant.components.select import SelectEntity, SelectEntityDescription
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .common import setup_home_connect_entry, should_add_option_entity
from .common import setup_home_connect_entry
from .const import (
AVAILABLE_MAPS_ENUM,
BEAN_AMOUNT_OPTIONS,
@@ -360,13 +358,12 @@ def _get_entities_for_appliance(
def _get_option_entities_for_appliance(
entry: HomeConnectConfigEntry,
appliance: HomeConnectApplianceData,
entity_registry: er.EntityRegistry,
) -> list[HomeConnectOptionEntity]:
"""Get a list of entities."""
return [
HomeConnectSelectOptionEntity(entry.runtime_data, appliance, desc)
for desc in PROGRAM_SELECT_OPTION_ENTITY_DESCRIPTIONS
if should_add_option_entity(desc, appliance, entity_registry, Platform.SELECT)
if desc.key in appliance.options
]


@@ -377,7 +374,6 @@ async def async_setup_entry(
) -> None:
"""Set up the Home Connect select entities."""
setup_home_connect_entry(
hass,
entry,
_get_entities_for_appliance,
async_add_entities,

@@ -115,6 +115,7 @@ SENSORS = (
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=UnitOfVolume.MILLILITERS,
device_class=SensorDeviceClass.VOLUME,
state_class=SensorStateClass.TOTAL_INCREASING,
translation_key="hot_water_counter",
),
HomeConnectSensorEntityDescription(
@@ -539,7 +540,6 @@ async def async_setup_entry(
) -> None:
"""Set up the Home Connect sensor."""
setup_home_connect_entry(
hass,
entry,
_get_entities_for_appliance,
async_add_entities,

@@ -7,14 +7,12 @@ from aiohomeconnect.model import OptionKey, SettingKey
from aiohomeconnect.model.error import HomeConnectError

from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import UNDEFINED, UndefinedType

from .common import setup_home_connect_entry, should_add_option_entity
from .common import setup_home_connect_entry
from .const import BSH_POWER_OFF, BSH_POWER_ON, BSH_POWER_STANDBY, DOMAIN
from .coordinator import HomeConnectApplianceData, HomeConnectConfigEntry
from .entity import HomeConnectEntity, HomeConnectOptionEntity
@@ -192,15 +190,12 @@ def _get_entities_for_appliance(
def _get_option_entities_for_appliance(
entry: HomeConnectConfigEntry,
appliance: HomeConnectApplianceData,
entity_registry: er.EntityRegistry,
) -> list[HomeConnectOptionEntity]:
"""Get a list of currently available option entities."""
return [
HomeConnectSwitchOptionEntity(entry.runtime_data, appliance, description)
for description in SWITCH_OPTIONS
if should_add_option_entity(
description, appliance, entity_registry, Platform.SWITCH
)
if description.key in appliance.options
]


@@ -211,7 +206,6 @@ async def async_setup_entry(
) -> None:
"""Set up the Home Connect switch."""
setup_home_connect_entry(
hass,
entry,
_get_entities_for_appliance,
async_add_entities,
@@ -63,14 +63,16 @@ class HueBLELight(LightEntity):

self._api = light
self._attr_unique_id = light.address
if light.maximum_mireds:
self._attr_min_color_temp_kelvin = (
color_util.color_temperature_mired_to_kelvin(light.maximum_mireds)
)
if light.minimum_mireds:
self._attr_max_color_temp_kelvin = (
color_util.color_temperature_mired_to_kelvin(light.minimum_mireds)
)
self._attr_min_color_temp_kelvin = (
color_util.color_temperature_mired_to_kelvin(light.maximum_mireds)
if light.maximum_mireds
else None
)
self._attr_max_color_temp_kelvin = (
color_util.color_temperature_mired_to_kelvin(light.minimum_mireds)
if light.minimum_mireds
else None
)
self._attr_device_info = DeviceInfo(
name=light.name,
connections={(CONNECTION_BLUETOOTH, light.address)},
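Both variants of the hunk above rely on the reciprocal mired/kelvin relationship, which is why the *maximum* mired value becomes the *minimum* kelvin value and vice versa. A small worked sketch; the helper mirrors what `color_util.color_temperature_mired_to_kelvin` computes, though exact rounding may differ:

```python
def mired_to_kelvin(mired: float) -> int:
    """Convert a color temperature in mireds to kelvin: K = 1_000_000 / M."""
    return int(1_000_000 / mired)


# A bulb with maximum_mireds=500 and minimum_mireds=153 spans roughly
# 2000 K (warmest) to 6535 K (coldest); note the min/max swap.
assert mired_to_kelvin(500) == 2000
assert mired_to_kelvin(153) == 6535
```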
@@ -12,5 +12,5 @@
"iot_class": "local_polling",
"loggers": ["incomfortclient"],
"quality_scale": "platinum",
"requirements": ["incomfort-client==0.6.12"]
"requirements": ["incomfort-client==0.6.11"]
}

@@ -90,7 +90,6 @@
"boiler_int": "Boiler internal",
"buffer": "Buffer",
"central_heating": "Central heating",
"central_heating_low": "Central heating low",
"central_heating_rf": "Central heating rf",
"cv_temperature_too_high_e1": "Temperature too high",
"flame_detection_fault_e6": "Flame detection fault",
@@ -10,7 +10,6 @@ import voluptuous as vol
from homeassistant.components.script import CONF_MODE
from homeassistant.const import CONF_DESCRIPTION, CONF_TYPE, SERVICE_RELOAD
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import (
config_validation as cv,
intent,
@@ -19,7 +18,6 @@ from homeassistant.helpers import (
template,
)
from homeassistant.helpers.reload import async_integration_yaml_config
from homeassistant.helpers.script import async_validate_actions_config
from homeassistant.helpers.typing import ConfigType

_LOGGER = logging.getLogger(__name__)
@@ -87,29 +85,19 @@ async def async_reload(hass: HomeAssistant, service_call: ServiceCall) -> None:

new_intents = new_config[DOMAIN]

await async_load_intents(hass, new_intents)
async_load_intents(hass, new_intents)


async def async_load_intents(
hass: HomeAssistant, intents: dict[str, ConfigType]
) -> None:
def async_load_intents(hass: HomeAssistant, intents: dict[str, ConfigType]) -> None:
"""Load YAML intents into the intent system."""
hass.data[DOMAIN] = intents

for intent_type, conf in intents.items():
if CONF_ACTION in conf:
try:
actions = await async_validate_actions_config(hass, conf[CONF_ACTION])
except (vol.Invalid, HomeAssistantError) as exc:
_LOGGER.error(
"Failed to validate actions for intent %s: %s", intent_type, exc
)
continue  # Skip this intent

script_mode: str = conf.get(CONF_MODE, script.DEFAULT_SCRIPT_MODE)
conf[CONF_ACTION] = script.Script(
hass,
actions,
conf[CONF_ACTION],
f"Intent Script {intent_type}",
DOMAIN,
script_mode=script_mode,
@@ -121,7 +109,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the intent script component."""
intents = config[DOMAIN]

await async_load_intents(hass, intents)
async_load_intents(hass, intents)

async def _handle_reload(service_call: ServiceCall) -> None:
return await async_reload(hass, service_call)
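The left-hand side above turns `async_load_intents` into a coroutine so it can await `async_validate_actions_config` and skip an intent whose actions fail validation, instead of letting setup raise. A reduced, runnable sketch of that validate-then-skip pattern; `validate_actions` is a stand-in, not the Home Assistant helper:

```python
# Reduced sketch: validate each intent's actions, skip only the broken ones.
import asyncio
import logging

_LOGGER = logging.getLogger(__name__)


async def validate_actions(actions):  # stand-in for async_validate_actions_config
    if not isinstance(actions, list):
        raise ValueError("actions must be a list")
    return actions


async def load_intents(intents: dict[str, dict]) -> None:
    """Validate each intent's actions; skip broken intents instead of failing setup."""
    for intent_type, conf in intents.items():
        if "action" not in conf:
            continue
        try:
            conf["action"] = await validate_actions(conf["action"])
        except ValueError as exc:
            _LOGGER.error("Failed to validate actions for intent %s: %s", intent_type, exc)
            continue  # skip only this intent


asyncio.run(load_intents({"GoodIntent": {"action": []}, "BadIntent": {"action": "oops"}}))
```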
@@ -150,9 +150,7 @@ class JellyfinMediaPlayer(JellyfinClientEntity, MediaPlayerEntity):

self._attr_state = state
self._attr_is_volume_muted = volume_muted
# Only update volume_level if the API provides it, otherwise preserve current value
if volume_level is not None:
self._attr_volume_level = volume_level
self._attr_volume_level = volume_level
self._attr_media_content_type = media_content_type
self._attr_media_content_id = media_content_id
self._attr_media_title = media_title
@@ -192,9 +190,7 @@ class JellyfinMediaPlayer(JellyfinClientEntity, MediaPlayerEntity):
)
features = MediaPlayerEntityFeature(0)

if "PlayMediaSource" in commands or self.capabilities.get(
"SupportsMediaControl", False
):
if "PlayMediaSource" in commands:
features |= (
MediaPlayerEntityFeature.BROWSE_MEDIA
| MediaPlayerEntityFeature.PLAY_MEDIA
@@ -205,10 +201,10 @@ class JellyfinMediaPlayer(JellyfinClientEntity, MediaPlayerEntity):
| MediaPlayerEntityFeature.SEARCH_MEDIA
)

if "Mute" in commands and "Unmute" in commands:
if "Mute" in commands:
features |= MediaPlayerEntityFeature.VOLUME_MUTE

if "VolumeSet" in commands or "SetVolume" in commands:
if "VolumeSet" in commands:
features |= MediaPlayerEntityFeature.VOLUME_SET

return features
@@ -223,13 +219,11 @@ class JellyfinMediaPlayer(JellyfinClientEntity, MediaPlayerEntity):
"""Send pause command."""
self.coordinator.api_client.jellyfin.remote_pause(self.session_id)
self._attr_state = MediaPlayerState.PAUSED
self.schedule_update_ha_state()

def media_play(self) -> None:
"""Send play command."""
self.coordinator.api_client.jellyfin.remote_unpause(self.session_id)
self._attr_state = MediaPlayerState.PLAYING
self.schedule_update_ha_state()

def media_play_pause(self) -> None:
"""Send the PlayPause command to the session."""
@@ -239,7 +233,6 @@ class JellyfinMediaPlayer(JellyfinClientEntity, MediaPlayerEntity):
"""Send stop command."""
self.coordinator.api_client.jellyfin.remote_stop(self.session_id)
self._attr_state = MediaPlayerState.IDLE
self.schedule_update_ha_state()

def play_media(
self, media_type: MediaType | str, media_id: str, **kwargs: Any
@@ -254,8 +247,6 @@ class JellyfinMediaPlayer(JellyfinClientEntity, MediaPlayerEntity):
self.coordinator.api_client.jellyfin.remote_set_volume(
self.session_id, int(volume * 100)
)
self._attr_volume_level = volume
self.schedule_update_ha_state()

def mute_volume(self, mute: bool) -> None:
"""Mute the volume."""
@@ -263,8 +254,6 @@ class JellyfinMediaPlayer(JellyfinClientEntity, MediaPlayerEntity):
self.coordinator.api_client.jellyfin.remote_mute(self.session_id)
else:
self.coordinator.api_client.jellyfin.remote_unmute(self.session_id)
self._attr_is_volume_muted = mute
self.schedule_update_ha_state()

async def async_browse_media(
self,
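The left-hand side of these Jellyfin hunks derives the entity's feature bitmask more defensively: mute support requires both `Mute` and `Unmute`, and volume support accepts either `VolumeSet` or `SetVolume`. A reduced sketch of building such a bitmask, with an `IntFlag` standing in for `MediaPlayerEntityFeature`:

```python
# Reduced sketch: map a session's advertised commands onto a feature bitmask.
from enum import IntFlag


class Feature(IntFlag):  # stand-in for MediaPlayerEntityFeature
    VOLUME_MUTE = 1
    VOLUME_SET = 2


def supported_features(commands: set[str]) -> Feature:
    features = Feature(0)
    if {"Mute", "Unmute"} <= commands:  # require both directions, as above
        features |= Feature.VOLUME_MUTE
    if "VolumeSet" in commands or "SetVolume" in commands:
        features |= Feature.VOLUME_SET
    return features


assert supported_features({"Mute", "Unmute", "SetVolume"}) == Feature.VOLUME_MUTE | Feature.VOLUME_SET
assert supported_features({"Mute"}) == Feature(0)
```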
@@ -2,19 +2,16 @@

from __future__ import annotations

import logging
import math
from typing import Any

from propcache.api import cached_property
from xknx.devices import Fan as XknxFan
from xknx.telegram.address import parse_device_group_address

from homeassistant import config_entries
from homeassistant.components.fan import FanEntity, FanEntityFeature
from homeassistant.const import CONF_ENTITY_CATEGORY, CONF_NAME, Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry as er
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import (
AddConfigEntryEntitiesCallback,
async_get_current_platform,
@@ -40,58 +37,6 @@ from .storage.const import (
)
from .storage.util import ConfigExtractor

_LOGGER = logging.getLogger(__name__)


@callback
def async_migrate_yaml_uids(
hass: HomeAssistant, platform_config: list[ConfigType]
) -> None:
"""Migrate entities unique_id for YAML switch-only fan entities."""
# issue was introduced in 2026.1 - this migration in 2026.2
ent_reg = er.async_get(hass)
invalid_uid = str(None)
if (
none_entity_id := ent_reg.async_get_entity_id(Platform.FAN, DOMAIN, invalid_uid)
) is None:
return
for config in platform_config:
if not config.get(KNX_ADDRESS) and (
new_uid_base := config.get(FanSchema.CONF_SWITCH_ADDRESS)
):
break
else:
_LOGGER.info(
"No YAML entry found to migrate fan entity '%s' unique_id from '%s'. Removing entry",
none_entity_id,
invalid_uid,
)
ent_reg.async_remove(none_entity_id)
return
new_uid = str(
parse_device_group_address(
new_uid_base[0],  # list of group addresses - first item is sending address
)
)
try:
ent_reg.async_update_entity(none_entity_id, new_unique_id=str(new_uid))
_LOGGER.info(
"Migrating fan entity '%s' unique_id from '%s' to %s",
none_entity_id,
invalid_uid,
new_uid,
)
except ValueError:
# New unique_id already exists - remove invalid entry. User might have changed YAML
_LOGGER.info(
"Failed to migrate fan entity '%s' unique_id from '%s' to '%s'. "
"Removing the invalid entry",
none_entity_id,
invalid_uid,
new_uid,
)
ent_reg.async_remove(none_entity_id)


async def async_setup_entry(
hass: HomeAssistant,
@@ -112,7 +57,6 @@ async def async_setup_entry(

entities: list[_KnxFan] = []
if yaml_platform_config := knx_module.config_yaml.get(Platform.FAN):
async_migrate_yaml_uids(hass, yaml_platform_config)
entities.extend(
KnxYamlFan(knx_module, entity_config)
for entity_config in yaml_platform_config
@@ -233,10 +177,7 @@ class KnxYamlFan(_KnxFan, KnxYamlEntity):
self._step_range: tuple[int, int] | None = (1, max_step) if max_step else None
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)

if self._device.speed.group_address:
self._attr_unique_id = str(self._device.speed.group_address)
else:
self._attr_unique_id = str(self._device.switch.group_address)
self._attr_unique_id = str(self._device.speed.group_address)


class KnxUiFan(_KnxFan, KnxUiEntity):
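The removed `async_migrate_yaml_uids` above repairs fan entities that were registered with the literal unique ID `"None"`, retargeting them to the switch group address and dropping the stale entry when the new ID is already taken. A toy sketch of that collision-aware retargeting, with a plain dict standing in for the entity registry:

```python
# Toy sketch of the migration above; a dict maps entity_id -> unique_id.
def migrate_unique_id(registry: dict[str, str], entity_id: str, new_uid: str) -> None:
    """Retarget entity_id's unique_id, dropping it if new_uid is already taken."""
    if new_uid in registry.values():
        del registry[entity_id]  # another entity already owns the new id
        return
    registry[entity_id] = new_uid


reg = {"fan.old": "None", "fan.other": "1/2/3"}
migrate_unique_id(reg, "fan.old", "1/2/3")  # collision: stale entry removed
assert "fan.old" not in reg

reg = {"fan.old": "None"}
migrate_unique_id(reg, "fan.old", "1/2/3")  # clean migration
assert reg["fan.old"] == "1/2/3"
```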
@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/local_calendar",
"iot_class": "local_polling",
"loggers": ["ical"],
"requirements": ["ical==12.1.3"]
"requirements": ["ical==12.1.2"]
}

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/local_todo",
"iot_class": "local_polling",
"requirements": ["ical==12.1.3"]
"requirements": ["ical==12.1.2"]
}
@@ -33,7 +33,6 @@ from .const import ( # noqa: F401
CONF_ALLOW_SINGLE_WORD,
CONF_ICON,
CONF_REQUIRE_ADMIN,
CONF_RESOURCE_MODE,
CONF_SHOW_IN_SIDEBAR,
CONF_TITLE,
CONF_URL_PATH,
@@ -62,7 +61,7 @@ def _validate_url_slug(value: Any) -> str:
"""Validate value is a valid url slug."""
if value is None:
raise vol.Invalid("Slug should not be None")
if value != "lovelace" and "-" not in value:
if "-" not in value:
raise vol.Invalid("Url path needs to contain a hyphen (-)")
str_value = str(value)
slg = slugify(str_value, separator="-")
@@ -85,13 +84,9 @@ CONFIG_SCHEMA = vol.Schema(
{
vol.Optional(DOMAIN, default={}): vol.Schema(
{
# Deprecated - Remove in 2026.8
vol.Optional(CONF_MODE, default=MODE_STORAGE): vol.All(
vol.Lower, vol.In([MODE_YAML, MODE_STORAGE])
),
vol.Optional(CONF_RESOURCE_MODE): vol.All(
vol.Lower, vol.In([MODE_YAML, MODE_STORAGE])
),
vol.Optional(CONF_DASHBOARDS): cv.schema_with_slug_keys(
YAML_DASHBOARD_SCHEMA,
slug_validator=_validate_url_slug,
@@ -108,7 +103,7 @@ CONFIG_SCHEMA = vol.Schema(
class LovelaceData:
"""Dataclass to store information in hass.data."""

resource_mode: str # The mode used for resources (yaml or storage)
mode: str
dashboards: dict[str | None, dashboard.LovelaceConfig]
resources: resources.ResourceYAMLCollection | resources.ResourceStorageCollection
yaml_dashboards: dict[str | None, ConfigType]
@@ -119,9 +114,18 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
mode = config[DOMAIN][CONF_MODE]
yaml_resources = config[DOMAIN].get(CONF_RESOURCES)

# resource_mode controls how resources are loaded (yaml vs storage)
# Deprecated - Remove mode fallback in 2026.8
resource_mode = config[DOMAIN].get(CONF_RESOURCE_MODE, mode)
# Deprecated - Remove in 2026.8
# For YAML mode, register the default panel in yaml mode (temporary until user migrates)
if mode == MODE_YAML:
frontend.async_register_built_in_panel(
hass,
DOMAIN,
config={"mode": mode},
sidebar_title="overview",
sidebar_icon="mdi:view-dashboard",
sidebar_default_visible=False,
)
_async_create_yaml_mode_repair(hass)

async def reload_resources_service_handler(service_call: ServiceCall) -> None:
"""Reload yaml resources."""
@@ -145,13 +149,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
)
hass.data[LOVELACE_DATA].resources = resource_collection

default_config: dashboard.LovelaceConfig
resource_collection: (
resources.ResourceYAMLCollection | resources.ResourceStorageCollection
)
default_config = dashboard.LovelaceStorage(hass, None)

# Load resources based on resource_mode
if resource_mode == MODE_YAML:
if mode == MODE_YAML:
default_config = dashboard.LovelaceYAML(hass, None, None)
resource_collection = await create_yaml_resource_col(hass, yaml_resources)

async_register_admin_service(
@@ -174,6 +177,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
)

else:
default_config = dashboard.LovelaceStorage(hass, None)

if yaml_resources is not None:
_LOGGER.warning(
"Lovelace is running in storage mode. Define resources via user"
@@ -190,44 +195,18 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
RESOURCE_UPDATE_FIELDS,
).async_setup(hass)

websocket_api.async_register_command(hass, websocket.websocket_lovelace_info)
websocket_api.async_register_command(hass, websocket.websocket_lovelace_config)
websocket_api.async_register_command(hass, websocket.websocket_lovelace_save_config)
websocket_api.async_register_command(
hass, websocket.websocket_lovelace_delete_config
)

yaml_dashboards = config[DOMAIN].get(CONF_DASHBOARDS, {})

# Deprecated - Remove in 2026.8
# For YAML mode, add the default "lovelace" dashboard if not already defined
# This migrates the legacy yaml mode to a proper yaml dashboard entry
if mode == MODE_YAML and DOMAIN not in yaml_dashboards:
translations = await async_get_translations(
hass, hass.config.language, "dashboard", {onboarding.DOMAIN}
)
title = translations.get(
"component.onboarding.dashboard.overview.title", "Overview"
)
yaml_dashboards = {
DOMAIN: {
CONF_TITLE: title,
CONF_ICON: DEFAULT_ICON,
CONF_SHOW_IN_SIDEBAR: True,
CONF_REQUIRE_ADMIN: False,
CONF_MODE: MODE_YAML,
CONF_FILENAME: LOVELACE_CONFIG_FILE,
},
**yaml_dashboards,
}
_async_create_yaml_mode_repair(hass)

hass.data[LOVELACE_DATA] = LovelaceData(
resource_mode=resource_mode,
mode=mode,
# We store a dictionary mapping url_path: config. None is the default.
dashboards={None: default_config},
resources=resource_collection,
yaml_dashboards=yaml_dashboards,
yaml_dashboards=config[DOMAIN].get(CONF_DASHBOARDS, {}),
)

if hass.config.recovery_mode:
@@ -471,7 +450,7 @@ async def _async_migrate_default_config(
# Deprecated - Remove in 2026.8
@callback
def _async_create_yaml_mode_repair(hass: HomeAssistant) -> None:
"""Create repair issue for YAML mode deprecation."""
"""Create repair issue for YAML mode migration."""
ir.async_create_issue(
hass,
DOMAIN,

@@ -158,15 +158,7 @@ async def _get_dashboard_info(
"""Load a dashboard and return info on views."""
if url_path == DEFAULT_DASHBOARD:
url_path = None

# When url_path is None, prefer "lovelace" dashboard if it exists (for YAML mode)
# Otherwise fall back to dashboards[None] (storage mode default)
if url_path is None:
dashboard = hass.data[LOVELACE_DATA].dashboards.get(DOMAIN) or hass.data[
LOVELACE_DATA
].dashboards.get(None)
else:
dashboard = hass.data[LOVELACE_DATA].dashboards.get(url_path)
dashboard = hass.data[LOVELACE_DATA].dashboards.get(url_path)

if dashboard is None:
raise ValueError("Invalid dashboard specified")

@@ -57,7 +57,6 @@ RESOURCE_UPDATE_FIELDS: VolDictType = {
SERVICE_RELOAD_RESOURCES = "reload_resources"
RESOURCE_RELOAD_SERVICE_SCHEMA = vol.Schema({})

CONF_RESOURCE_MODE = "resource_mode"
CONF_TITLE = "title"
CONF_REQUIRE_ADMIN = "require_admin"
CONF_SHOW_IN_SIDEBAR = "show_in_sidebar"

@@ -6,8 +6,8 @@
},
"issues": {
"yaml_mode_deprecated": {
"description": "Your YAML dashboard configuration uses the legacy `mode: yaml` option, which will be removed in Home Assistant 2026.8. Your YAML dashboards will continue to work, you just need to update how they are defined.\n\nTo update your configuration:\n\n1. Remove `mode: yaml` from `lovelace:` in your `configuration.yaml`\n2. Add a dashboard entry instead:\n\n ```yaml\n lovelace:\n resource_mode: yaml\n dashboards:\n lovelace:\n mode: yaml\n filename: {config_file}\n title: Overview\n icon: mdi:view-dashboard\n show_in_sidebar: true\n ```\n\n3. Restart Home Assistant\n\nNote: `resource_mode: yaml` keeps loading resources from YAML. If you want to manage resources through the UI instead, you can remove this line and move your resources to Settings > Dashboards > Resources.",
"title": "Lovelace YAML configuration needs update"
"description": "Starting with Home Assistant 2026.8, the default Lovelace dashboard will no longer support YAML mode. To migrate:\n\n1. Remove `mode: yaml` from `lovelace:` in your `configuration.yaml`\n2. Rename `{config_file}` to a new filename (e.g., `my-dashboard.yaml`)\n3. Add a dashboard entry in your `configuration.yaml`:\n\n```yaml\nlovelace:\n dashboards:\n lovelace:\n mode: yaml\n filename: my-dashboard.yaml\n title: Overview\n icon: mdi:view-dashboard\n show_in_sidebar: true\n```\n\n4. Restart Home Assistant",
"title": "Lovelace YAML mode migration required"
}
},
"services": {

@@ -42,7 +42,9 @@ async def system_health_info(hass: HomeAssistant) -> dict[str, Any]:
else:
health_info[key] = dashboard[key]

if MODE_STORAGE in modes:
if hass.data[LOVELACE_DATA].mode == MODE_YAML:
health_info[CONF_MODE] = MODE_YAML
elif MODE_STORAGE in modes:
health_info[CONF_MODE] = MODE_STORAGE
elif MODE_YAML in modes:
health_info[CONF_MODE] = MODE_YAML

@@ -14,13 +14,7 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.json import json_fragment

from .const import (
CONF_RESOURCE_MODE,
CONF_URL_PATH,
DOMAIN,
LOVELACE_DATA,
ConfigNotFound,
)
from .const import CONF_URL_PATH, LOVELACE_DATA, ConfigNotFound
from .dashboard import LovelaceConfig

if TYPE_CHECKING:
@@ -44,15 +38,7 @@ def _handle_errors[_R](
msg: dict[str, Any],
) -> None:
url_path = msg.get(CONF_URL_PATH)

# When url_path is None, prefer "lovelace" dashboard if it exists (for YAML mode)
# Otherwise fall back to dashboards[None] (storage mode default)
if url_path is None:
config = hass.data[LOVELACE_DATA].dashboards.get(DOMAIN) or hass.data[
LOVELACE_DATA
].dashboards.get(None)
else:
config = hass.data[LOVELACE_DATA].dashboards.get(url_path)
config = hass.data[LOVELACE_DATA].dashboards.get(url_path)

if config is None:
connection.send_error(
@@ -114,20 +100,6 @@ async def websocket_lovelace_resources_impl(
connection.send_result(msg["id"], resources.async_items())


@websocket_api.websocket_command({"type": "lovelace/info"})
@websocket_api.async_response
async def websocket_lovelace_info(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Send Lovelace UI info over WebSocket connection."""
connection.send_result(
msg["id"],
{CONF_RESOURCE_MODE: hass.data[LOVELACE_DATA].resource_mode},
)


@websocket_api.websocket_command(
{
"type": "lovelace/config",
@@ -8,6 +8,6 @@
"iot_class": "calculated",
"loggers": ["yt_dlp"],
"quality_scale": "internal",
"requirements": ["yt-dlp[default]==2026.02.04"],
"requirements": ["yt-dlp[default]==2025.12.08"],
"single_config_entry": true
}
@@ -266,7 +266,7 @@
"name": "Browse media"
},
"clear_playlist": {
"description": "Removes all items from a media player's playlist.",
"description": "Removes all items from the playlist.",
"name": "Clear playlist"
},
"join": {
@@ -284,15 +284,15 @@
"name": "Next"
},
"media_pause": {
"description": "Pauses playback on a media player.",
"description": "Pauses.",
"name": "[%key:common::action::pause%]"
},
"media_play": {
"description": "Starts playback on a media player.",
"description": "Starts playing.",
"name": "Play"
},
"media_play_pause": {
"description": "Toggles play/pause on a media player.",
"description": "Toggles play/pause.",
"name": "Play/Pause"
},
"media_previous_track": {
@@ -310,7 +310,7 @@
"name": "Seek"
},
"media_stop": {
"description": "Stops playback on a media player.",
"description": "Stops playing.",
"name": "[%key:common::action::stop%]"
},
"play_media": {
@@ -374,7 +374,7 @@
"name": "Select sound mode"
},
"select_source": {
"description": "Sends a media player the command to change the input source.",
"description": "Sends the media player the command to change input source.",
"fields": {
"source": {
"description": "Name of the source to switch to. Platform dependent.",
@@ -398,23 +398,23 @@
"name": "[%key:common::action::toggle%]"
},
"turn_off": {
"description": "Turns off the power of a media player.",
"description": "Turns off the power of the media player.",
"name": "[%key:common::action::turn_off%]"
},
"turn_on": {
"description": "Turns on the power of a media player.",
"description": "Turns on the power of the media player.",
"name": "[%key:common::action::turn_on%]"
},
"unjoin": {
"description": "Removes a media player from a group. Only works on platforms which support player groups.",
"description": "Removes the player from a group. Only works on platforms which support player groups.",
"name": "Unjoin"
},
"volume_down": {
"description": "Turns down the volume of a media player.",
"description": "Turns down the volume.",
"name": "Turn down volume"
},
"volume_mute": {
"description": "Mutes or unmutes a media player.",
"description": "Mutes or unmutes the media player.",
"fields": {
"is_volume_muted": {
"description": "Defines whether or not it is muted.",
@@ -424,7 +424,7 @@
"name": "Mute/unmute volume"
},
"volume_set": {
"description": "Sets the volume level of a media player.",
"description": "Sets the volume level.",
"fields": {
"volume_level": {
"description": "The volume. 0 is inaudible, 1 is the maximum volume.",
@@ -434,7 +434,7 @@
"name": "Set volume"
},
"volume_up": {
"description": "Turns up the volume of a media player.",
"description": "Turns up the volume.",
"name": "Turn up volume"
}
},
@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["meteoclimatic"],
"requirements": ["pymeteoclimatic==0.1.1"]
"requirements": ["pymeteoclimatic==0.1.0"]
}

@@ -806,8 +806,6 @@ class CoffeeSystemProgramId(MieleEnum, missing_to_none=True):
24813, # add profile
)
appliance_rinse = 24750, 24759, 24773, 24787, 24788
intermediate_rinsing = 24758
automatic_maintenance = 24778
descaling = 24751
brewing_unit_degrease = 24753
milk_pipework_rinse = 24754

@@ -722,7 +722,7 @@ POLLED_SENSOR_TYPES: Final[tuple[MieleSensorDefinition[MieleFillingLevel], ...]]
description=MieleSensorDescription[MieleFillingLevel](
key="power_disk_level",
translation_key="power_disk_level",
value_fn=lambda value: value.power_disc_filling_level,
value_fn=lambda value: None,
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
),

@@ -288,7 +288,6 @@
"auto": "[%key:common::state::auto%]",
"auto_roast": "Auto roast",
"automatic": "Automatic",
"automatic_maintenance": "Automatic maintenance",
"automatic_plus": "Automatic plus",
"baguettes": "Baguettes",
"barista_assistant": "BaristaAssistant",
@@ -552,7 +551,6 @@
"hygiene": "Hygiene",
"intensive": "Intensive",
"intensive_bake": "Intensive bake",
"intermediate_rinsing": "Intermediate rinsing",
"iridescent_shark_fillet": "Iridescent shark (fillet)",
"japanese_tea": "Japanese tea",
"jasmine_rice_rapid_steam_cooking": "Jasmine rice (rapid steam cooking)",
@@ -34,7 +34,7 @@
},
"user": {
"data": {
"domain": "Domain",
"domain": "[%key:common::config_flow::data::username%]",
"host": "[%key:common::config_flow::data::host%]",
"password": "Dynamic DNS password"
},

@@ -96,6 +96,7 @@ async def validate_nibegw_input(
"""Validate the user input allows us to connect."""

heatpump = HeatPump(Model[data[CONF_MODEL]])
heatpump.word_swap = True
await heatpump.initialize()

connection = NibeGW(
@@ -113,9 +114,6 @@ async def validate_nibegw_input(
"Address already in use", "listening_port", "address_in_use"
) from exception

if heatpump.word_swap is None:
heatpump.word_swap = True

try:
await connection.verify_connectivity()
except (ReadSendException, CoilWriteSendException) as exception:
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/nibe_heatpump",
"integration_type": "device",
"iot_class": "local_polling",
"requirements": ["nibe==2.22.0"]
"requirements": ["nibe==2.21.0"]
}
||||
@@ -594,8 +594,7 @@ UNIT_CONVERTERS: dict[NumberDeviceClass, type[BaseUnitConverter]] = {
}

# We translate units that were using the legacy coding of μ \u00b5
# to units using recommended coding of μ \u03bc and
# we convert alternative accepted units to the preferred unit.
# to units using recommended coding of μ \u03bc
AMBIGUOUS_UNITS: dict[str | None, str] = {
"\u00b5Sv/h": "μSv/h", # aranet: radiation rate
"\u00b5S/cm": UnitOfConductivity.MICROSIEMENS_PER_CM,
@@ -605,9 +604,4 @@ AMBIGUOUS_UNITS: dict[str | None, str] = {
"\u00b5mol/s⋅m²": "μmol/s⋅m²", # fyta: light
"\u00b5g": UnitOfMass.MICROGRAMS,
"\u00b5s": UnitOfTime.MICROSECONDS,
"mVAr": UnitOfReactivePower.MILLIVOLT_AMPERE_REACTIVE,
"VAr": UnitOfReactivePower.VOLT_AMPERE_REACTIVE,
"kVAr": UnitOfReactivePower.KILO_VOLT_AMPERE_REACTIVE,
"VArh": UnitOfReactiveEnergy.VOLT_AMPERE_REACTIVE_HOUR,
"kVArh": UnitOfReactiveEnergy.KILO_VOLT_AMPERE_REACTIVE_HOUR,
}

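The mapping rewrites unit strings that use the legacy MICRO SIGN (U+00B5) to the recommended GREEK SMALL LETTER MU (U+03BC), and folds alternative spellings such as "VAr" into the preferred constants. A minimal sketch of the normalization step; the helper name and the tiny two-entry table are illustrative, not the component's API:

# Excerpt-style table; the real AMBIGUOUS_UNITS dict is much larger.
AMBIGUOUS = {"\u00b5g": "\u03bcg", "VAr": "var"}

def normalize_unit(unit: str | None) -> str | None:
    """Return the preferred spelling of a unit, or the input unchanged."""
    return AMBIGUOUS.get(unit, unit)

assert normalize_unit("\u00b5g") == "μg"  # legacy micro sign becomes Greek mu
assert normalize_unit("kWh") == "kWh"     # unmapped units pass through
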
@@ -14,6 +14,7 @@ from onedrive_personal_sdk.exceptions import (
NotFoundError,
OneDriveException,
)
from onedrive_personal_sdk.models.items import ItemUpdate

from homeassistant.const import CONF_ACCESS_TOKEN, Platform
from homeassistant.core import HomeAssistant
@@ -71,6 +72,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: OneDriveConfigEntry) ->
entry, data={**entry.data, CONF_FOLDER_ID: backup_folder.id}
)

# write instance id to description
if backup_folder.description != (instance_id := await async_get_instance_id(hass)):
await _handle_item_operation(
lambda: client.update_drive_item(
backup_folder.id, ItemUpdate(description=instance_id)
),
folder_name,
)

# update in case folder was renamed manually inside OneDrive
if backup_folder.name != entry.data[CONF_FOLDER_NAME]:
hass.config_entries.async_update_entry(
@@ -112,11 +122,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: OneDriveConfigEntry) ->


async def _migrate_backup_files(client: OneDriveClient, backup_folder_id: str) -> None:
"""Migrate backup files from metadata version 1 to version 2.

Version 1: Backup metadata was stored in the backup file's description field.
Version 2: Backup metadata is stored in a separate .metadata.json file.
"""
"""Migrate backup files to metadata version 2."""
files = await client.list_drive_items(backup_folder_id)
for file in files:
if file.description and '"metadata_version": 1' in (
@@ -125,11 +131,24 @@ async def _migrate_backup_files(client: OneDriveClient, backup_folder_id: str) -
metadata = loads(metadata_json)
del metadata["metadata_version"]
metadata_filename = file.name.rsplit(".", 1)[0] + ".metadata.json"
await client.upload_file(
metadata_file = await client.upload_file(
backup_folder_id,
metadata_filename,
dumps(metadata),
)
metadata_description = {
"metadata_version": 2,
"backup_id": metadata["backup_id"],
"backup_file_id": file.id,
}
await client.update_drive_item(
path_or_id=metadata_file.id,
data=ItemUpdate(description=dumps(metadata_description)),
)
await client.update_drive_item(
path_or_id=file.id,
data=ItemUpdate(description=""),
)
_LOGGER.debug("Migrated backup file %s", file.name)

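For reference, the version-2 marker that the migration writes into the metadata file's description is a small JSON object pointing back at the backup file. An illustrative payload, with invented IDs (real values are OneDrive drive-item IDs):

from json import dumps

metadata_description = {
    "metadata_version": 2,
    "backup_id": "a1b2c3",          # hypothetical backup ID
    "backup_file_id": "0123ABC456",  # hypothetical drive-item ID
}
print(dumps(metadata_description))
# {"metadata_version": 2, "backup_id": "a1b2c3", "backup_file_id": "0123ABC456"}
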
@@ -3,7 +3,10 @@
from __future__ import annotations

from collections.abc import AsyncIterator, Callable, Coroutine
from dataclasses import dataclass
from functools import wraps
from html import unescape
from json import dumps, loads
import logging
from time import time
from typing import Any, Concatenate
@@ -15,6 +18,7 @@ from onedrive_personal_sdk.exceptions import (
HashMismatchError,
OneDriveException,
)
from onedrive_personal_sdk.models.items import ItemUpdate
from onedrive_personal_sdk.models.upload import FileInfo

from homeassistant.components.backup import (
@@ -26,8 +30,6 @@ from homeassistant.components.backup import (
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.json import json_dumps
from homeassistant.util.json import json_loads_object

from .const import CONF_DELETE_PERMANENTLY, DATA_BACKUP_AGENT_LISTENERS, DOMAIN
from .coordinator import OneDriveConfigEntry
@@ -36,6 +38,7 @@ _LOGGER = logging.getLogger(__name__)
MAX_CHUNK_SIZE = 60 * 1024 * 1024 # largest chunk possible, must be <= 60 MiB
TARGET_CHUNKS = 20
TIMEOUT = ClientTimeout(connect=10, total=43200) # 12 hours
METADATA_VERSION = 2
CACHE_TTL = 300

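These constants bound the upload chunk size: aim for roughly TARGET_CHUNKS chunks per file, never exceed MAX_CHUNK_SIZE, and never drop below 320 KiB (the lower clamp visible later in the diff). A sketch of that calculation; the 320 KiB alignment step is an assumption based on Graph upload sessions expecting chunk sizes in multiples of 320 KiB, which the diff itself does not show:

MAX_CHUNK_SIZE = 60 * 1024 * 1024  # per the comment above, must be <= 60 MiB
TARGET_CHUNKS = 20
CHUNK_ALIGN = 320 * 1024  # assumed alignment unit, not shown in the diff

def pick_chunk_size(total_size: int) -> int:
    """Sketch: size/TARGET_CHUNKS, aligned down to 320 KiB, then clamped."""
    size = total_size // TARGET_CHUNKS
    size = (size // CHUNK_ALIGN) * CHUNK_ALIGN  # align to a 320 KiB boundary
    return max(min(size, MAX_CHUNK_SIZE), CHUNK_ALIGN)

assert pick_chunk_size(10 * 1024 * 1024) == CHUNK_ALIGN  # small file hits the floor
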
@@ -101,10 +104,13 @@ def handle_backup_errors[_R, **P](
return wrapper


def suggested_filenames(backup: AgentBackup) -> tuple[str, str]:
"""Return the suggested filenames for the backup and metadata."""
base_name = suggested_filename(backup).rsplit(".", 1)[0]
return f"{base_name}.tar", f"{base_name}.metadata.json"
@dataclass(kw_only=True)
class OneDriveBackup:
"""Define a OneDrive backup."""

backup: AgentBackup
backup_file_id: str
metadata_file_id: str


class OneDriveBackupAgent(BackupAgent):
@@ -123,7 +129,7 @@ class OneDriveBackupAgent(BackupAgent):
self.name = entry.title
assert entry.unique_id
self.unique_id = entry.unique_id
self._cache_backup_metadata: dict[str, AgentBackup] = {}
self._backup_cache: dict[str, OneDriveBackup] = {}
self._cache_expiration = time()

@handle_backup_errors
@@ -131,11 +137,12 @@ class OneDriveBackupAgent(BackupAgent):
self, backup_id: str, **kwargs: Any
) -> AsyncIterator[bytes]:
"""Download a backup file."""
backup = await self._find_backup_by_id(backup_id)
backup_filename, _ = suggested_filenames(backup)
backups = await self._list_cached_backups()
if backup_id not in backups:
raise BackupNotFound(f"Backup {backup_id} not found")

stream = await self._client.download_drive_item(
f"{self._folder_id}:/{backup_filename}:", timeout=TIMEOUT
backups[backup_id].backup_file_id, timeout=TIMEOUT
)
return stream.iter_chunked(1024)

@@ -148,9 +155,9 @@
**kwargs: Any,
) -> None:
"""Upload a backup."""
backup_filename, metadata_filename = suggested_filenames(backup)
filename = suggested_filename(backup)
file = FileInfo(
backup_filename,
filename,
backup.size,
self._folder_id,
await open_stream(),
@@ -166,7 +173,7 @@
upload_chunk_size = max(upload_chunk_size, 320 * 1024)

try:
await LargeFileUploadClient.upload(
backup_file = await LargeFileUploadClient.upload(
self._token_function,
file,
upload_chunk_size=upload_chunk_size,
@@ -178,27 +185,35 @@
"Hash validation failed, backup file might be corrupt"
) from err

_LOGGER.debug("Uploaded backup to %s", backup_filename)

# Store metadata in separate metadata file (just backup.as_dict(), no extra fields)
metadata_content = json_dumps(backup.as_dict())
# store metadata in metadata file
description = dumps(backup.as_dict())
_LOGGER.debug("Creating metadata: %s", description)
metadata_filename = filename.rsplit(".", 1)[0] + ".metadata.json"
try:
await self._client.upload_file(
metadata_file = await self._client.upload_file(
self._folder_id,
metadata_filename,
metadata_content,
description,
)
except OneDriveException:
# Clean up the backup file if metadata upload fails
_LOGGER.debug(
"Uploading metadata failed, deleting backup file %s", backup_filename
)
await self._client.delete_drive_item(
f"{self._folder_id}:/{backup_filename}:"
)
await self._client.delete_drive_item(backup_file.id)
raise

_LOGGER.debug("Uploaded metadata file %s", metadata_filename)
# add metadata to the metadata file
metadata_description = {
"metadata_version": METADATA_VERSION,
"backup_id": backup.backup_id,
"backup_file_id": backup_file.id,
}
try:
await self._client.update_drive_item(
path_or_id=metadata_file.id,
data=ItemUpdate(description=dumps(metadata_description)),
)
except OneDriveException:
await self._client.delete_drive_item(backup_file.id)
await self._client.delete_drive_item(metadata_file.id)
raise
self._cache_expiration = time()

@handle_backup_errors
@@ -208,63 +223,66 @@
**kwargs: Any,
) -> None:
"""Delete a backup file."""
backup = await self._find_backup_by_id(backup_id)
backup_filename, metadata_filename = suggested_filenames(backup)
backups = await self._list_cached_backups()
if backup_id not in backups:
raise BackupNotFound(f"Backup {backup_id} not found")

backup = backups[backup_id]

delete_permanently = self._entry.options.get(CONF_DELETE_PERMANENTLY, False)

await self._client.delete_drive_item(backup.backup_file_id, delete_permanently)
await self._client.delete_drive_item(
f"{self._folder_id}:/{backup_filename}:", delete_permanently
backup.metadata_file_id, delete_permanently
)
await self._client.delete_drive_item(
f"{self._folder_id}:/{metadata_filename}:", delete_permanently
)

_LOGGER.debug("Deleted backup %s", backup_filename)
self._cache_expiration = time()

@handle_backup_errors
async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
"""List backups."""
return list((await self._list_cached_metadata_files()).values())
return [
backup.backup for backup in (await self._list_cached_backups()).values()
]

@handle_backup_errors
async def async_get_backup(self, backup_id: str, **kwargs: Any) -> AgentBackup:
"""Return a backup."""
return await self._find_backup_by_id(backup_id)
backups = await self._list_cached_backups()
if backup_id not in backups:
raise BackupNotFound(f"Backup {backup_id} not found")
return backups[backup_id].backup

async def _list_cached_metadata_files(self) -> dict[str, AgentBackup]:
"""List metadata files with a cache."""
async def _list_cached_backups(self) -> dict[str, OneDriveBackup]:
"""List backups with a cache."""
if time() <= self._cache_expiration:
return self._cache_backup_metadata
return self._backup_cache

async def _download_metadata(item_id: str) -> AgentBackup | None:
"""Download metadata file."""
items = await self._client.list_drive_items(self._folder_id)

async def download_backup_metadata(item_id: str) -> AgentBackup | None:
try:
metadata_stream = await self._client.download_drive_item(item_id)
except OneDriveException as err:
_LOGGER.warning("Error downloading metadata for %s: %s", item_id, err)
return None
metadata_json = loads(await metadata_stream.read())
return AgentBackup.from_dict(metadata_json)

return AgentBackup.from_dict(
json_loads_object(await metadata_stream.read())
)

items = await self._client.list_drive_items(self._folder_id)
metadata_files: dict[str, AgentBackup] = {}
backups: dict[str, OneDriveBackup] = {}
for item in items:
if item.name and item.name.endswith(".metadata.json"):
if metadata := await _download_metadata(item.id):
metadata_files[metadata.backup_id] = metadata
if item.description and f'"metadata_version": {METADATA_VERSION}' in (
metadata_description_json := unescape(item.description)
):
backup = await download_backup_metadata(item.id)
if backup is None:
continue
metadata_description = loads(metadata_description_json)
backups[backup.backup_id] = OneDriveBackup(
backup=backup,
backup_file_id=metadata_description["backup_file_id"],
metadata_file_id=item.id,
)

self._cache_backup_metadata = metadata_files
self._cache_expiration = time() + CACHE_TTL
return self._cache_backup_metadata

async def _find_backup_by_id(self, backup_id: str) -> AgentBackup:
"""Find a backup by its backup ID on remote."""
metadata_files = await self._list_cached_metadata_files()
if backup := metadata_files.get(backup_id):
return backup

raise BackupNotFound(f"Backup {backup_id} not found")
self._backup_cache = backups
return backups

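The listing path above caches results for CACHE_TTL seconds, and every mutating operation (upload, delete) resets _cache_expiration to time(), which invalidates the cache immediately. The same expire-on-write pattern in isolation, as a simplified sketch with generic names:

from collections.abc import Callable
from time import time

CACHE_TTL = 300  # seconds

class TtlCache:
    """Sketch of the expire-on-write cache used by the backup agent."""

    def __init__(self) -> None:
        self._data: dict[str, str] = {}
        self._expires = time()  # starts expired, so the first read refreshes

    def read(self, fetch: Callable[[], dict[str, str]]) -> dict[str, str]:
        if time() <= self._expires:
            return self._data  # still fresh
        self._data = fetch()
        self._expires = time() + CACHE_TTL
        return self._data

    def invalidate(self) -> None:
        self._expires = time()  # next read must refresh

cache = TtlCache()
print(cache.read(lambda: {"backup": "listed"}))  # fetches and caches
cache.invalidate()  # what the agent does after an upload or delete
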
@@ -129,6 +129,9 @@ class OneDriveConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
except OneDriveException:
self.logger.debug("Failed to create folder", exc_info=True)
errors["base"] = "folder_creation_error"
else:
if folder.description and folder.description != instance_id:
errors[CONF_FOLDER_NAME] = "folder_already_in_use"
if not errors:
title = (
f"{self.approot.created_by.user.display_name}'s OneDrive"

@@ -22,6 +22,7 @@
"default": "[%key:common::config_flow::create_entry::authenticated%]"
},
"error": {
"folder_already_in_use": "Folder already used for backups from another Home Assistant instance",
"folder_creation_error": "Failed to create folder",
"folder_rename_error": "Failed to rename folder"
},

@@ -10,7 +10,7 @@ from homeassistant.exceptions import ConfigEntryNotReady

from .coordinator import OpenEVSEConfigEntry, OpenEVSEDataUpdateCoordinator

PLATFORMS = [Platform.NUMBER, Platform.SENSOR]
PLATFORMS = [Platform.SENSOR]


async def async_setup_entry(hass: HomeAssistant, entry: OpenEVSEConfigEntry) -> bool:

@@ -1,114 +0,0 @@
"""Support for OpenEVSE number entities."""

from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from typing import Any

from openevsehttp.__main__ import OpenEVSE

from homeassistant.components.number import (
NumberDeviceClass,
NumberEntity,
NumberEntityDescription,
)
from homeassistant.const import (
ATTR_CONNECTIONS,
ATTR_SERIAL_NUMBER,
EntityCategory,
UnitOfElectricCurrent,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import OpenEVSEConfigEntry, OpenEVSEDataUpdateCoordinator


@dataclass(frozen=True, kw_only=True)
class OpenEVSENumberDescription(NumberEntityDescription):
"""Describes an OpenEVSE number entity."""

value_fn: Callable[[OpenEVSE], float]
min_value_fn: Callable[[OpenEVSE], float]
max_value_fn: Callable[[OpenEVSE], float]
set_value_fn: Callable[[OpenEVSE, float], Awaitable[Any]]


NUMBER_TYPES: tuple[OpenEVSENumberDescription, ...] = (
OpenEVSENumberDescription(
key="charge_rate",
translation_key="charge_rate",
value_fn=lambda ev: ev.max_current_soft,
min_value_fn=lambda ev: ev.min_amps,
max_value_fn=lambda ev: ev.max_amps,
set_value_fn=lambda ev, value: ev.set_current(value),
native_step=1.0,
entity_category=EntityCategory.CONFIG,
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
device_class=NumberDeviceClass.CURRENT,
),
)


async def async_setup_entry(
hass: HomeAssistant,
entry: OpenEVSEConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up OpenEVSE sensors based on config entry."""
coordinator = entry.runtime_data
identifier = entry.unique_id or entry.entry_id
async_add_entities(
OpenEVSENumber(coordinator, description, identifier, entry.unique_id)
for description in NUMBER_TYPES
)


class OpenEVSENumber(CoordinatorEntity[OpenEVSEDataUpdateCoordinator], NumberEntity):
"""Implementation of an OpenEVSE sensor."""

_attr_has_entity_name = True
entity_description: OpenEVSENumberDescription

def __init__(
self,
coordinator: OpenEVSEDataUpdateCoordinator,
description: OpenEVSENumberDescription,
identifier: str,
unique_id: str | None,
) -> None:
"""Initialize the sensor."""
super().__init__(coordinator)
self.entity_description = description
self._attr_unique_id = f"{identifier}-{description.key}"

self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, identifier)},
manufacturer="OpenEVSE",
)
if unique_id:
self._attr_device_info[ATTR_CONNECTIONS] = {
(CONNECTION_NETWORK_MAC, unique_id)
}
self._attr_device_info[ATTR_SERIAL_NUMBER] = unique_id

@property
def native_value(self) -> float:
"""Return the state of the number."""
return self.entity_description.value_fn(self.coordinator.charger)

@property
def native_min_value(self) -> float:
"""Return the minimum value."""
return self.entity_description.min_value_fn(self.coordinator.charger)

@property
def native_max_value(self) -> float:
"""Return the maximum value."""
return self.entity_description.max_value_fn(self.coordinator.charger)

async def async_set_native_value(self, value: float) -> None:
"""Set new value."""
await self.entity_description.set_value_fn(self.coordinator.charger, value)

@@ -30,11 +30,6 @@
}
},
"entity": {
"number": {
"charge_rate": {
"name": "Charge rate"
}
},
"sensor": {
"ambient_temp": {
"name": "Ambient temperature"

@@ -31,6 +31,7 @@ class OpenThermEntity(Entity):
"""Represent an OpenTherm entity."""

_attr_has_entity_name = True
_attr_should_poll = False
entity_description: OpenThermEntityDescription

def __init__(
@@ -60,8 +61,6 @@ class OpenThermEntity(Entity):
class OpenThermStatusEntity(OpenThermEntity):
"""Represent an OpenTherm entity that receives status updates."""

_attr_should_poll = False

async def async_added_to_hass(self) -> None:
"""Subscribe to updates from the component."""
self.async_on_remove(

@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["pyotgw"],
"requirements": ["pyotgw==2.2.3"]
"requirements": ["pyotgw==2.2.2"]
}

@@ -9,5 +9,5 @@
"iot_class": "cloud_polling",
"loggers": ["opower"],
"quality_scale": "bronze",
"requirements": ["opower==0.17.0"]
"requirements": ["opower==0.16.5"]
}

@@ -8,5 +8,5 @@
"documentation": "https://www.home-assistant.io/integrations/otbr",
"integration_type": "service",
"iot_class": "local_polling",
"requirements": ["python-otbr-api==2.8.0"]
"requirements": ["python-otbr-api==2.7.1"]
}

@@ -4,18 +4,15 @@ from __future__ import annotations

from datetime import datetime

from homeassistant.components.calendar import (
CalendarEntity,
CalendarEntityDescription,
CalendarEvent,
)
from homeassistant.components.calendar import CalendarEntity, CalendarEvent
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import CalendarUpdateCoordinator, RadarrConfigEntry, RadarrEvent
from .entity import RadarrEntity

CALENDAR_TYPE = CalendarEntityDescription(
CALENDAR_TYPE = EntityDescription(
key="calendar",
name=None,
)

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["ical"],
"quality_scale": "silver",
"requirements": ["ical==12.1.3"]
"requirements": ["ical==12.1.2"]
}

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["renault_api"],
"quality_scale": "silver",
"requirements": ["renault-api==0.5.3"]
"requirements": ["renault-api==0.5.2"]
}

@@ -20,5 +20,5 @@
"iot_class": "local_push",
"loggers": ["reolink_aio"],
"quality_scale": "platinum",
"requirements": ["reolink-aio==0.18.2"]
"requirements": ["reolink-aio==0.18.1"]
}

@@ -840,8 +840,7 @@ STATE_CLASS_UNITS: dict[SensorStateClass | str, set[type[StrEnum] | str | None]]
}

# We translate units that were using the legacy coding of μ \u00b5
# to units using recommended coding of μ \u03bc and
# we convert alternative accepted units to the preferred unit.
# to units using recommended coding of μ \u03bc
AMBIGUOUS_UNITS: dict[str | None, str] = {
"\u00b5Sv/h": "μSv/h", # aranet: radiation rate
"\u00b5S/cm": UnitOfConductivity.MICROSIEMENS_PER_CM,
@@ -851,9 +850,4 @@ AMBIGUOUS_UNITS: dict[str | None, str] = {
"\u00b5mol/s⋅m²": "μmol/s⋅m²", # fyta: light
"\u00b5g": UnitOfMass.MICROGRAMS,
"\u00b5s": UnitOfTime.MICROSECONDS,
"mVAr": UnitOfReactivePower.MILLIVOLT_AMPERE_REACTIVE,
"VAr": UnitOfReactivePower.VOLT_AMPERE_REACTIVE,
"kVAr": UnitOfReactivePower.KILO_VOLT_AMPERE_REACTIVE,
"VArh": UnitOfReactiveEnergy.VOLT_AMPERE_REACTIVE_HOUR,
"kVArh": UnitOfReactiveEnergy.KILO_VOLT_AMPERE_REACTIVE_HOUR,
}

@@ -6,7 +6,6 @@ from datetime import timedelta
from http import HTTPStatus
import logging

from aiohttp import ClientResponseError
from httpx import HTTPStatusError, RequestError
import jwt
from pysenz import SENZAPI, Thermostat
@@ -71,26 +70,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: SENZConfigEntry) -> bool
translation_domain=DOMAIN,
translation_key="config_entry_not_ready",
) from err
except ClientResponseError as err:
if err.status in (HTTPStatus.UNAUTHORIZED, HTTPStatus.BAD_REQUEST):
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="config_entry_auth_failed",
) from err
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="config_entry_not_ready",
) from err
except RequestError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="config_entry_not_ready",
) from err
except Exception as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="config_entry_auth_failed",
) from err

coordinator: SENZDataUpdateCoordinator = DataUpdateCoordinator(
hass,

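The setup path being removed here distinguishes credential problems (400/401 raise ConfigEntryAuthFailed, which starts a re-auth flow) from transient failures (ConfigEntryNotReady, which Home Assistant retries). Reduced to its skeleton, with placeholder exception classes since the real code raises with translation keys:

from http import HTTPStatus

class AuthFailed(Exception): ...
class NotReady(Exception): ...

def classify(status: int) -> Exception:
    """Sketch: map an HTTP status to the config-entry exception to raise."""
    if status in (HTTPStatus.UNAUTHORIZED, HTTPStatus.BAD_REQUEST):
        return AuthFailed()  # credentials are bad, re-authenticate
    return NotReady()        # transient, setup is retried later

assert isinstance(classify(401), AuthFailed)
assert isinstance(classify(503), NotReady)
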
@@ -14,7 +14,7 @@ from homeassistant.components.sensor import (
SensorStateClass,
)
from homeassistant.const import UnitOfTemperature
from homeassistant.core import HomeAssistant, callback
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
@@ -81,12 +81,6 @@ class SENZSensor(CoordinatorEntity[SENZDataUpdateCoordinator], SensorEntity):
serial_number=thermostat.serial_number,
)

@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
self._thermostat = self.coordinator.data[self._thermostat.serial_number]
self.async_write_ha_state()

@property
def available(self) -> bool:
"""Return True if the thermostat is available."""

@@ -59,6 +59,7 @@ from .coordinator import (
)
from .repairs import (
async_manage_ble_scanner_firmware_unsupported_issue,
async_manage_coiot_unconfigured_issue,
async_manage_deprecated_firmware_issue,
async_manage_open_wifi_ap_issue,
async_manage_outbound_websocket_incorrectly_enabled_issue,
@@ -232,6 +233,7 @@ async def _async_setup_block_entry(
await hass.config_entries.async_forward_entry_setups(
entry, runtime_data.platforms
)
async_manage_coiot_unconfigured_issue(hass, entry)
remove_empty_sub_devices(hass, entry)
elif (
sleep_period is None

@@ -47,7 +47,6 @@ from .const import (
ATTR_DEVICE,
ATTR_GENERATION,
BATTERY_DEVICES_WITH_PERMANENT_CONNECTION,
COIOT_UNCONFIGURED_ISSUE_ID,
CONF_BLE_SCANNER_MODE,
CONF_SLEEP_PERIOD,
DOMAIN,
@@ -73,7 +72,6 @@ from .const import (
)
from .utils import (
async_create_issue_unsupported_firmware,
async_manage_coiot_issues_task,
get_block_device_sleep_period,
get_device_entry_gen,
get_host,
@@ -444,19 +442,26 @@ class ShellyBlockCoordinator(ShellyCoordinatorBase[BlockDevice]):
DOMAIN,
PUSH_UPDATE_ISSUE_ID.format(unique=self.mac),
)
ir.async_delete_issue(
self.hass,
DOMAIN,
COIOT_UNCONFIGURED_ISSUE_ID.format(unique=self.mac),
)
self._push_update_failures = 0
elif update_type is BlockUpdateType.COAP_REPLY:
self._push_update_failures += 1
if self._push_update_failures == MAX_PUSH_UPDATE_FAILURES:
self.config_entry.async_create_background_task(
LOGGER.debug(
"Creating issue %s", PUSH_UPDATE_ISSUE_ID.format(unique=self.mac)
)
ir.async_create_issue(
self.hass,
async_manage_coiot_issues_task(self.hass, self.config_entry),
"coiot_issues",
DOMAIN,
PUSH_UPDATE_ISSUE_ID.format(unique=self.mac),
is_fixable=False,
is_persistent=False,
severity=ir.IssueSeverity.ERROR,
learn_more_url="https://www.home-assistant.io/integrations/shelly/#shelly-device-configuration-generation-1",
translation_key="push_update_failure",
translation_placeholders={
"device_name": self.config_entry.title,
"ip_address": self.device.ip_address,
},
)
if self._push_update_failures:
LOGGER.debug(

@@ -5,7 +5,12 @@ from __future__ import annotations
from typing import TYPE_CHECKING

from aioshelly.block_device import BlockDevice
from aioshelly.const import MODEL_OUT_PLUG_S_G3, MODEL_PLUG_S_G3, RPC_GENERATIONS
from aioshelly.const import (
MODEL_OUT_PLUG_S_G3,
MODEL_PLUG,
MODEL_PLUG_S_G3,
RPC_GENERATIONS,
)
from aioshelly.exceptions import DeviceConnectionError, RpcCallError
from aioshelly.rpc_device import RpcDevice
from awesomeversion import AwesomeVersion
@@ -19,6 +24,7 @@ from homeassistant.helpers import issue_registry as ir
from .const import (
BLE_SCANNER_FIRMWARE_UNSUPPORTED_ISSUE_ID,
BLE_SCANNER_MIN_FIRMWARE,
COIOT_UNCONFIGURED_ISSUE_ID,
CONF_BLE_SCANNER_MODE,
DEPRECATED_FIRMWARE_ISSUE_ID,
DEPRECATED_FIRMWARES,
@@ -156,6 +162,51 @@ def async_manage_outbound_websocket_incorrectly_enabled_issue(
ir.async_delete_issue(hass, DOMAIN, issue_id)


@callback
def async_manage_coiot_unconfigured_issue(
hass: HomeAssistant,
entry: ShellyConfigEntry,
) -> None:
"""Manage the CoIoT unconfigured issue."""
issue_id = COIOT_UNCONFIGURED_ISSUE_ID.format(unique=entry.unique_id)

if TYPE_CHECKING:
assert entry.runtime_data.block is not None

device = entry.runtime_data.block.device

if device.model == MODEL_PLUG:
# Shelly Plug Gen 1 does not have CoIoT settings
ir.async_delete_issue(hass, DOMAIN, issue_id)
return

coiot_config = device.settings["coiot"]
coiot_enabled = coiot_config.get("enabled")

# Check if CoIoT is disabled or peer address is not correctly set
if not coiot_enabled or (
(peer_config := coiot_config.get("peer"))
and peer_config != get_coiot_address(hass)
):
ir.async_create_issue(
hass,
DOMAIN,
issue_id,
is_fixable=True,
is_persistent=False,
severity=ir.IssueSeverity.WARNING,
translation_key="coiot_unconfigured",
translation_placeholders={
"device_name": device.name,
"ip_address": device.ip_address,
},
data={"entry_id": entry.entry_id},
)
return

ir.async_delete_issue(hass, DOMAIN, issue_id)


@callback
def async_manage_open_wifi_ap_issue(
hass: HomeAssistant,
@@ -224,7 +275,7 @@ class CoiotConfigureFlow(ShellyBlockRepairsFlow):
self, user_input: dict[str, str] | None = None
) -> data_entry_flow.FlowResult:
"""Handle the confirm step of a fix flow."""
coiot_addr = await get_coiot_address(self.hass)
coiot_addr = get_coiot_address(self.hass)
coiot_port = get_coiot_port(self.hass)
if coiot_addr is None or coiot_port is None:
return self.async_abort(reason="cannot_configure")

@@ -1283,7 +1283,6 @@ RPC_SENSORS: Final = {
key="voltmeter",
sub_key="xvoltage",
translation_key="voltmeter_value",
state_class=SensorStateClass.MEASUREMENT,
removal_condition=lambda _, status, key: (status[key].get("xvoltage") is None),
unit=lambda config: config["xvoltage"]["unit"] or None,
),
@@ -1301,7 +1300,6 @@
key="input",
sub_key="xpercent",
translation_key="analog_value",
state_class=SensorStateClass.MEASUREMENT,
removal_condition=lambda config, status, key: (
config[key]["type"] != "analog"
or config[key]["enable"] is False
@@ -1346,7 +1344,6 @@
key="input",
sub_key="xfreq",
translation_key="pulse_counter_frequency_value",
state_class=SensorStateClass.MEASUREMENT,
removal_condition=lambda config, status, key: (
config[key]["type"] != "count"
or config[key]["enable"] is False

@@ -22,7 +22,6 @@ from aioshelly.const import (
MODEL_EM3,
MODEL_I3,
MODEL_NAMES,
MODEL_PLUG,
RPC_GENERATIONS,
)
from aioshelly.rpc_device import RpcDevice, WsServer
@@ -30,7 +29,6 @@ from yarl import URL

from homeassistant.components import network
from homeassistant.components.http import HomeAssistantView
from homeassistant.components.network import async_get_source_ip
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_HOST,
@@ -56,7 +54,6 @@ from homeassistant.util.dt import utcnow
from .const import (
API_WS_URL,
BASIC_INPUTS_EVENTS_TYPES,
COIOT_UNCONFIGURED_ISSUE_ID,
COMPONENT_ID_PATTERN,
CONF_COAP_PORT,
CONF_GEN,
@@ -69,7 +66,6 @@ from .const import (
GEN2_RELEASE_URL,
LOGGER,
MAX_SCRIPT_SIZE,
PUSH_UPDATE_ISSUE_ID,
ROLE_GENERIC,
RPC_INPUTS_EVENTS_TYPES,
SHAIR_MAX_WORK_HOURS,
@@ -736,12 +732,12 @@ def _get_homeassistant_url(hass: HomeAssistant) -> URL | None:
return URL(raw_url)


async def get_coiot_address(hass: HomeAssistant) -> str | None:
def get_coiot_address(hass: HomeAssistant) -> str | None:
"""Return the CoIoT ip address."""
url = _get_homeassistant_url(hass)
if url is None or url.host is None:
if url is None:
return None
return await async_get_source_ip(hass, url.host)
return str(url.host)


def get_rpc_ws_url(hass: HomeAssistant) -> str | None:
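Note the shape of this change: the old get_coiot_address resolved a source IP asynchronously via async_get_source_ip, while the new synchronous version simply returns the host part of the configured Home Assistant URL. The same extraction in isolation, with yarl and an invented URL value:

from yarl import URL

def host_of(raw_url: str) -> str | None:
    """Sketch: extract the host exactly as the sync helper now does."""
    url = URL(raw_url)
    if url.host is None:
        return None
    return str(url.host)

assert host_of("http://192.168.1.2:8123") == "192.168.1.2"
assert host_of("relative/path") is None  # no host component
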
@@ -1007,86 +1003,3 @@ def is_rpc_ble_scanner_supported(entry: ConfigEntry) -> bool:
entry.runtime_data.rpc_supports_scripts
and not entry.runtime_data.rpc_zigbee_firmware
)


async def check_coiot_config(device: BlockDevice, hass: HomeAssistant) -> bool:
"""Check if CoIoT is correctly configured."""
if device.model == MODEL_PLUG:
# Shelly Plug Gen 1 does not have CoIoT settings
return True

coiot_config = device.settings["coiot"]

# Check if CoIoT is disabled
if not coiot_config.get("enabled"):
return False

coiot_address = await get_coiot_address(hass)
if coiot_address is None:
LOGGER.debug(
"Skipping CoIoT peer check for device %s as no local address is available",
device.name,
)
return True

coiot_peer = f"{coiot_address}:{get_coiot_port(hass)}"
# Check if CoIoT address is not correctly set
if (peer_config := coiot_config.get("peer")) and peer_config != coiot_peer:
LOGGER.debug(
"CoIoT is unconfigured for device %s, peer_config: %s, coiot_peer: %s",
device.name,
peer_config,
coiot_peer,
)
return False

return True


async def async_manage_coiot_issues_task(
hass: HomeAssistant, entry: ConfigEntry
) -> None:
"""CoIoT configuration or push updates issues task."""
config_issue_id = COIOT_UNCONFIGURED_ISSUE_ID.format(unique=entry.unique_id)
push_updates_issue_id = PUSH_UPDATE_ISSUE_ID.format(unique=entry.unique_id)

if TYPE_CHECKING:
assert entry.runtime_data.block is not None

device = entry.runtime_data.block.device

if await check_coiot_config(device, hass):
# CoIoT is correctly configured, create push updates issue
ir.async_delete_issue(hass, DOMAIN, config_issue_id)
ir.async_create_issue(
hass,
DOMAIN,
push_updates_issue_id,
is_fixable=False,
is_persistent=False,
severity=ir.IssueSeverity.ERROR,
learn_more_url="https://www.home-assistant.io/integrations/shelly/#shelly-device-configuration-generation-1",
translation_key="push_update_failure",
translation_placeholders={
"device_name": device.name,
"ip_address": device.ip_address,
},
)
return

# CoIoT is not correctly configured, create config issue
ir.async_delete_issue(hass, DOMAIN, push_updates_issue_id)
ir.async_create_issue(
hass,
DOMAIN,
config_issue_id,
is_fixable=True,
is_persistent=False,
severity=ir.IssueSeverity.WARNING,
translation_key="coiot_unconfigured",
translation_placeholders={
"device_name": device.name,
"ip_address": device.ip_address,
},
data={"entry_id": entry.entry_id},
)

@@ -110,7 +110,9 @@ async def async_setup_entry(
serial,
version,
sensor,
description,
description.native_unit_of_measurement,
description.state_class,
description.device_class,
)
)
async_add_entities(entities)
@@ -129,13 +131,17 @@ class InverterSensorEntity(CoordinatorEntity, SensorEntity):
serial: str,
version: str,
key: str,
entity_description: SensorEntityDescription,
unit: str | None,
state_class: SensorStateClass | str | None,
device_class: SensorDeviceClass | None,
) -> None:
"""Initialize an inverter sensor."""
super().__init__(coordinator)
self._attr_unique_id = uid
self._attr_name = f"{manufacturer} {serial} {key}"
self.entity_description = entity_description
self._attr_native_unit_of_measurement = unit
self._attr_state_class = state_class
self._attr_device_class = device_class
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, serial)},
manufacturer=MANUFACTURER,

@@ -284,8 +284,6 @@ class SystemMonitorCoordinator(TimestampDataUpdateCoordinator[SensorData]):
try:
battery = self._psutil.sensors_battery()
_LOGGER.debug("battery: %s", battery)
except (FileNotFoundError, PermissionError) as err:
_LOGGER.debug("OS error when accessing battery sensors: %s", err)
except (AttributeError, FileNotFoundError):
_LOGGER.debug("OS does not provide battery sensors")

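psutil.sensors_battery() either returns a named tuple (percent, secsleft, power_plugged), returns None when no battery is present, or raises on platforms without battery support, which is why the coordinator guards the call. A minimal standalone probe combining both exception sets seen in this hunk:

import psutil

def battery_percent() -> float | None:
    """Sketch: read the battery level defensively, as the coordinator does."""
    try:
        battery = psutil.sensors_battery()
    except (AttributeError, FileNotFoundError, PermissionError):
        return None  # platform exposes no battery information
    if battery is None:
        return None  # psutil works here, but no battery is present
    return battery.percent

print(battery_percent())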