Compare commits


1 Commit

Author: abmantis
SHA1: 1c4baa8dca
Message: Move CONF_CONDITION to base condition schema
    By the time the condition schema is validated, the correct condition
    platform is already ensured. This removes the need to specify it in
    individual condition schemas and brings them in line with triggers.
Date: 2025-09-16 17:39:48 +01:00
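
The idea in the commit message can be illustrated with a minimal voluptuous sketch. The schema names and keys below are illustrative only, not the actual Home Assistant definitions: the point is that the condition platform key is declared once in a base schema, so platform schemas extend it without re-declaring it, mirroring how trigger schemas are structured.

    # Minimal sketch, assuming a voluptuous-based schema layout similar to
    # Home Assistant's; names other than CONF_CONDITION are hypothetical.
    import voluptuous as vol

    CONF_CONDITION = "condition"  # mirrors homeassistant.const.CONF_CONDITION

    # Base schema shared by all condition platforms; the platform key is
    # validated here, once, before platform-specific validation runs.
    # ALLOW_EXTRA lets platform-specific options pass through this layer.
    BASE_CONDITION_SCHEMA = vol.Schema(
        {vol.Required(CONF_CONDITION): str},
        extra=vol.ALLOW_EXTRA,
    )

    # A platform schema only adds its own options; it no longer needs to
    # re-specify CONF_CONDITION.
    STATE_CONDITION_SCHEMA = BASE_CONDITION_SCHEMA.extend(
        {
            vol.Required("entity_id"): str,
            vol.Required("state"): str,
        }
    )

    if __name__ == "__main__":
        config = {"condition": "state", "entity_id": "light.kitchen", "state": "on"}
        print(STATE_CONDITION_SCHEMA(config))
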
628 changed files with 7051 additions and 33842 deletions

View File

@@ -523,24 +523,22 @@ jobs:
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-uv-${{
env.UV_CACHE_VERSION }}-${{ steps.generate-uv-key.outputs.version }}-${{
env.HA_SHORT_VERSION }}-
- name: Check if apt cache exists
id: cache-apt-check
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
- name: Restore apt cache
if: steps.cache-venv.outputs.cache-hit != 'true'
id: cache-apt
uses: actions/cache@v4.2.4
with:
lookup-only: ${{ steps.cache-venv.outputs.cache-hit == 'true' }}
path: |
${{ env.APT_CACHE_DIR }}
${{ env.APT_LIST_CACHE_DIR }}
key: >-
${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
- name: Install additional OS dependencies
if: |
steps.cache-venv.outputs.cache-hit != 'true'
|| steps.cache-apt-check.outputs.cache-hit != 'true'
if: steps.cache-venv.outputs.cache-hit != 'true'
timeout-minutes: 10
run: |
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
if [[ "${{ steps.cache-apt-check.outputs.cache-hit }}" != 'true' ]]; then
if [[ "${{ steps.cache-apt.outputs.cache-hit }}" != 'true' ]]; then
mkdir -p ${{ env.APT_CACHE_DIR }}
mkdir -p ${{ env.APT_LIST_CACHE_DIR }}
fi
@@ -565,18 +563,9 @@ jobs:
libswscale-dev \
libudev-dev
if [[ "${{ steps.cache-apt-check.outputs.cache-hit }}" != 'true' ]]; then
if [[ "${{ steps.cache-apt.outputs.cache-hit }}" != 'true' ]]; then
sudo chmod -R 755 ${{ env.APT_CACHE_BASE }}
fi
- name: Save apt cache
if: steps.cache-apt-check.outputs.cache-hit != 'true'
uses: actions/cache/save@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: |
${{ env.APT_CACHE_DIR }}
${{ env.APT_LIST_CACHE_DIR }}
key: >-
${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
- name: Create Python virtual environment
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |

View File

@@ -142,7 +142,6 @@ homeassistant.components.cloud.*
homeassistant.components.co2signal.*
homeassistant.components.comelit.*
homeassistant.components.command_line.*
homeassistant.components.compit.*
homeassistant.components.config.*
homeassistant.components.configurator.*
homeassistant.components.cookidoo.*

CODEOWNERS (generated)
View File

@@ -107,8 +107,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/ambient_station/ @bachya
/tests/components/ambient_station/ @bachya
/homeassistant/components/amcrest/ @flacjacket
/homeassistant/components/analytics/ @home-assistant/core
/tests/components/analytics/ @home-assistant/core
/homeassistant/components/analytics/ @home-assistant/core @ludeeus
/tests/components/analytics/ @home-assistant/core @ludeeus
/homeassistant/components/analytics_insights/ @joostlek
/tests/components/analytics_insights/ @joostlek
/homeassistant/components/android_ip_webcam/ @engrbm87
@@ -292,8 +292,6 @@ build.json @home-assistant/supervisor
/tests/components/command_line/ @gjohansson-ST
/homeassistant/components/compensation/ @Petro31
/tests/components/compensation/ @Petro31
/homeassistant/components/compit/ @Przemko92
/tests/components/compit/ @Przemko92
/homeassistant/components/config/ @home-assistant/core
/tests/components/config/ @home-assistant/core
/homeassistant/components/configurator/ @home-assistant/core
@@ -772,8 +770,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/iqvia/ @bachya
/tests/components/iqvia/ @bachya
/homeassistant/components/irish_rail_transport/ @ttroy50
/homeassistant/components/irm_kmi/ @jdejaegh
/tests/components/irm_kmi/ @jdejaegh
/homeassistant/components/iron_os/ @tr4nt0r
/tests/components/iron_os/ @tr4nt0r
/homeassistant/components/isal/ @bdraco
@@ -1537,8 +1533,8 @@ build.json @home-assistant/supervisor
/tests/components/switchbee/ @jafar-atili
/homeassistant/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski @zerzhang
/tests/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski @zerzhang
/homeassistant/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur @XiaoLing-git
/tests/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur @XiaoLing-git
/homeassistant/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
/tests/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
/homeassistant/components/switcher_kis/ @thecode @YogevBokobza
/tests/components/switcher_kis/ @thecode @YogevBokobza
/homeassistant/components/switchmate/ @danielhiversen @qiz-li
@@ -1714,8 +1710,6 @@ build.json @home-assistant/supervisor
/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak @sapuseven
/homeassistant/components/vicare/ @CFenner
/tests/components/vicare/ @CFenner
/homeassistant/components/victron_remote_monitoring/ @AndyTempel
/tests/components/victron_remote_monitoring/ @AndyTempel
/homeassistant/components/vilfo/ @ManneW
/tests/components/vilfo/ @ManneW
/homeassistant/components/vivotek/ @HarlemSquirrel
@@ -1731,8 +1725,8 @@ build.json @home-assistant/supervisor
/tests/components/volumio/ @OnFreund
/homeassistant/components/volvo/ @thomasddn
/tests/components/volvo/ @thomasddn
/homeassistant/components/volvooncall/ @molobrakos @svrooij
/tests/components/volvooncall/ @molobrakos @svrooij
/homeassistant/components/volvooncall/ @molobrakos
/tests/components/volvooncall/ @molobrakos
/homeassistant/components/wake_on_lan/ @ntilley905
/tests/components/wake_on_lan/ @ntilley905
/homeassistant/components/wake_word/ @home-assistant/core @synesthesiam

View File

@@ -4,13 +4,10 @@ from __future__ import annotations
from datetime import timedelta
import logging
from typing import cast
from aioacaia.acaiascale import AcaiaScale
from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError
from bleak import BleakScanner
from homeassistant.components.bluetooth import async_get_scanner
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ADDRESS
from homeassistant.core import HomeAssistant
@@ -45,7 +42,6 @@ class AcaiaCoordinator(DataUpdateCoordinator[None]):
name=entry.title,
is_new_style_scale=entry.data[CONF_IS_NEW_STYLE_SCALE],
notify_callback=self.async_update_listeners,
scanner=cast(BleakScanner, async_get_scanner(hass)),
)
@property

View File

@@ -26,5 +26,5 @@
"iot_class": "local_push",
"loggers": ["aioacaia"],
"quality_scale": "platinum",
"requirements": ["aioacaia==0.1.17"]
"requirements": ["aioacaia==0.1.14"]
}

View File

@@ -3,7 +3,6 @@
from __future__ import annotations
from asyncio import timeout
from collections.abc import Mapping
from typing import Any
from accuweather import AccuWeather, ApiError, InvalidApiKeyError, RequestsExceededError
@@ -23,8 +22,6 @@ class AccuWeatherFlowHandler(ConfigFlow, domain=DOMAIN):
"""Config flow for AccuWeather."""
VERSION = 1
_latitude: float | None = None
_longitude: float | None = None
async def async_step_user(
self, user_input: dict[str, Any] | None = None
@@ -77,46 +74,3 @@ class AccuWeatherFlowHandler(ConfigFlow, domain=DOMAIN):
),
errors=errors,
)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle configuration by re-auth."""
self._latitude = entry_data[CONF_LATITUDE]
self._longitude = entry_data[CONF_LONGITUDE]
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Dialog that informs the user that reauth is required."""
errors: dict[str, str] = {}
if user_input is not None:
websession = async_get_clientsession(self.hass)
try:
async with timeout(10):
accuweather = AccuWeather(
user_input[CONF_API_KEY],
websession,
latitude=self._latitude,
longitude=self._longitude,
)
await accuweather.async_get_location()
except (ApiError, ClientConnectorError, TimeoutError, ClientError):
errors["base"] = "cannot_connect"
except InvalidApiKeyError:
errors["base"] = "invalid_api_key"
except RequestsExceededError:
errors["base"] = "requests_exceeded"
else:
return self.async_update_reload_and_abort(
self._get_reauth_entry(), data_updates=user_input
)
return self.async_show_form(
step_id="reauth_confirm",
data_schema=vol.Schema({vol.Required(CONF_API_KEY): str}),
errors=errors,
)

View File

@@ -15,7 +15,6 @@ from aiohttp.client_exceptions import ClientConnectorError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_NAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.update_coordinator import (
DataUpdateCoordinator,
@@ -31,7 +30,7 @@ from .const import (
UPDATE_INTERVAL_OBSERVATION,
)
EXCEPTIONS = (ApiError, ClientConnectorError, RequestsExceededError)
EXCEPTIONS = (ApiError, ClientConnectorError, InvalidApiKeyError, RequestsExceededError)
_LOGGER = logging.getLogger(__name__)
@@ -53,8 +52,6 @@ class AccuWeatherObservationDataUpdateCoordinator(
):
"""Class to manage fetching AccuWeather data API."""
config_entry: AccuWeatherConfigEntry
def __init__(
self,
hass: HomeAssistant,
@@ -90,12 +87,6 @@ class AccuWeatherObservationDataUpdateCoordinator(
translation_key="current_conditions_update_error",
translation_placeholders={"error": repr(error)},
) from error
except InvalidApiKeyError as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="auth_error",
translation_placeholders={"entry": self.config_entry.title},
) from err
_LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)
@@ -107,8 +98,6 @@ class AccuWeatherForecastDataUpdateCoordinator(
):
"""Base class for AccuWeather forecast."""
config_entry: AccuWeatherConfigEntry
def __init__(
self,
hass: HomeAssistant,
@@ -148,12 +137,6 @@ class AccuWeatherForecastDataUpdateCoordinator(
translation_key="forecast_update_error",
translation_placeholders={"error": repr(error)},
) from error
except InvalidApiKeyError as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="auth_error",
translation_placeholders={"entry": self.config_entry.title},
) from err
_LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)
return result

View File

@@ -7,17 +7,6 @@
"api_key": "[%key:common::config_flow::data::api_key%]",
"latitude": "[%key:common::config_flow::data::latitude%]",
"longitude": "[%key:common::config_flow::data::longitude%]"
},
"data_description": {
"api_key": "API key generated in the AccuWeather APIs portal."
}
},
"reauth_confirm": {
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]"
},
"data_description": {
"api_key": "[%key:component::accuweather::config::step::user::data_description::api_key%]"
}
}
},
@@ -30,8 +19,7 @@
"requests_exceeded": "The allowed number of requests to the AccuWeather API has been exceeded. You have to wait or change the API key."
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_location%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
"already_configured": "[%key:common::config_flow::abort::already_configured_location%]"
}
},
"entity": {
@@ -251,9 +239,6 @@
}
},
"exceptions": {
"auth_error": {
"message": "Authentication failed for {entry}, please update your API key"
},
"current_conditions_update_error": {
"message": "An error occurred while retrieving weather current conditions data from the AccuWeather API: {error}"
},

View File

@@ -2,31 +2,21 @@
from __future__ import annotations
from pathlib import Path
from homeassistant.components.media_source import MediaSource, local_source
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from .const import DATA_MEDIA_SOURCE, DOMAIN, IMAGE_DIR
async def async_get_media_source(hass: HomeAssistant) -> MediaSource:
"""Set up local media source."""
media_dirs = list(hass.config.media_dirs.values())
if not media_dirs:
raise HomeAssistantError(
"AI Task media source requires at least one media directory configured"
)
media_dir = Path(media_dirs[0]) / DOMAIN / IMAGE_DIR
media_dir = hass.config.path(f"{DOMAIN}/{IMAGE_DIR}")
hass.data[DATA_MEDIA_SOURCE] = source = local_source.LocalSource(
hass,
DOMAIN,
"AI Generated Images",
{IMAGE_DIR: str(media_dir)},
{IMAGE_DIR: media_dir},
f"/{DOMAIN}",
)
return source

View File

@@ -12,7 +12,7 @@ from typing import Any
import voluptuous as vol
from homeassistant.components import camera, conversation, image, media_source
from homeassistant.components import camera, conversation, media_source
from homeassistant.components.http.auth import async_sign_path
from homeassistant.core import HomeAssistant, ServiceResponse, callback
from homeassistant.exceptions import HomeAssistantError
@@ -31,14 +31,14 @@ from .const import (
)
def _save_camera_snapshot(image_data: camera.Image | image.Image) -> Path:
def _save_camera_snapshot(image: camera.Image) -> Path:
"""Save camera snapshot to temp file."""
with tempfile.NamedTemporaryFile(
mode="wb",
suffix=mimetypes.guess_extension(image_data.content_type, False),
suffix=mimetypes.guess_extension(image.content_type, False),
delete=False,
) as temp_file:
temp_file.write(image_data.content)
temp_file.write(image.content)
return Path(temp_file.name)
@@ -54,31 +54,26 @@ async def _resolve_attachments(
for attachment in attachments or []:
media_content_id = attachment["media_content_id"]
# Special case for certain media sources
for integration in camera, image:
media_source_prefix = f"media-source://{integration.DOMAIN}/"
if not media_content_id.startswith(media_source_prefix):
continue
# Special case for camera media sources
if media_content_id.startswith("media-source://camera/"):
# Extract entity_id from the media content ID
entity_id = media_content_id.removeprefix(media_source_prefix)
entity_id = media_content_id.removeprefix("media-source://camera/")
# Get snapshot from entity
image_data = await integration.async_get_image(hass, entity_id)
# Get snapshot from camera
image = await camera.async_get_image(hass, entity_id)
temp_filename = await hass.async_add_executor_job(
_save_camera_snapshot, image_data
_save_camera_snapshot, image
)
created_files.append(temp_filename)
resolved_attachments.append(
conversation.Attachment(
media_content_id=media_content_id,
mime_type=image_data.content_type,
mime_type=image.content_type,
path=temp_filename,
)
)
break
else:
# Handle regular media sources
media = await media_source.async_resolve_media(hass, media_content_id, None)

View File

@@ -41,7 +41,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
if call.data.get(ATTR_ENTITY_ID) == ENTITY_MATCH_NONE:
return []
call_ids = await async_extract_entity_ids(call)
call_ids = await async_extract_entity_ids(hass, call)
entity_ids = []
for entity_id in hass.data[DATA_AMCREST][CAMERAS]:
if entity_id not in call_ids:

View File

@@ -12,25 +12,10 @@ from homeassistant.helpers.event import async_call_later, async_track_time_inter
from homeassistant.helpers.typing import ConfigType
from homeassistant.util.hass_dict import HassKey
from .analytics import (
Analytics,
AnalyticsInput,
AnalyticsModifications,
DeviceAnalyticsModifications,
EntityAnalyticsModifications,
async_devices_payload,
)
from .analytics import Analytics
from .const import ATTR_ONBOARDED, ATTR_PREFERENCES, DOMAIN, INTERVAL, PREFERENCE_SCHEMA
from .http import AnalyticsDevicesView
__all__ = [
"AnalyticsInput",
"AnalyticsModifications",
"DeviceAnalyticsModifications",
"EntityAnalyticsModifications",
"async_devices_payload",
]
CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
DATA_COMPONENT: HassKey[Analytics] = HassKey(DOMAIN)

View File

@@ -4,10 +4,9 @@ from __future__ import annotations
import asyncio
from asyncio import timeout
from collections.abc import Awaitable, Callable, Iterable, Mapping
from dataclasses import asdict as dataclass_asdict, dataclass, field
from dataclasses import asdict as dataclass_asdict, dataclass
from datetime import datetime
from typing import Any, Protocol
from typing import Any
import uuid
import aiohttp
@@ -36,14 +35,11 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.hassio import is_hassio
from homeassistant.helpers.singleton import singleton
from homeassistant.helpers.storage import Store
from homeassistant.helpers.system_info import async_get_system_info
from homeassistant.helpers.typing import UNDEFINED, UndefinedType
from homeassistant.loader import (
Integration,
IntegrationNotFound,
async_get_integration,
async_get_integrations,
)
from homeassistant.setup import async_get_loaded_integrations
@@ -79,116 +75,12 @@ from .const import (
ATTR_USER_COUNT,
ATTR_UUID,
ATTR_VERSION,
DOMAIN,
LOGGER,
PREFERENCE_SCHEMA,
STORAGE_KEY,
STORAGE_VERSION,
)
DATA_ANALYTICS_MODIFIERS = "analytics_modifiers"
type AnalyticsModifier = Callable[
[HomeAssistant, AnalyticsInput], Awaitable[AnalyticsModifications]
]
@singleton(DATA_ANALYTICS_MODIFIERS)
def _async_get_modifiers(
hass: HomeAssistant,
) -> dict[str, AnalyticsModifier | None]:
"""Return the analytics modifiers."""
return {}
@dataclass
class AnalyticsInput:
"""Analytics input for a single integration.
This is sent to integrations that implement the platform.
"""
device_ids: Iterable[str] = field(default_factory=list)
entity_ids: Iterable[str] = field(default_factory=list)
@dataclass
class AnalyticsModifications:
"""Analytics config for a single integration.
This is used by integrations that implement the platform.
"""
remove: bool = False
devices: Mapping[str, DeviceAnalyticsModifications] | None = None
entities: Mapping[str, EntityAnalyticsModifications] | None = None
@dataclass
class DeviceAnalyticsModifications:
"""Analytics config for a single device.
This is used by integrations that implement the platform.
"""
remove: bool = False
@dataclass
class EntityAnalyticsModifications:
"""Analytics config for a single entity.
This is used by integrations that implement the platform.
"""
remove: bool = False
capabilities: dict[str, Any] | None | UndefinedType = UNDEFINED
class AnalyticsPlatformProtocol(Protocol):
"""Define the format of analytics platforms."""
async def async_modify_analytics(
self,
hass: HomeAssistant,
analytics_input: AnalyticsInput,
) -> AnalyticsModifications:
"""Modify the analytics."""
async def _async_get_analytics_platform(
hass: HomeAssistant, domain: str
) -> AnalyticsPlatformProtocol | None:
"""Get analytics platform."""
try:
integration = await async_get_integration(hass, domain)
except IntegrationNotFound:
return None
try:
return await integration.async_get_platform(DOMAIN)
except ImportError:
return None
async def _async_get_modifier(
hass: HomeAssistant, domain: str
) -> AnalyticsModifier | None:
"""Get analytics modifier."""
modifiers = _async_get_modifiers(hass)
modifier = modifiers.get(domain, UNDEFINED)
if modifier is not UNDEFINED:
return modifier
platform = await _async_get_analytics_platform(hass, domain)
if platform is None:
modifiers[domain] = None
return None
modifier = getattr(platform, "async_modify_analytics", None)
modifiers[domain] = modifier
return modifier
def gen_uuid() -> str:
"""Generate a new UUID."""
@@ -501,20 +393,17 @@ def _domains_from_yaml_config(yaml_configuration: dict[str, Any]) -> set[str]:
return domains
DEFAULT_ANALYTICS_CONFIG = AnalyticsModifications()
DEFAULT_DEVICE_ANALYTICS_CONFIG = DeviceAnalyticsModifications()
DEFAULT_ENTITY_ANALYTICS_CONFIG = EntityAnalyticsModifications()
async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
async def async_devices_payload(hass: HomeAssistant) -> dict:
"""Return detailed information about entities and devices."""
integrations_info: dict[str, dict[str, Any]] = {}
dev_reg = dr.async_get(hass)
ent_reg = er.async_get(hass)
integration_inputs: dict[str, tuple[list[str], list[str]]] = {}
integration_configs: dict[str, AnalyticsModifications] = {}
# We need to refer to other devices, for example in `via_device` field.
# We don't however send the original device ids outside of Home Assistant,
# instead we refer to devices by (integration_domain, index_in_integration_device_list).
device_id_mapping: dict[str, tuple[str, int]] = {}
# Get device list
for device_entry in dev_reg.devices.values():
if not device_entry.primary_config_entry:
continue
@@ -527,96 +416,27 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
continue
integration_domain = config_entry.domain
integration_input = integration_inputs.setdefault(integration_domain, ([], []))
integration_input[0].append(device_entry.id)
# Get entity list
for entity_entry in ent_reg.entities.values():
integration_domain = entity_entry.platform
integration_input = integration_inputs.setdefault(integration_domain, ([], []))
integration_input[1].append(entity_entry.entity_id)
# Call integrations that implement the analytics platform
for integration_domain, integration_input in integration_inputs.items():
if (
modifier := await _async_get_modifier(hass, integration_domain)
) is not None:
try:
integration_config = await modifier(
hass, AnalyticsInput(*integration_input)
)
except Exception as err: # noqa: BLE001
LOGGER.exception(
"Calling async_modify_analytics for integration '%s' failed: %s",
integration_domain,
err,
)
integration_configs[integration_domain] = AnalyticsModifications(
remove=True
)
continue
if not isinstance(integration_config, AnalyticsModifications):
LOGGER.error( # type: ignore[unreachable]
"Calling async_modify_analytics for integration '%s' did not return an AnalyticsConfig",
integration_domain,
)
integration_configs[integration_domain] = AnalyticsModifications(
remove=True
)
continue
integration_configs[integration_domain] = integration_config
integrations_info: dict[str, dict[str, Any]] = {}
# We need to refer to other devices, for example in `via_device` field.
# We don't however send the original device ids outside of Home Assistant,
# instead we refer to devices by (integration_domain, index_in_integration_device_list).
device_id_mapping: dict[str, tuple[str, int]] = {}
# Fill out information about devices
for integration_domain, integration_input in integration_inputs.items():
integration_config = integration_configs.get(
integration_domain, DEFAULT_ANALYTICS_CONFIG
)
if integration_config.remove:
continue
integration_info = integrations_info.setdefault(
integration_domain, {"devices": [], "entities": []}
)
devices_info = integration_info["devices"]
for device_id in integration_input[0]:
device_config = DEFAULT_DEVICE_ANALYTICS_CONFIG
if integration_config.devices is not None:
device_config = integration_config.devices.get(device_id, device_config)
device_id_mapping[device_entry.id] = (integration_domain, len(devices_info))
if device_config.remove:
continue
device_entry = dev_reg.devices[device_id]
device_id_mapping[device_entry.id] = (integration_domain, len(devices_info))
devices_info.append(
{
"entities": [],
"entry_type": device_entry.entry_type,
"has_configuration_url": device_entry.configuration_url is not None,
"hw_version": device_entry.hw_version,
"manufacturer": device_entry.manufacturer,
"model": device_entry.model,
"model_id": device_entry.model_id,
"sw_version": device_entry.sw_version,
"via_device": device_entry.via_device_id,
}
)
devices_info.append(
{
"entities": [],
"entry_type": device_entry.entry_type,
"has_configuration_url": device_entry.configuration_url is not None,
"hw_version": device_entry.hw_version,
"manufacturer": device_entry.manufacturer,
"model": device_entry.model,
"model_id": device_entry.model_id,
"sw_version": device_entry.sw_version,
"via_device": device_entry.via_device_id,
}
)
# Fill out via_device with new device ids
for integration_info in integrations_info.values():
@@ -625,15 +445,10 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
continue
device_info["via_device"] = device_id_mapping.get(device_info["via_device"])
# Fill out information about entities
for integration_domain, integration_input in integration_inputs.items():
integration_config = integration_configs.get(
integration_domain, DEFAULT_ANALYTICS_CONFIG
)
if integration_config.remove:
continue
ent_reg = er.async_get(hass)
for entity_entry in ent_reg.entities.values():
integration_domain = entity_entry.platform
integration_info = integrations_info.setdefault(
integration_domain, {"devices": [], "entities": []}
)
@@ -641,52 +456,35 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
devices_info = integration_info["devices"]
entities_info = integration_info["entities"]
for entity_id in integration_input[1]:
entity_config = DEFAULT_ENTITY_ANALYTICS_CONFIG
if integration_config.entities is not None:
entity_config = integration_config.entities.get(
entity_id, entity_config
)
entity_state = hass.states.get(entity_entry.entity_id)
if entity_config.remove:
continue
entity_info = {
# LIMITATION: `assumed_state` can be overridden by users;
# we should replace it with the original value in the future.
# It is also not present, if entity is not in the state machine,
# which can happen for disabled entities.
"assumed_state": entity_state.attributes.get(ATTR_ASSUMED_STATE, False)
if entity_state is not None
else None,
"capabilities": entity_entry.capabilities,
"domain": entity_entry.domain,
"entity_category": entity_entry.entity_category,
"has_entity_name": entity_entry.has_entity_name,
"original_device_class": entity_entry.original_device_class,
# LIMITATION: `unit_of_measurement` can be overridden by users;
# we should replace it with the original value in the future.
"unit_of_measurement": entity_entry.unit_of_measurement,
}
entity_entry = ent_reg.entities[entity_id]
entity_state = hass.states.get(entity_entry.entity_id)
entity_info = {
# LIMITATION: `assumed_state` can be overridden by users;
# we should replace it with the original value in the future.
# It is also not present, if entity is not in the state machine,
# which can happen for disabled entities.
"assumed_state": entity_state.attributes.get(ATTR_ASSUMED_STATE, False)
if entity_state is not None
else None,
"capabilities": entity_config.capabilities
if entity_config.capabilities is not UNDEFINED
else entity_entry.capabilities,
"domain": entity_entry.domain,
"entity_category": entity_entry.entity_category,
"has_entity_name": entity_entry.has_entity_name,
"modified_by_integration": ["capabilities"]
if entity_config.capabilities is not UNDEFINED
else None,
"original_device_class": entity_entry.original_device_class,
# LIMITATION: `unit_of_measurement` can be overridden by users;
# we should replace it with the original value in the future.
"unit_of_measurement": entity_entry.unit_of_measurement,
}
if (
((device_id_ := entity_entry.device_id) is not None)
and ((new_device_id := device_id_mapping.get(device_id_)) is not None)
and (new_device_id[0] == integration_domain)
):
device_info = devices_info[new_device_id[1]]
device_info["entities"].append(entity_info)
else:
entities_info.append(entity_info)
if (
((device_id := entity_entry.device_id) is not None)
and ((new_device_id := device_id_mapping.get(device_id)) is not None)
and (new_device_id[0] == integration_domain)
):
device_info = devices_info[new_device_id[1]]
device_info["entities"].append(entity_info)
else:
entities_info.append(entity_info)
integrations = {
domain: integration

View File

@@ -2,7 +2,7 @@
"domain": "analytics",
"name": "Analytics",
"after_dependencies": ["energy", "hassio", "recorder"],
"codeowners": ["@home-assistant/core"],
"codeowners": ["@home-assistant/core", "@ludeeus"],
"dependencies": ["api", "websocket_api", "http"],
"documentation": "https://www.home-assistant.io/integrations/analytics",
"integration_type": "system",

View File

@@ -467,10 +467,7 @@ async def async_setup_entry(
# periodical (or manual) self test since last daemon restart. It might not be available
# when we set up the integration, and we do not know if it would ever be available. Here we
# add it anyway and mark it as unknown initially.
#
# We also sort the resources to ensure the order of entities created is deterministic since
# "APCMODEL" and "MODEL" resources map to the same "Model" name.
for resource in sorted(available_resources | {LAST_S_TEST}):
for resource in available_resources | {LAST_S_TEST}:
if resource not in SENSORS:
_LOGGER.warning("Invalid resource from APCUPSd: %s", resource.upper())
continue

View File

@@ -109,7 +109,7 @@ class AssistPipelineSelect(SelectEntity, restore_state.RestoreEntity):
)
state = await self.async_get_last_state()
if (state is not None) and (state.state in self.options):
if state is not None and state.state in self.options:
self._attr_current_option = state.state
if self.registry_entry and (device_id := self.registry_entry.device_id):
@@ -119,7 +119,7 @@ class AssistPipelineSelect(SelectEntity, restore_state.RestoreEntity):
def cleanup() -> None:
"""Clean up registered device."""
pipeline_data.pipeline_devices.pop(device_id, None)
pipeline_data.pipeline_devices.pop(device_id)
self.async_on_remove(cleanup)

View File

@@ -120,7 +120,6 @@ class AsusWrtBridge(ABC):
def __init__(self, host: str) -> None:
"""Initialize Bridge."""
self._configuration_url = f"http://{host}"
self._host = host
self._firmware: str | None = None
self._label_mac: str | None = None
@@ -128,11 +127,6 @@ class AsusWrtBridge(ABC):
self._model_id: str | None = None
self._serial_number: str | None = None
@property
def configuration_url(self) -> str:
"""Return configuration URL."""
return self._configuration_url
@property
def host(self) -> str:
"""Return hostname."""
@@ -377,7 +371,6 @@ class AsusWrtHttpBridge(AsusWrtBridge):
# get main router properties
if mac := _identity.mac:
self._label_mac = format_mac(mac)
self._configuration_url = self._api.webpanel
self._firmware = str(_identity.firmware)
self._model = _identity.model
self._model_id = _identity.product_id

View File

@@ -388,13 +388,13 @@ class AsusWrtRouter:
def device_info(self) -> DeviceInfo:
"""Return the device information."""
info = DeviceInfo(
configuration_url=self._api.configuration_url,
identifiers={(DOMAIN, self._entry.unique_id or "AsusWRT")},
name=self.host,
model=self._api.model or "Asus Router",
model_id=self._api.model_id,
serial_number=self._api.serial_number,
manufacturer="Asus",
configuration_url=f"http://{self.host}",
)
if self._api.firmware:
info["sw_version"] = self._api.firmware

View File

@@ -2,12 +2,13 @@
from __future__ import annotations
from collections.abc import Callable, Coroutine
import logging
from typing import Any
from aiohttp import ClientResponseError
from yalexs.activity import ActivityType
from yalexs.lock import Lock, LockOperation, LockStatus
from yalexs.activity import ActivityType, ActivityTypes
from yalexs.lock import Lock, LockStatus
from yalexs.util import get_latest_activity, update_lock_detail_from_activity
from homeassistant.components.lock import ATTR_CHANGED_BY, LockEntity, LockEntityFeature
@@ -49,25 +50,30 @@ class AugustLock(AugustEntity, RestoreEntity, LockEntity):
async def async_lock(self, **kwargs: Any) -> None:
"""Lock the device."""
await self._perform_lock_operation(LockOperation.LOCK)
if self._data.push_updates_connected:
await self._data.async_lock_async(self._device_id, self._hyper_bridge)
return
await self._call_lock_operation(self._data.async_lock)
async def async_open(self, **kwargs: Any) -> None:
"""Open/unlatch the device."""
await self._perform_lock_operation(LockOperation.OPEN)
if self._data.push_updates_connected:
await self._data.async_unlatch_async(self._device_id, self._hyper_bridge)
return
await self._call_lock_operation(self._data.async_unlatch)
async def async_unlock(self, **kwargs: Any) -> None:
"""Unlock the device."""
await self._perform_lock_operation(LockOperation.UNLOCK)
if self._data.push_updates_connected:
await self._data.async_unlock_async(self._device_id, self._hyper_bridge)
return
await self._call_lock_operation(self._data.async_unlock)
async def _perform_lock_operation(self, operation: LockOperation) -> None:
"""Perform a lock operation."""
async def _call_lock_operation(
self, lock_operation: Callable[[str], Coroutine[Any, Any, list[ActivityTypes]]]
) -> None:
try:
activities = await self._data.async_operate_lock(
self._device_id,
operation,
self._data.push_updates_connected,
self._hyper_bridge,
)
activities = await lock_operation(self._device_id)
except ClientResponseError as err:
if err.status == LOCK_JAMMED_ERR:
self._detail.lock_status = LockStatus.JAMMED

View File

@@ -29,5 +29,5 @@
"documentation": "https://www.home-assistant.io/integrations/august",
"iot_class": "cloud_push",
"loggers": ["pubnub", "yalexs"],
"requirements": ["yalexs==9.2.0", "yalexs-ble==3.1.2"]
"requirements": ["yalexs==9.0.1", "yalexs-ble==3.1.2"]
}

View File

@@ -1,24 +0,0 @@
"""Analytics platform."""
from homeassistant.components.analytics import (
AnalyticsInput,
AnalyticsModifications,
EntityAnalyticsModifications,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
async def async_modify_analytics(
hass: HomeAssistant, analytics_input: AnalyticsInput
) -> AnalyticsModifications:
"""Modify the analytics."""
ent_reg = er.async_get(hass)
entities: dict[str, EntityAnalyticsModifications] = {}
for entity_id in analytics_input.entity_ids:
entity_entry = ent_reg.entities[entity_id]
if entity_entry.capabilities is not None:
entities[entity_id] = EntityAnalyticsModifications(capabilities=None)
return AnalyticsModifications(entities=entities)

View File

@@ -26,6 +26,7 @@ EXCLUDE_FROM_BACKUP = [
"tmp_backups/*.tar",
"OZW_Log.txt",
"tts/*",
"ai_task/*",
]
EXCLUDE_DATABASE_FROM_BACKUP = [

View File

@@ -205,7 +205,6 @@ class BringActivityCoordinator(BringBaseCoordinator[dict[str, BringActivityData]
async def _async_update_data(self) -> dict[str, BringActivityData]:
"""Fetch activity data from bring."""
self.lists = self.coordinator.lists
list_dict: dict[str, BringActivityData] = {}
for lst in self.lists:

View File

@@ -43,7 +43,7 @@ async def async_setup_entry(
)
lists_added |= new_lists
coordinator.data.async_add_listener(add_entities)
coordinator.activity.async_add_listener(add_entities)
add_entities()
@@ -67,8 +67,7 @@ class BringEventEntity(BringBaseEntity, EventEntity):
def _async_handle_event(self) -> None:
"""Handle the activity event."""
if (bring_list := self.coordinator.data.get(self._list_uuid)) is None:
return
bring_list = self.coordinator.data[self._list_uuid]
last_event_triggered = self.state
if bring_list.activity.timeline and (
last_event_triggered is None

View File

@@ -37,10 +37,6 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.loader import (
async_get_custom_components,
async_get_loaded_integration,
)
from homeassistant.util.location import async_detect_location_info
from .alexa_config import entity_supported as entity_supported_by_alexa
@@ -435,79 +431,6 @@ class DownloadSupportPackageView(HomeAssistantView):
url = "/api/cloud/support_package"
name = "api:cloud:support_package"
async def _get_integration_info(self, hass: HomeAssistant) -> dict[str, Any]:
"""Collect information about active and custom integrations."""
# Get loaded components from hass.config.components
loaded_components = hass.config.components.copy()
# Get custom integrations
custom_domains = set()
with suppress(Exception):
custom_domains = set(await async_get_custom_components(hass))
# Separate built-in and custom integrations
builtin_integrations = []
custom_integrations = []
for domain in sorted(loaded_components):
try:
integration = async_get_loaded_integration(hass, domain)
except Exception: # noqa: BLE001
# Broad exception catch for robustness in support package
# generation. If we can't get integration info,
# just add the domain
if domain in custom_domains:
custom_integrations.append(
{
"domain": domain,
"name": "Unknown",
"version": "Unknown",
"documentation": "Unknown",
}
)
else:
builtin_integrations.append(
{
"domain": domain,
"name": "Unknown",
}
)
else:
if domain in custom_domains:
# This is a custom integration
# include version and documentation link
version = (
str(integration.version) if integration.version else "Unknown"
)
if not (documentation := integration.documentation):
documentation = "Unknown"
custom_integrations.append(
{
"domain": domain,
"name": integration.name,
"version": version,
"documentation": documentation,
}
)
else:
# This is a built-in integration.
# No version needed, as it is always the same as the
# Home Assistant version
builtin_integrations.append(
{
"domain": domain,
"name": integration.name,
}
)
return {
"builtin_count": len(builtin_integrations),
"builtin_integrations": builtin_integrations,
"custom_count": len(custom_integrations),
"custom_integrations": custom_integrations,
}
async def _generate_markdown(
self,
hass: HomeAssistant,
@@ -530,38 +453,6 @@ class DownloadSupportPackageView(HomeAssistantView):
markdown = "## System Information\n\n"
markdown += get_domain_table_markdown(hass_info)
# Add integration information
try:
integration_info = await self._get_integration_info(hass)
except Exception: # noqa: BLE001
# Broad exception catch for robustness in support package generation
# If there's any error getting integration info, just note it
markdown += "## Active integrations\n\n"
markdown += "Unable to collect integration information\n\n"
else:
markdown += "## Active Integrations\n\n"
markdown += f"Built-in integrations: {integration_info['builtin_count']}\n"
markdown += f"Custom integrations: {integration_info['custom_count']}\n\n"
# Built-in integrations
if integration_info["builtin_integrations"]:
markdown += "<details><summary>Built-in integrations</summary>\n\n"
markdown += "Domain | Name\n"
markdown += "--- | ---\n"
for integration in integration_info["builtin_integrations"]:
markdown += f"{integration['domain']} | {integration['name']}\n"
markdown += "\n</details>\n\n"
# Custom integrations
if integration_info["custom_integrations"]:
markdown += "<details><summary>Custom integrations</summary>\n\n"
markdown += "Domain | Name | Version | Documentation\n"
markdown += "--- | --- | --- | ---\n"
for integration in integration_info["custom_integrations"]:
doc_url = integration.get("documentation") or "N/A"
markdown += f"{integration['domain']} | {integration['name']} | {integration['version']} | {doc_url}\n"
markdown += "\n</details>\n\n"
for domain, domain_info in domains_info.items():
domain_info_md = get_domain_table_markdown(domain_info)
markdown += (

View File

@@ -25,11 +25,7 @@ async def async_subscription_info(cloud: Cloud[CloudClient]) -> SubscriptionInfo
return await cloud.payments.subscription_info()
except PaymentsApiError as exception:
_LOGGER.error("Failed to fetch subscription information - %s", exception)
except TimeoutError:
_LOGGER.error(
"A timeout of %s was reached while trying to fetch subscription information",
REQUEST_TIMEOUT,
)
return None

View File

@@ -2,7 +2,7 @@
from abc import abstractmethod
from datetime import timedelta
from typing import Any, TypeVar
from typing import TypeVar
from aiocomelit.api import (
AlarmDataObject,
@@ -13,16 +13,7 @@ from aiocomelit.api import (
ComelitVedoAreaObject,
ComelitVedoZoneObject,
)
from aiocomelit.const import (
BRIDGE,
CLIMATE,
COVER,
IRRIGATION,
LIGHT,
OTHER,
SCENARIO,
VEDO,
)
from aiocomelit.const import BRIDGE, VEDO
from aiocomelit.exceptions import CannotAuthenticate, CannotConnect, CannotRetrieveData
from aiohttp import ClientSession
@@ -120,32 +111,6 @@ class ComelitBaseCoordinator(DataUpdateCoordinator[T]):
async def _async_update_system_data(self) -> T:
"""Class method for updating data."""
async def _async_remove_stale_devices(
self,
previous_list: dict[int, Any],
current_list: dict[int, Any],
dev_type: str,
) -> None:
"""Remove stale devices."""
device_registry = dr.async_get(self.hass)
for i in previous_list:
if i not in current_list:
_LOGGER.debug(
"Detected change in %s devices: index %s removed",
dev_type,
i,
)
identifier = f"{self.config_entry.entry_id}-{dev_type}-{i}"
device = device_registry.async_get_device(
identifiers={(DOMAIN, identifier)}
)
if device:
device_registry.async_update_device(
device_id=device.id,
remove_config_entry_id=self.config_entry.entry_id,
)
class ComelitSerialBridge(
ComelitBaseCoordinator[dict[str, dict[int, ComelitSerialBridgeObject]]]
@@ -172,15 +137,7 @@ class ComelitSerialBridge(
self,
) -> dict[str, dict[int, ComelitSerialBridgeObject]]:
"""Specific method for updating data."""
data = await self.api.get_all_devices()
if self.data:
for dev_type in (CLIMATE, COVER, LIGHT, IRRIGATION, OTHER, SCENARIO):
await self._async_remove_stale_devices(
self.data[dev_type], data[dev_type], dev_type
)
return data
return await self.api.get_all_devices()
class ComelitVedoSystem(ComelitBaseCoordinator[AlarmDataObject]):
@@ -206,14 +163,4 @@ class ComelitVedoSystem(ComelitBaseCoordinator[AlarmDataObject]):
self,
) -> AlarmDataObject:
"""Specific method for updating data."""
data = await self.api.get_all_areas_and_zones()
if self.data:
for obj_type in ("alarm_areas", "alarm_zones"):
await self._async_remove_stale_devices(
self.data[obj_type],
data[obj_type],
"area" if obj_type == "alarm_areas" else "zone",
)
return data
return await self.api.get_all_areas_and_zones()

View File

@@ -72,7 +72,9 @@ rules:
repair-issues:
status: exempt
comment: no known use cases for repair issues or flows, yet
stale-devices: done
stale-devices:
status: todo
comment: missing implementation
# Platinum
async-dependency: done

View File

@@ -1,45 +0,0 @@
"""The Compit integration."""
from compit_inext_api import CannotConnect, CompitApiConnector, InvalidAuth
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .coordinator import CompitConfigEntry, CompitDataUpdateCoordinator
PLATFORMS = [
Platform.CLIMATE,
]
async def async_setup_entry(hass: HomeAssistant, entry: CompitConfigEntry) -> bool:
"""Set up Compit from a config entry."""
session = async_get_clientsession(hass)
connector = CompitApiConnector(session)
try:
connected = await connector.init(
entry.data[CONF_EMAIL], entry.data[CONF_PASSWORD], hass.config.language
)
except CannotConnect as e:
raise ConfigEntryNotReady(f"Error while connecting to Compit: {e}") from e
except InvalidAuth as e:
raise ConfigEntryAuthFailed(
f"Invalid credentials for {entry.data[CONF_EMAIL]}"
) from e
if not connected:
raise ConfigEntryAuthFailed("Authentication API error")
coordinator = CompitDataUpdateCoordinator(hass, entry, connector)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: CompitConfigEntry) -> bool:
"""Unload an entry for the Compit integration."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@@ -1,265 +0,0 @@
"""Module contains the CompitClimate class for controlling climate entities."""
import logging
from typing import Any
from compit_inext_api import Param, Parameter
from compit_inext_api.consts import (
CompitFanMode,
CompitHVACMode,
CompitParameter,
CompitPresetMode,
)
from propcache.api import cached_property
from homeassistant.components.climate import (
FAN_AUTO,
FAN_HIGH,
FAN_LOW,
FAN_MEDIUM,
FAN_OFF,
PRESET_AWAY,
PRESET_ECO,
PRESET_HOME,
PRESET_NONE,
ClimateEntity,
ClimateEntityFeature,
HVACMode,
)
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, MANUFACTURER_NAME
from .coordinator import CompitConfigEntry, CompitDataUpdateCoordinator
_LOGGER: logging.Logger = logging.getLogger(__name__)
# Device class for climate devices in Compit system
CLIMATE_DEVICE_CLASS = 10
PARALLEL_UPDATES = 0
COMPIT_MODE_MAP = {
CompitHVACMode.COOL: HVACMode.COOL,
CompitHVACMode.HEAT: HVACMode.HEAT,
CompitHVACMode.OFF: HVACMode.OFF,
}
COMPIT_FANSPEED_MAP = {
CompitFanMode.OFF: FAN_OFF,
CompitFanMode.AUTO: FAN_AUTO,
CompitFanMode.LOW: FAN_LOW,
CompitFanMode.MEDIUM: FAN_MEDIUM,
CompitFanMode.HIGH: FAN_HIGH,
CompitFanMode.HOLIDAY: FAN_AUTO,
}
COMPIT_PRESET_MAP = {
CompitPresetMode.AUTO: PRESET_HOME,
CompitPresetMode.HOLIDAY: PRESET_ECO,
CompitPresetMode.MANUAL: PRESET_NONE,
CompitPresetMode.AWAY: PRESET_AWAY,
}
HVAC_MODE_TO_COMPIT_MODE = {v: k for k, v in COMPIT_MODE_MAP.items()}
FAN_MODE_TO_COMPIT_FAN_MODE = {v: k for k, v in COMPIT_FANSPEED_MAP.items()}
PRESET_MODE_TO_COMPIT_PRESET_MODE = {v: k for k, v in COMPIT_PRESET_MAP.items()}
async def async_setup_entry(
hass: HomeAssistant,
entry: CompitConfigEntry,
async_add_devices: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the CompitClimate platform from a config entry."""
coordinator = entry.runtime_data
climate_entities = []
for device_id in coordinator.connector.all_devices:
device = coordinator.connector.all_devices[device_id]
if device.definition.device_class == CLIMATE_DEVICE_CLASS:
climate_entities.append(
CompitClimate(
coordinator,
device_id,
{
parameter.parameter_code: parameter
for parameter in device.definition.parameters
},
device.definition.name,
)
)
async_add_devices(climate_entities)
class CompitClimate(CoordinatorEntity[CompitDataUpdateCoordinator], ClimateEntity):
"""Representation of a Compit climate device."""
_attr_temperature_unit = UnitOfTemperature.CELSIUS
_attr_hvac_modes = [*COMPIT_MODE_MAP.values()]
_attr_name = None
_attr_has_entity_name = True
_attr_supported_features = (
ClimateEntityFeature.TARGET_TEMPERATURE
| ClimateEntityFeature.FAN_MODE
| ClimateEntityFeature.PRESET_MODE
)
def __init__(
self,
coordinator: CompitDataUpdateCoordinator,
device_id: int,
parameters: dict[str, Parameter],
device_name: str,
) -> None:
"""Initialize the climate device."""
super().__init__(coordinator)
self._attr_unique_id = f"{device_name}_{device_id}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, str(device_id))},
name=device_name,
manufacturer=MANUFACTURER_NAME,
model=device_name,
)
self.parameters = parameters
self.device_id = device_id
self.available_presets: Parameter | None = self.parameters.get(
CompitParameter.PRESET_MODE.value
)
self.available_fan_modes: Parameter | None = self.parameters.get(
CompitParameter.FAN_MODE.value
)
@property
def available(self) -> bool:
"""Return if entity is available."""
return (
super().available
and self.device_id in self.coordinator.connector.all_devices
)
@property
def current_temperature(self) -> float | None:
"""Return the current temperature."""
value = self.get_parameter_value(CompitParameter.CURRENT_TEMPERATURE)
if value is None:
return None
return float(value.value)
@property
def target_temperature(self) -> float | None:
"""Return the temperature we try to reach."""
value = self.get_parameter_value(CompitParameter.SET_TARGET_TEMPERATURE)
if value is None:
return None
return float(value.value)
@cached_property
def preset_modes(self) -> list[str] | None:
"""Return the available preset modes."""
if self.available_presets is None or self.available_presets.details is None:
return []
preset_modes = []
for item in self.available_presets.details:
if item is not None:
ha_preset = COMPIT_PRESET_MAP.get(CompitPresetMode(item.state))
if ha_preset and ha_preset not in preset_modes:
preset_modes.append(ha_preset)
return preset_modes
@cached_property
def fan_modes(self) -> list[str] | None:
"""Return the available fan modes."""
if self.available_fan_modes is None or self.available_fan_modes.details is None:
return []
fan_modes = []
for item in self.available_fan_modes.details:
if item is not None:
ha_fan_mode = COMPIT_FANSPEED_MAP.get(CompitFanMode(item.state))
if ha_fan_mode and ha_fan_mode not in fan_modes:
fan_modes.append(ha_fan_mode)
return fan_modes
@property
def preset_mode(self) -> str | None:
"""Return the current preset mode."""
preset_mode = self.get_parameter_value(CompitParameter.PRESET_MODE)
if preset_mode:
compit_preset_mode = CompitPresetMode(preset_mode.value)
return COMPIT_PRESET_MAP.get(compit_preset_mode)
return None
@property
def fan_mode(self) -> str | None:
"""Return the current fan mode."""
fan_mode = self.get_parameter_value(CompitParameter.FAN_MODE)
if fan_mode:
compit_fan_mode = CompitFanMode(fan_mode.value)
return COMPIT_FANSPEED_MAP.get(compit_fan_mode)
return None
@property
def hvac_mode(self) -> HVACMode | None:
"""Return the current HVAC mode."""
hvac_mode = self.get_parameter_value(CompitParameter.HVAC_MODE)
if hvac_mode:
compit_hvac_mode = CompitHVACMode(hvac_mode.value)
return COMPIT_MODE_MAP.get(compit_hvac_mode)
return None
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature."""
temp = kwargs.get(ATTR_TEMPERATURE)
if temp is None:
raise ServiceValidationError("Temperature argument missing")
await self.set_parameter_value(CompitParameter.SET_TARGET_TEMPERATURE, temp)
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set new target HVAC mode."""
if not (mode := HVAC_MODE_TO_COMPIT_MODE.get(hvac_mode)):
raise ServiceValidationError(f"Invalid hvac mode {hvac_mode}")
await self.set_parameter_value(CompitParameter.HVAC_MODE, mode.value)
async def async_set_preset_mode(self, preset_mode: str) -> None:
"""Set new target preset mode."""
compit_preset = PRESET_MODE_TO_COMPIT_PRESET_MODE.get(preset_mode)
if compit_preset is None:
raise ServiceValidationError(f"Invalid preset mode: {preset_mode}")
await self.set_parameter_value(CompitParameter.PRESET_MODE, compit_preset.value)
async def async_set_fan_mode(self, fan_mode: str) -> None:
"""Set new target fan mode."""
compit_fan_mode = FAN_MODE_TO_COMPIT_FAN_MODE.get(fan_mode)
if compit_fan_mode is None:
raise ServiceValidationError(f"Invalid fan mode: {fan_mode}")
await self.set_parameter_value(CompitParameter.FAN_MODE, compit_fan_mode.value)
async def set_parameter_value(self, parameter: CompitParameter, value: int) -> None:
"""Call the API to set a parameter to a new value."""
await self.coordinator.connector.set_device_parameter(
self.device_id, parameter, value
)
self.async_write_ha_state()
def get_parameter_value(self, parameter: CompitParameter) -> Param | None:
"""Get the parameter value from the device state."""
return self.coordinator.connector.get_device_parameter(
self.device_id, parameter
)

View File

@@ -1,110 +0,0 @@
"""Config flow for Compit integration."""
from __future__ import annotations
from collections.abc import Mapping
import logging
from typing import Any
from compit_inext_api import CannotConnect, CompitApiConnector, InvalidAuth
import voluptuous as vol
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_EMAIL): str,
vol.Required(CONF_PASSWORD): str,
}
)
STEP_REAUTH_SCHEMA = vol.Schema(
{
vol.Required(CONF_PASSWORD): str,
}
)
class CompitConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Compit."""
VERSION = 1
async def async_step_user(
self,
user_input: dict[str, Any] | None = None,
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
session = async_create_clientsession(self.hass)
api = CompitApiConnector(session)
success = False
try:
success = await api.init(
user_input[CONF_EMAIL],
user_input[CONF_PASSWORD],
self.hass.config.language,
)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
if not success:
# Api returned unexpected result but no exception
_LOGGER.error("Compit api returned unexpected result")
errors["base"] = "unknown"
else:
await self.async_set_unique_id(user_input[CONF_EMAIL])
if self.source == SOURCE_REAUTH:
self._abort_if_unique_id_mismatch()
return self.async_update_reload_and_abort(
self._get_reauth_entry(), data_updates=user_input
)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=user_input[CONF_EMAIL], data=user_input
)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)
async def async_step_reauth(self, data: Mapping[str, Any]) -> ConfigFlowResult:
"""Handle re-auth."""
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm re-authentication."""
errors: dict[str, str] = {}
reauth_entry = self._get_reauth_entry()
reauth_entry_data = reauth_entry.data
if user_input:
# Reuse async_step_user with combined credentials
return await self.async_step_user(
{
CONF_EMAIL: reauth_entry_data[CONF_EMAIL],
CONF_PASSWORD: user_input[CONF_PASSWORD],
}
)
return self.async_show_form(
step_id="reauth_confirm",
data_schema=STEP_REAUTH_SCHEMA,
description_placeholders={CONF_EMAIL: reauth_entry_data[CONF_EMAIL]},
errors=errors,
)

View File

@@ -1,4 +0,0 @@
"""Constants for the Compit integration."""
DOMAIN = "compit"
MANUFACTURER_NAME = "Compit"

View File

@@ -1,43 +0,0 @@
"""Define an object to manage fetching Compit data."""
from datetime import timedelta
import logging
from compit_inext_api import CompitApiConnector, DeviceInstance
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .const import DOMAIN
SCAN_INTERVAL = timedelta(seconds=30)
_LOGGER: logging.Logger = logging.getLogger(__name__)
type CompitConfigEntry = ConfigEntry[CompitDataUpdateCoordinator]
class CompitDataUpdateCoordinator(DataUpdateCoordinator[dict[int, DeviceInstance]]):
"""Class to manage fetching data from the API."""
def __init__(
self,
hass: HomeAssistant,
config_entry: ConfigEntry,
connector: CompitApiConnector,
) -> None:
"""Initialize."""
self.connector = connector
super().__init__(
hass,
_LOGGER,
name=DOMAIN,
update_interval=SCAN_INTERVAL,
config_entry=config_entry,
)
async def _async_update_data(self) -> dict[int, DeviceInstance]:
"""Update data via library."""
await self.connector.update_state(device_id=None) # Update all devices
return self.connector.all_devices

View File

@@ -1,12 +0,0 @@
{
"domain": "compit",
"name": "Compit",
"codeowners": ["@Przemko92"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/compit",
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["compit"],
"quality_scale": "bronze",
"requirements": ["compit-inext-api==0.3.1"]
}

View File

@@ -1,86 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: |
This integration does not provide additional actions.
appropriate-polling: done
brands: done
common-modules:
status: exempt
comment: |
This integration does not use any common modules.
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: |
This integration does not provide additional actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: |
Entities of this integration does not explicitly subscribe to events.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: |
This integration does not provide additional actions.
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: |
This integration does not have an options flow.
docs-installation-parameters: done
entity-unavailable: todo
integration-owner: done
log-when-unavailable: todo
parallel-updates: done
reauthentication-flow: done
test-coverage: todo
# Gold
devices: done
diagnostics: todo
discovery-update-info:
status: exempt
comment: |
This integration is a cloud service and does not support discovery.
discovery: todo
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: done
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices: todo
entity-category: done
entity-device-class: done
entity-disabled-by-default:
status: exempt
comment: |
This integration does not have any entities that should be disabled by default.
entity-translations: done
exception-translations: todo
icon-translations:
status: exempt
comment: |
There is no need for icon translations.
reconfiguration-flow: todo
repair-issues: todo
stale-devices: todo
# Platinum
async-dependency: done
inject-websession: todo
strict-typing: done

View File

@@ -1,35 +0,0 @@
{
"config": {
"step": {
"user": {
"description": "Please enter your https://inext.compit.pl/ credentials.",
"title": "Connect to Compit iNext",
"data": {
"email": "[%key:common::config_flow::data::email%]",
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"email": "The email address of your inext.compit.pl account",
"password": "The password of your inext.compit.pl account"
}
},
"reauth_confirm": {
"description": "Please update your password for {email}",
"data": {
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"password": "[%key:component::compit::config::step::user::data_description::password%]"
}
}
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
}
}
}

View File

@@ -50,13 +50,14 @@ from .const import (
ATTR_LANGUAGE,
ATTR_TEXT,
DATA_COMPONENT,
DATA_DEFAULT_ENTITY,
DOMAIN,
HOME_ASSISTANT_AGENT,
SERVICE_PROCESS,
SERVICE_RELOAD,
ConversationEntityFeature,
)
from .default_agent import async_setup_default_agent
from .default_agent import DefaultAgent, async_setup_default_agent
from .entity import ConversationEntity
from .http import async_setup as async_setup_conversation_http
from .models import AbstractConversationAgent, ConversationInput, ConversationResult
@@ -141,7 +142,7 @@ def async_unset_agent(
hass: HomeAssistant,
config_entry: ConfigEntry,
) -> None:
"""Unset the agent to handle the conversations."""
"""Set the agent to handle the conversations."""
get_agent_manager(hass).async_unset_agent(config_entry.entry_id)
@@ -240,10 +241,10 @@ async def async_handle_sentence_triggers(
Returns None if no match occurred.
"""
agent = get_agent_manager(hass).default_agent
assert agent is not None
default_agent = async_get_agent(hass)
assert isinstance(default_agent, DefaultAgent)
return await agent.async_handle_sentence_triggers(user_input)
return await default_agent.async_handle_sentence_triggers(user_input)
async def async_handle_intents(
@@ -256,10 +257,12 @@ async def async_handle_intents(
Returns None if no match occurred.
"""
agent = get_agent_manager(hass).default_agent
assert agent is not None
default_agent = async_get_agent(hass)
assert isinstance(default_agent, DefaultAgent)
return await agent.async_handle_intents(user_input, intent_filter=intent_filter)
return await default_agent.async_handle_intents(
user_input, intent_filter=intent_filter
)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
@@ -295,9 +298,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async def handle_reload(service: ServiceCall) -> None:
"""Reload intents."""
agent = get_agent_manager(hass).default_agent
if agent is not None:
await agent.async_reload(language=service.data.get(ATTR_LANGUAGE))
await hass.data[DATA_DEFAULT_ENTITY].async_reload(
language=service.data.get(ATTR_LANGUAGE)
)
hass.services.async_register(
DOMAIN,

View File

@@ -4,21 +4,15 @@ from __future__ import annotations
import dataclasses
import logging
from typing import TYPE_CHECKING, Any
from typing import Any
import voluptuous as vol
from homeassistant.core import (
CALLBACK_TYPE,
Context,
HomeAssistant,
async_get_hass,
callback,
)
from homeassistant.core import Context, HomeAssistant, async_get_hass, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, intent, singleton
from .const import DATA_COMPONENT, HOME_ASSISTANT_AGENT
from .const import DATA_COMPONENT, DATA_DEFAULT_ENTITY, HOME_ASSISTANT_AGENT
from .entity import ConversationEntity
from .models import (
AbstractConversationAgent,
@@ -34,10 +28,6 @@ from .trace import (
_LOGGER = logging.getLogger(__name__)
if TYPE_CHECKING:
from .default_agent import DefaultAgent
from .trigger import TriggerDetails
@singleton.singleton("conversation_agent")
@callback
@@ -59,10 +49,8 @@ def async_get_agent(
hass: HomeAssistant, agent_id: str | None = None
) -> AbstractConversationAgent | ConversationEntity | None:
"""Get specified agent."""
manager = get_agent_manager(hass)
if agent_id is None or agent_id == HOME_ASSISTANT_AGENT:
return manager.default_agent
return hass.data[DATA_DEFAULT_ENTITY]
if "." in agent_id:
return hass.data[DATA_COMPONENT].get_entity(agent_id)
@@ -146,8 +134,6 @@ class AgentManager:
"""Initialize the conversation agents."""
self.hass = hass
self._agents: dict[str, AbstractConversationAgent] = {}
self.default_agent: DefaultAgent | None = None
self.triggers_details: list[TriggerDetails] = []
@callback
def async_get_agent(self, agent_id: str) -> AbstractConversationAgent | None:
@@ -196,23 +182,3 @@ class AgentManager:
def async_unset_agent(self, agent_id: str) -> None:
"""Unset the agent."""
self._agents.pop(agent_id, None)
async def async_setup_default_agent(self, agent: DefaultAgent) -> None:
"""Set up the default agent."""
agent.update_triggers(self.triggers_details)
self.default_agent = agent
def register_trigger(self, trigger_details: TriggerDetails) -> CALLBACK_TYPE:
"""Register a trigger."""
self.triggers_details.append(trigger_details)
if self.default_agent is not None:
self.default_agent.update_triggers(self.triggers_details)
@callback
def unregister_trigger() -> None:
"""Unregister the trigger."""
self.triggers_details.remove(trigger_details)
if self.default_agent is not None:
self.default_agent.update_triggers(self.triggers_details)
return unregister_trigger

View File

@@ -10,9 +10,11 @@ from homeassistant.util.hass_dict import HassKey
if TYPE_CHECKING:
from homeassistant.helpers.entity_component import EntityComponent
from .default_agent import DefaultAgent
from .entity import ConversationEntity
DOMAIN = "conversation"
DEFAULT_EXPOSED_ATTRIBUTES = {"device_class"}
HOME_ASSISTANT_AGENT = "conversation.home_assistant"
ATTR_TEXT = "text"
@@ -24,6 +26,7 @@ SERVICE_PROCESS = "process"
SERVICE_RELOAD = "reload"
DATA_COMPONENT: HassKey[EntityComponent[ConversationEntity]] = HassKey(DOMAIN)
DATA_DEFAULT_ENTITY: HassKey[DefaultAgent] = HassKey(f"{DOMAIN}_default_entity")
class ConversationEntityFeature(IntFlag):

View File

@@ -4,11 +4,13 @@ from __future__ import annotations
import asyncio
from collections import OrderedDict
from collections.abc import Callable, Iterable
from collections.abc import Awaitable, Callable, Iterable
from dataclasses import dataclass
from enum import Enum, auto
import functools
import logging
from pathlib import Path
import re
import time
from typing import IO, Any, cast
@@ -51,7 +53,6 @@ from homeassistant.components.homeassistant.exposed_entities import (
async_should_expose,
)
from homeassistant.const import EVENT_STATE_CHANGED, MATCH_ALL
from homeassistant.core import Event, callback
from homeassistant.helpers import (
area_registry as ar,
device_registry as dr,
@@ -67,22 +68,25 @@ from homeassistant.helpers.event import async_track_state_added_domain
from homeassistant.util import language as language_util
from homeassistant.util.json import JsonObjectType, json_loads_object
from .agent_manager import get_agent_manager
from .chat_log import AssistantContent, ChatLog
from .const import DOMAIN, ConversationEntityFeature
from .const import (
DATA_DEFAULT_ENTITY,
DEFAULT_EXPOSED_ATTRIBUTES,
DOMAIN,
ConversationEntityFeature,
)
from .entity import ConversationEntity
from .models import ConversationInput, ConversationResult
from .trace import ConversationTraceEventType, async_conversation_trace_append
from .trigger import TriggerDetails
_LOGGER = logging.getLogger(__name__)
_DEFAULT_ERROR_TEXT = "Sorry, I couldn't understand that"
_ENTITY_REGISTRY_UPDATE_FIELDS = ["aliases", "name", "original_name"]
_DEFAULT_EXPOSED_ATTRIBUTES = {"device_class"}
REGEX_TYPE = type(re.compile(""))
TRIGGER_CALLBACK_TYPE = Callable[
[ConversationInput, RecognizeResult], Awaitable[str | None]
]
METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
METADATA_CUSTOM_FILE = "hass_custom_file"
METADATA_FUZZY_MATCH = "hass_fuzzy_match"
@@ -108,6 +112,14 @@ class LanguageIntents:
fuzzy_responses: FuzzyLanguageResponses | None = None
@dataclass(slots=True)
class TriggerData:
"""List of sentences and the callback for a trigger."""
sentences: list[str]
callback: TRIGGER_CALLBACK_TYPE
@dataclass(slots=True)
class SentenceTriggerResult:
"""Result when matching a sentence trigger in an automation."""
@@ -143,8 +155,8 @@ class IntentCacheKey:
language: str
"""Language of text."""
satellite_id: str | None
"""Satellite id from user input."""
device_id: str | None
"""Device id from user input."""
@dataclass(frozen=True)
@@ -197,9 +209,9 @@ async def async_setup_default_agent(
config_intents: dict[str, Any],
) -> None:
"""Set up entity registry listener for the default agent."""
agent = DefaultAgent(hass, config_intents)
await entity_component.async_add_entities([agent])
await get_agent_manager(hass).async_setup_default_agent(agent)
entity = DefaultAgent(hass, config_intents)
await entity_component.async_add_entities([entity])
hass.data[DATA_DEFAULT_ENTITY] = entity
@core.callback
def async_entity_state_listener(
@@ -230,23 +242,21 @@ class DefaultAgent(ConversationEntity):
"""Initialize the default agent."""
self.hass = hass
self._lang_intents: dict[str, LanguageIntents | object] = {}
self._load_intents_lock = asyncio.Lock()
# intent -> [sentences]
self._config_intents: dict[str, Any] = config_intents
# Sentences that will trigger a callback (skipping intent recognition)
self._triggers_details: list[TriggerDetails] = []
self._trigger_intents: Intents | None = None
# Slot lists for entities, areas, etc.
self._slot_lists: dict[str, SlotList] | None = None
self._unsub_clear_slot_list: list[Callable[[], None]] | None = None
# Used to filter slot lists before intent matching
self._exposed_names_trie: Trie | None = None
self._unexposed_names_trie: Trie | None = None
# Sentences that will trigger a callback (skipping intent recognition)
self.trigger_sentences: list[TriggerData] = []
self._trigger_intents: Intents | None = None
self._unsub_clear_slot_list: list[Callable[[], None]] | None = None
self._load_intents_lock = asyncio.Lock()
# LRU cache to avoid unnecessary intent matching
self._intent_cache = IntentCache(capacity=128)
@@ -435,15 +445,9 @@ class DefaultAgent(ConversationEntity):
}
for entity in result.entities_list
}
satellite_id = user_input.satellite_id
device_id = user_input.device_id
satellite_area, device_id = self._get_satellite_area_and_device(
satellite_id, device_id
)
if satellite_area is not None:
slots["preferred_area_id"] = {"value": satellite_area.id}
device_area = self._get_device_area(user_input.device_id)
if device_area:
slots["preferred_area_id"] = {"value": device_area.id}
async_conversation_trace_append(
ConversationTraceEventType.TOOL_CALL,
{
@@ -465,8 +469,8 @@ class DefaultAgent(ConversationEntity):
user_input.context,
language,
assistant=DOMAIN,
device_id=device_id,
satellite_id=satellite_id,
device_id=user_input.device_id,
satellite_id=user_input.satellite_id,
conversation_agent_id=user_input.agent_id,
)
except intent.MatchFailedError as match_error:
@@ -532,9 +536,7 @@ class DefaultAgent(ConversationEntity):
# Try cache first
cache_key = IntentCacheKey(
text=user_input.text,
language=language,
satellite_id=user_input.satellite_id,
text=user_input.text, language=language, device_id=user_input.device_id
)
cache_value = self._intent_cache.get(cache_key)
if cache_value is not None:
@@ -844,7 +846,7 @@ class DefaultAgent(ConversationEntity):
context = {"domain": state.domain}
if state.attributes:
# Include some attributes
for attr in _DEFAULT_EXPOSED_ATTRIBUTES:
for attr in DEFAULT_EXPOSED_ATTRIBUTES:
if attr not in state.attributes:
continue
context[attr] = state.attributes[attr]
@@ -1190,8 +1192,8 @@ class DefaultAgent(ConversationEntity):
fuzzy_responses=fuzzy_responses,
)
@callback
def _async_clear_slot_list(self, event: Event[Any] | None = None) -> None:
@core.callback
def _async_clear_slot_list(self, event: core.Event[Any] | None = None) -> None:
"""Clear slot lists when a registry has changed."""
# Two subscribers can be scheduled at the same time
_LOGGER.debug("Clearing slot lists")
@@ -1304,40 +1306,28 @@ class DefaultAgent(ConversationEntity):
self, user_input: ConversationInput
) -> dict[str, Any] | None:
"""Return intent recognition context for user input."""
satellite_area, _ = self._get_satellite_area_and_device(
user_input.satellite_id, user_input.device_id
)
if satellite_area is None:
if not user_input.device_id:
return None
return {"area": {"value": satellite_area.name, "text": satellite_area.name}}
device_area = self._get_device_area(user_input.device_id)
if device_area is None:
return None
def _get_satellite_area_and_device(
self, satellite_id: str | None, device_id: str | None = None
) -> tuple[ar.AreaEntry | None, str | None]:
"""Return area entry and device id."""
hass = self.hass
return {"area": {"value": device_area.name, "text": device_area.name}}
area_id: str | None = None
def _get_device_area(self, device_id: str | None) -> ar.AreaEntry | None:
"""Return area object for given device identifier."""
if device_id is None:
return None
if (
satellite_id is not None
and (entity_entry := er.async_get(hass).async_get(satellite_id)) is not None
):
area_id = entity_entry.area_id
device_id = entity_entry.device_id
devices = dr.async_get(self.hass)
device = devices.async_get(device_id)
if (device is None) or (device.area_id is None):
return None
if (
area_id is None
and device_id is not None
and (device_entry := dr.async_get(hass).async_get(device_id)) is not None
):
area_id = device_entry.area_id
areas = ar.async_get(self.hass)
if area_id is None:
return None, device_id
return ar.async_get(hass).async_get_area(area_id), device_id
return areas.async_get_area(device.area_id)
def _get_error_text(
self,
@@ -1361,14 +1351,22 @@ class DefaultAgent(ConversationEntity):
return response_template.async_render(response_args)
@callback
def update_triggers(self, triggers_details: list[TriggerDetails]) -> None:
"""Update triggers."""
self._triggers_details = triggers_details
@core.callback
def register_trigger(
self,
sentences: list[str],
callback: TRIGGER_CALLBACK_TYPE,
) -> core.CALLBACK_TYPE:
"""Register a list of sentences that will trigger a callback when recognized."""
trigger_data = TriggerData(sentences=sentences, callback=callback)
self.trigger_sentences.append(trigger_data)
# Force rebuild on next use
self._trigger_intents = None
return functools.partial(self._unregister_trigger, trigger_data)
@core.callback
def _rebuild_trigger_intents(self) -> None:
"""Rebuild the HassIL intents object from the current trigger sentences."""
intents_dict = {
@@ -1377,8 +1375,8 @@ class DefaultAgent(ConversationEntity):
# Use trigger data index as a virtual intent name for HassIL.
# This works because the intents are rebuilt on every
# register/unregister.
str(trigger_id): {"data": [{"sentences": trigger_details.sentences}]}
for trigger_id, trigger_details in enumerate(self._triggers_details)
str(trigger_id): {"data": [{"sentences": trigger_data.sentences}]}
for trigger_id, trigger_data in enumerate(self.trigger_sentences)
},
}
@@ -1398,6 +1396,14 @@ class DefaultAgent(ConversationEntity):
_LOGGER.debug("Rebuilt trigger intents: %s", intents_dict)
@core.callback
def _unregister_trigger(self, trigger_data: TriggerData) -> None:
"""Unregister a set of trigger sentences."""
self.trigger_sentences.remove(trigger_data)
# Force rebuild on next use
self._trigger_intents = None
async def async_recognize_sentence_trigger(
self, user_input: ConversationInput
) -> SentenceTriggerResult | None:
@@ -1406,7 +1412,7 @@ class DefaultAgent(ConversationEntity):
Calls the registered callbacks if there's a match and returns a sentence
trigger result.
"""
if not self._triggers_details:
if not self.trigger_sentences:
# No triggers registered
return None
@@ -1451,7 +1457,7 @@ class DefaultAgent(ConversationEntity):
# Gather callback responses in parallel
trigger_callbacks = [
self._triggers_details[trigger_id].callback(user_input, trigger_result)
self.trigger_sentences[trigger_id].callback(user_input, trigger_result)
for trigger_id, trigger_result in result.matched_triggers.items()
]

View File

@@ -25,7 +25,7 @@ from .agent_manager import (
async_get_agent,
get_agent_manager,
)
from .const import DATA_COMPONENT
from .const import DATA_COMPONENT, DATA_DEFAULT_ENTITY
from .default_agent import (
METADATA_CUSTOM_FILE,
METADATA_CUSTOM_SENTENCE,
@@ -169,11 +169,11 @@ async def websocket_list_sentences(
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
) -> None:
"""List custom registered sentences."""
manager = get_agent_manager(hass)
agent = hass.data[DATA_DEFAULT_ENTITY]
sentences = []
for trigger_details in manager.triggers_details:
sentences.extend(trigger_details.sentences)
for trigger_data in agent.trigger_sentences:
sentences.extend(trigger_data.sentences)
connection.send_result(msg["id"], {"trigger_sentences": sentences})
@@ -191,8 +191,7 @@ async def websocket_hass_agent_debug(
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
) -> None:
"""Return intents that would be matched by the default agent for a list of sentences."""
agent = get_agent_manager(hass).default_agent
assert agent is not None
agent = hass.data[DATA_DEFAULT_ENTITY]
# Return results for each sentence in the same order as the input.
result_dicts: list[dict[str, Any] | None] = []

View File

@@ -2,8 +2,6 @@
from __future__ import annotations
from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from typing import Any
from hassil.recognize import RecognizeResult
@@ -17,27 +15,14 @@ import voluptuous as vol
from homeassistant.const import CONF_COMMAND, CONF_PLATFORM
from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.script import ScriptRunResult
from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo
from homeassistant.helpers.typing import UNDEFINED, ConfigType
from .agent_manager import get_agent_manager
from .const import DOMAIN
from .const import DATA_DEFAULT_ENTITY, DOMAIN
from .models import ConversationInput
TRIGGER_CALLBACK_TYPE = Callable[
[ConversationInput, RecognizeResult], Awaitable[str | None]
]
@dataclass(slots=True)
class TriggerDetails:
"""List of sentences and the callback for a trigger."""
sentences: list[str]
callback: TRIGGER_CALLBACK_TYPE
def has_no_punctuation(value: list[str]) -> list[str]:
"""Validate result does not contain punctuation."""
@@ -85,8 +70,6 @@ async def async_attach_trigger(
trigger_data = trigger_info["trigger_data"]
sentences = config.get(CONF_COMMAND, [])
ent_reg = er.async_get(hass)
job = HassJob(action)
async def call_action(
@@ -108,14 +91,6 @@ async def async_attach_trigger(
for entity_name, entity in result.entities.items()
}
satellite_id = user_input.satellite_id
device_id = user_input.device_id
if (
satellite_id is not None
and (satellite_entry := ent_reg.async_get(satellite_id)) is not None
):
device_id = satellite_entry.device_id
trigger_input: dict[str, Any] = { # Satisfy type checker
**trigger_data,
"platform": DOMAIN,
@@ -124,8 +99,8 @@ async def async_attach_trigger(
"slots": { # direct access to values
entity_name: entity["value"] for entity_name, entity in details.items()
},
"device_id": device_id,
"satellite_id": satellite_id,
"device_id": user_input.device_id,
"satellite_id": user_input.satellite_id,
"user_input": user_input.as_dict(),
}
@@ -148,6 +123,4 @@ async def async_attach_trigger(
# two trigger copies for who will provide a response.
return None
return get_agent_manager(hass).register_trigger(
TriggerDetails(sentences=sentences, callback=call_action)
)
return hass.data[DATA_DEFAULT_ENTITY].register_trigger(sentences, call_action)

View File

@@ -9,7 +9,6 @@
"conversation",
"dhcp",
"energy",
"file",
"go2rtc",
"history",
"homeassistant_alerts",

View File

@@ -1,23 +0,0 @@
"""Diagnostics support for derivative."""
from __future__ import annotations
from typing import Any
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, config_entry: ConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
registry = er.async_get(hass)
entities = registry.entities.get_entries_for_config_entry_id(config_entry.entry_id)
return {
"config_entry": config_entry.as_dict(),
"entity": [entity.extended_dict for entity in entities],
}

View File

@@ -227,28 +227,15 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
weight = calculate_weight(start, end, current_time)
derivative = derivative + (value * Decimal(weight))
_LOGGER.debug(
"%s: Calculated new derivative as %f from %d segments",
self.entity_id,
derivative,
len(self._state_list),
)
return derivative
def _prune_state_list(self, current_time: datetime) -> None:
# filter out all derivatives older than `time_window` from our window list
old_len = len(self._state_list)
self._state_list = [
(time_start, time_end, state)
for time_start, time_end, state in self._state_list
if (current_time - time_end).total_seconds() < self._time_window
]
_LOGGER.debug(
"%s: Pruned %d elements from state list",
self.entity_id,
old_len - len(self._state_list),
)
def _handle_invalid_source_state(self, state: State | None) -> bool:
# Check the source state for unknown/unavailable condition. If unusable, write unknown/unavailable state and return false.
@@ -305,10 +292,6 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
) -> None:
"""Calculate derivative based on time and reschedule."""
_LOGGER.debug(
"%s: Recalculating derivative due to max_sub_interval time elapsed",
self.entity_id,
)
self._prune_state_list(now)
derivative = self._calc_derivative_from_state_list(now)
self._write_native_value(derivative)
@@ -317,11 +300,6 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
if derivative != 0:
schedule_max_sub_interval_exceeded(source_state)
_LOGGER.debug(
"%s: Scheduling max_sub_interval_callback in %s",
self.entity_id,
self._max_sub_interval,
)
self._cancel_max_sub_interval_exceeded_callback = async_call_later(
self.hass,
self._max_sub_interval,
@@ -331,9 +309,6 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
@callback
def on_state_reported(event: Event[EventStateReportedData]) -> None:
"""Handle constant sensor state."""
_LOGGER.debug(
"%s: New state reported event: %s", self.entity_id, event.data
)
self._cancel_max_sub_interval_exceeded_callback()
new_state = event.data["new_state"]
if not self._handle_invalid_source_state(new_state):
@@ -355,7 +330,6 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
@callback
def on_state_changed(event: Event[EventStateChangedData]) -> None:
"""Handle changed sensor state."""
_LOGGER.debug("%s: New state changed event: %s", self.entity_id, event.data)
self._cancel_max_sub_interval_exceeded_callback()
new_state = event.data["new_state"]
if not self._handle_invalid_source_state(new_state):
@@ -408,32 +382,15 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
/ Decimal(self._unit_prefix)
* Decimal(self._unit_time)
)
_LOGGER.debug(
"%s: Calculated new derivative segment as %f / %f / %f * %f = %f",
self.entity_id,
delta_value,
elapsed_time,
self._unit_prefix,
self._unit_time,
new_derivative,
)
except ValueError as err:
_LOGGER.warning(
"%s: While calculating derivative: %s", self.entity_id, err
)
_LOGGER.warning("While calculating derivative: %s", err)
except DecimalException as err:
_LOGGER.warning(
"%s: Invalid state (%s > %s): %s",
self.entity_id,
old_value,
new_state.state,
err,
"Invalid state (%s > %s): %s", old_value, new_state.state, err
)
except AssertionError as err:
_LOGGER.error(
"%s: Could not calculate derivative: %s", self.entity_id, err
)
_LOGGER.error("Could not calculate derivative: %s", err)
# For total increasing sensors, the value is expected to continuously increase.
# A negative derivative for a total increasing sensor likely indicates the
@@ -443,10 +400,6 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
== SensorStateClass.TOTAL_INCREASING
and new_derivative < 0
):
_LOGGER.debug(
"%s: Dropping sample as source total_increasing sensor decreased",
self.entity_id,
)
return
# add latest derivative to the window list
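For reference, the per-segment formula visible in the removed debug message above (delta_value / elapsed_time / unit_prefix * unit_time) works out as in this illustrative sketch; the numbers are made up:

```python
from decimal import Decimal

# Illustrative numbers only: the source sensor rose by 5 units over 60 s,
# with a "k" unit prefix (1000) and a per-hour reporting unit (3600 s).
delta_value = Decimal(5)
elapsed_time = Decimal(60)
unit_prefix = Decimal(1000)
unit_time = Decimal(3600)

new_derivative = delta_value / elapsed_time / unit_prefix * unit_time
print(new_derivative)  # approximately 0.3 k-units per hour
```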

View File

@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/droplet",
"iot_class": "local_push",
"quality_scale": "bronze",
"requirements": ["pydroplet==2.3.3"],
"requirements": ["pydroplet==2.3.2"],
"zeroconf": ["_droplet._tcp.local."]
}

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
"iot_class": "cloud_push",
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
"requirements": ["py-sucks==0.9.11", "deebot-client==14.0.0"]
"requirements": ["py-sucks==0.9.11", "deebot-client==13.7.0"]
}

View File

@@ -5,11 +5,9 @@ from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from deebot_client.capabilities import CapabilityNumber, CapabilitySet
from deebot_client.device import Device
from deebot_client.capabilities import CapabilitySet
from deebot_client.events import CleanCountEvent, CutDirectionEvent, VolumeEvent
from deebot_client.events.base import Event
from deebot_client.events.water_info import WaterCustomAmountEvent
from homeassistant.components.number import (
NumberEntity,
@@ -77,19 +75,6 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsNumberEntityDescription, ...] = (
native_step=1.0,
mode=NumberMode.BOX,
),
EcovacsNumberEntityDescription[WaterCustomAmountEvent](
capability_fn=lambda caps: (
caps.water.amount
if caps.water and isinstance(caps.water.amount, CapabilityNumber)
else None
),
value_fn=lambda e: e.value,
key="water_amount",
translation_key="water_amount",
entity_category=EntityCategory.CONFIG,
native_step=1.0,
mode=NumberMode.BOX,
),
)
@@ -115,18 +100,6 @@ class EcovacsNumberEntity[EventT: Event](
entity_description: EcovacsNumberEntityDescription
def __init__(
self,
device: Device,
capability: CapabilitySet[EventT, [int]],
entity_description: EcovacsNumberEntityDescription,
) -> None:
"""Initialize entity."""
super().__init__(device, capability, entity_description)
if isinstance(capability, CapabilityNumber):
self._attr_native_min_value = capability.min
self._attr_native_max_value = capability.max
async def async_added_to_hass(self) -> None:
"""Set up the event listeners now that hass is ready."""
await super().async_added_to_hass()

View File

@@ -33,11 +33,7 @@ class EcovacsSelectEntityDescription[EventT: Event](
ENTITY_DESCRIPTIONS: tuple[EcovacsSelectEntityDescription, ...] = (
EcovacsSelectEntityDescription[WaterAmountEvent](
capability_fn=lambda caps: (
caps.water.amount
if caps.water and isinstance(caps.water.amount, CapabilitySetTypes)
else None
),
capability_fn=lambda caps: caps.water.amount if caps.water else None,
current_option_fn=lambda e: get_name_key(e.value),
options_fn=lambda water: [get_name_key(amount) for amount in water.types],
key="water_amount",

View File

@@ -102,9 +102,6 @@
},
"volume": {
"name": "Volume"
},
"water_amount": {
"name": "Water flow level"
}
},
"sensor": {
@@ -155,10 +152,8 @@
"station_state": {
"name": "Station state",
"state": {
"drying_mop": "Drying mop",
"idle": "[%key:common::state::idle%]",
"emptying_dustbin": "Emptying dustbin",
"washing_mop": "Washing mop"
"emptying_dustbin": "Emptying dustbin"
}
},
"stats_area": {
@@ -179,7 +174,7 @@
},
"select": {
"water_amount": {
"name": "[%key:component::ecovacs::entity::number::water_amount::name%]",
"name": "Water flow level",
"state": {
"high": "[%key:common::state::high%]",
"low": "[%key:common::state::low%]",

View File

@@ -7,6 +7,8 @@ import random
import string
from typing import TYPE_CHECKING
from deebot_client.events.station import State
from homeassistant.core import HomeAssistant, callback
from homeassistant.util import slugify
@@ -47,6 +49,9 @@ def get_supported_entities(
@callback
def get_name_key(enum: Enum) -> str:
"""Return the lower case name of the enum."""
if enum is State.EMPTYING:
# Will be fixed in the next major release of deebot-client
return "emptying_dustbin"
return enum.name.lower()

View File

@@ -234,17 +234,6 @@ ECOWITT_SENSORS_MAPPING: Final = {
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
),
EcoWittSensorTypes.DISTANCE_MM: SensorEntityDescription(
key="DISTANCE_MM",
device_class=SensorDeviceClass.DISTANCE,
native_unit_of_measurement=UnitOfLength.MILLIMETERS,
state_class=SensorStateClass.MEASUREMENT,
),
EcoWittSensorTypes.HEAT_COUNT: SensorEntityDescription(
key="HEAT_COUNT",
state_class=SensorStateClass.TOTAL_INCREASING,
entity_category=EntityCategory.DIAGNOSTIC,
),
EcoWittSensorTypes.PM1: SensorEntityDescription(
key="PM1",
device_class=SensorDeviceClass.PM1,
@@ -253,7 +242,6 @@ ECOWITT_SENSORS_MAPPING: Final = {
),
EcoWittSensorTypes.PM4: SensorEntityDescription(
key="PM4",
device_class=SensorDeviceClass.PM4,
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
),

View File

@@ -51,7 +51,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) -> b
client_info=CLIENT_INFO,
zeroconf_instance=zeroconf_instance,
noise_psk=noise_psk,
timezone=hass.config.time_zone,
)
domain_data = DomainData.get(hass)

View File

@@ -138,16 +138,6 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
return await self._async_authenticate_or_add()
if error is None and entry_data.get(CONF_NOISE_PSK):
# Device was configured with encryption but now connects without it.
# Check if it's the same device before offering to remove encryption.
if self._reauth_entry.unique_id and self._device_mac:
expected_mac = format_mac(self._reauth_entry.unique_id)
actual_mac = format_mac(self._device_mac)
if expected_mac != actual_mac:
# Different device at the same IP - do not offer to remove encryption
return self._async_abort_wrong_device(
self._reauth_entry, expected_mac, actual_mac
)
return await self.async_step_reauth_encryption_removed_confirm()
return await self.async_step_reauth_confirm()
@@ -518,28 +508,6 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
CONF_DEVICE_NAME: self._device_name,
}
@callback
def _async_abort_wrong_device(
self, entry: ConfigEntry, expected_mac: str, actual_mac: str
) -> ConfigFlowResult:
"""Abort flow because a different device was found at the IP address."""
assert self._host is not None
assert self._device_name is not None
if self.source == SOURCE_RECONFIGURE:
reason = "reconfigure_unique_id_changed"
else:
reason = "reauth_unique_id_changed"
return self.async_abort(
reason=reason,
description_placeholders={
"name": entry.data.get(CONF_DEVICE_NAME, entry.title),
"host": self._host,
"expected_mac": expected_mac,
"unexpected_mac": actual_mac,
"unexpected_device_name": self._device_name,
},
)
async def _async_validated_connection(self) -> ConfigFlowResult:
"""Handle validated connection."""
if self.source == SOURCE_RECONFIGURE:
@@ -571,10 +539,17 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
# Reauth was triggered a while ago, and since then
# a new device resides at the same IP address.
assert self._device_name is not None
return self._async_abort_wrong_device(
self._reauth_entry,
format_mac(self._reauth_entry.unique_id),
format_mac(self.unique_id),
return self.async_abort(
reason="reauth_unique_id_changed",
description_placeholders={
"name": self._reauth_entry.data.get(
CONF_DEVICE_NAME, self._reauth_entry.title
),
"host": self._host,
"expected_mac": format_mac(self._reauth_entry.unique_id),
"unexpected_mac": format_mac(self.unique_id),
"unexpected_device_name": self._device_name,
},
)
async def _async_reconfig_validated_connection(self) -> ConfigFlowResult:
@@ -614,10 +589,17 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
if self._reconfig_entry.data.get(CONF_DEVICE_NAME) == self._device_name:
self._entry_with_name_conflict = self._reconfig_entry
return await self.async_step_name_conflict()
return self._async_abort_wrong_device(
self._reconfig_entry,
format_mac(self._reconfig_entry.unique_id),
format_mac(self.unique_id),
return self.async_abort(
reason="reconfigure_unique_id_changed",
description_placeholders={
"name": self._reconfig_entry.data.get(
CONF_DEVICE_NAME, self._reconfig_entry.title
),
"host": self._host,
"expected_mac": format_mac(self._reconfig_entry.unique_id),
"unexpected_mac": format_mac(self.unique_id),
"unexpected_device_name": self._device_name,
},
)
async def async_step_encryption_key(

View File

@@ -49,13 +49,11 @@ from aioesphomeapi import (
from aioesphomeapi.model import ButtonInfo
from bleak_esphome.backend.device import ESPHomeBluetoothDevice
from homeassistant import config_entries
from homeassistant.components.assist_satellite import AssistSatelliteConfiguration
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.helpers import discovery_flow, entity_registry as er
from homeassistant.helpers.service_info.esphome import ESPHomeServiceInfo
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.storage import Store
from .const import DOMAIN
@@ -470,7 +468,7 @@ class RuntimeEntryData:
@callback
def async_on_connect(
self, hass: HomeAssistant, device_info: DeviceInfo, api_version: APIVersion
self, device_info: DeviceInfo, api_version: APIVersion
) -> None:
"""Call when the entry has been connected."""
self.available = True
@@ -486,29 +484,6 @@ class RuntimeEntryData:
# be marked as unavailable or not.
self.expected_disconnect = True
if not device_info.zwave_proxy_feature_flags:
return
assert self.client.connected_address
discovery_flow.async_create_flow(
hass,
"zwave_js",
{"source": config_entries.SOURCE_ESPHOME},
ESPHomeServiceInfo(
name=device_info.name,
zwave_home_id=device_info.zwave_home_id or None,
ip_address=self.client.connected_address,
port=self.client.port,
noise_psk=self.client.noise_psk,
),
discovery_key=discovery_flow.DiscoveryKey(
domain=DOMAIN,
key=device_info.mac_address,
version=1,
),
)
@callback
def async_register_assist_satellite_config_updated_callback(
self,

View File

@@ -505,7 +505,7 @@ class ESPHomeManager:
api_version = cli.api_version
assert api_version is not None, "API version must be set"
entry_data.async_on_connect(hass, device_info, api_version)
entry_data.async_on_connect(device_info, api_version)
await self._handle_dynamic_encryption_key(device_info)

View File

@@ -17,7 +17,7 @@
"mqtt": ["esphome/discover/#"],
"quality_scale": "platinum",
"requirements": [
"aioesphomeapi==41.9.0",
"aioesphomeapi==41.0.0",
"esphome-dashboard-api==1.3.0",
"bleak-esphome==3.3.0"
],

View File

@@ -194,21 +194,6 @@ class EsphomeAssistSatelliteWakeWordSelect(
self._attr_options = [NO_WAKE_WORD, *sorted(self._wake_words)]
option = self._attr_current_option
if (
(self._wake_word_index == 0)
and (len(config.active_wake_words) == 1)
and (option in (None, NO_WAKE_WORD))
):
option = next(
(
wake_word
for wake_word, wake_word_id in self._wake_words.items()
if wake_word_id == config.active_wake_words[0]
),
None,
)
if (
(option is None)
or ((wake_word_id := self._wake_words.get(option)) is None)

View File

@@ -162,12 +162,12 @@ def setup_service_functions(
It appears that all TCC-compatible systems support the same three zones modes.
"""
@verify_domain_control(DOMAIN)
@verify_domain_control(hass, DOMAIN)
async def force_refresh(call: ServiceCall) -> None:
"""Obtain the latest state data via the vendor's RESTful API."""
await coordinator.async_refresh()
@verify_domain_control(DOMAIN)
@verify_domain_control(hass, DOMAIN)
async def set_system_mode(call: ServiceCall) -> None:
"""Set the system mode."""
assert coordinator.tcs is not None # mypy
@@ -179,7 +179,7 @@ def setup_service_functions(
}
async_dispatcher_send(hass, DOMAIN, payload)
@verify_domain_control(DOMAIN)
@verify_domain_control(hass, DOMAIN)
async def set_zone_override(call: ServiceCall) -> None:
"""Set the zone override (setpoint)."""
entity_id = call.data[ATTR_ENTITY_ID]

View File

@@ -66,6 +66,26 @@ SENSOR_TYPES: dict[str, SensorEntityDescription] = {
key="last_alarm_type_name",
translation_key="last_alarm_type_name",
),
"Record_Mode": SensorEntityDescription(
key="Record_Mode",
translation_key="record_mode",
entity_registry_enabled_default=False,
),
"battery_camera_work_mode": SensorEntityDescription(
key="battery_camera_work_mode",
translation_key="battery_camera_work_mode",
entity_registry_enabled_default=False,
),
"powerStatus": SensorEntityDescription(
key="powerStatus",
translation_key="power_status",
entity_registry_enabled_default=False,
),
"OnlineStatus": SensorEntityDescription(
key="OnlineStatus",
translation_key="online_status",
entity_registry_enabled_default=False,
),
}
@@ -76,16 +96,26 @@ async def async_setup_entry(
) -> None:
"""Set up EZVIZ sensors based on a config entry."""
coordinator = entry.runtime_data
entities: list[EzvizSensor] = []
async_add_entities(
[
for camera, sensors in coordinator.data.items():
entities.extend(
EzvizSensor(coordinator, camera, sensor)
for camera in coordinator.data
for sensor, value in coordinator.data[camera].items()
if sensor in SENSOR_TYPES
if value is not None
]
)
for sensor, value in sensors.items()
if sensor in SENSOR_TYPES and value is not None
)
optionals = sensors.get("optionals", {})
entities.extend(
EzvizSensor(coordinator, camera, optional_key)
for optional_key in ("powerStatus", "OnlineStatus")
if optional_key in optionals
)
if "mode" in optionals.get("Record_Mode", {}):
entities.append(EzvizSensor(coordinator, camera, "mode"))
async_add_entities(entities)
class EzvizSensor(EzvizEntity, SensorEntity):

View File

@@ -147,6 +147,18 @@
},
"last_alarm_type_name": {
"name": "Last alarm type name"
},
"record_mode": {
"name": "Record mode"
},
"battery_camera_work_mode": {
"name": "Battery work mode"
},
"power_status": {
"name": "Power status"
},
"online_status": {
"name": "Online status"
}
},
"switch": {

View File

@@ -7,22 +7,11 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_FILE_PATH, CONF_NAME, CONF_PLATFORM, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import ConfigType
from .const import DOMAIN
from .services import async_register_services
PLATFORMS = [Platform.NOTIFY, Platform.SENSOR]
CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the file component."""
async_register_services(hass)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up a file component entry."""

View File

@@ -6,7 +6,3 @@ CONF_TIMESTAMP = "timestamp"
DEFAULT_NAME = "File"
FILE_ICON = "mdi:file"
SERVICE_READ_FILE = "read_file"
ATTR_FILE_NAME = "file_name"
ATTR_FILE_ENCODING = "file_encoding"

View File

@@ -1,7 +0,0 @@
{
"services": {
"read_file": {
"service": "mdi:file"
}
}
}

View File

@@ -1,88 +0,0 @@
"""File Service calls."""
from collections.abc import Callable
import json
import voluptuous as vol
import yaml
from homeassistant.core import HomeAssistant, ServiceCall, SupportsResponse
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv
from .const import ATTR_FILE_ENCODING, ATTR_FILE_NAME, DOMAIN, SERVICE_READ_FILE
def async_register_services(hass: HomeAssistant) -> None:
"""Register services for File integration."""
if not hass.services.has_service(DOMAIN, SERVICE_READ_FILE):
hass.services.async_register(
DOMAIN,
SERVICE_READ_FILE,
read_file,
schema=vol.Schema(
{
vol.Required(ATTR_FILE_NAME): cv.string,
vol.Required(ATTR_FILE_ENCODING): cv.string,
}
),
supports_response=SupportsResponse.ONLY,
)
ENCODING_LOADERS: dict[str, tuple[Callable, type[Exception]]] = {
"json": (json.loads, json.JSONDecodeError),
"yaml": (yaml.safe_load, yaml.YAMLError),
}
def read_file(call: ServiceCall) -> dict:
"""Handle read_file service call."""
file_name = call.data[ATTR_FILE_NAME]
file_encoding = call.data[ATTR_FILE_ENCODING].lower()
if not call.hass.config.is_allowed_path(file_name):
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="no_access_to_path",
translation_placeholders={"filename": file_name},
)
if file_encoding not in ENCODING_LOADERS:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="unsupported_file_encoding",
translation_placeholders={
"filename": file_name,
"encoding": file_encoding,
},
)
try:
with open(file_name, encoding="utf-8") as file:
file_content = file.read()
except FileNotFoundError as err:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="file_not_found",
translation_placeholders={"filename": file_name},
) from err
except OSError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="file_read_error",
translation_placeholders={"filename": file_name},
) from err
loader, error_type = ENCODING_LOADERS[file_encoding]
try:
data = loader(file_content)
except error_type as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="file_decoding",
translation_placeholders={"filename": file_name, "encoding": file_encoding},
) from err
return {"data": data}

View File

@@ -1,14 +0,0 @@
# Describes the format for available file services
read_file:
fields:
file_name:
example: "www/my_file.json"
selector:
text:
file_encoding:
example: "JSON"
selector:
select:
options:
- "JSON"
- "YAML"

View File

@@ -64,37 +64,6 @@
},
"write_access_failed": {
"message": "Write access to {filename} failed: {exc}."
},
"no_access_to_path": {
"message": "Cannot read {filename}, no access to path; `allowlist_external_dirs` may need to be adjusted in `configuration.yaml`"
},
"unsupported_file_encoding": {
"message": "Cannot read {filename}, unsupported file encoding {encoding}."
},
"file_decoding": {
"message": "Cannot read file {filename} as {encoding}."
},
"file_not_found": {
"message": "File {filename} not found."
},
"file_read_error": {
"message": "Error reading {filename}."
}
},
"services": {
"read_file": {
"name": "Read file",
"description": "Reads a file and returns the contents.",
"fields": {
"file_name": {
"name": "File name",
"description": "Name of the file to read."
},
"file_encoding": {
"name": "File encoding",
"description": "Encoding of the file (JSON, YAML.)"
}
}
}
}
}

View File

@@ -31,12 +31,11 @@ SERVICE_SCHEMA_SET_GUEST_WIFI_PW = vol.Schema(
async def _async_set_guest_wifi_password(service_call: ServiceCall) -> None:
"""Call Fritz set guest wifi password service."""
target_entry_ids = await async_extract_config_entry_ids(service_call)
hass = service_call.hass
target_entry_ids = await async_extract_config_entry_ids(hass, service_call)
target_entries: list[FritzConfigEntry] = [
loaded_entry
for loaded_entry in service_call.hass.config_entries.async_loaded_entries(
DOMAIN
)
for loaded_entry in hass.config_entries.async_loaded_entries(DOMAIN)
if loaded_entry.entry_id in target_entry_ids
]

View File

@@ -124,7 +124,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: GeniusHubConfigEntry) ->
def setup_service_functions(hass: HomeAssistant, broker):
"""Set up the service functions."""
@verify_domain_control(DOMAIN)
@verify_domain_control(hass, DOMAIN)
async def set_zone_mode(call: ServiceCall) -> None:
"""Set the system mode."""
entity_id = call.data[ATTR_ENTITY_ID]

View File

@@ -29,7 +29,6 @@ from homeassistant.helpers import (
config_validation as cv,
device_registry as dr,
entity_registry as er,
issue_registry as ir,
)
from homeassistant.helpers.typing import ConfigType
@@ -71,21 +70,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async def generate_content(call: ServiceCall) -> ServiceResponse:
"""Generate content from text and optionally images."""
LOGGER.warning(
"Action '%s.%s' is deprecated and will be removed in the 2026.4.0 release. "
"Please use the 'ai_task.generate_data' action instead",
DOMAIN,
SERVICE_GENERATE_CONTENT,
)
ir.async_create_issue(
hass,
DOMAIN,
"deprecated_generate_content",
breaks_in_ha_version="2026.4.0",
is_fixable=False,
severity=ir.IssueSeverity.WARNING,
translation_key="deprecated_generate_content",
)
prompt_parts = [call.data[CONF_PROMPT]]

View File

@@ -8,5 +8,5 @@
"documentation": "https://www.home-assistant.io/integrations/google_generative_ai_conversation",
"integration_type": "service",
"iot_class": "cloud_polling",
"requirements": ["google-genai==1.38.0"]
"requirements": ["google-genai==1.29.0"]
}

View File

@@ -150,16 +150,10 @@
}
}
},
"issues": {
"deprecated_generate_content": {
"title": "Deprecated 'generate_content' action",
"description": "Action 'google_generative_ai_conversation.generate_content' is deprecated and will be removed in the 2026.4.0 release. Please use the 'ai_task.generate_data' action instead"
}
},
"services": {
"generate_content": {
"name": "Generate content (deprecated)",
"description": "Generate content from a prompt consisting of text and optionally images (deprecated)",
"name": "Generate content",
"description": "Generate content from a prompt consisting of text and optionally images",
"fields": {
"prompt": {
"name": "Prompt",

View File

@@ -51,7 +51,7 @@ async def _extract_gmail_config_entries(
) -> list[GoogleMailConfigEntry]:
return [
entry
for entry_id in await async_extract_config_entry_ids(call)
for entry_id in await async_extract_config_entry_ids(call.hass, call)
if (entry := call.hass.config_entries.async_get_entry(entry_id))
and entry.domain == DOMAIN
]

View File

@@ -10,8 +10,9 @@ from typing import Self, cast
from google_photos_library_api.exceptions import GooglePhotosApiError
from google_photos_library_api.model import Album, MediaItem
from homeassistant.components.media_player import BrowseError, MediaClass, MediaType
from homeassistant.components.media_player import MediaClass, MediaType
from homeassistant.components.media_source import (
BrowseError,
BrowseMediaSource,
MediaSource,
MediaSourceItem,

View File

@@ -39,7 +39,6 @@ ATTR_ADD_CHECKLIST_ITEM = "add_checklist_item"
ATTR_REMOVE_CHECKLIST_ITEM = "remove_checklist_item"
ATTR_SCORE_CHECKLIST_ITEM = "score_checklist_item"
ATTR_UNSCORE_CHECKLIST_ITEM = "unscore_checklist_item"
ATTR_COLLAPSE_CHECKLIST = "collapse_checklist"
ATTR_REMINDER = "reminder"
ATTR_REMOVE_REMINDER = "remove_reminder"
ATTR_CLEAR_REMINDER = "clear_reminder"

View File

@@ -47,7 +47,6 @@ from .const import (
ATTR_ALIAS,
ATTR_CLEAR_DATE,
ATTR_CLEAR_REMINDER,
ATTR_COLLAPSE_CHECKLIST,
ATTR_CONFIG_ENTRY,
ATTR_COST,
ATTR_COUNTER_DOWN,
@@ -131,11 +130,6 @@ SERVICE_TRANSFORMATION_SCHEMA = vol.Schema(
}
)
COLLAPSE_CHECKLIST_MAP = {
"collapsed": True,
"expanded": False,
}
BASE_TASK_SCHEMA = vol.Schema(
{
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
@@ -166,7 +160,6 @@ BASE_TASK_SCHEMA = vol.Schema(
vol.Optional(ATTR_REMOVE_CHECKLIST_ITEM): vol.All(cv.ensure_list, [str]),
vol.Optional(ATTR_SCORE_CHECKLIST_ITEM): vol.All(cv.ensure_list, [str]),
vol.Optional(ATTR_UNSCORE_CHECKLIST_ITEM): vol.All(cv.ensure_list, [str]),
vol.Optional(ATTR_COLLAPSE_CHECKLIST): vol.In(COLLAPSE_CHECKLIST_MAP),
vol.Optional(ATTR_START_DATE): cv.date,
vol.Optional(ATTR_INTERVAL): vol.All(int, vol.Range(0)),
vol.Optional(ATTR_REPEAT): vol.All(cv.ensure_list, [vol.In(WEEK_DAYS)]),
@@ -230,7 +223,6 @@ ITEMID_MAP = {
"shiny_seed": Skill.SHINY_SEED,
}
SERVICE_TASK_TYPE_MAP = {
SERVICE_UPDATE_REWARD: TaskType.REWARD,
SERVICE_CREATE_REWARD: TaskType.REWARD,
@@ -722,9 +714,6 @@ async def _create_or_update_task(call: ServiceCall) -> ServiceResponse: # noqa:
):
data["checklist"] = checklist
if collapse_checklist := call.data.get(ATTR_COLLAPSE_CHECKLIST):
data["collapseChecklist"] = COLLAPSE_CHECKLIST_MAP[collapse_checklist]
reminders = current_task.reminders if current_task else []
if add_reminders := call.data.get(ATTR_REMINDER):

View File

@@ -275,15 +275,6 @@ update_todo:
selector:
text:
multiple: true
collapse_checklist: &collapse_checklist
required: false
selector:
select:
options:
- collapsed
- expanded
mode: list
translation_key: collapse_checklist
priority: *priority
duedate_options:
collapsed: true
@@ -327,7 +318,6 @@ create_todo:
name: *name
notes: *notes
add_checklist_item: *add_checklist_item
collapse_checklist: *collapse_checklist
priority: *priority
date: *due_date
reminder: *reminder
@@ -429,7 +419,6 @@ create_daily:
name: *name
notes: *notes
add_checklist_item: *add_checklist_item
collapse_checklist: *collapse_checklist
priority: *priority
start_date: *start_date
frequency: *frequency_daily

View File

@@ -66,9 +66,7 @@
"repeat_weekly_options_description": "Options related to weekly repetition, applicable when the repetition interval is set to weekly.",
"repeat_monthly_options_name": "Monthly repeat day",
"repeat_monthly_options_description": "Options related to monthly repetition, applicable when the repetition interval is set to monthly.",
"quest_name": "Quest",
"collapse_checklist_name": "Collapse/expand checklist",
"collapse_checklist_description": "Whether the checklist of a task is displayed as collapsed or expanded in Habitica."
"quest_name": "Quest"
},
"config": {
"abort": {
@@ -1008,10 +1006,6 @@
"unscore_checklist_item": {
"name": "[%key:component::habitica::common::unscore_checklist_item_name%]",
"description": "[%key:component::habitica::common::unscore_checklist_item_description%]"
},
"collapse_checklist": {
"name": "[%key:component::habitica::common::collapse_checklist_name%]",
"description": "[%key:component::habitica::common::collapse_checklist_description%]"
}
},
"sections": {
@@ -1076,10 +1070,6 @@
"add_checklist_item": {
"name": "[%key:component::habitica::common::checklist_options_name%]",
"description": "[%key:component::habitica::common::add_checklist_item_description%]"
},
"collapse_checklist": {
"name": "[%key:component::habitica::common::collapse_checklist_name%]",
"description": "[%key:component::habitica::common::collapse_checklist_description%]"
}
},
"sections": {
@@ -1161,10 +1151,6 @@
"name": "[%key:component::habitica::common::unscore_checklist_item_name%]",
"description": "[%key:component::habitica::common::unscore_checklist_item_description%]"
},
"collapse_checklist": {
"name": "[%key:component::habitica::common::collapse_checklist_name%]",
"description": "[%key:component::habitica::common::collapse_checklist_description%]"
},
"streak": {
"name": "Adjust streak",
"description": "Adjust or reset the streak counter of the daily."
@@ -1261,10 +1247,6 @@
"name": "[%key:component::habitica::common::checklist_options_name%]",
"description": "[%key:component::habitica::common::add_checklist_item_description%]"
},
"collapse_checklist": {
"name": "[%key:component::habitica::common::collapse_checklist_name%]",
"description": "[%key:component::habitica::common::collapse_checklist_description%]"
},
"reminder": {
"name": "[%key:component::habitica::common::reminder_options_name%]",
"description": "[%key:component::habitica::common::reminder_description%]"
@@ -1343,12 +1325,6 @@
"day_of_month": "Day of the month",
"day_of_week": "Day of the week"
}
},
"collapse_checklist": {
"options": {
"collapsed": "Collapsed",
"expanded": "Expanded"
}
}
}
}

View File

@@ -73,7 +73,6 @@ from . import ( # noqa: F401
config_flow,
diagnostics,
sensor,
switch,
system_health,
update,
)
@@ -150,7 +149,7 @@ _DEPRECATED_HassioServiceInfo = DeprecatedConstant(
# If new platforms are added, be sure to import them above
# so we do not make other components that depend on hassio
# wait for the import of the platforms
PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.SWITCH, Platform.UPDATE]
PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.UPDATE]
CONF_FRONTEND_REPO = "development_repo"

View File

@@ -4,7 +4,6 @@ from __future__ import annotations
import asyncio
from collections import defaultdict
from copy import deepcopy
import logging
from typing import TYPE_CHECKING, Any
@@ -546,15 +545,3 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
await super()._async_refresh(
log_failures, raise_on_auth_failed, scheduled, raise_on_entry_error
)
async def force_addon_info_data_refresh(self, addon_slug: str) -> None:
"""Force refresh of addon info data for a specific addon."""
try:
slug, info = await self._update_addon_info(addon_slug)
if info is not None and DATA_KEY_ADDONS in self.data:
if slug in self.data[DATA_KEY_ADDONS]:
data = deepcopy(self.data)
data[DATA_KEY_ADDONS][slug].update(info)
self.async_set_updated_data(data)
except SupervisorError as err:
_LOGGER.warning("Could not refresh info for %s: %s", addon_slug, err)

View File

@@ -70,7 +70,7 @@ PATHS_ADMIN = re.compile(
r"|backups/new/upload"
r"|audio/logs(/follow|/boots/-?\d+(/follow)?)?"
r"|cli/logs(/follow|/boots/-?\d+(/follow)?)?"
r"|core/logs(/latest|/follow|/boots/-?\d+(/follow)?)?"
r"|core/logs(/follow|/boots/-?\d+(/follow)?)?"
r"|dns/logs(/follow|/boots/-?\d+(/follow)?)?"
r"|host/logs(/follow|/boots(/-?\d+(/follow)?)?)?"
r"|multicast/logs(/follow|/boots/-?\d+(/follow)?)?"

View File

@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/hassio",
"iot_class": "local_polling",
"quality_scale": "internal",
"requirements": ["aiohasupervisor==0.3.3b0"],
"requirements": ["aiohasupervisor==0.3.2"],
"single_config_entry": true
}

View File

@@ -193,7 +193,7 @@
},
"unsupported_docker_version": {
"title": "Unsupported system - Docker version",
"description": "System is unsupported because the Docker version is out of date. For information about the required version and how to fix this, select Learn more."
"description": "System is unsupported because the wrong version of Docker is in use. Use the link to learn the correct version and how to fix this."
},
"unsupported_job_conditions": {
"title": "Unsupported system - Protections disabled",
@@ -209,7 +209,7 @@
},
"unsupported_os": {
"title": "Unsupported system - Operating System",
"description": "System is unsupported because the operating system in use is not tested or maintained for use with Supervisor. For information about supported operating systems and how to fix this, select Learn more."
"description": "System is unsupported because the operating system in use is not tested or maintained for use with Supervisor. Use the link to which operating systems are supported and how to fix this."
},
"unsupported_os_agent": {
"title": "Unsupported system - OS-Agent issues",
@@ -250,10 +250,6 @@
"unsupported_os_version": {
"title": "Unsupported system - Home Assistant OS version",
"description": "System is unsupported because the Home Assistant OS version in use is not supported. For troubleshooting information, select Learn more."
},
"unsupported_home_assistant_core_version": {
"title": "Unsupported system - Home Assistant Core version",
"description": "System is unsupported because the Home Assistant Core version in use is not supported. For troubleshooting information, select Learn more."
}
},
"entity": {

View File

@@ -1,90 +0,0 @@
"""Switch platform for Hass.io addons."""
from __future__ import annotations
import logging
from typing import Any
from aiohasupervisor import SupervisorError
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ICON
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import ADDONS_COORDINATOR, ATTR_STARTED, ATTR_STATE, DATA_KEY_ADDONS
from .entity import HassioAddonEntity
from .handler import get_supervisor_client
_LOGGER = logging.getLogger(__name__)
ENTITY_DESCRIPTION = SwitchEntityDescription(
key=ATTR_STATE,
name=None,
icon="mdi:puzzle",
entity_registry_enabled_default=False,
)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Switch set up for Hass.io config entry."""
coordinator = hass.data[ADDONS_COORDINATOR]
async_add_entities(
HassioAddonSwitch(
addon=addon,
coordinator=coordinator,
entity_description=ENTITY_DESCRIPTION,
)
for addon in coordinator.data[DATA_KEY_ADDONS].values()
)
class HassioAddonSwitch(HassioAddonEntity, SwitchEntity):
"""Switch for Hass.io add-ons."""
@property
def is_on(self) -> bool | None:
"""Return true if the add-on is on."""
addon_data = self.coordinator.data[DATA_KEY_ADDONS].get(self._addon_slug, {})
state = addon_data.get(self.entity_description.key)
return state == ATTR_STARTED
@property
def entity_picture(self) -> str | None:
"""Return the icon of the add-on if any."""
if not self.available:
return None
addon_data = self.coordinator.data[DATA_KEY_ADDONS].get(self._addon_slug, {})
if addon_data.get(ATTR_ICON):
return f"/api/hassio/addons/{self._addon_slug}/icon"
return None
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the entity on."""
supervisor_client = get_supervisor_client(self.hass)
try:
await supervisor_client.addons.start_addon(self._addon_slug)
except SupervisorError as err:
_LOGGER.error("Failed to start addon %s: %s", self._addon_slug, err)
raise HomeAssistantError(err) from err
await self.coordinator.force_addon_info_data_refresh(self._addon_slug)
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the entity off."""
supervisor_client = get_supervisor_client(self.hass)
try:
await supervisor_client.addons.stop_addon(self._addon_slug)
except SupervisorError as err:
_LOGGER.error("Failed to stop addon %s: %s", self._addon_slug, err)
raise HomeAssistantError(err) from err
await self.coordinator.force_addon_info_data_refresh(self._addon_slug)

View File

@@ -6,14 +6,9 @@ import logging
from homeassistant.const import CONF_API_KEY, CONF_MODE, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.issue_registry import (
IssueSeverity,
async_create_issue,
async_delete_issue,
)
from homeassistant.helpers.start import async_at_started
from .const import CONF_TRAFFIC_MODE, DOMAIN, TRAVEL_MODE_PUBLIC
from .const import CONF_TRAFFIC_MODE, TRAVEL_MODE_PUBLIC
from .coordinator import (
HereConfigEntry,
HERERoutingDataUpdateCoordinator,
@@ -29,8 +24,6 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: HereConfigEntry)
"""Set up HERE Travel Time from a config entry."""
api_key = config_entry.data[CONF_API_KEY]
alert_for_multiple_entries(hass)
cls: type[HERETransitDataUpdateCoordinator | HERERoutingDataUpdateCoordinator]
if config_entry.data[CONF_MODE] in {TRAVEL_MODE_PUBLIC, "publicTransportTimeTable"}:
cls = HERETransitDataUpdateCoordinator
@@ -49,29 +42,6 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: HereConfigEntry)
return True
def alert_for_multiple_entries(hass: HomeAssistant) -> None:
"""Check if there are multiple entries for the same API key."""
if len(hass.config_entries.async_entries(DOMAIN)) > 1:
async_create_issue(
hass,
DOMAIN,
"multiple_here_travel_time_entries",
learn_more_url="https://www.home-assistant.io/integrations/here_travel_time/",
is_fixable=False,
severity=IssueSeverity.WARNING,
translation_key="multiple_here_travel_time_entries",
translation_placeholders={
"pricing_page": "https://www.here.com/get-started/pricing",
},
)
else:
async_delete_issue(
hass,
DOMAIN,
"multiple_here_travel_time_entries",
)
async def async_unload_entry(
hass: HomeAssistant, config_entry: HereConfigEntry
) -> bool:

View File

@@ -44,7 +44,7 @@ from .coordinator import (
HERETransitDataUpdateCoordinator,
)
SCAN_INTERVAL = timedelta(minutes=30)
SCAN_INTERVAL = timedelta(minutes=5)
def sensor_descriptions(travel_mode: str) -> tuple[SensorEntityDescription, ...]:

View File

@@ -107,11 +107,5 @@
"name": "Destination"
}
}
},
"issues": {
"multiple_here_travel_time_entries": {
"title": "More than one HERE Travel Time integration detected",
"description": "HERE deprecated the previous free tier. The new Base Plan has only 5000 instead of the previous 30000 free requests per month.\n\nSince you have more than one HERE Travel Time integration configured, you will need to disable or remove the additional integrations to avoid exceeding the free request limit.\nYou can ignore this issue if you are okay with the additional cost."
}
}
}

View File

@@ -1,23 +0,0 @@
"""Diagnostics support for history_stats."""
from __future__ import annotations
from typing import Any
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, config_entry: ConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
registry = er.async_get(hass)
entities = registry.entities.get_entries_for_config_entry_id(config_entry.entry_id)
return {
"config_entry": config_entry.as_dict(),
"entity": [entity.extended_dict for entity in entities],
}

View File

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/holiday",
"iot_class": "local_polling",
"requirements": ["holidays==0.81", "babel==2.15.0"]
"requirements": ["holidays==0.80", "babel==2.15.0"]
}

View File

@@ -37,6 +37,7 @@ PLATFORMS = [
Platform.SELECT,
Platform.SENSOR,
Platform.SWITCH,
Platform.TIME,
]

View File

@@ -66,14 +66,6 @@
"default": "mdi:stop"
}
},
"number": {
"start_in_relative": {
"default": "mdi:progress-clock"
},
"finish_in_relative": {
"default": "mdi:progress-clock"
}
},
"sensor": {
"operation_state": {
"default": "mdi:state-machine",
@@ -259,6 +251,14 @@
"i_dos_2_active": {
"default": "mdi:numeric-2-circle"
}
},
"time": {
"start_in_relative": {
"default": "mdi:progress-clock"
},
"finish_in_relative": {
"default": "mdi:progress-clock"
}
}
}
}

View File

@@ -1852,6 +1852,11 @@
"i_dos2_active": {
"name": "[%key:component::home_connect::services::set_program_and_options::fields::laundry_care_washer_option_i_dos2_active::name%]"
}
},
"time": {
"alarm_clock": {
"name": "Alarm clock"
}
}
}
}

View File

@@ -0,0 +1,172 @@
"""Provides time entities for Home Connect."""
from datetime import time
from typing import cast
from aiohomeconnect.model import SettingKey
from aiohomeconnect.model.error import HomeConnectError
from homeassistant.components.automation import automations_with_entity
from homeassistant.components.script import scripts_with_entity
from homeassistant.components.time import TimeEntity, TimeEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.issue_registry import (
IssueSeverity,
async_create_issue,
async_delete_issue,
)
from .common import setup_home_connect_entry
from .const import DOMAIN
from .coordinator import HomeConnectApplianceData, HomeConnectConfigEntry
from .entity import HomeConnectEntity
from .utils import get_dict_from_home_connect_error
PARALLEL_UPDATES = 1
TIME_ENTITIES = (
TimeEntityDescription(
key=SettingKey.BSH_COMMON_ALARM_CLOCK,
translation_key="alarm_clock",
entity_registry_enabled_default=False,
),
)
def _get_entities_for_appliance(
entry: HomeConnectConfigEntry,
appliance: HomeConnectApplianceData,
) -> list[HomeConnectEntity]:
"""Get a list of entities."""
return [
HomeConnectTimeEntity(entry.runtime_data, appliance, description)
for description in TIME_ENTITIES
if description.key in appliance.settings
]
async def async_setup_entry(
hass: HomeAssistant,
entry: HomeConnectConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the Home Connect switch."""
setup_home_connect_entry(
entry,
_get_entities_for_appliance,
async_add_entities,
)
def seconds_to_time(seconds: int) -> time:
"""Convert seconds to a time object."""
minutes, sec = divmod(seconds, 60)
hours, minutes = divmod(minutes, 60)
return time(hour=hours, minute=minutes, second=sec)
def time_to_seconds(t: time) -> int:
"""Convert a time object to seconds."""
return t.hour * 3600 + t.minute * 60 + t.second
class HomeConnectTimeEntity(HomeConnectEntity, TimeEntity):
"""Time setting class for Home Connect."""
async def async_added_to_hass(self) -> None:
"""Call when entity is added to hass."""
await super().async_added_to_hass()
if self.bsh_key is SettingKey.BSH_COMMON_ALARM_CLOCK:
automations = automations_with_entity(self.hass, self.entity_id)
scripts = scripts_with_entity(self.hass, self.entity_id)
items = automations + scripts
if not items:
return
entity_reg: er.EntityRegistry = er.async_get(self.hass)
entity_automations = [
automation_entity
for automation_id in automations
if (automation_entity := entity_reg.async_get(automation_id))
]
entity_scripts = [
script_entity
for script_id in scripts
if (script_entity := entity_reg.async_get(script_id))
]
items_list = [
f"- [{item.original_name}](/config/automation/edit/{item.unique_id})"
for item in entity_automations
] + [
f"- [{item.original_name}](/config/script/edit/{item.unique_id})"
for item in entity_scripts
]
async_create_issue(
self.hass,
DOMAIN,
f"deprecated_time_alarm_clock_in_automations_scripts_{self.entity_id}",
breaks_in_ha_version="2025.10.0",
is_fixable=True,
is_persistent=True,
severity=IssueSeverity.WARNING,
translation_key="deprecated_time_alarm_clock",
translation_placeholders={
"entity_id": self.entity_id,
"items": "\n".join(items_list),
},
)
async def async_will_remove_from_hass(self) -> None:
"""Call when entity will be removed from hass."""
if self.bsh_key is SettingKey.BSH_COMMON_ALARM_CLOCK:
async_delete_issue(
self.hass,
DOMAIN,
f"deprecated_time_alarm_clock_in_automations_scripts_{self.entity_id}",
)
async_delete_issue(
self.hass, DOMAIN, f"deprecated_time_alarm_clock_{self.entity_id}"
)
async def async_set_value(self, value: time) -> None:
"""Set the native value of the entity."""
async_create_issue(
self.hass,
DOMAIN,
f"deprecated_time_alarm_clock_{self.entity_id}",
breaks_in_ha_version="2025.10.0",
is_fixable=True,
is_persistent=True,
severity=IssueSeverity.WARNING,
translation_key="deprecated_time_alarm_clock",
translation_placeholders={
"entity_id": self.entity_id,
},
)
try:
await self.coordinator.client.set_setting(
self.appliance.info.ha_id,
setting_key=SettingKey(self.bsh_key),
value=time_to_seconds(value),
)
except HomeConnectError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="set_setting_entity",
translation_placeholders={
**get_dict_from_home_connect_error(err),
"entity_id": self.entity_id,
"key": self.bsh_key,
"value": str(value),
},
) from err
def update_native_value(self) -> None:
"""Set the value of the entity."""
data = self.appliance.settings[cast(SettingKey, self.bsh_key)]
self._attr_native_value = seconds_to_time(data.value)
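
For illustration only (not part of the diff): the two helpers above round-trip between the seconds value stored in the alarm clock setting and a datetime.time. The definitions are copied from the diff so the example is self-contained and can be run standalone.

from datetime import time

def seconds_to_time(seconds: int) -> time:
    """Convert seconds to a time object."""
    minutes, sec = divmod(seconds, 60)
    hours, minutes = divmod(minutes, 60)
    return time(hour=hours, minute=minutes, second=sec)

def time_to_seconds(t: time) -> int:
    """Convert a time object to seconds."""
    return t.hour * 3600 + t.minute * 60 + t.second

# 1 hour, 1 minute and 5 seconds round-trips cleanly:
assert seconds_to_time(3665) == time(hour=1, minute=1, second=5)
assert time_to_seconds(time(hour=1, minute=1, second=5)) == 3665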

View File

@@ -339,7 +339,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa:
reload_entries: set[str] = set()
if ATTR_ENTRY_ID in call.data:
reload_entries.add(call.data[ATTR_ENTRY_ID])
reload_entries.update(await async_extract_config_entry_ids(call))
reload_entries.update(await async_extract_config_entry_ids(hass, call))
if not reload_entries:
raise ValueError("There were no matching config entries to reload")
await asyncio.gather(

View File

@@ -272,7 +272,7 @@ async def async_setup_platform(
async def delete_service(call: ServiceCall) -> None:
"""Delete a dynamically created scene."""
entity_ids = await async_extract_entity_ids(call)
entity_ids = await async_extract_entity_ids(hass, call)
for entity_id in entity_ids:
scene = platform.entities.get(entity_id)

View File

@@ -90,7 +90,7 @@ class ZBT2FirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
firmware_name="OpenThread",
expected_installed_firmware_type=ApplicationType.SPINEL,
step_id="install_thread_firmware",
next_step_id="finish_thread_installation",
next_step_id="start_otbr_addon",
)
@@ -103,7 +103,6 @@ class HomeAssistantConnectZBT2ConfigFlow(
VERSION = 1
MINOR_VERSION = 1
ZIGBEE_BAUDRATE = 460800
def __init__(self, *args: Any, **kwargs: Any) -> None:
"""Initialize the config flow."""

View File

@@ -52,16 +52,8 @@
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::title%]",
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::description%]",
"menu_options": {
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee%]",
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread%]",
"pick_firmware_zigbee_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee_migrate%]",
"pick_firmware_thread_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread_migrate%]"
},
"menu_option_descriptions": {
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_zigbee%]",
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread%]",
"pick_firmware_zigbee_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_zigbee_migrate%]",
"pick_firmware_thread_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread_migrate%]"
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee%]"
}
},
"confirm_zigbee": {
@@ -83,29 +75,6 @@
"confirm_otbr": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_otbr::title%]",
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_otbr::description%]"
},
"zigbee_installation_type": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::title%]",
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::description%]",
"menu_options": {
"zigbee_intent_recommended": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_options::zigbee_intent_recommended%]",
"zigbee_intent_custom": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_options::zigbee_intent_custom%]"
},
"menu_option_descriptions": {
"zigbee_intent_recommended": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_option_descriptions::zigbee_intent_recommended%]",
"zigbee_intent_custom": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_option_descriptions::zigbee_intent_custom%]"
}
},
"zigbee_integration": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::title%]",
"menu_options": {
"zigbee_integration_zha": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_options::zigbee_integration_zha%]",
"zigbee_integration_other": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_options::zigbee_integration_other%]"
},
"menu_option_descriptions": {
"zigbee_integration_zha": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_option_descriptions::zigbee_integration_zha%]",
"zigbee_integration_other": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_option_descriptions::zigbee_integration_other%]"
}
}
},
"error": {
@@ -142,15 +111,7 @@
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::description%]",
"menu_options": {
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee%]",
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread%]",
"pick_firmware_zigbee_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee_migrate%]",
"pick_firmware_thread_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread_migrate%]"
},
"menu_option_descriptions": {
"pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_zigbee%]",
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread%]",
"pick_firmware_zigbee_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_zigbee_migrate%]",
"pick_firmware_thread_migrate": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_option_descriptions::pick_firmware_thread_migrate%]"
"pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread%]"
}
},
"confirm_zigbee": {
@@ -172,29 +133,6 @@
"confirm_otbr": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_otbr::title%]",
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_otbr::description%]"
},
"zigbee_installation_type": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::title%]",
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::description%]",
"menu_options": {
"zigbee_intent_recommended": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_options::zigbee_intent_recommended%]",
"zigbee_intent_custom": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_options::zigbee_intent_custom%]"
},
"menu_option_descriptions": {
"zigbee_intent_recommended": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_option_descriptions::zigbee_intent_recommended%]",
"zigbee_intent_custom": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_installation_type::menu_option_descriptions::zigbee_intent_custom%]"
}
},
"zigbee_integration": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::title%]",
"menu_options": {
"zigbee_integration_zha": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_options::zigbee_integration_zha%]",
"zigbee_integration_other": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_options::zigbee_integration_other%]"
},
"menu_option_descriptions": {
"zigbee_integration_zha": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_option_descriptions::zigbee_integration_zha%]",
"zigbee_integration_other": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_integration::menu_option_descriptions::zigbee_integration_other%]"
}
}
},
"abort": {

View File

@@ -4,7 +4,6 @@ from __future__ import annotations
from abc import ABC, abstractmethod
import asyncio
from enum import StrEnum
import logging
from typing import Any
@@ -24,7 +23,6 @@ from homeassistant.config_entries import (
ConfigEntryBaseFlow,
ConfigFlow,
ConfigFlowResult,
FlowType,
OptionsFlow,
)
from homeassistant.core import callback
@@ -50,31 +48,13 @@ _LOGGER = logging.getLogger(__name__)
STEP_PICK_FIRMWARE_THREAD = "pick_firmware_thread"
STEP_PICK_FIRMWARE_ZIGBEE = "pick_firmware_zigbee"
STEP_PICK_FIRMWARE_THREAD_MIGRATE = "pick_firmware_thread_migrate"
STEP_PICK_FIRMWARE_ZIGBEE_MIGRATE = "pick_firmware_zigbee_migrate"
class PickedFirmwareType(StrEnum):
"""Firmware types that can be picked."""
THREAD = "thread"
ZIGBEE = "zigbee"
class ZigbeeIntegration(StrEnum):
"""Zigbee integrations that can be picked."""
OTHER = "other"
ZHA = "zha"
class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
"""Base flow to install firmware."""
ZIGBEE_BAUDRATE = 115200 # Default, subclasses may override
_failed_addon_name: str
_failed_addon_reason: str
_picked_firmware_type: PickedFirmwareType
def __init__(self, *args: Any, **kwargs: Any) -> None:
"""Instantiate base flow."""
@@ -83,12 +63,11 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
self._probed_firmware_info: FirmwareInfo | None = None
self._device: str | None = None # To be set in a subclass
self._hardware_name: str = "unknown" # To be set in a subclass
self._zigbee_integration = ZigbeeIntegration.ZHA
self.addon_install_task: asyncio.Task | None = None
self.addon_start_task: asyncio.Task | None = None
self.addon_uninstall_task: asyncio.Task | None = None
self.firmware_install_task: asyncio.Task[None] | None = None
self.firmware_install_task: asyncio.Task | None = None
self.installing_firmware_name: str | None = None
def _get_translation_placeholders(self) -> dict[str, str]:
@@ -126,23 +105,11 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Pick Thread or Zigbee firmware."""
# Determine if ZHA or Thread are already configured to present migrate options
zha_entries = self.hass.config_entries.async_entries(ZHA_DOMAIN)
otbr_entries = self.hass.config_entries.async_entries(OTBR_DOMAIN)
return self.async_show_menu(
step_id="pick_firmware",
menu_options=[
(
STEP_PICK_FIRMWARE_ZIGBEE_MIGRATE
if zha_entries
else STEP_PICK_FIRMWARE_ZIGBEE
),
(
STEP_PICK_FIRMWARE_THREAD_MIGRATE
if otbr_entries
else STEP_PICK_FIRMWARE_THREAD
),
STEP_PICK_FIRMWARE_ZIGBEE,
STEP_PICK_FIRMWARE_THREAD,
],
description_placeholders=self._get_translation_placeholders(),
)
@@ -184,17 +151,91 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
step_id: str,
next_step_id: str,
) -> ConfigFlowResult:
"""Show progress dialog for installing firmware."""
assert self._device is not None
if not self.firmware_install_task:
self.firmware_install_task = self.hass.async_create_task(
self._install_firmware(
fw_update_url,
fw_type,
firmware_name,
expected_installed_firmware_type,
),
f"Install {firmware_name} firmware",
# Keep track of the firmware we're working with, for error messages
self.installing_firmware_name = firmware_name
# Installing new firmware is only truly required if the wrong type is
# installed: upgrading to the latest release of the current firmware type
# isn't strictly necessary for functionality.
firmware_install_required = self._probed_firmware_info is None or (
self._probed_firmware_info.firmware_type
!= expected_installed_firmware_type
)
session = async_get_clientsession(self.hass)
client = FirmwareUpdateClient(fw_update_url, session)
try:
manifest = await client.async_update_data()
fw_manifest = next(
fw for fw in manifest.firmwares if fw.filename.startswith(fw_type)
)
except (StopIteration, TimeoutError, ClientError, ManifestMissing):
_LOGGER.warning(
"Failed to fetch firmware update manifest", exc_info=True
)
# Not having internet access should not prevent setup
if not firmware_install_required:
_LOGGER.debug(
"Skipping firmware upgrade due to index download failure"
)
return self.async_show_progress_done(next_step_id=next_step_id)
return self.async_show_progress_done(
next_step_id="firmware_download_failed"
)
if not firmware_install_required:
assert self._probed_firmware_info is not None
# Make sure we do not downgrade the firmware
fw_metadata = NabuCasaMetadata.from_json(fw_manifest.metadata)
fw_version = fw_metadata.get_public_version()
probed_fw_version = Version(self._probed_firmware_info.firmware_version)
if probed_fw_version >= fw_version:
_LOGGER.debug(
"Not downgrading firmware, installed %s is newer than available %s",
probed_fw_version,
fw_version,
)
return self.async_show_progress_done(next_step_id=next_step_id)
try:
fw_data = await client.async_fetch_firmware(fw_manifest)
except (TimeoutError, ClientError, ValueError):
_LOGGER.warning("Failed to fetch firmware update", exc_info=True)
# If we cannot download new firmware, we shouldn't block setup
if not firmware_install_required:
_LOGGER.debug(
"Skipping firmware upgrade due to image download failure"
)
return self.async_show_progress_done(next_step_id=next_step_id)
# Otherwise, fail
return self.async_show_progress_done(
next_step_id="firmware_download_failed"
)
self.firmware_install_task = self.hass.async_create_task(
async_flash_silabs_firmware(
hass=self.hass,
device=self._device,
fw_data=fw_data,
expected_installed_firmware_type=expected_installed_firmware_type,
bootloader_reset_type=None,
progress_callback=lambda offset, total: self.async_update_progress(
offset / total
),
),
f"Flash {firmware_name} firmware",
)
if not self.firmware_install_task.done():
return self.async_show_progress(
step_id=step_id,
@@ -208,141 +249,12 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
try:
await self.firmware_install_task
except AbortFlow as err:
return self.async_show_progress_done(
next_step_id=err.reason,
)
except HomeAssistantError:
_LOGGER.exception("Failed to flash firmware")
return self.async_show_progress_done(next_step_id="firmware_install_failed")
finally:
self.firmware_install_task = None
return self.async_show_progress_done(next_step_id=next_step_id)
async def _install_firmware(
self,
fw_update_url: str,
fw_type: str,
firmware_name: str,
expected_installed_firmware_type: ApplicationType,
) -> None:
"""Install firmware."""
if not await self._probe_firmware_info():
raise AbortFlow(
reason="unsupported_firmware",
description_placeholders=self._get_translation_placeholders(),
)
assert self._device is not None
# Keep track of the firmware we're working with, for error messages
self.installing_firmware_name = firmware_name
# Installing new firmware is only truly required if the wrong type is
# installed: upgrading to the latest release of the current firmware type
# isn't strictly necessary for functionality.
firmware_install_required = self._probed_firmware_info is None or (
self._probed_firmware_info.firmware_type != expected_installed_firmware_type
)
session = async_get_clientsession(self.hass)
client = FirmwareUpdateClient(fw_update_url, session)
try:
manifest = await client.async_update_data()
fw_manifest = next(
fw for fw in manifest.firmwares if fw.filename.startswith(fw_type)
)
except (StopIteration, TimeoutError, ClientError, ManifestMissing) as err:
_LOGGER.warning("Failed to fetch firmware update manifest", exc_info=True)
# Not having internet access should not prevent setup
if not firmware_install_required:
_LOGGER.debug("Skipping firmware upgrade due to index download failure")
return
raise AbortFlow(reason="firmware_download_failed") from err
if not firmware_install_required:
assert self._probed_firmware_info is not None
# Make sure we do not downgrade the firmware
fw_metadata = NabuCasaMetadata.from_json(fw_manifest.metadata)
fw_version = fw_metadata.get_public_version()
probed_fw_version = Version(self._probed_firmware_info.firmware_version)
if probed_fw_version >= fw_version:
_LOGGER.debug(
"Not downgrading firmware, installed %s is newer than available %s",
probed_fw_version,
fw_version,
)
return
try:
fw_data = await client.async_fetch_firmware(fw_manifest)
except (TimeoutError, ClientError, ValueError) as err:
_LOGGER.warning("Failed to fetch firmware update", exc_info=True)
# If we cannot download new firmware, we shouldn't block setup
if not firmware_install_required:
_LOGGER.debug("Skipping firmware upgrade due to image download failure")
return
# Otherwise, fail
raise AbortFlow(reason="firmware_download_failed") from err
await async_flash_silabs_firmware(
hass=self.hass,
device=self._device,
fw_data=fw_data,
expected_installed_firmware_type=expected_installed_firmware_type,
bootloader_reset_type=None,
progress_callback=lambda offset, total: self.async_update_progress(
offset / total
),
)
async def _configure_and_start_otbr_addon(self) -> None:
"""Configure and start the OTBR addon."""
# Before we start the addon, confirm that the correct firmware is running
# and populate `self._probed_firmware_info` with the correct information
if not await self._probe_firmware_info(probe_methods=(ApplicationType.SPINEL,)):
raise AbortFlow(
"unsupported_firmware",
description_placeholders=self._get_translation_placeholders(),
)
otbr_manager = get_otbr_addon_manager(self.hass)
addon_info = await self._async_get_addon_info(otbr_manager)
assert self._device is not None
new_addon_config = {
**addon_info.options,
"device": self._device,
"baudrate": 460800,
"flow_control": True,
"autoflash_firmware": False,
}
_LOGGER.debug("Reconfiguring OTBR addon with %s", new_addon_config)
try:
await otbr_manager.async_set_addon_options(new_addon_config)
except AddonError as err:
_LOGGER.error(err)
raise AbortFlow(
"addon_set_config_failed",
description_placeholders={
**self._get_translation_placeholders(),
"addon_name": otbr_manager.addon_name,
},
) from err
await otbr_manager.async_start_addon_waiting()
async def async_step_firmware_download_failed(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
@@ -369,116 +281,17 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
},
)
async def async_step_unsupported_firmware(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Abort when unsupported firmware is detected."""
return self.async_abort(
reason="unsupported_firmware",
description_placeholders=self._get_translation_placeholders(),
)
async def async_step_zigbee_installation_type(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the installation type step."""
return self.async_show_menu(
step_id="zigbee_installation_type",
menu_options=[
"zigbee_intent_recommended",
"zigbee_intent_custom",
],
)
async def async_step_zigbee_intent_recommended(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Select recommended installation type."""
self._zigbee_integration = ZigbeeIntegration.ZHA
return await self._async_continue_picked_firmware()
async def async_step_zigbee_intent_custom(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Select custom installation type."""
return await self.async_step_zigbee_integration()
async def async_step_zigbee_integration(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Select Zigbee integration."""
return self.async_show_menu(
step_id="zigbee_integration",
menu_options=[
"zigbee_integration_zha",
"zigbee_integration_other",
],
)
async def async_step_zigbee_integration_zha(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Select ZHA integration."""
self._zigbee_integration = ZigbeeIntegration.ZHA
return await self._async_continue_picked_firmware()
async def async_step_zigbee_integration_other(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Select other Zigbee integration."""
self._zigbee_integration = ZigbeeIntegration.OTHER
return await self._async_continue_picked_firmware()
async def _async_continue_picked_firmware(self) -> ConfigFlowResult:
"""Continue to the picked firmware step."""
if self._picked_firmware_type == PickedFirmwareType.ZIGBEE:
return await self.async_step_install_zigbee_firmware()
return await self.async_step_prepare_thread_installation()
async def async_step_prepare_thread_installation(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Prepare for Thread installation by stopping the OTBR addon if needed."""
if not is_hassio(self.hass):
return self.async_abort(
reason="not_hassio_thread",
description_placeholders=self._get_translation_placeholders(),
)
otbr_manager = get_otbr_addon_manager(self.hass)
addon_info = await self._async_get_addon_info(otbr_manager)
if addon_info.state == AddonState.RUNNING:
# Stop the addon before continuing to flash firmware
await otbr_manager.async_stop_addon()
return await self.async_step_install_thread_firmware()
async def async_step_finish_thread_installation(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Finish Thread installation by starting the OTBR addon."""
otbr_manager = get_otbr_addon_manager(self.hass)
addon_info = await self._async_get_addon_info(otbr_manager)
if addon_info.state == AddonState.NOT_INSTALLED:
return await self.async_step_install_otbr_addon()
return await self.async_step_start_otbr_addon()
async def async_step_pick_firmware_zigbee(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Pick Zigbee firmware."""
self._picked_firmware_type = PickedFirmwareType.ZIGBEE
return await self.async_step_zigbee_installation_type()
if not await self._probe_firmware_info():
return self.async_abort(
reason="unsupported_firmware",
description_placeholders=self._get_translation_placeholders(),
)
async def async_step_pick_firmware_zigbee_migrate(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Pick Zigbee firmware. Migration is automatic."""
return await self.async_step_pick_firmware_zigbee()
return await self.async_step_install_zigbee_firmware()
async def async_step_install_zigbee_firmware(
self, user_input: dict[str, Any] | None = None
@@ -504,56 +317,90 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
"""Pre-confirm Zigbee setup."""
# This step is necessary to prevent `user_input` from being passed through
return await self.async_step_continue_zigbee()
return await self.async_step_confirm_zigbee()
async def async_step_continue_zigbee(
async def async_step_confirm_zigbee(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Continue Zigbee setup."""
"""Confirm Zigbee setup."""
assert self._device is not None
assert self._hardware_name is not None
if user_input is None:
return self.async_show_form(
step_id="confirm_zigbee",
description_placeholders=self._get_translation_placeholders(),
)
if not await self._probe_firmware_info(probe_methods=(ApplicationType.EZSP,)):
return self.async_abort(
reason="unsupported_firmware",
description_placeholders=self._get_translation_placeholders(),
)
if self._zigbee_integration == ZigbeeIntegration.OTHER:
return self._async_flow_finished()
result = await self.hass.config_entries.flow.async_init(
await self.hass.config_entries.flow.async_init(
ZHA_DOMAIN,
context={"source": "hardware"},
data={
"name": self._hardware_name,
"port": {
"path": self._device,
"baudrate": self.ZIGBEE_BAUDRATE,
"baudrate": 115200,
"flow_control": "hardware",
},
"radio_type": "ezsp",
},
)
return self._continue_zha_flow(result)
@callback
def _continue_zha_flow(self, zha_result: ConfigFlowResult) -> ConfigFlowResult:
"""Continue the ZHA flow."""
raise NotImplementedError
return self._async_flow_finished()
async def _ensure_thread_addon_setup(self) -> ConfigFlowResult | None:
"""Ensure the OTBR addon is set up and not running."""
# We install the OTBR addon no matter what, since it is required to use Thread
if not is_hassio(self.hass):
return self.async_abort(
reason="not_hassio_thread",
description_placeholders=self._get_translation_placeholders(),
)
otbr_manager = get_otbr_addon_manager(self.hass)
addon_info = await self._async_get_addon_info(otbr_manager)
if addon_info.state == AddonState.NOT_INSTALLED:
return await self.async_step_install_otbr_addon()
if addon_info.state == AddonState.RUNNING:
# We only fail setup if we have an instance of OTBR running *and* it's
# pointing to different hardware
if addon_info.options["device"] != self._device:
return self.async_abort(
reason="otbr_addon_already_running",
description_placeholders={
**self._get_translation_placeholders(),
"addon_name": otbr_manager.addon_name,
},
)
# Otherwise, stop the addon before continuing to flash firmware
await otbr_manager.async_stop_addon()
return None
async def async_step_pick_firmware_thread(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Pick Thread firmware."""
self._picked_firmware_type = PickedFirmwareType.THREAD
return await self._async_continue_picked_firmware()
if not await self._probe_firmware_info():
return self.async_abort(
reason="unsupported_firmware",
description_placeholders=self._get_translation_placeholders(),
)
async def async_step_pick_firmware_thread_migrate(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Pick Thread firmware. Migration is automatic."""
return await self.async_step_pick_firmware_thread()
if result := await self._ensure_thread_addon_setup():
return result
return await self.async_step_install_thread_firmware()
async def async_step_install_thread_firmware(
self, user_input: dict[str, Any] | None = None
@@ -597,7 +444,7 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
finally:
self.addon_install_task = None
return self.async_show_progress_done(next_step_id="finish_thread_installation")
return self.async_show_progress_done(next_step_id="install_thread_firmware")
async def async_step_start_otbr_addon(
self, user_input: dict[str, Any] | None = None
@@ -606,8 +453,43 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
otbr_manager = get_otbr_addon_manager(self.hass)
if not self.addon_start_task:
# Before we start the addon, confirm that the correct firmware is running
# and populate `self._probed_firmware_info` with the correct information
if not await self._probe_firmware_info(
probe_methods=(ApplicationType.SPINEL,)
):
return self.async_abort(
reason="unsupported_firmware",
description_placeholders=self._get_translation_placeholders(),
)
addon_info = await self._async_get_addon_info(otbr_manager)
assert self._device is not None
new_addon_config = {
**addon_info.options,
"device": self._device,
"baudrate": 460800,
"flow_control": True,
"autoflash_firmware": False,
}
_LOGGER.debug("Reconfiguring OTBR addon with %s", new_addon_config)
try:
await otbr_manager.async_set_addon_options(new_addon_config)
except AddonError as err:
_LOGGER.error(err)
raise AbortFlow(
"addon_set_config_failed",
description_placeholders={
**self._get_translation_placeholders(),
"addon_name": otbr_manager.addon_name,
},
) from err
self.addon_start_task = self.hass.async_create_task(
self._configure_and_start_otbr_addon()
otbr_manager.async_start_addon_waiting()
)
if not self.addon_start_task.done():
@@ -626,9 +508,7 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
except (AddonError, AbortFlow) as err:
_LOGGER.error(err)
self._failed_addon_name = otbr_manager.addon_name
self._failed_addon_reason = (
err.reason if isinstance(err, AbortFlow) else "addon_start_failed"
)
self._failed_addon_reason = "addon_start_failed"
return self.async_show_progress_done(next_step_id="addon_operation_failed")
finally:
self.addon_start_task = None
@@ -692,21 +572,6 @@ class BaseFirmwareConfigFlow(BaseFirmwareInstallFlow, ConfigFlow):
return await self.async_step_pick_firmware()
@callback
def _continue_zha_flow(self, zha_result: ConfigFlowResult) -> ConfigFlowResult:
"""Continue the ZHA flow."""
next_flow_id = zha_result["flow_id"]
result = self._async_flow_finished()
return (
self.async_create_entry(
title=result["title"] or self._hardware_name,
data=result["data"],
next_flow=(FlowType.CONFIG_FLOW, next_flow_id),
)
| result # update all items with the child result
)
class BaseFirmwareOptionsFlow(BaseFirmwareInstallFlow, OptionsFlow):
"""Zigbee and Thread options flow handlers."""
@@ -764,10 +629,3 @@ class BaseFirmwareOptionsFlow(BaseFirmwareInstallFlow, OptionsFlow):
)
return await super().async_step_pick_firmware_thread(user_input)
@callback
def _continue_zha_flow(self, zha_result: ConfigFlowResult) -> ConfigFlowResult:
"""Continue the ZHA flow."""
# The options flow cannot return a next_flow yet, so we just finish here.
# The options flow should be changed to a reconfigure flow.
return self._async_flow_finished()
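
For illustration only (not part of the diff): both the old and new code paths above skip flashing when the installed firmware is at least as new as the manifest version. A minimal standalone sketch of that downgrade guard, using packaging.version.Version purely as a stand-in for whatever version type the flow actually compares.

from packaging.version import Version  # stand-in for the Version type used in the flow

def firmware_upgrade_needed(installed: str, available: str) -> bool:
    """Return True only when the available firmware is strictly newer."""
    return Version(available) > Version(installed)

assert firmware_upgrade_needed("7.4.3.0", "7.4.4.0")
assert not firmware_upgrade_needed("7.4.4.0", "7.4.4.0")  # same version: skip
assert not firmware_upgrade_needed("7.4.5.0", "7.4.4.0")  # never downgrade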

View File

@@ -3,19 +3,11 @@
"options": {
"step": {
"pick_firmware": {
"title": "Pick your protocol",
"description": "You can use your {model} for a Zigbee or Thread network. Please check what type of devices you want to add to Home Assistant. You can always change this later.",
"title": "Pick your firmware",
"description": "Let's get started with setting up your {model}. Do you want to use it to set up a Zigbee or Thread network?",
"menu_options": {
"pick_firmware_zigbee": "Use as Zigbee adapter",
"pick_firmware_thread": "Use as Thread adapter",
"pick_firmware_zigbee_migrate": "Migrate Zigbee to a new adapter",
"pick_firmware_thread_migrate": "Migrate Thread to a new adapter"
},
"menu_option_descriptions": {
"pick_firmware_zigbee": "Most common protocol.",
"pick_firmware_thread": "Often used for Matter over Thread devices.",
"pick_firmware_zigbee_migrate": "This will move your Zigbee network to the new adapter.",
"pick_firmware_thread_migrate": "This will migrate your Thread Border Router to the new adapter."
"pick_firmware_zigbee": "Zigbee",
"pick_firmware_thread": "Thread"
}
},
"confirm_zigbee": {
@@ -37,29 +29,6 @@
"confirm_otbr": {
"title": "OpenThread Border Router setup complete",
"description": "Your {model} is now an OpenThread Border Router and will show up in the Thread integration."
},
"zigbee_installation_type": {
"title": "Set up Zigbee",
"description": "Choose the installation type for the Zigbee adapter.",
"menu_options": {
"zigbee_intent_recommended": "Recommended installation",
"zigbee_intent_custom": "Custom"
},
"menu_option_descriptions": {
"zigbee_intent_recommended": "Automatically install and configure Zigbee.",
"zigbee_intent_custom": "Manually install and configure Zigbee, for example with Zigbee2MQTT."
}
},
"zigbee_integration": {
"title": "Select Zigbee method",
"menu_options": {
"zigbee_integration_zha": "Zigbee Home Automation",
"zigbee_integration_other": "Other"
},
"menu_option_descriptions": {
"zigbee_integration_zha": "Lets Home Assistant control a Zigbee network.",
"zigbee_integration_other": "For example if you want to use the adapter with Zigbee2MQTT."
}
}
},
"abort": {

View File

@@ -106,7 +106,7 @@ class SkyConnectFirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
firmware_name="OpenThread",
expected_installed_firmware_type=ApplicationType.SPINEL,
step_id="install_thread_firmware",
next_step_id="finish_thread_installation",
next_step_id="start_otbr_addon",
)

Some files were not shown because too many files have changed in this diff