mirror of
https://github.com/home-assistant/core.git
synced 2026-02-26 20:11:30 +01:00
Compare commits
33 Commits
drop-ignor
...
rename_rac
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a13c0effb8 | ||
|
|
9cc3c850aa | ||
|
|
8927960fca | ||
|
|
49b8232260 | ||
|
|
1d5e8a9e5a | ||
|
|
501e095578 | ||
|
|
dc5eab6810 | ||
|
|
25787d2b75 | ||
|
|
e57613af65 | ||
|
|
89ff86a941 | ||
|
|
c62ceee8fc | ||
|
|
d732e3d5ae | ||
|
|
dd78da929e | ||
|
|
c2b74b7612 | ||
|
|
6570b413d4 | ||
|
|
ea7732e9ee | ||
|
|
4c885e7ce8 | ||
|
|
67395f1cf5 | ||
|
|
a552266bfc | ||
|
|
e6c2d54232 | ||
|
|
994eae8412 | ||
|
|
b712207b75 | ||
|
|
fa38f25d4f | ||
|
|
3a27fa782e | ||
|
|
ffeb759aba | ||
|
|
e96da42996 | ||
|
|
ce71e540ae | ||
|
|
9b2bcaed92 | ||
|
|
f564ad3ebe | ||
|
|
bd1b060718 | ||
|
|
f4cab72228 | ||
|
|
733d381a7c | ||
|
|
6fba886edb |
4
.github/workflows/ci.yaml
vendored
4
.github/workflows/ci.yaml
vendored
@@ -705,7 +705,7 @@ jobs:
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
python --version
|
||||
pylint homeassistant
|
||||
pylint --ignore-missing-annotations=y homeassistant
|
||||
- name: Run pylint (partially)
|
||||
if: needs.info.outputs.test_full_suite == 'false'
|
||||
shell: bash
|
||||
@@ -714,7 +714,7 @@ jobs:
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
python --version
|
||||
pylint $(printf "homeassistant/components/%s " ${INTEGRATIONS_GLOB})
|
||||
pylint --ignore-missing-annotations=y $(printf "homeassistant/components/%s " ${INTEGRATIONS_GLOB})
|
||||
|
||||
pylint-tests:
|
||||
name: Check pylint on tests
|
||||
|
||||
@@ -583,6 +583,7 @@ homeassistant.components.vacuum.*
|
||||
homeassistant.components.vallox.*
|
||||
homeassistant.components.valve.*
|
||||
homeassistant.components.velbus.*
|
||||
homeassistant.components.velux.*
|
||||
homeassistant.components.vivotek.*
|
||||
homeassistant.components.vlc_telnet.*
|
||||
homeassistant.components.vodafone_station.*
|
||||
|
||||
2
CODEOWNERS
generated
2
CODEOWNERS
generated
@@ -1082,6 +1082,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/mutesync/ @currentoor
|
||||
/homeassistant/components/my/ @home-assistant/core
|
||||
/tests/components/my/ @home-assistant/core
|
||||
/homeassistant/components/myneomitis/ @l-pr
|
||||
/tests/components/myneomitis/ @l-pr
|
||||
/homeassistant/components/mysensors/ @MartinHjelmare @functionpointer
|
||||
/tests/components/mysensors/ @MartinHjelmare @functionpointer
|
||||
/homeassistant/components/mystrom/ @fabaff
|
||||
|
||||
@@ -23,6 +23,7 @@ from .coordinator import AirOSConfigEntry, AirOSDataUpdateCoordinator
|
||||
|
||||
_PLATFORMS: list[Platform] = [
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.BUTTON,
|
||||
Platform.SENSOR,
|
||||
]
|
||||
|
||||
|
||||
73
homeassistant/components/airos/button.py
Normal file
73
homeassistant/components/airos/button.py
Normal file
@@ -0,0 +1,73 @@
|
||||
"""AirOS button component for Home Assistant."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from airos.exceptions import AirOSException
|
||||
|
||||
from homeassistant.components.button import (
|
||||
ButtonDeviceClass,
|
||||
ButtonEntity,
|
||||
ButtonEntityDescription,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import DOMAIN, AirOSConfigEntry, AirOSDataUpdateCoordinator
|
||||
from .entity import AirOSEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
REBOOT_BUTTON = ButtonEntityDescription(
|
||||
key="reboot",
|
||||
device_class=ButtonDeviceClass.RESTART,
|
||||
entity_registry_enabled_default=False,
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: AirOSConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the AirOS button from a config entry."""
|
||||
async_add_entities([AirOSRebootButton(config_entry.runtime_data, REBOOT_BUTTON)])
|
||||
|
||||
|
||||
class AirOSRebootButton(AirOSEntity, ButtonEntity):
|
||||
"""Button to reboot device."""
|
||||
|
||||
entity_description: ButtonEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: AirOSDataUpdateCoordinator,
|
||||
description: ButtonEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the AirOS client button."""
|
||||
super().__init__(coordinator)
|
||||
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{coordinator.data.derived.mac}_{description.key}"
|
||||
|
||||
async def async_press(self) -> None:
|
||||
"""Handle the button press to reboot the device."""
|
||||
try:
|
||||
await self.coordinator.airos_device.login()
|
||||
result = await self.coordinator.airos_device.reboot()
|
||||
|
||||
except AirOSException as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="cannot_connect",
|
||||
) from err
|
||||
|
||||
if not result:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="reboot_failed",
|
||||
) from None
|
||||
@@ -2,16 +2,20 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from airos.discovery import airos_discover_devices
|
||||
from airos.exceptions import (
|
||||
AirOSConnectionAuthenticationError,
|
||||
AirOSConnectionSetupError,
|
||||
AirOSDataMissingError,
|
||||
AirOSDeviceConnectionError,
|
||||
AirOSEndpointError,
|
||||
AirOSKeyDataMissingError,
|
||||
AirOSListenerError,
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -36,15 +40,27 @@ from homeassistant.helpers.selector import (
|
||||
TextSelectorType,
|
||||
)
|
||||
|
||||
from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN, SECTION_ADVANCED_SETTINGS
|
||||
from .const import (
|
||||
DEFAULT_SSL,
|
||||
DEFAULT_USERNAME,
|
||||
DEFAULT_VERIFY_SSL,
|
||||
DEVICE_NAME,
|
||||
DOMAIN,
|
||||
HOSTNAME,
|
||||
IP_ADDRESS,
|
||||
MAC_ADDRESS,
|
||||
SECTION_ADVANCED_SETTINGS,
|
||||
)
|
||||
from .coordinator import AirOS8
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
# Discovery duration in seconds, airOS announces every 20 seconds
|
||||
DISCOVER_INTERVAL: int = 30
|
||||
|
||||
STEP_DISCOVERY_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST): str,
|
||||
vol.Required(CONF_USERNAME, default="ubnt"): str,
|
||||
vol.Required(CONF_USERNAME, default=DEFAULT_USERNAME): str,
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
vol.Required(SECTION_ADVANCED_SETTINGS): section(
|
||||
vol.Schema(
|
||||
@@ -58,6 +74,10 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
}
|
||||
)
|
||||
|
||||
STEP_MANUAL_DATA_SCHEMA = STEP_DISCOVERY_DATA_SCHEMA.extend(
|
||||
{vol.Required(CONF_HOST): str}
|
||||
)
|
||||
|
||||
|
||||
class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Ubiquiti airOS."""
|
||||
@@ -65,14 +85,29 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
VERSION = 2
|
||||
MINOR_VERSION = 1
|
||||
|
||||
_discovery_task: asyncio.Task | None = None
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the config flow."""
|
||||
super().__init__()
|
||||
self.airos_device: AirOS8
|
||||
self.errors: dict[str, str] = {}
|
||||
self.discovered_devices: dict[str, dict[str, Any]] = {}
|
||||
self.discovery_abort_reason: str | None = None
|
||||
self.selected_device_info: dict[str, Any] = {}
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
self.errors = {}
|
||||
|
||||
return self.async_show_menu(
|
||||
step_id="user", menu_options=["discovery", "manual"]
|
||||
)
|
||||
|
||||
async def async_step_manual(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the manual input of host and credentials."""
|
||||
self.errors = {}
|
||||
@@ -84,7 +119,7 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
data=validated_info["data"],
|
||||
)
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=self.errors
|
||||
step_id="manual", data_schema=STEP_MANUAL_DATA_SCHEMA, errors=self.errors
|
||||
)
|
||||
|
||||
async def _validate_and_get_device_info(
|
||||
@@ -220,3 +255,163 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
),
|
||||
errors=self.errors,
|
||||
)
|
||||
|
||||
async def async_step_discovery(
|
||||
self,
|
||||
discovery_info: dict[str, Any] | None = None,
|
||||
) -> ConfigFlowResult:
|
||||
"""Start the discovery process."""
|
||||
if self._discovery_task and self._discovery_task.done():
|
||||
self._discovery_task = None
|
||||
|
||||
# Handle appropriate 'errors' as abort through progress_done
|
||||
if self.discovery_abort_reason:
|
||||
return self.async_show_progress_done(
|
||||
next_step_id=self.discovery_abort_reason
|
||||
)
|
||||
|
||||
# Abort through progress_done if no devices were found
|
||||
if not self.discovered_devices:
|
||||
_LOGGER.debug(
|
||||
"No (new or unconfigured) airOS devices found during discovery"
|
||||
)
|
||||
return self.async_show_progress_done(
|
||||
next_step_id="discovery_no_devices"
|
||||
)
|
||||
|
||||
# Skip selecting a device if only one new/unconfigured device was found
|
||||
if len(self.discovered_devices) == 1:
|
||||
self.selected_device_info = list(self.discovered_devices.values())[0]
|
||||
return self.async_show_progress_done(next_step_id="configure_device")
|
||||
|
||||
return self.async_show_progress_done(next_step_id="select_device")
|
||||
|
||||
if not self._discovery_task:
|
||||
self.discovered_devices = {}
|
||||
self._discovery_task = self.hass.async_create_task(
|
||||
self._async_run_discovery_with_progress()
|
||||
)
|
||||
|
||||
# Show the progress bar and wait for discovery to complete
|
||||
return self.async_show_progress(
|
||||
step_id="discovery",
|
||||
progress_action="discovering",
|
||||
progress_task=self._discovery_task,
|
||||
description_placeholders={"seconds": str(DISCOVER_INTERVAL)},
|
||||
)
|
||||
|
||||
async def async_step_select_device(
|
||||
self,
|
||||
discovery_info: dict[str, Any] | None = None,
|
||||
) -> ConfigFlowResult:
|
||||
"""Select a discovered device."""
|
||||
if discovery_info is not None:
|
||||
selected_mac = discovery_info[MAC_ADDRESS]
|
||||
self.selected_device_info = self.discovered_devices[selected_mac]
|
||||
return await self.async_step_configure_device()
|
||||
|
||||
list_options = {
|
||||
mac: f"{device.get(HOSTNAME, mac)} ({device.get(IP_ADDRESS, DEVICE_NAME)})"
|
||||
for mac, device in self.discovered_devices.items()
|
||||
}
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="select_device",
|
||||
data_schema=vol.Schema({vol.Required(MAC_ADDRESS): vol.In(list_options)}),
|
||||
)
|
||||
|
||||
async def async_step_configure_device(
|
||||
self,
|
||||
user_input: dict[str, Any] | None = None,
|
||||
) -> ConfigFlowResult:
|
||||
"""Configure the selected device."""
|
||||
self.errors = {}
|
||||
|
||||
if user_input is not None:
|
||||
config_data = {
|
||||
**user_input,
|
||||
CONF_HOST: self.selected_device_info[IP_ADDRESS],
|
||||
}
|
||||
validated_info = await self._validate_and_get_device_info(config_data)
|
||||
|
||||
if validated_info:
|
||||
return self.async_create_entry(
|
||||
title=validated_info["title"],
|
||||
data=validated_info["data"],
|
||||
)
|
||||
|
||||
device_name = self.selected_device_info.get(
|
||||
HOSTNAME, self.selected_device_info.get(IP_ADDRESS, DEVICE_NAME)
|
||||
)
|
||||
return self.async_show_form(
|
||||
step_id="configure_device",
|
||||
data_schema=STEP_DISCOVERY_DATA_SCHEMA,
|
||||
errors=self.errors,
|
||||
description_placeholders={"device_name": device_name},
|
||||
)
|
||||
|
||||
async def _async_run_discovery_with_progress(self) -> None:
|
||||
"""Run discovery with an embedded progress update loop."""
|
||||
progress_bar = self.hass.async_create_task(self._async_update_progress_bar())
|
||||
|
||||
known_mac_addresses = {
|
||||
entry.unique_id.lower()
|
||||
for entry in self.hass.config_entries.async_entries(DOMAIN)
|
||||
if entry.unique_id
|
||||
}
|
||||
|
||||
try:
|
||||
devices = await airos_discover_devices(DISCOVER_INTERVAL)
|
||||
except AirOSEndpointError:
|
||||
self.discovery_abort_reason = "discovery_detect_error"
|
||||
except AirOSListenerError:
|
||||
self.discovery_abort_reason = "discovery_listen_error"
|
||||
except Exception:
|
||||
self.discovery_abort_reason = "discovery_failed"
|
||||
_LOGGER.exception("An error occurred during discovery")
|
||||
else:
|
||||
self.discovered_devices = {
|
||||
mac_addr: info
|
||||
for mac_addr, info in devices.items()
|
||||
if mac_addr.lower() not in known_mac_addresses
|
||||
}
|
||||
_LOGGER.debug(
|
||||
"Discovery task finished. Found %s new devices",
|
||||
len(self.discovered_devices),
|
||||
)
|
||||
finally:
|
||||
progress_bar.cancel()
|
||||
|
||||
async def _async_update_progress_bar(self) -> None:
|
||||
"""Update progress bar every second."""
|
||||
try:
|
||||
for i in range(DISCOVER_INTERVAL):
|
||||
progress = (i + 1) / DISCOVER_INTERVAL
|
||||
self.async_update_progress(progress)
|
||||
await asyncio.sleep(1)
|
||||
except asyncio.CancelledError:
|
||||
pass
|
||||
|
||||
async def async_step_discovery_no_devices(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Abort if discovery finds no (unconfigured) devices."""
|
||||
return self.async_abort(reason="no_devices_found")
|
||||
|
||||
async def async_step_discovery_listen_error(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Abort if discovery is unable to listen on the port."""
|
||||
return self.async_abort(reason="listen_error")
|
||||
|
||||
async def async_step_discovery_detect_error(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Abort if discovery receives incorrect broadcasts."""
|
||||
return self.async_abort(reason="detect_error")
|
||||
|
||||
async def async_step_discovery_failed(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Abort if discovery fails for other reasons."""
|
||||
return self.async_abort(reason="discovery_failed")
|
||||
|
||||
@@ -12,3 +12,10 @@ DEFAULT_VERIFY_SSL = False
|
||||
DEFAULT_SSL = True
|
||||
|
||||
SECTION_ADVANCED_SETTINGS = "advanced_settings"
|
||||
|
||||
# Discovery related
|
||||
DEFAULT_USERNAME = "ubnt"
|
||||
HOSTNAME = "hostname"
|
||||
IP_ADDRESS = "ip_address"
|
||||
MAC_ADDRESS = "mac_address"
|
||||
DEVICE_NAME = "airOS device"
|
||||
|
||||
@@ -2,6 +2,10 @@
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"detect_error": "Unable to process discovered devices data, check the documentation for supported devices",
|
||||
"discovery_failed": "Unable to start discovery, check logs for details",
|
||||
"listen_error": "Unable to start listening for devices",
|
||||
"no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
|
||||
"unique_id_mismatch": "Re-authentication should be used for the same device not a new one"
|
||||
@@ -13,37 +17,36 @@
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"flow_title": "Ubiquiti airOS device",
|
||||
"progress": {
|
||||
"connecting": "Connecting to the airOS device",
|
||||
"discovering": "Listening for any airOS devices for {seconds} seconds"
|
||||
},
|
||||
"step": {
|
||||
"reauth_confirm": {
|
||||
"configure_device": {
|
||||
"data": {
|
||||
"password": "[%key:common::config_flow::data::password%]"
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"username": "[%key:common::config_flow::data::username%]"
|
||||
},
|
||||
"data_description": {
|
||||
"password": "[%key:component::airos::config::step::user::data_description::password%]"
|
||||
}
|
||||
},
|
||||
"reconfigure": {
|
||||
"data": {
|
||||
"password": "[%key:common::config_flow::data::password%]"
|
||||
},
|
||||
"data_description": {
|
||||
"password": "[%key:component::airos::config::step::user::data_description::password%]"
|
||||
"password": "[%key:component::airos::config::step::manual::data_description::password%]",
|
||||
"username": "[%key:component::airos::config::step::manual::data_description::username%]"
|
||||
},
|
||||
"description": "Enter the username and password for {device_name}",
|
||||
"sections": {
|
||||
"advanced_settings": {
|
||||
"data": {
|
||||
"ssl": "[%key:component::airos::config::step::user::sections::advanced_settings::data::ssl%]",
|
||||
"ssl": "[%key:component::airos::config::step::manual::sections::advanced_settings::data::ssl%]",
|
||||
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
|
||||
},
|
||||
"data_description": {
|
||||
"ssl": "[%key:component::airos::config::step::user::sections::advanced_settings::data_description::ssl%]",
|
||||
"verify_ssl": "[%key:component::airos::config::step::user::sections::advanced_settings::data_description::verify_ssl%]"
|
||||
"ssl": "[%key:component::airos::config::step::manual::sections::advanced_settings::data_description::ssl%]",
|
||||
"verify_ssl": "[%key:component::airos::config::step::manual::sections::advanced_settings::data_description::verify_ssl%]"
|
||||
},
|
||||
"name": "[%key:component::airos::config::step::user::sections::advanced_settings::name%]"
|
||||
"name": "[%key:component::airos::config::step::manual::sections::advanced_settings::name%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"user": {
|
||||
"manual": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
@@ -67,6 +70,49 @@
|
||||
"name": "Advanced settings"
|
||||
}
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"data": {
|
||||
"password": "[%key:common::config_flow::data::password%]"
|
||||
},
|
||||
"data_description": {
|
||||
"password": "[%key:component::airos::config::step::manual::data_description::password%]"
|
||||
}
|
||||
},
|
||||
"reconfigure": {
|
||||
"data": {
|
||||
"password": "[%key:common::config_flow::data::password%]"
|
||||
},
|
||||
"data_description": {
|
||||
"password": "[%key:component::airos::config::step::manual::data_description::password%]"
|
||||
},
|
||||
"sections": {
|
||||
"advanced_settings": {
|
||||
"data": {
|
||||
"ssl": "[%key:component::airos::config::step::manual::sections::advanced_settings::data::ssl%]",
|
||||
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
|
||||
},
|
||||
"data_description": {
|
||||
"ssl": "[%key:component::airos::config::step::manual::sections::advanced_settings::data_description::ssl%]",
|
||||
"verify_ssl": "[%key:component::airos::config::step::manual::sections::advanced_settings::data_description::verify_ssl%]"
|
||||
},
|
||||
"name": "[%key:component::airos::config::step::manual::sections::advanced_settings::name%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"select_device": {
|
||||
"data": {
|
||||
"mac_address": "Select the device to configure"
|
||||
},
|
||||
"data_description": {
|
||||
"mac_address": "Select the device MAC address"
|
||||
}
|
||||
},
|
||||
"user": {
|
||||
"menu_options": {
|
||||
"discovery": "Listen for airOS devices on the network",
|
||||
"manual": "Manually configure airOS device"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -157,6 +203,9 @@
|
||||
},
|
||||
"key_data_missing": {
|
||||
"message": "Key data not returned from device"
|
||||
},
|
||||
"reboot_failed": {
|
||||
"message": "The device did not accept the reboot request. Try again, or check your device web interface for errors."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -132,11 +132,21 @@ class ContentDetails:
|
||||
"""Native data for AssistantContent."""
|
||||
|
||||
citation_details: list[CitationDetails] = field(default_factory=list)
|
||||
thinking_signature: str | None = None
|
||||
redacted_thinking: str | None = None
|
||||
|
||||
def has_content(self) -> bool:
|
||||
"""Check if there is any content."""
|
||||
"""Check if there is any text content."""
|
||||
return any(detail.length > 0 for detail in self.citation_details)
|
||||
|
||||
def __bool__(self) -> bool:
|
||||
"""Check if there is any thinking content or citations."""
|
||||
return (
|
||||
self.thinking_signature is not None
|
||||
or self.redacted_thinking is not None
|
||||
or self.has_citations()
|
||||
)
|
||||
|
||||
def has_citations(self) -> bool:
|
||||
"""Check if there are any citations."""
|
||||
return any(detail.citations for detail in self.citation_details)
|
||||
@@ -246,29 +256,28 @@ def _convert_content(
|
||||
content=[],
|
||||
)
|
||||
)
|
||||
elif isinstance(messages[-1]["content"], str):
|
||||
messages[-1]["content"] = [
|
||||
TextBlockParam(type="text", text=messages[-1]["content"]),
|
||||
]
|
||||
|
||||
if isinstance(content.native, ThinkingBlock):
|
||||
messages[-1]["content"].append( # type: ignore[union-attr]
|
||||
ThinkingBlockParam(
|
||||
type="thinking",
|
||||
thinking=content.thinking_content or "",
|
||||
signature=content.native.signature,
|
||||
if isinstance(content.native, ContentDetails):
|
||||
if content.native.thinking_signature:
|
||||
messages[-1]["content"].append( # type: ignore[union-attr]
|
||||
ThinkingBlockParam(
|
||||
type="thinking",
|
||||
thinking=content.thinking_content or "",
|
||||
signature=content.native.thinking_signature,
|
||||
)
|
||||
)
|
||||
)
|
||||
elif isinstance(content.native, RedactedThinkingBlock):
|
||||
redacted_thinking_block = RedactedThinkingBlockParam(
|
||||
type="redacted_thinking",
|
||||
data=content.native.data,
|
||||
)
|
||||
if isinstance(messages[-1]["content"], str):
|
||||
messages[-1]["content"] = [
|
||||
TextBlockParam(type="text", text=messages[-1]["content"]),
|
||||
redacted_thinking_block,
|
||||
]
|
||||
else:
|
||||
messages[-1]["content"].append( # type: ignore[attr-defined]
|
||||
redacted_thinking_block
|
||||
if content.native.redacted_thinking:
|
||||
messages[-1]["content"].append( # type: ignore[union-attr]
|
||||
RedactedThinkingBlockParam(
|
||||
type="redacted_thinking",
|
||||
data=content.native.redacted_thinking,
|
||||
)
|
||||
)
|
||||
|
||||
if content.content:
|
||||
current_index = 0
|
||||
for detail in (
|
||||
@@ -309,6 +318,7 @@ def _convert_content(
|
||||
text=content.content[current_index:],
|
||||
)
|
||||
)
|
||||
|
||||
if content.tool_calls:
|
||||
messages[-1]["content"].extend( # type: ignore[union-attr]
|
||||
[
|
||||
@@ -328,6 +338,14 @@ def _convert_content(
|
||||
for tool_call in content.tool_calls
|
||||
]
|
||||
)
|
||||
|
||||
if (
|
||||
isinstance(messages[-1]["content"], list)
|
||||
and len(messages[-1]["content"]) == 1
|
||||
and messages[-1]["content"][0]["type"] == "text"
|
||||
):
|
||||
# If there is only one text block, simplify the content to a string
|
||||
messages[-1]["content"] = messages[-1]["content"][0]["text"]
|
||||
else:
|
||||
# Note: We don't pass SystemContent here as its passed to the API as the prompt
|
||||
raise TypeError(f"Unexpected content type: {type(content)}")
|
||||
@@ -379,8 +397,7 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
content_details = ContentDetails()
|
||||
content_details.add_citation_detail()
|
||||
input_usage: Usage | None = None
|
||||
has_native = False
|
||||
first_block: bool
|
||||
first_block: bool = True
|
||||
|
||||
async for response in stream:
|
||||
LOGGER.debug("Received response: %s", response)
|
||||
@@ -401,13 +418,12 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
current_tool_args = ""
|
||||
if response.content_block.name == output_tool:
|
||||
if first_block or content_details.has_content():
|
||||
if content_details.has_citations():
|
||||
if content_details:
|
||||
content_details.delete_empty()
|
||||
yield {"native": content_details}
|
||||
content_details = ContentDetails()
|
||||
content_details.add_citation_detail()
|
||||
yield {"role": "assistant"}
|
||||
has_native = False
|
||||
first_block = False
|
||||
elif isinstance(response.content_block, TextBlock):
|
||||
if ( # Do not start a new assistant content just for citations, concatenate consecutive blocks with citations instead.
|
||||
@@ -418,12 +434,11 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
and content_details.has_content()
|
||||
)
|
||||
):
|
||||
if content_details.has_citations():
|
||||
if content_details:
|
||||
content_details.delete_empty()
|
||||
yield {"native": content_details}
|
||||
content_details = ContentDetails()
|
||||
yield {"role": "assistant"}
|
||||
has_native = False
|
||||
first_block = False
|
||||
content_details.add_citation_detail()
|
||||
if response.content_block.text:
|
||||
@@ -432,14 +447,13 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
)
|
||||
yield {"content": response.content_block.text}
|
||||
elif isinstance(response.content_block, ThinkingBlock):
|
||||
if first_block or has_native:
|
||||
if content_details.has_citations():
|
||||
if first_block or content_details.thinking_signature:
|
||||
if content_details:
|
||||
content_details.delete_empty()
|
||||
yield {"native": content_details}
|
||||
content_details = ContentDetails()
|
||||
content_details.add_citation_detail()
|
||||
yield {"role": "assistant"}
|
||||
has_native = False
|
||||
first_block = False
|
||||
elif isinstance(response.content_block, RedactedThinkingBlock):
|
||||
LOGGER.debug(
|
||||
@@ -447,17 +461,15 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
"encrypted for safety reasons. This doesn’t affect the quality of "
|
||||
"responses"
|
||||
)
|
||||
if has_native:
|
||||
if content_details.has_citations():
|
||||
if first_block or content_details.redacted_thinking:
|
||||
if content_details:
|
||||
content_details.delete_empty()
|
||||
yield {"native": content_details}
|
||||
content_details = ContentDetails()
|
||||
content_details.add_citation_detail()
|
||||
yield {"role": "assistant"}
|
||||
has_native = False
|
||||
first_block = False
|
||||
yield {"native": response.content_block}
|
||||
has_native = True
|
||||
content_details.redacted_thinking = response.content_block.data
|
||||
elif isinstance(response.content_block, ServerToolUseBlock):
|
||||
current_tool_block = ServerToolUseBlockParam(
|
||||
type="server_tool_use",
|
||||
@@ -467,7 +479,7 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
)
|
||||
current_tool_args = ""
|
||||
elif isinstance(response.content_block, WebSearchToolResultBlock):
|
||||
if content_details.has_citations():
|
||||
if content_details:
|
||||
content_details.delete_empty()
|
||||
yield {"native": content_details}
|
||||
content_details = ContentDetails()
|
||||
@@ -510,19 +522,16 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
else:
|
||||
current_tool_args += response.delta.partial_json
|
||||
elif isinstance(response.delta, TextDelta):
|
||||
content_details.citation_details[-1].length += len(response.delta.text)
|
||||
yield {"content": response.delta.text}
|
||||
elif isinstance(response.delta, ThinkingDelta):
|
||||
yield {"thinking_content": response.delta.thinking}
|
||||
elif isinstance(response.delta, SignatureDelta):
|
||||
yield {
|
||||
"native": ThinkingBlock(
|
||||
type="thinking",
|
||||
thinking="",
|
||||
signature=response.delta.signature,
|
||||
if response.delta.text:
|
||||
content_details.citation_details[-1].length += len(
|
||||
response.delta.text
|
||||
)
|
||||
}
|
||||
has_native = True
|
||||
yield {"content": response.delta.text}
|
||||
elif isinstance(response.delta, ThinkingDelta):
|
||||
if response.delta.thinking:
|
||||
yield {"thinking_content": response.delta.thinking}
|
||||
elif isinstance(response.delta, SignatureDelta):
|
||||
content_details.thinking_signature = response.delta.signature
|
||||
elif isinstance(response.delta, CitationsDelta):
|
||||
content_details.add_citation(response.delta.citation)
|
||||
elif isinstance(response, RawContentBlockStopEvent):
|
||||
@@ -549,7 +558,7 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
if response.delta.stop_reason == "refusal":
|
||||
raise HomeAssistantError("Potential policy violation detected")
|
||||
elif isinstance(response, RawMessageStopEvent):
|
||||
if content_details.has_citations():
|
||||
if content_details:
|
||||
content_details.delete_empty()
|
||||
yield {"native": content_details}
|
||||
content_details = ContentDetails()
|
||||
|
||||
@@ -10,15 +10,7 @@ rules:
|
||||
Integration does not poll.
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage:
|
||||
status: todo
|
||||
comment: |
|
||||
* Remove integration setup from the config flow init test
|
||||
* Make `mock_setup_entry` a separate fixture
|
||||
* Use the mock_config_entry fixture in `test_duplicate_entry`
|
||||
* `test_duplicate_entry`: Patch `homeassistant.components.anthropic.config_flow.anthropic.resources.models.AsyncModels.list`
|
||||
* Fix docstring and name for `test_form_invalid_auth` (does not only test auth)
|
||||
* In `test_form_invalid_auth`, make sure the test run until CREATE_ENTRY to test that the flow is able to recover
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
|
||||
@@ -21,7 +21,6 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.util.enum import try_parse_enum
|
||||
|
||||
from . import BSBLanConfigEntry, BSBLanData
|
||||
from .const import ATTR_TARGET_TEMPERATURE, DOMAIN
|
||||
@@ -113,7 +112,7 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity):
|
||||
return target_temp.value
|
||||
|
||||
@property
|
||||
def _hvac_mode_value(self) -> int | str | None:
|
||||
def _hvac_mode_value(self) -> int | None:
|
||||
"""Return the raw hvac_mode value from the coordinator."""
|
||||
if (hvac_mode := self.coordinator.data.state.hvac_mode) is None:
|
||||
return None
|
||||
@@ -124,16 +123,14 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity):
|
||||
"""Return hvac operation ie. heat, cool mode."""
|
||||
if (hvac_mode_value := self._hvac_mode_value) is None:
|
||||
return None
|
||||
# BSB-Lan returns integer values: 0=off, 1=auto, 2=eco, 3=heat
|
||||
if isinstance(hvac_mode_value, int):
|
||||
return BSBLAN_TO_HA_HVAC_MODE.get(hvac_mode_value)
|
||||
return try_parse_enum(HVACMode, hvac_mode_value)
|
||||
return BSBLAN_TO_HA_HVAC_MODE.get(hvac_mode_value)
|
||||
|
||||
@property
|
||||
def hvac_action(self) -> HVACAction | None:
|
||||
"""Return the current running hvac action."""
|
||||
action = self.coordinator.data.state.hvac_action
|
||||
if not action or not isinstance(action.value, int):
|
||||
if (
|
||||
action := self.coordinator.data.state.hvac_action
|
||||
) is None or action.value is None:
|
||||
return None
|
||||
category = get_hvac_action_category(action.value)
|
||||
return HVACAction(category.name.lower())
|
||||
|
||||
@@ -17,24 +17,24 @@ async def async_get_config_entry_diagnostics(
|
||||
|
||||
# Build diagnostic data from both coordinators
|
||||
diagnostics = {
|
||||
"info": data.info.to_dict(),
|
||||
"device": data.device.to_dict(),
|
||||
"info": data.info.model_dump(),
|
||||
"device": data.device.model_dump(),
|
||||
"fast_coordinator_data": {
|
||||
"state": data.fast_coordinator.data.state.to_dict(),
|
||||
"sensor": data.fast_coordinator.data.sensor.to_dict(),
|
||||
"dhw": data.fast_coordinator.data.dhw.to_dict(),
|
||||
"state": data.fast_coordinator.data.state.model_dump(),
|
||||
"sensor": data.fast_coordinator.data.sensor.model_dump(),
|
||||
"dhw": data.fast_coordinator.data.dhw.model_dump(),
|
||||
},
|
||||
"static": data.static.to_dict(),
|
||||
"static": data.static.model_dump(),
|
||||
}
|
||||
|
||||
# Add DHW config and schedule from slow coordinator if available
|
||||
if data.slow_coordinator.data:
|
||||
slow_data = {}
|
||||
if data.slow_coordinator.data.dhw_config:
|
||||
slow_data["dhw_config"] = data.slow_coordinator.data.dhw_config.to_dict()
|
||||
slow_data["dhw_config"] = data.slow_coordinator.data.dhw_config.model_dump()
|
||||
if data.slow_coordinator.data.dhw_schedule:
|
||||
slow_data["dhw_schedule"] = (
|
||||
data.slow_coordinator.data.dhw_schedule.to_dict()
|
||||
data.slow_coordinator.data.dhw_schedule.model_dump()
|
||||
)
|
||||
if slow_data:
|
||||
diagnostics["slow_coordinator_data"] = slow_data
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["bsblan"],
|
||||
"requirements": ["python-bsblan==4.2.1"],
|
||||
"requirements": ["python-bsblan==5.0.1"],
|
||||
"zeroconf": [
|
||||
{
|
||||
"name": "bsb-lan*",
|
||||
|
||||
@@ -110,12 +110,11 @@ class BSBLANWaterHeater(BSBLanDualCoordinatorEntity, WaterHeaterEntity):
|
||||
@property
|
||||
def current_operation(self) -> str | None:
|
||||
"""Return current operation."""
|
||||
if (operating_mode := self.coordinator.data.dhw.operating_mode) is None:
|
||||
if (
|
||||
operating_mode := self.coordinator.data.dhw.operating_mode
|
||||
) is None or operating_mode.value is None:
|
||||
return None
|
||||
# The operating_mode.value is an integer (0=Off, 1=On, 2=Eco)
|
||||
if isinstance(operating_mode.value, int):
|
||||
return BSBLAN_TO_HA_OPERATION_MODE.get(operating_mode.value)
|
||||
return None
|
||||
return BSBLAN_TO_HA_OPERATION_MODE.get(operating_mode.value)
|
||||
|
||||
@property
|
||||
def current_temperature(self) -> float | None:
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
"codeowners": [],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/enocean",
|
||||
"integration_type": "device",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["enocean"],
|
||||
"requirements": ["enocean==0.50"],
|
||||
|
||||
@@ -11,5 +11,10 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"device": {
|
||||
"google_translate": {
|
||||
"name": "Google Translate {lang} {tld}"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -19,6 +19,7 @@ from homeassistant.components.tts import (
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
@@ -26,6 +27,7 @@ from .const import (
|
||||
CONF_TLD,
|
||||
DEFAULT_LANG,
|
||||
DEFAULT_TLD,
|
||||
DOMAIN,
|
||||
MAP_LANG_TLD,
|
||||
SUPPORT_LANGUAGES,
|
||||
SUPPORT_TLD,
|
||||
@@ -66,6 +68,9 @@ async def async_setup_entry(
|
||||
class GoogleTTSEntity(TextToSpeechEntity):
|
||||
"""The Google speech API entity."""
|
||||
|
||||
_attr_supported_languages = SUPPORT_LANGUAGES
|
||||
_attr_supported_options = SUPPORT_OPTIONS
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry, lang: str, tld: str) -> None:
|
||||
"""Init Google TTS service."""
|
||||
if lang in MAP_LANG_TLD:
|
||||
@@ -77,20 +82,15 @@ class GoogleTTSEntity(TextToSpeechEntity):
|
||||
self._attr_name = f"Google Translate {self._lang} {self._tld}"
|
||||
self._attr_unique_id = config_entry.entry_id
|
||||
|
||||
@property
|
||||
def default_language(self) -> str:
|
||||
"""Return the default language."""
|
||||
return self._lang
|
||||
|
||||
@property
|
||||
def supported_languages(self) -> list[str]:
|
||||
"""Return list of supported languages."""
|
||||
return SUPPORT_LANGUAGES
|
||||
|
||||
@property
|
||||
def supported_options(self) -> list[str]:
|
||||
"""Return a list of supported options."""
|
||||
return SUPPORT_OPTIONS
|
||||
self._attr_device_info = DeviceInfo(
|
||||
entry_type=DeviceEntryType.SERVICE,
|
||||
identifiers={(DOMAIN, config_entry.entry_id)},
|
||||
manufacturer="Google",
|
||||
model="Google Translate TTS",
|
||||
translation_key="google_translate",
|
||||
translation_placeholders={"lang": self._lang, "tld": self._tld},
|
||||
)
|
||||
self._attr_default_language = self._lang
|
||||
|
||||
def get_tts_audio(
|
||||
self, message: str, language: str, options: dict[str, Any] | None = None
|
||||
|
||||
@@ -10,7 +10,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .coordinator import HomevoltConfigEntry, HomevoltDataUpdateCoordinator
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.SENSOR]
|
||||
PLATFORMS: list[Platform] = [Platform.SENSOR, Platform.SWITCH]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: HomevoltConfigEntry) -> bool:
|
||||
|
||||
67
homeassistant/components/homevolt/entity.py
Normal file
67
homeassistant/components/homevolt/entity.py
Normal file
@@ -0,0 +1,67 @@
|
||||
"""Shared entity helpers for Homevolt."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable, Coroutine
|
||||
from typing import Any, Concatenate
|
||||
|
||||
from homevolt import HomevoltAuthenticationError, HomevoltConnectionError, HomevoltError
|
||||
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, HomeAssistantError
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN, MANUFACTURER
|
||||
from .coordinator import HomevoltDataUpdateCoordinator
|
||||
|
||||
|
||||
class HomevoltEntity(CoordinatorEntity[HomevoltDataUpdateCoordinator]):
|
||||
"""Base Homevolt entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self, coordinator: HomevoltDataUpdateCoordinator, device_identifier: str
|
||||
) -> None:
|
||||
"""Initialize the Homevolt entity."""
|
||||
super().__init__(coordinator)
|
||||
device_id = coordinator.data.unique_id
|
||||
device_metadata = coordinator.data.device_metadata.get(device_identifier)
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, f"{device_id}_{device_identifier}")},
|
||||
configuration_url=coordinator.client.base_url,
|
||||
manufacturer=MANUFACTURER,
|
||||
model=device_metadata.model if device_metadata else None,
|
||||
name=device_metadata.name if device_metadata else None,
|
||||
)
|
||||
|
||||
|
||||
def homevolt_exception_handler[_HomevoltEntityT: HomevoltEntity, **_P](
|
||||
func: Callable[Concatenate[_HomevoltEntityT, _P], Coroutine[Any, Any, Any]],
|
||||
) -> Callable[Concatenate[_HomevoltEntityT, _P], Coroutine[Any, Any, None]]:
|
||||
"""Decorate Homevolt calls to handle exceptions."""
|
||||
|
||||
async def handler(
|
||||
self: _HomevoltEntityT, *args: _P.args, **kwargs: _P.kwargs
|
||||
) -> None:
|
||||
try:
|
||||
await func(self, *args, **kwargs)
|
||||
except HomevoltAuthenticationError as error:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="auth_failed",
|
||||
) from error
|
||||
except HomevoltConnectionError as error:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="communication_error",
|
||||
translation_placeholders={"error": str(error)},
|
||||
) from error
|
||||
except HomevoltError as error:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="unknown_error",
|
||||
translation_placeholders={"error": str(error)},
|
||||
) from error
|
||||
|
||||
return handler
|
||||
@@ -7,7 +7,7 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["homevolt==0.4.4"],
|
||||
"requirements": ["homevolt==0.5.0"],
|
||||
"zeroconf": [
|
||||
{
|
||||
"name": "homevolt*",
|
||||
|
||||
@@ -22,13 +22,11 @@ from homeassistant.const import (
|
||||
UnitOfTemperature,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN, MANUFACTURER
|
||||
from .coordinator import HomevoltConfigEntry, HomevoltDataUpdateCoordinator
|
||||
from .entity import HomevoltEntity
|
||||
|
||||
PARALLEL_UPDATES = 0 # Coordinator-based updates
|
||||
|
||||
@@ -309,11 +307,10 @@ async def async_setup_entry(
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
class HomevoltSensor(CoordinatorEntity[HomevoltDataUpdateCoordinator], SensorEntity):
|
||||
class HomevoltSensor(HomevoltEntity, SensorEntity):
|
||||
"""Representation of a Homevolt sensor."""
|
||||
|
||||
entity_description: SensorEntityDescription
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@@ -322,24 +319,12 @@ class HomevoltSensor(CoordinatorEntity[HomevoltDataUpdateCoordinator], SensorEnt
|
||||
sensor_key: str,
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = description
|
||||
unique_id = coordinator.data.unique_id
|
||||
self._attr_unique_id = f"{unique_id}_{sensor_key}"
|
||||
sensor_data = coordinator.data.sensors[sensor_key]
|
||||
super().__init__(coordinator, sensor_data.device_identifier)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{coordinator.data.unique_id}_{sensor_key}"
|
||||
self._sensor_key = sensor_key
|
||||
|
||||
device_metadata = coordinator.data.device_metadata.get(
|
||||
sensor_data.device_identifier
|
||||
)
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, f"{unique_id}_{sensor_data.device_identifier}")},
|
||||
configuration_url=coordinator.client.base_url,
|
||||
manufacturer=MANUFACTURER,
|
||||
model=device_metadata.model if device_metadata else None,
|
||||
name=device_metadata.name if device_metadata else None,
|
||||
)
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if entity is available."""
|
||||
|
||||
@@ -160,6 +160,22 @@
|
||||
"tmin": {
|
||||
"name": "Minimum temperature"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"local_mode": {
|
||||
"name": "Local mode"
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"auth_failed": {
|
||||
"message": "[%key:common::config_flow::error::invalid_auth%]"
|
||||
},
|
||||
"communication_error": {
|
||||
"message": "[%key:common::config_flow::error::cannot_connect%]"
|
||||
},
|
||||
"unknown_error": {
|
||||
"message": "[%key:common::config_flow::error::unknown%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
55
homeassistant/components/homevolt/switch.py
Normal file
55
homeassistant/components/homevolt/switch.py
Normal file
@@ -0,0 +1,55 @@
|
||||
"""Support for Homevolt switch entities."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import HomevoltConfigEntry, HomevoltDataUpdateCoordinator
|
||||
from .entity import HomevoltEntity, homevolt_exception_handler
|
||||
|
||||
PARALLEL_UPDATES = 0 # Coordinator-based updates
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: HomevoltConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Homevolt switch entities."""
|
||||
coordinator = entry.runtime_data
|
||||
async_add_entities([HomevoltLocalModeSwitch(coordinator)])
|
||||
|
||||
|
||||
class HomevoltLocalModeSwitch(HomevoltEntity, SwitchEntity):
|
||||
"""Switch entity for Homevolt local mode."""
|
||||
|
||||
_attr_entity_category = EntityCategory.CONFIG
|
||||
_attr_translation_key = "local_mode"
|
||||
|
||||
def __init__(self, coordinator: HomevoltDataUpdateCoordinator) -> None:
|
||||
"""Initialize the switch entity."""
|
||||
self._attr_unique_id = f"{coordinator.data.unique_id}_local_mode"
|
||||
device_id = coordinator.data.unique_id
|
||||
super().__init__(coordinator, f"ems_{device_id}")
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if local mode is enabled."""
|
||||
return self.coordinator.client.local_mode_enabled
|
||||
|
||||
@homevolt_exception_handler
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Enable local mode."""
|
||||
await self.coordinator.client.enable_local_mode()
|
||||
await self.coordinator.async_request_refresh()
|
||||
|
||||
@homevolt_exception_handler
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Disable local mode."""
|
||||
await self.coordinator.client.disable_local_mode()
|
||||
await self.coordinator.async_request_refresh()
|
||||
@@ -9,7 +9,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from .const import LOGGER
|
||||
from .coordinator import IntelliClimaConfigEntry, IntelliClimaCoordinator
|
||||
|
||||
PLATFORMS = [Platform.FAN]
|
||||
PLATFORMS = [Platform.FAN, Platform.SELECT]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
|
||||
@@ -27,8 +27,6 @@ class IntelliClimaEntity(CoordinatorEntity[IntelliClimaCoordinator]):
|
||||
"""Class initializer."""
|
||||
super().__init__(coordinator=coordinator)
|
||||
|
||||
self._attr_unique_id = device.id
|
||||
|
||||
# Make this HA "device" use the IntelliClima device name.
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, device.id)},
|
||||
|
||||
@@ -62,6 +62,7 @@ class IntelliClimaVMCFan(IntelliClimaECOEntity, FanEntity):
|
||||
super().__init__(coordinator, device)
|
||||
|
||||
self._speed_range = (int(FanSpeed.sleep), int(FanSpeed.high))
|
||||
self._attr_unique_id = device.id
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
|
||||
@@ -49,7 +49,7 @@ rules:
|
||||
comment: |
|
||||
Unclear if discovery is possible.
|
||||
docs-data-update: done
|
||||
docs-examples: todo
|
||||
docs-examples: done
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
|
||||
96
homeassistant/components/intelliclima/select.py
Normal file
96
homeassistant/components/intelliclima/select.py
Normal file
@@ -0,0 +1,96 @@
|
||||
"""Select platform for IntelliClima VMC."""
|
||||
|
||||
from pyintelliclima.const import FanMode, FanSpeed
|
||||
from pyintelliclima.intelliclima_types import IntelliClimaECO
|
||||
|
||||
from homeassistant.components.select import SelectEntity
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import IntelliClimaConfigEntry, IntelliClimaCoordinator
|
||||
from .entity import IntelliClimaECOEntity
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
FAN_MODE_TO_INTELLICLIMA_MODE = {
|
||||
"forward": FanMode.inward,
|
||||
"reverse": FanMode.outward,
|
||||
"alternate": FanMode.alternate,
|
||||
"sensor": FanMode.sensor,
|
||||
}
|
||||
INTELLICLIMA_MODE_TO_FAN_MODE = {v: k for k, v in FAN_MODE_TO_INTELLICLIMA_MODE.items()}
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: IntelliClimaConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up IntelliClima VMC fan mode select."""
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
entities: list[IntelliClimaVMCFanModeSelect] = [
|
||||
IntelliClimaVMCFanModeSelect(
|
||||
coordinator=coordinator,
|
||||
device=ecocomfort2,
|
||||
)
|
||||
for ecocomfort2 in coordinator.data.ecocomfort2_devices.values()
|
||||
]
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
class IntelliClimaVMCFanModeSelect(IntelliClimaECOEntity, SelectEntity):
|
||||
"""Representation of an IntelliClima VMC fan mode selector."""
|
||||
|
||||
_attr_translation_key = "fan_mode"
|
||||
_attr_options = ["forward", "reverse", "alternate", "sensor"]
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: IntelliClimaCoordinator,
|
||||
device: IntelliClimaECO,
|
||||
) -> None:
|
||||
"""Class initializer."""
|
||||
super().__init__(coordinator, device)
|
||||
|
||||
self._attr_unique_id = f"{device.id}_fan_mode"
|
||||
|
||||
@property
|
||||
def current_option(self) -> str | None:
|
||||
"""Return the current fan mode."""
|
||||
device_data = self._device_data
|
||||
|
||||
if device_data.mode_set == FanMode.off:
|
||||
return None
|
||||
|
||||
# If in auto mode (sensor mode with auto speed), return None (handled by fan entity preset mode)
|
||||
if (
|
||||
device_data.speed_set == FanSpeed.auto
|
||||
and device_data.mode_set == FanMode.sensor
|
||||
):
|
||||
return None
|
||||
|
||||
return INTELLICLIMA_MODE_TO_FAN_MODE.get(FanMode(device_data.mode_set))
|
||||
|
||||
async def async_select_option(self, option: str) -> None:
|
||||
"""Set the fan mode."""
|
||||
device_data = self._device_data
|
||||
|
||||
mode = FAN_MODE_TO_INTELLICLIMA_MODE[option]
|
||||
|
||||
# Determine speed: keep current speed if available, otherwise default to sleep
|
||||
if (
|
||||
device_data.speed_set == FanSpeed.auto
|
||||
or device_data.mode_set == FanMode.off
|
||||
):
|
||||
speed = FanSpeed.sleep
|
||||
else:
|
||||
speed = device_data.speed_set
|
||||
|
||||
await self.coordinator.api.ecocomfort.set_mode_speed(
|
||||
self._device_sn, mode, speed
|
||||
)
|
||||
await self.coordinator.async_request_refresh()
|
||||
@@ -22,5 +22,18 @@
|
||||
"description": "Authenticate against IntelliClima cloud"
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"select": {
|
||||
"fan_mode": {
|
||||
"name": "Fan direction mode",
|
||||
"state": {
|
||||
"alternate": "Alternating",
|
||||
"forward": "Forward",
|
||||
"reverse": "Reverse",
|
||||
"sensor": "Sensor"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/kaleidescape",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["pykaleidescape==1.1.1"],
|
||||
"requirements": ["pykaleidescape==1.1.3"],
|
||||
"ssdp": [
|
||||
{
|
||||
"deviceType": "schemas-upnp-org:device:Basic:1",
|
||||
|
||||
@@ -65,7 +65,7 @@ class LitterRobotDataUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
except LitterRobotLoginException as ex:
|
||||
raise ConfigEntryAuthFailed("Invalid credentials") from ex
|
||||
except LitterRobotException as ex:
|
||||
raise UpdateFailed("Unable to connect to Litter-Robot API") from ex
|
||||
raise UpdateFailed("Unable to connect to Whisker API") from ex
|
||||
|
||||
def litter_robots(self) -> Generator[LitterRobot]:
|
||||
"""Get Litter-Robots from the account."""
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"domain": "litterrobot",
|
||||
"name": "Litter-Robot",
|
||||
"name": "Whisker",
|
||||
"codeowners": ["@natekspencer", "@tkdrob"],
|
||||
"config_flow": true,
|
||||
"dhcp": [
|
||||
|
||||
@@ -8,6 +8,6 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/matter",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["python-matter-server==8.1.2"],
|
||||
"requirements": ["matter-python-client==0.4.1"],
|
||||
"zeroconf": ["_matter._tcp.local.", "_matterc._udp.local."]
|
||||
}
|
||||
|
||||
130
homeassistant/components/myneomitis/__init__.py
Normal file
130
homeassistant/components/myneomitis/__init__.py
Normal file
@@ -0,0 +1,130 @@
|
||||
"""Integration for MyNeomitis."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import aiohttp
|
||||
import pyaxencoapi
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CONF_EMAIL,
|
||||
CONF_PASSWORD,
|
||||
EVENT_HOMEASSISTANT_STOP,
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import Event, HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PLATFORMS = [Platform.SELECT]
|
||||
|
||||
|
||||
@dataclass
|
||||
class MyNeomitisRuntimeData:
|
||||
"""Runtime data for MyNeomitis integration."""
|
||||
|
||||
api: pyaxencoapi.PyAxencoAPI
|
||||
devices: list[dict[str, Any]]
|
||||
|
||||
|
||||
type MyNeomitisConfigEntry = ConfigEntry[MyNeomitisRuntimeData]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: MyNeomitisConfigEntry) -> bool:
|
||||
"""Set up MyNeomitis from a config entry."""
|
||||
session = async_get_clientsession(hass)
|
||||
|
||||
email: str = entry.data[CONF_EMAIL]
|
||||
password: str = entry.data[CONF_PASSWORD]
|
||||
|
||||
api = pyaxencoapi.PyAxencoAPI(session)
|
||||
connected = False
|
||||
try:
|
||||
await api.login(email, password)
|
||||
await api.connect_websocket()
|
||||
connected = True
|
||||
_LOGGER.debug("Successfully connected to Login/WebSocket")
|
||||
|
||||
# Retrieve the user's devices
|
||||
devices: list[dict[str, Any]] = await api.get_devices()
|
||||
|
||||
except aiohttp.ClientResponseError as err:
|
||||
if connected:
|
||||
try:
|
||||
await api.disconnect_websocket()
|
||||
except (
|
||||
TimeoutError,
|
||||
ConnectionError,
|
||||
aiohttp.ClientError,
|
||||
) as disconnect_err:
|
||||
_LOGGER.error(
|
||||
"Error while disconnecting WebSocket for %s: %s",
|
||||
entry.entry_id,
|
||||
disconnect_err,
|
||||
)
|
||||
if err.status == 401:
|
||||
raise ConfigEntryAuthFailed(
|
||||
"Authentication failed, please update your credentials"
|
||||
) from err
|
||||
raise ConfigEntryNotReady(f"Error connecting to API: {err}") from err
|
||||
except (TimeoutError, ConnectionError, aiohttp.ClientError) as err:
|
||||
if connected:
|
||||
try:
|
||||
await api.disconnect_websocket()
|
||||
except (
|
||||
TimeoutError,
|
||||
ConnectionError,
|
||||
aiohttp.ClientError,
|
||||
) as disconnect_err:
|
||||
_LOGGER.error(
|
||||
"Error while disconnecting WebSocket for %s: %s",
|
||||
entry.entry_id,
|
||||
disconnect_err,
|
||||
)
|
||||
raise ConfigEntryNotReady(f"Error connecting to API/WebSocket: {err}") from err
|
||||
|
||||
entry.runtime_data = MyNeomitisRuntimeData(api=api, devices=devices)
|
||||
|
||||
async def _async_disconnect_websocket(_event: Event) -> None:
|
||||
"""Disconnect WebSocket on Home Assistant shutdown."""
|
||||
try:
|
||||
await api.disconnect_websocket()
|
||||
except (TimeoutError, ConnectionError, aiohttp.ClientError) as err:
|
||||
_LOGGER.error(
|
||||
"Error while disconnecting WebSocket for %s: %s",
|
||||
entry.entry_id,
|
||||
err,
|
||||
)
|
||||
|
||||
entry.async_on_unload(
|
||||
hass.bus.async_listen_once(
|
||||
EVENT_HOMEASSISTANT_STOP, _async_disconnect_websocket
|
||||
)
|
||||
)
|
||||
|
||||
# Load platforms
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: MyNeomitisConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
if unload_ok:
|
||||
try:
|
||||
await entry.runtime_data.api.disconnect_websocket()
|
||||
except (TimeoutError, ConnectionError) as err:
|
||||
_LOGGER.error(
|
||||
"Error while disconnecting WebSocket for %s: %s",
|
||||
entry.entry_id,
|
||||
err,
|
||||
)
|
||||
|
||||
return unload_ok
|
||||
78
homeassistant/components/myneomitis/config_flow.py
Normal file
78
homeassistant/components/myneomitis/config_flow.py
Normal file
@@ -0,0 +1,78 @@
|
||||
"""Config flow for MyNeomitis integration."""
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import aiohttp
|
||||
from pyaxencoapi import PyAxencoAPI
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import CONF_USER_ID, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MyNeoConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle the configuration flow for the MyNeomitis integration."""
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 1
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step of the configuration flow."""
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
email: str = user_input[CONF_EMAIL]
|
||||
password: str = user_input[CONF_PASSWORD]
|
||||
|
||||
session = async_get_clientsession(self.hass)
|
||||
api = PyAxencoAPI(session)
|
||||
|
||||
try:
|
||||
await api.login(email, password)
|
||||
except aiohttp.ClientResponseError as e:
|
||||
if e.status == 401:
|
||||
errors["base"] = "invalid_auth"
|
||||
elif e.status >= 500:
|
||||
errors["base"] = "cannot_connect"
|
||||
else:
|
||||
errors["base"] = "unknown"
|
||||
except aiohttp.ClientConnectionError:
|
||||
errors["base"] = "cannot_connect"
|
||||
except aiohttp.ClientError:
|
||||
errors["base"] = "unknown"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected error during login")
|
||||
errors["base"] = "unknown"
|
||||
|
||||
if not errors:
|
||||
# Prevent duplicate configuration with the same user ID
|
||||
await self.async_set_unique_id(api.user_id)
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
return self.async_create_entry(
|
||||
title=f"MyNeomitis ({email})",
|
||||
data={
|
||||
CONF_EMAIL: email,
|
||||
CONF_PASSWORD: password,
|
||||
CONF_USER_ID: api.user_id,
|
||||
},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_EMAIL): str,
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
4
homeassistant/components/myneomitis/const.py
Normal file
4
homeassistant/components/myneomitis/const.py
Normal file
@@ -0,0 +1,4 @@
|
||||
"""Constants for the MyNeomitis integration."""
|
||||
|
||||
DOMAIN = "myneomitis"
|
||||
CONF_USER_ID = "user_id"
|
||||
31
homeassistant/components/myneomitis/icons.json
Normal file
31
homeassistant/components/myneomitis/icons.json
Normal file
@@ -0,0 +1,31 @@
|
||||
{
|
||||
"entity": {
|
||||
"select": {
|
||||
"pilote": {
|
||||
"state": {
|
||||
"antifrost": "mdi:snowflake",
|
||||
"auto": "mdi:refresh-auto",
|
||||
"boost": "mdi:rocket-launch",
|
||||
"comfort": "mdi:fire",
|
||||
"eco": "mdi:leaf",
|
||||
"eco_1": "mdi:leaf",
|
||||
"eco_2": "mdi:leaf",
|
||||
"standby": "mdi:toggle-switch-off-outline"
|
||||
}
|
||||
},
|
||||
"relais": {
|
||||
"state": {
|
||||
"auto": "mdi:refresh-auto",
|
||||
"off": "mdi:toggle-switch-off-outline",
|
||||
"on": "mdi:toggle-switch"
|
||||
}
|
||||
},
|
||||
"ufh": {
|
||||
"state": {
|
||||
"cooling": "mdi:snowflake",
|
||||
"heating": "mdi:fire"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
11
homeassistant/components/myneomitis/manifest.json
Normal file
11
homeassistant/components/myneomitis/manifest.json
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"domain": "myneomitis",
|
||||
"name": "MyNeomitis",
|
||||
"codeowners": ["@l-pr"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/myneomitis",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_push",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pyaxencoapi==1.0.6"]
|
||||
}
|
||||
76
homeassistant/components/myneomitis/quality_scale.yaml
Normal file
76
homeassistant/components/myneomitis/quality_scale.yaml
Normal file
@@ -0,0 +1,76 @@
|
||||
rules:
|
||||
# Bronze tier rules
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: Integration does not register service actions.
|
||||
appropriate-polling:
|
||||
status: exempt
|
||||
comment: Integration uses WebSocket push updates, not polling.
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: Integration does not provide service actions.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver tier rules
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: Integration does not provide service actions.
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: exempt
|
||||
comment: Integration has no configuration parameters beyond initial setup.
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates:
|
||||
status: exempt
|
||||
comment: Integration uses WebSocket callbacks to push updates directly to entities, not coordinator-based polling.
|
||||
reauthentication-flow: todo
|
||||
test-coverage: done
|
||||
|
||||
# Gold tier rules
|
||||
devices: todo
|
||||
diagnostics: todo
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: Integration is cloud-based and does not use local discovery.
|
||||
discovery:
|
||||
status: exempt
|
||||
comment: Integration requires manual authentication via cloud service.
|
||||
docs-data-update: todo
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: todo
|
||||
docs-supported-functions: todo
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices: todo
|
||||
entity-category: todo
|
||||
entity-device-class: todo
|
||||
entity-disabled-by-default: todo
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: todo
|
||||
repair-issues: todo
|
||||
stale-devices: todo
|
||||
|
||||
# Platinum tier rules
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing: todo
|
||||
208
homeassistant/components/myneomitis/select.py
Normal file
208
homeassistant/components/myneomitis/select.py
Normal file
@@ -0,0 +1,208 @@
|
||||
"""Select entities for MyNeomitis integration.
|
||||
|
||||
This module defines and sets up the select entities for the MyNeomitis integration.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pyaxencoapi import PyAxencoAPI
|
||||
|
||||
from homeassistant.components.select import SelectEntity, SelectEntityDescription
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import MyNeomitisConfigEntry
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# Device models handled by this platform. "EWS" devices receive a relay- or
# pilot-wire select entity; "UFH" (sub-model) devices receive a heating/
# cooling change-over select entity.
SUPPORTED_MODELS: frozenset[str] = frozenset({"EWS"})
SUPPORTED_SUB_MODELS: frozenset[str] = frozenset({"UFH"})

# Option name -> numeric mode code for pilot-wire ("pilote") devices.
# NOTE(review): the numeric codes presumably mirror the MyNeomitis cloud
# API mode identifiers — confirm against pyaxencoapi.
PRESET_MODE_MAP = {
    "comfort": 1,
    "eco": 2,
    "antifrost": 3,
    "standby": 4,
    "boost": 6,
    "setpoint": 8,
    "comfort_plus": 20,
    "eco_1": 40,
    "eco_2": 41,
    "auto": 60,
}

# Option name -> numeric mode code for relay ("relais") devices.
PRESET_MODE_MAP_RELAIS = {
    "on": 1,
    "off": 2,
    "auto": 60,
}

# Option name -> change-over code for underfloor heating (UFH) devices.
PRESET_MODE_MAP_UFH = {
    "heating": 0,
    "cooling": 1,
}

# Inverse maps used to translate API mode codes back to option names.
REVERSE_PRESET_MODE_MAP = {v: k for k, v in PRESET_MODE_MAP.items()}

REVERSE_PRESET_MODE_MAP_RELAIS = {v: k for k, v in PRESET_MODE_MAP_RELAIS.items()}

REVERSE_PRESET_MODE_MAP_UFH = {v: k for k, v in PRESET_MODE_MAP_UFH.items()}
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
class MyNeoSelectEntityDescription(SelectEntityDescription):
    """Describe MyNeomitis select entity."""

    # Maps user-facing option names to the numeric codes sent to the API.
    preset_mode_map: dict[str, int]
    # Inverse of preset_mode_map; resolves API mode codes to option names.
    reverse_preset_mode_map: dict[int, str]
    # Key in the device "state" dict that holds the current mode code.
    state_key: str
|
||||
|
||||
|
||||
# Static entity descriptions, keyed by device flavour. "relais" and
# "pilote" both track the device's "targetMode" state field; "ufh" tracks
# the "changeOverUser" field instead.
SELECT_TYPES: dict[str, MyNeoSelectEntityDescription] = {
    "relais": MyNeoSelectEntityDescription(
        key="relais",
        translation_key="relais",
        options=list(PRESET_MODE_MAP_RELAIS),
        preset_mode_map=PRESET_MODE_MAP_RELAIS,
        reverse_preset_mode_map=REVERSE_PRESET_MODE_MAP_RELAIS,
        state_key="targetMode",
    ),
    "pilote": MyNeoSelectEntityDescription(
        key="pilote",
        translation_key="pilote",
        options=list(PRESET_MODE_MAP),
        preset_mode_map=PRESET_MODE_MAP,
        reverse_preset_mode_map=REVERSE_PRESET_MODE_MAP,
        state_key="targetMode",
    ),
    "ufh": MyNeoSelectEntityDescription(
        key="ufh",
        translation_key="ufh",
        options=list(PRESET_MODE_MAP_UFH),
        preset_mode_map=PRESET_MODE_MAP_UFH,
        reverse_preset_mode_map=REVERSE_PRESET_MODE_MAP_UFH,
        state_key="changeOverUser",
    ),
}
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: MyNeomitisConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Select entities from a config entry."""
    runtime = config_entry.runtime_data
    supported = SUPPORTED_MODELS | SUPPORTED_SUB_MODELS

    def _description_for(device: dict) -> MyNeoSelectEntityDescription:
        """Pick the entity description matching the device flavour."""
        if device["model"] != "EWS":  # UFH
            return SELECT_TYPES["ufh"]
        # According to the MyNeomitis API, EWS "relais" devices expose a
        # "relayMode" field in their state, while "pilote" devices do not.
        # The presence of "relayMode" is therefore used as an explicit
        # heuristic to distinguish relais from pilote devices. If the
        # upstream API changes this behavior, this detection logic must be
        # revisited.
        if "relayMode" in device.get("state", {}):
            return SELECT_TYPES["relais"]
        return SELECT_TYPES["pilote"]

    entities = [
        MyNeoSelect(runtime.api, device, _description_for(device))
        for device in runtime.devices
        if device["model"] in supported
    ]
    async_add_entities(entities)
|
||||
|
||||
|
||||
class MyNeoSelect(SelectEntity):
    """Select entity for MyNeomitis devices.

    State is pushed to the entity through WebSocket callbacks registered
    with the PyAxencoAPI client; the entity never polls.
    """

    entity_description: MyNeoSelectEntityDescription
    _attr_has_entity_name = True
    _attr_name = None  # Entity represents the device itself
    _attr_should_poll = False

    def __init__(
        self,
        api: PyAxencoAPI,
        device: dict[str, Any],
        description: MyNeoSelectEntityDescription,
    ) -> None:
        """Initialize the MyNeoSelect entity.

        Args:
            api: Shared API client used to register the WS listener and to
                send mode changes.
            device: Raw device dict from the API ("_id", "name", "model",
                "connected" and a nested "state" dict).
            description: Entity description carrying the option/mode maps
                and the state key to watch.
        """
        self.entity_description = description
        self._api = api
        self._device = device
        self._attr_unique_id = device["_id"]
        self._attr_available = device["connected"]
        self._attr_device_info = dr.DeviceInfo(
            identifiers={(DOMAIN, device["_id"])},
            name=device["name"],
            manufacturer="Axenco",
            model=device["model"],
        )
        # Set current option based on device state; unknown or missing
        # mode codes yield None (option unknown).
        current_mode = device.get("state", {}).get(description.state_key)
        self._attr_current_option = description.reverse_preset_mode_map.get(
            current_mode
        )
        # Tracks whether the "unavailable" log line was already emitted so
        # repeated WS disconnect updates do not spam the log.
        self._unavailable_logged: bool = False

    async def async_added_to_hass(self) -> None:
        """Register listener when entity is added to hass.

        The returned unsubscribe callable (if any) is tied to entity
        removal so the listener is cleaned up automatically.
        """
        await super().async_added_to_hass()
        if unsubscribe := self._api.register_listener(
            self._device["_id"], self.handle_ws_update
        ):
            self.async_on_remove(unsubscribe)

    @callback
    def handle_ws_update(self, new_state: dict[str, Any]) -> None:
        """Handle WebSocket updates for the device.

        Updates availability and/or the current option from a partial
        state payload, then writes HA state once at the end.
        """
        if not new_state:
            return

        # Availability transitions are logged once per direction only
        # (see _unavailable_logged).
        if "connected" in new_state:
            self._attr_available = new_state["connected"]
            if not self._attr_available:
                if not self._unavailable_logged:
                    _LOGGER.info("The entity %s is unavailable", self.entity_id)
                    self._unavailable_logged = True
            elif self._unavailable_logged:
                _LOGGER.info("The entity %s is back online", self.entity_id)
                self._unavailable_logged = False

        # Check for state updates using the description's state_key
        state_key = self.entity_description.state_key
        if state_key in new_state:
            mode = new_state.get(state_key)
            if mode is not None:
                self._attr_current_option = (
                    self.entity_description.reverse_preset_mode_map.get(mode)
                )

        self.async_write_ha_state()

    async def async_select_option(self, option: str) -> None:
        """Send the new mode via the API.

        Unknown options are logged and ignored; on success the option is
        applied optimistically before the next WS confirmation arrives.
        """
        mode_code = self.entity_description.preset_mode_map.get(option)

        if mode_code is None:
            _LOGGER.warning("Unknown mode selected: %s", option)
            return

        await self._api.set_device_mode(self._device["_id"], mode_code)
        self._attr_current_option = option
        self.async_write_ha_state()
|
||||
57
homeassistant/components/myneomitis/strings.json
Normal file
57
homeassistant/components/myneomitis/strings.json
Normal file
@@ -0,0 +1,57 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "This integration is already configured."
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "Could not connect to the MyNeomitis service. Please try again later.",
|
||||
"invalid_auth": "Authentication failed. Please check your email address and password.",
|
||||
"unknown": "An unexpected error occurred. Please try again."
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"email": "[%key:common::config_flow::data::email%]",
|
||||
"password": "[%key:common::config_flow::data::password%]"
|
||||
},
|
||||
"data_description": {
|
||||
"email": "Your email address used for your MyNeomitis account",
|
||||
"password": "Your MyNeomitis account password"
|
||||
},
|
||||
"description": "Enter your MyNeomitis account credentials.",
|
||||
"title": "Connect to MyNeomitis"
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"select": {
|
||||
"pilote": {
|
||||
"state": {
|
||||
"antifrost": "Frost protection",
|
||||
"auto": "[%key:common::state::auto%]",
|
||||
"boost": "Boost",
|
||||
"comfort": "Comfort",
|
||||
"comfort_plus": "Comfort +",
|
||||
"eco": "Eco",
|
||||
"eco_1": "Eco -1",
|
||||
"eco_2": "Eco -2",
|
||||
"setpoint": "Setpoint",
|
||||
"standby": "[%key:common::state::standby%]"
|
||||
}
|
||||
},
|
||||
"relais": {
|
||||
"state": {
|
||||
"auto": "[%key:common::state::auto%]",
|
||||
"off": "[%key:common::state::off%]",
|
||||
"on": "[%key:common::state::on%]"
|
||||
}
|
||||
},
|
||||
"ufh": {
|
||||
"state": {
|
||||
"cooling": "Cooling",
|
||||
"heating": "Heating"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -9,5 +9,5 @@
|
||||
"integration_type": "service",
|
||||
"iot_class": "local_push",
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["python-overseerr==0.8.0"]
|
||||
"requirements": ["python-overseerr==0.9.0"]
|
||||
}
|
||||
|
||||
@@ -79,6 +79,14 @@ async def _async_get_requests(call: ServiceCall) -> ServiceResponse:
|
||||
req["media"] = await _get_media(
|
||||
client, request.media.media_type, request.media.tmdb_id
|
||||
)
|
||||
for user in (req["modified_by"], req["requested_by"]):
|
||||
del user["avatar_e_tag"]
|
||||
del user["avatar_version"]
|
||||
del user["permissions"]
|
||||
del user["recovery_link_expiration_date"]
|
||||
del user["settings"]
|
||||
del user["user_type"]
|
||||
del user["warnings"]
|
||||
result.append(req)
|
||||
|
||||
return {"requests": cast(list[JsonValueType], result)}
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from dataclasses import asdict, dataclass
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
@@ -38,10 +38,7 @@ class PlugwiseClimateExtraStoredData(ExtraStoredData):
|
||||
|
||||
def as_dict(self) -> dict[str, Any]:
|
||||
"""Return a dict representation of the text data."""
|
||||
return {
|
||||
"last_active_schedule": self.last_active_schedule,
|
||||
"previous_action_mode": self.previous_action_mode,
|
||||
}
|
||||
return asdict(self)
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, restored: dict[str, Any]) -> PlugwiseClimateExtraStoredData:
|
||||
@@ -102,7 +99,9 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity, RestoreEntity):
|
||||
extra_data.as_dict()
|
||||
)
|
||||
self._last_active_schedule = plugwise_extra_data.last_active_schedule
|
||||
self._previous_action_mode = plugwise_extra_data.previous_action_mode
|
||||
self._previous_action_mode = (
|
||||
plugwise_extra_data.previous_action_mode or HVACAction.HEATING.value
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@@ -202,11 +201,10 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity, RestoreEntity):
|
||||
|
||||
if self.coordinator.api.cooling_present:
|
||||
if "regulation_modes" in self._gateway_data:
|
||||
selected = self._gateway_data.get("select_regulation_mode")
|
||||
if selected == HVACAction.COOLING.value:
|
||||
hvac_modes.append(HVACMode.COOL)
|
||||
if selected == HVACAction.HEATING.value:
|
||||
if "heating" in self._gateway_data["regulation_modes"]:
|
||||
hvac_modes.append(HVACMode.HEAT)
|
||||
if "cooling" in self._gateway_data["regulation_modes"]:
|
||||
hvac_modes.append(HVACMode.COOL)
|
||||
else:
|
||||
hvac_modes.append(HVACMode.HEAT_COOL)
|
||||
else:
|
||||
@@ -253,40 +251,75 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity, RestoreEntity):
|
||||
|
||||
await self.coordinator.api.set_temperature(self._location, data)
|
||||
|
||||
def _regulation_mode_for_hvac(self, hvac_mode: HVACMode) -> str | None:
|
||||
"""Return the API regulation value for a manual HVAC mode, or None."""
|
||||
if hvac_mode == HVACMode.HEAT:
|
||||
return HVACAction.HEATING.value
|
||||
if hvac_mode == HVACMode.COOL:
|
||||
return HVACAction.COOLING.value
|
||||
return None
|
||||
|
||||
@plugwise_command
|
||||
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
"""Set the hvac mode."""
|
||||
"""Set the HVAC mode (off, heat, cool, heat_cool, or auto/schedule)."""
|
||||
if hvac_mode == self.hvac_mode:
|
||||
return
|
||||
|
||||
api = self.coordinator.api
|
||||
current_schedule = self.device.get("select_schedule")
|
||||
|
||||
# OFF: single API call
|
||||
if hvac_mode == HVACMode.OFF:
|
||||
await self.coordinator.api.set_regulation_mode(hvac_mode.value)
|
||||
else:
|
||||
current = self.device.get("select_schedule")
|
||||
desired = current
|
||||
await api.set_regulation_mode(hvac_mode.value)
|
||||
return
|
||||
|
||||
# Capture the last valid schedule
|
||||
if desired and desired != "off":
|
||||
self._last_active_schedule = desired
|
||||
elif desired == "off":
|
||||
desired = self._last_active_schedule
|
||||
|
||||
# Enabling HVACMode.AUTO requires a previously set schedule for saving and restoring
|
||||
if hvac_mode == HVACMode.AUTO and not desired:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key=ERROR_NO_SCHEDULE,
|
||||
)
|
||||
|
||||
await self.coordinator.api.set_schedule_state(
|
||||
self._location,
|
||||
STATE_ON if hvac_mode == HVACMode.AUTO else STATE_OFF,
|
||||
desired,
|
||||
# Manual mode (heat/cool/heat_cool) without a schedule: set regulation only
|
||||
if (
|
||||
current_schedule is None
|
||||
and hvac_mode != HVACMode.AUTO
|
||||
and (
|
||||
regulation := self._regulation_mode_for_hvac(hvac_mode)
|
||||
or self._previous_action_mode
|
||||
)
|
||||
if self.hvac_mode == HVACMode.OFF and self._previous_action_mode:
|
||||
await self.coordinator.api.set_regulation_mode(
|
||||
self._previous_action_mode
|
||||
):
|
||||
await api.set_regulation_mode(regulation)
|
||||
return
|
||||
|
||||
# Manual mode: ensure regulation and turn off schedule when needed
|
||||
if hvac_mode in (HVACMode.HEAT, HVACMode.COOL, HVACMode.HEAT_COOL):
|
||||
regulation = self._regulation_mode_for_hvac(hvac_mode) or (
|
||||
self._previous_action_mode
|
||||
if self.hvac_mode in (HVACMode.HEAT_COOL, HVACMode.OFF)
|
||||
else None
|
||||
)
|
||||
if regulation:
|
||||
await api.set_regulation_mode(regulation)
|
||||
|
||||
if (
|
||||
self.hvac_mode == HVACMode.OFF and current_schedule not in (None, "off")
|
||||
) or (self.hvac_mode == HVACMode.AUTO and current_schedule is not None):
|
||||
await api.set_schedule_state(
|
||||
self._location, STATE_OFF, current_schedule
|
||||
)
|
||||
return
|
||||
|
||||
# AUTO: restore schedule and regulation
|
||||
desired_schedule = current_schedule
|
||||
if desired_schedule and desired_schedule != "off":
|
||||
self._last_active_schedule = desired_schedule
|
||||
elif desired_schedule == "off":
|
||||
desired_schedule = self._last_active_schedule
|
||||
|
||||
if not desired_schedule:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key=ERROR_NO_SCHEDULE,
|
||||
)
|
||||
|
||||
if self._previous_action_mode:
|
||||
if self.hvac_mode == HVACMode.OFF:
|
||||
await api.set_regulation_mode(self._previous_action_mode)
|
||||
await api.set_schedule_state(self._location, STATE_ON, desired_schedule)
|
||||
|
||||
@plugwise_command
|
||||
async def async_set_preset_mode(self, preset_mode: str) -> None:
|
||||
|
||||
@@ -19,6 +19,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_create_clientsession
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
import homeassistant.helpers.device_registry as dr
|
||||
from homeassistant.helpers.device_registry import DeviceEntry
|
||||
import homeassistant.helpers.entity_registry as er
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
@@ -137,3 +138,26 @@ async def async_migrate_entry(hass: HomeAssistant, entry: PortainerConfigEntry)
|
||||
hass.config_entries.async_update_entry(entry=entry, version=4)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_remove_config_entry_device(
|
||||
hass: HomeAssistant,
|
||||
entry: PortainerConfigEntry,
|
||||
device: DeviceEntry,
|
||||
) -> bool:
|
||||
"""Remove a config entry from a device."""
|
||||
coordinator = entry.runtime_data
|
||||
valid_identifiers: set[tuple[str, str]] = set()
|
||||
|
||||
# The Portainer integration creates devices for both endpoints and containers. That's why we're doing it double
|
||||
valid_identifiers.update(
|
||||
(DOMAIN, f"{entry.entry_id}_{endpoint_id}") for endpoint_id in coordinator.data
|
||||
)
|
||||
|
||||
valid_identifiers.update(
|
||||
(DOMAIN, f"{entry.entry_id}_{container_name}")
|
||||
for endpoint in coordinator.data.values()
|
||||
for container_name in endpoint.containers
|
||||
)
|
||||
|
||||
return not device.identifiers.intersection(valid_identifiers)
|
||||
|
||||
28
homeassistant/components/proxmoxve/diagnostics.py
Normal file
28
homeassistant/components/proxmoxve/diagnostics.py
Normal file
@@ -0,0 +1,28 @@
|
||||
"""Diagnostics support for Proxmox VE."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import asdict
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from . import ProxmoxConfigEntry
|
||||
|
||||
TO_REDACT = [CONF_USERNAME, CONF_PASSWORD, CONF_HOST]
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, config_entry: ProxmoxConfigEntry
) -> dict[str, Any]:
    """Return diagnostics for a Proxmox VE config entry.

    Sensitive fields (username, password, host) are redacted from the
    config entry before inclusion; per-node runtime data is dumped as
    plain dicts.
    """
    entry_dump = async_redact_data(config_entry.as_dict(), TO_REDACT)

    node_dump: dict[str, Any] = {}
    for node, node_data in config_entry.runtime_data.data.items():
        node_dump[node] = asdict(node_data)

    return {"config_entry": entry_dump, "devices": node_dump}
|
||||
@@ -28,6 +28,11 @@ def async_setup(hass: HomeAssistant) -> None:
|
||||
assert event.data["action"] == "update" and "old_entity_id" in event.data
|
||||
old_entity_id = event.data["old_entity_id"]
|
||||
new_entity_id = event.data["entity_id"]
|
||||
# Notify the states meta manager about the pending rename so
|
||||
# that any StatisticsTask that runs before the actual database
|
||||
# update can still resolve the new entity_id to the correct
|
||||
# metadata_id.
|
||||
instance.states_meta_manager.queue_rename(old_entity_id, new_entity_id)
|
||||
async_update_statistics_metadata(
|
||||
hass, old_entity_id, new_statistic_id=new_entity_id
|
||||
)
|
||||
|
||||
@@ -952,7 +952,13 @@ def async_update_statistics_metadata(
|
||||
f"for unit_class '{new_unit_class}'"
|
||||
)
|
||||
|
||||
get_instance(hass).async_update_statistics_metadata(
|
||||
instance = get_instance(hass)
|
||||
# Notify the statistics meta manager about the pending rename so
|
||||
# that any StatisticsTask that runs before the actual database
|
||||
# update can still resolve the new statistic_id.
|
||||
if new_statistic_id is not UNDEFINED and new_statistic_id is not None:
|
||||
instance.statistics_meta_manager.queue_rename(statistic_id, new_statistic_id)
|
||||
instance.async_update_statistics_metadata(
|
||||
statistic_id,
|
||||
new_statistic_id=new_statistic_id,
|
||||
new_unit_class=new_unit_class,
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Iterable, Sequence
|
||||
from queue import SimpleQueue
|
||||
from typing import TYPE_CHECKING, cast
|
||||
|
||||
from sqlalchemy.orm.session import Session
|
||||
@@ -27,8 +28,32 @@ class StatesMetaManager(BaseLRUTableManager[StatesMeta]):
|
||||
def __init__(self, recorder: Recorder) -> None:
|
||||
"""Initialize the states meta manager."""
|
||||
self._did_first_load = False
|
||||
# Thread-safe queue for entity_id renames from the event loop.
|
||||
# Items are (old_entity_id, new_entity_id) tuples.
|
||||
self._rename_queue: SimpleQueue[tuple[str, str]] = SimpleQueue()
|
||||
# Recorder-thread-only dict mapping new_entity_id -> old_entity_id
|
||||
# for renames that haven't been applied to the database yet.
|
||||
self._pending_rename: dict[str, str] = {}
|
||||
super().__init__(recorder, CACHE_SIZE)
|
||||
|
||||
def queue_rename(self, old_entity_id: str, new_entity_id: str) -> None:
|
||||
"""Queue an entity_id rename notification.
|
||||
|
||||
This method is thread-safe and is called from the event loop
|
||||
to notify the recorder thread about a pending entity_id rename.
|
||||
"""
|
||||
self._rename_queue.put((old_entity_id, new_entity_id))
|
||||
|
||||
def drain_pending_renames(self) -> None:
|
||||
"""Drain the rename queue into the pending rename dict.
|
||||
|
||||
This call is not thread-safe and must be called from the
|
||||
recorder thread.
|
||||
"""
|
||||
while not self._rename_queue.empty():
|
||||
old_entity_id, new_entity_id = self._rename_queue.get_nowait()
|
||||
self._pending_rename[new_entity_id] = old_entity_id
|
||||
|
||||
def load(
|
||||
self, events: list[Event[EventStateChangedData]], session: Session
|
||||
) -> None:
|
||||
@@ -117,6 +142,21 @@ class StatesMetaManager(BaseLRUTableManager[StatesMeta]):
|
||||
if update_cache:
|
||||
self._id_map[entity_id] = metadata_id
|
||||
|
||||
if not from_recorder:
|
||||
return results
|
||||
|
||||
# Check pending renames for any entity_ids still not resolved.
|
||||
# If an entity_id was renamed but the database hasn't been updated
|
||||
# yet, we can resolve the new entity_id by looking up the old one.
|
||||
pending_rename = self._pending_rename
|
||||
for entity_id in missing:
|
||||
if (
|
||||
results.get(entity_id) is None
|
||||
and (old_entity_id := pending_rename.get(entity_id)) is not None
|
||||
and (metadata_id := self._id_map.get(old_entity_id)) is not None
|
||||
):
|
||||
results[entity_id] = metadata_id
|
||||
|
||||
return results
|
||||
|
||||
def add_pending(self, db_states_meta: StatesMeta) -> None:
|
||||
@@ -155,12 +195,18 @@ class StatesMetaManager(BaseLRUTableManager[StatesMeta]):
|
||||
new_entity_id: str,
|
||||
) -> bool:
|
||||
"""Update states metadata for an entity_id."""
|
||||
# Clear the pending rename before the collision check so
|
||||
# get() doesn't resolve new_entity_id via the side channel.
|
||||
self._pending_rename.pop(new_entity_id, None)
|
||||
if self.get(new_entity_id, session, True) is not None:
|
||||
# If the new entity id already exists we have
|
||||
# a collision and should not update.
|
||||
return False
|
||||
metadata_id = self._id_map.get(entity_id)
|
||||
session.query(StatesMeta).filter(StatesMeta.entity_id == entity_id).update(
|
||||
{StatesMeta.entity_id: new_entity_id}
|
||||
)
|
||||
self._id_map.pop(entity_id, None)
|
||||
if metadata_id is not None:
|
||||
self._id_map[new_entity_id] = metadata_id
|
||||
return True
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
"""Support managing StatesMeta."""
|
||||
"""Support managing StatisticsMeta."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from queue import SimpleQueue
|
||||
import threading
|
||||
from typing import TYPE_CHECKING, Any, Final, Literal
|
||||
|
||||
@@ -88,12 +89,36 @@ class StatisticsMetaManager:
|
||||
self._stat_id_to_id_meta: LRU[str, tuple[int, StatisticMetaData]] = LRU(
|
||||
CACHE_SIZE
|
||||
)
|
||||
# Thread-safe queue for statistic_id renames from the event loop.
|
||||
# Items are (old_statistic_id, new_statistic_id) tuples.
|
||||
self._rename_queue: SimpleQueue[tuple[str, str]] = SimpleQueue()
|
||||
# Recorder-thread-only dict mapping new_statistic_id -> old_statistic_id
|
||||
# for renames that haven't been applied to the database yet.
|
||||
self._pending_rename: dict[str, str] = {}
|
||||
|
||||
def _clear_cache(self, statistic_ids: list[str]) -> None:
|
||||
"""Clear the cache."""
|
||||
for statistic_id in statistic_ids:
|
||||
self._stat_id_to_id_meta.pop(statistic_id, None)
|
||||
|
||||
def queue_rename(self, old_statistic_id: str, new_statistic_id: str) -> None:
|
||||
"""Queue a statistic_id rename notification.
|
||||
|
||||
This method is thread-safe and is called from the event loop
|
||||
to notify the recorder thread about a pending statistic_id rename.
|
||||
"""
|
||||
self._rename_queue.put((old_statistic_id, new_statistic_id))
|
||||
|
||||
def drain_pending_renames(self) -> None:
|
||||
"""Drain the rename queue into the pending rename dict.
|
||||
|
||||
This call is not thread-safe and must be called from the
|
||||
recorder thread.
|
||||
"""
|
||||
while not self._rename_queue.empty():
|
||||
old_statistic_id, new_statistic_id = self._rename_queue.get_nowait()
|
||||
self._pending_rename[new_statistic_id] = old_statistic_id
|
||||
|
||||
def _get_from_database(
|
||||
self,
|
||||
session: Session,
|
||||
@@ -293,9 +318,28 @@ class StatisticsMetaManager:
|
||||
return results
|
||||
|
||||
# Fetch metadata from the database
|
||||
return results | self._get_from_database(
|
||||
session, statistic_ids=missing_statistic_id
|
||||
)
|
||||
results |= self._get_from_database(session, statistic_ids=missing_statistic_id)
|
||||
|
||||
# Check pending renames for any statistic_ids still not resolved.
|
||||
# If a statistic_id was renamed but the database hasn't been
|
||||
# updated yet, resolve the new statistic_id using the old one.
|
||||
if self.recorder.thread_id == threading.get_ident() and (
|
||||
pending_rename := self._pending_rename
|
||||
):
|
||||
for statistic_id in missing_statistic_id:
|
||||
if (
|
||||
statistic_id not in results
|
||||
and (old_id := pending_rename.get(statistic_id)) is not None
|
||||
):
|
||||
# Try cache first, then database for the old statistic_id
|
||||
if id_meta := self._stat_id_to_id_meta.get(old_id):
|
||||
results[statistic_id] = id_meta
|
||||
elif db_result := self._get_from_database(
|
||||
session, statistic_ids={old_id}
|
||||
):
|
||||
results[statistic_id] = next(iter(db_result.values()))
|
||||
|
||||
return results
|
||||
|
||||
def get_from_cache_threadsafe(
|
||||
self, statistic_ids: set[str]
|
||||
@@ -377,6 +421,9 @@ class StatisticsMetaManager:
|
||||
recorder thread.
|
||||
"""
|
||||
self._assert_in_recorder_thread()
|
||||
# Clear the pending rename before the collision check so
|
||||
# get() doesn't resolve new_statistic_id via the side channel.
|
||||
self._pending_rename.pop(new_statistic_id, None)
|
||||
if self.get(session, new_statistic_id):
|
||||
_LOGGER.error(
|
||||
"Cannot rename statistic_id `%s` to `%s` because the new statistic_id is already in use",
|
||||
|
||||
@@ -82,6 +82,7 @@ class UpdateStatisticsMetadataTask(RecorderTask):
|
||||
|
||||
def run(self, instance: Recorder) -> None:
|
||||
"""Handle the task."""
|
||||
instance.statistics_meta_manager.drain_pending_renames()
|
||||
statistics.update_statistics_metadata(
|
||||
instance,
|
||||
self.statistic_id,
|
||||
@@ -102,6 +103,7 @@ class UpdateStatesMetadataTask(RecorderTask):
|
||||
|
||||
def run(self, instance: Recorder) -> None:
|
||||
"""Handle the task."""
|
||||
instance.states_meta_manager.drain_pending_renames()
|
||||
entity_registry.update_states_metadata(
|
||||
instance,
|
||||
self.entity_id,
|
||||
@@ -169,6 +171,11 @@ class StatisticsTask(RecorderTask):
|
||||
|
||||
def run(self, instance: Recorder) -> None:
|
||||
"""Run statistics task."""
|
||||
# Drain any pending entity_id/statistic_id renames so the
|
||||
# compilation can resolve new ids that the database doesn't
|
||||
# know about yet.
|
||||
instance.states_meta_manager.drain_pending_renames()
|
||||
instance.statistics_meta_manager.drain_pending_renames()
|
||||
if statistics.compile_statistics(instance, self.start, self.fire_events):
|
||||
return
|
||||
# Schedule a new statistics task if this one didn't finish
|
||||
@@ -181,6 +188,8 @@ class CompileMissingStatisticsTask(RecorderTask):
|
||||
|
||||
def run(self, instance: Recorder) -> None:
|
||||
"""Run statistics task to compile missing statistics."""
|
||||
instance.states_meta_manager.drain_pending_renames()
|
||||
instance.statistics_meta_manager.drain_pending_renames()
|
||||
if statistics.compile_missing_statistics(instance):
|
||||
return
|
||||
# Schedule a new statistics task if this one didn't finish
|
||||
|
||||
@@ -160,7 +160,10 @@ class SnapcastClientDevice(SnapcastCoordinatorEntity, MediaPlayerEntity):
|
||||
if self._device.connected:
|
||||
if self.is_volume_muted or self._current_group.muted:
|
||||
return MediaPlayerState.IDLE
|
||||
return STREAM_STATUS.get(self._current_group.stream_status)
|
||||
try:
|
||||
return STREAM_STATUS.get(self._current_group.stream_status)
|
||||
except KeyError:
|
||||
pass
|
||||
return MediaPlayerState.OFF
|
||||
|
||||
@property
|
||||
@@ -275,10 +278,15 @@ class SnapcastClientDevice(SnapcastCoordinatorEntity, MediaPlayerEntity):
|
||||
@property
|
||||
def metadata(self) -> Mapping[str, Any]:
|
||||
"""Get metadata from the current stream."""
|
||||
if metadata := self.coordinator.server.stream(
|
||||
self._current_group.stream
|
||||
).metadata:
|
||||
return metadata
|
||||
try:
|
||||
if metadata := self.coordinator.server.stream(
|
||||
self._current_group.stream
|
||||
).metadata:
|
||||
return metadata
|
||||
except (
|
||||
KeyError
|
||||
): # the stream function raises KeyError if the stream does not exist
|
||||
pass
|
||||
|
||||
# Fallback to an empty dict
|
||||
return {}
|
||||
@@ -333,11 +341,15 @@ class SnapcastClientDevice(SnapcastCoordinatorEntity, MediaPlayerEntity):
|
||||
@property
|
||||
def media_position(self) -> int | None:
|
||||
"""Position of current playing media in seconds."""
|
||||
# Position is part of properties object, not metadata object
|
||||
if properties := self.coordinator.server.stream(
|
||||
self._current_group.stream
|
||||
).properties:
|
||||
if (value := properties.get("position")) is not None:
|
||||
return int(value)
|
||||
|
||||
try:
|
||||
# Position is part of properties object, not metadata object
|
||||
if properties := self.coordinator.server.stream(
|
||||
self._current_group.stream
|
||||
).properties:
|
||||
if (value := properties.get("position")) is not None:
|
||||
return int(value)
|
||||
except (
|
||||
KeyError
|
||||
): # the stream function raises KeyError if the stream does not exist
|
||||
pass
|
||||
return None
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@Ernst79", "@dontinelli"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/solarlog",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["solarlog_cli"],
|
||||
"quality_scale": "platinum",
|
||||
|
||||
@@ -500,7 +500,13 @@ async def _async_send_telegram_message(service: ServiceCall) -> ServiceResponse:
|
||||
errors.append((ex, target))
|
||||
|
||||
if len(errors) == 1:
|
||||
raise errors[0][0]
|
||||
if isinstance(errors[0][0], HomeAssistantError):
|
||||
raise errors[0][0]
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="action_failed",
|
||||
translation_placeholders={"error": str(errors[0][0])},
|
||||
) from errors[0][0]
|
||||
|
||||
if len(errors) > 1:
|
||||
error_messages: list[str] = []
|
||||
|
||||
@@ -37,7 +37,7 @@ rules:
|
||||
log-when-unavailable: todo
|
||||
parallel-updates: done
|
||||
reauthentication-flow: done
|
||||
test-coverage: todo
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
|
||||
@@ -8,5 +8,6 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["tesla-fleet-api"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["tesla-fleet-api==1.4.3", "teslemetry-stream==0.9.0"]
|
||||
}
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["tibber"],
|
||||
"requirements": ["pyTibber==0.35.0"]
|
||||
"requirements": ["pyTibber==0.36.0"]
|
||||
}
|
||||
|
||||
@@ -8,14 +8,16 @@ from steamloop import (
|
||||
ThermostatConnection,
|
||||
)
|
||||
|
||||
from homeassistant.const import CONF_HOST
|
||||
from homeassistant.const import CONF_HOST, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
|
||||
from .const import CONF_SECRET_KEY, DOMAIN, MANUFACTURER, PLATFORMS
|
||||
from .const import CONF_SECRET_KEY, DOMAIN, MANUFACTURER
|
||||
from .types import TraneConfigEntry
|
||||
|
||||
PLATFORMS = [Platform.CLIMATE, Platform.SWITCH]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: TraneConfigEntry) -> bool:
|
||||
"""Set up Trane Local from a config entry."""
|
||||
|
||||
200
homeassistant/components/trane/climate.py
Normal file
200
homeassistant/components/trane/climate.py
Normal file
@@ -0,0 +1,200 @@
|
||||
"""Climate platform for the Trane Local integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from steamloop import FanMode, HoldType, ThermostatConnection, ZoneMode
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
ATTR_TARGET_TEMP_HIGH,
|
||||
ATTR_TARGET_TEMP_LOW,
|
||||
ClimateEntity,
|
||||
ClimateEntityFeature,
|
||||
HVACAction,
|
||||
HVACMode,
|
||||
)
|
||||
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .entity import TraneZoneEntity
|
||||
from .types import TraneConfigEntry
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
HA_TO_ZONE_MODE = {
|
||||
HVACMode.OFF: ZoneMode.OFF,
|
||||
HVACMode.HEAT: ZoneMode.HEAT,
|
||||
HVACMode.COOL: ZoneMode.COOL,
|
||||
HVACMode.HEAT_COOL: ZoneMode.AUTO,
|
||||
HVACMode.AUTO: ZoneMode.AUTO,
|
||||
}
|
||||
|
||||
ZONE_MODE_TO_HA = {
|
||||
ZoneMode.OFF: HVACMode.OFF,
|
||||
ZoneMode.HEAT: HVACMode.HEAT,
|
||||
ZoneMode.COOL: HVACMode.COOL,
|
||||
ZoneMode.AUTO: HVACMode.AUTO,
|
||||
}
|
||||
|
||||
HA_TO_FAN_MODE = {
|
||||
"auto": FanMode.AUTO,
|
||||
"on": FanMode.ALWAYS_ON,
|
||||
"circulate": FanMode.CIRCULATE,
|
||||
}
|
||||
|
||||
FAN_MODE_TO_HA = {v: k for k, v in HA_TO_FAN_MODE.items()}
|
||||
|
||||
SINGLE_SETPOINT_MODES = frozenset({ZoneMode.COOL, ZoneMode.HEAT})
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: TraneConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Trane Local climate entities."""
|
||||
conn = config_entry.runtime_data
|
||||
async_add_entities(
|
||||
TraneClimateEntity(conn, config_entry.entry_id, zone_id)
|
||||
for zone_id in conn.state.zones
|
||||
)
|
||||
|
||||
|
||||
class TraneClimateEntity(TraneZoneEntity, ClimateEntity):
|
||||
"""Climate entity for a Trane thermostat zone."""
|
||||
|
||||
_attr_name = None
|
||||
_attr_translation_key = "zone"
|
||||
_attr_fan_modes = list(HA_TO_FAN_MODE)
|
||||
_attr_supported_features = (
|
||||
ClimateEntityFeature.TARGET_TEMPERATURE
|
||||
| ClimateEntityFeature.TARGET_TEMPERATURE_RANGE
|
||||
| ClimateEntityFeature.FAN_MODE
|
||||
| ClimateEntityFeature.TURN_OFF
|
||||
| ClimateEntityFeature.TURN_ON
|
||||
)
|
||||
_attr_temperature_unit = UnitOfTemperature.FAHRENHEIT
|
||||
_attr_target_temperature_step = 1.0
|
||||
|
||||
def __init__(self, conn: ThermostatConnection, entry_id: str, zone_id: str) -> None:
|
||||
"""Initialize the climate entity."""
|
||||
super().__init__(conn, entry_id, zone_id, "zone")
|
||||
modes: list[HVACMode] = []
|
||||
for zone_mode in conn.state.supported_modes:
|
||||
ha_mode = ZONE_MODE_TO_HA.get(zone_mode)
|
||||
if ha_mode is None:
|
||||
continue
|
||||
modes.append(ha_mode)
|
||||
# AUTO in steamloop maps to both AUTO (schedule) and HEAT_COOL (manual hold)
|
||||
if zone_mode == ZoneMode.AUTO:
|
||||
modes.append(HVACMode.HEAT_COOL)
|
||||
self._attr_hvac_modes = modes
|
||||
|
||||
@property
|
||||
def current_temperature(self) -> float | None:
|
||||
"""Return the current temperature."""
|
||||
# indoor_temperature is a string from the protocol (e.g. "72.00")
|
||||
# or empty string if not yet received
|
||||
if temp := self._zone.indoor_temperature:
|
||||
return float(temp)
|
||||
return None
|
||||
|
||||
@property
|
||||
def current_humidity(self) -> int | None:
|
||||
"""Return the current humidity."""
|
||||
# relative_humidity is a string from the protocol (e.g. "45")
|
||||
# or empty string if not yet received
|
||||
if humidity := self._conn.state.relative_humidity:
|
||||
return int(humidity)
|
||||
return None
|
||||
|
||||
@property
|
||||
def hvac_mode(self) -> HVACMode:
|
||||
"""Return the current HVAC mode."""
|
||||
zone = self._zone
|
||||
if zone.mode == ZoneMode.AUTO and zone.hold_type == HoldType.MANUAL:
|
||||
return HVACMode.HEAT_COOL
|
||||
return ZONE_MODE_TO_HA.get(zone.mode, HVACMode.OFF)
|
||||
|
||||
@property
|
||||
def hvac_action(self) -> HVACAction:
|
||||
"""Return the current HVAC action."""
|
||||
# heating_active and cooling_active are system-level strings from the
|
||||
# protocol ("0"=off, "1"=idle, "2"=running); filter by zone mode so
|
||||
# a zone in COOL never reports HEATING and vice versa
|
||||
zone_mode = self._zone.mode
|
||||
if zone_mode == ZoneMode.OFF:
|
||||
return HVACAction.OFF
|
||||
state = self._conn.state
|
||||
if zone_mode != ZoneMode.HEAT and state.cooling_active == "2":
|
||||
return HVACAction.COOLING
|
||||
if zone_mode != ZoneMode.COOL and state.heating_active == "2":
|
||||
return HVACAction.HEATING
|
||||
return HVACAction.IDLE
|
||||
|
||||
@property
|
||||
def target_temperature(self) -> float | None:
|
||||
"""Return target temperature for single-setpoint modes."""
|
||||
# Setpoints are strings from the protocol or empty string if not yet received
|
||||
zone = self._zone
|
||||
if zone.mode == ZoneMode.COOL:
|
||||
return float(zone.cool_setpoint) if zone.cool_setpoint else None
|
||||
if zone.mode == ZoneMode.HEAT:
|
||||
return float(zone.heat_setpoint) if zone.heat_setpoint else None
|
||||
return None
|
||||
|
||||
@property
|
||||
def target_temperature_high(self) -> float | None:
|
||||
"""Return the upper bound target temperature."""
|
||||
zone = self._zone
|
||||
if zone.mode in SINGLE_SETPOINT_MODES:
|
||||
return None
|
||||
return float(zone.cool_setpoint) if zone.cool_setpoint else None
|
||||
|
||||
@property
|
||||
def target_temperature_low(self) -> float | None:
|
||||
"""Return the lower bound target temperature."""
|
||||
zone = self._zone
|
||||
if zone.mode in SINGLE_SETPOINT_MODES:
|
||||
return None
|
||||
return float(zone.heat_setpoint) if zone.heat_setpoint else None
|
||||
|
||||
@property
|
||||
def fan_mode(self) -> str:
|
||||
"""Return the current fan mode."""
|
||||
return FAN_MODE_TO_HA.get(self._conn.state.fan_mode, "auto")
|
||||
|
||||
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
"""Set the HVAC mode."""
|
||||
if hvac_mode == HVACMode.OFF:
|
||||
self._conn.set_zone_mode(self._zone_id, ZoneMode.OFF)
|
||||
return
|
||||
|
||||
hold_type = HoldType.SCHEDULE if hvac_mode == HVACMode.AUTO else HoldType.MANUAL
|
||||
self._conn.set_temperature_setpoint(self._zone_id, hold_type=hold_type)
|
||||
|
||||
self._conn.set_zone_mode(self._zone_id, HA_TO_ZONE_MODE[hvac_mode])
|
||||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set target temperature."""
|
||||
heat_temp = kwargs.get(ATTR_TARGET_TEMP_LOW)
|
||||
cool_temp = kwargs.get(ATTR_TARGET_TEMP_HIGH)
|
||||
set_temp = kwargs.get(ATTR_TEMPERATURE)
|
||||
|
||||
if set_temp is not None:
|
||||
if self._zone.mode == ZoneMode.COOL:
|
||||
cool_temp = set_temp
|
||||
elif self._zone.mode == ZoneMode.HEAT:
|
||||
heat_temp = set_temp
|
||||
|
||||
self._conn.set_temperature_setpoint(
|
||||
self._zone_id,
|
||||
heat_setpoint=str(round(heat_temp)) if heat_temp is not None else None,
|
||||
cool_setpoint=str(round(cool_temp)) if cool_temp is not None else None,
|
||||
)
|
||||
|
||||
async def async_set_fan_mode(self, fan_mode: str) -> None:
|
||||
"""Set the fan mode."""
|
||||
self._conn.set_fan_mode(HA_TO_FAN_MODE[fan_mode])
|
||||
@@ -25,8 +25,6 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
class TraneConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Trane Local."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
|
||||
@@ -1,11 +1,7 @@
|
||||
"""Constants for the Trane Local integration."""
|
||||
|
||||
from homeassistant.const import Platform
|
||||
|
||||
DOMAIN = "trane"
|
||||
|
||||
PLATFORMS = [Platform.SWITCH]
|
||||
|
||||
CONF_SECRET_KEY = "secret_key"
|
||||
|
||||
MANUFACTURER = "Trane"
|
||||
|
||||
@@ -25,6 +25,19 @@
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"climate": {
|
||||
"zone": {
|
||||
"state_attributes": {
|
||||
"fan_mode": {
|
||||
"state": {
|
||||
"auto": "[%key:common::state::auto%]",
|
||||
"circulate": "Circulate",
|
||||
"on": "[%key:common::state::on%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"hold": {
|
||||
"name": "Hold"
|
||||
|
||||
@@ -11,7 +11,7 @@ from homeassistant.const import (
|
||||
CONF_PASSWORD,
|
||||
EVENT_HOMEASSISTANT_STOP,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.core import Event, HomeAssistant, ServiceCall
|
||||
from homeassistant.exceptions import (
|
||||
ConfigEntryAuthFailed,
|
||||
ConfigEntryNotReady,
|
||||
@@ -127,7 +127,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: VeluxConfigEntry) -> boo
|
||||
connections=connections,
|
||||
)
|
||||
|
||||
async def on_hass_stop(event):
|
||||
async def on_hass_stop(_: Event) -> None:
|
||||
"""Close connection when hass stops."""
|
||||
LOGGER.debug("Velux interface terminated")
|
||||
await pyvlx.disconnect()
|
||||
|
||||
@@ -70,7 +70,7 @@ class VeluxEntity(Entity):
|
||||
via_device=(DOMAIN, f"gateway_{config_entry_id}"),
|
||||
)
|
||||
|
||||
async def after_update_callback(self, node) -> None:
|
||||
async def after_update_callback(self, _: Node) -> None:
|
||||
"""Call after device was updated."""
|
||||
self._attr_available = self.node.pyvlx.get_connected()
|
||||
if not self._attr_available:
|
||||
|
||||
@@ -57,4 +57,4 @@ rules:
|
||||
# Platinum
|
||||
async-dependency: todo
|
||||
inject-websession: todo
|
||||
strict-typing: todo
|
||||
strict-typing: done
|
||||
|
||||
@@ -13,6 +13,8 @@ from PyViCare.PyViCareHeatingDevice import (
|
||||
HeatingDeviceWithComponent as PyViCareHeatingDeviceComponent,
|
||||
)
|
||||
from PyViCare.PyViCareUtils import (
|
||||
PyViCareDeviceCommunicationError,
|
||||
PyViCareInternalServerError,
|
||||
PyViCareInvalidDataError,
|
||||
PyViCareNotSupportedFeatureError,
|
||||
PyViCareRateLimitError,
|
||||
@@ -242,3 +244,7 @@ class ViCareBinarySensor(ViCareEntity, BinarySensorEntity):
|
||||
_LOGGER.error("Vicare API rate limit exceeded: %s", limit_exception)
|
||||
except PyViCareInvalidDataError as invalid_data_exception:
|
||||
_LOGGER.error("Invalid data from Vicare server: %s", invalid_data_exception)
|
||||
except PyViCareDeviceCommunicationError as comm_exception:
|
||||
_LOGGER.warning("Device communication error: %s", comm_exception)
|
||||
except PyViCareInternalServerError as server_exception:
|
||||
_LOGGER.warning("Vicare server error: %s", server_exception)
|
||||
|
||||
@@ -9,6 +9,8 @@ import logging
|
||||
from PyViCare.PyViCareDevice import Device as PyViCareDevice
|
||||
from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig
|
||||
from PyViCare.PyViCareUtils import (
|
||||
PyViCareDeviceCommunicationError,
|
||||
PyViCareInternalServerError,
|
||||
PyViCareInvalidDataError,
|
||||
PyViCareNotSupportedFeatureError,
|
||||
PyViCareRateLimitError,
|
||||
@@ -113,3 +115,7 @@ class ViCareButton(ViCareEntity, ButtonEntity):
|
||||
_LOGGER.error("Vicare API rate limit exceeded: %s", limit_exception)
|
||||
except PyViCareInvalidDataError as invalid_data_exception:
|
||||
_LOGGER.error("Invalid data from Vicare server: %s", invalid_data_exception)
|
||||
except PyViCareDeviceCommunicationError as comm_exception:
|
||||
_LOGGER.warning("Device communication error: %s", comm_exception)
|
||||
except PyViCareInternalServerError as server_exception:
|
||||
_LOGGER.warning("Vicare server error: %s", server_exception)
|
||||
|
||||
@@ -11,6 +11,8 @@ from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig
|
||||
from PyViCare.PyViCareHeatingDevice import HeatingCircuit as PyViCareHeatingCircuit
|
||||
from PyViCare.PyViCareUtils import (
|
||||
PyViCareCommandError,
|
||||
PyViCareDeviceCommunicationError,
|
||||
PyViCareInternalServerError,
|
||||
PyViCareInvalidDataError,
|
||||
PyViCareNotSupportedFeatureError,
|
||||
PyViCareRateLimitError,
|
||||
@@ -222,6 +224,10 @@ class ViCareClimate(ViCareEntity, ClimateEntity):
|
||||
_LOGGER.error("Unable to decode data from ViCare server")
|
||||
except PyViCareInvalidDataError as invalid_data_exception:
|
||||
_LOGGER.error("Invalid data from Vicare server: %s", invalid_data_exception)
|
||||
except PyViCareDeviceCommunicationError as comm_exception:
|
||||
_LOGGER.warning("Device communication error: %s", comm_exception)
|
||||
except PyViCareInternalServerError as server_exception:
|
||||
_LOGGER.warning("Vicare server error: %s", server_exception)
|
||||
|
||||
@property
|
||||
def hvac_mode(self) -> HVACMode | None:
|
||||
|
||||
@@ -10,6 +10,8 @@ from typing import Any
|
||||
from PyViCare.PyViCareDevice import Device as PyViCareDevice
|
||||
from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig
|
||||
from PyViCare.PyViCareUtils import (
|
||||
PyViCareDeviceCommunicationError,
|
||||
PyViCareInternalServerError,
|
||||
PyViCareInvalidDataError,
|
||||
PyViCareNotSupportedFeatureError,
|
||||
PyViCareRateLimitError,
|
||||
@@ -193,6 +195,10 @@ class ViCareFan(ViCareEntity, FanEntity):
|
||||
_LOGGER.error("Vicare API rate limit exceeded: %s", limit_exception)
|
||||
except PyViCareInvalidDataError as invalid_data_exception:
|
||||
_LOGGER.error("Invalid data from Vicare server: %s", invalid_data_exception)
|
||||
except PyViCareDeviceCommunicationError as comm_exception:
|
||||
_LOGGER.warning("Device communication error: %s", comm_exception)
|
||||
except PyViCareInternalServerError as server_exception:
|
||||
_LOGGER.warning("Vicare server error: %s", server_exception)
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
|
||||
@@ -14,6 +14,8 @@ from PyViCare.PyViCareHeatingDevice import (
|
||||
HeatingDeviceWithComponent as PyViCareHeatingDeviceComponent,
|
||||
)
|
||||
from PyViCare.PyViCareUtils import (
|
||||
PyViCareDeviceCommunicationError,
|
||||
PyViCareInternalServerError,
|
||||
PyViCareInvalidDataError,
|
||||
PyViCareNotSupportedFeatureError,
|
||||
PyViCareRateLimitError,
|
||||
@@ -463,6 +465,10 @@ class ViCareNumber(ViCareEntity, NumberEntity):
|
||||
_LOGGER.error("Vicare API rate limit exceeded: %s", limit_exception)
|
||||
except PyViCareInvalidDataError as invalid_data_exception:
|
||||
_LOGGER.error("Invalid data from Vicare server: %s", invalid_data_exception)
|
||||
except PyViCareDeviceCommunicationError as comm_exception:
|
||||
_LOGGER.warning("Device communication error: %s", comm_exception)
|
||||
except PyViCareInternalServerError as server_exception:
|
||||
_LOGGER.warning("Vicare server error: %s", server_exception)
|
||||
|
||||
|
||||
def _get_value(
|
||||
|
||||
@@ -13,6 +13,8 @@ from PyViCare.PyViCareHeatingDevice import (
|
||||
HeatingDeviceWithComponent as PyViCareHeatingDeviceComponent,
|
||||
)
|
||||
from PyViCare.PyViCareUtils import (
|
||||
PyViCareDeviceCommunicationError,
|
||||
PyViCareInternalServerError,
|
||||
PyViCareInvalidDataError,
|
||||
PyViCareNotSupportedFeatureError,
|
||||
PyViCareRateLimitError,
|
||||
@@ -168,6 +170,16 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = (
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
ViCareSensorEntityDescription(
|
||||
key="primary_circuit_pump_rotation",
|
||||
translation_key="primary_circuit_pump_rotation",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
value_getter=lambda api: api.getPrimaryCircuitPumpRotation(),
|
||||
unit_getter=lambda api: api.getPrimaryCircuitPumpRotationUnit(),
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
ViCareSensorEntityDescription(
|
||||
key="secondary_circuit_supply_temperature",
|
||||
translation_key="secondary_circuit_supply_temperature",
|
||||
@@ -184,6 +196,36 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = (
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
ViCareSensorEntityDescription(
|
||||
key="hot_gas_temperature",
|
||||
translation_key="hot_gas_temperature",
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_getter=lambda api: api.getHotGasTemperature(),
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
ViCareSensorEntityDescription(
|
||||
key="liquid_gas_temperature",
|
||||
translation_key="liquid_gas_temperature",
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_getter=lambda api: api.getLiquidGasTemperature(),
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
ViCareSensorEntityDescription(
|
||||
key="suction_gas_temperature",
|
||||
translation_key="suction_gas_temperature",
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_getter=lambda api: api.getSuctionGasTemperature(),
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
ViCareSensorEntityDescription(
|
||||
key="hotwater_out_temperature",
|
||||
translation_key="hotwater_out_temperature",
|
||||
@@ -971,6 +1013,28 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = (
|
||||
value_getter=lambda api: api.getSupplyPressure(),
|
||||
unit_getter=lambda api: api.getSupplyPressureUnit(),
|
||||
),
|
||||
ViCareSensorEntityDescription(
|
||||
key="hot_gas_pressure",
|
||||
translation_key="hot_gas_pressure",
|
||||
device_class=SensorDeviceClass.PRESSURE,
|
||||
native_unit_of_measurement=UnitOfPressure.BAR,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_getter=lambda api: api.getHotGasPressure(),
|
||||
unit_getter=lambda api: api.getHotGasPressureUnit(),
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
ViCareSensorEntityDescription(
|
||||
key="suction_gas_pressure",
|
||||
translation_key="suction_gas_pressure",
|
||||
device_class=SensorDeviceClass.PRESSURE,
|
||||
native_unit_of_measurement=UnitOfPressure.BAR,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_getter=lambda api: api.getSuctionGasPressure(),
|
||||
unit_getter=lambda api: api.getSuctionGasPressureUnit(),
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
ViCareSensorEntityDescription(
|
||||
key="heating_rod_starts",
|
||||
translation_key="heating_rod_starts",
|
||||
@@ -1007,6 +1071,35 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = (
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_getter=lambda api: api.getSeasonalPerformanceFactorHeating(),
|
||||
),
|
||||
ViCareSensorEntityDescription(
|
||||
key="cop_heating",
|
||||
translation_key="cop_heating",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_getter=lambda api: api.getCoefficientOfPerformanceHeating(),
|
||||
),
|
||||
ViCareSensorEntityDescription(
|
||||
key="cop_dhw",
|
||||
translation_key="cop_dhw",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_getter=lambda api: api.getCoefficientOfPerformanceDHW(),
|
||||
),
|
||||
ViCareSensorEntityDescription(
|
||||
key="cop_total",
|
||||
translation_key="cop_total",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_getter=lambda api: api.getCoefficientOfPerformanceTotal(),
|
||||
),
|
||||
ViCareSensorEntityDescription(
|
||||
key="cop_cooling",
|
||||
translation_key="cop_cooling",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_getter=lambda api: api.getCoefficientOfPerformanceCooling(),
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
ViCareSensorEntityDescription(
|
||||
key="battery_level",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
@@ -1187,6 +1280,23 @@ BURNER_SENSORS: tuple[ViCareSensorEntityDescription, ...] = (
|
||||
)
|
||||
|
||||
COMPRESSOR_SENSORS: tuple[ViCareSensorEntityDescription, ...] = (
|
||||
ViCareSensorEntityDescription(
|
||||
key="compressor_power",
|
||||
translation_key="compressor_power",
|
||||
native_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
value_getter=lambda api: api.getPower(),
|
||||
unit_getter=lambda api: api.getPowerUnit(),
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
ViCareSensorEntityDescription(
|
||||
key="compressor_modulation",
|
||||
translation_key="compressor_modulation",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
value_getter=lambda api: api.getModulation(),
|
||||
unit_getter=lambda api: api.getModulationUnit(),
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
ViCareSensorEntityDescription(
|
||||
key="compressor_starts",
|
||||
translation_key="compressor_starts",
|
||||
@@ -1462,6 +1572,10 @@ class ViCareSensor(ViCareEntity, SensorEntity):
|
||||
_LOGGER.error("Vicare API rate limit exceeded: %s", limit_exception)
|
||||
except PyViCareInvalidDataError as invalid_data_exception:
|
||||
_LOGGER.error("Invalid data from Vicare server: %s", invalid_data_exception)
|
||||
except PyViCareDeviceCommunicationError as comm_exception:
|
||||
_LOGGER.warning("Device communication error: %s", comm_exception)
|
||||
except PyViCareInternalServerError as server_exception:
|
||||
_LOGGER.warning("Vicare server error: %s", server_exception)
|
||||
|
||||
if vicare_unit is not None:
|
||||
if (
|
||||
|
||||
@@ -221,6 +221,9 @@
|
||||
"compressor_inlet_temperature": {
|
||||
"name": "Compressor inlet temperature"
|
||||
},
|
||||
"compressor_modulation": {
|
||||
"name": "Compressor modulation"
|
||||
},
|
||||
"compressor_outlet_pressure": {
|
||||
"name": "Compressor outlet pressure"
|
||||
},
|
||||
@@ -241,6 +244,9 @@
|
||||
"ready": "[%key:common::state::idle%]"
|
||||
}
|
||||
},
|
||||
"compressor_power": {
|
||||
"name": "Compressor power"
|
||||
},
|
||||
"compressor_starts": {
|
||||
"name": "Compressor starts"
|
||||
},
|
||||
@@ -250,6 +256,18 @@
|
||||
"condenser_subcooling_temperature": {
|
||||
"name": "Condenser subcooling temperature"
|
||||
},
|
||||
"cop_cooling": {
|
||||
"name": "Coefficient of performance - cooling"
|
||||
},
|
||||
"cop_dhw": {
|
||||
"name": "Coefficient of performance - domestic hot water"
|
||||
},
|
||||
"cop_heating": {
|
||||
"name": "Coefficient of performance - heating"
|
||||
},
|
||||
"cop_total": {
|
||||
"name": "Coefficient of performance"
|
||||
},
|
||||
"dhw_storage_bottom_temperature": {
|
||||
"name": "DHW storage bottom temperature"
|
||||
},
|
||||
@@ -396,6 +414,12 @@
|
||||
"heating_rod_starts": {
|
||||
"name": "Heating rod starts"
|
||||
},
|
||||
"hot_gas_pressure": {
|
||||
"name": "Hot gas pressure"
|
||||
},
|
||||
"hot_gas_temperature": {
|
||||
"name": "Hot gas temperature"
|
||||
},
|
||||
"hotwater_gas_consumption_heating_this_month": {
|
||||
"name": "DHW gas consumption this month"
|
||||
},
|
||||
@@ -441,6 +465,9 @@
|
||||
"inverter_temperature": {
|
||||
"name": "Inverter temperature"
|
||||
},
|
||||
"liquid_gas_temperature": {
|
||||
"name": "Liquid gas temperature"
|
||||
},
|
||||
"outside_humidity": {
|
||||
"name": "Outside humidity"
|
||||
},
|
||||
@@ -508,6 +535,9 @@
|
||||
"power_production_today": {
|
||||
"name": "Energy production today"
|
||||
},
|
||||
"primary_circuit_pump_rotation": {
|
||||
"name": "Primary circuit pump rotation"
|
||||
},
|
||||
"primary_circuit_return_temperature": {
|
||||
"name": "Primary circuit return temperature"
|
||||
},
|
||||
@@ -547,6 +577,12 @@
|
||||
"spf_total": {
|
||||
"name": "Seasonal performance factor"
|
||||
},
|
||||
"suction_gas_pressure": {
|
||||
"name": "Suction gas pressure"
|
||||
},
|
||||
"suction_gas_temperature": {
|
||||
"name": "Suction gas temperature"
|
||||
},
|
||||
"supply_fan_hours": {
|
||||
"name": "Supply fan hours"
|
||||
},
|
||||
|
||||
@@ -13,6 +13,8 @@ from PyViCare.PyViCareHeatingDevice import (
|
||||
HeatingDeviceWithComponent as PyViCareHeatingDeviceComponent,
|
||||
)
|
||||
from PyViCare.PyViCareUtils import (
|
||||
PyViCareDeviceCommunicationError,
|
||||
PyViCareInternalServerError,
|
||||
PyViCareInvalidDataError,
|
||||
PyViCareNotSupportedFeatureError,
|
||||
PyViCareRateLimitError,
|
||||
@@ -72,6 +74,10 @@ def get_device_serial(device: PyViCareDevice) -> str | None:
|
||||
_LOGGER.debug("Vicare API rate limit exceeded: %s", limit_exception)
|
||||
except PyViCareInvalidDataError as invalid_data_exception:
|
||||
_LOGGER.debug("Invalid data from Vicare server: %s", invalid_data_exception)
|
||||
except PyViCareDeviceCommunicationError as comm_exception:
|
||||
_LOGGER.debug("Device communication error: %s", comm_exception)
|
||||
except PyViCareInternalServerError as server_exception:
|
||||
_LOGGER.debug("Vicare server error: %s", server_exception)
|
||||
except requests.exceptions.ConnectionError:
|
||||
_LOGGER.debug("Unable to retrieve data from ViCare server")
|
||||
except ValueError:
|
||||
|
||||
@@ -10,6 +10,8 @@ from PyViCare.PyViCareDevice import Device as PyViCareDevice
|
||||
from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig
|
||||
from PyViCare.PyViCareHeatingDevice import HeatingCircuit as PyViCareHeatingCircuit
|
||||
from PyViCare.PyViCareUtils import (
|
||||
PyViCareDeviceCommunicationError,
|
||||
PyViCareInternalServerError,
|
||||
PyViCareInvalidDataError,
|
||||
PyViCareNotSupportedFeatureError,
|
||||
PyViCareRateLimitError,
|
||||
@@ -143,6 +145,10 @@ class ViCareWater(ViCareEntity, WaterHeaterEntity):
|
||||
_LOGGER.error("Unable to decode data from ViCare server")
|
||||
except PyViCareInvalidDataError as invalid_data_exception:
|
||||
_LOGGER.error("Invalid data from Vicare server: %s", invalid_data_exception)
|
||||
except PyViCareDeviceCommunicationError as comm_exception:
|
||||
_LOGGER.warning("Device communication error: %s", comm_exception)
|
||||
except PyViCareInternalServerError as server_exception:
|
||||
_LOGGER.warning("Vicare server error: %s", server_exception)
|
||||
|
||||
def set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set new target temperatures."""
|
||||
|
||||
@@ -13,7 +13,7 @@ OAUTH2_AUTHORIZE = (
|
||||
OAUTH2_TOKEN = (
|
||||
"https://auth.weheat.nl/auth/realms/Weheat/protocol/openid-connect/token/"
|
||||
)
|
||||
API_URL = "https://api.weheat.nl"
|
||||
API_URL = "https://api.weheat.nl/third_party"
|
||||
OAUTH2_SCOPES = ["openid", "offline_access"]
|
||||
|
||||
|
||||
|
||||
@@ -39,6 +39,33 @@
|
||||
"electricity_used": {
|
||||
"default": "mdi:flash"
|
||||
},
|
||||
"electricity_used_cooling": {
|
||||
"default": "mdi:flash"
|
||||
},
|
||||
"electricity_used_defrost": {
|
||||
"default": "mdi:flash"
|
||||
},
|
||||
"electricity_used_dhw": {
|
||||
"default": "mdi:flash"
|
||||
},
|
||||
"electricity_used_heating": {
|
||||
"default": "mdi:flash"
|
||||
},
|
||||
"energy_output": {
|
||||
"default": "mdi:flash"
|
||||
},
|
||||
"energy_output_cooling": {
|
||||
"default": "mdi:snowflake"
|
||||
},
|
||||
"energy_output_defrost": {
|
||||
"default": "mdi:snowflake"
|
||||
},
|
||||
"energy_output_dhw": {
|
||||
"default": "mdi:heat-wave"
|
||||
},
|
||||
"energy_output_heating": {
|
||||
"default": "mdi:heat-wave"
|
||||
},
|
||||
"heat_pump_state": {
|
||||
"default": "mdi:state-machine"
|
||||
},
|
||||
|
||||
@@ -221,6 +221,73 @@ ENERGY_SENSORS = [
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
value_fn=lambda status: status.energy_output,
|
||||
),
|
||||
WeHeatSensorEntityDescription(
|
||||
translation_key="electricity_used_heating",
|
||||
key="electricity_used_heating",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
value_fn=lambda status: status.energy_in_heating,
|
||||
),
|
||||
WeHeatSensorEntityDescription(
|
||||
translation_key="electricity_used_cooling",
|
||||
key="electricity_used_cooling",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
value_fn=lambda status: status.energy_in_cooling,
|
||||
),
|
||||
WeHeatSensorEntityDescription(
|
||||
translation_key="electricity_used_defrost",
|
||||
key="electricity_used_defrost",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
value_fn=lambda status: status.energy_in_defrost,
|
||||
),
|
||||
WeHeatSensorEntityDescription(
|
||||
translation_key="energy_output_heating",
|
||||
key="energy_output_heating",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
value_fn=lambda status: status.energy_out_heating,
|
||||
),
|
||||
WeHeatSensorEntityDescription(
|
||||
translation_key="energy_output_cooling",
|
||||
key="energy_output_cooling",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
value_fn=lambda status: status.energy_out_cooling,
|
||||
),
|
||||
WeHeatSensorEntityDescription(
|
||||
translation_key="energy_output_defrost",
|
||||
key="energy_output_defrost",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
value_fn=lambda status: status.energy_out_defrost,
|
||||
),
|
||||
]
|
||||
|
||||
DHW_ENERGY_SENSORS = [
|
||||
WeHeatSensorEntityDescription(
|
||||
translation_key="electricity_used_dhw",
|
||||
key="electricity_used_dhw",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
value_fn=lambda status: status.energy_in_dhw,
|
||||
),
|
||||
WeHeatSensorEntityDescription(
|
||||
translation_key="energy_output_dhw",
|
||||
key="energy_output_dhw",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
value_fn=lambda status: status.energy_out_dhw,
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
@@ -253,6 +320,16 @@ async def async_setup_entry(
|
||||
if entity_description.value_fn(weheatdata.data_coordinator.data)
|
||||
is not None
|
||||
)
|
||||
entities.extend(
|
||||
WeheatHeatPumpSensor(
|
||||
weheatdata.heat_pump_info,
|
||||
weheatdata.energy_coordinator,
|
||||
entity_description,
|
||||
)
|
||||
for entity_description in DHW_ENERGY_SENSORS
|
||||
if entity_description.value_fn(weheatdata.energy_coordinator.data)
|
||||
is not None
|
||||
)
|
||||
entities.extend(
|
||||
WeheatHeatPumpSensor(
|
||||
weheatdata.heat_pump_info,
|
||||
|
||||
@@ -84,9 +84,33 @@
|
||||
"electricity_used": {
|
||||
"name": "Electricity used"
|
||||
},
|
||||
"electricity_used_cooling": {
|
||||
"name": "Electricity used cooling"
|
||||
},
|
||||
"electricity_used_defrost": {
|
||||
"name": "Electricity used defrost"
|
||||
},
|
||||
"electricity_used_dhw": {
|
||||
"name": "Electricity used DHW"
|
||||
},
|
||||
"electricity_used_heating": {
|
||||
"name": "Electricity used heating"
|
||||
},
|
||||
"energy_output": {
|
||||
"name": "Total energy output"
|
||||
},
|
||||
"energy_output_cooling": {
|
||||
"name": "Energy output cooling"
|
||||
},
|
||||
"energy_output_defrost": {
|
||||
"name": "Energy output defrost"
|
||||
},
|
||||
"energy_output_dhw": {
|
||||
"name": "Energy output DHW"
|
||||
},
|
||||
"energy_output_heating": {
|
||||
"name": "Energy output heating"
|
||||
},
|
||||
"heat_pump_state": {
|
||||
"state": {
|
||||
"cooling": "Cooling",
|
||||
|
||||
@@ -17,7 +17,7 @@ from .const import BRANDS_CONF_MAP, CONF_BRAND, DOMAIN, REGIONS_CONF_MAP
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PLATFORMS = [Platform.BINARY_SENSOR, Platform.CLIMATE, Platform.SENSOR]
|
||||
PLATFORMS = [Platform.BINARY_SENSOR, Platform.CLIMATE, Platform.SELECT, Platform.SENSOR]
|
||||
|
||||
type WhirlpoolConfigEntry = ConfigEntry[AppliancesManager]
|
||||
|
||||
|
||||
@@ -75,6 +75,7 @@ async def authenticate(
|
||||
and not appliances_manager.washers
|
||||
and not appliances_manager.dryers
|
||||
and not appliances_manager.ovens
|
||||
and not appliances_manager.refrigerators
|
||||
):
|
||||
return "no_appliances"
|
||||
|
||||
|
||||
@@ -14,4 +14,5 @@ BRANDS_CONF_MAP = {
|
||||
"Whirlpool": Brand.Whirlpool,
|
||||
"Maytag": Brand.Maytag,
|
||||
"KitchenAid": Brand.KitchenAid,
|
||||
"Consul": Brand.Consul,
|
||||
}
|
||||
|
||||
@@ -52,6 +52,10 @@ async def async_get_config_entry_diagnostics(
|
||||
oven.name: get_appliance_diagnostics(oven)
|
||||
for oven in appliances_manager.ovens
|
||||
},
|
||||
"refrigerators": {
|
||||
refrigerator.name: get_appliance_diagnostics(refrigerator)
|
||||
for refrigerator in appliances_manager.refrigerators
|
||||
},
|
||||
}
|
||||
|
||||
return {
|
||||
|
||||
88
homeassistant/components/whirlpool/select.py
Normal file
88
homeassistant/components/whirlpool/select.py
Normal file
@@ -0,0 +1,88 @@
|
||||
"""The select platform for Whirlpool Appliances."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import Final, override
|
||||
|
||||
from whirlpool.appliance import Appliance
|
||||
|
||||
from homeassistant.components.select import SelectEntity, SelectEntityDescription
|
||||
from homeassistant.const import UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import WhirlpoolConfigEntry
|
||||
from .const import DOMAIN
|
||||
from .entity import WhirlpoolEntity
|
||||
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class WhirlpoolSelectDescription(SelectEntityDescription):
|
||||
"""Class describing Whirlpool select entities."""
|
||||
|
||||
value_fn: Callable[[Appliance], str | None]
|
||||
set_fn: Callable[[Appliance, str], Awaitable[bool]]
|
||||
|
||||
|
||||
REFRIGERATOR_DESCRIPTIONS: Final[tuple[WhirlpoolSelectDescription, ...]] = (
|
||||
WhirlpoolSelectDescription(
|
||||
key="refrigerator_temperature_level",
|
||||
translation_key="refrigerator_temperature_level",
|
||||
options=["-4", "-2", "0", "3", "5"],
|
||||
unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda fridge: (
|
||||
str(val) if (val := fridge.get_offset_temp()) is not None else None
|
||||
),
|
||||
set_fn=lambda fridge, option: fridge.set_offset_temp(int(option)),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: WhirlpoolConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the select platform."""
|
||||
appliances_manager = config_entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
WhirlpoolSelectEntity(refrigerator, description)
|
||||
for refrigerator in appliances_manager.refrigerators
|
||||
for description in REFRIGERATOR_DESCRIPTIONS
|
||||
)
|
||||
|
||||
|
||||
class WhirlpoolSelectEntity(WhirlpoolEntity, SelectEntity):
|
||||
"""Whirlpool select entity."""
|
||||
|
||||
def __init__(
|
||||
self, appliance: Appliance, description: WhirlpoolSelectDescription
|
||||
) -> None:
|
||||
"""Initialize the select entity."""
|
||||
super().__init__(appliance, unique_id_suffix=f"-{description.key}")
|
||||
self.entity_description: WhirlpoolSelectDescription = description
|
||||
|
||||
@override
|
||||
@property
|
||||
def current_option(self) -> str | None:
|
||||
"""Retrieve currently selected option."""
|
||||
return self.entity_description.value_fn(self._appliance)
|
||||
|
||||
@override
|
||||
async def async_select_option(self, option: str) -> None:
|
||||
"""Set the selected option."""
|
||||
try:
|
||||
WhirlpoolSelectEntity._check_service_request(
|
||||
await self.entity_description.set_fn(self._appliance, option)
|
||||
)
|
||||
except ValueError as err:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_value_set",
|
||||
) from err
|
||||
@@ -46,6 +46,11 @@
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"select": {
|
||||
"refrigerator_temperature_level": {
|
||||
"name": "Temperature level"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"dryer_state": {
|
||||
"name": "[%key:component::whirlpool::entity::sensor::washer_state::name%]",
|
||||
@@ -211,6 +216,9 @@
|
||||
"appliances_fetch_failed": {
|
||||
"message": "Failed to fetch appliances"
|
||||
},
|
||||
"invalid_value_set": {
|
||||
"message": "Invalid value provided"
|
||||
},
|
||||
"request_failed": {
|
||||
"message": "Request failed"
|
||||
}
|
||||
|
||||
@@ -4,8 +4,9 @@ from __future__ import annotations
|
||||
|
||||
import functools
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.number import RestoreNumber
|
||||
from homeassistant.components.number import NumberDeviceClass, NumberMode, RestoreNumber
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -15,6 +16,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from .entity import ZHAEntity
|
||||
from .helpers import (
|
||||
SIGNAL_ADD_ENTITIES,
|
||||
EntityData,
|
||||
async_add_entities as zha_async_add_entities,
|
||||
convert_zha_error_to_ha_error,
|
||||
get_zha_data,
|
||||
@@ -45,6 +47,14 @@ async def async_setup_entry(
|
||||
class ZhaNumber(ZHAEntity, RestoreNumber):
|
||||
"""Representation of a ZHA Number entity."""
|
||||
|
||||
def __init__(self, entity_data: EntityData, **kwargs: Any) -> None:
|
||||
"""Initialize the ZHA number entity."""
|
||||
super().__init__(entity_data, **kwargs)
|
||||
entity = entity_data.entity
|
||||
if entity.device_class is not None:
|
||||
self._attr_device_class = NumberDeviceClass(entity.device_class)
|
||||
self._attr_mode = NumberMode(entity.mode)
|
||||
|
||||
@property
|
||||
def native_value(self) -> float | None:
|
||||
"""Return the current value."""
|
||||
|
||||
1
homeassistant/generated/config_flows.py
generated
1
homeassistant/generated/config_flows.py
generated
@@ -451,6 +451,7 @@ FLOWS = {
|
||||
"mullvad",
|
||||
"music_assistant",
|
||||
"mutesync",
|
||||
"myneomitis",
|
||||
"mysensors",
|
||||
"mystrom",
|
||||
"myuplink",
|
||||
|
||||
@@ -1784,7 +1784,7 @@
|
||||
},
|
||||
"enocean": {
|
||||
"name": "EnOcean",
|
||||
"integration_type": "device",
|
||||
"integration_type": "hub",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_push",
|
||||
"single_config_entry": true
|
||||
@@ -3754,7 +3754,7 @@
|
||||
"single_config_entry": true
|
||||
},
|
||||
"litterrobot": {
|
||||
"name": "Litter-Robot",
|
||||
"name": "Whisker",
|
||||
"integration_type": "hub",
|
||||
"config_flow": true,
|
||||
"iot_class": "cloud_push"
|
||||
@@ -4415,6 +4415,12 @@
|
||||
"config_flow": false,
|
||||
"iot_class": "local_push"
|
||||
},
|
||||
"myneomitis": {
|
||||
"name": "MyNeomitis",
|
||||
"integration_type": "hub",
|
||||
"config_flow": true,
|
||||
"iot_class": "cloud_push"
|
||||
},
|
||||
"mysensors": {
|
||||
"name": "MySensors",
|
||||
"integration_type": "hub",
|
||||
|
||||
@@ -223,9 +223,6 @@ num2words==0.5.14
|
||||
# This ensures all use the same version
|
||||
pymodbus==3.11.2
|
||||
|
||||
# Some packages don't support gql 4.0.0 yet
|
||||
gql<4.0.0
|
||||
|
||||
# Pin pytest-rerunfailures to prevent accidental breaks
|
||||
pytest-rerunfailures==16.0.1
|
||||
|
||||
|
||||
10
mypy.ini
generated
10
mypy.ini
generated
@@ -5589,6 +5589,16 @@ disallow_untyped_defs = true
|
||||
warn_return_any = true
|
||||
warn_unreachable = true
|
||||
|
||||
[mypy-homeassistant.components.velux.*]
|
||||
check_untyped_defs = true
|
||||
disallow_incomplete_defs = true
|
||||
disallow_subclassing_any = true
|
||||
disallow_untyped_calls = true
|
||||
disallow_untyped_decorators = true
|
||||
disallow_untyped_defs = true
|
||||
warn_return_any = true
|
||||
warn_unreachable = true
|
||||
|
||||
[mypy-homeassistant.components.vivotek.*]
|
||||
check_untyped_defs = true
|
||||
disallow_incomplete_defs = true
|
||||
|
||||
19
requirements_all.txt
generated
19
requirements_all.txt
generated
@@ -1238,7 +1238,7 @@ homelink-integration-api==0.0.1
|
||||
homematicip==2.6.0
|
||||
|
||||
# homeassistant.components.homevolt
|
||||
homevolt==0.4.4
|
||||
homevolt==0.5.0
|
||||
|
||||
# homeassistant.components.horizon
|
||||
horimote==0.4.1
|
||||
@@ -1466,6 +1466,9 @@ lxml==6.0.1
|
||||
# homeassistant.components.matrix
|
||||
matrix-nio==0.25.2
|
||||
|
||||
# homeassistant.components.matter
|
||||
matter-python-client==0.4.1
|
||||
|
||||
# homeassistant.components.maxcube
|
||||
maxcube-api==0.4.3
|
||||
|
||||
@@ -1901,7 +1904,7 @@ pyRFXtrx==0.31.1
|
||||
pySDCP==1
|
||||
|
||||
# homeassistant.components.tibber
|
||||
pyTibber==0.35.0
|
||||
pyTibber==0.36.0
|
||||
|
||||
# homeassistant.components.dlink
|
||||
pyW215==0.8.0
|
||||
@@ -1952,6 +1955,9 @@ pyatv==0.17.0
|
||||
# homeassistant.components.aussie_broadband
|
||||
pyaussiebb==0.1.5
|
||||
|
||||
# homeassistant.components.myneomitis
|
||||
pyaxencoapi==1.0.6
|
||||
|
||||
# homeassistant.components.balboa
|
||||
pybalboa==1.1.3
|
||||
|
||||
@@ -2176,7 +2182,7 @@ pyituran==0.1.5
|
||||
pyjvcprojector==2.0.1
|
||||
|
||||
# homeassistant.components.kaleidescape
|
||||
pykaleidescape==1.1.1
|
||||
pykaleidescape==1.1.3
|
||||
|
||||
# homeassistant.components.kira
|
||||
pykira==0.1.1
|
||||
@@ -2524,7 +2530,7 @@ python-awair==0.2.5
|
||||
python-blockchain-api==0.0.2
|
||||
|
||||
# homeassistant.components.bsblan
|
||||
python-bsblan==4.2.1
|
||||
python-bsblan==5.0.1
|
||||
|
||||
# homeassistant.components.citybikes
|
||||
python-citybikes==0.3.3
|
||||
@@ -2580,9 +2586,6 @@ python-kasa[speedups]==0.10.2
|
||||
# homeassistant.components.linkplay
|
||||
python-linkplay==0.2.12
|
||||
|
||||
# homeassistant.components.matter
|
||||
python-matter-server==8.1.2
|
||||
|
||||
# homeassistant.components.melcloud
|
||||
python-melcloud==0.1.2
|
||||
|
||||
@@ -2612,7 +2615,7 @@ python-opensky==1.0.1
|
||||
python-otbr-api==2.8.0
|
||||
|
||||
# homeassistant.components.overseerr
|
||||
python-overseerr==0.8.0
|
||||
python-overseerr==0.9.0
|
||||
|
||||
# homeassistant.components.picnic
|
||||
python-picnic-api2==1.3.1
|
||||
|
||||
19
requirements_test_all.txt
generated
19
requirements_test_all.txt
generated
@@ -1099,7 +1099,7 @@ homelink-integration-api==0.0.1
|
||||
homematicip==2.6.0
|
||||
|
||||
# homeassistant.components.homevolt
|
||||
homevolt==0.4.4
|
||||
homevolt==0.5.0
|
||||
|
||||
# homeassistant.components.remember_the_milk
|
||||
httplib2==0.20.4
|
||||
@@ -1282,6 +1282,9 @@ lxml==6.0.1
|
||||
# homeassistant.components.matrix
|
||||
matrix-nio==0.25.2
|
||||
|
||||
# homeassistant.components.matter
|
||||
matter-python-client==0.4.1
|
||||
|
||||
# homeassistant.components.maxcube
|
||||
maxcube-api==0.4.3
|
||||
|
||||
@@ -1638,7 +1641,7 @@ pyHomee==1.3.8
|
||||
pyRFXtrx==0.31.1
|
||||
|
||||
# homeassistant.components.tibber
|
||||
pyTibber==0.35.0
|
||||
pyTibber==0.36.0
|
||||
|
||||
# homeassistant.components.dlink
|
||||
pyW215==0.8.0
|
||||
@@ -1683,6 +1686,9 @@ pyatv==0.17.0
|
||||
# homeassistant.components.aussie_broadband
|
||||
pyaussiebb==0.1.5
|
||||
|
||||
# homeassistant.components.myneomitis
|
||||
pyaxencoapi==1.0.6
|
||||
|
||||
# homeassistant.components.balboa
|
||||
pybalboa==1.1.3
|
||||
|
||||
@@ -1853,7 +1859,7 @@ pyituran==0.1.5
|
||||
pyjvcprojector==2.0.1
|
||||
|
||||
# homeassistant.components.kaleidescape
|
||||
pykaleidescape==1.1.1
|
||||
pykaleidescape==1.1.3
|
||||
|
||||
# homeassistant.components.kira
|
||||
pykira==0.1.1
|
||||
@@ -2147,7 +2153,7 @@ python-MotionMount==2.3.0
|
||||
python-awair==0.2.5
|
||||
|
||||
# homeassistant.components.bsblan
|
||||
python-bsblan==4.2.1
|
||||
python-bsblan==5.0.1
|
||||
|
||||
# homeassistant.components.ecobee
|
||||
python-ecobee-api==0.3.2
|
||||
@@ -2176,9 +2182,6 @@ python-kasa[speedups]==0.10.2
|
||||
# homeassistant.components.linkplay
|
||||
python-linkplay==0.2.12
|
||||
|
||||
# homeassistant.components.matter
|
||||
python-matter-server==8.1.2
|
||||
|
||||
# homeassistant.components.melcloud
|
||||
python-melcloud==0.1.2
|
||||
|
||||
@@ -2208,7 +2211,7 @@ python-opensky==1.0.1
|
||||
python-otbr-api==2.8.0
|
||||
|
||||
# homeassistant.components.overseerr
|
||||
python-overseerr==0.8.0
|
||||
python-overseerr==0.9.0
|
||||
|
||||
# homeassistant.components.picnic
|
||||
python-picnic-api2==1.3.1
|
||||
|
||||
@@ -212,9 +212,6 @@ num2words==0.5.14
|
||||
# This ensures all use the same version
|
||||
pymodbus==3.11.2
|
||||
|
||||
# Some packages don't support gql 4.0.0 yet
|
||||
gql<4.0.0
|
||||
|
||||
# Pin pytest-rerunfailures to prevent accidental breaks
|
||||
pytest-rerunfailures==16.0.1
|
||||
|
||||
|
||||
@@ -1954,7 +1954,6 @@ INTEGRATIONS_WITHOUT_SCALE = [
|
||||
"template",
|
||||
"tesla_fleet",
|
||||
"tesla_wall_connector",
|
||||
"teslemetry",
|
||||
"tessie",
|
||||
"tfiac",
|
||||
"thermobeacon",
|
||||
|
||||
@@ -6,7 +6,7 @@ from unittest.mock import AsyncMock, MagicMock, patch
|
||||
from airos.airos8 import AirOS8Data
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.airos.const import DOMAIN
|
||||
from homeassistant.components.airos.const import DEFAULT_USERNAME, DOMAIN
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
|
||||
|
||||
from tests.common import MockConfigEntry, load_json_object_fixture
|
||||
@@ -47,6 +47,7 @@ def mock_airos_client(
|
||||
client = mock_airos_class.return_value
|
||||
client.status.return_value = ap_fixture
|
||||
client.login.return_value = True
|
||||
client.reboot.return_value = True
|
||||
return client
|
||||
|
||||
|
||||
@@ -59,7 +60,17 @@ def mock_config_entry() -> MockConfigEntry:
|
||||
data={
|
||||
CONF_HOST: "1.1.1.1",
|
||||
CONF_PASSWORD: "test-password",
|
||||
CONF_USERNAME: "ubnt",
|
||||
CONF_USERNAME: DEFAULT_USERNAME,
|
||||
},
|
||||
unique_id="01:23:45:67:89:AB",
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_discovery_method() -> Generator[AsyncMock]:
|
||||
"""Mock the internal discovery method of the config flow."""
|
||||
with patch(
|
||||
"homeassistant.components.airos.config_flow.airos_discover_devices",
|
||||
new_callable=AsyncMock,
|
||||
) as mock_method:
|
||||
yield mock_method
|
||||
|
||||
116
tests/components/airos/test_button.py
Normal file
116
tests/components/airos/test_button.py
Normal file
@@ -0,0 +1,116 @@
|
||||
"""Test the Ubiquiti airOS buttons."""
|
||||
|
||||
from unittest.mock import AsyncMock
|
||||
|
||||
from airos.exceptions import AirOSDataMissingError, AirOSDeviceConnectionError
|
||||
import pytest
|
||||
|
||||
from homeassistant.const import ATTR_ENTITY_ID, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
from . import setup_integration
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
REBOOT_ENTITY_ID = "button.nanostation_5ac_ap_name_restart"
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
|
||||
async def test_reboot_button_press_success(
|
||||
hass: HomeAssistant,
|
||||
mock_airos_client: AsyncMock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
entity_registry: er.EntityRegistry,
|
||||
) -> None:
|
||||
"""Test that pressing the reboot button utilizes the correct calls."""
|
||||
await setup_integration(hass, mock_config_entry, [Platform.BUTTON])
|
||||
|
||||
entity = entity_registry.async_get(REBOOT_ENTITY_ID)
|
||||
assert entity
|
||||
assert entity.unique_id == f"{mock_config_entry.unique_id}_reboot"
|
||||
|
||||
await hass.services.async_call(
|
||||
"button",
|
||||
"press",
|
||||
{ATTR_ENTITY_ID: REBOOT_ENTITY_ID},
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
mock_airos_client.reboot.assert_awaited_once()
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
|
||||
async def test_reboot_button_press_fail(
|
||||
hass: HomeAssistant,
|
||||
mock_airos_client: AsyncMock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test that pressing the reboot button utilizes the correct calls."""
|
||||
await setup_integration(hass, mock_config_entry, [Platform.BUTTON])
|
||||
|
||||
mock_airos_client.reboot.return_value = False
|
||||
|
||||
with pytest.raises(HomeAssistantError):
|
||||
await hass.services.async_call(
|
||||
"button",
|
||||
"press",
|
||||
{ATTR_ENTITY_ID: REBOOT_ENTITY_ID},
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
mock_airos_client.reboot.assert_awaited_once()
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
|
||||
@pytest.mark.parametrize(
|
||||
"exception",
|
||||
[
|
||||
AirOSDeviceConnectionError,
|
||||
AirOSDataMissingError,
|
||||
],
|
||||
)
|
||||
async def test_reboot_button_press_exceptions(
|
||||
hass: HomeAssistant,
|
||||
mock_airos_client: AsyncMock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
exception: Exception,
|
||||
) -> None:
|
||||
"""Test reboot failure is handled gracefully."""
|
||||
await setup_integration(hass, mock_config_entry, [Platform.BUTTON])
|
||||
|
||||
mock_airos_client.login.side_effect = exception
|
||||
|
||||
with pytest.raises(HomeAssistantError):
|
||||
await hass.services.async_call(
|
||||
"button",
|
||||
"press",
|
||||
{ATTR_ENTITY_ID: REBOOT_ENTITY_ID},
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
mock_airos_client.reboot.assert_not_awaited()
|
||||
|
||||
mock_airos_client.login.side_effect = None
|
||||
mock_airos_client.reboot.side_effect = exception
|
||||
|
||||
with pytest.raises(HomeAssistantError):
|
||||
await hass.services.async_call(
|
||||
"button",
|
||||
"press",
|
||||
{ATTR_ENTITY_ID: REBOOT_ENTITY_ID},
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
mock_airos_client.reboot.assert_awaited_once()
|
||||
|
||||
mock_airos_client.reboot.side_effect = None
|
||||
|
||||
await hass.services.async_call(
|
||||
"button",
|
||||
"press",
|
||||
{ATTR_ENTITY_ID: REBOOT_ENTITY_ID},
|
||||
blocking=True,
|
||||
)
|
||||
mock_airos_client.reboot.assert_awaited()
|
||||
@@ -5,12 +5,23 @@ from unittest.mock import AsyncMock
|
||||
|
||||
from airos.exceptions import (
|
||||
AirOSConnectionAuthenticationError,
|
||||
AirOSConnectionSetupError,
|
||||
AirOSDeviceConnectionError,
|
||||
AirOSEndpointError,
|
||||
AirOSKeyDataMissingError,
|
||||
AirOSListenerError,
|
||||
)
|
||||
import pytest
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.airos.const import DOMAIN, SECTION_ADVANCED_SETTINGS
|
||||
from homeassistant.components.airos.const import (
|
||||
DEFAULT_USERNAME,
|
||||
DOMAIN,
|
||||
HOSTNAME,
|
||||
IP_ADDRESS,
|
||||
MAC_ADDRESS,
|
||||
SECTION_ADVANCED_SETTINGS,
|
||||
)
|
||||
from homeassistant.config_entries import SOURCE_RECONFIGURE, SOURCE_USER
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
@@ -28,39 +39,64 @@ NEW_PASSWORD = "new_password"
|
||||
REAUTH_STEP = "reauth_confirm"
|
||||
RECONFIGURE_STEP = "reconfigure"
|
||||
|
||||
MOCK_ADVANCED_SETTINGS = {
|
||||
CONF_SSL: True,
|
||||
CONF_VERIFY_SSL: False,
|
||||
}
|
||||
|
||||
MOCK_CONFIG = {
|
||||
CONF_HOST: "1.1.1.1",
|
||||
CONF_USERNAME: "ubnt",
|
||||
CONF_USERNAME: DEFAULT_USERNAME,
|
||||
CONF_PASSWORD: "test-password",
|
||||
SECTION_ADVANCED_SETTINGS: {
|
||||
CONF_SSL: True,
|
||||
CONF_VERIFY_SSL: False,
|
||||
},
|
||||
SECTION_ADVANCED_SETTINGS: MOCK_ADVANCED_SETTINGS,
|
||||
}
|
||||
MOCK_CONFIG_REAUTH = {
|
||||
CONF_HOST: "1.1.1.1",
|
||||
CONF_USERNAME: "ubnt",
|
||||
CONF_USERNAME: DEFAULT_USERNAME,
|
||||
CONF_PASSWORD: "wrong-password",
|
||||
}
|
||||
|
||||
MOCK_DISC_DEV1 = {
|
||||
MAC_ADDRESS: "00:11:22:33:44:55",
|
||||
IP_ADDRESS: "192.168.1.100",
|
||||
HOSTNAME: "Test-Device-1",
|
||||
}
|
||||
MOCK_DISC_DEV2 = {
|
||||
MAC_ADDRESS: "AA:BB:CC:DD:EE:FF",
|
||||
IP_ADDRESS: "192.168.1.101",
|
||||
HOSTNAME: "Test-Device-2",
|
||||
}
|
||||
MOCK_DISC_EXISTS = {
|
||||
MAC_ADDRESS: "01:23:45:67:89:AB",
|
||||
IP_ADDRESS: "192.168.1.102",
|
||||
HOSTNAME: "Existing-Device",
|
||||
}
|
||||
|
||||
async def test_form_creates_entry(
|
||||
|
||||
async def test_manual_flow_creates_entry(
|
||||
hass: HomeAssistant,
|
||||
mock_setup_entry: AsyncMock,
|
||||
mock_airos_client: AsyncMock,
|
||||
ap_fixture: dict[str, Any],
|
||||
) -> None:
|
||||
"""Test we get the form and create the appropriate entry."""
|
||||
"""Test we get the user form and create the appropriate entry."""
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": SOURCE_USER},
|
||||
)
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["errors"] == {}
|
||||
|
||||
assert result["type"] is FlowResultType.MENU
|
||||
assert "manual" in result["menu_options"]
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
MOCK_CONFIG,
|
||||
result["flow_id"], {"next_step_id": "manual"}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "manual"
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"], MOCK_CONFIG
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
@@ -73,22 +109,26 @@ async def test_form_creates_entry(
|
||||
async def test_form_duplicate_entry(
|
||||
hass: HomeAssistant,
|
||||
mock_airos_client: AsyncMock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
mock_setup_entry: AsyncMock,
|
||||
) -> None:
|
||||
"""Test the form does not allow duplicate entries."""
|
||||
mock_config_entry.add_to_hass(hass)
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
mock_entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
unique_id="01:23:45:67:89:AB",
|
||||
data=MOCK_CONFIG,
|
||||
)
|
||||
mock_entry.add_to_hass(hass)
|
||||
|
||||
flow_start = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": SOURCE_USER},
|
||||
)
|
||||
|
||||
menu = await hass.config_entries.flow.async_configure(
|
||||
flow_start["flow_id"], {"next_step_id": "manual"}
|
||||
)
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert not result["errors"]
|
||||
assert result["step_id"] == "user"
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
MOCK_CONFIG,
|
||||
menu["flow_id"], MOCK_CONFIG
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
@@ -98,6 +138,8 @@ async def test_form_duplicate_entry(
|
||||
@pytest.mark.parametrize(
|
||||
("exception", "error"),
|
||||
[
|
||||
(AirOSConnectionAuthenticationError, "invalid_auth"),
|
||||
(AirOSConnectionSetupError, "cannot_connect"),
|
||||
(AirOSDeviceConnectionError, "cannot_connect"),
|
||||
(AirOSKeyDataMissingError, "key_data_missing"),
|
||||
(Exception, "unknown"),
|
||||
@@ -113,13 +155,17 @@ async def test_form_exception_handling(
|
||||
"""Test we handle exceptions."""
|
||||
mock_airos_client.login.side_effect = exception
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
flow_start = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": SOURCE_USER},
|
||||
)
|
||||
|
||||
menu = await hass.config_entries.flow.async_configure(
|
||||
flow_start["flow_id"], {"next_step_id": "manual"}
|
||||
)
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
MOCK_CONFIG,
|
||||
menu["flow_id"], MOCK_CONFIG
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
@@ -402,3 +448,235 @@ async def test_reconfigure_unique_id_mismatch(
|
||||
updated_entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL]
|
||||
== MOCK_CONFIG[SECTION_ADVANCED_SETTINGS][CONF_SSL]
|
||||
)
|
||||
|
||||
|
||||
async def test_discover_flow_no_devices_found(
|
||||
hass: HomeAssistant, mock_discovery_method
|
||||
) -> None:
|
||||
"""Test discovery flow aborts when no devices are found."""
|
||||
mock_discovery_method.return_value = {}
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"], {"next_step_id": "discovery"}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.SHOW_PROGRESS
|
||||
assert result["step_id"] == "discovery"
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(result["flow_id"])
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "no_devices_found"
|
||||
|
||||
|
||||
async def test_discover_flow_one_device_found(
|
||||
hass: HomeAssistant, mock_discovery_method, mock_airos_client, mock_setup_entry
|
||||
) -> None:
|
||||
"""Test discovery flow goes straight to credentials when one device is found."""
|
||||
mock_discovery_method.return_value = {MOCK_DISC_DEV1[MAC_ADDRESS]: MOCK_DISC_DEV1}
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"], {"next_step_id": "discovery"}
|
||||
)
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(result["flow_id"])
|
||||
|
||||
# With only one device, the flow should skip the select step and
|
||||
# go directly to configure_device.
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "configure_device"
|
||||
assert result["description_placeholders"]["device_name"] == MOCK_DISC_DEV1[HOSTNAME]
|
||||
|
||||
# Provide credentials and complete the flow
|
||||
mock_airos_client.status.return_value.derived.mac = MOCK_DISC_DEV1[MAC_ADDRESS]
|
||||
mock_airos_client.status.return_value.host.hostname = MOCK_DISC_DEV1[HOSTNAME]
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_USERNAME: DEFAULT_USERNAME,
|
||||
CONF_PASSWORD: "test-password",
|
||||
SECTION_ADVANCED_SETTINGS: MOCK_ADVANCED_SETTINGS,
|
||||
},
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert result["title"] == MOCK_DISC_DEV1[HOSTNAME]
|
||||
assert result["data"][CONF_HOST] == MOCK_DISC_DEV1[IP_ADDRESS]
|
||||
|
||||
|
||||
async def test_discover_flow_multiple_devices_found(
|
||||
hass: HomeAssistant, mock_discovery_method, mock_airos_client, mock_setup_entry
|
||||
) -> None:
|
||||
"""Test discovery flow with multiple devices found, requiring a selection step."""
|
||||
mock_discovery_method.return_value = {
|
||||
MOCK_DISC_DEV1[MAC_ADDRESS]: MOCK_DISC_DEV1,
|
||||
MOCK_DISC_DEV2[MAC_ADDRESS]: MOCK_DISC_DEV2,
|
||||
}
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.MENU
|
||||
assert "discovery" in result["menu_options"]
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"], {"next_step_id": "discovery"}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.SHOW_PROGRESS
|
||||
assert result["step_id"] == "discovery"
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(result["flow_id"])
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "select_device"
|
||||
|
||||
expected_options = {
|
||||
MOCK_DISC_DEV1[MAC_ADDRESS]: (
|
||||
f"{MOCK_DISC_DEV1[HOSTNAME]} ({MOCK_DISC_DEV1[IP_ADDRESS]})"
|
||||
),
|
||||
MOCK_DISC_DEV2[MAC_ADDRESS]: (
|
||||
f"{MOCK_DISC_DEV2[HOSTNAME]} ({MOCK_DISC_DEV2[IP_ADDRESS]})"
|
||||
),
|
||||
}
|
||||
actual_options = result["data_schema"].schema[vol.Required(MAC_ADDRESS)].container
|
||||
assert actual_options == expected_options
|
||||
|
||||
# Select one of the devices
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"], {MAC_ADDRESS: MOCK_DISC_DEV1[MAC_ADDRESS]}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "configure_device"
|
||||
assert result["description_placeholders"]["device_name"] == MOCK_DISC_DEV1[HOSTNAME]
|
||||
|
||||
# Provide credentials and complete the flow
|
||||
mock_airos_client.status.return_value.derived.mac = MOCK_DISC_DEV1[MAC_ADDRESS]
|
||||
mock_airos_client.status.return_value.host.hostname = MOCK_DISC_DEV1[HOSTNAME]
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_USERNAME: DEFAULT_USERNAME,
|
||||
CONF_PASSWORD: "test-password",
|
||||
SECTION_ADVANCED_SETTINGS: MOCK_ADVANCED_SETTINGS,
|
||||
},
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert result["title"] == MOCK_DISC_DEV1[HOSTNAME]
|
||||
assert result["data"][CONF_HOST] == MOCK_DISC_DEV1[IP_ADDRESS]
|
||||
|
||||
|
||||
async def test_discover_flow_with_existing_device(
|
||||
hass: HomeAssistant, mock_discovery_method, mock_airos_client
|
||||
) -> None:
|
||||
"""Test that discovery ignores devices that are already configured."""
|
||||
# Add a mock config entry for an existing device
|
||||
mock_entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
unique_id=MOCK_DISC_EXISTS[MAC_ADDRESS],
|
||||
data=MOCK_CONFIG,
|
||||
)
|
||||
mock_entry.add_to_hass(hass)
|
||||
|
||||
# Mock discovery to find both a new device and the existing one
|
||||
mock_discovery_method.return_value = {
|
||||
MOCK_DISC_DEV1[MAC_ADDRESS]: MOCK_DISC_DEV1,
|
||||
MOCK_DISC_EXISTS[MAC_ADDRESS]: MOCK_DISC_EXISTS,
|
||||
}
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"], {"next_step_id": "discovery"}
|
||||
)
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(result["flow_id"])
|
||||
|
||||
# The flow should proceed with only the new device
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "configure_device"
|
||||
assert result["description_placeholders"]["device_name"] == MOCK_DISC_DEV1[HOSTNAME]
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("exception", "reason"),
|
||||
[
|
||||
(AirOSEndpointError, "detect_error"),
|
||||
(AirOSListenerError, "listen_error"),
|
||||
(Exception, "discovery_failed"),
|
||||
],
|
||||
)
|
||||
async def test_discover_flow_discovery_exceptions(
|
||||
hass: HomeAssistant,
|
||||
mock_discovery_method,
|
||||
exception: Exception,
|
||||
reason: str,
|
||||
) -> None:
|
||||
"""Test discovery flow aborts on various discovery exceptions."""
|
||||
mock_discovery_method.side_effect = exception
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"], {"next_step_id": "discovery"}
|
||||
)
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(result["flow_id"])
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == reason
|
||||
|
||||
|
||||
async def test_configure_device_flow_exceptions(
|
||||
hass: HomeAssistant, mock_discovery_method, mock_airos_client
|
||||
) -> None:
|
||||
"""Test configure_device step handles authentication and connection exceptions."""
|
||||
mock_discovery_method.return_value = {MOCK_DISC_DEV1[MAC_ADDRESS]: MOCK_DISC_DEV1}
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"], {"next_step_id": "discovery"}
|
||||
)
|
||||
|
||||
mock_airos_client.login.side_effect = AirOSConnectionAuthenticationError
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_USERNAME: "wrong-user",
|
||||
CONF_PASSWORD: "wrong-password",
|
||||
SECTION_ADVANCED_SETTINGS: MOCK_ADVANCED_SETTINGS,
|
||||
},
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["errors"] == {"base": "invalid_auth"}
|
||||
|
||||
mock_airos_client.login.side_effect = AirOSDeviceConnectionError
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_USERNAME: DEFAULT_USERNAME,
|
||||
CONF_PASSWORD: "some-password",
|
||||
SECTION_ADVANCED_SETTINGS: MOCK_ADVANCED_SETTINGS,
|
||||
},
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["errors"] == {"base": "cannot_connect"}
|
||||
|
||||
@@ -165,6 +165,16 @@ async def setup_ha(hass: HomeAssistant) -> None:
|
||||
assert await async_setup_component(hass, "homeassistant", {})
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_setup_entry() -> Generator[AsyncMock]:
|
||||
"""Mock setup entry."""
|
||||
with patch(
|
||||
"homeassistant.components.anthropic.async_setup_entry",
|
||||
return_value=True,
|
||||
) as mock_setup:
|
||||
yield mock_setup
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_create_stream() -> Generator[AsyncMock]:
|
||||
"""Mock stream response."""
|
||||
|
||||
@@ -8,12 +8,7 @@
|
||||
'role': 'user',
|
||||
}),
|
||||
dict({
|
||||
'content': list([
|
||||
dict({
|
||||
'text': '{"characters": ["Mario", "Luigi"]}',
|
||||
'type': 'text',
|
||||
}),
|
||||
]),
|
||||
'content': '{"characters": ["Mario", "Luigi"]}',
|
||||
'role': 'assistant',
|
||||
}),
|
||||
]),
|
||||
@@ -66,12 +61,7 @@
|
||||
'role': 'user',
|
||||
}),
|
||||
dict({
|
||||
'content': list([
|
||||
dict({
|
||||
'text': '{"characters": ["Mario", "Luigi"]}',
|
||||
'type': 'text',
|
||||
}),
|
||||
]),
|
||||
'content': '{"characters": ["Mario", "Luigi"]}',
|
||||
'role': 'assistant',
|
||||
}),
|
||||
]),
|
||||
@@ -129,6 +119,85 @@
|
||||
}),
|
||||
dict({
|
||||
'content': list([
|
||||
dict({
|
||||
'signature': 'ErUBCkYIARgCIkCYXaVNJShe3A86Hp7XUzh9YsCYBbJTbQsrklTAPtJ2sP/NoB6tSzpK/nTL6CjSo2R6n0KNBIg5MH6asM2R/kmaEgyB/X1FtZq5OQAC7jUaDEPWCdcwGQ4RaBy5wiIwmRxExIlDhoY6tILoVPnOExkC/0igZxHEwxK8RU/fmw0b+o+TwAarzUitwzbo21E5Kh3pa3I6yqVROf1t2F8rFocNUeCegsWV/ytwYV+ayA==',
|
||||
'thinking': "Let's use the tool to respond",
|
||||
'type': 'thinking',
|
||||
}),
|
||||
dict({
|
||||
'text': '{"characters": ["Mario", "Luigi"]}',
|
||||
'type': 'text',
|
||||
}),
|
||||
]),
|
||||
'role': 'assistant',
|
||||
}),
|
||||
]),
|
||||
'model': 'claude-sonnet-4-0',
|
||||
'stream': True,
|
||||
'system': list([
|
||||
dict({
|
||||
'cache_control': dict({
|
||||
'type': 'ephemeral',
|
||||
}),
|
||||
'text': '''
|
||||
You are a Home Assistant expert and help users with their tasks.
|
||||
Current time is 04:00:00. Today's date is 2026-01-01.
|
||||
''',
|
||||
'type': 'text',
|
||||
}),
|
||||
dict({
|
||||
'text': "Claude MUST use the 'test_task' tool to provide the final answer instead of plain text.",
|
||||
'type': 'text',
|
||||
}),
|
||||
]),
|
||||
'thinking': dict({
|
||||
'budget_tokens': 1500,
|
||||
'type': 'enabled',
|
||||
}),
|
||||
'tool_choice': dict({
|
||||
'type': 'auto',
|
||||
}),
|
||||
'tools': list([
|
||||
dict({
|
||||
'description': 'Use this tool to reply to the user',
|
||||
'input_schema': dict({
|
||||
'properties': dict({
|
||||
'characters': dict({
|
||||
'items': dict({
|
||||
'type': 'string',
|
||||
}),
|
||||
'type': 'array',
|
||||
}),
|
||||
}),
|
||||
'required': list([
|
||||
'characters',
|
||||
]),
|
||||
'type': 'object',
|
||||
}),
|
||||
'name': 'test_task',
|
||||
}),
|
||||
]),
|
||||
})
|
||||
# ---
|
||||
# name: test_generate_structured_data_legacy_extra_text_block
|
||||
dict({
|
||||
'max_tokens': 3000,
|
||||
'messages': list([
|
||||
dict({
|
||||
'content': 'Generate test data',
|
||||
'role': 'user',
|
||||
}),
|
||||
dict({
|
||||
'content': list([
|
||||
dict({
|
||||
'signature': 'ErUBCkYIARgCIkCYXaVNJShe3A86Hp7XUzh9YsCYBbJTbQsrklTAPtJ2sP/NoB6tSzpK/nTL6CjSo2R6n0KNBIg5MH6asM2R/kmaEgyB/X1FtZq5OQAC7jUaDEPWCdcwGQ4RaBy5wiIwmRxExIlDhoY6tILoVPnOExkC/0igZxHEwxK8RU/fmw0b+o+TwAarzUitwzbo21E5Kh3pa3I6yqVROf1t2F8rFocNUeCegsWV/ytwYV+ayA==',
|
||||
'thinking': "Let's use the tool to respond",
|
||||
'type': 'thinking',
|
||||
}),
|
||||
dict({
|
||||
'text': 'Sure!',
|
||||
'type': 'text',
|
||||
}),
|
||||
dict({
|
||||
'text': '{"characters": ["Mario", "Luigi"]}',
|
||||
'type': 'text',
|
||||
@@ -193,12 +262,7 @@
|
||||
'role': 'user',
|
||||
}),
|
||||
dict({
|
||||
'content': list([
|
||||
dict({
|
||||
'text': '{"characters": ["Mario", "Luigi"]}',
|
||||
'type': 'text',
|
||||
}),
|
||||
]),
|
||||
'content': '{"characters": ["Mario", "Luigi"]}',
|
||||
'role': 'assistant',
|
||||
}),
|
||||
]),
|
||||
|
||||
@@ -37,12 +37,7 @@
|
||||
'role': 'user',
|
||||
}),
|
||||
dict({
|
||||
'content': list([
|
||||
dict({
|
||||
'text': 'Hello, how can I help you today?',
|
||||
'type': 'text',
|
||||
}),
|
||||
]),
|
||||
'content': 'Hello, how can I help you today?',
|
||||
'role': 'assistant',
|
||||
}),
|
||||
]),
|
||||
@@ -136,25 +131,26 @@
|
||||
'agent_id': 'conversation.claude_conversation',
|
||||
'content': None,
|
||||
'created': HAFakeDatetime(2024, 6, 3, 23, 0, tzinfo=datetime.timezone.utc),
|
||||
'native': ThinkingBlock(signature='ErUBCkYIARgCIkCYXaVNJShe3A86Hp7XUzh9YsCYBbJTbQsrklTAPtJ2sP/NoB6tSzpK/nTL6CjSo2R6n0KNBIg5MH6asM2R/kmaEgyB/X1FtZq5OQAC7jUaDEPWCdcwGQ4RaBy5wiIwmRxExIlDhoY6tILoVPnOExkC/0igZxHEwxK8RU/fmw0b+o+TwAarzUitwzbo21E5Kh3pa3I6yqVROf1t2F8rFocNUeCegsWV/ytwYV+ayA==', thinking='', type='thinking'),
|
||||
'native': dict({
|
||||
'citation_details': list([
|
||||
]),
|
||||
'redacted_thinking': 'EroBCkYIARgCKkBJDytPJhw//4vy3t7aE+LfIkxvkAh51cBPrAvBCo6AjgI57Zt9KWPnUVV50OQJ0KZzUFoGZG5sxg95zx4qMwkoEgz43Su3myJKckvj03waDBZLIBSeoAeRUeVsJCIwQ5edQN0sa+HNeB/KUBkoMUwV+IT0eIhcpFxnILdvxUAKM4R1o4KG3x+yO0eo/kyOKiKfrCPFQhvBVmTZPFhgA2Ow8L9gGDVipcz6x3Uu9YETGEny',
|
||||
'thinking_signature': 'ErUBCkYIARgCIkCYXaVNJShe3A86Hp7XUzh9YsCYBbJTbQsrklTAPtJ2sP/NoB6tSzpK/nTL6CjSo2R6n0KNBIg5MH6asM2R/kmaEgyB/X1FtZq5OQAC7jUaDEPWCdcwGQ4RaBy5wiIwmRxExIlDhoY6tILoVPnOExkC/0igZxHEwxK8RU/fmw0b+o+TwAarzUitwzbo21E5Kh3pa3I6yqVROf1t2F8rFocNUeCegsWV/ytwYV+ayA==',
|
||||
}),
|
||||
'role': 'assistant',
|
||||
'thinking_content': 'The user asked me to call a test function.Is it a test? What would the function do? Would it violate any privacy or security policies?',
|
||||
'tool_calls': None,
|
||||
}),
|
||||
dict({
|
||||
'agent_id': 'conversation.claude_conversation',
|
||||
'content': None,
|
||||
'created': HAFakeDatetime(2024, 6, 3, 23, 0, tzinfo=datetime.timezone.utc),
|
||||
'native': RedactedThinkingBlock(data='EroBCkYIARgCKkBJDytPJhw//4vy3t7aE+LfIkxvkAh51cBPrAvBCo6AjgI57Zt9KWPnUVV50OQJ0KZzUFoGZG5sxg95zx4qMwkoEgz43Su3myJKckvj03waDBZLIBSeoAeRUeVsJCIwQ5edQN0sa+HNeB/KUBkoMUwV+IT0eIhcpFxnILdvxUAKM4R1o4KG3x+yO0eo/kyOKiKfrCPFQhvBVmTZPFhgA2Ow8L9gGDVipcz6x3Uu9YETGEny', type='redacted_thinking'),
|
||||
'role': 'assistant',
|
||||
'thinking_content': None,
|
||||
'thinking_content': 'The user asked me to call a test function. Is it a test? What would the function do? Would it violate any privacy or security policies?',
|
||||
'tool_calls': None,
|
||||
}),
|
||||
dict({
|
||||
'agent_id': 'conversation.claude_conversation',
|
||||
'content': 'Certainly, calling it now!',
|
||||
'created': HAFakeDatetime(2024, 6, 3, 23, 0, tzinfo=datetime.timezone.utc),
|
||||
'native': ThinkingBlock(signature='ErUBCkYIARgCIkCYXaVNJShe3A86Hp7XUzh9YsCYBbJTbQsrklTAPtJ2sP/NoB6tSzpK/nTL6CjSo2R6n0KNBIg5MH6asM2R/kmaEgyB/X1FtZq5OQAC7jUaDEPWCdcwGQ4RaBy5wiIwmRxExIlDhoY6tILoVPnOExkC/0igZxHEwxK8RU/fmw0b+o+TwAarzUitwzbo21E5Kh3pa3I6yqVROf1t2F8rFocNUeCegsWV/ytwYV+ayA==', thinking='', type='thinking'),
|
||||
'native': dict({
|
||||
'citation_details': list([
|
||||
]),
|
||||
'redacted_thinking': None,
|
||||
'thinking_signature': 'ErUBCkYIARgCIkCYXaVNJShe3A86Hp7XUzh9YsCYBbJTbQsrklTAPtJ2sP/NoB6tSzpK/nTL6CjSo2R6n0KNBIg5MH6asM2R/kmaEgyB/X1FtZq5OQAC7jUaDEPWCdcwGQ4RaBy5wiIwmRxExIlDhoY6tILoVPnOExkC/0igZxHEwxK8RU/fmw0b+o+TwAarzUitwzbo21E5Kh3pa3I6yqVROf1t2F8rFocNUeCegsWV/ytwYV+ayA==',
|
||||
}),
|
||||
'role': 'assistant',
|
||||
'thinking_content': "Okay, let's give it a shot. Will I pass the test?",
|
||||
'tool_calls': list([
|
||||
@@ -197,7 +193,7 @@
|
||||
'content': list([
|
||||
dict({
|
||||
'signature': 'ErUBCkYIARgCIkCYXaVNJShe3A86Hp7XUzh9YsCYBbJTbQsrklTAPtJ2sP/NoB6tSzpK/nTL6CjSo2R6n0KNBIg5MH6asM2R/kmaEgyB/X1FtZq5OQAC7jUaDEPWCdcwGQ4RaBy5wiIwmRxExIlDhoY6tILoVPnOExkC/0igZxHEwxK8RU/fmw0b+o+TwAarzUitwzbo21E5Kh3pa3I6yqVROf1t2F8rFocNUeCegsWV/ytwYV+ayA==',
|
||||
'thinking': 'The user asked me to call a test function.Is it a test? What would the function do? Would it violate any privacy or security policies?',
|
||||
'thinking': 'The user asked me to call a test function. Is it a test? What would the function do? Would it violate any privacy or security policies?',
|
||||
'type': 'thinking',
|
||||
}),
|
||||
dict({
|
||||
@@ -235,12 +231,7 @@
|
||||
'role': 'user',
|
||||
}),
|
||||
dict({
|
||||
'content': list([
|
||||
dict({
|
||||
'text': 'I have successfully called the function',
|
||||
'type': 'text',
|
||||
}),
|
||||
]),
|
||||
'content': 'I have successfully called the function',
|
||||
'role': 'assistant',
|
||||
}),
|
||||
])
|
||||
@@ -252,12 +243,7 @@
|
||||
'role': 'user',
|
||||
}),
|
||||
dict({
|
||||
'content': list([
|
||||
dict({
|
||||
'text': 'Yes, I am sure!',
|
||||
'type': 'text',
|
||||
}),
|
||||
]),
|
||||
'content': 'Yes, I am sure!',
|
||||
'role': 'assistant',
|
||||
}),
|
||||
])
|
||||
@@ -269,12 +255,7 @@
|
||||
'role': 'user',
|
||||
}),
|
||||
dict({
|
||||
'content': list([
|
||||
dict({
|
||||
'text': 'A donut is a torus.',
|
||||
'type': 'text',
|
||||
}),
|
||||
]),
|
||||
'content': 'A donut is a torus.',
|
||||
'role': 'assistant',
|
||||
}),
|
||||
dict({
|
||||
@@ -282,12 +263,7 @@
|
||||
'role': 'user',
|
||||
}),
|
||||
dict({
|
||||
'content': list([
|
||||
dict({
|
||||
'text': 'Yes, I am sure!',
|
||||
'type': 'text',
|
||||
}),
|
||||
]),
|
||||
'content': 'Yes, I am sure!',
|
||||
'role': 'assistant',
|
||||
}),
|
||||
])
|
||||
@@ -325,12 +301,7 @@
|
||||
'role': 'user',
|
||||
}),
|
||||
dict({
|
||||
'content': list([
|
||||
dict({
|
||||
'text': 'Yes, I am sure!',
|
||||
'type': 'text',
|
||||
}),
|
||||
]),
|
||||
'content': 'Yes, I am sure!',
|
||||
'role': 'assistant',
|
||||
}),
|
||||
])
|
||||
@@ -376,12 +347,7 @@
|
||||
'role': 'user',
|
||||
}),
|
||||
dict({
|
||||
'content': list([
|
||||
dict({
|
||||
'text': 'Yes, I am sure!',
|
||||
'type': 'text',
|
||||
}),
|
||||
]),
|
||||
'content': 'Yes, I am sure!',
|
||||
'role': 'assistant',
|
||||
}),
|
||||
])
|
||||
@@ -436,12 +402,7 @@
|
||||
'role': 'user',
|
||||
}),
|
||||
dict({
|
||||
'content': list([
|
||||
dict({
|
||||
'text': 'Should I add milk to the shopping list?',
|
||||
'type': 'text',
|
||||
}),
|
||||
]),
|
||||
'content': 'Should I add milk to the shopping list?',
|
||||
'role': 'assistant',
|
||||
}),
|
||||
dict({
|
||||
@@ -449,12 +410,7 @@
|
||||
'role': 'user',
|
||||
}),
|
||||
dict({
|
||||
'content': list([
|
||||
dict({
|
||||
'text': 'Yes, I am sure!',
|
||||
'type': 'text',
|
||||
}),
|
||||
]),
|
||||
'content': 'Yes, I am sure!',
|
||||
'role': 'assistant',
|
||||
}),
|
||||
])
|
||||
@@ -566,12 +522,7 @@
|
||||
'role': 'user',
|
||||
}),
|
||||
dict({
|
||||
'content': list([
|
||||
dict({
|
||||
'text': 'Yes, I am sure!',
|
||||
'type': 'text',
|
||||
}),
|
||||
]),
|
||||
'content': 'Yes, I am sure!',
|
||||
'role': 'assistant',
|
||||
}),
|
||||
])
|
||||
@@ -609,12 +560,7 @@
|
||||
'role': 'user',
|
||||
}),
|
||||
dict({
|
||||
'content': list([
|
||||
dict({
|
||||
'text': 'It is currently 2:30 PM.',
|
||||
'type': 'text',
|
||||
}),
|
||||
]),
|
||||
'content': 'It is currently 2:30 PM.',
|
||||
'role': 'assistant',
|
||||
}),
|
||||
dict({
|
||||
@@ -622,12 +568,7 @@
|
||||
'role': 'user',
|
||||
}),
|
||||
dict({
|
||||
'content': list([
|
||||
dict({
|
||||
'text': 'Yes, I am sure!',
|
||||
'type': 'text',
|
||||
}),
|
||||
]),
|
||||
'content': 'Yes, I am sure!',
|
||||
'role': 'assistant',
|
||||
}),
|
||||
])
|
||||
@@ -644,7 +585,12 @@
|
||||
'agent_id': 'conversation.claude_conversation',
|
||||
'content': None,
|
||||
'created': HAFakeDatetime(2024, 5, 24, 12, 0, tzinfo=datetime.timezone.utc),
|
||||
'native': RedactedThinkingBlock(data='EroBCkYIARgCKkBJDytPJhw//4vy3t7aE+LfIkxvkAh51cBPrAvBCo6AjgI57Zt9KWPnUVV50OQJ0KZzUFoGZG5sxg95zx4qMwkoEgz43Su3myJKckvj03waDBZLIBSeoAeRUeVsJCIwQ5edQN0sa+HNeB/KUBkoMUwV+IT0eIhcpFxnILdvxUAKM4R1o4KG3x+yO0eo/kyOKiKfrCPFQhvBVmTZPFhgA2Ow8L9gGDVipcz6x3Uu9YETGEny', type='redacted_thinking'),
|
||||
'native': dict({
|
||||
'citation_details': list([
|
||||
]),
|
||||
'redacted_thinking': 'EroBCkYIARgCKkBJDytPJhw//4vy3t7aE+LfIkxvkAh51cBPrAvBCo6AjgI57Zt9KWPnUVV50OQJ0KZzUFoGZG5sxg95zx4qMwkoEgz43Su3myJKckvj03waDBZLIBSeoAeRUeVsJCIwQ5edQN0sa+HNeB/KUBkoMUwV+IT0eIhcpFxnILdvxUAKM4R1o4KG3x+yO0eo/kyOKiKfrCPFQhvBVmTZPFhgA2Ow8L9gGDVipcz6x3Uu9YETGEny',
|
||||
'thinking_signature': None,
|
||||
}),
|
||||
'role': 'assistant',
|
||||
'thinking_content': None,
|
||||
'tool_calls': None,
|
||||
@@ -653,7 +599,12 @@
|
||||
'agent_id': 'conversation.claude_conversation',
|
||||
'content': None,
|
||||
'created': HAFakeDatetime(2024, 5, 24, 12, 0, tzinfo=datetime.timezone.utc),
|
||||
'native': RedactedThinkingBlock(data='EroBCkYIARgCKkBJDytPJhw//4vy3t7aE+LfIkxvkAh51cBPrAvBCo6AjgI57Zt9KWPnUVV50OQJ0KZzUFoGZG5sxg95zx4qMwkoEgz43Su3myJKckvj03waDBZLIBSeoAeRUeVsJCIwQ5edQN0sa+HNeB/KUBkoMUwV+IT0eIhcpFxnILdvxUAKM4R1o4KG3x+yO0eo/kyOKiKfrCPFQhvBVmTZPFhgA2Ow8L9gGDVipcz6x3Uu9YETGEny', type='redacted_thinking'),
|
||||
'native': dict({
|
||||
'citation_details': list([
|
||||
]),
|
||||
'redacted_thinking': 'EroBCkYIARgCKkBJDytPJhw//4vy3t7aE+LfIkxvkAh51cBPrAvBCo6AjgI57Zt9KWPnUVV50OQJ0KZzUFoGZG5sxg95zx4qMwkoEgz43Su3myJKckvj03waDBZLIBSeoAeRUeVsJCIwQ5edQN0sa+HNeB/KUBkoMUwV+IT0eIhcpFxnILdvxUAKM4R1o4KG3x+yO0eo/kyOKiKfrCPFQhvBVmTZPFhgA2Ow8L9gGDVipcz6x3Uu9YETGEny',
|
||||
'thinking_signature': None,
|
||||
}),
|
||||
'role': 'assistant',
|
||||
'thinking_content': None,
|
||||
'tool_calls': None,
|
||||
@@ -662,7 +613,12 @@
|
||||
'agent_id': 'conversation.claude_conversation',
|
||||
'content': 'How can I help you today?',
|
||||
'created': HAFakeDatetime(2024, 5, 24, 12, 0, tzinfo=datetime.timezone.utc),
|
||||
'native': RedactedThinkingBlock(data='EroBCkYIARgCKkBJDytPJhw//4vy3t7aE+LfIkxvkAh51cBPrAvBCo6AjgI57Zt9KWPnUVV50OQJ0KZzUFoGZG5sxg95zx4qMwkoEgz43Su3myJKckvj03waDBZLIBSeoAeRUeVsJCIwQ5edQN0sa+HNeB/KUBkoMUwV+IT0eIhcpFxnILdvxUAKM4R1o4KG3x+yO0eo/kyOKiKfrCPFQhvBVmTZPFhgA2Ow8L9gGDVipcz6x3Uu9YETGEny', type='redacted_thinking'),
|
||||
'native': dict({
|
||||
'citation_details': list([
|
||||
]),
|
||||
'redacted_thinking': 'EroBCkYIARgCKkBJDytPJhw//4vy3t7aE+LfIkxvkAh51cBPrAvBCo6AjgI57Zt9KWPnUVV50OQJ0KZzUFoGZG5sxg95zx4qMwkoEgz43Su3myJKckvj03waDBZLIBSeoAeRUeVsJCIwQ5edQN0sa+HNeB/KUBkoMUwV+IT0eIhcpFxnILdvxUAKM4R1o4KG3x+yO0eo/kyOKiKfrCPFQhvBVmTZPFhgA2Ow8L9gGDVipcz6x3Uu9YETGEny',
|
||||
'thinking_signature': None,
|
||||
}),
|
||||
'role': 'assistant',
|
||||
'thinking_content': None,
|
||||
'tool_calls': None,
|
||||
@@ -715,7 +671,12 @@
|
||||
'agent_id': 'conversation.claude_conversation',
|
||||
'content': "To get today's news, I'll perform a web search",
|
||||
'created': HAFakeDatetime(2025, 10, 31, 12, 0, tzinfo=datetime.timezone.utc),
|
||||
'native': ThinkingBlock(signature='ErUBCkYIARgCIkCYXaVNJShe3A86Hp7XUzh9YsCYBbJTbQsrklTAPtJ2sP/NoB6tSzpK/nTL6CjSo2R6n0KNBIg5MH6asM2R/kmaEgyB/X1FtZq5OQAC7jUaDEPWCdcwGQ4RaBy5wiIwmRxExIlDhoY6tILoVPnOExkC/0igZxHEwxK8RU/fmw0b+o+TwAarzUitwzbo21E5Kh3pa3I6yqVROf1t2F8rFocNUeCegsWV/ytwYV+ayA==', thinking='', type='thinking'),
|
||||
'native': dict({
|
||||
'citation_details': list([
|
||||
]),
|
||||
'redacted_thinking': None,
|
||||
'thinking_signature': 'ErUBCkYIARgCIkCYXaVNJShe3A86Hp7XUzh9YsCYBbJTbQsrklTAPtJ2sP/NoB6tSzpK/nTL6CjSo2R6n0KNBIg5MH6asM2R/kmaEgyB/X1FtZq5OQAC7jUaDEPWCdcwGQ4RaBy5wiIwmRxExIlDhoY6tILoVPnOExkC/0igZxHEwxK8RU/fmw0b+o+TwAarzUitwzbo21E5Kh3pa3I6yqVROf1t2F8rFocNUeCegsWV/ytwYV+ayA==',
|
||||
}),
|
||||
'role': 'assistant',
|
||||
'thinking_content': "The user is asking about today's news, which requires current, real-time information. This is clearly something that requires recent information beyond my knowledge cutoff. I should use the web_search tool to find today's news.",
|
||||
'tool_calls': list([
|
||||
@@ -758,6 +719,22 @@
|
||||
'agent_id': 'conversation.claude_conversation',
|
||||
'content': '''
|
||||
Here's what I found on the web about today's news:
|
||||
|
||||
''',
|
||||
'created': HAFakeDatetime(2025, 10, 31, 12, 0, tzinfo=datetime.timezone.utc),
|
||||
'native': dict({
|
||||
'citation_details': list([
|
||||
]),
|
||||
'redacted_thinking': None,
|
||||
'thinking_signature': 'ErUBCkYIARgCIkCYXaVNJShe3A86Hp7XUzh9YsCYBbJTbQsrklTAPtJ2sP/NoB6tSzpK/nTL6CjSo2R6n0KNBIg5MH6asM2R/kmaEgyB/X1FtZq5OQAC7jUaDEPWCdcwGQ4RaBy5wiIwmRxExIlDhoY6tILoVPnOExkC/0igZxHEwxK8RU/fmw0b+o+TwAarzUitwzbo21E5Kh3pa3I6yqVROf1t2F8rFocNUeCegsWV/ytwYV+ayA==',
|
||||
}),
|
||||
'role': 'assistant',
|
||||
'thinking_content': "Great! All clear, let's reply to the user!",
|
||||
'tool_calls': None,
|
||||
}),
|
||||
dict({
|
||||
'agent_id': 'conversation.claude_conversation',
|
||||
'content': '''
|
||||
1. New Home Assistant release
|
||||
2. Something incredible happened
|
||||
Those are the main headlines making news today.
|
||||
@@ -775,7 +752,7 @@
|
||||
'url': 'https://www.example.com/todays-news',
|
||||
}),
|
||||
]),
|
||||
'index': 54,
|
||||
'index': 3,
|
||||
'length': 26,
|
||||
}),
|
||||
dict({
|
||||
@@ -795,10 +772,12 @@
|
||||
'url': 'https://www.newssite.com/breaking-news',
|
||||
}),
|
||||
]),
|
||||
'index': 84,
|
||||
'index': 33,
|
||||
'length': 29,
|
||||
}),
|
||||
]),
|
||||
'redacted_thinking': None,
|
||||
'thinking_signature': None,
|
||||
}),
|
||||
'role': 'assistant',
|
||||
'thinking_content': None,
|
||||
@@ -806,3 +785,116 @@
|
||||
}),
|
||||
])
|
||||
# ---
|
||||
# name: test_web_search.1
|
||||
list([
|
||||
dict({
|
||||
'content': "What's on the news today?",
|
||||
'role': 'user',
|
||||
}),
|
||||
dict({
|
||||
'content': list([
|
||||
dict({
|
||||
'signature': 'ErUBCkYIARgCIkCYXaVNJShe3A86Hp7XUzh9YsCYBbJTbQsrklTAPtJ2sP/NoB6tSzpK/nTL6CjSo2R6n0KNBIg5MH6asM2R/kmaEgyB/X1FtZq5OQAC7jUaDEPWCdcwGQ4RaBy5wiIwmRxExIlDhoY6tILoVPnOExkC/0igZxHEwxK8RU/fmw0b+o+TwAarzUitwzbo21E5Kh3pa3I6yqVROf1t2F8rFocNUeCegsWV/ytwYV+ayA==',
|
||||
'thinking': "The user is asking about today's news, which requires current, real-time information. This is clearly something that requires recent information beyond my knowledge cutoff. I should use the web_search tool to find today's news.",
|
||||
'type': 'thinking',
|
||||
}),
|
||||
dict({
|
||||
'text': "To get today's news, I'll perform a web search",
|
||||
'type': 'text',
|
||||
}),
|
||||
dict({
|
||||
'id': 'srvtoolu_12345ABC',
|
||||
'input': dict({
|
||||
'query': "today's news",
|
||||
}),
|
||||
'name': 'web_search',
|
||||
'type': 'server_tool_use',
|
||||
}),
|
||||
dict({
|
||||
'content': list([
|
||||
dict({
|
||||
'encrypted_content': 'ABCDEFG',
|
||||
'page_age': '2 days ago',
|
||||
'title': "Today's News - Example.com",
|
||||
'type': 'web_search_result',
|
||||
'url': 'https://www.example.com/todays-news',
|
||||
}),
|
||||
dict({
|
||||
'encrypted_content': 'ABCDEFG',
|
||||
'page_age': None,
|
||||
'title': 'Breaking News - NewsSite.com',
|
||||
'type': 'web_search_result',
|
||||
'url': 'https://www.newssite.com/breaking-news',
|
||||
}),
|
||||
]),
|
||||
'tool_use_id': 'srvtoolu_12345ABC',
|
||||
'type': 'web_search_tool_result',
|
||||
}),
|
||||
dict({
|
||||
'signature': 'ErUBCkYIARgCIkCYXaVNJShe3A86Hp7XUzh9YsCYBbJTbQsrklTAPtJ2sP/NoB6tSzpK/nTL6CjSo2R6n0KNBIg5MH6asM2R/kmaEgyB/X1FtZq5OQAC7jUaDEPWCdcwGQ4RaBy5wiIwmRxExIlDhoY6tILoVPnOExkC/0igZxHEwxK8RU/fmw0b+o+TwAarzUitwzbo21E5Kh3pa3I6yqVROf1t2F8rFocNUeCegsWV/ytwYV+ayA==',
|
||||
'thinking': "Great! All clear, let's reply to the user!",
|
||||
'type': 'thinking',
|
||||
}),
|
||||
dict({
|
||||
'text': '''
|
||||
Here's what I found on the web about today's news:
|
||||
|
||||
''',
|
||||
'type': 'text',
|
||||
}),
|
||||
dict({
|
||||
'text': '1. ',
|
||||
'type': 'text',
|
||||
}),
|
||||
dict({
|
||||
'citations': list([
|
||||
dict({
|
||||
'cited_text': 'This release iterates on some of the features we introduced in the last couple of releases, but also...',
|
||||
'encrypted_index': 'AAA==',
|
||||
'title': 'Home Assistant Release',
|
||||
'type': 'web_search_result_location',
|
||||
'url': 'https://www.example.com/todays-news',
|
||||
}),
|
||||
]),
|
||||
'text': 'New Home Assistant release',
|
||||
'type': 'text',
|
||||
}),
|
||||
dict({
|
||||
'text': '''
|
||||
|
||||
2.
|
||||
''',
|
||||
'type': 'text',
|
||||
}),
|
||||
dict({
|
||||
'citations': list([
|
||||
dict({
|
||||
'cited_text': 'Breaking news from around the world today includes major events in technology, politics, and culture...',
|
||||
'encrypted_index': 'AQE=',
|
||||
'title': 'Breaking News',
|
||||
'type': 'web_search_result_location',
|
||||
'url': 'https://www.newssite.com/breaking-news',
|
||||
}),
|
||||
dict({
|
||||
'cited_text': 'Well, this happened...',
|
||||
'encrypted_index': 'AgI=',
|
||||
'title': 'Breaking News',
|
||||
'type': 'web_search_result_location',
|
||||
'url': 'https://www.newssite.com/breaking-news',
|
||||
}),
|
||||
]),
|
||||
'text': 'Something incredible happened',
|
||||
'type': 'text',
|
||||
}),
|
||||
dict({
|
||||
'text': '''
|
||||
|
||||
Those are the main headlines making news today.
|
||||
''',
|
||||
'type': 'text',
|
||||
}),
|
||||
]),
|
||||
'role': 'assistant',
|
||||
}),
|
||||
])
|
||||
# ---
|
||||
|
||||
@@ -14,7 +14,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import entity_registry as er, selector
|
||||
|
||||
from . import create_content_block, create_tool_use_block
|
||||
from . import create_content_block, create_thinking_block, create_tool_use_block
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
@@ -95,7 +95,7 @@ async def test_generate_structured_data_legacy(
|
||||
|
||||
mock_create_stream.return_value = [
|
||||
create_tool_use_block(
|
||||
1,
|
||||
0,
|
||||
"toolu_0123456789AbCdEfGhIjKlM",
|
||||
"test_task",
|
||||
['{"charac', 'ters": ["Mario', '", "Luigi"]}'],
|
||||
@@ -135,7 +135,7 @@ async def test_generate_structured_data_legacy_tools(
|
||||
"""Test AI Task structured data generation with legacy method and tools enabled."""
|
||||
mock_create_stream.return_value = [
|
||||
create_tool_use_block(
|
||||
1,
|
||||
0,
|
||||
"toolu_0123456789AbCdEfGhIjKlM",
|
||||
"test_task",
|
||||
['{"charac', 'ters": ["Mario', '", "Luigi"]}'],
|
||||
@@ -181,11 +181,74 @@ async def test_generate_structured_data_legacy_extended_thinking(
|
||||
) -> None:
|
||||
"""Test AI Task structured data generation with legacy method and extended_thinking."""
|
||||
mock_create_stream.return_value = [
|
||||
create_tool_use_block(
|
||||
1,
|
||||
"toolu_0123456789AbCdEfGhIjKlM",
|
||||
"test_task",
|
||||
['{"charac', 'ters": ["Mario', '", "Luigi"]}'],
|
||||
(
|
||||
*create_thinking_block(
|
||||
0,
|
||||
["Let's use the tool to respond"],
|
||||
),
|
||||
*create_tool_use_block(
|
||||
1,
|
||||
"toolu_0123456789AbCdEfGhIjKlM",
|
||||
"test_task",
|
||||
['{"charac', 'ters": ["Mario', '", "Luigi"]}'],
|
||||
),
|
||||
),
|
||||
]
|
||||
|
||||
for subentry in mock_config_entry.subentries.values():
|
||||
hass.config_entries.async_update_subentry(
|
||||
mock_config_entry,
|
||||
subentry,
|
||||
data={
|
||||
"chat_model": "claude-sonnet-4-0",
|
||||
"thinking_budget": 1500,
|
||||
},
|
||||
)
|
||||
|
||||
result = await ai_task.async_generate_data(
|
||||
hass,
|
||||
task_name="Test Task",
|
||||
entity_id="ai_task.claude_ai_task",
|
||||
instructions="Generate test data",
|
||||
structure=vol.Schema(
|
||||
{
|
||||
vol.Required("characters"): selector.selector(
|
||||
{
|
||||
"text": {
|
||||
"multiple": True,
|
||||
}
|
||||
}
|
||||
)
|
||||
},
|
||||
),
|
||||
)
|
||||
|
||||
assert result.data == {"characters": ["Mario", "Luigi"]}
|
||||
assert mock_create_stream.call_args.kwargs.copy() == snapshot
|
||||
|
||||
|
||||
@freeze_time("2026-01-01 12:00:00")
|
||||
async def test_generate_structured_data_legacy_extra_text_block(
|
||||
hass: HomeAssistant,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
mock_init_component,
|
||||
mock_create_stream: AsyncMock,
|
||||
snapshot: SnapshotAssertion,
|
||||
) -> None:
|
||||
"""Test AI Task structured data generation with legacy method and extra text block."""
|
||||
mock_create_stream.return_value = [
|
||||
(
|
||||
*create_thinking_block(
|
||||
0,
|
||||
["Let's use the tool to respond"],
|
||||
),
|
||||
*create_content_block(1, ["Sure!"]),
|
||||
*create_tool_use_block(
|
||||
2,
|
||||
"toolu_0123456789AbCdEfGhIjKlM",
|
||||
"test_task",
|
||||
['{"charac', 'ters": ["Mario', '", "Luigi"]}'],
|
||||
),
|
||||
),
|
||||
]
|
||||
|
||||
@@ -239,7 +302,7 @@ async def test_generate_invalid_structured_data_legacy(
|
||||
|
||||
mock_create_stream.return_value = [
|
||||
create_tool_use_block(
|
||||
1,
|
||||
0,
|
||||
"toolu_0123456789AbCdEfGhIjKlM",
|
||||
"test_task",
|
||||
"INVALID JSON RESPONSE",
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user