forked from home-assistant/core
Merge branch 'dev' into mqtt-subentry-entity_category
.github/workflows/builder.yml (vendored, 4 changes)
@@ -94,7 +94,7 @@ jobs:

       - name: Download nightly wheels of frontend
         if: needs.init.outputs.channel == 'dev'
-        uses: dawidd6/action-download-artifact@v10
+        uses: dawidd6/action-download-artifact@v11
         with:
           github_token: ${{secrets.GITHUB_TOKEN}}
           repo: home-assistant/frontend
@@ -105,7 +105,7 @@ jobs:

       - name: Download nightly wheels of intents
         if: needs.init.outputs.channel == 'dev'
-        uses: dawidd6/action-download-artifact@v10
+        uses: dawidd6/action-download-artifact@v11
         with:
           github_token: ${{secrets.GITHUB_TOKEN}}
           repo: home-assistant/intents-package

@@ -8,5 +8,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["aioamazondevices"],
   "quality_scale": "bronze",
-  "requirements": ["aioamazondevices==3.1.2"]
+  "requirements": ["aioamazondevices==3.1.12"]
 }

@@ -366,11 +366,11 @@ class AnthropicConversationEntity(
         options = self.entry.options

         try:
-            await chat_log.async_update_llm_data(
-                DOMAIN,
-                user_input,
+            await chat_log.async_provide_llm_data(
+                user_input.as_llm_context(DOMAIN),
                 options.get(CONF_LLM_HASS_API),
                 options.get(CONF_PROMPT),
+                user_input.extra_system_prompt,
             )
         except conversation.ConverseError as err:
             return err.as_conversation_result()

@@ -12,6 +12,7 @@ from homeassistant.components.sensor import (
 )
 from homeassistant.const import (
     PERCENTAGE,
+    EntityCategory,
     UnitOfApparentPower,
     UnitOfElectricCurrent,
     UnitOfElectricPotential,
@@ -35,6 +36,7 @@ SENSORS: dict[str, SensorEntityDescription] = {
     "alarmdel": SensorEntityDescription(
         key="alarmdel",
         translation_key="alarm_delay",
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "ambtemp": SensorEntityDescription(
         key="ambtemp",
@@ -47,15 +49,18 @@ SENSORS: dict[str, SensorEntityDescription] = {
         key="apc",
         translation_key="apc_status",
         entity_registry_enabled_default=False,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "apcmodel": SensorEntityDescription(
         key="apcmodel",
         translation_key="apc_model",
         entity_registry_enabled_default=False,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "badbatts": SensorEntityDescription(
         key="badbatts",
         translation_key="bad_batteries",
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "battdate": SensorEntityDescription(
         key="battdate",
@@ -82,6 +87,7 @@ SENSORS: dict[str, SensorEntityDescription] = {
         key="cable",
         translation_key="cable_type",
         entity_registry_enabled_default=False,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "cumonbatt": SensorEntityDescription(
         key="cumonbatt",
@@ -94,52 +100,63 @@ SENSORS: dict[str, SensorEntityDescription] = {
         key="date",
         translation_key="date",
         entity_registry_enabled_default=False,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "dipsw": SensorEntityDescription(
         key="dipsw",
         translation_key="dip_switch_settings",
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "dlowbatt": SensorEntityDescription(
         key="dlowbatt",
         translation_key="low_battery_signal",
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "driver": SensorEntityDescription(
         key="driver",
         translation_key="driver",
         entity_registry_enabled_default=False,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "dshutd": SensorEntityDescription(
         key="dshutd",
         translation_key="shutdown_delay",
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "dwake": SensorEntityDescription(
         key="dwake",
         translation_key="wake_delay",
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "end apc": SensorEntityDescription(
         key="end apc",
         translation_key="date_and_time",
         entity_registry_enabled_default=False,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "extbatts": SensorEntityDescription(
         key="extbatts",
         translation_key="external_batteries",
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "firmware": SensorEntityDescription(
         key="firmware",
         translation_key="firmware_version",
         entity_registry_enabled_default=False,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "hitrans": SensorEntityDescription(
         key="hitrans",
         translation_key="transfer_high",
         native_unit_of_measurement=UnitOfElectricPotential.VOLT,
         device_class=SensorDeviceClass.VOLTAGE,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "hostname": SensorEntityDescription(
         key="hostname",
         translation_key="hostname",
         entity_registry_enabled_default=False,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "humidity": SensorEntityDescription(
         key="humidity",
@@ -163,10 +180,12 @@ SENSORS: dict[str, SensorEntityDescription] = {
         key="lastxfer",
         translation_key="last_transfer",
         entity_registry_enabled_default=False,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "linefail": SensorEntityDescription(
         key="linefail",
         translation_key="line_failure",
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "linefreq": SensorEntityDescription(
         key="linefreq",
@@ -198,15 +217,18 @@ SENSORS: dict[str, SensorEntityDescription] = {
         translation_key="transfer_low",
         native_unit_of_measurement=UnitOfElectricPotential.VOLT,
         device_class=SensorDeviceClass.VOLTAGE,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "mandate": SensorEntityDescription(
         key="mandate",
         translation_key="manufacture_date",
         entity_registry_enabled_default=False,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "masterupd": SensorEntityDescription(
         key="masterupd",
         translation_key="master_update",
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "maxlinev": SensorEntityDescription(
         key="maxlinev",
@@ -217,11 +239,13 @@ SENSORS: dict[str, SensorEntityDescription] = {
     "maxtime": SensorEntityDescription(
         key="maxtime",
         translation_key="max_time",
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "mbattchg": SensorEntityDescription(
         key="mbattchg",
         translation_key="max_battery_charge",
         native_unit_of_measurement=PERCENTAGE,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "minlinev": SensorEntityDescription(
         key="minlinev",
@@ -232,41 +256,48 @@ SENSORS: dict[str, SensorEntityDescription] = {
     "mintimel": SensorEntityDescription(
         key="mintimel",
         translation_key="min_time",
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "model": SensorEntityDescription(
         key="model",
         translation_key="model",
         entity_registry_enabled_default=False,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "nombattv": SensorEntityDescription(
         key="nombattv",
         translation_key="battery_nominal_voltage",
         native_unit_of_measurement=UnitOfElectricPotential.VOLT,
         device_class=SensorDeviceClass.VOLTAGE,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "nominv": SensorEntityDescription(
         key="nominv",
         translation_key="nominal_input_voltage",
         native_unit_of_measurement=UnitOfElectricPotential.VOLT,
         device_class=SensorDeviceClass.VOLTAGE,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "nomoutv": SensorEntityDescription(
         key="nomoutv",
         translation_key="nominal_output_voltage",
         native_unit_of_measurement=UnitOfElectricPotential.VOLT,
         device_class=SensorDeviceClass.VOLTAGE,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "nompower": SensorEntityDescription(
         key="nompower",
         translation_key="nominal_output_power",
         native_unit_of_measurement=UnitOfPower.WATT,
         device_class=SensorDeviceClass.POWER,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "nomapnt": SensorEntityDescription(
         key="nomapnt",
         translation_key="nominal_apparent_power",
         native_unit_of_measurement=UnitOfApparentPower.VOLT_AMPERE,
         device_class=SensorDeviceClass.APPARENT_POWER,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "numxfers": SensorEntityDescription(
         key="numxfers",
@@ -291,21 +322,25 @@ SENSORS: dict[str, SensorEntityDescription] = {
         key="reg1",
         translation_key="register_1_fault",
         entity_registry_enabled_default=False,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "reg2": SensorEntityDescription(
         key="reg2",
         translation_key="register_2_fault",
         entity_registry_enabled_default=False,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "reg3": SensorEntityDescription(
         key="reg3",
         translation_key="register_3_fault",
         entity_registry_enabled_default=False,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "retpct": SensorEntityDescription(
         key="retpct",
         translation_key="restore_capacity",
         native_unit_of_measurement=PERCENTAGE,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "selftest": SensorEntityDescription(
         key="selftest",
@@ -315,20 +350,24 @@ SENSORS: dict[str, SensorEntityDescription] = {
         key="sense",
         translation_key="sensitivity",
         entity_registry_enabled_default=False,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "serialno": SensorEntityDescription(
         key="serialno",
         translation_key="serial_number",
         entity_registry_enabled_default=False,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "starttime": SensorEntityDescription(
         key="starttime",
         translation_key="startup_time",
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "statflag": SensorEntityDescription(
         key="statflag",
         translation_key="online_status",
         entity_registry_enabled_default=False,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "status": SensorEntityDescription(
         key="status",
@@ -337,6 +376,7 @@ SENSORS: dict[str, SensorEntityDescription] = {
     "stesti": SensorEntityDescription(
         key="stesti",
         translation_key="self_test_interval",
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "timeleft": SensorEntityDescription(
         key="timeleft",
@@ -360,23 +400,28 @@ SENSORS: dict[str, SensorEntityDescription] = {
         key="upsname",
         translation_key="ups_name",
         entity_registry_enabled_default=False,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "version": SensorEntityDescription(
         key="version",
         translation_key="version",
         entity_registry_enabled_default=False,
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "xoffbat": SensorEntityDescription(
         key="xoffbat",
         translation_key="transfer_from_battery",
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "xoffbatt": SensorEntityDescription(
         key="xoffbatt",
         translation_key="transfer_from_battery",
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "xonbatt": SensorEntityDescription(
         key="xonbatt",
         translation_key="transfer_to_battery",
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
 }

@@ -20,5 +20,5 @@
   "dependencies": ["bluetooth_adapters"],
   "documentation": "https://www.home-assistant.io/integrations/bthome",
   "iot_class": "local_push",
-  "requirements": ["bthome-ble==3.12.4"]
+  "requirements": ["bthome-ble==3.13.1"]
 }

@@ -105,11 +105,6 @@ DEFAULT_MAX_HUMIDITY = 99

 CONVERTIBLE_ATTRIBUTE = [ATTR_TEMPERATURE, ATTR_TARGET_TEMP_LOW, ATTR_TARGET_TEMP_HIGH]

-# Can be removed in 2025.1 after deprecation period of the new feature flags
-CHECK_TURN_ON_OFF_FEATURE_FLAG = (
-    ClimateEntityFeature.TURN_ON | ClimateEntityFeature.TURN_OFF
-)
-
 SET_TEMPERATURE_SCHEMA = vol.All(
     cv.has_at_least_one_key(
         ATTR_TEMPERATURE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW

@@ -13,6 +13,6 @@
   "integration_type": "system",
   "iot_class": "cloud_push",
   "loggers": ["acme", "hass_nabucasa", "snitun"],
-  "requirements": ["hass-nabucasa==0.101.0"],
+  "requirements": ["hass-nabucasa==0.102.0"],
   "single_config_entry": true
 }

@@ -14,12 +14,11 @@ import voluptuous as vol

 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError, TemplateError
-from homeassistant.helpers import chat_session, intent, llm, template
+from homeassistant.helpers import chat_session, frame, intent, llm, template
 from homeassistant.util.hass_dict import HassKey
 from homeassistant.util.json import JsonObjectType

 from . import trace
-from .const import DOMAIN
 from .models import ConversationInput, ConversationResult

 DATA_CHAT_LOGS: HassKey[dict[str, ChatLog]] = HassKey("conversation_chat_logs")
@@ -359,7 +358,7 @@ class ChatLog:
         self,
         llm_context: llm.LLMContext,
         prompt: str,
-        language: str,
+        language: str | None,
         user_name: str | None = None,
     ) -> str:
         try:
@@ -373,7 +372,7 @@ class ChatLog:
             )
         except TemplateError as err:
             LOGGER.error("Error rendering prompt: %s", err)
-            intent_response = intent.IntentResponse(language=language)
+            intent_response = intent.IntentResponse(language=language or "")
             intent_response.async_set_error(
                 intent.IntentResponseErrorCode.UNKNOWN,
                 "Sorry, I had a problem with my template",
@@ -392,15 +391,25 @@ class ChatLog:
         user_llm_prompt: str | None = None,
     ) -> None:
         """Set the LLM system prompt."""
-        llm_context = llm.LLMContext(
-            platform=conversing_domain,
-            context=user_input.context,
-            user_prompt=user_input.text,
-            language=user_input.language,
-            assistant=DOMAIN,
-            device_id=user_input.device_id,
+        frame.report_usage(
+            "ChatLog.async_update_llm_data",
+            breaks_in_ha_version="2026.1",
         )
+        return await self.async_provide_llm_data(
+            llm_context=user_input.as_llm_context(conversing_domain),
+            user_llm_hass_api=user_llm_hass_api,
+            user_llm_prompt=user_llm_prompt,
+            user_extra_system_prompt=user_input.extra_system_prompt,
+        )
+
+    async def async_provide_llm_data(
+        self,
+        llm_context: llm.LLMContext,
+        user_llm_hass_api: str | list[str] | None = None,
+        user_llm_prompt: str | None = None,
+        user_extra_system_prompt: str | None = None,
+    ) -> None:
+        """Set the LLM system prompt."""
         llm_api: llm.APIInstance | None = None

         if user_llm_hass_api:
@@ -414,10 +423,12 @@ class ChatLog:
                 LOGGER.error(
                     "Error getting LLM API %s for %s: %s",
                     user_llm_hass_api,
-                    conversing_domain,
+                    llm_context.platform,
                     err,
                 )
-                intent_response = intent.IntentResponse(language=user_input.language)
+                intent_response = intent.IntentResponse(
+                    language=llm_context.language or ""
+                )
                 intent_response.async_set_error(
                     intent.IntentResponseErrorCode.UNKNOWN,
                     "Error preparing LLM API",
@@ -431,10 +442,10 @@ class ChatLog:
         user_name: str | None = None

         if (
-            user_input.context
-            and user_input.context.user_id
+            llm_context.context
+            and llm_context.context.user_id
             and (
-                user := await self.hass.auth.async_get_user(user_input.context.user_id)
+                user := await self.hass.auth.async_get_user(llm_context.context.user_id)
             )
         ):
             user_name = user.name
@@ -444,7 +455,7 @@ class ChatLog:
             await self._async_expand_prompt_template(
                 llm_context,
                 (user_llm_prompt or llm.DEFAULT_INSTRUCTIONS_PROMPT),
-                user_input.language,
+                llm_context.language,
                 user_name,
             )
         )
@@ -456,14 +467,14 @@ class ChatLog:
             await self._async_expand_prompt_template(
                 llm_context,
                 llm.BASE_PROMPT,
-                user_input.language,
+                llm_context.language,
                 user_name,
             )
         )

         if extra_system_prompt := (
             # Take new system prompt if one was given
-            user_input.extra_system_prompt or self.extra_system_prompt
+            user_extra_system_prompt or self.extra_system_prompt
         ):
             prompt_parts.append(extra_system_prompt)

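A minimal usage sketch of the new chat-log API introduced above, mirroring the Anthropic and Google Generative AI hunks in this merge (here `DOMAIN`, `options`, and the CONF_* constants belong to the calling conversation integration):

    try:
        await chat_log.async_provide_llm_data(
            user_input.as_llm_context(DOMAIN),  # llm.LLMContext built from the ConversationInput
            options.get(CONF_LLM_HASS_API),     # optional LLM API id(s)
            options.get(CONF_PROMPT),           # optional custom prompt template
            user_input.extra_system_prompt,     # extra system prompt is now passed explicitly
        )
    except conversation.ConverseError as err:
        return err.as_conversation_result()

The deprecated `async_update_llm_data(DOMAIN, user_input, ...)` keeps working until 2026.1 through the `frame.report_usage` shim shown in the hunk.
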
@@ -7,7 +7,9 @@ from dataclasses import dataclass
 from typing import Any, Literal

 from homeassistant.core import Context
-from homeassistant.helpers import intent
+from homeassistant.helpers import intent, llm

+from .const import DOMAIN
+

 @dataclass(frozen=True)
@@ -56,6 +58,16 @@ class ConversationInput:
             "extra_system_prompt": self.extra_system_prompt,
         }

+    def as_llm_context(self, conversing_domain: str) -> llm.LLMContext:
+        """Return input as an LLM context."""
+        return llm.LLMContext(
+            platform=conversing_domain,
+            context=self.context,
+            language=self.language,
+            assistant=DOMAIN,
+            device_id=self.device_id,
+        )
+

 @dataclass(slots=True)
 class ConversationResult:

@@ -300,10 +300,6 @@ class CoverEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
     def supported_features(self) -> CoverEntityFeature:
         """Flag supported features."""
         if (features := self._attr_supported_features) is not None:
-            if type(features) is int:
-                new_features = CoverEntityFeature(features)
-                self._report_deprecated_supported_features_values(new_features)
-                return new_features
             return features

         supported_features = (

@@ -91,7 +91,9 @@ async def async_unload_entry(


 async def async_remove_config_entry_device(
-    hass: HomeAssistant, config_entry: ConfigEntry, device_entry: DeviceEntry
+    hass: HomeAssistant,
+    config_entry: DevoloHomeControlConfigEntry,
+    device_entry: DeviceEntry,
 ) -> bool:
     """Remove a config entry from a device."""
     return True

@@ -87,6 +87,7 @@ class DevoloScannerEntity( # pylint: disable=hass-enforce-class-module
 ):
     """Representation of a devolo device tracker."""

+    _attr_has_entity_name = True
     _attr_translation_key = "device_tracker"

     def __init__(
@@ -99,6 +100,7 @@ class DevoloScannerEntity( # pylint: disable=hass-enforce-class-module
         super().__init__(coordinator)
         self._device = device
+        self._attr_mac_address = mac
         self._attr_name = mac

     @property
     def extra_state_attributes(self) -> dict[str, str]:

@@ -7,5 +7,5 @@
   "integration_type": "service",
   "iot_class": "cloud_push",
   "loggers": ["discord"],
-  "requirements": ["nextcord==2.6.0"]
+  "requirements": ["nextcord==3.1.0"]
 }

@@ -7,5 +7,5 @@
   "integration_type": "hub",
   "iot_class": "local_push",
   "loggers": ["sml"],
-  "requirements": ["pysml==0.0.12"]
+  "requirements": ["pysml==0.1.5"]
 }

@@ -6,7 +6,6 @@ from typing import TYPE_CHECKING

 from eq3btsmart import Thermostat
 from eq3btsmart.exceptions import Eq3Exception
-from eq3btsmart.thermostat_config import ThermostatConfig

 from homeassistant.components import bluetooth
 from homeassistant.config_entries import ConfigEntry
@@ -53,12 +52,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: Eq3ConfigEntry) -> bool:
             f"[{eq3_config.mac_address}] Device could not be found"
         )

-    thermostat = Thermostat(
-        thermostat_config=ThermostatConfig(
-            mac_address=mac_address,
-        ),
-        ble_device=device,
-    )
+    thermostat = Thermostat(mac_address=device)  # type: ignore[arg-type]

     entry.runtime_data = Eq3ConfigEntryData(
         eq3_config=eq3_config, thermostat=thermostat

@@ -2,7 +2,6 @@

 from collections.abc import Callable
 from dataclasses import dataclass
-from typing import TYPE_CHECKING

 from eq3btsmart.models import Status

@@ -80,7 +79,4 @@ class Eq3BinarySensorEntity(Eq3Entity, BinarySensorEntity):
     def is_on(self) -> bool:
         """Return the state of the binary sensor."""

-        if TYPE_CHECKING:
-            assert self._thermostat.status is not None
-
         return self.entity_description.value_func(self._thermostat.status)

@@ -1,9 +1,16 @@
 """Platform for eQ-3 climate entities."""

+from datetime import timedelta
 import logging
 from typing import Any

-from eq3btsmart.const import EQ3BT_MAX_TEMP, EQ3BT_OFF_TEMP, Eq3Preset, OperationMode
+from eq3btsmart.const import (
+    EQ3_DEFAULT_AWAY_TEMP,
+    EQ3_MAX_TEMP,
+    EQ3_OFF_TEMP,
+    Eq3OperationMode,
+    Eq3Preset,
+)
 from eq3btsmart.exceptions import Eq3Exception

 from homeassistant.components.climate import (
@@ -20,9 +27,11 @@ from homeassistant.exceptions import ServiceValidationError
 from homeassistant.helpers import device_registry as dr
 from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+import homeassistant.util.dt as dt_util

 from . import Eq3ConfigEntry
 from .const import (
+    DEFAULT_AWAY_HOURS,
     EQ_TO_HA_HVAC,
     HA_TO_EQ_HVAC,
     CurrentTemperatureSelector,
@@ -57,8 +66,8 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
         | ClimateEntityFeature.TURN_ON
     )
     _attr_temperature_unit = UnitOfTemperature.CELSIUS
-    _attr_min_temp = EQ3BT_OFF_TEMP
-    _attr_max_temp = EQ3BT_MAX_TEMP
+    _attr_min_temp = EQ3_OFF_TEMP
+    _attr_max_temp = EQ3_MAX_TEMP
     _attr_precision = PRECISION_HALVES
     _attr_hvac_modes = list(HA_TO_EQ_HVAC.keys())
     _attr_preset_modes = list(Preset)
@@ -70,38 +79,21 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
     _target_temperature: float | None = None

     @callback
-    def _async_on_updated(self) -> None:
-        """Handle updated data from the thermostat."""
-
-        if self._thermostat.status is not None:
-            self._async_on_status_updated()
-
-        if self._thermostat.device_data is not None:
-            self._async_on_device_updated()
-
-        super()._async_on_updated()
-
-    @callback
-    def _async_on_status_updated(self) -> None:
+    def _async_on_status_updated(self, data: Any) -> None:
         """Handle updated status from the thermostat."""

-        if self._thermostat.status is None:
-            return
-
-        self._target_temperature = self._thermostat.status.target_temperature.value
+        self._target_temperature = self._thermostat.status.target_temperature
         self._attr_hvac_mode = EQ_TO_HA_HVAC[self._thermostat.status.operation_mode]
         self._attr_current_temperature = self._get_current_temperature()
         self._attr_target_temperature = self._get_target_temperature()
         self._attr_preset_mode = self._get_current_preset_mode()
         self._attr_hvac_action = self._get_current_hvac_action()
+        super()._async_on_status_updated(data)

     @callback
-    def _async_on_device_updated(self) -> None:
+    def _async_on_device_updated(self, data: Any) -> None:
         """Handle updated device data from the thermostat."""

-        if self._thermostat.device_data is None:
-            return
-
         device_registry = dr.async_get(self.hass)
         if device := device_registry.async_get_device(
             connections={(CONNECTION_BLUETOOTH, self._eq3_config.mac_address)},
@@ -109,8 +101,9 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
             device_registry.async_update_device(
                 device.id,
                 sw_version=str(self._thermostat.device_data.firmware_version),
-                serial_number=self._thermostat.device_data.device_serial.value,
+                serial_number=self._thermostat.device_data.device_serial,
             )
+        super()._async_on_device_updated(data)

     def _get_current_temperature(self) -> float | None:
         """Return the current temperature."""
@@ -119,17 +112,11 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
             case CurrentTemperatureSelector.NOTHING:
                 return None
             case CurrentTemperatureSelector.VALVE:
-                if self._thermostat.status is None:
-                    return None
-
                 return float(self._thermostat.status.valve_temperature)
             case CurrentTemperatureSelector.UI:
                 return self._target_temperature
             case CurrentTemperatureSelector.DEVICE:
-                if self._thermostat.status is None:
-                    return None
-
-                return float(self._thermostat.status.target_temperature.value)
+                return float(self._thermostat.status.target_temperature)
             case CurrentTemperatureSelector.ENTITY:
                 state = self.hass.states.get(self._eq3_config.external_temp_sensor)
                 if state is not None:
@@ -147,16 +134,12 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
             case TargetTemperatureSelector.TARGET:
                 return self._target_temperature
             case TargetTemperatureSelector.LAST_REPORTED:
-                if self._thermostat.status is None:
-                    return None
-
-                return float(self._thermostat.status.target_temperature.value)
+                return float(self._thermostat.status.target_temperature)

     def _get_current_preset_mode(self) -> str:
         """Return the current preset mode."""

-        if (status := self._thermostat.status) is None:
-            return PRESET_NONE
+        status = self._thermostat.status
         if status.is_window_open:
             return Preset.WINDOW_OPEN
         if status.is_boost:
@@ -165,7 +148,7 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
             return Preset.LOW_BATTERY
         if status.is_away:
             return Preset.AWAY
-        if status.operation_mode is OperationMode.ON:
+        if status.operation_mode is Eq3OperationMode.ON:
             return Preset.OPEN
         if status.presets is None:
             return PRESET_NONE
@@ -179,10 +162,7 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
     def _get_current_hvac_action(self) -> HVACAction:
         """Return the current hvac action."""

-        if (
-            self._thermostat.status is None
-            or self._thermostat.status.operation_mode is OperationMode.OFF
-        ):
+        if self._thermostat.status.operation_mode is Eq3OperationMode.OFF:
             return HVACAction.OFF
         if self._thermostat.status.valve == 0:
             return HVACAction.IDLE
@@ -227,7 +207,7 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
         """Set new target hvac mode."""

         if hvac_mode is HVACMode.OFF:
-            await self.async_set_temperature(temperature=EQ3BT_OFF_TEMP)
+            await self.async_set_temperature(temperature=EQ3_OFF_TEMP)

         try:
             await self._thermostat.async_set_mode(HA_TO_EQ_HVAC[hvac_mode])
@@ -241,10 +221,11 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
             case Preset.BOOST:
                 await self._thermostat.async_set_boost(True)
             case Preset.AWAY:
-                await self._thermostat.async_set_away(True)
+                away_until = dt_util.now() + timedelta(hours=DEFAULT_AWAY_HOURS)
+                await self._thermostat.async_set_away(away_until, EQ3_DEFAULT_AWAY_TEMP)
             case Preset.ECO:
                 await self._thermostat.async_set_preset(Eq3Preset.ECO)
             case Preset.COMFORT:
                 await self._thermostat.async_set_preset(Eq3Preset.COMFORT)
             case Preset.OPEN:
-                await self._thermostat.async_set_mode(OperationMode.ON)
+                await self._thermostat.async_set_mode(Eq3OperationMode.ON)

@@ -2,7 +2,7 @@

 from enum import Enum

-from eq3btsmart.const import OperationMode
+from eq3btsmart.const import Eq3OperationMode

 from homeassistant.components.climate import (
     PRESET_AWAY,
@@ -34,17 +34,17 @@ ENTITY_KEY_AWAY_UNTIL = "away_until"

 GET_DEVICE_TIMEOUT = 5  # seconds

-EQ_TO_HA_HVAC: dict[OperationMode, HVACMode] = {
-    OperationMode.OFF: HVACMode.OFF,
-    OperationMode.ON: HVACMode.HEAT,
-    OperationMode.AUTO: HVACMode.AUTO,
-    OperationMode.MANUAL: HVACMode.HEAT,
+EQ_TO_HA_HVAC: dict[Eq3OperationMode, HVACMode] = {
+    Eq3OperationMode.OFF: HVACMode.OFF,
+    Eq3OperationMode.ON: HVACMode.HEAT,
+    Eq3OperationMode.AUTO: HVACMode.AUTO,
+    Eq3OperationMode.MANUAL: HVACMode.HEAT,
 }

 HA_TO_EQ_HVAC = {
-    HVACMode.OFF: OperationMode.OFF,
-    HVACMode.AUTO: OperationMode.AUTO,
-    HVACMode.HEAT: OperationMode.MANUAL,
+    HVACMode.OFF: Eq3OperationMode.OFF,
+    HVACMode.AUTO: Eq3OperationMode.AUTO,
+    HVACMode.HEAT: Eq3OperationMode.MANUAL,
 }

@@ -81,6 +81,7 @@ class TargetTemperatureSelector(str, Enum):
 DEFAULT_CURRENT_TEMP_SELECTOR = CurrentTemperatureSelector.DEVICE
 DEFAULT_TARGET_TEMP_SELECTOR = TargetTemperatureSelector.TARGET
 DEFAULT_SCAN_INTERVAL = 10  # seconds
+DEFAULT_AWAY_HOURS = 30 * 24

 SIGNAL_THERMOSTAT_DISCONNECTED = f"{DOMAIN}.thermostat_disconnected"
 SIGNAL_THERMOSTAT_CONNECTED = f"{DOMAIN}.thermostat_connected"

@@ -1,5 +1,10 @@
 """Base class for all eQ-3 entities."""

+from typing import Any
+
+from eq3btsmart import Eq3Exception
+from eq3btsmart.const import Eq3Event
+
 from homeassistant.core import callback
 from homeassistant.helpers.device_registry import (
     CONNECTION_BLUETOOTH,
@@ -45,7 +50,15 @@ class Eq3Entity(Entity):
     async def async_added_to_hass(self) -> None:
         """Run when entity about to be added to hass."""

-        self._thermostat.register_update_callback(self._async_on_updated)
+        self._thermostat.register_callback(
+            Eq3Event.DEVICE_DATA_RECEIVED, self._async_on_device_updated
+        )
+        self._thermostat.register_callback(
+            Eq3Event.STATUS_RECEIVED, self._async_on_status_updated
+        )
+        self._thermostat.register_callback(
+            Eq3Event.SCHEDULE_RECEIVED, self._async_on_status_updated
+        )

         self.async_on_remove(
             async_dispatcher_connect(
@@ -65,10 +78,25 @@ class Eq3Entity(Entity):
     async def async_will_remove_from_hass(self) -> None:
         """Run when entity will be removed from hass."""

-        self._thermostat.unregister_update_callback(self._async_on_updated)
+        self._thermostat.unregister_callback(
+            Eq3Event.DEVICE_DATA_RECEIVED, self._async_on_device_updated
+        )
+        self._thermostat.unregister_callback(
+            Eq3Event.STATUS_RECEIVED, self._async_on_status_updated
+        )
+        self._thermostat.unregister_callback(
+            Eq3Event.SCHEDULE_RECEIVED, self._async_on_status_updated
+        )

-    def _async_on_updated(self) -> None:
-        """Handle updated data from the thermostat."""
+    @callback
+    def _async_on_status_updated(self, data: Any) -> None:
+        """Handle updated status from the thermostat."""

         self.async_write_ha_state()

+    @callback
+    def _async_on_device_updated(self, data: Any) -> None:
+        """Handle updated device data from the thermostat."""
+
+        self.async_write_ha_state()
+
@@ -90,4 +118,9 @@ class Eq3Entity(Entity):
     def available(self) -> bool:
         """Whether the entity is available."""

-        return self._thermostat.status is not None and self._attr_available
+        try:
+            _ = self._thermostat.status
+        except Eq3Exception:
+            return False
+
+        return self._attr_available

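A condensed sketch of the callback pattern these entities follow after the change, drawn from the entity.py hunks above and the climate.py hunks earlier in this merge: handlers are registered per `Eq3Event`, subclasses override them, and chain to the base class, which writes the updated state.

    @callback
    def _async_on_status_updated(self, data: Any) -> None:
        """Handle a STATUS_RECEIVED event from the thermostat."""
        self._attr_hvac_mode = EQ_TO_HA_HVAC[self._thermostat.status.operation_mode]
        super()._async_on_status_updated(data)  # base class calls async_write_ha_state()
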
@@ -22,5 +22,5 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["eq3btsmart"],
-  "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.16.0"]
+  "requirements": ["eq3btsmart==2.1.0", "bleak-esphome==2.16.0"]
 }

@@ -1,17 +1,12 @@
 """Platform for eq3 number entities."""

-from collections.abc import Awaitable, Callable
+from collections.abc import Callable, Coroutine
 from dataclasses import dataclass
 from typing import TYPE_CHECKING

 from eq3btsmart import Thermostat
-from eq3btsmart.const import (
-    EQ3BT_MAX_OFFSET,
-    EQ3BT_MAX_TEMP,
-    EQ3BT_MIN_OFFSET,
-    EQ3BT_MIN_TEMP,
-)
-from eq3btsmart.models import Presets
+from eq3btsmart.const import EQ3_MAX_OFFSET, EQ3_MAX_TEMP, EQ3_MIN_OFFSET, EQ3_MIN_TEMP
+from eq3btsmart.models import Presets, Status

 from homeassistant.components.number import (
     NumberDeviceClass,
@@ -42,7 +37,7 @@ class Eq3NumberEntityDescription(NumberEntityDescription):
     value_func: Callable[[Presets], float]
     value_set_func: Callable[
         [Thermostat],
-        Callable[[float], Awaitable[None]],
+        Callable[[float], Coroutine[None, None, Status]],
     ]
     mode: NumberMode = NumberMode.BOX
     entity_category: EntityCategory | None = EntityCategory.CONFIG
@@ -51,44 +46,44 @@ class Eq3NumberEntityDescription(NumberEntityDescription):
 NUMBER_ENTITY_DESCRIPTIONS = [
     Eq3NumberEntityDescription(
         key=ENTITY_KEY_COMFORT,
-        value_func=lambda presets: presets.comfort_temperature.value,
+        value_func=lambda presets: presets.comfort_temperature,
         value_set_func=lambda thermostat: thermostat.async_configure_comfort_temperature,
         translation_key=ENTITY_KEY_COMFORT,
-        native_min_value=EQ3BT_MIN_TEMP,
-        native_max_value=EQ3BT_MAX_TEMP,
+        native_min_value=EQ3_MIN_TEMP,
+        native_max_value=EQ3_MAX_TEMP,
         native_step=EQ3BT_STEP,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
         device_class=NumberDeviceClass.TEMPERATURE,
     ),
     Eq3NumberEntityDescription(
         key=ENTITY_KEY_ECO,
-        value_func=lambda presets: presets.eco_temperature.value,
+        value_func=lambda presets: presets.eco_temperature,
         value_set_func=lambda thermostat: thermostat.async_configure_eco_temperature,
         translation_key=ENTITY_KEY_ECO,
-        native_min_value=EQ3BT_MIN_TEMP,
-        native_max_value=EQ3BT_MAX_TEMP,
+        native_min_value=EQ3_MIN_TEMP,
+        native_max_value=EQ3_MAX_TEMP,
         native_step=EQ3BT_STEP,
         native_unit_of_measurement=UnitOfTemperature.CELSIUS,
         device_class=NumberDeviceClass.TEMPERATURE,
     ),
     Eq3NumberEntityDescription(
         key=ENTITY_KEY_WINDOW_OPEN_TEMPERATURE,
-        value_func=lambda presets: presets.window_open_temperature.value,
+        value_func=lambda presets: presets.window_open_temperature,
         value_set_func=lambda thermostat: thermostat.async_configure_window_open_temperature,
         translation_key=ENTITY_KEY_WINDOW_OPEN_TEMPERATURE,
-        native_min_value=EQ3BT_MIN_TEMP,
-        native_max_value=EQ3BT_MAX_TEMP,
+        native_min_value=EQ3_MIN_TEMP,
+        native_max_value=EQ3_MAX_TEMP,
         native_step=EQ3BT_STEP,
         native_unit_of_measurement=UnitOfTemperature.CELSIUS,
         device_class=NumberDeviceClass.TEMPERATURE,
     ),
     Eq3NumberEntityDescription(
         key=ENTITY_KEY_OFFSET,
-        value_func=lambda presets: presets.offset_temperature.value,
+        value_func=lambda presets: presets.offset_temperature,
         value_set_func=lambda thermostat: thermostat.async_configure_temperature_offset,
         translation_key=ENTITY_KEY_OFFSET,
-        native_min_value=EQ3BT_MIN_OFFSET,
-        native_max_value=EQ3BT_MAX_OFFSET,
+        native_min_value=EQ3_MIN_OFFSET,
+        native_max_value=EQ3_MAX_OFFSET,
         native_step=EQ3BT_STEP,
         native_unit_of_measurement=UnitOfTemperature.CELSIUS,
         device_class=NumberDeviceClass.TEMPERATURE,
@@ -96,7 +91,7 @@ NUMBER_ENTITY_DESCRIPTIONS = [
     Eq3NumberEntityDescription(
         key=ENTITY_KEY_WINDOW_OPEN_TIMEOUT,
         value_set_func=lambda thermostat: thermostat.async_configure_window_open_duration,
-        value_func=lambda presets: presets.window_open_time.value.total_seconds() / 60,
+        value_func=lambda presets: presets.window_open_time.total_seconds() / 60,
         translation_key=ENTITY_KEY_WINDOW_OPEN_TIMEOUT,
         native_min_value=0,
         native_max_value=60,
@@ -137,7 +132,6 @@ class Eq3NumberEntity(Eq3Entity, NumberEntity):
         """Return the state of the entity."""

         if TYPE_CHECKING:
-            assert self._thermostat.status is not None
             assert self._thermostat.status.presets is not None

         return self.entity_description.value_func(self._thermostat.status.presets)
@@ -152,7 +146,7 @@ class Eq3NumberEntity(Eq3Entity, NumberEntity):
         """Return whether the entity is available."""

         return (
-            self._thermostat.status is not None
+            super().available
             and self._thermostat.status.presets is not None
             and self._attr_available
         )

@@ -1,12 +1,12 @@
 """Voluptuous schemas for eq3btsmart."""

-from eq3btsmart.const import EQ3BT_MAX_TEMP, EQ3BT_MIN_TEMP
+from eq3btsmart.const import EQ3_MAX_TEMP, EQ3_MIN_TEMP
 import voluptuous as vol

 from homeassistant.const import CONF_MAC
 from homeassistant.helpers import config_validation as cv

-SCHEMA_TEMPERATURE = vol.Range(min=EQ3BT_MIN_TEMP, max=EQ3BT_MAX_TEMP)
+SCHEMA_TEMPERATURE = vol.Range(min=EQ3_MIN_TEMP, max=EQ3_MAX_TEMP)
 SCHEMA_DEVICE = vol.Schema({vol.Required(CONF_MAC): cv.string})
 SCHEMA_MAC = vol.Schema(
     {

@@ -3,7 +3,6 @@
 from collections.abc import Callable
 from dataclasses import dataclass
 from datetime import datetime
-from typing import TYPE_CHECKING

 from eq3btsmart.models import Status

@@ -40,9 +39,7 @@ SENSOR_ENTITY_DESCRIPTIONS = [
     Eq3SensorEntityDescription(
         key=ENTITY_KEY_AWAY_UNTIL,
         translation_key=ENTITY_KEY_AWAY_UNTIL,
-        value_func=lambda status: (
-            status.away_until.value if status.away_until else None
-        ),
+        value_func=lambda status: (status.away_until if status.away_until else None),
         device_class=SensorDeviceClass.DATE,
     ),
 ]
@@ -78,7 +75,4 @@ class Eq3SensorEntity(Eq3Entity, SensorEntity):
     def native_value(self) -> int | datetime | None:
         """Return the value reported by the sensor."""

-        if TYPE_CHECKING:
-            assert self._thermostat.status is not None
-
         return self.entity_description.value_func(self._thermostat.status)

@@ -1,26 +1,45 @@
 """Platform for eq3 switch entities."""

-from collections.abc import Awaitable, Callable
+from collections.abc import Callable, Coroutine
 from dataclasses import dataclass
-from typing import TYPE_CHECKING, Any
+from datetime import timedelta
+from functools import partial
+from typing import Any

 from eq3btsmart import Thermostat
+from eq3btsmart.const import EQ3_DEFAULT_AWAY_TEMP, Eq3OperationMode
 from eq3btsmart.models import Status

 from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+import homeassistant.util.dt as dt_util

 from . import Eq3ConfigEntry
-from .const import ENTITY_KEY_AWAY, ENTITY_KEY_BOOST, ENTITY_KEY_LOCK
+from .const import (
+    DEFAULT_AWAY_HOURS,
+    ENTITY_KEY_AWAY,
+    ENTITY_KEY_BOOST,
+    ENTITY_KEY_LOCK,
+)
 from .entity import Eq3Entity


+async def async_set_away(thermostat: Thermostat, enable: bool) -> Status:
+    """Backport old async_set_away behavior."""
+
+    if not enable:
+        return await thermostat.async_set_mode(Eq3OperationMode.AUTO)
+
+    away_until = dt_util.now() + timedelta(hours=DEFAULT_AWAY_HOURS)
+    return await thermostat.async_set_away(away_until, EQ3_DEFAULT_AWAY_TEMP)
+
+
 @dataclass(frozen=True, kw_only=True)
 class Eq3SwitchEntityDescription(SwitchEntityDescription):
     """Entity description for eq3 switch entities."""

-    toggle_func: Callable[[Thermostat], Callable[[bool], Awaitable[None]]]
+    toggle_func: Callable[[Thermostat], Callable[[bool], Coroutine[None, None, Status]]]
     value_func: Callable[[Status], bool]

@@ -40,7 +59,7 @@ SWITCH_ENTITY_DESCRIPTIONS = [
     Eq3SwitchEntityDescription(
         key=ENTITY_KEY_AWAY,
         translation_key=ENTITY_KEY_AWAY,
-        toggle_func=lambda thermostat: thermostat.async_set_away,
+        toggle_func=lambda thermostat: partial(async_set_away, thermostat),
         value_func=lambda status: status.is_away,
     ),
 ]
@@ -88,7 +107,4 @@ class Eq3SwitchEntity(Eq3Entity, SwitchEntity):
     def is_on(self) -> bool:
         """Return the state of the switch."""

-        if TYPE_CHECKING:
-            assert self._thermostat.status is not None
-
         return self.entity_description.value_func(self._thermostat.status)

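The `partial(async_set_away, thermostat)` wiring above matches the new `toggle_func` type, `Callable[[Thermostat], Callable[[bool], Coroutine[None, None, Status]]]`. A hypothetical sketch of how the switch entity would invoke it (the actual `async_turn_on` body is not part of this hunk):

    async def async_turn_on(self, **kwargs: Any) -> None:
        # toggle_func(thermostat) returns the bound coroutine function; call it with the new state
        await self.entity_description.toggle_func(self._thermostat)(True)
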
@@ -332,7 +332,7 @@ class EsphomeAssistSatellite(
             }
         elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_RUN_START:
             assert event.data is not None
-            if tts_output := event.data["tts_output"]:
+            if tts_output := event.data.get("tts_output"):
                 path = tts_output["url"]
                 url = async_process_play_media_url(self.hass, path)
                 data_to_send = {"url": url}

@@ -109,6 +109,7 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
     SensorEntityDescription(
         key="timestamp",
         translation_key="timestamp",
+        device_class=SensorDeviceClass.DURATION,
         native_unit_of_measurement=UnitOfTime.SECONDS,
         entity_registry_enabled_default=False,
         entity_category=EntityCategory.DIAGNOSTIC,

@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/google",
   "iot_class": "cloud_polling",
   "loggers": ["googleapiclient"],
-  "requirements": ["gcal-sync==7.1.0", "oauth2client==4.1.3", "ical==10.0.0"]
+  "requirements": ["gcal-sync==7.1.0", "oauth2client==4.1.3", "ical==10.0.4"]
 }

@@ -45,7 +45,10 @@ CONF_IMAGE_FILENAME = "image_filename"
 CONF_FILENAMES = "filenames"

 CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
-PLATFORMS = (Platform.CONVERSATION,)
+PLATFORMS = (
+    Platform.CONVERSATION,
+    Platform.TTS,
+)

 type GoogleGenerativeAIConfigEntry = ConfigEntry[Client]

@@ -6,9 +6,11 @@ DOMAIN = "google_generative_ai_conversation"
 LOGGER = logging.getLogger(__package__)
 CONF_PROMPT = "prompt"

+ATTR_MODEL = "model"
 CONF_RECOMMENDED = "recommended"
 CONF_CHAT_MODEL = "chat_model"
 RECOMMENDED_CHAT_MODEL = "models/gemini-2.0-flash"
+RECOMMENDED_TTS_MODEL = "gemini-2.5-flash-preview-tts"
 CONF_TEMPERATURE = "temperature"
 RECOMMENDED_TEMPERATURE = 1.0
 CONF_TOP_P = "top_p"

@@ -2,63 +2,18 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import codecs
|
||||
from collections.abc import AsyncGenerator, Callable
|
||||
from dataclasses import replace
|
||||
from typing import Any, Literal, cast
|
||||
|
||||
from google.genai.errors import APIError, ClientError
|
||||
from google.genai.types import (
|
||||
AutomaticFunctionCallingConfig,
|
||||
Content,
|
||||
FunctionDeclaration,
|
||||
GenerateContentConfig,
|
||||
GenerateContentResponse,
|
||||
GoogleSearch,
|
||||
HarmCategory,
|
||||
Part,
|
||||
SafetySetting,
|
||||
Schema,
|
||||
Tool,
|
||||
)
|
||||
from voluptuous_openapi import convert
|
||||
from typing import Literal
|
||||
|
||||
from homeassistant.components import assist_pipeline, conversation
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr, intent, llm
|
||||
from homeassistant.helpers import intent
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import (
|
||||
CONF_CHAT_MODEL,
|
||||
CONF_DANGEROUS_BLOCK_THRESHOLD,
|
||||
CONF_HARASSMENT_BLOCK_THRESHOLD,
|
||||
CONF_HATE_BLOCK_THRESHOLD,
|
||||
CONF_MAX_TOKENS,
|
||||
CONF_PROMPT,
|
||||
CONF_SEXUAL_BLOCK_THRESHOLD,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_TOP_K,
|
||||
CONF_TOP_P,
|
||||
CONF_USE_GOOGLE_SEARCH_TOOL,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
RECOMMENDED_HARM_BLOCK_THRESHOLD,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
RECOMMENDED_TOP_K,
|
||||
RECOMMENDED_TOP_P,
|
||||
)
|
||||
|
||||
# Max number of back and forth with the LLM to generate a response
|
||||
MAX_TOOL_ITERATIONS = 10
|
||||
|
||||
ERROR_GETTING_RESPONSE = (
|
||||
"Sorry, I had a problem getting a response from Google Generative AI."
|
||||
)
|
||||
from .const import CONF_PROMPT, DOMAIN, LOGGER
|
||||
from .entity import ERROR_GETTING_RESPONSE, GoogleGenerativeAILLMBaseEntity
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -71,267 +26,18 @@ async def async_setup_entry(
|
||||
async_add_entities([agent])
|
||||
|
||||
|
||||
SUPPORTED_SCHEMA_KEYS = {
|
||||
# Gemini API does not support all of the OpenAPI schema
|
||||
# SoT: https://ai.google.dev/api/caching#Schema
|
||||
"type",
|
||||
"format",
|
||||
"description",
|
||||
"nullable",
|
||||
"enum",
|
||||
"max_items",
|
||||
"min_items",
|
||||
"properties",
|
||||
"required",
|
||||
"items",
|
||||
}
|
||||
|
||||
|
||||
def _camel_to_snake(name: str) -> str:
|
||||
"""Convert camel case to snake case."""
|
||||
return "".join(["_" + c.lower() if c.isupper() else c for c in name]).lstrip("_")
|
||||
|
||||
|
||||
def _format_schema(schema: dict[str, Any]) -> Schema:
|
||||
"""Format the schema to be compatible with Gemini API."""
|
||||
if subschemas := schema.get("allOf"):
|
||||
for subschema in subschemas: # Gemini API does not support allOf keys
|
||||
if "type" in subschema: # Fallback to first subschema with 'type' field
|
||||
return _format_schema(subschema)
|
||||
return _format_schema(
|
||||
subschemas[0]
|
||||
) # Or, if not found, to any of the subschemas
|
||||
|
||||
result = {}
|
||||
for key, val in schema.items():
|
||||
key = _camel_to_snake(key)
|
||||
if key not in SUPPORTED_SCHEMA_KEYS:
|
||||
continue
|
||||
if key == "type":
|
||||
val = val.upper()
|
||||
elif key == "format":
|
||||
# Gemini API does not support all formats, see: https://ai.google.dev/api/caching#Schema
|
||||
# formats that are not supported are ignored
|
||||
if schema.get("type") == "string" and val not in ("enum", "date-time"):
|
||||
continue
|
||||
if schema.get("type") == "number" and val not in ("float", "double"):
|
||||
continue
|
||||
if schema.get("type") == "integer" and val not in ("int32", "int64"):
|
||||
continue
|
||||
if schema.get("type") not in ("string", "number", "integer"):
|
||||
continue
|
||||
elif key == "items":
|
||||
val = _format_schema(val)
|
||||
elif key == "properties":
|
||||
val = {k: _format_schema(v) for k, v in val.items()}
|
||||
result[key] = val
|
||||
|
||||
if result.get("enum") and result.get("type") != "STRING":
|
||||
# enum is only allowed for STRING type. This is safe as long as the schema
|
||||
# contains vol.Coerce for the respective type, for example:
|
||||
# vol.All(vol.Coerce(int), vol.In([1, 2, 3]))
|
||||
result["type"] = "STRING"
|
||||
result["enum"] = [str(item) for item in result["enum"]]
|
||||
|
||||
if result.get("type") == "OBJECT" and not result.get("properties"):
|
||||
# An object with undefined properties is not supported by Gemini API.
|
||||
# Fallback to JSON string. This will probably fail for most tools that want it,
|
||||
# but we don't have a better fallback strategy so far.
|
||||
result["properties"] = {"json": {"type": "STRING"}}
|
||||
result["required"] = []
|
||||
return cast(Schema, result)
|
||||
|
||||
|
||||
def _format_tool(
|
||||
tool: llm.Tool, custom_serializer: Callable[[Any], Any] | None
|
||||
) -> Tool:
|
||||
"""Format tool specification."""
|
||||
|
||||
if tool.parameters.schema:
|
||||
parameters = _format_schema(
|
||||
convert(tool.parameters, custom_serializer=custom_serializer)
|
||||
)
|
||||
else:
|
||||
parameters = None
|
||||
|
||||
return Tool(
|
||||
function_declarations=[
|
||||
FunctionDeclaration(
|
||||
name=tool.name,
|
||||
description=tool.description,
|
||||
parameters=parameters,
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def _escape_decode(value: Any) -> Any:
|
||||
"""Recursively call codecs.escape_decode on all values."""
|
||||
if isinstance(value, str):
|
||||
return codecs.escape_decode(bytes(value, "utf-8"))[0].decode("utf-8") # type: ignore[attr-defined]
|
||||
if isinstance(value, list):
|
||||
return [_escape_decode(item) for item in value]
|
||||
if isinstance(value, dict):
|
||||
return {k: _escape_decode(v) for k, v in value.items()}
|
||||
return value
|
||||
|
||||
|
||||
def _create_google_tool_response_parts(
|
||||
parts: list[conversation.ToolResultContent],
|
||||
) -> list[Part]:
|
||||
"""Create Google tool response parts."""
|
||||
return [
|
||||
Part.from_function_response(
|
||||
name=tool_result.tool_name, response=tool_result.tool_result
|
||||
)
|
||||
for tool_result in parts
|
||||
]
|
||||
|
||||
|
||||
def _create_google_tool_response_content(
|
||||
content: list[conversation.ToolResultContent],
|
||||
) -> Content:
|
||||
"""Create a Google tool response content."""
|
||||
return Content(
|
||||
role="user",
|
||||
parts=_create_google_tool_response_parts(content),
|
||||
)
|
||||
|
||||
|
||||
def _convert_content(
|
||||
content: (
|
||||
conversation.UserContent
|
||||
| conversation.AssistantContent
|
||||
| conversation.SystemContent
|
||||
),
|
||||
) -> Content:
|
||||
"""Convert HA content to Google content."""
|
||||
if content.role != "assistant" or not content.tool_calls:
|
||||
role = "model" if content.role == "assistant" else content.role
|
||||
return Content(
|
||||
role=role,
|
||||
parts=[
|
||||
Part.from_text(text=content.content if content.content else ""),
|
||||
],
|
||||
)
|
||||
|
||||
# Handle the Assistant content with tool calls.
|
||||
assert type(content) is conversation.AssistantContent
|
||||
parts: list[Part] = []
|
||||
|
||||
if content.content:
|
||||
parts.append(Part.from_text(text=content.content))
|
||||
|
||||
if content.tool_calls:
|
||||
parts.extend(
|
||||
[
|
||||
Part.from_function_call(
|
||||
name=tool_call.tool_name,
|
||||
args=_escape_decode(tool_call.tool_args),
|
||||
)
|
||||
for tool_call in content.tool_calls
|
||||
]
|
||||
)
|
||||
|
||||
return Content(role="model", parts=parts)
|
||||
|
||||
|
||||
async def _transform_stream(
|
||||
result: AsyncGenerator[GenerateContentResponse],
|
||||
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
|
||||
new_message = True
|
||||
try:
|
||||
async for response in result:
|
||||
LOGGER.debug("Received response chunk: %s", response)
|
||||
chunk: conversation.AssistantContentDeltaDict = {}
|
||||
|
||||
if new_message:
|
||||
chunk["role"] = "assistant"
|
||||
new_message = False
|
||||
|
||||
# According to the API docs, this would mean no candidate is returned, so we can safely throw an error here.
|
||||
if response.prompt_feedback or not response.candidates:
|
||||
reason = (
|
||||
response.prompt_feedback.block_reason_message
|
||||
if response.prompt_feedback
|
||||
else "unknown"
|
||||
)
|
||||
raise HomeAssistantError(
|
||||
f"The message got blocked due to content violations, reason: {reason}"
|
||||
)
|
||||
|
||||
candidate = response.candidates[0]
|
||||
|
||||
if (
|
||||
candidate.finish_reason is not None
|
||||
and candidate.finish_reason != "STOP"
|
||||
):
|
||||
# The message ended due to a content error as explained in: https://ai.google.dev/api/generate-content#FinishReason
|
||||
LOGGER.error(
|
||||
"Error in Google Generative AI response: %s, see: https://ai.google.dev/api/generate-content#FinishReason",
|
||||
candidate.finish_reason,
|
||||
)
|
||||
raise HomeAssistantError(
|
||||
f"{ERROR_GETTING_RESPONSE} Reason: {candidate.finish_reason}"
|
||||
)
|
||||
|
||||
response_parts = (
|
||||
candidate.content.parts
|
||||
if candidate.content is not None and candidate.content.parts is not None
|
||||
else []
|
||||
)
|
||||
|
||||
content = "".join([part.text for part in response_parts if part.text])
|
||||
tool_calls = []
|
||||
for part in response_parts:
|
||||
if not part.function_call:
|
||||
continue
|
||||
tool_call = part.function_call
|
||||
tool_name = tool_call.name if tool_call.name else ""
|
||||
tool_args = _escape_decode(tool_call.args)
|
||||
tool_calls.append(
|
||||
llm.ToolInput(tool_name=tool_name, tool_args=tool_args)
|
||||
)
|
||||
|
||||
if tool_calls:
|
||||
chunk["tool_calls"] = tool_calls
|
||||
|
||||
chunk["content"] = content
|
||||
yield chunk
|
||||
except (
|
||||
APIError,
|
||||
ValueError,
|
||||
) as err:
|
||||
LOGGER.error("Error sending message: %s %s", type(err), err)
|
||||
if isinstance(err, APIError):
|
||||
message = err.message
|
||||
else:
|
||||
message = type(err).__name__
|
||||
error = f"{ERROR_GETTING_RESPONSE}: {message}"
|
||||
raise HomeAssistantError(error) from err
|
||||
|
||||
|
||||
class GoogleGenerativeAIConversationEntity(
|
||||
conversation.ConversationEntity, conversation.AbstractConversationAgent
|
||||
conversation.ConversationEntity,
|
||||
conversation.AbstractConversationAgent,
|
||||
GoogleGenerativeAILLMBaseEntity,
|
||||
):
|
||||
"""Google Generative AI conversation agent."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = None
|
||||
_attr_supports_streaming = True
|
||||
|
||||
def __init__(self, entry: ConfigEntry) -> None:
|
||||
"""Initialize the agent."""
|
||||
self.entry = entry
|
||||
self._genai_client = entry.runtime_data
|
||||
self._attr_unique_id = entry.entry_id
|
||||
self._attr_device_info = dr.DeviceInfo(
|
||||
identifiers={(DOMAIN, entry.entry_id)},
|
||||
name=entry.title,
|
||||
manufacturer="Google",
|
||||
model="Generative AI",
|
||||
entry_type=dr.DeviceEntryType.SERVICE,
|
||||
)
|
||||
super().__init__(entry)
|
||||
if self.entry.options.get(CONF_LLM_HASS_API):
|
||||
self._attr_supported_features = (
|
||||
conversation.ConversationEntityFeature.CONTROL
|
||||
@@ -358,13 +64,6 @@ class GoogleGenerativeAIConversationEntity(
|
||||
conversation.async_unset_agent(self.hass, self.entry)
|
||||
await super().async_will_remove_from_hass()
|
||||
|
||||
def _fix_tool_name(self, tool_name: str) -> str:
|
||||
"""Fix tool name if needed."""
|
||||
# The Gemini 2.0+ tokenizer seemingly has a issue with the HassListAddItem tool
|
||||
# name. This makes sure when it incorrectly changes the name, that we change it
|
||||
# back for HA to call.
|
||||
return tool_name if tool_name != "HasListAddItem" else "HassListAddItem"
|
||||
|
||||
async def _async_handle_message(
|
||||
self,
|
||||
user_input: conversation.ConversationInput,
|
||||
@@ -374,11 +73,11 @@ class GoogleGenerativeAIConversationEntity(
|
||||
options = self.entry.options
|
||||
|
||||
try:
|
||||
await chat_log.async_update_llm_data(
|
||||
DOMAIN,
|
||||
user_input,
|
||||
await chat_log.async_provide_llm_data(
|
||||
user_input.as_llm_context(DOMAIN),
|
||||
options.get(CONF_LLM_HASS_API),
|
||||
options.get(CONF_PROMPT),
|
||||
user_input.extra_system_prompt,
|
||||
)
|
||||
except conversation.ConverseError as err:
|
||||
return err.as_conversation_result()
|
||||
@@ -399,163 +98,6 @@ class GoogleGenerativeAIConversationEntity(
|
||||
continue_conversation=chat_log.continue_conversation,
|
||||
)
|
||||
|
||||
async def _async_handle_chat_log(
|
||||
self,
|
||||
chat_log: conversation.ChatLog,
|
||||
) -> None:
|
||||
"""Generate an answer for the chat log."""
|
||||
options = self.entry.options
|
||||
|
||||
tools: list[Tool | Callable[..., Any]] | None = None
|
||||
if chat_log.llm_api:
|
||||
tools = [
|
||||
_format_tool(tool, chat_log.llm_api.custom_serializer)
|
||||
for tool in chat_log.llm_api.tools
|
||||
]
|
||||
|
||||
# Using search grounding allows the model to retrieve information from the web,
|
||||
# however, it may interfere with how the model decides to use some tools or entities;
|
||||
# for example, a weather entity may be disregarded if the model chooses to Google the weather instead.
|
||||
if options.get(CONF_USE_GOOGLE_SEARCH_TOOL) is True:
|
||||
tools = tools or []
|
||||
tools.append(Tool(google_search=GoogleSearch()))
|
||||
|
||||
model_name = self.entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
||||
# Avoid INVALID_ARGUMENT Developer instruction is not enabled for <model>
|
||||
supports_system_instruction = (
|
||||
"gemma" not in model_name
|
||||
and "gemini-2.0-flash-preview-image-generation" not in model_name
|
||||
)
|
||||
|
||||
prompt_content = cast(
|
||||
conversation.SystemContent,
|
||||
chat_log.content[0],
|
||||
)
|
||||
|
||||
if prompt_content.content:
|
||||
prompt = prompt_content.content
|
||||
else:
|
||||
raise HomeAssistantError("Invalid prompt content")
|
||||
|
||||
messages: list[Content] = []
|
||||
|
||||
# Google groups tool results, we do not. Group them before sending.
|
||||
tool_results: list[conversation.ToolResultContent] = []
|
||||
|
||||
for chat_content in chat_log.content[1:-1]:
|
||||
if chat_content.role == "tool_result":
|
||||
tool_results.append(chat_content)
|
||||
continue
|
||||
|
||||
if (
|
||||
not isinstance(chat_content, conversation.ToolResultContent)
|
||||
and chat_content.content == ""
|
||||
):
|
||||
# Skipping is not possible since the number of function calls needs to match the number of function responses,
|
||||
# and skipping one would mean removing the other, which would break the chat log
|
||||
chat_content = replace(chat_content, content=" ")
|
||||
|
||||
if tool_results:
|
||||
messages.append(_create_google_tool_response_content(tool_results))
|
||||
tool_results.clear()
|
||||
|
||||
messages.append(_convert_content(chat_content))
|
||||
|
||||
# The SDK requires the first message to be a user message
|
||||
# This is not the case if user used `start_conversation`
|
||||
# Workaround from https://github.com/googleapis/python-genai/issues/529#issuecomment-2740964537
|
||||
if messages and messages[0].role != "user":
|
||||
messages.insert(
|
||||
0,
|
||||
Content(role="user", parts=[Part.from_text(text=" ")]),
|
||||
)
|
||||
|
||||
if tool_results:
|
||||
messages.append(_create_google_tool_response_content(tool_results))
|
||||
generate_content_config = GenerateContentConfig(
|
||||
temperature=self.entry.options.get(
|
||||
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
|
||||
),
|
||||
top_k=self.entry.options.get(CONF_TOP_K, RECOMMENDED_TOP_K),
|
||||
top_p=self.entry.options.get(CONF_TOP_P, RECOMMENDED_TOP_P),
|
||||
max_output_tokens=self.entry.options.get(
|
||||
CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS
|
||||
),
|
||||
safety_settings=[
|
||||
SafetySetting(
|
||||
category=HarmCategory.HARM_CATEGORY_HATE_SPEECH,
|
||||
threshold=self.entry.options.get(
|
||||
CONF_HATE_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
|
||||
),
|
||||
),
|
||||
SafetySetting(
|
||||
category=HarmCategory.HARM_CATEGORY_HARASSMENT,
|
||||
threshold=self.entry.options.get(
|
||||
CONF_HARASSMENT_BLOCK_THRESHOLD,
|
||||
RECOMMENDED_HARM_BLOCK_THRESHOLD,
|
||||
),
|
||||
),
|
||||
SafetySetting(
|
||||
category=HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT,
|
||||
threshold=self.entry.options.get(
|
||||
CONF_DANGEROUS_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
|
||||
),
|
||||
),
|
||||
SafetySetting(
|
||||
category=HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT,
|
||||
threshold=self.entry.options.get(
|
||||
CONF_SEXUAL_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
|
||||
),
|
||||
),
|
||||
],
|
||||
tools=tools or None,
|
||||
system_instruction=prompt if supports_system_instruction else None,
|
||||
automatic_function_calling=AutomaticFunctionCallingConfig(
|
||||
disable=True, maximum_remote_calls=None
|
||||
),
|
||||
)
|
||||
|
||||
if not supports_system_instruction:
|
||||
messages = [
|
||||
Content(role="user", parts=[Part.from_text(text=prompt)]),
|
||||
Content(role="model", parts=[Part.from_text(text="Ok")]),
|
||||
*messages,
|
||||
]
|
||||
chat = self._genai_client.aio.chats.create(
|
||||
model=model_name, history=messages, config=generate_content_config
|
||||
)
|
||||
user_message = chat_log.content[-1]
|
||||
assert isinstance(user_message, conversation.UserContent)
|
||||
chat_request: str | list[Part] = user_message.content
|
||||
# To prevent infinite loops, we limit the number of iterations
|
||||
for _iteration in range(MAX_TOOL_ITERATIONS):
|
||||
try:
|
||||
chat_response_generator = await chat.send_message_stream(
|
||||
message=chat_request
|
||||
)
|
||||
except (
|
||||
APIError,
|
||||
ClientError,
|
||||
ValueError,
|
||||
) as err:
|
||||
LOGGER.error("Error sending message: %s %s", type(err), err)
|
||||
error = ERROR_GETTING_RESPONSE
|
||||
raise HomeAssistantError(error) from err
|
||||
|
||||
chat_request = _create_google_tool_response_parts(
|
||||
[
|
||||
content
|
||||
async for content in chat_log.async_add_delta_content_stream(
|
||||
self.entity_id,
|
||||
_transform_stream(chat_response_generator),
|
||||
)
|
||||
if isinstance(content, conversation.ToolResultContent)
|
||||
]
|
||||
)
|
||||
|
||||
if not chat_log.unresponded_tool_results:
|
||||
break
|
||||
|
||||
async def _async_entry_update_listener(
|
||||
self, hass: HomeAssistant, entry: ConfigEntry
|
||||
) -> None:
|
||||
|
@@ -0,0 +1,475 @@
|
||||
"""Conversation support for the Google Generative AI Conversation integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import codecs
|
||||
from collections.abc import AsyncGenerator, Callable
|
||||
from dataclasses import replace
|
||||
from typing import Any, cast
|
||||
|
||||
from google.genai.errors import APIError, ClientError
|
||||
from google.genai.types import (
|
||||
AutomaticFunctionCallingConfig,
|
||||
Content,
|
||||
FunctionDeclaration,
|
||||
GenerateContentConfig,
|
||||
GenerateContentResponse,
|
||||
GoogleSearch,
|
||||
HarmCategory,
|
||||
Part,
|
||||
SafetySetting,
|
||||
Schema,
|
||||
Tool,
|
||||
)
|
||||
from voluptuous_openapi import convert
|
||||
|
||||
from homeassistant.components import conversation
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr, llm
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
from .const import (
|
||||
CONF_CHAT_MODEL,
|
||||
CONF_DANGEROUS_BLOCK_THRESHOLD,
|
||||
CONF_HARASSMENT_BLOCK_THRESHOLD,
|
||||
CONF_HATE_BLOCK_THRESHOLD,
|
||||
CONF_MAX_TOKENS,
|
||||
CONF_SEXUAL_BLOCK_THRESHOLD,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_TOP_K,
|
||||
CONF_TOP_P,
|
||||
CONF_USE_GOOGLE_SEARCH_TOOL,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
RECOMMENDED_HARM_BLOCK_THRESHOLD,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
RECOMMENDED_TOP_K,
|
||||
RECOMMENDED_TOP_P,
|
||||
)
|
||||
|
||||
# Max number of back and forth with the LLM to generate a response
|
||||
MAX_TOOL_ITERATIONS = 10
|
||||
|
||||
ERROR_GETTING_RESPONSE = (
|
||||
"Sorry, I had a problem getting a response from Google Generative AI."
|
||||
)
|
||||
|
||||
|
||||
SUPPORTED_SCHEMA_KEYS = {
|
||||
# Gemini API does not support all of the OpenAPI schema
|
||||
# SoT: https://ai.google.dev/api/caching#Schema
|
||||
"type",
|
||||
"format",
|
||||
"description",
|
||||
"nullable",
|
||||
"enum",
|
||||
"max_items",
|
||||
"min_items",
|
||||
"properties",
|
||||
"required",
|
||||
"items",
|
||||
}
|
||||
|
||||
|
||||
def _camel_to_snake(name: str) -> str:
|
||||
"""Convert camel case to snake case."""
|
||||
return "".join(["_" + c.lower() if c.isupper() else c for c in name]).lstrip("_")
|
||||
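# A quick sketch of what the helper above produces (example inputs only):
#     _camel_to_snake("maxItems") -> "max_items"
#     _camel_to_snake("minItems") -> "min_items"
#     _camel_to_snake("type")     -> "type"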
|
||||
|
||||
def _format_schema(schema: dict[str, Any]) -> Schema:
|
||||
"""Format the schema to be compatible with Gemini API."""
|
||||
if subschemas := schema.get("allOf"):
|
||||
for subschema in subschemas: # Gemini API does not support allOf keys
|
||||
if "type" in subschema: # Fallback to first subschema with 'type' field
|
||||
return _format_schema(subschema)
|
||||
return _format_schema(
|
||||
subschemas[0]
|
||||
) # Or, if not found, to any of the subschemas
|
||||
|
||||
result = {}
|
||||
for key, val in schema.items():
|
||||
key = _camel_to_snake(key)
|
||||
if key not in SUPPORTED_SCHEMA_KEYS:
|
||||
continue
|
||||
if key == "type":
|
||||
val = val.upper()
|
||||
elif key == "format":
|
||||
# Gemini API does not support all formats, see: https://ai.google.dev/api/caching#Schema
|
||||
# formats that are not supported are ignored
|
||||
if schema.get("type") == "string" and val not in ("enum", "date-time"):
|
||||
continue
|
||||
if schema.get("type") == "number" and val not in ("float", "double"):
|
||||
continue
|
||||
if schema.get("type") == "integer" and val not in ("int32", "int64"):
|
||||
continue
|
||||
if schema.get("type") not in ("string", "number", "integer"):
|
||||
continue
|
||||
elif key == "items":
|
||||
val = _format_schema(val)
|
||||
elif key == "properties":
|
||||
val = {k: _format_schema(v) for k, v in val.items()}
|
||||
result[key] = val
|
||||
|
||||
if result.get("enum") and result.get("type") != "STRING":
|
||||
# enum is only allowed for STRING type. This is safe as long as the schema
|
||||
# contains vol.Coerce for the respective type, for example:
|
||||
# vol.All(vol.Coerce(int), vol.In([1, 2, 3]))
|
||||
result["type"] = "STRING"
|
||||
result["enum"] = [str(item) for item in result["enum"]]
|
||||
|
||||
if result.get("type") == "OBJECT" and not result.get("properties"):
|
||||
# An object with undefined properties is not supported by Gemini API.
|
||||
# Fallback to JSON string. This will probably fail for most tools that want it,
|
||||
# but we don't have a better fallback strategy so far.
|
||||
result["properties"] = {"json": {"type": "STRING"}}
|
||||
result["required"] = []
|
||||
return cast(Schema, result)
|
||||
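# Minimal sketch of the conversion above with an assumed input: an OpenAPI fragment
# such as {"type": "integer", "enum": [1, 2, 3]} comes back as
# {"type": "STRING", "enum": ["1", "2", "3"]}, since the Gemini Schema only accepts
# enum values on STRING types.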
|
||||
|
||||
def _format_tool(
|
||||
tool: llm.Tool, custom_serializer: Callable[[Any], Any] | None
|
||||
) -> Tool:
|
||||
"""Format tool specification."""
|
||||
|
||||
if tool.parameters.schema:
|
||||
parameters = _format_schema(
|
||||
convert(tool.parameters, custom_serializer=custom_serializer)
|
||||
)
|
||||
else:
|
||||
parameters = None
|
||||
|
||||
return Tool(
|
||||
function_declarations=[
|
||||
FunctionDeclaration(
|
||||
name=tool.name,
|
||||
description=tool.description,
|
||||
parameters=parameters,
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def _escape_decode(value: Any) -> Any:
|
||||
"""Recursively call codecs.escape_decode on all values."""
|
||||
if isinstance(value, str):
|
||||
return codecs.escape_decode(bytes(value, "utf-8"))[0].decode("utf-8") # type: ignore[attr-defined]
|
||||
if isinstance(value, list):
|
||||
return [_escape_decode(item) for item in value]
|
||||
if isinstance(value, dict):
|
||||
return {k: _escape_decode(v) for k, v in value.items()}
|
||||
return value
|
||||
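# Hedged examples (codecs.escape_decode resolves byte-string escapes such as \t and \xNN):
#     _escape_decode("a\\tb")            -> "a\tb" (literal tab)
#     _escape_decode({"x": ["\\x41BC"]}) -> {"x": ["ABC"]}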
|
||||
|
||||
def _create_google_tool_response_parts(
|
||||
parts: list[conversation.ToolResultContent],
|
||||
) -> list[Part]:
|
||||
"""Create Google tool response parts."""
|
||||
return [
|
||||
Part.from_function_response(
|
||||
name=tool_result.tool_name, response=tool_result.tool_result
|
||||
)
|
||||
for tool_result in parts
|
||||
]
|
||||
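# For example (hypothetical tool result): a single ToolResultContent with
# tool_name="HassTurnOn" and tool_result={"success": True} becomes one
# Part.from_function_response(name="HassTurnOn", response={"success": True}).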
|
||||
|
||||
def _create_google_tool_response_content(
|
||||
content: list[conversation.ToolResultContent],
|
||||
) -> Content:
|
||||
"""Create a Google tool response content."""
|
||||
return Content(
|
||||
role="user",
|
||||
parts=_create_google_tool_response_parts(content),
|
||||
)
|
||||
|
||||
|
||||
def _convert_content(
|
||||
content: (
|
||||
conversation.UserContent
|
||||
| conversation.AssistantContent
|
||||
| conversation.SystemContent
|
||||
),
|
||||
) -> Content:
|
||||
"""Convert HA content to Google content."""
|
||||
if content.role != "assistant" or not content.tool_calls:
|
||||
role = "model" if content.role == "assistant" else content.role
|
||||
return Content(
|
||||
role=role,
|
||||
parts=[
|
||||
Part.from_text(text=content.content if content.content else ""),
|
||||
],
|
||||
)
|
||||
|
||||
# Handle the Assistant content with tool calls.
|
||||
assert type(content) is conversation.AssistantContent
|
||||
parts: list[Part] = []
|
||||
|
||||
if content.content:
|
||||
parts.append(Part.from_text(text=content.content))
|
||||
|
||||
if content.tool_calls:
|
||||
parts.extend(
|
||||
[
|
||||
Part.from_function_call(
|
||||
name=tool_call.tool_name,
|
||||
args=_escape_decode(tool_call.tool_args),
|
||||
)
|
||||
for tool_call in content.tool_calls
|
||||
]
|
||||
)
|
||||
|
||||
return Content(role="model", parts=parts)
|
||||
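# Rough mapping sketch (assumed content objects): plain user text becomes
# Content(role="user", parts=[Part.from_text(...)]), while assistant text with a
# tool call becomes Content(role="model", parts=[Part.from_text(...),
# Part.from_function_call(...)]).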
|
||||
|
||||
async def _transform_stream(
|
||||
result: AsyncGenerator[GenerateContentResponse],
|
||||
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
|
||||
new_message = True
|
||||
try:
|
||||
async for response in result:
|
||||
LOGGER.debug("Received response chunk: %s", response)
|
||||
chunk: conversation.AssistantContentDeltaDict = {}
|
||||
|
||||
if new_message:
|
||||
chunk["role"] = "assistant"
|
||||
new_message = False
|
||||
|
||||
# According to the API docs, this would mean no candidate is returned, so we can safely throw an error here.
|
||||
if response.prompt_feedback or not response.candidates:
|
||||
reason = (
|
||||
response.prompt_feedback.block_reason_message
|
||||
if response.prompt_feedback
|
||||
else "unknown"
|
||||
)
|
||||
raise HomeAssistantError(
|
||||
f"The message got blocked due to content violations, reason: {reason}"
|
||||
)
|
||||
|
||||
candidate = response.candidates[0]
|
||||
|
||||
if (
|
||||
candidate.finish_reason is not None
|
||||
and candidate.finish_reason != "STOP"
|
||||
):
|
||||
# The message ended due to a content error as explained in: https://ai.google.dev/api/generate-content#FinishReason
|
||||
LOGGER.error(
|
||||
"Error in Google Generative AI response: %s, see: https://ai.google.dev/api/generate-content#FinishReason",
|
||||
candidate.finish_reason,
|
||||
)
|
||||
raise HomeAssistantError(
|
||||
f"{ERROR_GETTING_RESPONSE} Reason: {candidate.finish_reason}"
|
||||
)
|
||||
|
||||
response_parts = (
|
||||
candidate.content.parts
|
||||
if candidate.content is not None and candidate.content.parts is not None
|
||||
else []
|
||||
)
|
||||
|
||||
content = "".join([part.text for part in response_parts if part.text])
|
||||
tool_calls = []
|
||||
for part in response_parts:
|
||||
if not part.function_call:
|
||||
continue
|
||||
tool_call = part.function_call
|
||||
tool_name = tool_call.name if tool_call.name else ""
|
||||
tool_args = _escape_decode(tool_call.args)
|
||||
tool_calls.append(
|
||||
llm.ToolInput(tool_name=tool_name, tool_args=tool_args)
|
||||
)
|
||||
|
||||
if tool_calls:
|
||||
chunk["tool_calls"] = tool_calls
|
||||
|
||||
chunk["content"] = content
|
||||
yield chunk
|
||||
except (
|
||||
APIError,
|
||||
ValueError,
|
||||
) as err:
|
||||
LOGGER.error("Error sending message: %s %s", type(err), err)
|
||||
if isinstance(err, APIError):
|
||||
message = err.message
|
||||
else:
|
||||
message = type(err).__name__
|
||||
error = f"{ERROR_GETTING_RESPONSE}: {message}"
|
||||
raise HomeAssistantError(error) from err
|
||||
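# Shape of the deltas yielded above (field names from
# conversation.AssistantContentDeltaDict, values purely illustrative):
#     {"role": "assistant", "content": "Turning on the light",
#      "tool_calls": [llm.ToolInput(tool_name="HassTurnOn", tool_args={"name": "kitchen"})]}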
|
||||
|
||||
class GoogleGenerativeAILLMBaseEntity(Entity):
|
||||
"""Google Generative AI base entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = None
|
||||
|
||||
def __init__(self, entry: ConfigEntry) -> None:
|
||||
"""Initialize the agent."""
|
||||
self.entry = entry
|
||||
self._genai_client = entry.runtime_data
|
||||
self._attr_unique_id = entry.entry_id
|
||||
self._attr_device_info = dr.DeviceInfo(
|
||||
identifiers={(DOMAIN, entry.entry_id)},
|
||||
name=entry.title,
|
||||
manufacturer="Google",
|
||||
model="Generative AI",
|
||||
entry_type=dr.DeviceEntryType.SERVICE,
|
||||
)
|
||||
|
||||
async def _async_handle_chat_log(
|
||||
self,
|
||||
chat_log: conversation.ChatLog,
|
||||
) -> None:
|
||||
"""Generate an answer for the chat log."""
|
||||
options = self.entry.options
|
||||
|
||||
tools: list[Tool | Callable[..., Any]] | None = None
|
||||
if chat_log.llm_api:
|
||||
tools = [
|
||||
_format_tool(tool, chat_log.llm_api.custom_serializer)
|
||||
for tool in chat_log.llm_api.tools
|
||||
]
|
||||
|
||||
# Using search grounding allows the model to retrieve information from the web,
|
||||
# however, it may interfere with how the model decides to use some tools or entities;
|
||||
# for example, a weather entity may be disregarded if the model chooses to Google the weather instead.
|
||||
if options.get(CONF_USE_GOOGLE_SEARCH_TOOL) is True:
|
||||
tools = tools or []
|
||||
tools.append(Tool(google_search=GoogleSearch()))
|
||||
|
||||
model_name = self.entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
||||
# Avoid INVALID_ARGUMENT Developer instruction is not enabled for <model>
|
||||
supports_system_instruction = (
|
||||
"gemma" not in model_name
|
||||
and "gemini-2.0-flash-preview-image-generation" not in model_name
|
||||
)
|
||||
|
||||
prompt_content = cast(
|
||||
conversation.SystemContent,
|
||||
chat_log.content[0],
|
||||
)
|
||||
|
||||
if prompt_content.content:
|
||||
prompt = prompt_content.content
|
||||
else:
|
||||
raise HomeAssistantError("Invalid prompt content")
|
||||
|
||||
messages: list[Content] = []
|
||||
|
||||
# Google groups tool results, we do not. Group them before sending.
|
||||
tool_results: list[conversation.ToolResultContent] = []
|
||||
|
||||
for chat_content in chat_log.content[1:-1]:
|
||||
if chat_content.role == "tool_result":
|
||||
tool_results.append(chat_content)
|
||||
continue
|
||||
|
||||
if (
|
||||
not isinstance(chat_content, conversation.ToolResultContent)
|
||||
and chat_content.content == ""
|
||||
):
|
||||
# Skipping is not possible since the number of function calls needs to match the number of function responses,
|
||||
# and skipping one would mean removing the other, which would break the chat log
|
||||
chat_content = replace(chat_content, content=" ")
|
||||
|
||||
if tool_results:
|
||||
messages.append(_create_google_tool_response_content(tool_results))
|
||||
tool_results.clear()
|
||||
|
||||
messages.append(_convert_content(chat_content))
|
||||
|
||||
# The SDK requires the first message to be a user message
|
||||
# This is not the case if user used `start_conversation`
|
||||
# Workaround from https://github.com/googleapis/python-genai/issues/529#issuecomment-2740964537
|
||||
if messages and messages[0].role != "user":
|
||||
messages.insert(
|
||||
0,
|
||||
Content(role="user", parts=[Part.from_text(text=" ")]),
|
||||
)
|
||||
|
||||
if tool_results:
|
||||
messages.append(_create_google_tool_response_content(tool_results))
|
||||
generate_content_config = GenerateContentConfig(
|
||||
temperature=self.entry.options.get(
|
||||
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
|
||||
),
|
||||
top_k=self.entry.options.get(CONF_TOP_K, RECOMMENDED_TOP_K),
|
||||
top_p=self.entry.options.get(CONF_TOP_P, RECOMMENDED_TOP_P),
|
||||
max_output_tokens=self.entry.options.get(
|
||||
CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS
|
||||
),
|
||||
safety_settings=[
|
||||
SafetySetting(
|
||||
category=HarmCategory.HARM_CATEGORY_HATE_SPEECH,
|
||||
threshold=self.entry.options.get(
|
||||
CONF_HATE_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
|
||||
),
|
||||
),
|
||||
SafetySetting(
|
||||
category=HarmCategory.HARM_CATEGORY_HARASSMENT,
|
||||
threshold=self.entry.options.get(
|
||||
CONF_HARASSMENT_BLOCK_THRESHOLD,
|
||||
RECOMMENDED_HARM_BLOCK_THRESHOLD,
|
||||
),
|
||||
),
|
||||
SafetySetting(
|
||||
category=HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT,
|
||||
threshold=self.entry.options.get(
|
||||
CONF_DANGEROUS_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
|
||||
),
|
||||
),
|
||||
SafetySetting(
|
||||
category=HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT,
|
||||
threshold=self.entry.options.get(
|
||||
CONF_SEXUAL_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
|
||||
),
|
||||
),
|
||||
],
|
||||
tools=tools or None,
|
||||
system_instruction=prompt if supports_system_instruction else None,
|
||||
automatic_function_calling=AutomaticFunctionCallingConfig(
|
||||
disable=True, maximum_remote_calls=None
|
||||
),
|
||||
)
|
||||
|
||||
if not supports_system_instruction:
|
||||
messages = [
|
||||
Content(role="user", parts=[Part.from_text(text=prompt)]),
|
||||
Content(role="model", parts=[Part.from_text(text="Ok")]),
|
||||
*messages,
|
||||
]
|
||||
chat = self._genai_client.aio.chats.create(
|
||||
model=model_name, history=messages, config=generate_content_config
|
||||
)
|
||||
user_message = chat_log.content[-1]
|
||||
assert isinstance(user_message, conversation.UserContent)
|
||||
chat_request: str | list[Part] = user_message.content
|
||||
# To prevent infinite loops, we limit the number of iterations
|
||||
for _iteration in range(MAX_TOOL_ITERATIONS):
|
||||
try:
|
||||
chat_response_generator = await chat.send_message_stream(
|
||||
message=chat_request
|
||||
)
|
||||
except (
|
||||
APIError,
|
||||
ClientError,
|
||||
ValueError,
|
||||
) as err:
|
||||
LOGGER.error("Error sending message: %s %s", type(err), err)
|
||||
error = ERROR_GETTING_RESPONSE
|
||||
raise HomeAssistantError(error) from err
|
||||
|
||||
chat_request = _create_google_tool_response_parts(
|
||||
[
|
||||
content
|
||||
async for content in chat_log.async_add_delta_content_stream(
|
||||
self.entity_id,
|
||||
_transform_stream(chat_response_generator),
|
||||
)
|
||||
if isinstance(content, conversation.ToolResultContent)
|
||||
]
|
||||
)
|
||||
|
||||
if not chat_log.unresponded_tool_results:
|
||||
break
|
@@ -0,0 +1,216 @@
|
||||
"""Text to speech support for Google Generative AI."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import suppress
|
||||
import io
|
||||
import logging
|
||||
from typing import Any
|
||||
import wave
|
||||
|
||||
from google.genai import types
|
||||
|
||||
from homeassistant.components.tts import (
|
||||
ATTR_VOICE,
|
||||
TextToSpeechEntity,
|
||||
TtsAudioType,
|
||||
Voice,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import ATTR_MODEL, DOMAIN, RECOMMENDED_TTS_MODEL
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up TTS entity."""
|
||||
tts_entity = GoogleGenerativeAITextToSpeechEntity(config_entry)
|
||||
async_add_entities([tts_entity])
|
||||
|
||||
|
||||
class GoogleGenerativeAITextToSpeechEntity(TextToSpeechEntity):
|
||||
"""Google Generative AI text-to-speech entity."""
|
||||
|
||||
_attr_supported_options = [ATTR_VOICE, ATTR_MODEL]
|
||||
# See https://ai.google.dev/gemini-api/docs/speech-generation#languages
|
||||
_attr_supported_languages = [
|
||||
"ar-EG",
|
||||
"bn-BD",
|
||||
"de-DE",
|
||||
"en-IN",
|
||||
"en-US",
|
||||
"es-US",
|
||||
"fr-FR",
|
||||
"hi-IN",
|
||||
"id-ID",
|
||||
"it-IT",
|
||||
"ja-JP",
|
||||
"ko-KR",
|
||||
"mr-IN",
|
||||
"nl-NL",
|
||||
"pl-PL",
|
||||
"pt-BR",
|
||||
"ro-RO",
|
||||
"ru-RU",
|
||||
"ta-IN",
|
||||
"te-IN",
|
||||
"th-TH",
|
||||
"tr-TR",
|
||||
"uk-UA",
|
||||
"vi-VN",
|
||||
]
|
||||
_attr_default_language = "en-US"
|
||||
# See https://ai.google.dev/gemini-api/docs/speech-generation#voices
|
||||
_supported_voices = [
|
||||
Voice(voice.split(" ", 1)[0].lower(), voice)
|
||||
for voice in (
|
||||
"Zephyr (Bright)",
|
||||
"Puck (Upbeat)",
|
||||
"Charon (Informative)",
|
||||
"Kore (Firm)",
|
||||
"Fenrir (Excitable)",
|
||||
"Leda (Youthful)",
|
||||
"Orus (Firm)",
|
||||
"Aoede (Breezy)",
|
||||
"Callirrhoe (Easy-going)",
|
||||
"Autonoe (Bright)",
|
||||
"Enceladus (Breathy)",
|
||||
"Iapetus (Clear)",
|
||||
"Umbriel (Easy-going)",
|
||||
"Algieba (Smooth)",
|
||||
"Despina (Smooth)",
|
||||
"Erinome (Clear)",
|
||||
"Algenib (Gravelly)",
|
||||
"Rasalgethi (Informative)",
|
||||
"Laomedeia (Upbeat)",
|
||||
"Achernar (Soft)",
|
||||
"Alnilam (Firm)",
|
||||
"Schedar (Even)",
|
||||
"Gacrux (Mature)",
|
||||
"Pulcherrima (Forward)",
|
||||
"Achird (Friendly)",
|
||||
"Zubenelgenubi (Casual)",
|
||||
"Vindemiatrix (Gentle)",
|
||||
"Sadachbia (Lively)",
|
||||
"Sadaltager (Knowledgeable)",
|
||||
"Sulafat (Warm)",
|
||||
)
|
||||
]
|
||||
|
||||
def __init__(self, entry: ConfigEntry) -> None:
|
||||
"""Initialize Google Generative AI Conversation speech entity."""
|
||||
self.entry = entry
|
||||
self._attr_name = "Google Generative AI TTS"
|
||||
self._attr_unique_id = f"{entry.entry_id}_tts"
|
||||
self._attr_device_info = dr.DeviceInfo(
|
||||
identifiers={(DOMAIN, entry.entry_id)},
|
||||
name=entry.title,
|
||||
manufacturer="Google",
|
||||
model="Generative AI",
|
||||
entry_type=dr.DeviceEntryType.SERVICE,
|
||||
)
|
||||
self._genai_client = entry.runtime_data
|
||||
self._default_voice_id = self._supported_voices[0].voice_id
|
||||
|
||||
@callback
|
||||
def async_get_supported_voices(self, language: str) -> list[Voice] | None:
|
||||
"""Return a list of supported voices for a language."""
|
||||
return self._supported_voices
|
||||
|
||||
async def async_get_tts_audio(
|
||||
self, message: str, language: str, options: dict[str, Any]
|
||||
) -> TtsAudioType:
|
||||
"""Load tts audio file from the engine."""
|
||||
try:
|
||||
response = self._genai_client.models.generate_content(
|
||||
model=options.get(ATTR_MODEL, RECOMMENDED_TTS_MODEL),
|
||||
contents=message,
|
||||
config=types.GenerateContentConfig(
|
||||
response_modalities=["AUDIO"],
|
||||
speech_config=types.SpeechConfig(
|
||||
voice_config=types.VoiceConfig(
|
||||
prebuilt_voice_config=types.PrebuiltVoiceConfig(
|
||||
voice_name=options.get(
|
||||
ATTR_VOICE, self._default_voice_id
|
||||
)
|
||||
)
|
||||
)
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
data = response.candidates[0].content.parts[0].inline_data.data
|
||||
mime_type = response.candidates[0].content.parts[0].inline_data.mime_type
|
||||
except Exception as exc:
|
||||
_LOGGER.warning(
|
||||
"Error during processing of TTS request %s", exc, exc_info=True
|
||||
)
|
||||
raise HomeAssistantError(exc) from exc
|
||||
return "wav", self._convert_to_wav(data, mime_type)
|
||||
|
||||
def _convert_to_wav(self, audio_data: bytes, mime_type: str) -> bytes:
|
||||
"""Generate a WAV file header for the given audio data and parameters.
|
||||
|
||||
Args:
|
||||
audio_data: The raw audio data as a bytes object.
|
||||
mime_type: Mime type of the audio data.
|
||||
|
||||
Returns:
|
||||
A bytes object containing the complete WAV file (header and audio data).
|
||||
|
||||
"""
|
||||
parameters = self._parse_audio_mime_type(mime_type)
|
||||
|
||||
wav_buffer = io.BytesIO()
|
||||
with wave.open(wav_buffer, "wb") as wf:
|
||||
wf.setnchannels(1)
|
||||
wf.setsampwidth(parameters["bits_per_sample"] // 8)
|
||||
wf.setframerate(parameters["rate"])
|
||||
wf.writeframes(audio_data)
|
||||
|
||||
return wav_buffer.getvalue()
|
||||
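# Rough usage sketch (assumed data): wrapping raw 16-bit, 24 kHz mono PCM returned
# by the API into a playable WAV container:
#     wav_bytes = self._convert_to_wav(pcm_bytes, "audio/L16;rate=24000")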
|
||||
def _parse_audio_mime_type(self, mime_type: str) -> dict[str, int]:
|
||||
"""Parse bits per sample and rate from an audio MIME type string.
|
||||
|
||||
Assumes bits per sample is encoded like "L16" and rate as "rate=xxxxx".
|
||||
|
||||
Args:
|
||||
mime_type: The audio MIME type string (e.g., "audio/L16;rate=24000").
|
||||
|
||||
Returns:
|
||||
A dictionary with "bits_per_sample" and "rate" keys. Values default to
|
||||
16 bits and 24000 Hz when they cannot be parsed from the MIME type.
|
||||
|
||||
"""
|
||||
if not mime_type.startswith("audio/L"):
|
||||
_LOGGER.warning("Received unexpected MIME type %s", mime_type)
|
||||
raise HomeAssistantError(f"Unsupported audio MIME type: {mime_type}")
|
||||
|
||||
bits_per_sample = 16
|
||||
rate = 24000
|
||||
|
||||
# Extract rate from parameters
|
||||
parts = mime_type.split(";")
|
||||
for param in parts: # Skip the main type part
|
||||
param = param.strip()
|
||||
if param.lower().startswith("rate="):
|
||||
# Handle cases like "rate=" with no value or non-integer value and keep rate as default
|
||||
with suppress(ValueError, IndexError):
|
||||
rate_str = param.split("=", 1)[1]
|
||||
rate = int(rate_str)
|
||||
elif param.startswith("audio/L"):
|
||||
# Keep bits_per_sample as default if conversion fails
|
||||
with suppress(ValueError, IndexError):
|
||||
bits_per_sample = int(param.split("L", 1)[1])
|
||||
|
||||
return {"bits_per_sample": bits_per_sample, "rate": rate}
|
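# Illustrative call with a typical Gemini TTS MIME type (values assumed):
#     self._parse_audio_mime_type("audio/L16;rate=24000")
#     -> {"bits_per_sample": 16, "rate": 24000}
# An unparsable rate such as "audio/L24;rate=" keeps the 24000 default while still
# reading 24 bits per sample.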
@@ -78,86 +78,85 @@ def _read_file_contents(
|
||||
return results
|
||||
|
||||
|
||||
async def _async_handle_upload(call: ServiceCall) -> ServiceResponse:
|
||||
"""Generate content from text and optionally images."""
|
||||
config_entry: GooglePhotosConfigEntry | None = (
|
||||
call.hass.config_entries.async_get_entry(call.data[CONF_CONFIG_ENTRY_ID])
|
||||
)
|
||||
if not config_entry:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="integration_not_found",
|
||||
translation_placeholders={"target": DOMAIN},
|
||||
)
|
||||
scopes = config_entry.data["token"]["scope"].split(" ")
|
||||
if UPLOAD_SCOPE not in scopes:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="missing_upload_permission",
|
||||
translation_placeholders={"target": DOMAIN},
|
||||
)
|
||||
coordinator = config_entry.runtime_data
|
||||
client_api = coordinator.client
|
||||
upload_tasks = []
|
||||
file_results = await call.hass.async_add_executor_job(
|
||||
_read_file_contents, call.hass, call.data[CONF_FILENAME]
|
||||
)
|
||||
|
||||
album = call.data[CONF_ALBUM]
|
||||
try:
|
||||
album_id = await coordinator.get_or_create_album(album)
|
||||
except GooglePhotosApiError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="create_album_error",
|
||||
translation_placeholders={"message": str(err)},
|
||||
) from err
|
||||
|
||||
for mime_type, content in file_results:
|
||||
upload_tasks.append(client_api.upload_content(content, mime_type))
|
||||
try:
|
||||
upload_results = await asyncio.gather(*upload_tasks)
|
||||
except GooglePhotosApiError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="upload_error",
|
||||
translation_placeholders={"message": str(err)},
|
||||
) from err
|
||||
try:
|
||||
upload_result = await client_api.create_media_items(
|
||||
[
|
||||
NewMediaItem(SimpleMediaItem(upload_token=upload_result.upload_token))
|
||||
for upload_result in upload_results
|
||||
],
|
||||
album_id=album_id,
|
||||
)
|
||||
except GooglePhotosApiError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="api_error",
|
||||
translation_placeholders={"message": str(err)},
|
||||
) from err
|
||||
if call.return_response:
|
||||
return {
|
||||
"media_items": [
|
||||
{"media_item_id": item_result.media_item.id}
|
||||
for item_result in upload_result.new_media_item_results
|
||||
if item_result.media_item and item_result.media_item.id
|
||||
],
|
||||
"album_id": album_id,
|
||||
}
|
||||
return None
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Register Google Photos services."""
|
||||
|
||||
async def async_handle_upload(call: ServiceCall) -> ServiceResponse:
|
||||
"""Generate content from text and optionally images."""
|
||||
config_entry: GooglePhotosConfigEntry | None = (
|
||||
hass.config_entries.async_get_entry(call.data[CONF_CONFIG_ENTRY_ID])
|
||||
)
|
||||
if not config_entry:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="integration_not_found",
|
||||
translation_placeholders={"target": DOMAIN},
|
||||
)
|
||||
scopes = config_entry.data["token"]["scope"].split(" ")
|
||||
if UPLOAD_SCOPE not in scopes:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="missing_upload_permission",
|
||||
translation_placeholders={"target": DOMAIN},
|
||||
)
|
||||
coordinator = config_entry.runtime_data
|
||||
client_api = coordinator.client
|
||||
upload_tasks = []
|
||||
file_results = await hass.async_add_executor_job(
|
||||
_read_file_contents, hass, call.data[CONF_FILENAME]
|
||||
)
|
||||
|
||||
album = call.data[CONF_ALBUM]
|
||||
try:
|
||||
album_id = await coordinator.get_or_create_album(album)
|
||||
except GooglePhotosApiError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="create_album_error",
|
||||
translation_placeholders={"message": str(err)},
|
||||
) from err
|
||||
|
||||
for mime_type, content in file_results:
|
||||
upload_tasks.append(client_api.upload_content(content, mime_type))
|
||||
try:
|
||||
upload_results = await asyncio.gather(*upload_tasks)
|
||||
except GooglePhotosApiError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="upload_error",
|
||||
translation_placeholders={"message": str(err)},
|
||||
) from err
|
||||
try:
|
||||
upload_result = await client_api.create_media_items(
|
||||
[
|
||||
NewMediaItem(
|
||||
SimpleMediaItem(upload_token=upload_result.upload_token)
|
||||
)
|
||||
for upload_result in upload_results
|
||||
],
|
||||
album_id=album_id,
|
||||
)
|
||||
except GooglePhotosApiError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="api_error",
|
||||
translation_placeholders={"message": str(err)},
|
||||
) from err
|
||||
if call.return_response:
|
||||
return {
|
||||
"media_items": [
|
||||
{"media_item_id": item_result.media_item.id}
|
||||
for item_result in upload_result.new_media_item_results
|
||||
if item_result.media_item and item_result.media_item.id
|
||||
],
|
||||
"album_id": album_id,
|
||||
}
|
||||
return None
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
UPLOAD_SERVICE,
|
||||
async_handle_upload,
|
||||
_async_handle_upload,
|
||||
schema=UPLOAD_SERVICE_SCHEMA,
|
||||
supports_response=SupportsResponse.OPTIONAL,
|
||||
)
|
||||
|
File diff suppressed because it is too large
@@ -133,8 +133,8 @@ class HERERoutingDataUpdateCoordinator(DataUpdateCoordinator[HERETravelTimeData]
|
||||
def _parse_routing_response(self, response: dict[str, Any]) -> HERETravelTimeData:
|
||||
"""Parse the routing response dict to a HERETravelTimeData."""
|
||||
distance: float = 0.0
|
||||
duration: float = 0.0
|
||||
duration_in_traffic: float = 0.0
|
||||
duration: int = 0
|
||||
duration_in_traffic: int = 0
|
||||
|
||||
for section in response["routes"][0]["sections"]:
|
||||
distance += DistanceConverter.convert(
|
||||
@@ -167,8 +167,8 @@ class HERERoutingDataUpdateCoordinator(DataUpdateCoordinator[HERETravelTimeData]
|
||||
destination_name = names[0]["value"]
|
||||
return HERETravelTimeData(
|
||||
attribution=None,
|
||||
duration=round(duration / 60),
|
||||
duration_in_traffic=round(duration_in_traffic / 60),
|
||||
duration=duration,
|
||||
duration_in_traffic=duration_in_traffic,
|
||||
distance=distance,
|
||||
origin=f"{mapped_origin_lat},{mapped_origin_lon}",
|
||||
destination=f"{mapped_destination_lat},{mapped_destination_lon}",
|
||||
@@ -271,13 +271,13 @@ class HERETransitDataUpdateCoordinator(
|
||||
UnitOfLength.METERS,
|
||||
UnitOfLength.KILOMETERS,
|
||||
)
|
||||
duration: float = sum(
|
||||
duration: int = sum(
|
||||
section["travelSummary"]["duration"] for section in sections
|
||||
)
|
||||
return HERETravelTimeData(
|
||||
attribution=attribution,
|
||||
duration=round(duration / 60),
|
||||
duration_in_traffic=round(duration / 60),
|
||||
duration=duration,
|
||||
duration_in_traffic=duration,
|
||||
distance=distance,
|
||||
origin=f"{mapped_origin_lat},{mapped_origin_lon}",
|
||||
destination=f"{mapped_destination_lat},{mapped_destination_lon}",
|
||||
|
@@ -55,14 +55,18 @@ def sensor_descriptions(travel_mode: str) -> tuple[SensorEntityDescription, ...]
|
||||
icon=ICONS.get(travel_mode, ICON_CAR),
|
||||
key=ATTR_DURATION,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
device_class=SensorDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.SECONDS,
|
||||
suggested_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
translation_key="duration_in_traffic",
|
||||
icon=ICONS.get(travel_mode, ICON_CAR),
|
||||
key=ATTR_DURATION_IN_TRAFFIC,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
device_class=SensorDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.SECONDS,
|
||||
suggested_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
translation_key="distance",
|
||||
|
@@ -5,5 +5,5 @@
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/holiday",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["holidays==0.74", "babel==2.15.0"]
|
||||
"requirements": ["holidays==0.75", "babel==2.15.0"]
|
||||
}
|
||||
|
@@ -21,6 +21,7 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/home_connect",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["aiohomeconnect"],
|
||||
"requirements": ["aiohomeconnect==0.17.1"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aiohomeconnect==0.18.0"],
|
||||
"zeroconf": ["_homeconnect._tcp.local."]
|
||||
}
|
||||
|
71
homeassistant/components/home_connect/quality_scale.yaml
Normal file
@@ -0,0 +1,71 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup: done
|
||||
appropriate-polling:
|
||||
status: done
|
||||
comment: |
|
||||
Full polling is performed at the configuration entry setup and
|
||||
device polling is performed when a CONNECTED or a PAIRED event is received.
|
||||
If many CONNECTED or PAIRED events are received for a device within a short time span,
|
||||
the integration will stop polling for that device and will create a repair issue.
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions: done
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: done
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: done
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow: done
|
||||
test-coverage: done
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: done
|
||||
discovery-update-info: done
|
||||
discovery: done
|
||||
docs-data-update: done
|
||||
docs-examples: done
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: done
|
||||
dynamic-devices: done
|
||||
entity-category: done
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default:
|
||||
status: done
|
||||
comment: |
|
||||
Event entities are disabled by default to prevent user confusion regarding
|
||||
which events are supported by their appliance.
|
||||
entity-translations: done
|
||||
exception-translations: done
|
||||
icon-translations: done
|
||||
reconfiguration-flow:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration doesn't have settings in its configuration flow.
|
||||
repair-issues: done
|
||||
stale-devices: done
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing: done
|
@@ -1,9 +1,13 @@
|
||||
"""The homee event platform."""
|
||||
|
||||
from pyHomee.const import AttributeType
|
||||
from pyHomee.const import AttributeType, NodeProfile
|
||||
from pyHomee.model import HomeeAttribute
|
||||
|
||||
from homeassistant.components.event import EventDeviceClass, EventEntity
|
||||
from homeassistant.components.event import (
|
||||
EventDeviceClass,
|
||||
EventEntity,
|
||||
EventEntityDescription,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
@@ -13,6 +17,38 @@ from .entity import HomeeEntity
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
REMOTE_PROFILES = [
|
||||
NodeProfile.REMOTE,
|
||||
NodeProfile.TWO_BUTTON_REMOTE,
|
||||
NodeProfile.THREE_BUTTON_REMOTE,
|
||||
NodeProfile.FOUR_BUTTON_REMOTE,
|
||||
]
|
||||
|
||||
EVENT_DESCRIPTIONS: dict[AttributeType, EventEntityDescription] = {
|
||||
AttributeType.BUTTON_STATE: EventEntityDescription(
|
||||
key="button_state",
|
||||
device_class=EventDeviceClass.BUTTON,
|
||||
event_types=["upper", "lower", "released"],
|
||||
),
|
||||
AttributeType.UP_DOWN_REMOTE: EventEntityDescription(
|
||||
key="up_down_remote",
|
||||
device_class=EventDeviceClass.BUTTON,
|
||||
event_types=[
|
||||
"released",
|
||||
"up",
|
||||
"down",
|
||||
"stop",
|
||||
"up_long",
|
||||
"down_long",
|
||||
"stop_long",
|
||||
"c_button",
|
||||
"b_button",
|
||||
"a_button",
|
||||
],
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: HomeeConfigEntry,
|
||||
@@ -21,30 +57,31 @@ async def async_setup_entry(
|
||||
"""Add event entities for homee."""
|
||||
|
||||
async_add_entities(
|
||||
HomeeEvent(attribute, config_entry)
|
||||
HomeeEvent(attribute, config_entry, EVENT_DESCRIPTIONS[attribute.type])
|
||||
for node in config_entry.runtime_data.nodes
|
||||
for attribute in node.attributes
|
||||
if attribute.type == AttributeType.UP_DOWN_REMOTE
|
||||
if attribute.type in EVENT_DESCRIPTIONS
|
||||
and node.profile in REMOTE_PROFILES
|
||||
and not attribute.editable
|
||||
)
|
||||
|
||||
|
||||
class HomeeEvent(HomeeEntity, EventEntity):
|
||||
"""Representation of a homee event."""
|
||||
|
||||
_attr_translation_key = "up_down_remote"
|
||||
_attr_event_types = [
|
||||
"released",
|
||||
"up",
|
||||
"down",
|
||||
"stop",
|
||||
"up_long",
|
||||
"down_long",
|
||||
"stop_long",
|
||||
"c_button",
|
||||
"b_button",
|
||||
"a_button",
|
||||
]
|
||||
_attr_device_class = EventDeviceClass.BUTTON
|
||||
def __init__(
|
||||
self,
|
||||
attribute: HomeeAttribute,
|
||||
entry: HomeeConfigEntry,
|
||||
description: EventEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the homee event entity."""
|
||||
super().__init__(attribute, entry)
|
||||
self.entity_description = description
|
||||
self._attr_translation_key = description.key
|
||||
if attribute.instance > 0:
|
||||
self._attr_translation_key = f"{self._attr_translation_key}_instance"
|
||||
self._attr_translation_placeholders = {"instance": str(attribute.instance)}
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Add the homee event entity to home assistant."""
|
||||
@@ -56,6 +93,5 @@ class HomeeEvent(HomeeEntity, EventEntity):
|
||||
@callback
|
||||
def _event_triggered(self, event: HomeeAttribute) -> None:
|
||||
"""Handle a homee event."""
|
||||
if event.type == AttributeType.UP_DOWN_REMOTE:
|
||||
self._trigger_event(self.event_types[int(event.current_value)])
|
||||
self.schedule_update_ha_state()
|
||||
self._trigger_event(self.event_types[int(event.current_value)])
|
||||
self.schedule_update_ha_state()
|
||||
|
@@ -160,12 +160,36 @@
|
||||
}
|
||||
},
|
||||
"event": {
|
||||
"button_state": {
|
||||
"name": "Switch",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"upper": "Upper button",
|
||||
"lower": "Lower button",
|
||||
"released": "Released"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"button_state_instance": {
|
||||
"name": "Switch {instance}",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"upper": "[%key:component::homee::entity::event::button_state::state_attributes::event_type::state::upper%]",
|
||||
"lower": "[%key:component::homee::entity::event::button_state::state_attributes::event_type::state::lower%]",
|
||||
"released": "[%key:component::homee::entity::event::button_state::state_attributes::event_type::state::released%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"up_down_remote": {
|
||||
"name": "Up/down remote",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"release": "Released",
|
||||
"release": "[%key:component::homee::entity::event::button_state::state_attributes::event_type::state::released%]",
|
||||
"up": "Up",
|
||||
"down": "Down",
|
||||
"stop": "Stop",
|
||||
|
@@ -128,6 +128,7 @@ class HomematicipHAP:
|
||||
self.config_entry.data.get(HMIPC_AUTHTOKEN),
|
||||
self.config_entry.data.get(HMIPC_NAME),
|
||||
)
|
||||
|
||||
except HmipcConnectionError as err:
|
||||
raise ConfigEntryNotReady from err
|
||||
except Exception as err: # noqa: BLE001
|
||||
@@ -210,41 +211,13 @@ class HomematicipHAP:
|
||||
for device in self.home.devices:
|
||||
device.fire_update_event()
|
||||
|
||||
async def async_connect(self) -> None:
|
||||
"""Start WebSocket connection."""
|
||||
tries = 0
|
||||
while True:
|
||||
retry_delay = 2 ** min(tries, 8)
|
||||
async def async_connect(self, home: AsyncHome) -> None:
|
||||
"""Connect to HomematicIP Cloud Websocket."""
|
||||
await home.enable_events()
|
||||
|
||||
try:
|
||||
await self.home.get_current_state_async()
|
||||
hmip_events = self.home.enable_events()
|
||||
self.home.set_on_connected_handler(self.ws_connected_handler)
|
||||
self.home.set_on_disconnected_handler(self.ws_disconnected_handler)
|
||||
tries = 0
|
||||
await hmip_events
|
||||
except HmipConnectionError:
|
||||
_LOGGER.error(
|
||||
(
|
||||
"Error connecting to HomematicIP with HAP %s. "
|
||||
"Retrying in %d seconds"
|
||||
),
|
||||
self.config_entry.unique_id,
|
||||
retry_delay,
|
||||
)
|
||||
|
||||
if self._ws_close_requested:
|
||||
break
|
||||
self._ws_close_requested = False
|
||||
tries += 1
|
||||
|
||||
try:
|
||||
self._retry_task = self.hass.async_create_task(
|
||||
asyncio.sleep(retry_delay)
|
||||
)
|
||||
await self._retry_task
|
||||
except asyncio.CancelledError:
|
||||
break
|
||||
home.set_on_connected_handler(self.ws_connected_handler)
|
||||
home.set_on_disconnected_handler(self.ws_disconnected_handler)
|
||||
home.set_on_reconnect_handler(self.ws_reconnected_handler)
|
||||
|
||||
async def async_reset(self) -> bool:
|
||||
"""Close the websocket connection."""
|
||||
@@ -272,14 +245,22 @@ class HomematicipHAP:
|
||||
|
||||
async def ws_connected_handler(self) -> None:
|
||||
"""Handle websocket connected."""
|
||||
_LOGGER.debug("WebSocket connection to HomematicIP established")
|
||||
_LOGGER.info("Websocket connection to HomematicIP Cloud established")
|
||||
if self._ws_connection_closed.is_set():
|
||||
await self.get_state()
|
||||
self._ws_connection_closed.clear()
|
||||
|
||||
async def ws_disconnected_handler(self) -> None:
|
||||
"""Handle websocket disconnection."""
|
||||
_LOGGER.warning("WebSocket connection to HomematicIP closed")
|
||||
_LOGGER.warning("Websocket connection to HomematicIP Cloud closed")
|
||||
self._ws_connection_closed.set()
|
||||
|
||||
async def ws_reconnected_handler(self, reason: str) -> None:
|
||||
"""Handle websocket reconnection."""
|
||||
_LOGGER.info(
|
||||
"Websocket connection to HomematicIP Cloud re-established due to reason: %s",
|
||||
reason,
|
||||
)
|
||||
self._ws_connection_closed.set()
|
||||
|
||||
async def get_hap(
|
||||
@@ -306,6 +287,6 @@ class HomematicipHAP:
|
||||
home.on_update(self.async_update)
|
||||
home.on_create(self.async_create_entity)
|
||||
|
||||
hass.loop.create_task(self.async_connect())
|
||||
await self.async_connect(home)
|
||||
|
||||
return home
|
||||
|
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/homematicip_cloud",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["homematicip"],
|
||||
"requirements": ["homematicip==2.0.4"]
|
||||
"requirements": ["homematicip==2.0.5"]
|
||||
}
|
||||
|
@@ -4,13 +4,14 @@ from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homematicip.base.enums import DeviceType
|
||||
from homematicip.base.enums import DeviceType, FunctionalChannelType
|
||||
from homematicip.device import (
|
||||
BrandSwitch2,
|
||||
DinRailSwitch,
|
||||
DinRailSwitch4,
|
||||
FullFlushInputSwitch,
|
||||
HeatingSwitch2,
|
||||
MotionDetectorSwitchOutdoor,
|
||||
MultiIOBox,
|
||||
OpenCollector8Module,
|
||||
PlugableSwitch,
|
||||
@@ -47,18 +48,34 @@ async def async_setup_entry(
|
||||
and getattr(device, "deviceType", None) != DeviceType.BRAND_SWITCH_MEASURING
|
||||
):
|
||||
entities.append(HomematicipSwitchMeasuring(hap, device))
|
||||
elif isinstance(device, WiredSwitch8):
|
||||
elif isinstance(
|
||||
device,
|
||||
(
|
||||
WiredSwitch8,
|
||||
OpenCollector8Module,
|
||||
BrandSwitch2,
|
||||
PrintedCircuitBoardSwitch2,
|
||||
HeatingSwitch2,
|
||||
MultiIOBox,
|
||||
MotionDetectorSwitchOutdoor,
|
||||
DinRailSwitch,
|
||||
DinRailSwitch4,
|
||||
),
|
||||
):
|
||||
channel_indices = [
|
||||
ch.index
|
||||
for ch in device.functionalChannels
|
||||
if ch.functionalChannelType
|
||||
in (
|
||||
FunctionalChannelType.SWITCH_CHANNEL,
|
||||
FunctionalChannelType.MULTI_MODE_INPUT_SWITCH_CHANNEL,
|
||||
)
|
||||
]
|
||||
entities.extend(
|
||||
HomematicipMultiSwitch(hap, device, channel=channel)
|
||||
for channel in range(1, 9)
|
||||
)
|
||||
elif isinstance(device, DinRailSwitch):
|
||||
entities.append(HomematicipMultiSwitch(hap, device, channel=1))
|
||||
elif isinstance(device, DinRailSwitch4):
|
||||
entities.extend(
|
||||
HomematicipMultiSwitch(hap, device, channel=channel)
|
||||
for channel in range(1, 5)
|
||||
for channel in channel_indices
|
||||
)
|
||||
|
||||
elif isinstance(
|
||||
device,
|
||||
(
|
||||
@@ -68,24 +85,6 @@ async def async_setup_entry(
|
||||
),
|
||||
):
|
||||
entities.append(HomematicipSwitch(hap, device))
|
||||
elif isinstance(device, OpenCollector8Module):
|
||||
entities.extend(
|
||||
HomematicipMultiSwitch(hap, device, channel=channel)
|
||||
for channel in range(1, 9)
|
||||
)
|
||||
elif isinstance(
|
||||
device,
|
||||
(
|
||||
BrandSwitch2,
|
||||
PrintedCircuitBoardSwitch2,
|
||||
HeatingSwitch2,
|
||||
MultiIOBox,
|
||||
),
|
||||
):
|
||||
entities.extend(
|
||||
HomematicipMultiSwitch(hap, device, channel=channel)
|
||||
for channel in range(1, 3)
|
||||
)
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
@@ -108,15 +107,15 @@ class HomematicipMultiSwitch(HomematicipGenericEntity, SwitchEntity):
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if switch is on."""
|
||||
return self._device.functionalChannels[self._channel].on
|
||||
return self.functional_channel.on
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the switch on."""
|
||||
await self._device.turn_on_async(self._channel)
|
||||
await self.functional_channel.async_turn_on()
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the switch off."""
|
||||
await self._device.turn_off_async(self._channel)
|
||||
await self.functional_channel.async_turn_off()
|
||||
|
||||
|
||||
class HomematicipSwitch(HomematicipMultiSwitch, SwitchEntity):
|
||||
|
@@ -23,9 +23,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: HomeWizardConfigEntry) -
|
||||
|
||||
api: HomeWizardEnergy
|
||||
|
||||
is_battery = entry.unique_id.startswith("HWE-BAT") if entry.unique_id else False
|
||||
|
||||
if (token := entry.data.get(CONF_TOKEN)) and is_battery:
|
||||
if token := entry.data.get(CONF_TOKEN):
|
||||
api = HomeWizardEnergyV2(
|
||||
entry.data[CONF_IP_ADDRESS],
|
||||
token=token,
|
||||
@@ -37,8 +35,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: HomeWizardConfigEntry) -
|
||||
clientsession=async_get_clientsession(hass),
|
||||
)
|
||||
|
||||
if is_battery:
|
||||
await async_check_v2_support_and_create_issue(hass, entry)
|
||||
await async_check_v2_support_and_create_issue(hass, entry)
|
||||
|
||||
coordinator = HWEnergyDeviceUpdateCoordinator(hass, entry, api)
|
||||
try:
|
||||
|
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["aioautomower"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["aioautomower==2025.5.1"]
|
||||
"requirements": ["aioautomower==2025.6.0"]
|
||||
}
|
||||
|
@@ -37,5 +37,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pylamarzocco"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["pylamarzocco==2.0.8"]
|
||||
"requirements": ["pylamarzocco==2.0.9"]
|
||||
}
|
||||
|
@@ -58,6 +58,10 @@ ENTITIES: tuple[LaMarzoccoNumberEntityDescription, ...] = (
|
||||
CoffeeBoiler, machine.dashboard.config[WidgetType.CM_COFFEE_BOILER]
|
||||
).target_temperature
|
||||
),
|
||||
available_fn=(
|
||||
lambda coordinator: WidgetType.CM_COFFEE_BOILER
|
||||
in coordinator.device.dashboard.config
|
||||
),
|
||||
),
|
||||
LaMarzoccoNumberEntityDescription(
|
||||
key="smart_standby_time",
|
||||
@@ -221,7 +225,7 @@ class LaMarzoccoNumberEntity(LaMarzoccoEntity, NumberEntity):
|
||||
entity_description: LaMarzoccoNumberEntityDescription
|
||||
|
||||
@property
|
||||
def native_value(self) -> float:
|
||||
def native_value(self) -> float | int:
|
||||
"""Return the current value."""
|
||||
return self.entity_description.native_value_fn(self.coordinator.device)
|
||||
|
||||
|
@@ -57,6 +57,10 @@ ENTITIES: tuple[LaMarzoccoSensorEntityDescription, ...] = (
|
||||
).ready_start_time
|
||||
),
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
available_fn=(
|
||||
lambda coordinator: WidgetType.CM_COFFEE_BOILER
|
||||
in coordinator.device.dashboard.config
|
||||
),
|
||||
),
|
||||
LaMarzoccoSensorEntityDescription(
|
||||
key="steam_boiler_ready_time",
|
||||
|
@@ -8,5 +8,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/lcn",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["pypck"],
|
||||
"requirements": ["pypck==0.8.7", "lcn-frontend==0.2.5"]
|
||||
"requirements": ["pypck==0.8.8", "lcn-frontend==0.2.5"]
|
||||
}
|
||||
|
@@ -3,6 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from letpot.deviceclient import LetPotDeviceClient
|
||||
@@ -42,6 +43,7 @@ class LetPotDeviceCoordinator(DataUpdateCoordinator[LetPotDeviceStatus]):
|
||||
_LOGGER,
|
||||
config_entry=config_entry,
|
||||
name=f"LetPot {device.serial_number}",
|
||||
update_interval=timedelta(minutes=10),
|
||||
)
|
||||
self._info = info
|
||||
self.device = device
|
||||
|
@@ -5,9 +5,9 @@ rules:
     comment: |
       This integration does not provide additional actions.
   appropriate-polling:
-    status: exempt
+    status: done
     comment: |
-      This integration only receives push-based updates.
+      Primarily uses push, but polls with a long interval for availability and missed updates.
   brands: done
   common-modules: done
   config-flow-test-coverage: done
@@ -39,7 +39,7 @@ rules:
     comment: |
       The integration does not have configuration options.
   docs-installation-parameters: done
-  entity-unavailable: todo
+  entity-unavailable: done
   integration-owner: done
   log-when-unavailable: todo
   parallel-updates: done
@@ -13,5 +13,5 @@
   "iot_class": "cloud_push",
   "loggers": ["pylitterbot"],
   "quality_scale": "bronze",
-  "requirements": ["pylitterbot==2024.0.0"]
+  "requirements": ["pylitterbot==2024.2.0"]
 }
@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/local_calendar",
   "iot_class": "local_polling",
   "loggers": ["ical"],
-  "requirements": ["ical==10.0.0"]
+  "requirements": ["ical==10.0.4"]
 }
@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/local_todo",
   "iot_class": "local_polling",
-  "requirements": ["ical==10.0.0"]
+  "requirements": ["ical==10.0.4"]
 }
@@ -88,7 +88,6 @@ class ModelContextProtocolSSEView(HomeAssistantView):
         context = llm.LLMContext(
-            platform=DOMAIN,
             context=self.context(request),
             user_prompt=None,
             language="*",
             assistant=conversation.DOMAIN,
             device_id=None,
@@ -1,93 +1,28 @@
 """The Meater Temperature Probe integration."""

-import asyncio
-from datetime import timedelta
-import logging
-
-from meater import (
-    AuthenticationError,
-    MeaterApi,
-    ServiceUnavailableError,
-    TooManyRequestsError,
-)
-from meater.MeaterApi import MeaterProbe
-
-from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform
+from homeassistant.const import Platform
 from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
-from homeassistant.helpers.aiohttp_client import async_get_clientsession
-from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

 from .const import DOMAIN
+from .coordinator import MeaterConfigEntry, MeaterCoordinator

 PLATFORMS = [Platform.SENSOR]

-_LOGGER = logging.getLogger(__name__)
-

-async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_setup_entry(hass: HomeAssistant, entry: MeaterConfigEntry) -> bool:
     """Set up Meater Temperature Probe from a config entry."""
-    # Store an API object to access
-    session = async_get_clientsession(hass)
-    meater_api = MeaterApi(session)
-
-    # Add the credentials
-    try:
-        _LOGGER.debug("Authenticating with the Meater API")
-        await meater_api.authenticate(
-            entry.data[CONF_USERNAME], entry.data[CONF_PASSWORD]
-        )
-    except (ServiceUnavailableError, TooManyRequestsError) as err:
-        raise ConfigEntryNotReady from err
-    except AuthenticationError as err:
-        raise ConfigEntryAuthFailed(
-            f"Unable to authenticate with the Meater API: {err}"
-        ) from err
-
-    async def async_update_data() -> dict[str, MeaterProbe]:
-        """Fetch data from API endpoint."""
-        try:
-            # Note: TimeoutError and aiohttp.ClientError are already
-            # handled by the data update coordinator.
-            async with asyncio.timeout(10):
-                devices: list[MeaterProbe] = await meater_api.get_all_devices()
-        except AuthenticationError as err:
-            raise ConfigEntryAuthFailed("The API call wasn't authenticated") from err
-        except TooManyRequestsError as err:
-            raise UpdateFailed(
-                "Too many requests have been made to the API, rate limiting is in place"
-            ) from err
-
-        return {device.id: device for device in devices}
-
-    coordinator = DataUpdateCoordinator(
-        hass,
-        _LOGGER,
-        config_entry=entry,
-        # Name of the data. For logging purposes.
-        name="meater_api",
-        update_method=async_update_data,
-        # Polling interval. Will only be polled if there are subscribers.
-        update_interval=timedelta(seconds=30),
-    )
+    coordinator = MeaterCoordinator(hass, entry)
     await coordinator.async_config_entry_first_refresh()

-    hass.data.setdefault(DOMAIN, {})
-    hass.data[DOMAIN].setdefault("known_probes", set())
+    hass.data.setdefault(DOMAIN, {}).setdefault("known_probes", set())

-    hass.data[DOMAIN][entry.entry_id] = {
-        "api": meater_api,
-        "coordinator": coordinator,
-    }
+    entry.runtime_data = coordinator

     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
     return True


-async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_unload_entry(hass: HomeAssistant, entry: MeaterConfigEntry) -> bool:
     """Unload a config entry."""
-    if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
-        hass.data[DOMAIN].pop(entry.entry_id)
-
-    return unload_ok
+    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
homeassistant/components/meater/coordinator.py (new file, 77 lines)
@@ -0,0 +1,77 @@
"""Meater Coordinator."""

import asyncio
from datetime import timedelta
import logging

from meater.MeaterApi import (
    AuthenticationError,
    MeaterApi,
    MeaterProbe,
    ServiceUnavailableError,
    TooManyRequestsError,
)

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

_LOGGER = logging.getLogger(__name__)

type MeaterConfigEntry = ConfigEntry[MeaterCoordinator]


class MeaterCoordinator(DataUpdateCoordinator[dict[str, MeaterProbe]]):
    """Meater Coordinator."""

    config_entry: MeaterConfigEntry

    def __init__(
        self,
        hass: HomeAssistant,
        entry: MeaterConfigEntry,
    ) -> None:
        """Initialize the Meater Coordinator."""
        super().__init__(
            hass,
            _LOGGER,
            config_entry=entry,
            name=f"Meater {entry.title}",
            update_interval=timedelta(seconds=30),
        )
        session = async_get_clientsession(hass)
        self.client = MeaterApi(session)

    async def _async_setup(self) -> None:
        """Set up the Meater Coordinator."""
        try:
            _LOGGER.debug("Authenticating with the Meater API")
            await self.client.authenticate(
                self.config_entry.data[CONF_USERNAME],
                self.config_entry.data[CONF_PASSWORD],
            )
        except (ServiceUnavailableError, TooManyRequestsError) as err:
            raise UpdateFailed from err
        except AuthenticationError as err:
            raise ConfigEntryAuthFailed(
                f"Unable to authenticate with the Meater API: {err}"
            ) from err

    async def _async_update_data(self) -> dict[str, MeaterProbe]:
        """Fetch data from API endpoint."""
        try:
            # Note: TimeoutError and aiohttp.ClientError are already
            # handled by the data update coordinator.
            async with asyncio.timeout(10):
                devices: list[MeaterProbe] = await self.client.get_all_devices()
        except AuthenticationError as err:
            raise ConfigEntryAuthFailed("The API call wasn't authenticated") from err
        except TooManyRequestsError as err:
            raise UpdateFailed(
                "Too many requests have been made to the API, rate limiting is in place"
            ) from err

        return {device.id: device for device in devices}
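For readers unfamiliar with the coordinator pattern this new module follows, here is a small standalone sketch in plain Python. It mirrors the shape only (authenticate once in a setup hook, then poll and index results by id); the class and method names below are invented for illustration and are not Home Assistant or Meater APIs.

```python
# Standalone sketch of the coordinator pattern: authenticate once, then poll.
# All names here are illustrative, not part of any real library.
import asyncio


class FakeApi:
    """Stand-in for a cloud client such as a temperature-probe API."""

    async def authenticate(self, username: str, password: str) -> None:
        await asyncio.sleep(0)  # pretend to log in

    async def get_all_devices(self) -> list[dict]:
        await asyncio.sleep(0)  # pretend to fetch
        return [{"id": "probe-1", "internal_temperature": 52.0}]


class SketchCoordinator:
    """Minimal analogue of a data-update coordinator subclass."""

    def __init__(self, api: FakeApi, username: str, password: str) -> None:
        self.api = api
        self.username = username
        self.password = password
        self.data: dict[str, dict] = {}

    async def async_setup(self) -> None:
        # Runs once before the first refresh, like _async_setup above.
        await self.api.authenticate(self.username, self.password)

    async def async_refresh(self) -> None:
        # Runs on every update interval, like _async_update_data above.
        devices = await self.api.get_all_devices()
        self.data = {device["id"]: device for device in devices}


async def main() -> None:
    coordinator = SketchCoordinator(FakeApi(), "user@example.com", "hunter2")
    await coordinator.async_setup()
    await coordinator.async_refresh()
    print(coordinator.data)


asyncio.run(main())
```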
homeassistant/components/meater/diagnostics.py (new file, 55 lines)
@@ -0,0 +1,55 @@
"""Diagnostics support for the Meater integration."""

from __future__ import annotations

from typing import Any

from homeassistant.core import HomeAssistant

from . import MeaterConfigEntry


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, config_entry: MeaterConfigEntry
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""
    coordinator = config_entry.runtime_data

    return {
        identifier: {
            "id": probe.id,
            "internal_temperature": probe.internal_temperature,
            "ambient_temperature": probe.ambient_temperature,
            "time_updated": probe.time_updated.isoformat(),
            "cook": (
                {
                    "id": probe.cook.id,
                    "name": probe.cook.name,
                    "state": probe.cook.state,
                    "target_temperature": (
                        probe.cook.target_temperature
                        if hasattr(probe.cook, "target_temperature")
                        else None
                    ),
                    "peak_temperature": (
                        probe.cook.peak_temperature
                        if hasattr(probe.cook, "peak_temperature")
                        else None
                    ),
                    "time_remaining": (
                        probe.cook.time_remaining
                        if hasattr(probe.cook, "time_remaining")
                        else None
                    ),
                    "time_elapsed": (
                        probe.cook.time_elapsed
                        if hasattr(probe.cook, "time_elapsed")
                        else None
                    ),
                }
                if probe.cook
                else None
            ),
        }
        for identifier, probe in coordinator.data.items()
    }
@@ -14,18 +14,28 @@ from homeassistant.components.sensor import (
     SensorEntityDescription,
     SensorStateClass,
 )
-from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import UnitOfTemperature
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers.device_registry import DeviceInfo
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
-from homeassistant.helpers.update_coordinator import (
-    CoordinatorEntity,
-    DataUpdateCoordinator,
-)
+from homeassistant.helpers.update_coordinator import CoordinatorEntity
 from homeassistant.util import dt as dt_util

+from . import MeaterCoordinator
 from .const import DOMAIN
+from .coordinator import MeaterConfigEntry
+
+COOK_STATES = {
+    "Not Started": "not_started",
+    "Configured": "configured",
+    "Started": "started",
+    "Ready For Resting": "ready_for_resting",
+    "Resting": "resting",
+    "Slightly Underdone": "slightly_underdone",
+    "Finished": "finished",
+    "Slightly Overdone": "slightly_overdone",
+    "OVERCOOK!": "overcooked",
+}


 @dataclass(frozen=True, kw_only=True)
@@ -82,13 +92,13 @@ SENSOR_TYPES = (
         available=lambda probe: probe is not None and probe.cook is not None,
         value=lambda probe: probe.cook.name if probe.cook else None,
     ),
-    # One of Not Started, Configured, Started, Ready For Resting, Resting,
-    # Slightly Underdone, Finished, Slightly Overdone, OVERCOOK!. Not translated.
     MeaterSensorEntityDescription(
         key="cook_state",
         translation_key="cook_state",
         available=lambda probe: probe is not None and probe.cook is not None,
-        value=lambda probe: probe.cook.state if probe.cook else None,
+        device_class=SensorDeviceClass.ENUM,
+        options=list(COOK_STATES.values()),
+        value=lambda probe: COOK_STATES.get(probe.cook.state) if probe.cook else None,
     ),
     # Target temperature
     MeaterSensorEntityDescription(
@@ -137,13 +147,11 @@ SENSOR_TYPES = (

 async def async_setup_entry(
     hass: HomeAssistant,
-    entry: ConfigEntry,
+    entry: MeaterConfigEntry,
     async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Set up the entry."""
-    coordinator: DataUpdateCoordinator[dict[str, MeaterProbe]] = hass.data[DOMAIN][
-        entry.entry_id
-    ]["coordinator"]
+    coordinator = entry.runtime_data

     @callback
     def async_update_data():
@@ -174,11 +182,10 @@ async def async_setup_entry(

     # Add a subscriber to the coordinator to discover new temperature probes
     coordinator.async_add_listener(async_update_data)
     async_update_data()


-class MeaterProbeTemperature(
-    SensorEntity, CoordinatorEntity[DataUpdateCoordinator[dict[str, MeaterProbe]]]
-):
+class MeaterProbeTemperature(SensorEntity, CoordinatorEntity[MeaterCoordinator]):
     """Meater Temperature Sensor Entity."""

     entity_description: MeaterSensorEntityDescription
@@ -40,7 +40,18 @@
         "name": "Cooking"
       },
       "cook_state": {
-        "name": "Cook state"
+        "name": "Cook state",
+        "state": {
+          "not_started": "Not started",
+          "configured": "Configured",
+          "started": "Started",
+          "ready_for_resting": "Ready for resting",
+          "resting": "Resting",
+          "slightly_underdone": "Slightly underdone",
+          "finished": "Finished",
+          "slightly_overdone": "Slightly overdone",
+          "overcooked": "Overcooked"
+        }
       },
       "cook_target_temp": {
         "name": "Target temperature"
@@ -814,19 +814,6 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
         """Flag media player features that are supported."""
         return self._attr_supported_features

-    @property
-    def supported_features_compat(self) -> MediaPlayerEntityFeature:
-        """Return the supported features as MediaPlayerEntityFeature.
-
-        Remove this compatibility shim in 2025.1 or later.
-        """
-        features = self.supported_features
-        if type(features) is int:
-            new_features = MediaPlayerEntityFeature(features)
-            self._report_deprecated_supported_features_values(new_features)
-            return new_features
-        return features
-
     def turn_on(self) -> None:
         """Turn the media player on."""
         raise NotImplementedError
@@ -966,87 +953,85 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
     @property
     def support_play(self) -> bool:
         """Boolean if play is supported."""
-        return MediaPlayerEntityFeature.PLAY in self.supported_features_compat
+        return MediaPlayerEntityFeature.PLAY in self.supported_features

     @final
     @property
     def support_pause(self) -> bool:
         """Boolean if pause is supported."""
-        return MediaPlayerEntityFeature.PAUSE in self.supported_features_compat
+        return MediaPlayerEntityFeature.PAUSE in self.supported_features

     @final
     @property
     def support_stop(self) -> bool:
         """Boolean if stop is supported."""
-        return MediaPlayerEntityFeature.STOP in self.supported_features_compat
+        return MediaPlayerEntityFeature.STOP in self.supported_features

     @final
     @property
     def support_seek(self) -> bool:
         """Boolean if seek is supported."""
-        return MediaPlayerEntityFeature.SEEK in self.supported_features_compat
+        return MediaPlayerEntityFeature.SEEK in self.supported_features

     @final
     @property
     def support_volume_set(self) -> bool:
         """Boolean if setting volume is supported."""
-        return MediaPlayerEntityFeature.VOLUME_SET in self.supported_features_compat
+        return MediaPlayerEntityFeature.VOLUME_SET in self.supported_features

     @final
     @property
     def support_volume_mute(self) -> bool:
         """Boolean if muting volume is supported."""
-        return MediaPlayerEntityFeature.VOLUME_MUTE in self.supported_features_compat
+        return MediaPlayerEntityFeature.VOLUME_MUTE in self.supported_features

     @final
     @property
     def support_previous_track(self) -> bool:
         """Boolean if previous track command supported."""
-        return MediaPlayerEntityFeature.PREVIOUS_TRACK in self.supported_features_compat
+        return MediaPlayerEntityFeature.PREVIOUS_TRACK in self.supported_features

     @final
     @property
     def support_next_track(self) -> bool:
         """Boolean if next track command supported."""
-        return MediaPlayerEntityFeature.NEXT_TRACK in self.supported_features_compat
+        return MediaPlayerEntityFeature.NEXT_TRACK in self.supported_features

     @final
     @property
     def support_play_media(self) -> bool:
         """Boolean if play media command supported."""
-        return MediaPlayerEntityFeature.PLAY_MEDIA in self.supported_features_compat
+        return MediaPlayerEntityFeature.PLAY_MEDIA in self.supported_features

     @final
     @property
     def support_select_source(self) -> bool:
         """Boolean if select source command supported."""
-        return MediaPlayerEntityFeature.SELECT_SOURCE in self.supported_features_compat
+        return MediaPlayerEntityFeature.SELECT_SOURCE in self.supported_features

     @final
     @property
     def support_select_sound_mode(self) -> bool:
         """Boolean if select sound mode command supported."""
-        return (
-            MediaPlayerEntityFeature.SELECT_SOUND_MODE in self.supported_features_compat
-        )
+        return MediaPlayerEntityFeature.SELECT_SOUND_MODE in self.supported_features

     @final
     @property
     def support_clear_playlist(self) -> bool:
         """Boolean if clear playlist command supported."""
-        return MediaPlayerEntityFeature.CLEAR_PLAYLIST in self.supported_features_compat
+        return MediaPlayerEntityFeature.CLEAR_PLAYLIST in self.supported_features

     @final
     @property
     def support_shuffle_set(self) -> bool:
         """Boolean if shuffle is supported."""
-        return MediaPlayerEntityFeature.SHUFFLE_SET in self.supported_features_compat
+        return MediaPlayerEntityFeature.SHUFFLE_SET in self.supported_features

     @final
     @property
     def support_grouping(self) -> bool:
         """Boolean if player grouping is supported."""
-        return MediaPlayerEntityFeature.GROUPING in self.supported_features_compat
+        return MediaPlayerEntityFeature.GROUPING in self.supported_features

     async def async_toggle(self) -> None:
         """Toggle the power on the media player."""
@@ -1074,7 +1059,7 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
         if (
             self.volume_level is not None
             and self.volume_level < 1
-            and MediaPlayerEntityFeature.VOLUME_SET in self.supported_features_compat
+            and MediaPlayerEntityFeature.VOLUME_SET in self.supported_features
         ):
             await self.async_set_volume_level(
                 min(1, self.volume_level + self.volume_step)
@@ -1092,7 +1077,7 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
         if (
             self.volume_level is not None
             and self.volume_level > 0
-            and MediaPlayerEntityFeature.VOLUME_SET in self.supported_features_compat
+            and MediaPlayerEntityFeature.VOLUME_SET in self.supported_features
         ):
             await self.async_set_volume_level(
                 max(0, self.volume_level - self.volume_step)
@@ -1135,7 +1120,7 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
     def capability_attributes(self) -> dict[str, Any]:
         """Return capability attributes."""
         data: dict[str, Any] = {}
-        supported_features = self.supported_features_compat
+        supported_features = self.supported_features

         if (
             source_list := self.source_list
@@ -1364,7 +1349,7 @@ async def websocket_browse_media(
         connection.send_error(msg["id"], "entity_not_found", "Entity not found")
         return

-    if MediaPlayerEntityFeature.BROWSE_MEDIA not in player.supported_features_compat:
+    if MediaPlayerEntityFeature.BROWSE_MEDIA not in player.supported_features:
         connection.send_message(
             websocket_api.error_message(
                 msg["id"], ERR_NOT_SUPPORTED, "Player does not support browsing media"
@@ -1447,7 +1432,7 @@ async def websocket_search_media(
         connection.send_error(msg["id"], "entity_not_found", "Entity not found")
         return

-    if MediaPlayerEntityFeature.SEARCH_MEDIA not in player.supported_features_compat:
+    if MediaPlayerEntityFeature.SEARCH_MEDIA not in player.supported_features:
         connection.send_message(
             websocket_api.error_message(
                 msg["id"], ERR_NOT_SUPPORTED, "Player does not support searching media"
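The removal of `supported_features_compat` leans on `supported_features` now always being a proper `MediaPlayerEntityFeature` flag, so plain containment checks work. A minimal illustration with a hypothetical flag class (not the real enum):

```python
# Why the compatibility shim is no longer needed: with a proper IntFlag,
# feature checks are plain containment tests.
from enum import IntFlag


class Feature(IntFlag):
    PLAY = 1
    PAUSE = 2
    SEEK = 4


supported_features = Feature.PLAY | Feature.PAUSE

print(Feature.PLAY in supported_features)  # True
print(Feature.SEEK in supported_features)  # False
# A raw int (the old, deprecated style) would need wrapping first:
print(Feature.PLAY in Feature(3))          # True
```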
@@ -5,6 +5,7 @@ from __future__ import annotations
 import logging
 from typing import Any

+import dns.asyncresolver
 import dns.rdata
 import dns.rdataclass
 import dns.rdatatype
@@ -22,20 +23,23 @@ PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR]
 _LOGGER = logging.getLogger(__name__)


-def load_dnspython_rdata_classes() -> None:
-    """Load dnspython rdata classes used by mcstatus."""
+def prevent_dnspython_blocking_operations() -> None:
+    """Prevent dnspython blocking operations by pre-loading required data."""
+
+    # Blocking import: https://github.com/rthalley/dnspython/issues/1083
     for rdtype in dns.rdatatype.RdataType:
         if not dns.rdatatype.is_metatype(rdtype) or rdtype == dns.rdatatype.OPT:
             dns.rdata.get_rdata_class(dns.rdataclass.IN, rdtype)  # type: ignore[no-untyped-call]

+    # Blocking open: https://github.com/rthalley/dnspython/issues/1200
+    dns.asyncresolver.get_default_resolver()
+

 async def async_setup_entry(
     hass: HomeAssistant, entry: MinecraftServerConfigEntry
 ) -> bool:
     """Set up Minecraft Server from a config entry."""

-    # Workaround to avoid blocking imports from dnspython (https://github.com/rthalley/dnspython/issues/1083)
-    await hass.async_add_executor_job(load_dnspython_rdata_classes)
+    await hass.async_add_executor_job(prevent_dnspython_blocking_operations)

     # Create coordinator instance and store it.
     coordinator = MinecraftServerCoordinator(hass, entry)
@@ -62,6 +62,7 @@ TILT_DEVICE_MAP = {
     BlindType.VerticalBlind: CoverDeviceClass.BLIND,
     BlindType.VerticalBlindLeft: CoverDeviceClass.BLIND,
     BlindType.VerticalBlindRight: CoverDeviceClass.BLIND,
+    BlindType.RollerTiltMotor: CoverDeviceClass.BLIND,
 }

 TILT_ONLY_DEVICE_MAP = {
@@ -21,5 +21,5 @@
   "documentation": "https://www.home-assistant.io/integrations/motion_blinds",
   "iot_class": "local_push",
   "loggers": ["motionblinds"],
-  "requirements": ["motionblinds==0.6.27"]
+  "requirements": ["motionblinds==0.6.28"]
 }
@@ -35,7 +35,6 @@ from homeassistant.core import CALLBACK_TYPE, HomeAssistant, State, callback
 from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.event import async_call_later
-from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
 from homeassistant.helpers.service_info.mqtt import ReceivePayloadType
 from homeassistant.helpers.typing import ConfigType, VolSchemaType
 from homeassistant.util import dt as dt_util
@@ -48,7 +47,6 @@ from .const import (
     CONF_OPTIONS,
     CONF_STATE_TOPIC,
     CONF_SUGGESTED_DISPLAY_PRECISION,
-    DOMAIN,
     PAYLOAD_NONE,
 )
 from .entity import MqttAvailabilityMixin, MqttEntity, async_setup_entity_entry_helper
@@ -138,12 +136,9 @@ def validate_sensor_state_and_device_class_config(config: ConfigType) -> ConfigType:
         device_class in DEVICE_CLASS_UNITS
         and unit_of_measurement not in DEVICE_CLASS_UNITS[device_class]
     ):
-        _LOGGER.warning(
-            "The unit of measurement `%s` is not valid "
-            "together with device class `%s`. "
-            "this will stop working in HA Core 2025.7.0",
-            unit_of_measurement,
-            device_class,
+        raise vol.Invalid(
+            f"The unit of measurement `{unit_of_measurement}` is not valid "
+            f"together with device class `{device_class}`",
         )

     return config
@@ -194,40 +189,8 @@ class MqttSensor(MqttEntity, RestoreSensor):
             None
         )

-    @callback
-    def async_check_uom(self) -> None:
-        """Check if the unit of measurement is valid with the device class."""
-        if (
-            self._discovery_data is not None
-            or self.device_class is None
-            or self.native_unit_of_measurement is None
-        ):
-            return
-        if (
-            self.device_class in DEVICE_CLASS_UNITS
-            and self.native_unit_of_measurement
-            not in DEVICE_CLASS_UNITS[self.device_class]
-        ):
-            async_create_issue(
-                self.hass,
-                DOMAIN,
-                self.entity_id,
-                issue_domain=sensor.DOMAIN,
-                is_fixable=False,
-                severity=IssueSeverity.WARNING,
-                learn_more_url=URL_DOCS_SUPPORTED_SENSOR_UOM,
-                translation_placeholders={
-                    "uom": self.native_unit_of_measurement,
-                    "device_class": self.device_class.value,
-                    "entity_id": self.entity_id,
-                },
-                translation_key="invalid_unit_of_measurement",
-                breaks_in_ha_version="2025.7.0",
-            )
-
     async def mqtt_async_added_to_hass(self) -> None:
         """Restore state for entities with expire_after set."""
-        self.async_check_uom()
         last_state: State | None
         last_sensor_data: SensorExtraStoredData | None
         if (
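The net effect of the sensor change above is that an invalid unit/device-class pairing now fails configuration validation outright instead of only logging a warning and creating a repair issue. A rough standalone sketch with voluptuous, using a toy unit table rather than the real DEVICE_CLASS_UNITS mapping:

```python
# Sketch: a validator that rejects a unit not allowed for the device class.
import voluptuous as vol

DEVICE_CLASS_UNITS = {"temperature": {"°C", "°F", "K"}}  # toy stand-in


def validate_uom(config: dict) -> dict:
    device_class = config.get("device_class")
    unit = config.get("unit_of_measurement")
    if (
        device_class in DEVICE_CLASS_UNITS
        and unit not in DEVICE_CLASS_UNITS[device_class]
    ):
        raise vol.Invalid(
            f"The unit of measurement `{unit}` is not valid "
            f"together with device class `{device_class}`"
        )
    return config


schema = vol.All(vol.Schema(dict), validate_uom)

schema({"device_class": "temperature", "unit_of_measurement": "°C"})  # passes
try:
    schema({"device_class": "temperature", "unit_of_measurement": "%"})
except vol.Invalid as err:
    print(f"rejected: {err}")
```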
@@ -3,10 +3,6 @@
     "invalid_platform_config": {
       "title": "Invalid config found for MQTT {domain} item",
       "description": "Home Assistant detected an invalid config for a manually configured item.\n\nPlatform domain: **{domain}**\nConfiguration file: **{config_file}**\nNear line: **{line}**\nConfiguration found:\n```yaml\n{config}\n```\nError: **{error}**.\n\nMake sure the configuration is valid and [reload](/developer-tools/yaml) the manually configured MQTT items or restart Home Assistant to fix this issue."
-    },
-    "invalid_unit_of_measurement": {
-      "title": "Sensor with invalid unit of measurement",
-      "description": "Manual configured Sensor entity **{entity_id}** has a configured unit of measurement **{uom}** which is not valid with configured device class **{device_class}**. Make sure a valid unit of measurement is configured or remove the device class, and [reload](/developer-tools/yaml) the manually configured MQTT items or restart Home Assistant to fix this issue."
     }
   },
   "config": {
@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/mysensors",
   "iot_class": "local_push",
   "loggers": ["mysensors"],
-  "requirements": ["pymysensors==0.24.0"]
+  "requirements": ["pymysensors==0.25.0"]
 }
@@ -6,5 +6,5 @@
   "iot_class": "local_push",
   "loggers": ["nessclient"],
   "quality_scale": "legacy",
-  "requirements": ["nessclient==1.1.2"]
+  "requirements": ["nessclient==1.2.0"]
 }
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/nextbus",
   "iot_class": "cloud_polling",
   "loggers": ["py_nextbus"],
-  "requirements": ["py-nextbusnext==2.2.0"]
+  "requirements": ["py-nextbusnext==2.3.0"]
 }
@@ -13,14 +13,13 @@ from homeassistant.components.binary_sensor import (
     BinarySensorEntityDescription,
 )
 from homeassistant.const import EntityCategory
-from homeassistant.core import HomeAssistant, callback
+from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
-from homeassistant.helpers.update_coordinator import CoordinatorEntity

 from . import NextDnsConfigEntry
-from .coordinator import NextDnsUpdateCoordinator
+from .entity import NextDnsEntity

-PARALLEL_UPDATES = 1
+PARALLEL_UPDATES = 0


 @dataclass(frozen=True, kw_only=True)
@@ -61,30 +60,14 @@ async def async_setup_entry(
     )


-class NextDnsBinarySensor(
-    CoordinatorEntity[NextDnsUpdateCoordinator[ConnectionStatus]], BinarySensorEntity
-):
+class NextDnsBinarySensor(NextDnsEntity, BinarySensorEntity):
     """Define an NextDNS binary sensor."""

-    _attr_has_entity_name = True
     entity_description: NextDnsBinarySensorEntityDescription

-    def __init__(
-        self,
-        coordinator: NextDnsUpdateCoordinator[ConnectionStatus],
-        description: NextDnsBinarySensorEntityDescription,
-    ) -> None:
-        """Initialize."""
-        super().__init__(coordinator)
-        self._attr_device_info = coordinator.device_info
-        self._attr_unique_id = f"{coordinator.profile_id}_{description.key}"
-        self._attr_is_on = description.state(coordinator.data, coordinator.profile_id)
-        self.entity_description = description
-
-    @callback
-    def _handle_coordinator_update(self) -> None:
-        """Handle updated data from the coordinator."""
-        self._attr_is_on = self.entity_description.state(
+    @property
+    def is_on(self) -> bool:
+        """Return True if the binary sensor is on."""
+        return self.entity_description.state(
             self.coordinator.data, self.coordinator.profile_id
         )
-        self.async_write_ha_state()
@@ -4,21 +4,21 @@ from __future__ import annotations

 from aiohttp import ClientError
 from aiohttp.client_exceptions import ClientConnectorError
-from nextdns import AnalyticsStatus, ApiError, InvalidApiKeyError
+from nextdns import ApiError, InvalidApiKeyError

 from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
 from homeassistant.const import EntityCategory
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
-from homeassistant.helpers.update_coordinator import CoordinatorEntity

 from . import NextDnsConfigEntry
 from .const import DOMAIN
-from .coordinator import NextDnsUpdateCoordinator
+from .entity import NextDnsEntity

 PARALLEL_UPDATES = 1


 CLEAR_LOGS_BUTTON = ButtonEntityDescription(
     key="clear_logs",
     translation_key="clear_logs",
@@ -37,24 +37,9 @@ async def async_setup_entry(
     async_add_entities([NextDnsButton(coordinator, CLEAR_LOGS_BUTTON)])


-class NextDnsButton(
-    CoordinatorEntity[NextDnsUpdateCoordinator[AnalyticsStatus]], ButtonEntity
-):
+class NextDnsButton(NextDnsEntity, ButtonEntity):
     """Define an NextDNS button."""

-    _attr_has_entity_name = True
-
-    def __init__(
-        self,
-        coordinator: NextDnsUpdateCoordinator[AnalyticsStatus],
-        description: ButtonEntityDescription,
-    ) -> None:
-        """Initialize."""
-        super().__init__(coordinator)
-        self._attr_device_info = coordinator.device_info
-        self._attr_unique_id = f"{coordinator.profile_id}_{description.key}"
-        self.entity_description = description
-
     async def async_press(self) -> None:
         """Trigger cleaning logs."""
         try:
@@ -24,7 +24,6 @@ from tenacity import RetryError

 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed
-from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

 if TYPE_CHECKING:
@@ -53,14 +52,6 @@ class NextDnsUpdateCoordinator(DataUpdateCoordinator[CoordinatorDataT]):
         """Initialize."""
         self.nextdns = nextdns
         self.profile_id = profile_id
-        self.profile_name = nextdns.get_profile_name(profile_id)
-        self.device_info = DeviceInfo(
-            configuration_url=f"https://my.nextdns.io/{profile_id}/setup",
-            entry_type=DeviceEntryType.SERVICE,
-            identifiers={(DOMAIN, str(profile_id))},
-            manufacturer="NextDNS Inc.",
-            name=self.profile_name,
-        )

         super().__init__(
             hass,
homeassistant/components/nextdns/entity.py (new file, 31 lines)
@@ -0,0 +1,31 @@
"""Define NextDNS entities."""

from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import CoordinatorDataT, NextDnsUpdateCoordinator


class NextDnsEntity(CoordinatorEntity[NextDnsUpdateCoordinator[CoordinatorDataT]]):
    """Define NextDNS entity."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: NextDnsUpdateCoordinator[CoordinatorDataT],
        description: EntityDescription,
    ) -> None:
        """Initialize."""
        super().__init__(coordinator)
        self._attr_device_info = DeviceInfo(
            configuration_url=f"https://my.nextdns.io/{coordinator.profile_id}/setup",
            entry_type=DeviceEntryType.SERVICE,
            identifiers={(DOMAIN, str(coordinator.profile_id))},
            manufacturer="NextDNS Inc.",
            name=coordinator.nextdns.get_profile_name(coordinator.profile_id),
        )
        self._attr_unique_id = f"{coordinator.profile_id}_{description.key}"
        self.entity_description = description
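The new `NextDnsEntity` base concentrates device info, unique id, and entity description in one place, and the platform modules switch from cached `_attr_*` state to read-through properties. A plain-Python sketch of that pattern, with all names invented for illustration:

```python
# Sketch of the shared-base-entity refactor: identity is set up once in a base
# class; values are exposed as properties that read the coordinator's latest data.
from dataclasses import dataclass


@dataclass
class Description:
    key: str


class Coordinator:
    def __init__(self, profile_id: int) -> None:
        self.profile_id = profile_id
        self.data = {"connected": True, "queries": 42}


class BaseEntity:
    """Analogue of a shared base entity: common attributes live in one place."""

    def __init__(self, coordinator: Coordinator, description: Description) -> None:
        self.coordinator = coordinator
        self.entity_description = description
        self.unique_id = f"{coordinator.profile_id}_{description.key}"


class QueriesSensor(BaseEntity):
    """Analogue of a platform entity: no cached state, just a property."""

    @property
    def native_value(self) -> int:
        return self.coordinator.data["queries"]


sensor = QueriesSensor(Coordinator(profile_id=1), Description(key="queries"))
print(sensor.unique_id, sensor.native_value)
```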
@@ -20,10 +20,9 @@ from homeassistant.components.sensor import (
     SensorStateClass,
 )
 from homeassistant.const import PERCENTAGE, EntityCategory
-from homeassistant.core import HomeAssistant, callback
+from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.typing import StateType
-from homeassistant.helpers.update_coordinator import CoordinatorEntity

 from . import NextDnsConfigEntry
 from .const import (
@@ -33,9 +32,10 @@ from .const import (
     ATTR_PROTOCOLS,
     ATTR_STATUS,
 )
-from .coordinator import CoordinatorDataT, NextDnsUpdateCoordinator
+from .coordinator import CoordinatorDataT
+from .entity import NextDnsEntity

-PARALLEL_UPDATES = 1
+PARALLEL_UPDATES = 0


 @dataclass(frozen=True, kw_only=True)
@@ -297,27 +297,12 @@ async def async_setup_entry(
     )


-class NextDnsSensor(
-    CoordinatorEntity[NextDnsUpdateCoordinator[CoordinatorDataT]], SensorEntity
-):
+class NextDnsSensor(NextDnsEntity, SensorEntity):
     """Define an NextDNS sensor."""

-    _attr_has_entity_name = True
     entity_description: NextDnsSensorEntityDescription

-    def __init__(
-        self,
-        coordinator: NextDnsUpdateCoordinator[CoordinatorDataT],
-        description: NextDnsSensorEntityDescription,
-    ) -> None:
-        """Initialize."""
-        super().__init__(coordinator)
-        self._attr_device_info = coordinator.device_info
-        self._attr_unique_id = f"{coordinator.profile_id}_{description.key}"
-        self._attr_native_value = description.value(coordinator.data)
-        self.entity_description: NextDnsSensorEntityDescription = description
-
-    @callback
-    def _handle_coordinator_update(self) -> None:
-        """Handle updated data from the coordinator."""
-        self._attr_native_value = self.entity_description.value(self.coordinator.data)
-        self.async_write_ha_state()
+    @property
+    def native_value(self) -> StateType:
+        """Return the state of the sensor."""
+        return self.entity_description.value(self.coordinator.data)
@@ -4,16 +4,25 @@
       "user": {
         "data": {
           "api_key": "[%key:common::config_flow::data::api_key%]"
         },
+        "data_description": {
+          "api_key": "The API key for your NextDNS account"
+        }
       },
       "profiles": {
         "data": {
-          "profile": "Profile"
+          "profile_name": "Profile"
         },
+        "data_description": {
+          "profile_name": "The NextDNS configuration profile you want to integrate"
+        }
       },
       "reauth_confirm": {
         "data": {
           "api_key": "[%key:common::config_flow::data::api_key%]"
         },
+        "data_description": {
+          "api_key": "[%key:component::nextdns::config::step::user::data_description::api_key%]"
+        }
       }
     },
@@ -15,11 +15,11 @@ from homeassistant.const import EntityCategory
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
-from homeassistant.helpers.update_coordinator import CoordinatorEntity

 from . import NextDnsConfigEntry
 from .const import DOMAIN
-from .coordinator import NextDnsUpdateCoordinator
+from .entity import NextDnsEntity

 PARALLEL_UPDATES = 1

@@ -536,12 +536,9 @@ async def async_setup_entry(
     )


-class NextDnsSwitch(
-    CoordinatorEntity[NextDnsUpdateCoordinator[Settings]], SwitchEntity
-):
+class NextDnsSwitch(NextDnsEntity, SwitchEntity):
     """Define an NextDNS switch."""

-    _attr_has_entity_name = True
     entity_description: NextDnsSwitchEntityDescription

     def __init__(
@@ -550,11 +547,8 @@ class NextDnsSwitch(
         description: NextDnsSwitchEntityDescription,
     ) -> None:
         """Initialize."""
-        super().__init__(coordinator)
-        self._attr_device_info = coordinator.device_info
-        self._attr_unique_id = f"{coordinator.profile_id}_{description.key}"
+        super().__init__(coordinator, description)
         self._attr_is_on = description.state(coordinator.data)
-        self.entity_description = description

     @callback
     def _handle_coordinator_update(self) -> None:
@@ -219,11 +219,11 @@ class OllamaConversationEntity(
         settings = {**self.entry.data, **self.entry.options}

         try:
-            await chat_log.async_update_llm_data(
-                DOMAIN,
-                user_input,
+            await chat_log.async_provide_llm_data(
+                user_input.as_llm_context(DOMAIN),
                 settings.get(CONF_LLM_HASS_API),
                 settings.get(CONF_PROMPT),
+                user_input.extra_system_prompt,
             )
         except conversation.ConverseError as err:
             return err.as_conversation_result()
@@ -66,6 +66,7 @@ class OneDriveUpdateCoordinator(DataUpdateCoordinator[Drive]):
                 translation_domain=DOMAIN, translation_key="authentication_failed"
             ) from err
         except OneDriveException as err:
+            _LOGGER.debug("Failed to fetch drive data: %s", err, exc_info=True)
             raise UpdateFailed(
                 translation_domain=DOMAIN, translation_key="update_failed"
             ) from err
@@ -279,11 +279,11 @@ class OpenAIConversationEntity(
         options = self.entry.options

         try:
-            await chat_log.async_update_llm_data(
-                DOMAIN,
-                user_input,
+            await chat_log.async_provide_llm_data(
+                user_input.as_llm_context(DOMAIN),
                 options.get(CONF_LLM_HASS_API),
                 options.get(CONF_PROMPT),
+                user_input.extra_system_prompt,
             )
         except conversation.ConverseError as err:
             return err.as_conversation_result()
@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/osoenergy",
   "iot_class": "cloud_polling",
-  "requirements": ["pyosoenergyapi==1.1.4"]
+  "requirements": ["pyosoenergyapi==1.1.5"]
 }
@@ -9,7 +9,7 @@ from pypaperless.exceptions import (
     PaperlessInvalidTokenError,
 )

-from homeassistant.const import CONF_API_KEY, CONF_URL, Platform
+from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL, Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import (
     ConfigEntryAuthFailed,
@@ -69,7 +69,7 @@ async def _get_paperless_api(
     api = Paperless(
         entry.data[CONF_URL],
         entry.data[CONF_API_KEY],
-        session=async_get_clientsession(hass),
+        session=async_get_clientsession(hass, entry.data.get(CONF_VERIFY_SSL, True)),
     )

     try:
@@ -16,7 +16,7 @@ from pypaperless.exceptions import (
 import voluptuous as vol

 from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
-from homeassistant.const import CONF_API_KEY, CONF_URL
+from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL
 from homeassistant.helpers.aiohttp_client import async_get_clientsession

 from .const import DOMAIN, LOGGER
@@ -25,6 +25,7 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
     {
         vol.Required(CONF_URL): str,
         vol.Required(CONF_API_KEY): str,
+        vol.Required(CONF_VERIFY_SSL, default=True): bool,
     }
 )

@@ -78,15 +79,19 @@ class PaperlessConfigFlow(ConfigFlow, domain=DOMAIN):
             if not errors:
                 return self.async_update_reload_and_abort(entry, data=user_input)

+        if user_input is not None:
+            suggested_values = user_input
+        else:
+            suggested_values = {
+                CONF_URL: entry.data[CONF_URL],
+                CONF_VERIFY_SSL: entry.data.get(CONF_VERIFY_SSL, True),
+            }
+
         return self.async_show_form(
             step_id="reconfigure",
             data_schema=self.add_suggested_values_to_schema(
                 data_schema=STEP_USER_DATA_SCHEMA,
-                suggested_values={
-                    CONF_URL: user_input[CONF_URL]
-                    if user_input is not None
-                    else entry.data[CONF_URL],
-                },
+                suggested_values=suggested_values,
             ),
             errors=errors,
         )
@@ -122,13 +127,15 @@ class PaperlessConfigFlow(ConfigFlow, domain=DOMAIN):
             errors=errors,
         )

-    async def _validate_input(self, user_input: dict[str, str]) -> dict[str, str]:
+    async def _validate_input(self, user_input: dict[str, Any]) -> dict[str, str]:
         errors: dict[str, str] = {}

         client = Paperless(
             user_input[CONF_URL],
             user_input[CONF_API_KEY],
-            session=async_get_clientsession(self.hass),
+            session=async_get_clientsession(
+                self.hass, user_input.get(CONF_VERIFY_SSL, True)
+            ),
         )

         try:
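The `verify_ssl` flag added above is forwarded to `async_get_clientsession`, so a false value selects a client session that skips certificate verification. As a rough standalone aiohttp sketch of what that amounts to (URL, token, and path are placeholders, not the Paperless-ngx API contract):

```python
# Sketch: disabling SSL verification on an aiohttp connector, e.g. for a
# self-signed certificate. Placeholders only; not a real integration client.
import asyncio

import aiohttp


async def fetch(url: str, token: str, verify_ssl: bool = True) -> int:
    # ssl=False disables certificate verification on the connector.
    connector = aiohttp.TCPConnector() if verify_ssl else aiohttp.TCPConnector(ssl=False)
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.get(
            f"{url}/api/", headers={"Authorization": f"Token {token}"}
        ) as resp:
            return resp.status


# Example (requires a reachable instance):
# print(asyncio.run(fetch("https://paperless.example:8000", "token123", verify_ssl=False)))
```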
@@ -4,11 +4,13 @@
       "user": {
         "data": {
           "url": "[%key:common::config_flow::data::url%]",
-          "api_key": "[%key:common::config_flow::data::api_key%]"
+          "api_key": "[%key:common::config_flow::data::api_key%]",
+          "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
         },
         "data_description": {
           "url": "URL to connect to the Paperless-ngx instance",
-          "api_key": "API key to connect to the Paperless-ngx API"
+          "api_key": "API key to connect to the Paperless-ngx API",
+          "verify_ssl": "Verify the SSL certificate of the Paperless-ngx instance. Disable this option if you’re using a self-signed certificate."
         },
         "title": "Add Paperless-ngx instance"
       },
@@ -24,11 +26,13 @@
       "reconfigure": {
         "data": {
           "url": "[%key:common::config_flow::data::url%]",
-          "api_key": "[%key:common::config_flow::data::api_key%]"
+          "api_key": "[%key:common::config_flow::data::api_key%]",
+          "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
         },
         "data_description": {
           "url": "[%key:component::paperless_ngx::config::step::user::data_description::url%]",
-          "api_key": "[%key:component::paperless_ngx::config::step::user::data_description::api_key%]"
+          "api_key": "[%key:component::paperless_ngx::config::step::user::data_description::api_key%]",
+          "verify_ssl": "[%key:component::paperless_ngx::config::step::user::data_description::verify_ssl%]"
         },
         "title": "Reconfigure Paperless-ngx instance"
       }
@@ -8,5 +8,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["ical"],
   "quality_scale": "silver",
-  "requirements": ["ical==10.0.0"]
+  "requirements": ["ical==10.0.4"]
 }
@@ -75,7 +75,10 @@ class ReolinkHostCoordinatorEntity(CoordinatorEntity[DataUpdateCoordinator[None]]):
         )

         http_s = "https" if self._host.api.use_https else "http"
-        self._conf_url = f"{http_s}://{self._host.api.host}:{self._host.api.port}"
+        if self._host.api.baichuan_only:
+            self._conf_url = None
+        else:
+            self._conf_url = f"{http_s}://{self._host.api.host}:{self._host.api.port}"
         self._dev_id = self._host.unique_id
         self._attr_device_info = DeviceInfo(
             identifiers={(DOMAIN, self._dev_id)},
@@ -184,6 +187,11 @@ class ReolinkChannelCoordinatorEntity(ReolinkHostCoordinatorEntity):
         if mac := self._host.api.baichuan.mac_address(dev_ch):
             connections.add((CONNECTION_NETWORK_MAC, mac))

+        if self._conf_url is None:
+            conf_url = None
+        else:
+            conf_url = f"{self._conf_url}/?ch={dev_ch}"
+
         self._attr_device_info = DeviceInfo(
             identifiers={(DOMAIN, self._dev_id)},
             connections=connections,
@@ -195,7 +203,7 @@ class ReolinkChannelCoordinatorEntity(ReolinkHostCoordinatorEntity):
             hw_version=self._host.api.camera_hardware_version(dev_ch),
             sw_version=self._host.api.camera_sw_version(dev_ch),
             serial_number=self._host.api.camera_uid(dev_ch),
-            configuration_url=f"{self._conf_url}/?ch={dev_ch}",
+            configuration_url=conf_url,
         )

     @property
@@ -220,6 +220,9 @@
     "ai_animal_sensitivity": {
       "default": "mdi:paw"
     },
+    "cry_sensitivity": {
+      "default": "mdi:emoticon-cry-outline"
+    },
     "crossline_sensitivity": {
       "default": "mdi:fence"
     },
@@ -488,6 +491,12 @@
       "state": {
         "on": "mdi:eye-off"
       }
     },
+    "privacy_mask": {
+      "default": "mdi:eye",
+      "state": {
+        "on": "mdi:eye-off"
+      }
+    }
   }
 },
@@ -19,5 +19,5 @@
   "iot_class": "local_push",
   "loggers": ["reolink_aio"],
   "quality_scale": "platinum",
-  "requirements": ["reolink-aio==0.14.0"]
+  "requirements": ["reolink-aio==0.14.1"]
 }
@@ -272,6 +272,18 @@ NUMBER_ENTITIES = (
         value=lambda api, ch: api.ai_sensitivity(ch, "dog_cat"),
         method=lambda api, ch, value: api.set_ai_sensitivity(ch, int(value), "dog_cat"),
     ),
+    ReolinkNumberEntityDescription(
+        key="cry_sensitivity",
+        cmd_key="299",
+        translation_key="cry_sensitivity",
+        entity_category=EntityCategory.CONFIG,
+        native_step=1,
+        native_min_value=1,
+        native_max_value=5,
+        supported=lambda api, ch: api.supported(ch, "ai_cry"),
+        value=lambda api, ch: api.baichuan.cry_sensitivity(ch),
+        method=lambda api, ch, value: api.baichuan.set_cry_detection(ch, int(value)),
+    ),
     ReolinkNumberEntityDescription(
         key="ai_face_delay",
         cmd_key="GetAiAlarm",
@@ -571,6 +571,9 @@
       "ai_animal_sensitivity": {
         "name": "AI animal sensitivity"
       },
+      "cry_sensitivity": {
+        "name": "Baby cry sensitivity"
+      },
       "crossline_sensitivity": {
         "name": "AI crossline {zone_name} sensitivity"
       },
@@ -957,6 +960,9 @@
       },
       "privacy_mode": {
         "name": "Privacy mode"
       },
+      "privacy_mask": {
+        "name": "Privacy mask"
+      }
     }
   }
@@ -216,6 +216,15 @@ SWITCH_ENTITIES = (
         value=lambda api, ch: api.baichuan.privacy_mode(ch),
         method=lambda api, ch, value: api.baichuan.set_privacy_mode(ch, value),
     ),
+    ReolinkSwitchEntityDescription(
+        key="privacy_mask",
+        cmd_key="GetMask",
+        translation_key="privacy_mask",
+        entity_category=EntityCategory.CONFIG,
+        supported=lambda api, ch: api.supported(ch, "privacy_mask"),
+        value=lambda api, ch: api.privacy_mask_enabled(ch),
+        method=lambda api, ch, value: api.set_privacy_mask(ch, enable=value),
+    ),
     ReolinkSwitchEntityDescription(
         key="hardwired_chime_enabled",
         cmd_key="483",
@@ -6,5 +6,5 @@
   "iot_class": "assumed_state",
   "loggers": ["rflink"],
   "quality_scale": "legacy",
-  "requirements": ["rflink==0.0.66"]
+  "requirements": ["rflink==0.0.67"]
 }
@@ -10,7 +10,7 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["rokuecp"],
-  "requirements": ["rokuecp==0.19.3"],
+  "requirements": ["rokuecp==0.19.5"],
   "ssdp": [
     {
       "st": "roku:ecp",
@@ -7,6 +7,6 @@
   "iot_class": "local_push",
   "loggers": ["aiorussound"],
   "quality_scale": "silver",
-  "requirements": ["aiorussound==4.5.2"],
+  "requirements": ["aiorussound==4.6.0"],
   "zeroconf": ["_rio._tcp.local."]
 }
@@ -235,11 +235,15 @@ class ShellyButton(ShellyBaseButton):
         self._attr_unique_id = f"{coordinator.mac}_{description.key}"
         if isinstance(coordinator, ShellyBlockCoordinator):
             self._attr_device_info = get_block_device_info(
-                coordinator.device, coordinator.mac
+                coordinator.device,
+                coordinator.mac,
+                suggested_area=coordinator.suggested_area,
             )
         else:
             self._attr_device_info = get_rpc_device_info(
-                coordinator.device, coordinator.mac
+                coordinator.device,
+                coordinator.mac,
+                suggested_area=coordinator.suggested_area,
             )
         self._attr_device_info = DeviceInfo(
             connections={(CONNECTION_NETWORK_MAC, coordinator.mac)}
@@ -211,7 +211,10 @@ class BlockSleepingClimate(
         elif entry is not None:
             self._unique_id = entry.unique_id
         self._attr_device_info = get_block_device_info(
-            coordinator.device, coordinator.mac, sensor_block
+            coordinator.device,
+            coordinator.mac,
+            sensor_block,
+            suggested_area=coordinator.suggested_area,
         )
         self._attr_name = get_block_entity_name(
             self.coordinator.device, sensor_block, None
@@ -31,7 +31,11 @@ from homeassistant.const import (
     Platform,
 )
 from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant, callback
-from homeassistant.helpers import device_registry as dr, issue_registry as ir
+from homeassistant.helpers import (
+    area_registry as ar,
+    device_registry as dr,
+    issue_registry as ir,
+)
 from homeassistant.helpers.debounce import Debouncer
 from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, format_mac
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -114,6 +118,7 @@ class ShellyCoordinatorBase[_DeviceT: BlockDevice | RpcDevice](
         self.device = device
         self.device_id: str | None = None
         self._pending_platforms: list[Platform] | None = None
+        self.suggested_area: str | None = None
         device_name = device.name if device.initialized else entry.title
         interval_td = timedelta(seconds=update_interval)
         # The device has come online at least once. In the case of a sleeping RPC
@@ -176,6 +181,11 @@ class ShellyCoordinatorBase[_DeviceT: BlockDevice | RpcDevice](
             hw_version=f"gen{get_device_entry_gen(self.config_entry)}",
             configuration_url=f"http://{get_host(self.config_entry.data[CONF_HOST])}:{get_http_port(self.config_entry.data)}",
         )
+        # We want to use the main device area as the suggested area for sub-devices.
+        if (area_id := device_entry.area_id) is not None:
+            area_registry = ar.async_get(self.hass)
+            if (area := area_registry.async_get_area(area_id)) is not None:
+                self.suggested_area = area.name
         self.device_id = device_entry.id

     async def shutdown(self) -> None:
@@ -362,7 +362,10 @@ class ShellyBlockEntity(CoordinatorEntity[ShellyBlockCoordinator]):
         self.block = block
         self._attr_name = get_block_entity_name(coordinator.device, block)
         self._attr_device_info = get_block_device_info(
-            coordinator.device, coordinator.mac, block
+            coordinator.device,
+            coordinator.mac,
+            block,
+            suggested_area=coordinator.suggested_area,
         )
         self._attr_unique_id = f"{coordinator.mac}-{block.description}"

@@ -405,7 +408,10 @@ class ShellyRpcEntity(CoordinatorEntity[ShellyRpcCoordinator]):
         super().__init__(coordinator)
         self.key = key
         self._attr_device_info = get_rpc_device_info(
-            coordinator.device, coordinator.mac, key
+            coordinator.device,
+            coordinator.mac,
+            key,
+            suggested_area=coordinator.suggested_area,
         )
         self._attr_unique_id = f"{coordinator.mac}-{key}"
         self._attr_name = get_rpc_entity_name(coordinator.device, key)
@@ -521,7 +527,9 @@ class ShellyRestAttributeEntity(CoordinatorEntity[ShellyBlockCoordinator]):
         )
         self._attr_unique_id = f"{coordinator.mac}-{attribute}"
         self._attr_device_info = get_block_device_info(
-            coordinator.device, coordinator.mac
+            coordinator.device,
+            coordinator.mac,
+            suggested_area=coordinator.suggested_area,
         )
         self._last_value = None

@@ -630,7 +638,10 @@ class ShellySleepingBlockAttributeEntity(ShellyBlockAttributeEntity):
         self.entity_description = description

         self._attr_device_info = get_block_device_info(
-            coordinator.device, coordinator.mac, block
+            coordinator.device,
+            coordinator.mac,
+            block,
+            suggested_area=coordinator.suggested_area,
         )

         if block is not None:
@@ -698,7 +709,10 @@ class ShellySleepingRpcAttributeEntity(ShellyRpcAttributeEntity):
         self.entity_description = description

         self._attr_device_info = get_rpc_device_info(
-            coordinator.device, coordinator.mac, key
+            coordinator.device,
+            coordinator.mac,
+            key,
+            suggested_area=coordinator.suggested_area,
         )
         self._attr_unique_id = self._attr_unique_id = (
             f"{coordinator.mac}-{key}-{attribute}"
@@ -207,7 +207,10 @@ class ShellyRpcEvent(CoordinatorEntity[ShellyRpcCoordinator], EventEntity):
         super().__init__(coordinator)
         self.event_id = int(key.split(":")[-1])
         self._attr_device_info = get_rpc_device_info(
-            coordinator.device, coordinator.mac, key
+            coordinator.device,
+            coordinator.mac,
+            key,
+            suggested_area=coordinator.suggested_area,
         )
         self._attr_unique_id = f"{coordinator.mac}-{key}"
         self._attr_name = get_rpc_entity_name(coordinator.device, key)
@@ -139,7 +139,11 @@ class RpcEmeterPhaseSensor(RpcSensor):
         super().__init__(coordinator, key, attribute, description)

         self._attr_device_info = get_rpc_device_info(
-            coordinator.device, coordinator.mac, key, description.emeter_phase
+            coordinator.device,
+            coordinator.mac,
+            key,
+            emeter_phase=description.emeter_phase,
+            suggested_area=coordinator.suggested_area,
         )
Some files were not shown because too many files have changed in this diff.