Compare commits

..

1 Commits

Author SHA1 Message Date
J. Nick Koston
a13c0effb8 attempt to fix rename race 2026-02-24 10:04:27 -06:00
202 changed files with 1395 additions and 9986 deletions

View File

@@ -321,7 +321,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build base image
uses: home-assistant/builder@2025.11.0 # zizmor: ignore[unpinned-uses]
uses: home-assistant/builder@21bc64d76dad7a5184c67826aab41c6b6f89023a # 2025.11.0
with:
args: |
$BUILD_ARGS \

2
CODEOWNERS generated
View File

@@ -555,6 +555,8 @@ build.json @home-assistant/supervisor
/tests/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185
/homeassistant/components/fritzbox/ @mib1185 @flabbamann
/tests/components/fritzbox/ @mib1185 @flabbamann
/homeassistant/components/fritzbox_callmonitor/ @cdce8p
/tests/components/fritzbox_callmonitor/ @cdce8p
/homeassistant/components/fronius/ @farmio
/tests/components/fronius/ @farmio
/homeassistant/components/frontend/ @home-assistant/frontend

View File

@@ -34,13 +34,11 @@ from homeassistant.const import (
)
from homeassistant.data_entry_flow import section
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.selector import (
TextSelector,
TextSelectorConfig,
TextSelectorType,
)
from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
from .const import (
DEFAULT_SSL,
@@ -394,18 +392,6 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
except asyncio.CancelledError:
pass
async def async_step_dhcp(
self, discovery_info: DhcpServiceInfo
) -> ConfigFlowResult:
"""Automatically handle a DHCP discovered IP change."""
ip_address = discovery_info.ip
# python-airos defaults to upper for derived mac_address
normalized_mac = format_mac(discovery_info.macaddress).upper()
await self.async_set_unique_id(normalized_mac)
self._abort_if_unique_id_configured(updates={CONF_HOST: ip_address})
return self.async_abort(reason="unreachable")
async def async_step_discovery_no_devices(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:

View File

@@ -3,7 +3,6 @@
"name": "Ubiquiti airOS",
"codeowners": ["@CoMPaTech"],
"config_flow": true,
"dhcp": [{ "registered_devices": true }],
"documentation": "https://www.home-assistant.io/integrations/airos",
"integration_type": "device",
"iot_class": "local_polling",

View File

@@ -2,12 +2,10 @@
from __future__ import annotations
import aiohttp
from genie_partner_sdk.client import AladdinConnectClient
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import (
aiohttp_client,
config_entry_oauth2_flow,
@@ -33,27 +31,11 @@ async def async_setup_entry(
session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
try:
await session.async_ensure_token_valid()
except aiohttp.ClientResponseError as err:
if 400 <= err.status < 500:
raise ConfigEntryAuthFailed(err) from err
raise ConfigEntryNotReady from err
except aiohttp.ClientError as err:
raise ConfigEntryNotReady from err
client = AladdinConnectClient(
api.AsyncConfigEntryAuth(aiohttp_client.async_get_clientsession(hass), session)
)
try:
doors = await client.get_doors()
except aiohttp.ClientResponseError as err:
if 400 <= err.status < 500:
raise ConfigEntryAuthFailed(err) from err
raise ConfigEntryNotReady from err
except aiohttp.ClientError as err:
raise ConfigEntryNotReady from err
doors = await client.get_doors()
entry.runtime_data = {
door.unique_id: AladdinConnectCoordinator(hass, entry, client, door)

View File

@@ -11,18 +11,6 @@ API_URL = "https://twdvzuefzh.execute-api.us-east-2.amazonaws.com/v1"
API_KEY = "k6QaiQmcTm2zfaNns5L1Z8duBtJmhDOW8JawlCC3"
class AsyncConfigFlowAuth(Auth):
"""Provide Aladdin Connect Genie authentication for config flow validation."""
def __init__(self, websession: ClientSession, access_token: str) -> None:
"""Initialize Aladdin Connect Genie auth."""
super().__init__(websession, API_URL, access_token, API_KEY)
async def async_get_access_token(self) -> str:
"""Return the access token."""
return self.access_token
class AsyncConfigEntryAuth(Auth):
"""Provide Aladdin Connect Genie authentication tied to an OAuth2 based config entry."""

View File

@@ -4,14 +4,12 @@ from collections.abc import Mapping
import logging
from typing import Any
from genie_partner_sdk.client import AladdinConnectClient
import jwt
import voluptuous as vol
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
from homeassistant.helpers import aiohttp_client, config_entry_oauth2_flow
from homeassistant.helpers import config_entry_oauth2_flow
from .api import AsyncConfigFlowAuth
from .const import CONFIG_FLOW_MINOR_VERSION, CONFIG_FLOW_VERSION, DOMAIN
@@ -54,25 +52,11 @@ class OAuth2FlowHandler(
async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult:
"""Create an oauth config entry or update existing entry for reauth."""
try:
token = jwt.decode(
data["token"]["access_token"], options={"verify_signature": False}
)
user_id = token["sub"]
except (jwt.DecodeError, KeyError):
return self.async_abort(reason="oauth_error")
client = AladdinConnectClient(
AsyncConfigFlowAuth(
aiohttp_client.async_get_clientsession(self.hass),
data["token"]["access_token"],
)
# Extract the user ID from the JWT token's 'sub' field
token = jwt.decode(
data["token"]["access_token"], options={"verify_signature": False}
)
try:
await client.get_doors()
except Exception: # noqa: BLE001
return self.async_abort(reason="cannot_connect")
user_id = token["sub"]
await self.async_set_unique_id(user_id)
if self.source == SOURCE_REAUTH:

View File

@@ -7,31 +7,39 @@ rules:
brands: done
common-modules: done
config-flow: done
config-flow-test-coverage: done
config-flow-test-coverage: todo
dependency-transparency: done
docs-actions:
status: exempt
comment: Integration does not register any service actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
docs-installation-instructions:
status: todo
comment: Documentation needs to be created.
docs-removal-instructions:
status: todo
comment: Documentation needs to be created.
entity-event-setup:
status: exempt
comment: Integration does not subscribe to external events.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
test-before-configure:
status: todo
comment: Config flow does not currently test connection during setup.
test-before-setup: todo
unique-config-entry: done
# Silver
action-exceptions: todo
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: Integration does not have an options flow.
docs-installation-parameters: done
status: todo
comment: Documentation needs to be created.
docs-installation-parameters:
status: todo
comment: Documentation needs to be created.
entity-unavailable: todo
integration-owner: done
log-when-unavailable: todo
@@ -44,17 +52,29 @@ rules:
# Gold
devices: done
diagnostics: todo
discovery: done
discovery-update-info:
status: exempt
comment: Integration connects via the cloud and not locally.
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
discovery: todo
discovery-update-info: todo
docs-data-update:
status: todo
comment: Documentation needs to be created.
docs-examples:
status: todo
comment: Documentation needs to be created.
docs-known-limitations:
status: todo
comment: Documentation needs to be created.
docs-supported-devices:
status: todo
comment: Documentation needs to be created.
docs-supported-functions:
status: todo
comment: Documentation needs to be created.
docs-troubleshooting:
status: todo
comment: Documentation needs to be created.
docs-use-cases:
status: todo
comment: Documentation needs to be created.
dynamic-devices: todo
entity-category: done
entity-device-class: done
@@ -66,7 +86,7 @@ rules:
repair-issues: todo
stale-devices:
status: todo
comment: We can automatically remove removed devices
comment: Stale devices can be done dynamically
# Platinum
async-dependency: todo

View File

@@ -4,7 +4,6 @@
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
"authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"cloud_not_enabled": "Please make sure you run Home Assistant with `{default_config}` enabled in your configuration.yaml.",
"missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]",
"no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]",

View File

@@ -112,12 +112,19 @@ async def get_model_list(client: anthropic.AsyncAnthropic) -> list[SelectOptionD
# Resolve alias from versioned model name:
model_alias = (
model_info.id[:-9]
if model_info.id != "claude-3-haiku-20240307"
if model_info.id
not in (
"claude-3-haiku-20240307",
"claude-3-5-haiku-20241022",
"claude-3-opus-20240229",
)
and model_info.id[-2:-1] != "-"
else model_info.id
)
if short_form.search(model_alias):
model_alias += "-0"
if model_alias.endswith(("haiku", "opus", "sonnet")):
model_alias += "-latest"
model_options.append(
SelectOptionDict(
label=model_info.display_name,

View File

@@ -37,6 +37,8 @@ DEFAULT = {
MIN_THINKING_BUDGET = 1024
NON_THINKING_MODELS = [
"claude-3-5", # Both sonnet and haiku
"claude-3-opus",
"claude-3-haiku",
]
@@ -49,7 +51,7 @@ NON_ADAPTIVE_THINKING_MODELS = [
"claude-opus-4-20250514",
"claude-sonnet-4-0",
"claude-sonnet-4-20250514",
"claude-3-haiku",
"claude-3",
]
UNSUPPORTED_STRUCTURED_OUTPUT_MODELS = [
@@ -58,13 +60,19 @@ UNSUPPORTED_STRUCTURED_OUTPUT_MODELS = [
"claude-opus-4-20250514",
"claude-sonnet-4-0",
"claude-sonnet-4-20250514",
"claude-3-haiku",
"claude-3",
]
WEB_SEARCH_UNSUPPORTED_MODELS = [
"claude-3-haiku",
"claude-3-opus",
"claude-3-5-sonnet-20240620",
"claude-3-5-sonnet-20241022",
]
DEPRECATED_MODELS = [
"claude-3",
"claude-3-5-haiku",
"claude-3-7-sonnet",
"claude-3-5-sonnet",
"claude-3-opus",
]

View File

@@ -8,6 +8,5 @@
"documentation": "https://www.home-assistant.io/integrations/anthropic",
"integration_type": "service",
"iot_class": "cloud_polling",
"quality_scale": "bronze",
"requirements": ["anthropic==0.83.0"]
"requirements": ["anthropic==0.78.0"]
}

View File

@@ -3,7 +3,7 @@
from __future__ import annotations
from collections.abc import Iterator
from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, cast
import voluptuous as vol
@@ -19,7 +19,7 @@ from homeassistant.helpers.selector import (
)
from .config_flow import get_model_list
from .const import CONF_CHAT_MODEL, DEPRECATED_MODELS, DOMAIN
from .const import CONF_CHAT_MODEL, DEFAULT, DEPRECATED_MODELS, DOMAIN
if TYPE_CHECKING:
from . import AnthropicConfigEntry
@@ -67,23 +67,13 @@ class ModelDeprecatedRepairFlow(RepairsFlow):
self._model_list_cache[entry.entry_id] = model_list
if "opus" in model:
family = "claude-opus"
suggested_model = "claude-opus-4-5"
elif "haiku" in model:
suggested_model = "claude-haiku-4-5"
elif "sonnet" in model:
family = "claude-sonnet"
suggested_model = "claude-sonnet-4-5"
else:
family = "claude-haiku"
suggested_model = next(
(
model_option["value"]
for model_option in sorted(
(m for m in model_list if family in m["value"]),
key=lambda x: x["value"],
reverse=True,
)
),
vol.UNDEFINED,
)
suggested_model = cast(str, DEFAULT[CONF_CHAT_MODEL])
schema = vol.Schema(
{

View File

@@ -16,7 +16,6 @@ from typing import IO, Any, cast
import aiohttp
from securetar import (
InvalidPasswordError,
SecureTarArchive,
SecureTarError,
SecureTarFile,
@@ -166,7 +165,7 @@ def validate_password(path: Path, password: str | None) -> bool:
):
# If we can read the tar file, the password is correct
return True
except (tarfile.ReadError, InvalidPasswordError, SecureTarReadError):
except (tarfile.ReadError, SecureTarReadError):
LOGGER.debug("Invalid password")
return False
except Exception: # noqa: BLE001
@@ -193,14 +192,13 @@ def validate_password_stream(
for obj in input_archive.tar:
if not obj.name.endswith((".tar", ".tgz", ".tar.gz")):
continue
try:
with input_archive.extract_tar(obj) as decrypted:
if decrypted.plaintext_size is None:
raise UnsupportedSecureTarVersion
with input_archive.extract_tar(obj) as decrypted:
if decrypted.plaintext_size is None:
raise UnsupportedSecureTarVersion
try:
decrypted.read(1) # Read a single byte to trigger the decryption
except (InvalidPasswordError, SecureTarReadError) as err:
raise IncorrectPassword from err
else:
except SecureTarReadError as err:
raise IncorrectPassword from err
return
raise BackupEmpty

View File

@@ -1,4 +1,4 @@
"""The BSB-LAN integration."""
"""The BSB-Lan integration."""
import asyncio
import dataclasses
@@ -36,7 +36,7 @@ from .const import CONF_PASSKEY, DOMAIN
from .coordinator import BSBLanFastCoordinator, BSBLanSlowCoordinator
from .services import async_setup_services
PLATFORMS = [Platform.BUTTON, Platform.CLIMATE, Platform.SENSOR, Platform.WATER_HEATER]
PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, Platform.WATER_HEATER]
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
@@ -56,13 +56,13 @@ class BSBLanData:
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the BSB-LAN integration."""
"""Set up the BSB-Lan integration."""
async_setup_services(hass)
return True
async def async_setup_entry(hass: HomeAssistant, entry: BSBLanConfigEntry) -> bool:
"""Set up BSB-LAN from a config entry."""
"""Set up BSB-Lan from a config entry."""
# create config using BSBLANConfig
config = BSBLANConfig(

View File

@@ -1,59 +0,0 @@
"""Button platform for BSB-Lan integration."""
from __future__ import annotations
from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import BSBLanConfigEntry, BSBLanData
from .coordinator import BSBLanFastCoordinator
from .entity import BSBLanEntity
from .helpers import async_sync_device_time
PARALLEL_UPDATES = 1
BUTTON_DESCRIPTIONS: tuple[ButtonEntityDescription, ...] = (
ButtonEntityDescription(
key="sync_time",
translation_key="sync_time",
entity_category=EntityCategory.CONFIG,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: BSBLanConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up BSB-Lan button entities from a config entry."""
data = entry.runtime_data
async_add_entities(
BSBLanButtonEntity(data.fast_coordinator, data, description)
for description in BUTTON_DESCRIPTIONS
)
class BSBLanButtonEntity(BSBLanEntity, ButtonEntity):
"""Defines a BSB-Lan button entity."""
entity_description: ButtonEntityDescription
def __init__(
self,
coordinator: BSBLanFastCoordinator,
data: BSBLanData,
description: ButtonEntityDescription,
) -> None:
"""Initialize BSB-Lan button entity."""
super().__init__(coordinator, data)
self.entity_description = description
self._attr_unique_id = f"{data.device.MAC}-{description.key}"
self._data = data
async def async_press(self) -> None:
"""Handle the button press."""
await async_sync_device_time(self._data.client, self._data.device.name)

View File

@@ -39,15 +39,15 @@ PRESET_MODES = [
PRESET_NONE,
]
# Mapping from Home Assistant HVACMode to BSB-LAN integer values
# BSB-LAN uses: 0=off, 1=auto, 2=eco/reduced, 3=heat/comfort
# Mapping from Home Assistant HVACMode to BSB-Lan integer values
# BSB-Lan uses: 0=off, 1=auto, 2=eco/reduced, 3=heat/comfort
HA_TO_BSBLAN_HVAC_MODE: Final[dict[HVACMode, int]] = {
HVACMode.OFF: 0,
HVACMode.AUTO: 1,
HVACMode.HEAT: 3,
}
# Mapping from BSB-LAN integer values to Home Assistant HVACMode
# Mapping from BSB-Lan integer values to Home Assistant HVACMode
BSBLAN_TO_HA_HVAC_MODE: Final[dict[int, HVACMode]] = {
0: HVACMode.OFF,
1: HVACMode.AUTO,
@@ -69,6 +69,7 @@ async def async_setup_entry(
class BSBLANClimate(BSBLanEntity, ClimateEntity):
"""Defines a BSBLAN climate device."""
_attr_has_entity_name = True
_attr_name = None
# Determine preset modes
_attr_supported_features = (
@@ -137,7 +138,7 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity):
@property
def preset_mode(self) -> str | None:
"""Return the current preset mode."""
# BSB-LAN mode 2 is eco/reduced mode
# BSB-Lan mode 2 is eco/reduced mode
if self._hvac_mode_value == 2:
return PRESET_ECO
return PRESET_NONE
@@ -162,7 +163,7 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity):
if ATTR_HVAC_MODE in kwargs:
data[ATTR_HVAC_MODE] = HA_TO_BSBLAN_HVAC_MODE[kwargs[ATTR_HVAC_MODE]]
if ATTR_PRESET_MODE in kwargs:
# eco preset uses BSB-LAN mode 2, none preset uses mode 1 (auto)
# eco preset uses BSB-Lan mode 2, none preset uses mode 1 (auto)
if kwargs[ATTR_PRESET_MODE] == PRESET_ECO:
data[ATTR_HVAC_MODE] = 2
elif kwargs[ATTR_PRESET_MODE] == PRESET_NONE:

View File

@@ -1,4 +1,4 @@
"""Config flow for BSB-LAN integration."""
"""Config flow for BSB-Lan integration."""
from __future__ import annotations

View File

@@ -1,4 +1,4 @@
"""Constants for the BSB-LAN integration."""
"""Constants for the BSB-Lan integration."""
from __future__ import annotations

View File

@@ -1,4 +1,4 @@
"""DataUpdateCoordinator for the BSB-LAN integration."""
"""DataUpdateCoordinator for the BSB-Lan integration."""
from __future__ import annotations
@@ -29,13 +29,8 @@ if TYPE_CHECKING:
# Filter lists for optimized API calls - only fetch parameters we actually use
# This significantly reduces response time (~0.2s per parameter saved)
STATE_INCLUDE = [
"current_temperature",
"target_temperature",
"hvac_mode",
"hvac_action",
]
SENSOR_INCLUDE = ["current_temperature", "outside_temperature", "total_energy"]
STATE_INCLUDE = ["current_temperature", "target_temperature", "hvac_mode"]
SENSOR_INCLUDE = ["current_temperature", "outside_temperature"]
DHW_STATE_INCLUDE = [
"operating_mode",
"nominal_setpoint",
@@ -62,7 +57,7 @@ class BSBLanSlowData:
class BSBLanCoordinator[T](DataUpdateCoordinator[T]):
"""Base BSB-LAN coordinator."""
"""Base BSB-Lan coordinator."""
config_entry: BSBLanConfigEntry
@@ -74,7 +69,7 @@ class BSBLanCoordinator[T](DataUpdateCoordinator[T]):
name: str,
update_interval: timedelta,
) -> None:
"""Initialize the BSB-LAN coordinator."""
"""Initialize the BSB-Lan coordinator."""
super().__init__(
hass,
logger=LOGGER,
@@ -86,7 +81,7 @@ class BSBLanCoordinator[T](DataUpdateCoordinator[T]):
class BSBLanFastCoordinator(BSBLanCoordinator[BSBLanFastData]):
"""The BSB-LAN fast update coordinator for frequently changing data."""
"""The BSB-Lan fast update coordinator for frequently changing data."""
def __init__(
self,
@@ -94,7 +89,7 @@ class BSBLanFastCoordinator(BSBLanCoordinator[BSBLanFastData]):
config_entry: BSBLanConfigEntry,
client: BSBLAN,
) -> None:
"""Initialize the BSB-LAN fast coordinator."""
"""Initialize the BSB-Lan fast coordinator."""
super().__init__(
hass,
config_entry,
@@ -104,7 +99,7 @@ class BSBLanFastCoordinator(BSBLanCoordinator[BSBLanFastData]):
)
async def _async_update_data(self) -> BSBLanFastData:
"""Fetch fast-changing data from the BSB-LAN device."""
"""Fetch fast-changing data from the BSB-Lan device."""
try:
# Client is already initialized in async_setup_entry
# Use include filtering to only fetch parameters we actually use
@@ -115,15 +110,12 @@ class BSBLanFastCoordinator(BSBLanCoordinator[BSBLanFastData]):
except BSBLANAuthError as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="coordinator_auth_error",
"Authentication failed for BSB-Lan device"
) from err
except BSBLANConnectionError as err:
host = self.config_entry.data[CONF_HOST]
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="coordinator_connection_error",
translation_placeholders={"host": host},
f"Error while establishing connection with BSB-Lan device at {host}"
) from err
return BSBLanFastData(
@@ -134,7 +126,7 @@ class BSBLanFastCoordinator(BSBLanCoordinator[BSBLanFastData]):
class BSBLanSlowCoordinator(BSBLanCoordinator[BSBLanSlowData]):
"""The BSB-LAN slow update coordinator for infrequently changing data."""
"""The BSB-Lan slow update coordinator for infrequently changing data."""
def __init__(
self,
@@ -142,7 +134,7 @@ class BSBLanSlowCoordinator(BSBLanCoordinator[BSBLanSlowData]):
config_entry: BSBLanConfigEntry,
client: BSBLAN,
) -> None:
"""Initialize the BSB-LAN slow coordinator."""
"""Initialize the BSB-Lan slow coordinator."""
super().__init__(
hass,
config_entry,
@@ -152,7 +144,7 @@ class BSBLanSlowCoordinator(BSBLanCoordinator[BSBLanSlowData]):
)
async def _async_update_data(self) -> BSBLanSlowData:
"""Fetch slow-changing data from the BSB-LAN device."""
"""Fetch slow-changing data from the BSB-Lan device."""
try:
# Client is already initialized in async_setup_entry
# Use include filtering to only fetch parameters we actually use

View File

@@ -32,15 +32,6 @@ class BSBLanEntityBase[_T: BSBLanCoordinator](CoordinatorEntity[_T]):
model=(
data.info.device_identification.value
if data.info.device_identification
and data.info.device_identification.value
else None
),
model_id=(
f"{data.info.controller_family.value}_{data.info.controller_variant.value}"
if data.info.controller_family
and data.info.controller_variant
and data.info.controller_family.value
and data.info.controller_variant.value
else None
),
sw_version=data.device.version,

View File

@@ -1,42 +0,0 @@
"""Helper functions for BSB-Lan integration."""
from __future__ import annotations
from bsblan import BSBLAN, BSBLANError
from homeassistant.exceptions import HomeAssistantError
from homeassistant.util import dt as dt_util
from .const import DOMAIN
async def async_sync_device_time(client: BSBLAN, device_name: str) -> None:
"""Synchronize BSB-LAN device time with Home Assistant.
Only updates if device time differs from Home Assistant time.
Args:
client: The BSB-LAN client instance.
device_name: The name of the device (used in error messages).
Raises:
HomeAssistantError: If the time sync operation fails.
"""
try:
device_time = await client.time()
current_time = dt_util.now()
current_time_str = current_time.strftime("%d.%m.%Y %H:%M:%S")
# Only sync if device time differs from HA time
if device_time.time.value != current_time_str:
await client.set_time(current_time_str)
except BSBLANError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="sync_time_failed",
translation_placeholders={
"device_name": device_name,
"error": str(err),
},
) from err

View File

@@ -1,11 +1,4 @@
{
"entity": {
"button": {
"sync_time": {
"default": "mdi:timer-sync-outline"
}
}
},
"services": {
"set_hot_water_schedule": {
"service": "mdi:calendar-clock"

View File

@@ -1,6 +1,6 @@
{
"domain": "bsblan",
"name": "BSB-LAN",
"name": "BSB-Lan",
"codeowners": ["@liudger"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/bsblan",

View File

@@ -1,4 +1,4 @@
"""Support for BSB-LAN sensors."""
"""Support for BSB-Lan sensors."""
from __future__ import annotations
@@ -11,7 +11,7 @@ from homeassistant.components.sensor import (
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.const import UnitOfEnergy, UnitOfTemperature
from homeassistant.const import UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
@@ -25,7 +25,7 @@ PARALLEL_UPDATES = 1
@dataclass(frozen=True, kw_only=True)
class BSBLanSensorEntityDescription(SensorEntityDescription):
"""Describes BSB-LAN sensor entity."""
"""Describes BSB-Lan sensor entity."""
value_fn: Callable[[BSBLanFastData], StateType]
exists_fn: Callable[[BSBLanFastData], bool] = lambda data: True
@@ -58,19 +58,6 @@ SENSOR_TYPES: tuple[BSBLanSensorEntityDescription, ...] = (
),
exists_fn=lambda data: data.sensor.outside_temperature is not None,
),
BSBLanSensorEntityDescription(
key="total_energy",
translation_key="total_energy",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
state_class=SensorStateClass.TOTAL_INCREASING,
value_fn=lambda data: (
data.sensor.total_energy.value
if data.sensor.total_energy is not None
else None
),
exists_fn=lambda data: data.sensor.total_energy is not None,
),
)
@@ -79,7 +66,7 @@ async def async_setup_entry(
entry: BSBLanConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up BSB-LAN sensor based on a config entry."""
"""Set up BSB-Lan sensor based on a config entry."""
data = entry.runtime_data
# Only create sensors for available data points
@@ -94,7 +81,7 @@ async def async_setup_entry(
class BSBLanSensor(BSBLanEntity, SensorEntity):
"""Defines a BSB-LAN sensor."""
"""Defines a BSB-Lan sensor."""
entity_description: BSBLanSensorEntityDescription
@@ -103,7 +90,7 @@ class BSBLanSensor(BSBLanEntity, SensorEntity):
data: BSBLanData,
description: BSBLanSensorEntityDescription,
) -> None:
"""Initialize BSB-LAN sensor."""
"""Initialize BSB-Lan sensor."""
super().__init__(data.fast_coordinator, data)
self.entity_description = description
self._attr_unique_id = f"{data.device.MAC}-{description.key}"

View File

@@ -1,4 +1,4 @@
"""Support for BSB-LAN services."""
"""Support for BSB-Lan services."""
from __future__ import annotations
@@ -13,9 +13,9 @@ from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.util import dt as dt_util
from .const import DOMAIN
from .helpers import async_sync_device_time
if TYPE_CHECKING:
from . import BSBLanConfigEntry
@@ -192,7 +192,7 @@ async def set_hot_water_schedule(service_call: ServiceCall) -> None:
)
try:
# Call the BSB-LAN API to set the schedule
# Call the BSB-Lan API to set the schedule
await client.set_hot_water_schedule(dhw_schedule)
except BSBLANError as err:
raise HomeAssistantError(
@@ -245,7 +245,25 @@ async def async_sync_time(service_call: ServiceCall) -> None:
)
client = entry.runtime_data.client
await async_sync_device_time(client, device_entry.name or device_id)
try:
# Get current device time
device_time = await client.time()
current_time = dt_util.now()
current_time_str = current_time.strftime("%d.%m.%Y %H:%M:%S")
# Only sync if device time differs from HA time
if device_time.time.value != current_time_str:
await client.set_time(current_time_str)
except BSBLANError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="sync_time_failed",
translation_placeholders={
"device_name": device_entry.name or device_id,
"error": str(err),
},
) from err
SYNC_TIME_SCHEMA = vol.Schema(
@@ -257,7 +275,7 @@ SYNC_TIME_SCHEMA = vol.Schema(
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Register the BSB-LAN services."""
"""Register the BSB-Lan services."""
hass.services.async_register(
DOMAIN,
SERVICE_SET_HOT_WATER_SCHEDULE,

View File

@@ -22,8 +22,8 @@
"password": "[%key:component::bsblan::config::step::user::data_description::password%]",
"username": "[%key:component::bsblan::config::step::user::data_description::username%]"
},
"description": "A BSB-LAN device was discovered at {host}. Please provide credentials if required.",
"title": "BSB-LAN device discovered"
"description": "A BSB-Lan device was discovered at {host}. Please provide credentials if required.",
"title": "BSB-Lan device discovered"
},
"reauth_confirm": {
"data": {
@@ -36,7 +36,7 @@
"password": "[%key:component::bsblan::config::step::user::data_description::password%]",
"username": "[%key:component::bsblan::config::step::user::data_description::username%]"
},
"description": "The BSB-LAN integration needs to re-authenticate with {name}",
"description": "The BSB-Lan integration needs to re-authenticate with {name}",
"title": "[%key:common::config_flow::title::reauth%]"
},
"user": {
@@ -48,32 +48,24 @@
"username": "[%key:common::config_flow::data::username%]"
},
"data_description": {
"host": "The hostname or IP address of your BSB-LAN device.",
"passkey": "The passkey for your BSB-LAN device.",
"password": "The password for your BSB-LAN device.",
"port": "The port number of your BSB-LAN device.",
"username": "The username for your BSB-LAN device."
"host": "The hostname or IP address of your BSB-Lan device.",
"passkey": "The passkey for your BSB-Lan device.",
"password": "The password for your BSB-Lan device.",
"port": "The port number of your BSB-Lan device.",
"username": "The username for your BSB-Lan device."
},
"description": "Set up your BSB-LAN device to integrate with Home Assistant.",
"title": "Connect to the BSB-LAN device"
"description": "Set up your BSB-Lan device to integrate with Home Assistant.",
"title": "Connect to the BSB-Lan device"
}
}
},
"entity": {
"button": {
"sync_time": {
"name": "Sync time"
}
},
"sensor": {
"current_temperature": {
"name": "Current temperature"
},
"outside_temperature": {
"name": "Outside temperature"
},
"total_energy": {
"name": "Total energy"
}
}
},
@@ -81,12 +73,6 @@
"config_entry_not_loaded": {
"message": "The device `{device_name}` is not currently loaded or available"
},
"coordinator_auth_error": {
"message": "Authentication failed for BSB-LAN device"
},
"coordinator_connection_error": {
"message": "Error while establishing connection with BSB-LAN device at {host}"
},
"end_time_before_start_time": {
"message": "End time ({end_time}) must be after start time ({start_time})"
},
@@ -97,11 +83,14 @@
"message": "No configuration entry found for device: {device_id}"
},
"set_data_error": {
"message": "An error occurred while sending the data to the BSB-LAN device"
"message": "An error occurred while sending the data to the BSB-Lan device"
},
"set_operation_mode_error": {
"message": "An error occurred while setting the operation mode"
},
"set_preset_mode_error": {
"message": "Can't set preset mode to {preset_mode} when HVAC mode is not set to auto"
},
"set_schedule_failed": {
"message": "Failed to set hot water schedule: {error}"
},
@@ -112,7 +101,7 @@
"message": "Authentication failed while retrieving static device data"
},
"setup_connection_error": {
"message": "Failed to retrieve static device data from BSB-LAN device at {host}"
"message": "Failed to retrieve static device data from BSB-Lan device at {host}"
},
"setup_general_error": {
"message": "An unknown error occurred while retrieving static device data"
@@ -161,7 +150,7 @@
"name": "Set hot water schedule"
},
"sync_time": {
"description": "Synchronize Home Assistant time to the BSB-LAN device. Only updates if device time differs from Home Assistant time.",
"description": "Synchronize Home Assistant time to the BSB-Lan device. Only updates if device time differs from Home Assistant time.",
"fields": {
"device_id": {
"description": "The BSB-LAN device to sync time for.",

View File

@@ -63,7 +63,6 @@ class BSBLANWaterHeater(BSBLanDualCoordinatorEntity, WaterHeaterEntity):
"""Defines a BSBLAN water heater entity."""
_attr_name = None
_attr_operation_list = list(HA_TO_BSBLAN_OPERATION_MODE.keys())
_attr_supported_features = (
WaterHeaterEntityFeature.TARGET_TEMPERATURE
| WaterHeaterEntityFeature.OPERATION_MODE
@@ -74,6 +73,7 @@ class BSBLANWaterHeater(BSBLanDualCoordinatorEntity, WaterHeaterEntity):
"""Initialize BSBLAN water heater."""
super().__init__(data.fast_coordinator, data.slow_coordinator, data)
self._attr_unique_id = format_mac(data.device.MAC)
self._attr_operation_list = list(HA_TO_BSBLAN_OPERATION_MODE.keys())
# Set temperature unit
self._attr_temperature_unit = data.fast_coordinator.client.get_temperature_unit

View File

@@ -10,7 +10,6 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .coordinator import CompitConfigEntry, CompitDataUpdateCoordinator
PLATFORMS = [
Platform.BINARY_SENSOR,
Platform.CLIMATE,
Platform.NUMBER,
Platform.SELECT,

View File

@@ -1,189 +0,0 @@
"""Binary sensor platform for Compit integration."""
from dataclasses import dataclass
from compit_inext_api.consts import CompitParameter
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, MANUFACTURER_NAME
from .coordinator import CompitConfigEntry, CompitDataUpdateCoordinator
PARALLEL_UPDATES = 0
NO_SENSOR = "no_sensor"
ON_STATES = ["on", "yes", "charging", "alert", "exceeded"]
DESCRIPTIONS: dict[CompitParameter, BinarySensorEntityDescription] = {
CompitParameter.AIRING: BinarySensorEntityDescription(
key=CompitParameter.AIRING.value,
translation_key="airing",
device_class=BinarySensorDeviceClass.WINDOW,
entity_category=EntityCategory.DIAGNOSTIC,
),
CompitParameter.BATTERY_CHARGE_STATUS: BinarySensorEntityDescription(
key=CompitParameter.BATTERY_CHARGE_STATUS.value,
device_class=BinarySensorDeviceClass.BATTERY_CHARGING,
entity_category=EntityCategory.DIAGNOSTIC,
),
CompitParameter.CO2_ALERT: BinarySensorEntityDescription(
key=CompitParameter.CO2_ALERT.value,
translation_key="co2_alert",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
CompitParameter.CO2_LEVEL: BinarySensorEntityDescription(
key=CompitParameter.CO2_LEVEL.value,
translation_key="co2_level",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
CompitParameter.DUST_ALERT: BinarySensorEntityDescription(
key=CompitParameter.DUST_ALERT.value,
translation_key="dust_alert",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
CompitParameter.PUMP_STATUS: BinarySensorEntityDescription(
key=CompitParameter.PUMP_STATUS.value,
translation_key="pump_status",
device_class=BinarySensorDeviceClass.RUNNING,
entity_category=EntityCategory.DIAGNOSTIC,
),
CompitParameter.TEMPERATURE_ALERT: BinarySensorEntityDescription(
key=CompitParameter.TEMPERATURE_ALERT.value,
translation_key="temperature_alert",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
}
@dataclass(frozen=True, kw_only=True)
class CompitDeviceDescription:
"""Class to describe a Compit device."""
name: str
parameters: dict[CompitParameter, BinarySensorEntityDescription]
DEVICE_DEFINITIONS: dict[int, CompitDeviceDescription] = {
12: CompitDeviceDescription(
name="Nano Color",
parameters={
CompitParameter.CO2_LEVEL: DESCRIPTIONS[CompitParameter.CO2_LEVEL],
},
),
78: CompitDeviceDescription(
name="SPM - Nano Color 2",
parameters={
CompitParameter.DUST_ALERT: DESCRIPTIONS[CompitParameter.DUST_ALERT],
CompitParameter.TEMPERATURE_ALERT: DESCRIPTIONS[
CompitParameter.TEMPERATURE_ALERT
],
CompitParameter.CO2_ALERT: DESCRIPTIONS[CompitParameter.CO2_ALERT],
},
),
223: CompitDeviceDescription(
name="Nano Color 2",
parameters={
CompitParameter.AIRING: DESCRIPTIONS[CompitParameter.AIRING],
CompitParameter.CO2_LEVEL: DESCRIPTIONS[CompitParameter.CO2_LEVEL],
},
),
225: CompitDeviceDescription(
name="SPM - Nano Color",
parameters={
CompitParameter.CO2_LEVEL: DESCRIPTIONS[CompitParameter.CO2_LEVEL],
},
),
226: CompitDeviceDescription(
name="AF-1",
parameters={
CompitParameter.BATTERY_CHARGE_STATUS: DESCRIPTIONS[
CompitParameter.BATTERY_CHARGE_STATUS
],
CompitParameter.PUMP_STATUS: DESCRIPTIONS[CompitParameter.PUMP_STATUS],
},
),
}
async def async_setup_entry(
hass: HomeAssistant,
entry: CompitConfigEntry,
async_add_devices: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Compit binary sensor entities from a config entry."""
coordinator = entry.runtime_data
async_add_devices(
CompitBinarySensor(
coordinator,
device_id,
device_definition.name,
code,
entity_description,
)
for device_id, device in coordinator.connector.all_devices.items()
if (device_definition := DEVICE_DEFINITIONS.get(device.definition.code))
for code, entity_description in device_definition.parameters.items()
if coordinator.connector.get_current_value(device_id, code) != NO_SENSOR
)
class CompitBinarySensor(
CoordinatorEntity[CompitDataUpdateCoordinator], BinarySensorEntity
):
"""Representation of a Compit binary sensor entity."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: CompitDataUpdateCoordinator,
device_id: int,
device_name: str,
parameter_code: CompitParameter,
entity_description: BinarySensorEntityDescription,
) -> None:
"""Initialize the binary sensor entity."""
super().__init__(coordinator)
self.device_id = device_id
self.entity_description = entity_description
self._attr_unique_id = f"{device_id}_{entity_description.key}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, str(device_id))},
name=device_name,
manufacturer=MANUFACTURER_NAME,
model=device_name,
)
self.parameter_code = parameter_code
@property
def available(self) -> bool:
"""Return if entity is available."""
return (
super().available
and self.coordinator.connector.get_device(self.device_id) is not None
)
@property
def is_on(self) -> bool | None:
"""Return the state of the binary sensor."""
value = self.coordinator.connector.get_current_value(
self.device_id, self.parameter_code
)
if value is None:
return None
return value in ON_STATES

View File

@@ -1,25 +1,5 @@
{
"entity": {
"binary_sensor": {
"airing": {
"default": "mdi:window-open-variant"
},
"co2_alert": {
"default": "mdi:alert"
},
"co2_level": {
"default": "mdi:molecule-co2"
},
"dust_alert": {
"default": "mdi:alert"
},
"pump_status": {
"default": "mdi:pump"
},
"temperature_alert": {
"default": "mdi:alert"
}
},
"number": {
"boiler_target_temperature": {
"default": "mdi:water-boiler"

View File

@@ -33,26 +33,6 @@
}
},
"entity": {
"binary_sensor": {
"airing": {
"name": "Airing"
},
"co2_alert": {
"name": "CO2 alert"
},
"co2_level": {
"name": "CO2 level"
},
"dust_alert": {
"name": "Dust alert"
},
"pump_status": {
"name": "Pump status"
},
"temperature_alert": {
"name": "Temperature alert"
}
},
"number": {
"boiler_target_temperature": {
"name": "Boiler target temperature"

View File

@@ -28,7 +28,6 @@ _LOGGER = logging.getLogger(__name__)
PLATFORMS = [
Platform.BINARY_SENSOR,
Platform.CLIMATE,
Platform.SELECT,
Platform.SENSOR,
Platform.SWITCH,
Platform.WATER_HEATER,

View File

@@ -5,7 +5,7 @@ from typing import Any
from pyeconet.equipment import EquipmentType
from pyeconet.equipment.thermostat import (
Thermostat,
ThermostatFanSpeed,
ThermostatFanMode,
ThermostatOperationMode,
)
@@ -16,7 +16,6 @@ from homeassistant.components.climate import (
FAN_HIGH,
FAN_LOW,
FAN_MEDIUM,
FAN_TOP,
ClimateEntity,
ClimateEntityFeature,
HVACMode,
@@ -42,16 +41,13 @@ HA_STATE_TO_ECONET = {
if key != ThermostatOperationMode.EMERGENCY_HEAT
}
ECONET_FAN_SPEED_TO_HA = {
ThermostatFanSpeed.AUTO: FAN_AUTO,
ThermostatFanSpeed.LOW: FAN_LOW,
ThermostatFanSpeed.MEDIUM: FAN_MEDIUM,
ThermostatFanSpeed.HIGH: FAN_HIGH,
ThermostatFanSpeed.MAX: FAN_TOP,
}
HA_FAN_STATE_TO_ECONET_FAN_SPEED = {
value: key for key, value in ECONET_FAN_SPEED_TO_HA.items()
ECONET_FAN_STATE_TO_HA = {
ThermostatFanMode.AUTO: FAN_AUTO,
ThermostatFanMode.LOW: FAN_LOW,
ThermostatFanMode.MEDIUM: FAN_MEDIUM,
ThermostatFanMode.HIGH: FAN_HIGH,
}
HA_FAN_STATE_TO_ECONET = {value: key for key, value in ECONET_FAN_STATE_TO_HA.items()}
SUPPORT_FLAGS_THERMOSTAT = (
ClimateEntityFeature.TARGET_TEMPERATURE
@@ -107,7 +103,7 @@ class EcoNetThermostat(EcoNetEntity[Thermostat], ClimateEntity):
return self._econet.set_point
@property
def current_humidity(self) -> int | None:
def current_humidity(self) -> int:
"""Return the current humidity."""
return self._econet.humidity
@@ -153,7 +149,7 @@ class EcoNetThermostat(EcoNetEntity[Thermostat], ClimateEntity):
@property
def hvac_mode(self) -> HVACMode:
"""Return hvac operation i.e. heat, cool, mode.
"""Return hvac operation ie. heat, cool, mode.
Needs to be one of HVAC_MODE_*.
"""
@@ -178,35 +174,35 @@ class EcoNetThermostat(EcoNetEntity[Thermostat], ClimateEntity):
@property
def fan_mode(self) -> str:
"""Return the current fan mode."""
econet_fan_speed = self._econet.fan_speed
econet_fan_mode = self._econet.fan_mode
# Remove this after we figure out how to handle med lo and med hi
if econet_fan_speed in [ThermostatFanSpeed.MEDHI, ThermostatFanSpeed.MEDLO]:
econet_fan_speed = ThermostatFanSpeed.MEDIUM
if econet_fan_mode in [ThermostatFanMode.MEDHI, ThermostatFanMode.MEDLO]:
econet_fan_mode = ThermostatFanMode.MEDIUM
_current_fan_speed = FAN_AUTO
if econet_fan_speed is not None:
_current_fan_speed = ECONET_FAN_SPEED_TO_HA[econet_fan_speed]
return _current_fan_speed
_current_fan_mode = FAN_AUTO
if econet_fan_mode is not None:
_current_fan_mode = ECONET_FAN_STATE_TO_HA[econet_fan_mode]
return _current_fan_mode
@property
def fan_modes(self) -> list[str]:
"""Return the fan modes."""
# Remove the MEDLO MEDHI once we figure out how to handle it
return [
ECONET_FAN_SPEED_TO_HA[mode]
for mode in self._econet.fan_speeds
ECONET_FAN_STATE_TO_HA[mode]
for mode in self._econet.fan_modes
# Remove the MEDLO MEDHI once we figure out how to handle it
if mode
not in [
ThermostatFanSpeed.UNKNOWN,
ThermostatFanSpeed.MEDLO,
ThermostatFanSpeed.MEDHI,
ThermostatFanMode.UNKNOWN,
ThermostatFanMode.MEDLO,
ThermostatFanMode.MEDHI,
]
]
def set_fan_mode(self, fan_mode: str) -> None:
"""Set the fan mode."""
self._econet.set_fan_speed(HA_FAN_STATE_TO_ECONET_FAN_SPEED[fan_mode])
self._econet.set_fan_mode(HA_FAN_STATE_TO_ECONET[fan_mode])
@property
def min_temp(self) -> float:

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_push",
"loggers": ["paho_mqtt", "pyeconet"],
"requirements": ["pyeconet==0.2.1"]
"requirements": ["pyeconet==0.1.28"]
}

View File

@@ -1,53 +0,0 @@
"""Support for Rheem EcoNet thermostats with variable fan speeds and fan modes."""
from __future__ import annotations
from pyeconet.equipment import EquipmentType
from pyeconet.equipment.thermostat import Thermostat, ThermostatFanMode
from homeassistant.components.select import SelectEntity
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import EconetConfigEntry
from .entity import EcoNetEntity
async def async_setup_entry(
hass: HomeAssistant,
entry: EconetConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the econet thermostat select entity."""
equipment = entry.runtime_data
async_add_entities(
EconetFanModeSelect(thermostat)
for thermostat in equipment[EquipmentType.THERMOSTAT]
if thermostat.supports_fan_mode
)
class EconetFanModeSelect(EcoNetEntity[Thermostat], SelectEntity):
"""Select entity."""
def __init__(self, thermostat: Thermostat) -> None:
"""Initialize EcoNet platform."""
super().__init__(thermostat)
self._attr_name = f"{thermostat.device_name} fan mode"
self._attr_unique_id = (
f"{thermostat.device_id}_{thermostat.device_name}_fan_mode"
)
@property
def options(self) -> list[str]:
"""Return available select options."""
return [e.value for e in self._econet.fan_modes]
@property
def current_option(self) -> str:
"""Return current select option."""
return self._econet.fan_mode.value
def select_option(self, option: str) -> None:
"""Set the selected option."""
self._econet.set_fan_mode(ThermostatFanMode.by_string(option))

View File

@@ -23,20 +23,19 @@ async def async_setup_entry(
entry: EconetConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the econet thermostat switch entity."""
"""Set up the ecobee thermostat switch entity."""
equipment = entry.runtime_data
async_add_entities(
EcoNetSwitchAuxHeatOnly(thermostat)
for thermostat in equipment[EquipmentType.THERMOSTAT]
if ThermostatOperationMode.EMERGENCY_HEAT in thermostat.modes
)
class EcoNetSwitchAuxHeatOnly(EcoNetEntity[Thermostat], SwitchEntity):
"""Representation of an aux_heat_only EcoNet switch."""
"""Representation of a aux_heat_only EcoNet switch."""
def __init__(self, thermostat: Thermostat) -> None:
"""Initialize EcoNet platform."""
"""Initialize EcoNet ventilator platform."""
super().__init__(thermostat)
self._attr_name = f"{thermostat.device_name} emergency heat"
self._attr_unique_id = (

View File

@@ -8,24 +8,17 @@ from typing import TYPE_CHECKING, Any
from deebot_client.capabilities import Capabilities, DeviceType
from deebot_client.device import Device
from deebot_client.events import (
CachedMapInfoEvent,
FanSpeedEvent,
RoomsEvent,
StateEvent,
)
from deebot_client.events.map import Map
from deebot_client.models import CleanAction, CleanMode, State
from deebot_client.events import FanSpeedEvent, RoomsEvent, StateEvent
from deebot_client.models import CleanAction, CleanMode, Room, State
import sucks
from homeassistant.components.vacuum import (
Segment,
StateVacuumEntity,
StateVacuumEntityDescription,
VacuumActivity,
VacuumEntityFeature,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util import slugify
@@ -36,7 +29,6 @@ from .entity import EcovacsEntity, EcovacsLegacyEntity
from .util import get_name_key
_LOGGER = logging.getLogger(__name__)
_SEGMENTS_SEPARATOR = "_"
ATTR_ERROR = "error"
@@ -226,8 +218,7 @@ class EcovacsVacuum(
"""Initialize the vacuum."""
super().__init__(device, device.capabilities)
self._room_event: RoomsEvent | None = None
self._maps: dict[str, Map] = {}
self._rooms: list[Room] = []
if fan_speed := self._capability.fan_speed:
self._attr_supported_features |= VacuumEntityFeature.FAN_SPEED
@@ -235,13 +226,14 @@ class EcovacsVacuum(
get_name_key(level) for level in fan_speed.types
]
if self._capability.map and self._capability.clean.action.area:
self._attr_supported_features |= VacuumEntityFeature.CLEAN_AREA
async def async_added_to_hass(self) -> None:
"""Set up the event listeners now that hass is ready."""
await super().async_added_to_hass()
async def on_rooms(event: RoomsEvent) -> None:
self._rooms = event.rooms
self.async_write_ha_state()
async def on_status(event: StateEvent) -> None:
self._attr_activity = _STATE_TO_VACUUM_STATE[event.state]
self.async_write_ha_state()
@@ -257,20 +249,8 @@ class EcovacsVacuum(
self._subscribe(self._capability.fan_speed.event, on_fan_speed)
if map_caps := self._capability.map:
async def on_rooms(event: RoomsEvent) -> None:
self._room_event = event
self._check_segments_changed()
self.async_write_ha_state()
self._subscribe(map_caps.rooms.event, on_rooms)
async def on_map_info(event: CachedMapInfoEvent) -> None:
self._maps = {map_obj.id: map_obj for map_obj in event.maps}
self._check_segments_changed()
self._subscribe(map_caps.cached_info.event, on_map_info)
@property
def extra_state_attributes(self) -> Mapping[str, Any] | None:
"""Return entity specific state attributes.
@@ -279,10 +259,7 @@ class EcovacsVacuum(
is lowercase snake_case.
"""
rooms: dict[str, Any] = {}
if self._room_event is None:
return rooms
for room in self._room_event.rooms:
for room in self._rooms:
# convert room name to snake_case to meet the convention
room_name = slugify(room.name)
room_values = rooms.get(room_name)
@@ -397,116 +374,3 @@ class EcovacsVacuum(
)
return await self._device.execute_command(position_commands[0])
@callback
def _check_segments_changed(self) -> None:
"""Check if segments have changed and create repair issue."""
last_seen = self.last_seen_segments
if last_seen is None:
return
last_seen_ids = {seg.id for seg in last_seen}
current_ids = {seg.id for seg in self._get_segments()}
if current_ids != last_seen_ids:
self.async_create_segments_issue()
def _get_segments(self) -> list[Segment]:
"""Get the segments that can be cleaned."""
last_seen = self.last_seen_segments or []
if self._room_event is None or not self._maps:
# If we don't have the necessary information to determine segments, return the last
# seen segments to avoid temporarily losing all segments until we get the necessary
# information, which could cause unnecessary issues to be created
return last_seen
map_id = self._room_event.map_id
if (map_obj := self._maps.get(map_id)) is None:
_LOGGER.warning("Map ID %s not found in available maps", map_id)
return []
id_prefix = f"{map_id}{_SEGMENTS_SEPARATOR}"
other_map_ids = {
map_obj.id
for map_obj in self._maps.values()
if map_obj.id != self._room_event.map_id
}
# Include segments from the current map and any segments from other maps that were
# previously seen, as we want to continue showing segments from other maps for
# mapping purposes
segments = [
seg for seg in last_seen if _split_composite_id(seg.id)[0] in other_map_ids
]
segments.extend(
Segment(
id=f"{id_prefix}{room.id}",
name=room.name,
group=map_obj.name,
)
for room in self._room_event.rooms
)
return segments
async def async_get_segments(self) -> list[Segment]:
"""Get the segments that can be cleaned."""
return self._get_segments()
async def async_clean_segments(self, segment_ids: list[str], **kwargs: Any) -> None:
"""Perform an area clean.
Only cleans segments from the currently selected map.
"""
if not self._maps:
_LOGGER.warning("No map information available, cannot clean segments")
return
valid_room_ids: list[int | float] = []
for composite_id in segment_ids:
map_id, segment_id = _split_composite_id(composite_id)
if (map_obj := self._maps.get(map_id)) is None:
_LOGGER.warning("Map ID %s not found in available maps", map_id)
continue
if not map_obj.using:
room_name = next(
(
segment.name
for segment in self.last_seen_segments or []
if segment.id == composite_id
),
"",
)
_LOGGER.warning(
'Map "%s" is not currently selected, skipping segment "%s" (%s)',
map_obj.name,
room_name,
segment_id,
)
continue
valid_room_ids.append(int(segment_id))
if not valid_room_ids:
_LOGGER.warning(
"No valid segments to clean after validation, skipping clean segments command"
)
return
if TYPE_CHECKING:
# Supported feature is only added if clean.action.area is not None
assert self._capability.clean.action.area is not None
await self._device.execute_command(
self._capability.clean.action.area(
CleanMode.SPOT_AREA,
valid_room_ids,
1,
)
)
@callback
def _split_composite_id(composite_id: str) -> tuple[str, str]:
"""Split a composite ID into its components."""
map_id, _, segment_id = composite_id.partition(_SEGMENTS_SEPARATOR)
return map_id, segment_id

View File

@@ -4,23 +4,17 @@ from typing import Any
import voluptuous as vol
from homeassistant.components import usb
from homeassistant.components.usb import (
human_readable_device_name,
usb_unique_id_from_service_info,
)
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import ATTR_MANUFACTURER, CONF_DEVICE, CONF_NAME
from homeassistant.const import CONF_DEVICE
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.selector import (
SelectSelector,
SelectSelectorConfig,
SelectSelectorMode,
)
from homeassistant.helpers.service_info.usb import UsbServiceInfo
from . import dongle
from .const import DOMAIN, ERROR_INVALID_DONGLE_PATH, LOGGER, MANUFACTURER
from .const import DOMAIN, ERROR_INVALID_DONGLE_PATH, LOGGER
MANUAL_SCHEMA = vol.Schema(
{
@@ -37,48 +31,8 @@ class EnOceanFlowHandler(ConfigFlow, domain=DOMAIN):
def __init__(self) -> None:
"""Initialize the EnOcean config flow."""
self.data: dict[str, Any] = {}
async def async_step_usb(self, discovery_info: UsbServiceInfo) -> ConfigFlowResult:
"""Handle usb discovery."""
unique_id = usb_unique_id_from_service_info(discovery_info)
await self.async_set_unique_id(unique_id)
self._abort_if_unique_id_configured(
updates={CONF_DEVICE: discovery_info.device}
)
discovery_info.device = await self.hass.async_add_executor_job(
usb.get_serial_by_id, discovery_info.device
)
self.data[CONF_DEVICE] = discovery_info.device
self.context["title_placeholders"] = {
CONF_NAME: human_readable_device_name(
discovery_info.device,
discovery_info.serial_number,
discovery_info.manufacturer,
discovery_info.description,
discovery_info.vid,
discovery_info.pid,
)
}
return await self.async_step_usb_confirm()
async def async_step_usb_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle USB Discovery confirmation."""
if user_input is not None:
return await self.async_step_manual({CONF_DEVICE: self.data[CONF_DEVICE]})
self._set_confirm_only()
return self.async_show_form(
step_id="usb_confirm",
description_placeholders={
ATTR_MANUFACTURER: MANUFACTURER,
CONF_DEVICE: self.data.get(CONF_DEVICE, ""),
},
)
self.dongle_path = None
self.discovery_info = None
async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
"""Import a yaml configuration."""
@@ -150,4 +104,4 @@ class EnOceanFlowHandler(ConfigFlow, domain=DOMAIN):
def create_enocean_entry(self, user_input):
"""Create an entry for the provided configuration."""
return self.async_create_entry(title=MANUFACTURER, data=user_input)
return self.async_create_entry(title="EnOcean", data=user_input)

View File

@@ -6,8 +6,6 @@ from homeassistant.const import Platform
DOMAIN = "enocean"
MANUFACTURER = "EnOcean"
ERROR_INVALID_DONGLE_PATH = "invalid_dongle_path"
SIGNAL_RECEIVE_MESSAGE = "enocean.receive_message"

View File

@@ -3,19 +3,10 @@
"name": "EnOcean",
"codeowners": [],
"config_flow": true,
"dependencies": ["usb"],
"documentation": "https://www.home-assistant.io/integrations/enocean",
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["enocean"],
"requirements": ["enocean==0.50"],
"single_config_entry": true,
"usb": [
{
"description": "*usb 300*",
"manufacturer": "*enocean*",
"pid": "6001",
"vid": "0403"
}
]
"single_config_entry": true
}

View File

@@ -25,9 +25,6 @@
"device": "[%key:component::enocean::config::step::detect::data_description::device%]"
},
"description": "Enter the path to your EnOcean USB dongle."
},
"usb_confirm": {
"description": "{manufacturer} USB dongle detected at {device}. Do you want to set up this device?"
}
}
},

View File

@@ -300,23 +300,16 @@ class RuntimeEntryData:
needed_platforms.add(Platform.BINARY_SENSOR)
needed_platforms.add(Platform.SELECT)
needed_platforms.update(INFO_TYPE_TO_PLATFORM[type(info)] for info in infos)
await self._ensure_platforms_loaded(hass, entry, needed_platforms)
# Make a dict of the EntityInfo by type and send
# them to the listeners for each specific EntityInfo type
info_types_to_platform = INFO_TYPE_TO_PLATFORM
infos_by_type: defaultdict[type[EntityInfo], list[EntityInfo]] = defaultdict(
list
)
for info in infos:
info_type = type(info)
if platform := info_types_to_platform.get(info_type):
needed_platforms.add(platform)
infos_by_type[info_type].append(info)
else:
_LOGGER.warning(
"Entity type %s is not supported in this version of Home Assistant",
info_type,
)
await self._ensure_platforms_loaded(hass, entry, needed_platforms)
infos_by_type[type(info)].append(info)
for type_, callbacks in self.entity_info_callbacks.items():
# If all entities for a type are removed, we

View File

@@ -17,7 +17,7 @@
"mqtt": ["esphome/discover/#"],
"quality_scale": "platinum",
"requirements": [
"aioesphomeapi==44.1.0",
"aioesphomeapi==44.0.0",
"esphome-dashboard-api==1.3.0",
"bleak-esphome==3.6.0"
],

View File

@@ -12,7 +12,11 @@ import re
from typing import Any, TypedDict, cast
from fritzconnection import FritzConnection
from fritzconnection.core.exceptions import FritzActionError
from fritzconnection.core.exceptions import (
FritzActionError,
FritzConnectionException,
FritzSecurityError,
)
from fritzconnection.lib.fritzcall import FritzCall
from fritzconnection.lib.fritzhosts import FritzHosts
from fritzconnection.lib.fritzstatus import FritzStatus
@@ -43,7 +47,6 @@ from .const import (
DEFAULT_SSL,
DEFAULT_USERNAME,
DOMAIN,
FRITZ_AUTH_EXCEPTIONS,
FRITZ_EXCEPTIONS,
SCAN_INTERVAL,
MeshRoles,
@@ -422,18 +425,12 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
hosts_info: list[HostInfo] = []
try:
try:
hosts_attributes = cast(
list[HostAttributes],
await self.hass.async_add_executor_job(
self.fritz_hosts.get_hosts_attributes
),
hosts_attributes = await self.hass.async_add_executor_job(
self.fritz_hosts.get_hosts_attributes
)
except FritzActionError:
hosts_info = cast(
list[HostInfo],
await self.hass.async_add_executor_job(
self.fritz_hosts.get_hosts_info
),
hosts_info = await self.hass.async_add_executor_job(
self.fritz_hosts.get_hosts_info
)
except Exception as ex:
if not self.hass.is_stopping:
@@ -589,7 +586,7 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
topology := await self.hass.async_add_executor_job(
self.fritz_hosts.get_mesh_topology
)
) or not isinstance(topology, dict):
):
raise Exception("Mesh supported but empty topology reported") # noqa: TRY002
except FritzActionError:
self.mesh_role = MeshRoles.SLAVE
@@ -745,7 +742,7 @@ class AvmWrapper(FritzBoxTools):
**kwargs,
)
)
except FRITZ_AUTH_EXCEPTIONS:
except FritzSecurityError:
_LOGGER.exception(
"Authorization Error: Please check the provided credentials and"
" verify that you can log into the web interface"
@@ -758,6 +755,12 @@ class AvmWrapper(FritzBoxTools):
action_name,
)
return {}
except FritzConnectionException:
_LOGGER.exception(
"Connection Error: Please check the device is properly configured"
" for remote login"
)
return {}
return result
async def async_get_upnp_configuration(self) -> dict[str, Any]:

View File

@@ -1,7 +1,7 @@
{
"domain": "fritzbox_callmonitor",
"name": "FRITZ!Box Call Monitor",
"codeowners": [],
"codeowners": ["@cdce8p"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/fritzbox_callmonitor",
"integration_type": "device",

View File

@@ -1752,15 +1752,15 @@ class FanSpeedTrait(_Trait):
"""Initialize a trait for a state."""
super().__init__(hass, state, config)
if state.domain == fan.DOMAIN:
speed_count = round(
100 / (self.state.attributes.get(fan.ATTR_PERCENTAGE_STEP) or 1.0)
speed_count = min(
FAN_SPEED_MAX_SPEED_COUNT,
round(
100 / (self.state.attributes.get(fan.ATTR_PERCENTAGE_STEP) or 1.0)
),
)
if speed_count <= FAN_SPEED_MAX_SPEED_COUNT:
self._ordered_speed = [
f"{speed}/{speed_count}" for speed in range(1, speed_count + 1)
]
else:
self._ordered_speed = []
self._ordered_speed = [
f"{speed}/{speed_count}" for speed in range(1, speed_count + 1)
]
@staticmethod
def supported(domain, features, device_class, _):
@@ -1786,11 +1786,7 @@ class FanSpeedTrait(_Trait):
result.update(
{
"reversible": reversible,
# supportsFanSpeedPercent is mutually exclusive with
# availableFanSpeeds, where supportsFanSpeedPercent takes
# precedence. Report it only when step speeds are not
# supported so Google renders a percent slider (1-100%).
"supportsFanSpeedPercent": not self._ordered_speed,
"supportsFanSpeedPercent": True,
}
)
@@ -1836,12 +1832,10 @@ class FanSpeedTrait(_Trait):
if domain == fan.DOMAIN:
percent = attrs.get(fan.ATTR_PERCENTAGE) or 0
if self._ordered_speed:
response["currentFanSpeedSetting"] = percentage_to_ordered_list_item(
self._ordered_speed, percent
)
else:
response["currentFanSpeedPercent"] = percent
response["currentFanSpeedPercent"] = percent
response["currentFanSpeedSetting"] = percentage_to_ordered_list_item(
self._ordered_speed, percent
)
return response
@@ -1861,7 +1855,7 @@ class FanSpeedTrait(_Trait):
)
if domain == fan.DOMAIN:
if self._ordered_speed and (fan_speed := params.get("fanSpeed")):
if fan_speed := params.get("fanSpeed"):
fan_speed_percent = ordered_list_item_to_percentage(
self._ordered_speed, fan_speed
)

View File

@@ -181,7 +181,8 @@ class HassIOIngress(HomeAssistantView):
skip_auto_headers={hdrs.CONTENT_TYPE},
) as result:
headers = _response_header(result)
content_length_int = 0
content_length = result.headers.get(hdrs.CONTENT_LENGTH, UNDEFINED)
# Avoid parsing content_type in simple cases for better performance
if maybe_content_type := result.headers.get(hdrs.CONTENT_TYPE):
content_type: str = (maybe_content_type.partition(";"))[0].strip()
@@ -189,30 +190,17 @@ class HassIOIngress(HomeAssistantView):
# default value according to RFC 2616
content_type = "application/octet-stream"
# Empty body responses (304, 204, HEAD, etc.) should not be streamed,
# otherwise aiohttp < 3.9.0 may generate an invalid "0\r\n\r\n" chunk
# This also avoids setting content_type for empty responses.
if must_be_empty_body(request.method, result.status):
# If upstream contains content-type, preserve it (e.g. for HEAD requests)
# Note: This still is omitting content-length. We can't simply forward
# the upstream length since the proxy might change the body length
# (e.g. due to compression).
if maybe_content_type:
headers[hdrs.CONTENT_TYPE] = content_type
return web.Response(
headers=headers,
status=result.status,
)
# Simple request
content_length_int = 0
content_length = result.headers.get(hdrs.CONTENT_LENGTH, UNDEFINED)
if (
if (empty_body := must_be_empty_body(result.method, result.status)) or (
content_length is not UNDEFINED
and (content_length_int := int(content_length))
<= MAX_SIMPLE_RESPONSE_SIZE
):
body = await result.read()
# Return Response
if empty_body:
body = None
else:
body = await result.read()
simple_response = web.Response(
headers=headers,
status=result.status,

View File

@@ -7,5 +7,6 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["pyhik"],
"quality_scale": "legacy",
"requirements": ["pyHik==0.4.2"]
}

View File

@@ -1,75 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: |
This integration does not provide additional actions.
appropriate-polling:
status: exempt
comment: |
This integration uses local_push and does not poll.
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: todo
docs-actions:
status: exempt
comment: |
This integration does not provide additional actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup: done
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: |
This integration does not provide additional actions.
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: |
This integration has no configuration parameters.
docs-installation-parameters: todo
entity-unavailable: todo
integration-owner: done
log-when-unavailable: todo
parallel-updates: done
reauthentication-flow: todo
test-coverage: todo
# Gold
devices: todo
diagnostics: todo
discovery: todo
discovery-update-info: todo
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices: todo
entity-category: todo
entity-device-class: todo
entity-disabled-by-default: todo
entity-translations: todo
exception-translations: todo
icon-translations: todo
reconfiguration-flow: todo
repair-issues: todo
stale-devices: todo
# Platinum
async-dependency: todo
inject-websession: todo
strict-typing: todo

View File

@@ -11,12 +11,7 @@ from pyHomee import (
)
import voluptuous as vol
from homeassistant.config_entries import (
SOURCE_USER,
ConfigEntryState,
ConfigFlow,
ConfigFlowResult,
)
from homeassistant.config_entries import SOURCE_USER, ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
@@ -118,22 +113,7 @@ class HomeeConfigFlow(ConfigFlow, domain=DOMAIN):
if discovery_info.ip_address.version == 6:
return self.async_abort(reason="ipv6_address")
# If an already configured homee reports with a second IP, abort.
existing_entry = await self.async_set_unique_id(self._name)
if (
existing_entry
and existing_entry.state == ConfigEntryState.LOADED
and existing_entry.runtime_data.connected
and existing_entry.data[CONF_HOST] != self._host
):
_LOGGER.debug(
"Aborting config flow for discovered homee with IP %s "
"since it is already configured at IP %s",
self._host,
existing_entry.data[CONF_HOST],
)
return self.async_abort(reason="2nd_ip_address")
await self.async_set_unique_id(self._name)
self._abort_if_unique_id_configured(updates={CONF_HOST: self._host})
# Cause an auth-error to see if homee is reachable.

View File

@@ -20,7 +20,6 @@ PARALLEL_UPDATES = 0
REMOTE_PROFILES = [
NodeProfile.REMOTE,
NodeProfile.ONE_BUTTON_REMOTE,
NodeProfile.TWO_BUTTON_REMOTE,
NodeProfile.THREE_BUTTON_REMOTE,
NodeProfile.FOUR_BUTTON_REMOTE,

View File

@@ -1,7 +1,6 @@
{
"config": {
"abort": {
"2nd_ip_address": "Your homee is already connected using another IP address",
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",

View File

@@ -1,27 +0,0 @@
"""Diagnostics support for HomematicIP Cloud."""
from __future__ import annotations
import json
from typing import Any
from homematicip.base.helpers import handle_config
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.core import HomeAssistant
from .hap import HomematicIPConfigEntry
TO_REDACT_CONFIG = {"city", "latitude", "longitude", "refreshToken"}
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, config_entry: HomematicIPConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
hap = config_entry.runtime_data
json_state = await hap.home.download_configuration_async()
anonymized = handle_config(json_state, anonymize=True)
config = json.loads(anonymized)
return async_redact_data(config, TO_REDACT_CONFIG)

View File

@@ -6,7 +6,6 @@ from enum import Enum
import logging
from typing import Any
from bleak.backends.scanner import AdvertisementData
from HueBLE import ConnectionError, HueBleError, HueBleLight, PairingError
import voluptuous as vol
@@ -27,17 +26,6 @@ from .light import get_available_color_modes
_LOGGER = logging.getLogger(__name__)
SERVICE_UUID = SERVICE_DATA_UUID = "0000fe0f-0000-1000-8000-00805f9b34fb"
def device_filter(advertisement_data: AdvertisementData) -> bool:
"""Return True if the device is supported."""
return (
SERVICE_UUID in advertisement_data.service_uuids
and SERVICE_DATA_UUID in advertisement_data.service_data
)
async def validate_input(hass: HomeAssistant, address: str) -> Error | None:
"""Return error if cannot connect and validate."""
@@ -82,66 +70,28 @@ class HueBleConfigFlow(ConfigFlow, domain=DOMAIN):
def __init__(self) -> None:
"""Initialize the config flow."""
self._discovered_devices: dict[str, bluetooth.BluetoothServiceInfoBleak] = {}
self._discovery_info: bluetooth.BluetoothServiceInfoBleak | None = None
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the user step to pick discovered device."""
errors: dict[str, str] = {}
if user_input is not None:
unique_id = dr.format_mac(user_input[CONF_MAC])
# Don't raise on progress because there may be discovery flows
await self.async_set_unique_id(unique_id, raise_on_progress=False)
# Guard against the user selecting a device which has been configured by
# another flow.
self._abort_if_unique_id_configured()
self._discovery_info = self._discovered_devices[user_input[CONF_MAC]]
return await self.async_step_confirm()
current_addresses = self._async_current_ids(include_ignore=False)
for discovery in bluetooth.async_discovered_service_info(self.hass):
if (
discovery.address in current_addresses
or discovery.address in self._discovered_devices
or not device_filter(discovery.advertisement)
):
continue
self._discovered_devices[discovery.address] = discovery
if not self._discovered_devices:
return self.async_abort(reason="no_devices_found")
data_schema = vol.Schema(
{
vol.Required(CONF_MAC): vol.In(
{
service_info.address: (
f"{service_info.name} ({service_info.address})"
)
for service_info in self._discovered_devices.values()
}
),
}
)
return self.async_show_form(
step_id="user",
data_schema=data_schema,
errors=errors,
)
async def async_step_bluetooth(
self, discovery_info: bluetooth.BluetoothServiceInfoBleak
) -> ConfigFlowResult:
"""Handle a flow initialized by the home assistant scanner."""
_LOGGER.debug(
"HA found light %s. Use user flow to show in UI and connect",
"HA found light %s. Will show in UI but not auto connect",
discovery_info.name,
)
return self.async_abort(reason="discovery_unsupported")
unique_id = dr.format_mac(discovery_info.address)
await self.async_set_unique_id(unique_id)
self._abort_if_unique_id_configured()
name = f"{discovery_info.name} ({discovery_info.address})"
self.context.update({"title_placeholders": {CONF_NAME: name}})
self._discovery_info = discovery_info
return await self.async_step_confirm()
async def async_step_confirm(
self, user_input: dict[str, Any] | None = None
@@ -153,10 +103,7 @@ class HueBleConfigFlow(ConfigFlow, domain=DOMAIN):
if user_input is not None:
unique_id = dr.format_mac(self._discovery_info.address)
# Don't raise on progress because there may be discovery flows
await self.async_set_unique_id(unique_id, raise_on_progress=False)
# Guard against the user selecting a device which has been configured by
# another flow.
await self.async_set_unique_id(unique_id)
self._abort_if_unique_id_configured()
error = await validate_input(self.hass, unique_id)
if error:

View File

@@ -2,8 +2,7 @@
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"discovery_unsupported": "Discovery flow is not supported by the Hue BLE integration.",
"no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]"
"not_implemented": "This integration can only be set up via discovery."
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
@@ -15,16 +14,7 @@
},
"step": {
"confirm": {
"description": "Do you want to set up {name} ({mac})?\nMake sure the light is [made discoverable to voice assistants]({url_pairing_mode}) or has been [factory reset]({url_factory_reset})."
},
"user": {
"data": {
"mac": "[%key:common::config_flow::data::device%]"
},
"data_description": {
"mac": "Select the Hue device you want to set up"
},
"description": "[%key:component::bluetooth::config::step::user::description%]"
"description": "Do you want to set up {name} ({mac})?. Make sure the light is [made discoverable to voice assistants]({url_pairing_mode}) or has been [factory reset]({url_factory_reset})."
}
}
}

View File

@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"quality_scale": "platinum",
"requirements": ["imgw_pib==2.0.2"]
"requirements": ["imgw_pib==2.0.1"]
}

View File

@@ -24,7 +24,6 @@
"hydrological_alert": {
"name": "Hydrological alert",
"state": {
"exceeding_the_alarm_level": "Exceeding the alarm level",
"exceeding_the_warning_level": "Exceeding the warning level",
"hydrological_drought": "Hydrological drought",
"no_alert": "No alert",

View File

@@ -1,4 +1,4 @@
"""The Liebherr integration."""
"""The liebherr integration."""
from __future__ import annotations
@@ -17,12 +17,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .coordinator import LiebherrConfigEntry, LiebherrCoordinator
PLATFORMS: list[Platform] = [
Platform.NUMBER,
Platform.SELECT,
Platform.SENSOR,
Platform.SWITCH,
]
PLATFORMS: list[Platform] = [Platform.NUMBER, Platform.SENSOR, Platform.SWITCH]
async def async_setup_entry(hass: HomeAssistant, entry: LiebherrConfigEntry) -> bool:

View File

@@ -1,55 +1,5 @@
{
"entity": {
"select": {
"bio_fresh_plus": {
"default": "mdi:leaf"
},
"bio_fresh_plus_bottom_zone": {
"default": "mdi:leaf"
},
"bio_fresh_plus_middle_zone": {
"default": "mdi:leaf"
},
"bio_fresh_plus_top_zone": {
"default": "mdi:leaf"
},
"hydro_breeze": {
"default": "mdi:weather-windy"
},
"hydro_breeze_bottom_zone": {
"default": "mdi:weather-windy"
},
"hydro_breeze_middle_zone": {
"default": "mdi:weather-windy"
},
"hydro_breeze_top_zone": {
"default": "mdi:weather-windy"
},
"ice_maker": {
"default": "mdi:cube-outline",
"state": {
"off": "mdi:cube-outline-off"
}
},
"ice_maker_bottom_zone": {
"default": "mdi:cube-outline",
"state": {
"off": "mdi:cube-outline-off"
}
},
"ice_maker_middle_zone": {
"default": "mdi:cube-outline",
"state": {
"off": "mdi:cube-outline-off"
}
},
"ice_maker_top_zone": {
"default": "mdi:cube-outline",
"state": {
"off": "mdi:cube-outline-off"
}
}
},
"switch": {
"night_mode": {
"default": "mdi:sleep",

View File

@@ -1,216 +0,0 @@
"""Select platform for Liebherr integration."""
from __future__ import annotations
from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from enum import StrEnum
from typing import TYPE_CHECKING, Any
from pyliebherrhomeapi import (
BioFreshPlusControl,
BioFreshPlusMode,
HydroBreezeControl,
HydroBreezeMode,
IceMakerControl,
IceMakerMode,
ZonePosition,
)
from homeassistant.components.select import SelectEntity, SelectEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import LiebherrConfigEntry, LiebherrCoordinator
from .entity import ZONE_POSITION_MAP, LiebherrEntity
PARALLEL_UPDATES = 1
type SelectControl = IceMakerControl | HydroBreezeControl | BioFreshPlusControl
@dataclass(frozen=True, kw_only=True)
class LiebherrSelectEntityDescription(SelectEntityDescription):
"""Describes a Liebherr select entity."""
control_type: type[SelectControl]
mode_enum: type[StrEnum]
current_mode_fn: Callable[[SelectControl], StrEnum | str | None]
options_fn: Callable[[SelectControl], list[str]]
set_fn: Callable[[LiebherrCoordinator, int, StrEnum], Coroutine[Any, Any, None]]
def _ice_maker_options(control: SelectControl) -> list[str]:
"""Return available ice maker options."""
if TYPE_CHECKING:
assert isinstance(control, IceMakerControl)
options = [IceMakerMode.OFF.value, IceMakerMode.ON.value]
if control.has_max_ice:
options.append(IceMakerMode.MAX_ICE.value)
return options
def _hydro_breeze_options(control: SelectControl) -> list[str]:
"""Return available HydroBreeze options."""
return [mode.value for mode in HydroBreezeMode]
def _bio_fresh_plus_options(control: SelectControl) -> list[str]:
"""Return available BioFresh-Plus options."""
if TYPE_CHECKING:
assert isinstance(control, BioFreshPlusControl)
return [
mode.value
for mode in control.supported_modes
if isinstance(mode, BioFreshPlusMode)
]
SELECT_TYPES: list[LiebherrSelectEntityDescription] = [
LiebherrSelectEntityDescription(
key="ice_maker",
translation_key="ice_maker",
control_type=IceMakerControl,
mode_enum=IceMakerMode,
current_mode_fn=lambda c: c.ice_maker_mode, # type: ignore[union-attr]
options_fn=_ice_maker_options,
set_fn=lambda coordinator, zone_id, mode: coordinator.client.set_ice_maker(
device_id=coordinator.device_id,
zone_id=zone_id,
mode=mode, # type: ignore[arg-type]
),
),
LiebherrSelectEntityDescription(
key="hydro_breeze",
translation_key="hydro_breeze",
control_type=HydroBreezeControl,
mode_enum=HydroBreezeMode,
current_mode_fn=lambda c: c.current_mode, # type: ignore[union-attr]
options_fn=_hydro_breeze_options,
set_fn=lambda coordinator, zone_id, mode: coordinator.client.set_hydro_breeze(
device_id=coordinator.device_id,
zone_id=zone_id,
mode=mode, # type: ignore[arg-type]
),
),
LiebherrSelectEntityDescription(
key="bio_fresh_plus",
translation_key="bio_fresh_plus",
control_type=BioFreshPlusControl,
mode_enum=BioFreshPlusMode,
current_mode_fn=lambda c: c.current_mode, # type: ignore[union-attr]
options_fn=_bio_fresh_plus_options,
set_fn=lambda coordinator, zone_id, mode: coordinator.client.set_bio_fresh_plus(
device_id=coordinator.device_id,
zone_id=zone_id,
mode=mode, # type: ignore[arg-type]
),
),
]
async def async_setup_entry(
hass: HomeAssistant,
entry: LiebherrConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Liebherr select entities."""
entities: list[LiebherrSelectEntity] = []
for coordinator in entry.runtime_data.values():
has_multiple_zones = len(coordinator.data.get_temperature_controls()) > 1
for control in coordinator.data.controls:
for description in SELECT_TYPES:
if isinstance(control, description.control_type):
if TYPE_CHECKING:
assert isinstance(
control,
IceMakerControl | HydroBreezeControl | BioFreshPlusControl,
)
entities.append(
LiebherrSelectEntity(
coordinator=coordinator,
description=description,
zone_id=control.zone_id,
has_multiple_zones=has_multiple_zones,
)
)
async_add_entities(entities)
class LiebherrSelectEntity(LiebherrEntity, SelectEntity):
"""Representation of a Liebherr select entity."""
entity_description: LiebherrSelectEntityDescription
def __init__(
self,
coordinator: LiebherrCoordinator,
description: LiebherrSelectEntityDescription,
zone_id: int,
has_multiple_zones: bool,
) -> None:
"""Initialize the select entity."""
super().__init__(coordinator)
self.entity_description = description
self._zone_id = zone_id
self._attr_unique_id = f"{coordinator.device_id}_{description.key}_{zone_id}"
# Set options from the control
control = self._select_control
if control is not None:
self._attr_options = description.options_fn(control)
# Add zone suffix only for multi-zone devices
if has_multiple_zones:
temp_controls = coordinator.data.get_temperature_controls()
if (
(tc := temp_controls.get(zone_id))
and isinstance(tc.zone_position, ZonePosition)
and (zone_key := ZONE_POSITION_MAP.get(tc.zone_position))
):
self._attr_translation_key = f"{description.translation_key}_{zone_key}"
@property
def _select_control(self) -> SelectControl | None:
"""Get the select control for this entity."""
for control in self.coordinator.data.controls:
if (
isinstance(control, self.entity_description.control_type)
and control.zone_id == self._zone_id
):
if TYPE_CHECKING:
assert isinstance(
control,
IceMakerControl | HydroBreezeControl | BioFreshPlusControl,
)
return control
return None
@property
def current_option(self) -> str | None:
"""Return the current selected option."""
control = self._select_control
if TYPE_CHECKING:
assert isinstance(
control,
IceMakerControl | HydroBreezeControl | BioFreshPlusControl,
)
mode = self.entity_description.current_mode_fn(control)
if isinstance(mode, StrEnum):
return mode.value
return None
@property
def available(self) -> bool:
"""Return if entity is available."""
return super().available and self._select_control is not None
async def async_select_option(self, option: str) -> None:
"""Change the selected option."""
mode = self.entity_description.mode_enum(option)
await self._async_send_command(
self.entity_description.set_fn(self.coordinator, self._zone_id, mode),
)

View File

@@ -47,112 +47,6 @@
"name": "Top zone setpoint"
}
},
"select": {
"bio_fresh_plus": {
"name": "BioFresh-Plus",
"state": {
"minus_two_minus_two": "-2°C | -2°C",
"minus_two_zero": "-2°C | 0°C",
"zero_minus_two": "0°C | -2°C",
"zero_zero": "0°C | 0°C"
}
},
"bio_fresh_plus_bottom_zone": {
"name": "Bottom zone BioFresh-Plus",
"state": {
"minus_two_minus_two": "[%key:component::liebherr::entity::select::bio_fresh_plus::state::minus_two_minus_two%]",
"minus_two_zero": "[%key:component::liebherr::entity::select::bio_fresh_plus::state::minus_two_zero%]",
"zero_minus_two": "[%key:component::liebherr::entity::select::bio_fresh_plus::state::zero_minus_two%]",
"zero_zero": "[%key:component::liebherr::entity::select::bio_fresh_plus::state::zero_zero%]"
}
},
"bio_fresh_plus_middle_zone": {
"name": "Middle zone BioFresh-Plus",
"state": {
"minus_two_minus_two": "[%key:component::liebherr::entity::select::bio_fresh_plus::state::minus_two_minus_two%]",
"minus_two_zero": "[%key:component::liebherr::entity::select::bio_fresh_plus::state::minus_two_zero%]",
"zero_minus_two": "[%key:component::liebherr::entity::select::bio_fresh_plus::state::zero_minus_two%]",
"zero_zero": "[%key:component::liebherr::entity::select::bio_fresh_plus::state::zero_zero%]"
}
},
"bio_fresh_plus_top_zone": {
"name": "Top zone BioFresh-Plus",
"state": {
"minus_two_minus_two": "[%key:component::liebherr::entity::select::bio_fresh_plus::state::minus_two_minus_two%]",
"minus_two_zero": "[%key:component::liebherr::entity::select::bio_fresh_plus::state::minus_two_zero%]",
"zero_minus_two": "[%key:component::liebherr::entity::select::bio_fresh_plus::state::zero_minus_two%]",
"zero_zero": "[%key:component::liebherr::entity::select::bio_fresh_plus::state::zero_zero%]"
}
},
"hydro_breeze": {
"name": "HydroBreeze",
"state": {
"high": "[%key:common::state::high%]",
"low": "[%key:common::state::low%]",
"medium": "[%key:common::state::medium%]",
"off": "[%key:common::state::off%]"
}
},
"hydro_breeze_bottom_zone": {
"name": "Bottom zone HydroBreeze",
"state": {
"high": "[%key:common::state::high%]",
"low": "[%key:common::state::low%]",
"medium": "[%key:common::state::medium%]",
"off": "[%key:common::state::off%]"
}
},
"hydro_breeze_middle_zone": {
"name": "Middle zone HydroBreeze",
"state": {
"high": "[%key:common::state::high%]",
"low": "[%key:common::state::low%]",
"medium": "[%key:common::state::medium%]",
"off": "[%key:common::state::off%]"
}
},
"hydro_breeze_top_zone": {
"name": "Top zone HydroBreeze",
"state": {
"high": "[%key:common::state::high%]",
"low": "[%key:common::state::low%]",
"medium": "[%key:common::state::medium%]",
"off": "[%key:common::state::off%]"
}
},
"ice_maker": {
"name": "IceMaker",
"state": {
"max_ice": "MaxIce",
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]"
}
},
"ice_maker_bottom_zone": {
"name": "Bottom zone IceMaker",
"state": {
"max_ice": "[%key:component::liebherr::entity::select::ice_maker::state::max_ice%]",
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]"
}
},
"ice_maker_middle_zone": {
"name": "Middle zone IceMaker",
"state": {
"max_ice": "[%key:component::liebherr::entity::select::ice_maker::state::max_ice%]",
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]"
}
},
"ice_maker_top_zone": {
"name": "Top zone IceMaker",
"state": {
"max_ice": "[%key:component::liebherr::entity::select::ice_maker::state::max_ice%]",
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]"
}
}
},
"sensor": {
"bottom_zone": {
"name": "Bottom zone"

View File

@@ -109,18 +109,14 @@ class LunatoneLight(
return self._device is not None and self._device.is_on
@property
def brightness(self) -> int | None:
def brightness(self) -> int:
"""Return the brightness of this light between 0..255."""
return (
value_to_brightness(self.BRIGHTNESS_SCALE, self._device.brightness)
if self._device.brightness is not None
else None
)
return value_to_brightness(self.BRIGHTNESS_SCALE, self._device.brightness)
@property
def color_mode(self) -> ColorMode:
"""Return the color mode of the light."""
if self._device is not None and self._device.brightness is not None:
if self._device is not None and self._device.is_dimmable:
return ColorMode.BRIGHTNESS
return ColorMode.ONOFF
@@ -153,8 +149,7 @@ class LunatoneLight(
async def async_turn_off(self, **kwargs: Any) -> None:
"""Instruct the light to turn off."""
if brightness_supported(self.supported_color_modes):
if self.brightness:
self._last_brightness = self.brightness
self._last_brightness = self.brightness
await self._device.fade_to_brightness(0)
else:
await self._device.switch_off()

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_polling",
"quality_scale": "silver",
"requirements": ["lunatone-rest-api-client==0.7.0"]
"requirements": ["lunatone-rest-api-client==0.6.3"]
}

View File

@@ -8,6 +8,6 @@
"iot_class": "calculated",
"loggers": ["yt_dlp"],
"quality_scale": "internal",
"requirements": ["yt-dlp[default]==2026.02.21"],
"requirements": ["yt-dlp[default]==2026.02.04"],
"single_config_entry": true
}

View File

@@ -2,12 +2,10 @@
from __future__ import annotations
import asyncio
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from .const import DOMAIN as DOMAIN, SUBENTRY_TYPE_BUS, SUBENTRY_TYPE_SUBWAY
from .const import DOMAIN as DOMAIN
from .coordinator import MTAConfigEntry, MTADataUpdateCoordinator
PLATFORMS = [Platform.SENSOR]
@@ -15,36 +13,16 @@ PLATFORMS = [Platform.SENSOR]
async def async_setup_entry(hass: HomeAssistant, entry: MTAConfigEntry) -> bool:
"""Set up MTA from a config entry."""
coordinators: dict[str, MTADataUpdateCoordinator] = {}
coordinator = MTADataUpdateCoordinator(hass, entry)
await coordinator.async_config_entry_first_refresh()
for subentry_id, subentry in entry.subentries.items():
if subentry.subentry_type not in (SUBENTRY_TYPE_SUBWAY, SUBENTRY_TYPE_BUS):
continue
coordinators[subentry_id] = MTADataUpdateCoordinator(hass, entry, subentry)
# Refresh all coordinators in parallel
await asyncio.gather(
*(
coordinator.async_config_entry_first_refresh()
for coordinator in coordinators.values()
)
)
entry.runtime_data = coordinators
entry.async_on_unload(entry.add_update_listener(async_update_entry))
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_update_entry(hass: HomeAssistant, entry: MTAConfigEntry) -> None:
"""Handle config entry update (e.g., subentry changes)."""
await hass.config_entries.async_reload(entry.entry_id)
async def async_unload_entry(hass: HomeAssistant, entry: MTAConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@@ -2,43 +2,22 @@
from __future__ import annotations
from collections.abc import Mapping
import logging
from typing import Any
from pymta import LINE_TO_FEED, BusFeed, MTAFeedError, SubwayFeed
from pymta import LINE_TO_FEED, MTAFeedError, SubwayFeed
import voluptuous as vol
from homeassistant.config_entries import (
SOURCE_REAUTH,
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
ConfigSubentryFlow,
SubentryFlowResult,
)
from homeassistant.const import CONF_API_KEY
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.helpers import aiohttp_client
from homeassistant.helpers.selector import (
SelectOptionDict,
SelectSelector,
SelectSelectorConfig,
SelectSelectorMode,
TextSelector,
TextSelectorConfig,
TextSelectorType,
)
from .const import (
CONF_LINE,
CONF_ROUTE,
CONF_STOP_ID,
CONF_STOP_NAME,
DOMAIN,
SUBENTRY_TYPE_BUS,
SUBENTRY_TYPE_SUBWAY,
)
from .const import CONF_LINE, CONF_STOP_ID, CONF_STOP_NAME, DOMAIN
_LOGGER = logging.getLogger(__name__)
@@ -49,79 +28,17 @@ class MTAConfigFlow(ConfigFlow, domain=DOMAIN):
VERSION = 1
MINOR_VERSION = 1
@classmethod
@callback
def async_get_supported_subentry_types(
cls, config_entry: ConfigEntry
) -> dict[str, type[ConfigSubentryFlow]]:
"""Return subentries supported by this handler."""
return {
SUBENTRY_TYPE_SUBWAY: SubwaySubentryFlowHandler,
SUBENTRY_TYPE_BUS: BusSubentryFlowHandler,
}
def __init__(self) -> None:
"""Initialize the config flow."""
self.data: dict[str, Any] = {}
self.stops: dict[str, str] = {}
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
api_key = user_input.get(CONF_API_KEY)
self._async_abort_entries_match({CONF_API_KEY: api_key})
if api_key:
# Test the API key by trying to fetch bus data
session = async_get_clientsession(self.hass)
bus_feed = BusFeed(api_key=api_key, session=session)
try:
# Try to get stops for a known route to validate the key
await bus_feed.get_stops(route_id="M15")
except MTAFeedError:
errors["base"] = "cannot_connect"
except Exception:
_LOGGER.exception("Unexpected error validating API key")
errors["base"] = "unknown"
if not errors:
if self.source == SOURCE_REAUTH:
return self.async_update_reload_and_abort(
self._get_reauth_entry(),
data_updates={CONF_API_KEY: api_key or None},
)
return self.async_create_entry(
title="MTA",
data={CONF_API_KEY: api_key or None},
)
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Optional(CONF_API_KEY): TextSelector(
TextSelectorConfig(type=TextSelectorType.PASSWORD)
),
}
),
errors=errors,
)
async def async_step_reauth(
self, _entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle reauth when user wants to add or update API key."""
return await self.async_step_user()
class SubwaySubentryFlowHandler(ConfigSubentryFlow):
"""Handle subway stop subentry flow."""
def __init__(self) -> None:
"""Initialize the subentry flow."""
self.data: dict[str, Any] = {}
self.stops: dict[str, str] = {}
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> SubentryFlowResult:
"""Handle the line selection step."""
if user_input is not None:
self.data[CONF_LINE] = user_input[CONF_LINE]
return await self.async_step_stop()
@@ -141,12 +58,13 @@ class SubwaySubentryFlowHandler(ConfigSubentryFlow):
),
}
),
errors=errors,
)
async def async_step_stop(
self, user_input: dict[str, Any] | None = None
) -> SubentryFlowResult:
"""Handle the stop selection step."""
) -> ConfigFlowResult:
"""Handle the stop step."""
errors: dict[str, str] = {}
if user_input is not None:
@@ -156,30 +74,25 @@ class SubwaySubentryFlowHandler(ConfigSubentryFlow):
self.data[CONF_STOP_NAME] = stop_name
unique_id = f"{self.data[CONF_LINE]}_{stop_id}"
await self.async_set_unique_id(unique_id)
self._abort_if_unique_id_configured()
# Check for duplicate subentries across all entries
for entry in self.hass.config_entries.async_entries(DOMAIN):
for subentry in entry.subentries.values():
if subentry.unique_id == unique_id:
return self.async_abort(reason="already_configured")
# Test connection to real-time GTFS-RT feed
# Test connection to real-time GTFS-RT feed (different from static GTFS used by get_stops)
try:
await self._async_test_connection()
except MTAFeedError:
errors["base"] = "cannot_connect"
else:
title = f"{self.data[CONF_LINE]} - {stop_name}"
title = f"{self.data[CONF_LINE]} Line - {stop_name}"
return self.async_create_entry(
title=title,
data=self.data,
unique_id=unique_id,
)
try:
self.stops = await self._async_get_stops(self.data[CONF_LINE])
except MTAFeedError:
_LOGGER.debug("Error fetching stops for line %s", self.data[CONF_LINE])
_LOGGER.exception("Error fetching stops for line %s", self.data[CONF_LINE])
return self.async_abort(reason="cannot_connect")
if not self.stops:
@@ -210,7 +123,7 @@ class SubwaySubentryFlowHandler(ConfigSubentryFlow):
async def _async_get_stops(self, line: str) -> dict[str, str]:
"""Get stops for a line from the library."""
feed_id = SubwayFeed.get_feed_id_for_route(line)
session = async_get_clientsession(self.hass)
session = aiohttp_client.async_get_clientsession(self.hass)
subway_feed = SubwayFeed(feed_id=feed_id, session=session)
stops_list = await subway_feed.get_stops(route_id=line)
@@ -228,7 +141,7 @@ class SubwaySubentryFlowHandler(ConfigSubentryFlow):
async def _async_test_connection(self) -> None:
"""Test connection to MTA feed."""
feed_id = SubwayFeed.get_feed_id_for_route(self.data[CONF_LINE])
session = async_get_clientsession(self.hass)
session = aiohttp_client.async_get_clientsession(self.hass)
subway_feed = SubwayFeed(feed_id=feed_id, session=session)
await subway_feed.get_arrivals(
@@ -236,133 +149,3 @@ class SubwaySubentryFlowHandler(ConfigSubentryFlow):
stop_id=self.data[CONF_STOP_ID],
max_arrivals=1,
)
class BusSubentryFlowHandler(ConfigSubentryFlow):
"""Handle bus stop subentry flow."""
def __init__(self) -> None:
"""Initialize the subentry flow."""
self.data: dict[str, Any] = {}
self.stops: dict[str, str] = {}
def _get_api_key(self) -> str:
"""Get API key from parent entry."""
return self._get_entry().data.get(CONF_API_KEY) or ""
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> SubentryFlowResult:
"""Handle the route input step."""
errors: dict[str, str] = {}
if user_input is not None:
route = user_input[CONF_ROUTE].upper().strip()
self.data[CONF_ROUTE] = route
# Validate route by fetching stops
try:
self.stops = await self._async_get_stops(route)
if not self.stops:
errors["base"] = "invalid_route"
else:
return await self.async_step_stop()
except MTAFeedError:
_LOGGER.debug("Error fetching stops for route %s", route)
errors["base"] = "invalid_route"
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_ROUTE): TextSelector(),
}
),
errors=errors,
)
async def async_step_stop(
self, user_input: dict[str, Any] | None = None
) -> SubentryFlowResult:
"""Handle the stop selection step."""
errors: dict[str, str] = {}
if user_input is not None:
stop_id = user_input[CONF_STOP_ID]
self.data[CONF_STOP_ID] = stop_id
stop_name = self.stops.get(stop_id, stop_id)
self.data[CONF_STOP_NAME] = stop_name
unique_id = f"bus_{self.data[CONF_ROUTE]}_{stop_id}"
# Check for duplicate subentries across all entries
for entry in self.hass.config_entries.async_entries(DOMAIN):
for subentry in entry.subentries.values():
if subentry.unique_id == unique_id:
return self.async_abort(reason="already_configured")
# Test connection to real-time feed
try:
await self._async_test_connection()
except MTAFeedError:
errors["base"] = "cannot_connect"
else:
title = f"{self.data[CONF_ROUTE]} - {stop_name}"
return self.async_create_entry(
title=title,
data=self.data,
unique_id=unique_id,
)
stop_options = [
SelectOptionDict(value=stop_id, label=stop_name)
for stop_id, stop_name in sorted(self.stops.items(), key=lambda x: x[1])
]
return self.async_show_form(
step_id="stop",
data_schema=vol.Schema(
{
vol.Required(CONF_STOP_ID): SelectSelector(
SelectSelectorConfig(
options=stop_options,
mode=SelectSelectorMode.DROPDOWN,
)
),
}
),
errors=errors,
description_placeholders={"route": self.data[CONF_ROUTE]},
)
async def _async_get_stops(self, route: str) -> dict[str, str]:
"""Get stops for a bus route from the library."""
session = async_get_clientsession(self.hass)
api_key = self._get_api_key()
bus_feed = BusFeed(api_key=api_key, session=session)
stops_list = await bus_feed.get_stops(route_id=route)
stops = {}
for stop in stops_list:
stop_id = stop["stop_id"]
stop_name = stop["stop_name"]
# Add direction if available (e.g., "to South Ferry")
if direction := stop.get("direction_name"):
stops[stop_id] = f"{stop_name} (to {direction})"
else:
stops[stop_id] = stop_name
return stops
async def _async_test_connection(self) -> None:
"""Test connection to MTA bus feed."""
session = async_get_clientsession(self.hass)
api_key = self._get_api_key()
bus_feed = BusFeed(api_key=api_key, session=session)
await bus_feed.get_arrivals(
route_id=self.data[CONF_ROUTE],
stop_id=self.data[CONF_STOP_ID],
max_arrivals=1,
)

View File

@@ -7,9 +7,5 @@ DOMAIN = "mta"
CONF_LINE = "line"
CONF_STOP_ID = "stop_id"
CONF_STOP_NAME = "stop_name"
CONF_ROUTE = "route"
SUBENTRY_TYPE_SUBWAY = "subway"
SUBENTRY_TYPE_BUS = "bus"
UPDATE_INTERVAL = timedelta(seconds=30)

View File

@@ -6,30 +6,22 @@ from dataclasses import dataclass
from datetime import datetime
import logging
from pymta import BusFeed, MTAFeedError, SubwayFeed
from pymta import MTAFeedError, SubwayFeed
from homeassistant.config_entries import ConfigEntry, ConfigSubentry
from homeassistant.const import CONF_API_KEY
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import dt as dt_util
from .const import (
CONF_LINE,
CONF_ROUTE,
CONF_STOP_ID,
DOMAIN,
SUBENTRY_TYPE_BUS,
UPDATE_INTERVAL,
)
from .const import CONF_LINE, CONF_STOP_ID, DOMAIN, UPDATE_INTERVAL
_LOGGER = logging.getLogger(__name__)
@dataclass
class MTAArrival:
"""Represents a single transit arrival."""
"""Represents a single train arrival."""
arrival_time: datetime
minutes_until: int
@@ -44,7 +36,7 @@ class MTAData:
arrivals: list[MTAArrival]
type MTAConfigEntry = ConfigEntry[dict[str, MTADataUpdateCoordinator]]
type MTAConfigEntry = ConfigEntry[MTADataUpdateCoordinator]
class MTADataUpdateCoordinator(DataUpdateCoordinator[MTAData]):
@@ -52,48 +44,35 @@ class MTADataUpdateCoordinator(DataUpdateCoordinator[MTAData]):
config_entry: MTAConfigEntry
def __init__(
self,
hass: HomeAssistant,
config_entry: MTAConfigEntry,
subentry: ConfigSubentry,
) -> None:
def __init__(self, hass: HomeAssistant, config_entry: MTAConfigEntry) -> None:
"""Initialize."""
self.subentry = subentry
self.stop_id = subentry.data[CONF_STOP_ID]
self.line = config_entry.data[CONF_LINE]
self.stop_id = config_entry.data[CONF_STOP_ID]
self.feed_id = SubwayFeed.get_feed_id_for_route(self.line)
session = async_get_clientsession(hass)
if subentry.subentry_type == SUBENTRY_TYPE_BUS:
api_key = config_entry.data.get(CONF_API_KEY) or ""
self.feed: BusFeed | SubwayFeed = BusFeed(api_key=api_key, session=session)
self.route_id = subentry.data[CONF_ROUTE]
else:
# Subway feed
line = subentry.data[CONF_LINE]
feed_id = SubwayFeed.get_feed_id_for_route(line)
self.feed = SubwayFeed(feed_id=feed_id, session=session)
self.route_id = line
self.subway_feed = SubwayFeed(feed_id=self.feed_id, session=session)
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=f"{DOMAIN}_{subentry.subentry_id}",
name=DOMAIN,
update_interval=UPDATE_INTERVAL,
)
async def _async_update_data(self) -> MTAData:
"""Fetch data from MTA."""
_LOGGER.debug(
"Fetching data for route=%s, stop=%s",
self.route_id,
"Fetching data for line=%s, stop=%s, feed=%s",
self.line,
self.stop_id,
self.feed_id,
)
try:
library_arrivals = await self.feed.get_arrivals(
route_id=self.route_id,
library_arrivals = await self.subway_feed.get_arrivals(
route_id=self.line,
stop_id=self.stop_id,
max_arrivals=3,
)

View File

@@ -38,7 +38,9 @@ rules:
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow: done
reauthentication-flow:
status: exempt
comment: No authentication required.
test-coverage: done
# Gold

View File

@@ -11,13 +11,12 @@ from homeassistant.components.sensor import (
SensorEntity,
SensorEntityDescription,
)
from homeassistant.config_entries import ConfigSubentry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import CONF_LINE, CONF_ROUTE, CONF_STOP_NAME, DOMAIN, SUBENTRY_TYPE_BUS
from .const import CONF_LINE, CONF_STOP_ID, CONF_STOP_NAME, DOMAIN
from .coordinator import MTAArrival, MTAConfigEntry, MTADataUpdateCoordinator
PARALLEL_UPDATES = 0
@@ -98,19 +97,16 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up MTA sensor based on a config entry."""
for subentry_id, coordinator in entry.runtime_data.items():
subentry = entry.subentries[subentry_id]
async_add_entities(
(
MTASensor(coordinator, subentry, description)
for description in SENSOR_DESCRIPTIONS
),
config_subentry_id=subentry_id,
)
coordinator = entry.runtime_data
async_add_entities(
MTASensor(coordinator, entry, description)
for description in SENSOR_DESCRIPTIONS
)
class MTASensor(CoordinatorEntity[MTADataUpdateCoordinator], SensorEntity):
"""Sensor for MTA transit arrivals."""
"""Sensor for MTA train arrivals."""
_attr_has_entity_name = True
entity_description: MTASensorEntityDescription
@@ -118,32 +114,24 @@ class MTASensor(CoordinatorEntity[MTADataUpdateCoordinator], SensorEntity):
def __init__(
self,
coordinator: MTADataUpdateCoordinator,
subentry: ConfigSubentry,
entry: MTAConfigEntry,
description: MTASensorEntityDescription,
) -> None:
"""Initialize the sensor."""
super().__init__(coordinator)
self.entity_description = description
line = entry.data[CONF_LINE]
stop_id = entry.data[CONF_STOP_ID]
stop_name = entry.data.get(CONF_STOP_NAME, stop_id)
is_bus = subentry.subentry_type == SUBENTRY_TYPE_BUS
if is_bus:
route = subentry.data[CONF_ROUTE]
model = "Bus"
else:
route = subentry.data[CONF_LINE]
model = "Subway"
stop_name = subentry.data.get(CONF_STOP_NAME, subentry.subentry_id)
unique_id = subentry.unique_id or subentry.subentry_id
self._attr_unique_id = f"{unique_id}-{description.key}"
self._attr_unique_id = f"{entry.unique_id}-{description.key}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, unique_id)},
name=f"{route} - {stop_name}",
identifiers={(DOMAIN, entry.entry_id)},
name=f"{line} Line - {stop_name} ({stop_id})",
manufacturer="MTA",
model=model,
model="Subway",
entry_type=DeviceEntryType.SERVICE,
)

View File

@@ -2,95 +2,32 @@
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"no_stops": "No stops found for this line. The line may not be currently running."
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
},
"step": {
"user": {
"stop": {
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]"
"stop_id": "Stop and direction"
},
"data_description": {
"api_key": "API key from MTA Bus Time. Required for bus tracking, optional for subway only."
"stop_id": "Select the stop and direction you want to track"
},
"description": "Enter your MTA Bus Time API key to enable bus tracking. Leave blank if you only want to track subways."
}
}
},
"config_subentries": {
"bus": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
"description": "Choose a stop on the {line} line. The direction is included with each stop.",
"title": "Select stop and direction"
},
"entry_type": "Bus stop",
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_route": "Invalid bus route. Please check the route name and try again."
},
"initiate_flow": {
"user": "Add bus stop"
},
"step": {
"stop": {
"data": {
"stop_id": "Stop"
},
"data_description": {
"stop_id": "Select the stop you want to track"
},
"description": "Choose a stop on the {route} route.",
"title": "Select stop"
"user": {
"data": {
"line": "Line"
},
"user": {
"data": {
"route": "Route"
},
"data_description": {
"route": "The bus route identifier"
},
"description": "Enter the bus route you want to track (for example, M15, B46, Q10).",
"title": "Enter bus route"
}
}
},
"subway": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"no_stops": "No stops found for this line. The line may not be currently running."
},
"entry_type": "Subway stop",
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
},
"initiate_flow": {
"user": "Add subway stop"
},
"step": {
"stop": {
"data": {
"stop_id": "Stop and direction"
},
"data_description": {
"stop_id": "Select the stop and direction you want to track"
},
"description": "Choose a stop on the {line} line. The direction is included with each stop.",
"title": "Select stop and direction"
"data_description": {
"line": "The subway line to track"
},
"user": {
"data": {
"line": "Line"
},
"data_description": {
"line": "The subway line to track"
},
"description": "Choose the subway line you want to track.",
"title": "Select subway line"
}
"description": "Choose the subway line you want to track.",
"title": "Select subway line"
}
}
},

View File

@@ -120,31 +120,6 @@ class NRGkickConfigFlow(ConfigFlow, domain=DOMAIN):
self._discovered_name: str | None = None
self._pending_host: str | None = None
async def _async_validate_host(
self,
host: str,
errors: dict[str, str],
) -> tuple[dict[str, Any] | None, bool]:
"""Validate host connection and populate errors dict on failure.
Returns (info, needs_auth). When needs_auth is True, the caller
should store the host and redirect to the appropriate auth step.
"""
try:
return await validate_input(self.hass, host), False
except NRGkickApiClientApiDisabledError:
errors["base"] = "json_api_disabled"
except NRGkickApiClientAuthenticationError:
return None, True
except NRGkickApiClientInvalidResponseError:
errors["base"] = "invalid_response"
except NRGkickApiClientCommunicationError:
errors["base"] = "cannot_connect"
except NRGkickApiClientError:
_LOGGER.exception("Unexpected error")
errors["base"] = "unknown"
return None, False
async def _async_validate_credentials(
self,
host: str,
@@ -181,11 +156,21 @@ class NRGkickConfigFlow(ConfigFlow, domain=DOMAIN):
except vol.Invalid:
errors["base"] = "cannot_connect"
else:
info, needs_auth = await self._async_validate_host(host, errors)
if needs_auth:
try:
info = await validate_input(self.hass, host)
except NRGkickApiClientApiDisabledError:
errors["base"] = "json_api_disabled"
except NRGkickApiClientAuthenticationError:
self._pending_host = host
return await self.async_step_user_auth()
if info:
except NRGkickApiClientInvalidResponseError:
errors["base"] = "invalid_response"
except NRGkickApiClientCommunicationError:
errors["base"] = "cannot_connect"
except NRGkickApiClientError:
_LOGGER.exception("Unexpected error")
errors["base"] = "unknown"
else:
await self.async_set_unique_id(
info["serial"], raise_on_progress=False
)
@@ -272,81 +257,6 @@ class NRGkickConfigFlow(ConfigFlow, domain=DOMAIN):
errors=errors,
)
async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reconfiguration of the integration."""
errors: dict[str, str] = {}
reconfigure_entry = self._get_reconfigure_entry()
if user_input is not None:
try:
host = _normalize_host(user_input[CONF_HOST])
except vol.Invalid:
errors["base"] = "cannot_connect"
else:
info, needs_auth = await self._async_validate_host(host, errors)
if needs_auth:
self._pending_host = host
return await self.async_step_reconfigure_auth()
if info:
await self.async_set_unique_id(
info["serial"], raise_on_progress=False
)
self._abort_if_unique_id_mismatch()
return self.async_update_reload_and_abort(
reconfigure_entry,
data_updates={CONF_HOST: host},
)
return self.async_show_form(
step_id="reconfigure",
data_schema=self.add_suggested_values_to_schema(
STEP_USER_DATA_SCHEMA,
reconfigure_entry.data,
),
errors=errors,
)
async def async_step_reconfigure_auth(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reconfiguration authentication step."""
errors: dict[str, str] = {}
if TYPE_CHECKING:
assert self._pending_host is not None
reconfigure_entry = self._get_reconfigure_entry()
if user_input is not None:
if info := await self._async_validate_credentials(
self._pending_host,
errors,
username=user_input[CONF_USERNAME],
password=user_input[CONF_PASSWORD],
):
await self.async_set_unique_id(info["serial"], raise_on_progress=False)
self._abort_if_unique_id_mismatch()
return self.async_update_reload_and_abort(
reconfigure_entry,
data_updates={
CONF_HOST: self._pending_host,
CONF_USERNAME: user_input[CONF_USERNAME],
CONF_PASSWORD: user_input[CONF_PASSWORD],
},
)
return self.async_show_form(
step_id="reconfigure_auth",
data_schema=self.add_suggested_values_to_schema(
STEP_AUTH_DATA_SCHEMA,
reconfigure_entry.data,
),
errors=errors,
description_placeholders={
"device_ip": self._pending_host,
},
)
async def async_step_zeroconf(
self, discovery_info: ZeroconfServiceInfo
) -> ConfigFlowResult:
@@ -411,13 +321,21 @@ class NRGkickConfigFlow(ConfigFlow, domain=DOMAIN):
assert self._discovered_name is not None
if user_input is not None:
info, needs_auth = await self._async_validate_host(
self._discovered_host, errors
)
if needs_auth:
try:
info = await validate_input(self.hass, self._discovered_host)
except NRGkickApiClientApiDisabledError:
errors["base"] = "json_api_disabled"
except NRGkickApiClientAuthenticationError:
self._pending_host = self._discovered_host
return await self.async_step_user_auth()
if info:
except NRGkickApiClientInvalidResponseError:
errors["base"] = "invalid_response"
except NRGkickApiClientCommunicationError:
errors["base"] = "cannot_connect"
except NRGkickApiClientError:
_LOGGER.exception("Unexpected error")
errors["base"] = "unknown"
else:
return self.async_create_entry(
title=info["title"], data={CONF_HOST: self._discovered_host}
)

View File

@@ -68,7 +68,7 @@ rules:
entity-translations: done
exception-translations: done
icon-translations: done
reconfiguration-flow: done
reconfiguration-flow: todo
repair-issues: todo
stale-devices:
status: exempt

View File

@@ -6,7 +6,6 @@
"json_api_disabled": "JSON API is disabled on the device. Enable it in the NRGkick mobile app under Extended \u2192 Local API \u2192 API Variants.",
"no_serial_number": "Device does not provide a serial number",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"unique_id_mismatch": "The device does not match the previous device"
},
"error": {
@@ -29,26 +28,6 @@
},
"description": "Reauthenticate with your NRGkick device.\n\nGet your username and password in the NRGkick mobile app:\n1. Open the NRGkick mobile app \u2192 Extended \u2192 Local API\n2. Under Authentication (JSON), check or set your username and password"
},
"reconfigure": {
"data": {
"host": "[%key:common::config_flow::data::host%]"
},
"data_description": {
"host": "[%key:component::nrgkick::config::step::user::data_description::host%]"
},
"description": "Reconfigure your NRGkick device. This allows you to change the IP address or hostname of your NRGkick device."
},
"reconfigure_auth": {
"data": {
"password": "[%key:common::config_flow::data::password%]",
"username": "[%key:common::config_flow::data::username%]"
},
"data_description": {
"password": "[%key:component::nrgkick::config::step::user_auth::data_description::password%]",
"username": "[%key:component::nrgkick::config::step::user_auth::data_description::username%]"
},
"description": "[%key:component::nrgkick::config::step::user_auth::description%]"
},
"user": {
"data": {
"host": "[%key:common::config_flow::data::host%]"

View File

@@ -81,9 +81,6 @@
"service": "mdi:comment-remove"
},
"publish": {
"sections": {
"actions": "mdi:gesture-tap-button"
},
"service": "mdi:send"
}
}

View File

@@ -27,14 +27,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import DOMAIN
from .coordinator import NtfyConfigEntry
from .entity import NtfyBaseEntity
from .services import (
ACTIONS_MAP,
ATTR_ACTION,
ATTR_ACTIONS,
ATTR_ATTACH_FILE,
ATTR_FILENAME,
ATTR_SEQUENCE_ID,
)
from .services import ATTR_ATTACH_FILE, ATTR_FILENAME, ATTR_SEQUENCE_ID
_LOGGER = logging.getLogger(__name__)
@@ -112,15 +105,6 @@ class NtfyNotifyEntity(NtfyBaseEntity, NotifyEntity):
params.setdefault(ATTR_FILENAME, media.path.name)
actions: list[dict[str, Any]] | None = params.get(ATTR_ACTIONS)
if actions:
params["actions"] = [
ACTIONS_MAP[action[ATTR_ACTION]](
**{k: v for k, v in action.items() if k != ATTR_ACTION}
)
for action in actions
]
msg = Message(topic=self.topic, **params)
try:
await self.ntfy.publish(msg, attachment)

View File

@@ -3,7 +3,6 @@
from datetime import timedelta
from typing import Any
from aiontfy import BroadcastAction, CopyAction, HttpAction, ViewAction
import voluptuous as vol
from yarl import URL
@@ -35,28 +34,6 @@ ATTR_ATTACH_FILE = "attach_file"
ATTR_FILENAME = "filename"
GRP_ATTACHMENT = "attachment"
MSG_ATTACHMENT = "Only one attachment source is allowed: URL or local file"
ATTR_ACTIONS = "actions"
ATTR_ACTION = "action"
ATTR_VIEW = "view"
ATTR_BROADCAST = "broadcast"
ATTR_HTTP = "http"
ATTR_LABEL = "label"
ATTR_URL = "url"
ATTR_CLEAR = "clear"
ATTR_INTENT = "intent"
ATTR_EXTRAS = "extras"
ATTR_METHOD = "method"
ATTR_HEADERS = "headers"
ATTR_BODY = "body"
ATTR_VALUE = "value"
ATTR_COPY = "copy"
ACTIONS_MAP = {
ATTR_VIEW: ViewAction,
ATTR_BROADCAST: BroadcastAction,
ATTR_HTTP: HttpAction,
ATTR_COPY: CopyAction,
}
MAX_ACTIONS_ALLOWED = 3 # ntfy only supports up to 3 actions per notification
def validate_filename(params: dict[str, Any]) -> dict[str, Any]:
@@ -68,40 +45,6 @@ def validate_filename(params: dict[str, Any]) -> dict[str, Any]:
return params
ACTION_SCHEMA = vol.Schema(
{
vol.Required(ATTR_LABEL): cv.string,
vol.Optional(ATTR_CLEAR, default=False): cv.boolean,
}
)
VIEW_SCHEMA = ACTION_SCHEMA.extend(
{
vol.Required(ATTR_ACTION): vol.Equal("view"),
vol.Required(ATTR_URL): vol.All(vol.Url(), vol.Coerce(URL)),
}
)
BROADCAST_SCHEMA = ACTION_SCHEMA.extend(
{
vol.Required(ATTR_ACTION): vol.Equal("broadcast"),
vol.Optional(ATTR_INTENT): cv.string,
vol.Optional(ATTR_EXTRAS): dict[str, str],
}
)
HTTP_SCHEMA = VIEW_SCHEMA.extend(
{
vol.Required(ATTR_ACTION): vol.Equal("http"),
vol.Optional(ATTR_METHOD): cv.string,
vol.Optional(ATTR_HEADERS): dict[str, str],
vol.Optional(ATTR_BODY): cv.string,
}
)
COPY_SCHEMA = ACTION_SCHEMA.extend(
{
vol.Required(ATTR_ACTION): vol.Equal("copy"),
vol.Required(ATTR_VALUE): cv.string,
}
)
SERVICE_PUBLISH_SCHEMA = vol.All(
cv.make_entity_service_schema(
{
@@ -126,14 +69,6 @@ SERVICE_PUBLISH_SCHEMA = vol.All(
ATTR_ATTACH_FILE, GRP_ATTACHMENT, MSG_ATTACHMENT
): MediaSelector({"accept": ["*/*"]}),
vol.Optional(ATTR_FILENAME): cv.string,
vol.Optional(ATTR_ACTIONS): vol.All(
cv.ensure_list,
vol.Length(
max=MAX_ACTIONS_ALLOWED,
msg="Too many actions defined. A maximum of 3 is supported",
),
[vol.Any(VIEW_SCHEMA, BROADCAST_SCHEMA, HTTP_SCHEMA, COPY_SCHEMA)],
),
}
),
validate_filename,

View File

@@ -99,65 +99,6 @@ publish:
type: url
autocomplete: url
example: https://example.org/logo.png
actions:
selector:
object:
label_field: "label"
description_field: "url"
multiple: true
translation_key: actions
fields:
action:
required: true
selector:
select:
options:
- value: view
label: Open website/app
- value: http
label: Send HTTP request
- value: broadcast
label: Send Android broadcast
- value: copy
label: Copy to clipboard
translation_key: action_type
mode: dropdown
label:
selector:
text:
required: true
clear:
selector:
boolean:
url:
selector:
text:
type: url
method:
selector:
select:
options:
- GET
- POST
- PUT
- DELETE
custom_value: true
headers:
selector:
object:
body:
selector:
text:
multiline: true
intent:
selector:
text:
extras:
selector:
object:
value:
selector:
text:
sequence_id:
required: false
selector:

View File

@@ -318,50 +318,6 @@
}
},
"selector": {
"actions": {
"fields": {
"action": {
"description": "Select the type of action to add to the notification",
"name": "Action type"
},
"body": {
"description": "The body of the HTTP request for `http` actions.",
"name": "HTTP body"
},
"clear": {
"description": "Clear notification after action button is tapped",
"name": "Clear notification"
},
"extras": {
"description": "Extras to include in the intent as key-value pairs for 'broadcast' actions",
"name": "Intent extras"
},
"headers": {
"description": "Additional HTTP headers as key-value pairs for 'http' actions",
"name": "HTTP headers"
},
"intent": {
"description": "Android intent to send when the 'broadcast' action is triggered",
"name": "Intent"
},
"label": {
"description": "Label of the action button",
"name": "Label"
},
"method": {
"description": "HTTP method to use for the 'http' action",
"name": "HTTP method"
},
"url": {
"description": "URL to open for the 'view' action or to request for the 'http' action",
"name": "URL"
},
"value": {
"description": "Value to copy to clipboard when the 'copy' action is triggered",
"name": "Value"
}
}
},
"priority": {
"options": {
"1": "Minimum",
@@ -394,12 +350,8 @@
"name": "Delete notification"
},
"publish": {
"description": "Publishes a notification message to a ntfy topic.",
"description": "Publishes a notification message to a ntfy topic",
"fields": {
"actions": {
"description": "Up to three actions (`view`, `broadcast`, `http`, or `copy`) can be added as buttons below the notification. Actions are executed when the corresponding button is tapped or clicked.",
"name": "Action buttons"
},
"attach": {
"description": "Attach images or other files by URL.",
"name": "Attachment URL"

View File

@@ -30,8 +30,11 @@ rules:
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow: done
parallel-updates: todo
reauthentication-flow:
status: todo
comment: |
No reauthentication flow is defined. It will be done in a next iteration.
test-coverage: done
# Gold
devices: done
@@ -44,27 +47,25 @@ rules:
status: exempt
comment: |
No discovery is implemented, since it's software based.
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices: done
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
entity-translations: done
exception-translations: done
icon-translations: done
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices: todo
entity-category: todo
entity-device-class: todo
entity-disabled-by-default: todo
entity-translations: todo
exception-translations: todo
icon-translations: todo
reconfiguration-flow: done
repair-issues:
status: exempt
comment: |
No repair issues are implemented, currently.
stale-devices: done
repair-issues: todo
stale-devices: todo
# Platinum
async-dependency: done
async-dependency: todo
inject-websession: done
strict-typing: done

View File

@@ -1,6 +1,6 @@
{
"domain": "powerfox",
"name": "Powerfox Cloud",
"name": "Powerfox",
"codeowners": ["@klaasnicolaas"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/powerfox",

View File

@@ -37,10 +37,7 @@ from .const import (
)
from .coordinator import ProxmoxConfigEntry, ProxmoxCoordinator
PLATFORMS = [
Platform.BINARY_SENSOR,
Platform.BUTTON,
]
PLATFORMS = [Platform.BINARY_SENSOR]
CONFIG_SCHEMA = vol.Schema(

View File

@@ -1,339 +0,0 @@
"""Button platform for Proxmox VE."""
from __future__ import annotations
from abc import abstractmethod
from collections.abc import Callable
from dataclasses import dataclass
from typing import Any
from proxmoxer import AuthenticationError
from proxmoxer.core import ResourceException
import requests
from requests.exceptions import ConnectTimeout, SSLError
from homeassistant.components.button import (
ButtonDeviceClass,
ButtonEntity,
ButtonEntityDescription,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import DOMAIN
from .coordinator import ProxmoxConfigEntry, ProxmoxCoordinator, ProxmoxNodeData
from .entity import ProxmoxContainerEntity, ProxmoxNodeEntity, ProxmoxVMEntity
@dataclass(frozen=True, kw_only=True)
class ProxmoxNodeButtonNodeEntityDescription(ButtonEntityDescription):
    """Class to hold Proxmox node button description."""

    # Called with (coordinator, node_name); runs the blocking proxmoxer
    # call for this button (the entity dispatches it via an executor).
    press_action: Callable[[ProxmoxCoordinator, str], None]
@dataclass(frozen=True, kw_only=True)
class ProxmoxVMButtonEntityDescription(ButtonEntityDescription):
    """Class to hold Proxmox VM button description."""

    # Called with (coordinator, node_name, vmid); runs the blocking
    # proxmoxer call for this VM button.
    press_action: Callable[[ProxmoxCoordinator, str, int], None]
@dataclass(frozen=True, kw_only=True)
class ProxmoxContainerButtonEntityDescription(ButtonEntityDescription):
    """Class to hold Proxmox container button description."""

    # Called with (coordinator, node_name, vmid); runs the blocking
    # proxmoxer call for this LXC container button.
    press_action: Callable[[ProxmoxCoordinator, str, int], None]
# Buttons created for every Proxmox node: reboot/shutdown the host and
# bulk start/stop of all guests on it. "reboot" carries no
# translation_key; its RESTART device class determines the entity name.
NODE_BUTTONS: tuple[ProxmoxNodeButtonNodeEntityDescription, ...] = (
    ProxmoxNodeButtonNodeEntityDescription(
        key="reboot",
        press_action=lambda coordinator, node: coordinator.proxmox.nodes(
            node
        ).status.post(command="reboot"),
        entity_category=EntityCategory.CONFIG,
        device_class=ButtonDeviceClass.RESTART,
    ),
    ProxmoxNodeButtonNodeEntityDescription(
        key="shutdown",
        translation_key="shutdown",
        press_action=lambda coordinator, node: coordinator.proxmox.nodes(
            node
        ).status.post(command="shutdown"),
        entity_category=EntityCategory.CONFIG,
    ),
    ProxmoxNodeButtonNodeEntityDescription(
        key="start_all",
        translation_key="start_all",
        press_action=lambda coordinator, node: coordinator.proxmox.nodes(
            node
        ).startall.post(),
        entity_category=EntityCategory.CONFIG,
    ),
    ProxmoxNodeButtonNodeEntityDescription(
        key="stop_all",
        translation_key="stop_all",
        press_action=lambda coordinator, node: coordinator.proxmox.nodes(
            node
        ).stopall.post(),
        entity_category=EntityCategory.CONFIG,
    ),
)
# Buttons created for every QEMU virtual machine. "restart" carries no
# translation_key; its RESTART device class determines the entity name.
VM_BUTTONS: tuple[ProxmoxVMButtonEntityDescription, ...] = (
    ProxmoxVMButtonEntityDescription(
        key="start",
        translation_key="start",
        press_action=lambda coordinator, node, vmid: (
            coordinator.proxmox.nodes(node).qemu(vmid).status.start.post()
        ),
        entity_category=EntityCategory.CONFIG,
    ),
    ProxmoxVMButtonEntityDescription(
        key="stop",
        translation_key="stop",
        press_action=lambda coordinator, node, vmid: (
            coordinator.proxmox.nodes(node).qemu(vmid).status.stop.post()
        ),
        entity_category=EntityCategory.CONFIG,
    ),
    ProxmoxVMButtonEntityDescription(
        key="restart",
        press_action=lambda coordinator, node, vmid: (
            coordinator.proxmox.nodes(node).qemu(vmid).status.restart.post()
        ),
        entity_category=EntityCategory.CONFIG,
        device_class=ButtonDeviceClass.RESTART,
    ),
    ProxmoxVMButtonEntityDescription(
        key="hibernate",
        translation_key="hibernate",
        press_action=lambda coordinator, node, vmid: (
            coordinator.proxmox.nodes(node).qemu(vmid).status.hibernate.post()
        ),
        entity_category=EntityCategory.CONFIG,
    ),
    ProxmoxVMButtonEntityDescription(
        key="reset",
        translation_key="reset",
        press_action=lambda coordinator, node, vmid: (
            coordinator.proxmox.nodes(node).qemu(vmid).status.reset.post()
        ),
        entity_category=EntityCategory.CONFIG,
    ),
)
# Buttons created for every LXC container. Containers have no
# hibernate/reset, so only start/stop/restart are offered.
CONTAINER_BUTTONS: tuple[ProxmoxContainerButtonEntityDescription, ...] = (
    ProxmoxContainerButtonEntityDescription(
        key="start",
        translation_key="start",
        press_action=lambda coordinator, node, vmid: (
            coordinator.proxmox.nodes(node).lxc(vmid).status.start.post()
        ),
        entity_category=EntityCategory.CONFIG,
    ),
    ProxmoxContainerButtonEntityDescription(
        key="stop",
        translation_key="stop",
        press_action=lambda coordinator, node, vmid: (
            coordinator.proxmox.nodes(node).lxc(vmid).status.stop.post()
        ),
        entity_category=EntityCategory.CONFIG,
    ),
    ProxmoxContainerButtonEntityDescription(
        key="restart",
        press_action=lambda coordinator, node, vmid: (
            coordinator.proxmox.nodes(node).lxc(vmid).status.restart.post()
        ),
        entity_category=EntityCategory.CONFIG,
        device_class=ButtonDeviceClass.RESTART,
    ),
)
async def async_setup_entry(
    hass: HomeAssistant,
    entry: ProxmoxConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up ProxmoxVE buttons.

    Registers callbacks on the coordinator so buttons are created for
    nodes/VMs/containers discovered later, then seeds entities for the
    objects the coordinator has already registered as known.
    """
    coordinator = entry.runtime_data

    def _async_add_new_nodes(nodes: list[ProxmoxNodeData]) -> None:
        """Add new node buttons."""
        async_add_entities(
            ProxmoxNodeButtonEntity(coordinator, entity_description, node)
            for node in nodes
            for entity_description in NODE_BUTTONS
        )

    def _async_add_new_vms(
        vms: list[tuple[ProxmoxNodeData, dict[str, Any]]],
    ) -> None:
        """Add new VM buttons."""
        async_add_entities(
            ProxmoxVMButtonEntity(coordinator, entity_description, vm, node_data)
            for (node_data, vm) in vms
            for entity_description in VM_BUTTONS
        )

    def _async_add_new_containers(
        containers: list[tuple[ProxmoxNodeData, dict[str, Any]]],
    ) -> None:
        """Add new container buttons."""
        async_add_entities(
            ProxmoxContainerButtonEntity(
                coordinator, entity_description, container, node_data
            )
            for (node_data, container) in containers
            for entity_description in CONTAINER_BUTTONS
        )

    # Register for future discoveries before seeding, so nothing that
    # appears between the two steps is dropped.
    coordinator.new_nodes_callbacks.append(_async_add_new_nodes)
    coordinator.new_vms_callbacks.append(_async_add_new_vms)
    coordinator.new_containers_callbacks.append(_async_add_new_containers)

    # Seed entities for objects already tracked by the coordinator; the
    # known_* sets filter to what the coordinator has registered so far.
    _async_add_new_nodes(
        [
            node_data
            for node_data in coordinator.data.values()
            if node_data.node["node"] in coordinator.known_nodes
        ]
    )
    _async_add_new_vms(
        [
            (node_data, vm_data)
            for node_data in coordinator.data.values()
            for vmid, vm_data in node_data.vms.items()
            if (node_data.node["node"], vmid) in coordinator.known_vms
        ]
    )
    _async_add_new_containers(
        [
            (node_data, container_data)
            for node_data in coordinator.data.values()
            for vmid, container_data in node_data.containers.items()
            if (node_data.node["node"], vmid) in coordinator.known_containers
        ]
    )
class ProxmoxBaseButton(ButtonEntity):
    """Common base for Proxmox buttons. Basically to ensure the async_press logic isn't duplicated."""

    entity_description: ButtonEntityDescription
    coordinator: ProxmoxCoordinator

    @abstractmethod
    async def _async_press_call(self) -> None:
        """Abstract method used per Proxmox button class."""

    async def async_press(self) -> None:
        """Trigger the Proxmox button press service.

        Maps low-level client errors to translated HomeAssistantErrors.
        """
        try:
            await self._async_press_call()
        except AuthenticationError as err:
            # Fix: bad credentials must surface as an authentication
            # failure, not as a connection failure (keys were swapped).
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="invalid_auth_no_details",
            ) from err
        except SSLError as err:
            # Fix: a TLS failure is a connectivity problem, not an
            # authentication problem (keys were swapped).
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="cannot_connect_no_details",
            ) from err
        except ConnectTimeout as err:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="timeout_connect_no_details",
            ) from err
        except (ResourceException, requests.exceptions.ConnectionError) as err:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="api_error_no_details",
            ) from err
class ProxmoxNodeButtonEntity(ProxmoxNodeEntity, ProxmoxBaseButton):
    """Represents a Proxmox Node button entity."""

    entity_description: ProxmoxNodeButtonNodeEntityDescription

    def __init__(
        self,
        coordinator: ProxmoxCoordinator,
        entity_description: ProxmoxNodeButtonNodeEntityDescription,
        node_data: ProxmoxNodeData,
    ) -> None:
        """Initialize the Proxmox Node button entity."""
        self.entity_description = entity_description
        super().__init__(coordinator, node_data)
        entry_id = coordinator.config_entry.entry_id
        node_id = node_data.node["id"]
        self._attr_unique_id = f"{entry_id}_{node_id}_{entity_description.key}"

    async def _async_press_call(self) -> None:
        """Run the blocking node action in the executor."""
        await self.hass.async_add_executor_job(
            self.entity_description.press_action,
            self.coordinator,
            self._node_data.node["node"],
        )
class ProxmoxVMButtonEntity(ProxmoxVMEntity, ProxmoxBaseButton):
    """Represents a Proxmox VM button entity."""

    entity_description: ProxmoxVMButtonEntityDescription

    def __init__(
        self,
        coordinator: ProxmoxCoordinator,
        entity_description: ProxmoxVMButtonEntityDescription,
        vm_data: dict[str, Any],
        node_data: ProxmoxNodeData,
    ) -> None:
        """Initialize the Proxmox VM button entity."""
        self.entity_description = entity_description
        super().__init__(coordinator, vm_data, node_data)
        entry_id = coordinator.config_entry.entry_id
        self._attr_unique_id = (
            f"{entry_id}_{self.device_id}_{entity_description.key}"
        )

    async def _async_press_call(self) -> None:
        """Run the blocking VM action in the executor."""
        await self.hass.async_add_executor_job(
            self.entity_description.press_action,
            self.coordinator,
            self._node_name,
            self.vm_data["vmid"],
        )
class ProxmoxContainerButtonEntity(ProxmoxContainerEntity, ProxmoxBaseButton):
    """Represents a Proxmox Container button entity."""

    entity_description: ProxmoxContainerButtonEntityDescription

    def __init__(
        self,
        coordinator: ProxmoxCoordinator,
        entity_description: ProxmoxContainerButtonEntityDescription,
        container_data: dict[str, Any],
        node_data: ProxmoxNodeData,
    ) -> None:
        """Initialize the Proxmox Container button entity."""
        self.entity_description = entity_description
        super().__init__(coordinator, container_data, node_data)
        entry_id = coordinator.config_entry.entry_id
        self._attr_unique_id = (
            f"{entry_id}_{self.device_id}_{entity_description.key}"
        )

    async def _async_press_call(self) -> None:
        """Run the blocking container action in the executor."""
        await self.hass.async_add_executor_job(
            self.entity_description.press_action,
            self.coordinator,
            self._node_name,
            self.container_data["vmid"],
        )

View File

@@ -1,18 +0,0 @@
{
"entity": {
"button": {
"hibernate": {
"default": "mdi:power-sleep"
},
"reset": {
"default": "mdi:restart"
},
"start": {
"default": "mdi:play"
},
"stop": {
"default": "mdi:stop"
}
}
}
}

View File

@@ -54,47 +54,15 @@
"status": {
"name": "Status"
}
},
"button": {
"hibernate": {
"name": "Hibernate"
},
"reset": {
"name": "Reset"
},
"shutdown": {
"name": "Shutdown"
},
"start": {
"name": "Start"
},
"start_all": {
"name": "Start all"
},
"stop": {
"name": "Stop"
},
"stop_all": {
"name": "Stop all"
}
}
},
"exceptions": {
"api_error_no_details": {
"message": "An error occurred while communicating with the Proxmox VE instance."
},
"cannot_connect": {
"message": "An error occurred while trying to connect to the Proxmox VE instance: {error}"
},
"cannot_connect_no_details": {
"message": "Could not connect to the Proxmox VE instance."
},
"invalid_auth": {
"message": "An error occurred while trying to authenticate: {error}"
},
"invalid_auth_no_details": {
"message": "Authentication failed for the Proxmox VE instance."
},
"no_nodes_found": {
"message": "No active nodes were found on the Proxmox VE server."
},
@@ -103,9 +71,6 @@
},
"timeout_connect": {
"message": "A timeout occurred while trying to connect to the Proxmox VE instance: {error}"
},
"timeout_connect_no_details": {
"message": "A timeout occurred while trying to connect to the Proxmox VE instance."
}
},
"issues": {

View File

@@ -28,6 +28,11 @@ def async_setup(hass: HomeAssistant) -> None:
assert event.data["action"] == "update" and "old_entity_id" in event.data
old_entity_id = event.data["old_entity_id"]
new_entity_id = event.data["entity_id"]
# Notify the states meta manager about the pending rename so
# that any StatisticsTask that runs before the actual database
# update can still resolve the new entity_id to the correct
# metadata_id.
instance.states_meta_manager.queue_rename(old_entity_id, new_entity_id)
async_update_statistics_metadata(
hass, old_entity_id, new_statistic_id=new_entity_id
)

View File

@@ -952,7 +952,13 @@ def async_update_statistics_metadata(
f"for unit_class '{new_unit_class}'"
)
get_instance(hass).async_update_statistics_metadata(
instance = get_instance(hass)
# Notify the statistics meta manager about the pending rename so
# that any StatisticsTask that runs before the actual database
# update can still resolve the new statistic_id.
if new_statistic_id is not UNDEFINED and new_statistic_id is not None:
instance.statistics_meta_manager.queue_rename(statistic_id, new_statistic_id)
instance.async_update_statistics_metadata(
statistic_id,
new_statistic_id=new_statistic_id,
new_unit_class=new_unit_class,

View File

@@ -3,6 +3,7 @@
from __future__ import annotations
from collections.abc import Iterable, Sequence
from queue import SimpleQueue
from typing import TYPE_CHECKING, cast
from sqlalchemy.orm.session import Session
@@ -27,8 +28,32 @@ class StatesMetaManager(BaseLRUTableManager[StatesMeta]):
def __init__(self, recorder: Recorder) -> None:
"""Initialize the states meta manager."""
self._did_first_load = False
# Thread-safe queue for entity_id renames from the event loop.
# Items are (old_entity_id, new_entity_id) tuples.
self._rename_queue: SimpleQueue[tuple[str, str]] = SimpleQueue()
# Recorder-thread-only dict mapping new_entity_id -> old_entity_id
# for renames that haven't been applied to the database yet.
self._pending_rename: dict[str, str] = {}
super().__init__(recorder, CACHE_SIZE)
def queue_rename(self, old_entity_id: str, new_entity_id: str) -> None:
    """Queue an entity_id rename notification.

    This method is thread-safe and is called from the event loop
    to notify the recorder thread about a pending entity_id rename.
    """
    # SimpleQueue.put is thread-safe, so no extra locking is needed.
    self._rename_queue.put((old_entity_id, new_entity_id))
def drain_pending_renames(self) -> None:
    """Drain the rename queue into the pending rename dict.

    This call is not thread-safe and must be called from the
    recorder thread.
    """
    # Map new_entity_id -> old_entity_id; a later rename for the same
    # new_entity_id overwrites the earlier mapping (dict assignment).
    while not self._rename_queue.empty():
        old_entity_id, new_entity_id = self._rename_queue.get_nowait()
        self._pending_rename[new_entity_id] = old_entity_id
def load(
self, events: list[Event[EventStateChangedData]], session: Session
) -> None:
@@ -117,6 +142,21 @@ class StatesMetaManager(BaseLRUTableManager[StatesMeta]):
if update_cache:
self._id_map[entity_id] = metadata_id
if not from_recorder:
return results
# Check pending renames for any entity_ids still not resolved.
# If an entity_id was renamed but the database hasn't been updated
# yet, we can resolve the new entity_id by looking up the old one.
pending_rename = self._pending_rename
for entity_id in missing:
if (
results.get(entity_id) is None
and (old_entity_id := pending_rename.get(entity_id)) is not None
and (metadata_id := self._id_map.get(old_entity_id)) is not None
):
results[entity_id] = metadata_id
return results
def add_pending(self, db_states_meta: StatesMeta) -> None:
@@ -155,12 +195,18 @@ class StatesMetaManager(BaseLRUTableManager[StatesMeta]):
new_entity_id: str,
) -> bool:
"""Update states metadata for an entity_id."""
# Clear the pending rename before the collision check so
# get() doesn't resolve new_entity_id via the side channel.
self._pending_rename.pop(new_entity_id, None)
if self.get(new_entity_id, session, True) is not None:
# If the new entity id already exists we have
# a collision and should not update.
return False
metadata_id = self._id_map.get(entity_id)
session.query(StatesMeta).filter(StatesMeta.entity_id == entity_id).update(
{StatesMeta.entity_id: new_entity_id}
)
self._id_map.pop(entity_id, None)
if metadata_id is not None:
self._id_map[new_entity_id] = metadata_id
return True

View File

@@ -1,8 +1,9 @@
"""Support managing StatesMeta."""
"""Support managing StatisticsMeta."""
from __future__ import annotations
import logging
from queue import SimpleQueue
import threading
from typing import TYPE_CHECKING, Any, Final, Literal
@@ -88,12 +89,36 @@ class StatisticsMetaManager:
self._stat_id_to_id_meta: LRU[str, tuple[int, StatisticMetaData]] = LRU(
CACHE_SIZE
)
# Thread-safe queue for statistic_id renames from the event loop.
# Items are (old_statistic_id, new_statistic_id) tuples.
self._rename_queue: SimpleQueue[tuple[str, str]] = SimpleQueue()
# Recorder-thread-only dict mapping new_statistic_id -> old_statistic_id
# for renames that haven't been applied to the database yet.
self._pending_rename: dict[str, str] = {}
def _clear_cache(self, statistic_ids: list[str]) -> None:
    """Remove the given statistic_ids from the metadata cache."""
    # pop with a default avoids KeyError for ids not currently cached.
    for statistic_id in statistic_ids:
        self._stat_id_to_id_meta.pop(statistic_id, None)
def queue_rename(self, old_statistic_id: str, new_statistic_id: str) -> None:
    """Queue a statistic_id rename notification.

    This method is thread-safe and is called from the event loop
    to notify the recorder thread about a pending statistic_id rename.
    """
    # SimpleQueue.put is thread-safe, so no extra locking is needed.
    self._rename_queue.put((old_statistic_id, new_statistic_id))
def drain_pending_renames(self) -> None:
    """Drain the rename queue into the pending rename dict.

    This call is not thread-safe and must be called from the
    recorder thread.
    """
    # Map new_statistic_id -> old_statistic_id; a later rename for the
    # same new_statistic_id overwrites the earlier mapping.
    while not self._rename_queue.empty():
        old_statistic_id, new_statistic_id = self._rename_queue.get_nowait()
        self._pending_rename[new_statistic_id] = old_statistic_id
def _get_from_database(
self,
session: Session,
@@ -293,9 +318,28 @@ class StatisticsMetaManager:
return results
# Fetch metadata from the database
return results | self._get_from_database(
session, statistic_ids=missing_statistic_id
)
results |= self._get_from_database(session, statistic_ids=missing_statistic_id)
# Check pending renames for any statistic_ids still not resolved.
# If a statistic_id was renamed but the database hasn't been
# updated yet, resolve the new statistic_id using the old one.
if self.recorder.thread_id == threading.get_ident() and (
pending_rename := self._pending_rename
):
for statistic_id in missing_statistic_id:
if (
statistic_id not in results
and (old_id := pending_rename.get(statistic_id)) is not None
):
# Try cache first, then database for the old statistic_id
if id_meta := self._stat_id_to_id_meta.get(old_id):
results[statistic_id] = id_meta
elif db_result := self._get_from_database(
session, statistic_ids={old_id}
):
results[statistic_id] = next(iter(db_result.values()))
return results
def get_from_cache_threadsafe(
self, statistic_ids: set[str]
@@ -377,6 +421,9 @@ class StatisticsMetaManager:
recorder thread.
"""
self._assert_in_recorder_thread()
# Clear the pending rename before the collision check so
# get() doesn't resolve new_statistic_id via the side channel.
self._pending_rename.pop(new_statistic_id, None)
if self.get(session, new_statistic_id):
_LOGGER.error(
"Cannot rename statistic_id `%s` to `%s` because the new statistic_id is already in use",

View File

@@ -82,6 +82,7 @@ class UpdateStatisticsMetadataTask(RecorderTask):
def run(self, instance: Recorder) -> None:
"""Handle the task."""
instance.statistics_meta_manager.drain_pending_renames()
statistics.update_statistics_metadata(
instance,
self.statistic_id,
@@ -102,6 +103,7 @@ class UpdateStatesMetadataTask(RecorderTask):
def run(self, instance: Recorder) -> None:
"""Handle the task."""
instance.states_meta_manager.drain_pending_renames()
entity_registry.update_states_metadata(
instance,
self.entity_id,
@@ -169,6 +171,11 @@ class StatisticsTask(RecorderTask):
def run(self, instance: Recorder) -> None:
"""Run statistics task."""
# Drain any pending entity_id/statistic_id renames so the
# compilation can resolve new ids that the database doesn't
# know about yet.
instance.states_meta_manager.drain_pending_renames()
instance.statistics_meta_manager.drain_pending_renames()
if statistics.compile_statistics(instance, self.start, self.fire_events):
return
# Schedule a new statistics task if this one didn't finish
@@ -181,6 +188,8 @@ class CompileMissingStatisticsTask(RecorderTask):
def run(self, instance: Recorder) -> None:
"""Run statistics task to compile missing statistics."""
instance.states_meta_manager.drain_pending_renames()
instance.statistics_meta_manager.drain_pending_renames()
if statistics.compile_missing_statistics(instance):
return
# Schedule a new statistics task if this one didn't finish

View File

@@ -9,7 +9,6 @@ from satel_integra.satel_integra import AlarmState
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .client import SatelClient
@@ -17,8 +16,6 @@ from .const import ZONES
_LOGGER = logging.getLogger(__name__)
PARTITION_UPDATE_DEBOUNCE_DELAY = 0.15
@dataclass
class SatelIntegraData:
@@ -109,21 +106,9 @@ class SatelIntegraPartitionsCoordinator(
self.data = {}
self._debouncer = Debouncer(
hass=self.hass,
logger=_LOGGER,
cooldown=PARTITION_UPDATE_DEBOUNCE_DELAY,
immediate=False,
function=callback(
lambda: self.async_set_updated_data(
self.client.controller.partition_states
)
),
)
@callback
def partitions_update_callback(self) -> None:
"""Update partition objects as per notification from the alarm."""
_LOGGER.debug("Sending request to update panel state")
self._debouncer.async_schedule_call()
self.async_set_updated_data(self.client.controller.partition_states)

View File

@@ -9,7 +9,7 @@ from pysmarlaapi.connection.exceptions import (
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.exceptions import ConfigEntryError
from .const import HOST, PLATFORMS
@@ -23,24 +23,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: FederwiegeConfigEntry) -
# Check if token still has access
try:
await connection.refresh_token()
except AuthenticationException as e:
raise ConfigEntryAuthFailed("Invalid authentication") from e
except ConnectionException as e:
raise ConfigEntryNotReady("Unable to connect to server") from e
except (ConnectionException, AuthenticationException) as e:
raise ConfigEntryError("Invalid authentication") from e
async def on_auth_failure():
entry.async_start_reauth(hass)
federwiege = Federwiege(hass.loop, connection, on_auth_failure)
federwiege = Federwiege(hass.loop, connection)
federwiege.register()
entry.runtime_data = federwiege
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
# Starts a task to keep reconnecting, e.g. when device gets unreachable.
# When an authentication error occurs, it automatically stops and calls
# the on_auth_failure function.
federwiege.connect()
return True

View File

@@ -2,7 +2,6 @@
from __future__ import annotations
from collections.abc import Mapping
from typing import Any
from pysmarlaapi import Connection
@@ -12,12 +11,12 @@ from pysmarlaapi.connection.exceptions import (
)
import voluptuous as vol
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_ACCESS_TOKEN
from .const import DOMAIN, HOST
STEP_USER_DATA_SCHEMA = vol.Schema({vol.Required(CONF_ACCESS_TOKEN): str})
STEP_USER_DATA_SCHEMA = vol.Schema({CONF_ACCESS_TOKEN: str})
class SmarlaConfigFlow(ConfigFlow, domain=DOMAIN):
@@ -25,89 +24,45 @@ class SmarlaConfigFlow(ConfigFlow, domain=DOMAIN):
VERSION = 1
def __init__(self) -> None:
"""Initialize the config flow."""
super().__init__()
self.errors: dict[str, str] = {}
async def _handle_token(self, token: str) -> str | None:
async def _handle_token(self, token: str) -> tuple[dict[str, str], str | None]:
"""Handle the token input."""
errors: dict[str, str] = {}
try:
conn = Connection(url=HOST, token_b64=token)
except ValueError:
self.errors["base"] = "malformed_token"
return None
errors["base"] = "malformed_token"
return errors, None
try:
await conn.refresh_token()
except ConnectionException:
self.errors["base"] = "cannot_connect"
return None
except AuthenticationException:
self.errors["base"] = "invalid_auth"
return None
except (ConnectionException, AuthenticationException):
errors["base"] = "invalid_auth"
return errors, None
return conn.token.serialNumber
async def _validate_input(
self, user_input: dict[str, Any]
) -> dict[str, Any] | None:
"""Validate the user input."""
token = user_input[CONF_ACCESS_TOKEN]
serial_number = await self._handle_token(token=token)
if serial_number is not None:
await self.async_set_unique_id(serial_number)
if self.source == SOURCE_REAUTH:
self._abort_if_unique_id_mismatch()
else:
self._abort_if_unique_id_configured()
return {"token": token, "serial_number": serial_number}
return None
return errors, conn.token.serialNumber
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
self.errors = {}
errors: dict[str, str] = {}
if user_input is not None:
validated_info = await self._validate_input(user_input)
if validated_info is not None:
raw_token = user_input[CONF_ACCESS_TOKEN]
errors, serial_number = await self._handle_token(token=raw_token)
if not errors and serial_number is not None:
await self.async_set_unique_id(serial_number)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=validated_info["serial_number"],
data={CONF_ACCESS_TOKEN: validated_info["token"]},
title=serial_number,
data={CONF_ACCESS_TOKEN: raw_token},
)
return self.async_show_form(
step_id="user",
data_schema=STEP_USER_DATA_SCHEMA,
errors=self.errors,
)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Perform reauthentication upon an API authentication error."""
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm reauthentication dialog."""
self.errors = {}
if user_input is not None:
validated_info = await self._validate_input(user_input)
if validated_info is not None:
return self.async_update_reload_and_abort(
self._get_reauth_entry(),
data_updates={CONF_ACCESS_TOKEN: validated_info["token"]},
)
return self.async_show_form(
step_id="reauth_confirm",
data_schema=STEP_USER_DATA_SCHEMA,
errors=self.errors,
errors=errors,
)

View File

@@ -28,7 +28,7 @@ rules:
integration-owner: done
log-when-unavailable: todo
parallel-updates: done
reauthentication-flow: done
reauthentication-flow: todo
test-coverage: done
# Gold

View File

@@ -1,24 +1,13 @@
{
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"unique_id_mismatch": "Please ensure you reconfigure against the same device."
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"malformed_token": "Malformed access token"
},
"step": {
"reauth_confirm": {
"data": {
"access_token": "[%key:common::config_flow::data::access_token%]"
},
"data_description": {
"access_token": "[%key:component::smarla::config::step::user::data_description::access_token%]"
}
},
"user": {
"data": {
"access_token": "[%key:common::config_flow::data::access_token%]"

View File

@@ -585,30 +585,10 @@ def get_media(
item_id = "A:ALBUMARTIST/" + "/".join(item_id.split("/")[2:])
if item_id.startswith("A:ALBUM/") or search_type == "tracks":
# Some Sonos libraries return album ids in the shape:
# A:ALBUM/<album>/<artist>, where the artist part disambiguates results.
# Use the album segment for searching.
if item_id.startswith("A:ALBUM/"):
splits = item_id.split("/")
search_term = urllib.parse.unquote(splits[1]) if len(splits) > 1 else ""
album_title: str | None = search_term
else:
search_term = urllib.parse.unquote(item_id.split("/")[-1])
album_title = None
search_term = urllib.parse.unquote(item_id.split("/")[-1])
matches = media_library.get_music_library_information(
search_type, search_term=search_term, full_album_art_uri=True
)
if item_id.startswith("A:ALBUM/") and len(matches) > 1:
if result := next(
(item for item in matches if item_id == item.item_id), None
):
matches = [result]
elif album_title:
if result := next(
(item for item in matches if album_title == item.title), None
):
matches = [result]
elif search_type == SONOS_SHARE:
# In order to get the MusicServiceItem, we browse the parent folder
# and find one that matches on item_id.

View File

@@ -7,7 +7,7 @@
"integration_type": "service",
"iot_class": "local_push",
"loggers": ["hass_splunk"],
"quality_scale": "bronze",
"quality_scale": "legacy",
"requirements": ["hass-splunk==0.1.4"],
"single_config_entry": true
}

View File

@@ -18,9 +18,18 @@ rules:
status: exempt
comment: |
Integration does not provide custom actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
docs-high-level-description:
status: todo
comment: |
Verify integration docs at https://www.home-assistant.io/integrations/splunk/ include a high-level description of Splunk with a link to https://www.splunk.com/ and explain the integration's purpose for users unfamiliar with Splunk.
docs-installation-instructions:
status: todo
comment: |
Verify integration docs include clear prerequisites and step-by-step setup instructions including how to configure Splunk HTTP Event Collector and obtain the required token.
docs-removal-instructions:
status: todo
comment: |
Verify integration docs include instructions on how to remove the integration and clarify what happens to data already in Splunk.
entity-event-setup:
status: exempt
comment: |

View File

@@ -2,7 +2,7 @@
from abc import abstractmethod
import asyncio
from collections.abc import Awaitable, Callable, Sequence
from collections.abc import Callable, Sequence
import io
import logging
import os
@@ -430,35 +430,48 @@ class TelegramNotificationService:
params[ATTR_PARSER] = None
return params
async def _send_msg_formatted(
async def _send_msgs(
self,
func_send: Callable[..., Awaitable[Message]],
func_send: Callable,
message_tag: str | None,
*args_msg: Any,
context: Context | None = None,
**kwargs_msg: Any,
) -> dict[str, JsonValueType]:
"""Sends a message and formats the response.
"""Sends a message to each of the targets.
If there is only 1 targtet, an error is raised if the send fails.
For multiple targets, errors are logged and the caller is responsible for checking which target is successful/failed based on the return value.
:return: dict with chat_id keys and message_id values for successful sends
"""
chat_id: int = kwargs_msg.pop(ATTR_CHAT_ID)
_LOGGER.debug("%s to chat ID %s", func_send.__name__, chat_id)
chat_ids = [kwargs_msg.pop(ATTR_CHAT_ID)]
msg_ids: dict[str, JsonValueType] = {}
for chat_id in chat_ids:
_LOGGER.debug("%s to chat ID %s", func_send.__name__, chat_id)
response: Message = await self._send_msg(
func_send,
message_tag,
chat_id,
*args_msg,
context=context,
**kwargs_msg,
)
for file_type in _FILE_TYPES:
if file_type in kwargs_msg and isinstance(
kwargs_msg[file_type], io.BytesIO
):
kwargs_msg[file_type].seek(0)
return {str(chat_id): response.id}
response: Message = await self._send_msg(
func_send,
message_tag,
chat_id,
*args_msg,
context=context,
**kwargs_msg,
)
if response:
msg_ids[str(chat_id)] = response.id
return msg_ids
async def _send_msg(
self,
func_send: Callable[..., Awaitable[Any]],
func_send: Callable,
message_tag: str | None,
*args_msg: Any,
context: Context | None = None,
@@ -505,7 +518,7 @@ class TelegramNotificationService:
title = kwargs.get(ATTR_TITLE)
text = f"{title}\n{message}" if title else message
params = self._get_msg_kwargs(kwargs)
return await self._send_msg_formatted(
return await self._send_msgs(
self.bot.send_message,
params[ATTR_MESSAGE_TAG],
text,
@@ -746,7 +759,7 @@ class TelegramNotificationService:
)
if file_type == SERVICE_SEND_PHOTO:
return await self._send_msg_formatted(
return await self._send_msgs(
self.bot.send_photo,
params[ATTR_MESSAGE_TAG],
chat_id=kwargs[ATTR_CHAT_ID],
@@ -762,7 +775,7 @@ class TelegramNotificationService:
)
if file_type == SERVICE_SEND_STICKER:
return await self._send_msg_formatted(
return await self._send_msgs(
self.bot.send_sticker,
params[ATTR_MESSAGE_TAG],
chat_id=kwargs[ATTR_CHAT_ID],
@@ -776,7 +789,7 @@ class TelegramNotificationService:
)
if file_type == SERVICE_SEND_VIDEO:
return await self._send_msg_formatted(
return await self._send_msgs(
self.bot.send_video,
params[ATTR_MESSAGE_TAG],
chat_id=kwargs[ATTR_CHAT_ID],
@@ -792,7 +805,7 @@ class TelegramNotificationService:
)
if file_type == SERVICE_SEND_DOCUMENT:
return await self._send_msg_formatted(
return await self._send_msgs(
self.bot.send_document,
params[ATTR_MESSAGE_TAG],
chat_id=kwargs[ATTR_CHAT_ID],
@@ -808,7 +821,7 @@ class TelegramNotificationService:
)
if file_type == SERVICE_SEND_VOICE:
return await self._send_msg_formatted(
return await self._send_msgs(
self.bot.send_voice,
params[ATTR_MESSAGE_TAG],
chat_id=kwargs[ATTR_CHAT_ID],
@@ -823,7 +836,7 @@ class TelegramNotificationService:
)
# SERVICE_SEND_ANIMATION
return await self._send_msg_formatted(
return await self._send_msgs(
self.bot.send_animation,
params[ATTR_MESSAGE_TAG],
chat_id=kwargs[ATTR_CHAT_ID],
@@ -848,7 +861,7 @@ class TelegramNotificationService:
stickerid = kwargs.get(ATTR_STICKER_ID)
if stickerid:
return await self._send_msg_formatted(
return await self._send_msgs(
self.bot.send_sticker,
params[ATTR_MESSAGE_TAG],
chat_id=kwargs[ATTR_CHAT_ID],
@@ -873,7 +886,7 @@ class TelegramNotificationService:
latitude = float(latitude)
longitude = float(longitude)
params = self._get_msg_kwargs(kwargs)
return await self._send_msg_formatted(
return await self._send_msgs(
self.bot.send_location,
params[ATTR_MESSAGE_TAG],
chat_id=kwargs[ATTR_CHAT_ID],
@@ -898,7 +911,7 @@ class TelegramNotificationService:
"""Send a poll."""
params = self._get_msg_kwargs(kwargs)
openperiod = kwargs.get(ATTR_OPEN_PERIOD)
return await self._send_msg_formatted(
return await self._send_msgs(
self.bot.send_poll,
params[ATTR_MESSAGE_TAG],
chat_id=kwargs[ATTR_CHAT_ID],

Some files were not shown because too many files have changed in this diff Show More