Mirror of https://github.com/home-assistant/core.git (synced 2026-01-02 12:12:00 +01:00)

Compare commits: dev ... block_pyse (14 commits)
| SHA1 |
|---|
| 21bbabc88e |
| 3a79fb273e |
| 162c27b92c |
| e7e42dc318 |
| 9aa288ed44 |
| 5aacb6e1b8 |
| 1428ce4084 |
| dba07ac90d |
| 264df97069 |
| c3f493394a |
| 7e3e82746f |
| 33724240d7 |
| 998a4eab9e |
| 9985262a53 |
.github/workflows/builder.yml (vendored, 10 changed lines)
@@ -100,7 +100,7 @@ jobs:

- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'
uses: dawidd6/action-download-artifact@0bd50d53a6d7fb5cb921e607957e9cc12b4ce392 # v12
uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11
with:
github_token: ${{secrets.GITHUB_TOKEN}}
repo: home-assistant/frontend

@@ -111,7 +111,7 @@ jobs:

- name: Download nightly wheels of intents
if: needs.init.outputs.channel == 'dev'
uses: dawidd6/action-download-artifact@0bd50d53a6d7fb5cb921e607957e9cc12b4ce392 # v12
uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11
with:
github_token: ${{secrets.GITHUB_TOKEN}}
repo: OHF-Voice/intents-package

@@ -197,7 +197,7 @@ jobs:
cosign-release: "v2.5.3"

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

- name: Build variables
id: vars

@@ -405,7 +405,7 @@ jobs:
type=semver,pattern={{major}}.{{minor}},value=${{ needs.init.outputs.version }},enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.7.1
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.7.1

- name: Copy architecture images to DockerHub
if: matrix.registry == 'docker.io/homeassistant'

@@ -551,7 +551,7 @@ jobs:

- name: Generate artifact attestation
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8 # v3.1.0
uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0
with:
subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
subject-digest: ${{ steps.push.outputs.digest }}
.github/workflows/ci.yaml (vendored, 2 changed lines)
@@ -40,7 +40,7 @@ env:
CACHE_VERSION: 2
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2026.2"
HA_SHORT_VERSION: "2026.1"
DEFAULT_PYTHON: "3.13.11"
ALL_PYTHON_VERSIONS: "['3.13.11', '3.14.2']"
# 10.3 is the oldest supported version
.github/workflows/codeql.yml (vendored, 4 changed lines)
@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

- name: Initialize CodeQL
uses: github/codeql-action/init@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
uses: github/codeql-action/init@1b168cd39490f61582a9beae412bb7057a6b2c4e # v4.31.8
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
uses: github/codeql-action/analyze@1b168cd39490f61582a9beae412bb7057a6b2c4e # v4.31.8
with:
category: "/language:python"
.gitignore (vendored, 1 changed line)
@@ -92,7 +92,6 @@ pip-selfcheck.json
venv
.venv
Pipfile*
uv.lock
share/*
/Scripts/

@@ -567,7 +567,6 @@ homeassistant.components.wake_word.*
homeassistant.components.wallbox.*
homeassistant.components.waqi.*
homeassistant.components.water_heater.*
homeassistant.components.watts.*
homeassistant.components.watttime.*
homeassistant.components.weather.*
homeassistant.components.webhook.*
CODEOWNERS (generated, 20 changed lines)
@@ -516,8 +516,6 @@ build.json @home-assistant/supervisor
/tests/components/fireservicerota/ @cyberjunky
/homeassistant/components/firmata/ @DaAwesomeP
/tests/components/firmata/ @DaAwesomeP
/homeassistant/components/fish_audio/ @noambav
/tests/components/fish_audio/ @noambav
/homeassistant/components/fitbit/ @allenporter
/tests/components/fitbit/ @allenporter
/homeassistant/components/fivem/ @Sander0542

@@ -532,8 +530,6 @@ build.json @home-assistant/supervisor
/tests/components/flo/ @dmulcahey
/homeassistant/components/flume/ @ChrisMandich @bdraco @jeeftor
/tests/components/flume/ @ChrisMandich @bdraco @jeeftor
/homeassistant/components/fluss/ @fluss
/tests/components/fluss/ @fluss
/homeassistant/components/flux_led/ @icemanch
/tests/components/flux_led/ @icemanch
/homeassistant/components/forecast_solar/ @klaasnicolaas @frenck

@@ -668,8 +664,8 @@ build.json @home-assistant/supervisor
/tests/components/heos/ @andrewsayre
/homeassistant/components/here_travel_time/ @eifinger
/tests/components/here_travel_time/ @eifinger
/homeassistant/components/hikvision/ @mezz64 @ptarjan
/tests/components/hikvision/ @mezz64 @ptarjan
/homeassistant/components/hikvision/ @mezz64
/tests/components/hikvision/ @mezz64
/homeassistant/components/hikvisioncam/ @fbradyirl
/homeassistant/components/hisense_aehw4a1/ @bannhead
/tests/components/hisense_aehw4a1/ @bannhead

@@ -798,8 +794,6 @@ build.json @home-assistant/supervisor
/tests/components/intellifire/ @jeeftor
/homeassistant/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
/tests/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
/homeassistant/components/intent_script/ @arturpragacz
/tests/components/intent_script/ @arturpragacz
/homeassistant/components/intesishome/ @jnimmo
/homeassistant/components/iometer/ @jukrebs
/tests/components/iometer/ @jukrebs

@@ -1201,8 +1195,8 @@ build.json @home-assistant/supervisor
/tests/components/ourgroceries/ @OnFreund
/homeassistant/components/overkiz/ @imicknl
/tests/components/overkiz/ @imicknl
/homeassistant/components/overseerr/ @joostlek @AmGarera
/tests/components/overseerr/ @joostlek @AmGarera
/homeassistant/components/overseerr/ @joostlek
/tests/components/overseerr/ @joostlek
/homeassistant/components/ovo_energy/ @timmo001
/tests/components/ovo_energy/ @timmo001
/homeassistant/components/p1_monitor/ @klaasnicolaas

@@ -1699,8 +1693,8 @@ build.json @home-assistant/supervisor
/tests/components/trafikverket_train/ @gjohansson-ST
/homeassistant/components/trafikverket_weatherstation/ @gjohansson-ST
/tests/components/trafikverket_weatherstation/ @gjohansson-ST
/homeassistant/components/transmission/ @engrbm87 @JPHutchins @andrew-codechimp
/tests/components/transmission/ @engrbm87 @JPHutchins @andrew-codechimp
/homeassistant/components/transmission/ @engrbm87 @JPHutchins
/tests/components/transmission/ @engrbm87 @JPHutchins
/homeassistant/components/trend/ @jpbede
/tests/components/trend/ @jpbede
/homeassistant/components/triggercmd/ @rvmey

@@ -1804,8 +1798,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/watergate/ @adam-the-hero
/tests/components/watergate/ @adam-the-hero
/homeassistant/components/watson_tts/ @rutkai
/homeassistant/components/watts/ @theobld-ww @devender-verma-ww @ssi-spyro
/tests/components/watts/ @theobld-ww @devender-verma-ww @ssi-spyro
/homeassistant/components/watttime/ @bachya
/tests/components/watttime/ @bachya
/homeassistant/components/waze_travel_time/ @eifinger
Dockerfile (generated, 2 changed lines)
@@ -24,7 +24,7 @@ ENV \
COPY rootfs /

# Add go2rtc binary
COPY --from=ghcr.io/alexxit/go2rtc@sha256:f394f6329f5389a4c9a7fc54b09fdec9621bbb78bf7a672b973440bbdfb02241 /usr/local/bin/go2rtc /bin/go2rtc
COPY --from=ghcr.io/alexxit/go2rtc@sha256:baef0aa19d759fcfd31607b34ce8eaf039d496282bba57731e6ae326896d7640 /usr/local/bin/go2rtc /bin/go2rtc

RUN \
# Verify go2rtc can be executed
@@ -402,8 +402,6 @@ class AuthManager:
if user.is_owner:
raise ValueError("Unable to deactivate the owner")
await self._store.async_deactivate_user(user)
for refresh_token in list(user.refresh_tokens.values()):
self.async_remove_refresh_token(refresh_token)

async def async_remove_credentials(self, credentials: models.Credentials) -> None:
"""Remove credentials."""
@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["accuweather"],
"requirements": ["accuweather==5.0.0"]
"requirements": ["accuweather==4.2.2"]
}
@@ -18,7 +18,7 @@ from .coordinator import (
ActronAirSystemCoordinator,
)

PLATFORMS = [Platform.CLIMATE, Platform.SWITCH]
PLATFORM = [Platform.CLIMATE]


async def async_setup_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) -> bool:

@@ -50,10 +50,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) ->
system_coordinators=system_coordinators,
)

await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORM)
return True


async def async_unload_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
return await hass.config_entries.async_unload_platforms(entry, PLATFORM)
@@ -15,10 +15,12 @@ from homeassistant.components.climate import (
)
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import ActronAirConfigEntry, ActronAirSystemCoordinator
from .entity import ActronAirAcEntity, ActronAirZoneEntity

PARALLEL_UPDATES = 0

@@ -54,7 +56,8 @@ async def async_setup_entry(

for coordinator in system_coordinators.values():
status = coordinator.data
entities.append(ActronSystemClimate(coordinator))
name = status.ac_system.system_name
entities.append(ActronSystemClimate(coordinator, name))

entities.extend(
ActronZoneClimate(coordinator, zone)

@@ -65,9 +68,10 @@ async def async_setup_entry(
async_add_entities(entities)


class ActronAirClimateEntity(ClimateEntity):
class BaseClimateEntity(CoordinatorEntity[ActronAirSystemCoordinator], ClimateEntity):
"""Base class for Actron Air climate entities."""

_attr_has_entity_name = True
_attr_temperature_unit = UnitOfTemperature.CELSIUS
_attr_supported_features = (
ClimateEntityFeature.TARGET_TEMPERATURE

@@ -79,17 +83,43 @@ class ActronAirClimateEntity(ClimateEntity):
_attr_fan_modes = list(FAN_MODE_MAPPING_ACTRONAIR_TO_HA.values())
_attr_hvac_modes = list(HVAC_MODE_MAPPING_ACTRONAIR_TO_HA.values())

def __init__(
self,
coordinator: ActronAirSystemCoordinator,
name: str,
) -> None:
"""Initialize an Actron Air unit."""
super().__init__(coordinator)
self._serial_number = coordinator.serial_number

class ActronSystemClimate(ActronAirAcEntity, ActronAirClimateEntity):

class ActronSystemClimate(BaseClimateEntity):
"""Representation of the Actron Air system."""

_attr_supported_features = (
ClimateEntityFeature.TARGET_TEMPERATURE
| ClimateEntityFeature.FAN_MODE
| ClimateEntityFeature.TURN_ON
| ClimateEntityFeature.TURN_OFF
)

def __init__(
self,
coordinator: ActronAirSystemCoordinator,
name: str,
) -> None:
"""Initialize an Actron Air unit."""
super().__init__(coordinator)
self._attr_unique_id = self._serial_number
super().__init__(coordinator, name)
serial_number = coordinator.serial_number
self._attr_unique_id = serial_number
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, serial_number)},
name=self._status.ac_system.system_name,
manufacturer="Actron Air",
model_id=self._status.ac_system.master_wc_model,
sw_version=self._status.ac_system.master_wc_firmware_version,
serial_number=serial_number,
)

@property
def min_temp(self) -> float:

@@ -118,7 +148,7 @@ class ActronSystemClimate(ActronAirAcEntity, ActronAirClimateEntity):
@property
def fan_mode(self) -> str | None:
"""Return the current fan mode."""
fan_mode = self._status.user_aircon_settings.base_fan_mode
fan_mode = self._status.user_aircon_settings.fan_mode
return FAN_MODE_MAPPING_ACTRONAIR_TO_HA.get(fan_mode)

@property

@@ -138,7 +168,7 @@ class ActronSystemClimate(ActronAirAcEntity, ActronAirClimateEntity):

async def async_set_fan_mode(self, fan_mode: str) -> None:
"""Set a new fan mode."""
api_fan_mode = FAN_MODE_MAPPING_HA_TO_ACTRONAIR.get(fan_mode)
api_fan_mode = FAN_MODE_MAPPING_HA_TO_ACTRONAIR.get(fan_mode.lower())
await self._status.user_aircon_settings.set_fan_mode(api_fan_mode)

async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:

@@ -152,7 +182,7 @@ class ActronSystemClimate(ActronAirAcEntity, ActronAirClimateEntity):
await self._status.user_aircon_settings.set_temperature(temperature=temp)


class ActronZoneClimate(ActronAirZoneEntity, ActronAirClimateEntity):
class ActronZoneClimate(BaseClimateEntity):
"""Representation of a zone within the Actron Air system."""

_attr_supported_features = (

@@ -167,8 +197,18 @@ class ActronZoneClimate(ActronAirZoneEntity, ActronAirClimateEntity):
zone: ActronAirZone,
) -> None:
"""Initialize an Actron Air unit."""
super().__init__(coordinator, zone)
self._attr_unique_id: str = self._zone_identifier
super().__init__(coordinator, zone.title)
serial_number = coordinator.serial_number
self._zone_id: int = zone.zone_id
self._attr_unique_id: str = f"{serial_number}_zone_{zone.zone_id}"
self._attr_device_info: DeviceInfo = DeviceInfo(
identifiers={(DOMAIN, self._attr_unique_id)},
name=zone.title,
manufacturer="Actron Air",
model="Zone",
suggested_area=zone.title,
via_device=(DOMAIN, serial_number),
)

@property
def min_temp(self) -> float:

@@ -216,4 +256,4 @@ class ActronZoneClimate(ActronAirZoneEntity, ActronAirClimateEntity):

async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set the temperature."""
await self._zone.set_temperature(temperature=kwargs.get(ATTR_TEMPERATURE))
await self._zone.set_temperature(temperature=kwargs["temperature"])
@@ -8,7 +8,6 @@ from datetime import timedelta
from actron_neo_api import (
ActronAirACSystem,
ActronAirAPI,
ActronAirAPIError,
ActronAirAuthError,
ActronAirStatus,
)

@@ -16,7 +15,7 @@ from actron_neo_api import (
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from homeassistant.util import dt as dt_util

from .const import _LOGGER, DOMAIN

@@ -71,12 +70,6 @@ class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirACSystem]):
translation_domain=DOMAIN,
translation_key="auth_error",
) from err
except ActronAirAPIError as err:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="update_error",
translation_placeholders={"error": repr(err)},
) from err

self.status = self.api.state_manager.get_status(self.serial_number)
self.last_seen = dt_util.utcnow()
@@ -1,63 +0,0 @@
"""Base entity classes for Actron Air integration."""

from actron_neo_api import ActronAirZone

from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import ActronAirSystemCoordinator


class ActronAirEntity(CoordinatorEntity[ActronAirSystemCoordinator]):
"""Base class for Actron Air entities."""

_attr_has_entity_name = True

def __init__(self, coordinator: ActronAirSystemCoordinator) -> None:
"""Initialize the entity."""
super().__init__(coordinator)
self._serial_number = coordinator.serial_number

@property
def available(self) -> bool:
"""Return True if entity is available."""
return not self.coordinator.is_device_stale()


class ActronAirAcEntity(ActronAirEntity):
"""Base class for Actron Air entities."""

def __init__(self, coordinator: ActronAirSystemCoordinator) -> None:
"""Initialize the entity."""
super().__init__(coordinator)
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, self._serial_number)},
name=coordinator.data.ac_system.system_name,
manufacturer="Actron Air",
model_id=coordinator.data.ac_system.master_wc_model,
sw_version=coordinator.data.ac_system.master_wc_firmware_version,
serial_number=self._serial_number,
)


class ActronAirZoneEntity(ActronAirEntity):
"""Base class for Actron Air zone entities."""

def __init__(
self,
coordinator: ActronAirSystemCoordinator,
zone: ActronAirZone,
) -> None:
"""Initialize the entity."""
super().__init__(coordinator)
self._zone_id: int = zone.zone_id
self._zone_identifier = f"{self._serial_number}_zone_{zone.zone_id}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, self._zone_identifier)},
name=zone.title,
manufacturer="Actron Air",
model="Zone",
suggested_area=zone.title,
via_device=(DOMAIN, self._serial_number),
)
@@ -1,30 +0,0 @@
{
"entity": {
"switch": {
"away_mode": {
"default": "mdi:home-export-outline",
"state": {
"off": "mdi:home-import-outline"
}
},
"continuous_fan": {
"default": "mdi:fan",
"state": {
"off": "mdi:fan-off"
}
},
"quiet_mode": {
"default": "mdi:volume-low",
"state": {
"off": "mdi:volume-high"
}
},
"turbo_mode": {
"default": "mdi:fan-plus",
"state": {
"off": "mdi:fan"
}
}
}
}
}
@@ -13,5 +13,5 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"quality_scale": "bronze",
"requirements": ["actron-neo-api==0.4.1"]
"requirements": ["actron-neo-api==0.2.0"]
}
@@ -32,28 +32,9 @@
}
}
},
"entity": {
"switch": {
"away_mode": {
"name": "Away mode"
},
"continuous_fan": {
"name": "Continuous fan"
},
"quiet_mode": {
"name": "Quiet mode"
},
"turbo_mode": {
"name": "Turbo mode"
}
}
},
"exceptions": {
"auth_error": {
"message": "Authentication failed, please reauthenticate"
},
"update_error": {
"message": "An error occurred while retrieving data from the Actron Air API: {error}"
}
}
}
@@ -1,102 +0,0 @@
"""Switch platform for Actron Air integration."""

from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from typing import Any

from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import ActronAirConfigEntry, ActronAirSystemCoordinator
from .entity import ActronAirAcEntity

PARALLEL_UPDATES = 0


@dataclass(frozen=True, kw_only=True)
class ActronAirSwitchEntityDescription(SwitchEntityDescription):
"""Class describing Actron Air switch entities."""

is_on_fn: Callable[[ActronAirSystemCoordinator], bool]
set_fn: Callable[[ActronAirSystemCoordinator, bool], Awaitable[None]]
is_supported_fn: Callable[[ActronAirSystemCoordinator], bool] = lambda _: True


SWITCHES: tuple[ActronAirSwitchEntityDescription, ...] = (
ActronAirSwitchEntityDescription(
key="away_mode",
translation_key="away_mode",
is_on_fn=lambda coordinator: coordinator.data.user_aircon_settings.away_mode,
set_fn=lambda coordinator,
enabled: coordinator.data.user_aircon_settings.set_away_mode(enabled),
),
ActronAirSwitchEntityDescription(
key="continuous_fan",
translation_key="continuous_fan",
is_on_fn=lambda coordinator: coordinator.data.user_aircon_settings.continuous_fan_enabled,
set_fn=lambda coordinator,
enabled: coordinator.data.user_aircon_settings.set_continuous_mode(enabled),
),
ActronAirSwitchEntityDescription(
key="quiet_mode",
translation_key="quiet_mode",
is_on_fn=lambda coordinator: coordinator.data.user_aircon_settings.quiet_mode_enabled,
set_fn=lambda coordinator,
enabled: coordinator.data.user_aircon_settings.set_quiet_mode(enabled),
),
ActronAirSwitchEntityDescription(
key="turbo_mode",
translation_key="turbo_mode",
is_on_fn=lambda coordinator: coordinator.data.user_aircon_settings.turbo_enabled,
set_fn=lambda coordinator,
enabled: coordinator.data.user_aircon_settings.set_turbo_mode(enabled),
is_supported_fn=lambda coordinator: coordinator.data.user_aircon_settings.turbo_supported,
),
)


async def async_setup_entry(
hass: HomeAssistant,
entry: ActronAirConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Actron Air switch entities."""
system_coordinators = entry.runtime_data.system_coordinators
async_add_entities(
ActronAirSwitch(coordinator, description)
for coordinator in system_coordinators.values()
for description in SWITCHES
if description.is_supported_fn(coordinator)
)


class ActronAirSwitch(ActronAirAcEntity, SwitchEntity):
"""Actron Air switch."""

_attr_entity_category = EntityCategory.CONFIG
entity_description: ActronAirSwitchEntityDescription

def __init__(
self,
coordinator: ActronAirSystemCoordinator,
description: ActronAirSwitchEntityDescription,
) -> None:
"""Initialize the switch."""
super().__init__(coordinator)
self.entity_description = description
self._attr_unique_id = f"{coordinator.serial_number}_{description.key}"

@property
def is_on(self) -> bool:
"""Return true if the switch is on."""
return self.entity_description.is_on_fn(self.coordinator)

async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the switch on."""
await self.entity_description.set_fn(self.coordinator, True)

async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the switch off."""
await self.entity_description.set_fn(self.coordinator, False)
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/adax",
"iot_class": "local_polling",
"loggers": ["adax", "adax_local"],
"requirements": ["adax==0.4.0", "Adax-local==0.3.0"]
"requirements": ["adax==0.4.0", "Adax-local==0.2.0"]
}
@@ -7,7 +7,7 @@ from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import AirobotConfigEntry, AirobotDataUpdateCoordinator
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.CLIMATE, Platform.NUMBER, Platform.SENSOR]
|
||||
PLATFORMS: list[Platform] = [Platform.CLIMATE, Platform.SENSOR]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: AirobotConfigEntry) -> bool:
|
||||
|
||||
@@ -175,42 +175,6 @@ class AirobotConfigFlow(BaseConfigFlow, domain=DOMAIN):
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||
)
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reconfiguration of the integration."""
|
||||
errors: dict[str, str] = {}
|
||||
reconfigure_entry = self._get_reconfigure_entry()
|
||||
|
||||
if user_input is not None:
|
||||
try:
|
||||
info = await validate_input(self.hass, user_input)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
# Verify the device ID matches the existing config entry
|
||||
await self.async_set_unique_id(info.device_id)
|
||||
self._abort_if_unique_id_mismatch(reason="wrong_device")
|
||||
|
||||
return self.async_update_reload_and_abort(
|
||||
reconfigure_entry,
|
||||
data_updates=user_input,
|
||||
title=info.title,
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
STEP_USER_DATA_SCHEMA, reconfigure_entry.data
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
|
||||
@@ -1,9 +0,0 @@
|
||||
{
|
||||
"entity": {
|
||||
"number": {
|
||||
"hysteresis_band": {
|
||||
"default": "mdi:delta"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,99 +0,0 @@
|
||||
"""Number platform for Airobot thermostat."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from dataclasses import dataclass
|
||||
|
||||
from pyairobotrest.const import HYSTERESIS_BAND_MAX, HYSTERESIS_BAND_MIN
|
||||
from pyairobotrest.exceptions import AirobotError
|
||||
|
||||
from homeassistant.components.number import (
|
||||
NumberDeviceClass,
|
||||
NumberEntity,
|
||||
NumberEntityDescription,
|
||||
)
|
||||
from homeassistant.const import EntityCategory, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import AirobotConfigEntry
|
||||
from .const import DOMAIN
|
||||
from .coordinator import AirobotDataUpdateCoordinator
|
||||
from .entity import AirobotEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class AirobotNumberEntityDescription(NumberEntityDescription):
|
||||
"""Describes Airobot number entity."""
|
||||
|
||||
value_fn: Callable[[AirobotDataUpdateCoordinator], float]
|
||||
set_value_fn: Callable[[AirobotDataUpdateCoordinator, float], Awaitable[None]]
|
||||
|
||||
|
||||
NUMBERS: tuple[AirobotNumberEntityDescription, ...] = (
|
||||
AirobotNumberEntityDescription(
|
||||
key="hysteresis_band",
|
||||
translation_key="hysteresis_band",
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
entity_registry_enabled_default=False,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
native_min_value=HYSTERESIS_BAND_MIN / 10.0,
|
||||
native_max_value=HYSTERESIS_BAND_MAX / 10.0,
|
||||
native_step=0.1,
|
||||
value_fn=lambda coordinator: coordinator.data.settings.hysteresis_band,
|
||||
set_value_fn=lambda coordinator, value: coordinator.client.set_hysteresis_band(
|
||||
value
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: AirobotConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Airobot number platform."""
|
||||
coordinator = entry.runtime_data
|
||||
async_add_entities(
|
||||
AirobotNumber(coordinator, description) for description in NUMBERS
|
||||
)
|
||||
|
||||
|
||||
class AirobotNumber(AirobotEntity, NumberEntity):
|
||||
"""Representation of an Airobot number entity."""
|
||||
|
||||
entity_description: AirobotNumberEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: AirobotDataUpdateCoordinator,
|
||||
description: AirobotNumberEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the number entity."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{coordinator.data.status.device_id}_{description.key}"
|
||||
|
||||
@property
|
||||
def native_value(self) -> float:
|
||||
"""Return the current value."""
|
||||
return self.entity_description.value_fn(self.coordinator)
|
||||
|
||||
async def async_set_native_value(self, value: float) -> None:
|
||||
"""Set the value."""
|
||||
try:
|
||||
await self.entity_description.set_value_fn(self.coordinator, value)
|
||||
except AirobotError as err:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="set_value_failed",
|
||||
translation_placeholders={"error": str(err)},
|
||||
) from err
|
||||
else:
|
||||
await self.coordinator.async_request_refresh()
|
||||
@@ -48,7 +48,7 @@ rules:
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: done
|
||||
docs-use-cases: todo
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: Single device integration, no dynamic device discovery needed.
|
||||
@@ -57,8 +57,8 @@ rules:
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations: done
|
||||
icon-translations: done
|
||||
reconfiguration-flow: done
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: This integration doesn't have any cases where raising an issue is needed.
|
||||
|
||||
@@ -2,9 +2,7 @@
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
|
||||
"wrong_device": "Device ID does not match the existing configuration. Please use the correct device credentials."
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
@@ -30,19 +28,6 @@
|
||||
},
|
||||
"description": "The authentication for Airobot thermostat at {host} (Device ID: {username}) has expired. Please enter the password to reauthenticate. Find the password in the thermostat settings menu under Connectivity → Mobile app."
|
||||
},
|
||||
"reconfigure": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"username": "Device ID"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "[%key:component::airobot::config::step::user::data_description::host%]",
|
||||
"password": "[%key:component::airobot::config::step::user::data_description::password%]",
|
||||
"username": "[%key:component::airobot::config::step::user::data_description::username%]"
|
||||
},
|
||||
"description": "Update your Airobot thermostat connection details. Note: The Device ID must remain the same as the original configuration."
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
@@ -59,11 +44,6 @@
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"number": {
|
||||
"hysteresis_band": {
|
||||
"name": "Hysteresis band"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"air_temperature": {
|
||||
"name": "Air temperature"
|
||||
@@ -94,9 +74,6 @@
|
||||
},
|
||||
"set_temperature_failed": {
|
||||
"message": "Failed to set temperature to {temperature}."
|
||||
},
|
||||
"set_value_failed": {
|
||||
"message": "Failed to set value: {error}"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -88,11 +88,21 @@ class AirPatrolClimate(AirPatrolEntity, ClimateEntity):
|
||||
super().__init__(coordinator, unit_id)
|
||||
self._attr_unique_id = f"{coordinator.config_entry.unique_id}-{unit_id}"
|
||||
|
||||
@property
|
||||
def climate_data(self) -> dict[str, Any]:
|
||||
"""Return the climate data."""
|
||||
return self.device_data.get("climate") or {}
|
||||
|
||||
@property
|
||||
def params(self) -> dict[str, Any]:
|
||||
"""Return the current parameters for the climate entity."""
|
||||
return self.climate_data.get("ParametersData") or {}
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if entity is available."""
|
||||
return super().available and bool(self.climate_data)
|
||||
|
||||
@property
|
||||
def current_humidity(self) -> float | None:
|
||||
"""Return the current humidity."""
|
||||
|
||||
@@ -10,7 +10,7 @@ from homeassistant.const import Platform
|
||||
DOMAIN = "airpatrol"
|
||||
|
||||
LOGGER = logging.getLogger(__package__)
|
||||
PLATFORMS = [Platform.CLIMATE, Platform.SENSOR]
|
||||
PLATFORMS = [Platform.CLIMATE]
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
|
||||
AIRPATROL_ERRORS = (AirPatrolAuthenticationError, AirPatrolError)
|
||||
|
||||
@@ -38,17 +38,7 @@ class AirPatrolEntity(CoordinatorEntity[AirPatrolDataUpdateCoordinator]):
|
||||
"""Return the device data."""
|
||||
return self.coordinator.data[self._unit_id]
|
||||
|
||||
@property
|
||||
def climate_data(self) -> dict[str, Any]:
|
||||
"""Return the climate data for this unit."""
|
||||
return self.device_data["climate"]
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if entity is available."""
|
||||
return (
|
||||
super().available
|
||||
and self._unit_id in self.coordinator.data
|
||||
and "climate" in self.device_data
|
||||
and self.climate_data is not None
|
||||
)
|
||||
return super().available and self._unit_id in self.coordinator.data
|
||||
|
||||
@@ -1,89 +0,0 @@
|
||||
"""Sensors for AirPatrol integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import PERCENTAGE, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import AirPatrolConfigEntry
|
||||
from .coordinator import AirPatrolDataUpdateCoordinator
|
||||
from .entity import AirPatrolEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class AirPatrolSensorEntityDescription(SensorEntityDescription):
|
||||
"""Describes AirPatrol sensor entity."""
|
||||
|
||||
data_field: str
|
||||
|
||||
|
||||
SENSOR_DESCRIPTIONS = (
|
||||
AirPatrolSensorEntityDescription(
|
||||
key="temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
data_field="RoomTemp",
|
||||
),
|
||||
AirPatrolSensorEntityDescription(
|
||||
key="humidity",
|
||||
device_class=SensorDeviceClass.HUMIDITY,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
data_field="RoomHumidity",
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: AirPatrolConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up AirPatrol sensors."""
|
||||
coordinator = config_entry.runtime_data
|
||||
units = coordinator.data
|
||||
|
||||
async_add_entities(
|
||||
AirPatrolSensor(coordinator, unit_id, description)
|
||||
for unit_id, unit in units.items()
|
||||
for description in SENSOR_DESCRIPTIONS
|
||||
if "climate" in unit and unit["climate"] is not None
|
||||
)
|
||||
|
||||
|
||||
class AirPatrolSensor(AirPatrolEntity, SensorEntity):
|
||||
"""AirPatrol sensor entity."""
|
||||
|
||||
entity_description: AirPatrolSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: AirPatrolDataUpdateCoordinator,
|
||||
unit_id: str,
|
||||
description: AirPatrolSensorEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize AirPatrol sensor."""
|
||||
super().__init__(coordinator, unit_id)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = (
|
||||
f"{coordinator.config_entry.unique_id}-{unit_id}-{description.key}"
|
||||
)
|
||||
|
||||
@property
|
||||
def native_value(self) -> float | None:
|
||||
"""Return the state of the sensor."""
|
||||
if value := self.climate_data.get(self.entity_description.data_field):
|
||||
return float(value)
|
||||
return None
|
||||
@@ -45,7 +45,7 @@ def make_entity_state_trigger_required_features(
|
||||
"""Trigger for entity state changes."""
|
||||
|
||||
_domain = domain
|
||||
_to_states = {to_state}
|
||||
_to_state = to_state
|
||||
_required_features = required_features
|
||||
|
||||
return CustomTrigger
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aioamazondevices"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aioamazondevices==11.0.2"]
|
||||
"requirements": ["aioamazondevices==10.0.0"]
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import CookieJar
|
||||
from pyanglianwater import AnglianWater
|
||||
@@ -30,11 +30,14 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
vol.Required(CONF_PASSWORD): selector.TextSelector(
|
||||
selector.TextSelectorConfig(type=selector.TextSelectorType.PASSWORD)
|
||||
),
|
||||
vol.Required(CONF_ACCOUNT_NUMBER): selector.TextSelector(),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def validate_credentials(auth: MSOB2CAuth) -> str | MSOB2CAuth:
|
||||
async def validate_credentials(
|
||||
auth: MSOB2CAuth, account_number: str
|
||||
) -> str | MSOB2CAuth:
|
||||
"""Validate the provided credentials."""
|
||||
try:
|
||||
await auth.send_login_request()
|
||||
@@ -43,33 +46,6 @@ async def validate_credentials(auth: MSOB2CAuth) -> str | MSOB2CAuth:
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
return "unknown"
|
||||
return auth
|
||||
|
||||
|
||||
def humanize_account_data(account: dict) -> str:
|
||||
"""Convert an account data into a human-readable format."""
|
||||
if account["address"]["company_name"] != "":
|
||||
return f"{account['account_number']} - {account['address']['company_name']}"
|
||||
if account["address"]["building_name"] != "":
|
||||
return f"{account['account_number']} - {account['address']['building_name']}"
|
||||
return f"{account['account_number']} - {account['address']['postcode']}"
|
||||
|
||||
|
||||
async def get_accounts(auth: MSOB2CAuth) -> list[selector.SelectOptionDict]:
|
||||
"""Retrieve the list of accounts associated with the authenticated user."""
|
||||
_aw = AnglianWater(authenticator=auth)
|
||||
accounts = await _aw.api.get_associated_accounts()
|
||||
return [
|
||||
selector.SelectOptionDict(
|
||||
value=str(account["account_number"]),
|
||||
label=humanize_account_data(account),
|
||||
)
|
||||
for account in accounts["result"]["active"]
|
||||
]
|
||||
|
||||
|
||||
async def validate_account(auth: MSOB2CAuth, account_number: str) -> str | MSOB2CAuth:
|
||||
"""Validate the provided account number."""
|
||||
_aw = AnglianWater(authenticator=auth)
|
||||
try:
|
||||
await _aw.validate_smart_meter(account_number)
|
||||
@@ -81,91 +57,36 @@ async def validate_account(auth: MSOB2CAuth, account_number: str) -> str | MSOB2
|
||||
class AnglianWaterConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Anglian Water."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the config flow."""
|
||||
self.authenticator: MSOB2CAuth | None = None
|
||||
self.accounts: list[selector.SelectOptionDict] = []
|
||||
self.user_input: dict[str, Any] | None = None
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
self.authenticator = MSOB2CAuth(
|
||||
username=user_input[CONF_USERNAME],
|
||||
password=user_input[CONF_PASSWORD],
|
||||
session=async_create_clientsession(
|
||||
self.hass,
|
||||
cookie_jar=CookieJar(quote_cookie=False),
|
||||
validation_response = await validate_credentials(
|
||||
MSOB2CAuth(
|
||||
username=user_input[CONF_USERNAME],
|
||||
password=user_input[CONF_PASSWORD],
|
||||
session=async_create_clientsession(
|
||||
self.hass,
|
||||
cookie_jar=CookieJar(quote_cookie=False),
|
||||
),
|
||||
),
|
||||
user_input[CONF_ACCOUNT_NUMBER],
|
||||
)
|
||||
validation_response = await validate_credentials(self.authenticator)
|
||||
if isinstance(validation_response, str):
|
||||
errors["base"] = validation_response
|
||||
else:
|
||||
self.accounts = await get_accounts(self.authenticator)
|
||||
if len(self.accounts) > 1:
|
||||
self.user_input = user_input
|
||||
return await self.async_step_select_account()
|
||||
account_number = self.accounts[0]["value"]
|
||||
self.user_input = user_input
|
||||
return await self.async_step_complete(
|
||||
{
|
||||
CONF_ACCOUNT_NUMBER: account_number,
|
||||
}
|
||||
await self.async_set_unique_id(user_input[CONF_ACCOUNT_NUMBER])
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(
|
||||
title=user_input[CONF_ACCOUNT_NUMBER],
|
||||
data={
|
||||
**user_input,
|
||||
CONF_ACCESS_TOKEN: validation_response.refresh_token,
|
||||
},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||
)
|
||||
|
||||
async def async_step_select_account(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the account selection step."""
|
||||
errors = {}
|
||||
if user_input is not None:
|
||||
if TYPE_CHECKING:
|
||||
assert self.authenticator
|
||||
validation_result = await validate_account(
|
||||
self.authenticator,
|
||||
user_input[CONF_ACCOUNT_NUMBER],
|
||||
)
|
||||
if isinstance(validation_result, str):
|
||||
errors["base"] = validation_result
|
||||
else:
|
||||
return await self.async_step_complete(user_input)
|
||||
return self.async_show_form(
|
||||
step_id="select_account",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_ACCOUNT_NUMBER): selector.SelectSelector(
|
||||
selector.SelectSelectorConfig(
|
||||
options=self.accounts,
|
||||
multiple=False,
|
||||
mode=selector.SelectSelectorMode.DROPDOWN,
|
||||
)
|
||||
)
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_complete(self, user_input: dict[str, Any]) -> ConfigFlowResult:
|
||||
"""Handle the final configuration step."""
|
||||
await self.async_set_unique_id(user_input[CONF_ACCOUNT_NUMBER])
|
||||
self._abort_if_unique_id_configured()
|
||||
if TYPE_CHECKING:
|
||||
assert self.authenticator
|
||||
assert self.user_input
|
||||
config_entry_data = {
|
||||
**self.user_input,
|
||||
CONF_ACCOUNT_NUMBER: user_input[CONF_ACCOUNT_NUMBER],
|
||||
CONF_ACCESS_TOKEN: self.authenticator.refresh_token,
|
||||
}
|
||||
return self.async_create_entry(
|
||||
title=user_input[CONF_ACCOUNT_NUMBER],
|
||||
data=config_entry_data,
|
||||
)
|
||||
|
||||
@@ -4,28 +4,13 @@ from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pyanglianwater import AnglianWater
|
||||
from pyanglianwater.exceptions import ExpiredAccessTokenError, UnknownEndpointError
|
||||
|
||||
from homeassistant.components.recorder import get_instance
|
||||
from homeassistant.components.recorder.models import (
|
||||
StatisticData,
|
||||
StatisticMeanType,
|
||||
StatisticMetaData,
|
||||
)
|
||||
from homeassistant.components.recorder.statistics import (
|
||||
async_add_external_statistics,
|
||||
get_last_statistics,
|
||||
statistics_during_period,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import UnitOfVolume
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
from homeassistant.util import dt as dt_util
|
||||
from homeassistant.util.unit_conversion import VolumeConverter
|
||||
|
||||
from .const import CONF_ACCOUNT_NUMBER, DOMAIN
|
||||
|
||||
@@ -59,107 +44,6 @@ class AnglianWaterUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
async def _async_update_data(self) -> None:
|
||||
"""Update data from Anglian Water's API."""
|
||||
try:
|
||||
await self.api.update(self.config_entry.data[CONF_ACCOUNT_NUMBER])
|
||||
await self._insert_statistics()
|
||||
return await self.api.update(self.config_entry.data[CONF_ACCOUNT_NUMBER])
|
||||
except (ExpiredAccessTokenError, UnknownEndpointError) as err:
|
||||
raise UpdateFailed from err
|
||||
|
||||
async def _insert_statistics(self) -> None:
|
||||
"""Insert statistics for water meters into Home Assistant."""
|
||||
for meter in self.api.meters.values():
|
||||
id_prefix = (
|
||||
f"{self.config_entry.data[CONF_ACCOUNT_NUMBER]}_{meter.serial_number}"
|
||||
)
|
||||
usage_statistic_id = f"{DOMAIN}:{id_prefix}_usage".lower()
|
||||
_LOGGER.debug("Updating statistics for meter %s", meter.serial_number)
|
||||
name_prefix = (
|
||||
f"Anglian Water {self.config_entry.data[CONF_ACCOUNT_NUMBER]} "
|
||||
f"{meter.serial_number}"
|
||||
)
|
||||
usage_metadata = StatisticMetaData(
|
||||
mean_type=StatisticMeanType.NONE,
|
||||
has_sum=True,
|
||||
name=f"{name_prefix} Usage",
|
||||
source=DOMAIN,
|
||||
statistic_id=usage_statistic_id,
|
||||
unit_class=VolumeConverter.UNIT_CLASS,
|
||||
unit_of_measurement=UnitOfVolume.CUBIC_METERS,
|
||||
)
|
||||
last_stat = await get_instance(self.hass).async_add_executor_job(
|
||||
get_last_statistics, self.hass, 1, usage_statistic_id, True, set()
|
||||
)
|
||||
if not last_stat:
|
||||
_LOGGER.debug("Updating statistics for the first time")
|
||||
usage_sum = 0.0
|
||||
last_stats_time = None
|
||||
else:
|
||||
if not meter.readings or len(meter.readings) == 0:
|
||||
_LOGGER.debug("No recent usage statistics found, skipping update")
|
||||
continue
|
||||
# Anglian Water stats are hourly, the read_at time is the time that the meter took the reading
|
||||
# We remove 1 hour from this so that the data is shown in the correct hour on the dashboards
|
||||
parsed_read_at = dt_util.parse_datetime(meter.readings[0]["read_at"])
|
||||
if not parsed_read_at:
|
||||
_LOGGER.debug(
|
||||
"Could not parse read_at time %s, skipping update",
|
||||
meter.readings[0]["read_at"],
|
||||
)
|
||||
continue
|
||||
start = dt_util.as_local(parsed_read_at) - timedelta(hours=1)
|
||||
_LOGGER.debug("Getting statistics at %s", start)
|
||||
for end in (start + timedelta(seconds=1), None):
|
||||
stats = await get_instance(self.hass).async_add_executor_job(
|
||||
statistics_during_period,
|
||||
self.hass,
|
||||
start,
|
||||
end,
|
||||
{
|
||||
usage_statistic_id,
|
||||
},
|
||||
"hour",
|
||||
None,
|
||||
{"sum"},
|
||||
)
|
||||
if stats:
|
||||
break
|
||||
if end:
|
||||
_LOGGER.debug(
|
||||
"Not found, trying to find oldest statistic after %s",
|
||||
start,
|
||||
)
|
||||
assert stats
|
||||
|
||||
def _safe_get_sum(records: list[Any]) -> float:
|
||||
if records and "sum" in records[0]:
|
||||
return float(records[0]["sum"])
|
||||
return 0.0
|
||||
|
||||
usage_sum = _safe_get_sum(stats.get(usage_statistic_id, []))
|
||||
last_stats_time = stats[usage_statistic_id][0]["start"]
|
||||
|
||||
usage_statistics = []
|
||||
|
||||
for read in meter.readings:
|
||||
parsed_read_at = dt_util.parse_datetime(read["read_at"])
|
||||
if not parsed_read_at:
|
||||
_LOGGER.debug(
|
||||
"Could not parse read_at time %s, skipping reading",
|
||||
read["read_at"],
|
||||
)
|
||||
continue
|
||||
start = dt_util.as_local(parsed_read_at) - timedelta(hours=1)
|
||||
if last_stats_time is not None and start.timestamp() <= last_stats_time:
|
||||
continue
|
||||
usage_state = max(0, read["consumption"] / 1000)
|
||||
usage_sum = max(0, read["read"])
|
||||
usage_statistics.append(
|
||||
StatisticData(
|
||||
start=start,
|
||||
state=usage_state,
|
||||
sum=usage_sum,
|
||||
)
|
||||
)
|
||||
_LOGGER.debug(
|
||||
"Adding %s statistics for %s", len(usage_statistics), usage_statistic_id
|
||||
)
|
||||
async_add_external_statistics(self.hass, usage_metadata, usage_statistics)
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
{
|
||||
"domain": "anglian_water",
|
||||
"name": "Anglian Water",
|
||||
"after_dependencies": ["recorder"],
|
||||
"codeowners": ["@pantherale0"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/anglian_water",
|
||||
|
||||
@@ -10,21 +10,14 @@
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"step": {
|
||||
"select_account": {
|
||||
"data": {
|
||||
"account_number": "Billing account number"
|
||||
},
|
||||
"data_description": {
|
||||
"account_number": "Select the billing account you wish to use."
|
||||
},
|
||||
"description": "Multiple active billing accounts were found with your credentials. Please select the account you wish to use. If this is unexpected, contact Anglian Water to confirm your active accounts."
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"account_number": "Billing Account Number",
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"username": "[%key:common::config_flow::data::username%]"
|
||||
},
|
||||
"data_description": {
|
||||
"account_number": "Your account number found on your latest bill.",
|
||||
"password": "Your password",
|
||||
"username": "Username or email used to log in to the Anglian Water website."
|
||||
},
|
||||
|
||||
@@ -30,5 +30,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pubnub", "yalexs"],
|
||||
"requirements": ["yalexs==9.2.0", "yalexs-ble==3.2.4"]
|
||||
"requirements": ["yalexs==9.2.0", "yalexs-ble==3.2.2"]
|
||||
}
|
||||
|
||||
@@ -27,7 +27,6 @@ from homeassistant.const import (
|
||||
CONF_EVENT_DATA,
|
||||
CONF_ID,
|
||||
CONF_MODE,
|
||||
CONF_OPTIONS,
|
||||
CONF_PATH,
|
||||
CONF_PLATFORM,
|
||||
CONF_TRIGGERS,
|
||||
@@ -131,13 +130,9 @@ _EXPERIMENTAL_TRIGGER_PLATFORMS = {
|
||||
"cover",
|
||||
"device_tracker",
|
||||
"fan",
|
||||
"humidifier",
|
||||
"lawn_mower",
|
||||
"light",
|
||||
"lock",
|
||||
"media_player",
|
||||
"scene",
|
||||
"siren",
|
||||
"switch",
|
||||
"text",
|
||||
"update",
|
||||
@@ -1219,7 +1214,7 @@ def _trigger_extract_entities(trigger_conf: dict) -> list[str]:
|
||||
return trigger_conf[CONF_ENTITY_ID] # type: ignore[no-any-return]
|
||||
|
||||
if trigger_conf[CONF_PLATFORM] == "calendar":
|
||||
return [trigger_conf[CONF_OPTIONS][CONF_ENTITY_ID]]
|
||||
return [trigger_conf[CONF_ENTITY_ID]]
|
||||
|
||||
if trigger_conf[CONF_PLATFORM] == "zone":
|
||||
return trigger_conf[CONF_ENTITY_ID] + [trigger_conf[CONF_ZONE]] # type: ignore[no-any-return]
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["axis"],
|
||||
"requirements": ["axis==66"],
|
||||
"requirements": ["axis==65"],
|
||||
"ssdp": [
|
||||
{
|
||||
"manufacturer": "AXIS"
|
||||
|
||||
@@ -80,7 +80,7 @@ class AzureDataExplorerClient:
|
||||
def test_connection(self) -> None:
|
||||
"""Test connection, will throw Exception if it cannot connect."""
|
||||
|
||||
query = f"['{self._table}'] | take 1"
|
||||
query = f"{self._table} | take 1"
|
||||
|
||||
self.query_client.execute_query(self._database, query)
|
||||
|
||||
|
||||
@@ -45,7 +45,7 @@ class ADXConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
|
||||
VERSION = 1
|
||||
|
||||
async def validate_input(self, data: dict[str, Any]) -> dict[str, str]:
|
||||
async def validate_input(self, data: dict[str, Any]) -> dict[str, Any] | None:
|
||||
"""Validate the user input allows us to connect.
|
||||
|
||||
Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.
|
||||
@@ -54,40 +54,36 @@ class ADXConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
|
||||
try:
|
||||
await self.hass.async_add_executor_job(client.test_connection)
|
||||
except KustoAuthenticationError as err:
|
||||
_LOGGER.error("Authentication failed: %s", err)
|
||||
|
||||
except KustoAuthenticationError as exp:
|
||||
_LOGGER.error(exp)
|
||||
return {"base": "invalid_auth"}
|
||||
except KustoServiceError as err:
|
||||
_LOGGER.error("Could not connect: %s", err)
|
||||
|
||||
except KustoServiceError as exp:
|
||||
_LOGGER.error(exp)
|
||||
return {"base": "cannot_connect"}
|
||||
|
||||
return {}
|
||||
return None
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
|
||||
errors: dict[str, str] = {}
|
||||
data_schema = STEP_USER_DATA_SCHEMA
|
||||
|
||||
if user_input is not None:
|
||||
errors = await self.validate_input(user_input)
|
||||
errors: dict = {}
|
||||
if user_input:
|
||||
errors = await self.validate_input(user_input) # type: ignore[assignment]
|
||||
if not errors:
|
||||
return self.async_create_entry(
|
||||
data=user_input,
|
||||
title=f"{user_input[CONF_ADX_CLUSTER_INGEST_URI].replace('https://', '')} / {user_input[CONF_ADX_DATABASE_NAME]} ({user_input[CONF_ADX_TABLE_NAME]})",
|
||||
title=user_input[CONF_ADX_CLUSTER_INGEST_URI].replace(
|
||||
"https://", ""
|
||||
),
|
||||
options=DEFAULT_OPTIONS,
|
||||
)
|
||||
|
||||
# Keep previously entered values when we re-show the form after an error.
|
||||
data_schema = self.add_suggested_values_to_schema(
|
||||
STEP_USER_DATA_SCHEMA, user_input
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=data_schema,
|
||||
data_schema=STEP_USER_DATA_SCHEMA,
|
||||
errors=errors,
|
||||
last_step=True,
|
||||
)
|
||||
|
||||
@@ -20,7 +20,6 @@
|
||||
"use_queued_ingestion": "Use queued ingestion"
|
||||
},
|
||||
"data_description": {
|
||||
"authority_id": "In Azure portal this is also known as Directory (tenant) ID",
|
||||
"cluster_ingest_uri": "Ingestion URI of the cluster",
|
||||
"use_queued_ingestion": "Must be enabled when using ADX free cluster"
|
||||
},
|
||||
|
||||
@@ -6,15 +6,13 @@ from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from b2sdk.v2 import Bucket, exception
|
||||
from b2sdk.v2 import B2Api, Bucket, InMemoryAccountInfo, exception
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers.event import async_track_time_interval
|
||||
|
||||
# Import from b2_client to ensure timeout configuration is applied
|
||||
from .b2_client import B2Api, InMemoryAccountInfo
|
||||
from .const import (
|
||||
BACKBLAZE_REALM,
|
||||
CONF_APPLICATION_KEY,
|
||||
@@ -74,11 +72,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: BackblazeConfigEntry) ->
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_bucket_name",
|
||||
) from err
|
||||
except (
|
||||
exception.B2ConnectionError,
|
||||
exception.B2RequestTimeout,
|
||||
exception.ConnectionReset,
|
||||
) as err:
|
||||
except exception.ConnectionReset as err:
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="cannot_connect",
|
||||
|
||||
@@ -1,39 +0,0 @@
|
||||
"""Backblaze B2 client with extended timeouts.
|
||||
|
||||
The b2sdk library uses class-level timeout attributes. To avoid modifying
|
||||
global library state, we subclass the relevant classes to provide extended
|
||||
timeouts suitable for backup operations involving large files.
|
||||
"""
|
||||
|
||||
from b2sdk.v2 import B2Api as BaseB2Api, InMemoryAccountInfo
|
||||
from b2sdk.v2.b2http import B2Http as BaseB2Http
|
||||
from b2sdk.v2.session import B2Session as BaseB2Session
|
||||
|
||||
# Extended timeouts for Home Assistant backup operations
|
||||
# Default CONNECTION_TIMEOUT is 46 seconds, which can be too short for slow connections
|
||||
CONNECTION_TIMEOUT = 120 # 2 minutes
|
||||
|
||||
# Default TIMEOUT_FOR_UPLOAD is 128 seconds, which is too short for large backups
|
||||
TIMEOUT_FOR_UPLOAD = 43200 # 12 hours
|
||||
|
||||
|
||||
class B2Http(BaseB2Http): # type: ignore[misc]
|
||||
"""B2Http with extended timeouts for backup operations."""
|
||||
|
||||
CONNECTION_TIMEOUT = CONNECTION_TIMEOUT
|
||||
TIMEOUT_FOR_UPLOAD = TIMEOUT_FOR_UPLOAD
|
||||
|
||||
|
||||
class B2Session(BaseB2Session): # type: ignore[misc]
|
||||
"""B2Session using custom B2Http with extended timeouts."""
|
||||
|
||||
B2HTTP_CLASS = B2Http
|
||||
|
||||
|
||||
class B2Api(BaseB2Api): # type: ignore[misc]
|
||||
"""B2Api using custom session with extended timeouts."""
|
||||
|
||||
SESSION_CLASS = B2Session
|
||||
|
||||
|
||||
__all__ = ["B2Api", "InMemoryAccountInfo"]
|
||||
@@ -6,7 +6,7 @@ from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from b2sdk.v2 import exception
|
||||
from b2sdk.v2 import B2Api, InMemoryAccountInfo, exception
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult
|
||||
@@ -17,8 +17,6 @@ from homeassistant.helpers.selector import (
|
||||
TextSelectorType,
|
||||
)
|
||||
|
||||
# Import from b2_client to ensure timeout configuration is applied
|
||||
from .b2_client import B2Api, InMemoryAccountInfo
|
||||
from .const import (
|
||||
BACKBLAZE_REALM,
|
||||
CONF_APPLICATION_KEY,
|
||||
@@ -174,12 +172,8 @@ class BackblazeConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"Backblaze B2 bucket '%s' does not exist", user_input[CONF_BUCKET]
|
||||
)
|
||||
errors[CONF_BUCKET] = "invalid_bucket_name"
|
||||
except (
|
||||
exception.B2ConnectionError,
|
||||
exception.B2RequestTimeout,
|
||||
exception.ConnectionReset,
|
||||
) as err:
|
||||
_LOGGER.error("Failed to connect to Backblaze B2: %s", err)
|
||||
except exception.ConnectionReset:
|
||||
_LOGGER.error("Failed to connect to Backblaze B2. Connection reset")
|
||||
errors["base"] = "cannot_connect"
|
||||
except exception.MissingAccountData:
|
||||
# This generally indicates an issue with how InMemoryAccountInfo is used
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["b2sdk"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["b2sdk==2.10.1"]
|
||||
"requirements": ["b2sdk==2.8.1"]
|
||||
}
|
||||
|
||||
@@ -12,7 +12,7 @@ from homeassistant.helpers import entity_registry as er
|
||||
|
||||
from . import BeoConfigEntry
|
||||
from .const import DOMAIN
|
||||
from .util import get_device_buttons, get_remote_keys, get_remotes
|
||||
from .util import get_device_buttons
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
@@ -53,23 +53,4 @@ async def async_get_config_entry_diagnostics(
|
||||
state_dict.pop("context")
|
||||
data[f"{device_button}_event"] = state_dict
|
||||
|
||||
# Get remotes
|
||||
for remote in await get_remotes(config_entry.runtime_data.client):
|
||||
# Get key Event entity states (if enabled)
|
||||
for key_type in get_remote_keys():
|
||||
if entity_id := entity_registry.async_get_entity_id(
|
||||
EVENT_DOMAIN,
|
||||
DOMAIN,
|
||||
f"{remote.serial_number}_{config_entry.unique_id}_{key_type}",
|
||||
):
|
||||
if state := hass.states.get(entity_id):
|
||||
state_dict = dict(state.as_dict())
|
||||
|
||||
# Remove context as it is not relevant
|
||||
state_dict.pop("context")
|
||||
data[f"remote_{remote.serial_number}_{key_type}_event"] = state_dict
|
||||
|
||||
# Add remote Mozart model
|
||||
data[f"remote_{remote.serial_number}"] = dict(remote)
|
||||
|
||||
return data
|
||||
|
||||
@@ -16,7 +16,11 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import BeoConfigEntry
|
||||
from .const import (
|
||||
BEO_REMOTE_CONTROL_KEYS,
|
||||
BEO_REMOTE_KEY_EVENTS,
|
||||
BEO_REMOTE_KEYS,
|
||||
BEO_REMOTE_SUBMENU_CONTROL,
|
||||
BEO_REMOTE_SUBMENU_LIGHT,
|
||||
CONNECTION_STATUS,
|
||||
DEVICE_BUTTON_EVENTS,
|
||||
DOMAIN,
|
||||
@@ -25,7 +29,7 @@ from .const import (
|
||||
WebsocketNotification,
|
||||
)
|
||||
from .entity import BeoEntity
|
||||
from .util import get_device_buttons, get_remote_keys, get_remotes
|
||||
from .util import get_device_buttons, get_remotes
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
@@ -36,19 +40,38 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Event entities from config entry."""
|
||||
entities: list[BeoEvent] = [
|
||||
entities: list[BeoEvent] = []
|
||||
|
||||
async_add_entities(
|
||||
BeoButtonEvent(config_entry, button_type)
|
||||
for button_type in get_device_buttons(config_entry.data[CONF_MODEL])
|
||||
]
|
||||
)
|
||||
|
||||
# Check for connected Beoremote One
|
||||
remotes = await get_remotes(config_entry.runtime_data.client)
|
||||
|
||||
for remote in remotes:
|
||||
# Add Light keys
|
||||
entities.extend(
|
||||
[
|
||||
BeoRemoteKeyEvent(config_entry, remote, key_type)
|
||||
for key_type in get_remote_keys()
|
||||
BeoRemoteKeyEvent(
|
||||
config_entry,
|
||||
remote,
|
||||
f"{BEO_REMOTE_SUBMENU_LIGHT}/{key_type}",
|
||||
)
|
||||
for key_type in BEO_REMOTE_KEYS
|
||||
]
|
||||
)
|
||||
|
||||
# Add Control keys
|
||||
entities.extend(
|
||||
[
|
||||
BeoRemoteKeyEvent(
|
||||
config_entry,
|
||||
remote,
|
||||
f"{BEO_REMOTE_SUBMENU_CONTROL}/{key_type}",
|
||||
)
|
||||
for key_type in (*BEO_REMOTE_KEYS, *BEO_REMOTE_CONTROL_KEYS)
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
@@ -11,16 +11,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.device_registry import DeviceEntry
|
||||
|
||||
from .const import (
|
||||
BEO_REMOTE_CONTROL_KEYS,
|
||||
BEO_REMOTE_KEYS,
|
||||
BEO_REMOTE_SUBMENU_CONTROL,
|
||||
BEO_REMOTE_SUBMENU_LIGHT,
|
||||
DEVICE_BUTTONS,
|
||||
DOMAIN,
|
||||
BeoButtons,
|
||||
BeoModel,
|
||||
)
|
||||
from .const import DEVICE_BUTTONS, DOMAIN, BeoButtons, BeoModel
|
||||
|
||||
|
||||
def get_device(hass: HomeAssistant, unique_id: str) -> DeviceEntry:
|
||||
@@ -73,14 +64,3 @@ def get_device_buttons(model: BeoModel) -> list[str]:
|
||||
buttons.remove(BeoButtons.BLUETOOTH)
|
||||
|
||||
return buttons
|
||||
|
||||
|
||||
def get_remote_keys() -> list[str]:
|
||||
"""Get remote keys for the Beoremote One. Formatted for Home Assistant use."""
|
||||
return [
|
||||
*[f"{BEO_REMOTE_SUBMENU_LIGHT}/{key_type}" for key_type in BEO_REMOTE_KEYS],
|
||||
*[
|
||||
f"{BEO_REMOTE_SUBMENU_CONTROL}/{key_type}"
|
||||
for key_type in (*BEO_REMOTE_KEYS, *BEO_REMOTE_CONTROL_KEYS)
|
||||
],
|
||||
]
|
||||
|
||||
@@ -47,7 +47,7 @@ def make_binary_sensor_trigger(
|
||||
"""Trigger for entity state changes."""
|
||||
|
||||
_device_class = device_class
|
||||
_to_states = {to_state}
|
||||
_to_state = to_state
|
||||
|
||||
return CustomTrigger
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
"domain": "blackbird",
|
||||
"name": "Monoprice Blackbird Matrix Switch",
|
||||
"codeowners": [],
|
||||
"disabled": "This integration is disabled because it references pyserial-asyncio, which does blocking I/O in the asyncio loop and is not maintained.",
|
||||
"documentation": "https://www.home-assistant.io/integrations/blackbird",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pyblackbird"],
|
||||
|
||||
@@ -2,25 +2,16 @@
|
||||
|
||||
from pyblu import Player
|
||||
from pyblu.errors import PlayerUnreachableError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER_DOMAIN
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, CONF_PORT, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import config_validation as cv, service
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import (
|
||||
ATTR_MASTER,
|
||||
DOMAIN,
|
||||
SERVICE_CLEAR_TIMER,
|
||||
SERVICE_JOIN,
|
||||
SERVICE_SET_TIMER,
|
||||
SERVICE_UNJOIN,
|
||||
)
|
||||
from .const import DOMAIN
|
||||
from .coordinator import (
|
||||
BluesoundConfigEntry,
|
||||
BluesoundCoordinator,
|
||||
@@ -37,38 +28,6 @@ PLATFORMS = [
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Bluesound."""
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_SET_TIMER,
|
||||
entity_domain=MEDIA_PLAYER_DOMAIN,
|
||||
schema=None,
|
||||
func="async_increase_timer",
|
||||
)
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_CLEAR_TIMER,
|
||||
entity_domain=MEDIA_PLAYER_DOMAIN,
|
||||
schema=None,
|
||||
func="async_clear_timer",
|
||||
)
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_JOIN,
|
||||
entity_domain=MEDIA_PLAYER_DOMAIN,
|
||||
schema={vol.Required(ATTR_MASTER): cv.entity_id},
|
||||
func="async_bluesound_join",
|
||||
)
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_UNJOIN,
|
||||
entity_domain=MEDIA_PLAYER_DOMAIN,
|
||||
schema=None,
|
||||
func="async_bluesound_unjoin",
|
||||
)
|
||||
return True
|
||||
|
||||
|
||||
|
||||
@@ -4,8 +4,3 @@ DOMAIN = "bluesound"
|
||||
INTEGRATION_TITLE = "Bluesound"
|
||||
ATTR_BLUESOUND_GROUP = "bluesound_group"
|
||||
ATTR_MASTER = "master"
|
||||
|
||||
SERVICE_CLEAR_TIMER = "clear_sleep_timer"
|
||||
SERVICE_JOIN = "join"
|
||||
SERVICE_SET_TIMER = "set_sleep_timer"
|
||||
SERVICE_UNJOIN = "unjoin"
|
||||
|
||||
@@ -8,6 +8,7 @@ import logging
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from pyblu import Input, Player, Preset, Status, SyncStatus
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import media_source
|
||||
from homeassistant.components.media_player import (
|
||||
@@ -21,7 +22,11 @@ from homeassistant.components.media_player import (
|
||||
from homeassistant.const import CONF_HOST, CONF_PORT
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import entity_registry as er, issue_registry as ir
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
entity_platform,
|
||||
issue_registry as ir,
|
||||
)
|
||||
from homeassistant.helpers.device_registry import (
|
||||
CONNECTION_NETWORK_MAC,
|
||||
DeviceInfo,
|
||||
@@ -35,22 +40,9 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
from homeassistant.util import dt as dt_util, slugify
|
||||
|
||||
from .const import (
|
||||
ATTR_BLUESOUND_GROUP,
|
||||
ATTR_MASTER,
|
||||
DOMAIN,
|
||||
SERVICE_CLEAR_TIMER,
|
||||
SERVICE_JOIN,
|
||||
SERVICE_SET_TIMER,
|
||||
SERVICE_UNJOIN,
|
||||
)
|
||||
from .const import ATTR_BLUESOUND_GROUP, ATTR_MASTER, DOMAIN
|
||||
from .coordinator import BluesoundCoordinator
|
||||
from .utils import (
|
||||
dispatcher_join_signal,
|
||||
dispatcher_unjoin_signal,
|
||||
format_unique_id,
|
||||
id_to_paired_player,
|
||||
)
|
||||
from .utils import dispatcher_join_signal, dispatcher_unjoin_signal, format_unique_id
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from . import BluesoundConfigEntry
|
||||
@@ -62,6 +54,11 @@ SCAN_INTERVAL = timedelta(minutes=15)
|
||||
DATA_BLUESOUND = DOMAIN
|
||||
DEFAULT_PORT = 11000
|
||||
|
||||
SERVICE_CLEAR_TIMER = "clear_sleep_timer"
|
||||
SERVICE_JOIN = "join"
|
||||
SERVICE_SET_TIMER = "set_sleep_timer"
|
||||
SERVICE_UNJOIN = "unjoin"
|
||||
|
||||
POLL_TIMEOUT = 120
|
||||
|
||||
|
||||
@@ -78,6 +75,18 @@ async def async_setup_entry(
|
||||
config_entry.runtime_data.player,
|
||||
)
|
||||
|
||||
platform = entity_platform.async_get_current_platform()
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_SET_TIMER, None, "async_increase_timer"
|
||||
)
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_CLEAR_TIMER, None, "async_clear_timer"
|
||||
)
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_JOIN, {vol.Required(ATTR_MASTER): cv.entity_id}, "async_join"
|
||||
)
|
||||
platform.async_register_entity_service(SERVICE_UNJOIN, None, "async_unjoin")
|
||||
|
||||
async_add_entities([bluesound_player], update_before_add=True)
|
||||
|
||||
|
||||
@@ -111,7 +120,6 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
|
||||
self._presets: list[Preset] = coordinator.data.presets
|
||||
self._group_name: str | None = None
|
||||
self._group_list: list[str] = []
|
||||
self._group_members: list[str] | None = None
|
||||
self._bluesound_device_name = sync_status.name
|
||||
self._player = player
|
||||
self._last_status_update = dt_util.utcnow()
|
||||
@@ -172,7 +180,6 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
|
||||
self._last_status_update = dt_util.utcnow()
|
||||
|
||||
self._group_list = self.rebuild_bluesound_group()
|
||||
self._group_members = self.rebuild_group_members()
|
||||
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -358,13 +365,11 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
|
||||
MediaPlayerEntityFeature.VOLUME_STEP
|
||||
| MediaPlayerEntityFeature.VOLUME_SET
|
||||
| MediaPlayerEntityFeature.VOLUME_MUTE
|
||||
| MediaPlayerEntityFeature.GROUPING
|
||||
)
|
||||
|
||||
supported = (
|
||||
MediaPlayerEntityFeature.CLEAR_PLAYLIST
|
||||
| MediaPlayerEntityFeature.BROWSE_MEDIA
|
||||
| MediaPlayerEntityFeature.GROUPING
|
||||
)
|
||||
|
||||
if not self._status.indexing:
|
||||
@@ -416,57 +421,8 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
|
||||
|
||||
return shuffle
|
||||
|
||||
@property
|
||||
def group_members(self) -> list[str] | None:
|
||||
"""Get list of group members. Leader is always first."""
|
||||
return self._group_members
|
||||
|
||||
async def async_join_players(self, group_members: list[str]) -> None:
|
||||
"""Join `group_members` as a player group with the current player."""
|
||||
if self.entity_id in group_members:
|
||||
raise ServiceValidationError("Cannot join player to itself")
|
||||
|
||||
entity_ids_with_sync_status = self._entity_ids_with_sync_status()
|
||||
|
||||
paired_players = []
|
||||
for group_member in group_members:
|
||||
sync_status = entity_ids_with_sync_status.get(group_member)
|
||||
if sync_status is None:
|
||||
continue
|
||||
paired_player = id_to_paired_player(sync_status.id)
|
||||
if paired_player:
|
||||
paired_players.append(paired_player)
|
||||
|
||||
if paired_players:
|
||||
await self._player.add_followers(paired_players)
|
||||
|
||||
async def async_unjoin_player(self) -> None:
|
||||
"""Remove this player from any group."""
|
||||
if self._sync_status.leader is not None:
|
||||
leader_id = f"{self._sync_status.leader.ip}:{self._sync_status.leader.port}"
|
||||
async_dispatcher_send(
|
||||
self.hass, dispatcher_unjoin_signal(leader_id), self.host, self.port
|
||||
)
|
||||
|
||||
if self._sync_status.followers is not None:
|
||||
await self._player.remove_follower(self.host, self.port)
|
||||
|
||||
async def async_bluesound_join(self, master: str) -> None:
|
||||
async def async_join(self, master: str) -> None:
|
||||
"""Join the player to a group."""
|
||||
ir.async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
f"deprecated_service_{SERVICE_JOIN}",
|
||||
is_fixable=False,
|
||||
breaks_in_ha_version="2026.7.0",
|
||||
issue_domain=DOMAIN,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="deprecated_service_join",
|
||||
translation_placeholders={
|
||||
"name": slugify(self.sync_status.name),
|
||||
},
|
||||
)
|
||||
|
||||
if master == self.entity_id:
|
||||
raise ServiceValidationError("Cannot join player to itself")
|
||||
|
||||
@@ -475,23 +431,17 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
|
||||
self.hass, dispatcher_join_signal(master), self.host, self.port
|
||||
)
|
||||
|
||||
async def async_bluesound_unjoin(self) -> None:
|
||||
async def async_unjoin(self) -> None:
|
||||
"""Unjoin the player from a group."""
|
||||
ir.async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
f"deprecated_service_{SERVICE_UNJOIN}",
|
||||
is_fixable=False,
|
||||
breaks_in_ha_version="2026.7.0",
|
||||
issue_domain=DOMAIN,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="deprecated_service_unjoin",
|
||||
translation_placeholders={
|
||||
"name": slugify(self.sync_status.name),
|
||||
},
|
||||
)
|
||||
if self._sync_status.leader is None:
|
||||
return
|
||||
|
||||
await self.async_unjoin_player()
|
||||
leader_id = f"{self._sync_status.leader.ip}:{self._sync_status.leader.port}"
|
||||
|
||||
_LOGGER.debug("Trying to unjoin player: %s", self.id)
|
||||
async_dispatcher_send(
|
||||
self.hass, dispatcher_unjoin_signal(leader_id), self.host, self.port
|
||||
)
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> dict[str, Any] | None:
|
||||
@@ -538,63 +488,6 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
|
||||
follower_names.insert(0, leader_sync_status.name)
|
||||
return follower_names
|
||||
|
||||
def rebuild_group_members(self) -> list[str] | None:
|
||||
"""Get list of group members. Leader is always first."""
|
||||
if self.sync_status.leader is None and self.sync_status.followers is None:
|
||||
return None
|
||||
|
||||
entity_ids_with_sync_status = self._entity_ids_with_sync_status()
|
||||
|
||||
leader_entity_id = None
|
||||
followers = None
|
||||
if self.sync_status.followers is not None:
|
||||
leader_entity_id = self.entity_id
|
||||
followers = self.sync_status.followers
|
||||
elif self.sync_status.leader is not None:
|
||||
leader_id = f"{self.sync_status.leader.ip}:{self.sync_status.leader.port}"
|
||||
for entity_id, sync_status in entity_ids_with_sync_status.items():
|
||||
if sync_status.id == leader_id:
|
||||
leader_entity_id = entity_id
|
||||
followers = sync_status.followers
|
||||
break
|
||||
|
||||
if leader_entity_id is None or followers is None:
|
||||
return None
|
||||
|
||||
grouped_entity_ids = [leader_entity_id]
|
||||
for follower in followers:
|
||||
follower_id = f"{follower.ip}:{follower.port}"
|
||||
entity_ids = [
|
||||
entity_id
|
||||
for entity_id, sync_status in entity_ids_with_sync_status.items()
|
||||
if sync_status.id == follower_id
|
||||
]
|
||||
match entity_ids:
|
||||
case [entity_id]:
|
||||
grouped_entity_ids.append(entity_id)
|
||||
|
||||
return grouped_entity_ids
|
||||
|
||||
def _entity_ids_with_sync_status(self) -> dict[str, SyncStatus]:
|
||||
result = {}
|
||||
|
||||
entity_registry = er.async_get(self.hass)
|
||||
|
||||
config_entries: list[BluesoundConfigEntry] = (
|
||||
self.hass.config_entries.async_entries(DOMAIN)
|
||||
)
|
||||
for config_entry in config_entries:
|
||||
entity_entries = er.async_entries_for_config_entry(
|
||||
entity_registry, config_entry.entry_id
|
||||
)
|
||||
for entity_entry in entity_entries:
|
||||
if entity_entry.domain == "media_player":
|
||||
result[entity_entry.entity_id] = (
|
||||
config_entry.runtime_data.coordinator.data.sync_status
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
async def async_add_follower(self, host: str, port: int) -> None:
|
||||
"""Add follower to leader."""
|
||||
await self._player.add_follower(host, port)
|
||||
|
||||
@@ -41,17 +41,9 @@
|
||||
"description": "Use `button.{name}_clear_sleep_timer` instead.\n\nPlease replace this action and adjust your automations and scripts.",
|
||||
"title": "Detected use of deprecated action bluesound.clear_sleep_timer"
|
||||
},
|
||||
"deprecated_service_join": {
|
||||
"description": "Use the `media_player.join` action instead.\n\nPlease replace this action and adjust your automations and scripts.",
|
||||
"title": "Detected use of deprecated action bluesound.join"
|
||||
},
|
||||
"deprecated_service_set_sleep_timer": {
|
||||
"description": "Use `button.{name}_set_sleep_timer` instead.\n\nPlease replace this action and adjust your automations and scripts.",
|
||||
"title": "Detected use of deprecated action bluesound.set_sleep_timer"
|
||||
},
|
||||
"deprecated_service_unjoin": {
|
||||
"description": "Use the `media_player.unjoin` action instead.\n\nPlease replace this action and adjust your automations and scripts.",
|
||||
"title": "Detected use of deprecated action bluesound.unjoin"
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
"""Utility functions for the Bluesound component."""
|
||||
|
||||
from pyblu import PairedPlayer
|
||||
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
|
||||
|
||||
@@ -21,12 +19,3 @@ def dispatcher_unjoin_signal(leader_id: str) -> str:
|
||||
Id is ip_address:port. This can be obtained from sync_status.id.
|
||||
"""
|
||||
return f"bluesound_unjoin_{leader_id}"
|
||||
|
||||
|
||||
def id_to_paired_player(id: str) -> PairedPlayer | None:
|
||||
"""Try to convert id in format 'ip:port' to PairedPlayer. Returns None if unable to do so."""
|
||||
match id.rsplit(":", 1):
|
||||
case [str() as ip, str() as port] if port.isdigit():
|
||||
return PairedPlayer(ip, int(port))
|
||||
case _:
|
||||
return None
|
||||
|
||||
@@ -27,18 +27,13 @@ from homeassistant.exceptions import (
|
||||
ConfigEntryError,
|
||||
ConfigEntryNotReady,
|
||||
)
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import CONF_PASSKEY, DOMAIN
|
||||
from .coordinator import BSBLanFastCoordinator, BSBLanSlowCoordinator
|
||||
from .services import async_setup_services
|
||||
|
||||
PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, Platform.WATER_HEATER]
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
type BSBLanConfigEntry = ConfigEntry[BSBLanData]
|
||||
|
||||
|
||||
@@ -54,12 +49,6 @@ class BSBLanData:
|
||||
static: StaticState
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the BSB-Lan integration."""
|
||||
async_setup_services(hass)
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: BSBLanConfigEntry) -> bool:
|
||||
"""Set up BSB-Lan from a config entry."""
|
||||
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
{
|
||||
"services": {
|
||||
"set_hot_water_schedule": {
|
||||
"service": "mdi:calendar-clock"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,214 +0,0 @@
|
||||
"""Support for BSB-Lan services."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import time
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from bsblan import BSBLANError, DaySchedule, DHWSchedule, TimeSlot
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from . import BSBLanConfigEntry
|
||||
|
||||
LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTR_DEVICE_ID = "device_id"
|
||||
ATTR_MONDAY_SLOTS = "monday_slots"
|
||||
ATTR_TUESDAY_SLOTS = "tuesday_slots"
|
||||
ATTR_WEDNESDAY_SLOTS = "wednesday_slots"
|
||||
ATTR_THURSDAY_SLOTS = "thursday_slots"
|
||||
ATTR_FRIDAY_SLOTS = "friday_slots"
|
||||
ATTR_SATURDAY_SLOTS = "saturday_slots"
|
||||
ATTR_SUNDAY_SLOTS = "sunday_slots"
|
||||
|
||||
# Service name
|
||||
SERVICE_SET_HOT_WATER_SCHEDULE = "set_hot_water_schedule"
|
||||
|
||||
|
||||
# Schema for a single time slot
|
||||
_SLOT_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required("start_time"): cv.time,
|
||||
vol.Required("end_time"): cv.time,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
SERVICE_SET_HOT_WATER_SCHEDULE_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_DEVICE_ID): cv.string,
|
||||
vol.Optional(ATTR_MONDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
|
||||
vol.Optional(ATTR_TUESDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
|
||||
vol.Optional(ATTR_WEDNESDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
|
||||
vol.Optional(ATTR_THURSDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
|
||||
vol.Optional(ATTR_FRIDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
|
||||
vol.Optional(ATTR_SATURDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
|
||||
vol.Optional(ATTR_SUNDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def _convert_time_slots_to_day_schedule(
|
||||
slots: list[dict[str, time]] | None,
|
||||
) -> DaySchedule | None:
|
||||
"""Convert list of time slot dicts to a DaySchedule object.
|
||||
|
||||
Example: [{"start_time": time(6, 0), "end_time": time(8, 0)},
|
||||
{"start_time": time(17, 0), "end_time": time(21, 0)}]
|
||||
becomes: DaySchedule with two TimeSlot objects
|
||||
|
||||
None returns None (don't modify this day).
|
||||
Empty list returns DaySchedule with empty slots (clear this day).
|
||||
"""
|
||||
if slots is None:
|
||||
return None
|
||||
|
||||
if not slots:
|
||||
return DaySchedule(slots=[])
|
||||
|
||||
time_slots = []
|
||||
for slot in slots:
|
||||
start_time = slot["start_time"]
|
||||
end_time = slot["end_time"]
|
||||
|
||||
# Validate that end time is after start time
|
||||
if end_time <= start_time:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="end_time_before_start_time",
|
||||
translation_placeholders={
|
||||
"start_time": start_time.strftime("%H:%M"),
|
||||
"end_time": end_time.strftime("%H:%M"),
|
||||
},
|
||||
)
|
||||
|
||||
time_slots.append(TimeSlot(start=start_time, end=end_time))
|
||||
LOGGER.debug(
|
||||
"Created time slot: %s-%s",
|
||||
start_time.strftime("%H:%M"),
|
||||
end_time.strftime("%H:%M"),
|
||||
)
|
||||
|
||||
LOGGER.debug("Created DaySchedule with %d slots", len(time_slots))
|
||||
return DaySchedule(slots=time_slots)
|
||||
|
||||
|
||||
async def set_hot_water_schedule(service_call: ServiceCall) -> None:
|
||||
"""Set hot water heating schedule."""
|
||||
device_id = service_call.data[ATTR_DEVICE_ID]
|
||||
|
||||
# Get the device and config entry
|
||||
device_registry = dr.async_get(service_call.hass)
|
||||
device_entry = device_registry.async_get(device_id)
|
||||
|
||||
if device_entry is None:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_device_id",
|
||||
translation_placeholders={"device_id": device_id},
|
||||
)
|
||||
|
||||
# Find the config entry for this device
|
||||
matching_entries: list[BSBLanConfigEntry] = [
|
||||
entry
|
||||
for entry in service_call.hass.config_entries.async_entries(DOMAIN)
|
||||
if entry.entry_id in device_entry.config_entries
|
||||
]
|
||||
|
||||
if not matching_entries:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="no_config_entry_for_device",
|
||||
translation_placeholders={"device_id": device_entry.name or device_id},
|
||||
)
|
||||
|
||||
entry = matching_entries[0]
|
||||
|
||||
# Verify the config entry is loaded
|
||||
if entry.state is not ConfigEntryState.LOADED:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="config_entry_not_loaded",
|
||||
translation_placeholders={"device_name": device_entry.name or device_id},
|
||||
)
|
||||
|
||||
client = entry.runtime_data.client
|
||||
|
||||
# Convert time slots to DaySchedule objects
|
||||
monday = _convert_time_slots_to_day_schedule(
|
||||
service_call.data.get(ATTR_MONDAY_SLOTS)
|
||||
)
|
||||
tuesday = _convert_time_slots_to_day_schedule(
|
||||
service_call.data.get(ATTR_TUESDAY_SLOTS)
|
||||
)
|
||||
wednesday = _convert_time_slots_to_day_schedule(
|
||||
service_call.data.get(ATTR_WEDNESDAY_SLOTS)
|
||||
)
|
||||
thursday = _convert_time_slots_to_day_schedule(
|
||||
service_call.data.get(ATTR_THURSDAY_SLOTS)
|
||||
)
|
||||
friday = _convert_time_slots_to_day_schedule(
|
||||
service_call.data.get(ATTR_FRIDAY_SLOTS)
|
||||
)
|
||||
saturday = _convert_time_slots_to_day_schedule(
|
||||
service_call.data.get(ATTR_SATURDAY_SLOTS)
|
||||
)
|
||||
sunday = _convert_time_slots_to_day_schedule(
|
||||
service_call.data.get(ATTR_SUNDAY_SLOTS)
|
||||
)
|
||||
|
||||
# Create the DHWSchedule object
|
||||
dhw_schedule = DHWSchedule(
|
||||
monday=monday,
|
||||
tuesday=tuesday,
|
||||
wednesday=wednesday,
|
||||
thursday=thursday,
|
||||
friday=friday,
|
||||
saturday=saturday,
|
||||
sunday=sunday,
|
||||
)
|
||||
|
||||
LOGGER.debug(
|
||||
"Setting hot water schedule - Monday: %s, Tuesday: %s, Wednesday: %s, "
|
||||
"Thursday: %s, Friday: %s, Saturday: %s, Sunday: %s",
|
||||
monday,
|
||||
tuesday,
|
||||
wednesday,
|
||||
thursday,
|
||||
friday,
|
||||
saturday,
|
||||
sunday,
|
||||
)
|
||||
|
||||
try:
|
||||
# Call the BSB-Lan API to set the schedule
|
||||
await client.set_hot_water_schedule(dhw_schedule)
|
||||
except BSBLANError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="set_schedule_failed",
|
||||
translation_placeholders={"error": str(err)},
|
||||
) from err
|
||||
|
||||
# Refresh the slow coordinator to get the updated schedule
|
||||
await entry.runtime_data.slow_coordinator.async_request_refresh()
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Register the BSB-Lan services."""
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_SET_HOT_WATER_SCHEDULE,
|
||||
set_hot_water_schedule,
|
||||
schema=SERVICE_SET_HOT_WATER_SCHEDULE_SCHEMA,
|
||||
)
|
||||
@@ -1,113 +0,0 @@
|
||||
set_hot_water_schedule:
|
||||
fields:
|
||||
device_id:
|
||||
required: true
|
||||
example: "abc123device456"
|
||||
selector:
|
||||
device:
|
||||
integration: bsblan
|
||||
monday_slots:
|
||||
selector:
|
||||
object:
|
||||
multiple: true
|
||||
label_field: start_time
|
||||
description_field: end_time
|
||||
fields:
|
||||
start_time:
|
||||
required: true
|
||||
selector:
|
||||
time:
|
||||
end_time:
|
||||
required: true
|
||||
selector:
|
||||
time:
|
||||
tuesday_slots:
|
||||
selector:
|
||||
object:
|
||||
multiple: true
|
||||
label_field: start_time
|
||||
description_field: end_time
|
||||
fields:
|
||||
start_time:
|
||||
required: true
|
||||
selector:
|
||||
time:
|
||||
end_time:
|
||||
required: true
|
||||
selector:
|
||||
time:
|
||||
wednesday_slots:
|
||||
selector:
|
||||
object:
|
||||
multiple: true
|
||||
label_field: start_time
|
||||
description_field: end_time
|
||||
fields:
|
||||
start_time:
|
||||
required: true
|
||||
selector:
|
||||
time:
|
||||
end_time:
|
||||
required: true
|
||||
selector:
|
||||
time:
|
||||
thursday_slots:
|
||||
selector:
|
||||
object:
|
||||
multiple: true
|
||||
label_field: start_time
|
||||
description_field: end_time
|
||||
fields:
|
||||
start_time:
|
||||
required: true
|
||||
selector:
|
||||
time:
|
||||
end_time:
|
||||
required: true
|
||||
selector:
|
||||
time:
|
||||
friday_slots:
|
||||
selector:
|
||||
object:
|
||||
multiple: true
|
||||
label_field: start_time
|
||||
description_field: end_time
|
||||
fields:
|
||||
start_time:
|
||||
required: true
|
||||
selector:
|
||||
time:
|
||||
end_time:
|
||||
required: true
|
||||
selector:
|
||||
time:
|
||||
saturday_slots:
|
||||
selector:
|
||||
object:
|
||||
multiple: true
|
||||
label_field: start_time
|
||||
description_field: end_time
|
||||
fields:
|
||||
start_time:
|
||||
required: true
|
||||
selector:
|
||||
time:
|
||||
end_time:
|
||||
required: true
|
||||
selector:
|
||||
time:
|
||||
sunday_slots:
|
||||
selector:
|
||||
object:
|
||||
multiple: true
|
||||
label_field: start_time
|
||||
description_field: end_time
|
||||
fields:
|
||||
start_time:
|
||||
required: true
|
||||
selector:
|
||||
time:
|
||||
end_time:
|
||||
required: true
|
||||
selector:
|
||||
time:
|
||||
@@ -70,21 +70,6 @@
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"config_entry_not_loaded": {
|
||||
"message": "The device `{device_name}` is not currently loaded or available"
|
||||
},
|
||||
"end_time_before_start_time": {
|
||||
"message": "End time ({end_time}) must be after start time ({start_time})"
|
||||
},
|
||||
"invalid_device_id": {
|
||||
"message": "Invalid device ID: {device_id}"
|
||||
},
|
||||
"invalid_time_format": {
|
||||
"message": "Invalid time format provided"
|
||||
},
|
||||
"no_config_entry_for_device": {
|
||||
"message": "No configuration entry found for device: {device_id}"
|
||||
},
|
||||
"set_data_error": {
|
||||
"message": "An error occurred while sending the data to the BSB-Lan device"
|
||||
},
|
||||
@@ -94,9 +79,6 @@
|
||||
"set_preset_mode_error": {
|
||||
"message": "Can't set preset mode to {preset_mode} when HVAC mode is not set to auto"
|
||||
},
|
||||
"set_schedule_failed": {
|
||||
"message": "Failed to set hot water schedule: {error}"
|
||||
},
|
||||
"set_temperature_error": {
|
||||
"message": "An error occurred while setting the temperature"
|
||||
},
|
||||
@@ -109,45 +91,5 @@
|
||||
"setup_general_error": {
|
||||
"message": "An unknown error occurred while retrieving static device data"
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"set_hot_water_schedule": {
|
||||
"description": "Set the hot water heating schedule for a BSB-LAN device.",
|
||||
"fields": {
|
||||
"device_id": {
|
||||
"description": "The BSB-LAN device to configure.",
|
||||
"name": "Device"
|
||||
},
|
||||
"friday_slots": {
|
||||
"description": "Time periods for Friday. Add multiple slots for different heating periods throughout the day.",
|
||||
"name": "Friday time slots"
|
||||
},
|
||||
"monday_slots": {
|
||||
"description": "Time periods for Monday. Add multiple slots for different heating periods throughout the day.",
|
||||
"name": "Monday time slots"
|
||||
},
|
||||
"saturday_slots": {
|
||||
"description": "Time periods for Saturday. Add multiple slots for different heating periods throughout the day.",
|
||||
"name": "Saturday time slots"
|
||||
},
|
||||
"sunday_slots": {
|
||||
"description": "Time periods for Sunday. Add multiple slots for different heating periods throughout the day.",
|
||||
"name": "Sunday time slots"
|
||||
},
|
||||
"thursday_slots": {
|
||||
"description": "Time periods for Thursday. Add multiple slots for different heating periods throughout the day.",
|
||||
"name": "Thursday time slots"
|
||||
},
|
||||
"tuesday_slots": {
|
||||
"description": "Time periods for Tuesday. Add multiple slots for different heating periods throughout the day.",
|
||||
"name": "Tuesday time slots"
|
||||
},
|
||||
"wednesday_slots": {
|
||||
"description": "Time periods for Wednesday. Add multiple slots for different heating periods throughout the day.",
|
||||
"name": "Wednesday time slots"
|
||||
}
|
||||
},
|
||||
"name": "Set hot water schedule"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,5 +20,5 @@
|
||||
"dependencies": ["bluetooth_adapters"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/bthome",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["bthome-ble==3.17.0"]
|
||||
"requirements": ["bthome-ble==3.15.0"]
|
||||
}
|
||||
|
||||
@@ -2,30 +2,29 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from collections.abc import Awaitable, Callable, Coroutine
|
||||
from dataclasses import dataclass
|
||||
import datetime
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any, cast
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_ENTITY_ID, CONF_EVENT, CONF_OFFSET, CONF_OPTIONS
|
||||
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
|
||||
from homeassistant.const import CONF_ENTITY_ID, CONF_EVENT, CONF_OFFSET, CONF_PLATFORM
|
||||
from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
from homeassistant.helpers.event import (
|
||||
async_track_point_in_time,
|
||||
async_track_time_interval,
|
||||
)
|
||||
from homeassistant.helpers.trigger import Trigger, TriggerActionRunner, TriggerConfig
|
||||
from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from . import CalendarEntity, CalendarEvent
|
||||
from .const import DATA_COMPONENT
|
||||
from .const import DATA_COMPONENT, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -33,17 +32,13 @@ EVENT_START = "start"
|
||||
EVENT_END = "end"
|
||||
UPDATE_INTERVAL = datetime.timedelta(minutes=15)
|
||||
|
||||
|
||||
_OPTIONS_SCHEMA_DICT = {
|
||||
vol.Required(CONF_ENTITY_ID): cv.entity_id,
|
||||
vol.Optional(CONF_EVENT, default=EVENT_START): vol.In({EVENT_START, EVENT_END}),
|
||||
vol.Optional(CONF_OFFSET, default=datetime.timedelta(0)): cv.time_period,
|
||||
}
|
||||
|
||||
_CONFIG_SCHEMA = vol.Schema(
|
||||
TRIGGER_SCHEMA = cv.TRIGGER_BASE_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_OPTIONS): _OPTIONS_SCHEMA_DICT,
|
||||
},
|
||||
vol.Required(CONF_PLATFORM): DOMAIN,
|
||||
vol.Required(CONF_ENTITY_ID): cv.entity_id,
|
||||
vol.Optional(CONF_EVENT, default=EVENT_START): vol.In({EVENT_START, EVENT_END}),
|
||||
vol.Optional(CONF_OFFSET, default=datetime.timedelta(0)): cv.time_period,
|
||||
}
|
||||
)
|
||||
|
||||
# mypy: disallow-any-generics
|
||||
@@ -174,14 +169,14 @@ class CalendarEventListener:
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
action_runner: TriggerActionRunner,
|
||||
trigger_payload: dict[str, Any],
|
||||
job: HassJob[..., Coroutine[Any, Any, None] | Any],
|
||||
trigger_data: dict[str, Any],
|
||||
fetcher: QueuedEventFetcher,
|
||||
) -> None:
|
||||
"""Initialize CalendarEventListener."""
|
||||
self._hass = hass
|
||||
self._action_runner = action_runner
|
||||
self._trigger_payload = trigger_payload
|
||||
self._job = job
|
||||
self._trigger_data = trigger_data
|
||||
self._unsub_event: CALLBACK_TYPE | None = None
|
||||
self._unsub_refresh: CALLBACK_TYPE | None = None
|
||||
self._fetcher = fetcher
|
||||
@@ -238,11 +233,15 @@ class CalendarEventListener:
|
||||
while self._events and self._events[0].trigger_time <= now:
|
||||
queued_event = self._events.pop(0)
|
||||
_LOGGER.debug("Dispatching event: %s", queued_event.event)
|
||||
payload = {
|
||||
**self._trigger_payload,
|
||||
"calendar_event": queued_event.event.as_dict(),
|
||||
}
|
||||
self._action_runner(payload, "calendar event state change")
|
||||
self._hass.async_run_hass_job(
|
||||
self._job,
|
||||
{
|
||||
"trigger": {
|
||||
**self._trigger_data,
|
||||
"calendar_event": queued_event.event.as_dict(),
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
async def _handle_refresh(self, now_utc: datetime.datetime) -> None:
|
||||
"""Handle core config update."""
|
||||
@@ -260,69 +259,31 @@ class CalendarEventListener:
|
||||
self._listen_next_calendar_event()
|
||||
|
||||
|
||||
class EventTrigger(Trigger):
|
||||
"""Calendar event trigger."""
|
||||
async def async_attach_trigger(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
action: TriggerActionType,
|
||||
trigger_info: TriggerInfo,
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Attach trigger for the specified calendar."""
|
||||
entity_id = config[CONF_ENTITY_ID]
|
||||
event_type = config[CONF_EVENT]
|
||||
offset = config[CONF_OFFSET]
|
||||
|
||||
_options: dict[str, Any]
|
||||
# Validate the entity id is valid
|
||||
get_entity(hass, entity_id)
|
||||
|
||||
@classmethod
|
||||
async def async_validate_complete_config(
|
||||
cls, hass: HomeAssistant, complete_config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate complete config."""
|
||||
complete_config = move_top_level_schema_fields_to_options(
|
||||
complete_config, _OPTIONS_SCHEMA_DICT
|
||||
)
|
||||
return await super().async_validate_complete_config(hass, complete_config)
|
||||
|
||||
@classmethod
|
||||
async def async_validate_config(
|
||||
cls, hass: HomeAssistant, config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate config."""
|
||||
return cast(ConfigType, _CONFIG_SCHEMA(config))
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
|
||||
"""Initialize trigger."""
|
||||
super().__init__(hass, config)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert config.options is not None
|
||||
self._options = config.options
|
||||
|
||||
async def async_attach_runner(
|
||||
self, run_action: TriggerActionRunner
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Attach a trigger."""
|
||||
|
||||
entity_id = self._options[CONF_ENTITY_ID]
|
||||
event_type = self._options[CONF_EVENT]
|
||||
offset = self._options[CONF_OFFSET]
|
||||
|
||||
# Validate the entity id is valid
|
||||
get_entity(self._hass, entity_id)
|
||||
|
||||
trigger_data = {
|
||||
"event": event_type,
|
||||
"offset": offset,
|
||||
}
|
||||
listener = CalendarEventListener(
|
||||
self._hass,
|
||||
run_action,
|
||||
trigger_data,
|
||||
queued_event_fetcher(
|
||||
event_fetcher(self._hass, entity_id), event_type, offset
|
||||
),
|
||||
)
|
||||
await listener.async_attach()
|
||||
return listener.async_detach
|
||||
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"_": EventTrigger,
|
||||
}
|
||||
|
||||
|
||||
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
|
||||
"""Return the triggers for calendars."""
|
||||
return TRIGGERS
|
||||
trigger_data = {
|
||||
**trigger_info["trigger_data"],
|
||||
"platform": DOMAIN,
|
||||
"event": event_type,
|
||||
"offset": offset,
|
||||
}
|
||||
listener = CalendarEventListener(
|
||||
hass,
|
||||
HassJob(action),
|
||||
trigger_data,
|
||||
queued_event_fetcher(event_fetcher(hass, entity_id), event_type, offset),
|
||||
)
|
||||
await listener.async_attach()
|
||||
return listener.async_detach
|
||||
|
||||
@@ -12,7 +12,6 @@
|
||||
"codeowners": ["@emontnemery"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/cast",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["casttube", "pychromecast"],
|
||||
"requirements": ["PyChromecast==14.0.9"],
|
||||
|
||||
@@ -98,21 +98,6 @@
|
||||
}
|
||||
},
|
||||
"triggers": {
|
||||
"current_humidity_changed": {
|
||||
"trigger": "mdi:water-percent"
|
||||
},
|
||||
"current_humidity_crossed_threshold": {
|
||||
"trigger": "mdi:water-percent"
|
||||
},
|
||||
"current_temperature_changed": {
|
||||
"trigger": "mdi:thermometer"
|
||||
},
|
||||
"current_temperature_crossed_threshold": {
|
||||
"trigger": "mdi:thermometer"
|
||||
},
|
||||
"hvac_mode_changed": {
|
||||
"trigger": "mdi:thermostat"
|
||||
},
|
||||
"started_cooling": {
|
||||
"trigger": "mdi:snowflake"
|
||||
},
|
||||
@@ -122,18 +107,6 @@
|
||||
"started_heating": {
|
||||
"trigger": "mdi:fire"
|
||||
},
|
||||
"target_humidity_changed": {
|
||||
"trigger": "mdi:water-percent"
|
||||
},
|
||||
"target_humidity_crossed_threshold": {
|
||||
"trigger": "mdi:water-percent"
|
||||
},
|
||||
"target_temperature_changed": {
|
||||
"trigger": "mdi:thermometer"
|
||||
},
|
||||
"target_temperature_crossed_threshold": {
|
||||
"trigger": "mdi:thermometer"
|
||||
},
|
||||
"turned_off": {
|
||||
"trigger": "mdi:power-off"
|
||||
},
|
||||
|
||||
@@ -192,26 +192,12 @@
|
||||
"off": "[%key:common::state::off%]"
|
||||
}
|
||||
},
|
||||
"number_or_entity": {
|
||||
"choices": {
|
||||
"entity": "Entity",
|
||||
"number": "Number"
|
||||
}
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
},
|
||||
"trigger_threshold_type": {
|
||||
"options": {
|
||||
"above": "Above a value",
|
||||
"below": "Below a value",
|
||||
"between": "In a range",
|
||||
"outside": "Outside a range"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
@@ -312,92 +298,6 @@
|
||||
},
|
||||
"title": "Climate",
|
||||
"triggers": {
|
||||
"current_humidity_changed": {
|
||||
"description": "Triggers after the humidity measured by one or more climate-control devices changes.",
|
||||
"fields": {
|
||||
"above": {
|
||||
"description": "Trigger when the humidity is above this value.",
|
||||
"name": "Above"
|
||||
},
|
||||
"below": {
|
||||
"description": "Trigger when the humidity is below this value.",
|
||||
"name": "Below"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device current humidity changed"
|
||||
},
|
||||
"current_humidity_crossed_threshold": {
|
||||
"description": "Triggers after the humidity measured by one or more climate-control devices crosses a threshold.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::climate::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::climate::common::trigger_behavior_name%]"
|
||||
},
|
||||
"lower_limit": {
|
||||
"description": "Lower threshold limit.",
|
||||
"name": "Lower threshold"
|
||||
},
|
||||
"threshold_type": {
|
||||
"description": "Type of threshold crossing to trigger on.",
|
||||
"name": "Threshold type"
|
||||
},
|
||||
"upper_limit": {
|
||||
"description": "Upper threshold limit.",
|
||||
"name": "Upper threshold"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device current humidity crossed threshold"
|
||||
},
|
||||
"current_temperature_changed": {
|
||||
"description": "Triggers after the temperature measured by one or more climate-control devices changes.",
|
||||
"fields": {
|
||||
"above": {
|
||||
"description": "Trigger when the temperature is above this value.",
|
||||
"name": "Above"
|
||||
},
|
||||
"below": {
|
||||
"description": "Trigger when the temperature is below this value.",
|
||||
"name": "Below"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device current temperature changed"
|
||||
},
|
||||
"current_temperature_crossed_threshold": {
|
||||
"description": "Triggers after the temperature measured by one or more climate-control devices crosses a threshold.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::climate::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::climate::common::trigger_behavior_name%]"
|
||||
},
|
||||
"lower_limit": {
|
||||
"description": "Lower threshold limit.",
|
||||
"name": "Lower threshold"
|
||||
},
|
||||
"threshold_type": {
|
||||
"description": "Type of threshold crossing to trigger on.",
|
||||
"name": "Threshold type"
|
||||
},
|
||||
"upper_limit": {
|
||||
"description": "Upper threshold limit.",
|
||||
"name": "Upper threshold"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device current temperature crossed threshold"
|
||||
},
|
||||
"hvac_mode_changed": {
|
||||
"description": "Triggers after the mode of one or more climate-control devices changes.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::climate::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::climate::common::trigger_behavior_name%]"
|
||||
},
|
||||
"hvac_mode": {
|
||||
"description": "The HVAC modes to trigger on.",
|
||||
"name": "Modes"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device mode changed"
|
||||
},
|
||||
"started_cooling": {
|
||||
"description": "Triggers after one or more climate-control devices start cooling.",
|
||||
"fields": {
|
||||
@@ -428,78 +328,6 @@
|
||||
},
|
||||
"name": "Climate-control device started heating"
|
||||
},
|
||||
"target_humidity_changed": {
|
||||
"description": "Triggers after the humidity setpoint of one or more climate-control devices changes.",
|
||||
"fields": {
|
||||
"above": {
|
||||
"description": "Trigger when the target humidity is above this value.",
|
||||
"name": "Above"
|
||||
},
|
||||
"below": {
|
||||
"description": "Trigger when the target humidity is below this value.",
|
||||
"name": "Below"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device target humidity changed"
|
||||
},
|
||||
"target_humidity_crossed_threshold": {
|
||||
"description": "Triggers after the humidity setpoint of one or more climate-control devices crosses a threshold.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::climate::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::climate::common::trigger_behavior_name%]"
|
||||
},
|
||||
"lower_limit": {
|
||||
"description": "Lower threshold limit.",
|
||||
"name": "Lower threshold"
|
||||
},
|
||||
"threshold_type": {
|
||||
"description": "Type of threshold crossing to trigger on.",
|
||||
"name": "Threshold type"
|
||||
},
|
||||
"upper_limit": {
|
||||
"description": "Upper threshold limit.",
|
||||
"name": "Upper threshold"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device target humidity crossed threshold"
|
||||
},
|
||||
"target_temperature_changed": {
|
||||
"description": "Triggers after the temperature setpoint of one or more climate-control devices changes.",
|
||||
"fields": {
|
||||
"above": {
|
||||
"description": "Trigger when the target temperature is above this value.",
|
||||
"name": "Above"
|
||||
},
|
||||
"below": {
|
||||
"description": "Trigger when the target temperature is below this value.",
|
||||
"name": "Below"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device target temperature changed"
|
||||
},
|
||||
"target_temperature_crossed_threshold": {
|
||||
"description": "Triggers after the temperature setpoint of one or more climate-control devices crosses a threshold.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::climate::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::climate::common::trigger_behavior_name%]"
|
||||
},
|
||||
"lower_limit": {
|
||||
"description": "Lower threshold limit.",
|
||||
"name": "Lower threshold"
|
||||
},
|
||||
"threshold_type": {
|
||||
"description": "Type of threshold crossing to trigger on.",
|
||||
"name": "Threshold type"
|
||||
},
|
||||
"upper_limit": {
|
||||
"description": "Upper threshold limit.",
|
||||
"name": "Upper threshold"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device target temperature crossed threshold"
|
||||
},
|
||||
"turned_off": {
|
||||
"description": "Triggers after one or more climate-control devices turn off.",
|
||||
"fields": {
|
||||
|
||||
@@ -1,89 +1,22 @@
"""Provides triggers for climates."""

import voluptuous as vol

from homeassistant.const import ATTR_TEMPERATURE, CONF_OPTIONS
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.trigger import (
    ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST,
    EntityTargetStateTriggerBase,
    Trigger,
    TriggerConfig,
    make_entity_numerical_state_attribute_changed_trigger,
    make_entity_numerical_state_attribute_crossed_threshold_trigger,
    make_entity_target_state_attribute_trigger,
    make_entity_target_state_trigger,
    make_entity_transition_trigger,
)

from .const import (
    ATTR_CURRENT_HUMIDITY,
    ATTR_CURRENT_TEMPERATURE,
    ATTR_HUMIDITY,
    ATTR_HVAC_ACTION,
    DOMAIN,
    HVACAction,
    HVACMode,
)

CONF_HVAC_MODE = "hvac_mode"

HVAC_MODE_CHANGED_TRIGGER_SCHEMA = ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST.extend(
    {
        vol.Required(CONF_OPTIONS): {
            vol.Required(CONF_HVAC_MODE): vol.All(
                cv.ensure_list, vol.Length(min=1), [HVACMode]
            ),
        },
    }
)


class HVACModeChangedTrigger(EntityTargetStateTriggerBase):
    """Trigger for entity state changes."""

    _domain = DOMAIN
    _schema = HVAC_MODE_CHANGED_TRIGGER_SCHEMA

    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
        """Initialize the state trigger."""
        super().__init__(hass, config)
        self._to_states = set(self._options[CONF_HVAC_MODE])

from .const import ATTR_HVAC_ACTION, DOMAIN, HVACAction, HVACMode

TRIGGERS: dict[str, type[Trigger]] = {
    "current_humidity_changed": make_entity_numerical_state_attribute_changed_trigger(
        DOMAIN, ATTR_CURRENT_HUMIDITY
    ),
    "current_humidity_crossed_threshold": make_entity_numerical_state_attribute_crossed_threshold_trigger(
        DOMAIN, ATTR_CURRENT_HUMIDITY
    ),
    "current_temperature_changed": make_entity_numerical_state_attribute_changed_trigger(
        DOMAIN, ATTR_CURRENT_TEMPERATURE
    ),
    "current_temperature_crossed_threshold": make_entity_numerical_state_attribute_crossed_threshold_trigger(
        DOMAIN, ATTR_CURRENT_TEMPERATURE
    ),
    "hvac_mode_changed": HVACModeChangedTrigger,
    "started_cooling": make_entity_target_state_attribute_trigger(
        DOMAIN, ATTR_HVAC_ACTION, HVACAction.COOLING
    ),
    "started_drying": make_entity_target_state_attribute_trigger(
        DOMAIN, ATTR_HVAC_ACTION, HVACAction.DRYING
    ),
    "target_humidity_changed": make_entity_numerical_state_attribute_changed_trigger(
        DOMAIN, ATTR_HUMIDITY
    ),
    "target_humidity_crossed_threshold": make_entity_numerical_state_attribute_crossed_threshold_trigger(
        DOMAIN, ATTR_HUMIDITY
    ),
    "target_temperature_changed": make_entity_numerical_state_attribute_changed_trigger(
        DOMAIN, ATTR_TEMPERATURE
    ),
    "target_temperature_crossed_threshold": make_entity_numerical_state_attribute_crossed_threshold_trigger(
        DOMAIN, ATTR_TEMPERATURE
    ),
    "turned_off": make_entity_target_state_trigger(DOMAIN, HVACMode.OFF),
    "turned_on": make_entity_transition_trigger(
        DOMAIN,
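For readers skimming the trigger module above, here is a small, self-contained sketch (not part of the diff) of how a voluptuous schema shaped like HVAC_MODE_CHANGED_TRIGGER_SCHEMA normalizes its options payload. The mode values and the ensure_list stand-in are assumptions for illustration, not the integration's constants or Home Assistant's cv helpers.

# Illustrative sketch only: mirrors the shape of the options schema above.
# ASSUMED_HVAC_MODES and ensure_list are stand-ins, not Home Assistant code.
import voluptuous as vol

CONF_OPTIONS = "options"
CONF_HVAC_MODE = "hvac_mode"
ASSUMED_HVAC_MODES = ["off", "heat", "cool", "heat_cool", "dry", "fan_only"]


def ensure_list(value):
    """Coerce a single value into a one-element list."""
    return value if isinstance(value, list) else [value]


OPTIONS_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_OPTIONS): {
            vol.Required(CONF_HVAC_MODE): vol.All(
                ensure_list, vol.Length(min=1), [vol.In(ASSUMED_HVAC_MODES)]
            ),
        },
    },
    extra=vol.ALLOW_EXTRA,
)

print(OPTIONS_SCHEMA({"options": {"hvac_mode": "heat"}}))
# -> {'options': {'hvac_mode': ['heat']}}

A single mode is normalized to a one-element list, which matches how HVACModeChangedTrigger builds its _to_states set from the validated options.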
@@ -1,9 +1,9 @@
.trigger_common: &trigger_common
  target: &trigger_climate_target
  target:
    entity:
      domain: climate
  fields:
    behavior: &trigger_behavior
    behavior:
      required: true
      default: any
      selector:
@@ -14,109 +14,8 @@
          - last
          - any

.number_or_entity: &number_or_entity
  required: false
  selector:
    choose:
      choices:
        entity:
          selector:
            entity:
              filter:
                domain:
                  - input_number
                  - number
                  - sensor
        number:
          selector:
            number:
              mode: box
      translation_key: number_or_entity

.trigger_threshold_type: &trigger_threshold_type
  required: true
  selector:
    select:
      options:
        - above
        - below
        - between
        - outside
      translation_key: trigger_threshold_type

started_cooling: *trigger_common
started_drying: *trigger_common
started_heating: *trigger_common
turned_off: *trigger_common
turned_on: *trigger_common

hvac_mode_changed:
  target: *trigger_climate_target
  fields:
    behavior: *trigger_behavior
    hvac_mode:
      context:
        filter_target: target
      required: true
      selector:
        state:
          hide_states:
            - unavailable
            - unknown
          multiple: true

current_humidity_changed:
  target: *trigger_climate_target
  fields:
    above: *number_or_entity
    below: *number_or_entity

current_humidity_crossed_threshold:
  target: *trigger_climate_target
  fields:
    behavior: *trigger_behavior
    threshold_type: *trigger_threshold_type
    lower_limit: *number_or_entity
    upper_limit: *number_or_entity

target_humidity_changed:
  target: *trigger_climate_target
  fields:
    above: *number_or_entity
    below: *number_or_entity

target_humidity_crossed_threshold:
  target: *trigger_climate_target
  fields:
    behavior: *trigger_behavior
    threshold_type: *trigger_threshold_type
    lower_limit: *number_or_entity
    upper_limit: *number_or_entity

current_temperature_changed:
  target: *trigger_climate_target
  fields:
    above: *number_or_entity
    below: *number_or_entity

current_temperature_crossed_threshold:
  target: *trigger_climate_target
  fields:
    behavior: *trigger_behavior
    threshold_type: *trigger_threshold_type
    lower_limit: *number_or_entity
    upper_limit: *number_or_entity

target_temperature_changed:
  target: *trigger_climate_target
  fields:
    above: *number_or_entity
    below: *number_or_entity

target_temperature_crossed_threshold:
  target: *trigger_climate_target
  fields:
    behavior: *trigger_behavior
    threshold_type: *trigger_threshold_type
    lower_limit: *number_or_entity
    upper_limit: *number_or_entity
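As an aside (not part of the diff), the crossed-threshold triggers above expose threshold_type, lower_limit and upper_limit; the sketch below shows one plausible way those fields combine into a region check. The function name and which bound the above/below cases consult are assumptions, not the integration's implementation.

# Illustrative sketch only: one plausible reading of the crossed-threshold
# fields above. Which bound "above" and "below" consult is a guess here;
# only the between/outside cases follow directly from the field names.
def in_threshold_region(
    value: float,
    threshold_type: str,
    lower_limit: float | None = None,
    upper_limit: float | None = None,
) -> bool:
    """Return True if value lies inside the configured region."""
    if threshold_type == "above":
        return upper_limit is not None and value > upper_limit
    if threshold_type == "below":
        return lower_limit is not None and value < lower_limit
    if threshold_type == "between":
        return (
            lower_limit is not None
            and upper_limit is not None
            and lower_limit <= value <= upper_limit
        )
    if threshold_type == "outside":
        return (
            lower_limit is not None
            and upper_limit is not None
            and not lower_limit <= value <= upper_limit
        )
    raise ValueError(f"unknown threshold_type: {threshold_type}")


# A "crossing" is then a change of this predicate between two state updates:
assert in_threshold_region(22.5, "between", lower_limit=20, upper_limit=24)
assert not in_threshold_region(25.0, "between", lower_limit=20, upper_limit=24)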
@@ -5,7 +5,7 @@ from aiocomelit.const import BRIDGE
from homeassistant.const import CONF_HOST, CONF_PIN, CONF_PORT, CONF_TYPE, Platform
from homeassistant.core import HomeAssistant

from .const import CONF_VEDO_PIN, DEFAULT_PORT
from .const import DEFAULT_PORT
from .coordinator import (
    ComelitBaseCoordinator,
    ComelitConfigEntry,
@@ -22,16 +22,6 @@ BRIDGE_PLATFORMS = [
    Platform.SENSOR,
    Platform.SWITCH,
]
BRIDGE_AND_VEDO_PLATFORMS = [
    Platform.ALARM_CONTROL_PANEL,
    Platform.BINARY_SENSOR,
    Platform.CLIMATE,
    Platform.COVER,
    Platform.HUMIDIFIER,
    Platform.LIGHT,
    Platform.SENSOR,
    Platform.SWITCH,
]
VEDO_PLATFORMS = [
    Platform.ALARM_CONTROL_PANEL,
    Platform.BINARY_SENSOR,
@@ -47,20 +37,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: ComelitConfigEntry) -> b
    session = await async_client_session(hass)

    if entry.data.get(CONF_TYPE, BRIDGE) == BRIDGE:
        vedo_pin = entry.data.get(CONF_VEDO_PIN)
        coordinator = ComelitSerialBridge(
            hass,
            entry,
            entry.data[CONF_HOST],
            entry.data.get(CONF_PORT, DEFAULT_PORT),
            entry.data[CONF_PIN],
            vedo_pin,
            session,
        )
        platforms = BRIDGE_PLATFORMS
        # Add VEDO platforms if vedo_pin is configured
        if vedo_pin:
            platforms = BRIDGE_AND_VEDO_PLATFORMS
    else:
        coordinator = ComelitVedoSystem(
            hass,
@@ -86,9 +71,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ComelitConfigEntry) ->

    if entry.data.get(CONF_TYPE, BRIDGE) == BRIDGE:
        platforms = BRIDGE_PLATFORMS
        # Add VEDO platforms if vedo_pin was configured
        if entry.data.get(CONF_VEDO_PIN):
            platforms = BRIDGE_AND_VEDO_PLATFORMS
    else:
        platforms = VEDO_PLATFORMS
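For orientation (again outside the diff), the async_setup_entry and async_unload_entry hunks above apply one platform-selection rule; a minimal sketch of that rule follows, with a hypothetical helper name and placeholder platform lists rather than the integration's module constants.

# Illustrative sketch only: the platform-selection rule implied by the
# setup/unload hunks above. select_platforms is a hypothetical helper.
def select_platforms(
    is_bridge: bool,
    vedo_pin: str | None,
    bridge_platforms: list[str],
    bridge_and_vedo_platforms: list[str],
    vedo_platforms: list[str],
) -> list[str]:
    """Pick which platform list a config entry is forwarded to."""
    if not is_bridge:
        # Standalone VEDO systems always get the alarm-oriented platforms.
        return vedo_platforms
    # Serial bridges only add the VEDO platforms when an alarm PIN is set.
    return bridge_and_vedo_platforms if vedo_pin else bridge_platforms


assert select_platforms(True, None, ["a"], ["a", "b"], ["c"]) == ["a"]
assert select_platforms(True, "1234", ["a"], ["a", "b"], ["c"]) == ["a", "b"]
assert select_platforms(False, None, ["a"], ["a", "b"], ["c"]) == ["c"]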
@@ -3,10 +3,10 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, cast
|
||||
from typing import cast
|
||||
|
||||
from aiocomelit.api import ComelitVedoAreaObject
|
||||
from aiocomelit.const import ALARM_AREA, AlarmAreaState
|
||||
from aiocomelit.const import AlarmAreaState
|
||||
|
||||
from homeassistant.components.alarm_control_panel import (
|
||||
AlarmControlPanelEntity,
|
||||
@@ -18,7 +18,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .coordinator import ComelitConfigEntry, ComelitSerialBridge, ComelitVedoSystem
|
||||
from .coordinator import ComelitConfigEntry, ComelitVedoSystem
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
@@ -56,25 +56,15 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up the Comelit VEDO system alarm control panel devices."""
|
||||
|
||||
coordinator = config_entry.runtime_data
|
||||
is_bridge = isinstance(coordinator, ComelitSerialBridge)
|
||||
coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
if is_bridge:
|
||||
assert isinstance(coordinator, ComelitSerialBridge)
|
||||
else:
|
||||
assert isinstance(coordinator, ComelitVedoSystem)
|
||||
|
||||
if data := coordinator.data[ALARM_AREA]:
|
||||
async_add_entities(
|
||||
ComelitAlarmEntity(coordinator, device, config_entry.entry_id)
|
||||
for device in data.values()
|
||||
)
|
||||
async_add_entities(
|
||||
ComelitAlarmEntity(coordinator, device, config_entry.entry_id)
|
||||
for device in coordinator.data["alarm_areas"].values()
|
||||
)
|
||||
|
||||
|
||||
class ComelitAlarmEntity(
|
||||
CoordinatorEntity[ComelitVedoSystem | ComelitSerialBridge], AlarmControlPanelEntity
|
||||
):
|
||||
class ComelitAlarmEntity(CoordinatorEntity[ComelitVedoSystem], AlarmControlPanelEntity):
|
||||
"""Representation of a Ness alarm panel."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
@@ -88,7 +78,7 @@ class ComelitAlarmEntity(
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ComelitVedoSystem | ComelitSerialBridge,
|
||||
coordinator: ComelitVedoSystem,
|
||||
area: ComelitVedoAreaObject,
|
||||
config_entry_entry_id: str,
|
||||
) -> None:
|
||||
@@ -105,9 +95,7 @@ class ComelitAlarmEntity(
|
||||
@property
|
||||
def _area(self) -> ComelitVedoAreaObject:
|
||||
"""Return area object."""
|
||||
return cast(
|
||||
ComelitVedoAreaObject, self.coordinator.data[ALARM_AREA][self._area_index]
|
||||
)
|
||||
return self.coordinator.data["alarm_areas"][self._area_index]
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
|
||||
@@ -2,10 +2,9 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, cast
|
||||
from typing import cast
|
||||
|
||||
from aiocomelit.api import ComelitVedoZoneObject
|
||||
from aiocomelit.const import ALARM_ZONE, AlarmZoneState
|
||||
from aiocomelit import ComelitVedoZoneObject
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
@@ -16,7 +15,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import ObjectClassType
|
||||
from .coordinator import ComelitConfigEntry, ComelitSerialBridge, ComelitVedoSystem
|
||||
from .coordinator import ComelitConfigEntry, ComelitVedoSystem
|
||||
from .utils import new_device_listener
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
@@ -30,32 +29,25 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up Comelit VEDO presence sensors."""
|
||||
|
||||
coordinator = config_entry.runtime_data
|
||||
is_bridge = isinstance(coordinator, ComelitSerialBridge)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
if is_bridge:
|
||||
assert isinstance(coordinator, ComelitSerialBridge)
|
||||
else:
|
||||
assert isinstance(coordinator, ComelitVedoSystem)
|
||||
coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)
|
||||
|
||||
def _add_new_entities(new_devices: list[ObjectClassType], dev_type: str) -> None:
|
||||
"""Add entities for new monitors."""
|
||||
entities = [
|
||||
ComelitVedoBinarySensorEntity(coordinator, device, config_entry.entry_id)
|
||||
for device in coordinator.data[dev_type].values()
|
||||
for device in coordinator.data["alarm_zones"].values()
|
||||
if device in new_devices
|
||||
]
|
||||
if entities:
|
||||
async_add_entities(entities)
|
||||
|
||||
config_entry.async_on_unload(
|
||||
new_device_listener(coordinator, _add_new_entities, ALARM_ZONE)
|
||||
new_device_listener(coordinator, _add_new_entities, "alarm_zones")
|
||||
)
|
||||
|
||||
|
||||
class ComelitVedoBinarySensorEntity(
|
||||
CoordinatorEntity[ComelitVedoSystem | ComelitSerialBridge], BinarySensorEntity
|
||||
CoordinatorEntity[ComelitVedoSystem], BinarySensorEntity
|
||||
):
|
||||
"""Sensor device."""
|
||||
|
||||
@@ -64,7 +56,7 @@ class ComelitVedoBinarySensorEntity(
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ComelitVedoSystem | ComelitSerialBridge,
|
||||
coordinator: ComelitVedoSystem,
|
||||
zone: ComelitVedoZoneObject,
|
||||
config_entry_entry_id: str,
|
||||
) -> None:
|
||||
@@ -76,25 +68,9 @@ class ComelitVedoBinarySensorEntity(
|
||||
self._attr_unique_id = f"{config_entry_entry_id}-presence-{zone.index}"
|
||||
self._attr_device_info = coordinator.platform_device_info(zone, "zone")
|
||||
|
||||
@property
|
||||
def _zone(self) -> ComelitVedoZoneObject:
|
||||
"""Return zone object."""
|
||||
return cast(
|
||||
ComelitVedoZoneObject, self.coordinator.data[ALARM_ZONE][self._zone_index]
|
||||
)
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if alarm is available."""
|
||||
if self._zone.human_status in [
|
||||
AlarmZoneState.FAULTY,
|
||||
AlarmZoneState.UNAVAILABLE,
|
||||
AlarmZoneState.UNKNOWN,
|
||||
]:
|
||||
return False
|
||||
return super().available
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Presence detected."""
|
||||
return self._zone.status_api == "0001"
|
||||
return (
|
||||
self.coordinator.data["alarm_zones"][self._zone_index].status_api == "0001"
|
||||
)
|
||||
|
||||
@@ -5,7 +5,7 @@ from __future__ import annotations
|
||||
from asyncio.exceptions import TimeoutError
|
||||
from collections.abc import Mapping
|
||||
import re
|
||||
from typing import TYPE_CHECKING, Any
|
||||
from typing import Any
|
||||
|
||||
from aiocomelit import (
|
||||
ComeliteSerialBridgeApi,
|
||||
@@ -22,7 +22,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
|
||||
from .const import _LOGGER, CONF_VEDO_PIN, DEFAULT_PORT, DEVICE_TYPE_LIST, DOMAIN
|
||||
from .const import _LOGGER, DEFAULT_PORT, DEVICE_TYPE_LIST, DOMAIN
|
||||
from .utils import async_client_session
|
||||
|
||||
DEFAULT_HOST = "192.168.1.252"
|
||||
@@ -34,12 +34,9 @@ USER_SCHEMA = vol.Schema(
|
||||
vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
|
||||
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.string,
|
||||
vol.Required(CONF_TYPE, default=BRIDGE): vol.In(DEVICE_TYPE_LIST),
|
||||
vol.Optional(CONF_VEDO_PIN): cv.string,
|
||||
}
|
||||
)
|
||||
STEP_REAUTH_DATA_SCHEMA = vol.Schema(
|
||||
{vol.Required(CONF_PIN): cv.string, vol.Optional(CONF_VEDO_PIN): cv.string}
|
||||
)
|
||||
STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.string})
|
||||
|
||||
|
||||
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, str]:
|
||||
@@ -75,18 +72,6 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str,
|
||||
finally:
|
||||
await api.logout()
|
||||
|
||||
# Validate VEDO PIN if provided and device type is BRIDGE
|
||||
if data.get(CONF_VEDO_PIN) and data.get(CONF_TYPE, BRIDGE) == BRIDGE:
|
||||
if not re.fullmatch(r"[0-9]{4,10}", data[CONF_VEDO_PIN]):
|
||||
raise InvalidVedoPin
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert isinstance(api, ComeliteSerialBridgeApi)
|
||||
|
||||
# Verify VEDO is enabled with the provided PIN
|
||||
if not await api.vedo_enabled(data[CONF_VEDO_PIN]):
|
||||
raise InvalidVedoAuth
|
||||
|
||||
return {"title": data[CONF_HOST]}
|
||||
|
||||
|
||||
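As a quick aside (not part of the diff), the VEDO PIN check in validate_input above reduces to a 4-10 digit match; here is a standalone illustration with a hypothetical helper name.

# Illustrative sketch only: the same 4-10 digit check applied to the VEDO PIN
# in validate_input above, wrapped in a hypothetical helper.
import re


def is_valid_vedo_pin(pin: str) -> bool:
    """Return True if the PIN is a 4 to 10 digit numeric string."""
    return re.fullmatch(r"[0-9]{4,10}", pin) is not None


assert is_valid_vedo_pin("1234")
assert not is_valid_vedo_pin("123")
assert not is_valid_vedo_pin("1234a")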
@@ -114,10 +99,6 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
errors["base"] = "invalid_auth"
|
||||
except InvalidPin:
|
||||
errors["base"] = "invalid_pin"
|
||||
except InvalidVedoPin:
|
||||
errors["base"] = "invalid_vedo_pin"
|
||||
except InvalidVedoAuth:
|
||||
errors["base"] = "invalid_vedo_auth"
|
||||
except Exception: # noqa: BLE001
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
@@ -201,8 +182,6 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
CONF_PIN: user_input[CONF_PIN],
|
||||
CONF_TYPE: reconfigure_entry.data.get(CONF_TYPE, BRIDGE),
|
||||
}
|
||||
if CONF_VEDO_PIN in user_input:
|
||||
data_to_validate[CONF_VEDO_PIN] = user_input[CONF_VEDO_PIN]
|
||||
await validate_input(self.hass, data_to_validate)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
@@ -210,10 +189,6 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
errors["base"] = "invalid_auth"
|
||||
except InvalidPin:
|
||||
errors["base"] = "invalid_pin"
|
||||
except InvalidVedoPin:
|
||||
errors["base"] = "invalid_vedo_pin"
|
||||
except InvalidVedoAuth:
|
||||
errors["base"] = "invalid_vedo_auth"
|
||||
except Exception: # noqa: BLE001
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
@@ -223,8 +198,6 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
CONF_PORT: user_input[CONF_PORT],
|
||||
CONF_PIN: user_input[CONF_PIN],
|
||||
}
|
||||
if CONF_VEDO_PIN in user_input:
|
||||
data_updates[CONF_VEDO_PIN] = user_input[CONF_VEDO_PIN]
|
||||
return self.async_update_reload_and_abort(
|
||||
reconfigure_entry, data_updates=data_updates
|
||||
)
|
||||
@@ -238,7 +211,6 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
CONF_PORT, default=reconfigure_entry.data[CONF_PORT]
|
||||
): cv.port,
|
||||
vol.Optional(CONF_PIN): cv.string,
|
||||
vol.Optional(CONF_VEDO_PIN): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
@@ -259,11 +231,3 @@ class InvalidAuth(HomeAssistantError):
|
||||
|
||||
class InvalidPin(HomeAssistantError):
|
||||
"""Error to indicate an invalid pin."""
|
||||
|
||||
|
||||
class InvalidVedoPin(HomeAssistantError):
|
||||
"""Error to indicate an invalid VEDO pin."""
|
||||
|
||||
|
||||
class InvalidVedoAuth(HomeAssistantError):
|
||||
"""Error to indicate VEDO authentication failed."""
|
||||
|
||||
@@ -19,7 +19,6 @@ ObjectClassType = (
|
||||
DOMAIN = "comelit"
|
||||
DEFAULT_PORT = 80
|
||||
DEVICE_TYPE_LIST = [BRIDGE, VEDO]
|
||||
CONF_VEDO_PIN = "vedo_pin"
|
||||
|
||||
SCAN_INTERVAL = 5
|
||||
|
||||
|
||||
@@ -1,14 +1,17 @@
|
||||
"""Support for Comelit."""
|
||||
|
||||
from abc import abstractmethod
|
||||
from collections.abc import Mapping
|
||||
from datetime import timedelta
|
||||
from typing import TypeVar, cast
|
||||
from typing import Any, TypeVar
|
||||
|
||||
from aiocomelit.api import ComelitCommonApi, ComeliteSerialBridgeApi, ComelitVedoApi
|
||||
from aiocomelit.api import (
|
||||
AlarmDataObject,
|
||||
ComelitCommonApi,
|
||||
ComeliteSerialBridgeApi,
|
||||
ComelitSerialBridgeObject,
|
||||
ComelitVedoApi,
|
||||
)
|
||||
from aiocomelit.const import (
|
||||
ALARM_AREA,
|
||||
ALARM_ZONE,
|
||||
BRIDGE,
|
||||
CLIMATE,
|
||||
COVER,
|
||||
@@ -34,10 +37,7 @@ type ComelitConfigEntry = ConfigEntry[ComelitBaseCoordinator]
|
||||
|
||||
T = TypeVar(
|
||||
"T",
|
||||
bound=dict[
|
||||
str,
|
||||
Mapping[int, ObjectClassType],
|
||||
],
|
||||
bound=dict[str, dict[int, ComelitSerialBridgeObject]] | AlarmDataObject,
|
||||
)
|
||||
|
||||
|
||||
@@ -118,8 +118,8 @@ class ComelitBaseCoordinator(DataUpdateCoordinator[T]):
|
||||
|
||||
async def _async_remove_stale_devices(
|
||||
self,
|
||||
previous_list: Mapping[int, ObjectClassType],
|
||||
current_list: Mapping[int, ObjectClassType],
|
||||
previous_list: dict[int, Any],
|
||||
current_list: dict[int, Any],
|
||||
dev_type: str,
|
||||
) -> None:
|
||||
"""Remove stale devices."""
|
||||
@@ -143,7 +143,9 @@ class ComelitBaseCoordinator(DataUpdateCoordinator[T]):
|
||||
)
|
||||
|
||||
|
||||
class ComelitSerialBridge(ComelitBaseCoordinator[T]):
|
||||
class ComelitSerialBridge(
|
||||
ComelitBaseCoordinator[dict[str, dict[int, ComelitSerialBridgeObject]]]
|
||||
):
|
||||
"""Queries Comelit Serial Bridge."""
|
||||
|
||||
_hw_version = "20003101"
|
||||
@@ -156,23 +158,17 @@ class ComelitSerialBridge(ComelitBaseCoordinator[T]):
|
||||
host: str,
|
||||
port: int,
|
||||
pin: str,
|
||||
vedo_pin: str | None,
|
||||
session: ClientSession,
|
||||
) -> None:
|
||||
"""Initialize the scanner."""
|
||||
self.api = ComeliteSerialBridgeApi(host, port, pin, session)
|
||||
self.vedo_pin = vedo_pin
|
||||
super().__init__(hass, entry, BRIDGE, host)
|
||||
|
||||
async def _async_update_system_data(
|
||||
self,
|
||||
) -> T:
|
||||
) -> dict[str, dict[int, ComelitSerialBridgeObject]]:
|
||||
"""Specific method for updating data."""
|
||||
data: dict[
|
||||
str,
|
||||
Mapping[int, ObjectClassType],
|
||||
] = {}
|
||||
data.update(await self.api.get_all_devices())
|
||||
data = await self.api.get_all_devices()
|
||||
|
||||
if self.data:
|
||||
for dev_type in (CLIMATE, COVER, LIGHT, IRRIGATION, OTHER, SCENARIO):
|
||||
@@ -180,14 +176,10 @@ class ComelitSerialBridge(ComelitBaseCoordinator[T]):
|
||||
self.data[dev_type], data[dev_type], dev_type
|
||||
)
|
||||
|
||||
# Get VEDO alarm data if vedo_pin is configured
|
||||
if self.vedo_pin:
|
||||
data.update(await self.api.get_all_areas_and_zones())
|
||||
|
||||
return cast(T, data)
|
||||
return data
|
||||
|
||||
|
||||
class ComelitVedoSystem(ComelitBaseCoordinator[T]):
|
||||
class ComelitVedoSystem(ComelitBaseCoordinator[AlarmDataObject]):
|
||||
"""Queries Comelit VEDO system."""
|
||||
|
||||
_hw_version = "VEDO IP"
|
||||
@@ -204,21 +196,20 @@ class ComelitVedoSystem(ComelitBaseCoordinator[T]):
|
||||
) -> None:
|
||||
"""Initialize the scanner."""
|
||||
self.api = ComelitVedoApi(host, port, pin, session)
|
||||
self.vedo_pin = pin
|
||||
super().__init__(hass, entry, VEDO, host)
|
||||
|
||||
async def _async_update_system_data(
|
||||
self,
|
||||
) -> T:
|
||||
) -> AlarmDataObject:
|
||||
"""Specific method for updating data."""
|
||||
data = await self.api.get_all_areas_and_zones()
|
||||
|
||||
if self.data:
|
||||
for obj_type in (ALARM_AREA, ALARM_ZONE):
|
||||
for obj_type in ("alarm_areas", "alarm_zones"):
|
||||
await self._async_remove_stale_devices(
|
||||
self.data[obj_type],
|
||||
data[obj_type],
|
||||
"area" if obj_type == ALARM_AREA else "zone",
|
||||
"area" if obj_type == "alarm_areas" else "zone",
|
||||
)
|
||||
|
||||
return cast(T, data)
|
||||
return data
|
||||
|
||||
@@ -72,7 +72,7 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
|
||||
@property
|
||||
def device_status(self) -> int:
|
||||
"""Return current device status."""
|
||||
return cast("int", self.coordinator.data[COVER][self._device.index].status)
|
||||
return self.coordinator.data[COVER][self._device.index].status
|
||||
|
||||
@property
|
||||
def is_closed(self) -> bool | None:
|
||||
@@ -86,7 +86,7 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
|
||||
@property
|
||||
def is_closing(self) -> bool:
|
||||
"""Return if the cover is closing."""
|
||||
return bool(self._current_action("closing"))
|
||||
return self._current_action("closing")
|
||||
|
||||
@property
|
||||
def is_opening(self) -> bool:
|
||||
|
||||
@@ -68,4 +68,4 @@ class ComelitLightEntity(ComelitBridgeBaseEntity, LightEntity):
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return True if light is on."""
|
||||
return bool(self.coordinator.data[LIGHT][self._device.index].status == STATE_ON)
|
||||
return self.coordinator.data[LIGHT][self._device.index].status == STATE_ON
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aiocomelit"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aiocomelit==2.0.0"]
|
||||
"requirements": ["aiocomelit==1.1.2"]
|
||||
}
|
||||
|
||||
@@ -2,17 +2,17 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Final, cast
|
||||
from typing import Final, cast
|
||||
|
||||
from aiocomelit.api import ComelitSerialBridgeObject, ComelitVedoZoneObject
|
||||
from aiocomelit.const import ALARM_ZONE, OTHER, AlarmZoneState
|
||||
from aiocomelit.const import BRIDGE, OTHER, AlarmZoneState
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
)
|
||||
from homeassistant.const import UnitOfPower
|
||||
from homeassistant.const import CONF_TYPE, UnitOfPower
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
@@ -52,20 +52,23 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up Comelit sensors."""
|
||||
|
||||
coordinator = config_entry.runtime_data
|
||||
is_bridge = isinstance(coordinator, ComelitSerialBridge)
|
||||
if config_entry.data.get(CONF_TYPE, BRIDGE) == BRIDGE:
|
||||
await async_setup_bridge_entry(hass, config_entry, async_add_entities)
|
||||
else:
|
||||
await async_setup_vedo_entry(hass, config_entry, async_add_entities)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
if is_bridge:
|
||||
assert isinstance(coordinator, ComelitSerialBridge)
|
||||
else:
|
||||
assert isinstance(coordinator, ComelitVedoSystem)
|
||||
|
||||
def _add_new_bridge_entities(
|
||||
new_devices: list[ObjectClassType], dev_type: str
|
||||
) -> None:
|
||||
async def async_setup_bridge_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ComelitConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Comelit Bridge sensors."""
|
||||
|
||||
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
|
||||
|
||||
def _add_new_entities(new_devices: list[ObjectClassType], dev_type: str) -> None:
|
||||
"""Add entities for new monitors."""
|
||||
assert isinstance(coordinator, ComelitSerialBridge)
|
||||
entities = [
|
||||
ComelitBridgeSensorEntity(
|
||||
coordinator, device, config_entry.entry_id, sensor_desc
|
||||
@@ -77,32 +80,36 @@ async def async_setup_entry(
|
||||
if entities:
|
||||
async_add_entities(entities)
|
||||
|
||||
def _add_new_vedo_entities(
|
||||
new_devices: list[ObjectClassType], dev_type: str
|
||||
) -> None:
|
||||
config_entry.async_on_unload(
|
||||
new_device_listener(coordinator, _add_new_entities, OTHER)
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_vedo_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ComelitConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Comelit VEDO sensors."""
|
||||
|
||||
coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)
|
||||
|
||||
def _add_new_entities(new_devices: list[ObjectClassType], dev_type: str) -> None:
|
||||
"""Add entities for new monitors."""
|
||||
entities = [
|
||||
ComelitVedoSensorEntity(
|
||||
coordinator, device, config_entry.entry_id, sensor_desc
|
||||
)
|
||||
for sensor_desc in SENSOR_VEDO_TYPES
|
||||
for device in coordinator.data[dev_type].values()
|
||||
for device in coordinator.data["alarm_zones"].values()
|
||||
if device in new_devices
|
||||
]
|
||||
if entities:
|
||||
async_add_entities(entities)
|
||||
|
||||
# Bridge native sensors
|
||||
if is_bridge:
|
||||
config_entry.async_on_unload(
|
||||
new_device_listener(coordinator, _add_new_bridge_entities, OTHER)
|
||||
)
|
||||
|
||||
# Alarm sensors (both via Bridge or VedoSystem)
|
||||
if coordinator.vedo_pin:
|
||||
config_entry.async_on_unload(
|
||||
new_device_listener(coordinator, _add_new_vedo_entities, ALARM_ZONE)
|
||||
)
|
||||
config_entry.async_on_unload(
|
||||
new_device_listener(coordinator, _add_new_entities, "alarm_zones")
|
||||
)
|
||||
|
||||
|
||||
class ComelitBridgeSensorEntity(ComelitBridgeBaseEntity, SensorEntity):
|
||||
@@ -134,16 +141,14 @@ class ComelitBridgeSensorEntity(ComelitBridgeBaseEntity, SensorEntity):
|
||||
)
|
||||
|
||||
|
||||
class ComelitVedoSensorEntity(
|
||||
CoordinatorEntity[ComelitVedoSystem | ComelitSerialBridge], SensorEntity
|
||||
):
|
||||
class ComelitVedoSensorEntity(CoordinatorEntity[ComelitVedoSystem], SensorEntity):
|
||||
"""Sensor device."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ComelitVedoSystem | ComelitSerialBridge,
|
||||
coordinator: ComelitVedoSystem,
|
||||
zone: ComelitVedoZoneObject,
|
||||
config_entry_entry_id: str,
|
||||
description: SensorEntityDescription,
|
||||
@@ -161,9 +166,7 @@ class ComelitVedoSensorEntity(
|
||||
@property
|
||||
def _zone_object(self) -> ComelitVedoZoneObject:
|
||||
"""Zone object."""
|
||||
return cast(
|
||||
ComelitVedoZoneObject, self.coordinator.data[ALARM_ZONE][self._zone_index]
|
||||
)
|
||||
return self.coordinator.data["alarm_zones"][self._zone_index]
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
|
||||
@@ -5,8 +5,6 @@
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"invalid_pin": "The provided PIN is invalid. It must be a 4-10 digit number.",
|
||||
"invalid_vedo_auth": "The provided VEDO PIN is incorrect or VEDO alarm is not enabled on this device.",
|
||||
"invalid_vedo_pin": "The provided VEDO PIN is invalid. It must be a 4-10 digit number.",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
@@ -15,34 +13,28 @@
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"invalid_pin": "[%key:component::comelit::config::abort::invalid_pin%]",
|
||||
"invalid_vedo_auth": "[%key:component::comelit::config::abort::invalid_vedo_auth%]",
|
||||
"invalid_vedo_pin": "[%key:component::comelit::config::abort::invalid_vedo_pin%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"flow_title": "{host}",
|
||||
"step": {
|
||||
"reauth_confirm": {
|
||||
"data": {
|
||||
"pin": "[%key:common::config_flow::data::pin%]",
|
||||
"vedo_pin": "[%key:component::comelit::config::step::user::data::vedo_pin%]"
|
||||
"pin": "[%key:common::config_flow::data::pin%]"
|
||||
},
|
||||
"data_description": {
|
||||
"pin": "The PIN of your Comelit device.",
|
||||
"vedo_pin": "[%key:component::comelit::config::step::user::data_description::vedo_pin%]"
|
||||
"pin": "The PIN of your Comelit device."
|
||||
}
|
||||
},
|
||||
"reconfigure": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"pin": "[%key:common::config_flow::data::pin%]",
|
||||
"port": "[%key:common::config_flow::data::port%]",
|
||||
"vedo_pin": "[%key:component::comelit::config::step::user::data::vedo_pin%]"
|
||||
"port": "[%key:common::config_flow::data::port%]"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "[%key:component::comelit::config::step::user::data_description::host%]",
|
||||
"pin": "[%key:component::comelit::config::step::reauth_confirm::data_description::pin%]",
|
||||
"port": "[%key:component::comelit::config::step::user::data_description::port%]",
|
||||
"vedo_pin": "[%key:component::comelit::config::step::user::data_description::vedo_pin%]"
|
||||
"port": "[%key:component::comelit::config::step::user::data_description::port%]"
|
||||
}
|
||||
},
|
||||
"user": {
|
||||
@@ -50,15 +42,13 @@
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"pin": "[%key:common::config_flow::data::pin%]",
|
||||
"port": "[%key:common::config_flow::data::port%]",
|
||||
"type": "Device type",
|
||||
"vedo_pin": "VEDO alarm PIN (optional)"
|
||||
"type": "Device type"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "The hostname or IP address of your Comelit device.",
|
||||
"pin": "[%key:component::comelit::config::step::reauth_confirm::data_description::pin%]",
|
||||
"port": "The port of your Comelit device.",
|
||||
"type": "The type of your Comelit device.",
|
||||
"vedo_pin": "Optional PIN for VEDO alarm system on Serial Bridge devices. Leave empty if you don't have VEDO alarm enabled."
|
||||
"type": "The type of your Comelit device."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -82,7 +82,7 @@ class ComelitSwitchEntity(ComelitBridgeBaseEntity, SwitchEntity):
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return True if switch is on."""
|
||||
return bool(
|
||||
return (
|
||||
self.coordinator.data[self._device.type][self._device.index].status
|
||||
== STATE_ON
|
||||
)
|
||||
|
||||
@@ -66,7 +66,6 @@ async def async_setup_entry(
|
||||
name="light",
|
||||
update_method=async_update_data_non_dimmer,
|
||||
update_interval=timedelta(seconds=runtime_data.scan_interval),
|
||||
config_entry=entry,
|
||||
)
|
||||
dimmer_coordinator = DataUpdateCoordinator[dict[int, dict[str, Any]]](
|
||||
hass,
|
||||
@@ -74,7 +73,6 @@ async def async_setup_entry(
|
||||
name="light",
|
||||
update_method=async_update_data_dimmer,
|
||||
update_interval=timedelta(seconds=runtime_data.scan_interval),
|
||||
config_entry=entry,
|
||||
)
|
||||
|
||||
# Fetch initial data so we have data when entities subscribe
|
||||
|
||||
@@ -110,7 +110,6 @@ async def async_setup_entry(
|
||||
name="room",
|
||||
update_method=async_update_data,
|
||||
update_interval=timedelta(seconds=scan_interval),
|
||||
config_entry=entry,
|
||||
)
|
||||
|
||||
# Fetch initial data so we have data when entities subscribe
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/conversation",
|
||||
"integration_type": "entity",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["hassil==3.5.0", "home-assistant-intents==2026.1.1"]
|
||||
"requirements": ["hassil==3.5.0", "home-assistant-intents==2025.12.2"]
|
||||
}
|
||||
|
||||
@@ -14,12 +14,7 @@ from .const import DOMAIN
|
||||
from .coordinator import CookidooConfigEntry, CookidooDataUpdateCoordinator
|
||||
from .helpers import cookidoo_from_config_entry
|
||||
|
||||
PLATFORMS: list[Platform] = [
|
||||
Platform.BUTTON,
|
||||
Platform.CALENDAR,
|
||||
Platform.SENSOR,
|
||||
Platform.TODO,
|
||||
]
|
||||
PLATFORMS: list[Platform] = [Platform.BUTTON, Platform.SENSOR, Platform.TODO]
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -1,103 +0,0 @@
|
||||
"""Calendar platform for the Cookidoo integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import date, datetime, timedelta
|
||||
import logging
|
||||
|
||||
from cookidoo_api import CookidooAuthException, CookidooException
|
||||
from cookidoo_api.types import CookidooCalendarDayRecipe
|
||||
|
||||
from homeassistant.components.calendar import CalendarEntity, CalendarEvent
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import CookidooConfigEntry, CookidooDataUpdateCoordinator
|
||||
from .entity import CookidooBaseEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: CookidooConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the calendar platform for entity."""
|
||||
coordinator = config_entry.runtime_data
|
||||
|
||||
async_add_entities([CookidooCalendarEntity(coordinator)])
|
||||
|
||||
|
||||
def recipe_to_event(day_date: date, recipe: CookidooCalendarDayRecipe) -> CalendarEvent:
|
||||
"""Convert a Cookidoo recipe to a CalendarEvent."""
|
||||
return CalendarEvent(
|
||||
start=day_date,
|
||||
end=day_date + timedelta(days=1), # All-day event
|
||||
summary=recipe.name,
|
||||
description=f"Total Time: {recipe.total_time}",
|
||||
)
|
||||
|
||||
|
||||
class CookidooCalendarEntity(CookidooBaseEntity, CalendarEntity):
|
||||
"""A calendar entity."""
|
||||
|
||||
_attr_translation_key = "meal_plan"
|
||||
|
||||
def __init__(self, coordinator: CookidooDataUpdateCoordinator) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(coordinator)
|
||||
assert coordinator.config_entry.unique_id
|
||||
self._attr_unique_id = coordinator.config_entry.unique_id
|
||||
|
||||
@property
|
||||
def event(self) -> CalendarEvent | None:
|
||||
"""Return the next upcoming event."""
|
||||
if not self.coordinator.data.week_plan:
|
||||
return None
|
||||
|
||||
today = date.today()
|
||||
for day_data in self.coordinator.data.week_plan:
|
||||
day_date = date.fromisoformat(day_data.id)
|
||||
if day_date >= today and day_data.recipes:
|
||||
recipe = day_data.recipes[0]
|
||||
return recipe_to_event(day_date, recipe)
|
||||
return None
|
||||
|
||||
async def _fetch_week_plan(self, week_day: date) -> list:
|
||||
"""Fetch a single Cookidoo week plan, retrying once on auth failure."""
|
||||
try:
|
||||
return await self.coordinator.cookidoo.get_recipes_in_calendar_week(
|
||||
week_day
|
||||
)
|
||||
except CookidooAuthException:
|
||||
await self.coordinator.cookidoo.refresh_token()
|
||||
return await self.coordinator.cookidoo.get_recipes_in_calendar_week(
|
||||
week_day
|
||||
)
|
||||
except CookidooException as e:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="calendar_fetch_failed",
|
||||
) from e
|
||||
|
||||
async def async_get_events(
|
||||
self, hass: HomeAssistant, start_date: datetime, end_date: datetime
|
||||
) -> list[CalendarEvent]:
|
||||
"""Get all events in a specific time frame."""
|
||||
events: list[CalendarEvent] = []
|
||||
current_day = start_date.date()
|
||||
while current_day <= end_date.date():
|
||||
week_plan = await self._fetch_week_plan(current_day)
|
||||
for day_data in week_plan:
|
||||
day_date = date.fromisoformat(day_data.id)
|
||||
if start_date.date() <= day_date <= end_date.date():
|
||||
events.extend(
|
||||
recipe_to_event(day_date, recipe) for recipe in day_data.recipes
|
||||
)
|
||||
current_day += timedelta(days=7) # Move to the next week
|
||||
return events
|
||||
@@ -3,7 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import date, timedelta
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from cookidoo_api import (
|
||||
@@ -16,7 +16,6 @@ from cookidoo_api import (
|
||||
CookidooSubscription,
|
||||
CookidooUserInfo,
|
||||
)
|
||||
from cookidoo_api.types import CookidooCalendarDay
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_EMAIL
|
||||
@@ -38,7 +37,6 @@ class CookidooData:
|
||||
ingredient_items: list[CookidooIngredientItem]
|
||||
additional_items: list[CookidooAdditionalItem]
|
||||
subscription: CookidooSubscription | None
|
||||
week_plan: list[CookidooCalendarDay]
|
||||
|
||||
|
||||
class CookidooDataUpdateCoordinator(DataUpdateCoordinator[CookidooData]):
|
||||
@@ -83,7 +81,6 @@ class CookidooDataUpdateCoordinator(DataUpdateCoordinator[CookidooData]):
|
||||
ingredient_items = await self.cookidoo.get_ingredient_items()
|
||||
additional_items = await self.cookidoo.get_additional_items()
|
||||
subscription = await self.cookidoo.get_active_subscription()
|
||||
week_plan = await self.cookidoo.get_recipes_in_calendar_week(date.today())
|
||||
except CookidooAuthException:
|
||||
try:
|
||||
await self.cookidoo.refresh_token()
|
||||
@@ -109,5 +106,4 @@ class CookidooDataUpdateCoordinator(DataUpdateCoordinator[CookidooData]):
|
||||
ingredient_items=ingredient_items,
|
||||
additional_items=additional_items,
|
||||
subscription=subscription,
|
||||
week_plan=week_plan,
|
||||
)
|
||||
|
||||
@@ -54,11 +54,6 @@
|
||||
"name": "Clear shopping list and additional purchases"
|
||||
}
|
||||
},
|
||||
"calendar": {
|
||||
"meal_plan": {
|
||||
"name": "Meal plan"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"expires": {
|
||||
"name": "Subscription expiration date"
|
||||
@@ -85,9 +80,6 @@
|
||||
"button_clear_todo_failed": {
|
||||
"message": "Failed to clear all items from the Cookidoo shopping list"
|
||||
},
|
||||
"calendar_fetch_failed": {
|
||||
"message": "Failed to fetch Cookidoo meal plan"
|
||||
},
|
||||
"setup_authentication_exception": {
|
||||
"message": "Authentication failed for {email}, check your email and password"
|
||||
},
|
||||
|
||||
@@ -7,7 +7,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
|
||||
from .const import CONF_SEND_WAKEUP_PROMPT, CONF_SWING_SUPPORT, DOMAIN
|
||||
from .const import CONF_SWING_SUPPORT, DOMAIN
|
||||
from .coordinator import CoolmasterConfigEntry, CoolmasterDataUpdateCoordinator
|
||||
|
||||
PLATFORMS = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.CLIMATE, Platform.SENSOR]
|
||||
@@ -17,12 +17,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: CoolmasterConfigEntry) -
|
||||
"""Set up Coolmaster from a config entry."""
|
||||
host = entry.data[CONF_HOST]
|
||||
port = entry.data[CONF_PORT]
|
||||
send_wakeup_prompt = entry.data.get(CONF_SEND_WAKEUP_PROMPT, False)
|
||||
if not entry.data.get(CONF_SWING_SUPPORT):
|
||||
coolmaster = CoolMasterNet(
|
||||
host,
|
||||
port,
|
||||
send_initial_line_feed=send_wakeup_prompt,
|
||||
)
|
||||
else:
|
||||
# Swing support adds an additional request per unit. The requests are
|
||||
@@ -31,7 +29,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: CoolmasterConfigEntry) -
|
||||
coolmaster = CoolMasterNet(
|
||||
host,
|
||||
port,
|
||||
send_initial_line_feed=send_wakeup_prompt,
|
||||
read_timeout=5,
|
||||
swing_support=True,
|
||||
)
|
||||
|
||||
@@ -12,13 +12,7 @@ from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_HOST, CONF_PORT
|
||||
from homeassistant.core import callback
|
||||
|
||||
from .const import (
|
||||
CONF_SEND_WAKEUP_PROMPT,
|
||||
CONF_SUPPORTED_MODES,
|
||||
CONF_SWING_SUPPORT,
|
||||
DEFAULT_PORT,
|
||||
DOMAIN,
|
||||
)
|
||||
from .const import CONF_SUPPORTED_MODES, CONF_SWING_SUPPORT, DEFAULT_PORT, DOMAIN
|
||||
|
||||
AVAILABLE_MODES = [
|
||||
HVACMode.OFF.value,
|
||||
@@ -31,15 +25,17 @@ AVAILABLE_MODES = [
|
||||
|
||||
MODES_SCHEMA = {vol.Required(mode, default=True): bool for mode in AVAILABLE_MODES}
|
||||
|
||||
DATA_SCHEMA = {
|
||||
vol.Required(CONF_HOST): str,
|
||||
**MODES_SCHEMA,
|
||||
vol.Required(CONF_SWING_SUPPORT, default=False): bool,
|
||||
}
|
||||
DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST): str,
|
||||
**MODES_SCHEMA,
|
||||
vol.Required(CONF_SWING_SUPPORT, default=False): bool,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def _validate_connection(host: str, send_wakeup_prompt: bool) -> bool:
|
||||
cool = CoolMasterNet(host, DEFAULT_PORT, send_initial_line_feed=send_wakeup_prompt)
|
||||
async def _validate_connection(host: str) -> bool:
|
||||
cool = CoolMasterNet(host, DEFAULT_PORT)
|
||||
units = await cool.status()
|
||||
return bool(units)
|
||||
|
||||
@@ -49,14 +45,6 @@ class CoolmasterConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
VERSION = 1
|
||||
|
||||
def _get_data_schema(self) -> vol.Schema:
|
||||
schema_dict = DATA_SCHEMA.copy()
|
||||
|
||||
if self.show_advanced_options:
|
||||
schema_dict[vol.Required(CONF_SEND_WAKEUP_PROMPT, default=False)] = bool
|
||||
|
||||
return vol.Schema(schema_dict)
|
||||
|
||||
@callback
|
||||
def _async_get_entry(self, data: dict[str, Any]) -> ConfigFlowResult:
|
||||
supported_modes = [
|
||||
@@ -69,7 +57,6 @@ class CoolmasterConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
CONF_PORT: DEFAULT_PORT,
|
||||
CONF_SUPPORTED_MODES: supported_modes,
|
||||
CONF_SWING_SUPPORT: data[CONF_SWING_SUPPORT],
|
||||
CONF_SEND_WAKEUP_PROMPT: data.get(CONF_SEND_WAKEUP_PROMPT, False),
|
||||
},
|
||||
)
|
||||
|
||||
@@ -77,19 +64,15 @@ class CoolmasterConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a flow initialized by the user."""
|
||||
data_schema = self._get_data_schema()
|
||||
|
||||
if user_input is None:
|
||||
return self.async_show_form(step_id="user", data_schema=data_schema)
|
||||
return self.async_show_form(step_id="user", data_schema=DATA_SCHEMA)
|
||||
|
||||
errors = {}
|
||||
|
||||
host = user_input[CONF_HOST]
|
||||
|
||||
try:
|
||||
result = await _validate_connection(
|
||||
host, user_input.get(CONF_SEND_WAKEUP_PROMPT, False)
|
||||
)
|
||||
result = await _validate_connection(host)
|
||||
if not result:
|
||||
errors["base"] = "no_units"
|
||||
except OSError:
|
||||
@@ -97,7 +80,7 @@ class CoolmasterConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
if errors:
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=data_schema, errors=errors
|
||||
step_id="user", data_schema=DATA_SCHEMA, errors=errors
|
||||
)
|
||||
|
||||
return self._async_get_entry(user_input)
|
||||
|
||||
@@ -6,6 +6,5 @@ DEFAULT_PORT = 10102
|
||||
|
||||
CONF_SUPPORTED_MODES = "supported_modes"
|
||||
CONF_SWING_SUPPORT = "swing_support"
|
||||
CONF_SEND_WAKEUP_PROMPT = "send_wakeup_prompt"
|
||||
MAX_RETRIES = 3
|
||||
BACKOFF_BASE_DELAY = 2
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pycoolmasternet_async"],
|
||||
"requirements": ["pycoolmasternet-async==0.2.4"]
|
||||
"requirements": ["pycoolmasternet-async==0.2.2"]
|
||||
}
|
||||
|
||||
@@ -14,12 +14,10 @@
|
||||
"heat_cool": "Support automatic heat/cool mode",
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"off": "Can be turned off",
|
||||
"send_wakeup_prompt": "Send wakeup prompt",
|
||||
"swing_support": "Control swing mode"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "The hostname or IP address of your CoolMasterNet device.",
|
||||
"send_wakeup_prompt": "Send the coolmaster unit an empty commaand before issuing any actual command. This is required for serial models."
|
||||
"host": "The hostname or IP address of your CoolMasterNet device."
|
||||
},
|
||||
"description": "Set up your CoolMasterNet connection details."
|
||||
}
|
||||
|
||||
@@ -70,7 +70,6 @@ MEDIA_MODES = {
|
||||
"Favorites": "FAVORITES",
|
||||
"Internet Radio": "IRADIO",
|
||||
"USB/IPOD": "USB/IPOD",
|
||||
"USB": "USB",
|
||||
}
|
||||
|
||||
# Sub-modes of 'NET/USB'
|
||||
@@ -280,7 +279,7 @@ class DenonDevice(MediaPlayerEntity):
|
||||
def mute_volume(self, mute: bool) -> None:
|
||||
"""Mute (true) or unmute (false) media player."""
|
||||
mute_status = "ON" if mute else "OFF"
|
||||
self.telnet_command(f"MU{mute_status}")
|
||||
self.telnet_command(f"MU{mute_status})")
|
||||
|
||||
def media_play(self) -> None:
|
||||
"""Play media player."""
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["async_upnp_client"],
|
||||
"requirements": ["async-upnp-client==0.46.1", "getmac==0.9.5"],
|
||||
"requirements": ["async-upnp-client==0.46.0", "getmac==0.9.5"],
|
||||
"ssdp": [
|
||||
{
|
||||
"deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1",
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/dlna_dms",
|
||||
"integration_type": "service",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["async-upnp-client==0.46.1"],
|
||||
"requirements": ["async-upnp-client==0.46.0"],
|
||||
"ssdp": [
|
||||
{
|
||||
"deviceType": "urn:schemas-upnp-org:device:MediaServer:1",
|
||||
|
||||
@@ -8,16 +8,25 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import SOURCE_IMPORT
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_DOMAIN
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.selector import ConfigEntrySelector
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DOMAIN
|
||||
from .const import ATTR_CONFIG_ENTRY
|
||||
from .coordinator import DuckDnsConfigEntry, DuckDnsUpdateCoordinator
|
||||
from .services import async_setup_services
|
||||
from .helpers import update_duckdns
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTR_TXT = "txt"
|
||||
|
||||
DOMAIN = "duckdns"
|
||||
|
||||
SERVICE_SET_TXT = "set_txt"
|
||||
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
@@ -31,11 +40,27 @@ CONFIG_SCHEMA = vol.Schema(
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
SERVICE_TXT_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Optional(ATTR_CONFIG_ENTRY): ConfigEntrySelector(
|
||||
{
|
||||
"integration": DOMAIN,
|
||||
}
|
||||
),
|
||||
vol.Optional(ATTR_TXT): vol.Any(None, cv.string),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Initialize the DuckDNS component."""
|
||||
|
||||
async_setup_services(hass)
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_SET_TXT,
|
||||
update_domain_service,
|
||||
schema=SERVICE_TXT_SCHEMA,
|
||||
)
|
||||
|
||||
if DOMAIN not in config:
|
||||
return True
|
||||
@@ -62,6 +87,49 @@ async def async_setup_entry(hass: HomeAssistant, entry: DuckDnsConfigEntry) -> b
|
||||
return True
|
||||
|
||||
|
||||
def get_config_entry(
|
||||
hass: HomeAssistant, entry_id: str | None = None
|
||||
) -> DuckDnsConfigEntry:
|
||||
"""Return config entry or raise if not found or not loaded."""
|
||||
|
||||
if entry_id is None:
|
||||
if not (config_entries := hass.config_entries.async_entries(DOMAIN)):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="entry_not_found",
|
||||
)
|
||||
|
||||
if len(config_entries) != 1:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="entry_not_selected",
|
||||
)
|
||||
return config_entries[0]
|
||||
|
||||
if not (entry := hass.config_entries.async_get_entry(entry_id)):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="entry_not_found",
|
||||
)
|
||||
|
||||
return entry
|
||||
|
||||
|
||||
async def update_domain_service(call: ServiceCall) -> None:
|
||||
"""Update the DuckDNS entry."""
|
||||
|
||||
entry = get_config_entry(call.hass, call.data.get(ATTR_CONFIG_ENTRY))
|
||||
|
||||
session = async_get_clientsession(call.hass)
|
||||
|
||||
await update_duckdns(
|
||||
session,
|
||||
entry.data[CONF_DOMAIN],
|
||||
entry.data[CONF_ACCESS_TOKEN],
|
||||
txt=call.data.get(ATTR_TXT),
|
||||
)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: DuckDnsConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return True
|
||||
|
||||
@@ -5,5 +5,3 @@ from typing import Final
|
||||
DOMAIN = "duckdns"
|
||||
|
||||
ATTR_CONFIG_ENTRY: Final = "config_entry_id"
|
||||
ATTR_TXT: Final = "txt"
|
||||
SERVICE_SET_TXT = "set_txt"
|
||||
|
||||
@@ -1,70 +0,0 @@
|
||||
"""Actions for Duck DNS."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_DOMAIN
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.selector import ConfigEntrySelector
|
||||
|
||||
from .const import ATTR_CONFIG_ENTRY, ATTR_TXT, DOMAIN, SERVICE_SET_TXT
|
||||
from .coordinator import DuckDnsConfigEntry
|
||||
from .helpers import update_duckdns
|
||||
|
||||
SERVICE_TXT_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Optional(ATTR_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
|
||||
vol.Optional(ATTR_TXT): vol.Any(None, cv.string),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up services for Habitica integration."""
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_SET_TXT,
|
||||
update_domain_service,
|
||||
schema=SERVICE_TXT_SCHEMA,
|
||||
)
|
||||
|
||||
|
||||
def get_config_entry(
|
||||
hass: HomeAssistant, entry_id: str | None = None
|
||||
) -> DuckDnsConfigEntry:
|
||||
"""Return config entry or raise if not found or not loaded."""
|
||||
|
||||
if entry_id is None:
|
||||
if len(entries := hass.config_entries.async_entries(DOMAIN)) != 1:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="entry_not_selected",
|
||||
)
|
||||
return entries[0]
|
||||
if not (entry := hass.config_entries.async_get_entry(entry_id)):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="entry_not_found",
|
||||
)
|
||||
return entry
|
||||
|
||||
|
||||
async def update_domain_service(call: ServiceCall) -> None:
|
||||
"""Update the DuckDNS entry."""
|
||||
|
||||
entry = get_config_entry(call.hass, call.data.get(ATTR_CONFIG_ENTRY))
|
||||
|
||||
session = async_get_clientsession(call.hass)
|
||||
|
||||
await update_duckdns(
|
||||
session,
|
||||
entry.data[CONF_DOMAIN],
|
||||
entry.data[CONF_ACCESS_TOKEN],
|
||||
txt=call.data.get(ATTR_TXT),
|
||||
)
|
||||
@@ -110,7 +110,7 @@ async def async_register_dynalite_frontend(hass: HomeAssistant):
|
||||
frontend_url_path=DOMAIN,
|
||||
config_panel_domain=DOMAIN,
|
||||
webcomponent_name="dynalite-panel",
|
||||
module_url=f"{URL_BASE}/entrypoint.{build_id}.js",
|
||||
module_url=f"{URL_BASE}/entrypoint-{build_id}.js",
|
||||
embed_iframe=True,
|
||||
require_admin=True,
|
||||
)
|
||||
|
||||
@@ -35,7 +35,7 @@
|
||||
"cpu_overheating": "CPU overheating",
|
||||
"none": "None",
|
||||
"pellets": "Pellets",
|
||||
"unknown": "Unknown alarm"
|
||||
"unkownn": "Unknown alarm"
|
||||
}
|
||||
},
|
||||
"convector_air_flow": {
|
||||
|
||||
@@ -5,7 +5,6 @@ from __future__ import annotations
|
||||
from collections.abc import AsyncIterable
|
||||
from io import BytesIO
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from elevenlabs import AsyncElevenLabs
|
||||
from elevenlabs.core import ApiError
|
||||
@@ -181,17 +180,15 @@ class ElevenLabsSTTEntity(SpeechToTextEntity):
|
||||
)
|
||||
|
||||
try:
|
||||
kwargs: dict[str, Any] = {
|
||||
"file": BytesIO(audio),
|
||||
"file_format": file_format,
|
||||
"model_id": self._stt_model,
|
||||
"tag_audio_events": False,
|
||||
"num_speakers": 1,
|
||||
"diarize": False,
|
||||
}
|
||||
if lang_code is not None:
|
||||
kwargs["language_code"] = lang_code
|
||||
response = await self._client.speech_to_text.convert(**kwargs)
|
||||
response = await self._client.speech_to_text.convert(
|
||||
file=BytesIO(audio),
|
||||
file_format=file_format,
|
||||
model_id=self._stt_model,
|
||||
language_code=lang_code,
|
||||
tag_audio_events=False,
|
||||
num_speakers=1,
|
||||
diarize=False,
|
||||
)
|
||||
except ApiError as exc:
|
||||
_LOGGER.error("Error during processing of STT request: %s", exc)
|
||||
return stt.SpeechResult(None, SpeechResultState.ERROR)
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": [],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/enocean",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["enocean"],
|
||||
"requirements": ["enocean==0.50"],
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.