forked from home-assistant/core
Compare commits: rc...get_automa
7 commits
| Author | SHA1 | Date |
|---|---|---|
| | 0e00e15fa5 | |
| | 3e19b5f6d5 | |
| | 3f1d3bfc4a | |
| | 08a8bfaf8f | |
| | 31f8ea523f | |
| | 0924740cb4 | |
| | 9f039002ff | |
4
.github/workflows/builder.yml
vendored
@@ -509,7 +509,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}

- name: Build Docker image
uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
@@ -522,7 +522,7 @@ jobs:
- name: Push Docker image
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
id: push
uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
42
.github/workflows/ci.yaml
vendored
@@ -653,7 +653,7 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Dependency review
uses: actions/dependency-review-action@v4.7.1
uses: actions/dependency-review-action@v4.7.0
with:
license-check: false # We use our own license audit checks
@@ -944,8 +944,7 @@ jobs:
bluez \
ffmpeg \
libturbojpeg \
libgammu-dev \
libxml2-utils
libgammu-dev
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
@@ -1021,12 +1020,6 @@ jobs:
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
path: coverage.xml
overwrite: true
- name: Beautify test results
# For easier identification of parsing errors
if: needs.info.outputs.skip_coverage != 'true'
run: |
xmllint --format "junit.xml" > "junit.xml-tmp"
mv "junit.xml-tmp" "junit.xml"
- name: Upload test results artifact
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
uses: actions/upload-artifact@v4.6.2
@@ -1077,8 +1070,7 @@ jobs:
bluez \
ffmpeg \
libturbojpeg \
libmariadb-dev-compat \
libxml2-utils
libmariadb-dev-compat
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
@@ -1162,12 +1154,6 @@ jobs:
steps.pytest-partial.outputs.mariadb }}
path: coverage.xml
overwrite: true
- name: Beautify test results
# For easier identification of parsing errors
if: needs.info.outputs.skip_coverage != 'true'
run: |
xmllint --format "junit.xml" > "junit.xml-tmp"
mv "junit.xml-tmp" "junit.xml"
- name: Upload test results artifact
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
uses: actions/upload-artifact@v4.6.2
@@ -1216,8 +1202,7 @@ jobs:
sudo apt-get -y install \
bluez \
ffmpeg \
libturbojpeg \
libxml2-utils
libturbojpeg
sudo /usr/share/postgresql-common/pgdg/apt.postgresql.org.sh -y
sudo apt-get -y install \
postgresql-server-dev-14
@@ -1305,12 +1290,6 @@ jobs:
steps.pytest-partial.outputs.postgresql }}
path: coverage.xml
overwrite: true
- name: Beautify test results
# For easier identification of parsing errors
if: needs.info.outputs.skip_coverage != 'true'
run: |
xmllint --format "junit.xml" > "junit.xml-tmp"
mv "junit.xml-tmp" "junit.xml"
- name: Upload test results artifact
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
uses: actions/upload-artifact@v4.6.2
@@ -1341,7 +1320,7 @@ jobs:
pattern: coverage-*
- name: Upload coverage to Codecov
if: needs.info.outputs.test_full_suite == 'true'
uses: codecov/codecov-action@v5.4.3
uses: codecov/codecov-action@v5.4.2
with:
fail_ci_if_error: true
flags: full-suite
@@ -1378,8 +1357,7 @@ jobs:
bluez \
ffmpeg \
libturbojpeg \
libgammu-dev \
libxml2-utils
libgammu-dev
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
@@ -1458,12 +1436,6 @@ jobs:
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
path: coverage.xml
overwrite: true
- name: Beautify test results
# For easier identification of parsing errors
if: needs.info.outputs.skip_coverage != 'true'
run: |
xmllint --format "junit.xml" > "junit.xml-tmp"
mv "junit.xml-tmp" "junit.xml"
- name: Upload test results artifact
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
uses: actions/upload-artifact@v4.6.2
@@ -1491,7 +1463,7 @@ jobs:
pattern: coverage-*
- name: Upload coverage to Codecov
if: needs.info.outputs.test_full_suite == 'false'
uses: codecov/codecov-action@v5.4.3
uses: codecov/codecov-action@v5.4.2
with:
fail_ci_if_error: true
token: ${{ secrets.CODECOV_TOKEN }}
4
.github/workflows/codeql.yml
vendored
@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@v4.2.2

- name: Initialize CodeQL
uses: github/codeql-action/init@v3.28.18
uses: github/codeql-action/init@v3.28.17
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.28.18
uses: github/codeql-action/analyze@v3.28.17
with:
category: "/language:python"
@@ -65,7 +65,6 @@ homeassistant.components.aladdin_connect.*
homeassistant.components.alarm_control_panel.*
homeassistant.components.alert.*
homeassistant.components.alexa.*
homeassistant.components.alexa_devices.*
homeassistant.components.alpha_vantage.*
homeassistant.components.amazon_polly.*
homeassistant.components.amberelectric.*
@@ -271,7 +270,6 @@ homeassistant.components.image_processing.*
homeassistant.components.image_upload.*
homeassistant.components.imap.*
homeassistant.components.imgw_pib.*
homeassistant.components.immich.*
homeassistant.components.incomfort.*
homeassistant.components.input_button.*
homeassistant.components.input_select.*
@@ -387,7 +385,6 @@ homeassistant.components.overseerr.*
homeassistant.components.p1_monitor.*
homeassistant.components.pandora.*
homeassistant.components.panel_custom.*
homeassistant.components.paperless_ngx.*
homeassistant.components.peblar.*
homeassistant.components.peco.*
homeassistant.components.pegel_online.*
28
CODEOWNERS
generated
@@ -89,8 +89,6 @@ build.json @home-assistant/supervisor
/tests/components/alert/ @home-assistant/core @frenck
/homeassistant/components/alexa/ @home-assistant/cloud @ochlocracy @jbouwh
/tests/components/alexa/ @home-assistant/cloud @ochlocracy @jbouwh
/homeassistant/components/alexa_devices/ @chemelli74
/tests/components/alexa_devices/ @chemelli74
/homeassistant/components/amazon_polly/ @jschlyter
/homeassistant/components/amberelectric/ @madpilot
/tests/components/amberelectric/ @madpilot
@@ -204,8 +202,8 @@ build.json @home-assistant/supervisor
/tests/components/blebox/ @bbx-a @swistakm
/homeassistant/components/blink/ @fronzbot @mkmer
/tests/components/blink/ @fronzbot @mkmer
/homeassistant/components/blue_current/ @gleeuwen @NickKoepr @jtodorova23
/tests/components/blue_current/ @gleeuwen @NickKoepr @jtodorova23
/homeassistant/components/blue_current/ @Floris272 @gleeuwen
/tests/components/blue_current/ @Floris272 @gleeuwen
/homeassistant/components/bluemaestro/ @bdraco
/tests/components/bluemaestro/ @bdraco
/homeassistant/components/blueprint/ @home-assistant/core
@@ -305,7 +303,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/crownstone/ @Crownstone @RicArch97
/tests/components/crownstone/ @Crownstone @RicArch97
/homeassistant/components/cups/ @fabaff
/tests/components/cups/ @fabaff
/homeassistant/components/daikin/ @fredrike
/tests/components/daikin/ @fredrike
/homeassistant/components/date/ @home-assistant/core
@@ -713,8 +710,6 @@ build.json @home-assistant/supervisor
/tests/components/imeon_inverter/ @Imeon-Energy
/homeassistant/components/imgw_pib/ @bieniu
/tests/components/imgw_pib/ @bieniu
/homeassistant/components/immich/ @mib1185
/tests/components/immich/ @mib1185
/homeassistant/components/improv_ble/ @emontnemery
/tests/components/improv_ble/ @emontnemery
/homeassistant/components/incomfort/ @jbouwh
@@ -1143,8 +1138,6 @@ build.json @home-assistant/supervisor
/tests/components/palazzetti/ @dotvav
/homeassistant/components/panel_custom/ @home-assistant/frontend
/tests/components/panel_custom/ @home-assistant/frontend
/homeassistant/components/paperless_ngx/ @fvgarrel
/tests/components/paperless_ngx/ @fvgarrel
/homeassistant/components/peblar/ @frenck
/tests/components/peblar/ @frenck
/homeassistant/components/peco/ @IceBotYT
@@ -1183,8 +1176,6 @@ build.json @home-assistant/supervisor
/tests/components/powerwall/ @bdraco @jrester @daniel-simpson
/homeassistant/components/private_ble_device/ @Jc2k
/tests/components/private_ble_device/ @Jc2k
/homeassistant/components/probe_plus/ @pantherale0
/tests/components/probe_plus/ @pantherale0
/homeassistant/components/profiler/ @bdraco
/tests/components/profiler/ @bdraco
/homeassistant/components/progettihwsw/ @ardaseremet
@@ -1231,7 +1222,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/qnap_qsw/ @Noltari
/tests/components/qnap_qsw/ @Noltari
/homeassistant/components/quantum_gateway/ @cisasteelersfan
/tests/components/quantum_gateway/ @cisasteelersfan
/homeassistant/components/qvr_pro/ @oblogic7
/homeassistant/components/qwikswitch/ @kellerza
/tests/components/qwikswitch/ @kellerza
@@ -1420,8 +1410,6 @@ build.json @home-assistant/supervisor
/tests/components/sma/ @kellerza @rklomp @erwindouna
/homeassistant/components/smappee/ @bsmappee
/tests/components/smappee/ @bsmappee
/homeassistant/components/smarla/ @explicatis @rlint-explicatis
/tests/components/smarla/ @explicatis @rlint-explicatis
/homeassistant/components/smart_meter_texas/ @grahamwetzler
/tests/components/smart_meter_texas/ @grahamwetzler
/homeassistant/components/smartthings/ @joostlek
@@ -1496,8 +1484,8 @@ build.json @home-assistant/supervisor
/tests/components/subaru/ @G-Two
/homeassistant/components/suez_water/ @ooii @jb101010-2
/tests/components/suez_water/ @ooii @jb101010-2
/homeassistant/components/sun/ @home-assistant/core
/tests/components/sun/ @home-assistant/core
/homeassistant/components/sun/ @Swamp-Ig
/tests/components/sun/ @Swamp-Ig
/homeassistant/components/supla/ @mwegrzynek
/homeassistant/components/surepetcare/ @benleb @danielhiversen
/tests/components/surepetcare/ @benleb @danielhiversen
@@ -1510,8 +1498,8 @@ build.json @home-assistant/supervisor
/tests/components/switch_as_x/ @home-assistant/core
/homeassistant/components/switchbee/ @jafar-atili
/tests/components/switchbee/ @jafar-atili
/homeassistant/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski @zerzhang
/tests/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski @zerzhang
/homeassistant/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski
/tests/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski
/homeassistant/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
/tests/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
/homeassistant/components/switcher_kis/ @thecode @YogevBokobza
@@ -1551,8 +1539,8 @@ build.json @home-assistant/supervisor
/tests/components/tedee/ @patrickhilker @zweckj
/homeassistant/components/tellduslive/ @fredrike
/tests/components/tellduslive/ @fredrike
/homeassistant/components/template/ @Petro31 @home-assistant/core
/tests/components/template/ @Petro31 @home-assistant/core
/homeassistant/components/template/ @Petro31 @PhracturedBlue @home-assistant/core
/tests/components/template/ @Petro31 @PhracturedBlue @home-assistant/core
/homeassistant/components/tesla_fleet/ @Bre77
/tests/components/tesla_fleet/ @Bre77
/homeassistant/components/tesla_wall_connector/ @einarhauks
@@ -3,7 +3,6 @@
"name": "Amazon",
"integrations": [
"alexa",
"alexa_devices",
"amazon_polly",
"aws",
"aws_s3",
@@ -1,6 +0,0 @@
{
"domain": "shelly",
"name": "shelly",
"integrations": ["shelly"],
"iot_standards": ["zwave"]
}
@@ -40,10 +40,9 @@ class AcmedaFlowHandler(ConfigFlow, domain=DOMAIN):
entry.unique_id for entry in self._async_current_entries()
}

hubs: list[aiopulse.Hub] = []
with suppress(TimeoutError):
async with timeout(5):
hubs = [
hubs: list[aiopulse.Hub] = [
hub
async for hub in aiopulse.Hub.discover()
if hub.id not in already_configured
@@ -51,16 +51,9 @@ class AirGradientCoordinator(DataUpdateCoordinator[AirGradientData]):

async def _async_setup(self) -> None:
"""Set up the coordinator."""
try:
self._current_version = (
await self.client.get_current_measures()
).firmware_version
except AirGradientError as error:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="update_error",
translation_placeholders={"error": str(error)},
) from error
self._current_version = (
await self.client.get_current_measures()
).firmware_version

async def _async_update_data(self) -> AirGradientData:
try:
@@ -6,7 +6,6 @@ from typing import Any, Concatenate
from airgradient import AirGradientConnectionError, AirGradientError, get_model_name

from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
@@ -30,7 +29,6 @@ class AirGradientEntity(CoordinatorEntity[AirGradientCoordinator]):
model_id=measures.model,
serial_number=coordinator.serial_number,
sw_version=measures.firmware_version,
connections={(dr.CONNECTION_NETWORK_MAC, coordinator.serial_number)},
)
@@ -142,7 +142,7 @@ class AirtouchAC(CoordinatorEntity, ClimateEntity):
return AT_TO_HA_STATE[self._airtouch.acs[self._ac_number].AcMode]

@property
def hvac_modes(self) -> list[HVACMode]:
def hvac_modes(self):
"""Return the list of available operation modes."""
airtouch_modes = self._airtouch.GetSupportedCoolingModesForAc(self._ac_number)
modes = [AT_TO_HA_STATE[mode] for mode in airtouch_modes]
@@ -226,12 +226,12 @@ class AirtouchGroup(CoordinatorEntity, ClimateEntity):
return super()._handle_coordinator_update()

@property
def min_temp(self) -> float:
def min_temp(self):
"""Return Minimum Temperature for AC of this group."""
return self._airtouch.acs[self._unit.BelongsToAc].MinSetpoint

@property
def max_temp(self) -> float:
def max_temp(self):
"""Return Max Temperature for AC of this group."""
return self._airtouch.acs[self._unit.BelongsToAc].MaxSetpoint
@@ -1,32 +0,0 @@
|
||||
"""Alexa Devices integration."""
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import AmazonConfigEntry, AmazonDevicesCoordinator
|
||||
|
||||
PLATFORMS = [
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.NOTIFY,
|
||||
Platform.SWITCH,
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bool:
|
||||
"""Set up Alexa Devices platform."""
|
||||
|
||||
coordinator = AmazonDevicesCoordinator(hass, entry)
|
||||
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
await entry.runtime_data.api.close()
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
@@ -1,74 +0,0 @@
|
||||
"""Support for binary sensors."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import Final
|
||||
|
||||
from aioamazondevices.api import AmazonDevice
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
BinarySensorEntity,
|
||||
BinarySensorEntityDescription,
|
||||
)
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import AmazonConfigEntry
|
||||
from .entity import AmazonEntity
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class AmazonBinarySensorEntityDescription(BinarySensorEntityDescription):
|
||||
"""Alexa Devices binary sensor entity description."""
|
||||
|
||||
is_on_fn: Callable[[AmazonDevice], bool]
|
||||
|
||||
|
||||
BINARY_SENSORS: Final = (
|
||||
AmazonBinarySensorEntityDescription(
|
||||
key="online",
|
||||
device_class=BinarySensorDeviceClass.CONNECTIVITY,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
is_on_fn=lambda _device: _device.online,
|
||||
),
|
||||
AmazonBinarySensorEntityDescription(
|
||||
key="bluetooth",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
translation_key="bluetooth",
|
||||
is_on_fn=lambda _device: _device.bluetooth_state,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: AmazonConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Alexa Devices binary sensors based on a config entry."""
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
AmazonBinarySensorEntity(coordinator, serial_num, sensor_desc)
|
||||
for sensor_desc in BINARY_SENSORS
|
||||
for serial_num in coordinator.data
|
||||
)
|
||||
|
||||
|
||||
class AmazonBinarySensorEntity(AmazonEntity, BinarySensorEntity):
|
||||
"""Binary sensor device."""
|
||||
|
||||
entity_description: AmazonBinarySensorEntityDescription
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return True if the binary sensor is on."""
|
||||
return self.entity_description.is_on_fn(self.device)
|
||||
@@ -1,63 +0,0 @@
|
||||
"""Config flow for Alexa Devices integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from aioamazondevices.api import AmazonEchoApi
|
||||
from aioamazondevices.exceptions import CannotAuthenticate, CannotConnect
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_CODE, CONF_COUNTRY, CONF_PASSWORD, CONF_USERNAME
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.selector import CountrySelector
|
||||
|
||||
from .const import CONF_LOGIN_DATA, DOMAIN
|
||||
|
||||
|
||||
class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Alexa Devices."""
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors = {}
|
||||
if user_input:
|
||||
client = AmazonEchoApi(
|
||||
user_input[CONF_COUNTRY],
|
||||
user_input[CONF_USERNAME],
|
||||
user_input[CONF_PASSWORD],
|
||||
)
|
||||
try:
|
||||
data = await client.login_mode_interactive(user_input[CONF_CODE])
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except CannotAuthenticate:
|
||||
errors["base"] = "invalid_auth"
|
||||
else:
|
||||
await self.async_set_unique_id(data["customer_info"]["user_id"])
|
||||
self._abort_if_unique_id_configured()
|
||||
user_input.pop(CONF_CODE)
|
||||
return self.async_create_entry(
|
||||
title=user_input[CONF_USERNAME],
|
||||
data=user_input | {CONF_LOGIN_DATA: data},
|
||||
)
|
||||
finally:
|
||||
await client.close()
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
errors=errors,
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(
|
||||
CONF_COUNTRY, default=self.hass.config.country
|
||||
): CountrySelector(),
|
||||
vol.Required(CONF_USERNAME): cv.string,
|
||||
vol.Required(CONF_PASSWORD): cv.string,
|
||||
vol.Required(CONF_CODE): cv.string,
|
||||
}
|
||||
),
|
||||
)
|
||||
@@ -1,8 +0,0 @@
|
||||
"""Alexa Devices constants."""
|
||||
|
||||
import logging
|
||||
|
||||
_LOGGER = logging.getLogger(__package__)
|
||||
|
||||
DOMAIN = "alexa_devices"
|
||||
CONF_LOGIN_DATA = "login_data"
|
||||
@@ -1,58 +0,0 @@
|
||||
"""Support for Alexa Devices."""
|
||||
|
||||
from datetime import timedelta
|
||||
|
||||
from aioamazondevices.api import AmazonDevice, AmazonEchoApi
|
||||
from aioamazondevices.exceptions import (
|
||||
CannotAuthenticate,
|
||||
CannotConnect,
|
||||
CannotRetrieveData,
|
||||
)
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_COUNTRY, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryError
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import _LOGGER, CONF_LOGIN_DATA
|
||||
|
||||
SCAN_INTERVAL = 30
|
||||
|
||||
type AmazonConfigEntry = ConfigEntry[AmazonDevicesCoordinator]
|
||||
|
||||
|
||||
class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
|
||||
"""Base coordinator for Alexa Devices."""
|
||||
|
||||
config_entry: AmazonConfigEntry
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
entry: AmazonConfigEntry,
|
||||
) -> None:
|
||||
"""Initialize the scanner."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
name=entry.title,
|
||||
config_entry=entry,
|
||||
update_interval=timedelta(seconds=SCAN_INTERVAL),
|
||||
)
|
||||
self.api = AmazonEchoApi(
|
||||
entry.data[CONF_COUNTRY],
|
||||
entry.data[CONF_USERNAME],
|
||||
entry.data[CONF_PASSWORD],
|
||||
entry.data[CONF_LOGIN_DATA],
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> dict[str, AmazonDevice]:
|
||||
"""Update device data."""
|
||||
try:
|
||||
await self.api.login_mode_stored_data()
|
||||
return await self.api.get_devices_data()
|
||||
except (CannotConnect, CannotRetrieveData) as err:
|
||||
raise UpdateFailed(f"Error occurred while updating {self.name}") from err
|
||||
except CannotAuthenticate as err:
|
||||
raise ConfigEntryError("Could not authenticate") from err
|
||||
@@ -1,66 +0,0 @@
|
||||
"""Diagnostics support for Alexa Devices integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from aioamazondevices.api import AmazonDevice
|
||||
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceEntry
|
||||
|
||||
from .coordinator import AmazonConfigEntry
|
||||
|
||||
TO_REDACT = {CONF_PASSWORD, CONF_USERNAME, CONF_NAME, "title"}
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, entry: AmazonConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
devices: list[dict[str, dict[str, Any]]] = [
|
||||
build_device_data(device) for device in coordinator.data.values()
|
||||
]
|
||||
|
||||
return {
|
||||
"entry": async_redact_data(entry.as_dict(), TO_REDACT),
|
||||
"device_info": {
|
||||
"last_update success": coordinator.last_update_success,
|
||||
"last_exception": repr(coordinator.last_exception),
|
||||
"devices": devices,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
async def async_get_device_diagnostics(
|
||||
hass: HomeAssistant, entry: AmazonConfigEntry, device_entry: DeviceEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a device."""
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
assert device_entry.serial_number
|
||||
|
||||
return build_device_data(coordinator.data[device_entry.serial_number])
|
||||
|
||||
|
||||
def build_device_data(device: AmazonDevice) -> dict[str, Any]:
|
||||
"""Build device data for diagnostics."""
|
||||
return {
|
||||
"account name": device.account_name,
|
||||
"capabilities": device.capabilities,
|
||||
"device family": device.device_family,
|
||||
"device type": device.device_type,
|
||||
"device cluster members": device.device_cluster_members,
|
||||
"online": device.online,
|
||||
"serial number": device.serial_number,
|
||||
"software version": device.software_version,
|
||||
"do not disturb": device.do_not_disturb,
|
||||
"response style": device.response_style,
|
||||
"bluetooth state": device.bluetooth_state,
|
||||
}
|
||||
@@ -1,57 +0,0 @@
|
||||
"""Defines a base Alexa Devices entity."""
|
||||
|
||||
from aioamazondevices.api import AmazonDevice
|
||||
from aioamazondevices.const import SPEAKER_GROUP_MODEL
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import AmazonDevicesCoordinator
|
||||
|
||||
|
||||
class AmazonEntity(CoordinatorEntity[AmazonDevicesCoordinator]):
|
||||
"""Defines a base Alexa Devices entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: AmazonDevicesCoordinator,
|
||||
serial_num: str,
|
||||
description: EntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(coordinator)
|
||||
self._serial_num = serial_num
|
||||
model_details = coordinator.api.get_model_details(self.device) or {}
|
||||
model = model_details.get("model")
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, serial_num)},
|
||||
name=self.device.account_name,
|
||||
model=model,
|
||||
model_id=self.device.device_type,
|
||||
manufacturer=model_details.get("manufacturer", "Amazon"),
|
||||
hw_version=model_details.get("hw_version"),
|
||||
sw_version=(
|
||||
self.device.software_version if model != SPEAKER_GROUP_MODEL else None
|
||||
),
|
||||
serial_number=serial_num if model != SPEAKER_GROUP_MODEL else None,
|
||||
)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{serial_num}-{description.key}"
|
||||
|
||||
@property
|
||||
def device(self) -> AmazonDevice:
|
||||
"""Return the device."""
|
||||
return self.coordinator.data[self._serial_num]
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if entity is available."""
|
||||
return (
|
||||
super().available
|
||||
and self._serial_num in self.coordinator.data
|
||||
and self.device.online
|
||||
)
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"bluetooth": {
|
||||
"default": "mdi:bluetooth",
|
||||
"state": {
|
||||
"off": "mdi:bluetooth-off"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"domain": "alexa_devices",
|
||||
"name": "Alexa Devices",
|
||||
"codeowners": ["@chemelli74"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/alexa_devices",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aioamazondevices"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["aioamazondevices==3.1.2"]
|
||||
}
|
||||
@@ -1,78 +0,0 @@
|
||||
"""Support for notification entity."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Final
|
||||
|
||||
from aioamazondevices.api import AmazonDevice, AmazonEchoApi
|
||||
from aioamazondevices.const import SPEAKER_GROUP_FAMILY
|
||||
|
||||
from homeassistant.components.notify import NotifyEntity, NotifyEntityDescription
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import AmazonConfigEntry
|
||||
from .entity import AmazonEntity
|
||||
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class AmazonNotifyEntityDescription(NotifyEntityDescription):
|
||||
"""Alexa Devices notify entity description."""
|
||||
|
||||
is_supported: Callable[[AmazonDevice], bool] = lambda _device: True
|
||||
method: Callable[[AmazonEchoApi, AmazonDevice, str], Awaitable[None]]
|
||||
subkey: str
|
||||
|
||||
|
||||
NOTIFY: Final = (
|
||||
AmazonNotifyEntityDescription(
|
||||
key="speak",
|
||||
translation_key="speak",
|
||||
subkey="AUDIO_PLAYER",
|
||||
is_supported=lambda _device: _device.device_family != SPEAKER_GROUP_FAMILY,
|
||||
method=lambda api, device, message: api.call_alexa_speak(device, message),
|
||||
),
|
||||
AmazonNotifyEntityDescription(
|
||||
key="announce",
|
||||
translation_key="announce",
|
||||
subkey="AUDIO_PLAYER",
|
||||
method=lambda api, device, message: api.call_alexa_announcement(
|
||||
device, message
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: AmazonConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Alexa Devices notification entity based on a config entry."""
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
AmazonNotifyEntity(coordinator, serial_num, sensor_desc)
|
||||
for sensor_desc in NOTIFY
|
||||
for serial_num in coordinator.data
|
||||
if sensor_desc.subkey in coordinator.data[serial_num].capabilities
|
||||
and sensor_desc.is_supported(coordinator.data[serial_num])
|
||||
)
|
||||
|
||||
|
||||
class AmazonNotifyEntity(AmazonEntity, NotifyEntity):
|
||||
"""Binary sensor notify platform."""
|
||||
|
||||
entity_description: AmazonNotifyEntityDescription
|
||||
|
||||
async def async_send_message(
|
||||
self, message: str, title: str | None = None, **kwargs: Any
|
||||
) -> None:
|
||||
"""Send a message."""
|
||||
|
||||
await self.entity_description.method(self.coordinator.api, self.device, message)
|
||||
@@ -1,76 +0,0 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: no actions
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: no actions
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: entities do not explicitly subscribe to events
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: todo
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: todo
|
||||
docs-installation-parameters: todo
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow: todo
|
||||
test-coverage:
|
||||
status: todo
|
||||
comment: all tests missing
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: Network information not relevant
|
||||
discovery:
|
||||
status: exempt
|
||||
comment: There are a ton of mac address ranges in use, but also by kindles which are not supported by this integration
|
||||
docs-data-update: todo
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: todo
|
||||
docs-supported-functions: todo
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices: todo
|
||||
entity-category: done
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
icon-translations: done
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: no known use cases for repair issues or flows, yet
|
||||
stale-devices:
|
||||
status: todo
|
||||
comment: automate the cleanup process
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: todo
|
||||
strict-typing: done
|
||||
@@ -1,60 +0,0 @@
|
||||
{
|
||||
"common": {
|
||||
"data_country": "Country code",
|
||||
"data_code": "One-time password (OTP code)",
|
||||
"data_description_country": "The country of your Amazon account.",
|
||||
"data_description_username": "The email address of your Amazon account.",
|
||||
"data_description_password": "The password of your Amazon account.",
|
||||
"data_description_code": "The one-time password to log in to your account. Currently, only tokens from OTP applications are supported."
|
||||
},
|
||||
"config": {
|
||||
"flow_title": "{username}",
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"country": "[%key:component::alexa_devices::common::data_country%]",
|
||||
"username": "[%key:common::config_flow::data::username%]",
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"code": "[%key:component::alexa_devices::common::data_description_code%]"
|
||||
},
|
||||
"data_description": {
|
||||
"country": "[%key:component::alexa_devices::common::data_description_country%]",
|
||||
"username": "[%key:component::alexa_devices::common::data_description_username%]",
|
||||
"password": "[%key:component::alexa_devices::common::data_description_password%]",
|
||||
"code": "[%key:component::alexa_devices::common::data_description_code%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"bluetooth": {
|
||||
"name": "Bluetooth"
|
||||
}
|
||||
},
|
||||
"notify": {
|
||||
"speak": {
|
||||
"name": "Speak"
|
||||
},
|
||||
"announce": {
|
||||
"name": "Announce"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"do_not_disturb": {
|
||||
"name": "Do not disturb"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,84 +0,0 @@
|
||||
"""Support for switches."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING, Any, Final
|
||||
|
||||
from aioamazondevices.api import AmazonDevice
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import AmazonConfigEntry
|
||||
from .entity import AmazonEntity
|
||||
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class AmazonSwitchEntityDescription(SwitchEntityDescription):
|
||||
"""Alexa Devices switch entity description."""
|
||||
|
||||
is_on_fn: Callable[[AmazonDevice], bool]
|
||||
subkey: str
|
||||
method: str
|
||||
|
||||
|
||||
SWITCHES: Final = (
|
||||
AmazonSwitchEntityDescription(
|
||||
key="do_not_disturb",
|
||||
subkey="AUDIO_PLAYER",
|
||||
translation_key="do_not_disturb",
|
||||
is_on_fn=lambda _device: _device.do_not_disturb,
|
||||
method="set_do_not_disturb",
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: AmazonConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Alexa Devices switches based on a config entry."""
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
AmazonSwitchEntity(coordinator, serial_num, switch_desc)
|
||||
for switch_desc in SWITCHES
|
||||
for serial_num in coordinator.data
|
||||
if switch_desc.subkey in coordinator.data[serial_num].capabilities
|
||||
)
|
||||
|
||||
|
||||
class AmazonSwitchEntity(AmazonEntity, SwitchEntity):
|
||||
"""Switch device."""
|
||||
|
||||
entity_description: AmazonSwitchEntityDescription
|
||||
|
||||
async def _switch_set_state(self, state: bool) -> None:
|
||||
"""Set desired switch state."""
|
||||
method = getattr(self.coordinator.api, self.entity_description.method)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert method is not None
|
||||
|
||||
await method(self.device, state)
|
||||
await self.coordinator.async_request_refresh()
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the switch on."""
|
||||
await self._switch_set_state(True)
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the switch off."""
|
||||
await self._switch_set_state(False)
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return True if switch is on."""
|
||||
return self.entity_description.is_on_fn(self.device)
|
||||
@@ -7,6 +7,6 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["androidtvremote2"],
|
||||
"requirements": ["androidtvremote2==0.2.2"],
|
||||
"requirements": ["androidtvremote2==0.2.1"],
|
||||
"zeroconf": ["_androidtvremote2._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -51,10 +51,6 @@
|
||||
"app_id": "Application ID",
|
||||
"app_icon": "Application icon",
|
||||
"app_delete": "Check to delete this application"
|
||||
},
|
||||
"data_description": {
|
||||
"app_id": "E.g. com.plexapp.android for https://play.google.com/store/apps/details?id=com.plexapp.android",
|
||||
"app_icon": "Image URL. From the Play Store app page, right click on the icon and select 'Copy image address' and then paste it here. Alternatively, download the image, upload it under /config/www/ and use the URL /local/filename"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,11 +17,4 @@ CONF_THINKING_BUDGET = "thinking_budget"
|
||||
RECOMMENDED_THINKING_BUDGET = 0
|
||||
MIN_THINKING_BUDGET = 1024
|
||||
|
||||
THINKING_MODELS = [
|
||||
"claude-3-7-sonnet-20250219",
|
||||
"claude-3-7-sonnet-latest",
|
||||
"claude-opus-4-20250514",
|
||||
"claude-opus-4-0",
|
||||
"claude-sonnet-4-20250514",
|
||||
"claude-sonnet-4-0",
|
||||
]
|
||||
THINKING_MODELS = ["claude-3-7-sonnet-20250219", "claude-3-7-sonnet-latest"]
|
||||
|
||||
@@ -294,8 +294,6 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
elif isinstance(response, RawMessageDeltaEvent):
|
||||
if (usage := response.usage) is not None:
|
||||
chat_log.async_trace(_create_token_stats(input_usage, usage))
|
||||
if response.delta.stop_reason == "refusal":
|
||||
raise HomeAssistantError("Potential policy violation detected")
|
||||
elif isinstance(response, RawMessageStopEvent):
|
||||
if current_message is not None:
|
||||
messages.append(current_message)
|
||||
@@ -328,7 +326,6 @@ class AnthropicConversationEntity(
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = None
|
||||
_attr_supports_streaming = True
|
||||
|
||||
def __init__(self, entry: AnthropicConfigEntry) -> None:
|
||||
"""Initialize the agent."""
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/anthropic",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["anthropic==0.52.0"]
|
||||
"requirements": ["anthropic==0.47.2"]
|
||||
}
|
||||
|
||||
@@ -46,7 +46,11 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
return self.async_show_form(step_id="user", data_schema=_SCHEMA)
|
||||
|
||||
host, port = user_input[CONF_HOST], user_input[CONF_PORT]
|
||||
|
||||
# Abort if an entry with same host and port is present.
|
||||
self._async_abort_entries_match({CONF_HOST: host, CONF_PORT: port})
|
||||
|
||||
# Test the connection to the host and get the current status for serial number.
|
||||
try:
|
||||
async with asyncio.timeout(CONNECTION_TIMEOUT):
|
||||
data = APCUPSdData(await aioapcaccess.request_status(host, port))
|
||||
@@ -63,30 +67,3 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
title = data.name or data.model or data.serial_no or "APC UPS"
|
||||
return self.async_create_entry(title=title, data=user_input)
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reconfiguration of an existing entry."""
|
||||
|
||||
if user_input is None:
|
||||
return self.async_show_form(step_id="reconfigure", data_schema=_SCHEMA)
|
||||
|
||||
host, port = user_input[CONF_HOST], user_input[CONF_PORT]
|
||||
self._async_abort_entries_match({CONF_HOST: host, CONF_PORT: port})
|
||||
try:
|
||||
async with asyncio.timeout(CONNECTION_TIMEOUT):
|
||||
data = APCUPSdData(await aioapcaccess.request_status(host, port))
|
||||
except (OSError, asyncio.IncompleteReadError, TimeoutError):
|
||||
errors = {"base": "cannot_connect"}
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure", data_schema=_SCHEMA, errors=errors
|
||||
)
|
||||
|
||||
await self.async_set_unique_id(data.serial_no)
|
||||
self._abort_if_unique_id_mismatch(reason="wrong_apcupsd_daemon")
|
||||
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reconfigure_entry(),
|
||||
data_updates=user_input,
|
||||
)
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"wrong_apcupsd_daemon": "The reconfigured APC UPS Daemon is not the same as the one already configured.",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
|
||||
|
||||
@@ -62,8 +62,6 @@ async def async_setup_entry(
|
||||
target_humidity_key=Attribute.HUMIDIFICATION_SETPOINT,
|
||||
min_humidity=10,
|
||||
max_humidity=50,
|
||||
auto_status_key=Attribute.HUMIDIFICATION_AVAILABLE,
|
||||
auto_status_value=1,
|
||||
default_humidity=30,
|
||||
set_humidity_fn=coordinator.client.set_humidification_setpoint,
|
||||
)
|
||||
@@ -79,8 +77,6 @@ async def async_setup_entry(
|
||||
action_map=DEHUMIDIFIER_ACTION_MAP,
|
||||
current_humidity_key=Attribute.INDOOR_HUMIDITY_CONTROLLING_SENSOR_VALUE,
|
||||
target_humidity_key=Attribute.DEHUMIDIFICATION_SETPOINT,
|
||||
auto_status_key=None,
|
||||
auto_status_value=None,
|
||||
min_humidity=40,
|
||||
max_humidity=90,
|
||||
default_humidity=60,
|
||||
@@ -104,8 +100,6 @@ class AprilaireHumidifierDescription(HumidifierEntityDescription):
|
||||
target_humidity_key: str
|
||||
min_humidity: int
|
||||
max_humidity: int
|
||||
auto_status_key: str | None
|
||||
auto_status_value: int | None
|
||||
default_humidity: int
|
||||
set_humidity_fn: Callable[[int], Awaitable]
|
||||
|
||||
@@ -169,31 +163,14 @@ class AprilaireHumidifierEntity(BaseAprilaireEntity, HumidifierEntity):
|
||||
def min_humidity(self) -> float:
|
||||
"""Return the minimum humidity."""
|
||||
|
||||
if self.is_auto_humidity_mode():
|
||||
return 1
|
||||
|
||||
return self.entity_description.min_humidity
|
||||
|
||||
@property
|
||||
def max_humidity(self) -> float:
|
||||
"""Return the maximum humidity."""
|
||||
|
||||
if self.is_auto_humidity_mode():
|
||||
return 7
|
||||
|
||||
return self.entity_description.max_humidity
|
||||
|
||||
def is_auto_humidity_mode(self) -> bool:
|
||||
"""Return whether the humidifier is in auto mode."""
|
||||
|
||||
if self.entity_description.auto_status_key is None:
|
||||
return False
|
||||
|
||||
return (
|
||||
self.coordinator.data.get(self.entity_description.auto_status_key)
|
||||
== self.entity_description.auto_status_value
|
||||
)
|
||||
|
||||
async def async_set_humidity(self, humidity: int) -> None:
|
||||
"""Set the humidity."""
|
||||
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["pyaprilaire"],
|
||||
"requirements": ["pyaprilaire==0.9.1"]
|
||||
"requirements": ["pyaprilaire==0.8.1"]
|
||||
}
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["APsystemsEZ1"],
|
||||
"requirements": ["apsystems-ez1==2.7.0"]
|
||||
"requirements": ["apsystems-ez1==2.6.0"]
|
||||
}
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
{
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"last_update": {
|
||||
"default": "mdi:update"
|
||||
},
|
||||
"salt_left_side_percentage": {
|
||||
"default": "mdi:basket-fill"
|
||||
},
|
||||
|
||||
@@ -4,7 +4,6 @@ from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
|
||||
from aioaquacell import Softener
|
||||
|
||||
@@ -29,7 +28,7 @@ PARALLEL_UPDATES = 1
|
||||
class SoftenerSensorEntityDescription(SensorEntityDescription):
|
||||
"""Describes Softener sensor entity."""
|
||||
|
||||
value_fn: Callable[[Softener], StateType | datetime]
|
||||
value_fn: Callable[[Softener], StateType]
|
||||
|
||||
|
||||
SENSORS: tuple[SoftenerSensorEntityDescription, ...] = (
|
||||
@@ -78,12 +77,6 @@ SENSORS: tuple[SoftenerSensorEntityDescription, ...] = (
|
||||
"low",
|
||||
],
|
||||
),
|
||||
SoftenerSensorEntityDescription(
|
||||
key="last_update",
|
||||
translation_key="last_update",
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
value_fn=lambda softener: softener.lastUpdate,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -118,6 +111,6 @@ class SoftenerSensor(AquacellEntity, SensorEntity):
|
||||
self.entity_description = description
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType | datetime:
|
||||
def native_value(self) -> StateType:
|
||||
"""Return the state of the sensor."""
|
||||
return self.entity_description.value_fn(self.softener)
|
||||
|
||||
@@ -21,9 +21,6 @@
|
||||
},
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"last_update": {
|
||||
"name": "Last update"
|
||||
},
|
||||
"salt_left_side_percentage": {
|
||||
"name": "Salt left side percentage"
|
||||
},
|
||||
|
||||
@@ -89,7 +89,7 @@ class ArubaDeviceScanner(DeviceScanner):
|
||||
def get_aruba_data(self) -> dict[str, dict[str, str]] | None:
|
||||
"""Retrieve data from Aruba Access Point and return parsed result."""
|
||||
|
||||
connect = f"ssh {self.username}@{self.host}"
|
||||
connect = f"ssh {self.username}@{self.host} -o HostKeyAlgorithms=ssh-rsa"
|
||||
ssh: pexpect.spawn[str] = pexpect.spawn(connect, encoding="utf-8")
|
||||
query = ssh.expect(
|
||||
[
|
||||
|
||||
@@ -20,6 +20,9 @@ import hass_nabucasa
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import conversation, stt, tts, wake_word, websocket_api
|
||||
from homeassistant.components.tts import (
|
||||
generate_media_source_id as tts_generate_media_source_id,
|
||||
)
|
||||
from homeassistant.const import ATTR_SUPPORTED_FEATURES, MATCH_ALL
|
||||
from homeassistant.core import Context, HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
@@ -89,8 +92,6 @@ KEY_ASSIST_PIPELINE: HassKey[PipelineData] = HassKey(DOMAIN)
|
||||
KEY_PIPELINE_CONVERSATION_DATA: HassKey[dict[str, PipelineConversationData]] = HassKey(
|
||||
"pipeline_conversation_data"
|
||||
)
|
||||
# Number of response parts to handle before streaming the response
|
||||
STREAM_RESPONSE_CHARS = 60
|
||||
|
||||
|
||||
def validate_language(data: dict[str, Any]) -> Any:
|
||||
@@ -554,7 +555,7 @@ class PipelineRun:
|
||||
event_callback: PipelineEventCallback
|
||||
language: str = None # type: ignore[assignment]
|
||||
runner_data: Any | None = None
|
||||
intent_agent: conversation.AgentInfo | None = None
|
||||
intent_agent: str | None = None
|
||||
tts_audio_output: str | dict[str, Any] | None = None
|
||||
wake_word_settings: WakeWordSettings | None = None
|
||||
audio_settings: AudioSettings = field(default_factory=AudioSettings)
|
||||
@@ -590,9 +591,6 @@ class PipelineRun:
|
||||
_intent_agent_only = False
|
||||
"""If request should only be handled by agent, ignoring sentence triggers and local processing."""
|
||||
|
||||
_streamed_response_text = False
|
||||
"""If the conversation agent streamed response text to TTS result."""
|
||||
|
||||
def __post_init__(self) -> None:
|
||||
"""Set language for pipeline."""
|
||||
self.language = self.pipeline.language or self.hass.config.language
|
||||
@@ -654,11 +652,6 @@ class PipelineRun:
|
||||
"token": self.tts_stream.token,
|
||||
"url": self.tts_stream.url,
|
||||
"mime_type": self.tts_stream.content_type,
|
||||
"stream_response": (
|
||||
self.tts_stream.supports_streaming_input
|
||||
and self.intent_agent
|
||||
and self.intent_agent.supports_streaming
|
||||
),
|
||||
}
|
||||
|
||||
self.process_event(PipelineEvent(PipelineEventType.RUN_START, data))
|
||||
@@ -906,12 +899,12 @@ class PipelineRun:
|
||||
) -> str:
|
||||
"""Run speech-to-text portion of pipeline. Returns the spoken text."""
|
||||
# Create a background task to prepare the conversation agent
|
||||
if self.end_stage >= PipelineStage.INTENT and self.intent_agent:
|
||||
if self.end_stage >= PipelineStage.INTENT:
|
||||
self.hass.async_create_background_task(
|
||||
conversation.async_prepare_agent(
|
||||
self.hass, self.intent_agent.id, self.language
|
||||
self.hass, self.intent_agent, self.language
|
||||
),
|
||||
f"prepare conversation agent {self.intent_agent.id}",
|
||||
f"prepare conversation agent {self.intent_agent}",
|
||||
)
|
||||
|
||||
if isinstance(self.stt_provider, stt.Provider):
|
||||
@@ -1052,7 +1045,7 @@ class PipelineRun:
|
||||
message=f"Intent recognition engine {engine} is not found",
|
||||
)
|
||||
|
||||
self.intent_agent = agent_info
|
||||
self.intent_agent = agent_info.id
|
||||
|
||||
async def recognize_intent(
|
self,
@@ -1085,7 +1078,7 @@ class PipelineRun:
PipelineEvent(
PipelineEventType.INTENT_START,
{
"engine": self.intent_agent.id,
"engine": self.intent_agent,
"language": input_language,
"intent_input": intent_input,
"conversation_id": conversation_id,
@@ -1102,11 +1095,11 @@ class PipelineRun:
conversation_id=conversation_id,
device_id=device_id,
language=input_language,
agent_id=self.intent_agent.id,
agent_id=self.intent_agent,
extra_system_prompt=conversation_extra_system_prompt,
)

agent_id = self.intent_agent.id
agent_id = self.intent_agent
processed_locally = agent_id == conversation.HOME_ASSISTANT_AGENT
intent_response: intent.IntentResponse | None = None
if not processed_locally and not self._intent_agent_only:
@@ -1128,7 +1121,7 @@ class PipelineRun:
# If the LLM has API access, we filter out some sentences that are
# interfering with LLM operation.
if (
intent_agent_state := self.hass.states.get(self.intent_agent.id)
intent_agent_state := self.hass.states.get(self.intent_agent)
) and intent_agent_state.attributes.get(
ATTR_SUPPORTED_FEATURES, 0
) & conversation.ConversationEntityFeature.CONTROL:
@@ -1150,13 +1143,6 @@ class PipelineRun:
agent_id = conversation.HOME_ASSISTANT_AGENT
processed_locally = True

if self.tts_stream and self.tts_stream.supports_streaming_input:
tts_input_stream: asyncio.Queue[str | None] | None = asyncio.Queue()
else:
tts_input_stream = None
chat_log_role = None
delta_character_count = 0

@callback
def chat_log_delta_listener(
chat_log: conversation.ChatLog, delta: dict
@@ -1170,61 +1156,6 @@ class PipelineRun:
},
)
)
if tts_input_stream is None:
return

nonlocal chat_log_role

if role := delta.get("role"):
chat_log_role = role

# We are only interested in assistant deltas
if chat_log_role != "assistant":
return

if content := delta.get("content"):
tts_input_stream.put_nowait(content)

if self._streamed_response_text:
return

nonlocal delta_character_count

# Streamed responses are not cached. That's why we only start streaming text after
# we have received enough characters that indicates it will be a long response
# or if we have received text, and then a tool call.

# Tool call after we already received text
start_streaming = delta_character_count > 0 and delta.get("tool_calls")

# Count characters in the content and test if we exceed streaming threshold
if not start_streaming and content:
delta_character_count += len(content)
start_streaming = delta_character_count > STREAM_RESPONSE_CHARS

if not start_streaming:
return

self._streamed_response_text = True

async def tts_input_stream_generator() -> AsyncGenerator[str]:
"""Yield TTS input stream."""
while (tts_input := await tts_input_stream.get()) is not None:
yield tts_input

# Concatenate all existing queue items
parts = []
while not tts_input_stream.empty():
parts.append(tts_input_stream.get_nowait())
tts_input_stream.put_nowait(
"".join(
# At this point parts is only strings, None indicates end of queue
cast(list[str], parts)
)
)

assert self.tts_stream is not None
self.tts_stream.async_set_message_stream(tts_input_stream_generator())

with (
chat_session.async_get_chat_session(
@@ -1268,8 +1199,6 @@ class PipelineRun:
speech = conversation_result.response.speech.get("plain", {}).get(
"speech", ""
)
if tts_input_stream and self._streamed_response_text:
tts_input_stream.put_nowait(None)

except Exception as src_error:
_LOGGER.exception("Unexpected error during intent recognition")
@@ -1347,11 +1276,26 @@ class PipelineRun:
)
)

if not self._streamed_response_text:
self.tts_stream.async_set_message(tts_input)
try:
# Synthesize audio and get URL
tts_media_id = tts_generate_media_source_id(
self.hass,
tts_input,
engine=self.tts_stream.engine,
language=self.tts_stream.language,
options=self.tts_stream.options,
)
except Exception as src_error:
_LOGGER.exception("Unexpected error during text-to-speech")
raise TextToSpeechError(
code="tts-failed",
message="Unexpected error during text-to-speech",
) from src_error

self.tts_stream.async_set_message(tts_input)

tts_output = {
"media_id": self.tts_stream.media_source_id,
"media_id": tts_media_id,
"token": self.tts_stream.token,
"url": self.tts_stream.url,
"mime_type": self.tts_stream.content_type,
@@ -47,7 +47,7 @@ from .const import (
CONF_VIDEO_SOURCE,
DEFAULT_STREAM_PROFILE,
DEFAULT_VIDEO_SOURCE,
DOMAIN,
DOMAIN as AXIS_DOMAIN,
)
from .errors import AuthenticationRequired, CannotConnect
from .hub import AxisHub, get_axis_api
@@ -58,7 +58,7 @@ DEFAULT_PROTOCOL = "https"
PROTOCOL_CHOICES = ["https", "http"]


class AxisFlowHandler(ConfigFlow, domain=DOMAIN):
class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):
"""Handle a Axis config flow."""

VERSION = 3
@@ -146,7 +146,7 @@ class AxisFlowHandler(ConfigFlow, domain=DOMAIN):
model = self.config[CONF_MODEL]
same_model = [
entry.data[CONF_NAME]
for entry in self.hass.config_entries.async_entries(DOMAIN)
for entry in self.hass.config_entries.async_entries(AXIS_DOMAIN)
if entry.source != SOURCE_IGNORE and entry.data[CONF_MODEL] == model
]


@@ -2,8 +2,8 @@

from aiohttp import ClientTimeout
from azure.core.exceptions import (
AzureError,
ClientAuthenticationError,
HttpResponseError,
ResourceNotFoundError,
)
from azure.core.pipeline.transport._aiohttp import (
@@ -39,20 +39,11 @@ async def async_setup_entry(
session = async_create_clientsession(
hass, timeout=ClientTimeout(connect=10, total=12 * 60 * 60)
)

def create_container_client() -> ContainerClient:
"""Create a ContainerClient."""

return ContainerClient(
account_url=f"https://{entry.data[CONF_ACCOUNT_NAME]}.blob.core.windows.net/",
container_name=entry.data[CONF_CONTAINER_NAME],
credential=entry.data[CONF_STORAGE_ACCOUNT_KEY],
transport=AioHttpTransport(session=session),
)

# has a blocking call to open in cpython
container_client: ContainerClient = await hass.async_add_executor_job(
create_container_client
container_client = ContainerClient(
account_url=f"https://{entry.data[CONF_ACCOUNT_NAME]}.blob.core.windows.net/",
container_name=entry.data[CONF_CONTAINER_NAME],
credential=entry.data[CONF_STORAGE_ACCOUNT_KEY],
transport=AioHttpTransport(session=session),
)

try:
@@ -70,7 +61,7 @@ async def async_setup_entry(
translation_key="invalid_auth",
translation_placeholders={CONF_ACCOUNT_NAME: entry.data[CONF_ACCOUNT_NAME]},
) from err
except AzureError as err:
except HttpResponseError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="cannot_connect",
@@ -8,7 +8,7 @@ import json
import logging
from typing import Any, Concatenate

from azure.core.exceptions import AzureError, HttpResponseError, ServiceRequestError
from azure.core.exceptions import HttpResponseError
from azure.storage.blob import BlobProperties

from homeassistant.components.backup import (
@@ -80,20 +80,6 @@ def handle_backup_errors[_R, **P](
f"Error during backup operation in {func.__name__}:"
f" Status {err.status_code}, message: {err.message}"
) from err
except ServiceRequestError as err:
raise BackupAgentError(
f"Timeout during backup operation in {func.__name__}"
) from err
except AzureError as err:
_LOGGER.debug(
"Error during backup in %s: %s",
func.__name__,
err,
exc_info=True,
)
raise BackupAgentError(
f"Error during backup operation in {func.__name__}: {err}"
) from err

return wrapper


@@ -27,25 +27,9 @@ _LOGGER = logging.getLogger(__name__)
class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for azure storage."""

async def get_container_client(
self, account_name: str, container_name: str, storage_account_key: str
) -> ContainerClient:
"""Get the container client.

ContainerClient has a blocking call to open in cpython
"""

session = async_get_clientsession(self.hass)

def create_container_client() -> ContainerClient:
return ContainerClient(
account_url=f"https://{account_name}.blob.core.windows.net/",
container_name=container_name,
credential=storage_account_key,
transport=AioHttpTransport(session=session),
)

return await self.hass.async_add_executor_job(create_container_client)
def get_account_url(self, account_name: str) -> str:
"""Get the account URL."""
return f"https://{account_name}.blob.core.windows.net/"

async def validate_config(
self, container_client: ContainerClient
@@ -74,10 +58,11 @@ class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
self._async_abort_entries_match(
{CONF_ACCOUNT_NAME: user_input[CONF_ACCOUNT_NAME]}
)
container_client = await self.get_container_client(
account_name=user_input[CONF_ACCOUNT_NAME],
container_client = ContainerClient(
account_url=self.get_account_url(user_input[CONF_ACCOUNT_NAME]),
container_name=user_input[CONF_CONTAINER_NAME],
storage_account_key=user_input[CONF_STORAGE_ACCOUNT_KEY],
credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
)
errors = await self.validate_config(container_client)

@@ -114,12 +99,12 @@ class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
reauth_entry = self._get_reauth_entry()

if user_input is not None:
container_client = await self.get_container_client(
account_name=reauth_entry.data[CONF_ACCOUNT_NAME],
container_client = ContainerClient(
account_url=self.get_account_url(reauth_entry.data[CONF_ACCOUNT_NAME]),
container_name=reauth_entry.data[CONF_CONTAINER_NAME],
storage_account_key=user_input[CONF_STORAGE_ACCOUNT_KEY],
credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
)

errors = await self.validate_config(container_client)
if not errors:
return self.async_update_reload_and_abort(
@@ -144,10 +129,13 @@ class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
reconfigure_entry = self._get_reconfigure_entry()

if user_input is not None:
container_client = await self.get_container_client(
account_name=reconfigure_entry.data[CONF_ACCOUNT_NAME],
container_client = ContainerClient(
account_url=self.get_account_url(
reconfigure_entry.data[CONF_ACCOUNT_NAME]
),
container_name=user_input[CONF_CONTAINER_NAME],
storage_account_key=user_input[CONF_STORAGE_ACCOUNT_KEY],
credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
)
errors = await self.validate_config(container_client)
if not errors:
@@ -23,7 +23,6 @@ from .const import DATA_MANAGER, DOMAIN
from .coordinator import BackupConfigEntry, BackupDataUpdateCoordinator
from .http import async_register_http_views
from .manager import (
AddonErrorData,
BackupManager,
BackupManagerError,
BackupPlatformEvent,
@@ -49,7 +48,6 @@ from .util import suggested_filename, suggested_filename_from_name_date
from .websocket import async_register_websocket_handlers

__all__ = [
"AddonErrorData",
"AddonInfo",
"AgentBackup",
"BackupAgent",
@@ -81,7 +79,7 @@ __all__ = [
"suggested_filename_from_name_date",
]

PLATFORMS = [Platform.EVENT, Platform.SENSOR]
PLATFORMS = [Platform.SENSOR]

CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)


@@ -33,7 +33,6 @@ class BackupCoordinatorData:
last_attempted_automatic_backup: datetime | None
last_successful_automatic_backup: datetime | None
next_scheduled_automatic_backup: datetime | None
last_event: ManagerStateEvent | BackupPlatformEvent | None


class BackupDataUpdateCoordinator(DataUpdateCoordinator[BackupCoordinatorData]):
@@ -61,13 +60,11 @@ class BackupDataUpdateCoordinator(DataUpdateCoordinator[BackupCoordinatorData]):
]

self.backup_manager = backup_manager
self._last_event: ManagerStateEvent | BackupPlatformEvent | None = None

@callback
def _on_event(self, event: ManagerStateEvent | BackupPlatformEvent) -> None:
"""Handle new event."""
LOGGER.debug("Received backup event: %s", event)
self._last_event = event
self.config_entry.async_create_task(self.hass, self.async_refresh())

async def _async_update_data(self) -> BackupCoordinatorData:
@@ -77,7 +74,6 @@ class BackupDataUpdateCoordinator(DataUpdateCoordinator[BackupCoordinatorData]):
self.backup_manager.config.data.last_attempted_automatic_backup,
self.backup_manager.config.data.last_completed_automatic_backup,
self.backup_manager.config.data.schedule.next_automatic_backup,
self._last_event,
)

@callback

@@ -11,7 +11,7 @@ from .const import DOMAIN
from .coordinator import BackupDataUpdateCoordinator


class BackupManagerBaseEntity(CoordinatorEntity[BackupDataUpdateCoordinator]):
class BackupManagerEntity(CoordinatorEntity[BackupDataUpdateCoordinator]):
"""Base entity for backup manager."""

_attr_has_entity_name = True
@@ -19,9 +19,12 @@ class BackupManagerBaseEntity(CoordinatorEntity[BackupDataUpdateCoordinator]):
def __init__(
self,
coordinator: BackupDataUpdateCoordinator,
entity_description: EntityDescription,
) -> None:
"""Initialize base entity."""
super().__init__(coordinator)
self.entity_description = entity_description
self._attr_unique_id = entity_description.key
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, "backup_manager")},
manufacturer="Home Assistant",
@@ -31,17 +34,3 @@ class BackupManagerBaseEntity(CoordinatorEntity[BackupDataUpdateCoordinator]):
entry_type=DeviceEntryType.SERVICE,
configuration_url="homeassistant://config/backup",
)


class BackupManagerEntity(BackupManagerBaseEntity):
"""Entity for backup manager."""

def __init__(
self,
coordinator: BackupDataUpdateCoordinator,
entity_description: EntityDescription,
) -> None:
"""Initialize entity."""
super().__init__(coordinator)
self.entity_description = entity_description
self._attr_unique_id = entity_description.key
@@ -1,59 +0,0 @@
"""Event platform for Home Assistant Backup integration."""

from __future__ import annotations

from typing import Final

from homeassistant.components.event import EventEntity
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import BackupConfigEntry, BackupDataUpdateCoordinator
from .entity import BackupManagerBaseEntity
from .manager import CreateBackupEvent, CreateBackupState

ATTR_BACKUP_STAGE: Final[str] = "backup_stage"
ATTR_FAILED_REASON: Final[str] = "failed_reason"


async def async_setup_entry(
hass: HomeAssistant,
config_entry: BackupConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Event set up for backup config entry."""
coordinator = config_entry.runtime_data
async_add_entities([AutomaticBackupEvent(coordinator)])


class AutomaticBackupEvent(BackupManagerBaseEntity, EventEntity):
"""Representation of an automatic backup event."""

_attr_event_types = [s.value for s in CreateBackupState]
_unrecorded_attributes = frozenset({ATTR_FAILED_REASON, ATTR_BACKUP_STAGE})
coordinator: BackupDataUpdateCoordinator

def __init__(self, coordinator: BackupDataUpdateCoordinator) -> None:
"""Initialize the automatic backup event."""
super().__init__(coordinator)
self._attr_unique_id = "automatic_backup_event"
self._attr_translation_key = "automatic_backup_event"

@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
if (
not (data := self.coordinator.data)
or (event := data.last_event) is None
or not isinstance(event, CreateBackupEvent)
):
return

self._trigger_event(
event.state,
{
ATTR_BACKUP_STAGE: event.stage,
ATTR_FAILED_REASON: event.reason,
},
)
self.async_write_ha_state()
@@ -1,11 +1,4 @@
{
"entity": {
"event": {
"automatic_backup_event": {
"default": "mdi:database"
}
}
},
"services": {
"create": {
"service": "mdi:cloud-upload"

@@ -62,7 +62,6 @@ from .const import (
LOGGER,
)
from .models import (
AddonInfo,
AgentBackup,
BackupError,
BackupManagerError,
@@ -103,27 +102,15 @@ class ManagerBackup(BaseBackup):
"""Backup class."""

agents: dict[str, AgentBackupStatus]
failed_addons: list[AddonInfo]
failed_agent_ids: list[str]
failed_folders: list[Folder]
with_automatic_settings: bool | None


@dataclass(frozen=True, kw_only=True, slots=True)
class AddonErrorData:
"""Addon error class."""

addon: AddonInfo
errors: list[tuple[str, str]]


@dataclass(frozen=True, kw_only=True, slots=True)
class WrittenBackup:
"""Written backup class."""

addon_errors: dict[str, AddonErrorData]
backup: AgentBackup
folder_errors: dict[Folder, list[tuple[str, str]]]
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]]
release_stream: Callable[[], Coroutine[Any, Any, None]]
@@ -649,13 +636,9 @@ class BackupManager:
for agent_backup in result:
if (backup_id := agent_backup.backup_id) not in backups:
if known_backup := self.known_backups.get(backup_id):
failed_addons = known_backup.failed_addons
failed_agent_ids = known_backup.failed_agent_ids
failed_folders = known_backup.failed_folders
else:
failed_addons = []
failed_agent_ids = []
failed_folders = []
with_automatic_settings = self.is_our_automatic_backup(
agent_backup, await instance_id.async_get(self.hass)
)
@@ -666,9 +649,7 @@ class BackupManager:
date=agent_backup.date,
database_included=agent_backup.database_included,
extra_metadata=agent_backup.extra_metadata,
failed_addons=failed_addons,
failed_agent_ids=failed_agent_ids,
failed_folders=failed_folders,
folders=agent_backup.folders,
homeassistant_included=agent_backup.homeassistant_included,
homeassistant_version=agent_backup.homeassistant_version,
@@ -723,13 +704,9 @@ class BackupManager:
continue
if backup is None:
if known_backup := self.known_backups.get(backup_id):
failed_addons = known_backup.failed_addons
failed_agent_ids = known_backup.failed_agent_ids
failed_folders = known_backup.failed_folders
else:
failed_addons = []
failed_agent_ids = []
failed_folders = []
with_automatic_settings = self.is_our_automatic_backup(
result, await instance_id.async_get(self.hass)
)
@@ -740,9 +717,7 @@ class BackupManager:
date=result.date,
database_included=result.database_included,
extra_metadata=result.extra_metadata,
failed_addons=failed_addons,
failed_agent_ids=failed_agent_ids,
failed_folders=failed_folders,
folders=result.folders,
homeassistant_included=result.homeassistant_included,
homeassistant_version=result.homeassistant_version,
@@ -985,7 +960,7 @@ class BackupManager:
password=None,
)
await written_backup.release_stream()
self.known_backups.add(written_backup.backup, agent_errors, {}, {}, [])
self.known_backups.add(written_backup.backup, agent_errors, [])
return written_backup.backup.backup_id

async def async_create_backup(
@@ -1223,11 +1198,7 @@ class BackupManager:
finally:
await written_backup.release_stream()
self.known_backups.add(
written_backup.backup,
agent_errors,
written_backup.addon_errors,
written_backup.folder_errors,
unavailable_agents,
written_backup.backup, agent_errors, unavailable_agents
)
if not agent_errors:
if with_automatic_settings:
@@ -1237,9 +1208,7 @@ class BackupManager:
backup_success = True

if with_automatic_settings:
self._update_issue_after_agent_upload(
written_backup, agent_errors, unavailable_agents
)
self._update_issue_after_agent_upload(agent_errors, unavailable_agents)
# delete old backups more numerous than copies
# try this regardless of agent errors above
await delete_backups_exceeding_configured_count(self)
@@ -1385,10 +1354,8 @@ class BackupManager:
for subscription in self._backup_event_subscriptions:
subscription(event)

def _create_automatic_backup_failed_issue(
self, translation_key: str, translation_placeholders: dict[str, str] | None
) -> None:
"""Create an issue in the issue registry for automatic backup failures."""
def _update_issue_backup_failed(self) -> None:
"""Update issue registry when a backup fails."""
ir.async_create_issue(
self.hass,
DOMAIN,
@@ -1397,73 +1364,37 @@ class BackupManager:
is_persistent=True,
learn_more_url="homeassistant://config/backup",
severity=ir.IssueSeverity.WARNING,
translation_key=translation_key,
translation_placeholders=translation_placeholders,
)

def _update_issue_backup_failed(self) -> None:
"""Update issue registry when a backup fails."""
self._create_automatic_backup_failed_issue(
"automatic_backup_failed_create", None
translation_key="automatic_backup_failed_create",
)

def _update_issue_after_agent_upload(
self,
written_backup: WrittenBackup,
agent_errors: dict[str, Exception],
unavailable_agents: list[str],
self, agent_errors: dict[str, Exception], unavailable_agents: list[str]
) -> None:
"""Update issue registry after a backup is uploaded to agents."""

addon_errors = written_backup.addon_errors
failed_agents = unavailable_agents + [
self.backup_agents[agent_id].name for agent_id in agent_errors
]
folder_errors = written_backup.folder_errors

if not failed_agents and not addon_errors and not folder_errors:
# No issues to report, clear previous error
if not agent_errors and not unavailable_agents:
ir.async_delete_issue(self.hass, DOMAIN, "automatic_backup_failed")
return
if failed_agents and not (addon_errors or folder_errors):
# No issues with add-ons or folders, but issues with agents
self._create_automatic_backup_failed_issue(
"automatic_backup_failed_upload_agents",
{"failed_agents": ", ".join(failed_agents)},
)
elif addon_errors and not (failed_agents or folder_errors):
# No issues with agents or folders, but issues with add-ons
self._create_automatic_backup_failed_issue(
"automatic_backup_failed_addons",
{
"failed_addons": ", ".join(
val.addon.name or val.addon.slug
for val in addon_errors.values()
ir.async_create_issue(
self.hass,
DOMAIN,
"automatic_backup_failed",
is_fixable=False,
is_persistent=True,
learn_more_url="homeassistant://config/backup",
severity=ir.IssueSeverity.WARNING,
translation_key="automatic_backup_failed_upload_agents",
translation_placeholders={
"failed_agents": ", ".join(
chain(
(
self.backup_agents[agent_id].name
for agent_id in agent_errors
),
unavailable_agents,
)
},
)
elif folder_errors and not (failed_agents or addon_errors):
# No issues with agents or add-ons, but issues with folders
self._create_automatic_backup_failed_issue(
"automatic_backup_failed_folders",
{"failed_folders": ", ".join(folder for folder in folder_errors)},
)
else:
# Issues with agents, add-ons, and/or folders
self._create_automatic_backup_failed_issue(
"automatic_backup_failed_agents_addons_folders",
{
"failed_agents": ", ".join(failed_agents) or "-",
"failed_addons": (
", ".join(
val.addon.name or val.addon.slug
for val in addon_errors.values()
)
or "-"
),
"failed_folders": ", ".join(f for f in folder_errors) or "-",
},
)
)
},
)

async def async_can_decrypt_on_download(
self,
@@ -1529,12 +1460,7 @@ class KnownBackups:
self._backups = {
backup["backup_id"]: KnownBackup(
backup_id=backup["backup_id"],
failed_addons=[
AddonInfo(name=a["name"], slug=a["slug"], version=a["version"])
for a in backup["failed_addons"]
],
failed_agent_ids=backup["failed_agent_ids"],
failed_folders=[Folder(f) for f in backup["failed_folders"]],
)
for backup in stored_backups
}
@@ -1547,16 +1473,12 @@ class KnownBackups:
self,
backup: AgentBackup,
agent_errors: dict[str, Exception],
failed_addons: dict[str, AddonErrorData],
failed_folders: dict[Folder, list[tuple[str, str]]],
unavailable_agents: list[str],
) -> None:
"""Add a backup."""
self._backups[backup.backup_id] = KnownBackup(
backup_id=backup.backup_id,
failed_addons=[val.addon for val in failed_addons.values()],
failed_agent_ids=list(chain(agent_errors, unavailable_agents)),
failed_folders=list(failed_folders),
)
self._manager.store.save()

@@ -1577,38 +1499,21 @@ class KnownBackup:
"""Persistent backup data."""

backup_id: str
failed_addons: list[AddonInfo]
failed_agent_ids: list[str]
failed_folders: list[Folder]

def to_dict(self) -> StoredKnownBackup:
"""Convert known backup to a dict."""
return {
"backup_id": self.backup_id,
"failed_addons": [
{"name": a.name, "slug": a.slug, "version": a.version}
for a in self.failed_addons
],
"failed_agent_ids": self.failed_agent_ids,
"failed_folders": [f.value for f in self.failed_folders],
}


class StoredAddonInfo(TypedDict):
"""Stored add-on info."""

name: str | None
slug: str
version: str | None


class StoredKnownBackup(TypedDict):
"""Stored persistent backup data."""

backup_id: str
failed_addons: list[StoredAddonInfo]
failed_agent_ids: list[str]
failed_folders: list[str]


class CoreBackupReaderWriter(BackupReaderWriter):
@@ -1772,11 +1677,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
raise BackupReaderWriterError(str(err)) from err

return WrittenBackup(
addon_errors={},
backup=backup,
folder_errors={},
open_stream=open_backup,
release_stream=remove_backup,
backup=backup, open_stream=open_backup, release_stream=remove_backup
)
finally:
# Inform integrations the backup is done
@@ -1915,11 +1816,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
await async_add_executor_job(temp_file.unlink, True)

return WrittenBackup(
addon_errors={},
backup=backup,
folder_errors={},
open_stream=open_backup,
release_stream=remove_backup,
backup=backup, open_stream=open_backup, release_stream=remove_backup
)

async def async_restore_backup(

@@ -13,9 +13,9 @@ from homeassistant.exceptions import HomeAssistantError
class AddonInfo:
"""Addon information."""

name: str | None
name: str
slug: str
version: str | None
version: str


class Folder(StrEnum):
@@ -16,7 +16,7 @@ if TYPE_CHECKING:
STORE_DELAY_SAVE = 30
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1
STORAGE_VERSION_MINOR = 7
STORAGE_VERSION_MINOR = 6


class StoredBackupData(TypedDict):
@@ -76,16 +76,8 @@ class _BackupStore(Store[StoredBackupData]):
# Version 1.6 adds agent retention settings
for agent in data["config"]["agents"]:
data["config"]["agents"][agent]["retention"] = None
if old_minor_version < 7:
# Version 1.7 adds failing addons and folders
for backup in data["backups"]:
backup["failed_addons"] = []
backup["failed_folders"] = []

# Note: We allow reading data with major version 2 in which the unused key
# data["config"]["schedule"]["state"] will be removed. The bump to 2 is
# planned to happen after a 6 month quiet period with no minor version
# changes.
# Note: We allow reading data with major version 2.
# Reject if major version is higher than 2.
if old_major_version > 2:
raise NotImplementedError

@@ -11,18 +11,6 @@
"automatic_backup_failed_upload_agents": {
"title": "Automatic backup could not be uploaded to the configured locations",
"description": "The automatic backup could not be uploaded to the configured locations {failed_agents}. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
},
"automatic_backup_failed_addons": {
"title": "Not all add-ons could be included in automatic backup",
"description": "Add-ons {failed_addons} could not be included in automatic backup. Please check the supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
},
"automatic_backup_failed_agents_addons_folders": {
"title": "Automatic backup was created with errors",
"description": "The automatic backup was created with errors:\n* Locations which the backup could not be uploaded to: {failed_agents}\n* Add-ons which could not be backed up: {failed_addons}\n* Folders which could not be backed up: {failed_folders}\n\nPlease check the core and supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
},
"automatic_backup_failed_folders": {
"title": "Not all folders could be included in automatic backup",
"description": "Folders {failed_folders} could not be included in automatic backup. Please check the supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
}
},
"services": {
@@ -36,22 +24,6 @@
}
},
"entity": {
"event": {
"automatic_backup_event": {
"name": "Automatic backup",
"state_attributes": {
"event_type": {
"state": {
"completed": "Completed successfully",
"failed": "Failed",
"in_progress": "In progress"
}
},
"backup_stage": { "name": "Backup stage" },
"failed_reason": { "name": "Failure reason" }
}
}
},
"sensor": {
"backup_manager_state": {
"name": "Backup Manager state",
@@ -21,6 +21,7 @@ from .entity import BleBoxEntity
SCAN_INTERVAL = timedelta(seconds=5)

BLEBOX_TO_HVACMODE = {
None: None,
0: HVACMode.OFF,
1: HVACMode.HEAT,
2: HVACMode.COOL,
@@ -58,14 +59,12 @@ class BleBoxClimateEntity(BleBoxEntity[blebox_uniapi.climate.Climate], ClimateEn
_attr_temperature_unit = UnitOfTemperature.CELSIUS

@property
def hvac_modes(self) -> list[HVACMode]:
def hvac_modes(self):
"""Return list of supported HVAC modes."""
if self._feature.mode is None:
return [HVACMode.OFF]
return [HVACMode.OFF, BLEBOX_TO_HVACMODE[self._feature.mode]]

@property
def hvac_mode(self) -> HVACMode | None:
def hvac_mode(self):
"""Return the desired HVAC mode."""
if self._feature.is_on is None:
return None
@@ -76,7 +75,7 @@ class BleBoxClimateEntity(BleBoxEntity[blebox_uniapi.climate.Climate], ClimateEn
return HVACMode.HEAT if self._feature.is_on else HVACMode.OFF

@property
def hvac_action(self) -> HVACAction | None:
def hvac_action(self):
"""Return the actual current HVAC action."""
if self._feature.hvac_action is not None:
if not self._feature.is_on:
@@ -89,22 +88,22 @@ class BleBoxClimateEntity(BleBoxEntity[blebox_uniapi.climate.Climate], ClimateEn
return HVACAction.HEATING if self._feature.is_heating else HVACAction.IDLE

@property
def max_temp(self) -> float:
def max_temp(self):
"""Return the maximum temperature supported."""
return self._feature.max_temp

@property
def min_temp(self) -> float:
def min_temp(self):
"""Return the maximum temperature supported."""
return self._feature.min_temp

@property
def current_temperature(self) -> float | None:
def current_temperature(self):
"""Return the current temperature."""
return self._feature.current

@property
def target_temperature(self) -> float | None:
def target_temperature(self):
"""Return the desired thermostat temperature."""
return self._feature.desired


@@ -84,7 +84,7 @@ class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity):
return color_util.color_temperature_mired_to_kelvin(self._feature.color_temp)

@property
def color_mode(self) -> ColorMode:
def color_mode(self):
"""Return the color mode.

Set values to _attr_ibutes if needed.
@@ -92,7 +92,7 @@ class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity):
return COLOR_MODE_MAP.get(self._feature.color_mode, ColorMode.ONOFF)

@property
def supported_color_modes(self) -> set[ColorMode]:
def supported_color_modes(self):
"""Return supported color modes."""
return {self.color_mode}

@@ -107,7 +107,7 @@ class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity):
return self._feature.effect

@property
def rgb_color(self) -> tuple[int, int, int] | None:
def rgb_color(self):
"""Return value for rgb."""
if (rgb_hex := self._feature.rgb_hex) is None:
return None
@@ -118,14 +118,14 @@ class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity):
)

@property
def rgbw_color(self) -> tuple[int, int, int, int] | None:
def rgbw_color(self):
"""Return the hue and saturation."""
if (rgbw_hex := self._feature.rgbw_hex) is None:
return None
return tuple(blebox_uniapi.light.Light.rgb_hex_to_rgb_list(rgbw_hex)[0:4])

@property
def rgbww_color(self) -> tuple[int, int, int, int, int] | None:
def rgbww_color(self):
"""Return value for rgbww."""
if (rgbww_hex := self._feature.rgbww_hex) is None:
return None
@@ -24,7 +24,7 @@ from .const import DOMAIN, EVSE_ID, LOGGER, MODEL_TYPE

type BlueCurrentConfigEntry = ConfigEntry[Connector]

PLATFORMS = [Platform.BUTTON, Platform.SENSOR]
PLATFORMS = [Platform.SENSOR]
CHARGE_POINTS = "CHARGE_POINTS"
DATA = "data"
DELAY = 5

@@ -1,89 +0,0 @@
"""Support for Blue Current buttons."""

from __future__ import annotations

from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from typing import Any

from bluecurrent_api.client import Client

from homeassistant.components.button import (
ButtonDeviceClass,
ButtonEntity,
ButtonEntityDescription,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import BlueCurrentConfigEntry, Connector
from .entity import ChargepointEntity


@dataclass(kw_only=True, frozen=True)
class ChargePointButtonEntityDescription(ButtonEntityDescription):
"""Describes a Blue Current button entity."""

function: Callable[[Client, str], Coroutine[Any, Any, None]]


CHARGE_POINT_BUTTONS = (
ChargePointButtonEntityDescription(
key="reset",
translation_key="reset",
function=lambda client, evse_id: client.reset(evse_id),
device_class=ButtonDeviceClass.RESTART,
),
ChargePointButtonEntityDescription(
key="reboot",
translation_key="reboot",
function=lambda client, evse_id: client.reboot(evse_id),
device_class=ButtonDeviceClass.RESTART,
),
ChargePointButtonEntityDescription(
key="stop_charge_session",
translation_key="stop_charge_session",
function=lambda client, evse_id: client.stop_session(evse_id),
),
)


async def async_setup_entry(
hass: HomeAssistant,
entry: BlueCurrentConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Blue Current buttons."""
connector: Connector = entry.runtime_data
async_add_entities(
ChargePointButton(
connector,
button,
evse_id,
)
for evse_id in connector.charge_points
for button in CHARGE_POINT_BUTTONS
)


class ChargePointButton(ChargepointEntity, ButtonEntity):
"""Define a charge point button."""

has_value = True
entity_description: ChargePointButtonEntityDescription

def __init__(
self,
connector: Connector,
description: ChargePointButtonEntityDescription,
evse_id: str,
) -> None:
"""Initialize the button."""
super().__init__(connector, evse_id)

self.entity_description = description
self._attr_unique_id = f"{description.key}_{evse_id}"

async def async_press(self) -> None:
"""Handle the button press."""
await self.entity_description.function(self.connector.client, self.evse_id)
@@ -1,5 +1,7 @@
"""Entity representing a Blue Current charge point."""

from abc import abstractmethod

from homeassistant.const import ATTR_NAME
from homeassistant.core import callback
from homeassistant.helpers.device_registry import DeviceInfo
@@ -15,12 +17,12 @@ class BlueCurrentEntity(Entity):

_attr_has_entity_name = True
_attr_should_poll = False
has_value = False

def __init__(self, connector: Connector, signal: str) -> None:
"""Initialize the entity."""
self.connector = connector
self.signal = signal
self.has_value = False

async def async_added_to_hass(self) -> None:
"""Register callbacks."""
@@ -41,6 +43,7 @@ class BlueCurrentEntity(Entity):
return self.connector.connected and self.has_value

@callback
@abstractmethod
def update_from_latest_data(self) -> None:
"""Update the entity from the latest data."""


@@ -19,17 +19,6 @@
"current_left": {
"default": "mdi:gauge"
}
},
"button": {
"reset": {
"default": "mdi:restart"
},
"reboot": {
"default": "mdi:restart-alert"
},
"stop_charge_session": {
"default": "mdi:stop"
}
}
}
}

@@ -1,7 +1,7 @@
{
"domain": "blue_current",
"name": "Blue Current",
"codeowners": ["@gleeuwen", "@NickKoepr", "@jtodorova23"],
"codeowners": ["@Floris272", "@gleeuwen"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/blue_current",
"iot_class": "cloud_push",

@@ -113,17 +113,6 @@
"grid_max_current": {
"name": "Max grid current"
}
},
"button": {
"stop_charge_session": {
"name": "Stop charge session"
},
"reboot": {
"name": "Reboot"
},
"reset": {
"name": "Reset"
}
}
}
}

@@ -18,9 +18,9 @@
"bleak==0.22.3",
"bleak-retry-connector==3.9.0",
"bluetooth-adapters==0.21.4",
"bluetooth-auto-recovery==1.5.2",
"bluetooth-auto-recovery==1.5.1",
"bluetooth-data-tools==1.28.1",
"dbus-fast==2.43.0",
"habluetooth==3.49.0"
"habluetooth==3.48.2"
]
}
@@ -22,7 +22,13 @@ from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util.ssl import get_default_context

from .const import CONF_GCID, CONF_READ_ONLY, CONF_REFRESH_TOKEN, DOMAIN, SCAN_INTERVALS
from .const import (
CONF_GCID,
CONF_READ_ONLY,
CONF_REFRESH_TOKEN,
DOMAIN as BMW_DOMAIN,
SCAN_INTERVALS,
)

_LOGGER = logging.getLogger(__name__)

@@ -57,7 +63,7 @@ class BMWDataUpdateCoordinator(DataUpdateCoordinator[None]):
hass,
_LOGGER,
config_entry=config_entry,
name=f"{DOMAIN}-{config_entry.data[CONF_USERNAME]}",
name=f"{BMW_DOMAIN}-{config_entry.data[CONF_USERNAME]}",
update_interval=timedelta(
seconds=SCAN_INTERVALS[config_entry.data[CONF_REGION]]
),
@@ -75,26 +81,26 @@ class BMWDataUpdateCoordinator(DataUpdateCoordinator[None]):
except MyBMWCaptchaMissingError as err:
# If a captcha is required (user/password login flow), always trigger the reauth flow
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_domain=BMW_DOMAIN,
translation_key="missing_captcha",
) from err
except MyBMWAuthError as err:
# Allow one retry interval before raising AuthFailed to avoid flaky API issues
if self.last_update_success:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_domain=BMW_DOMAIN,
translation_key="update_failed",
translation_placeholders={"exception": str(err)},
) from err
# Clear refresh token and trigger reauth if previous update failed as well
self._update_config_entry_refresh_token(None)
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_domain=BMW_DOMAIN,
translation_key="invalid_auth",
) from err
except (MyBMWAPIError, RequestError) as err:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_domain=BMW_DOMAIN,
translation_key="update_failed",
translation_placeholders={"exception": str(err)},
) from err
@@ -6,31 +6,21 @@ from ssl import SSLError

from bosch_alarm_mode2 import Panel

from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PASSWORD, CONF_PORT, Platform
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
from homeassistant.helpers.typing import ConfigType
from homeassistant.helpers import device_registry as dr

from .const import CONF_INSTALLER_CODE, CONF_USER_CODE, DOMAIN
from .services import setup_services
from .types import BoschAlarmConfigEntry

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)

PLATFORMS: list[Platform] = [
Platform.ALARM_CONTROL_PANEL,
Platform.BINARY_SENSOR,
Platform.SENSOR,
Platform.SWITCH,
]


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up bosch alarm services."""
setup_services(hass)
return True
type BoschAlarmConfigEntry = ConfigEntry[Panel]


async def async_setup_entry(hass: HomeAssistant, entry: BoschAlarmConfigEntry) -> bool:
@@ -62,11 +52,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: BoschAlarmConfigEntry) -

device_registry = dr.async_get(hass)

mac = entry.data.get(CONF_MAC)

device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
connections={(CONNECTION_NETWORK_MAC, mac)} if mac else set(),
identifiers={(DOMAIN, entry.unique_id or entry.entry_id)},
name=f"Bosch {panel.model}",
manufacturer="Bosch Security Systems",

@@ -12,8 +12,8 @@ from homeassistant.components.alarm_control_panel import (
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import BoschAlarmConfigEntry
from .entity import BoschAlarmAreaEntity
from .types import BoschAlarmConfigEntry


async def async_setup_entry(
@@ -34,9 +34,6 @@ async def async_setup_entry(
)


PARALLEL_UPDATES = 0


class AreaAlarmControlPanel(BoschAlarmAreaEntity, AlarmControlPanelEntity):
"""An alarm control panel entity for a bosch alarm panel."""

@@ -50,7 +47,7 @@ class AreaAlarmControlPanel(BoschAlarmAreaEntity, AlarmControlPanelEntity):

def __init__(self, panel: Panel, area_id: int, unique_id: str) -> None:
"""Initialise a Bosch Alarm control panel entity."""
super().__init__(panel, area_id, unique_id, True, False, True)
super().__init__(panel, area_id, unique_id, False, False, True)
self._attr_unique_id = self._area_unique_id

@property
@@ -1,220 +0,0 @@
"""Support for Bosch Alarm Panel binary sensors."""

from __future__ import annotations

from dataclasses import dataclass

from bosch_alarm_mode2 import Panel
from bosch_alarm_mode2.const import ALARM_PANEL_FAULTS

from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import BoschAlarmConfigEntry
from .entity import BoschAlarmAreaEntity, BoschAlarmEntity, BoschAlarmPointEntity


@dataclass(kw_only=True, frozen=True)
class BoschAlarmFaultEntityDescription(BinarySensorEntityDescription):
"""Describes Bosch Alarm sensor entity."""

fault: int


FAULT_TYPES = [
BoschAlarmFaultEntityDescription(
key="panel_fault_battery_low",
entity_registry_enabled_default=True,
device_class=BinarySensorDeviceClass.BATTERY,
fault=ALARM_PANEL_FAULTS.BATTERY_LOW,
),
BoschAlarmFaultEntityDescription(
key="panel_fault_battery_mising",
translation_key="panel_fault_battery_mising",
entity_registry_enabled_default=True,
device_class=BinarySensorDeviceClass.PROBLEM,
fault=ALARM_PANEL_FAULTS.BATTERY_MISING,
),
BoschAlarmFaultEntityDescription(
key="panel_fault_ac_fail",
translation_key="panel_fault_ac_fail",
entity_registry_enabled_default=True,
device_class=BinarySensorDeviceClass.PROBLEM,
fault=ALARM_PANEL_FAULTS.AC_FAIL,
),
BoschAlarmFaultEntityDescription(
key="panel_fault_phone_line_failure",
translation_key="panel_fault_phone_line_failure",
entity_registry_enabled_default=False,
device_class=BinarySensorDeviceClass.CONNECTIVITY,
fault=ALARM_PANEL_FAULTS.PHONE_LINE_FAILURE,
),
BoschAlarmFaultEntityDescription(
key="panel_fault_parameter_crc_fail_in_pif",
translation_key="panel_fault_parameter_crc_fail_in_pif",
entity_registry_enabled_default=False,
device_class=BinarySensorDeviceClass.PROBLEM,
fault=ALARM_PANEL_FAULTS.PARAMETER_CRC_FAIL_IN_PIF,
),
BoschAlarmFaultEntityDescription(
key="panel_fault_communication_fail_since_rps_hang_up",
translation_key="panel_fault_communication_fail_since_rps_hang_up",
entity_registry_enabled_default=False,
device_class=BinarySensorDeviceClass.PROBLEM,
fault=ALARM_PANEL_FAULTS.COMMUNICATION_FAIL_SINCE_RPS_HANG_UP,
),
BoschAlarmFaultEntityDescription(
key="panel_fault_sdi_fail_since_rps_hang_up",
translation_key="panel_fault_sdi_fail_since_rps_hang_up",
entity_registry_enabled_default=False,
device_class=BinarySensorDeviceClass.PROBLEM,
fault=ALARM_PANEL_FAULTS.SDI_FAIL_SINCE_RPS_HANG_UP,
),
BoschAlarmFaultEntityDescription(
key="panel_fault_user_code_tamper_since_rps_hang_up",
translation_key="panel_fault_user_code_tamper_since_rps_hang_up",
entity_registry_enabled_default=False,
device_class=BinarySensorDeviceClass.PROBLEM,
fault=ALARM_PANEL_FAULTS.USER_CODE_TAMPER_SINCE_RPS_HANG_UP,
),
BoschAlarmFaultEntityDescription(
key="panel_fault_fail_to_call_rps_since_rps_hang_up",
translation_key="panel_fault_fail_to_call_rps_since_rps_hang_up",
entity_registry_enabled_default=False,
fault=ALARM_PANEL_FAULTS.FAIL_TO_CALL_RPS_SINCE_RPS_HANG_UP,
),
BoschAlarmFaultEntityDescription(
key="panel_fault_point_bus_fail_since_rps_hang_up",
translation_key="panel_fault_point_bus_fail_since_rps_hang_up",
entity_registry_enabled_default=False,
device_class=BinarySensorDeviceClass.PROBLEM,
fault=ALARM_PANEL_FAULTS.POINT_BUS_FAIL_SINCE_RPS_HANG_UP,
),
BoschAlarmFaultEntityDescription(
key="panel_fault_log_overflow",
translation_key="panel_fault_log_overflow",
entity_registry_enabled_default=False,
device_class=BinarySensorDeviceClass.PROBLEM,
fault=ALARM_PANEL_FAULTS.LOG_OVERFLOW,
),
BoschAlarmFaultEntityDescription(
key="panel_fault_log_threshold",
translation_key="panel_fault_log_threshold",
entity_registry_enabled_default=False,
device_class=BinarySensorDeviceClass.PROBLEM,
fault=ALARM_PANEL_FAULTS.LOG_THRESHOLD,
),
]


async def async_setup_entry(
hass: HomeAssistant,
config_entry: BoschAlarmConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up binary sensors for alarm points and the connection status."""
panel = config_entry.runtime_data

entities: list[BinarySensorEntity] = [
PointSensor(panel, point_id, config_entry.unique_id or config_entry.entry_id)
for point_id in panel.points
]

entities.extend(
PanelFaultsSensor(
panel,
config_entry.unique_id or config_entry.entry_id,
fault_type,
)
for fault_type in FAULT_TYPES
)

entities.extend(
AreaReadyToArmSensor(
panel, area_id, config_entry.unique_id or config_entry.entry_id, "away"
)
for area_id in panel.areas
)

entities.extend(
AreaReadyToArmSensor(
panel, area_id, config_entry.unique_id or config_entry.entry_id, "home"
)
for area_id in panel.areas
)

async_add_entities(entities)


PARALLEL_UPDATES = 0


class PanelFaultsSensor(BoschAlarmEntity, BinarySensorEntity):
"""A binary sensor entity for each fault type in a bosch alarm panel."""

_attr_entity_category = EntityCategory.DIAGNOSTIC
entity_description: BoschAlarmFaultEntityDescription

def __init__(
self,
panel: Panel,
unique_id: str,
entity_description: BoschAlarmFaultEntityDescription,
) -> None:
"""Set up a binary sensor entity for each fault type in a bosch alarm panel."""
super().__init__(panel, unique_id, True)
self.entity_description = entity_description
self._fault_type = entity_description.fault
self._attr_unique_id = f"{unique_id}_fault_{entity_description.key}"

@property
def is_on(self) -> bool:
"""Return if this fault has occurred."""
return self._fault_type in self.panel.panel_faults_ids


class AreaReadyToArmSensor(BoschAlarmAreaEntity, BinarySensorEntity):
"""A binary sensor entity showing if a panel is ready to arm."""

_attr_entity_category = EntityCategory.DIAGNOSTIC

def __init__(
self, panel: Panel, area_id: int, unique_id: str, arm_type: str
) -> None:
"""Set up a binary sensor entity for the arming status in a bosch alarm panel."""
super().__init__(panel, area_id, unique_id, False, False, True)
self.panel = panel
self._arm_type = arm_type
self._attr_translation_key = f"area_ready_to_arm_{arm_type}"
self._attr_unique_id = f"{self._area_unique_id}_ready_to_arm_{arm_type}"

@property
def is_on(self) -> bool:
"""Return if this panel is ready to arm."""
if self._arm_type == "away":
return self._area.all_ready
if self._arm_type == "home":
return self._area.all_ready or self._area.part_ready
return False


class PointSensor(BoschAlarmPointEntity, BinarySensorEntity):
"""A binary sensor entity for a point in a bosch alarm panel."""

_attr_name = None

def __init__(self, panel: Panel, point_id: int, unique_id: str) -> None:
"""Set up a binary sensor entity for a point in a bosch alarm panel."""
super().__init__(panel, point_id, unique_id)
self._attr_unique_id = self._point_unique_id

@property
def is_on(self) -> bool:
"""Return if this point sensor is on."""
return self._point.is_open()
@@ -6,30 +6,25 @@ import asyncio
from collections.abc import Mapping
import logging
import ssl
from typing import Any, Self
from typing import Any

from bosch_alarm_mode2 import Panel
import voluptuous as vol

from homeassistant.config_entries import (
SOURCE_DHCP,
SOURCE_RECONFIGURE,
SOURCE_USER,
ConfigEntryState,
ConfigFlow,
ConfigFlowResult,
)
from homeassistant.const import (
CONF_CODE,
CONF_HOST,
CONF_MAC,
CONF_MODEL,
CONF_PASSWORD,
CONF_PORT,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo

from .const import CONF_INSTALLER_CODE, CONF_USER_CODE, DOMAIN

@@ -93,12 +88,6 @@ class BoschAlarmConfigFlow(ConfigFlow, domain=DOMAIN):
"""Init config flow."""

self._data: dict[str, Any] = {}
self.mac: str | None = None
self.host: str | None = None

def is_matching(self, other_flow: Self) -> bool:
"""Return True if other_flow is matching this flow."""
return self.mac == other_flow.mac or self.host == other_flow.host

async def async_step_user(
self, user_input: dict[str, Any] | None = None
@@ -107,12 +96,9 @@ class BoschAlarmConfigFlow(ConfigFlow, domain=DOMAIN):
errors: dict[str, str] = {}

if user_input is not None:
self.host = user_input[CONF_HOST]
if self.source == SOURCE_USER:
self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]})
try:
# Use load_selector = 0 to fetch the panel model without authentication.
(model, _) = await try_connect(user_input, 0)
(model, serial) = await try_connect(user_input, 0)
except (
OSError,
ConnectionRefusedError,
@@ -143,70 +129,6 @@ class BoschAlarmConfigFlow(ConfigFlow, domain=DOMAIN):
errors=errors,
)

async def async_step_dhcp(
self, discovery_info: DhcpServiceInfo
) -> ConfigFlowResult:
"""Handle DHCP discovery."""
self.mac = format_mac(discovery_info.macaddress)
self.host = discovery_info.ip
if self.hass.config_entries.flow.async_has_matching_flow(self):
return self.async_abort(reason="already_in_progress")

for entry in self.hass.config_entries.async_entries(DOMAIN):
if entry.data.get(CONF_MAC) == self.mac:
result = self.hass.config_entries.async_update_entry(
entry,
data={
**entry.data,
CONF_HOST: discovery_info.ip,
},
)
if result:
self.hass.config_entries.async_schedule_reload(entry.entry_id)
return self.async_abort(reason="already_configured")
if entry.data[CONF_HOST] == discovery_info.ip:
if (
not entry.data.get(CONF_MAC)
and entry.state is ConfigEntryState.LOADED
):
result = self.hass.config_entries.async_update_entry(
entry,
data={
**entry.data,
CONF_MAC: self.mac,
},
)
if result:
self.hass.config_entries.async_schedule_reload(entry.entry_id)
return self.async_abort(reason="already_configured")
try:
# Use load_selector = 0 to fetch the panel model without authentication.
(model, _) = await try_connect(
{CONF_HOST: discovery_info.ip, CONF_PORT: 7700}, 0
)
except (
OSError,
ConnectionRefusedError,
ssl.SSLError,
asyncio.exceptions.TimeoutError,
):
return self.async_abort(reason="cannot_connect")
except Exception:
_LOGGER.exception("Unexpected exception")
return self.async_abort(reason="unknown")
self.context["title_placeholders"] = {
"model": model,
"host": discovery_info.ip,
}
self._data = {
CONF_HOST: discovery_info.ip,
CONF_MAC: self.mac,
CONF_MODEL: model,
CONF_PORT: 7700,
}

return await self.async_step_auth()

async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
@@ -250,7 +172,7 @@ class BoschAlarmConfigFlow(ConfigFlow, domain=DOMAIN):
else:
if serial_number:
await self.async_set_unique_id(str(serial_number))
if self.source in (SOURCE_USER, SOURCE_DHCP):
if self.source == SOURCE_USER:
if serial_number:
self._abort_if_unique_id_configured()
else:
@@ -262,7 +184,6 @@ class BoschAlarmConfigFlow(ConfigFlow, domain=DOMAIN):
)
if serial_number:
self._abort_if_unique_id_mismatch(reason="device_mismatch")

return self.async_update_reload_and_abort(
self._get_reconfigure_entry(),
data=self._data,

@@ -1,9 +1,6 @@
"""Constants for the Bosch Alarm integration."""

DOMAIN = "bosch_alarm"
ATTR_HISTORY = "history"
HISTORY_ATTR = "history"
CONF_INSTALLER_CODE = "installer_code"
CONF_USER_CODE = "user_code"
ATTR_DATETIME = "datetime"
SERVICE_SET_DATE_TIME = "set_date_time"
ATTR_CONFIG_ENTRY_ID = "config_entry_id"

@@ -6,8 +6,8 @@ from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_PASSWORD
from homeassistant.core import HomeAssistant

from . import BoschAlarmConfigEntry
from .const import CONF_INSTALLER_CODE, CONF_USER_CODE
from .types import BoschAlarmConfigEntry

TO_REDACT = [CONF_INSTALLER_CODE, CONF_USER_CODE, CONF_PASSWORD]


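TO_REDACT feeds Home Assistant's standard diagnostics redaction helper, which appears as context in the hunk above. A minimal sketch of how a config-entry diagnostics handler typically applies it; the handler body is not part of this diff, so the return shape below is illustrative only:

```python
from typing import Any

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.core import HomeAssistant


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, entry: BoschAlarmConfigEntry
) -> dict[str, Any]:
    """Sketch: return entry data with codes and password replaced by **REDACTED**."""
    # TO_REDACT and BoschAlarmConfigEntry come from the module shown above.
    return {"entry_data": async_redact_data(entry.data, TO_REDACT)}
```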
@@ -17,13 +17,9 @@ class BoschAlarmEntity(Entity):

    _attr_has_entity_name = True

    def __init__(
        self, panel: Panel, unique_id: str, observe_faults: bool = False
    ) -> None:
    def __init__(self, panel: Panel, unique_id: str) -> None:
        """Set up an entity for a bosch alarm panel."""
        self.panel = panel
        self._observe_faults = observe_faults
        self._attr_should_poll = False
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, unique_id)},
            name=f"Bosch {panel.model}",
@@ -38,14 +34,10 @@ class BoschAlarmEntity(Entity):
    async def async_added_to_hass(self) -> None:
        """Observe state changes."""
        self.panel.connection_status_observer.attach(self.schedule_update_ha_state)
        if self._observe_faults:
            self.panel.faults_observer.attach(self.schedule_update_ha_state)

    async def async_will_remove_from_hass(self) -> None:
        """Stop observing state changes."""
        self.panel.connection_status_observer.detach(self.schedule_update_ha_state)
        if self._observe_faults:
            self.panel.faults_observer.detach(self.schedule_update_ha_state)


class BoschAlarmAreaEntity(BoschAlarmEntity):
@@ -96,33 +88,6 @@ class BoschAlarmAreaEntity(BoschAlarmEntity):
        self._area.status_observer.detach(self.schedule_update_ha_state)


class BoschAlarmPointEntity(BoschAlarmEntity):
    """A base entity for point related entities within a bosch alarm panel."""

    def __init__(self, panel: Panel, point_id: int, unique_id: str) -> None:
        """Set up a point related entity for a bosch alarm panel."""
        super().__init__(panel, unique_id)
        self._point_id = point_id
        self._point_unique_id = f"{unique_id}_point_{point_id}"
        self._point = panel.points[point_id]
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, self._point_unique_id)},
            name=self._point.name,
            manufacturer="Bosch Security Systems",
            via_device=(DOMAIN, unique_id),
        )

    async def async_added_to_hass(self) -> None:
        """Observe state changes."""
        await super().async_added_to_hass()
        self._point.status_observer.attach(self.schedule_update_ha_state)

    async def async_will_remove_from_hass(self) -> None:
        """Stop observing state changes."""
        await super().async_will_remove_from_hass()
        self._point.status_observer.detach(self.schedule_update_ha_state)


class BoschAlarmDoorEntity(BoschAlarmEntity):
    """A base entity for door related entities within a bosch alarm panel."""


@@ -1,20 +1,6 @@
|
||||
{
|
||||
"services": {
|
||||
"set_date_time": {
|
||||
"service": "mdi:clock-edit"
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"alarms_gas": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
"alarms_fire": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
"alarms_burglary": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
"faulting_points": {
|
||||
"default": "mdi:alert-circle"
|
||||
}
|
||||
@@ -38,44 +24,6 @@
|
||||
"on": "mdi:lock-open"
|
||||
}
|
||||
}
|
||||
},
|
||||
"binary_sensor": {
|
||||
"panel_fault_parameter_crc_fail_in_pif": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
"panel_fault_phone_line_failure": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
"panel_fault_sdi_fail_since_rps_hang_up": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
"panel_fault_user_code_tamper_since_rps_hang_up": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
"panel_fault_fail_to_call_rps_since_rps_hang_up": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
"panel_fault_point_bus_fail_since_rps_hang_up": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
"panel_fault_log_overflow": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
"panel_fault_log_threshold": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
"area_ready_to_arm_away": {
|
||||
"default": "mdi:shield",
|
||||
"state": {
|
||||
"on": "mdi:shield-lock"
|
||||
}
|
||||
},
|
||||
"area_ready_to_arm_home": {
|
||||
"default": "mdi:shield",
|
||||
"state": {
|
||||
"on": "mdi:shield-home"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,11 +3,6 @@
  "name": "Bosch Alarm",
  "codeowners": ["@mag1024", "@sanjay900"],
  "config_flow": true,
  "dhcp": [
    {
      "macaddress": "000463*"
    }
  ],
  "documentation": "https://www.home-assistant.io/integrations/bosch_alarm",
  "integration_type": "device",
  "iot_class": "local_push",

@@ -13,7 +13,10 @@ rules:
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: |
|
||||
No custom actions are defined.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
@@ -26,22 +29,25 @@ rules:
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: done
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: |
|
||||
No custom actions are defined.
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: todo
|
||||
docs-installation-parameters: todo
|
||||
entity-unavailable: todo
|
||||
integration-owner: done
|
||||
log-when-unavailable: todo
|
||||
parallel-updates: done
|
||||
parallel-updates: todo
|
||||
reauthentication-flow: done
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info: done
|
||||
discovery: done
|
||||
discovery-update-info: todo
|
||||
discovery: todo
|
||||
docs-data-update: todo
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
|
||||
@@ -6,7 +6,6 @@ from collections.abc import Callable
from dataclasses import dataclass

from bosch_alarm_mode2 import Panel
from bosch_alarm_mode2.const import ALARM_MEMORY_PRIORITIES
from bosch_alarm_mode2.panel import Area

from homeassistant.components.sensor import SensorEntity, SensorEntityDescription
@@ -16,53 +15,18 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import BoschAlarmConfigEntry
from .entity import BoschAlarmAreaEntity

ALARM_TYPES = {
    "burglary": {
        ALARM_MEMORY_PRIORITIES.BURGLARY_SUPERVISORY: "supervisory",
        ALARM_MEMORY_PRIORITIES.BURGLARY_TROUBLE: "trouble",
        ALARM_MEMORY_PRIORITIES.BURGLARY_ALARM: "alarm",
    },
    "gas": {
        ALARM_MEMORY_PRIORITIES.GAS_SUPERVISORY: "supervisory",
        ALARM_MEMORY_PRIORITIES.GAS_TROUBLE: "trouble",
        ALARM_MEMORY_PRIORITIES.GAS_ALARM: "alarm",
    },
    "fire": {
        ALARM_MEMORY_PRIORITIES.FIRE_SUPERVISORY: "supervisory",
        ALARM_MEMORY_PRIORITIES.FIRE_TROUBLE: "trouble",
        ALARM_MEMORY_PRIORITIES.FIRE_ALARM: "alarm",
    },
}


@dataclass(kw_only=True, frozen=True)
class BoschAlarmSensorEntityDescription(SensorEntityDescription):
    """Describes Bosch Alarm sensor entity."""

    value_fn: Callable[[Area], str | int]
    value_fn: Callable[[Area], int]
    observe_alarms: bool = False
    observe_ready: bool = False
    observe_status: bool = False


def priority_value_fn(priority_info: dict[int, str]) -> Callable[[Area], str]:
    """Build a value_fn for a given priority type."""
    return lambda area: next(
        (key for priority, key in priority_info.items() if priority in area.alarms_ids),
        "no_issues",
    )


SENSOR_TYPES: list[BoschAlarmSensorEntityDescription] = [
    *[
        BoschAlarmSensorEntityDescription(
            key=f"alarms_{key}",
            translation_key=f"alarms_{key}",
            value_fn=priority_value_fn(priority_type),
            observe_alarms=True,
        )
        for key, priority_type in ALARM_TYPES.items()
    ],
    BoschAlarmSensorEntityDescription(
        key="faulting_points",
        translation_key="faulting_points",
@@ -117,6 +81,6 @@ class BoschAreaSensor(BoschAlarmAreaEntity, SensorEntity):
        self._attr_unique_id = f"{self._area_unique_id}_{entity_description.key}"

    @property
    def native_value(self) -> str | int:
    def native_value(self) -> int:
        """Return the state of the sensor."""
        return self.entity_description.value_fn(self._area)

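priority_value_fn returns a closure that reports the first alarm-memory priority from its mapping that is present in area.alarms_ids, falling back to "no_issues". A small illustration of that behaviour; FakeArea and the numeric IDs below are stand-ins, not the bosch_alarm_mode2 types or real ALARM_MEMORY_PRIORITIES values:

```python
from dataclasses import dataclass, field


@dataclass
class FakeArea:
    """Stand-in exposing only the attribute the closure reads."""

    alarms_ids: set[int] = field(default_factory=set)


# Hypothetical priority IDs standing in for ALARM_MEMORY_PRIORITIES members.
fire_priorities = {1: "supervisory", 2: "trouble", 3: "alarm"}
fire_state = priority_value_fn(fire_priorities)  # defined in the module above

print(fire_state(FakeArea(alarms_ids={2})))  # "trouble"
print(fire_state(FakeArea()))                # "no_issues"
```

Because the mapping is scanned in insertion order, listing supervisory before trouble before alarm decides which label wins when several priorities are active at once.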
@@ -1,77 +0,0 @@
"""Services for the bosch_alarm integration."""

from __future__ import annotations

import asyncio
import datetime as dt
from typing import Any

import voluptuous as vol

from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv
from homeassistant.util import dt as dt_util

from .const import ATTR_CONFIG_ENTRY_ID, ATTR_DATETIME, DOMAIN, SERVICE_SET_DATE_TIME
from .types import BoschAlarmConfigEntry


def validate_datetime(value: Any) -> dt.datetime:
    """Validate that a provided datetime is supported on a bosch alarm panel."""
    date_val = cv.datetime(value)
    if date_val.year < 2010:
        raise vol.RangeInvalid("datetime must be after 2009")

    if date_val.year > 2037:
        raise vol.RangeInvalid("datetime must be before 2038")

    return date_val


SET_DATE_TIME_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_CONFIG_ENTRY_ID): cv.string,
        vol.Optional(ATTR_DATETIME): validate_datetime,
    }
)


async def async_set_panel_date(call: ServiceCall) -> None:
    """Set the date and time on a bosch alarm panel."""
    config_entry: BoschAlarmConfigEntry | None
    value: dt.datetime = call.data.get(ATTR_DATETIME, dt_util.now())
    entry_id = call.data[ATTR_CONFIG_ENTRY_ID]
    if not (config_entry := call.hass.config_entries.async_get_entry(entry_id)):
        raise ServiceValidationError(
            translation_domain=DOMAIN,
            translation_key="integration_not_found",
            translation_placeholders={"target": entry_id},
        )
    if config_entry.state is not ConfigEntryState.LOADED:
        raise HomeAssistantError(
            translation_domain=DOMAIN,
            translation_key="not_loaded",
            translation_placeholders={"target": config_entry.title},
        )
    panel = config_entry.runtime_data
    try:
        await panel.set_panel_date(value)
    except asyncio.InvalidStateError as err:
        raise HomeAssistantError(
            translation_domain=DOMAIN,
            translation_key="connection_error",
            translation_placeholders={"target": config_entry.title},
        ) from err


def setup_services(hass: HomeAssistant) -> None:
    """Set up the services for the bosch alarm integration."""

    hass.services.async_register(
        DOMAIN,
        SERVICE_SET_DATE_TIME,
        async_set_panel_date,
        schema=SET_DATE_TIME_SCHEMA,
    )
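setup_services registers async_set_panel_date as bosch_alarm.set_date_time. A sketch of calling it from inside a running Home Assistant instance; the entry ID is a placeholder, and the datetime value reuses the example from services.yaml below:

```python
# Assumes an async context with a HomeAssistant instance named `hass`.
await hass.services.async_call(
    "bosch_alarm",
    "set_date_time",
    {
        "config_entry_id": "0123456789abcdef0123456789abcdef",  # placeholder
        "datetime": "2025-05-10 00:00:00",  # optional; defaults to the current time
    },
    blocking=True,  # wait until the panel date has been set (or an error is raised)
)
```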
@@ -1,12 +0,0 @@
set_date_time:
  fields:
    config_entry_id:
      required: true
      selector:
        config_entry:
          integration: bosch_alarm
    datetime:
      required: false
      example: "2025-05-10 00:00:00"
      selector:
        datetime:
@@ -1,6 +1,5 @@
|
||||
{
|
||||
"config": {
|
||||
"flow_title": "{model} ({host})",
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
@@ -43,7 +42,6 @@
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"abort": {
|
||||
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
|
||||
@@ -51,18 +49,6 @@
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"integration_not_found": {
|
||||
"message": "Integration \"{target}\" not found in registry."
|
||||
},
|
||||
"not_loaded": {
|
||||
"message": "{target} is not loaded."
|
||||
},
|
||||
"connection_error": {
|
||||
"message": "Could not connect to \"{target}\"."
|
||||
},
|
||||
"unknown_error": {
|
||||
"message": "An unknown error occurred while setting the date and time on \"{target}\"."
|
||||
},
|
||||
"cannot_connect": {
|
||||
"message": "Could not connect to panel."
|
||||
},
|
||||
@@ -70,111 +56,22 @@
|
||||
"message": "Incorrect credentials for panel."
|
||||
},
|
||||
"incorrect_door_state": {
|
||||
"message": "Door cannot be manipulated while it is momentarily unlocked."
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"set_date_time": {
|
||||
"name": "Set date & time",
|
||||
"description": "Sets the date and time on the alarm panel.",
|
||||
"fields": {
|
||||
"datetime": {
|
||||
"name": "Date & time",
|
||||
"description": "The date and time to set. The time zone of the Home Assistant instance is assumed. If omitted, the current date and time is used."
|
||||
},
|
||||
"config_entry_id": {
|
||||
"name": "Config entry",
|
||||
"description": "The Bosch Alarm integration ID."
|
||||
}
|
||||
}
|
||||
"message": "Door cannot be manipulated while it is being cycled."
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"panel_fault_battery_mising": {
|
||||
"name": "Battery missing"
|
||||
},
|
||||
"panel_fault_ac_fail": {
|
||||
"name": "AC Failure"
|
||||
},
|
||||
"panel_fault_parameter_crc_fail_in_pif": {
|
||||
"name": "CRC failure in panel configuration"
|
||||
},
|
||||
"panel_fault_phone_line_failure": {
|
||||
"name": "Phone line failure"
|
||||
},
|
||||
"panel_fault_sdi_fail_since_rps_hang_up": {
|
||||
"name": "SDI failure since last RPS connection"
|
||||
},
|
||||
"panel_fault_user_code_tamper_since_rps_hang_up": {
|
||||
"name": "User code tamper since last RPS connection"
|
||||
},
|
||||
"panel_fault_fail_to_call_rps_since_rps_hang_up": {
|
||||
"name": "Failure to call RPS since last RPS connection"
|
||||
},
|
||||
"panel_fault_point_bus_fail_since_rps_hang_up": {
|
||||
"name": "Point bus failure since last RPS connection"
|
||||
},
|
||||
"panel_fault_log_overflow": {
|
||||
"name": "Log overflow"
|
||||
},
|
||||
"panel_fault_log_threshold": {
|
||||
"name": "Log threshold reached"
|
||||
},
|
||||
"area_ready_to_arm_away": {
|
||||
"name": "Area ready to arm away",
|
||||
"state": {
|
||||
"on": "Ready",
|
||||
"off": "Not ready"
|
||||
}
|
||||
},
|
||||
"area_ready_to_arm_home": {
|
||||
"name": "Area ready to arm home",
|
||||
"state": {
|
||||
"on": "Ready",
|
||||
"off": "Not ready"
|
||||
}
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"secured": {
|
||||
"name": "Secured"
|
||||
},
|
||||
"cycling": {
|
||||
"name": "Momentarily unlocked"
|
||||
"name": "Cycling"
|
||||
},
|
||||
"locked": {
|
||||
"name": "Locked"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"alarms_gas": {
|
||||
"name": "Gas alarm issues",
|
||||
"state": {
|
||||
"supervisory": "Supervisory",
|
||||
"trouble": "Trouble",
|
||||
"alarm": "Alarm",
|
||||
"no_issues": "No issues"
|
||||
}
|
||||
},
|
||||
"alarms_fire": {
|
||||
"name": "Fire alarm issues",
|
||||
"state": {
|
||||
"supervisory": "[%key:component::bosch_alarm::entity::sensor::alarms_gas::state::supervisory%]",
|
||||
"trouble": "[%key:component::bosch_alarm::entity::sensor::alarms_gas::state::trouble%]",
|
||||
"alarm": "[%key:component::bosch_alarm::entity::sensor::alarms_gas::state::alarm%]",
|
||||
"no_issues": "[%key:component::bosch_alarm::entity::sensor::alarms_gas::state::no_issues%]"
|
||||
}
|
||||
},
|
||||
"alarms_burglary": {
|
||||
"name": "Burglary alarm issues",
|
||||
"state": {
|
||||
"supervisory": "[%key:component::bosch_alarm::entity::sensor::alarms_gas::state::supervisory%]",
|
||||
"trouble": "[%key:component::bosch_alarm::entity::sensor::alarms_gas::state::trouble%]",
|
||||
"alarm": "[%key:component::bosch_alarm::entity::sensor::alarms_gas::state::alarm%]",
|
||||
"no_issues": "[%key:component::bosch_alarm::entity::sensor::alarms_gas::state::no_issues%]"
|
||||
}
|
||||
},
|
||||
"faulting_points": {
|
||||
"name": "Faulting points",
|
||||
"unit_of_measurement": "points"
|
||||
|
||||
@@ -1,7 +0,0 @@
"""Types for the Bosch Alarm integration."""

from bosch_alarm_mode2 import Panel

from homeassistant.config_entries import ConfigEntry

type BoschAlarmConfigEntry = ConfigEntry[Panel]
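The alias above parameterizes ConfigEntry so that entry.runtime_data is typed as the connected Panel, which is how services.py reads it (config_entry.runtime_data). A minimal sketch of the pattern; the helper name is illustrative and not part of this diff:

```python
from bosch_alarm_mode2 import Panel


def panel_for(entry: BoschAlarmConfigEntry) -> Panel:
    """Illustrative accessor: no cast is needed because the entry is ConfigEntry[Panel]."""
    return entry.runtime_data
```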
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/caldav",
  "iot_class": "cloud_polling",
  "loggers": ["caldav", "vobject"],
  "requirements": ["caldav==1.6.0", "icalendar==6.1.0"]
  "requirements": ["caldav==1.3.9", "icalendar==6.1.0"]
}

@@ -55,6 +55,7 @@ from homeassistant.helpers.deprecation import (
|
||||
DeprecatedConstantEnum,
|
||||
all_with_deprecated_constants,
|
||||
check_if_deprecated_constant,
|
||||
deprecated_function,
|
||||
dir_with_deprecated_constants,
|
||||
)
|
||||
from homeassistant.helpers.entity import Entity, EntityDescription
|
||||
@@ -85,10 +86,10 @@ from .prefs import CameraPreferences, DynamicStreamSettings # noqa: F401
|
||||
from .webrtc import (
|
||||
DATA_ICE_SERVERS,
|
||||
CameraWebRTCProvider,
|
||||
WebRTCAnswer, # noqa: F401
|
||||
WebRTCAnswer,
|
||||
WebRTCCandidate, # noqa: F401
|
||||
WebRTCClientConfiguration,
|
||||
WebRTCError, # noqa: F401
|
||||
WebRTCError,
|
||||
WebRTCMessage, # noqa: F401
|
||||
WebRTCSendMessage,
|
||||
async_get_supported_provider,
|
||||
@@ -472,6 +473,9 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
self.async_update_token()
|
||||
self._create_stream_lock: asyncio.Lock | None = None
|
||||
self._webrtc_provider: CameraWebRTCProvider | None = None
|
||||
self._supports_native_sync_webrtc = (
|
||||
type(self).async_handle_web_rtc_offer != Camera.async_handle_web_rtc_offer
|
||||
)
|
||||
self._supports_native_async_webrtc = (
|
||||
type(self).async_handle_async_webrtc_offer
|
||||
!= Camera.async_handle_async_webrtc_offer
|
||||
@@ -575,6 +579,15 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
"""
|
||||
return None
|
||||
|
||||
async def async_handle_web_rtc_offer(self, offer_sdp: str) -> str | None:
|
||||
"""Handle the WebRTC offer and return an answer.
|
||||
|
||||
This is used by cameras with CameraEntityFeature.STREAM
|
||||
and StreamType.WEB_RTC.
|
||||
|
||||
Integrations can override with a native WebRTC implementation.
|
||||
"""
|
||||
|
||||
async def async_handle_async_webrtc_offer(
|
||||
self, offer_sdp: str, session_id: str, send_message: WebRTCSendMessage
|
||||
) -> None:
|
||||
@@ -587,6 +600,42 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
|
||||
Integrations can override with a native WebRTC implementation.
|
||||
"""
|
||||
if self._supports_native_sync_webrtc:
|
||||
try:
|
||||
answer = await deprecated_function(
|
||||
"async_handle_async_webrtc_offer",
|
||||
breaks_in_ha_version="2025.6",
|
||||
)(self.async_handle_web_rtc_offer)(offer_sdp)
|
||||
except ValueError as ex:
|
||||
_LOGGER.error("Error handling WebRTC offer: %s", ex)
|
||||
send_message(
|
||||
WebRTCError(
|
||||
"webrtc_offer_failed",
|
||||
str(ex),
|
||||
)
|
||||
)
|
||||
except TimeoutError:
|
||||
# This catch was already here and should stay through the deprecation
|
||||
_LOGGER.error("Timeout handling WebRTC offer")
|
||||
send_message(
|
||||
WebRTCError(
|
||||
"webrtc_offer_failed",
|
||||
"Timeout handling WebRTC offer",
|
||||
)
|
||||
)
|
||||
else:
|
||||
if answer:
|
||||
send_message(WebRTCAnswer(answer))
|
||||
else:
|
||||
_LOGGER.error("Error handling WebRTC offer: No answer")
|
||||
send_message(
|
||||
WebRTCError(
|
||||
"webrtc_offer_failed",
|
||||
"No answer on WebRTC offer",
|
||||
)
|
||||
)
|
||||
return
|
||||
|
||||
if self._webrtc_provider:
|
||||
await self._webrtc_provider.async_handle_async_webrtc_offer(
|
||||
self, offer_sdp, session_id, send_message
|
||||
@@ -715,7 +764,9 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
new_provider = None
|
||||
|
||||
# Skip all providers if the camera has a native WebRTC implementation
|
||||
if not self._supports_native_async_webrtc:
|
||||
if not (
|
||||
self._supports_native_sync_webrtc or self._supports_native_async_webrtc
|
||||
):
|
||||
# Camera doesn't have a native WebRTC implementation
|
||||
new_provider = await self._async_get_supported_webrtc_provider(
|
||||
async_get_supported_provider
|
||||
@@ -747,12 +798,17 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
"""Return the WebRTC client configuration and extend it with the registered ice servers."""
|
||||
config = self._async_get_webrtc_client_configuration()
|
||||
|
||||
ice_servers = [
|
||||
server
|
||||
for servers in self.hass.data.get(DATA_ICE_SERVERS, [])
|
||||
for server in servers()
|
||||
]
|
||||
config.configuration.ice_servers.extend(ice_servers)
|
||||
if not self._supports_native_sync_webrtc:
|
||||
# Until 2024.11, the frontend was not resolving any ice servers
|
||||
# The async approach was added 2024.11 and new integrations need to use it
|
||||
ice_servers = [
|
||||
server
|
||||
for servers in self.hass.data.get(DATA_ICE_SERVERS, [])
|
||||
for server in servers()
|
||||
]
|
||||
config.configuration.ice_servers.extend(ice_servers)
|
||||
|
||||
config.get_candidates_upfront = self._supports_native_sync_webrtc
|
||||
|
||||
return config
|
||||
|
||||
@@ -782,7 +838,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
"""Return the camera capabilities."""
|
||||
frontend_stream_types = set()
|
||||
if CameraEntityFeature.STREAM in self.supported_features_compat:
|
||||
if self._supports_native_async_webrtc:
|
||||
if self._supports_native_sync_webrtc or self._supports_native_async_webrtc:
|
||||
# The camera has a native WebRTC implementation
|
||||
frontend_stream_types.add(StreamType.WEB_RTC)
|
||||
else:
|
||||
|
||||
@@ -111,11 +111,13 @@ class WebRTCClientConfiguration:
|
||||
|
||||
configuration: RTCConfiguration = field(default_factory=RTCConfiguration)
|
||||
data_channel: str | None = None
|
||||
get_candidates_upfront: bool = False
|
||||
|
||||
def to_frontend_dict(self) -> dict[str, Any]:
|
||||
"""Return a dict that can be used by the frontend."""
|
||||
data: dict[str, Any] = {
|
||||
"configuration": self.configuration.to_dict(),
|
||||
"getCandidatesUpfront": self.get_candidates_upfront,
|
||||
}
|
||||
if self.data_channel is not None:
|
||||
data["dataChannel"] = self.data_channel
|
||||
|
||||
@@ -60,7 +60,7 @@ from .const import (
|
||||
ADDED_CAST_DEVICES_KEY,
|
||||
CAST_MULTIZONE_MANAGER_KEY,
|
||||
CONF_IGNORE_CEC,
|
||||
DOMAIN,
|
||||
DOMAIN as CAST_DOMAIN,
|
||||
SIGNAL_CAST_DISCOVERED,
|
||||
SIGNAL_CAST_REMOVED,
|
||||
SIGNAL_HASS_CAST_SHOW_VIEW,
|
||||
@@ -315,7 +315,7 @@ class CastMediaPlayerEntity(CastDevice, MediaPlayerEntity):
|
||||
self._cast_view_remove_handler: CALLBACK_TYPE | None = None
|
||||
self._attr_unique_id = str(cast_info.uuid)
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, str(cast_info.uuid).replace("-", ""))},
|
||||
identifiers={(CAST_DOMAIN, str(cast_info.uuid).replace("-", ""))},
|
||||
manufacturer=str(cast_info.cast_info.manufacturer),
|
||||
model=cast_info.cast_info.model_name,
|
||||
name=str(cast_info.friendly_name),
|
||||
@@ -591,7 +591,7 @@ class CastMediaPlayerEntity(CastDevice, MediaPlayerEntity):
|
||||
"""Generate root node."""
|
||||
children = []
|
||||
# Add media browsers
|
||||
for platform in self.hass.data[DOMAIN]["cast_platform"].values():
|
||||
for platform in self.hass.data[CAST_DOMAIN]["cast_platform"].values():
|
||||
children.extend(
|
||||
await platform.async_get_media_browser_root_object(
|
||||
self.hass, self._chromecast.cast_type
|
||||
@@ -650,7 +650,7 @@ class CastMediaPlayerEntity(CastDevice, MediaPlayerEntity):
|
||||
|
||||
platform: CastProtocol
|
||||
assert media_content_type is not None
|
||||
for platform in self.hass.data[DOMAIN]["cast_platform"].values():
|
||||
for platform in self.hass.data[CAST_DOMAIN]["cast_platform"].values():
|
||||
browse_media = await platform.async_browse_media(
|
||||
self.hass,
|
||||
media_content_type,
|
||||
@@ -680,7 +680,7 @@ class CastMediaPlayerEntity(CastDevice, MediaPlayerEntity):
|
||||
extra = kwargs.get(ATTR_MEDIA_EXTRA, {})
|
||||
|
||||
# Handle media supported by a known cast app
|
||||
if media_type == DOMAIN:
|
||||
if media_type == CAST_DOMAIN:
|
||||
try:
|
||||
app_data = json.loads(media_id)
|
||||
if metadata := extra.get("metadata"):
|
||||
@@ -712,7 +712,7 @@ class CastMediaPlayerEntity(CastDevice, MediaPlayerEntity):
|
||||
return
|
||||
|
||||
# Try the cast platforms
|
||||
for platform in self.hass.data[DOMAIN]["cast_platform"].values():
|
||||
for platform in self.hass.data[CAST_DOMAIN]["cast_platform"].values():
|
||||
result = await platform.async_play_media(
|
||||
self.hass, self.entity_id, chromecast, media_type, media_id
|
||||
)
|
||||
|
||||
@@ -18,20 +18,23 @@ from homeassistant.const import (
|
||||
SERVICE_TOGGLE,
|
||||
SERVICE_TURN_OFF,
|
||||
SERVICE_TURN_ON,
|
||||
STATE_OFF,
|
||||
STATE_ON,
|
||||
UnitOfTemperature,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers import config_validation as cv, issue_registry as ir
|
||||
from homeassistant.helpers.entity import Entity, EntityDescription
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
from homeassistant.helpers.temperature import display_temp as show_temp
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.loader import async_suggest_report_issue
|
||||
from homeassistant.loader import async_get_issue_tracker, async_suggest_report_issue
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
from homeassistant.util.unit_conversion import TemperatureConverter
|
||||
|
||||
from .const import ( # noqa: F401
|
||||
ATTR_AUX_HEAT,
|
||||
ATTR_CURRENT_HUMIDITY,
|
||||
ATTR_CURRENT_TEMPERATURE,
|
||||
ATTR_FAN_MODE,
|
||||
@@ -74,6 +77,7 @@ from .const import ( # noqa: F401
|
||||
PRESET_HOME,
|
||||
PRESET_NONE,
|
||||
PRESET_SLEEP,
|
||||
SERVICE_SET_AUX_HEAT,
|
||||
SERVICE_SET_FAN_MODE,
|
||||
SERVICE_SET_HUMIDITY,
|
||||
SERVICE_SET_HVAC_MODE,
|
||||
@@ -164,6 +168,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"async_handle_set_preset_mode_service",
|
||||
[ClimateEntityFeature.PRESET_MODE],
|
||||
)
|
||||
component.async_register_entity_service(
|
||||
SERVICE_SET_AUX_HEAT,
|
||||
{vol.Required(ATTR_AUX_HEAT): cv.boolean},
|
||||
async_service_aux_heat,
|
||||
[ClimateEntityFeature.AUX_HEAT],
|
||||
)
|
||||
component.async_register_entity_service(
|
||||
SERVICE_SET_TEMPERATURE,
|
||||
SET_TEMPERATURE_SCHEMA,
|
||||
@@ -229,6 +239,7 @@ CACHED_PROPERTIES_WITH_ATTR_ = {
|
||||
"target_temperature_low",
|
||||
"preset_mode",
|
||||
"preset_modes",
|
||||
"is_aux_heat",
|
||||
"fan_mode",
|
||||
"fan_modes",
|
||||
"swing_mode",
|
||||
@@ -268,6 +279,7 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
_attr_hvac_action: HVACAction | None = None
|
||||
_attr_hvac_mode: HVACMode | None
|
||||
_attr_hvac_modes: list[HVACMode]
|
||||
_attr_is_aux_heat: bool | None
|
||||
_attr_max_humidity: float = DEFAULT_MAX_HUMIDITY
|
||||
_attr_max_temp: float
|
||||
_attr_min_humidity: float = DEFAULT_MIN_HUMIDITY
|
||||
@@ -287,6 +299,52 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
_attr_target_temperature: float | None = None
|
||||
_attr_temperature_unit: str
|
||||
|
||||
__climate_reported_legacy_aux = False
|
||||
|
||||
def _report_legacy_aux(self) -> None:
|
||||
"""Log warning and create an issue if the entity implements legacy auxiliary heater."""
|
||||
|
||||
report_issue = async_suggest_report_issue(
|
||||
self.hass,
|
||||
integration_domain=self.platform.platform_name,
|
||||
module=type(self).__module__,
|
||||
)
|
||||
_LOGGER.warning(
|
||||
(
|
||||
"%s::%s implements the `is_aux_heat` property or uses the auxiliary "
|
||||
"heater methods in a subclass of ClimateEntity which is "
|
||||
"deprecated and will be unsupported from Home Assistant 2025.4."
|
||||
" Please %s"
|
||||
),
|
||||
self.platform.platform_name,
|
||||
self.__class__.__name__,
|
||||
report_issue,
|
||||
)
|
||||
|
||||
translation_placeholders = {"platform": self.platform.platform_name}
|
||||
translation_key = "deprecated_climate_aux_no_url"
|
||||
issue_tracker = async_get_issue_tracker(
|
||||
self.hass,
|
||||
integration_domain=self.platform.platform_name,
|
||||
module=type(self).__module__,
|
||||
)
|
||||
if issue_tracker:
|
||||
translation_placeholders["issue_tracker"] = issue_tracker
|
||||
translation_key = "deprecated_climate_aux_url_custom"
|
||||
ir.async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
f"deprecated_climate_aux_{self.platform.platform_name}",
|
||||
breaks_in_ha_version="2025.4.0",
|
||||
is_fixable=False,
|
||||
is_persistent=False,
|
||||
issue_domain=self.platform.platform_name,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key=translation_key,
|
||||
translation_placeholders=translation_placeholders,
|
||||
)
|
||||
self.__climate_reported_legacy_aux = True
|
||||
|
||||
@final
|
||||
@property
|
||||
def state(self) -> str | None:
|
||||
@@ -395,6 +453,14 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
if ClimateEntityFeature.SWING_HORIZONTAL_MODE in supported_features:
|
||||
data[ATTR_SWING_HORIZONTAL_MODE] = self.swing_horizontal_mode
|
||||
|
||||
if ClimateEntityFeature.AUX_HEAT in supported_features:
|
||||
data[ATTR_AUX_HEAT] = STATE_ON if self.is_aux_heat else STATE_OFF
|
||||
if (
|
||||
self.__climate_reported_legacy_aux is False
|
||||
and "custom_components" in type(self).__module__
|
||||
):
|
||||
self._report_legacy_aux()
|
||||
|
||||
return data
|
||||
|
||||
@cached_property
|
||||
@@ -474,6 +540,14 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
"""
|
||||
return self._attr_preset_modes
|
||||
|
||||
@cached_property
|
||||
def is_aux_heat(self) -> bool | None:
|
||||
"""Return true if aux heater.
|
||||
|
||||
Requires ClimateEntityFeature.AUX_HEAT.
|
||||
"""
|
||||
return self._attr_is_aux_heat
|
||||
|
||||
@cached_property
|
||||
def fan_mode(self) -> str | None:
|
||||
"""Return the fan setting.
|
||||
@@ -658,6 +732,22 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
"""Set new preset mode."""
|
||||
await self.hass.async_add_executor_job(self.set_preset_mode, preset_mode)
|
||||
|
||||
def turn_aux_heat_on(self) -> None:
|
||||
"""Turn auxiliary heater on."""
|
||||
raise NotImplementedError
|
||||
|
||||
async def async_turn_aux_heat_on(self) -> None:
|
||||
"""Turn auxiliary heater on."""
|
||||
await self.hass.async_add_executor_job(self.turn_aux_heat_on)
|
||||
|
||||
def turn_aux_heat_off(self) -> None:
|
||||
"""Turn auxiliary heater off."""
|
||||
raise NotImplementedError
|
||||
|
||||
async def async_turn_aux_heat_off(self) -> None:
|
||||
"""Turn auxiliary heater off."""
|
||||
await self.hass.async_add_executor_job(self.turn_aux_heat_off)
|
||||
|
||||
def turn_on(self) -> None:
|
||||
"""Turn the entity on."""
|
||||
raise NotImplementedError
|
||||
@@ -755,6 +845,16 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
return self._attr_max_humidity
|
||||
|
||||
|
||||
async def async_service_aux_heat(
|
||||
entity: ClimateEntity, service_call: ServiceCall
|
||||
) -> None:
|
||||
"""Handle aux heat service."""
|
||||
if service_call.data[ATTR_AUX_HEAT]:
|
||||
await entity.async_turn_aux_heat_on()
|
||||
else:
|
||||
await entity.async_turn_aux_heat_off()
|
||||
|
||||
|
||||
async def async_service_humidity_set(
|
||||
entity: ClimateEntity, service_call: ServiceCall
|
||||
) -> None:
|
||||
|
||||
@@ -96,6 +96,7 @@ class HVACAction(StrEnum):
|
||||
CURRENT_HVAC_ACTIONS = [cls.value for cls in HVACAction]
|
||||
|
||||
|
||||
ATTR_AUX_HEAT = "aux_heat"
|
||||
ATTR_CURRENT_HUMIDITY = "current_humidity"
|
||||
ATTR_CURRENT_TEMPERATURE = "current_temperature"
|
||||
ATTR_FAN_MODES = "fan_modes"
|
||||
@@ -127,6 +128,7 @@ DOMAIN = "climate"
|
||||
|
||||
INTENT_SET_TEMPERATURE = "HassClimateSetTemperature"
|
||||
|
||||
SERVICE_SET_AUX_HEAT = "set_aux_heat"
|
||||
SERVICE_SET_FAN_MODE = "set_fan_mode"
|
||||
SERVICE_SET_PRESET_MODE = "set_preset_mode"
|
||||
SERVICE_SET_HUMIDITY = "set_humidity"
|
||||
@@ -145,6 +147,7 @@ class ClimateEntityFeature(IntFlag):
|
||||
FAN_MODE = 8
|
||||
PRESET_MODE = 16
|
||||
SWING_MODE = 32
|
||||
AUX_HEAT = 64
|
||||
TURN_OFF = 128
|
||||
TURN_ON = 256
|
||||
SWING_HORIZONTAL_MODE = 512
|
||||
|
||||
@@ -1,5 +1,17 @@
|
||||
# Describes the format for available climate services
|
||||
|
||||
set_aux_heat:
|
||||
target:
|
||||
entity:
|
||||
domain: climate
|
||||
supported_features:
|
||||
- climate.ClimateEntityFeature.AUX_HEAT
|
||||
fields:
|
||||
aux_heat:
|
||||
required: true
|
||||
selector:
|
||||
boolean:
|
||||
|
||||
set_preset_mode:
|
||||
target:
|
||||
entity:
|
||||
|
||||
@@ -12,6 +12,7 @@ from homeassistant.helpers.significant_change import (
|
||||
)
|
||||
|
||||
from . import (
|
||||
ATTR_AUX_HEAT,
|
||||
ATTR_CURRENT_HUMIDITY,
|
||||
ATTR_CURRENT_TEMPERATURE,
|
||||
ATTR_FAN_MODE,
|
||||
@@ -26,6 +27,7 @@ from . import (
|
||||
)
|
||||
|
||||
SIGNIFICANT_ATTRIBUTES: set[str] = {
|
||||
ATTR_AUX_HEAT,
|
||||
ATTR_CURRENT_HUMIDITY,
|
||||
ATTR_CURRENT_TEMPERATURE,
|
||||
ATTR_FAN_MODE,
|
||||
@@ -65,6 +67,7 @@ def async_check_significant_change(
|
||||
|
||||
for attr_name in changed_attrs:
|
||||
if attr_name in [
|
||||
ATTR_AUX_HEAT,
|
||||
ATTR_FAN_MODE,
|
||||
ATTR_HVAC_ACTION,
|
||||
ATTR_PRESET_MODE,
|
||||
|
||||
@@ -36,6 +36,9 @@
|
||||
"fan_only": "Fan only"
|
||||
},
|
||||
"state_attributes": {
|
||||
"aux_heat": {
|
||||
"name": "Aux heat"
|
||||
},
|
||||
"current_humidity": {
|
||||
"name": "Current humidity"
|
||||
},
|
||||
@@ -146,6 +149,16 @@
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"set_aux_heat": {
|
||||
"name": "Turn on/off auxiliary heater",
|
||||
"description": "Turns auxiliary heater on/off.",
|
||||
"fields": {
|
||||
"aux_heat": {
|
||||
"name": "Auxiliary heating",
|
||||
"description": "New value of auxiliary heater."
|
||||
}
|
||||
}
|
||||
},
|
||||
"set_preset_mode": {
|
||||
"name": "Set preset mode",
|
||||
"description": "Sets preset mode.",
|
||||
@@ -254,6 +267,16 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"deprecated_climate_aux_url_custom": {
|
||||
"title": "The {platform} custom integration is using deprecated climate auxiliary heater",
|
||||
"description": "The custom integration `{platform}` implements the `is_aux_heat` property or uses the auxiliary heater methods in a subclass of ClimateEntity.\n\nPlease create a bug report at {issue_tracker}.\n\nOnce an updated version of `{platform}` is available, install it and restart Home Assistant to fix this issue."
|
||||
},
|
||||
"deprecated_climate_aux_no_url": {
|
||||
"title": "[%key:component::climate::issues::deprecated_climate_aux_url_custom::title%]",
|
||||
"description": "The custom integration `{platform}` implements the `is_aux_heat` property or uses the auxiliary heater methods in a subclass of ClimateEntity.\n\nPlease report it to the author of the {platform} integration.\n\nOnce an updated version of `{platform}` is available, install it and restart Home Assistant to fix this issue."
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"not_valid_preset_mode": {
|
||||
"message": "Preset mode {mode} is not valid. Valid preset modes are: {modes}."
|
||||
|
||||
@@ -43,7 +43,7 @@ from homeassistant.util.dt import utcnow
|
||||
from .const import (
|
||||
CONF_ENTITY_CONFIG,
|
||||
CONF_FILTER,
|
||||
DOMAIN,
|
||||
DOMAIN as CLOUD_DOMAIN,
|
||||
PREF_ALEXA_REPORT_STATE,
|
||||
PREF_ENABLE_ALEXA,
|
||||
PREF_SHOULD_EXPOSE,
|
||||
@@ -55,7 +55,7 @@ if TYPE_CHECKING:
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CLOUD_ALEXA = f"{DOMAIN}.{ALEXA_DOMAIN}"
|
||||
CLOUD_ALEXA = f"{CLOUD_DOMAIN}.{ALEXA_DOMAIN}"
|
||||
|
||||
# Time to wait when entity preferences have changed before syncing it to
|
||||
# the cloud.
|
||||
|
||||
@@ -43,7 +43,6 @@ VALID_REPAIR_TRANSLATION_KEYS = {
|
||||
"no_subscription",
|
||||
"warn_bad_custom_domain_configuration",
|
||||
"reset_bad_custom_domain_configuration",
|
||||
"subscription_expired",
|
||||
}
|
||||
|
||||
|
||||
@@ -405,12 +404,7 @@ class CloudClient(Interface):
|
||||
) -> None:
|
||||
"""Create a repair issue."""
|
||||
if translation_key not in VALID_REPAIR_TRANSLATION_KEYS:
|
||||
_LOGGER.error(
|
||||
"Invalid translation key %s for repair issue %s",
|
||||
translation_key,
|
||||
identifier,
|
||||
)
|
||||
return
|
||||
raise ValueError(f"Invalid translation key {translation_key}")
|
||||
async_create_issue(
|
||||
hass=self._hass,
|
||||
domain=DOMAIN,
|
||||
|
||||
@@ -41,7 +41,7 @@ from .const import (
|
||||
CONF_ENTITY_CONFIG,
|
||||
CONF_FILTER,
|
||||
DEFAULT_DISABLE_2FA,
|
||||
DOMAIN,
|
||||
DOMAIN as CLOUD_DOMAIN,
|
||||
PREF_DISABLE_2FA,
|
||||
PREF_SHOULD_EXPOSE,
|
||||
)
|
||||
@@ -52,7 +52,7 @@ if TYPE_CHECKING:
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CLOUD_GOOGLE = f"{DOMAIN}.{GOOGLE_DOMAIN}"
|
||||
CLOUD_GOOGLE = f"{CLOUD_DOMAIN}.{GOOGLE_DOMAIN}"
|
||||
|
||||
|
||||
SUPPORTED_DOMAINS = {
|
||||
|
||||
@@ -13,6 +13,6 @@
  "integration_type": "system",
  "iot_class": "cloud_push",
  "loggers": ["acme", "hass_nabucasa", "snitun"],
  "requirements": ["hass-nabucasa==0.101.0"],
  "requirements": ["hass-nabucasa==0.100.0"],
  "single_config_entry": true
}

@@ -73,10 +73,6 @@
|
||||
"reset_bad_custom_domain_configuration": {
|
||||
"title": "Custom domain ignored",
|
||||
"description": "The DNS configuration for your custom domain ({custom_domains}) is not correct. This domain has now been ignored and will not be used for Home Assistant Cloud. If you want to use this domain, please fix the DNS configuration and restart Home Assistant. If you do not need this anymore, you can remove it from the account page."
|
||||
},
|
||||
"subscription_expired": {
|
||||
"title": "Subscription has expired",
|
||||
"description": "Your Home Assistant Cloud subscription has expired. Resubscribe at {account_url}."
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
|
||||
@@ -9,7 +9,6 @@ from aiocomelit import ComelitSerialBridgeObject
|
||||
from aiocomelit.const import CLIMATE
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
DOMAIN as CLIMATE_DOMAIN,
|
||||
ClimateEntity,
|
||||
ClimateEntityFeature,
|
||||
HVACAction,
|
||||
@@ -18,12 +17,12 @@ from homeassistant.components.climate import (
|
||||
)
|
||||
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_TENTHS
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import PRESET_MODE_AUTO, PRESET_MODE_AUTO_TARGET_TEMP, PRESET_MODE_MANUAL
|
||||
from .const import DOMAIN
|
||||
from .coordinator import ComelitConfigEntry, ComelitSerialBridge
|
||||
from .entity import ComelitBridgeBaseEntity
|
||||
from .utils import bridge_api_call, cleanup_stale_entity, load_api_data
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
@@ -41,13 +40,11 @@ class ClimaComelitMode(StrEnum):
|
||||
class ClimaComelitCommand(StrEnum):
|
||||
"""Serial Bridge clima commands."""
|
||||
|
||||
AUTO = "auto"
|
||||
MANUAL = "man"
|
||||
OFF = "off"
|
||||
ON = "on"
|
||||
MANUAL = "man"
|
||||
SET = "set"
|
||||
SNOW = "lower"
|
||||
SUN = "upper"
|
||||
AUTO = "auto"
|
||||
|
||||
|
||||
class ClimaComelitApiStatus(TypedDict):
|
||||
@@ -69,15 +66,11 @@ API_STATUS: dict[str, ClimaComelitApiStatus] = {
|
||||
),
|
||||
}
|
||||
|
||||
HVACMODE_TO_ACTION: dict[HVACMode, ClimaComelitCommand] = {
|
||||
MODE_TO_ACTION: dict[HVACMode, ClimaComelitCommand] = {
|
||||
HVACMode.OFF: ClimaComelitCommand.OFF,
|
||||
HVACMode.COOL: ClimaComelitCommand.SNOW,
|
||||
HVACMode.HEAT: ClimaComelitCommand.SUN,
|
||||
}
|
||||
|
||||
PRESET_MODE_TO_ACTION: dict[str, ClimaComelitCommand] = {
|
||||
PRESET_MODE_MANUAL: ClimaComelitCommand.MANUAL,
|
||||
PRESET_MODE_AUTO: ClimaComelitCommand.AUTO,
|
||||
HVACMode.AUTO: ClimaComelitCommand.AUTO,
|
||||
HVACMode.COOL: ClimaComelitCommand.MANUAL,
|
||||
HVACMode.HEAT: ClimaComelitCommand.MANUAL,
|
||||
}
|
||||
|
||||
|
||||
@@ -90,42 +83,26 @@ async def async_setup_entry(
|
||||
|
||||
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
|
||||
|
||||
entities: list[ClimateEntity] = []
|
||||
for device in coordinator.data[CLIMATE].values():
|
||||
values = load_api_data(device, CLIMATE_DOMAIN)
|
||||
if values[0] == 0 and values[4] == 0:
|
||||
# No climate data, device is only a humidifier/dehumidifier
|
||||
|
||||
await cleanup_stale_entity(
|
||||
hass, config_entry, f"{config_entry.entry_id}-{device.index}", device
|
||||
)
|
||||
|
||||
continue
|
||||
|
||||
entities.append(
|
||||
ComelitClimateEntity(coordinator, device, config_entry.entry_id)
|
||||
)
|
||||
|
||||
async_add_entities(entities)
|
||||
async_add_entities(
|
||||
ComelitClimateEntity(coordinator, device, config_entry.entry_id)
|
||||
for device in coordinator.data[CLIMATE].values()
|
||||
)
|
||||
|
||||
|
||||
class ComelitClimateEntity(ComelitBridgeBaseEntity, ClimateEntity):
|
||||
"""Climate device."""
|
||||
|
||||
_attr_hvac_modes = [HVACMode.COOL, HVACMode.HEAT, HVACMode.OFF]
|
||||
_attr_preset_modes = [PRESET_MODE_AUTO, PRESET_MODE_MANUAL]
|
||||
_attr_hvac_modes = [HVACMode.AUTO, HVACMode.COOL, HVACMode.HEAT, HVACMode.OFF]
|
||||
_attr_max_temp = 30
|
||||
_attr_min_temp = 5
|
||||
_attr_supported_features = (
|
||||
ClimateEntityFeature.TARGET_TEMPERATURE
|
||||
| ClimateEntityFeature.TURN_OFF
|
||||
| ClimateEntityFeature.TURN_ON
|
||||
| ClimateEntityFeature.PRESET_MODE
|
||||
)
|
||||
_attr_target_temperature_step = PRECISION_TENTHS
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
_attr_name = None
|
||||
_attr_translation_key = "thermostat"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@@ -140,23 +117,35 @@ class ComelitClimateEntity(ComelitBridgeBaseEntity, ClimateEntity):
|
||||
def _update_attributes(self) -> None:
|
||||
"""Update class attributes."""
|
||||
device = self.coordinator.data[CLIMATE][self._device.index]
|
||||
values = load_api_data(device, CLIMATE_DOMAIN)
|
||||
if not isinstance(device.val, list):
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN, translation_key="invalid_clima_data"
|
||||
)
|
||||
|
||||
# CLIMATE has a 2 item tuple:
|
||||
# - first for Clima
|
||||
# - second for Humidifier
|
||||
values = device.val[0]
|
||||
|
||||
_active = values[1]
|
||||
_mode = values[2] # Values from API: "O", "L", "U"
|
||||
_automatic = values[3] == ClimaComelitMode.AUTO
|
||||
|
||||
self._attr_preset_mode = PRESET_MODE_AUTO if _automatic else PRESET_MODE_MANUAL
|
||||
|
||||
self._attr_current_temperature = values[0] / 10
|
||||
|
||||
self._attr_hvac_action = None
|
||||
if _mode == ClimaComelitMode.OFF:
|
||||
self._attr_hvac_action = HVACAction.OFF
|
||||
if not _active:
|
||||
self._attr_hvac_action = HVACAction.IDLE
|
||||
elif _mode in API_STATUS:
|
||||
if _mode in API_STATUS:
|
||||
self._attr_hvac_action = API_STATUS[_mode]["hvac_action"]
|
||||
|
||||
self._attr_hvac_mode = None
|
||||
if _mode == ClimaComelitMode.OFF:
|
||||
self._attr_hvac_mode = HVACMode.OFF
|
||||
if _automatic:
|
||||
self._attr_hvac_mode = HVACMode.AUTO
|
||||
if _mode in API_STATUS:
|
||||
self._attr_hvac_mode = API_STATUS[_mode]["hvac_mode"]
|
||||
|
||||
@@ -168,48 +157,31 @@ class ComelitClimateEntity(ComelitBridgeBaseEntity, ClimateEntity):
|
||||
self._update_attributes()
|
||||
super()._handle_coordinator_update()
|
||||
|
||||
@bridge_api_call
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set new target temperature."""
|
||||
if (
|
||||
(target_temp := kwargs.get(ATTR_TEMPERATURE)) is None
|
||||
or self.hvac_mode == HVACMode.OFF
|
||||
or self._attr_preset_mode == PRESET_MODE_AUTO
|
||||
):
|
||||
target_temp := kwargs.get(ATTR_TEMPERATURE)
|
||||
) is None or self.hvac_mode == HVACMode.OFF:
|
||||
return
|
||||
|
||||
await self.coordinator.api.set_clima_status(
|
||||
self._device.index, ClimaComelitCommand.MANUAL
|
||||
)
|
||||
await self.coordinator.api.set_clima_status(
|
||||
self._device.index, ClimaComelitCommand.SET, target_temp
|
||||
)
|
||||
self._attr_target_temperature = target_temp
|
||||
self.async_write_ha_state()
|
||||
|
||||
@bridge_api_call
|
||||
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
"""Set hvac mode."""
|
||||
|
||||
if self._attr_hvac_mode == HVACMode.OFF:
|
||||
if hvac_mode != HVACMode.OFF:
|
||||
await self.coordinator.api.set_clima_status(
|
||||
self._device.index, ClimaComelitCommand.ON
|
||||
)
|
||||
await self.coordinator.api.set_clima_status(
|
||||
self._device.index, HVACMODE_TO_ACTION[hvac_mode]
|
||||
self._device.index, MODE_TO_ACTION[hvac_mode]
|
||||
)
|
||||
self._attr_hvac_mode = hvac_mode
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_set_preset_mode(self, preset_mode: str) -> None:
|
||||
"""Set new target preset mode."""
|
||||
|
||||
if self._attr_hvac_mode == HVACMode.OFF:
|
||||
return
|
||||
|
||||
await self.coordinator.api.set_clima_status(
|
||||
self._device.index, PRESET_MODE_TO_ACTION[preset_mode]
|
||||
)
|
||||
self._attr_preset_mode = preset_mode
|
||||
|
||||
if preset_mode == PRESET_MODE_AUTO:
|
||||
self._attr_target_temperature = PRESET_MODE_AUTO_TARGET_TEMP
|
||||
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -28,22 +28,20 @@ DEFAULT_HOST = "192.168.1.252"
|
||||
DEFAULT_PIN = 111111
|
||||
|
||||
|
||||
USER_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string,
|
||||
vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
|
||||
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.positive_int,
|
||||
vol.Required(CONF_TYPE, default=BRIDGE): vol.In(DEVICE_TYPE_LIST),
|
||||
}
|
||||
)
|
||||
def user_form_schema(user_input: dict[str, Any] | None) -> vol.Schema:
|
||||
"""Return user form schema."""
|
||||
user_input = user_input or {}
|
||||
return vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string,
|
||||
vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
|
||||
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.positive_int,
|
||||
vol.Required(CONF_TYPE, default=BRIDGE): vol.In(DEVICE_TYPE_LIST),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.positive_int})
|
||||
STEP_RECONFIGURE = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST): cv.string,
|
||||
vol.Required(CONF_PORT): cv.port,
|
||||
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.positive_int,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, str]:
|
||||
@@ -89,11 +87,13 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
if user_input is None:
|
||||
return self.async_show_form(step_id="user", data_schema=USER_SCHEMA)
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=user_form_schema(user_input)
|
||||
)
|
||||
|
||||
self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]})
|
||||
|
||||
errors: dict[str, str] = {}
|
||||
errors = {}
|
||||
|
||||
try:
|
||||
info = await validate_input(self.hass, user_input)
|
||||
@@ -108,21 +108,21 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
return self.async_create_entry(title=info["title"], data=user_input)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=USER_SCHEMA, errors=errors
|
||||
step_id="user", data_schema=user_form_schema(user_input), errors=errors
|
||||
)
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reauth flow."""
|
||||
self.context["title_placeholders"] = {CONF_HOST: entry_data[CONF_HOST]}
|
||||
self.context["title_placeholders"] = {"host": entry_data[CONF_HOST]}
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reauth confirm."""
|
||||
errors: dict[str, str] = {}
|
||||
errors = {}
|
||||
|
||||
reauth_entry = self._get_reauth_entry()
|
||||
entry_data = reauth_entry.data
|
||||
@@ -163,42 +163,6 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reconfiguration of the device."""
|
||||
reconfigure_entry = self._get_reconfigure_entry()
|
||||
if not user_input:
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure", data_schema=STEP_RECONFIGURE
|
||||
)
|
||||
|
||||
updated_host = user_input[CONF_HOST]
|
||||
|
||||
self._async_abort_entries_match({CONF_HOST: updated_host})
|
||||
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
try:
|
||||
await validate_input(self.hass, user_input)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
except Exception: # noqa: BLE001
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
return self.async_update_reload_and_abort(
|
||||
reconfigure_entry, data_updates={CONF_HOST: updated_host}
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure",
|
||||
data_schema=STEP_RECONFIGURE,
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
|
||||
class CannotConnect(HomeAssistantError):
|
||||
"""Error to indicate we cannot connect."""
|
||||
|
||||
@@ -11,8 +11,3 @@ DEFAULT_PORT = 80
|
||||
DEVICE_TYPE_LIST = [BRIDGE, VEDO]
|
||||
|
||||
SCAN_INTERVAL = 5
|
||||
|
||||
PRESET_MODE_AUTO = "automatic"
|
||||
PRESET_MODE_MANUAL = "manual"
|
||||
|
||||
PRESET_MODE_AUTO_TARGET_TEMP = 20
|
||||
|
||||
@@ -7,14 +7,13 @@ from typing import Any, cast
|
||||
from aiocomelit import ComelitSerialBridgeObject
|
||||
from aiocomelit.const import COVER, STATE_COVER, STATE_OFF, STATE_ON
|
||||
|
||||
from homeassistant.components.cover import CoverDeviceClass, CoverEntity
|
||||
from homeassistant.components.cover import CoverDeviceClass, CoverEntity, CoverState
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
|
||||
from .coordinator import ComelitConfigEntry, ComelitSerialBridge
|
||||
from .entity import ComelitBridgeBaseEntity
|
||||
from .utils import bridge_api_call
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
@@ -69,10 +68,16 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
|
||||
def is_closed(self) -> bool | None:
|
||||
"""Return if the cover is closed."""
|
||||
|
||||
if self._last_state in [None, "unknown"]:
|
||||
return None
|
||||
|
||||
if self.device_status != STATE_COVER.index("stopped"):
|
||||
return False
|
||||
|
||||
if self._last_action:
|
||||
return self._last_action == STATE_COVER.index("closing")
|
||||
|
||||
return None
|
||||
return self._last_state == CoverState.CLOSED
|
||||
|
||||
@property
|
||||
def is_closing(self) -> bool:
|
||||
@@ -84,7 +89,6 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
|
||||
"""Return if the cover is opening."""
|
||||
return self._current_action("opening")
|
||||
|
||||
@bridge_api_call
|
||||
async def _cover_set_state(self, action: int, state: int) -> None:
|
||||
"""Set desired cover state."""
|
||||
self._last_state = self.state
|
||||
|
||||
@@ -9,7 +9,6 @@ from aiocomelit import ComelitSerialBridgeObject
from aiocomelit.const import CLIMATE

from homeassistant.components.humidifier import (
    DOMAIN as HUMIDIFIER_DOMAIN,
    MODE_AUTO,
    MODE_NORMAL,
    HumidifierAction,
@@ -18,13 +17,12 @@ from homeassistant.components.humidifier import (
    HumidifierEntityFeature,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ServiceValidationError
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import DOMAIN
from .coordinator import ComelitConfigEntry, ComelitSerialBridge
from .entity import ComelitBridgeBaseEntity
from .utils import bridge_api_call, cleanup_stale_entity, load_api_data

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@@ -68,23 +66,6 @@ async def async_setup_entry(

    entities: list[ComelitHumidifierEntity] = []
    for device in coordinator.data[CLIMATE].values():
        values = load_api_data(device, HUMIDIFIER_DOMAIN)
        if values[0] == 0 and values[4] == 0:
            # No humidity data, device is only a climate

            for device_class in (
                HumidifierDeviceClass.HUMIDIFIER,
                HumidifierDeviceClass.DEHUMIDIFIER,
            ):
                await cleanup_stale_entity(
                    hass,
                    config_entry,
                    f"{config_entry.entry_id}-{device.index}-{device_class}",
                    device,
                )

            continue

        entities.append(
            ComelitHumidifierEntity(
                coordinator,
@@ -142,7 +123,15 @@ class ComelitHumidifierEntity(ComelitBridgeBaseEntity, HumidifierEntity):
    def _update_attributes(self) -> None:
        """Update class attributes."""
        device = self.coordinator.data[CLIMATE][self._device.index]
        values = load_api_data(device, HUMIDIFIER_DOMAIN)
        if not isinstance(device.val, list):
            raise HomeAssistantError(
                translation_domain=DOMAIN, translation_key="invalid_clima_data"
            )

        # CLIMATE has a 2 item tuple:
        # - first for Clima
        # - second for Humidifier
        values = device.val[1]

        _active = values[1]
        _mode = values[2]  # Values from API: "O", "L", "U"
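The comment in the hunk above describes the layout this entity relies on: for a CLIMATE device, device.val is a two-item list and only the second item carries the humidity data. An illustration of that assumed layout (the concrete numbers and letters are invented for the example; only the indices used by _update_attributes come from the code):

# Assumed shape of ComelitSerialBridgeObject.val for a CLIMATE device.
device_val = [
    [210, 1, "A", 0, 200],  # item 0: Clima (thermostat) readings, unused here
    [550, 1, "U", 0, 500],  # item 1: Humidifier readings
]

values = device_val[1]
active = values[1]  # read as _active
mode = values[2]    # "O", "L" or "U" as reported by the API
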
@@ -165,7 +154,6 @@ class ComelitHumidifierEntity(ComelitBridgeBaseEntity, HumidifierEntity):
        self._update_attributes()
        super()._handle_coordinator_update()

    @bridge_api_call
    async def async_set_humidity(self, humidity: int) -> None:
        """Set new target humidity."""
        if not self._attr_is_on:
@@ -183,7 +171,6 @@ class ComelitHumidifierEntity(ComelitBridgeBaseEntity, HumidifierEntity):
        self._attr_target_humidity = humidity
        self.async_write_ha_state()

    @bridge_api_call
    async def async_set_mode(self, mode: str) -> None:
        """Set humidifier mode."""
        await self.coordinator.api.set_humidity_status(
@@ -192,7 +179,6 @@ class ComelitHumidifierEntity(ComelitBridgeBaseEntity, HumidifierEntity):
        self._attr_mode = mode
        self.async_write_ha_state()

    @bridge_api_call
    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn on."""
        await self.coordinator.api.set_humidity_status(
@@ -201,7 +187,6 @@ class ComelitHumidifierEntity(ComelitBridgeBaseEntity, HumidifierEntity):
        self._attr_is_on = True
        self.async_write_ha_state()

    @bridge_api_call
    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn off."""
        await self.coordinator.api.set_humidity_status(

@@ -4,18 +4,6 @@
      "zone_status": {
        "default": "mdi:shield-check"
      }
    },
    "climate": {
      "thermostat": {
        "state_attributes": {
          "preset_mode": {
            "state": {
              "automatic": "mdi:refresh-auto",
              "manual": "mdi:alpha-m"
            }
          }
        }
      }
    }
  }
}

@@ -12,7 +12,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import ComelitConfigEntry, ComelitSerialBridge
from .entity import ComelitBridgeBaseEntity
from .utils import bridge_api_call

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@@ -40,7 +39,6 @@ class ComelitLightEntity(ComelitBridgeBaseEntity, LightEntity):
    _attr_name = None
    _attr_supported_color_modes = {ColorMode.ONOFF}

    @bridge_api_call
    async def _light_set_state(self, state: int) -> None:
        """Set desired light state."""
        await self.coordinator.api.set_device_status(LIGHT, self._device.index, state)

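_light_set_state is the only part of the light entity visible in this diff; the turn-on/turn-off services that call it are not shown here. They most likely delegate to it with the STATE_ON/STATE_OFF constants from aiocomelit.const, roughly like this hypothetical sketch of methods on ComelitLightEntity:

    # Hypothetical sketch -- the real async_turn_on/async_turn_off bodies are
    # not part of this diff and may differ.
    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn the light on."""
        await self._light_set_state(STATE_ON)
        self._attr_is_on = True
        self.async_write_ha_state()

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the light off."""
        await self._light_set_state(STATE_OFF)
        self._attr_is_on = False
        self.async_write_ha_state()
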
@@ -7,6 +7,6 @@
  "integration_type": "hub",
  "iot_class": "local_polling",
  "loggers": ["aiocomelit"],
  "quality_scale": "silver",
  "requirements": ["aiocomelit==0.12.3"]
  "quality_scale": "bronze",
  "requirements": ["aiocomelit==0.12.0"]
}

@@ -26,7 +26,9 @@ rules:
  unique-config-entry: done

  # Silver
  action-exceptions: done
  action-exceptions:
    status: todo
    comment: wrap api calls in try block
  config-entry-unloading: done
  docs-configuration-parameters:
    status: exempt
@@ -53,8 +55,10 @@ rules:
  docs-known-limitations:
    status: exempt
    comment: no known limitations, yet
  docs-supported-devices: done
  docs-supported-functions: done
  docs-supported-devices:
    status: todo
    comment: review and complete missing ones
  docs-supported-functions: todo
  docs-troubleshooting: done
  docs-use-cases: done
  dynamic-devices:
@@ -68,7 +72,9 @@ rules:
  entity-translations: done
  exception-translations: done
  icon-translations: done
  reconfiguration-flow: done
  reconfiguration-flow:
    status: todo
    comment: PR in progress
  repair-issues:
    status: exempt
    comment: no known use cases for repair issues or flows, yet

Some files were not shown because too many files have changed in this diff.