Merge branch 'dev' into AddClimate_MideaCCM15
@@ -30,6 +30,7 @@ base_platforms: &base_platforms
- homeassistant/components/humidifier/**
- homeassistant/components/image/**
- homeassistant/components/image_processing/**
- homeassistant/components/lawn_mower/**
- homeassistant/components/light/**
- homeassistant/components/lock/**
- homeassistant/components/media_player/**
@@ -727,8 +727,6 @@ omit =
homeassistant/components/meteoclimatic/__init__.py
homeassistant/components/meteoclimatic/sensor.py
homeassistant/components/meteoclimatic/weather.py
homeassistant/components/metoffice/sensor.py
homeassistant/components/metoffice/weather.py
homeassistant/components/microsoft/tts.py
homeassistant/components/mikrotik/hub.py
homeassistant/components/mill/climate.py
@@ -783,6 +781,7 @@ omit =
homeassistant/components/neato/__init__.py
homeassistant/components/neato/api.py
homeassistant/components/neato/camera.py
homeassistant/components/neato/entity.py
homeassistant/components/neato/hub.py
homeassistant/components/neato/sensor.py
homeassistant/components/neato/switch.py
57  .github/workflows/ci.yaml (vendored)
@@ -19,6 +19,10 @@ on:
description: "Skip pytest"
default: false
type: boolean
skip-coverage:
description: "Skip coverage"
default: false
type: boolean
pylint-only:
description: "Only run pylint"
default: false
@@ -79,6 +83,7 @@ jobs:
test_groups: ${{ steps.info.outputs.test_groups }}
tests_glob: ${{ steps.info.outputs.tests_glob }}
tests: ${{ steps.info.outputs.tests }}
skip_coverage: ${{ steps.info.outputs.skip_coverage }}
runs-on: ubuntu-22.04
steps:
- name: Check out code from GitHub
@@ -127,6 +132,7 @@ jobs:
test_group_count=10
tests="[]"
tests_glob=""
skip_coverage=""

if [[ "${{ steps.integrations.outputs.changes }}" != "[]" ]];
then
@@ -176,6 +182,12 @@ jobs:
test_full_suite="true"
fi

if [[ "${{ github.event.inputs.skip-coverage }}" == "true" ]] \
|| [[ "${{ contains(github.event.pull_request.labels.*.name, 'ci-skip-coverage') }}" == "true" ]];
then
skip_coverage="true"
fi

# Output & sent to GitHub Actions
echo "mariadb_groups: ${mariadb_groups}"
echo "mariadb_groups=${mariadb_groups}" >> $GITHUB_OUTPUT
@@ -195,6 +207,8 @@ jobs:
echo "tests=${tests}" >> $GITHUB_OUTPUT
echo "tests_glob: ${tests_glob}"
echo "tests_glob=${tests_glob}" >> $GITHUB_OUTPUT
echo "skip_coverage: ${skip_coverage}"
echo "skip_coverage=${skip_coverage}" >> $GITHUB_OUTPUT

pre-commit:
name: Prepare pre-commit base
@@ -741,6 +755,11 @@ jobs:
. venv/bin/activate
python --version
set -o pipefail
cov_params=()
if [[ "${{ needs.info.outputs.skip_coverage }}" != "true" ]]; then
cov_params+=(--cov="homeassistant")
cov_params+=(--cov-report=xml)
fi

python3 -X dev -m pytest \
-qq \
@@ -750,8 +769,7 @@ jobs:
--dist=loadfile \
--test-group-count ${{ needs.info.outputs.test_group_count }} \
--test-group=${{ matrix.group }} \
--cov="homeassistant" \
--cov-report=xml \
${cov_params[@]} \
-o console_output_style=count \
-p no:sugar \
tests \
@@ -773,13 +791,18 @@ jobs:
exit 1
fi

cov_params=()
if [[ "${{ needs.info.outputs.skip_coverage }}" != "true" ]]; then
cov_params+=(--cov="homeassistant.components.${{ matrix.group }}")
cov_params+=(--cov-report=xml)
cov_params+=(--cov-report=term-missing)
fi

python3 -X dev -m pytest \
-qq \
--timeout=9 \
-n auto \
--cov="homeassistant.components.${{ matrix.group }}" \
--cov-report=xml \
--cov-report=term-missing \
${cov_params[@]} \
-o console_output_style=count \
--durations=0 \
--durations-min=1 \
@@ -793,6 +816,7 @@ jobs:
name: pytest-${{ github.run_number }}
path: pytest-*.txt
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v3.1.2
with:
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
@@ -888,14 +912,18 @@ jobs:
python --version
set -o pipefail
mariadb=$(echo "${{ matrix.mariadb-group }}" | sed "s/:/-/g")
cov_params=()
if [[ "${{ needs.info.outputs.skip_coverage }}" != "true" ]]; then
cov_params+=(--cov="homeassistant.components.recorder")
cov_params+=(--cov-report=xml)
cov_params+=(--cov-report=term-missing)
fi

python3 -X dev -m pytest \
-qq \
--timeout=20 \
-n 1 \
--cov="homeassistant.components.recorder" \
--cov-report=xml \
--cov-report=term-missing \
${cov_params[@]} \
-o console_output_style=count \
--durations=10 \
-p no:sugar \
@@ -912,6 +940,7 @@ jobs:
name: pytest-${{ github.run_number }}
path: pytest-*.txt
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v3.1.2
with:
name: coverage-${{ matrix.python-version }}-mariadb
@@ -1007,14 +1036,18 @@ jobs:
python --version
set -o pipefail
postgresql=$(echo "${{ matrix.postgresql-group }}" | sed "s/:/-/g")
cov_params=()
if [[ "${{ needs.info.outputs.skip_coverage }}" != "true" ]]; then
cov_params+=(--cov="homeassistant.components.recorder")
cov_params+=(--cov-report=xml)
cov_params+=(--cov-report=term-missing)
fi

python3 -X dev -m pytest \
-qq \
--timeout=9 \
-n 1 \
--cov="homeassistant.components.recorder" \
--cov-report=xml \
--cov-report=term-missing \
${cov_params[@]} \
-o console_output_style=count \
--durations=0 \
--durations-min=10 \
@@ -1032,6 +1065,7 @@ jobs:
name: pytest-${{ github.run_number }}
path: pytest-*.txt
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v3.1.0
with:
name: coverage-${{ matrix.python-version }}-postgresql
@@ -1042,6 +1076,7 @@ jobs:

coverage:
name: Upload test coverage to Codecov
if: needs.info.outputs.skip_coverage != 'true'
runs-on: ubuntu-22.04
needs:
- info
@@ -104,6 +104,7 @@ homeassistant.components.dhcp.*
homeassistant.components.diagnostics.*
homeassistant.components.dlna_dmr.*
homeassistant.components.dnsip.*
homeassistant.components.doorbird.*
homeassistant.components.dormakaba_dkey.*
homeassistant.components.dsmr.*
homeassistant.components.dunehd.*
@@ -194,6 +195,7 @@ homeassistant.components.lacrosse.*
homeassistant.components.lacrosse_view.*
homeassistant.components.lametric.*
homeassistant.components.laundrify.*
homeassistant.components.lawn_mower.*
homeassistant.components.lcn.*
homeassistant.components.ld2410_ble.*
homeassistant.components.lidarr.*
@@ -675,6 +675,8 @@ build.json @home-assistant/supervisor
/tests/components/launch_library/ @ludeeus @DurgNomis-drol
/homeassistant/components/laundrify/ @xLarry
/tests/components/laundrify/ @xLarry
/homeassistant/components/lawn_mower/ @home-assistant/core
/tests/components/lawn_mower/ @home-assistant/core
/homeassistant/components/lcn/ @alengwenus
/tests/components/lcn/ @alengwenus
/homeassistant/components/ld2410_ble/ @930913
@@ -8,7 +8,7 @@ from typing import Any, Generic, Self, TypeVar, overload
_T = TypeVar("_T")

class cached_property(Generic[_T]): # pylint: disable=invalid-name
class cached_property(Generic[_T]):
"""Backport of Python 3.12's cached_property.

Includes https://github.com/python/cpython/pull/101890/files
@@ -6,6 +6,7 @@ from aemet_opendata.interface import AEMET
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME
from homeassistant.core import HomeAssistant
from homeassistant.helpers import aiohttp_client

from .const import (
CONF_STATION_UPDATES,
@@ -27,7 +28,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
longitude = entry.data[CONF_LONGITUDE]
station_updates = entry.options.get(CONF_STATION_UPDATES, True)

aemet = AEMET(api_key)
aemet = AEMET(aiohttp_client.async_get_clientsession(hass), api_key)
weather_coordinator = WeatherUpdateCoordinator(
hass, aemet, latitude, longitude, station_updates
)
@@ -2,12 +2,13 @@
from __future__ import annotations

from aemet_opendata import AEMET
from aemet_opendata.exceptions import AuthError
import voluptuous as vol

from homeassistant import config_entries
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers import aiohttp_client, config_validation as cv
from homeassistant.helpers.schema_config_entry_flow import (
SchemaFlowFormStep,
SchemaOptionsFlowHandler,
@@ -39,8 +40,13 @@ class AemetConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
await self.async_set_unique_id(f"{latitude}-{longitude}")
self._abort_if_unique_id_configured()

api_online = await _is_aemet_api_online(self.hass, user_input[CONF_API_KEY])
if not api_online:
aemet = AEMET(
aiohttp_client.async_get_clientsession(self.hass),
user_input[CONF_API_KEY],
)
try:
await aemet.get_conventional_observation_stations(False)
except AuthError:
errors["base"] = "invalid_api_key"

if not errors:
@@ -70,10 +76,3 @@ class AemetConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
) -> SchemaOptionsFlowHandler:
"""Get the options flow for this handler."""
return SchemaOptionsFlowHandler(config_entry, OPTIONS_FLOW)

async def _is_aemet_api_online(hass, api_key):
aemet = AEMET(api_key)
return await hass.async_add_executor_job(
aemet.get_conventional_observation_stations, False
)
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/aemet",
"iot_class": "cloud_polling",
"loggers": ["aemet_opendata"],
"requirements": ["AEMET-OpenData==0.2.2"]
"requirements": ["AEMET-OpenData==0.3.0"]
}
@@ -11,8 +11,8 @@ from homeassistant.components.weather import (
ATTR_FORECAST_TIME,
ATTR_FORECAST_WIND_BEARING,
DOMAIN as WEATHER_DOMAIN,
CoordinatorWeatherEntity,
Forecast,
WeatherEntity,
WeatherEntityFeature,
)
from homeassistant.config_entries import ConfigEntry
@@ -22,10 +22,9 @@ from homeassistant.const import (
UnitOfSpeed,
UnitOfTemperature,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import (
ATTR_API_CONDITION,
@@ -111,7 +110,7 @@ async def async_setup_entry(
async_add_entities(entities, False)

class AemetWeather(CoordinatorEntity[WeatherUpdateCoordinator], WeatherEntity):
class AemetWeather(CoordinatorWeatherEntity[WeatherUpdateCoordinator]):
"""Implementation of an AEMET OpenData sensor."""

_attr_attribution = ATTRIBUTION
@@ -139,15 +138,6 @@ class AemetWeather(CoordinatorEntity[WeatherUpdateCoordinator], WeatherEntity):
self._attr_name = name
self._attr_unique_id = unique_id

@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
super()._handle_coordinator_update()
assert self.platform.config_entry
self.platform.config_entry.async_create_task(
self.hass, self.async_update_listeners(("daily", "hourly"))
)

@property
def condition(self):
"""Return the current condition."""
@@ -146,13 +146,13 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):

async def _get_aemet_weather(self):
"""Poll weather data from AEMET OpenData."""
weather = await self.hass.async_add_executor_job(self._get_weather_and_forecast)
weather = await self._get_weather_and_forecast()
return weather

def _get_weather_station(self):
async def _get_weather_station(self):
if not self._station:
self._station = (
self._aemet.get_conventional_observation_station_by_coordinates(
await self._aemet.get_conventional_observation_station_by_coordinates(
self._latitude, self._longitude
)
)
@@ -171,9 +171,9 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
)
return self._station

def _get_weather_town(self):
async def _get_weather_town(self):
if not self._town:
self._town = self._aemet.get_town_by_coordinates(
self._town = await self._aemet.get_town_by_coordinates(
self._latitude, self._longitude
)
if self._town:
@@ -192,18 +192,20 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
raise TownNotFound
return self._town

def _get_weather_and_forecast(self):
async def _get_weather_and_forecast(self):
"""Get weather and forecast data from AEMET OpenData."""

self._get_weather_town()
await self._get_weather_town()

daily = self._aemet.get_specific_forecast_town_daily(self._town[AEMET_ATTR_ID])
daily = await self._aemet.get_specific_forecast_town_daily(
self._town[AEMET_ATTR_ID]
)
if not daily:
_LOGGER.error(
'Error fetching daily data for town "%s"', self._town[AEMET_ATTR_ID]
)

hourly = self._aemet.get_specific_forecast_town_hourly(
hourly = await self._aemet.get_specific_forecast_town_hourly(
self._town[AEMET_ATTR_ID]
)
if not hourly:
@@ -212,8 +214,8 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
)

station = None
if self._station_updates and self._get_weather_station():
station = self._aemet.get_conventional_observation_station_data(
if self._station_updates and await self._get_weather_station():
station = await self._aemet.get_conventional_observation_station_data(
self._station[AEMET_ATTR_IDEMA]
)
if not station:
@@ -10,6 +10,7 @@ from aioairzone.const import (
AZD_AVAILABLE,
AZD_FIRMWARE,
AZD_FULL_NAME,
AZD_HOT_WATER,
AZD_ID,
AZD_MAC,
AZD_MODEL,
@@ -81,6 +82,31 @@ class AirzoneSystemEntity(AirzoneEntity):
return value

class AirzoneHotWaterEntity(AirzoneEntity):
"""Define an Airzone Hot Water entity."""

def __init__(
self,
coordinator: AirzoneUpdateCoordinator,
entry: ConfigEntry,
) -> None:
"""Initialize."""
super().__init__(coordinator)

self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, f"{entry.entry_id}_dhw")},
manufacturer=MANUFACTURER,
model="DHW",
name=self.get_airzone_value(AZD_NAME),
via_device=(DOMAIN, f"{entry.entry_id}_ws"),
)
self._attr_unique_id = entry.unique_id or entry.entry_id

def get_airzone_value(self, key: str) -> Any:
"""Return DHW value by key."""
return self.coordinator.data[AZD_HOT_WATER].get(key)

class AirzoneWebServerEntity(AirzoneEntity):
"""Define an Airzone WebServer entity."""
@@ -11,5 +11,5 @@
"documentation": "https://www.home-assistant.io/integrations/airzone",
"iot_class": "local_polling",
"loggers": ["aioairzone"],
"requirements": ["aioairzone==0.6.6"]
"requirements": ["aioairzone==0.6.7"]
}
@@ -4,6 +4,7 @@ from __future__ import annotations
from typing import Any, Final

from aioairzone.const import (
AZD_HOT_WATER,
AZD_HUMIDITY,
AZD_NAME,
AZD_TEMP,
@@ -31,7 +32,21 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .const import DOMAIN, TEMP_UNIT_LIB_TO_HASS
from .coordinator import AirzoneUpdateCoordinator
from .entity import AirzoneEntity, AirzoneWebServerEntity, AirzoneZoneEntity
from .entity import (
AirzoneEntity,
AirzoneHotWaterEntity,
AirzoneWebServerEntity,
AirzoneZoneEntity,
)

HOT_WATER_SENSOR_TYPES: Final[tuple[SensorEntityDescription, ...]] = (
SensorEntityDescription(
device_class=SensorDeviceClass.TEMPERATURE,
key=AZD_TEMP,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
state_class=SensorStateClass.MEASUREMENT,
),
)

WEBSERVER_SENSOR_TYPES: Final[tuple[SensorEntityDescription, ...]] = (
SensorEntityDescription(
@@ -71,6 +86,18 @@ async def async_setup_entry(

sensors: list[AirzoneSensor] = []

if AZD_HOT_WATER in coordinator.data:
dhw_data = coordinator.data[AZD_HOT_WATER]
for description in HOT_WATER_SENSOR_TYPES:
if description.key in dhw_data:
sensors.append(
AirzoneHotWaterSensor(
coordinator,
description,
entry,
)
)

if AZD_WEBSERVER in coordinator.data:
ws_data = coordinator.data[AZD_WEBSERVER]
for description in WEBSERVER_SENSOR_TYPES:
@@ -114,6 +141,30 @@ class AirzoneSensor(AirzoneEntity, SensorEntity):
self._attr_native_value = self.get_airzone_value(self.entity_description.key)

class AirzoneHotWaterSensor(AirzoneHotWaterEntity, AirzoneSensor):
"""Define an Airzone Hot Water sensor."""

_attr_has_entity_name = True

def __init__(
self,
coordinator: AirzoneUpdateCoordinator,
description: SensorEntityDescription,
entry: ConfigEntry,
) -> None:
"""Initialize."""
super().__init__(coordinator, entry)

self._attr_unique_id = f"{self._attr_unique_id}_dhw_{description.key}"
self.entity_description = description

self._attr_native_unit_of_measurement = TEMP_UNIT_LIB_TO_HASS.get(
self.get_airzone_value(AZD_TEMP_UNIT)
)

self._async_update_attrs()

class AirzoneWebServerSensor(AirzoneWebServerEntity, AirzoneSensor):
"""Define an Airzone WebServer sensor."""
@@ -9,6 +9,7 @@ from aioairzone_cloud.const import (
AZD_AIDOOS,
AZD_ERRORS,
AZD_PROBLEMS,
AZD_SYSTEMS,
AZD_WARNINGS,
AZD_ZONES,
)
@@ -25,7 +26,12 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .const import DOMAIN
from .coordinator import AirzoneUpdateCoordinator
from .entity import AirzoneAidooEntity, AirzoneEntity, AirzoneZoneEntity
from .entity import (
AirzoneAidooEntity,
AirzoneEntity,
AirzoneSystemEntity,
AirzoneZoneEntity,
)

@dataclass
@@ -51,6 +57,20 @@ AIDOO_BINARY_SENSOR_TYPES: Final[tuple[AirzoneBinarySensorEntityDescription, ...
),
)

SYSTEM_BINARY_SENSOR_TYPES: Final[tuple[AirzoneBinarySensorEntityDescription, ...]] = (
AirzoneBinarySensorEntityDescription(
attributes={
"errors": AZD_ERRORS,
"warnings": AZD_WARNINGS,
},
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
key=AZD_PROBLEMS,
),
)

ZONE_BINARY_SENSOR_TYPES: Final[tuple[AirzoneBinarySensorEntityDescription, ...]] = (
AirzoneBinarySensorEntityDescription(
device_class=BinarySensorDeviceClass.RUNNING,
@@ -87,6 +107,18 @@ async def async_setup_entry(
)
)

for system_id, system_data in coordinator.data.get(AZD_SYSTEMS, {}).items():
for description in SYSTEM_BINARY_SENSOR_TYPES:
if description.key in system_data:
binary_sensors.append(
AirzoneSystemBinarySensor(
coordinator,
description,
system_id,
system_data,
)
)

for zone_id, zone_data in coordinator.data.get(AZD_ZONES, {}).items():
for description in ZONE_BINARY_SENSOR_TYPES:
if description.key in zone_data:
@@ -145,6 +177,27 @@ class AirzoneAidooBinarySensor(AirzoneAidooEntity, AirzoneBinarySensor):
self._async_update_attrs()

class AirzoneSystemBinarySensor(AirzoneSystemEntity, AirzoneBinarySensor):
"""Define an Airzone Cloud System binary sensor."""

_attr_has_entity_name = True

def __init__(
self,
coordinator: AirzoneUpdateCoordinator,
description: AirzoneBinarySensorEntityDescription,
system_id: str,
system_data: dict[str, Any],
) -> None:
"""Initialize."""
super().__init__(coordinator, system_id, system_data)

self._attr_unique_id = f"{system_id}_{description.key}"
self.entity_description = description

self._async_update_attrs()

class AirzoneZoneBinarySensor(AirzoneZoneEntity, AirzoneBinarySensor):
"""Define an Airzone Cloud Zone binary sensor."""
@@ -10,6 +10,7 @@ from aioairzone_cloud.const import (
AZD_FIRMWARE,
AZD_NAME,
AZD_SYSTEM_ID,
AZD_SYSTEMS,
AZD_WEBSERVER,
AZD_WEBSERVERS,
AZD_ZONES,
@@ -65,6 +66,35 @@ class AirzoneAidooEntity(AirzoneEntity):
return value

class AirzoneSystemEntity(AirzoneEntity):
"""Define an Airzone Cloud System entity."""

def __init__(
self,
coordinator: AirzoneUpdateCoordinator,
system_id: str,
system_data: dict[str, Any],
) -> None:
"""Initialize."""
super().__init__(coordinator)

self.system_id = system_id

self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, system_id)},
manufacturer=MANUFACTURER,
name=system_data[AZD_NAME],
via_device=(DOMAIN, system_data[AZD_WEBSERVER]),
)

def get_airzone_value(self, key: str) -> Any:
"""Return system value by key."""
value = None
if system := self.coordinator.data[AZD_SYSTEMS].get(self.system_id):
value = system.get(key)
return value

class AirzoneWebServerEntity(AirzoneEntity):
"""Define an Airzone Cloud WebServer entity."""
@@ -5,5 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/amazon_polly",
"iot_class": "cloud_push",
"loggers": ["boto3", "botocore", "s3transfer"],
"requirements": ["boto3==1.20.24"]
"requirements": ["boto3==1.28.17"]
}
@@ -8,6 +8,6 @@
"iot_class": "local_push",
"loggers": ["androidtvremote2"],
"quality_scale": "platinum",
"requirements": ["androidtvremote2==0.0.13"],
"requirements": ["androidtvremote2==0.0.14"],
"zeroconf": ["_androidtvremote2._tcp.local."]
}
@@ -1,6 +1,7 @@
"""Consume the august activity stream."""
import asyncio
from datetime import datetime
from functools import partial
import logging

from aiohttp import ClientError
@@ -9,7 +10,7 @@ from yalexs.api_async import ApiAsync
from yalexs.pubnub_async import AugustPubNub
from yalexs.util import get_latest_activity

from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.event import async_call_later
from homeassistant.util.dt import utcnow
@@ -58,33 +59,38 @@ class ActivityStream(AugustSubscriberMixin):
self._did_first_update = False
self.pubnub = pubnub
self._update_debounce: dict[str, Debouncer] = {}
self._update_debounce_jobs: dict[str, HassJob] = {}

async def async_setup(self):
async def _async_update_house_id_later(
self, debouncer: Debouncer, _: datetime
) -> None:
"""Call a debouncer from async_call_later."""
await debouncer.async_call()

async def async_setup(self) -> None:
"""Token refresh check and catch up the activity stream."""
self._update_debounce = {
house_id: self._async_create_debouncer(house_id)
for house_id in self._house_ids
}
await self._async_refresh(utcnow())
self._did_first_update = True

@callback
def _async_create_debouncer(self, house_id):
"""Create a debouncer for the house id."""

async def _async_update_house_id():
await self._async_update_house_id(house_id)

return Debouncer(
update_debounce = self._update_debounce
update_debounce_jobs = self._update_debounce_jobs
for house_id in self._house_ids:
debouncer = Debouncer(
self._hass,
_LOGGER,
cooldown=ACTIVITY_DEBOUNCE_COOLDOWN,
immediate=True,
function=_async_update_house_id,
function=partial(self._async_update_house_id, house_id),
)
update_debounce[house_id] = debouncer
update_debounce_jobs[house_id] = HassJob(
partial(self._async_update_house_id_later, debouncer),
f"debounced august activity update for {house_id}",
cancel_on_shutdown=True,
)

await self._async_refresh(utcnow())
self._did_first_update = True

@callback
def async_stop(self):
def async_stop(self) -> None:
"""Cleanup any debounces."""
for debouncer in self._update_debounce.values():
debouncer.async_cancel()
@@ -127,28 +133,23 @@ class ActivityStream(AugustSubscriberMixin):
@callback
def async_schedule_house_id_refresh(self, house_id: str) -> None:
"""Update for a house activities now and once in the future."""
if cancels := self._schedule_updates.get(house_id):
_async_cancel_future_scheduled_updates(cancels)
if future_updates := self._schedule_updates.setdefault(house_id, []):
_async_cancel_future_scheduled_updates(future_updates)

debouncer = self._update_debounce[house_id]

self._hass.async_create_task(debouncer.async_call())
# Schedule two updates past the debounce time
# to ensure we catch the case where the activity
# api does not update right away and we need to poll
# it again. Sometimes the lock operator or a doorbell
# will not show up in the activity stream right away.
future_updates = self._schedule_updates.setdefault(house_id, [])

async def _update_house_activities(now: datetime) -> None:
await debouncer.async_call()

job = self._update_debounce_jobs[house_id]
for step in (1, 2):
future_updates.append(
async_call_later(
self._hass,
(step * ACTIVITY_DEBOUNCE_COOLDOWN) + 0.1,
_update_house_activities,
job,
)
)
@@ -28,5 +28,5 @@
"documentation": "https://www.home-assistant.io/integrations/august",
"iot_class": "cloud_push",
"loggers": ["pubnub", "yalexs"],
"requirements": ["yalexs==1.5.2", "yalexs-ble==2.2.3"]
"requirements": ["yalexs==1.8.0", "yalexs-ble==2.2.3"]
}
@@ -5,5 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/aws",
"iot_class": "cloud_push",
"loggers": ["aiobotocore", "botocore"],
"requirements": ["aiobotocore==2.1.0"]
"requirements": ["aiobotocore==2.6.0"]
}
@@ -19,6 +19,6 @@
"bluetooth-adapters==0.16.0",
"bluetooth-auto-recovery==1.2.1",
"bluetooth-data-tools==1.8.0",
"dbus-fast==1.92.0"
"dbus-fast==1.93.0"
]
}
|
@@ -62,6 +62,8 @@ async def async_setup_entry(
|
||||
class ShutterContactSensor(SHCEntity, BinarySensorEntity):
|
||||
"""Representation of an SHC shutter contact sensor."""
|
||||
|
||||
_attr_name = None
|
||||
|
||||
def __init__(self, device: SHCDevice, parent_id: str, entry_id: str) -> None:
|
||||
"""Initialize an SHC shutter contact sensor.."""
|
||||
super().__init__(device, parent_id, entry_id)
|
||||
@@ -89,7 +91,6 @@ class BatterySensor(SHCEntity, BinarySensorEntity):
|
||||
def __init__(self, device: SHCDevice, parent_id: str, entry_id: str) -> None:
|
||||
"""Initialize an SHC battery reporting sensor."""
|
||||
super().__init__(device, parent_id, entry_id)
|
||||
self._attr_name = f"{device.name} Battery"
|
||||
self._attr_unique_id = f"{device.serial}_battery"
|
||||
|
||||
@property
|
||||
|
@@ -42,6 +42,7 @@ async def async_setup_entry(
class ShutterControlCover(SHCEntity, CoverEntity):
"""Representation of a SHC shutter control device."""

_attr_name = None
_attr_device_class = CoverDeviceClass.SHUTTER
_attr_supported_features = (
CoverEntityFeature.OPEN
@@ -24,6 +24,7 @@ class SHCBaseEntity(Entity):
"""Base representation of a SHC entity."""

_attr_should_poll = False
_attr_has_entity_name = True

def __init__(
self, device: SHCDevice | SHCIntrusionSystem, parent_id: str, entry_id: str
@@ -31,7 +32,6 @@ class SHCBaseEntity(Entity):
"""Initialize the generic SHC device."""
self._device = device
self._entry_id = entry_id
self._attr_name = device.name

async def async_added_to_hass(self) -> None:
"""Subscribe to SHC events."""
@@ -170,7 +170,6 @@ class TemperatureSensor(SHCEntity, SensorEntity):
def __init__(self, device: SHCDevice, parent_id: str, entry_id: str) -> None:
"""Initialize an SHC temperature reporting sensor."""
super().__init__(device, parent_id, entry_id)
self._attr_name = f"{device.name} Temperature"
self._attr_unique_id = f"{device.serial}_temperature"

@property
@@ -188,7 +187,6 @@ class HumiditySensor(SHCEntity, SensorEntity):
def __init__(self, device: SHCDevice, parent_id: str, entry_id: str) -> None:
"""Initialize an SHC humidity reporting sensor."""
super().__init__(device, parent_id, entry_id)
self._attr_name = f"{device.name} Humidity"
self._attr_unique_id = f"{device.serial}_humidity"

@property
@@ -200,13 +198,13 @@ class HumiditySensor(SHCEntity, SensorEntity):
class PuritySensor(SHCEntity, SensorEntity):
"""Representation of an SHC purity reporting sensor."""

_attr_translation_key = "purity"
_attr_icon = "mdi:molecule-co2"
_attr_native_unit_of_measurement = CONCENTRATION_PARTS_PER_MILLION

def __init__(self, device: SHCDevice, parent_id: str, entry_id: str) -> None:
"""Initialize an SHC purity reporting sensor."""
super().__init__(device, parent_id, entry_id)
self._attr_name = f"{device.name} Purity"
self._attr_unique_id = f"{device.serial}_purity"

@property
@@ -218,10 +216,11 @@ class PuritySensor(SHCEntity, SensorEntity):
class AirQualitySensor(SHCEntity, SensorEntity):
"""Representation of an SHC airquality reporting sensor."""

_attr_translation_key = "air_quality"

def __init__(self, device: SHCDevice, parent_id: str, entry_id: str) -> None:
"""Initialize an SHC airquality reporting sensor."""
super().__init__(device, parent_id, entry_id)
self._attr_name = f"{device.name} Air Quality"
self._attr_unique_id = f"{device.serial}_airquality"

@property
@@ -240,10 +239,11 @@ class AirQualitySensor(SHCEntity, SensorEntity):
class TemperatureRatingSensor(SHCEntity, SensorEntity):
"""Representation of an SHC temperature rating sensor."""

_attr_translation_key = "temperature_rating"

def __init__(self, device: SHCDevice, parent_id: str, entry_id: str) -> None:
"""Initialize an SHC temperature rating sensor."""
super().__init__(device, parent_id, entry_id)
self._attr_name = f"{device.name} Temperature Rating"
self._attr_unique_id = f"{device.serial}_temperature_rating"

@property
@@ -255,12 +255,12 @@ class TemperatureRatingSensor(SHCEntity, SensorEntity):
class CommunicationQualitySensor(SHCEntity, SensorEntity):
"""Representation of an SHC communication quality reporting sensor."""

_attr_translation_key = "communication_quality"
_attr_icon = "mdi:wifi"

def __init__(self, device: SHCDevice, parent_id: str, entry_id: str) -> None:
"""Initialize an SHC communication quality reporting sensor."""
super().__init__(device, parent_id, entry_id)
self._attr_name = f"{device.name} Communication Quality"
self._attr_unique_id = f"{device.serial}_communication_quality"

@property
@@ -272,10 +272,11 @@ class CommunicationQualitySensor(SHCEntity, SensorEntity):
class HumidityRatingSensor(SHCEntity, SensorEntity):
"""Representation of an SHC humidity rating sensor."""

_attr_translation_key = "humidity_rating"

def __init__(self, device: SHCDevice, parent_id: str, entry_id: str) -> None:
"""Initialize an SHC humidity rating sensor."""
super().__init__(device, parent_id, entry_id)
self._attr_name = f"{device.name} Humidity Rating"
self._attr_unique_id = f"{device.serial}_humidity_rating"

@property
@@ -287,10 +288,11 @@ class HumidityRatingSensor(SHCEntity, SensorEntity):
class PurityRatingSensor(SHCEntity, SensorEntity):
"""Representation of an SHC purity rating sensor."""

_attr_translation_key = "purity_rating"

def __init__(self, device: SHCDevice, parent_id: str, entry_id: str) -> None:
"""Initialize an SHC purity rating sensor."""
super().__init__(device, parent_id, entry_id)
self._attr_name = f"{device.name} Purity Rating"
self._attr_unique_id = f"{device.serial}_purity_rating"

@property
@@ -308,7 +310,6 @@ class PowerSensor(SHCEntity, SensorEntity):
def __init__(self, device: SHCDevice, parent_id: str, entry_id: str) -> None:
"""Initialize an SHC power reporting sensor."""
super().__init__(device, parent_id, entry_id)
self._attr_name = f"{device.name} Power"
self._attr_unique_id = f"{device.serial}_power"

@property
@@ -327,7 +328,6 @@ class EnergySensor(SHCEntity, SensorEntity):
def __init__(self, device: SHCDevice, parent_id: str, entry_id: str) -> None:
"""Initialize an SHC energy reporting sensor."""
super().__init__(device, parent_id, entry_id)
self._attr_name = f"{self._device.name} Energy"
self._attr_unique_id = f"{self._device.serial}_energy"

@property
@@ -340,13 +340,13 @@ class ValveTappetSensor(SHCEntity, SensorEntity):
"""Representation of an SHC valve tappet reporting sensor."""

_attr_icon = "mdi:gauge"
_attr_translation_key = "valvetappet"
_attr_state_class = SensorStateClass.MEASUREMENT
_attr_native_unit_of_measurement = PERCENTAGE

def __init__(self, device: SHCDevice, parent_id: str, entry_id: str) -> None:
"""Initialize an SHC valve tappet reporting sensor."""
super().__init__(device, parent_id, entry_id)
self._attr_name = f"{device.name} Valvetappet"
self._attr_unique_id = f"{device.serial}_valvetappet"

@property
@@ -36,5 +36,35 @@
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
},
"flow_title": "Bosch SHC: {name}"
},
"entity": {
"sensor": {
"purity_rating": {
"name": "Purity rating"
},
"purity": {
"name": "Purity"
},
"valvetappet": {
"name": "Valvetappet"
},
"air_quality": {
"name": "Air quality"
},
"temperature_rating": {
"name": "Temperature rating"
},
"humidity_rating": {
"name": "Humidity rating"
},
"communication_quality": {
"name": "Communication quality"
}
},
"switch": {
"routing": {
"name": "Routing"
}
}
}
}
@@ -200,12 +200,12 @@ class SHCRoutingSwitch(SHCEntity, SwitchEntity):
"""Representation of a SHC routing switch."""

_attr_icon = "mdi:wifi"
_attr_translation_key = "routing"
_attr_entity_category = EntityCategory.CONFIG

def __init__(self, device: SHCDevice, parent_id: str, entry_id: str) -> None:
"""Initialize an SHC communication quality reporting sensor."""
super().__init__(device, parent_id, entry_id)
self._attr_name = f"{device.name} Routing"
self._attr_unique_id = f"{device.serial}_routing"

@property
@@ -20,5 +20,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/bthome",
"iot_class": "local_push",
"requirements": ["bthome-ble==3.0.0"]
"requirements": ["bthome-ble==3.1.0"]
}
@@ -2,6 +2,9 @@
from __future__ import annotations

from bthome_ble import SensorDeviceClass as BTHomeSensorDeviceClass, SensorUpdate, Units
from bthome_ble.const import (
ExtendedSensorDeviceClass as BTHomeExtendedSensorDeviceClass,
)

from homeassistant import config_entries
from homeassistant.components.bluetooth.passive_update_processor import (
@@ -66,7 +69,7 @@ SENSOR_DESCRIPTIONS = {
),
# Count (-)
(BTHomeSensorDeviceClass.COUNT, None): SensorEntityDescription(
key=f"{BTHomeSensorDeviceClass.COUNT}",
key=str(BTHomeSensorDeviceClass.COUNT),
state_class=SensorStateClass.MEASUREMENT,
),
# CO2 (parts per million)
@@ -186,7 +189,7 @@ SENSOR_DESCRIPTIONS = {
),
# Packet Id (-)
(BTHomeSensorDeviceClass.PACKET_ID, None): SensorEntityDescription(
key=f"{BTHomeSensorDeviceClass.PACKET_ID}",
key=str(BTHomeSensorDeviceClass.PACKET_ID),
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
@@ -260,12 +263,16 @@ SENSOR_DESCRIPTIONS = {
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
state_class=SensorStateClass.MEASUREMENT,
),
# Text (-)
(BTHomeExtendedSensorDeviceClass.TEXT, None): SensorEntityDescription(
key=str(BTHomeExtendedSensorDeviceClass.TEXT),
),
# Timestamp (datetime object)
(
BTHomeSensorDeviceClass.TIMESTAMP,
None,
): SensorEntityDescription(
key=f"{BTHomeSensorDeviceClass.TIMESTAMP}",
key=str(BTHomeSensorDeviceClass.TIMESTAMP),
device_class=SensorDeviceClass.TIMESTAMP,
state_class=SensorStateClass.MEASUREMENT,
),
@@ -274,7 +281,7 @@ SENSOR_DESCRIPTIONS = {
BTHomeSensorDeviceClass.UV_INDEX,
None,
): SensorEntityDescription(
key=f"{BTHomeSensorDeviceClass.UV_INDEX}",
key=str(BTHomeSensorDeviceClass.UV_INDEX),
state_class=SensorStateClass.MEASUREMENT,
),
# Volatile organic Compounds (VOC) (µg/m3)
@@ -2,7 +2,7 @@
from __future__ import annotations

from dataclasses import dataclass
from datetime import datetime, timedelta, timezone
from datetime import UTC, datetime, timedelta
import logging
from typing import final

@@ -110,7 +110,7 @@ class DateTimeEntity(Entity):
"which is missing timezone information"
)

return value.astimezone(timezone.utc).isoformat(timespec="seconds")
return value.astimezone(UTC).isoformat(timespec="seconds")

@property
def native_value(self) -> datetime | None:
@@ -1,7 +1,7 @@
"""Demo platform that offers a fake date/time entity."""
from __future__ import annotations

from datetime import datetime, timezone
from datetime import UTC, datetime

from homeassistant.components.datetime import DateTimeEntity
from homeassistant.config_entries import ConfigEntry
@@ -23,7 +23,7 @@ async def async_setup_entry(
DemoDateTime(
"datetime",
"Date and Time",
datetime(2020, 1, 1, 12, 0, 0, tzinfo=timezone.utc),
datetime(2020, 1, 1, 12, 0, 0, tzinfo=UTC),
"mdi:calendar-clock",
False,
),
@@ -9,6 +9,7 @@ from devolo_plc_api import Device
from devolo_plc_api.device_api import (
ConnectedStationInfo,
NeighborAPInfo,
UpdateFirmwareCheck,
WifiGuestAccessGet,
)
from devolo_plc_api.exceptions.device import (
@@ -37,6 +38,7 @@ from .const import (
DOMAIN,
LONG_UPDATE_INTERVAL,
NEIGHBORING_WIFI_NETWORKS,
REGULAR_FIRMWARE,
SHORT_UPDATE_INTERVAL,
SWITCH_GUEST_WIFI,
SWITCH_LEDS,
@@ -45,7 +47,9 @@ from .const import (
_LOGGER = logging.getLogger(__name__)

async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry( # noqa: C901
hass: HomeAssistant, entry: ConfigEntry
) -> bool:
"""Set up devolo Home Network from a config entry."""
hass.data.setdefault(DOMAIN, {})
zeroconf_instance = await zeroconf.async_get_async_instance(hass)
@@ -66,6 +70,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

hass.data[DOMAIN][entry.entry_id] = {"device": device}

async def async_update_firmware_available() -> UpdateFirmwareCheck:
"""Fetch data from API endpoint."""
assert device.device
try:
async with asyncio.timeout(10):
return await device.device.async_check_firmware_available()
except DeviceUnavailable as err:
raise UpdateFailed(err) from err

async def async_update_connected_plc_devices() -> LogicalNetwork:
"""Fetch data from API endpoint."""
assert device.plcnet
@@ -134,6 +147,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
update_method=async_update_led_status,
update_interval=SHORT_UPDATE_INTERVAL,
)
if device.device and "update" in device.device.features:
coordinators[REGULAR_FIRMWARE] = DataUpdateCoordinator(
hass,
_LOGGER,
name=REGULAR_FIRMWARE,
update_method=async_update_firmware_available,
update_interval=LONG_UPDATE_INTERVAL,
)
if device.device and "wifi1" in device.device.features:
coordinators[CONNECTED_WIFI_CLIENTS] = DataUpdateCoordinator(
hass,
@@ -192,4 +213,6 @@ def platforms(device: Device) -> set[Platform]:
supported_platforms.add(Platform.BINARY_SENSOR)
if device.device and "wifi1" in device.device.features:
supported_platforms.add(Platform.DEVICE_TRACKER)
if device.device and "update" in device.device.features:
supported_platforms.add(Platform.UPDATE)
return supported_platforms
@@ -23,6 +23,7 @@ CONNECTED_WIFI_CLIENTS = "connected_wifi_clients"
IDENTIFY = "identify"
NEIGHBORING_WIFI_NETWORKS = "neighboring_wifi_networks"
PAIRING = "pairing"
REGULAR_FIRMWARE = "regular_firmware"
RESTART = "restart"
START_WPS = "start_wps"
SWITCH_GUEST_WIFI = "switch_guest_wifi"
132  homeassistant/components/devolo_home_network/update.py (new file)
@@ -0,0 +1,132 @@
"""Platform for update integration."""
from __future__ import annotations

from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from typing import Any

from devolo_plc_api.device import Device
from devolo_plc_api.device_api import UpdateFirmwareCheck
from devolo_plc_api.exceptions.device import DevicePasswordProtected, DeviceUnavailable

from homeassistant.components.update import (
UpdateDeviceClass,
UpdateEntity,
UpdateEntityDescription,
UpdateEntityFeature,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

from .const import DOMAIN, REGULAR_FIRMWARE
from .entity import DevoloCoordinatorEntity

@dataclass
class DevoloUpdateRequiredKeysMixin:
"""Mixin for required keys."""

latest_version: Callable[[UpdateFirmwareCheck], str]
update_func: Callable[[Device], Awaitable[bool]]

@dataclass
class DevoloUpdateEntityDescription(
UpdateEntityDescription, DevoloUpdateRequiredKeysMixin
):
"""Describes devolo update entity."""

UPDATE_TYPES: dict[str, DevoloUpdateEntityDescription] = {
REGULAR_FIRMWARE: DevoloUpdateEntityDescription(
key=REGULAR_FIRMWARE,
device_class=UpdateDeviceClass.FIRMWARE,
entity_category=EntityCategory.CONFIG,
latest_version=lambda data: data.new_firmware_version.split("_")[0],
update_func=lambda device: device.device.async_start_firmware_update(), # type: ignore[union-attr]
),
}

async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Get all devices and sensors and setup them via config entry."""
device: Device = hass.data[DOMAIN][entry.entry_id]["device"]
coordinators: dict[str, DataUpdateCoordinator[Any]] = hass.data[DOMAIN][
entry.entry_id
]["coordinators"]

async_add_entities(
[
DevoloUpdateEntity(
entry,
coordinators[REGULAR_FIRMWARE],
UPDATE_TYPES[REGULAR_FIRMWARE],
device,
)
]
)

class DevoloUpdateEntity(DevoloCoordinatorEntity, UpdateEntity):
"""Representation of a devolo update."""

_attr_supported_features = (
UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS
)

entity_description: DevoloUpdateEntityDescription

def __init__(
self,
entry: ConfigEntry,
coordinator: DataUpdateCoordinator,
description: DevoloUpdateEntityDescription,
device: Device,
) -> None:
"""Initialize entity."""
self.entity_description = description
super().__init__(entry, coordinator, device)
self._attr_translation_key = None
self._in_progress_old_version: str | None = None

@property
def installed_version(self) -> str:
"""Version currently in use."""
return self.device.firmware_version

@property
def latest_version(self) -> str:
"""Latest version available for install."""
if latest_version := self.entity_description.latest_version(
self.coordinator.data
):
return latest_version
return self.device.firmware_version

@property
def in_progress(self) -> bool:
"""Update installation in progress."""
return self._in_progress_old_version == self.installed_version

async def async_install(
self, version: str | None, backup: bool, **kwargs: Any
) -> None:
"""Turn the entity on."""
self._in_progress_old_version = self.installed_version
try:
await self.entity_description.update_func(self.device)
except DevicePasswordProtected as ex:
self.entry.async_start_reauth(self.hass)
raise HomeAssistantError(
f"Device {self.entry.title} require re-authenticatication to set or change the password"
) from ex
except DeviceUnavailable as ex:
raise HomeAssistantError(
f"Device {self.entry.title} did not respond"
) from ex
@@ -5,6 +5,7 @@ from homeassistant.components.sensor import SensorEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_UNIT_OF_MEASUREMENT, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
@@ -25,8 +26,10 @@ async def async_setup_entry(
unit_of_measurement = config_entry.options[CONF_UNIT_OF_MEASUREMENT]
async_add_entities(
[
DexcomGlucoseTrendSensor(coordinator, username),
DexcomGlucoseValueSensor(coordinator, username, unit_of_measurement),
DexcomGlucoseTrendSensor(coordinator, username, config_entry.entry_id),
DexcomGlucoseValueSensor(
coordinator, username, config_entry.entry_id, unit_of_measurement
),
],
False,
)
@@ -35,30 +38,37 @@ async def async_setup_entry(
class DexcomSensorEntity(CoordinatorEntity, SensorEntity):
"""Base Dexcom sensor entity."""

_attr_has_entity_name = True

def __init__(
self, coordinator: DataUpdateCoordinator, username: str, key: str
self, coordinator: DataUpdateCoordinator, username: str, entry_id: str, key: str
) -> None:
"""Initialize the sensor."""
super().__init__(coordinator)
self._attr_unique_id = f"{username}-{key}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, entry_id)},
name=username,
)

class DexcomGlucoseValueSensor(DexcomSensorEntity):
"""Representation of a Dexcom glucose value sensor."""

_attr_icon = GLUCOSE_VALUE_ICON
_attr_translation_key = "glucose_value"

def __init__(
self,
coordinator: DataUpdateCoordinator,
username: str,
entry_id: str,
unit_of_measurement: str,
) -> None:
"""Initialize the sensor."""
super().__init__(coordinator, username, "value")
super().__init__(coordinator, username, entry_id, "value")
self._attr_native_unit_of_measurement = unit_of_measurement
self._key = "mg_dl" if unit_of_measurement == MG_DL else "mmol_l"
self._attr_name = f"{DOMAIN}_{username}_glucose_value"

@property
def native_value(self):
@@ -71,10 +81,13 @@ class DexcomGlucoseValueSensor(DexcomSensorEntity):
class DexcomGlucoseTrendSensor(DexcomSensorEntity):
"""Representation of a Dexcom glucose trend sensor."""

def __init__(self, coordinator: DataUpdateCoordinator, username: str) -> None:
_attr_translation_key = "glucose_trend"

def __init__(
self, coordinator: DataUpdateCoordinator, username: str, entry_id: str
) -> None:
"""Initialize the sensor."""
super().__init__(coordinator, username, "trend")
self._attr_name = f"{DOMAIN}_{username}_glucose_trend"
super().__init__(coordinator, username, entry_id, "trend")

@property
def icon(self):
@@ -28,5 +28,15 @@
}
}
}
},
"entity": {
"sensor": {
"glucose_value": {
"name": "Glucose value"
},
"glucose_trend": {
"name": "Glucose trend"
}
}
}
}
@@ -7,7 +7,6 @@ from typing import Any

from doorbirdpy import DoorBird
import requests
import voluptuous as vol

from homeassistant.components import persistent_notification
from homeassistant.config_entries import ConfigEntry
@@ -32,19 +31,6 @@ _LOGGER = logging.getLogger(__name__)

CONF_CUSTOM_URL = "hass_url_override"

DEVICE_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_TOKEN): cv.string,
vol.Optional(CONF_EVENTS, default=[]): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_CUSTOM_URL): cv.string,
vol.Optional(CONF_NAME): cv.string,
}
)

CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)

@@ -157,7 +143,9 @@ async def _update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:

@callback
def _async_import_options_from_data_if_missing(hass: HomeAssistant, entry: ConfigEntry):
def _async_import_options_from_data_if_missing(
hass: HomeAssistant, entry: ConfigEntry
) -> None:
options = dict(entry.options)
modified = False
for importable_option in (CONF_EVENTS,):
@@ -87,7 +87,7 @@ class DoorBirdCamera(DoorBirdEntity, Camera):
self._last_update = datetime.datetime.min
self._attr_unique_id = f"{self._mac_addr}_{camera_id}"

async def stream_source(self):
async def stream_source(self) -> str | None:
"""Return the stream source."""
return self._stream_url
@@ -23,7 +23,9 @@ from .util import get_mac_address_from_door_station_info
_LOGGER = logging.getLogger(__name__)

def _schema_with_defaults(host=None, name=None):
def _schema_with_defaults(
host: str | None = None, name: str | None = None
) -> vol.Schema:
return vol.Schema(
{
vol.Required(CONF_HOST, default=host): str,
@@ -39,7 +41,9 @@ def _check_device(device: DoorBird) -> tuple[tuple[bool, int], dict[str, Any]]:
return device.ready(), device.info()

async def validate_input(hass: core.HomeAssistant, data):
async def validate_input(
hass: core.HomeAssistant, data: dict[str, Any]
) -> dict[str, str]:
"""Validate the user input allows us to connect."""
device = DoorBird(data[CONF_HOST], data[CONF_USERNAME], data[CONF_PASSWORD])
try:
@@ -78,13 +82,15 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):

VERSION = 1

def __init__(self):
def __init__(self) -> None:
"""Initialize the DoorBird config flow."""
self.discovery_schema = {}
self.discovery_schema: vol.Schema | None = None

async def async_step_user(self, user_input=None):
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Handle the initial step."""
errors = {}
errors: dict[str, str] = {}
if user_input is not None:
info, errors = await self._async_validate_or_error(user_input)
if not errors:
@@ -128,7 +134,9 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):

return await self.async_step_user()

async def _async_validate_or_error(self, user_input):
async def _async_validate_or_error(
self, user_input: dict[str, Any]
) -> tuple[dict[str, Any], dict[str, Any]]:
"""Validate doorbird or error."""
errors = {}
info = {}
@@ -159,7 +167,9 @@ class OptionsFlowHandler(config_entries.OptionsFlow):
"""Initialize options flow."""
self.config_entry = config_entry

async def async_step_init(self, user_input=None):
async def async_step_init(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Handle options flow."""
if user_input is not None:
events = [event.strip() for event in user_input[CONF_EVENTS].split(",")]
@@ -2,7 +2,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
from typing import Any, cast
|
||||
|
||||
from doorbirdpy import DoorBird
|
||||
|
||||
@@ -131,7 +131,7 @@ class ConfiguredDoorBird:
|
||||
|
||||
for fav_id in favs["http"]:
|
||||
if favs["http"][fav_id]["value"] == url:
|
||||
return fav_id
|
||||
return cast(str, fav_id)
|
||||
|
||||
return None
|
||||
|
||||
|
@@ -1,6 +1,8 @@
"""Describe logbook events."""
from __future__ import annotations

from collections.abc import Callable

from homeassistant.components.logbook import (
LOGBOOK_ENTRY_ENTITY_ID,
LOGBOOK_ENTRY_MESSAGE,

@@ -14,11 +16,16 @@ from .models import DoorBirdData

@callback
def async_describe_events(hass: HomeAssistant, async_describe_event):
def async_describe_events(
hass: HomeAssistant,
async_describe_event: Callable[
[str, str, Callable[[Event], dict[str, str | None]]], None
],
) -> None:
"""Describe logbook events."""

@callback
def async_describe_logbook_event(event: Event):
def async_describe_logbook_event(event: Event) -> dict[str, str | None]:
"""Describe a logbook event."""
return {
LOGBOOK_ENTRY_NAME: "Doorbird",

@@ -2,7 +2,6 @@
from __future__ import annotations

from http import HTTPStatus
import logging

from aiohttp import web

@@ -13,8 +12,6 @@ from .const import API_URL, DOMAIN
from .device import async_reset_device_favorites
from .util import get_door_station_by_token

_LOGGER = logging.getLogger(__name__)

class DoorBirdRequestView(HomeAssistantView):
"""Provide a page for the device to call."""
@@ -6,6 +6,7 @@ from pyenphase import Envoy
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.httpx_client import get_async_client

@@ -24,6 +25,17 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
if not entry.unique_id:
hass.config_entries.async_update_entry(entry, unique_id=envoy.serial_number)

if entry.unique_id != envoy.serial_number:
# If the serial number of the device does not match the unique_id
# of the config entry, it likely means the DHCP lease has expired
# and the device has been assigned a new IP address. We need to
# wait for the next discovery to find the device at its new address
# and update the config entry so we do not mix up devices.
raise ConfigEntryNotReady(
f"Unexpected device found at {host}; expected {entry.unique_id}, "
f"found {envoy.serial_number}"
)

hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator

await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
@@ -5,7 +5,6 @@ from collections.abc import Callable
from dataclasses import dataclass

from pyenphase import EnvoyEncharge, EnvoyEnpower
from pyenphase.models.dry_contacts import DryContactStatus

from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,

@@ -53,12 +52,6 @@ ENCHARGE_SENSORS = (
),
)

RELAY_STATUS_SENSOR = BinarySensorEntityDescription(
key="relay_status",
translation_key="relay",
icon="mdi:power-plug",
)

@dataclass
class EnvoyEnpowerRequiredKeysMixin:

@@ -114,11 +107,6 @@ async def async_setup_entry(
for description in ENPOWER_SENSORS
)

if envoy_data.dry_contact_status:
entities.extend(
EnvoyRelayBinarySensorEntity(coordinator, RELAY_STATUS_SENSOR, relay)
for relay in envoy_data.dry_contact_status
)
async_add_entities(entities)

@@ -190,34 +178,3 @@ class EnvoyEnpowerBinarySensorEntity(EnvoyBaseBinarySensorEntity):
enpower = self.data.enpower
assert enpower is not None
return self.entity_description.value_fn(enpower)

class EnvoyRelayBinarySensorEntity(EnvoyBaseBinarySensorEntity):
"""Defines an Enpower dry contact binary_sensor entity."""

def __init__(
self,
coordinator: EnphaseUpdateCoordinator,
description: BinarySensorEntityDescription,
relay_id: str,
) -> None:
"""Init the Enpower base entity."""
super().__init__(coordinator, description)
enpower = self.data.enpower
assert enpower is not None
self._relay_id = relay_id
self._attr_unique_id = f"{enpower.serial_number}_relay_{relay_id}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, relay_id)},
manufacturer="Enphase",
model="Dry contact relay",
name=self.data.dry_contact_settings[relay_id].load_name,
sw_version=str(enpower.firmware_version),
via_device=(DOMAIN, enpower.serial_number),
)

@property
def is_on(self) -> bool:
"""Return the state of the Enpower binary_sensor."""
relay = self.data.dry_contact_status[self._relay_id]
return relay.status == DryContactStatus.CLOSED
@@ -6,7 +6,7 @@
"documentation": "https://www.home-assistant.io/integrations/enphase_envoy",
"iot_class": "local_polling",
"loggers": ["pyenphase"],
"requirements": ["pyenphase==1.6.0"],
"requirements": ["pyenphase==1.8.1"],
"zeroconf": [
{
"type": "_enphase-envoy._tcp.local."
@@ -31,9 +31,6 @@
},
"grid_status": {
"name": "Grid status"
},
"relay": {
"name": "Relay status"
}
},
"number": {
@@ -6,7 +6,8 @@ from dataclasses import dataclass
import logging
from typing import Any

from pyenphase import Envoy, EnvoyEnpower
from pyenphase import Envoy, EnvoyDryContactStatus, EnvoyEnpower
from pyenphase.models.dry_contacts import DryContactStatus

from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.config_entries import ConfigEntry

@@ -37,6 +38,22 @@ class EnvoyEnpowerSwitchEntityDescription(
"""Describes an Envoy Enpower switch entity."""

@dataclass
class EnvoyDryContactRequiredKeysMixin:
"""Mixin for required keys."""

value_fn: Callable[[EnvoyDryContactStatus], bool]
turn_on_fn: Callable[[Envoy, str], Coroutine[Any, Any, dict[str, Any]]]
turn_off_fn: Callable[[Envoy, str], Coroutine[Any, Any, dict[str, Any]]]

@dataclass
class EnvoyDryContactSwitchEntityDescription(
SwitchEntityDescription, EnvoyDryContactRequiredKeysMixin
):
"""Describes an Envoy Enpower dry contact switch entity."""

ENPOWER_GRID_SWITCH = EnvoyEnpowerSwitchEntityDescription(
key="mains_admin_state",
translation_key="grid_enabled",

@@ -45,6 +62,13 @@ ENPOWER_GRID_SWITCH = EnvoyEnpowerSwitchEntityDescription(
turn_off_fn=lambda envoy: envoy.go_off_grid(),
)

RELAY_STATE_SWITCH = EnvoyDryContactSwitchEntityDescription(
key="relay_status",
value_fn=lambda dry_contact: dry_contact.status == DryContactStatus.CLOSED,
turn_on_fn=lambda envoy, id: envoy.close_dry_contact(id),
turn_off_fn=lambda envoy, id: envoy.open_dry_contact(id),
)

async def async_setup_entry(
hass: HomeAssistant,

@@ -64,6 +88,13 @@ async def async_setup_entry(
)
]
)

if envoy_data.dry_contact_status:
entities.extend(
EnvoyDryContactSwitchEntity(coordinator, RELAY_STATE_SWITCH, relay)
for relay in envoy_data.dry_contact_status
)

async_add_entities(entities)

@@ -109,3 +140,51 @@ class EnvoyEnpowerSwitchEntity(EnvoyBaseEntity, SwitchEntity):
"""Turn off the Enpower switch."""
await self.entity_description.turn_off_fn(self.envoy)
await self.coordinator.async_request_refresh()

class EnvoyDryContactSwitchEntity(EnvoyBaseEntity, SwitchEntity):
"""Representation of an Enphase dry contact switch entity."""

entity_description: EnvoyDryContactSwitchEntityDescription
_attr_name = None

def __init__(
self,
coordinator: EnphaseUpdateCoordinator,
description: EnvoyDryContactSwitchEntityDescription,
relay_id: str,
) -> None:
"""Initialize the Enphase dry contact switch entity."""
super().__init__(coordinator, description)
self.envoy = coordinator.envoy
enpower = self.data.enpower
assert enpower is not None
self.relay_id = relay_id
serial_number = enpower.serial_number
self._attr_unique_id = f"{serial_number}_relay_{relay_id}_{description.key}"
relay = self.data.dry_contact_settings[relay_id]
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, relay_id)},
manufacturer="Enphase",
model="Dry contact relay",
name=relay.load_name,
sw_version=str(enpower.firmware_version),
via_device=(DOMAIN, enpower.serial_number),
)

@property
def is_on(self) -> bool:
"""Return the state of the dry contact."""
relay = self.data.dry_contact_status[self.relay_id]
assert relay is not None
return self.entity_description.value_fn(relay)

async def async_turn_on(self):
"""Turn on (close) the dry contact."""
if await self.entity_description.turn_on_fn(self.envoy, self.relay_id):
self.async_write_ha_state()

async def async_turn_off(self):
"""Turn off (open) the dry contact."""
if await self.entity_description.turn_off_fn(self.envoy, self.relay_id):
self.async_write_ha_state()
@@ -22,8 +22,8 @@ from homeassistant.components.weather import (
ATTR_FORECAST_PRECIPITATION_PROBABILITY,
ATTR_FORECAST_TIME,
DOMAIN as WEATHER_DOMAIN,
CoordinatorWeatherEntity,
Forecast,
WeatherEntity,
WeatherEntityFeature,
)
from homeassistant.config_entries import ConfigEntry

@@ -33,10 +33,9 @@ from homeassistant.const import (
UnitOfSpeed,
UnitOfTemperature,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.util import dt as dt_util

from . import device_info

@@ -87,7 +86,7 @@ def _calculate_unique_id(config_entry_unique_id: str | None, hourly: bool) -> st
return f"{config_entry_unique_id}{'-hourly' if hourly else '-daily'}"

class ECWeather(CoordinatorEntity, WeatherEntity):
class ECWeather(CoordinatorWeatherEntity):
"""Representation of a weather condition."""

_attr_has_entity_name = True

@@ -112,15 +111,6 @@ class ECWeather(CoordinatorEntity, WeatherEntity):
self._hourly = hourly
self._attr_device_info = device_info(coordinator.config_entry)

@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
super()._handle_coordinator_update()
assert self.platform.config_entry
self.platform.config_entry.async_create_task(
self.hass, self.async_update_listeners(("daily", "hourly"))
)

@property
def native_temperature(self):
"""Return the temperature."""
@@ -18,7 +18,6 @@ from aioesphomeapi import (
|
||||
UserServiceArgType,
|
||||
VoiceAssistantEventType,
|
||||
)
|
||||
from aioesphomeapi.model import VoiceAssistantCommandFlag
|
||||
from awesomeversion import AwesomeVersion
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -320,7 +319,7 @@ class ESPHomeManager:
|
||||
self.voice_assistant_udp_server = None
|
||||
|
||||
async def _handle_pipeline_start(
|
||||
self, conversation_id: str, use_vad: int
|
||||
self, conversation_id: str, flags: int
|
||||
) -> int | None:
|
||||
"""Start a voice assistant pipeline."""
|
||||
if self.voice_assistant_udp_server is not None:
|
||||
@@ -340,12 +339,10 @@ class ESPHomeManager:
|
||||
voice_assistant_udp_server.run_pipeline(
|
||||
device_id=self.device_id,
|
||||
conversation_id=conversation_id or None,
|
||||
use_vad=VoiceAssistantCommandFlag(use_vad)
|
||||
== VoiceAssistantCommandFlag.USE_VAD,
|
||||
flags=flags,
|
||||
),
|
||||
"esphome.voice_assistant_udp_server.run_pipeline",
|
||||
)
|
||||
self.entry_data.async_set_assist_pipeline_state(True)
|
||||
|
||||
return port
|
||||
|
||||
@@ -357,23 +354,65 @@ class ESPHomeManager:
|
||||
async def on_connect(self) -> None:
|
||||
"""Subscribe to states and list entities on successful API login."""
|
||||
entry = self.entry
|
||||
unique_id = entry.unique_id
|
||||
entry_data = self.entry_data
|
||||
reconnect_logic = self.reconnect_logic
|
||||
assert reconnect_logic is not None, "Reconnect logic must be set"
|
||||
hass = self.hass
|
||||
cli = self.cli
|
||||
stored_device_name = entry.data.get(CONF_DEVICE_NAME)
|
||||
unique_id_is_mac_address = unique_id and ":" in unique_id
|
||||
try:
|
||||
device_info = await cli.device_info()
|
||||
except APIConnectionError as err:
|
||||
_LOGGER.warning("Error getting device info for %s: %s", self.host, err)
|
||||
# Re-connection logic will trigger after this
|
||||
await cli.disconnect()
|
||||
return
|
||||
|
||||
# Migrate config entry to new unique ID if necessary
|
||||
device_mac = format_mac(device_info.mac_address)
|
||||
mac_address_matches = unique_id == device_mac
|
||||
#
|
||||
# Migrate config entry to new unique ID if the current
|
||||
# unique id is not a mac address.
|
||||
#
|
||||
# This was changed in 2023.1
|
||||
if entry.unique_id != format_mac(device_info.mac_address):
|
||||
hass.config_entries.async_update_entry(
|
||||
entry, unique_id=format_mac(device_info.mac_address)
|
||||
if not mac_address_matches and not unique_id_is_mac_address:
|
||||
hass.config_entries.async_update_entry(entry, unique_id=device_mac)
|
||||
|
||||
if not mac_address_matches and unique_id_is_mac_address:
|
||||
# If the unique id is a mac address
|
||||
# and does not match we have the wrong device and we need
|
||||
# to abort the connection. This can happen if the DHCP
|
||||
# server changes the IP address of the device and we end up
|
||||
# connecting to the wrong device.
|
||||
_LOGGER.error(
|
||||
"Unexpected device found at %s; "
|
||||
"expected `%s` with mac address `%s`, "
|
||||
"found `%s` with mac address `%s`",
|
||||
self.host,
|
||||
stored_device_name,
|
||||
unique_id,
|
||||
device_info.name,
|
||||
device_mac,
|
||||
)
|
||||
await cli.disconnect()
|
||||
await reconnect_logic.stop()
|
||||
# We don't want to reconnect to the wrong device
|
||||
# so we stop the reconnect logic and disconnect
|
||||
# the client. When discovery finds the new IP address
|
||||
# for the device, the config entry will be updated
|
||||
# and we will connect to the correct device when
|
||||
# the config entry gets reloaded by the discovery
|
||||
# flow.
|
||||
return
|
||||
|
||||
# Make sure we have the correct device name stored
|
||||
# so we can map the device to ESPHome Dashboard config
|
||||
if entry.data.get(CONF_DEVICE_NAME) != device_info.name:
|
||||
# If we got here, we know the mac address matches or we
|
||||
# did a migration to the mac address so we can update
|
||||
# the device name.
|
||||
if stored_device_name != device_info.name:
|
||||
hass.config_entries.async_update_entry(
|
||||
entry, data={**entry.data, CONF_DEVICE_NAME: device_info.name}
|
||||
)
|
||||
@@ -388,9 +427,8 @@ class ESPHomeManager:
|
||||
# We use this to determine if a deep sleep device should
|
||||
# be marked as unavailable or not.
|
||||
entry_data.expected_disconnect = True
|
||||
if entry_data.device_info.name:
|
||||
assert reconnect_logic is not None, "Reconnect logic must be set"
|
||||
reconnect_logic.name = entry_data.device_info.name
|
||||
if device_info.name:
|
||||
reconnect_logic.name = device_info.name
|
||||
|
||||
if device_info.bluetooth_proxy_feature_flags_compat(cli.api_version):
|
||||
entry_data.disconnect_callbacks.append(
|
||||
@@ -402,6 +440,7 @@ class ESPHomeManager:
|
||||
self.device_id = _async_setup_device_registry(hass, entry, entry_data)
|
||||
entry_data.async_update_device_state(hass)
|
||||
|
||||
try:
|
||||
entity_infos, services = await cli.list_entities_services()
|
||||
await entry_data.async_update_static_infos(hass, entry, entity_infos)
|
||||
await _setup_services(hass, entry_data, services)
|
||||
|
@@ -2,26 +2,23 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections import deque
|
||||
from collections.abc import AsyncIterable, Callable, MutableSequence, Sequence
|
||||
from collections.abc import AsyncIterable, Callable
|
||||
import logging
|
||||
import socket
|
||||
from typing import cast
|
||||
|
||||
from aioesphomeapi import VoiceAssistantEventType
|
||||
from aioesphomeapi import VoiceAssistantCommandFlag, VoiceAssistantEventType
|
||||
|
||||
from homeassistant.components import stt, tts
|
||||
from homeassistant.components.assist_pipeline import (
|
||||
PipelineEvent,
|
||||
PipelineEventType,
|
||||
PipelineNotFound,
|
||||
PipelineStage,
|
||||
async_pipeline_from_audio_stream,
|
||||
select as pipeline_select,
|
||||
)
|
||||
from homeassistant.components.assist_pipeline.vad import (
|
||||
VadSensitivity,
|
||||
VoiceCommandSegmenter,
|
||||
)
|
||||
from homeassistant.components.assist_pipeline.error import WakeWordDetectionError
|
||||
from homeassistant.components.media_player import async_process_play_media_url
|
||||
from homeassistant.core import Context, HomeAssistant, callback
|
||||
|
||||
@@ -47,6 +44,8 @@ _VOICE_ASSISTANT_EVENT_TYPES: EsphomeEnumMapper[
|
||||
VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_END: PipelineEventType.INTENT_END,
|
||||
VoiceAssistantEventType.VOICE_ASSISTANT_TTS_START: PipelineEventType.TTS_START,
|
||||
VoiceAssistantEventType.VOICE_ASSISTANT_TTS_END: PipelineEventType.TTS_END,
|
||||
VoiceAssistantEventType.VOICE_ASSISTANT_WAKE_WORD_START: PipelineEventType.WAKE_WORD_START,
|
||||
VoiceAssistantEventType.VOICE_ASSISTANT_WAKE_WORD_END: PipelineEventType.WAKE_WORD_END,
|
||||
}
|
||||
)
|
||||
|
||||
@@ -72,6 +71,7 @@ class VoiceAssistantUDPServer(asyncio.DatagramProtocol):
|
||||
self.hass = hass
|
||||
|
||||
assert entry_data.device_info is not None
|
||||
self.entry_data = entry_data
|
||||
self.device_info = entry_data.device_info
|
||||
|
||||
self.queue: asyncio.Queue[bytes] = asyncio.Queue()
|
||||
@@ -159,7 +159,9 @@ class VoiceAssistantUDPServer(asyncio.DatagramProtocol):
|
||||
|
||||
data_to_send = None
|
||||
error = False
|
||||
if event_type == VoiceAssistantEventType.VOICE_ASSISTANT_STT_END:
|
||||
if event_type == VoiceAssistantEventType.VOICE_ASSISTANT_STT_START:
|
||||
self.entry_data.async_set_assist_pipeline_state(True)
|
||||
elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_STT_END:
|
||||
assert event.data is not None
|
||||
data_to_send = {"text": event.data["stt_output"]["text"]}
|
||||
elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_END:
|
||||
@@ -183,121 +185,33 @@ class VoiceAssistantUDPServer(asyncio.DatagramProtocol):
|
||||
)
|
||||
else:
|
||||
self._tts_done.set()
|
||||
elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_WAKE_WORD_END:
|
||||
assert event.data is not None
|
||||
if not event.data["wake_word_output"]:
|
||||
event_type = VoiceAssistantEventType.VOICE_ASSISTANT_ERROR
|
||||
data_to_send = {
|
||||
"code": "no_wake_word",
|
||||
"message": "No wake word detected",
|
||||
}
|
||||
error = True
|
||||
elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_ERROR:
|
||||
assert event.data is not None
|
||||
data_to_send = {
|
||||
"code": event.data["code"],
|
||||
"message": event.data["message"],
|
||||
}
|
||||
self._tts_done.set()
|
||||
error = True
|
||||
|
||||
self.handle_event(event_type, data_to_send)
|
||||
if error:
|
||||
self._tts_done.set()
|
||||
self.handle_finished()
|
||||
|
||||
async def _wait_for_speech(
|
||||
self,
|
||||
segmenter: VoiceCommandSegmenter,
|
||||
chunk_buffer: MutableSequence[bytes],
|
||||
) -> bool:
|
||||
"""Buffer audio chunks until speech is detected.
|
||||
|
||||
Raises asyncio.TimeoutError if no audio data is retrievable from the queue (device stops sending packets / networking issue).
|
||||
|
||||
Returns True if speech was detected
|
||||
Returns False if the connection was stopped gracefully (b"" put onto the queue).
|
||||
"""
|
||||
# Timeout if no audio comes in for a while.
|
||||
async with asyncio.timeout(self.audio_timeout):
|
||||
chunk = await self.queue.get()
|
||||
|
||||
while chunk:
|
||||
segmenter.process(chunk)
|
||||
# Buffer the data we have taken from the queue
|
||||
chunk_buffer.append(chunk)
|
||||
if segmenter.in_command:
|
||||
return True
|
||||
|
||||
async with asyncio.timeout(self.audio_timeout):
|
||||
chunk = await self.queue.get()
|
||||
|
||||
# If chunk is falsey, `stop()` was called
|
||||
return False
|
||||
|
||||
async def _segment_audio(
|
||||
self,
|
||||
segmenter: VoiceCommandSegmenter,
|
||||
chunk_buffer: Sequence[bytes],
|
||||
) -> AsyncIterable[bytes]:
|
||||
"""Yield audio chunks until voice command has finished.
|
||||
|
||||
Raises asyncio.TimeoutError if no audio data is retrievable from the queue.
|
||||
"""
|
||||
# Buffered chunks first
|
||||
for buffered_chunk in chunk_buffer:
|
||||
yield buffered_chunk
|
||||
|
||||
# Timeout if no audio comes in for a while.
|
||||
async with asyncio.timeout(self.audio_timeout):
|
||||
chunk = await self.queue.get()
|
||||
|
||||
while chunk:
|
||||
if not segmenter.process(chunk):
|
||||
# Voice command is finished
|
||||
break
|
||||
|
||||
yield chunk
|
||||
|
||||
async with asyncio.timeout(self.audio_timeout):
|
||||
chunk = await self.queue.get()
|
||||
|
||||
async def _iterate_packets_with_vad(
|
||||
self, pipeline_timeout: float, silence_seconds: float
|
||||
) -> Callable[[], AsyncIterable[bytes]] | None:
|
||||
segmenter = VoiceCommandSegmenter(silence_seconds=silence_seconds)
|
||||
chunk_buffer: deque[bytes] = deque(maxlen=100)
|
||||
try:
|
||||
async with asyncio.timeout(pipeline_timeout):
|
||||
speech_detected = await self._wait_for_speech(segmenter, chunk_buffer)
|
||||
if not speech_detected:
|
||||
_LOGGER.debug(
|
||||
"Device stopped sending audio before speech was detected"
|
||||
)
|
||||
self.handle_finished()
|
||||
return None
|
||||
except asyncio.TimeoutError:
|
||||
self.handle_event(
|
||||
VoiceAssistantEventType.VOICE_ASSISTANT_ERROR,
|
||||
{
|
||||
"code": "speech-timeout",
|
||||
"message": "Timed out waiting for speech",
|
||||
},
|
||||
)
|
||||
self.handle_finished()
|
||||
return None
|
||||
|
||||
async def _stream_packets() -> AsyncIterable[bytes]:
|
||||
try:
|
||||
async for chunk in self._segment_audio(segmenter, chunk_buffer):
|
||||
yield chunk
|
||||
except asyncio.TimeoutError:
|
||||
self.handle_event(
|
||||
VoiceAssistantEventType.VOICE_ASSISTANT_ERROR,
|
||||
{
|
||||
"code": "speech-timeout",
|
||||
"message": "No speech detected",
|
||||
},
|
||||
)
|
||||
self.handle_finished()
|
||||
|
||||
return _stream_packets
|
||||
|
||||
async def run_pipeline(
|
||||
self,
|
||||
device_id: str,
|
||||
conversation_id: str | None,
|
||||
use_vad: bool = False,
|
||||
flags: int = 0,
|
||||
pipeline_timeout: float = 30.0,
|
||||
) -> None:
|
||||
"""Run the Voice Assistant pipeline."""
|
||||
@@ -306,24 +220,11 @@ class VoiceAssistantUDPServer(asyncio.DatagramProtocol):
|
||||
"raw" if self.device_info.voice_assistant_version >= 2 else "mp3"
|
||||
)
|
||||
|
||||
if use_vad:
|
||||
stt_stream = await self._iterate_packets_with_vad(
|
||||
pipeline_timeout,
|
||||
silence_seconds=VadSensitivity.to_seconds(
|
||||
pipeline_select.get_vad_sensitivity(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
self.device_info.mac_address,
|
||||
)
|
||||
),
|
||||
)
|
||||
# Error or timeout occurred and was handled already
|
||||
if stt_stream is None:
|
||||
return
|
||||
else:
|
||||
stt_stream = self._iterate_packets
|
||||
|
||||
_LOGGER.debug("Starting pipeline")
|
||||
if flags & VoiceAssistantCommandFlag.USE_WAKE_WORD:
|
||||
start_stage = PipelineStage.WAKE_WORD
|
||||
else:
|
||||
start_stage = PipelineStage.STT
|
||||
try:
|
||||
async with asyncio.timeout(pipeline_timeout):
|
||||
await async_pipeline_from_audio_stream(
|
||||
@@ -338,13 +239,14 @@ class VoiceAssistantUDPServer(asyncio.DatagramProtocol):
|
||||
sample_rate=stt.AudioSampleRates.SAMPLERATE_16000,
|
||||
channel=stt.AudioChannels.CHANNEL_MONO,
|
||||
),
|
||||
stt_stream=stt_stream(),
|
||||
stt_stream=self._iterate_packets(),
|
||||
pipeline_id=pipeline_select.get_chosen_pipeline(
|
||||
self.hass, DOMAIN, self.device_info.mac_address
|
||||
),
|
||||
conversation_id=conversation_id,
|
||||
device_id=device_id,
|
||||
tts_audio_output=tts_audio_output,
|
||||
start_stage=start_stage,
|
||||
)
|
||||
|
||||
# Block until TTS is done sending
|
||||
@@ -356,11 +258,23 @@ class VoiceAssistantUDPServer(asyncio.DatagramProtocol):
|
||||
VoiceAssistantEventType.VOICE_ASSISTANT_ERROR,
|
||||
{
|
||||
"code": "pipeline not found",
|
||||
"message": "Selected pipeline timeout",
|
||||
"message": "Selected pipeline not found",
|
||||
},
|
||||
)
|
||||
_LOGGER.warning("Pipeline not found")
|
||||
except WakeWordDetectionError as e:
|
||||
self.handle_event(
|
||||
VoiceAssistantEventType.VOICE_ASSISTANT_ERROR,
|
||||
{
|
||||
"code": e.code,
|
||||
"message": e.message,
|
||||
},
|
||||
)
|
||||
_LOGGER.warning("No Wake word provider found")
|
||||
except asyncio.TimeoutError:
|
||||
if self.stopped:
|
||||
# The pipeline was stopped gracefully
|
||||
return
|
||||
self.handle_event(
|
||||
VoiceAssistantEventType.VOICE_ASSISTANT_ERROR,
|
||||
{
|
||||
@@ -397,7 +311,7 @@ class VoiceAssistantUDPServer(asyncio.DatagramProtocol):
|
||||
|
||||
self.transport.sendto(chunk, self.remote_addr)
|
||||
await asyncio.sleep(
|
||||
samples_in_chunk / stt.AudioSampleRates.SAMPLERATE_16000 * 0.99
|
||||
samples_in_chunk / stt.AudioSampleRates.SAMPLERATE_16000 * 0.9
|
||||
)
|
||||
|
||||
sample_offset += samples_in_chunk
|
||||
|
@@ -1,22 +1,23 @@
|
||||
"""Support for RSS/Atom feeds."""
|
||||
from __future__ import annotations
|
||||
|
||||
from calendar import timegm
|
||||
from datetime import datetime, timedelta
|
||||
from logging import getLogger
|
||||
from os.path import exists
|
||||
import os
|
||||
import pickle
|
||||
from threading import Lock
|
||||
from time import struct_time
|
||||
from typing import cast
|
||||
from time import gmtime, struct_time
|
||||
|
||||
import feedparser
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_SCAN_INTERVAL, EVENT_HOMEASSISTANT_START
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.event import track_time_interval
|
||||
from homeassistant.helpers.event import async_track_time_interval
|
||||
from homeassistant.helpers.storage import Store
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util.dt import utc_from_timestamp
|
||||
|
||||
_LOGGER = getLogger(__name__)
|
||||
|
||||
@@ -25,10 +26,12 @@ CONF_MAX_ENTRIES = "max_entries"
|
||||
|
||||
DEFAULT_MAX_ENTRIES = 20
|
||||
DEFAULT_SCAN_INTERVAL = timedelta(hours=1)
|
||||
DELAY_SAVE = 30
|
||||
|
||||
DOMAIN = "feedreader"
|
||||
|
||||
EVENT_FEEDREADER = "feedreader"
|
||||
STORAGE_VERSION = 1
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
@@ -46,17 +49,25 @@ CONFIG_SCHEMA = vol.Schema(
|
||||
)
|
||||
|
||||
|
||||
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Feedreader component."""
|
||||
urls: list[str] = config[DOMAIN][CONF_URLS]
|
||||
if not urls:
|
||||
return False
|
||||
|
||||
scan_interval: timedelta = config[DOMAIN][CONF_SCAN_INTERVAL]
|
||||
max_entries: int = config[DOMAIN][CONF_MAX_ENTRIES]
|
||||
data_file = hass.config.path(f"{DOMAIN}.pickle")
|
||||
storage = StoredData(data_file)
|
||||
old_data_file = hass.config.path(f"{DOMAIN}.pickle")
|
||||
storage = StoredData(hass, old_data_file)
|
||||
await storage.async_setup()
|
||||
feeds = [
|
||||
FeedManager(url, scan_interval, max_entries, hass, storage) for url in urls
|
||||
FeedManager(hass, url, scan_interval, max_entries, storage) for url in urls
|
||||
]
|
||||
return len(feeds) > 0
|
||||
|
||||
for feed in feeds:
|
||||
feed.async_setup()
|
||||
|
||||
return True
|
||||
|
||||
|
||||
class FeedManager:
|
||||
@@ -64,50 +75,47 @@ class FeedManager:
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
url: str,
|
||||
scan_interval: timedelta,
|
||||
max_entries: int,
|
||||
hass: HomeAssistant,
|
||||
storage: StoredData,
|
||||
) -> None:
|
||||
"""Initialize the FeedManager object, poll as per scan interval."""
|
||||
self._hass = hass
|
||||
self._url = url
|
||||
self._scan_interval = scan_interval
|
||||
self._max_entries = max_entries
|
||||
self._feed: feedparser.FeedParserDict | None = None
|
||||
self._hass = hass
|
||||
self._firstrun = True
|
||||
self._storage = storage
|
||||
self._last_entry_timestamp: struct_time | None = None
|
||||
self._last_update_successful = False
|
||||
self._has_published_parsed = False
|
||||
self._has_updated_parsed = False
|
||||
self._event_type = EVENT_FEEDREADER
|
||||
self._feed_id = url
|
||||
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, lambda _: self._update())
|
||||
self._init_regular_updates(hass)
|
||||
|
||||
@callback
|
||||
def async_setup(self) -> None:
|
||||
"""Set up the feed manager."""
|
||||
self._hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, self._async_update)
|
||||
async_track_time_interval(
|
||||
self._hass, self._async_update, self._scan_interval, cancel_on_shutdown=True
|
||||
)
|
||||
|
||||
def _log_no_entries(self) -> None:
|
||||
"""Send no entries log at debug level."""
|
||||
_LOGGER.debug("No new entries to be published in feed %s", self._url)
|
||||
|
||||
def _init_regular_updates(self, hass: HomeAssistant) -> None:
|
||||
"""Schedule regular updates at the top of the clock."""
|
||||
track_time_interval(
|
||||
hass,
|
||||
lambda now: self._update(),
|
||||
self._scan_interval,
|
||||
cancel_on_shutdown=True,
|
||||
)
|
||||
|
||||
@property
|
||||
def last_update_successful(self) -> bool:
|
||||
"""Return True if the last feed update was successful."""
|
||||
return self._last_update_successful
|
||||
|
||||
def _update(self) -> None:
|
||||
async def _async_update(self, _: datetime | Event) -> None:
|
||||
"""Update the feed and publish new entries to the event bus."""
|
||||
_LOGGER.info("Fetching new data from feed %s", self._url)
|
||||
last_entry_timestamp = await self._hass.async_add_executor_job(self._update)
|
||||
if last_entry_timestamp:
|
||||
self._storage.async_put_timestamp(self._feed_id, last_entry_timestamp)
|
||||
|
||||
def _update(self) -> struct_time | None:
|
||||
"""Update the feed and publish new entries to the event bus."""
|
||||
_LOGGER.debug("Fetching new data from feed %s", self._url)
|
||||
self._feed: feedparser.FeedParserDict = feedparser.parse( # type: ignore[no-redef]
|
||||
self._url,
|
||||
etag=None if not self._feed else self._feed.get("etag"),
|
||||
@@ -115,8 +123,7 @@ class FeedManager:
|
||||
)
|
||||
if not self._feed:
|
||||
_LOGGER.error("Error fetching feed data from %s", self._url)
|
||||
self._last_update_successful = False
|
||||
else:
|
||||
return None
|
||||
# The 'bozo' flag really only indicates that there was an issue
|
||||
# during the initial parsing of the XML, but it doesn't indicate
|
||||
# whether this is an unrecoverable error. In this case the
|
||||
@@ -131,22 +138,26 @@ class FeedManager:
|
||||
)
|
||||
# Using etag and modified, if there's no new data available,
|
||||
# the entries list will be empty
|
||||
if self._feed.entries:
|
||||
_LOGGER.debug(
|
||||
"%s entri(es) available in feed %s",
|
||||
len(self._feed.entries),
|
||||
self._url,
|
||||
)
|
||||
if not self._feed.entries:
|
||||
self._log_no_entries()
|
||||
return None
|
||||
|
||||
self._filter_entries()
|
||||
self._publish_new_entries()
|
||||
if self._has_published_parsed or self._has_updated_parsed:
|
||||
self._storage.put_timestamp(
|
||||
self._feed_id, cast(struct_time, self._last_entry_timestamp)
|
||||
)
|
||||
else:
|
||||
self._log_no_entries()
|
||||
self._last_update_successful = True
|
||||
_LOGGER.info("Fetch from feed %s completed", self._url)
|
||||
|
||||
_LOGGER.debug("Fetch from feed %s completed", self._url)
|
||||
|
||||
if (
|
||||
self._has_published_parsed or self._has_updated_parsed
|
||||
) and self._last_entry_timestamp:
|
||||
return self._last_entry_timestamp
|
||||
|
||||
return None
|
||||
|
||||
def _filter_entries(self) -> None:
|
||||
"""Filter the entries provided and return the ones to keep."""
|
||||
@@ -219,47 +230,62 @@ class FeedManager:
|
||||
|
||||
|
||||
class StoredData:
|
||||
"""Abstraction over pickle data storage."""
|
||||
"""Represent a data storage."""
|
||||
|
||||
def __init__(self, data_file: str) -> None:
|
||||
"""Initialize pickle data storage."""
|
||||
self._data_file = data_file
|
||||
self._lock = Lock()
|
||||
self._cache_outdated = True
|
||||
def __init__(self, hass: HomeAssistant, legacy_data_file: str) -> None:
|
||||
"""Initialize data storage."""
|
||||
self._legacy_data_file = legacy_data_file
|
||||
self._data: dict[str, struct_time] = {}
|
||||
self._fetch_data()
|
||||
self._hass = hass
|
||||
self._store: Store[dict[str, str]] = Store(hass, STORAGE_VERSION, DOMAIN)
|
||||
|
||||
def _fetch_data(self) -> None:
|
||||
"""Fetch data stored into pickle file."""
|
||||
if self._cache_outdated and exists(self._data_file):
|
||||
async def async_setup(self) -> None:
|
||||
"""Set up storage."""
|
||||
if not os.path.exists(self._store.path):
|
||||
# Remove the legacy store loading after deprecation period.
|
||||
data = await self._hass.async_add_executor_job(self._legacy_fetch_data)
|
||||
else:
|
||||
if (store_data := await self._store.async_load()) is None:
|
||||
return
|
||||
# Make sure that dst is set to 0, by using gmtime() on the timestamp.
|
||||
data = {
|
||||
feed_id: gmtime(datetime.fromisoformat(timestamp_string).timestamp())
|
||||
for feed_id, timestamp_string in store_data.items()
|
||||
}
|
||||
|
||||
self._data = data
|
||||
|
||||
def _legacy_fetch_data(self) -> dict[str, struct_time]:
|
||||
"""Fetch data stored in pickle file."""
|
||||
_LOGGER.debug("Fetching data from legacy file %s", self._legacy_data_file)
|
||||
try:
|
||||
_LOGGER.debug("Fetching data from file %s", self._data_file)
|
||||
with self._lock, open(self._data_file, "rb") as myfile:
|
||||
self._data = pickle.load(myfile) or {}
|
||||
self._cache_outdated = False
|
||||
except Exception: # pylint: disable=broad-except
|
||||
with open(self._legacy_data_file, "rb") as myfile:
|
||||
return pickle.load(myfile) or {}
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
except (OSError, pickle.PickleError) as err:
|
||||
_LOGGER.error(
|
||||
"Error loading data from pickled file %s", self._data_file
|
||||
"Error loading data from pickled file %s: %s",
|
||||
self._legacy_data_file,
|
||||
err,
|
||||
)
|
||||
|
||||
return {}
|
||||
|
||||
def get_timestamp(self, feed_id: str) -> struct_time | None:
|
||||
"""Return stored timestamp for given feed id (usually the url)."""
|
||||
self._fetch_data()
|
||||
"""Return stored timestamp for given feed id."""
|
||||
return self._data.get(feed_id)
|
||||
|
||||
def put_timestamp(self, feed_id: str, timestamp: struct_time) -> None:
|
||||
"""Update timestamp for given feed id (usually the url)."""
|
||||
self._fetch_data()
|
||||
with self._lock, open(self._data_file, "wb") as myfile:
|
||||
self._data.update({feed_id: timestamp})
|
||||
_LOGGER.debug(
|
||||
"Overwriting feed %s timestamp in storage file %s: %s",
|
||||
feed_id,
|
||||
self._data_file,
|
||||
timestamp,
|
||||
)
|
||||
try:
|
||||
pickle.dump(self._data, myfile)
|
||||
except Exception: # pylint: disable=broad-except
|
||||
_LOGGER.error("Error saving pickled data to %s", self._data_file)
|
||||
self._cache_outdated = True
|
||||
@callback
|
||||
def async_put_timestamp(self, feed_id: str, timestamp: struct_time) -> None:
|
||||
"""Update timestamp for given feed id."""
|
||||
self._data[feed_id] = timestamp
|
||||
self._store.async_delay_save(self._async_save_data, DELAY_SAVE)
|
||||
|
||||
@callback
|
||||
def _async_save_data(self) -> dict[str, str]:
|
||||
"""Save feed data to storage."""
|
||||
return {
|
||||
feed_id: utc_from_timestamp(timegm(struct_utc)).isoformat()
|
||||
for feed_id, struct_utc in self._data.items()
|
||||
}
|
||||
|
@@ -5,12 +5,38 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import DOMAIN
|
||||
from .const import (
|
||||
CONF_DAMPING,
|
||||
CONF_DAMPING_EVENING,
|
||||
CONF_DAMPING_MORNING,
|
||||
CONF_MODULES_POWER,
|
||||
DOMAIN,
|
||||
)
|
||||
from .coordinator import ForecastSolarDataUpdateCoordinator
|
||||
|
||||
PLATFORMS = [Platform.SENSOR]
|
||||
|
||||
|
||||
async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Migrate old config entry."""
|
||||
|
||||
if entry.version == 1:
|
||||
new_options = entry.options.copy()
|
||||
new_options |= {
|
||||
CONF_MODULES_POWER: new_options.pop("modules power"),
|
||||
CONF_DAMPING_MORNING: new_options.get(CONF_DAMPING, 0.0),
|
||||
CONF_DAMPING_EVENING: new_options.pop(CONF_DAMPING, 0.0),
|
||||
}
|
||||
|
||||
entry.version = 2
|
||||
|
||||
hass.config_entries.async_update_entry(
|
||||
entry, data=entry.data, options=new_options
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up Forecast.Solar from a config entry."""
|
||||
coordinator = ForecastSolarDataUpdateCoordinator(hass, entry)
|
||||
|
@@ -14,7 +14,8 @@ from homeassistant.helpers import config_validation as cv
|
||||
|
||||
from .const import (
|
||||
CONF_AZIMUTH,
|
||||
CONF_DAMPING,
|
||||
CONF_DAMPING_EVENING,
|
||||
CONF_DAMPING_MORNING,
|
||||
CONF_DECLINATION,
|
||||
CONF_INVERTER_SIZE,
|
||||
CONF_MODULES_POWER,
|
||||
@@ -27,7 +28,7 @@ RE_API_KEY = re.compile(r"^[a-zA-Z0-9]{16}$")
|
||||
class ForecastSolarFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Forecast.Solar."""
|
||||
|
||||
VERSION = 1
|
||||
VERSION = 2
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
@@ -127,8 +128,16 @@ class ForecastSolarOptionFlowHandler(OptionsFlow):
|
||||
default=self.config_entry.options[CONF_MODULES_POWER],
|
||||
): vol.Coerce(int),
|
||||
vol.Optional(
|
||||
CONF_DAMPING,
|
||||
default=self.config_entry.options.get(CONF_DAMPING, 0.0),
|
||||
CONF_DAMPING_MORNING,
|
||||
default=self.config_entry.options.get(
|
||||
CONF_DAMPING_MORNING, 0.0
|
||||
),
|
||||
): vol.Coerce(float),
|
||||
vol.Optional(
|
||||
CONF_DAMPING_EVENING,
|
||||
default=self.config_entry.options.get(
|
||||
CONF_DAMPING_EVENING, 0.0
|
||||
),
|
||||
): vol.Coerce(float),
|
||||
vol.Optional(
|
||||
CONF_INVERTER_SIZE,
|
||||
|
@@ -8,6 +8,8 @@ LOGGER = logging.getLogger(__package__)
|
||||
|
||||
CONF_DECLINATION = "declination"
|
||||
CONF_AZIMUTH = "azimuth"
|
||||
CONF_MODULES_POWER = "modules power"
|
||||
CONF_MODULES_POWER = "modules_power"
|
||||
CONF_DAMPING = "damping"
|
||||
CONF_DAMPING_MORNING = "damping_morning"
|
||||
CONF_DAMPING_EVENING = "damping_evening"
|
||||
CONF_INVERTER_SIZE = "inverter_size"
|
||||
|
@@ -13,7 +13,8 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
|
||||
from .const import (
|
||||
CONF_AZIMUTH,
|
||||
CONF_DAMPING,
|
||||
CONF_DAMPING_EVENING,
|
||||
CONF_DAMPING_MORNING,
|
||||
CONF_DECLINATION,
|
||||
CONF_INVERTER_SIZE,
|
||||
CONF_MODULES_POWER,
|
||||
@@ -48,7 +49,8 @@ class ForecastSolarDataUpdateCoordinator(DataUpdateCoordinator[Estimate]):
|
||||
declination=entry.options[CONF_DECLINATION],
|
||||
azimuth=(entry.options[CONF_AZIMUTH] - 180),
|
||||
kwp=(entry.options[CONF_MODULES_POWER] / 1000),
|
||||
damping=entry.options.get(CONF_DAMPING, 0),
|
||||
damping_morning=entry.options.get(CONF_DAMPING_MORNING, 0.0),
|
||||
damping_evening=entry.options.get(CONF_DAMPING_EVENING, 0.0),
|
||||
inverter=inverter_size,
|
||||
)
|
||||
|
||||
|
@@ -24,10 +24,11 @@
|
||||
"data": {
|
||||
"api_key": "Forecast.Solar API Key (optional)",
|
||||
"azimuth": "[%key:component::forecast_solar::config::step::user::data::azimuth%]",
|
||||
"damping": "Damping factor: adjusts the results in the morning and evening",
|
||||
"damping_morning": "Damping factor: adjusts the results in the morning",
|
||||
"damping_evening": "Damping factor: adjusts the results in the evening",
|
||||
"inverter_size": "Inverter size (Watt)",
|
||||
"declination": "[%key:component::forecast_solar::config::step::user::data::declination%]",
|
||||
"modules power": "[%key:component::forecast_solar::config::step::user::data::modules_power%]"
|
||||
"modules_power": "[%key:component::forecast_solar::config::step::user::data::modules_power%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -11,9 +11,17 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv, entity_platform
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from .const import CONF_RTSP_PORT, CONF_STREAM, LOGGER, SERVICE_PTZ, SERVICE_PTZ_PRESET
|
||||
from .const import (
|
||||
CONF_RTSP_PORT,
|
||||
CONF_STREAM,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
SERVICE_PTZ,
|
||||
SERVICE_PTZ_PRESET,
|
||||
)
|
||||
|
||||
DIR_UP = "up"
|
||||
DIR_DOWN = "down"
|
||||
@@ -94,12 +102,14 @@ async def async_setup_entry(
|
||||
class HassFoscamCamera(Camera):
|
||||
"""An implementation of a Foscam IP camera."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = None
|
||||
|
||||
def __init__(self, camera: FoscamCamera, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize a Foscam camera."""
|
||||
super().__init__()
|
||||
|
||||
self._foscam_session = camera
|
||||
self._attr_name = config_entry.title
|
||||
self._username = config_entry.data[CONF_USERNAME]
|
||||
self._password = config_entry.data[CONF_PASSWORD]
|
||||
self._stream = config_entry.data[CONF_STREAM]
|
||||
@@ -107,6 +117,10 @@ class HassFoscamCamera(Camera):
|
||||
self._rtsp_port = config_entry.data[CONF_RTSP_PORT]
|
||||
if self._rtsp_port:
|
||||
self._attr_supported_features = CameraEntityFeature.STREAM
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, config_entry.entry_id)},
|
||||
manufacturer="Foscam",
|
||||
)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Handle entity addition to hass."""
|
||||
|
@@ -1,5 +1,6 @@
|
||||
"""Config flow to configure the Freebox integration."""
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from freebox_api.exceptions import AuthorizationError, HttpRequestError
|
||||
import voluptuous as vol
|
||||
@@ -21,44 +22,36 @@ class FreeboxFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
VERSION = 1
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize Freebox config flow."""
|
||||
self._host: str
|
||||
self._port = None
|
||||
|
||||
def _show_setup_form(self, user_input=None, errors=None):
|
||||
"""Show the setup form to the user."""
|
||||
"""Initialize config flow."""
|
||||
self._data: dict[str, Any] = {}
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> FlowResult:
|
||||
"""Handle a flow initiated by the user."""
|
||||
if user_input is None:
|
||||
user_input = {}
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST, default=user_input.get(CONF_HOST, "")): str,
|
||||
vol.Required(CONF_PORT, default=user_input.get(CONF_PORT, "")): int,
|
||||
vol.Required(CONF_HOST): str,
|
||||
vol.Required(CONF_PORT): int,
|
||||
}
|
||||
),
|
||||
errors=errors or {},
|
||||
errors={},
|
||||
)
|
||||
|
||||
async def async_step_user(self, user_input=None) -> FlowResult:
|
||||
"""Handle a flow initiated by the user."""
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is None:
|
||||
return self._show_setup_form(user_input, errors)
|
||||
|
||||
self._host = user_input[CONF_HOST]
|
||||
self._port = user_input[CONF_PORT]
|
||||
self._data = user_input
|
||||
|
||||
# Check if already configured
|
||||
await self.async_set_unique_id(self._host)
|
||||
await self.async_set_unique_id(self._data[CONF_HOST])
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
return await self.async_step_link()
|
||||
|
||||
async def async_step_link(self, user_input=None) -> FlowResult:
|
||||
async def async_step_link(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> FlowResult:
|
||||
"""Attempt to link with the Freebox router.
|
||||
|
||||
Given a configured host, will ask the user to press the button
|
||||
@@ -69,10 +62,10 @@ class FreeboxFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
|
||||
errors = {}
|
||||
|
||||
fbx = await get_api(self.hass, self._host)
|
||||
fbx = await get_api(self.hass, self._data[CONF_HOST])
|
||||
try:
|
||||
# Open connection and check authentication
|
||||
await fbx.open(self._host, self._port)
|
||||
await fbx.open(self._data[CONF_HOST], self._data[CONF_PORT])
|
||||
|
||||
# Check permissions
|
||||
await fbx.system.get_config()
|
||||
@@ -82,8 +75,8 @@ class FreeboxFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
await fbx.close()
|
||||
|
||||
return self.async_create_entry(
|
||||
title=self._host,
|
||||
data={CONF_HOST: self._host, CONF_PORT: self._port},
|
||||
title=self._data[CONF_HOST],
|
||||
data=self._data,
|
||||
)
|
||||
|
||||
except AuthorizationError as error:
|
||||
@@ -91,18 +84,23 @@ class FreeboxFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
errors["base"] = "register_failed"
|
||||
|
||||
except HttpRequestError:
|
||||
_LOGGER.error("Error connecting to the Freebox router at %s", self._host)
|
||||
_LOGGER.error(
|
||||
"Error connecting to the Freebox router at %s", self._data[CONF_HOST]
|
||||
)
|
||||
errors["base"] = "cannot_connect"
|
||||
|
||||
except Exception: # pylint: disable=broad-except
|
||||
_LOGGER.exception(
|
||||
"Unknown error connecting with Freebox router at %s", self._host
|
||||
"Unknown error connecting with Freebox router at %s",
|
||||
self._data[CONF_HOST],
|
||||
)
|
||||
errors["base"] = "unknown"
|
||||
|
||||
return self.async_show_form(step_id="link", errors=errors)
|
||||
|
||||
async def async_step_import(self, user_input=None) -> FlowResult:
|
||||
async def async_step_import(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> FlowResult:
|
||||
"""Import a config entry."""
|
||||
return await self.async_step_user(user_input)
|
||||
|
||||
|
@@ -71,6 +71,7 @@ class FreeboxRouter:
|
||||
|
||||
self.devices: dict[str, dict[str, Any]] = {}
|
||||
self.disks: dict[int, dict[str, Any]] = {}
|
||||
self.supports_raid = True
|
||||
self.raids: dict[int, dict[str, Any]] = {}
|
||||
self.sensors_temperature: dict[str, int] = {}
|
||||
self.sensors_connection: dict[str, float] = {}
|
||||
@@ -159,12 +160,19 @@ class FreeboxRouter:
|
||||
|
||||
async def _update_raids_sensors(self) -> None:
|
||||
"""Update Freebox raids."""
|
||||
# None at first request
|
||||
if not self.supports_raid:
|
||||
return
|
||||
|
||||
try:
|
||||
fbx_raids: list[dict[str, Any]] = await self._api.storage.get_raids() or []
|
||||
except HttpRequestError:
|
||||
_LOGGER.warning("Unable to enumerate raid disks")
|
||||
else:
|
||||
self.supports_raid = False
|
||||
_LOGGER.info(
|
||||
"Router %s API does not support RAID",
|
||||
self.name,
|
||||
)
|
||||
return
|
||||
|
||||
for fbx_raid in fbx_raids:
|
||||
self.raids[fbx_raid["id"]] = fbx_raid
|
||||
|
||||
|
@@ -3,7 +3,7 @@ import asyncio
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from odp_amsterdam import ODPAmsterdam
|
||||
from odp_amsterdam import ODPAmsterdam, VehicleType
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
@@ -45,7 +45,7 @@ async def get_coordinator(
|
||||
garage.garage_name: garage
|
||||
for garage in await ODPAmsterdam(
|
||||
session=aiohttp_client.async_get_clientsession(hass)
|
||||
).all_garages(vehicle="car")
|
||||
).all_garages(vehicle=VehicleType.CAR)
|
||||
}
|
||||
|
||||
coordinator = DataUpdateCoordinator(
|
||||
|
@@ -5,7 +5,7 @@ import logging
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import ClientResponseError
|
||||
from odp_amsterdam import ODPAmsterdam
|
||||
from odp_amsterdam import ODPAmsterdam, VehicleType
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import config_entries
|
||||
@@ -32,7 +32,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
try:
|
||||
api_data = await ODPAmsterdam(
|
||||
session=aiohttp_client.async_get_clientsession(self.hass)
|
||||
).all_garages(vehicle="car")
|
||||
).all_garages(vehicle=VehicleType.CAR)
|
||||
except ClientResponseError:
|
||||
_LOGGER.error("Unexpected response from server")
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
|
@@ -5,5 +5,5 @@
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/garages_amsterdam",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["odp-amsterdam==5.3.0"]
|
||||
"requirements": ["odp-amsterdam==5.3.1"]
|
||||
}
|
||||
|
@@ -5,9 +5,9 @@ import logging
|
||||
from typing import Any
|
||||
|
||||
from gardena_bluetooth.client import Client
|
||||
from gardena_bluetooth.const import DeviceInformation, ScanService
|
||||
from gardena_bluetooth.const import PRODUCT_NAMES, DeviceInformation, ScanService
|
||||
from gardena_bluetooth.exceptions import CharacteristicNotFound, CommunicationFailure
|
||||
from gardena_bluetooth.parse import ManufacturerData, ProductGroup
|
||||
from gardena_bluetooth.parse import ManufacturerData, ProductType
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import config_entries
|
||||
@@ -34,7 +34,13 @@ def _is_supported(discovery_info: BluetoothServiceInfo):
|
||||
return False
|
||||
|
||||
manufacturer_data = ManufacturerData.decode(data)
|
||||
if manufacturer_data.group != ProductGroup.WATER_CONTROL:
|
||||
product_type = ProductType.from_manufacturer_data(manufacturer_data)
|
||||
|
||||
if product_type not in (
|
||||
ProductType.PUMP,
|
||||
ProductType.VALVE,
|
||||
ProductType.WATER_COMPUTER,
|
||||
):
|
||||
_LOGGER.debug("Unsupported device: %s", manufacturer_data)
|
||||
return False
|
||||
|
||||
@@ -42,9 +48,11 @@ def _is_supported(discovery_info: BluetoothServiceInfo):
|
||||
|
||||
|
||||
def _get_name(discovery_info: BluetoothServiceInfo):
|
||||
if discovery_info.name and discovery_info.name != discovery_info.address:
|
||||
return discovery_info.name
|
||||
return "Gardena Device"
|
||||
data = discovery_info.manufacturer_data[ManufacturerData.company]
|
||||
manufacturer_data = ManufacturerData.decode(data)
|
||||
product_type = ProductType.from_manufacturer_data(manufacturer_data)
|
||||
|
||||
return PRODUCT_NAMES.get(product_type, "Gardena Device")
|
||||
|
||||
|
||||
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
|
@@ -13,5 +13,5 @@
|
||||
"dependencies": ["bluetooth_adapters"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/gardena_bluetooth",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["gardena_bluetooth==1.0.2"]
|
||||
"requirements": ["gardena_bluetooth==1.3.0"]
|
||||
}
|
||||
|
@@ -71,15 +71,15 @@ DESCRIPTIONS = (
|
||||
char=DeviceConfiguration.rain_pause,
|
||||
),
|
||||
GardenaBluetoothNumberEntityDescription(
|
||||
key=DeviceConfiguration.season_pause.uuid,
|
||||
translation_key="season_pause",
|
||||
key=DeviceConfiguration.seasonal_adjust.uuid,
|
||||
translation_key="seasonal_adjust",
|
||||
native_unit_of_measurement=UnitOfTime.DAYS,
|
||||
mode=NumberMode.BOX,
|
||||
native_min_value=0.0,
|
||||
native_max_value=365.0,
|
||||
native_min_value=-128.0,
|
||||
native_max_value=127.0,
|
||||
native_step=1.0,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
char=DeviceConfiguration.season_pause,
|
||||
char=DeviceConfiguration.seasonal_adjust,
|
||||
),
|
||||
)
|
||||
|
||||
|
@@ -2,7 +2,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from datetime import UTC, datetime, timedelta
|
||||
|
||||
from gardena_bluetooth.const import Battery, Valve
|
||||
from gardena_bluetooth.parse import Characteristic
|
||||
@@ -106,7 +106,7 @@ class GardenaBluetoothRemainSensor(GardenaBluetoothEntity, SensorEntity):
|
||||
super()._handle_coordinator_update()
|
||||
return
|
||||
|
||||
time = datetime.now(timezone.utc) + timedelta(seconds=value)
|
||||
time = datetime.now(UTC) + timedelta(seconds=value)
|
||||
if not self._attr_native_value:
|
||||
self._attr_native_value = time
|
||||
super()._handle_coordinator_update()
|
||||
|
@@ -43,8 +43,8 @@
|
||||
"rain_pause": {
|
||||
"name": "Rain pause"
|
||||
},
|
||||
"season_pause": {
|
||||
"name": "Season pause"
|
||||
"seasonal_adjust": {
|
||||
"name": "Seasonal adjust"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
|
@@ -1,6 +1,7 @@
|
||||
"""Feed Entity Manager Sensor support for GDACS Feed."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
import logging
|
||||
|
||||
from homeassistant.components.sensor import SensorEntity
|
||||
@@ -33,21 +34,22 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up the GDACS Feed platform."""
|
||||
manager = hass.data[DOMAIN][FEED][entry.entry_id]
|
||||
sensor = GdacsSensor(entry.entry_id, entry.unique_id, entry.title, manager)
|
||||
sensor = GdacsSensor(entry, manager)
|
||||
async_add_entities([sensor])
|
||||
_LOGGER.debug("Sensor setup done")
|
||||
|
||||
|
||||
class GdacsSensor(SensorEntity):
|
||||
"""Status sensor for the GDACS integration."""
|
||||
|
||||
_attr_should_poll = False
|
||||
_attr_icon = DEFAULT_ICON
|
||||
_attr_native_unit_of_measurement = DEFAULT_UNIT_OF_MEASUREMENT
|
||||
|
||||
def __init__(self, config_entry_id, config_unique_id, config_title, manager):
|
||||
def __init__(self, config_entry: ConfigEntry, manager) -> None:
|
||||
"""Initialize entity."""
|
||||
self._config_entry_id = config_entry_id
|
||||
self._config_unique_id = config_unique_id
|
||||
self._config_title = config_title
|
||||
self._config_entry_id = config_entry.entry_id
|
||||
self._attr_unique_id = config_entry.unique_id
|
||||
self._attr_name = f"GDACS ({config_entry.title})"
|
||||
self._manager = manager
|
||||
self._status = None
|
||||
self._last_update = None
|
||||
@@ -57,7 +59,7 @@ class GdacsSensor(SensorEntity):
|
||||
self._created = None
|
||||
self._updated = None
|
||||
self._removed = None
|
||||
self._remove_signal_status = None
|
||||
self._remove_signal_status: Callable[[], None] | None = None
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Call when entity is added to hass."""
|
||||
@@ -112,26 +114,6 @@ class GdacsSensor(SensorEntity):
|
||||
"""Return the state of the sensor."""
|
||||
return self._total
|
||||
|
||||
@property
|
||||
def unique_id(self) -> str | None:
|
||||
"""Return a unique ID containing latitude/longitude."""
|
||||
return self._config_unique_id
|
||||
|
||||
@property
|
||||
def name(self) -> str | None:
|
||||
"""Return the name of the entity."""
|
||||
return f"GDACS ({self._config_title})"
|
||||
|
||||
@property
|
||||
def icon(self):
|
||||
"""Return the icon to use in the frontend, if any."""
|
||||
return DEFAULT_ICON
|
||||
|
||||
@property
|
||||
def native_unit_of_measurement(self):
|
||||
"""Return the unit of measurement."""
|
||||
return DEFAULT_UNIT_OF_MEASUREMENT
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self):
|
||||
"""Return the device state attributes."""
|
||||
|
@@ -1,7 +1,7 @@
|
||||
"""Support for Google Mail Sensors."""
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from datetime import UTC, datetime, timedelta
|
||||
|
||||
from googleapiclient.http import HttpRequest
|
||||
|
||||
@@ -46,7 +46,7 @@ class GoogleMailSensor(GoogleMailEntity, SensorEntity):
|
||||
data: dict = await self.hass.async_add_executor_job(settings.execute)
|
||||
|
||||
if data["enableAutoReply"] and (end := data.get("endTime")):
|
||||
value = datetime.fromtimestamp(int(end) / 1000, tz=timezone.utc)
|
||||
value = datetime.fromtimestamp(int(end) / 1000, tz=UTC)
|
||||
else:
|
||||
value = None
|
||||
self._attr_native_value = value
|
||||
|
@@ -1,6 +1,9 @@
|
||||
"""Platform allowing several binary sensor to be grouped into one binary sensor."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable, Mapping
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
@@ -21,7 +24,7 @@ from homeassistant.const import (
|
||||
STATE_UNAVAILABLE,
|
||||
STATE_UNKNOWN,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv, entity_registry as er
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.event import (
|
||||
@@ -100,7 +103,7 @@ class BinarySensorGroup(GroupEntity, BinarySensorEntity):
|
||||
name: str,
|
||||
device_class: BinarySensorDeviceClass | None,
|
||||
entity_ids: list[str],
|
||||
mode: str | None,
|
||||
mode: bool | None,
|
||||
) -> None:
|
||||
"""Initialize a BinarySensorGroup entity."""
|
||||
super().__init__()
|
||||
@@ -113,6 +116,26 @@ class BinarySensorGroup(GroupEntity, BinarySensorEntity):
|
||||
if mode:
|
||||
self.mode = all
|
||||
|
||||
@callback
|
||||
def async_start_preview(
|
||||
self,
|
||||
preview_callback: Callable[[str, Mapping[str, Any]], None],
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Render a preview."""
|
||||
|
||||
@callback
|
||||
def async_state_changed_listener(
|
||||
event: EventType[EventStateChangedData] | None,
|
||||
) -> None:
|
||||
"""Handle child updates."""
|
||||
self.async_update_group_state()
|
||||
preview_callback(*self._async_generate_attributes())
|
||||
|
||||
async_state_changed_listener(None)
|
||||
return async_track_state_change_event(
|
||||
self.hass, self._entity_ids, async_state_changed_listener
|
||||
)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register callbacks."""
|
||||
|
||||
|
@@ -3,12 +3,14 @@ from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable, Coroutine, Mapping
|
||||
from functools import partial
|
||||
from typing import Any, cast
|
||||
from typing import Any, Literal, cast
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.const import CONF_ENTITIES, CONF_TYPE
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import entity_registry as er, selector
|
||||
from homeassistant.helpers.schema_config_entry_flow import (
|
||||
SchemaCommonFlowHandler,
|
||||
@@ -20,8 +22,9 @@ from homeassistant.helpers.schema_config_entry_flow import (
|
||||
)
|
||||
|
||||
from . import DOMAIN
|
||||
from .binary_sensor import CONF_ALL
|
||||
from .binary_sensor import CONF_ALL, BinarySensorGroup
|
||||
from .const import CONF_HIDE_MEMBERS, CONF_IGNORE_NON_NUMERIC
|
||||
from .sensor import SensorGroup
|
||||
|
||||
_STATISTIC_MEASURES = [
|
||||
"min",
|
||||
@@ -36,15 +39,22 @@ _STATISTIC_MEASURES = [
|
||||
|
||||
|
||||
async def basic_group_options_schema(
|
||||
domain: str | list[str], handler: SchemaCommonFlowHandler
|
||||
domain: str | list[str], handler: SchemaCommonFlowHandler | None
|
||||
) -> vol.Schema:
|
||||
"""Generate options schema."""
|
||||
return vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_ENTITIES): entity_selector_without_own_entities(
|
||||
if handler is None:
|
||||
entity_selector = selector.selector(
|
||||
{"entity": {"domain": domain, "multiple": True}}
|
||||
)
|
||||
else:
|
||||
entity_selector = entity_selector_without_own_entities(
|
||||
cast(SchemaOptionsFlowHandler, handler.parent_handler),
|
||||
selector.EntitySelectorConfig(domain=domain, multiple=True),
|
||||
),
|
||||
)
|
||||
|
||||
return vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_ENTITIES): entity_selector,
|
||||
vol.Required(CONF_HIDE_MEMBERS, default=False): selector.BooleanSelector(),
|
||||
}
|
||||
)
|
||||
@@ -63,7 +73,9 @@ def basic_group_config_schema(domain: str | list[str]) -> vol.Schema:
|
||||
)
|
||||
|
||||
|
||||
async def binary_sensor_options_schema(handler: SchemaCommonFlowHandler) -> vol.Schema:
|
||||
async def binary_sensor_options_schema(
|
||||
handler: SchemaCommonFlowHandler | None,
|
||||
) -> vol.Schema:
|
||||
"""Generate options schema."""
|
||||
return (await basic_group_options_schema("binary_sensor", handler)).extend(
|
||||
{
|
||||
@@ -96,7 +108,7 @@ SENSOR_OPTIONS = {
|
||||
|
||||
|
||||
async def sensor_options_schema(
|
||||
domain: str, handler: SchemaCommonFlowHandler
|
||||
domain: str, handler: SchemaCommonFlowHandler | None
|
||||
) -> vol.Schema:
|
||||
"""Generate options schema."""
|
||||
return (
|
||||
@@ -160,6 +172,7 @@ CONFIG_FLOW = {
|
||||
"binary_sensor": SchemaFlowFormStep(
|
||||
BINARY_SENSOR_CONFIG_SCHEMA,
|
||||
validate_user_input=set_group_type("binary_sensor"),
|
||||
preview="group_binary_sensor",
|
||||
),
|
||||
"cover": SchemaFlowFormStep(
|
||||
basic_group_config_schema("cover"),
|
||||
@@ -184,6 +197,7 @@ CONFIG_FLOW = {
|
||||
"sensor": SchemaFlowFormStep(
|
||||
SENSOR_CONFIG_SCHEMA,
|
||||
validate_user_input=set_group_type("sensor"),
|
||||
preview="group_sensor",
|
||||
),
|
||||
"switch": SchemaFlowFormStep(
|
||||
basic_group_config_schema("switch"),
|
||||
@@ -194,7 +208,10 @@ CONFIG_FLOW = {
|
||||
|
||||
OPTIONS_FLOW = {
|
||||
"init": SchemaFlowFormStep(next_step=choose_options_step),
|
||||
"binary_sensor": SchemaFlowFormStep(binary_sensor_options_schema),
|
||||
"binary_sensor": SchemaFlowFormStep(
|
||||
binary_sensor_options_schema,
|
||||
preview="group_binary_sensor",
|
||||
),
|
||||
"cover": SchemaFlowFormStep(partial(basic_group_options_schema, "cover")),
|
||||
"fan": SchemaFlowFormStep(partial(basic_group_options_schema, "fan")),
|
||||
"light": SchemaFlowFormStep(partial(light_switch_options_schema, "light")),
|
||||
@@ -202,7 +219,10 @@ OPTIONS_FLOW = {
|
||||
"media_player": SchemaFlowFormStep(
|
||||
partial(basic_group_options_schema, "media_player")
|
||||
),
|
||||
"sensor": SchemaFlowFormStep(partial(sensor_options_schema, "sensor")),
|
||||
"sensor": SchemaFlowFormStep(
|
||||
partial(sensor_options_schema, "sensor"),
|
||||
preview="group_sensor",
|
||||
),
|
||||
"switch": SchemaFlowFormStep(partial(light_switch_options_schema, "switch")),
|
||||
}
|
||||
|
||||
@@ -241,6 +261,13 @@ class GroupConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
|
||||
)
|
||||
_async_hide_members(hass, options[CONF_ENTITIES], hidden_by)
|
||||
|
||||
@callback
|
||||
@staticmethod
|
||||
def async_setup_preview(hass: HomeAssistant) -> None:
|
||||
"""Set up preview WS API."""
|
||||
websocket_api.async_register_command(hass, ws_preview_sensor)
|
||||
websocket_api.async_register_command(hass, ws_preview_binary_sensor)
|
||||
|
||||
|
||||
def _async_hide_members(
|
||||
hass: HomeAssistant, members: list[str], hidden_by: er.RegistryEntryHider | None
|
||||
@@ -253,3 +280,129 @@ def _async_hide_members(
|
||||
if entity_id not in registry.entities:
|
||||
continue
|
||||
registry.async_update_entity(entity_id, hidden_by=hidden_by)
|
||||
|
||||
|
||||
@callback
|
||||
def _async_handle_ws_preview(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
config_schema: vol.Schema,
|
||||
options_schema: vol.Schema,
|
||||
create_preview_entity: Callable[
|
||||
[Literal["config_flow", "options_flow"], str, dict[str, Any]],
|
||||
BinarySensorGroup | SensorGroup,
|
||||
],
|
||||
) -> None:
|
||||
"""Generate a preview."""
|
||||
if msg["flow_type"] == "config_flow":
|
||||
validated = config_schema(msg["user_input"])
|
||||
name = validated["name"]
|
||||
else:
|
||||
validated = options_schema(msg["user_input"])
|
||||
flow_status = hass.config_entries.options.async_get(msg["flow_id"])
|
||||
config_entry = hass.config_entries.async_get_entry(flow_status["handler"])
|
||||
if not config_entry:
|
||||
raise HomeAssistantError
|
||||
name = config_entry.options["name"]
|
||||
|
||||
@callback
|
||||
def async_preview_updated(state: str, attributes: Mapping[str, Any]) -> None:
|
||||
"""Forward config entry state events to websocket."""
|
||||
connection.send_message(
|
||||
websocket_api.event_message(
|
||||
msg["id"], {"state": state, "attributes": attributes}
|
||||
)
|
||||
)
|
||||
|
||||
preview_entity = create_preview_entity(msg["flow_type"], name, validated)
|
||||
preview_entity.hass = hass
|
||||
|
||||
connection.send_result(msg["id"])
|
||||
connection.subscriptions[msg["id"]] = preview_entity.async_start_preview(
|
||||
async_preview_updated
|
||||
)
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "group/binary_sensor/start_preview",
|
||||
vol.Required("flow_id"): str,
|
||||
vol.Required("flow_type"): vol.Any("config_flow", "options_flow"),
|
||||
vol.Required("user_input"): dict,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
async def ws_preview_binary_sensor(
|
||||
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
|
||||
) -> None:
|
||||
"""Generate a preview."""
|
||||
|
||||
def create_preview_binary_sensor(
|
||||
flow_type: Literal["config_flow", "options_flow"],
|
||||
name: str,
|
||||
validated_config: dict[str, Any],
|
||||
) -> BinarySensorGroup:
|
||||
"""Create a preview sensor."""
|
||||
return BinarySensorGroup(
|
||||
None,
|
||||
name,
|
||||
None,
|
||||
validated_config[CONF_ENTITIES],
|
||||
validated_config[CONF_ALL],
|
||||
)
|
||||
|
||||
_async_handle_ws_preview(
|
||||
hass,
|
||||
connection,
|
||||
msg,
|
||||
BINARY_SENSOR_CONFIG_SCHEMA,
|
||||
await binary_sensor_options_schema(None),
|
||||
create_preview_binary_sensor,
|
||||
)
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "group/sensor/start_preview",
|
||||
vol.Required("flow_id"): str,
|
||||
vol.Required("flow_type"): vol.Any("config_flow", "options_flow"),
|
||||
vol.Required("user_input"): dict,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
async def ws_preview_sensor(
|
||||
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
|
||||
) -> None:
|
||||
"""Generate a preview."""
|
||||
|
||||
def create_preview_sensor(
|
||||
flow_type: Literal["config_flow", "options_flow"],
|
||||
name: str,
|
||||
validated_config: dict[str, Any],
|
||||
) -> SensorGroup:
|
||||
"""Create a preview sensor."""
|
||||
ignore_non_numeric = (
|
||||
False
|
||||
if flow_type == "config_flow"
|
||||
else validated_config[CONF_IGNORE_NON_NUMERIC]
|
||||
)
|
||||
return SensorGroup(
|
||||
None,
|
||||
name,
|
||||
validated_config[CONF_ENTITIES],
|
||||
ignore_non_numeric,
|
||||
validated_config[CONF_TYPE],
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
)
|
||||
|
||||
_async_handle_ws_preview(
|
||||
hass,
|
||||
connection,
|
||||
msg,
|
||||
SENSOR_CONFIG_SCHEMA,
|
||||
await sensor_options_schema("sensor", None),
|
||||
create_preview_sensor,
|
||||
)
|
||||
|
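# A hedged sketch of the websocket message a frontend would send to start a
# group preview with the commands registered above. The message id, flow id,
# group name and entity ids are hypothetical; the field names follow the
# "group/binary_sensor/start_preview" command schema (CONF_ENTITIES/CONF_ALL).
preview_request = {
    "id": 5,
    "type": "group/binary_sensor/start_preview",
    "flow_id": "01H9EXAMPLEFLOWID",
    "flow_type": "config_flow",
    "user_input": {
        "name": "Hallway motion",
        "entities": ["binary_sensor.hall_front", "binary_sensor.hall_back"],
        "all": False,
    },
}
# The handler validates user_input, builds a throw-away BinarySensorGroup and
# streams {"state": ..., "attributes": ...} events back until the client
# unsubscribes.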
@@ -1,7 +1,7 @@
|
||||
"""Platform allowing several sensors to be grouped into one sensor to provide numeric combinations."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from collections.abc import Callable, Mapping
|
||||
from datetime import datetime
|
||||
import logging
|
||||
import statistics
|
||||
@@ -33,7 +33,7 @@ from homeassistant.const import (
|
||||
STATE_UNAVAILABLE,
|
||||
STATE_UNKNOWN,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, State, callback
|
||||
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, State, callback
|
||||
from homeassistant.helpers import config_validation as cv, entity_registry as er
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.event import (
|
||||
@@ -303,6 +303,26 @@ class SensorGroup(GroupEntity, SensorEntity):
|
||||
self._state_incorrect: set[str] = set()
|
||||
self._extra_state_attribute: dict[str, Any] = {}
|
||||
|
||||
@callback
|
||||
def async_start_preview(
|
||||
self,
|
||||
preview_callback: Callable[[str, Mapping[str, Any]], None],
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Render a preview."""
|
||||
|
||||
@callback
|
||||
def async_state_changed_listener(
|
||||
event: EventType[EventStateChangedData] | None,
|
||||
) -> None:
|
||||
"""Handle child updates."""
|
||||
self.async_update_group_state()
|
||||
preview_callback(*self._async_generate_attributes())
|
||||
|
||||
async_state_changed_listener(None)
|
||||
return async_track_state_change_event(
|
||||
self.hass, self._entity_ids, async_state_changed_listener
|
||||
)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register callbacks."""
|
||||
|
||||
|
@@ -9,6 +9,7 @@ ATTR_ADMIN = "admin"
|
||||
ATTR_COMPRESSED = "compressed"
|
||||
ATTR_CONFIG = "config"
|
||||
ATTR_DATA = "data"
|
||||
ATTR_SESSION_DATA_USER_ID = "user_id"
|
||||
ATTR_DISCOVERY = "discovery"
|
||||
ATTR_ENABLE = "enable"
|
||||
ATTR_ENDPOINT = "endpoint"
|
||||
|
@@ -22,6 +22,7 @@ from .const import (
|
||||
ATTR_ENDPOINT,
|
||||
ATTR_METHOD,
|
||||
ATTR_RESULT,
|
||||
ATTR_SESSION_DATA_USER_ID,
|
||||
ATTR_TIMEOUT,
|
||||
ATTR_WS_EVENT,
|
||||
DOMAIN,
|
||||
@@ -115,12 +116,21 @@ async def websocket_supervisor_api(
|
||||
):
|
||||
raise Unauthorized()
|
||||
supervisor: HassIO = hass.data[DOMAIN]
|
||||
|
||||
command = msg[ATTR_ENDPOINT]
|
||||
payload = msg.get(ATTR_DATA, {})
|
||||
|
||||
if command == "/ingress/session":
|
||||
# Send user ID on session creation, so the supervisor can correlate session tokens with users
|
||||
# for every request that is authenticated with the given ingress session token.
|
||||
payload[ATTR_SESSION_DATA_USER_ID] = connection.user.id
|
||||
|
||||
try:
|
||||
result = await supervisor.send_command(
|
||||
msg[ATTR_ENDPOINT],
|
||||
command,
|
||||
method=msg[ATTR_METHOD],
|
||||
timeout=msg.get(ATTR_TIMEOUT, 10),
|
||||
payload=msg.get(ATTR_DATA, {}),
|
||||
payload=payload,
|
||||
source="core.websocket_api",
|
||||
)
|
||||
|
||||
|
@@ -23,6 +23,10 @@ from homeassistant.components.climate import (
|
||||
DEFAULT_MAX_TEMP,
|
||||
DEFAULT_MIN_TEMP,
|
||||
FAN_AUTO,
|
||||
FAN_HIGH,
|
||||
FAN_LOW,
|
||||
FAN_MEDIUM,
|
||||
FAN_OFF,
|
||||
FAN_ON,
|
||||
SWING_OFF,
|
||||
SWING_VERTICAL,
|
||||
@@ -35,6 +39,10 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import ATTR_TEMPERATURE, Platform, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.util.percentage import (
|
||||
percentage_to_ranged_value,
|
||||
ranged_value_to_percentage,
|
||||
)
|
||||
|
||||
from . import KNOWN_DEVICES
|
||||
from .connection import HKDevice
|
||||
@@ -86,6 +94,16 @@ SWING_MODE_HASS_TO_HOMEKIT = {v: k for k, v in SWING_MODE_HOMEKIT_TO_HASS.items(
|
||||
|
||||
DEFAULT_MIN_STEP: Final = 1.0
|
||||
|
||||
ROTATION_SPEED_LOW = 33
|
||||
ROTATION_SPEED_MEDIUM = 66
|
||||
ROTATION_SPEED_HIGH = 100
|
||||
|
||||
HASS_FAN_MODE_TO_HOMEKIT_ROTATION = {
|
||||
FAN_LOW: ROTATION_SPEED_LOW,
|
||||
FAN_MEDIUM: ROTATION_SPEED_MEDIUM,
|
||||
FAN_HIGH: ROTATION_SPEED_HIGH,
|
||||
}
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
@@ -170,8 +188,45 @@ class HomeKitHeaterCoolerEntity(HomeKitBaseClimateEntity):
|
||||
CharacteristicsTypes.TEMPERATURE_COOLING_THRESHOLD,
|
||||
CharacteristicsTypes.TEMPERATURE_HEATING_THRESHOLD,
|
||||
CharacteristicsTypes.SWING_MODE,
|
||||
CharacteristicsTypes.ROTATION_SPEED,
|
||||
]
|
||||
|
||||
def _get_rotation_speed_range(self) -> tuple[float, float]:
|
||||
rotation_speed = self.service[CharacteristicsTypes.ROTATION_SPEED]
|
||||
return round(rotation_speed.minValue or 0) + 1, round(
|
||||
rotation_speed.maxValue or 100
|
||||
)
|
||||
|
||||
@property
|
||||
def fan_modes(self) -> list[str]:
|
||||
"""Return the available fan modes."""
|
||||
return [FAN_OFF, FAN_LOW, FAN_MEDIUM, FAN_HIGH]
|
||||
|
||||
@property
|
||||
def fan_mode(self) -> str | None:
|
||||
"""Return the current fan mode."""
|
||||
speed_range = self._get_rotation_speed_range()
|
||||
speed_percentage = ranged_value_to_percentage(
|
||||
speed_range, self.service.value(CharacteristicsTypes.ROTATION_SPEED)
|
||||
)
|
||||
# homekit value 0 33 66 100
|
||||
if speed_percentage > ROTATION_SPEED_MEDIUM:
|
||||
return FAN_HIGH
|
||||
if speed_percentage > ROTATION_SPEED_LOW:
|
||||
return FAN_MEDIUM
|
||||
if speed_percentage > 0:
|
||||
return FAN_LOW
|
||||
return FAN_OFF
|
||||
|
||||
async def async_set_fan_mode(self, fan_mode: str) -> None:
|
||||
"""Set new target fan mode."""
|
||||
rotation = HASS_FAN_MODE_TO_HOMEKIT_ROTATION.get(fan_mode, 0)
|
||||
speed_range = self._get_rotation_speed_range()
|
||||
speed = round(percentage_to_ranged_value(speed_range, rotation))
|
||||
await self.async_put_characteristics(
|
||||
{CharacteristicsTypes.ROTATION_SPEED: speed}
|
||||
)
|
||||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set new target temperature."""
|
||||
temp = kwargs.get(ATTR_TEMPERATURE)
|
||||
@@ -387,6 +442,9 @@ class HomeKitHeaterCoolerEntity(HomeKitBaseClimateEntity):
|
||||
if self.service.has(CharacteristicsTypes.SWING_MODE):
|
||||
features |= ClimateEntityFeature.SWING_MODE
|
||||
|
||||
if self.service.has(CharacteristicsTypes.ROTATION_SPEED):
|
||||
features |= ClimateEntityFeature.FAN_MODE
|
||||
|
||||
return features
|
||||
|
||||
|
||||
|
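# A minimal, self-contained sketch of the fan-mode mapping used above. It
# deliberately does not call the Home Assistant percentage utilities; the
# thresholds are the module constants and the 0-100 range is an assumption
# for a typical HomeKit rotation-speed characteristic.
LOW, MEDIUM, HIGH = 33, 66, 100


def fan_mode_from_percentage(speed_percentage: float) -> str:
    """Map a rotation-speed percentage onto the discrete fan modes."""
    if speed_percentage > MEDIUM:
        return "high"
    if speed_percentage > LOW:
        return "medium"
    if speed_percentage > 0:
        return "low"
    return "off"


# Worked example: a device reporting 50% reads back as "medium", while
# selecting "low" writes 33% to the rotation-speed characteristic.
assert fan_mode_from_percentage(50) == "medium"
assert fan_mode_from_percentage(0) == "off"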
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/honeywell",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["somecomfort"],
|
||||
"requirements": ["AIOSomecomfort==0.0.15"]
|
||||
"requirements": ["AIOSomecomfort==0.0.16"]
|
||||
}
|
||||
|
@@ -44,7 +44,6 @@ from .const import (
|
||||
DOMAIN,
|
||||
HYPERION_MANUFACTURER_NAME,
|
||||
HYPERION_MODEL_NAME,
|
||||
NAME_SUFFIX_HYPERION_CAMERA,
|
||||
SIGNAL_ENTITY_REMOVE,
|
||||
TYPE_HYPERION_CAMERA,
|
||||
)
|
||||
@@ -107,6 +106,9 @@ async def async_setup_entry(
|
||||
class HyperionCamera(Camera):
|
||||
"""ComponentBinarySwitch switch class."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = None
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
server_id: str,
|
||||
@@ -120,7 +122,6 @@ class HyperionCamera(Camera):
|
||||
self._unique_id = get_hyperion_unique_id(
|
||||
server_id, instance_num, TYPE_HYPERION_CAMERA
|
||||
)
|
||||
self._name = f"{instance_name} {NAME_SUFFIX_HYPERION_CAMERA}".strip()
|
||||
self._device_id = get_hyperion_device_id(server_id, instance_num)
|
||||
self._instance_name = instance_name
|
||||
self._client = hyperion_client
|
||||
@@ -140,11 +141,6 @@ class HyperionCamera(Camera):
|
||||
"""Return a unique id for this instance."""
|
||||
return self._unique_id
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
"""Return the name of the switch."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if the camera is on."""
|
||||
|
@@ -21,9 +21,6 @@ HYPERION_MODEL_NAME = f"{HYPERION_MANUFACTURER_NAME}-NG"
|
||||
HYPERION_RELEASES_URL = "https://github.com/hyperion-project/hyperion.ng/releases"
|
||||
HYPERION_VERSION_WARN_CUTOFF = "2.0.0-alpha.9"
|
||||
|
||||
NAME_SUFFIX_HYPERION_COMPONENT_SWITCH = "Component"
|
||||
NAME_SUFFIX_HYPERION_CAMERA = ""
|
||||
|
||||
SIGNAL_INSTANCE_ADD = f"{DOMAIN}_instance_add_signal.{{}}"
|
||||
SIGNAL_INSTANCE_REMOVE = f"{DOMAIN}_instance_remove_signal.{{}}"
|
||||
SIGNAL_ENTITY_REMOVE = f"{DOMAIN}_entity_remove_signal.{{}}"
|
||||
|
@@ -116,6 +116,8 @@ async def async_setup_entry(
|
||||
class HyperionLight(LightEntity):
|
||||
"""A Hyperion light that acts as a client for the configured priority."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = None
|
||||
_attr_color_mode = ColorMode.HS
|
||||
_attr_should_poll = False
|
||||
_attr_supported_color_modes = {ColorMode.HS}
|
||||
@@ -131,7 +133,6 @@ class HyperionLight(LightEntity):
|
||||
) -> None:
|
||||
"""Initialize the light."""
|
||||
self._unique_id = self._compute_unique_id(server_id, instance_num)
|
||||
self._name = self._compute_name(instance_name)
|
||||
self._device_id = get_hyperion_device_id(server_id, instance_num)
|
||||
self._instance_name = instance_name
|
||||
self._options = options
|
||||
@@ -157,20 +158,11 @@ class HyperionLight(LightEntity):
|
||||
"""Compute a unique id for this instance."""
|
||||
return get_hyperion_unique_id(server_id, instance_num, TYPE_HYPERION_LIGHT)
|
||||
|
||||
def _compute_name(self, instance_name: str) -> str:
|
||||
"""Compute the name of the light."""
|
||||
return f"{instance_name}".strip()
|
||||
|
||||
@property
|
||||
def entity_registry_enabled_default(self) -> bool:
|
||||
"""Whether or not the entity is enabled by default."""
|
||||
return True
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
"""Return the name of the light."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def brightness(self) -> int:
|
||||
"""Return the brightness of this light between 0..255."""
|
||||
|
@@ -50,5 +50,33 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"switch": {
|
||||
"all": {
|
||||
"name": "Component all"
|
||||
},
|
||||
"smoothing": {
|
||||
"name": "Component smoothing"
|
||||
},
|
||||
"blackbar_detection": {
|
||||
"name": "Component blackbar detection"
|
||||
},
|
||||
"forwarder": {
|
||||
"name": "Component forwarder"
|
||||
},
|
||||
"boblight_server": {
|
||||
"name": "Component boblight server"
|
||||
},
|
||||
"platform_capture": {
|
||||
"name": "Component platform capture"
|
||||
},
|
||||
"led_device": {
|
||||
"name": "Component LED device"
|
||||
},
|
||||
"usb_capture": {
|
||||
"name": "Component USB capture"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -46,7 +46,6 @@ from .const import (
|
||||
DOMAIN,
|
||||
HYPERION_MANUFACTURER_NAME,
|
||||
HYPERION_MODEL_NAME,
|
||||
NAME_SUFFIX_HYPERION_COMPONENT_SWITCH,
|
||||
SIGNAL_ENTITY_REMOVE,
|
||||
TYPE_HYPERION_COMPONENT_SWITCH_BASE,
|
||||
)
|
||||
@@ -74,13 +73,17 @@ def _component_to_unique_id(server_id: str, component: str, instance_num: int) -
|
||||
)
|
||||
|
||||
|
||||
def _component_to_switch_name(component: str, instance_name: str) -> str:
|
||||
"""Convert a component to a switch name."""
|
||||
return (
|
||||
f"{instance_name} "
|
||||
f"{NAME_SUFFIX_HYPERION_COMPONENT_SWITCH} "
|
||||
f"{KEY_COMPONENTID_TO_NAME.get(component, component.capitalize())}"
|
||||
)
|
||||
def _component_to_translation_key(component: str) -> str:
|
||||
return {
|
||||
KEY_COMPONENTID_ALL: "all",
|
||||
KEY_COMPONENTID_SMOOTHING: "smoothing",
|
||||
KEY_COMPONENTID_BLACKBORDER: "blackbar_detection",
|
||||
KEY_COMPONENTID_FORWARDER: "forwarder",
|
||||
KEY_COMPONENTID_BOBLIGHTSERVER: "boblight_server",
|
||||
KEY_COMPONENTID_GRABBER: "platform_capture",
|
||||
KEY_COMPONENTID_LEDDEVICE: "led_device",
|
||||
KEY_COMPONENTID_V4L: "usb_capture",
|
||||
}[component]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -129,6 +132,7 @@ class HyperionComponentSwitch(SwitchEntity):
|
||||
|
||||
_attr_entity_category = EntityCategory.CONFIG
|
||||
_attr_should_poll = False
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@@ -143,7 +147,7 @@ class HyperionComponentSwitch(SwitchEntity):
|
||||
server_id, component_name, instance_num
|
||||
)
|
||||
self._device_id = get_hyperion_device_id(server_id, instance_num)
|
||||
self._name = _component_to_switch_name(component_name, instance_name)
|
||||
self._attr_translation_key = _component_to_translation_key(component_name)
|
||||
self._instance_name = instance_name
|
||||
self._component_name = component_name
|
||||
self._client = hyperion_client
|
||||
@@ -162,11 +166,6 @@ class HyperionComponentSwitch(SwitchEntity):
|
||||
"""Return a unique id for this instance."""
|
||||
return self._unique_id
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
"""Return the name of the switch."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if the switch is on."""
|
||||
|
@@ -1,53 +1,64 @@
|
||||
"""Config flow to configure IPMA component."""
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pyipma import IPMAException
|
||||
from pyipma.api import IPMA_API
|
||||
from pyipma.location import Location
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME
|
||||
from homeassistant.config_entries import ConfigFlow
|
||||
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE
|
||||
from homeassistant.data_entry_flow import FlowResult
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
from .const import DOMAIN, HOME_LOCATION_NAME
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class IpmaFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
class IpmaFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
"""Config flow for IPMA component."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
def __init__(self):
|
||||
"""Init IpmaFlowHandler."""
|
||||
self._errors = {}
|
||||
|
||||
async def async_step_user(self, user_input=None):
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> FlowResult:
|
||||
"""Handle a flow initialized by the user."""
|
||||
self._errors = {}
|
||||
errors = {}
|
||||
|
||||
if user_input is not None:
|
||||
self._async_abort_entries_match(
|
||||
{
|
||||
CONF_LATITUDE: user_input[CONF_LATITUDE],
|
||||
CONF_LONGITUDE: user_input[CONF_LONGITUDE],
|
||||
}
|
||||
self._async_abort_entries_match(user_input)
|
||||
|
||||
api = IPMA_API(async_get_clientsession(self.hass))
|
||||
|
||||
try:
|
||||
location = await Location.get(
|
||||
api,
|
||||
user_input[CONF_LATITUDE],
|
||||
user_input[CONF_LONGITUDE],
|
||||
)
|
||||
except IPMAException as err:
|
||||
_LOGGER.exception(err)
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
return self.async_create_entry(title=location.name, data=user_input)
|
||||
|
||||
return self.async_create_entry(title=user_input[CONF_NAME], data=user_input)
|
||||
|
||||
# default location is set hass configuration
|
||||
return await self._show_config_form(
|
||||
name=HOME_LOCATION_NAME,
|
||||
latitude=self.hass.config.latitude,
|
||||
longitude=self.hass.config.longitude,
|
||||
)
|
||||
|
||||
async def _show_config_form(self, name=None, latitude=None, longitude=None):
|
||||
"""Show the configuration form to edit location data."""
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_NAME, default=name): str,
|
||||
vol.Required(CONF_LATITUDE, default=latitude): cv.latitude,
|
||||
vol.Required(CONF_LONGITUDE, default=longitude): cv.longitude,
|
||||
vol.Required(CONF_LATITUDE): cv.latitude,
|
||||
vol.Required(CONF_LONGITUDE): cv.longitude,
|
||||
}
|
||||
),
|
||||
errors=self._errors,
|
||||
{
|
||||
CONF_LATITUDE: self.hass.config.latitude,
|
||||
CONF_LONGITUDE: self.hass.config.longitude,
|
||||
},
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
@@ -12,6 +12,9 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_location%]"
|
||||
}
|
||||
|
@@ -19,6 +19,10 @@ PLATFORMS = [Platform.SENSOR]
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up IPP from a config entry."""
|
||||
# config flow sets this to either UUID, serial number or None
|
||||
if (device_id := entry.unique_id) is None:
|
||||
device_id = entry.entry_id
|
||||
|
||||
coordinator = IPPDataUpdateCoordinator(
|
||||
hass,
|
||||
host=entry.data[CONF_HOST],
|
||||
@@ -26,6 +30,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
base_path=entry.data[CONF_BASE_PATH],
|
||||
tls=entry.data[CONF_SSL],
|
||||
verify_ssl=entry.data[CONF_VERIFY_SSL],
|
||||
device_id=device_id,
|
||||
)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
|
@@ -29,8 +29,10 @@ class IPPDataUpdateCoordinator(DataUpdateCoordinator[IPPPrinter]):
|
||||
base_path: str,
|
||||
tls: bool,
|
||||
verify_ssl: bool,
|
||||
device_id: str,
|
||||
) -> None:
|
||||
"""Initialize global IPP data updater."""
|
||||
self.device_id = device_id
|
||||
self.ipp = IPP(
|
||||
host=host,
|
||||
port=port,
|
||||
|
@@ -2,6 +2,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
@@ -11,32 +12,21 @@ from .coordinator import IPPDataUpdateCoordinator
|
||||
class IPPEntity(CoordinatorEntity[IPPDataUpdateCoordinator]):
|
||||
"""Defines a base IPP entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
entry_id: str,
|
||||
device_id: str,
|
||||
coordinator: IPPDataUpdateCoordinator,
|
||||
name: str,
|
||||
icon: str,
|
||||
enabled_default: bool = True,
|
||||
description: EntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the IPP entity."""
|
||||
super().__init__(coordinator)
|
||||
self._device_id = device_id
|
||||
self._entry_id = entry_id
|
||||
self._attr_name = name
|
||||
self._attr_icon = icon
|
||||
self._attr_entity_registry_enabled_default = enabled_default
|
||||
|
||||
@property
|
||||
def device_info(self) -> DeviceInfo | None:
|
||||
"""Return device information about this IPP device."""
|
||||
if self._device_id is None:
|
||||
return None
|
||||
self.entity_description = description
|
||||
|
||||
return DeviceInfo(
|
||||
identifiers={(DOMAIN, self._device_id)},
|
||||
self._attr_unique_id = f"{coordinator.device_id}_{description.key}"
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, coordinator.device_id)},
|
||||
manufacturer=self.coordinator.data.info.manufacturer,
|
||||
model=self.coordinator.data.info.model,
|
||||
name=self.coordinator.data.info.name,
|
||||
|
@@ -8,6 +8,6 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["deepmerge", "pyipp"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["pyipp==0.14.3"],
|
||||
"requirements": ["pyipp==0.14.4"],
|
||||
"zeroconf": ["_ipps._tcp.local.", "_ipp._tcp.local."]
|
||||
}
|
||||
|
@@ -1,14 +1,23 @@
|
||||
"""Support for IPP sensors."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.sensor import SensorDeviceClass, SensorEntity
|
||||
from pyipp import Marker, Printer
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import ATTR_LOCATION, PERCENTAGE
|
||||
from homeassistant.const import ATTR_LOCATION, PERCENTAGE, EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
from homeassistant.util.dt import utcnow
|
||||
|
||||
from .const import (
|
||||
@@ -27,6 +36,65 @@ from .coordinator import IPPDataUpdateCoordinator
|
||||
from .entity import IPPEntity
|
||||
|
||||
|
||||
@dataclass
|
||||
class IPPSensorEntityDescriptionMixin:
|
||||
"""Mixin for required keys."""
|
||||
|
||||
value_fn: Callable[[Printer], StateType | datetime]
|
||||
|
||||
|
||||
@dataclass
|
||||
class IPPSensorEntityDescription(
|
||||
SensorEntityDescription, IPPSensorEntityDescriptionMixin
|
||||
):
|
||||
"""Describes IPP sensor entity."""
|
||||
|
||||
attributes_fn: Callable[[Printer], dict[Any, StateType]] = lambda _: {}
|
||||
|
||||
|
||||
def _get_marker_attributes_fn(
|
||||
marker_index: int, attributes_fn: Callable[[Marker], dict[Any, StateType]]
|
||||
) -> Callable[[Printer], dict[Any, StateType]]:
|
||||
return lambda printer: attributes_fn(printer.markers[marker_index])
|
||||
|
||||
|
||||
def _get_marker_value_fn(
|
||||
marker_index: int, value_fn: Callable[[Marker], StateType | datetime]
|
||||
) -> Callable[[Printer], StateType | datetime]:
|
||||
return lambda printer: value_fn(printer.markers[marker_index])
|
||||
|
||||
|
||||
PRINTER_SENSORS: tuple[IPPSensorEntityDescription, ...] = (
|
||||
IPPSensorEntityDescription(
|
||||
key="printer",
|
||||
name=None,
|
||||
translation_key="printer",
|
||||
icon="mdi:printer",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=["idle", "printing", "stopped"],
|
||||
attributes_fn=lambda printer: {
|
||||
ATTR_INFO: printer.info.printer_info,
|
||||
ATTR_SERIAL: printer.info.serial,
|
||||
ATTR_LOCATION: printer.info.location,
|
||||
ATTR_STATE_MESSAGE: printer.state.message,
|
||||
ATTR_STATE_REASON: printer.state.reasons,
|
||||
ATTR_COMMAND_SET: printer.info.command_set,
|
||||
ATTR_URI_SUPPORTED: ",".join(printer.info.printer_uri_supported),
|
||||
},
|
||||
value_fn=lambda printer: printer.state.printer_state,
|
||||
),
|
||||
IPPSensorEntityDescription(
|
||||
key="uptime",
|
||||
translation_key="uptime",
|
||||
icon="mdi:clock-outline",
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda printer: (utcnow() - timedelta(seconds=printer.info.uptime)),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: ConfigEntry,
|
||||
@@ -34,19 +102,34 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up IPP sensor based on a config entry."""
|
||||
coordinator: IPPDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]
|
||||
sensors: list[SensorEntity] = [
|
||||
IPPSensor(
|
||||
coordinator,
|
||||
description,
|
||||
)
|
||||
for description in PRINTER_SENSORS
|
||||
]
|
||||
|
||||
# config flow sets this to either UUID, serial number or None
|
||||
if (unique_id := entry.unique_id) is None:
|
||||
unique_id = entry.entry_id
|
||||
|
||||
sensors: list[SensorEntity] = []
|
||||
|
||||
sensors.append(IPPPrinterSensor(entry.entry_id, unique_id, coordinator))
|
||||
sensors.append(IPPUptimeSensor(entry.entry_id, unique_id, coordinator))
|
||||
|
||||
for marker_index in range(len(coordinator.data.markers)):
|
||||
for index, marker in enumerate(coordinator.data.markers):
|
||||
sensors.append(
|
||||
IPPMarkerSensor(entry.entry_id, unique_id, coordinator, marker_index)
|
||||
IPPSensor(
|
||||
coordinator,
|
||||
IPPSensorEntityDescription(
|
||||
key=f"marker_{index}",
|
||||
name=marker.name,
|
||||
icon="mdi:water",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
attributes_fn=_get_marker_attributes_fn(
|
||||
index,
|
||||
lambda marker: {
|
||||
ATTR_MARKER_HIGH_LEVEL: marker.high_level,
|
||||
ATTR_MARKER_LOW_LEVEL: marker.low_level,
|
||||
ATTR_MARKER_TYPE: marker.marker_type,
|
||||
},
|
||||
),
|
||||
value_fn=_get_marker_value_fn(index, lambda marker: marker.level),
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
async_add_entities(sensors, True)
|
||||
@@ -55,146 +138,14 @@ async def async_setup_entry(
|
||||
class IPPSensor(IPPEntity, SensorEntity):
|
||||
"""Defines an IPP sensor."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
coordinator: IPPDataUpdateCoordinator,
|
||||
enabled_default: bool = True,
|
||||
entry_id: str,
|
||||
unique_id: str,
|
||||
icon: str,
|
||||
key: str,
|
||||
name: str,
|
||||
unit_of_measurement: str | None = None,
|
||||
translation_key: str | None = None,
|
||||
) -> None:
|
||||
"""Initialize IPP sensor."""
|
||||
self._key = key
|
||||
self._attr_unique_id = f"{unique_id}_{key}"
|
||||
self._attr_native_unit_of_measurement = unit_of_measurement
|
||||
self._attr_translation_key = translation_key
|
||||
|
||||
super().__init__(
|
||||
entry_id=entry_id,
|
||||
device_id=unique_id,
|
||||
coordinator=coordinator,
|
||||
name=name,
|
||||
icon=icon,
|
||||
enabled_default=enabled_default,
|
||||
)
|
||||
|
||||
|
||||
class IPPMarkerSensor(IPPSensor):
|
||||
"""Defines an IPP marker sensor."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
entry_id: str,
|
||||
unique_id: str,
|
||||
coordinator: IPPDataUpdateCoordinator,
|
||||
marker_index: int,
|
||||
) -> None:
|
||||
"""Initialize IPP marker sensor."""
|
||||
self.marker_index = marker_index
|
||||
|
||||
super().__init__(
|
||||
coordinator=coordinator,
|
||||
entry_id=entry_id,
|
||||
unique_id=unique_id,
|
||||
icon="mdi:water",
|
||||
key=f"marker_{marker_index}",
|
||||
name=(
|
||||
f"{coordinator.data.info.name} {coordinator.data.markers[marker_index].name}"
|
||||
),
|
||||
unit_of_measurement=PERCENTAGE,
|
||||
)
|
||||
entity_description: IPPSensorEntityDescription
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> dict[str, Any] | None:
|
||||
def extra_state_attributes(self) -> dict[str, Any]:
|
||||
"""Return the state attributes of the entity."""
|
||||
return {
|
||||
ATTR_MARKER_HIGH_LEVEL: self.coordinator.data.markers[
|
||||
self.marker_index
|
||||
].high_level,
|
||||
ATTR_MARKER_LOW_LEVEL: self.coordinator.data.markers[
|
||||
self.marker_index
|
||||
].low_level,
|
||||
ATTR_MARKER_TYPE: self.coordinator.data.markers[
|
||||
self.marker_index
|
||||
].marker_type,
|
||||
}
|
||||
return self.entity_description.attributes_fn(self.coordinator.data)
|
||||
|
||||
@property
|
||||
def native_value(self) -> int | None:
|
||||
def native_value(self) -> StateType | datetime:
|
||||
"""Return the state of the sensor."""
|
||||
level = self.coordinator.data.markers[self.marker_index].level
|
||||
|
||||
if level >= 0:
|
||||
return level
|
||||
|
||||
return None
|
||||
|
||||
|
||||
class IPPPrinterSensor(IPPSensor):
|
||||
"""Defines an IPP printer sensor."""
|
||||
|
||||
_attr_device_class = SensorDeviceClass.ENUM
|
||||
_attr_options = ["idle", "printing", "stopped"]
|
||||
|
||||
def __init__(
|
||||
self, entry_id: str, unique_id: str, coordinator: IPPDataUpdateCoordinator
|
||||
) -> None:
|
||||
"""Initialize IPP printer sensor."""
|
||||
super().__init__(
|
||||
coordinator=coordinator,
|
||||
entry_id=entry_id,
|
||||
unique_id=unique_id,
|
||||
icon="mdi:printer",
|
||||
key="printer",
|
||||
name=coordinator.data.info.name,
|
||||
unit_of_measurement=None,
|
||||
translation_key="printer",
|
||||
)
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> dict[str, Any] | None:
|
||||
"""Return the state attributes of the entity."""
|
||||
return {
|
||||
ATTR_INFO: self.coordinator.data.info.printer_info,
|
||||
ATTR_SERIAL: self.coordinator.data.info.serial,
|
||||
ATTR_LOCATION: self.coordinator.data.info.location,
|
||||
ATTR_STATE_MESSAGE: self.coordinator.data.state.message,
|
||||
ATTR_STATE_REASON: self.coordinator.data.state.reasons,
|
||||
ATTR_COMMAND_SET: self.coordinator.data.info.command_set,
|
||||
ATTR_URI_SUPPORTED: self.coordinator.data.info.printer_uri_supported,
|
||||
}
|
||||
|
||||
@property
|
||||
def native_value(self) -> str:
|
||||
"""Return the state of the sensor."""
|
||||
return self.coordinator.data.state.printer_state
|
||||
|
||||
|
||||
class IPPUptimeSensor(IPPSensor):
|
||||
"""Defines a IPP uptime sensor."""
|
||||
|
||||
_attr_device_class = SensorDeviceClass.TIMESTAMP
|
||||
|
||||
def __init__(
|
||||
self, entry_id: str, unique_id: str, coordinator: IPPDataUpdateCoordinator
|
||||
) -> None:
|
||||
"""Initialize IPP uptime sensor."""
|
||||
super().__init__(
|
||||
coordinator=coordinator,
|
||||
enabled_default=False,
|
||||
entry_id=entry_id,
|
||||
unique_id=unique_id,
|
||||
icon="mdi:clock-outline",
|
||||
key="uptime",
|
||||
name=f"{coordinator.data.info.name} Uptime",
|
||||
)
|
||||
|
||||
@property
|
||||
def native_value(self) -> datetime:
|
||||
"""Return the state of the sensor."""
|
||||
return utcnow() - timedelta(seconds=self.coordinator.data.info.uptime)
|
||||
return self.entity_description.value_fn(self.coordinator.data)
|
||||
|
@@ -40,6 +40,9 @@
|
||||
"idle": "[%key:common::state::idle%]",
|
||||
"stopped": "Stopped"
|
||||
}
|
||||
},
|
||||
"uptime": {
|
||||
"name": "Uptime"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -27,9 +27,10 @@ DOMAIN = "kitchen_sink"
|
||||
|
||||
|
||||
COMPONENTS_WITH_DEMO_PLATFORM = [
|
||||
Platform.SENSOR,
|
||||
Platform.LOCK,
|
||||
Platform.IMAGE,
|
||||
Platform.LAWN_MOWER,
|
||||
Platform.LOCK,
|
||||
Platform.SENSOR,
|
||||
Platform.WEATHER,
|
||||
]
|
||||
|
||||
|
homeassistant/components/kitchen_sink/lawn_mower.py (new file, 100 lines)
@@ -0,0 +1,100 @@
|
||||
"""Demo platform that has a couple fake lawn mowers."""
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.components.lawn_mower import (
|
||||
LawnMowerActivity,
|
||||
LawnMowerEntity,
|
||||
LawnMowerEntityFeature,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the Demo lawn mowers."""
|
||||
async_add_entities(
|
||||
[
|
||||
DemoLawnMower(
|
||||
"kitchen_sink_mower_001",
|
||||
"Mower can mow",
|
||||
LawnMowerActivity.DOCKED,
|
||||
LawnMowerEntityFeature.START_MOWING,
|
||||
),
|
||||
DemoLawnMower(
|
||||
"kitchen_sink_mower_002",
|
||||
"Mower can dock",
|
||||
LawnMowerActivity.MOWING,
|
||||
LawnMowerEntityFeature.DOCK | LawnMowerEntityFeature.START_MOWING,
|
||||
),
|
||||
DemoLawnMower(
|
||||
"kitchen_sink_mower_003",
|
||||
"Mower can pause",
|
||||
LawnMowerActivity.DOCKED,
|
||||
LawnMowerEntityFeature.PAUSE | LawnMowerEntityFeature.START_MOWING,
|
||||
),
|
||||
DemoLawnMower(
|
||||
"kitchen_sink_mower_004",
|
||||
"Mower can do all",
|
||||
LawnMowerActivity.DOCKED,
|
||||
LawnMowerEntityFeature.DOCK
|
||||
| LawnMowerEntityFeature.PAUSE
|
||||
| LawnMowerEntityFeature.START_MOWING,
|
||||
),
|
||||
DemoLawnMower(
|
||||
"kitchen_sink_mower_005",
|
||||
"Mower is paused",
|
||||
LawnMowerActivity.PAUSED,
|
||||
LawnMowerEntityFeature.DOCK
|
||||
| LawnMowerEntityFeature.PAUSE
|
||||
| LawnMowerEntityFeature.START_MOWING,
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Everything but the Kitchen Sink config entry."""
|
||||
await async_setup_platform(hass, {}, async_add_entities)
|
||||
|
||||
|
||||
class DemoLawnMower(LawnMowerEntity):
|
||||
"""Representation of a Demo lawn mower."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
unique_id: str,
|
||||
name: str,
|
||||
activity: LawnMowerActivity,
|
||||
features: LawnMowerEntityFeature = LawnMowerEntityFeature(0),
|
||||
) -> None:
|
||||
"""Initialize the lawn mower."""
|
||||
self._attr_name = name
|
||||
self._attr_unique_id = unique_id
|
||||
self._attr_supported_features = features
|
||||
self._attr_activity = activity
|
||||
|
||||
async def async_start_mowing(self) -> None:
|
||||
"""Start mowing."""
|
||||
self._attr_activity = LawnMowerActivity.MOWING
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_dock(self) -> None:
|
||||
"""Start docking."""
|
||||
self._attr_activity = LawnMowerActivity.DOCKED
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_pause(self) -> None:
|
||||
"""Pause mower."""
|
||||
self._attr_activity = LawnMowerActivity.PAUSED
|
||||
self.async_write_ha_state()
|
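# A hedged usage sketch (assumes a running Home Assistant instance exposing
# the demo mowers above): calling the new lawn_mower services. The entity id
# is illustrative; the actual id depends on how the registry slugifies the
# friendly name.
async def exercise_demo_mower(hass) -> None:
    """Start and then pause one of the kitchen_sink demo mowers."""
    await hass.services.async_call(
        "lawn_mower",
        "start_mowing",
        {"entity_id": "lawn_mower.mower_can_pause"},
        blocking=True,
    )
    await hass.services.async_call(
        "lawn_mower",
        "pause",
        {"entity_id": "lawn_mower.mower_can_pause"},
        blocking=True,
    )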
@@ -47,93 +47,93 @@ class KrakenSensorEntityDescription(SensorEntityDescription, KrakenRequiredKeysM
|
||||
SENSOR_TYPES: tuple[KrakenSensorEntityDescription, ...] = (
|
||||
KrakenSensorEntityDescription(
|
||||
key="ask",
|
||||
name="Ask",
|
||||
translation_key="ask",
|
||||
value_fn=lambda x, y: x.data[y]["ask"][0],
|
||||
),
|
||||
KrakenSensorEntityDescription(
|
||||
key="ask_volume",
|
||||
name="Ask Volume",
|
||||
translation_key="ask_volume",
|
||||
value_fn=lambda x, y: x.data[y]["ask"][1],
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
KrakenSensorEntityDescription(
|
||||
key="bid",
|
||||
name="Bid",
|
||||
translation_key="bid",
|
||||
value_fn=lambda x, y: x.data[y]["bid"][0],
|
||||
),
|
||||
KrakenSensorEntityDescription(
|
||||
key="bid_volume",
|
||||
name="Bid Volume",
|
||||
translation_key="bid_volume",
|
||||
value_fn=lambda x, y: x.data[y]["bid"][1],
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
KrakenSensorEntityDescription(
|
||||
key="volume_today",
|
||||
name="Volume Today",
|
||||
translation_key="volume_today",
|
||||
value_fn=lambda x, y: x.data[y]["volume"][0],
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
KrakenSensorEntityDescription(
|
||||
key="volume_last_24h",
|
||||
name="Volume last 24h",
|
||||
translation_key="volume_last_24h",
|
||||
value_fn=lambda x, y: x.data[y]["volume"][1],
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
KrakenSensorEntityDescription(
|
||||
key="volume_weighted_average_today",
|
||||
name="Volume weighted average today",
|
||||
translation_key="volume_weighted_average_today",
|
||||
value_fn=lambda x, y: x.data[y]["volume_weighted_average"][0],
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
KrakenSensorEntityDescription(
|
||||
key="volume_weighted_average_last_24h",
|
||||
name="Volume weighted average last 24h",
|
||||
translation_key="volume_weighted_average_last_24h",
|
||||
value_fn=lambda x, y: x.data[y]["volume_weighted_average"][1],
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
KrakenSensorEntityDescription(
|
||||
key="number_of_trades_today",
|
||||
name="Number of trades today",
|
||||
translation_key="number_of_trades_today",
|
||||
value_fn=lambda x, y: x.data[y]["number_of_trades"][0],
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
KrakenSensorEntityDescription(
|
||||
key="number_of_trades_last_24h",
|
||||
name="Number of trades last 24h",
|
||||
translation_key="number_of_trades_last_24h",
|
||||
value_fn=lambda x, y: x.data[y]["number_of_trades"][1],
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
KrakenSensorEntityDescription(
|
||||
key="last_trade_closed",
|
||||
name="Last trade closed",
|
||||
translation_key="last_trade_closed",
|
||||
value_fn=lambda x, y: x.data[y]["last_trade_closed"][0],
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
KrakenSensorEntityDescription(
|
||||
key="low_today",
|
||||
name="Low today",
|
||||
translation_key="low_today",
|
||||
value_fn=lambda x, y: x.data[y]["low"][0],
|
||||
),
|
||||
KrakenSensorEntityDescription(
|
||||
key="low_last_24h",
|
||||
name="Low last 24h",
|
||||
translation_key="low_last_24h",
|
||||
value_fn=lambda x, y: x.data[y]["low"][1],
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
KrakenSensorEntityDescription(
|
||||
key="high_today",
|
||||
name="High today",
|
||||
translation_key="high_today",
|
||||
value_fn=lambda x, y: x.data[y]["high"][0],
|
||||
),
|
||||
KrakenSensorEntityDescription(
|
||||
key="high_last_24h",
|
||||
name="High last 24h",
|
||||
translation_key="high_last_24h",
|
||||
value_fn=lambda x, y: x.data[y]["high"][1],
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
KrakenSensorEntityDescription(
|
||||
key="opening_price_today",
|
||||
name="Opening price today",
|
||||
translation_key="opening_price_today",
|
||||
value_fn=lambda x, y: x.data[y]["opening_price"],
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
@@ -207,6 +207,9 @@ class KrakenSensor(
|
||||
|
||||
entity_description: KrakenSensorEntityDescription
|
||||
|
||||
_attr_state_class = SensorStateClass.MEASUREMENT
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
kraken_data: KrakenData,
|
||||
@@ -233,7 +236,6 @@ class KrakenSensor(
|
||||
).lower()
|
||||
self._received_data_at_least_once = False
|
||||
self._available = True
|
||||
self._attr_state_class = SensorStateClass.MEASUREMENT
|
||||
|
||||
self._attr_device_info = DeviceInfo(
|
||||
configuration_url="https://www.kraken.com/",
|
||||
@@ -242,7 +244,6 @@ class KrakenSensor(
|
||||
manufacturer="Kraken.com",
|
||||
name=self._device_name,
|
||||
)
|
||||
self._attr_has_entity_name = True
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Handle entity which will be added."""
|
||||
|
@@ -18,5 +18,57 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"ask": {
|
||||
"name": "Ask"
|
||||
},
|
||||
"ask_volume": {
|
||||
"name": "Ask volume"
|
||||
},
|
||||
"bid": {
|
||||
"name": "Bid"
|
||||
},
|
||||
"bid_volume": {
|
||||
"name": "Bid volume"
|
||||
},
|
||||
"volume_today": {
|
||||
"name": "Volume today"
|
||||
},
|
||||
"volume_last_24h": {
|
||||
"name": "Volume last 24h"
|
||||
},
|
||||
"volume_weighted_average_today": {
|
||||
"name": "Volume weighted average today"
|
||||
},
|
||||
"volume_weighted_average_last_24h": {
|
||||
"name": "Volume weighted average last 24h"
|
||||
},
|
||||
"number_of_trades_today": {
|
||||
"name": "Number of trades today"
|
||||
},
|
||||
"number_of_trades_last_24h": {
|
||||
"name": "Number of trades last 24h"
|
||||
},
|
||||
"last_trade_closed": {
|
||||
"name": "Last trade closed"
|
||||
},
|
||||
"low_today": {
|
||||
"name": "Low today"
|
||||
},
|
||||
"low_last_24h": {
|
||||
"name": "Low last 24h"
|
||||
},
|
||||
"high_today": {
|
||||
"name": "High today"
|
||||
},
|
||||
"high_last_24h": {
|
||||
"name": "High last 24h"
|
||||
},
|
||||
"opening_price_today": {
|
||||
"name": "Opening price today"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -87,6 +87,8 @@ class LastFmSensor(CoordinatorEntity[LastFMDataUpdateCoordinator], SensorEntity)
|
||||
|
||||
_attr_attribution = "Data provided by Last.fm"
|
||||
_attr_icon = "mdi:radio-fm"
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = None
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@@ -98,7 +100,6 @@ class LastFmSensor(CoordinatorEntity[LastFMDataUpdateCoordinator], SensorEntity)
|
||||
super().__init__(coordinator)
|
||||
self._username = username
|
||||
self._attr_unique_id = hashlib.sha256(username.encode("utf-8")).hexdigest()
|
||||
self._attr_name = username
|
||||
self._attr_device_info = DeviceInfo(
|
||||
configuration_url="https://www.last.fm",
|
||||
entry_type=DeviceEntryType.SERVICE,
|
||||
|
homeassistant/components/lawn_mower/__init__.py (new file, 120 lines)
@@ -0,0 +1,120 @@
|
||||
"""The lawn mower integration."""
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import final
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.config_validation import ( # noqa: F401
|
||||
PLATFORM_SCHEMA,
|
||||
PLATFORM_SCHEMA_BASE,
|
||||
)
|
||||
from homeassistant.helpers.entity import Entity, EntityDescription
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import (
|
||||
DOMAIN,
|
||||
SERVICE_DOCK,
|
||||
SERVICE_PAUSE,
|
||||
SERVICE_START_MOWING,
|
||||
LawnMowerActivity,
|
||||
LawnMowerEntityFeature,
|
||||
)
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=60)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the lawn_mower component."""
|
||||
component = hass.data[DOMAIN] = EntityComponent[LawnMowerEntity](
|
||||
_LOGGER, DOMAIN, hass, SCAN_INTERVAL
|
||||
)
|
||||
await component.async_setup(config)
|
||||
|
||||
component.async_register_entity_service(
|
||||
SERVICE_START_MOWING,
|
||||
{},
|
||||
"async_start_mowing",
|
||||
[LawnMowerEntityFeature.START_MOWING],
|
||||
)
|
||||
component.async_register_entity_service(
|
||||
SERVICE_PAUSE, {}, "async_pause", [LawnMowerEntityFeature.PAUSE]
|
||||
)
|
||||
component.async_register_entity_service(
|
||||
SERVICE_DOCK, {}, "async_dock", [LawnMowerEntityFeature.DOCK]
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up lawn mower devices."""
|
||||
component: EntityComponent[LawnMowerEntity] = hass.data[DOMAIN]
|
||||
return await component.async_setup_entry(entry)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
component: EntityComponent[LawnMowerEntity] = hass.data[DOMAIN]
|
||||
return await component.async_unload_entry(entry)
|
||||
|
||||
|
||||
@dataclass
|
||||
class LawnMowerEntityEntityDescription(EntityDescription):
|
||||
"""A class that describes lawn mower entities."""
|
||||
|
||||
|
||||
class LawnMowerEntity(Entity):
|
||||
"""Base class for lawn mower entities."""
|
||||
|
||||
entity_description: LawnMowerEntityEntityDescription
|
||||
_attr_activity: LawnMowerActivity | None = None
|
||||
_attr_supported_features: LawnMowerEntityFeature = LawnMowerEntityFeature(0)
|
||||
|
||||
@final
|
||||
@property
|
||||
def state(self) -> str | None:
|
||||
"""Return the current state."""
|
||||
if (activity := self.activity) is None:
|
||||
return None
|
||||
return str(activity)
|
||||
|
||||
@property
|
||||
def activity(self) -> LawnMowerActivity | None:
|
||||
"""Return the current lawn mower activity."""
|
||||
return self._attr_activity
|
||||
|
||||
@property
|
||||
def supported_features(self) -> LawnMowerEntityFeature:
|
||||
"""Flag lawn mower features that are supported."""
|
||||
return self._attr_supported_features
|
||||
|
||||
def start_mowing(self) -> None:
|
||||
"""Start or resume mowing."""
|
||||
raise NotImplementedError()
|
||||
|
||||
async def async_start_mowing(self) -> None:
|
||||
"""Start or resume mowing."""
|
||||
await self.hass.async_add_executor_job(self.start_mowing)
|
||||
|
||||
def dock(self) -> None:
|
||||
"""Dock the mower."""
|
||||
raise NotImplementedError()
|
||||
|
||||
async def async_dock(self) -> None:
|
||||
"""Dock the mower."""
|
||||
await self.hass.async_add_executor_job(self.dock)
|
||||
|
||||
def pause(self) -> None:
|
||||
"""Pause the lawn mower."""
|
||||
raise NotImplementedError()
|
||||
|
||||
async def async_pause(self) -> None:
|
||||
"""Pause the lawn mower."""
|
||||
await self.hass.async_add_executor_job(self.pause)
|
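The base class only provides the sync-to-async bridging; a concrete platform is expected to subclass LawnMowerEntity, declare its supported features and keep _attr_activity up to date. A minimal sketch of such a platform follows; the DemoLawnMower class, its entity name and its in-memory behaviour are assumptions for illustration and are not part of this diff.

    """Illustrative only: a minimal mower entity built on the new base class."""
    from homeassistant.components.lawn_mower import (
        LawnMowerActivity,
        LawnMowerEntity,
        LawnMowerEntityFeature,
    )


    class DemoLawnMower(LawnMowerEntity):
        """Hypothetical mower that keeps its activity in memory."""

        _attr_name = "Demo mower"
        _attr_supported_features = (
            LawnMowerEntityFeature.START_MOWING
            | LawnMowerEntityFeature.PAUSE
            | LawnMowerEntityFeature.DOCK
        )

        def __init__(self) -> None:
            """Start out docked."""
            self._attr_activity = LawnMowerActivity.DOCKED

        async def async_start_mowing(self) -> None:
            """Begin mowing (overrides the executor-based default)."""
            self._attr_activity = LawnMowerActivity.MOWING
            self.async_write_ha_state()

        async def async_pause(self) -> None:
            """Pause mowing."""
            self._attr_activity = LawnMowerActivity.PAUSED
            self.async_write_ha_state()

        async def async_dock(self) -> None:
            """Send the mower home."""
            self._attr_activity = LawnMowerActivity.DOCKED
            self.async_write_ha_state()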
homeassistant/components/lawn_mower/const.py (new file, 33 lines)
@@ -0,0 +1,33 @@
|
||||
"""Constants for the lawn mower integration."""
|
||||
from enum import IntFlag, StrEnum
|
||||
|
||||
|
||||
class LawnMowerActivity(StrEnum):
|
||||
"""Activity state of lawn mower devices."""
|
||||
|
||||
ERROR = "error"
|
||||
"""Device is in error state, needs assistance."""
|
||||
|
||||
PAUSED = "paused"
|
||||
"""Paused during activity."""
|
||||
|
||||
MOWING = "mowing"
|
||||
"""Device is mowing."""
|
||||
|
||||
DOCKED = "docked"
|
||||
"""Device is docked."""
|
||||
|
||||
|
||||
class LawnMowerEntityFeature(IntFlag):
|
||||
"""Supported features of the lawn mower entity."""
|
||||
|
||||
START_MOWING = 1
|
||||
PAUSE = 2
|
||||
DOCK = 4
|
||||
|
||||
|
||||
DOMAIN = "lawn_mower"
|
||||
|
||||
SERVICE_START_MOWING = "start_mowing"
|
||||
SERVICE_PAUSE = "pause"
|
||||
SERVICE_DOCK = "dock"
|
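Because LawnMowerEntityFeature is an IntFlag, capabilities compose with bitwise OR and are tested with bitwise AND, which is how the entity services registered in __init__.py are gated. A small sketch of that arithmetic (variable names are illustrative only):

    from homeassistant.components.lawn_mower.const import LawnMowerEntityFeature

    # A mower that can start and dock, but not pause: 1 | 4 == 5.
    supported = LawnMowerEntityFeature.START_MOWING | LawnMowerEntityFeature.DOCK

    # Gate behaviour on a single capability: 5 & 2 == 0, so this branch is skipped.
    if supported & LawnMowerEntityFeature.PAUSE:
        print("pause supported")
    else:
        print("pause not supported")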
homeassistant/components/lawn_mower/manifest.json (new file, 8 lines)
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"domain": "lawn_mower",
|
||||
"name": "Lawn Mower",
|
||||
"codeowners": ["@home-assistant/core"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/lawn_mower",
|
||||
"integration_type": "entity",
|
||||
"quality_scale": "internal"
|
||||
}
|
homeassistant/components/lawn_mower/services.yaml (new file, 22 lines)
@@ -0,0 +1,22 @@
|
||||
# Describes the format for available lawn_mower services
|
||||
|
||||
start_mowing:
|
||||
target:
|
||||
entity:
|
||||
domain: lawn_mower
|
||||
supported_features:
|
||||
- lawn_mower.LawnMowerEntityFeature.START_MOWING
|
||||
|
||||
dock:
|
||||
target:
|
||||
entity:
|
||||
domain: lawn_mower
|
||||
supported_features:
|
||||
- lawn_mower.LawnMowerEntityFeature.DOCK
|
||||
|
||||
pause:
|
||||
target:
|
||||
entity:
|
||||
domain: lawn_mower
|
||||
supported_features:
|
||||
- lawn_mower.LawnMowerEntityFeature.PAUSE
|
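Each service targets lawn_mower entities and is only offered for entities whose supported_features include the matching flag. As a usage sketch, a call from Python could look like the following; the entity ID lawn_mower.front_yard is invented for the example and is not part of this diff.

    # Illustrative call site only; `hass` is a running HomeAssistant instance.
    await hass.services.async_call(
        "lawn_mower",
        "start_mowing",
        {},
        blocking=True,
        target={"entity_id": "lawn_mower.front_yard"},
    )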
homeassistant/components/lawn_mower/strings.json (new file, 28 lines)
@@ -0,0 +1,28 @@
|
||||
{
|
||||
"title": "Lawn mower",
|
||||
"entity_component": {
|
||||
"_": {
|
||||
"name": "[%key:component::lawn_mower::title%]",
|
||||
"state": {
|
||||
"error": "Error",
|
||||
"paused": "Paused",
|
||||
"mowing": "Mowing",
|
||||
"docked": "Docked"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"start_mowing": {
|
||||
"name": "Start mowing",
|
||||
"description": "Starts the mowing task."
|
||||
},
|
||||
"dock": {
|
||||
"name": "Return to dock",
|
||||
"description": "Stops the mowing task and returns to the dock."
|
||||
},
|
||||
"pause": {
|
||||
"name": "Pause",
|
||||
"description": "Pauses the mowing task."
|
||||
}
|
||||
}
|
||||
}
|
Some files were not shown because too many files have changed in this diff.